repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
sapia-oss/corus | modules/client/src/main/java/org/sapia/corus/client/common/encryption/Encryptable.java | 1006 | package org.sapia.corus.client.common.encryption;
import org.sapia.corus.client.common.OptionalValue;
/**
* Specifies the behavior for encrypting/decrypting.
*
* @author yduchesne
*
*/
public interface Encryptable<T> {

  /**
   * Produces an encrypted copy of this instance. This instance is left
   * unmodified.
   *
   * @param ctx the {@link EncryptionContext} to use for encryption.
   * @return a new, encrypted copy of this instance.
   */
  public Encryptable<T> encrypt(EncryptionContext ctx);

  /**
   * Produces a decrypted copy of this instance. This instance is left
   * unmodified.
   *
   * @param ctx the {@link DecryptionContext} to use for decryption.
   * @return a new, decrypted copy of this instance.
   */
  public Encryptable<T> decrypt(DecryptionContext ctx);

  /**
   * @return <code>true</code> if this instance's value is encrypted.
   */
  public boolean isEncrypted();

  /**
   * @return <code>true</code> if this instance's value is decrypted.
   */
  public boolean isDecrypted();

  /**
   * @return this instance's value, which will not be set if this instance
   * is in the encrypted state.
   */
  public OptionalValue<T> getValue();
}
| gpl-3.0 |
jlesquembre/jlle | jlle/releaser/vcs.py | 10459 | import logging
import os
import re
import sys
from jlle.releaser import pypi
from jlle.releaser import utils
# Matches a "version = <digit>" assignment, as typically found in setup.py.
VERSION_PATTERN = re.compile(r"""
version\W*=\W* # 'version = ' with possible whitespace
\d # Some digit, start of version.
""", re.VERBOSE)
# Matches a "__version__ = <digit>" assignment inside a Python module.
UNDERSCORED_VERSION_PATTERN = re.compile(r"""
__version__\W*=\W* # '__version__ = ' with possible whitespace
\d # Some digit, start of version.
""", re.VERBOSE)
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
class BaseVersionControl(object):
    "Shared implementation between all version control systems"

    internal_filename = ''  # e.g. '.svn' or '.hg'
    setuptools_helper_package = ''

    def __init__(self):
        # Everything operates relative to the current working directory.
        self.workingdir = os.getcwd()

    def is_setuptools_helper_package_installed(self):
        """Return True when the VCS-specific setuptools helper is importable."""
        try:
            __import__(self.setuptools_helper_package)
        except ImportError:
            return False
        return True

    def get_setup_py_version(self):
        """Return the version reported by ``setup.py --version``.

        Returns None when there is no setup.py.  Exits the process when
        setup.py itself is broken (its output starts with a traceback).
        """
        if os.path.exists('setup.py'):
            # First run egg_info, as that may get rid of some warnings
            # that otherwise end up in the extracted version, like
            # UserWarnings.
            utils.system(utils.setup_py('egg_info'))
            version = utils.system(utils.setup_py('--version'))
            if version.startswith('Traceback'):
                # Likely cause is for example forgetting to 'import
                # os' when using 'os' in setup.py.
                logger.critical('The setup.py of this package has an error:')
                print(version)
                logger.critical('No version found.')
                sys.exit(1)
            return utils.strip_version(version)

    def get_setup_py_name(self):
        """Return the package name reported by ``setup.py --name`` (or None)."""
        if os.path.exists('setup.py'):
            # First run egg_info, as that may get rid of some warnings
            # that otherwise end up in the extracted name, like
            # UserWarnings.
            utils.system(utils.setup_py('egg_info'))
            return utils.system(utils.setup_py('--name')).strip()

    def get_version_txt_version(self):
        """Return the version stored in version.txt/version, if present."""
        version_file = self.filefind(['version.txt', 'version'])
        if version_file:
            # Context manager so the handle is always closed (the original
            # leaked the open file object).
            with open(version_file, 'r') as f:
                version = f.read()
            return utils.strip_version(version)

    def get_python_file_version(self):
        """Return the ``__version__`` value from the configured Python file.

        Returns None when setup.cfg does not point at such a file or when
        no ``__version__`` line is found in it.
        """
        setup_cfg = pypi.SetupConfig()
        if not setup_cfg.python_file_with_version():
            return
        with open(setup_cfg.python_file_with_version()) as f:
            lines = f.read().split('\n')
        for line in lines:
            match = UNDERSCORED_VERSION_PATTERN.search(line)
            if match:
                logger.debug("Matching __version__ line found: %r", line)
                # Take everything after the first '=' (the pattern guarantees
                # there is one).  The original used line.lstrip('__version__'),
                # but str.lstrip strips a *set of characters*, not a prefix,
                # which is fragile.
                line = line.split('=', 1)[1].strip()
                line = line.replace('"', '').replace("'", "")
                return utils.strip_version(line)

    def filefind(self, names):
        """Return first found file matching name (case-insensitive).

        Some packages have docs/HISTORY.txt and
        package/name/HISTORY.txt. We make sure we only return the one
        in the docs directory if no other can be found.

        'names' can be a string or a list of strings; if you have both
        a CHANGES.txt and a docs/HISTORY.txt, you want the top level
        CHANGES.txt to be found first.
        """
        if isinstance(names, str):
            names = [names]
        # Case-insensitive matching on the basename.
        names = [name.lower() for name in names]
        found = []
        for fullpath in self.list_files():
            filename = os.path.basename(fullpath)
            if filename.lower() in names:
                logger.debug("Found %s", fullpath)
                if not os.path.exists(fullpath):
                    # Strange. It at least happens in the tests when
                    # we deliberately remove a CHANGES.txt file.
                    logger.warning("Found file %s in version control but not "
                                   "on file system.", fullpath)
                    continue
                found.append(fullpath)
        if not found:
            return
        if len(found) > 1:
            # Prefer the shortest path (usually the top-level file).
            found.sort(key=len)
            logger.warning("Found more than one file, picked the shortest one "
                           "to change: %s", ', '.join(found))
        return found[0]

    def history_file(self, location=None):
        """Return history file location, or None when none can be found.

        A hardcoded ``location`` (from the config file) wins when it exists;
        otherwise CHANGES/HISTORY/CHANGELOG (optionally with .rst/.txt/
        .markdown extensions) are searched in version control.
        """
        if location:
            # Hardcoded location passed from the config file.
            if os.path.exists(location):
                return location
            logger.warning("The specified history file %s doesn't exist",
                           location)
        filenames = []
        for base in ['CHANGES', 'HISTORY', 'CHANGELOG']:
            filenames.append(base)
            for extension in ['rst', 'txt', 'markdown']:
                filenames.append('.'.join([base, extension]))
        history = self.filefind(filenames)
        if history:
            return history

    def tag_exists(self, version):
        """Check if a tag has already been created with the name of the
        version.
        """
        return version in self.available_tags()

    def _extract_version(self):
        """Extract the version from setup.py or version.txt.

        If there is a setup.py and it gives back a version that differs
        from version.txt then this version.txt is not the one we should
        use.  This can happen in packages like ZopeSkel that have one or
        more version.txt files that have nothing to do with the version of
        the package itself.

        So when in doubt: use setup.py.

        But if there's an explicitly configured Python file that has to be
        searched for a ``__version__`` attribute, use that one.
        """
        return (self.get_python_file_version() or
                self.get_setup_py_version() or
                self.get_version_txt_version())

    def _update_version(self, version):
        """Find out where to change the version and change it.

        There are three places where the version can be defined. The first
        one is an explicitly defined Python file with a ``__version__``
        attribute. The second one is some version.txt that gets read by
        setup.py. The third is directly in setup.py.
        """
        if self.get_python_file_version():
            setup_cfg = pypi.SetupConfig()
            filename = setup_cfg.python_file_with_version()
            with open(filename) as f:
                lines = f.read().split('\n')
            for index, line in enumerate(lines):
                if UNDERSCORED_VERSION_PATTERN.search(line):
                    lines[index] = "__version__ = '%s'" % version
            with open(filename, 'w') as f:
                f.write('\n'.join(lines))
            logger.info("Set __version__ in %s to %r", filename, version)
            return
        versionfile = self.filefind(['version.txt', 'version'])
        if versionfile:
            # We have a version.txt file but does it match the setup.py
            # version (if any)?
            setup_version = self.get_setup_py_version()
            if not setup_version or (setup_version ==
                                     self.get_version_txt_version()):
                with open(versionfile, 'w') as f:
                    f.write(version + '\n')
                logger.info("Changed %s to %r", versionfile, version)
                return
        good_version = "version = '%s'" % version
        with open('setup.py') as f:
            setup_lines = f.read().split('\n')
        for line_number, line in enumerate(setup_lines):
            if VERSION_PATTERN.search(line):
                logger.debug("Matching version line found: %r", line)
                if line.startswith(' '):
                    # oh, probably '    version = 1.0,' line.
                    indentation = line.split('version')[0]
                    # Note: no spaces around the '='.
                    good_version = indentation + "version='%s'," % version
                setup_lines[line_number] = good_version
                break
        with open('setup.py', 'w') as f:
            f.write('\n'.join(setup_lines))
        logger.info("Set setup.py's version to %r", version)

    version = property(_extract_version, _update_version)

    #
    # Methods that need to be supplied by child classes
    #

    @property
    def name(self):
        "Name of the project under version control"
        raise NotImplementedError()

    def available_tags(self):
        """Return available tags."""
        raise NotImplementedError()

    def prepare_checkout_dir(self, prefix=None):
        """Return a temporary checkout location. Create this directory first
        if necessary.

        ``prefix`` was added to the base signature because
        :meth:`checkout_from_tag` calls this with a prefix argument;
        subclasses should accept it.
        """
        raise NotImplementedError()

    def tag_url(self, version):
        "URL to tag of version."
        raise NotImplementedError()

    def cmd_diff(self):
        "diff command"
        raise NotImplementedError()

    def cmd_commit(self, message):
        "commit command: should specify a verbose option if possible"
        raise NotImplementedError()

    def cmd_diff_last_commit_against_tag(self, version):
        """Return diffs between a tagged version and the last commit of
        the working copy.
        """
        raise NotImplementedError()

    def cmd_log_since_tag(self, version):
        """Return log since a tagged version till the last commit of
        the working copy.
        """
        raise NotImplementedError()

    def cmd_create_tag(self, version):
        "Create a tag from a version name."
        raise NotImplementedError()

    def checkout_from_tag(self, version):
        """Check the tagged ``version`` out into a fresh temp dir and chdir
        into it.  Relies on ``cmd_checkout_from_tag`` being supplied by the
        child class.
        """
        package = self.name
        prefix = '%s-%s-' % (package, version)
        tagdir = self.prepare_checkout_dir(prefix)
        os.chdir(tagdir)
        cmd = self.cmd_checkout_from_tag(version, tagdir)
        print(utils.system(cmd))

    def is_clean_checkout(self):
        "Is this a clean checkout?"
        raise NotImplementedError()

    def push_commands(self):
        """Return commands to push changes to the server.

        Needed if a commit isn't enough.
        """
        return []
| gpl-3.0 |
Theophilix/event-table-edit | site/views/appointmentform/tmpl/default.php | 9345 | <?php
/**
* $Id: default.php 144 2011-01-13 08:17:03Z kapsl $.
*
* @copyright (C) 2007 - 2020 Manuel Kaspar and Theophilix
* @license GNU/GPL, see LICENSE.php in the installation package
* This file is part of Event Table Edit
*
* Event Table Edit is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* Event Table Edit is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with Event Table Edit. If not, see <http://www.gnu.org/licenses/>.
*/
// no direct access
defined('_JEXEC') or die;
JHtml::_('behavior.tooltip');
JHtml::_('behavior.formvalidation');
$app = JFactory::getApplication();
$main = $app->input;
$Itemid = $main->getInt('Itemid', '');
$id = $main->getInt('id', '');
$postget = $main->getArray();
$totalappointments_row_col = explode(',', $postget['rowcolmix']);
$datesofhead = [];
$appointmentsdate = [];
foreach ($totalappointments_row_col as $rowcol) {
$temps = explode('_', $rowcol);
$rops = $temps[0];
$cops = $temps[1];
$cols = $this->heads[$cops];
$rows = $this->rows[$rops];
$details = $rows[$cops];
//if($details == 'free'){
// add weekday in first row (head) //
if (1 === (int)$this->item->showdayname) {
$namesofday = strtoupper(date('l', strtotime(str_replace('.', '-', trim($cols->name)))));
$datesofhead[] = JTEXT::_('COM_EVENTTABLEEDIT_'.strtoupper($namesofday)).' '.$cols->name.' '.JText::_('COM_EVENTTABLEEDIT_UM').' '.$rows['0'];
} else {
$datesofhead[] = $cols->name.JText::_('COM_EVENTTABLEEDIT_UM').$rows['0'];
}
$appointmentsdate[] = str_replace('.', '-', $cols->name).' '.$rows['0'].':00';
//}
// END add weekday in first row (head) //
}
$datesofhead = implode(',', $datesofhead);
?>
<!--
<p><?php echo JText::sprintf('COM_EVENTTABLEEDIT_BOOK_BEGIN', $datesofhead); ?></p>
<p>
<?php echo JText::_('COM_EVENTTABLEEDIT_BUTTON_GO_BACKTEXT'); ?>
</p>-->
<script>
function goback1(){
window.location = "<?php echo JRoute::_('index.php?option=com_eventtableedit&view=appointments&id='.$id.'&Itemid='.$Itemid, false); ?>";
}
</script>
<div class="appointmentforms">
<h2>
<?php echo JText::_('COM_EVENTTABLEEDIT_RESERVATION'); ?>
</h2>
<div class="span6" style="float: right;">
<?php
$model = $this->getModel('appointmentform');
$cols = $model->getHeads();
$rows = $model->getRows();
$totalappointments_row_col = explode(',', $postget['rowcolmix']);
foreach ($totalappointments_row_col as $rowcol) {
$temps = explode('_', $rowcol);
$rops = $temps[0];
$cops = $temps[1];
$roweditpost = $rops;
$coleditpost = $cops;
$to_time = strtotime($rows['rows'][0][0]);
$from_time = strtotime($rows['rows'][1][0]);
$mintdiffrence = round(abs($from_time - $to_time) / 60, 2);
}
$postdateappointment = $appointmentsdate;
if (count($appointmentsdate) > 0) {
$timeArr = $postdateappointment;
sort($timeArr);
$date_array = [];
$start = '';
$ref_start = &$start;
$end = '';
$ref_end = &$end;
foreach ($timeArr as $time) {
$date = date('Y-m-d', strtotime($time));
if ('' === $start || strtotime($time) < strtotime($start)) {
$ref_start = $time;
}
if (strtotime($time) > strtotime($end) && strtotime($time) <= strtotime('+ '.$mintdiffrence.' minutes', strtotime($end))) {
$ref_end = $time;
} else {
$ref_start = $time;
$ref_end = $time;
$date_array[$time] = $time;
}
$date_array[$start] = $end;
}
$array = [];
foreach ($date_array as $key => $value) {
$key = date('Y-m-d H:i:s', strtotime($key));
$array[$key] = $value;
}
ksort($array);
$date_array = [];
foreach ($array as $key => $value) {
$key = date('d.m.Y H:i:s', strtotime($key));
$date_array[$key] = $value;
} ?>
<h3><?php echo JText::_('COM_EVENTTABLEEDIT_TABLE_BOOKING'); ?></h3>
<ul class="appintments_list">
<?php
foreach ($date_array as $keystart => $valueend) {
?>
<li>
<?php $exp_startdate = explode(' ', $keystart);
$exp_sdate = explode('-', $exp_startdate[0]);
$timesremovedsec = explode(':', $exp_startdate[1]);
$exp_stime = explode(':', $exp_startdate[1]);
$starttimeonly = $exp_stime[0].':'.$exp_stime[1];
$exp_enddate = explode(' ', $valueend);
$exp_edate = explode('-', $exp_enddate[0]);
$exp_etime = explode(':', $exp_enddate[1]);
$mintplus = intval($exp_etime[1]) + intval($mintdiffrence);
if ($mintplus >= 60) {
$mintsend = $mintplus - 60;
if ($mintsend > 9) {
$mintsendadd = $mintsend;
} else {
$mintsendadd = '0'.$mintsend;
}
if ($exp_etime[0] >= 9) {
$hoursends = $exp_etime[0] + 1;
} else {
$hoursends1 = $exp_etime[0] + 1;
$hoursends = '0'.$hoursends1;
}
if (24 === $hoursends) {
$endtimeonly = '00:'.$mintsendadd;
} else {
$endtimeonly = $hoursends.':'.$mintsendadd;
}
} else {
$endtimeonly = $exp_etime[0].':'.$mintplus;
}
$namesofday1 = date('l', strtotime($keystart));
echo JTEXT::_('COM_EVENTTABLEEDIT_'.strtoupper($namesofday1)).', '.date('d.m.Y', strtotime($keystart)).', '.$starttimeonly.' - '.$endtimeonly; ?>
</li>
<?php
} ?>
</ul>
<?php
} ?>
<?php
$session = JFactory::getSession();
$corresponding_table = $session->get('corresponding_table');
if ($corresponding_table) {
$corresptable = json_decode($this->item->corresptable, true);
$corresponding_table_name = '';
foreach ($corresptable as $key => $corresptabl) {
if ($corresptabl === $corresponding_table) {
$corresponding_table_name = $key;
}
}
echo '<p><b>'.JTEXT::_('COM_EVENTTABLEEDIT_SELECTED_OPTION').":</b> $corresponding_table_name</p>";
}
?>
</div>
<form action="<?php echo JRoute::_('index.php?option=com_eventtableedit'); ?>" name="adminForm" id="adminForm" method="post" class="form-validate span6 appointmentform" style="float:left;">
<div class="control-group">
<label class="control-label"><?php echo JText::_('COM_EVENTTABLEEDIT_FIRSTNAME'); ?>*</label>
<div class="controls"><input type="text" value="" name="first_name" class="required"></div>
</div>
<div class="control-group">
<label class="control-label"><?php echo JText::_('COM_EVENTTABLEEDIT_LASTNAME'); ?>*</label>
<div class="controls"><input type="text" value="" name="last_name" class="required"></div>
</div>
<div class="control-group">
<label class="control-label"><?php echo JText::_('COM_EVENTTABLEEDIT_EMAIL'); ?>*</label>
<div class="controls"><input type="text" value="" name="email" class="required validate-email"></div>
</div>
<div class="control-group">
<div class="controls" style="width:16px;float: left;margin-top: -3px;"><input type="checkbox" value="yes" name="oneics" id="oneics" class=""></div>
<label class="control-label" for="oneics"><?php echo JText::_('COM_EVENTTABLEEDIT_ONE_ICS'); ?></label>
</div>
<div class="control-group">
<label class="control-label"><?php echo JText::_('COM_EVENTTABLEEDIT_COMMENT'); ?></label>
<div class="controls"><textarea name="comment" id="comment" cols="10" rows="5"></textarea></div>
</div>
<p>* <?php echo JText::_('COM_EVENTTABLEEDIT_STAR'); ?></p>
<br>
<input type="hidden" name="option" value="com_eventtableedit" />
<input type="hidden" name="view" value="appointmentform" />
<input type="submit" name="submit" class="btn" value="<?php echo JText::_('COM_EVENTTABLEEDIT_FINAL_RESERVATION'); ?>">
<br>
<input type="button" class="btn goback" value="<?php echo JText::_('COM_EVENTTABLEEDIT_GO_BACK'); ?>" name="goback" onclick="goback1();">
<input type="hidden" name="task" value="appointmentform.save" />
<input type="hidden" name="id" value="<?php echo $this->item->id; ?>" />
<input type="hidden" name="rowcolmix" value="<?php echo $postget['rowcolmix']; ?>" />
<!--<input type="hidden" name="col" value="<?php //echo $postget['col'];?>" />
-->
<input type="hidden" name="Itemid" value="<?php echo $Itemid; ?>" />
<input type="hidden" name="dateappointment" value="<?php echo implode(',', $appointmentsdate); ?>" />
<?php echo JHtml::_('form.token'); ?>
</form>
</div>
<div style="clear:both"></div>
| gpl-3.0 |
mandisimakwakwa/Camagru | sources/backEnd/config/database.php | 523 | <?php
//Session Creator
session_start();

//DB Connection Config
// NOTE(review): credentials are hardcoded in source; consider loading them
// from environment variables or an untracked config file.
$DB_DSN = 'mysql:host=localhost';
$DB_USER = 'root';
$DB_PASSWORD = 'password';
$DB_NAME = 'Camagru';

//DB Conn Sessions
// NOTE(review): the DB password is copied into session storage here —
// confirm this exposure is intended before shipping.
$_SESSION['dbConnDSN'] = $DB_DSN;
$_SESSION['dbConnUser'] = $DB_USER;
$_SESSION['dbConnPassword'] = $DB_PASSWORD;
$_SESSION['dbConnName'] = $DB_NAME;

//Debug Connection to database.php
// Prints a marker so other scripts can verify this file was included.
function ft_checkDatabaseLinking() {
    echo 'database.php is accessible<br>';
}
?> | gpl-3.0 |
glitchwrks/rails_services | app/mailers/application_mailer.rb | 159 | class ApplicationMailer < ActionMailer::Base
default :from => 'noreply@glitchwrks.com'
default :reply_to => 'noreply@glitchwrks.com'
layout 'mailer'
end
| gpl-3.0 |
niebaoding/YourPhalconCms | app/backend/controllers/BaseController.php | 519 | <?php
/**
* 后台基类控制器
* @category YourPhalconCms
* @copyright Copyright (c) 2016 YourPhalconCms team (http://www.ycms.com)
* @license GNU General Public License 2.0
* @link www.ycms.com
*/
namespace MyApp\App\Backend\Controllers;
use \MyApp\App\Core\PhalBaseController;
class BaseController extends PhalBaseController
{
    /**
     * Controller setup hook; currently only delegates to the parent.
     */
    public function initialize()
    {
        parent::initialize();
    }

    /**
     * Check whether the backend user is logged in.
     * NOTE(review): empty stub — controllers inheriting from this class
     * currently perform no login check at all; confirm this is intended.
     */
    public function checkAdminLogin()
    {
    }
}
| gpl-3.0 |
jlppc/OpMon | src/utils/StringKeys.cpp | 7538 | /*
StringKeys.cpp
Author : Cyrielle
Contributor : BAKFR
File under GNU GPL v3.0 license
*/
#include "StringKeys.hpp"
#include "../opmon/model/storage/ResourceLoader.hpp"
#include "./log.hpp"
#include <cstdio>
#include <fstream>
#include <queue>
#define DIALOG_LIMIT 33
namespace Utils {
namespace StringKeys {
std::vector<std::string> keys = std::vector<std::string>();
std::vector<sf::String> strings = std::vector<sf::String>();
/**
 * Reads one line of raw bytes from `input` (at most 1024 characters) and
 * decodes it as UTF-8 into an sf::String.
 * Stops at '\n' or at any value below 31, which also covers EOF
 * (istream::get() returns -1) and other control characters.
 */
sf::String readLine(std::ifstream &input) {
    std::basic_string<unsigned char> toReturn;
    for(unsigned int i = 0; i < 1024; i++) {
        int got = input.get();
        if(got == '\n' || got < 31) {
            break;
        } else {
            unsigned char truc = got; // narrow back to a byte for UTF-8 decoding
            toReturn += truc;
        }
    }
    sf::String toReelReturn;
    toReelReturn = sf::String::fromUtf8(toReturn.begin(), toReturn.end());
    return toReelReturn;
}
/**
 * Converts an sf::String to a std::string containing its UTF-8 byte
 * sequence (each byte narrowed from unsigned char to char).
 */
std::string sfStringtoStdString(sf::String const &str) {
    std::basic_string<unsigned char> utf8 = str.toUtf8();
    // One-pass copy via the range constructor instead of per-byte append.
    return std::string(utf8.begin(), utf8.end());
}
/**
 * Loads the "key=value" translation entries from `keysFileS` into the
 * module-level `keys`/`strings` vectors.  Reads until an "end" sentinel
 * line.  Returns false when the keys file could not be opened.
 */
bool initialize(const std::string &keysFileS) {
    std::ifstream keysFile;
    OpMon::Model::ResourceLoader::loadKeysFile(keysFileS.c_str(), keysFile);
    keys = std::vector<std::string>();
    strings = std::vector<sf::String>();
    Log::oplog("Keys initialization");
    if(!keysFile) {
        // Opening of the keys file failed.
        return false;
    }
    // Keys recovering
    while(true) {
        sf::String read = readLine(keysFile);
        if(sfStringtoStdString(read) == "end") { // Checks if the line is not the ending line
            break;                               // Else, stops reading
        }
        // Skip empty lines, '#' comments and lines starting with a space.
        // BUGFIX: the original condition used '||', which is a tautology
        // (a value cannot equal both '#' and ' '), so comment lines were
        // never actually skipped.
        if(!read.isEmpty() && read[0] != '#' && read[0] != ' ') {
            // Split "key=value" in two parts.
            std::vector<sf::String> strSplit = split(read, '=');
            // BUGFIX: "!strSplit.size() == 0" only worked by accident of
            // operator precedence; spell out the intent instead.
            if(strSplit.size() != 0 && strSplit[0] != "") {
                keys.push_back(strSplit[0]);
                if(strSplit.size() < 2) {
                    // Key without a value: store a placeholder space.
                    strings.push_back(" ");
                } else {
                    strings.push_back(sf::String(strSplit[1]));
                }
            }
        }
    }
    return true;
}
// Shared fallback value returned (by reference) when a key is missing.
// NOTE(review): callers receive a non-const reference to this one object,
// so writing through it would affect every later lookup miss.
sf::String voi;

/**
 * Returns the translated string registered for `key` (the "key." prefix
 * is added internally).  Logs a warning and returns the shared empty
 * string when the key is unknown.
 */
sf::String &get(std::string key) {
    key = std::string("key.") + key; //Adding "key" prefix
    for(unsigned int i = 0; i < keys.size(); i++) { //Scans keys
        if(keys[i] == key) {
            return strings[i];
        }
    }
    Log::warn("Key " + key + " not found in the keys files.");
    return voi; //If nothing found, returns an empty string
}
/**
 * Returns the position of `key` in the keys table (the "key." prefix is
 * added internally), or -1 when the key is unknown.
 */
int getIndex(std::string key) {
    const std::string prefixed = std::string("key.") + key;
    int position = 0;
    for(std::vector<std::string>::const_iterator it = keys.begin();
        it != keys.end(); ++it, ++position) {
        if(*it == prefixed) {
            return position;
        }
    }
    return -1; // not found
}
/**
 * Splits `str` on every occurrence of `splitter` and returns all parts
 * (empty parts are produced for consecutive separators).
 */
std::vector<sf::String> split(sf::String const &str, char const &splitter) {
    // Hoist the UTF-32 conversion out of the loops.
    std::basic_string<sf::Uint32> codepoints = str.toUtf32();
    // Count separator instances to size the result vector.
    int instances = 0;
    for(unsigned int i = 0; i < codepoints.size(); i++) {
        if((char)codepoints[i] == splitter) {
            instances++;
        }
    }
    std::vector<sf::String> parts(instances + 1);
    // Distribute the characters into the parts.
    for(unsigned int i = 0, current = 0; i < codepoints.size(); i++) {
        if((char)codepoints[i] == splitter) {
            current++;
        } else {
            parts[current] += codepoints[i];
        }
    }
    return parts;
}

/**
 * Returns part number `part` of `str` split on `splitter`.
 * Delegates to the vector overload (removes the duplication flagged by
 * the old TODO).
 */
sf::String split(sf::String const &str, char const &splitter, int const &part) {
    return split(str, splitter)[part];
}
/**
 * Returns how many times `toSearch` occurs in `str` (characters are
 * narrowed from UTF-32 before comparison, as in the other helpers).
 */
int countInstances(sf::String const &str, char const &toSearch) {
    // Convert once instead of per-iteration.
    const std::basic_string<sf::Uint32> codepoints = str.toUtf32();
    int total = 0;
    for(std::size_t pos = 0; pos < codepoints.size(); ++pos) {
        if((char)codepoints[pos] == toSearch) {
            ++total;
        }
    }
    return total;
}
/** Convenience wrapper: looks up `key` and converts the result to std::string. */
std::string getStd(std::string const &key) { return sfStringtoStdString(get(key)); }
/**
 * Splits `str` into a queue of display lines at most `limit` characters
 * wide, honouring two control characters embedded in dialog strings:
 * '|' forces a line break and '$' ends the current dialog page.
 * The queue is padded so its size is always a multiple of 3 — presumably
 * one dialog box shows 3 lines (see DIALOG_LIMIT); confirm against callers.
 * NOTE(review): the local `strings` shadows the namespace-level `strings`
 * vector declared above — intentional here, but easy to misread.
 */
std::queue<sf::String> autoNewLine(sf::String str, unsigned int limit) {
    sf::String currentWord;
    std::queue<sf::String> strings;
    strings.push(sf::String());
    for(unsigned int i = 0; i < str.getSize(); i++) {
        // Accumulate characters into the current word until a separator
        // (' ', '|', '$') or the final character is reached.
        if(str[i] != ' ' && str[i] != '|' && str[i] != '$' && i != (str.getSize() - 1)) {
            currentWord += str[i];
        } else {
            // Start a new line when the pending word would overflow it.
            if((strings.back().getSize() + currentWord.getSize()) >= limit) {
                strings.push(sf::String());
            }
            // The final character is always flushed into the word, even when
            // it is one of the separator characters.
            if(i == str.getSize() - 1)
                currentWord += str[i];
            strings.back() += currentWord;
            if(str[i] == ' ')
                strings.back() += " ";
            currentWord.clear();
            // Explicit line break ('|') or page break ('$').
            if(str[i] == '|' || str[i] == '$') {
                strings.push(sf::String());
            }
            // After '$', pad with blank lines until the next line starts a
            // fresh group of 3.
            while(str[i] == '$' && (strings.size() % 3) != 1) {
                strings.back() += " ";
                strings.push(sf::String());
            }
        }
    }
    // Never leave the final line empty.
    if(strings.back().isEmpty())
        strings.back() += " ";
    // Pad so the total line count is a multiple of 3.
    while((strings.size() % 3) != 0) {
        strings.push(sf::String(" "));
    }
    return strings;
}
} // namespace StringKeys
} // namespace Utils
| gpl-3.0 |
moldeo/moldeojs | src/app/mo-gui-manager.ts | 7346 | import { moText } from "./mo-text";
import { moAbstract } from "./mo-abstract";
import {
MOfloat, MOdouble,
MOint, MOuint, MOlong, MOulong
} from "./mo-types";
import { moResource, moResourceElement, moAttribute, moAttributeArray } from "./mo-resource";
import {
moVector, moVector2f, moVector3f, moVector4f,
moVector2fArray, moVector3fArray, moVector4fArray,
moVector3iArray, moMatrix3fArray,
moVector2i, moVector3i, moVector4i,
moMatrix, moMatrix3f, moMatrix4f,
moMathManager
} from "./mo-math-manager";
import { moGLMatrixf } from "./mo-gl-manager";
import * as THREE from 'three';
import MaterialBase = THREE.Material;
import MeshStandardMaterial = THREE.MeshStandardMaterial;
//export type moCamera3DBase = THREE.Camera;
import { moSceneNode, moSceneNodeArray } from "./mo-3d-model-manager";
export type moPointf = moVector3f;
export type moPointd = moVector3f;
export type moPoint = moPointf;
export type moVertex3f = moVector3f;
export type moVertex = moVector3f;
export type moTCoord = moVector2f;
export type moPosition = moVertex;
export type moFace = moVector3i;
export type moFace3 = moMatrix3f;
export type moColorRGB = moVertex3f;
export type moColorRGBA = moVector4f;
//export type moColorx = THREE.Color;
export class moColor extends THREE.Color { };
export type moCameraMatrix = moGLMatrixf;
export type moCamera3DBase = moGLMatrixf;
export type moPointArray = moVector3fArray;
export type moColorArray = moVector3fArray;
export type moColor4fArray = moVector3fArray;
export type moColorRGBAArray = moVector3fArray;
export type moTCoordArray = moVector2fArray;
export type moVertexArray = moVector3fArray;
export type moFaceArray = moVector3iArray;
export type moFace3Array = moMatrix3fArray;
/*
typedef moVector3fArray moColorArray;
typedef moVector3fArray moColorRGBArray;
typedef moVector4fArray moColor4fArray;
typedef moVector4fArray moColorRGBAArray;
typedef moVector2fArray moTCoordArray;
typedef moVector3fArray moVertexArray;
typedef moVector3iArray moFaceArray;
typedef moMatrix3fArray moFace3Array;
*/
export class moCamera3D extends THREE.Camera {
/*
m_Position : moPosition;
m_Center: moPosition;
*/
};
// Geometry primitive identifiers mirrored from Moldeo's core.
// NOTE(review): the value 12 is unassigned (TETRAHEDRON jumps to 13) —
// confirm whether this gap is intentional before renumbering anything.
export enum moGeometryType {
    MO_GEOMETRY_UNDEFINED=-1,
    MO_GEOMETRY_POINT=0,
    MO_GEOMETRY_BOX=1,
    MO_GEOMETRY_CIRCLE=2,
    MO_GEOMETRY_CYLINDER=3,
    MO_GEOMETRY_SHAPE=4,
    MO_GEOMETRY_PLANE=5,
    MO_GEOMETRY_EXTRUDE=6,
    MO_GEOMETRY_RING=7,
    MO_GEOMETRY_SPHERE=8,
    MO_GEOMETRY_POLYHEDRON=9,
    MO_GEOMETRY_ICOSAHEDRON=10,
    MO_GEOMETRY_DODECAHEDRON=11,
    MO_GEOMETRY_TETRAHEDRON=13,
    MO_GEOMETRY_TEXT=14,
    MO_GEOMETRY_TUBE=15,
    MO_GEOMETRY_MAX=16   // sentinel / upper bound
};
//export class moGeometry extends moResourceElement {
export class moGeometry extends THREE.Geometry {
/*
geometry: THREE.Geometry;
n_Name : moText;
m_Type : moGeometryType;
m_Vertices : moPointArray;
m_VerticesUvs : moTCoordArray;
m_Normals : moVertexArray;
m_Colors : moColorArray;
m_VerticesBuffer : MOfloat[];
m_NormalsBuffer : MOfloat[];
m_VerticesUVBuffer : MOfloat[];
m_ColorBuffer : MOfloat[];
m_Faces : moFaceArray;//array of triangles, 3 points referencing each an index of m_Vertices.
m_FaceVertexUvs : moTCoordArray;//array of texture coordinates for each vertex, corresponding to each face from m_Faces
m_Attributes : moAttributeArray;
constructor() {
super();
this.geometry = new THREE.Geometry();
}
*/
}
export class moPath extends moResourceElement {
}
export class moMaterialBase extends moResourceElement {
//material_basic: MaterialBase;
constructor() {
super();
//this.material = new MaterialBase();
}
Init(): boolean {
return super.Init();
}
}
/*
export class moMaterial extends moMaterialBase {
material: MeshStandardMaterial;
constructor() {
super();
this.material = new MeshStandardMaterial();
}
Init(): boolean {
return super.Init();
}
}
*/
export class moMaterial extends THREE.MeshStandardMaterial {
}
export class moMaterialBasic extends THREE.MeshBasicMaterial {
}
/**
* mo3dModelManager
*
* Managing all 3d scene nodes
*/
//export class moObject3D extends moSceneNode {
export class moObject3D extends THREE.Object3D {
/*
m_Geometry : moGeometry;
m_Material : moMaterial;
m_Position : moPosition;
m_Scale : moVector3f;
m_Rotation : moVector3f;
constructor(p_geometry: moGeometry, p_material: moMaterial ) {
super();
this.m_Geometry = p_geometry;
this.m_Material = p_material;
}
*/
}
export class moBone extends THREE.Bone {
}
export class moSprite extends THREE.Sprite {
}
export class moLine extends THREE.Line {
}
export class moLineSegments extends THREE.LineSegments {
}
export class moPoints extends THREE.Points {
}
export class moLOD extends THREE.LOD {
}
export class moSkinnedMesh extends THREE.SkinnedMesh {
}
export class moSkeleton extends THREE.Skeleton {
}
export class moMesh extends THREE.Mesh {
    /** Resets this mesh's local transform matrix to the identity. */
    ResetMatrix() {
        this.matrix.identity();
    }

    /**
     * Overwrites the mesh's model matrix with `matrix`.  Automatic matrix
     * updates are suspended while the matrix is applied, so Three.js does
     * not recompose it from position/rotation/scale mid-operation, and are
     * re-enabled afterwards.
     */
    SetModelMatrix(matrix: moGLMatrixf) {
        this.matrixAutoUpdate = false;
        this.ResetMatrix();
        this.applyMatrix(matrix);
        this.matrixAutoUpdate = true;
    }
}
export class moGroup extends THREE.Group {
    /** Resets this group's local transform matrix to the identity. */
    ResetMatrix() {
        this.matrix.identity();
    }

    /**
     * Overwrites the group's model matrix with `matrix`, temporarily
     * disabling matrixAutoUpdate so Three.js does not recompose the matrix
     * from position/rotation/scale while it is being applied.
     */
    SetModelMatrix(matrix: moGLMatrixf) {
        this.matrixAutoUpdate = false;
        this.ResetMatrix();
        this.applyMatrix(matrix);
        this.matrixAutoUpdate = true;
    }
}
export class moShape extends THREE.Shape {
}
export class moBoxGeometry extends THREE.BoxGeometry {
}
export class moCircleGeometry extends moGeometry {
}
export class moCylinderGeometry extends moGeometry {
}
export class moPolyhedronGeometry extends moGeometry {
}
export class moDodecahedronGeometry extends moGeometry {
}
export class moIcosahedronGeometry extends moGeometry {
}
export class moTetrahedronGeometry extends moGeometry {
}
export class moShapeGeometry extends moGeometry {
}
// Flat rectangular geometry; caches the requested width/height alongside
// the underlying THREE.PlaneGeometry data.
export class moPlaneGeometry extends THREE.PlaneGeometry {
    m_Width: MOint;   // requested plane width (mirrors the first ctor arg)
    m_Height: MOint;  // requested plane height (mirrors the second ctor arg)
    constructor( w: number, h: number, ws: number, hs: number ) {
        super(w, h, ws, hs);  // ws/hs: subdivision counts forwarded to THREE
        this.m_Width = w;
        this.m_Height = h;
    }
}
export class moExtrudeGeometry extends moGeometry {
}
export class moRingGeometry extends moGeometry {
}
export class moSphereGeometry extends THREE.SphereGeometry {
//m_SphereGeometry: THREE.SphereGeometry;
/*constructor( radius : MOfloat, width: MOint, height: MOint) {
super();
this.m_SphereGeometry = new THREE.SphereGeometry(radius,width,height);
}*/
}
export class moTextGeometry extends moGeometry {
}
export class moTubeGeometry extends moGeometry {
}
export class moAxis3D extends moGeometry {
}
export class moBoundingBox3D extends moGeometry {
}
export class moGuiObject extends moAbstract {
}
export class moWidget extends moGuiObject {
}
export class moWindow extends moWidget {
}
export class mo3dWidget extends moWidget {
}
/**
 * GUI manager resource.  Registers itself under the reserved resource
 * name "_guimanager_" so the resource system can look it up.
 */
export class moGUIManager extends moResource {
    constructor() {
        super();
        this.SetName("_guimanager_");
    }
}
| gpl-3.0 |
Filip-Mitov/python_movie_catalog | gui.py | 14367 | import sys
import os
from PySide.QtCore import *
from PySide.QtGui import *
import database
# Image file extensions QPixmap can load (lower-case, with leading dot);
# used to pick a poster image out of the chosen movie directory.
pixmap_formats = [
    '.bmp', '.gif', '.ico', '.jpeg', '.jpg',
    '.mng', '.pbm', '.pgm', '.png', '.ppm',
    '.tga', '.tif', '.tiff', '.xbm', '.xpm'
]
# Genres offered in the multi-select genre list widget.
movie_genres = [
    "Action", "Adventure", "Animation", "Biography", "Comedy",
    "Crime", "Documentary", "Drama", "Family", "Fantasy",
    "History", "Horror", "Music", "Musical", "Mystery",
    "Romance", "Sci-Fi", "Thriller", "Western", "War"
]
# Rating labels; the leading empty string means "not rated yet".
movie_rating = [
    '','Awesome!', 'Very good..',
    'Average', 'Very bad..', 'Yuck!'
]
class BasicElements(object):
    """Builds the shared movie-form widgets (title, year, genre, rating,
    description, directory picker) and arranges them in a grid layout.

    NOTE(review): ``choose_dir``/``movie_poster_preview`` use ``self`` as a
    QWidget (``QFileDialog(self, ...)``, ``self.size()``), so this class is
    presumably mixed into a QWidget subclass — confirm against callers.
    """

    def __init__(self):
        self.element_title()
        self.element_year()
        self.element_genre()
        self.element_rating()
        self.element_description()
        self.element_path()
        self.element_layout()

    def element_title(self):
        """Create the movie-title line edit and its right-aligned label."""
        self.title = QLineEdit()
        self.title_label = QLabel("Title")
        self.title_label.setAlignment(Qt.AlignRight)

    def element_year(self):
        """Create the release-year combo box covering 1970-2020."""
        self.year = QComboBox()
        self.year.addItems([str(y) for y in range(1970, 2021)])
        self.year.setCurrentIndex(44)  # Default year is 2014
        self.year_label = QLabel("Year")
        self.year_label.setAlignment(Qt.AlignRight)

    def element_genre(self):
        """Create the multi-select genre list."""
        self.genre = QListWidget()
        self.genre.addItems(movie_genres)
        self.genre.setSelectionMode(QAbstractItemView.MultiSelection)
        self.genre_label = QLabel("Genre")
        self.genre_label.setAlignment(Qt.AlignRight)

    def element_rating(self):
        """Create the rating combo box (first entry = unrated)."""
        self.rating = QComboBox()
        self.rating.addItems(movie_rating)
        self.rating_label = QLabel("Rating")
        self.rating_label.setAlignment(Qt.AlignRight)

    def element_description(self):
        """Create the free-text description editor."""
        self.description = QTextEdit()
        self.description_label = QLabel("Description")
        self.description_label.setAlignment(Qt.AlignRight)

    def element_path(self):
        """Create the directory-picker button and the path/poster displays."""
        self.path = QPushButton("select dir")
        self.path.clicked.connect(self.choose_dir)
        self.path_display = QLabel()
        self.picture_display = QLabel()

    def element_layout(self):
        """Arrange label/widget pairs row by row; the poster preview spans
        the right-hand column."""
        self.layout = QGridLayout()
        pairs = [
            (self.title_label, self.title),
            (self.year_label, self.year),
            (self.genre_label, self.genre),
            (self.rating_label, self.rating),
            (self.description_label, self.description),
            (self.path, self.path_display),
        ]
        for row, (left, right) in enumerate(pairs):
            self.layout.addWidget(left, row, 0)
            self.layout.addWidget(right, row, 1)
        self.layout.addWidget(self.picture_display, 0, 2, 7, 2)

    def choose_dir(self):
        """Open a directory picker; on accept, record the path and refresh
        the poster preview."""
        picker = QFileDialog(self, 'Browse movie directory')
        picker.setFileMode(QFileDialog.Directory)
        picker.setOption(QFileDialog.ShowDirsOnly)
        if picker.exec_():
            self.path_display.setText(*picker.selectedFiles())
            self.movie_poster_preview()

    def movie_poster_preview(self):
        """Display the first loadable image found in the chosen directory,
        scaled to fit while keeping its aspect ratio."""
        folder = self.path_display.text()
        for entry in os.listdir(folder):
            extension = os.path.splitext(entry)[1]
            if extension.lower() not in pixmap_formats:
                continue
            poster = QPixmap(os.path.join(folder, entry)).scaled(
                self.size(),
                Qt.KeepAspectRatio,
                Qt.SmoothTransformation,
            )
            self.picture_display.setPixmap(poster)
            break
class ClicableQLabel(QLabel):
    """QLabel that emits an old-style 'clicked()' signal on mouse release.

    The class name keeps the original (misspelled) 'Clicable' because
    subclasses (e.g. Picture) refer to it.
    """

    def __init__(self, parent=None):
        # Bug fix: this was previously 'def __init(self, parent)' (missing
        # trailing underscores), so it never ran as the constructor and the
        # parent argument was silently ignored.
        QLabel.__init__(self, parent)

    def mouseReleaseEvent(self, ev):
        # Old-style PySide emission; receivers connect via SIGNAL('clicked()').
        self.emit(SIGNAL('clicked()'))
class FormAddMovie(QDialog, BasicElements):
    """Dialog for adding a new movie to the catalog."""

    # Emitted after a movie has been successfully stored in the database.
    added = Signal()

    def __init__(self, parent=None):
        super(FormAddMovie, self).__init__(parent)
        BasicElements.__init__(self)
        self.button = QPushButton("Add movie to catalog")
        self.button.clicked.connect(self.create_cortege)
        self.layout.addWidget(self.button, 6, 1)
        self.setWindowTitle('Form add movie')
        self.setLayout(self.layout)

    def create_cortege(self):
        """Collect the form values and try to store them in the database."""
        movie_row = {
            'title': self.title.text(),
            'year': int(self.year.currentText()),
            'genre': '/'.join([item.text() for item in self.genre.selectedItems()]),
            'rating': self.rating.currentText(),
            'description': self.description.toPlainText(),
            'path': self.path_display.text()
        }
        if not database.add_movie(movie_row):
            self.warning_message("Movie already exist!")
        else:
            self.added.emit()
            self.done(True)
            # Bug fix: this used to call self.closeEvent(QCloseEvent), passing
            # the *class* (not an event instance) to an event handler. Reset
            # the form directly instead.
            self._reset_form()

    def closeEvent(self, event):
        # Clear the form whenever the dialog is closed so it starts empty
        # next time it is shown.
        self._reset_form()

    def _reset_form(self):
        """Restore every input widget to its initial state."""
        self.title.clear()
        self.year.setCurrentIndex(44)  # default year 2014
        self.genre.clearSelection()
        self.rating.setCurrentIndex(0)
        self.description.clear()
        self.path_display.clear()
        self.picture_display.clear()

    def warning_message(self, message):
        """Show *message* in a modal warning box."""
        msgBox = QMessageBox()
        msgBox.setText(message)
        msgBox.setWindowTitle("Warning!")
        msgBox.exec_()
class FormEditMovie(QWidget,BasicElements):
    """Standalone window for editing an existing catalog entry.

    *movie* is the stored row tuple
    (title, year, genre, rating, description, path); the original
    (title, year) pair identifies the record to update.
    """

    # Emitted after the record has been successfully updated.
    edited = Signal()

    def __init__(self, movie, parent=None):
        super(FormEditMovie, self).__init__(parent)
        BasicElements.__init__(self)
        self.cortege = movie
        self.button = QPushButton("Edit movie in catalog")
        self.button.clicked.connect(self.edit_cortege)
        self.layout.addWidget(self.button, 6, 1)
        self.setWindowTitle('Form edit movie')
        self.setLayout(self.layout)
        # Pre-fill the form with the existing values, then show immediately.
        self.set_values()
        self.show()

    def edit_cortege(self):
        """Write the edited values back, keyed by the original title/year."""
        movie_row = self.get_values()
        if database.edit_movie(self.cortege[0], self.cortege[1], movie_row):
            self.edited.emit()
            self.close()

    def get_values(self):
        """Return the current form contents as a row tuple (same field order
        as the stored cortege)."""
        movie_row = (
            self.title.text(),
            int(self.year.currentText()),
            '/'.join([item.text() for item in self.genre.selectedItems()]),
            self.rating.currentText(),
            self.description.toPlainText(),
            self.path_display.text()
        )
        return movie_row

    def set_values(self):
        """Populate the form widgets from the stored cortege."""
        self.title.setText(self.cortege[0])
        self.year.setCurrentIndex(self.year.findText(str(self.cortege[1])))
        # Genres are stored as a single '/'-separated string.
        selected_genre = self.cortege[2].split('/')
        for i in range(0,self.genre.count()):
            if self.genre.item(i).text() in selected_genre:
                self.genre.item(i).setSelected(True)
        self.rating.setCurrentIndex(self.rating.findText(self.cortege[3]))
        self.description.setText(self.cortege[4])
        self.path_display.setText(self.cortege[5])
class FormViewMovie(QWidget):
    """Read-only window showing one movie's details with edit/delete actions.

    *movie* is the stored row tuple
    (title, year, genre, rating, description, path).
    """

    # Emitted after the movie has been removed from the database.
    deleted = Signal()

    def __init__(self, movie, parent=None):
        super(FormViewMovie, self).__init__(parent)
        self.cortege = movie
        # Fonts shared by all caption labels (bold) and value widgets (plain).
        self.font_keys = QFont()
        self.font_keys.setBold(True)
        self.font_keys.setPointSize(12)
        self.font_values = QFont()
        self.font_values.setPointSize(10)
        # Read-only value widgets and their captions. The original code
        # repeated this construction inline for every field; it is now
        # factored into _make_line_field/_make_label.
        self.title, self.title_label = self._make_line_field("Title")
        self.year, self.year_label = self._make_line_field("Year")
        self.genre, self.genre_label = self._make_line_field("Genre")
        self.rating, self.rating_label = self._make_line_field("Rating")
        self.description = QTextEdit()
        self.description.setReadOnly(True)
        self.description.setFont(self.font_values)
        self.description_label = self._make_label("Description")
        self.path = QLabel()
        self.path_label = self._make_label('Movie directory')
        self.edit_button = QPushButton('Edit movie')
        self.edit_button.clicked.connect(self.edit_cortege)
        self.button = QPushButton('Delete movie from catalog')
        self.button.clicked.connect(self.delete_cortege)
        self._build_layout()
        self.set_values()
        self.show()

    def _make_line_field(self, caption):
        """Return a (read-only QLineEdit, bold caption QLabel) pair."""
        value = QLineEdit()
        value.setReadOnly(True)
        value.setFont(self.font_values)
        return value, self._make_label(caption)

    def _make_label(self, caption):
        """Return a right-aligned bold caption label."""
        label = QLabel(caption)
        label.setFont(self.font_keys)
        label.setAlignment(Qt.AlignRight)
        return label

    def _build_layout(self):
        """Place caption/value pairs on a grid with the buttons in row 6."""
        self.layout = QGridLayout()
        rows = [
            (self.title_label, self.title),
            (self.year_label, self.year),
            (self.genre_label, self.genre),
            (self.rating_label, self.rating),
            (self.description_label, self.description),
            (self.path_label, self.path),
        ]
        for row, (label, value) in enumerate(rows):
            self.layout.addWidget(label, row, 0)
            self.layout.addWidget(value, row, 1)
        self.layout.addWidget(self.edit_button, 6, 0)
        self.layout.addWidget(self.button, 6, 1)
        self.setWindowTitle('Form view movie')
        self.setLayout(self.layout)

    def edit_cortege(self):
        # Keep a reference to the edit window so it is not garbage collected.
        self.movie_edit_form = FormEditMovie(self.cortege)
        self.movie_edit_form.edited.connect(self.information_update)

    def delete_cortege(self):
        """Delete the record keyed by the displayed title/year."""
        if database.delete_movie(self.title.text(), self.year.text()):
            self.deleted.emit()
            self.close()

    def set_values(self):
        """Copy the current cortege into the display widgets."""
        self.title.setText(self.cortege[0])
        self.year.setText(str(self.cortege[1]))
        self.genre.setText(self.cortege[2])
        self.rating.setText(self.cortege[3])
        self.description.setText(self.cortege[4])
        self.path.setText(self.cortege[5])

    def information_update(self):
        # Refresh the displayed values after the edit form saved its changes.
        self.cortege = self.movie_edit_form.get_values()
        self.set_values()
class Picture(ClicableQLabel):
    """Clickable poster thumbnail for one movie; clicking opens a detail view.

    *movie* is the stored row tuple
    (title, year, genre, rating, description, path).
    """

    # Re-emitted (via notify) when the underlying movie was deleted so the
    # main window can rebuild the poster grid.
    changed = Signal()

    def __init__(self, movie, parent=None):
        super(Picture, self).__init__(parent)
        self.movie = movie
        self.setAlignment(Qt.AlignCenter)
        # Poster from the movie directory, or a generated placeholder.
        self.movie_poster = self.movie_img(self.movie[0], self.movie[5])
        self.setPixmap(self.movie_poster)
        # Old-style connection matching ClicableQLabel's SIGNAL('clicked()').
        QWidget.connect(self, SIGNAL('clicked()'), self.view_info_button)

    def view_info_button(self):
        # Keep a reference so the detail window is not garbage collected.
        self.movie_view_form = FormViewMovie(self.movie)
        self.movie_view_form.deleted.connect(self.notify)

    def movie_img(self, title, path):
        """Return a pixmap for the first image file found in *path*, or a
        generated placeholder showing *title* when none exists."""
        for one_file in os.listdir(path):
            if os.path.splitext(one_file)[1].lower() in pixmap_formats:
                poster_path = os.path.join(path, one_file)
                # NOTE(review): self.size() is the widget's size at
                # construction time, likely still the default - confirm.
                poster = QPixmap(poster_path).scaled(
                    self.size(),
                    Qt.KeepAspectRatio,
                    Qt.SmoothTransformation
                )
                return poster
        return self.blank_poster_with_text(title)

    def blank_poster_with_text(self, title):
        """Draw *title* centered on a 320x480 dark-gray placeholder pixmap."""
        width, height = 320, 480
        text_poster = QPixmap(QSize(width,height))
        text_poster.fill(Qt.darkGray)
        canvas = QPainter(text_poster)
        canvas.setPen(Qt.white)
        font = QFont()
        font.setPixelSize(20)
        canvas.setFont(font)
        canvas.drawText(QRectF(0, 0, width, height), Qt.AlignCenter, title)
        return text_poster

    def notify(self):
        # Forward the deletion to listeners (MainWindow refreshes the grid).
        self.changed.emit()
class MainWindow(QWidget):
    """Top-level catalog window: add button, search bar and poster grid."""

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.form_add_movie = FormAddMovie()
        self.form_add_movie.added.connect(self.update_slideshow)
        # Picture widgets of the currently displayed slideshow.
        self.catalog_view_forms = []
        self.i = 2  # NOTE(review): apparently unused; kept for compatibility.
        self.button = QPushButton('Add movie to catalog')
        self.button.clicked.connect(self.form_add_movie.show)
        self.search = QLineEdit()
        self.search_button = QPushButton('Search!')
        self.search_button.clicked.connect(self.search_movie_and_update)
        database.path_check()
        self.scroll_area = self.picture_slideshow(database.all_movies())
        self.grid = QGridLayout()
        self.grid.addWidget(self.button, 0, 0)
        self.grid.addWidget(self.search, 0, 1)
        self.grid.addWidget(self.search_button, 0, 2)
        self.grid.addWidget(self.scroll_area, 1, 0, 1, 3)
        self.setWindowTitle('Movie catalog 2014')
        self.setLayout(self.grid)
        self.show()

    def picture_slideshow(self, records):
        """Build and return a scroll area holding one Picture per record."""
        scroll_area = QScrollArea()
        scroll_area.setBackgroundRole(QPalette.Dark)
        scroll_widget = QWidget()
        scroll_layout = QGridLayout()
        scroll_layout.setDefaultPositioning(4, Qt.Horizontal)  # 4 per row
        for record in records:
            picture_button = Picture(record)
            self.catalog_view_forms.append(picture_button)
            self.catalog_view_forms[-1].changed.connect(self.update_slideshow)
            scroll_layout.addWidget(picture_button)
        scroll_widget.setLayout(scroll_layout)
        scroll_area.setWidget(scroll_widget)
        return scroll_area

    def _replace_slideshow(self, records):
        """Swap the current poster grid for one built from *records*.

        Bug fix: the old code added a brand-new scroll area to the grid on
        every refresh without removing the previous one, leaking widgets and
        growing catalog_view_forms forever.
        """
        old_area = self.scroll_area
        self.grid.removeWidget(old_area)
        old_area.deleteLater()
        self.catalog_view_forms = []
        self.scroll_area = self.picture_slideshow(records)
        self.grid.addWidget(self.scroll_area, 1, 0, 1, 3)

    def search_movie_and_update(self):
        """Show only the movies matching the search box contents."""
        self._replace_slideshow(database.selected_movies(self.search.text()))

    def update_slideshow(self):
        """Re-read the whole catalog and rebuild the poster grid."""
        self._replace_slideshow(database.all_movies())
| gpl-3.0 |
mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/lib/galaxy/tools/__init__.py | 175364 | """
Classes encapsulating galaxy tools and tool configuration.
"""
import binascii
import glob
import json
import logging
import os
import pipes
import re
import shutil
import sys
import string
import tarfile
import tempfile
import threading
import traceback
import types
import urllib
from math import isinf
from galaxy import eggs
eggs.require( "MarkupSafe" ) # MarkupSafe must load before mako
eggs.require( "Mako" )
eggs.require( "elementtree" )
eggs.require( "Paste" )
eggs.require( "SQLAlchemy >= 0.4" )
from cgi import FieldStorage
from elementtree import ElementTree
from mako.template import Template
from paste import httpexceptions
from sqlalchemy import and_
from galaxy import jobs, model
from galaxy.jobs.error_level import StdioErrorLevel
from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
from galaxy import exceptions
from galaxy.jobs import ParallelismInfo
from galaxy.tools.actions import DefaultToolAction
from galaxy.tools.actions.data_source import DataSourceToolAction
from galaxy.tools.actions.data_manager import DataManagerToolAction
from galaxy.tools.deps import build_dependency_manager
from galaxy.tools.deps.requirements import parse_requirements_from_xml
from galaxy.tools.parameters import check_param, params_from_strings, params_to_strings
from galaxy.tools.parameters import output_collect
from galaxy.tools.parameters.basic import (BaseURLToolParameter,
DataToolParameter, HiddenToolParameter, LibraryDatasetToolParameter,
SelectToolParameter, ToolParameter, UnvalidatedValue,
IntegerToolParameter, FloatToolParameter)
from galaxy.tools.parameters.grouping import Conditional, ConditionalWhen, Repeat, UploadDataset
from galaxy.tools.parameters.input_translation import ToolInputTranslator
from galaxy.tools.parameters.output import ToolOutputActionGroup
from galaxy.tools.parameters.validation import LateValidationError
from galaxy.tools.filters import FilterFactory
from galaxy.tools.test import parse_tests_elem
from galaxy.util import listify, parse_xml, rst_to_html, string_as_bool, string_to_object, xml_text, xml_to_string
from galaxy.tools.parameters.meta import expand_meta_parameters
from galaxy.util.bunch import Bunch
from galaxy.util.expressions import ExpressionContext
from galaxy.util.hash_util import hmac_new
from galaxy.util.none_like import NoneDataset
from galaxy.util.odict import odict
from galaxy.util.template import fill_template
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
from galaxy.model.item_attrs import Dictifiable
from galaxy.model import Workflow
from tool_shed.util import common_util
from tool_shed.util import shed_util_common as suc
from .loader import load_tool, template_macro_params, raw_tool_xml_tree, imported_macro_paths
from .execute import execute as execute_job
from .wrappers import (
ToolParameterValueWrapper,
RawObjectWrapper,
LibraryDatasetValueWrapper,
InputValueWrapper,
SelectToolParameterWrapper,
DatasetFilenameWrapper,
DatasetListWrapper,
DatasetCollectionWrapper,
)
log = logging.getLogger( __name__ )
WORKFLOW_PARAMETER_REGULAR_EXPRESSION = re.compile( '''\$\{.+?\}''' )
JOB_RESOURCE_CONDITIONAL_XML = """<conditional name="__job_resource">
<param name="__job_resource__select" type="select" label="Job Resource Parameters">
<option value="no">Use default job resource parameters</option>
<option value="yes">Specify job resource parameters</option>
</param>
<when value="no"></when>
<when value="yes">
</when>
</conditional>"""
class ToolNotFoundException( Exception ):
    """Raised when a requested tool cannot be located."""
    pass
def to_dict_helper( obj, kwargs ):
    """Helper function that provides the appropriate kwargs to to_dict an object."""
    # ToolSectionLabel.to_dict does not accept keyword arguments.
    if isinstance( obj, ToolSectionLabel ):
        return obj.to_dict()
    return obj.to_dict( **kwargs )
class ToolBox( object, Dictifiable ):
"""Container for a collection of tools"""
    def __init__( self, config_filenames, tool_root_dir, app ):
        """
        Create a toolbox from the config files named by `config_filenames`, using
        `tool_root_dir` as the base directory for finding individual tool config files.

        `app` supplies configuration (app.config), the security helper and the
        install model used to resolve tool lineages.
        """
        # The shed_tool_confs list contains dictionaries storing information about the tools defined in each
        # shed-related shed_tool_conf.xml file.
        self.shed_tool_confs = []
        self.tools_by_id = {}
        self.workflows_by_id = {}
        # In-memory dictionary that defines the layout of the tool panel.
        self.tool_panel = odict()
        # Monotonic counter assigned to panel entries in load order.
        self.index = 0
        self.data_manager_tools = odict()
        # File that contains the XML section and tool tags from all tool panel config files integrated into a
        # single file that defines the tool panel layout.  This file can be changed by the Galaxy administrator
        # (in a way similar to the single tool_conf.xml file in the past) to alter the layout of the tool panel.
        self.integrated_tool_panel_config = app.config.integrated_tool_panel_config
        # In-memory dictionary that defines the layout of the tool_panel.xml file on disk.
        self.integrated_tool_panel = odict()
        self.integrated_tool_panel_config_has_contents = os.path.exists( self.integrated_tool_panel_config ) and os.stat( self.integrated_tool_panel_config ).st_size > 0
        if self.integrated_tool_panel_config_has_contents:
            self.load_integrated_tool_panel_keys()
        # The following refers to the tool_path config setting for backward compatibility.  The shed-related
        # (e.g., shed_tool_conf.xml) files include the tool_path attribute within the <toolbox> tag.
        self.tool_root_dir = tool_root_dir
        self.app = app
        self.filter_factory = FilterFactory( self )
        self.init_dependency_manager()
        config_filenames = listify( config_filenames )
        # Expand any directory entries into the .xml files they contain.
        # NOTE(review): this mutates config_filenames while iterating it and
        # extends with bare filenames (not joined to the directory) - confirm
        # that this matches the intended lookup behavior.
        for config_filename in config_filenames:
            if os.path.isdir( config_filename ):
                directory_contents = sorted( os.listdir( config_filename ) )
                directory_config_files = [ config_file for config_file in directory_contents if config_file.endswith( ".xml" ) ]
                config_filenames.remove( config_filename )
                config_filenames.extend( directory_config_files )
        for config_filename in config_filenames:
            try:
                self.init_tools( config_filename )
            # Bare except: a broken config file must not abort startup; the
            # error is logged and the remaining configs are still loaded.
            except:
                log.exception( "Error loading tools defined in config %s", config_filename )
        if self.app.name == 'galaxy' and self.integrated_tool_panel_config_has_contents:
            # Load self.tool_panel based on the order in self.integrated_tool_panel.
            self.load_tool_panel()
        if app.config.update_integrated_tool_panel:
            # Write the current in-memory integrated_tool_panel to the integrated_tool_panel.xml file.
            # This will cover cases where the Galaxy administrator manually edited one or more of the tool panel
            # config files, adding or removing locally developed tools or workflows.  The value of integrated_tool_panel
            # will be False when things like functional tests are the caller.
            self.fix_integrated_tool_panel_dict()
            self.write_integrated_tool_panel_config_file()
def fix_integrated_tool_panel_dict( self ):
# HACK: instead of fixing after the fact, I suggest some combination of:
# 1) adjusting init_tools() and called methods to get this right
# 2) redesigning the code and/or data structure used to read/write integrated_tool_panel.xml
for key, value in self.integrated_tool_panel.iteritems():
if isinstance( value, ToolSection ):
for section_key, section_value in value.elems.iteritems():
if section_value is None:
if isinstance( section_value, Tool ):
tool_id = section_key[5:]
value.elems[section_key] = self.tools_by_id.get( tool_id )
elif isinstance( section_value, Workflow ):
workflow_id = section_key[9:]
value.elems[section_key] = self.workflows_by_id.get( workflow_id )
    def init_tools( self, config_filename ):
        """
        Read the configuration file and load each tool.  The following tags are currently supported:

        .. raw:: xml

            <toolbox>
                <tool file="data_source/upload.xml"/>                 # tools outside sections
                <label text="Basic Tools" id="basic_tools" />         # labels outside sections
                <workflow id="529fd61ab1c6cc36" />                    # workflows outside sections
                <section name="Get Data" id="getext">                 # sections
                    <tool file="data_source/biomart.xml" />           # tools inside sections
                    <label text="In Section" id="in_section" />       # labels inside sections
                    <workflow id="adb5f5c93f827949" />                # workflows inside sections
                </section>
            </toolbox>
        """
        if self.app.config.get_bool( 'enable_tool_tags', False ):
            # Tool tags are rebuilt from scratch, so drop existing associations.
            log.info("removing all tool tag associations (" + str( self.sa_session.query( self.app.model.ToolTagAssociation ).count() ) + ")" )
            self.sa_session.query( self.app.model.ToolTagAssociation ).delete()
            self.sa_session.flush()
        log.info( "Parsing the tool configuration %s" % config_filename )
        tree = parse_xml( config_filename )
        root = tree.getroot()
        tool_path = root.get( 'tool_path' )
        if tool_path:
            # We're parsing a shed_tool_conf file since we have a tool_path attribute.
            parsing_shed_tool_conf = True
            # Keep an in-memory list of xml elements to enable persistence of the changing tool config.
            config_elems = []
        else:
            parsing_shed_tool_conf = False
        # Falls back to tool_root_dir and expands $tool_conf_dir.
        tool_path = self.__resolve_tool_path(tool_path, config_filename)
        # Only load the panel_dict under certain conditions.
        load_panel_dict = not self.integrated_tool_panel_config_has_contents
        for _, elem in enumerate( root ):
            # Assign each top-level element a global load-order index.
            index = self.index
            self.index += 1
            if parsing_shed_tool_conf:
                config_elems.append( elem )
            if elem.tag == 'tool':
                self.load_tool_tag_set( elem, self.tool_panel, self.integrated_tool_panel, tool_path, load_panel_dict, guid=elem.get( 'guid' ), index=index )
            elif elem.tag == 'workflow':
                self.load_workflow_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict, index=index )
            elif elem.tag == 'section':
                self.load_section_tag_set( elem, tool_path, load_panel_dict, index=index )
            elif elem.tag == 'label':
                self.load_label_tag_set( elem, self.tool_panel, self.integrated_tool_panel, load_panel_dict, index=index )
        if parsing_shed_tool_conf:
            # Remember the parsed elements so the shed config can be rewritten.
            shed_tool_conf_dict = dict( config_filename=config_filename,
                                        tool_path=tool_path,
                                        config_elems=config_elems )
            self.shed_tool_confs.append( shed_tool_conf_dict )
def get_shed_config_dict_by_filename( self, filename, default=None ):
for shed_config_dict in self.shed_tool_confs:
if shed_config_dict[ 'config_filename' ] == filename:
return shed_config_dict
return default
def __resolve_tool_path(self, tool_path, config_filename):
if not tool_path:
# Default to backward compatible config setting.
tool_path = self.tool_root_dir
else:
# Allow use of __tool_conf_dir__ in toolbox config files.
tool_conf_dir = os.path.dirname(config_filename)
tool_path_vars = {"tool_conf_dir": tool_conf_dir}
tool_path = string.Template(tool_path).safe_substitute(tool_path_vars)
return tool_path
    def __add_tool_to_tool_panel( self, tool, panel_component, section=False ):
        """Insert *tool* into the in-memory tool panel, honoring the position
        recorded in integrated_tool_panel and replacing older lineage versions.
        """
        # See if a version of this tool is already loaded into the tool panel.  The value of panel_component
        # will be a ToolSection (if the value of section=True) or self.tool_panel (if section=False).
        tool_id = str( tool.id )
        tool = self.tools_by_id[ tool_id ]
        if section:
            panel_dict = panel_component.elems
        else:
            panel_dict = panel_component
        already_loaded = False
        loaded_version_key = None
        lineage_id = None
        # Look for any lineage version of this tool already in the panel.
        for lineage_id in tool.lineage_ids:
            if lineage_id in self.tools_by_id:
                loaded_version_key = 'tool_%s' % lineage_id
                if loaded_version_key in panel_dict:
                    already_loaded = True
                    break
        if already_loaded:
            # A version is present; replace it only if this tool is newer in
            # the lineage (greater index in lineage_ids).
            if tool.lineage_ids.index( tool_id ) > tool.lineage_ids.index( lineage_id ):
                key = 'tool_%s' % tool.id
                index = panel_dict.keys().index( loaded_version_key )
                del panel_dict[ loaded_version_key ]
                panel_dict.insert( index, key, tool )
                log.debug( "Loaded tool id: %s, version: %s into tool panel." % ( tool.id, tool.version ) )
        else:
            inserted = False
            key = 'tool_%s' % tool.id
            # The value of panel_component is the in-memory tool panel dictionary.
            # First try to insert at the position recorded for this exact id.
            for index, integrated_panel_key in enumerate( self.integrated_tool_panel.keys() ):
                if key == integrated_panel_key:
                    panel_dict.insert( index, key, tool )
                    if not inserted:
                        inserted = True
            if not inserted:
                # Check the tool's installed versions.
                for lineage_id in tool.lineage_ids:
                    lineage_id_key = 'tool_%s' % lineage_id
                    for index, integrated_panel_key in enumerate( self.integrated_tool_panel.keys() ):
                        if lineage_id_key == integrated_panel_key:
                            panel_dict.insert( index, key, tool )
                            if not inserted:
                                inserted = True
            if not inserted:
                if tool.guid is None or \
                        tool.tool_shed is None or \
                        tool.repository_name is None or \
                        tool.repository_owner is None or \
                        tool.installed_changeset_revision is None:
                    # We have a tool that was not installed from the Tool Shed, but is also not yet defined in
                    # integrated_tool_panel.xml, so append it to the tool panel.
                    panel_dict[ key ] = tool
                    log.debug( "Loaded tool id: %s, version: %s into tool panel.." % ( tool.id, tool.version ) )
                else:
                    # We are in the process of installing the tool.
                    tool_version = self.__get_tool_version( tool_id )
                    tool_lineage_ids = tool_version.get_version_ids( self.app, reverse=True )
                    for lineage_id in tool_lineage_ids:
                        if lineage_id in self.tools_by_id:
                            loaded_version_key = 'tool_%s' % lineage_id
                            if loaded_version_key in panel_dict:
                                if not already_loaded:
                                    already_loaded = True
                    if not already_loaded:
                        # If the tool is not defined in integrated_tool_panel.xml, append it to the tool panel.
                        panel_dict[ key ] = tool
                        log.debug( "Loaded tool id: %s, version: %s into tool panel...." % ( tool.id, tool.version ) )
    def load_tool_panel( self ):
        """Populate self.tool_panel in the order recorded in
        self.integrated_tool_panel, resolving keys to loaded tools,
        workflows, labels and sections."""
        for key, val in self.integrated_tool_panel.items():
            if isinstance( val, Tool ):
                # Keys are prefixed: 'tool_<id>'.
                tool_id = key.replace( 'tool_', '', 1 )
                if tool_id in self.tools_by_id:
                    self.__add_tool_to_tool_panel( val, self.tool_panel, section=False )
            elif isinstance( val, Workflow ):
                # Keys are prefixed: 'workflow_<id>'.
                workflow_id = key.replace( 'workflow_', '', 1 )
                if workflow_id in self.workflows_by_id:
                    workflow = self.workflows_by_id[ workflow_id ]
                    self.tool_panel[ key ] = workflow
                    log.debug( "Loaded workflow: %s %s" % ( workflow_id, workflow.name ) )
            elif isinstance( val, ToolSectionLabel ):
                self.tool_panel[ key ] = val
            elif isinstance( val, ToolSection ):
                # Rebuild the section from a fresh XML element, then fill it
                # with the loaded versions of its recorded members.
                elem = ElementTree.Element( 'section' )
                elem.attrib[ 'id' ] = val.id or ''
                elem.attrib[ 'name' ] = val.name or ''
                elem.attrib[ 'version' ] = val.version or ''
                section = ToolSection( elem )
                log.debug( "Loading section: %s" % elem.get( 'name' ) )
                for section_key, section_val in val.elems.items():
                    if isinstance( section_val, Tool ):
                        tool_id = section_key.replace( 'tool_', '', 1 )
                        if tool_id in self.tools_by_id:
                            self.__add_tool_to_tool_panel( section_val, section, section=True )
                    elif isinstance( section_val, Workflow ):
                        workflow_id = section_key.replace( 'workflow_', '', 1 )
                        if workflow_id in self.workflows_by_id:
                            workflow = self.workflows_by_id[ workflow_id ]
                            section.elems[ section_key ] = workflow
                            log.debug( "Loaded workflow: %s %s" % ( workflow_id, workflow.name ) )
                    elif isinstance( section_val, ToolSectionLabel ):
                        if section_val:
                            section.elems[ section_key ] = section_val
                            log.debug( "Loaded label: %s" % ( section_val.text ) )
                self.tool_panel[ key ] = section
def load_integrated_tool_panel_keys( self ):
"""
Load the integrated tool panel keys, setting values for tools and workflows to None. The values will
be reset when the various tool panel config files are parsed, at which time the tools and workflows are
loaded.
"""
tree = parse_xml( self.integrated_tool_panel_config )
root = tree.getroot()
for elem in root:
if elem.tag == 'tool':
key = 'tool_%s' % elem.get( 'id' )
self.integrated_tool_panel[ key ] = None
elif elem.tag == 'workflow':
key = 'workflow_%s' % elem.get( 'id' )
self.integrated_tool_panel[ key ] = None
elif elem.tag == 'section':
section = ToolSection( elem )
for section_elem in elem:
if section_elem.tag == 'tool':
key = 'tool_%s' % section_elem.get( 'id' )
section.elems[ key ] = None
elif section_elem.tag == 'workflow':
key = 'workflow_%s' % section_elem.get( 'id' )
section.elems[ key ] = None
elif section_elem.tag == 'label':
key = 'label_%s' % section_elem.get( 'id' )
section.elems[ key ] = None
key = elem.get( 'id' )
self.integrated_tool_panel[ key ] = section
elif elem.tag == 'label':
key = 'label_%s' % elem.get( 'id' )
self.integrated_tool_panel[ key ] = None
    def write_integrated_tool_panel_config_file( self ):
        """
        Write the current in-memory version of the integrated_tool_panel.xml file to disk.  Since Galaxy administrators
        use this file to manage the tool panel, we'll not use xml_to_string() since it doesn't write XML quite right.

        The file is written to a temp path first and then moved into place so
        readers never see a partially written file.
        NOTE(review): attribute values are interpolated with %s and not
        XML-escaped; names containing quotes would produce malformed XML -
        confirm whether that can occur.
        """
        fd, filename = tempfile.mkstemp()
        os.write( fd, '<?xml version="1.0"?>\n' )
        os.write( fd, '<toolbox>\n' )
        for key, item in self.integrated_tool_panel.items():
            if item:
                if isinstance( item, Tool ):
                    os.write( fd, '    <tool id="%s" />\n' % item.id )
                elif isinstance( item, Workflow ):
                    os.write( fd, '    <workflow id="%s" />\n' % item.id )
                elif isinstance( item, ToolSectionLabel ):
                    label_id = item.id or ''
                    label_text = item.text or ''
                    label_version = item.version or ''
                    os.write( fd, '    <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
                elif isinstance( item, ToolSection ):
                    section_id = item.id or ''
                    section_name = item.name or ''
                    section_version = item.version or ''
                    os.write( fd, '    <section id="%s" name="%s" version="%s">\n' % ( section_id, section_name, section_version ) )
                    for section_key, section_item in item.elems.items():
                        if isinstance( section_item, Tool ):
                            if section_item:
                                os.write( fd, '        <tool id="%s" />\n' % section_item.id )
                        elif isinstance( section_item, Workflow ):
                            if section_item:
                                os.write( fd, '        <workflow id="%s" />\n' % section_item.id )
                        elif isinstance( section_item, ToolSectionLabel ):
                            if section_item:
                                label_id = section_item.id or ''
                                label_text = section_item.text or ''
                                label_version = section_item.version or ''
                                os.write( fd, '        <label id="%s" text="%s" version="%s" />\n' % ( label_id, label_text, label_version ) )
                    os.write( fd, '    </section>\n' )
        os.write( fd, '</toolbox>\n' )
        os.close( fd )
        shutil.move( filename, os.path.abspath( self.integrated_tool_panel_config ) )
        # World-readable so other processes/admins can inspect the layout.
        os.chmod( self.integrated_tool_panel_config, 0644 )
    def get_tool( self, tool_id, tool_version=None, get_all_versions=False ):
        """Attempt to locate a tool in the tool box.

        Returns a single Tool (exact id hit, or best lineage match), a list of
        Tools when get_all_versions is True, or None when nothing matches.
        """
        if tool_id in self.tools_by_id and not get_all_versions:
            # tool_id exactly matches an available tool by id (which is 'old' tool_id or guid)
            return self.tools_by_id[ tool_id ]
        # Exact tool id match not found, or all versions requested; search for other
        # options, e.g. migrated tools or different versions.
        rval = []
        tv = self.__get_tool_version( tool_id )
        if tv:
            tool_version_ids = tv.get_version_ids( self.app )
            for tool_version_id in tool_version_ids:
                if tool_version_id in self.tools_by_id:
                    rval.append( self.tools_by_id[ tool_version_id ] )
        if not rval:
            # Still no tool; do a deeper search and try to match by old ids.
            for tool in self.tools_by_id.itervalues():
                if tool.old_id == tool_id:
                    rval.append( tool )
        if rval:
            if get_all_versions:
                return rval
            else:
                if tool_version:
                    # Return the first tool with a matching version.
                    for tool in rval:
                        if tool.version == tool_version:
                            return tool
                # No tool matches by version; simply return the first available tool found.
                return rval[0]
        # We now likely have a Toolshed guid passed in, but no supporting database entries.
        # If the tool exists by exact id and is loaded then provide exact match within a list.
        if tool_id in self.tools_by_id:
            return[ self.tools_by_id[ tool_id ] ]
        return None
def get_loaded_tools_by_lineage( self, tool_id ):
"""Get all loaded tools associated by lineage to the tool whose id is tool_id."""
tv = self.__get_tool_version( tool_id )
if tv:
tool_version_ids = tv.get_version_ids( self.app )
available_tool_versions = []
for tool_version_id in tool_version_ids:
if tool_version_id in self.tools_by_id:
available_tool_versions.append( self.tools_by_id[ tool_version_id ] )
return available_tool_versions
else:
if tool_id in self.tools_by_id:
tool = self.tools_by_id[ tool_id ]
return [ tool ]
return []
    def __get_tool_version( self, tool_id ):
        """Return a ToolVersion if one exists for the tool_id, else None."""
        return self.app.install_model.context.query( self.app.install_model.ToolVersion ) \
                                             .filter( self.app.install_model.ToolVersion.table.c.tool_id == tool_id ) \
                                             .first()
    def __get_tool_shed_repository( self, tool_shed, name, owner, installed_changeset_revision ):
        """Return the installed ToolShedRepository record matching the given
        shed/name/owner/revision, or None if it is not installed."""
        # We store only the port, if one exists, in the database.
        tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
        return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ) \
                                             .filter( and_( self.app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
                                                            self.app.install_model.ToolShedRepository.table.c.name == name,
                                                            self.app.install_model.ToolShedRepository.table.c.owner == owner,
                                                            self.app.install_model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
                                             .first()
    def get_tool_components( self, tool_id, tool_version=None, get_loaded_tools_by_lineage=False, set_selected=False ):
        """
        Retrieve all loaded versions of a tool from the toolbox and return a select list enabling
        selection of a different version, the list of the tool's loaded versions, and the specified tool.

        Returns a (tool_version_select_field, tools, tool) triple; the select
        field is None when only a single version is loaded.
        """
        toolbox = self
        tool_version_select_field = None
        tools = []
        tool = None
        # Backwards compatibility for datasource tools that have default tool_id configured, but which
        # are now using only GALAXY_URL.
        tool_ids = listify( tool_id )
        for tool_id in tool_ids:
            if get_loaded_tools_by_lineage:
                tools = toolbox.get_loaded_tools_by_lineage( tool_id )
            else:
                tools = toolbox.get_tool( tool_id, tool_version=tool_version, get_all_versions=True )
            if tools:
                tool = toolbox.get_tool( tool_id, tool_version=tool_version, get_all_versions=False )
                if len( tools ) > 1:
                    # Multiple versions loaded: offer a version picker.
                    tool_version_select_field = self.build_tool_version_select_field( tools, tool.id, set_selected )
                # Stop at the first id that resolved to any tools.
                break
        return tool_version_select_field, tools, tool
def build_tool_version_select_field( self, tools, tool_id, set_selected ):
"""Build a SelectField whose options are the ids for the received list of tools."""
options = []
refresh_on_change_values = []
for tool in tools:
options.insert( 0, ( tool.version, tool.id ) )
refresh_on_change_values.append( tool.id )
select_field = SelectField( name='tool_id', refresh_on_change=True, refresh_on_change_values=refresh_on_change_values )
for option_tup in options:
selected = set_selected and option_tup[ 1 ] == tool_id
if selected:
select_field.add_option( 'version %s' % option_tup[ 0 ], option_tup[ 1 ], selected=True )
else:
select_field.add_option( 'version %s' % option_tup[ 0 ], option_tup[ 1 ] )
return select_field
    def load_tool_tag_set( self, elem, panel_dict, integrated_panel_dict, tool_path, load_panel_dict, guid=None, index=None ):
        """
        Load a single <tool> tag from a tool panel config into the given panel
        dictionaries.

        :param elem: the <tool> XML element (its "file" attribute names the tool config).
        :param panel_dict: the in-memory tool panel (or section) dict to populate.
        :param integrated_panel_dict: dict backing integrated_tool_panel.xml; always updated.
        :param tool_path: base directory the tool config path is relative to.
        :param load_panel_dict: when True, also place the tool into ``panel_dict``.
        :param guid: tool shed guid when the tool comes from an installed repository.
        :param index: optional position at which to insert into the integrated panel.

        Any exception is caught and logged rather than propagated, so one broken
        tool config does not abort loading of the rest of the panel.
        """
        try:
            path = elem.get( "file" )
            repository_id = None
            if guid is None:
                tool_shed_repository = None
                can_load_into_panel_dict = True
            else:
                # The tool is contained in an installed tool shed repository, so load
                # the tool only if the repository has not been marked deleted.
                tool_shed = elem.find( "tool_shed" ).text
                repository_name = elem.find( "repository_name" ).text
                repository_owner = elem.find( "repository_owner" ).text
                installed_changeset_revision_elem = elem.find( "installed_changeset_revision" )
                if installed_changeset_revision_elem is None:
                    # Backward compatibility issue - the tag used to be named 'changeset_revision'.
                    installed_changeset_revision_elem = elem.find( "changeset_revision" )
                installed_changeset_revision = installed_changeset_revision_elem.text
                tool_shed_repository = self.__get_tool_shed_repository( tool_shed,
                                                                        repository_name,
                                                                        repository_owner,
                                                                        installed_changeset_revision )
                if tool_shed_repository:
                    # Only load tools if the repository is not deactivated or uninstalled.
                    can_load_into_panel_dict = not tool_shed_repository.deleted
                    repository_id = self.app.security.encode_id( tool_shed_repository.id )
                else:
                    # If there is not yet a tool_shed_repository record, we're in the process of installing
                    # a new repository, so any included tools can be loaded into the tool panel.
                    can_load_into_panel_dict = True
            tool = self.load_tool( os.path.join( tool_path, path ), guid=guid, repository_id=repository_id )
            if string_as_bool(elem.get( 'hidden', False )):
                tool.hidden = True
            key = 'tool_%s' % str( tool.id )
            if can_load_into_panel_dict:
                if guid is not None:
                    # Stamp the tool with its tool shed provenance.
                    tool.tool_shed = tool_shed
                    tool.repository_name = repository_name
                    tool.repository_owner = repository_owner
                    tool.installed_changeset_revision = installed_changeset_revision
                    tool.guid = guid
                    tool.version = elem.find( "version" ).text
                # Make sure the tool has a tool_version.
                if not self.__get_tool_version( tool.id ):
                    tool_version = self.app.install_model.ToolVersion( tool_id=tool.id, tool_shed_repository=tool_shed_repository )
                    self.app.install_model.context.add( tool_version )
                    self.app.install_model.context.flush()
                # Load the tool's lineage ids.
                tool.lineage_ids = tool.tool_version.get_version_ids( self.app )
                if self.app.config.get_bool( 'enable_tool_tags', False ):
                    # Create/associate Tag records for any "tags" attribute values.
                    tag_names = elem.get( "tags", "" ).split( "," )
                    for tag_name in tag_names:
                        if tag_name == '':
                            continue
                        tag = self.sa_session.query( self.app.model.Tag ).filter_by( name=tag_name ).first()
                        if not tag:
                            tag = self.app.model.Tag( name=tag_name )
                            self.sa_session.add( tag )
                            self.sa_session.flush()
                            tta = self.app.model.ToolTagAssociation( tool_id=tool.id, tag_id=tag.id )
                            self.sa_session.add( tta )
                            self.sa_session.flush()
                        else:
                            # for/else: only create the association when no existing
                            # association for this tool was found in the loop.
                            for tagged_tool in tag.tagged_tools:
                                if tagged_tool.tool_id == tool.id:
                                    break
                            else:
                                tta = self.app.model.ToolTagAssociation( tool_id=tool.id, tag_id=tag.id )
                                self.sa_session.add( tta )
                                self.sa_session.flush()
                # Allow for the same tool to be loaded into multiple places in the tool panel.  We have to handle
                # the case where the tool is contained in a repository installed from the tool shed, and the Galaxy
                # administrator has retrieved updates to the installed repository.  In this case, the tool may have
                # been updated, but the version was not changed, so the tool should always be reloaded here.  We used
                # to only load the tool if it was not found in self.tools_by_id, but performing that check did
                # not enable this scenario.
                self.tools_by_id[ tool.id ] = tool
                if load_panel_dict:
                    self.__add_tool_to_tool_panel( tool, panel_dict, section=isinstance( panel_dict, ToolSection ) )
            # Always load the tool into the integrated_panel_dict, or it will not be included in the integrated_tool_panel.xml file.
            if key in integrated_panel_dict or index is None:
                integrated_panel_dict[ key ] = tool
            else:
                integrated_panel_dict.insert( index, key, tool )
        except:
            log.exception( "Error reading tool from path: %s" % path )
    def load_workflow_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict, index=None ):
        """
        Load a single <workflow> tag from a tool panel config into the panel dicts.

        The workflow id is read from the element, the stored workflow is loaded,
        and it is registered under the key ``workflow_<id>``.  The integrated
        panel dict is always updated; ``panel_dict`` only when ``load_panel_dict``
        is True.  Failures are logged, not raised.
        """
        try:
            # TODO: should id be encoded?
            workflow_id = elem.get( 'id' )
            workflow = self.load_workflow( workflow_id )
            self.workflows_by_id[ workflow_id ] = workflow
            key = 'workflow_' + workflow_id
            if load_panel_dict:
                panel_dict[ key ] = workflow
            # Always load workflows into the integrated_panel_dict.
            if key in integrated_panel_dict or index is None:
                integrated_panel_dict[ key ] = workflow
            else:
                integrated_panel_dict.insert( index, key, workflow )
        except:
            # NOTE(review): if elem.get itself raised, workflow_id would be
            # unbound here and this logging line would raise NameError.
            log.exception( "Error loading workflow: %s" % workflow_id )
def load_label_tag_set( self, elem, panel_dict, integrated_panel_dict, load_panel_dict, index=None ):
label = ToolSectionLabel( elem )
key = 'label_' + label.id
if load_panel_dict:
panel_dict[ key ] = label
if key in integrated_panel_dict or index is None:
integrated_panel_dict[ key ] = label
else:
integrated_panel_dict.insert( index, key, label )
    def load_section_tag_set( self, elem, tool_path, load_panel_dict, index=None ):
        """
        Load a <section> tag (and its nested tool/workflow/label children) from
        a tool panel config.

        Existing sections in ``self.tool_panel`` / ``self.integrated_tool_panel``
        are reused (so their elems are extended); otherwise fresh ToolSection
        instances are created.  Children are dispatched to the matching
        load_*_tag_set method with their position inside the section.
        """
        key = elem.get( "id" )
        if key in self.tool_panel:
            section = self.tool_panel[ key ]
            elems = section.elems
        else:
            section = ToolSection( elem )
            elems = section.elems
        if key in self.integrated_tool_panel:
            integrated_section = self.integrated_tool_panel[ key ]
            integrated_elems = integrated_section.elems
        else:
            integrated_section = ToolSection( elem )
            integrated_elems = integrated_section.elems
        for sub_index, sub_elem in enumerate( elem ):
            if sub_elem.tag == 'tool':
                self.load_tool_tag_set( sub_elem, elems, integrated_elems, tool_path, load_panel_dict, guid=sub_elem.get( 'guid' ), index=sub_index )
            elif sub_elem.tag == 'workflow':
                self.load_workflow_tag_set( sub_elem, elems, integrated_elems, load_panel_dict, index=sub_index )
            elif sub_elem.tag == 'label':
                self.load_label_tag_set( sub_elem, elems, integrated_elems, load_panel_dict, index=sub_index )
        if load_panel_dict:
            self.tool_panel[ key ] = section
        # Always load sections into the integrated_tool_panel.
        if key in self.integrated_tool_panel or index is None:
            self.integrated_tool_panel[ key ] = integrated_section
        else:
            self.integrated_tool_panel.insert( index, key, integrated_section )
    def load_tool( self, config_file, guid=None, repository_id=None, **kwds ):
        """Load a single tool from the file named by `config_file` and return an instance of `Tool`."""
        # Parse XML configuration file and get the root element
        # NOTE: this calls the module-level load_tool() helper, which shares a
        # name with this method but is resolved from module scope here.
        tree = load_tool( config_file )
        root = tree.getroot()
        # Allow specifying a different tool subclass to instantiate
        if root.find( "type" ) is not None:
            # <type module="..." class="..."/> names an arbitrary Tool subclass.
            type_elem = root.find( "type" )
            module = type_elem.get( 'module', 'galaxy.tools' )
            cls = type_elem.get( 'class' )
            mod = __import__( module, globals(), locals(), [cls] )
            ToolClass = getattr( mod, cls )
        elif root.get( 'tool_type', None ) is not None:
            # A registered short name, e.g. "data_source".
            ToolClass = tool_types.get( root.get( 'tool_type' ) )
        else:
            # Normal tool - only insert dynamic resource parameters for these
            # tools.
            if hasattr( self.app, "job_config" ):  # toolshed may not have job_config?
                tool_id = root.get( 'id' ) if root else None
                parameters = self.app.job_config.get_tool_resource_parameters( tool_id )
                if parameters:
                    inputs = root.find('inputs')
                    # If tool has not inputs, create some so we can insert conditional
                    if not inputs:
                        inputs = ElementTree.fromstring( "<inputs></inputs>")
                        root.append( inputs )
                    # Insert a conditional allowing user to specify resource parameters.
                    conditional_element = ElementTree.fromstring( JOB_RESOURCE_CONDITIONAL_XML )
                    when_yes_elem = conditional_element.findall( "when" )[ 1 ]
                    for parameter in parameters:
                        when_yes_elem.append( parameter )
                    inputs.append( conditional_element )
            ToolClass = Tool
        return ToolClass( config_file, root, self.app, guid=guid, repository_id=repository_id, **kwds )
    def package_tool( self, trans, tool_id ):
        """
        Create a tarball with the tool's xml, help images, and test data.
        :param trans: the web transaction
        :param tool_id: the tool ID from app.toolbox
        :returns: tuple of tarball filename, success True/False, message/None
        """
        # NOTE(review): message/success are assigned but never used below;
        # the returns build their own tuples.
        message = ''
        success = True
        # Make sure the tool is actually loaded.
        if tool_id not in self.tools_by_id:
            return None, False, "No tool with id %s" % tool_id
        else:
            tool = self.tools_by_id[ tool_id ]
            tarball_files = []
            temp_files = []
            # NOTE: file() is the Python 2 builtin; handles here are never
            # explicitly closed (relies on GC).
            tool_xml = file( os.path.abspath( tool.config_file ), 'r' ).read()
            # Retrieve tool help images and rewrite the tool's xml into a temporary file with the path
            # modified to be relative to the repository root.
            tool_help = tool.help._source
            image_found = False
            # Check each line of the rendered tool help for an image tag that points to a location under static/
            for help_line in tool_help.split( '\n' ):
                image_regex = re.compile( 'img alt="[^"]+" src="\${static_path}/([^"]+)"' )
                matches = re.search( image_regex, help_line )
                if matches is not None:
                    tool_help_image = matches.group(1)
                    tarball_path = tool_help_image
                    filesystem_path = os.path.abspath( os.path.join( trans.app.config.root, 'static', tool_help_image ) )
                    if os.path.exists( filesystem_path ):
                        tarball_files.append( ( filesystem_path, tarball_path ) )
                        image_found = True
                        tool_xml = tool_xml.replace( '${static_path}/%s' % tarball_path, tarball_path )
            # If one or more tool help images were found, add the modified tool XML to the tarball instead of the original.
            if image_found:
                fd, new_tool_config = tempfile.mkstemp( suffix='.xml' )
                os.close( fd )
                file( new_tool_config, 'w' ).write( tool_xml )
                tool_tup = ( os.path.abspath( new_tool_config ), os.path.split( tool.config_file )[-1] )
                temp_files.append( os.path.abspath( new_tool_config ) )
            else:
                tool_tup = ( os.path.abspath( tool.config_file ), os.path.split( tool.config_file )[-1] )
            tarball_files.append( tool_tup )
            # TODO: This feels hacky.
            tool_command = tool.command.split( ' ' )[0]
            tool_path = os.path.dirname( os.path.abspath( tool.config_file ) )
            # Add the tool XML to the tuple that will be used to populate the tarball.
            if os.path.exists( os.path.join( tool_path, tool_command ) ):
                tarball_files.append( ( os.path.join( tool_path, tool_command ), tool_command ) )
            # Find and add macros and code files.
            for external_file in tool.get_externally_referenced_paths( os.path.abspath( tool.config_file ) ):
                external_file_abspath = os.path.abspath( os.path.join( tool_path, external_file ) )
                tarball_files.append( ( external_file_abspath, external_file ) )
            # Find tests, and check them for test data.
            tests = tool.tests
            if tests is not None:
                for test in tests:
                    # Add input file tuples to the list.
                    for input in test.inputs:
                        for input_value in test.inputs[ input ]:
                            input_path = os.path.abspath( os.path.join( 'test-data', input_value ) )
                            if os.path.exists( input_path ):
                                td_tup = ( input_path, os.path.join( 'test-data', input_value ) )
                                tarball_files.append( td_tup )
                    # And add output file tuples to the list.
                    for label, filename, _ in test.outputs:
                        output_filepath = os.path.abspath( os.path.join( 'test-data', filename ) )
                        if os.path.exists( output_filepath ):
                            td_tup = ( output_filepath, os.path.join( 'test-data', filename ) )
                            tarball_files.append( td_tup )
            for param in tool.input_params:
                # Check for tool data table definitions.
                if hasattr( param, 'options' ):
                    if hasattr( param.options, 'tool_data_table' ):
                        data_table = param.options.tool_data_table
                        if hasattr( data_table, 'filenames' ):
                            data_table_definitions = []
                            for data_table_filename in data_table.filenames:
                                # FIXME: from_shed_config seems to always be False.
                                if not data_table.filenames[ data_table_filename ][ 'from_shed_config' ]:
                                    tar_file = data_table.filenames[ data_table_filename ][ 'filename' ] + '.sample'
                                    sample_file = os.path.join( data_table.filenames[ data_table_filename ][ 'tool_data_path' ],
                                                                tar_file )
                                    # Use the .sample file, if one exists. If not, skip this data table.
                                    if os.path.exists( sample_file ):
                                        tarfile_path, tarfile_name = os.path.split( tar_file )
                                        tarfile_path = os.path.join( 'tool-data', tarfile_name )
                                        sample_name = tarfile_path + '.sample'
                                        tarball_files.append( ( sample_file, tarfile_path ) )
                                        data_table_definitions.append( data_table.xml_string )
                            if len( data_table_definitions ) > 0:
                                # Put the data table definition XML in a temporary file.
                                table_definition = '<?xml version="1.0" encoding="utf-8"?>\n<tables>\n    %s</tables>'
                                table_definition = table_definition % '\n'.join( data_table_definitions )
                                fd, table_conf = tempfile.mkstemp()
                                os.close( fd )
                                file( table_conf, 'w' ).write( table_definition )
                                tarball_files.append( ( table_conf, os.path.join( 'tool-data', 'tool_data_table_conf.xml.sample' ) ) )
                                temp_files.append( table_conf )
            # Create the tarball.
            fd, tarball_archive = tempfile.mkstemp( suffix='.tgz' )
            os.close( fd )
            tarball = tarfile.open( name=tarball_archive, mode='w:gz' )
            # Add the files from the previously generated list.
            for fspath, tarpath in tarball_files:
                tarball.add( fspath, arcname=tarpath )
            tarball.close()
            # Delete any temporary files that were generated.
            for temp_file in temp_files:
                os.remove( temp_file )
            return tarball_archive, True, None
        # NOTE(review): unreachable - both branches of the if/else above return.
        return None, False, "An unknown error occurred."
    def reload_tool_by_id( self, tool_id ):
        """
        Attempt to reload the tool identified by 'tool_id', if successful
        replace the old tool.

        :returns: ( html message, status ) where status is 'done' or 'error'.
        """
        if tool_id not in self.tools_by_id:
            message = "No tool with id %s" % tool_id
            status = 'error'
        else:
            old_tool = self.tools_by_id[ tool_id ]
            new_tool = self.load_tool( old_tool.config_file )
            # The tool may have been installed from a tool shed, so set the tool shed attributes.
            # Since the tool version may have changed, we don't override it here.
            new_tool.id = old_tool.id
            new_tool.guid = old_tool.guid
            new_tool.tool_shed = old_tool.tool_shed
            new_tool.repository_name = old_tool.repository_name
            new_tool.repository_owner = old_tool.repository_owner
            new_tool.installed_changeset_revision = old_tool.installed_changeset_revision
            new_tool.old_id = old_tool.old_id
            # Replace old_tool with new_tool in self.tool_panel
            tool_key = 'tool_' + tool_id
            for key, val in self.tool_panel.items():
                if key == tool_key:
                    # Tool lives at the top level of the panel.
                    self.tool_panel[ key ] = new_tool
                    break
                elif key.startswith( 'section' ):
                    # Tool may live inside a section's elems.
                    if tool_key in val.elems:
                        self.tool_panel[ key ].elems[ tool_key ] = new_tool
                        break
            self.tools_by_id[ tool_id ] = new_tool
            message = "Reloaded the tool:<br/>"
            message += "<b>name:</b> %s<br/>" % old_tool.name
            message += "<b>id:</b> %s<br/>" % old_tool.id
            message += "<b>version:</b> %s" % old_tool.version
            status = 'done'
        return message, status
def remove_tool_by_id( self, tool_id ):
"""
Attempt to remove the tool identified by 'tool_id'.
"""
if tool_id not in self.tools_by_id:
message = "No tool with id %s" % tool_id
status = 'error'
else:
tool = self.tools_by_id[ tool_id ]
del self.tools_by_id[ tool_id ]
tool_key = 'tool_' + tool_id
for key, val in self.tool_panel.items():
if key == tool_key:
del self.tool_panel[ key ]
break
elif key.startswith( 'section' ):
if tool_key in val.elems:
del self.tool_panel[ key ].elems[ tool_key ]
break
if tool_id in self.data_manager_tools:
del self.data_manager_tools[ tool_id ]
#TODO: do we need to manually remove from the integrated panel here?
message = "Removed the tool:<br/>"
message += "<b>name:</b> %s<br/>" % tool.name
message += "<b>id:</b> %s<br/>" % tool.id
message += "<b>version:</b> %s" % tool.version
status = 'done'
return message, status
def load_workflow( self, workflow_id ):
"""
Return an instance of 'Workflow' identified by `id`,
which is encoded in the tool panel.
"""
id = self.app.security.decode_id( workflow_id )
stored = self.app.model.context.query( self.app.model.StoredWorkflow ).get( id )
return stored.latest_workflow
    def init_dependency_manager( self ):
        # Build (or rebuild) the manager that resolves tool requirement
        # dependencies, configured from the application config.
        self.dependency_manager = build_dependency_manager( self.app.config )
    @property
    def sa_session( self ):
        """
        Returns the application's SQLAlchemy session/context, used for
        querying and persisting model objects.
        """
        return self.app.model.context
def to_dict( self, trans, in_panel=True, **kwds ):
"""
to_dict toolbox.
"""
context = Bunch( toolbox=self, trans=trans, **kwds )
if in_panel:
panel_elts = [ val for val in self.tool_panel.itervalues() ]
filters = self.filter_factory.build_filters( trans, **kwds )
filtered_panel_elts = []
for index, elt in enumerate( panel_elts ):
elt = _filter_for_panel( elt, filters, context )
if elt:
filtered_panel_elts.append( elt )
panel_elts = filtered_panel_elts
# Produce panel.
rval = []
kwargs = dict(
trans=trans,
link_details=True
)
for elt in panel_elts:
rval.append( to_dict_helper( elt, kwargs ) )
else:
tools = []
for id, tool in self.tools_by_id.items():
tools.append( tool.to_dict( trans, link_details=True ) )
rval = tools
return rval
def _filter_for_panel( item, filters, context ):
    """
    Filters tool panel elements so that only those that are compatible
    with provided filters are kept.

    :param item: a Tool, ToolSectionLabel, or ToolSection panel element.
    :param filters: dict with 'tool', 'label' and 'section' filter-method lists.
    :param context: passed to each filter method.
    :returns: the item (or a filtered copy for sections), or None if dropped.
    """
    def _apply_filter( filter_item, filter_list ):
        # An item survives only if every filter method accepts it.
        for filter_method in filter_list:
            if not filter_method( context, filter_item ):
                return False
        return True
    if isinstance( item, Tool ):
        if _apply_filter( item, filters[ 'tool' ] ):
            return item
    elif isinstance( item, ToolSectionLabel ):
        if _apply_filter( item, filters[ 'label' ] ):
            return item
    elif isinstance( item, ToolSection ):
        # Filter section item-by-item. Only show a label if there are
        # non-filtered tools below it.
        if _apply_filter( item, filters[ 'section' ] ):
            cur_label_key = None
            tools_under_label = False
            filtered_elems = item.elems.copy()
            for key, section_item in item.elems.items():
                if isinstance( section_item, Tool ):
                    # Filter tool.
                    if _apply_filter( section_item, filters[ 'tool' ] ):
                        tools_under_label = True
                    else:
                        del filtered_elems[ key ]
                elif isinstance( section_item, ToolSectionLabel ):
                    # If there is a label and it does not have tools,
                    # remove it.
                    # NOTE(review): if the very first label fails its filter,
                    # cur_label_key is still None here and this delete would
                    # raise KeyError - verify against callers/filters.
                    if ( cur_label_key and not tools_under_label ) or not _apply_filter( section_item, filters[ 'label' ] ):
                        del filtered_elems[ cur_label_key ]
                    # Reset attributes for new label.
                    cur_label_key = key
                    tools_under_label = False
            # Handle last label.
            if cur_label_key and not tools_under_label:
                del filtered_elems[ cur_label_key ]
            # Only return section if there are elements.
            if len( filtered_elems ) != 0:
                copy = item.copy()
                copy.elems = filtered_elems
                return copy
    return None
class ToolSection( object, Dictifiable ):
    """
    A group of tools with similar type/purpose that will be displayed as a
    group in the user interface.
    """
    dict_collection_visible_keys = ( 'id', 'name', 'version' )

    def __init__( self, elem=None ):
        # Pull display attributes off the XML element; missing element or
        # missing attribute both yield the empty string.
        def attribute_or_empty( element, attribute_name ):
            return element is not None and element.get( attribute_name ) or ''
        self.name = attribute_or_empty( elem, 'name' )
        self.id = attribute_or_empty( elem, 'id' )
        self.version = attribute_or_empty( elem, 'version' )
        self.elems = odict()

    def copy( self ):
        """Return a copy of this section; the elems mapping is shallow-copied."""
        duplicate = ToolSection()
        duplicate.name = self.name
        duplicate.id = self.id
        duplicate.version = self.version
        duplicate.elems = self.elems.copy()
        return duplicate

    def to_dict( self, trans, link_details=False ):
        """ Return a dict that includes section's attributes. """
        section_dict = super( ToolSection, self ).to_dict()
        kwargs = dict(
            trans=trans,
            link_details=link_details
        )
        section_dict[ 'elems' ] = [ to_dict_helper( elt, kwargs ) for elt in self.elems.values() ]
        return section_dict
class ToolSectionLabel( object, Dictifiable ):
    """
    A label for a set of tools that can be displayed above groups of tools
    and sections in the user interface.
    """
    dict_collection_visible_keys = ( 'id', 'text', 'version' )
    def __init__( self, elem ):
        # text: the display text of the label; id: its panel identity.
        self.text = elem.get( "text" )
        self.id = elem.get( "id" )
        # version is optional and defaults to the empty string.
        self.version = elem.get( "version" ) or ''
class DefaultToolState( object ):
    """
    Keeps track of the state of a user's interaction with a tool between
    requests. The default tool state keeps track of the current page (for
    multipage "wizard" tools) and the values of all input parameters.
    """
    def __init__( self ):
        # Current page of a multipage tool form.
        self.page = 0
        # Job id to remap when re-running, if any.
        self.rerun_remap_job_id = None
        # Parameter values; populated via decode() or by the caller.
        self.inputs = None
    def encode( self, tool, app, secure=True ):
        """
        Convert the state to a string.  When ``secure`` is True the result is
        "<hmac>:<hex payload>" so decode() can verify it was not tampered with.
        """
        # Convert parameters to a dictionary of strings, and save current
        # page in that dict
        value = params_to_strings( tool.inputs, self.inputs, app )
        value["__page__"] = self.page
        value["__rerun_remap_job_id__"] = self.rerun_remap_job_id
        value = json.dumps( value )
        # Make it secure
        if secure:
            # HMAC over the JSON payload using the app's tool secret.
            a = hmac_new( app.config.tool_secret, value )
            b = binascii.hexlify( value )
            return "%s:%s" % ( a, b )
        else:
            return value
    def decode( self, value, tool, app, secure=True ):
        """
        Restore the state from a string produced by encode().
        """
        if secure:
            # Extract and verify hash
            a, b = value.split( ":" )
            value = binascii.unhexlify( b )
            test = hmac_new( app.config.tool_secret, value )
            # NOTE(review): assert is stripped under python -O, which would
            # disable this integrity check.
            assert a == test
            # Restore from string
        values = json_fix( json.loads( value ) )
        self.page = values.pop( "__page__" )
        if '__rerun_remap_job_id__' in values:
            self.rerun_remap_job_id = values.pop( "__rerun_remap_job_id__" )
        else:
            self.rerun_remap_job_id = None
        self.inputs = params_from_strings( tool.inputs, values, app, ignore_errors=True )
    def copy( self ):
        """
        WARNING! Makes a shallow copy, *SHOULD* rework to have it make a deep
        copy.
        """
        new_state = DefaultToolState()
        new_state.page = self.page
        new_state.rerun_remap_job_id = self.rerun_remap_job_id
        # This need to be copied.
        new_state.inputs = self.inputs
        return new_state
class ToolOutput( object, Dictifiable ):
    """
    Represents an output dataset produced by a tool.  For backward
    compatibility this object also emulates the legacy 3-tuple::

      (format, metadata_source, parent)
    """
    dict_collection_visible_keys = ( 'name', 'format', 'label', 'hidden' )

    def __init__( self, name, format=None, format_source=None, metadata_source=None,
                  parent=None, label=None, filters=None, actions=None, hidden=False ):
        self.name = name
        self.format = format
        self.format_source = format_source
        self.metadata_source = metadata_source
        self.parent = parent
        self.label = label
        self.filters = filters or []
        self.actions = actions
        self.hidden = hidden

    # Tuple emulation: len(), indexing and iteration mimic the legacy triple.

    def __len__( self ):
        return 3

    def __getitem__( self, index ):
        # Only the literal indices 0, 1 and 2 are valid; anything else
        # (including negative indices) raises IndexError, as before.
        if index == 0:
            return self.format
        if index == 1:
            return self.metadata_source
        if index == 2:
            return self.parent
        raise IndexError( index )

    def __iter__( self ):
        return iter( ( self.format, self.metadata_source, self.parent ) )
class Tool( object, Dictifiable ):
    """
    Represents a computational tool that can be executed through Galaxy.
    """
    # Short name used to register/dispatch Tool subclasses by type.
    tool_type = 'default'
    requires_setting_metadata = True
    # Action class instantiated when a tool config has no <action> element.
    default_tool_action = DefaultToolAction
    dict_collection_visible_keys = ( 'id', 'name', 'version', 'description' )
    # Template rendered for the tool's input form.
    default_template = 'tool_form.mako'
    def __init__( self, config_file, root, app, guid=None, repository_id=None ):
        """
        Load a tool from the config named by `config_file`.

        :param config_file: path to the tool's XML configuration file.
        :param root: parsed root element of that XML file.
        :param app: the Galaxy application instance.
        :param guid: tool shed guid, when installed from a tool shed.
        :param repository_id: encoded id of the owning tool shed repository, if any.
        """
        # Determine the full path of the directory where the tool config is
        self.config_file = config_file
        self.tool_dir = os.path.dirname( config_file )
        self.app = app
        self.repository_id = repository_id
        #setup initial attribute values
        self.inputs = odict()
        self.stdio_exit_codes = list()
        self.stdio_regexes = list()
        self.inputs_by_page = list()
        self.display_by_page = list()
        self.action = '/tool_runner/index'
        self.target = 'galaxy_main'
        self.method = 'post'
        self.check_values = True
        self.nginx_upload = False
        self.input_required = False
        self.display_interface = True
        self.require_login = False
        self.rerun = False
        # Define a place to keep track of all input   These
        # differ from the inputs dictionary in that inputs can be page
        # elements like conditionals, but input_params are basic form
        # parameters like SelectField objects.  This enables us to more
        # easily ensure that parameter dependencies like index files or
        # tool_data_table_conf.xml entries exist.
        self.input_params = []
        # Attributes of tools installed from Galaxy tool sheds.
        self.tool_shed = None
        self.repository_name = None
        self.repository_owner = None
        self.installed_changeset_revision = None
        # The tool.id value will be the value of guid, but we'll keep the
        # guid attribute since it is useful to have.
        self.guid = guid
        self.old_id = None
        self.version = None
        # Enable easy access to this tool's version lineage.
        self.lineage_ids = []
        #populate toolshed repository info, if available
        self.populate_tool_shed_info()
        # Parse XML element containing configuration
        self.parse( root, guid=guid )
        self.external_runJob_script = app.config.drmaa_external_runjob_script
    @property
    def sa_session( self ):
        """Returns the application's SQLAlchemy session/context."""
        return self.app.model.context
    @property
    def tool_version( self ):
        """Return a ToolVersion if one exists for our id, else None."""
        # Queried fresh from the install model on each access.
        return self.app.install_model.context.query( self.app.install_model.ToolVersion ) \
                                             .filter( self.app.install_model.ToolVersion.table.c.tool_id == self.id ) \
                                             .first()
@property
def tool_versions( self ):
# If we have versions, return them.
tool_version = self.tool_version
if tool_version:
return tool_version.get_versions( self.app )
return []
@property
def tool_version_ids( self ):
# If we have versions, return a list of their tool_ids.
tool_version = self.tool_version
if tool_version:
return tool_version.get_version_ids( self.app )
return []
    @property
    def tool_shed_repository( self ):
        # If this tool is included in an installed tool shed repository, return it.
        # self.tool_shed is only set for tools installed from a tool shed, so a
        # falsy value short-circuits to None.
        if self.tool_shed:
            return suc.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
                                                                                                 self.tool_shed,
                                                                                                 self.repository_name,
                                                                                                 self.repository_owner,
                                                                                                 self.installed_changeset_revision )
        return None
    def __get_job_tool_configuration(self, job_params=None):
        """Generalized method for getting this tool's job configuration.
        :type job_params: dict or None
        :returns: `galaxy.jobs.JobToolConfiguration` -- JobToolConfiguration that matches this `Tool` and the given `job_params`
        """
        rval = None
        if len(self.job_tool_configurations) == 1:
            # If there's only one config, use it rather than wasting time on comparisons
            rval = self.job_tool_configurations[0]
        elif job_params is None:
            # No job params given: pick the first config that declares no params.
            for job_tool_config in self.job_tool_configurations:
                if not job_tool_config.params:
                    rval = job_tool_config
                    break
        else:
            for job_tool_config in self.job_tool_configurations:
                if job_tool_config.params:
                    # There are job params and this config has params defined
                    for param, value in job_params.items():
                        if param not in job_tool_config.params or job_tool_config.params[param] != job_params[param]:
                            break
                    else:
                        # All params match, use this config
                        # (for/else: the else runs only when no break occurred).
                        rval = job_tool_config
                        break
                else:
                    # Param-less config acts as a fallback; keep scanning for a
                    # better (param-matching) config.
                    rval = job_tool_config
        assert rval is not None, 'Could not get a job tool configuration for Tool %s with job_params %s, this is a bug' % (self.id, job_params)
        return rval
    def get_job_handler(self, job_params=None):
        """Get a suitable job handler for this `Tool` given the provided `job_params`.  If multiple handlers are valid for combination of `Tool` and `job_params` (e.g. the defined handler is a handler tag), one will be selected at random.

        :param job_params: Any params specific to this job (e.g. the job source)
        :type job_params: dict or None

        :returns: str -- The id of a job handler for a job run of this `Tool`
        """
        # convert tag to ID if necessary
        return self.app.job_config.get_handler(self.__get_job_tool_configuration(job_params=job_params).handler)
    def get_job_destination(self, job_params=None):
        """
        Resolve the job destination for a run of this tool.

        :returns: galaxy.jobs.JobDestination -- The destination definition and runner parameters.
        """
        return self.app.job_config.get_destination(self.__get_job_tool_configuration(job_params=job_params).destination)
    def get_panel_section( self ):
        """
        Locate this tool in the integrated tool panel.

        :returns: ( '', '' ) if the tool sits at the panel's top level,
                  ( section_id, section_name ) if it sits inside a section,
                  or ( None, None ) if it is not found.
        """
        for key, item in self.app.toolbox.integrated_tool_panel.items():
            if item:
                if isinstance( item, Tool ):
                    if item.id == self.id:
                        return '', ''
                if isinstance( item, ToolSection ):
                    section_id = item.id or ''
                    section_name = item.name or ''
                    for section_key, section_item in item.elems.items():
                        if isinstance( section_item, Tool ):
                            if section_item:
                                if section_item.id == self.id:
                                    return section_id, section_name
        return None, None
    def parse( self, root, guid=None ):
        """
        Read tool configuration from the element `root` and fill in `self`.

        :param root: root XML element of the tool config.
        :param guid: tool shed guid; when given it becomes the tool id.
        :raises Exception: if the config is missing a 'name' or 'id'.
        """
        # Get the (user visible) name of the tool
        self.name = root.get( "name" )
        if not self.name:
            raise Exception( "Missing tool 'name'" )
        # Get the UNIQUE id for the tool
        self.old_id = root.get( "id" )
        if guid is None:
            self.id = self.old_id
        else:
            self.id = guid
        if not self.id:
            raise Exception( "Missing tool 'id'" )
        self.version = root.get( "version" )
        if not self.version:
            # For backward compatibility, some tools may not have versions yet.
            self.version = "1.0.0"
        # Support multi-byte tools
        self.is_multi_byte = string_as_bool( root.get( "is_multi_byte", False ) )
        # Force history to fully refresh after job execution for this tool.
        # Useful i.e. when an indeterminate number of outputs are created by
        # a tool.
        self.force_history_refresh = string_as_bool( root.get( 'force_history_refresh', 'False' ) )
        self.display_interface = string_as_bool( root.get( 'display_interface', str( self.display_interface ) ) )
        self.require_login = string_as_bool( root.get( 'require_login', str( self.require_login ) ) )
        # Load input translator, used by datasource tools to change names/values of incoming parameters
        self.input_translator = root.find( "request_param_translation" )
        if self.input_translator:
            self.input_translator = ToolInputTranslator.from_element( self.input_translator )
        # Command line (template). Optional for tools that do not invoke a local program
        command = root.find("command")
        if command is not None and command.text is not None:
            self.command = command.text.lstrip() # get rid of leading whitespace
            # Must pre-pend this AFTER processing the cheetah command template
            self.interpreter = command.get( "interpreter", None )
        else:
            self.command = ''
            self.interpreter = None
        # Parameters used to build URL for redirection to external app
        redirect_url_params = root.find( "redirect_url_params" )
        if redirect_url_params is not None and redirect_url_params.text is not None:
            # get rid of leading / trailing white space
            redirect_url_params = redirect_url_params.text.strip()
            # Replace remaining white space with something we can safely split on later
            # when we are building the params
            self.redirect_url_params = redirect_url_params.replace( ' ', '**^**' )
        else:
            self.redirect_url_params = ''
        # Short description of the tool
        self.description = xml_text(root, "description")
        # Versioning for tools
        self.version_string_cmd = None
        version_cmd = root.find("version_command")
        if version_cmd is not None:
            self.version_string_cmd = version_cmd.text.strip()
            version_cmd_interpreter = version_cmd.get( "interpreter", None )
            if version_cmd_interpreter:
                # Make the executable path absolute relative to the tool dir
                # before prepending the interpreter.
                executable = self.version_string_cmd.split()[0]
                abs_executable = os.path.abspath(os.path.join(self.tool_dir, executable))
                command_line = self.version_string_cmd.replace(executable, abs_executable, 1)
                self.version_string_cmd = version_cmd_interpreter + " " + command_line
        # Parallelism for tasks, read from tool config.
        parallelism = root.find("parallelism")
        if parallelism is not None and parallelism.get("method"):
            self.parallelism = ParallelismInfo(parallelism)
        else:
            self.parallelism = None
        # Get JobToolConfiguration(s) valid for this particular Tool.  At least
        # a 'default' will be provided that uses the 'default' handler and
        # 'default' destination.  I thought about moving this to the
        # job_config, but it makes more sense to store here. -nate
        self_ids = [ self.id.lower() ]
        if self.old_id != self.id:
            # Handle toolshed guids
            self_ids = [ self.id.lower(), self.id.lower().rsplit('/', 1)[0], self.old_id.lower() ]
        self.all_ids = self_ids
        # In the toolshed context, there is no job config.
        if 'job_config' in dir(self.app):
            self.job_tool_configurations = self.app.job_config.get_job_tool_configurations(self_ids)
        # Is this a 'hidden' tool (hidden in tool menu)
        self.hidden = xml_text(root, "hidden")
        if self.hidden:
            self.hidden = string_as_bool(self.hidden)
        # Load any tool specific code (optional) Edit: INS 5/29/2007,
        # allow code files to have access to the individual tool's
        # "module" if it has one.  Allows us to reuse code files, etc.
        self.code_namespace = dict()
        self.hook_map = {}
        for code_elem in root.findall("code"):
            for hook_elem in code_elem.findall("hook"):
                for key, value in hook_elem.items():
                    # map hook to function
                    self.hook_map[key] = value
            file_name = code_elem.get("file")
            code_path = os.path.join( self.tool_dir, file_name )
            # NOTE: executes arbitrary Python from the tool's code file in
            # this namespace (Python 2 builtin execfile).
            execfile( code_path, self.code_namespace )
        # Load any tool specific options (optional)
        self.options = dict( sanitize=True, refresh=False )
        for option_elem in root.findall("options"):
            for option, value in self.options.copy().items():
                if isinstance(value, type(False)):
                    self.options[option] = string_as_bool(option_elem.get(option, str(value)))
                else:
                    self.options[option] = option_elem.get(option, str(value))
        self.options = Bunch(** self.options)
        # Parse tool inputs (if there are any required)
        self.parse_inputs( root )
        # Parse tool help
        self.parse_help( root )
        # Description of outputs produced by an invocation of the tool
        self.parse_outputs( root )
        # Parse result handling for tool exit codes and stdout/stderr messages:
        self.parse_stdio( root )
        # Any extra generated config files for the tool
        self.config_files = []
        conf_parent_elem = root.find("configfiles")
        if conf_parent_elem:
            for conf_elem in conf_parent_elem.findall( "configfile" ):
                name = conf_elem.get( "name" )
                filename = conf_elem.get( "filename", None )
                text = conf_elem.text
                self.config_files.append( ( name, filename, text ) )
        # Action
        action_elem = root.find( "action" )
        if action_elem is None:
            self.tool_action = self.default_tool_action()
        else:
            # A custom action class can be specified by module/class name.
            module = action_elem.get( 'module' )
            cls = action_elem.get( 'class' )
            mod = __import__( module, globals(), locals(), [cls])
            self.tool_action = getattr( mod, cls )()
        # User interface hints
        self.uihints = {}
        uihints_elem = root.find( "uihints" )
        if uihints_elem is not None:
            for key, value in uihints_elem.attrib.iteritems():
                self.uihints[ key ] = value
        # Tests
        self.__tests_elem = root.find( "tests" )
        self.__tests_populated = False
        # Requirements (dependencies)
        requirements, containers = parse_requirements_from_xml( root )
        self.requirements = requirements
        self.containers = containers
        self.citations = self._parse_citations( root )
        # Determine if this tool can be used in workflows
        self.is_workflow_compatible = self.check_workflow_compatible(root)
        # Trackster configuration.
        trackster_conf = root.find( "trackster_conf" )
        if trackster_conf is not None:
            self.trackster_conf = TracksterConfig.parse( trackster_conf )
        else:
            self.trackster_conf = None
@property
def tests( self ):
if not self.__tests_populated:
tests_elem = self.__tests_elem
if tests_elem:
try:
self.__tests = parse_tests_elem( self, tests_elem )
except:
log.exception( "Failed to parse tool tests" )
else:
self.__tests = None
self.__tests_populated = True
return self.__tests
    def parse_inputs( self, root ):
        """
        Parse the "<inputs>" element and create appropriate `ToolParameter`s.
        This implementation supports multiple pages and grouping constructs.

        Side effects: populates self.inputs / self.inputs_by_page /
        self.display_by_page, and derives self.npages, self.last_page,
        self.has_multiple_pages, self.enctype and self.input_required.
        """
        # Load parameters (optional)
        input_elem = root.find("inputs")
        # Each parameter may demand a particular form enctype; they are
        # collected here and reconciled at the end.
        enctypes = set()
        if input_elem is not None:
            # Handle properties of the input form
            self.check_values = string_as_bool( input_elem.get("check_values", self.check_values ) )
            self.nginx_upload = string_as_bool( input_elem.get( "nginx_upload", self.nginx_upload ) )
            self.action = input_elem.get( 'action', self.action )
            # If we have an nginx upload, save the action as a tuple instead of
            # a string. The actual action needs to get url_for run to add any
            # prefixes, and we want to avoid adding the prefix to the
            # nginx_upload_path. This logic is handled in the tool_form.mako
            # template.
            if self.nginx_upload and self.app.config.nginx_upload_path:
                if '?' in urllib.unquote_plus( self.action ):
                    raise Exception( 'URL parameters in a non-default tool action can not be used ' \
                                     'in conjunction with nginx upload. Please convert them to ' \
                                     'hidden POST parameters' )
                self.action = (self.app.config.nginx_upload_path + '?nginx_redir=',
                               urllib.unquote_plus(self.action))
            self.target = input_elem.get( "target", self.target )
            self.method = input_elem.get( "method", self.method )
            # Parse the actual parameters
            # Handle multiple page case
            # When there are no <page> children, the <inputs> element itself
            # acts as the single page.
            pages = input_elem.findall( "page" )
            for page in ( pages or [ input_elem ] ):
                display, inputs = self.parse_input_page( page, enctypes )
                self.inputs_by_page.append( inputs )
                self.inputs.update( inputs )
                self.display_by_page.append( display )
        else:
            # No <inputs> element at all: a single empty page.
            self.inputs_by_page.append( self.inputs )
            self.display_by_page.append( None )
        self.display = self.display_by_page[0]
        self.npages = len( self.inputs_by_page )
        self.last_page = len( self.inputs_by_page ) - 1
        self.has_multiple_pages = bool( self.last_page )
        # Determine the needed enctype for the form; all parameters must
        # agree on a single enctype (or express no preference).
        if len( enctypes ) == 0:
            self.enctype = "application/x-www-form-urlencoded"
        elif len( enctypes ) == 1:
            self.enctype = enctypes.pop()
        else:
            raise Exception( "Conflicting required enctypes: %s" % str( enctypes ) )
        # Check if the tool either has no parameters or only hidden (and
        # thus hardcoded) FIXME: hidden parameters aren't
        # parameters at all really, and should be passed in a different
        # way, making this check easier.
        self.template_macro_params = template_macro_params(root)
        for param in self.inputs.values():
            if not isinstance( param, ( HiddenToolParameter, BaseURLToolParameter ) ):
                self.input_required = True
                break
    def parse_help( self, root ):
        """
        Parse the help text for the tool. Formatted in reStructuredText, but
        stored as Mako to allow for dynamic image paths.
        This implementation supports multiple pages.

        Side effects: sets self.help (a Mako Template, or the raw element if
        templating fails) and self.help_by_page (one Template per <page>).
        """
        # TODO: Allow raw HTML or an external link.
        self.help = root.find("help")
        self.help_by_page = list()
        help_header = ""
        help_footer = ""
        if self.help is not None:
            if self.repository_id and self.help.text.find( '.. image:: ' ) >= 0:
                # Handle tool help image display for tools that are contained in repositories in the tool shed or installed into Galaxy.
                # NOTE(review): this Lock is a fresh local object, so the
                # acquire/release pair cannot actually serialize concurrent
                # callers — confirm whether a shared/module-level lock was
                # intended here.
                lock = threading.Lock()
                lock.acquire( True )
                try:
                    self.help.text = suc.set_image_paths( self.app, self.repository_id, self.help.text )
                except Exception, e:
                    log.exception( "Exception in parse_help, so images may not be properly displayed:\n%s" % str( e ) )
                finally:
                    lock.release()
            help_pages = self.help.findall( "page" )
            help_header = self.help.text
            try:
                self.help = Template( rst_to_html(self.help.text), input_encoding='utf-8',
                                      output_encoding='utf-8', default_filters=[ 'decode.utf8' ],
                                      encoding_errors='replace' )
            except:
                log.exception( "error in help for tool %s" % self.name )
            # Multiple help page case
            if help_pages:
                for help_page in help_pages:
                    self.help_by_page.append( help_page.text )
                    help_footer = help_footer + help_page.tail
                # Each page has to rendered all-together because of backreferences allowed by rst
                try:
                    self.help_by_page = [ Template( rst_to_html( help_header + x + help_footer ),
                                                    input_encoding='utf-8', output_encoding='utf-8',
                                                    default_filters=[ 'decode.utf8' ],
                                                    encoding_errors='replace' )
                                          for x in self.help_by_page ]
                except:
                    log.exception( "error in multi-page help for tool %s" % self.name )
        # Pad out help pages to match npages ... could this be done better?
        while len( self.help_by_page ) < self.npages:
            self.help_by_page.append( self.help )
def parse_outputs( self, root ):
"""
Parse <outputs> elements and fill in self.outputs (keyed by name)
"""
self.outputs = odict()
out_elem = root.find("outputs")
if not out_elem:
return
for data_elem in out_elem.findall("data"):
output = ToolOutput( data_elem.get("name") )
output.format = data_elem.get("format", "data")
output.change_format = data_elem.findall("change_format")
output.format_source = data_elem.get("format_source", None)
output.metadata_source = data_elem.get("metadata_source", "")
output.parent = data_elem.get("parent", None)
output.label = xml_text( data_elem, "label" )
output.count = int( data_elem.get("count", 1) )
output.filters = data_elem.findall( 'filter' )
output.from_work_dir = data_elem.get("from_work_dir", None)
output.hidden = string_as_bool( data_elem.get("hidden", "") )
output.tool = self
output.actions = ToolOutputActionGroup( output, data_elem.find( 'actions' ) )
output.dataset_collectors = output_collect.dataset_collectors_from_elem( data_elem )
self.outputs[ output.name ] = output
# TODO: Include the tool's name in any parsing warnings.
def parse_stdio( self, root ):
"""
Parse <stdio> element(s) and fill in self.return_codes,
self.stderr_rules, and self.stdout_rules. Return codes have a range
and an error type (fault or warning). Stderr and stdout rules have
a regular expression and an error level (fault or warning).
"""
try:
self.stdio_exit_codes = list()
self.stdio_regexes = list()
# We should have a single <stdio> element, but handle the case for
# multiples.
# For every stdio element, add all of the exit_code and regex
# subelements that we find:
for stdio_elem in ( root.findall( 'stdio' ) ):
self.parse_stdio_exit_codes( stdio_elem )
self.parse_stdio_regexes( stdio_elem )
except Exception:
log.error( "Exception in parse_stdio! " + str(sys.exc_info()) )
def parse_stdio_exit_codes( self, stdio_elem ):
"""
Parse the tool's <stdio> element's <exit_code> subelements.
This will add all of those elements, if any, to self.stdio_exit_codes.
"""
try:
# Look for all <exit_code> elements. Each exit_code element must
# have a range/value.
# Exit-code ranges have precedence over a single exit code.
# So if there are value and range attributes, we use the range
# attribute. If there is neither a range nor a value, then print
# a warning and skip to the next.
for exit_code_elem in ( stdio_elem.findall( "exit_code" ) ):
exit_code = ToolStdioExitCode()
# Each exit code has an optional description that can be
# part of the "desc" or "description" attributes:
exit_code.desc = exit_code_elem.get( "desc" )
if None == exit_code.desc:
exit_code.desc = exit_code_elem.get( "description" )
# Parse the error level:
exit_code.error_level = (
self.parse_error_level( exit_code_elem.get( "level" )))
code_range = exit_code_elem.get( "range", "" )
if None == code_range:
code_range = exit_code_elem.get( "value", "" )
if None == code_range:
log.warning( "Tool stdio exit codes must have "
+ "a range or value" )
continue
# Parse the range. We look for:
# :Y
# X:
# X:Y - Split on the colon. We do not allow a colon
# without a beginning or end, though we could.
# Also note that whitespace is eliminated.
# TODO: Turn this into a single match - it should be
# more efficient.
code_range = re.sub( "\s", "", code_range )
code_ranges = re.split( ":", code_range )
if ( len( code_ranges ) == 2 ):
if ( None == code_ranges[0] or '' == code_ranges[0] ):
exit_code.range_start = float( "-inf" )
else:
exit_code.range_start = int( code_ranges[0] )
if ( None == code_ranges[1] or '' == code_ranges[1] ):
exit_code.range_end = float( "inf" )
else:
exit_code.range_end = int( code_ranges[1] )
# If we got more than one colon, then ignore the exit code.
elif ( len( code_ranges ) > 2 ):
log.warning( "Invalid tool exit_code range %s - ignored"
% code_range )
continue
# Else we have a singular value. If it's not an integer, then
# we'll just write a log message and skip this exit_code.
else:
try:
exit_code.range_start = int( code_range )
except:
log.error( code_range )
log.warning( "Invalid range start for tool's exit_code %s: exit_code ignored" % code_range )
continue
exit_code.range_end = exit_code.range_start
# TODO: Check if we got ">", ">=", "<", or "<=":
# Check that the range, regardless of how we got it,
# isn't bogus. If we have two infinite values, then
# the start must be -inf and the end must be +inf.
# So at least warn about this situation:
if ( isinf( exit_code.range_start ) and
isinf( exit_code.range_end ) ):
log.warning( "Tool exit_code range %s will match on "
+ "all exit codes" % code_range )
self.stdio_exit_codes.append( exit_code )
except Exception:
log.error( "Exception in parse_stdio_exit_codes! "
+ str(sys.exc_info()) )
trace = sys.exc_info()[2]
if ( None != trace ):
trace_msg = repr( traceback.format_tb( trace ) )
log.error( "Traceback: %s" % trace_msg )
def parse_stdio_regexes( self, stdio_elem ):
"""
Look in the tool's <stdio> elem for all <regex> subelements
that define how to look for warnings and fatal errors in
stdout and stderr. This will add all such regex elements
to the Tols's stdio_regexes list.
"""
try:
# Look for every <regex> subelement. The regular expression
# will have "match" and "source" (or "src") attributes.
for regex_elem in ( stdio_elem.findall( "regex" ) ):
# TODO: Fill in ToolStdioRegex
regex = ToolStdioRegex()
# Each regex has an optional description that can be
# part of the "desc" or "description" attributes:
regex.desc = regex_elem.get( "desc" )
if None == regex.desc:
regex.desc = regex_elem.get( "description" )
# Parse the error level
regex.error_level = (
self.parse_error_level( regex_elem.get( "level" ) ) )
regex.match = regex_elem.get( "match", "" )
if None == regex.match:
# TODO: Convert the offending XML element to a string
log.warning( "Ignoring tool's stdio regex element %s - "
"the 'match' attribute must exist" )
continue
# Parse the output sources. We look for the "src", "source",
# and "sources" attributes, in that order. If there is no
# such source, then the source defaults to stderr & stdout.
# Look for a comma and then look for "err", "error", "out",
# and "output":
output_srcs = regex_elem.get( "src" )
if None == output_srcs:
output_srcs = regex_elem.get( "source" )
if None == output_srcs:
output_srcs = regex_elem.get( "sources" )
if None == output_srcs:
output_srcs = "output,error"
output_srcs = re.sub( "\s", "", output_srcs )
src_list = re.split( ",", output_srcs )
# Just put together anything to do with "out", including
# "stdout", "output", etc. Repeat for "stderr", "error",
# and anything to do with "err". If neither stdout nor
# stderr were specified, then raise a warning and scan both.
for src in src_list:
if re.search( "both", src, re.IGNORECASE ):
regex.stdout_match = True
regex.stderr_match = True
if re.search( "out", src, re.IGNORECASE ):
regex.stdout_match = True
if re.search( "err", src, re.IGNORECASE ):
regex.stderr_match = True
if (not regex.stdout_match and not regex.stderr_match):
log.warning( "Tool id %s: unable to determine if tool "
"stream source scanning is output, error, "
"or both. Defaulting to use both." % self.id )
regex.stdout_match = True
regex.stderr_match = True
self.stdio_regexes.append( regex )
except Exception:
log.error( "Exception in parse_stdio_exit_codes! "
+ str(sys.exc_info()) )
trace = sys.exc_info()[2]
if ( None != trace ):
trace_msg = repr( traceback.format_tb( trace ) )
log.error( "Traceback: %s" % trace_msg )
def _parse_citations( self, root ):
citations = []
citations_elem = root.find("citations")
if not citations_elem:
return citations
for citation_elem in citations_elem:
if citation_elem.tag != "citation":
pass
citation = self.app.citations_manager.parse_citation( citation_elem, self.tool_dir )
if citation:
citations.append( citation )
return citations
# TODO: This method doesn't have to be part of the Tool class.
def parse_error_level( self, err_level ):
"""
Parses error level and returns error level enumeration. If
unparsable, returns 'fatal'
"""
return_level = StdioErrorLevel.FATAL
try:
if err_level:
if ( re.search( "log", err_level, re.IGNORECASE ) ):
return_level = StdioErrorLevel.LOG
elif ( re.search( "warning", err_level, re.IGNORECASE ) ):
return_level = StdioErrorLevel.WARNING
elif ( re.search( "fatal", err_level, re.IGNORECASE ) ):
return_level = StdioErrorLevel.FATAL
else:
log.debug( "Tool %s: error level %s did not match log/warning/fatal" %
( self.id, err_level ) )
except Exception:
log.error( "Exception in parse_error_level "
+ str(sys.exc_info() ) )
trace = sys.exc_info()[2]
if ( None != trace ):
trace_msg = repr( traceback.format_tb( trace ) )
log.error( "Traceback: %s" % trace_msg )
return return_level
def parse_input_page( self, input_elem, enctypes ):
"""
Parse a page of inputs. This basically just calls 'parse_input_elem',
but it also deals with possible 'display' elements which are supported
only at the top/page level (not in groups).
"""
inputs = self.parse_input_elem( input_elem, enctypes )
# Display
display_elem = input_elem.find("display")
if display_elem is not None:
display = xml_to_string(display_elem)
else:
display = None
return display, inputs
    def parse_input_elem( self, parent_elem, enctypes, context=None ):
        """
        Parse a parent element whose children are inputs -- these could be
        groups (repeat, conditional) or param elements. Groups will be parsed
        recursively.

        Returns an odict of name -> parameter/group.  `context` chains the
        partially-built odict into an ExpressionContext so parameters can
        reference earlier siblings and enclosing scopes.
        """
        rval = odict()
        context = ExpressionContext( rval, context )
        for elem in parent_elem:
            # Repeat group
            if elem.tag == "repeat":
                group = Repeat()
                group.name = elem.get( "name" )
                group.title = elem.get( "title" )
                group.help = elem.get( "help", None )
                group.inputs = self.parse_input_elem( elem, enctypes, context )
                group.default = int( elem.get( "default", 0 ) )
                group.min = int( elem.get( "min", 0 ) )
                # Use float instead of int so that 'inf' can be used for no max
                group.max = float( elem.get( "max", "inf" ) )
                assert group.min <= group.max, \
                    ValueError( "Min repeat count must be less-than-or-equal to the max." )
                # Force default to be within min-max range
                group.default = min( max( group.default, group.min ), group.max )
                rval[group.name] = group
            elif elem.tag == "conditional":
                group = Conditional()
                group.name = elem.get( "name" )
                group.value_ref = elem.get( 'value_ref', None )
                group.value_ref_in_group = string_as_bool( elem.get( 'value_ref_in_group', 'True' ) )
                value_from = elem.get( "value_from" )
                if value_from:
                    # Dynamic conditional: "value_from" names an object and a
                    # dotted attribute path that yields { case -> inputs XML }.
                    # NOTE(review): locals().get() here can only resolve names
                    # local to this method — confirm which objects value_from
                    # is expected to reference.
                    value_from = value_from.split( ':' )
                    group.value_from = locals().get( value_from[0] )
                    group.test_param = rval[ group.value_ref ]
                    group.test_param.refresh_on_change = True
                    for attr in value_from[1].split( '.' ):
                        group.value_from = getattr( group.value_from, attr )
                    for case_value, case_inputs in group.value_from( context, group, self ).iteritems():
                        case = ConditionalWhen()
                        case.value = case_value
                        if case_inputs:
                            case.inputs = self.parse_input_elem(
                                ElementTree.XML( "<when>%s</when>" % case_inputs ), enctypes, context )
                        else:
                            case.inputs = odict()
                        group.cases.append( case )
                else:
                    # Should have one child "input" which determines the case
                    input_elem = elem.find( "param" )
                    assert input_elem is not None, "<conditional> must have a child <param>"
                    group.test_param = self.parse_param_elem( input_elem, enctypes, context )
                    possible_cases = list( group.test_param.legal_values ) # store possible cases, undefined whens will have no inputs
                    # Must refresh when test_param changes
                    group.test_param.refresh_on_change = True
                    # And a set of possible cases
                    for case_elem in elem.findall( "when" ):
                        case = ConditionalWhen()
                        case.value = case_elem.get( "value" )
                        case.inputs = self.parse_input_elem( case_elem, enctypes, context )
                        group.cases.append( case )
                        try:
                            possible_cases.remove( case.value )
                        except:
                            log.warning( "Tool %s: a when tag has been defined for '%s (%s) --> %s', but does not appear to be selectable." %
                                         ( self.id, group.name, group.test_param.name, case.value ) )
                    # Synthesize empty cases for legal values with no <when>.
                    for unspecified_case in possible_cases:
                        log.warning( "Tool %s: a when tag has not been defined for '%s (%s) --> %s', assuming empty inputs." %
                                     ( self.id, group.name, group.test_param.name, unspecified_case ) )
                        case = ConditionalWhen()
                        case.value = unspecified_case
                        case.inputs = odict()
                        group.cases.append( case )
                rval[group.name] = group
            elif elem.tag == "upload_dataset":
                group = UploadDataset()
                group.name = elem.get( "name" )
                group.title = elem.get( "title" )
                group.file_type_name = elem.get( 'file_type_name', group.file_type_name )
                group.default_file_type = elem.get( 'default_file_type', group.default_file_type )
                group.metadata_ref = elem.get( 'metadata_ref', group.metadata_ref )
                # The file-type parameter must already exist in rval; changing
                # it re-renders the form for composite datatypes.
                rval[ group.file_type_name ].refresh_on_change = True
                rval[ group.file_type_name ].refresh_on_change_values = \
                    self.app.datatypes_registry.get_composite_extensions()
                group.inputs = self.parse_input_elem( elem, enctypes, context )
                rval[ group.name ] = group
            elif elem.tag == "param":
                param = self.parse_param_elem( elem, enctypes, context )
                rval[param.name] = param
                if hasattr( param, 'data_ref' ):
                    param.ref_input = context[ param.data_ref ]
                self.input_params.append( param )
        return rval
def parse_param_elem( self, input_elem, enctypes, context ):
"""
Parse a single "<param>" element and return a ToolParameter instance.
Also, if the parameter has a 'required_enctype' add it to the set
enctypes.
"""
param = ToolParameter.build( self, input_elem )
param_enctype = param.get_required_enctype()
if param_enctype:
enctypes.add( param_enctype )
# If parameter depends on any other paramters, we must refresh the
# form when it changes
for name in param.get_dependencies():
context[ name ].refresh_on_change = True
return param
def populate_tool_shed_info( self ):
if self.repository_id is not None and self.app.name == 'galaxy':
repository_id = self.app.security.decode_id( self.repository_id )
tool_shed_repository = self.app.install_model.context.query( self.app.install_model.ToolShedRepository ).get( repository_id )
if tool_shed_repository:
self.tool_shed = tool_shed_repository.tool_shed
self.repository_name = tool_shed_repository.name
self.repository_owner = tool_shed_repository.owner
self.installed_changeset_revision = tool_shed_repository.installed_changeset_revision
def check_workflow_compatible( self, root ):
"""
Determine if a tool can be used in workflows. External tools and the
upload tool are currently not supported by workflows.
"""
# Multiple page tools are not supported -- we're eliminating most
# of these anyway
if self.has_multiple_pages:
return False
# This is probably the best bet for detecting external web tools
# right now
if self.tool_type.startswith( 'data_source' ):
return False
if not string_as_bool( root.get( "workflow_compatible", "True" ) ):
return False
# TODO: Anyway to capture tools that dynamically change their own
# outputs?
return True
def new_state( self, trans, all_pages=False, history=None ):
"""
Create a new `DefaultToolState` for this tool. It will be initialized
with default values for inputs.
Only inputs on the first page will be initialized unless `all_pages` is
True, in which case all inputs regardless of page are initialized.
"""
state = DefaultToolState()
state.inputs = {}
if all_pages:
inputs = self.inputs
else:
inputs = self.inputs_by_page[ 0 ]
self.fill_in_new_state( trans, inputs, state.inputs, history=history )
return state
def fill_in_new_state( self, trans, inputs, state, context=None, history=None ):
"""
Fill in a tool state dictionary with default values for all parameters
in the dictionary `inputs`. Grouping elements are filled in recursively.
"""
context = ExpressionContext( state, context )
for input in inputs.itervalues():
state[ input.name ] = input.get_initial_value( trans, context, history=history )
def get_param_html_map( self, trans, page=0, other_values={} ):
"""
Return a dictionary containing the HTML representation of each
parameter. This is used for rendering display elements. It is
currently not compatible with grouping constructs.
NOTE: This should be considered deprecated, it is only used for tools
with `display` elements. These should be eliminated.
"""
rval = dict()
for key, param in self.inputs_by_page[page].iteritems():
if not isinstance( param, ToolParameter ):
raise Exception( "'get_param_html_map' only supported for simple paramters" )
rval[key] = param.get_html( trans, other_values=other_values )
return rval
def get_param( self, key ):
"""
Returns the parameter named `key` or None if there is no such
parameter.
"""
return self.inputs.get( key, None )
def get_hook(self, name):
"""
Returns an object from the code file referenced by `code_namespace`
(this will normally be a callable object)
"""
if self.code_namespace:
# Try to look up hook in self.hook_map, otherwise resort to default
if name in self.hook_map and self.hook_map[name] in self.code_namespace:
return self.code_namespace[self.hook_map[name]]
elif name in self.code_namespace:
return self.code_namespace[name]
return None
def visit_inputs( self, value, callback ):
"""
Call the function `callback` on each parameter of this tool. Visits
grouping parameters recursively and constructs unique prefixes for
each nested set of The callback method is then called as:
`callback( level_prefix, parameter, parameter_value )`
"""
# HACK: Yet another hack around check_values -- WHY HERE?
if not self.check_values:
return
for input in self.inputs.itervalues():
if isinstance( input, ToolParameter ):
callback( "", input, value[input.name] )
else:
input.visit_inputs( "", value[input.name], callback )
    def handle_input( self, trans, incoming, history=None, old_errors=None, process_state='update', source='html' ):
        """
        Process incoming parameters for this tool from the dict `incoming`,
        update the tool state (or create if none existed), and either return
        to the form or execute the tool (only if 'execute' was clicked and
        there were no errors).

        process_state can be either 'update' (to incrementally build up the state
        over several calls - one repeat per handle for instance) or 'populate'
        force a complete build of the state and submission all at once (like
        from API). May want an incremental version of the API also at some point,
        that is why this is not just called for_api.

        Returns a ( template_name, template_vars ) pair for the caller to
        render.
        """
        all_pages = ( process_state == "populate" ) # "populate" submits everything at once, so process all pages.
        rerun_remap_job_id = None
        if 'rerun_remap_job_id' in incoming:
            try:
                rerun_remap_job_id = trans.app.security.decode_id( incoming[ 'rerun_remap_job_id' ] )
            except Exception:
                message = 'Failure executing tool (attempting to rerun invalid job).'
                return 'message.mako', dict( status='error', message=message, refresh_frames=[] )
        # Fixed set of input parameters may correspond to any number of jobs.
        # Expand these out to individual parameters for given jobs (tool
        # executions).
        expanded_incomings, collection_info = expand_meta_parameters( trans, self, incoming )
        if not expanded_incomings:
            raise exceptions.MessageException( "Tool execution failed, trying to run a tool over an empty collection." )
        # Remapping a single job to many jobs doesn't make sense, so disable
        # remap if multi-runs of tools are being used.
        if rerun_remap_job_id and len( expanded_incomings ) > 1:
            message = 'Failure executing tool (cannot create multiple jobs when remapping existing job).'
            return 'message.mako', dict( status='error', message=message, refresh_frames=[] )
        all_states = []
        for expanded_incoming in expanded_incomings:
            state, state_new = self.__fetch_state( trans, expanded_incoming, history, all_pages=all_pages )
            all_states.append( state )
            if state_new:
                # This feels a bit like a hack. It allows forcing full processing
                # of inputs even when there is no state in the incoming dictionary
                # by providing either 'runtool_btn' (the name of the submit button
                # on the standard run form) or "URL" (a parameter provided by
                # external data source tools).
                if "runtool_btn" not in incoming and "URL" not in incoming:
                    if not self.display_interface:
                        return self.__no_display_interface_response()
                    if len(incoming):
                        self.update_state( trans, self.inputs_by_page[state.page], state.inputs, incoming, old_errors=old_errors or {}, source=source )
                    return self.default_template, dict( errors={}, tool_state=state, param_values={}, incoming={} )
        all_errors = []
        all_params = []
        for expanded_incoming, expanded_state in zip(expanded_incomings, all_states):
            errors, params = self.__check_param_values( trans, expanded_incoming, expanded_state, old_errors, process_state, history=history, source=source )
            all_errors.append( errors )
            all_params.append( params )
        # NOTE(review): from here on `state` and `errors` are the loop-carried
        # values from the LAST expanded execution — presumably acceptable
        # because multi-run expansions share one form, but worth confirming.
        if self.__should_refresh_state( incoming ):
            template, template_vars = self.__handle_state_refresh( trans, state, errors )
        else:
            # User actually clicked next or execute.
            # If there were errors, we stay on the same page and display
            # error messages
            if any( all_errors ):
                error_message = "One or more errors were found in the input you provided. The specific errors are marked below."
                template = self.default_template
                template_vars = dict( errors=errors, tool_state=state, incoming=incoming, error_message=error_message )
            # If we've completed the last page we can execute the tool
            elif all_pages or state.page == self.last_page:
                execution_tracker = execute_job( trans, self, all_params, history=history, rerun_remap_job_id=rerun_remap_job_id, collection_info=collection_info )
                if execution_tracker.successful_jobs:
                    template = 'tool_executed.mako'
                    template_vars = dict(
                        out_data=execution_tracker.output_datasets,
                        num_jobs=len( execution_tracker.successful_jobs ),
                        job_errors=execution_tracker.execution_errors,
                        jobs=execution_tracker.successful_jobs,
                        implicit_collections=execution_tracker.created_collections,
                    )
                else:
                    template = 'message.mako'
                    template_vars = dict( status='error', message=execution_tracker.execution_errors[0], refresh_frames=[] )
            # Otherwise move on to the next page
            else:
                template, template_vars = self.__handle_page_advance( trans, state, errors )
        return template, template_vars
def __should_refresh_state( self, incoming ):
return not( 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming )
def handle_single_execution( self, trans, rerun_remap_job_id, params, history ):
"""
Return a pair with whether execution is successful as well as either
resulting output data or an error message indicating the problem.
"""
try:
params = self.__remove_meta_properties( params )
job, out_data = self.execute( trans, incoming=params, history=history, rerun_remap_job_id=rerun_remap_job_id )
except httpexceptions.HTTPFound, e:
#if it's a paste redirect exception, pass it up the stack
raise e
except Exception, e:
log.exception('Exception caught while attempting tool execution:')
message = 'Error executing tool: %s' % str(e)
return False, message
if isinstance( out_data, odict ):
return job, out_data.items()
else:
if isinstance( out_data, str ):
message = out_data
else:
message = 'Failure executing tool (invalid data returned from tool execution)'
return False, message
def __handle_state_refresh( self, trans, state, errors ):
try:
self.find_fieldstorage( state.inputs )
except InterruptedUpload:
# If inputs contain a file it won't persist. Most likely this
# is an interrupted upload. We should probably find a more
# standard method of determining an incomplete POST.
return self.handle_interrupted( trans, state.inputs )
except:
pass
# Just a refresh, render the form with updated state and errors.
if not self.display_interface:
return self.__no_display_interface_response()
return self.default_template, dict( errors=errors, tool_state=state )
def __handle_page_advance( self, trans, state, errors ):
state.page += 1
# Fill in the default values for the next page
self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
if not self.display_interface:
return self.__no_display_interface_response()
return self.default_template, dict( errors=errors, tool_state=state )
def __no_display_interface_response( self ):
return 'message.mako', dict( status='info', message="The interface for this tool cannot be displayed", refresh_frames=['everything'] )
def __fetch_state( self, trans, incoming, history, all_pages ):
# Get the state or create if not found
if "tool_state" in incoming:
encoded_state = string_to_object( incoming["tool_state"] )
state = DefaultToolState()
state.decode( encoded_state, self, trans.app )
new = False
else:
state = self.new_state( trans, history=history, all_pages=all_pages )
new = True
return state, new
    def __check_param_values( self, trans, incoming, state, old_errors, process_state, history, source ):
        """
        Validate/convert `incoming` against this tool's inputs and fold the
        results into `state`.  Returns ( errors_dict, params_dict ).
        """
        # Process incoming data
        if not( self.check_values ):
            # If `self.check_values` is false we don't do any checking or
            # processing on input This is used to pass raw values
            # through to/from external sites. FIXME: This should be handled
            # more cleanly, there is no reason why external sites need to
            # post back to the same URL that the tool interface uses.
            errors = {}
            params = incoming
        else:
            # Update state for all inputs on the current page taking new
            # values from `incoming`.
            if process_state == "update":
                # Incremental form flow: only the current page's inputs.
                inputs = self.inputs_by_page[state.page]
                errors = self.update_state( trans, inputs, state.inputs, incoming, old_errors=old_errors or {}, source=source )
            elif process_state == "populate":
                # One-shot (API-style) flow: every input at once.
                inputs = self.inputs
                errors = self.populate_state( trans, inputs, state.inputs, incoming, history, source=source )
            else:
                raise Exception("Unknown process_state type %s" % process_state)
            # If the tool provides a `validate_input` hook, call it.
            validate_input = self.get_hook( 'validate_input' )
            if validate_input:
                validate_input( trans, errors, state.inputs, inputs )
            params = state.inputs
        return errors, params
def find_fieldstorage( self, x ):
if isinstance( x, FieldStorage ):
raise InterruptedUpload( None )
elif type( x ) is types.DictType:
[ self.find_fieldstorage( y ) for y in x.values() ]
elif type( x ) is types.ListType:
[ self.find_fieldstorage( y ) for y in x ]
    def handle_interrupted( self, trans, inputs ):
        """
        Upon handling inputs, if it appears that we have received an incomplete
        form, do some cleanup or anything else deemed necessary. Currently
        this is only likely during file uploads, but this method could be
        generalized and a method standardized for handling other tools.

        Returns a ( template, args ) pair rendering an error message.
        """
        # If the async upload tool has uploading datasets, we need to error them.
        if 'async_datasets' in inputs and inputs['async_datasets'] not in [ 'None', '', None ]:
            for id in inputs['async_datasets'].split(','):
                try:
                    data = self.sa_session.query( trans.model.HistoryDatasetAssociation ).get( int( id ) )
                except:
                    log.exception( 'Unable to load precreated dataset (%s) sent in upload form' % id )
                    continue
                # Only error datasets owned by the requesting user (or by the
                # anonymous session's current history); otherwise just log.
                if trans.user is None and trans.galaxy_session.current_history != data.history:
                    log.error( 'Got a precreated dataset (%s) but it does not belong to anonymous user\'s current session (%s)'
                        % ( data.id, trans.galaxy_session.id ) )
                elif data.history.user != trans.user:
                    log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)'
                        % ( data.id, trans.user.id ) )
                else:
                    data.state = data.states.ERROR
                    data.info = 'Upload of this dataset was interrupted. Please try uploading again or'
                    self.sa_session.add( data )
                    self.sa_session.flush()
        # It's unlikely the user will ever see this.
        return 'message.mako', dict( status='error',
                                     message='Your upload was interrupted. If this was uninentional, please retry it.',
                                     refresh_frames=[], cont=None )
def populate_state( self, trans, inputs, state, incoming, history, source, prefix="", context=None ):
errors = dict()
# Push this level onto the context stack
context = ExpressionContext( state, context )
for input in inputs.itervalues():
key = prefix + input.name
if isinstance( input, Repeat ):
group_state = state[input.name]
# Create list of empty errors for each previously existing state
group_errors = [ ]
any_group_errors = False
rep_index = 0
del group_state[:] # Clear prepopulated defaults if repeat.min set.
while True:
rep_name = "%s_%d" % ( key, rep_index )
if not any( [ incoming_key.startswith(rep_name) for incoming_key in incoming.keys() ] ):
break
if rep_index < input.max:
new_state = {}
new_state['__index__'] = rep_index
self.fill_in_new_state( trans, input.inputs, new_state, context, history=history )
group_state.append( new_state )
group_errors.append( {} )
rep_errors = self.populate_state( trans,
input.inputs,
new_state,
incoming,
history,
source,
prefix=rep_name + "|",
context=context )
if rep_errors:
any_group_errors = True
group_errors[rep_index].update( rep_errors )
else:
group_errors[-1] = { '__index__': 'Cannot add repeat (max size=%i).' % input.max }
any_group_errors = True
rep_index += 1
elif isinstance( input, Conditional ):
group_state = state[input.name]
group_prefix = "%s|" % ( key )
# Deal with the 'test' element and see if its value changed
if input.value_ref and not input.value_ref_in_group:
# We are referencing an existent parameter, which is not
# part of this group
test_param_key = prefix + input.test_param.name
else:
test_param_key = group_prefix + input.test_param.name
# Get value of test param and determine current case
value, test_param_error = check_param_from_incoming( trans,
group_state,
input.test_param,
incoming,
test_param_key,
context,
source )
if test_param_error:
errors[ input.name ] = [ test_param_error ]
# Store the value of the test element
group_state[ input.test_param.name ] = value
else:
current_case = input.get_current_case( value, trans )
# Current case has changed, throw away old state
group_state = state[input.name] = {}
# TODO: we should try to preserve values if we can
self.fill_in_new_state( trans, input.cases[current_case].inputs, group_state, context, history=history )
group_errors = self.populate_state( trans,
input.cases[current_case].inputs,
group_state,
incoming,
history,
source,
prefix=group_prefix,
context=context,
)
if group_errors:
errors[ input.name ] = group_errors
# Store the current case in a special value
group_state['__current_case__'] = current_case
# Store the value of the test element
group_state[ input.test_param.name ] = value
elif isinstance( input, UploadDataset ):
group_state = state[input.name]
group_errors = []
any_group_errors = False
d_type = input.get_datatype( trans, context )
writable_files = d_type.writable_files
#remove extra files
while len( group_state ) > len( writable_files ):
del group_state[-1]
# Add new fileupload as needed
while len( writable_files ) > len( group_state ):
new_state = {}
new_state['__index__'] = len( group_state )
self.fill_in_new_state( trans, input.inputs, new_state, context )
group_state.append( new_state )
if any_group_errors:
group_errors.append( {} )
# Update state
for i, rep_state in enumerate( group_state ):
rep_index = rep_state['__index__']
rep_prefix = "%s_%d|" % ( key, rep_index )
rep_errors = self.populate_state( trans,
input.inputs,
rep_state,
incoming,
history,
source,
prefix=rep_prefix,
context=context)
if rep_errors:
any_group_errors = True
group_errors.append( rep_errors )
else:
group_errors.append( {} )
# Were there *any* errors for any repetition?
if any_group_errors:
errors[input.name] = group_errors
else:
value, error = check_param_from_incoming( trans, state, input, incoming, key, context, source )
if error:
errors[ input.name ] = error
state[ input.name ] = value
return errors
    def update_state( self, trans, inputs, state, incoming, source='html', prefix="", context=None,
                      update_only=False, old_errors={}, item_callback=None ):
        """
        Update the tool state in `state` using the user input in `incoming`.
        This is designed to be called recursively: `inputs` contains the
        set of inputs being processed, and `prefix` specifies a prefix to
        add to the name of each input to extract its value from `incoming`.

        If `update_only` is True, values that are not in `incoming` will
        not be modified. In this case `old_errors` can be provided, and any
        errors for parameters which were *not* updated will be preserved.

        Returns a dict of validation errors keyed by input name (nested
        dicts/lists for grouping constructs).

        NOTE(review): ``old_errors={}`` is a mutable default argument; it
        appears to be only read here, but confirm before relying on that.
        """
        errors = dict()
        # Push this level onto the context stack
        context = ExpressionContext( state, context )
        # Iterate inputs and update (recursively)
        for input in inputs.itervalues():
            key = prefix + input.name
            if isinstance( input, Repeat ):
                group_state = state[input.name]
                # Create list of empty errors for each previously existing state
                group_errors = [ {} for i in range( len( group_state ) ) ]
                group_old_errors = old_errors.get( input.name, None )
                any_group_errors = False
                # Check any removals before updating state -- only one
                # removal can be performed, others will be ignored
                for i, rep_state in enumerate( group_state ):
                    rep_index = rep_state['__index__']
                    if key + "_" + str(rep_index) + "_remove" in incoming:
                        if len( group_state ) > input.min:
                            del group_state[i]
                            del group_errors[i]
                            if group_old_errors:
                                del group_old_errors[i]
                            break
                        else:
                            group_errors[i] = { '__index__': 'Cannot remove repeat (min size=%i).' % input.min }
                            any_group_errors = True
                            # Only need to find one that can't be removed due to size,
                            # since only one removal is processed at a time anyway
                            break
                    elif group_old_errors and group_old_errors[i]:
                        group_errors[i] = group_old_errors[i]
                        any_group_errors = True
                # Update state
                max_index = -1
                for i, rep_state in enumerate( group_state ):
                    rep_index = rep_state['__index__']
                    max_index = max( max_index, rep_index )
                    rep_prefix = "%s_%d|" % ( key, rep_index )
                    if group_old_errors:
                        rep_old_errors = group_old_errors[i]
                    else:
                        rep_old_errors = {}
                    rep_errors = self.update_state( trans,
                                                    input.inputs,
                                                    rep_state,
                                                    incoming,
                                                    source=source,
                                                    prefix=rep_prefix,
                                                    context=context,
                                                    update_only=update_only,
                                                    old_errors=rep_old_errors,
                                                    item_callback=item_callback )
                    if rep_errors:
                        any_group_errors = True
                        group_errors[i].update( rep_errors )
                # Check for addition
                if key + "_add" in incoming:
                    if len( group_state ) < input.max:
                        new_state = {}
                        new_state['__index__'] = max_index + 1
                        self.fill_in_new_state( trans, input.inputs, new_state, context )
                        group_state.append( new_state )
                        group_errors.append( {} )
                    else:
                        group_errors[-1] = { '__index__': 'Cannot add repeat (max size=%i).' % input.max }
                        any_group_errors = True
                # Were there *any* errors for any repetition?
                if any_group_errors:
                    errors[input.name] = group_errors
            elif isinstance( input, Conditional ):
                group_state = state[input.name]
                group_old_errors = old_errors.get( input.name, {} )
                old_current_case = group_state['__current_case__']
                group_prefix = "%s|" % ( key )
                # Deal with the 'test' element and see if its value changed
                if input.value_ref and not input.value_ref_in_group:
                    # We are referencing an existent parameter, which is not
                    # part of this group
                    test_param_key = prefix + input.test_param.name
                else:
                    test_param_key = group_prefix + input.test_param.name
                test_param_error = None
                test_incoming = get_incoming_value( incoming, test_param_key, None )
                if test_param_key not in incoming \
                   and "__force_update__" + test_param_key not in incoming \
                   and update_only:
                    # Update only, keep previous value and state, but still
                    # recurse in case there are nested changes
                    value = group_state[ input.test_param.name ]
                    current_case = old_current_case
                    if input.test_param.name in old_errors:
                        errors[ input.test_param.name ] = old_errors[ input.test_param.name ]
                else:
                    # Get value of test param and determine current case
                    value, test_param_error = \
                        check_param( trans, input.test_param, test_incoming, context, source=source )
                    try:
                        current_case = input.get_current_case( value, trans )
                    except ValueError, e:
                        if input.is_job_resource_conditional:
                            # Unless explicitly given job resource parameters
                            # (e.g. from the run tool form) don't populate the
                            # state. Along with other hacks prevents workflow
                            # saving from populating resource defaults - which
                            # are meant to be much more transient than the rest
                            # of tool state.
                            continue
                        #load default initial value
                        if not test_param_error:
                            test_param_error = str( e )
                        if trans is not None:
                            history = trans.get_history()
                        else:
                            history = None
                        value = input.test_param.get_initial_value( trans, context, history=history )
                        current_case = input.get_current_case( value, trans )
                if current_case != old_current_case:
                    # Current case has changed, throw away old state
                    group_state = state[input.name] = {}
                    # TODO: we should try to preserve values if we can
                    self.fill_in_new_state( trans, input.cases[current_case].inputs, group_state, context )
                    group_errors = dict()
                    group_old_errors = dict()
                else:
                    # Current case has not changed, update children
                    group_errors = self.update_state( trans,
                                                      input.cases[current_case].inputs,
                                                      group_state,
                                                      incoming,
                                                      prefix=group_prefix,
                                                      context=context,
                                                      source=source,
                                                      update_only=update_only,
                                                      old_errors=group_old_errors,
                                                      item_callback=item_callback )
                    if input.test_param.name in group_old_errors and not test_param_error:
                        test_param_error = group_old_errors[ input.test_param.name ]
                if test_param_error:
                    group_errors[ input.test_param.name ] = test_param_error
                if group_errors:
                    errors[ input.name ] = group_errors
                # Store the current case in a special value
                group_state['__current_case__'] = current_case
                # Store the value of the test element
                group_state[ input.test_param.name ] = value
            elif isinstance( input, UploadDataset ):
                group_state = state[input.name]
                group_errors = []
                group_old_errors = old_errors.get( input.name, None )
                any_group_errors = False
                d_type = input.get_datatype( trans, context )
                writable_files = d_type.writable_files
                #remove extra files
                while len( group_state ) > len( writable_files ):
                    del group_state[-1]
                    if group_old_errors:
                        del group_old_errors[-1]
                # Update state
                max_index = -1
                for i, rep_state in enumerate( group_state ):
                    rep_index = rep_state['__index__']
                    max_index = max( max_index, rep_index )
                    rep_prefix = "%s_%d|" % ( key, rep_index )
                    if group_old_errors:
                        rep_old_errors = group_old_errors[i]
                    else:
                        rep_old_errors = {}
                    rep_errors = self.update_state( trans,
                                                    input.inputs,
                                                    rep_state,
                                                    incoming,
                                                    prefix=rep_prefix,
                                                    context=context,
                                                    source=source,
                                                    update_only=update_only,
                                                    old_errors=rep_old_errors,
                                                    item_callback=item_callback )
                    if rep_errors:
                        any_group_errors = True
                        group_errors.append( rep_errors )
                    else:
                        group_errors.append( {} )
                # Add new fileupload as needed
                offset = 1
                while len( writable_files ) > len( group_state ):
                    new_state = {}
                    new_state['__index__'] = max_index + offset
                    offset += 1
                    self.fill_in_new_state( trans, input.inputs, new_state, context )
                    group_state.append( new_state )
                    if any_group_errors:
                        group_errors.append( {} )
                # Were there *any* errors for any repetition?
                if any_group_errors:
                    errors[input.name] = group_errors
            else:
                if key not in incoming \
                   and "__force_update__" + key not in incoming \
                   and update_only:
                    # No new value provided, and we are only updating, so keep
                    # the old value (which should already be in the state) and
                    # preserve the old error message.
                    if input.name in old_errors:
                        errors[ input.name ] = old_errors[ input.name ]
                else:
                    incoming_value = get_incoming_value( incoming, key, None )
                    value, error = check_param( trans, input, incoming_value, context, source=source )
                    # If a callback was provided, allow it to process the value
                    input_name = input.name
                    if item_callback:
                        old_value = state.get( input_name, None )
                        value, error = item_callback( trans, key, input, value, error, old_value, context )
                    if error:
                        errors[ input_name ] = error

                    state[ input_name ] = value
                    meta_properties = self.__meta_properties_for_state( key, incoming, incoming_value, value, input_name )
                    state.update( meta_properties )
        return errors
def __remove_meta_properties( self, incoming ):
result = incoming.copy()
meta_property_suffixes = [
"__multirun__",
"__collection_multirun__",
]
for key, value in incoming.iteritems():
if any( map( lambda s: key.endswith(s), meta_property_suffixes ) ):
del result[ key ]
return result
def __meta_properties_for_state( self, key, incoming, incoming_val, state_val, input_name ):
meta_properties = {}
meta_property_suffixes = [
"__multirun__",
"__collection_multirun__",
]
for meta_property_suffix in meta_property_suffixes:
multirun_key = "%s|%s" % ( key, meta_property_suffix )
if multirun_key in incoming:
multi_value = incoming[ multirun_key ]
meta_properties[ "%s|%s" % ( input_name, meta_property_suffix ) ] = multi_value
return meta_properties
@property
def params_with_missing_data_table_entry( self ):
"""
Return all parameters that are dynamically generated select lists whose
options require an entry not currently in the tool_data_table_conf.xml file.
"""
params = []
for input_param in self.input_params:
if isinstance( input_param, SelectToolParameter ) and input_param.is_dynamic:
options = input_param.options
if options and options.missing_tool_data_table_name and input_param not in params:
params.append( input_param )
return params
@property
def params_with_missing_index_file( self ):
"""
Return all parameters that are dynamically generated
select lists whose options refer to a missing .loc file.
"""
params = []
for input_param in self.input_params:
if isinstance( input_param, SelectToolParameter ) and input_param.is_dynamic:
options = input_param.options
if options and options.missing_index_file and input_param not in params:
params.append( input_param )
return params
def get_static_param_values( self, trans ):
"""
Returns a map of parameter names and values if the tool does not
require any user input. Will raise an exception if any parameter
does require input.
"""
args = dict()
for key, param in self.inputs.iteritems():
if isinstance( param, HiddenToolParameter ):
args[key] = model.User.expand_user_properties( trans.user, param.value )
elif isinstance( param, BaseURLToolParameter ):
args[key] = param.get_value( trans )
else:
raise Exception( "Unexpected parameter type" )
return args
    def execute( self, trans, incoming={}, set_output_hid=True, history=None, **kwargs ):
        """
        Execute the tool using parameter values in `incoming`. This just
        dispatches to the `ToolAction` instance specified by
        `self.tool_action`. In general this will create a `Job` that
        when run will build the tool's outputs, e.g. `DefaultToolAction`.

        NOTE(review): ``incoming={}`` is a mutable default; it is passed
        through unmodified here, but mutation downstream would be shared
        across calls -- confirm before changing.
        """
        return self.tool_action.execute( self, trans, incoming=incoming, set_output_hid=set_output_hid, history=history, **kwargs )
    def params_to_strings( self, params, app ):
        """Convert parameter values in ``params`` to their persistable string
        form, delegating to the module-level ``params_to_strings`` with this
        tool's input definitions."""
        return params_to_strings( self.inputs, params, app )
    def params_from_strings( self, params, app, ignore_errors=False ):
        """Restore parameter values from their persisted string form,
        delegating to the module-level ``params_from_strings`` with this
        tool's input definitions."""
        return params_from_strings( self.inputs, params, app, ignore_errors )
    def check_and_update_param_values( self, values, trans, update_values=True, allow_workflow_parameters=False ):
        """
        Check that all parameters have values, and fill in with default
        values where necessary. This could be called after loading values
        from a database in case new parameters have been added.

        Returns a dict of human-readable messages describing any values
        that were missing or invalid (and, when ``update_values`` is True,
        replaced with defaults).
        """
        messages = {}
        self.check_and_update_param_values_helper( self.inputs, values, trans, messages, update_values=update_values, allow_workflow_parameters=allow_workflow_parameters )
        return messages
    def check_and_update_param_values_helper( self, inputs, values, trans, messages, context=None, prefix="", update_values=True, allow_workflow_parameters=False ):
        """
        Recursive helper for `check_and_update_param_values`.

        Walks ``inputs``, filling in defaults for missing values and
        recording a human-readable message per repaired parameter in
        ``messages``.  ``allow_workflow_parameters`` skips re-validation of
        values containing workflow parameter placeholders.
        """
        context = ExpressionContext( values, context )
        for input in inputs.itervalues():
            # No value, insert the default
            if input.name not in values:
                if isinstance( input, Conditional ):
                    cond_messages = {}
                    if not input.is_job_resource_conditional:
                        cond_messages = { input.test_param.name: "No value found for '%s%s', used default" % ( prefix, input.label ) }
                        messages[ input.name ] = cond_messages
                    test_value = input.test_param.get_initial_value( trans, context )
                    current_case = input.get_current_case( test_value, trans )
                    self.check_and_update_param_values_helper( input.cases[ current_case ].inputs, {}, trans, cond_messages, context, prefix, allow_workflow_parameters=allow_workflow_parameters )
                elif isinstance( input, Repeat ):
                    if input.min:
                        messages[ input.name ] = []
                        for i in range( input.min ):
                            rep_prefix = prefix + "%s %d > " % ( input.title, i + 1 )
                            rep_dict = dict()
                            messages[ input.name ].append( rep_dict )
                            self.check_and_update_param_values_helper( input.inputs, {}, trans, rep_dict, context, rep_prefix, allow_workflow_parameters=allow_workflow_parameters )
                else:
                    messages[ input.name ] = "No value found for '%s%s', used default" % ( prefix, input.label )
                values[ input.name ] = input.get_initial_value( trans, context )
            # Value, visit recursively as usual
            else:
                if isinstance( input, Repeat ):
                    for i, d in enumerate( values[ input.name ] ):
                        rep_prefix = prefix + "%s %d > " % ( input.title, i + 1 )
                        self.check_and_update_param_values_helper( input.inputs, d, trans, messages, context, rep_prefix, allow_workflow_parameters=allow_workflow_parameters )
                elif isinstance( input, Conditional ):
                    group_values = values[ input.name ]
                    if input.test_param.name not in group_values:
                        # No test param invalidates the whole conditional
                        values[ input.name ] = group_values = input.get_initial_value( trans, context )
                        messages[ input.test_param.name ] = "No value found for '%s%s', used default" % ( prefix, input.test_param.label )
                        current_case = group_values['__current_case__']
                        for child_input in input.cases[current_case].inputs.itervalues():
                            messages[ child_input.name ] = "Value no longer valid for '%s%s', replaced with default" % ( prefix, child_input.label )
                    else:
                        current = group_values["__current_case__"]
                        self.check_and_update_param_values_helper( input.cases[current].inputs, group_values, trans, messages, context, prefix, allow_workflow_parameters=allow_workflow_parameters )
                else:
                    # Regular tool parameter, no recursion needed
                    try:
                        ck_param = True
                        if allow_workflow_parameters and isinstance( values[ input.name ], basestring ):
                            if WORKFLOW_PARAMETER_REGULAR_EXPRESSION.search( values[ input.name ] ):
                                ck_param = False
                        #this will fail when a parameter's type has changed to a non-compatible one: e.g. conditional group changed to dataset input
                        if ck_param:
                            input.value_from_basic( input.value_to_basic( values[ input.name ], trans.app ), trans.app, ignore_errors=False )
                    except:
                        messages[ input.name ] = "Value no longer valid for '%s%s', replaced with default" % ( prefix, input.label )
                        if update_values:
                            values[ input.name ] = input.get_initial_value( trans, context )
    def handle_unvalidated_param_values( self, input_values, app ):
        """
        Find any instances of `UnvalidatedValue` within input_values and
        validate them (by calling `ToolParameter.from_html` and
        `ToolParameter.validate`).  Mutates ``input_values`` in place.
        """
        # No validation is done when check_values is False
        if not self.check_values:
            return
        self.handle_unvalidated_param_values_helper( self.inputs, input_values, app )
    def handle_unvalidated_param_values_helper( self, inputs, input_values, app, context=None, prefix="" ):
        """
        Recursive helper for `handle_unvalidated_param_values`.

        Raises ``LateValidationError`` (wrapping the original error with the
        parameter's prefixed label) when conversion or validation fails.
        """
        context = ExpressionContext( input_values, context )
        for input in inputs.itervalues():
            if isinstance( input, Repeat ):
                for i, d in enumerate( input_values[ input.name ] ):
                    rep_prefix = prefix + "%s %d > " % ( input.title, i + 1 )
                    self.handle_unvalidated_param_values_helper( input.inputs, d, app, context, rep_prefix )
            elif isinstance( input, Conditional ):
                values = input_values[ input.name ]
                current = values["__current_case__"]
                # NOTE: The test param doesn't need to be checked since
                # there would be no way to tell what case to use at
                # workflow build time. However I'm not sure if we are
                # actually preventing such a case explicately.
                self.handle_unvalidated_param_values_helper( input.cases[current].inputs, values, app, context, prefix )
            else:
                # Regular tool parameter
                value = input_values[ input.name ]
                if isinstance( value, UnvalidatedValue ):
                    try:
                        # Convert from html representation
                        if value.value is None:
                            # If value.value is None, it could not have been
                            # submited via html form and therefore .from_html
                            # can't be guaranteed to work
                            value = None
                        else:
                            value = input.from_html( value.value, None, context )
                        # Do any further validation on the value
                        input.validate( value, None )
                    except Exception, e:
                        # Wrap an re-raise any generated error so we can
                        # generate a more informative message
                        message = "Failed runtime validation of %s%s (%s)" \
                            % ( prefix, input.label, e )
                        raise LateValidationError( message )
                    input_values[ input.name ] = value
def handle_job_failure_exception( self, e ):
"""
Called by job.fail when an exception is generated to allow generation
of a better error message (returning None yields the default behavior)
"""
message = None
# If the exception was generated by late validation, use its error
# message (contains the parameter name and value)
if isinstance( e, LateValidationError ):
message = e.message
return message
    def build_dependency_shell_commands( self ):
        """Return a list of commands to be run to populate the current environment to include this tools requirements."""
        # Delegates to the toolbox's dependency manager; passing the
        # installed tool-shed dependencies (if any) lets it prefer those.
        return self.app.toolbox.dependency_manager.dependency_shell_commands(
            self.requirements,
            installed_tool_dependencies=self.installed_tool_dependencies
        )
@property
def installed_tool_dependencies(self):
if self.tool_shed_repository:
installed_tool_dependencies = self.tool_shed_repository.tool_dependencies_installed_or_in_error
else:
installed_tool_dependencies = None
return installed_tool_dependencies
def build_redirect_url_params( self, param_dict ):
"""
Substitute parameter values into self.redirect_url_params
"""
if not self.redirect_url_params:
return
redirect_url_params = None
# Substituting parameter values into the url params
redirect_url_params = fill_template( self.redirect_url_params, context=param_dict )
# Remove newlines
redirect_url_params = redirect_url_params.replace( "\n", " " ).replace( "\r", " " )
return redirect_url_params
def parse_redirect_url( self, data, param_dict ):
"""
Parse the REDIRECT_URL tool param. Tools that send data to an external
application via a redirect must include the following 3 tool params:
1) REDIRECT_URL - the url to which the data is being sent
2) DATA_URL - the url to which the receiving application will send an
http post to retrieve the Galaxy data
3) GALAXY_URL - the url to which the external application may post
data as a response
"""
redirect_url = param_dict.get( 'REDIRECT_URL' )
redirect_url_params = self.build_redirect_url_params( param_dict )
# Add the parameters to the redirect url. We're splitting the param
# string on '**^**' because the self.parse() method replaced white
# space with that separator.
params = redirect_url_params.split( '**^**' )
rup_dict = {}
for param in params:
p_list = param.split( '=' )
p_name = p_list[0]
p_val = p_list[1]
rup_dict[ p_name ] = p_val
DATA_URL = param_dict.get( 'DATA_URL', None )
assert DATA_URL is not None, "DATA_URL parameter missing in tool config."
DATA_URL += "/%s/display" % str( data.id )
redirect_url += "?DATA_URL=%s" % DATA_URL
# Add the redirect_url_params to redirect_url
for p_name in rup_dict:
redirect_url += "&%s=%s" % ( p_name, rup_dict[ p_name ] )
# Add the current user email to redirect_url
if data.history.user:
USERNAME = str( data.history.user.email )
else:
USERNAME = 'Anonymous'
redirect_url += "&USERNAME=%s" % USERNAME
return redirect_url
    def call_hook( self, hook_name, *args, **kwargs ):
        """
        Call the custom code hook function identified by 'hook_name' if any,
        and return the results.  Any exception raised by the hook is
        re-raised with the tool name and hook name prepended to its message.
        """
        try:
            code = self.get_hook( hook_name )
            if code:
                return code( *args, **kwargs )
        except Exception, e:
            original_message = ''
            if len( e.args ):
                original_message = e.args[0]
            # Rewrite the exception's args in place so the traceback shows
            # which tool/hook failed, then re-raise the original exception.
            e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, original_message ), )
            raise
def exec_before_job( self, app, inp_data, out_data, param_dict={} ):
pass
    def exec_after_process( self, app, inp_data, out_data, param_dict, job=None ):
        """
        Hook called after the job's process completes; the default
        implementation does nothing.  Subclasses override this to
        post-process outputs.
        """
        pass
    def job_failed( self, job_wrapper, message, exception=False ):
        """
        Called when a job has failed.  ``exception`` indicates whether the
        failure was caused by an exception.  Default implementation does
        nothing; subclasses may override to clean up.
        """
        pass
    def collect_associated_files( self, output, job_working_directory ):
        """
        Find extra files in the job working directory and move them into
        the appropriate dataset's files directory.

        ``output`` maps output names to HDAs; extra files for each HDA are
        expected under ``dataset_<id>_files`` in the working directory.
        """
        for name, hda in output.items():
            temp_file_path = os.path.join( job_working_directory, "dataset_%s_files" % ( hda.dataset.id ) )
            extra_dir = None
            try:
                # This skips creation of directories - object store
                # automatically creates them.  However, empty directories will
                # not be created in the object store at all, which might be a
                # problem.
                for root, dirs, files in os.walk( temp_file_path ):
                    # Path of this subtree relative to the working directory.
                    extra_dir = root.replace(job_working_directory, '', 1).lstrip(os.path.sep)
                    for f in files:
                        self.app.object_store.update_from_file(hda.dataset,
                            extra_dir=extra_dir,
                            alt_name=f,
                            file_name=os.path.join(root, f),
                            create=True,
                            preserve_symlinks=True
                        )
                # Clean up after being handled by object store.
                # FIXME: If the object (e.g., S3) becomes async, this will
                # cause issues so add it to the object store functionality?
                if extra_dir is not None:
                    # there was an extra_files_path dir, attempt to remove it
                    shutil.rmtree(temp_file_path)
            except Exception, e:
                # Best-effort: a failure for one output should not prevent
                # collecting files for the remaining outputs.
                log.debug( "Error in collect_associated_files: %s" % ( e ) )
                continue
    def collect_child_datasets( self, output, job_working_directory ):
        """
        Look for child dataset files, create HDA and attach to parent.

        Child files are discovered by naming convention
        ``child_<parentId>_<designation>_<visibility>_<extension>`` in the
        configured collection directories.  Returns a nested dict mapping
        output name -> designation -> new child HDA.
        """
        children = {}
        # Loop through output file names, looking for generated children in
        # form of 'child_parentId_designation_visibility_extension'
        for name, outdata in output.items():
            filenames = []
            if 'new_file_path' in self.app.config.collect_outputs_from:
                filenames.extend( glob.glob(os.path.join(self.app.config.new_file_path, "child_%i_*" % outdata.id) ) )
            if 'job_working_directory' in self.app.config.collect_outputs_from:
                filenames.extend( glob.glob(os.path.join(job_working_directory, "child_%i_*" % outdata.id) ) )
            for filename in filenames:
                if not name in children:
                    children[name] = {}
                # Decode the underscore-separated naming convention.
                fields = os.path.basename(filename).split("_")
                fields.pop(0)
                parent_id = int(fields.pop(0))
                designation = fields.pop(0)
                visible = fields.pop(0).lower()
                if visible == "visible":
                    visible = True
                else:
                    visible = False
                ext = fields.pop(0).lower()
                child_dataset = self.app.model.HistoryDatasetAssociation( extension=ext,
                                                                          parent_id=outdata.id,
                                                                          designation=designation,
                                                                          visible=visible,
                                                                          dbkey=outdata.dbkey,
                                                                          create_dataset=True,
                                                                          sa_session=self.sa_session )
                self.app.security_agent.copy_dataset_permissions( outdata.dataset, child_dataset.dataset )
                # Move data from temp location to dataset location
                self.app.object_store.update_from_file(child_dataset.dataset, file_name=filename, create=True)
                self.sa_session.add( child_dataset )
                self.sa_session.flush()
                child_dataset.set_size()
                child_dataset.name = "Secondary Dataset (%s)" % ( designation )
                child_dataset.init_meta()
                child_dataset.set_meta()
                child_dataset.set_peek()
                # Associate new dataset with job
                job = None
                for assoc in outdata.creating_job_associations:
                    job = assoc.job
                    break
                if job:
                    assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s|%s__' % ( name, designation ), child_dataset )
                    assoc.job = job
                    self.sa_session.add( assoc )
                    self.sa_session.flush()
                # Child inherits the parent output's state.
                child_dataset.state = outdata.state
                self.sa_session.add( child_dataset )
                self.sa_session.flush()
                # Add child to return dict
                children[name][designation] = child_dataset
                # Need to update all associated output hdas, i.e. history was
                # shared with job running
                for dataset in outdata.dataset.history_associations:
                    if outdata == dataset:
                        continue
                    # Create new child dataset
                    child_data = child_dataset.copy( parent_id=dataset.id )
                    self.sa_session.add( child_data )
                    self.sa_session.flush()
        return children
    def collect_primary_datasets( self, output, job_working_directory, input_ext ):
        """
        Find any additional datasets generated by a tool and attach (for
        cases where number of outputs is not known in advance).

        Delegates to ``output_collect.collect_primary_datasets``.
        """
        return output_collect.collect_primary_datasets( self, output, job_working_directory, input_ext )
    def to_dict( self, trans, link_details=False, io_details=False ):
        """
        Returns dict of tool.

        ``link_details`` adds the hyperlink/target info needed to render the
        tool in the UI; ``io_details`` adds serialized input and output
        definitions.
        """
        # Basic information
        tool_dict = super( Tool, self ).to_dict()
        # Add link details.
        if link_details:
            # Add details for creating a hyperlink to the tool.
            if not isinstance( self, DataSourceTool ):
                link = url_for( controller='tool_runner', tool_id=self.id )
            else:
                # Data source tools go through a redirect controller action.
                link = url_for( controller='tool_runner', action='data_source_redirect', tool_id=self.id )
            # Basic information
            tool_dict.update( { 'link': link,
                                'min_width': self.uihints.get( 'minwidth', -1 ),
                                'target': self.target } )
        # Add input and output details.
        if io_details:
            tool_dict[ 'inputs' ] = [ input.to_dict( trans ) for input in self.inputs.values() ]
            tool_dict[ 'outputs' ] = [ output.to_dict() for output in self.outputs.values() ]
        tool_dict[ 'panel_section_id' ], tool_dict[ 'panel_section_name' ] = self.get_panel_section()
        return tool_dict
    def get_default_history_by_trans( self, trans, create=False ):
        """Return the request's current history (optionally creating one
        when ``create`` is True)."""
        return trans.get_history( create=create )
@classmethod
def get_externally_referenced_paths( self, path ):
""" Return relative paths to externally referenced files by the tool
described by file at `path`. External components should not assume things
about the structure of tool xml files (this is the tool's responsibility).
"""
tree = raw_tool_xml_tree(path)
root = tree.getroot()
external_paths = []
for code_elem in root.findall( 'code' ):
external_path = code_elem.get( 'file' )
if external_path:
external_paths.append( external_path )
external_paths.extend( imported_macro_paths( root ) )
# May also need to load external citation files as well at some point.
return external_paths
class OutputParameterJSONTool( Tool ):
    """
    Alternate implementation of Tool that provides parameters and other values
    JSONified within the contents of an output dataset
    """
    tool_type = 'output_parameter_json'

    def _prepare_json_list( self, param_list ):
        """Recursively convert a list of parameter values into JSON-safe
        strings (dicts/lists are descended into; everything else is str())."""
        rval = []
        for value in param_list:
            if isinstance( value, dict ):
                rval.append( self._prepare_json_param_dict( value ) )
            elif isinstance( value, list ):
                rval.append( self._prepare_json_list( value ) )
            else:
                rval.append( str( value ) )
        return rval

    def _prepare_json_param_dict( self, param_dict ):
        """Recursively convert a dict of parameter values into JSON-safe
        strings (dicts/lists are descended into; everything else is str())."""
        rval = {}
        for key, value in param_dict.iteritems():
            if isinstance( value, dict ):
                rval[ key ] = self._prepare_json_param_dict( value )
            elif isinstance( value, list ):
                rval[ key ] = self._prepare_json_list( value )
            else:
                rval[ key ] = str( value )
        return rval

    def exec_before_job( self, app, inp_data, out_data, param_dict=None ):
        """
        Write a JSON description of the job's parameters, output datasets
        and job config into the first output dataset's file, for the tool
        script to consume.
        """
        if param_dict is None:
            param_dict = {}
        json_params = {}
        # it would probably be better to store the original incoming
        # parameters here, instead of the Galaxy modified ones?
        json_params[ 'param_dict' ] = self._prepare_json_param_dict( param_dict )
        json_params[ 'output_data' ] = []
        json_params[ 'job_config' ] = dict( GALAXY_DATATYPES_CONF_FILE=param_dict.get( 'GALAXY_DATATYPES_CONF_FILE' ), GALAXY_ROOT_DIR=param_dict.get( 'GALAXY_ROOT_DIR' ), TOOL_PROVIDED_JOB_METADATA_FILE=jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
        json_filename = None
        # (the previous `enumerate` index was unused; iterate items directly)
        for out_name, data in out_data.iteritems():
            # use wrapped dataset to access certain values
            wrapped_data = param_dict.get( out_name )
            # allow multiple files to be created
            file_name = str( wrapped_data )
            extra_files_path = str( wrapped_data.files_path )
            data_dict = dict( out_data_name=out_name,
                              ext=data.ext,
                              dataset_id=data.dataset.id,
                              hda_id=data.id,
                              file_name=file_name,
                              extra_files_path=extra_files_path )
            json_params[ 'output_data' ].append( data_dict )
            if json_filename is None:
                json_filename = file_name
        # Ensure the handle is closed even if serialization fails.
        out = open( json_filename, 'w' )
        try:
            out.write( json.dumps( json_params ) )
        finally:
            out.close()
class DataSourceTool( OutputParameterJSONTool ):
    """
    Alternate implementation of Tool for data_source tools -- those that
    allow the user to query and extract data from another web site.
    """
    tool_type = 'data_source'
    default_tool_action = DataSourceToolAction

    def _build_GALAXY_URL_parameter( self ):
        # Hidden baseurl parameter telling the remote site where to POST results back.
        return ToolParameter.build( self, ElementTree.XML( '<param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=%s" />' % self.id ) )

    def parse_inputs( self, root ):
        """Parse inputs normally, then guarantee a GALAXY_URL input exists."""
        super( DataSourceTool, self ).parse_inputs( root )
        if 'GALAXY_URL' not in self.inputs:
            self.inputs[ 'GALAXY_URL' ] = self._build_GALAXY_URL_parameter()
            self.inputs_by_page[0][ 'GALAXY_URL' ] = self.inputs[ 'GALAXY_URL' ]

    def exec_before_job( self, app, inp_data, out_data, param_dict=None ):
        """Write the data-source JSON config and apply per-output overrides.

        The remote site may send ``GALAXY|<out_name>|name`` / ``dbkey`` /
        ``info`` / ``data_type`` parameters to customize each output dataset;
        plain ``name``/``dbkey``/``info``/``data_type`` act as fallbacks.
        """
        if param_dict is None:
            param_dict = {}
        dbkey = param_dict.get( 'dbkey' )
        info = param_dict.get( 'info' )
        data_type = param_dict.get( 'data_type' )
        name = param_dict.get( 'name' )

        json_params = {}
        json_params[ 'param_dict' ] = self._prepare_json_param_dict( param_dict ) # it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
        json_params[ 'output_data' ] = []
        json_params[ 'job_config' ] = dict( GALAXY_DATATYPES_CONF_FILE=param_dict.get( 'GALAXY_DATATYPES_CONF_FILE' ), GALAXY_ROOT_DIR=param_dict.get( 'GALAXY_ROOT_DIR' ), TOOL_PROVIDED_JOB_METADATA_FILE=jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
        json_filename = None
        for i, ( out_name, data ) in enumerate( out_data.iteritems() ):
            #use wrapped dataset to access certain values
            wrapped_data = param_dict.get( out_name )
            #allow multiple files to be created
            cur_base_param_name = 'GALAXY|%s|' % out_name
            cur_name = param_dict.get( cur_base_param_name + 'name', name )
            # BUG FIX: this key was previously looked up as 'dkey', which does
            # not match the 'dbkey' parameter name used everywhere else, so the
            # per-output genome-build override could never take effect.
            cur_dbkey = param_dict.get( cur_base_param_name + 'dbkey', dbkey )
            cur_info = param_dict.get( cur_base_param_name + 'info', info )
            cur_data_type = param_dict.get( cur_base_param_name + 'data_type', data_type )
            if cur_name:
                data.name = cur_name
            if not data.info and cur_info:
                data.info = cur_info
            if cur_dbkey:
                data.dbkey = cur_dbkey
            if cur_data_type:
                data.extension = cur_data_type
            file_name = str( wrapped_data )
            extra_files_path = str( wrapped_data.files_path )
            data_dict = dict( out_data_name=out_name,
                              ext=data.ext,
                              dataset_id=data.dataset.id,
                              hda_id=data.id,
                              file_name=file_name,
                              extra_files_path=extra_files_path )
            json_params[ 'output_data' ].append( data_dict )
            if json_filename is None:
                json_filename = file_name
        out = open( json_filename, 'w' )
        out.write( json.dumps( json_params ) )
        out.close()
class AsyncDataSourceTool( DataSourceTool ):
    """Data-source tool whose remote site posts back to /async/<tool id>."""
    tool_type = 'data_source_async'

    def _build_GALAXY_URL_parameter( self ):
        # Same hidden parameter as DataSourceTool but pointing at the async endpoint.
        return ToolParameter.build( self, ElementTree.XML( '<param name="GALAXY_URL" type="baseurl" value="/async/%s" />' % self.id ) )
class DataDestinationTool( Tool ):
    """Marker subclass for the 'data_destination' tool type."""
    tool_type = 'data_destination'
class SetMetadataTool( Tool ):
    """
    Tool implementation for special tool that sets metadata on an existing
    dataset.
    """
    tool_type = 'set_metadata'
    # This tool *is* the metadata-setting step, so it never schedules another one.
    requires_setting_metadata = False

    def exec_after_process( self, app, inp_data, out_data, param_dict, job=None ):
        """Load externally-computed metadata onto each input dataset.

        On the first dataset whose external metadata failed, marks it
        FAILED_METADATA, flushes, and aborts processing of the remaining
        datasets (early ``return``).
        """
        for name, dataset in inp_data.iteritems():
            external_metadata = JobExternalOutputMetadataWrapper( job )
            if external_metadata.external_metadata_set_successfully( dataset, app.model.context ):
                dataset.metadata.from_JSON_dict( external_metadata.get_output_filenames_by_dataset( dataset, app.model.context ).filename_out )
            else:
                dataset._state = model.Dataset.states.FAILED_METADATA
                self.sa_session.add( dataset )
                self.sa_session.flush()
                return
                # If setting external metadata has failed, how can we inform the
                # user? For now, we'll leave the default metadata and set the state
                # back to its original.
            dataset.datatype.after_setting_metadata( dataset )
            if job and job.tool_id == '1.0.0':
                # Legacy tool version stored the pre-job state in the param dict.
                dataset.state = param_dict.get( '__ORIGINAL_DATASET_STATE__' )
            else:
                # Revert dataset.state to fall back to dataset.dataset.state
                dataset._state = None
            # Need to reset the peek, which may rely on metadata
            dataset.set_peek()
            self.sa_session.add( dataset )
            self.sa_session.flush()

    def job_failed( self, job_wrapper, message, exception=False ):
        """On job failure, run the same post-processing over the job's inputs
        so their states are updated consistently."""
        job = job_wrapper.sa_session.query( model.Job ).get( job_wrapper.job_id )
        if job:
            inp_data = {}
            for dataset_assoc in job.input_datasets:
                inp_data[dataset_assoc.name] = dataset_assoc.dataset
            return self.exec_after_process( job_wrapper.app, inp_data, {}, job_wrapper.get_param_dict(), job=job )
class ExportHistoryTool( Tool ):
    """Marker subclass for the 'export_history' tool type."""
    tool_type = 'export_history'
class ImportHistoryTool( Tool ):
    """Marker subclass for the 'import_history' tool type."""
    tool_type = 'import_history'
class GenomeIndexTool( Tool ):
    """Marker subclass for the 'index_genome' tool type."""
    tool_type = 'index_genome'
class DataManagerTool( OutputParameterJSONTool ):
    """Tool type backing Data Managers: after a successful run, hands the
    outputs to the registered data manager for result processing."""
    tool_type = 'manage_data'
    default_tool_action = DataManagerToolAction

    def __init__( self, config_file, root, app, guid=None, data_manager_id=None, **kwds ):
        # data_manager_id defaults to the tool id when not supplied.
        self.data_manager_id = data_manager_id
        super( DataManagerTool, self ).__init__( config_file, root, app, guid=guid, **kwds )
        if self.data_manager_id is None:
            self.data_manager_id = self.id

    def exec_after_process( self, app, inp_data, out_data, param_dict, job=None, **kwds ):
        """Run the inherited post-processing, then feed the results to the
        owning data manager -- but only if the job and all outputs are OK."""
        #run original exec_after_process
        super( DataManagerTool, self ).exec_after_process( app, inp_data, out_data, param_dict, job=job, **kwds )
        #process results of tool
        if job and job.state == job.states.ERROR:
            return
        #Job state may now be 'running' instead of previous 'error', but datasets are still set to e.g. error
        for dataset in out_data.itervalues():
            if dataset.state != dataset.states.OK:
                return
        data_manager_id = job.data_manager_association.data_manager_id
        data_manager = self.app.data_managers.get_manager( data_manager_id, None )
        # NOTE(review): assert is stripped under ``python -O``; an explicit
        # exception would be more robust for this validation.
        assert data_manager is not None, "Invalid data manager (%s) requested. It may have been removed before the job completed." % ( data_manager_id )
        data_manager.process_result( out_data )

    def get_default_history_by_trans( self, trans, create=False ):
        """Return (optionally creating) the user's dedicated Data Manager
        history: the most recent non-deleted associated history."""
        def _create_data_manager_history( user ):
            # Builds the special history and its association record in one flush.
            history = trans.app.model.History( name='Data Manager History (automatically created)', user=user )
            data_manager_association = trans.app.model.DataManagerHistoryAssociation( user=user, history=history )
            trans.sa_session.add_all( ( history, data_manager_association ) )
            trans.sa_session.flush()
            return history
        user = trans.user
        # NOTE(review): assert used for access control is stripped under -O.
        assert user, 'You must be logged in to use this tool.'
        history = user.data_manager_histories
        if not history:
            #create
            if create:
                history = _create_data_manager_history( user )
            else:
                history = None
        else:
            # ``history`` is reused as the loop variable here: walk associations
            # newest-first and stop at the first non-deleted history.
            for history in reversed( history ):
                history = history.history
                if not history.deleted:
                    break
            if history.deleted:
                if create:
                    history = _create_data_manager_history( user )
                else:
                    history = None
        return history
# Populate tool_type to ToolClass mappings
tool_types = dict( ( tool_class.tool_type, tool_class )
                   for tool_class in ( Tool, SetMetadataTool, OutputParameterJSONTool,
                                       DataManagerTool, DataSourceTool, AsyncDataSourceTool,
                                       DataDestinationTool ) )
# ---- Utility classes to be factored out -----------------------------------
class TracksterConfig:
    """ Trackster configuration encapsulation. """

    def __init__( self, actions ):
        self.actions = actions

    @staticmethod
    def parse( root ):
        """Build a TracksterConfig from the <action> children of *root*."""
        return TracksterConfig( [ SetParamAction.parse( action_elt )
                                  for action_elt in root.findall( "action" ) ] )
class SetParamAction:
    """ Set parameter action. """

    def __init__( self, name, output_name ):
        self.name = name
        self.output_name = output_name

    @staticmethod
    def parse( elt ):
        """ Parse action from element. """
        name, output_name = elt.get( "name" ), elt.get( "output_name" )
        return SetParamAction( name, output_name )
class BadValue( object ):
    """Wrapper marking a parameter value as bad while preserving the value."""

    def __init__( self, value ):
        self.value = value
class ToolStdioRegex( object ):
    """
    This is a container for the <stdio> element's regex subelement.
    The regex subelement has a "match" attribute, a "sources"
    attribute that contains "output" and/or "error", and a "level"
    attribute that contains "warning" or "fatal".
    """
    def __init__( self ):
        self.match = ""
        self.desc = ""
        # TODO: Define a common class or constant for error level:
        self.error_level = "fatal"
        self.stdout_match = False
        self.stderr_match = False
class ToolStdioExitCode( object ):
    """
    This is a container for the <stdio> element's <exit_code> subelement.
    The exit_code element has a range of exit codes and the error level.
    """
    def __init__( self ):
        # Default range accepts every possible exit code.
        self.range_start = float( "-inf" )
        self.range_end = float( "inf" )
        self.desc = ""
        # TODO: Define a common class or constant for error level:
        self.error_level = "fatal"
def json_fix( val ):
    """Recursively encode unicode strings in *val* as utf8 byte strings,
    descending into lists and dicts (keys and values alike)."""
    if isinstance( val, dict ):
        return dict( ( json_fix( k ), json_fix( v ) ) for k, v in val.iteritems() )
    if isinstance( val, list ):
        return [ json_fix( item ) for item in val ]
    if isinstance( val, unicode ):
        return val.encode( "utf8" )
    return val
def check_param_from_incoming( trans, state, input, incoming, key, context, source ):
    """
    Unlike "update" state, this preserves default if no incoming value found.
    This lets API user specify just a subset of params and allow defaults to be
    used when available.
    """
    fallback = state.get( input.name, None )
    raw_value = get_incoming_value( incoming, key, fallback )
    return check_param( trans, input, raw_value, context, source=source )
def get_incoming_value( incoming, key, default ):
    """
    Fetch value from incoming dict directly or check special nginx upload
    created variants of this key.
    """
    composite_marker = "__%s__is_composite" % key
    if composite_marker not in incoming:
        return incoming.get( key, default )
    # Composite upload: reassemble {part: value} from the flattened keys.
    parts = incoming[ "__%s__keys" % key ].split()
    return dict( ( part, incoming[ "%s_%s" % ( key, part ) ] ) for part in parts )
class InterruptedUpload( Exception ):
    """Signals that an upload was interrupted before completing."""
    pass
| gpl-3.0 |
hw-manager/android | src/main/java/de/nico/ha_manager/helper/CustomToast.java | 788 | package de.nico.ha_manager.helper;
/*
* @author Nico Alt
* See the file "LICENSE" for the full license governing this code.
*/
import android.content.Context;
import android.widget.Toast;
/**
 * Static convenience wrappers around {@link Toast} for showing
 * short and long notifications with a single call.
 */
public final class CustomToast {

    /**
     * Shows a short Toast.
     *
     * @param c   Needed for {@link android.widget.Toast}.
     * @param msg Message to show.
     */
    public static void showShort(final Context c, final String msg) {
        Toast.makeText(c, msg, Toast.LENGTH_SHORT).show();
    }

    /**
     * Shows a long Toast.
     *
     * @param c   Needed for {@link android.widget.Toast}.
     * @param msg Message to show.
     */
    public static void showLong(final Context c, final String msg) {
        Toast.makeText(c, msg, Toast.LENGTH_LONG).show();
    }
}
| gpl-3.0 |
ChristianGreiner/gppcc10 | BringMeHome/Source/Code/CorePlugin/Colors.cs | 684 | using Duality.Drawing;
namespace BringMeHome
{
    /// <summary>
    /// Central palette of named <see cref="ColorRgba"/> constants used by the game.
    /// </summary>
    public static class Colors
    {
        public static readonly ColorRgba DarkBlue = new ColorRgba(0, 50, 73);
        public static readonly ColorRgba BlueHover = new ColorRgba(38, 133, 165);
        public static readonly ColorRgba Blue = new ColorRgba(0, 126, 167);
        public static readonly ColorRgba LightBlue = new ColorRgba(128, 206, 215);
        public static readonly ColorRgba SuperLightBlue = new ColorRgba(188, 244, 245);
        public static readonly ColorRgba DarkRed = new ColorRgba(192, 57, 43);
        public static readonly ColorRgba Red = new ColorRgba(231, 76, 60);
        public static readonly ColorRgba MidnightBlue = new ColorRgba(0, 51, 102);
    }
} | gpl-3.0 |
wizzy121/GuruFX | GuruFX/GuruFX.TestApp/Properties/AssemblyInfo.cs | 1404 | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("GuruFX.TestApp")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("GuruFX.TestApp")]
[assembly: AssemblyCopyright("Copyright © 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("27c12e96-2c43-4232-b849-14402d164888")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| gpl-3.0 |
WeCase/WeCase | utils/depgraph.py | 898 | #!/usr/bin/python3
import sys
def process_import(filename, statement):
    """Print a graphviz edge from *filename* to each module in an
    ``import a, b`` statement."""
    imported = statement.replace(",", " ").split()[1:]
    for module_name in imported:
        print('"%s" -> "%s"' % (filename, module_name))
def process_from(filename, statement):
    """Print graphviz edges for a ``from X import a, b`` statement:
    filename -> X -> each imported name."""
    tokens = statement.replace(",", " ").split()
    source_module = tokens[1]
    for imported_name in tokens[3:]:
        print('"%s" -> "%s" -> "%s"' % (filename, source_module, imported_name))
def print_header():
    """Emit the opening of the graphviz digraph."""
    sys.stdout.write("digraph WeCase {\nratio=2\n")
def print_footer():
    """Emit the closing brace of the graphviz digraph."""
    sys.stdout.write("}\n")
# Read a stream from stdin in which a line ending in ".py" names the current
# file and subsequent import/from lines belong to it; emit a graphviz graph.
print_header()
for line in sys.stdin:
    line = line.replace("\n", "")
    if line.endswith(".py"):
        # Switch the "current file" context for the import lines that follow.
        filename = line
    else:
        # NOTE(review): an import line arriving before any filename line would
        # raise NameError on `filename` -- TODO confirm input always leads
        # with a filename.
        if line.startswith("import"):
            process_import(filename, line)
        elif line.startswith("from"):
            process_from(filename, line)
print_footer()
| gpl-3.0 |
dut3062796s/forum-1 | application/controller/accessControl.js | 1629 | var express = require('express');
var router = express.Router();
var domain = require("../../domain");
// Middleware: only the hard-coded site admin account may continue;
// everyone else is passed to the error handler with 500.
function isAdmin(req, res, next) {
    var user = req.session.user;
    if (user && user.email === "brighthas@gmail.com") {
        next();
    } else {
        next(500);
    }
}
// Column creation and all column sub-actions are admin-only.
router.post("/columns/create", isAdmin);
router.post("/columns/:id/*", isAdmin);
// Middleware: requires a logged-in session; stamps the current user's id
// onto the request body as authorId before continuing.
function isLogin(req, res, next) {
    if (!req.session.user) {
        next(500);
        return;
    }
    req.body.authorId = req.session.user.id;
    next();
}
// Sending messages, replying, and creating topics all require a login.
router.post("/messages/send", isLogin);
router.post("/replys/create", isLogin);
router.post("/topics/create", isLogin);
// Topic sub-actions are allowed for: the site admin, the topic's author,
// or the manager of the column the topic belongs to.
// NOTE(review): the promise chains have no .catch(), so repository errors
// leave the request hanging -- TODO confirm and add rejection handling.
// NOTE(review): if Column.get resolves to undefined, column.managerId throws.
router.post("/topics/:id/*", function (req, res, next) {
    if (req.session.user) {
        if (req.session.user.email === "brighthas@gmail.com") {
            next();
        } else {
            domain.repos.Topic.get(req.params.id).then(function (topic) {
                if (topic) {
                    if (topic.authorId === req.session.user.id) {
                        next();
                    } else {
                        // Not the author: fall back to column-manager check.
                        domain.repos.Column.get(topic.columnId).then(function (column) {
                            if (column.managerId === req.session.user.id) {
                                next();
                            } else {
                                next(500);
                            }
                        })
                    }
                } else {
                    next(500);
                }
            })
        }
    } else {
        next(500);
    }
});
// Any user sub-action requires a login.
router.post("/users/*",isLogin);

module.exports = router;
G4brym/Django-Efficient-Rest | EfficientRest/endpoints.py | 8548 | import json
import math
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import status
from rest_framework.settings import api_settings
class EndpointType():
    """Base holder for one REST request: binds request/action, accumulates a
    status code, a result payload and a list of errors for the response."""
    # Class-level defaults; the mutable ones (Result, Errors) are re-bound in
    # __init__ so instances do not share state through the class attributes.
    methods = []
    user = None
    request = None
    action = None
    input_data = None
    Code = status.HTTP_500_INTERNAL_SERVER_ERROR
    Result = {}
    Errors = []
    useSafe = True

    def __init__(self, request, action):
        """Reset per-request state and capture the DRF request body."""
        self.Code = status.HTTP_500_INTERNAL_SERVER_ERROR
        self.Result = {}
        self.Errors = []
        self.request = request
        self.action = action
        self.input_data = self.request.data

    def get_special_response(self):
        # Returns Meta.special_response when declared, else False.
        # NOTE(review): the bare except also hides unrelated errors; it
        # presumably only means to catch the missing-attribute case.
        try:
            return self.Meta.special_response
        except:
            return False

    def getSafe(self):
        return self.useSafe

    def setSafe(self, value):
        self.useSafe = value

    def requires_action(self):
        # Whether URLs must name an explicit <verb>_<action> handler.
        return self.Meta.requires_action

    def requires_auth(self):
        return self.Meta.requires_auth

    def getInput(self):
        return self.input_data

    def getInputPOST(self):
        return self.request.POST

    def getInputJson(self):
        """Best-effort JSON body: decodes raw bytes, falls back to QueryDict
        conversion, and returns an empty dict on invalid JSON."""
        try:
            return json.loads(self.input_data.decode("utf-8"))
        except AttributeError:
            # input_data is already a parsed QueryDict (no .decode()).
            return self.input_data.dict()
        except ValueError:
            return json.loads("{}")

    def getMethods(self):
        return self.Meta.methods

    def setResult(self, result):
        self.Result = result

    def getResult(self):
        return self.Result

    def getCode(self):
        return self.Code

    def setCode(self, code):
        self.Code = code

    def getErrors(self):
        return self.Errors

    def addError(self, error):
        self.Errors.append(error)

    def setErrors(self, errors):
        self.Errors = errors

    def addErrorJson(self, error):
        # Accepts a JSON string and stores the decoded structure.
        self.Errors.append(json.loads(error))

    def setUser(self, user):
        self.user = user

    def process(self):
        # Must be overwriten
        self.setCode(status.HTTP_500_INTERNAL_SERVER_ERROR)
class Model(EndpointType):
    """Endpoint type that maps HTTP verb + URL shape to handler methods.

    Handler lookup order:
      * action endpoints     -> ``<verb>_<action>()``
      * detail endpoints     -> ``<verb>_process_single(id)``
      * collection endpoints -> ``<verb>_process()``

    A missing handler yields HTTP 404; under ``settings.DEBUG`` the
    AttributeError propagates so the developer sees the real traceback.
    """

    def process(self):
        verb = self.request.method.lower()
        if self.requires_action():
            self.setCode(self._dispatch(verb + "_" + self.action))
        elif self.action:
            # No named action configured: treat the path component as an id.
            self.setCode(self._dispatch(verb + "_process_single", self.action))
        else:
            self.setCode(self._dispatch(verb + "_process"))

    def _dispatch(self, handler_name, *args):
        """Invoke ``self.<handler_name>(*args)``; outside DEBUG, translate a
        missing handler (AttributeError) into a 404 status code."""
        if settings.DEBUG:
            return getattr(self, handler_name)(*args)
        try:
            return getattr(self, handler_name)(*args)
        except AttributeError:
            self.addError("not_found")
            return status.HTTP_404_NOT_FOUND
class Service(EndpointType):
    """Endpoint type for RPC-style services dispatching to <verb>_<action>
    handlers, with a <verb>_process fallback when actions are disabled."""

    def process(self):
        # BUG FIX: previously this tested ``self.requires_action`` -- the bound
        # method object, which is always truthy -- instead of calling it, so
        # the ``<verb>_process`` fallback branch was unreachable.
        if self.requires_action():
            if settings.DEBUG:
                self.setCode(getattr(self, str(self.request.method.lower() + "_" + self.action))())
            else:
                try:
                    self.setCode(getattr(self, str(self.request.method.lower() + "_" + self.action))())
                except AttributeError:
                    self.addError("not_found")
                    self.setCode(status.HTTP_404_NOT_FOUND)
        else:
            try:
                self.setCode(getattr(self, str(self.request.method.lower() + "_process"))())
            except AttributeError:
                self.addError("not_found")
                self.setCode(status.HTTP_404_NOT_FOUND)
class BaseModel(Model):
    """Generic CRUD endpoint over ``Meta.Model`` serialized by
    ``Meta.Serializer``: paged listing, id-coalesced fetch, single fetch,
    create, partial update and delete. Handlers return DRF status codes and
    stash payloads via ``setResult``."""

    class Meta:
        methods = ["GET", "POST", "PATCH", "DELETE"]
        requires_action = False
        requires_auth = True
        # Concrete subclasses must set the Django model and DRF serializer.
        Model = None
        Serializer = None

    def __init__(self, request, action):
        Model.__init__(self, request, action)

    def get_process(self):
        """Collection GET: ``?page=N`` returns one page plus next/previous
        flags; ``?ids[]=...`` returns those objects; otherwise returns the
        page count and total object count."""
        #####
        # Retrieves the number of pages for this model
        #####
        page = self.request.GET.get('page', None)
        if page != None:
            try:
                clean_id = int(page)
            except:
                # Non-numeric page parameter.
                return status.HTTP_400_BAD_REQUEST
            skip = (clean_id - 1) * api_settings.PAGE_SIZE
            get = skip + api_settings.PAGE_SIZE
            totalObjs = self.Meta.Model.objects.all().count()
            if skip > totalObjs or clean_id == 0:
                # Page out of range (pages are 1-based).
                return status.HTTP_406_NOT_ACCEPTABLE
            try:
                objectList = self.Meta.Model.objects.all()[skip:get]
            except ObjectDoesNotExist:
                return status.HTTP_404_NOT_FOUND
            serializer = self.Meta.Serializer(objectList, many=True).data
            # NOTE(review): ``next`` and ``get`` shadow builtins here.
            if totalObjs <= get:
                next = False
            else:
                next = True
            if clean_id < 2:
                previous = False
            else:
                previous = True
            self.setResult({"results": serializer, "meta":{"count": totalObjs, "next": next, "previous": previous}})
            return status.HTTP_200_OK
        # Process Coalescing
        ids = self.request.GET.getlist('ids[]', None)
        if ids != None:
            clean_ids = []
            for id in ids:
                try:
                    clean_ids.append(int(id))
                except:
                    return status.HTTP_400_BAD_REQUEST
            objectList = self.Meta.Model.objects.filter(id__in=clean_ids)
            serializer = self.Meta.Serializer(objectList, many=True).data
            self.setResult(serializer)
            return status.HTTP_200_OK
        # Neither page nor ids requested: report pagination metadata only.
        objectList = self.Meta.Model.objects.all().count()
        pages = math.ceil(objectList / api_settings.PAGE_SIZE)
        # NOTE(review): the page count is decremented here (presumably for a
        # 0-based client) -- confirm against the frontend's expectation.
        if pages > 0:
            pages = pages-1
        self.setResult({"pages": pages, "count": objectList})
        return status.HTTP_200_OK

    def get_process_single(self, id):
        """Detail GET: fetch one object by integer id."""
        #####
        # Retrieves a single object
        #####
        try:
            clean_id = int(id)
        except:
            return status.HTTP_400_BAD_REQUEST
        try:
            object = self.Meta.Model.objects.get(id=clean_id)
        except ObjectDoesNotExist:
            return status.HTTP_404_NOT_FOUND
        self.setResult(object.get_as_dict())
        return status.HTTP_200_OK

    def post_process(self):
        """Collection POST: validate and save a new object."""
        #####
        # Saves a new Object
        #####
        serializer = self.Meta.Serializer(data=self.getInputPOST())
        if serializer.is_valid():
            serializer.save()
            self.setResult(serializer.data)
            return status.HTTP_201_CREATED
        self.setErrors(serializer.errors)
        return status.HTTP_400_BAD_REQUEST

    def put_process_single(self, id):
        """Detail PUT: partial update of one object.

        NOTE(review): when the serializer is invalid this falls through and
        returns None (no status code) -- looks like a missing 400 branch.
        """
        #####
        # Updates an Object
        #####
        try:
            clean_id = int(id)
        except:
            return status.HTTP_400_BAD_REQUEST
        try:
            object = self.Meta.Model.objects.get(id=clean_id)
        except:
            return status.HTTP_404_NOT_FOUND
        serializer = self.Meta.Serializer(object, data=self.getInput(), partial=True)
        if serializer.is_valid():
            serializer.save()
            self.setResult(serializer.data)
            return status.HTTP_202_ACCEPTED

    def delete_process_single(self, id):
        """Detail DELETE: remove one object by integer id."""
        #####
        # Deletes a single product
        #####
        try:
            clean_id = int(id)
        except:
            return status.HTTP_400_BAD_REQUEST
        try:
            object = self.Meta.Model.objects.get(id=clean_id)
        except ObjectDoesNotExist:
            return status.HTTP_404_NOT_FOUND
        object.delete()
        return status.HTTP_200_OK
EddyTrotin/ISEN_WelcomeApp | lib.php | 3627 | <?php
///////////////////////////////////////////////////////////////////////////////////
// Redirects the browser to $url, with a meta-refresh fallback for clients
// where JavaScript is disabled, then stops script execution.
function redirect($url) {
	// BUG FIX: the meta refresh previously used an invalid `url="..."`
	// attribute; the correct form is content="<seconds>;url=<target>",
	// so the no-JavaScript fallback never actually redirected.
	echo "<meta http-equiv=\"refresh\" content=\"0;url=".$url."\" />";
	echo "<script>location.replace('".$url."');</script>"; // To cope with Mozilla bug
	die;
}
///////////////////////////////////////////////////////////////////////////////////
// Validates an e-mail address by checking its format, resolving the domain's
// MX record and probing the SMTP server with MAIL FROM / RCPT TO.
// Returns "OK" on success or a (French) diagnostic message otherwise.
// NOTE(review): the unconditional return("OK") right below makes everything
// after it dead code (validation disabled for the exercise); also eregi()
// was removed in PHP 7 -- the dead code would need preg_match to revive.
function mailCheck($Email) {
	// no validation of submitted addresses for this exercise (TP)
	return("OK");

	// The e-mail address must be correctly formatted
	if(!eregi("^[[:alpha:]]{1}[[:alnum:]]*((\.|_|-)[[:alnum:]]+)*@[[:alpha:]]{1}[[:alnum:]]*((\.|-)[[:alnum:]]+)*(\.[[:alpha:]]{2,})$", $Email))
		return("Adresse ".$Email." mal formatée !");

	// Extract the domain part
	list(,$domain ) = split('@',$Email);

	// workaround: the aol.com server does not answer probes...
	if ($domain=="aol.com") return("OK");
	// same for laposte.net
	if ($domain=="laposte.net") return("OK");
	// free.fr has issues with the return-path
	if ($domain=="free.fr") return("OK");

	// Look for MX records in DNS
	if (getmxrr($domain, $MXHost))
		$ConnectAddress = $MXHost[0];
	else
		$ConnectAddress = $domain;

	// Open the connection on the SMTP port (25)
	$Connect = @fsockopen($ConnectAddress,25,$errno,$errstr);
	if($Connect)
	{
		if(ereg("^220", $Out = fgets($Connect, 1024)))
		{
			fputs ($Connect, "HELO {$_SERVER['HTTP_HOST']}\r\n");
			$Out = fgets ( $Connect, 1024 );
			fputs ($Connect, "MAIL FROM: <{$Email}>\r\n");
			$From = fgets ( $Connect, 1024 );
			fputs ($Connect, "RCPT TO: <{$Email}>\r\n");
			$To = fgets ($Connect, 1024);
			fputs ($Connect, "QUIT\r\n");
			fclose($Connect);
			// If the code returned by RCPT TO is 250 or 251 (see RFC)
			// then the address exists
			if (!ereg ("^250", $To) && !ereg ( "^251", $To ))
				// Address rejected by the server
				return("Adresse ".$Email." rejetée par le serveur ".$domain." !");
			else
				// Address accepted by the server
				return("OK");
		} else {
			// The server did not answer
			return("Le serveur ".$domain." n'a pas répondu !");
		}
	} else {
		// Could not connect to the mail server
		// you can display the error message by uncommenting the next line:
		// echo $errno."-".$errstr;
		return("Connexion au serveur ".$domain." impossible !");
	}
}
///////////////////////////////////////////////////////////////////////////////////
// Returns true when $courriel is already registered in the `data` table.
function dejaInscrit($courriel) {
	global $DbHost, $DbName, $DbUser, $DbPassword;
	$DbLink = mysqli_connect($DbHost, $DbUser, $DbPassword) or die('erreur de connexion au serveur');
	mysqli_select_db($DbLink, $DbName) or die('erreur de connexion a la base de donnees');
	mysqli_query($DbLink, "SET NAMES 'utf8'");
	// SECURITY FIX: the e-mail was previously concatenated straight into the
	// SQL string (SQL injection); a prepared statement binds it safely.
	$stmt = mysqli_prepare($DbLink, "SELECT count(id) FROM data WHERE identifiant = ?") or die(mysqli_error($DbLink));
	mysqli_stmt_bind_param($stmt, 's', $courriel);
	mysqli_stmt_execute($stmt);
	mysqli_stmt_bind_result($stmt, $count);
	mysqli_stmt_fetch($stmt);
	mysqli_stmt_close($stmt);
	// BUG FIX: mysqli_close() was called without the required link argument.
	mysqli_close($DbLink);
	if ($count >= 1) {
		return(true);
	}
	else {
		return(false);
	}
}
///////////////////////////////////////////////////////////////////////////////////
// Générer une chaine de caractère aléatoire
// Generate a random string of $longueur characters.
// NOTE(review): the charset omits lowercase 'o' and 'z' -- presumably 'o' is
// skipped to avoid confusion with '0', but 'z' looks accidental; kept
// byte-for-byte to preserve existing behavior.
function texteAleatoire($longueur) {
	$string = "";
	$chaine = "abcdefghijklmnpqrstuvwxyABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890";
	// mt_rand() replaces the deprecated srand((double)microtime()*1000000)
	// + rand() pattern: it self-seeds (since PHP 4.2) and has better
	// statistical quality; mt_rand(0, max) also avoids the modulo bias of
	// rand() % strlen($chaine).
	$max = strlen($chaine) - 1;
	for($i=0; $i<$longueur; $i++) {
		$string .= $chaine[mt_rand(0, $max)];
	}
	return $string;
}
///////////////////////////////////////////////////////////////////////////////////
?>
| gpl-3.0 |
ReunionDev/reunion | jreunion/src/main/java/org/reunionemu/jreunion/login/LoginConnection.java | 552 | package org.reunionemu.jreunion.login;
import java.io.IOException;
import java.io.Serializable;
import java.nio.channels.SocketChannel;
import org.reunionemu.jreunion.network.PacketConnection;
/**
 * Packet connection bound to the {@link LoginServer}.
 */
public class LoginConnection extends PacketConnection<LoginConnection> {

	public LoginConnection(LoginServer loginServer,
			SocketChannel socketChannel) throws IOException {
		super(loginServer, socketChannel);
	}

	@Override
	public void onPacket(Serializable obj) {
		// NOTE(review): 'server' is fetched but never used and the incoming
		// packet is silently dropped -- this handler appears unfinished.
		LoginServer server = (LoginServer) getNetworkThread();
	}
}
| gpl-3.0 |
crypti/cryptikit | spec/lib/account_spec.rb | 53 | require 'account'
RSpec.describe Account do
end
| gpl-3.0 |
Ciuco/OptionsOracle | OptionsOracle Updater/Properties/Resources.Designer.cs | 2791 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.18052
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace OptionsOracle.Properties {
using System;
/// <summary>
/// A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class Resources {
private static global::System.Resources.ResourceManager resourceMan;
private static global::System.Globalization.CultureInfo resourceCulture;
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal Resources() {
}
/// <summary>
/// Returns the cached ResourceManager instance used by this class.
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Resources.ResourceManager ResourceManager {
get {
if (object.ReferenceEquals(resourceMan, null)) {
global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("OptionsOracle.Properties.Resources", typeof(Resources).Assembly);
resourceMan = temp;
}
return resourceMan;
}
}
/// <summary>
/// Overrides the current thread's CurrentUICulture property for all
/// resource lookups using this strongly typed resource class.
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Globalization.CultureInfo Culture {
get {
return resourceCulture;
}
set {
resourceCulture = value;
}
}
}
}
| gpl-3.0 |
philip98/CaveQuest | src/de/philip/net/server/ServerThreadSend.java | 778 | package de.philip.net.server;
import java.io.DataOutputStream;
import java.io.IOException;
import de.philip.net.common.PacketPlayer;
import de.philip.util.Logger;
/**
 * Broadcast thread: every 500 ms, sends a {@link PacketPlayer} snapshot of
 * {@code Server.players} to every connected client. Per-client IOExceptions
 * are logged and skipped so one broken socket does not stop the broadcast.
 */
public class ServerThreadSend extends Thread {

    public ServerThreadSend ()
    {
    }

    @Override
    public void run() {
        while (true) {
            for (int uid : Server.connections.keySet()) {
                ServerClient client = Server.connections.get(uid);
                try {
                    // NOTE(review): a new DataOutputStream is wrapped around the
                    // socket stream on every iteration and never closed --
                    // presumably intentional (closing it would close the socket),
                    // but the wrapper could be created once per client.
                    DataOutputStream data = new DataOutputStream(client.getSocket().getOutputStream());
                    PacketPlayer packet = new PacketPlayer();
                    packet.send(uid, Server.players, data);
                } catch (IOException e) {
                    Logger.err(e.getMessage());
                }
            }
            try {
                sleep(500);
            } catch(InterruptedException e) {
                // NOTE(review): interruption is logged but the loop keeps
                // running, so this thread cannot be stopped via interrupt().
                Logger.err(e.getMessage());
            }
        }
    }
}
| gpl-3.0 |
parpg/parpg | parpg/sounds.py | 2435 | # This file is part of PARPG.
# PARPG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# PARPG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PARPG. If not, see <http://www.gnu.org/licenses/>.
# sounds.py holds the object code to play sounds and sound effects
from fife import fife
class SoundEngine:
    def __init__(self, fife_engine):
        """Initialise the SoundEngine instance
        @type fife_engine: fine.Engine
        @param fife_engine: Instance of the Fife engine
        @return: None"""
        self.engine = fife_engine
        self.sound_engine = self.engine.getSoundManager()
        #self.sound_engine.init()
        # set up the sound
        self.music = self.sound_engine.createEmitter()
        # music_on: whether playback is currently active;
        # music_init: whether a sound clip has been loaded at least once.
        self.music_on = False
        self.music_init = False

    def playMusic(self, sfile=None):
        """Play music, with the given file if passed
        @type sfile: string
        @param sfile: Filename to play
        @return: None"""
        if sfile is not None:
            # setup the new sound
            sound = self.engine.getSoundClipManager().load(sfile)
            self.music.setSoundClip(sound)
            self.music.setLooping(True)
            self.music_init = True
        self.music.play()
        self.music_on = True

    def pauseMusic(self):
        """Stops current playback
        @return: None"""
        # Pausing only makes sense once a clip has been loaded.
        if self.music_init:
            self.music.pause()
            self.music_on = False

    def toggleMusic(self):
        """Toggle status of music, either on or off
        @return: None"""
        # Idiom fix: replaced the `== False and == True` comparisons with
        # direct truth tests; behavior is unchanged.
        if not self.music_on and self.music_init:
            self.playMusic()
        else:
            self.pauseMusic()

    def setVolume(self, volume):
        """Set the volume of the music
        @type volume: integer
        @param volume: The volume wanted, 0 to 100
        @return: None"""
        # FIFE expects a 0.0-1.0 gain, the public API takes 0-100.
        self.sound_engine.setVolume(0.01 * volume)
| gpl-3.0 |
active-citizen/shop.1c | www/local/libs/classes/CAGShop/CDB/CDB.class.php | 6763 | <?php
namespace DB;
require_once(realpath(__DIR__."/..")."/CAGShop.class.php");
use AGShop;
class CDB extends \AGShop\CAGShop{
var $debug = false;
    /**
        Executes the queries contained in an SQL file.
        @param $sFilename - path to the SQL file (original doc was garbled)
        @return bool - true on success, false when the file is missing or a query failed
    */
    function runSqlFile($sFilename){
        global $DB;
        if(!file_exists($sFilename)){
            $this->addError("File '$sFilename' is not exists");
            return false;
        }
        $arErrors = $DB->RunSqlBatch($sFilename);
        if($arErrors){
            $this->addError($arErrors);
            return false;
        }
        return true;
    }
/**
Поиск одной записи по имени поля
@param $sTableName - имя таблицы
@param $arFilter = ["имя поля"=>"значение",...]
@return ["имя поля"=>"значение",...]
*/
function searchOne($sTableName, $arFilter = [], $arSelect = []){
global $DB;
$sWhere = "1";
foreach($arFilter as $sKey=>$sValue){
$sWhere .= " AND `".$DB->ForSql($sKey)."`='"
.$DB->ForSql($sValue)."'";
}
if($arSelect)
$sSelect = "`".implode("`,`",$arSelect)."`";
else
$sSelect = "*";
$sQuery = "
SELECT
$sSelect
FROM
`$sTableName`
WHERE
$sWhere
LIMIT
1
";
return array_pop($this->sqlSelect($sQuery));
}
/**
Поиск всех записей по условию
@param $sTableName - имя таблицы
@param $arFilter = ["имя поля"=>"значение",...]
@return [["имя поля"=>"значение","имя поля"=>"значение",...],...]
*/
function searchAll($sTableName, $arFilter = [], $arSelect = []){
global $DB;
$sWhere = "1";
foreach($arFilter as $sKey=>$sValue){
$sWhere .= " AND `".$DB->ForSql($sKey)."`='"
.$DB->ForSql($sValue)."'";
}
if($arSelect)
$sSelect = "`".implode("`,`",$arSelect)."`";
else
$sSelect = "*";
$sQuery = "
SELECT
$sSelect
FROM
`$sTableName`
WHERE
$sWhere
";
return $this->sqlSelect($sQuery);
}
/**
Удаление всех записей по условию
@param $sTableName - имя таблицы
@param $arFilter = ["имя поля"=>"значение",...]
@return true
*/
function delete($sTableName, $arFilter = []){
global $DB;
$sWhere = "1";
foreach($arFilter as $sKey=>$sValue){
$sWhere .= " AND `".$DB->ForSql($sKey)."`='"
.$DB->ForSql($sValue)."'";
}
$sQuery = "DELETE FROM `$sTableName` WHERE $sWhere";
$this->sqlQuery($sQuery);
return true;
}
/**
Обновление всех записей по условию
@param $sTableName - имя таблицы
@param $arFilter = ["имя поля"=>"значение",...]
@return true
*/
function update($sTableName, $arFilter, $arFields){
global $DB;
$sWhere = "1";
foreach($arFilter as $sKey=>$sValue){
$sWhere .= " AND `".$DB->ForSql($sKey)."`='"
.$DB->ForSql($sValue)."'";
}
foreach($arFilter as $sFirstKey=>$sFirstValue)break;
$sSet = "`$sFirstKey`=`$sFirstKey`";
foreach($arFields as $sKey=>$sValue){
$sSet .= " , `".$DB->ForSql($sKey)."`='"
.$DB->ForSql($sValue)."'";
}
$sQuery = "UPDATE `$sTableName` SET $sSet WHERE $sWhere";
$this->sqlQuery($sQuery);
return true;
}
/**
Вставка записи в таблицу
*/
function insert($sTable, $arFields){
global $DB;
$arKeys = [];
$arValues = [];
foreach($arFields as $sKey=>$sValue){
$arKeys[] = $DB->ForSql($sKey);
$arValues[] = $DB->ForSql($sValue);
}
$sFields = "`".implode("`,`",$arKeys)."`";
$sValues = "'".implode("','",$arValues)."'";
$sQuery = "INSERT INTO `"
.$DB->ForSql($sTable)."`($sFields) VALUES($sValues)";
if(!$this->sqlQuery($sQuery)){
return false;
}
return $DB->LastID();
}
/**
Выполнение SELECT-запроса
@param $sQuery - текст запроса
@param $nLimit - максимальное число возвражаемых записей. Игнорирует
LIMIT из SQL-заапроса
@return массив результата запроса
*/
function sqlSelect($sQuery, $nLimit=100){
if(!$resQuery = $this->sqlQuery($sQuery))return false;
$nCounter = 0;
$arReqult = [];
while($arQuery = $resQuery->Fetch()){
$nCounter++;
$arResult[] = $arQuery;
if($nCounter>=$nLimit)break;
}
return $arResult;
}
/**
*/
function sqlQuery($sQuery){
global $DB;
if($this->debug){
echo $sQuery;
}
if(!$resQuery = $DB->Query($sQuery, true)){
$this->addError($DB->db_Conn->error);
$this->addError("SQL query error: $sQuery");
return false;
}
return $resQuery;
}
/**
*/
function ForSql($sText){
global $DB;
return $DB->ForSql($sText);
}
}
| gpl-3.0 |
Altai-man/Kasatou-sources | Kasatou/user_urls.py | 544 | from Layers import views
from django.conf.urls import patterns, url
# URL routes for the user-account pages (auth, profile, invites,
# registration).  Route names are referenced elsewhere via reverse()/{% url %}.
user_patterns = patterns(
    '',
    url(r'^login/$', views.Login.as_view(), name='login'),
    url(r'^logout/$', views.user_logout, name='logout'),
    url(r'^profile/$', views.Profile.as_view(), name='profile'),
    # Invites.
    url(r'^invite/$', views.invite, name='invite'),
    # Urls for auth pages.  Both registration routes hit the same view:
    # 'register_get' carries an invite code in the URL, 'register_accept'
    # handles the plain form submission.
    url(r'^register/(?P<code>.*)/$', views.Register.as_view(), name='register_get'),
    url(r'^register/$', views.Register.as_view(), name='register_accept'),
)
| gpl-3.0 |
Neijwiert/C-C-Renegade-Mission-Scripts | Source/scripts/gmlog.cpp | 4892 | /* Renegade Scripts.dll
Copyright 2013 Tiberian Technologies
This file is part of the Renegade scripts.dll
The Renegade scripts.dll is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version. See the file COPYING for more details.
In addition, an exemption is given to allow Run Time Dynamic Linking of this code with any closed source module that does not contain code covered by this licence.
Only the source code to the module(s) containing the licenced code has to be released.
*/
#include "general.h"
#include "gmlog.h"
#include "engine.h"
// True once Init() has set up the listening socket; guards all other entry points.
bool init = false;
extern bool gamelog;

// One accepted TCP client and its per-connection receive buffer.
struct Connection {
	SOCKET socket;
	char cbuffer[512];   // raw incoming bytes; last byte kept as a permanent NUL guard
	int cbufferFilled;   // bytes buffered that do not yet form a complete '\n'-terminated line
};

// All currently connected clients (heap-allocated, owned by this module).
SimpleDynVecClass<Connection *> Connections;
// The listening (non-blocking) server socket.
SOCKET s;
/**
 * Opens a non-blocking TCP listener on the given port for gamelog clients.
 * A port of 0 disables the service (init stays false).
 *
 * @param port TCP port to listen on, or 0 to disable
 */
void SSGMGameLog::Init(unsigned short port)
{
	if (port != 0)
	{
		WSADATA wsaData;
#pragma warning(suppress: 6031) //warning C6031: return value ignored
		WSAStartup(MAKEWORD(1,1), &wsaData);
		s = socket(AF_INET,SOCK_STREAM,IPPROTO_TCP);
		sockaddr_in SockAddr;
		SockAddr.sin_family = AF_INET;
		SockAddr.sin_addr.s_addr = INADDR_ANY;
		SockAddr.sin_port = htons(port);
		// Non-blocking mode: accept()/recv() in Think() must never stall the game loop.
		u_long i = 1;
		ioctlsocket(s,FIONBIO,&i);
		// NOTE(review): socket()/bind()/listen() results are unchecked — a port
		// conflict leaves init==true with a dead listener; confirm if acceptable.
		bind(s,(sockaddr *)&SockAddr,sizeof(SockAddr));
		listen(s,3);
		init = true;
	}
	else
	{
		init = false;
	}
}
void SSGMGameLog::Shutdown()
{
if (init)
{
for (int i = 0;i < Connections.Count();i++)
{
closesocket(Connections[i]->socket);
}
closesocket(s);
init = false;
}
}
void SSGMGameLog::Think()
{
SOCKET so = accept(s,NULL,NULL);
if (so != INVALID_SOCKET)
{
Connection *c = new Connection;
c->cbufferFilled = 0;
c->socket = so;
c->cbuffer[_countof(c->cbuffer)-1] = '\0';
Connections.Add(c);
}
for (int i = 0; i < Connections.Count(); ++i)
{
int chars_read = recv(Connections[i]->socket, Connections[i]->cbuffer + Connections[i]->cbufferFilled, _countof(Connections[i]->cbuffer)-1 - Connections[i]->cbufferFilled, 0);
if (chars_read == 0 || (chars_read == SOCKET_ERROR && WSAGetLastError() != WSAEWOULDBLOCK)) // Graceful close or error. Note that if the buffer is full, 0 bytes are read, and this is interpreted as a graceful close too!
{
closesocket(Connections[i]->socket);
Connections.Delete(Connections[i]);
}
else if (chars_read > 0) // Data received
{
char* lineStartPos = Connections[i]->cbuffer;
char* endPos = Connections[i]->cbuffer + Connections[i]->cbufferFilled + chars_read;
for (;;)
{
TT_ASSERT(endPos >= lineStartPos);
char* lineEndPos = (char*)memchr(lineStartPos, '\n', endPos - lineStartPos);
if (!lineEndPos) break;
*lineEndPos = '\0';
if (lineEndPos - lineStartPos > 0 && *(lineEndPos-1) == '\r') *(lineEndPos-1) = '\0'; // Remove trailing \r if CRLF line endings are used.
Console_Input(lineStartPos);
lineStartPos = lineEndPos+1;
}
TT_ASSERT(endPos - lineStartPos >= 0);
Connections[i]->cbufferFilled = endPos - lineStartPos;
memmove(Connections[i]->cbuffer, lineStartPos, endPos - lineStartPos);
}
}
}
/**
 * Sends a timestamped, categorised log line to all clients, wire-prefixed
 * with message-type code "000".
 *
 * @param message  log text
 * @param category log category, inserted before the message
 */
void SSGMGameLog::Log_Message(const char *message,const char *category)
{
	if (init)
	{
		char time[256];
		memset(time,0,sizeof(time));
		// 0x3F is '?' — presumably a visible fallback if GetTimeFormat writes
		// nothing; TODO confirm.
		time[0] = 0x3F;
		GetTimeFormat(LOCALE_SYSTEM_DEFAULT,TIME_FORCE24HOURFORMAT,0,"'['HH':'mm':'ss'] '",time,0xFF);
		StringClass str = "000";
		str += time;
		str += category;
		str += " ";
		str += message;
		Send(str.Peek_Buffer());
	}
}
/**
 * Sends a printf-style, timestamped gamelog line to all clients (wire prefix
 * "001"). Only emitted when the global gamelog flag is enabled.
 *
 * @param format printf-style format string, followed by its arguments
 */
void SSGMGameLog::Log_Gamelog(const char *format,...)
{
	if (init && gamelog)
	{
		char time[256];
		memset(time,0,sizeof(time));
		// 0x3F is '?' — presumably a fallback if GetTimeFormat writes nothing.
		time[0] = 0x3F;
		GetTimeFormat(LOCALE_SYSTEM_DEFAULT,TIME_FORCE24HOURFORMAT,0,"'['HH':'mm':'ss'] '",time,0xFF);
		StringClass str = "001";
		str += time;
		StringClass str2;
		va_list arg_list;
		va_start(arg_list,format);
		str2.Format_Args(format,arg_list);
		va_end(arg_list);
		str += str2;
		Send(str.Peek_Buffer());
	}
}
/**
 * Sends a timestamped renlog line to all clients (wire prefix "002").
 *
 * @param message renlog text
 */
void SSGMGameLog::Log_RenLog(const char *message)
{
	if (init)
	{
		char time[256];
		memset(time,0,sizeof(time));
		// 0x3F is '?' — presumably a fallback if GetTimeFormat writes nothing.
		time[0] = 0x3F;
		GetTimeFormat(LOCALE_SYSTEM_DEFAULT,TIME_FORCE24HOURFORMAT,0,"'['HH':'mm':'ss'] '",time,0xFF);
		StringClass str = "002";
		str += time;
		str += message;
		Send(str.Peek_Buffer());
	}
}
void SSGMGameLog::Send_Console(const char *message)
{
if (init)
{
StringClass str = "003";
str += message;
Send(str.Peek_Buffer());
}
}
/**
 * Sends a printf-style line to all clients with a caller-chosen 3-digit
 * wire-prefix code.
 *
 * @param id     message-type code, rendered as "%03d"
 * @param format printf-style format string, followed by its arguments
 */
void SSGMGameLog::Log_Custom(int id,const char *format,...)
{
	if (init)
	{
		StringClass str;
		// NOTE(review): unlike the other senders, the prefix here ends with
		// '\n' before the payload — confirm receivers expect that.
		str.Format("%03d\n",id);
		StringClass str2;
		va_list arg_list;
		va_start(arg_list,format);
		str2.Format_Args(format,arg_list);
		va_end(arg_list);
		str += str2;
		Send(str.Peek_Buffer());
	}
}
/**
 * Transmits the given NUL-terminated string — including its terminator —
 * to every connected client socket.
 *
 * @param data NUL-terminated payload
 */
void SSGMGameLog::Send(const char *data)
{
	// The payload length is loop-invariant; compute it once.
	int length = (int)strlen(data) + 1;
	for (int index = 0; index < Connections.Count(); ++index)
	{
		send(Connections[index]->socket, data, length, 0);
	}
}
| gpl-3.0 |
Keltek/mucommander | src/main/com/mucommander/commons/file/impl/lst/LstArchiveEntryIterator.java | 3439 | package com.mucommander.commons.file.impl.lst;
import com.mucommander.commons.file.ArchiveEntry;
import com.mucommander.commons.file.ArchiveEntryIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.text.SimpleDateFormat;
import java.util.StringTokenizer;
/**
 * An <code>ArchiveEntryIterator</code> that iterates through an LST archive.
 *
 * @author Maxence Bernard
 */
class LstArchiveEntryIterator implements ArchiveEntryIterator {
    private static final Logger LOGGER = LoggerFactory.getLogger(LstArchiveEntryIterator.class);

    /** Allows to read the LST archive line by line */
    private BufferedReader br;

    /** Parses LST-formatted dates (SimpleDateFormat is not thread-safe, but
     * this instance is confined to this iterator). */
    private SimpleDateFormat lstDateFormat = new SimpleDateFormat("yyyy.MM.dd HH:mm.ss");

    /** The next entry to be returned by #nextEntry(), null if there is no more entry.
     * NOTE(review): never assigned or read in this class — confirm before removing. */
    private ArchiveEntry nextEntry;

    /** Base folder of all entries */
    private String baseFolder;

    /** Current directory, used for parsing the LST file */
    private String currentDir = "";

    /**
     * Creates a new <code>LstArchiveEntryIterator</code> that parses the given LST <code>InputStream</code>.
     * The <code>InputStream</code> will be closed by {@link #close()}.
     *
     * @param in an LST archive <code>InputStream</code>
     * @throws IOException if an I/O error occurred while initializing this iterator
     */
    LstArchiveEntryIterator(InputStream in) throws IOException {
        br = new BufferedReader(new InputStreamReader(in));

        // The first line of an LST file is the base folder of all entries.
        baseFolder = br.readLine();
        if(baseFolder==null)
            // Fix: carry a diagnostic message instead of a bare IOException.
            throw new IOException("Invalid LST archive: missing base folder line");
    }

    /**
     * Reads the next entry and returns an {@link ArchiveEntry} representing it.
     *
     * @return an ArchiveEntry representing the entry, null when the archive is exhausted
     * @throws IOException if an error occurred
     */
    ArchiveEntry getNextEntry() throws IOException {
        String line = br.readLine();
        if(line==null)
            return null;

        try {
            // Line format: path TAB size TAB date TAB time
            StringTokenizer st = new StringTokenizer(line, "\t");

            String name = st.nextToken().replace('\\', '/');
            long size = Long.parseLong(st.nextToken());
            long date = lstDateFormat.parse((st.nextToken()+" "+st.nextToken())).getTime();

            String path;
            boolean isDirectory;

            // A trailing '/' denotes a directory; file entries are relative
            // to the most recently seen directory.
            if(name.endsWith("/")) {
                isDirectory = true;
                currentDir = name;
                path = currentDir;
            }
            else {
                isDirectory = false;
                path = currentDir+name;
            }

            return new LstArchiveEntry(path, isDirectory, date, size, baseFolder);
        }
        catch(Exception e) { // Catches exceptions thrown by StringTokenizer and SimpleDateFormat
            LOGGER.info("Exception caught while parsing LST file", e);

            // Fix: preserve the parse failure as the cause and identify the
            // offending line, instead of throwing a bare IOException.
            throw new IOException("Failed to parse LST entry line: "+line, e);
        }
    }

    /////////////////////////////////////////
    // ArchiveEntryIterator implementation //
    /////////////////////////////////////////

    public ArchiveEntry nextEntry() throws IOException {
        // Return the next entry, if any
        return getNextEntry();
    }

    public void close() throws IOException {
        br.close();
    }
}
| gpl-3.0 |
snavaneethan1/jaffa-framework | jaffa-components-printing/source/java/org/jaffa/modules/printing/components/printerdefinitionfinder/ui/PrinterDefinitionFinderComponent.java | 29296 | // .//GEN-BEGIN:_1_be
/******************************************************
* Code Generated From JAFFA Framework Default Pattern
*
* The JAFFA Project can be found at http://jaffa.sourceforge.net
* and is available under the Lesser GNU Public License
******************************************************/
package org.jaffa.modules.printing.components.printerdefinitionfinder.ui;
import java.util.*;
import org.apache.log4j.Logger;
import org.jaffa.presentation.portlet.component.Component;
import org.jaffa.presentation.portlet.FormKey;
import org.jaffa.middleware.Factory;
import org.jaffa.datatypes.*;
import org.jaffa.metadata.*;
import org.jaffa.components.finder.*;
import org.jaffa.components.maint.*;
import org.jaffa.exceptions.ApplicationExceptions;
import org.jaffa.exceptions.FrameworkException;
import org.jaffa.components.codehelper.ICodeHelper;
import org.jaffa.components.codehelper.dto.*;
import org.jaffa.modules.printing.components.printerdefinitionfinder.IPrinterDefinitionFinder;
import org.jaffa.modules.printing.components.printerdefinitionfinder.dto.PrinterDefinitionFinderInDto;
import org.jaffa.modules.printing.components.printerdefinitionfinder.dto.PrinterDefinitionFinderOutDto;
import org.jaffa.modules.printing.domain.PrinterDefinitionMeta;
import org.jaffa.modules.printing.components.printerdefinitionmaintenance.ui.PrinterDefinitionMaintenanceComponent;
import org.jaffa.modules.printing.components.printerdefinitionviewer.ui.PrinterDefinitionViewerComponent;
import org.jaffa.modules.printing.components.printerdefinitionmaintenance.ui.PrinterDefinitionMaintenanceComponent;
import org.jaffa.modules.printing.components.printerdefinitionmaintenance.ui.PrinterDefinitionMaintenanceComponent;
// .//GEN-END:_1_be
// Add additional imports//GEN-FIRST:_imports
import org.jaffa.session.ContextManagerFactory;
// .//GEN-LAST:_imports
// .//GEN-BEGIN:_2_be
/** The controller for the PrinterDefinitionFinder.
*/
public class PrinterDefinitionFinderComponent extends FinderComponent2 {
private static Logger log = Logger.getLogger(PrinterDefinitionFinderComponent.class);
// Criteria captured from the finder screen: each "<field>" holds the entered
// value and "<field>Dd" holds the relational operator chosen in the dropdown
// (defaults to equals); both are combined in doInquiry().
private String m_printerId = null;
private String m_printerIdDd = CriteriaField.RELATIONAL_EQUALS;
private String m_description = null;
private String m_descriptionDd = CriteriaField.RELATIONAL_EQUALS;
private String m_siteCode = null;
private String m_siteCodeDd = CriteriaField.RELATIONAL_EQUALS;
private String m_locationCode = null;
private String m_locationCodeDd = CriteriaField.RELATIONAL_EQUALS;
private String m_realPrinterName = null;
private String m_realPrinterNameDd = CriteriaField.RELATIONAL_EQUALS;
private String m_outputType = null;
private String m_outputTypeDd = CriteriaField.RELATIONAL_EQUALS;
// Code-helper data backing the OutputType criteria field (see getOutputTypeCodes()).
private CodeHelperOutElementDto m_outputTypeCodes = null;
private String m_scaleToPageSize = null;
private String m_scaleToPageSizeDd = CriteriaField.RELATIONAL_EQUALS;
// Code-helper data for ScaleToPageSize; cleared in quit().
private CodeHelperOutElementDto m_scaleToPageSizeCodes = null;
// NOTE(review): never read or written in the visible code — confirm before removing.
private String[] scaleToPageSizesArray;
private String m_remote = null;
// Finder transaction, created lazily in doInquiry() and destroyed in quit().
private IPrinterDefinitionFinder m_tx = null;
// Child maintenance components and their listeners, used to refresh the
// results screen after create/update/delete; all released in quit().
private PrinterDefinitionMaintenanceComponent m_createComponent = null;
private ICreateListener m_createListener = null;
private PrinterDefinitionMaintenanceComponent m_updateComponent = null;
private IUpdateListener m_updateListener = null;
private PrinterDefinitionMaintenanceComponent m_deleteComponent = null;
private IDeleteListener m_deleteListener = null;
private ICodeHelper m_codeHelperTx = null;
/** Creates the finder component, defaulting the results sort to PrinterId. */
public PrinterDefinitionFinderComponent() {
    super();
    super.setSortDropDown("PrinterId");
}
/** Returns the Struts GlobalForward for the Criteria screen.
 * @return the Struts GlobalForward for the Criteria screen.
 */
protected String getCriteriaFormName() {
    // Forward name resolved through the Struts configuration.
    return "jaffa_printing_printerDefinitionFinderCriteria";
}
/** Returns the Struts GlobalForward for the Results screen.
 * @return the Struts GlobalForward for the Results screen.
 */
protected String getResultsFormName() {
    // Forward name resolved through the Struts configuration.
    return "jaffa_printing_printerDefinitionFinderResults";
}
/** Returns the Struts GlobalForward for the ConsolidatedCriteriaAndResults screen.
 * @return the Struts GlobalForward for the ConsolidatedCriteriaAndResults screen.
 */
protected String getConsolidatedCriteriaAndResultsFormName() {
    // Forward name resolved through the Struts configuration.
    return "jaffa_printing_printerDefinitionFinderConsolidatedCriteriaAndResults";
}
/** Returns the Struts GlobalForward for the screen displaying the results as an Excel spreadsheet.
 * @return the Struts GlobalForward for the screen displaying the results as an Excel spreadsheet.
 */
protected String getExcelFormName() {
    // Forward name resolved through the Struts configuration.
    return "jaffa_printing_printerDefinitionFinderExcelResults";
}
/** Returns the Struts GlobalForward for the screen displaying the results in XML format.
 * @return the Struts GlobalForward for the screen displaying the results in XML format.
 */
protected String getXmlFormName() {
    // Forward name resolved through the Struts configuration.
    return "jaffa_printing_printerDefinitionFinderXmlResults";
}
// .//GEN-END:_2_be
// .//GEN-BEGIN:_quit_1_be
/** This should be invoked when done with the component.
 * Releases the finder transaction, all child maintenance components,
 * cached code-helper data, and finally delegates to the superclass.
 */
public void quit() {
    // .//GEN-END:_quit_1_be
    // Add custom code before processing the method //GEN-FIRST:_quit_1
    m_scaleToPageSizeCodes = null;
    // .//GEN-LAST:_quit_1
    // .//GEN-BEGIN:_quit_2_be
    if (m_tx != null) {
        m_tx.destroy();
        m_tx = null;
    }
    // Quit child components before dropping their listeners.
    if (m_createComponent != null) {
        m_createComponent.quit();
        m_createComponent = null;
    }
    m_createListener = null;
    if (m_updateComponent != null) {
        m_updateComponent.quit();
        m_updateComponent = null;
    }
    m_updateListener = null;
    if (m_deleteComponent != null) {
        m_deleteComponent.quit();
        m_deleteComponent = null;
    }
    m_deleteListener = null;
    if (m_codeHelperTx != null) {
        m_codeHelperTx.destroy();
        m_codeHelperTx = null;
    }
    m_outputTypeCodes = null;
    super.quit();
}
// .//GEN-END:_quit_2_be
// .//GEN-BEGIN:printerId_1_be
// PrinterId criterion: value plus relational operator ("Dd") used by doInquiry().
/** Getter for property printerId.
 * @return Value of property printerId.
 */
public String getPrinterId() {
    return m_printerId;
}

/** Setter for property printerId.
 * @param printerId New value of property printerId.
 */
public void setPrinterId(String printerId) {
    m_printerId = printerId;
}

/** Getter for property printerIdDd.
 * @return Value of property printerIdDd.
 */
public String getPrinterIdDd() {
    return m_printerIdDd;
}

/** Setter for property printerIdDd.
 * @param printerIdDd New value of property printerIdDd.
 */
public void setPrinterIdDd(String printerIdDd) {
    m_printerIdDd = printerIdDd;
}
// .//GEN-END:printerId_1_be
// .//GEN-BEGIN:description_1_be
// Description criterion: value plus relational operator ("Dd") used by doInquiry().
/** Getter for property description.
 * @return Value of property description.
 */
public String getDescription() {
    return m_description;
}

/** Setter for property description.
 * @param description New value of property description.
 */
public void setDescription(String description) {
    m_description = description;
}

/** Getter for property descriptionDd.
 * @return Value of property descriptionDd.
 */
public String getDescriptionDd() {
    return m_descriptionDd;
}

/** Setter for property descriptionDd.
 * @param descriptionDd New value of property descriptionDd.
 */
public void setDescriptionDd(String descriptionDd) {
    m_descriptionDd = descriptionDd;
}
// .//GEN-END:description_1_be
// .//GEN-BEGIN:siteCode_1_be
// SiteCode criterion: value plus relational operator ("Dd") used by doInquiry().
/** Getter for property siteCode.
 * @return Value of property siteCode.
 */
public String getSiteCode() {
    return m_siteCode;
}

/** Setter for property siteCode.
 * @param siteCode New value of property siteCode.
 */
public void setSiteCode(String siteCode) {
    m_siteCode = siteCode;
}

/** Getter for property siteCodeDd.
 * @return Value of property siteCodeDd.
 */
public String getSiteCodeDd() {
    return m_siteCodeDd;
}

/** Setter for property siteCodeDd.
 * @param siteCodeDd New value of property siteCodeDd.
 */
public void setSiteCodeDd(String siteCodeDd) {
    m_siteCodeDd = siteCodeDd;
}
// .//GEN-END:siteCode_1_be
// .//GEN-BEGIN:locationCode_1_be
// LocationCode criterion: value plus relational operator ("Dd") used by doInquiry().
/** Getter for property locationCode.
 * @return Value of property locationCode.
 */
public String getLocationCode() {
    return m_locationCode;
}

/** Setter for property locationCode.
 * @param locationCode New value of property locationCode.
 */
public void setLocationCode(String locationCode) {
    m_locationCode = locationCode;
}

/** Getter for property locationCodeDd.
 * @return Value of property locationCodeDd.
 */
public String getLocationCodeDd() {
    return m_locationCodeDd;
}

/** Setter for property locationCodeDd.
 * @param locationCodeDd New value of property locationCodeDd.
 */
public void setLocationCodeDd(String locationCodeDd) {
    m_locationCodeDd = locationCodeDd;
}
// .//GEN-END:locationCode_1_be
// .//GEN-BEGIN:realPrinterName_1_be
// RealPrinterName criterion: value plus relational operator ("Dd") used by doInquiry().
/** Getter for property realPrinterName.
 * @return Value of property realPrinterName.
 */
public String getRealPrinterName() {
    return m_realPrinterName;
}

/** Setter for property realPrinterName.
 * @param realPrinterName New value of property realPrinterName.
 */
public void setRealPrinterName(String realPrinterName) {
    m_realPrinterName = realPrinterName;
}

/** Getter for property realPrinterNameDd.
 * @return Value of property realPrinterNameDd.
 */
public String getRealPrinterNameDd() {
    return m_realPrinterNameDd;
}

/** Setter for property realPrinterNameDd.
 * @param realPrinterNameDd New value of property realPrinterNameDd.
 */
public void setRealPrinterNameDd(String realPrinterNameDd) {
    m_realPrinterNameDd = realPrinterNameDd;
}
// .//GEN-END:realPrinterName_1_be
// .//GEN-BEGIN:outputType_1_be
// OutputType criterion: value, relational operator ("Dd"), and code-helper data.
/** Getter for property outputType.
 * @return Value of property outputType.
 */
public String getOutputType() {
    return m_outputType;
}

/** Setter for property outputType.
 * @param outputType New value of property outputType.
 */
public void setOutputType(String outputType) {
    m_outputType = outputType;
}

/** Getter for property outputTypeDd.
 * @return Value of property outputTypeDd.
 */
public String getOutputTypeDd() {
    return m_outputTypeDd;
}

/** Setter for property outputTypeDd.
 * @param outputTypeDd New value of property outputTypeDd.
 */
public void setOutputTypeDd(String outputTypeDd) {
    m_outputTypeDd = outputTypeDd;
}

/** Getter for property outputTypeCodes.
 * @return Value of property outputTypeCodes.
 */
public CodeHelperOutElementDto getOutputTypeCodes() {
    return m_outputTypeCodes;
}
// .//GEN-END:outputType_1_be
// .//GEN-BEGIN:scaleToPageSize_1_be
// ScaleToPageSize criterion: value plus relational operator ("Dd") used by doInquiry().
/** Getter for property scaleToPageSize.
 * @return Value of property scaleToPageSize.
 */
public String getScaleToPageSize() {
    return m_scaleToPageSize;
}

/** Setter for property scaleToPageSize.
 * @param scaleToPageSize New value of property scaleToPageSize.
 */
public void setScaleToPageSize(String scaleToPageSize) {
    m_scaleToPageSize = scaleToPageSize;
}

/** Getter for property scaleToPageSizeDd.
 * @return Value of property scaleToPageSizeDd.
 */
public String getScaleToPageSizeDd() {
    return m_scaleToPageSizeDd;
}

/** Setter for property scaleToPageSizeDd.
 * @param scaleToPageSizeDd New value of property scaleToPageSizeDd.
 */
public void setScaleToPageSizeDd(String scaleToPageSizeDd) {
    m_scaleToPageSizeDd = scaleToPageSizeDd;
}
// .//GEN-END:scaleToPageSize_1_be
// .//GEN-BEGIN:remote_1_be
// Remote criterion: boolean-valued, always matched with equals in doInquiry().
/** Getter for property remote.
 * @return Value of property remote.
 */
public String getRemote() {
    return m_remote;
}

/** Setter for property remote.
 * @param remote New value of property remote.
 */
public void setRemote(String remote) {
    m_remote = remote;
}
// .//GEN-END:remote_1_be
// .//GEN-BEGIN:_doInquiry_1_be
/** This performs the actual query to obtain the FinderOutDto.
 * A criteria field is added to the input DTO only when a value was entered,
 * or when the chosen operator is IS NULL / IS NOT NULL (which need no value).
 * @throws ApplicationExceptions This will be thrown in case any invalid data has been set.
 * @throws FrameworkException Indicates some system error.
 * @return the FinderOutDto object.
 */
protected FinderOutDto doInquiry() throws ApplicationExceptions, FrameworkException {
    ApplicationExceptions appExps = null;
    PrinterDefinitionFinderInDto inputDto = new PrinterDefinitionFinderInDto();
    // .//GEN-END:_doInquiry_1_be
    // Add custom code before processing the method //GEN-FIRST:_doInquiry_1
    // .//GEN-LAST:_doInquiry_1
    // .//GEN-BEGIN:_doInquiry_2_be
    inputDto.setMaxRecords(getMaxRecords());
    if (getPrinterId() != null
        || CriteriaField.RELATIONAL_IS_NULL.equals( getPrinterIdDd() )
        || CriteriaField.RELATIONAL_IS_NOT_NULL.equals( getPrinterIdDd() ) )
        inputDto.setPrinterId(StringCriteriaField.getStringCriteriaField(getPrinterIdDd(), getPrinterId(), null));
    if (getDescription() != null
        || CriteriaField.RELATIONAL_IS_NULL.equals( getDescriptionDd() )
        || CriteriaField.RELATIONAL_IS_NOT_NULL.equals( getDescriptionDd() ) )
        inputDto.setDescription(StringCriteriaField.getStringCriteriaField(getDescriptionDd(), getDescription(), null));
    if (getSiteCode() != null
        || CriteriaField.RELATIONAL_IS_NULL.equals( getSiteCodeDd() )
        || CriteriaField.RELATIONAL_IS_NOT_NULL.equals( getSiteCodeDd() ) )
        inputDto.setSiteCode(StringCriteriaField.getStringCriteriaField(getSiteCodeDd(), getSiteCode(), null));
    if (getLocationCode() != null
        || CriteriaField.RELATIONAL_IS_NULL.equals( getLocationCodeDd() )
        || CriteriaField.RELATIONAL_IS_NOT_NULL.equals( getLocationCodeDd() ) )
        inputDto.setLocationCode(StringCriteriaField.getStringCriteriaField(getLocationCodeDd(), getLocationCode(), null));
    if (getRealPrinterName() != null
        || CriteriaField.RELATIONAL_IS_NULL.equals( getRealPrinterNameDd() )
        || CriteriaField.RELATIONAL_IS_NOT_NULL.equals( getRealPrinterNameDd() ) )
        inputDto.setRealPrinterName(StringCriteriaField.getStringCriteriaField(getRealPrinterNameDd(), getRealPrinterName(), null));
    if (getOutputType() != null
        || CriteriaField.RELATIONAL_IS_NULL.equals( getOutputTypeDd() )
        || CriteriaField.RELATIONAL_IS_NOT_NULL.equals( getOutputTypeDd() ) )
        inputDto.setOutputType(StringCriteriaField.getStringCriteriaField(getOutputTypeDd(), getOutputType(), null));
    if (getScaleToPageSize() != null
        || CriteriaField.RELATIONAL_IS_NULL.equals( getScaleToPageSizeDd() )
        || CriteriaField.RELATIONAL_IS_NOT_NULL.equals( getScaleToPageSizeDd() ) )
        inputDto.setScaleToPageSize(StringCriteriaField.getStringCriteriaField(getScaleToPageSizeDd(), getScaleToPageSize(), null));
    // Remote has no operator dropdown — always matched with equals.
    if (getRemote() != null)
        inputDto.setRemote(BooleanCriteriaField.getBooleanCriteriaField(CriteriaField.RELATIONAL_EQUALS, getRemote(), null));

    // throw ApplicationExceptions, if any parsing errors occured
    // NOTE(review): appExps is never populated above; this guard is a
    // generated-pattern placeholder for custom parsing code.
    if (appExps != null && appExps.size() > 0)
        throw appExps;

    inputDto.setHeaderDto(getHeaderDto());
    addSortCriteria(inputDto);

    // perform the inquiry
    if (m_tx == null)
        m_tx = (IPrinterDefinitionFinder) Factory.createObject(IPrinterDefinitionFinder.class);
    FinderOutDto finderOutDto = m_tx.find(inputDto);
    // .//GEN-END:_doInquiry_2_be
    // Add custom code after the Transaction //GEN-FIRST:_doInquiry_2
    // .//GEN-LAST:_doInquiry_2
    // .//GEN-BEGIN:_doInquiry_3_be
    return finderOutDto;
}
// .//GEN-END:_doInquiry_3_be
// .//GEN-BEGIN:_createObject_1_be
/** Calls the Jaffa.Printing.PrinterDefinitionMaintenance component for creating a new PrinterDefinition object.
 * The Create screen will return to the Criteria screen afterwards.
 * @throws ApplicationExceptions This will be thrown in case any invalid data has been set.
 * @throws FrameworkException Indicates some system error.
 * @return The FormKey for the Create screen.
 */
public FormKey createFromCriteria() throws ApplicationExceptions, FrameworkException {
    return createObject(getCriteriaFormKey());
}
/** Calls the Jaffa.Printing.PrinterDefinitionMaintenance component for creating a new PrinterDefinition object.
 * The Create screen will return to the Results screen afterwards.
 * @throws ApplicationExceptions This will be thrown in case any invalid data has been set.
 * @throws FrameworkException Indicates some system error.
 * @return The FormKey for the Create screen.
 */
public FormKey createFromResults() throws ApplicationExceptions, FrameworkException {
    return createObject(getResultsFormKey());
}
/** Calls the Jaffa.Printing.PrinterDefinitionMaintenance component for creating a new PrinterDefinition object.
 * @param formKey The FormKey object for the screen invoking this method
 * @throws ApplicationExceptions This will be thrown in case any invalid data has been set.
 * @throws FrameworkException Indicates some system error.
 * @return The FormKey for the Create screen.
 */
public FormKey createObject(FormKey formKey) throws ApplicationExceptions, FrameworkException {
    // Reuse the child component across invocations while it is still active.
    if (m_createComponent == null || !m_createComponent.isActive())
        m_createComponent = (PrinterDefinitionMaintenanceComponent) run("Jaffa.Printing.PrinterDefinitionMaintenance");
    m_createComponent.setReturnToFormKey(formKey);

    // Add the Listener only if a search has been done
    if (getFinderOutDto() != null)
        addListeners(m_createComponent);

    if (m_createComponent instanceof IMaintComponent)
        ((IMaintComponent) m_createComponent).setMode(IMaintComponent.MODE_CREATE);
    // .//GEN-END:_createObject_1_be
    // Add custom code before invoking the component //GEN-FIRST:_createObject_1
    // .//GEN-LAST:_createObject_1
    // .//GEN-BEGIN:_createObject_2_be
    return m_createComponent.display();
}
/** Lazily builds the listener that re-runs the inquiry after a Create,
 * so the Results screen reflects the new record. */
private ICreateListener getCreateListener() {
    if (m_createListener == null) {
        m_createListener = new ICreateListener() {
            public void createDone(EventObject source) {
                try {
                    // .//GEN-END:_createObject_2_be
                    // Add custom code //GEN-FIRST:_createObject_2
                    // .//GEN-LAST:_createObject_2
                    // .//GEN-BEGIN:_createObject_3_be
                    performInquiry();
                } catch (Exception e) {
                    // Refresh failure is non-fatal: log and keep the stale results.
                    log.warn("Error in refreshing the Results screen after the Create", e);
                }
            }
        };
    }
    return m_createListener;
}
// .//GEN-END:_createObject_3_be
// .//GEN-BEGIN:_viewObject_1_be
/** Calls the Jaffa.Printing.PrinterDefinitionViewer component for viewing the PrinterDefinition object.
 * The viewer is opened stand-alone: it closes its browser window on return.
 * @throws ApplicationExceptions This will be thrown in case any invalid data has been set.
 * @throws FrameworkException Indicates some system error.
 * @return The FormKey for the View screen.
 */
public FormKey viewObject(java.lang.String printerId) throws ApplicationExceptions, FrameworkException {
    PrinterDefinitionViewerComponent viewComponent = (PrinterDefinitionViewerComponent) run("Jaffa.Printing.PrinterDefinitionViewer");
    viewComponent.setReturnToFormKey(FormKey.getCloseBrowserFormKey());
    viewComponent.setPrinterId(printerId);
    // .//GEN-END:_viewObject_1_be
    // Add custom code before invoking the component //GEN-FIRST:_viewObject_1
    // .//GEN-LAST:_viewObject_1
    // .//GEN-BEGIN:_viewObject_2_be
    return viewComponent.display();
}
// .//GEN-END:_viewObject_2_be
// .//GEN-BEGIN:_updateObject_1_be
/** Calls the Jaffa.Printing.PrinterDefinitionMaintenance component for updating the PrinterDefinition object.
 * Returns to the Results screen when done.
 * @throws ApplicationExceptions This will be thrown in case any invalid data has been set.
 * @throws FrameworkException Indicates some system error.
 * @return The FormKey for the Update screen.
 */
public FormKey updateObject(java.lang.String printerId) throws ApplicationExceptions, FrameworkException {
    // Reuse the child component across invocations while it is still active.
    if (m_updateComponent == null || !m_updateComponent.isActive()) {
        m_updateComponent = (PrinterDefinitionMaintenanceComponent) run("Jaffa.Printing.PrinterDefinitionMaintenance");
        m_updateComponent.setReturnToFormKey(getResultsFormKey());
        addListeners(m_updateComponent);
    }
    m_updateComponent.setPrinterId(printerId);
    if (m_updateComponent instanceof IMaintComponent)
        ((IMaintComponent) m_updateComponent).setMode(IMaintComponent.MODE_UPDATE);
    // .//GEN-END:_updateObject_1_be
    // Add custom code before invoking the component //GEN-FIRST:_updateObject_2
    // .//GEN-LAST:_updateObject_2
    // .//GEN-BEGIN:_updateObject_2_be
    return m_updateComponent.display();
}
/** Lazily builds the listener that re-runs the inquiry after an Update,
 * so the Results screen reflects the change. */
private IUpdateListener getUpdateListener() {
    if (m_updateListener == null) {
        m_updateListener = new IUpdateListener() {
            public void updateDone(EventObject source) {
                try {
                    // .//GEN-END:_updateObject_2_be
                    // Add custom code //GEN-FIRST:_updateObject_1
                    // .//GEN-LAST:_updateObject_1
                    // .//GEN-BEGIN:_updateObject_3_be
                    performInquiry();
                } catch (Exception e) {
                    // Refresh failure is non-fatal: log and keep the stale results.
                    log.warn("Error in refreshing the Results screen after the Update", e);
                }
            }
        };
    }
    return m_updateListener;
}
// .//GEN-END:_updateObject_3_be
// .//GEN-BEGIN:_deleteObject_1_be
/** Calls the Jaffa.Printing.PrinterDefinitionMaintenance component for deleting the PrinterDefinition object.
* @throws ApplicationExceptions This will be thrown in case any invalid data has been set.
* @throws FrameworkException Indicates some system error.
* @return The FormKey for the Delete screen.
*/
public FormKey deleteObject(java.lang.String printerId) throws ApplicationExceptions, FrameworkException {
if (m_deleteComponent == null || !m_deleteComponent.isActive()) {
m_deleteComponent = (PrinterDefinitionMaintenanceComponent) run("Jaffa.Printing.PrinterDefinitionMaintenance");
m_deleteComponent.setReturnToFormKey(getResultsFormKey());
addListeners(m_deleteComponent);
}
m_deleteComponent.setPrinterId(printerId);
if (m_deleteComponent instanceof IMaintComponent)
((IMaintComponent) m_deleteComponent).setMode(IMaintComponent.MODE_DELETE);
// .//GEN-END:_deleteObject_1_be
// Add custom code before invoking the component //GEN-FIRST:_deleteObject_2
// .//GEN-LAST:_deleteObject_2
// .//GEN-BEGIN:_deleteObject_2_be
return m_deleteComponent.display();
}
private IDeleteListener getDeleteListener() {
if (m_deleteListener == null) {
m_deleteListener = new IDeleteListener() {
public void deleteDone(EventObject source) {
try {
// .//GEN-END:_deleteObject_2_be
// Add custom code //GEN-FIRST:_deleteObject_1
// .//GEN-LAST:_deleteObject_1
// .//GEN-BEGIN:_deleteObject_3_be
performInquiry();
} catch (Exception e) {
log.warn("Error in refreshing the Results screen after the Delete", e);
}
}
};
}
return m_deleteListener;
}
// .//GEN-END:_deleteObject_3_be
// .//GEN-BEGIN:_addListeners_1_be
private void addListeners(Component comp) {
if (comp instanceof ICreateComponent)
((ICreateComponent) comp).addCreateListener(getCreateListener());
if (comp instanceof IUpdateComponent)
((IUpdateComponent) comp).addUpdateListener(getUpdateListener());
if (comp instanceof IDeleteComponent)
((IDeleteComponent) comp).addDeleteListener(getDeleteListener());
}
// .//GEN-END:_addListeners_1_be
// .//GEN-BEGIN:_initializeCriteriaScreen_1_be
/** This will retrieve the set of codes for dropdowns, if any are required
* @throws ApplicationExceptions This will be thrown in case any invalid data has been set.
* @throws FrameworkException Indicates some system error.
*/
protected void initializeCriteriaScreen() throws ApplicationExceptions, FrameworkException {
ApplicationExceptions appExps = null;
CodeHelperInDto input = null;
if (m_codeHelperTx == null)
m_codeHelperTx = (ICodeHelper) Factory.createObject(ICodeHelper.class);
if (m_outputTypeCodes == null) {
if (input == null)
input = new CodeHelperInDto();
CodeHelperInElementDto codeHelperInElementDto = new CodeHelperInElementDto();
codeHelperInElementDto.setCode("outputType");
codeHelperInElementDto.setDomainClassName("org.jaffa.modules.printing.domain.PrinterOutputType");
codeHelperInElementDto.setCodeFieldName("OutputType");
codeHelperInElementDto.setDescriptionFieldName("Description");
codeHelperInElementDto.setAppendCodeAndDescription(true);
input.addCodeHelperInElementDto(codeHelperInElementDto);
}
// throw ApplicationExceptions, if any parsing errors occured
if (appExps != null && appExps.size() > 0)
throw appExps;
// Get the Codes and populate the respective fields
if (input != null) {
input.setHeaderDto(getHeaderDto());
CodeHelperOutDto output = m_codeHelperTx.getCodes(input);
if (output != null && output.getCodeHelperOutElementDtoCount() > 0) {
CodeHelperOutElementDto[] codeHelperOutElementDtos = output.getCodeHelperOutElementDtos();
for (int i = 0; i < codeHelperOutElementDtos.length; i++) {
CodeHelperOutElementDto codeHelperOutElementDto = codeHelperOutElementDtos[i];
String code = codeHelperOutElementDto.getCode();
if (code.equals("outputType"))
m_outputTypeCodes = codeHelperOutElementDto;
}
}
}
// Get comma-separated page size business rule and create drop-down list,
// if scaleToPageSizes business rule is defined. e.g. scaleToPageSizes=LETTER,A4
CodeHelperOutElementDto codeHelperOutElementDto;
CodeHelperOutCodeDto codeHelperOutCodeDto;
if (scaleToPageSizesArray == null) {
String scaleToPageSizesRule = (String) ContextManagerFactory.instance().getProperty("jaffa.printing.scaleToPageSizes");
scaleToPageSizesArray = scaleToPageSizesRule != null ? scaleToPageSizesRule.split(",") : null;
codeHelperOutElementDto = new CodeHelperOutElementDto();
if (scaleToPageSizesArray != null) {
for (String size : scaleToPageSizesArray) {
codeHelperOutCodeDto = new CodeHelperOutCodeDto();
codeHelperOutCodeDto.setCode(size);
codeHelperOutCodeDto.setDescription(size);
codeHelperOutElementDto.addCodeHelperOutCodeDto(codeHelperOutCodeDto);
}
m_scaleToPageSizeCodes = codeHelperOutElementDto;
}
}
}
// .//GEN-END:_initializeCriteriaScreen_1_be
// All the custom code goes here //GEN-FIRST:_custom
/** Getter for property scaleToPageSizeCodes.
* @return Value of property scaleToPageSizeCodes.
*/
public CodeHelperOutElementDto getScaleToPageSizeCodes() {
return m_scaleToPageSizeCodes;
}
// .//GEN-LAST:_custom
}
| gpl-3.0 |
martin-eden/workshop | concepts/daytime/to_ampm_hour.lua | 445 | --[[
Calculate 12h hour and AM/PM flag from 24h hour.
In AM/PM there are no zero hour. 00h -> 12 a.m., 12h -> 12p.m.
This function does not check that given hour number in valid
range [0, 23]. It's job for other functions.
]]
return
function(hour)
assert_integer(hour)
local is_pm = (hour >= 12)
if is_pm then
hour = hour - 12
end
if (hour == 0) then
hour = 12
end
return hour, is_pm
end
| gpl-3.0 |
fisharebest/localization | test/Language/LanguageNaqTest.php | 730 | <?php
namespace Fisharebest\Localization\Language;
use Fisharebest\Localization\PluralRule\PluralRuleOneTwoOther;
use Fisharebest\Localization\Script\ScriptLatn;
use PHPUnit\Framework\TestCase;
/**
* Tests for the LanguageNaq class
*
* @author Greg Roach <greg@subaqua.co.uk>
* @copyright (c) 2020 Greg Roach
* @license GPLv3+
*/
class LanguageNaqTest extends TestCase
{
/**
* Test the properties
*/
public function testProperties()
{
$language = new LanguageNaq();
$this->assertEquals(new ScriptLatn(), $language->defaultScript());
$this->assertEquals(new PluralRuleOneTwoOther(), $language->pluralRule());
$this->assertSame('naq', $language->code());
}
}
| gpl-3.0 |
digitalutsc/islandora_web_annotations | js/video/video.js | 13340 |
var user = "anonymous";
jQuery(document).ready(function() {
$ = jQuery;
user = Drupal.settings.islandora_web_annotations.user;
// Hide Lib related permission fields
jQuery(".annotator-checkbox").hide();
jQuery("#islandora_videojs_html5_api").addClass("video-js");
jQuery("#islandora_videojs_html5_api").attr("preload", "none");
preload="none"
var objectPID = Drupal.settings.islandora_web_annotations.pid;
var objectUri = location.protocol + '//' + location.host + "/islandora/object/" + objectPID
var options = {
optionsAnnotator: {
permissions: { },
store: {
// The endpoint of the store on your server.
prefix: location.protocol + '//' + location.host + '/islandora_web_annotations',
emulateJSON: true,
annotationData: {uri:objectUri},
urls: {
create: '/create',
update: '/update',
destroy: '/delete'
},
loadFromSearch:{
limit:100,
uri: objectUri,
}
},
richText: {
tinymce:{
selector: "li.annotator-item textarea",
plugins: "media image insertdatetime link code",
menubar: false,
toolbar_items_size: 'small',
extended_valid_elements : "iframe[src|frameborder|style|scrolling|class|width|height|name|align|id]",
toolbar: "insertfile undo redo | styleselect | bold italic | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image media rubric | code ",
}
},
annotator: {}, //Annotator core
},
optionsVideoJS: {techOrder: ["html5","flash"]},
optionsRS: {},
optionsOVA: {posBigNew:'none'/*,NumAnnotations:20*/},
}
//Load the plugin Open Video Annotation
try {
var targetDiv = jQuery(".video-js").first().parent().parent();
ova = new OpenVideoAnnotation.Annotator(targetDiv, options);
}
catch(e){
alert(e)
}
// Apply Permissions after OpenVideoAnnotation elements are loaded
if(Drupal.settings.islandora_web_annotations.view === false){
jQuery(".vjs-showannotations-annotation").hide();
jQuery(".vjs-statistics-annotation").hide();
}
if(Drupal.settings.islandora_web_annotations.create == false) {
jQuery(".vjs-new-annotation").hide();
}
//change the user (Experimental)
ova.setCurrentUser(user);
$('#username').change(function () {
ova.setCurrentUser($(this).val());
});
ova.annotator.subscribe('annotationViewerShown', function(viewer, annotations){
applyPermissionsOnView(annotations);
if(jQuery(".annotator-hl.active").length > 0) {
var left = jQuery(".annotator-hl.active").first().find("div").first().css("left");
left = left.substr(0, left.length - 2);
var width = jQuery(".annotator-hl.active").first().find("div").first().width();
var newleft = Number(left) + Number(width) / 2;
jQuery(".annotator-viewer").first().css({left: newleft + "px"});
var top = jQuery(".annotator-hl.active").first().find("div").first().css("top");
top = top.substr(0, top.length - 2);
top = Number(top) + 30;
jQuery(".annotator-viewer").first().css({top: top + "px"});
} else {
if(jQuery(".vjs-selectionbar-RS").first().is(":visible") === true) {
positionAnnotatorForm(".annotator-viewer");
}
}
});
ova.annotator.subscribe('annotationEditorShown', function(viewer, annotations){
// Remove the list items which contain default permission checkboxes
// from the Open Video Annotation editor. We do not use these.
jQuery('li.annotator-checkbox').remove();
// issue-174
jQuery(".mce-i-rubric").parent().hide();
if(jQuery(".islandora-oralhistories-object").length > 0){
positionAnnotatorForm(".annotator-editor");
}
});
ova.annotator.subscribe('beforeAnnotationUpdated', function(annotation){
annotation.author = ova.currentUser;
applyBlock("updated");
});
ova.annotator.subscribe('annotationCreated', function(annotation) {
applyBlock("created");
});
if (window.location.href.indexOf("annotationPID") > -1) {
askIfUserWantsToPlay();
}
// Play video when button clicked from View
jQuery(".playvideo").click(function() {
var pid = jQuery(this).attr('id');
ova.playTarget(pid);
});
});
/**
* If the page gets loaded from search results, provide the user option to play the annotation.
*/
function askIfUserWantsToPlay() {
jQuery.blockUI({
message: "Would you like to play the video annotation? <br><span id='yes_play'>Yes</span> <span id='do_not_play'>No</span>",
fadeIn: 700,
fadeOut: 700,
timeout: 3000,
showOverlay: false,
centerY: false,
css: {
height: '50px',
border: 'none',
padding: '5px',
backgroundColor: '#000',
'-webkit-border-radius': '10px',
'-moz-border-radius': '10px',
opacity: .6,
color: '#fff'
}
});
jQuery('#yes_play').click(function() {
jQuery.unblockUI();
var annotationPID = getParameterByName("annotationPID");
ova.playTarget(annotationPID);
});
jQuery('#do_not_play').click(function() {
jQuery.unblockUI();
});
}
function applyPermissionsOnView(annotations){
var createdByMe = (user == annotations[0].user) ? true:false;
jQuery(".annotator-edit").hide();
jQuery(".annotator-delete").hide();
if(Drupal.settings.islandora_web_annotations.edit_any === true || (Drupal.settings.islandora_web_annotations.edit_own === true && createdByMe === true)) {
jQuery(".annotator-edit").show();
}
if(Drupal.settings.islandora_web_annotations.delete_any === true || (Drupal.settings.islandora_web_annotations.delete_own === true && createdByMe === true)) {
jQuery(".annotator-delete").show();
}
}
/**
* issue#123
* Due to an bug in the annotator js library, the annotationCreated does not return the pid of the created annotation.
* We need to attach a POST listener to get this info and update the store.
* This is required to enable the user to edit the annotation immediately after creating it.
*/
jQuery(document).ajaxComplete(function(event, jqXHR, ajaxOptions) {
var jsonDataText = JSON.parse(jqXHR.responseText);
if (ajaxOptions.type === 'POST' && /\/islandora_web_annotations/.test(ajaxOptions.url)) {
jQuery('.annotator-wrapper').unblock();
var jsonData = jsonDataText;
// Basic error check
if(typeof jsonData.rows !== 'undefined'){
var PID = jsonData.rows[0].pid;
var checksum = jsonData.rows[0].checksum;
// Set annotation PID
ova.annotator.plugins["Store"].annotations[0].pid = PID;
ova.annotator.plugins["Store"].annotations[0].checksum = checksum;
var verbose_message = "Annotation successfully created: " + JSON.stringify(jsonData);
var short_message = "Annotation successfully created.";
verbose_alert(short_message, verbose_message);
} else {
var verbose_message = "Error in creating the annotation: " + JSON.stringify(jsonData);
var short_message = "Error in creating the annotation.";
verbose_alert(short_message, verbose_message);
}
jQuery(".vjs-controltimepanel-RS").hide();
} else if (ajaxOptions.type === 'PUT' && /\/islandora_web_annotations/.test(ajaxOptions.url)) {
jQuery('.annotator-wrapper').unblock();
var jsonData = jsonDataText;
var status = jsonData.status;
if(status === undefined){
alert("Error in updating annotation. Server failed to return valid response.");
return;
}
var annoInfo = jsonData.data;
if(status == "success") {
var pid = annoInfo.pid;
var checksum = annoInfo.checksum;
updateChecksum(pid, checksum);
var verbose_message = "Successfully updated the annotation: " + JSON.stringify(annoInfo);
var short_message = "Update successful.";
verbose_alert(short_message, verbose_message);
} else if(status == "conflict"){
var msg = "There was an edit conflict. Please hover over the annotation you edited, copy the content, refresh annotations page and try updating again.";
verbose_alert(msg, msg);
} else {
var verbose_message = "Unable to update. Error info: " + JSON.stringify(annoInfo);
var short_message = "Error: Unable to update.";
verbose_alert(short_message, verbose_message);
}
jQuery(".vjs-controltimepanel-RS").hide();
} else if (ajaxOptions.type === 'DELETE' && /\/islandora_web_annotations/.test(ajaxOptions.url)) {
var jsonData = JSON.parse(jsonDataText);
var status = jsonData.status;
if(status === undefined){
alert("Error in deleting annotation. Server failed to return valid response.");
return;
}
var annoInfo = jsonData.data;
if(status == "success") {
var verbose_message = "Success! " + JSON.stringify(annoInfo);
var short_message = "Annotation successfully deleted.";
verbose_alert(short_message, verbose_message);
} else if(status == "conflict"){
var msg = "There was an edit conflict. Please reload the annotations to view the changes. You can try again to delete.";
verbose_alert(msg, msg);
} else {
var verbose_message = "Unable to delete. Error info: " + JSON.stringify(annoInfo);
var short_message = "Error: Unable to delete.";
verbose_alert(short_message, verbose_message);
}
}
});
/**
* After an annotation is added the checksum is updated in the UI.
*
* @param pid
* @param checksum
* return None
*/
function updateChecksum(pid, checksum) {
var annosLength = ova.annotator.plugins["Store"].annotations.length;
for(var j = 0; j < annosLength; j++){
var annoPID = ova.annotator.plugins["Store"].annotations[j].pid;
if(annoPID == pid) {
ova.annotator.plugins["Store"].annotations[j].checksum = checksum;
delete ova.annotator.plugins["Store"].annotations[j].status;
delete ova.annotator.plugins["Store"].annotations[j].data;
break;
}
}
}
function positionAnnotatorForm(formSelector){
var left = jQuery(".vjs-selectionbar-RS").first().css("left");
left = left.substr(0, left.length - 2);
var width = jQuery(".vjs-selectionbar-RS").first().width();
var newleft = Number(left) + Number(width) / 2;
jQuery(formSelector).first().css({left: newleft + "px"});
var height = jQuery(document.getElementsByTagName("video")[0]).height();
var top = height - 20;
if(formSelector === ".annotator-editor") {
top = top - 25;
}
jQuery(formSelector).first().css({top: top + "px"});
}
function applyBlock(actionType){
var msg = '<h1>Annotation is being ' + actionType + '. Please wait.....</h1>';
jQuery('.annotator-wrapper').block({
message: msg,
css: {
border: 'none',
width: '400px',
padding: '15px',
'-webkit-border-radius': '10px',
'-moz-border-radius': '10px'
}
});
}
/**
* Common Methods
*/
function verbose_alert(short_message, verbose_message) {
var verbose_flag = Drupal.settings.islandora_web_annotations.verbose_messages;
if (verbose_flag) {
alert(verbose_message);
} else {
showGrowlMsg(short_message);
}
}
function showGrowlMsg(i_msg) {
jQuery.blockUI({
message: i_msg,
fadeIn: 700,
fadeOut: 700,
timeout: 2000,
showOverlay: false,
centerY: false,
css: {
width: '400px',
bottom: '10px',
top: '-100',
left: '',
right: '10px',
height: '50px',
border: 'none',
padding: '5px',
backgroundColor: '#000',
'-webkit-border-radius': '10px',
'-moz-border-radius': '10px',
opacity: .6,
color: '#fff'
}
});
}
/**
* Utility method
*
* @param param_name
* @param url
* @returns {*}
*/
function getParameterByName(param_name, url) {
if (!url) url = window.location.href;
param_name = param_name.replace(/[\[\]]/g, "\\$&");
var regex = new RegExp("[?&]" + param_name + "(=([^&#]*)|&|#|$)"),
results = regex.exec(url);
if (!results) return null;
if (!results[2]) return '';
return decodeURIComponent(results[2].replace(/\+/g, " "));
} | gpl-3.0 |
Serg-Norseman/GEDKeeper | projects/GKv3/GKComponents/GKUI/Components/GKListView.cs | 23964 | /*
* "GEDKeeper", the personal genealogical database editor.
* Copyright (C) 2009-2021 by Sergey V. Zhdanovskih.
*
* This file is part of "GEDKeeper".
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
using System;
using System.Collections.Generic;
using BSLib;
using BSLib.Design;
using BSLib.Design.Graphics;
using BSLib.Design.MVP.Controls;
using Eto.Drawing;
using Eto.Forms;
using GKCore;
using GKCore.Interfaces;
using GKUI.Platform;
using BSDListItem = BSLib.Design.MVP.Controls.IListItem;
using BSDSortOrder = BSLib.Design.BSDTypes.SortOrder;
namespace GKUI.Components
{
/// <summary>
///
/// </summary>
public class GKListItem : GridItem, BSDListItem
{
private Color fBackColor;
private Color fForeColor;
private object fData;
public Color BackColor
{
get { return fBackColor; }
set { fBackColor = value; }
}
public Color ForeColor
{
get { return fForeColor; }
set { fForeColor = value; }
}
public bool Checked
{
get { return (bool)base.Values[0]; }
set { base.Values[0] = value; }
}
public object Data
{
get { return fData; }
set { fData = value; }
}
public GKListItem(params object[] values) : base(values)
{
BackColor = Colors.Transparent;
}
public void AddSubItem(object itemValue)
{
}
public int CompareTo(object obj)
{
return 0;
}
public void SetBackColor(IColor color)
{
var colorHandler = color as ColorHandler;
if (colorHandler != null) {
BackColor = colorHandler.Handle;
}
}
public void SetForeColor(IColor color)
{
var colorHandler = color as ColorHandler;
if (colorHandler != null) {
ForeColor = colorHandler.Handle;
}
}
public void SetFont(IFont font)
{
}
}
public class ItemCheckEventArgs : EventArgs
{
public int Index { get; set; }
public bool NewValue { get; set; }
public ItemCheckEventArgs(int index, bool newValue)
{
Index = index;
NewValue = newValue;
}
}
public delegate void ItemCheckEventHandler(object sender, ItemCheckEventArgs e);
/// <summary>
///
/// </summary>
public class GKListView : GridView, IListViewEx
{
private readonly ObservableExtList<GKListItem> fItems;
//private readonly GKListViewItems fItemsAccessor;
private bool fCheckedList;
private IListManager fListMan;
private bool fSorting;
private int fSortColumn;
private BSDSortOrder fSortOrder;
private int fUpdateCount;
IListViewItems IListView.Items
{
get { return fItems; }
}
public IListManager ListMan
{
get {
return fListMan;
}
set {
if (fListMan != value) {
if (fListMan != null) fListMan.Dispose();
fListMan = value;
if (fListMan != null) {
fSorting = true;
fSortColumn = 0;
fSortOrder = BSDSortOrder.Ascending;
} else {
}
}
}
}
public int SelectedIndex
{
get {
int index = fItems.IndexOf(SelectedItem as GKListItem);
return index;
}
set {
SelectItem(value);
}
}
public bool Sorting
{
get { return fSorting; }
set { fSorting = value; }
}
public int SortColumn
{
get { return fSortColumn; }
set { fSortColumn = value; }
}
public BSDSortOrder SortOrder
{
get { return fSortOrder; }
set { fSortOrder = value; }
}
public event ItemCheckEventHandler ItemCheck;
public GKListView()
{
//SetStyle(ControlStyles.DoubleBuffer, true);
//SetStyle(ControlStyles.OptimizedDoubleBuffer, true);
//SetStyle(ControlStyles.AllPaintingInWmPaint, true);
// Enable the OnNotifyMessage event so we get a chance to filter out
// Windows messages before they get to the form's WndProc
//SetStyle(ControlStyles.EnableNotifyMessage, true);
//OwnerDraw = true;
fCheckedList = false;
fItems = new ObservableExtList<GKListItem>();
//fItemsAccessor = new GKListViewItems(this);
fSortColumn = 0;
fSortOrder = BSDSortOrder.None;
AllowColumnReordering = false;
AllowMultipleSelection = false;
DataStore = fItems;
fListMan = null;
}
protected override void Dispose(bool disposing)
{
if (disposing) {
if (fListMan != null) {
fListMan.Dispose();
fListMan = null;
}
}
base.Dispose(disposing);
}
public void Activate()
{
Focus();
}
public void BeginUpdate()
{
if (fUpdateCount == 0) {
DataStore = null;
fItems.BeginUpdate();
}
fUpdateCount++;
}
public void EndUpdate()
{
fUpdateCount--;
if (fUpdateCount == 0) {
fItems.EndUpdate();
DataStore = fItems;
}
}
protected BSDSortOrder GetColumnSortOrder(int columnIndex)
{
return (fSortColumn == columnIndex) ? fSortOrder : BSDSortOrder.None;
}
public void SetSortColumn(int sortColumn, bool checkOrder = true)
{
int prevColumn = fSortColumn;
if (prevColumn == sortColumn && checkOrder) {
BSDSortOrder prevOrder = GetColumnSortOrder(sortColumn);
fSortOrder = (prevOrder == BSDSortOrder.Ascending) ? BSDSortOrder.Descending : BSDSortOrder.Ascending;
}
fSortColumn = sortColumn;
object rowData = GetSelectedData();
SortContents(true);
UpdateItems();
if (rowData != null) SelectItem(rowData);
}
public void Sort(int sortColumn, BSDSortOrder sortOrder)
{
fSortColumn = sortColumn;
fSortOrder = sortOrder;
object rowData = GetSelectedData();
SortContents(true);
UpdateItems();
if (rowData != null) SelectItem(rowData);
}
protected override void OnColumnHeaderClick(GridColumnEventArgs e)
{
BeginUpdate();
try {
int columnIndex = this.Columns.IndexOf(e.Column);
SetSortColumn(columnIndex, false);
} finally {
EndUpdate();
}
base.OnColumnHeaderClick(e);
}
/*protected override void OnDrawColumnHeader(DrawListViewColumnHeaderEventArgs e)
{
#if DEFAULT_HEADER
e.DrawDefault = true;
#else
using (var sf = new StringFormat())
{
Graphics gfx = e.Graphics;
Rectangle rt = e.Bounds;
#if !MONO
VisualStyleElement element = VisualStyleElement.Header.Item.Normal;
if ((e.State & ListViewItemStates.Hot) == ListViewItemStates.Hot)
element = VisualStyleElement.Header.Item.Hot;
if ((e.State & ListViewItemStates.Selected) == ListViewItemStates.Selected)
element = VisualStyleElement.Header.Item.Pressed;
var visualStyleRenderer = new VisualStyleRenderer(element);
visualStyleRenderer.DrawBackground(gfx, rt);
#else
e.DrawBackground();
#endif
switch (e.Header.TextAlign)
{
case HorizontalAlignment.Left:
sf.Alignment = StringAlignment.Near;
break;
case HorizontalAlignment.Right:
sf.Alignment = StringAlignment.Far;
break;
case HorizontalAlignment.Center:
sf.Alignment = StringAlignment.Center;
break;
}
sf.LineAlignment = StringAlignment.Center;
sf.Trimming = StringTrimming.EllipsisCharacter;
sf.FormatFlags = StringFormatFlags.NoWrap;
int w = TextRenderer.MeasureText(" ", Font).Width;
rt.Inflate(-(w / 5), 0);
gfx.DrawString(e.Header.Text, Font, Brushes.Black, rt, sf);
string arrow = "";
switch (GetColumnSortOrder(e.ColumnIndex)) {
case BSDSortOrder.Ascending:
arrow = "▲";
break;
case BSDSortOrder.Descending:
arrow = "▼";
break;
}
if (arrow != "") {
using (var fnt = new Font(Font.FontFamily, Font.SizeInPoints * 0.6f, FontStyle.Regular)) {
float aw = gfx.MeasureString(arrow, fnt).Width;
float x = rt.Left + (rt.Width - aw) / 2.0f;
gfx.TextRenderingHint = TextRenderingHint.AntiAlias;
gfx.DrawString(arrow, fnt, Brushes.Black, x, rt.Top);
}
}
}
#endif
base.OnDrawColumnHeader(e);
}*/
/*protected override void OnCellFormatting(GridCellFormatEventArgs e)
{
if (e.Row == fItems.IndexOf((GKListItem)SelectedItem)) {
e.BackgroundColor = SystemColors.Highlight;
e.ForegroundColor = Colors.White;
} else {
var item = e.Item as GKListItem;
if (item != null) {
if (item.BackColor != Colors.Transparent) {
e.BackgroundColor = item.BackColor;
e.ForegroundColor = Colors.Black;
} else {
e.BackgroundColor = Colors.White;
e.ForegroundColor = Colors.Black;
}
}
}
base.OnCellFormatting(e);
}*/
private int CompareItems(GKListItem item1, GKListItem item2)
{
int result = 0;
if (fSortOrder != BSDSortOrder.None && fSortColumn >= 0) {
if (fSortColumn < item1.Values.Length && fSortColumn < item2.Values.Length) {
IComparable val1 = item1.Values[fSortColumn] as IComparable;
IComparable val2 = item2.Values[fSortColumn] as IComparable;
if (val1 != null && val2 != null) {
bool isStr1 = val1 is string;
bool isStr2 = val2 is string;
if (isStr1 && isStr2) {
result = StrCompareEx((string)val1, (string)val2);
} else {
result = val1.CompareTo(val2);
}
}
}
if (fSortOrder == BSDSortOrder.Descending) {
result = -result;
}
}
return result;
}
#region Virtual mode with ListSource
// In Eto not exists
/*protected override void OnColumnWidthChanged(ColumnWidthChangedEventArgs e)
{
if (fListMan != null && fUpdateCount == 0) {
fListMan.ChangeColumnWidth(e.ColumnIndex, Columns[e.ColumnIndex].Width);
}
base.OnColumnWidthChanged(e);
}*/
private void SortContents(bool restoreSelected)
{
if (fSorting) {
if (fListMan != null) {
fListMan.SortContents(fSortColumn, fSortOrder == BSDSortOrder.Ascending);
} else {
SortHelper.MergeSort(fItems, CompareItems);
}
}
}
private void UpdateItems()
{
if (fListMan == null) return;
fItems.Clear();
int num = fListMan.FilteredCount;
for (int i = 0; i < num; i++) {
object rowData = fListMan.GetContentItem(i);
if (rowData != null) {
object[] itemData = fListMan.GetItemData(rowData);
GKListItem newItem = (GKListItem)AddItem(rowData, itemData);
fListMan.UpdateItemProps(newItem, rowData);
}
}
}
public void UpdateContents(bool columnsChanged = false)
{
if (fListMan == null) return;
try {
if (fListMan.ColumnsHaveBeenChanged != columnsChanged && columnsChanged) {
fListMan.ColumnsHaveBeenChanged = columnsChanged;
}
object tempRec = GetSelectedData();
BeginUpdate();
try {
if (columnsChanged || Columns.Count == 0 || fListMan.ColumnsHaveBeenChanged) {
Columns.Clear();
fListMan.UpdateColumns(this);
}
fListMan.UpdateContents();
SortContents(false);
UpdateItems();
ResizeColumns();
} finally {
EndUpdate();
}
if (tempRec != null) SelectItem(tempRec);
} catch (Exception ex) {
Logger.WriteError("GKListView.UpdateContents()", ex);
}
}
public void DeleteRecord(object data)
{
// crash protection: when you delete records from the diagrams,
// between the actual deleting a record and updating the list
// may take a few requests to update the list's items which does not already exist
if (fListMan != null && fListMan.DeleteRecord(data)) {
/*VirtualListSize = fListMan.FilteredCount;*/
}
}
#endregion
#region Public methods
public void Clear()
{
Columns.Clear();
fItems.Clear();
}
public void ClearColumns()
{
Columns.Clear();
}
public void AddColumn(string caption, int width, bool autoSize = false)
{
var cell = new TextBoxCell(Columns.Count);
GridColumn column = new GridColumn();
column.HeaderText = caption;
column.DataCell = cell;
column.AutoSize = autoSize;
column.Width = width;
Columns.Add(column);
}
public void AddCheckedColumn(string caption, int width, bool autoSize = false)
{
var cell = new CheckBoxCell(Columns.Count);
GridColumn column = new GridColumn();
column.HeaderText = caption;
column.DataCell = cell;
column.AutoSize = autoSize;
column.Width = width;
column.Editable = true;
Columns.Add(column);
fCheckedList = true;
}
public void AddTextColumn(string caption, int width, bool autoSize = false)
{
var cell = new TextBoxCell(Columns.Count);
GridColumn column = new GridColumn();
column.HeaderText = caption;
column.DataCell = cell;
column.AutoSize = autoSize;
column.Width = width;
column.Editable = true;
Columns.Add(column);
}
public void AddComboColumn(string caption, int width, bool autoSize, object[] items)
{
var cell = new ComboBoxCell(Columns.Count);
cell.DataStore = items;
GridColumn column = new GridColumn();
column.HeaderText = caption;
column.DataCell = cell;
column.AutoSize = autoSize;
column.Width = width;
column.Editable = true;
Columns.Add(column);
}
public void AddColumn(string caption, int width, bool autoSize, BSDTypes.HorizontalAlignment textAlign)
{
AddColumn(caption, width, autoSize);
}
public void SetColumnCaption(int index, string caption)
{
Columns[index].HeaderText = caption;
}
public void ResizeColumn(int columnIndex)
{
try {
/*if (columnIndex >= 0 && Items.Count > 0)
{
AutoResizeColumn(columnIndex, ColumnHeaderAutoResizeStyle.ColumnContent);
if (Columns[columnIndex].Width < 20)
{
AutoResizeColumn(columnIndex, ColumnHeaderAutoResizeStyle.HeaderSize);
}
}*/
} catch (Exception ex) {
Logger.WriteError("GKListView.ResizeColumn()", ex);
}
}
public void ResizeColumns()
{
if (fListMan == null) return;
for (int i = 0; i < Columns.Count; i++) {
if (fListMan.IsColumnAutosize(i)) {
ResizeColumn(i);
}
}
}
public void ClearItems()
{
fItems.Clear();
}
public BSDListItem AddItem(object rowData, params object[] columnValues)
{
object[] itemValues;
if (fCheckedList) {
int num = columnValues.Length;
itemValues = new object[num + 1];
itemValues[0] = false;
Array.Copy(columnValues, 0, itemValues, 1, num);
} else {
itemValues = columnValues;
}
var item = new GKListItem(itemValues);
item.Data = rowData;
fItems.Add(item);
return item;
}
public IList<object> GetSelectedItems()
{
try {
var result = new List<object>();
/*if (!VirtualMode) {
int num = SelectedItems.Count;
for (int i = 0; i < num; i++) {
var lvItem = SelectedItems[i] as GKListItem;
result.Add(lvItem.Data);
}
} else {
int num = SelectedIndices.Count;
for (int i = 0; i < num; i++) {
int index = SelectedIndices[i];
result.Add(fListMan.GetContentItem(index));
}
}*/
return result;
} catch (Exception ex) {
Logger.WriteError("GKListView.GetSelectedItems()", ex);
return null;
}
}
public GKListItem GetSelectedItem()
{
var item = SelectedItem as GKListItem;
return item;
}
public object GetSelectedData()
{
var item = GetSelectedItem();
return (item != null) ? item.Data : null;
}
private void SelectItem(GKListItem item)
{
if (item != null) {
int idx = fItems.IndexOf(item);
SelectItem(idx);
}
}
public void SelectItem(int index)
{
if (index >= 0 && index < fItems.Count) {
ScrollToRow(index);
SelectRow(index);
}
}
public void SelectItem(object rowData)
{
try {
if (fListMan != null) {
// "virtual" mode
int idx = fListMan.IndexOfRecord(rowData);
SelectItem(idx);
} else {
int num = fItems.Count;
for (int i = 0; i < num; i++) {
var item = (GKListItem)fItems[i];
if (item.Data == rowData) {
SelectItem(i);
return;
}
}
}
} catch (Exception ex) {
Logger.WriteError("GKListView.SelectItem()", ex);
}
}
#endregion
#region CheckedList
protected override void OnCellEdited(GridViewCellEventArgs e)
{
if (fCheckedList) {
if (e.Column == 0) {
DoItemCheck(e.Row, ((bool)((GKListItem)e.Item).Values[0]));
}
}
base.OnCellEdited(e);
}
private void DoItemCheck(int index, bool newValue)
{
ItemCheckEventHandler handler = this.ItemCheck;
if (handler != null)
handler.Invoke(this, new ItemCheckEventArgs(index, newValue));
}
#endregion
#region Internal functions
internal static int StrCompareEx(string str1, string str2)
{
double val1, val2;
bool v1 = double.TryParse(str1, out val1);
bool v2 = double.TryParse(str2, out val2);
int result;
if (v1 && v2) {
if (val1 < val2) {
result = -1;
} else if (val1 > val2) {
result = +1;
} else {
result = 0;
}
} else {
result = string.Compare(str1, str2, false);
if (str1 != "" && str2 == "") {
result = -1;
} else if (str1 == "" && str2 != "") {
result = +1;
}
}
return result;
}
#endregion
}
}
| gpl-3.0 |
Aeronavics/MissionPlanner | ExtLibs/UAVCAN/out/include/uavcan.protocol.file.BeginFirmwareUpdate.cs | 182 | public partial class uavcan {
const double UAVCAN_PROTOCOL_FILE_BEGINFIRMWAREUPDATE_DT_ID = 40;
const double UAVCAN_PROTOCOL_FILE_BEGINFIRMWAREUPDATE_DT_SIG = 0xB7D725DF72724126;
}
| gpl-3.0 |
yohasebe/rubyplb | lib/ruby_graphviz.rb | 3747 | ## lib/ruby_graphviz.rb -- graphviz dot generator library
## Author:: Yoichiro Hasebe (mailto: yohasebe@gmail.com)
## Copyright:: Copyright 2009 Yoichiro Hasebe
## License:: GNU GPL version 3
# Builds Graphviz "dot" language output for graphs, nodes and edges.
#
# The attribute-hash serialization that was previously duplicated across
# create_graph, create_edge, node_default, edge_default and node is now
# centralized in +hash_to_attrs+; emitted output is unchanged.
class RubyGraphviz

  ## Example:
  ##
  ##  g = RubyGraphviz.new("newgraph", {:rankdir => "LR", :nodesep => "0.4", :ranksep => "0.2"})
  ##
  ## name:: graph identifier
  ## graph_hash:: optional graph-level attributes
  def initialize(name, graph_hash = nil)
    @name = name
    @graph_data = graph_hash
    @nodes = []
    @edges = []
    @dot = ""
    create_graph
  end

  protected

  # Serializes +attr_hash+ into dot attribute syntax: k1 = "v1", k2 = "v2"
  def hash_to_attrs(attr_hash)
    attr_hash.map { |k, v| "#{k.to_s} = \"#{v}\"" }.join(", ")
  end

  # Opens the graph block, emitting graph-level attributes when present.
  def create_graph
    @dot << "graph #{@name} {\n graph"
    @dot << " [" + hash_to_attrs(@graph_data) + "]" if @graph_data
    @dot << ";\n"
  end

  # Closes the graph block.
  def finish_graph
    @dot << "}\n"
  end

  # Builds one edge statement ("--" or "->") with optional attributes.
  def create_edge(edgetype, nid1, nid2, edge_hash = nil)
    temp = " #{nid1.to_s} #{edgetype} #{nid2.to_s}"
    temp << " [" + hash_to_attrs(edge_hash) + "]" if edge_hash
    temp
  end

  public

  ## Add a subgraph to a graph (recursively)
  ##
  ## Example:
  ##
  ##  graph1.subgraph(graph2)
  ##
  def subgraph(graph)
    @dot << graph.to_dot.sub(/\Agraph/, "subgraph")
  end

  ## Set default options for nodes
  ##
  ## Example:
  ##
  ##  graph.node_default(:shape => "record", :color => "gray60")
  ##
  def node_default(node_hash = nil)
    @dot << " node[" + hash_to_attrs(node_hash) + "];\n"
    self
  end

  ## Set default options for edges
  ##
  ## Example:
  ##
  ##  graph.edge_default(:color => "gray60")
  ##
  def edge_default(edge_hash = nil)
    @dot << " edge[" + hash_to_attrs(edge_hash) + "];\n"
    self
  end

  ## Create a node with its options
  ##
  ## Example:
  ##
  ##  graph.node("node-01", :label => "Node 01", :fillcolor => "pink")
  ##
  def node(node_id, node_hash = nil)
    @dot << " #{node_id.to_s}"
    @dot << " [" + hash_to_attrs(node_hash) + "]" if node_hash
    @dot << ";\n"
    self
  end

  ## Create a non-directional edge (connection line between nodes) with its options
  ##
  ## Example:
  ##
  ##  graph.edge("node-01", "node-02", :label => "connecting 1 and 2", :color => "lightblue")
  ##
  def edge(nid1, nid2, edge_hash = nil)
    @dot << create_edge("--", nid1, nid2, edge_hash) + ";\n"
    self
  end

  ## Create a directional edge (arrow from node to node) with its options
  ##
  ## Example:
  ##  graph.arrow_edge("node-01", "node-02", :label => "from 1 to 2", :color => "lightblue")
  ##
  def arrow_edge(nid1, nid2, edge_hash = nil)
    @dot << create_edge("->", nid1, nid2, edge_hash) + ";\n"
    self
  end

  ## Align nodes on the same rank connecting them with non-directional edges
  ##
  def rank(nid1, nid2, edge_hash = nil)
    @dot << "{rank=same " + create_edge("--", nid1, nid2, edge_hash) + "}\n"
    self
  end

  ## Convert graph into dot formatted data; unquotes HTML-like labels ("<...>")
  ##
  def to_dot
    finish_graph
    @dot = @dot.gsub(/\"\</m, "<").gsub(/\>\"/m, ">")
    return @dot
  end
end
| gpl-3.0 |
waikato-datamining/adams-base | adams-core/src/main/java/adams/gui/visualization/sequence/XYSequencePaintlet.java | 1398 | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* XYSequencePaintlet.java
* Copyright (C) 2012-2013 University of Waikato, Hamilton, New Zealand
*/
package adams.gui.visualization.sequence;
import adams.gui.visualization.core.Paintlet;
/**
* Interface for paintlets for the {@link XYSequencePanel}.
*
* @author fracpete (fracpete at waikato dot ac dot nz)
* @version $Revision$
*/
public interface XYSequencePaintlet
  extends Paintlet {

  /**
   * Returns the XY sequence panel currently in use.
   *
   * @return		the panel in use
   */
  public XYSequencePanel getSequencePanel();

  /**
   * Returns a new instance of the hit detector to use.
   * <p>
   * NOTE(review): implementations presumably return a fresh detector on each
   * call (method name suggests it) -- confirm against concrete paintlets.
   *
   * @return		the hit detector
   */
  public AbstractXYSequencePointHitDetector newHitDetector();
}
| gpl-3.0 |
lidarr/Lidarr | frontend/src/Settings/Profiles/Metadata/PrimaryTypeItem.js | 1273 | import classNames from 'classnames';
import PropTypes from 'prop-types';
import React, { Component } from 'react';
import CheckInput from 'Components/Form/CheckInput';
import styles from './TypeItem.css';
class PrimaryTypeItem extends Component {
//
// Listeners
onAllowedChange = ({ value }) => {
const {
albumTypeId,
onMetadataPrimaryTypeItemAllowedChange
} = this.props;
onMetadataPrimaryTypeItemAllowedChange(albumTypeId, value);
}
//
// Render
render() {
const {
name,
allowed
} = this.props;
return (
<div
className={classNames(
styles.metadataProfileItem
)}
>
<label
className={styles.albumTypeName}
>
<CheckInput
containerClassName={styles.checkContainer}
name={name}
value={allowed}
onChange={this.onAllowedChange}
/>
{name}
</label>
</div>
);
}
}
PrimaryTypeItem.propTypes = {
albumTypeId: PropTypes.number.isRequired,
name: PropTypes.string.isRequired,
allowed: PropTypes.bool.isRequired,
sortIndex: PropTypes.number.isRequired,
onMetadataPrimaryTypeItemAllowedChange: PropTypes.func
};
export default PrimaryTypeItem;
| gpl-3.0 |
jtsshieh/BonGon | events/guildCreate.js | 41 | module.exports = (bot, guild) => {
};
| gpl-3.0 |
j-m-c-p/diagnostico_enfermedades | www/instalador.php | 2086 | <!--
*
* Autores: Jhonnatan Cubides, Harley Santoyo
*
-->
<html>
<head>
<title>Instalador</title>
<?php
/* se incluye la clase BD la cual contiene las funciones para el funcionamiento del prototipo */
include ('class/BD.php');
/*Se nombra una variable para crear un nuevo objeto*/
$obj_o= new BD;
/* trae la función estilos de bootstrap de la clase */
echo $obj_o->estilos("bootstrap");
?>
</head>
<body>
<br>
<br>
<br>
<div class="container">
<div class="row">
<div class="col-xs-12 col-md-1 "></div>
<div class="col-xs-12 col-md-4 well">
<div class="form-group" >
<form action="instalando.php" method="get">
<!--<label for="exampleInputEmail1">Nombre de la tabla (*) </label>
<br>
<input type="text" class="form-control" name="tabla" placeholder="Nombre de la tabla" required>
<br>-->
<label for="exampleInputEmail1">Servidor (*) </label>
<br>
<input type="text" class="form-control" name="servidor" placeholder="servidor" required>
<br>
<label for="exampleInputEmail1">Usuario (*) </label>
<br>
<input type="text" class="form-control" name="usuario" placeholder="usuario" required>
<br>
<label for="exampleInputEmail1">Clave </label>
<br>
<input type="text" class="form-control" name="contrasena" placeholder="clave">
<br>
<label for="exampleInputEmail1">Base de datos (*) </label>
<br>
<input type="text" class="form-control" name="bd" placeholder="base de datos" required>
<br>
<input type="submit" class="btn btn-success" value="Enviar">
</form>
</div>
</div>
<br>
<br>
<br>
<br>
<br>
<br>
<div class="col-xs-12 col-md-4 "><div><h3 style="color:red">Por favor tener en phpmyadmin una base de datos creada.</h3></div></div>
</div>
</div>
</body>
</html>
| gpl-3.0 |
gitools/gitools | org.gitools.ui.app/src/main/java/org/gitools/ui/app/actions/data/HideSelectionAction.java | 2593 | /*
* #%L
* gitools-ui-app
* %%
* Copyright (C) 2013 Universitat Pompeu Fabra - Biomedical Genomics group
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-3.0.html>.
* #L%
*/
package org.gitools.ui.app.actions.data;
import org.gitools.api.matrix.MatrixDimensionKey;
import org.gitools.heatmap.Heatmap;
import org.gitools.heatmap.HeatmapDimension;
import org.gitools.ui.app.actions.HeatmapDimensionAction;
import org.gitools.ui.core.Application;
import org.gitools.ui.core.HeatmapPosition;
import org.gitools.ui.core.actions.dynamicactions.IHeatmapDimensionAction;
import org.gitools.ui.platform.icons.IconNames;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import static org.gitools.api.matrix.MatrixDimensionKey.ROWS;
/**
 * Action that hides the currently selected rows or columns of a heatmap
 * dimension and clears the selection afterwards.
 */
public class HideSelectionAction extends HeatmapDimensionAction implements IHeatmapDimensionAction {

    private static final long serialVersionUID = 1453040322414160605L;

    /**
     * @param key the dimension (rows or columns) this action operates on
     */
    public HideSelectionAction(MatrixDimensionKey key) {
        // NOTE(review): the title ends with "<html>" rather than "</html>";
        // kept as-is since Swing renders it and changing it would alter output.
        super(key, "<html><i>Hide</i> selected<html>");
        setSmallIconFromResource(IconNames.get(key).getHide16());
        setLargeIconFromResource(IconNames.get(key).getHide24());
        setMnemonic(key == ROWS ? KeyEvent.VK_W : KeyEvent.VK_O);
    }

    /**
     * Enabled only for heatmap models with a non-empty selection in this dimension.
     */
    @Override
    public boolean isEnabledByModel(Object model) {
        if (model instanceof Heatmap) {
            return !getDimension().getSelected().isEmpty();
        }
        return false;
    }

    /**
     * Hides the selected items, clears the (now hidden) selection and notifies
     * the user.
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        HeatmapDimension dimension = getDimension();
        dimension.hide(dimension.getSelected());
        dimension.getSelected().clear();
        Application.get().showNotification("Selected " + getDimensionLabel() + "s hidden");
    }

    @Override
    public void onConfigure(HeatmapDimension dimension, HeatmapPosition position) {
        // Enable only if there is at least one item selected; uses isEmpty()
        // for consistency with isEnabledByModel().
        setEnabled(!dimension.getSelected().isEmpty());
    }
}
| gpl-3.0 |
Thiht/docktor | model/sites/sites.go | 1671 | package sites
import (
"github.com/soprasteria/docktor/model/types"
mgo "gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
)
// Repo is the repository for projects
// Repo is the MongoDB-backed repository for sites.
type Repo struct {
	Coll *mgo.Collection
}
// Save a site into a database
// Save upserts a site into the database, assigning a fresh ObjectId when the
// site does not have one yet. Returns the (possibly updated) site.
func (r *Repo) Save(site types.Site) (types.Site, error) {
	if site.ID.Hex() == "" {
		site.ID = bson.NewObjectId()
	}
	_, err := r.Coll.UpsertId(site.ID, bson.M{"$set": site})
	return site, err
}
// Delete a site in database
// Delete removes the site with the given id from the database.
// It returns the id together with any error reported by the driver.
func (r *Repo) Delete(id bson.ObjectId) (bson.ObjectId, error) {
	return id, r.Coll.RemoveId(id)
}
// FindByID get the site by its id
// FindByID gets the site by its id (hex-encoded ObjectId).
// An invalid hex id yields mgo.ErrNotFound instead of the panic that
// bson.ObjectIdHex would raise.
func (r *Repo) FindByID(id string) (types.Site, error) {
	result := types.Site{}
	if !bson.IsObjectIdHex(id) {
		return result, mgo.ErrNotFound
	}
	err := r.Coll.FindId(bson.ObjectIdHex(id)).One(&result)
	return result, err
}
// FindByIDBson get the site by its id (as a bson object)
// FindByIDBson fetches the site whose id matches the given bson ObjectId.
func (r *Repo) FindByIDBson(id bson.ObjectId) (types.Site, error) {
	site := types.Site{}
	err := r.Coll.FindId(id).One(&site)
	return site, err
}
// Find get the first site with a given title
// Find returns the first site with the given title.
func (r *Repo) Find(title string) (types.Site, error) {
	site := types.Site{}
	err := r.Coll.Find(bson.M{"title": title}).One(&site)
	return site, err
}
// FindAll get all sites
// FindAll returns every site in the collection.
func (r *Repo) FindAll() ([]types.Site, error) {
	sites := []types.Site{}
	err := r.Coll.Find(bson.M{}).All(&sites)
	return sites, err
}
// Drop drops the content of the collection
// Drop drops the whole underlying MongoDB collection (all sites are lost).
func (r *Repo) Drop() error {
	return r.Coll.DropCollection()
}
| gpl-3.0 |
DivineOmega/DecentMessaging | src/main/Main.java | 11891 | package main;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.math.BigInteger;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import java.util.ArrayList;
import java.util.Enumeration;
import javax.swing.UIManager;
import main.factory.PrivateKeyFactory;
import main.factory.PublicKeyFactory;
import main.gui.GUIUpdater;
import main.gui.MainWindow;
import main.network.Bootstrapper;
import main.network.LocalWebServer;
import main.network.MessageRelayer;
import main.network.NodeRelayer;
import main.network.PeerServer;
import org.apache.commons.codec.binary.Base64;
public class Main
{
	// Local HTTP interface served to the user's browser.
	public static LocalWebServer localWebServer1;
	// Server accepting connections from other Decent Messaging peers.
	public static PeerServer peerServer1;
	// This node's own DM address ("A,<base64 modulus>,<base64 exponent>").
	public static String dmAddress = null;
	// Root directory for database, messages and keys (set in main()).
	public static String storageDirectory = null;
	public static MainWindow mainWindow = null;
	// Default ports; overridable via --peer-server-port / --local-server-port.
	public static int peerServerPort = 9991;
	public static int localWebServerPort = 7771;
public static void main(String[] args)
{
System.setProperty("line.separator", "\n");
System.out.println("*** Decent Messaging ***");
System.out.println("Interpreting command line parameters...");
storageDirectory = System.getProperty("user.home") + System.getProperty("file.separator") +
".decentmessaging" + System.getProperty("file.separator");
boolean showGUI = true;
for (int i = 0; i < args.length; i++) {
String arg = args[i];
if (arg.equalsIgnoreCase("--peer-server-port")) {
if (args.length >= i) {
try {
peerServerPort = Integer.parseInt(args[i+1]);
} catch (NumberFormatException e) {
System.out.println("Invalid peer server port number.");
System.exit(1);
}
}
} else if (arg.equalsIgnoreCase("--local-server-port")) {
if (args.length >= i) {
try {
localWebServerPort = Integer.parseInt(args[i+1]);
} catch (NumberFormatException e) {
System.out.println("Invalid local server port number.");
System.exit(1);
}
}
} else if (arg.equalsIgnoreCase("--hidden")) {
showGUI = false;
} else if (arg.equalsIgnoreCase("--portable")) {
storageDirectory = "." + System.getProperty("file.separator") +
".decentmessaging" + System.getProperty("file.separator");
}
}
if (!showGUI) {
System.out.println("Skipping display of GUI, as requested...");
} else {
System.out.println("Setting system look and feel...");
try
{
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
}
catch (Exception e)
{
System.out.println("Error setting look and feel.");
}
System.out.println("Displaying main window...");
mainWindow = new MainWindow();
mainWindow.show();
}
String directoryToCreate = Main.storageDirectory;
System.out.println("Checking/creating main directory... "+directoryToCreate);
if (!(new File(directoryToCreate)).exists() && !(new File(directoryToCreate)).mkdir())
{
System.out.println("Error creating directory at "+directoryToCreate);
System.exit(0);
}
directoryToCreate = Main.storageDirectory+"message";
System.out.println("Checking/creating message directory... "+directoryToCreate);
if (!(new File(directoryToCreate)).exists() &&!(new File(directoryToCreate)).mkdir())
{
System.out.println("Error creating directory at "+directoryToCreate);
System.exit(0);
}
directoryToCreate = Main.storageDirectory+"personal";
System.out.println("Checking/creating personal directory... "+directoryToCreate);
if (!(new File(directoryToCreate)).exists() &&!(new File(directoryToCreate)).mkdir())
{
System.out.println("Error creating directory at "+directoryToCreate);
System.exit(0);
}
DatabaseConnection dbconn = new DatabaseConnection();
System.out.println("Creating/opening database...");
if (!dbconn.connect())
{
System.out.println("Error opening database.");
System.exit(0);
}
System.out.println("Checking/setting up database schema...");
if (!dbconn.setupSchema())
{
System.out.println("Error setting up database schema.");
System.exit(0);
}
checkAndCreateKeyPair();
System.out.println("Starting local web server on port "+localWebServerPort+"...");
localWebServer1 = new LocalWebServer(localWebServerPort);
System.out.println("Starting peer server on port "+peerServerPort+"...");
peerServer1 = new PeerServer(peerServerPort);
peerServer1.start();
System.out.println("Starting message relayer...");
MessageRelayer messageRelayer1 = new MessageRelayer();
messageRelayer1.start();
System.out.println("Starting node relayer...");
NodeRelayer nodeRelayer1 = new NodeRelayer();
nodeRelayer1.start();
System.out.println("Starting bootstrapper...");
Bootstrapper bootstrapper1 = new Bootstrapper();
bootstrapper1.start();
System.out.println("Starting message decrypter...");
Decrypter decrypter1 = new Decrypter();
decrypter1.start();
System.out.println("Starting caretaker...");
Caretaker caretaker1 = new Caretaker();
caretaker1.start();
GUIUpdater guiUpdater = null;
if (showGUI) {
System.out.println("Starting GUI updater...");
guiUpdater = new GUIUpdater();
guiUpdater.start();
}
System.out.println("Start up complete.");
System.out.println("Thread monitoring starting...");
ArrayList<Thread> threadsToMonitor = new ArrayList<Thread>();
threadsToMonitor.add(peerServer1);
threadsToMonitor.add(messageRelayer1);
threadsToMonitor.add(nodeRelayer1);
threadsToMonitor.add(bootstrapper1);
threadsToMonitor.add(decrypter1);
threadsToMonitor.add(caretaker1);
if (guiUpdater!=null) {
threadsToMonitor.add(guiUpdater);
}
while(true) {
for (Thread thread : threadsToMonitor) {
if (!thread.isAlive()) {
System.out.println("Thread ID "+thread.getId()+" of type "+thread.getClass().getName()+" appears to have failed. Attempting to restart it...");
try {
Constructor<?>[] constructors = thread.getClass().getConstructors();
for (Constructor<?> constructor : constructors) {
if (constructor.getParameterTypes().length==0) {
thread = (Thread) constructor.newInstance();
} else {
if (constructor.getDeclaringClass().getName()=="main.network.PeerServer") {
thread = (Thread) constructor.newInstance(peerServerPort);
}
}
}
thread.start();
System.out.println("Succesfully restarted thread of type "+thread.getClass().getName()+" with Thread ID "+thread.getId()+".");
} catch (InvocationTargetException | InstantiationException | IllegalAccessException | IllegalArgumentException e) {
System.out.println("Error restarting a monitored thread. This node is not fully functional and should be restarted.");
e.printStackTrace();
}
}
}
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
	/**
	 * Ensures the node's main RSA key pair (id 1) exists, creating it when
	 * both halves are missing and aborting on an inconsistent state (only one
	 * half present). Finally derives and caches this node's DM address.
	 */
	private static void checkAndCreateKeyPair() {
		System.out.println("Checking main public/private keys...");
		if (PrivateKeyFactory.get(1)==null && PublicKeyFactory.get(1)==null)
		{
			System.out.println("Main public/private keys do not exist.");
			System.out.println("Creating new main public/private key pair...");
			try
			{
				createMainKeyPair();
			}
			catch (Exception e)
			{
				e.printStackTrace();
				System.out.println("Error creating new main public/private key pair.");
				System.exit(0);
			}
			System.out.println("Public/private key pair created.");
		}
		else if (PrivateKeyFactory.get(1)!=null && PublicKeyFactory.get(1)==null)
		{
			System.out.println("Consistency error. Main private key exists without corresponding public key.");
			System.exit(0);
		}
		else if (PrivateKeyFactory.get(1)==null && PublicKeyFactory.get(1)!=null)
		{
			System.out.println("Consistency error. Main public key exists without corresponding private key.");
			System.exit(0);
		}
		else
		{
			System.out.println("Main public/private key pair found.");
		}
		try
		{
			dmAddress = createDmAddress(PublicKeyFactory.get(1).modulus, PublicKeyFactory.get(1).exponent);
		}
		catch (UnsupportedEncodingException e)
		{
			e.printStackTrace();
		}
	}
private static void createMainKeyPair() throws NoSuchAlgorithmException, InvalidKeySpecException
{
KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
kpg.initialize(4096);
KeyPair kp = kpg.genKeyPair();
PublicKeyFactory.createNew(1, kp);
PrivateKeyFactory.createNew(1, kp);
}
public static String createDmAddress(BigInteger modulus, BigInteger exponent) throws UnsupportedEncodingException
{
String dmAddressVersion = "A";
String base64Modulus = new String(Base64.encodeInteger(modulus), "UTF-8");
String base64Exponent = new String(Base64.encodeInteger(exponent), "UTF-8");
return dmAddressVersion+","+base64Modulus+","+base64Exponent;
}
public static BigInteger getModulusFromDmAddress(String dmAddress) throws UnsupportedEncodingException
{
String[] dmAddressParts = dmAddress.split(",");
if (dmAddressParts.length<1) {
return null;
}
String dmAddressVersion = dmAddressParts[0];
if (dmAddressVersion.equals("A") && dmAddressParts.length==3) {
return Base64.decodeInteger(dmAddressParts[1].getBytes("UTF-8"));
}
return null;
}
public static BigInteger getExponentFromDmAddress(String dmAddress) throws UnsupportedEncodingException
{
String[] dmAddressParts = dmAddress.split(",");
if (dmAddressParts.length<1) {
return null;
}
String dmAddressVersion = dmAddressParts[0];
if (dmAddressVersion.equals("A") && dmAddressParts.length==3) {
return Base64.decodeInteger(dmAddressParts[2].getBytes("UTF-8"));
}
return null;
}
	/**
	 * Returns the IPv4 LAN (site-local) addresses of all connected,
	 * non-loopback network interfaces on this machine.
	 */
	public static ArrayList<InetAddress> getMyIPs() throws SocketException
	{
		// Collects the IP addresses found from all connected LAN network interfaces
		ArrayList<InetAddress> myIps = new ArrayList<InetAddress>();
		// Enumerate all the connected network interfaces on the current system
		Enumeration<NetworkInterface> e = NetworkInterface.getNetworkInterfaces();
		// Iterate through all network interfaces
		while (e.hasMoreElements())
		{
			// Assign current iteration network interface to temporary 'ni' variable
			NetworkInterface ni = (NetworkInterface) e.nextElement();
			// Skip loop back interfaces (local only)
			if (ni.isLoopback())
			{
				continue;
			}
			// Enumerate all IP addresses for the current iteration network interface
			Enumeration<InetAddress> e2 = ni.getInetAddresses();
			// Iterate through all IP addresses for the current iteration network interface
			while (e2.hasMoreElements())
			{
				// Assign current iteration IP address to temporary 'tmp' variable
				InetAddress tmp = e2.nextElement();
				// Check to ensure current iteration IP address is IPv4
				if (tmp.getClass().equals(Inet4Address.class))
				{
					// Narrowed reference to the IPv4 address
					InetAddress ip = (InetAddress) tmp;
					// Check to ensure IP address is a LAN IP address (rather than a WAN connection)
					// We do not want to do port scanning over an WAN interface
					if (ip.isSiteLocalAddress())
					{
						// Add IP address to list of current LAN IP addresses
						myIps.add(ip);
					}
				}
			}
		}
		return myIps;
	}
}
| gpl-3.0 |
charlesyao/FXS-Platform | fxs-platform-security-core/src/main/java/com/fxs/platform/security/core/properties/ImageCodeProperties.java | 525 | package com.fxs.platform.security.core.properties;
/**
* 图片验证码配置项
*
*/
/**
 * Configuration of the image (captcha) verification code; inherits the
 * length/expiry settings from {@link SmsCodeProperties}.
 */
public class ImageCodeProperties extends SmsCodeProperties {
	public ImageCodeProperties() {
		// Image captchas default to 4 characters.
		setLength(4);
	}
	/**
	 * Image width in pixels
	 */
	private int width = 67;
	/**
	 * Image height in pixels
	 */
	private int height = 23;
	public int getWidth() {
		return width;
	}
	public void setWidth(int width) {
		this.width = width;
	}
	public int getHeight() {
		return height;
	}
	public void setHeight(int height) {
		this.height = height;
	}
}
| gpl-3.0 |
phoronix-test-suite/phoronix-test-suite | pts-core/objects/pts_env.php | 33417 | <?php
/*
Phoronix Test Suite
Copyright (C) 2021, Phoronix Media
Copyright (C) 2021, Michael Larabel
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
class pts_env
{
	// Runtime overrides set via pts_env::set()/set_array(); checked before the
	// real process environment in pts_env::read().
	protected static $overrides = array();
	// Registry of all recognized Phoronix Test Suite environment variables.
	// Each entry is keyed by the variable name and may contain:
	//   'description'              - human-readable documentation string
	//   'default'                  - default value ('' / false when unset)
	//   'usage'                    - array of contexts the variable applies to
	//                                (all, install, benchmark, stress_run,
	//                                 result_output, modules, ...)
	//   'value_type'               - bool, string, positive_integer, enum, enum_multi
	//   'enum'                     - allowed values for enum/enum_multi types
	//   'module'                   - PTS module providing the functionality;
	//                                auto-attached by pts_env::set()
	//   'onchange'                 - callable invoked with the value on set()
	//   'advertise_in_phoromatic'  - whether to expose the option in Phoromatic UIs
	protected static $env_vars = array(
		'NO_COLOR' => array(
			'description' => 'This option when enabled will force-disable the CLI/TUI text coloring. By default the Phoronix Test Suite will attempt to use CLI/TUI text colors and bolding of text for supported terminals.',
			'default' => '',
			'usage' => array('all'),
			'value_type' => 'bool',
			),
		'TERMINAL_WIDTH' => array(
			'description' => 'This option is used for overriding the detected default of the terminal width for the CLI/TUI interface.',
			'default' => '',
			'usage' => array('all'),
			'value_type' => 'positive_integer',
			),
		'PHODEVI_SANITIZE' => array(
			'description' => 'This option can be used for stripping out part of a string on Phodevi (Phoronix Device Interface) hardware/software properties. Namely around the reported hardware/software information in result files if wanting any values / portions of strings stripped out from that information, such as for confidential hardware strings or other privacy concerns, PHODEVI_SANITIZE can be set. The value will be removed from read Phodevi hardware/software properties if set. Multiple strings to search for can be set by delimiting with a comma. If wanting to limit the sanitization to a particular property, the property value can be specified such as [property]=[value] to sanitisze like a value of "motherboard=ABCVENDOR" or CPU=ENGINEERING-SAMPLE to delete those strings rather than simply the string to remove that will look for matches in any property."',
			'default' => '',
			'usage' => array('all'),
			'value_type' => 'string',
			'advertise_in_phoromatic' => true,
			'onchange' => 'phodevi::set_sanitize_string',
			),
		'PTS_SILENT_MODE' => array(
			'description' => 'This option when enabled will yield slightly less verbose Phoronix Test Suite terminal output by silencing unnecessary messages / prompts.',
			'default' => false,
			'usage' => array('all'),
			'value_type' => 'bool',
			),
		'PTS_DISPLAY_MODE' => array(
			'description' => 'If you wish to load a non-default display mode for a single instance, specify the mode in this variable as an alternative to adjusting the user configuration file.',
			'default' => '',
			'usage' => array('all'),
			'value_type' => 'enum',
			'enum' => array('BASIC', 'BATCH', 'CONCISE', 'SHORT', 'DEFAULT'),
			),
		'NO_PHODEVI_CACHE' => array(
			'description' => 'This option will disable use of the built-in Phodevi (Phoronix Device Interface) cache of system software/hardware details. When enabled, the information is not cached and will be re-computed on each query. This is mainly useful for debugging purposes.',
			'default' => false,
			'usage' => array('all'),
			'value_type' => 'bool',
			),
		'PTS_TEST_INSTALL_ROOT_PATH' => array(
			'description' => 'This option can be used for overriding where tests are installed to on the system. An absolute writable directory path can be the value if wanting to override the default (or user configuration file specified) test installation directory path.',
			'default' => '',
			'usage' => array('install', 'benchmark', 'stress_run'),
			'value_type' => 'string',
			),
		'TEST_RESULTS_NAME' => array(
			'description' => 'This option can be used for specifying the result file name for saving the test/benchmark results automatically to the given name.',
			'default' => '',
			'usage' => array('benchmark', 'stress_run'),
			'value_type' => 'string',
			),
		'TEST_RESULTS_IDENTIFIER' => array(
			'description' => 'This option can be used for specifying the result identifier for distinguishing this run within the saved result file.',
			'default' => '',
			'usage' => array('benchmark', 'stress_run'),
			'value_type' => 'string',
			),
		'TEST_RESULTS_DESCRIPTION' => array(
			'description' => 'This option can be used for specifying the result file description for saving that string and not be prompted for providing a description during the test execution process.',
			'default' => '',
			'usage' => array('benchmark', 'stress_run'),
			'value_type' => 'string',
			),
		'PTS_EXTRA_SYSTEM_LOGS_DIR' => array(
			'description' => 'By default the Phoronix Test Suite collects common system logs (cpuinfo, lscpu, dmesg) during the benchmarking process when saving test results. If wanting to collect additional, arbitrary system log files specific to your operating environment or for other niche system information, this option can be set as a path to a directory containing such log files. Prior to running the Phoronix Test Suite simply set PTS_EXTRA_SYSTEM_LOGS_DIR to the directory where any files should be captured from following test completion.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'string',
			'advertise_in_phoromatic' => true,
			),
		'TEST_EXECUTION_SORT' => array(
			'description' => 'This option can be used for controlling the sort order that the test profiles / benchmarks are run in, whether sorted or not and in what manner.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'enum',
			'enum' => array('none', 'random', 'dependencies', 'test-estimated-time', 'test-estimated-time-desc', 'test', 'default'),
			'advertise_in_phoromatic' => true,
			),
		'TEST_EXEC_PREPEND' => array(
			'description' => 'This option can be used if wanting to specify a binary (e.g. sudo, cgroup or other resource limiting binaries or performance counters) to be called as the binary pre-pended prior to running a test profile binary/script. This option is namely used for specialized use-cases.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'string',
			),
		'FORCE_TIMES_TO_RUN' => array(
			'description' => 'This option can be used to override the default number of times a given test is run. Rather than being specified by the individual test profile, FORCE_TIMES_TO_RUN allows for specifying the number of times to run each benchmark.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			'advertise_in_phoromatic' => true,
			),
		'FORCE_MIN_TIMES_TO_RUN' => array(
			'description' => 'This option is similar to FORCE_TIMES_TO_RUN but is used for specifying the minimum possible number of times to run. Unlike FORCE_TIMES_TO_RUN, the run count can still exceed this value if the deviation between results or other factors are too high.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			'advertise_in_phoromatic' => true,
			),
		'FORCE_MIN_TIMES_TO_RUN_CUTOFF' => array(
			'description' => 'Used in conjunction with the FORCE_MIN_TIMES_TO_RUN, the FORCE_MIN_TIMES_TO_RUN_CUTOFF can be used for specifyingg the amount of time (in minutes) before foregoing additional runs. This allows cutting off the testing early if this time threshold has been reached.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			),
		'FORCE_ABSOLUTE_MIN_TIMES_TO_RUN' => array(
			'description' => 'This option is similar to FORCE_MIN_TIMES_TO_RUN but is *absolute* in ensuring each test will run at least that number of times and not subject to change of any timed cut-offs or other factors.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			),
		'FORCE_TIMES_TO_RUN_MULTIPLE' => array(
			'description' => 'This option is similar to FORCE_TIMES_TO_RUN but the value is a multiple for how many times the test profile should be run respective to its default value. If the value is set to 2 and a given test profile by default is set to run 3 times, it would now instead be run a total of 6 times. This can be used for increasing the statistical significance of test results by using a multiple of the default rather than a static number as is the case with FORCE_TIMES_TO_RUN.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			),
		'IGNORE_RUNS' => array(
			'description' => 'This option can be used if wanting the Phoronix Test Suite to automatically toss out a specified result position when running a test profile multiple times. E.g. setting this value to 1 will toss out automatically the first run of each test profile or a value of 3 will toss out the third run of a given test. This overrides the IgnoreRuns option also available to individual test profiles. Multiple values for runs to ignore can be specified by delimiting with a comma.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'string',
			),
		'FORCE_MIN_DURATION_PER_TEST' => array(
			'description' => 'This option can be used to specify the minimum number of times to run a given benchmark. Rather than relying on a static times-to-run count, the test will keep looping until the time has exceeded this number (in minutes).',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			'advertise_in_phoromatic' => true,
			),
		'PRESET_OPTIONS' => array(
			'description' => 'PRESET_OPTIONS can be used for seeding the values of test profile run options from the environment (though the preferred approach for pre-configuring tests in an automated manner would be by constructing your own local test suite). For setting any test option(s) from an environment variable rather than being prompted for the options when running a test. Example: "PRESET_OPTIONS=\'stream.run-type=Add\' phoronix-test-suite benchmark stream".',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'string',
			),
		'PRESET_OPTIONS_VALUES' => array(
			'description' => 'This option is similar to PRESET_OPTIONS and uses the same syntax but rather than seeding the selected run option it uses the value verbatim as for what is passed to the test profile run option.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'string',
			),
		'PTS_CONCURRENT_TEST_RUNS' => array(
			'description' => 'This option is used in the stress run/benchmarking mode to indicate the number of tests to run concurrently as part of the stress run process.',
			'default' => false,
			'usage' => array('stress_run'),
			'value_type' => 'positive_integer',
			),
		'TOTAL_LOOP_TIME' => array(
			'description' => 'This option is used to specify the amount of time (in minutes) to loop the testing during the Phoronix Test Suite stress run or normal benchmarking process.',
			'default' => '',
			'usage' => array('stress_run', 'benchmark'),
			'value_type' => 'positive_integer',
			),
		'TOTAL_LOOP_COUNT' => array(
			'description' => 'This option is used to specify a multiple if wishing to run each test multiple times rather than just once per saved result file.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			),
		'LIMIT_ELAPSED_TEST_TIME' => array(
			'description' => 'This option can be used for limiting the amount of time the benchmarking process runs. The value specified is the number of minutes to allow for benchmarking. After a test finishes if that number of minutes has been exceeded, the testing process will abort early and not run any remaining tests.',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			'advertise_in_phoromatic' => true,
			),
		'DONT_BALANCE_TESTS_FOR_SUBSYSTEMS' => array(
			'description' => 'If this value is true, the Phoronix Test Suite stress-run manager will not attempt to distribute the selected test(s) among available hardware subsystems. For stress runs with tests covering multiple subsystems (e.g. CPU, GPU, RAM), the default behavior is try to ensure the tests to run concurrently are as balanced across the tested subsystems as possible.',
			'default' => false,
			'usage' => array('stress_run'),
			'value_type' => 'bool',
			),
		'DONT_TRY_TO_ENSURE_TESTS_ARE_UNIQUE' => array(
			'description' => 'When running in the stress-run mode, the default behavior will try to ensure when tests are running concurrently that as many unique tests as possible are being run. Setting this value to try will avoid that check and just attempt to truly randomize the tests being run concurrently without regard for trying to avoid duplicates.',
			'default' => false,
			'usage' => array('stress_run'),
			'value_type' => 'bool',
			),
		'OUTPUT_FILE' => array(
			'description' => 'When exporting a result file, this option can be used for specifying the file name / file path and name of where to save the exported result file to rather than assuming the user home directory.',
			'default' => '',
			'usage' => array('result_output'),
			'value_type' => 'string',
			),
		'OUTPUT_DIR' => array(
			'description' => 'When exporting a result file, this option can be used for specifying the writable directory path where the exported result files should be saved to. The file-name will be automatically generated.',
			'default' => '',
			'usage' => array('result_output'),
			'value_type' => 'string',
			),
		'GRAPH_HIGHLIGHT' => array(
			'description' => 'If automatically generating an HTML or PDF result file from the command-line and wanting to highlight desired result identifier(s), GRAPH_HIGHLIGHT can be set to a comma delimited list of result identifiers to highlight / color differently than the rest.',
			'default' => '',
			'usage' => array('result_output'),
			'value_type' => 'string',
			),
		'SORT_BY' => array(
			'description' => 'This option can be used for specifying the sort order for commands like auto-sort-result-file whether to sort by identifier name, test length, etc.',
			'default' => 'identifier',
			'usage' => array('auto_sort_result_file'),
			'value_type' => 'enum',
			'enum' => array('date', 'date-asc', 'date-desc', 'identifier'),
			),
		'NO_HTTPS' => array(
			'description' => 'Enable this option if wanting the Phoronix Test Suite when downloading resources to attempt to only use HTTP without any HTTPS connections. Note: some downloads may fail for servers that only support HTTPS.',
			'default' => false,
			'usage' => array('all'),
			'value_type' => 'bool',
			),
		'NO_DOWNLOAD_CACHE' => array(
			'description' => 'Enable this option if the Phoronix Test Suite should not attempt to discover and use any local/remote Phoronix Test Suite download cache when installing tests and attempting to find those files locally or on a LAN resource.',
			'default' => false,
			'usage' => array('install'),
			'value_type' => 'bool',
			),
		'NO_FILE_HASH_CHECKS' => array(
			'description' => 'Enable this option if you want to skip the MD5 / SHA256 file hash checks after downloading files with known MD5/SHA256 hashsums for verification. This is namely useful for select debugging scenarios and other situations where a file may have been trivially changed / re-packaged and wishing to still install a test even though the hash no longer matches until the test profile has been updated.',
			'default' => false,
			'usage' => array('install'),
			'value_type' => 'bool',
			),
		'SKIP_TEST_SUPPORT_CHECKS' => array(
			'description' => 'This debugging/validation option will have the Phoronix Test Suite skip any test support checks for a test profile (architecture compatibility, OS compatibility, etc) and just assume all tests are supported.',
			'default' => false,
			'usage' => array('install', 'benchmark'),
			'value_type' => 'bool',
			),
		'NO_COMPILER_MASK' => array(
			'description' => 'By default the Phoronix Test Suite attempts to determine the intended system code compilers (namely C / C++ / Fortran) and to intercept the arguments being passed to them during test installation in order to record the prominent compiler flags being used. If this behavior causes problems for your system, NO_COMPILER_MASK can be enabled for debugging purposes to avoid this compiler intercepting/symlinking behavior.',
			'default' => false,
			'usage' => array('install'),
			'value_type' => 'bool',
			),
		'NO_EXTERNAL_DEPENDENCIES' => array(
			'description' => 'Enabling this option will have the Phoronix Test Suite skip over attempting to detect and install any system/external dependencies needed to run desired test profiles. This should just be used in case of testing/evaluation purposes and may leave some tests unable to successfully build/install.',
			'default' => false,
			'usage' => array('install'),
			'value_type' => 'bool',
			),
		'SKIP_EXTERNAL_DEPENDENCIES' => array(
			'description' => 'Rather than NO_EXTERNAL_DEPENDENCIES to outright disable the Phoronix Test Suite external dependency handling, SKIP_EXTERNAL_DEPENDENCIES can be used with a value of a comma separated list of specific external dependencies to avoid. This is mostly useful for any external dependencies that may be out of date or fail to install on your platform.',
			'default' => '',
			'usage' => array('install'),
			'value_type' => 'string',
			),
		'PTS_DOWNLOAD_CACHE' => array(
			'description' => 'PTS_DOWNLOAD_CACHE can be used for setting a path to a directory on the system containing a Phoronix Test Suite download cache if located outside one of the default locations.',
			'default' => '',
			'usage' => array('install'),
			'value_type' => 'string',
			),
		'SKIP_TESTS' => array(
			'description' => 'SKIP_TESTS will skip the test installation and execution of any test identifiers specified by this option. Multiple test identifiers can be specified, delimited by a comma.',
			'default' => '',
			'usage' => array('install', 'benchmark'),
			'value_type' => 'string',
			),
		'SKIP_TESTS_HAVING_ARGS' => array(
			'description' => 'SKIP_TESTS_HAVING_ARGS will skip the test installation and execution of any tests where the specified test arguments match the given string. E.g. if wanting to skip all Vulkan tests in a result file but run just the OpenGL tests or similar where wanting to limit the tests being run from within a result file. Multiple values can be specified when delimited by a comma.',
			'default' => '',
			'usage' => array('install', 'benchmark'),
			'value_type' => 'string',
			),
		'SKIP_TESTING_SUBSYSTEMS' => array(
			'description' => 'This option is similar to SKIP_TESTS but allows for specifying hardware subsystems (e.g. Graphics) to skip from installing/running any test profiles beloning to that subsystem type. Multiple subsystems can be specified when delimited by a comma.',
			'default' => '',
			'usage' => array('install', 'benchmark'),
			'value_type' => 'string',
			),
		'PTS_MODULE_SETUP' => array(
			'description' => 'This option can be used for seeding a module\'s settings when running the phoronix-test-suite module-setup command. An example would be: "PTS_MODULE_SETUP=\'phoromatic.remote_host=http://www.phoromatic.com/; phoromatic.remote_account=123456; phoromatic.remote_verifier=ABCD\' phoronix-test-suite module-setup phoromatic".',
			'default' => '',
			'usage' => array('modules'),
			'value_type' => 'string',
			),
		'PTS_MODULES' => array(
			'description' => 'This option can be used for specifying a comma-separated list of Phoronix Test Suite modules to load at start-time, complementary to the modules specified in the user configuration file. PTS_MODULES is namely used for development purposes or wanting to temporarily enable a given module.',
			'default' => '',
			'usage' => array('modules'),
			'value_type' => 'string',
			),
		'PTS_IGNORE_MODULES' => array(
			'description' => 'Enabling this option can be used for temporarily disabling Phoronix Test Suite modules from being loaded on a given run. This is primarily for debugging purposes.',
			'default' => false,
			'usage' => array('modules'),
			'value_type' => 'bool',
			),
		'TEST_TIMEOUT_AFTER' => array(
			'description' => 'When this variable is set, the value will can be set to "auto" or a positive integer. The value indicates the number of minutes until a test run should be aborted, such as for a safeguard against hung/deadlocked processes or other issues. Setting this to a high number as a backup would be recommended for fending off possible hangs / stalls in the testing process if the test does not quit. If the value is "auto", it will quit if the time of a test run exceeds 3x the average time it normally takes the particular test to complete its run. In the future, auto might be enabled by default in a future PTS release. This functionality requires system PHP PCNTL support (i.e. no Windows support).',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			'module' => 'test_timeout',
			'advertise_in_phoromatic' => true,
			),
		'MONITOR' => array(
			'description' => 'This option can be used for system sensor monitoring during test execution. The Phoronix Test Suite system_monitor module can monitor various exposed sensors and record them as part of the result file and present them as additional graphs / metrics in the result viewer. The exposed sensors varies by platform hardware/software. This functionality also requires PHP PCNTL support and thus is not available for some platforms (i.e. Windows).',
			'default' => '',
			'usage' => array('benchmark'),
			'value_type' => 'enum_multi',
			'enum' => array('all', 'cpu.peak-freq', 'cpu.temp', 'cpu.power', 'cpu.usage', 'gpu.freq', 'gpu.power', 'gpu.temp', 'hdd.temp', 'memory.usage', 'swap.usage', 'sys.power', 'sys.temp'),
			'module' => 'system_monitor',
			'advertise_in_phoromatic' => true,
			),
		'LINUX_PERF' => array(
			'description' => 'This option allows providing additional complementary per-test graphs looking at various Linux perf subsystem metrics such as cache usage, instructions executed, and other metrics. This requires you to have Linux\'s perf user-space utility already installed and performance counter access.',
			'default' => false,
			'usage' => array('benchmark'),
			'value_type' => 'bool',
			'module' => 'linux_perf',
			'advertise_in_phoromatic' => true,
			),
		'TURBOSTAT_LOG' => array(
			'description' => 'This option allows attaching "turbostat" outputs to the end of archived benchmark/test log files if interested in the Linux TurboStat information. This assumes you have turbostat available on the Linux system(s) and have permissions (root) for running turbostat.',
			'default' => false,
			'usage' => array('benchmark'),
			'value_type' => 'bool',
			'module' => 'turbostat',
			'advertise_in_phoromatic' => true,
			),
		'WATCHDOG_SENSOR' => array(
			'description' => 'This option will enable the watchdog module that checks system sensor values pre/interim/post benchmark execution. If the selected sensor(s) exceed the static threshold level, testing will be paused before continuing to any additional tests so that the system can sleep. Ideally this will allow the system to return to a more suitable state before resuming testing after the sensor value is back below the threshold or after a pre-defined maximum time limit to spend sleeping. This module is mostly focused on pausing testing should system core temperatures become too elevated to allow time for heat dissipation.',
			'default' => false,
			'usage' => array('benchmark'),
			'value_type' => 'enum_multi',
			'enum' => array('cpu.temp', 'gpu.temp', 'hdd.temp', 'sys.temp'),
			'module' => 'watchdog',
			'advertise_in_phoromatic' => true,
			),
		'WATCHDOG_SENSOR_THRESHOLD' => array(
			'description' => 'Used in conjunction with the WATCHDOG_SENSOR option, the WATCHDOG_SENSOR_THRESHOLD specifies the threshold for the sensor reading when the testing should be paused (e.g. the Celsius cut-off temperature).',
			'default' => false,
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			'module' => 'watchdog',
			'advertise_in_phoromatic' => true,
			),
		'WATCHDOG_MAXIMUM_WAIT' => array(
			'description' => 'Used in conjunction with the WATCHDOG_SENSOR option, this is the maximum amount of time to potentially wait when the watchdog is triggered for surpassing the threshold value. The value is the maximum number of minutes to wait being above the threshold.',
			'default' => false,
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			'module' => 'watchdog',
			'advertise_in_phoromatic' => true,
			),
		'REMOVE_TESTS_OLDER_THAN' => array(
			'description' => 'This option with the cleanup module can be used for automatically un-installing/removing installed tests if they have not been run in a period of time. The value for REMOVE_TESTS_OLDER_THAN is the number of days the test can be installed without running until this module will clean-up/remove older tests.',
			'default' => false,
			'usage' => array('benchmark'),
			'value_type' => 'positive_integer',
			'module' => 'cleanup',
			'advertise_in_phoromatic' => true,
			),
		'REMOVE_TESTS_ON_COMPLETION' => array(
			'description' => 'When this option is set to true, installed test profiles will be automatically removed/uninstalled when they are no longer in the current test execution queue. This is used for saving disk space / resources by automatically removing installed tests after they have been executed. For more persistent behavior is the RemoveTestInstallOnCompletion option within the Phoronix Test Suite user configuration file.',
			'default' => false,
			'usage' => array('benchmark'),
			'value_type' => 'bool',
			'advertise_in_phoromatic' => true,
			),
		);
public static function read($name, &$overrides = null, $fallback_value = false)
{
if(isset(self::$overrides[$name]))
{
return self::$overrides[$name];
}
return getenv($name);
}
	/**
	 * Set/override an environment variable for this client instance.
	 *
	 * Besides recording the override, this attaches the PTS module backing the
	 * variable (if any) and invokes the variable's 'onchange' callback with the
	 * new value. The module attach happens before the onchange callback.
	 *
	 * @param string $name  Environment variable name (ideally one registered in self::$env_vars).
	 * @param mixed  $value Value to record as the override.
	 */
	public static function set($name, $value)
	{
		if(!isset(self::$env_vars[$name]))
		{
			// Unknown variables are currently accepted silently; the notice below is disabled.
			// trigger_error($name . ' is not a recognized Phoronix Test Suite environment variable.', E_USER_NOTICE);
		}
		if(PTS_IS_CLIENT && isset(self::$env_vars[$name]['module']) && !pts_module_manager::is_module_attached(self::$env_vars[$name]['module']))
		{
			// Ensure module is loaded
			pts_module_manager::attach_module(self::$env_vars[$name]['module']);
		}
		if(PTS_IS_CLIENT && isset(self::$env_vars[$name]['onchange']) && !empty(self::$env_vars[$name]['onchange']) && is_callable(self::$env_vars[$name]['onchange']))
		{
			// Call the passed function with the value being set
			call_user_func(self::$env_vars[$name]['onchange'], $value);
		}
		self::$overrides[$name] = $value;
	}
public static function set_array($to_set, $clear_overrides = false)
{
if($clear_overrides)
{
self::$overrides = array();
}
foreach($to_set as $name => $value)
{
self::set($name, $value);
}
}
	/**
	 * @return array All environment variable overrides recorded via set()/set_array(),
	 *               keyed by variable name.
	 */
	public static function get_overrides()
	{
		return self::$overrides;
	}
public static function remove($name)
{
if(isset(self::$overrides[$name]))
{
unset(self::$overrides[$name]);
}
}
	/**
	 * Return the registry of recognized environment variables, optionally filtered.
	 *
	 * @param mixed $limit FALSE for all variables; the string 'phoromatic' as a
	 *                     shorthand for array('advertise_in_phoromatic' => true);
	 *                     or an array of attribute => required-value pairs that
	 *                     every returned variable must match.
	 * @return array Matching entries from self::$env_vars, sorted by variable name.
	 */
	public static function read_possible_vars($limit = false)
	{
		$possible_vars = self::$env_vars;
		if($limit)
		{
			if($limit == 'phoromatic')
			{
				// Shorthand filter: only variables advertised for the Phoromatic UI
				$limit = array('advertise_in_phoromatic' => true);
			}
			if(is_array($limit))
			{
				foreach($possible_vars as $key => $var_check)
				{
					foreach($limit as $index => $desired_value)
					{
						// Drop any variable missing a required attribute or whose value differs
						if(!isset($possible_vars[$key][$index]) || $possible_vars[$key][$index] != $desired_value)
						{
							unset($possible_vars[$key]);
							break;
						}
					}
				}
			}
		}
		ksort($possible_vars);
		return $possible_vars;
	}
	/**
	 * Generate human-readable documentation for all recognized environment variables.
	 *
	 * @param bool $for_terminal TRUE for colored/bold terminal output, FALSE for HTML markup.
	 * @return string The assembled documentation text.
	 */
	public static function get_documentation($for_terminal = true)
	{
		$docs = '';
		foreach(pts_env::read_possible_vars() as $var => $data)
		{
			// Variable name heading, plus the currently-set value when in terminal mode
			if($for_terminal)
			{
				$docs .= PHP_EOL . pts_client::cli_just_bold($var);
				if(pts_env::read($var))
				{
					$docs .= ': ' . pts_client::cli_colored_text(pts_env::read($var), 'green', true);
				}
				$docs .= PHP_EOL;
				$docs .= pts_client::cli_just_italic($data['description']) . PHP_EOL;
			}
			else
			{
				$docs .= PHP_EOL . '<h2>' . $var . '</h2>' . PHP_EOL;
				$docs .= '<p><em>' . $data['description'] . '</em></p>' . PHP_EOL;
			}
			if(isset($data['default']) && !empty($data['default']))
			{
				if($for_terminal)
				{
					$docs .= pts_client::cli_just_bold('Default Value: ') . $data['default'] . PHP_EOL;
				}
				else
				{
					$docs .= '<p><strong>Default Value:</strong> ' . $data['default'] . '</p>' . PHP_EOL;
				}
			}
			if(!$for_terminal)
			{
				$docs .= '<p>';
			}
			// Describe the accepted value type in plain language
			if(isset($data['value_type']) && !empty($data['value_type']))
			{
				$value_type = '';
				switch($data['value_type'])
				{
					case 'bool':
						$value_type = 'boolean (TRUE / FALSE)';
						break;
					case 'string':
						$value_type = 'string';
						break;
					case 'positive_integer':
						$value_type = 'positive integer';
						break;
					case 'enum':
					case 'enum_multi':
						$value_type = 'enumeration' . (isset($data['enum']) ? ' (' . implode(', ', $data['enum']) . ')' : '');
						if($data['value_type'] == 'enum_multi')
						{
							$value_type .= PHP_EOL . 'Multiple options can be supplied when delimited by a comma.';
						}
						break;
				}
				if(!empty($value_type))
				{
					$docs .= 'The value can be of type: ' . $value_type . '.' . PHP_EOL;
				}
			}
			// Translate the internal usage tags into reader-friendly phrases
			if(isset($data['usage']) && !empty($data['usage']))
			{
				$usages = array();
				foreach($data['usage'] as $u)
				{
					switch($u)
					{
						case 'install':
							$usages[] = 'test installation';
							break;
						case 'benchmark':
							$usages[] = 'test execution / benchmarking';
							break;
						case 'stress_run':
							$usages[] = 'stress-run mode';
							break;
						case 'result_output':
							$usages[] = 'result output generation';
							break;
						case 'modules':
							$usages[] = 'modules';
							break;
					}
				}
				if(!empty($usages))
				{
					$docs .= 'The variable is relevant for: ' . implode(', ', $usages) . '.' . PHP_EOL;
				}
			}
			if(isset($data['module']) && !empty($data['module']))
			{
				$docs .= 'The variable depends upon functionality provided by the Phoronix Test Suite module: ' . $data['module'] . '.' . PHP_EOL;
			}
			if(!$for_terminal)
			{
				$docs .= '</p>';
			}
		}
		return $docs;
	}
public static function get_html_options($limit = false, $preset_defaults = array())
{
$html = '';
foreach(pts_env::read_possible_vars($limit) as $var => $data)
{
$html .= PHP_EOL . '<h3>' . $var . '</h3>' . PHP_EOL;
$html .= '<p><em>' . $data['description'] . '</em></p>' . PHP_EOL;
$default_value = isset($data['default']) && !empty($data['default']) ? $data['default'] : '';
if(isset($_REQUEST[$var]))
{
$default_value = strip_tags($_REQUEST[$var]);
}
else if(isset($preset_defaults[$var]))
{
$default_value = $preset_defaults[$var];
}
$html .= '<p>';
$enum = array();
switch((isset($data['value_type']) ? $data['value_type'] : ''))
{
case 'bool':
$enum = array('TRUE', 'FALSE');
$default_value = strtoupper($default_value);
case 'enum':
if(isset($data['enum']))
{
$enum = $data['enum'];
}
$html .= '<select name="' . $var . '"><option value="0">[Not Set]</option>';
foreach($enum as $e)
{
$html .= '<option value="' . $e . '"' . (strtoupper($default_value) == strtoupper($e) ? ' selected="selected"' : '') . '>' . $e . '</option>';
}
$html .= '</select>';
break;
case 'enum_multi':
if(isset($data['enum']))
{
if(!empty($default_value) && !is_array($default_value))
{
$default_value = explode($default_value, ',');
}
foreach($data['enum'] as $e)
{
$html .= '<input type="checkbox" name="' . $var . '[]" value="' . $e . '" ' . (is_array($default_value) && in_array($e, $default_value) ? 'checked="checked"' : '') . ' /> ' . $e . '<br />';
}
}
break;
case 'positive_integer':
$html .= '<input type="number" min="0" max="9999" step="1" name="' . $var . '" value="' . $default_value . '" />';
break;
case 'string':
default:
$html .= '<input name="' . $var . '" value="' . $default_value . '" />';
break;
}
$html .= '</p>';
}
return $html;
}
	/**
	 * Collect recognized environment variable values submitted via $_REQUEST.
	 *
	 * Array submissions (e.g. enum_multi checkbox groups) are flattened into a
	 * comma-delimited string. Each value is passed through strip_tags() and
	 * pts_strings::sanitize() before being returned.
	 *
	 * @param mixed $limit Optional filter passed through to read_possible_vars().
	 * @return array Map of variable name => sanitized submitted value.
	 */
	public static function get_posted_options($limit = false)
	{
		$posted = array();
		foreach(pts_env::read_possible_vars($limit) as $var => $data)
		{
			if(isset($_REQUEST[$var]))
			{
				if(is_array($_REQUEST[$var]))
				{
					// Strip markup from each element, then flatten to a comma-delimited string
					foreach($_REQUEST[$var] as &$rqv)
					{
						$rqv = strip_tags($rqv);
					}
					$v = implode(',', $_REQUEST[$var]);
				}
				else
				{
					// TODO add more validation handling checks... then again, PTS client has its own validation of the env vars
					$v = strip_tags($_REQUEST[$var]);
				}
				// Skip empty/zero submissions so unset form fields do not become overrides
				if(!empty($v) && $v !== 0)
				{
					$posted[$var] = pts_strings::sanitize($v);
				}
			}
		}
		return $posted;
	}
}
?>
| gpl-3.0 |
deathcap/BedrockAPI | src/main/java/org/bukkit/material/PistonExtensionMaterial.java | 930 | package org.bukkit.material;
import org.bukkit.Material;
import org.bukkit.block.BlockFace;
import org.bukkit.material.Attachable;
import org.bukkit.material.Directional;
import org.bukkit.material.MaterialData;
import org.bukkit.material.PistonExtensionMaterial;
/**
 * API stub for the piston extension material data.
 *
 * NOTE(review): every method body here is an empty / constant stub
 * (accessors return null or false); this class appears to exist only to
 * define the API surface, with the real implementation provided elsewhere.
 */
public class PistonExtensionMaterial extends MaterialData implements Attachable {
    /** @deprecated magic-value (raw type id) constructor. */
    @Deprecated public PistonExtensionMaterial(int type) {
    }
    public PistonExtensionMaterial(Material type) {
    }
    /** @deprecated magic-value (raw data byte) constructor. */
    @Deprecated public PistonExtensionMaterial(int type, byte data) {
    }
    /** @deprecated magic-value (raw data byte) constructor. */
    @Deprecated public PistonExtensionMaterial(Material type, byte data) {
    }
    // Stub: no-op in this API-only class.
    public void setFacingDirection(BlockFace face) {
    }
    // Stub: always returns null here.
    public BlockFace getFacing() {
        return null;
    }
    // Stub: always returns false here.
    public boolean isSticky() {
        return false;
    }
    // Stub: no-op in this API-only class.
    public void setSticky(boolean sticky) {
    }
    // Stub: always returns null here.
    public BlockFace getAttachedFace() {
        return null;
    }
    // Stub: returns null, which violates the usual clone() contract —
    // presumably acceptable because this is an API skeleton only.
    public PistonExtensionMaterial clone() {
        return null;
    }
}
| gpl-3.0 |
HexHive/datashield | libcxx/libcxx/test/std/utilities/function.objects/func.not_fn/not_fn.pass.cpp | 18694 | //===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// UNSUPPORTED: c++98, c++03, c++11, c++14
// template <class F> unspecified not_fn(F&& f);
#include <functional>
#include <type_traits>
#include <string>
#include <cassert>
#include "test_macros.h"
#include "type_id.h"
///////////////////////////////////////////////////////////////////////////////
// CALLABLE TEST TYPES
///////////////////////////////////////////////////////////////////////////////
// Plain free function used to exercise std::not_fn with a function pointer.
bool returns_true()
{
    return true;
}
// Move-only call object. Moving from it flips the source's `value`, so tests
// can observe that a move (rather than a copy) actually took place.
template <class Ret = bool>
struct MoveOnlyCallable {
  MoveOnlyCallable(MoveOnlyCallable const&) = delete;
  MoveOnlyCallable(MoveOnlyCallable&& other)
      : value(other.value)
  { other.value = !other.value; }
  template <class ...Args>
  Ret operator()(Args&&...) { return Ret{value}; }
  explicit MoveOnlyCallable(bool x) : value(x) {}
  Ret value;
};
// Copyable call object. Copying preserves `value`; moving flips the source's
// `value`, so copy vs. move is observable in the tests below.
template <class Ret = bool>
struct CopyCallable {
  CopyCallable(CopyCallable const& other)
      : value(other.value) {}
  CopyCallable(CopyCallable&& other)
      : value(other.value) { other.value = !other.value; }
  template <class ...Args>
  Ret operator()(Args&&...) { return Ret{value}; }
  explicit CopyCallable(bool x) : value(x) {}
  Ret value;
};
// Call object whose operator() is const-qualified only, for testing that the
// not_fn wrapper forwards const-ness correctly.
template <class Ret = bool>
struct ConstCallable {
  ConstCallable(ConstCallable const& other)
      : value(other.value) {}
  ConstCallable(ConstCallable&& other)
      : value(other.value) { other.value = !other.value; }
  template <class ...Args>
  Ret operator()(Args&&...) const { return Ret{value}; }
  explicit ConstCallable(bool x) : value(x) {}
  Ret value;
};
// Call object whose operator() overloads are declared noexcept, used to test
// the noexcept-propagation of the not_fn wrapper's call operator.
template <class Ret = bool>
struct NoExceptCallable {
  NoExceptCallable(NoExceptCallable const& other)
      : value(other.value) {}
  template <class ...Args>
  Ret operator()(Args&&...) noexcept { return Ret{value}; }
  template <class ...Args>
  Ret operator()(Args&&...) const noexcept { return Ret{value}; }
  explicit NoExceptCallable(bool x) : value(x) {}
  Ret value;
};
// Fully copyable/movable and assignable call object: the not_fn wrapper of
// this type should itself be copy- and move-assignable.
struct CopyAssignableWrapper {
  CopyAssignableWrapper(CopyAssignableWrapper const&) = default;
  CopyAssignableWrapper(CopyAssignableWrapper&&) = default;
  CopyAssignableWrapper& operator=(CopyAssignableWrapper const&) = default;
  CopyAssignableWrapper& operator=(CopyAssignableWrapper &&) = default;
  template <class ...Args>
  bool operator()(Args&&...) { return value; }
  explicit CopyAssignableWrapper(bool x) : value(x) {}
  bool value;
};
// Move-only, move-assignable call object: the not_fn wrapper of this type
// should be move-constructible/assignable but not copyable.
struct MoveAssignableWrapper {
  MoveAssignableWrapper(MoveAssignableWrapper const&) = delete;
  MoveAssignableWrapper(MoveAssignableWrapper&&) = default;
  MoveAssignableWrapper& operator=(MoveAssignableWrapper const&) = delete;
  MoveAssignableWrapper& operator=(MoveAssignableWrapper &&) = default;
  template <class ...Args>
  bool operator()(Args&&...) { return value; }
  explicit MoveAssignableWrapper(bool x) : value(x) {}
  bool value;
};
// Holder used to test not_fn with pointers to member functions (const and
// non-const) and pointers to member data.
struct MemFunCallable {
  explicit MemFunCallable(bool x) : value(x) {}
  bool return_value() const { return value; }
  bool return_value_nc() { return value; }
  bool value;
};
// Bitmask describing how a call operator was invoked: const-qualification
// combined with the value category of the object expression.
enum CallType : unsigned {
  CT_None,
  CT_NonConst = 1,
  CT_Const = 2,
  CT_LValue = 4,
  CT_RValue = 8
};
// Combine CallType flags (e.g. CT_Const | CT_RValue).
inline constexpr CallType operator|(CallType LHS, CallType RHS) {
    return static_cast<CallType>(static_cast<unsigned>(LHS) | static_cast<unsigned>(RHS));
}
// Callable that records, for each invocation, the const-qualification and
// value category of *this plus the exact (forwarded) argument types. Tests
// use set_call/check_call pairs to verify that std::not_fn's wrapper
// perfectly forwards to the wrapped object.
struct ForwardingCallObject {

  template <class ...Args>
  bool operator()(Args&&... args) & {
      set_call<Args&&...>(CT_NonConst | CT_LValue);
      return true;
  }

  template <class ...Args>
  bool operator()(Args&&... args) const & {
      set_call<Args&&...>(CT_Const | CT_LValue);
      return true;
  }

  // Rvalue overloads: record that the call operator was invoked on an rvalue.
  template <class ...Args>
  bool operator()(Args&&... args) && {
      set_call<Args&&...>(CT_NonConst | CT_RValue);
      return true;
  }

  template <class ...Args>
  bool operator()(Args&&... args) const && {
      set_call<Args&&...>(CT_Const | CT_RValue);
      return true;
  }

  // Record one invocation; asserts no earlier invocation is still unchecked.
  template <class ...Args>
  static void set_call(CallType type) {
      assert(last_call_type == CT_None);
      assert(last_call_args == nullptr);
      last_call_type = type;
      last_call_args = &makeArgumentID<Args...>();
  }

  // Verify (and reset) the recorded invocation against the expectation.
  template <class ...Args>
  static bool check_call(CallType type) {
      bool result =
           last_call_type == type
        && last_call_args
        && *last_call_args == makeArgumentID<Args...>();
      last_call_type = CT_None;
      last_call_args = nullptr;
      return result;
  }

  static CallType      last_call_type;
  static TypeID const* last_call_args;
};

CallType ForwardingCallObject::last_call_type = CT_None;
TypeID const* ForwardingCallObject::last_call_args = nullptr;
///////////////////////////////////////////////////////////////////////////////
// BOOL TEST TYPES
///////////////////////////////////////////////////////////////////////////////
// Bool-like type whose operator! counts its invocations, so tests can verify
// that std::not_fn applies logical negation exactly once per call. It is not
// publicly constructible/assignable; only the callable test types may create it.
struct EvilBool {
  static int bang_called;

  EvilBool(EvilBool const&) = default;
  EvilBool(EvilBool&&) = default;

  friend EvilBool operator!(EvilBool const& other) {
    ++bang_called;
    return EvilBool{!other.value};
  }

private:
  friend struct MoveOnlyCallable<EvilBool>;
  friend struct CopyCallable<EvilBool>;
  friend struct NoExceptCallable<EvilBool>;

  explicit EvilBool(bool x) : value(x) {}
  EvilBool& operator=(EvilBool const& other) = default;

public:
  bool value;
};

int EvilBool::bang_called = 0;
// Bool-like type that is only *explicitly* convertible to bool, to check that
// not_fn's result type can still be negated through the contextual conversion.
struct ExplicitBool {
  ExplicitBool(ExplicitBool const&) = default;
  ExplicitBool(ExplicitBool&&) = default;

  explicit operator bool() const { return value; }

private:
  friend struct MoveOnlyCallable<ExplicitBool>;
  friend struct CopyCallable<ExplicitBool>;

  explicit ExplicitBool(bool x) : value(x) {}
  ExplicitBool& operator=(bool x) {
      value = x;
      return *this;
  }

  bool value;
};
// Bool-like type whose operator! is noexcept, used to check that the not_fn
// wrapper's call operator is noexcept when both invocation and negation are.
struct NoExceptEvilBool {
  NoExceptEvilBool(NoExceptEvilBool const&) = default;
  NoExceptEvilBool(NoExceptEvilBool&&) = default;
  NoExceptEvilBool& operator=(NoExceptEvilBool const& other) = default;

  explicit NoExceptEvilBool(bool x) : value(x) {}

  friend NoExceptEvilBool operator!(NoExceptEvilBool const& other) noexcept {
    return NoExceptEvilBool{!other.value};
  }

  bool value;
};
// Verify the construction / copy / move / assignment properties of the object
// returned by std::not_fn: they mirror those of the wrapped callable, and the
// wrapped callable's state travels with copies/moves of the wrapper.
void constructor_tests()
{
    {
        using T = MoveOnlyCallable<bool>;
        T value(true);
        using RetT = decltype(std::not_fn(std::move(value)));
        static_assert(std::is_move_constructible<RetT>::value, "");
        static_assert(!std::is_copy_constructible<RetT>::value, "");
        static_assert(!std::is_move_assignable<RetT>::value, "");
        static_assert(!std::is_copy_assignable<RetT>::value, "");
        auto ret = std::not_fn(std::move(value));
        // test it was moved from
        assert(value.value == false);
        // test that ret() negates the original value 'true'
        assert(ret() == false);
        assert(ret(0, 0.0, "blah") == false);
        // Move ret and test that it was moved from and that ret2 got the
        // original value.
        auto ret2 = std::move(ret);
        assert(ret() == true);
        assert(ret2() == false);
        assert(ret2(42) == false);
    }
    {
        using T = CopyCallable<bool>;
        T value(false);
        using RetT = decltype(std::not_fn(value));
        static_assert(std::is_move_constructible<RetT>::value, "");
        static_assert(std::is_copy_constructible<RetT>::value, "");
        static_assert(!std::is_move_assignable<RetT>::value, "");
        static_assert(!std::is_copy_assignable<RetT>::value, "");
        auto ret = std::not_fn(value);
        // test that value is unchanged (copied not moved)
        assert(value.value == false);
        // test 'ret' has the original value
        assert(ret() == true);
        assert(ret(42, 100) == true);
        // move from 'ret' and check that 'ret2' has the original value.
        auto ret2 = std::move(ret);
        assert(ret() == false);
        assert(ret2() == true);
        assert(ret2("abc") == true);
    }
    {
        // Assignable wrapped type => assignable not_fn wrapper.
        using T = CopyAssignableWrapper;
        T value(true);
        T value2(false);
        using RetT = decltype(std::not_fn(value));
        static_assert(std::is_move_constructible<RetT>::value, "");
        static_assert(std::is_copy_constructible<RetT>::value, "");
        static_assert(std::is_move_assignable<RetT>::value, "");
        static_assert(std::is_copy_assignable<RetT>::value, "");
        auto ret = std::not_fn(value);
        assert(ret() == false);
        auto ret2 = std::not_fn(value2);
        assert(ret2() == true);
        ret = ret2;
        assert(ret() == true);
        assert(ret2() == true);
    }
    {
        // Move-only assignable wrapped type => move-assignable-only wrapper.
        using T = MoveAssignableWrapper;
        T value(true);
        T value2(false);
        using RetT = decltype(std::not_fn(std::move(value)));
        static_assert(std::is_move_constructible<RetT>::value, "");
        static_assert(!std::is_copy_constructible<RetT>::value, "");
        static_assert(std::is_move_assignable<RetT>::value, "");
        static_assert(!std::is_copy_assignable<RetT>::value, "");
        auto ret = std::not_fn(std::move(value));
        assert(ret() == false);
        auto ret2 = std::not_fn(std::move(value2));
        assert(ret2() == true);
        ret = std::move(ret2);
        assert(ret() == true);
    }
}
// Verify the return type of the not_fn wrapper's call operator: `bool` when
// the wrapped result is (contextually) convertible to bool, otherwise the
// exact type produced by applying operator! to the wrapped result.
void return_type_tests()
{
    using std::is_same;
    {
        using T = CopyCallable<bool>;
        auto ret = std::not_fn(T{false});
        static_assert(is_same<decltype(ret()), bool>::value, "");
        static_assert(is_same<decltype(ret("abc")), bool>::value, "");
        assert(ret() == true);
    }
    {
        // Explicit conversion to bool is sufficient.
        using T = CopyCallable<ExplicitBool>;
        auto ret = std::not_fn(T{true});
        static_assert(is_same<decltype(ret()), bool>::value, "");
        static_assert(is_same<decltype(ret(std::string("abc"))), bool>::value, "");
        assert(ret() == false);
    }
    {
        // operator! returns EvilBool, and must be invoked exactly once per call.
        using T = CopyCallable<EvilBool>;
        auto ret = std::not_fn(T{false});
        static_assert(is_same<decltype(ret()), EvilBool>::value, "");
        EvilBool::bang_called = 0;
        auto value_ret = ret();
        assert(EvilBool::bang_called == 1);
        assert(value_ret.value == true);
        ret();
        assert(EvilBool::bang_called == 2);
    }
}
// Other tests only test using objects with call operators. Test the remaining
// callable categories here: function pointers, lambdas, pointers to member
// functions (const and non-const), and pointers to member data.
void other_callable_types_test()
{
    { // test with function pointer
        auto ret = std::not_fn(returns_true);
        assert(ret() == false);
    }
    { // test with lambda
        auto returns_value = [](bool value) { return value; };
        auto ret = std::not_fn(returns_value);
        assert(ret(true) == false);
        assert(ret(false) == true);
    }
    { // test with pointer to const member function
        MemFunCallable mt(true);
        const MemFunCallable mf(false);
        auto ret = std::not_fn(&MemFunCallable::return_value);
        assert(ret(mt) == false);
        assert(ret(mf) == true);
        assert(ret(&mt) == false);
        assert(ret(&mf) == true);
    }
    { // test with pointer to non-const member function
        MemFunCallable mt(true);
        MemFunCallable mf(false);
        auto ret = std::not_fn(&MemFunCallable::return_value_nc);
        assert(ret(mt) == false);
        assert(ret(mf) == true);
        assert(ret(&mt) == false);
        assert(ret(&mf) == true);
    }
    { // test with pointer to member data
        MemFunCallable mt(true);
        const MemFunCallable mf(false);
        auto ret = std::not_fn(&MemFunCallable::value);
        assert(ret(mt) == false);
        assert(ret(mf) == true);
        assert(ret(&mt) == false);
        assert(ret(&mf) == true);
    }
}
// Verify that an exception thrown while copying the callable into the
// std::not_fn wrapper propagates to the caller (and that the call operator
// of the half-constructed wrapper is never invoked).
void throws_in_constructor_test()
{
#ifndef TEST_HAS_NO_EXCEPTIONS
    struct ThrowsOnCopy {
        ThrowsOnCopy(ThrowsOnCopy const&) {
            throw 42;
        }
        ThrowsOnCopy() = default;
        bool operator()() const {
            assert(false);   // must never be reached by this test
            return false;    // FIX: missing return was UB when NDEBUG strips the assert
        }
    };
    {
        ThrowsOnCopy cp;
        try {
            std::not_fn(cp);
            assert(false);   // not_fn must not return normally
        } catch (int const& value) {
            assert(value == 42);
        }
    }
#endif
}
// Verify that the wrapper's call operator SFINAEs away for ill-formed calls:
// wrong arity, const-incorrect invocations, and result types with no
// operator!. NOTE(review): std::is_callable is the pre-C++17-final name that
// was renamed std::is_invocable — this test targets that libc++ era.
void call_operator_sfinae_test() {
    { // wrong number of arguments
        using T = decltype(std::not_fn(returns_true));
        static_assert(std::is_callable<T()>::value, ""); // callable only with no args
        static_assert(!std::is_callable<T(bool)>::value, "");
    }
    { // violates const correctness (member function pointer)
        using T = decltype(std::not_fn(&MemFunCallable::return_value_nc));
        static_assert(std::is_callable<T(MemFunCallable&)>::value, "");
        static_assert(!std::is_callable<T(const MemFunCallable&)>::value, "");
    }
    { // violates const correctness (call object)
        using Obj = CopyCallable<bool>;
        using NCT = decltype(std::not_fn(Obj{true}));
        using CT = const NCT;
        static_assert(std::is_callable<NCT()>::value, "");
        static_assert(!std::is_callable<CT()>::value, "");
    }
    { // returns bad type with no operator!
        auto fn = [](auto x) { return x; };
        using T = decltype(std::not_fn(fn));
        static_assert(std::is_callable<T(bool)>::value, "");
        static_assert(!std::is_callable<T(std::string)>::value, "");
    }
}
void call_operator_forwarding_test()
{
using Fn = ForwardingCallObject;
auto obj = std::not_fn(Fn{});
const auto& c_obj = obj;
{ // test zero args
obj();
assert(Fn::check_call<>(CT_NonConst | CT_LValue));
std::move(obj)();
assert(Fn::check_call<>(CT_NonConst | CT_RValue));
c_obj();
assert(Fn::check_call<>(CT_Const | CT_LValue));
std::move(c_obj)();
assert(Fn::check_call<>(CT_Const | CT_RValue));
}
{ // test value categories
int x = 42;
const int cx = 42;
obj(x);
assert(Fn::check_call<int&>(CT_NonConst | CT_LValue));
obj(cx);
assert(Fn::check_call<const int&>(CT_NonConst | CT_LValue));
obj(std::move(x));
assert(Fn::check_call<int&&>(CT_NonConst | CT_LValue));
obj(std::move(cx));
assert(Fn::check_call<const int&&>(CT_NonConst | CT_LValue));
obj(42);
assert(Fn::check_call<int&&>(CT_NonConst | CT_LValue));
}
{ // test value categories - rvalue
int x = 42;
const int cx = 42;
std::move(obj)(x);
assert(Fn::check_call<int&>(CT_NonConst | CT_RValue));
std::move(obj)(cx);
assert(Fn::check_call<const int&>(CT_NonConst | CT_RValue));
std::move(obj)(std::move(x));
assert(Fn::check_call<int&&>(CT_NonConst | CT_RValue));
std::move(obj)(std::move(cx));
assert(Fn::check_call<const int&&>(CT_NonConst | CT_RValue));
std::move(obj)(42);
assert(Fn::check_call<int&&>(CT_NonConst | CT_RValue));
}
{ // test value categories - const call
int x = 42;
const int cx = 42;
c_obj(x);
assert(Fn::check_call<int&>(CT_Const | CT_LValue));
c_obj(cx);
assert(Fn::check_call<const int&>(CT_Const | CT_LValue));
c_obj(std::move(x));
assert(Fn::check_call<int&&>(CT_Const | CT_LValue));
c_obj(std::move(cx));
assert(Fn::check_call<const int&&>(CT_Const | CT_LValue));
c_obj(42);
assert(Fn::check_call<int&&>(CT_Const | CT_LValue));
}
{ // test value categories - const call rvalue
int x = 42;
const int cx = 42;
std::move(c_obj)(x);
assert(Fn::check_call<int&>(CT_Const | CT_RValue));
std::move(c_obj)(cx);
assert(Fn::check_call<const int&>(CT_Const | CT_RValue));
std::move(c_obj)(std::move(x));
assert(Fn::check_call<int&&>(CT_Const | CT_RValue));
std::move(c_obj)(std::move(cx));
assert(Fn::check_call<const int&&>(CT_Const | CT_RValue));
std::move(c_obj)(42);
assert(Fn::check_call<int&&>(CT_Const | CT_RValue));
}
{ // test multi arg
int x = 42;
const double y = 3.14;
std::string s = "abc";
obj(42, std::move(y), s, std::string{"foo"});
Fn::check_call<int&&, const double&&, std::string&, std::string&&>(CT_NonConst | CT_LValue);
std::move(obj)(42, std::move(y), s, std::string{"foo"});
Fn::check_call<int&&, const double&&, std::string&, std::string&&>(CT_NonConst | CT_RValue);
c_obj(42, std::move(y), s, std::string{"foo"});
Fn::check_call<int&&, const double&&, std::string&, std::string&&>(CT_Const | CT_LValue);
std::move(c_obj)(42, std::move(y), s, std::string{"foo"});
Fn::check_call<int&&, const double&&, std::string&, std::string&&>(CT_Const | CT_RValue);
}
}
// Verify the noexcept-specification of the call operator of the object
// returned by std::not_fn: the call is noexcept only when both invoking the
// wrapped callable AND negating its result (operator!) are noexcept.
//
// BUG FIX: the static_assert messages in the NoExceptEvilBool case previously
// read "call should not be noexcept" even though the assertions expect
// noexcept to be true; the messages now match the conditions.
void call_operator_noexcept_test()
{
    {
        // Plain (potentially-throwing) call operator => not noexcept.
        using T = ConstCallable<bool>;
        T value(true);
        auto ret = std::not_fn(value);
        static_assert(!noexcept(ret()), "call should not be noexcept");
        auto const& cret = ret;
        static_assert(!noexcept(cret()), "call should not be noexcept");
    }
    {
        // noexcept invocation and noexcept negation of bool => noexcept call.
        using T = NoExceptCallable<bool>;
        T value(true);
        auto ret = std::not_fn(value);
        static_assert(noexcept(!_VSTD::__invoke(value)), "");
        static_assert(noexcept(ret()), "call should be noexcept");
        auto const& cret = ret;
        static_assert(noexcept(cret()), "call should be noexcept");
    }
    {
        // Both invocation and operator! are noexcept => noexcept call.
        using T = NoExceptCallable<NoExceptEvilBool>;
        T value(true);
        auto ret = std::not_fn(value);
        static_assert(noexcept(ret()), "call should be noexcept");
        auto const& cret = ret;
        static_assert(noexcept(cret()), "call should be noexcept");
    }
    {
        // EvilBool's operator! is not noexcept => the whole call is not.
        using T = NoExceptCallable<EvilBool>;
        T value(true);
        auto ret = std::not_fn(value);
        static_assert(!noexcept(ret()), "call should not be noexcept");
        auto const& cret = ret;
        static_assert(!noexcept(cret()), "call should not be noexcept");
    }
}
int main()
{
    // Each helper exercises one aspect of the std::not_fn specification.
    constructor_tests();
    return_type_tests();
    other_callable_types_test();
    throws_in_constructor_test();
    call_operator_sfinae_test(); // somewhat of an extension
    call_operator_forwarding_test();
    call_operator_noexcept_test();
}
| gpl-3.0 |
skyosev/OpenCart-Overclocked | upload/catalog/controller/common/column_left.php | 2563 | <?php
/**
 * Renders the left column of the storefront: resolves which layout applies to
 * the current route, collects every enabled module assigned to the
 * "column_left" position of that layout, sorts them and renders them.
 */
class ControllerCommonColumnLeft extends Controller {
	protected function index() {
		$this->load->model('design/layout');
		$this->load->model('catalog/category');
		$this->load->model('catalog/product');
		$this->load->model('catalog/information');

		// Route of the page being rendered; front page when none given.
		if (isset($this->request->get['route'])) {
			$route = (string)$this->request->get['route'];
		} else {
			$route = 'common/home';
		}

		// Layout resolution precedence: per-category / per-product /
		// per-information override, then the route's layout, then the
		// store-wide default layout.
		$layout_id = 0;

		if ($route == 'product/category' && isset($this->request->get['path']) && !is_array($this->request->get['path'])) {
			// Last element of the "path" breadcrumb is the current category id.
			$path = explode('_', (string)$this->request->get['path']);

			$layout_id = $this->model_catalog_category->getCategoryLayoutId(end($path));
		}

		if ($route == 'product/product' && isset($this->request->get['product_id'])) {
			$layout_id = $this->model_catalog_product->getProductLayoutId($this->request->get['product_id']);
		}

		if ($route == 'information/information' && isset($this->request->get['information_id'])) {
			$layout_id = $this->model_catalog_information->getInformationLayoutId($this->request->get['information_id']);
		}

		if (!$layout_id) {
			$layout_id = $this->model_design_layout->getLayout($route);
		}

		if (!$layout_id) {
			$layout_id = $this->config->get('config_layout_id');
		}

		// Collect every enabled module instance bound to this layout's
		// column_left position.
		$module_data = array();

		$this->load->model('setting/extension');

		$extensions = $this->model_setting_extension->getExtensions('module');

		foreach ($extensions as $extension) {
			$modules = $this->config->get($extension['code'] . '_module');

			if ($modules) {
				foreach ($modules as $module) {
					if ($module['layout_id'] == $layout_id && $module['position'] == 'column_left' && $module['status']) {
						$module_data[] = array(
							'code'       => $extension['code'],
							'setting'    => $module,
							'sort_order' => $module['sort_order']
						);
					}
				}
			}
		}

		// Order modules by their configured sort order.
		$sort_order = array();

		foreach ($module_data as $key => $value) {
			$sort_order[$key] = $value['sort_order'];
		}

		array_multisort($sort_order, SORT_ASC, $module_data);

		// Render each module controller; skip modules that produce no output.
		$this->data['modules'] = array();

		foreach ($module_data as $module) {
			$module = $this->getChild('module/' . $module['code'], $module['setting']);

			if ($module) {
				$this->data['modules'][] = $module;
			}
		}

		// Prefer the active theme's template, fall back to the default theme.
		if (file_exists(DIR_TEMPLATE . $this->config->get('config_template') . '/template/common/column_left.tpl')) {
			$this->template = $this->config->get('config_template') . '/template/common/column_left.tpl';
		} else {
			$this->template = 'default/template/common/column_left.tpl';
		}

		$this->render();
	}
}
?> | gpl-3.0 |
Estada1401/anuwhscript | GameServer/src/com/aionemu/gameserver/controllers/PetController.java | 2954 | /*
* This file is part of aion-lightning <aion-lightning.com>.
*
* aion-lightning is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* aion-lightning is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with aion-lightning. If not, see <http://www.gnu.org/licenses/>.
*/
package com.aionemu.gameserver.controllers;
import com.aionemu.commons.database.dao.DAOManager;
import com.aionemu.gameserver.dao.PlayerPetsDAO;
import com.aionemu.gameserver.model.TaskId;
import com.aionemu.gameserver.model.gameobjects.Pet;
import com.aionemu.gameserver.model.gameobjects.VisibleObject;
import com.aionemu.gameserver.model.gameobjects.player.Player;
import com.aionemu.gameserver.network.aion.serverpackets.SM_PET;
import com.aionemu.gameserver.utils.PacketSendUtility;
/**
 * Controller for pet visible objects. Pets do not react to objects entering
 * or leaving their view, so {@code see}/{@code notSee} are no-ops. The nested
 * {@link PetUpdateTask} is the periodic job that refreshes and persists the
 * owning player's pet mood while the pet is spawned.
 *
 * @author ATracer
 */
public class PetController extends VisibleObjectController<Pet> {

	@Override
	public void see(VisibleObject object) {
		// Pets ignore objects appearing in their view range.
	}

	@Override
	public void notSee(VisibleObject object, boolean isOutOfRange) {
		// Pets ignore objects leaving their view range.
	}

	public static class PetUpdateTask implements Runnable {

		private final Player player;
		// Timestamp of the last persisted mood update; 0 until the first run.
		private long startTime = 0;

		public PetUpdateTask(Player player) {
			this.player = player;
		}

		@Override
		public void run() {
			if (startTime == 0)
				startTime = System.currentTimeMillis();
			try {
				Pet pet = player.getPet();
				if (pet == null)
					throw new IllegalStateException("Pet is null");

				int currentPoints = 0;
				boolean saved = false;
				// While below the 9000-point cap, persist mood progress at most
				// once per minute; announce the moment the cap is reached.
				if (pet.getCommonData().getMoodPoints(false) < 9000) {
					if (System.currentTimeMillis() - startTime >= 60 * 1000) {
						currentPoints = pet.getCommonData().getMoodPoints(false);
						if (currentPoints == 9000) {
							PacketSendUtility.sendPacket(player, new SM_PET(pet, 4, 0));
						}
						DAOManager.getDAO(PlayerPetsDAO.class).savePetMoodData(pet.getCommonData());
						saved = true;
						startTime = System.currentTimeMillis();
					}
				}
				// NOTE(review): currentPoints stays 0 unless the minute branch
				// above ran, so a pet that is already at the cap still takes the
				// "< 9000" branch here (packet action 4, not 3) — confirm this
				// is the intended packet sequence.
				if (currentPoints < 9000) {
					PacketSendUtility.sendPacket(player, new SM_PET(pet, 4, 0));
				}
				else {
					PacketSendUtility.sendPacket(player, new SM_PET(pet, 3, 0));
					// Save if it reaches 100% after player snuggles the pet, not by the scheduler itself
					if (!saved)
						DAOManager.getDAO(PlayerPetsDAO.class).savePetMoodData(pet.getCommonData());
				}
			}
			catch (Exception ex) {
				// Pet despawned (or other failure): stop this periodic task.
				player.getController().cancelTask(TaskId.PET_UPDATE);
			}
		}
	}
}
| gpl-3.0 |
jamesmacwhite/Radarr | src/UI/Quality/QualityDefinitionModel.js | 420 | var ModelBase = require('../Settings/SettingsModelBase');
module.exports = ModelBase.extend({
baseInitialize : ModelBase.prototype.initialize,
initialize : function() {
var name = this.get('quality').name;
this.successMessage = 'Saved ' + name + ' quality settings';
this.errorMessage = 'Couldn\'t save ' + name + ' quality settings';
this.baseInitialize.call(this);
}
}); | gpl-3.0 |
ShahJe/RESTBuddy | RestBuddy/Helpers/XmlHelper.cs | 6755 | using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Runtime.InteropServices;
using System.Text;
using System.Xml;
using System.Xml.Xsl;
using static System.Windows.Forms.ComboBox;
namespace RestBuddy
{
public static class XmlHelper
{
private const string recentFileName = "recent.xml";
private const string xslFileName = "defaultss.xsl";
private const string testFileName = "tests.xml";
public static List<WebApiTest> LoadTests(string fileName)
{
if (!File.Exists(fileName))
{
throw new FileNotFoundException("Unable to open " + fileName);
}
var tests = new List<WebApiTest>();
try
{
var xmlDoc = new XmlDocument();
using (var fs = new FileStream(fileName, FileMode.Open, FileAccess.Read))
{
xmlDoc.Load(fs);
fs.Close();
}
var document = xmlDoc.DocumentElement;
var baseTest = new WebApiTest();
if (document != null && document.ChildNodes.Count > 0)
{
for (var i = 0; i < document.ChildNodes.Count; i++)
{
var node = document.ChildNodes.Item(i);
if (node == null)
{
continue;
}
baseTest = MapObject(baseTest, node);
}
var testConfigs = document.GetElementsByTagName("test");
for (var i = 0; i < testConfigs.Count; i++)
{
var node = testConfigs.Item(i);
tests.Add(GetObjectFromXml(node, baseTest));
}
}
}
catch (Exception ex)
{
var errorCode = Marshal.GetHRForException(ex) & ((1 << 16) - 1);
if ((errorCode == 32 || errorCode == 33))
{
throw new FileLoadException("Auto test config file is open in another program.");
}
else
{
throw new FileLoadException("Error processing auto test config file.");
}
}
return tests;
}
private static WebApiTest GetObjectFromXml(XmlNode node, WebApiTest baseTest)
{
var test = new WebApiTest();
if (node != null && node.Attributes != null && node.HasChildNodes)
{
test.RequestUserName = baseTest.RequestUserName;
test.RequestPassword = baseTest.RequestPassword;
test.BaseUri = baseTest.BaseUri;
test.OutputFormat = baseTest.OutputFormat;
test.SendRequestAuthCredentials = baseTest.SendRequestAuthCredentials;
test.IsEnabled = Convert.ToBoolean(node.Attributes.Item(0).Value);
for (var j = 0; j < node.ChildNodes.Count; j++)
{
var child = node.ChildNodes.Item(j);
test = MapObject(test, child);
}
}
return test;
}
private static WebApiTest MapObject(WebApiTest test, XmlNode node)
{
switch (node.Name)
{
case "name":
test.Name = node.InnerText;
break;
case "endpoint":
test.EndpointUri = node.InnerText;
break;
case "httpmethod":
test.HttpMethod = node.InnerText;
break;
case "sendrequestauthcredentials":
test.SendRequestAuthCredentials = Convert.ToBoolean(node.InnerText);
break;
case "body":
test.MessageBody = node.InnerText;
break;
case "requestusername":
test.RequestUserName = node.InnerText;
break;
case "requestpassword":
test.RequestPassword = node.InnerText;
break;
case "baseuri":
test.BaseUri = node.InnerText;
break;
case "output":
test.OutputFormat = node.InnerText;
break;
case "expectedhttpcode":
test.ExpectedHttpStatusCode = (HttpStatusCode)Convert.ToInt32(node.InnerText);
break;
case "headers":
foreach (XmlNode header in node.ChildNodes)
{
test.CustomHeaders.Headers.Add(header.Attributes.Item(0).Value, header.InnerText);
}
break;
}
return test;
}
public static List<string> LoadRecent()
{
if (!File.Exists(recentFileName))
{
return new List<string> { testFileName };
}
var files = new List<string>();
try
{
var xmlDoc = new XmlDocument();
using (var fs = new FileStream(recentFileName, FileMode.Open, FileAccess.Read))
{
xmlDoc.Load(fs);
fs.Close();
}
var document = xmlDoc.DocumentElement;
if (document != null && document.ChildNodes.Count > 0)
{
for (var i = 0; i < document.ChildNodes.Count; i++)
{
var node = document.ChildNodes.Item(i);
if (node == null)
{
continue;
}
files.Add(node.InnerText);
}
}
}
catch (Exception ex)
{
var errorCode = Marshal.GetHRForException(ex) & ((1 << 16) - 1);
if ((errorCode == 32 || errorCode == 33))
{
throw new FileLoadException("Auto test config file is open in another program.");
}
else
{
throw new FileLoadException("Error processing auto test config file.");
}
}
return files;
}
public static void WriteRecentFiles(ObjectCollection files)
{
var xmlWriter = new XmlTextWriter(recentFileName, null);
xmlWriter.WriteStartDocument();
xmlWriter.WriteStartElement("files");
foreach (var file in files)
{
xmlWriter.WriteStartElement("file");
xmlWriter.WriteString(file.ToString());
xmlWriter.WriteEndElement();
}
xmlWriter.WriteEndElement();
xmlWriter.WriteEndDocument();
xmlWriter.Close();
}
public static XslCompiledTransform LoadXsl()
{
var xct = new XslCompiledTransform();
try
{
var xls = File.Open(xslFileName, FileMode.Open, FileAccess.Read);
var reader = XmlReader.Create(xls);
xct.Load(reader);
}
catch
{
//// Ignore xml stylesheet load errors.
}
return xct;
}
public static string GetFormattedStringForBrowser(string displayText, XslCompiledTransform xsl)
{
var formattedString = string.Empty; ;
if (string.IsNullOrEmpty(displayText))
{
return formattedString;
}
if (displayText.StartsWith("<!DOCTYPE html"))
{
// If html web page is returned then display it as is.
formattedString = displayText;
return formattedString;
}
try
{
if (xsl == null)
{
// If unable to load XML stylesheet then make sure browser can display XML as text.
formattedString = WebUtility.HtmlEncode(displayText);
return formattedString;
}
var document = new XmlDocument();
document.LoadXml(displayText);
var builder = new StringBuilder();
var writer = XmlWriter.Create(builder);
xsl.Transform(document, writer);
formattedString = builder.ToString();
}
catch
{
formattedString = displayText;
}
return formattedString;
}
}
}
| gpl-3.0 |
GregMage/xmdoc | class/xmdoc_category.php | 13518 | <?php
/*
You may not change or alter any portion of this comment or credits
of supporting developers from this source code or any supporting source code
which is considered copyrighted (c) material of the original comment or credit authors.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*/
/**
* xmdoc module
*
* @copyright XOOPS Project (https://xoops.org)
* @license GNU GPL 2 (http://www.gnu.org/licenses/old-licenses/gpl-2.0.html)
* @author Mage Gregory (AKA Mage)
*/
use Xmf\Request;
use Xmf\Module\Helper;
if (!defined('XOOPS_ROOT_PATH')) {
die('XOOPS root path not defined');
}
/**
* Class xmdoc_category
*/
class xmdoc_category extends XoopsObject
{
	// constructor
	/**
	 * Declares the persisted variables (columns) of a document category.
	 */
	public function __construct()
	{
		$this->initVar('category_id', XOBJ_DTYPE_INT, null, false, 11);
		$this->initVar('category_name', XOBJ_DTYPE_TXTBOX, null, false);
		$this->initVar('category_description', XOBJ_DTYPE_TXTAREA, null, false);
		// use html when rendering the description
		$this->initVar('dohtml', XOBJ_DTYPE_INT, 1, false);
		$this->initVar('category_logo', XOBJ_DTYPE_TXTBOX, null, false);
		// display colour (hex) and maximum upload size (e.g. "500 K")
		$this->initVar('category_color', XOBJ_DTYPE_TXTBOX, '#ffffff', false);
		$this->initVar('category_size', XOBJ_DTYPE_TXTBOX, '500 K', false);
		// allowed file extensions for uploads in this category
		$this->initVar('category_extensions', XOBJ_DTYPE_ARRAY, array());
		// storage folder on disk; category_rename toggles renaming files on upload
		$this->initVar('category_folder', XOBJ_DTYPE_TXTBOX, null, false);
		$this->initVar('category_rename', XOBJ_DTYPE_INT, null, false, 1);
		$this->initVar('category_limitdownload', XOBJ_DTYPE_INT, null, false, 5);
		$this->initVar('category_limititem', XOBJ_DTYPE_INT, null, false, 5);
		$this->initVar('category_weight', XOBJ_DTYPE_INT, null, false, 11);
		$this->initVar('category_status', XOBJ_DTYPE_INT, null, false, 1);
	}
/**
* @return mixed
*/
public function get_new_enreg()
{
global $xoopsDB;
$new_enreg = $xoopsDB->getInsertId();
return $new_enreg;
}
	/**
	 * Validates the POSTed category form, stores an uploaded logo if any,
	 * saves the category and rewrites its group permissions, then redirects.
	 *
	 * @param object       $categoryHandler handler used to insert/update the row
	 * @param string|false $action          redirect target; current URI when false
	 * @return string accumulated HTML error message ('' when the save succeeded
	 *                and the request was redirected)
	 */
	public function saveCategory($categoryHandler, $action = false)
	{
		if ($action === false) {
			$action = $_SERVER['REQUEST_URI'];
		}
		include __DIR__ . '/../include/common.php';
		$error_message = '';
		// test error: weight must be numeric ('0' is allowed)
		if ((int)$_REQUEST['category_weight'] == 0 && $_REQUEST['category_weight'] != '0') {
			$error_message .= _MA_XMDOC_ERROR_WEIGHT . '<br>';
			$this->setVar('category_weight', 0);
		}
		// the configured category size may not exceed PHP's own upload limits
		$iniPostMaxSize = XmdocUtility::returnBytes(ini_get('post_max_size'));
		$iniUploadMaxFileSize = XmdocUtility::returnBytes(ini_get('upload_max_filesize'));
		if (min($iniPostMaxSize, $iniUploadMaxFileSize) < XmdocUtility::StringSizeConvert(Request::getString('sizeValue', '') . ' ' . Request::getString('sizeType', ''))) {
			$error_message .= _MA_XMDOC_ERROR_CATEGORYSIZE . '<br>';
			$this->setVar('category_size', '500 K');
		}
		//logo: store a newly uploaded image, otherwise keep the posted name
		if ($_FILES['category_logo']['error'] != UPLOAD_ERR_NO_FILE) {
			include_once XOOPS_ROOT_PATH . '/class/uploader.php';
			$uploader_category_img = new XoopsMediaUploader($path_logo_category, array('image/gif', 'image/jpeg', 'image/pjpeg', 'image/x-png', 'image/png'), $upload_size, null, null);
			if ($uploader_category_img->fetchMedia('category_logo')) {
				$uploader_category_img->setPrefix('category_');
				if (!$uploader_category_img->upload()) {
					$error_message .= $uploader_category_img->getErrors() . '<br />';
				} else {
					$this->setVar('category_logo', $uploader_category_img->getSavedFileName());
				}
			} else {
				$error_message .= $uploader_category_img->getErrors();
			}
		} else {
			$this->setVar('category_logo', Request::getString('category_logo', ''));
		}
		$this->setVar('category_name', Request::getString('category_name', ''));
		$this->setVar('category_color', Request::getString('category_color', ''));
		$this->setVar('category_description', Request::getText('category_description', ''));
		$this->setVar('category_extensions', Request::getArray('category_extensions', array()));
		$this->setVar('category_rename', Request::getInt('category_rename', 0));
		$this->setVar('category_status', Request::getInt('category_status', 1));
		// first save of a category: create its storage folder on disk
		if ($this->getVar('category_folder') == '') {
			$folder = XmdocUtility::creatFolder($path_document);
			$this->setVar('category_folder', $folder);
		}
		$this->setVar('category_limitdownload', Request::getInt('category_limitdownload', 1));
		$this->setVar('category_limititem', Request::getInt('category_limititem', 1));
		if ($error_message == '') {
			$this->setVar('category_weight', Request::getInt('category_weight', 0));
			$this->setVar('category_size',Request::getFloat('sizeValue', 0) . ' ' . Request::getString('sizeType', ''));
			if ($categoryHandler->insert($this)) {
				// permissions: insert id is 0 on update, so fall back to the
				// existing category id
				if ($this->get_new_enreg() == 0){
					$perm_id = $this->getVar('category_id');
				} else {
					$perm_id = $this->get_new_enreg();
				}
				$permHelper = new Helper\Permission();
				// permission view
				$groups_view = Request::getArray('xmdoc_view_perms', array(), 'POST');
				$permHelper->savePermissionForItem('xmdoc_view', $perm_id, $groups_view);
				// permission submit
				$groups_submit = Request::getArray('xmdoc_submit_perms', array(), 'POST');
				$permHelper->savePermissionForItem('xmdoc_submit', $perm_id, $groups_submit);
				// permission edit and approve
				$groups_submit = Request::getArray('xmdoc_editapprove_perms', [], 'POST');
				$permHelper->savePermissionForItem('xmdoc_editapprove', $perm_id, $groups_submit);
				// permission delete
				$groups_submit = Request::getArray('xmdoc_delete_perms', [], 'POST');
				$permHelper->savePermissionForItem('xmdoc_delete', $perm_id, $groups_submit);
				redirect_header($action, 2, _MA_XMDOC_REDIRECT_SAVE);
			} else {
				$error_message = $this->getHtmlErrors();
			}
		}
		return $error_message;
	}
    /**
     * Build the admin add/edit form for a document category.
     *
     * Populates a XoopsThemeForm with name, description editor, logo
     * selector/uploader, colour picker, max-upload-size, allowed MIME
     * extensions, per-category limits, weight, status and the four
     * group-permission selectors consumed by saveCategory().
     *
     * @param bool|string $action form action URL; false (default) posts back to the current request URI
     * @return XoopsThemeForm
     */
    public function getForm($action = false)
    {
        $helper = Helper::getHelper('xmdoc');
        if ($action === false) {
            $action = $_SERVER['REQUEST_URI'];
        }
        include_once XOOPS_ROOT_PATH . '/class/xoopsformloader.php';
        // common.php provides $path_logo_category, $url_logo_category, $upload_size, etc.
        include __DIR__ . '/../include/common.php';
        //form title: "add" for a new record, "edit" otherwise
        $title = $this->isNew() ? sprintf(_MA_XMDOC_ADD) : sprintf(_MA_XMDOC_EDIT);
        $form = new XoopsThemeForm($title, 'form', $action, 'post', true);
        $form->setExtra('enctype="multipart/form-data"');
        if (!$this->isNew()) {
            // editing: carry the id and seed defaults from the stored record
            $form->addElement(new XoopsFormHidden('category_id', $this->getVar('category_id')));
            $status = $this->getVar('category_status');
            $weight = $this->getVar('category_weight');
            $rename = $this->getVar('category_rename');
        } else {
            $status = 1;
            $weight = 0;
            $rename = 0;
        }
        // title
        $form->addElement(new XoopsFormText(_MA_XMDOC_CATEGORY_NAME, 'category_name', 50, 255, $this->getVar('category_name')), true);
        // description (WYSIWYG editor chosen by module config, falls back to plain text)
        $editor_configs =array();
        $editor_configs['name'] = 'category_description';
        $editor_configs['value'] = $this->getVar('category_description', 'e');
        $editor_configs['rows'] = 20;
        $editor_configs['cols'] = 160;
        $editor_configs['width'] = '100%';
        $editor_configs['height'] = '400px';
        $editor_configs['editor'] = $helper->getConfig('general_editor', 'Plain Text');
        $form->addElement(new XoopsFormEditor(_MA_XMDOC_CATEGORY_DESC, 'category_description', $editor_configs), false);
        // logo: a select of existing images plus a file-upload field.
        // NOTE(review): both the select and the XoopsFormFile below share the
        // name 'category_logo' — the save path checks fetchMedia('category_logo')
        // first and falls back to the posted string; confirm this is intended.
        $blank_img = $this->getVar('category_logo') ?: 'blank.gif';
        $uploadirectory = str_replace(XOOPS_URL, '', $url_logo_category);
        $imgtray_img = new XoopsFormElementTray(_MA_XMDOC_CATEGORY_LOGOFILE . '<br /><br />' . sprintf(_MA_XMDOC_CATEGORY_UPLOADSIZE, $upload_size/1024), '<br />');
        $imgpath_img = sprintf(_MA_XMDOC_CATEGORY_FORMPATH, $uploadirectory);
        $imageselect_img = new XoopsFormSelect($imgpath_img, 'category_logo', $blank_img);
        $image_array_img = XoopsLists::getImgListAsArray($path_logo_category);
        $imageselect_img->addOption("$blank_img", $blank_img);
        foreach ($image_array_img as $image_img) {
            $imageselect_img->addOption("$image_img", $image_img);
        }
        // live preview: swap the <img> below when the selection changes
        $imageselect_img->setExtra("onchange='showImgSelected(\"image_img2\", \"category_logo\", \"" . $uploadirectory . "\", \"\", \"" . XOOPS_URL . "\")'");
        $imgtray_img->addElement($imageselect_img, false);
        $imgtray_img->addElement(new XoopsFormLabel('', "<br /><img src='" . $url_logo_category . '/' . $blank_img . "' name='image_img2' id='image_img2' alt='' style='max-width:100px'/>"));
        $fileseltray_img = new XoopsFormElementTray('<br />', '<br /><br />');
        $fileseltray_img->addElement(new XoopsFormFile(_MA_XMDOC_CATEGORY_UPLOAD, 'category_logo', $upload_size), false);
        $fileseltray_img->addElement(new XoopsFormLabel(''), false);
        $imgtray_img->addElement($fileseltray_img);
        $form->addElement($imgtray_img);
        //color
        $form->addElement(new XoopsFormColorPicker(_MA_XMDOC_CATEGORY_COLOR, 'category_color', $this->getVar('category_color')), false);
        // upload size max: stored as "value unit" (e.g. "10 M"); defaults unit to 'K'
        $size_value_arr = explode(' ', $this->getVar('category_size'));
        $aff_size = new \XoopsFormElementTray(_MA_XMDOC_CATEGORY_SIZE, '');
        $aff_size->addElement(new \XoopsFormText('', 'sizeValue', 13, 13, $size_value_arr[0]));
        if (array_key_exists (1, $size_value_arr) == false){
            $size_value_arr[1] = 'K';
        }
        $type = new \XoopsFormSelect('', 'sizeType', $size_value_arr[1]);
        $typeArray = [
            'B' => _MA_XMDOC_UTILITY_BYTES,
            'K' => _MA_XMDOC_UTILITY_KBYTES,
            'M' => _MA_XMDOC_UTILITY_MBYTES,
            'G' => _MA_XMDOC_UTILITY_GBYTES
        ];
        $type->addOptionArray($typeArray);
        $aff_size->addElement($type);
        $aff_size->addElement(new \XoopsFormElementTray(_MA_XMDOC_CATEGORY_SIZEINFO, 'dfsdfdf'));
        $form->addElement($aff_size);
        // extensions: multi-select built from the core MIME-type map
        $extension_list = include $GLOBALS['xoops']->path('include/mimetypes.inc.php');
        ksort($extension_list);
        $extension = new XoopsFormSelect(_MA_XMDOC_CATEGORY_EXTENSION, 'category_extensions', $this->getVar('category_extensions'), 10, true);
        foreach ($extension_list as $key => $val) {
            $extension ->addOption($key, $key);
        }
        $form->addElement($extension, true);
        // limitdownload
        $form->addElement(new XoopsFormText(_MA_XMDOC_CATEGORY_LIMITDOWNLOAD, 'category_limitdownload', 5, 5, $this->getVar('category_limitdownload')), true);
        // limititem
        $form->addElement(new XoopsFormText(_MA_XMDOC_CATEGORY_LIMITITEM, 'category_limititem', 5, 5, $this->getVar('category_limititem')), true);
        // rename uploaded files yes/no
        $form->addElement(new XoopsFormRadioYN(_MA_XMDOC_CATEGORY_RENAME, 'category_rename', $rename), true);
        // weight (display ordering)
        $form->addElement(new XoopsFormText(_MA_XMDOC_CATEGORY_WEIGHT, 'category_weight', 5, 5, $weight));
        // status: active / not active
        $form_status = new XoopsFormRadio(_MA_XMDOC_STATUS, 'category_status', $status);
        $options = array(1 => _MA_XMDOC_STATUS_A, 0 =>_MA_XMDOC_STATUS_NA,);
        $form_status->addOptionArray($options);
        $form->addElement($form_status);
        // permission selectors: view / submit / edit+approve / delete
        $permHelper = new Helper\Permission();
        $form->addElement($permHelper->getGroupSelectFormForItem('xmdoc_view', $this->getVar('category_id'), _MA_XMDOC_PERMISSION_VIEW_THIS, 'xmdoc_view_perms', true));
        $form->addElement($permHelper->getGroupSelectFormForItem('xmdoc_submit', $this->getVar('category_id'), _MA_XMDOC_PERMISSION_SUBMIT_THIS, 'xmdoc_submit_perms', true));
        $form->addElement($permHelper->getGroupSelectFormForItem('xmdoc_editapprove', $this->getVar('category_id'), _MA_XMDOC_PERMISSION_EDITAPPROVE_THIS, 'xmdoc_editapprove_perms', true));
        $form->addElement($permHelper->getGroupSelectFormForItem('xmdoc_delete', $this->getVar('category_id'), _MA_XMDOC_PERMISSION_DELETE_THIS, 'xmdoc_delete_perms', true));
        $form->addElement(new XoopsFormHidden('op', 'save'));
        // submit
        $form->addElement(new XoopsFormButton('', 'submit', _SUBMIT, 'submit'));
        return $form;
    }
}
/**
 * Class xmdocxmdoc_categoryHandler
 *
 * Persistence handler for xmdoc category objects; all CRUD is inherited
 * from XoopsPersistableObjectHandler.
 */
class xmdocxmdoc_categoryHandler extends XoopsPersistableObjectHandler
{
    /**
     * xmdocxmdoc_categoryHandler constructor.
     * @param null|XoopsDatabase $db
     */
    public function __construct($db)
    {
        // table 'xmdoc_category', object class 'xmdoc_category',
        // primary key 'category_id', identifier field 'category_name'
        parent::__construct($db, 'xmdoc_category', 'xmdoc_category', 'category_id', 'category_name');
    }
}
| gpl-3.0 |
acquaman/acquaman | source/application/PGM/PGMMain.cpp | 2368 | /*
Copyright 2010-2012 Mark Boots, David Chevrier, and Darren Hunter.
Copyright 2013-2014 David Chevrier and Darren Hunter.
This file is part of the Acquaman Data Acquisition and Management framework ("Acquaman").
Acquaman is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Acquaman is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Acquaman. If not, see <http://www.gnu.org/licenses/>.
*/
#include <QApplication>
#include <QFile>
#include "application/PGM/PGMAppController.h"
#include "application/AMCrashMonitorSupport.h"
// Application entry point: starts the crash monitor (non-macOS), runs the
// PGM app controller's startup/event-loop/shutdown sequence, then asks the
// crash monitor to emit its report before tearing down.
int main(int argc, char *argv[])
{
	/// Program Startup:
	// =================================
	QApplication app(argc, argv);
	app.setApplicationName("Acquaman");

	PGMAppController* appController = new PGMAppController();

#ifndef Q_WS_MAC
	// Make a local QFile for the error file. It needs to be in this scope and get passed into AMCrashMonitorSupport, otherwise it won't work properly
	// After doing so, star the monitor
	// Ignore all of this for Mac OSX, it has it's own crash reporter and the two seem to compete
	QFile localErrorFile(QString("/tmp/ErrorFile%1.txt").arg(getpid()));
	localErrorFile.open(QIODevice::WriteOnly | QIODevice::Text);

	AMCrashMonitorSupport::s()->setErrorFile(&localErrorFile);
	// NOTE(review): the crash-report directory says "IDEAS" in this PGM
	// application — looks like a copy/paste from the IDEAS app; confirm.
	AMCrashMonitorSupport::s()->setPathToCrashReportFiles("/home/acquaman/AcquamanApplicationCrashReports/IDEAS");
	AMCrashMonitorSupport::s()->monitor();
#endif

	/// Program Run-loop:
	// =================================
	int retVal = -1;  // stays -1 if startup() fails
	if(appController->startup())
		retVal = app.exec();

	/// Program Shutdown:
	// =================================
	if (appController->isRunning())
		appController->shutdown();

#ifndef Q_WS_MAC
	// Make sure we have the crash reporter system actually generate a report
	// Ignore all of this for Mac OSX, it has it's own crash reporter and the two seem to compete
	AMCrashMonitorSupport::s()->report();
#endif

	delete appController;

	return retVal;
}
| gpl-3.0 |
lsuits/moodle | blocks/quickmail/classes/messenger/message/data_mapper/maps_activity_data.php | 1285 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* @package block_quickmail
* @copyright 2008 onwards Louisiana State University
* @copyright 2008 onwards Chad Mazilly, Robert Russo, Jason Peak, Dave Elliott, Adam Zapletal, Philip Cali
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
namespace block_quickmail\messenger\message\data_mapper;
trait maps_activity_data {
///////////////////////////////////////
///
/// ACTIVITY DATA
///
///////////////////////////////////////
// 'activityname',
// 'activityduedate',
// 'activitylink',
// 'activitygradelink',
} | gpl-3.0 |
FroMage/jax-doclets | doclets/src/test/java/com/lunatech/doclets/jax/test/demo/nodoc/UndocumentedQuery.java | 380 | package com.lunatech.doclets.jax.test.demo.nodoc;
import javax.ws.rs.CookieParam;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.MatrixParam;
import javax.ws.rs.QueryParam;
// Test fixture: a JAX-RS resource whose parameters deliberately carry NO
// Javadoc, used to exercise the doclet's handling of undocumented params.
// (Line comments only — Javadoc here would change what the doclet sees.)
public class UndocumentedQuery {

  // cookie-bound parameter
  @CookieParam("session")
  String session;

  // HTTP header-bound parameter
  @HeaderParam("X-Pizza-Style")
  String style;

  // matrix-parameter binding
  @MatrixParam("size")
  int size;

  // query-string binding
  @QueryParam("q")
  String query;
}
| gpl-3.0 |
the-synister/the-source | gui/PluginEditor.cpp | 1164 | /*
==============================================================================
This file was auto-generated by the Introjucer!
It contains the basic framework code for a JUCE plugin editor.
==============================================================================
*/
#include "PluginProcessor.h"
#include "PluginEditor.h"
//==============================================================================
// Editor constructor: fixes the window size and attaches the main UI
// component, handing it the owning processor.
PluginAudioProcessorEditor::PluginAudioProcessorEditor (PluginAudioProcessor& p)
    : AudioProcessorEditor (&p), processor (p)
{
    // Make sure that before the constructor has finished, you've set the
    // editor's size to whatever you need it to be.
    setSize (812, 693);
    // Create the synth UI and make it a visible child of this editor.
    addAndMakeVisible(ui = new PlugUI(p));
}
PluginAudioProcessorEditor::~PluginAudioProcessorEditor()
{
    // Release the UI component. Presumably 'ui' is a JUCE smart-pointer
    // member so assigning nullptr deletes it — confirm in the header.
    ui = nullptr;
}
//==============================================================================
// Paint callback: fills the editor background; all other drawing is done
// by the child UI component.
void PluginAudioProcessorEditor::paint (Graphics& g)
{
    g.fillAll (Colours::white);
}
void PluginAudioProcessorEditor::resized()
{
    // This is generally where you'll want to lay out the positions of any
    // subcomponents in your editor..
    // Currently empty: the child PlugUI keeps its own fixed layout.
}
| gpl-3.0 |
bendude56/Dungeonman | src/main/java/com/bendude56/dungeonman/ui/GameFrame.java | 15910 | package com.bendude56.dungeonman.ui;
import java.awt.EventQueue;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.WindowEvent;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import com.bendude56.dungeonman.DebugCheats;
import com.bendude56.dungeonman.GameInstance;
import com.bendude56.dungeonman.entity.Entity;
import com.bendude56.dungeonman.entity.Entity.ActionType;
import com.bendude56.dungeonman.entity.EntityPlayer;
import com.bendude56.dungeonman.gfx.GraphicsPanel;
import com.bendude56.dungeonman.world.World;
import com.bendude56.dungeonman.world.WorldLocation;
import com.bendude56.dungeonman.world.tile.TileState;
/**
 * Main application window for Dungeonman.
 *
 * Owns the menu bar, the central rendering panel and the optional child
 * frames (inventory, stats, text log), and runs the blocking turn-based
 * input loop ({@link #mainLoop()}) on the main thread after the UI has
 * been constructed on the EDT.
 *
 * NOTE(review): mainLoop() runs off the EDT yet touches Swing components
 * (gamePanel, dialogs, child frames) directly — confirm this is safe or
 * marshal those calls through EventQueue.
 *
 * NOTE(review): activeLog/activeInventory/activeStats are disposed on
 * player death but never reset to null, so the Window menu buttons will
 * call requestFocus() on disposed frames afterwards — verify.
 *
 * @author Benjamin C. Thomas
 */
public class GameFrame extends JFrame {
    private static final long serialVersionUID = 1L;

    // Singleton-style handle to the running window, set in main().
    public static GameFrame activeFrame;

    // Menu bar and its items.
    public JMenuBar mainMenu;
    public JMenu fileMenu;
    public JMenuItem newMenuButton;
    public JMenuItem loadMenuButton;
    public JMenuItem saveMenuButton;
    public JMenuItem saveAsMenuButton;
    public JMenuItem exitMenuButton;
    public JMenu windowMenu;
    public JMenuItem windowInventoryButton;
    public JMenuItem windowStatsButton;
    public JMenuItem windowTextLogButton;
    public JMenu debugMenu;
    public JMenuItem debugXRayButton;
    public JMenuItem debugNoDamageButton;
    public JMenuItem debugIdentifyOverrideButton;

    // Central game rendering surface.
    public GraphicsPanel gamePanel;

    // Last key event captured by the loop's KeyListener; keyLatch is
    // re-armed each turn and counted down on key press.
    public int lastKeyCode;
    public char lastKeyChar;
    public CountDownLatch keyLatch;

    // Optional child windows; null until opened.
    public TextLogFrame activeLog;
    public InventoryFrame activeInventory;
    public StatsFrame activeStats;

    // Accumulated log text, replayed into a newly opened TextLogFrame.
    public String loggedMessages = "";

    /**
     * Entry point: installs the Nimbus look-and-feel, builds the frame on
     * the EDT, then runs the game loop on this (main) thread.
     */
    public static void main(String[] args) throws InvocationTargetException, InterruptedException {
        try {
            UIManager.setLookAndFeel("javax.swing.plaf.nimbus.NimbusLookAndFeel");
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException e) {
            e.printStackTrace();
        }
        EventQueue.invokeAndWait(new Runnable() {
            @Override
            public void run() {
                try {
                    activeFrame = new GameFrame();
                    activeFrame.setVisible(true);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
        activeFrame.mainLoop();
    }

    public GameFrame() {
        initComponents();
    }

    // Builds the window chrome: title, bounds, menus and the game panel.
    private void initComponents() {
        this.setTitle("Dungeonman v0.1.0");
        this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        this.setBounds(100, 100, 600, 500);
        initMenus();
        gamePanel = new GraphicsPanel();
        this.add(gamePanel);
    }

    private void initMenus() {
        mainMenu = new JMenuBar();
        this.setJMenuBar(mainMenu);
        initFileMenu();
        initWindowMenu();
        initDebugMenu();
    }

    // File menu: New starts a game; Exit closes the window. Load/Save have
    // no listeners attached here and Save/Save As start disabled.
    private void initFileMenu() {
        fileMenu = new JMenu("File");
        mainMenu.add(fileMenu);

        newMenuButton = new JMenuItem("New Game");
        fileMenu.add(newMenuButton);
        loadMenuButton = new JMenuItem("Load...");
        fileMenu.add(loadMenuButton);

        fileMenu.addSeparator();

        saveMenuButton = new JMenuItem("Save");
        saveMenuButton.setEnabled(false);
        fileMenu.add(saveMenuButton);
        saveAsMenuButton = new JMenuItem("Save As...");
        saveAsMenuButton.setEnabled(false);
        fileMenu.add(saveAsMenuButton);

        fileMenu.addSeparator();

        exitMenuButton = new JMenuItem("Exit");
        fileMenu.add(exitMenuButton);

        newMenuButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Open the new-game dialog on the EDT.
                EventQueue.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            NewGameFrame f = new NewGameFrame(GameFrame.this);
                            f.setVisible(true);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                });
            }
        });

        exitMenuButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Route through the window-closing event so EXIT_ON_CLOSE applies.
                GameFrame.this.dispatchEvent(new WindowEvent(GameFrame.this, WindowEvent.WINDOW_CLOSING));
            }
        });
    }

    // Window menu: opens (or refocuses) the inventory, stats and log frames.
    private void initWindowMenu() {
        windowMenu = new JMenu("Window");
        windowMenu.setEnabled(false);
        mainMenu.add(windowMenu);

        windowInventoryButton = new JMenuItem("Inventory");
        windowMenu.add(windowInventoryButton);
        windowStatsButton = new JMenuItem("Stats");
        windowMenu.add(windowStatsButton);
        windowTextLogButton = new JMenuItem("Text Log");
        windowMenu.add(windowTextLogButton);

        windowInventoryButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                EventQueue.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            if (activeInventory == null) {
                                activeInventory = new InventoryFrame();
                                activeInventory.setVisible(true);
                            } else {
                                activeInventory.requestFocus();
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                });
            }
        });

        windowStatsButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                EventQueue.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            if (activeStats == null) {
                                activeStats = new StatsFrame();
                                activeStats.setVisible(true);
                            } else {
                                activeStats.requestFocus();
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                });
            }
        });

        windowTextLogButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                EventQueue.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            if (activeLog == null) {
                                activeLog = new TextLogFrame();
                                // replay messages logged before the window existed
                                activeLog.logTextBox.setText(loggedMessages);
                                activeLog.setVisible(true);
                            } else {
                                activeLog.requestFocus();
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                });
            }
        });
    }

    // Debug menu: checkboxes toggling the DebugCheats flags.
    private void initDebugMenu() {
        debugMenu = new JMenu("Debug");
        mainMenu.add(debugMenu);

        debugXRayButton = new JCheckBoxMenuItem("X-Ray Vision");
        debugMenu.add(debugXRayButton);
        debugNoDamageButton = new JCheckBoxMenuItem("No Damage");
        debugMenu.add(debugNoDamageButton);
        debugIdentifyOverrideButton = new JCheckBoxMenuItem("Identify Override");
        debugMenu.add(debugIdentifyOverrideButton);

        debugXRayButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                DebugCheats.xRay = debugXRayButton.isSelected();
                // redraw immediately so the cheat is visible without a turn
                if (GameInstance.getActiveInstance() != null) {
                    gamePanel.drawGameWorld();
                    gamePanel.repaint();
                }
            }
        });

        debugNoDamageButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                DebugCheats.noDamage = debugNoDamageButton.isSelected();
            }
        });

        debugIdentifyOverrideButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                DebugCheats.identifyOverride = debugIdentifyOverrideButton.isSelected();
                if (activeInventory != null)
                    activeInventory.doUpdate();
            }
        });
    }

    /**
     * Opens the inventory, stats and log frames (or refocuses existing
     * ones). Same logic as the Window-menu listeners but invoked directly,
     * e.g. when a new game starts.
     */
    public void openDefaultWindows() {
        try {
            if (activeInventory == null) {
                activeInventory = new InventoryFrame();
                activeInventory.setVisible(true);
            } else {
                activeInventory.requestFocus();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (activeStats == null) {
                activeStats = new StatsFrame();
                activeStats.setVisible(true);
            } else {
                activeStats.requestFocus();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (activeLog == null) {
                activeLog = new TextLogFrame();
                activeLog.logTextBox.setText(loggedMessages);
                activeLog.setVisible(true);
            } else {
                activeLog.requestFocus();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Appends a line to the persistent log buffer and, if the log window
     * is open, to its text box (scrolling to the end).
     */
    public void logMessage(String message) {
        loggedMessages += message + "\n";
        if (activeLog != null) {
            activeLog.logTextBox.setText(activeLog.logTextBox.getText() + message + "\n");
            activeLog.logTextBox.select(activeLog.logTextBox.getText().length(), 0);
        }
    }

    /**
     * Blocking turn loop: waits (poll + sleep) for an active game, renders,
     * handles death, then blocks on a key press and translates it into a
     * move/pickup/search/climb/wait action before advancing the world turn.
     * Never returns.
     */
    private void mainLoop() throws InterruptedException {
        // One listener instance, attached only while waiting for input.
        KeyListener keyListen = new KeyListener() {
            @Override
            public void keyTyped(KeyEvent e) {
            }

            @Override
            public void keyPressed(KeyEvent e) {
                lastKeyCode = e.getKeyCode();
                lastKeyChar = e.getKeyChar();
                keyLatch.countDown();
            }

            @Override
            public void keyReleased(KeyEvent e) {
            }
        };
        while (true) {
            // No game running yet: poll twice a second.
            if (GameInstance.getActiveInstance() == null || GameInstance.getActiveInstance().getPlayerEntity() == null) {
                Thread.sleep(500);
                continue;
            }
            World w = GameInstance.getActiveWorld();
            EntityPlayer p = GameInstance.getActiveInstance().getPlayerEntity();
            render();
            if (p.isDead()) {
                // Tear down the session: close child windows, clear the log,
                // disable the Window menu and end the game instance.
                JOptionPane.showMessageDialog(this, "You have died.");
                loggedMessages = "";
                if (activeLog != null)
                    activeLog.dispose();
                if (activeInventory != null)
                    activeInventory.dispose();
                if (activeStats != null)
                    activeStats.dispose();
                windowMenu.setEnabled(false);
                GameInstance.endGame();
                continue;
            }
            // Arm the latch and block until a key arrives.
            keyLatch = new CountDownLatch(1);
            this.addKeyListener(keyListen);
            try {
                keyLatch.await();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            this.removeKeyListener(keyListen);
            WorldLocation newLocation = p.getLocation();
            // Arrows: move. P: pick up. S: search. C: climb. Space: wait.
            // Anything else: ignore and re-prompt.
            if (lastKeyCode == KeyEvent.VK_UP) {
                newLocation = newLocation.adjustLocation(0, 1);
            } else if (lastKeyCode == KeyEvent.VK_RIGHT) {
                newLocation = newLocation.adjustLocation(1, 0);
            } else if (lastKeyCode == KeyEvent.VK_DOWN) {
                newLocation = newLocation.adjustLocation(0, -1);
            } else if (lastKeyCode == KeyEvent.VK_LEFT) {
                newLocation = newLocation.adjustLocation(-1, 0);
            } else if (lastKeyCode == KeyEvent.VK_P) {
                // Pick up the first entity at the player's tile that accepts it.
                List<Entity> entities = newLocation.world.getEntities(newLocation);
                boolean done = false;
                for (Entity e : entities) {
                    if (e.doAction(ActionType.PICKUP, p)) {
                        done = true;
                        break;
                    }
                }
                if (!done) {
                    p.logMessage("There is nothing to pick up!");
                    continue;
                }
            } else if (lastKeyCode == KeyEvent.VK_S) {
                p.doSearch();
            } else if (lastKeyCode == KeyEvent.VK_C) {
                TileState state = w.getTileState(newLocation);
                if (!state.getTileType().onPlayerClimb(state, p)) {
                    p.logMessage("There is nothing to climb!");
                    continue;
                } else {
                    // Climbing may have relocated the player (e.g. stairs).
                    newLocation = p.getLocation();
                }
            } else if (lastKeyCode != KeyEvent.VK_SPACE) {
                continue;
            }
            // Attempt the move: tile must allow it and no entity may veto it.
            TileState state = w.getTileState(newLocation);
            if (!newLocation.equals(p.getLocation()) && state.getTileType().onPlayerMove(state, p)) {
                List<Entity> entities = w.getEntities(newLocation);
                boolean cancelled = false;
                for (Entity e : entities) {
                    if (!e.doAction(ActionType.MOVE, p)) {
                        cancelled = true;
                    }
                }
                if (!cancelled)
                    p.setLocation(newLocation);
            }
            state.update();
            doTurn();
        }
    }

    // Re-centers the viewport on the player and repaints the game panel.
    public void render() {
        gamePanel.centerX = GameInstance.getActiveInstance().getPlayerEntity().getLocation().x;
        gamePanel.centerY = GameInstance.getActiveInstance().getPlayerEntity().getLocation().y;
        gamePanel.drawGameWorld();
        gamePanel.repaint();
    }

    // Advances the world one turn and refreshes any open child windows.
    public void doTurn() {
        GameInstance.getActiveWorld().doTurn();
        if (activeInventory != null)
            activeInventory.doUpdate();
        if (activeStats != null)
            activeStats.doUpdate();
    }
}
| gpl-3.0 |
pouladpld/myTTCBot | src/BusV.Ops/Models/RoutePrediction.cs | 268 | namespace BusV.Ops.Models
{
    /// <summary>
    /// DTO identifying a transit route/direction pair for which arrival
    /// predictions are requested.
    /// </summary>
    public class RoutePrediction
    {
        /// <summary>Agency-specific route tag (e.g. a NextBus route id).</summary>
        public string RouteTag { get; set; }

        /// <summary>Agency-specific direction tag within the route.</summary>
        public string DirectionTag { get; set; }

        /// <summary>Human-readable route name.</summary>
        public string RouteName { get; set; }

        /// <summary>Human-readable direction name.</summary>
        public string DirectionName { get; set; }
    }
}
| gpl-3.0 |
juliomolinero/angular-modal-notify | ngAppService.js | 511 | /**
* Services in charge of removing/adding records
*/
/**
 * appService: record add/remove operations for the app.
 * deleteFormOption is a placeholder — the real database removal is to be
 * implemented here; for now it always reports success (null error).
 */
ngAppModule.service('appService', function ($http) {

    /**
     * Delete an item.
     * @param {*} itemId    id of the record to remove
     * @param {Function} callback  invoked with an error (or null on success)
     */
    function deleteFormOption(itemId, callback) {
        // Here is where you're supposed to
        // implement your service to delete items from your database
        return callback(null);
    }

    // Exposed API
    return {
        deleteFormOption: deleteFormOption
    };
});
dessalines/flowchat | service/src/main/java/com/chat/types/websocket/input/StickyData.java | 495 | package com.chat.types.websocket.input;
import com.chat.tools.Tools;
import com.chat.types.JSONWriter;
import java.io.IOException;
/**
* Created by tyler on 6/11/16.
*/
public class StickyData implements JSONWriter {
private Long id;
private Boolean sticky;
public StickyData(Long id, Boolean sticky) {
this.id = id;
this.sticky = sticky;
}
public Long getId() {
return id;
}
public Boolean getSticky() {
return sticky;
}
} | gpl-3.0 |
zero1hac/songspk-download | nameextracter.py | 479 | import re
# Matches parentheses, which are stripped from song titles.
_PAREN_RE = re.compile(r'[()]')


def extract(soup):
    """Collect cleaned song names from a parsed album-listing page.

    Walks every ``ul.single-album`` element, then each ``div.song-title``
    inside it, and takes the text of every anchor found there. Titles have
    parentheses removed and spaces replaced by underscores (so they are
    filesystem-friendly). Anchors with empty text are skipped.

    Bug fix vs. the original: ``text.decode("utf-8")`` was called
    unconditionally, which crashes on Python 3 (str has no decode) and
    double-converts already-unicode text on Python 2. Decoding now happens
    only for byte strings.

    :param soup: a BeautifulSoup-like object exposing ``find_all``
    :return: list of cleaned song-name strings, in document order
    """
    names = []
    for album in soup.find_all("ul", {"class": "single-album"}):
        for title_div in album.find_all("div", {"class": "song-title"}):
            for anchor in title_div.find_all("a"):
                raw = anchor.text
                if not raw:
                    continue
                if isinstance(raw, bytes):
                    # tolerate legacy byte-string text nodes
                    raw = raw.decode("utf-8")
                cleaned = _PAREN_RE.sub('', raw)
                names.append(cleaned.replace(" ", "_"))
    return names
| gpl-3.0 |
little-apps/little-system-cleaner | Common Tools/TagLib/IFD/Entries/SRationalArrayIFDEntry.cs | 2692 | //
// SRationalArrayIFDEntry.cs:
//
// Author:
// Ruben Vermeersch (ruben@savanne.be)
// Mike Gemuende (mike@gemuende.de)
//
// Copyright (C) 2009 Ruben Vermeersch
// Copyright (C) 2009 Mike Gemuende
//
// This library is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License version
// 2.1 as published by the Free Software Foundation.
//
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
// USA
//
namespace CommonTools.TagLib.IFD.Entries
{
/// <summary>
/// Contains a SRATIONAL value with a count > 1
/// </summary>
	/// <summary>
	///    Contains a SRATIONAL value with a count > 1
	/// </summary>
	public class SRationalArrayIFDEntry : ArrayIFDEntry<SRational>
	{

#region Constructors

		/// <summary>
		///    Constructor.
		/// </summary>
		/// <param name="tag">
		///    A <see cref="System.UInt16"/> with the tag ID of the entry this instance
		///    represents
		/// </param>
		/// <param name="entries">
		///    A <see cref="SRational[]"/> to be stored
		/// </param>
		public SRationalArrayIFDEntry (ushort tag, SRational [] entries)
			: base (tag)
		{
			Values = entries;
		}

#endregion

#region Public Methods

		/// <summary>
		///    Renders the current instance to a <see cref="ByteVector"/>
		/// </summary>
		/// <param name="is_bigendian">
		///    A <see cref="System.Boolean"/> indicating the endianess for rendering.
		/// </param>
		/// <param name="offset">
		///    A <see cref="System.UInt32"/> with the offset, the data is stored.
		/// </param>
		/// <param name="type">
		///    A <see cref="System.UInt16"/> the ID of the type, which is rendered
		/// </param>
		/// <param name="count">
		///    A <see cref="System.UInt32"/> with the count of the values which are
		///    rendered.
		/// </param>
		/// <returns>
		///    A <see cref="ByteVector"/> with the rendered data.
		/// </returns>
		public override ByteVector Render (bool is_bigendian, uint offset, out ushort type, out uint count)
		{
			type = (ushort) IFDEntryType.SRational;
			count = (uint) Values.Length;

			// Each SRATIONAL is serialized as two 32-bit signed integers:
			// numerator followed by denominator, in the requested byte order.
			ByteVector data = new ByteVector ();
			foreach (SRational rational in Values) {
				data.Add (ByteVector.FromInt (rational.Numerator, is_bigendian));
				data.Add (ByteVector.FromInt (rational.Denominator, is_bigendian));
			}
			return data;
		}

#endregion

	}
| gpl-3.0 |
andlaus/opm-core | examples/sim_2p_incomp.cpp | 12305 | /*
Copyright 2012 SINTEF ICT, Applied Mathematics.
This file is part of the Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#if HAVE_CONFIG_H
#include "config.h"
#endif // HAVE_CONFIG_H
#include <opm/core/pressure/FlowBCManager.hpp>
#include <opm/core/grid.h>
#include <opm/core/grid/GridManager.hpp>
#include <opm/core/wells.h>
#include <opm/core/wells/WellsManager.hpp>
#include <opm/core/utility/ErrorMacros.hpp>
#include <opm/core/simulator/initState.hpp>
#include <opm/core/simulator/SimulatorReport.hpp>
#include <opm/core/simulator/SimulatorTimer.hpp>
#include <opm/core/utility/miscUtilities.hpp>
#include <opm/core/utility/parameters/ParameterGroup.hpp>
#include <opm/core/props/IncompPropertiesBasic.hpp>
#include <opm/core/props/IncompPropertiesFromDeck.hpp>
#include <opm/core/props/rock/RockCompressibility.hpp>
#include <opm/core/linalg/LinearSolverFactory.hpp>
#include <opm/core/simulator/TwophaseState.hpp>
#include <opm/core/simulator/WellState.hpp>
#include <opm/core/simulator/SimulatorIncompTwophase.hpp>
#include <memory>
#include <boost/filesystem.hpp>
#include <algorithm>
#include <iostream>
#include <vector>
#include <numeric>
namespace
{
void warnIfUnusedParams(const Opm::parameter::ParameterGroup& param)
{
if (param.anyUnused()) {
std::cout << "-------------------- Unused parameters: --------------------\n";
param.displayUsage();
std::cout << "----------------------------------------------------------------" << std::endl;
}
}
} // anon namespace
// ----------------- Main program -----------------
int
main(int argc, char** argv)
try
{
using namespace Opm;
std::cout << "\n================ Test program for incompressible two-phase flow ===============\n\n";
parameter::ParameterGroup param(argc, argv, false);
std::cout << "--------------- Reading parameters ---------------" << std::endl;
#if ! HAVE_SUITESPARSE_UMFPACK_H
// This is an extra check to intercept a potentially invalid request for the
// implicit transport solver as early as possible for the user.
{
const bool use_reorder = param.getDefault("use_reorder", true);
if (!use_reorder) {
OPM_THROW(std::runtime_error, "Cannot use implicit transport solver without UMFPACK. "
"Either reconfigure opm-core with SuiteSparse/UMFPACK support and recompile, "
"or use the reordering solver (use_reorder=true).");
}
}
#endif
// If we have a "deck_filename", grid and props will be read from that.
bool use_deck = param.has("deck_filename");
std::unique_ptr<EclipseGridParser> deck;
std::unique_ptr<GridManager> grid;
std::unique_ptr<IncompPropertiesInterface> props;
std::unique_ptr<RockCompressibility> rock_comp;
TwophaseState state;
// bool check_well_controls = false;
// int max_well_control_iterations = 0;
double gravity[3] = { 0.0 };
if (use_deck) {
std::string deck_filename = param.get<std::string>("deck_filename");
deck.reset(new EclipseGridParser(deck_filename));
// Grid init
grid.reset(new GridManager(*deck));
// Rock and fluid init
props.reset(new IncompPropertiesFromDeck(*deck, *grid->c_grid()));
// check_well_controls = param.getDefault("check_well_controls", false);
// max_well_control_iterations = param.getDefault("max_well_control_iterations", 10);
// Rock compressibility.
rock_comp.reset(new RockCompressibility(*deck));
// Gravity.
gravity[2] = deck->hasField("NOGRAV") ? 0.0 : unit::gravity;
// Init state variables (saturation and pressure).
if (param.has("init_saturation")) {
initStateBasic(*grid->c_grid(), *props, param, gravity[2], state);
} else {
initStateFromDeck(*grid->c_grid(), *props, *deck, gravity[2], state);
}
} else {
// Grid init.
const int nx = param.getDefault("nx", 100);
const int ny = param.getDefault("ny", 100);
const int nz = param.getDefault("nz", 1);
const double dx = param.getDefault("dx", 1.0);
const double dy = param.getDefault("dy", 1.0);
const double dz = param.getDefault("dz", 1.0);
grid.reset(new GridManager(nx, ny, nz, dx, dy, dz));
// Rock and fluid init.
props.reset(new IncompPropertiesBasic(param, grid->c_grid()->dimensions, grid->c_grid()->number_of_cells));
// Rock compressibility.
rock_comp.reset(new RockCompressibility(param));
// Gravity.
gravity[2] = param.getDefault("gravity", 0.0);
// Init state variables (saturation and pressure).
initStateBasic(*grid->c_grid(), *props, param, gravity[2], state);
}
// Warn if gravity but no density difference.
bool use_gravity = (gravity[0] != 0.0 || gravity[1] != 0.0 || gravity[2] != 0.0);
if (use_gravity) {
if (props->density()[0] == props->density()[1]) {
std::cout << "**** Warning: nonzero gravity, but zero density difference." << std::endl;
}
}
const double *grav = use_gravity ? &gravity[0] : 0;
// Initialising src
int num_cells = grid->c_grid()->number_of_cells;
std::vector<double> src(num_cells, 0.0);
if (use_deck) {
// Do nothing, wells will be the driving force, not source terms.
} else {
// Compute pore volumes, in order to enable specifying injection rate
// terms of total pore volume.
std::vector<double> porevol;
if (rock_comp->isActive()) {
computePorevolume(*grid->c_grid(), props->porosity(), *rock_comp, state.pressure(), porevol);
} else {
computePorevolume(*grid->c_grid(), props->porosity(), porevol);
}
const double tot_porevol_init = std::accumulate(porevol.begin(), porevol.end(), 0.0);
const double default_injection = use_gravity ? 0.0 : 0.1;
const double flow_per_sec = param.getDefault<double>("injected_porevolumes_per_day", default_injection)
*tot_porevol_init/unit::day;
src[0] = flow_per_sec;
src[num_cells - 1] = -flow_per_sec;
}
// Boundary conditions.
FlowBCManager bcs;
if (param.getDefault("use_pside", false)) {
int pside = param.get<int>("pside");
double pside_pressure = param.get<double>("pside_pressure");
bcs.pressureSide(*grid->c_grid(), FlowBCManager::Side(pside), pside_pressure);
}
// Linear solver.
LinearSolverFactory linsolver(param);
// Write parameters used for later reference.
bool output = param.getDefault("output", true);
std::ofstream epoch_os;
std::string output_dir;
if (output) {
output_dir =
param.getDefault("output_dir", std::string("output"));
boost::filesystem::path fpath(output_dir);
try {
create_directories(fpath);
}
catch (...) {
OPM_THROW(std::runtime_error, "Creating directories failed: " << fpath);
}
std::string filename = output_dir + "/epoch_timing.param";
epoch_os.open(filename.c_str(), std::fstream::trunc | std::fstream::out);
// open file to clean it. The file is appended to in SimulatorTwophase
filename = output_dir + "/step_timing.param";
std::fstream step_os(filename.c_str(), std::fstream::trunc | std::fstream::out);
step_os.close();
param.writeParam(output_dir + "/simulation.param");
}
std::cout << "\n\n================ Starting main simulation loop ===============\n"
<< " (number of epochs: "
<< (use_deck ? deck->numberOfEpochs() : 1) << ")\n\n" << std::flush;
SimulatorReport rep;
if (!use_deck) {
// Simple simulation without a deck.
WellsManager wells; // no wells.
SimulatorIncompTwophase simulator(param,
*grid->c_grid(),
*props,
rock_comp->isActive() ? rock_comp.get() : 0,
wells,
src,
bcs.c_bcs(),
linsolver,
grav);
SimulatorTimer simtimer;
simtimer.init(param);
warnIfUnusedParams(param);
WellState well_state;
well_state.init(0, state);
rep = simulator.run(simtimer, state, well_state);
} else {
// With a deck, we may have more epochs etc.
WellState well_state;
int step = 0;
SimulatorTimer simtimer;
// Use timer for last epoch to obtain total time.
deck->setCurrentEpoch(deck->numberOfEpochs() - 1);
simtimer.init(*deck);
const double total_time = simtimer.totalTime();
for (int epoch = 0; epoch < deck->numberOfEpochs(); ++epoch) {
// Set epoch index.
deck->setCurrentEpoch(epoch);
// Update the timer.
if (deck->hasField("TSTEP")) {
simtimer.init(*deck);
} else {
if (epoch != 0) {
OPM_THROW(std::runtime_error, "No TSTEP in deck for epoch " << epoch);
}
simtimer.init(param);
}
simtimer.setCurrentStepNum(step);
simtimer.setTotalTime(total_time);
// Report on start of epoch.
std::cout << "\n\n-------------- Starting epoch " << epoch << " --------------"
<< "\n (number of steps: "
<< simtimer.numSteps() - step << ")\n\n" << std::flush;
// Create new wells, well_state
WellsManager wells(*deck, *grid->c_grid(), props->permeability());
// @@@ HACK: we should really make a new well state and
// properly transfer old well state to it every epoch,
// since number of wells may change etc.
if (epoch == 0) {
well_state.init(wells.c_wells(), state);
}
// Create and run simulator.
SimulatorIncompTwophase simulator(param,
*grid->c_grid(),
*props,
rock_comp->isActive() ? rock_comp.get() : 0,
wells,
src,
bcs.c_bcs(),
linsolver,
grav);
if (epoch == 0) {
warnIfUnusedParams(param);
}
SimulatorReport epoch_rep = simulator.run(simtimer, state, well_state);
if (output) {
epoch_rep.reportParam(epoch_os);
}
// Update total timing report and remember step number.
rep += epoch_rep;
step = simtimer.currentStepNum();
}
}
std::cout << "\n\n================ End of simulation ===============\n\n";
rep.report(std::cout);
if (output) {
std::string filename = output_dir + "/walltime.param";
std::fstream tot_os(filename.c_str(),std::fstream::trunc | std::fstream::out);
rep.reportParam(tot_os);
}
}
catch (const std::exception &e) {
std::cerr << "Program threw an exception: " << e.what() << "\n";
throw;
}
| gpl-3.0 |
psnc-dl/darceo | wrdz/wrdz-zmkd/dao/src/main/java/pl/psnc/synat/wrdz/zmkd/dao/format/FileFormatSorterBuilder.java | 993 | /**
* Copyright 2015 Poznań Supercomputing and Networking Center
*
* Licensed under the GNU General Public License, Version 3.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.gnu.org/licenses/gpl-3.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pl.psnc.synat.wrdz.zmkd.dao.format;
import pl.psnc.synat.wrdz.common.dao.GenericQuerySorterBuilder;
import pl.psnc.synat.wrdz.zmkd.entity.format.FileFormat;
/**
 * Defines methods producing sorters for queries concerning {@link FileFormat} entities.
 * <p>
 * No methods are declared beyond those inherited from
 * {@link GenericQuerySorterBuilder}; this interface exists solely to bind the
 * generic sorter-builder contract to the {@link FileFormat} entity type so it
 * can be injected/looked up by that concrete type.
 */
public interface FileFormatSorterBuilder extends GenericQuerySorterBuilder<FileFormat> {
}
| gpl-3.0 |
CarlosManuelRodr/wxChaos | libs/wxMSW-3.1.4/src/generic/spinctlg.cpp | 22796 | ///////////////////////////////////////////////////////////////////////////////
// Name: src/generic/spinctlg.cpp
// Purpose: implements wxSpinCtrl as a composite control
// Author: Vadim Zeitlin
// Modified by:
// Created: 29.01.01
// Copyright: (c) 2001 Vadim Zeitlin <zeitlin@dptmaths.ens-cachan.fr>
// Licence: wxWindows licence
///////////////////////////////////////////////////////////////////////////////
// ============================================================================
// declarations
// ============================================================================
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
#ifndef WX_PRECOMP
#include "wx/textctrl.h"
#endif //WX_PRECOMP
#include "wx/spinctrl.h"
#include "wx/tooltip.h"
#if wxUSE_SPINCTRL
#include "wx/private/spinctrl.h"
wxIMPLEMENT_DYNAMIC_CLASS(wxSpinDoubleEvent, wxNotifyEvent);
// There are port-specific versions for the wxSpinCtrl, so exclude the
// contents of this file in those cases
#if !defined(wxHAS_NATIVE_SPINCTRL) || !defined(wxHAS_NATIVE_SPINCTRLDOUBLE)
#include "wx/spinbutt.h"
#if wxUSE_SPINBTN
#include "wx/valnum.h"
#include "wx/valtext.h"
// ----------------------------------------------------------------------------
// constants
// ----------------------------------------------------------------------------

// The margin between the text control and the spin: the value here is the same
// as the margin between the spin button and its "buddy" text control in wxMSW
// so the generic control looks similarly to the native one there, we might
// need to use different value for the other platforms (and maybe even
// determine it dynamically?).
static const wxCoord MARGIN = 1;

// Range used for the internal wxSpinButton; kept well below INT_MAX so the
// relative-position arithmetic in OnSpinButton() cannot overflow.
#define SPINCTRLBUT_MAX 32000 // large to avoid wrap around trouble
// ----------------------------------------------------------------------------
// wxSpinCtrlTextGeneric: text control used by spin control
// ----------------------------------------------------------------------------
// The text-entry half of the composite spin control. It routes the
// interesting events (key presses, text changes, focus loss) to the owning
// wxSpinCtrlGenericBase and computes its best size from the widest of the
// rendered min/max values so any value in range always fits.
class wxSpinCtrlTextGeneric : public wxTextCtrl
{
public:
    wxSpinCtrlTextGeneric(wxSpinCtrlGenericBase *spin, const wxString& value, long style=0)
        : wxTextCtrl(spin, wxID_ANY, value, wxDefaultPosition, wxDefaultSize,
                     // This is tricky: we want to honour all alignment flags
                     // except wxALIGN_CENTER_VERTICAL because it's the same
                     // as wxTE_PASSWORD and we definitely don't want to show
                     // asterisks in spin control. We also want to respect
                     // wxTE_PROCESS and the border flags as well.
                     style & (wxALIGN_MASK | wxBORDER_MASK | wxTE_PROCESS_ENTER)
                           & ~wxTE_PASSWORD)
    {
        m_spin = spin;
        InvalidateBestSize();

        // remove the default minsize, the spinctrl will have one instead
        SetSizeHints(wxDefaultCoord, wxDefaultCoord);
    }

    virtual ~wxSpinCtrlTextGeneric()
    {
        // MSW sends extra kill focus event on destroy
        if (m_spin)
            m_spin->m_textCtrl = NULL;

        m_spin = NULL;
    }

    // Give the owning spin control first crack at key presses (it implements
    // the arrow/page-key stepping); fall back to normal processing only if it
    // did not consume the event.
    void OnChar( wxKeyEvent &event )
    {
        if ( !m_spin->ProcessWindowEvent(event) )
            event.Skip();
    }

    // Re-send wxEVT_TEXT / wxEVT_TEXT_ENTER as if they originated from the
    // composite control itself: fix up the event object, the id and the
    // integer payload before forwarding.
    void OnTextEvent(wxCommandEvent& event)
    {
        wxCommandEvent eventCopy(event);
        eventCopy.SetEventObject(m_spin);
        eventCopy.SetId(m_spin->GetId());
        eventCopy.SetInt(wxAtoi(event.GetString()));
        m_spin->ProcessWindowEvent(eventCopy);
    }

    // Forward focus loss to the owner (which syncs the typed text back into
    // the numeric value) before the default handling runs.
    void OnKillFocus(wxFocusEvent& event)
    {
        if (m_spin)
            m_spin->ProcessWindowEvent(event);

        event.Skip();
    }

    // Best size is wide/tall enough for both extremes of the valid range.
    virtual wxSize DoGetBestSize() const wxOVERRIDE
    {
        wxString minVal = m_spin->DoValueToText(m_spin->m_min);
        wxString maxVal = m_spin->DoValueToText(m_spin->m_max);
        wxSize minValSize = GetSizeFromText(minVal);
        wxSize maxValSize = GetSizeFromText(maxVal);
        return wxSize(wxMax(minValSize.x, maxValSize.x), wxMax(minValSize.y, maxValSize.y));
    }

    wxSpinCtrlGenericBase *m_spin;  // back-pointer to the owner, nulled on destruction

private:
    wxDECLARE_EVENT_TABLE();
};
// Event routing for the embedded text control: all interesting events go to
// the wxSpinCtrlTextGeneric handlers above, which delegate to the owner.
wxBEGIN_EVENT_TABLE(wxSpinCtrlTextGeneric, wxTextCtrl)
    EVT_CHAR(wxSpinCtrlTextGeneric::OnChar)

    // Forward the text events to wxSpinCtrl itself adjusting them slightly in
    // the process.
    EVT_TEXT(wxID_ANY, wxSpinCtrlTextGeneric::OnTextEvent)

    // And we need to forward this one too as wxSpinCtrl is supposed to
    // generate it if wxTE_PROCESS_ENTER is used with it (and if it isn't,
    // we're never going to get EVT_TEXT_ENTER in the first place).
    EVT_TEXT_ENTER(wxID_ANY, wxSpinCtrlTextGeneric::OnTextEvent)

    EVT_KILL_FOCUS(wxSpinCtrlTextGeneric::OnKillFocus)
wxEND_EVENT_TABLE()
// ----------------------------------------------------------------------------
// wxSpinCtrlButtonGeneric: spin button used by spin control
// ----------------------------------------------------------------------------
// The spin-button half of the composite control: a plain vertical
// wxSpinButton with a large symmetric range that simply hands its up/down
// events to the owning wxSpinCtrlGenericBase.
class wxSpinCtrlButtonGeneric : public wxSpinButton
{
public:
    wxSpinCtrlButtonGeneric(wxSpinCtrlGenericBase *spin, int style)
        : wxSpinButton(spin, wxID_ANY, wxDefaultPosition,
                       wxDefaultSize, style | wxSP_VERTICAL)
    {
        m_spin = spin;

        SetRange(-SPINCTRLBUT_MAX, SPINCTRLBUT_MAX);

        // remove the default minsize, the spinctrl will have one instead
        SetSizeHints(wxDefaultCoord, wxDefaultCoord);
    }

    void OnSpinButton(wxSpinEvent& event)
    {
        if (m_spin)
            m_spin->OnSpinButton(event);
    }

    wxSpinCtrlGenericBase *m_spin;  // back-pointer to the owner

private:
    wxDECLARE_EVENT_TABLE();
};
// Both spin directions are funnelled through the same forwarding handler.
wxBEGIN_EVENT_TABLE(wxSpinCtrlButtonGeneric, wxSpinButton)
    EVT_SPIN_UP(  wxID_ANY, wxSpinCtrlButtonGeneric::OnSpinButton)
    EVT_SPIN_DOWN(wxID_ANY, wxSpinCtrlButtonGeneric::OnSpinButton)
wxEND_EVENT_TABLE()
// ============================================================================
// wxSpinCtrlGenericBase
// ============================================================================
// ----------------------------------------------------------------------------
// wxSpinCtrlGenericBase creation
// ----------------------------------------------------------------------------
// Reset all members to safe defaults; the real configuration is supplied
// later by Create().
void wxSpinCtrlGenericBase::Init()
{
    m_value         = 0;
    m_min           = 0;
    m_max           = 100;
    m_increment     = 1;
    m_snap_to_ticks = false;

    // Last known position of the internal wxSpinButton, used by
    // OnSpinButton() to compute relative steps.
    m_spin_value    = 0;

    // Child controls are created in Create(), not here.
    m_textCtrl   = NULL;
    m_spinButton = NULL;
}
// Second-phase creation: set up the range/value/increment, create the two
// child controls (text + spin button) and size/position the composite.
// Returns false if the base control could not be created.
bool wxSpinCtrlGenericBase::Create(wxWindow *parent,
                                   wxWindowID id,
                                   const wxString& value,
                                   const wxPoint& pos, const wxSize& size,
                                   long style,
                                   double min, double max, double initial,
                                   double increment,
                                   const wxString& name)
{
    // don't use borders for this control itself, it wouldn't look good with
    // the text control borders (but we might want to use style border bits to
    // select the text control style)
    if ( !wxControl::Create(parent, id, wxDefaultPosition, wxDefaultSize,
                            (style & ~wxBORDER_MASK) | wxBORDER_NONE,
                            wxDefaultValidator, name) )
    {
        return false;
    }

    m_value = initial;
    m_min   = min;
    m_max   = max;
    m_increment = increment;

    // the string value overrides the numeric one (for backwards compatibility
    // reasons and also because it is simpler to specify the string value which
    // comes much sooner in the list of arguments and leave the initial
    // parameter unspecified)
    if ( !value.empty() )
    {
        double d;
        if ( DoTextToValue(value, &d) )
            m_value = d;
    }

    m_textCtrl   = new wxSpinCtrlTextGeneric(this, DoValueToText(m_value), style);
    m_spinButton = new wxSpinCtrlButtonGeneric(this, style);

#if wxUSE_TOOLTIPS
    m_textCtrl->SetToolTip(GetToolTipText());
    m_spinButton->SetToolTip(GetToolTipText());
#endif // wxUSE_TOOLTIPS

    ResetTextValidator();

    // Remember the initial button position so future deltas are relative.
    m_spin_value = m_spinButton->GetValue();

    SetInitialSize(size);
    Move(pos);

    return true;
}
// Destroy the child controls eagerly (see comment below) instead of letting
// the parent window clean them up later.
wxSpinCtrlGenericBase::~wxSpinCtrlGenericBase()
{
    // delete the controls now, don't leave them alive even though they would
    // still be eventually deleted by our parent - but it will be too late, the
    // user code expects them to be gone now
    if (m_textCtrl)
    {
        // null this since MSW sends KILL_FOCUS on deletion, see ~wxSpinCtrlTextGeneric
        wxDynamicCast(m_textCtrl, wxSpinCtrlTextGeneric)->m_spin = NULL;

        // Clear the member first so handlers fired during deletion don't see
        // a dangling pointer.
        wxSpinCtrlTextGeneric *text = (wxSpinCtrlTextGeneric*)m_textCtrl;
        m_textCtrl = NULL;
        delete text;
    }

    wxDELETE(m_spinButton);
}
// Expose the two sub-windows making up this composite control so generic
// composite-window logic (focus, tooltips, ...) can iterate over them.
wxWindowList wxSpinCtrlGenericBase::GetCompositeWindowParts() const
{
    wxWindowList parts;
    parts.push_back(m_textCtrl);
    parts.push_back(m_spinButton);
    return parts;
}
// ----------------------------------------------------------------------------
// geometry
// ----------------------------------------------------------------------------
// Best size of the composite: delegate to DoGetSizeFromTextSize() using the
// text control's own best width.
wxSize wxSpinCtrlGenericBase::DoGetBestSize() const
{
    return DoGetSizeFromTextSize(m_textCtrl->GetBestSize().x, -1);
}
// Compute the overall control size needed for a text area of (xlen, ylen):
// text width plus the button width plus the separating margin.
wxSize wxSpinCtrlGenericBase::DoGetSizeFromTextSize(int xlen, int ylen) const
{
    const wxSize sizeBtn  = m_spinButton->GetBestSize();
    const wxSize sizeText = m_textCtrl->GetSizeFromTextSize(xlen, ylen);

    // Note that we don't use the button height here, as it can be
    // much greater than that of a text control that we want to resemble.
    return wxSize(sizeText.x + sizeBtn.x + MARGIN, sizeText.y);
}
// Lay out the children whenever the composite is moved/resized: the text
// control fills the left part, the button takes its best width on the right.
void wxSpinCtrlGenericBase::DoMoveWindow(int x, int y, int width, int height)
{
    wxControl::DoMoveWindow(x, y, width, height);

    // position the subcontrols inside the client area

    // Use GetBestSize instead of GetSize to get the size of the spin control.
    // This fixes a problem on wxMSW when the size is set after a DPI change.
    // GetSize returns the old, invalid, size. GetBestSize will return the size
    // that the control should be. Normally, GetBestSize and GetSize should
    // always return the same value because the size of the spinButton never
    // changes.
    wxSize sizeBtn = m_spinButton->GetBestSize();

    wxCoord wText = width - sizeBtn.x - MARGIN;
    m_textCtrl->SetSize(0, 0, wText, height);
    m_spinButton->SetSize(0 + wText + MARGIN, 0, wxDefaultCoord, height);
}
// ----------------------------------------------------------------------------
// operations forwarded to the subcontrols
// ----------------------------------------------------------------------------
// Focus on the composite control means focus on its editable text part.
void wxSpinCtrlGenericBase::SetFocus()
{
    if ( m_textCtrl )
        m_textCtrl->SetFocus();
}
#ifdef __WXMSW__
// MSW-only override that just forwards to the base class implementation.
// NOTE(review): presumably this exists to bypass some intermediate override
// in the MSW class hierarchy — confirm against the wxMSW sources.
void wxSpinCtrlGenericBase::DoEnable(bool enable)
{
     wxSpinCtrlBase::DoEnable(enable);
}
#endif // __WXMSW__
// Enable/disable the composite and both of its children together.
bool wxSpinCtrlGenericBase::Enable(bool enable)
{
    if ( !wxSpinCtrlBase::Enable(enable) )
        return false;

    m_spinButton->Enable(enable);
    m_textCtrl->Enable(enable);

    return true;
}
// Show/hide the composite and both children, guarding against the children
// not existing yet (see comment below).
bool wxSpinCtrlGenericBase::Show(bool show)
{
    if ( !wxControl::Show(show) )
        return false;

    // under GTK Show() is called the first time before we are fully
    // constructed
    if ( m_spinButton )
    {
        m_spinButton->Show(show);
        m_textCtrl->Show(show);
    }

    return true;
}
// Apply the background colour to the text part only.
bool wxSpinCtrlGenericBase::SetBackgroundColour(const wxColour& colour)
{
    // We need to provide this otherwise the entire composite window
    // background and therefore the between component spaces
    // will be changed.
    if ( m_textCtrl )
        return m_textCtrl->SetBackgroundColour(colour);

    return true;
}
// ----------------------------------------------------------------------------
// Handle sub controls events
// ----------------------------------------------------------------------------
// Events re-forwarded here by the child text control (see
// wxSpinCtrlTextGeneric::OnChar/OnKillFocus above).
wxBEGIN_EVENT_TABLE(wxSpinCtrlGenericBase, wxSpinCtrlBase)
    EVT_CHAR(wxSpinCtrlGenericBase::OnTextChar)
    EVT_KILL_FOCUS(wxSpinCtrlGenericBase::OnTextLostFocus)
wxEND_EVENT_TABLE()
// React to an up/down click on the embedded wxSpinButton: translate the
// change of the raw button position into a multiple of m_increment, clamp
// (or wrap under wxSP_WRAP) the resulting value and push it into the text
// control, firing the appropriate events.
void wxSpinCtrlGenericBase::OnSpinButton(wxSpinEvent& event)
{
    event.Skip();

    // Pressing the spin button should also give the focus to the text part of
    // the control, at least this is how the native control behaves under MSW.
    SetFocus();

    // Sync the textctrl since the user expects that the button will modify
    // what they see in the textctrl.
    SyncSpinToText(SendEvent_None);

    int spin_value = event.GetPosition();
    double step = (event.GetEventType() == wxEVT_SCROLL_LINEUP) ? 1 : -1;

    // Use the spinbutton's acceleration, if any, but not if wrapping around
    if (((spin_value >= 0) && (m_spin_value >= 0)) || ((spin_value <= 0) && (m_spin_value <= 0)))
        step *= abs(spin_value - m_spin_value);

    double value = AdjustToFitInRange(m_value + step*m_increment);

    // Ignore the edges when it wraps since the up/down event may be opposite
    // They are in GTK and Mac
    if (abs(spin_value - m_spin_value) > SPINCTRLBUT_MAX)
    {
        m_spin_value = spin_value;
        return;
    }

    m_spin_value = spin_value;

    // Notify about the change in wxTextCtrl too.
    if ( DoSetValue(value, SendEvent_Text) )
        DoSendEvent();
}
// When the text part loses focus, commit whatever the user typed: parse it
// into the value and fire the change event if the value actually changed.
void wxSpinCtrlGenericBase::OnTextLostFocus(wxFocusEvent& event)
{
    if ( SyncSpinToText(SendEvent_Text) )
        DoSendEvent();

    event.Skip();
}
// Keyboard stepping inside the text part (only when wxSP_ARROW_KEYS is set):
// arrows step by one increment, page up/down by ten increments. Anything
// else is left for normal text-control processing.
void wxSpinCtrlGenericBase::OnTextChar(wxKeyEvent& event)
{
    if ( !HasFlag(wxSP_ARROW_KEYS) )
    {
        event.Skip();
        return;
    }

    double value = m_value;
    switch ( event.GetKeyCode() )
    {
        case WXK_UP :
            value += m_increment;
            break;

        case WXK_DOWN :
            value -= m_increment;
            break;

        case WXK_PAGEUP :
            value += m_increment * 10.0;
            break;

        case WXK_PAGEDOWN :
            value -= m_increment * 10.0;
            break;

        default:
            event.Skip();
            return;
    }

    value = AdjustToFitInRange(value);

    // Commit any pending edits first so the step applies on top of them.
    SyncSpinToText(SendEvent_None);

    // No need to send event, it was already generated by wxTextCtrl itself.
    if ( DoSetValue(value, SendEvent_None) )
        DoSendEvent();
}
// ----------------------------------------------------------------------------
// Textctrl functions
// ----------------------------------------------------------------------------
// Pull the (possibly user-edited) text control contents back into the value:
// numbers out of range are clamped, unparseable text is replaced by the last
// valid value. Returns true if this resulted in a change, false if there is
// no text control or it was not modified.
bool wxSpinCtrlGenericBase::SyncSpinToText(SendEvent sendEvent)
{
    if ( !m_textCtrl || !m_textCtrl->IsModified() )
        return false;

    double textValue;
    if ( DoTextToValue(m_textCtrl->GetValue(), &textValue) )
    {
        if (textValue > m_max)
            textValue = m_max;
        else if (textValue < m_min)
            textValue = m_min;
    }
    else // text contents is not a valid number at all
    {
        // replace its contents with the last valid value
        textValue = m_value;
    }

    // we must always set the value here, even if it's equal to m_value, as
    // otherwise we could be left with an out of range value when leaving the
    // text control and the current value is already m_max for example
    return DoSetValue(textValue, sendEvent);
}
// ----------------------------------------------------------------------------
// changing value and range
// ----------------------------------------------------------------------------
// Set the value from a string. A valid in-range number goes through the
// normal value-setting path; any other text is shown verbatim in the text
// control (selected, so the next keystroke replaces it).
void wxSpinCtrlGenericBase::SetValue(const wxString& text)
{
    wxCHECK_RET( m_textCtrl, wxT("invalid call to wxSpinCtrl::SetValue") );

    double val;
    if ( DoTextToValue(text, &val) && InRange(val) )
    {
        DoSetValue(val, SendEvent_None);
    }
    else // not a number at all or out of range
    {
        m_textCtrl->ChangeValue(text);
        m_textCtrl->SelectAll();
    }
}
// Core value setter: clamp val to [m_min, m_max], optionally snap it to the
// nearest multiple of m_increment, and update the text control (generating a
// text event or not, depending on sendEvent). Returns true if either the
// stored value or the displayed text actually changed.
bool wxSpinCtrlGenericBase::DoSetValue(double val, SendEvent sendEvent)
{
    wxCHECK_MSG( m_textCtrl, false, wxT("invalid call to wxSpinCtrl::SetValue") );

    if ( val < m_min )
        val = m_min;
    if ( val > m_max )
        val = m_max;

    if ( m_snap_to_ticks && (m_increment != 0) )
    {
        // Round to the closer of the two surrounding increment multiples.
        double snap_value = val / m_increment;

        if (wxFinite(snap_value)) // FIXME what to do about a failure?
        {
            if ((snap_value - floor(snap_value)) < (ceil(snap_value) - snap_value))
                val = floor(snap_value) * m_increment;
            else
                val = ceil(snap_value) * m_increment;
        }
    }

    wxString str(DoValueToText(val));

    if ((val != m_value) || (str != m_textCtrl->GetValue()))
    {
        // Store what the text representation round-trips to, so the stored
        // value always matches what the user sees ("wysiwyg").
        if ( !DoTextToValue(str, &m_value ) )    // wysiwyg for textctrl
            m_value = val;

        switch ( sendEvent )
        {
            case SendEvent_None:
                m_textCtrl->ChangeValue(str);
                break;

            case SendEvent_Text:
                m_textCtrl->SetValue(str);
                break;
        }

        m_textCtrl->SelectAll();
        m_textCtrl->DiscardEdits();
        return true;
    }

    return false;
}
// Bring an out-of-range value back into [m_min, m_max]: with wxSP_WRAP the
// value wraps around to the opposite end of the range, otherwise it is
// clamped to the nearest bound. In-range values are returned unchanged.
double wxSpinCtrlGenericBase::AdjustToFitInRange(double value) const
{
    const bool wrap = HasFlag(wxSP_WRAP);

    if (value < m_min)
        return wrap ? m_max : m_min;

    if (value > m_max)
        return wrap ? m_min : m_max;

    return value;
}
// Change the valid range. Silently rejected if the new range is incompatible
// with the current numeric base (negative values require base 10). The
// current value is clamped into the new range and the text validator is
// rebuilt to match.
void wxSpinCtrlGenericBase::DoSetRange(double min, double max)
{
    // Negative values in the range are allowed only if base == 10
    if ( !wxSpinCtrlImpl::IsBaseCompatibleWithRange(min, max, GetBase()) )
    {
        return;
    }

    // The widest of min/max may change, so the cached best size of the text
    // part becomes stale.
    if ( min != m_min || max != m_max )
        m_textCtrl->InvalidateBestSize();

    m_min = min;
    if ( m_value < m_min )
        DoSetValue(m_min, SendEvent_None);
    m_max = max;
    if ( m_value > m_max )
        DoSetValue(m_max, SendEvent_None);

    ResetTextValidator();
}
// Set the step used by the arrows/spin button; takes effect on the next step.
void wxSpinCtrlGenericBase::DoSetIncrement(double inc)
{
    m_increment = inc;
}
// Toggle snap-to-ticks mode and immediately re-apply the current value so it
// gets snapped (or un-snapped) right away.
void wxSpinCtrlGenericBase::SetSnapToTicks(bool snap_to_ticks)
{
    m_snap_to_ticks = snap_to_ticks;
    DoSetValue(m_value, SendEvent_None);
}
// Forward text selection to the embedded text control.
void wxSpinCtrlGenericBase::SetSelection(long from, long to)
{
    wxCHECK_RET( m_textCtrl, wxT("invalid call to wxSpinCtrl::SetSelection") );

    m_textCtrl->SetSelection(from, to);
}
#ifndef wxHAS_NATIVE_SPINCTRL
//-----------------------------------------------------------------------------
// wxSpinCtrl
//-----------------------------------------------------------------------------
// Switch the display base of the integer spin control. Only bases 10 and 16
// are supported, and a range containing negative values can only be shown in
// base 10. Returns false if the requested base cannot be used. On success
// the current text is re-rendered in the new base and the validator rebuilt.
bool wxSpinCtrl::SetBase(int base)
{
    // Currently we only support base 10 and 16. We could add support for base
    // 8 quite easily but wxMSW doesn't support it natively so don't bother.
    if ( base != 10 && base != 16 )
        return false;

    if ( base == m_base )
        return true;

    // For negative values in the range only base == 10 is allowed
    if ( !wxSpinCtrlImpl::IsBaseCompatibleWithRange(m_min, m_max, base) )
        return false;

    // Update the current control contents to show in the new base: be careful
    // to call DoTextToValue() before changing the base...
    double val;
    const bool hasValidVal = DoTextToValue(m_textCtrl->GetValue(), &val);

    m_base = base;
    m_textCtrl->InvalidateBestSize();
    ResetTextValidator();

    // ... but DoValueToText() after doing it.
    if ( hasValidVal )
        m_textCtrl->ChangeValue(DoValueToText(val));

    return true;
}
// Emit a wxEVT_SPINCTRL event carrying the current integer position and the
// current text.
void wxSpinCtrl::DoSendEvent()
{
    wxSpinEvent event( wxEVT_SPINCTRL, GetId());
    event.SetEventObject( this );
    event.SetPosition(GetValue());
    event.SetString(m_textCtrl->GetValue());
    GetEventHandler()->ProcessEvent( event );
}
// Parse text as a long in the current base; returns false on parse failure,
// otherwise stores the parsed number (as double) in *val.
bool wxSpinCtrl::DoTextToValue(const wxString& text, double *val)
{
    long lval;
    if ( !text.ToLong(&lval, GetBase()) )
        return false;

    *val = static_cast<double>(lval);

    return true;
}
// Render the (integral) value as text in the current base. Unsupported bases
// assert and fall through to decimal formatting.
wxString wxSpinCtrl::DoValueToText(double val)
{
    switch ( GetBase() )
    {
        case 16:
            return wxSpinCtrlImpl::FormatAsHex(static_cast<long>(val), GetMax());

        default:
            wxFAIL_MSG( wxS("Unsupported spin control base") );
            wxFALLTHROUGH;

        case 10:
            return wxString::Format("%ld", static_cast<long>(val));
    }
}
// Install a text validator matching the current base: a range-checked
// integer validator for base 10, a hex-digits filter for base 16. No-op if
// wxWidgets was built without validator support.
void wxSpinCtrl::ResetTextValidator()
{
#if wxUSE_VALIDATORS
    if ( GetBase() == 10 )
    {
        wxIntegerValidator<int> validator;
        validator.SetRange(GetMin(), GetMax());
        m_textCtrl->SetValidator(validator);
    }
    else // == 16
    {
        wxTextValidator validator(wxFILTER_XDIGITS);
        m_textCtrl->SetValidator(validator);
    }
#endif // wxUSE_VALIDATORS
}
#endif // !wxHAS_NATIVE_SPINCTRL
//-----------------------------------------------------------------------------
// wxSpinCtrlDouble
//-----------------------------------------------------------------------------
#define SPINCTRLDBL_MAX_DIGITS 20
wxIMPLEMENT_DYNAMIC_CLASS(wxSpinCtrlDouble, wxSpinCtrlGenericBase);
// Emit a wxEVT_SPINCTRLDOUBLE event carrying the current double value and
// the current text.
void wxSpinCtrlDouble::DoSendEvent()
{
    wxSpinDoubleEvent event( wxEVT_SPINCTRLDOUBLE, GetId());
    event.SetEventObject( this );
    event.SetValue(m_value);
    event.SetString(m_textCtrl->GetValue());
    GetEventHandler()->ProcessEvent( event );
}
// Parse text as a double; returns false on parse failure.
bool wxSpinCtrlDouble::DoTextToValue(const wxString& text, double *val)
{
    return text.ToDouble(val);
}
// Render the value using the printf-style format built from the current
// digit count (see SetDigits()/DetermineDigits()).
wxString wxSpinCtrlDouble::DoValueToText(double val)
{
    return wxString::Format(m_format, val);
}
// Set the number of fractional digits shown (capped at
// SPINCTRLDBL_MAX_DIGITS): rebuilds the printf format, the validator and the
// cached best size, then re-renders the current value.
void wxSpinCtrlDouble::SetDigits(unsigned digits)
{
    wxCHECK_RET( digits <= SPINCTRLDBL_MAX_DIGITS, "too many digits for wxSpinCtrlDouble" );

    if ( digits == m_digits )
        return;

    m_digits = digits;

    m_format.Printf(wxT("%%0.%ulf"), digits);

    ResetTextValidator();
    m_textCtrl->InvalidateBestSize();

    DoSetValue(m_value, SendEvent_None);
}
// Install a floating-point validator limited to the current digit count and
// range. No-op if wxWidgets was built without validator support.
void wxSpinCtrlDouble::ResetTextValidator()
{
#if wxUSE_VALIDATORS
    wxFloatingPointValidator<double> validator(m_digits);
    validator.SetRange(m_min, m_max);
    m_textCtrl->SetValidator(validator);
#endif // wxUSE_VALIDATORS
}
// Derive a sensible number of fractional digits from the increment, e.g. an
// increment of 0.01 yields 2 digits. Only fractional increments
// (0 < |inc| < 1) change anything; the result is capped at
// SPINCTRLDBL_MAX_DIGITS.
void wxSpinCtrlDouble::DetermineDigits(double inc)
{
    inc = fabs(inc);
    if ( inc > 0.0 && inc < 1.0 )
    {
        // -floor(log10(inc)) is the number of leading fractional places.
        m_digits = wxMin(SPINCTRLDBL_MAX_DIGITS, -static_cast<int>(floor(log10(inc))));
        m_format.Printf("%%0.%ulf", m_digits);
    }
}
#endif // wxUSE_SPINBTN
#endif // !wxPort-with-native-spinctrl
#endif // wxUSE_SPINCTRL
| gpl-3.0 |
glaubersp/CEAP | resources/lang/es/strings.php | 7517 | <?php
// Spanish (es) UI strings. Fixes in this revision: orthography (missing
// accents, "puene" -> "puede", "aquéllas" -> "aquellas", "por que"/"porque"),
// missing opening ¿/¡ punctuation, consistent "usted" register in e-mail
// texts, and translation of three strings that had been left in English.
return [

    /*
    |--------------------------------------------------------------------------
    | Strings Language Lines
    |--------------------------------------------------------------------------
    |
    | The following language lines are used in strings throughout the system.
    | Regardless where it is placed, a string can be listed here so it is easily
    | found in an intuitive way.
    |
    */

    'backend' => [
        'access' => [
            'users' => [
                'delete_user_confirm' => '¿Estás seguro de querer eliminar este Usuario de forma permanente? Esto puede producir un error grave en aquellas partes de la aplicación que hagan referencia al mismo. Proceda con cautela. Esta operación no puede ser revertida.',
                'if_confirmed_off' => '(Si la confirmación está desactivada)',
                'restore_user_confirm' => '¿Restaurar este Usuario a su estado original?',
            ],
        ],
        'dashboard' => [
            'title' => 'Panel de Administración',
            'welcome' => 'Bienvenido',
        ],
        'general' => [
            'all_rights_reserved' => 'Todos los derechos reservados.',
            'are_you_sure' => '¿Está seguro?',
            'boilerplate_link' => 'Laravel 5 Boilerplate',
            'continue' => 'Continuar',
            'member_since' => 'Miembro desde',
            'minutes' => ' minutos',
            'search_placeholder' => 'Buscar...',
            'timeout' => 'Usted ha sido automáticamente desconectado por razones de seguridad ya que no tuvo actividad en ',
            'see_all' => [
                'messages' => 'Ver todos los mensajes',
                'notifications' => 'Ver todo',
                'tasks' => 'Ver todas las tareas',
            ],
            'status' => [
                'online' => 'Conectado',
                'offline' => 'Desconectado',
            ],
            'you_have' => [
                'messages' => '{0} No tiene nuevos mensajes|{1} Tiene 1 nuevo mensaje|[2,Inf] Tiene :number mensajes nuevos',
                'notifications' => '{0} No tiene nuevas notificaciones|{1} Tiene 1 nueva notificación|[2,Inf] Tiene :number notificaciones',
                'tasks' => '{0} No tiene nuevas tareas|{1} Tiene 1 nueva tarea|[2,Inf] Tiene :number nuevas tareas',
            ],
        ],
        'search' => [
            'empty' => 'Favor escribir un término de búsqueda.',
            'incomplete' => 'Debes escribir tu propia lógica de búsqueda para este sistema.',
            'title' => 'Resultados de la Búsqueda',
            'results' => 'Resultados de la búsqueda para :query',
        ],
        'welcome' => '<p>Este es tema CoreUI por <a href="https://coreui.io/" target="_blank">creativeLabs</a>. Esta versión no está completa, descargue la versión completa para añadir más componentes.</p>
    <p>Toda la funcionalidad es de prueba, a excepción de <strong>Administración de acceso</strong> a la izquierda. Esta plantilla viene pre-configurada y funcional para total gestión de usuarios/roles/permisos.</p>
    <p>Tenga presente que esta plantilla sigue estando en desarrollo y puede contener errores. Haré lo que esté en mis manos para enmendarlos.</p>
    <p>Espero que disfrute y aprecie el trabajo depositado en este proyecto. Por favor, visite <a href="https://github.com/rappasoft/laravel-5-boilerplate" target="_blank">GitHub</a> para más información o reportar error <a href="https://github.com/rappasoft/Laravel-5-Boilerplate/issues" target="_blank">aquí</a>.</p>
    <p><strong>Este proyecto es muy demandante para mantenerse al día con la frecuencia en que el master branch de laravel va cambiando, por tanto cualquier ayuda será apreciada.</strong></p>
    <p>- Anthony Rappa</p>',
    ],

    'emails' => [
        'auth' => [
            'account_confirmed' => 'Su cuenta ha sido confirmada.',
            'error' => '¡Ups!',
            'greeting' => '¡Hola!',
            'regards' => 'Saludos,',
            'trouble_clicking_button' => 'Si está presentando problemas haciendo clic en el botón ":action_text", copie y pegue el enlace en su navegador:',
            'thank_you_for_using_app' => '¡Gracias por utilizar nuestra aplicación!',
            'password_reset_subject' => 'Su enlace de reinicio de contraseña',
            'password_cause_of_email' => 'Usted está recibiendo este correo porque hemos recibido una solicitud de reinicio de contraseña para su cuenta.',
            'password_if_not_requested' => 'Si usted no hizo la solicitud, ninguna acción es requerida.',
            'reset_password' => 'Pulse aquí para reiniciar su contraseña',
            'click_to_confirm' => 'Pulse aquí para verificar su cuenta:',
        ],
        'contact' => [
            'email_body_title' => 'Tiene una nueva solicitud del formulario de contacto: a continuación los detalles:',
            'subject' => '¡Nueva solicitud del formulario de contacto :app_name!',
        ],
    ],

    'frontend' => [
        'test' => 'Prueba',
        'tests' => [
            'based_on' => [
                'permission' => 'Basado en el Permiso - ',
                'role' => 'Basado en el Rol - ',
            ],
            'js_injected_from_controller' => 'Javascript inyectado desde el Controlador',
            'using_blade_extensions' => 'Usando las extensiones de Blade',
            'using_access_helper' => [
                'array_permissions' => 'Uso de Access Helper con lista de nombres de Permisos o ID\'s donde el usuario tiene que tenerlos todos.',
                'array_permissions_not' => 'Uso de Access Helper con lista de nombres de Permisos o ID\'s donde el usuario no tiene por qué tenerlos todos.',
                'array_roles' => 'Uso de Access Helper con lista de nombres de Roles o ID\'s donde el usuario tiene que tenerlos todos.',
                'array_roles_not' => 'Uso de Access Helper con lista de nombres de Roles o ID\'s donde el usuario no tiene que tenerlos todos.',
                'permission_id' => 'Uso de Access Helper mediante ID de Permiso',
                'permission_name' => 'Uso de Access Helper mediante nombre de Permiso',
                'role_id' => 'Uso de Access Helper mediante ID de Rol',
                'role_name' => 'Uso de Access Helper mediante nombre de Rol',
            ],
            'view_console_it_works' => 'Mire la consola del navegador, debería ver \'Funciona!!\' que tiene su origen en FrontendController@index',
            'you_can_see_because' => 'Puede ver esto, porque dispone del Rol \':role\'!',
            'you_can_see_because_permission' => 'Puede ver esto, porque dispone del Permiso \':permission\'!',
        ],
        'general' => [
            'joined' => 'Se unió',
        ],
        'user' => [
            'change_email_notice' => 'Si cambia su correo electrónico, su sesión se cerrará hasta que confirme su nueva dirección de correo.',
            'email_changed_notice' => 'Debe confirmar su nueva dirección de correo electrónico antes de poder iniciar sesión de nuevo.',
            'profile_updated' => 'Perfil actualizado satisfactoriamente.',
            'password_updated' => 'Contraseña actualizada satisfactoriamente.',
        ],
        'welcome_to' => 'Bienvenido a :place',
    ],
];
ctactuk/raspberry_php_gpio | config/constants.php | 134 | <?php
// Absolute path of the Python script executed to control the GPIO pins.
define('PATH_TO_SCRIPT','/home/pi/domotica/command.py');
// Privilege-elevation prefix used when invoking the script.
define('ROOT_PRIVILEGES','sudo');
// Interpreter used to run the script.
define('PYTHON_COMMAND','python');
?> | gpl-3.0 |
jbundle/jbundle | app/program/screen/src/main/java/org/jbundle/app/program/screen/FieldDataGridScreen.java | 3748 | /**
* @(#)FieldDataGridScreen.
* Copyright © 2013 jbundle.org. All rights reserved.
* GPL3 Open Source Software License.
*/
package org.jbundle.app.program.screen;
import java.util.*;
import org.jbundle.base.db.*;
import org.jbundle.thin.base.util.*;
import org.jbundle.thin.base.db.*;
import org.jbundle.base.db.event.*;
import org.jbundle.base.db.filter.*;
import org.jbundle.base.field.*;
import org.jbundle.base.field.convert.*;
import org.jbundle.base.field.event.*;
import org.jbundle.base.screen.model.*;
import org.jbundle.base.screen.model.util.*;
import org.jbundle.base.model.*;
import org.jbundle.base.util.*;
import org.jbundle.model.*;
import org.jbundle.model.db.*;
import org.jbundle.model.screen.*;
import org.jbundle.app.program.db.*;
/**
* FieldDataGridScreen - .
*/
public class FieldDataGridScreen extends GridScreen
{
    /**
     * Default constructor.
     */
    public FieldDataGridScreen()
    {
        super();
    }
    /**
     * Constructor.
     * @param record The main record for this screen.
     * @param itsLocation The location of this component within the parent.
     * @param parentScreen The parent screen.
     * @param fieldConverter The field this screen field is linked to.
     * @param iDisplayFieldDesc Do I display the field desc?.
     * @param properties Extra screen properties, passed through to init().
     */
    public FieldDataGridScreen(Record record, ScreenLocation itsLocation, BasePanel parentScreen, Converter fieldConverter, int iDisplayFieldDesc, Map<String,Object> properties)
    {
        this();
        this.init(record, itsLocation, parentScreen, fieldConverter, iDisplayFieldDesc, properties);
    }
    /**
     * Initialize class fields.
     * Delegates entirely to the superclass initialization.
     */
    public void init(Record record, ScreenLocation itsLocation, BasePanel parentScreen, Converter fieldConverter, int iDisplayFieldDesc, Map<String,Object> properties)
    {
        super.init(record, itsLocation, parentScreen, fieldConverter, iDisplayFieldDesc, properties);
    }
    /**
     * Override this to open the main file.
     * <p />You should pass this record owner to the new main file (ie., new MyNewTable(thisRecordOwner)).
     * @return The new record.
     */
    public Record openMainRecord()
    {
        return new FieldData(this);
    }
    /**
     * Add all the screen listeners.
     * If a ClassInfo record is available, the grid is filtered to the
     * FieldData rows belonging to the currently selected class, and is
     * re-selected whenever the class name field changes.
     */
    public void addListeners()
    {
        super.addListeners();
        Record record = this.getMainRecord();
        Record recClassInfo = this.getRecord(ClassInfo.CLASS_INFO_FILE);
        if (recClassInfo != null)
        {
            // Key the grid by file name so the sub-file filter can narrow it.
            record.setKeyArea(FieldData.FIELD_FILE_NAME_KEY);
            SubFileFilter listener = new SubFileFilter(recClassInfo.getField(ClassInfo.CLASS_NAME), FieldData.FIELD_FILE_NAME, null, null, null, null, true);
            record.addListener(listener);
            // Refresh the grid when the selected class changes.
            recClassInfo.getField(ClassInfo.CLASS_NAME).addListener(new FieldReSelectHandler(this));
        }
    }
    /**
     * SetupSFields Method.
     * Columns: field name, field class, and a description column capped
     * at 50 characters via a FieldLengthConverter.
     */
    public void setupSFields()
    {
        this.getRecord(FieldData.FIELD_DATA_FILE).getField(FieldData.FIELD_NAME).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        this.getRecord(FieldData.FIELD_DATA_FILE).getField(FieldData.FIELD_CLASS).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
        Converter converter = this.getRecord(FieldData.FIELD_DATA_FILE).getField(FieldData.FIELD_DESCRIPTION);
        converter = new FieldLengthConverter(converter, 50);
        converter.setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
    }
}
| gpl-3.0 |
DeadPixelsSociety/SonOfMars | src/local/Enemy.cc | 6764 | /*
* Son of Mars
*
* Copyright (c) 2015-2016, Team Son of Mars
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "Enemy.h"
#include <cmath>
#include "local/config.h"
#include "Character.h"
#include "Game.h"
#include "Target.h"
// Baseline stats for a fresh enemy; each is scaled by the difficulty
// multiplier passed to the constructor.  The integer constants now use
// integer literals: the original float initializers (5.0f, 1.0f, -1.0f)
// were silently truncated to these same int values.
static constexpr int BASIC_HEALTH = 5;
static constexpr int BASIC_DAMAGE = 1;
static constexpr int BASIC_ARMOR = -1;
static constexpr int BASIC_GOLD = 2;
static constexpr float BASIC_SPEED = 3.0f;
static constexpr float BASIC_ATTACKPERIOD = 3.0f;
// Degrees-to-radians conversion factor.
static constexpr float DEGTORAD = M_PI / 180.0f;
// Construct an enemy at `position` (pixels); all combat stats are derived
// from the BASIC_* constants scaled by `multiplier`.
Enemy::Enemy(b2World &b2_world, game::EventManager& events, sf::Vector2f position, float multiplier)
: m_body(nullptr)
, m_target({0.0f, 0.0f})
, m_events(events)
, m_maxHealth(BASIC_HEALTH*multiplier)
, m_health(BASIC_HEALTH*multiplier)
, m_damage(BASIC_DAMAGE*multiplier)
, m_armor(BASIC_ARMOR+multiplier)
, m_goldGiven(BASIC_GOLD*multiplier)
, m_speed(BASIC_SPEED+(multiplier/10.0f))
, m_attackPeriod(BASIC_ATTACKPERIOD-(multiplier/10.0f))
, m_timeElapsed(BASIC_ATTACKPERIOD-1.0f)  // start with the cooldown almost expired
, m_action(ATTACK) {
  // Set the initial position (pixels -> Box2D meters).
  b2BodyDef b2_bodyDef;
  b2_bodyDef.type = b2_dynamicBody;
  b2_bodyDef.position.Set(position.x / BOX2D_PIXELS_PER_METER, position.y / BOX2D_PIXELS_PER_METER);

  // Solid circular fixture for the enemy body itself.
  b2CircleShape b2_circle;
  b2_circle.m_radius = ENEMY_WIDTH / BOX2D_PIXELS_PER_METER;

  b2FixtureDef b2_fixture;
  b2_fixture.shape = &b2_circle;

  m_body = b2_world.CreateBody(&b2_bodyDef);
  // Target objects are owned by this enemy (deleted in the destructor) and
  // attached as fixture user data for collision dispatch.
  m_targets.push_back(new Target(Origin::ENEMY, false, this));
  m_body->CreateFixture(&b2_fixture)->SetUserData(m_targets.back());

  // Create the hitbox of the ennemy: a 90-degree pie slice (radius 1m)
  // in front of the body, used as a sensor to detect attackable characters.
  float radius = 1.0f;
  b2Vec2 vertices[8];
  vertices[0].Set(0,0);
  for (int i = 0; i < 7; i++) {
    // Fan the arc from -45 to +45 degrees in 6 steps.
    float angle = ((i / 6.0f * 90.0f) - 45.0f) * DEGTORAD;
    vertices[i+1].Set( radius * cosf(angle), radius * sinf(angle) );
  }

  b2PolygonShape b2_polygonShape;
  b2_polygonShape.Set(vertices, 8);
  b2_fixture.shape = &b2_polygonShape;
  b2_fixture.isSensor = true;  // detection only, no physical response
  m_targets.push_back(new Target(Origin::ENEMY, true, this));
  m_body->CreateFixture(&b2_fixture)->SetUserData(m_targets.back());
}
// Free the Target objects this enemy owns (the b2Body itself is destroyed
// separately, in death()).
Enemy::~Enemy() {
  while (!m_targets.empty()) {
    delete m_targets.back();
    m_targets.pop_back();
  }
}
// Per-frame update: advance the attack cooldown, steer according to the
// requested behavior, attack characters in range, and die at 0 health.
void Enemy::update(const float dt, const ActionType action) {
  if(m_timeElapsed<m_attackPeriod) // if the attack is not ready, the cooldown advances
  {
    m_timeElapsed += dt;
  }

  // Define the steering direction for the requested behavior.
  m_action = action;
  b2Vec2 dir;
  switch (m_action) {
  case ATTACK:
    // Head straight for the character.
    dir = m_target - m_body->GetPosition();
    break;

  case CIRCLE:
    if (distanceFromCharacter() > 5.0f) {
      dir = m_target - m_body->GetPosition();
    }
    else {
      // Close enough: damp the approach vector so the enemy hovers nearby.
      dir = m_target - m_body->GetPosition();
      dir.x /= 100.0f;
      dir.y /= 100.0f;
    }
    break;

  case RETREAT:
    // Move directly away from the character.
    dir = -1 * (m_target - m_body->GetPosition());
    break;
  }

  // Manage the move.
  // Compute enemy's rotation so the body faces its movement direction.
  float norm = std::hypot(dir.x, dir.y);
  m_body->SetTransform(m_body->GetPosition(),
                       (( dir.y < 0 ) ? -1 : 1) * acos( dir.x/norm) );
  // Set enemy's speed (constant magnitude) while far enough from the target.
  if ( norm > 3.0f*ENEMY_WIDTH / BOX2D_PIXELS_PER_METER ) {
    m_body->SetLinearVelocity((m_speed/norm)*dir);
  }
  else { // Useless when other enemies push
    m_body->SetLinearVelocity( b2Vec2(0,0) );
  }

  if(m_timeElapsed>=m_attackPeriod)
  {
    // Cooldown expired: strike every character inside the attack sensor.
    if(!m_visibleCharacter.empty())
    {
      this->simpleAttack();
      m_timeElapsed-=m_attackPeriod;
    }
  }

  // Check if the enemy still has health > 0; otherwise die and award gold.
  if(m_health<=0)
  {
    // Trigger death event
    EnemyDeathEvent deathEvent;
    deathEvent.givenGold=m_goldGiven;
    m_events.triggerEvent(&deathEvent);
    this->death();
  }
}
// Draw the enemy: cyan body circle, red orientation bar, and a two-part
// health bar (green = remaining, red = lost) above the body.
void Enemy::render(sf::RenderWindow& window) {
  // Body circle (Box2D meters -> screen pixels).
  sf::CircleShape circle;
  b2Vec2 b2_pos = m_body->GetPosition();
  circle.setOrigin(ENEMY_WIDTH, ENEMY_WIDTH);
  circle.setPosition(b2_pos.x * BOX2D_PIXELS_PER_METER, b2_pos.y * BOX2D_PIXELS_PER_METER);
  circle.setRadius(ENEMY_WIDTH);
  circle.setFillColor(sf::Color::Cyan);
  window.draw(circle);

  // Orientation of enemy: a red bar rotated to the body angle.
  float angle = m_body->GetAngle();
  sf::RectangleShape rect({ENEMY_WIDTH * 2.0f, 4.0f});
  rect.setOrigin(ENEMY_WIDTH, 2.0f);
  rect.setPosition(b2_pos.x * BOX2D_PIXELS_PER_METER, b2_pos.y * BOX2D_PIXELS_PER_METER);
  rect.setFillColor(sf::Color::Red);
  rect.setRotation(angle * 180 / M_PI);
  window.draw(rect);

  // Health bar: width scales with the remaining-health ratio.
  sf::RectangleShape healthRect({ENEMY_WIDTH * 4.0f, 6.0f});
  const float healthPercent = (float)m_health/(float)m_maxHealth;
  // Green part
  healthRect.setPosition(b2_pos.x * BOX2D_PIXELS_PER_METER - 2*ENEMY_WIDTH, b2_pos.y * BOX2D_PIXELS_PER_METER - 2 * ENEMY_WIDTH);
  healthRect.setScale(healthPercent, 1.0f);
  healthRect.setFillColor(sf::Color(0, 200, 0, 128));
  window.draw(healthRect);
  // Red part, offset to start where the green part ends.
  healthRect.setPosition(b2_pos.x * BOX2D_PIXELS_PER_METER + ENEMY_WIDTH * (4.0f * healthPercent - 2.0f), b2_pos.y * BOX2D_PIXELS_PER_METER - 2 * ENEMY_WIDTH);
  healthRect.setScale(1.0f - (float)m_health/(float)m_maxHealth, 1.0f);
  healthRect.setFillColor(sf::Color(200, 0, 0, 128));
  window.draw(healthRect);
}
void Enemy::death() {
m_body->GetWorld()->DestroyBody(m_body);
kill();
}
void Enemy::setCharacterLocation(const b2Vec2 &pos) {
m_target = pos;
}
// A character entered the attack sensor: start tracking it as attackable.
void Enemy::acquiredCharacter(Character* character) {
  this->m_visibleCharacter.insert(character);
}
// A character left the attack sensor: stop tracking it.
void Enemy::lostCharacter(Character* character) {
  this->m_visibleCharacter.erase(character);
}
void Enemy::setHealth(float health) {
m_health=health;
}
float Enemy::getHealth() const {
return m_health;
}
void Enemy::addToHealth(float value)
{
m_health+=value;
}
void Enemy::substractToHealth(float value)
{
m_health-=value;
}
void Enemy::setArmor(float armor)
{
m_armor=armor;
}
float Enemy::getArmor() const
{
return m_armor;
}
// Hit every character currently inside the attack sensor.
// NOTE(review): the applied amount is m_damage minus the target's armor;
// if armor exceeds damage the value goes negative and effectively heals
// the target -- confirm whether that is intended.
void Enemy::simpleAttack()
{
  for(Character* character: m_visibleCharacter)
  {
    character->substractToHealth(m_damage-character->getArmor());
  }
}
// Manhattan (L1) distance between the enemy body and the stored character
// position, in Box2D meters -- not the Euclidean distance the name suggests.
// update() compares this against a 5.0f threshold for CIRCLE behavior.
float Enemy::distanceFromCharacter() const {
  return std::abs(m_target.x - m_body->GetPosition().x) + std::abs(m_target.y - m_body->GetPosition().y);
}
| gpl-3.0 |
astralien3000/aversive-- | include/common/geometry/circle.hpp | 2293 | /*
Aversive++
Copyright (C) 2014 Eirbot
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef CIRCLE_HPP
#define CIRCLE_HPP
#include <base/integer.hpp>
#include <math/vect.hpp>
#include <geometry/shape.hpp>
//! A circle: centre point plus radius, in signed 32-bit integer
//! coordinates.  Registers itself with the Shape base under the 'c' tag.
class Circle : public Shape {
  Vect<2, s32> _centre;  // centre position
  s32 _radius;           // radius, same unit as the coordinates

public:
  //! Runtime type tag passed to the Shape base class.
  static const u8 ID = 'c';

  //! Default circle: radius 0 at the origin.
  inline Circle(void)
    : Shape(ID), _centre(0, 0), _radius(0) {
  }

  //! Circle from a centre vector and a radius.
  inline Circle(const Vect<2, s32>& centre, s32 radius)
    : Shape(ID), _centre(centre), _radius(radius) {
  }

  //! Circle from centre coordinates and a radius.
  inline Circle(s32 x, s32 y, s32 radius)
    : Shape(ID), _centre(x, y), _radius(radius) {
  }

  //! Copy constructor.
  inline Circle(const Circle& other)
    : Shape(ID), _centre(other._centre), _radius(other._radius) {
  }

  //! Copy assignment: copies centre and radius (ID is identical anyway).
  inline Circle& operator=(const Circle& other) {
    _centre = other._centre;
    _radius = other._radius;
    return (*this);
  }

  //! Two circles are equal when centre and radius both match.
  inline bool operator==(const Circle& other) const {
    return _centre == other._centre && _radius == other._radius;
  }

  //! Mutable access to the centre.
  inline Vect<2, s32>& centre(void) {
    return _centre;
  }

  //! Mutable access to the radius.
  inline s32& radius(void) {
    return _radius;
  }

  //! Read-only access to the centre.
  inline const Vect<2, s32>& centre(void) const {
    return _centre;
  }

  //! Read-only access to the radius.
  inline const s32& radius(void) const {
    return _radius;
  }
};
#endif//CIRCLE_HPP
| gpl-3.0 |
rub0/tbot | src/main/scala/com/telegram/api/UserProfilePhotos.scala | 443 | package com.telegram.api
/**
 * A user's profile pictures as returned by the Telegram Bot API.
 *
 * @param totalCount Total number of profile pictures the target user has
 * @param photos     Requested profile pictures (in up to 4 sizes each)
 */
case class UserProfilePhotos(totalCount: Int, photos: Array[Array[PhotoSize]])
| gpl-3.0 |
JaumeRibas/Aether2DImgMaker | CellularAutomata/src/cellularautomata/model3d/LongModel3DXZDiagonalCrossSection.java | 1282 | /* Aether2DImgMaker -- console app to generate images of the Aether cellular automaton in 2D
Copyright (C) 2017-2022 Jaume Ribas
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package cellularautomata.model3d;
import cellularautomata.model2d.LongModel2D;
public class LongModel3DXZDiagonalCrossSection<G extends LongModel3D> extends Model3DXZDiagonalCrossSection<G> implements LongModel2D {

    /**
     * Cross section of a 3D long-valued model along the diagonal plane
     * z = x + zOffsetFromX.
     *
     * @param source the 3D model to slice
     * @param zOffsetFromX the z offset of the diagonal relative to x
     */
    public LongModel3DXZDiagonalCrossSection(G source, int zOffsetFromX) {
        super(source, zOffsetFromX);
    }

    /**
     * Maps the 2D position (x, y) onto the source's (x, y, x + zOffsetFromX).
     */
    @Override
    public long getFromPosition(int x, int y) throws Exception {
        return source.getFromPosition(x, y, x + zOffsetFromX);
    }
}
| gpl-3.0 |
Zepx/f5vpn-client-titech | f5vpn-login.py | 53087 | #!/usr/bin/env python
"""Log in to a F5 Firepass SSL VPN from a command-line, without using F5's
browser-plugin and associated junk. Yay. Only for Tokyo Institute of Technology's VPN service.
Works with OSX and linux, at the moment.
Copyright 2006-2010, James Y Knight <foom@fuhm.net>
2010-08-30
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Per-user configuration file (path is expanded elsewhere in the script).
CONFIG_FILE = "~/.f5vpn-login.conf"
# 60 * 5 -- presumably seconds between keepalives; usage is outside this chunk.
KEEPALIVE_TIMEOUT = 60 * 5
# Candidate CA-bundle paths, probed in order by find_certificates_file().
CERTIFICATE_FILE_LOCATIONS = [
    '/etc/ssl/certs/ca-certificates.crt', # Debian/Ubuntu/Gentoo
    '/etc/pki/tls/certs/ca-bundle.crt', # New redhat
    '/usr/share/ssl/certs/ca-bundle.crt', # Old redhat
    '/etc/ssl/cert.pem', # FreeBSD
    # Your OS goes here? Email me if you know of more places to look...
    # Other paths I've seen mentioned on teh internets, what the heck, can't hurt
    '/etc/certs/ca-bundle.crt',
    '/usr/local/ssl/certs/ca-bundle.crt',
    '/etc/apache/ssl.crt/ca-bundle.crt',
    '/usr/share/curl/curl-ca-bundle.crt',
    '/usr/lib/ssl/cert.pem',
    ]
# Titech VPN endpoint hostnames.
PORTAL_HOST = "portal.nap.gsic.titech.ac.jp"
APM_HOST = "apm.nap.gsic.titech.ac.jp"
RP_HOST = "rp.nap.gsic.titech.ac.jp"
import socket, re, sys, os, time, fcntl, select, errno, signal
import getpass, getopt, types, traceback
from urllib import quote_plus, urlencode
try:
import socks
except ImportError:
socks = None
# File that contains certificates to use; set by find_certificates_file().
# None means certificate validation is disabled.
ssl_cert_path = None
# Proxy setting consumed by proxy_connect(): a tuple ('http'|'socks5',
# host, port), or None for a direct connection.
proxy_addr = None
try:
    # The ssl module is New in python 2.6, and required for cert validation.
    import ssl as sslmodule

    def sslwrap(hostname, s):
        """Wrap plain socket `s` in SSL; verify the peer certificate against
        `hostname` when a CA bundle (ssl_cert_path) was found."""
        try:
            if ssl_cert_path is not None:
                ssl_sock = sslmodule.wrap_socket(s, cert_reqs=sslmodule.CERT_REQUIRED,
                                                 ca_certs=ssl_cert_path)
                ssl_sock.do_handshake()
                # wrap_socket only validates the chain; check the hostname too.
                verify_certificate_host(ssl_sock.getpeercert(),
                                        ssl_sock.getpeername()[0], hostname)
            else:
                # No CA bundle: encrypted but unauthenticated connection.
                ssl_sock = sslmodule.wrap_socket(s)
                ssl_sock.do_handshake()
        except sslmodule.SSLError as e:
            if 'SSL3_GET_SERVER_CERTIFICATE:certificate verify failed' in str(e):
                raise MyException("Couldn't validate server certificate.\nAre you being MITM'd? If not, try --dont-check-certificates\n" + str(e))
            else:
                raise
        return ssl_sock
except ImportError:
    # Pre-2.6 fallback: socket.ssl cannot validate certificates at all.
    sslmodule = None
    def sslwrap(hostname, s):
        return socket.ssl(s)
# Fatal, user-facing error.  Subclasses SystemExit so an uncaught instance
# terminates the script with its message instead of a traceback.
class MyException(SystemExit):
    pass
def set_non_blocking(fd):
    """Switch file descriptor `fd` into non-blocking mode."""
    current = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, current | os.O_NONBLOCK)
def as_root(fn, *args, **kwargs):
    """Call fn(*args, **kwargs) with the effective UID raised to root.

    The effective UID is restored to the real UID no matter how fn exits.
    Requires the process to be setuid-root (the real uid may be unprivileged).
    """
    try:
        os.seteuid(0)
        return fn(*args, **kwargs)
    finally:
        os.seteuid(os.getuid())
def sts_result(sts):
    """Decode a waitpid() status word.

    Returns the exit code for a normal exit, or the negated signal number
    if the child was killed by a signal.  Raises os.error for any other
    status (e.g. stopped).

    Fix: use the call form of raise -- the old ``raise os.error, msg``
    syntax is Python-2-only and long deprecated; the call form behaves
    identically and is forward compatible.
    """
    if os.WIFSIGNALED(sts):
        return -os.WTERMSIG(sts)
    elif os.WIFEXITED(sts):
        return os.WEXITSTATUS(sts)
    else:
        raise os.error("Not signaled or exited???")
def run_as_root(args, stdin=None):
    """Fork and exec `args` (argv list, args[0] is the full path) as root.

    If `stdin` is a string, it is fed to the child's standard input via a
    pipe.  Raises MyException when the child exits with a nonzero result
    (exec failure is reported as exit code 127).
    """
    if stdin is not None:
        pipe_r, pipe_w = os.pipe()
    else:
        pipe_r, pipe_w = None, None
    pid = os.fork()
    if pid == 0:
        # Child: wire up stdin, become root for real, and exec.
        if pipe_r is not None:
            # setup stdin pipe
            os.dup2(pipe_r, 0)
            os.close(pipe_r)
            os.close(pipe_w)
        os.seteuid(0)
        os.setuid(0)
        try:
            os.execv(args[0], args)
        except:
            # Nothing sane to do in the child on exec failure; mimic the
            # shell's "command not found" status.
            os._exit(127)
    else:
        # Parent: feed stdin (if any), then wait and check the result.
        if pipe_r is not None:
            os.close(pipe_r)
            os.write(pipe_w, stdin)
            os.close(pipe_w)
        wpid, sts = os.waitpid(pid, 0)
        code = sts_result(sts)
        if code != 0:
            raise MyException("%r: exited with result %d"% (args, code))
class Platform:
    """Base class for per-OS route/DNS management; methods are no-op hooks."""

    def setup_route(self, ifname, gateway_ip, net, bits, action):
        """Apply `action` (route-command verb) for net/bits via the VPN."""
        pass

    def setup_dns(self, iface_name, service_id, dns_servers, dns_domains, revdns_domains, override_gateway):
        """Install the VPN-provided DNS servers and search domains."""
        pass

    def teardown_dns(self):
        """Undo whatever setup_dns() did."""
        pass
class DummyPlatform:
    """Debug stand-in: prints each call instead of touching the system."""

    def setup_route(self, ifname, gateway_ip, net, bits, action):
        print "setup_route(ifname=%r, gateway_ip=%r, net=%r, bits=%r, action=%r" % (ifname, gateway_ip, net, bits, action)

    # NOTE(review): the label says "teardown_route" although the method is
    # named setup_host_route -- looks like a stale debug string; confirm.
    def setup_host_route(self, ifname, gateway_ip, net, bits):
        print "teardown_route(ifname=%r, gateway_ip=%r, net=%r, bits=%r" % (ifname, gateway_ip, net, bits)

    def setup_dns(self, iface_name, service_id, dns_servers, dns_domains, revdns_domains, override_gateway):
        print "setup_dns(iface_name=%r, service_id=%r, dns_servers=%r, dns_domains=%r, revdns_domains=%r, override_gateway=%r)" % (iface_name, service_id, dns_servers, dns_domains, revdns_domains, override_gateway)

    def teardown_dns(self):
        print "teardown_dns()"
class DarwinPlatform(Platform):
    """Route and DNS management for Mac OS X."""

    def __init__(self):
        # route(8) lives in different places across OSX releases.
        if os.path.exists("/sbin/route"):
            self.route_path="/sbin/route"
        elif os.path.exists("/usr/bin/route"):
            self.route_path="/usr/bin/route"
        else:
            raise MyException("Couldn't find route command")

    def setup_route(self, ifname, gateway_ip, net, bits, action):
        # route(8) takes either an interface or a gateway IP as the target.
        args = [self.route_path, action, '-net', "%s/%s" % (net, bits)]
        if ifname:
            args += ['-interface', ifname]
        else:
            args += [gateway_ip]
        run_as_root(args)

    def load_SystemConfigurationFramework(self):
        """Return a module exposing the SystemConfiguration framework,
        building a minimal PyObjC wrapper when no python binding exists."""
        try:
            # If it's already been wrapped, we're done.
            import SystemConfiguration
            return SystemConfiguration
        except ImportError:
            # Nope, so, try again, the hard way...
            import objc
            SystemConfiguration=types.ModuleType('SystemConfiguration')
            SCbndl = objc.loadBundle(SystemConfiguration.__name__, SystemConfiguration.__dict__,
                                     bundle_identifier="com.apple.SystemConfiguration")
            # Only the two functions this script actually calls are bridged.
            objc.loadBundleFunctions(SCbndl, SystemConfiguration.__dict__, [
                (u'SCDynamicStoreCreate', '@@@@@'),
                (u'SCDynamicStoreSetValue', 'B@@@')
                ])
            return SystemConfiguration

    def setup_dns(self, iface_name, service_id, dns_servers, dns_domains, revdns_domains, override_gateway):
        """Setup DNS the OSX magic way."""
        # Preferentially use the SystemConfiguration library (included with OSX
        # 10.5) if available, as scutil has a command-length limitation of 256
        # chars. With 256 chars it's generally not reasonable to add in the
        # revdns domains, so don't bother trying.
        # NOTE: There's a 3rd party SystemConfiguration package for 10.4 which
        # seems to have a different API (that I don't support currently)
        try:
            SystemConfiguration=self.load_SystemConfigurationFramework()
            SystemConfiguration.SCDynamicStoreCreate
        except:
            # fall back to scutil.
            config = "d.init\n"
            config += "d.add ServerAddresses * %s\n" % ' '.join(dns_servers)
            if override_gateway:
                config += "d.add SearchDomains * %s\n" % ' '.join(dns_domains)
            else:
                config += "d.add SupplementalMatchDomains * %s\n" % ' '.join(dns_domains)
            config += "set State:/Network/Service/%s/DNS\n" % service_id
            run_as_root(['/usr/sbin/scutil'], stdin=config)
        else:
            # Write the DNS record into the dynamic store directly (as root).
            def setup_helper():
                sc = SystemConfiguration.SCDynamicStoreCreate(None, "f5vpn-login", None, None)
                d = SystemConfiguration.NSMutableDictionary.new()
                d[u'ServerAddresses'] = dns_servers
                if override_gateway:
                    d[u'SearchDomains'] = dns_domains
                else:
                    d[u'SupplementalMatchDomains'] = dns_domains + revdns_domains
                SystemConfiguration.SCDynamicStoreSetValue(sc, 'State:/Network/Service/%s/DNS' % service_id, d)
            as_root(setup_helper)
class Linux2Platform(Platform):
    """Route management via the net-tools route(8) command on Linux."""

    def setup_route(self, ifname, gateway_ip, net, bits, action):
        # route(8) wants a dotted netmask for networks; /32 is a host route.
        # bits2mask is a prefix-length -> netmask table defined elsewhere in
        # this file (not visible in this chunk).
        if bits == 32:
            host_or_net = ["-host", net]
        else:
            host_or_net = ["-net", net, 'netmask', bits2mask[bits]]
        run_as_root(['/sbin/route', action] + host_or_net +
                    ['gw', gateway_ip])
class FreeBSD6Base(Platform):
    """Route management for FreeBSD; DNS handling comes from a mixin."""

    def setup_route(self, ifname, gateway_ip, net, bits, action):
        # route(8) accepts either an interface or a gateway IP as target.
        cmd = ['/sbin/route', action, "%s/%s" % (net, bits)]
        if ifname:
            cmd.extend(['-interface', ifname])
        else:
            cmd.append(gateway_ip)
        run_as_root(cmd)
class ManualFrobbingDNSMixin:
    """DNS setup by rewriting /etc/resolv.conf directly (with backup)."""

    # mtime of the resolv.conf we wrote; 0 means "we never wrote one".
    resolv_conf_timestamp = 0

    def setup_dns(self, iface_name, service_id, dns_servers, dns_domains, revdns_domains, override_gateway):
        # Unless we override the gateway entirely, merge with the existing
        # resolver configuration instead of replacing it.
        if override_gateway:
            old_resolv_conf = []
        else:
            old_resolv_conf = open("/etc/resolv.conf").readlines()
        other_lines = []
        search = ''
        nses = []
        for line in old_resolv_conf:
            line = line.rstrip('\n')
            if line.startswith('search ') or line.startswith('domain '):
                # domain entry is simply an alternative spelling for search
                search = line.split(' ', 1)[1]
            elif line.startswith('nameserver '):
                nses.append(line.split(' ', 1)[1])
            else:
                other_lines.append(line)
        # VPN-provided domains/servers take precedence over the old ones.
        new_resolv_conf = []
        new_resolv_conf.append("search %s %s" % (' '.join(dns_domains), search))
        for ns in dns_servers + nses:
            new_resolv_conf.append("nameserver %s" % ns)
        new_resolv_conf.extend(other_lines)
        new_resolv_conf.append('')
        def _create_file():
            # Keep the original around so teardown can restore it.
            os.rename('/etc/resolv.conf', '/etc/resolv.conf.f5_bak')
            open('/etc/resolv.conf', 'w').write('\n'.join(new_resolv_conf))
        as_root(_create_file)
        self.resolv_conf_timestamp = os.stat('/etc/resolv.conf').st_mtime

    def teardown_dns(self):
        as_root(self._teardown_dns)

    def _teardown_dns(self):
        # Best-effort: only restore the backup if nobody else has modified
        # resolv.conf since we wrote it; swallow all errors.
        try:
            if self.resolv_conf_timestamp == 0:
                pass
            elif os.stat('/etc/resolv.conf').st_mtime == self.resolv_conf_timestamp:
                os.rename('/etc/resolv.conf.f5_bak', '/etc/resolv.conf')
            else:
                sys.stderr.write("Not restoring resolv.conf: modified by another process.\n")
                os.unlink('/etc/resolv.conf.f5_bak')
        except:
            pass
class ResolvConfHelperDNSMixin:
    """DNS setup through the resolvconf(8) framework, when installed."""

    def setup_dns(self, iface_name, service_id, dns_servers, dns_domains, revdns_domains, override_gateway):
        # FIXME: should I be doing something different here based on override_gateway?
        # ResolvConf is a system for managing your resolv.conf file in a
        # structured way on unix systems. When it is installed, go through it,
        # rather than munging the file manually (and thus causing potential
        # conflicts)
        # We append tun- to the interface so the proper record order is
        # established with the resolvconf distribution. Since we're essentially
        # using ppp for the same reason as most people would use tun, this
        # should be okay
        self.iface_name = iface_name
        cmd = "nameserver %s\nsearch %s\n" % (' '.join(dns_servers), ' '.join(dns_domains))
        run_as_root(['/sbin/resolvconf', '-a', 'tun-%s' % iface_name], stdin=cmd)

    def teardown_dns(self):
        as_root(self._teardown_dns)

    def _teardown_dns(self):
        # Best effort: ignore failures (record already removed, etc.).
        try:
            run_as_root(["/sbin/resolvconf", '-d', 'tun-%s' % self.iface_name])
        except:
            pass
class Linux2ManualPlatform(ManualFrobbingDNSMixin, Linux2Platform):
    """Linux with direct /etc/resolv.conf rewriting."""
    pass
class Linux2ResolvconfPlatform(ResolvConfHelperDNSMixin, Linux2Platform):
    """Linux using the resolvconf(8) helper for DNS."""
    pass
class FreeBSD6Platform(ManualFrobbingDNSMixin, FreeBSD6Base):
    """FreeBSD with direct /etc/resolv.conf rewriting."""
    pass
def get_platform():
    """Pick the Platform implementation matching sys.platform."""
    if sys.platform == "darwin":
        return DarwinPlatform()
    if sys.platform == "linux2":
        # Choose a dns resolver setup routine: prefer the resolvconf
        # helper when it is installed.
        if os.path.exists('/sbin/resolvconf'):
            return Linux2ResolvconfPlatform()
        return Linux2ManualPlatform()
    if sys.platform in ("freebsd6", "freebsd7", "freebsd8"):
        return FreeBSD6Platform()
    # Other Unix-like platforms aren't supported at the moment...but there's
    # no reason they can't be, when someone with such a platform tells me
    # the syntax for their "route" command. Patches welcome!
    raise MyException("Don't know how to setup routes/dns for platform %r" % sys.platform)
# Module-level singleton used by the rest of the script for route/DNS work.
platform = get_platform()
##### SSL certificate checking support.
def get_subjectAltName(cert):
    """Extract (dns_names, ip_addresses) from a parsed certificate dict.

    `cert` has the shape returned by ssl getpeercert(): 'subjectAltName'
    maps to a sequence of (type, value) pairs.  Either returned list may
    be empty.

    Fix: use the `in` operator instead of the long-deprecated,
    Python-2-only dict.has_key(); behavior is identical.
    """
    if 'subjectAltName' not in cert:
        return ([], [])
    ret = ([], [])
    for rdn in cert['subjectAltName']:
        if rdn[0].lower() == 'dns':
            ret[0].append(rdn[1])
        # Prefix match covers both 'IP' and 'IP Address' spellings.
        if rdn[0][:2].lower() == 'ip':
            ret[1].append(rdn[1])
    return ret
def get_commonName(cert):
    """Return all commonName values from the certificate subject.

    The 'subject' key (as produced by ssl getpeercert()) is a sequence of
    RDNs, each a sequence of (name, value) pairs; only the first pair of
    each RDN is examined, mirroring the structure ssl returns.

    Fix: use the `in` operator instead of the long-deprecated,
    Python-2-only dict.has_key(); behavior is identical.
    """
    if 'subject' not in cert:
        return []
    ret = []
    for rdn in cert['subject']:
        if rdn[0][0].lower() == 'commonname':
            ret.append(rdn[0][1])
    return ret
# WTF isn't this function in the python stdlib somewhere???
def verify_certificate_host(cert, ip, host):
    """Check that `cert` is valid for `host`; raise MyException otherwise.

    Accepts an exact commonName/subjectAltName match, or a '*.' wildcard
    matching exactly one extra leading label.  `ip` is currently unused
    (see the TODO about subjectAltName IP entries below).
    """
    def validate_entry(match):
        if match.startswith('*.'):
            # '*.example.com' matches 'foo.example.com' but not
            # 'example.com' or 'a.b.example.com'.
            hostparts = host.split('.', 1)
            if len(hostparts) > 1:
                if hostparts[1] == match[2:]:
                    return True
        return host == match
    cn = get_commonName(cert)
    san, san_ip = get_subjectAltName(cert)
    # TODO: check san_ip too...
    # NOTE(review): relies on Python 2 filter() returning a list (empty ->
    # falsy).  Under Python 3 a filter object is always truthy and this
    # check would never fire; the script targets Python 2 where it works.
    if not (filter(validate_entry, cn) or filter(validate_entry, san)):
        raise MyException("Invalid certificate, connecting to host %r, but cert is for cn: %r subjectAltName: %r.\nAre you being MITM'd? If not, try --dont-check-certificates\n" % (host, cn, san))
# Holds the NamedTemporaryFile alive so the cert file isn't deleted early.
OSX_cert_tempfile = None

def OSX_get_a_certificate():
    """Export one trusted anchor certificate from the OSX keychain to a
    temp file and return its path (or None on failure).

    This function is @#$@#@#$ retarded. Its only point is to extract a single
    cert from the OSX trusted cert store, so I can put it in a file, so I give
    that file to Python's SSL module (which gives it to OpenSSL), which then
    basically ignores that I'm explicitly telling it to only trust that one
    certificate, and uses all the certificates in OSX's trusted cert store,
    anyways. But it forces me to give it a file with a cert in it regardless
    of the fact that it's planning to use the **entire cert store**
    regardless of what I actually ask for...sigh. Why???
    """
    import objc, tempfile
    global OSX_cert_tempfile
    # ALSO, wtf doesn't Security.framework already have python wrappers like the
    # rest of OSX's frameworks?? Just make up a minimal wrapper here...
    Security=types.ModuleType('Security')
    objc.loadBundle(Security.__name__, Security.__dict__,
                    bundle_identifier="com.apple.security")
    objc.parseBridgeSupport('''<signatures version="0.9"><depends_on path="/System/Library/Frameworks/CoreFoundation.framework"/><enum name="kSecFormatX509Cert" value="9"/><enum name="kSecItemPemArmour" value="1"/><function name="SecTrustCopyAnchorCertificates"><arg type="o^@"/><retval type="l"/></function><function name="SecKeychainItemExport"><arg type="@"/><arg type="I"/><arg type="I"/><arg type="^{?=II^v@@@II}"/><arg type="o^@"/><retval type="l"/></function></signatures>''', Security.__dict__, Security.__name__)
    res, certs = Security.SecTrustCopyAnchorCertificates(None)
    if res == 0:
        for cert in certs:
            res, data = Security.SecKeychainItemExport(
                cert,
                Security.kSecFormatX509Cert, Security.kSecItemPemArmour, None, None)
            if res == 0:
                # First successfully-exported anchor wins.
                OSX_cert_tempfile = tempfile.NamedTemporaryFile()
                OSX_cert_tempfile.write(str(buffer(data)))
                OSX_cert_tempfile.flush()
                return OSX_cert_tempfile.name
    return None
def find_certificates_file():
    """Locate a CA-bundle file and store its path in the module global
    ssl_cert_path; leave it None (validation disabled) when none is found."""
    global ssl_cert_path
    if sslmodule is None:
        sys.stderr.write("Warning: server certificate checking disabled, requires Python >= 2.6.\n")
        return
    # Check for the file in all the places I know about...
    for p in CERTIFICATE_FILE_LOCATIONS:
        if os.path.exists(p):
            ssl_cert_path = p
            break
    # Oh, and for OSX, which doesn't ship a openssl cert file, do some black magic.
    if not ssl_cert_path and sys.platform == "darwin":
        ssl_cert_path = OSX_get_a_certificate()
    if not ssl_cert_path:
        sys.stderr.write("Warning: server certificate checking disabled, couldn't locate the certificates file.\n")
        sys.stderr.write(" Do you know where it is on your OS? Lemme know...\n")
### END SSL certificate checking gunk
def readline_from_sock(s):
    """Read one LF- (or CRLF-) terminated line from socket `s`.

    Reads a byte at a time so that no data beyond the newline is consumed
    (there is no buffering layer over the socket here).  The returned
    line has the newline and any carriage returns stripped; EOF also
    terminates the line.

    Fix: accumulate into a list and join once instead of repeated string
    concatenation (the `+=` loop is quadratic in the worst case).
    """
    chars = []
    while 1:
        data = s.recv(1)
        if not data:
            break
        elif data == '\n':
            break
        elif data != '\r':
            chars.append(data)
    return ''.join(chars)
def proxy_connect(ip, port):
    # Connect a socket to ip and port, and return a socket object.
    # If a proxy is defined (module global proxy_addr), connect via the
    # proxy: HTTP CONNECT tunneling or SOCKS5 (requires the socks module).
    if proxy_addr and proxy_addr[0] == 'http':
        s = socket.socket()
        s.connect(proxy_addr[1:])
        s.send("CONNECT %s:%d HTTP/1.0\r\n\r\n" % (ip, port))
        # Expect "HTTP/1.x 200 ..." as the first response line.
        statusline = readline_from_sock(s).split(' ')
        if len(statusline) < 2 or statusline[1] != '200':
            raise MyException("Proxy returned bad status for CONNECT: %r" % ' '.join(statusline))
        while 1: # Read remaining headers, if any
            line = readline_from_sock(s)
            if line == '':
                break
        # Now the ssl connection is going
    elif proxy_addr and proxy_addr[0] == 'socks5':
        # Socks method
        s = socks.socksocket()
        s.setproxy(socks.PROXY_TYPE_SOCKS5, proxy_addr[1], proxy_addr[2])
        s.connect((ip,port))
    else:
        # Direct connection.
        s = socket.socket()
        s.connect((ip,port))
    return s
def parse_hostport(host, default_port=0):
    """Split 'host[:port]' into an (ip, port) tuple.

    Fix: when no explicit port is present, return `default_port` -- the
    original ignored the parameter and always used 443.  Every visible
    caller passes 443 explicitly, so their behavior is unchanged.
    """
    ipport = host.split(':')
    if len(ipport) == 1:
        ip = ipport[0]
        port = default_port
    else:
        ip = ipport[0]
        port = int(ipport[1])
    return ip, port
def send_request(host, request):
    """Open an SSL connection to `host` (optionally 'host:port'; default
    port 443), send the raw HTTP `request`, and return the full response
    (headers + body) as a single string."""
    ip, port = parse_hostport(host, 443)
    s = proxy_connect(ip, port)
    ssl = sslwrap(ip, s)
    ssl.write(request)
    data = ''
    while 1:
        try:
            newdata = ssl.read()
            if not newdata:
                break
            data += newdata
        except (socket.error, socket.sslerror):
            # Treat connection errors mid-read as end of response.
            break
    #print data
    return data
def get_vpn_client_data(host):
    """Fetch the opaque client_data token from the FirePass pre-login
    check page; returns '' when the server doesn't use one."""
    # Some FirePass servers are configured to redirect to an external "pre-login
    # check" server. This server is supposed to run some random additional
    # checks to see if it likes you, and then redirects back to the firepass,
    # with the client_data gunk as an extra POST variable.
    # If such an element is present, the firepass will refuse login unless we
    # pass it through to the my.activation.php3 script. So, do so. Secureetay!
    request = """GET /my.logon.php3?check=1&no_inspectors=1 HTTP/1.0\r
Accept: */*\r
Accept-Language: en\r
Cookie: uRoamTestCookie=TEST; VHOST=standard\r
Referer: https://%(host)s/my.activation.php3\r
User-Agent: Mozilla/5.0 (Macintosh; U; PPC Mac OS X; en) AppleWebKit/417.9 (KHTML, like Gecko) Safari/417.9.2\r
Host: %(host)s\r
\r
""" % dict(host=host)
    result = send_request(host, request)
    # The token may appear either as a JS assignment or as a hidden form
    # field; accept both layouts.
    match = re.search('document.external_data_post_cls.client_data.value = \"([\w=]+)\"', result)
    if match:
        return match.group(1)
    match = re.search('name="client_data" value="([\w=]+)"', result)
    if match:
        return match.group(1)
    return ''
def do_portal_login(username, password):
request = """GET /GetAccess/Login?Template=userpass_key&AUTHMETHOD=UserPassword HTTP/1.0\r
Accept: */*\r
Accept-Encoding: gzip, deflate, sdch, br\r
Accept-Language: en-US,en;q=0.8,en-GB;q=0.6\r
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.100 Safari/537.36\r
Referer: https://portal.nap.gsic.titech.ac.jp/
Host: %(host)s\r
\r
""" % dict(host=PORTAL_HOST)
result = send_request(PORTAL_HOST, request)
session = None
match = re.search('^Set-Cookie: JSESSIONID=([^;]*);', result, re.MULTILINE)
if match == None:
sys.stderr.write('Error: JSESSIONID not found!\n')
sys.exit(1)
sessid = match.group(1)
match = re.search('name=\'pageGenTime\' value="([^"]+)"', result)
if match == None:
sys.stderr.write('Error: pageGenTime not found!\n')
sys.exit(1)
pageGenTime = match.group(1)
match = re.search("name='CSRFFormToken' value='([^']+)", result)
if match == None:
sys.stderr.write('Error: CSRFFormToken not found!\n')
sys.exit(1)
csrftoken = match.group(1)
body = "usr_name=%(user)s&usr_password=%(password)s&AUTHMETHOD=UserPassword&pageGenTime=%(pageGenTime)s&LOCALE=ja_JP&CSRFFormToken=%(csrftoken)s&HiddenURI=https://portal.nap.gsic.titech.ac.jp/GetAccess/ResourceList&OK=OK&Template=userpass_key" % dict(user=quote_plus(username), password=quote_plus(password), pageGenTime=pageGenTime, csrftoken=csrftoken)
request = """POST /GetAccess/Login HTTP/1.0\r
Accept: */*\r
Accept-Encoding: gzip, deflate, sdch, br\r
Accept-Language: en-US,en;q=0.8,en-GB;q=0.6\r
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.100 Safari/537.36\r
Cookie: JSESSIONID=%(sessid)s\r
Content-Type:application/x-www-form-urlencoded\r
Content-Length: %(len)d\r
Referer: https://%(host)s/GetAccess/Login?Template=userpass_key&AUTHMETHOD=UserPassword\r
Host: %(host)s\r
\r
%(body)s
""" % dict(host=PORTAL_HOST, len=len(body), body=body, sessid=sessid)
#print request
result = send_request(PORTAL_HOST, request)
match = re.search('^Set-Cookie: JSESSIONID=([^;]+);', result, re.MULTILINE)
if match == None:
sys.stderr.write('Error: JSESSIONID(2) not found!\n')
sys.exit(1)
sessid = match.group(1)
match = re.search('^Set-Cookie: AUTH_SESSION_ID=([^;]+);', result, re.MULTILINE)
if match == None:
sys.stderr.write('Error: AUTH_SESSION_ID not found!\n')
sys.exit(1)
auth_session_id = match.group(1)
match = re.search('^Location: ([^\n]+)', result, re.MULTILINE)
if match == None:
sys.stderr.write('Error: Location not found!\n')
sys.exit(1)
loc = match.group(1)
request = """GET /GetAccess/Login?Template=idg_key&AUTHMETHOD=IG&GASF=CERTIFICATE,IG.GRID&LOCALE=ja_JP&GAREASONCODE=13&GAIDENTIFICATIONID=UserPassword&GARESOURCEID=resourcelistID2&GAURI=https://portal.nap.gsic.titech.ac.jp/GetAccess/ResourceList&Reason=13&APPID=resourcelistID2&URI=https://portal.nap.gsic.titech.ac.jp/GetAccess/ResourceList HTTP/1.0\r
Accept: */*\r
Accept-Encoding: gzip, deflate, sdch, br\r
Accept-Language: en-US,en;q=0.8,en-GB;q=0.6\r
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.100 Safari/537.36\r
Cookie: JSESSIONID=%(sessid)s; AUTH_SESSION_ID=%(auth_session_id)s\r
Referer: https://portal.nap.gsic.titech.ac.jp/GetAccess/Login?Template=userpass_key&AUTHMETHOD=UserPassword\r
Host: %(host)s\r
\r
""" % dict(host=PORTAL_HOST, sessid=sessid, auth_session_id=auth_session_id)
result = send_request(PORTAL_HOST, request)
# Prompt for Matrix Input
matches = re.findall('(\[\w,\d\])', result)
if matches == None:
sys.stderr.write('Error: Matrix Authentication Codes not found!\n')
sys.exit(1)
inputs = []
messages = ['message3', 'message4', 'message5']
for m in matches:
inputs.append(raw_input('{}?: '.format(m)))
# Get All Form Hidden Values
keyval = get_hidden_values(result)
keyval.update(dict(zip(messages, inputs)))
body = urlencode(keyval)
request = """POST /GetAccess/Login HTTP/1.0\r
Accept: */*\r
Accept-Encoding: gzip, deflate, sdch, br\r
Accept-Language: en-US,en;q=0.8,en-GB;q=0.6\r
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.100 Safari/537.36\r
Cookie: JSESSIONID=%(sessid)s; AUTH_SESSION_ID=%(auth_session_id)s\r
Content-Type:application/x-www-form-urlencoded\r
Content-Length: %(len)d\r
Referer: https://%(host)s/GetAccess/Login?Template=userpass_key&AUTHMETHOD=UserPassword\r
Host: %(host)s\r
\r
%(body)s
""" % dict(host=PORTAL_HOST, len=len(body), body=body, sessid=sessid, auth_session_id=auth_session_id)
result = send_request(PORTAL_HOST, request)
match = re.search('^Set-Cookie: JSESSIONID=([^;]+);', result, re.MULTILINE)
if match == None:
sys.stderr.write('Error: JSESSIONID(2) not found!\n')
sys.exit(1)
sessid = match.group(1)
match = re.search('^Set-Cookie: AUTH_SESSION_ID=([^;]+);', result, re.MULTILINE)
if match == None:
sys.stderr.write('Error: AUTH_SESSION_ID not found!\n')
sys.exit(1)
auth_session_id = match.group(1)
request = """GET / HTTP/1.0\r
Accept: */*\r
Accept-Encoding: gzip, deflate, sdch, br\r
Accept-Language: en-US,en;q=0.8,en-GB;q=0.6\r
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.100 Safari/537.36\r
Cookie: AUTH_SESSION_ID=%(auth_session_id)s\r
Host: %(host)s\r
\r
""" % dict(host=APM_HOST, auth_session_id=auth_session_id)
result = send_request(APM_HOST, request)
matches = re.findall(r'Set-Cookie: (.*?)=(.*?);', result)
cookies = dict(matches)
cookies['AUTH_SESSION_ID'] = auth_session_id
cookies_encoded = '; '.join('{}={}'.format(key, val) for key, val in cookies.items())
request = """GET /my.policy HTTP/1.0\r
Accept: */*\r
Accept-Encoding: gzip, deflate, sdch, br\r
Accept-Language: en-US,en;q=0.8,en-GB;q=0.6\r
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.100 Safari/537.36\r
Cookie: %(cookies)s\r
Host: %(host)s\r
\r
""" % dict(host=APM_HOST, cookies=cookies_encoded)
result = send_request(APM_HOST, request)
matches = re.findall(r'Set-Cookie: (.*?)=(.*?);', result)
cookies.update(dict(matches))
cookies_encoded = '; '.join('{}={}'.format(key, val) for key, val in cookies.items())
body="username=%(username)s&password=%(password)s" % dict(username=username,password=password)
request = """POST /my.policy HTTP/1.0\r
Accept: */*\r
Accept-Encoding: gzip, deflate, sdch, br\r
Accept-Language: en-US,en;q=0.8,en-GB;q=0.6\r
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.100 Safari/537.36\r
Cookie: %(cookies)s\r
Content-Type:application/x-www-form-urlencoded\r
Content-Length: %(len)d\r
Referer: https://rp.nap.gsic.titech.ac.jp/vpn_access/service/\r
Host: %(host)s\r
\r
%(body)s
""" % dict(host=APM_HOST, len=len(body), auth_session_id=auth_session_id, sessid=sessid, body=body, cookies=cookies_encoded)
result = send_request(APM_HOST, request)
matches = re.findall(r'Set-Cookie: (.*?)=(.*?);', result)
cookies.update(dict(matches))
session = {}
session['AUTH_SESSION_ID'] = auth_session_id
session['F5_ST'] = cookies['F5_ST']
session['LastMRH_Session'] = cookies['LastMRH_Session']
if 'MRHSession' in cookies and cookies['MRHSession'] != None:
session['MRHSession'] = cookies['MRHSession']
return session
else:
sys.stderr.write('Failed to obtain MRHSession\n')
sys.exit(1)
def get_hidden_values(html_page):
matches = re.findall(r'type=[\'"]hidden[\'"] name=[\'"](.*?)[\'"] value=[\'"](.*?)[\'"]', html_page, re.IGNORECASE)
keyval = dict(matches)
return keyval
def do_login(client_data, host, username, password):
body="rsa_port=&vhost=standard&username=%(user)s&password=%(password)s&client_data=%(client_data)s&login=Logon&state=&mrhlogonform=1&miniui=1&tzoffsetmin=1&sessContentType=HTML&overpass=&lang=en&charset=iso-8859-1&uilang=en&uicharset=iso-8859-1&uilangchar=en.iso-8859-1&langswitcher=" % dict(user=quote_plus(username), password=quote_plus(password), client_data=client_data)
request = """POST /my.activation.php3 HTTP/1.0\r
Accept: */*\r
Accept-Language: en\r
Cookie: VHOST=standard; uRoamTestCookie=TEST\r
Content-Type: application/x-www-form-urlencoded\r
Referer: https://%(host)s/my.activation.php3\r
User-Agent: Mozilla/5.0 (Macintosh; U; PPC Mac OS X; en) AppleWebKit/417.9 (KHTML, like Gecko) Safari/417.9.2\r
Host: %(host)s\r
Content-Length: %(len)d\r
\r
%(body)s
""" % dict(host=host, len=len(body), body=body)
result = send_request(host, request)
session = None
pat = re.compile('^Set-Cookie: MRHSession=([^;]*);', re.MULTILINE)
for match in pat.finditer(result):
sessid = match.group(1)
if sessid == "deleted":
session = None
else:
session = sessid
if session is None:
# If the response body contains red HTML text, output the error
# (Usually authentication errors)
pat = re.compile('<font color=red>(.*?)</font>', re.MULTILINE)
for match in pat.finditer(result):
err_msg = match.group(1).replace(' ', '')
sys.stderr.write('Error: ' + err_msg + '\n')
return None
match = re.search("(Challenge: [^<]*)", result)
if match:
sys.stderr.write(match.group(1)+"\n")
return None
sys.stderr.write("Login process failed, unknown output. Sorry!\n")
sys.stderr.write(result)
sys.stderr.write("\n")
sys.exit(1)
else:
return session
def get_vpn_menu_number(session):
# Find out the "Z" parameter to use to open a VPN connection
session_encoded = '; '.join('{}={}'.format(key, val) for key, val in session.items())
request = """GET /vdesk/vpn/index.php3?outform=xml HTTP/1.0\r
Accept: */*\r
Accept-Language: en\r
Cookie: %(session)s\r
Referer: https://%(host)s/my.activation.php3\r
User-Agent: Mozilla/5.0 (Macintosh; U; PPC Mac OS X; en) AppleWebKit/417.9 (KHTML, like Gecko) Safari/417.9.2\r
Host: %(host)s\r
\r
""" % dict(host=APM_HOST, session=session_encoded)
result = send_request(APM_HOST, request)
match = re.search('<favorite id="([^"]*?)">', result)
if match:
menu_number = match.group(1)
#result = send_request(APM_HOST, request)
return match.group(1)
else:
if re.search('^Location: /my.logon.php3', result):
# a redirect to the login page.
sys.stderr.write("Old session no longer valid.\n")
return None
def get_VPN_params(host, session, menu_number):
session['F5_fullWT'] = 1
session_encode = '; '.join('{}={}'.format(key, val) for key, val in session.items())
request = """GET /vdesk/resource_all_info.eui?resourcename=%(menu_number)s&resourcetype=network_access HTTP/1.0\r
Accept: */*\r
Accept-Language: en\r
Cookie: %(session)s\r
Referer: https://apm.nap.gsic.titech.ac.jp/vdesk/webtop.eui?webtop=/Common/Apm_Resource_Webtop__Service-A&webtop_type=webtop_full\r
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36\r
Host: %(host)s\r
\r
""" % dict(menu_number=menu_number, session=session_encode, host=host)
#print request
result = send_request(host, request)
#print result
# Find TunnelHost
#match = re.search('<tunnel_host0>(.*?)</tunnel_host0>', result)
params = {}
params['tunnel_host0'] = 'apm.nap.gsic.titech.ac.jp'
params['tunnel_port0'] = '443'
params['DNS0'] = '131.112.125.58 131.112.181.2'
params['LAN0'] = '0.0.0.0/0.0.0.0'
params['Session_ID'] = session['MRHSession']
# Try to find the plugin parameters
# matches = list(re.finditer("<embed [^>]*?(version=[^>]*)>", result))
# if not matches:
# # A new version of the server has switched to using javascript to write
# # the parameters, now, so try matching that too.
# matches = list(re.finditer("document.writeln\('(version=[^)]*)'\)", result))
# if not matches:
# if re.search('^Location: /my.logon.php3', result):
# # a redirect to the login page.
# sys.stderr.write("Old session no longer valid.\n")
# return None
# sys.stderr.write("Embed info output:\n")
# sys.stderr.write(result)
# return None
# match = matches[-1]
# params = match.group(1)
# params = params.replace(' ', '&').replace('"', '')
# paramsDict = decode_params(params)
#return paramsDict
return params
def decode_params(paramsStr):
paramsDict = {}
for param in paramsStr.split('&'):
k,v = param.split('=', 1)
if re.match('q[0-9]+', k):
k,v = v.decode('hex').split('=', 1)
paramsDict[k] = v
return paramsDict
class LogWatcher:
"""Collect (iface_name, tty, local_ip, remote_ip) from the ppp log messages
and call ppp_ip_up when they've all arrived."""
collected_log = ''
iface_name = tty = remote_ip = local_ip = None
notified = False
def __init__(self, ip_up):
self.ip_up = ip_up
def _get_match(self, exp):
match = re.search(exp, self.collected_log, re.MULTILINE)
if match is not None:
return match.group(1)
def process(self, logmsg):
print "PPPD LOG: %r" % logmsg
self.collected_log += logmsg
if self.iface_name is None:
self.iface_name = self._get_match("Using interface (.*)$")
if self.tty is None:
self.tty = self._get_match("Connect: .* <--> (.*)$")
if self.remote_ip is None:
self.remote_ip = self._get_match("remote IP address (.*)$")
if self.local_ip is None:
self.local_ip = self._get_match("local IP address (.*)$")
if not (self.notified or
self.iface_name is None or self.tty is None or
self.remote_ip is None or self.local_ip is None):
print "CALLING ip_up%r" % ((self.iface_name, self.tty, self.local_ip, self.remote_ip),)
self.notified = True
self.ip_up(self.iface_name, self.tty, self.local_ip, self.remote_ip)
keepalive_socket = None
def set_keepalive_host(host):
global keepalive_socket
keepalive_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
keepalive_socket.connect((host, 7))
keepalive_socket.setblocking(0)
def run_event_loop(pppd_fd, ssl_socket, ssl, logpipe_r, ppp_ip_up):
ssl_socket.setblocking(0)
set_non_blocking(pppd_fd)
set_non_blocking(logpipe_r)
# Tiny little event-loop: don't try this at home.
ssl_write_blocked_on_read = False
ssl_read_blocked_on_write = False
data_to_pppd = ''
data_to_ssl = ''
data_to_ssl_buf2 = ''
def sigusr1(sig, frame):
sys.stderr.write("ssl_write_blocked_on_read=%r, ssl_read_blocked_on_write=%r, data_to_pppd=%r, data_to_ssl=%r, data_to_ssl_buf2=%r, time_since_last_activity=%r\n" % (ssl_write_blocked_on_read, ssl_read_blocked_on_write, data_to_pppd, data_to_ssl, data_to_ssl_buf2, time.time() - last_activity_time))
signal.signal(signal.SIGUSR1, sigusr1)
logwatcher = LogWatcher(ppp_ip_up)
last_activity_time = time.time()
while 1:
reads = [logpipe_r]
writes = []
# try to write data to pppd if pending, otherwise read more data from ssl
if data_to_pppd:
writes.append(pppd_fd)
else:
if ssl_read_blocked_on_write:
writes.append(ssl_socket)
else:
reads.append(ssl_socket)
# Conversely, write data to ssl if pending, otherwise read more data from pppd
if data_to_ssl:
if ssl_write_blocked_on_read:
reads.append(ssl_socket)
else:
writes.append(ssl_socket)
else:
reads.append(pppd_fd)
if keepalive_socket:
timeout = max(last_activity_time + KEEPALIVE_TIMEOUT - time.time(), 0)
else:
timeout = None
# Run the select, woot
try:
reads,writes,exc = select.select(reads, writes, [], timeout)
except select.error, se:
if se.args[0] not in (errno.EAGAIN, errno.EINTR):
raise
continue # loop back around to try again
if keepalive_socket and not reads and not writes:
# Returned from select because of timeout (probably)
if time.time() - last_activity_time > KEEPALIVE_TIMEOUT:
sys.stderr.write("Sending keepalive\n")
keepalive_socket.send('keepalive')
print "SELECT GOT:", reads,writes,exc
# To simplify matters, don't bother with what select returned. Just try
# everything; it doesn't matter if it fails.
# Read data from log pipe
try:
logmsg = os.read(logpipe_r, 10000)
print 'LOGMSG: %s' % logmsg
if not logmsg: #EOF
print "EOF on logpipe_r"
break
logwatcher.process(logmsg)
except OSError, se:
if se.args[0] not in (errno.EAGAIN, errno.EINTR):
raise
# Read data from pppd
if not data_to_ssl:
try:
data_to_ssl = os.read(pppd_fd, 10000)
if not data_to_ssl: #EOF
print "EOF on pppd"
break
#print "READ PPPD: %r" % data_to_ssl
except OSError, se:
if se.args[0] not in (errno.EAGAIN, errno.EINTR):
raise
# Read data from SSL
if not data_to_pppd:
try:
ssl_read_blocked_on_write = False
data_to_pppd = ssl.read()
if not data_to_pppd: #EOF
print "EOF on ssl"
break
last_activity_time = time.time()
except socket.sslerror, se:
if se.args[0] == socket.SSL_ERROR_WANT_READ:
pass
elif se.args[0] == socket.SSL_ERROR_WANT_WRITE:
ssl_read_blocked_on_write = True
else:
raise
#print "READ SSL: %r" % data_to_pppd
# Write data to pppd
if data_to_pppd:
try:
num_written = os.write(pppd_fd, data_to_pppd)
#print "WROTE PPPD: %r" % data_to_pppd[:num_written]
data_to_pppd = data_to_pppd[num_written:]
except OSError, se:
if se.args[0] not in (errno.EAGAIN, errno.EINTR):
raise
# Write data to SSL
if not data_to_ssl_buf2 and data_to_ssl:
# Write in SSL is not like unix write; you *must* call it with the
# same pointer as previously if it fails. Otherwise, it'll raise a
# "bad write retry" error.
data_to_ssl_buf2 = data_to_ssl
data_to_ssl = ''
if data_to_ssl_buf2:
try:
ssl_write_blocked_on_read = False
num_written = ssl.write(data_to_ssl_buf2)
# should always either write all data, or raise a WANT_*
assert num_written == len(data_to_ssl_buf2)
data_to_ssl_buf2 = ''
last_activity_time = time.time()
except socket.sslerror, se:
if se.args[0] == socket.SSL_ERROR_WANT_READ:
ssl_write_blocked_on_read = True
elif se.args[0] == socket.SSL_ERROR_WANT_WRITE:
pass
else:
raise
#print "WROTE SSL: %r" % data_to_ssl[:num_written]
def shutdown_pppd(pid):
res_pid, result = os.waitpid(pid, os.WNOHANG)
if res_pid and result != 0:
sys.stdout.write("PPPd exited unexpectedly with result %s\n" % result)
else:
sys.stdout.write("Shutting down pppd, please wait...\n")
os.kill(pid, signal.SIGTERM)
os.waitpid(pid, 0)
mask2bits = {}
bits2mask = [0]*33
for x in range(33):
mask = 2**32 - 2**(32-x)
mask2bits[mask] = x
bits2mask[x] = '%d.%d.%d.%d' % ((mask / 16777216), (mask / 65536) % 256,
(mask / 256) % 256, mask % 256)
def parts_to_int(parts):
num = 0
for n in parts:
num = num * 256 + n
num *= 256 ** (4 - len(parts))
return num
def parse_net_bits(routespec):
# This routine parses the following formats:
# w.x.y.z/numbits
# w.x.y.z/A.B.C.D
# w[.x[.y[.z]]] (netmask implicit in number of .s)
print '/' in routespec
if '/' in routespec:
net, bits = routespec.split('/', 1)
netparts = map(int, net.split('.'))
while len(netparts) < 4:
netparts.append(0)
if '.' in bits:
netmaskparts = map(int, bits.split('.'))
netmask = 0
for n in netmaskparts:
netmask = netmask * 256 + n
netmask *= 256 ** (4 - len(netmaskparts))
bits = mask2bits.get(netmask)
if bits is None:
raise MyException("Non-contiguous netmask in routespec: %s\n" % (routespec,))
else:
bits = int(bits)
else:
netparts = map(int, routespec.split('.'))
bits = len(netparts) * 8
while len(netparts) < 4:
netparts.append(0)
return netparts, bits
def routespec_to_revdns(netparts, bits):
domain = 'in-addr.arpa'
i = 0
while bits >= 8:
domain = str(netparts[i]) + '.' + domain
bits -= 8
i += 1
if bits == 0:
return [domain]
else:
remaining_bits = 8 - bits
start_addr = netparts[i] & ~(2**remaining_bits - 1)
return [(str(n) + '.' + domain)
for n in range(start_addr, start_addr + 2**(remaining_bits))]
def execPPPd(params):
tunnel_host=params['tunnel_host0']
tunnel_port=int(params['tunnel_port0'])
serviceid = "f5vpn-%s"%tunnel_host
request = """GET /myvpn?sess=%s HTTP/1.0\r
Cookie: MRHSession=%s\r
\r
""" % (params['Session_ID'], params['Session_ID'])
for i in range(5):
try:
ssl_socket = proxy_connect(tunnel_host, tunnel_port)
ssl_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
ssl = sslwrap(tunnel_host, ssl_socket)
ssl.write(request)
initial_data = ssl.read()
print 'Initial data: %s' % initial_data
break
except socket.sslerror, e:
# Sometimes the server seems to respond with "EOF occurred in violation of protocol"
# instead of establishing the connection. Try to deal with this by retrying...
if e.args[0] != 8:
raise
sys.stderr.write("VPN socket unexpectedly closed during connection setup, retrying (%d/5)...\n" % (i + 1))
# Make new PTY
(pppd_fd, slave_pppd_fd) = os.openpty()
# Make log pipe
logpipe_r,logpipe_w = os.pipe()
if params.get('LAN0'):
routes_to_add = [parse_net_bits(routespec)
for routespec in params['LAN0'].split(' ')]
else:
routes_to_add = []
override_gateway = ('UseDefaultGateway0' in params)
if override_gateway:
# If the server says to redirect the default gateway, we need to first add
# an explicit route for the VPN server with the /current/ default gateway.
tunnel_ip = ssl_socket.getpeername()[0]
# FIXME: This is a total hack...and incorrect in some cases, too. But
# it'll work in the normal case where the VPN server isn't on your local
# subnet. This should really be using some (platform-specific) method
# of finding the current route to tunnel_ip instead of assuming that's
# the default route.
gw_ip = os.popen("netstat -rn|grep '^default\|^0.0.0.0'|awk '{print $2}'").read().split()[0]
sys.stderr.write("Detected current default route: %r\n" % gw_ip)
sys.stderr.write("Attempting to delete and override route to VPN server.\n")
try:
platform.setup_route('', gw_ip, tunnel_ip, 32, 'delete')
except:
pass
platform.setup_route('', gw_ip, tunnel_ip, 32, 'add')
# Now, add a new default route, if it wasn't already specified (but not
# on darwin: pppd's "defaultroute" option actually works there)
if sys.platform != "darwin":
if ([0,0,0,0], 0) not in routes_to_add:
routes_to_add.insert(0, ([0,0,0,0], 0))
pid = os.fork()
traceback.print_stack()
print 'PID: %s' % pid
if pid == 0:
os.close(ssl_socket.fileno())
# Setup new controlling TTY
os.close(pppd_fd)
os.setsid()
os.dup2(slave_pppd_fd, 0)
os.close(slave_pppd_fd)
# setup log pipe
os.dup2(logpipe_w, 4)
os.close(logpipe_r)
os.close(logpipe_w)
# Become root
os.seteuid(0)
os.setuid(0)
# Run pppd
args = ['pppd', 'logfd', '4', 'noauth', 'nodetach',
'crtscts', 'passive', 'ipcp-accept-local', 'ipcp-accept-remote',
'local', 'nodeflate', 'novj', ]
if override_gateway:
args.append('defaultroute')
else:
args.append('nodefaultroute')
if sys.platform == "darwin":
args.extend(['serviceid', serviceid])
try:
os.execvp("pppd", args)
except:
os._exit(127)
os.close(slave_pppd_fd)
os.close(logpipe_w)
def ppp_ip_up(iface_name, tty, local_ip, remote_ip):
revdns_domains = []
for net, bits in routes_to_add:
platform.setup_route(iface_name, local_ip, '.'.join(map(str, net)), bits, 'add')
revdns_domains.extend(routespec_to_revdns(net, bits))
# sending a packet to the "local" ip appears to actually send data
# across the connection, which is the desired behavior.
set_keepalive_host(local_ip)
if params.get('DNS0'):
platform.setup_dns(iface_name, serviceid,
params['DNS0'].split(','),
re.split('[, ]+', params.get('DNSSuffix0', '')),
revdns_domains,
override_gateway)
print "VPN link is up!"
try:
print 'printing Stuff: '
print pppd_fd, ssl_socket, ssl, logpipe_r
run_event_loop(pppd_fd, ssl_socket, ssl, logpipe_r, ppp_ip_up)
finally:
if params.get('DNS0'):
platform.teardown_dns()
as_root(shutdown_pppd, pid)
if override_gateway:
try:
platform.setup_route('', gw_ip, tunnel_ip, 32, 'delete')
except:
pass
def usage(exename, s):
print >>s, "Usage: %s [--dont-check-certificates] [--{http,socks5}-proxy=host:port] [[user@]host]" % exename
def get_prefs():
try:
conf = open(os.path.expanduser(CONFIG_FILE))
except:
return None
return conf.readline()
def write_prefs(line):
try:
f = open(os.path.expanduser(CONFIG_FILE), 'w')
f.write(line)
except:
print "Couldn't write prefs file: %s" % CONFIG_FILE
# This code could technically work with 2.3.5 and higher, but for testability purposes, the minimum version is set to be 2.7
# ------------------------------------------------------------------------
# r37117 | doko | 2004-08-24 17:48:15 -0400 (Tue, 24 Aug 2004) | 4 lines
# [Patch #945642] Fix non-blocking SSL sockets, which blocked on reads/writes in Python 2.3.
# Taken from HEAD, tested as part of the unstable and testing Debian packages since May on
# various architectures.
def main(argv):
global proxy_addr
if '--help' in argv:
usage(argv[0], sys.stdout)
sys.exit(0)
if sys.version_info < (2,7):
sys.stderr.write("Python 2.7 or later is required.\n")
sys.stderr.write("Current Version: %s\n" % sys.version)
sys.exit(1)
if os.geteuid() != 0:
sys.stderr.write("ERROR: \n")
sys.stderr.write(
" This script must be run as root. Preferably setuid (via companion .c\n"
" program), but it'll work when invoked as root directly, too.\n")
sys.exit(1)
# Set effective uid to userid; will become root as necessary
os.seteuid(os.getuid())
user = getpass.getuser()
try:
opts,args=getopt.getopt(argv[1:], "", ['verbose', 'http-proxy=', 'socks5-proxy=', 'dont-check-certificates'])
except getopt.GetoptError, e:
sys.stderr.write("Unknown option: %s\n" % e.opt)
usage(argv[0], sys.stderr)
sys.exit(1)
if len(args) > 1:
usage(argv[0], sys.stderr)
sys.exit(1)
prefs = get_prefs()
old_session = None
userhost = None
if prefs is not None:
path, userhost, old_session = prefs.split('\0')
if len(args) > 0:
if args[0] != userhost:
# Don't attempt to reuse session if switching users or servers.
old_session = None
userhost = args[0]
if userhost is None:
sys.stderr.write("The host argument must be provided the first time.\n")
sys.exit(1)
if '@' in userhost:
user,host = userhost.rsplit('@', 1)
else:
host = userhost
verbosity = False
check_certificates = True
for opt,val in opts:
if opt == '--verbose':
verbosity = True
elif opt == '--http-proxy':
proxy_addr = ('http',) + parse_hostport(val)
sys.stderr.write("Using proxy: %r\n" % (proxy_addr,))
elif opt == '--socks5-proxy':
if socks is None:
sys.stderr.write("Cannot use a socks5 proxy: you do not seem to have the socks module available.\n")
sys.stderr.write("Please install SocksiPy: http://socksipy.sourceforge.net/\n")
sys.exit(1)
proxy_addr = ('socks5',) + parse_hostport(val)
sys.stderr.write("Using proxy: %r\n" % (proxy_addr,))
elif opt == '--dont-check-certificates':
check_certificates = False
if check_certificates:
# Updates global ssl_cert_path
find_certificates_file()
params = None
if old_session:
print "Trying old session..."
menu_number = get_vpn_menu_number(host, old_session)
if menu_number is not None:
params = get_VPN_params(host, old_session, menu_number)
session = old_session
if params is None:
#client_data = get_vpn_client_data(host)
# Loop keep asking for passwords while the site gives a new prompt
while True:
password = getpass.getpass("password for %s@%s? " % (user, host))
session = do_portal_login(user, password)
if session is not None:
print "Session id gotten:", session
break
print "Getting params..."
menu_number = get_vpn_menu_number(session)
if menu_number is None:
sys.stderr.write("Unable to find the 'Network Access' entry in main menu. Do you have VPN access?\n")
sys.exit(1)
print"Menu Number: %s" % menu_number
params = get_VPN_params(APM_HOST, session, menu_number)
if params is None:
print "Couldn't get embed info. Sorry."
sys.exit(2)
#write_prefs('\0'.join(['', userhost, session]))
if verbosity:
sys.stderr.write("VPN Parameter dump:\n")
for k,v in params.iteritems():
sys.stderr.write(" %r: %r\n" % (k,v))
print "Got plugin params, execing vpn client"
execPPPd(params)
if __name__ == '__main__':
try:
main(sys.argv)
except KeyboardInterrupt:
pass
except SystemExit, se:
print "ERROR:",se
print "Shut-down."
| gpl-3.0 |
UnigramDev/Unigram | Unigram/Unigram/ViewModels/DialogViewModel.Media.cs | 39364 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Telegram.Td;
using Telegram.Td.Api;
using Unigram.Common;
using Unigram.Controls;
using Unigram.Converters;
using Unigram.Entities;
using Unigram.Services;
using Unigram.Services.Factories;
using Unigram.Views.Popups;
using Windows.ApplicationModel.Contacts;
using Windows.ApplicationModel.DataTransfer;
using Windows.Graphics.Imaging;
using Windows.Media.Capture;
using Windows.Media.Effects;
using Windows.Media.MediaProperties;
using Windows.Storage;
using Windows.Storage.Pickers;
using Windows.UI.Text;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
namespace Unigram.ViewModels
{
public partial class DialogViewModel
{
#region Stickers
public RelayCommand<Sticker> StickerSendCommand { get; }
public void StickerSendExecute(Sticker sticker)
{
StickerSendExecute(sticker, null, null);
}
public async void StickerSendExecute(Sticker sticker, bool? schedule, bool? silent, string emoji = null)
{
Delegate?.HideStickers();
var chat = _chat;
if (chat == null)
{
return;
}
var restricted = await VerifyRightsAsync(chat, x => x.CanSendOtherMessages, Strings.Resources.GlobalAttachStickersRestricted, Strings.Resources.AttachStickersRestrictedForever, Strings.Resources.AttachStickersRestricted);
if (restricted)
{
return;
}
var options = await PickMessageSendOptionsAsync(schedule, silent);
if (options == null)
{
return;
}
var reply = GetReply(true);
var input = new InputMessageSticker(new InputFileId(sticker.StickerValue.Id), sticker.Thumbnail?.ToInput(), sticker.Width, sticker.Height, emoji ?? string.Empty);
await SendMessageAsync(chat, reply, input, options);
}
public RelayCommand<Sticker> StickerViewCommand { get; }
private void StickerViewExecute(Sticker sticker)
{
Delegate?.HideStickers();
OpenSticker(sticker);
}
public RelayCommand<Sticker> StickerFaveCommand { get; }
private void StickerFaveExecute(Sticker sticker)
{
ProtoService.Send(new AddFavoriteSticker(new InputFileId(sticker.StickerValue.Id)));
}
public RelayCommand<Sticker> StickerUnfaveCommand { get; }
private void StickerUnfaveExecute(Sticker sticker)
{
ProtoService.Send(new RemoveFavoriteSticker(new InputFileId(sticker.StickerValue.Id)));
}
#endregion
#region Animations
public RelayCommand<Animation> AnimationSendCommand { get; }
public void AnimationSendExecute(Animation animation)
{
AnimationSendExecute(animation, null, null);
}
public async void AnimationSendExecute(Animation animation, bool? schedule, bool? silent)
{
Delegate?.HideStickers();
var chat = _chat;
if (chat == null)
{
return;
}
var restricted = await VerifyRightsAsync(chat, x => x.CanSendOtherMessages, Strings.Resources.GlobalAttachGifRestricted, Strings.Resources.AttachGifRestrictedForever, Strings.Resources.AttachGifRestricted);
if (restricted)
{
return;
}
var options = await PickMessageSendOptionsAsync(schedule, silent);
if (options == null)
{
return;
}
var reply = GetReply(true);
var input = new InputMessageAnimation(new InputFileId(animation.AnimationValue.Id), animation.Thumbnail?.ToInput(), new int[0], animation.Duration, animation.Width, animation.Height, null);
await SendMessageAsync(chat, reply, input, options);
}
public RelayCommand<Animation> AnimationDeleteCommand { get; }
private void AnimationDeleteExecute(Animation animation)
{
ProtoService.Send(new RemoveSavedAnimation(new InputFileId(animation.AnimationValue.Id)));
}
public RelayCommand<Animation> AnimationSaveCommand { get; }
private void AnimationSaveExecute(Animation animation)
{
ProtoService.Send(new AddSavedAnimation(new InputFileId(animation.AnimationValue.Id)));
}
#endregion
public async Task<bool> VerifyRightsAsync(Chat chat, Func<ChatPermissions, bool> permission, string global, string forever, string temporary)
{
if (chat.Type is ChatTypeSupergroup super)
{
var supergroup = ProtoService.GetSupergroup(super.SupergroupId);
if (supergroup == null)
{
return false;
}
if (supergroup.Status is ChatMemberStatusRestricted restricted && !permission(restricted.Permissions))
{
if (restricted.IsForever())
{
await MessagePopup.ShowAsync(forever, Strings.Resources.AppName, Strings.Resources.OK);
}
else
{
await MessagePopup.ShowAsync(string.Format(temporary, Converter.BannedUntil(restricted.RestrictedUntilDate)), Strings.Resources.AppName, Strings.Resources.OK);
}
return true;
}
else if (supergroup.Status is ChatMemberStatusMember)
{
if (!permission(chat.Permissions))
{
await MessagePopup.ShowAsync(global, Strings.Resources.AppName, Strings.Resources.OK);
return true;
}
}
}
else
{
if (!permission(chat.Permissions))
{
await MessagePopup.ShowAsync(global, Strings.Resources.AppName, Strings.Resources.OK);
return true;
}
}
return false;
}
public bool VerifyRights(Chat chat, Func<ChatPermissions, bool> permission, string global, string forever, string temporary, out string label)
{
return VerifyRights(CacheService, chat, permission, global, forever, temporary, out label);
}
public static bool VerifyRights(ICacheService cacheService, Chat chat, Func<ChatPermissions, bool> permission, string global, string forever, string temporary, out string label)
{
if (cacheService.TryGetSupergroup(chat, out var supergroup))
{
if (supergroup.Status is ChatMemberStatusRestricted restricted && !permission(restricted.Permissions))
{
if (restricted.IsForever())
{
label = forever;
}
else
{
label = string.Format(temporary, Converter.BannedUntil(restricted.RestrictedUntilDate));
}
return true;
}
else if (supergroup.Status is ChatMemberStatusCreator or ChatMemberStatusAdministrator)
{
label = null;
return false;
}
}
if (!permission(chat.Permissions))
{
if (chat.Type is ChatTypeSupergroup super && super.IsChannel)
{
label = Strings.Resources.ChannelCantSendMessage;
return true;
}
label = global;
return true;
}
label = null;
return false;
}
public RelayCommand SendDocumentCommand { get; }
private async void SendDocumentExecute()
{
var header = _composerHeader;
if (header?.EditingMessage == null)
{
try
{
var picker = new FileOpenPicker();
picker.ViewMode = PickerViewMode.Thumbnail;
picker.SuggestedStartLocation = PickerLocationId.DocumentsLibrary;
picker.FileTypeFilter.Add("*");
var files = await picker.PickMultipleFilesAsync();
if (files != null && files.Count > 0)
{
SendFileExecute(files, media: false);
}
}
catch { }
}
else
{
}
}
/// <summary>
/// Shows the "send files" dialog for the given picked files and dispatches them,
/// either as a single message, as albums of up to 10 items, or as individual messages.
/// </summary>
/// <param name="files">Files selected by the user.</param>
/// <param name="caption">Optional caption; when null the current composer text is used.</param>
/// <param name="media">Whether the dialog should offer the media (photo/video) presentation.</param>
public async void SendFileExecute(IReadOnlyList<StorageFile> files, FormattedText caption = null, bool media = true)
{
var chat = _chat;
if (chat == null)
{
return;
}
var items = await StorageMedia.CreateAsync(files);
if (items.IsEmpty())
{
return;
}
FormattedText formattedText = null;
if (caption == null)
{
// Take the caption from the composer, trimmed to the server-side caption limit.
formattedText = GetFormattedText(true);
caption = formattedText.Substring(0, CacheService.Options.MessageCaptionLengthMax);
}
var self = CacheService.IsSavedMessages(_chat);
var dialog = new SendFilesPopup(items, media, _chat.Type is ChatTypePrivate && !self, _type == DialogType.History, self);
dialog.ViewModel = this;
dialog.Caption = caption;
var confirm = await dialog.OpenAsync();
if (confirm != ContentDialogResult.Primary)
{
// Cancelled: restore the text we took out of the composer.
if (formattedText != null)
{
TextField?.SetText(formattedText);
}
return;
}
var options = await PickMessageSendOptionsAsync(dialog.Schedule, dialog.Silent);
if (options == null)
{
return;
}
if (dialog.Items.Count == 1)
{
await SendStorageMediaAsync(chat, dialog.Items[0], dialog.Caption, dialog.IsFilesSelected, options);
}
else if (dialog.Items.Count > 1 && dialog.IsAlbum && dialog.IsAlbumAvailable)
{
// Albums are limited to 10 items; send full batches as they fill up.
var group = new List<StorageMedia>(Math.Min(dialog.Items.Count, 10));
foreach (var item in dialog.Items)
{
group.Add(item);
if (group.Count == 10)
{
await SendGroupedAsync(group, dialog.Caption, options, dialog.IsFilesSelected);
group = new List<StorageMedia>(Math.Min(dialog.Items.Count, 10));
}
}
if (group.Count > 0)
{
await SendGroupedAsync(group, dialog.Caption, options, dialog.IsFilesSelected);
}
}
else if (dialog.Items.Count > 0)
{
// Not an album: caption goes out as a separate text message first.
if (dialog.Caption != null)
{
await SendMessageAsync(dialog.Caption, options);
}
foreach (var file in dialog.Items)
{
await SendStorageMediaAsync(chat, file, null, dialog.IsFilesSelected, options);
}
}
}
/// <summary>
/// Dispatches a picked media item to the typed send helper matching its runtime type.
/// Documents and audio always go out as documents; photos and videos honor
/// <paramref name="asFile"/>, self-destruct TTL and any pending edits/transforms.
/// </summary>
private async Task SendStorageMediaAsync(Chat chat, StorageMedia storage, FormattedText caption, bool asFile, MessageSendOptions options)
{
    switch (storage)
    {
        case StorageDocument:
        case StorageAudio:
            await SendDocumentAsync(chat, storage.File, caption, options);
            break;
        case StoragePhoto:
            await SendPhotoAsync(chat, storage.File, caption, asFile, storage.Ttl, storage.IsEdited ? storage.EditState : null, options);
            break;
        case StorageVideo video:
            await SendVideoAsync(chat, storage.File, caption, video.IsMuted, asFile, storage.Ttl, await video.GetEncodingAsync(), video.GetTransform(), options);
            break;
    }
}
/// <summary>
/// Builds an input-document message from <paramref name="file"/> and sends it.
/// Silently does nothing when the message factory cannot handle the file.
/// </summary>
private async Task SendDocumentAsync(Chat chat, StorageFile file, FormattedText caption = null, MessageSendOptions options = null)
{
var factory = await _messageFactory.CreateDocumentAsync(file, false);
if (factory != null)
{
var reply = GetReply(true);
var input = factory.Delegate(factory.InputFile, caption);
await SendMessageAsync(chat, reply, input, options);
}
}
/// <summary>
/// Builds and sends a photo message, optionally as a plain file, with an optional
/// self-destruct TTL and pending crop/draw edits applied.
/// </summary>
private async Task SendPhotoAsync(Chat chat, StorageFile file, FormattedText caption, bool asFile, int ttl = 0, BitmapEditState editState = null, MessageSendOptions options = null)
{
var factory = await _messageFactory.CreatePhotoAsync(file, asFile, ttl, editState);
if (factory != null)
{
var reply = GetReply(true);
var input = factory.Delegate(factory.InputFile, caption);
await SendMessageAsync(chat, reply, input, options);
}
}
/// <summary>
/// Builds and sends a video message with the requested encoding profile and
/// transform effects; <paramref name="animated"/> marks muted GIF-like videos.
/// </summary>
public async Task SendVideoAsync(Chat chat, StorageFile file, FormattedText caption, bool animated, bool asFile, int ttl = 0, MediaEncodingProfile profile = null, VideoTransformEffectDefinition transform = null, MessageSendOptions options = null)
{
var factory = await _messageFactory.CreateVideoAsync(file, animated, asFile, ttl, profile, transform);
if (factory != null)
{
var reply = GetReply(true);
var input = factory.Delegate(factory.InputFile, caption);
await SendMessageAsync(chat, reply, input, options);
}
}
/// <summary>
/// Sends a round video note. Prompts for send options (schedule/silent) first
/// and aborts if the user cancels that dialog.
/// </summary>
public async Task SendVideoNoteAsync(Chat chat, StorageFile file, MediaEncodingProfile profile = null, VideoTransformEffectDefinition transform = null)
{
var options = await PickMessageSendOptionsAsync();
if (options == null)
{
return;
}
var factory = await _messageFactory.CreateVideoNoteAsync(file, profile, transform);
if (factory != null)
{
var reply = GetReply(true);
// Video notes never carry a caption.
var input = factory.Delegate(factory.InputFile, null);
await SendMessageAsync(chat, reply, input, options);
}
}
/// <summary>
/// Sends a recorded voice note to the given chat. Prompts for send options
/// (schedule/silent) first and aborts if the user cancels that dialog.
/// </summary>
/// <param name="chat">Target chat.</param>
/// <param name="file">Audio file containing the recording.</param>
/// <param name="duration">Recording length, in seconds.</param>
/// <param name="caption">Optional formatted caption.</param>
public async Task SendVoiceNoteAsync(Chat chat, StorageFile file, int duration, FormattedText caption)
{
    var options = await PickMessageSendOptionsAsync();
    if (options == null)
    {
        return;
    }

    var reply = GetReply(true);
    // Array.Empty avoids allocating a fresh zero-length waveform array per call.
    var input = new InputMessageVoiceNote(await file.ToGeneratedAsync(), duration, Array.Empty<byte>(), caption);

    await SendMessageAsync(chat, reply, input, options);
}
public RelayCommand SendCameraCommand { get; }
/// <summary>
/// Captures a photo or video with the system camera UI and sends the result
/// through the regular file-sending pipeline.
/// </summary>
private async void SendCameraExecute()
{
var capture = new CameraCaptureUI();
capture.PhotoSettings.AllowCropping = false;
capture.PhotoSettings.Format = CameraCaptureUIPhotoFormat.Jpeg;
capture.PhotoSettings.MaxResolution = CameraCaptureUIMaxPhotoResolution.HighestAvailable;
capture.VideoSettings.Format = CameraCaptureUIVideoFormat.Mp4;
capture.VideoSettings.MaxResolution = CameraCaptureUIMaxVideoResolution.HighestAvailable;
var file = await capture.CaptureFileAsync(CameraCaptureUIMode.PhotoOrVideo);
if (file != null)
{
SendFileExecute(new[] { file });
}
}
public RelayCommand SendMediaCommand { get; }
/// <summary>
/// Lets the user pick photos/videos (filtered to known media types) and sends them.
/// </summary>
private async void SendMediaExecute()
{
try
{
var picker = new FileOpenPicker();
picker.ViewMode = PickerViewMode.Thumbnail;
picker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
picker.FileTypeFilter.AddRange(Constants.MediaTypes);
var files = await picker.PickMultipleFilesAsync();
if (files != null && files.Count > 0)
{
SendFileExecute(files);
}
}
// Picker broker failures are ignored to keep the UI responsive.
catch { }
}
public RelayCommand SendContactCommand { get; }
/// <summary>
/// Lets the user pick a contact from the system address book and sends it as a
/// contact message. Prefers resolving the picked contact to a known Telegram user
/// via the app's contact annotations; falls back to the contact's first phone number.
/// </summary>
private async void SendContactExecute()
{
var chat = _chat;
if (chat == null)
{
return;
}
var picker = new ContactPicker();
//picker.SelectionMode = ContactSelectionMode.Fields;
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.Address);
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.ConnectedServiceAccount);
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.Email);
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.ImportantDate);
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.JobInfo);
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.Notes);
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.PhoneNumber);
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.SignificantOther);
//picker.DesiredFieldsWithContactFieldType.Add(ContactFieldType.Website);
try
{
var picked = await picker.PickContactAsync();
if (picked != null)
{
Telegram.Td.Api.Contact contact = null;
string vcard = string.Empty;
var annotationStore = await ContactManager.RequestAnnotationStoreAsync(ContactAnnotationStoreAccessType.AppAnnotationsReadWrite);
var store = await ContactManager.RequestStoreAsync(ContactStoreAccessType.AppContactsReadWrite);
if (store != null && annotationStore != null)
{
var full = await store.GetContactAsync(picked.Id);
if (full != null)
{
var annotations = await annotationStore.FindAnnotationsForContactAsync(full);
//var vcardStream = await ContactManager.ConvertContactToVCardAsync(full, 2000);
//using (var stream = await vcardStream.OpenReadAsync())
//{
//    using (var dataReader = new DataReader(stream.GetInputStreamAt(0)))
//    {
//        await dataReader.LoadAsync((uint)stream.Size);
//        vcard = dataReader.ReadString(dataReader.UnconsumedBufferLength);
//    }
//}
var first = annotations.FirstOrDefault();
if (first != null)
{
// RemoteId is prefixed by one character; the rest is the Telegram user id.
var remote = first.RemoteId;
if (long.TryParse(remote.Substring(1), out long userId))
{
var user = ProtoService.GetUser(userId);
if (user != null)
{
contact = new Telegram.Td.Api.Contact(user.PhoneNumber, user.FirstName, user.LastName, vcard, user.Id);
}
}
}
//contact = full;
// Fallback: not a known Telegram user, send the raw phone-book entry.
if (contact == null)
{
var phone = full.Phones.FirstOrDefault();
if (phone == null)
{
return;
}
contact = new Telegram.Td.Api.Contact(phone.Number, picked.FirstName, picked.LastName, vcard, 0);
}
}
}
if (contact == null)
{
return;
}
var options = await PickMessageSendOptionsAsync();
if (options == null)
{
return;
}
await SendContactAsync(chat, contact, options);
}
}
// Contact store access can fail (permissions, broker); ignore silently.
catch { }
}
/// <summary>
/// Sends a contact card to the given chat, replying to the pending reply target if any.
/// </summary>
public async Task<BaseObject> SendContactAsync(Chat chat, Telegram.Td.Api.Contact contact, MessageSendOptions options)
{
    return await SendMessageAsync(chat, GetReply(true), new InputMessageContact(contact), options);
}
//private async Task<BaseObject> SendMessageAsync(long replyToMessageId, InputMessageContent inputMessageContent)
//{
// var options = new MessageSendOptions(false, false, null);
// if (_isSchedule)
// {
// var dialog = new SupergroupEditRestrictedUntilView(DateTime.Now.ToTimestamp());
// var confirm = await dialog.ShowQueuedAsync();
// if (confirm != ContentDialogResult.Primary)
// {
// return null;
// }
// options.SchedulingState = new MessageSchedulingStateSendAtDate(dialog.Value.ToTimestamp());
// }
// return await SendMessageAsync(replyToMessageId, inputMessageContent, options);
//}
/// <summary>
/// Resolves the send options for the next outgoing message. When scheduling is
/// requested (explicitly, or implicitly in the scheduled-messages view) the user
/// is prompted for a date; returns null when the user cancels.
/// </summary>
/// <param name="schedule">Force (true) or suppress (false) the scheduling prompt; null = infer from view.</param>
/// <param name="silent">Whether to send without notification; ignored when scheduling.</param>
public async Task<MessageSendOptions> PickMessageSendOptionsAsync(bool? schedule = null, bool? silent = null)
{
var chat = _chat;
if (chat == null)
{
return null;
}
if (schedule == true || (_type == DialogType.ScheduledMessages && schedule == null))
{
var user = CacheService.GetUser(chat);
var dialog = new ScheduleMessagePopup(user, DateTime.Now, CacheService.IsSavedMessages(chat));
var confirm = await dialog.ShowQueuedAsync();
if (confirm != ContentDialogResult.Primary)
{
return null;
}
if (dialog.IsUntilOnline)
{
// Deliver as soon as the recipient comes online.
return new MessageSendOptions(false, false, false, new MessageSchedulingStateSendWhenOnline());
}
else
{
return new MessageSendOptions(false, false, false, new MessageSchedulingStateSendAtDate(dialog.Value.ToTimestamp()));
}
}
else
{
return new MessageSendOptions(silent ?? false, false, false, null);
}
}
/// <summary>
/// Sends the given message content to TDLib and surfaces known errors.
/// Always returns the raw TDLib response (possibly an <c>Error</c>).
/// </summary>
private async Task<BaseObject> SendMessageAsync(Chat chat, long replyToMessageId, InputMessageContent inputMessageContent, MessageSendOptions options)
{
if (options == null)
{
options = new MessageSendOptions(false, false, false, null);
}
var response = await ProtoService.SendAsync(new SendMessage(chat.Id, _threadId, replyToMessageId, options, null, inputMessageContent));
if (response is Error error)
{
// NOTE(review): these two branches are intentionally empty in the original —
// flood and ban errors are recognized but currently not surfaced to the user.
if (error.TypeEquals(ErrorType.PEER_FLOOD))
{
}
else if (error.TypeEquals(ErrorType.USER_BANNED_IN_CHANNEL))
{
}
else if (error.TypeEquals(ErrorType.SCHEDULE_TOO_MUCH))
{
await MessagePopup.ShowAsync(Strings.Resources.MessageScheduledLimitReached, Strings.Resources.AppName, Strings.Resources.OK);
}
}
return response;
}
public RelayCommand SendLocationCommand { get; }
/// <summary>
/// Shows the location picker popup and sends the chosen location/venue message.
/// </summary>
private async void SendLocationExecute()
{
var chat = _chat;
if (chat == null)
{
return;
}
var dialog = new SendLocationPopup();
//page.LiveLocation = !_liveLocationService.IsTracking(Peer.ToPeer());
var confirm = await dialog.OpenAsync();
if (confirm == ContentDialogResult.Primary)
{
var options = await PickMessageSendOptionsAsync();
if (options == null)
{
return;
}
var reply = GetReply(true);
// The popup produces a ready-to-send InputMessageContent.
var input = dialog.Media;
await SendMessageAsync(chat, reply, input, options);
//if (page.Media is TLMessageMediaVenue venue)
//{
//    await SendGeoAsync(venue);
//}
//else if (page.Media is TLMessageMediaGeoLive geoLive)
//{
//    if (geoLive.Geo == null || geoLive.Period == 0 || _liveLocationService.IsTracking(Peer.ToPeer()))
//    {
//        _liveLocationService.StopTracking(Peer.ToPeer());
//    }
//    else
//    {
//        await SendGeoAsync(geoLive);
//    }
//}
//else if (page.Media is TLMessageMediaGeo geo && geo.Geo is TLGeoPoint geoPoint)
//{
//    await SendGeoAsync(geoPoint.Lat, geoPoint.Long);
//}
}
//NavigationService.Navigate(typeof(DialogSendLocationPage));
}
public RelayCommand SendPollCommand { get; }
/// <summary>
/// Opens the poll composer; polls in broadcast channels are forced anonymous.
/// </summary>
private async void SendPollExecute()
{
await SendPollAsync(false, false, _chat?.Type is ChatTypeSupergroup super && super.IsChannel);
}
/// <summary>
/// Shows the poll-creation popup and sends the resulting poll message.
/// The three flags constrain which poll kinds the popup offers.
/// </summary>
private async Task SendPollAsync(bool forceQuiz, bool forceRegular, bool forceAnonymous)
{
var chat = _chat;
if (chat == null)
{
return;
}
var dialog = new CreatePollPopup(forceQuiz, forceRegular, forceAnonymous);
var confirm = await dialog.ShowQueuedAsync();
if (confirm != ContentDialogResult.Primary)
{
return;
}
var options = await PickMessageSendOptionsAsync();
if (options == null)
{
return;
}
var reply = GetReply(true);
var input = new InputMessagePoll(dialog.Question, dialog.Options, dialog.IsAnonymous, dialog.Type, 0, 0, false);
await SendMessageAsync(chat, reply, input, options);
}
//public Task<bool> SendGeoAsync(TLMessageMediaGeoLive media)
//{
// var tsc = new TaskCompletionSource<bool>();
// var date = TLUtils.DateToUniversalTimeTLInt(DateTime.Now);
// //var message = TLUtils.GetMessage(SettingsHelper.UserId, Peer.ToPeer(), true, true, date, string.Empty, media, 0L, null);
// //var previousMessage = InsertSendingMessage(message);
// //CacheService.SyncSendingMessage(message, previousMessage, async (m) =>
// //{
// // var inputMedia = media.ToInputMedia();
// // var result = await LegacyService.SendMediaAsync(Peer, inputMedia, message);
// // if (result.IsSucceeded)
// // {
// // tsc.SetResult(true);
// // await _liveLocationService.TrackAsync(message);
// // }
// // else
// // {
// // tsc.SetResult(false);
// // }
// //});
// return tsc.Task;
//}
/// <summary>
/// Sends up to one album (max 10 items, enforced by the caller) as a single
/// SendMessageAlbum request. For photo/video albums the caption is attached to the
/// first item; for file/audio albums it is attached to the last item instead.
/// </summary>
private async Task<BaseObject> SendGroupedAsync(ICollection<StorageMedia> items, FormattedText caption, MessageSendOptions options, bool asFile)
{
var chat = _chat;
if (chat == null)
{
return null;
}
var reply = GetReply(true);
var operations = new List<InputMessageContent>();
var firstCaption = asFile ? null : caption;
// An album is sent as audio only when every item is audio.
var audio = items.All(x => x is StorageAudio);
foreach (var item in items)
{
if (asFile || audio)
{
// NOTE(review): relies on reference equality with items.Last() to detect the
// final item (and re-enumerates per iteration) — works for these collections,
// but fragile if duplicates or lazy sequences were ever passed in.
if (item == items.Last())
{
firstCaption = caption;
}
var factory = await _messageFactory.CreateDocumentAsync(item.File, !audio);
if (factory != null)
{
var input = factory.Delegate(factory.InputFile, firstCaption);
operations.Add(input);
firstCaption = null;
}
}
else if (item is StoragePhoto photo)
{
var factory = await _messageFactory.CreatePhotoAsync(photo.File, asFile, photo.Ttl, photo.IsEdited ? photo.EditState : null);
if (factory != null)
{
var input = factory.Delegate(factory.InputFile, firstCaption);
operations.Add(input);
firstCaption = null;
}
}
else if (item is StorageVideo video)
{
var factory = await _messageFactory.CreateVideoAsync(video.File, video.IsMuted, asFile, video.Ttl, await video.GetEncodingAsync(), video.GetTransform());
if (factory != null)
{
var input = factory.Delegate(factory.InputFile, firstCaption);
operations.Add(input);
firstCaption = null;
}
}
}
return await ProtoService.SendAsync(new SendMessageAlbum(chat.Id, _threadId, reply, options, operations));
}
/// <summary>
/// Parses plain text as Markdown via TDLib's synchronous ParseMarkdown and returns
/// the resulting FormattedText; null input yields an empty FormattedText.
/// </summary>
private FormattedText GetFormattedText(string text)
{
if (text == null)
{
return new FormattedText();
}
text = text.Format();
return Client.Execute(new ParseMarkdown(new FormattedText(text, new TextEntity[0]))) as FormattedText;
}
/// <summary>
/// Handles pasted/dropped clipboard content: bitmaps are transcoded to PNG and sent
/// as media, storage items are sent as files, and text/web links are inserted into
/// the composer at the current caret position. Format checks are ordered by priority.
/// </summary>
public async Task HandlePackageAsync(DataPackageView package)
{
try
{
if (package.AvailableFormats.Contains(StandardDataFormats.Bitmap))
{
var bitmap = await package.GetBitmapAsync();
var media = new List<StorageFile>();
var fileName = string.Format("image_{0:yyyy}-{0:MM}-{0:dd}_{0:HH}-{0:mm}-{0:ss}.png", DateTime.Now);
var cache = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(fileName, CreationCollisionOption.GenerateUniqueName);
using (var stream = await bitmap.OpenReadAsync())
{
// Normalize whatever bitmap format the clipboard holds to PNG on disk.
var result = await ImageHelper.TranscodeAsync(stream, cache, BitmapEncoder.PngEncoderId);
media.Add(result);
}
var captionElements = new List<string>();
if (package.AvailableFormats.Contains(StandardDataFormats.Text))
{
var text = await package.GetTextAsync();
captionElements.Add(text);
}
FormattedText caption = null;
if (captionElements.Count > 0)
{
var resultCaption = string.Join(Environment.NewLine, captionElements);
caption = new FormattedText(resultCaption, new TextEntity[0])
.Substring(0, CacheService.Options.MessageCaptionLengthMax);
}
SendFileExecute(media, caption);
}
else if (package.AvailableFormats.Contains(StandardDataFormats.StorageItems))
{
var items = await package.GetStorageItemsAsync();
var files = new List<StorageFile>(items.Count);
// Folders (StorageFolder) in the drop are ignored.
foreach (var file in items.OfType<StorageFile>())
{
files.Add(file);
}
SendFileExecute(files);
}
//else if (e.DataView.Contains(StandardDataFormats.WebLink))
//{
//    // TODO: Invoke getting a preview of the weblink above the Textbox
//    var link = await e.DataView.GetWebLinkAsync();
//    if (TextField.Text == "")
//    {
//        TextField.Text = link.AbsolutePath;
//    }
//    else
//    {
//        TextField.Text = (TextField.Text + " " + link.AbsolutePath);
//    }
//
//    gridLoading.Visibility = Visibility.Collapsed;
//
//}
else if (package.AvailableFormats.Contains(StandardDataFormats.Text))
{
var field = TextField;
if (field == null)
{
return;
}
var text = await package.GetTextAsync();
if (package.Contains(StandardDataFormats.WebLink))
{
var link = await package.GetWebLinkAsync();
text += Environment.NewLine + link.AbsoluteUri;
}
// Insert at the caret without replacing the current selection contents.
field.Document.GetRange(field.Document.Selection.EndPosition, field.Document.Selection.EndPosition).SetText(TextSetOptions.None, text);
}
else if (package.AvailableFormats.Contains(StandardDataFormats.WebLink))
{
var field = TextField;
if (field == null)
{
return;
}
var link = await package.GetWebLinkAsync();
field.Document.GetRange(field.Document.Selection.EndPosition, field.Document.Selection.EndPosition).SetText(TextSetOptions.None, link.AbsoluteUri);
}
}
// Clipboard access is flaky (content can vanish between check and read); ignore failures.
catch { }
}
public RelayCommand EditDocumentCommand { get; }
/// <summary>
/// While editing a message, lets the user pick a replacement file to swap in as
/// the message's document. Only updates the pending edit state; sending happens later.
/// </summary>
private async void EditDocumentExecute()
{
var header = _composerHeader;
if (header?.EditingMessage == null)
{
return;
}
try
{
var picker = new FileOpenPicker();
picker.ViewMode = PickerViewMode.Thumbnail;
picker.SuggestedStartLocation = PickerLocationId.DocumentsLibrary;
picker.FileTypeFilter.Add("*");
var file = await picker.PickSingleFileAsync();
if (file == null)
{
return;
}
var factory = await _messageFactory.CreateDocumentAsync(file, false);
if (factory != null)
{
header.EditingMessageMedia = factory;
}
}
catch { }
}
public RelayCommand EditMediaCommand { get; }
/// <summary>
/// While editing a message, lets the user pick a replacement photo/video and routes
/// it through <see cref="EditMediaAsync(StorageFile)"/>.
/// </summary>
private async void EditMediaExecute()
{
var header = _composerHeader;
if (header?.EditingMessage == null)
{
return;
}
try
{
var picker = new FileOpenPicker();
picker.ViewMode = PickerViewMode.Thumbnail;
picker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
picker.FileTypeFilter.AddRange(Constants.MediaTypes);
var file = await picker.PickSingleFileAsync();
if (file == null)
{
return;
}
await EditMediaAsync(file);
}
catch { }
}
public RelayCommand EditCurrentCommand { get; }
/// <summary>
/// Re-opens the media of the message being edited (if fully downloaded) in the
/// media editor so crops/effects can be changed in place.
/// </summary>
private async void EditCurrentExecute()
{
var header = _composerHeader;
if (header?.EditingMessage == null)
{
return;
}
var file = header.EditingMessage.GetFile();
if (file == null || !file.Local.IsDownloadingCompleted)
{
return;
}
var storageFile = await ProtoService.GetFileAsync(file);
if (storageFile == null)
{
return;
}
await EditMediaAsync(storageFile);
}
/// <summary>
/// Shows the single-item media editor for <paramref name="file"/> and, on confirm,
/// stores the resulting photo/video factory as the pending media for the message
/// currently being edited. The composer caption is round-tripped through the dialog.
/// </summary>
public async Task EditMediaAsync(StorageFile file)
{
var header = _composerHeader;
if (header?.EditingMessage == null)
{
return;
}
var storage = await StorageMedia.CreateAsync(file);
if (storage == null)
{
return;
}
var formattedText = GetFormattedText(true);
var dialog = new SendFilesPopup(new[] { storage }, true, false, false, false);
dialog.Caption = formattedText
.Substring(0, CacheService.Options.MessageCaptionLengthMax);
var confirm = await dialog.OpenAsync();
TextField?.Focus(FocusState.Programmatic);
if (confirm != ContentDialogResult.Primary)
{
return;
}
// Push the (possibly edited) caption back into the composer.
TextField?.SetText(dialog.Caption);
Task<InputMessageFactory> request = null;
if (storage is StoragePhoto)
{
request = _messageFactory.CreatePhotoAsync(storage.File, dialog.IsFilesSelected, storage.Ttl, storage.IsEdited ? storage.EditState : null);
}
else if (storage is StorageVideo video)
{
request = _messageFactory.CreateVideoAsync(storage.File, video.IsMuted, dialog.IsFilesSelected, storage.Ttl, await video.GetEncodingAsync(), video.GetTransform());
}
if (request == null)
{
return;
}
var factory = await request;
if (factory != null)
{
header.EditingMessageMedia = factory;
}
}
}
}
| gpl-3.0 |
frazzy123/madisonclone | app/models/Notification.php | 3367 | <?php
/**
 * Eloquent model for per-user, per-group and admin-wide event notifications.
 * Each row binds an event name to a delivery type (email/text) and to an owner:
 * a user, a group, or — when both are null — the site administrators.
 */
class Notification extends Eloquent
{
// Supported delivery channels.
const TYPE_EMAIL = "email";
const TYPE_TEXT = "text";
protected $table = 'notifications';
protected $softDelete = false;
public $timestamps = false;
/**
 * Group that owns this notification subscription (nullable).
 */
public function group()
{
return $this->belongsTo('Group', 'group_id');
}
/**
 * User that owns this notification subscription (nullable).
 */
public function user()
{
return $this->belongsTo('User', 'user_id');
}
/**
 * Return notifications registered for a given event
 *
 * @param string $event
 */
static public function getActiveNotifications($event)
{
return static::where('event', '=', $event)->get();
}
/**
 * Return array of valid admin notifications
 *
 * @return array
 */
static public function getValidNotifications()
{
return MadisonEvent::validAdminNotifications();
}
/**
 * Return array of valid user notifications
 *
 * @param void
 * @return array
 */
static public function getUserNotifications(){
return MadisonEvent::validUserNotifications();
}
/**
 * Subscribe a user to an event; no-op if the subscription already exists.
 *
 * @param string $event
 * @param int $user_id
 * @param string $type one of the TYPE_* constants
 */
static public function addNotificationForUser($event, $user_id, $type = self::TYPE_EMAIL)
{
$notification = static::where('user_id', '=', $user_id)
->where('event', '=', $event)
->where('type', '=', $type)
->first();
if($notification) {
return;
}
$notification = new static();
$notification->event = $event;
$notification->user_id = $user_id;
$notification->type = $type;
return $notification->save();
}
/**
 * Subscribe a group to an event; no-op if the subscription already exists.
 *
 * @param string $event
 * @param int $group_id
 * @param string $type
 */
static public function addNotificationForGroup($event, $group_id, $type = self::TYPE_EMAIL)
{
$notification = static::where('group_id', '=', $group_id)
->where('event', '=', $event)
->where('type', '=', $type)
->first();
if($notification) {
return;
}
$notification = new static();
$notification->event = $event;
$notification->group_id = $group_id;
$notification->type = $type;
return $notification->save();
}
/**
 * Subscribe site admins (rows with null user_id and group_id) to an event.
 *
 * @param string $event
 * @param string $type
 */
static public function addNotificationForAdmin($event, $type = self::TYPE_EMAIL)
{
$notification = static::where('user_id', '=', null)
->where('group_id', '=', null)
->where('event', '=', $event)
->where('type', '=', $type)
->first();
if($notification) {
return;
}
$notification = new static();
$notification->event = $event;
$notification->group_id = null;
$notification->user_id = null;
$notification->type = $type;
return $notification->save();
}
/**
 * Remove the admin subscription for an event, if present.
 *
 * @param string $event
 * @param string $type
 */
static public function removeNotificationForAdmin($event, $type = self::TYPE_EMAIL)
{
$notification = static::where('user_id', '=', null)
->where('group_id', '=', null)
->where('event', '=', $event)
->where('type', '=', $type)
->first();
if($notification) {
$notification->delete();
}
}
/**
 * Remove a user's subscription for an event, if present.
 *
 * @param string $event
 * @param int $user_id
 * @param string $type
 */
static public function removeNotificationForUser($event, $user_id, $type = self::TYPE_EMAIL)
{
$notification = static::where('user_id', '=', $user_id)
->where('event', '=', $event)
->where('type', '=', $type)
->first();
if($notification) {
$notification->delete();
}
}
/**
 * Remove a group's subscription for an event, if present.
 *
 * @param string $event
 * @param int $group_id
 * @param string $type
 */
static public function removeNotificationForGroup($event, $group_id, $type = self::TYPE_EMAIL)
{
$notification = static::where('group_id', '=', $group_id)
->where('event', '=', $event)
->where('type', '=', $type)
->first();
if($notification) {
$notification->delete();
}
}
}
exteso/alf.io | src/main/java/alfio/model/EventAndOrganizationId.java | 1369 | /**
* This file is part of alf.io.
*
* alf.io is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* alf.io is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with alf.io. If not, see <http://www.gnu.org/licenses/>.
*/
package alfio.model;
import alfio.manager.system.ConfigurationLevel;
import ch.digitalfondue.npjt.ConstructorAnnotationRowMapper.Column;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.Getter;
/**
 * Lightweight projection of an event: just its id and the id of the owning
 * organization. Implements {@link Configurable} so configuration lookups can be
 * scoped at event level without loading the full event entity.
 */
@Getter
public class EventAndOrganizationId implements Configurable {
protected final int id;
protected final int organizationId;
public EventAndOrganizationId(@Column("id") int id,
@Column("org_id") int organizationId) {
this.id = id;
this.organizationId = organizationId;
}
/**
 * @return an event-scoped configuration level for this instance
 */
@JsonIgnore
@Override
public ConfigurationLevel getConfigurationLevel() {
return ConfigurationLevel.event(this);
}
}
| gpl-3.0 |
craigiswayne/round-robin | single.php | 107 | <?php get_header(); ?>
<?php get_template_part('templates/content','controller'); ?>
<?php get_footer();
| gpl-3.0 |
madpilot78/ntopng | scripts/plugins/monitors/system/redis_monitor/web_gui/redis_stats.lua | 7753 | --
-- (C) 2013-21 - ntop.org
--
-- Page setup: extend the module search path, load dependencies, enforce access,
-- emit headers/menu and render the Redis monitor navbar.
local dirs = ntop.getDirs()
package.path = dirs.installdir .. "/scripts/lua/modules/?.lua;" .. package.path
if((dirs.scriptdir ~= nil) and (dirs.scriptdir ~= "")) then package.path = dirs.scriptdir .. "/lua/modules/?.lua;" .. package.path end
require "lua_utils"
local page_utils = require("page_utils")
local alert_consts = require("alert_consts")
local plugins_utils = require("plugins_utils")
local graph_utils = require("graph_utils")
local alert_utils = require("alert_utils")
-- Charts tab is only shown when timeseries creation is enabled.
local charts_available = plugins_utils.timeseriesCreationEnabled()
-- This page is restricted to users allowed on the system interface.
if not isAllowedSystemInterface() then
return
end
sendHTTPContentTypeHeader('text/html')
page_utils.set_active_menu_entry(page_utils.menu_entries.redis_monitor)
dofile(dirs.installdir .. "/scripts/lua/inc/menu.lua")
local page = _GET["page"] or "overview"
local url = plugins_utils.getUrl("redis_stats.lua") .. "?ifid=" .. getInterfaceId(ifname)
page_utils.print_navbar("Redis", url,
{
{
active = page == "overview" or not page,
page_name = "overview",
label = "<i class=\"fas fa-lg fa-home\"></i>",
},
{
active = page == "stats",
page_name = "stats",
label = "<i class=\"fas fa-lg fa-wrench\"></i>",
},
{
hidden = not charts_available,
active = page == "historical",
page_name = "historical",
label = "<i class='fas fa-lg fa-chart-area'></i>",
},
}
)
-- #######################################################
-- Page dispatch: "overview" renders live health/memory/keys widgets refreshed via
-- AJAX, "stats" renders the per-command datatable, "historical" renders timeseries.
if(page == "overview") then
local fa_external = "<i class='fas fa-external-link-alt'></i>"
local tags = {ifid=getSystemInterfaceId()}
print("<table class=\"table table-bordered table-striped\">\n")
if not ntop.isWindows() then
-- NOTE: on Windows, some stats are missing from script.getRedisStatus()
print("<tr><td nowrap width='30%'><b>".. i18n("system_stats.health") .."</b><br><small>"..i18n("system_stats.redis.short_desc_redis_health").."</small></td><td></td><td><span id='throbber' class='spinner-border redis-info-load spinner-border-sm text-primary' role='status'><span class='sr-only'>Loading...</span></span> <span id=\"redis-health\"></span></td></tr>\n")
end
print("<tr><td nowrap width='30%'><b>".. i18n("about.ram_memory") .."</b><br><small>"..i18n("system_stats.redis.short_desc_redis_ram_memory").."</small></td>")
print("<td class='text-center' width=5%>")
print(ternary(charts_available, "<A HREF='"..url.."&page=historical&ts_schema=redis:memory'><i class='fas fa-lg fa-chart-area'></i></A>", ""))
print("</td><td><span id='throbber' class='spinner-border redis-info-load spinner-border-sm text-primary' role='status'><span class='sr-only'>Loading...</span></span> <span id=\"redis-info-memory\"></span></td></tr>\n")
if not ntop.isWindows() then
print("<tr><td nowrap width='30%'><b>".. i18n("system_stats.redis.redis_keys") .."</b><br><small>"..i18n("system_stats.redis.short_desc_redis_keys").."</small></td>")
print("<td class='text-center' width=5%>")
print(ternary(charts_available, "<A HREF='"..url.."&page=historical&ts_schema=redis:keys'><i class='fas fa-chart-area fa-lg'></i></A>", ""))
print("</td><td><span id='throbber' class='spinner-border redis-info-load spinner-border-sm text-primary' role='status'><span class='sr-only'>Loading...</span></span> <span id=\"redis-info-keys\"></span></td></tr>\n")
end
-- Client-side poller: fetches get_redis_info.lua every 5s and updates the widgets.
print[[<script>
var last_keys, last_memory
var health_descr = {
]]
print('"green" : {"status" : "<span class=\'badge bg-success\'>'..i18n("system_stats.redis.redis_health_green")..'</span>", "descr" : "<small>'..i18n("system_stats.redis.redis_health_green_descr")..'</small>"},')
print('"red" : {"status" : "<span class=\'badge bg-danger\'>'..i18n("system_stats.redis.redis_health_red")..'</span>", "descr" : "<small>'..i18n("system_stats.redis.redis_health_red_descr", {product = ntop.getInfo()["product"]})..'</small>"},')
print[[
};
function refreshRedisStats() {
$.get("]] print(plugins_utils.getUrl("get_redis_info.lua")) print[[", function(info) {
$(".redis-info-load").hide();
if(typeof info.health !== "undefined" && health_descr[info.health]) {
$("#redis-health").html(health_descr[info.health]["status"] + "<br>" + health_descr[info.health]["descr"]);
}
if(typeof info.dbsize !== "undefined") {
$("#redis-info-keys").html(NtopUtils.formatValue(info.dbsize) + " ");
if(typeof last_keys !== "undefined")
$("#redis-info-keys").append(NtopUtils.drawTrend(info.dbsize, last_keys));
last_keys = info.dbsize;
}
if(typeof info.memory !== "undefined") {
$("#redis-info-memory").html(NtopUtils.bytesToVolume(info.memory) + " ");
if(typeof last_memory !== "undefined")
$("#redis-info-memory").append(NtopUtils.drawTrend(info.memory, last_memory));
last_memory = info.memory;
}
}).fail(function() {
$(".redis-info-load").hide();
});
}
setInterval(refreshRedisStats, 5000);
refreshRedisStats();
</script>
]]
print("</table>\n")
elseif(page == "stats") then
-- Per-command hit counters, served by get_redis_stats.lua and auto-refreshed.
print [[
<div id="table-redis-stats"></div>
<script type='text/javascript'>
$("#table-redis-stats").datatable({
title: "",
perPage: 100,
hidePerPage: true,
url: "]] print(plugins_utils.getUrl("get_redis_stats.lua")) print(ntop.getHttpPrefix()) print[[",
columns: [
{
field: "column_key",
hidden: true,
css: {
width: '15%',
}
}, {
field: "column_command",
sortable: true,
title: "]] print(i18n("please_wait_page.command")) print[[",
css: {
width: '15%',
}
}, {
title: "]] print(i18n("chart")) print[[",
field: "column_chart",
hidden: ]] if not charts_available then print("true") else print("false") end print[[,
sortable: false,
css: {
textAlign: 'center',
width: '5%',
}
}, {
title: "]] print(i18n("system_stats.redis.tot_calls")) print[[",
field: "column_hits",
sortable: true,
css: {
textAlign: 'right'
}
}
], tableCallback: function() {
datatableInitRefreshRows($("#table-redis-stats"), "column_key", 5000, {"column_hits": NtopUtils.addCommas});
}
});
</script>
]]
elseif(page == "historical" and charts_available) then
local ts_utils = require("ts_utils")
local schema = _GET["ts_schema"] or "redis:memory"
local selected_epoch = _GET["epoch"] or ""
local tags = {ifid = getSystemInterfaceId(), command = _GET["redis_command"]}
url = url.."&page=historical"
local timeseries = {
{schema = "redis:memory", label = i18n("about.ram_memory")},
{schema = "redis:keys", label = i18n("system_stats.redis.redis_keys")},
{separator=1, label=i18n("system_stats.redis.commands")},
}
-- Populate individual commands timeseries
local series = ts_utils.listSeries("redis:hits", {ifid = getSystemInterfaceId()}, 0)
if(series) then
for _, serie in pairsByField(series, "command", asc) do
timeseries[#timeseries + 1] = {
schema = "redis:hits",
-- serie.command is prefixed (first 4 chars stripped for display).
label = i18n("system_stats.redis.command_hits", {cmd = string.upper(string.sub(serie.command, 5))}),
extra_params = {redis_command = serie.command},
metrics_labels = {i18n("graphs.num_calls")},
}
end
end
graph_utils.drawGraphs(getSystemInterfaceId(), schema, tags, _GET["zoom"], url, selected_epoch, {
top_redis_hits = "top:redis:hits",
timeseries = timeseries,
})
end
-- #######################################################
dofile(dirs.installdir .. "/scripts/lua/inc/footer.lua")
| gpl-3.0 |
aleatorio12/ProVentasConnector | src/net/pixhan/negocio/UtilNegocio.java | 15562 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package net.pixhan.negocio;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.pixhan.utilidades.DatosProducto;
/**
*
* Autor: Victor Matías <vitomany@reproducir.net>
* Fecha: 9/04/2016
* Hora: 06:12:26 PM
* Web: https://reproducir.net
*/
public class UtilNegocio {

    /** Value of the stored procedures' {@code existeError} out-parameter when no error occurred. */
    private static final int SIN_ERROR = 0;

    /**
     * Inserts a new business area via {@code punto_venta.agregarAreaNegocio}.
     *
     * @param nombre      business area name
     * @param descripcion business area description
     * @param conexion    open JDBC connection; never closed by this method
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean agregarAreaNegocio ( String nombre, String descripcion, Connection conexion ) throws SQLException
    {
        // try-with-resources closes the statement even when execute() throws,
        // which the previous code leaked.
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.agregarAreaNegocio(?, ?, ?)}") )
        {
            cstmt.setString("nombre", nombre);
            cstmt.setString("descripcion", descripcion);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Inserts a primary class under a business area via {@code punto_venta.agregarClasePrimaria}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean agregarClasePrimaria ( int areaNegocio, String nombre, String descripcion, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.agregarClasePrimaria(?, ?, ?, ?)}") )
        {
            cstmt.setInt("areaNegocio", areaNegocio);
            cstmt.setString("nombre", nombre);
            cstmt.setString("descripcion", descripcion);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Inserts a secondary class under a primary class via {@code punto_venta.agregarClaseSecundaria}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean agregarClaseSecundaria ( int clasePrimaria, String nombre, String descripcion, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.agregarClaseSecundaria(?, ?, ?, ?)}") )
        {
            cstmt.setInt("clasePrimaria", clasePrimaria);
            cstmt.setString("nombre", nombre);
            cstmt.setString("descripcion", descripcion);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Inserts a tertiary class under a secondary class via {@code punto_venta.agregarClaseTercearia}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean agregarClaseTercearia ( int claseSecundaria, String nombre, String descripcion, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.agregarClaseTercearia(?, ?, ?, ?)}") )
        {
            cstmt.setInt("claseSecundaria", claseSecundaria);
            cstmt.setString("nombre", nombre);
            cstmt.setString("descripcion", descripcion);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Registers a product with its full classification and pricing data
     * via {@code punto_venta.agregarProducto}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean agregarProducto (
            String nombre,
            String descripcion,
            String fabricante,
            int producto,
            int existencia,
            int alerta,
            int usuario,
            int areaNegocio,
            int clasePrimaria,
            int claseSecundaria,
            int claseTercearia,
            float costo,
            float precioVenta,
            float descuento,
            Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.agregarProducto(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?, ? )}") )
        {
            cstmt.setString("nombre", nombre);
            cstmt.setString("descripcion", descripcion);
            cstmt.setString("fabricante", fabricante);
            cstmt.setInt("producto", producto);
            cstmt.setInt("existencia", existencia);
            cstmt.setInt("alerta", alerta);
            cstmt.setInt("usuario", usuario);
            cstmt.setInt("areaNegocio", areaNegocio);
            cstmt.setInt("clasePrimaria", clasePrimaria);
            cstmt.setInt("claseSecundaria", claseSecundaria);
            cstmt.setInt("claseTercearia", claseTercearia);
            cstmt.setFloat("costo", costo);
            cstmt.setFloat("precioVenta", precioVenta);
            cstmt.setFloat("descuento", descuento);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Registers a user via {@code punto_venta.agregarUsuario}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean agregarUsuario ( int usuario, String primerNombre, String primerApellido, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.agregarUsuario(?, ?, ?, ?)}") )
        {
            cstmt.setInt("usuario", usuario);
            cstmt.setString("primerNombre", primerNombre);
            cstmt.setString("primerApellido", primerApellido);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Assigns an opening balance to a user via {@code punto_venta.asignarSaldosIniciales}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean asignarSaldosIniciales ( int usuario, float cantidadAsignada, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.asignarSaldosIniciales(?, ?, ?)}") )
        {
            cstmt.setInt("usuario", usuario);
            cstmt.setFloat("cantidadAsignada", cantidadAsignada);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Records a stock purchase via {@code punto_venta.comprarProducto}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean comprarProducto ( int producto, int cantidad, int usuario, float costo, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.comprarProducto(?, ?, ?, ?, ?)}") )
        {
            cstmt.setInt("producto", producto);
            cstmt.setInt("cantidad", cantidad);
            cstmt.setInt("usuario", usuario);
            cstmt.setFloat("costo", costo);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Records a cash transaction via {@code punto_venta.insertarTransaccion}.
     * Only types {@code "i"} (income) and {@code "e"} (expense) are accepted;
     * any other type is reported as an error without touching the database
     * (same contract as the original implementation).
     *
     * @return {@code true} on error or unknown type, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean insertarTransaccion ( String tipo, int usuario, float cantidad, Connection conexion ) throws SQLException
    {
        if ( !tipo.equalsIgnoreCase("i") && !tipo.equalsIgnoreCase("e") )
        {
            return true;
        }
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.insertarTransaccion(?, ?, ?, ?)}") )
        {
            cstmt.setString("tipo", tipo);
            cstmt.setInt("usuario", usuario);
            cstmt.setFloat("cantidad", cantidad);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Records a sale via {@code punto_venta.venderProducto}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean venderProducto ( int producto, int usuario, float cantidad, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.venderProducto(?, ?, ?, ?)}") )
        {
            cstmt.setInt("producto", producto);
            cstmt.setInt("usuario", usuario);
            cstmt.setFloat("cantidad", cantidad);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Looks a product up (by id or name) via {@code punto_venta.seleccionarProducto}.
     *
     * @return the current stock of the product, or 0 when the procedure
     *         reported an error (0 is also a legal stock value, as in the
     *         original contract)
     * @throws SQLException if the database call fails
     */
    public static int seleccionarProducto ( int producto, String nombreProducto, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.seleccionarProducto(?, ?, ?, ?)}") )
        {
            cstmt.setInt("producto", producto);
            cstmt.setString("nombre", nombreProducto);
            cstmt.registerOutParameter("existencia", java.sql.Types.SMALLINT);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            if ( cstmt.getInt("existeError") == SIN_ERROR )
            {
                return cstmt.getInt("existencia");
            }
            return 0; // the procedure signalled an error
        }
    }

    /**
     * Updates editable product fields via {@code punto_venta.actualizarDatosProducto}.
     *
     * @return {@code true} if the procedure reported an error, {@code false} on success
     * @throws SQLException if the database call fails
     */
    public static boolean actualizarDatosProducto ( int producto, int alerta, int descuento, float precioVenta, String nombreProducto, String descripcion, Connection conexion ) throws SQLException
    {
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.actualizarDatosProducto(?, ?, ?, ?, ?, ?, ?)}") )
        {
            cstmt.setInt("producto", producto);
            cstmt.setInt("alerta", alerta);
            cstmt.setInt("descuento", descuento);
            cstmt.setFloat("precioVenta", precioVenta);
            cstmt.setString("nombre", nombreProducto);
            cstmt.setString("descripcion", descripcion);
            cstmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cstmt.execute();
            return cstmt.getInt("existeError") != SIN_ERROR;
        }
    }

    /**
     * Drains the result set of a class-listing procedure into {@code destino}.
     * Shared by {@link #devolverNombresClases} and {@link #devolverNombresClasesXP}.
     */
    private static void agregarFilasClases ( CallableStatement cstmt, ArrayList<DatosClases> destino ) throws SQLException
    {
        try ( ResultSet rs = cstmt.getResultSet() )
        {
            while ( rs.next() )
            {
                destino.add(new DatosClases(rs.getString("NOMBRE"), rs.getInt("ID")));
            }
        }
    }

    /**
     * Lists the class names of the given type via {@code punto_venta.devolverNombresClases}.
     * Best-effort, as before: on SQL failure the error is logged and the rows
     * read so far (possibly none) are returned.
     *
     * @return list of (name, id) pairs; never {@code null}
     * @throws SQLException declared for interface compatibility; currently
     *         handled internally
     */
    public static ArrayList<DatosClases> devolverNombresClases ( String tipo, Connection conexion ) throws SQLException
    {
        ArrayList<DatosClases> datosClases = new ArrayList<DatosClases>();
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.devolverNombresClases( ? )}") )
        {
            cstmt.setString("tipo", tipo);
            cstmt.execute();
            agregarFilasClases(cstmt, datosClases);
        }
        catch ( SQLException e )
        {
            // Log instead of printing only the vendor error code to stdout.
            Logger.getLogger(UtilNegocio.class.getName()).log(Level.SEVERE, null, e);
        }
        return datosClases;
    }

    /**
     * Lists the class names of the given type belonging to a parent class
     * via {@code punto_venta.devolverNombresClasesXP}. Best-effort like
     * {@link #devolverNombresClases}.
     *
     * @return list of (name, id) pairs; never {@code null}
     * @throws SQLException declared for interface compatibility; currently
     *         handled internally
     */
    public static ArrayList<DatosClases> devolverNombresClasesXP ( String tipo, int clase, Connection conexion ) throws SQLException
    {
        ArrayList<DatosClases> datosClases = new ArrayList<DatosClases>();
        try ( CallableStatement cstmt = conexion.prepareCall("{call punto_venta.devolverNombresClasesXP( ?, ? )}") )
        {
            cstmt.setString("tipo", tipo);
            cstmt.setInt("clase", clase);
            cstmt.execute();
            agregarFilasClases(cstmt, datosClases);
        }
        catch ( SQLException e )
        {
            Logger.getLogger(UtilNegocio.class.getName()).log(Level.SEVERE, null, e);
        }
        return datosClases;
    }

    /**
     * Loads the editable data of one product via {@code punto_venta.cargarDatosProducto}.
     * The product can be addressed by id or by name; when a non-zero id is
     * supplied the name filter is cleared so only the id is used.
     *
     * @return a populated {@link DatosProducto}, or {@code null} when the
     *         procedure reported an error or the call failed
     */
    public static DatosProducto cargarDatosProducto( int identificador, String nombreProducto, Connection conexion ){
        try ( CallableStatement cmt = conexion.prepareCall("{call punto_venta.cargarDatosProducto(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)}") )
        {
            if ( identificador != 0 ){
                nombreProducto = "";
            }
            cmt.setInt("producto", identificador);
            cmt.setString("nombre", nombreProducto);
            cmt.registerOutParameter("idProducto", java.sql.Types.SMALLINT);
            cmt.registerOutParameter("stock", java.sql.Types.SMALLINT);
            cmt.registerOutParameter("alerta", java.sql.Types.SMALLINT);
            // "descuento" must be registered for the call to succeed even
            // though DatosProducto does not currently expose it.
            cmt.registerOutParameter("descuento", java.sql.Types.TINYINT);
            cmt.registerOutParameter("precioVenta", java.sql.Types.FLOAT);
            cmt.registerOutParameter("nombreProducto", java.sql.Types.VARCHAR);
            cmt.registerOutParameter("descripcion", java.sql.Types.VARCHAR);
            cmt.registerOutParameter("existeError", java.sql.Types.TINYINT);
            cmt.execute();
            if ( cmt.getInt("existeError") == SIN_ERROR ){
                DatosProducto datosProducto = new DatosProducto();
                datosProducto.setAlerta( cmt.getInt("alerta") );
                datosProducto.setCantidad( cmt.getInt("stock") );
                datosProducto.setDescripcion( cmt.getString("descripcion" ));
                datosProducto.setIdProducto( cmt.getInt("idProducto" ));
                datosProducto.setNombreProducto( cmt.getString("nombreProducto"));
                datosProducto.setPrecio( cmt.getFloat("precioVenta") );
                return datosProducto;
            }
        } catch (SQLException ex) {
            Logger.getLogger(UtilNegocio.class.getName()).log(Level.SEVERE, null, ex);
        }
        return null;
    }
} | gpl-3.0 |
cbuben/cloud-init | tests/unittests/test_datasource/test_smartos.py | 15826 | # vi: ts=4 expandtab
#
# Copyright (C) 2013 Canonical Ltd.
#
# Author: Ben Howard <ben.howard@canonical.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# This is a testcase for the SmartOS datasource. It replicates a serial
# console and acts like the SmartOS console does in order to validate
# return responses.
#
import base64
from cloudinit import helpers as c_helpers
from cloudinit.sources import DataSourceSmartOS
from .. import helpers
import os
import os.path
import re
import stat
import uuid
# Canned metadata key/value pairs served by MockSerial.  Keys mirror the
# SmartOS metadata namespace ("sdc:*" keys are hypervisor-provided); a None
# value models a key that exists but carries no data.
MOCK_RETURNS = {
    'hostname': 'test-host',
    'root_authorized_keys': 'ssh-rsa AAAAB3Nz...aC1yc2E= keyname',
    'disable_iptables_flag': None,
    'enable_motd_sys_info': None,
    'test-var1': 'some data',
    'cloud-init:user-data': '\n'.join(['#!/bin/sh', '/bin/true', '']),
    'sdc:datacenter_name': 'somewhere2',
    'sdc:operator-script': '\n'.join(['bin/true', '']),
    'sdc:vendor-data': '\n'.join(['VENDOR_DATA', '']),
    'user-data': '\n'.join(['something', '']),
    'user-script': '\n'.join(['/bin/true', '']),
}
# (system uuid, product name) normally read from DMI; the 'smartdc' product
# name is what identifies the platform as SmartOS to the datasource.
DMI_DATA_RETURN = (str(uuid.uuid4()), 'smartdc')
class MockSerial(object):
    """Fake a serial terminal for testing the code that
    interfaces with the serial.

    Emulates the SmartOS metadata console protocol: after ``GET <key>`` is
    written, ``readline()`` first answers ``SUCCESS`` or ``NOTFOUND``, then
    streams the value line by line followed by a ``.`` terminator and a
    final newline.  Once the payload is exhausted ``readline()`` returns
    ``None``.
    """

    port = None

    def __init__(self, mockdata):
        self.last = None        # key of the most recent GET request
        self.new = True         # True until the status line has been read
        self.count = 0          # number of payload lines already consumed
        self.mocked_out = []    # formatted payload lines for the last key
        self.mockdata = mockdata

    def open(self):
        return True

    def close(self):
        return True

    def isOpen(self):
        return True

    def write(self, line):
        # Strip the protocol verb and remember only the requested key.
        line = line.replace('GET ', '')
        self.last = line.rstrip()

    def readline(self):
        if self.new:
            # Very first read: report whether the requested key exists.
            self.new = False
            if self.last in self.mockdata:
                return 'SUCCESS\n'
            else:
                return 'NOTFOUND %s\n' % self.last

        if self.last in self.mockdata:
            if not self.mocked_out:
                self.mocked_out = [x for x in self._format_out()]
            if len(self.mocked_out) > self.count:
                self.count += 1
                return self.mocked_out[self.count - 1]

    def _format_out(self):
        if self.last in self.mockdata:
            _mret = self.mockdata[self.last]
            try:
                for l in _mret.splitlines():
                    yield "%s\n" % l.rstrip()
            except AttributeError:
                # Non-string value (no .splitlines); fall back to its string
                # form.  NOTE(review): a None value still raises
                # AttributeError on .rstrip() below, exactly as the original
                # bare-except code did -- confirm callers never stream
                # None-valued keys.
                yield "%s\n" % _mret.rstrip()

            yield '.'
            yield '\n'
class TestSmartOSDataSource(helpers.FilesystemMockingTestCase):
    """Exercise DataSourceSmartOS against the MockSerial fake console.

    Every test builds a datasource through _get_ds(), which monkey-patches
    the module's serial/DMI/uname hooks; patches are recorded in
    self.unapply and reverted in tearDown().
    """
    def setUp(self):
        helpers.FilesystemMockingTestCase.setUp(self)
        # makeDir comes from MockerTestCase
        self.tmp = self.makeDir()
        self.legacy_user_d = self.makeDir()
        # If you should want to watch the logs...
        self._log = None
        self._log_file = None
        self._log_handler = None
        # patch cloud_dir, so our 'seed_dir' is guaranteed empty
        self.paths = c_helpers.Paths({'cloud_dir': self.tmp})
        self.unapply = []
        super(TestSmartOSDataSource, self).setUp()
    def tearDown(self):
        """Revert every patch applied through self.apply_patches()."""
        helpers.FilesystemMockingTestCase.tearDown(self)
        if self._log_handler and self._log:
            self._log.removeHandler(self._log_handler)
        apply_patches([i for i in reversed(self.unapply)])
        super(TestSmartOSDataSource, self).tearDown()
    def _patchIn(self, root):
        """Re-root filesystem/utils mocking at *root*."""
        self.restore()
        self.patchOS(root)
        self.patchUtils(root)
    def apply_patches(self, patches):
        """Apply *patches* and remember them for reversal in tearDown()."""
        ret = apply_patches(patches)
        self.unapply += ret
    def _get_ds(self, sys_cfg=None, ds_cfg=None, mockdata=None, dmi_data=None):
        """Build a DataSourceSmartOS wired to MockSerial and fake DMI data."""
        mod = DataSourceSmartOS
        if mockdata is None:
            mockdata = MOCK_RETURNS
        if dmi_data is None:
            dmi_data = DMI_DATA_RETURN
        def _get_serial(*_):
            return MockSerial(mockdata)
        def _dmi_data():
            return dmi_data
        def _os_uname():
            # LP: #1243287. tests assume this runs, but running test on
            # arm would cause them all to fail.
            return ('LINUX', 'NODENAME', 'RELEASE', 'VERSION', 'x86_64')
        if sys_cfg is None:
            sys_cfg = {}
        if ds_cfg is not None:
            sys_cfg['datasource'] = sys_cfg.get('datasource', {})
            sys_cfg['datasource']['SmartOS'] = ds_cfg
        self.apply_patches([(mod, 'LEGACY_USER_D', self.legacy_user_d)])
        self.apply_patches([(mod, 'get_serial', _get_serial)])
        self.apply_patches([(mod, 'dmi_data', _dmi_data)])
        self.apply_patches([(os, 'uname', _os_uname)])
        self.apply_patches([(mod, 'device_exists', lambda d: True)])
        dsrc = mod.DataSourceSmartOS(sys_cfg, distro=None,
                                     paths=self.paths)
        return dsrc
    def test_seed(self):
        # default seed should be /dev/ttyS1
        dsrc = self._get_ds()
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals('/dev/ttyS1', dsrc.seed)
    def test_issmartdc(self):
        dsrc = self._get_ds()
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertTrue(dsrc.is_smartdc)
    def test_no_base64(self):
        ds_cfg = {'no_base64_decode': ['test_var1'], 'all_base': True}
        dsrc = self._get_ds(ds_cfg=ds_cfg)
        ret = dsrc.get_data()
        self.assertTrue(ret)
    def test_uuid(self):
        # instance-id must come from the DMI system uuid
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(DMI_DATA_RETURN[0], dsrc.metadata['instance-id'])
    def test_root_keys(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['root_authorized_keys'],
                          dsrc.metadata['public-keys'])
    def test_hostname_b64(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['hostname'],
                          dsrc.metadata['local-hostname'])
    def test_hostname(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['hostname'],
                          dsrc.metadata['local-hostname'])
    def test_base64_all(self):
        # metadata provided base64_all of true
        my_returns = MOCK_RETURNS.copy()
        my_returns['base64_all'] = "true"
        for k in ('hostname', 'cloud-init:user-data'):
            my_returns[k] = base64.b64encode(my_returns[k])
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['hostname'],
                          dsrc.metadata['local-hostname'])
        self.assertEquals(MOCK_RETURNS['cloud-init:user-data'],
                          dsrc.userdata_raw)
        self.assertEquals(MOCK_RETURNS['root_authorized_keys'],
                          dsrc.metadata['public-keys'])
        self.assertEquals(MOCK_RETURNS['disable_iptables_flag'],
                          dsrc.metadata['iptables_disable'])
        self.assertEquals(MOCK_RETURNS['enable_motd_sys_info'],
                          dsrc.metadata['motd_sys_info'])
    def test_b64_userdata(self):
        # per-key b64-<name> flags mark individual values as base64
        my_returns = MOCK_RETURNS.copy()
        my_returns['b64-cloud-init:user-data'] = "true"
        my_returns['b64-hostname'] = "true"
        for k in ('hostname', 'cloud-init:user-data'):
            my_returns[k] = base64.b64encode(my_returns[k])
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['hostname'],
                          dsrc.metadata['local-hostname'])
        self.assertEquals(MOCK_RETURNS['cloud-init:user-data'],
                          dsrc.userdata_raw)
        self.assertEquals(MOCK_RETURNS['root_authorized_keys'],
                          dsrc.metadata['public-keys'])
    def test_b64_keys(self):
        # base64_keys lists which keys are encoded; unknown names are ignored
        my_returns = MOCK_RETURNS.copy()
        my_returns['base64_keys'] = 'hostname,ignored'
        for k in ('hostname',):
            my_returns[k] = base64.b64encode(my_returns[k])
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['hostname'],
                          dsrc.metadata['local-hostname'])
        self.assertEquals(MOCK_RETURNS['cloud-init:user-data'],
                          dsrc.userdata_raw)
    def test_userdata(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['user-data'],
                          dsrc.metadata['legacy-user-data'])
        self.assertEquals(MOCK_RETURNS['cloud-init:user-data'],
                          dsrc.userdata_raw)
    def test_sdc_scripts(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['user-script'],
                          dsrc.metadata['user-script'])
        legacy_script_f = "%s/user-script" % self.legacy_user_d
        self.assertTrue(os.path.exists(legacy_script_f))
        self.assertTrue(os.path.islink(legacy_script_f))
        user_script_perm = oct(os.stat(legacy_script_f)[stat.ST_MODE])[-3:]
        self.assertEquals(user_script_perm, '700')
    def test_scripts_shebanged(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['user-script'],
                          dsrc.metadata['user-script'])
        legacy_script_f = "%s/user-script" % self.legacy_user_d
        self.assertTrue(os.path.exists(legacy_script_f))
        self.assertTrue(os.path.islink(legacy_script_f))
        shebang = None
        with open(legacy_script_f, 'r') as f:
            shebang = f.readlines()[0].strip()
        self.assertEquals(shebang, "#!/bin/bash")
        user_script_perm = oct(os.stat(legacy_script_f)[stat.ST_MODE])[-3:]
        self.assertEquals(user_script_perm, '700')
    def test_scripts_shebang_not_added(self):
        """
        Test that the SmartOS requirement that plain text scripts
        are executable. This test makes sure that plain texts scripts
        with out file magic have it added appropriately by cloud-init.
        """
        my_returns = MOCK_RETURNS.copy()
        my_returns['user-script'] = '\n'.join(['#!/usr/bin/perl',
                                               'print("hi")', ''])
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(my_returns['user-script'],
                          dsrc.metadata['user-script'])
        legacy_script_f = "%s/user-script" % self.legacy_user_d
        self.assertTrue(os.path.exists(legacy_script_f))
        self.assertTrue(os.path.islink(legacy_script_f))
        shebang = None
        with open(legacy_script_f, 'r') as f:
            shebang = f.readlines()[0].strip()
        self.assertEquals(shebang, "#!/usr/bin/perl")
    def test_userdata_removed(self):
        """
        User-data in the SmartOS world is supposed to be written to a file
        each and every boot. This tests to make sure that in the event the
        legacy user-data is removed, the existing user-data is backed-up and
        there is no /var/db/user-data left.
        """
        user_data_f = "%s/mdata-user-data" % self.legacy_user_d
        with open(user_data_f, 'w') as f:
            f.write("PREVIOUS")
        my_returns = MOCK_RETURNS.copy()
        del my_returns['user-data']
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertFalse(dsrc.metadata.get('legacy-user-data'))
        found_new = False
        for root, _dirs, files in os.walk(self.legacy_user_d):
            for name in files:
                name_f = os.path.join(root, name)
                permissions = oct(os.stat(name_f)[stat.ST_MODE])[-3:]
                if re.match(r'.*\/mdata-user-data$', name_f):
                    found_new = True
                    print name_f
                    self.assertEquals(permissions, '400')
        self.assertFalse(found_new)
    def test_vendor_data_not_default(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['sdc:vendor-data'],
                          dsrc.metadata['vendor-data'])
    def test_default_vendor_data(self):
        my_returns = MOCK_RETURNS.copy()
        def_op_script = my_returns['sdc:vendor-data']
        del my_returns['sdc:vendor-data']
        dsrc = self._get_ds(mockdata=my_returns)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertNotEquals(def_op_script, dsrc.metadata['vendor-data'])
        # we expect default vendor-data is a boothook
        self.assertTrue(dsrc.vendordata_raw.startswith("#cloud-boothook"))
    def test_disable_iptables_flag(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['disable_iptables_flag'],
                          dsrc.metadata['iptables_disable'])
    def test_motd_sys_info(self):
        dsrc = self._get_ds(mockdata=MOCK_RETURNS)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEquals(MOCK_RETURNS['enable_motd_sys_info'],
                          dsrc.metadata['motd_sys_info'])
    def test_default_ephemeral(self):
        # Test to make sure that the builtin config has the ephemeral
        # configuration.
        dsrc = self._get_ds()
        cfg = dsrc.get_config_obj()
        ret = dsrc.get_data()
        self.assertTrue(ret)
        assert 'disk_setup' in cfg
        assert 'fs_setup' in cfg
        self.assertIsInstance(cfg['disk_setup'], dict)
        self.assertIsInstance(cfg['fs_setup'], list)
    def test_override_disk_aliases(self):
        # Test to make sure that the built-in DS is overriden
        builtin = DataSourceSmartOS.BUILTIN_DS_CONFIG
        mydscfg = {'disk_aliases': {'FOO': '/dev/bar'}}
        # expect that these values are in builtin, or this is pointless
        for k in mydscfg:
            self.assertIn(k, builtin)
        dsrc = self._get_ds(ds_cfg=mydscfg)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(mydscfg['disk_aliases']['FOO'],
                         dsrc.ds_cfg['disk_aliases']['FOO'])
        self.assertEqual(dsrc.device_name_to_device('FOO'),
                         mydscfg['disk_aliases']['FOO'])
def apply_patches(patches):
    """Monkey-patch attributes and return what is needed to undo it.

    Each patch is an ``(obj, attr_name, replacement)`` triple; triples whose
    replacement is ``None`` are skipped.  Returns a list of
    ``(obj, attr_name, original_value)`` triples which can be fed back into
    this same function to restore the originals.
    """
    undo_info = []
    for obj, attr_name, replacement in patches:
        if replacement is not None:
            previous = getattr(obj, attr_name)
            setattr(obj, attr_name, replacement)
            undo_info.append((obj, attr_name, previous))
    return undo_info
| gpl-3.0 |
Raxa/RaxaMachineLearning | WebContent/app/model/drug.js | 285 | Ext.define('umlsApp.model.drug', {
    // Sencha model describing one drug entry returned by the UMLS lookup
    // service (see the app name 'umlsApp').
    extend : 'Ext.data.Model',
    config: {
        fields: [{
            // Human-readable drug name.
            name: 'drug',
            type: 'string'
        }, {
            // Presumably the UMLS Concept Unique Identifier (CUI) --
            // confirm against the service response.
            name: 'cui',
            type: 'string'
        }, {
            // Source-vocabulary code for the concept (assumption; verify).
            name: 'code',
            type: 'string'
        }]
    }
}); | gpl-3.0 |
yzaguirre/meaction | src/gui/ErroresDialog_GUI.java | 4811 | /**
* UNIVERSIDAD DE SAN CARLOS DE GUATEMALA
* FACULTAD DE INGENIERIA
* ESCUELA DE CIENCIAS Y SISTEMAS
* COMPILADORES 1
* PROYECTO 1
* Creado el 21 de septiembre 2011, 03:41AM
*/
/*
* Muestra los errores presentados por el análisis de compilación
* @(#)ErroresDialog_GUI.java
* Proyecto1_200819312 application
* @author David Y. Gonzalez
*/
package gui;
import java.util.*;
import analisis.*;
import javax.swing.table.*;
/**
*
* @author davidson
*/
public class ErroresDialog_GUI extends javax.swing.JDialog {

    /** Errors shown in the table; kept so the user can clear the list from the UI. May be null. */
    private LinkedList<XError> listaErrores;

    /** Column headers; also used when the constructor receives a null error list. */
    private static final String[] title = new String[]{"Tipo", "Descripción", "Token", "Línea", "Columna"};

    /** Table body built once from the error list (placeholder rows when the list is null). */
    private final String[][] cuerpo;

    /**
     * Creates new form ErroresDialog_GUI.
     *
     * @param parent       owning frame of this dialog
     * @param modal        whether the dialog blocks input to other windows
     * @param listaErrores errors to display; may be null, in which case an
     *                     empty placeholder table is shown
     */
    public ErroresDialog_GUI(java.awt.Frame parent, boolean modal, LinkedList<XError> listaErrores) {
        super(parent, modal);
        this.listaErrores = listaErrores;
        if (listaErrores == null) {
            cuerpo = new String [][] {
                {null, null, null, null, null},
                {null, null, null, null, null},
                {null, null, null, null, null},
                {null, null, null, null, null}
            };
            initComponents();
            return;
        }
        int max = listaErrores.size();
        cuerpo = new String [max][5];
        for (int i = 0; i < max; i++){
            XError error = listaErrores.get(i);
            cuerpo[i][0] = error.getTipo();
            cuerpo[i][1] = error.getDescripcion();
            cuerpo[i][2] = error.getToken();
            cuerpo[i][3] = String.valueOf(error.getLinea());
            cuerpo[i][4] = String.valueOf(error.getColumna());
        }
        initComponents();
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        java.awt.GridBagConstraints gridBagConstraints;
        jScrollPane1 = new javax.swing.JScrollPane();
        jTable1 = new javax.swing.JTable();
        jbBorrarListaErrores = new javax.swing.JButton();
        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        setTitle("JBadMeth - Descripción Errores");
        getContentPane().setLayout(new java.awt.GridBagLayout());
        jTable1.setModel(new DefaultTableModel(cuerpo,title));
        jScrollPane1.setViewportView(jTable1);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.ipadx = 352;
        gridBagConstraints.ipady = 248;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 25, 25);
        getContentPane().add(jScrollPane1, gridBagConstraints);
        jbBorrarListaErrores.setText("Limpiar Lista de Errores");
        jbBorrarListaErrores.setToolTipText("");
        jbBorrarListaErrores.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jbBorrarListaErroresActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        getContentPane().add(jbBorrarListaErrores, gridBagConstraints);
        pack();
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Clears the error list (when one was supplied) and resets the table to
     * empty placeholder rows.  The null guard fixes a NullPointerException
     * that occurred when the dialog was constructed with a null list (a case
     * the constructor explicitly supports); the placeholder rows now carry
     * five columns so they match the five column headers in {@link #title}.
     */
    private void jbBorrarListaErroresActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbBorrarListaErroresActionPerformed
        if (listaErrores != null) {
            listaErrores.clear();
        }
        jTable1.setModel(new DefaultTableModel(new String [][] {
            {null, null, null, null, null},
            {null, null, null, null, null},
            {null, null, null, null, null},
            {null, null, null, null, null}
        }, title));
    }//GEN-LAST:event_jbBorrarListaErroresActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JTable jTable1;
    private javax.swing.JButton jbBorrarListaErrores;
    // End of variables declaration//GEN-END:variables
}
| gpl-3.0 |
Satantarkov/ocmanager | src/main/java/com/ocarballo/ocmanager/repository/JPAMaterialDao.java | 601 | package com.ocarballo.ocmanager.repository;
import java.util.List;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import com.ocarballo.ocmanager.domain.Material;
@Repository(value="materialDao")
public class JPAMaterialDao extends JPAGenericDao<Material> implements MaterialDao {
private static final long serialVersionUID = 1L;
@Transactional(readOnly=true)
@SuppressWarnings("unchecked")
public List<Material> getMaterialsList() {
return em.createQuery("SELECT m FROM Material m ORDER BY m.id").getResultList();
}
}
| gpl-3.0 |
noorxbyte/school_website | wp-content/themes/tortuga/inc/customizer/functions/custom-controls.php | 3593 | <?php
/**
* Custom Controls for the Customizer
*
* @package Tortuga
*/
/**
* Make sure that custom controls are only defined in the Customizer
*/
if ( class_exists( 'WP_Customize_Control' ) ) :
/**
* Displays a bold label text. Used to create headlines for radio buttons and description sections.
*/
class Tortuga_Customize_Header_Control extends WP_Customize_Control {
	/**
	 * Render Control.
	 *
	 * Outputs only a bold label (no input element); used as a headline
	 * above radio-button and description sections. The label is filtered
	 * through wp_kses_post() so limited HTML is allowed.
	 */
	public function render_content() {
		?>
		<label>
			<span class="customize-control-title"><?php echo wp_kses_post( $this->label ); ?></span>
		</label>
		<?php
	}
}
/**
* Displays a description text in gray italic font
*/
class Tortuga_Customize_Description_Control extends WP_Customize_Control {
	/**
	 * Render Control.
	 *
	 * Displays the label as a plain description span (styled gray/italic
	 * by the theme); no setting is attached. Filtered with wp_kses_post().
	 */
	public function render_content() {
		?>
		<span class="description"><?php echo wp_kses_post( $this->label ); ?></span>
		<?php
	}
}
/**
* Creates a category dropdown control for the Customizer
*/
class Tortuga_Customize_Category_Dropdown_Control extends WP_Customize_Control {
	/**
	 * Render Control.
	 *
	 * Renders a <select> with every category (including empty ones) plus an
	 * "All Categories" option whose value is 0; the current setting value is
	 * marked selected via selected().
	 *
	 * NOTE(review): $category->name and ->count are printed through %s
	 * without esc_html(); term names are sanitized by WordPress on save,
	 * but confirm this is acceptable or add esc_html() for defence in depth.
	 */
	public function render_content() {
		$categories = get_categories( array( 'hide_empty' => false ) );
		if ( ! empty( $categories ) ) : ?>
			<label>
				<span class="customize-control-title"><?php echo esc_html( $this->label ); ?></span>
				<select <?php $this->link(); ?>>
					<option value="0"><?php esc_html_e( 'All Categories', 'tortuga' ); ?></option>
					<?php
					foreach ( $categories as $category ) :
						printf( '<option value="%s" %s>%s</option>',
							$category->term_id,
							selected( $this->value(), $category->term_id, false ),
							$category->name . ' (' . $category->count . ')'
						);
					endforeach;
					?>
				</select>
			</label>
		<?php
		endif;
	}
}
/**
* Displays the upgrade teasers in thhe Pro Version / More Features section.
*/
class Tortuga_Customize_Upgrade_Control extends WP_Customize_Control {
	/**
	 * Render Control.
	 *
	 * Static marketing panel: links to the Tortuga Pro add-on and the
	 * ThemeZee plugin pages (with UTM parameters appended outside the
	 * translated URL), plus a link into the local plugin installer.
	 *
	 * NOTE(review): the admin_url() result is echoed without esc_url();
	 * admin_url() output is generated from the site URL, but wrapping it in
	 * esc_url() would match the convention used for the other links --
	 * confirm or adjust.
	 */
	public function render_content() {
		?>
		<div class="upgrade-pro-version">
			<span class="customize-control-title"><?php esc_html_e( 'Pro Version', 'tortuga' ); ?></span>
			<span class="textfield">
				<?php printf( esc_html__( 'Purchase the Pro Version of %s to get additional features and advanced customization options.', 'tortuga' ), 'Tortuga' ); ?>
			</span>
			<p>
				<a href="<?php echo esc_url( __( 'https://themezee.com/addons/tortuga-pro/', 'tortuga' ) ); ?>?utm_source=customizer&utm_medium=button&utm_campaign=tortuga&utm_content=pro-version" target="_blank" class="button button-secondary">
					<?php printf( esc_html__( 'Learn more about %s Pro', 'tortuga' ), 'Tortuga' ); ?>
				</a>
			</p>
		</div>
		<div class="upgrade-plugins">
			<span class="customize-control-title"><?php esc_html_e( 'ThemeZee Plugins', 'tortuga' ); ?></span>
			<span class="textfield">
				<?php esc_html_e( 'Extend the functionality of your WordPress website with our customized plugins.', 'tortuga' ); ?>
			</span>
			<p>
				<a href="<?php echo esc_url( __( 'https://themezee.com/plugins/', 'tortuga' ) ); ?>?utm_source=customizer&utm_medium=button&utm_campaign=tortuga&utm_content=plugins" target="_blank" class="button button-secondary">
					<?php esc_html_e( 'Browse Plugins', 'tortuga' ); ?>
				</a>
				<a href="<?php echo admin_url( 'plugin-install.php?tab=search&type=author&s=themezee' ); ?>" class="button button-primary">
					<?php esc_html_e( 'Install now', 'tortuga' ); ?>
				</a>
			</p>
		</div>
		<?php
	}
}
endif;
| gpl-3.0 |
drussellmrichie/TATools | COGSTools/psgQ3Tester.py | 2154 | __author__ = 'russellrichie'
import re, nltk
from nltk import CFG
from nltk.tree import *
from nltk.draw import tree
import matplotlib.pyplot as plt
grammar = """
S -> NP VP
NP -> D N
NP -> NP CP
CP -> C VP
VP -> V NP
D -> "the"
N -> "pirate" | "captain"
V -> "pursued" | "scared" | "chased"
C -> "that"
"""
#grammar = re.sub(pattern = '([A-z]{3,})', repl ="\\1".lower(), string = grammar)
#grammar = re.sub(pattern = '([A-z]{4,})', repl = lambda m: m.group(0).lower(), string = grammar)
#grammar = re.sub(pattern = '([A-z]{3,})', repl = '\"\\1\"', string = grammar)
#grammar = re.sub(pattern = '"start"', repl = "START", string = grammar)
grammar = CFG.fromstring(grammar)
parser = nltk.ChartParser(grammar)
sents = [
'the captain pursued the pirate',
'the pirate pursued the captain',
'the pirate scared the pirate',
'the pirate that scared the captain chased the pirate',
'the captain that scared the pirate chased the captain',
'the pirate chased the captain that scared the pirate',
'the captain that scared the pirate that chased the pirate scared the pirate that chased the captain'
]
badSents = [
'scared the captain',
'pirate chased the captain',
'the pirate that chased the captain',
'the pirate the scared the captain chased the captain',
'the captain chased the pirate scared the pirate',
'the captain scared the pirate chased'
]
print "Should do these"
for sent in sents:
print "\n" + sent
if any(parser.parse(sent.split())):
print "Yes, the grammar generates this."
else:
print "No, the grammar does not generate this."
print "\n \n \nShould not do these"
for sent in badSents:
print "\n" + sent
if any(parser.parse(sent.split())):
print "Yes, the grammar generates this."
else:
print "No, the grammar does not generate this."
sent = 'the captain that scared the pirate scared the pirate that chased the captain'
tree = parser.parse(sent.split())
# now that I'm in NLTK 3.0, the parse method returns a generator rather than a list of parses, in true Python 3.0
# style
tree.next().draw()
| gpl-3.0 |
opencart-patches/html-emails | src/upload/system/library/mailtemplate.php | 3495 | <?php
/**
 * Base class for mail templates: wraps the OpenCart Mail library with the
 * store's configured transport settings and provides helpers to send admin
 * notifications and to attach common header/footer template data.
 */
abstract class MailTemplate extends Model {

	/**
	 * Hook invoked when the observed event fires; implementations build
	 * and send the actual mail.
	 *
	 * @param string $route  route of the triggering event
	 * @param mixed  $data   event arguments, if any
	 * @param mixed  $result event result, if any
	 */
	abstract public function triggerMail($route, $data = null, $result = null);

	/**
	 * Send a single mail using the store's configured mail transport.
	 *
	 * @param string      $to      recipient address
	 * @param string      $subject mail subject
	 * @param string      $text    plain-text body
	 * @param string|null $html    optional HTML body
	 */
	public function sendMail($to, $subject, $text, $html = null) {
		$mail = new Mail();
		$mail->protocol = $this->config->get('config_mail_protocol');
		$mail->parameter = $this->config->get('config_mail_parameter');
		$mail->smtp_hostname = $this->config->get('config_mail_smtp_hostname');
		$mail->smtp_username = $this->config->get('config_mail_smtp_username');
		$mail->smtp_password = html_entity_decode($this->config->get('config_mail_smtp_password'), ENT_QUOTES, 'UTF-8');
		$mail->smtp_port = $this->config->get('config_mail_smtp_port');
		$mail->smtp_timeout = $this->config->get('config_mail_smtp_timeout');

		$mail->setTo($to);
		$mail->setFrom($this->config->get('config_email'));
		$mail->setSender(html_entity_decode($this->config->get('config_name'), ENT_QUOTES, 'UTF-8'));
		$mail->setSubject($subject);
		$mail->setText($text);
		if ($html) {
			$mail->setHtml($html);
		}
		$mail->send();
	}

	/**
	 * Send a mail to the main store address and to every valid additional
	 * alert address configured in the admin panel.
	 *
	 * Fixed: the $subject parameter was previously ignored and the
	 * 'text_new_customer' language string was always used as subject.
	 *
	 * @param string      $subject mail subject
	 * @param string      $text    plain-text body
	 * @param string|null $html    optional HTML body
	 */
	public function sendAdminMails($subject, $text, $html = null) {
		$this->sendMail($this->config->get('config_email'), $subject, $text, $html);

		// Send to the additional alert addresses (comma-separated list),
		// skipping empty and malformed entries.
		$emails = explode(',', $this->config->get('config_alert_email'));
		foreach ($emails as $email) {
			if (utf8_strlen($email) > 0 && filter_var($email, FILTER_VALIDATE_EMAIL)) {
				$this->sendMail($email, $subject, $text, $html);
			}
		}
	}

	/**
	 * Add common header data (store logo URL and name) to the template data.
	 *
	 * @param array $data template data, modified in place
	 */
	public function attachHeader(&$data) {
		$data['logo'] = $this->config->get('config_url') . 'image/' . $this->config->get('config_logo');
		$data['store_name'] = html_entity_decode($this->config->get('config_name'));
	}

	/**
	 * Add common footer data (information-page links, contact links and
	 * store contact details) to the template data.
	 *
	 * @param array $data template data, modified in place
	 */
	public function attachFooter(&$data) {
		$this->load->model('catalog/information');
		$this->load->language('common/footer');

		$data['text_information'] = $this->language->get('text_information');
		$data['text_service'] = $this->language->get('text_service');
		$data['text_contact'] = $this->language->get('text_contact');
		$data['text_return'] = $this->language->get('text_return');
		$data['text_sitemap'] = $this->language->get('text_sitemap');

		// Links to all information pages flagged for the footer.
		$data['informations'] = "";
		foreach ($this->model_catalog_information->getInformations() as $result) {
			if ($result['bottom']) {
				$data['informations'] .= '<a style="color:white;" href="'.$this->url->link('information/information', 'information_id=' . $result['information_id']).'">'.$result['title'].'</a><br/>';
			}
		}

		$data['contact'] = $this->url->link('information/contact');
		$data['return'] = $this->url->link('account/return/add', '', true);
		$data['sitemap'] = $this->url->link('information/sitemap');

		$data['address'] = nl2br($this->config->get('config_address'));
		$data['telephone'] = $this->config->get('config_telephone');
		// Fixed: previously read 'config_fax' here, putting the fax number
		// into the footer's e-mail field.
		$data['email'] = $this->config->get('config_email');
	}
}
revbayes/revbayes | src/revlanguage/functions/argumentrules/ArgumentRule.cpp | 15618 | #include <stddef.h>
#include <sstream>
#include <string>
#include <vector>
#include "Argument.h"
#include "ArgumentRule.h"
#include "RbException.h"
#include "RlFunction.h"
#include "TypeSpec.h"
#include "Workspace.h"
#include "DagNode.h"
#include "Environment.h"
#include "RevObject.h"
#include "RevPtr.h"
#include "RevVariable.h"
using namespace RevLanguage;
/**
 * Construct a rule that accepts a single type and carries a default value;
 * use "" for no label.
 *
 * @param argName   label under which the argument is matched (also its only alias)
 * @param argTypeSp the single accepted type specification
 * @param argDesc   human-readable description of the argument
 * @param et        evaluation type (by value / reference / constant reference)
 * @param dt        which kind of DAG node the argument may be
 * @param defVal    default value; wrapped in a new RevVariable held by the rule
 */
ArgumentRule::ArgumentRule(const std::string& argName, const TypeSpec& argTypeSp, const std::string& argDesc, EvaluationType et, DagNodeType dt, RevObject *defVal) :
    argTypeSpecs( 1, argTypeSp ),
    defaultVar( new RevVariable( defVal ) ),
    evalType( et ),
    nodeType( dt ),
    aliases( std::vector<std::string>(1, argName) ),
    label( argName ),
    description( argDesc ),
    hasDefaultVal( true )
{

}
/**
 * Construct a rule that accepts any of several types and carries a default
 * value; use "" for no label.
 *
 * @param argName   label under which the argument is matched (also its only alias)
 * @param argTypeSp the accepted type specifications, tried in order
 * @param argDesc   human-readable description of the argument
 * @param et        evaluation type
 * @param dt        which kind of DAG node the argument may be
 * @param defVal    default value; wrapped in a new RevVariable held by the rule
 */
ArgumentRule::ArgumentRule(const std::string& argName, const std::vector<TypeSpec>& argTypeSp, const std::string& argDesc, EvaluationType et, DagNodeType dt, RevObject *defVal) :
    argTypeSpecs( argTypeSp ),
    defaultVar( new RevVariable( defVal ) ),
    evalType( et ),
    nodeType( dt ),
    aliases( std::vector<std::string>(1, argName) ),
    label( argName ),
    description( argDesc ),
    hasDefaultVal( true )
{

}
/**
 * Construct a mandatory rule (no default value) that accepts a single type;
 * use "" for no label.
 *
 * @param argName   label under which the argument is matched (also its only alias)
 * @param argTypeSp the single accepted type specification
 * @param argDesc   human-readable description of the argument
 * @param et        evaluation type
 * @param dt        which kind of DAG node the argument may be
 */
ArgumentRule::ArgumentRule(const std::string& argName, const TypeSpec& argTypeSp, const std::string& argDesc, EvaluationType et, DagNodeType dt) :
    argTypeSpecs( 1, argTypeSp ),
    defaultVar( NULL ),
    evalType( et ),
    nodeType( dt ),
    aliases( std::vector<std::string>(1, argName) ),
    label( argName ),
    description( argDesc ),
    hasDefaultVal( false )
{

}
/**
 * Construct a mandatory rule (no default value) that accepts any of several
 * types; use "" for no label.
 *
 * @param argName   label under which the argument is matched (also its only alias)
 * @param argTypeSp the accepted type specifications, tried in order
 * @param argDesc   human-readable description of the argument
 * @param et        evaluation type
 * @param dt        which kind of DAG node the argument may be
 */
ArgumentRule::ArgumentRule(const std::string& argName, const std::vector<TypeSpec>& argTypeSp, const std::string& argDesc, EvaluationType et, DagNodeType dt) :
    argTypeSpecs( argTypeSp ),
    defaultVar( NULL ),
    evalType( et ),
    nodeType( dt ),
    aliases( std::vector<std::string>(1, argName) ),
    label( argName ),
    description( argDesc ),
    hasDefaultVal( false )
{

}
/**
 * Construct a mandatory rule with several accepted names (aliases) and a
 * single accepted type; use "" for no label.
 *
 * @param argNames  all names under which the argument may be passed
 * @param argTypeSp the single accepted type specification
 * @param argDesc   human-readable description of the argument
 * @param et        evaluation type
 * @param dt        which kind of DAG node the argument may be
 */
ArgumentRule::ArgumentRule(const std::vector<std::string>& argNames, const TypeSpec& argTypeSp, const std::string& argDesc, EvaluationType et, DagNodeType dt) :
    argTypeSpecs( 1, argTypeSp ),
    defaultVar( NULL ),
    evalType( et ),
    nodeType( dt ),
    aliases( argNames ),
    description( argDesc ),
    hasDefaultVal( false )
{
    // The display label is all aliases joined by "/".
    label = "";
    for (size_t i = 0; i < argNames.size(); i++)
    {
        if (i > 0)
        {
            label += "/";
        }
        label += argNames[i];
    }
}
/**
 * Construct a mandatory rule with several accepted names (aliases) and
 * several accepted types; use "" for no label.
 *
 * @param argNames  all names under which the argument may be passed
 * @param argTypeSp the accepted type specifications, tried in order
 * @param argDesc   human-readable description of the argument
 * @param et        evaluation type
 * @param dt        which kind of DAG node the argument may be
 */
ArgumentRule::ArgumentRule(const std::vector<std::string>& argNames, const std::vector<TypeSpec>& argTypeSp, const std::string& argDesc, EvaluationType et, DagNodeType dt) :
    argTypeSpecs( argTypeSp ),
    defaultVar( NULL ),
    evalType( et ),
    nodeType( dt ),
    aliases( argNames ),
    description( argDesc ),
    hasDefaultVal( false )
{
    // The display label is all aliases joined by "/".
    label = "";
    for (size_t i = 0; i < argNames.size(); i++)
    {
        if (i > 0)
        {
            label += "/";
        }
        label += argNames[i];
    }
}
/**
 * Construct a rule with several accepted names (aliases), a single accepted
 * type and a default value; use "" for no label.
 *
 * @param argNames  all names under which the argument may be passed
 * @param argTypeSp the single accepted type specification
 * @param argDesc   human-readable description of the argument
 * @param et        evaluation type
 * @param dt        which kind of DAG node the argument may be
 * @param defVal    default value; wrapped in a new RevVariable held by the rule
 */
ArgumentRule::ArgumentRule(const std::vector<std::string>& argNames, const TypeSpec& argTypeSp, const std::string& argDesc, EvaluationType et, DagNodeType dt, RevObject *defVal) :
    argTypeSpecs( 1, argTypeSp ),
    defaultVar( new RevVariable( defVal ) ),
    evalType( et ),
    nodeType( dt ),
    aliases( argNames ),
    description( argDesc ),
    hasDefaultVal( true )
{
    // The display label is all aliases joined by "/".
    label = "";
    for (size_t i = 0; i < argNames.size(); i++)
    {
        if (i > 0)
        {
            label += "/";
        }
        label += argNames[i];
    }
}
/**
 * Construct a rule with several accepted names (aliases), several accepted
 * types and a default value; use "" for no label.
 *
 * @param argNames  all names under which the argument may be passed
 * @param argTypeSp the accepted type specifications, tried in order
 * @param argDesc   human-readable description of the argument
 * @param et        evaluation type
 * @param dt        which kind of DAG node the argument may be
 * @param defVal    default value; wrapped in a new RevVariable held by the rule
 */
ArgumentRule::ArgumentRule(const std::vector<std::string>& argNames, const std::vector<TypeSpec>& argTypeSp, const std::string& argDesc, EvaluationType et, DagNodeType dt, RevObject *defVal) :
    argTypeSpecs( argTypeSp ),
    defaultVar( new RevVariable( defVal ) ),
    evalType( et ),
    nodeType( dt ),
    aliases( argNames ),
    description( argDesc ),
    hasDefaultVal( true )
{
    // The display label is all aliases joined by "/".
    label = "";
    for (size_t i = 0; i < argNames.size(); i++)
    {
        if (i > 0)
        {
            label += "/";
        }
        label += argNames[i];
    }
}
/** @return a deep copy of this rule, created via the copy constructor. */
ArgumentRule* RevLanguage::ArgumentRule::clone( void ) const
{
    return new ArgumentRule( *this );
}
/**
 * Fit a variable into an argument according to the argument rule. If necessary and
 * appropriate, we do type conversion or type promotion.
 *
 * @param arg  the incoming argument to fit
 * @param once true if matching happens in a static (evaluate-once) context
 * @return a new Argument wrapping either the original variable, a clone, a
 *         converted object, or the result of a conversion function
 * @throws RbException if the variable fits none of the accepted types
 *
 * @todo To conform to the old code we change the required type of the incoming
 *       variable wrapper here. We need to change this so that we do not change
 *       the wrapper here, but make sure that if the argument variable is inserted
 *       in a member variable or container element slot, that the slot variable
 *       wrapper, which should be unique (not the same as the incoming variable
 *       wrapper), has the right required type.
 */
Argument ArgumentRule::fitArgument( Argument& arg, bool once ) const
{

    RevPtr<RevVariable> the_var = arg.getVariable();
    // By-value rules, workspace variables and constants are always matched
    // in an evaluate-once (static) context.
    if ( evalType == BY_VALUE || the_var->isWorkspaceVariable() || the_var->getRevObject().isConstant() )
    {
        once = true;
    }

    // Try each accepted type specification in order.
    for ( std::vector<TypeSpec>::const_iterator it = argTypeSpecs.begin(); it != argTypeSpecs.end(); ++it )
    {
        if ( evalType == BY_VALUE || the_var->isWorkspaceVariable() == true )
        {
            if ( the_var->getRevObject().isType( *it ) )
            {
                // Exact type match: hand the callee its own clone of the value.
                RevPtr<RevVariable> valueVar = RevPtr<RevVariable>( new RevVariable(the_var->getRevObject().clone(),arg.getLabel()) );
                return Argument( valueVar, arg.getLabel(), false );
            }
            else if ( the_var->getRevObject().isConvertibleTo( *it, once ) != -1)
            {
                // Fit by type conversion. For now, we also modify the type of the incoming variable wrapper.
                RevObject* convertedObject = the_var->getRevObject().convertTo( *it );
                RevPtr<RevVariable> valueVar = RevPtr<RevVariable>( new RevVariable(convertedObject,arg.getLabel()) );
                return Argument( valueVar, arg.getLabel(), false );
            }
        } // if (by-value)
        else
        {
            if ( the_var->getRevObject().isType( *it ) )
            {
                // For now, change the required type of the incoming variable wrapper
                the_var->setRequiredTypeSpec( *it );

                if ( isEllipsis() == false )
                {
                    return Argument( the_var, arg.getLabel(), evalType == BY_CONSTANT_REFERENCE );
                }
                else
                {
                    return Argument( the_var, arg.getLabel(), true );
                }
            }
            else if ( the_var->getRevObject().isConvertibleTo( *it, once ) != -1 && (*it).isDerivedOf( the_var->getRequiredTypeSpec() ) )
            {
                // Fit by type conversion. For now, we also modify the type of the incoming variable wrapper.
                RevObject* convertedObject = the_var->getRevObject().convertTo( *it );
                the_var->replaceRevObject( convertedObject );
                the_var->setRequiredTypeSpec( *it );
                // NOTE(review): both branches below are identical; unlike the
                // exact-match case above, the ellipsis distinction has no
                // effect here — confirm whether that is intended.
                if ( !isEllipsis() )
                {
                    return Argument( the_var, arg.getLabel(), false );
                }
                else
                {
                    return Argument( the_var, arg.getLabel(), false );
                }
            }
            else
            {
                // Fit by type conversion function
                const TypeSpec& typeFrom = the_var->getRevObject().getTypeSpec();
                const TypeSpec& typeTo = *it;

                // create the function name; conversion functions follow the
                // naming scheme "_<From>2<To>"
                std::string function_name = "_" + typeFrom.getType() + "2" + typeTo.getType();

                // Package arguments
                std::vector<Argument> args;
                // NOTE(review): theArg is constructed but never used; the raw
                // variable is pushed instead (implicitly converted) — confirm
                // whether the "arg" label was meant to be attached.
                Argument theArg = Argument( the_var, "arg" );
                args.push_back( the_var );

                Environment& env = Workspace::globalWorkspace();

                try
                {
                    Function* func = env.getFunction(function_name, args, once).clone();

                    // Allow the function to process the arguments
                    func->processArguments( args, once );

                    // Set the execution environment of the function
                    func->setExecutionEnviroment( &env );

                    // Evaluate the function
                    RevPtr<RevVariable> conversionVar = func->execute();

                    // free the memory
                    delete func;

                    conversionVar->setHiddenVariableState( true );
                    conversionVar->setRequiredTypeSpec( *it );

                    return Argument( conversionVar, arg.getLabel(), evalType == BY_CONSTANT_REFERENCE );
                }
                catch (RbException& e)
                {
                    // No matching conversion function: fall through and try
                    // the next accepted type specification.
                }
            } // else (type conversion function)
        } // else (not by-value)
    }

    throw RbException( "Argument type mismatch while fitting variable with name \"" + the_var->getName() + "\" of type " + the_var->getRevObject().getType() + " to the argument with name \"" + getArgumentLabel() + "\" and type " +
                        getArgumentTypeSpec()[0].getType() );
}
/** @return all names (aliases) under which this argument may be passed. */
const std::vector<std::string>& ArgumentRule::getArgumentAliases( void ) const
{
    return this->aliases;
}
/** @return the kind of DAG node this rule requires of its argument. */
ArgumentRule::DagNodeType ArgumentRule::getArgumentDagNodeType( void ) const
{
    return this->nodeType;
}
/** @return the printable label of this argument (aliases joined by "/"). */
const std::string& ArgumentRule::getArgumentLabel( void ) const
{
    return this->label;
}
/** @return the type specifications accepted by this rule, in match order. */
const std::vector<TypeSpec>& ArgumentRule::getArgumentTypeSpec(void) const
{
    return this->argTypeSpecs;
}
/**
 * @return the default variable of this rule.
 * @throws RbException if the rule carries no default value.
 */
const RevVariable& ArgumentRule::getDefaultVariable( void ) const
{
    if ( this->defaultVar != NULL )
    {
        return *(this->defaultVar);
    }

    throw RbException("Cannot get default variable \"" + label + "\"");
}
/** @return the human-readable description of this argument. */
const std::string& ArgumentRule::getArgumentDescription( void ) const
{
    return this->description;
}
/** @return how arguments matched by this rule are evaluated (by value/reference/...). */
ArgumentRule::EvaluationType ArgumentRule::getEvaluationType( void ) const
{
    return this->evalType;
}
bool ArgumentRule::hasDefault(void) const
{
return hasDefaultVal;
}
/**
 * Test if argument is valid. The boolean flag 'once' is used to signal whether the argument matching
 * is done in a static or a dynamic context. If the rule is constant, then the argument matching
 * is done in a static context (evaluate-once context) regardless of the setting of the once flag.
 * If the argument is constant, we try type promotion if permitted by the variable required type.
 *
 * @param arg  the argument to score
 * @param once true if matching happens in a static (evaluate-once) context
 * @return a match penalty: 0.0 for an exact type match, larger values for
 *         worse fits (0.1 when only a conversion function exists), and -1
 *         when the argument cannot fit this rule at all
 *
 * @todo See the TODOs for fitArgument(...)
 */
double ArgumentRule::isArgumentValid( Argument &arg, bool once) const
{

    RevPtr<RevVariable> the_var = arg.getVariable();
    if ( the_var == NULL )
    {
        return -1;
    }

    // By-value rules, workspace variables and constant model objects force
    // a static (evaluate-once) matching context.
    if ( evalType == BY_VALUE || the_var->isWorkspaceVariable() || ( the_var->getRevObject().isModelObject() && the_var->getRevObject().getDagNode()->getDagNodeType() == RevBayesCore::DagNode::CONSTANT) )
    {
        once = true;
    }

    // Reject variables whose DAG node kind does not match the rule's requirement.
    if ( nodeType == STOCHASTIC && the_var->getRevObject().getDagNode()->getDagNodeType() != RevBayesCore::DagNode::STOCHASTIC )
    {
        return -1;
    }
    else if ( nodeType == DETERMINISTIC && the_var->getRevObject().getDagNode()->getDagNodeType() != RevBayesCore::DagNode::DETERMINISTIC )
    {
        return -1;
    }

    // Score the variable against each accepted type in order.
    for ( std::vector<TypeSpec>::const_iterator it = argTypeSpecs.begin(); it != argTypeSpecs.end(); ++it )
    {
        if ( the_var->getRevObject().isType( *it ) )
        {
            return 0.0;
        }

        double penalty = the_var->getRevObject().isConvertibleTo( *it, once );
        if ( penalty != -1 && (*it).isDerivedOf( the_var->getRequiredTypeSpec() ) )
        {
            return penalty;
        }
        else if ( penalty != -1 && evalType == BY_VALUE )
        {
            return penalty;
        }
//        else if ( once == true &&
////                 !var->isAssignable() &&
//                  the_var->getRevObject().isConvertibleTo( *it, true ) != -1 &&
//                  (*it).isDerivedOf( the_var->getRequiredTypeSpec() )
//                )
//        {
//            return the_var->getRevObject().isConvertibleTo( *it, true );
//        }
        else if ( nodeType != STOCHASTIC )
        {
            // Last resort: look for an explicit conversion function named
            // "_<From>2<To>" in the global workspace.
            const TypeSpec& typeFrom = the_var->getRevObject().getTypeSpec();
            const TypeSpec& typeTo = *it;

            // create the function name
            std::string function_name = "_" + typeFrom.getType() + "2" + typeTo.getType();

            // Package arguments
            std::vector<Argument> args;
            // NOTE(review): theArg is constructed but never used; the raw
            // variable is pushed instead — same pattern as in fitArgument.
            Argument theArg = Argument( the_var, "arg" );
            args.push_back( the_var );

            Environment& env = Workspace::globalWorkspace();
            try
            {
                // we just want to check if the function exists and can be found
                env.getFunction(function_name, args, once);
                // A conversion function exists: accept with a small penalty.
                return 0.1;
            }
            catch (RbException& e)
            {
                // No conversion function for this type; try the next one.
            }
        }
    }

    return -1;
}
/** @return false: a plain argument rule is never an ellipsis ("...") rule; subclasses may override. */
bool RevLanguage::ArgumentRule::isEllipsis( void ) const
{
    return false;
}
/**
 * Print value for user (in descriptions of functions, for instance): the
 * accepted type(s) separated by '|', the required DAG node kind in angle
 * brackets, then the argument label. We apparently do not use the isConst
 * flag to denote whether an argument is supposed to be passed as a constant
 * currently, so the printing of this modifier is suspended for now.
 */
void ArgumentRule::printValue(std::ostream &o) const
{
    // Accepted types, '|'-separated.
    bool first = true;
    for ( std::vector<TypeSpec>::const_iterator it = argTypeSpecs.begin(); it != argTypeSpecs.end(); ++it )
    {
        if ( first == false )
        {
            o << "|";
        }
        first = false;
        o << (*it).getType();
    }

    // Required DAG node kind of the passed-in argument.
    std::string dagtype = "";
    switch ( nodeType )
    {
        case ArgumentRule::DETERMINISTIC:
            dagtype = "<deterministic>";
            break;
        case ArgumentRule::STOCHASTIC:
            dagtype = "<stochastic>";
            break;
        case ArgumentRule::CONSTANT:
            dagtype = "<constant>";
            break;
        case ArgumentRule::DYNAMIC:
            dagtype = "<dynamic>";
            break;
        case ArgumentRule::ANY:
            dagtype = "<any>";
            break;
        default:
            dagtype = "<?>";
            break;
    }
    o << dagtype;

    o << " " << label;
}
| gpl-3.0 |
trsinard/Takatsuki | src/nl/nitori/Takatsuki/ANN/NeuralNode.java | 2401 | /* Copyright (C) 2012 Justin Wilcox
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package nl.nitori.Takatsuki.ANN;
import java.util.ArrayList;
import nl.nitori.Takatsuki.Util.Log;
/**
 * A neuron in a neural network.
 *
 * Holds the node's input sum, activation and backpropagation error term,
 * together with the weighted {@link Connection}s to and from other nodes.
 */
public class NeuralNode {
    // Output of the node for the current forward pass.
    private double activation;
    // Weighted sum of inputs feeding this node.
    private double input;
    // Error term used during backpropagation.
    private double delta;
    // Outgoing connections (this node is the source).
    private ArrayList<Connection> outputs;
    // Incoming connections (this node is the receiving end).
    private ArrayList<Connection> inputs;

    /** @return the outgoing connections of this node */
    public ArrayList<Connection> getOutputs() {
        return outputs;
    }

    /** @return the incoming connections of this node */
    public ArrayList<Connection> getInputs() {
        return inputs;
    }

    /** @return the backpropagation error term */
    public double getDelta() {
        return delta;
    }

    /** @param delta the backpropagation error term to store */
    public void setDelta(double delta) {
        this.delta = delta;
    }

    /** Creates a node with no connections. */
    public NeuralNode() {
        outputs = new ArrayList<Connection>();
        inputs = new ArrayList<Connection>();
    }

    /**
     * Automatically creates a connection between two nodes
     *
     * @param child
     *            The node that will be on the receiving end of the connection
     * @param weight
     *            The weight of the connection
     */
    public void addChild(NeuralNode child, double weight) {
        // Input nodes cannot receive connections; log and ignore the request.
        if (child instanceof InputNode) {
            Log.warning("Attempt to add an input node as a child. Blocked");
            return;
        }
        // Register the connection on both endpoints.
        Connection c = new Connection(this, child, weight);
        outputs.add(c);
        child.inputs.add(c);
    }

    /** @return the weighted input sum of this node */
    public double getInput() {
        return input;
    }

    /** @param input the weighted input sum to store */
    public void setInput(double input) {
        this.input = input;
    }

    /** @param act the activation value to store */
    public void setActivation(double act) {
        activation = act;
    }

    /** @return the activation value of this node */
    public double getActivation() {
        return activation;
    }
}
| gpl-3.0 |
probedock/probedock | features/step_definitions/project_steps.rb | 563 | Given /^project (.+) exists within organization (.+?)(?: with repo url (.+))?$/ do |name,organization_name,repo_url|
options = {
name: name.downcase.gsub(/\s+/, '-'),
organization: named_record(organization_name)
}
options[:display_name] = name if name != options[:name]
options[:repo_url] = repo_url if repo_url
add_named_record(name, create(:project, options))
end
# Sets and persists the repo URL pattern on an existing named project.
Given /^the project (.+) has repo url pattern (.+)$/ do |name,repo_url_pattern|
  named_record(name).tap do |project|
    project.repo_url_pattern = repo_url_pattern
    project.save
  end
end
nsklc/suayipkilic.com | template/_footer.php | 790 | <!-- Google Map -->
<!-- <div id="googleMap" class="w3-grayscale" style="width:100%;height:450px;"></div> -->
<!-- Footer -->
</div>
<footer class="w3-center w3-light-grey w3-padding-16 w3-margin-bottom">
<!-- <a href="#home" class="w3-button w3-light-grey"><i class="fa fa-arrow-up w3-margin-right"></i>Başa dön</a> -->
<!-- <p>Powered by <a href="" title="W3.CSS" target="_blank" class="w3-hover-text-green">Enes Kılıç</a></p> -->
<ul class="nav">
<a class="w3-button w3-large w3-dark-grey" href="language.php?language=En">English</a>
<a class="w3-button w3-large w3-teal" href="language.php?language=De">Deutsch</a>
<a class="w3-button w3-large w3-dark-grey" href="language.php?language=Tr">Türkçe</a>
</ul>
</footer>
</body>
</html>
| gpl-3.0 |
fserena/GLORIASolarExperiment | WebContent/solar/js/ccds.js | 10784 | 'use strict';
// Fetch the CCD parameter tree and keep only the first two camera images
// on the scope. Returns the sequence promise so callers can chain.
function LoadCCDContent($gloriaAPI, scope) {
	return scope.sequence.execute(function() {
		return $gloriaAPI.getParameterTreeValue(scope.rid, 'cameras', 'ccd',
				function(data) {
					scope.ccds = data.images.slice(0, 2);
				});
	});
}
// Load the focuser parameter into the scope. Seeds last_offset with 500
// (slider mid-range) on first use, mirrors it into the requested offset,
// and precomputes the step delta shown in the UI (0 right after loading).
function LoadFocuserContent($gloriaAPI, scope) {
	return scope.sequence.execute(function() {
		return $gloriaAPI.getParameterValue(scope.rid, 'focuser',
				function(data) {
					scope.focuser = data;
					if (scope.focuser.last_offset == undefined) {
						scope.focuser.last_offset = 500;
					}
					scope.focuser.offset = scope.focuser.last_offset;
					scope.status.main.focuser.exp_offset = Math
							.floor(scope.focuser.offset
									- scope.focuser.last_offset);
				});
	});
}
// Select the CCD with the given order, then stop and restart the continuous
// image acquisition for it. Device errors flag the parent scope.
function LoadContinuousImage($gloriaAPI, scope, order) {
	// Point the 'ccd.order' parameter at the requested camera.
	scope.sequence.execute(function() {
		return $gloriaAPI.setParameterTreeValue(scope.rid, 'cameras',
				'ccd.order', order, function() {
					scope.ccdSelected = order;
				});
	});

	// Stop any acquisition already running before restarting it.
	scope.sequence.execute(function() {
		return $gloriaAPI.executeOperation(scope.rid, 'stop_continuous_image',
				function() {
					scope.continuousMode = false;
				}, function(error) {
					scope.$parent.deviceOnError = true;
				});
	});

	return scope.sequence.execute(function() {
		return $gloriaAPI.executeOperation(scope.rid, 'load_continuous_image',
				function() {
					scope.continuousMode = true;
				}, function(error) {
					scope.$parent.deviceOnError = true;
				});
	});
}
// Push the requested focuser offset to the device: upload the step delta,
// persist the new offset as last_offset, then execute the move. The UI
// flag valueSet is cleared while the move is in flight.
function SetFocuserPosition($gloriaAPI, scope) {
	scope.status.main.focuser.valueSet = false;

	scope.sequence.execute(function() {
		return $gloriaAPI.setParameterTreeValue(scope.rid, 'focuser', 'steps',
				scope.focuser.exp_offset, function(data) {
					// NOTE(review): success result intentionally ignored? confirm.
				});
	});

	scope.sequence.execute(function() {
		return $gloriaAPI.setParameterTreeValue(scope.rid, 'focuser',
				'last_offset', scope.focuser.offset, function(data) {
					scope.focuser.last_offset = scope.focuser.offset;
				});
	});

	return scope.sequence.execute(function() {
		return $gloriaAPI.executeOperation(scope.rid, 'move_focus', function(
				data) {
			scope.status.main.focuser.valueSet = true;
		}, function() {
			// NOTE(review): move errors are silently swallowed — confirm intended.
		});
	});
}
// Push the exposure time of CCD 0 to the device: make sure CCD 0 is the
// selected camera, upload the exposure value, then apply it with the
// 'set_exposure' operation. valueSet is cleared until the device confirms.
function SetExposureTime($gloriaAPI, scope) {
	scope.status.main.exposure.valueSet = false;

	// Exposure is always configured on CCD 0; reselect it if needed.
	if (scope.ccdSelected != 0) {
		scope.sequence.execute(function() {
			return $gloriaAPI.setParameterTreeValue(scope.rid, 'cameras',
					'ccd.order', 0, function() {
						scope.ccdSelected = 0;
					});
		});
	}

	scope.sequence.execute(function() {
		return $gloriaAPI.setParameterTreeValue(scope.rid, 'cameras',
				'ccd.images.[0].exposure', scope.ccds[0].exposure, function(
						data) {
					// NOTE(review): success result intentionally ignored? confirm.
				});
	});

	return scope.sequence.execute(function() {
		return $gloriaAPI.executeOperation(scope.rid, 'set_exposure', function(
				data) {
			scope.status.main.exposure.valueSet = true;
		});
	});
}
// Select the CCD with the given order and ask the device to refresh its
// attribute set; device errors flag the parent scope.
function LoadCCDAttributes($gloriaAPI, scope, order) {
	scope.sequence.execute(function() {
		return $gloriaAPI.setParameterTreeValue(scope.rid, 'cameras',
				'ccd.order', order, function() {
					scope.ccdSelected = order;
				});
	});

	return scope.sequence.execute(function() {
		return $gloriaAPI.executeOperation(scope.rid, 'get_ccd_attributes',
				function() {
				}, function(error) {
					scope.$parent.deviceOnError = true;
				});
	});
}
// Schedule the next exposure-status poll in one second; the pending timer
// handle is stored so it can be cancelled on scope destruction.
function CheckExposure($gloriaAPI, scope, timeout) {
	scope.status.main.exposure.timer = timeout(
			scope.status.main.exposure.check, 1000);
}
// Start a new exposure on CCD 0 and begin polling for the resulting image
// (CheckExposure). Resets the parent's imageTaken flag first.
function StartExposure($gloriaAPI, scope, timeout) {
	scope.$parent.imageTaken = false;

	// Exposures are taken with CCD 0 selected.
	scope.sequence.execute(function() {
		return $gloriaAPI.setParameterTreeValue(scope.rid, 'cameras',
				'ccd.order', 0, function() {
					scope.ccdSelected = 0;
				});
	});

	return scope.sequence.execute(function() {
		return $gloriaAPI.executeOperation(scope.rid, 'start_exposure',
				function(data) {
					CheckExposure($gloriaAPI, scope, timeout);
				});
	});
}
// AngularJS controller for the solar CCD panel: manages the finder and main
// camera images, the exposure-time and focuser sliders (drag-duration based),
// continuous acquisition, periodic image refresh, and a read-only "shared"
// mode that only polls remote state.
function SolarCCDCtrl($gloriaAPI, $scope, $timeout, $sequenceFactory) {

	$scope.sequence = $sequenceFactory.getSequence();
	$scope.finderImage = $scope.mainPath + '/img/wn3.gif';
	$scope.ccds = [ {}, {} ];

	// UI state: per-widget focus flags, slider drag timing, and the cache
	// buster counter for the continuous image URLs.
	$scope.status = {
		time : {
			count : Math.floor(Math.random() * 100000)
		},
		finder : {
			focused : false
		},
		main : {
			focused : false,
			clock : {
				focused : false
			},
			focus : {
				focused : false
			},
			camera : {
				focused : false
			},
			exposure : {
				begin : null,
				end : null,
				length : 0,
				valueSet : true
			},
			focuser : {
				begin : null,
				end : null,
				length : 0,
				valueSet : true,
				exp_offset : 0
			}
		}
	};

	// Inline styles driven by the slider values (positions in px).
	$scope.exposureStyle = {};
	$scope.focuserStyle = {};
	$scope.exposureBarStyle = {};
	$scope.focuserBarStyle = {};

	// Request a finder movement; the parent scope performs it.
	$scope.moveFinder = function(direction) {
		$scope.$parent.movementDirection = direction;
		$scope.$parent.movementRequested = true;
	};

	// Exposure slider: the press duration (capped at 2s) scales the step size.
	$scope.beginSetExposureTime = function() {
		$scope.status.main.exposure.begin = new Date();
	};

	$scope.endSetExposureTime = function() {
		$scope.status.main.exposure.end = new Date();
		$scope.status.main.exposure.length = ($scope.status.main.exposure.end - $scope.status.main.exposure.begin) / 1000;
		$scope.status.main.exposure.length = Math.min(2.0, Math.max(
				$scope.status.main.exposure.length, 0));
	};

	// Focuser slider: same press-duration mechanism as the exposure slider.
	$scope.beginSetFocuserPosition = function() {
		$scope.status.main.focuser.begin = new Date();
	};

	$scope.endSetFocuserPosition = function() {
		$scope.status.main.focuser.end = new Date();
		$scope.status.main.focuser.length = ($scope.status.main.focuser.end - $scope.status.main.focuser.begin) / 1000;
		$scope.status.main.focuser.length = Math.min(2.0, Math.max(
				$scope.status.main.focuser.length, 0));
	};

	// Poll whether the exposure produced an image; when the JPEG URL is not
	// yet available, ask the device to load the URLs and poll again.
	$scope.status.main.exposure.check = function() {
		$scope.sequence.execute(function() {
			return $gloriaAPI.getParameterTreeValue($scope.rid, 'cameras',
					'ccd.images.[0].inst', function(data) {
						if (data.id >= 0) {
							if (data.jpg != undefined && data.jpg != null) {
								$scope.$parent.imageTaken = true;
							} else {
								$scope.sequence.execute(function() {
									return $gloriaAPI.executeOperation(
											$scope.rid, 'load_image_urls',
											function() {
												CheckExposure($gloriaAPI,
														$scope, $timeout);
											});
								});
							}
						} else {
							// Negative id: no pending exposure to wait for.
							$scope.$parent.imageTaken = true;
						}
					});
		});
	};

	// Step the exposure value (clamped to [0, 0.05] s) and update the slider
	// knob/bar geometry (230 px track).
	$scope.setExposureTimeValue = function(sign) {
		$scope.ccds[0].exposure += (0.01 * $scope.status.main.exposure.length)
				* sign;

		// NOTE(review): leftover debug logging — consider removing.
		console.log($scope.ccds[0].exposure);

		if ($scope.ccds[0].exposure < 0) {
			$scope.ccds[0].exposure = 0;
		} else if ($scope.ccds[0].exposure > 0.05) {
			$scope.ccds[0].exposure = 0.05;
		}

		$scope.exposureStyle.top = ((($scope.ccds[0].exposure * 230 / 0.05) + 83) * -1.0)
				+ "px";

		$scope.exposureBarStyle.top = 230
				- ((($scope.ccds[0].exposure * 230 / 0.05))) + "px";
	};

	// Step the focuser offset (clamped to [0, 1000], 500 = center) and update
	// the slider knob/bar geometry; exp_offset is the delta to be sent.
	$scope.setFocuserPositionValue = function(sign) {
		$scope.focuser.offset += (150 * $scope.status.main.focuser.length)
				* sign;

		if ($scope.focuser.offset < 0) {
			$scope.focuser.offset = 0;
		} else if ($scope.focuser.offset > 1000) {
			$scope.focuser.offset = 1000;
		}

		$scope.focuser.exp_offset = Math.floor($scope.focuser.offset
				- $scope.focuser.last_offset);

		var steps = $scope.focuser.offset - 500;
		var height = Math.abs(steps) * 115 / 500;

		$scope.focuserBarStyle.height = height + "px";

		if (steps >= 0) {
			$scope.focuserStyle.top = ((($scope.focuser.offset * 230 / 1000) + 83) * -1.0)
					+ "px";
			$scope.focuserBarStyle.top = (115 - height) + "px";
		} else {
			$scope.focuserStyle.top = (((($scope.focuser.offset * 230 / 1000) + 83) * -1.0) + 25)
					+ "px";
			$scope.focuserBarStyle.top = (115) + "px";
		}
	};

	// Device-changing actions are disabled in read-only shared mode.
	$scope.setExposureTime = function() {
		if (!$scope.sharedMode) {
			SetExposureTime($gloriaAPI, $scope);
		}
	};

	$scope.setFocuserPosition = function() {
		if (!$scope.sharedMode) {
			SetFocuserPosition($gloriaAPI, $scope);
		}
	};

	$scope.startExposure = function() {
		if (!$scope.sharedMode) {
			StartExposure($gloriaAPI, $scope, $timeout);
		}
	};

	// (Re)start continuous acquisition for any camera that has no stream URL
	// yet, sync the exposure slider geometry, and reload the CCD content if
	// anything was restarted.
	$scope.initCCDSystem = function() {
		var upToDate = true;

		if (!$scope.sharedMode) {
			for (var i = 0; i < $scope.ccds.length; i++) {
				if ($scope.ccds[i].cont == undefined
						|| $scope.ccds[i].cont == null) {
					LoadContinuousImage($gloriaAPI, $scope, i);
					upToDate = false;
				}
			}
		}

		$scope.exposureStyle.top = ((($scope.ccds[0].exposure * 230 / 0.05) + 83) * -1.0)
				+ "px";
		$scope.exposureBarStyle.top = 230
				- ((($scope.ccds[0].exposure * 230 / 0.05))) + "px";

		if (!upToDate) {
			LoadCCDContent($gloriaAPI, $scope).then(function() {
				$scope.$parent.ccdImagesLoaded = true;
			});
		} else {
			$scope.$parent.ccdImagesLoaded = true;
		}
	};

	// Bootstrap once the parent has loaded the weather data and a valid
	// resource id is available; shared mode only starts the polling loop.
	$scope.$watch('weatherLoaded', function() {
		if ($scope.rid > 0) {
			if (!$scope.sharedMode) {
				LoadCCDAttributes($gloriaAPI, $scope, 0);
			} else {
				$scope.loadContentTimer = $timeout($scope.loadSharedContent,
						5000);
			}

			LoadFocuserContent($gloriaAPI, $scope);
			LoadCCDContent($gloriaAPI, $scope).then(
					function() {
						$scope.initCCDSystem();
						$scope.status.time.timer = $timeout(
								$scope.status.time.onTimeout, 1000, 1000);
						$scope.setFocuserPositionValue(1.0);
					});
		}
	});

	// Shared mode: poll the remote focuser and exposure state every 5 s and
	// keep the local sliders in sync.
	$scope.loadSharedContent = function() {
		LoadFocuserContent($gloriaAPI, $scope).then(function() {
			$scope.setFocuserPositionValue(1.0);
		});

		$gloriaAPI
				.getParameterTreeValue(
						$scope.rid,
						'cameras',
						'ccd',
						function(data) {
							var ccds = data.images.slice(0, 2);
							$scope.ccds[0].exposure = ccds[0].exposure;

							$scope.exposureStyle.top = ((($scope.ccds[0].exposure * 230 / 0.05) + 83) * -1.0)
									+ "px";
							$scope.exposureBarStyle.top = 230
									- ((($scope.ccds[0].exposure * 230 / 0.05)))
									+ "px";
						});

		$scope.loadContentTimer = $timeout($scope.loadSharedContent, 5000);
	};

	// Once per second, bump the cache-buster counter and refresh the
	// continuous image URLs so the <img> tags reload.
	$scope.status.time.onTimeout = function() {
		$scope.status.time.count += 1;

		var i = 0;
		$scope.ccds
				.forEach(function(index) {
					if ($scope.ccds[i].cont != null
							&& $scope.ccds[i].cont != undefined) {
						$scope.ccds[i].pcont = $scope.ccds[i].cont + '?v='
								+ $scope.status.time.count;
					}
					i++;
				});

		$scope.status.time.timer = $timeout($scope.status.time.onTimeout, 1000,
				1000);
	};

	// Click handler on the continuous image; currently a no-op.
	$scope.contClicked = function(event) {
		//alert(event.offsetX + " " + event.offsetY);
	};

	// Cancel all pending timers when the scope is torn down.
	$scope.$on('$destroy', function() {
		$timeout.cancel($scope.status.time.timer);
		$timeout.cancel($scope.status.main.exposure.timer);
		$timeout.cancel($scope.loadContentTimer);
	});
}
| gpl-3.0 |
amaurywalbert/twitter | statistics/plot_top_k_co_occurrence_boxplot_v1.py | 8431 | # -*- coding: latin1 -*-
################################################################################################
#
#
import calc
import sys, time, json, os, os.path
import numpy as np
from math import*
import networkx as nx
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import matplotlib as mpl
import pylab
import numpy as np
import plotly
import plotly.plotly as py
import plotly.graph_objs as go
import pandas as pd
import pandas_datareader
from pandas_datareader import data, wb
from pandas import Series, DataFrame
pd.__version__
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
## Status - Versão 1 - Plotar os dados da co-ocorrência dos dados entre as camadas usando grouped Boxplot
##
##
## ID_ego a:amigos s:seguidores r:retuítes l:likes m:menções
##
## ID_ego as:data sr:data rl:data lm:data ma:data - TXT
## {ID_ego:{ as:data sr:data rl:data lm:data ma:data} - JSON
######################################################################################################################################################################
######################################################################################################################################################################
#
# Cria diretórios
#
######################################################################################################################################################################
def create_dirs(x):
    """Create directory tree ``x`` if it does not exist (idempotent).

    Uses EAFP instead of the original exists()-then-makedirs() check, which
    was racy: another process could create the directory between the two
    calls and makedirs() would then raise.
    """
    try:
        os.makedirs(x)
    except OSError:
        # Already existing directory is fine; re-raise real failures.
        if not os.path.isdir(x):
            raise
#####################################################################################################################################################################
# Grouped Box Plot
######################################################################################################################################################################
def box_plot(metric,_aa,_as,_ar,_al,_am,_sa,_ss,_sr,_sl,_sm,_ra,_rs,_rr,_rl,_rm,_la,_ls,_lr,_ll,_lm,_ma,_ms,_mr,_ml,_mm,title):
    """Render a grouped box plot of top-k co-occurrence ratios with plotly.

    Only the retweet (*r), like (*l) and mention (*m) columns are plotted
    (the follow/follower columns were commented out in the original); the
    unused parameters are kept so the call signature stays unchanged.
    Writes an HTML file under the module-level ``output_dir``.
    """
    print("\nCriando Box Plot...")
    print("Salvando dados em: "+str(output_dir)+"\n")
    def _percent(values):
        # Scale 0..1 ratios to percentages (mirrors the original float(i*100)).
        return [float(v * 100) for v in values]
    # One x-category label per sample; plotly pairs each trace's y-values
    # with these labels positionally, exactly as the original loops did.
    x = ["Retweet"] * len(_ar) + ["Like"] * len(_al) + ["Mention"] * len(_am)
    y0 = _percent(_ar) + _percent(_al) + _percent(_am)  # Follow layer
    y1 = _percent(_rr) + _percent(_rl) + _percent(_rm)  # Retweet layer
    y2 = _percent(_lr) + _percent(_ll) + _percent(_lm)  # Like layer
    y3 = _percent(_mr) + _percent(_ml) + _percent(_mm)  # Mention layer
    trace0 = go.Box(y=y0,x=x,name='Follow',boxmean='sd')
    trace1 = go.Box(y=y1,x=x,name='Retweet',boxmean='sd')
    trace2 = go.Box(y=y2,x=x,name='Like',boxmean='sd')
    trace3 = go.Box(y=y3,x=x,name='Mention',boxmean='sd')
    data = [trace0, trace1, trace2, trace3]
    layout = go.Layout(title=title,yaxis=dict(title='Top-K Alters',zeroline=False),boxmode='group')
    fig = go.Figure(data=data, layout=layout)
    plotly.offline.plot(fig, filename=output_dir+str(metric)+"_box_plot.html",auto_open=True)
    print(" - OK! Imagem salva em: "+str(output_dir))
    print("")
######################################################################################################################################################################
#
# Plotar Gŕaficos relacionados aos dados
#
######################################################################################################################################################################
def prepare(metric, file, title):
    """Load the per-ego co-occurrence JSON and forward grouped series to box_plot.

    The JSON maps ego-id -> {layer-pair key -> value}; each two-letter key
    (e.g. "ar" = follow x retweet) is accumulated into its own list. An
    unknown key aborts the script, as in the original.
    """
    with open(file, 'r') as f:
        data = json.load(f)
    # Layer-pair keys in the exact positional order box_plot() expects.
    keys = ("aa", "as", "ar", "al", "am",
            "sa", "ss", "sr", "sl", "sm",
            "ra", "rs", "rr", "rl", "rm",
            "la", "ls", "lr", "ll", "lm",
            "ma", "ms", "mr", "ml", "mm")
    series = dict((k, []) for k in keys)
    for k, v in data.iteritems():
        for key, value in v.iteritems():
            if key in series:
                series[key].append(value)
            else:
                print ("Rede inválida")
                sys.exit()
    # Unpack positionally: metric, the 25 series, then the plot title.
    box_plot(*([metric] + [series[k] for k in keys] + [title]))
######################################################################################################################################################################
######################################################################################################################################################################
#
# Método principal do programa.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
    """Entry point: plot the top-k co-occurrence box plot if its JSON input exists.

    Reads ``<data_dir>/topk_co_occurrence.json`` (``data_dir`` is a module-level
    global defined below — NOTE(review): as written below it has a syntax error
    and references an undefined ``type_graphs``; confirm intended value).
    """
    os.system('clear')
    print "################################################################################"
    print" "
    print" Plotar gráficos sobre as métricas e propriedades calculadas - Multilayer "
    print" "
    print"#################################################################################"
    print
    metric = "topk_co_occurrence"
    title = "Top-K Intersection over Alters Set"
    # Verify that the input file exists before trying to plot it.
    if not os.path.exists(str(data_dir)+str(metric)+".json"):
        print ("Impossível localizar arquivo: "+str(data_dir)+str(metric)+".json")
    else:
        file = str(data_dir)+str(metric)+".json"
        prepare(metric,file,title)
    print("\n######################################################################\n")
    print("Script finalizado!")
    print("\n######################################################################\n")
######################################################################################################################################################################
#
# INÍCIO DO PROGRAMA
#
######################################################################################################################################################################
print "\n#######################################################################\n"
type_graphs1 = "graphs_with_ego"
type_graphs2 = "graphs_without_ego"
singletons1 = "full"
singletons2 = "without_singletons"
#######################################################################
data_dir = "/home/amaury/Dropbox/net_structure_hashmap/multilayer/"+str(type_graphs)"+/unweighted_directed/"
output_dir = "/home/amaury/Dropbox/net_structure_hashmap_statistics/multilayer/"+str(type_graphs)"+/unweighted_directed/"
#Executa o método main
if __name__ == "__main__": main() | gpl-3.0 |
maysara/pandora_image | pandora/title/migrations/0001_initial.py | 1690 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the ``title`` app: creates the Title table."""

    def forwards(self, orm):
        """Apply: create the ``title_title`` table with its six columns."""
        # Adding model 'Title'
        db.create_table('title_title', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=1000)),
            ('sorttitle', self.gf('django.db.models.fields.CharField')(max_length=1000)),
            ('sortsorttitle', self.gf('django.db.models.fields.CharField')(max_length=1000)),
            ('edited', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('imdbId', self.gf('django.db.models.fields.CharField')(max_length=7, blank=True)),
        ))
        # Tell South/Django the model's table now exists.
        db.send_create_signal('title', ['Title'])

    def backwards(self, orm):
        """Revert: drop the ``title_title`` table."""
        # Deleting model 'Title'
        db.delete_table('title_title')

    # Frozen ORM snapshot used by South while running this migration.
    models = {
        'title.title': {
            'Meta': {'object_name': 'Title'},
            'edited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'imdbId': ('django.db.models.fields.CharField', [], {'max_length': '7', 'blank': 'True'}),
            'sortsorttitle': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
            'sorttitle': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
            'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1000'})
        }
    }
complete_apps = ['title'] | gpl-3.0 |
dfmcphee/simplifeed-rails | app/controllers/api/v1/tokens_controller.rb | 1626 | class Api::V1::TokensController < ApplicationController
skip_before_filter :verify_authenticity_token
respond_to :json
def create
email = params[:email]
password = params[:password]
if request.format != :json
render :status=>406, :json=>{:message=>"The request must be json"}
return
end
if email.nil? or password.nil?
render :status=>400,
:json=>{:message=>"The request must contain the user email and password.", :callback => params[:callback]}
return
end
@user=User.find_by_email(email.downcase)
if @user.nil?
logger.info("User #{email} failed signin, user cannot be found.")
render :status=>401, :json=>{:message=>"Invalid email or password.", :callback => params[:callback]}
return
end
@user.ensure_authentication_token!
if not @user.valid_password?(password)
logger.info("User #{email} failed signin, password \"#{password}\" is invalid")
render :status=>401, :json=>{:message=>"Invalid email or password.", :callback => params[:callback]}
else
render :status=>200, :json=>{:token=>User.authentication_token, :callback => params[:callback]}
end
end
def destroy
@user=User.find_by_authentication_token(params[:id])
if @user.nil?
logger.info("Token not found.")
render :status=>404, :json=>{:message=>"Invalid token.", :callback => params[:callback]}
else
@user.reset_authentication_token!
render :status=>200, :json=>{:token=>params[:id], :callback => params[:callback]}
end
end
end | gpl-3.0 |
NeoTerm/NeoTerm | chrome-tabs/src/main/java/de/mrapp/android/tabswitcher/iterator/AbstractTabItemIterator.java | 6656 | /*
* Copyright 2016 - 2017 Michael Rapp
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.mrapp.android.tabswitcher.iterator;
import androidx.annotation.NonNull;
import de.mrapp.android.tabswitcher.model.TabItem;
import static de.mrapp.android.util.Condition.ensureAtLeast;
/**
* An abstract base class for all iterators, which allow to iterate items of the type {@link
* TabItem}.
*
* @author Michael Rapp
* @since 0.1.0
*/
public abstract class AbstractTabItemIterator implements java.util.Iterator<TabItem> {

    /**
     * An abstract base class of all builders, which allows to configure and create instances of the
     * class {@link AbstractTabItemIterator}.
     */
    public static abstract class AbstractBuilder<BuilderType extends AbstractBuilder<?, ProductType>, ProductType extends AbstractTabItemIterator> {

        /**
         * True, if the tabs should be iterated in reverse order, false otherwise.
         */
        protected boolean reverse;

        /**
         * The index of the first tab, which should be iterated.
         */
        protected int start;

        /**
         * Returns a reference to the builder itself. It is implicitly cast to the generic type
         * BuilderType.
         *
         * @return The builder as an instance of the generic type BuilderType
         */
        @SuppressWarnings("unchecked")
        private BuilderType self() {
            return (BuilderType) this;
        }

        /**
         * Creates a new builder, which allows to configure and create instances of the class {@link
         * AbstractTabItemIterator}.
         */
        protected AbstractBuilder() {
            // Defaults: forward iteration from the beginning (-1 = iterate all tabs).
            reverse(false);
            start(-1);
        }

        /**
         * Creates the iterator, which has been configured by using the builder.
         *
         * @return The iterator, which has been created, as an instance of the class {@link
         * TabItemIterator}. The iterator may not be null
         */
        @NonNull
        public abstract ProductType create();

        /**
         * Sets, whether the tabs should be iterated in reverse order, or not.
         *
         * @param reverse True, if the tabs should be iterated in reverse order, false otherwise
         * @return The builder, this method has been called upon, as an instance of the generic type
         * BuilderType. The builder may not be null
         */
        @NonNull
        public BuilderType reverse(final boolean reverse) {
            this.reverse = reverse;
            return self();
        }

        /**
         * Sets the index of the first tab, which should be iterated.
         *
         * @param start The index, which should be set, as an {@link Integer} value or -1, if all tabs
         *              should be iterated Builder}. The builder may not be null
         * @return The builder, this method has been called upon, as an instance of the generic type
         * BuilderType. The builder may not be null
         */
        @NonNull
        public BuilderType start(final int start) {
            ensureAtLeast(start, -1, "The start must be at least -1");
            this.start = start;
            return self();
        }

    }

    /**
     * True, if the tabs should be iterated in reverse order, false otherwise.
     */
    private boolean reverse;

    /**
     * The index of the next tab.
     */
    private int index;

    /**
     * The current tab item.
     */
    private TabItem current;

    /**
     * The previous tab item.
     */
    private TabItem previous;

    /**
     * The first tab item.
     */
    private TabItem first;

    /**
     * The method, which is invoked on subclasses in order to retrieve the total number of available
     * items.
     *
     * @return The total number of available items as an {@link Integer} value
     */
    public abstract int getCount();

    /**
     * The method, which is invoked on subclasses in order to retrieve the item, which corresponds
     * to a specific index.
     *
     * @param index The index of the item, which should be returned, as an {@link Integer} value
     * @return The item, which corresponds to the given index, as an instance of the class {@link
     * TabItem}. The tab item may not be null
     */
    @NonNull
    public abstract TabItem getItem(final int index);

    /**
     * Initializes the iterator.
     *
     * @param reverse True, if the tabs should be iterated in reverse order, false otherwise
     * @param start   The index of the first tab, which should be iterated, as an {@link Integer} value or
     *                -1, if all tabs should be iterated
     */
    protected final void initialize(final boolean reverse, final int start) {
        ensureAtLeast(start, -1, "The start must be at least -1");
        this.reverse = reverse;
        this.previous = null;
        this.index = start != -1 ? start : (reverse ? getCount() - 1 : 0);
        // Seed 'current' with the item adjacent to the start position (before it
        // when iterating forward, after it when iterating in reverse), so that
        // previous() is correct after the first call to next().
        int previousIndex = reverse ? this.index + 1 : this.index - 1;

        if (previousIndex >= 0 && previousIndex < getCount()) {
            this.current = getItem(previousIndex);
        } else {
            this.current = null;
        }
    }

    /**
     * Returns the tab item, which corresponds to the first tab.
     *
     * @return The tab item, which corresponds to the first tab, as an instance of the class {@link
     * TabItem} or null, if no tabs are available
     */
    public final TabItem first() {
        return first;
    }

    /**
     * Returns the tab item, which corresponds to the previous tab.
     *
     * @return The tab item, which corresponds to the previous tab, as an instance of the class
     * {@link TabItem} or null, if no previous tab is available
     */
    public final TabItem previous() {
        return previous;
    }

    /**
     * Returns the tab item, which corresponds to the next tab.
     *
     * @return The tab item, which corresponds to the next tab, as an instance of the class {@link
     * TabItem} or null, if no next tab is available
     */
    public final TabItem peek() {
        return index >= 0 && index < getCount() ? getItem(index) : null;
    }

    @Override
    public final boolean hasNext() {
        // 'index' walks downward when reversed, upward otherwise.
        if (reverse) {
            return index >= 0;
        } else {
            return getCount() - index >= 1;
        }
    }

    @Override
    public final TabItem next() {
        // NOTE: returns null when exhausted instead of throwing
        // NoSuchElementException (non-standard java.util.Iterator behavior);
        // callers must null-check the result.
        if (hasNext()) {
            previous = current;

            if (first == null) {
                first = current;
            }

            current = getItem(index);
            index += reverse ? -1 : 1;
            return current;
        }

        return null;
    }

}
| gpl-3.0 |
CCWI/SocialMediaCrawler | src/edu/hm/cs/smc/database/helperddl/GenerateDdlApp.java | 558 | package edu.hm.cs.smc.database.helperddl;
import java.io.File;
import java.io.IOException;
public class GenerateDdlApp {

	/**
	 * Exports the DDL for all mapped entities in the models package to the
	 * console, using the MySQL dialect.
	 */
	public static void main(String[] args) throws IOException {
		HibernateExporter ddlExporter =
				new HibernateExporter("org.hibernate.dialect.MySQLDialect",
						"edu.hm.cs.smc.database.models.models");
		// Alternatives kept from the original for reference:
		//   new HibernateExporter("org.hibernate.dialect.HSQLDialect", "com.geowarin.model")
		//   ddlExporter.setGenerateDropQueries(true);
		//   ddlExporter.export(new File("schema.sql"));
		ddlExporter.exportToConsole();
	}
}
| gpl-3.0 |
Sisqui/lungcheck_django | lungcheck/doctors/models/doctor.py | 2935 | from django.contrib.auth.models import User
from django.db import models
from datetime import datetime
from django.core.mail import send_mail
from json import dumps
from random import getrandbits
from datetime import datetime
#
# Doctor manager
# Create function that first creates the user
#
class DoctorManager(models.Manager) :
    """Manager that creates a Doctor together with its backing auth User."""

    def create_doctor(self, username, password, mail, first_name='', last_name='', birth_year='', gender='', country='', hospital='', license='') :
        """Create the auth User, then the Doctor profile, and send the
        activation e-mail (side effect). Returns the new Doctor."""
        auth_user = User.objects.create_user(
            username,
            email=mail,
            password=password,
            first_name=first_name,
            last_name=last_name,
            last_login=datetime.now()
        )

        doctor = self.create(
            user=auth_user,
            birth_year=birth_year,
            gender=gender,
            country=country,
            hospital=hospital,
            license=license
        )

        doctor.send_activation_mail()

        return doctor
#
# Doctor class
# Extend the default user class
# Add hospital and license
# Needs to be manually validated by staff
#
class Doctor(models.Model):
    """Doctor profile extending the default auth User.

    Adds hospital/license data plus e-mail confirmation and manual staff
    validation flags.
    """
    MALE = 'M'
    FEMALE = 'F'
    OTHER = 'O'
    GENDER_CHOICES = (
        (MALE, 'Male'),
        (FEMALE, 'Female'),
        (OTHER, 'Other'),
    )
    # Backing auth account; deleting the User deletes the profile.
    user = models.OneToOneField(
        User,
        on_delete=models.CASCADE
    )
    birth_year = models.IntegerField(
        null=True,
        blank=True,
    )
    gender = models.CharField(
        max_length=1,
        choices=GENDER_CHOICES,
        null=True,
    )
    country = models.CharField(
        max_length=30,
        null=True,
        blank=True,
    )
    hospital = models.CharField(
        max_length=100,
        null=True,
        blank=True
    )
    license = models.CharField(
        max_length=100,
        null=True,
        blank=True
    )
    # Random code embedded in the activation link (see send_activation_mail).
    mail_code = models.CharField(
        max_length=200,
        null=True,
        blank=True
    )
    mail_confirmed = models.BooleanField(
        default=False
    )
    # Set manually by staff after checking the license.
    validated = models.BooleanField(
        default=False
    )
    objects = DoctorManager()

    def __str__(self):
        # BUG FIX: get_full_name is a method; the original interpolated the
        # bound method object instead of calling it.
        return "{} - {}".format(
            self.user.get_full_name(),
            self.license)

    def json(self):
        """Return a JSON string with the combined user + doctor fields."""
        # NOTE(review): strftime will raise if last_login is None — confirm
        # callers only serialize users that have logged in.
        data = {
            "user_id" : self.user.id,
            "username": self.user.username,
            "first_name": self.user.first_name,
            "last_name" : self.user.last_name,
            "mail" : self.user.email,
            "is_active" : self.user.is_active,
            "last_login" : self.user.last_login.strftime("%c"),
            "date_joined" : self.user.date_joined.strftime("%c"),
            "doctor_id" : self.id,
            "country" : self.country,
            "gender" : self.gender,
            "birth_year" : self.birth_year,
            "license" : self.license,
            "hospital" : self.hospital,
            "mail_code" : self.mail_code,
            "mail_confirmed" : self.mail_confirmed,
            "validated" : self.validated
        }
        return dumps(data)

    def send_activation_mail(self):
        """E-mail the user an account-activation link containing a random code."""
        # NOTE(review): getrandbits is not cryptographically secure — consider
        # the `secrets` module for activation codes.
        code = getrandbits(128)
        self.mail_code = code
        # NOTE(review): mail_code is assigned but never save()d here — confirm
        # a caller persists it, otherwise the confirmation link cannot match.
        url = "lungcheck.tk/app/confirm_mail?uid="+str(self.user.id)+"&code="+str(code)
        send_mail(
            'Activate your account',
            'Click in the following link to activate your account.\n\n'+url,
            'lungcheckturku@gmail.com',
            [self.user.email],
            fail_silently=True,
        )
aosorgin/gotools | tools/zpf/zpflib/processor.go | 2582 | /*
Author: Alexey Osorgin (alexey.osorgin@gmail.com)
Copyright: Alexey Osorgin, 2017
Brief: Compressing of files
*/
package zpflib
import
(
"archive/zip"
"bufio"
"compress/flate"
"io"
"path/filepath"
"os"
"fmt"
"runtime"
"time"
)
// compress_file zips the single file at srcPath into a one-entry zip archive
// at destPath and always signals completion on processedSignal, even when
// returning early with an error. It returns the number of uncompressed bytes
// copied into the archive entry.
func compress_file(srcPath string, destPath string, processedSignal *chan bool) (writen int64, err error) {
	// Notify the coordinator no matter how we exit.
	defer func() { *processedSignal <- true }()

	src, err := os.Open(srcPath)
	if err != nil {
		return
	}
	defer src.Close()

	rawDst, err := os.Create(destPath)
	if err != nil {
		return
	}
	defer rawDst.Close()

	dst := bufio.NewWriter(rawDst)
	defer dst.Flush()

	zipWriter := zip.NewWriter(dst)
	defer zipWriter.Close()

	// Map the package-level compression option onto a flate level.
	zipWriter.RegisterCompressor(zip.Deflate, func(out io.Writer) (io.WriteCloser, error) {
		switch Options.Compression {
		case CompressionFast:
			return flate.NewWriter(out, flate.BestSpeed)
		case CompressionBest:
			return flate.NewWriter(out, flate.BestCompression)
		default:
			return flate.NewWriter(out, flate.DefaultCompression)
		}
	})

	_, fileName := filepath.Split(srcPath)
	zippedFile, err := zipWriter.Create(fileName)
	if err != nil {
		// BUG FIX: the original ignored this error and would hand a nil
		// writer to io.Copy if Create failed.
		return
	}
	writen, err = io.Copy(zippedFile, src)
	return
}
// Compress walks srcPath and zips every regular file into a mirrored tree
// under dstPath (one .zip per file), running up to NumCPU+1 compressions
// concurrently and printing progress once per second.
// NOTE(review): the named return `err` is never assigned, so this always
// returns nil even when individual files fail — confirm whether per-file
// errors should be surfaced.
func Compress(srcPath string, dstPath string) (err error) {
	// Buffered channel acting as a concurrency semaphore for workers.
	processingQueue := make(chan bool, runtime.NumCPU() + 1)
	processedSignal := make(chan bool)
	filesCount, filesProcessed := 0, 0
	completedSignal := make(chan bool)
	completed := false
	// Producer goroutine: walk the tree and spawn one compress_file per file.
	go func () {
		filepath.Walk(srcPath, func(path string, info os.FileInfo, err error) error {
			if info.IsDir() {
				return nil
			}
			relPath, err := filepath.Rel(srcPath, path)
			dirPath, _ := filepath.Split(relPath)
			os.MkdirAll(filepath.Join(dstPath, dirPath), os.ModeDir | 0755)
			if err == nil {
				filesCount++
				// Blocks when NumCPU+1 workers are already in flight.
				processingQueue <- true
				go compress_file(path, filepath.Join(dstPath, relPath + ".zip"), &processedSignal)
			}
			return nil
		})
		completedSignal <- true
	}()
	// Ticker used only for the once-per-second progress line.
	timeout := time.Tick(time.Second)
	for {
		select {
		case <-processedSignal:
			filesProcessed++
			<-processingQueue
			// Done only when the walk has finished AND every worker reported.
			if completed == true && filesCount == filesProcessed {
				fmt.Printf("\rProcessing/processed files: (%d/%d)   \nDone", filesCount, filesProcessed)
				return
			}
		case <- completedSignal:
			completed = true
			if filesCount == filesProcessed {
				fmt.Printf("\rProcessing/processed files: (%d/%d)   \nDone", filesCount, filesProcessed)
				return
			}
		case <-timeout:
			fmt.Printf("\rProcessing/processed files: (%d/%d)  ", filesCount, filesProcessed)
		}
	}
	// Unreachable: the for/select above only exits via the returns inside it.
	return
}
| gpl-3.0 |
jeromeetienne/neoip | src/neoip_crypto/skey/pkcs5_derivkey/utest/neoip_skey_pkcs5_derivkey_utest.hpp | 320 | /*! \file
\brief Header of the \ref neoip_skey_pkcs5_derivkey_utest.cpp
*/
#ifndef __NEOIP_SKEY_PKCS5_DERIVKEY_UTEST_HPP__
#define __NEOIP_SKEY_PKCS5_DERIVKEY_UTEST_HPP__
/* system include */
/* local include */
// Runs the PKCS#5 key-derivation unit tests; returns a status code
// (presumably 0 on success, per the project's utest convention — TODO confirm).
int neoip_skey_pkcs5_derivkey_utest();
#endif /* __NEOIP_SKEY_PKCS5_DERIVKEY_UTEST_HPP__ */
| gpl-3.0 |
A360RN/AgendaElectronica | AgendaElectronica/src/java/com/santamariaapostol/controller/CitacionController.java | 4923 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.santamariaapostol.controller;
import com.santamariaapostol.entity.Alumno;
import com.santamariaapostol.entity.Citacion;
import com.santamariaapostol.entity.Profesor;
import com.santamariaapostol.service.CitacionService;
import com.santamariaapostol.util.PageHelper;
import com.santamariaapostol.util.SessionStringHelpers;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
*
* @author alonsorn
*/
public class CitacionController extends HttpServlet {

    // Service layer handling citation persistence and queries.
    private final CitacionService citacionService;

    public CitacionController() {
        citacionService = new CitacionService();
    }

    /**
     * Handles GET requests. action=decide routes the user according to the
     * session's user type.
     *
     * @param request  servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException      if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String action = request.getParameter("action");
        // "constant".equals(action) is null-safe (getParameter may return null).
        if ("decide".equals(action)) {
            redireccionarPorTipoUsuario(request, response);
        }
    }

    /**
     * Handles POST requests. action=identificar_alumno stores the selected
     * student; action=nuevo creates a citation for that student.
     *
     * @param request  servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException      if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String action = request.getParameter("action");
        if ("identificar_alumno".equals(action)) {
            solicitarNuevaCitacion(request, response);
        } else if ("nuevo".equals(action)) {
            nuevaCitacion(request, response);
        }
    }

    // Loads the teacher's student list into the session, then redirects to it.
    private void mostrarListaAlumnos(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        HttpSession session = request.getSession();
        Profesor profesor = (Profesor) session.getAttribute(SessionStringHelpers.USUARIO);
        List<Alumno> alumnos = citacionService.listaDeAlumnos(profesor);
        session.setAttribute(SessionStringHelpers.LISTA_ALUMNOS, alumnos);
        response.sendRedirect(PageHelper.LISTADO_ALUMNOS_CITACION);
    }

    // Remembers the chosen student and redirects to the new-citation form.
    private void solicitarNuevaCitacion(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        HttpSession session = request.getSession();
        int idAlumno = Integer.parseInt(request.getParameter("idAlumno"));
        session.setAttribute(SessionStringHelpers.ID_ALUMNO, idAlumno);
        response.sendRedirect(PageHelper.NUEVA_CITACION);
    }

    // Persists a new citation from the logged-in teacher to the stored student.
    private void nuevaCitacion(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        HttpSession session = request.getSession();
        int idAlumno = Integer.parseInt(session.getAttribute(SessionStringHelpers.ID_ALUMNO).toString());
        String titulo = request.getParameter("titulo");
        String cuerpo = request.getParameter("cuerpo");

        Alumno alumno = new Alumno();
        alumno.setIdAlumno(idAlumno);
        Profesor profesor = (Profesor) session.getAttribute(SessionStringHelpers.USUARIO);

        Citacion citacion = new Citacion();
        citacion.setTitulo(titulo);
        citacion.setCuerpo(cuerpo);
        citacionService.nuevaCitacion(citacion, profesor, alumno);

        session.setAttribute(SessionStringHelpers.MESSAGE, SessionStringHelpers.CITACION_ENVIADA_MENSAJE);
        response.sendRedirect(PageHelper.LISTADO_ALUMNOS_CITACION);
    }

    // Routes the user to the appropriate page based on the session user type.
    private void redireccionarPorTipoUsuario(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        HttpSession session = request.getSession();
        String tipoUsuario = session.getAttribute(SessionStringHelpers.TIPO_USUARIO).toString();
        if (tipoUsuario.equals("profesor")) {
            // BUG FIX: mostrarListaAlumnos() already commits a redirect; the
            // original then called response.sendRedirect(LISTADO_ASISTENCIA)
            // again, which throws IllegalStateException on a committed response.
            mostrarListaAlumnos(request, response);
        } else if (tipoUsuario.equals("apoderado")) {
            response.sendRedirect(PageHelper.DASHBOARD);
        }
    }
}
| gpl-3.0 |
quasoft/adblockradio | adblockradio/storage/blacklist.py | 3688 | import re
from PyQt4 import QtGui
import dispatchers
from .base import BaseStorage
from ui.dlg_blacklist_editor import DlgBlacklistEditor
import ui
class BlacklistStorage(BaseStorage):
    """Manages patterns for blacklisting song titles.

    The storage is a simple text file with separate patterns (regular expressions)
    written on each line.

    Blacklist patterns are used for detecting advertisement blocks.
    When the radio stream contains a metadata tag for song title that matches
    any of those patterns, the whole block with this title is considered an
    advertisement. This is useful because many radio streams change the song
    title to the radio station name when an advertisement starts, which is
    easy to detect.
    """
    # Backing store: one regex pattern per line (managed by BaseStorage helpers).
    filename = "blacklist.txt"

    @classmethod
    def exists(cls, pattern):
        """Check if the pattern has already been added to the file"""
        return cls.is_added(pattern)

    @classmethod
    def add_pattern(cls, pattern):
        """Add a new pattern to the file, unless it already exists.

        Returns True when written; pops an info dialog on success.
        """
        if cls.exists(pattern):
            return False

        if not cls.add_line(pattern):
            return False

        QtGui.QMessageBox.question(
            None,
            'Information',
            "Pattern '%s' added to blacklist." % pattern,
            QtGui.QMessageBox.Ok
        )
        return True

    @classmethod
    def is_valid_blacklist_pattern(cls, pattern):
        """
        If value contains at least five characters (not spaces), consider this a valid pattern
        :param pattern: The regex pattern to check
        :return: True if pattern is valid
        """
        # NOTE(review): re.LOCALE with str patterns is Python-2 era; on
        # Python 3.6+ it raises ValueError — confirm target interpreter.
        matches = re.findall('[\S]+', pattern, re.LOCALE)
        if len(matches) < 5:
            return False

        # Reject overly broad patterns that would match empty or trivial titles.
        if any(re.search(pattern, t, re.LOCALE) for t in ['', ' ', 'JUST SOME TEST', "\n"]):
            return False

        return True

    @classmethod
    def is_blacklisted(cls, value):
        # True when any stored non-blank pattern matches the given title.
        return any(re.search(p, value, re.LOCALE) for p in cls.read_items() if p.strip())

    @classmethod
    def manage(cls):
        # Open the modal blacklist editor pre-filled with the current
        # patterns, then persist whatever the user left in it.
        editor = DlgBlacklistEditor(None)
        editor.set_items(BlacklistStorage.read_items())
        editor.setModal(True)
        editor.exec_()
        BlacklistStorage.write_items(editor.get_items())

    @classmethod
    def add_song_title(cls, title):
        """Interactively build and store a blacklist pattern from a song title.

        Wraps the title in '.*...*', lets the user edit the regex, validates
        it against trivial matches, and appends it to the blacklist file.
        """
        value = title.strip()

        # If value contains at least five characters (not spaces), consider this a valid pattern
        if not cls.is_valid_blacklist_pattern(value):
            return

        # Construct regex pattern from value: '.*value.*'
        # NOTE(review): the title is not re.escape()d, so regex metacharacters
        # in it are interpreted — presumably intentional; confirm.
        pattern = '.*' + value + '.*'

        # Ask user to modify pattern, if wanted
        pattern, ok = ui.utils.input_query(None, "Mark as advertisement - blacklist meta title", "Regex pattern:", pattern)
        if not ok:
            return

        # Make sure the user entered a pattern that would not match spaces or an otherwise valid title
        if not cls.is_valid_blacklist_pattern(pattern):
            QtGui.QMessageBox.question(
                None,
                'Warning',
                "Pattern rejected!\nIt is too broad and matches empty strings.",
                QtGui.QMessageBox.Ok
            )
            return

        if cls.exists(pattern):
            QtGui.QMessageBox.question(
                None,
                'Information',
                "Pattern already exists!",
                QtGui.QMessageBox.Ok
            )
            return

        cls.add_pattern(pattern)
cls.add_pattern(pattern)
# Wire the storage UI events to the blacklist handlers (import side effect).
dispatchers.storage.blacklist_song_clicked += BlacklistStorage.add_song_title
dispatchers.storage.manage_blacklist_clicked += BlacklistStorage.manage
| gpl-3.0 |
GhostWriterTNCS/OtakuManager | OtakuManager/websites/WebAnimex_IT.cpp | 2339 | #include "OMA.h"
#include "Website.h"
void Website::initialize_WebAnimex_IT() {
homepage = "http://webanimex.com/";
getEpisodesFunction = std::bind(&Website::getEpisodes_WebAnimex_IT, this);
goToEpisodeFunction = std::bind(&Website::goToEpisode_WebAnimex_IT, this, std::placeholders::_1,
std::placeholders::_2);
}
// Downloads the WebAnimex homepage and scrapes the "latest episodes" list
// into this->episodes. Returns true when the expected HTML markers were
// found, false otherwise (e.g. download failure or site layout change).
bool Website::getEpisodes_WebAnimex_IT() {
	QString html = MyUtils::urlToQString(homepage);
	// Episode entries live between these two markers in the page HTML.
	QString start = "<ul class=\"list list-episodes\">";
	QString end = "<!-- #content -->";
	if (html.contains(end)) {
		html = MyUtils::substring(html, start, end);
		// One list entry per "<h3>" occurrence (index 0 is the preamble).
		QStringList list = html.split("<h3>");
		for (int i = 1; i < list.size(); i++) {
			Episode episode;
			html = list[i];
			// Episode page URL from the first href attribute.
			html = MyUtils::substring(html, "href=\"", "\"");
			episode.url = html;
			html = list[i];
			// Strip all tags, then decode HTML entities to get the title.
			html = MyUtils::advancedReplace(html, "<", ">", "");
			episode.name = MyUtils::decodeHtml(html).trimmed();
			// Entries without a "Download" label have no download link.
			if (!episode.name.contains("Download"))
				episode.hasDownload = false;
			episode.name = MyUtils::substring(episode.name, "", " - [SubITA]");
			episodes.push_back(episode);
		}
		return true;
	}
	return false;
}
// Resolves the final link for an episode, depending on the requested link
// type (streaming / download / anime info), by scraping the episode page.
// Returns an empty string when the page lacks the expected markers.
QString Website::goToEpisode_WebAnimex_IT(Episode* episode, QString type) {
	QString s;
	if (type == OMA::linkTypes[LinkTypes::streaming]) {
		// Extract the streaming href and unwrap the "red.html?af=" redirect.
		s = MyUtils::urlToQString(episode->url);
		s = MyUtils::substring(s, "class=\"box-link-streaming");
		s = MyUtils::substring(s, "href=\"");
		s = MyUtils::substring(s, "", "\"");
		s = MyUtils::substring(s, "red.html?af=");
	} else if (type == OMA::linkTypes[LinkTypes::download]) {
		// Same unwrapping for the download link, then follow the redirect
		// and rewrite the ".php?file=" wrapper into a direct http URL.
		s = MyUtils::urlToQString(episode->url);
		s = MyUtils::substring(s, "class=\"box-link-download");
		s = MyUtils::substring(s, "href=\"", "\"");
		s = MyUtils::substring(s, "red.html?af=");
		s = MyUtils::redirectedUrlQt(s);
		/*s = MyUtils::advancedReplace(
		    s, "", ".php?file=", "https://anon.to/?http://animeforce.stream/?u=http://");*/
		s = MyUtils::advancedReplace(s, "", ".php?file=", "http://");
	} else if (type == OMA::linkTypes[LinkTypes::animeInfo]) {
		// The anime-info page is the first breadcrumb link, when present.
		s = MyUtils::urlToQString(episode->url);
		if (s.contains("class=\"breadcrumb-list\"")) {
			s = MyUtils::substring(s, "class=\"breadcrumb-list\"");
			s = MyUtils::substring(s, "/ul");
			s = MyUtils::substring(s, "href=\"", "\"");
		}
	}
	// Drop a trailing '&' left over from query-string extraction.
	if (s.endsWith("&")) {
		s.truncate(s.length() - 1);
	}
	return s;
}
| gpl-3.0 |
mescobal/geined | caja_curso.py | 7448 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import cgitb; cgitb.enable()
import pagina
import htm
import datos
import cgi
import funciones
def nuevo(frm):
    """Render the form that registers a cash entry from course payments."""
    # Request parameters and the query string used for the round trip.
    deposito_id = frm.getvalue("deposito_id")
    caja_id = frm.getvalue("caja_id")
    cola = "deposito_id=%s&caja_id=%s" % (deposito_id, caja_id)
    # Course income accounts: rubro 411*, leaf level only.
    cuentas = datos.Tabla("cuentas")
    cuentas.filtro = "rubro like '411%' and nivel=0"
    cuentas.orden = "rubro"
    cuentas.filtrar()
    pag = pagina.Pagina("Entrada a caja por cursos", 5)
    htm.form_edicion("Entrada a caja por cursos", "caja_curso.py?accion=agregar&" + cola)
    print(htm.hidden("tipo", "entrada"))
    htm.input_combo("Rubro:", "cuenta_id", cuentas.resultado, ["id", "nombre"], "Elegir...")
    # One numeric input per payment method, in fixed display order.
    for etiqueta, campo in (("Efectivo:", "efectivo"), ("Cheques:", "cheques"),
                            ("Vouchers:", "vouchers"), ("Otros:", "otros")):
        htm.input_numero(etiqueta, campo, "")
    htm.botones("caja_ver.php?accion=listado&caja_id=%s&deposito_id=%s"
                % (caja_id, deposito_id))
    print("</td></tr>")
    htm.form_edicion_fin()
    pag.fin()
def devolucion(frm):
    """Render the form that registers a refund of a course cash entry."""
    # Request parameters and the query string used for the round trip.
    deposito_id = frm.getvalue("deposito_id")
    caja_id = frm.getvalue("caja_id")
    cola = "deposito_id=%s&caja_id=%s" % (deposito_id, caja_id)
    # Course income accounts: rubro 411*, leaf level only.
    cuentas = datos.Tabla("cuentas")
    cuentas.filtro = "rubro like '411%' and nivel=0"
    cuentas.orden = "rubro"
    cuentas.filtrar()
    pag = pagina.Pagina("Devolución de entrada a caja por cursos", 5)
    htm.form_edicion("Devolución de entrada a caja por cursos", "caja_curso.py?accion=agregar&" + cola)
    print(htm.hidden("tipo", "salida"))
    htm.input_combo("Rubro:", "cuenta_id", cuentas.resultado, ["id", "nombre"], "Elegir...")
    # One numeric input per payment method, in fixed display order.
    for etiqueta, campo in (("Efectivo:", "efectivo"), ("Cheques:", "cheques"),
                            ("Vouchers:", "vouchers"), ("Otros:", "otros")):
        htm.input_numero(etiqueta, campo, "")
    htm.botones("caja_ver.php?accion=listado&caja_id=%s&deposito_id=%s"
                % (caja_id, deposito_id))
    print("</td></tr>")
    htm.form_edicion_fin()
    pag.fin()
def _asiento_doble(fecha, detalle, rubro, cuenta_id, monto, caja_id, entrada):
    """Insert the debit/credit pair of ledger rows for one payment method.

    Does nothing when ``monto`` is zero.  For an entry the cash account
    (looked up by ``rubro``) is debited and the course account credited;
    for a refund the amounts arrive already negated, so the pair is
    reversed and ``-monto`` keeps the ledger figures positive.
    """
    if monto == 0:
        return
    cuentas = datos.Tabla("cuentas")
    cuentas.buscar("rubro", rubro)
    medio_id = cuentas.registro["id"]
    if entrada:
        transaccion(fecha, detalle, medio_id, monto, 0, caja_id)
        transaccion(fecha, detalle, cuenta_id, 0, monto, caja_id)
    else:
        transaccion(fecha, detalle, cuenta_id, -monto, 0, caja_id)
        transaccion(fecha, detalle, medio_id, 0, -monto, caja_id)
def agregar(frm):
    """Persist a course cash movement (entry or refund) and its ledger rows.

    Inserts one ``mov_caja`` row with the submitted amounts and, for every
    non-zero payment method, a balanced debit/credit pair in
    ``transacciones``.  Finally emits an HTML meta-refresh back to the
    cash-register listing.
    """
    # --- form values (missing numeric fields default to 0) ---------------
    deposito_id = int(frm.getvalue("deposito_id"))
    efectivo = float(frm.getvalue("efectivo", 0))
    cheques = float(frm.getvalue("cheques", 0))
    vouchers = float(frm.getvalue("vouchers", 0))
    otros = float(frm.getvalue("otros", 0))
    cuenta_id = frm.getvalue("cuenta_id")
    caja_id = frm.getvalue("caja_id")
    tipo = frm.getvalue("tipo")
    cola = "deposito_id=" + str(deposito_id) + "&caja_id=" + str(caja_id)
    # --- database lookups -------------------------------------------------
    cajas = datos.Tabla("cajas")
    cajas.ir_a(caja_id)
    cuentas = datos.Tabla("cuentas")
    cuentas.ir_a(cuenta_id)
    # A refund ("salida") is stored with negative amounts.
    detalle = "Ingreso por cursos: " + cuentas.registro["nombre"]
    if tipo == "salida":
        efectivo, cheques, vouchers, otros = -efectivo, -cheques, -vouchers, -otros
        detalle = "Devolución por cursos: " + cuentas.registro["nombre"]
    # --- cash-register movement -------------------------------------------
    mov_caja = datos.Tabla("mov_caja")
    mov_caja.nuevo()
    mov_caja.registro["caja_id"] = caja_id
    mov_caja.registro["detalle"] = detalle
    mov_caja.registro["efectivo"] = efectivo
    mov_caja.registro["cheques"] = cheques
    mov_caja.registro["vouchers"] = vouchers
    mov_caja.registro["otros"] = otros
    mov_caja.insertar()
    # --- accounting entries -----------------------------------------------
    fecha = cajas.registro["fecha"]
    # Ledger account numbers (rubros) per payment method
    # (efectivo, cheques, vouchers, otros), keyed by deposit.
    rubros_por_deposito = {
        2: (111011, 111021, 113002, 111025),
        3: (111012, 111022, 113003, 111026),
    }
    rubro_efectivo, rubro_cheques, rubro_vouchers, rubro_otros = \
        rubros_por_deposito.get(deposito_id, (111010, 111020, 113001, 111024))
    es_entrada = tipo == "entrada"
    _asiento_doble(fecha, detalle, rubro_efectivo, cuenta_id, efectivo,
                   caja_id, es_entrada)
    _asiento_doble(fecha, detalle, rubro_cheques, cuenta_id, cheques,
                   caja_id, es_entrada)
    _asiento_doble(fecha, detalle, rubro_vouchers, cuenta_id, vouchers,
                   caja_id, es_entrada)
    _asiento_doble(fecha, detalle, rubro_otros, cuenta_id, otros,
                   caja_id, es_entrada)
    # --- redirect back to the listing --------------------------------------
    htm.inicio()
    print('<META HTTP-EQUIV="Refresh" CONTENT="0;URL=caja_ver.php?accion=listado&' + str(cola) + '">')
def transaccion(fecha, detalle, cuenta_id, debe, haber, documento):
    """Insert one accounting row into the ``transacciones`` table."""
    fila = datos.Tabla("transacciones")
    fila.nuevo()
    # Dates go to MySQL format; every other value is stored verbatim.
    campos = {
        "fecha": funciones.fecha_a_mysql(fecha),
        "detalle": detalle,
        "cuenta_id": cuenta_id,
        "debe": debe,
        "haber": haber,
        "documento_id": documento,
    }
    for campo, valor in campos.items():
        fila.registro[campo] = valor
    fila.insertar()
if __name__ == "__main__":
    # CGI entry point: dispatch on the "accion" parameter,
    # falling back to the entry form when absent or unknown.
    form = cgi.FieldStorage()
    acciones = {"agregar": agregar, "devolucion": devolucion}
    acciones.get(form.getvalue("accion", "nuevo"), nuevo)(form)
| gpl-3.0 |