code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
package com.fr.design.scrollruler;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import com.fr.base.BaseUtils;
import com.fr.base.GraphHelper;
import com.fr.base.ScreenResolution;
import com.fr.base.Style;
import com.fr.base.Utils;
import com.fr.design.file.HistoryTemplateListPane;
public class VerticalRulerUI extends RulerUI{
/**
 * Creates the UI delegate that paints a vertical ruler.
 *
 * @param rulerComponent the scroll-ruler component this UI paints for
 */
public VerticalRulerUI(ScrollRulerComponent rulerComponent) {
    super(rulerComponent);
}
/**
 * Paints the unit-based (e.g. cm/inch) vertical ruler.
 *
 * Iterates over logical ruler units scaled by {@code ratio}: every
 * SCALE_10-th unit gets a full-width tick plus a rotated numeric label,
 * every SCALE_5-th a medium tick, and everything else a short tick.
 * Finally draws the right border line and the top separator.
 *
 * @param g        graphics context to paint into
 * @param showText divisor applied to the unit index before rendering the label
 * @param extra    scroll offset in pixels that has to be subtracted from each tick position
 * @param size     size of the ruler component
 * @param ratio    zoom ratio applied to the logical unit range
 */
@Override
protected void paintRuler(Graphics g, int showText, int extra, Dimension size, int ratio) {
    int k = pxToLength(extra) * ratio;
    for (int i = k; i < (pxToLength(size.height + extra) + 1) * ratio; i++) {
        g.setColor(BaseRuler.UNIT_SIGN_COLOR);
        if (i % BaseRuler.SCALE_10 == 0) {
            // Scale factor between the template's configured resolution and
            // the actual screen resolution; labels are shown in template units.
            double times = (double) HistoryTemplateListPane.getInstance().getCurrentEditingTemplate().getJTemplateResolution() / ScreenResolution.getScreenResolution();
            String text = Utils.convertNumberStringToString(Math.round((i / times) / showText));
            // Major tick: spans the full ruler width.
            GraphHelper.drawLine(g, size.width, toPX(i) / ratio - extra, 0, toPX(i) / ratio - extra);
            // Paint the label into a clipped sub-context, rotated by
            // NUMBER_90 degrees (constants defined in BaseRuler).
            Graphics2D gg = (Graphics2D) g.create(0, (int) (toPX(i) / ratio - extra + 1), size.width, BaseRuler.NUMBER_99);
            BaseUtils.drawStringStyleInRotation(gg, BaseRuler.NUMBER_11, BaseRuler.NUMBER_100, text, Style.getInstance().deriveVerticalAlignment(1).deriveRotation(
                    BaseRuler.NUMBER_90).deriveFRFont(BaseRuler.TEXT_FONT), ScreenResolution.getScreenResolution());
            gg.dispose();
        } else if (i % BaseRuler.SCALE_5 == 0) {
            // Medium tick.
            GraphHelper.drawLine(g, size.width, toPX(i) / ratio - extra, BaseRuler.NUMBER_11, toPX(i) / ratio - extra);
        } else {
            // Minor tick.
            GraphHelper.drawLine(g, size.width, toPX(i) / ratio - extra, BaseRuler.NUMBER_13, toPX(i) / ratio - extra);
        }
    }
    // Right border of the ruler and top separator line.
    GraphHelper.drawLine(g, size.width - 1, 0, size.width - 1, size.height);
    g.setColor(BaseRuler.STRAR_BG);
    GraphHelper.drawLine(g, 0, 0, size.width, 0);
}
/**
 * Paints the point-based vertical ruler.
 *
 * Walks the visible range in steps of {@code unit}: every SCALE_100-th
 * point gets a full-width tick plus a rotated numeric label, every
 * SCALE_50-th a medium tick, everything else a short tick. Finally draws
 * the right border line and the top separator.
 *
 * @param g     graphics context to paint into
 * @param extra scroll offset in pixels subtracted from each tick position
 * @param size  size of the ruler component
 * @param unit  step width (in points) between consecutive ticks
 */
@Override
protected void paintPTRuler(Graphics g, int extra, Dimension size, int unit) {
    int k = pxToLength(extra);
    // Start on the first multiple of `unit` at or below the scroll offset.
    for (int i = unit * (k/unit); i < pxToLength(size.height + extra); i += unit) {
        g.setColor(BaseRuler.UNIT_SIGN_COLOR);
        if (i % BaseRuler.SCALE_100 == 0) {
            // Major tick with rotated label (constants defined in BaseRuler).
            GraphHelper.drawLine(g, size.width, toPX(i) - extra, 0, toPX(i) - extra);
            String text = Utils.convertNumberStringToString(i);
            Graphics2D gg = (Graphics2D) g.create(0, (int) (toPX(i) - extra + 1), size.width, BaseRuler.NUMBER_99);
            BaseUtils.drawStringStyleInRotation(gg, BaseRuler.NUMBER_11, BaseRuler.NUMBER_100, text, Style.getInstance().deriveVerticalAlignment(1).deriveRotation(
                    BaseRuler.NUMBER_90).deriveFRFont(BaseRuler.TEXT_FONT), ScreenResolution.getScreenResolution());
            gg.dispose();
        } else if (i % BaseRuler.SCALE_50 == 0) {
            // Medium tick.
            GraphHelper.drawLine(g, size.width, toPX(i) - extra, BaseRuler.NUMBER_5, toPX(i) - extra);
        } else {
            // Minor tick.
            GraphHelper.drawLine(g, size.width, toPX(i) - extra, BaseRuler.NUMBER_11, toPX(i) - extra);
        }
    }
    // Right border of the ruler and top separator line.
    GraphHelper.drawLine(g, size.width - 1, 0, size.width - 1, size.height);
    g.setColor(BaseRuler.STRAR_BG);
    GraphHelper.drawLine(g, 0, 0, size.width, 0);
}
} | fanruan/finereport-design | designer_base/src/com/fr/design/scrollruler/VerticalRulerUI.java | Java | gpl-3.0 | 3,640 |
<?php defined( 'ABSPATH' ) or die( 'Restricted access' );
class gPersianDateCalendar extends gPersianDateModuleCore
{
/**
 * Builds the month-view calendar table markup.
 *
 * Renders the caption, the weekday header, one cell per day of the month
 * (days with posts become links whose tooltip lists the post titles) and,
 * optionally, a prev/next-month footer navigation.
 *
 * @param array $atts         Overrides for the defaults declared below.
 * @param mixed $current_time Source for "today"; NULL means now. Passed to
 *                            gPersianDateDate::getByCalfromObject().
 * @return string Table HTML, or an empty string when there is nothing to show.
 */
public static function build( $atts = [], $current_time = NULL )
{
	global $wpdb;

	// Resolve "today" in the target calendar; month/day are zero-padded
	// strings so they compare cleanly against the string args below.
	$current_date = gPersianDateDate::getByCalfromObject( $current_time, ( isset( $atts['calendar'] ) ? $atts['calendar'] : NULL ) );
	$current_year = ''.$current_date['year'];
	$current_month = ''.sprintf( '%02d', $current_date['mon'] );
	$current_day = ''.sprintf( '%02d', $current_date['mday'] );

	$args = self::atts( [
		'calendar' => NULL, // NULL to default
		'this_year' => $current_year,
		'this_month' => $current_month,
		'this_day' => $current_day,
		'week_begins' => get_option( 'start_of_week' ), // '6' // week start on Saturday
		'post_type' => apply_filters( 'gpersiandate_calendar_posttypes', [ 'post' ] ),
		'exclude_statuses' => NULL, // for admin only // NULL to default
		'initial' => TRUE,
		'caption' => TRUE, // year/month table caption / string for custom
		'caption_link' => TRUE, // table caption link to / string for custom
		'navigation' => TRUE, // next/prev foot nav
		/* translators: %s: previous month */
		'nav_prev' => _x( '&laquo; %s', 'Calendar: Build: Previous Month', 'gpersiandate' ),
		/* translators: %s: next month */
		'nav_next' => _x( '%s &raquo;', 'Calendar: Build: Next Month', 'gpersiandate' ),
		'title_sep' => _x( ', ', 'Calendar: Build: Title Seperator', 'gpersiandate' ),
		'title_trim' => 55,
		'link_build_callback' => NULL, // NULL to default
		'the_day_callback' => NULL, // NULL to default
		'nav_month_callback' => NULL, // NULL to default
		'id' => 'wp-calendar', // table html id
		'class' => [ 'date-calendar', 'wp-calendar-table' ], // table html css class
	], $atts );

	// bailing if no posttypes!
	if ( empty( $args['post_type'] ) )
		return '';

	// bailing if no posts!
	if ( ! gPersianDateUtilities::hasPosts( $args['post_type'], $args['exclude_statuses'] ) )
		return '';

	// Fall back to the default callbacks for anything unset/uncallable.
	if ( ! $args['link_build_callback'] || ! is_callable( $args['link_build_callback'] ) )
		$args['link_build_callback'] = [ __CLASS__, 'linkBuildCallback' ];

	if ( ! $args['the_day_callback'] || ! is_callable( $args['the_day_callback'] ) )
		$args['the_day_callback'] = [ __CLASS__, 'theDayCallback' ];

	if ( ! $args['nav_month_callback'] || ! is_callable( $args['nav_month_callback'] ) )
		$args['nav_month_callback'] = [ __CLASS__, 'navMonthCallback' ];

	list( $first_day, $last_day ) = gPersianDateDate::monthFirstAndLast( $args['this_year'], $args['this_month'], NULL, $args['calendar'] );

	// SQL fragments: post types are escaped; on admin, excluded statuses are
	// filtered out, on the front-end only published posts count.
	$post_type_clause = "AND post_type IN ( '".implode( "', '", esc_sql( (array) $args['post_type'] ) )."' )";
	$post_status_clause = is_admin()
		? "AND post_status NOT IN ( '".implode( "', '", esc_sql(
			gPersianDateUtilities::getExcludeStatuses( $args['exclude_statuses'] ) ) )."' )"
		: "AND post_status = 'publish'";

	$html = $caption = '';

	// Caption: TRUE renders "{month} {year}", any other truthy value is used verbatim.
	if ( TRUE === $args['caption'] )
		$caption = self::getCaption( $args['this_year'], $args['this_month'], $args['calendar'] );

	else if ( $args['caption'] )
		$caption = $args['caption'];

	// Caption link: TRUE links to the month archive, a string is used as URL.
	if ( $caption && TRUE === $args['caption_link'] )
		$caption = gPersianDateHTML::link( $caption, call_user_func_array( $args['link_build_callback'], [ 'month', $args['this_year'], $args['this_month'], NULL, $args ] ) );

	else if ( $caption && $args['caption_link'] )
		$caption = gPersianDateHTML::link( $caption, $args['caption_link'] );

	if ( $caption )
		$html.= '<caption>'.$caption.'</caption>';

	// Weekday header row, rotated so it starts on `week_begins`.
	$html.= '<thead><tr>';

	$myweek = gPersianDateStrings::dayoftheweek( NULL, TRUE, $args['calendar'], FALSE );
	$mydays = gPersianDateStrings::dayoftheweek( NULL, TRUE, $args['calendar'], TRUE );

	for ( $wdcount = 0; $wdcount <= 6; $wdcount++ ) {
		$wd = ( $wdcount + $args['week_begins'] ) % 7;
		$html.= $args['initial']
			? '<th title="'.esc_attr( $myweek[$wd] ).'" data-weekday="'.$wd.'">'.$mydays[$wd].'</th>'
			: '<th data-weekday="'.$wd.'">'.$myweek[$wd].'</th>';
	}

	$html.= '</tr></thead><tbody><tr>';

	$data = [];

	$post_select_fields = is_admin()
		? "post_title, post_date, post_type, post_modified, post_status, post_author"
		: "post_title, post_date, post_type";

	// All posts dated inside the month, grouped below by day.
	$posts = $wpdb->get_results( "
		SELECT ID, {$post_select_fields}, MONTH(post_date) AS month, DAYOFMONTH(post_date) as dom
		FROM {$wpdb->posts}
		WHERE post_date >= '{$first_day}'
		AND post_date <= '{$last_day}'
		{$post_type_clause}
		{$post_status_clause}
	" );

	if ( $posts ) {

		foreach ( (array) $posts as $post ) {

			// NOTE(review): keys here use the Gregorian MONTH/DAYOFMONTH
			// while `mday` is in the target calendar; array_combine below
			// assumes those mday values are unique within the month — confirm.
			$key = $post->month.'_'.$post->dom;

			if ( ! isset( $data[$key] ) ) {
				$post_date = gPersianDateDate::getByCalfromObject( $post->post_date, $args['calendar'] );
				$data[$key] = [ 'posts' => [], 'mday' => $post_date['mday'] ];
			}

			$the_post = [
				'ID' => $post->ID,
				'date' => $post->post_date,
				'type' => $post->post_type,
				'title' => $post->post_title,
			];

			if ( is_admin() ) {
				$the_post['modified'] = $post->post_modified;
				$the_post['status'] = $post->post_status;
				$the_post['author'] = $post->post_author;
			}

			$data[$key]['posts'][] = $the_post;
		}

		// Re-key the day buckets by calendar day-of-month.
		if ( ! empty( $data ) ) {
			$the_days = wp_list_pluck( $data, 'mday' );
			$data = array_combine( $the_days, $data );
		}
	}

	// Pad the first row up to the weekday the month starts on.
	if ( $pad = self::mod( date( 'w', strtotime( $first_day ) ) - $args['week_begins'] ) )
		$html.= self::getPad( $pad );

	$days_in_month = gPersianDateDate::daysInMonth( $args['this_month'], $args['this_year'], $args['calendar'] );

	for ( $the_day = 1; $the_day <= $days_in_month; ++$the_day ) {

		if ( isset( $new_row ) && $new_row )
			$html.= '</tr><tr>';

		$new_row = FALSE;

		$today = ( $the_day == $current_day
			&& $args['this_month'] == $current_month
			&& $args['this_year'] == $current_year );

		$the_day_data = array_key_exists( $the_day, $data ) ? $data[$the_day]['posts'] : [];

		$html.= '<td class="-day'.( $today ? ' -today' : '' ).( empty( $the_day_data ) ? '' : ' -with-posts' ).'" data-day="'.$the_day.'">';
		$html.= call_user_func_array( $args['the_day_callback'], [ $the_day, $the_day_data, $args, $today ] ).'</td>';

		// Break the row after the last weekday of the week.
		$week_day = gPersianDateDate::dayOfWeek( $args['this_month'], $the_day, $args['this_year'], $args['calendar'] );

		if ( 6 == self::mod( $week_day - $args['week_begins'] ) )
			$new_row = TRUE;
	}

	// Pad the last row out to a full week.
	if ( $pad = ( 6 - self::mod( $week_day - $args['week_begins'] ) ) )
		$html.= self::getPad( $pad );

	$html.= '</tr></tbody>';

	if ( $args['navigation'] ) {

		// get the next and previous months
		// with at least one post
		$previous = $wpdb->get_row( "
			SELECT post_date
			FROM {$wpdb->posts}
			WHERE post_date < '{$first_day}'
			{$post_type_clause}
			{$post_status_clause}
			ORDER BY post_date DESC
			LIMIT 1
		" );

		$next = $wpdb->get_row( "
			SELECT post_date
			FROM {$wpdb->posts}
			WHERE post_date > '{$last_day}'
			{$post_type_clause}
			{$post_status_clause}
			ORDER BY post_date ASC
			LIMIT 1
		" );

		// The `<tfoot>` element was allowed to precede the `<tbody>` element
		// in HTML 5. However, that was changed in HTML 5.1 and `<tfoot>`
		// must now follow `<tbody>`.
		$html.= '<tfoot><tr>';

		if ( $previous ) {
			$previous_date = gPersianDateDate::getByCalfromObject( $previous->post_date, $args['calendar'] );
			$html.= '<td colspan="3" class="-next-prev -prev" data-month="'.$previous_date['mon'].'" data-year="'.$previous_date['year'].'">';
			$html.= call_user_func_array( $args['nav_month_callback'], [ $previous_date, FALSE, $args ] ).'</td>';
		} else {
			$html.= self::getPad( 3 );
		}

		$html.= '<td class="-middle -pad">&nbsp;</td>';

		if ( $next ) {
			$next_date = gPersianDateDate::getByCalfromObject( $next->post_date, $args['calendar'] );
			$html.= '<td colspan="3" class="-next-prev -next" data-month="'.$next_date['mon'].'" data-year="'.$next_date['year'].'">';
			$html.= call_user_func_array( $args['nav_month_callback'], [ $next_date, TRUE, $args ] ).'</td>';
		} else {
			$html.= self::getPad( 3 );
		}

		$html.= '</tr></tfoot>';
	}

	return gPersianDateHTML::tag( 'table', [
		'id' => $args['id'],
		'class' => $args['class'],
		'data' => [
			'calendar' => $args['calendar'] ?: FALSE,
			'year' => $args['this_year'],
			'month' => $args['this_month'],
		],
	], $html );
}
/**
 * Default URL builder for calendar links.
 *
 * Builds a date-archive link and, when the calendar is restricted to a
 * single non-default post type, appends it as a `post_type` query var.
 *
 * @param string   $for   Link target: 'month' or 'day'.
 * @param int|null $year
 * @param int|null $month
 * @param int|null $day
 * @param array    $args  Calendar arguments (only `post_type` is consulted).
 * @return string
 */
public static function linkBuildCallback( $for, $year = NULL, $month = NULL, $day = NULL, $args = [] )
{
	$link = gPersianDateLinks::build( $for, $year, $month, $day );

	$posttypes = empty( $args['post_type'] ) ? [] : $args['post_type'];

	// only for single posttype args
	if ( 1 === count( $posttypes ) ) {

		$posttype = reset( $posttypes );

		if ( 'post' !== $posttype )
			$link = add_query_arg( [ 'post_type' => $posttype ], $link );
	}

	return $link;
}
/**
 * Default renderer for a single day cell.
 *
 * Days without posts render as a plain (localized) number; days with
 * posts become a link to the day archive whose tooltip lists the
 * (filtered, trimmed) post titles.
 *
 * @param int   $the_day Day of month.
 * @param array $data    Post summaries for this day.
 * @param array $args    Calendar arguments.
 * @param bool  $today   Whether this cell is today (unused here).
 * @return string
 */
public static function theDayCallback( $the_day, $data = [], $args = [], $today = FALSE )
{
	$label = gPersianDateTranslate::numbers( $the_day );

	if ( ! count( $data ) )
		return $label;

	$titles = [];

	foreach ( $data as $post ) {

		$title = gPersianDateText::trimChars(
			apply_filters( 'string_format_i18n',
				apply_filters( 'the_title', $post['title'], $post['ID'] ) ),
			$args['title_trim']
		);

		if ( ! empty( $title ) )
			$titles[] = $title;
	}

	$href = call_user_func_array( $args['link_build_callback'],
		[ 'day', $args['this_year'], $args['this_month'], $the_day, $args ] );

	return gPersianDateHTML::tag( 'a', [
		'href'  => $href,
		'title' => implode( $args['title_sep'], $titles ),
	], $label );
}
/**
 * Default renderer for the prev/next month footer links.
 *
 * @param array $date Date array for the target month (`year`, `mon`, `month`).
 * @param bool  $next TRUE for the "next" link, FALSE for "previous".
 * @param array $args Calendar arguments.
 * @return string Anchor markup.
 */
public static function navMonthCallback( $date, $next = TRUE, $args = [] )
{
	$template = $next ? $args['nav_next'] : $args['nav_prev'];

	$href = call_user_func_array( $args['link_build_callback'],
		[ 'month', $date['year'], $date['mon'], NULL, $args ] );

	return gPersianDateHTML::tag( 'a', [
		'href'  => $href,
		'title' => self::getCaption( $date['year'], $date['mon'], $args['calendar'] ),
	], sprintf( $template, $date['month'] ) );
}
/**
 * Builds an empty placeholder cell spanning the given number of columns.
 *
 * @param int $pad Number of columns to span.
 * @return string
 */
public static function getPad( $pad )
{
	return sprintf( '<td class="-pad" colspan="%s">&nbsp;</td>', esc_attr( $pad ) );
}
/**
 * Builds the localized "{month name} {year}" caption for the calendar table.
 *
 * @param int|string  $year
 * @param int|string  $month
 * @param string|null $calendar Calendar slug, NULL for the default.
 * @return string
 */
public static function getCaption( $year, $month, $calendar = NULL )
{
	return sprintf(
		/* translators: %1$s: month name, %2$s: year number */
		_x( '%1$s %2$s', 'Calendar: Build: Caption', 'gpersiandate' ),
		gPersianDateStrings::month( $month, FALSE, $calendar ),
		gPersianDateTranslate::numbers( $year )
	);
}
// get number of days since the start of the week
// @SOURCE: `calendar_week_mod()`
public static function mod( $num, $base = 7 )
{
	// Floor-based remainder: the result is always in [0, $base), even for
	// negative $num (unlike PHP's native `%`, which truncates toward zero).
	$remainder = $num - $base * floor( $num / $base );

	return (int) $remainder;
}
// REPLICA: `get_calendar()`
/**
 * Drop-in replacement for WordPress core `get_calendar()`.
 *
 * Resolves the year/month to display from the query globals
 * (`$monthnum`/`$year`, falling back to the compact `$m` form), builds the
 * calendar and caches the minified markup in a transient for 12 hours.
 *
 * @param bool $initial Use single-letter weekday headers.
 * @param bool $echo    Echo the markup (TRUE) or return it (FALSE).
 * @return string|void
 */
public static function get( $initial = TRUE, $echo = TRUE )
{
	global $wpdb, $m, $monthnum, $year, $posts;

	$args = [ 'initial' => $initial ];

	if ( ! empty( $monthnum ) && ! empty( $year ) ) {

		$args['this_year'] = ''.intval( $year );
		$args['this_month'] = ''.zeroise( intval( $monthnum ), 2 );

	} else if ( ! empty( $m ) ) {

		// `$m` is YYYY or YYYYMM; a missing month part means January.
		$args['this_year'] = ''.intval( substr( $m, 0, 4 ) );

		if ( strlen( $m ) < 6 )
			$args['this_month'] = '01';

		else
			$args['this_month'] = ''.zeroise( intval( substr( $m, 4, 2 ) ), 2 );
	}

	// Cache key depends on every argument that affects the output.
	$key = md5( 'gpersiandate_calendar_'.serialize( $args ) );

	if ( self::isFlush() )
		delete_transient( $key );

	if ( FALSE === ( $html = get_transient( $key ) ) ) {

		$html = self::build( $args );
		$html = gPersianDateUtilities::minifyHTML( $html );

		set_transient( $key, $html, 12 * HOUR_IN_SECONDS );
	}

	if ( ! $echo )
		return $html;

	echo $html;
}
}
| geminorum/gpersiandate | includes/calendar.class.php | PHP | gpl-3.0 | 11,713 |
<?php
use DataFX\DataFX;
use DataFX\Table;
use DataFX\Column;
use DataFX\ColumnValue;
use DataFX\Record;
use DataFX\RecordColumn;
use PhoenixSNS\Objects\UserPresenceStatus;
use PhoenixSNS\Objects\UserProfileVisibility;
/*
$tables[] = new Table("RealMoneyTradingResources", "rmtresource_", array
(
// $name, $dataType, $size, $value, $allowNull, $primaryKey, $autoIncrement
new Column("ResourceTypeID", "INT", null, null, false),
new Column("ExchangeRate", "DECIMAL", null, null, false)
));
$tables[] = new Table("RealMoneyTradingResourcePacks", "rmtresourcepack_", array
(
// $name, $dataType, $size, $value, $allowNull, $primaryKey, $autoIncrement
new Column("ID", "INT", null, null, false, true, true),
new Column("Price", "DECIMAL", null, null, false)
));
*/
?> | alcexhim/PhoenixSNS | Tenant/Include/Modules/001-Setup/Tables/RealMoneyTradingResources.inc.php | PHP | gpl-3.0 | 804 |
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Infected file report
*
* @package report_infectedfiles
* @author Nathan Nguyen <nathannguyen@catalyst-au.net>
* @copyright Catalyst IT
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
// Language strings for the infected-files report.
// NOTE(review): keys appear to be kept in alphabetical order — preserve
// that ordering when adding new strings.
$string['author'] = 'Author';
$string['confirmdelete'] = 'Do you really wish to delete this file?';
$string['confirmdeleteall'] = 'Do you really wish to delete all files?';
$string['confirmdownload'] = 'Do you really wish to download this file?';
$string['confirmdownloadall'] = 'Do you really wish to download all files?';
$string['filename'] = 'File name';
$string['infectedfiles'] = 'Antivirus failures';
// Privacy API descriptions for the data stored by this report.
$string['privacy:metadata:infected_files'] = 'This table stores information on antivirus failures detected by the system.';
$string['privacy:metadata:infected_files:filename'] = 'The name of the infected file uploaded by the user.';
$string['privacy:metadata:infected_files:timecreated'] = 'The timestamp of when a user uploaded an infected file.';
$string['privacy:metadata:infected_files:userid'] = 'The userid of the user who uploaded an infected file.';
$string['privacy:metadata:infected_files_subcontext'] = 'Antivirus failures';
$string['pluginname'] = 'Infected files';
$string['quarantinedfile'] = 'Quarantined file';
$string['reason'] = 'Failure reason';
$string['timecreated'] = 'Time created';
| ankitagarwal/moodle | report/infectedfiles/lang/en/report_infectedfiles.php | PHP | gpl-3.0 | 2,034 |
"""Experiment to test the correction of calculation of income from
agriculture and ecosystem services.
This experiment tests the influence of prefactors in income
calculation (r_bca and r_es) for two scenarios:
1) for income calculated as **mean** over income from cells
2) for income calculated as **sum** over income from cells
Explanation
-----------
Previously, the income from aggriculture and ecosystem services for each city
was calculated as the mean of the income from cells which it had under its
controll.
This does not make sense, since the actual harvest is not the mean of the
harvest of different places, but obviously the sum of the harvest from
different places.
Therefore, I changed the calculation of these sources of income to calculate
the sum over different cells.
Then, to get reasonable results, one has to adjust the prefactors in the
calculation of total income, since they have been tailored to reproduce
stylized facts before (and therefore must be taylored to do so again, just
differently)
"""
from __future__ import print_function
try:
import cPickle as cp
except ImportError:
import pickle as cp
import getpass
import itertools as it
import numpy as np
import sys
import pandas as pd
from pymofa.experiment_handling import experiment_handling as eh
from mayasim.model.ModelCore import ModelCore as Model
from mayasim.model.ModelParameters import ModelParameters as Parameters
test = True
def run_function(r_bca=0.2, r_eco=0.0002, population_control=False,
                 N=30, crop_income_mode='sum',
                 kill_cropless=True, steps=350, filename='./'):
    """
    Set up the Model for different Parameters and determine
    which parts of the output are saved where.

    Output is saved in pickled dictionaries including the
    initial values and Parameters, as well as the time
    development of aggregated variables for each run.

    Parameters:
    -----------
    r_bca : float > 0
        the pre factor for income from agriculture
    r_eco : float > 0
        the pre factor for income from ecosystem services
    population_control : boolean
        determines whether the population grows
        unbounded or if population growth decreases
        with income per capita and population density.
    N : int > 0
        initial number of settlements on the map
    crop_income_mode : string
        defines the mode of crop income calculation.
        possible values are 'sum' and 'mean'
    kill_cropless: bool
        Switch to determine whether or not to kill cities
        without cropped cells.
    steps : int > 0
        number of model steps to run (ignored in test mode,
        which runs a single step)
    filename: string
        path to save the results to.

    Returns
    -------
    int
        1 on success, -1 if the result file could not be written.
    """
    # initialize the Model (module-global `test` toggles model debug mode)
    m = Model(N, output_data_location=filename, debug=test)

    # Only the first run of a sample ('...s0.pkl') keeps the expensive
    # geographic/settlement output.
    if not filename.endswith('s0.pkl'):
        m.output_geographic_data = False
        m.output_settlement_data = False

    m.population_control = population_control
    m.crop_income_mode = crop_income_mode
    m.r_bca_sum = r_bca
    m.r_es_sum = r_eco
    m.kill_cities_without_crops = kill_cropless

    # store initial conditions and Parameters
    res = {}
    res["initials"] = pd.DataFrame({"Settlement X Possitions":
                                    m.settlement_positions[0],
                                    "Settlement Y Possitions":
                                    m.settlement_positions[1],
                                    "Population": m.population})

    # BUGFIX: the original filter used `not callable(key)`, but `key` is the
    # attribute *name* (a string) and strings are never callable, so methods
    # of Parameters were never excluded. Inspect the attribute itself.
    res["Parameters"] = pd.Series({key:
                                   getattr(m, key)
                                   for key in dir(Parameters)
                                   if not key.startswith('__')
                                   and not callable(getattr(Parameters, key))})

    # run Model
    if test:
        m.run(1)
    else:
        m.run(steps)

    # Retrieve results
    res["trajectory"] = m.get_trajectory()
    res["traders trajectory"] = m.get_traders_trajectory()

    try:
        with open(filename, 'wb') as dumpfile:
            cp.dump(res, dumpfile)
        return 1
    except IOError:
        return -1
def run_experiment(argv):
    """
    Take arv input variables and run sub_experiment accordingly.
    This happens in five steps:
    1) parse input arguments to set switches
       for [test],
    2) set output folders according to switches,
    3) generate parameter combinations,
    4) define names and dictionaries of callables to apply to sub_experiment
       data for post processing,
    5) run computation and/or post processing and/or plotting
       depending on execution on cluster or locally or depending on
       experimentation mode.

    Parameters
    ----------
    argv: list[N]
        List of parameters from terminal input

    Returns
    -------
    rt: int
        some return value to show whether sub_experiment succeeded
        return 1 if sucessfull.
    """
    global test

    # Parse switches from input
    if len(argv) > 1:
        test = int(argv[1])

    # Generate paths according to switches and user name
    test_folder = ['', 'test_output/'][int(test)]
    experiment_folder = 'X2_eco_income/'
    raw = 'raw_data/'
    res = 'results/'

    # Output locations differ per user: cluster scratch for 'kolb',
    # local project tree for 'jakob', cwd otherwise.
    if getpass.getuser() == "kolb":
        save_path_raw = "/p/tmp/kolb/Mayasim/output_data/{}{}{}".format(
            test_folder, experiment_folder, raw)
        save_path_res = "/home/kolb/Mayasim/output_data/{}{}{}".format(
            test_folder, experiment_folder, res)
    elif getpass.getuser() == "jakob":
        save_path_raw = "/home/jakob/Project_MayaSim/Python/" \
                        "output_data/{}{}{}".format(test_folder,
                                                    experiment_folder, raw)
        save_path_res = "/home/jakob/Project_MayaSim/Python/" \
                        "output_data/{}{}{}".format(test_folder,
                                                    experiment_folder, res)
    else:
        save_path_res = './{}'.format(res)
        save_path_raw = './{}'.format(raw)

    print(save_path_raw)

    # Generate parameter combinations.
    # `index` maps position in each combination tuple to the parameter name
    # expected by run_function.
    index = {0: "r_bca",
             1: "r_eco",
             2: "kill_cropless"}
    # NOTE(review): these are two independent `if`s that rely on the
    # reassignment of `test` (0 -> False, 1 -> True) not re-matching the
    # second condition; an elif chain would be clearer. Also, any other
    # value of `test` leaves r_bcas/r_ecos undefined (NameError below).
    if test == 0:
        r_bcas = [0.1, 0.15, 0.2, 0.25, 0.3]
        r_ecos = [0.0001, 0.00015, 0.0002, 0.00025]
        kill_cropless = [True, False]
        test=False
    if test == 1:
        r_bcas = [0.1, 0.3]
        r_ecos = [0.0001, 0.00025]
        kill_cropless = [True, False]
        test=True

    param_combs = list(it.product(r_bcas, r_ecos, kill_cropless))

    sample_size = 10 if not test else 2

    # Define names and callables for post processing.
    # NOTE(review): np.load() is used to read the pickled result dicts;
    # newer numpy requires allow_pickle=True for this — confirm against the
    # numpy version pinned for this project.
    name1 = "trajectory"
    estimators1 = {"mean_trajectories":
                   lambda fnames:
                   pd.concat([np.load(f)["trajectory"]
                              for f in fnames]).groupby(level=0).mean(),
                   "sigma_trajectories":
                   lambda fnames:
                   pd.concat([np.load(f)["trajectory"]
                              for f in fnames]).groupby(level=0).std()
                   }
    name2 = "traders_trajectory"
    estimators2 = {
                   "mean_trajectories":
                   lambda fnames:
                   pd.concat([np.load(f)["traders trajectory"]
                              for f in fnames]).groupby(
                       level=0).mean(),
                   "sigma_trajectories":
                   lambda fnames:
                   pd.concat([np.load(f)["traders trajectory"]
                              for f in fnames]).groupby(
                       level=0).std()
                   }

    # Run computation and post processing.
    if test:
        print('testing {}'.format(experiment_folder))
    handle = eh(sample_size=sample_size,
                parameter_combinations=param_combs,
                index=index,
                path_raw=save_path_raw,
                path_res=save_path_res,
                use_kwargs=True)

    handle.compute(run_func=run_function)
    handle.resave(eva=estimators1, name=name1)
    handle.resave(eva=estimators2, name=name2)

    if test:
        data = pd.read_pickle(save_path_res + name1)
        print(data.head())
        data = pd.read_pickle(save_path_res + name2)
        print(data.head())

    return 1
| jakobkolb/MayaSim | Experiments/mayasim_X2_scan_r_es_and_r_agg.py | Python | gpl-3.0 | 8,304 |
<?php
/**
* Domainer Template Functions
*
* @package Domainer
* @subpackage Utilities
*
* @api
*
* @since 1.0.0
*/
/**
 * Filter the content to replace the domain name.
 *
 * Replaces every occurrence of the old domain(s) in $content with the new
 * one. Defaults: the blog's original domain as the needle, and the blog's
 * primary Domainer domain (falling back to the current domain/path) as the
 * replacement.
 *
 * @since 1.1.2 Use property_exists() to check if domain_id exists on blog object.
 * @since 1.0.0
 *
 * @global \WP_Site $current_blog The current site object.
 *
 * @param string       $content    The content to filter.
 * @param string|array $old_domain Optional. The original domain(s) to replace
 *                                 (defaults to blog's original).
 * @param string       $new_domain Optional. The new domain to replace with
 *                                 (defaults to blog's primary).
 * @return string The filtered content.
 */
function domainer_rewrite_url( $content, $old_domain = null, $new_domain = null ) {
	if ( is_null( $old_domain ) ) {
		$old_domain = Domainer\get_true_url();
	}

	if ( is_null( $new_domain ) ) {
		global $current_blog;

		// Fall back to the current domain/path...
		$new_domain = rtrim( $current_blog->domain . $current_blog->path, '/' );

		// ...but prefer the registered Domainer domain when one is attached.
		if ( property_exists( $current_blog, 'domain_id' ) && $domain = Domainer\Registry::get_domain( $current_blog->domain_id ) ) {
			$new_domain = $domain->fullname();
		}
	}

	$content = str_replace( (array) $old_domain, $new_domain, $content );

	return $content;
}
/**
 * Get the primary domain for the current blog.
 *
 * @since 1.1.0
 *
 * @global int      $blog_id      The current blog ID.
 * @global \WP_Site $current_blog The current site object.
 *
 * @param int $blog_id Optional The ID of the current blog.
 *
 * @return string The primary/only domain name for the blog, falling back to the original.
 */
function domainer_get_primary_domain( $blog_id = null ) {
	// No ID given: use the current blog's ID from the query globals.
	if ( is_null( $blog_id ) ) {
		global $blog_id;
	}

	$domain = Domainer\Registry::get_primary_domain( $blog_id );

	// No registered primary domain: fall back to the blog's own domain.
	if ( ! $domain ) {
		global $current_blog;
		return $current_blog->domain;
	}

	return $domain->fullname();
}
| dougwollison/domainer | includes/functions-template.php | PHP | gpl-3.0 | 1,891 |
package main
import (
"encoding/json"
"flag"
"fmt"
"log"
"net/http"
"os"
"runtime"
"time"
"github.com/gorilla/handlers"
"github.com/gorilla/mux"
)
// Board dimensions (19x19 grid).
const (
	WIDTH  = 19
	HEIGHT = 19
)

// hub maps room names to their Room instances.
// NOTE(review): accessed from multiple HTTP handler goroutines without any
// locking — confirm whether concurrent access is possible in deployment.
var hub map[string]*Room = make(map[string]*Room)
// player holds per-player game state.
type player struct {
	Name  string // display/user name
	Score int    // captured-stone count
	Index int    // position of this player's view within the room history
}

// coord is a single board position, optionally tagged with the player name.
type coord struct {
	X      int
	Y      int
	Player string
}
// intSendGameState writes state to the client as a JSON response with
// status 200. Encoding errors are ignored (the status is already written).
func intSendGameState(w http.ResponseWriter, r *http.Request, state GameState) {
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(200)
	json.NewEncoder(w).Encode(state)
}
// GameStateRequest is the common JSON payload identifying a caller and room.
type GameStateRequest struct {
	UserName string
	RoomName string
}

// unpackGameStateRequest decodes a GameStateRequest from the request body
// and closes the body.
func unpackGameStateRequest(r *http.Request) (GameStateRequest, error) {
	decoder := json.NewDecoder(r.Body)
	defer r.Body.Close()
	var req GameStateRequest
	err := decoder.Decode(&req)
	return req, err
}
// getRoom looks up a room by name in the global hub, returning an error
// when no room with that name exists.
func getRoom(name string) (*Room, error) {
	room, ok := hub[name]
	if !ok {
		return nil, fmt.Errorf("Invalid room name")
	}
	return room, nil
}
// sendGameState handles POST /GameStateRequest: it returns the caller's
// current view of the requested room as JSON.
func sendGameState(w http.ResponseWriter, r *http.Request) {
	req, err := unpackGameStateRequest(r)
	if err != nil {
		http.Error(w, "Invalid packet request", 400)
		return
	}
	room, err := getRoom(req.RoomName)
	if err != nil {
		http.Error(w, err.Error(), 400)
		return
	}
	roomState := room.GetState(req.UserName)
	intSendGameState(w, r, roomState)
}
// startGame handles POST /StartGameRequest: it starts the game in the
// requested room (or restarts it if it has been started before) and
// replies with the caller's view of the room state as JSON.
func startGame(w http.ResponseWriter, r *http.Request) {
	req, err := unpackGameStateRequest(r)
	if err != nil {
		http.Error(w, "Invalid packet request", 400)
		return
	}
	// Consistency: resolve the room through getRoom like the other
	// handlers instead of indexing the hub map directly (same error
	// message and status code as before).
	room, err := getRoom(req.RoomName)
	if err != nil {
		http.Error(w, err.Error(), 400)
		return
	}
	// A room still in its initial state is started; otherwise restart.
	if room.GameState == GameStateInit {
		room.StartGame()
	} else {
		room.RestartGame()
	}
	roomState := room.GetState(req.UserName)
	intSendGameState(w, r, roomState)
}
// PlayerMoveRequest is the JSON payload for playing a stone at (X, Y).
type PlayerMoveRequest struct {
	UserName string
	RoomName string
	X        int
	Y        int
}
// playerMove handles POST /PlayerMoveRequest: it validates that the game is
// running, that it is the requesting player's turn and that the player is
// not browsing the history, then applies the move, updates scores and
// either ends the game or hands the turn to the other player.
func playerMove(w http.ResponseWriter, r *http.Request) {
	decoder := json.NewDecoder(r.Body)
	defer r.Body.Close()
	var req PlayerMoveRequest
	err := decoder.Decode(&req)
	if err != nil {
		http.Error(w, err.Error(), 400)
		return
	}
	// Consistency: resolve the room through getRoom like the other
	// handlers instead of indexing the hub map directly (same error
	// message and status code as before).
	room, err := getRoom(req.RoomName)
	if err != nil {
		http.Error(w, err.Error(), 400)
		return
	}
	if room.GameState != GameStateRunning {
		http.Error(w, "Game is not running", 400)
		return
	}
	if req.UserName != room.Players[room.Current].Name {
		http.Error(w, "Not your turn bitch", 400)
		return
	}
	// A player whose view is rewound into the history may not play.
	if room.Players[room.Current].Index != room.HistoryLen-1 {
		http.Error(w, "You can't play in History", 400)
		return
	}
	mv := coord{X: req.X, Y: req.Y}
	pCapturedCount := 0
	err = move(room.Board, mv, room.Current, &room.Board, &pCapturedCount)
	// NOTE(review): `move` signals a win via the sentinel string
	// "Game Over"; comparing err.Error() is fragile — a sentinel error
	// plus errors.Is would be safer if `move` can be changed.
	if err != nil && err.Error() != "Game Over" {
		http.Error(w, err.Error(), 400)
		return
	}
	room.Players[room.Current].Score += pCapturedCount
	// The game ends on a winning alignment or when the current player
	// reaches a score of 10 captured stones.
	if (err != nil && err.Error() == "Game Over") ||
		room.Players[room.Current].Score >= 10 {
		room.SetWinner()
	} else {
		// Snapshot the board, then advance both players to the newest
		// history entry and switch turns.
		room.History = append(room.History, boardCopy(room.Board))
		room.HistoryLen += 1
		room.Players[room.Current].Index = room.HistoryLen - 1
		room.SwitchPlayer()
		room.Players[room.Current].Index = room.HistoryLen - 1
	}
	roomState := room.GetState(req.UserName)
	intSendGameState(w, r, roomState)
}
// IAMove handles POST /IAMoveRequest: in solo rooms it asks the AI for a
// move, applies it with the same scoring/end-of-game rules as playerMove,
// records how long the AI took, and replies with the room state.
func IAMove(w http.ResponseWriter, r *http.Request) {
	req, err := unpackGameStateRequest(r)
	if err != nil {
		http.Error(w, "Invalid packet request", 400)
		return
	}
	room, err := getRoom(req.RoomName)
	if err != nil {
		http.Error(w, err.Error(), 400)
		return
	}
	// Silently ignored for non-solo rooms (no error response).
	if room.Mode != "solo" {
		return
	}
	if room.GameState != GameStateRunning {
		http.Error(w, "Game is not running", 400)
		return
	}
	// Measure AI thinking time; time.Since is in nanoseconds, so the
	// division by 1e6 stores milliseconds in room.Time.
	start := time.Now()
	mv := room.AIGetMove()
	room.Time = (time.Since(start) / 1000000)
	pCapturedCount := 0
	err = move(room.Board, mv, room.Current, &room.Board, &pCapturedCount)
	if err != nil && err.Error() != "Game Over" {
		http.Error(w, err.Error(), 400)
		return
	}
	room.Players[room.Current].Score += pCapturedCount
	// Same end-of-game rules as playerMove: winning alignment or 10 captures.
	if (err != nil && err.Error() == "Game Over") ||
		room.Players[room.Current].Score >= 10 {
		room.SetWinner()
	} else {
		room.History = append(room.History, boardCopy(room.Board))
		room.HistoryLen += 1
		room.Players[room.Current].Index = room.HistoryLen - 1
		room.SwitchPlayer()
		room.Players[room.Current].Index = room.HistoryLen - 1
	}
	roomState := room.GetState(req.UserName)
	intSendGameState(w, r, roomState)
}
// hint handles POST /HintRequest: it asks the AI for a suggested move and
// returns a state snapshot in which that cell is marked with -1, then
// restores the board.
// NOTE(review): the shared board is mutated and reverted around the
// response — this is not safe if another request touches the room
// concurrently; confirm whether requests per room are serialized.
func hint(w http.ResponseWriter, r *http.Request) {
	req, err := unpackGameStateRequest(r)
	if err != nil {
		http.Error(w, "Invalid packet request", 400)
		return
	}
	room, err := getRoom(req.RoomName)
	if err != nil {
		http.Error(w, err.Error(), 400)
		return
	}
	if room.GameState != GameStateRunning {
		http.Error(w, "Waiting for the game to start", 400)
		return
	}
	mv := room.AIGetMove()
	// Temporarily mark the suggested cell so the client can render it.
	room.Board[mv.X][mv.Y] = -1
	roomState := room.GetState(req.UserName)
	intSendGameState(w, r, roomState)
	room.Board[mv.X][mv.Y] = 0
}
// historyPrev handles POST /HistoryPrevRequest: it rewinds the caller's
// view one step back in the room history and returns the resulting state.
func historyPrev(w http.ResponseWriter, r *http.Request) {
	req, err := unpackGameStateRequest(r)
	if err != nil {
		http.Error(w, "Invalid packet request", 400)
		return
	}
	room, err := getRoom(req.RoomName)
	if err != nil {
		http.Error(w, err.Error(), 400)
		return
	}
	if room.GameState != GameStateRunning {
		http.Error(w, "Waiting for the game to start", 400)
		return
	}
	room.HistoryPrev(req.UserName)
	roomState := room.GetState(req.UserName)
	intSendGameState(w, r, roomState)
}
// historyNext handles POST /HistoryNextRequest: it advances the caller's
// view one step forward in the room history and returns the resulting state.
func historyNext(w http.ResponseWriter, r *http.Request) {
	req, err := unpackGameStateRequest(r)
	if err != nil {
		http.Error(w, "Invalid packet request", 400)
		return
	}
	room, err := getRoom(req.RoomName)
	if err != nil {
		http.Error(w, err.Error(), 400)
		return
	}
	if room.GameState != GameStateRunning {
		http.Error(w, "Waiting for the game to start", 400)
		return
	}
	room.HistoryNext(req.UserName)
	roomState := room.GetState(req.UserName)
	intSendGameState(w, r, roomState)
}
// main parses CLI flags, wires up the HTTP routes (API endpoints, static
// assets, SPA entry point) and serves forever.
func main() {
	/*
		TESTHEURISTIC()
		return
	*/
	log.Println("GOMAXPROCS: ", runtime.GOMAXPROCS(0))
	var entry string
	var static string
	var port string
	flag.StringVar(&entry, "entry", "index.html", "the entrypoint to serve.")
	flag.StringVar(&static, "static", ".", "the directory to serve static files from.")
	flag.StringVar(&port, "port", "3004", "the `port` to listen on.")
	flag.Parse()
	r := mux.NewRouter()
	// Note: In a larger application, we'd likely extract our route-building logic into our handlers
	// package, given the coupling between them.
	// It's important that this is before your catch-all route ("/")
	// api := r.PathPrefix("/api/v1/").Subrouter()
	// JSON API endpoints (one per client action).
	r.HandleFunc("/GameStateRequest", sendGameState).Methods("POST")
	r.HandleFunc("/RoomsRequest", sendRooms).Methods("GET")
	r.HandleFunc("/CreateRoomRequest", CreateRoom).Methods("POST")
	r.HandleFunc("/JoinRoomRequest", JoinRoom).Methods("POST")
	r.HandleFunc("/StartGameRequest", startGame).Methods("POST")
	r.HandleFunc("/PlayerMoveRequest", playerMove).Methods("POST")
	r.HandleFunc("/IAMoveRequest", IAMove).Methods("POST")
	r.HandleFunc("/HintRequest", hint).Methods("POST")
	r.HandleFunc("/HistoryPrevRequest", historyPrev).Methods("POST")
	r.HandleFunc("/HistoryNextRequest", historyNext).Methods("POST")
	// Optional: Use a custom 404 handler for our API paths.
	// api.NotFoundHandler = JSONNotFound
	// Serve static assets directly.
	r.PathPrefix("/dist").Handler(http.FileServer(http.Dir(static)))
	// Catch-all: Serve our JavaScript application's entry-point (index.html).
	r.HandleFunc("/", IndexHandler(entry))
	srv := &http.Server{
		Handler: handlers.LoggingHandler(os.Stdout, r),
		Addr:    "0.0.0.0:" + port,
		// Good practice: enforce timeouts for servers you create!
		//WriteTimeout: 15 * time.Second,
		//ReadTimeout: 15 * time.Second,
	}
	log.Fatal(srv.ListenAndServe())
}
// IndexHandler returns a handler that unconditionally serves the given
// entrypoint file (the SPA's index.html) for every request it receives.
//
// The previous version wrapped the closure in http.HandlerFunc before
// returning it; the conversion was immediately erased by the declared
// plain-func return type, so the closure is now returned directly.
func IndexHandler(entrypoint string) func(w http.ResponseWriter, r *http.Request) {
	return func(w http.ResponseWriter, r *http.Request) {
		http.ServeFile(w, r, entrypoint)
	}
}
| abombard/gomoku | main.go | GO | gpl-3.0 | 8,038 |
// Gruntfile: local development server plus a tiny package manager that
// installs/removes Sencha packages (listed in registry.json) and keeps
// app.json's "requires" list in sync.
module.exports = function (grunt) {
    var exec = require('child_process').exec;
    var rimraf = require('rimraf');
    var fs = require('fs');
    // Project configuration.
    grunt.initConfig({
        pkg: grunt.file.readJSON('package.json'),
        connect: {
            server: {
                options: {
                    port: 1841,
                    base: __dirname,
                    open: 'http://localhost:1841',
                    keepalive: true,
                    middleware: function (connect, options) {
                        var middlewares = [];
                        // Fall back to the last base entry when no explicit
                        // directory option is configured.
                        var directory = options.directory || options.base[options.base.length - 1];
                        if (!Array.isArray(options.base)) {
                            options.base = [options.base];
                        }
                        options.base.forEach(function(base) {
                            // Serve static files.
                            middlewares.push(connect.static(base));
                        });
                        // Make directory browse-able.
                        middlewares.push(connect.directory(directory));
                        // Stub out cordova.js so the app also runs in a plain
                        // browser; the injected script warns (in Chinese) that
                        // device features are unavailable in web-debug mode.
                        middlewares.push(function (req, res, next) {
                            if (req.url == '/cordova.js') {
                                res.writeHead(200, {
                                    'Content-Type': 'text/javascript'
                                });
                                res.end('console.warn("当前处于网页调试状态,所有设备功能将不可用")');
                                return;
                            }
                            next();
                        });
                        return middlewares;
                    }
                }
            }
        }
    });
    grunt.loadNpmTasks('grunt-contrib-connect');
    // Map of package name -> git clone URL, used by the "download" task.
    var registry = grunt.file.readJSON('registry.json');
    // Read app.json and pass the parsed object to `method`.
    var readApp = function (method) {
        var filename = 'app.json'
        var app = grunt.file.readJSON(filename);
        method(app);
    };
    // Read app.json, let `method` mutate it, then write it back (4-space indent).
    var editApp = function (method) {
        var filename = 'app.json';
        readApp(function (app) {
            method(app);
            fs.writeFileSync(filename, JSON.stringify(app, null, 4));
        });
    };
    // init: fetch the touch library git submodule.
    grunt.registerTask('init', function () {
        var log = grunt.log.write('Installing touch library ...');
        var done = this.async();
        exec('git submodule update --init', function (err, stdout, stderr) {
            if (err) {
                log.error(stderr);
                done(false);
                return;
            }
            log.ok();
            done();
        });
    });
    // register: add a package to app.json's "requires" list (idempotent).
    grunt.registerTask('register', function (packageName) {
        var log = grunt.log.write('add ' + packageName + ' to app.json...');
        editApp(function (app) {
            if (app.requires.indexOf(packageName) < 0) {
                app.requires.push(packageName);
            }
        });
        log.ok();
    });
    // unregister: remove a package from app.json's "requires" list if present.
    grunt.registerTask('unregister', function (packageName) {
        var log = grunt.log.write('remove ' + packageName + ' from app.json...');
        editApp(function (app) {
            var index = app.requires.indexOf(packageName);
            if (!(index < 0)) {
                app.requires.splice(index, 1);
            };
        });
        log.ok();
    });
    // refresh: rebuild Sencha app metadata after package changes.
    grunt.registerTask('refresh', function () {
        var log = grunt.log.write('refreshing sencha app...');
        var done = this.async();
        exec('sencha app refresh', function (err, stdout, stderr) {
            if (err) {
                done(false);
                return;
            }
            log.ok();
            done();
        });
    });
    // download: git-clone a package from its registry URL into packages/<name>.
    grunt.registerTask('download', function (packageName) {
        var log = grunt.log.write('download ' + packageName + '...');
        var done = this.async();
        exec('git clone ' + registry[packageName] + ' packages/' + packageName, function (err, stdout, stderr) {
            if (err) {
                done(false);
                return;
            }
            log.ok();
            done();
        });
    });
    // delete: remove packages/<name> recursively from disk.
    grunt.registerTask('delete', function (packageName) {
        var log = grunt.log.write('delete ' + packageName + '...');
        var done = this.async();
        rimraf('packages/' + packageName, function (err) {
            if (err) {
                done(false);
                return;
            }
            log.ok();
            done();
        });
    });
    // ls: print the packages currently required by app.json.
    grunt.registerTask('ls', function () {
        var log = grunt.log.writeln('listing installed packages');
        readApp(function (app) {
            console.log(app.requires);
        });
        log.ok();
    });
    // remove = delete files + unregister from app.json + refresh.
    grunt.registerTask('remove', function (packageName) {
        grunt.task.run('delete:' + packageName, 'unregister:' + packageName, 'refresh');
    });
    // install = clean slate + download + register + refresh.
    grunt.registerTask('install', function (packageName) {
        grunt.task.run('delete:' + packageName, 'download:' + packageName, 'register:' + packageName, 'refresh');
    });
    grunt.registerTask('default', ['connect']);
};
| gengen1988/bootplate | Gruntfile.js | JavaScript | gpl-3.0 | 5,163 |
package de.grovie.renderer.GL2;
import java.nio.IntBuffer;
import javax.media.opengl.GL2;
import de.grovie.exception.GvExRendererBufferSet;
import de.grovie.exception.GvExRendererDrawGroup;
import de.grovie.exception.GvExRendererVertexArray;
import de.grovie.renderer.GvBufferSet;
import de.grovie.renderer.GvContext;
import de.grovie.renderer.GvDrawGroup;
import de.grovie.renderer.GvRenderer;
import de.grovie.renderer.GvVertexArray;
/**
 * JOGL/GL2 implementation of {@link GvContext}: creates vertex array
 * objects, draw groups and buffer sets for the GL2 rendering backend.
 */
public class GvContextGL2 extends GvContext {

	public GvContextGL2(GvRenderer renderer) {
		super(renderer);
	}

	/**
	 * Allocates one OpenGL vertex array object and records the generated
	 * id on the supplied wrapper.
	 *
	 * @param vao wrapper object that receives the generated VAO id
	 * @return the same wrapper instance, with its id populated
	 * @throws GvExRendererVertexArray if VAO generation fails
	 */
	@Override
	public GvVertexArray createVertexArray(GvVertexArray vao) throws GvExRendererVertexArray {
		try {
			// The JOGL GL2 handle is owned by the renderer's illustrator.
			GL2 gl2 = ((GvIllustratorGL2) lRenderer.getIllustrator()).getGL2();
			// glGenVertexArrays writes the new id into the wrapped buffer.
			int[] generatedIds = new int[1];
			IntBuffer idBuffer = IntBuffer.wrap(generatedIds);
			gl2.glGenVertexArrays(1, idBuffer);
			vao.setId(generatedIds[0]);
			return vao;
		} catch (Exception e) {
			// NOTE(review): the original cause is dropped here; confirm
			// whether GvExRendererVertexArray offers a (String, Throwable)
			// constructor that could preserve it.
			throw new GvExRendererVertexArray("Error generating VAO.");
		}
	}

	/** Creates an empty draw group. */
	@Override
	public GvDrawGroup createDrawGroup() throws GvExRendererDrawGroup {
		return new GvDrawGroup();
	}

	/** Creates a GL2-specific buffer set. */
	@Override
	public GvBufferSet createBufferSet() throws GvExRendererBufferSet {
		return new GvBufferSetGL2();
	}
}
| grovie/grovie | grovie/src/de/grovie/renderer/GL2/GvContextGL2.java | Java | gpl-3.0 | 1,449 |
/*
* This file is part of the L2J Mobius project.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package handlers.telnethandlers;
import java.io.PrintWriter;
import java.net.Socket;
import com.l2jmobius.gameserver.handler.ITelnetHandler;
/**
* @author UnAfraid
*/
/**
 * Telnet handler that prints the list of available telnet commands
 * ("help") and the debug sub-command list ("help debug").
 */
public class HelpHandler implements ITelnetHandler
{
	// Command names this handler is registered for.
	private final String[] _commands =
	{
		"help"
	};
	/**
	 * Prints the command overview for "help" or the debug command list
	 * for "help debug"; any other input prints nothing.
	 *
	 * NOTE(review): this method always returns {@code false}, even when
	 * the command was handled — confirm whether the dispatcher expects
	 * {@code true} for a consumed command.
	 */
	@Override
	public boolean useCommand(String command, PrintWriter _print, Socket _cSocket, int _uptime)
	{
		if (command.equals("help"))
		{
			_print.println("The following is a list of all available commands: ");
			_print.println("help - shows this help.");
			_print.println("status - displays basic server statistics.");
			_print.println("gamestat privatestore - displays info about stores");
			_print.println("performance - shows server performance statistics.");
			_print.println("forcegc - forced garbage collection.");
			_print.println("purge - removes finished threads from thread pools.");
			_print.println("memusage - displays memory amounts in JVM.");
			_print.println("announce <text> - announces <text> in game.");
			_print.println("msg <nick> <text> - Sends a whisper to char <nick> with <text>.");
			_print.println("gmchat <text> - Sends a message to all GMs with <text>.");
			_print.println("gmlist - lists all gms online.");
			_print.println("kick - kick player <name> from server.");
			_print.println("shutdown <time> - shuts down server in <time> seconds.");
			_print.println("restart <time> - restarts down server in <time> seconds.");
			_print.println("abort - aborts shutdown/restart.");
			_print.println("give <player> <itemid> <amount>");
			_print.println("enchant <player> <itemType> <enchant> (itemType: 1 - Helmet, 2 - Chest, 3 - Gloves, 4 - Feet, 5 - Legs, 6 - Right Hand, 7 - Left Hand, 8 - Left Ear, 9 - Right Ear , 10 - Left Finger, 11 - Right Finger, 12- Necklace, 13 - Underwear, 14 - Back, 15 - Belt, 0 - No Enchant)");
			_print.println("debug <cmd> - executes the debug command (see 'help debug').");
			_print.println("reload <type> - reload data");
			_print.println("jail <player> [time]");
			_print.println("unjail <player>");
			_print.println("quit - closes telnet session.");
		}
		else if (command.equals("help debug"))
		{
			_print.println("The following is a list of all available debug commands: ");
			_print.println("full - Dumps complete debug information to an file (recommended)");
			_print.println("decay - prints info about the DecayManager");
			_print.println("packetsend - Send packet data to a player");
			_print.println("PacketTP - prints info about the General Packet ThreadPool");
			_print.println("IOPacketTP - prints info about the I/O Packet ThreadPool");
			_print.println("GeneralTP - prints info about the General ThreadPool");
		}
		return false;
	}
	/** Returns the command names this handler serves. */
	@Override
	public String[] getCommandList()
	{
		return _commands;
	}
}
| karolusw/l2j | game/data/scripts/handlers/telnethandlers/HelpHandler.java | Java | gpl-3.0 | 3,766 |
# -*- coding: utf-8 -*-
#
# forms_function.py - Function Flask Forms
#
from flask_babel import lazy_gettext
from flask_wtf import FlaskForm
from wtforms import BooleanField
from wtforms import DecimalField
from wtforms import SelectField
from wtforms import StringField
from wtforms import SubmitField
from wtforms import widgets
from wtforms.widgets import NumberInput
from mycodo.config_translations import TRANSLATIONS
from mycodo.mycodo_flask.utils.utils_general import generate_form_action_list
from mycodo.utils.function_actions import parse_function_action_information
class FunctionAdd(FlaskForm):
    """Form for adding a new Function controller."""
    # Dropdown of available function types; choices are populated by the view.
    function_type = SelectField()
    function_add = SubmitField(TRANSLATIONS['add']['title'])
class FunctionMod(FlaskForm):
    """Form for modifying and controlling an existing Function controller."""
    # Build the Action dropdown choices once, at class-definition time, from
    # the parsed function-action information.
    choices_actions = []
    dict_actions = parse_function_action_information()
    list_actions_sorted = generate_form_action_list(dict_actions)
    for each_action in list_actions_sorted:
        choices_actions.append((each_action, dict_actions[each_action]['name']))
    action_type = SelectField(
        choices=[('', TRANSLATIONS['select_one']['title'])] + choices_actions)
    # Hidden identifiers for the function being edited.
    function_id = StringField('Function ID', widget=widgets.HiddenInput())
    function_type = StringField('Function Type', widget=widgets.HiddenInput())
    name = StringField(TRANSLATIONS['name']['title'])
    log_level_debug = BooleanField(
        TRANSLATIONS['log_level_debug']['title'])
    # Submit buttons for the various controller operations.
    add_action = SubmitField(lazy_gettext('Add Action'))
    execute_all_actions = SubmitField(lazy_gettext('Execute All Actions'))
    function_activate = SubmitField(TRANSLATIONS['activate']['title'])
    function_deactivate = SubmitField(TRANSLATIONS['deactivate']['title'])
    function_mod = SubmitField(TRANSLATIONS['save']['title'])
    function_delete = SubmitField(TRANSLATIONS['delete']['title'])
class Actions(FlaskForm):
    """Form for editing a single Action attached to a Function controller."""
    # Hidden identifiers linking this action back to its function.
    function_type = StringField(
        'Function Type', widget=widgets.HiddenInput())
    function_id = StringField(
        'Function ID', widget=widgets.HiddenInput())
    function_action_id = StringField(
        'Function Action ID', widget=widgets.HiddenInput())
    # Parameters for the various action types; which fields are shown
    # depends on the selected action.
    pause_duration = DecimalField(
        lazy_gettext('Duration (seconds)'),
        widget=NumberInput(step='any'))
    do_action_string = StringField(lazy_gettext('Action String'))
    do_unique_id = StringField(lazy_gettext('Controller ID'))
    do_output_state = StringField(lazy_gettext('State'))
    do_output_amount = DecimalField(
        lazy_gettext('Amount'),
        widget=NumberInput(step='any'))
    do_output_duration = DecimalField(
        lazy_gettext('Duration (seconds)'),
        widget=NumberInput(step='any'))
    do_output_pwm = DecimalField(
        lazy_gettext('Duty Cycle'),
        widget=NumberInput(step='any'))
    do_output_pwm2 = DecimalField(
        lazy_gettext('Duty Cycle'),
        widget=NumberInput(step='any'))
    do_camera_duration = DecimalField(
        lazy_gettext('Duration (seconds)'),
        widget=NumberInput(step='any'))
    save_action = SubmitField(TRANSLATIONS['save']['title'])
    delete_action = SubmitField(TRANSLATIONS['delete']['title'])
| kizniche/Mycodo | mycodo/mycodo_flask/forms/forms_function.py | Python | gpl-3.0 | 3,125 |
<?php
namespace Autobahn\Cli;
use Symfony\Component\Console\Formatter\OutputFormatterStyle;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
/**
* Class Application
* @package Autobahn\Cli
*/
class Application extends \Symfony\Component\Console\Application
{
    /**
     * @inheritdoc
     *
     * Additionally registers a "code" output formatter style (black text
     * on a white background) so commands can render inline code snippets
     * via <code>...</code> tags.
     */
    protected function configureIO(InputInterface $input, OutputInterface $output)
    {
        parent::configureIO($input, $output);
        // code style
        $code = new OutputFormatterStyle('black', 'white');
        $output->getFormatter()->setStyle('code', $code);
    }
}
| mrgrain/autobahn-cli | src/Application.php | PHP | gpl-3.0 | 640 |
// Auto-generated Doxygen navigation data for class Undoable:
// [display name, target href, synopsis] triples for each documented member.
// Do not edit by hand — regenerate with Doxygen instead.
var class_undoable =
[
    [ "Undoable", "class_undoable.html#a9b5187143377380164baa7bda8756b30", null ],
    [ "execute", "class_undoable.html#a8ce5a456598a27152d873ebaeb063e00", null ],
    [ "isClearingUndoRedo", "class_undoable.html#a58b3ab47ca3687f314542ef72497b858", null ],
    [ "isExecutable", "class_undoable.html#a98cbf0431ce4ec8a1131f27e965afdcd", null ],
    [ "isUndoable", "class_undoable.html#a55ea467c2c1006a7cdec39605a9d4348", null ],
    [ "redo", "class_undoable.html#acf782d879f8241b0ddf3655bb69e30e8", null ],
    [ "undo", "class_undoable.html#a19f85257c6403dbf4eeb62689f446b48", null ]
];
using CP77.CR2W.Reflection;
namespace CP77.CR2W.Types
{
/// <summary>
/// CR2W-serialized "RemoteBreach" action variable; presumably the quickhack
/// action of the same name — all behavior is inherited from
/// <see cref="ActionBool"/>.
/// </summary>
[REDMeta]
public class RemoteBreach : ActionBool
{
    /// <summary>Creates the variable bound to its owning file, parent variable and name.</summary>
    public RemoteBreach(CR2WFile cr2w, CVariable parent, string name) : base(cr2w, parent, name) { }
}
}
| Traderain/Wolven-kit | CP77.CR2W/Types/cp77/RemoteBreach.cs | C# | gpl-3.0 | 215 |
#!/usr/bin/env python3
# encoding: utf-8
# Copyright (C) 2020 Space Science and Engineering Center (SSEC),
# University of Wisconsin-Madison.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This file is part of the polar2grid software package. Polar2grid takes
# satellite observation data, remaps it, and writes it to a file format for
# input into another program.
# Documentation: http://www.ssec.wisc.edu/software/polar2grid/
"""The VIIRS SDR Reader operates on Science Data Record (SDR) HDF5 files from
the Suomi National Polar-orbiting Partnership's (NPP) and/or the NOAA20
Visible/Infrared Imager Radiometer Suite (VIIRS) instrument. The VIIRS
SDR reader ignores filenames and uses internal file content to determine
the type of file being provided, but SDR are typically named as below
and have corresponding geolocation files::
SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5
The VIIRS SDR reader supports all instrument spectral bands, identified as
the products shown below. It supports terrain corrected or non-terrain corrected
navigation files. Geolocation files must be included when specifying filepaths to
readers and ``polar2grid.sh``. The VIIRS reader can be specified to the ``polar2grid.sh`` script
with the reader name ``viirs_sdr``.
This reader's default remapping algorithm is ``ewa`` for Elliptical Weighted
Averaging resampling. The ``--fornav-D`` parameter set to 40 and the
``--fornav-d`` parameter set to 2.
+---------------------------+-----------------------------------------------------+
| Product Name | Description |
+===========================+=====================================================+
| i01 | I01 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| i02 | I02 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| i03 | I03 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| i04 | I04 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| i05 | I05 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| i01_rad | I01 Radiance Band |
+---------------------------+-----------------------------------------------------+
| i02_rad | I02 Radiance Band |
+---------------------------+-----------------------------------------------------+
| i03_rad | I03 Radiance Band |
+---------------------------+-----------------------------------------------------+
| i04_rad | I04 Radiance Band |
+---------------------------+-----------------------------------------------------+
| i05_rad | I05 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m01 | M01 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m02 | M02 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m03 | M03 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m04 | M04 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m05 | M05 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m06 | M06 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m07 | M07 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m08 | M08 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m09 | M09 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m10 | M10 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m11 | M11 Reflectance Band |
+---------------------------+-----------------------------------------------------+
| m12 | M12 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m13 | M13 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m14 | M14 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m15 | M15 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m16 | M16 Brightness Temperature Band |
+---------------------------+-----------------------------------------------------+
| m01_rad | M01 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m02_rad | M02 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m03_rad | M03 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m04_rad | M04 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m05_rad | M05 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m06_rad | M06 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m07_rad | M07 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m08_rad | M08 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m09_rad | M09 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m10_rad | M10 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m11_rad | M11 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m12_rad | M12 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m13_rad | M13 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m14_rad | M14 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m15_rad | M15 Radiance Band |
+---------------------------+-----------------------------------------------------+
| m16_rad | M16 Radiance Band |
+---------------------------+-----------------------------------------------------+
| dnb | Raw DNB Band (not useful for images) |
+---------------------------+-----------------------------------------------------+
| histogram_dnb | Histogram Equalized DNB Band |
+---------------------------+-----------------------------------------------------+
| adaptive_dnb | Adaptive Histogram Equalized DNB Band |
+---------------------------+-----------------------------------------------------+
| dynamic_dnb | Dynamic DNB Band from Steve Miller and |
| | Curtis Seaman. Uses erf to scale the data. |
+---------------------------+-----------------------------------------------------+
| hncc_dnb | Simplified High and Near-Constant Contrast |
| | Approach from Stephan Zinke |
+---------------------------+-----------------------------------------------------+
| ifog | Temperature difference between I05 and I04 |
+---------------------------+-----------------------------------------------------+
| i_solar_zenith_angle | I Band Solar Zenith Angle |
+---------------------------+-----------------------------------------------------+
| i_solar_azimuth_angle | I Band Solar Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| i_sat_zenith_angle | I Band Satellite Zenith Angle |
+---------------------------+-----------------------------------------------------+
| i_sat_azimuth_angle | I Band Satellite Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| m_solar_zenith_angle | M Band Solar Zenith Angle |
+---------------------------+-----------------------------------------------------+
| m_solar_azimuth_angle | M Band Solar Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| m_sat_zenith_angle | M Band Satellite Zenith Angle |
+---------------------------+-----------------------------------------------------+
| m_sat_azimuth_angle | M Band Satellite Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| dnb_solar_zenith_angle | DNB Band Solar Zenith Angle |
+---------------------------+-----------------------------------------------------+
| dnb_solar_azimuth_angle | DNB Band Solar Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| dnb_sat_zenith_angle | DNB Band Satellite Zenith Angle |
+---------------------------+-----------------------------------------------------+
| dnb_sat_azimuth_angle | DNB Band Satellite Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| dnb_lunar_zenith_angle | DNB Band Lunar Zenith Angle |
+---------------------------+-----------------------------------------------------+
| dnb_lunar_azimuth_angle | DNB Band Lunar Azimuth Angle |
+---------------------------+-----------------------------------------------------+
| true_color | Ratio sharpened rayleigh corrected true color |
+---------------------------+-----------------------------------------------------+
| false_color | Ratio sharpened rayleigh corrected false color |
+---------------------------+-----------------------------------------------------+
"""
from __future__ import annotations
from argparse import ArgumentParser, _ArgumentGroup
from typing import Optional
from satpy import DataQuery, Scene
from polar2grid.core.script_utils import ExtendConstAction
from ._base import ReaderProxyBase
# Satpy dataset names for the five imagery-resolution (I) bands.
I_PRODUCTS = [
    "I01",
    "I02",
    "I03",
    "I04",
    "I05",
]
# Polar2Grid names for I-band geolocation angle products (371 m resolution).
I_ANGLE_PRODUCTS = [
    "i_solar_zenith_angle",
    "i_solar_azimuth_angle",
    "i_sat_zenith_angle",
    "i_sat_azimuth_angle",
]
# Satpy dataset names for the sixteen moderate-resolution (M) bands.
M_PRODUCTS = [
    "M01",
    "M02",
    "M03",
    "M04",
    "M05",
    "M06",
    "M07",
    "M08",
    "M09",
    "M10",
    "M11",
    "M12",
    "M13",
    "M14",
    "M15",
    "M16",
]
# Polar2Grid names for M-band geolocation angle products (742 m resolution).
M_ANGLE_PRODUCTS = [
    "m_solar_zenith_angle",
    "m_solar_azimuth_angle",
    "m_sat_zenith_angle",
    "m_sat_azimuth_angle",
]
# Day/Night Band enhancements (the raw "dnb" product is excluded here).
DNB_PRODUCTS = [
    "histogram_dnb",
    "adaptive_dnb",
    "dynamic_dnb",
    "hncc_dnb",
]
# DNB geolocation angle products, including lunar angles.
DNB_ANGLE_PRODUCTS = [
    "dnb_solar_zenith_angle",
    "dnb_solar_azimuth_angle",
    "dnb_sat_zenith_angle",
    "dnb_sat_azimuth_angle",
    "dnb_lunar_zenith_angle",
    "dnb_lunar_azimuth_angle",
]
# RGB composites and other derived products.
TRUE_COLOR_PRODUCTS = ["true_color"]
FALSE_COLOR_PRODUCTS = ["false_color"]
OTHER_COMPS = [
    "ifog",
]
# Mapping of Polar2Grid product name -> Satpy name or DataQuery; populated below.
PRODUCT_ALIASES = {}
def _process_legacy_and_rad_products(satpy_names, band_aliases, rad_aliases):
    """Register lowercase band aliases and matching radiance products.

    For each uppercase Satpy band name, record its lowercase Polar2Grid
    alias in ``PRODUCT_ALIASES`` (and append it to ``band_aliases``), and
    register a ``<band>_rad`` radiance-calibrated DataQuery (appending its
    name to ``rad_aliases``).  Both list arguments are mutated in place.
    """
    for satpy_name in satpy_names:
        # Polar2Grid uses lowercase names; Satpy uses uppercase.
        alias = satpy_name.lower()
        PRODUCT_ALIASES[alias] = satpy_name
        band_aliases.append(alias)
        # Companion radiance product for M and I bands.
        radiance_alias = alias + "_rad"
        PRODUCT_ALIASES[radiance_alias] = DataQuery(name=satpy_name, calibration="radiance")
        rad_aliases.append(radiance_alias)
# Lowercase aliases and radiance variants for the I and M bands.
I_ALIASES = []
I_RAD_PRODUCTS = []
_process_legacy_and_rad_products(I_PRODUCTS, I_ALIASES, I_RAD_PRODUCTS)
M_ALIASES = []
M_RAD_PRODUCTS = []
_process_legacy_and_rad_products(M_PRODUCTS, M_ALIASES, M_RAD_PRODUCTS)
# Individual CREFL-corrected bands used to build the AWIPS true/false color
# composites.
_AWIPS_TRUE_COLOR = ["viirs_crefl08", "viirs_crefl04", "viirs_crefl03"]
_AWIPS_FALSE_COLOR = ["viirs_crefl07", "viirs_crefl09", "viirs_crefl08"]
# Angle-product aliases mapping Polar2Grid names to Satpy DataQuery objects.
# M-band geolocation is 742 m resolution, I-band is 371 m.
PRODUCT_ALIASES["dnb_solar_zenith_angle"] = DataQuery(name="dnb_solar_zenith_angle")
PRODUCT_ALIASES["dnb_solar_azimuth_angle"] = DataQuery(name="dnb_solar_azimuth_angle")
PRODUCT_ALIASES["dnb_sat_zenith_angle"] = DataQuery(name="dnb_satellite_zenith_angle")
PRODUCT_ALIASES["dnb_sat_azimuth_angle"] = DataQuery(name="dnb_satellite_azimuth_angle")
PRODUCT_ALIASES["dnb_lunar_zenith_angle"] = DataQuery(name="dnb_lunar_zenith_angle")
PRODUCT_ALIASES["dnb_lunar_azimuth_angle"] = DataQuery(name="dnb_lunar_azimuth_angle")
PRODUCT_ALIASES["m_solar_zenith_angle"] = DataQuery(name="solar_zenith_angle", resolution=742)
PRODUCT_ALIASES["m_solar_azimuth_angle"] = DataQuery(name="solar_azimuth_angle", resolution=742)
PRODUCT_ALIASES["m_sat_zenith_angle"] = DataQuery(name="satellite_zenith_angle", resolution=742)
PRODUCT_ALIASES["m_sat_azimuth_angle"] = DataQuery(name="satellite_azimuth_angle", resolution=742)
PRODUCT_ALIASES["i_solar_zenith_angle"] = DataQuery(name="solar_zenith_angle", resolution=371)
PRODUCT_ALIASES["i_solar_azimuth_angle"] = DataQuery(name="solar_azimuth_angle", resolution=371)
PRODUCT_ALIASES["i_sat_zenith_angle"] = DataQuery(name="satellite_zenith_angle", resolution=371)
PRODUCT_ALIASES["i_sat_azimuth_angle"] = DataQuery(name="satellite_azimuth_angle", resolution=371)
# Products loaded when the user specifies none (note: histogram_dnb excluded).
DEFAULT_PRODUCTS = I_ALIASES + M_ALIASES + DNB_PRODUCTS[1:] + TRUE_COLOR_PRODUCTS + FALSE_COLOR_PRODUCTS + OTHER_COMPS
# Every product name this reader can load.
P2G_PRODUCTS = I_ALIASES + M_ALIASES + DNB_PRODUCTS + I_RAD_PRODUCTS + M_RAD_PRODUCTS
P2G_PRODUCTS += I_ANGLE_PRODUCTS + M_ANGLE_PRODUCTS + DNB_ANGLE_PRODUCTS + OTHER_COMPS
P2G_PRODUCTS += TRUE_COLOR_PRODUCTS + FALSE_COLOR_PRODUCTS
# Day/night filtering by ``standard_name``: reflectance and color composites
# are only meaningful in daylight; the temperature-difference fog product is
# night-only.
FILTERS = {
    "day_only": {
        "standard_name": [
            "toa_bidirectional_reflectance",
            "true_color",
            "false_color",
            "natural_color",
            "corrected_reflectance",
        ],
    },
    "night_only": {
        "standard_name": ["temperature_difference"],
    },
}
class ReaderProxy(ReaderProxyBase):
    """Provide Polar2Grid-specific information about this reader's products."""

    is_polar2grid_reader = True

    def __init__(self, scn: Scene, user_products: list[str]):
        """Store the Scene and normalize the requested product list.

        If ``dynamic_dnb_saturation`` was requested (via
        ``--dnb-saturation-correction``) the ``dynamic_dnb`` alias is
        remapped to the saturation-corrected Satpy product, and the
        ``user_products`` list is mutated in place accordingly.
        """
        self.scn = scn
        self._modified_aliases = PRODUCT_ALIASES.copy()
        if "dynamic_dnb_saturation" in user_products:
            # they specified --dnb-saturation-correction
            # let's modify the aliases so dynamic_dnb points to this product
            user_products.remove("dynamic_dnb_saturation")
            user_products.append("dynamic_dnb")
            self._modified_aliases["dynamic_dnb"] = DataQuery(name="dynamic_dnb_saturation")
        self._orig_user_products = user_products
    def get_default_products(self) -> list[str]:
        """Get products to load if users hasn't specified any others."""
        return DEFAULT_PRODUCTS
    def get_all_products(self):
        """Get all polar2grid products that could be loaded."""
        return P2G_PRODUCTS
    @property
    def _aliases(self):
        # Alias table, possibly remapped for DNB saturation correction.
        return self._modified_aliases
def add_reader_argument_groups(
    parser: ArgumentParser, group: Optional[_ArgumentGroup] = None
) -> tuple[Optional[_ArgumentGroup], Optional[_ArgumentGroup]]:
    """Add reader-specific command line arguments to an existing argument parser.

    If ``group`` is provided then arguments are added to this group. If not,
    a new group is added to the parser and arguments added to this new group.

    Returns a ``(products_group, advanced_group)`` tuple; this reader has no
    advanced group so the second element is always ``None``.
    """
    if group is None:
        group = parser.add_argument_group(title="VIIRS SDR Reader")
    # Each flag appends a preset list of product names to ``args.products``.
    group.add_argument(
        "--i-bands",
        dest="products",
        action=ExtendConstAction,
        const=I_ALIASES,
        help="Add all I-band raw products to list of products",
    )
    group.add_argument(
        "--m-bands",
        dest="products",
        action=ExtendConstAction,
        const=M_ALIASES,
        help="Add all M-band raw products to list of products",
    )
    group.add_argument(
        "--dnb-angle-products",
        dest="products",
        action=ExtendConstAction,
        const=DNB_ANGLE_PRODUCTS,
        help="Add DNB-band geolocation 'angle' products to list of products",
    )
    group.add_argument(
        "--dnb-saturation-correction",
        dest="products",
        action=ExtendConstAction,
        const=["dynamic_dnb_saturation"],
        help="Enable dynamic DNB saturation correction (normally used for aurora scenes)",
    )
    group.add_argument(
        "--i-angle-products",
        dest="products",
        action=ExtendConstAction,
        const=I_ANGLE_PRODUCTS,
        help="Add I-band geolocation 'angle' products to list of products",
    )
    group.add_argument(
        "--m-angle-products",
        dest="products",
        action=ExtendConstAction,
        const=M_ANGLE_PRODUCTS,
        help="Add M-band geolocation 'angle' products to list of products",
    )
    # Help text fixed: these add radiance products, not geolocation products
    # (previous wording was a copy-paste of the angle-product help).
    group.add_argument(
        "--m-rad-products",
        dest="products",
        action=ExtendConstAction,
        const=M_RAD_PRODUCTS,
        help="Add M-band radiance products to list of products",
    )
    group.add_argument(
        "--i-rad-products",
        dest="products",
        action=ExtendConstAction,
        const=I_RAD_PRODUCTS,
        help="Add I-band radiance products to list of products",
    )
    group.add_argument(
        "--awips-true-color",
        dest="products",
        action=ExtendConstAction,
        const=_AWIPS_TRUE_COLOR,
        help="Add individual CREFL corrected products to create the 'true_color' composite in AWIPS.",
    )
    group.add_argument(
        "--awips-false-color",
        dest="products",
        action=ExtendConstAction,
        const=_AWIPS_FALSE_COLOR,
        help="Add individual CREFL corrected products to create the 'false_color' composite in AWIPS.",
    )
    return group, None
| davidh-ssec/polar2grid | polar2grid/readers/viirs_sdr.py | Python | gpl-3.0 | 21,308 |
using System.Diagnostics.CodeAnalysis;
using JetBrains.Annotations;
using NSubstitute;
using NUnit.Framework;
using Selkie.Geometry.Shapes;
using Selkie.NUnit.Extensions;
using Selkie.Services.Lines.Common.Dto;
using Selkie.Windsor;
namespace Selkie.Services.Lines.Tests
{
[ExcludeFromCodeCoverage]
[TestFixture]
internal sealed class LinesValidatorTests
{
[Theory]
[TestCase(0, 1, true)]
[TestCase(0, 0, false)]
[TestCase(1, 0, false)]
[TestCase(-1, 0, false)]
[TestCase(1, 2, false)]
public void ValidateDtosTest(int firstId,
int secondId,
bool result)
{
// assemble
var one = new LineDto
{
Id = firstId
};
var two = new LineDto
{
Id = secondId
};
LineDto[] lines =
{
one,
two
};
var logger = new LinesValidatorLogger(Substitute.For <ISelkieLogger>());
var sut = new LinesValidator(logger);
// act
// assert
Assert.AreEqual(result,
sut.ValidateDtos(lines));
}
[Theory]
[AutoNSubstituteData]
public void ValidateReturnsFalseForEmptyTest([NotNull] LinesValidator sut)
{
// assemble
var lines = new LineDto[0];
// act
// assert
Assert.False(sut.ValidateDtos(lines));
}
[Theory]
[AutoNSubstituteData]
public void ValidateReturnsFalseForOneLineTest([NotNull] LinesValidator sut)
{
// assemble
LineDto[] lines =
{
new LineDto()
};
// act
// assert
Assert.False(sut.ValidateDtos(lines));
}
[Test]
[TestCase(0, 1, true)]
[TestCase(0, 0, false)]
[TestCase(1, 0, false)]
[TestCase(-1, 0, false)]
[TestCase(1, 2, false)]
public void ValidateLinesTest(int firstId,
int secondId,
bool result)
{
// assemble
var one = Substitute.For <ILine>();
one.Id.Returns(firstId);
var two = Substitute.For <ILine>();
two.Id.Returns(secondId);
ILine[] lines =
{
one,
two
};
var logger = new LinesValidatorLogger(Substitute.For <ISelkieLogger>());
var sut = new LinesValidator(logger);
// act
// assert
Assert.AreEqual(result,
sut.ValidateLines(lines));
}
}
} | tschroedter/Selkie.Services.Lines | Selkie.Services.Lines.Tests/LinesValidatorTests.cs | C# | gpl-3.0 | 2,953 |
# -*- coding: utf-8 -*-
from django.db.models import Q
from django.conf import settings
from django.contrib.auth.models import Permission, Group
from django.utils.html import escape
from ajax_select import register, LookupChannel
from .models import (
Attribute,
ServerAttribute,
Package,
Property,
DeviceLogical,
Computer,
UserProfile,
)
@register('user_profile')
class UserProfileLookup(LookupChannel):
    """Ajax lookup over user profiles by username, first or last name."""

    model = UserProfile

    def can_add(self, user, model):
        # Profiles are never created from the lookup widget.
        return False

    def get_query(self, q, request):
        name_match = (
            Q(username__icontains=q)
            | Q(first_name__icontains=q)
            | Q(last_name__icontains=q)
        )
        return self.model.objects.filter(name_match).order_by('username')

    def format_item_display(self, obj):
        # Append the real name (when any part of it is set) to the link.
        full_name = ' '.join(filter(None, [obj.first_name, obj.last_name]))
        if full_name:
            return '{} ({})'.format(obj.link(), full_name)
        return obj.link()

    def format_match(self, obj):
        full_name = ' '.join(filter(None, [obj.first_name, obj.last_name]))
        return escape('{} ({})'.format(obj, full_name))
@register('domain_admin')
class DomainAdminLookup(UserProfileLookup):
    """User-profile lookup restricted to members of the "Domain Admin" group."""

    def get_query(self, q, request):
        admin_group = Group.objects.get(name="Domain Admin")
        name_match = (
            Q(username__icontains=q)
            | Q(first_name__icontains=q)
            | Q(last_name__icontains=q)
        )
        return self.model.objects.filter(
            name_match,
            groups__in=[admin_group]
        ).order_by('username')
@register('permission')
class PermissionLookup(LookupChannel):
    """Ajax lookup over auth permissions by name or codename."""

    model = Permission

    def get_query(self, q, request):
        text_match = Q(name__icontains=q) | Q(codename__icontains=q)
        return self.model.objects.filter(text_match).order_by('name')

    def format_match(self, obj):
        return escape(str(obj))

    def format_item_display(self, obj):
        return str(obj)

    def get_objects(self, ids):
        return self.model.objects.filter(pk__in=ids).order_by('name')
@register('attribute')
class AttributeLookup(LookupChannel):
    """Ajax lookup over attributes, with optional "PREFIX-value" query syntax."""

    model = Attribute

    def get_query(self, q, request):
        # If the query starts with a known property prefix (e.g. "CID-...")
        # and has text after the separator, search prefix and value
        # separately; otherwise do a free-text search over all fields.
        properties = Property.objects.values_list('prefix', flat=True)
        if q[0:Property.PREFIX_LEN].upper() \
                in (item.upper() for item in properties) \
                and len(q) > (Property.PREFIX_LEN + 1):
            queryset = self.model.objects.scope(request.user.userprofile).filter(
                property_att__prefix__icontains=q[0:Property.PREFIX_LEN],
                value__icontains=q[Property.PREFIX_LEN + 1:],
                property_att__enabled=True
            )
        else:
            queryset = self.model.objects.scope(request.user.userprofile).filter(
                Q(value__icontains=q) |
                Q(description__icontains=q) |
                Q(property_att__prefix__icontains=q)
            ).filter(property_att__enabled=True)

        # exclude available and unsubscribed computers (inactive)
        # CID attribute values hold computer ids as strings, hence str().
        inactive_computers = [
            str(x) for x in Computer.inactive.values_list('id', flat=True)
        ]
        queryset = queryset.exclude(
            property_att__prefix='CID',
            value__in=inactive_computers
        ).order_by('value')

        return queryset

    def format_match(self, obj):
        return escape(obj.__str__())

    def format_item_display(self, obj):
        return obj.link()

    def can_add(self, user, model):
        # Attributes are never created from the lookup widget.
        return False

    def get_objects(self, ids):
        # When computers are searched by something other than their id,
        # CID attributes are ordered by description and appended (queryset
        # union via |) after the non-CID attributes; otherwise a single
        # ordering is enough.
        if settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0] != "id":
            return self.model.objects.filter(
                pk__in=ids
            ).filter(
                ~Q(property_att__prefix='CID')
            ).order_by(
                'property_att',
                'value'
            ) | self.model.objects.filter(
                pk__in=ids,
                property_att__prefix='CID'
            ).order_by(
                'description'
            )
        else:
            return self.model.objects.filter(
                pk__in=ids
            ).order_by(
                'property_att',
                'value'
            )
@register('package')
class PackageLookup(LookupChannel):
    """Ajax lookup over packages, optionally restricted to one project."""

    model = Package

    def get_query(self, q, request):
        results = self.model.objects.scope(
            request.user.userprofile
        ).filter(name__icontains=q).order_by('name')
        # An optional ?project_id=... narrows the search to that project.
        project_id = request.GET.get('project_id', None)
        if project_id:
            results = results.filter(project__id=project_id)
        return results

    def format_match(self, obj):
        return escape(obj.name)

    def format_item_display(self, obj):
        return obj.link()

    def can_add(self, user, model):
        return False

    def get_objects(self, ids):
        return self.model.objects.filter(pk__in=ids).order_by('name')
@register('tag')
class TagLookup(LookupChannel):
    """Ajax lookup over enabled server-side attributes ("tags")."""

    model = ServerAttribute

    def get_query(self, q, request):
        text_match = (
            Q(value__icontains=q)
            | Q(description__icontains=q)
            | Q(property_att__prefix__icontains=q)
        )
        return self.model.objects.scope(request.user.userprofile).filter(
            property_att__enabled=True,
            property_att__sort='server'
        ).filter(text_match).order_by('value')

    def format_match(self, obj):
        return '{}-{} {}'.format(
            escape(obj.property_att.prefix),
            escape(obj.value),
            escape(obj.description)
        )

    def format_item_display(self, obj):
        return obj.link()

    def can_add(self, user, model):
        return False

    def get_objects(self, ids):
        return self.model.objects.filter(pk__in=ids).order_by(
            'property_att',
            'value'
        )
@register('devicelogical')
class DeviceLogicalLookup(LookupChannel):
    """Ajax lookup over logical devices, searched by device name."""

    model = DeviceLogical

    def get_query(self, q, request):
        return self.model.objects.filter(device__name__icontains=q)

    def format_match(self, obj):
        return escape(str(obj))

    def format_item_display(self, obj):
        return obj.link()

    def can_add(self, user, model):
        return False
@register('computer')
class ComputerLookup(LookupChannel):
    """Ajax lookup over computers.

    The first entry of settings.MIGASFREE_COMPUTER_SEARCH_FIELDS selects
    both the search field and the ordering of returned objects.
    """

    model = Computer

    def get_query(self, q, request):
        if settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0] == "id":
            return self.model.objects.scope(request.user.userprofile).filter(id__exact=q)
        else:
            # BUGFIX: the previous expression
            #   Q(id__exact=q) if isinstance(q, int) else Q() | Q(...)
            # parsed as `A if cond else (B | C)` because `|` binds tighter
            # than the conditional, so integer queries silently dropped the
            # text search.  Build the lookup explicitly instead: always
            # match on the configured field, and additionally on the
            # primary key when the query is an integer.
            lookup = Q(**{'{}__icontains'.format(settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0]): q})
            if isinstance(q, int):
                lookup = lookup | Q(id__exact=q)
            return self.model.objects.scope(request.user.userprofile).filter(
                lookup
            ).filter(
                # hide inactive (available/unsubscribed) computers
                ~Q(status__in=['available', 'unsubscribed'])
            )

    def format_match(self, obj):
        return obj.__str__()

    def format_item_display(self, obj):
        return obj.link()

    def can_add(self, user, model):
        return False

    def reorder(self, ids):
        # Re-sorts the given pks by the configured search field.
        return [row.id for row in Computer.objects.filter(
            pk__in=ids
        ).order_by(settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0])]

    def get_objects(self, ids):
        # in_bulk() returns an unordered mapping, so rebuild the ordering
        # by hand; ids missing from the mapping are skipped.
        things = self.model.objects.in_bulk(ids)
        if settings.MIGASFREE_COMPUTER_SEARCH_FIELDS[0] == "id":
            return [things[aid] for aid in ids if aid in things]
        return [things[aid] for aid in self.reorder(ids) if aid in things]
| migasfree/migasfree | migasfree/server/lookups.py | Python | gpl-3.0 | 7,455 |
/* IcosaMapper - an rpg map editor based on equilateral triangles that form an icosahedron
* Copyright (C) 2013 Ville Jokela
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* contact me <ville.jokela@penny-craal.org>
*/
package org.penny_craal.icosamapper.map.layerrenderers;
import java.io.Serializable;
import java.util.Map;
import org.penny_craal.icosamapper.map.layerrenderers.variabletypes.VariableType;
/**
 * An abstract helper class for implementations of {@link LayerRenderer}.
 * Provides the array-rendering loop and variable validation, so concrete
 * renderers only implement per-byte rendering and validated storage.
 * @author Ville Jokela
 */
public abstract class LayerRendererHelper implements LayerRenderer, Serializable {
    /**
     * Renders every value through {@code renderByte}.
     *
     * @param values raw layer values
     * @return one rendered int per input byte, in the same order
     */
    @Override
    public int[] renderArray(byte[] values) {
        int[] rendered = new int[values.length];
        for (int i = 0; i < rendered.length; i++) {
            rendered[i] = renderByte(values[i]);
        }
        return rendered;
    }

    /**
     * Sets a renderer variable after validating it against {@code getVariables()}.
     * A value that fails the variable's own validity check is silently
     * ignored (existing behaviour, kept for compatibility).
     *
     * @param variableName name of the variable; must exist in {@code getVariables()}
     * @param value        new value for the variable
     * @throws IllegalArgumentException if the variable name is unknown
     */
    @Override
    public void setVariable(String variableName, Object value) {
        Map<String, VariableType> variables = getVariables();
        if (!variables.containsKey(variableName)) {
            // IllegalArgumentException is a RuntimeException, so callers
            // that caught the old RuntimeException still work.
            throw new IllegalArgumentException(
                    "tried to set unknown variable '" + variableName + "' to: " + value);
        }
        if (variables.get(variableName).isValid(value)) {
            setCheckedVariable(variableName, value);
        }
    }

    /** Stores a variable value that has already been validated. */
    abstract void setCheckedVariable(String variableName, Object value);
} | razalhague/IcosaMapper | src/main/java/org/penny_craal/icosamapper/map/layerrenderers/LayerRendererHelper.java | Java | gpl-3.0 | 2,008 |
# frozen_string_literal: true
require 'pio/open_flow13/meter'
# Verifies that Pio::OpenFlow13::Meter stores the meter id given to its
# constructor (rspec-given style: Given/When/Then).
describe Pio::OpenFlow13::Meter do
  describe '.new' do
    When(:meter) { Pio::OpenFlow13::Meter.new(options) }
    context 'with 1' do
      Given(:options) { 1 }
      # The constructed meter exposes the id unchanged.
      Then { meter.meter_id == 1 }
    end
  end
end
| trema/pio | spec/pio/open_flow13/meter_spec.rb | Ruby | gpl-3.0 | 283 |
<?php
namespace Orange\SearchBundle\DependencyInjection\Compiler;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\DependencyInjection\Compiler\CompilerPassInterface;
use Symfony\Component\DependencyInjection\Reference;
/**
 * Compiler pass that registers every service tagged "orange.search.filter"
 * with the orange.search.filter_manager service via addFilter().
 *
 * @author aameziane
 */
class FilterCompilerPass implements CompilerPassInterface
{
    public function process(ContainerBuilder $container)
    {
        // Nothing to wire when the manager itself is not defined.
        if (!$container->hasDefinition('orange.search.filter_manager')) {
            return;
        }

        $managerDefinition = $container->getDefinition('orange.search.filter_manager');

        // Each tagged service is added once per tag occurrence, keyed by its alias.
        foreach ($container->findTaggedServiceIds('orange.search.filter') as $serviceId => $tags) {
            foreach ($tags as $attributes) {
                $managerDefinition->addMethodCall(
                    'addFilter',
                    array(new Reference($serviceId), $attributes["alias"])
                );
            }
        }
    }
}
| Solerni-R1-1/SearchBundle | DependencyInjection/Compiler/FilterCompilerPass.php | PHP | gpl-3.0 | 1,087 |
package net.bemacized.grimoire.data.retrievers.storeretrievers;
import net.bemacized.grimoire.data.models.card.MtgCard;
import net.bemacized.grimoire.data.models.scryfall.ScryfallCard;
import net.bemacized.grimoire.data.retrievers.ScryfallRetriever;
import java.util.HashMap;
import java.util.logging.Level;
/**
 * Price retriever backed by Scryfall's card data (EUR/USD paper and MTGO tix).
 */
public class ScryfallPriceRetriever extends StoreRetriever {

	@Override
	public String getStoreName() {
		return "Scryfall";
	}

	@Override
	public String getStoreId() {
		return "SCF";
	}

	@Override
	public String[] supportedLanguages() {
		return new String[]{"English"};
	}

	@Override
	public long timeout() {
		return 1000 * 60 * 60 * 6; // 6 hours
	}

	/**
	 * Looks up the Scryfall card behind the given MtgCard and maps each
	 * price Scryfall reports into the resulting record.
	 *
	 * @param card card whose Scryfall id is used for the lookup
	 * @return price record with one entry per available price
	 * @throws UnknownStoreException if the Scryfall request fails
	 */
	@Override
	protected StoreCardPriceRecord _retrievePrice(MtgCard card) throws StoreAuthException, StoreServerErrorException, UnknownStoreException {
		try {
			ScryfallCard sc = ScryfallRetriever.getCardByScryfallId(card.getScryfallId());
			// Plain HashMap instead of double-brace initialization: the
			// anonymous subclass created by "new HashMap<...>() {{ ... }}"
			// pins a reference to this retriever and generates an extra
			// class for no benefit.
			HashMap<String, Price> prices = new HashMap<String, Price>();
			if (sc.getEur() != null)
				prices.put("Paper (EUR)", new Price(Double.parseDouble(sc.getEur()), Currency.EUR));
			if (sc.getTix() != null)
				prices.put("MTGO", new Price(Double.parseDouble(sc.getTix()), Currency.TIX));
			if (sc.getUsd() != null)
				prices.put("Paper (USD)", new Price(Double.parseDouble(sc.getUsd()), Currency.USD));
			return new StoreCardPriceRecord(card.getName(), card.getSet().getCode(), null, System.currentTimeMillis(), getStoreId(), prices);
		} catch (ScryfallRetriever.ScryfallRequest.UnknownResponseException | ScryfallRetriever.ScryfallRequest.NoResultException | ScryfallRetriever.ScryfallRequest.ScryfallErrorException e) {
			LOG.log(Level.SEVERE, "Could not retrieve price for known scryfall card", e);
			throw new UnknownStoreException();
		}
	}
}
| BeMacized/Grimoire | src/main/java/net/bemacized/grimoire/data/retrievers/storeretrievers/ScryfallPriceRetriever.java | Java | gpl-3.0 | 1,724 |
package com.leosoft.eam.userphoto;
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.provider.MediaStore;
import com.leosoft.eam.utils.InfoText;
/**
 * Picker that delegates image selection to an external gallery app.
 *
 * Created by Mickael on 10/10/2016.
 */
public class ImagePickerManager extends PickerManager {

    public ImagePickerManager(Activity activity) {
        super(activity);
    }

    /** Launches a chooser over external image-picking activities. */
    protected void sendToExternalApp( ){
        Intent pickIntent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
        pickIntent.setType("image/*");
        // Grant the receiving app read/write access to the returned Uri.
        pickIntent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION
                | Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
        activity.startActivityForResult(
                Intent.createChooser(pickIntent, InfoText.TEXT_SELECT_IMAGE),
                REQUEST_CODE_SELECT_IMAGE);
    }

    /** Remembers the Uri of the photo currently being processed. */
    @Override
    public void setUri(Uri uri)
    {
        mProcessingPhotoUri = uri;
    }
}
| SunnyLeo2008/ExpenseAccountManage | app/src/main/java/com/leosoft/eam/userphoto/ImagePickerManager.java | Java | gpl-3.0 | 937 |
/**
* Created on 28-Jun-2004
*
*/
package coaching.model;
/**
 * An example Car class.
 */
public class Car extends AbstractVehicle implements CarInterface {

    /**
     * Creates a car powered by a default petrol engine.
     */
    public Car() {
        this(new PetrolEngine());
    }

    /**
     * Creates a car powered by the supplied engine.
     *
     * @param engine the engine to install
     */
    public Car(final AbstractEngine engine) {
        super(engine);
    }
}
| Martin-Spamer/java-coaching | src/main/java/coaching/model/Car.java | Java | gpl-3.0 | 480 |
# Maked by Mr. Have fun! Version 0.2
# Shadow Weapon Coupons contributed by BiTi for the Official L2J Datapack Project
# Visit http://forum.l2jdp.com for more details
import sys
from com.l2scoria.gameserver.model.quest import State
from com.l2scoria.gameserver.model.quest import QuestState
from com.l2scoria.gameserver.model.quest.jython import QuestJython as JQuest
qn = "408_PathToElvenwizard"
# Quest item ids used by the item exchanges in Quest.onEvent / Quest.onTalk.
ROGELLIAS_LETTER = 1218
RED_DOWN = 1219
MAGICAL_POWERS_RUBY = 1220
PURE_AQUAMARINE = 1221
APPETIZING_APPLE = 1222
GOLD_LEAVES = 1223
IMMORTAL_LOVE = 1224
AMETHYST = 1225
NOBILITY_AMETHYST = 1226
FERTILITY_PERIDOT = 1229
# ETERNITY_DIAMOND is the final reward handed out when the quest completes.
ETERNITY_DIAMOND = 1230
CHARM_OF_GRAIN = 1272
SAP_OF_WORLD_TREE = 1273
LUCKY_POTPOURI = 1274
class Quest (JQuest) :
 """Quest 408: Path To Elvenwizard.

 Progress is tracked through quest items and the "cond" variable on the
 player's QuestState; dialog pages are returned as .htm file names.
 """

 def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)

 # Handles dialog-link events.  Event "1" starts the quest; events
 # "408_1".."408_5" hand out the errand items for the sub-quests.
 def onEvent (self,event,st) :
    htmltext = event
    player = st.getPlayer()
    if event == "1" :
      st.set("id","0")
      # class id 0x19 is required to take the quest; 0x1a already changed class
      if player.getClassId().getId() != 0x19 :
        if player.getClassId().getId() == 0x1a :
          htmltext = "30414-02a.htm"
        else:
          htmltext = "30414-03.htm"
      else:
        if player.getLevel()<19 :
          htmltext = "30414-04.htm"
        else:
          # ETERNITY_DIAMOND present means the quest was already completed
          if st.getQuestItemsCount(ETERNITY_DIAMOND) != 0 :
            htmltext = "30414-05.htm"
          else:
            st.set("cond","1")
            st.setState(STARTED)
            st.playSound("ItemSound.quest_accept")
            if st.getQuestItemsCount(FERTILITY_PERIDOT) == 0 :
              st.giveItems(FERTILITY_PERIDOT,1)
            htmltext = "30414-06.htm"
    elif event == "408_1" :
      if st.getInt("cond") != 0 and st.getQuestItemsCount(MAGICAL_POWERS_RUBY) != 0 :
        htmltext = "30414-10.htm"
      elif st.getInt("cond") != 0 and st.getQuestItemsCount(MAGICAL_POWERS_RUBY) == 0 and st.getQuestItemsCount(FERTILITY_PERIDOT) != 0 :
        if st.getQuestItemsCount(ROGELLIAS_LETTER) == 0 :
          st.giveItems(ROGELLIAS_LETTER,1)
        htmltext = "30414-07.htm"
        st.set("cond","2")
    elif event == "408_4" :
      # letter is exchanged for the charm of grain
      if st.getInt("cond") != 0 and st.getQuestItemsCount(ROGELLIAS_LETTER) != 0 :
        st.takeItems(ROGELLIAS_LETTER,st.getQuestItemsCount(ROGELLIAS_LETTER))
        if st.getQuestItemsCount(CHARM_OF_GRAIN) == 0 :
          st.giveItems(CHARM_OF_GRAIN,1)
        htmltext = "30157-02.htm"
    elif event == "408_2" :
      if st.getInt("cond") != 0 and st.getQuestItemsCount(PURE_AQUAMARINE) != 0 :
        htmltext = "30414-13.htm"
      elif st.getInt("cond") != 0 and st.getQuestItemsCount(PURE_AQUAMARINE) == 0 and st.getQuestItemsCount(FERTILITY_PERIDOT) != 0 :
        if st.getQuestItemsCount(APPETIZING_APPLE) == 0 :
          st.giveItems(APPETIZING_APPLE,1)
        htmltext = "30414-14.htm"
    elif event == "408_5" :
      # apple is exchanged for the sap of the world tree
      if st.getInt("cond") != 0 and st.getQuestItemsCount(APPETIZING_APPLE) != 0 :
        st.takeItems(APPETIZING_APPLE,st.getQuestItemsCount(APPETIZING_APPLE))
        if st.getQuestItemsCount(SAP_OF_WORLD_TREE) == 0 :
          st.giveItems(SAP_OF_WORLD_TREE,1)
        htmltext = "30371-02.htm"
    elif event == "408_3" :
      if st.getInt("cond") != 0 and st.getQuestItemsCount(NOBILITY_AMETHYST) != 0 :
        htmltext = "30414-17.htm"
      elif st.getInt("cond") != 0 and st.getQuestItemsCount(NOBILITY_AMETHYST) == 0 and st.getQuestItemsCount(FERTILITY_PERIDOT) != 0 :
        if st.getQuestItemsCount(IMMORTAL_LOVE) == 0 :
          st.giveItems(IMMORTAL_LOVE,1)
        htmltext = "30414-18.htm"
    return htmltext

 # Returns the dialog page for each quest NPC, performing the item
 # exchanges when the player carries the required collectibles.
 def onTalk (self,npc,player):
    htmltext = "<html><body>You are either not carrying out your quest or don't meet the criteria.</body></html>"
    st = player.getQuestState(qn)
    if not st : return htmltext
    npcId = npc.getNpcId()
    id = st.getState()
    if npcId != 30414 and id != STARTED : return htmltext
    if id == CREATED :
      st.setState(STARTING)
      st.set("cond","0")
      st.set("onlyone","0")
      st.set("id","0")
    if npcId == 30414 and st.getInt("cond")==0 :
      # NOTE(review): both branches return the same page; the <15 check
      # looks like leftover template code.
      if st.getInt("cond")<15 :
        htmltext = "30414-01.htm"
      else:
        htmltext = "30414-01.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(ROGELLIAS_LETTER)==0 and st.getQuestItemsCount(APPETIZING_APPLE)==0 and st.getQuestItemsCount(IMMORTAL_LOVE)==0 and st.getQuestItemsCount(CHARM_OF_GRAIN)==0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)==0 and st.getQuestItemsCount(LUCKY_POTPOURI)==0 and st.getQuestItemsCount(FERTILITY_PERIDOT)!=0 and (st.getQuestItemsCount(MAGICAL_POWERS_RUBY)==0 or st.getQuestItemsCount(NOBILITY_AMETHYST)==0 or st.getQuestItemsCount(PURE_AQUAMARINE)==0) :
      htmltext = "30414-11.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(ROGELLIAS_LETTER)!=0 :
      htmltext = "30414-08.htm"
    elif npcId == 30157 and st.getInt("cond")!=0 and st.getQuestItemsCount(ROGELLIAS_LETTER)!=0 :
      htmltext = "30157-01.htm"
    elif npcId == 30157 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(RED_DOWN)<5 :
      htmltext = "30157-03.htm"
    elif npcId == 30157 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(RED_DOWN)>=5 :
      # 5 red down + charm are exchanged for the ruby
      st.takeItems(RED_DOWN,st.getQuestItemsCount(RED_DOWN))
      st.takeItems(CHARM_OF_GRAIN,st.getQuestItemsCount(CHARM_OF_GRAIN))
      if st.getQuestItemsCount(MAGICAL_POWERS_RUBY) == 0 :
        st.giveItems(MAGICAL_POWERS_RUBY,1)
      htmltext = "30157-04.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(RED_DOWN)<5 :
      htmltext = "30414-09.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(RED_DOWN)>=5 :
      htmltext = "30414-25.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(APPETIZING_APPLE)!=0 :
      htmltext = "30414-15.htm"
    elif npcId == 30371 and st.getInt("cond")!=0 and st.getQuestItemsCount(APPETIZING_APPLE)!=0 :
      htmltext = "30371-01.htm"
    elif npcId == 30371 and st.getInt("cond")!=0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)!=0 and st.getQuestItemsCount(GOLD_LEAVES)<5 :
      htmltext = "30371-03.htm"
    elif npcId == 30371 and st.getInt("cond")!=0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)!=0 and st.getQuestItemsCount(GOLD_LEAVES)>=5 :
      # 5 gold leaves + sap are exchanged for the aquamarine
      st.takeItems(GOLD_LEAVES,st.getQuestItemsCount(GOLD_LEAVES))
      st.takeItems(SAP_OF_WORLD_TREE,st.getQuestItemsCount(SAP_OF_WORLD_TREE))
      if st.getQuestItemsCount(PURE_AQUAMARINE) == 0 :
        st.giveItems(PURE_AQUAMARINE,1)
      htmltext = "30371-04.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)!=0 and st.getQuestItemsCount(GOLD_LEAVES)<5 :
      htmltext = "30414-16.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(CHARM_OF_GRAIN)!=0 and st.getQuestItemsCount(GOLD_LEAVES)>=5 :
      htmltext = "30414-26.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(IMMORTAL_LOVE)!=0 :
      htmltext = "30414-19.htm"
    elif npcId == 30423 and st.getInt("cond")!=0 and st.getQuestItemsCount(IMMORTAL_LOVE)!=0 :
      st.takeItems(IMMORTAL_LOVE,st.getQuestItemsCount(IMMORTAL_LOVE))
      if st.getQuestItemsCount(LUCKY_POTPOURI) == 0 :
        st.giveItems(LUCKY_POTPOURI,1)
      htmltext = "30423-01.htm"
    elif npcId == 30423 and st.getInt("cond")!=0 and st.getQuestItemsCount(LUCKY_POTPOURI)!=0 and st.getQuestItemsCount(AMETHYST)<2 :
      htmltext = "30423-02.htm"
    elif npcId == 30423 and st.getInt("cond")!=0 and st.getQuestItemsCount(LUCKY_POTPOURI)!=0 and st.getQuestItemsCount(AMETHYST)>=2 :
      # 2 amethysts + potpourri are exchanged for the nobility amethyst
      st.takeItems(AMETHYST,st.getQuestItemsCount(AMETHYST))
      st.takeItems(LUCKY_POTPOURI,st.getQuestItemsCount(LUCKY_POTPOURI))
      if st.getQuestItemsCount(NOBILITY_AMETHYST) == 0 :
        st.giveItems(NOBILITY_AMETHYST,1)
      htmltext = "30423-03.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(LUCKY_POTPOURI)!=0 and st.getQuestItemsCount(AMETHYST)<2 :
      htmltext = "30414-20.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(LUCKY_POTPOURI)!=0 and st.getQuestItemsCount(AMETHYST)>=2 :
      htmltext = "30414-27.htm"
    elif npcId == 30414 and st.getInt("cond")!=0 and st.getQuestItemsCount(ROGELLIAS_LETTER)==0 and st.getQuestItemsCount(APPETIZING_APPLE)==0 and st.getQuestItemsCount(IMMORTAL_LOVE)==0 and st.getQuestItemsCount(CHARM_OF_GRAIN)==0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE)==0 and st.getQuestItemsCount(LUCKY_POTPOURI)==0 and st.getQuestItemsCount(FERTILITY_PERIDOT)!=0 and st.getQuestItemsCount(MAGICAL_POWERS_RUBY)!=0 and st.getQuestItemsCount(NOBILITY_AMETHYST)!=0 and st.getQuestItemsCount(PURE_AQUAMARINE)!=0 :
      # all three gems collected: finish the quest and hand out the reward
      st.takeItems(MAGICAL_POWERS_RUBY,st.getQuestItemsCount(MAGICAL_POWERS_RUBY))
      st.takeItems(PURE_AQUAMARINE,st.getQuestItemsCount(PURE_AQUAMARINE))
      st.takeItems(NOBILITY_AMETHYST,st.getQuestItemsCount(NOBILITY_AMETHYST))
      st.takeItems(FERTILITY_PERIDOT,st.getQuestItemsCount(FERTILITY_PERIDOT))
      st.set("cond","0")
      st.setState(COMPLETED)
      st.playSound("ItemSound.quest_finish")
      if st.getQuestItemsCount(ETERNITY_DIAMOND) == 0 :
        st.giveItems(ETERNITY_DIAMOND,1)
      htmltext = "30414-24.htm"
    return htmltext

 # Drops collectible quest items (chance-based) from the registered monsters
 # while the matching errand item is carried and the target count not reached.
 def onKill(self,npc,player,isPet):
    st = player.getQuestState(qn)
    if not st : return
    if st.getState() != STARTED : return
    npcId = npc.getNpcId()
    if npcId == 20466 :
      st.set("id","0")
      # 70% chance for red down, up to 5
      if st.getInt("cond") != 0 and st.getQuestItemsCount(CHARM_OF_GRAIN) != 0 and st.getQuestItemsCount(RED_DOWN)<5 and st.getRandom(100)<70 :
        st.giveItems(RED_DOWN,1)
        if st.getQuestItemsCount(RED_DOWN) == 5 :
          st.playSound("ItemSound.quest_middle")
        else:
          st.playSound("ItemSound.quest_itemget")
    elif npcId == 20019 :
      st.set("id","0")
      # 40% chance for gold leaves, up to 5
      if st.getInt("cond") != 0 and st.getQuestItemsCount(SAP_OF_WORLD_TREE) != 0 and st.getQuestItemsCount(GOLD_LEAVES)<5 and st.getRandom(100)<40 :
        st.giveItems(GOLD_LEAVES,1)
        if st.getQuestItemsCount(GOLD_LEAVES) == 5 :
          st.playSound("ItemSound.quest_middle")
        else:
          st.playSound("ItemSound.quest_itemget")
    elif npcId == 20047 :
      st.set("id","0")
      # 40% chance for amethysts, up to 2
      if st.getInt("cond") != 0 and st.getQuestItemsCount(LUCKY_POTPOURI) != 0 and st.getQuestItemsCount(AMETHYST)<2 and st.getRandom(100)<40 :
        st.giveItems(AMETHYST,1)
        if st.getQuestItemsCount(AMETHYST) == 2 :
          st.playSound("ItemSound.quest_middle")
        else:
          st.playSound("ItemSound.quest_itemget")
    return
# Quest wiring: create the quest object and its four states.
QUEST = Quest(408,qn,"Path To Elvenwizard")
CREATED = State('Start', QUEST)
STARTING = State('Starting', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)
QUEST.setInitialState(CREATED)
# NPC 30414 both starts and advances the quest; 30157/30371/30423 are errand NPCs.
QUEST.addStartNpc(30414)
QUEST.addTalkId(30414)
QUEST.addTalkId(30157)
QUEST.addTalkId(30371)
QUEST.addTalkId(30423)
# Monsters that can drop the collectible quest items (see Quest.onKill).
QUEST.addKillId(20019)
QUEST.addKillId(20466)
QUEST.addKillId(20047)
# Quest items tied to the STARTED state (removed when the quest ends).
STARTED.addQuestDrop(30414,ROGELLIAS_LETTER,1)
STARTED.addQuestDrop(20466,RED_DOWN,1)
STARTED.addQuestDrop(30157,CHARM_OF_GRAIN,1)
STARTED.addQuestDrop(30414,APPETIZING_APPLE,1)
STARTED.addQuestDrop(20019,GOLD_LEAVES,1)
STARTED.addQuestDrop(30371,SAP_OF_WORLD_TREE,1)
STARTED.addQuestDrop(30414,IMMORTAL_LOVE,1)
STARTED.addQuestDrop(20047,AMETHYST,1)
STARTED.addQuestDrop(30423,LUCKY_POTPOURI,1)
STARTED.addQuestDrop(30157,MAGICAL_POWERS_RUBY,1)
STARTED.addQuestDrop(30371,PURE_AQUAMARINE,1)
STARTED.addQuestDrop(30423,NOBILITY_AMETHYST,1)
STARTED.addQuestDrop(30414,FERTILITY_PERIDOT,1) | zenn1989/scoria-interlude | L2Jscoria-Game/data/scripts/quests/408_PathToElvenwizard/__init__.py | Python | gpl-3.0 | 12,096 |
class Foo(object):
    """Base class owning a per-instance ``frotz`` mapping."""

    def __init__(self):
        # Each instance gets its own dict (not shared across instances).
        self.frotz = dict(ping='pong')
class Bar(Foo):
    """Foo subclass that extends the inherited mapping and stores frizzle."""

    def __init__(self, frob, frizzle):
        # NOTE(review): frob is accepted but never stored — kept for
        # signature compatibility.
        super(Bar, self).__init__()
        self.frotz['foo'] = 'bar'
        self.frazzle = frizzle
# Demo: build a Bar and print its inherited/augmented state.
bar = Bar(1,2)
print("frotz:", bar.frotz)
print("frazzle:", bar.frazzle) | ozzmeister00/textAdventureA | sandbox.py | Python | gpl-3.0 | 312 |
'use strict';
// Modal controller for viewing/editing the selected company's settings,
// resetting its auth key / claim id, and (with confirmation) deleting it.
angular.module('risevision.common.header')
  .controller('CompanySettingsModalCtrl', ['$scope', '$modalInstance',
    'updateCompany', 'companyId', 'countries', 'REGIONS_CA', 'REGIONS_US',
    'TIMEZONES', 'getCompany', 'regenerateCompanyField', '$loading',
    'humanReadableError', 'userState', 'userAuthFactory', 'deleteCompany',
    'companyTracker', 'confirmModal', '$modal', '$templateCache',
    'COMPANY_INDUSTRY_FIELDS', 'COMPANY_SIZE_FIELDS', 'addressFactory',
    function ($scope, $modalInstance, updateCompany, companyId,
      countries, REGIONS_CA, REGIONS_US, TIMEZONES, getCompany,
      regenerateCompanyField, $loading, humanReadableError,
      userState, userAuthFactory, deleteCompany, companyTracker, confirmModal,
      $modal, $templateCache, COMPANY_INDUSTRY_FIELDS, COMPANY_SIZE_FIELDS, addressFactory) {
      // Expose reference data and the edited company on the scope.
      $scope.company = {
        id: companyId
      };
      $scope.countries = countries;
      $scope.regionsCA = REGIONS_CA;
      $scope.regionsUS = REGIONS_US;
      $scope.timezones = TIMEZONES;
      $scope.COMPANY_INDUSTRY_FIELDS = COMPANY_INDUSTRY_FIELDS;
      $scope.COMPANY_SIZE_FIELDS = COMPANY_SIZE_FIELDS;
      $scope.isRiseStoreAdmin = userState.isRiseStoreAdmin();
      _clearErrorMessages();
      // Tie the spinner overlay to the loading flag.
      $scope.$watch('loading', function (loading) {
        if (loading) {
          $loading.start('company-settings-modal');
        } else {
          $loading.stop('company-settings-modal');
        }
      });
      $scope.loading = false;
      $scope.forms = {};
      // Editing an existing company: fetch its current settings.
      if (companyId) {
        $scope.loading = true;
        getCompany(companyId).then(
          function (company) {
            $scope.company = company;
            $scope.company.isSeller = company && company.sellerId ? true : false;
            $scope.company.isChargebee = company && company.origin === 'Chargebee';
          },
          function (resp) {
            _showErrorMessage('load', resp);
          }).finally(function () {
          $scope.loading = false;
        });
      }
      $scope.closeModal = function () {
        $modalInstance.dismiss('cancel');
      };
      // Validates the address, strips admin-only fields for non-admins,
      // then persists the company and closes the modal on success.
      $scope.save = function () {
        _clearErrorMessages();
        if (!$scope.forms.companyForm.$valid) {
          console.info('form not valid: ', $scope.forms.companyForm.$error);
        } else {
          $scope.loading = true;
          addressFactory.isValidOrEmptyAddress($scope.company).then(function () {
            var company = angular.copy($scope.company);
            verifyAdmin(company);
            return updateCompany($scope.company.id, company)
              .then(
                function () {
                  companyTracker('Company Updated', userState.getSelectedCompanyId(),
                    userState.getSelectedCompanyName(), !userState.isSubcompanySelected());
                  userState.updateCompanySettings($scope.company);
                  $modalInstance.close('success');
                }).catch(function (error) {
                _showErrorMessage('update', error);
              });
          })
            .catch(function (error) {
              $scope.formError = 'We couldn\'t update your address.';
              $scope.apiError = humanReadableError(error);
              $scope.isAddressError = true;
            })
            .finally(function () {
              $scope.loading = false;
            });
        }
      };
      // Deletes the company after a safe-delete confirmation dialog; signs
      // out or resets the selected company when it was the active one.
      $scope.deleteCompany = function () {
        _clearErrorMessages();
        var instance = $modal.open({
          template: $templateCache.get('partials/common-header/safe-delete-modal.html'),
          controller: 'SafeDeleteModalCtrl'
        });
        instance.result.then(function () {
          $scope.loading = true;
          deleteCompany($scope.company.id)
            .then(
              function () {
                companyTracker('Company Deleted', userState.getSelectedCompanyId(),
                  userState.getSelectedCompanyName(), !userState.isSubcompanySelected());
                if (userState.getUserCompanyId() === $scope.company.id) {
                  userAuthFactory.signOut();
                } else if (userState.getSelectedCompanyId() === $scope.company
                  .id) {
                  userState.resetCompany();
                }
                $modalInstance.close('success');
              })
            .catch(
              function (error) {
                _showErrorMessage('delete', error);
              })
            .finally(function () {
              $scope.loading = false;
            });
        });
      };
      // Shared confirm-then-regenerate flow for authKey / claimId resets.
      var _resetCompanyField = function (type, title, message) {
        _clearErrorMessages();
        return confirmModal(title, message)
          .then(function () {
            $loading.start('company-settings-modal');
            return regenerateCompanyField($scope.company.id, type)
              .catch(function (error) {
                _showErrorMessage('update', error);
              })
              .finally(function () {
                $loading.stop('company-settings-modal');
              });
          });
      };
      $scope.resetAuthKey = function () {
        var type = 'authKey';
        var title = 'Reset Authentication Key',
          message = 'Resetting the Company Authentication Key will cause existing Data Gadgets ' +
          'to no longer report data until they are updated with the new Key.';
        _resetCompanyField(type, title, message)
          .then(function (resp) {
            $scope.company.authKey = resp.item;
          });
      };
      $scope.resetClaimId = function () {
        var type = 'claimId';
        var title = 'Reset Claim Id',
          message = 'Resetting the Company Claim Id will cause existing installations to no ' +
          'longer be associated with your Company.';
        _resetCompanyField(type, title, message)
          .then(function (resp) {
            $scope.company.claimId = resp.item;
          });
      };
      // Store-admin-only fields are mapped or stripped before the API call.
      function verifyAdmin(company) {
        if ($scope.isRiseStoreAdmin) {
          company.sellerId = company.isSeller ? 'yes' : null;
        } else {
          //exclude fields from API call
          delete company.sellerId;
          delete company.isTest;
          delete company.shareCompanyPlan;
        }
      }
      function _clearErrorMessages() {
        $scope.formError = null;
        $scope.apiError = null;
        $scope.isAddressError = false;
      }
      function _showErrorMessage(action, error) {
        $scope.formError = 'Failed to ' + action + ' Company.';
        $scope.apiError = humanReadableError(error);
      }
    }
  ]);
| Rise-Vision/rise-vision-app-launcher | web/scripts/common-header/controllers/ctr-company-settings-modal.js | JavaScript | gpl-3.0 | 6,693 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from mock import patch
from django.test import TestCase as DjangoTestCase
from tastypie.test import ResourceTestCase as TastypieResourceTestCase
from baobab.utils.mock import MockSN, MockLOG
class TestCase(DjangoTestCase):
    """Django TestCase that patches the social-network layer with mocks."""

    def setUp(self, *args, **kwargs):
        super(TestCase, self).setUp(*args, **kwargs)
        log_patch = patch('baobab.socialnetwork.base.LOG',
                          new_callable=MockLOG)
        subclasses_patch = patch(
            'baobab.socialnetwork.base.SocialNetworkBase.__subclasses__',
            return_value=[MockSN, ])
        self._mock = [log_patch, subclasses_patch]
        # Keep the mock logger handy for assertions in subclasses.
        self.log = log_patch.start()
        subclasses_patch.start()

    def tearDown(self, *args, **kwargs):
        super(TestCase, self).tearDown(*args, **kwargs)
        for active_patch in self._mock:
            active_patch.stop()
class ResourceTestCase(TastypieResourceTestCase):
    """Tastypie ResourceTestCase that patches the social-network layer with mocks."""

    def setUp(self, *args, **kwargs):
        super(ResourceTestCase, self).setUp(*args, **kwargs)
        log_patch = patch('baobab.socialnetwork.base.LOG',
                          new_callable=MockLOG)
        subclasses_patch = patch(
            'baobab.socialnetwork.base.SocialNetworkBase.__subclasses__',
            return_value=[MockSN, ])
        self._mock = [log_patch, subclasses_patch]
        # Keep the mock logger handy for assertions in subclasses.
        self.log = log_patch.start()
        subclasses_patch.start()

    def tearDown(self, *args, **kwargs):
        super(ResourceTestCase, self).tearDown(*args, **kwargs)
        for active_patch in self._mock:
            active_patch.stop()
| Gandi/baobab | baobab/utils/test.py | Python | gpl-3.0 | 1,573 |
#include "pluginmanager.h"
#include "plugininterface.h"
#include "server.h"
#include "../Utilities/CrossDynamicLib.h"
// Constructs the plugin manager for the given server. Settings are read
// from the "config" file (INI format) and plugins are loaded immediately.
ServerPluginManager::ServerPluginManager(Server *ser) : server(ser), m_settings("config", QSettings::IniFormat)
{
    loadPlugins();
}
// Accessor for the plugin configuration store (returned by mutable
// reference, so callers may read and write settings).
QSettings &ServerPluginManager::settings()
{
    return m_settings;
}
// Invokes a plugin's exported factory symbol and casts the result to a
// ServerPlugin. 'function' must actually point to a
// ServerPluginInstanceFunction (taking the Server*); anything else is
// undefined behaviour.
// NOTE(review): dynamic_cast can yield null if the factory returns an
// unrelated type -- callers should check the result before use.
ServerPlugin *ServerPluginManager::instanciatePlugin(void *function)
{
    return dynamic_cast<ServerPlugin*>(((ServerPluginInstanceFunction)function)(server));
}
| coyotte508/pokemon-online | src/Server/pluginmanager.cpp | C++ | gpl-3.0 | 489 |
// Entry point wiring the PureScript (Pux) app to webpack hot module
// replacement. The .purs requires are resolved by the purs webpack loader.
var Main = require('../src/Main.purs');
var initialState = require('../src/Types.purs').init;
// Pick the debug build only in development mode.
var debug = process.env.NODE_ENV === 'development'
if (module.hot) {
  // HMR path: resume from the last recorded state (if any) so the UI
  // survives code swaps, and keep recording each new state for the next
  // reload.
  var app = Main[debug ? 'debug' : 'main'](window.puxLastState || initialState)();
  app.state.subscribe(function (state) {
    window.puxLastState = state;
  });
  module.hot.accept();
} else {
  // Cold start without HMR: boot from the initial state.
  Main[debug ? 'debug' : 'main'](initialState)();
}
| input-output-hk/rscoin-haskell | block-explorer/support/index.js | JavaScript | gpl-3.0 | 404 |
/*
Travel Expense: travel expense tracking application
Copyright (C) 2015 Chris Lin peijen@ualberta.ca
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ca.ualberta.cs.peijen_travelexpense;
import java.util.ArrayList;
import java.util.Collection;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.DialogInterface.OnClickListener;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;
import android.widget.AdapterView.OnItemLongClickListener;
/**
 * Screen showing the expenses of the currently selected claim. Wires the
 * ListView to the shared expense list, keeps the adapter in sync through a
 * model listener, and deletes an expense on long-click after confirmation.
 */
public class ClaimInfoActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.claiminfoactivity);
        ExpenseListManager.initManager(this.getApplicationContext());

        //base on eclass youtube video by Abram Hindle: https://www.youtube.com/watch?v=7zKCuqScaRE
        //adding and update expenses onto claim list
        ListView listView = (ListView) findViewById(R.id.expenselistView);
        Collection<Expense> expenses = ExpenseListController.getExpenseList().getExpenses();
        final ArrayList<Expense> list = new ArrayList<Expense>(expenses);// share the list, not gonna change it
        final ArrayAdapter<Expense> expenseAdapter = new ArrayAdapter<Expense>(this, android.R.layout.simple_list_item_1, list);
        listView.setAdapter(expenseAdapter);

        // Keep the on-screen list in step with the model: on every change,
        // rebuild the backing ArrayList and notify the adapter.
        //update to make our adapter now that list has been changed
        ExpenseListController.getExpenseList().addListener(new Listener(){
            @Override
            public void update(){
                list.clear();
                Collection<Expense> expenses = ExpenseListController.getExpenseList().getExpenses();
                list.addAll(expenses);
                expenseAdapter.notifyDataSetChanged();
            }
        });

        //base on eclass youtube video by Abram Hindle: https://www.youtube.com/watch?v=7zKCuqScaRE
        //delete an expense
        listView.setOnItemLongClickListener(new OnItemLongClickListener() {
            @Override
            public boolean onItemLongClick(AdapterView<?> adapterView, View view,
                    int position, long id) {
                //Toast.makeText(ClaimListActivity.this,"Delete "+ list.get(position).toString(), Toast.LENGTH_SHORT).show();
                AlertDialog.Builder adb = new AlertDialog.Builder(ClaimInfoActivity.this); //set alert dialog for deleting
                adb.setMessage("Delete "+list.get(position).toString()+ "?");
                adb.setCancelable(true);
                // Captured so the anonymous click listener can use it.
                final int FinalPosition = position;
                //set options for delete claims or cancel
                adb.setPositiveButton("Delete", new OnClickListener(){
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        Expense expense = list.get(FinalPosition);
                        ExpenseListController.getExpenseList().deleteExpense(expense);
                    }
                });
                adb.setNegativeButton("Cancel", new OnClickListener(){
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        //do nothing here since we dont want to delete the claim
                    }
                });
                adb.show();
                // false: let the framework also deliver normal click events.
                return false;
            }
        });
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.claim_info, menu);
        return true;
    }

    // Menu handler (name presumably referenced from the claim_info menu
    // XML via android:onClick, so it must keep this exact lowercase name).
    public void addexpenseitem(MenuItem menu) //click add an new expense
    {
        Toast.makeText(this, "Add an expense", Toast.LENGTH_SHORT).show();
        Intent intent = new Intent(ClaimInfoActivity.this, AddExpense.class);
        startActivity(intent); //move to the adding expense layout
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
| peijen/TravelExpense | src/ca/ualberta/cs/peijen_travelexpense/ClaimInfoActivity.java | Java | gpl-3.0 | 4,796 |
module FoodsoftSplitManufacturer
  # Mixin that teaches order-PDF controllers about an optional
  # +manufacturer+ request parameter, forwarded to the PDF generator.
  module AddManufacturerToPdfControllers
    def self.included(base) # :nodoc:
      base.class_eval do
        # Only runs when the split-manufacturer plugin is enabled.
        before_filter :foodsoft_split_manufacturer_set_options, only: [:show, :foodcoop_doc], if: -> { FoodsoftSplitManufacturer.enabled? }

        private

        # Copies params[:manufacturer] (when present) into the options
        # hash consumed by the PDF document actions.
        def foodsoft_split_manufacturer_set_options
          @doc_options ||= {}
          @doc_options[:manufacturer] = params[:manufacturer] if params[:manufacturer].present?
        end
      end
    end
  end
end
# Mix the manufacturer-aware behaviour into every controller that renders
# order PDFs. The current-orders and multishared controllers are only
# patched when their respective plugins are loaded.
ActiveSupport.on_load(:after_initialize) do
  OrdersController.send :include, FoodsoftSplitManufacturer::AddManufacturerToPdfControllers
  if defined? FoodsoftCurrentOrders
    CurrentOrders::OrdersController.send :include, FoodsoftSplitManufacturer::AddManufacturerToPdfControllers
  end
  if defined? FoodsoftMultishared
    MultisharedOrdersController.send :include, FoodsoftSplitManufacturer::AddManufacturerToPdfControllers
  end
end
| foodcoop-adam/foodsoft | lib/foodsoft_split_manufacturer/lib/foodsoft_split_manufacturer/add_manufacturer_to_pdf_controllers.rb | Ruby | gpl-3.0 | 955 |
#ifndef UTILITY_H
#define UTILITY_H

#include <string>
#include <ctime>
#include <cstdio>
#include <iostream>

// Base names for the assembler's output files; 'prefix' (defined in a
// source file) is combined with these via return_file_name().
const std::string log_file="assembly_log_details";
const std::string solid_kmers_file="solid_kmers";
const std::string assembly_file="contigs.fasta";
extern std::string prefix;// In case if you asked the user about the prefix name for his files.
extern time_t rawtime ;

static const int SECS_PER_MIN = 60 ;
static const int SECS_PER_HOUR = 3600;
// NOTE(review): presumably the longest supported read / per-thread read
// buffer size -- confirm against the readers that use them.
static const int MAX_READ_LENGTH = 1024;
static const int READ_BUFFER_PER_THREAD = 1024;

// start_time(t) / end_time(t): paired stopwatch macros. Each expansion
// declares locals suffixed with the tag 't', so a start/end pair with the
// same tag must share a scope, and a tag may appear at most once per
// scope. end_time prints "--- h(..):m(..):s(..) elapsed time." to stdout.
// (No comments inside the macros: they would break the '\' continuations.)
#define start_time(t) \
time(&rawtime);\
double start_time ## t = rawtime;

#define end_time(t)\
time(&rawtime);\
double end_time ## t = rawtime;\
double elspsed ## t = difftime(end_time ## t,start_time ## t);\
int hours ## t = elspsed ## t / SECS_PER_HOUR;\
int minutes ## t = elspsed ## t / SECS_PER_MIN;\
int mins_left ## t = minutes ## t % SECS_PER_MIN;\
int secs_left ## t = (int)elspsed ## t % SECS_PER_MIN; \
std::cout<<"--- h("<<hours ## t<<")"<<":m("<<mins_left ## t<<")"<<":s("<<secs_left ## t<<") elapsed time."<< std::endl;

// NOTE(review): implementations not visible here -- presumably builds the
// prefixed output path for file_name, and removes a temporary file.
std::string return_file_name(const std::string file_name);
void delete_created_file(const std::string file_name);
#endif
| SaraEl-Metwally/LightAssembler | Utility.hpp | C++ | gpl-3.0 | 1,271 |
<?php
/**
* This file is part of byrokrat\autogiro.
*
* byrokrat\autogiro is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* byrokrat\autogiro is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with byrokrat\autogiro. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright 2016-21 Hannes Forsgård
*/
declare(strict_types=1);
namespace byrokrat\autogiro\Xml;
use byrokrat\autogiro\Tree\Node;
use byrokrat\autogiro\Visitor\VisitorInterface;
/**
 * Visitor that serializes a node tree to XML through an \XMLWriter.
 *
 * Each node becomes an element named after the node; a "type" attribute is
 * written when the type differs from the name, and the node's value (when
 * truthy) is rendered as text via the injected Stringifier.
 */
final class XmlWritingVisitor implements VisitorInterface
{
    /**
     * @var \XMLWriter
     */
    private $xmlWriter;

    /**
     * @var Stringifier
     */
    private $stringifier;

    public function __construct(\XMLWriter $xmlWriter, Stringifier $stringifier)
    {
        $this->xmlWriter = $xmlWriter;
        $this->stringifier = $stringifier;
    }

    public function visitBefore(Node $node): void
    {
        $name = $node->getName();
        $type = $node->getType();
        $value = $node->getValue();

        $this->xmlWriter->startElement($name);

        if ($name != $type) {
            $this->xmlWriter->writeAttribute('type', $type);
        }

        if ($value) {
            $this->xmlWriter->text($this->stringifier->stringify($value));
        }
    }

    public function visitAfter(Node $node): void
    {
        $this->xmlWriter->endElement();
    }
}
| byrokrat/autogiro | src/Xml/XmlWritingVisitor.php | PHP | gpl-3.0 | 1,800 |
# Manages DJ playlists: CRUD, track search for the playlist editor,
# CSV export, and per-view visit tracking.
class PlaylistsController < ApplicationController
  layout 'alternative'

  before_filter :require_user, :only => ['new', 'edit', 'index', 'search', 'destroy', 'update', 'create']
  before_filter :has_profile_filled_out
  before_filter :no_listener, :only => ['new', 'edit', 'destroy', 'index', 'update', 'create']
  before_filter :increment_visit, :only => ['show']

  autocomplete :artist, :name, :full => false
  autocomplete :program, :title, :full => true
  autocomplete :album, :name, :full => false, :display_value => :playlist_autocomplete
  autocomplete :track, :title, :full => false, :display_value => :track_autocomplete
  autocomplete :label, :name, :full => false, :display_value => :label_autocomplete

  # GET /playlists
  # GET /playlists.xml
  # Current user's playlists, newest first, paginated 25 per page.
  def index
    @inpage = params[:inpage]
    @playlists = current_user.playlists.order("created_at DESC").paginate(:page => params[:page], :per_page => 25)
    respond_to do |format|
      format.js { render :partial => "index" }
    end
  end

  # GET /playlists/1
  # GET /playlists/1.xml
  # Shows a playlist with its items ordered by position.
  def show
    @playlist = Playlist.find(params[:id])
    if @playlist != nil
      @tracks = @playlist.playlist_items.all
      @tracks.sort! { |x,y| x.position <=> y.position }
      respond_to do |format|
        format.html # show.html.erb
        format.xml { render :xml => @playlist }
      end
    else
      four_oh_four_error
    end
  end

  # Searches tracks either directly (by title) or indirectly through
  # matching artists/albums; renders a JS partial with the hits, or an
  # error partial when nothing was found.
  def search
    if params[:track_search]
      @artists = []
      @albums = []
      @tracks = []
      if params[:artist_search]
        @results = Artist.name_like(params[:search])
        # FIX: map ids of the artists actually found (@results); the old
        # code mapped the still-empty @artists array and stored the found
        # tracks into @artists, so the @tracks check below never passed.
        @aa = @results.map{|a| a.id }
        @tracks = Track.find_all_by_artist_id(@aa) rescue nil
      end
      if params[:album_search]
        @albums = Album.name_like(params[:search])
        @aa = @albums.map{|a| a.id }
        @tracks = Track.find_all_by_album_id(@aa) rescue nil
      end
      if params[:search_type] == "tracks"
        @tracks = Track.title_like(params[:search]) rescue nil
      end
      if @tracks && @tracks.size > 0
        respond_to do |format|
          format.js { render :partial => "track_search.js.erb" }
        end
      else
        respond_to do |format|
          format.js { render :partial => "track_error.js.erb" }
        end
      end
    else
      if params[:search_type] == "artist"
        @artists = Artist.name_like(params[:search])
        @aa = @artists.map{|a| a.id }
        @tracks = Track.find_all_by_artist_id(@aa) rescue nil
      elsif params[:search_type] == "albums"
        # NOTE(review): the branch above uses Album.name_like -- confirm
        # which attribute Album actually defines (title vs name).
        @albums = Album.title_like(params[:search])
        @aa = @albums.map{|a| a.id }
        @tracks = Track.find_all_by_album_id(@aa) rescue nil
      elsif params[:search_type] == "tracks"
        @tracks = Track.title_like(params[:search]) rescue nil
      end
      @type = params[:search_type].pluralize
      if @tracks && @tracks.size > 0
        respond_to do |format|
          format.js { render :partial => "search.js.erb" }
        end
      else
        respond_to do |format|
          format.js { render :partial => "search_error.js.erb" }
        end
      end
    end
  end

  # Streams every playlist of the current user as a CSV attachment.
  def export
    @playlists = current_user.playlists.all
    csv_string = FasterCSV.generate do |csv|
      # header row
      csv << ["playlist title", "artist", "album", "track name", "label"]
      # data rows
      @playlists.each do |playlist|
        # FIX: iterate the playlist yielded to the block; the old code
        # read the unset @playlist instance variable (nil) and raised.
        playlist.tracks.each do |track|
          csv << ["#{track.playlist.title rescue nil}", "#{track.artist.name rescue nil}", "#{track.album.name rescue nil}", "#{track.title rescue nil}", "#{track.album.label.name rescue nil}"]
        end
      end
    end
    # send it to the browser
    send_data csv_string,
              :type => 'text/csv; charset=iso-8859-1; header=present',
              :disposition => "attachment; filename=users.csv"
  end

  # GET /playlists/new
  # GET /playlists/new.xml
  # Collects programs with events, the user's downloads (falling back to
  # the 50 most recent downloads site-wide), and all promo categories for
  # the "new playlist" form.
  def new
    @inpage = params[:inpage]
    @playlist = Playlist.new
    @psetup = Program.all
    @programs = []
    @person = current_user
    @j = "no downloads"
    @psetup.each do |p|
      if p.event
        @programs << p
      end
    end
    @downloads = []
    @dsetup = Download.last(50)
    if current_user.programs.first.downloads.all != []
      @j = "downloads"
    end
    current_user.programs.each do |program|
      program.downloads.each do |dl|
        @downloads << dl
      end
    end
    if @downloads == []
      # None of the user's programs have downloads: offer the most
      # recent site-wide downloads instead.
      @dsetup.each do |dl|
        @downloads << dl
      end
    end
    @downloads.sort! {|y,x| x.title.to_i <=> y.title.to_i}
    # Promos by category, each sorted by title for the pick lists.
    @promopromos = Promo.where(:category => 1)
    if @promopromos != nil
      @promopromos.sort!{|x,y| x.title <=> y.title}
    end
    @psapromos = Promo.where(:category => 2)
    if @psapromos != nil
      @psapromos.sort!{|x,y| x.title <=> y.title}
    end
    @psatimelypromos = Promo.where(:category => 3)
    if @psatimelypromos != nil
      @psatimelypromos.sort!{|x,y| x.title <=> y.title}
    end
    @underWriting = Promo.where(:category => 4)
    if @underWriting != nil
      @underWriting.sort!{|x,y| x.title <=> y.title}
    end
    respond_to do |format|
      format.js { render :partial => "new" }
      format.xml { render :xml => @playlist }
    end
  end

  # GET /playlists/1/edit
  # Loads the playlist, its items in order, and the form pick lists.
  def edit
    @psetup = Program.all
    @programs = []
    @psetup.each do |p|
      if p.event
        @programs << p
      end
    end
    @downloads = current_user.programs.first.downloads
    @downloads.sort! {|y,x| x.title.to_i <=> y.title.to_i}
    @playlist = Playlist.find(params[:id])
    @pi = @playlist.playlist_items.sort! { |a,b| a.position <=> b.position }
    respond_to do |format|
      format.js { render :partial => "edit" }
      format.xml { render :xml => @playlist }
    end
  end

  # POST /playlists
  # Builds a playlist from the comma-separated track id list, links the
  # chosen download(s), and bumps the play counters of selected promos.
  def create
    @i = 0
    @title = params[:playlist][:title]
    @description = params[:playlist][:description]
    @tmp_tracks = params[:tracks].split(",")
    @program = Program.find(params[:programs])
    # One-hour show: a single download slot.
    if params[:download]
      @playlist = Playlist.new(:title => @title, :program => @program, :description => @description, :user_id => current_user.id)
      @playlist.save
      @playlist.reload
      # FIX: the params key was misspelled "curently_playing", so the
      # existing download was re-linked even while the show was on air.
      unless params[:currently_playing] == "true"
        @download = Download.find(params[:download])
        @download.update_attributes(:playlist_id => @playlist.id)
        @download.save
      end
      if params[:currently_playing] == "true"
        # NOTE(review): magic timestamp title appears to mark an
        # in-progress recording -- confirm.
        @download = Download.new(:title => "1364776013", :playlist_id => @playlist.id)
        @download.save
      end
    end
    # Two-hour show: two download slots.
    if params[:first_download]
      @playlist = Playlist.new(:title => @title, :program => @program, :description => @description, :user_id => current_user.id)
      @playlist.save
      @playlist.reload
      unless params[:currently_playing] == "true"
        @download = Download.find(params[:first_download])
        @download.update_attributes(:playlist_id => @playlist.id)
        @download.save
        @download = Download.find(params[:second_download])
        @download.update_attributes(:playlist_id => @playlist.id)
        @download.save
      end
      if params[:currently_playing] == "true"
        @download = Download.find(params[:first_download])
        @download.update_attributes(:playlist_id => @playlist.id)
        @download.save
        @download = Download.new(:title => "1364776013", :playlist_id => @playlist.id)
        @download.save
      end
    end
    # Count one play for each promo type that was selected.
    if params[:promopromo] != ""
      @promopromo = Promo.find(params[:promopromo])
      @promopromo.count += 1
      @promopromo.save
    end
    if params[:psapromo] != ""
      @psapromo = Promo.find(params[:psapromo])
      @psapromo.count += 1
      @psapromo.save
    end
    if params[:psatimelypromo] != ""
      @psatimelypromo = Promo.find(params[:psatimelypromo])
      @psatimelypromo.count += 1
      @psatimelypromo.save
    end
    if params[:Underwriting] != ""
      @underWriting = Promo.find(params[:Underwriting])
      @underWriting.count += 1
      @underWriting.save
    end
    # Create the playlist items in the submitted order.
    @i = 0
    @tmp_tracks.each do |track|
      @i += 1
      @track = Track.find(track)
      @pi = PlaylistItem.new
      @pi.track = @track
      @pi.playlist = @playlist
      @pi.position = @i
      @pi.save
    end
    respond_to do |format|
      format.js { render :partial => "saved.js.erb" }
    end
  end

  # PUT /playlists/1
  # Relinks downloads and rewrites the playlist's items from scratch.
  def update
    @ii = 0
    @playlist = Playlist.find(params[:id])
    @program = Program.find(params[:programs])
    if params[:downloads]
      @download = Download.find(params[:downloads])
      @download.update_attributes(:playlist_id => @playlist.id)
      @download.save
    end
    if params[:first_download]
      @download = Download.find(params[:first_download])
      @download.update_attributes(:playlist_id => @playlist.id)
      @download.save
    end
    if params[:second_download]
      @download = Download.find(params[:second_download])
      @download.update_attributes(:playlist_id => @playlist.id)
      @download.save
    end
    @tmp_tracks = params[:tracks].split(",")
    # Replace all items: drop the old rows, then recreate in given order.
    @playlist.playlist_items.each do |pi|
      pi.destroy
    end
    Rails.logger.info(@tmp_tracks.length)
    @tmp_tracks.each do |track|
      @ii += 1
      @track = Track.find(track)
      @pi = PlaylistItem.new
      @pi.track = @track
      @pi.playlist = @playlist
      @pi.position = @ii
      @pi.save
    end
    @title = params[:playlist][:title]
    @playlist.update_attributes(:title => params[:playlist][:title])
    # NOTE(review): @decription (sic) looks unused -- kept in case the
    # saved.js.erb partial references it.
    @decription = params[:playlist][:description]
    @playlist.update_attributes(:description => params[:playlist][:description])
    respond_to do |format|
      format.js { render :partial => "saved.js.erb" }
    end
  end

  # DELETE /playlists/1
  # DELETE /playlists/1.xml
  def destroy
    @playlist = Playlist.find(params[:id])
    @playlist.destroy
    @msg = "Success!"
    respond_to do |format|
      format.html { redirect_to(playlists_url) }
      format.json { render :json => @msg }
      format.xml { head :ok }
    end
  end

  # Creation time of the playlist's associated download.
  def download_time
    @p = Playlist.find(params[:id])
    @t = @p.download.created_at
    return @t
  end

  private

  # Records a View row (with the visitor's user agent) each time a
  # playlist page is shown.
  def increment_visit
    @p = Playlist.find(params[:id])
    if @p
      @v = View.new
      @v.viewable = @p
      @v.user_agent = request.user_agent
      @v.save
    end
  end
end
| KPSU/kpsu.org | app/controllers/playlists_controller.rb | Ruby | gpl-3.0 | 10,927 |
'use strict';

describe('Controller: GeneralCtrl', function () {
  var ctrl;
  var scope;

  // load the controller's module
  beforeEach(module('golfAdminApp'));

  // Build a fresh scope and controller instance before every spec.
  beforeEach(inject(function ($controller, $rootScope) {
    scope = $rootScope.$new();
    ctrl = $controller('GeneralCtrl', {
      $scope: scope
      // place here mocked dependencies
    });
  }));

  it('should attach a list of awesomeThings to the scope', function () {
    expect(ctrl.awesomeThings.length).toBe(3);
  });
});
| SinfoProject/golfAdmin | test/spec/controllers/general.js | JavaScript | gpl-3.0 | 566 |
/////////////////////////////////////////////////////////////////////////////
//
// Project ProjectForge Community Edition
// www.projectforge.org
//
// Copyright (C) 2001-2022 Micromata GmbH, Germany (www.micromata.com)
//
// ProjectForge is dual-licensed.
//
// This community edition is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License as published
// by the Free Software Foundation; version 3 of the License.
//
// This community edition is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, see http://www.gnu.org/licenses/.
//
/////////////////////////////////////////////////////////////////////////////
package org.projectforge.framework.configuration;
/**
 * Marker interface used by {@link Configuration}: it declares no methods
 * and only tags implementing types as configuration payloads.
 *
 * @author Kai Reinhard (k.reinhard@micromata.de)
 * @see Configuration
 */
public interface ConfigurationData
{
}
| micromata/projectforge | projectforge-business/src/main/java/org/projectforge/framework/configuration/ConfigurationData.java | Java | gpl-3.0 | 1,164 |
/*
* Copyright (C) 2017 The MoonLake Authors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.minecraft.moonlake.api.packet.wrapper;
import com.minecraft.moonlake.property.SimpleStringProperty;
import com.minecraft.moonlake.property.StringProperty;
import java.io.IOException;
/**
 * <h1>PacketPlayOutBungeeKickPlayer</h1>
 * Outgoing BungeeCord plugin message ("KickPlayer" sub-channel) that kicks
 * a target player with a given reason.
 *
 * @version 1.0
 * @author Month_Light
 */
public class PacketPlayOutBungeeKickPlayer extends PacketPlayOutBungeeAbstractTarget {

    /** Kick reason shown to the player. */
    private StringProperty reason;

    /**
     * Creates a kick packet.
     *
     * @param target name of the player to kick
     * @param reason reason displayed to the kicked player
     */
    public PacketPlayOutBungeeKickPlayer(String target, String reason) {
        super(target);
        this.reason = new SimpleStringProperty(reason);
    }

    /**
     * Gets the kick reason of this packet.
     *
     * @return the reason property
     */
    public StringProperty getReason() {
        return reason;
    }

    @Override
    protected void write() throws IOException {
        // BungeeCord "KickPlayer" frame: sub-channel name, target, reason.
        super.dataOut.writeUTF("KickPlayer");
        super.dataOut.writeUTF(super.target.get());
        super.dataOut.writeUTF(reason.get());
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        PacketPlayOutBungeeKickPlayer that = (PacketPlayOutBungeeKickPlayer) o;
        // FIX: also compare the inherited target; previously two packets
        // aimed at different players but sharing a reason compared equal.
        return reason.equals(that.reason) && target.equals(that.target);
    }

    @Override
    public int hashCode() {
        // Kept consistent with equals(): combine reason and target.
        int result = reason.hashCode();
        result = 31 * result + target.hashCode();
        return result;
    }

    @Override
    public String toString() {
        return "PacketPlayOutBungeeKickPlayer{" +
                "reason=" + reason +
                ", target=" + target +
                ", dataOut=" + dataOut +
                '}';
    }
}
| u2g/MoonLake | src/main/java/com/minecraft/moonlake/api/packet/wrapper/PacketPlayOutBungeeKickPlayer.java | Java | gpl-3.0 | 2,453 |
// CodeMirror mode for the Makam metalanguage, built on the "simple mode"
// addon (CodeMirror.defineSimpleMode). UMD-style wrapper: CommonJS when
// available, otherwise the CodeMirror global.
(function(mod) {
  if (typeof exports == "object" && typeof module == "object")
    // CommonJS
    mod(require("codemirror/lib/codemirror"));
  else mod(CodeMirror);
})(function(CodeMirror) {
  "use strict";

  // "(*" opens a comment handled by the "comment" sub-state below.
  const comment = { regex: /\(\*/, push: "comment", token: "comment" };

  // Rules shared by the top-level state and the staging state.
  const base = [
    // double-quoted string (backslash escapes, may be unterminated)
    { regex: /"(?:[^\\]|\\.)*?(?:"|$)/, token: "string" },
    // "`(" at line start enters the staging block state
    { regex: /`\(/, sol: true, token: "builtin strong", next: "staging" },
    // backquote starts an expansion (string with ${...} splices)
    { regex: /`/, token: "string", push: "expansion" },
    { regex: /[1-9][0-9]*/, token: "number" },
    // free-quotation delimiters: {tag| ... |}, {{ ... }}, << ... >>, { ... }
    { regex: /\{[a-z]+\|/, token: "string strong", push: "freequote_brpp" },
    { regex: /\{\{/, token: "string strong", push: "freequote_brbr" },
    { regex: /\<\</, token: "string strong", push: "freequote_abab" },
    { regex: /\{/, token: "string strong", push: "freequote_br" },
    // %directives
    { regex: /\%[a-z]+/, token: "builtin" },
    comment,
    // query result keywords (deliberately untokenized)
    {
      regex: /(Yes(:|\.)|Impossible\.)/
    },
    // ">>" continuation marker at line start
    {
      regex: /\>\>/,
      sol: true,
      token: "builtin"
    },
    { regex: /[A-Z_][A-Za-z_\\'0-9]*/, token: "variable-2" }, //metavars
    {
      regex: /(if|then|else|when|fun|pfun)\b/,
      token: "keyword"
    },
    // lowercase identifier at line start begins a definition
    {
      regex: /[a-z][a-zA-Z0-9_\']*/,
      sol: true,
      token: "def",
      push: "definition"
    },
    {
      regex: /[a-z][a-zA-Z0-9_\']*/
    },
    // clause operators; "<-"/":-" also indent the following lines
    { regex: /(\<-|:-)/, token: "builtin", indent: true },
    { regex: /(:=|-\>)/, token: "builtin" },
    { regex: /=\>/, token: "keyword" },
    // "." or "?" followed by whitespace/EOL ends a clause and dedents
    {
      regex: /(\.|\?)(\s|$)/,
      dedent: true,
      dedentIfLineStart: true
    }
  ];

  CodeMirror.defineSimpleMode("makam", {
    start: base,
    // staging: same rules as base, plus ")." to return to start
    staging: [].concat(
      [{ regex: /\)\./, token: "builtin strong", next: "start" }],
      base
    ),
    // after a comma in a definition head: further names being defined
    const_definition: [
      {
        regex: /[a-z][a-zA-Z0-9_\']*/,
        token: "def"
      },
      { regex: /:/, next: "type_in_definition" }
    ],
    // right after the defined name: comma (more names), colon (type), or end
    definition: [
      { regex: /\s*,\s*/, next: "const_definition" },
      { regex: /\s*:\s*/, next: "type_in_definition" },
      { regex: /\s/, pop: true }
    ],
    // type annotation of a definition, up to the closing "."
    type_in_definition: [
      { regex: /(type|prop)\b/, token: "keyword" },
      { regex: /[a-z][a-zA-Z0-9_\']*/, token: "type" },
      { regex: /-\>/, token: "meta" },
      { regex: /[A-Z_][A-Za-z_\\'0-9]*/, token: "variable-2" }, //metavars
      { regex: /\./, pop: true }
    ],
    // inside a backquoted expansion: ${...} splices back into code
    expansion: [
      { regex: /\$\{/, push: "expansion_quote", token: "meta" },
      { regex: /\$\`/, pop: true, token: "string" },
      { regex: /\$[^\{]/, token: "string" },
      { regex: /(?:[^\\`\$]|\\.)+/, token: "string" },
      { regex: /\`/, pop: true, token: "string" }
    ],
    expansion_quote: [].concat(
      [{ regex: /\}/, pop: true, token: "meta" }],
      base
    ),
    // free-quote states: consume raw text until the matching closer
    freequote_brpp: [
      { regex: /\|\}/, token: "string strong", pop: true },
      { regex: /[^\|]+/, token: "string" },
      { regex: /\|/, token: "string" },
    ],
    freequote_brbr: [
      { regex: /\}\}/, token: "string strong", pop: true },
      { regex: /[^\}]+/, token: "string" },
      { regex: /\}/, token: "string" }
    ],
    freequote_abab: [
      { regex: /\>\>/, token: "string strong", pop: true },
      { regex: /[^\>]+/, token: "string" },
      { regex: /\>/, token: "string" }
    ],
    freequote_br: [
      { regex: /\}/, token: "string strong", pop: true },
      { regex: /[^\}]+/, token: "string" }
    ],
    // NOTE(review): comments do not nest here -- the pushed state pops on
    // the first "*)" (a nesting rule is left commented out below).
    comment: [
      // { regex: /\(\*/, token: "comment", push: "comment" },
      { regex: /.*\*\)/, token: "comment", pop: true },
      { regex: /.*/, token: "comment" }
    ]
  });

  // Secondary mode that re-tokenizes query result lines via the main mode.
  CodeMirror.defineSimpleMode("makam-query-results", {
    start: [
      { regex: /^(Yes(:|\.)|Impossible\.)/, mode: { spec: "makam" } }
    ]
  });
});
| astampoulis/makam | webui/makam-codemirror.js | JavaScript | gpl-3.0 | 3,725 |
package toni.druck.elementActions;
import java.util.regex.Pattern;
/**
 * Concrete {@link Pattern}-based decision: an {@code OnOffAction} that is
 * "on" exactly when the value fully matches a configurable regular
 * expression.
 *
 * @author Thomas Nill
 */
public class RegExpAction extends OnOffAction {

    /** Compiled match pattern; {@code null} until {@link #setIfRe(String)} is called. */
    private Pattern ifRe = null;

    public RegExpAction() {
    }

    /**
     * Returns the configured expression source.
     *
     * @return the pattern text, or {@code null} if no pattern was set
     */
    public String getIfRe() {
        // FIX: previously threw NullPointerException when no pattern was set.
        return ifRe == null ? null : ifRe.pattern();
    }

    /**
     * Sets and compiles the regular expression used by {@link #isOn(String)}.
     *
     * @param ifRe a valid regular expression
     * @throws java.util.regex.PatternSyntaxException if the expression is invalid
     */
    public void setIfRe(String ifRe) {
        this.ifRe = Pattern.compile(ifRe);
    }

    @Override
    protected boolean isOn(String value) {
        // "On" only when the whole value matches the expression.
        return ifRe.matcher(value).matches();
    }
}
| ThoNill/DRUCK | DRUCK/src/toni/druck/elementActions/RegExpAction.java | Java | gpl-3.0 | 573 |
<?php
/**
* About page displaying information about the plugin
*
* @link http://www.69signals.com
* @since 0.1
* @package Signals_Widgets
*/
/**
 * Renders the plugin's "About" admin panel by pulling in its view template.
 *
 * @return void
 */
function widgets_admin_about() {
	// View template for the about panel
	require 'views/about.php';
}
# -*- encoding: utf-8 -*-
"""Implements System Groups UI"""
from robottelo.ui.base import Base
from robottelo.ui.locators import common_locators, locators
class SystemGroup(Base):
    """Page object driving the System Groups screens of the web UI."""

    def create(self, name, description=None, limit=None):
        """Creates new System Group from UI.

        :param name: group name to enter in the form
        :param description: optional description text
        :param limit: optional maximum number of systems; when omitted the
            group is left "unlimited" (the UI default)
        """
        if self.wait_until_element(locators['system-groups.new']):
            # new
            self.click(locators['system-groups.new'])
            self.wait_until_element(locators['system-groups.name'])
            # fill name
            self.field_update('system-groups.name', name)
            # fill description
            if description:
                self.field_update('system-groups.description', description)
            # set limit (unlimited by default)
            if limit:
                self.click(locators['system-groups.unlimited'])
                self.field_update('system-groups.limit', limit)
            # create
            self.click(common_locators['create'])

    def update(self, name, new_name=None, new_description=None, limit=None):
        """Updates existing System Group from UI.

        Looks the group up by ``name`` first; each of the optional
        arguments triggers its own inline-edit interaction.
        """
        system_group = self.search(name)
        self.wait_for_ajax()
        if system_group:
            system_group.click()
            self.wait_for_ajax()
            if new_name:  # update name
                self.edit_entity(locators["system-groups.update_name"],
                                 locators["system-groups.update_name_field"],
                                 new_name,
                                 locators["system-groups.update_name_save"])
            if new_description:  # update description
                self.edit_entity(
                    locators["system-groups.update_description"],
                    locators["system-groups.update_description_field"],
                    new_description,
                    locators["system-groups.update_description_save"]
                )
            if limit:  # update limit
                self.click(locators["system-groups.update_limit"])
                checkbox = self.wait_until_element(
                    locators["system-groups.update_limit_checkbox"])
                # uncheck the "unlimited" checkbox when it is set, so the
                # numeric limit field becomes editable
                if checkbox.get_attribute('checked'):
                    checkbox.click()
                    self.wait_for_ajax()
                # update field and save
                self.field_update("system-groups.update_limit_field", limit)
                self.click(locators["system-groups.update_limit_save"])

    def remove(self, name):
        """Removes existing System Group from UI (confirms the dialog)."""
        system_group = self.search(name)
        self.wait_for_ajax()
        if system_group:
            system_group.click()
            self.wait_for_ajax()
            self.click(locators["system-groups.remove"])
            self.click(locators["system-groups.confirm_remove"])

    def search(self, name):
        """Searches existing System Group from UI; returns the result
        element or ``None``."""
        return self.search_entity(
            name, locators['system-groups.search'], katello=True)
| abalakh/robottelo | robottelo/ui/systemgroup.py | Python | gpl-3.0 | 3,072 |
# -*- coding: utf-8 -*-
# This package and all its sub-packages are part of django-wiki,
# except where otherwise stated.
#
# django-wiki is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# django-wiki is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with django-wiki. If not, see <http://www.gnu.org/licenses/>.
# Package version string, exposed for packaging and runtime introspection.
VERSION = "0.0.17"
| Attorney-Fee/django-wiki | wiki/__init__.py | Python | gpl-3.0 | 773 |
/*
* This file is part of the loot project for Android.
*
* This program is free software: you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version. This program is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU General Public License
* for more details. You should have received a copy of the GNU General
* Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright (C) 2008, 2009, 2010, 2011 Christopher McCurdy
*/
package net.gumbercules.loot.transaction;
import java.text.DateFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Currency;
import java.util.Date;
import net.gumbercules.loot.R;
import net.gumbercules.loot.account.Account;
import net.gumbercules.loot.backend.CurrencyWatcher;
import net.gumbercules.loot.backend.Database;
import net.gumbercules.loot.backend.Logger;
import net.gumbercules.loot.backend.NoDecimalCurrencyWatcher;
import net.gumbercules.loot.premium.ViewImage;
import net.gumbercules.loot.repeat.RepeatActivity;
import net.gumbercules.loot.repeat.RepeatSchedule;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.DatePickerDialog;
import android.app.Dialog;
import android.content.ContentValues;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.provider.MediaStore;
import android.provider.MediaStore.Images;
import android.provider.MediaStore.Images.Media;
import android.text.method.DigitsKeyListener;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnFocusChangeListener;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.MultiAutoCompleteTextView;
import android.widget.RadioButton;
import android.widget.Spinner;
import android.widget.TableRow;
import android.widget.TextView;
import android.widget.AdapterView.OnItemSelectedListener;
public class TransactionEdit extends Activity
{
public static final String KEY_TRANSFER = "te_transfer";
private static final String TAG = "net.gumbercules.loot.TransactionEdit";
private static final String KEY_IMAGE_URI = "k_uri";
private static final String KEY_URIS = "k_uris";
private static final int REQ_REPEAT = 0;
private static final int REQ_CAMERA = 1;
private static final int REQ_GALLERY = 2;
private Transaction mTrans;
private RepeatSchedule mRepeat;
private int mTransId;
private int mRepeatId; // used only when editing from repeat manager
private int mFinishIntent;
private int mRequest;
private int mType;
private int mAccountId;
private boolean mFinished;
private int mDefaultRepeatValue;
private int mLastRepeatValue;
private Date mDate;
private int mAccountPos;
private CurrencyWatcher mCurrencyWatcher;
private Uri mImageUri;
private Uri[] mUris;
private RadioButton checkRadio;
private RadioButton withdrawRadio;
private RadioButton depositRadio;
private EditText dateEdit;
private ImageButton dateButton;
private AutoCompleteTextView partyEdit;
private EditText amountEdit;
private EditText checkEdit;
private MultiAutoCompleteTextView tagsEdit;
private Spinner accountSpinner;
private Spinner repeatAccountSpinner;
private Spinner repeatSpinner;
private ArrayAdapter<String> mRepeatAdapter;
private RadioButton budgetRadio;
private RadioButton actualRadio;
private ImageButton saveButton;
private ImageButton cancelButton;
private boolean restarted;
/**
 * Reads the launch parameters — from the saved instance state after a
 * restart, or from the launching Intent's extras on a fresh start — and
 * prepares the repeat-interval adapter.  Widget lookup and population
 * happen later, in populateFields() (called from onResume()).
 */
@Override
public void onCreate(Bundle savedInstanceState)
{
	super.onCreate(savedInstanceState);
	setContentView(R.layout.trans);
	mFinishIntent = RESULT_CANCELED;
	mFinished = false;
	mDefaultRepeatValue = -1;
	mDate = null;
	restarted = false;
	ArrayList<String> repeat =
			new ArrayList<String>(Arrays.asList(getResources().getStringArray(R.array.repeat)));
	mRepeatAdapter = new ArrayAdapter<String>(this,
			android.R.layout.simple_spinner_item, repeat);
	mRepeatAdapter.setDropDownViewResource(android.R.layout.simple_dropdown_item_1line);
	// get the type code so we know whether to show a transaction or a transfer window
	if (savedInstanceState != null)
	{
		// restarted (e.g. after rotation): restore what onSaveInstanceState() stashed
		mRequest = savedInstanceState.getInt(TransactionActivity.KEY_REQ);
		mType = savedInstanceState.getInt(TransactionActivity.KEY_TYPE);
		mTransId = savedInstanceState.getInt(Transaction.KEY_ID);
		mAccountId = savedInstanceState.getInt(Account.KEY_ID);
		long date = savedInstanceState.getLong(Transaction.KEY_DATE);
		// 0 is the bundle's "absent" marker for dates
		mDate = (date == 0 ? null : new Date(date));
		mRepeat = new RepeatSchedule();
		mRepeat.iter = savedInstanceState.getInt(RepeatSchedule.KEY_ITER);
		mRepeat.freq = savedInstanceState.getInt(RepeatSchedule.KEY_FREQ);
		mRepeat.custom = savedInstanceState.getInt(RepeatSchedule.KEY_CUSTOM);
		long end = savedInstanceState.getLong(RepeatSchedule.KEY_DATE);
		mRepeat.end = (end == 0 ? null : new Date(end));
		mAccountPos = savedInstanceState.getInt(TransactionEdit.KEY_TRANSFER);
		String uri_string = savedInstanceState.getString(KEY_IMAGE_URI);
		if (uri_string != null)
		{
			mImageUri = Uri.parse(uri_string);
		}
		String[] uris = savedInstanceState.getStringArray(KEY_URIS);
		if (uris != null)
		{
			int len = uris.length;
			mUris = new Uri[len];
			for (int i = 0; i < len; ++i)
			{
				mUris[i] = Uri.parse(uris[i]);
			}
		}
		restarted = true;
	}
	else
	{
		// fresh launch: read parameters from the caller's Intent
		Bundle extras = getIntent().getExtras();
		mRequest = extras.getInt(TransactionActivity.KEY_REQ);
		mType = extras.getInt(TransactionActivity.KEY_TYPE);
		mTransId = extras.getInt(Transaction.KEY_ID);
		mAccountId = extras.getInt(Account.KEY_ID);
		mRepeatId = extras.getInt(RepeatSchedule.KEY_ID);
	}
}
/**
 * Finds every form widget, wires up its listeners, and fills the form in
 * from the transaction/repeat being edited (or from restored state).
 * Called from onResume(), so it must tolerate running more than once.
 */
private void populateFields()
{
	depositRadio = (RadioButton)findViewById(R.id.depositRadio);
	withdrawRadio = (RadioButton)findViewById(R.id.withdrawRadio);
	checkRadio = (RadioButton)findViewById(R.id.checkRadio);
	dateEdit = (EditText)findViewById(R.id.dateEdit);
	dateEdit.setEnabled(false);
	dateButton = (ImageButton)findViewById(R.id.datePickerButton);
	amountEdit = (EditText)findViewById(R.id.amountEdit);
	checkEdit = (EditText)findViewById(R.id.checkEdit);
	checkEdit.setKeyListener(new DigitsKeyListener());
	// pick the amount-field watcher based on the "no decimal entry" preference
	if (PreferenceManager.getDefaultSharedPreferences(this).getBoolean("key_input_no_decimal", false))
	{
		mCurrencyWatcher = new NoDecimalCurrencyWatcher();
	}
	else
	{
		mCurrencyWatcher = new CurrencyWatcher();
	}
	amountEdit.addTextChangedListener(mCurrencyWatcher);
	tagsEdit = (MultiAutoCompleteTextView)findViewById(R.id.tagsEdit);
	String[] tags = Transaction.getAllTags();
	if (tags == null)
		tags = new String[0];
	ArrayAdapter<String> tagsAdapter = new ArrayAdapter<String>(this,
			android.R.layout.simple_dropdown_item_1line, tags);
	tagsEdit.setAdapter(tagsAdapter);
	tagsEdit.setTokenizer(new TransactionActivity.SpaceTokenizer());
	// create the repeat spinner and populate the values
	repeatSpinner = (Spinner)findViewById(R.id.repeatSpinner);
	repeatSpinner.setAdapter(mRepeatAdapter);
	actualRadio = (RadioButton)findViewById(R.id.ActualRadio);
	budgetRadio = (RadioButton)findViewById(R.id.BudgetRadio);
	saveButton = (ImageButton)findViewById(R.id.saveButton);
	cancelButton = (ImageButton)findViewById(R.id.cancelButton);
	dateButton.setOnClickListener(new View.OnClickListener()
	{
		public void onClick(View v)
		{
			showDialog(0);
		}
	});
	// set the check radio to enable/disable and automatically populate the check entry field
	checkRadio.setOnCheckedChangeListener( new RadioButton.OnCheckedChangeListener()
	{
		public void onCheckedChanged(CompoundButton buttonView, boolean isChecked)
		{
			int visibility = View.GONE;
			if (isChecked)
			{
				if (checkEdit.getText().toString().equals(""))
				{
					// autopopulate the edit with the next check number
					Account acct = Account.getAccountById(mAccountId);
					int check_num = acct.getNextCheckNum();
					checkEdit.setText(new Integer(check_num).toString());
				}
				visibility = View.VISIBLE;
			}
			findViewById(R.id.checkRow).setVisibility(visibility);
		}
	});
	makeCreditChanges();
	// load the transaction if mTransId > 0
	Transaction trans = null;
	if (mTransId == 0 && mRepeatId == 0)
	{
		// brand-new transaction
		mTrans = new Transaction();
		if (mRepeat == null)
		{
			mRepeat = new RepeatSchedule();
		}
		else
		{
			setRepeatSpinnerSelection(mRepeat);
		}
		// set the date edittext to the current date by default
		setDateEdit(mDate);
	}
	else
	{
		if (mTransId != 0)
		{
			if (mTrans == null)
			{
				mTrans = Transaction.getTransactionById(mTransId);
			}
		}
		else
		{
			// editing from the repeat manager: load the schedule's transaction
			if (mRepeat == null && mTrans == null)
			{
				mRepeat = RepeatSchedule.getSchedule(mRepeatId);
				mTrans = mRepeat.getTransaction();
				mAccountId = mTrans.account;
			}
		}
		trans = mTrans;
		if (!restarted)
		{
			if (mRepeat == null)
			{
				int repeat_id = RepeatSchedule.getRepeatId(mTransId);
				if (repeat_id != -1)
					mRepeat = RepeatSchedule.getSchedule(repeat_id);
				else
					mRepeat = new RepeatSchedule();
			}
			if (trans == null)
			{
				Log.e(TAG + ".populateFields()", "trans is null");
				return;
			}
			// figure out if this is a normal transaction or a transfer
			int transfer_id = mTrans.getTransferId();
			if (transfer_id != -1)
			{
				Transaction transfer = Transaction.getTransactionById(transfer_id, true);
				if (transfer == null)
				{
					// the other half of the transfer is gone; demote to a plain transaction
					transfer = new Transaction();
					transfer.setId(-1);
					mTrans.removeTransfer(transfer);
					mType = TransactionActivity.TRANSACTION;
				}
				else
				{
					mType = TransactionActivity.TRANSFER;
				}
			}
			else
			{
				if (mRepeat.getTransferId() > 0)
				{
					mType = TransactionActivity.TRANSFER;
				}
				else
				{
					mType = TransactionActivity.TRANSACTION;
				}
			}
			// select the radio matching the transaction type (default: check)
			RadioButton radio = checkRadio;
			Account acct = Account.getAccountById(trans.account);
			Log.i("TEEEEEEEEEEEEEEST", "credit? " + acct.credit);
			if (trans.type == Transaction.WITHDRAW)
			{
				radio = withdrawRadio;
			}
			else if (trans.type == Transaction.DEPOSIT)
			{
				radio = depositRadio;
			}
			radio.setChecked(true);
			if (trans.budget && !trans.isPosted())
			{
				budgetRadio.setChecked(true);
			}
			else
			{
				actualRadio.setChecked(true);
			}
			if (mDate == null)
			{
				setDateEdit(trans.date);
			}
			else
			{
				setDateEdit(mDate);
			}
			// replace comma and currency symbol with empty string
			NumberFormat nf = NumberFormat.getCurrencyInstance();
			String new_currency = Database.getOptionString("override_locale");
			if (new_currency != null && !new_currency.equals(""))
				nf.setCurrency(Currency.getInstance(new_currency));
			String num = nf.format(trans.amount);
			StringBuilder sb = new StringBuilder();
			sb.append(mCurrencyWatcher.getAcceptedChars());
			// strip every character the watcher wouldn't accept as input
			String accepted = "[^\\Q" + sb.toString() + "\\E]";
			num = num.replaceAll(accepted, "");
			amountEdit.setText(num);
			tagsEdit.setText(trans.tagListToString());
			for (Uri uri : trans.images)
			{
				addImageRow(uri);
			}
		}
		if (mUris != null)
		{
			// re-add image rows restored from saved instance state
			for (Uri uri : mUris)
			{
				addImageRow(uri);
			}
			mUris = null;
		}
		setRepeatSpinnerSelection(mRepeat);
	}
	if (mType == TransactionActivity.TRANSFER)
	{
		ArrayAdapter<CharSequence> accountAdapter = showTransferFields();
		if (accountAdapter != null)
		{
			if (!restarted)
			{
				// pre-select the other side of the transfer in the account spinner
				Transaction transfer = null;
				if (mTransId != 0)
				{
					transfer = Transaction.getTransactionById(mTrans.getTransferId(), true);
				}
				else if (mRepeatId != 0)
				{
					RepeatSchedule rs = RepeatSchedule.getSchedule(
							RepeatSchedule.getRepeatId(mRepeat.getTransferId()));
					transfer = rs.getTransaction();
				}
				if (transfer != null)
				{
					Account acct = Account.getAccountById(transfer.account);
					int pos = accountAdapter.getPosition(acct.name);
					accountSpinner.setSelection(pos);
				}
			}
			else
			{
				accountSpinner.setSelection(mAccountPos);
			}
			if (trans != null && trans.type == Transaction.CHECK)
			{
				fillCheckFields(trans.check_num);
			}
		}
	}
	else
	{
		showTransactionFields();
		if (trans != null)
		{
			partyEdit.setText(trans.party);
			if (trans.type == Transaction.CHECK)
			{
				fillCheckFields(trans.check_num);
			}
		}
	}
	amountEdit.setOnFocusChangeListener(new OnFocusChangeListener()
	{
		public void onFocusChange(View v, boolean hasFocus)
		{
			if (v instanceof EditText)
			{
				CurrencyWatcher.setInputType((EditText)v);
			}
		}
	});
	CurrencyWatcher.setInputType(amountEdit);
	repeatSpinner.setSelection(mDefaultRepeatValue);
	repeatSpinner.setOnItemSelectedListener(mRepeatSpinnerListener);
	// show the second account spinner if we're editing from the repeat manager
	if (mRepeatId != 0)
	{
		TableRow row = (TableRow)findViewById(R.id.repeatAccountRow);
		row.setVisibility(View.VISIBLE);
		TextView text = (TextView)findViewById(R.id.repeatAccountLabel);
		text.setText(R.string.trans_account);
		text = (TextView)findViewById(R.id.accountLabel);
		text.setText(R.string.trans);
		repeatAccountSpinner = (Spinner)findViewById(R.id.repeatAccountSpinner);
		String[] names = Account.getAccountNames();
		ArrayAdapter<CharSequence> adapter = new ArrayAdapter<CharSequence>(this,
				android.R.layout.simple_spinner_item, names);
		adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
		repeatAccountSpinner.setAdapter(adapter);
		Account acct = Account.getAccountById(mTrans.account);
		int pos = adapter.getPosition(acct.name);
		repeatAccountSpinner.setSelection(pos);
		// hide the image attachments
		row = (TableRow)findViewById(R.id.imageHeaderRow);
		row.setVisibility(View.GONE);
		LinearLayout ll = (LinearLayout)findViewById(R.id.imageLayout);
		ll.setVisibility(View.GONE);
	}
	ImageView addImage = (ImageView)findViewById(R.id.addImage);
	addImage.setOnClickListener(new View.OnClickListener()
	{
		@Override
		public void onClick(View v)
		{
			// ask the user whether to pick an existing image or capture a new one
			AlertDialog dialog = new AlertDialog.Builder(TransactionEdit.this)
				.setTitle(R.string.attach_receipt)
				.setItems(R.array.receipt_capture_name,
					new DialogInterface.OnClickListener()
					{
						@Override
						public void onClick(DialogInterface dialog, int which)
						{
							Intent i = new Intent();
							int req = 0;
							if (which == 0)
							{
								i.setAction(Intent.ACTION_GET_CONTENT);
								i.setType("image/*");
								req = REQ_GALLERY;
							}
							else if (which == 1)
							{
								// create a MediaStore entry so the camera app has a target URI
								ContentValues values = new ContentValues();
								values.put(Images.Media.TITLE, "Loot Receipt");
								values.put(Images.Media.BUCKET_ID, "Receipts");
								values.put(Images.Media.DESCRIPTION, "Receipt Image for Loot");
								values.put(Images.Media.MIME_TYPE, "image/jpeg");
								mImageUri = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, values);
								i.setAction(MediaStore.ACTION_IMAGE_CAPTURE);
								i.putExtra(MediaStore.EXTRA_OUTPUT, mImageUri);
								req = REQ_CAMERA;
							}
							startActivityForResult(i, req);
						}
					})
				.create();
			dialog.show();
		}
	});
	saveButton.setOnClickListener(new View.OnClickListener()
	{
		public void onClick(View view)
		{
			// onPause() performs the actual save once mFinishIntent == RESULT_OK
			mFinishIntent = RESULT_OK;
			onPause();
		}
	});
	cancelButton.setOnClickListener(new View.OnClickListener()
	{
		public void onClick(View view)
		{
			setResult(mFinishIntent);
			finish();
		}
	});
}
/**
 * Shows the given check number in the check field and switches the
 * transaction-type radio to "check".
 *
 * @param check_num check number to display
 */
private void fillCheckFields(int check_num)
{
	// Integer.toString avoids the deprecated new Integer(...) boxing constructor
	checkEdit.setText(Integer.toString(check_num));
	checkRadio.setChecked(true);
	checkEdit.setEnabled(true);
}
/**
 * Adds a row for an attached receipt image to the image list, with a
 * button that views the image and an icon that removes the row.
 * Duplicate URIs are ignored.
 *
 * @param content_uri MediaStore content URI of the attached image
 */
private void addImageRow(final Uri content_uri)
{
	LayoutInflater inflater = (LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
	final LinearLayout imageLayout = (LinearLayout)findViewById(R.id.imageLayout);
	// bail if this uri was already added
	if (imageLayout.findViewWithTag(content_uri) != null)
	{
		return;
	}
	View row = inflater.inflate(R.layout.receipt_entry, null);
	row.setTag(content_uri);
	imageLayout.addView(row);
	imageLayout.invalidate();
	Button button = (Button)row.findViewById(R.id.image_button);
	String[] columns = new String[] { Images.ImageColumns.TITLE };
	String title = "Receipt";
	// look up a display title for the image; the cursor must be closed
	// (the previous version leaked it and NPE'd on a null query result)
	Cursor cur = getContentResolver().query(content_uri, columns, null, null, null);
	if (cur != null)
	{
		try
		{
			if (cur.moveToFirst())
			{
				title = cur.getString(0);
			}
		}
		finally
		{
			cur.close();
		}
	}
	button.setText(title);
	button.setOnClickListener(new View.OnClickListener()
	{
		@Override
		public void onClick(View v)
		{
			Intent i = new Intent(TransactionEdit.this, ViewImage.class);
			i.setData(content_uri);
			startActivity(i);
		}
	});
	ImageView remove = (ImageView)row.findViewById(R.id.image_delete);
	remove.setOnClickListener(new View.OnClickListener()
	{
		@Override
		public void onClick(View v)
		{
			imageLayout.removeView(imageLayout.findViewWithTag(content_uri));
			mTrans.images.remove(content_uri);
			imageLayout.invalidate();
		}
	});
}
/**
 * Selection listener for the repeat spinner.  Position 6 launches
 * RepeatActivity to build a custom schedule; position 7 is the transient
 * "Custom" entry appended by setRepeatSpinnerSelection().
 */
private OnItemSelectedListener mRepeatSpinnerListener = new Spinner.OnItemSelectedListener()
{
	public void onItemSelected(AdapterView<?> adapter, View view, int pos, long id)
	{
		// only remove the row if the adapter has more than the 7 default items
		// and the position isn't the row to be deleted
		if (mRepeatAdapter.getCount() > 7 && pos != 7)
			mRepeatAdapter.remove("Custom");
		if (pos == 6)
		{
			// remember the previous choice so it can be restored if the
			// custom-repeat activity is cancelled (see onActivityResult)
			mLastRepeatValue = mDefaultRepeatValue;
			Intent i = new Intent(view.getContext(), RepeatActivity.class);
			i.putExtra(RepeatSchedule.KEY_ITER, mRepeat.iter);
			i.putExtra(RepeatSchedule.KEY_FREQ, mRepeat.freq);
			i.putExtra(RepeatSchedule.KEY_CUSTOM, mRepeat.custom);
			i.putExtra(RepeatSchedule.KEY_DATE, (mRepeat.end != null) ? mRepeat.end.getTime() : 0);
			startActivityForResult(i, REQ_REPEAT);
		}
		mDefaultRepeatValue = pos;
	}
	public void onNothingSelected(AdapterView<?> adapter) { }
};
/**
 * Stores the given date in mDate and renders it into the date field with
 * the locale's short date format.  A null date displays today's date
 * (but mDate stays null).
 */
private void setDateEdit(Date date)
{
	mDate = date;
	final Calendar shown = Calendar.getInstance();
	if (date != null)
	{
		shown.setTime(date);
	}
	final DateFormat shortFormat = DateFormat.getDateInstance(DateFormat.SHORT);
	dateEdit.setText(shortFormat.format(shown.getTime()));
}
/**
 * Parses the date field using the locale's short date format and
 * normalizes the result to midnight (00:00:00.000).  Falls back to the
 * current date when the field is empty or unparseable.
 *
 * @return the parsed date with its time-of-day cleared
 */
private Date parseDateEdit()
{
	Date date;
	DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT);
	try
	{
		date = df.parse(dateEdit.getText().toString());
	}
	catch (ParseException e)
	{
		// set the date to today if there's a parsing error
		date = new Date();
	}
	catch (NullPointerException e)
	{
		date = new Date();
		Log.e(TAG + ".parseDateEdit", "null pointer exception on parsing date edit");
	}
	Calendar cal = Calendar.getInstance();
	cal.setTime(date);
	// HOUR_OF_DAY (24-hour clock) is required here: Calendar.HOUR is the
	// 12-hour field, so zeroing it on an afternoon time left the result at
	// 12:00 PM instead of midnight.  Milliseconds are cleared too so equal
	// dates compare equal.
	cal.set(Calendar.HOUR_OF_DAY, 0);
	cal.set(Calendar.MINUTE, 0);
	cal.set(Calendar.SECOND, 0);
	cal.set(Calendar.MILLISECOND, 0);
	date = cal.getTime();
	return date;
}
/**
 * Splits the date currently shown in the date field into
 * { year, month, day }; month is zero-based, as Calendar reports it.
 */
private int[] dateEditToYMD()
{
	final Calendar cal = Calendar.getInstance();
	cal.setTime(parseDateEdit());
	return new int[] {
			cal.get(Calendar.YEAR),
			cal.get(Calendar.MONTH),
			cal.get(Calendar.DAY_OF_MONTH)
	};
}
/**
 * Creates the date-picker dialog, initialized from the date field.
 */
@Override
protected Dialog onCreateDialog(int id)
{
	int[] ymd = dateEditToYMD();
	return new DatePickerDialog(this, mDateSetListener, ymd[0], ymd[1], ymd[2]);
}
/**
 * Re-syncs the (cached) date-picker dialog with the date field before it
 * is shown again.
 */
@Override
protected void onPrepareDialog(int id, Dialog dialog)
{
	int[] ymd = dateEditToYMD();
	((DatePickerDialog)dialog).updateDate(ymd[0], ymd[1], ymd[2]);
}
/**
 * Pushes the date chosen in the picker dialog back into the date field.
 */
private DatePickerDialog.OnDateSetListener mDateSetListener =
	new DatePickerDialog.OnDateSetListener()
	{
		public void onDateSet(DatePicker view, int year, int month, int day)
		{
			Calendar cal = Calendar.getInstance();
			cal.set(Calendar.YEAR, year);
			cal.set(Calendar.MONTH, month);
			cal.set(Calendar.DAY_OF_MONTH, day);
			setDateEdit(cal.getTime());
		}
	};
/**
 * Handles results from the custom-repeat editor, the camera, and the
 * gallery picker.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
	super.onActivityResult(requestCode, resultCode, data);
	if (requestCode == REQ_REPEAT)
	{
		if (resultCode == RESULT_OK)
		{
			// adopt the custom schedule the user built
			Bundle extras = data.getExtras();
			mRepeat.iter = extras.getInt(RepeatSchedule.KEY_ITER);
			mRepeat.freq = extras.getInt(RepeatSchedule.KEY_FREQ);
			mRepeat.custom = extras.getInt(RepeatSchedule.KEY_CUSTOM);
			mRepeat.end = new Date(extras.getLong(RepeatSchedule.KEY_DATE));
			setRepeatSpinnerSelection(mRepeat);
		}
		else
		{
			// cancelled: restore the spinner position saved by the listener
			mDefaultRepeatValue = mLastRepeatValue;
			if (mDefaultRepeatValue == 7)
				setRepeatSpinnerSelection(mRepeat);
		}
	}
	else if (requestCode == REQ_CAMERA)
	{
		if (resultCode == RESULT_OK)
		{
			// the capture target URI was created before launching the camera
			addImageRow(mImageUri);
		}
	}
	else if (requestCode == REQ_GALLERY)
	{
		if (resultCode == RESULT_OK)
		{
			Uri uri = data.getData();
			addImageRow(uri);
		}
	}
}
/**
 * Maps a RepeatSchedule onto a repeat-spinner position (stored in
 * mDefaultRepeatValue): 0=none, 1=daily, 2=weekly, 3=bi-weekly,
 * 4=monthly-by-date, 5=yearly.  Any schedule that doesn't match one of
 * those presets becomes position 7, for which a transient "Custom" entry
 * is appended to the adapter.
 */
@SuppressWarnings("unchecked")
private void setRepeatSpinnerSelection(RepeatSchedule repeat)
{
	int spinner_num = 7;
	switch (repeat.iter)
	{
	case RepeatSchedule.NO_REPEAT:
		spinner_num = 0;
		break;
	case RepeatSchedule.DAILY:
		// presets only apply when there is no end date
		if (repeat.freq == 1 && (repeat.end == null || repeat.end.getTime() <= 0))
			spinner_num = 1;
		break;
	case RepeatSchedule.WEEKLY:
		if (repeat.freq == 1 && repeat.custom == 0 &&
				(repeat.end == null || repeat.end.getTime() <= 0))
			spinner_num = 2;
		else if (repeat.freq == 2 && repeat.custom == 0 &&
				(repeat.end == null || repeat.end.getTime() <= 0))
			spinner_num = 3;
		break;
	case RepeatSchedule.MONTHLY:
		if (repeat.freq == 1 && repeat.custom == RepeatSchedule.DATE &&
				(repeat.end == null || repeat.end.getTime() <= 0))
			spinner_num = 4;
		break;
	case RepeatSchedule.YEARLY:
		if (repeat.freq == 1 && (repeat.end == null || repeat.end.getTime() <= 0))
			spinner_num = 5;
		break;
	}
	if (spinner_num == 7)
	{
		// non-preset schedule: surface it as a "Custom" spinner entry
		mRepeatAdapter = (ArrayAdapter<String>)repeatSpinner.getAdapter();
		if (mRepeatAdapter.getCount() <= 7)
			mRepeatAdapter.add("Custom");
	}
	mDefaultRepeatValue = spinner_num;
}
/**
 * Prepares the plain-transaction form: looks up the party field and feeds
 * it every previously used party name for autocompletion.
 */
private void showTransactionFields()
{
	partyEdit = (AutoCompleteTextView)findViewById(R.id.partyEdit);
	String[] knownParties = Transaction.getAllParties();
	if (knownParties == null)
	{
		knownParties = new String[0];
	}
	ArrayAdapter<String> partyAdapter = new ArrayAdapter<String>(this,
			android.R.layout.simple_dropdown_item_1line, knownParties);
	partyEdit.setAdapter(partyAdapter);
}
/**
 * Adjusts the form for credit accounts: the check controls are hidden and
 * the withdraw/deposit radio labels become credit/debit, with debit
 * selected by default.  Non-credit accounts are left untouched.
 */
private void makeCreditChanges()
{
	final Account account = Account.getAccountById(mAccountId);
	if (!account.credit)
	{
		return;
	}
	checkRadio.setVisibility(View.GONE);
	withdrawRadio.setText(R.string.credit);
	depositRadio.setText(R.string.debit);
	depositRadio.setChecked(true);
	((TableRow)findViewById(R.id.checkRow)).setVisibility(View.GONE);
}
/**
 * Prepares the transfer form: hides the party row, shows the account row,
 * and fills the account spinner with every account except the current one.
 * If only one account exists the activity finishes immediately.
 *
 * @return the spinner's adapter, or null when the activity was finished
 */
private ArrayAdapter<CharSequence> showTransferFields()
{
	// if we're showing a transfer window, hide the party field and show the account field
	// don't hide the check options, as one could write a check to deposit into another account
	TableRow row = (TableRow)findViewById(R.id.partyRow);
	row.setVisibility(TableRow.GONE);
	row = (TableRow)findViewById(R.id.accountRow);
	row.setVisibility(TableRow.VISIBLE);
	accountSpinner = (Spinner)findViewById(R.id.accountSpinner);
	String[] names = Account.getAccountNames();
	// if there is only one account in the database, tell the user they can't transfer and cancel
	if (names.length == 1)
	{
		setResult(mFinishIntent);
		finish();
		return null;
	}
	// NOTE(review): sizing assumes the current account's name occurs exactly
	// once in names; a mismatch would leave a null slot — verify upstream.
	String[] acctNames = new String[names.length - 1];
	Account acct = Account.getAccountById(mAccountId);
	int i = 0;
	for ( String name : names )
	{
		if (!name.equalsIgnoreCase(acct.name))
		{
			acctNames[i++] = name;
		}
	}
	ArrayAdapter<CharSequence> adapter = new ArrayAdapter<CharSequence>(this,
			android.R.layout.simple_spinner_item, acctNames);
	adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
	accountSpinner.setAdapter(adapter);
	return adapter;
}
/**
 * Saving happens on pause (also invoked directly by the save button after
 * setting mFinishIntent to RESULT_OK); failures are logged, not rethrown.
 */
@Override
protected void onPause()
{
	super.onPause();
	try
	{
		saveState();
	}
	catch (Exception e)
	{
		Logger.logStackTrace(e, this);
	}
}
/**
 * Persists the form if the user confirmed (mFinishIntent == RESULT_OK)
 * and nothing has been saved yet, then finishes the activity.  Writes a
 * transaction normally, or a repeat schedule when editing from the repeat
 * manager (mRepeatId != 0).
 */
private void saveState() throws Exception
{
	if (mFinishIntent == RESULT_CANCELED || mFinished)
		return;
	Object[] fields = parseFields();
	if (fields != null)
	{
		if (mRepeatId == 0)
		{
			saveTransaction((Transaction)fields[0], (Account)fields[1]);
		}
		else
		{
			saveRepeat((Transaction)fields[0], (Account)fields[1]);
		}
	}
	finish();
}
/**
 * Reads every form widget into a Transaction.
 *
 * @return a two-element array: [0] the populated Transaction, [1] the
 *         destination Account for a transfer (null for plain transactions)
 */
private Object[] parseFields()
{
	Transaction trans;
	Account acct2 = null;
	if (mTransId != 0)
		trans = mTrans;
	else
	{
		trans = new Transaction();
	}
	if (mType == TransactionActivity.TRANSACTION)
	{
		trans.party = partyEdit.getText().toString();
	}
	else
	{
		acct2 = Account.getAccountByName((String)accountSpinner.getSelectedItem());
	}
	if (mRepeatId != 0)
	{
		// editing from the repeat manager: the source account comes from
		// the second spinner
		trans.account = Account.getAccountByName(
				(String)repeatAccountSpinner.getSelectedItem()).id();
	}
	// clear the list so we don't write tags leftover from loading the transaction
	trans.tags.clear();
	trans.addTags(tagsEdit.getText().toString());
	// get the date of the transaction and set time values to 0
	trans.date = parseDateEdit();
	// get the amount of the transaction
	try
	{
		DecimalFormatSymbols dfs = new DecimalFormatSymbols();
		char sep = dfs.getMonetaryDecimalSeparator();
		String str = amountEdit.getText().toString();
		// String.replace() swaps literally; the old replaceAll() treated
		// the separator as a regex, which would break for metacharacters
		if (sep != '.')
			str = str.replace(sep, '.');
		// Double.parseDouble avoids the deprecated new Double(String) ctor
		trans.amount = Double.parseDouble(str);
	}
	catch (NumberFormatException e)
	{
		trans.amount = 0.0;
	}
	// get the type of transaction
	if (checkRadio.isChecked())
	{
		trans.type = Transaction.CHECK;
		try
		{
			trans.check_num = Integer.parseInt(checkEdit.getText().toString());
		}
		catch (NumberFormatException e)
		{
			// no usable check number entered: fall back to a plain withdrawal
			trans.type = Transaction.WITHDRAW;
			trans.check_num = 0;
		}
	}
	else if (withdrawRadio.isChecked())
	{
		trans.type = Transaction.WITHDRAW;
	}
	else
	{
		trans.type = Transaction.DEPOSIT;
	}
	// get if it's a budget transaction
	trans.budget = budgetRadio.isChecked();
	if (trans.budget && trans.id() != -1 && trans.isPosted())
	{
		trans.post(false);
		trans.budget = true; // set to true because trans.post() sets it to false
	}
	setRepeat();
	// add the images to the transaction
	LinearLayout images = (LinearLayout)findViewById(R.id.imageLayout);
	int count = images.getChildCount();
	LinearLayout image_row;
	Uri uri;
	for (int i = 0; i < count; ++i)
	{
		image_row = (LinearLayout)images.getChildAt(i);
		uri = (Uri)image_row.getTag();
		trans.addImage(uri);
	}
	return new Object[]{trans, acct2};
}
/**
 * Writes the transaction (or transfer) to the database, persists any
 * repeat schedule, reports the new id back to the caller via the result
 * Intent, and broadcasts ACCOUNT_UPDATED so balances refresh.
 *
 * @param trans the populated transaction from parseFields()
 * @param acct2 destination account for transfers, null otherwise
 */
private void saveTransaction(Transaction trans, Account acct2)
{
	int id = -1;
	Intent broadcast = new Intent("net.gumbercules.loot.intent.ACCOUNT_UPDATED", null);
	if (mType == TransactionActivity.TRANSACTION)
	{
		id = trans.write(mAccountId);
	}
	else
	{
		trans.account = mAccountId;
		id = trans.transfer(acct2);
		broadcast.putExtra("transfer_account", acct2.id());
	}
	mFinished = true;
	if (id != -1)
	{
		// write the repeat schedule if it's not set to NO_REPEAT
		if (mRepeat.iter != RepeatSchedule.NO_REPEAT || mRepeat.id() > 0)
		{
			mRepeat.start = trans.date;
			mRepeat.write(id);
		}
		mTransId = id;
		Intent i = new Intent();
		Bundle b = new Bundle();
		b.putInt(Transaction.KEY_ID, mTransId);
		b.putInt(TransactionActivity.KEY_REQ, mRequest);
		i.putExtras(b);
		setResult(mFinishIntent, i);
		broadcast.putExtra("account_id", trans.account);
		sendBroadcast(broadcast);
	}
	else
	{
		// the write failed; report cancellation to the caller
		setResult(RESULT_CANCELED);
	}
}
/**
 * Persists changes made from the repeat manager.  For transfers, the
 * mirrored repeat on the other account is updated as well, and both
 * parties' "Transfer to/from ..." labels are rewritten to match the
 * (possibly changed) accounts.
 *
 * @param trans the populated transaction from parseFields()
 * @param acct2 destination account for transfers, null otherwise
 */
private void saveRepeat(Transaction trans, Account acct2)
{
	int id = -1;
	mRepeat.trans = trans;
	mRepeat.start = trans.date;
	if (mType == TransactionActivity.TRANSACTION)
	{
		id = mRepeat.updateRepeat(trans.id(), false, false);
	}
	else
	{
		// update the party text to reflect the transfer if the account changed
		// and update the transferred repeat
		String detail1, detail2;
		RepeatSchedule repeat2 = new RepeatSchedule(mRepeat);
		repeat2.setId(RepeatSchedule.getRepeatId(mRepeat.getTransferId()));
		repeat2.trans = new Transaction(mRepeat.trans, false);
		repeat2.trans.setId(repeat2.getTransactionId());
		repeat2.trans.account = acct2.id();
		Account acct1 = Account.getAccountById(mRepeat.trans.account);
		// the mirrored transaction always has the opposite direction
		if ( mRepeat.trans.type == Transaction.DEPOSIT )
		{
			detail1 = "from ";
			detail2 = "to ";
			repeat2.trans.type = Transaction.WITHDRAW;
		}
		else
		{
			detail1 = "to ";
			detail2 = "from ";
			repeat2.trans.type = Transaction.DEPOSIT;
		}
		mRepeat.trans.party = "Transfer " + detail1 + acct2.name;
		id = mRepeat.updateRepeat(trans.id(), false, false);
		repeat2.trans.party = "Transfer " + detail2 + acct1.name;
		repeat2.updateRepeat(repeat2.trans.id(), false, false);
	}
	mFinished = true;
	if (id != -1)
	{
		setResult(mFinishIntent);
	}
	else
	{
		setResult(RESULT_CANCELED);
	}
}
/**
 * Translates the repeat spinner's preset positions (0-5) into mRepeat
 * fields.  Positions 6/7 (custom) are left alone: mRepeat was already
 * filled in by the RepeatActivity result.
 */
private void setRepeat()
{
	// set repeat values
	switch (repeatSpinner.getSelectedItemPosition())
	{
	// No Repeat
	case 0:
		mRepeat.iter = RepeatSchedule.NO_REPEAT;
		break;
	// Daily
	case 1:
		mRepeat.iter = RepeatSchedule.DAILY;
		mRepeat.freq = 1;
		mRepeat.custom = 0;
		mRepeat.end = null;
		break;
	// Weekly
	case 2:
		mRepeat.iter = RepeatSchedule.WEEKLY;
		mRepeat.freq = 1;
		mRepeat.custom = 0;
		mRepeat.end = null;
		break;
	// Bi-weekly
	case 3:
		mRepeat.iter = RepeatSchedule.WEEKLY;
		mRepeat.freq = 2;
		mRepeat.custom = 0;
		mRepeat.end = null;
		break;
	// Monthly
	case 4:
		mRepeat.iter = RepeatSchedule.MONTHLY;
		mRepeat.freq = 1;
		mRepeat.custom = RepeatSchedule.DATE;
		mRepeat.end = null;
		break;
	// Yearly
	case 5:
		mRepeat.iter = RepeatSchedule.YEARLY;
		mRepeat.freq = 1;
		mRepeat.custom = 0;
		mRepeat.end = null;
		break;
	// if it's past position 5, mRepeat has already been set
	}
}
/**
 * All widget lookup/population lives in populateFields(), so it runs on
 * every resume (including after returning from child activities).
 */
@Override
protected void onResume()
{
	super.onResume();
	populateFields();
}
/**
 * Saves the edit session across restarts: launch parameters, the repeat
 * schedule, the chosen date, the transfer-spinner position, the pending
 * camera URI, and the URIs of all attached image rows.  Restored in
 * onCreate().
 */
@Override
protected void onSaveInstanceState(Bundle outState)
{
	super.onSaveInstanceState(outState);
	outState.putInt(TransactionActivity.KEY_REQ, mRequest);
	outState.putInt(TransactionActivity.KEY_TYPE, mType);
	if (mAccountId > 0)
	{
		outState.putInt(Account.KEY_ID, mAccountId);
	}
	if (mTransId > 0)
	{
		outState.putInt(Transaction.KEY_ID, mTransId);
	}
	if (mRepeatId > 0)
	{
		outState.putInt(RepeatSchedule.KEY_ID, mRepeatId);
	}
	// sync mRepeat with the spinner before persisting it
	setRepeat();
	outState.putInt(RepeatSchedule.KEY_ITER, mRepeat.iter);
	outState.putInt(RepeatSchedule.KEY_FREQ, mRepeat.freq);
	outState.putInt(RepeatSchedule.KEY_CUSTOM, mRepeat.custom);
	long end = (mRepeat.end == null ? 0 : mRepeat.end.getTime());
	outState.putLong(RepeatSchedule.KEY_DATE, end);
	if (mDate != null)
	{
		outState.putLong(Transaction.KEY_DATE, mDate.getTime());
	}
	if (mType == TransactionActivity.TRANSFER)
	{
		outState.putInt(TransactionEdit.KEY_TRANSFER, accountSpinner.getSelectedItemPosition());
	}
	if (mImageUri != null)
	{
		outState.putString(KEY_IMAGE_URI, mImageUri.toString());
	}
	// save added image URIs
	// add the images to the transaction
	LinearLayout images = (LinearLayout)findViewById(R.id.imageLayout);
	int count = images.getChildCount();
	if (count > 0)
	{
		LinearLayout image_row;
		String[] uris = new String[count];
		for (int i = 0; i < count; ++i)
		{
			image_row = (LinearLayout)images.getChildAt(i);
			uris[i] = ((Uri)image_row.getTag()).toString();
		}
		outState.putStringArray(KEY_URIS, uris);
	}
}
}
| chrismccurdy/loot | src/net/gumbercules/loot/transaction/TransactionEdit.java | Java | gpl-3.0 | 35,072 |
// This is a generated file. Not intended for manual editing.
package org.modula.parsing.definition.psi;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;
/**
 * PSI node interface for a field list inside a Modula DEFINITION module.
 * Generated by the grammar tooling ("This is a generated file" header) —
 * regenerate from the grammar rather than editing by hand.
 * All child accessors return null when the corresponding child is absent.
 */
public interface DefinitionFieldList extends PsiElement {
	@Nullable
	DefinitionFieldListSequence getFieldListSequence();
	@Nullable
	DefinitionIdentList getIdentList();
	@Nullable
	DefinitionIdent getIdent();
	@Nullable
	DefinitionQualident getQualident();
	@Nullable
	DefinitionTypes getTypes();
	// never null; empty list when there are no variants
	@NotNull
	List<DefinitionVariant> getVariantList();
}
| miracelwhipp/idea-modula-support | ims-plugin/gen/org/modula/parsing/definition/psi/DefinitionFieldList.java | Java | gpl-3.0 | 602 |
var a00099 =
[
[ "enc_info", "a00099.html#ad111ceb6802da6301dbe73a73b35a4a1", null ],
[ "id_info", "a00099.html#a301c28abac3bf1f2f38d2b95854695cd", null ],
[ "master_id", "a00099.html#ad0cc95342832dacb99b8b0daede5856c", null ],
[ "peer_addr", "a00099.html#a2c9e328ee5b20afe64224e8d807e0015", null ],
[ "sign_info", "a00099.html#ad6b652026333405eba289d528d220638", null ]
]; | DroneBucket/Drone_Bucket_CrazyFlie2_NRF_Firmware | nrf51_sdk/Documentation/s120/html/a00099.js | JavaScript | gpl-3.0 | 393 |
<?php
/**
* UDSSL Cron
*
* wget -q -O – http://local.udssl.dev/wp-cron.php >/dev/null 2>&1
*
* Slug: udssl_report
* Interval: udssl_report_interval
* Action: udssl_report_action
*/
class UDSSL_Cron{
/**
* UDSSL Interval Array
*/
private $interval_array = array();
/**
* Constructor
*/
function __construct(){
/**
* Hook the cron actions to update functions
*/
add_action('udssl_report_action', array($this, 'udssl_report'));
/**
* Add UDSSL Intervals
*/
$this->add_new_intervals();
/**
* Add UDSSL Intervals to WordPress Cron
*/
add_filter('cron_schedules', array($this, 'udssl_intervals'));
}
/**
* UDSSL Intervals
*/
function udssl_intervals($schedules){
$schedules = $schedules + $this->interval_array;
return $schedules;
}
/**
* Adds a new interval to the UDSSL schedule array
*/
function add_new_interval( $slug, $period ){
$interval_name = $slug . '_' . $period;
$interval_display_name = $slug . '_interval_' . $period;
$this->interval_array[ $interval_name ] = array(
'interval'=> $period,
'display'=> $interval_display_name
);
}
/**
* Adds Current UDSSL Intervals
*/
function add_new_intervals(){
$options = get_option( 'udssl_options' );
$slug = 'udssl_report';
$period = $options['cron']['report']['interval'];
$interval_name = $slug . '_' . $period;
$interval_display_name = $slug . '_interval_' . $period;
$this->interval_array[ $interval_name ] = array(
'interval'=> $period,
'display'=> $interval_display_name
);
}
/**
* Schedule or Reschedule a new UDSSL Report
*/
function schedule_event($slug, $period){
$timestamp = time() + $period;
$interval_name = $slug . '_' . $period;
$action = $slug . '_action';
wp_clear_scheduled_hook($action);
$this->add_new_interval($slug, $period);
$r = wp_schedule_event(
$timestamp,
$interval_name,
$action
);
if(false === $r){
global $udssl_theme;
$udssl_theme->utils->log('error', 'Cron Reschedule Failed.');
}
return array(
'timestamp' => $timestamp,
'interval_name' => $interval_name,
'action' => $action
);
}
/**
* Create Cron Jobs on Activation
*/
function activation(){
/**
* Add Email Report Periodic Update
*/
$options = get_option('udssl_options');
$period = $options['cron']['report']['interval'];
$slug = 'udssl_report';
$r1 = $this->schedule_event( $slug, $period );
if(isset( $r1['timestamp'] )):
return array(
'r1' => $r1['timestamp'],
);
else:
return false;
endif;
}
/**
* Get Cron Data
*/
function get_cron_data($slug){
$hook = $slug . '_action';
$next = wp_next_scheduled($hook);
if($next){
$now = time();
$minutes = (int)( ($next - $now) / 60 );
$seconds = (int)( ($next - $now) % 60 );
$description = 'Again in ' . $minutes . ' minutes and ' . $seconds . ' seconds';
return array(
'last' => 0,
'next' => $next,
'description' => $description
);
} else {
return array(
'last' => 0,
'next' => 0,
'description' => 'Not Scheduled'
);
}
}
/**
 * Clear Cron Jobs on Deactivation
 *
 * Unschedules every pending occurrence of the report hook so no cron
 * entries are left behind after the theme is deactivated.
 */
function deactivation(){
wp_clear_scheduled_hook('udssl_report_action');
}
/**
 * Send Email Report
 *
 * Cron callback (hooked to the 'udssl_report_action' event): lazily
 * loads the UDSSL_Reports class and dispatches the e-mail report.
 */
function udssl_report(){
require_once UDS_PATH . 'cron/class-udssl-report.php';
$reports = new UDSSL_Reports();
$reports->send_email_report();
}
}
?>
| UDSSL/theme | cron/class-udssl-cron.php | PHP | gpl-3.0 | 4,178 |
/*
* This file is part of Transitime.org
*
* Transitime.org is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License (GPL) as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* Transitime.org is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Transitime.org . If not, see <http://www.gnu.org/licenses/>.
*/
package org.transitime.gtfs;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.transitime.config.StringConfigValue;
import org.transitime.db.hibernate.HibernateUtils;
import org.transitime.db.structs.ActiveRevisions;
import org.transitime.db.structs.Agency;
import org.transitime.db.structs.Block;
import org.transitime.db.structs.Calendar;
import org.transitime.db.structs.CalendarDate;
import org.transitime.db.structs.ConfigRevision;
import org.transitime.db.structs.FareAttribute;
import org.transitime.db.structs.FareRule;
import org.transitime.db.structs.Frequency;
import org.transitime.db.structs.Location;
import org.transitime.db.structs.StopPath;
import org.transitime.db.structs.Route;
import org.transitime.db.structs.ScheduleTime;
import org.transitime.db.structs.Stop;
import org.transitime.db.structs.Transfer;
import org.transitime.db.structs.Trip;
import org.transitime.db.structs.TripPattern;
import org.transitime.db.structs.TripPatternKey;
import org.transitime.gtfs.gtfsStructs.GtfsAgency;
import org.transitime.gtfs.gtfsStructs.GtfsCalendar;
import org.transitime.gtfs.gtfsStructs.GtfsCalendarDate;
import org.transitime.gtfs.gtfsStructs.GtfsFareAttribute;
import org.transitime.gtfs.gtfsStructs.GtfsFareRule;
import org.transitime.gtfs.gtfsStructs.GtfsFrequency;
import org.transitime.gtfs.gtfsStructs.GtfsRoute;
import org.transitime.gtfs.gtfsStructs.GtfsShape;
import org.transitime.gtfs.gtfsStructs.GtfsStop;
import org.transitime.gtfs.gtfsStructs.GtfsStopTime;
import org.transitime.gtfs.gtfsStructs.GtfsTransfer;
import org.transitime.gtfs.gtfsStructs.GtfsTrip;
import org.transitime.gtfs.readers.GtfsAgenciesSupplementReader;
import org.transitime.gtfs.readers.GtfsAgencyReader;
import org.transitime.gtfs.readers.GtfsCalendarDatesReader;
import org.transitime.gtfs.readers.GtfsCalendarReader;
import org.transitime.gtfs.readers.GtfsFareAttributesReader;
import org.transitime.gtfs.readers.GtfsFareRulesReader;
import org.transitime.gtfs.readers.GtfsFrequenciesReader;
import org.transitime.gtfs.readers.GtfsRoutesReader;
import org.transitime.gtfs.readers.GtfsRoutesSupplementReader;
import org.transitime.gtfs.readers.GtfsShapesReader;
import org.transitime.gtfs.readers.GtfsShapesSupplementReader;
import org.transitime.gtfs.readers.GtfsStopTimesReader;
import org.transitime.gtfs.readers.GtfsStopTimesSupplementReader;
import org.transitime.gtfs.readers.GtfsStopsReader;
import org.transitime.gtfs.readers.GtfsStopsSupplementReader;
import org.transitime.gtfs.readers.GtfsTransfersReader;
import org.transitime.gtfs.readers.GtfsTripsReader;
import org.transitime.gtfs.readers.GtfsTripsSupplementReader;
import org.transitime.utils.Geo;
import org.transitime.utils.IntervalTimer;
import org.transitime.utils.MapKey;
import org.transitime.utils.Time;
/**
* Contains all the GTFS data processed into Java lists and such. Also combines
* in info from supplemental routes.txt file if there is one.
*
* @author SkiBu Smith
*
*/
public class GtfsData {
// Set by constructor. Specifies where to find data files
private final String gtfsDirectoryName;
private final String supplementDir;
// The session used throughout the class
private final Session session;
// Various params set by constructor
private final ActiveRevisions revs;
private final String notes;
// For when zip file used. Null otherwise
private final Date zipFileLastModifiedTime;
private final int originalTravelTimesRev;
private final String agencyId;
private final boolean shouldCombineShortAndLongNamesForRoutes;
private final double pathOffsetDistance;
private final double maxStopToPathDistance;
private final double maxDistanceForEliminatingVertices;
private final int defaultWaitTimeAtStopMsec;
private final double maxSpeedKph;
private final double maxTravelTimeSegmentLength;
private final boolean trimPathBeforeFirstStopOfTrip;
// So can make the titles more readable
private final TitleFormatter titleFormatter;
// Where the data is stored.
// From main and supplement routes.txt files. Key is route_id.
private Map<String, GtfsRoute> gtfsRoutesMap;
private List<Route> routes;
// For keeping track of which routes are just sub-routes of a parent.
// For these the route will not be configured separately and the
// route IDs from trips.txt and fare_rules.txt will be set to the
// parent ID.
private Map<String, String> properRouteIdMap =
new HashMap<String, String>();
// From main and supplement stops.txt files. Key is stop_id.
private Map<String, GtfsStop> gtfsStopsMap;
private Map<String, Stop> stopsMap;
// From trips.txt file. Key is trip_id.
private Map<String, GtfsTrip> gtfsTripsMap;
// From stop_times.txt file
private Map<String, List<GtfsStopTime>> gtfsStopTimesForTripMap; // Key is trip_id
private Collection<Trip> tripsCollection;
// Want to lookup trip patterns and only keep around
// unique ones. Also, want to have each trip pattern
// know which trips use it. This means that TripPattern
// needs list of GTFS trips. To do all of this need to
// use a map. The key needs to be a TripPatternKey so
// make sure the patterns are unique. The map contains
// TripPatterns as the values so that can update
// the trips list when another Trip is found to
// use that TripPattern.
private Map<TripPatternKey, TripPattern> tripPatternMap;
// Also need to be able to get trip patterns associated
// with a route so can be included in Route object.
// Key is routeId.
private Map<String, List<TripPattern>> tripPatternsByRouteIdMap;
// So can convert from a Trip to a TripPattern. The key
// is tripId.
private Map<String, TripPattern> tripPatternsByTripIdMap;
// So can make sure that each tripPattern gets a unique ID
// even when really screwy things are done such as use the same
// shapeId for different trip patterns.
private Set<String> tripPatternIdSet;
// So can know which service IDs have trips so that can remove
// calendars for ones that do not.
private Set<String> serviceIdsWithTrips;
// List of all the blocks, random order
private List<Block> blocks;
// Keyed on tripPatternId and pathId using getPathMapKey(tripPatternId, pathId)
private HashMap<String, StopPath> pathsMap;
private List<Calendar> calendars;
private List<CalendarDate> calendarDates;
private Set<String> validServiceIds;
// Data for the other GTFS files
// Key for frequencyMap is trip_id. Values are a List of Frequency objects
// since each trip can be listed in frequencies.txt file multiple times in
// order to define a different headway for different time ranges.
private Map<String, List<Frequency>> frequencyMap;
private List<Agency> agencies;
private List<FareAttribute> fareAttributes;
private List<FareRule> fareRules;
private List<Transfer> transfers;
// This is the format that dates are in for CSV. Should
// be accessed only through getDateFormatter() to make
// sure that it is initialized.
private SimpleDateFormat _dateFormatter = null;
// So can process only routes that match a regular expression.
// Note, see http://stackoverflow.com/questions/406230/regular-expression-to-match-text-that-doesnt-contain-a-word
// for details on how to filter out matches as opposed to specifying
// which ones want to keep.
private static StringConfigValue routeIdFilterRegEx = new StringConfigValue(
"transitime.gtfs.routeIdFilterRegEx",
null, // Default of null means don't do any filtering
"Route is included only if route_id matches the this regular "
+ "expression. If only want routes with \"SPECIAL\" in the id then "
+ "would use \".*SPECIAL.*\". If want to filter out such trips "
+ "would instead use the complicated \"^((?!SPECIAL).)*$\" or "
+ "\"^((?!(SPECIAL1|SPECIAL2)).)*$\" "
+ "if want to filter out two names. The default value "
+ "of null causes all routes to be included.");
private static Pattern routeIdFilterRegExPattern = null;
// So can process only trips that match a regular expression.
// Default of null means don't do any filtering
private static StringConfigValue tripIdFilterRegEx = new StringConfigValue(
"transitime.gtfs.tripIdFilterRegEx",
null, // Default of null means don't do any filtering
"Trip is included only if trip_id matches the this regular "
+ "expression. If only want trips with \"SPECIAL\" in the id then "
+ "would use \".*SPECIAL.*\". If want to filter out such trips "
+ "would instead use the complicated \"^((?!SPECIAL).)*$\" or "
+ "\"^((?!(SPECIAL1|SPECIAL2)).)*$\" "
+ "if want to filter out two names. The default value "
+ "of null causes all trips to be included.");
private static Pattern tripIdFilterRegExPattern = null;
// Logging
public static final Logger logger =
LoggerFactory.getLogger(GtfsData.class);
/********************** Member Functions **************************/
/**
 * Constructor. Opens the Hibernate session used for the entire GTFS
 * processing run, reads the current ActiveRevisions from the database,
 * and determines the new config and travel-times revisions to write.
 *
 * @param configRev
 *            If >= 0 then this specific configuration revision is used,
 *            allowing an existing configuration rev to be overwritten.
 *            Otherwise the current config rev is simply incremented.
 * @param notes
 *            Human-readable notes to be stored with the config revision.
 * @param zipFileLastModifiedTime
 *            Last-modified time of the GTFS zip file; null when the data
 *            was not read from a zip file.
 * @param shouldStoreNewRevs
 *            If true the new revisions become the active ones in the
 *            database when the session is closed.
 * @param projectId
 *            The agency ID; also selects which database to write to.
 * @param gtfsDirectoryName
 *            Directory containing the main GTFS .txt files.
 * @param supplementDir
 *            Directory containing supplemental GTFS files, or null.
 * @param shouldCombineShortAndLongNamesForRoutes
 * @param pathOffsetDistance
 * @param maxStopToPathDistance
 * @param maxDistanceForEliminatingVertices
 * @param defaultWaitTimeAtStopMsec
 * @param maxSpeedKph
 * @param maxTravelTimeSegmentLength
 * @param trimPathBeforeFirstStopOfTrip
 * @param titleFormatter
 *            For making route/stop titles more readable.
 */
public GtfsData(int configRev,
		String notes,
		Date zipFileLastModifiedTime,
		boolean shouldStoreNewRevs,
		String projectId,
		String gtfsDirectoryName,
		String supplementDir,
		boolean shouldCombineShortAndLongNamesForRoutes,
		double pathOffsetDistance,
		double maxStopToPathDistance,
		double maxDistanceForEliminatingVertices,
		int defaultWaitTimeAtStopMsec,
		double maxSpeedKph,
		double maxTravelTimeSegmentLength,
		boolean trimPathBeforeFirstStopOfTrip,
		TitleFormatter titleFormatter) {
	this.agencyId = projectId;
	this.notes = notes;
	this.zipFileLastModifiedTime = zipFileLastModifiedTime;
	this.gtfsDirectoryName = gtfsDirectoryName;
	this.supplementDir = supplementDir;
	this.shouldCombineShortAndLongNamesForRoutes =
			shouldCombineShortAndLongNamesForRoutes;
	this.pathOffsetDistance = pathOffsetDistance;
	this.maxStopToPathDistance = maxStopToPathDistance;
	this.maxDistanceForEliminatingVertices =
			maxDistanceForEliminatingVertices;
	this.defaultWaitTimeAtStopMsec = defaultWaitTimeAtStopMsec;
	this.maxSpeedKph = maxSpeedKph;
	this.maxTravelTimeSegmentLength = maxTravelTimeSegmentLength;
	this.trimPathBeforeFirstStopOfTrip = trimPathBeforeFirstStopOfTrip;
	this.titleFormatter = titleFormatter;

	// Get the database session. Using one session for the whole process.
	SessionFactory sessionFactory =
			HibernateUtils.getSessionFactory(getAgencyId());
	session = sessionFactory.openSession();

	// Deal with the ActiveRevisions. First, store the original travel times
	// rev since need it to read in old travel time data.
	ActiveRevisions originalRevs = ActiveRevisions.get(session);
	originalTravelTimesRev = originalRevs.getTravelTimesRev();

	// If should store the new revs in database (make them active)
	// then use the originalRevs read from db since they will be
	// written out when session is closed
	if (shouldStoreNewRevs) {
		// Use the originalRevs object which was read from the db.
		// When revs is updated then originalRevs is updated and
		// Hibernate will store the changes to originalRevs to
		// the db when the session is closed.
		revs = originalRevs;
	} else {
		// Don't need to store new revs in db so use a transient object
		revs = new ActiveRevisions();
	}

	// If particular configuration rev specified then use it. This way
	// can write over existing configuration revisions.
	if (configRev >= 0) {
		revs.setConfigRev(configRev);
	} else {
		// No explicit rev requested, so move to the next config rev.
		revs.setConfigRev(revs.getConfigRev() + 1);
	}

	// Increment the travel times rev
	revs.setTravelTimesRev(originalTravelTimesRev + 1);

	// Log which revisions are being used
	logger.info("Will be writing data to revisions {}", revs);
}
/**
 * Lazily creates and caches the date formatter ("yyyyMMdd") used for
 * parsing GTFS date strings. The timezone is taken from the first agency
 * listed in the agency.txt file. Exits the application if agency.txt
 * cannot be read since the timezone is essential.
 *
 * @return the cached, timezone-aware date formatter
 */
private DateFormat getDateFormatter() {
	if (_dateFormatter == null) {
		// Formatter not created yet. Need the agency timezone, so read
		// in the agency.txt GTFS file.
		List<GtfsAgency> gtfsAgencies =
				new GtfsAgencyReader(gtfsDirectoryName).get();
		if (gtfsAgencies.isEmpty()) {
			logger.error("Could not read in {}/agency.txt file, which is "
					+ "needed for createDateFormatter()", gtfsDirectoryName);
			System.exit(-1);
		}

		// Build the formatter using the first agency's timezone
		TimeZone timezone = TimeZone.getTimeZone(
				gtfsAgencies.get(0).getAgencyTimezone());
		SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd");
		formatter.setTimeZone(timezone);
		_dateFormatter = formatter;
	}
	return _dateFormatter;
}
/**
 * Reads routes.txt files from both gtfsDirectoryName and supplementDir
 * and combines them together. End result is that gtfsRoutesMap and
 * properRouteIdMap are created and filled in.
 */
private void processRouteData() {
	// For logging how long things take
	IntervalTimer timer = new IntervalTimer();

	// Let user know what is going on
	logger.info("Processing routes.txt data...");

	// Read in standard route data
	GtfsRoutesReader routesReader = new GtfsRoutesReader(gtfsDirectoryName);
	List<GtfsRoute> gtfsRoutes = routesReader.get();

	// Put GtfsRoute objects in Map so easy to find the right ones.
	// HashMap is keyed on the route_id.
	gtfsRoutesMap = new HashMap<String, GtfsRoute>(gtfsRoutes.size());
	for (GtfsRoute r : gtfsRoutes) {
		// If this route is just a subset of another route
		// then can ignore it. But put in the map so that
		// can adjust trips and fare_rules to use the proper
		// parent route ID.
		if (r.getParentRouteId() != null) {
			properRouteIdMap.put(r.getRouteId(), r.getParentRouteId());
		} else {
			// Use normal route ID
			properRouteIdMap.put(r.getRouteId(), r.getRouteId());
		}

		// Add the gtfs route to the map now that we can get the proper
		// route ID.
		gtfsRoutesMap.put(getProperIdOfRoute(r.getRouteId()), r);
	}

	// Read in supplemental route data
	if (supplementDir != null) {
		// Read in the supplemental route data
		GtfsRoutesSupplementReader routesSupplementReader =
				new GtfsRoutesSupplementReader(supplementDir);
		List<GtfsRoute> gtfsRoutesSupplement = routesSupplementReader.get();

		// Modify the main GtfsRoute objects using the supplemental data
		for (GtfsRoute supplementRoute : gtfsRoutesSupplement) {
			// Determine the original GtfsRoute that the supplemental
			// data corresponds to
			GtfsRoute originalGtfsRoute = null;
			if (supplementRoute.getRouteId() != null) {
				// Using regular route_id in supplemental routes.txt file.
				// Look up the GTFS route by the proper route ID.
				String routeMapKey =
						getProperIdOfRoute(supplementRoute.getRouteId());
				originalGtfsRoute = gtfsRoutesMap.get(routeMapKey);
			} else {
				// Must be using route short name as ID. Therefore
				// cannot use the routes hashmap directly. Compare
				// null-safely since a main-file route is not required to
				// have a route_short_name; calling equals() on a null
				// short name would throw a NullPointerException.
				String routeShortName = supplementRoute.getRouteShortName();
				for (GtfsRoute gtfsRoute : gtfsRoutes) {
					if (routeShortName != null
							&& routeShortName.equals(
									gtfsRoute.getRouteShortName())) {
						originalGtfsRoute = gtfsRoute;
						break;
					}
				}
			}

			if (originalGtfsRoute != null) {
				// Found the original GTFS route that the supplemental data
				// is associated with so create a new GTFSRoute object that
				// combines the original data with the supplemental data.
				GtfsRoute combinedRoute =
						new GtfsRoute(originalGtfsRoute, supplementRoute);

				// Store that combined data route in the map
				gtfsRoutesMap.put(combinedRoute.getRouteId(), combinedRoute);
			} else {
				// Didn't find the original route with the same ID as the
				// supplemental route so log warning that supplemental file
				// is not correct.
				logger.warn("Found route data in supplemental file but " +
						"there is no such route with the ID in the " +
						"main routes.txt file. Therefore could not use " +
						"the supplemental data for this route. " +
						"supplementRoute={}", supplementRoute);
			}
		}
	}

	// Let user know what is going on
	logger.info("processRouteData() finished processing routes.txt " +
			"data. Took {} msec.",
			timer.elapsedMsec());
}
/**
 * Takes data from gtfsRoutesMap and creates the corresponding Route
 * objects, storing them in the routes member. Processes all of the
 * titles to make them more readable. This method is separated out from
 * processRouteData() since reading trips needs the gtfs route info but
 * creating Route objects requires trips (for the trip patterns).
 */
private void processRouteMaps() {
	// For logging how long things take
	IntervalTimer timer = new IntervalTimer();

	// Let user know what is going on
	logger.info("Processing Routes objects data in processRouteMaps()...");

	// Make sure needed data is already read in. This method uses
	// trips and trip patterns from the stop_time.txt file. The objects
	// need to know lat & lon so can figure out bounding box. Therefore
	// stops.txt file must be read in first.
	if (gtfsTripsMap == null || gtfsTripsMap.isEmpty()) {
		logger.error("processTripsData() must be called before " +
				"GtfsData.processRouteMaps() is. Exiting.");
		System.exit(-1);
	}
	if (gtfsRoutesMap == null || gtfsRoutesMap.isEmpty()) {
		logger.error("processRouteData() must be called before " +
				"GtfsData.processRouteMaps() is. Exiting.");
		System.exit(-1);
	}

	// Now that the GtfsRoute objects have been created, create
	// the corresponding list of Route objects
	routes = new ArrayList<Route>();
	Set<String> routeIds = gtfsRoutesMap.keySet();
	int numberOfRoutesWithoutTrips = 0;
	for (String routeId : routeIds) {
		GtfsRoute gtfsRoute = gtfsRoutesMap.get(routeId);

		// If route is to be ignored then continue
		if (gtfsRoute.shouldRemove())
			continue;

		// If this route is just a subset of another route
		// then can ignore it.
		if (gtfsRoute.getParentRouteId() != null) {
			continue;
		}

		// Determine the trip patterns for the route so that they
		// can be included when constructing the route object.
		// If there aren't any then the route is dropped.
		List<TripPattern> tripPatternsForRoute = getTripPatterns(routeId);
		if (tripPatternsForRoute == null || tripPatternsForRoute.isEmpty()) {
			logger.warn("Route \"{}\" route_id={} was defined on line #{} in " +
					"routes.txt file but does not have any associated trips " +
					"defined in the stop_times.txt file. Therefore that route " +
					"has been removed from the configuration.",
					gtfsRoute.getRouteLongName()!=null ?
					gtfsRoute.getRouteLongName() : gtfsRoute.getRouteShortName() ,
					routeId,
					gtfsRoute.getLineNumber());
			++numberOfRoutesWithoutTrips;
			continue;
		}

		// Create the route object and add it to the container
		Route route = new Route(revs.getConfigRev(), gtfsRoute,
				tripPatternsForRoute,
				titleFormatter,
				shouldCombineShortAndLongNamesForRoutes);
		routes.add(route);
	}

	// Summarize how many problem routes there are that don't have trips
	if (numberOfRoutesWithoutTrips > 0) {
		logger.warn("Found {} routes without trips in stop_times.txt " +
				"out of a total of {} routes defined in the routes.txt file.",
				numberOfRoutesWithoutTrips, routeIds.size());
	}

	// Let user know what is going on. Fixed log message: it previously
	// named processRouteData(), which is a different method.
	logger.info("Finished processing Routes objects data in "
			+ "processRouteMaps(). Took {} msec.",
			timer.elapsedMsec());
}
/**
 * Reads stops.txt from the main GTFS directory, merges in any
 * supplemental stops.txt data, and populates both the gtfsStopsMap and
 * stopsMap members. Stop titles are run through the title formatter so
 * they are more readable.
 */
private void processStopData() {
	// Time the processing for the log
	IntervalTimer timer = new IntervalTimer();
	logger.info("Processing stops.txt data...");

	// Read the main stops.txt file and index the stops by stop_id
	List<GtfsStop> stopsFromFile =
			new GtfsStopsReader(gtfsDirectoryName).get();
	gtfsStopsMap = new HashMap<String, GtfsStop>(stopsFromFile.size());
	for (GtfsStop stop : stopsFromFile)
		gtfsStopsMap.put(stop.getStopId(), stop);

	// Overlay supplemental stop data when a supplemental dir is configured
	if (supplementDir != null) {
		List<GtfsStop> supplementalStops =
				new GtfsStopsSupplementReader(supplementDir).get();
		for (GtfsStop supplementStop : supplementalStops) {
			GtfsStop original =
					gtfsStopsMap.get(supplementStop.getStopId());
			if (original == null) {
				// Supplemental data refers to an unknown stop; skip it
				logger.error("Found supplemental stop data for stopId={} "
						+ "but that stop did not exist in the main "
						+ "stops.txt file. {}",
						supplementStop.getStopId(), supplementStop);
				continue;
			}
			// Replace the entry with a stop that merges the original
			// data with the supplemental data
			GtfsStop merged = new GtfsStop(original, supplementStop);
			gtfsStopsMap.put(merged.getStopId(), merged);
		}
	}

	// Create the map of the Stop objects. Use a ConcurrentHashMap instead
	// of a regular HashMap so that trimStops() can delete unused stops
	// while iterating across the hash map.
	stopsMap = new ConcurrentHashMap<String, Stop>(stopsFromFile.size());
	for (GtfsStop gtfsStop : gtfsStopsMap.values()) {
		Stop stop = new Stop(revs.getConfigRev(), gtfsStop, titleFormatter);
		stopsMap.put(stop.getId(), stop);
	}

	logger.info("Finished processing stops.txt data. Took {} msec.",
			timer.elapsedMsec());
}
/**
 * Sometimes only a partial configuration is used. For example, for MBTA
 * commuter rail only the commuter-rail trips are wanted even though the
 * GTFS data defines trips for other modes too. The data can then include
 * many stops that no trip pattern of the subset actually visits, so this
 * removes every such unused stop from the stopsMap member.
 */
private void trimStops() {
	// Sanity-check that the prerequisite processing already happened
	if (stopsMap == null || stopsMap.isEmpty()) {
		logger.error("processStopData() must be called before " +
				"GtfsData.trimStops() is. Exiting.");
		System.exit(-1);
	}
	if (tripPatternMap == null || tripPatternMap.isEmpty()) {
		logger.error("createTripsAndTripPatterns() must be called before " +
				"GtfsData.trimStops() is. Exiting.");
		System.exit(-1);
	}

	// Collect every stop ID referenced by any trip pattern
	Set<String> stopIdsUsed = new HashSet<String>();
	for (TripPattern tripPattern : tripPatternMap.values())
		for (String stopIdInTripPattern : tripPattern.getStopIds())
			stopIdsUsed.add(stopIdInTripPattern);

	// Keep only the referenced stops. Mutating via the key-set view is
	// safe here because stopsMap is a ConcurrentHashMap, whose views
	// support element removal.
	stopsMap.keySet().retainAll(stopIdsUsed);
}
/**
 * Reads data from trips.txt (plus any supplemental trips.txt) and puts it
 * into the gtfsTripsMap member, keyed by trip_id. Trips referencing a
 * route not defined in routes.txt are discarded with a warning.
 * Supplemental trips are matched to main-file trips by trip_id or, if
 * that is not specified, by trip_short_name.
 */
private void processTripsData() {
	// For logging how long things take
	IntervalTimer timer = new IntervalTimer();

	// Make sure needed data is already read in. This method uses
	// GtfsRoutes info to make sure all trips reference a route.
	if (gtfsRoutesMap == null || gtfsRoutesMap.isEmpty()) {
		logger.error("processGtfsRouteMap() must be called before "
				+ "GtfsData.processTripsData() is or no routes were "
				+ "found. Exiting.");
		System.exit(-1);
	}

	// Let user know what is going on
	logger.info("Processing trips.txt data...");

	// Create the map where the data is going to go
	gtfsTripsMap = new HashMap<String, GtfsTrip>();

	// Read in the trips.txt GTFS data from file
	GtfsTripsReader tripsReader = new GtfsTripsReader(gtfsDirectoryName);
	List<GtfsTrip> gtfsTrips = tripsReader.get();

	// For each GTFS trip make sure the route is OK and add the trip to
	// the gtfsTripsMap.
	for (GtfsTrip gtfsTrip : gtfsTrips) {
		// Make sure that each trip references a valid route. If it does
		// not then something is fishy with the data so output a warning.
		if (getGtfsRoute(gtfsTrip.getRouteId()) != null) {
			// Refers to a valid route so we should process this trip
			gtfsTripsMap.put(gtfsTrip.getTripId(), gtfsTrip);;
		} else {
			logger.warn("The trip id={} in the trips.txt file at " +
					"line # {} refers to route id={} but that route is " +
					"not in the routes.txt file. Therefore " +
					"that trip is being discarded.",
					gtfsTrip.getTripId(),
					gtfsTrip.getLineNumber(),
					gtfsTrip.getRouteId());
		}
	}

	// Read in supplemental trips data
	if (supplementDir != null) {
		// Read in the supplemental trip data
		GtfsTripsSupplementReader tripsSupplementReader =
				new GtfsTripsSupplementReader(supplementDir);
		List<GtfsTrip> gtfsTripsSupplement = tripsSupplementReader.get();

		// Modify the main GtfsTrip objects using the supplemental data
		for (GtfsTrip supplementTrip : gtfsTripsSupplement) {
			// First try matching supplemental trip to the regular trip
			// using trip ID
			String supplementTripId = supplementTrip.getTripId();
			GtfsTrip gtfsTrip = null;
			if (supplementTripId != null) {
				// Determine the GTFS trip that has the same trip id
				// as the supplemental GTFS trip
				gtfsTrip = gtfsTripsMap.get(supplementTripId);
				if (gtfsTrip == null) {
					logger.error("Found supplemental trip data for "
							+ "tripId={} on line {} but that trip did not "
							+ "exist in the main trips.txt file. {}",
							supplementTripId, supplementTrip.getLineNumber(),
							supplementTrip);
					continue;
				}
			} else {
				// trip ID wasn't specified in supplemental trips.txt so try
				// identifying trip using the trip short name
				String supplementTripShortName = supplementTrip.getTripShortName();
				if (supplementTripShortName != null) {
					// Determine the GTFS trip that has the same trip short
					// name as the supplemental GTFS trip. Linear scan since
					// the main map is keyed by trip_id, not short name.
					for (GtfsTrip gTrip : gtfsTrips) {
						if (supplementTripShortName.equals(gTrip.getTripShortName())) {
							gtfsTrip = gTrip;
							break;
						}
					}
					if (gtfsTrip == null) {
						logger.error("Found supplemental trip data for "
								+ "tripShortName={} on line {} but that "
								+ "trip did not exist in "
								+ "the main trips.txt file. {}",
								supplementTripShortName,
								supplementTrip.getLineNumber(),
								supplementTrip);
						continue;
					}
				} else {
					// Neither identifier present; cannot match this entry
					logger.error("Neither trip_id nor trip_short_name "
							+ "specified for supplemental trip data on "
							+ "line {}. ", supplementTrip.getLineNumber());
					continue;
				}
			}

			// Create a new GtfsTrip object that combines the original
			// data with the supplemental data
			GtfsTrip combinedTrip = new GtfsTrip(gtfsTrip, supplementTrip);

			// Store that combined data trip in the map
			gtfsTripsMap.put(combinedTrip.getTripId(), combinedTrip);
		}
	}

	// Let user know what is going on
	logger.info("Finished processing trips.txt data. Took {} msec.",
			timer.elapsedMsec());
}
/**
* Sorts gtfsStopTimesForTrip and then goes through the data to make sure it
* is OK. If data is a real problem, like a duplicate stop, it is not
* included in the returned list. If there is a duplicate stop and the times
* are different then it is assumed that it is a wait stop and the arrival
* time from the first stop entry and the departure time for the second stop
* entry are used. Any problems found are logged.
*
* @param gtfsStopTimesForTrip
* @return Processed/cleaned up list of GtfsStopTime for the trip
*/
private List<GtfsStopTime> processStopTimesForTrip(
List<GtfsStopTime> gtfsStopTimesForTrip) {
// For returning results
List<GtfsStopTime> processedGtfsStopTimesForTrip =
new ArrayList<GtfsStopTime>();
// Sort the list so that the stop times are in sequence order.
// This way can treat first and last stop times for a trip
// specially. Plus want them in order to determine trip patterns
// and such.
Collections.sort(gtfsStopTimesForTrip);
// Iterate over stop times for trip and remove inappropriate ones.
// Also, log any warning or error messages.
String previousStopId = null;
boolean firstStopInTrip = true;
int previousTimeForTrip = 0;
for (GtfsStopTime gtfsStopTime : gtfsStopTimesForTrip) {
// Convenience variable
String tripId = gtfsStopTime.getTripId();
// There can be a situation where the agency defines a stop as the
// start terminal of a trip but where the vehicle actually starts
// the trip at a subsequent stop. Yes, the agency should fix this
// data but they won't. An example is sfmta 21-Hayes route
// downtown where the first stop defined in config is wrong.
// Therefore filter out such stops. Note that only filtering
// out such stops if they are the first stops in the trip.
GtfsStop gtfsStop = getGtfsStop(gtfsStopTime.getStopId());
if (gtfsStop.getDeleteFromRoutesStr() != null
|| (firstStopInTrip && gtfsStop
.getDeleteFirstStopFromRoutesStr() != null)) {
GtfsTrip gtfsTrip = getGtfsTrip(gtfsStopTime.getTripId());
GtfsRoute gtfsRoute = getGtfsRoute(gtfsTrip.getRouteId());
String routeShortName = gtfsRoute.getRouteShortName();
String deleteFromRoutesStr =
gtfsStop.getDeleteFromRoutesStr() != null ?
gtfsStop.getDeleteFromRoutesStr() :
gtfsStop.getDeleteFirstStopFromRoutesStr();
if (gtfsStop.shouldDeleteFromRoute(routeShortName,
deleteFromRoutesStr)) {
continue;
}
}
// If the GtfsStopTime refers to a non-existent stop than log an error
if (getStop(gtfsStopTime.getStopId()) == null) {
logger.error("In stop_times.txt line {} refers to stop_id {} but " +
"it is not defined in the stops.txt file. Therefore this " +
"stop will be ignored for trip {}.",
gtfsStopTime.getLineNumber(),
gtfsStopTime.getStopId(),
gtfsStopTime.getTripId());
continue;
}
// Make sure arrival/departure times OK.
Integer arr = gtfsStopTime.getArrivalTimeSecs();
Integer dep = gtfsStopTime.getDepartureTimeSecs();
// Make sure that first stop has a departure time and the
// last one has an arrival time.
if (firstStopInTrip && dep == null) {
if (arr == null) {
logger.error("First stop in trip {} does not have a "
+ "departure time and no arrival time either. "
+ "The problem is in the "
+ "stop_times.txt file at line {}. ",
tripId, getGtfsTrip(tripId).getLineNumber());
} else {
logger.error("First stop in trip {} does not have a "
+ "departure time. The problem is in the "
+ "stop_times.txt file at line {}. Using arrival "
+ "time of {} as the departure time.",
tripId, getGtfsTrip(tripId).getLineNumber(),
Time.timeOfDayStr(arr));
dep = arr;
}
}
boolean lastStopInTrip =
gtfsStopTime == gtfsStopTimesForTrip
.get(gtfsStopTimesForTrip.size() - 1);
if (lastStopInTrip && arr == null) {
logger.error("Last stop in trip {} does not have an arrival "
+ "time. The problem is in the stop_times.txt file at "
+ "line {}.",
tripId, getGtfsTrip(tripId).getLineNumber());
}
// Make sure that not listing the same stop twice in a row.
if (gtfsStopTime.getStopId().equals(previousStopId)) {
// If same time for same stop then filter out the duplicate stop.
// Yes, SFMTA actually has done this!
if (arr == null || arr == previousTimeForTrip) {
// This stop doesn't have an arrival time or it is an
// identical time to the previous stop which means it
// it is an uneeded duplicated. Therefore don't use it.
logger.warn("Encountered stopId={} twice in a row with the "
+ "same times for tripId={} "
+ "in stop_times.txt at line {}. The second "
+ "stop will not be included.",
gtfsStopTime.getStopId(), gtfsStopTime.getTripId(),
gtfsStopTime.getLineNumber());
continue;
} else {
// Special case where a stop was defined twice in a row but
// with different schedule time. This likely means that the
// stop is a wait stop but it isn't configured correctly.
// For this case remove the original GtfsStopTime and
// create a new one with the previous arrival time but the
// new departure time and create it as a wait stop.
GtfsStopTime arrivalStopTime =
processedGtfsStopTimesForTrip
.remove(processedGtfsStopTimesForTrip
.size() - 1);
gtfsStopTime =
new GtfsStopTime(gtfsStopTime,
arrivalStopTime.getArrivalTimeSecs());
logger.warn("Encountered stopId={} twice in a row with "
+ "different times for tripId={} "
+ "in stop_times.txt at line {}. Assuming "
+ "it is supposed to be a mid trip wait "
+ "stop. {}", gtfsStopTime.getStopId(),
gtfsStopTime.getTripId(),
gtfsStopTime.getLineNumber(), gtfsStopTime);
}
}
// Make sure departure time >= arrival time.
// Of course either one can be null so bit more complicated.
if (arr != null && dep != null && dep < arr) {
logger.error("The departure time {} is before the arrival "
+ "time {} in the stop_times.txt file at line {}",
Time.timeOfDayStr(dep), Time.timeOfDayStr(arr),
gtfsStopTime.getLineNumber());
}
// Now make sure that arrival/departures times never go backwards in time
if (arr != null && arr < previousTimeForTrip) {
logger.error("The arrival time {} is before the time {} for " +
"a previous stop for the trip. " +
"See stop_times.txt file line {}",
Time.timeOfDayStr(arr),
Time.timeOfDayStr(previousTimeForTrip),
gtfsStopTime.getLineNumber());
}
if (dep != null && dep < previousTimeForTrip) {
logger.error("The departure time {} is before the time {} for " +
"a previous stop for the trip. " +
"See stop_times.txt file line {}",
Time.timeOfDayStr(dep),
Time.timeOfDayStr(previousTimeForTrip),
gtfsStopTime.getLineNumber());
}
// Update previous time so can check the next stop for the trip
if (arr != null)
previousTimeForTrip = arr;
if (dep != null)
previousTimeForTrip = dep;
// The GtfsStopTime is acceptable so add it to list to be returned
processedGtfsStopTimesForTrip.add(gtfsStopTime);
// For next time through loop
previousStopId = gtfsStopTime.getStopId();
firstStopInTrip = false;
}
// Return processed stop times for trip
return processedGtfsStopTimesForTrip;
}
/**
* Reads the data from stop_times.txt and puts it into
* gtfsStopTimesForTripMap map. Also processes the data to determine Trips
* and TripPatterns. When processing Trips uses frequencies.txt data to
* determine if each trip ID is actually for multiple trips with unique
* start times defined by the headway.
*/
private void processStopTimesData() {
    // Make sure needed data is already read in. This method determines
    // trips and trip patterns from the stop_time.txt file. This objects
    // need to know lat & lon so can figure out bounding box. Therefore
    // stops.txt file must be read in first. Also, need to know which route
    // is associated with a trip determined in stop_time.txt file. This
    // info is in trips.txt so it needs to be processed first.
    if (gtfsStopsMap == null || gtfsStopsMap.isEmpty()) {
        logger.error("processStopData() must be called before " +
                "GtfsData.processStopTimesData() is. Exiting.");
        System.exit(-1);
    }
    if (stopsMap == null || stopsMap.isEmpty()) {
        logger.error("processStopData() must be called before " +
                "GtfsData.processStopTimesData() is. Exiting.");
        System.exit(-1);
    }
    if (gtfsTripsMap == null || gtfsTripsMap.isEmpty()) {
        logger.error("processTripsData() must be called before " +
                "GtfsData.processStopTimesData() is. Exiting.");
        System.exit(-1);
    }
    // For logging how long things take
    IntervalTimer timer = new IntervalTimer();
    // Let user know what is going on
    logger.info("Processing stop_times.txt data...");
    // Read in the stop_times.txt GTFS data from file. Use a large initial
    // array size so when reading in data won't have to constantly increase
    // array size and do array copying. SFMTA for example has 1,100,000
    // stop times so starting with a value of 500,000 certainly should be
    // reasonable.
    GtfsStopTimesReader stopTimesReader =
            new GtfsStopTimesReader(gtfsDirectoryName);
    Collection<GtfsStopTime> gtfsStopTimes = stopTimesReader.get(500000);
    // Handle possible supplemental stop_times.txt file.
    // Match the supplemental data to the main data using both
    // trip_id and stop_id.
    if (supplementDir != null) {
        GtfsStopTimesSupplementReader stopTimesSupplementReader =
                new GtfsStopTimesSupplementReader(supplementDir);
        List<GtfsStopTime> stopTimesSupplement =
                stopTimesSupplementReader.get();
        if (stopTimesSupplement.size() > 0) {
            // Put original stop times into map keyed on trip_id+stop_id
            // for quick searching
            Map<MapKey, GtfsStopTime> map =
                    new HashMap<MapKey, GtfsStopTime>();
            for (GtfsStopTime gtfsStopTime : gtfsStopTimes) {
                MapKey key =
                        new MapKey(gtfsStopTime.getTripId(),
                                gtfsStopTime.getStopId());
                map.put(key, gtfsStopTime);
            }
            // Modify main GtfsStopTime objects using supplemental data
            for (GtfsStopTime stopTimeSupplement : stopTimesSupplement) {
                MapKey key =
                        new MapKey(stopTimeSupplement.getTripId(),
                                stopTimeSupplement.getStopId());
                // Handle depending on whether the supplemental data
                // indicates the point is to be deleted, added, or modified
                if (stopTimeSupplement.shouldDelete()) {
                    // The supplemental entry indicates that the stop time
                    // should be deleted
                    GtfsStopTime oldStopTime = map.remove(key);
                    if (oldStopTime == null) {
                        logger.error("Supplement stop_times.txt file for "
                                + "trip_id={} and stop_id={} specifies "
                                + "that the stop time should be removed "
                                + "but it is not actually configured in "
                                + "the regular stop_times.txt file",
                                stopTimeSupplement.getTripId(),
                                stopTimeSupplement.getStopId());
                    }
                } else if (map.get(key) != null) {
                    // The stop time is already in map so modify it by
                    // merging the supplemental values into the original
                    GtfsStopTime combinedShape =
                            new GtfsStopTime(map.get(key),
                                    stopTimeSupplement);
                    map.put(key, combinedShape);
                } else {
                    // The stop time is not already in map so add it
                    map.put(key, stopTimeSupplement);
                }
            }
            // Use the new combined stop times
            gtfsStopTimes = map.values();
        }
    }
    // The GtfsStopTimes are put into this map and then can create Trips
    // and TripPatterns. Keyed by tripId
    gtfsStopTimesForTripMap = new HashMap<String, List<GtfsStopTime>>();
    // Group the GtfsStopTimes into per-trip lists in the map
    for (GtfsStopTime gtfsStopTime : gtfsStopTimes) {
        // Add the GtfsStopTime to the map so later can create Trips and
        // TripPatterns
        String tripId = gtfsStopTime.getTripId();
        List<GtfsStopTime> gtfsStopTimesForTrip =
                gtfsStopTimesForTripMap.get(tripId);
        if (gtfsStopTimesForTrip == null) {
            gtfsStopTimesForTrip = new ArrayList<GtfsStopTime>();
            gtfsStopTimesForTripMap.put(tripId, gtfsStopTimesForTrip);
        }
        gtfsStopTimesForTrip.add(gtfsStopTime);
    }
    // Go through the stop times for each tripId. Sort them and look for
    // any problems with the data.
    Set<String> tripIds = gtfsStopTimesForTripMap.keySet();
    for (String tripId : tripIds) {
        List<GtfsStopTime> gtfsStopTimesForTrip =
                gtfsStopTimesForTripMap.get(tripId);
        List<GtfsStopTime> processedGtfsStopTimesForTrip =
                processStopTimesForTrip(gtfsStopTimesForTrip);
        // Replace the stop times for the trip with the processed/cleaned
        // up version
        gtfsStopTimesForTripMap.put(tripId, processedGtfsStopTimesForTrip);
    }
    // Log if a trip is defined in the trips.txt file but not in
    // stop_times.txt
    int numberOfProblemTrips = 0;
    for (String tripIdFromTripsFile : gtfsTripsMap.keySet()) {
        if (gtfsStopTimesForTripMap.get(tripIdFromTripsFile) == null) {
            ++numberOfProblemTrips;
            logger.warn("trip_id={} was defined on line #{} in trips.txt " +
                    "but there was no such trip defined in the " +
                    "stop_times.txt file",
                    tripIdFromTripsFile,
                    gtfsTripsMap.get(tripIdFromTripsFile).getLineNumber());
        }
    }
    if (numberOfProblemTrips > 0)
        logger.warn("Found {} trips were defined in trips.txt but not in " +
                "stop_times.txt out of a total of {} trips in trips.txt",
                numberOfProblemTrips, gtfsTripsMap.size());
    // Now that have all the stop times gtfs data create the trips
    // and the trip patterns.
    createTripsAndTripPatterns(gtfsStopTimesForTripMap);
    // Let user know what is going on
    logger.info("Finished processing stop_times.txt data. Took {} msec.",
            timer.elapsedMsec());
}
/**
* For the trip being created go through all the stop times from the
* stop_times.txt GTFS file and determine all the stop paths for the trip.
* Also update the trip patterns map when a new trip pattern encountered.
*
* @param trip
* The trip being created
* @return List of ScheduleTime objects for the trip
*/
private List<ScheduleTime> getScheduleTimesForTrip(Trip trip) {
    // Make sure necessary data already read in
    if (gtfsStopTimesForTripMap == null || gtfsStopTimesForTripMap.isEmpty()) {
        logger.error("gtfsStopTimesForTripMap not filled in before " +
                "GtfsData.getScheduleTimesForTrip() was. Exiting.");
        System.exit(-1);
    }
    if (gtfsRoutesMap == null) {
        logger.error("gtfsRoutesMap not filled in before " +
                "GtfsData.getScheduleTimesForTrip() was. Exiting.");
        System.exit(-1);
    }
    // Create list of Paths for creating trip pattern
    List<StopPath> paths = new ArrayList<StopPath>();
    // Create set of path IDs for this trip so can tell if looping
    // back on path such that need to create a unique path ID
    Set<String> pathIdsForTrip = new HashSet<String>();
    // Determine the gtfs stop times for this trip
    List<GtfsStopTime> gtfsStopTimesForTrip =
            gtfsStopTimesForTripMap.get(trip.getId());
    // For each stop time for the trip...
    List<ScheduleTime> newScheduleTimesList =
            new ArrayList<ScheduleTime>();
    String previousStopId = null;
    for (int i=0; i<gtfsStopTimesForTrip.size(); ++i) {
        // The current gtfsStopTime
        GtfsStopTime gtfsStopTime = gtfsStopTimesForTrip.get(i);
        // Convenience variables
        Integer arrTime = gtfsStopTime.getArrivalTimeSecs();
        Integer depTime = gtfsStopTime.getDepartureTimeSecs();
        boolean firstStopInTrip = i==0;
        boolean lastStopInTrip = i==gtfsStopTimesForTrip.size()-1;
        String stopId = gtfsStopTime.getStopId();
        // Add the schedule time to the Trip object. Some agencies configure the
        // same arrival and departure time for every stop. That is just silly
        // and overly verbose. If the times are the same should just use
        // departure time, except for last stop for trip where should use
        // arrival time.
        Integer filteredArr = arrTime;
        Integer filteredDep = depTime;
        if (arrTime != null && depTime != null && arrTime.equals(depTime)) {
            if (lastStopInTrip)
                filteredDep = null;
            else
                filteredArr = null;
        }
        ScheduleTime scheduleTime = new ScheduleTime(filteredArr, filteredDep);
        newScheduleTimesList.add(scheduleTime);
        // Create StopPath so it can be used to create TripPattern.
        // First determine attributes layoverStop,
        // waitStop, and scheduleAdherenceStop. They are true
        // if there is a departure time and they are configured or
        // are first stop in trip.
        Stop stop = getStop(stopId);
        // Determine if layover stop. If trip doesn't have schedule then
        // it definitely can't be a layover.
        boolean layoverStop = false;
        if (depTime != null && !trip.isNoSchedule()) {
            if (stop.isLayoverStop() == null) {
                // Stop doesn't explicitly configure layover behavior so
                // default to treating only the first stop of the trip
                // as a layover
                layoverStop = firstStopInTrip;
            } else {
                layoverStop = stop.isLayoverStop();
            }
        }
        // Determine if it is a waitStop. If trip doesn't have schedule then
        // it definitely can't be a waitStop.
        boolean waitStop = false;
        if (depTime != null && !trip.isNoSchedule()) {
            if (stop.isWaitStop() == null) {
                waitStop = firstStopInTrip || gtfsStopTime.isWaitStop();
            } else {
                waitStop = stop.isWaitStop();
            }
        }
        // This one is a bit complicated. Should be a scheduleAdherenceStop
        // if there is an associated time and it is configured to be such.
        // But should also be true if there is associated time and it is
        // first or last stop of the trip.
        boolean scheduleAdherenceStop =
                (depTime != null
                        && (firstStopInTrip
                                || gtfsStopTime.isTimepointStop()
                                || stop.isTimepointStop()))
                || (arrTime != null && lastStopInTrip);
        // Determine the pathId. Make sure that use a unique path ID by
        // appending "_loop" if looping over the same stops
        String pathId = StopPath.determinePathId(previousStopId, stopId);
        while (pathIdsForTrip.contains(pathId))
            pathId += "_loop";
        pathIdsForTrip.add(pathId);
        // Determine the GtfsRoute so that can get break time.
        // NOTE(review): assumes the route for the trip is always present
        // in gtfsRoutesMap; would NPE below otherwise — confirm upstream
        // filtering guarantees this.
        GtfsRoute gtfsRoute = gtfsRoutesMap.get(trip.getRouteId());
        // Create the new StopPath and add it to the list
        // for this trip.
        StopPath path = new StopPath(revs.getConfigRev(), pathId,
                stopId, gtfsStopTime.getStopSequence(), lastStopInTrip,
                trip.getRouteId(), layoverStop, waitStop,
                scheduleAdherenceStop, gtfsRoute.getBreakTime());
        paths.add(path);
        previousStopId = stopId;
    } // End of for each stop_time for trip
    // Now that have Paths defined for the trip, if need to,
    // also create new trip pattern
    updateTripPatterns(trip, paths);
    return newScheduleTimesList;
}
/**
* For when encountering a new trip. Creates the trip. Doesn't set the
* travel times or startTime and endTime.
*
* @param tripId
* @return The new trip, or null if there is a problem with this trip and
* should skip it.
*/
/**
 * For when encountering a new trip. Creates the trip. Doesn't set the
 * travel times or startTime and endTime.
 *
 * @param tripId the trip ID from the GTFS stop_times.txt data
 * @return The new trip, or null if there is a problem with this trip
 *         (not defined in trips.txt, service ID not valid in the future,
 *         or the associated route is not configured) and it should be
 *         skipped.
 */
private Trip createNewTrip(String tripId) {
    // Determine the GtfsTrip for the ID so can be used
    // to construct the Trip object.
    GtfsTrip gtfsTrip = getGtfsTrip(tripId);
    // If resulting gtfsTrip is null because it wasn't defined in
    // trips.txt then return null
    if (gtfsTrip == null) {
        return null;
    }
    // If the service ID for the trip is not valid in the future
    // then don't need to process this Trip
    if (!validServiceIds.contains(gtfsTrip.getServiceId())) {
        // Service ID not valid for this trip so log warning message
        // and continue on to next trip ID
        logger.warn("For tripId={} and serviceId={} the " +
                "service is not valid in the future so the trip " +
                "is being filtered out.",
                gtfsTrip.getTripId(), gtfsTrip.getServiceId());
        return null;
    }
    // If this route is actually a sub-route of a parent then use
    // the parent ID.
    String properRouteId =
            getProperIdOfRoute(gtfsTrip.getRouteId());
    // Determine the GtfsRoute for the trip. If the route isn't
    // configured (e.g. it was filtered out or is missing from
    // routes.txt) then cannot create the trip. Without this guard
    // the route short name lookup below would throw a
    // NullPointerException. The caller already expects a null return
    // for a filtered-out route.
    GtfsRoute gtfsRoute = gtfsRoutesMap.get(gtfsTrip.getRouteId());
    if (gtfsRoute == null) {
        logger.warn("For tripId={} the routeId={} is not configured " +
                "so the trip is being filtered out.",
                gtfsTrip.getTripId(), gtfsTrip.getRouteId());
        return null;
    }
    // Create the Trip and store the stop times into associated map
    String routeShortName = gtfsRoute.getRouteShortName();
    Trip trip =
            new Trip(revs.getConfigRev(), gtfsTrip, properRouteId,
                    routeShortName, titleFormatter);
    return trip;
}
/**
* Returns true if the trip is specified in the frequencies.txt GTFS file
* as being frequency based with an exact schedule. This means that need
* several
* copies of the trip, one for each start time.
*
* @param tripId
* @return true if the specified trip is frequency based with exact_times
*         set to true
*/
/**
 * Tells whether the trip is declared in the GTFS frequencies data as
 * frequency based with an exact schedule (exact_times set). Such a trip
 * needs to be expanded into one copy per start time.
 *
 * @param tripId ID of trip to check
 * @return true when the trip has frequency data and its first entry has
 *         exact_times set
 */
private boolean isTripFrequencyBasedWithExactTimes(String tripId) {
    // Look up the frequency entries configured for this trip
    List<Frequency> frequenciesForTrip = getFrequencyList(tripId);
    // No frequency data at all means the trip is regular schedule based
    if (frequenciesForTrip == null || frequenciesForTrip.isEmpty())
        return false;
    // Exactness is determined by the first frequency entry
    return frequenciesForTrip.get(0).getExactTimes();
}
/**
* Returns true if the trip is specified in the frequencies.txt GTFS file
* as being frequency based without an exact schedule. This means the trip
* doesn't
* describe a schedule (it is noSchedule). Instead, the vehicles are
* supposed to run in a loop at roughly the specified headway.
*
* @param tripId
* @return true if specified trip is frequency based but with exact_time set
* to false
*/
/**
 * Tells whether the trip is declared in the GTFS frequencies data as
 * frequency based without an exact schedule (exact_times not set). Such
 * a trip has no schedule; vehicles run in a loop at roughly the
 * specified headway.
 *
 * @param tripId ID of trip to check
 * @return true when the trip has frequency data and its first entry
 *         does not have exact_times set
 */
private boolean isTripFrequencyBasedWithoutExactTimes(String tripId) {
    // Look up the frequency entries configured for this trip
    List<Frequency> frequenciesForTrip = getFrequencyList(tripId);
    // No frequency data at all means the trip is regular schedule based
    if (frequenciesForTrip == null || frequenciesForTrip.isEmpty())
        return false;
    // Exactness is determined by the first frequency entry
    return !frequenciesForTrip.get(0).getExactTimes();
}
/**
*
* @param gtfsStopTimesForTripMap
* Keyed by tripId. List of GtfsStopTimes for the tripId.
*/
/**
 * Creates the Trip objects and determines the TripPatterns from the
 * per-trip stop time lists. Frequency based trips (frequencies.txt) are
 * expanded into either one Trip per start time (exact_times) or one
 * unscheduled Trip per time range.
 *
 * @param gtfsStopTimesForTripMap
 *            Keyed by tripId. List of GtfsStopTimes for the tripId.
 */
private void createTripsAndTripPatterns(
        Map<String, List<GtfsStopTime>> gtfsStopTimesForTripMap) {
    // Make sure needed data is already read in. Note: the log message
    // previously named processStopTimesData() here, which was
    // inconsistent with the sibling checks below; it now names this
    // method like the others do.
    if (stopsMap == null || stopsMap.isEmpty()) {
        logger.error("processStopData() must be called before " +
                "GtfsData.createTripsAndTripPatterns() is. Exiting.");
        System.exit(-1);
    }
    if (frequencyMap == null) {
        logger.error("processFrequencies() must be called before " +
                "GtfsData.createTripsAndTripPatterns() is. Exiting.");
        System.exit(-1);
    }
    if (validServiceIds == null) {
        logger.error("GtfsData.processServiceIds() must be called before " +
                "GtfsData.createTripsAndTripPatterns() is. Exiting.");
        System.exit(-1);
    }
    if (validServiceIds.isEmpty()) {
        logger.error("There are no services that are still active. Make "
                + "sure you are processing the most up to date GTFS data "
                + "that includes service that will be active. Exiting.");
        System.exit(-1);
    }
    // Create the necessary collections for trips. These collections are
    // populated in the other methods that are called by this method.
    tripsCollection = new ArrayList<Trip>();
    tripPatternMap = new HashMap<TripPatternKey, TripPattern>();
    tripPatternsByTripIdMap = new HashMap<String, TripPattern>();
    tripPatternsByRouteIdMap = new HashMap<String, List<TripPattern>>();
    tripPatternIdSet = new HashSet<String>();
    serviceIdsWithTrips = new HashSet<String>();
    pathsMap = new HashMap<String, StopPath>();
    // For each trip in the stop_times.txt file ...
    for (String tripId : gtfsStopTimesForTripMap.keySet()) {
        // Create a Trip element for the trip ID.
        Trip trip = createNewTrip(tripId);
        // If trip not valid then skip over it
        if (trip == null) {
            logger.warn("Encountered trip_id={} in the "
                    + "stop_times.txt file but that trip_id is not in "
                    + "the trips.txt file, the service ID for the "
                    + "trip is not valid in anytime in the future, "
                    + "or the associated route is filtered out, "
                    + "or the trip is filtered out. "
                    + "Therefore this trip cannot be configured and "
                    + "has been discarded.", tripId);
            continue;
        }
        // Keep track of service IDs so can filter unneeded calendars
        serviceIdsWithTrips.add(trip.getServiceId());
        // All the schedule times are available in gtfsStopTimesForTripMap
        // so add them all at once to the Trip. This also sets the startTime
        // and endTime for the trip. This is done after the Trip is already
        // created since it deals with a few things including schedule
        // times list, trip patterns, paths, etc and so it is much simpler
        // to have getScheduleTimesForTrip() update an already existing
        // Trip object.
        List<ScheduleTime> scheduleTimesList = getScheduleTimesForTrip(trip);
        trip.addScheduleTimes(scheduleTimesList);
        if (isTripFrequencyBasedWithExactTimes(tripId)) {
            // This is special case where for this trip ID
            // there is an entry in the frequencies.txt
            // file with exact_times set indicating that need to create
            // a separate Trip for each actual trip.
            List<Frequency> frequencyListForTripId =
                    frequencyMap.get(tripId);
            for (Frequency frequency : frequencyListForTripId) {
                // One Trip per headway-spaced start time in the range
                for (int tripStartTime = frequency.getStartTime();
                        tripStartTime < frequency.getEndTime();
                        tripStartTime += frequency.getHeadwaySecs()) {
                    Trip frequencyBasedTrip =
                            new Trip(trip, tripStartTime);
                    tripsCollection.add(frequencyBasedTrip);
                }
            }
        } else if (isTripFrequencyBasedWithoutExactTimes(tripId)) {
            // This is a trip defined in the GTFS frequency.txt file
            // to not be schedule based (not have exact_times set).
            // Need to create a trip for each time range defined for
            // the trip in frequency.txt .
            List<Frequency> frequencyListForTripId =
                    frequencyMap.get(tripId);
            for (Frequency frequency : frequencyListForTripId) {
                Trip frequencyBasedTrip =
                        new Trip(trip, frequency.getStartTime(),
                                frequency.getEndTime());
                tripsCollection.add(frequencyBasedTrip);
            }
        } else {
            // This is the normal case, an actual Trip that is not affected
            // by exact times in frequencies.txt data. Therefore simply add
            // it to the collection. It still might be a trip with no
            // schedule, but it isn't one with exact times.
            tripsCollection.add(trip);
        }
    } // End of for each trip ID
}
/**
* This method is called for each trip. Determines if the corresponding trip
* pattern is already in tripPatternMap. If it is then it updates the trip
* pattern to include this trip as a member. If this trip pattern not
* already encountered then it adds it to the tripPatternMap.
*
* @param trip
* @param stopPaths
* List of StopPath objects that define the trip pattern
*/
/**
 * Called for each trip. Looks up the trip pattern keyed by the trip's
 * shape ID and stop paths. If the pattern has already been encountered
 * the trip is simply added to it; otherwise a new TripPattern is created
 * and registered in all the trip pattern lookup maps.
 *
 * @param trip the Trip whose trip pattern is being determined
 * @param stopPaths ordered StopPath objects that define the trip pattern
 */
private void updateTripPatterns(Trip trip, List<StopPath> stopPaths) {
    // Key that uniquely identifies a trip pattern: shape plus stop paths
    TripPatternKey patternKey =
            new TripPatternKey(trip.getShapeId(), stopPaths);
    // See if this pattern was already encountered for an earlier trip
    TripPattern existingPattern = tripPatternMap.get(patternKey);
    if (existingPattern != null) {
        // Pattern already known so just associate this trip with it
        existingPattern.addTrip(trip);
        tripPatternsByTripIdMap.put(trip.getId(), existingPattern);
        // Record on the Trip which TripPattern it belongs to
        trip.setTripPattern(existingPattern);
        return;
    }
    // First time this pattern has been seen so construct it
    TripPattern newPattern = new TripPattern(revs.getConfigRev(),
            trip.getShapeId(), stopPaths, trip, this);
    // Register the new pattern in the lookup maps
    tripPatternMap.put(patternKey, newPattern);
    tripPatternsByTripIdMap.put(trip.getId(), newPattern);
    tripPatternIdSet.add(newPattern.getId());
    // Also group the new pattern by route, creating the route's list
    // if this is the first pattern for the route
    List<TripPattern> patternsForRoute =
            tripPatternsByRouteIdMap.get(newPattern.getRouteId());
    if (patternsForRoute == null) {
        patternsForRoute = new ArrayList<TripPattern>();
        tripPatternsByRouteIdMap.put(newPattern.getRouteId(),
                patternsForRoute);
    }
    patternsForRoute.add(newPattern);
    // Record on the Trip which TripPattern it belongs to
    trip.setTripPattern(newPattern);
    // Now that the trip pattern ID is available, register each of the
    // pattern's stop paths in the paths map
    for (StopPath stopPath : newPattern.getStopPaths())
        putPath(newPattern.getId(), stopPath.getId(), stopPath);
}
/**
* Goes through all the trips and constructs block assignments from them.
* Also, goes through the frequencies and created unscheduled blocks
* for them.
*/
/**
 * Goes through all the trips and constructs block assignments from them.
 * Also, goes through the frequencies and creates unscheduled blocks for
 * them. The heavy lifting is delegated to BlocksProcessor.
 */
private void processBlocks() {
    // Time the block processing for the completion log message
    IntervalTimer blockTimer = new IntervalTimer();
    logger.info("Processing blocks...");
    // BlocksProcessor does the actual work and returns the block list
    blocks = new BlocksProcessor(this).process(revs.getConfigRev());
    logger.info("Finished processing blocks. Took {} msec.",
            blockTimer.elapsedMsec());
}
/**
* Reads frequencies.txt file and puts data into _frequencies list.
*/
private void processFrequencies() {
    // Make sure needed data is already read in. Frequencies are matched
    // against trips so trips.txt must have been processed first.
    if (gtfsTripsMap == null || gtfsTripsMap.isEmpty()) {
        logger.error("processTripsData() must be called before " +
                "GtfsData.processFrequencies() is. Exiting.");
        System.exit(-1);
    }
    // For logging how long things take
    IntervalTimer timer = new IntervalTimer();
    // Let user know what is going on
    logger.info("Processing frequencies.txt data...");
    // Create the map where the data is going to go, keyed by trip ID
    frequencyMap = new HashMap<String, List<Frequency>>();
    // Read in the frequencies.txt GTFS data from file
    GtfsFrequenciesReader frequenciesReader =
            new GtfsFrequenciesReader(gtfsDirectoryName);
    List<GtfsFrequency> gtfsFrequencies = frequenciesReader.get();
    for (GtfsFrequency gtfsFrequency : gtfsFrequencies) {
        // Make sure this Frequency is in trips.txt; skip and log an
        // error if it refers to an unknown trip
        GtfsTrip gtfsTrip = gtfsTripsMap.get(gtfsFrequency.getTripId());
        if (gtfsTrip == null) {
            logger.error("The frequency from line # {} of frequencies.txt "
                    + "refers to trip_id={} but that trip is not in the "
                    + "trips.txt file. Therefore this frequency will be "
                    + "ignored.",
                    gtfsFrequency.getLineNumber(),
                    gtfsFrequency.getTripId());
            continue;
        }
        // Create the Frequency object and put it into the frequencyMap,
        // creating the per-trip list on first encounter
        Frequency frequency = new Frequency(revs.getConfigRev(),
                gtfsFrequency);
        String tripId = frequency.getTripId();
        List<Frequency> frequenciesForTripId = frequencyMap.get(tripId);
        if (frequenciesForTripId == null) {
            frequenciesForTripId = new ArrayList<Frequency>();
            frequencyMap.put(tripId, frequenciesForTripId);
        }
        frequenciesForTripId.add(frequency);
    }
    // Let user know what is going on
    logger.info("Finished processing frequencies.txt data. Took {} msec.",
            timer.elapsedMsec());
}
/**
* Reads in shapes.txt file and processes the information into
* StopPath objects. Using the term "StopPath" instead of "Shape" to
* be more descriptive of what the data is really for.
*/
private void processPaths() {
    // Make sure needed data is already read in. This method
    // converts the shapes into Paths such that each path ends
    // at a stop. Therefore need to have read in stop info first.
    if (stopsMap == null || stopsMap.isEmpty()) {
        logger.error("processStopData() must be called before " +
                "GtfsData.processPaths() is. Exiting.");
        System.exit(-1);
    }
    // For logging how long things take
    IntervalTimer timer = new IntervalTimer();
    // Let user know what is going on
    logger.info("Processing shapes.txt data...");
    // Read in the shapes.txt GTFS data from file
    GtfsShapesReader shapesReader = new GtfsShapesReader(gtfsDirectoryName);
    Collection<GtfsShape> gtfsShapes = shapesReader.get();
    // Handle possible supplemental shapes.txt file.
    // Match the supplemental data to the main data using both
    // shape_id and shape_pt_sequence.
    if (supplementDir != null) {
        GtfsShapesSupplementReader shapesSupplementReader =
                new GtfsShapesSupplementReader(supplementDir);
        List<GtfsShape> shapesSupplement = shapesSupplementReader.get();
        if (shapesSupplement.size() > 0) {
            // Put original shapes into map for quick searching
            Map<MapKey, GtfsShape> map = new HashMap<MapKey, GtfsShape>();
            for (GtfsShape gtfsShape : gtfsShapes) {
                MapKey key =
                        new MapKey(gtfsShape.getShapeId(),
                                gtfsShape.getShapePtSequence());
                map.put(key, gtfsShape);
            }
            // Modify main GtfsShape objects using supplemental data.
            for (GtfsShape shapeSupplement : shapesSupplement) {
                MapKey key =
                        new MapKey(shapeSupplement.getShapeId(),
                                shapeSupplement.getShapePtSequence());
                // Handle depending on whether the supplemental data
                // indicates the point is to be deleted, added, or modified
                if (shapeSupplement.shouldDelete()) {
                    // The supplemental shape indicates that the point
                    // should be deleted
                    GtfsShape oldShape = map.remove(key);
                    if (oldShape == null) {
                        logger.error("Supplement shapes.txt file for "
                                + "shape_id={} and shape_pt_sequence={} "
                                + "specifies that the shape point should "
                                + "be removed but it is not actually "
                                + "configured in the regular shapes.txt "
                                + "file",
                                shapeSupplement.getShapeId(),
                                shapeSupplement.getShapePtSequence());
                    }
                } else if (map.get(key) != null) {
                    // The shape point is already in map so modify it by
                    // merging the supplemental values into the original
                    GtfsShape combinedShape =
                            new GtfsShape(map.get(key), shapeSupplement);
                    map.put(key, combinedShape);
                } else {
                    // The shape point is not already in map so add it
                    map.put(key, shapeSupplement);
                }
            }
            // Use the new combined shapes
            gtfsShapes = map.values();
        }
    }
    // Process all the shapes into stopPaths. The distance/offset
    // parameters are configuration fields of this class; the processor
    // gets read-only views of the data it needs.
    StopPathProcessor pathProcessor =
            new StopPathProcessor(
                    Collections.unmodifiableCollection(gtfsShapes),
                    Collections.unmodifiableMap(stopsMap),
                    Collections.unmodifiableCollection(tripPatternMap.values()),
                    pathOffsetDistance,
                    maxStopToPathDistance,
                    maxDistanceForEliminatingVertices,
                    trimPathBeforeFirstStopOfTrip);
    pathProcessor.processPathSegments();
    // Let user know what is going on
    logger.info("Finished processing shapes.txt data. Took {} msec.",
            timer.elapsedMsec());
}
/**
* Reads agency.txt file and puts data into agencies list.
*/
/**
 * Reads agency.txt file and puts data into agencies list. Supplemental
 * agency data, when configured, is merged in by agency ID.
 */
private void processAgencyData() {
    // Make sure necessary data read in
    if (getRoutes() == null || getRoutes().isEmpty()) {
        // Route data first needed so can determine extent of agency
        logger.error("GtfsData.processRoutesData() must be called before " +
                "GtfsData.processAgencyData() is. Exiting.");
        System.exit(-1);
    }
    // Let user know what is going on
    logger.info("Processing agency.txt data...");
    // Create the list where the data is going to go
    agencies = new ArrayList<Agency>();
    // Read in the agency.txt GTFS data from file
    GtfsAgencyReader agencyReader = new GtfsAgencyReader(gtfsDirectoryName);
    List<GtfsAgency> gtfsAgencies = agencyReader.get();
    // Key the agencies by ID so supplemental data can be matched up
    HashMap<String, GtfsAgency> gtfsAgenciesMap =
            new HashMap<String, GtfsAgency>(gtfsAgencies.size());
    for (GtfsAgency gtfsAgency : gtfsAgencies)
        gtfsAgenciesMap.put(gtfsAgency.getAgencyId(), gtfsAgency);
    // Read in supplemental agency data, if configured
    if (supplementDir != null) {
        // Read in the supplemental agency data
        GtfsAgenciesSupplementReader agenciesSupplementReader =
                new GtfsAgenciesSupplementReader(supplementDir);
        List<GtfsAgency> gtfsAgenciesSupplement = agenciesSupplementReader.get();
        for (GtfsAgency gtfsAgencySupplement : gtfsAgenciesSupplement) {
            GtfsAgency gtfsAgency = gtfsAgenciesMap.get(gtfsAgencySupplement.getAgencyId());
            if (gtfsAgency == null) {
                logger.error("Found supplemental agency data for agencyId={} "
                        + "but that agency did not exist in the main "
                        + "agency.txt file. {}",
                        gtfsAgencySupplement.getAgencyId(), gtfsAgencySupplement);
                continue;
            }
            // Create a new GtfsAgency object that combines the original
            // data with the supplemental data
            GtfsAgency combinedAgency = new GtfsAgency(gtfsAgency, gtfsAgencySupplement);
            // Store that combined data agency in the map
            gtfsAgenciesMap.put(combinedAgency.getAgencyId(), combinedAgency);
        }
    }
    // Convert each (possibly combined) GtfsAgency into an Agency object
    for (GtfsAgency gtfsAgency : gtfsAgenciesMap.values()) {
        // Create the Agency object and put it into the array
        Agency agency = new Agency(revs.getConfigRev(), gtfsAgency, getRoutes());
        agencies.add(agency);
    }
    // Let user know what is going on. Note: the processed file is
    // agency.txt (previous message incorrectly said agencies.txt).
    logger.info("Finished processing agency.txt data. ");
}
/**
* Determines if the specified calendar is active in the future. It is
* active if the end date is in the future or if it is added in the future
* via calendar_dates.txt
*
* @param calendar
* @param calendarDates
* @return
*/
/**
 * Determines if the specified calendar is active in the future. It is
 * active if the end date is in the future or if service is added in the
 * future via calendar_dates.txt.
 *
 * @param calendar the calendar.txt entry to check
 * @param calendarDates the calendar_dates.txt exceptions, searched for an
 *        "add service" entry for the calendar's service ID
 * @return true if the calendar is active sometime in the future
 */
private static boolean isCalendarActiveInTheFuture(Calendar calendar,
        List<CalendarDate> calendarDates) {
    // Take a single snapshot of the current time so every comparison in
    // this call is consistent, instead of calling
    // System.currentTimeMillis() repeatedly inside the loop.
    long now = System.currentTimeMillis();
    // If calendar end date is for sometime in the future then it is
    // definitely active.
    if (calendar.getEndDate().getTime() > now)
        return true;
    // End date is not in the future so see if it is being added as an
    // exception via the calendar_dates.txt file.
    for (CalendarDate calendarDate : calendarDates) {
        if (calendar.getServiceId().equals(calendarDate.getServiceId())
                && calendarDate.addService()
                && calendarDate.getDate().getTime() > now) {
            return true;
        }
    }
    // The calendar is for in the past and the associated service is not
    // listed as an "add service" in a calendar date so must not be valid.
    return false;
}
/**
* Reads calendar.txt file and puts data into calendars list.
*/
/**
 * Reads calendar.txt file and puts data into the calendars list.
 */
private void processCalendars() {
    logger.info("Processing calendar.txt data...");
    // Accumulate the resulting Calendar objects here
    calendars = new ArrayList<Calendar>();
    // Read every GtfsCalendar record from calendar.txt and convert
    // each one into a Calendar database object
    GtfsCalendarReader calendarReader =
            new GtfsCalendarReader(gtfsDirectoryName);
    for (GtfsCalendar gtfsCalendar : calendarReader.get()) {
        calendars.add(new Calendar(revs.getConfigRev(), gtfsCalendar,
                getDateFormatter()));
    }
    logger.info("Finished processing calendar.txt data. ");
}
/**
* Reads calendar_dates.txt file and puts data into calendarDates list.
*/
/**
 * Reads calendar_dates.txt file and puts data into the calendarDates
 * list. Entries more than a day in the past are dropped since agencies
 * often leave stale dates in the file.
 */
private void processCalendarDates() {
    logger.info("Processing calendar_dates.txt data...");
    // Accumulate the resulting CalendarDate objects here
    calendarDates = new ArrayList<CalendarDate>();
    // Read the raw records from the calendar_dates.txt file
    GtfsCalendarDatesReader calendarDatesReader =
            new GtfsCalendarDatesReader(gtfsDirectoryName);
    for (GtfsCalendarDate gtfsCalendarDate : calendarDatesReader.get()) {
        CalendarDate calendarDate =
                new CalendarDate(revs.getConfigRev(), gtfsCalendarDate,
                        getDateFormatter());
        // Keep only dates that are not already in the past (with a one
        // day grace period). Old entries are of no use and just bloat
        // the configuration.
        boolean inThePast = calendarDate.getDate().getTime()
                + 1*Time.DAY_IN_MSECS < System.currentTimeMillis();
        if (!inThePast)
            calendarDates.add(calendarDate);
    }
    logger.info("Finished processing calendar_dates.txt data. ");
}
/**
 * Builds the validServiceIds set from the calendars list. A service ID is
 * only valid if its calendar is still active in the future, either directly
 * or via a calendar_dates.txt "add service" exception.
 * processCalendars() and processCalendarDates() must have run first.
 */
private void processServiceIds() {
    // Fail fast if the prerequisite data hasn't been read in yet.
    if (calendars == null) {
        logger.error("GtfsData.processCalendars() must be called " +
                "before GtfsData.processServiceIds() is. Exiting.");
        System.exit(-1);
    }
    if (calendarDates == null) {
        logger.error("GtfsData.processCalendarDates() must be called " +
                "before GtfsData.processServiceIds() is. Exiting.");
        System.exit(-1);
    }
    // Collect the IDs of services that are still active in the future.
    validServiceIds = new HashSet<String>();
    for (Calendar calendar : calendars) {
        if (!isCalendarActiveInTheFuture(calendar, calendarDates)) {
            // Service is entirely in the past; leave it out and warn.
            logger.warn("The service ID {} is not configured for in the " +
                    "future in calendar.txt and calendar_dates.txt and " +
                    "is therefore not being included in the " +
                    "configuration. {}",
                    calendar.getServiceId(), calendar);
            continue;
        }
        validServiceIds.add(calendar.getServiceId());
    }
}
/**
 * Get rid of calendars and calendar dates that don't have any trips
 * associated to try to pare down number of service IDs. Especially useful
 * when processing just part of an agency config, like MBTA commuter rail.
 * Requires that processStopTimesData() has populated serviceIdsWithTrips.
 */
private void trimCalendars() {
    // Fail fast if the prerequisite data hasn't been read in yet.
    if (serviceIdsWithTrips == null) {
        logger.error("GtfsData.processStopTimesData() must be called " +
                "before GtfsData.trimCalendars() is. Exiting.");
        System.exit(-1);
    }
    // Drop calendars whose service ID has no trips.
    for (Iterator<Calendar> it = calendars.iterator(); it.hasNext();) {
        if (!serviceIdsWithTrips.contains(it.next().getServiceId()))
            it.remove();
    }
    // Drop calendar dates whose service ID has no trips.
    for (Iterator<CalendarDate> it = calendarDates.iterator(); it.hasNext();) {
        if (!serviceIdsWithTrips.contains(it.next().getServiceId()))
            it.remove();
    }
    // Drop service IDs that have no trips.
    for (Iterator<String> it = validServiceIds.iterator(); it.hasNext();) {
        if (!serviceIdsWithTrips.contains(it.next()))
            it.remove();
    }
}
/**
 * Reads fare_attributes.txt file and puts data into fareAttributes list.
 */
private void processFareAttributes() {
    // Let user know what is going on
    logger.info("Processing fare_attributes.txt data...");
    // Create the list where the data is going to go
    fareAttributes = new ArrayList<FareAttribute>();
    // Read in the fare_attributes.txt GTFS data from file
    GtfsFareAttributesReader fareAttributesReader =
            new GtfsFareAttributesReader(gtfsDirectoryName);
    List<GtfsFareAttribute> gtfsFareAttributes = fareAttributesReader.get();
    for (GtfsFareAttribute gtfsFareAttribute : gtfsFareAttributes) {
        // Create the FareAttribute object and put it into the list.
        // (Local renamed from "FareAttribute", which shadowed the class
        // name and violated Java naming conventions.)
        FareAttribute fareAttribute =
                new FareAttribute(revs.getConfigRev(), gtfsFareAttribute);
        fareAttributes.add(fareAttribute);
    }
    // Let user know what is going on
    logger.info("Finished processing fare_attributes.txt data. ");
}
/**
 * Reads fare_rules.txt file and puts data into fareRules list.
 */
private void processFareRules() {
    // Let user know what is going on
    logger.info("Processing fare_rules.txt data...");
    // Create the list where the data is going to go
    fareRules = new ArrayList<FareRule>();
    // Read in the fare_rules.txt GTFS data from file
    GtfsFareRulesReader fareRulesReader =
            new GtfsFareRulesReader(gtfsDirectoryName);
    List<GtfsFareRule> gtfsFareRules = fareRulesReader.get();
    for (GtfsFareRule gtfsFareRule : gtfsFareRules) {
        // If this route is actually a sub-route of a parent then use the
        // parent ID.
        String parentRouteId =
                getProperIdOfRoute(gtfsFareRule.getRouteId());
        // Create the FareRule object and put it into the list
        // (previous comment said "CalendarDate"; that was a copy/paste error)
        FareRule fareRule = new FareRule(revs.getConfigRev(), gtfsFareRule,
                parentRouteId);
        fareRules.add(fareRule);
    }
    // Let user know what is going on
    logger.info("Finished processing fare_rules.txt data. ");
}
/**
 * Reads transfers.txt file and puts data into transfers list.
 */
private void processTransfers() {
    // Let user know what is going on
    logger.info("Processing transfers.txt data...");
    // Create the list where the data is going to go
    transfers = new ArrayList<Transfer>();
    // Read in the transfers.txt GTFS data from file
    GtfsTransfersReader transfersReader =
            new GtfsTransfersReader(gtfsDirectoryName);
    List<GtfsTransfer> gtfsTransfers = transfersReader.get();
    for (GtfsTransfer gtfsTransfer : gtfsTransfers) {
        // Create the Transfer object and put it into the list
        // (previous comment said "CalendarDate"; that was a copy/paste error)
        Transfer transfer = new Transfer(revs.getConfigRev(), gtfsTransfer);
        transfers.add(transfer);
    }
    // Let user know what is going on
    logger.info("Finished processing transfers.txt data. ");
}
/******************** Getter Methods ****************************/
/**
 * @return the agency ID for this GTFS configuration
 *         (previous javadoc said "projectId", which was stale)
 */
public String getAgencyId() {
    return agencyId;
}
/**
 * @return map of raw GtfsRoute objects keyed by route ID
 */
public Map<String, GtfsRoute> getGtfsRoutesMap() {
    return gtfsRoutesMap;
}
/**
 * @param routeId
 * @return the GtfsRoute for the specified routeId (the old javadoc said
 * "trips.txt", apparently copy/pasted from getGtfsTrip()),
 * null if that route Id not defined in the file.
 */
public GtfsRoute getGtfsRoute(String routeId) {
    GtfsRoute gtfsRoute = gtfsRoutesMap.get(routeId);
    return gtfsRoute;
}
/**
 * @param tripId
 * @return the GtfsTrip from the trips.txt file for the specified tripId,
 * null if that trip Id not defined in the file.
 */
public GtfsTrip getGtfsTrip(String tripId) {
    GtfsTrip gtfsTrip = gtfsTripsMap.get(tripId);
    return gtfsTrip;
}
/**
 * Returns true if GTFS stop times read in and are available
 * @return true if gtfsStopTimesForTripMap is non-null and non-empty
 */
public boolean isStopTimesReadIn() {
    return gtfsStopTimesForTripMap != null && !gtfsStopTimesForTripMap.isEmpty();
}
/**
 * Returns list of GtfsStopTimes for the trip specified
 * @param tripId
 * @return the stop times for the trip, null if the trip ID is unknown
 */
public List<GtfsStopTime> getGtfsStopTimesForTrip(String tripId) {
    return gtfsStopTimesForTripMap.get(tripId);
}
/**
 * @return Collection of all the Trip objects
 */
public Collection<Trip> getTrips() {
    return tripsCollection;
}
/**
 * @return True if tripsMap read in and is usable
 */
public boolean isTripsReadIn() {
    return tripsCollection != null && !tripsCollection.isEmpty();
}
/**
 * @param stopId
 * @return the raw GtfsStop for the ID, null if not defined
 */
public GtfsStop getGtfsStop(String stopId) {
    return gtfsStopsMap.get(stopId);
}
/**
 * @return Collection of all the Stop objects
 */
public Collection<Stop> getStops() {
    return stopsMap.values();
}
/**
 * Gets the Stop from the stopsMap.
 *
 * @param stopId
 * @return The Stop for the specified stopId
 */
public Stop getStop(String stopId) {
    return stopsMap.get(stopId);
}
/**
 * @return Collection of all TripPatterns
 * NOTE(review): these are the values of tripPatternsByTripIdMap, so the
 * same TripPattern may appear once per trip that uses it — confirm callers
 * expect possible duplicates.
 */
public Collection<TripPattern> getTripPatterns() {
    return tripPatternsByTripIdMap.values();
}
/**
 * @param routeId
 * @return List of TripPatterns for the routeId, null if route is unknown
 */
public List<TripPattern> getTripPatterns(String routeId) {
    return tripPatternsByRouteIdMap.get(routeId);
}
/**
 * @param tripId The trip ID to return the TripPattern for
 * @return The TripPattern for the specified trip ID
 */
public TripPattern getTripPatternByTripId(String tripId) {
    return tripPatternsByTripIdMap.get(tripId);
}
/**
 * @param tripPatternId
 * @return true if the trip pattern ID has already been allocated
 */
public boolean isTripPatternIdAlreadyUsed(String tripPatternId) {
    return tripPatternIdSet.contains(tripPatternId);
}
/**
 * @return the map of TripPatterns keyed by TripPatternKey
 */
public Map<TripPatternKey, TripPattern> getTripPatternMap() {
    return tripPatternMap;
}
/**
 * Returns the specified trip pattern. Not very efficient
 * because does a linear search through the set of trip
 * patterns but works well enough for debugging.
 *
 * @param tripPatternId
 * @return the matching TripPattern, or null if not found
 */
public TripPattern getTripPattern(String tripPatternId) {
    TripPattern found = null;
    // Linear scan; fine for a debugging helper.
    for (TripPattern candidate : tripPatternMap.values()) {
        if (candidate.getId().equals(tripPatternId)) {
            found = candidate;
            break;
        }
    }
    return found;
}
/**
 * For use with pathsMap member. Builds the composite key that uniquely
 * identifies a path within a particular trip pattern.
 *
 * @param tripPatternId
 * @param pathId
 * @return the combined "tripPatternId|pathId" key
 */
public static String getPathMapKey(String tripPatternId, String pathId) {
    // Join the two IDs with a separator that won't normally appear in
    // GTFS identifiers. StringBuilder.append(String) renders null as
    // "null", matching the behavior of the original "+" concatenation.
    StringBuilder key = new StringBuilder();
    key.append(tripPatternId).append('|').append(pathId);
    return key.toString();
}
/**
 * Returns the StopPath for the specified tripPatternId and pathId.
 * Can't just use pathId since lots of trip patterns will traverse
 * the same stops, resulting in identical pathIds. And don't want
 * to make the pathIds themselves unique because then wouldn't be
 * able to reuse travel time data as much.
 *
 * @param tripPatternId
 * @param pathId
 * @return the StopPath, or null if no such path is stored
 */
public StopPath getPath(String tripPatternId, String pathId) {
    // Look up by the combined key since pathId alone isn't unique.
    return pathsMap.get(getPathMapKey(tripPatternId, pathId));
}
/**
 * @return Collection of all the Paths
 */
public Collection<StopPath> getPaths() {
    return pathsMap.values();
}
/**
 * Adds the StopPath object to the pathMap.
 *
 * @param tripPatternId
 * @param pathId
 * @param path the StopPath to store under the combined key
 */
public void putPath(String tripPatternId, String pathId, StopPath path) {
    // Key combines both IDs since pathId alone isn't unique across patterns.
    String key = getPathMapKey(tripPatternId, pathId);
    pathsMap.put(key, path);
}
/**
 * @return List of routes that can be stored in db. The result
 * is not ordered by route_order since that isn't needed as part
 * of processing GTFS data.
 */
public List<Route> getRoutes() {
    return routes;
}
/**
 * If a route is configured to be a sub-route of a parent then this
 * method will return the route ID of the parent route. Otherwise
 * returns null.
 *
 * NOTE(review): the non-null case depends on what processRouteData() puts
 * into properRouteIdMap — confirm whether the map also contains identity
 * entries for routes without a parent.
 *
 * @param routeId
 * @return route ID of parent route if there is one. Otherwise, null.
 */
public String getProperIdOfRoute(String routeId) {
    if (routeId == null)
        return null;
    return properRouteIdMap.get(routeId);
}
/**
 * @return List of all the Block objects
 */
public List<Block> getBlocks() {
    return blocks;
}
/**
 * @return List of all the Agency objects
 */
public List<Agency> getAgencies() {
    return agencies;
}
/**
 * Returns true if according to frequency.txt GTFS file that specified trip
 * is frequencies based and doesn't have exact_times set. Note that if
 * exact_times is set then a schedule is used. It is just that the schedule
 * is based on the frequencies and start time of trip.
 *
 * @param tripId
 * @return true if frequency based trip
 */
public boolean isTripFrequencyBased(String tripId) {
    List<Frequency> frequencyListForTrip = getFrequencyList(tripId);
    // NOTE(review): assumes any list stored in frequencyMap is non-empty;
    // get(0) would throw otherwise — confirm against processFrequencies().
    return frequencyListForTrip != null
            && !frequencyListForTrip.get(0).getExactTimes();
}
/**
 * @param tripId
 * @return The Frequency list specified by tripId param, null if trip has
 * no frequencies entry
 */
public List<Frequency> getFrequencyList(String tripId) {
    return frequencyMap.get(tripId);
}
/**
 * Returns collection of all the Frequency objects. This method goes
 * through the internal frequencyMap and compiles the collection each
 * time this member is called.
 * @return a newly built collection of every Frequency
 */
public Collection<Frequency> getFrequencies() {
    // Flatten the per-trip lists into a single collection.
    Collection<Frequency> allFrequencies = new ArrayList<Frequency>();
    for (List<Frequency> frequenciesForTrip : frequencyMap.values()) {
        allFrequencies.addAll(frequenciesForTrip);
    }
    return allFrequencies;
}
/** @return List of all the Calendar objects read from calendar.txt */
public List<Calendar> getCalendars() {
    return calendars;
}
/** @return List of all the CalendarDate objects read from calendar_dates.txt */
public List<CalendarDate> getCalendarDates() {
    return calendarDates;
}
/** @return List of all the FareAttribute objects read from fare_attributes.txt */
public List<FareAttribute> getFareAttributes() {
    return fareAttributes;
}
/** @return List of all the FareRule objects read from fare_rules.txt */
public List<FareRule> getFareRules() {
    return fareRules;
}
/** @return List of all the Transfer objects read from transfers.txt */
public List<Transfer> getTransfers() {
    return transfers;
}
/**
 * Returns information about the current revision.
 *
 * @return a new ConfigRevision built from the current config rev, the
 * current time, the GTFS zip file's last-modified time, and the notes
 */
public ConfigRevision getConfigRevision() {
    return new ConfigRevision(revs.getConfigRev(), new Date(),
            zipFileLastModifiedTime, notes);
}
/*************************** Main Public Methods **********************/
/**
 * Outputs data for specified route grouped by trip pattern.
 * The resulting data can be visualized on a map by cutting
 * and pasting it in to http://www.gpsvisualizer.com/map_input .
 * Debug helper only; writes CSV-ish lines to stderr.
 * @param routeId
 */
public void outputPathsAndStopsForGraphing(String routeId) {
    System.err.println("\nPaths for routeId=" + routeId);
    // Also need to be able to get trip patterns associated
    // with a route so can be included in Route object.
    // Key is routeId.
    // NOTE(review): tripPatterns is null for an unknown routeId and the
    // loop below would then throw — acceptable for a debugging method.
    List<TripPattern> tripPatterns = tripPatternsByRouteIdMap.get(routeId);
    for (TripPattern tripPattern : tripPatterns) {
        System.err.println("\n\n================= TripPatternId=" + tripPattern.getId() +
                " shapeId=" + tripPattern.getShapeId() +
                "=======================\n");
        // Output the header info
        System.err.println("name,symbol,color,label,latitude,longitude");
        // Output the stop locations so can see where they are relative to path
        for (StopPath path : tripPattern.getStopPaths()) {
            String stopId = path.getStopId();
            Stop stop = getStop(stopId);
            System.err.println(", pin, red, stop " +
                    stopId + ", " +
                    Geo.format(stop.getLoc().getLat()) + ", "
                    + Geo.format(stop.getLoc().getLon()));
        }
        int pathCnt = 0;
        for (StopPath path : tripPattern.getStopPaths()) {
            // Use different colors and symbols so can tell how things are progressing
            ++pathCnt;
            String symbolAndColor;
            // Cycle through 13 distinct symbol/color combinations so
            // consecutive paths are visually distinguishable on the map.
            switch (pathCnt%13) {
            case 0: symbolAndColor = "star, blue"; break;
            case 1: symbolAndColor = "googlemini, green"; break;
            case 2: symbolAndColor = "diamond, blue"; break;
            case 3: symbolAndColor = "square, green"; break;
            case 4: symbolAndColor = "triangle, blue"; break;
            case 5: symbolAndColor = "cross, green"; break;
            case 6: symbolAndColor = "circle, blue"; break;
            case 7: symbolAndColor = "star, red"; break;
            case 8: symbolAndColor = "googlemini, yellow"; break;
            case 9: symbolAndColor = "diamond, red"; break;
            case 10: symbolAndColor = "square, yellow"; break;
            case 11: symbolAndColor = "triangle, red"; break;
            case 12: symbolAndColor = "cross, yellow"; break;
            default: symbolAndColor = "circle, red"; break;
            }
            // Output the path info for this trip pattern
            int i=0;
            for (Location loc : path.getLocations()) {
                String popupName = "" + i + " lat=" + Geo.format(loc.getLat()) +
                        " lon=" + Geo.format(loc.getLon());
                String label = "" + i;
                System.err.println(popupName + ", " + symbolAndColor + ", " +
                        label + ", " +
                        Geo.format(loc.getLat()) + ", "
                        + Geo.format(loc.getLon()));
                ++i;
            }
        }
    }
}
/**
 * Returns true if the tripId isn't supposed to be filtered out, as
 * specified by the transitime.gtfs.tripIdRegExPattern property.
 *
 * @param tripId
 * @return True if trip not to be filtered out
 */
public static boolean tripNotFiltered(String tripId) {
    // No filter configured means every trip is kept.
    if (tripIdFilterRegEx.getValue() == null)
        return true;
    // Create pattern if haven't done so yet, but only do so once.
    // NOTE(review): this lazy init is not thread-safe; harmless if GTFS
    // processing is single-threaded — confirm.
    if (tripIdFilterRegExPattern == null)
        tripIdFilterRegExPattern = Pattern.compile(tripIdFilterRegEx.getValue());
    boolean matches = tripIdFilterRegExPattern.matcher(tripId.trim()).matches();
    return matches;
}
/**
 * Returns true if the routeId isn't supposed to be filtered out, as
 * specified by the transitime.gtfs.routeIdRegExPattern property.
 *
 * @param routeId
 * @return True if route not to be filtered out
 */
public static boolean routeNotFiltered(String routeId) {
    // No filter configured means every route is kept.
    if (routeIdFilterRegEx.getValue() == null)
        return true;
    // Create pattern if haven't done so yet, but only do so once.
    // NOTE(review): same non-thread-safe lazy init as tripNotFiltered().
    if (routeIdFilterRegExPattern == null)
        routeIdFilterRegExPattern = Pattern.compile(routeIdFilterRegEx.getValue());
    boolean matches = routeIdFilterRegExPattern.matcher(routeId.trim()).matches();
    return matches;
}
/**
 * Does all the work. Processes the data and store it in internal structures.
 * Reads every GTFS file, derives trip patterns/paths/travel times, writes
 * the result to the database, and closes the db session.
 */
public void processData() {
    // For logging how long things take
    IntervalTimer timer = new IntervalTimer();
    // Let user know what is going on
    logger.info("Processing GTFS data from {} ...",
            gtfsDirectoryName);
    // Note. The order of how these are processed in important because
    // some data sets rely on others in order to be fully processed.
    // If the order is wrong then the methods below will log an error and
    // exit.
    processRouteData();
    processStopData();
    processCalendarDates();
    processCalendars();
    processServiceIds();
    processTripsData();
    processFrequencies();
    processStopTimesData();
    processRouteMaps();
    processBlocks();
    processPaths();
    processAgencyData();
    // Following are simple objects that don't require combining tables
    processFareAttributes();
    processFareRules();
    processTransfers();
    // Sometimes will be using a partial configuration. For example, for
    // MBTA commuter rail only want to use the trips defined for
    // commuter rail even though the GTFS data can have trips for
    // other modes defined. This can mean that the data includes many
    // stops that are actually not used by the subset of trips.
    // Therefore trim out the unused stops.
    trimStops();
    // Get rid of calendars and calendar dates that don't have any trips
    // associated to try to pare down number of service IDs. Especially
    // useful when processing just part of an agency config, like
    // MBTA commuter rail.
    trimCalendars();
    // debugging
    //outputPathsAndStopsForGraphing("8699");
    // Now process travel times and update the Trip objects.
    // (Identifier "travelTimesProcesssor" keeps its original spelling;
    // renaming is out of scope for a documentation-only change.)
    TravelTimesProcessorForGtfsUpdates travelTimesProcesssor =
            new TravelTimesProcessorForGtfsUpdates(revs,
                    originalTravelTimesRev, maxTravelTimeSegmentLength,
                    defaultWaitTimeAtStopMsec, maxSpeedKph);
    travelTimesProcesssor.process(session, this);
    // Try allowing garbage collector to free up some memory since
    // don't need the GTFS structures anymore.
    gtfsRoutesMap = null;
    gtfsTripsMap = null;
    gtfsStopTimesForTripMap = null;
    // Now that have read in all the data into collections output it
    // to database.
    DbWriter dbWriter = new DbWriter(this);
    dbWriter.write(session, revs.getConfigRev());
    // Finish things up by closing the session
    session.close();
    // Let user know what is going on
    logger.info("Finished processing GTFS data from {} . Took {} msec.",
            gtfsDirectoryName, timer.elapsedMsec());
    // just for debugging
    // GtfsLoggingAppender.outputMessagesToSysErr();
    //
    // //outputShapesForGraphing("102589" /*"102829"*/);
    //
    // outputPathsAndStopsForGraphing("8701");
    //
    // System.err.println("\nPaths:");
    // for (StopPath p : getPathsMap().values()) {
    // TripPattern tp = getTripPattern(p.getTripPatternId());
    // System.err.println("\npathId=" + p.getPathId() +
    // " routeId=" + p.getRouteId() +
    // " tripPattern=" + tp.toStringListingTripIds());
    // for (Location l : p.getLocations()) {
    // System.err.println("" + Geo.format(l.getLat()) + ", " + Geo.format(l.getLon()));
    // }
    // }
    //
    // System.err.println("\nPaths:");
    // for (StopPath p : getPathsMap().values()) {
    // System.err.println(" " + p);
    // }
    //
    // System.err.println("\nBlocks:");
    // for (Block b : getBlocks()) {
    // System.err.println(" " + b.toShortString());
    // }
    //
    // System.err.println("\nRoutes:");
    // for (Route r : getRoutes()) {
    // System.err.println(" " + r);
    // }
}
}
| walkeriniraq/transittime-core | transitime/src/main/java/org/transitime/gtfs/GtfsData.java | Java | gpl-3.0 | 92,143 |
# Write a program that asks for a person's favorite number.
# Have your program add 1 to the number, and then
# suggest the result as a bigger and better favorite number.
# (Do be tactful about it, though.)
puts "What is your favorite number?"
# Read the reply and drop the trailing newline.
favorite_number = gets.chomp
# to_i parses the string as an integer (non-numeric input becomes 0).
better_number = favorite_number.to_i+1
puts "Have you considered liking the number "+better_number.to_s+" instead?"
puts "I mean, it's a bigger number, so it has to be better, right? ;-D" | brennx0r/ruby-exploration | learn-to-program_Chris-Pine/chapter-5/favorite-number.rb | Ruby | gpl-3.0 | 461 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^login/$', views.login, name='login'),
url(r'^logout/$', views.logout, name='logout'),
url(r'^register/$', views.register, name='register'),
url(r'^register/mail/$', views.register_mail, name='register_mail'),
url(r'^register/activate/$', views.register_activate, name='register_activate'),
]
| enfancemill/baobaocloud | app/account/urls.py | Python | gpl-3.0 | 390 |
import os
# Configure Django settings before any django imports that need them.
os.environ["DJANGO_SETTINGS_MODULE"] = "settings"
from collections import namedtuple
from django.conf import settings
from django.contrib.auth.models import User
from django.db import connection
from djsopnet.control.assembly import \
    generate_assembly_equivalences, \
    generate_compatible_assemblies_between_cores, \
    _map_assembly_equivalence_to_skeleton
from djsopnet.models import SegmentationConfiguration
from djsopnet.control.block import _blockcursor_to_namedtuple
from tests.testsopnet import SopnetTest
# Number of parallel joblib workers for the assembly stage, or False to
# run sequentially.
PARALLEL_JOBS = getattr(settings, 'SOPNET_TEST_PARALLEL_JOBS', {'ASSEMBLY': False})['ASSEMBLY']
if PARALLEL_JOBS:
    from joblib import Parallel, delayed
# Threshold for number of segments an assembly equivalence must have to be
# mapped to a skeleton.
MAPPING_SEGMENTS_THRESHOLD = 20
st = SopnetTest()
sc = SegmentationConfiguration.objects.get(pk=st.segmentation_configuration_id)
segstack = sc.segmentationstack_set.get(type='Membrane')
# Wipe only assembly relationships; keep slices/segments/solutions intact.
segstack.clear_schema(delete_slices=False,
                      delete_segments=False,
                      delete_solutions=False,
                      delete_assembly_relationships=True)
bi = sc.block_info
block_size = bi.size_for_unit('block')
jobs = []
# Generate assembly compatibility edges for all (6-)neighboring, solved cores.
def core_compatibility(i, j, k):
    """Generate compatibility edges between the core at grid coordinate
    (i, j, k) and each of its solved (6-)neighbors.

    Only runs if the core itself has its solution set; unsolved neighbors
    are skipped.
    """
    cursor = connection.cursor()
    # NOTE(review): SQL is built with %-interpolation; values here are
    # internal integers from bi.core_range(), not user input — confirm.
    cursor.execute('''
        SELECT * FROM segstack_%s.core
        WHERE coordinate_x = %s
        AND coordinate_y = %s
        AND coordinate_z = %s
        ''' % (segstack.id, i, j, k))
    c = _blockcursor_to_namedtuple(cursor, block_size)[0]
    if c.solution_set_flag:
        print 'Generating compatibility for core %s (%s, %s, %s)' % (c.id, i, j, k)
        for (ni, nj, nk) in bi.core_neighbor_range((i, j, k)):
            cursor.execute('''
                SELECT * FROM segstack_%s.core
                WHERE coordinate_x = %s
                AND coordinate_y = %s
                AND coordinate_z = %s
                ''' % (segstack.id, ni, nj, nk))
            nbr = _blockcursor_to_namedtuple(cursor, block_size)[0]
            if nbr.solution_set_flag:
                generate_compatible_assemblies_between_cores(segstack.id, c.id, nbr.id)
# Dispatch compatibility generation for every core, either in parallel via
# joblib or sequentially.
for core_coord in bi.core_range():
    if PARALLEL_JOBS:
        # Close the db connection so each joblib worker opens its own.
        connection.close()
        jobs.append(delayed(core_compatibility)(*core_coord))
    else:
        core_compatibility(*core_coord)
if PARALLEL_JOBS:
    Parallel(n_jobs=PARALLEL_JOBS)(jobs)
# Generate assembly equivalences.
print 'Generating assembly equivalences...'
generate_assembly_equivalences(segstack.id)
# For each assembly equivalence, map to a skeleton.
# Reqfake mimics the minimal request interface the control function needs.
Reqfake = namedtuple('Reqfake', ['user', 'project_id'])
u = User.objects.get(username='drew')
request = Reqfake(user=u, project_id=sc.project_id)
def map_skeleton(equivalence_id):
    """Map one assembly equivalence to a skeleton; log and swallow errors
    so one failure doesn't abort the whole batch."""
    print 'Mapping assembly equivalence %s' % equivalence_id
    try:
        _map_assembly_equivalence_to_skeleton(request, segstack.id, equivalence_id)
    except Exception as e:
        print '...error'
        print str(e)
# Select unmapped equivalences that are large enough to be worth mapping.
global_cursor = connection.cursor()
global_cursor.execute('''
    SELECT
      e.id,
      COUNT(aseg.segment_id)
    FROM segstack_%(segstack_id)s.assembly_equivalence e
    JOIN segstack_%(segstack_id)s.assembly a
      ON a.equivalence_id = e.id
    JOIN segstack_%(segstack_id)s.assembly_segment aseg
      ON aseg.assembly_id = a.id
    WHERE e.skeleton_id IS NULL
    GROUP BY e.id
    HAVING COUNT(aseg.segment_id) > %(segments_threshold)s
    ''' % {'segstack_id': segstack.id, 'segments_threshold': MAPPING_SEGMENTS_THRESHOLD})
equivalence_ids = [r[0] for r in global_cursor.fetchall()]
# Dispatch the mapping jobs, again in parallel or sequentially.
jobs = []
for equivalence_id in equivalence_ids:
    if PARALLEL_JOBS:
        # Close the db connection so each joblib worker opens its own.
        connection.close()
        jobs.append(delayed(map_skeleton)(equivalence_id))
    else:
        map_skeleton(equivalence_id)
if PARALLEL_JOBS:
    Parallel(n_jobs=PARALLEL_JOBS)(jobs)
| catsop/CATMAID | django/projects/mysite/scripts/test_assembly_mapping.py | Python | gpl-3.0 | 3,648 |
/*
* Copyright (C) 2014 Mikel Corcuera <mik.corcuera@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.theoryinpractise.halbuilder.jaxrs.builders;
/**
 * Property builder for Long objects.
 *
 * Converts a raw property value (expected to be a String) into a Long.
 *
 * @author Mikel Corcuera <mik.corcuera@gmail.com>
 */
public class LongBuilder implements PropertyBuilder<Long>{
    @Override
    public Long build(Object rawValue)
    {
        // The incoming value is expected to be the String form of the number.
        String text = (String) rawValue;
        return Long.parseLong(text);
    }
    @Override
    public boolean canBuild(Class type)
    {
        // Handles Long and any of its (hypothetical) subtypes.
        return Long.class.isAssignableFrom(type);
    }
    @Override
    public Class getBuildType()
    {
        return Long.class;
    }
}
| mcorcuera/halbuilder-jaxrs | src/main/java/com/theoryinpractise/halbuilder/jaxrs/builders/LongBuilder.java | Java | gpl-3.0 | 1,252 |
"""
Transducer predicate class and parser
has input and output predicate and defines operations with them
Copyright (c) 2017 Michaela Bielikova <xbieli06@stud.fit.vutbr.cz>
"""
import abc
import random
from predicate_interface import PredicateInterface
class TransPred(PredicateInterface):
    """
    Transducer predicate class
    represents a transducer label with input and output predicates
    Attributes:
        input       input predicate
        output      output predicate
        identity    flag if the label represents identity
    """
    def __init__(self):
        # Both sides start unset; parsePredicate() fills them in.
        self.input = None
        self.output = None
        self.identity = False

    def __str__(self):
        # Identity labels are rendered with a leading "@" on both sides.
        if self.identity:
            return "@" + str(self.input) + "/@" + str(self.output)
        else:
            return str(self.input) + "/" + str(self.output)

    def __repr__(self):
        if self.identity:
            return "@" + str(self.input) + "/@" + str(self.output)
        else:
            return str(self.input) + "/" + str(self.output)

    def __eq__(self, other):
        # NOTE(review): no isinstance check — comparing against a non-TransPred
        # raises AttributeError instead of returning NotImplemented; confirm
        # callers only ever compare TransPred to TransPred.
        return (self.identity, self.input, self.output) == (other.identity, other.input, other.output)

    def __hash__(self):
        # Hash the string forms since the predicate objects themselves may
        # not be hashable consistently with __eq__.
        return hash((self.identity, str(self.input), str(self.output)))

    # NOTE(review): @abc.abstractmethod on concrete implementations has no
    # effect here (TransPred is instantiated directly); presumably kept to
    # mirror PredicateInterface — confirm.
    @abc.abstractmethod
    def complement(self):
        """
        Predicate negation
        :return: negation of given predicate
        """
        result = TransPred()
        result.identity = self.identity
        # Negate both sides independently.
        result.input = self.input.complement()
        result.output = self.output.complement()
        return result

    @abc.abstractmethod
    def conjunction(self, predicate):
        """
        Predicate conjunction
        :param predicate: second predicate
        :return: conjunction of two predicates
        """
        result = TransPred()
        if self.identity or predicate.identity:
            result.identity = True
        else:
            result.identity = False
        if result.identity:
            # For identity labels both sides must stay equal, so both sides
            # get the conjunction of all four component predicates.
            identic_input = self.input.conjunction(predicate.input)
            identic_output = self.output.conjunction(predicate.output)
            identic = identic_input.conjunction(identic_output)
            result.input = identic
            result.output = identic
        else:
            result.input = self.input.conjunction(predicate.input)
            result.output = self.output.conjunction(predicate.output)
        return result

    @abc.abstractmethod
    def disjunction(self, predicate):
        """
        Predicate disjunction
        :param predicate: second predicate
        :return: disjunction of two predicates
        """
        result = TransPred()
        if self.identity or predicate.identity:
            result.identity = True
        else:
            result.identity = False
        if result.identity:
            # NOTE(review): inner combination uses conjunction even in the
            # disjunction case — apparently intentional so that input and
            # output stay identical for identity labels; confirm.
            identic_input = self.input.disjunction(predicate.input)
            identic_output = self.output.disjunction(predicate.output)
            identic = identic_input.conjunction(identic_output)
            result.input = identic
            result.output = identic
        else:
            result.input = self.input.disjunction(predicate.input)
            result.output = self.output.disjunction(predicate.output)
        return result

    @abc.abstractmethod
    def is_equal(self, predicate):
        """
        Checks whether the given predicates are equal
        :param predicate: second predicate
        :return: bool
        """
        if self.identity != predicate.identity:
            # if every predicate has exactly one symbol, they can be equal even if their .identity is not the same
            if len(self.input) != 1 or len(self.output) != 1 or len(predicate.input) != 1 or len(predicate.output) != 1:
                return False
        if not self.input.is_equal(predicate.input):
            return False
        if not self.output.is_equal(predicate.output):
            return False
        return True

    @abc.abstractmethod
    def is_subset(self, predicate):
        """
        Checks whether the given predicate represent a subset of the second one
        :param predicate: second predicate
        :return: bool
        """
        if self.identity != predicate.identity:
            # A non-identity label can only fit inside an identity label if
            # the two are exactly equal.
            if predicate.identity and not self.is_equal(predicate):
                return False
        if not self.input.is_subset(predicate.input):
            return False
        if not self.output.is_subset(predicate.output):
            return False
        return True

    @abc.abstractmethod
    def is_satisfiable(self):
        """
        Checks whether the given predicate is satisfiable
        :return: bool
        """
        # Both sides must be satisfiable for the label to be usable.
        if not self.input.is_satisfiable():
            return False
        if not self.output.is_satisfiable():
            return False
        return True

    def combine(self, other):
        """
        Creates composition of two given labels
        :param other: the second predicate
        :return: composed predicate
        """
        result = TransPred()
        # NOTE(review): "result.identity" is always False at this point, so
        # the condition reduces to "self.identity"; possibly meant to be
        # "other.identity" — confirm against the composition semantics.
        if self.identity or result.identity:
            result.identity = True
            identic = self.input.conjunction(other.output)
            result.input = identic
            result.output = identic
        else:
            result.identity = False
            result.input = self.input
            result.output = other.output
        return result

    def translates(self, a, b):
        """
        Checks whether predicates translates symbol a to symbol b
        :param a: the input symbol
        :param b: the output symbol
        :return: bool
        """
        if self.identity:
            # Identity labels only map a symbol to itself.
            if self.input.has_letter(a) and a == b:
                return True
        else:
            if self.input.has_letter(a) and self.output.has_letter(b):
                return True
        return False

    def translate(self, a, alphabet):
        """
        Translates symbol a to another symbol
        :param a: the input symbol
        :param alphabet: alphabet of the automaton
        :return: translation fo the symbol, or False if a is not accepted
        """
        if self.input.has_letter(a):
            if self.identity:
                return a
            else:
                # Return the first alphabet symbol the output side accepts.
                for symbol in alphabet:
                    if self.output.has_letter(symbol):
                        return symbol
        else:
            return False
def parsePredicate(pred, automaton_type):
    """
    Parses given predicate
    :param pred: predicate string, e.g. "p/q" or "@p/@q" for identity labels
    :param automaton_type: type of the automaton ("INT" or "LT")
    :return: predicate object (TransPred)
    """
    result = TransPred()
    # A leading "@" marks an identity label; strip all "@" before splitting.
    if pred[0] == "@":
        result.identity = True
        pred = pred.replace("@", "")
    pred_parts = pred.split("/")
    # Pick the component-predicate parser matching the automaton type.
    if automaton_type == "INT":
        from in_notin_parser import parsePredicate as parsePr
    elif automaton_type == "LT":
        from letter_parser import parsePredicate as parsePr
    else:
        print("Unsupported transducer type.")
        exit(-1)
    result.input = parsePr(pred_parts[0])
    result.output = parsePr(pred_parts[1])
    return result
| Miskaaa/symboliclib | symboliclib/transducer_predicate.py | Python | gpl-3.0 | 7,134 |
const corenlp = require("corenlp");
const CoreNLP = corenlp.default; // convenient when not using `import`
/**
 * IMPORTANT
 * The server http://corenlp.run is used here just for demo purposes.
 * It is not set up to handle a large volume of requests. Instructions for
 * setting up your own server can be found in the Dedicated Server section (link below).
 * @see {@lik https://stanfordnlp.github.io/CoreNLP/corenlp-server.html}
 * @see {@link http://corenlp.run}
 */
const connector = new corenlp.ConnectorServer({
  dsn: 'http://corenlp.run',
});
// initialize the pipeline and document to annotate
const props = new corenlp.Properties({
  annotators: 'tokenize,ssplit,pos,ner,parse',
});
const pipeline = new corenlp.Pipeline(props, 'Spanish', connector);
const sent = new CoreNLP.simple.Sentence(
  'Jorge quiere cinco empanadas de queso y carne.'
);
// performs the call to corenlp (in this case via http)
// NOTE(review): top-level `await` only works on RunKit (per the file name)
// or in an ES module context; plain CommonJS Node would reject it.
await pipeline.annotate(sent);
// constituency parse string representation
console.log('parse', sent.parse());
// constituency parse tree representation
const tree = CoreNLP.util.Tree.fromSentence(sent);
// traverse the tree leaves and print some props
tree.visitLeaves(node =>
  console.log(node.word(), node.pos(), node.token().ner()));
// dump the tree for debugging
console.log(JSON.stringify(tree.dump(), null, 2));
| gerardobort/node-corenlp | examples/runkit.js | JavaScript | gpl-3.0 | 1,350 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/**
* @module twitter-account
* @license MPL-2.0
*/
"use strict";
const twitter = require("twitter-text");
const fetch = require("fetch-base64");
const pagination = require("./pagination");
const Twitter = require("twitter");
const self = require("../self");
const ContentAccount = require("./content-account");
const TwitterFormatter = require("../formatters/twitter");
/**
 * Poll the mentions timeline and emit a "mention" event for every mention
 * that is newer than the account's oldest cached own tweet and that this
 * account has not already replied to.
 *
 * @fires module:twitter-account.TwitterAccount#mention
 * @this module:twitter-account.TwitterAccount
 * @param {string?} lastMention - ID of the latest mention.
 * @returns {string} ID of the latest mention.
 */
async function getMentions(lastMention) {
    await this.ready;
    const args = {
        count: 200,
        tweet_mode: "extended"
    };
    if(lastMention !== undefined) {
        args.since_id = lastMention;
    }
    const tweets = await this.tweets;
    const res = await pagination.twitter((params) => this._twitterClient.get('statuses/mentions_timeline', params), args);
    if(res.length > 0) {
        // Bug fix: Date.parse() previously wrapped the whole ternary, so the
        // empty-timeline fallback evaluated Date.parse(Date.now()) — NaN —
        // and every comparison below was false, silently dropping mentions.
        // Parse only the tweet timestamp; fall back to the numeric epoch.
        const oldestTweet = tweets.length
            ? Date.parse(tweets[tweets.length - 1].created_at)
            : Date.now();
        for(const tweet of res) {
            //TODO filter replies in a thread from the same side, so you only get one issue for longer replies.
            if(Date.parse(tweet.created_at) > oldestTweet && tweets.every((t) => t.in_reply_to_status_id_str !== tweet.id_str)) {
                this.emit("mention", tweet);
            }
        }
        return res[0].id_str;
    }
    return lastMention;
}
getMentions.emitsEvents = true;
/**
 * Fetch the account's own timeline and prepend any tweets newer than the
 * ones already cached.
 *
 * @this module:twitter-account.TwitterAccount
 * @param {[Object]} [tweets=[]] - Previous tweets.
 * @returns {[Object]} Updated list of tweets.
 */
async function getTweets(tweets = []) {
    await this.ready;
    const userId = await this.getID();
    const args = {
        user_id: userId,
        exclude_replies: false,
        include_rts: true,
        count: 200,
        tweet_mode: "extended"
    };
    // Only ask for tweets newer than the most recent cached one.
    if(tweets.length) {
        args.since_id = tweets[0].id_str;
    }
    const fetched = await pagination.twitter((params) => this._twitterClient.get('statuses/user_timeline', params), args);
    // Newest tweets come first, so new results go in front of the cache.
    return fetched.length ? fetched.concat(tweets) : tweets;
}
/**
 * A new mention of the twitter account was found. Holds the raw tweet from the API.
 *
 * @event module:twitter-account.TwitterAccount#mention
 * @type {Object}
 */
/**
 * Content account backed by a Twitter profile. Publishes cards as tweets
 * or retweets and reconciles already-published cards against the timeline.
 *
 * @alias module:twitter-account.TwitterAccount
 * @extends module:accounts/content-account.ContentAccount
 */
class TwitterAccount extends ContentAccount {
    /**
     * Formatter used to author and parse card content for this account type.
     *
     * @type {module:formatters/twitter.TwitterFormatter}
     */
    static get Formatter() {
        return TwitterFormatter;
    }

    /**
     * @param {Object} config - Twitter client config.
     * @param {Twitter} [client] - Twitter client to use for testing.
     */
    constructor(config, client) {
        super({
            lastMention: getMentions,
            tweets: getTweets
        });
        /**
         * @type {external:Twitter}
         * @private
         */
        this._twitterClient = client ? client : new Twitter(config);
        /**
         * Resolves once the account credentials have been verified.
         *
         * @type {Promise}
         */
        this.ready = this.checkLogin().catch((e) => {
            console.error("TwitterAccount checkLogin", e);
            throw e;
        });
    }

    /**
     * Extract the Tweet id from the url if it's a valid tweet URL.
     *
     * @param {string} tweetUrl - URL to the tweet.
     * @returns {string?} If possible the ID of the tweet is returned.
     */
    static getTweetIDFromURL(tweetUrl) {
        const matches = tweetUrl.match(/^https?:\/\/(?:www\.)?twitter.com\/[^/]+\/status\/([0-9]+)\/?$/);
        return (matches && matches.length > 1) ? matches[1] : null;
    }

    /**
     * Extract the author's screen name from a tweet URL.
     *
     * @param {string} tweetUrl - URL to the tweet.
     * @returns {string?} Screen name, or null if the URL does not match.
     */
    static getUserFromURL(tweetUrl) {
        const matches = tweetUrl.match(/^https?:\/\/(?:www\.)?twitter.com\/([^/]+)\/status\/[0-9]+\/?$/);
        return (matches && matches.length > 1) ? matches[1] : null;
    }

    /**
     * @param {string} content - Content of the tweet to get the count for.
     * @returns {number} Estimated amount of remaining characters for the tweet.
     * @throws {Error} When the tweet contains too many images.
     * @todo Convert to percentage/permill based indication
     */
    static getRemainingChars(content) {
        // Markdown image references are stripped first — they become media
        // attachments and do not count against the character limit.
        const [ pureTweet ] = this.getMediaAndContent(content);
        const parsedTweet = twitter.parseTweet(pureTweet);
        return 280 - parsedTweet.weightedLength;
    }

    /**
     * Checks if the content of a tweet is too long.
     *
     * @param {string} content - Content of the tweet to check.
     * @returns {boolean} Whether the tweet content is too long.
     * @throws {Error} When the tweet contains too many images.
     */
    static tweetTooLong(content) {
        const charCount = this.getRemainingChars(content);
        return charCount < 0;
    }

    /**
     * Checks if the tweet content is valid.
     *
     * @param {string} content - Content of the tweet to check.
     * @returns {boolean} Wether the tweet is valid.
     * @throws {Error} When the tweet contains too many images.
     */
    static tweetValid(content) {
        const [ pureTweet ] = this.getMediaAndContent(content);
        const parsedTweet = twitter.parseTweet(pureTweet);
        return parsedTweet.valid;
    }

    /**
     * Separate media and text content of a tweet authored in GitHub Flavoured
     * Markdown.
     *
     * @param {string} tweet - Content of the tweet.
     * @returns {[string, [string]]} An array with the first item being the
     *          cleaned up text content and the second item being an array of
     *          media item URLs.
     * @throws {Error} When more than 4 images are given, as Twitter only
     *         supports up to 4 images.
     */
    static getMediaAndContent(tweet) {
        // Matches markdown image syntax: ![alt](url)
        if(tweet.search(/!\[[^\]]*\]\([^)]+\)/) !== -1) {
            const media = [];
            const pureTweet = tweet.replace(/!\[[^\]]*\]\(([^)]+)\)/g, (match, url) => {
                media.push(url);
                return '';
            });
            if(media.length > 4) {
                throw new Error("Can not upload more than 4 images per tweet");
            }
            return [ pureTweet.trim(), media ];
        }
        return [ tweet.trim(), [] ];
    }

    /**
     * Build the canonical permalink for a tweet.
     *
     * @param {string} username - Screen name of the tweet author.
     * @param {string} id - ID of the tweet.
     * @returns {string} Permalink URL.
     */
    static makeTweetPermalink(username, id) {
        return `https://twitter.com/${username}/status/${id}`;
    }

    /**
     * Markdown link to the authenticated account's profile.
     *
     * @returns {string} Markdown-formatted account link.
     */
    async getAccountLink() {
        const username = await this.getUsername();
        return `[@${username}](https://twitter.com/${username})`;
    }

    /**
     * Upload an image to Twitter and get its media id.
     *
     * @param {string} mediaUrl - URL of the image to upload.
     * @returns {string} Media ID of the image on Twitter.
     */
    async uploadMedia(mediaUrl) {
        // fetch-base64 returns [ base64String, ... ]; the API takes base64.
        const [ media_data ] = await fetch.remote(mediaUrl);
        const args = {
            media_data
        };
        const response = await this._twitterClient.post('media/upload', args);
        return response.media_id_string;
    }

    /**
     * Sends a tweet with the given content to the authenticated account.
     *
     * @param {string} content - Tweet content. Should not be over 140 chars.
     * @param {string} [media=''] - List of media ids to associate with the tweet.
     * @param {string} [inReplyTo] - Tweet this is a reply to.
     * @returns {string} URL of the tweet.
     */
    async tweet(content, media = '', inReplyTo = null) {
        if(self(this).tweetTooLong(content)) {
            return Promise.reject(new Error("Tweet content too long"));
        }
        const args = {
            status: content
        };
        if(inReplyTo) {
            const tweetId = self(this).getTweetIDFromURL(inReplyTo);
            if(tweetId) {
                args.in_reply_to_status_id = tweetId;
                // Only let Twitter auto-prepend @mentions when the content
                // does not already mention the tweet's author itself.
                const recipient = self(this).getUserFromURL(inReplyTo);
                const mentions = twitter.extractMentions(content).map((m) => m.toLowerCase());
                if(!mentions.length || !mentions.includes(recipient.toLowerCase())) {
                    args.auto_populate_reply_metadata = "true";
                }
            }
        }
        if(media.length) {
            args.media_ids = media;
        }
        await this.ready;
        const [
            res,
            username
        ] = await Promise.all([
            this._twitterClient.post('statuses/update', args),
            this.getUsername()
        ]);
        return self(this).makeTweetPermalink(username, res.id_str);
    }

    /**
     * Retweet a tweet based on its URL.
     *
     * @async
     * @param {string} url - URL to the tweet to retweet.
     * @returns {string} URL to the retweet.
     */
    async retweet(url) {
        const tweetId = self(this).getTweetIDFromURL(url);
        await this.ready;
        return this._twitterClient.post(`statuses/retweet/${tweetId}`, {})
            .then(() => url);
    }

    /**
     * Verifies the login credentials and stores the screenname if successful.
     *
     * @async
     * @returns {undefined}
     */
    checkLogin() {
        return this._twitterClient.get('account/verify_credentials', {}).then((res) => {
            this.username = res.screen_name;
            this.id = res.id_str;
        });
    }

    /**
     * Returns the username when available.
     *
     * @returns {string} Authenticated username.
     */
    async getUsername() {
        await this.ready;
        return this.username;
    }

    /**
     * Returns the Twitter ID of the current account when available.
     *
     * @returns {string} Authenticated user ID.
     */
    async getID() {
        await this.ready;
        return this.id;
    }

    /**
     * Split a card into tweet text and uploaded media IDs.
     *
     * @param {Object} card - Card whose content section is to be published.
     * @returns {[string, string]} Tweet text and a comma-joined media ID list.
     */
    async separateContentAndMedia(card) {
        const staticRef = self(this);
        const contentSection = staticRef.GetContentSection(card);
        const [ content, media ] = staticRef.getMediaAndContent(contentSection);
        const mediaIds = await Promise.all(media.map((m) => this.uploadMedia(m)));
        return [ content, mediaIds.join(",") ];
    }

    /**
     * Mark cards in the column as published when a matching tweet or
     * retweet is already present on the timeline.
     *
     * @param {Object} column - Column whose cards should be checked.
     * @param {Function} markPublished - Callback (card, note) to flag a card.
     * @returns {undefined}
     */
    async checkPosts(column, markPublished) {
        const tweets = await this.tweets;
        const cards = await column.cards;
        for(const card of Object.values(cards)) {
            if(!card.content.hasSection(TwitterFormatter.RETWEET)) {
                // Plain tweet card: match by text containment, preferring the
                // extended full_text field when the API provides it.
                const [ content ] = self(this).getMediaAndContent(self(this).GetContentSection(card));
                const tweet = tweets.find((t) => 'full_text' in t ? t.full_text.includes(content) : t.text.includes(content));
                if(tweet) {
                    await markPublished(card, self(this).makeTweetPermalink(tweet.user.screen_name, tweet.id_str));
                }
            }
            else {
                // Retweet card: match by the retweeted status ID.
                const retweetID = self(this).getTweetIDFromURL(card.content.getSection(TwitterFormatter.RETWEET));
                const didRetweet = tweets.some((t) => t.retweeted_status && t.retweeted_status.id_str === retweetID);
                if(didRetweet) {
                    await markPublished(card, 'Already retweeted.');
                }
            }
        }
    }

    /**
     * Replies are treated as high priority so conversations stay timely.
     *
     * @param {Object} card - Card to inspect.
     * @returns {boolean} Whether the card is a reply.
     */
    isCardHighPrio(card) {
        return card.content.hasSection(TwitterFormatter.REPLY_TO);
    }

    /**
     * Publish a card as a retweet or a (possibly replying) tweet.
     *
     * @param {Object} card - Card to publish.
     * @returns {string} Human-readable success message.
     */
    async publish(card) {
        let url;
        if(card.content.hasSection(TwitterFormatter.RETWEET)) {
            url = await this.retweet(card.content.getSection(TwitterFormatter.RETWEET));
        }
        else {
            let replyTo = null;
            if(card.content.hasSection(TwitterFormatter.REPLY_TO)) {
                replyTo = card.content.getSection(TwitterFormatter.REPLY_TO);
            }
            const [ content, media ] = await this.separateContentAndMedia(card);
            url = await this.tweet(content, media, replyTo);
        }
        let successMsg = "Successfully tweeted. See " + url;
        if(card.content.hasSection(TwitterFormatter.RETWEET)) {
            successMsg = "Successfully retweeted.";
        }
        return successMsg;
    }
}
module.exports = TwitterAccount;
| mozillach/gh-projects-content-queue | lib/accounts/twitter.js | JavaScript | mpl-2.0 | 12,246 |
#include "stdafx.h"
#include "widgets/vehiclelist.h"
#include "simulation.h"
#include "Driver.h"
#include "widgets/vehicleparams.h"
// Builds the vehicle list panel. Starts hidden (second ui_panel argument)
// and keeps a reference to the parent UI layer so it can later hand over
// ownership of per-vehicle parameter panels.
ui::vehiclelist_panel::vehiclelist_panel(ui_layer &parent)
    : ui_panel(STR_C("Vehicle list"), false), m_parent(parent)
{
}
// Draws the panel contents. On the very first show in training mode it
// auto-opens a parameter panel for the configured start vehicle and closes
// itself instead of rendering the list. Otherwise it renders one button per
// manned vehicle; clicking a button opens that vehicle's parameter panel.
void ui::vehiclelist_panel::render_contents()
{
    if (m_first_show && Global.gui_trainingdefault) {
        for (TDynamicObject *vehicle : simulation::Vehicles.sequence())
        {
            if (!vehicle->Mechanik || vehicle->name() != ToLower(Global.local_start_vehicle))
                continue;
            ui_panel *panel = new vehicleparams_panel(vehicle->name());
            m_parent.add_owned_panel(panel);
        }
        m_first_show = false;
        is_open = false;
        return;
    }
    for (TDynamicObject *vehicle : simulation::Vehicles.sequence())
    {
        // Skip unmanned vehicles.
        if (!vehicle->Mechanik)
            continue;
        std::string name = vehicle->name();
        double speed = vehicle->GetVelocity();
        // vehicle->Mechanik is guaranteed non-null here (filtered above),
        // so the redundant re-check was removed and the timetable prefix
        // is always appended.
        std::string timetable = vehicle->Mechanik->TrainName() + ", ";
        // "###" keeps the ImGui button ID stable while the label changes.
        std::string label = std::string(name + ", " + timetable + std::to_string(speed) + " km/h###");
        ImGui::PushID(vehicle);
        if (ImGui::Button(label.c_str())) {
            ui_panel *panel = new vehicleparams_panel(vehicle->name());
            m_parent.add_owned_panel(panel);
        }
        ImGui::PopID();
    }
}
| eu07/maszyna | widgets/vehiclelist.cpp | C++ | mpl-2.0 | 1,351 |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package util
import (
"code.google.com/p/go-uuid/uuid"
"github.com/mozilla-services/heka/client"
"github.com/mozilla-services/heka/message"
"fmt"
"log"
"os"
"runtime"
"runtime/debug"
"strconv"
"strings"
"time"
)
// HekaLogger sends structured log messages to a Heka server and mirrors
// them to the local log when they pass the severity filter.
type HekaLogger struct {
	client   client.Client  // assumes unused placeholder for the heka client — TODO confirm
	encoder  client.Encoder // protobuf encoder, nil when heka is disabled
	sender   client.Sender  // network sender, nil when heka is disabled
	logname  string         // logger name attached to each message
	pid      int32          // process id attached to each message
	hostname string         // hostname attached to each message
	conf     *MzConfig      // source configuration
	tracer   bool           // when true, include caller file/line/name
	filter   int64          // only levels below this value are emitted
}
// Message levels, in decreasing severity (CRITICAL == 0).
const (
	CRITICAL = iota
	ERROR
	WARNING
	INFO
	DEBUG
)

// The fields to relay. NOTE: object reflection is VERY CPU expensive.
// I specify strings here to reduce that as much as possible. Please do
// not change this to something like map[string]interface{} since that
// can dramatically increase server load.
type Fields map[string]string
// Create a new Heka logging interface.
// When "heka.use" is set, a protobuf encoder and network sender are created;
// otherwise messages are only written to the local log. Panics if the
// network sender cannot be created.
func NewHekaLogger(conf *MzConfig) *HekaLogger {
	//Preflight
	var encoder client.Encoder = nil
	var sender client.Sender = nil
	var logname string = ""
	var err error
	var filter int64

	pid := int32(os.Getpid())
	dhost, _ := os.Hostname()
	conf.SetDefaultFlag("heka.show_caller", false)
	conf.SetDefault("logger.filter", "10")
	filter, _ = strconv.ParseInt(conf.Get("logger.filter", "10"), 0, 0)
	// Bug fix: tracer was hard-coded to false and never read from the
	// config, so the "heka.show_caller" flag (defaulted above) had no
	// effect. Honor the flag here.
	tracer := conf.GetFlag("heka.show_caller")
	if conf.GetFlag("heka.use") {
		encoder = client.NewProtobufEncoder(nil)
		sender, err = client.NewNetworkSender(conf.Get("heka.sender", "tcp"),
			conf.Get("heka.server_addr", "127.0.0.1:5565"))
		if err != nil {
			log.Panic("Could not create sender ", err)
		}
		logname = conf.Get("heka.logger_name", "package")
	}
	return &HekaLogger{encoder: encoder,
		sender:   sender,
		logname:  logname,
		pid:      pid,
		hostname: conf.Get("heka.current_host", dhost),
		conf:     conf,
		tracer:   tracer,
		filter:   filter}
}
// Fields are additional logging data passed to Heka. They are technically
// undefined, but searchable and actionable. Empty keys are skipped and
// empty values are replaced with the "*empty*" placeholder.
func addFields(msg *message.Message, fields Fields) error {
	for key, value := range fields {
		// Keys must be non-empty; silently drop invalid entries.
		if key == "" {
			continue
		}
		// Heka fields cannot carry empty values, substitute a marker.
		if value == "" {
			value = "*empty*"
		}
		field, err := message.NewField(key, value, value)
		if err != nil {
			return err
		}
		msg.AddField(field)
	}
	return nil
}
// Logging workhorse function. Chances are you're not going to call this
// directly, but via one of the helper methods. of Info() .. Critical()
// level - One of the defined logging CONST values
// mtype - Message type, Short class identifier for the message
// payload - Main error message
// fields - additional optional key/value data associated with the message.
//
// Messages are only printed locally AND relayed to Heka when their level
// is below the configured filter.
func (self HekaLogger) Log(level int32, mtype, payload string, fields Fields) (err error) {
	var caller Fields
	// add in go language tracing. (Also CPU intensive, but REALLY helpful
	// when dev/debugging)
	if self.tracer {
		if pc, file, line, ok := runtime.Caller(2); ok {
			funk := runtime.FuncForPC(pc)
			caller = Fields{
				"file": file,
				// Bug fix: FormatInt panics on base 0 ("illegal base");
				// the line number must be rendered in base 10.
				"line": strconv.FormatInt(int64(line), 10),
				"name": funk.Name()}
		}
	}
	// Only print out the debug message if it's less than the filter.
	if int64(level) < self.filter {
		dump := fmt.Sprintf("[%d]% 7s: %s", level, mtype, payload)
		if len(fields) > 0 {
			var fld []string
			for key, val := range fields {
				fld = append(fld, key+": "+val)
			}
			dump += " {" + strings.Join(fld, ", ") + "}"
		}
		if len(caller) > 0 {
			// Bug fix: caller["line"] is a string, so %d produced
			// "%!d(string=...)"; use %s for all three parts.
			dump += fmt.Sprintf(" [%s:%s %s]", caller["file"],
				caller["line"], caller["name"])
		}
		// Bug fix: Printf treated user payloads as a format string;
		// Print emits the already-formatted line verbatim.
		log.Print(dump)
		// Don't send an error if there's nothing to do
		if self.sender == nil {
			return nil
		}
		var stream []byte
		msg := &message.Message{}
		msg.SetTimestamp(time.Now().UnixNano())
		msg.SetUuid(uuid.NewRandom())
		msg.SetLogger(self.logname)
		msg.SetType(mtype)
		msg.SetPid(self.pid)
		msg.SetSeverity(level)
		msg.SetHostname(self.hostname)
		if len(payload) > 0 {
			msg.SetPayload(payload)
		}
		err = addFields(msg, fields)
		if err != nil {
			return err
		}
		err = addFields(msg, caller)
		if err != nil {
			return err
		}
		err = self.encoder.EncodeMessageStream(msg, &stream)
		if err != nil {
			// Bug fix: log.Fatal does not format its arguments;
			// Fatalf is required for the (%s) verb. Note that Fatal*
			// exits the process, so the return below is unreachable —
			// preserved from the original control flow.
			log.Fatalf("ERROR: Could not encode log message (%s)", err)
			return err
		}
		err = self.sender.SendMessage(stream)
		if err != nil {
			log.Fatalf("ERROR: Could not send message (%s)", err)
			return err
		}
	}
	return nil
}
// record the lowest priority message
func (self HekaLogger) Info(mtype, msg string, fields Fields) (err error) {
	return self.Log(INFO, mtype, msg, fields)
}

// Debug records a DEBUG-severity message.
func (self HekaLogger) Debug(mtype, msg string, fields Fields) (err error) {
	return self.Log(DEBUG, mtype, msg, fields)
}

// Warn records a WARNING-severity message.
func (self HekaLogger) Warn(mtype, msg string, fields Fields) (err error) {
	return self.Log(WARNING, mtype, msg, fields)
}

// Error records an ERROR-severity message.
func (self HekaLogger) Error(mtype, msg string, fields Fields) (err error) {
	return self.Log(ERROR, mtype, msg, fields)
}

// record the Highest priority message, and include a printstack to STDERR
func (self HekaLogger) Critical(mtype, msg string, fields Fields) (err error) {
	debug.PrintStack()
	return self.Log(CRITICAL, mtype, msg, fields)
}
// o4fs
// vim: set tabstab=4 softtabstop=4 shiftwidth=4 noexpandtab
| jrconlin/mozsvc_util | heka_log.go | GO | mpl-2.0 | 5,411 |
"use strict";
const React = require('react');
const _ = require('lodash');
const TabActions = require('../../actions/tabActions');
const TabList = React.createClass({
propTypes: {
openTabs: React.PropTypes.arrayOf(React.PropTypes.object).isRequired,
activeTabIndex: React.PropTypes.number.isRequired
},
onTabClick: function(tabIndex, clickEvent) {
clickEvent.preventDefault();
TabActions.switchToTab(tabIndex);
},
createTabElement: function(tabData, index) {
const elementKey = `tab${index}`;
const elementClass = index === this.props.activeTabIndex ? 'active' : null;
const onClickFunc = _.partial(this.onTabClick, index);
return (
<li key={elementKey} role="presentation" className={elementClass}>
<a href="#" onClick={onClickFunc}>{tabData.title}</a>
</li>
);
},
render: function() {
const tabs = _.map(this.props.openTabs, this.createTabElement);
return (
<ul className="nav nav-pills">
{tabs}
</ul>
);
}
});
module.exports = TabList;
| amgaera/azure-storage-navigator | src/components/tabs/tabList.js | JavaScript | mpl-2.0 | 1,052 |
/* Canvas Palette class
Accepts touch events and draws palette on a canvas
*/
package com.codejockey.canvas.helperfiles;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.os.Vibrator;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.widget.ImageView;
import android.content.Context;
import com.codejockey.canvas.R;
/**
 * Color palette strip rendered on a canvas. Handles touch input for
 * selecting the ink color (tap), filling the whole drawing (long press),
 * and paging through color sets (horizontal swipe).
 */
public class Palette
{
    private static final String TAG = "CanvasPalette";

    private Context context;       // parent context, used for resources and vibrator
    private ImageView imageview;   // view the palette bitmap is drawn into
    private Bitmap bitmap;         // off-screen palette image
    private Canvas canvas = new Canvas ();
    private Paint paint = new Paint ();
    private Drawing drawing;       // the drawing surface whose ink/paper we control

    // Dimensions of the palette bitmap
    private int height = PALETTE_BOX;
    private int width = PALETTE_BOX * PALETTE_SIZE;

    // Array of color palettes of 50x50px, 8 at a time
    private static final int PALETTE_SIZE = 8;
    private static final int PALETTE_BOX = 50;
    private int [] colors;         // full color set loaded from resources
    private int color_base = 0;    // index of the first color in the visible page

    // If we stay in one color for this many msecs it's like a long press
    private static final int LONG_PRESS = 500;

    // Current drawn points
    private Point start_point; // Requested starting point
    // NOTE(review): end_point appears unused in this class — confirm
    // whether it can be removed.
    private Point end_point; // Requested ending point
    private int start_color; // Index into colors
    private long start_time; // For timing long pushes

    /**
     * Sets up the palette bitmap on the given image view and draws the
     * first page of colors.
     */
    public Palette (Context _context, ImageView _imageview, Drawing _drawing)
    {
        // Store parent context
        context = _context;
        imageview = _imageview;
        drawing = _drawing;

        // Get colors for palettes
        colors = context.getResources ().getIntArray (R.array.color_array);

        // Create our bitmap drawing area
        bitmap = Bitmap.createBitmap (width, height, Bitmap.Config.ARGB_8888);

        // Draw the bitmap on the image
        imageview.setImageBitmap (bitmap);
        imageview.setScaleType (ImageView.ScaleType.FIT_XY);

        // Create our drawing objects
        canvas.setBitmap (bitmap);
        redraw ();
    }

    /**
     * Entry point for touch events. Gesture detector handles long press
     * and single tap; everything else falls through to the manual
     * down/move/up handlers below.
     */
    public void onTouchEvent (MotionEvent event)
    {
        if (!gesture_detector.onTouchEvent (event)) {
            switch (event.getAction ()) {
            case MotionEvent.ACTION_DOWN:
                touch_down (event);
                break;
            case MotionEvent.ACTION_MOVE:
                touch_move (event);
                break;
            case MotionEvent.ACTION_UP:
                touch_up (event);
                break;
            }
        }
    }

    /** Repaints the currently visible page of color boxes. */
    public void redraw ()
    {
        int color_index = color_base;
        int box_start = 0;
        for (int box = 0; box < PALETTE_SIZE; box++) {
            paint.setColor (colors [color_index++]);
            canvas.drawRect (box_start, 0, box_start + PALETTE_BOX, height, paint);
            box_start += PALETTE_BOX;
        }
        imageview.invalidate ();
    }

    // Private methods
    // ---------------------------------------------------------------------

    private GestureDetector gesture_detector = new GestureDetector (
        new GestureDetector.SimpleOnGestureListener () {
            // Long press
            // - in palette means fill whole canvas with drawing color
            public void onLongPress (MotionEvent event) {
                if (start_color >= 0) {
                    Log.d (TAG, "Palette OnLongPress -- erase");
                    touch_fill (event);
                }
            }

            // Single tap
            // - in palette means select color
            public boolean onSingleTapUp (MotionEvent event) {
                touch_select (event);
                return true;
            }
        }
    );

    /** Records the touch-down position, color box and timestamp. */
    private void touch_down (MotionEvent event)
    {
        start_point = get_point (event);
        start_color = get_color (start_point);
        start_time = System.currentTimeMillis ();
        Log.d (TAG, "Start color=" + start_color);
    }

    // Returns index into color array, or -1 if no valid selection
    private int get_color (Point point)
    {
        if (point.y < height)
            return color_base + (point.x / PALETTE_BOX);
        else
            return -1;
    }

    /**
     * While moving: leaving the original color box cancels the pending
     * long press; lingering in it past LONG_PRESS ms simulates one.
     */
    private void touch_move (MotionEvent event)
    {
        // Get current point, color, and time
        Point point = get_point (event);
        int color = get_color (point);

        if (color != start_color) {
            // Invalidate start color if we left the box
            start_color = -1;
            Log.d (TAG, "Invalidated start color");
        }
        else
        if (color >= 0
            && System.currentTimeMillis () - start_time > LONG_PRESS) {
            Log.d (TAG, "Simulated long press in color -- erase");
            touch_fill (event);
        }
    }

    // End the line
    // A horizontal drag of more than one box width pages the palette
    // left or right (wrapping around the full color array).
    private void touch_up (MotionEvent event)
    {
        Point point = get_point (event);
        int delta = point.x - start_point.x;
        if (Math.abs (delta) > PALETTE_BOX) {
            if (delta > 0){
                Log.d (TAG, "Right color swipe");
                color_base -= PALETTE_SIZE;
            }
            else{
                Log.d (TAG, "Left color swipe");
                color_base += PALETTE_SIZE;
            }
            color_base = (color_base + colors.length) % colors.length;
            Log.d (TAG, " -- index=" + color_base);
            redraw ();
        }
    }

    // Fill the canvas the current ink color
    // Also resets the ink to a contrasting black/white and invalidates
    // the pending touch so the fill does not repeat.
    public void touch_fill (MotionEvent event)
    {
        Point point = get_point (event);
        int color = get_color (point);
        if (color >= 0) {
            drawing.setPaper (colors [color]);
            drawing.erase ();
            imageview.invalidate ();
            vibrate ();

            // Set new ink to black or white
            if (colors [color] == Color.BLACK)
                drawing.setInk (Color.WHITE);
            else
                drawing.setInk (Color.BLACK);

            // Invalidate touch down color so we don't repeat the fill
            start_color = -1;
        }
    }

    // Select the tapped color as the current ink color
    public void touch_select (MotionEvent event)
    {
        Point point = get_point (event);
        int color = get_color (point);
        if (color >= 0) {
            drawing.setInk (colors [color]);
            vibrate ();
        }
    }

    // Convert motion event coordinates into point in our drawing
    private Point get_point (MotionEvent event)
    {
        Point point = new Point ();
        point.x = (int) (event.getX () * width / imageview.getWidth ());
        point.y = (int) (event.getY () * height / imageview.getHeight ());
        return point;
    }

    // Calculate distance in pixels between two knots
    // NOTE(review): currently unused within this class — confirm callers
    // before removing.
    private float distance (Point p1, Point p2)
    {
        float distance = (float) Math.sqrt (
            (p2.x - p1.x) * (p2.x - p1.x)
            + (p2.y - p1.y) * (p2.y - p1.y));
        return distance;
    }

    /** Short haptic feedback for selections and fills. */
    private void vibrate ()
    {
        Vibrator mVibrator;
        mVibrator = (Vibrator) context.getSystemService (Context.VIBRATOR_SERVICE);
        mVibrator.vibrate (10);
    }

    /** Debug helper: logs a label together with a point's coordinates. */
    private void trace (String s, Point p)
    {
        Log.d (TAG, s + " " + p.x + "/" + p.y);
    }
}
| CodeJockey/Canvas | app/src/main/java/com/codejockey/canvas/helperfiles/Palette.java | Java | mpl-2.0 | 7,576 |
#include <Ice/Graphics/Renderer.hpp>
#include <Ice/Graphics/Vertex.hpp>
#include <Ice/System/Window.hpp>
#include <Ice/Maths/Maths.hpp>
namespace Ice
{
    // Sets up default GL state, compiles the basic shader and registers
    // its transform uniforms, then positions the demo cube and the view.
    Renderer::Renderer()
    {
        // Set the default clear colour.
        glClearColor(0.2f, 0.3f, 0.3f, 1.0f);

        // Tell OpenGL which face to cull.
        // glFrontFace( GL_CW );
        // glCullFace ( GL_FRONT );

        // Owned heap allocation; released in the destructor.
        _shader = new Shader("data/shaders/basic.vert",
                             "data/shaders/basic.frag");

        _shader->RegisterUniform("model");
        _shader->RegisterUniform("view");
        _shader->RegisterUniform("projection");

        // NOTE(review): `indices` and `textures` are built but not used —
        // the model-loading call below is commented out. Confirm whether
        // they should be wired into _cube_model or removed.
        std::vector<GLuint> indices = {
            0, 1, 3,
            3, 1, 2
        };

        std::vector<Texture> textures = { load_texture("data/test/box.jpg", TextureType::Diffuse) };

        //_cube_model.loadModel
        _cube.SetTranslation({ 0.0f, 0.0f, 0.0f });
        _cube.SetScale({ 1.0f, 1.0f, 1.0f });
        // _cube.SetRotation ( { 2.0f, 1.0f, 0.0f } );

        // _view = look_at( { 1.0f, 2.0f, 4.0f },
        //                  { 1.0f, 0.0f, 0.0f },
        //                  { 0.0f, 1.0f, 0.0f } );
        _view = translate({ 0.0f, 0.0f, 0.0f });
    }

    // Releases the shader allocated in the constructor.
    Renderer::~Renderer()
    {
        delete _shader;
    }

    // Renders one frame: applies the polygon/depth settings requested by
    // the context, animates the cube along a unit circle using the GLFW
    // timer, and uploads the model/view/projection matrices.
    void Renderer::Render(const RenderContext& context)
    {
        context.wire_frame
            ? glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
            : glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);

        context.has_depth
            ? glEnable(GL_DEPTH_TEST)
            : glDisable(GL_DEPTH_TEST);

        _cube.SetTranslation({ sinf(glfwGetTime()), cosf(glfwGetTime()), 0.0f });
        // _cube.SetScale ( { sinf( glfwGetTime() ), cosf( glfwGetTime() ), 0.0f } );
        // _view = translate( { sinf( glfwGetTime() ), cosf( glfwGetTime() ), -2.0f } );

        _shader->Bind();
        _shader->UpdateUniform("model", _cube.GetTransform());
        _shader->UpdateUniform("view", _view);
        _shader->UpdateUniform("projection", context.projection);

        //_cube_model.Draw(_shader);
    }

    // Clears both the colour and depth buffers.
    void Renderer::Clear()
    {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    }
}
angular.module("app").factory('Helpers', function() {
var service = {
replaceEmptyStrings: function(object) {
for (var i in object) {
if (object.hasOwnProperty(i)) {
if ((typeof object[i] === 'string' || object[i] instanceof String) && object[i] === "") {
object[i] = null;
}
}
}
return object;
},
};
return service;
});
| aksareen/balrog | ui/app/js/services/helper_service.js | JavaScript | mpl-2.0 | 403 |
package zmq.proxy;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import zmq.Ctx;
import zmq.Msg;
import zmq.SocketBase;
import zmq.ZMQ;
import zmq.util.Utils;
/**
 * Verifies that {@code ZMQ.proxy} works with a single socket used as both
 * frontend and backend: a REP socket proxied onto itself simply echoes
 * every request back to the REQ client.
 */
public class ProxySingleSocketTest
{
    /** Server thread: binds a REP socket and proxies it onto itself. */
    private static class ServerTask implements Runnable
    {
        private final Ctx    ctx;
        private final String host;

        public ServerTask(Ctx ctx, String host)
        {
            this.ctx = ctx;
            this.host = host;
        }

        @Override
        public void run()
        {
            SocketBase rep = ZMQ.socket(ctx, ZMQ.ZMQ_REP);
            assertThat(rep, notNullValue());
            boolean rc = ZMQ.bind(rep, host);
            assertThat(rc, is(true));

            // Control socket receives terminate command from main over inproc
            SocketBase control = ZMQ.socket(ctx, ZMQ.ZMQ_SUB);
            ZMQ.setSocketOption(control, ZMQ.ZMQ_SUBSCRIBE, "");
            rc = ZMQ.connect(control, "inproc://control");
            assertThat(rc, is(true));

            // Use rep as both frontend and backend
            ZMQ.proxy(rep, rep, null, control);

            ZMQ.close(rep);
            ZMQ.close(control);
        }
    }

    @Test
    public void testProxySingleSocket() throws IOException, InterruptedException
    {
        int port = Utils.findOpenPort();
        String host = "tcp://127.0.0.1:" + port;

        // The main thread simply starts several clients and a server, and then
        // waits for the server to finish.
        Ctx ctx = ZMQ.createContext();

        SocketBase req = ZMQ.socket(ctx, ZMQ.ZMQ_REQ);
        assertThat(req, notNullValue());
        boolean rc = ZMQ.connect(req, host);
        assertThat(rc, is(true));

        // Control socket receives terminate command from main over inproc
        SocketBase control = ZMQ.socket(ctx, ZMQ.ZMQ_PUB);
        rc = ZMQ.bind(control, "inproc://control");
        assertThat(rc, is(true));

        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.submit(new ServerTask(ctx, host));

        // Each request must come back unchanged, proving the proxy echoes.
        int ret = ZMQ.send(req, "msg1", 0);
        assertThat(ret, is(4));
        System.out.print(".");
        Msg msg = ZMQ.recv(req, 0);
        System.out.print(".");
        assertThat(msg, notNullValue());
        assertThat(new String(msg.data(), ZMQ.CHARSET), is("msg1"));

        ret = ZMQ.send(req, "msg22", 0);
        assertThat(ret, is(5));
        System.out.print(".");
        msg = ZMQ.recv(req, 0);
        System.out.print(".");
        assertThat(msg, notNullValue());
        assertThat(new String(msg.data(), ZMQ.CHARSET), is("msg22"));

        // Ask the proxy loop to shut down, then tear everything down.
        ret = ZMQ.send(control, "TERMINATE", 0);
        assertThat(ret, is(9));
        System.out.println(".");

        ZMQ.close(control);
        ZMQ.close(req);
        executor.shutdown();
        executor.awaitTermination(30, TimeUnit.SECONDS);
        ZMQ.term(ctx);
    }
}
| barbocz/zeromq | src/test/java/zmq/proxy/ProxySingleSocketTest.java | Java | mpl-2.0 | 3,184 |
// Login form component: posts the credentials to the FuretUI login
// endpoint, applies the returned global/menu/locale state to the store,
// and redirects on success.
defineComponent('login', {
    prototype: {
        data() {
            return {
                password: '',
                username: '',
                errors: [],
            }
        },
        computed: {
            // The submit action stays disabled until both fields are filled.
            is_not_clickable() {
                return !this.username || !this.password;
            }
        },
        methods: {
            // Payload for the login endpoint.
            getBody() {
                return {
                    login: this.username,
                    password: this.password,
                };
            },
            logIn() {
                if (this.is_not_clickable) return;
                this.errors = [];
                axios.post('/furetui/login', this.getBody())
                    .then((res) => {
                        if (res.data.global !== undefined) this.$store.commit('UPDATE_GLOBAL', res.data.global);
                        if (res.data.menus !== undefined) this.$store.commit('UPDATE_MENUS', res.data.menus);
                        if (res.data.lang !== undefined) updateLang(res.data.lang);
                        if (res.data.langs !== undefined) updateLocales(res.data.langs);
                        const userName = this.$store.state.global.userName;
                        // NOTE(review): "Your are logged" looks like a typo in
                        // the user-facing copy — confirm intended wording.
                        this.$notify({
                            title: `Welcome ${userName}`,
                            text: 'Your are logged',
                            duration: 5000,
                        });
                        this.$store.commit('LOGIN')
                        if (this.$route.query.redirect !== undefined) this.$router.push(this.$route.query.redirect);
                        else this.$router.push('/');
                    })
                    .catch((error) => {
                        // Robustness fix: a network failure or non-JSON reply
                        // has no `error.response.data.errors`; guard before
                        // iterating so the handler itself cannot throw.
                        const serverErrors = (error.response
                            && error.response.data
                            && error.response.data.errors) || [];
                        serverErrors.forEach((err) => {
                            this.errors.push(this.$t('components.login.errors.' + err.name));
                        });
                    })
            },
        },
    },
});
| AnyBlok/anyblok_furetui | anyblok_furetui/auth/components/login.js | JavaScript | mpl-2.0 | 1,656 |
package com.servinglynk.hmis.dao;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import javax.persistence.EntityManager;
import org.hibernate.Session;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Component;
import com.servinglynk.hmis.entity.HousingUnitReservationEntity;
import com.servinglynk.hmis.service.SecurityContextUtil;
@Component
public class HousingUnitReservationDaoImpl implements HousingUnitReservationDao {

    @Autowired EntityManager entityManager;

    /**
     * Apply the optional date-window restrictions shared by both query
     * methods:
     * - both bounds given: either reservation date falls inside the window;
     * - only one bound given: the reservation span must straddle that date.
     */
    private void applyDateWindow(DetachedCriteria criteria, Date fromdate, Date todate) {
        if (fromdate != null && todate != null) {
            Disjunction disjunction = Restrictions.disjunction();
            disjunction.add(Restrictions.between("reservationStateDate", fromdate, todate));
            disjunction.add(Restrictions.between("reservationEndDateDate", fromdate, todate));
            criteria.add(disjunction);
        } else if (fromdate != null) {
            criteria.add(Restrictions.lt("reservationStateDate", fromdate));
            criteria.add(Restrictions.gt("reservationEndDateDate", fromdate));
        } else if (todate != null) {
            criteria.add(Restrictions.lt("reservationStateDate", todate));
            criteria.add(Restrictions.gt("reservationEndDateDate", todate));
        }
    }

    /**
     * Run the criteria as a paged query and return the page together with
     * the total row count.
     *
     * NOTE: the original code assigned {@code countCriteria = criteria},
     * which is aliasing, not a copy — the rowCount projection is added to
     * the SAME criteria instance, which only works because it happens
     * AFTER the row query has executed. That ordering is preserved here.
     */
    private PageImpl<HousingUnitReservationEntity> executePaged(DetachedCriteria criteria, Pageable pageable) {
        Session session = entityManager.unwrap(Session.class);
        List<HousingUnitReservationEntity> entities = criteria.getExecutableCriteria(session)
                .setMaxResults(pageable.getPageSize())
                .setFirstResult(pageable.getPageSize() * pageable.getPageNumber())
                .list();
        criteria.setProjection(Projections.rowCount());
        Long count = (Long) criteria.getExecutableCriteria(session).uniqueResult();
        return new PageImpl<HousingUnitReservationEntity>(entities, pageable, count);
    }

    /**
     * Page through the (non-deleted) reservations of one housing unit,
     * optionally restricted to a date window, scoped to the caller's
     * project group.
     */
    public PageImpl<HousingUnitReservationEntity> getHousingReservations(UUID housingUnitId, Date fromdate, Date todate,
            Pageable pageable) {
        DetachedCriteria criteria = DetachedCriteria.forClass(HousingUnitReservationEntity.class);
        criteria.createAlias("housingUnit", "housingUnit");
        criteria.add(Restrictions.eq("housingUnit.id", housingUnitId));
        applyDateWindow(criteria, fromdate, todate);
        criteria.add(Restrictions.eq("deleted", false));
        criteria.add(Restrictions.eq("projectGroupCode", SecurityContextUtil.getUserProjectGroup()));
        return executePaged(criteria, pageable);
    }

    /**
     * Page through the (non-deleted) reservations of one deduplicated
     * client, optionally restricted to a date window, scoped to the
     * caller's project group.
     */
    @Override
    public Page<HousingUnitReservationEntity> getClientHousingUnitReservations(UUID dedupClientId, Date fromdate,
            Date todate, Pageable pageable) {
        DetachedCriteria criteria = DetachedCriteria.forClass(HousingUnitReservationEntity.class);
        criteria.add(Restrictions.eq("dedupClientId", dedupClientId));
        applyDateWindow(criteria, fromdate, todate);
        criteria.add(Restrictions.eq("deleted", false));
        criteria.add(Restrictions.eq("projectGroupCode", SecurityContextUtil.getUserProjectGroup()));
        return executePaged(criteria, pageable);
    }
}
| servinglynk/servinglynk-hmis | bed-inventory/src/main/java/com/servinglynk/hmis/dao/HousingUnitReservationDaoImpl.java | Java | mpl-2.0 | 4,262 |
/*
 * Any copyright is dedicated to the Public Domain.
 * http://creativecommons.org/licenses/publicdomain/
 */

var gTestfile = 'ownkeys-trap-duplicates.js';
var BUGNUMBER = 1293995;
var summary =
  "Scripted proxies' [[OwnPropertyKeys]] should not throw if the trap " +
  "implementation returns duplicate properties and the object is " +
  "non-extensible or has non-configurable properties";

print(BUGNUMBER + ": " + summary);

/**************
 * BEGIN TEST *
 **************/

// Non-extensible target, ownKeys trap returns a duplicated key: the
// duplicates must be reported back as-is rather than rejected.
var target = Object.preventExtensions({ a: 1 });
var proxy = new Proxy(target, { ownKeys(t) { return ["a", "a"]; } });
assertDeepEq(Object.getOwnPropertyNames(proxy), ["a", "a"]);

// Same check with a frozen target, i.e. a non-configurable property "a".
target = Object.freeze({ a: 1 });
proxy = new Proxy(target, { ownKeys(t) { return ["a", "a"]; } });
assertDeepEq(Object.getOwnPropertyNames(proxy), ["a", "a"]);

/******************************************************************************/

// reportCompare is only present under the SpiderMonkey test harness.
if (typeof reportCompare === "function")
  reportCompare(true, true);

print("Tests complete");
| Yukarumya/Yukarum-Redfoxes | js/src/tests/ecma_6/Proxy/ownkeys-trap-duplicates.js | JavaScript | mpl-2.0 | 1,004 |
// Copyright (C) 2014 The Syncthing Authors.
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this file,
// You can obtain one at http://mozilla.org/MPL/2.0/.
package model
import (
"errors"
"fmt"
"io/ioutil"
"math/rand"
"os"
"path/filepath"
"sort"
"time"
"github.com/syncthing/protocol"
"github.com/syncthing/syncthing/lib/config"
"github.com/syncthing/syncthing/lib/db"
"github.com/syncthing/syncthing/lib/events"
"github.com/syncthing/syncthing/lib/ignore"
"github.com/syncthing/syncthing/lib/osutil"
"github.com/syncthing/syncthing/lib/scanner"
"github.com/syncthing/syncthing/lib/symlinks"
"github.com/syncthing/syncthing/lib/sync"
"github.com/syncthing/syncthing/lib/versioner"
)
// TODO: Stop on errors

// Scheduling intervals for the pull loop in Serve.
const (
	pauseIntv     = 60 * time.Second // back-off after repeated pulls without progress
	nextPullIntv  = 10 * time.Second // regular delay between pulls
	shortPullIntv = time.Second      // quick retry, e.g. right after a remote index update
)

// A pullBlockState is passed to the puller routine for each block that needs
// to be fetched.
type pullBlockState struct {
	*sharedPullerState
	block protocol.BlockInfo
}

// A copyBlocksState is passed to copy routine if the file has blocks to be
// copied.
type copyBlocksState struct {
	*sharedPullerState
	blocks []protocol.BlockInfo
}

// Which filemode bits to preserve
const retainBits = os.ModeSetgid | os.ModeSetuid | os.ModeSticky

var (
	activity    = newDeviceActivity()
	errNoDevice = errors.New("no available source device")
)

// Job types carried by dbUpdateJob; consumed by dbUpdaterRoutine.
const (
	dbUpdateHandleDir = iota
	dbUpdateDeleteDir
	dbUpdateHandleFile
	dbUpdateDeleteFile
	dbUpdateShortcutFile
)

// Fallback copier/puller routine counts, used by newRWFolder when the
// folder configuration leaves them at zero.
const (
	defaultCopiers = 1
	defaultPullers = 16
)

// A dbUpdateJob describes a single database update to perform for a file,
// tagged with one of the dbUpdate* job types above.
type dbUpdateJob struct {
	file    protocol.FileInfo
	jobType int
}
// rwFolder handles a read-write folder: it periodically scans the folder on
// disk and pulls needed changes from remote devices. It is driven by Serve()
// and stopped via Stop().
type rwFolder struct {
	stateTracker

	model            *Model
	progressEmitter  *ProgressEmitter
	virtualMtimeRepo *db.VirtualMtimeRepo

	// Static configuration, populated by newRWFolder from the folder config.
	folder      string        // folder ID
	dir         string        // folder path on disk
	scanIntv    time.Duration // zero disables periodic rescans
	versioner   versioner.Versioner
	ignorePerms bool
	copiers     int // number of concurrent copier routines per iteration
	pullers     int // number of concurrent puller routines per iteration
	shortID     uint64
	order       config.PullOrder

	stop      chan struct{}
	queue     *jobQueue
	dbUpdates chan dbUpdateJob // recreated for each puller iteration
	scanTimer *time.Timer
	pullTimer *time.Timer
	delayScan chan time.Duration
	scanNow   chan rescanRequest

	remoteIndex chan struct{} // An index update was received, we should re-evaluate needs

	errors    map[string]string // path -> error string
	errorsMut sync.Mutex
}
// newRWFolder constructs the read-write folder handler for the given folder
// configuration, owned by model m. Copier and puller routine counts fall
// back to the package defaults when the configuration leaves them at zero.
func newRWFolder(m *Model, shortID uint64, cfg config.FolderConfiguration) *rwFolder {
	copiers := cfg.Copiers
	if copiers == 0 {
		copiers = defaultCopiers
	}
	pullers := cfg.Pullers
	if pullers == 0 {
		pullers = defaultPullers
	}

	return &rwFolder{
		stateTracker: stateTracker{
			folder: cfg.ID,
			mut:    sync.NewMutex(),
		},

		model:            m,
		progressEmitter:  m.progressEmitter,
		virtualMtimeRepo: db.NewVirtualMtimeRepo(m.db, cfg.ID),

		folder:      cfg.ID,
		dir:         cfg.Path(),
		scanIntv:    time.Duration(cfg.RescanIntervalS) * time.Second,
		ignorePerms: cfg.IgnorePerms,
		copiers:     copiers,
		pullers:     pullers,
		shortID:     shortID,
		order:       cfg.Order,

		stop:  make(chan struct{}),
		queue: newJobQueue(),

		pullTimer: time.NewTimer(shortPullIntv),
		scanTimer: time.NewTimer(time.Millisecond), // The first scan should be done immediately.

		delayScan:   make(chan time.Duration),
		scanNow:     make(chan rescanRequest),
		remoteIndex: make(chan struct{}, 1), // This needs to be 1-buffered so that we queue a notification if we're busy doing a pull when it comes.

		errorsMut: sync.NewMutex(),
	}
}
// ignorePermissions reports whether permission bits should be disregarded
// for the given file: either the folder itself is configured to ignore
// permissions, or the file carries the FlagNoPermBits flag.
func (p *rwFolder) ignorePermissions(file protocol.FileInfo) bool {
	if p.ignorePerms {
		return true
	}
	return file.Flags&protocol.FlagNoPermBits != 0
}
// Serve will run scans and pulls. It will return when Stop()ed or on a
// critical error.
func (p *rwFolder) Serve() {
	if debug {
		l.Debugln(p, "starting")
		defer l.Debugln(p, "exiting")
	}

	defer func() {
		p.pullTimer.Stop()
		p.scanTimer.Stop()
		// TODO: Should there be an actual FolderStopped state?
		p.setState(FolderIdle)
	}()

	// prevVer is the last remote local version we fully synced to; resetting
	// it to zero forces the next pull to re-evaluate everything.
	var prevVer int64
	var prevIgnoreHash string

	// rescheduleScan arms the scan timer with a randomized interval, unless
	// periodic scanning is disabled (scanIntv == 0).
	rescheduleScan := func() {
		if p.scanIntv == 0 {
			// We should not run scans, so it should not be rescheduled.
			return
		}
		// Sleep a random time between 3/4 and 5/4 of the configured interval.
		sleepNanos := (p.scanIntv.Nanoseconds()*3 + rand.Int63n(2*p.scanIntv.Nanoseconds())) / 4
		intv := time.Duration(sleepNanos) * time.Nanosecond
		if debug {
			l.Debugln(p, "next rescan in", intv)
		}
		p.scanTimer.Reset(intv)
	}

	// We don't start pulling files until a scan has been completed.
	initialScanCompleted := false

	for {
		select {
		case <-p.stop:
			return

		case <-p.remoteIndex:
			prevVer = 0
			p.pullTimer.Reset(shortPullIntv)
			if debug {
				l.Debugln(p, "remote index updated, rescheduling pull")
			}

		case <-p.pullTimer.C:
			if !initialScanCompleted {
				if debug {
					l.Debugln(p, "skip (initial)")
				}
				p.pullTimer.Reset(nextPullIntv)
				continue
			}

			p.model.fmut.RLock()
			curIgnores := p.model.folderIgnores[p.folder]
			p.model.fmut.RUnlock()

			if newHash := curIgnores.Hash(); newHash != prevIgnoreHash {
				// The ignore patterns have changed. We need to re-evaluate if
				// there are files we need now that were ignored before.
				if debug {
					l.Debugln(p, "ignore patterns have changed, resetting prevVer")
				}
				prevVer = 0
				prevIgnoreHash = newHash
			}

			// RemoteLocalVersion() is a fast call, doesn't touch the database.
			curVer, ok := p.model.RemoteLocalVersion(p.folder)
			if !ok || curVer == prevVer {
				if debug {
					l.Debugln(p, "skip (curVer == prevVer)", prevVer, ok)
				}
				p.pullTimer.Reset(nextPullIntv)
				continue
			}

			if debug {
				l.Debugln(p, "pulling", prevVer, curVer)
			}

			p.setState(FolderSyncing)
			p.clearErrors()
			tries := 0

			// Run puller iterations until one makes no changes (we are in
			// sync) or we give up after too many attempts.
			for {
				tries++

				changed := p.pullerIteration(curIgnores)
				if debug {
					l.Debugln(p, "changed", changed)
				}

				if changed == 0 {
					// No files were changed by the puller, so we are in
					// sync. Remember the local version number and
					// schedule a resync a little bit into the future.

					if lv, ok := p.model.RemoteLocalVersion(p.folder); ok && lv < curVer {
						// There's a corner case where the device we needed
						// files from disconnected during the puller
						// iteration. The files will have been removed from
						// the index, so we've concluded that we don't need
						// them, but at the same time we have the local
						// version that includes those files in curVer. So we
						// catch the case that localVersion might have
						// decreased here.
						l.Debugln(p, "adjusting curVer", lv)
						curVer = lv
					}
					prevVer = curVer
					if debug {
						l.Debugln(p, "next pull in", nextPullIntv)
					}
					p.pullTimer.Reset(nextPullIntv)
					break
				}

				if tries > 10 {
					// We've tried a bunch of times to get in sync, but
					// we're not making it. Probably there are write
					// errors preventing us. Flag this with a warning and
					// wait a bit longer before retrying.
					l.Infof("Folder %q isn't making progress. Pausing puller for %v.", p.folder, pauseIntv)
					if debug {
						l.Debugln(p, "next pull in", pauseIntv)
					}

					if folderErrors := p.currentErrors(); len(folderErrors) > 0 {
						events.Default.Log(events.FolderErrors, map[string]interface{}{
							"folder": p.folder,
							"errors": folderErrors,
						})
					}

					p.pullTimer.Reset(pauseIntv)
					break
				}
			}
			p.setState(FolderIdle)

		// The reason for running the scanner from within the puller is that
		// this is the easiest way to make sure we are not doing both at the
		// same time.
		case <-p.scanTimer.C:
			if err := p.model.CheckFolderHealth(p.folder); err != nil {
				l.Infoln("Skipping folder", p.folder, "scan due to folder error:", err)
				rescheduleScan()
				continue
			}

			if debug {
				l.Debugln(p, "rescan")
			}

			if err := p.model.internalScanFolderSubs(p.folder, nil); err != nil {
				// Potentially sets the error twice, once in the scanner just
				// by doing a check, and once here, if the error returned is
				// the same one as returned by CheckFolderHealth, though
				// duplicate set is handled by setError.
				p.setError(err)
				rescheduleScan()
				continue
			}

			if p.scanIntv > 0 {
				rescheduleScan()
			}
			if !initialScanCompleted {
				l.Infoln("Completed initial scan (rw) of folder", p.folder)
				initialScanCompleted = true
			}

		case req := <-p.scanNow:
			// Forced scan from Scan(); the result is reported back on req.err.
			if err := p.model.CheckFolderHealth(p.folder); err != nil {
				l.Infoln("Skipping folder", p.folder, "scan due to folder error:", err)
				req.err <- err
				continue
			}

			if debug {
				l.Debugln(p, "forced rescan")
			}

			if err := p.model.internalScanFolderSubs(p.folder, req.subs); err != nil {
				// Potentially sets the error twice, once in the scanner just
				// by doing a check, and once here, if the error returned is
				// the same one as returned by CheckFolderHealth, though
				// duplicate set is handled by setError.
				p.setError(err)
				req.err <- err
				continue
			}

			req.err <- nil

		case next := <-p.delayScan:
			p.scanTimer.Reset(next)
		}
	}
}
// Stop signals Serve to return by closing the stop channel.
func (p *rwFolder) Stop() {
	close(p.stop)
}
// IndexUpdated is called when a remote index update has been received; it
// nudges the remoteIndex channel so the Serve loop reschedules a pull.
func (p *rwFolder) IndexUpdated() {
	select {
	case p.remoteIndex <- struct{}{}:
	default:
		// We might be busy doing a pull and thus not reading from this
		// channel. The channel is 1-buffered, so one notification will be
		// queued to ensure we recheck after the pull, but beyond that we must
		// make sure to not block index receiving.
	}
}
// Scan requests a rescan of the given subdirectories (nil meaning the whole
// folder) and blocks until the scan has completed, returning its error.
func (p *rwFolder) Scan(subs []string) error {
	errc := make(chan error)
	p.scanNow <- rescanRequest{subs: subs, err: errc}
	return <-errc
}
// String returns a log-friendly identifier of the form "rwFolder/<id>@<ptr>".
func (p *rwFolder) String() string {
	return fmt.Sprintf("rwFolder/%s@%p", p.folder, p)
}
// pullerIteration runs a single puller iteration for the given folder and
// returns the number items that should have been synced (even those that
// might have failed). One puller iteration handles all files currently
// flagged as needed in the folder.
func (p *rwFolder) pullerIteration(ignores *ignore.Matcher) int {
	pullChan := make(chan pullBlockState)
	copyChan := make(chan copyBlocksState)
	finisherChan := make(chan *sharedPullerState)

	updateWg := sync.NewWaitGroup()
	copyWg := sync.NewWaitGroup()
	pullWg := sync.NewWaitGroup()
	doneWg := sync.NewWaitGroup()

	if debug {
		l.Debugln(p, "c", p.copiers, "p", p.pullers)
	}

	p.dbUpdates = make(chan dbUpdateJob)
	updateWg.Add(1)
	go func() {
		// dbUpdaterRoutine finishes when p.dbUpdates is closed
		p.dbUpdaterRoutine()
		updateWg.Done()
	}()

	for i := 0; i < p.copiers; i++ {
		copyWg.Add(1)
		go func() {
			// copierRoutine finishes when copyChan is closed
			p.copierRoutine(copyChan, pullChan, finisherChan)
			copyWg.Done()
		}()
	}

	for i := 0; i < p.pullers; i++ {
		pullWg.Add(1)
		go func() {
			// pullerRoutine finishes when pullChan is closed
			p.pullerRoutine(pullChan, finisherChan)
			pullWg.Done()
		}()
	}

	doneWg.Add(1)
	// finisherRoutine finishes when finisherChan is closed
	go func() {
		p.finisherRoutine(finisherChan)
		doneWg.Done()
	}()

	p.model.fmut.RLock()
	folderFiles := p.model.folderFiles[p.folder]
	p.model.fmut.RUnlock()

	// !!!
	// WithNeed takes a database snapshot (by necessity). By the time we've
	// handled a bunch of files it might have become out of date and we might
	// be attempting to sync with an old version of a file...
	// !!!

	changed := 0
	pullFileSize := int64(0)

	fileDeletions := map[string]protocol.FileInfo{}
	dirDeletions := []protocol.FileInfo{}
	buckets := map[string][]protocol.FileInfo{}

	folderFiles.WithNeed(protocol.LocalDeviceID, func(intf db.FileIntf) bool {
		// Needed items are delivered sorted lexicographically. We'll handle
		// directories as they come along, so parents before children. Files
		// are queued and the order may be changed later.
		file := intf.(protocol.FileInfo)

		if ignores.Match(file.Name) {
			// This is an ignored file. Skip it, continue iteration.
			return true
		}

		if debug {
			l.Debugln(p, "handling", file.Name)
		}

		switch {
		case file.IsDeleted():
			// A deleted file, directory or symlink
			if file.IsDirectory() {
				dirDeletions = append(dirDeletions, file)
			} else {
				fileDeletions[file.Name] = file
				df, ok := p.model.CurrentFolderFile(p.folder, file.Name)
				// Local file can be already deleted, but with a lower version
				// number, hence the deletion coming in again as part of
				// WithNeed, furthermore, the file can simply be of the wrong
				// type if we haven't yet managed to pull it.
				if ok && !df.IsDeleted() && !df.IsSymlink() && !df.IsDirectory() {
					// Put files into buckets per first hash
					key := string(df.Blocks[0].Hash)
					buckets[key] = append(buckets[key], df)
				}
			}
		case file.IsDirectory() && !file.IsSymlink():
			// A new or changed directory
			if debug {
				l.Debugln("Creating directory", file.Name)
			}
			p.handleDir(file)
		default:
			// A new or changed file or symlink. This is the only case where we
			// do stuff concurrently in the background
			pullFileSize += file.Size()
			p.queue.Push(file.Name, file.Size(), file.Modified)
		}

		changed++
		return true
	})

	// Check if we are able to store all files on disk
	if pullFileSize > 0 {
		folder, ok := p.model.cfg.Folders()[p.folder]
		if ok {
			if free, err := osutil.DiskFreeBytes(folder.Path()); err == nil && free < pullFileSize {
				l.Infof("Puller (folder %q): insufficient disk space available to pull %d files (%.2fMB)", p.folder, changed, float64(pullFileSize)/1024/1024)
				return 0
			}
		}
	}

	// Reorder the file queue according to configuration

	switch p.order {
	case config.OrderRandom:
		p.queue.Shuffle()
	case config.OrderAlphabetic:
		// The queue is already in alphabetic order.
	case config.OrderSmallestFirst:
		p.queue.SortSmallestFirst()
	case config.OrderLargestFirst:
		p.queue.SortLargestFirst()
	case config.OrderOldestFirst:
		p.queue.SortOldestFirst()
	case config.OrderNewestFirst:
		// Fixed: this previously called SortOldestFirst, making the
		// "newest first" pull order behave identically to "oldest first".
		p.queue.SortNewestFirst()
	}

	// Process the file queue
nextFile:
	for {
		fileName, ok := p.queue.Pop()
		if !ok {
			break
		}

		f, ok := p.model.CurrentGlobalFile(p.folder, fileName)
		if !ok {
			// File is no longer in the index. Mark it as done and drop it.
			p.queue.Done(fileName)
			continue
		}

		// Local file can be already deleted, but with a lower version
		// number, hence the deletion coming in again as part of
		// WithNeed, furthermore, the file can simply be of the wrong type if
		// the global index changed while we were processing this iteration.
		if !f.IsDeleted() && !f.IsSymlink() && !f.IsDirectory() {
			key := string(f.Blocks[0].Hash)
			for i, candidate := range buckets[key] {
				if scanner.BlocksEqual(candidate.Blocks, f.Blocks) {
					// Remove the candidate from the bucket
					lidx := len(buckets[key]) - 1
					buckets[key][i] = buckets[key][lidx]
					buckets[key] = buckets[key][:lidx]

					// candidate is our current state of the file, where as the
					// desired state with the delete bit set is in the deletion
					// map.
					desired := fileDeletions[candidate.Name]
					// Remove the pending deletion (as we perform it by renaming)
					delete(fileDeletions, candidate.Name)

					p.renameFile(desired, f)

					p.queue.Done(fileName)
					continue nextFile
				}
			}
		}

		// Not a rename or a symlink, deal with it.
		p.handleFile(f, copyChan, finisherChan)
	}

	// Signal copy and puller routines that we are done with the in data for
	// this iteration. Wait for them to finish.
	close(copyChan)
	copyWg.Wait()
	close(pullChan)
	pullWg.Wait()

	// Signal the finisher chan that there will be no more input.
	close(finisherChan)

	// Wait for the finisherChan to finish.
	doneWg.Wait()

	// Deletions are performed last: files first, then their (now hopefully
	// empty) directories in reverse lexicographic order, children before
	// parents.
	for _, file := range fileDeletions {
		if debug {
			l.Debugln("Deleting file", file.Name)
		}
		p.deleteFile(file)
	}

	for i := range dirDeletions {
		dir := dirDeletions[len(dirDeletions)-i-1]
		if debug {
			l.Debugln("Deleting dir", dir.Name)
		}
		p.deleteDir(dir)
	}

	// Wait for db updates to complete
	close(p.dbUpdates)
	updateWg.Wait()

	return changed
}
// handleDir creates or updates the given directory
func (p *rwFolder) handleDir(file protocol.FileInfo) {
	// err is captured by the deferred ItemFinished event below.
	var err error
	events.Default.Log(events.ItemStarted, map[string]string{
		"folder": p.folder,
		"item":   file.Name,
		"type":   "dir",
		"action": "update",
	})
	defer func() {
		events.Default.Log(events.ItemFinished, map[string]interface{}{
			"folder": p.folder,
			"item":   file.Name,
			"error":  events.Error(err),
			"type":   "dir",
			"action": "update",
		})
	}()

	realName := filepath.Join(p.dir, file.Name)
	mode := os.FileMode(file.Flags & 0777)
	if p.ignorePermissions(file) {
		mode = 0777
	}

	if debug {
		curFile, _ := p.model.CurrentFolderFile(p.folder, file.Name)
		l.Debugf("need dir\n\t%v\n\t%v", file, curFile)
	}

	info, err := osutil.Lstat(realName)
	switch {
	// There is already something under that name, but it's a file/link.
	// Most likely a file/link is getting replaced with a directory.
	// Remove the file/link and fall through to directory creation.
	case err == nil && (!info.IsDir() || info.Mode()&os.ModeSymlink != 0):
		err = osutil.InWritableDir(osutil.Remove, realName)
		if err != nil {
			l.Infof("Puller (folder %q, dir %q): %v", p.folder, file.Name, err)
			p.newError(file.Name, err)
			return
		}
		fallthrough
	// The directory doesn't exist, so we create it with the right
	// mode bits from the start.
	case err != nil && os.IsNotExist(err):
		// We declare a function that acts on only the path name, so
		// we can pass it to InWritableDir. We use a regular Mkdir and
		// not MkdirAll because the parent should already exist.
		mkdir := func(path string) error {
			err = os.Mkdir(path, mode)
			if err != nil || p.ignorePermissions(file) {
				return err
			}

			// Stat the directory so we can check its permissions.
			info, err := osutil.Lstat(path)
			if err != nil {
				return err
			}

			// Mask for the bits we want to preserve and add them in to the
			// directories permissions.
			return os.Chmod(path, mode|(info.Mode()&retainBits))
		}

		if err = osutil.InWritableDir(mkdir, realName); err == nil {
			p.dbUpdates <- dbUpdateJob{file, dbUpdateHandleDir}
		} else {
			l.Infof("Puller (folder %q, dir %q): %v", p.folder, file.Name, err)
			p.newError(file.Name, err)
		}
		return
	// Weird error when stat()'ing the dir. Probably won't work to do
	// anything else with it if we can't even stat() it.
	case err != nil:
		l.Infof("Puller (folder %q, dir %q): %v", p.folder, file.Name, err)
		p.newError(file.Name, err)
		return
	}

	// The directory already exists, so we just correct the mode bits. (We
	// don't handle modification times on directories, because that sucks...)
	// It's OK to change mode bits on stuff within non-writable directories.
	if p.ignorePermissions(file) {
		p.dbUpdates <- dbUpdateJob{file, dbUpdateHandleDir}
	} else if err := os.Chmod(realName, mode|(info.Mode()&retainBits)); err == nil {
		p.dbUpdates <- dbUpdateJob{file, dbUpdateHandleDir}
	} else {
		l.Infof("Puller (folder %q, dir %q): %v", p.folder, file.Name, err)
		p.newError(file.Name, err)
	}
}
// deleteDir attempts to delete the given directory
func (p *rwFolder) deleteDir(file protocol.FileInfo) {
	// err is captured by the deferred ItemFinished event below.
	var err error
	events.Default.Log(events.ItemStarted, map[string]string{
		"folder": p.folder,
		"item":   file.Name,
		"type":   "dir",
		"action": "delete",
	})
	defer func() {
		events.Default.Log(events.ItemFinished, map[string]interface{}{
			"folder": p.folder,
			"item":   file.Name,
			"error":  events.Error(err),
			"type":   "dir",
			"action": "delete",
		})
	}()

	realName := filepath.Join(p.dir, file.Name)

	// Delete any temporary files lying around in the directory. The
	// directory handle is closed before the removal below; previously it
	// was left open, leaking a file descriptor per deleted directory.
	if dir, openErr := os.Open(realName); openErr == nil {
		names, _ := dir.Readdirnames(-1)
		dir.Close()
		for _, name := range names {
			if defTempNamer.IsTemporary(name) {
				osutil.InWritableDir(osutil.Remove, filepath.Join(realName, name))
			}
		}
	}

	err = osutil.InWritableDir(osutil.Remove, realName)
	if err == nil || os.IsNotExist(err) {
		// It was removed or it doesn't exist to start with
		p.dbUpdates <- dbUpdateJob{file, dbUpdateDeleteDir}
	} else if _, serr := os.Lstat(realName); serr != nil && !os.IsPermission(serr) {
		// We get an error just looking at the directory, and it's not a
		// permission problem. Lets assume the error is in fact some variant
		// of "file does not exist" (possibly expressed as some parent being a
		// file and not a directory etc) and that the delete is handled.
		p.dbUpdates <- dbUpdateJob{file, dbUpdateDeleteDir}
	} else {
		l.Infof("Puller (folder %q, dir %q): delete: %v", p.folder, file.Name, err)
		p.newError(file.Name, err)
	}
}
// deleteFile attempts to delete the given file
func (p *rwFolder) deleteFile(file protocol.FileInfo) {
	// err is captured by the deferred ItemFinished event below.
	var err error
	events.Default.Log(events.ItemStarted, map[string]string{
		"folder": p.folder,
		"item":   file.Name,
		"type":   "file",
		"action": "delete",
	})
	defer func() {
		events.Default.Log(events.ItemFinished, map[string]interface{}{
			"folder": p.folder,
			"item":   file.Name,
			"error":  events.Error(err),
			"type":   "file",
			"action": "delete",
		})
	}()

	realName := filepath.Join(p.dir, file.Name)

	cur, ok := p.model.CurrentFolderFile(p.folder, file.Name)
	if ok && p.inConflict(cur.Version, file.Version) {
		// There is a conflict here. Move the file to a conflict copy instead
		// of deleting. Also merge with the version vector we had, to indicate
		// we have resolved the conflict.
		file.Version = file.Version.Merge(cur.Version)
		err = osutil.InWritableDir(moveForConflict, realName)
	} else if p.versioner != nil {
		// Let the configured versioner archive the file instead of removing it.
		err = osutil.InWritableDir(p.versioner.Archive, realName)
	} else {
		err = osutil.InWritableDir(osutil.Remove, realName)
	}

	if err == nil || os.IsNotExist(err) {
		// It was removed or it doesn't exist to start with
		p.dbUpdates <- dbUpdateJob{file, dbUpdateDeleteFile}
	} else if _, serr := os.Lstat(realName); serr != nil && !os.IsPermission(serr) {
		// We get an error just looking at the file, and it's not a permission
		// problem. Lets assume the error is in fact some variant of "file
		// does not exist" (possibly expressed as some parent being a file and
		// not a directory etc) and that the delete is handled.
		p.dbUpdates <- dbUpdateJob{file, dbUpdateDeleteFile}
	} else {
		l.Infof("Puller (folder %q, file %q): delete: %v", p.folder, file.Name, err)
		p.newError(file.Name, err)
	}
}
// renameFile attempts to rename an existing file to a destination
// and set the right attributes on it.
func (p *rwFolder) renameFile(source, target protocol.FileInfo) {
	// A rename is reported as a delete of the source plus an update of the
	// target; err is shared by both deferred ItemFinished events.
	var err error
	events.Default.Log(events.ItemStarted, map[string]string{
		"folder": p.folder,
		"item":   source.Name,
		"type":   "file",
		"action": "delete",
	})
	events.Default.Log(events.ItemStarted, map[string]string{
		"folder": p.folder,
		"item":   target.Name,
		"type":   "file",
		"action": "update",
	})
	defer func() {
		events.Default.Log(events.ItemFinished, map[string]interface{}{
			"folder": p.folder,
			"item":   source.Name,
			"error":  events.Error(err),
			"type":   "file",
			"action": "delete",
		})
		events.Default.Log(events.ItemFinished, map[string]interface{}{
			"folder": p.folder,
			"item":   target.Name,
			"error":  events.Error(err),
			"type":   "file",
			"action": "update",
		})
	}()

	if debug {
		l.Debugln(p, "taking rename shortcut", source.Name, "->", target.Name)
	}

	from := filepath.Join(p.dir, source.Name)
	to := filepath.Join(p.dir, target.Name)

	if p.versioner != nil {
		// With versioning enabled we copy instead of renaming, so the
		// versioner can archive the original afterwards.
		err = osutil.Copy(from, to)
		if err == nil {
			err = osutil.InWritableDir(p.versioner.Archive, from)
		}
	} else {
		err = osutil.TryRename(from, to)
	}

	if err == nil {
		// The file was renamed, so we have handled both the necessary delete
		// of the source and the creation of the target. Fix-up the metadata,
		// and update the local index of the target file.

		p.dbUpdates <- dbUpdateJob{source, dbUpdateDeleteFile}

		err = p.shortcutFile(target)
		if err != nil {
			l.Infof("Puller (folder %q, file %q): rename from %q metadata: %v", p.folder, target.Name, source.Name, err)
			p.newError(target.Name, err)
			return
		}

		p.dbUpdates <- dbUpdateJob{target, dbUpdateHandleFile}
	} else {
		// We failed the rename so we have a source file that we still need to
		// get rid of. Attempt to delete it instead so that we make *some*
		// progress. The target is unhandled.

		err = osutil.InWritableDir(osutil.Remove, from)
		if err != nil {
			l.Infof("Puller (folder %q, file %q): delete %q after failed rename: %v", p.folder, target.Name, source.Name, err)
			p.newError(target.Name, err)
			return
		}

		p.dbUpdates <- dbUpdateJob{source, dbUpdateDeleteFile}
	}
}
// This is the flow of data and events here, I think...
//
// +-----------------------+
// | | - - - - > ItemStarted
// | handleFile | - - - - > ItemFinished (on shortcuts)
// | |
// +-----------------------+
// |
// | copyChan (copyBlocksState; unless shortcut taken)
// |
// | +-----------------------+
// | | +-----------------------+
// +--->| | |
// | | copierRoutine |
// +-| |
// +-----------------------+
// |
// | pullChan (sharedPullerState)
// |
// | +-----------------------+
// | | +-----------------------+
// +-->| | |
// | | pullerRoutine |
// +-| |
// +-----------------------+
// |
// | finisherChan (sharedPullerState)
// |
// | +-----------------------+
// | | |
// +-->| finisherRoutine | - - - - > ItemFinished
// | |
// +-----------------------+
// handleFile queues the copies and pulls as necessary for a single new or
// changed file.
func (p *rwFolder) handleFile(file protocol.FileInfo, copyChan chan<- copyBlocksState, finisherChan chan<- *sharedPullerState) {
	curFile, ok := p.model.CurrentFolderFile(p.folder, file.Name)

	if ok && len(curFile.Blocks) == len(file.Blocks) && scanner.BlocksEqual(curFile.Blocks, file.Blocks) {
		// We are supposed to copy the entire file, and then fetch nothing. We
		// are only updating metadata, so we don't actually *need* to make the
		// copy.
		if debug {
			l.Debugln(p, "taking shortcut on", file.Name)
		}

		events.Default.Log(events.ItemStarted, map[string]string{
			"folder": p.folder,
			"item":   file.Name,
			"type":   "file",
			"action": "metadata",
		})

		p.queue.Done(file.Name)

		var err error
		if file.IsSymlink() {
			err = p.shortcutSymlink(file)
		} else {
			err = p.shortcutFile(file)
		}

		events.Default.Log(events.ItemFinished, map[string]interface{}{
			"folder": p.folder,
			"item":   file.Name,
			"error":  events.Error(err),
			"type":   "file",
			"action": "metadata",
		})

		if err != nil {
			l.Infoln("Puller: shortcut:", err)
			p.newError(file.Name, err)
		} else {
			p.dbUpdates <- dbUpdateJob{file, dbUpdateShortcutFile}
		}

		return
	}

	events.Default.Log(events.ItemStarted, map[string]string{
		"folder": p.folder,
		"item":   file.Name,
		"type":   "file",
		"action": "update",
	})

	scanner.PopulateOffsets(file.Blocks)

	// Figure out the absolute filenames we need once and for all
	tempName := filepath.Join(p.dir, defTempNamer.TempName(file.Name))
	realName := filepath.Join(p.dir, file.Name)

	reused := 0
	var blocks []protocol.BlockInfo

	// Check for an old temporary file which might have some blocks we could
	// reuse.
	tempBlocks, err := scanner.HashFile(tempName, protocol.BlockSize)
	if err == nil {
		// Check for any reusable blocks in the temp file
		tempCopyBlocks, _ := scanner.BlockDiff(tempBlocks, file.Blocks)

		// block.String() returns a string unique to the block
		existingBlocks := make(map[string]struct{}, len(tempCopyBlocks))
		for _, block := range tempCopyBlocks {
			existingBlocks[block.String()] = struct{}{}
		}

		// Since the blocks are already there, we don't need to get them.
		// blocks ends up holding only the blocks missing from the temp file.
		for _, block := range file.Blocks {
			_, ok := existingBlocks[block.String()]
			if !ok {
				blocks = append(blocks, block)
			}
		}

		// The sharedpullerstate will know which flags to use when opening the
		// temp file depending if we are reusing any blocks or not.
		reused = len(file.Blocks) - len(blocks)
		if reused == 0 {
			// Otherwise, discard the file ourselves in order for the
			// sharedpuller not to panic when it fails to exclusively create a
			// file which already exists
			os.Remove(tempName)
		}
	} else {
		// No usable temp file; every block needs to be copied or pulled.
		blocks = file.Blocks
	}

	s := sharedPullerState{
		file:        file,
		folder:      p.folder,
		tempName:    tempName,
		realName:    realName,
		copyTotal:   len(blocks),
		copyNeeded:  len(blocks),
		reused:      reused,
		ignorePerms: p.ignorePermissions(file),
		version:     curFile.Version,
		mut:         sync.NewMutex(),
	}

	if debug {
		l.Debugf("%v need file %s; copy %d, reused %v", p, file.Name, len(blocks), reused)
	}

	cs := copyBlocksState{
		sharedPullerState: &s,
		blocks:            blocks,
	}
	copyChan <- cs
}
// shortcutFile sets file mode and modification time, when that's the only
// thing that has changed.
func (p *rwFolder) shortcutFile(file protocol.FileInfo) error {
	realName := filepath.Join(p.dir, file.Name)
	if !p.ignorePermissions(file) {
		if err := os.Chmod(realName, os.FileMode(file.Flags&0777)); err != nil {
			l.Infof("Puller (folder %q, file %q): shortcut: chmod: %v", p.folder, file.Name, err)
			p.newError(file.Name, err)
			return err
		}
	}

	t := time.Unix(file.Modified, 0)
	if err := os.Chtimes(realName, t, t); err != nil {
		// Try using virtual mtimes
		info, err := os.Stat(realName)
		if err != nil {
			l.Infof("Puller (folder %q, file %q): shortcut: unable to stat file: %v", p.folder, file.Name, err)
			p.newError(file.Name, err)
			return err
		}

		p.virtualMtimeRepo.UpdateMtime(file.Name, info.ModTime(), t)
	}

	// This may have been a conflict. We should merge the version vectors so
	// that our clock doesn't move backwards.
	// NOTE(review): file is a by-value parameter, so this merge is only
	// visible inside this function; the callers (handleFile, renameFile)
	// enqueue their own, unmerged copy into p.dbUpdates. Verify whether the
	// merged version is supposed to reach the database update.
	if cur, ok := p.model.CurrentFolderFile(p.folder, file.Name); ok {
		file.Version = file.Version.Merge(cur.Version)
	}

	return nil
}
// shortcutSymlink changes the symlinks type if necessary, logging and
// recording any failure as a folder error.
func (p *rwFolder) shortcutSymlink(file protocol.FileInfo) error {
	path := filepath.Join(p.dir, file.Name)
	if err := symlinks.ChangeType(path, file.Flags); err != nil {
		l.Infof("Puller (folder %q, file %q): symlink shortcut: %v", p.folder, file.Name, err)
		p.newError(file.Name, err)
		return err
	}
	return nil
}
// copierRoutine reads copierStates until the in channel closes and performs
// the relevant copies when possible, or passes it to the puller routine.
func (p *rwFolder) copierRoutine(in <-chan copyBlocksState, pullChan chan<- pullBlockState, out chan<- *sharedPullerState) {
	// One block-sized buffer, re-sliced per block and reused across files.
	buf := make([]byte, protocol.BlockSize)

	for state := range in {
		dstFd, err := state.tempFile()
		if err != nil {
			// Nothing more to do for this failed file, since we couldn't create a temporary for it.
			out <- state.sharedPullerState
			continue
		}

		if p.progressEmitter != nil {
			p.progressEmitter.Register(state.sharedPullerState)
		}

		// Snapshot the folder roots so the finder callback below can open
		// files from any folder without holding fmut.
		folderRoots := make(map[string]string)
		p.model.fmut.RLock()
		for folder, cfg := range p.model.folderCfgs {
			folderRoots[folder] = cfg.Path()
		}
		p.model.fmut.RUnlock()

		for _, block := range state.blocks {
			buf = buf[:int(block.Size)]
			// Try to copy the block from a local file that already has it;
			// the callback returns true when the copy succeeded.
			found := p.model.finder.Iterate(block.Hash, func(folder, file string, index int32) bool {
				fd, err := os.Open(filepath.Join(folderRoots[folder], file))
				if err != nil {
					return false
				}

				_, err = fd.ReadAt(buf, protocol.BlockSize*int64(index))
				fd.Close()
				if err != nil {
					return false
				}

				hash, err := scanner.VerifyBuffer(buf, block)
				if err != nil {
					if hash != nil {
						if debug {
							l.Debugf("Finder block mismatch in %s:%s:%d expected %q got %q", folder, file, index, block.Hash, hash)
						}
						err = p.model.finder.Fix(folder, file, index, block.Hash, hash)
						if err != nil {
							l.Warnln("finder fix:", err)
						}
					} else if debug {
						l.Debugln("Finder failed to verify buffer", err)
					}
					return false
				}

				_, err = dstFd.WriteAt(buf, block.Offset)
				if err != nil {
					state.fail("dst write", err)
				}
				if file == state.file.Name {
					state.copiedFromOrigin()
				}
				return true
			})

			if state.failed() != nil {
				break
			}

			if !found {
				// Not available locally; hand the block to the puller routines.
				state.pullStarted()
				ps := pullBlockState{
					sharedPullerState: state.sharedPullerState,
					block:             block,
				}
				pullChan <- ps
			} else {
				state.copyDone()
			}
		}
		out <- state.sharedPullerState
	}
}
// pullerRoutine reads pullBlockStates until the in channel closes, fetches
// each block from the least busy device that advertises it, verifies it and
// writes it into the temp file. The processed state is always forwarded on
// out so the finisher can account for it.
func (p *rwFolder) pullerRoutine(in <-chan pullBlockState, out chan<- *sharedPullerState) {
	for state := range in {
		if state.failed() != nil {
			out <- state.sharedPullerState
			continue
		}

		// Get an fd to the temporary file. Technically we don't need it until
		// after fetching the block, but if we run into an error here there is
		// no point in issuing the request to the network.
		fd, err := state.tempFile()
		if err != nil {
			out <- state.sharedPullerState
			continue
		}

		var lastError error
		potentialDevices := p.model.Availability(p.folder, state.file.Name)
		for {
			// Select the least busy device to pull the block from. If we found no
			// feasible device at all, fail the block (and in the long run, the
			// file).
			selected := activity.leastBusy(potentialDevices)
			if selected == (protocol.DeviceID{}) {
				if lastError != nil {
					state.fail("pull", lastError)
				} else {
					state.fail("pull", errNoDevice)
				}
				break
			}

			potentialDevices = removeDevice(potentialDevices, selected)

			// Fetch the block, while marking the selected device as in use so that
			// leastBusy can select another device when someone else asks.
			activity.using(selected)
			// Assign with "=" rather than ":=": a ":=" here would shadow the
			// lastError declared outside the loop, so the state.fail above
			// would always report errNoDevice instead of the real last error.
			var buf []byte
			buf, lastError = p.model.requestGlobal(selected, p.folder, state.file.Name, state.block.Offset, int(state.block.Size), state.block.Hash, 0, nil)
			activity.done(selected)
			if lastError != nil {
				if debug {
					l.Debugln("request:", p.folder, state.file.Name, state.block.Offset, state.block.Size, "returned error:", lastError)
				}
				continue
			}

			// Verify that the received block matches the desired hash, if not
			// try pulling it from another device.
			_, lastError = scanner.VerifyBuffer(buf, state.block)
			if lastError != nil {
				if debug {
					l.Debugln("request:", p.folder, state.file.Name, state.block.Offset, state.block.Size, "hash mismatch")
				}
				continue
			}

			// Save the block data we got from the cluster
			_, err = fd.WriteAt(buf, state.block.Offset)
			if err != nil {
				state.fail("save", err)
			} else {
				state.pullDone()
			}
			break
		}
		out <- state.sharedPullerState
	}
}
// performFinish moves a fully assembled temp file into its final place:
// fixes permissions and mtime, archives or conflict-renames the old file,
// replaces it, converts symlinks, and queues a database update.
func (p *rwFolder) performFinish(state *sharedPullerState) error {
	// Set the correct permission bits on the new file
	if !p.ignorePermissions(state.file) {
		if err := os.Chmod(state.tempName, os.FileMode(state.file.Flags&0777)); err != nil {
			return err
		}
	}

	// Set the correct timestamp on the new file
	t := time.Unix(state.file.Modified, 0)
	if err := os.Chtimes(state.tempName, t, t); err != nil {
		// Try using virtual mtimes instead
		info, err := os.Stat(state.tempName)
		if err != nil {
			return err
		}
		p.virtualMtimeRepo.UpdateMtime(state.file.Name, info.ModTime(), t)
	}

	var err error
	if p.inConflict(state.version, state.file.Version) {
		// The new file has been changed in conflict with the existing one. We
		// should file it away as a conflict instead of just removing or
		// archiving. Also merge with the version vector we had, to indicate
		// we have resolved the conflict.
		state.file.Version = state.file.Version.Merge(state.version)
		err = osutil.InWritableDir(moveForConflict, state.realName)
	} else if p.versioner != nil {
		// If we should use versioning, let the versioner archive the old
		// file before we replace it. Archiving a non-existent file is not
		// an error.
		err = p.versioner.Archive(state.realName)
	} else {
		err = nil
	}
	if err != nil {
		return err
	}

	// If the target path is a symlink or a directory, we cannot copy
	// over it, hence remove it before proceeding.
	stat, err := osutil.Lstat(state.realName)
	if err == nil && (stat.IsDir() || stat.Mode()&os.ModeSymlink != 0) {
		// Best effort; a failure here will surface as a rename error below.
		osutil.InWritableDir(osutil.Remove, state.realName)
	}
	// Replace the original content with the new one
	if err = osutil.Rename(state.tempName, state.realName); err != nil {
		return err
	}

	// If it's a symlink, the target of the symlink is inside the file.
	if state.file.IsSymlink() {
		content, err := ioutil.ReadFile(state.realName)
		if err != nil {
			return err
		}

		// Remove the file, and replace it with a symlink.
		err = osutil.InWritableDir(func(path string) error {
			os.Remove(path)
			return symlinks.Create(path, string(content), state.file.Flags)
		}, state.realName)
		if err != nil {
			return err
		}
	}

	// Record the updated file in the index
	p.dbUpdates <- dbUpdateJob{state.file, dbUpdateHandleFile}
	return nil
}
// finisherRoutine consumes sharedPullerStates whose copy/pull work is done.
// When finalClose reports the state is truly finished it performs the final
// rename-into-place, reports any error, and emits an ItemFinished event
// regardless of success or failure.
func (p *rwFolder) finisherRoutine(in <-chan *sharedPullerState) {
	for state := range in {
		// finalClose returns closed == true only once, when all routines
		// are done with the state; err carries any accumulated failure.
		if closed, err := state.finalClose(); closed {
			if debug {
				l.Debugln(p, "closing", state.file.Name)
			}

			p.queue.Done(state.file.Name)

			if err == nil {
				err = p.performFinish(state)
			}

			if err != nil {
				l.Infoln("Puller: final:", err)
				p.newError(state.file.Name, err)
			}
			// Always logged, with err == nil on success.
			events.Default.Log(events.ItemFinished, map[string]interface{}{
				"folder": p.folder,
				"item":   state.file.Name,
				"error":  events.Error(err),
				"type":   "file",
				"action": "update",
			})

			if p.progressEmitter != nil {
				p.progressEmitter.Deregister(state)
			}
		}
	}
}
// Moves the given filename to the front of the job queue
func (p *rwFolder) BringToFront(filename string) {
	p.queue.BringToFront(filename)
}

// Jobs returns the names of currently active and queued jobs, in that order.
func (p *rwFolder) Jobs() ([]string, []string) {
	return p.queue.Jobs()
}

// DelayScan postpones the next periodic scan by the given duration.
// NOTE(review): presumably consumed by the folder's main serve loop — confirm.
func (p *rwFolder) DelayScan(next time.Duration) {
	p.delayScan <- next
}
// dbUpdaterRoutine aggregates db updates and commits them in batches no
// larger than 1000 items, and no more delayed than 2 seconds.
func (p *rwFolder) dbUpdaterRoutine() {
	const (
		maxBatchSize = 1000
		maxBatchTime = 2 * time.Second
	)

	batch := make([]dbUpdateJob, 0, maxBatchSize)
	files := make([]protocol.FileInfo, 0, maxBatchSize)
	tick := time.NewTicker(maxBatchTime)
	defer tick.Stop()

	// handleBatch commits the accumulated jobs to the model and notifies it
	// of the last "real" downloaded/deleted file, then resets the buffers
	// (reusing their backing arrays).
	handleBatch := func() {
		found := false
		var lastFile protocol.FileInfo

		for _, job := range batch {
			files = append(files, job.file)
			// Invalid entries and plain directories don't count for the
			// receivedFile notification below.
			if job.file.IsInvalid() || (job.file.IsDirectory() && !job.file.IsSymlink()) {
				continue
			}

			// Only handled or deleted files qualify.
			if job.jobType&(dbUpdateHandleFile|dbUpdateDeleteFile) == 0 {
				continue
			}

			found = true
			lastFile = job.file
		}

		p.model.updateLocals(p.folder, files)

		if found {
			p.model.receivedFile(p.folder, lastFile)
		}

		batch = batch[:0]
		files = files[:0]
	}

loop:
	for {
		select {
		case job, ok := <-p.dbUpdates:
			if !ok {
				// Channel closed: flush any remainder below and exit.
				break loop
			}

			// Reset the local version before committing.
			// NOTE(review): presumably so the model assigns a fresh local
			// version on update — confirm in updateLocals.
			job.file.LocalVersion = 0
			batch = append(batch, job)

			if len(batch) == maxBatchSize {
				handleBatch()
			}

		case <-tick.C:
			// Time-based flush so updates are never delayed more than
			// maxBatchTime.
			if len(batch) > 0 {
				handleBatch()
			}
		}
	}

	if len(batch) > 0 {
		handleBatch()
	}
}
// inConflict reports whether replacing the version vector `current` with
// `replacement` should be treated as a conflict.
func (p *rwFolder) inConflict(current, replacement protocol.Vector) bool {
	switch {
	case current.Concurrent(replacement):
		// Obvious case
		return true

	case replacement.Counter(p.shortID) > current.Counter(p.shortID):
		// The replacement file contains a higher version for ourselves than
		// what we have. This isn't supposed to be possible, since it's only
		// we who can increment that counter. We take it as a sign that
		// something is wrong (our index may have been corrupted or removed)
		// and flag it as a conflict.
		return true

	default:
		return false
	}
}
// invalidateFolder marks the folder with the given ID as invalid in cfg,
// recording err.Error() as the reason. Unknown folder IDs are ignored.
func invalidateFolder(cfg *config.Configuration, folderID string, err error) {
	for i := range cfg.Folders {
		if cfg.Folders[i].ID == folderID {
			cfg.Folders[i].Invalid = err.Error()
			return
		}
	}
}
// removeDevice returns devices with one occurrence of device removed, by
// swapping in the last element and shrinking the slice. The input slice is
// modified in place and element order is not preserved.
func removeDevice(devices []protocol.DeviceID, device protocol.DeviceID) []protocol.DeviceID {
	last := len(devices) - 1
	for i, candidate := range devices {
		if candidate == device {
			devices[i] = devices[last]
			return devices[:last]
		}
	}
	return devices
}
func moveForConflict(name string) error {
ext := filepath.Ext(name)
withoutExt := name[:len(name)-len(ext)]
newName := withoutExt + time.Now().Format(".sync-conflict-20060102-150405") + ext
err := os.Rename(name, newName)
if os.IsNotExist(err) {
// We were supposed to move a file away but it does not exist. Either
// the user has already moved it away, or the conflict was between a
// remote modification and a local delete. In either way it does not
// matter, go ahead as if the move succeeded.
return nil
}
return err
}
// newError records an error message for the given path, keeping only the
// first report per file.
func (p *rwFolder) newError(path string, err error) {
	p.errorsMut.Lock()
	defer p.errorsMut.Unlock()

	// We might get more than one error report for a file (i.e. error on
	// Write() followed by Close()); we keep the first error as that is
	// probably closer to the root cause.
	if _, seen := p.errors[path]; !seen {
		p.errors[path] = err.Error()
	}
}
// clearErrors discards all recorded per-file errors.
func (p *rwFolder) clearErrors() {
	p.errorsMut.Lock()
	defer p.errorsMut.Unlock()
	p.errors = make(map[string]string)
}
// currentErrors returns a snapshot of the recorded per-file errors,
// sorted by path.
func (p *rwFolder) currentErrors() []fileError {
	p.errorsMut.Lock()
	defer p.errorsMut.Unlock()

	list := make([]fileError, 0, len(p.errors))
	for path, msg := range p.errors {
		list = append(list, fileError{path, msg})
	}
	sort.Sort(fileErrorList(list))
	return list
}
// A []fileError is sent as part of an event and will be JSON serialized.
type fileError struct {
	Path string `json:"path"`
	Err  string `json:"error"`
}

// fileErrorList implements sort.Interface, ordering fileErrors by Path.
type fileErrorList []fileError

func (l fileErrorList) Len() int {
	return len(l)
}

func (l fileErrorList) Less(a, b int) bool {
	return l[a].Path < l[b].Path
}

func (l fileErrorList) Swap(a, b int) {
	l[a], l[b] = l[b], l[a]
}
| arkhi/syncthing | lib/model/rwfolder.go | GO | mpl-2.0 | 43,291 |
// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** Type: MMv1 ***
//
// ----------------------------------------------------------------------------
//
// This file is automatically generated by Magic Modules and manual
// changes will be clobbered when the file is regenerated.
//
// Please read more about how to change this file in
// .github/CONTRIBUTING.md.
//
// ----------------------------------------------------------------------------
package google
import (
"context"
"log"
"strings"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)
// init registers the sweeper so the test framework can find and run it
// when sweeping leftover ComputeTargetTcpProxy test resources.
func init() {
	resource.AddTestSweepers("ComputeTargetTcpProxy", &resource.Sweeper{
		Name: "ComputeTargetTcpProxy",
		F:    testSweepComputeTargetTcpProxy,
	})
}
// At the time of writing, the CI only passes us-central1 as the region
//
// testSweepComputeTargetTcpProxy lists global targetTcpProxies in the test
// project and deletes those that look like sweepable test resources.
// Sweepers are best-effort: most failures after config setup are logged
// and reported as success (nil) so one broken resource does not abort the
// whole sweep run. This file is auto-generated; do not hand-edit.
func testSweepComputeTargetTcpProxy(region string) error {
	resourceName := "ComputeTargetTcpProxy"
	log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName)

	config, err := sharedConfigForRegion(region)
	if err != nil {
		log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err)
		return err
	}

	err = config.LoadAndValidate(context.Background())
	if err != nil {
		log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err)
		return err
	}

	t := &testing.T{}
	billingId := getTestBillingAccountFromEnv(t)

	// Setup variables to replace in list template
	d := &ResourceDataMock{
		FieldsInSchema: map[string]interface{}{
			"project":         config.Project,
			"region":          region,
			"location":        region,
			"zone":            "-",
			"billing_account": billingId,
		},
	}

	// Strip any query string from the generated list URL template.
	listTemplate := strings.Split("https://compute.googleapis.com/compute/v1/projects/{{project}}/global/targetTcpProxies", "?")[0]
	listUrl, err := replaceVars(d, config, listTemplate)
	if err != nil {
		log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err)
		return nil
	}

	res, err := sendRequest(config, "GET", config.Project, listUrl, config.userAgent, nil)
	if err != nil {
		log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err)
		return nil
	}

	resourceList, ok := res["items"]
	if !ok {
		log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.")
		return nil
	}

	rl := resourceList.([]interface{})

	log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName)
	// Keep count of items that aren't sweepable for logging.
	nonPrefixCount := 0
	for _, ri := range rl {
		obj := ri.(map[string]interface{})
		if obj["name"] == nil {
			log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName)
			return nil
		}

		name := GetResourceNameFromSelfLink(obj["name"].(string))
		// Skip resources that shouldn't be sweeped
		if !isSweepableTestResource(name) {
			nonPrefixCount++
			continue
		}

		// NOTE(review): the template ends in "{{name}}" yet the resolved name
		// is appended again below; presumably replaceVars expands the
		// unresolved {{name}} to an empty string via ResourceDataMock —
		// confirm, otherwise the name would appear twice in the URL.
		deleteTemplate := "https://compute.googleapis.com/compute/v1/projects/{{project}}/global/targetTcpProxies/{{name}}"
		deleteUrl, err := replaceVars(d, config, deleteTemplate)
		if err != nil {
			log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err)
			return nil
		}
		deleteUrl = deleteUrl + name
		// Don't wait on operations as we may have a lot to delete
		_, err = sendRequest(config, "DELETE", config.Project, deleteUrl, config.userAgent, nil)
		if err != nil {
			log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err)
		} else {
			log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name)
		}
	}

	if nonPrefixCount > 0 {
		log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount)
	}

	return nil
}
| hashicorp/terraform-provider-google | google/resource_compute_target_tcp_proxy_sweeper_test.go | GO | mpl-2.0 | 3,772 |
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
// Whether we run on macOS; modifier expectations differ there
// (CommandOrControl maps to Cmd instead of Ctrl).
var isOSX = Services.appinfo.OS === "Darwin";

// Entry point: run every sub-test in sequence against a single
// KeyShortcuts instance bound to the test window, then exercise the
// `target` option with a fresh instance in testTarget().
add_task(function* () {
  let shortcuts = new KeyShortcuts({
    window
  });
  yield testSimple(shortcuts);
  yield testNonLetterCharacter(shortcuts);
  yield testPlusCharacter(shortcuts);
  yield testFunctionKey(shortcuts);
  yield testMixup(shortcuts);
  yield testLooseDigits(shortcuts);
  yield testExactModifiers(shortcuts);
  yield testLooseShiftModifier(shortcuts);
  yield testStrictLetterShiftModifier(shortcuts);
  yield testAltModifier(shortcuts);
  yield testCommandOrControlModifier(shortcuts);
  yield testCtrlModifier(shortcuts);
  yield testInvalidShortcutString(shortcuts);
  yield testCmdShiftShortcut(shortcuts);
  shortcuts.destroy();

  yield testTarget();
});
// Test helper that waits for a single notification of `key` on `shortcuts`,
// unregistering itself on the first hit so a second key press must not
// re-trigger it. Returns a promise (resolved after `listener` ran) so
// callers can yield on it from a Task.
function once(shortcuts, key, listener) {
  let alreadyFired = false;
  return new Promise(resolve => {
    function onShortcut(firedKey, event) {
      shortcuts.off(key, onShortcut);
      ok(!alreadyFired, "once listener called only once (i.e. off() works)");
      is(key, firedKey, "listener first argument match the key we listen");
      alreadyFired = true;
      listener(firedKey, event);
      resolve();
    }
    shortcuts.on(key, onShortcut);
  });
}
// A bare printable key ("0") must trigger its listener exactly once.
function* testSimple(shortcuts) {
  info("Test simple key shortcuts");

  let onKey = once(shortcuts, "0", (key, event) => {
    is(event.key, "0");

    // Display another key press to ensure that once() correctly stop listening
    EventUtils.synthesizeKey("0", {}, window);
  });

  EventUtils.synthesizeKey("0", {}, window);
  yield onKey;
}

// Punctuation/bracket characters must also work as shortcut keys.
function* testNonLetterCharacter(shortcuts) {
  info("Test non-naive character key shortcuts");

  let onKey = once(shortcuts, "[", (key, event) => {
    is(event.key, "[");
  });

  EventUtils.synthesizeKey("[", {}, window);
  yield onKey;
}

// Function keys (F1..F12) can be bound by name.
function* testFunctionKey(shortcuts) {
  info("Test function key shortcuts");

  let onKey = once(shortcuts, "F12", (key, event) => {
    is(event.key, "F12");
  });

  // keyCode 123 is DOM_VK_F12.
  EventUtils.synthesizeKey("F12", { keyCode: 123 }, window);
  yield onKey;
}

// Plus is special. It's keycode is the one for "=". That's because it requires
// shift to be pressed and is behind "=" key. So it should be considered as a
// character key
function* testPlusCharacter(shortcuts) {
  info("Test 'Plus' key shortcuts");

  let onKey = once(shortcuts, "Plus", (key, event) => {
    is(event.key, "+");
  });

  // keyCode 61 is the "=" key; Shift+"=" produces "+" on a US layout.
  EventUtils.synthesizeKey("+", { keyCode: 61, shiftKey: true }, window);
  yield onKey;
}
// Test they listeners are not mixed up between shortcuts
function* testMixup(shortcuts) {
  info("Test possible listener mixup");

  let hitFirst = false, hitSecond = false;
  let onFirstKey = once(shortcuts, "0", (key, event) => {
    is(event.key, "0");
    hitFirst = true;
  });
  let onSecondKey = once(shortcuts, "Alt+A", (key, event) => {
    is(event.key, "a");
    ok(event.altKey);
    hitSecond = true;
  });

  // Dispatch the first shortcut and expect only this one to be notified
  ok(!hitFirst, "First shortcut isn't notified before firing the key event");
  EventUtils.synthesizeKey("0", {}, window);
  yield onFirstKey;
  ok(hitFirst, "Got the first shortcut notified");
  ok(!hitSecond, "No mixup, second shortcut is still not notified (1/2)");

  // Wait an extra time, just to be sure this isn't racy
  yield new Promise(done => {
    window.setTimeout(done, 0);
  });
  ok(!hitSecond, "No mixup, second shortcut is still not notified (2/2)");

  // Finally dispatch the second shortcut
  EventUtils.synthesizeKey("a", { altKey: true }, window);
  yield onSecondKey;
  ok(hitSecond, "Got the second shortcut notified once it is actually fired");
}

// On azerty keyboard, digits are only available by pressing Shift/Capslock,
// but we accept them even if we omit doing that.
function* testLooseDigits(shortcuts) {
  info("Test Loose digits");

  // "à" is what the unshifted "0" key (keyCode 48) produces on a French
  // azerty layout; the "0" shortcut must still match.
  let onKey = once(shortcuts, "0", (key, event) => {
    is(event.key, "à");
    ok(!event.altKey);
    ok(!event.ctrlKey);
    ok(!event.metaKey);
    ok(!event.shiftKey);
  });
  // Simulate a press on the "0" key, without shift pressed on a french
  // keyboard
  EventUtils.synthesizeKey(
    "à",
    { keyCode: 48 },
    window);
  yield onKey;

  onKey = once(shortcuts, "0", (key, event) => {
    is(event.key, "0");
    ok(!event.altKey);
    ok(!event.ctrlKey);
    ok(!event.metaKey);
    ok(event.shiftKey);
  });
  // Simulate the same press with shift pressed
  EventUtils.synthesizeKey(
    "0",
    { keyCode: 48, shiftKey: true },
    window);
  yield onKey;
}
// Test that shortcuts is notified only when the modifiers match exactly
function* testExactModifiers(shortcuts) {
  info("Test exact modifiers match");

  let hit = false;
  let onKey = once(shortcuts, "Alt+A", (key, event) => {
    is(event.key, "a");
    ok(event.altKey);
    ok(!event.ctrlKey);
    ok(!event.metaKey);
    ok(!event.shiftKey);
    hit = true;
  });

  // Dispatch with unexpected set of modifiers
  ok(!hit, "Shortcut isn't notified before firing the key event");
  EventUtils.synthesizeKey("a",
    { accelKey: true, altKey: true, shiftKey: true },
    window);
  EventUtils.synthesizeKey(
    "a",
    { accelKey: true, altKey: false, shiftKey: false },
    window);
  EventUtils.synthesizeKey(
    "a",
    { accelKey: false, altKey: false, shiftKey: true },
    window);
  EventUtils.synthesizeKey(
    "a",
    { accelKey: false, altKey: false, shiftKey: false },
    window);

  // Wait an extra time to let a chance to call the listener
  yield new Promise(done => {
    window.setTimeout(done, 0);
  });
  ok(!hit, "Listener isn't called when modifiers aren't exactly matching");

  // Dispatch the expected modifiers
  EventUtils.synthesizeKey("a", { accelKey: false, altKey: true, shiftKey: false},
    window);
  yield onKey;
  ok(hit, "Got shortcut notified once it is actually fired");
}

// Some keys are only accessible via shift and listener should also be called
// even if the key didn't explicitely requested Shift modifier.
// For example, `%` on french keyboards is only accessible via Shift.
// Same thing for `@` on US keybords.
function* testLooseShiftModifier(shortcuts) {
  info("Test Loose shift modifier");

  let onKey = once(shortcuts, "%", (key, event) => {
    is(event.key, "%");
    ok(!event.altKey);
    ok(!event.ctrlKey);
    ok(!event.metaKey);
    ok(event.shiftKey);
  });
  EventUtils.synthesizeKey(
    "%",
    { accelKey: false, altKey: false, ctrlKey: false, shiftKey: true},
    window);
  yield onKey;

  onKey = once(shortcuts, "@", (key, event) => {
    is(event.key, "@");
    ok(!event.altKey);
    ok(!event.ctrlKey);
    ok(!event.metaKey);
    ok(event.shiftKey);
  });
  EventUtils.synthesizeKey(
    "@",
    { accelKey: false, altKey: false, ctrlKey: false, shiftKey: true},
    window);
  yield onKey;
}
// But Shift modifier is strict on all letter characters (a to Z)
function* testStrictLetterShiftModifier(shortcuts) {
  info("Test strict shift modifier on letters");

  let hitFirst = false;
  let onKey = once(shortcuts, "a", (key, event) => {
    is(event.key, "a");
    ok(!event.altKey);
    ok(!event.ctrlKey);
    ok(!event.metaKey);
    ok(!event.shiftKey);
    hitFirst = true;
  });
  let onShiftKey = once(shortcuts, "Shift+a", (key, event) => {
    is(event.key, "a");
    ok(!event.altKey);
    ok(!event.ctrlKey);
    ok(!event.metaKey);
    ok(event.shiftKey);
  });

  // Shift+a must only fire the "Shift+a" binding, never plain "a".
  EventUtils.synthesizeKey(
    "a",
    { shiftKey: true},
    window);
  yield onShiftKey;
  ok(!hitFirst, "Didn't fire the explicit shift+a");

  EventUtils.synthesizeKey(
    "a",
    { shiftKey: false},
    window);
  yield onKey;
}

// Alt+<function key> combinations must be matched with Alt only.
function* testAltModifier(shortcuts) {
  info("Test Alt modifier");

  let onKey = once(shortcuts, "Alt+F1", (key, event) => {
    is(event.keyCode, window.KeyboardEvent.DOM_VK_F1);
    ok(event.altKey);
    ok(!event.ctrlKey);
    ok(!event.metaKey);
    ok(!event.shiftKey);
  });

  EventUtils.synthesizeKey(
    "VK_F1",
    { altKey: true },
    window);
  yield onKey;
}

// CommandOrControl (and its CmdOrCtrl alias) maps to Meta on macOS and
// Ctrl everywhere else; both spellings must fire for the same key press.
function* testCommandOrControlModifier(shortcuts) {
  info("Test CommandOrControl modifier");

  let onKey = once(shortcuts, "CommandOrControl+F1", (key, event) => {
    is(event.keyCode, window.KeyboardEvent.DOM_VK_F1);
    ok(!event.altKey);
    if (isOSX) {
      ok(!event.ctrlKey);
      ok(event.metaKey);
    } else {
      ok(event.ctrlKey);
      ok(!event.metaKey);
    }
    ok(!event.shiftKey);
  });
  let onKeyAlias = once(shortcuts, "CmdOrCtrl+F1", (key, event) => {
    is(event.keyCode, window.KeyboardEvent.DOM_VK_F1);
    ok(!event.altKey);
    if (isOSX) {
      ok(!event.ctrlKey);
      ok(event.metaKey);
    } else {
      ok(event.ctrlKey);
      ok(!event.metaKey);
    }
    ok(!event.shiftKey);
  });

  if (isOSX) {
    EventUtils.synthesizeKey(
      "VK_F1",
      { metaKey: true },
      window);
  } else {
    EventUtils.synthesizeKey(
      "VK_F1",
      { ctrlKey: true },
      window);
  }
  yield onKey;
  yield onKeyAlias;
}
// Ctrl (alias Control) always means the literal Ctrl key, on every OS.
function* testCtrlModifier(shortcuts) {
  info("Test Ctrl modifier");

  let onKey = once(shortcuts, "Ctrl+F1", (key, event) => {
    is(event.keyCode, window.KeyboardEvent.DOM_VK_F1);
    ok(!event.altKey);
    ok(event.ctrlKey);
    ok(!event.metaKey);
    ok(!event.shiftKey);
  });
  let onKeyAlias = once(shortcuts, "Control+F1", (key, event) => {
    is(event.keyCode, window.KeyboardEvent.DOM_VK_F1);
    ok(!event.altKey);
    ok(event.ctrlKey);
    ok(!event.metaKey);
    ok(!event.shiftKey);
  });

  EventUtils.synthesizeKey(
    "VK_F1",
    { ctrlKey: true },
    window);
  yield onKey;
  yield onKeyAlias;
}

// Cmd+[ and Cmd+Shift+[ must be distinguished on macOS.
function* testCmdShiftShortcut(shortcuts) {
  if (!isOSX) {
    // This test is OSX only (Bug 1300458).
    return;
  }

  let onCmdKey = once(shortcuts, "CmdOrCtrl+[", (key, event) => {
    is(event.key, "[");
    ok(!event.altKey);
    ok(!event.ctrlKey);
    ok(event.metaKey);
    ok(!event.shiftKey);
  });
  let onCmdShiftKey = once(shortcuts, "CmdOrCtrl+Shift+[", (key, event) => {
    is(event.key, "[");
    ok(!event.altKey);
    ok(!event.ctrlKey);
    ok(event.metaKey);
    ok(event.shiftKey);
  });

  // Both presses are dispatched first; the promises resolve independently.
  EventUtils.synthesizeKey(
    "[",
    { metaKey: true, shiftKey: true },
    window);
  EventUtils.synthesizeKey(
    "[",
    { metaKey: true },
    window);

  yield onCmdKey;
  yield onCmdShiftKey;
}

// With a `target` option, events are listened for on that element rather
// than on the window.
function* testTarget() {
  info("Test KeyShortcuts with target argument");

  let target = document.createElementNS("http://www.w3.org/1999/xhtml",
    "input");
  document.documentElement.appendChild(target);
  target.focus();

  let shortcuts = new KeyShortcuts({
    window,
    target
  });
  let onKey = once(shortcuts, "0", (key, event) => {
    is(event.key, "0");
    is(event.target, target);
  });
  EventUtils.synthesizeKey("0", {}, window);
  yield onKey;

  target.remove();
  shortcuts.destroy();
}

// Invalid shortcut strings must parse to null and must not make on() throw.
function testInvalidShortcutString(shortcuts) {
  info("Test wrong shortcut string");

  let shortcut = KeyShortcuts.parseElectronKey(window, "Cmmd+F");
  ok(!shortcut, "Passing a invalid shortcut string should return a null object");

  shortcuts.on("Cmmd+F", function () {});
  ok(true, "on() shouldn't throw when passing invalid shortcut string");
}
| Yukarumya/Yukarum-Redfoxes | devtools/client/shared/test/browser_key_shortcuts.js | JavaScript | mpl-2.0 | 11,394 |
/**
* This Source Code Form is subject to the terms of the Mozilla Public License,
* v. 2.0. If a copy of the MPL was not distributed with this file, You can
* obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
* the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
*
* Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
* graphic logo is a trademark of OpenMRS Inc.
*/
package org.openmrs.api.impl;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.CollectionUtils;
import org.openmrs.Address;
import org.openmrs.Location;
import org.openmrs.LocationAttribute;
import org.openmrs.LocationAttributeType;
import org.openmrs.LocationTag;
import org.openmrs.api.APIException;
import org.openmrs.api.LocationService;
import org.openmrs.api.context.Context;
import org.openmrs.api.db.LocationDAO;
import org.openmrs.customdatatype.CustomDatatypeUtil;
import org.openmrs.util.OpenmrsConstants;
import org.openmrs.util.OpenmrsUtil;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
/**
* Default implementation of the {@link LocationService}
* <p>
* This class should not be instantiated alone, get a service class from the Context:
* Context.getLocationService();
*
* @see org.openmrs.api.context.Context
* @see org.openmrs.api.LocationService
* @see org.openmrs.Location
*/
@Transactional
public class LocationServiceImpl extends BaseOpenmrsService implements LocationService {
	// Data access object performing the actual persistence work; injected by
	// Spring via setLocationDAO().
	private LocationDAO dao;
	
	/**
	 * @see org.openmrs.api.LocationService#setLocationDAO(org.openmrs.api.db.LocationDAO)
	 */
	public void setLocationDAO(LocationDAO dao) {
		this.dao = dao;
	}
/**
* @see org.openmrs.api.LocationService#saveLocation(org.openmrs.Location)
*/
public Location saveLocation(Location location) throws APIException {
if (location.getName() == null) {
throw new APIException("Location name is required");
}
// Check for transient tags. If found, try to match by name and overwrite, otherwise throw exception.
if (location.getTags() != null) {
for (LocationTag tag : location.getTags()) {
// only check transient (aka non-precreated) location tags
if (tag.getLocationTagId() == null) {
if (!StringUtils.hasLength(tag.getName())) {
throw new APIException("A tag name is required");
}
LocationTag existing = Context.getLocationService().getLocationTagByName(tag.getName());
if (existing != null) {
location.removeTag(tag);
location.addTag(existing);
} else {
throw new APIException("Cannot add transient tags! "
+ "Save all location tags to the database before saving this location");
}
}
}
}
CustomDatatypeUtil.saveAttributesIfNecessary(location);
return dao.saveLocation(location);
}
	/**
	 * @see org.openmrs.api.LocationService#getLocation(java.lang.Integer)
	 */
	@Transactional(readOnly = true)
	public Location getLocation(Integer locationId) throws APIException {
		return dao.getLocation(locationId);
	}
	
	/**
	 * @see org.openmrs.api.LocationService#getLocation(java.lang.String)
	 */
	@Transactional(readOnly = true)
	public Location getLocation(String name) throws APIException {
		return dao.getLocation(name);
	}
	
	/**
	 * @see org.openmrs.api.LocationService#getDefaultLocation()
	 *      <p>
	 *      Resolution order: the location named by the default-location global
	 *      property, then "Unknown Location", then "Unknown", and finally the
	 *      location with id 1.
	 */
	@Transactional(readOnly = true)
	public Location getDefaultLocation() throws APIException {
		Location location = null;
		String locationGP = Context.getAdministrationService().getGlobalProperty(
		    OpenmrsConstants.GLOBAL_PROPERTY_DEFAULT_LOCATION_NAME);
		
		if (StringUtils.hasText(locationGP)) {
			location = Context.getLocationService().getLocation(locationGP);
		}
		
		//Try to look up 'Unknown Location' in case the global property is something else
		if (location == null && (!StringUtils.hasText(locationGP) || !locationGP.equalsIgnoreCase("Unknown Location"))) {
			location = Context.getLocationService().getLocation("Unknown Location");
		}
		
		// If Unknown Location does not exist, try Unknown if the global property was different
		if (location == null && (!StringUtils.hasText(locationGP) || !locationGP.equalsIgnoreCase("Unknown"))) {
			location = Context.getLocationService().getLocation("Unknown");
		}
		
		// If neither exist, get the first available location
		// NOTE(review): assumes location id 1 exists and is a sensible
		// fallback — confirm.
		if (location == null) {
			location = Context.getLocationService().getLocation(Integer.valueOf(1));
		}
		
		// TODO Figure out if we should/could throw an exception if there's
		// no location to fall back on.
		//if (location == null) {
		//	throw new APIException("Default location does not exist");
		//}
		
		return location;
	}
	/**
	 * @see org.openmrs.api.LocationService#getLocationByUuid(java.lang.String)
	 */
	@Transactional(readOnly = true)
	public Location getLocationByUuid(String uuid) throws APIException {
		return dao.getLocationByUuid(uuid);
	}
	
	/**
	 * @see org.openmrs.api.LocationService#getLocationTagByUuid(java.lang.String)
	 */
	@Transactional(readOnly = true)
	public LocationTag getLocationTagByUuid(String uuid) throws APIException {
		return dao.getLocationTagByUuid(uuid);
	}
	
	/**
	 * @see org.openmrs.api.LocationService#getAllLocations()
	 *      <p>
	 *      Retired locations are included.
	 */
	@Transactional(readOnly = true)
	public List<Location> getAllLocations() throws APIException {
		return dao.getAllLocations(true);
	}
	
	/**
	 * @see org.openmrs.api.LocationService#getAllLocations(boolean)
	 */
	@Transactional(readOnly = true)
	public List<Location> getAllLocations(boolean includeRetired) throws APIException {
		return dao.getAllLocations(includeRetired);
	}
	
	/**
	 * @see org.openmrs.api.LocationService#getLocations(java.lang.String)
	 *      <p>
	 *      Convenience overload: unretired locations matching the name
	 *      fragment, with no paging applied.
	 */
	@Transactional(readOnly = true)
	public List<Location> getLocations(String nameFragment) throws APIException {
		return Context.getLocationService().getLocations(nameFragment, null, null, false, null, null);
	}
/**
* @see org.openmrs.api.LocationService#getLocationsByTag(LocationTag)
*/
@Transactional(readOnly = true)
public List<Location> getLocationsByTag(LocationTag tag) throws APIException {
List<Location> locations = new ArrayList<Location>();
for (Location l : dao.getAllLocations(false)) {
if (l.getTags().contains(tag)) {
locations.add(l);
}
}
return locations;
}
/**
* @see org.openmrs.api.LocationService#getLocationsHavingAllTags(List)
*/
@Transactional(readOnly = true)
public List<Location> getLocationsHavingAllTags(List<LocationTag> tags) throws APIException {
return CollectionUtils.isEmpty(tags) ? getAllLocations(false) : dao.getLocationsHavingAllTags(tags);
}
/**
* @see org.openmrs.api.LocationService#getLocationsHavingAnyTag(List)
*/
@Transactional(readOnly = true)
public List<Location> getLocationsHavingAnyTag(List<LocationTag> tags) throws APIException {
List<Location> locations = new ArrayList<Location>();
for (Location loc : dao.getAllLocations(false)) {
for (LocationTag t : tags) {
if (loc.getTags().contains(t) && !locations.contains(loc)) {
locations.add(loc);
}
}
}
return locations;
}
	/**
	 * @see org.openmrs.api.LocationService#retireLocation(Location, String)
	 *      <p>
	 *      NOTE(review): unlike retireLocationTag, this does not set
	 *      retiredBy/dateRetired explicitly; presumably those are filled in by
	 *      the save handlers — confirm.
	 */
	public Location retireLocation(Location location, String reason) throws APIException {
		location.setRetired(true);
		location.setRetireReason(reason);
		return Context.getLocationService().saveLocation(location);
	}
	
	/**
	 * @see org.openmrs.api.LocationService#unretireLocation(org.openmrs.Location)
	 */
	public Location unretireLocation(Location location) throws APIException {
		location.setRetired(false);
		return Context.getLocationService().saveLocation(location);
	}
	
	/**
	 * @see org.openmrs.api.LocationService#purgeLocation(org.openmrs.Location)
	 *      <p>
	 *      Irreversibly deletes the location from the database.
	 */
	public void purgeLocation(Location location) throws APIException {
		dao.deleteLocation(location);
	}
/**
* @see org.openmrs.api.LocationService#saveLocationTag(org.openmrs.LocationTag)
*/
public LocationTag saveLocationTag(LocationTag tag) throws APIException {
return dao.saveLocationTag(tag);
}
/**
 * Looks up a tag by primary key; returns null when not found (DAO contract).
 *
 * @see org.openmrs.api.LocationService#getLocationTag(java.lang.Integer)
 */
@Transactional(readOnly = true)
public LocationTag getLocationTag(Integer locationTagId) throws APIException {
    return dao.getLocationTag(locationTagId);
}
/**
 * Looks up a tag by its exact name.
 *
 * @see org.openmrs.api.LocationService#getLocationTagByName(java.lang.String)
 */
@Transactional(readOnly = true)
public LocationTag getLocationTagByName(String tag) throws APIException {
    return dao.getLocationTagByName(tag);
}
/**
 * Returns all tags, <em>including</em> retired ones (delegates with true).
 *
 * @see org.openmrs.api.LocationService#getAllLocationTags()
 */
@Transactional(readOnly = true)
public List<LocationTag> getAllLocationTags() throws APIException {
    return dao.getAllLocationTags(true);
}
/**
 * Returns all tags, optionally including retired ones.
 *
 * @see org.openmrs.api.LocationService#getAllLocationTags(boolean)
 */
@Transactional(readOnly = true)
public List<LocationTag> getAllLocationTags(boolean includeRetired) throws APIException {
    return dao.getAllLocationTags(includeRetired);
}
/**
 * Searches tags by name fragment; a null or empty search returns every tag
 * (including retired ones).
 *
 * @see org.openmrs.api.LocationService#getLocationTags(java.lang.String)
 */
@Transactional(readOnly = true)
public List<LocationTag> getLocationTags(String search) throws APIException {
    // Consistent with getAddressTemplate(): use StringUtils.hasLength rather
    // than a hand-rolled null/"" pair of checks. Behavior is identical.
    if (!StringUtils.hasLength(search)) {
        return Context.getLocationService().getAllLocationTags(true);
    }
    return dao.getLocationTags(search);
}
/**
 * Retires the given tag with a mandatory reason; a no-op if already retired.
 *
 * @see org.openmrs.api.LocationService#retireLocationTag(LocationTag, String)
 * @throws APIException when {@code reason} is null for a non-retired tag
 */
public LocationTag retireLocationTag(LocationTag tag, String reason) throws APIException {
    // Guard clauses instead of nested if/else: already-retired tags pass
    // straight through, and a missing reason is rejected up front.
    if (tag.isRetired()) {
        return tag;
    }
    if (reason == null) {
        throw new APIException("Reason is required");
    }
    tag.setRetired(true);
    tag.setRetireReason(reason);
    tag.setRetiredBy(Context.getAuthenticatedUser());
    tag.setDateRetired(new Date());
    return Context.getLocationService().saveLocationTag(tag);
}
/**
 * Clears the retired flag and all retire metadata, then re-saves the tag.
 *
 * @see org.openmrs.api.LocationService#unretireLocationTag(org.openmrs.LocationTag)
 */
public LocationTag unretireLocationTag(LocationTag tag) throws APIException {
    tag.setRetired(false);
    tag.setRetireReason(null);
    tag.setRetiredBy(null);
    tag.setDateRetired(null);
    return Context.getLocationService().saveLocationTag(tag);
}
/**
 * Permanently deletes the tag from the database. Irreversible.
 *
 * @see org.openmrs.api.LocationService#purgeLocationTag(org.openmrs.LocationTag)
 */
public void purgeLocationTag(LocationTag tag) throws APIException {
    dao.deleteLocationTag(tag);
}
/**
 * Counts locations matching the name fragment, optionally including retired.
 *
 * @see org.openmrs.api.LocationService#getCountOfLocations(String, Boolean)
 */
@Override
@Transactional(readOnly = true)
public Integer getCountOfLocations(String nameFragment, Boolean includeRetired) {
    // DAO may return a Long/Number; normalize to Integer via OpenmrsUtil.
    return OpenmrsUtil.convertToInteger(dao.getCountOfLocations(nameFragment, includeRetired));
}
/**
 * Paged name-fragment search without parent/attribute filters.
 *
 * @deprecated superseded by the richer overload below
 * @see LocationService#getLocations(String, boolean, Integer, Integer)
 */
@Override
@Deprecated
@Transactional(readOnly = true)
public List<Location> getLocations(String nameFragment, boolean includeRetired, Integer start, Integer length)
        throws APIException {
    return dao.getLocations(nameFragment, null, null, includeRetired, start, length);
}
/**
 * Paged location search by name fragment, parent, and attribute values.
 * Attribute values are serialized to their custom-datatype value references
 * before being handed to the DAO.
 *
 * @see LocationService#getLocations(String, org.openmrs.Location, java.util.Map, boolean,
 *      Integer, Integer)
 */
@Override
// Added for consistency: every other read-only method in this service is
// annotated @Transactional(readOnly = true); this overload was missing it.
@Transactional(readOnly = true)
public List<Location> getLocations(String nameFragment, Location parent,
        Map<LocationAttributeType, Object> attributeValues, boolean includeRetired, Integer start, Integer length) {
    Map<LocationAttributeType, String> serializedAttributeValues = CustomDatatypeUtil
            .getValueReferences(attributeValues);
    return dao.getLocations(nameFragment, parent, serializedAttributeValues, includeRetired, start, length);
}
/**
 * Returns locations with no parent, optionally including retired ones.
 *
 * @see LocationService#getRootLocations(boolean)
 */
@Override
@Transactional(readOnly = true)
public List<Location> getRootLocations(boolean includeRetired) throws APIException {
    return dao.getRootLocations(includeRetired);
}
/**
 * Address-value auto-completion hook; this base implementation is a stub.
 *
 * @return null always -- "not implemented" sentinel; callers must handle it.
 * @see org.openmrs.api.LocationService#getPossibleAddressValues(org.openmrs.Address,
 *      org.openmrs.AddressField)
 */
public List<String> getPossibleAddressValues(Address incomplete, String fieldName) throws APIException {
    // not implemented by default
    // NOTE(review): null (not an empty list) appears to be the deliberate
    // "unsupported" sentinel here -- confirm before changing.
    return null;
}
/**
 * Returns every location attribute type.
 *
 * @see org.openmrs.api.LocationService#getAllLocationAttributeTypes()
 */
@Override
@Transactional(readOnly = true)
public List<LocationAttributeType> getAllLocationAttributeTypes() {
    return dao.getAllLocationAttributeTypes();
}
/**
 * Looks up an attribute type by primary key.
 *
 * @see org.openmrs.api.LocationService#getLocationAttributeType(java.lang.Integer)
 */
@Override
@Transactional(readOnly = true)
public LocationAttributeType getLocationAttributeType(Integer id) {
    return dao.getLocationAttributeType(id);
}
/**
 * Looks up an attribute type by UUID.
 *
 * @see org.openmrs.api.LocationService#getLocationAttributeTypeByUuid(java.lang.String)
 */
@Override
@Transactional(readOnly = true)
public LocationAttributeType getLocationAttributeTypeByUuid(String uuid) {
    return dao.getLocationAttributeTypeByUuid(uuid);
}
/**
 * Creates or updates a location attribute type.
 *
 * @see org.openmrs.api.LocationService#saveLocationAttributeType(org.openmrs.LocationAttributeType)
 */
@Override
public LocationAttributeType saveLocationAttributeType(LocationAttributeType locationAttributeType) {
    return dao.saveLocationAttributeType(locationAttributeType);
}
/**
 * Retires a location attribute type.
 *
 * NOTE(review): neither the retired flag nor the reason is set here -- only a
 * save is issued. Presumably retire metadata is applied by surrounding AOP
 * advice before this runs; confirm, otherwise this silently ignores `reason`.
 *
 * @see org.openmrs.api.LocationService#retireLocationAttributeType(org.openmrs.LocationAttributeType,
 *      java.lang.String)
 */
@Override
public LocationAttributeType retireLocationAttributeType(LocationAttributeType locationAttributeType, String reason) {
    return dao.saveLocationAttributeType(locationAttributeType);
}
/**
 * Un-retires a location attribute type.
 *
 * NOTE(review): as with retire above, the flag itself is not cleared here;
 * presumably handled by surrounding advice -- confirm.
 *
 * @see org.openmrs.api.LocationService#unretireLocationAttributeType(org.openmrs.LocationAttributeType)
 */
@Override
public LocationAttributeType unretireLocationAttributeType(LocationAttributeType locationAttributeType) {
    return dao.saveLocationAttributeType(locationAttributeType);
}
/**
 * Permanently deletes an attribute type. Irreversible.
 *
 * @see org.openmrs.api.LocationService#purgeLocationAttributeType(org.openmrs.LocationAttributeType)
 */
@Override
public void purgeLocationAttributeType(LocationAttributeType locationAttributeType) {
    dao.deleteLocationAttributeType(locationAttributeType);
}
/**
 * Looks up a location attribute (a value instance, not a type) by UUID.
 *
 * @see org.openmrs.api.LocationService#getLocationAttributeByUuid(java.lang.String)
 */
@Override
@Transactional(readOnly = true)
public LocationAttribute getLocationAttributeByUuid(String uuid) {
    return dao.getLocationAttributeByUuid(uuid);
}
/**
 * Returns the address layout template XML, falling back to the built-in
 * default when the global property is unset or blank.
 *
 * @see org.openmrs.api.LocationService#getAddressTemplate()
 */
@Override
@Transactional(readOnly = true)
public String getAddressTemplate() throws APIException {
    String addressTemplate = Context.getAdministrationService().getGlobalProperty(
        OpenmrsConstants.GLOBAL_PROPERTY_ADDRESS_TEMPLATE);
    // hasLength covers both null and empty string.
    if (!StringUtils.hasLength(addressTemplate)) {
        addressTemplate = OpenmrsConstants.DEFAULT_ADDRESS_TEMPLATE;
    }
    return addressTemplate;
}
/**
 * Stores the address layout template XML in the corresponding global
 * property. No XML validation is performed here.
 *
 * @see org.openmrs.api.LocationService#saveAddressTemplate(String)
 */
@Override
public void saveAddressTemplate(String xml) throws APIException {
    Context.getAdministrationService().setGlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_ADDRESS_TEMPLATE, xml);
}
/**
 * Looks up an attribute type by its exact name.
 *
 * @see org.openmrs.api.LocationService#getLocationAttributeTypeByName(java.lang.String)
 */
@Override
@Transactional(readOnly = true)
public LocationAttributeType getLocationAttributeTypeByName(String name) {
    return dao.getLocationAttributeTypeByName(name);
}
}
| Winbobob/openmrs-core | api/src/main/java/org/openmrs/api/impl/LocationServiceImpl.java | Java | mpl-2.0 | 15,659 |
/*
Battleship browser client
version 0.1.2
requires:
battleshipclient.js >=0.1.2
battleshipgraphics.js >=0.1.2
*/
// Holds all client-side match state: both grids, both fleets, whose turn it
// is, and this player's server-assigned index (-1 until "introduce" arrives).
function battleshipSession(){
    this.isMatchBeginning = false;  // true once both fleets are placed
    this.isPlayerMoved = false;     // true when it is this player's turn
    this.playerIndex = -1;          // assigned by the server on introduce
    // Cell maps keyed by column+""+row strings (see initialize()).
    this.Grid = {
        Self: [],
        Enemy: [],
    };
    // Fleets: Self is pre-built in initialize(); Enemy fills in as ships die.
    this.Ships = {
        Self:[],
        Enemy:[]
    };
    this.initialize();
}
// Builds the 10x10 grids (keys "11".."1010", column-major) and the classic
// fleet: 1x4-cell, 2x3-cell, 3x2-cell, 4x1-cell ships, all unplaced.
battleshipSession.prototype.initialize = function (){
    var column = 1;
    var row = 1;
    for (var i=0;i<100;i++){
        // String key: concatenated column and row. Unambiguous for 1..10.
        var key = column + "" + row;
        this.Grid.Self[key] = { ship: null, isTurned: false };
        this.Grid.Enemy[key] = { ship: null, isTurned: false };
        if (++row > 10){
            column++;
            row = 1;
        }
    }
    // `maximum` is the ship size, `minimum` how many ships of that size:
    // iterations produce 1x4, 2x3, 3x2, 4x1.
    var maximum = 4;
    var minimum = 1;
    do{
        for (var i=0;i<minimum;i++){
            this.Ships.Self.push ({ count: maximum , column: null, row: null , isVertical: true , isDie: false });
        }
        maximum--;
        minimum++;
    } while (maximum > 0);
}
// Stamps the payload with the event name and this player's index, then hands
// it to the transport layer (sendMessageToSession, from battleshipclient.js).
battleshipSession.prototype.sendMessage = function (event,obj){
    if (!obj) throw 'Sending object must defined!';
    obj.event = event;
    obj.playerIndex = this.playerIndex;
    sendMessageToSession(obj);
}
// Returns the coordinates of cells under `ship` (anchored at column,row) that
// are already occupied on the player's grid. Empty array means placement is
// clear.
//
// Fixed two bugs: the original read `ship.length` (ships carry `count`), so
// the loop never ran; and it redeclared `column`/`row` with `var`, clobbering
// the parameters so the offset accumulated across iterations.
battleshipSession.prototype.checkBlockedCells = function (ship,column,row){
    var blockedCells = [];
    for (var i=0;i<ship.count;i++){
        // Vertical ships extend along columns, horizontal along rows
        // (matching the original orientation convention).
        var cellColumn = ship.isVertical ? (column + i) : column;
        var cellRow = !ship.isVertical ? (row + i) : row;
        var key = cellColumn + "" + cellRow;
        // NOTE(review): off-grid keys would make Grid.Self[key] undefined and
        // throw here -- callers are assumed to pass in-bounds anchors; confirm.
        if (this.Grid.Self[key].ship != null) blockedCells.push({column:cellColumn,row:cellRow});
    }
    return blockedCells;
}
// True when every cell `ship` would cover (anchored at column,row) is free.
//
// Fixed the same two bugs as checkBlockedCells: `ship.length` should be
// `ship.count` (so the original always returned true without checking), and
// `var column`/`var row` clobbered the parameters, accumulating the offset.
battleshipSession.prototype.checkFreeCells = function (ship,column,row){
    for (var i=0;i<ship.count;i++){
        var cellColumn = ship.isVertical ? (column + i) : column;
        var cellRow = !ship.isVertical ? (row + i) : row;
        var key = cellColumn + "" + cellRow;
        if (this.Grid.Self[key].ship != null) return false;
    }
    return true;
}
// Places `ship` on the player's grid with its origin at (column,row),
// writing a back-reference into each covered cell and recording the origin
// on the ship itself. Silently does nothing when the cells are not free.
//
// Fixed: `ship.length` -> `ship.count`, and the `var column`/`var row`
// parameter clobbering (which also corrupted the origin recorded below).
battleshipSession.prototype.setShipToGrid = function (ship,column,row){
    if (!this.checkFreeCells(ship,column,row)) return;//TODO:handle error
    for (var i=0;i<ship.count;i++){
        var cellColumn = ship.isVertical ? (column + i) : column;
        var cellRow = !ship.isVertical ? (row + i) : row;
        var key = cellColumn + "" + cellRow;
        this.Grid.Self[key].ship = ship;
    }
    // The parameters are untouched now, so this records the true origin cell.
    ship.column = column;
    ship.row = row;
}
// Applies the server's verdict for our shot at (column,row) on the enemy
// grid. We never see the enemy fleet up front, so hit cells grow a synthetic
// ship object ({count, coordinates}) one cell at a time; when the server says
// "dead" the accumulated ship is moved into Ships.Enemy.
battleshipSession.prototype.changeEnemyGridStatus = function (column,row,status){
    switch (status){
        case "empty":
            var cell = this.Grid.Enemy[column + "" + row];
            cell.isTurned = true;
            cell.ship = null;
            break;
        case "dead":   // intentional fall-through: "dead" is "hit" + bookkeeping
        case "hit":
            var cell = this.Grid.Enemy[column + "" + row];
            cell.isTurned = true;
            if (cell.ship == null) cell.ship = { count: 0 , coordinates:[] };
            cell.ship.count += 1;
            cell.ship.coordinates.push({column:column,row:row});
            if (status == "dead") this.Ships.Enemy.push (cell.ship);
            break;
        default:
            if (status.indexOf("error") > -1){
                //TODO:handling error
            }
            break;
    }
}
// Applies the server's verdict for the enemy's shot at (column,row) on our
// own grid. Our cells already reference real ship objects, so "dead" just
// flags the ship; no synthetic ship building is needed here.
battleshipSession.prototype.changeSelfGridStatus = function (column,row,status){
    switch (status){
        case "empty":
            var cell = this.Grid.Self[column + "" + row];
            cell.isTurned = true;
            break;
        case "dead":   // intentional fall-through: shared turn-marking with "hit"
        case "hit":
            var cell = this.Grid.Self[column + "" + row];
            cell.isTurned = true;
            if (status == "dead") cell.ship.isDie = true;
            break;
        default:
            if (status.indexOf("error") > -1){
                //TODO:handling error
            }
            break;
    }
}
// On-screen match clock. Counts elapsed seconds via setTimeout and renders
// them at (x,y) on `layer` using a bitmap-font battleshipText
// (from battleshipgraphics.js).
function matchTime ( layer , x , y , viewer ){
    this.x = x;
    this.y = y;
    this.layer = layer;
    this.currentTime = 0;       // elapsed seconds
    this.spriteName;            // declared for documentation; set elsewhere
    this.viewer = viewer;
    this.needIterate = false;   // tick loop gate, toggled by start()/stop()
    this.timeoutId;             // pending setTimeout handle, set in start()
    this.text = new battleshipText(
        {
            characters: "0123456789:",
            textAtlas: "images/Numbers_title.png",
            width:8,
            height:19,
            viewer:viewer,
            name:"matchTime"
        }
    );
}
// Starts the one-second tick loop. Each tick increments currentTime and
// re-arms itself while needIterate stays true.
matchTime.prototype.start = function (){
    this.needIterate = true;
    var self = this;   // capture for the closure below
    function iterator(){
        // NOTE(review): the first increment happens immediately, not after
        // one second -- the clock reads 1s at t=0; confirm this is intended.
        self.currentTime+=1;
        if (self.needIterate){
            self.timeoutId = setTimeout(
                iterator,
                1000
            );
        }
    }
    iterator();
}
// Stops the tick loop and cancels any pending tick.
matchTime.prototype.stop = function(){
    this.needIterate = false;
    clearTimeout( this.timeoutId );
}
// Zero-pads a number to two characters ("7" -> "07", "12" -> "12").
matchTime.prototype.showTwoDigit = function (digit){
    if (digit < 10) {
        return "0" + digit;
    }
    return "" + digit;
}
// Renders elapsed time as "HH:MM".
//
// Fixed: the original used Math.round, which rounds up past the half mark --
// 30 elapsed minutes displayed as "01:30" and 90 seconds as "00:02". Elapsed
// time must truncate, so both components now use Math.floor.
matchTime.prototype.showTime = function (){
    var hours = Math.floor(this.currentTime / 3600);
    var minutes = Math.floor((this.currentTime % 3600) / 60);
    var time = this.showTwoDigit(hours) + ":" + this.showTwoDigit(minutes);
    this.text.drawText ( this.layer , this.x , this.y , time );
}
// Minimap cell display states; the numeric value doubles as the sprite-tile
// index used by miniMap.changeCellTileOnSprite.
var
CELL_STATE_EMPTY = 1,
CELL_STATE_CLEAR = 2,
CELL_STATE_SHIP = 3,
CELL_STATE_INJURED = 4,
CELL_STATE_DEAD = 5
;
// Small size x size grid widget drawn from two sprite atlases: one for the
// cell border squares, one for the state tiles layered on top.
function miniMap(options){
    this.x = options.x;                    // top-left corner, pixels
    this.y = options.y;
    this.size = options.size;              // cells per side
    this.count = options.size*options.size;
    this.borderWidth = options.borderWidth; // outer square size, pixels
    this.stateWidth = options.stateWidth;   // inner state-tile size, pixels
    this.viewer = options.viewer;
    this.layer = options.layer;
    this.atlas = options.atlas;            // border-square atlas image
    this.atlasCell = options.atlasCell;    // state-tile atlas image
    this.name = options.name;              // prefix for sprite names
    this.cellCreated = false;              // lazy sprite creation flag
}
// Sprite name for the border square at (column,row).
miniMap.prototype.getName = function (column,row){
    return this.name + "minimap" + column + "" + row;
}
// Sprite name for the state tile at (column,row).
miniMap.prototype.getCellName = function (column,row){
    return this.name + "minimapcell" + column + "" + row;
}
// Registers border and state sprites for every cell (done once, lazily,
// on the first show()).
miniMap.prototype.createCells = function(){
    for (var i=0;i<this.size;i++){
        for (var l=0;l<this.size;l++){
            this.viewer.addSprite ( this.layer , this.getName ( i , l ) , this.atlas );
            this.viewer.addSprite ( this.layer , this.getCellName ( i , l ) , this.atlasCell );
        }
    }
    this.cellCreated = true;
}
// Convenience wrapper: resolve the state-tile sprite by coordinates, then
// delegate to changeCellTileOnSprite.
miniMap.prototype.changeCellTileOnCoordinates = function ( column , row , state ){
    var sprite = this.viewer.getSprite ( this.getCellName ( column , row ) );
    this.changeCellTileOnSprite ( sprite , state );
}
// Switches a state-tile sprite to the atlas tile matching `state`.
miniMap.prototype.changeCellTileOnSprite = function ( sprite , state ){
    switch (state){//it need for control of state variable
        // Intentional fall-through: all valid states share the same call;
        // the switch exists only to whitelist the state value.
        case CELL_STATE_EMPTY:
        case CELL_STATE_CLEAR:
        case CELL_STATE_SHIP:
        case CELL_STATE_INJURED:
        case CELL_STATE_DEAD:
            sprite.setSpriteTile(state,this.stateWidth,this.stateWidth);
            break;
        default: throw 'Incorrect state.';
    }
}
// Lays out all cell sprites in a grid. Squares overlap their neighbours by
// one pixel (borderWidth-1 step) so adjacent borders merge into single lines;
// state tiles sit inset by one pixel inside each square.
miniMap.prototype.show = function (){
    if (!this.cellCreated) this.createCells();
    var columnPosition;
    var rowPosition = this.y;
    for ( var i=0 ; i < this.size ; i++ ){
        columnPosition = this.x;
        for ( var l=0 ; l< this.size ; l++ ){
            var sprite = this.viewer.getSprite ( this.getName ( i , l ) );
            var spriteCell = this.viewer.getSprite ( this.getCellName ( i , l ) );
            sprite.position ( columnPosition , rowPosition );
            spriteCell.position( columnPosition + 1 , rowPosition + 1 );
            columnPosition += this.borderWidth-1;
        }
        rowPosition += this.borderWidth-1;
    }
}
// Page bootstrap: wires the DOM controls to the session, builds the canvas
// viewer, and opens the websocket session. Only runs on pages that contain
// the #room element. Heavily dependent on globals from battleshipclient.js
// (startSession, *Handler constants) and battleshipgraphics.js
// (battleshipViewer).
$(
    function (){
        var isGameStarted = $('#room').size() > 0;
        if (!isGameStarted) return;
        var session = new battleshipSession( );
        var time;          // matchTime, created once images are loaded
        var userMinimap;   // miniMap, ditto
        var viewer = new battleshipViewer(
            {
                onStartLoadImages: function () {
                },
                // All graphics objects are created only after the image atlas
                // set below has finished loading.
                onEndLoadImages: function () {
                    viewer.addLayer( "background" );
                    viewer.addSprite( "background" , "mainbackground" , "images/background.png" );
                    viewer.addLayer( "grids" );
                    viewer.addLayer( "hud" );
                    time = new matchTime ( "hud" , 379 , 4 , viewer );
                    time.start();
                    viewer.StartTimer( );
                    userMinimap = new miniMap(
                        {
                            x:39,
                            y:230,
                            size:10,
                            borderWidth:15,
                            stateWidth:13,
                            viewer:viewer,
                            layer:"grids",
                            atlas:"images/minimap_square.png",
                            atlasCell:"images/minimap_title.png",
                            name:"userMinimap"
                        }
                    );
                    userMinimap.show();
                },
                onDraw: function () {
                    // NOTE(review): `sprite` is fetched but unused; looks like
                    // leftover debug code -- confirm before removing.
                    var sprite = this.sprites["test"];
                    time.showTime();
                },
                images: [ "images/minimap_square.png", "images/minimap_title.png" , "images/background.png" , "images/Numbers_title.png" ]
            }
        );
        var room = parseInt($('#room').val(),10);
        $('#sendToChat').click(
            function (){
                session.sendMessage(
                    "chat",
                    {
                        message:$('#messageText').val()
                    }
                );
            }
        );
        // Debug fleet layout: places the full fleet at fixed coordinates and
        // submits it to the server.
        $('#sendShips').click(
            function (){
                session.setShipToGrid(session.Ships.Self[0],1,1);//4
                session.setShipToGrid(session.Ships.Self[1],3,1);//3
                session.setShipToGrid(session.Ships.Self[2],5,1);//3
                session.setShipToGrid(session.Ships.Self[3],7,1);//2
                session.setShipToGrid(session.Ships.Self[4],1,8);//2
                session.setShipToGrid(session.Ships.Self[5],3,5);//2
                session.setShipToGrid(session.Ships.Self[6],4,9);//1
                session.setShipToGrid(session.Ships.Self[7],6,9);//1
                session.setShipToGrid(session.Ships.Self[8],8,9);//1
                session.setShipToGrid(session.Ships.Self[9],10,9);//1
                session.sendMessage(
                    "createField",
                    {
                        ships:session.Ships.Self
                    }
                );
            }
        );
        $('#turn').click(
            function (){
                if (!session.isMatchBeginning) return;
                session.sendMessage(
                    "move",
                    {
                        column: $('#column').val(),
                        row: $('#row').val()
                    }
                );
            }
        );
        // Server push dispatch: routes incoming events into the session and
        // mirrors state into the debug form fields.
        startSession(
            room,
            'Test',
            function (event,data){
                switch (event){
                    case introduceHandler:
                        session.playerIndex = data.playerIndex;
                        break;
                    case chatHandler:
                        $('#messageLog').val($('#messageLog').val() + data.message);
                        break;
                    case whoMoveHandler:
                        session.isPlayerMoved = data.isYouTurn;
                        $('#WhoTurn').val( session.isPlayerMoved ? "You turn" : "Not you turn" );
                        break;
                    case matchSuccessHandler:
                        session.isMatchBeginning = data.isSuccess;
                        $('#GameStatus').val( session.isMatchBeginning ? "game started" : "arrangement of ships" );
                        break;
                }
            }
        );
    }
);
); | trueromanus/battleship | src/battleship/public/javascripts/battleshiplogic.js | JavaScript | mpl-2.0 | 9,958 |
CoSeMe.namespace('protocol', (function(){
'use strict';
var k = CoSeMe.protocol.dictionary; // protocol constants
var token2Code = k.token2Code;
var ByteArray = CoSeMe.utils.ByteArrayWA;
var logger = new CoSeMe.common.Logger('BinaryWriter');
var IS_COUNTING = true;
var IS_RAW = true;
/**
 * The binary writer sends via TCPSocket the required data avoiding
 * unnecessary copies. To accomplish this purpose, as the size is not known
 * before codifying the tree, the algorithm preprocess the tree by calculating
 * the necessary space only, then repeat the processing to effectively write
 * the data.
 *
 * Every write* method therefore takes a trailing `counting` flag: when true
 * it only bumps `messageLength`; when false it emits real bytes.
 */
function BinaryWriter(connection) {
    this._socket = connection.socket; // an opened socket in binary mode
    this.outputKey = undefined;       // RC4 output key; set => traffic is encrypted
}
var STREAM_START = k.STREAM_START;
/**
 * Sends the start of the protocol: the raw "WA" handshake followed by the
 * stream-start stanza. Each part does the usual count-then-write double pass.
 */
BinaryWriter.prototype.streamStart = function(domain, resource, callback) {
    var writerTask = this.newWriteTask(callback);
    writerTask._sendProtocol(IS_COUNTING);
    writerTask._sendProtocol();
    writerTask._streamStart(domain, resource, IS_COUNTING);
    writerTask._streamStart(domain, resource);
};
// Emits the raw (unframed, unencrypted) "WA" + version handshake bytes.
BinaryWriter.prototype._sendProtocol = function(counting) {
    var dictionaryVersion = 5; // my guess: the dictionary version
    this.resetBuffer(counting, IS_RAW);
    this.writeASCII('WA', counting);
    this.writeByte(STREAM_START, counting);
    this.writeByte(dictionaryVersion, counting);
    this.flushBuffer(counting);
}
// Emits the framed stream-start stanza carrying the target domain/resource.
BinaryWriter.prototype._streamStart = function(domain, resource, counting) {
    var attributes = {to: domain, resource: resource};
    this.resetBuffer(counting);
    // List header: 1 slot for the stanza marker + key/value pair per attribute.
    this.writeListStart(1 + 2 * CoSeMe.utils.len(attributes), counting);
    this.writeInt8(1, counting);
    this.writeAttributes(attributes, undefined, counting);
    this.sendMessage(counting);
}
/**
 * Spawn a new BinaryWriter in charge of sending the tree via socket.
 * Runs the usual two passes: size-count first, then the actual write.
 */
BinaryWriter.prototype.write = function(tree, callback) {
    var writerTask = this.newWriteTask(callback);
    writerTask._write(tree, IS_COUNTING);
    writerTask._write(tree);
};
/*
 * Creates a new BinaryWriter object proxying the current one. This new
 * object can not spawn new write tasks.
 */
BinaryWriter.prototype.newWriteTask = function(callback) {
    var task = Object.create(this);
    task.newWriteTask = undefined;
    task._callback = callback;
    task._socket = this._socket; // Copy the current socket to the task to
                                 // ensure this task put its data on the current
                                 // socket and not in a future one (i.e a new
                                 // one as a result of a reconnection).
    return task;
};
// Encodes one tree (or a single 0 byte for an empty keep-alive) into the
// buffer and ships it.
BinaryWriter.prototype._write = function(tree, counting) {
    this.resetBuffer(counting);
    if (!tree) {
        this.writeInt8(0, counting);
    }
    else {
        this.writeTree(tree, counting);
        // Log only on the real pass, not while counting.
        !counting && logger.log(tree.toString());
    }
    this.sendMessage(counting);
}
/**
 * Encode the tree in binary format and put it in the output buffer.
 * Layout: list header, tag, attribute key/value pairs, optional data blob,
 * then a nested list of child trees.
 */
BinaryWriter.prototype.writeTree = function(tree, counting) {
    // Slots: 1 for the tag + one key and one value per attribute.
    var length = 1 + (2 * CoSeMe.utils.len(tree.attributes));
    if (tree.children.length > 0) length++;
    if (tree.data !== null) length++;
    // Tree header and tag
    this.writeListStart(length, counting);
    this.writeString(tree.tag, counting);
    // Attributes
    this.writeAttributes(tree.attributes, tree, counting);
    // Data
    if (tree.data) {
        this.writeBytes(tree.data, counting);
    }
    // Children
    var childrenCount = tree.children.length;
    if (childrenCount !== 0) {
        this.writeListStart(childrenCount, counting);
        for (var i = 0; i < childrenCount; i++) {
            this.writeTree(tree.children[i], counting);
        }
    }
};
var SHORT_LIST_MARK = k.SHORT_LIST_MARK;
var LONG_LIST_MARK = k.LONG_LIST_MARK;
var EMPTY_LIST_MARK = k.EMPTY_LIST_MARK;
/**
 * Writes an attributes header in the output buffer: an empty/short/long
 * marker followed by the element count in 0, 1 or 2 bytes.
 */
BinaryWriter.prototype.writeListStart = function(length, counting) {
    if (length === 0) {
        counting ? this.messageLength++ : this.message.write(EMPTY_LIST_MARK);
    }
    else if (length < 256) {
        counting ? this.messageLength++ : this.message.write(SHORT_LIST_MARK);
        this.writeInt8(length, counting);
    }
    else {
        counting ? this.messageLength++ : this.message.write(LONG_LIST_MARK);
        this.writeInt16(length, counting);
    }
    return this;
};
/**
 * Writes an attribute object in the output buffer as name/value string pairs.
 * When a tree is supplied, values are resolved through the tree accessor.
 */
BinaryWriter.prototype.writeAttributes = function(attrs, tree, counting) {
    var attributes = attrs || {};
    var value;
    for (var attrName in attributes) if (attributes.hasOwnProperty(attrName)) {
        value = tree ? tree.getAttributeValue(attrName) : attributes[attrName];
        this.writeString(attrName, counting);
        this.writeString(value, counting);
    }
    return this;
};
/**
 * Wrapper to encode both tokens and JID (Jabber ID).
 * Tries the protocol dictionary first; otherwise falls back to a raw byte
 * string, or the dedicated JID encoding when the value looks like user@server.
 */
BinaryWriter.prototype.writeString = function(string, counting) {
    if (typeof string !== 'string') {
        logger.warn('Expecting a string!', typeof string, 'given instead.');
        if (string === null || string === undefined) {
            string = '';
        } else {
            string = string.toString();
        }
    }
    var result = token2Code(string);
    if (result.code !== null) {
        // Dictionary hit: emit (optional submap selector +) token code.
        if (result.submap !== null) {
            this.writeToken(result.submap, counting);
        }
        this.writeToken(result.code, counting);
    } else {
        // `< 1` (not `< 0`): a leading "@" is treated as a plain string,
        // not a JID with an empty user part.
        if (string.indexOf('@') < 1) {
            this.writeBytes(string, counting);
        }
        else {
            var userAndServer = string.split('@');
            var user = userAndServer[0];
            var server = userAndServer[1];
            this.writeJid(user, server, counting);
        }
    }
    return this;
};
var SURROGATE_MARK = k.SURROGATE_MARK;
/**
 * Writes a string token in an efficent encoding derived from a dictionary.
 * Codes below 245 are a single byte; 245..500 take a surrogate prefix.
 *
 * NOTE(review): codes above 500 are silently dropped (no else branch) --
 * presumably the dictionary never produces them; confirm.
 */
BinaryWriter.prototype.writeToken = function(code, counting) {
    if (code < 245) {
        counting ? this.messageLength++ : this.message.write(code);
    }
    else if (code <= 500) {
        counting ? this.messageLength++ : this.message.write(SURROGATE_MARK);
        counting ? this.messageLength++ : this.message.write(code - 245);
    }
    return this;
};
var SHORT_STRING_MARK = k.SHORT_STRING_MARK;
var LONG_STRING_MARK = k.LONG_STRING_MARK;
/**
 * Writes bytes from a JavaScript (latin1) string, an ArrayBuffer or any
 * type with a buffer property of type ArrayBuffer like ArrayBufferView
 * instances. Emits a short (1-byte length) or long (3-byte length) string
 * header followed by the raw bytes.
 */
BinaryWriter.prototype.writeBytes = function(data, counting) {
    var bytes;
    if (typeof data === 'string') {
        bytes = CoSeMe.utils.bytesFromLatin1(data);
    } else if (data instanceof ArrayBuffer) {
        bytes = new Uint8Array(data);
    } else if (data && data.buffer instanceof ArrayBuffer) {
        // NOTE(review): this views the WHOLE backing buffer, ignoring the
        // view's byteOffset/length -- fine for views covering their buffer
        // entirely; confirm callers never pass partial views.
        bytes = new Uint8Array(data.buffer);
    } else {
        var fallback = data === null || data === undefined ? '' : data.toString();
        logger.error('Expecting string, ArrayBuffer or ArrayBufferView-like' +
            'object. A', data.constructor.name, 'received instead.');
        bytes = CoSeMe.utils.bytesFromLatin1(fallback);
    }
    var l = bytes.length;
    if (l < 256) {
        counting ? this.messageLength++ : this.message.write(SHORT_STRING_MARK);
        this.writeInt8(l, counting);
    }
    else {
        counting ? this.messageLength++ : this.message.write(LONG_STRING_MARK);
        this.writeInt24(l, counting);
    }
    for (var i = 0; i < l; i++) {
        counting ? this.messageLength++ : this.message.write(bytes[i]);
    }
    return this;
};
var JID_MARK = k.JID_MARK;
/**
 * Writes the JID in the output buffer: the JID marker, then the user part
 * (token 0 when absent) and the server part.
 */
BinaryWriter.prototype.writeJid = function(user, server, counting) {
    counting ? this.messageLength++ : this.message.write(JID_MARK);
    if (user) {
        this.writeString(user, counting);
    } else {
        this.writeToken(0, counting);
    }
    this.writeString(server, counting);
    return this;
};
/**
 * Writes the ASCII values for each character of the given input.
 * Used only for the raw "WA" handshake prefix.
 */
BinaryWriter.prototype.writeASCII = function(input, counting) {
    var character;
    for (var i = 0, l = input.length; i < l; i++) {
        character = input.charCodeAt(i);
        this.writeByte(character, counting);
    }
    return this;
};
/**
 * An alias for writeInt8.
 */
BinaryWriter.prototype.writeByte = function(i, counting) {
    this.writeInt8(i, counting)
    return this;
};
/**
 * Writes a 8-bit integer into the output buffer.
 */
BinaryWriter.prototype.writeInt8 = function(i, counting) {
    counting ? this.messageLength++ : this.message.write(i & 0xFF);
    return this;
};
/**
 * Writes a 16-bit integer into the output buffer (big-endian).
 */
BinaryWriter.prototype.writeInt16 = function(i, counting) {
    counting ? this.messageLength++ : this.message.write((i & 0xFF00) >>> 8);
    counting ? this.messageLength++ : this.message.write((i & 0x00FF));
    return this;
};
/**
 * Writes a 24-bit integer into the output buffer (big-endian).
 */
BinaryWriter.prototype.writeInt24 = function(i, counting) {
    counting ? this.messageLength++ : this.message.write((i & 0xFF0000) >>> 16);
    counting ? this.messageLength++ : this.message.write((i & 0x00FF00) >>> 8);
    counting ? this.messageLength++ : this.message.write((i & 0x0000FF));
    return this;
};
/**
 * Sends the message in the output buffer: optionally encrypts + signs it,
 * prepends the 3-byte frame header, and flushes to the socket.
 * No-op on the counting pass.
 */
BinaryWriter.prototype.sendMessage = function(counting) {
    if (counting) { return; }
    if (this.isEncrypted()) {
        this.cipherMessage();
    }
    this.addMessageHeader();
    this.flushBuffer(counting);
};
/**
 * Consumes all the data in the output buffer sending them via the socket.
 * Invokes the task callback with null on success, an error token otherwise.
 */
BinaryWriter.prototype.flushBuffer = function(counting) {
    if (counting) { return; }
    try {
        // This includes the header and trailing paddings.
        var out, offset, realOutLength;
        if (this.isRaw) {
            // NOTE(review): `out` is already `.finish().buffer` here, yet
            // `out.buffer` is dereferenced again in send() below -- this only
            // works if ByteArrayWA.finish() returns a typed-array view whose
            // own .buffer is the ArrayBuffer; confirm against utils.
            out = this.message.finish().buffer;
            offset = 0;
            realOutLength = this.messageLength;
        }
        else {
            var completeView = new Uint32Array(this.outBuffer);
            var completeViewLength = completeView.buffer.byteLength;
            out = new ByteArray(completeView, completeViewLength).finish().buffer;
            offset = HEADER_PADDING;
            realOutLength = HEADER_LENGTH + this.messageLength;
        }
        var error = null, socketState = this._socket.readyState;
        if (socketState === 'open') {
            // With these offset and length we omit the header and trailing
            // paddings.
            this._socket.send(out.buffer, offset, realOutLength);
        } else {
            logger.warn('Can not write. Socket state:', socketState);
            error = 'socket-non-ready';
        }
        (typeof this._callback === 'function') && this._callback(error);
    } catch (x) {
        // Map a closed socket to a friendlier 'disconnected' token.
        var socketState = this._socket.readyState;
        if (typeof this._callback === 'function') {
            this._callback(socketState === 'closed' ? 'disconnected' : x);
        }
    }
};
var HEADER_LENGTH = k.HEADER_LENGTH;
// Paddings keep the header and message areas 4-byte aligned so Uint32Array
// views can be taken at their offsets (see resetBuffer).
var HEADER_PADDING = 4 - (HEADER_LENGTH % 4);
var COMPLETE_HEADER_LENGTH = HEADER_LENGTH + HEADER_PADDING;
var MAC_LENGTH = k.MAC_LENGTH;
/**
 * If not counting, allocate an outgoing buffer for the message.
 * If only counting, reset the outgoing length to 0.
 *
 * If isRaw parameter is set to true, no header, mac nor cyphering size
 * considerations will be taken into account. Now is used to send the
 * `streamStart`.
 */
BinaryWriter.prototype.resetBuffer = function(counting, isRaw) {
    if (counting) {
        this.messageLength = 0;
    }
    else {
        // If encrypted, it is needed to allocate extra space for the mac.
        this.isRaw = isRaw;
        // No headers, no mac, no cyphering
        if (isRaw) {
            this.message = new ByteArray(this.messageLength);
        }
        // Headers + mac + cyphering
        else {
            var macLength = this.isEncrypted() ? MAC_LENGTH : 0;
            this.messageLength += macLength;
            this.messagePadding = 4 - (this.messageLength % 4);
            this.completeMessageLength = this.messageLength + this.messagePadding;
            // Single backing ArrayBuffer split into two aligned regions:
            // the header area and the message area.
            var totalSize = COMPLETE_HEADER_LENGTH + this.completeMessageLength;
            this.outBuffer = new Uint8Array(totalSize).buffer;
            var headerView =
                new Uint32Array(this.outBuffer, 0, COMPLETE_HEADER_LENGTH >>> 2);
            var messageView =
                new Uint32Array(this.outBuffer, COMPLETE_HEADER_LENGTH);
            this.header = new ByteArray(headerView);
            this.message = new ByteArray(messageView);
        }
    }
};
/**
 * Ciphers the message and signs it. Ciphering occurs IN-PLACE.
 * The HMAC-SHA1 signature is appended after the ciphertext (MAC_LENGTH
 * bytes were pre-reserved in resetBuffer).
 */
BinaryWriter.prototype.cipherMessage = function() {
    var textAndMac = this.outputKey.encodeMessage(this.message);
    for (var i = 0; i < MAC_LENGTH; i++) {
        this.message.write(textAndMac.hmacSHA1.get(i));
    }
};
/**
 * Adds the header of the message and encrypt the output buffer.
 * Frame header: alignment padding, then a 24-bit big-endian length whose
 * top bit flags an encrypted payload.
 */
BinaryWriter.prototype.addMessageHeader = function() {
    // Write padding
    for (var i = 0; i < HEADER_PADDING; i++) {
        this.header.write(0);
    }
    var messageLength = this.messageLength;
    var encryptedFlag = this.isEncrypted() ? 0x80 : 0x00;
    var b2 = encryptedFlag | ((messageLength & 0xFF0000) >>> 16);
    var b1 = (messageLength & 0xFF00) >>> 8;
    var b0 = (messageLength & 0x00FF);
    this.header.write(b2);
    this.header.write(b1);
    this.header.write(b0);
};
/**
 * Returns true if the RC4 key is set.
 */
BinaryWriter.prototype.isEncrypted = function() {
    return !!this.outputKey;
};
return BinaryWriter;
}()));
| mozillahispano/coseme | src/protocol/binary_writer.js | JavaScript | mpl-2.0 | 13,957 |
from django.db import models
from pygments.lexers import get_all_lexers
from pygments.styles import get_all_styles
# Pygments lexers that expose at least one alias (item[1] is the alias list).
LEXERS=[item for item in get_all_lexers() if item[1]]
# (first_alias, display_name) pairs for the `language` field's choices.
LANGUAGE_CHOICES=sorted([(item[1][0],item[0]) for item in LEXERS])
# (style, style) pairs for the `style` field's choices.
STYLE_CHOICES=sorted((item,item) for item in get_all_styles())
# Create your models here.
class Interest(models.Model):
    """An interest record with Pygments rendering preferences attached."""

    # Set automatically when the row is first created; also the sort key.
    created = models.DateTimeField(auto_now_add=True)
    # Optional free-form name.
    name = models.CharField(max_length=100, blank=True, default='')
    # NOTE(review): '^$' looks like a regex placeholder default -- confirm
    # the intent before changing it.
    description = models.TextField(default='^$')
    category = models.TextField(default='^$')
    subcategory = models.TextField(default='^$')
    # Pygments rendering options: line numbers, lexer and color style.
    linenos = models.BooleanField(default=False)
    language = models.CharField(choices=LANGUAGE_CHOICES, default='python', max_length=100)
    style = models.CharField(choices=STYLE_CHOICES, default='friendly', max_length=100)

    class Meta:
        ordering = ('created',)
module Labrador
  # Central registry of database adapter identifiers Labrador understands.
  class Constants
    # Order matters only for display; membership is what callers check.
    ADAPTER_KEYS = %w(mongodb postgresql mysql mysql2 sqlite sqlite2 sqlite3 rethinkdb)
  end
end
| bkcloud/bkplatform | bkdatabase/lib/labrador/constants.rb | Ruby | mpl-2.0 | 186 |
package main
import (
"errors"
"fmt"
log "github.com/Sirupsen/logrus"
"github.com/jroimartin/gocui"
"github.com/nextmetaphor/gwAPI/connection"
"github.com/nextmetaphor/gwAPI/controller"
"gopkg.in/square/go-jose.v1/json"
"net/http"
"os"
"strconv"
)
const (
	// ASCII-art logo; the \x1b[3Xm sequences are ANSI color escapes
	// (cyan "gw", white "API") rendered by the terminal.
	logo = "" +
		" \x1b[36m┌─┐┬ ┬\x1b[37m╔═╗╔═╗╦ \n" +
		" \x1b[36m│ ┬│││\x1b[37m╠═╣╠═╝║ \n" +
		" \x1b[36m└─┘└┴┘\x1b[37m╩ ╩╩ ╩ "
	// gocui view identifiers; must match the viewDefinitions map keys.
	logoView           = "logo"
	domainView         = "domain"
	apiSummaryView     = "apis"
	apiDetailView      = "apiDetails"
	sessionSummaryView = "sessions"
)
// viewDefinition describes a gocui view's name and screen rectangle
// (top-left corner plus width/height, in terminal cells).
type viewDefinition struct {
	name   string
	left   int
	top    int
	width  int
	height int
}

var (
	// Static layout table consumed by setViewFromDefinition.
	viewDefinitions = map[string]viewDefinition{
		logoView:           {logoView, 1, 0, 16, 4},
		domainView:         {domainView, 19, 1, 100, 2},
		apiSummaryView:     {apiSummaryView, 1, 5, 200, 20},
		sessionSummaryView: {sessionSummaryView, 1, 26, 52, 42},
		apiDetailView:      {apiDetailView, 19, 6, 200, 26},
	}
	// SECURITY(review): gateway URL and auth token are hardcoded here --
	// these should come from configuration/environment, not source.
	credentials = connection.ConnectionCredentials{
		GatewayURL: "http://192.168.64.8:30002",
		AuthToken:  "ThisInNotTheSecretYouAreLookingFor"}
	connector = connection.Connection{}
)
// setViewFromDefinition creates or resizes the named gocui view using its
// entry in the viewDefinitions table. Like gocui.SetView, it returns
// gocui.ErrUnknownView on first creation.
//
// Fixed: the original did `dfn := viewDefinitions[viewName]` and then tested
// `&dfn == nil` -- the address of a local variable is never nil, so a missing
// definition silently produced a zero-sized view. Use the comma-ok idiom.
func setViewFromDefinition(g *gocui.Gui, viewName string) (*gocui.View, error) {
	var v *gocui.View
	dfn, found := viewDefinitions[viewName]
	if !found {
		return v, errors.New("view " + viewName + " does not have a definition.")
	}
	return g.SetView(dfn.name, dfn.left, dfn.top, dfn.left+dfn.width, dfn.top+dfn.height)
}
// layout is the gocui layout manager, invoked on every redraw. gocui.SetView
// returns ErrUnknownView the first time a view is created -- that is when the
// one-off initialisation (frames, titles, initial content) happens.
func layout(g *gocui.Gui) error {
	maxX, _ := g.Size()
	// create the "logo" view
	if view, viewErr := setViewFromDefinition(g, logoView); viewErr != nil {
		if viewErr != gocui.ErrUnknownView {
			return viewErr
		}
		view.Frame = true
		fmt.Fprintln(view, logo)
	}
	if v, err := setViewFromDefinition(g, domainView); err != nil {
		// Fixed: this branch previously ran its initialisation on ANY error,
		// without the ErrUnknownView check every sibling branch performs --
		// a real failure would dereference a nil view. Propagate real errors.
		if err != gocui.ErrUnknownView {
			return err
		}
		fmt.Fprintln(v, "http://localhost:8080")
		v.FgColor = gocui.ColorCyan
		v.Frame = true
		v.Title = "Dashboard URL [Connected] "
		v.Editable = true
		g.SetCurrentView("domain")
	}
	if v, err := setViewFromDefinition(g, apiSummaryView); err != nil {
		if err != gocui.ErrUnknownView {
			return err
		}
		v.Frame = true
		v.Title = "APIs"
		v.Highlight = true
		v.SelBgColor = gocui.ColorGreen
		v.SelFgColor = gocui.ColorBlack
	}
	if v, err := setViewFromDefinition(g, sessionSummaryView); err != nil {
		if err != gocui.ErrUnknownView {
			return err
		}
		v.Frame = true
		v.Title = "sessions"
		v.Highlight = true
		v.SelBgColor = gocui.ColorGreen
		v.SelFgColor = gocui.ColorBlack
	}
	log.Debug(maxX)
	return nil
}
func quit(g *gocui.Gui, v *gocui.View) error {
return gocui.ErrQuit
}
// cursorDown moves the cursor one line down in the given view; when the
// cursor cannot advance (view edge), the view origin is scrolled instead.
func cursorDown(g *gocui.Gui, v *gocui.View) error {
	if v == nil {
		return nil
	}
	curX, curY := v.Cursor()
	if cursorErr := v.SetCursor(curX, curY+1); cursorErr != nil {
		// Cursor hit the bottom of the visible area — scroll the content.
		originX, originY := v.Origin()
		return v.SetOrigin(originX, originY+1)
	}
	return nil
}
// cursorUp moves the cursor one line up, scrolling the origin when the
// cursor is already at the top of the visible area. The y == 1 special
// cases keep the cursor below the header row of the list views.
// NOTE(review): the log.Debug line and the getKeys(g, "1") call (API id
// hard-coded to "1") look like debugging leftovers — confirm before removing.
func cursorUp(g *gocui.Gui, v *gocui.View) error {
	if v != nil {
		ox, oy := v.Origin()
		cx, cy := v.Cursor()
		log.Debug("HERE", oy, cy)
		getKeys(g, "1")
		// At the very top (no scroll, cursor on first data row): stay put.
		if (oy == 0) && (cy == 1) {
			return nil
		} else if (oy == 1) && (cy == 1) {
			if err := v.SetOrigin(ox, oy-1); err != nil {
				return err
			}
		}
		// Cursor could not move up — scroll the origin instead, if possible.
		if err := v.SetCursor(cx, cy-1); err != nil && oy > 0 {
			if err := v.SetOrigin(ox, oy-1); err != nil {
				return err
			}
		}
	}
	return nil
}
// login pops up a centred "login" view prompting for the domain's auth key
// and gives it input focus. Bound to Enter on the domain view.
func login(gui *gocui.Gui, view *gocui.View) error {
	maxX, maxY := gui.Size()
	if v, err := gui.SetView("login", maxX/2-30, maxY/2, maxX/2+30, maxY/2+2); err != nil {
		// ErrUnknownView means the view was just created; initialise it once.
		if err != gocui.ErrUnknownView {
			return err
		}
		v.Editable = true
		v.Title = "Enter Auth Key for Domain"
		v.Autoscroll = false
		v.Wrap = false
		// NOTE(review): "Snouts" looks like leftover placeholder text — confirm.
		fmt.Fprintln(v, "Snouts")
		if _, err := gui.SetCurrentView("login"); err != nil {
			return err
		}
	}
	return nil
}
// cancelAuthenticationView closes the login popup and returns focus to the
// domain view. Bound to Esc on the "login" view.
func cancelAuthenticationView(gui *gocui.Gui, view *gocui.View) error {
	if err := gui.DeleteView("login"); err != nil {
		return err
	}
	// Propagate focus errors too; the original silently discarded them.
	_, err := gui.SetCurrentView("domain")
	return err
}
// saveAPI pushes the edited API definition (the full text buffer of the
// detail view) back to the gateway and, on HTTP 200, triggers a definition
// reload. Progress is reported by appending to the view's title.
func saveAPI(gui *gocui.Gui, view *gocui.View) error {
	view.Title = view.Title + " Saving API..."
	bufferString := view.Buffer()
	// NOTE(review): the API id is hard-coded to "1" — it should be derived
	// from the currently selected API row.
	_, httpResponse, updateErr := controller.UpdateAPI(credentials, connector, "1", &bufferString)
	if updateErr != nil {
		return updateErr
	}
	if httpResponse.StatusCode == http.StatusOK {
		view.Title = view.Title + "API Saved. Reloading definitions..."
		// NOTE(review): any error from the reload is silently discarded here.
		controller.ReloadGatewayGroup(credentials, connector)
		view.Title = view.Title + "Definitions reloaded"
	}
	return nil
}
// selectAPI opens the API-detail editor for the row under the cursor,
// loading the API definition from the gateway and rendering it as indented
// JSON. Bound to Enter on the "apis" view.
func selectAPI(gui *gocui.Gui, view *gocui.View) error {
	_, cy := view.Cursor()
	var err error
	if _, err = view.Line(cy); err != nil {
		//TODO use the selected line to derive the API id
		return err
	}
	maxX, maxY := gui.Size()
	if v, err := gui.SetView(apiDetailView, 10, 10, maxX-10, maxY-10); err != nil {
		if err != gocui.ErrUnknownView {
			return err
		}
		v.Editable = true
		v.Title = "API Definition"
		v.Autoscroll = false
		v.Wrap = true
		// NOTE(review): the API id is hard-coded to "1"; it should come from
		// the selected row.
		api, _, loadAPIErr := controller.ReadAPI(credentials, connector, "1")
		if loadAPIErr != nil {
			// Return the error instead of panicking — a failed gateway call
			// must not crash the whole UI.
			return loadAPIErr
		}
		jsonAPI, marshallErr := json.MarshalIndent(api, "", " ")
		if marshallErr != nil {
			return marshallErr
		}
		fmt.Fprintln(v, string(jsonAPI))
		if _, err := gui.SetCurrentView(apiDetailView); err != nil {
			return err
		}
	}
	return nil
}
//func showNodes(gui *gocui.Gui, view *gocui.View) error {
//nodeHealth := new(tykcommon.)
//}
// getKeys loads the API keys for apiID from the gateway and renders them
// into the session summary view, replacing any previous content.
func getKeys(gui *gocui.Gui, apiID string) {
	keys, _, err := controller.SelectKeys(credentials, connector, apiID)
	if err != nil {
		// Log and bail out instead of panicking; a transient gateway error
		// should not crash the UI.
		log.Error(err)
		return
	}
	keyView, viewErr := gui.View(sessionSummaryView)
	if viewErr != nil {
		// The original discarded this error and would have dereferenced a
		// nil view below.
		log.Error(viewErr)
		return
	}
	keyView.Clear()
	const outputFormat = "%-32.32s\n"
	for _, key := range keys.APIKeys {
		fmt.Fprintf(keyView, outputFormat, key)
	}
}
// attemptLogin closes the login popup, fetches the API list from the
// gateway, renders it into the "apis" view and gives that view focus.
// Bound to Enter on the "login" view.
func attemptLogin(gui *gocui.Gui, view *gocui.View) error {
	if err := gui.DeleteView("login"); err != nil {
		return err
	}
	// The original discarded this error and then dereferenced a possibly
	// nil apis pointer.
	apis, _, selectErr := controller.SelectAPIs(credentials, connector)
	if selectErr != nil {
		return selectErr
	}
	apiView, apiViewErr := gui.View("apis")
	if apiViewErr != nil {
		// Return instead of log.Fatal: killing the whole process from a UI
		// callback is too drastic.
		log.Error(apiViewErr)
		return apiViewErr
	}
	const outputFormat = "%-6.6s %-32.32s %-24.24s %-32.32s %-24.24s %-32.32s %-100.100s\n"
	// The leading escape sequences set terminal colours (36 = cyan header,
	// 37 = white rows).
	fmt.Fprintf(apiView, outputFormat, "\x1b[36m", "NAME", "ID", "API-ID", "ORG-ID", "LISTEN-PATH", "TARGET-URL")
	for index, api := range *apis {
		fmt.Fprintf(
			apiView,
			outputFormat,
			"\x1b[37m"+strconv.Itoa(index+1),
			api.Name,
			"0",
			api.APIID,
			api.OrgID,
			api.Proxy.ListenPath,
			api.Proxy.TargetURL,
		)
	}
	_, err := gui.SetCurrentView("apis")
	return err
}
// cancelEditAPI closes the API-detail editor and moves focus back to the
// API summary list. Bound to Esc on the detail view.
func cancelEditAPI(gui *gocui.Gui, view *gocui.View) error {
	deleteErr := gui.DeleteView(apiDetailView)
	gui.SetCurrentView("apis")
	return deleteErr
}
// toggleWindows flips focus between the API summary and the session summary
// views. Bound globally to Tab.
func toggleWindows(gui *gocui.Gui, view *gocui.View) error {
	next := apiSummaryView
	if view.Name() == apiSummaryView {
		next = sessionSummaryView
	}
	_, err := gui.SetCurrentView(next)
	return err
}
// keybindings registers every key handler with the gui. An empty view name
// makes a binding global. The table-driven form replaces the original long
// chain of identical SetKeybinding/error blocks; the previously
// commented-out bindings have been dropped.
func keybindings(g *gocui.Gui) error {
	bindings := []struct {
		viewName string
		key      interface{}
		handler  func(*gocui.Gui, *gocui.View) error
	}{
		{"", 'c', quit},
		{"side", gocui.KeyArrowDown, cursorDown},
		{"side", gocui.KeyArrowUp, cursorUp},
		{"", gocui.KeyCtrlC, quit},
		{"domain", gocui.KeyEnter, login},
		// nil handler: swallow ArrowDown inside the login box.
		{"login", gocui.KeyArrowDown, nil},
		{"login", gocui.KeyEnter, attemptLogin},
		{"login", gocui.KeyEsc, cancelAuthenticationView},
		{"apis", gocui.KeyArrowDown, cursorDown},
		{"apis", gocui.KeyArrowUp, cursorUp},
		{"apis", gocui.KeyEnter, selectAPI},
		{apiDetailView, gocui.KeyEsc, cancelEditAPI},
		{apiDetailView, gocui.KeyCtrlS, saveAPI},
		{"", gocui.KeyTab, toggleWindows},
	}
	for _, b := range bindings {
		if err := g.SetKeybinding(b.viewName, b.key, gocui.ModNone, b.handler); err != nil {
			return err
		}
	}
	return nil
}
// main wires up logging, creates the gocui terminal UI and runs its main
// loop until quit (or a fatal error) occurs.
func main() {
	f, err := os.OpenFile("gwAPI.log", os.O_WRONLY|os.O_CREATE, 0755)
	if err != nil {
		panic(err)
	}
	defer f.Close()
	log.SetOutput(f)
	log.SetLevel(log.DebugLevel)
	gui, guiError := gocui.NewGui(gocui.Output256)
	if guiError != nil {
		// The original assigned gui.InputEsc before this check, which
		// dereferences a nil gui whenever NewGui fails.
		log.WithFields(log.Fields{
			"error": guiError}).Debug("Error creating gui.")
		return
	}
	defer gui.Close()
	gui.InputEsc = true
	gui.Cursor = true
	gui.SetManagerFunc(layout)
	gui.SelFgColor = gocui.ColorGreen
	gui.Highlight = true
	if err := keybindings(gui); err != nil {
		log.Panicln(err)
	}
	if err := gui.MainLoop(); err != nil && err != gocui.ErrQuit {
		log.Panicln(err)
	}
}
| nextmetaphor/gwAPI | gwAPI.go | GO | mpl-2.0 | 9,772 |
module TK.SpaceTac.UI {
/**
* Orientation of a ValueBar.
*
* A EAST bar will have 0 at the west, and 1 at the east.
*/
export enum ValueBarOrientation {
    NORTH,  // bar fills upward (cropped from the bottom)
    SOUTH,  // bar fills downward
    EAST,   // bar fills left-to-right (0 at the west, 1 at the east)
    WEST,   // bar fills right-to-left
}
/**
* Bar to display a value with a graphical bar
*
* This will crop the image according to the value
*/
export class ValueBar {
    // Phaser node
    node: UIImage
    // Orientation
    private orientation: ValueBarOrientation
    // Current value
    private current = 0
    // Maximal value
    private maximal = 0
    // Proportional value (current / maximal, in 0.0-1.0)
    private proportional = 0
    // Original size of the full (uncropped) bar image
    private original_width: number
    private original_height: number
    // Rectangle describing the visible part of the bar; applied via the
    // geometry mask below rather than by actually cropping the image.
    private crop_rect: Phaser.Geom.Rectangle
    private crop_mask: Phaser.GameObjects.Graphics
    /**
     * Build a bar from the image asset *name*, anchored at (x, y).
     *
     * The image origin is chosen so that the bar shrinks toward its
     * "empty" end: WEST anchors right, NORTH anchors bottom.
     */
    constructor(view: BaseView, name: string, orientation: ValueBarOrientation, x = 0, y = 0) {
    this.node = view.newImage(name, x, y);
    if (orientation == ValueBarOrientation.WEST) {
    this.node.setOrigin(1, 0);
    } else if (orientation == ValueBarOrientation.NORTH) {
    this.node.setOrigin(0, 1);
    } else {
    this.node.setOrigin(0, 0);
    }
    this.orientation = orientation;
    this.original_width = this.node.width;
    this.original_height = this.node.height;
    this.crop_rect = new Phaser.Geom.Rectangle(0, 0, this.original_width, this.original_height);
    // The mask graphics is positioned at the bar's own (x, y) but not added
    // to the display list; it only serves as the geometry mask shape.
    this.crop_mask = view.make.graphics({ x: x, y: y, add: false });
    this.crop_mask.fillStyle(0xffffff);
    this.node.setMask(new Phaser.Display.Masks.GeometryMask(view, this.crop_mask));
    this.setValue(0, 1000);
    }
    /**
     * Update the phaser graphics to match the value
     *
     * Recomputes the crop rectangle from the proportional value and redraws
     * the mask. NORTH/WEST also shift the rectangle so the bar empties from
     * the anchored edge.
     */
    update() {
    // TODO animation
    switch (this.orientation) {
    case ValueBarOrientation.EAST:
    this.crop_rect.width = Math.round(this.original_width * this.proportional);
    break;
    case ValueBarOrientation.WEST:
    this.crop_rect.width = Math.round(this.original_width * this.proportional);
    this.crop_rect.x = this.original_width - this.crop_rect.width;
    break;
    case ValueBarOrientation.NORTH:
    this.crop_rect.height = Math.round(this.original_height * this.proportional);
    this.crop_rect.y = this.original_height - this.crop_rect.height;
    break;
    case ValueBarOrientation.SOUTH:
    this.crop_rect.height = Math.round(this.original_height * this.proportional);
    break;
    }
    this.crop_mask.clear();
    this.crop_mask.fillRectShape(this.crop_rect);
    }
    /**
     * Set the current value, and maximal value
     *
     * Negative *current* is clamped to 0. Passing a negative *maximal*
     * (the default) keeps the previous maximum. A zero maximum yields an
     * empty bar rather than a division by zero.
     */
    setValue(current: number, maximal: number = -1) {
    this.current = current > 0 ? current : 0;
    if (maximal >= 0) {
    this.maximal = maximal;
    }
    if (this.maximal === 0) {
    this.proportional = 0;
    } else {
    this.proportional = this.current / this.maximal;
    }
    this.update();
    }
    /**
     * Get current raw value
     */
    getValue(): number {
    return this.current;
    }
    /**
     * Get the proportional (in 0.0-1.0 range) value
     */
    getProportionalValue(): number {
    return this.proportional;
    }
}
}
| thunderk/spacetac | src/ui/common/ValueBar.ts | TypeScript | mpl-2.0 | 3,808 |
/*
Based on ObjExporter.cs, this "wrapper" lets you export to .OBJ directly from the editor menu.
This should be put in your "Editor"-folder. Use by selecting the objects you want to export, and select
the appropriate menu item from "Custom->Export". Exported models are put in a folder called
"ExportedObj" in the root of your Unity-project. Textures should also be copied and placed in the
same folder.
N.B. there may be a bug so if the custom option doesn't come up refer to this thread http://answers.unity3d.com/questions/317951/how-to-use-editorobjexporter-obj-saving-script-fro.html
Updated for Unity 5.3
*/
using UnityEngine;
using UnityEditor;
using UnityEditor.SceneManagement;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System;
// Minimal material record for the OBJ/MTL export: the material name and the
// asset path of its main texture (null when the material has no texture).
struct ObjMaterial
{
	public string name;
	public string textureName;
}
/// <summary>
/// Unity editor menu commands that export selected MeshFilters to Wavefront
/// OBJ/MTL files, copying referenced textures alongside them.
/// </summary>
public class ExportTools : MonoBehaviour
{
	// Running index offsets so that face indices stay globally correct when
	// several meshes are appended into one OBJ file.
	private static int vertexOffset = 0;
	private static int normalOffset = 0;
	private static int uvOffset = 0;

	//User should probably be able to change this. It is currently left as an exercise for
	//the reader.
	private static string targetFolder = "ExportedObj";

	// Serialise one mesh to OBJ text, collecting its materials into materialList.
	private static string MeshToString(MeshFilter mf, Dictionary<string, ObjMaterial> materialList)
	{
		Mesh m = mf.sharedMesh;
		Material[] mats = mf.GetComponent<Renderer>().sharedMaterials;

		StringBuilder sb = new StringBuilder();

		sb.Append("g ").Append(mf.name).Append("\n");
		foreach(Vector3 lv in m.vertices)
		{
			Vector3 wv = mf.transform.TransformPoint(lv);

			//This is sort of ugly - inverting x-component since we're in
			//a different coordinate system than "everyone" is "used to".
			sb.Append(string.Format("v {0} {1} {2}\n", -wv.x, wv.y, wv.z));
		}
		sb.Append("\n");

		foreach(Vector3 lv in m.normals)
		{
			Vector3 wv = mf.transform.TransformDirection(lv);

			sb.Append(string.Format("vn {0} {1} {2}\n", -wv.x, wv.y, wv.z));
		}
		sb.Append("\n");

		foreach(Vector3 v in m.uv)
		{
			sb.Append(string.Format("vt {0} {1}\n", v.x, v.y));
		}

		for(int material = 0; material < m.subMeshCount; material++)
		{
			sb.Append("\n");
			sb.Append("usemtl ").Append(mats[material].name).Append("\n");
			sb.Append("usemap ").Append(mats[material].name).Append("\n");

			//See if this material is already in the materiallist.
			try
			{
				ObjMaterial objMaterial = new ObjMaterial();

				objMaterial.name = mats[material].name;

				if(mats[material].mainTexture)
					objMaterial.textureName = AssetDatabase.GetAssetPath(mats[material].mainTexture);
				else
					objMaterial.textureName = null;

				materialList.Add(objMaterial.name, objMaterial);
			}
			catch(ArgumentException)
			{
				//Already in the dictionary
			}

			int[] triangles = m.GetTriangles(material);
			for(int i = 0; i < triangles.Length; i += 3)
			{
				//Because we inverted the x-component, we also needed to alter the triangle winding.
				// NOTE(review): the three offsets are always kept equal (see
				// the increments below), so mixing them per-corner is
				// harmless here — but verify before changing one of them.
				sb.Append(string.Format("f {1}/{1}/{1} {0}/{0}/{0} {2}/{2}/{2}\n",
					triangles[i] + 1 + vertexOffset, triangles[i + 1] + 1 + normalOffset, triangles[i + 2] + 1 + uvOffset));
			}
		}

		vertexOffset += m.vertices.Length;
		normalOffset += m.normals.Length;
		uvOffset += m.uv.Length;

		return sb.ToString();
	}

	// Reset the shared index offsets before starting a new file.
	private static void Clear()
	{
		vertexOffset = 0;
		normalOffset = 0;
		uvOffset = 0;
	}

	private static Dictionary<string, ObjMaterial> PrepareFileWrite()
	{
		Clear();
		return new Dictionary<string, ObjMaterial>();
	}

	// Write the collected materials to <filename>.mtl and copy their textures
	// next to it.
	private static void MaterialsToFile(Dictionary<string, ObjMaterial> materialList, string folder, string filename)
	{
		string path = Path.Combine(folder, filename + ".mtl");
		using(StreamWriter sw = new StreamWriter(path))
		{
			foreach(KeyValuePair<string, ObjMaterial> kvp in materialList)
			{
				sw.Write("\n");
				sw.Write("newmtl {0}\n", kvp.Key);
				sw.Write("Ka 0.6 0.6 0.6\n");
				sw.Write("Kd 0.6 0.6 0.6\n");
				sw.Write("Ks 0.9 0.9 0.9\n");
				sw.Write("d 1.0\n");
				sw.Write("Ns 0.0\n");
				sw.Write("illum 2\n");

				if(kvp.Value.textureName != null)
				{
					// BUGFIX: the original used Path.PathSeparator (the ';'
					// PATH-list separator) as a directory separator, so the
					// directory part was never stripped and the destination
					// path was malformed. Use GetFileName/Combine instead.
					string relativeFile = Path.GetFileName(kvp.Value.textureName).Trim();
					string destinationFile = Path.Combine(folder, relativeFile);

					Debug.Log("Copying texture from " + kvp.Value.textureName + " to " + destinationFile);

					try
					{
						//Copy the source file; best-effort — an existing
						//destination simply keeps the old copy.
						File.Copy(kvp.Value.textureName, destinationFile);
					}
					catch
					{
					}

					sw.Write("map_Kd {0}", relativeFile);
				}

				sw.Write("\n\n\n");
			}
		}
	}

	// Export a single mesh (plus its materials) to <filename>.obj/.mtl.
	private static void MeshToFile(MeshFilter mf, string folder, string filename)
	{
		Dictionary<string, ObjMaterial> materialList = PrepareFileWrite();

		string path = Path.Combine(folder, filename + ".obj");
		using(StreamWriter sw = new StreamWriter(path))
		{
			sw.Write("mtllib ./" + filename + ".mtl\n");
			sw.Write(MeshToString(mf, materialList));
		}

		MaterialsToFile(materialList, folder, filename);
	}

	// Export several meshes into one combined <filename>.obj/.mtl.
	private static void MeshesToFile(MeshFilter[] mf, string folder, string filename)
	{
		Dictionary<string, ObjMaterial> materialList = PrepareFileWrite();

		string path = Path.Combine(folder, filename + ".obj");
		using(StreamWriter sw = new StreamWriter(path))
		{
			sw.Write("mtllib ./" + filename + ".mtl\n");
			for(int i = 0; i < mf.Length; i++)
			{
				sw.Write(MeshToString(mf[i], materialList));
			}
		}

		MaterialsToFile(materialList, folder, filename);
	}

	private static bool CreateTargetFolder()
	{
		try
		{
			System.IO.Directory.CreateDirectory(targetFolder);
		}
		catch
		{
			EditorUtility.DisplayDialog("Error!", "Failed to create target folder!", "");
			return false;
		}
		return true;
	}

	[MenuItem("BZWTools/Export/Export all MeshFilters in selection to separate OBJs")]
	static void ExportSelectionToSeparate()
	{
		targetFolder = EditorUtility.SaveFolderPanel("Where to save files?", string.Empty, string.Empty);
		if(targetFolder == string.Empty || !CreateTargetFolder())
			return;

		Transform[] selection = Selection.GetTransforms(SelectionMode.Editable | SelectionMode.ExcludePrefab);

		if(selection.Length == 0)
		{
			EditorUtility.DisplayDialog("No source object selected!", "Please select one or more target objects", "");
			return;
		}

		int exportedObjects = 0;
		for(int i = 0; i < selection.Length; i++)
		{
			Component[] meshfilter = selection[i].GetComponentsInChildren(typeof(MeshFilter));

			for(int m = 0; m < meshfilter.Length; m++)
			{
				exportedObjects++;
				MeshToFile((MeshFilter)meshfilter[m], targetFolder, selection[i].name + "_" + i + "_" + m);
			}
		}

		if(exportedObjects > 0)
			EditorUtility.DisplayDialog("Objects exported", "Exported " + exportedObjects + " objects", "");
		else
			EditorUtility.DisplayDialog("Objects not exported", "Make sure at least some of your selected objects have mesh filters!", "");
	}

	[MenuItem("BZWTools/Export/Export all selection to single OBJ")]
	static void ExportWholeSelectionToSingle()
	{
		targetFolder = EditorUtility.SaveFolderPanel("Where to save files?", string.Empty, string.Empty);
		if(targetFolder == string.Empty || !CreateTargetFolder())
			return;

		Transform[] selection = Selection.GetTransforms(SelectionMode.Editable | SelectionMode.ExcludePrefab);

		if(selection.Length == 0)
		{
			EditorUtility.DisplayDialog("No source object selected!", "Please select one or more target objects", "");
			return;
		}

		int exportedObjects = 0;
		ArrayList mfList = new ArrayList();

		for(int i = 0; i < selection.Length; i++)
		{
			Component[] meshfilter = selection[i].GetComponentsInChildren(typeof(MeshFilter));

			for(int m = 0; m < meshfilter.Length; m++)
			{
				exportedObjects++;
				mfList.Add(meshfilter[m]);
			}
		}

		if(exportedObjects > 0)
		{
			MeshFilter[] mf = new MeshFilter[mfList.Count];
			for(int i = 0; i < mfList.Count; i++)
			{
				mf[i] = (MeshFilter)mfList[i];
			}

			string filename = EditorSceneManager.GetActiveScene().name + "_" + exportedObjects;
			// BUGFIX: same Path.PathSeparator misuse as in MaterialsToFile;
			// GetFileName strips any directory part from the scene name.
			filename = Path.GetFileName(filename).Trim();

			MeshesToFile(mf, targetFolder, filename);

			EditorUtility.DisplayDialog("Objects exported", "Exported " + exportedObjects + " objects to " + filename, "");
		}
		else
			EditorUtility.DisplayDialog("Objects not exported", "Make sure at least some of your selected objects have mesh filters!", "");
	}

	[MenuItem("BZWTools/Export/Export each Selected items to single OBJ")]
	static void ExportEachSelectionToSingle()
	{
		Transform[] selection = Selection.GetTransforms(SelectionMode.Editable | SelectionMode.ExcludePrefab);

		if(selection.Length == 0)
		{
			EditorUtility.DisplayDialog("No source object selected!", "Please select one or more target objects", "");
			return;
		}

		targetFolder = EditorUtility.SaveFolderPanel("Where to save files?", targetFolder, selection[0].name);
		if(targetFolder == string.Empty || !CreateTargetFolder())
			return;

		int exportedObjects = 0;

		for(int i = 0; i < selection.Length; i++)
		{
			Component[] meshfilter = selection[i].GetComponentsInChildren(typeof(MeshFilter));

			MeshFilter[] mf = new MeshFilter[meshfilter.Length];
			for(int m = 0; m < meshfilter.Length; m++)
			{
				exportedObjects++;
				mf[m] = (MeshFilter)meshfilter[m];
			}

			MeshesToFile(mf, targetFolder, selection[i].name + "_" + i);
		}

		if(exportedObjects > 0)
		{
			EditorUtility.DisplayDialog("Objects exported", "Exported " + exportedObjects + " objects", "");
		}
		else
			EditorUtility.DisplayDialog("Objects not exported", "Make sure at least some of your selected objects have mesh filters!", "");
	}
}
| JeffM2501/UnityBZWTools | UnityProject/UnityBZWTools/Assets/BZWTools/Scripts/Editor/ExportTools.cs | C# | mpl-2.0 | 9,876 |
// Mark the "payments" entry in the navigation as active.
highlightNavButton('#payments');
/*************** Cached Selectors ***************/
/* Tables / Charts / Tooltips */
var $transactionHistoryTable = $('#transaction-history-table');
var $transactionBarGraphContainer = $('#transaction-history-highchart');
var $payoutToolTips = $(".payout-tooltip");
var $transactionHistoryTableTimes = $transactionHistoryTable.find(".transaction-payout-time");
/* Panel Stuff */
var $loadingPanel = $('.panel-loading');
/* Off Canvas Stuff */
var $offCanvasRow = $('.row-offcanvas');
var $offConvasButton = $(".btn-offcanvas");
var $offCanvasArrowSymbol = $offConvasButton.find(".fa-offcanvas-arrow");
var $colClanInfo = $('.col-clan-info');
/* Payout Stuff */
var $payoutButton = $(".btn-payout");
var $payoutGroupsModal = $("#payout-groups-modal");
var $payoutSuccessModal = $("#payout-success-modal");
var $unpaidSharesSpan = $(".unpaid-shares-span");
/* Caller Bonus Stuff*/
var $callerBonusBattleAmount = $("#caller-bonus-battle-amount");
var $callerBonusUnpaidBattlesBadge = $("#caller-bonus-unpaid-battles");
var $callerBonusTotalAmountBadge = $("#caller-bonus-total-amount");
/* Tank Incentive Stuff */
var $tankIncentivePanel = $('.panel-tank-incentive');
var $usePresetValuesCheckbox = $tankIncentivePanel.find('.checkbox input[type="checkbox"]');
var $filterIncentiveTiersDropdown = $tankIncentivePanel.find('.input-group-tier-filter > select');
var $filterIncentiveTiersBtn = $tankIncentivePanel.find('.btn-filter-incentive-tiers');
var $incentiveTankDropdown = $('#incentive-tank');
var $incentiveAmountInput = $('#incentive-amount');
/* Datepicker Stuff */
var $inputDateRange = $(".input-daterange");
/*************** Initializers ***************/
// One-time page setup: localize timestamps, build table/chart widgets,
// wire the date-range picker and colorize the unpaid-shares badge.
formatMilisToLocalizedtime($transactionHistoryTableTimes);
initializeTransactionDataTable($transactionHistoryTable);
initializeTransactionBarGraphChart($transactionBarGraphContainer);
updateBarGraph($transactionBarGraphContainer);
var jsonOptions = {weekStart: 0, format: "mm/dd/yyyy", autoclose: true}
$inputDateRange = initializeDateRange($inputDateRange, jsonOptions);
colorizeUnpaidShareCountLabel($unpaidSharesSpan);
$payoutToolTips.tooltip();
$callerBonusBattleAmount.trigger("change"); //Updates the caller bonus total if there is any
/*************** Listeners ***************/
// Toggle the off-canvas clan-info column and flip the arrow icon direction.
$offConvasButton.on("click", function() {
$colClanInfo.toggleClass("active");
// var colClanInfoHasActive = $colClanInfo.hasClass('active');
// var isNotAffixTopOrBottom = !$colClanInfo.hasClass('affix-top') && !$colClanInfo.hasClass('affix-bottom');
//
// if(isNotAffixTopOrBottom) {
// if(colClanInfoHasActive) {
// $colClanInfo.animate({
// right: '15%'
// }, 250);
// } else {
// $colClanInfo.animate({
// right: '-35%'
// }, 250);
// }
// }
$offCanvasRow.toggleClass("active");
$offCanvasArrowSymbol.toggleClass("fa-arrow-right");
});
// Submit the payout form via AJAX. A "standard" payout shows the success
// modal directly; a grouped payout first renders the returned group markup
// into the groups modal for confirmation.
$payoutButton.on("click", function(){
var $closestForm = $(this).closest('form');
var formSubmitURL = $closestForm.data("form-submit-url");
var notValidForm = !validateInputs($closestForm);
if(notValidForm) return;
var $formSubmitPromise = $.post(formSubmitURL, $closestForm.serialize());
var isStandardPayout = $(this).hasClass("btn-payout-standard");
$formSubmitPromise.done(function(data){
if(isStandardPayout){
$payoutSuccessModal.modal('show');
}else{
$payoutGroupsModal.find(".modal-body").empty().html(data);
$payoutGroupsModal.modal('show')
}
});
});
// Finalize a grouped payout: post the calculated totals form rendered
// inside the groups modal, then show the success modal.
$payoutGroupsModal.on("click", "#btn-group-payout-finalize", function(){
var $newCalculatedPayoutGroupTotalForm = $payoutGroupsModal.find("#new-calculated-payout-group-total-form");
var formSubmitURL = $newCalculatedPayoutGroupTotalForm.data("form-submit-url");
$payoutGroupsModal.modal('hide');
var $payoutGroupFormSubmitPromise = $.post(formSubmitURL, $newCalculatedPayoutGroupTotalForm.serialize());
$payoutGroupFormSubmitPromise.done(function(){
$payoutSuccessModal.modal('show');
});
});
// Build and download a payout userscript for one payout group: collect the
// player nicknames from the group's panel and pass them (with the gold
// amount) as query parameters to the script endpoint.
$payoutGroupsModal.on("click", ".btn-script-generation", function(){
var $this = $(this);
var $payoutGroupPlayerNicknameH4s = $this.closest(".panel").find(".panel-body .player-nickname");
var payoutAmount = $this.data("gold-amount");
var playerNicknames = [];
$payoutGroupPlayerNicknameH4s.each(function(){
playerNicknames.push($(this).text())
});
var locationUrl = "../app/payouts/get-payout-script.user.js?payoutAmount=" + payoutAmount;
$.each(playerNicknames, function(){
locationUrl += "&playerNicknames=" + this;
});
window.location = locationUrl;
});
// Mirror the picked campaign start/end dates into hidden fields as epoch
// milliseconds (getTime()), for form submission.
$inputDateRange.datepicker().on("changeDate", function(e){
var startDate = $("#campaign-start-date").datepicker("getDate");
var endDate = $("#campaign-end-date").datepicker("getDate");
var startDateInSec = startDate.getTime();
var endDateInSec = endDate.getTime();
$("#hidden-campaign-start-date").val(startDateInSec);
$("#hidden-campaign-end-date").val(endDateInSec);
});
// Recompute the caller-bonus total (per-battle amount * unpaid battles)
// whenever the amount input changes.
$callerBonusBattleAmount.on("keyup change", function(){
var amountEntered = $(this).val();
var unpaidBattles = $callerBonusUnpaidBattlesBadge.text();
var callerBonusTotalAmount = (Number(amountEntered) * Number(unpaidBattles));
//TODO think if we want to actually show NaN or not
$callerBonusTotalAmountBadge.text(callerBonusTotalAmount);
// NOTE(review): this trigger duplicates the one in the initializer
// section above — confirm whether both are needed.
});$callerBonusBattleAmount.trigger('change');
// When "use preset values" is checked, disable the tier filter and lazily
// load the preset tank incentive amounts into the tank dropdown (only once,
// tracked via the 'is-preset-values' data flag).
$usePresetValuesCheckbox.on('change', function () {
var isNotChecked = !$(this).is(':checked');
if(isNotChecked) {
$filterIncentiveTiersDropdown.prop("disabled", false);
$filterIncentiveTiersBtn.removeAttr("disabled");
return false;
}else {
$filterIncentiveTiersDropdown.prop("disabled", true);
$filterIncentiveTiersBtn.attr("disabled", "disabled");
}
var dropdownAlreadyHasPresetValues = $incentiveTankDropdown.data('is-preset-values') == true;
if(dropdownAlreadyHasPresetValues) return false;
$loadingPanel.addClass('loading');
var $getTankIncentivePresetValuesPromise = $.get('payouts/get-tank-incentive-preset-values');
$getTankIncentivePresetValuesPromise.always(function() {
$loadingPanel.removeClass('loading');
}).done(function(data) {
$incentiveTankDropdown.empty();
var incentivePayoutAmountsTableRows = data;
$.each(incentivePayoutAmountsTableRows, function() {
var incentivePayoutAmountsTableRow = this;
var tankId = incentivePayoutAmountsTableRow.tankInformation.tankId;
var tankName = incentivePayoutAmountsTableRow.tankInformation.nameI18n;
var tankIncentiveAmount = incentivePayoutAmountsTableRow.tankIncentiveDefaultPayout.amount;
// Each option carries its preset amount so the change handler below
// can copy it into the amount input.
var dropdownOption = '<option value="' + tankId + '"' + ' data-tank-incentive-amount="' + tankIncentiveAmount + '" >' + tankName + '</option>';
$incentiveTankDropdown.append(dropdownOption).data('is-preset-values', 'true');
});
$incentiveTankDropdown.trigger('change');
});
});
// Replace the tank dropdown contents with tanks of the selected tier
// (non-preset mode) and clear the amount input.
$filterIncentiveTiersBtn.on('click', function () {
var tierSelected = $filterIncentiveTiersDropdown.val();
$loadingPanel.addClass('loading');
var $getTankInformationByTierPromise = $.get('payouts/get-tank-information-by-tier', {tankTiers : tierSelected});
$getTankInformationByTierPromise.always(function(){
$loadingPanel.removeClass('loading');
}).done(function(data) {
$incentiveTankDropdown.empty();
var tankInformationList = data;
$.each(tankInformationList, function() {
var tankInformation = this;
var tankId = tankInformation.tankId;
var tankName = tankInformation.nameI18n;
var dropdownOption = '<option value="' + tankId + '">' + tankName + '</option>';
$incentiveTankDropdown.append(dropdownOption).data('is-preset-values', 'false');;
});
$incentiveAmountInput.val('');
});
});
// In preset mode, copy the selected tank's preset incentive amount into the
// amount input; does nothing when presets are not enabled.
$incentiveTankDropdown.on('change', function() {
var isNotChecked = !$usePresetValuesCheckbox.is(':checked');
if(isNotChecked) return false;
var incentiveTankAmount = $incentiveTankDropdown.find('option:selected').data('tank-incentive-amount');
$incentiveAmountInput.val(incentiveTankAmount);
});$incentiveTankDropdown.trigger('change');
/************* Functions *************/
/**
 * Refresh the transaction bar graph with data from the server.
 *
 * Fetches an array of {type, amount} records and pushes the names into the
 * chart's x-axis categories and the amounts into its single series.
 *
 * @param $transactionBarGraphContainer jQuery element hosting the Highcharts chart
 */
function updateBarGraph($transactionBarGraphContainer){
    $.get("payouts/get-transaction-pie").done(function(barData){
        var categoryNames = [];
        var seriesData = [];
        $.each(barData, function(){
            categoryNames.push(this.type);
            seriesData.push(this.amount);
        });
        var chart = $transactionBarGraphContainer.highcharts();
        chart.xAxis[0].setCategories(categoryNames);
        chart.series[0].setData(seriesData);
    });
}
/**
 * Color the unpaid-shares badge by severity: green below 100, yellow below
 * 300, red otherwise. Does nothing when the badge does not contain a number.
 *
 * @param $unpaidSharesSpan jQuery element whose text is the unpaid share count
 */
function colorizeUnpaidShareCountLabel($unpaidSharesSpan){
    var unpaidShareAmount = parseInt($unpaidSharesSpan.html(), 10);
    // BUGFIX: the original tested `unpaidShareAmount == NaN`, which is
    // always false in JavaScript (NaN is not equal to anything), so the
    // guard never fired; use isNaN() instead.
    if (isNaN(unpaidShareAmount)) return;
    switch (true){
        case unpaidShareAmount < 100:
            removeClassByWildCard($unpaidSharesSpan, "label-");
            $unpaidSharesSpan.addClass("label-success");
            break;
        case unpaidShareAmount < 300:
            removeClassByWildCard($unpaidSharesSpan, "label-");
            $unpaidSharesSpan.addClass("label-warning");
            break;
        default:
            removeClassByWildCard($unpaidSharesSpan, "label-");
            $unpaidSharesSpan.addClass("label-danger");
    }
}
/**
 * Create an empty horizontal Highcharts bar chart in the given container.
 * Categories and data are filled in later by updateBarGraph().
 *
 * @param $barGraphContainer jQuery element to render the chart into
 */
function initializeTransactionBarGraphChart($barGraphContainer){
$barGraphContainer.highcharts({
chart: {
type: 'bar'
},
title: {
text: null
},
colors: ['#FFCC00'],
yAxis: {
min: 0,
title: {
text: 'Total Gold Amount',
align: 'high'
},
labels: {
overflow: 'justify'
}
},
xAxis:{
labels: {
style: {
fontSize:'14px',
fontWeight: 'bold'
}
}
},
tooltip: {
pointFormat: '{series.name}: <b>{point.y}</b><br/>',
valueSuffix: ' gold'
},
plotOptions: {
bar: {
dataLabels: {
enabled: true
}
}
},
credits: {
enabled: false
},
series: [{
name: 'Gold Amount',
data: []
}]
});
}
/**
 * Turn the transaction history table into a paged, searchable DataTable
 * sorted by its second column (descending). The first column is a row
 * counter that is renumbered after every sort/filter.
 *
 * @param $dataTable jQuery table element to enhance
 */
function initializeTransactionDataTable($dataTable){
$dataTable.dataTable({
"paging": true,
"pageLength": 10,
"pagingType": "simple",
"order": [[ 1, "desc" ]],
"searching": true,
"columnDefs": [ {
"targets": [0],
"orderable": false
}
],
"drawCallback": function (oSettings) {
/* Need to redo the counters if filtered or sorted */
if ( oSettings.bSorted || oSettings.bFiltered )
{
for ( var i=0, iLen=oSettings.aiDisplay.length ; i<iLen ; i++ )
{
$('td:eq(0)', oSettings.aoData[ oSettings.aiDisplay[i] ].nTr ).html( i+1 );
}
}
}
});
}
from configparser import SafeConfigParser
import os
import sys
from collections import OrderedDict
here = os.path.dirname(__file__)
class ConfigDict(dict):
    """A dict of config values that can resolve path entries relative to a base directory."""

    def __init__(self, base_path, *args, **kwargs):
        # Directory against which relative path values are resolved.
        self.base_path = base_path
        dict.__init__(self, *args, **kwargs)

    def get_path(self, key, default=None):
        """Return the value for ``key`` interpreted as a filesystem path.

        The value is tilde-expanded and made absolute relative to
        ``base_path``; ``default`` is returned unchanged when the key is
        missing.
        """
        if key not in self:
            return default
        # BUGFIX: the original called os.path.expanduser without using its
        # return value, so "~" entries were never actually expanded.
        path = os.path.expanduser(self[key])
        return os.path.abspath(os.path.join(self.base_path, path))
def read(config_path):
    """Parse the ini file at ``config_path``.

    Returns an OrderedDict mapping section name -> ConfigDict of that
    section's options, with ``%(pwd)s`` interpolation available and each
    ConfigDict rooted at the config file's directory.
    """
    # SafeConfigParser was deprecated and removed in Python 3.12; its
    # behaviour matches ConfigParser, so import that locally to keep this
    # working on modern interpreters without touching module-level imports.
    from configparser import ConfigParser

    config_path = os.path.abspath(config_path)
    config_root = os.path.dirname(config_path)
    parser = ConfigParser()
    success = parser.read(config_path)
    assert config_path in success, success

    subns = {"pwd": os.path.abspath(os.path.curdir)}

    rv = OrderedDict()
    for section in parser.sections():
        rv[section] = ConfigDict(config_root)
        for key in parser.options(section):
            rv[section][key] = parser.get(section, key, raw=False, vars=subns)

    return rv
def path(argv=None):
    """Locate the wptrunner config file.

    The first ``--config PATH`` or ``--config=PATH`` entry in ``argv``
    wins; otherwise a ``wptrunner.ini`` in the working directory is used,
    falling back to the bundled ``wptrunner.default.ini``. The returned
    path is always absolute.
    """
    args = argv if argv is not None else []
    config_path = None
    for i, arg in enumerate(args):
        if arg == "--config" and i + 1 < len(args):
            config_path = args[i + 1]
            break
        if arg.startswith("--config="):
            config_path = arg.split("=", 1)[1]
            break
    if config_path is None:
        if os.path.exists("wptrunner.ini"):
            config_path = "wptrunner.ini"
        else:
            config_path = os.path.join(here, "..", "wptrunner.default.ini")
    return os.path.abspath(config_path)
def load():
    """Read the config file selected by this process's command line (sys.argv)."""
    return read(path(sys.argv))
| KiChjang/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/config.py | Python | mpl-2.0 | 1,667 |
package Misc.Gui.Actions;
import java.awt.event.ActionEvent;
import javax.swing.AbstractAction;
/**
 * Swing action labelled "Exit" that terminates the application immediately
 * via System.exit(0) when invoked.
 */
public class ExitAction extends AbstractAction
{
	static final long serialVersionUID = 1L;

	public ExitAction() {
		super("Exit");
	}

	public void actionPerformed(final ActionEvent e) {
		System.exit(0);
	}
}
| Hjertesvikt/Hjerte | src/Misc/Gui/Actions/ExitAction.java | Java | mpl-2.0 | 344 |
package simulator.configurables;
import java.io.Serializable;
import simulator.util.Distributions;
import simulator.util.VariableDistribution;
/**
* VariableOnsetConfig.java
*
* Class holding a configuration for a variable duration.
*
* Created on 01-Dec-2011
* City University
* BSc Computing with Artificial Intelligence
* Project title: Building a TD Simulator for Real-Time Classical Conditioning
* @supervisor Dr. Eduardo Alonso
* @author Jonathan Gray
*
*/
public class VariableOnsetConfig implements StimulusOnsetConfig,Serializable {

	/**
	 *
	 */
	private static final long serialVersionUID = -7336395455119352484L;
	/** Variable distribution for generating durations. **/
	private VariableDistribution varDist;
	/** Mean used by the distribution. **/
	private double mean = 0;
	/** Standard deviation for the distribution. **/
	// NOTE(review): sd is stored and exposed via get/setSd but is never
	// passed to the distribution — confirm whether it is still needed.
	private float sd = 0;
	/** Number of trials. **/
	private int trials;
	/** Distribution type. **/
	private int type;
	/** Mean type. **/
	private boolean isGeometric;

	/**
	 * Build a variable-onset configuration backed by a distribution created
	 * via Distributions.getDistribution with the given parameters. The
	 * standard deviation is initialised to 0.
	 */
	public VariableOnsetConfig(double mean, long seed, int trials, int type, boolean geometric) {
		this.sd = 0;
		this.mean = mean;
		this.trials = trials;
		isGeometric = geometric;
		this.type = type;
		varDist = Distributions.getDistribution(type, mean, seed, trials, geometric);
	}

	/**
	 * Get the next onset from the distribution.
	 */
	public double getNextOnset() {
		return varDist.next();
	}

	/** Always false: this configuration produces variable, not fixed, onsets. */
	@Override
	public boolean isFixed() {
		return false;
	}

	/**
	 * @return the mean
	 */
	public double getMean() {
		return mean;
	}

	/**
	 * @param mean the mean to set
	 */
	public void setMean(double mean) {
		this.mean = mean;
	}

	/**
	 * @return the standard deviation
	 */
	public float getSd() {
		return sd;
	}

	/**
	 * @param sd the standard deviation to set
	 */
	public void setSd(float sd) {
		this.sd = sd;
	}

	/** @return the random seed used by the underlying distribution. */
	public long getSeed() {
		return varDist.getSeed();
	}

	/** Short display form, e.g. "V(μ5.0)". */
	public String toString() {
		return "V(μ" + mean+ ")";
	}

	/** Rebuild the distribution and rewind it to its first value. */
	@Override
	public void reset() {
		varDist.build();
		varDist.setIndex(0);
	}

	/** @return the configured number of trials. */
	public int getTrials() {
		return trials;
	}

	/** Update the trial count here and on the underlying distribution. */
	public void setTrials(int num) {
		trials = num;
		varDist.setTrials(trials);
	}

	/**
	 * @return the isGeometric
	 */
	public boolean isGeometric() {
		return isGeometric;
	}

	/**
	 * @param isGeometric the isGeometric to set
	 */
	public void setGeometric(boolean isGeometric) {
		this.isGeometric = isGeometric;
		varDist.setMeanType(isGeometric);
	}

	/**
	 * @return the type of distribution used.
	 */
	public int getType() {
		return type;
	}

	/**
	 * @param type the type to use.
	 */
	public void setType(int type) {
		this.type = type;
	}

	/* (non-Javadoc)
	 * @see simulator.configurables.StimulusOnsetConfig#regenerate()
	 */
	@Override
	public void regenerate() {
		varDist.regenerate();
	}
}
| cal-r/td | src/simulator/configurables/VariableOnsetConfig.java | Java | mpl-2.0 | 2,823 |
<?php
/**
 * Posts Layout 1 for Optimizer
 *
 * Displays The Posts in Layout 1: an optional section heading (home page
 * only), one card per post (thumbnail with hover preview/read-more links,
 * then the linked title), followed by pagination.
 *
 * @package Optimizer
 *
 * @since Optimizer 1.0
 */

global $optimizer; // Theme options array read throughout this template.
?>
<div class="lay1">
<div class="center">
<?php /* If homepage Display the Title */?>
<?php if ( is_home() ) { ?>
<div class="homeposts_title">
<?php if($optimizer['posts_title_id']) { ?><h2 class="home_title"><?php echo do_shortcode($optimizer['posts_title_id']); ?></h2><?php }?>
<?php if($optimizer['posts_subtitle_id']) { ?><div class="home_subtitle"><?php echo do_shortcode(esc_textarea(($optimizer['posts_subtitle_id']))); ?></div><?php }?>
<?php if($optimizer['posts_title_id']) { /* Decorative divider shown only under a title. */ ?>
<?php get_template_part('template_parts/divider','icon'); ?>
<?php }?>
</div>
<?php }?>
<div class="lay1_wrap <?php if(!empty($optimizer['lay_show_title']) ) { ?>lay1_tt_on<?php }?>">
<?php if(have_posts()): ?><?php while(have_posts()): ?><?php the_post(); ?>
<div <?php post_class(); ?> id="post-<?php the_ID(); ?>">
<!--POST THUMBNAIL START-->
<div class="post_image">
<!--Post Image Hover-->
<div class="img_hover"></div>
<!--CALL POST IMAGE-->
<?php /* Thumbnail fallback chain: featured image -> gallery thumb -> first
attached image -> theme placeholder. */ ?>
<?php if ( has_post_thumbnail() ) : ?>
<div class="imgwrap">
<div class="icon_wrap animated fadeInUp">
<a class="imgzoom" href="<?php $image = wp_get_attachment_image_src( get_post_thumbnail_id( $post->ID ), 'full'); echo $image[0]; ?>" title="<?php echo _e('Preview','optimizer'); ?>" data-title="<?php the_title(); ?>"><i class="fa fa-search"></i></a>
<a href="<?php the_permalink();?>" title="<?php echo _e('Read More','optimizer'); ?>"><i class="fa fa-plus"></i></a>
</div>
<a href="<?php the_permalink();?>"><?php the_post_thumbnail('optimizer_thumb'); ?></a>
</div>
<?php /* NOTE(review): `!optimizer_gallery_thumb() == ''` negates before
comparing — presumably `optimizer_gallery_thumb() != ''` was meant; confirm
before changing (same pattern below for optimizer_first_image). */ ?>
<?php elseif(!optimizer_gallery_thumb() == ''): ?>
<div class="imgwrap">
<div class="icon_wrap animated fadeInUp">
<a href="<?php the_permalink();?>" title="<?php echo _e('Read More','optimizer'); ?>"><i class="fa fa-plus"></i></a>
</div>
<a href="<?php the_permalink();?>"><img src="<?php echo optimizer_gallery_thumb();?>" alt="<?php the_title_attribute(); ?>" class="thn_thumbnail"/></a></div>
<?php elseif(!optimizer_first_image() == ''): ?>
<div class="imgwrap">
<div class="icon_wrap animated fadeInUp">
<?php /* NOTE(review): this branch runs when there is no post thumbnail, yet
the zoom link still reads get_post_thumbnail_id() — likely yields an empty
href; verify. */ ?>
<a class="imgzoom" href="<?php $image = wp_get_attachment_image_src( get_post_thumbnail_id( $post->ID ), 'full'); echo $image[0]; ?>" title="<?php echo _e('Preview','optimizer'); ?>" data-title="<?php the_title(); ?>"><i class="fa fa-search"></i></a>
<a href="<?php the_permalink();?>" title="<?php echo _e('Read More','optimizer'); ?>"><i class="fa fa-plus"></i></a>
</div>
<a href="<?php the_permalink();?>"><img alt="<?php the_title(); ?>" src="<?php echo optimizer_first_image('optimizer_thumb'); ?>" /></a></div>
<?php else : ?>
<div class="imgwrap">
<div class="icon_wrap animated fadeInUp">
<a href="<?php the_permalink();?>" title="<?php echo _e('Read More','optimizer'); ?>"><i class="fa fa-plus"></i></a>
</div>
<a href="<?php the_permalink();?>"><img src="<?php echo optimizer_placeholder_image();?>" alt="<?php the_title_attribute(); ?>" class="thn_thumbnail"/></a></div>
<?php endif; ?>
<!--POST CONTENT-->
<div class="post_content">
<h2 class="postitle"><a href="<?php the_permalink();?>" title="<?php the_title_attribute(); ?>"><?php the_title(); ?></a></h2>
</div>
</div>
<!--POST THUMBNAIL END-->
</div>
<?php endwhile ?>
<?php endif ?>
</div><!--lay1_wrap class end-->
<!--PAGINATION START-->
<?php get_template_part( 'framework/core','pagination' ); ?>
<!--PAGINATION END-->
<?php wp_reset_query(); ?>
</div><!--center class end-->
</div><!--lay1 class end-->
set :base_url, "https://www.consul.io/"

# middleman-hashicorp extension: product name/version and the GitHub
# repository slug used when rendering the site.
activate :hashicorp do |h|
  h.name = "consul"
  h.version = "0.8.2"
  h.github_slug = "hashicorp/consul"
end
helpers do
  # Returns the FQDN of the image URL.
  #
  # @param [String] path
  #
  # @return [String]
  def image_url(path)
    File.join(base_url, image_path(path))
  end

  # Get the title for the page.
  #
  # @param [Middleman::Page] page
  #
  # @return [String]
  def title_for(page)
    if page && page.data.page_title
      return "#{page.data.page_title} - Consul by HashiCorp"
    end

    "Consul by HashiCorp"
  end

  # Get the description for the page: frontmatter description with quotes
  # stripped and whitespace collapsed, HTML-escaped.
  #
  # @param [Middleman::Page] page
  #
  # @return [String]
  def description_for(page)
    description = (page.data.description || "")
      .gsub('"', '')
      .gsub(/\n+/, ' ')
      .squeeze(' ')

    escape_html(description)
  end

  # This helps by setting the "active" class for sidebar nav elements
  # if the YAML frontmatter matches the expected value.
  def sidebar_current(expected)
    current = current_page.data.sidebar_current || ""
    if current.start_with?(expected)
      " class=\"active\""
    else
      ""
    end
  end

  # Returns the id for this page: sidebar_current wins, then a special-case
  # for the home page, then a slug derived from the page title.
  # @return [String]
  def body_id_for(page)
    if !(name = page.data.sidebar_current).blank?
      return "page-#{name.strip}"
    end
    if page.url == "/" || page.url == "/index.html"
      return "page-home"
    end
    if !(title = page.data.page_title).blank?
      return normalize_slug(title)
    end
    ""
  end

  # Returns the list of classes for this page (layout-* and page-*).
  # @return [String]
  def body_classes_for(page)
    classes = []

    if !(layout = page.data.layout).blank?
      classes << "layout-#{page.data.layout}"
    end

    if !(title = page.data.page_title).blank?
      classes << "page-#{normalize_slug(title)}"
    end

    classes.join(" ")
  end

  # Normalizes a human title into a lowercase dash-separated slug. Shared by
  # body_id_for and body_classes_for (previously duplicated inline).
  # @return [String]
  def normalize_slug(title)
    title
      .downcase
      .gsub('"', '')
      .gsub(/[^\w]+/, '-')
      .gsub(/_+/, '-')
      .squeeze('-')
      .squeeze(' ')
  end
end
| dankraw/consul | website/config.rb | Ruby | mpl-2.0 | 2,191 |
/**
* Copyright (c) 2013-2016, The SeedStack authors <http://seedstack.org>
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package org.seedstack.seed.core.internal.data;
import com.google.inject.PrivateModule;
import com.google.inject.TypeLiteral;
import org.seedstack.seed.DataManager;
import java.util.Map;
/**
* Guice module for configuring SEED core data infrastructure.
*/
/**
 * Guice module wiring SEED core's data import/export infrastructure.
 * Exposes only {@link DataManager}; everything else stays private.
 */
class DataModule extends PrivateModule {

    private final Map<String, Map<String, DataExporterDefinition<Object>>> allDataExporters;
    private final Map<String, Map<String, DataImporterDefinition<Object>>> allDataImporters;

    DataModule(Map<String, Map<String, DataExporterDefinition<Object>>> allDataExporters,
            Map<String, Map<String, DataImporterDefinition<Object>>> allDataImporters) {
        this.allDataExporters = allDataExporters;
        this.allDataImporters = allDataImporters;
    }

    @Override
    protected void configure() {
        // Make the definition maps injectable under their full generic types.
        bind(new DataExportersTypeLiteral()).toInstance(allDataExporters);
        bind(new DataImportersTypeLiteral()).toInstance(allDataImporters);
        bind(DataManager.class).to(DataManagerImpl.class);

        bindImporterClasses();
        bindExporterClasses();

        // Only the data manager is visible outside this private module.
        expose(DataManager.class);
    }

    /** Registers every data importer implementation class with the injector. */
    private void bindImporterClasses() {
        for (Map<String, DataImporterDefinition<Object>> definitionsByName : allDataImporters.values()) {
            for (DataImporterDefinition<Object> definition : definitionsByName.values()) {
                bind(definition.getDataImporterClass());
            }
        }
    }

    /** Registers every data exporter implementation class with the injector. */
    private void bindExporterClasses() {
        for (Map<String, DataExporterDefinition<Object>> definitionsByName : allDataExporters.values()) {
            for (DataExporterDefinition<Object> definition : definitionsByName.values()) {
                bind(definition.getDataExporterClass());
            }
        }
    }

    private static class DataExportersTypeLiteral extends TypeLiteral<Map<String, Map<String, DataExporterDefinition<Object>>>> {
    }

    private static class DataImportersTypeLiteral extends TypeLiteral<Map<String, Map<String, DataImporterDefinition<Object>>>> {
    }
}
| adrienlauer/seed | core/src/main/java/org/seedstack/seed/core/internal/data/DataModule.java | Java | mpl-2.0 | 2,316 |
/*****************************************************************************************
* *** BEGIN LICENSE BLOCK *****
*
* Version: MPL 2.0
*
* echocat Jomon, Copyright (c) 2012-2013 echocat
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* *** END LICENSE BLOCK *****
****************************************************************************************/
package org.echocat.jomon.net.cluster.channel.tcp;
import org.echocat.jomon.net.cluster.channel.StatisticEnabledNode;
import java.util.UUID;
/**
 * Marker interface for a TCP cluster-channel node with statistics support.
 * The {@code UUID} type parameter is presumably the node's identifier type —
 * confirm against {@code StatisticEnabledNode}.
 */
public interface TcpNode extends StatisticEnabledNode<UUID> {}
| echocat/jomon | net/cluster/src/main/java/org/echocat/jomon/net/cluster/channel/tcp/TcpNode.java | Java | mpl-2.0 | 738 |
/*******************************************************************************
* Copyright 2017, Elektrobit Automotive GmbH. All rights reserved.
* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
* If a copy of the MPL was not distributed with this file, You can obtain one at https://mozilla.org/MPL/2.0/.
*******************************************************************************/
#ifndef SOURCE_DIRECTORY__TARGET_AGENT_MODULES_MESSAGE_RECORDER_INC_MARSHALINGENGINE_HPP_
#define SOURCE_DIRECTORY__TARGET_AGENT_MODULES_MESSAGE_RECORDER_INC_MARSHALINGENGINE_HPP_
#include "target_agent_prot_frame.pb.h"
#include "Poco/File.h"
#include "Poco/Path.h"
#include "Poco/BinaryReader.h"
#include "CMessageDispatcher.h"
#include "plugin_provider.h"
#include <fstream>
namespace TargetAgent {
namespace MessageRecorder {

/*
 * Writes plugin ProtocolMessages to a file stream (protobuf Frame header +
 * payload) and reads them back via a Poco::BinaryReader.
 */
class CSerializer {
public:
    CSerializer();

    CSerializer(const CSerializer& other);

    // Opens/closes the backing output file stream.
    void openStream(const char* file);

    void closeStream(void);

    void resetCounters(void);

    // True while the serialized protobuf header fits the fixed header buffer.
    bool isHeaderSizeValid(void) const {
        return (mHeader.ByteSize() < MAX_HEADER_SIZE);
    }

    bool isFileSizeLimitReached(void) const;

    // Presumably (re)allocates the payload buffer to hold expectedSize bytes,
    // returning the buffer and its actual size — confirm in the .cpp.
    void adjustMessageBuffer(Poco::UInt32 expectedSize,
        unsigned int* actualSize, unsigned char** buffer);

    // NOTE(review): name has a typo ("deserialze") but is part of the public
    // interface, so it is kept.
    bool deserialzeMessage(Poco::BinaryReader& reader, PluginInterface::ProtocolMessage** msg);

    // Current write position of the output stream (typo "Curent" kept).
    std::streampos getCurentPosition(){
        return mFileStream.tellp();
    }

    bool serializeMessage(const PluginInterface::ProtocolMessage* msg, bool& limitReached);

    void flushMessage(const PluginInterface::ProtocolMessage* msg);

    CSerializer& operator=(const CSerializer& other);

    ~CSerializer();

private:
    void writeMessageHeader(const PluginInterface::ProtocolMessage* msg);

private:
    std::ofstream mFileStream;
    static const int MAX_HEADER_SIZE = 0xFF;
    static char mHeaderBuffer[MAX_HEADER_SIZE + 1];
    unsigned int mCurrentPayloadSize;
    unsigned char *mPayloadBuffer;
    Protocol::Frame::Header mHeader;
    // Running byte count; presumably drives isFileSizeLimitReached() — confirm.
    unsigned long long int mCrtAmountOfBytes;
};

} // namespace MessageRecorder
;
} // namespace TargetAgent
;
#endif /* SOURCE_DIRECTORY__TARGET_AGENT_MODULES_MESSAGE_RECORDER_INC_MARSHALINGENGINE_HPP_ */
| Elektrobit/eb-solys-target-agent | target-agent/modules/message-recorder/inc/Serializer.hpp | C++ | mpl-2.0 | 2,233 |
# Generated by Django 3.1.6 on 2021-02-02 19:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: alters Vehicle.latest_location and Vehicle.source so the
    # FK is set to NULL when the referenced row is deleted (on_delete=SET_NULL).

    dependencies = [
        ('busstops', '0006_auto_20201225_0004'),
        ('vehicles', '0009_livery_text_colour'),
    ]

    operations = [
        migrations.AlterField(
            model_name='vehicle',
            name='latest_location',
            field=models.OneToOneField(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to='vehicles.vehiclelocation'),
        ),
        migrations.AlterField(
            model_name='vehicle',
            name='source',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='busstops.datasource'),
        ),
    ]
| jclgoodwin/bustimes.org.uk | vehicles/migrations/0010_auto_20210202_1951.py | Python | mpl-2.0 | 819 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/*
* Copyright (c) 2014, Joyent, Inc.
*/
var clone = require('clone');
var tape = require('tape');
var uuid = require('libuuid').create;
var helper = require('./helper.js');
///--- Globals
// Bucket config exercising every supported index type (string/number/boolean
// plus unique variants) and a single pre and post trigger.
var FULL_CFG = {
    index: {
        str: {
            type: 'string'
        },
        str_u: {
            type: 'string',
            unique: true
        },
        num: {
            type: 'number'
        },
        num_u: {
            type: 'number',
            unique: true
        },
        bool: {
            type: 'boolean'
        },
        bool_u: {
            type: 'boolean',
            unique: true
        }
    },
    pre: [function onePre(req, cb) { cb(); }],
    post: [function onePost(req, cb) { cb(); }],
    options: {}
};

var c; // client
var server;
var b; // bucket name, regenerated for every test
// Registers a three-phase tape test: setup spins up a server and connected
// client and picks a fresh unique bucket name; main runs `setup`; teardown
// best-effort deletes the bucket, then closes client and server.
function test(name, setup) {
    tape.test(name + ' - setup', function (t) {
        b = 'moray_unit_test_' + uuid().substr(0, 7);
        helper.createServer(function (s) {
            server = s;
            c = helper.createClient();
            c.on('connect', t.end.bind(t));
        });
    });

    tape.test(name + ' - main', function (t) {
        setup(t);
    });

    tape.test(name + ' - teardown', function (t) {
        // May or may not exist, just blindly ignore
        c.delBucket(b, function () {
            c.once('close', function () {
                helper.cleanupServer(server, function () {
                    t.pass('closed');
                    t.end();
                });
            });
            c.close();
        });
    });
}
///--- Helpers
// Asserts that a bucket returned by the server matches the config it was
// created/updated with: name, mtime, index, and trigger sources.
function assertBucket(t, bucket, cfg) {
    var expectedPre = cfg.pre || [];
    var expectedPost = cfg.post || [];

    t.ok(bucket);
    if (!bucket)
        return (undefined);

    t.equal(bucket.name, b);
    t.ok(bucket.mtime instanceof Date);
    t.deepEqual(bucket.index, (cfg.index || {}));
    t.ok(Array.isArray(bucket.pre));
    t.ok(Array.isArray(bucket.post));
    t.equal(bucket.pre.length, expectedPre.length);
    t.equal(bucket.post.length, expectedPost.length);

    // Bail before comparing trigger bodies when the counts already differ.
    if (bucket.pre.length !== expectedPre.length ||
        bucket.post.length !== expectedPost.length)
        return (undefined);

    // Triggers round-trip as source text; compare stringified functions.
    bucket.pre.forEach(function (fn, i) {
        t.equal(fn.toString(), cfg.pre[i].toString());
    });
    bucket.post.forEach(function (fn, i) {
        t.equal(fn.toString(), cfg.post[i].toString());
    });

    return (undefined);
}
///--- tests
// Empty config round-trips and the bucket appears in listBuckets.
test('create bucket stock config', function (t) {
    c.createBucket(b, {}, function (err) {
        t.ifError(err);
        c.getBucket(b, function (err2, bucket) {
            t.ifError(err2);
            assertBucket(t, bucket, {});
            c.listBuckets(function (err3, buckets) {
                t.ifError(err3);
                t.ok(buckets);
                t.ok(buckets.length);
                t.end();
            });
        });
    });
});

// A fully loaded config (all index types + triggers) round-trips intact.
test('create bucket loaded', function (t) {
    c.createBucket(b, FULL_CFG, function (err) {
        t.ifError(err);
        c.getBucket(b, function (err2, bucket) {
            t.ifError(err2);
            assertBucket(t, bucket, FULL_CFG);
            t.end();
        });
    });
});

// Unversioned update: a new index field and extra post trigger show up.
test('update bucket', function (t) {
    c.createBucket(b, FULL_CFG, function (err) {
        t.ifError(err);
        var cfg = clone(FULL_CFG);

        cfg.index.foo = {
            type: 'string',
            unique: false
        };
        cfg.post.push(function two(req, cb) {
            cb();
        });

        c.updateBucket(b, cfg, function (err2) {
            t.ifError(err2);
            c.getBucket(b, function (err3, bucket) {
                t.ifError(err3);
                assertBucket(t, bucket, cfg);
                t.end();
            });
        });
    });
});

// Version bump from implicit 0 to 1 is accepted.
test('update bucket (versioned ok 0->1)', function (t) {
    c.createBucket(b, FULL_CFG, function (err) {
        t.ifError(err);
        var cfg = clone(FULL_CFG);

        cfg.options.version = 1;
        cfg.index.foo = {
            type: 'string',
            unique: false
        };
        cfg.post.push(function two(req, cb) {
            cb();
        });

        c.updateBucket(b, cfg, function (err2) {
            t.ifError(err2);
            c.getBucket(b, function (err3, bucket) {
                t.ifError(err3);
                assertBucket(t, bucket, cfg);
                t.end();
            });
        });
    });
});

// Version bump from 1 to 2 is accepted.
test('update bucket (versioned ok 1->2)', function (t) {
    var cfg = clone(FULL_CFG);
    cfg.options.version = 1;
    c.createBucket(b, FULL_CFG, function (err) {
        t.ifError(err);
        cfg = clone(FULL_CFG);

        cfg.options.version = 2;
        cfg.index.foo = {
            type: 'string',
            unique: false
        };
        cfg.post.push(function two(req, cb) {
            cb();
        });

        c.updateBucket(b, cfg, function (err2) {
            t.ifError(err2);
            c.getBucket(b, function (err3, bucket) {
                t.ifError(err3);
                assertBucket(t, bucket, cfg);
                t.end();
            });
        });
    });
});

// Adding an index with a version bump records the pending reindex under the
// new version number.
// NOTE(review): the bucket is created with FULL_CFG even though cfg sets
// version 1 first — presumably intentional so the bucket starts unversioned;
// confirm.
test('update bucket (reindex tracked)', function (t) {
    var cfg = clone(FULL_CFG);
    cfg.options.version = 1;
    c.createBucket(b, FULL_CFG, function (err) {
        t.ifError(err);
        cfg = clone(FULL_CFG);

        cfg.options.version = 2;
        cfg.index.foo = {
            type: 'string',
            unique: false
        };

        c.updateBucket(b, cfg, function (err2) {
            t.ifError(err2);
            c.getBucket(b, function (err3, bucket) {
                t.ifError(err3);
                assertBucket(t, bucket, cfg);
                t.ok(bucket.reindex_active);
                t.ok(bucket.reindex_active['2']);
                t.end();
            });
        });
    });
});

// The no_reindex option suppresses reindex tracking on update.
test('update bucket (reindex disabled)', function (t) {
    var cfg = clone(FULL_CFG);
    cfg.options.version = 1;
    c.createBucket(b, FULL_CFG, function (err) {
        t.ifError(err);
        cfg = clone(FULL_CFG);

        cfg.options.version = 2;
        cfg.index.foo = {
            type: 'string',
            unique: false
        };

        var opts = {
            no_reindex: true
        };
        c.updateBucket(b, cfg, opts, function (err2) {
            t.ifError(err2);
            c.getBucket(b, function (err3, bucket) {
                t.ifError(err3);
                assertBucket(t, bucket, cfg);
                t.notOk(bucket.reindex_active);
                t.end();
            });
        });
    });
});

// With version 0 (unversioned), updates never schedule a reindex.
test('update bucket (null version, reindex disabled)', function (t) {
    var cfg = clone(FULL_CFG);
    cfg.options.version = 0;
    c.createBucket(b, FULL_CFG, function (err) {
        t.ifError(err);
        cfg = clone(FULL_CFG);

        cfg.options.version = 0;
        cfg.index.foo = {
            type: 'string',
            unique: false
        };

        c.updateBucket(b, cfg, function (err2) {
            t.ifError(err2);
            c.getBucket(b, function (err3, bucket) {
                t.ifError(err3);
                assertBucket(t, bucket, cfg);
                t.notOk(bucket.reindex_active);
                t.end();
            });
        });
    });
});

// Downgrading from version 1 to 0 is rejected with BucketVersionError.
test('update bucket (versioned not ok 1 -> 0)', function (t) {
    var cfg = clone(FULL_CFG);
    cfg.options.version = 1;
    c.createBucket(b, cfg, function (err) {
        t.ifError(err);
        cfg = clone(FULL_CFG);

        cfg.options.version = 0;
        cfg.index.foo = {
            type: 'string',
            unique: false
        };
        cfg.post.push(function two(req, cb) {
            cb();
        });

        c.updateBucket(b, cfg, function (err2) {
            t.ok(err2);
            if (err2) {
                t.equal(err2.name, 'BucketVersionError');
                t.ok(err2.message);
            }
            t.end();
        });
    });
});

// Downgrading from version 2 to 1 is rejected with BucketVersionError.
test('update bucket (versioned not ok 2 -> 1)', function (t) {
    var cfg = clone(FULL_CFG);
    cfg.options.version = 2;
    c.createBucket(b, cfg, function (err) {
        t.ifError(err);
        cfg = clone(FULL_CFG);

        cfg.options.version = 1;
        cfg.index.foo = {
            type: 'string',
            unique: false
        };
        cfg.post.push(function two(req, cb) {
            cb();
        });

        c.updateBucket(b, cfg, function (err2) {
            t.ok(err2);
            if (err2) {
                t.equal(err2.name, 'BucketVersionError');
                t.ok(err2.message);
            }
            t.end();
        });
    });
});

// An index whose value is not an object is rejected.
test('create bucket bad index type', function (t) {
    c.createBucket(b, {index: {foo: 'foo'}}, function (err) {
        t.ok(err);
        t.equal(err.name, 'InvalidBucketConfigError');
        t.ok(err.message);
        t.end();
    });
});

// Trigger entries must be functions.
test('create bucket triggers not function', function (t) {
    c.createBucket(b, {pre: ['foo']}, function (err) {
        t.ok(err);
        t.equal(err.name, 'NotFunctionError');
        t.ok(err.message);
        t.end();
    });
});

// Fetching a bucket that was never created yields BucketNotFoundError.
test('get bucket 404', function (t) {
    c.getBucket(uuid().substr(0, 7), function (err) {
        t.ok(err);
        t.equal(err.name, 'BucketNotFoundError');
        t.ok(err.message);
        t.end();
    });
});

// Deleting a bucket that was never created yields BucketNotFoundError.
test('delete missing bucket', function (t) {
    c.delBucket(uuid().substr(0, 7), function (err) {
        t.ok(err);
        t.equal(err.name, 'BucketNotFoundError');
        t.ok(err.message);
        t.end();
    });
});
| pfmooney/moray | test/buckets.test.js | JavaScript | mpl-2.0 | 9,723 |
// Copyright 2014 ISRG. All rights reserved
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
package main
import (
"time"
"github.com/jmhodges/clock"
"github.com/letsencrypt/boulder/bdns"
"github.com/letsencrypt/boulder/cmd"
bgrpc "github.com/letsencrypt/boulder/grpc"
blog "github.com/letsencrypt/boulder/log"
"github.com/letsencrypt/boulder/metrics"
"github.com/letsencrypt/boulder/rpc"
"github.com/letsencrypt/boulder/va"
caaPB "github.com/letsencrypt/boulder/cmd/caa-checker/proto"
)
// clientName identifies this service when constructing AMQP RPC clients.
const clientName = "VA"

func main() {
	app := cmd.NewAppShell("boulder-va", "Handles challenge validation")
	app.Action = func(c cmd.Config, stats metrics.Statter, logger blog.Logger) {
		// Debug endpoint and periodic profiling run for the process lifetime.
		go cmd.DebugServer(c.VA.DebugAddr)
		go cmd.ProfileCmd("VA", stats)

		// Validation ports default to 80/443; each can be overridden in config.
		pc := &cmd.PortConfig{
			HTTPPort:  80,
			HTTPSPort: 443,
			TLSPort:   443,
		}
		if c.VA.PortConfig.HTTPPort != 0 {
			pc.HTTPPort = c.VA.PortConfig.HTTPPort
		}
		if c.VA.PortConfig.HTTPSPort != 0 {
			pc.HTTPSPort = c.VA.PortConfig.HTTPSPort
		}
		if c.VA.PortConfig.TLSPort != 0 {
			pc.TLSPort = c.VA.PortConfig.TLSPort
		}

		// Optional gRPC CAA checker; nil config leaves caaClient nil.
		var caaClient caaPB.CAACheckerClient
		if c.VA.CAAService != nil {
			conn, err := bgrpc.ClientSetup(c.VA.CAAService)
			cmd.FailOnError(err, "Failed to load credentials and create connection to service")
			caaClient = caaPB.NewCAACheckerClient(conn)
		}
		clk := clock.Default()
		sbc := newGoogleSafeBrowsing(c.VA.GoogleSafeBrowsing)
		vai := va.NewValidationAuthorityImpl(pc, sbc, caaClient, stats, clk)

		// DNS resolver: configured timeout, at least one try; the test
		// resolver additionally permits loopback addresses.
		dnsTimeout, err := time.ParseDuration(c.Common.DNSTimeout)
		cmd.FailOnError(err, "Couldn't parse DNS timeout")
		scoped := metrics.NewStatsdScope(stats, "VA", "DNS")
		dnsTries := c.VA.DNSTries
		if dnsTries < 1 {
			dnsTries = 1
		}
		if !c.Common.DNSAllowLoopbackAddresses {
			vai.DNSResolver = bdns.NewDNSResolverImpl(dnsTimeout, []string{c.Common.DNSResolver}, scoped, clk, dnsTries)
		} else {
			vai.DNSResolver = bdns.NewTestDNSResolverImpl(dnsTimeout, []string{c.Common.DNSResolver}, scoped, clk, dnsTries)
		}
		vai.UserAgent = c.VA.UserAgent
		vai.IssuerDomain = c.VA.IssuerDomain

		// Wire up the RA client and then serve VA requests over AMQP RPC.
		amqpConf := c.VA.AMQP
		rac, err := rpc.NewRegistrationAuthorityClient(clientName, amqpConf, stats)
		cmd.FailOnError(err, "Unable to create RA client")
		vai.RA = rac

		vas, err := rpc.NewAmqpRPCServer(amqpConf, c.VA.MaxConcurrentRPCServerRequests, stats)
		cmd.FailOnError(err, "Unable to create VA RPC server")
		err = rpc.NewValidationAuthorityServer(vas, vai)
		cmd.FailOnError(err, "Unable to setup VA RPC server")
		err = vas.Start(amqpConf)
		cmd.FailOnError(err, "Unable to run VA RPC server")
	}

	app.Run()
}
| patf/boulder | cmd/boulder-va/main.go | GO | mpl-2.0 | 2,783 |
<?php
namespace Honeybee\Common\Util;
use Honeybee\Common\Error\RuntimeError;
/**
 * Tokenizer-based parser that extracts the namespace and class name from a
 * PHP class file without loading it into the interpreter.
 */
class PhpClassParser
{
    /** Token ids treated as skippable whitespace. */
    protected static $t_whitespace = [ T_ENCAPSED_AND_WHITESPACE, T_WHITESPACE ];

    /** Token stream produced by token_get_all(). */
    protected $tokens;

    /** Current cursor position within $tokens. */
    protected $pos;

    /** Position remembered by save() and reinstated by restore(). */
    protected $saved_pos;

    /**
     * Parses the given file and returns namespace/class information.
     *
     * @param string $class_file_path path to a readable PHP class file
     * @return ClassFileInfo
     * @throws RuntimeError when the file cannot be read
     */
    public function parse($class_file_path)
    {
        if (!is_readable($class_file_path)) {
            throw new RuntimeError(sprintf("Unable to read given php class file at %s", $class_file_path));
        }

        $this->tokens = token_get_all(file_get_contents($class_file_path));
        $this->pos = 0;

        // Rewind to the saved position if either element is not found, so the
        // next scan starts from a known point.
        $this->save();
        $namespace = $this->parseNamespace();
        if (!$namespace) {
            $this->restore();
        }
        $this->save();
        $class_name = $this->parseClassName();
        if (!$class_name) {
            $this->restore();
        }

        return new ClassFileInfo(
            [
                'namespace' => $namespace,
                'class_name' => $class_name,
                'class_file_path' => $class_file_path
            ]
        );
    }

    /** Returns the token at the cursor, or null past the end. */
    protected function current()
    {
        return isset($this->tokens[$this->pos]) ? $this->tokens[$this->pos] : null;
    }

    /** Advances the cursor and returns the new current token. */
    protected function next()
    {
        $this->pos++;
        return $this->current();
    }

    /**
     * Advances at least once, then keeps advancing while the current token's
     * id is in the given set; stops on the first non-matching token or EOF.
     */
    protected function skip($token_or_tokens)
    {
        $skip_tokens = (array)$token_or_tokens;
        do {
            $next_token = $this->next();
            $next_token = is_array($next_token) ? $next_token[0] : $next_token;
        } while ($next_token !== null && in_array($next_token, $skip_tokens));
    }

    /**
     * Advances until the current token's id is in the given set (leaving the
     * cursor on it), or until EOF.
     */
    protected function seek($token_or_tokens)
    {
        $expected_tokens = (array)$token_or_tokens;
        do {
            $next_token = $this->next();
            $next_token = is_array($next_token) ? $next_token[0] : $next_token;
        } while ($next_token !== null && !in_array($next_token, $expected_tokens));
    }

    /**
     * Scans for T_NAMESPACE and concatenates tokens up to the terminating
     * ';'. Returns '' when no namespace declaration is found.
     */
    protected function parseNamespace()
    {
        $this->seek(T_NAMESPACE);
        $this->skip(self::$t_whitespace);

        $token = $this->current();
        $namespace = '';
        while ($token !== null && $token !== ';') {
            $namespace .= is_array($token) ? $token[1] : $token;
            $token = $this->next();
        }
        // skip the ';'
        $this->next();

        return trim($namespace);
    }

    /**
     * Scans for T_CLASS and concatenates the following T_STRING tokens into
     * the class name. Returns '' when no class declaration is found.
     */
    protected function parseClassName()
    {
        $this->seek(T_CLASS);
        $this->skip(self::$t_whitespace);

        $token = $this->current();
        $class_name = '';
        while (is_array($token) && $token[0] === T_STRING) {
            $class_name .= $token[1];
            $token = $this->next();
        }

        return trim($class_name);
    }

    /** Remembers the current cursor position. */
    protected function save()
    {
        $this->saved_pos = $this->pos;
    }

    /** Moves the cursor back to the last saved position. */
    protected function restore()
    {
        $this->pos = $this->saved_pos;
    }
}
#include <sstream>
#include <opencv2/opencv.hpp>
#include <opencv/highgui.h>
#include <unistd.h>
#include <Camera/CameraModel.h>
#include <Camera/OpenniProvider.h>
int main(int argc, char** argv){
    // Exactly one argument expected: the folder to store captured frames in.
    if(argc!=2){
        std::cout << "Usage: " << argv[0] << " output_folder\n";
        return -1;
    }
    std::cout << "Started saving frames into " << argv[1] << "..\n";

    cv::namedWindow("DEPTH");
    Camera::OpenNIProvider capture;

    // Capture loop: every ~500 ms grab a frame, display RGB + depth, and save
    // both as PNGs. Runs until the process is killed.
    for(size_t c=0; ; ++c){
        cv::Mat depthMap, rgb;
        usleep(500000);
        auto im=capture.getFrame();
        depthMap=im.depth;
        rgb=im.rgb;
        cv::imshow("DEPTH", depthMap);
        cv::imshow("RGB", rgb);
        cv::waitKey(1);

        std::ostringstream rgbName, depthName;
        rgbName << argv[1] << "/rgb" << c << ".png";
        depthName << argv[1] << "/depth" << c << ".png";

        cv::Mat ddd;
        // Depth is scaled by 1000 into a 16-bit image — presumably metres to
        // millimetres; confirm against OpenNIProvider's units.
        depthMap.convertTo(ddd, CV_16UC1, 1000);
        imwrite(rgbName.str(), rgb);
        imwrite(depthName.str(), ddd);
        std::cout << "Saved images: " << c << "\n";
    }
    return 0;
}
| conte91/JG | Misc/OpenNI_save_images/main.cpp | C++ | mpl-2.0 | 992 |
package org.openmrs.maven.plugins.model;
import org.apache.commons.lang.StringUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
/**
*
*/
public abstract class BaseSdkProperties {
// Property-key fragments, e.g. "omod.<artifactId>.groupId".
protected static final String ARTIFACT_ID = "artifactId";
protected static final String TYPE = "type";
protected static final String GROUP_ID = "groupId";
// Key prefixes / packaging types used to classify artifacts.
protected static final String TYPE_OMOD = "omod";
protected static final String TYPE_WAR = "war";
protected static final String TYPE_JAR = "jar";
protected static final String NAME = "name";
protected static final String VERSION = "version";
protected static final String TYPE_DISTRO = "distro";

// Backing store for every SDK property this class reads and writes.
protected Properties properties;
/**
 * Builds a Properties view of the given war and module artifacts. Versions
 * go under "war.<id>"/"omod.<id>"; type and groupId are only written when
 * they differ from the defaults for the category.
 *
 * @param warArtifacts    webapp artifacts ("-webapp" suffix stripped)
 * @param moduleArtifacts module artifacts ("-omod" suffix stripped)
 * @return freshly populated Properties (never null)
 */
public Properties getModuleAndWarProperties(List<Artifact> warArtifacts, List<Artifact> moduleArtifacts) {
    Properties properties = new Properties();
    addArtifactProperties(properties, warArtifacts, TYPE_WAR, TYPE_WAR, Artifact.GROUP_WEB);
    addArtifactProperties(properties, moduleArtifacts, TYPE_OMOD, TYPE_JAR, Artifact.GROUP_MODULE);
    return properties;
}

/**
 * Writes one artifact category into {@code properties} under {@code keyPrefix},
 * omitting type/groupId entries that match the category defaults. Extracted
 * to remove the duplicated war/module loops.
 */
private void addArtifactProperties(Properties properties, List<Artifact> artifacts,
        String keyPrefix, String defaultType, String defaultGroupId) {
    for (Artifact artifact : artifacts) {
        artifact = getArtifactWithStrippedArtifactId(artifact);
        if (!artifact.getType().equals(defaultType)) {
            properties.setProperty(keyPrefix + "." + artifact.getArtifactId() + "." + TYPE, artifact.getType());
        }
        if (!artifact.getGroupId().equals(defaultGroupId)) {
            properties.setProperty(keyPrefix + "." + artifact.getArtifactId() + "." + GROUP_ID, artifact.getGroupId());
        }
        properties.setProperty(keyPrefix + "." + artifact.getArtifactId(), artifact.getVersion());
    }
}
/** Platform (openmrs webapp) version, stored under the "war.openmrs" key. */
public String getPlatformVersion(){
    return getParam("war.openmrs");
}

public void setPlatformVersion(String version){
    properties.setProperty("war.openmrs", version);
}

/** Overall version, stored under the "version" key. */
public String getVersion(){
    return getParam("version");
}

public void setVersion(String version){
    properties.setProperty("version", version);
}

/** Human-readable name, stored under the "name" key. */
public String getName(){
    return getParam("name");
}

public void setName(String name){
    properties.setProperty("name", name);
}
/**
 * Returns one Artifact per "omod.*" version key, with overrides resolved
 * via checkIfOverwritten and the "omod" destination type applied.
 */
public List<Artifact> getModuleArtifacts(){
    List<Artifact> artifactList = new ArrayList<>();
    for (String key : getKeysOfType(TYPE_OMOD)) {
        artifactList.add(new Artifact(checkIfOverwritten(key, ARTIFACT_ID), getParam(key),
                checkIfOverwritten(key, GROUP_ID), checkIfOverwritten(key, TYPE), "omod"));
    }
    return artifactList;
}

/**
 * Returns one Artifact per "war.*" version key, with overrides resolved
 * via checkIfOverwritten.
 */
public List<Artifact> getWarArtifacts(){
    List<Artifact> artifactList = new ArrayList<>();
    for (String key : getKeysOfType(TYPE_WAR)) {
        artifactList.add(new Artifact(checkIfOverwritten(key, ARTIFACT_ID), getParam(key),
                checkIfOverwritten(key, GROUP_ID), checkIfOverwritten(key, TYPE)));
    }
    return artifactList;
}

/**
 * Collects all property keys whose type prefix (per getArtifactType) equals
 * {@code type}. Extracted to remove the duplicated scan loops above.
 */
private List<String> getKeysOfType(String type) {
    List<String> keys = new ArrayList<>();
    for (Object keyObject : getAllKeys()) {
        String key = keyObject.toString();
        if (getArtifactType(key).equals(type)) {
            keys.add(key);
        }
    }
    return keys;
}
/** All raw property keys currently held. */
protected Set<Object> getAllKeys(){
    return properties.keySet();
}

/**
 * Returns a key's category prefix ("omod", "war", ...) for plain version
 * keys. Override keys (ending in .type/.artifactId/.groupId) and keys with
 * no '.' separator yield an empty string.
 */
protected String getArtifactType(String key){
    int separator = key.indexOf('.');
    String[] parts = key.split("\\.");
    String suffix = parts[parts.length - 1];
    boolean overrideKey = suffix.equals(TYPE)
            || suffix.equals(ARTIFACT_ID)
            || suffix.equals(GROUP_ID);
    if (overrideKey || separator < 0) {
        return "";
    }
    return key.substring(0, separator);
}
/**
 * Resolves {@code param} (artifactId/groupId/type) for the artifact at
 * {@code key}: an explicit "<key>.<param>" property wins; otherwise a
 * default is derived from the key's category (omod/war/distro).
 */
protected String checkIfOverwritten(String key, String param) {
    String newKey = key + "." + param;
    if (getParam(newKey) != null) {
        String setting = getParam(newKey);
        // Special case: the reference application artifact carries a
        // "-package" suffix.
        if (setting.equals("referenceapplication")) {
            setting = setting.concat("-");
            setting = setting.concat("package");
        }
        return setting;
    } else {
        if (param.equals(ARTIFACT_ID)) {
            // Derive the id (with packaging suffix) from the key itself.
            return extractArtifactId(key);
        } else if (param.equals(GROUP_ID)) {
            if (getArtifactType(key).equals(TYPE_WAR)) { //for openmrs.war use org.openmrs.web groupId
                return Artifact.GROUP_WEB;
            } else if (getArtifactType(key).equals(TYPE_OMOD)) {
                return Artifact.GROUP_MODULE;
            } else if (getArtifactType(key).equals(TYPE_DISTRO)) {
                return Artifact.GROUP_DISTRO;
            } else {
                return "";
            }
        } else if (param.equals(TYPE)) {
            // Modules and distros resolve to jar packaging, webapps to war.
            if (getArtifactType(key).equals(TYPE_OMOD)) {
                return TYPE_JAR;
            } else if (getArtifactType(key).equals(TYPE_WAR)) {
                return TYPE_WAR;
            } else if (getArtifactType(key).equals(TYPE_DISTRO)) {
                return TYPE_JAR;
            } else {
                return "";
            }
        } else {
            return "";
        }
    }
}
/**
 * Derives the full artifactId from a property key: the part after the first
 * '.' plus a packaging suffix ("-omod" for modules, "-webapp" for wars, and
 * "-package" for the reference application distro key).
 */
private String extractArtifactId(String key) {
    String base = key.substring(key.indexOf(".") + 1);
    String type = getArtifactType(key);
    if (type.equals(TYPE_OMOD)) {
        return base + "-" + type;
    } else if (type.equals(TYPE_WAR)) {
        return base + "-webapp";
    } else if (key.equals("distro.referenceapplication")) {
        // referenceapplication exclusive parser
        return base + "-package";
    }
    return base;
}
/**
 * get param from properties
 * @param key property key
 * @return the value, or null when the key is absent
 */
public String getParam(String key) {return properties.getProperty(key); }

/**
 * Looks up a module by artifactId under the "omod." prefix.
 *
 * @return the resolved Artifact, or null when the module is not declared
 */
public Artifact getModuleArtifact(String artifactId){
    String key = TYPE_OMOD + "." + artifactId;
    if(StringUtils.isNotBlank(getParam(key))){
        return new Artifact(checkIfOverwritten(key, ARTIFACT_ID), getParam(key), checkIfOverwritten(key, GROUP_ID), checkIfOverwritten(key, TYPE));
    }
    return null;
}
/**
 * Adds or updates a module entry: strips the packaging suffix from the
 * artifactId, records groupId/type only when they differ from the module
 * defaults, then stores the version under "omod.<artifactId>".
 */
public void setModuleProperties(Artifact newModule) {
    newModule = getArtifactWithStrippedArtifactId(newModule);
    if(!newModule.getGroupId().equals(Artifact.GROUP_MODULE)){
        setCustomModuleGroupId(newModule);
    }
    if(!newModule.getType().equals(TYPE_JAR)){
        setCustomModuleType(newModule);
    }
    setModule(newModule);
}
public void removeModuleProperties(Artifact artifact) {
artifact = getArtifactWithStrippedArtifactId(artifact);
if (getModuleArtifact(artifact.getArtifactId()) != null) {
Properties newProperties = new Properties();
newProperties.putAll(properties);
for(Object keyObject: properties.keySet()){
String key = keyObject.toString();
if(key.equals(TYPE_OMOD+"."+artifact.getArtifactId())){
newProperties.remove(key);
} else if(key.equals(TYPE_OMOD+"."+artifact.getArtifactId()+"."+TYPE)){
newProperties.remove(key);
} else if(key.equals(TYPE_OMOD+"."+artifact.getArtifactId()+"."+GROUP_ID)){
newProperties.remove(key);
}
}
properties = newProperties;
}
}
private Artifact getArtifactWithStrippedArtifactId(Artifact artifact) {
String artifactId = artifact.getArtifactId();
if (artifactId.endsWith("-omod")) {
artifact.setArtifactId(artifactId.substring(0, artifactId.indexOf("-")));
return artifact;
} else if (artifactId.endsWith("-webapp")) {
artifact.setArtifactId(artifactId.substring(0, artifactId.indexOf("-")));
return artifact;
}
return artifact;
}
    /** Pins the module version under the TYPE_OMOD-prefixed key. */
    private void setModule(Artifact artifact) {
        properties.setProperty(TYPE_OMOD+"."+artifact.getArtifactId(), artifact.getVersion());
    }
    /** Records a non-default packaging type as a TYPE override entry. */
    private void setCustomModuleType(Artifact artifact){
        properties.setProperty(TYPE_OMOD+"."+artifact.getArtifactId()+"."+TYPE, artifact.getType());
    }
    /** Records a non-default groupId as a GROUP_ID override entry. */
    private void setCustomModuleGroupId(Artifact artifact){
        properties.setProperty(TYPE_OMOD+"."+artifact.getArtifactId()+"."+GROUP_ID, artifact.getGroupId());
    }
    /**
     * Mirrors all base-SDK properties (module/war pins, NAME, VERSION) from
     * this instance into {@code other}, then drops from {@code other} any
     * base-SDK key that is blank/absent here.
     */
    public void synchronize(BaseSdkProperties other){
        for(Object key: getAllKeys()){
            if (isBaseSdkProperty(key.toString())) {
                other.properties.put(key, properties.get(key));
            }
        }
        // iterate over a copy so we can remove from other.properties mid-loop
        for(Object key: new ArrayList<>(other.getAllKeys())){
            if(isBaseSdkProperty(key.toString())){
                if(StringUtils.isBlank(getParam(key.toString()))){
                    other.properties.remove(key);
                }
            }
        }
    }
private boolean isBaseSdkProperty(String key) {
return (key.startsWith(TYPE_OMOD) || key.startsWith(TYPE_WAR) || key.equals(NAME) || key.equals(VERSION));
}
    /**
     * Pins the given artifacts: each module via {@link #setModuleProperties},
     * and the platform version from the war artifacts. Note that when several
     * wars are supplied, the last one wins.
     */
    public void setArtifacts(List<Artifact> warArtifacts, List<Artifact> moduleArtifacts){
        for (Artifact moduleArtifact : moduleArtifacts) {
            this.setModuleProperties(moduleArtifact);
        }
        for (Artifact warArtifact : warArtifacts) {
            this.setPlatformVersion(warArtifact.getVersion());
        }
    }
}
| AdamGrzybkowski/openmrs-sdk | maven-plugin/src/main/java/org/openmrs/maven/plugins/model/BaseSdkProperties.java | Java | mpl-2.0 | 10,429 |
/* -*- Mode: javascript; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* Date: 14 April 2001
*
* SUMMARY: Testing obj.prop getter/setter
* Note: this is a non-ECMA extension to the language.
*/
//-----------------------------------------------------------------------------
var gTestfile = 'getset-003.js';
// Parallel arrays: statusitems[i] / actualvalues[i] / expectedvalues[i]
// record one sub-test each; UBound counts recorded sub-tests (see addThis()).
var UBound = 0;
var BUGNUMBER = '(none)';
var summary = 'Testing obj.prop getter/setter';
var statprefix = 'Status: ';
var status = '';
var statusitems = [ ];
var actual = '';
var actualvalues = [ ];
var expect= '';
var expectedvalues = [ ];
// fixture values assigned through the accessors under test
var cnDEFAULT = 'default name';
var cnFRED = 'Fred';
var obj = {};
var obj2 = {};
var s = '';
// SECTION1: define getter/setter directly on an object (not its prototype)
obj = new Object();
obj.nameSETS = 0;
obj.nameGETS = 0;
// "prop setter = f" / "prop getter = f" is the legacy SpiderMonkey syntax
// under test (non-ECMA; see the file header). Each accessor bumps its
// counter so the test can verify how many times it was invoked.
obj.name setter = function(newValue) {this._name=newValue; this.nameSETS++;}
obj.name getter = function() {this.nameGETS++; return this._name;}
status = 'In SECTION1 of test after 0 sets, 0 gets';
actual = [obj.nameSETS,obj.nameGETS];
expect = [0,0];
addThis();
s = obj.name;
status = 'In SECTION1 of test after 0 sets, 1 get';
actual = [obj.nameSETS,obj.nameGETS];
expect = [0,1];
addThis();
obj.name = cnFRED;
status = 'In SECTION1 of test after 1 set, 1 get';
actual = [obj.nameSETS,obj.nameGETS];
expect = [1,1];
addThis();
// self-assignment triggers one get plus one set
obj.name = obj.name;
status = 'In SECTION1 of test after 2 sets, 2 gets';
actual = [obj.nameSETS,obj.nameGETS];
expect = [2,2];
addThis();
// SECTION2: define getter/setter in Object.prototype
Object.prototype.nameSETS = 0;
Object.prototype.nameGETS = 0;
Object.prototype.name setter = function(newValue) {this._name=newValue; this.nameSETS++;}
Object.prototype.name getter = function() {this.nameGETS++; return this._name;}
obj = new Object();
status = 'In SECTION2 of test after 0 sets, 0 gets';
actual = [obj.nameSETS,obj.nameGETS];
expect = [0,0];
addThis();
s = obj.name;
status = 'In SECTION2 of test after 0 sets, 1 get';
actual = [obj.nameSETS,obj.nameGETS];
expect = [0,1];
addThis();
obj.name = cnFRED;
status = 'In SECTION2 of test after 1 set, 1 get';
actual = [obj.nameSETS,obj.nameGETS];
expect = [1,1];
addThis();
obj.name = obj.name;
status = 'In SECTION2 of test after 2 sets, 2 gets';
actual = [obj.nameSETS,obj.nameGETS];
expect = [2,2];
addThis();
// SECTION 3: define getter/setter in prototype of user-defined constructor
function TestObject()
{
}
TestObject.prototype.nameSETS = 0;
TestObject.prototype.nameGETS = 0;
TestObject.prototype.name setter = function(newValue) {this._name=newValue; this.nameSETS++;}
TestObject.prototype.name getter = function() {this.nameGETS++; return this._name;}
TestObject.prototype.name = cnDEFAULT;
obj = new TestObject();
status = 'In SECTION3 of test after 1 set, 0 gets'; // (we set a default value in the prototype)
actual = [obj.nameSETS,obj.nameGETS];
expect = [1,0];
addThis();
s = obj.name;
status = 'In SECTION3 of test after 1 set, 1 get';
actual = [obj.nameSETS,obj.nameGETS];
expect = [1,1];
addThis();
obj.name = cnFRED;
status = 'In SECTION3 of test after 2 sets, 1 get';
actual = [obj.nameSETS,obj.nameGETS];
expect = [2,1];
addThis();
obj.name = obj.name;
status = 'In SECTION3 of test after 3 sets, 2 gets';
actual = [obj.nameSETS,obj.nameGETS];
expect = [3,2];
addThis();
obj2 = new TestObject();
status = 'obj2 = new TestObject() after 1 set, 0 gets';
actual = [obj2.nameSETS,obj2.nameGETS];
expect = [1,0]; // we set a default value in the prototype -
addThis();
// Use both obj and obj2 - the counters live on this._name's holder, so the
// get on obj below is also observable through obj's counters (next check).
obj2.name = obj.name + obj2.name;
status = 'obj2 = new TestObject() after 2 sets, 1 get';
actual = [obj2.nameSETS,obj2.nameGETS];
expect = [2,1];
addThis();
status = 'In SECTION3 of test after 3 sets, 3 gets';
actual = [obj.nameSETS,obj.nameGETS];
expect = [3,3]; // we left off at [3,2] above -
addThis();
//---------------------------------------------------------------------------------
test();
//---------------------------------------------------------------------------------
// Record the current sub-test (status plus actual/expected snapshots) for the
// later comparison pass in test(). Arrays are stored via their string form.
function addThis()
{
  statusitems[UBound] = status;
  actualvalues[UBound] = String(actual);
  expectedvalues[UBound] = String(expect);
  UBound++;
}
// Replay every recorded sub-test through the test harness, comparing the
// expected and actual snapshots captured by addThis().
function test()
{
  enterFunc ('test');
  printBugNumber(BUGNUMBER);
  printStatus (summary);
  for (var i = 0; i < UBound; i++)
  {
    reportCompare(expectedvalues[i], actualvalues[i], getStatus(i));
  }
  exitFunc ('test');
}
// Human-readable label for sub-test i, e.g. "Status: In SECTION1 ...".
function getStatus(i)
{
  var label = statusitems[i];
  return statprefix + label;
}
| mozilla/rhino | testsrc/tests/js1_5/extensions/getset-003.js | JavaScript | mpl-2.0 | 4,686 |
package command
import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"os"
"path"
"path/filepath"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
svchost "github.com/hashicorp/terraform-svchost"
"github.com/hashicorp/terraform-svchost/disco"
"github.com/hashicorp/terraform/helper/copy"
"github.com/hashicorp/terraform/internal/getproviders"
"github.com/mitchellh/cli"
)
// This map from provider type name to namespace is used by the fake registry
// when called via LookupLegacyProvider. Providers not in this map will return
// a 404 Not Found error.
var legacyProviderNamespaces = map[string]string{
	"foo": "hashicorp",
	"bar": "hashicorp",
	"baz": "terraform-providers",
}

// verifyExpectedFiles compares the file tree rooted at the current working
// directory against the fixture tree at expectedPath, failing the test if the
// sets of files differ or if any file's contents differ.
func verifyExpectedFiles(t *testing.T, expectedPath string) {
	// Compare output and expected file trees
	var outputFiles, expectedFiles []string
	// Gather list of output files in the current working directory
	err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
		if !info.IsDir() {
			outputFiles = append(outputFiles, path)
		}
		return nil
	})
	if err != nil {
		t.Fatal("error listing output files:", err)
	}
	// Gather list of expected files (chdir into the fixture and back)
	revertChdir := testChdir(t, expectedPath)
	err = filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
		if !info.IsDir() {
			expectedFiles = append(expectedFiles, path)
		}
		return nil
	})
	if err != nil {
		t.Fatal("error listing expected files:", err)
	}
	revertChdir()
	// If the file trees don't match, give up early
	if diff := cmp.Diff(expectedFiles, outputFiles); diff != "" {
		t.Fatalf("expected and output file trees do not match\n%s", diff)
	}
	// Check that the contents of each file is correct
	for _, filePath := range outputFiles {
		output, err := ioutil.ReadFile(path.Join(".", filePath))
		if err != nil {
			t.Fatalf("failed to read output %s: %s", filePath, err)
		}
		expected, err := ioutil.ReadFile(path.Join(expectedPath, filePath))
		if err != nil {
			t.Fatalf("failed to read expected %s: %s", filePath, err)
		}
		if diff := cmp.Diff(expected, output); diff != "" {
			t.Fatalf("expected and output file for %s do not match\n%s", filePath, diff)
		}
	}
}
// TestZeroThirteenUpgrade_success runs the 0.13upgrade command over a set of
// fixture directories ("input") and verifies that the files it produces match
// the corresponding "expected" fixture tree.
func TestZeroThirteenUpgrade_success(t *testing.T) {
	registrySource, close := testRegistrySource(t)
	defer close()
	// map of test-case name -> fixture directory
	testCases := map[string]string{
		"implicit":              "013upgrade-implicit-providers",
		"explicit":              "013upgrade-explicit-providers",
		"provider not found":    "013upgrade-provider-not-found",
		"implicit not found":    "013upgrade-implicit-not-found",
		"file exists":           "013upgrade-file-exists",
		"no providers":          "013upgrade-no-providers",
		"submodule":             "013upgrade-submodule",
		"providers with source": "013upgrade-providers-with-source",
		"preserves comments":    "013upgrade-preserves-comments",
		"multiple blocks":       "013upgrade-multiple-blocks",
		"multiple files":        "013upgrade-multiple-files",
		"existing versions.tf":  "013upgrade-existing-versions-tf",
		"skipped files":         "013upgrade-skipped-files",
	}
	for name, testPath := range testCases {
		t.Run(name, func(t *testing.T) {
			inputPath, err := filepath.Abs(testFixturePath(path.Join(testPath, "input")))
			if err != nil {
				t.Fatalf("failed to find input path %s: %s", testPath, err)
			}
			expectedPath, err := filepath.Abs(testFixturePath(path.Join(testPath, "expected")))
			if err != nil {
				t.Fatalf("failed to find expected path %s: %s", testPath, err)
			}
			// run the command in a throwaway copy of the input fixture
			td := tempDir(t)
			copy.CopyDir(inputPath, td)
			defer os.RemoveAll(td)
			defer testChdir(t, td)()
			ui := new(cli.MockUi)
			c := &ZeroThirteenUpgradeCommand{
				Meta: Meta{
					testingOverrides: metaOverridesForProvider(testProvider()),
					ProviderSource:   registrySource,
					Ui:               ui,
				},
			}
			if code := c.Run([]string{"-yes"}); code != 0 {
				t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
			}
			output := ui.OutputWriter.String()
			if !strings.Contains(output, "Upgrade complete") {
				t.Fatal("unexpected output:", output)
			}
			verifyExpectedFiles(t, expectedPath)
		})
	}
}
// Ensure that non-default upgrade paths are supported, and that the output is
// in the correct place. This test is very similar to the table tests above,
// but with a different expected output path, and with an argument passed to
// the Run call.
func TestZeroThirteenUpgrade_submodule(t *testing.T) {
	registrySource, close := testRegistrySource(t)
	defer close()
	testPath := "013upgrade-submodule"
	inputPath, err := filepath.Abs(testFixturePath(path.Join(testPath, "input")))
	if err != nil {
		t.Fatalf("failed to find input path %s: %s", testPath, err)
	}
	// The expected output for processing a submodule is different
	expectedPath, err := filepath.Abs(testFixturePath(path.Join(testPath, "expected-module")))
	if err != nil {
		t.Fatalf("failed to find expected path %s: %s", testPath, err)
	}
	td := tempDir(t)
	copy.CopyDir(inputPath, td)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()
	ui := new(cli.MockUi)
	c := &ZeroThirteenUpgradeCommand{
		Meta: Meta{
			testingOverrides: metaOverridesForProvider(testProvider()),
			ProviderSource:   registrySource,
			Ui:               ui,
		},
	}
	// Here we pass a target module directory to process
	if code := c.Run([]string{"-yes", "module"}); code != 0 {
		t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
	}
	output := ui.OutputWriter.String()
	if !strings.Contains(output, "Upgrade complete") {
		t.Fatal("unexpected output:", output)
	}
	verifyExpectedFiles(t, expectedPath)
}
// Verify that JSON and override files are skipped with a warning. Generated
// output for this config is verified in the table driven tests above.
func TestZeroThirteenUpgrade_skippedFiles(t *testing.T) {
	inputPath := testFixturePath(path.Join("013upgrade-skipped-files", "input"))
	td := tempDir(t)
	copy.CopyDir(inputPath, td)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()
	ui := new(cli.MockUi)
	c := &ZeroThirteenUpgradeCommand{
		Meta: Meta{
			testingOverrides: metaOverridesForProvider(testProvider()),
			Ui:               ui,
		},
	}
	if code := c.Run([]string{"-yes"}); code != 0 {
		t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
	}
	output := ui.OutputWriter.String()
	if !strings.Contains(output, "Upgrade complete") {
		t.Fatal("unexpected output:", output)
	}
	// The skip warnings are emitted on the UI's error stream, not stdout.
	errMsg := ui.ErrorWriter.String()
	if !strings.Contains(errMsg, `The JSON configuration file "variables.tf.json" was skipped`) {
		t.Fatal("missing JSON skipped file warning:", errMsg)
	}
	if !strings.Contains(errMsg, `The override configuration file "bar_override.tf" was skipped`) {
		t.Fatal("missing override skipped file warning:", errMsg)
	}
}
// TestZeroThirteenUpgrade_confirm runs the command without -yes and answers
// "yes" on the interactive prompt; the upgrade should then proceed.
func TestZeroThirteenUpgrade_confirm(t *testing.T) {
	inputPath := testFixturePath(path.Join("013upgrade-explicit-providers", "input"))
	td := tempDir(t)
	copy.CopyDir(inputPath, td)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()
	ui := new(cli.MockUi)
	inputBuf := &bytes.Buffer{}
	ui.InputReader = inputBuf
	inputBuf.WriteString("yes")
	c := &ZeroThirteenUpgradeCommand{
		Meta: Meta{
			testingOverrides: metaOverridesForProvider(testProvider()),
			Ui:               ui,
		},
	}
	if code := c.Run(nil); code != 0 {
		t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
	}
	output := ui.OutputWriter.String()
	if !strings.Contains(output, "Upgrade complete") {
		t.Fatal("unexpected output:", output)
	}
}

// TestZeroThirteenUpgrade_cancel answers "no" on the prompt: the command
// still exits 0, but reports the upgrade as cancelled rather than complete.
func TestZeroThirteenUpgrade_cancel(t *testing.T) {
	inputPath := testFixturePath(path.Join("013upgrade-explicit-providers", "input"))
	td := tempDir(t)
	copy.CopyDir(inputPath, td)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()
	ui := new(cli.MockUi)
	inputBuf := &bytes.Buffer{}
	ui.InputReader = inputBuf
	inputBuf.WriteString("no")
	c := &ZeroThirteenUpgradeCommand{
		Meta: Meta{
			testingOverrides: metaOverridesForProvider(testProvider()),
			Ui:               ui,
		},
	}
	if code := c.Run(nil); code != 0 {
		t.Fatalf("bad: \n%s", ui.ErrorWriter.String())
	}
	output := ui.OutputWriter.String()
	if !strings.Contains(output, "Upgrade cancelled") {
		t.Fatal("unexpected output:", output)
	}
	if strings.Contains(output, "Upgrade complete") {
		t.Fatal("unexpected output:", output)
	}
}
// A fixture pinning an incompatible required_version must fail the upgrade.
func TestZeroThirteenUpgrade_unsupportedVersion(t *testing.T) {
	inputPath := testFixturePath("013upgrade-unsupported-version")
	td := tempDir(t)
	copy.CopyDir(inputPath, td)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()
	ui := new(cli.MockUi)
	c := &ZeroThirteenUpgradeCommand{
		Meta: Meta{
			testingOverrides: metaOverridesForProvider(testProvider()),
			Ui:               ui,
		},
	}
	if code := c.Run([]string{"-yes"}); code == 0 {
		t.Fatal("expected error, got:", ui.OutputWriter)
	}
	errMsg := ui.ErrorWriter.String()
	if !strings.Contains(errMsg, `Unsupported Terraform Core version`) {
		t.Fatal("missing version constraint error:", errMsg)
	}
}

// Unknown CLI flags should fail with the command usage text.
func TestZeroThirteenUpgrade_invalidFlags(t *testing.T) {
	td := tempDir(t)
	os.MkdirAll(td, 0755)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()
	ui := new(cli.MockUi)
	c := &ZeroThirteenUpgradeCommand{
		Meta: Meta{
			testingOverrides: metaOverridesForProvider(testProvider()),
			Ui:               ui,
		},
	}
	if code := c.Run([]string{"--whoops"}); code == 0 {
		t.Fatal("expected error, got:", ui.OutputWriter)
	}
	errMsg := ui.ErrorWriter.String()
	if !strings.Contains(errMsg, "Usage: terraform 0.13upgrade") {
		t.Fatal("unexpected error:", errMsg)
	}
}

// At most one target directory argument is accepted.
func TestZeroThirteenUpgrade_tooManyArguments(t *testing.T) {
	td := tempDir(t)
	os.MkdirAll(td, 0755)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()
	ui := new(cli.MockUi)
	c := &ZeroThirteenUpgradeCommand{
		Meta: Meta{
			testingOverrides: metaOverridesForProvider(testProvider()),
			Ui:               ui,
		},
	}
	if code := c.Run([]string{".", "./modules/test"}); code == 0 {
		t.Fatal("expected error, got:", ui.OutputWriter)
	}
	errMsg := ui.ErrorWriter.String()
	if !strings.Contains(errMsg, "Error: Too many arguments") {
		t.Fatal("unexpected error:", errMsg)
	}
}

// Running in a directory with no configuration files must fail cleanly.
func TestZeroThirteenUpgrade_empty(t *testing.T) {
	td := tempDir(t)
	os.MkdirAll(td, 0755)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()
	ui := new(cli.MockUi)
	c := &ZeroThirteenUpgradeCommand{
		Meta: Meta{
			testingOverrides: metaOverridesForProvider(testProvider()),
			Ui:               ui,
		},
	}
	if code := c.Run([]string{"-yes"}); code == 0 {
		t.Fatal("expected error, got:", ui.OutputWriter)
	}
	errMsg := ui.ErrorWriter.String()
	if !strings.Contains(errMsg, "Not a module directory") {
		t.Fatal("unexpected error:", errMsg)
	}
}
// testServices starts up a local HTTP server running a fake provider registry
// service which responds only to discovery requests and legacy provider lookup
// API calls.
//
// The final return value is a function to call at the end of a test function
// to shut down the test server. After you call that function, the discovery
// object becomes useless.
func testServices(t *testing.T) (services *disco.Disco, cleanup func()) {
	// all requests are answered by fakeRegistryHandler below
	server := httptest.NewServer(http.HandlerFunc(fakeRegistryHandler))
	services = disco.New()
	services.ForceHostServices(svchost.Hostname("registry.terraform.io"), map[string]interface{}{
		"providers.v1": server.URL + "/providers/v1/",
	})
	return services, func() {
		server.Close()
	}
}

// testRegistrySource is a wrapper around testServices that uses the created
// discovery object to produce a Source instance that is ready to use with the
// fake registry services.
//
// As with testServices, the final return value is a function to call at the end
// of your test in order to shut down the test server.
func testRegistrySource(t *testing.T) (source *getproviders.RegistrySource, cleanup func()) {
	services, close := testServices(t)
	source = getproviders.NewRegistrySource(services)
	return source, close
}
// fakeRegistryHandler implements the fake provider registry used by the tests
// above. It only answers legacy namespace lookups of the form
// /providers/v1/-/{name}/versions for names present in
// legacyProviderNamespaces; everything else gets a 404.
func fakeRegistryHandler(resp http.ResponseWriter, req *http.Request) {
	path := req.URL.EscapedPath()
	if !strings.HasPrefix(path, "/providers/v1/") {
		resp.WriteHeader(404)
		resp.Write([]byte(`not a provider registry endpoint`))
		return
	}
	pathParts := strings.Split(path, "/")[3:]
	if len(pathParts) != 3 {
		resp.WriteHeader(404)
		resp.Write([]byte(`unrecognized path scheme`))
		return
	}
	if pathParts[0] != "-" || pathParts[2] != "versions" {
		resp.WriteHeader(404)
		resp.Write([]byte(`this registry only supports legacy namespace lookup requests`))
		// Bug fix: this branch previously fell through, so the handler went on
		// to write a second status code and body on top of the 404 response.
		return
	}
	name := pathParts[1]
	if namespace, ok := legacyProviderNamespaces[name]; ok {
		resp.Header().Set("Content-Type", "application/json")
		resp.WriteHeader(200)
		resp.Write([]byte(fmt.Sprintf(`{"id":"%s/%s"}`, namespace, name)))
	} else {
		resp.WriteHeader(404)
		resp.Write([]byte(`provider not found`))
	}
}
| VladRassokhin/terraform | command/013_config_upgrade_test.go | GO | mpl-2.0 | 12,779 |
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "WebBrowserPersistDocumentChild.h"
#include "mozilla/ipc/InputStreamUtils.h"
#include "nsIDocument.h"
#include "nsIInputStream.h"
#include "WebBrowserPersistLocalDocument.h"
#include "WebBrowserPersistResourcesChild.h"
#include "WebBrowserPersistSerializeChild.h"
namespace mozilla {
WebBrowserPersistDocumentChild::WebBrowserPersistDocumentChild()
{
}
WebBrowserPersistDocumentChild::~WebBrowserPersistDocumentChild() = default;
// Wrap an in-process nsIDocument (may be null) in a
// WebBrowserPersistLocalDocument and forward to the
// nsIWebBrowserPersistDocument overload below.
void
WebBrowserPersistDocumentChild::Start(nsIDocument* aDocument)
{
    RefPtr<WebBrowserPersistLocalDocument> doc;
    if (aDocument) {
        doc = new WebBrowserPersistLocalDocument(aDocument);
    }
    Start(doc);
}
// Gather the document's attributes (URIs, content type, charset, title,
// referrer, cache key, persist flags) plus its POST data, then ship them to
// the parent via SendAttributes. Any failing getter aborts initialization by
// sending SendInitFailure with that error (see the ENSURE macro).
void
WebBrowserPersistDocumentChild::Start(nsIWebBrowserPersistDocument* aDocument)
{
    MOZ_ASSERT(!mDocument);
    if (!aDocument) {
        SendInitFailure(NS_ERROR_FAILURE);
        return;
    }
    WebBrowserPersistDocumentAttrs attrs;
    nsCOMPtr<nsIInputStream> postDataStream;
    OptionalInputStreamParams postData;
    nsTArray<FileDescriptor> postFiles;
#define ENSURE(e) do { \
        nsresult rv = (e); \
        if (NS_FAILED(rv)) { \
            SendInitFailure(rv); \
            return; \
        } \
    } while(0)
    ENSURE(aDocument->GetIsPrivate(&(attrs.isPrivate())));
    ENSURE(aDocument->GetDocumentURI(attrs.documentURI()));
    ENSURE(aDocument->GetBaseURI(attrs.baseURI()));
    ENSURE(aDocument->GetContentType(attrs.contentType()));
    ENSURE(aDocument->GetCharacterSet(attrs.characterSet()));
    ENSURE(aDocument->GetTitle(attrs.title()));
    ENSURE(aDocument->GetReferrer(attrs.referrer()));
    ENSURE(aDocument->GetContentDisposition(attrs.contentDisposition()));
    ENSURE(aDocument->GetCacheKey(&(attrs.cacheKey())));
    ENSURE(aDocument->GetPersistFlags(&(attrs.persistFlags())));
    ENSURE(aDocument->GetPostData(getter_AddRefs(postDataStream)));
    ipc::SerializeInputStream(postDataStream,
                              postData,
                              postFiles);
#undef ENSURE
    mDocument = aDocument;
    SendAttributes(attrs, postData, postFiles);
}
// Apply updated persist flags pushed down from the parent actor.
mozilla::ipc::IPCResult
WebBrowserPersistDocumentChild::RecvSetPersistFlags(const uint32_t& aNewFlags)
{
    mDocument->SetPersistFlags(aNewFlags);
    return IPC_OK();
}
// The resources actor is manually refcounted across the IPC layer:
// NS_ADDREF here is balanced by NS_RELEASE in the Dealloc method below.
PWebBrowserPersistResourcesChild*
WebBrowserPersistDocumentChild::AllocPWebBrowserPersistResourcesChild()
{
    auto* actor = new WebBrowserPersistResourcesChild();
    NS_ADDREF(actor);
    return actor;
}
// Start walking the document's resources; the actor doubles as the visitor.
mozilla::ipc::IPCResult
WebBrowserPersistDocumentChild::RecvPWebBrowserPersistResourcesConstructor(PWebBrowserPersistResourcesChild* aActor)
{
    RefPtr<WebBrowserPersistResourcesChild> visitor =
        static_cast<WebBrowserPersistResourcesChild*>(aActor);
    nsresult rv = mDocument->ReadResources(visitor);
    if (NS_FAILED(rv)) {
        // This is a sync failure on the child side but an async
        // failure on the parent side -- it already got NS_OK from
        // ReadResources, so the error has to be reported via the
        // visitor instead.
        visitor->EndVisit(mDocument, rv);
    }
    return IPC_OK();
}
bool
WebBrowserPersistDocumentChild::DeallocPWebBrowserPersistResourcesChild(PWebBrowserPersistResourcesChild* aActor)
{
    auto* castActor =
        static_cast<WebBrowserPersistResourcesChild*>(aActor);
    NS_RELEASE(castActor);
    return true;
}
// Only aMap is consumed here; the content type, encoder flags, and wrap
// column are used by the Recv constructor below.
PWebBrowserPersistSerializeChild*
WebBrowserPersistDocumentChild::AllocPWebBrowserPersistSerializeChild(
    const WebBrowserPersistURIMap& aMap,
    const nsCString& aRequestedContentType,
    const uint32_t& aEncoderFlags,
    const uint32_t& aWrapColumn)
{
    auto* actor = new WebBrowserPersistSerializeChild(aMap);
    NS_ADDREF(actor);
    return actor;
}
// Kick off serialization of the document's content. On synchronous failure
// the completion callback is invoked directly (mirrors ReadResources above).
mozilla::ipc::IPCResult
WebBrowserPersistDocumentChild::RecvPWebBrowserPersistSerializeConstructor(
    PWebBrowserPersistSerializeChild* aActor,
    const WebBrowserPersistURIMap& aMap,
    const nsCString& aRequestedContentType,
    const uint32_t& aEncoderFlags,
    const uint32_t& aWrapColumn)
{
    auto* castActor =
        static_cast<WebBrowserPersistSerializeChild*>(aActor);
    // This actor performs the roles of: completion, URI map, and output stream.
    nsresult rv = mDocument->WriteContent(castActor,
                                          castActor,
                                          aRequestedContentType,
                                          aEncoderFlags,
                                          aWrapColumn,
                                          castActor);
    if (NS_FAILED(rv)) {
        castActor->OnFinish(mDocument, castActor, aRequestedContentType, rv);
    }
    return IPC_OK();
}
bool
WebBrowserPersistDocumentChild::DeallocPWebBrowserPersistSerializeChild(PWebBrowserPersistSerializeChild* aActor)
{
    auto* castActor =
        static_cast<WebBrowserPersistSerializeChild*>(aActor);
    NS_RELEASE(castActor);
    return true;
}
} // namespace mozilla
| Yukarumya/Yukarum-Redfoxes | dom/webbrowserpersist/WebBrowserPersistDocumentChild.cpp | C++ | mpl-2.0 | 5,345 |
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//
// Eric Vaughan
// Netscape Communications
//
// See documentation in associated header file
//
#include "nsGridRow.h"
#include "nsBoxLayoutState.h"
#include "nsIFrame.h"
// All cached layout values start at -1. NOTE(review): -1 appears to act as a
// "not yet computed" sentinel filled in lazily by the grid layout code --
// confirm against nsGrid before relying on this. Margins default to 0.
nsGridRow::nsGridRow():mIsBogus(false),
                       mBox(nullptr),
                       mFlex(-1),
                       mPref(-1),
                       mMin(-1),
                       mMax(-1),
                       mTop(-1),
                       mBottom(-1),
                       mTopMargin(0),
                       mBottomMargin(0)
{
  MOZ_COUNT_CTOR(nsGridRow);
}
// Re-bind this row to a box and reset every cached value to the same
// defaults the constructor uses.
void
nsGridRow::Init(nsIFrame* aBox, bool aIsBogus)
{
  mBox = aBox;
  mIsBogus = aIsBogus;
  mFlex = -1;
  mPref = -1;
  mMin = -1;
  mMax = -1;
  mTop = -1;
  mBottom = -1;
  mTopMargin = 0;
  mBottomMargin = 0;
}
nsGridRow::~nsGridRow()
{
  MOZ_COUNT_DTOR(nsGridRow);
}
// A row is collapsed when it has an underlying box and that box is collapsed.
bool
nsGridRow::IsXULCollapsed()
{
  return mBox && mBox->IsXULCollapsed();
}
| Yukarumya/Yukarum-Redfoxes | layout/xul/grid/nsGridRow.cpp | C++ | mpl-2.0 | 1,216 |
import datetime
from dataclasses import dataclass
from typing import List, Dict, Optional
from everyclass.server.entity import domain
from everyclass.server.utils import JSONSerializable
from everyclass.server.utils.encryption import encrypt, RTYPE_STUDENT, RTYPE_TEACHER
@dataclass
class Event(JSONSerializable):
    """A single timetable slot: course name plus the room it takes place in."""
    name: str
    room: str

    def __json_encode__(self):
        return {'name': self.name, 'room': self.room}
@dataclass
class People(JSONSerializable):
    """A person reference: display name plus the encrypted identifier
    produced by encrypt() (serialized as "id")."""
    name: str
    id_encoded: str

    def __json_encode__(self):
        return {'name': self.name, 'id': self.id_encoded}
@dataclass
class MultiPeopleSchedule(JSONSerializable):
    schedules: List[Dict[str, Optional[Event]]]
    accessible_people: List[People]
    inaccessible_people: List[People]

    def __json_encode__(self):
        return {'schedules': self.schedules,
                'inaccessible_people': self.inaccessible_people,
                'accessible_people': self.accessible_people}

    # Defining __init__ here replaces the dataclass-generated one.
    def __init__(self, people: List[str], date: datetime.date, current_user: str):
        """Multi-person schedule view: given a list of student/staff IDs and a
        date, build each accessible person's schedule for that day."""
        # local imports avoid circular imports at module load time
        from everyclass.server import logger
        from everyclass.server.entity import service
        from everyclass.server.user import service as user_service
        from everyclass.server.entity import service as entity_service
        accessible_people_ids = []
        accessible_people = []
        inaccessible_people = []
        for identifier in people:
            if user_service.has_access(identifier, current_user)[0]:
                accessible_people_ids.append(identifier)
            else:
                # NOTE(review): this always uses get_student()/RTYPE_STUDENT even
                # though `people` may contain teacher IDs -- confirm teachers
                # cannot reach this branch.
                inaccessible_people.append(People(entity_service.get_student(identifier).name, encrypt(RTYPE_STUDENT, identifier)))
        self.schedules = list()
        for identifier in accessible_people_ids:
            is_student, people_info = service.get_people_info(identifier)
            accessible_people.append(
                People(people_info.name, encrypt(RTYPE_STUDENT, identifier) if is_student else encrypt(RTYPE_TEACHER, identifier)))
            # NOTE(review): loop-invariant (depends only on `date`); could be
            # hoisted out of the loop.
            semester, week, day = domain.get_semester_date(date)
            if is_student:
                cards = service.get_student_timetable(identifier, semester).cards
            else:
                cards = service.get_teacher_timetable(identifier, semester).cards
            cards = filter(lambda c: week in c.weeks and c.lesson[0] == str(day), cards)  # keep only cards matching the date's week number and weekday
            event_dict = {}
            for card in cards:
                time = card.lesson[1:5]  # "10102" -> "0102"
                if time not in event_dict:
                    event_dict[time] = Event(name=card.name, room=card.room)
                else:
                    # two courses overlap in the same slot; keep the first one
                    logger.warning("time of card overlapped", extra={'people_identifier': identifier,
                                                                    'date': date})
            # fill slots with no course with None ("0102", "0304", ... "0910")
            for i in range(1, 10, 2):
                key = f"{i:02}{i + 1:02}"
                if key not in event_dict:
                    event_dict[key] = None
            self.schedules.append(event_dict)
        self.inaccessible_people = inaccessible_people
        self.accessible_people = accessible_people
@dataclass
class SearchResultItem(JSONSerializable):
    """One entry in people-search results."""
    name: str
    description: str
    people_type: str
    id_encoded: str
    has_access: bool
    # NOTE(review): declared Optional[bool] but the name suggests a textual
    # reason -- confirm the intended type (Optional[str]?) with callers.
    forbid_reason: Optional[bool]

    def __json_encode__(self):
        return {'name': self.name, 'description': self.description,
                'people_type': self.people_type, 'id_encoded': self.id_encoded,
                'has_access': self.has_access, 'forbid_reason': self.forbid_reason}
| fr0der1c/EveryClass-server | everyclass/server/entity/model/multi_people_schedule.py | Python | mpl-2.0 | 3,857 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
from io import BytesIO
from gzip import GzipFile
import pytest
from botocore.exceptions import ClientError
from requests.exceptions import ContentDecodingError
from requests.packages.urllib3.response import HTTPResponse
from tecken.storage import StorageBucket
from tecken.base.symboldownloader import (
SymbolDownloader,
SymbolNotFound,
iter_lines,
exists_in_source,
)
def test_exists_in_source(botomock, settings):
    """exists_in_source() asks S3 once per key and caches the result."""
    mock_api_calls = []

    def mock_api_call(self, operation_name, api_params):
        mock_api_calls.append(api_params)
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            return {}
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    bucket = StorageBucket("https://s3.example.com/private")
    with botomock(mock_api_call):
        assert not exists_in_source(bucket, "xxx.sym")
        assert exists_in_source(bucket, "xul.sym")
        assert len(mock_api_calls) == 2
        # again - the results are cached, so the call count must not grow
        assert not exists_in_source(bucket, "xxx.sym")
        assert exists_in_source(bucket, "xul.sym")
        assert len(mock_api_calls) == 2
def test_iter_lines():
    """iter_lines() yields complete lines regardless of read chunk size and
    regardless of whether the stream ends with a trailing newline."""

    class Stream:
        def __init__(self, content):
            self.left = content

        def read(self, size):
            # NOTE(review): signals EOF by raising StopIteration instead of
            # returning "" -- under PEP 479 a StopIteration escaping into a
            # generator frame becomes RuntimeError, so this relies on how
            # iter_lines() drives the stream. Confirm it handles this.
            if not self.left:
                raise StopIteration
            chunk = self.left[:size]
            self.left = self.left[size:]
            return chunk

    lines = "Line 1\n" "Line 2\n" "Line 3\n"
    stream = Stream(lines)
    output = list(iter_lines(stream))
    assert output == ["Line 1", "Line 2", "Line 3"]
    # Create it again because our little stream mock doesn't rewind
    stream = Stream(lines)
    output = list(iter_lines(stream, chunk_size=5))
    assert output == ["Line 1", "Line 2", "Line 3"]
    stream = Stream(lines.strip())  # no trailing linebreak
    output = list(iter_lines(stream))
    assert output == ["Line 1", "Line 2", "Line 3"]
    stream = Stream(lines.strip())  # no trailing linebreak
    output = list(iter_lines(stream, chunk_size=3))
    assert output == ["Line 1", "Line 2", "Line 3"]
def test_has_public(requestsmock):
    """Public buckets are probed with plain HTTP HEAD requests."""
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls, file_prefix="v0")
    assert downloader.has_symbol(
        "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
    )
    assert not downloader.has_symbol(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )


def test_has_private_bubble_other_clienterrors(botomock):
    """Non-'not found' ClientErrors from S3 must propagate to the caller
    rather than being swallowed as 'symbol absent'."""
    def mock_api_call(self, operation_name, api_params):
        parsed_response = {"Error": {"Code": "403", "Message": "Not found"}}
        raise ClientError(parsed_response, operation_name)

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    # Expect this to raise a ClientError because the bucket ('private')
    # doesn't exist. So boto3 would normally trigger a ClientError
    # with a code 'Forbidden'.
    with botomock(mock_api_call):
        with pytest.raises(ClientError):
            downloader.has_symbol(
                "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
            )


def test_has_private(botomock):
    """Private buckets are probed via ListObjectsV2; time_took is recorded
    on both hits and misses."""
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            return {}
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert downloader.time_took > 0.0
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        assert downloader.time_took > 0.0
def test_has_private_caching_and_invalidation(botomock):
    """has_symbol() results are cached until invalidate_cache() is called;
    invalidating unknown keys is a harmless no-op."""
    mock_calls = []

    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        mock_calls.append(api_params["Prefix"])
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert len(mock_calls) == 1
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        # This should be cached
        assert len(mock_calls) == 1
        # Now invalidate it
        downloader.invalidate_cache(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert len(mock_calls) == 2
        # Invalidating unrecognized keys shouldn't break anything
        downloader.invalidate_cache(
            "never", "44E4EC8C2F41492B9369D6B9A059577C2", "heardof"
        )


def test_get_url_private_caching_and_invalidation(botomock):
    """get_symbol_url() shares the same cache/invalidation behavior as
    has_symbol()."""
    mock_calls = []

    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        mock_calls.append(api_params["Prefix"])
        return {"Contents": [{"Key": api_params["Prefix"]}]}

    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert len(mock_calls) == 1
        assert downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        # This should be cached
        assert len(mock_calls) == 1
        # Now invalidate it
        downloader.invalidate_cache(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert len(mock_calls) == 2
def test_has_private_without_prefix(botomock):
    """Bucket URLs without a path prefix still yield correct lookups."""
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xul.sym"):
            # found
            return {"Contents": [{"Key": api_params["Prefix"]}]}
        elif api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        raise NotImplementedError(api_params)
    urls = ("https://s3.example.com/private",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
def test_get_url_public(requestsmock):
    """get_symbol_url() on a public source returns the plain URL, or None."""
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls)
    url = downloader.get_symbol_url(
        "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
    )
    assert url == (
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym"
    )
    url = downloader.get_symbol_url(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )
    assert url is None
def test_get_url_private(botomock):
    """get_symbol_url() on a private source returns a signed URL, or None."""
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        return {"Contents": [{"Key": api_params["Prefix"]}]}
    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        url = downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        # The bucket gets put in the top-domain.
        assert url.startswith("https://s3.example.com/")
        assert (
            "/private/prefix/v0/xul.pdb/" "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym?"
        ) in url
        # The signed-URL query-string parameters must all be present.
        assert "Expires=" in url
        assert "AWSAccessKeyId=" in url
        assert "Signature=" in url
        url = downloader.get_symbol_url(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        assert url is None
    assert len(botomock.calls) == 2
def test_public_default_file_prefix(requestsmock, settings):
    """The idea with settings.SYMBOL_FILE_PREFIX is to make it easier
    to specify the settings.SYMBOL_URLS. That settings.SYMBOL_FILE_PREFIX
    is *always* used when uploading symbols. So it's *always* useful to
    query for symbols with a prefix. However, it's an easy mistake to make
    that you just focus on the bucket name to say where symbols come from.
    In those cases, the code should "protect" you and make sure we actually
    use the prefix.
    However, we don't want to lose the flexibility to actually override
    it on a *per URL* basis.
    """
    # settings.SYMBOL_FILE_PREFIX = 'myprfx'
    # HEAD mocks for has_symbol(): every configured URL variant answers 404.
    requestsmock.head(
        "https://s3.example.com/public/start/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    requestsmock.head(
        "https://s3.example.com/also-public/prrffxx/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    requestsmock.head(
        "https://s3.example.com/special/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = (
        "https://s3.example.com/public/start/?access=public",
        # No trailing / in the path part
        "https://s3.example.com/also-public/prrffxx?access=public",
        # No prefix!
        "https://s3.example.com/special?access=public",
    )
    downloader = SymbolDownloader(urls, file_prefix="myprfx")
    assert not downloader.has_symbol(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )
    # GET mocks for the streaming attempt below; all 404 as well.
    requestsmock.get(
        "https://s3.example.com/public/start/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    requestsmock.get(
        "https://s3.example.com/also-public/prrffxx/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    requestsmock.get(
        "https://s3.example.com/special/myprfx/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    stream = downloader.get_symbol_stream(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )
    # Now try to stream it
    with pytest.raises(SymbolNotFound):
        list(stream)
def test_private_default_file_prefix(botomock, settings):
    """See doc string in test_public_default_file_prefix"""
    all_mock_calls = []
    def mock_api_call(self, operation_name, api_params):
        if operation_name == "ListObjectsV2":
            # the has_symbol() was called
            all_mock_calls.append(api_params["Prefix"])
            # pretend it doesn't exist
            return {}
        elif operation_name == "GetObject":
            # someone wants a stream
            all_mock_calls.append(api_params["Key"])
            parsed_response = {"Error": {"Code": "NoSuchKey", "Message": "Not found"}}
            raise ClientError(parsed_response, operation_name)
        else:
            raise NotImplementedError(operation_name)
    urls = (
        # Private URL with prefix and trailing /
        "https://s3.example.com/priv-bucket/borje/",
        # No trailing /
        "https://s3.example.com/also-priv-bucket/prrffxx",
        # No prefix
        "https://s3.example.com/some-bucket",
    )
    downloader = SymbolDownloader(urls, file_prefix="myprfx")
    with botomock(mock_api_call):
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        # Each configured URL was probed, with the file prefix appended to
        # whatever per-URL prefix existed.
        assert len(all_mock_calls) == 3
        assert all_mock_calls[0].startswith("borje/myprfx/xxx.pdb")
        assert all_mock_calls[1].startswith("prrffxx/myprfx/xxx.pdb")
        assert all_mock_calls[2].startswith("myprfx/xxx.pdb")
        # reset the mutable recorder (rebinding is seen by the closure)
        all_mock_calls = []
        stream = downloader.get_symbol_stream(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        with pytest.raises(SymbolNotFound):
            next(stream)
        assert len(all_mock_calls) == 3
        assert all_mock_calls[0].startswith("borje/myprfx/xxx.pdb")
        assert all_mock_calls[1].startswith("prrffxx/myprfx/xxx.pdb")
        assert all_mock_calls[2].startswith("myprfx/xxx.pdb")
def test_get_url_private_dotted_name(botomock):
    """Bucket names containing dots (e.g. com.example.private) work too."""
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        return {"Contents": [{"Key": api_params["Prefix"]}]}
    urls = ("https://s3.example.com/com.example.private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        url = downloader.get_symbol_url(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert (
            "/com.example.private/prefix/v0/xul.pdb/"
            "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym?"
        ) in url
        url = downloader.get_symbol_url(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        assert url is None
    assert len(botomock.calls) == 2
def test_get_stream_public(requestsmock):
    """get_symbol_stream() on a public source yields the URL first, then the
    decoded text lines; a 404 raises SymbolNotFound when consumed."""
    requestsmock.get(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        content=b"LINE ONE\nLINE TWO\n",
    )
    requestsmock.get(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        content=b"Page Not Found",
        status_code=404,
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls)
    stream = downloader.get_symbol_stream(
        "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
    )
    # The first item the generator yields is the URL the symbol was found at.
    url = next(stream)
    assert url == (
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym"
    )
    lines = list(stream)
    assert lines == ["LINE ONE", "LINE TWO"]
    stream = downloader.get_symbol_stream(
        "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
    )
    with pytest.raises(SymbolNotFound):
        list(stream)
def test_get_stream_private(botomock):
    """Private streams yield (bucket, key) first, then decoded lines;
    exercises \\r\\n line endings and a 600-character line."""
    long_line = "x" * 600
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "GetObject"
        if api_params["Key"].endswith("xxx.sym"):
            parsed_response = {"Error": {"Code": "NoSuchKey", "Message": "Not found"}}
            raise ClientError(parsed_response, operation_name)
        return {"Body": BytesIO(bytes(f"line 1\r\nline 2\r\n{long_line}\r\n", "utf-8"))}
    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        stream = downloader.get_symbol_stream(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        # Private sources yield a (bucket, key) tuple instead of a URL.
        bucket_name, key = next(stream)
        assert bucket_name == "private"
        assert key == ("prefix/v0/xul.pdb/44E4EC8C2F41492B9369D6B9A059577C2/xul.sym")
        lines = list(stream)
        assert lines == ["line 1", "line 2", long_line]
        stream = downloader.get_symbol_stream(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
        with pytest.raises(SymbolNotFound):
            next(stream)
def test_get_stream_gzipped(botomock):
    """A Body with ContentEncoding 'gzip' is transparently decompressed."""
    def mock_api_call(self, operation_name, api_params):
        payload = b"line 1\n" b"line 2\n" b"line 3\n"
        buffer_ = BytesIO()
        with GzipFile(fileobj=buffer_, mode="w") as f:
            f.write(payload)
        payload_gz = buffer_.getvalue()
        return {"ContentEncoding": "gzip", "Body": BytesIO(payload_gz)}
    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        stream = downloader.get_symbol_stream(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        bucket_name, key = next(stream)
        assert bucket_name == "private"
        assert key == ("prefix/v0/xul.pdb/44E4EC8C2F41492B9369D6B9A059577C2/xul.sym")
        lines = list(stream)
        assert lines == ["line 1", "line 2", "line 3"]
def test_get_stream_gzipped_but_not_gzipped(botomock):
    """If the object claims gzip but the body isn't, it's treated as missing."""
    def mock_api_call(self, operation_name, api_params):
        payload = b"line 1\n" b"line 2\n" b"line 3\n"
        return {
            "ContentEncoding": "gzip",  # <-- note!
            "Body": BytesIO(payload),  # but it's not gzipped!
        }
    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        stream = downloader.get_symbol_stream(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        bucket_name, key = next(stream)
        assert bucket_name == "private"
        assert key == ("prefix/v0/xul.pdb/44E4EC8C2F41492B9369D6B9A059577C2/xul.sym")
        # But when you start to stream it will realize that the file is not
        # actually gzipped and SymbolDownloader will automatically just skip
        # that file as if it doesn't exist.
        with pytest.raises(SymbolNotFound):
            next(stream)
def test_get_stream_private_other_clienterrors(botomock):
    """Non-NoSuchKey ClientErrors from GetObject propagate to the caller."""
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "GetObject"
        parsed_response = {"Error": {"Code": "403", "Message": "Forbidden"}}
        raise ClientError(parsed_response, operation_name)
    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        stream = downloader.get_symbol_stream(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        with pytest.raises(ClientError):
            next(stream)
def test_multiple_urls_public_then_private(requestsmock, botomock):
    """Lookups work with a mixed source list, public URL listed first."""
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        # found
        return {"Contents": [{"Key": api_params["Prefix"]}]}
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = (
        "https://s3.example.com/public/prefix/?access=public",
        "https://s3.example.com/private/prefix/",
    )
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
def test_multiple_urls_private_then_public(requestsmock, botomock):
    """Same as above but with the private source listed first."""
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        if api_params["Prefix"].endswith("xxx.sym"):
            # not found
            return {}
        # found
        return {"Contents": [{"Key": api_params["Prefix"]}]}
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xxx.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xxx.sym",
        text="Page Not Found",
        status_code=404,
    )
    urls = (
        "https://s3.example.com/private/prefix/",
        "https://s3.example.com/public/prefix/?access=public",
    )
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xul.sym"
        )
        assert not downloader.has_symbol(
            "xxx.pdb", "44E4EC8C2F41492B9369D6B9A059577C2", "xxx.sym"
        )
def test_has_public_case_insensitive_debugid(requestsmock):
    """A lowercase debug ID still matches the uppercase stored key (public)."""
    requestsmock.head(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        text="",
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls)
    assert downloader.has_symbol(
        "xul.pdb", "44e4ec8c2f41492b9369d6b9a059577c2", "xul.sym"
    )
def test_has_private_case_insensitive_debugid(botomock):
    """A lowercase debug ID is looked up with the uppercase S3 prefix."""
    def mock_api_call(self, operation_name, api_params):
        assert operation_name == "ListObjectsV2"
        # The prefix sent to S3 must carry the uppercased debug ID.
        assert "44E4EC8C2F41492B9369D6B9A059577C2" in api_params["Prefix"]
        # found
        return {"Contents": [{"Key": api_params["Prefix"]}]}
    urls = ("https://s3.example.com/private/prefix/",)
    downloader = SymbolDownloader(urls)
    with botomock(mock_api_call):
        assert downloader.has_symbol(
            "xul.pdb", "44e4ec8c2f41492b9369d6b9a059577c2", "xul.sym"
        )
def test_get_stream_public_content_encode_error(requestsmock):
    """A ContentDecodingError while streaming is treated as symbol-not-found."""
    class BreakingStreamHTTPResponse(HTTPResponse):
        # Simulate a response whose body blows up once you try to stream it.
        def stream(self, *a, **kwargs):
            raise ContentDecodingError("something terrible!")
    requestsmock.get(
        "https://s3.example.com/public/prefix/v0/xul.pdb/"
        "44E4EC8C2F41492B9369D6B9A059577C2/xul.sym",
        raw=BreakingStreamHTTPResponse(status=200),
    )
    urls = ("https://s3.example.com/public/prefix/?access=public",)
    downloader = SymbolDownloader(urls)
    stream = downloader.get_symbol_stream(
        "xul.pdb", "44e4ec8c2f41492b9369d6b9a059577c2", "xul.sym"
    )
    # The URL exists (hence the 200 OK), but once streaming starts the
    # content-decoding failure is caught and the symbol is treated as not
    # found. I.e. being unable to stream the content is as bad as the file
    # not existing. Since no other source has it either, the lookup is
    # exhausted and SymbolNotFound is raised.
    with pytest.raises(SymbolNotFound):
        list(stream)
| mozilla-services/tecken | tecken/tests/test_symboldownloader.py | Python | mpl-2.0 | 24,201 |
(function (module) {
mifosX.controllers = _.extend(module, {
ViewSavingDetailsController: function (scope, routeParams, resourceFactory, paginatorService, location, $uibModal, route, dateFilter, $sce, $rootScope, API_VERSION) {
// View-state flags and model containers for the savings account view.
scope.report = false;             // whether a report pane is shown
scope.hidePentahoReport = true;   // hide the Pentaho iframe by default
scope.showActiveCharges = true;
scope.formData = {};              // report form inputs (e.g. outputType)
scope.date = {};                  // from/to dates for transaction reports
scope.staffData = {};             // holds the assigned field officer id
scope.fieldOfficers = [];
scope.savingaccountdetails = [];  // replaced by the API response below
// A savings transaction is a debit when it takes money out of the account:
// withdrawals, fee deductions, overdraft interest, or withheld tax.
scope.isDebit = function (savingsTransactionType) {
    if (savingsTransactionType.withdrawal == true) {
        return true;
    }
    return savingsTransactionType.feeDeduction == true
        || savingsTransactionType.overdraftInterest == true
        || savingsTransactionType.withholdTax == true;
};
// Navigate to the account-transfer view for transfer transactions,
// otherwise to the plain savings transaction view.
scope.routeTo = function (savingsAccountId, transactionId, accountTransfer, transferId) {
    var target = accountTransfer
        ? '/viewaccounttransfers/' + transferId
        : '/viewsavingtrxn/' + savingsAccountId + '/trxnId/' + transactionId;
    location.path(target);
};
/***
 * we are using orderBy(https://docs.angularjs.org/api/ng/filter/orderBy) filter to sort fields in ui
 * api returns dates in array format[yyyy, mm, dd], converting the array of dates to date object
 * @param dateFieldName name of the transaction field holding the date array
 */
scope.convertDateArrayToObject = function (dateFieldName) {
    for (var i in scope.savingaccountdetails.transactions) {
        // Fixed: read the field named by the parameter. The previous
        // version always read `.date` regardless of dateFieldName, so the
        // parameter was silently ignored; identical behavior for the only
        // existing caller, which passes 'date'.
        scope.savingaccountdetails.transactions[i][dateFieldName] =
            new Date(scope.savingaccountdetails.transactions[i][dateFieldName]);
    }
};
// Monthly, annual and weekly fees are the recurring charge types.
scope.isRecurringCharge = function (charge) {
    var recurringTypes = ['Monthly Fee', 'Annual Fee', 'Weekly Fee'];
    return recurringTypes.indexOf(charge.chargeTimeType.value) !== -1;
}
// Open one charge, carrying the account status along as a query param.
scope.viewCharge = function (id) {
    var chargePath = '/savings/' + scope.savingaccountdetails.id + '/viewcharge/' + id;
    location.path(chargePath).search({'status': scope.savingaccountdetails.status.value});
}
scope.clickEvent = function (eventName, accountId) {
eventName = eventName || "";
switch (eventName) {
case "modifyapplication":
location.path('/editsavingaccount/' + accountId);
break;
case "approve":
location.path('/savingaccount/' + accountId + '/approve');
break;
case "reject":
location.path('/savingaccount/' + accountId + '/reject');
break;
case "withdrawnbyclient":
location.path('/savingaccount/' + accountId + '/withdrawnByApplicant');
break;
case "delete":
resourceFactory.savingsResource.delete({accountId: accountId}, {}, function (data) {
var destination = '/viewgroup/' + data.groupId;
if (data.clientId) destination = '/viewclient/' + data.clientId;
location.path(destination);
});
break;
case "undoapproval":
location.path('/savingaccount/' + accountId + '/undoapproval');
break;
case "activate":
location.path('/savingaccount/' + accountId + '/activate');
break;
case "deposit":
location.path('/savingaccount/' + accountId + '/deposit');
break;
case "withdraw":
location.path('/savingaccount/' + accountId + '/withdrawal');
break;
case "addcharge":
location.path('/savingaccounts/' + accountId + '/charges');
break;
case "calculateInterest":
resourceFactory.savingsResource.save({accountId: accountId, command: 'calculateInterest'}, {}, function (data) {
route.reload();
});
break;
case "postInterest":
resourceFactory.savingsResource.save({accountId: accountId, command: 'postInterest'}, {}, function (data) {
route.reload();
});
break;
case "applyAnnualFees":
location.path('/savingaccountcharge/' + accountId + '/applyAnnualFees/' + scope.annualChargeId);
break;
case "transferFunds":
if (scope.savingaccountdetails.clientId) {
location.path('/accounttransfers/fromsavings/' + accountId);
}
break;
case "close":
location.path('/savingaccount/' + accountId + '/close');
break;
case "assignSavingsOfficer":
location.path('/assignsavingsofficer/' + accountId);
break;
case "unAssignSavingsOfficer":
location.path('/unassignsavingsofficer/' + accountId);
break;
case "enableWithHoldTax":
var changes = {
withHoldTax:true
};
resourceFactory.savingsResource.update({accountId: accountId, command: 'updateWithHoldTax'}, changes, function (data) {
route.reload();
});
break;
case "disableWithHoldTax":
var changes = {
withHoldTax:false
};
resourceFactory.savingsResource.update({accountId: accountId, command: 'updateWithHoldTax'}, changes, function (data) {
route.reload();
});
break;
case "postInterestAsOn":
location.path('/savingaccount/' + accountId + '/postInterestAsOn');
break;
}
};
// Load the full account (associations: 'all') and derive the view state:
// available balance, group level, on-hold display, report date range,
// charge table, and the status-dependent action buttons.
resourceFactory.savingsResource.get({accountId: routeParams.id, associations: 'all'}, function (data) {
    scope.savingaccountdetails = data;
    // If a minimum required balance is enforced, the usable balance is the
    // account balance minus the minimum required opening balance.
    scope.savingaccountdetails.availableBalance = scope.savingaccountdetails.enforceMinRequiredBalance ? (scope.savingaccountdetails.summary.accountBalance - scope.savingaccountdetails.minRequiredOpeningBalance) : scope.savingaccountdetails.summary.accountBalance;
    scope.convertDateArrayToObject('date');
    if (scope.savingaccountdetails.groupId) {
        // NOTE(review): the callback parameter `data` shadows the outer
        // account `data` — intentional here, but easy to trip over.
        resourceFactory.groupResource.get({groupId: scope.savingaccountdetails.groupId}, function (data) {
            scope.groupLevel = data.groupLevel;
        });
    }
    // Only show the on-hold column when the API reports on-hold funds.
    scope.showonhold = true;
    if (angular.isUndefined(data.onHoldFunds)) {
        scope.showonhold = false;
    }
    scope.staffData.staffId = data.staffId;
    // Default report range: from the activation date until today.
    scope.date.toDate = new Date();
    scope.date.fromDate = new Date(data.timeline.activatedOnDate);
    scope.status = data.status.value;
    if (scope.status == "Submitted and pending approval" || scope.status == "Active" || scope.status == "Approved") {
        scope.choice = true;
    }
    // Charges may be added while pending approval, and paid while active.
    scope.chargeAction = data.status.value == "Submitted and pending approval" ? true : false;
    scope.chargePayAction = data.status.value == "Active" ? true : false;
    if (scope.savingaccountdetails.charges) {
        scope.charges = scope.savingaccountdetails.charges;
        scope.chargeTableShow = true;
    } else {
        scope.chargeTableShow = false;
    }
    // Action buttons for accounts that are still pending approval.
    if (data.status.value == "Submitted and pending approval") {
        scope.buttons = { singlebuttons: [
            {
                name: "button.modifyapplication",
                icon: "fa fa-pencil ",
                taskPermissionName: "UPDATE_SAVINGSACCOUNT"
            },
            {
                name: "button.approve",
                icon: "fa fa-check",
                taskPermissionName: "APPROVE_SAVINGSACCOUNT"
            }
        ],
        options: [
            {
                name: "button.reject",
                taskPermissionName: "REJECT_SAVINGSACCOUNT"
            },
            {
                name: "button.withdrawnbyclient",
                taskPermissionName: "WITHDRAW_SAVINGSACCOUNT"
            },
            {
                name: "button.addcharge",
                taskPermissionName: "CREATE_SAVINGSACCOUNTCHARGE"
            },
            {
                name: "button.delete",
                taskPermissionName: "DELETE_SAVINGSACCOUNT"
            }
        ]
        };
    }
if (data.status.value == "Approved") {
scope.buttons = { singlebuttons: [
{
name: "button.undoapproval",
icon: "fa faf-undo",
taskPermissionName:"APPROVALUNDO_SAVINGSACCOUNT"
},
{
name: "button.activate",
icon: "fa fa-check",
taskPermissionName:"ACTIVATE_SAVINGSACCOUNT"
},
{
name: "button.addcharge",
icon: "fa fa-plus",
taskPermissionName:"CREATE_SAVINGSACCOUNTCHARGE"
}
]
};
}
if (data.status.value == "Active") {
scope.buttons = { singlebuttons: [
{
name: "button.postInterestAsOn",
icon: "icon-arrow-right",
taskPermissionName:"POSTINTERESTASON_SAVINGSACCOUNT"
},
{
name: "button.deposit",
icon: "fa fa-arrow-up",
taskPermissionName:"DEPOSIT_SAVINGSACCOUNT"
},
{
name: "button.withdraw",
icon: "fa fa-arrow-down",
taskPermissionName:"WITHDRAW_SAVINGSACCOUNT"
},
{
name: "button.calculateInterest",
icon: "fa fa-table",
taskPermissionName:"CALCULATEINTEREST_SAVINGSACCOUNT"
}
],
options: [
{
name: "button.postInterest",
taskPermissionName:"POSTINTEREST_SAVINGSACCOUNT"
},
{
name: "button.addcharge",
taskPermissionName:"CREATE_SAVINGSACCOUNTCHARGE"
},
{
name: "button.close",
taskPermissionName:"CLOSE_SAVINGSACCOUNT"
}
]
};
if (data.clientId) {
scope.buttons.options.push({
name: "button.transferFunds",
taskPermissionName:"CREATE_ACCOUNTTRANSFER"
});
}
if (data.charges) {
for (var i in scope.charges) {
if (scope.charges[i].name == "Annual fee - INR") {
scope.buttons.options.push({
name: "button.applyAnnualFees",
taskPermissionName:"APPLYANNUALFEE_SAVINGSACCOUNT"
});
scope.annualChargeId = scope.charges[i].id;
}
}
}
if(data.taxGroup){
if(data.withHoldTax){
scope.buttons.options.push({
name: "button.disableWithHoldTax",
taskPermissionName:"UPDATEWITHHOLDTAX_SAVINGSACCOUNT"
});
}else{
scope.buttons.options.push({
name: "button.enableWithHoldTax",
taskPermissionName:"UPDATEWITHHOLDTAX_SAVINGSACCOUNT"
});
}
}
}
    if (data.annualFee) {
        // feeOnMonthDay is presumably a [month, day] pair (per the field
        // name); append the current year to build the due date.
        // NOTE(review): `new Date([m, d, yyyy])` relies on implicit
        // array-to-string date parsing — confirm cross-browser behavior.
        var annualdueDate = [];
        annualdueDate = data.annualFee.feeOnMonthDay;
        annualdueDate.push(new Date().getFullYear());
        scope.annualdueDate = new Date(annualdueDate);
    };
    // Load the standing-instruction template for this savings account
    // (fromAccountType 2 = savings) and then populate the table.
    resourceFactory.standingInstructionTemplateResource.get({fromClientId: scope.savingaccountdetails.clientId, fromAccountType: 2, fromAccountId: routeParams.id}, function (response) {
        scope.standinginstruction = response;
        scope.searchTransaction();
    });
});
// Page-fetch callback handed to the paginator for standing instructions.
var fetchFunction = function (offset, limit, callback) {
    var params = {
        offset: offset,
        limit: limit,
        locale: scope.optlang.code,
        fromAccountId: routeParams.id,
        fromAccountType: 2,
        clientId: scope.savingaccountdetails.clientId,
        clientName: scope.savingaccountdetails.clientName,
        dateFormat: scope.df
    };
    resourceFactory.standingInstructionResource.search(params, callback);
};
// (Re)load the standing-instructions table, 14 rows per page.
scope.searchTransaction = function () {
    scope.displayResults = true;
    scope.instructions = paginatorService.paginate(fetchFunction, 14);
    scope.isCollapsed = false;
};
// All registered datatables attached to savings accounts, for the
// "data tables" section of the view.
resourceFactory.DataTablesResource.getAllDataTables({apptable: 'm_savings_account'}, function (data) {
    scope.savingdatatables = data;
});
/*// Saving notes not yet implemented
resourceFactory.savingsResource.getAllNotes({accountId: routeParams.id,resourceType:'notes'}, function (data) {
    scope.savingNotes = data;
});
scope.saveNote = function () {
    resourceFactory.savingsResource.save({accountId: routeParams.id, resourceType: 'notes'}, this.formData, function (data) {
        var today = new Date();
        temp = { id: data.resourceId, note: scope.formData.note, createdByUsername: "test", createdOn: today };
        scope.savingNotes.push(temp);
        scope.formData.note = "";
        scope.predicate = '-id';
    });
};*/
// Load one datatable's rows for this account and massage them for display.
scope.dataTableChange = function (datatable) {
    resourceFactory.DataTablesResource.getTableDetails({datatablename: datatable.registeredTableName,
        entityId: routeParams.id, genericResultSet: 'true'}, function (data) {
        scope.datatabledetails = data;
        scope.datatabledetails.isData = data.data.length > 0 ? true : false;
        // Multirow datatables expose a leading "id" column.
        scope.datatabledetails.isMultirow = data.columnHeaders[0].columnName == "id" ? true : false;
        scope.showDataTableAddButton = !scope.datatabledetails.isData || scope.datatabledetails.isMultirow;
        scope.showDataTableEditButton = scope.datatabledetails.isData && !scope.datatabledetails.isMultirow;
        scope.singleRow = [];
        // For coded columns, replace stored value ids with display values.
        for (var i in data.columnHeaders) {
            if (scope.datatabledetails.columnHeaders[i].columnCode) {
                for (var j in scope.datatabledetails.columnHeaders[i].columnValues) {
                    for (var k in data.data) {
                        if (data.data[k].row[i] == scope.datatabledetails.columnHeaders[i].columnValues[j].id) {
                            data.data[k].row[i] = scope.datatabledetails.columnHeaders[i].columnValues[j].value;
                        }
                    }
                }
            }
        }
        // For single-row datatables, flatten the first row into
        // {key, value} pairs for the details view.
        if (scope.datatabledetails.isData) {
            for (var i in data.columnHeaders) {
                if (!scope.datatabledetails.isMultirow) {
                    var row = {};
                    row.key = data.columnHeaders[i].columnName;
                    row.value = data.data[0].row[i];
                    scope.singleRow.push(row);
                }
            }
        }
    });
};
// Switch the view into "export" mode showing the savings report pane.
scope.export = function () {
    scope.report = true;
    scope.printbtn = false;
    scope.viewReport = false;
    scope.viewSavingReport = true;
    scope.viewTransactionReport = false;
};
// Jump to the journal-entry search scoped to this savings account.
scope.viewJournalEntries = function () {
    location.path("/searchtransaction/").search({savingsId: scope.savingaccountdetails.id});
};
// Open a datatable entry; multirow tables carry the row id in the URL.
scope.viewDataTable = function (registeredTableName, data) {
    var path;
    if (scope.datatabledetails.isMultirow) {
        path = "/viewdatatableentry/" + registeredTableName + "/" + scope.savingaccountdetails.id + "/" + data.row[0];
    } else {
        path = "/viewsingledatatableentry/" + registeredTableName + "/" + scope.savingaccountdetails.id;
    }
    location.path(path);
};
// Back to the plain account details, hiding any report pane.
scope.viewSavingDetails = function () {
    scope.report = false;
    scope.hidePentahoReport = true;
    scope.viewReport = false;
};
// Build the Pentaho "Client Saving Transactions" report URL for the
// current date range and show it in the (sandboxed) report iframe.
scope.viewPrintDetails = function () {
    //scope.printbtn = true;
    scope.report = true;
    scope.viewTransactionReport = false;
    scope.viewReport = true;
    scope.hidePentahoReport = true;
    scope.formData.outputType = 'PDF';
    scope.baseURL = $rootScope.hostUrl + API_VERSION + "/runreports/" + encodeURIComponent("Client Saving Transactions");
    scope.baseURL += "?output-type=" + encodeURIComponent(scope.formData.outputType) + "&tenantIdentifier=" + $rootScope.tenantIdentifier + "&locale=" + scope.optlang.code;
    var reportParams = "";
    scope.startDate = dateFilter(scope.date.fromDate, 'yyyy-MM-dd');
    scope.endDate = dateFilter(scope.date.toDate, 'yyyy-MM-dd');
    var paramName = "R_startDate";
    reportParams += encodeURIComponent(paramName) + "=" + encodeURIComponent(scope.startDate) + "&";
    paramName = "R_endDate";
    reportParams += encodeURIComponent(paramName) + "=" + encodeURIComponent(scope.endDate) + "&";
    paramName = "R_savingsAccountId";
    reportParams += encodeURIComponent(paramName) + "=" + encodeURIComponent(scope.savingaccountdetails.accountNo);
    // Fixed: was `reportParams > ""`, a lexicographic string comparison
    // abused as a non-empty test; an explicit length check is equivalent
    // (any non-empty string sorts after "") and says what it means.
    if (reportParams.length > 0) {
        scope.baseURL += "&" + reportParams;
    }
    // allow untrusted urls for iframe http://docs.angularjs.org/error/$sce/insecurl
    scope.viewReportDetails = $sce.trustAsResourceUrl(scope.baseURL);
};
// Build the Pentaho "Savings Transaction Receipt" report URL for a single
// transaction and show it in the report iframe.
scope.viewSavingsTransactionReceipts = function (transactionId) {
    scope.report = true;
    scope.viewTransactionReport = true;
    scope.viewSavingReport = false;
    scope.printbtn = false;
    scope.viewReport = true;
    scope.hidePentahoReport = true;
    scope.formData.outputType = 'PDF';
    scope.baseURL = $rootScope.hostUrl + API_VERSION + "/runreports/" + encodeURIComponent("Savings Transaction Receipt");
    scope.baseURL += "?output-type=" + encodeURIComponent(scope.formData.outputType) + "&tenantIdentifier=" + $rootScope.tenantIdentifier + "&locale=" + scope.optlang.code;
    var reportParams = "";
    var paramName = "R_transactionId";
    reportParams += encodeURIComponent(paramName) + "=" + encodeURIComponent(transactionId);
    // Fixed: was `reportParams > ""` — lexicographic comparison used as a
    // non-empty check; an explicit length test is clearer and equivalent.
    if (reportParams.length > 0) {
        scope.baseURL += "&" + reportParams;
    }
    // allow untrusted urls for iframe http://docs.angularjs.org/error/$sce/insecurl
    scope.viewReportDetails = $sce.trustAsResourceUrl(scope.baseURL);
};
// Ask for confirmation (modal) before cancelling a standing instruction.
scope.deletestandinginstruction = function (id) {
    $uibModal.open({
        templateUrl: 'delInstruction.html',
        controller: DelInstructionCtrl,
        resolve: {
            ids: function () {
                return id;
            }
        }
    });
};
// Modal controller: on confirm, cancel the instruction server-side, then
// refresh the instructions table (via the outer scope) and close.
var DelInstructionCtrl = function ($scope, $uibModalInstance, ids) {
    $scope.delete = function () {
        resourceFactory.standingInstructionResource.cancel({standingInstructionId: ids}, function (data) {
            scope.searchTransaction();
            $uibModalInstance.close('delete');
        });
    };
    $scope.cancel = function () {
        $uibModalInstance.dismiss('cancel');
    };
};
// Print the currently rendered report, then close the print window.
scope.printReport = function () {
    window.print();
    window.close();
};
// Delete every datatable entry of `apptableName` for this entity and
// reload the view.
scope.deleteAll = function (apptableName, entityId) {
    resourceFactory.DataTablesResource.delete({datatablename: apptableName, entityId: entityId, genericResultSet: 'true'}, {}, function (data) {
        route.reload();
    });
};
// NOTE(review): the query string is embedded inside the path argument; if
// the target route reads `transactionId` via $location.search(), this
// should use .search({transactionId: ...}) — confirm against the route.
scope.modifyTransaction = function (accountId, transactionId) {
    location.path('/savingaccount/' + accountId + '/modifytransaction?transactionId=' + transactionId);
};
// Default sort for the transaction table: newest first.
scope.transactionSort = {
    column: 'date',
    descending: true
};
// Clicking the active column flips direction; clicking another column
// sorts by it, descending.
scope.changeTransactionSort = function (column) {
    if (scope.transactionSort.column === column) {
        scope.transactionSort.descending = !scope.transactionSort.descending;
    } else {
        scope.transactionSort.column = column;
        scope.transactionSort.descending = true;
    }
};
// Statuses for which the transaction-related UI is shown.
scope.checkStatus = function () {
    var visibleStatuses = ['Active', 'Closed', 'Transfer in progress',
        'Transfer on hold', 'Premature Closed', 'Matured'];
    return visibleStatuses.indexOf(scope.status) !== -1;
};
}
});
mifosX.ng.application.controller('ViewSavingDetailsController', ['$scope', '$routeParams', 'ResourceFactory','PaginatorService' , '$location','$uibModal', '$route', 'dateFilter', '$sce', '$rootScope', 'API_VERSION', mifosX.controllers.ViewSavingDetailsController]).run(function ($log) {
$log.info("ViewSavingDetailsController initialized");
});
}(mifosX.controllers || {}));
| gkrishnan724/community-app | app/scripts/controllers/savings/ViewSavingDetailsController.js | JavaScript | mpl-2.0 | 25,472 |
namespace Sparkle.Services.Networks.Users
{
using Sparkle.Services.Networks.Companies;
using Sparkle.Services.Networks.Lang;
using Sparkle.Services.Networks.Models;
using SrkToolkit.Domain;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
/// <summary>
/// Request payload for accepting a pending network apply request,
/// optionally creating the applicant's account and/or redirecting the
/// applicant to a different company.
/// </summary>
public class AcceptApplyRequestRequest : BaseRequest
{
/// <summary>Key identifying the apply request being accepted.</summary>
public Guid ApplyKey { get; set; }
/// <summary>Existing user id, when the applicant already has an account; null otherwise.</summary>
public int? UserId { get; set; }
/// <summary>Whether to send a notification if a new user account is created.</summary>
public bool NotifyNewlyCreatedUser { get; set; }
/// <summary>Companies selectable as a join target, keyed by company id.</summary>
public IDictionary<int, string> AvailableCompanies { get; set; }
/// <summary>Flag indicating the applicant picked the wrong company; label comes from the Networks resources.</summary>
[Display(Name = "IsWrongCompany", ResourceType = typeof(NetworksLabels))]
public bool IsWrongCompany { get; set; }
/// <summary>Id of the company the applicant should join.</summary>
public int JoinCompanyId { get; set; }
/// <summary>Suggested company-name matches, keyed by company id.</summary>
public Dictionary<int, string> CompaniesNameMatch { get; set; }
}
/// <summary>
/// Result of processing an <see cref="AcceptApplyRequestRequest"/>, carrying
/// the sub-results of any company/user creation that happened along the way.
/// </summary>
public class AcceptApplyRequestResult : BaseResult<AcceptApplyRequestRequest, AcceptApplyRequestError>
{
public AcceptApplyRequestResult(AcceptApplyRequestRequest request)
: base(request)
{
}
/// <summary>Result of the company creation step, when one was performed.</summary>
public CreateCompanyResult CreateCompanyResult { get; set; }
/// <summary>Result of the user account creation step, when one was performed.</summary>
public CreateEmailPassordAccountResult CreateUserResult { get; set; }
}
/// <summary>Failure modes when accepting an apply request.</summary>
public enum AcceptApplyRequestError
{
/// <summary>No apply request matches the given key.</summary>
NoSuchApplyRequest,
/// <summary>The nested company-creation request failed.</summary>
CreateCompanyRequestFailed,
/// <summary>The nested email/password account creation failed.</summary>
CreateEmailPasswordAccountFailed,
/// <summary>The forward-target company does not exist.</summary>
OnForwardRequestNoSuchCompany,
/// <summary>The apply request is not in the pending-accept state.</summary>
NotInPendingAccept,
/// <summary>The target company is disabled and cannot be joined.</summary>
JoinCompanyIsDisabled,
}
}
| SparkleNetworks/SparkleNetworks | src/Sparkle.Services/Networks/Users/AcceptApplyRequestRequest.cs | C# | mpl-2.0 | 1,581 |
/*
This file is part of the MinSG library extension MultiAlgoRendering.
Copyright (C) 2009-2012 Ralf Petring <ralf@petring.net>
This library is subject to the terms of the Mozilla Public License, v. 2.0.
You should have received a copy of the MPL along with this library; see the
file LICENSE. If not, you can obtain one at http://mozilla.org/MPL/2.0/.
*/
#ifdef MINSG_EXT_MULTIALGORENDERING
#include "SampleStorage.h"
namespace MinSG {
namespace MAR {
// NOTE(review): intentionally empty — presumably SampleStorage is
// implemented in its header and this translation unit exists only so
// the header gets compiled; confirm before removing the file.
}
}
#endif // MINSG_EXT_MULTIALGORENDERING
| PADrend/MinSG | Ext/MultiAlgoRendering/SampleStorage.cpp | C++ | mpl-2.0 | 508 |
package aws
import (
"fmt"
"log"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/arn"
"github.com/aws/aws-sdk-go/service/directconnect"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/helper/validation"
)
// resourceAwsDxHostedPublicVirtualInterface returns the Terraform resource
// for a Direct Connect public virtual interface allocated to (hosted by)
// another AWS account. Every configurable attribute is ForceNew: the
// resource only implements Create/Read/Delete, so any change re-creates
// the interface.
func resourceAwsDxHostedPublicVirtualInterface() *schema.Resource {
return &schema.Resource{
Create: resourceAwsDxHostedPublicVirtualInterfaceCreate,
Read:   resourceAwsDxHostedPublicVirtualInterfaceRead,
Delete: resourceAwsDxHostedPublicVirtualInterfaceDelete,
Importer: &schema.ResourceImporter{
State: resourceAwsDxHostedPublicVirtualInterfaceImport,
},
Schema: map[string]*schema.Schema{
// Computed ARN of the virtual interface.
"arn": {
Type:     schema.TypeString,
Computed: true,
},
"connection_id": {
Type:     schema.TypeString,
Required: true,
ForceNew: true,
},
"name": {
Type:     schema.TypeString,
Required: true,
ForceNew: true,
},
// 802.1Q VLAN tag; valid tags are 1-4094.
"vlan": {
Type:         schema.TypeInt,
Required:     true,
ForceNew:     true,
ValidateFunc: validation.IntBetween(1, 4094),
},
"bgp_asn": {
Type:     schema.TypeInt,
Required: true,
ForceNew: true,
},
"bgp_auth_key": {
Type:     schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
},
// "ipv4" or "ipv6"; cross-field address requirements are enforced
// in the Create function.
"address_family": {
Type:         schema.TypeString,
Required:     true,
ForceNew:     true,
ValidateFunc: validation.StringInSlice([]string{directconnect.AddressFamilyIpv4, directconnect.AddressFamilyIpv6}, false),
},
"customer_address": {
Type:     schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
},
"amazon_address": {
Type:     schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
},
// Account that will own the hosted virtual interface.
"owner_account_id": {
Type:         schema.TypeString,
Required:     true,
ForceNew:     true,
ValidateFunc: validateAwsAccountId,
},
// Public prefixes to advertise over the interface; at least one.
"route_filter_prefixes": {
Type:     schema.TypeSet,
Required: true,
ForceNew: true,
Elem:     &schema.Schema{Type: schema.TypeString},
MinItems: 1,
},
"aws_device": {
Type:     schema.TypeString,
Computed: true,
},
},
Timeouts: &schema.ResourceTimeout{
Create: schema.DefaultTimeout(10 * time.Minute),
Delete: schema.DefaultTimeout(10 * time.Minute),
},
}
}
// resourceAwsDxHostedPublicVirtualInterfaceCreate allocates a public
// virtual interface on an existing Direct Connect connection on behalf of
// another AWS account, records the new VIF id as the resource id, and
// waits for the interface to leave the transient "pending" state before
// reading its attributes back into state.
func resourceAwsDxHostedPublicVirtualInterfaceCreate(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).dxconn

	addressFamily := d.Get("address_family").(string)
	caRaw, caOk := d.GetOk("customer_address")
	aaRaw, aaOk := d.GetOk("amazon_address")
	// For IPv4 both peer addresses must be supplied by the caller; for
	// IPv6 they stay optional (no equivalent check is made).
	if addressFamily == directconnect.AddressFamilyIpv4 {
		if !caOk {
			return fmt.Errorf("'customer_address' must be set when 'address_family' is '%s'", addressFamily)
		}
		if !aaOk {
			return fmt.Errorf("'amazon_address' must be set when 'address_family' is '%s'", addressFamily)
		}
	}

	req := &directconnect.AllocatePublicVirtualInterfaceInput{
		ConnectionId: aws.String(d.Get("connection_id").(string)),
		OwnerAccount: aws.String(d.Get("owner_account_id").(string)),
		NewPublicVirtualInterfaceAllocation: &directconnect.NewPublicVirtualInterfaceAllocation{
			VirtualInterfaceName: aws.String(d.Get("name").(string)),
			Vlan:                 aws.Int64(int64(d.Get("vlan").(int))),
			Asn:                  aws.Int64(int64(d.Get("bgp_asn").(int))),
			AddressFamily:        aws.String(addressFamily),
		},
	}
	// Optional fields: only forward non-empty values to the API.
	if v, ok := d.GetOk("bgp_auth_key"); ok && v.(string) != "" {
		req.NewPublicVirtualInterfaceAllocation.AuthKey = aws.String(v.(string))
	}
	if caOk && caRaw.(string) != "" {
		req.NewPublicVirtualInterfaceAllocation.CustomerAddress = aws.String(caRaw.(string))
	}
	if aaOk && aaRaw.(string) != "" {
		req.NewPublicVirtualInterfaceAllocation.AmazonAddress = aws.String(aaRaw.(string))
	}
	if v, ok := d.GetOk("route_filter_prefixes"); ok {
		req.NewPublicVirtualInterfaceAllocation.RouteFilterPrefixes = expandDxRouteFilterPrefixes(v.(*schema.Set))
	}

	log.Printf("[DEBUG] Allocating Direct Connect hosted public virtual interface: %#v", req)
	resp, err := conn.AllocatePublicVirtualInterface(req)
	if err != nil {
		return fmt.Errorf("Error allocating Direct Connect hosted public virtual interface: %s", err.Error())
	}

	d.SetId(aws.StringValue(resp.VirtualInterfaceId))

	// Named vifArn (not "arn") so the local does not shadow the imported
	// aws-sdk-go "arn" package.
	vifArn := arn.ARN{
		Partition: meta.(*AWSClient).partition,
		Region:    meta.(*AWSClient).region,
		Service:   "directconnect",
		AccountID: meta.(*AWSClient).accountid,
		Resource:  fmt.Sprintf("dxvif/%s", d.Id()),
	}.String()
	d.Set("arn", vifArn)

	if err := dxHostedPublicVirtualInterfaceWaitUntilAvailable(conn, d.Id(), d.Timeout(schema.TimeoutCreate)); err != nil {
		return err
	}

	return resourceAwsDxHostedPublicVirtualInterfaceRead(d, meta)
}
// resourceAwsDxHostedPublicVirtualInterfaceRead refreshes every virtual
// interface attribute from the Direct Connect API. When the interface no
// longer exists upstream, the resource id is cleared so Terraform removes
// it from state instead of erroring.
func resourceAwsDxHostedPublicVirtualInterfaceRead(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*AWSClient).dxconn
vif, err := dxVirtualInterfaceRead(d.Id(), conn)
if err != nil {
return err
}
if vif == nil {
// nil with no error means "not found": drop from state.
log.Printf("[WARN] Direct Connect virtual interface (%s) not found, removing from state", d.Id())
d.SetId("")
return nil
}
d.Set("connection_id", vif.ConnectionId)
d.Set("name", vif.VirtualInterfaceName)
d.Set("vlan", vif.Vlan)
d.Set("bgp_asn", vif.Asn)
d.Set("bgp_auth_key", vif.AuthKey)
d.Set("address_family", vif.AddressFamily)
d.Set("customer_address", vif.CustomerAddress)
d.Set("amazon_address", vif.AmazonAddress)
d.Set("route_filter_prefixes", flattenDxRouteFilterPrefixes(vif.RouteFilterPrefixes))
d.Set("owner_account_id", vif.OwnerAccount)
d.Set("aws_device", vif.AwsDeviceV2)
return nil
}
// resourceAwsDxHostedPublicVirtualInterfaceDelete delegates deletion to
// the shared dxVirtualInterfaceDelete helper used by all DX VIF resources.
func resourceAwsDxHostedPublicVirtualInterfaceDelete(d *schema.ResourceData, meta interface{}) error {
return dxVirtualInterfaceDelete(d, meta)
}
// resourceAwsDxHostedPublicVirtualInterfaceImport supports
// "terraform import" by reconstructing the computed "arn" attribute from
// the imported resource id; the remaining attributes are populated by the
// follow-up Read call.
func resourceAwsDxHostedPublicVirtualInterfaceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
	// Named vifArn (not "arn") so the local does not shadow the imported
	// aws-sdk-go "arn" package.
	vifArn := arn.ARN{
		Partition: meta.(*AWSClient).partition,
		Region:    meta.(*AWSClient).region,
		Service:   "directconnect",
		AccountID: meta.(*AWSClient).accountid,
		Resource:  fmt.Sprintf("dxvif/%s", d.Id()),
	}.String()
	d.Set("arn", vifArn)

	return []*schema.ResourceData{d}, nil
}
func dxHostedPublicVirtualInterfaceWaitUntilAvailable(conn *directconnect.DirectConnect, vifId string, timeout time.Duration) error {
return dxVirtualInterfaceWaitUntilAvailable(
conn,
vifId,
timeout,
[]string{
directconnect.VirtualInterfaceStatePending,
},
[]string{
directconnect.VirtualInterfaceStateAvailable,
directconnect.VirtualInterfaceStateConfirming,
directconnect.VirtualInterfaceStateDown,
directconnect.VirtualInterfaceStateVerifying,
})
}
| Ninir/terraform-provider-aws | aws/resource_aws_dx_hosted_public_virtual_interface.go | GO | mpl-2.0 | 6,694 |