repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
mnpappo/neural_network | cifar10_dropout_test.py | 3327 | __author__ = "Nadimozzaman Pappo"
__github__ = "http://github.com/mnpappo"
"""
This script use keras & cifar10 dataset to test random Dropout effects on result.
"""
from keras.datasets import cifar10
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.optimizers import SGD
from randomdropouttest import get_random_dropout, RandomDropout
# defining the settings
batch_size = 32
nb_classes = 10
nb_epoch = 10
img_channels = 3
img_rows, img_cols = 32, 32
# loading cifar10 dataset
def load_dataset():
    """Load CIFAR-10, one-hot encode the labels and scale pixels to [0, 1].

    Returns:
        (X_train, Y_train, X_test, Y_test) where X_* are float32 image
        arrays scaled to [0, 1] and Y_* are one-hot label matrices with
        ``nb_classes`` columns.
    """
    # the data, shuffled and split between train and test sets
    (X_train, y_train), (X_test, y_test) = cifar10.load_data()
    print('X_train shape:', X_train.shape)
    print(X_train.shape[0], 'train samples')
    print(X_test.shape[0], 'test samples')
    # convert class vectors to binary class matrices
    Y_train = np_utils.to_categorical(y_train, nb_classes)
    Y_test = np_utils.to_categorical(y_test, nb_classes)
    # cast before dividing so the scaling is done in float, not int
    X_train = X_train.astype('float32')
    X_test = X_test.astype('float32')
    X_train /= 255
    X_test /= 255
    return X_train, Y_train, X_test, Y_test
# creating the network architecture
def make_network():
    """Build the CIFAR-10 convnet used by this experiment.

    Two conv blocks (32 then 64 filters) with a RandomDropout layer after
    the first pooling stage, followed by a dense softmax classifier.

    Returns:
        An uncompiled keras ``Sequential`` model expecting inputs of shape
        (img_channels, img_rows, img_cols).
    """
    model = Sequential()
    model.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(img_channels, img_rows, img_cols)))
    model.add(Activation('relu'))
    model.add(Convolution2D(32, 3, 3))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    # drop out 1
    # assumes the activation volume at this point is (32, 32, 15, 15)
    # (batch?, channels, h, w) -- TODO confirm against randomdropouttest
    shape = (32, 32, 15, 15)
    model.add(RandomDropout(get_random_dropout(0, 1, shape)))
    model.add(Convolution2D(64, 3, 3, border_mode='same'))
    model.add(Activation('relu'))
    model.add(Convolution2D(64, 3, 3))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    # drop out 2
    # model.add(Dropout(dropout))
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation('relu'))
    # drop out 3
    # model.add(Dropout(dropout))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))
    return model
# training model with SGD with momentum
def train_model(model, X_train, Y_train, X_test, Y_test):
    """Compile the model with SGD + Nesterov momentum, train for
    ``nb_epoch`` epochs and print the final test accuracy.

    NOTE(review): returns None even though the caller binds the result.
    """
    sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
    # model.fit(X_train, Y_train, nb_epoch=nb_epoch, batch_size=batch_size, validation_split=0.1, show_accuracy=True, verbose=1)
    # validates against the test set every epoch rather than a split
    model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, validation_data=(X_test, Y_test), shuffle=True)
    print('Testing...')
    # evaluate() returns [loss, accuracy] because metrics=['accuracy'] above
    res = model.evaluate(X_test, Y_test, batch_size=batch_size, verbose=1)
    print("--------")
    print('Test accuracy: {0}'.format(res[1]))
# saving the trained model & cifar10 model architecture
def save_model(model):
    """Save the model architecture (JSON) and weights (HDF5) under data/.

    Args:
        model: a trained keras model providing ``to_json()`` and
            ``save_weights()``.
    """
    model_json = model.to_json()
    # Use a context manager so the file handle is always closed; the
    # original open(...).write(...) leaked it on error.
    with open('data/cifar10_architecture.json', 'w') as json_file:
        json_file.write(model_json)
    model.save_weights('data/cifar10_weights.h5', overwrite=True)
if __name__ == '__main__':
    # Load data, build the network, then train and report test accuracy.
    X_train, Y_train, X_test, Y_test = load_dataset()
    model = make_network()
    # NOTE(review): train_model returns None; trained_model is always None.
    trained_model = train_model(model, X_train, Y_train, X_test, Y_test)
    # print("Training completed. Saving the model.")
    # save_model(model)
| mit |
DigitalMachinist/HexGrid | docs/html/dir_3cb425e2b7a3497cb33453c12e0f9dfb.js | 2208 | var dir_3cb425e2b7a3497cb33453c12e0f9dfb =
[
[ "obj", "dir_9eed77f92de805d0c609728ec5fe3bb4.html", "dir_9eed77f92de805d0c609728ec5fe3bb4" ],
[ "Properties", "dir_9edb41ecf4d8ed9652bfb4c7b0e784ac.html", "dir_9edb41ecf4d8ed9652bfb4c7b0e784ac" ],
[ "AxialHexCoord.cs", "_axial_hex_coord_8cs.html", [
[ "AxialHexCoord", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_axial_hex_coord.html", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_axial_hex_coord" ]
] ],
[ "CubicHexCoord.cs", "_cubic_hex_coord_8cs.html", [
[ "CubicHexCoord", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_cubic_hex_coord.html", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_cubic_hex_coord" ]
] ],
[ "DiagonalEnum.cs", "_diagonal_enum_8cs.html", "_diagonal_enum_8cs" ],
[ "DirectionEnum.cs", "_direction_enum_8cs.html", "_direction_enum_8cs" ],
[ "FloatAxial.cs", "_float_axial_8cs.html", [
[ "FloatAxial", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_float_axial.html", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_float_axial" ]
] ],
[ "FloatCubic.cs", "_float_cubic_8cs.html", [
[ "FloatCubic", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_float_cubic.html", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_float_cubic" ]
] ],
[ "HexGrid.cs", "_hex_grid_8cs.html", [
[ "HexGrid", "classca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_hex_grid.html", "classca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_hex_grid" ]
] ],
[ "OffsetHexCoord.cs", "_offset_hex_coord_8cs.html", [
[ "OffsetHexCoord", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_offset_hex_coord.html", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_offset_hex_coord" ]
] ],
[ "ParityEnum.cs", "_parity_enum_8cs.html", "_parity_enum_8cs" ],
[ "RotationEnum.cs", "_rotation_enum_8cs.html", "_rotation_enum_8cs" ],
[ "TriangleEnum.cs", "_triangle_enum_8cs.html", "_triangle_enum_8cs" ],
[ "Vec2D.cs", "_vec2_d_8cs.html", [
[ "Vec2D", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_vec2_d.html", "structca_1_1axoninteractive_1_1_geometry_1_1_hex_1_1_vec2_d" ]
] ]
]; | mit |
pablomartinez123/SSI | application/views/backend/abms/abmEnfermedadesAlta.php | 2958 | <div class="main-content">
<div class="main-content-inner">
<div class="page-content">
<div class="page-header">
<h1>
Gestionar Enfermedades
</h1>
</div>
<div class="widget-box"> <!-- Empieza el recuadro con su titulo -->
<div class="widget-header">
<h5 class="widget-title">Registrar Nueva Enfermedad </h5>
<!-- #section:custom/widget-box.toolbar -->
<div class="widget-toolbar">
<a href="#" data-action="collapse">
<i class="ace-icon fa fa-chevron-up"></i>
</a>
</div>
<!-- /section:custom/widget-box.toolbar -->
</div>
<div class="widget-body">
<div class="widget-main">
<div class="widget-main">
<?php echo validation_errors('<div class="errors">','</div>');
?>
<form class="form-horizontal" role="form" action="<?php echo base_url() ?>abms/abmEnfermedadesC/recibirDatos/" method="post" name="formEnfermedad"><!-- Comienza formulario -->
<div class="form-group"> <!-- Empieza una linea del formulario -->
<label class="col-sm-3 control-label no-padding-right" for="nombreEnfermedad">Nombre Enfermedad(*) </label>
<div class="col-sm-4">
<input class="form-control" id="nombreEnfermedad" name="nombreEnfermedad" placeholder="" type="text">
</div>
</div>
<!-- /section:elements.form -->
<div class="clearfix form-actions"> <!-- Empiezan botones de guardar y limpiar -->
<div class="col-md-offset-3 col-md-9">
<button class="btn btn-info" type="submit" name="GuardarEnDB">
<i class="ace-icon fa fa-check bigger-110"></i>
Guardar
</button>
<button class="btn" type="reset">
<i class="ace-icon fa fa-undo bigger-110"></i>
Limpiar
</button>
</div>
</div>
</form><!-- Termina formulario -->
</div>
</div>
</div>
</div><!--Fin Cuadro Registrar Nueva Enfermedad -->
<?php //if($mensaje!=""){ echo"<script>alert('".$mensaje."')</script>"; } ?>
</div><!-- /.page-content -->
</div><!-- /.main-content-inner -->
</div><!-- /.main-content -->
<!--Para que se vean los botones de la tabla responsive-->
<script type="text/javascript">
window.jQuery || document.write("<script src='../../assets/js/jquery.js'>"+"<"+"/script>");
</script>
<script type="text/javascript">
if('ontouchstart' in document.documentElement) document.write("<script src='../../assets/js/jquery.mobile.custom.js'>"+"<"+"/script>");
</script>
<script src="../../assets/js/bootstrap.js"></script> | mit |
jmeline/pygame_rpg | samplePyGame.py | 4014 | ## Import and Init
import pygame
from pygame.locals import K_ESCAPE, K_UP, K_DOWN, K_LEFT, K_RIGHT
pygame.init()
'''
while not done:
#Handle a Close Event
for event in pygame.event.get():
keys = pygame.key.get_pressed()
if event.type == pygame.QUIT or keys[K_ESCAPE]:
done = True
## Set framerate to 60
framerate.tick(60)
#Keyborad Keypress Events, Movement
if pygame.key.get_pressed()[K_UP]:
ly = ly - speed
if pygame.key.get_pressed()[K_DOWN]:
ly = ly + speed
if pygame.key.get_pressed()[K_LEFT]:
lx = lx - speed
if pygame.key.get_pressed()[K_RIGHT]:
lx = lx + speed
#Test for Out-of-Bounds
if lx > rightBound:
lx = rightBound
if lx < 0:
lx = 0
if ly > bottomBound:
ly = bottomBound
if ly < 0:
ly = 0
## draw background
screen.blit(background, (0, 0))
## draw image
screen.blit(sotcLogo, (lx, ly))
pygame.display.flip()
#pygame.display.update()
'''
class Game(object):
    """Handles the game itself: owns the display surface, the movable
    image and the 60 FPS main loop that polls the arrow keys."""

    def __init__(self):
        # Load and convert the SOTC logo
        self.image = pygame.image.load("cloud.png")
        self.screen = pygame.display.get_surface()
        self.screen_rect = self.screen.get_rect()
        self.clock = pygame.time.Clock()
        self.keys = pygame.key.get_pressed()
        self.fps = 60.0
        self.done = False
        # current top-left position of the image
        self.lx = 100
        self.ly = 100
        # pixels moved per frame while an arrow key is held
        self.speed = 3
        # furthest top-left position that keeps the image fully on screen
        self.rightBound = self.screen.get_width() - self.image.get_width()
        self.bottomBound = self.screen.get_height() - self.image.get_height()
        self.background = pygame.Surface(self.screen.get_size()).convert()
        self.background.fill((20, 130, 157))

    def eventLoop(self):
        """Add/pop directions from player's direction stack as necessary."""
        for event in pygame.event.get():
            # NOTE(review): self.keys is only refreshed while events arrive;
            # the movement code in mainLoop polls pygame.key.get_pressed()
            # directly instead of reading self.keys.
            self.keys = pygame.key.get_pressed()
            if event.type == pygame.QUIT or self.keys[pygame.K_ESCAPE]:
                self.done = True
            elif event.type == pygame.KEYDOWN:
                pass
                ##self.player.add_direction(event.key)
            elif event.type == pygame.KEYUP:
                pass
                ##self.player.pop_direction(event.key)

    def mainLoop(self):
        """Our main game loop; I bet you'd never have guessed."""
        # NOTE(review): delta is computed once here and never used below.
        delta = self.clock.tick(self.fps)/1000.0
        while not self.done:
            self.eventLoop()
            ## draw background
            self.screen.blit(self.background, (0, 0))
            ## draw image
            self.screen.blit(self.image, (self.lx, self.ly))
            ## Set framerate to 60
            self.clock.tick(60)
            # Keyboard keypress polling for movement
            if pygame.key.get_pressed()[K_UP]:
                self.ly = self.ly - self.speed
            if pygame.key.get_pressed()[K_DOWN]:
                self.ly = self.ly + self.speed
            if pygame.key.get_pressed()[K_LEFT]:
                self.lx = self.lx - self.speed
            if pygame.key.get_pressed()[K_RIGHT]:
                self.lx = self.lx + self.speed
            # Test for out-of-bounds: clamp so the image stays on screen
            if self.lx > self.rightBound:
                self.lx = self.rightBound
            if self.lx < 0:
                self.lx = 0
            if self.ly > self.bottomBound:
                self.ly = self.bottomBound
            if self.ly < 0:
                self.ly = 0
            #self.player.update(self.obstacles, delta)
            #self.draw()
            pygame.display.flip()
            #pygame.display.update()
            #delta = self.clock.tick(self.fps)/1000.0
            #self.display_fps()
def main():
    """Create the 640x480 window and run the game loop until quit."""
    ## Set up the window
    screen = pygame.display.set_mode((640,480))
    pygame.display.set_caption("Sprite Test!")
    # Background colour, logo position and movement bounds are all set up
    # inside Game.__init__ rather than here.
    Game().mainLoop()
if __name__ == '__main__':
main() | mit |
plzen/ebay | lib/ebay_trading/requests/delete_selling_manager_template_automation_rule.rb | 1191 |
module EbayTrading # :nodoc:
module Requests # :nodoc:
# == Attributes
# numeric_node :sale_template_id, 'SaleTemplateID', :optional => true
# boolean_node :delete_automated_listing_rule, 'DeleteAutomatedListingRule', 'true', 'false', :optional => true
# boolean_node :delete_automated_relisting_rule, 'DeleteAutomatedRelistingRule', 'true', 'false', :optional => true
# boolean_node :delete_automated_second_chance_offer_rule, 'DeleteAutomatedSecondChanceOfferRule', 'true', 'false', :optional => true
class DeleteSellingManagerTemplateAutomationRule < Abstract
include XML::Mapping
include Initializer
root_element_name 'DeleteSellingManagerTemplateAutomationRuleRequest'
numeric_node :sale_template_id, 'SaleTemplateID', :optional => true
boolean_node :delete_automated_listing_rule, 'DeleteAutomatedListingRule', 'true', 'false', :optional => true
boolean_node :delete_automated_relisting_rule, 'DeleteAutomatedRelistingRule', 'true', 'false', :optional => true
boolean_node :delete_automated_second_chance_offer_rule, 'DeleteAutomatedSecondChanceOfferRule', 'true', 'false', :optional => true
end
end
end
| mit |
Zex/neural-node | aceagent/t_clerk.cpp | 737 | #include <ace/Reactor.h>
#include <ace/Acceptor.h>       /* ACE_Acceptor: was missing, needed by the Clerk_Acceptor typedef */
#include <ace/Event_Handler.h>
#include <ace/SOCK_Acceptor.h>  /* ACE_SOCK_Acceptor: was missing, needed by the Clerk_Acceptor typedef */
#include <ace/SOCK_Stream.h>
#include <ace/Svc_Handler.h>
// Per-connection service handler; the reactor calls handle_input() when
// data arrives on the client socket.
class Clerk_Handler : public ACE_Svc_Handler<ACE_SOCK_Stream, ACE_MT_SYNCH>
{
public:
    // Returns 0 to stay registered with the reactor (-1 would unregister
    // and destroy the handler).
    virtual int handle_input()
    {
        // Do something
        ACE_DEBUG((LM_INFO, "Handling input ...\n"));
        return 0; // was missing: falling off the end of a non-void function is UB
    }
};
typedef Acceptor<Clerk_Handler, ACE_SOCK_Acceptor> Clerk_Acceptor;
int main(int argc, char argv[])
{
// ACE_Name_Options::instance()->parse_args(argc, argv);
Clerk_Acceptor acceptor(7351);//Options::instance()->port());
ACE_Reactor::instance()->register_handler(&acceptor, ACE_Event_Handler::ACCEPT_MASK);
for (;;)
ACE_Reactor::instance()->handle_events();
}
| mit |
jojanper/ng-app | grunt/clean.js | 178 | module.exports = function() {
return {
build_clean: {
src: ['<%= testReportsPath %>/*.xml', '<%= testReportsPath %>/xmlrunner/*.xml']
}
};
};
| mit |
wildekek/rammstein-generator | rammstein.py | 319 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
a='Du hast mich'
b='Du, du hast'+'\n'+a
c=a+' gefragt'
def j(i):
    """Glue the pieces of *i* together, one per line."""
    separator = '\n'
    return separator.join(i)
d=j([b,b,''])
e=j(['Willst du bis der Tod euch scheidet','Treurig sein für alle Tage?','Nein, nein!',''])
f=j([d,b,a,'',j([c,c,c,'Und ich hab nichts gesagt','']),e,e])
print j([d,f,f,e]) | mit |
JayKob141/Preguntas | routes/logout.js | 176 | var express = require('express');
var router = express.Router();
// GET / (mounted as the logout route): terminate the Passport session
// and redirect the user back to the site root.
router.get('/', function(req, res, next) {
  req.logout();
  res.redirect('/');
});
module.exports = router;
| mit |
TheMrButcher/gamebase_manager | manager/librariesform.cpp | 9741 | #include "librariesform.h"
#include "ui_librariesform.h"
#include "settings.h"
#include "mainwindow.h"
#include "appsform.h"
#include "librarysourcemanagerlist.h"
#include "librarydeployer.h"
#include "libraryremover.h"
#include <QDir>
#include <QMessageBox>
#include <QThreadPool>
#include <QDebug>
// Builds the libraries tab: configures the table view/model and wires the
// update button, selection tracking and download-finished notifications.
LibrariesForm::LibrariesForm(MainWindow *parent)
    : QWidget(parent)
    , ui(new Ui::LibrariesForm)
    , parent(parent)
{
    ui->setupUi(this);
    hasActiveDownload = false;
    librariesModel = new LibrariesTableModel(this);
    ui->librariesTable->setModel(librariesModel);
    // Fixed widths everywhere except column 1 (library name), which stretches.
    ui->librariesTable->setColumnWidth(0, 60);
    ui->librariesTable->horizontalHeader()->setSectionResizeMode(1, QHeaderView::Stretch);
    ui->librariesTable->setColumnWidth(2, 150);
    ui->librariesTable->setColumnWidth(3, 50);
    ui->librariesTable->setColumnWidth(4, 50);
    clearLibrariesTable();
    connect(ui->updateButton, SIGNAL(clicked()), parent, SLOT(updateLibrarySources()));
    connect(ui->librariesTable->selectionModel(), SIGNAL(selectionChanged(QItemSelection,QItemSelection)),
            this, SLOT(onLibrariesSelectionChanged(QItemSelection,QItemSelection)));
    connect(LibrarySourceManagerList::instance(), SIGNAL(finishedDownload(Library)),
            this, SLOT(onLibraryDownloaded(Library)));
}
// Releases the generated UI object; child widgets are owned by Qt.
LibrariesForm::~LibrariesForm()
{
    delete ui;
}
// Empties the model and drops any selection in the table view.
void LibrariesForm::clearLibrariesTable()
{
    librariesModel->set(QList<Library>());
    ui->librariesTable->selectionModel()->clearSelection();
}
// Appends every library in the list to the table model.
void LibrariesForm::append(const QList<Library>& libraries)
{
    foreach (const auto& library, libraries)
        librariesModel->append(library);
}
// Starts downloading a library into the downloads directory, creating that
// directory (after asking the user) when missing. When the library is
// already present locally the downloaded handler is invoked immediately.
void LibrariesForm::download(Library library)
{
    auto downloadsDir = Settings::instance().downloadsDir();
    if (downloadsDir.check() != SourceStatus::OK) {
        // Ask before creating the missing downloads folder.
        auto answer = QMessageBox::question(this, "Создание папки для загрузок",
                                            "Папка для загрузок отсутствует. Создать?");
        if (answer != QMessageBox::Yes)
            return;
        QDir dir;
        dir.mkpath(downloadsDir.path);
        if (downloadsDir.check() != SourceStatus::OK)
            return; // creation failed; silently give up
    }
    auto resultLibrary = library.afterAction(Library::Download);
    if (resultLibrary.validate()) {
        // Already on disk: skip the transfer entirely.
        onLibraryDownloaded(resultLibrary);
        return;
    }
    hasActiveDownload = true;
    updateButtons(); // block download/install buttons while transferring
    LibrarySourceManagerList::instance()->download(library);
}
// Remembers which library the installation started from (for the final
// finishedInstall signal) and begins the install sequence.
void LibrariesForm::install(Library library)
{
    srcLibToInstall = library;
    installImpl(library);
}
// Core install state machine. Depending on the library's state it either
// deploys into the working directory right away, or first removes the old
// working library / downloads the sources, setting waitedInstallAction so
// that onLibraryRemoved()/onLibraryDownloaded() resume the install later.
void LibrariesForm::installImpl(Library library)
{
    toInstall = Library::makeAbsent();
    if (!library.checkAbility(Library::Install))
        return finishInstall(false);
    if (library.state == Library::SourceCode) {
        // Building from source requires the MSVC environment script.
        if (Settings::instance().vcVarsPath.isEmpty()) {
            QMessageBox::warning(this, "Не указан путь к vcvarsall.bat",
                                 "Для компиляции кода необходим Microsoft Visual Studio 2010. "
                                 "Пожалуйста, укажите путь к файлу vcvarsall.bat.");
            return finishInstall(false);
        }
    }
    if (library.checkAbility(Library::Deploy)) {
        auto resultLibrary = library.afterAction(Library::Deploy);
        if (resultLibrary.source.check() != SourceStatus::OK) {
            // Working directory missing: offer to create it.
            auto answer = QMessageBox::question(this, "Создание рабочей папки",
                                                "Рабочая папка отсутствует. Создать?");
            if (answer != QMessageBox::Yes)
                return finishInstall(false);
            QDir dir;
            dir.mkpath(resultLibrary.source.path);
            if (resultLibrary.source.check() != SourceStatus::OK) {
                QMessageBox::warning(this, "Не удалось создать рабочую папку",
                                     "Невозможно создать рабочую папку. Проверьте, правильно "
                                     "ли указан путь в настройках.");
                return finishInstall(false);
            }
        }
        if (LibraryRemover::checkHasDeployedFiles(resultLibrary.source.path)) {
            // Old deployment still present: remove it first; execution
            // resumes in onLibraryRemoved().
            waitedInstallAction = Library::Remove;
            toInstall = library;
            librariesModel->replaceCurrentLibrary(
                        Library::makeAbsent(Settings::instance().workingDir()));
            remove(resultLibrary);
            return;
        }
        // Deploy asynchronously; onLibraryDeployed() finishes the install.
        auto deployer = new LibraryDeployer(library);
        connect(deployer, SIGNAL(finishedDeploy(Library)),
                this, SLOT(onLibraryDeployed(Library)));
        QThreadPool::globalInstance()->start(deployer);
        return;
    }
    if (library.checkAbility(Library::Download)) {
        // Sources not available yet: download first, then install.
        waitedInstallAction = Library::Download;
        toInstall = library.afterAction(Library::Download);
        download(library);
    }
}
// Finalizes an install attempt: clears the pending-install state and
// signals listeners with the originally requested library and the outcome.
void LibrariesForm::finishInstall(bool success)
{
    auto library = srcLibToInstall;
    toInstall = Library::makeAbsent();
    srcLibToInstall = Library::makeAbsent();
    emit finishedInstall(library, success);
}
// Slot: a library finished deploying into the working directory. On
// success it is added to the table and all apps are reconfigured against
// the new working library.
void LibrariesForm::onLibraryDeployed(Library library)
{
    if (!library.validate()) {
        QMessageBox::warning(this, "Ошибка при установке библиотеки",
                             "Не удалось установить выбранную библиотеку в "
                             "качестве текущей рабочей библиотеки.");
        return finishInstall(false);
    }
    librariesModel->append(library);
    parent->appsForm()->reconfigurateAll();
    return finishInstall(true);
}
// Slot: a removal finished. When the removal was the first step of an
// install into the working directory, continue that install only once the
// directory is actually clean; otherwise report the failure.
void LibrariesForm::onLibraryRemoved(Library library)
{
    emit finishedRemove(library);
    if (toInstall.exists()
            && library.source.type == LibrarySource::WorkingDirectory
            && waitedInstallAction == Library::Remove) {
        if (!LibraryRemover::checkHasDeployedFiles(Settings::instance().workingDir().path)) {
            installImpl(toInstall);
        } else {
            QMessageBox::warning(this, "Ошибка при удалении библиотеки",
                                 "Не удалось удалить текущую рабочую библиотеку, чтобы "
                                 "освободить место для новой библиотеки.");
            return finishInstall(false);
        }
    }
}
// Slot: a download finished. Adds the library to the table and, when the
// download was part of a pending install, resumes that install.
void LibrariesForm::onLibraryDownloaded(Library library)
{
    hasActiveDownload = false;
    bool waitedToInstall =
            toInstall == library && waitedInstallAction == Library::Download;
    if (!library.validate()) {
        QMessageBox::warning(this, "Ошибка при скачивании библиотеки",
                             "Не удалось скачать библиотеку. Возможно, источник библиотеки"
                             "временно недоступен, либо библиотека была удалена из источника.");
        if (waitedToInstall)
            finishInstall(false);
        return;
    }
    librariesModel->append(library);
    if (waitedToInstall)
        installImpl(library);
    updateButtons(); // re-enable buttons now that the download is over
}
// Slot: table selection changed; refresh which buttons are enabled.
void LibrariesForm::onLibrariesSelectionChanged(const QItemSelection&, const QItemSelection&)
{
    updateButtons();
}
// Button: download the currently selected library.
void LibrariesForm::on_downloadButton_clicked()
{
    int row = selectedRow();
    if (row == -1)
        return;
    download(librariesModel->get()[row]);
}
// Button: remove the selected library after user confirmation. Row 0 is
// the current working library: it is replaced with an "absent" placeholder
// in the model instead of having its row deleted.
void LibrariesForm::on_removeButton_clicked()
{
    int row = selectedRow();
    if (row == -1)
        return;
    auto library = librariesModel->get()[row];
    auto answer = QMessageBox::question(this, "Удаление библиотеки",
                                        "Вы уверены, что хотите удалить выбранную версию библиотеки?");
    if (answer != QMessageBox::Yes)
        return;
    if (row == 0)
        librariesModel->replaceCurrentLibrary(
                    Library::makeAbsent(Settings::instance().workingDir()));
    else
        librariesModel->removeRow(row);
    remove(library); // actual file removal happens asynchronously
}
// Button: install the currently selected library as the working library.
void LibrariesForm::on_installButton_clicked()
{
    int row = selectedRow();
    if (row == -1)
        return;
    install(librariesModel->get()[row]);
}
// Returns the index of the selected table row, or -1 when none is selected.
int LibrariesForm::selectedRow() const
{
    auto rows = ui->librariesTable->selectionModel()->selectedRows();
    if (rows.empty())
        return -1;
    return rows[0].row();
}
// Removes the library's files asynchronously on the global thread pool;
// onLibraryRemoved() is signalled when the removal completes.
void LibrariesForm::remove(Library library)
{
    auto remover = new LibraryRemover(library);
    connect(remover, SIGNAL(finishedRemove(Library)),
            this, SLOT(onLibraryRemoved(Library)));
    QThreadPool::globalInstance()->start(remover);
}
// Enables/disables the action buttons according to the selected library's
// abilities; download/install stay disabled while a download is running.
void LibrariesForm::updateButtons()
{
    auto rows = ui->librariesTable->selectionModel()->selectedRows();
    if (rows.isEmpty()) {
        ui->downloadButton->setEnabled(false);
        ui->removeButton->setEnabled(false);
        ui->installButton->setEnabled(false);
    } else {
        int row = rows[0].row();
        const auto& library = librariesModel->get()[row];
        ui->downloadButton->setEnabled(library.checkAbility(Library::Download) && !hasActiveDownload);
        ui->removeButton->setEnabled(library.checkAbility(Library::Remove));
        ui->installButton->setEnabled(library.checkAbility(Library::Install) && !hasActiveDownload);
    }
}
| mit |
rdobson94/python_koans | python2/koans/about_dictionaries.py | 1969 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutHashes in the Ruby Koans
#
from runner.koan import *
class AboutDictionaries(Koan):
    """Koan exercises covering dict creation, access and mutation.

    NOTE: the ``__`` / ``____`` placeholders are intentional blanks for
    the student to fill in -- they are not bugs and must not be "fixed".
    """

    def test_creating_dictionaries(self):
        empty_dict = dict()
        self.assertEqual(dict, type(empty_dict))
        self.assertEqual(dict(), empty_dict)
        self.assertEqual(0, len(empty_dict))

    def test_dictionary_literals(self):
        empty_dict = {}
        self.assertEqual(dict, type(empty_dict))
        babel_fish = {'one': 'uno', 'two': 'dos'}
        self.assertEqual(2, len(babel_fish))

    def test_accessing_dictionaries(self):
        babel_fish = {'one': 'uno', 'two': 'dos'}
        self.assertEqual(__, babel_fish['one'])
        self.assertEqual(__, babel_fish['two'])

    def test_changing_dictionaries(self):
        babel_fish = {'one': 'uno', 'two': 'dos'}
        babel_fish['one'] = 'eins'
        expected = {'two': 'dos', 'one': __}
        self.assertEqual(expected, babel_fish)

    def test_dictionary_is_unordered(self):
        dict1 = {'one': 'uno', 'two': 'dos'}
        dict2 = {'two': 'dos', 'one': 'uno'}
        self.assertEqual(____, dict1 == dict2)

    def test_dictionary_keys_and_values(self):
        babel_fish = {'one': 'uno', 'two': 'dos'}
        self.assertEqual(__, len(babel_fish.keys()))
        self.assertEqual(__, len(babel_fish.values()))
        self.assertEqual(__, 'one' in babel_fish.keys())
        self.assertEqual(__, 'two' in babel_fish.values())
        self.assertEqual(__, 'uno' in babel_fish.keys())
        self.assertEqual(__, 'dos' in babel_fish.values())

    def test_making_a_dictionary_from_a_sequence_of_keys(self):
        # fromkeys gives every listed key the same value (42 here)
        cards = {}.fromkeys(
            ('red warrior', 'green elf', 'blue valkyrie', 'yellow dwarf',
             'confused looking zebra'),
            42)
        self.assertEqual(__, len(cards))
        self.assertEqual(__, cards['green elf'])
        self.assertEqual(__, cards['yellow dwarf'])
| mit |
fabpot/DoctrineBundle | Tests/ConnectionFactoryTest.php | 3062 | <?php
/*
* This file is part of the Doctrine Bundle
*
* The code was originally distributed inside the Symfony framework.
*
* (c) Fabien Potencier <fabien@symfony.com>
* (c) Doctrine Project, Benjamin Eberlei <kontakt@beberlei.de>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Doctrine\Bundle\DoctrineBundle\Tests;
use Doctrine\Bundle\DoctrineBundle\ConnectionFactory;
use Doctrine\DBAL\Connection;
use Doctrine\DBAL\Driver;
use Doctrine\DBAL\Exception\DriverException;
class ConnectionFactoryTest extends TestCase
{
    protected function setUp()
    {
        parent::setUp();
        // These tests only make sense when the ORM package is installed.
        if (!class_exists('Doctrine\\ORM\\Version')) {
            $this->markTestSkipped('Doctrine ORM is not available.');
        }
    }

    /**
     * Regression test for DoctrineBundle issue #673: when the driver throws
     * while determining the database version, the factory must wrap the
     * error in a DBALException whose message tells the user how to
     * circumvent the platform auto-detection.
     *
     * @expectedException \Doctrine\DBAL\DBALException
     */
    public function testContainer()
    {
        $typesConfig  = [];
        $factory      = new ConnectionFactory($typesConfig);
        $params       = ['driverClass' => '\\Doctrine\\Bundle\\DoctrineBundle\\Tests\\FakeDriver'];
        $config       = null;
        $eventManager = null;
        $mappingTypes = [0];
        $exception    = new DriverException('', $this->getMockBuilder(Driver\AbstractDriverException::class)->disableOriginalConstructor()->getMock());

        // put the mock into the fake driver
        FakeDriver::$exception = $exception;

        try {
            $factory->createConnection($params, $config, $eventManager, $mappingTypes);
        } catch (\Exception $e) {
            // The hint text must be present before the exception bubbles up.
            $this->assertTrue(strpos($e->getMessage(), 'can circumvent this by setting') > 0);
            throw $e;
        }
    }
}
/**
 * FakeDriver class to simulate a problem discussed in DoctrineBundle issue #673
 * In order to not use a real database driver we have to create our own fake/mock implementation.
 *
 * @link https://github.com/doctrine/DoctrineBundle/issues/673
 */
class FakeDriver implements Driver
{
    /**
     * Exception Mock
     *
     * @var \Doctrine\DBAL\Exception\DriverException
     */
    public static $exception;

    /**
     * This method gets called to determine the database version which in our case leeds to the problem.
     * So we have to fake the exception a driver would normally throw.
     *
     * @link https://github.com/doctrine/DoctrineBundle/issues/673
     */
    public function getDatabasePlatform()
    {
        throw self::$exception;
    }

    // ----- below this line follow only dummy methods to satisfy the interface requirements ----

    // Never called in these tests; present only to satisfy the interface.
    public function connect(array $params, $username = null, $password = null, array $driverOptions = [])
    {
        throw new \Exception('not implemented');
    }

    // Never called in these tests; present only to satisfy the interface.
    public function getSchemaManager(Connection $conn)
    {
        throw new \Exception('not implemented');
    }

    public function getName()
    {
        return 'FakeDriver';
    }

    public function getDatabase(Connection $conn)
    {
        return 'fake_db';
    }
}
| mit |
kybarg/material-ui | packages/material-ui-icons/src/BugReportRounded.js | 839 | import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(
<path d="M19 8h-1.81c-.45-.78-1.07-1.45-1.82-1.96l.93-.93c.39-.39.39-1.02 0-1.41a.9959.9959 0 00-1.41 0l-1.47 1.47C12.96 5.06 12.49 5 12 5s-.96.06-1.41.17L9.11 3.7a.9959.9959 0 00-1.41 0c-.39.39-.39 1.02 0 1.41l.92.93C7.88 6.55 7.26 7.22 6.81 8H5c-.55 0-1 .45-1 1s.45 1 1 1h1.09c-.05.33-.09.66-.09 1v1H5c-.55 0-1 .45-1 1s.45 1 1 1h1v1c0 .34.04.67.09 1H5c-.55 0-1 .45-1 1s.45 1 1 1h1.81c1.04 1.79 2.97 3 5.19 3s4.15-1.21 5.19-3H19c.55 0 1-.45 1-1s-.45-1-1-1h-1.09c.05-.33.09-.66.09-1v-1h1c.55 0 1-.45 1-1s-.45-1-1-1h-1v-1c0-.34-.04-.67-.09-1H19c.55 0 1-.45 1-1s-.45-1-1-1zm-6 8h-2c-.55 0-1-.45-1-1s.45-1 1-1h2c.55 0 1 .45 1 1s-.45 1-1 1zm0-4h-2c-.55 0-1-.45-1-1s.45-1 1-1h2c.55 0 1 .45 1 1s-.45 1-1 1z" />
, 'BugReportRounded');
| mit |
cxong/Slappa | world.py | 104 | from group import *
class World(Group):
    """Top-level container for game entities; a thin Group subclass."""
    def __init__(self):
        super(World, self).__init__()
| mit |
mdoviedor/WEBsimon | src/GS/ContenidosBundle/Tests/Controller/UsuariosControllerTest.php | 320 | <?php
namespace GS\ContenidosBundle\Tests\Controller;
use Symfony\Bundle\FrameworkBundle\Test\WebTestCase;
class UsuariosControllerTest extends WebTestCase
{
    /**
     * Smoke test: issues a GET request to the "ver usuarios" page.
     *
     * NOTE(review): there are no assertions yet, so this only verifies the
     * request does not raise an exception.
     */
    public function testVer()
    {
        $client = static::createClient();

        $crawler = $client->request('GET', 'contenidos/usuarios/ver');
    }
}
| mit |
pablollarena/Previos | previos/Modelos/ReportePrevio.php | 13410 | <?php
/**
* Created by PhpStorm.
* User: PLLARENA
* Date: 13/02/2017
* Time: 01:07 PM
*/
include_once ("Sir52Facturas.php");
include_once ("Sir60Referencias.php");
include_once ("AccesoDatos.php");
include_once ("AccesoDatos2.php");
class ReportePrevio
{
private $oAD = null;
private $oAD2 = null;
private $oSir60 = null;
private $oSir52 = null;
private $nCant = 0;
private $nCompleta = 0;
private $nFaltante = 0;
private $nSobrante = 0;
private $nPieza = 0;
private $nJuego = 0;
private $nOtro = 0;
private $sOrigen = "";
private $nPesoAprox = 0;
private $sObservaciones = "";
private $nCantidadSobrante = 0;
public function getCantidadSobrante()
{
return $this->nCantidadSobrante;
}
public function setCantidadSobrante($nCantidadSobrante)
{
$this->nCantidadSobrante = $nCantidadSobrante;
}
public function getAD()
{
return $this->oAD;
}
public function setAD($oAD)
{
$this->oAD = $oAD;
}
public function getAD2()
{
return $this->oAD2;
}
public function setAD2($oAD2)
{
$this->oAD2 = $oAD2;
}
public function getSir60()
{
return $this->oSir60;
}
public function setSir60($oSir60)
{
$this->oSir60 = $oSir60;
}
public function getSir52()
{
return $this->oSir52;
}
public function setSir52($oSir52)
{
$this->oSir52 = $oSir52;
}
public function getCant()
{
return $this->nCant;
}
public function setCant($nCant)
{
$this->nCant = $nCant;
}
public function getCompleta()
{
return $this->nCompleta;
}
public function setCompleta($nCompleta)
{
$this->nCompleta = $nCompleta;
}
public function getFaltante()
{
return $this->nFaltante;
}
public function setFaltante($nFaltante)
{
$this->nFaltante = $nFaltante;
}
public function getSobrante()
{
return $this->nSobrante;
}
public function setSobrante($nSobrante)
{
$this->nSobrante = $nSobrante;
}
public function getPieza()
{
return $this->nPieza;
}
public function setPieza($nPieza)
{
$this->nPieza = $nPieza;
}
public function getJuego()
{
return $this->nJuego;
}
public function setJuego($nJuego)
{
$this->nJuego = $nJuego;
}
public function getOtro()
{
return $this->nOtro;
}
public function setOtro($nOtro)
{
$this->nOtro = $nOtro;
}
public function getOrigen()
{
return $this->sOrigen;
}
public function setOrigen($sOrigen)
{
$this->sOrigen = $sOrigen;
}
public function getPesoAprox()
{
return $this->nPesoAprox;
}
public function setPesoAprox($nPesoAprox)
{
$this->nPesoAprox = $nPesoAprox;
}
public function getObservaciones()
{
return $this->sObservaciones;
}
public function setObservaciones($sObservaciones)
{
$this->sObservaciones = $sObservaciones;
}
    /**
     * Returns one ReportePrevio (carrying only the item number) per partida
     * found for the current invoice + reference, or false when none exist.
     *
     * @return ReportePrevio[]|false
     * @throws Exception when both invoice number and reference are empty.
     */
    function buscarItemsPorFactura(){
        $oAD = new AccesoDatos();
        $sQuery = "";
        $rst = null;
        $vObj = null;
        $i = 0;
        $oRep = null;
        // NOTE(review): this only rejects the case where BOTH values are
        // empty; a single empty value still reaches the database.
        if($this->getSir60()->getReferencia() == "" AND $this->getSir52()->getNumero() == ""){
            throw new Exception("ReportePrevio->buscarItemsPorFactura(): error, faltan datos");
        }else{
            // WARNING: values are concatenated into the SQL text -- injection
            // risk if they can come from user input.
            $sQuery = "exec [1G_TRIMEX].[dbo].buscarPartida '".$this->getSir52()->getNumero()."','".$this->getSir60()->getReferencia()."';";
            $rst = $oAD->ejecutaQuery($sQuery);
            $oAD->Desconecta();
            if($rst){
                foreach ($rst as $vRow){
                    $oRep = new ReportePrevio();
                    $oRep->setSir52(new Facturas());
                    $oRep->getSir52()->setItem($vRow[0]);
                    $vObj[$i] = $oRep;
                    $i = $i + 1;
                }
            }else{
                $vObj = false;
            }
        }
        return $vObj;
    }
    /**
     * Persists the current report row (reference, invoice, item and all
     * quantity fields) through the insertarReportePrevio stored procedure.
     *
     * @return int affected-row count reported by the database layer.
     * @throws Exception when both reference and invoice number are empty.
     */
    function insertarReportePrevio(){
        $oAD = new AccesoDatos2();
        $sQuery = "";
        $nAfec = 0;
        // NOTE(review): only rejects the case where BOTH values are empty.
        if($this->getSir60()->getReferencia() == "" AND $this->getSir52()->getNumero() == ""){
            throw new Exception("ReportePrevio->insertarPartida(): error faltan datos ");
        }else{
            // WARNING: field values are concatenated into the SQL text --
            // injection risk if any can come from user input.
            $sQuery = " exec [Previos].[dbo].insertarReportePrevio '".$this->getSir60()->getReferencia()."',
                        '".$this->getSir52()->getNumero()."',
                        ".$this->getSir52()->getItem().",
                        ".$this->getCant().",
                        ".$this->getCompleta().",
                        ".$this->getFaltante().",
                        ".$this->getSobrante().",
                        ".$this->getPieza().",
                        ".$this->getJuego().",
                        ".$this->getOtro().",
                        '".$this->getOrigen()."',
                        ".$this->getPesoAprox().",
                        '".$this->getObservaciones()."',
                        ".$this->getCantidadSobrante().";";
            $nAfec = $oAD->ejecutaComando($sQuery);
            $oAD->Desconecta();
        }
        return $nAfec;
    }
    /**
     * Fetches every saved report row for the current reference + invoice
     * number and maps each row onto a new ReportePrevio instance.
     *
     * NOTE(review): the guard below tests getItem() while the query only
     * uses getNumero(); looks inconsistent -- confirm which is intended.
     *
     * @return ReportePrevio[]|false false when the query yields no rows.
     * @throws Exception when reference and item are both empty.
     */
    function consultarInfoItem (){
        $oAD2 = new AccesoDatos2();
        $sQuery = "";
        $rst = null;
        $vObj = null;
        $i = 0 ;
        $oReporte = null;
        if ($this->getSir60()->getReferencia() == "" and $this->getSir52()->getItem() == ""){
            throw new Exception("ReportePrevio->consultarInfoItem(): error faltan datos");
        }else{
            // WARNING: values are concatenated into the SQL text -- injection
            // risk if they can come from user input.
            $sQuery = " exec [Previos].[dbo].consultarInfoItem '".$this->getSir60()->getReferencia()."','".$this->getSir52()->getNumero()."'; ";
            $rst = $oAD2->ejecutaQuery($sQuery);
            $oAD2->Desconecta();
            if ($rst){
                foreach ($rst as $vRow){
                    // column 0 is skipped; columns 1..12 map onto the fields below
                    $oReporte = new ReportePrevio();
                    $oReporte->setSir52(new Facturas());
                    $oReporte->getSir52()->setNumero($vRow [1]);
                    $oReporte->getSir52()->setItem($vRow [2]);
                    $oReporte->setCant($vRow[3]);
                    $oReporte->setCompleta($vRow[4]);
                    $oReporte->setFaltante($vRow[5]);
                    $oReporte->setSobrante($vRow [6]);
                    $oReporte->setPieza($vRow[7]);
                    $oReporte->setJuego($vRow[8]);
                    $oReporte->setOtro($vRow[9]);
                    $oReporte->setOrigen($vRow[10]);
                    $oReporte->setPesoAprox($vRow[11]);
                    $oReporte->setObservaciones($vRow[12]);
                    $vObj[$i] = $oReporte;
                    $i = $i + 1 ;
                }
            }else{
                $vObj = false;
            }
            return $vObj;
        }
    }
function validarItem ($sRef,$nItem,$nFactura){
$oAD2 = new AccesoDatos2();
$sQuery = "";
$rst = null;
$bRet = false;
if($sRef == "" and $nItem == "" and $nFactura == ""){
throw new Exception("ReportePrevio->validadItem(): error en los datos");
}else{
$sQuery = "exec validaPartidas '".$sRef."',".$nItem.",'".$nFactura."'; ";
$rst = $oAD2->ejecutaQuery($sQuery);
$oAD2->Desconecta();
if (count($rst) == 1){
$bRet = true;
}
}
return $bRet;
}
function buscarInfoPartida(){
$oAD2 = new AccesoDatos2();
$sQuery = "";
$rst = null;
$bRet = false;
if($this->getSir60()->getReferencia() == "" && $this->getSir52()->getNumero() == "" and $this->getSir52()->getItem() == 0){
throw new Exception("ReportePrevio->buscarInfoPartida(): error, faltan datos");
}else{
$sQuery = "EXEC [Previos].[dbo].buscarDatosPartida '".$this->getSir60()->getReferencia()."','".$this->getSir52()->getNumero()."',".$this->getSir52()->getItem().";";
$rst = $oAD2->ejecutaQuery($sQuery);
$oAD2->Desconecta();
if($rst){
$this->setCant($rst[0][0]);
$this->setCompleta($rst[0][1]);
$this->setFaltante($rst[0][2]);
$this->setSobrante($rst[0][3]);
$this->setPieza($rst[0][4]);
$this->setJuego($rst[0][5]);
$this->setOtro($rst[0][6]);
$this->setOrigen($rst[0][7]);
$this->setPesoAprox($rst[0][8]);
$bRet = true;
}
}
}
function updatePartida($sUsuario,$sAccion){
$oAD = new AccesoDatos2();
$sQuery = "";
$nAfec = 0;
if ($this->getSir60()->getReferencia() == "" && $this->getSir52()->getNumero() == "" and $this->getSir52()->getItem() == 0)
{
throw new Exception("ReportePrevio->updatePartida() : error faltan datos");
}else{
$sQuery = "exec [Previos].[dbo].updatePartidas '".$this->getSir60()->getReferencia()."',
'".$this->getSir52()->getNumero()."',
".$this->getSir52()->getItem().",
".$this->getCant().",
".$this->getCompleta().",
".$this->getFaltante().",
".$this->getSobrante().",
".$this->getPieza().",
".$this->getJuego().",
".$this->getOtro().",
'".$this->getOrigen()."',
".$this->getPesoAprox().",
'".$this->getObservaciones()."',
'".$sUsuario."',
'".$sAccion."';";
$nAfec = $oAD->ejecutaComando($sQuery);
}
return $nAfec;
}
function estadoReferencia(){
$oAD = new AccesoDatos();
$sQuery = "";
$rst = null;
$bBand = false;
if ($this->getSir60()->getReferencia() == "")
{
throw new Exception("ReportePrevio->estadoReferencia() : error faltan datos");
}else {
$sQuery = "exec [1G_TRIMEX].[dbo].buscarEstadoReferencia'".$this->getSir60()->getReferencia()."';";
$rst = $oAD->ejecutaQuery($sQuery);
$oAD->Desconecta();
if ($rst != null){
$bBand = true;
}
}
return $bBand;
}
function buscarReporteReferencia(){
$oAD2 = new AccesoDatos2();
$sQuery = "";
$rst = null;
$vObj = null;
$i = 0;
$oReporte = null;
if($this->getSir60()->getReferencia() == ""){
throw new Exception("ReportePrevio->buscarReportePrevio(): error, faltan datos");
}else{
$sQuery = "EXEC [Previos].[dbo].reportePartidas '".$this->getSir60()->getReferencia()."';";
$rst = $oAD2->ejecutaQuery($sQuery);
$oAD2->Desconecta();
if ($rst){
foreach ($rst as $vRow){
$oReporte = new ReportePrevio();
$oReporte->setSir52(new Facturas());
$oReporte->getSir52()->setNumero($vRow[0]);
$oReporte->getSir52()->setItem($vRow[1]);
$oReporte->setCant($vRow[2]);
$oReporte->setCompleta($vRow[3]);
$oReporte->setFaltante($vRow[4]);
$oReporte->setSobrante($vRow[5]);
$oReporte->setPieza($vRow[6]);
$oReporte->setJuego($vRow[7]);
$oReporte->setOtro($vRow[8]);
$oReporte->setOrigen($vRow[9]);
$oReporte->setPesoAprox($vRow[10]);
$oReporte->setObservaciones($vRow[11]);
$oReporte->setCantidadSobrante($vRow[12]);
$vObj[$i] = $oReporte;
$i = $i + 1;
}
}else{
$vObj = false;
}
}
return $vObj;
}
function buscarReferencias(){
$oAD2 = new AccesoDatos();
$sQuery = "";
$vObj = null;
$rst = null;
$i = 0;
$oRep = null;
$sQuery = "EXEC [Previos].[dbo].consultarTodosReferencias";
$rst = $oAD2->ejecutaQuery($sQuery);
$oAD2->Desconecta();
if($rst){
foreach ($rst as $vRow){
$oRep = new ReportePrevio();
$oRep->setSir60(new Sir60Referencias());
$oRep->getSir60()->setReferencia($vRow[0]);
$vObj[$i] = $oRep;
$i = $i + 1;
}
}else{
$vObj = false;
}
return $vObj;
}
} | mit |
ostat/Console | Cognifide.PowerShell/Core/Utility/WriteLogCommand.cs | 2741 | using System.Collections;
using System.Management.Automation;
using Cognifide.PowerShell.Commandlets;
using Sitecore.Diagnostics;
namespace Cognifide.PowerShell.Core.Utility
{
[Cmdlet(VerbsCommunications.Write, "Log")]
public class WriteLogCommand : BaseCommand
{
public WriteLogCommand()
{
Separator = " ";
}
[Parameter(Position = 0, ValueFromPipeline = true, ValueFromRemainingArguments = true)]
public object Object { get; set; }
[Parameter]
public object Separator { get; set; }
[Parameter]
public LogNotificationLevel Log { get; set; }
private void LogObject(object o)
{
if (o == null) return;
var str1 = o as string;
if (str1 != null)
{
if (str1.Length <= 0) return;
LogString(str1);
}
else
{
IEnumerable enumerable;
if ((enumerable = o as IEnumerable) != null)
{
var flag = false;
foreach (var o1 in enumerable)
{
if (flag && Separator != null)
{
LogString(Separator.ToString());
}
LogObject(o1);
flag = true;
}
}
else
{
var str2 = o.ToString();
if (str2.Length <= 0) return;
LogString(str2);
}
}
}
private void LogString(string logMessage)
{
switch (Log)
{
case LogNotificationLevel.Debug:
Sitecore.Diagnostics.Log.Debug(logMessage, this);
break;
case LogNotificationLevel.Error:
Sitecore.Diagnostics.Log.Error(logMessage, this);
break;
case LogNotificationLevel.Fatal:
Sitecore.Diagnostics.Log.Fatal(logMessage, this);
break;
case LogNotificationLevel.Warning:
Sitecore.Diagnostics.Log.Warn(logMessage, this);
break;
//case (LogNotificationLevel.Info) :
//case LogNotificationLevel.None:
default:
Sitecore.Diagnostics.Log.Info(logMessage, this);
break;
}
WriteVerbose(logMessage);
}
protected override void ProcessRecord()
{
LogObject(Object);
}
}
} | mit |
michigan-com/reeeeeader.js | gulpfile.js | 1096 | var gulp = require('gulp');
var babel = require('gulp-babel');
var source = require('vinyl-source-stream');
var rename = require('gulp-rename');
var less = require('gulp-less');
var jade = require('gulp-jade');
var autoprefixer = require('gulp-autoprefixer');
var insert = require('gulp-insert');
gulp.task('default', ['jade', 'babel', 'less']);
gulp.task('jade', function() {
return gulp.src('./views/*.jade')
.pipe(jade({ client: true }))
.pipe(insert.prepend("var jade = require('jade/runtime');\n\n"))
.pipe(insert.append(";\n\nmodule.exports = template;"))
.pipe(gulp.dest('./lib/compiled-views/'));
});
gulp.task('babel', function() {
return gulp.src('./src/**/*.js')
.pipe(babel())
.pipe(gulp.dest('./lib'));
});
gulp.task('less', function() {
return gulp.src('./styles/styles.less')
.pipe(less())
.pipe(autoprefixer())
.pipe(gulp.dest('./dist'));
});
gulp.task('watch', ['default'], function() {
gulp.watch('./src/**/*.js', ['babel']);
gulp.watch('./styles/*.less', ['less']);
gulp.watch('./views/*.jade', ['jade', 'browserify']);
});
| mit |
elogina/EMHaveclesoleil | app/cache/prod/annotations/ae5fd08205b42590ae7fa66d6fd34b958898f8fd.cache.php | 517 | <?php return unserialize('a:2:{i:0;O:26:"Doctrine\\ORM\\Mapping\\Table":5:{s:4:"name";s:7:"membres";s:6:"schema";N;s:7:"indexes";N;s:17:"uniqueConstraints";a:2:{i:0;O:37:"Doctrine\\ORM\\Mapping\\UniqueConstraint":2:{s:4:"name";s:11:"mail_UNIQUE";s:7:"columns";a:1:{i:0;s:4:"mail";}}i:1;O:37:"Doctrine\\ORM\\Mapping\\UniqueConstraint":2:{s:4:"name";s:10:"mdp_UNIQUE";s:7:"columns";a:1:{i:0;s:3:"mdp";}}}s:7:"options";a:0:{}}i:1;O:27:"Doctrine\\ORM\\Mapping\\Entity":2:{s:15:"repositoryClass";N;s:8:"readOnly";b:0;}}'); | mit |
joalri24/Cefeida_1_zona | Cefeidas/Cefeidas/Properties/Settings.Designer.cs | 1065 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace Cefeidas.Properties
{
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase
{
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
public static Settings Default
{
get
{
return defaultInstance;
}
}
}
}
| mit |
Horusiath/NWamp | NWamp/Messages/IMessageProvider.cs | 804 | namespace NWamp.Messages
{
/// <summary>
/// Interface of the message provider, allowing for WAMP message frame serialization/deserialization.
/// </summary>
public interface IMessageProvider
{
/// <summary>
/// Deserializes a JSON string into WAMP message frame.
/// </summary>
/// <param name="json">JSON string containing incoming WAMP message</param>
/// <returns>Deserialized WAMP message frame</returns>
IMessage DeserializeMessage(string json);
/// <summary>
/// Serializes a WAMP message frame into a JSON string.
/// </summary>
/// <param name="message">Message frame to serialize</param>
/// <returns>JSON string</returns>
string SerializeMessage(IMessage message);
}
}
| mit |
mikeobrien/FubuMVC.Swank | src/Tests/Description/TypeConventionTests.cs | 9357 | using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Xml.Serialization;
using FubuMVC.Swank;
using FubuMVC.Swank.Description;
using NUnit.Framework;
using Should;
namespace Tests.Description
{
[TestFixture]
public class TypeConventionTests
{
public TypeDescription GetDescription(Type type)
{
return new TypeConvention(new Configuration()).GetDescription(type);
}
public TypeDescription GetDescription<T>()
{
return GetDescription(typeof(T));
}
public class SomeType { }
[Test]
public void should_return_default_description_of_datatype()
{
var description = GetDescription<SomeType>();
description.Name.ShouldEqual("SomeType");
description.Comments.ShouldBeNull();
}
[Test]
public void should_return_default_description_of_list_datatype()
{
var description = GetDescription<List<SomeType>>();
description.Name.ShouldEqual("ArrayOfSomeType");
description.Comments.ShouldBeNull();
}
[Comments("This is a type with comments.")]
public class SomeTypeWithComments { }
[Test]
public void should_return_attribute_description_of_datatype()
{
var description = GetDescription<SomeTypeWithComments>();
description.Name.ShouldEqual("SomeTypeWithComments");
description.Comments.ShouldEqual("This is a type with comments.");
}
[XmlType("SomeType")]
public class SomeTypeWithXmlName { }
[Test]
public void should_return_attribute_description_of_datatype_and_xml_type_attribute()
{
var description = GetDescription<SomeTypeWithXmlName>();
description.Name.ShouldEqual("SomeType");
description.Comments.ShouldBeNull();
}
[XmlRoot("SomeRoot")]
public class SomeTypeWithXmlRootName { }
[Test]
public void should_return_attribute_description_of_datatype_and_xml_root_attribute()
{
var description = GetDescription<SomeTypeWithXmlRootName>();
description.Name.ShouldEqual("SomeRoot");
description.Comments.ShouldBeNull();
}
[DataContract(Name = "SomeType")]
public class SomeTypeWithDataContractName { }
[Test]
public void should_return_data_contract_attribute_name()
{
var description = GetDescription<SomeTypeWithDataContractName>();
description.Name.ShouldEqual("SomeType");
description.Comments.ShouldBeNull();
}
[Comments("These are some types.")]
public class SomeTypes : List<SomeType> { }
[Test]
public void should_return_attribute_description_of_inherited_list_datatype()
{
var description = GetDescription<SomeTypes>();
description.Name.ShouldEqual("ArrayOfSomeType");
description.Comments.ShouldEqual("These are some types.");
}
[Comments("These are some moar types."), XmlType("SomeTypes")]
public class SomeMoarTypes : List<SomeType> { }
[CollectionDataContract(Name = "SomeTypes")]
public class SomeCollectionWithDataContractName : List<SomeType> { }
[Test]
public void should_return_attribute_description_of_inherited_list_datatype_with_xml_type_attribute()
{
var description = GetDescription<SomeMoarTypes>();
description.Name.ShouldEqual("SomeTypes");
description.Comments.ShouldEqual("These are some moar types.");
}
[Test]
public void should_return_name_of_inherited_list_datatype_with_collection_data_contract_attribute()
{
var description = GetDescription<SomeCollectionWithDataContractName>();
description.Name.ShouldEqual("SomeTypes");
description.Comments.ShouldBeNull();
}
[Test]
public void should_initial_cap_list_primitive_type_name()
{
var description = GetDescription<List<Int64>>();
description.Name.ShouldEqual("ArrayOfLong");
}
public class WithNoArrayComments : List<int> { }
[ArrayDescription]
public class WithEmptyArrayComments : List<int> { }
[ArrayDescription("ArrayName", "This is an array comment.", "ItemName", "This is an item comment.")]
public class WithArrayComments : List<int> { }
[Test]
public void should_return_null_array_comments_if_not_specified(
[Values(typeof(WithNoArrayComments), typeof(WithEmptyArrayComments))] Type type)
{
var description = GetDescription(type);
description.Name.ShouldEqual("ArrayOfInt");
description.Comments.ShouldBeNull();
description.ArrayItem.Name.ShouldBeNull();
description.ArrayItem.Comments.ShouldBeNull();
}
[Test]
public void should_return_array_comments_if_specified()
{
var description = GetDescription<WithArrayComments>();
description.Name.ShouldEqual("ArrayName");
description.Comments.ShouldEqual("This is an array comment.");
description.ArrayItem.Name.ShouldEqual("ItemName");
description.ArrayItem.Comments.ShouldEqual("This is an item comment.");
}
[Test]
public void should_return_null_array_item_comments_if_not_specified(
[Values(typeof(WithNoArrayComments), typeof(WithEmptyArrayComments))] Type type)
{
GetDescription(type).ArrayItem.Comments.ShouldBeNull();
}
[Test]
public void should_return_array_item_comments_if_specified()
{
GetDescription<WithArrayComments>().ArrayItem.Comments.ShouldEqual("This is an item comment.");
}
public class WithNoDictionaryDescription : Dictionary<string, int> { }
[DictionaryDescription]
public class WithEmptyDictionaryDescription : Dictionary<string, int> { }
[DictionaryDescription("DictionaryName", "This is a comment.", "KeyName",
"This is a key comment.", "This is a value comment.")]
public class WithDictionaryDescription : Dictionary<string, int> { }
[Test]
public void should_return_default_dictionary_name_if_not_specified(
[Values(typeof(WithNoDictionaryDescription),
typeof(WithEmptyDictionaryDescription))] Type type)
{
GetDescription(type).Name.ShouldEqual("DictionaryOfInt");
}
[Test]
public void should_return_custom_dictionary_name_if_specified()
{
GetDescription<WithDictionaryDescription>()
.Name.ShouldEqual("DictionaryName");
}
[Test]
public void should_return_null_dictionary_comments_if_not_specified(
[Values(typeof(WithNoDictionaryDescription),
typeof(WithEmptyDictionaryDescription))] Type type)
{
GetDescription(type).Comments.ShouldBeNull();
}
[Test]
public void should_return_dictionary_comments_if_specified()
{
GetDescription<WithDictionaryDescription>()
.Comments.ShouldEqual("This is a comment.");
}
[Test]
public void should_return_null_dictionary_key_name_if_not_specified(
[Values(typeof(WithNoDictionaryDescription), typeof(WithEmptyDictionaryDescription))] Type type)
{
GetDescription(type).DictionaryEntry.KeyName.ShouldBeNull();
}
[Test]
public void should_return_dictionary_key_name_if_specified()
{
GetDescription<WithDictionaryDescription>()
.DictionaryEntry.KeyName.ShouldEqual("KeyName");
}
[Test]
public void should_return_null_dictionary_key_comments_if_not_specified(
[Values(typeof(WithNoDictionaryDescription), typeof(WithEmptyDictionaryDescription))] Type type)
{
GetDescription(type).DictionaryEntry.KeyComments.ShouldBeNull();
}
[Test]
public void should_return_dictionary_key_comments_if_specified()
{
GetDescription<WithDictionaryDescription>()
.DictionaryEntry.KeyComments.ShouldEqual("This is a key comment.");
}
[Test]
public void should_return_null_dictionary_value_comments_if_not_specified(
[Values(typeof(WithNoDictionaryDescription), typeof(WithEmptyDictionaryDescription))] Type type)
{
GetDescription(type).DictionaryEntry.ValueComments.ShouldBeNull();
}
[Test]
public void should_return_dictionary_value_comments_if_specified()
{
GetDescription<WithDictionaryDescription>().DictionaryEntry
.ValueComments.ShouldEqual("This is a value comment.");
}
}
} | mit |
TinderBox/big_machines | lib/big_machines/user_info.rb | 322 | module BigMachines
class UserInfo
attr_reader :hash
def initialize(response)
@hash = response['userInfo']
end
def method_missing(method, *args)
if @hash.key?(method.to_s)
@hash[method.to_s]
else
super
end
end
def to_s
@hash.to_s
end
end
end
| mit |
timonf/twigony-bundle | DependencyInjection/Configuration.php | 678 | <?php
/*
* This file is part of Twigony.
*
* © Timon F <dev@timonf.de>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Twigony\Bundle\FrameworkBundle\DependencyInjection;
use Symfony\Component\Config\Definition\Builder\TreeBuilder;
use Symfony\Component\Config\Definition\ConfigurationInterface;
class Configuration implements ConfigurationInterface
{
/**
* {@inheritdoc}
*/
public function getConfigTreeBuilder()
{
$treeBuilder = new TreeBuilder();
$treeBuilder->root('twigony_framework_bundle');
return $treeBuilder;
}
}
| mit |
antonioperic/Sylius | src/Sylius/Behat/Context/Setup/CheckoutContext.php | 4635 | <?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Sylius\Behat\Context\Setup;
use Behat\Behat\Context\Context;
use Sylius\Behat\Service\SharedStorageInterface;
use Sylius\Bundle\ApiBundle\Command\Checkout\AddressOrder;
use Sylius\Bundle\ApiBundle\Command\Checkout\ChoosePaymentMethod;
use Sylius\Bundle\ApiBundle\Command\Checkout\ChooseShippingMethod;
use Sylius\Component\Core\Model\AddressInterface;
use Sylius\Component\Core\Model\OrderInterface;
use Sylius\Component\Core\Model\PaymentInterface;
use Sylius\Component\Core\Model\ShipmentInterface;
use Sylius\Component\Core\Repository\OrderRepositoryInterface;
use Sylius\Component\Resource\Factory\FactoryInterface;
use Sylius\Component\Resource\Repository\RepositoryInterface;
use Symfony\Component\Messenger\MessageBusInterface;
use Webmozart\Assert\Assert;
final class CheckoutContext implements Context
{
private OrderRepositoryInterface $orderRepository;
private RepositoryInterface $shippingMethodRepository;
private RepositoryInterface $paymentMethodRepository;
private MessageBusInterface $commandBus;
private FactoryInterface $addressFactory;
private SharedStorageInterface $sharedStorage;
public function __construct(
OrderRepositoryInterface $orderRepository,
RepositoryInterface $shippingMethodRepository,
RepositoryInterface $paymentMethodRepository,
MessageBusInterface $commandBus,
FactoryInterface $addressFactory,
SharedStorageInterface $sharedStorage
) {
$this->orderRepository = $orderRepository;
$this->shippingMethodRepository = $shippingMethodRepository;
$this->paymentMethodRepository = $paymentMethodRepository;
$this->commandBus = $commandBus;
$this->addressFactory = $addressFactory;
$this->sharedStorage = $sharedStorage;
}
/**
* @Given I have proceeded through checkout process in the :localeCode locale with email :email
*/
public function iHaveProceededThroughCheckoutProcessInTheLocaleWithEmail(string $localeCode, string $email)
{
$cartToken = $this->sharedStorage->get('cart_token');
/** @var OrderInterface|null $cart */
$cart = $this->orderRepository->findCartByTokenValue($cartToken);
Assert::notNull($cart);
$cart->setLocaleCode($localeCode);
$command = new AddressOrder($email, $this->getDefaultAddress());
$command->setOrderTokenValue($cartToken);
$this->commandBus->dispatch($command);
$this->completeCheckout($cart);
}
/**
* @Given I have proceeded through checkout process
*/
public function iHaveProceededThroughCheckoutProcess(): void
{
$cartToken = $this->sharedStorage->get('cart_token');
/** @var OrderInterface|null $cart */
$cart = $this->orderRepository->findCartByTokenValue($cartToken);
Assert::notNull($cart);
$command = new AddressOrder('rich@sylius.com', $this->getDefaultAddress());
$command->setOrderTokenValue($cartToken);
$this->commandBus->dispatch($command);
$this->completeCheckout($cart);
}
private function getDefaultAddress(): AddressInterface
{
/** @var AddressInterface $address */
$address = $this->addressFactory->createNew();
$address->setCity('New York');
$address->setStreet('Wall Street');
$address->setPostcode('00-001');
$address->setCountryCode('US');
$address->setFirstName('Richy');
$address->setLastName('Rich');
return $address;
}
private function completeCheckout(OrderInterface $order): void
{
$command = new ChooseShippingMethod($this->shippingMethodRepository->findOneBy([])->getCode());
$command->setOrderTokenValue($order->getTokenValue());
/** @var ShipmentInterface $shipment */
$shipment = $order->getShipments()->first();
$command->setSubresourceId((string) $shipment->getId());
$this->commandBus->dispatch($command);
$command = new ChoosePaymentMethod($this->paymentMethodRepository->findOneBy([])->getCode());
$command->setOrderTokenValue($order->getTokenValue());
/** @var PaymentInterface $payment */
$payment = $order->getPayments()->first();
$command->setSubresourceId((string) $payment->getId());
$this->commandBus->dispatch($command);
}
}
| mit |
eric-appeagle/appeagle-redoc-demo | node_modules/json-schema-faker/locale/nep.js | 180 | module.exports = require('../lib/jsf')
.extend('faker', function() {
try {
return require('faker/locale/nep');
} catch (e) {
return null;
}
});
| mit |
shaotao/Leetcode | algorithm/house_robber_3/HouseRobber3.java | 1302 | import java.io.*;
import java.util.*;
class HouseRobber3
{
public static void main(String[] args)
{
System.out.println("=== House Robber III ===");
Solution solution = new Solution();
TreeNode n1 = new TreeNode(3);
TreeNode n2 = new TreeNode(4);
TreeNode n3 = new TreeNode(5);
TreeNode n4 = new TreeNode(1);
TreeNode n5 = new TreeNode(3);
TreeNode n6 = new TreeNode(1);
n1.left = n2; n1.right = n3;
n2.left = n4; n2.right = n5;
n3.right = n6;
System.out.println("max = "+solution.rob(n1));
}
}
class TreeNode {
int val;
TreeNode left;
TreeNode right;
TreeNode(int x) { val = x; }
}
class Solution
{
public int rob(TreeNode root) {
if(root == null) { return 0; }
if(root.left == null && root.right == null) { return root.val; }
int with_root = root.val;
if(root.left != null) {
with_root += rob(root.left.left);
with_root += rob(root.left.right);
}
if(root.right != null) {
with_root += rob(root.right.left);
with_root += rob(root.right.right);
}
int no_root = rob(root.left) + rob(root.right);
return (with_root > no_root)?with_root:no_root;
}
}
| mit |
mjenrungrot/competitive_programming | UVa Online Judge/v127/12703.cc | 5116 | /*=============================================================================
# Author: Teerapat Jenrungrot - https://github.com/mjenrungrot/
# FileName: 12703.cc
# Description: UVa Online Judge - 12703
=============================================================================*/
#include <bits/stdc++.h>
#pragma GCC optimizer("Ofast")
#pragma GCC target("avx2")
using namespace std;
typedef pair<int, int> ii;
typedef pair<long long, long long> ll;
typedef pair<double, double> dd;
typedef tuple<int, int, int> iii;
typedef tuple<long long, long long, long long> lll;
typedef tuple<double, double, double> ddd;
typedef vector<string> vs;
typedef vector<int> vi;
typedef vector<vector<int>> vvi;
typedef vector<long long> vl;
typedef vector<vector<long long>> vvl;
typedef vector<double> vd;
typedef vector<vector<double>> vvd;
typedef vector<ii> vii;
typedef vector<ll> vll;
typedef vector<dd> vdd;
// Debug Snippets
void __print(int x) { cerr << x; }
void __print(long x) { cerr << x; }
void __print(long long x) { cerr << x; }
void __print(unsigned x) { cerr << x; }
void __print(unsigned long x) { cerr << x; }
void __print(unsigned long long x) { cerr << x; }
void __print(float x) { cerr << x; }
void __print(double x) { cerr << x; }
void __print(long double x) { cerr << x; }
void __print(char x) { cerr << '\'' << x << '\''; }
void __print(const char* x) { cerr << '\"' << x << '\"'; }
void __print(const string& x) { cerr << '\"' << x << '\"'; }
void __print(bool x) { cerr << (x ? "true" : "false"); }
template <typename T, typename V>
void __print(const pair<T, V>& x) {
cerr << '{';
__print(x.first);
cerr << ',';
__print(x.second);
cerr << '}';
}
template <typename T>
void __print(const T& x) {
int f = 0;
cerr << '{';
for (auto& i : x) cerr << (f++ ? "," : ""), __print(i);
cerr << "}";
}
void _print() { cerr << "]\n"; }
template <typename T, typename... V>
void _print(T t, V... v) {
__print(t);
if (sizeof...(v)) cerr << ", ";
_print(v...);
}
#define debug(x...) \
cerr << "[" << #x << "] = ["; \
_print(x)
template <class Ch, class Tr, class Container>
basic_ostream<Ch, Tr>& operator<<(basic_ostream<Ch, Tr>& os,
Container const& x) {
os << "{ ";
for (auto& y : x) os << y << " ";
return os << "}";
}
template <class X, class Y>
ostream& operator<<(ostream& os, pair<X, Y> const& p) {
return os << "[ " << p.first << ", " << p.second << "]";
}
// End Debug Snippets
vs split(string line) {
vs output;
istringstream iss(line);
string tmp;
while (iss >> tmp) {
output.push_back(tmp);
}
return output;
}
vs split(string line, regex re) {
vs output;
sregex_token_iterator it(line.begin(), line.end(), re, -1), it_end;
while (it != it_end) {
output.push_back(it->str());
it++;
}
return output;
}
const int INF_INT = 1e9 + 7;
const long long INF_LL = 1e18;
const int MAXN = 45;
long long fib[MAXN];
int N, A, B;
vector<pair<int, long long>> factorize(int x) {
vector<pair<int, long long>> ans;
for (int i = 2; i * i <= x; i++) {
long long count = 0;
while (x % i == 0) {
x /= i;
count++;
}
if (count) ans.emplace_back(i, count);
}
if (x > 1) ans.emplace_back(x, 1);
return ans;
}
void solve() {
/*
F_2 = a b
F_3 = a b^2
F_4 = a^2 b^3
...
F_k = a^{fib(k-2)} b^{fib(k-1)}
*/
cin >> N >> A >> B;
long long n_a = fib[N - 2];
long long n_b = fib[N - 1];
vector<pair<int, long long>> factor_a = factorize(A);
vector<pair<int, long long>> factor_b = factorize(B);
for (int i = 0; i < factor_a.size(); i++)
factor_a[i] = {factor_a[i].first, factor_a[i].second * n_a};
for (int i = 0; i < factor_b.size(); i++)
factor_b[i] = {factor_b[i].first, factor_b[i].second * n_b};
vector<pair<int, long long>> ans;
int id_a = 0, id_b = 0;
while (id_a < factor_a.size() and id_b < factor_b.size()) {
if (factor_a[id_a].first < factor_b[id_b].first) {
ans.push_back(factor_a[id_a]);
id_a++;
} else if (factor_a[id_a].first > factor_b[id_b].first) {
ans.push_back(factor_b[id_b]);
id_b++;
} else {
ans.emplace_back(factor_a[id_a].first,
factor_a[id_a].second + factor_b[id_b].second);
id_a++;
id_b++;
}
}
while (id_a < factor_a.size()) {
ans.push_back(factor_a[id_a]);
id_a++;
}
while (id_b < factor_b.size()) {
ans.push_back(factor_b[id_b]);
id_b++;
}
for (int i = 0; i < ans.size(); i++) {
cout << ans[i].first << " " << ans[i].second << endl;
}
}
int main() {
ios::sync_with_stdio(false);
cin.tie(0);
fib[0] = fib[1] = 1;
for (int i = 2; i < MAXN; i++) fib[i] = fib[i - 1] + fib[i - 2];
int T;
cin >> T;
while (T--) {
solve();
cout << endl;
}
return 0;
} | mit |
choonchernlim/test-spring-data-jpa | src/main/java/com/github/choonchernlim/testSpringDataJPA/example/datajpa/ProjectDao.java | 1164 | package com.github.choonchernlim.testSpringDataJPA.example.datajpa;
import com.github.choonchernlim.testSpringDataJPA.entity.Project;
import org.joda.time.LocalDate;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
public interface ProjectDao extends JpaRepository<Project, Long>, ProjectDaoCustom {
@Query("from Project p where p.name = :name")
Project anotherWayToGetProjectByName(@Param("name") String name);
Project findByName(String name);
Project findByTotalUsersAndCreated(Integer age, LocalDate created);
List<Project> findByTotalUsers(Integer age);
List<Project> findByTotalUsersOrderByNameAsc(Integer age);
List<Project> findByCreatedAfter(LocalDate startDate);
List<Project> findByCreatedBetween(LocalDate startDate, LocalDate endDate);
List<Project> findByNameStartingWith(String start);
List<Project> findByNameEndingWithIgnoreCase(String end);
List<Project> findByTotalUsers(Integer age, Sort sort);
}
| mit |
coinkeeper/terracoin_20150327 | src/version.cpp | 2635 | // Copyright (c) 2012 The Terracoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <string>
#include "version.h"
// Name of client reported in the 'version' message. Report the same name
// for both terracoind and terracoin-qt, to make it harder for attackers to
// target servers or GUI users specifically.
const std::string CLIENT_NAME("SatoshiClone");
// Client version number
#define CLIENT_VERSION_SUFFIX ""
// The following part of the code determines the CLIENT_BUILD variable.
// Several mechanisms are used for this:
// * first, if HAVE_BUILD_INFO is defined, include build.h, a file that is
// generated by the build environment, possibly containing the output
// of git-describe in a macro called BUILD_DESC
// * secondly, if this is an exported version of the code, GIT_ARCHIVE will
// be defined (automatically using the export-subst git attribute), and
// GIT_COMMIT will contain the commit id.
// * then, three options exist for determining CLIENT_BUILD:
// * if BUILD_DESC is defined, use that literally (output of git-describe)
// * if not, but GIT_COMMIT is defined, use v[maj].[min].[rev].[build]-g[commit]
// * otherwise, use v[maj].[min].[rev].[build]-unk
// finally CLIENT_VERSION_SUFFIX is added
// First, include build.h if requested
#ifdef HAVE_BUILD_INFO
# include "build.h"
#endif
// git will put "#define GIT_ARCHIVE 1" on the next line inside archives. $Format:%n#define GIT_ARCHIVE 1$
#ifdef GIT_ARCHIVE
# define GIT_COMMIT_ID "$Format:%h$"
# define GIT_COMMIT_DATE "$Format:%cD"
#endif
#define BUILD_DESC_FROM_COMMIT(maj,min,rev,build,commit) \
"v" DO_STRINGIZE(maj) "." DO_STRINGIZE(min) "." DO_STRINGIZE(rev) "." DO_STRINGIZE(build) "-g" commit
#define BUILD_DESC_FROM_UNKNOWN(maj,min,rev,build) \
"v" DO_STRINGIZE(maj) "." DO_STRINGIZE(min) "." DO_STRINGIZE(rev) "." DO_STRINGIZE(build) "-unk"
#ifndef BUILD_DESC
# ifdef GIT_COMMIT_ID
# define BUILD_DESC BUILD_DESC_FROM_COMMIT(CLIENT_VERSION_MAJOR, CLIENT_VERSION_MINOR, CLIENT_VERSION_REVISION, CLIENT_VERSION_BUILD, GIT_COMMIT_ID)
# else
# define BUILD_DESC BUILD_DESC_FROM_UNKNOWN(CLIENT_VERSION_MAJOR, CLIENT_VERSION_MINOR, CLIENT_VERSION_REVISION, CLIENT_VERSION_BUILD)
# endif
#endif
#ifndef BUILD_DATE
# ifdef GIT_COMMIT_DATE
# define BUILD_DATE GIT_COMMIT_DATE
# else
# define BUILD_DATE __DATE__ ", " __TIME__
# endif
#endif
const std::string CLIENT_BUILD(BUILD_DESC CLIENT_VERSION_SUFFIX);
const std::string CLIENT_DATE(BUILD_DATE);
| mit |
sakapon/Tools-2017 | EpidemicSimulator/EpidemicSimulator/DataModel.cs | 3015 | using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using Reactive.Bindings;
namespace EpidemicSimulator
{
public enum InfectionStatus
{
Susceptible,
Infectious,
Recovered,
}
/// <summary>
/// Fractions of the population initially in each compartment.
/// NOTE(review): presumably the three ratios are expected to sum to 1.0 —
/// not enforced here; confirm against the simulation code.
/// </summary>
public struct PopulationRatio
{
    public double SusceptibleRatio { get; set; }
    public double InfectiousRatio { get; set; }
    public double RecoveredRatio { get; set; }
}
/// <summary>Mutable, bindable start-up settings for a simulation run.</summary>
public class InitialSettings
{
    public ReactiveProperty<int> Width { get; } = new ReactiveProperty<int>(200);
    public ReactiveProperty<int> Height { get; } = new ReactiveProperty<int>(200);

    public ReactiveProperty<PopulationRatio> PopulationRatio { get; } = new ReactiveProperty<PopulationRatio>(new PopulationRatio
    {
        SusceptibleRatio = 0.7,
        InfectiousRatio = 0.1,
        RecoveredRatio = 0.2,
    });

    /// <summary>Snapshots the current property values into an immutable value object.</summary>
    public VInitialSettings ToValue()
    {
        var snapshot = new VInitialSettings();
        snapshot.Size = new Size(Width.Value, Height.Value);
        snapshot.PopulationRatio = PopulationRatio.Value;
        return snapshot;
    }
}
/// <summary>Immutable snapshot of <see cref="InitialSettings"/>.</summary>
public struct VInitialSettings
{
    public Size Size { get; set; }
    public PopulationRatio PopulationRatio { get; set; }
}
/// <summary>Mutable, bindable settings that may be tuned while the simulation runs.</summary>
public class RealtimeSettings
{
    public ReactiveProperty<double> InfectionRate { get; } = new ReactiveProperty<double>(0.1);
    public ReactiveProperty<double> RecoveryRate { get; } = new ReactiveProperty<double>(0.2);
    public ReactiveProperty<double> DeimmunizationRate { get; } = new ReactiveProperty<double>(0.3);
    public ReactiveProperty<double> ExecutionInterval { get; } = new ReactiveProperty<double>(0.02);
    public ReactiveProperty<bool> IsMapLooping { get; } = new ReactiveProperty<bool>(false);

    /// <summary>Snapshots the current property values into an immutable value object.</summary>
    public VRealtimeSettings ToValue()
    {
        var snapshot = new VRealtimeSettings();
        snapshot.InfectionRate = InfectionRate.Value;
        snapshot.RecoveryRate = RecoveryRate.Value;
        snapshot.DeimmunizationRate = DeimmunizationRate.Value;
        snapshot.ExecutionInterval = ExecutionInterval.Value;
        snapshot.IsMapLooping = IsMapLooping.Value;
        return snapshot;
    }
}
/// <summary>Immutable snapshot of <see cref="RealtimeSettings"/>.</summary>
public struct VRealtimeSettings
{
    public double InfectionRate { get; set; }
    public double RecoveryRate { get; set; }
    public double DeimmunizationRate { get; set; }
    public double ExecutionInterval { get; set; }
    public bool IsMapLooping { get; set; }
}
/// <summary>State of the simulation at a given turn: settings plus the status grid.</summary>
public struct InfectionModel
{
    public VInitialSettings InitialSettings { get; set; }
    public int Turn { get; set; }
    // One InfectionStatus per grid cell.
    public InfectionStatus[,] Statuses { get; set; }
}
/// <summary>Head counts per compartment for one turn of the simulation.</summary>
public struct PopulationSummary
{
    public int Total { get; set; }
    public int Susceptible { get; set; }
    public int Infectious { get; set; }
    public int Recovered { get; set; }
}
}
| mit |
DuncanCragg/ForestJS | lib/evaluatefed.js | 1934 | (function (global, factory) {
if (typeof define === "function" && define.amd) {
define(['react', './forest-common', './renderfed'], factory);
} else if (typeof exports !== "undefined") {
factory(require('react'), require('./forest-common'), require('./renderfed'));
} else {
var mod = {
exports: {}
};
factory(global.react, global.forestCommon, global.renderfed);
global.evaluatefed = mod.exports;
}
})(this, function (_react, _forestCommon, _renderfed) {
'use strict';
var _react2 = _interopRequireDefault(_react);
var _forestCommon2 = _interopRequireDefault(_forestCommon);
var _renderfed2 = _interopRequireDefault(_renderfed);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
var uids = _forestCommon2.default.cacheObjects([{ UID: 'uid-1', is: 'guistack', name: 'Forest App', list: ['uid-3', 'uid-2'] }, { UID: 'uid-2', Evaluator: evalFed, is: 'fedexample', counter: 42, topic: 'banana', watching: 'uid-3' }, { UID: 'uid-3', Evaluator: evalFed, is: 'fedexample', counter: 99, topic: 'mango' }]);
function evalFed(object) {
return [object('Timer') === 0 && { Timer: 2000, enableCounting: !object('enableCounting') }, object('user-state.add?') && { counter: object('counter') + 1 }, !object('enableCounting') && { counter: 0 }, object('watching') && { counter: object('watching.counter') }, object('user-state.topic') && { topic: object('user-state.topic').toLowerCase() }, (!object('giphy') || object('user-state.load?')) && { giphy: 'https://api.giphy.com/v1/gifs/random?api_key=dc6zaTOxFJmzC&tag=' + object('topic') }, object('giphy.data') && { gdata: object('giphy.data') }, true && { loading: !object('giphy.data') }, object('gdata') && { image: object('gdata.fixed_height_small_url') }];
}
_forestCommon2.default.renderDOM(_react2.default.createElement(_renderfed2.default, { uid: uids[0] }));
}); | mit |
janessasmith/zl-angular-demo | app/scripts/directives/app/nav.js | 364 | /**
* Created by ZhengLu on 2015/8/24.
*/
'use strict';
// Navigation bar directive, rendered from an external template.
// NavData is injected but not referenced here — presumably consumed
// elsewhere; TODO confirm before removing the dependency.
angular.module('formApp').directive('appNav', function(NavData) {
    return {
        restrict: 'EA',
        scope: {},
        templateUrl: 'views/home/nav.html',
        link: function(scope, element, attrs) {
            // Fixed: removed `template: '<div ng-transclude="test"></div>'`.
            // Inside a function body that line was a JavaScript *label* plus a
            // no-op string expression (dead code), and a directive definition
            // may not declare both `template` and `templateUrl` anyway.
        }
    };
});
treytomes/DirectCanvas | DirectCanvas/DirectCanvasM2/Demos/CompositorDemo/VisualElement.cs | 3575 | using System;
using System.Diagnostics;
using DirectCanvas;
using DirectCanvas.Misc;
namespace WindowsFormsTest.Demos.CompositorDemo
{
/// <summary>
/// One animated sprite for the compositor demo: it spins around the Y axis and
/// drifts between its home position and the mouse pointer while pressed.
/// </summary>
class VisualElement
{
    private readonly RectangleF m_location;   // home rectangle (position + size)
    private Color4 m_color;
    private RotationParameters m_rotation;
    private Random m_random;                  // per-instance RNG, uniquely seeded
    private PointF m_currentPos;              // animated position, updated each frame
    private PointF m_originalPos;             // home position to drift back to
    private RectangleF m_sourceArea;
    private RectangleF m_destArea;

    // Shared timer used only to derive distinct RNG seeds per instance.
    private static Stopwatch stopWatch;
    private static long m_lastTicks;

    private float m_animationStep = 2;        // pixels moved per update; also scales spin speed

    static VisualElement()
    {
        stopWatch = new Stopwatch();
        stopWatch.Start();
    }

    public VisualElement(RectangleF area)
    {
        // Force a strictly increasing seed so elements constructed within the
        // same tick still get different Random sequences.
        long ticks = stopWatch.ElapsedTicks;
        if (ticks <= m_lastTicks)
            ticks = m_lastTicks + 1;
        m_random = new Random((int)ticks);
        m_lastTicks = ticks;

        m_originalPos = new PointF(area.Location.X, area.Location.Y);
        m_location = area;
        // NOTE(review): Next(0, 40) can yield 0, producing an element that
        // never moves or spins — confirm that is intended.
        m_animationStep = m_random.Next(0, 40);

        // Random per-element tint was experimented with; currently every
        // element renders untinted (opaque white).
        float r = m_random.Next(90, 100) / 100f;
        float g = m_random.Next(90, 100) / 100f;
        float b = m_random.Next(90, 100) / 100f;
        //Color = new Color4(1, r, g, b);
        Color = new Color4(1, 1, 1, 1);

        m_currentPos = new PointF(m_originalPos.X, m_originalPos.Y);
    }

    public Color4 Color
    {
        get { return m_color; }
        set { m_color = value; }
    }

    public RotationParameters Rotation
    {
        get { return m_rotation; }
        set { m_rotation = value; }
    }

    // Area of the input layer that this element samples from.
    public RectangleF SourceArea
    {
        get { return m_sourceArea; }
        set { m_sourceArea = value; }
    }

    // Area of the output layer that this element is drawn into.
    public RectangleF DestArea
    {
        get { return m_destArea; }
        set { m_destArea = value; }
    }

    // Rotate around the element's center (normalized coordinates).
    private PointF rotationCenter = new PointF(0.5f, 0.5f);

    /// <summary>Composites this element's source area onto the output layer.</summary>
    public void Compose(DrawingLayer outputLayer, DrawingLayer inputLayer)
    {
        m_rotation.RotationCenter = rotationCenter;
        outputLayer.ComposeLayer(inputLayer, ref m_sourceArea, ref m_destArea, ref m_rotation, ref m_color);
    }

    /// <summary>
    /// Advances the animation one frame: spins around Y and steps the element
    /// toward the mouse (while pressed) or back toward its home position.
    /// NOTE(review): movement steps by a fixed amount per axis and can
    /// overshoot the target, making elements jitter around it — confirm.
    /// </summary>
    public void Update(bool mouseDown, PointF mousePoint)
    {
        //m_rotation.RotateX += 5;
        //m_rotation.RotateZ += 5;
        m_rotation.RotateY += m_animationStep / 4f;

        PointF targetLocation;
        if (mouseDown)
        {
            targetLocation = mousePoint;
        }
        else
        {
            targetLocation = m_originalPos;
        }

        // Step toward the target on each axis independently.
        var xDistance = targetLocation.X - m_currentPos.X;
        if (xDistance < 0)
        {
            m_currentPos.X -= m_animationStep;
        }
        else if (xDistance > 0)
        {
            m_currentPos.X += m_animationStep;
        }

        var yDistance = targetLocation.Y - m_currentPos.Y;
        if (yDistance < 0)
        {
            m_currentPos.Y -= m_animationStep;
        }
        else if (yDistance > 0)
        {
            m_currentPos.Y += m_animationStep;
        }

        DestArea = new RectangleF(m_currentPos.X, m_currentPos.Y, m_location.Width, m_location.Height);
        SourceArea = m_location;
    }
}
}
| mit |
karloescota/dragonpay-api | lib/dragonpay_api/configuration.rb | 262 | module DragonpayApi
class Configuration
attr_accessor :url
attr_accessor :merchant_id
attr_accessor :secret_key
def initialize(options = {})
options.each do |key, value|
public_send("#{key}=", value)
end
end
end
end
| mit |
dwalend/aws-lambda-scala | lambda/src/main/scala/net/walend/lambdademo/lambda/package.scala | 884 | package net.walend.lambdademo
/**
* Web API
*
* /user/ID/addEdge/ID POST - add a new relationship and post an edge . Body is some words about the relationship.
* /users GET - return a list of all users
* /user/ID/edges GET - return a list of all user ID -> ID relationships
* /user/ID/noEdge GET - return a list of all IDs with no relationship to the user
* /edges GET - return a list of all relationships
*
* User lists are [{"name" : "yourName","id" : "3"} ...]
*
* Edges are {"from" : "3", "to" : "5" , "text" : "words" }
*
* and a list of edges is [ {"from" : "3", "to" : "5" , "text" : "words" } ... ]
*
*
* 634303334930 - AWS account number
*
* https://634303334930.signin.aws.amazon.com/console/
*
* or
*
* https://walend-net.signin.aws.amazon.com/console/
*
* @author dwalend
* @since v0.0.0
*/
package object lambda {
}
| mit |
SliinQ/IP-Logger | app/utils.py | 1288 | from .database import db_session
from .models import Picture, Invite
import random
import string
def gen_filename(length=5, source=string.ascii_letters+string.digits):
    ''' Generate a random string to use as a filename/url and keep retrying
    until one is found that no existing Picture row already uses.

    length -- length of string to generate
    source -- characters to use in random string

    NOTE(review): `random` is not cryptographically secure; if these names
    must be unguessable, switch to the `secrets` module.
    '''
    while True:
        filename = ''.join(random.choice(source) for __ in range(length))
        # Fixed: removed stray debug `print(pic)` left over from development.
        pic = Picture.query.filter(Picture.filename == filename).first()
        if not pic:
            return filename
def gen_invite(length=10, source=string.ascii_letters+string.digits):
    ''' Generate a random string to use as an invite key, retrying until the
    candidate does not collide with an existing Invite row.

    length -- length of string to generate
    source -- characters to use in random string
    '''
    candidate = None
    while candidate is None or Invite.query.filter(Invite.invite_key == candidate).first():
        candidate = ''.join(random.choice(source) for _ in range(length))
    return candidate
def get_extension(filename):
    ''' Return the file extension (text after the last '.') of filename.

    filename -- filename from which to return the extension

    Returns '' for a filename with no dot instead of raising IndexError
    (the previous `rsplit('.', 1)[1]` crashed on extension-less names).
    '''
    _, sep, ext = filename.rpartition('.')
    return ext if sep else ''
| mit |
minimalist-components/mn-dialog | tasks/vendorJS.js | 607 | import gulp from 'gulp'
import packageFiles from 'package-files'
import concat from 'gulp-concat'
import uglify from 'gulp-uglify'
gulp.task('vendorJS', vendorJSTask)

// Bundle the JS shipped by the listed dev-dependency packages into a single
// minified docs/vendor.js.
function vendorJSTask() {
  const vendorPackages = [
    'mn-gh-page',
    'mn-select',
  ]

  // Keep only .js files, and for document-register-element prefer the
  // browser build over the node build.
  const vendorScripts = packageFiles(vendorPackages)
    .filter(file => file.endsWith('.js'))
    .map(file => {
      if (file.includes('document-register-element')) {
        return file.replace('.node.js', '.js')
      }
      return file
    })

  return gulp
    .src(vendorScripts)
    .pipe(concat('vendor.js'))
    .pipe(uglify())
    .pipe(gulp.dest('./docs'))
}
| mit |
jjhesk/v-server-content-bank | js/adminmodel/stock_config.js | 3260 | /**
* Created by ryo on 14年8月13日.
*/
var StockConfigure = StockConfigure || {};
jQuery(function ($) {
StockConfigure = function (component_name, checker) {
this.$container = $("#" + component_name);
var $container = this.$container;
this.$listtable = $("#stock_count_admin");
this.$innvendorid = $("#innvendorid", $container);
this.$stock_system_type = $("#stock_system_type", $container);
this.$assign_location_ids = $("#assign_location_ids", $container);
this.$stock_configuration_complete = $("#stock_configuration_complete", $container);
this.show_loading = false;
this.location_number = 0;
this.domain = window.location.origin + "/api/";
this.vendor_location_template_output = Handlebars.compile($("#stock_configuration_stock_location_template").html());
this.$extension_obj = $("#ext_v2").val();
this.checker = checker;
this.Init();
};
StockConfigure.prototype = {
Init: function () {
var d = this;
d.$innvendorid.on("change", {that: d}, d.display_locations);
d.$stock_system_type.on("change", {that: d}, d.call_checker);
},
call_checker: function (e) {
var d = e.data.that;
d.checker.check_stock_type();
},
display_locations: function (e) {
e.preventDefault();
var d = e.data.that;
d.show_loading = true;
$(".display_location_group").remove();
var loader = new AJAXLoader(d.$innvendorid, "normal", "app_reg");
var enter = new JAXAPIsupport(d.domain + "vendor/stores_locations_choices/", {
id: parseInt($("option:selected", d.$innvendorid).val())
}, d, function (that, json) {
var insert_html = "";
d.$assign_location_ids.val("");
d.vendor_selected_ids = "";
d.location_number = 0;
$.each(json, function (key, value) {
var json_obj = {i: d.location_number, loc_id: key, display_text: value}, append = that.vendor_location_template_output(json_obj);
insert_html += append;
d.location_number++;
});
MetaBoxSupport.InsertHTMLFieldSelectAfter("#innvendorid", insert_html);
$("input.vendor_checkbox", d.$container).on("change", {that: d}, d.checkbox_ticked);
});
enter.add_loader(loader);
enter.init();
},
checkbox_ticked: function (e) {
var d = e.data.that;
var result = [];
for (var i = 0; i < d.location_number; i++) {
if ($('#location' + i, d.$container).is(':checked'))
//result[$('#location' + i, d.$container).val()] = $("#loc_text_" + $('#location' + i, d.$container).val(), d.$container).html();
result.push($('#location' + i, d.$container).val());
}
//var location_json = JSON.stringify(result);
var location_json = result.join(",");
d.$assign_location_ids.val(location_json);
d.checker.check_address();
}
}
})
; | mit |
tOOlmaker-equalsp/apiStransTHE | src/main/java/com/equalsp/stransthe/SerializedInthegraService.java | 4752 | package com.equalsp.stransthe;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
public class SerializedInthegraService extends CachedInthegraService {
public SerializedInthegraService(InthegraAPI delegate, CachedServiceFileHander fileHandler, long tempoExpiracao, TimeUnit unit) {
super(delegate, fileHandler, tempoExpiracao, unit);
}
@Override
public void initialize() throws IOException {
if (System.currentTimeMillis() > expireAt) {
lock.lock();
try {
boolean successfullyLoaded = loadFromFile();
if (!successfullyLoaded) {
refreshCache();
expireAt = System.currentTimeMillis() + timeoutInMillis;
saveCacheToFile();
}
} finally {
lock.unlock();
}
}
}
private void saveCacheToFile() throws IOException {
Gson gson = new GsonBuilder().create();
JsonObject cachedJsonObject = new JsonObject();
cachedJsonObject.addProperty("expireAt", gson.toJson(expireAt));
JsonArray linhasParadasJsonArray = new JsonArray();
for (Linha linha : cacheLinhaParadas.keySet()) {
JsonObject linhaParadaJsonObject = new JsonObject();
String linhaJson = gson.toJson(linha);
List<Parada> paradas = cacheLinhaParadas.get(linha);
JsonArray paradasJsonArray = new JsonArray();
for (Parada parada : paradas) {
String paradaJson = gson.toJson(parada);
paradasJsonArray.add(paradaJson);
}
linhaParadaJsonObject.addProperty("linha", linhaJson);
linhaParadaJsonObject.add("paradas", paradasJsonArray);
linhasParadasJsonArray.add(linhaParadaJsonObject);
}
cachedJsonObject.add("linhasParadas", linhasParadasJsonArray);
//cachedJsonObject.add("linhasParadas", gson.toJsonTree(cacheLinhaParadas));
JsonArray paradasLinhasJsonArray = new JsonArray();
for (Parada parada : cacheParadaLinhas.keySet()) {
JsonObject paradaLinhasJsonObject = new JsonObject();
String paradaJson = gson.toJson(parada);
List<Linha> linhas = cacheParadaLinhas.get(parada);
JsonArray linhasJsonArray = new JsonArray();
for (Linha linha : linhas) {
String linhaJson = gson.toJson(linha);
linhasJsonArray.add(linhaJson);
}
paradaLinhasJsonObject.addProperty("parada", paradaJson);
paradaLinhasJsonObject.add("linhas", linhasJsonArray);
paradasLinhasJsonArray.add(paradaLinhasJsonObject);
}
cachedJsonObject.add("paradasLinhas", paradasLinhasJsonArray);
//cachedJsonObject.add("paradasLinhas", gson.toJsonTree(cacheParadaLinhas));
String cacheJson = gson.toJson(cachedJsonObject);
fileHandler.saveCacheFile(cacheJson);
}
private boolean loadFromFile() throws IOException {
String fileContent = fileHandler.loadCacheFile();
if (!fileContent.isEmpty()) {
Gson gson = new GsonBuilder().create();
JsonObject cacheJson = gson.fromJson(fileContent, JsonObject.class);
expireAt = cacheJson.get("expireAt").getAsLong();
if (System.currentTimeMillis() > expireAt) {
return false;
} else {
cacheLinhaParadas.clear();
cacheParadaLinhas.clear();
JsonArray linhasParadas = cacheJson.getAsJsonArray("linhasParadas");
for (int i = 0; i < linhasParadas.size(); ++i) {
JsonObject jsonObject = linhasParadas.get(i).getAsJsonObject();
Linha linha = gson.fromJson(jsonObject.get("linha").getAsString(), Linha.class);
JsonArray paradasJsonArray = jsonObject.getAsJsonArray("paradas");
List<Parada> paradasDaLinha = new ArrayList<>();
for (int j = 0; j < paradasJsonArray.size(); j++) {
JsonElement paradaObejct = paradasJsonArray.get(j);
Parada parada = gson.fromJson(paradaObejct.getAsString(), Parada.class);
paradasDaLinha.add(parada);
}
cacheLinhaParadas.put(linha, paradasDaLinha);
}
JsonArray paradasLinhas = cacheJson.getAsJsonArray("paradasLinhas");
for (int i = 0; i < paradasLinhas.size(); ++i) {
JsonObject jsonObject = paradasLinhas.get(i).getAsJsonObject();
Parada parada = gson.fromJson(jsonObject.get("parada").getAsString(), Parada.class);
JsonArray linhasJsonArray = jsonObject.getAsJsonArray("linhas");
List<Linha> linhasDaParada = new ArrayList<>();
for (int j = 0; j < linhasJsonArray.size(); j++) {
JsonElement paradaObejct = linhasJsonArray.get(j);
Linha linha = gson.fromJson(paradaObejct.getAsString(), Linha.class);
linhasDaParada.add(linha);
}
cacheParadaLinhas.put(parada, linhasDaParada);
}
System.out.println("ok");
return true;
}
} else {
return false;
}
}
} | mit |
davidascher/go-tool | Makefile.py | 15840 |
"""Makefile for the go project.
${common_task_list}
See `mk -h' for options.
"""
import sys
import os
from os.path import join, dirname, normpath, abspath, exists, basename
import re
from glob import glob
import webbrowser
from mklib.common import MkError
from mklib import Task
from mklib import sh
class bugs(Task):
    """Open the project's bug database page (Google Code issue list)."""
    def make(self):
        webbrowser.open("http://code.google.com/p/go-tool/issues/list")
class site(Task):
    """Open the Google Code project page."""
    def make(self):
        webbrowser.open("http://code.google.com/p/go-tool/")
class clean(Task):
    """Clean generated files and dirs."""
    def make(self):
        # Glob patterns (relative to the project dir) of build artifacts.
        patterns = [
            "dist",
            "build",
            "MANIFEST",
            "*.pyc",
            "lib/*.pyc",
        ]
        for pattern in patterns:
            p = join(self.dir, pattern)
            for path in glob(p):
                sh.rm(path, log=self.log)
class sdist(Task):
    """python setup.py sdist"""
    def make(self):
        # _setup_command_prefix() adds the Mac OS X tar env tweak when needed.
        sh.run_in_dir("%spython setup.py sdist -f --formats zip"
                      % _setup_command_prefix(),
                      self.dir, self.log.debug)
class webdist(Task):
    """Build a web dist package for trentm.com/projects/

    "Web dist" packages are zip files with '.web' extension. All files in
    the zip must be under a dir named after the project. There must be a
    webinfo.xml file at <projname>/webinfo.xml. This file is "defined"
    by the parsing in trentm.com/build.py.
    """
    deps = ["docs"]

    def results(self):
        # The single output bit: dist/go-<version>.web
        yield join(self.dir, "dist", "go-%s.web" % _get_version())

    def make(self):
        assert sys.platform != "win32", "'webdist' not implemented for win32"
        build_dir = join(self.dir, "build", "webdist")
        zip_dir = join(build_dir, "go")
        if exists(build_dir):
            sh.rm(build_dir)
        os.makedirs(zip_dir)

        # Copy the webdist bits to the build tree.
        manifest = [
            "src/trentm.com/project-info.xml",
            "src/trentm.com/index.markdown",
            "LICENSE.txt",
            "lib/go.py",
            "src/trentm.com/logo.jpg",
        ]
        for src in manifest:
            sh.cp(src, dstdir=zip_dir, log=self.log.info)

        # Zip up the webdist contents (remove any stale output first).
        dist_dir = join(self.dir, "dist")
        bit = abspath(join(dist_dir, "go-%s.web" % _get_version()))
        if exists(bit):
            os.remove(bit)
        if not exists(dist_dir):
            os.makedirs(dist_dir)
        sh.run_in_dir("zip -r %s go" % bit, build_dir, self.log.info)
class pypi_upload(Task):
    """Upload release to pypi."""
    def make(self):
        # On Windows additionally build and upload a .exe installer.
        tasks = (sys.platform == "win32"
                 and "sdist --formats zip bdist_wininst upload"
                 or "sdist --formats zip upload")
        sh.run_in_dir("%spython setup.py %s" % (_setup_command_prefix(), tasks),
                      self.dir, self.log.debug)

        # NOTE(review): this sys.path tweak appears to be unused leftover —
        # the URL below is hard-coded and nothing from lib/ is imported here.
        sys.path.insert(0, join(self.dir, "lib"))
        url = "http://pypi.python.org/pypi/go/"
        import webbrowser
        webbrowser.open_new(url)
class googlecode_upload(Task):
    """Upload sdist to Google Code project site."""
    deps = ["sdist"]
    def make(self):
        # Prefer a copy of the upload helper sitting next to this Makefile.
        helper_in_cwd = exists(join(self.dir, "googlecode_upload.py"))
        if helper_in_cwd:
            sys.path.insert(0, self.dir)
        try:
            import googlecode_upload
        except ImportError:
            raise MkError("couldn't import `googlecode_upload` (get it from http://support.googlecode.com/svn/trunk/scripts/googlecode_upload.py)")
        if helper_in_cwd:
            del sys.path[0]

        ver = _get_version()
        sdist_path = join(self.dir, "dist", "go-%s.zip" % ver)
        status, reason, url = googlecode_upload.upload_find_auth(
            sdist_path,
            "go-tool", # project_name
            "go %s source package" % ver, # summary
            ["Featured", "Type-Archive"]) # labels
        if not url:
            raise MkError("couldn't upload sdist to Google Code: %s (%s)"
                          % (reason, status))
        self.log.info("uploaded sdist to `%s'", url)

        # Open the project page so the upload can be eyeballed.
        project_url = "http://code.google.com/p/go-tool/"
        import webbrowser
        webbrowser.open_new(project_url)
class trentm_com_upload(Task):
    """Upload webdist to trentm.com bits (in prep for updating trentm.com)."""
    deps = ["webdist"]
    def make(self):
        ver = _get_version()
        dist_dir = join(self.dir, "dist")
        paths = [join(dist_dir, "go-%s%s" % (ver, ext))
                 for ext in [".web"]]

        # Upload the bits.
        user = "trentm"
        host = "trentm.com"
        remote_dir = "~/data/bits/go/%s" % _get_version()
        # PuTTY's plink/pscp stand in for ssh/scp on Windows.
        if sys.platform == "win32":
            ssh = "plink"
            scp = "pscp -unsafe"
        else:
            ssh = "ssh"
            scp = "scp"
        sh.run("%s %s@%s 'mkdir -p %s'" % (ssh, user, host, remote_dir),
               self.log.info)
        for path in paths:
            sh.run("%s %s %s@%s:%s" % (scp, path, user, host, remote_dir),
                   self.log.info)
class todo(Task):
    """Print out todo's and xxx's in the docs area."""
    def make(self):
        # The split string literals ("TO""DO") keep this file's own source
        # from matching its own grep pattern.
        for path in _paths_from_path_patterns(['.'],
                excludes=[".svn", "*.pyc", "TO""DO.txt", "Makefile.py",
                          "*.png", "*.gif", "*.pprint", "*.prof",
                          "tmp-*"]):
            self._dump_pattern_in_path("TO\DO\\|XX\X", path)

        # Also count the top-level bullet items in TODO.txt.
        path = join(self.dir, "TO""DO.txt")
        todos = re.compile("^- ", re.M).findall(open(path, 'r').read())
        print "(plus %d TODOs from TO""DO.txt)" % len(todos)

    def _dump_pattern_in_path(self, pattern, path):
        # Shells out to grep; pattern is a BRE with escaped alternation.
        os.system("grep -nH '%s' '%s'" % (pattern, path))
class gow(Task):
    """Build the Windows 'gow.exe' launcher exe for DQSD integration."""
    def make(self):
        assert sys.platform == "win32", "can only build `gow.exe' on Windows"
        # Delegates the actual build to the nmake makefile in src/dqsd.
        sh.run_in_dir("nmake -f Makefile.win", join("src", "dqsd"))
class docs(Task):
    """Regenerate some doc bits from project-info.xml."""
    deps = ["src/trentm.com/project-info.xml"]
    results = [
        "README.txt",
        "src/trentm.com/index.markdown"
    ]
    def make(self):
        # Both outputs are generated by the external `projinfo` tool from the
        # single project-info.xml source.
        project_info_xml = join("src", "trentm.com", "project-info.xml")
        index_markdown = join("src", "trentm.com", "index.markdown")
        sh.run_in_dir("projinfo -f %s -R -o README.txt --force"
                      % project_info_xml, self.dir)
        sh.run_in_dir("projinfo -f %s --index-markdown -o %s --force"
                      % (project_info_xml, index_markdown), self.dir)
class check_version(Task):
    """grep for version strings in source code

    List all things that look like version strings in the source code.
    Used for checking that versioning is updated across the board.
    """
    sources = [
        "lib/go.py",
        "src/trentm.com/project-info.xml",
    ]
    def make(self):
        # BRE matching dotted triples like "1.2.3" or tuples like "1, 2, 3".
        pattern = r'[0-9]\+\(\.\|, \)[0-9]\+\(\.\|, \)[0-9]\+'
        sh.run_in_dir('grep -n "%s" %s' % (pattern, ' '.join(self.sources)),
                      self.dir)
#---- internal support stuff
# Recipe: paths_from_path_patterns (0.3.7)
def _should_include_path(path, includes, excludes):
"""Return True iff the given path should be included."""
from os.path import basename
from fnmatch import fnmatch
base = basename(path)
if includes:
for include in includes:
if fnmatch(base, include):
try:
log.debug("include `%s' (matches `%s')", path, include)
except (NameError, AttributeError):
pass
break
else:
try:
log.debug("exclude `%s' (matches no includes)", path)
except (NameError, AttributeError):
pass
return False
for exclude in excludes:
if fnmatch(base, exclude):
try:
log.debug("exclude `%s' (matches `%s')", path, exclude)
except (NameError, AttributeError):
pass
return False
return True
# Sentinel distinguishing "caller passed None" from "caller passed nothing".
_NOT_SPECIFIED = ("NOT", "SPECIFIED")
def _paths_from_path_patterns(path_patterns, files=True, dirs="never",
                              recursive=True, includes=[], excludes=[],
                              on_error=_NOT_SPECIFIED):
    """_paths_from_path_patterns([<path-patterns>, ...]) -> file paths

    Generate a list of paths (files and/or dirs) represented by the given path
    patterns.

        "path_patterns" is a list of paths optionally using the '*', '?' and
            '[seq]' glob patterns.
        "files" is boolean (default True) indicating if file paths
            should be yielded
        "dirs" is string indicating under what conditions dirs are
            yielded. It must be one of:
              never             (default) never yield dirs
              always            yield all dirs matching given patterns
              if-not-recursive  only yield dirs for invocations when
                                recursive=False
            See use cases below for more details.
        "recursive" is boolean (default True) indicating if paths should
            be recursively yielded under given dirs.
        "includes" is a list of file patterns to include in recursive
            searches.
        "excludes" is a list of file and dir patterns to exclude.
            (Note: This is slightly different than GNU grep's --exclude
            option which only excludes *files*.  I.e. you cannot exclude
            a ".svn" dir.)
        "on_error" is an error callback called when a given path pattern
            matches nothing:
                on_error(PATH_PATTERN)
            If not specified, the default is look for a "log" global and
            call:
                log.error("`%s': No such file or directory", path_pattern)
            Specify None to do nothing.

    Typically this is useful for a command-line tool that takes a list
    of paths as arguments. (For Unix-heads: the shell on Windows does
    NOT expand glob chars, that is left to the app.)

    Use case #1: like `grep -r`
      {files=True, dirs='never', recursive=(if '-r' in opts)}
        script FILE     # yield FILE, else call on_error(FILE)
        script DIR      # yield nothing
        script PATH*    # yield all files matching PATH*; if none,
                        # call on_error(PATH*) callback
        script -r DIR   # yield files (not dirs) recursively under DIR
        script -r PATH* # yield files matching PATH* and files recursively
                        # under dirs matching PATH*; if none, call
                        # on_error(PATH*) callback

    Use case #2: like `file -r` (if it had a recursive option)
      {files=True, dirs='if-not-recursive', recursive=(if '-r' in opts)}
        script FILE     # yield FILE, else call on_error(FILE)
        script DIR      # yield DIR, else call on_error(DIR)
        script PATH*    # yield all files and dirs matching PATH*; if none,
                        # call on_error(PATH*) callback
        script -r DIR   # yield files (not dirs) recursively under DIR
        script -r PATH* # yield files matching PATH* and files recursively
                        # under dirs matching PATH*; if none, call
                        # on_error(PATH*) callback

    Use case #3: kind of like `find .`
      {files=True, dirs='always', recursive=(if '-r' in opts)}
        script FILE     # yield FILE, else call on_error(FILE)
        script DIR      # yield DIR, else call on_error(DIR)
        script PATH*    # yield all files and dirs matching PATH*; if none,
                        # call on_error(PATH*) callback
        script -r DIR   # yield files and dirs recursively under DIR
                        # (including DIR)
        script -r PATH* # yield files and dirs matching PATH* and recursively
                        # under dirs; if none, call on_error(PATH*)
                        # callback
    """
    # NOTE(review): the mutable default args (includes=[], excludes=[]) are
    # never mutated here, so they are harmless — but None defaults would be
    # the safer idiom.
    from os.path import basename, exists, isdir, join
    from glob import glob

    # Uses `basestring` and hence requires Python 2.
    assert not isinstance(path_patterns, basestring), \
        "'path_patterns' must be a sequence, not a string: %r" % path_patterns
    GLOB_CHARS = '*?['

    for path_pattern in path_patterns:
        # Determine the set of paths matching this path_pattern.
        for glob_char in GLOB_CHARS:
            if glob_char in path_pattern:
                paths = glob(path_pattern)
                break
        else:
            # No glob chars: the pattern is a literal path (if it exists).
            paths = exists(path_pattern) and [path_pattern] or []
        if not paths:
            if on_error is None:
                pass
            elif on_error is _NOT_SPECIFIED:
                try:
                    log.error("`%s': No such file or directory", path_pattern)
                except (NameError, AttributeError):
                    pass
            else:
                on_error(path_pattern)
        for path in paths:
            if isdir(path):
                # 'includes' SHOULD affect whether a dir is yielded.
                if (dirs == "always"
                    or (dirs == "if-not-recursive" and not recursive)
                   ) and _should_include_path(path, includes, excludes):
                    yield path

                # However, if recursive, 'includes' should NOT affect
                # whether a dir is recursed into. Otherwise you could
                # not:
                #   script -r --include="*.py" DIR
                if recursive and _should_include_path(path, [], excludes):
                    for dirpath, dirnames, filenames in os.walk(path):
                        dir_indeces_to_remove = []
                        for i, dirname in enumerate(dirnames):
                            d = join(dirpath, dirname)
                            if dirs == "always" \
                               and _should_include_path(d, includes, excludes):
                                yield d
                            # Prune excluded dirs so os.walk skips them.
                            if not _should_include_path(d, [], excludes):
                                dir_indeces_to_remove.append(i)
                        # Delete in reverse so earlier indices stay valid.
                        for i in reversed(dir_indeces_to_remove):
                            del dirnames[i]
                        if files:
                            for filename in sorted(filenames):
                                f = join(dirpath, filename)
                                if _should_include_path(f, includes, excludes):
                                    yield f
            elif files and _should_include_path(path, includes, excludes):
                yield path
# Cached version string; populated lazily by _get_version().
_g_version = None

def _get_version():
    """Return the go-tool version string (memoized after the first call).

    Temporarily prepends ./lib to sys.path so lib/go.py can be imported.
    """
    global _g_version
    if _g_version is not None:
        return _g_version
    lib_dir = join(dirname(__file__), "lib")
    sys.path.insert(0, lib_dir)
    try:
        import go
        _g_version = go.__version__
    finally:
        del sys.path[0]
    return _g_version
def _setup_command_prefix():
prefix = ""
if sys.platform == "darwin":
# http://forums.macosxhints.com/archive/index.php/t-43243.html
# This is an Apple customization to `tar` to avoid creating
# '._foo' files for extended-attributes for archived files.
prefix = "COPY_EXTENDED_ATTRIBUTES_DISABLE=1 "
return prefix
| mit |
sylcastaing/Olaf-web | src/app/app.module.ts | 2444 | import { BrowserModule } from '@angular/platform-browser';
import { NgModule, APP_INITIALIZER, LOCALE_ID } from '@angular/core';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { XHRBackend, RequestOptions, HttpModule } from '@angular/http';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { Router } from '@angular/router';
import { AppComponent } from './app.component';
import { routing } from './app.routing';
import { MaterialModule, MdSnackBar, MdDatepickerModule, MdNativeDateModule } from '@angular/material';
import { ChartModule } from 'angular2-highcharts';
import { HighchartsStatic } from 'angular2-highcharts/dist/HighchartsService';
import { httpFactory, chartFactory } from './_factories';
import { AuthService, HttpService, DatasService, WeatherService, UserService, DialogService } from './_services';
import { AuthGuard, AdminGuard } from './_guards';
import { CameraComponent } from './camera';
import { WeatherComponent } from './weather';
import { ApplicationsComponent } from './applications';
import { LoginComponent } from './login';
import { ChangePasswordComponent } from './user';
import { UsersComponent, AddUserComponent } from './admin';
import { ConfirmDialogComponent } from './dialog';
import { LoaderComponent, LoaderService } from './loader';
import 'hammerjs';
// Root module: wires together the app's components, Material/chart modules,
// routing, and service/guard providers.
@NgModule({
  declarations: [
    AppComponent,
    WeatherComponent,
    CameraComponent,
    ApplicationsComponent,
    LoginComponent,
    ChangePasswordComponent,
    UsersComponent,
    AddUserComponent,
    ConfirmDialogComponent,
    LoaderComponent
  ],
  imports: [
    BrowserModule,
    BrowserAnimationsModule,
    FormsModule,
    ReactiveFormsModule,
    MaterialModule,
    HttpModule,
    routing,
    ChartModule,
    MdDatepickerModule,
    MdNativeDateModule
  ],
  providers: [
    AuthGuard,
    AdminGuard,
    AuthService,
    DatasService,
    WeatherService,
    UserService,
    DialogService,
    LoaderService,
    // Custom HttpService built by a factory; its deps suggest it hooks into
    // the router and snack bar (see ./_factories) — presumably for auth
    // redirects and user-facing messages.
    {
      provide: HttpService,
      useFactory: httpFactory,
      deps: [XHRBackend, RequestOptions, Router, MdSnackBar]
    },
    // Highcharts instance supplied through a factory.
    {
      provide: HighchartsStatic,
      useFactory: chartFactory
    },
    // French locale for Angular's locale-aware pipes.
    {
      provide: LOCALE_ID,
      useValue: 'fr-FR'},
  ],
  // Components instantiated dynamically (dialogs), not via templates.
  entryComponents: [
    ChangePasswordComponent,
    AddUserComponent,
    ConfirmDialogComponent
  ],
  bootstrap: [AppComponent]
})
export class AppModule { }
Innmind/AMQP | src/Model/Count.php | 412 | <?php
declare(strict_types = 1);
namespace Innmind\AMQP\Model;
use Innmind\AMQP\Exception\DomainException;
/**
 * Non-negative integer value object (e.g. an AMQP count field).
 */
final class Count
{
    private int $value;

    /**
     * @throws DomainException When the given value is negative
     */
    public function __construct(int $value)
    {
        if ($value >= 0) {
            $this->value = $value;

            return;
        }

        throw new DomainException((string) $value);
    }

    public function toInt(): int
    {
        return $this->value;
    }
}
| mit |
JasonFruit/doremi | doremi.py | 2419 | #!/usr/bin/env python
from __future__ import print_function
import codecs
import argparse
from doremi.doremi_parser import DoremiParser
from doremi.lyric_parser import Lyric, LyricParser
import os, uuid
# ---------------------------------------------------------------------------
# Command-line driver: parse arguments, load the optional lyric file, convert
# the Doremi source to Lilypond, and either write the .ly file or render a
# PDF with lilypond.
# ---------------------------------------------------------------------------

# set up argument parser and use it
p = argparse.ArgumentParser()

p.add_argument("infile",
               help="the Doremi file to process")
p.add_argument("outfile",
               help="the Lilypond output file")
p.add_argument("--key", "-k",
               help='the key for the output file (e.g. "A major", "c minor", "gis minor")')
p.add_argument("--shapes", "-s",
               help='use shape notes (i.e. "round" (default), "aikin", "sacredharp", "southernharmony", "funk", "walker")')
p.add_argument("--octaves", "-o", help="transpose up OCTAVES octaves")
p.add_argument("--lyricfile",
               "-l",
               help="the file containing the lyrics")
p.add_argument("--template",
               "-t",
               help='the output template name, e.g. "default", "sacred-harp"')

args = p.parse_args()

# octave offset defaults to no transposition
octave_offset = int(args.octaves) if args.octaves else 0

# try to load the lyric file; if none is specified, use an empty Lyric
lyric = Lyric()

if args.lyricfile:
    try:
        # use a context manager so the handle is closed promptly (the old
        # code leaked it); catch IOError instead of FileNotFoundError --
        # FileNotFoundError does not exist on Python 2 (which this file's
        # `print_function` import suggests it still targets), while IOError
        # works on both interpreters (it aliases OSError on Python 3)
        with codecs.open(args.lyricfile, "r", "utf-8") as lyricfile:
            text = lyricfile.read()
        lyric = LyricParser(text).convert()
    except IOError:
        raise Exception("Unable to open lyric file '%s'." % args.lyricfile)

# correct a common misspelling
if args.shapes and args.shapes.lower() == "aiken":
    args.shapes = "aikin"

if not args.template:
    args.template = "default"

# parse the Doremi file and convert it to the internal representation
lc = DoremiParser(args.infile)
tune = lc.convert()

# fall back to the tune's own key when none was given on the command line
if not args.key:
    args.key = tune.key

# convert it to lilypond and write to the output file
ly = tune.to_lilypond(args.key.lower(),
                      octave_offset=octave_offset,
                      shapes=args.shapes,
                      lyric=lyric,
                      template=args.template)

if args.outfile.endswith(".pdf"):
    # write the Lilypond source to a uniquely-named temp file, then let
    # lilypond render the PDF next to the requested output name
    fn = "%s.ly" % uuid.uuid4()
    with codecs.open("/tmp/%s" % fn, "w", "utf-8") as lyfile:
        lyfile.write(ly)
    args.outfile = args.outfile[:-4]
    # NOTE(review): os.system interpolates the user-supplied output path into
    # a shell command; paths containing spaces or shell metacharacters will
    # break -- consider subprocess with an argument list
    os.system("lilypond -o %s /tmp/%s" % (args.outfile, fn))
elif args.outfile.endswith(".ly"):
    with codecs.open(args.outfile, "w", "utf-8") as f:
        f.write(ly)
| mit |
HashTagDotNet/HashTag-Net4 | HashTag.RESTClient/HttpRequest.cs | 12451 | using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Mime;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using HashTag.Common;
namespace HashTag.HttpClient
{
public partial class HttpRequest
{
private HttpSession _restSession;
public HttpRequest()
{
}
internal HttpRequest(HttpSession restSession)
{
_restSession = restSession;
SendingJsonContent();
ExpectingJsonContent();
}
public HttpRequest(HttpConnection restConnection)
{
this.restConnection = restConnection;
}
/// <summary>
/// Cookies to send
/// </summary>
public NameValueCollection Cookies { get; set; }
/// <summary>
/// Headers to send
/// </summary>
public NameValueCollection Headers { get; set; }
/// <summary>
/// Url query string parameters to send
/// </summary>
public NameValueCollection Parameters { get; set; }
/// <summary>
/// Form fields to send (if Form content type)
/// </summary>
public NameValueCollection FormFields { get; set; }
internal string contentType { get; set; }
internal CancellationToken? cancellationToken { get; set; }
/// <summary>
/// Actual or textual representation of body sent with request
/// </summary>
internal string BodyString { get; set; }
[JsonIgnore]
internal object Body { get; set; }
HttpRequestMessage _httpRequest;
private HttpConnection restConnection;
/// <summary>
/// Actual request used by .Net framework to send to client
/// </summary>
[JsonIgnore]
public HttpRequestMessage NetRequest
{
get
{
if (_httpRequest == null) _httpRequest = new HttpRequestMessage();
return _httpRequest;
}
}
/// <summary>
/// Set content type to 'application/x-www-form-urlencoded'
/// </summary>
/// <returns></returns>
public HttpRequest SendingFormContent()
{
this.contentType = MediaTypeNames.Form;
return this;
}
/// <summary>
/// (default) Set content to 'application/json'
/// </summary>
/// <returns></returns>
public HttpRequest SendingJsonContent()
{
this.contentType = MediaTypeNames.Json;
return this;
}
/// <summary>
/// Set content to 'application/xml'
/// </summary>
/// <returns></returns>
public HttpRequest SendingXmlContent()
{
this.contentType = MediaTypeNames.Xml;
return this;
}
/// <summary>
/// Set Accept header to 'application/xml'
/// </summary>
/// <returns></returns>
public HttpRequest ExpectingXmlContent()
{
NetRequest.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue(MediaTypeNames.Xml));
return this;
}
/// <summary>
/// Set Accept header to 'application/json'
/// </summary>
/// <returns></returns>
public HttpRequest ExpectingJsonContent()
{
NetRequest.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue(MediaTypeNames.Json));
return this;
}
/// <summary>
/// Set Accept header to 'text/plain'
/// </summary>
/// <returns></returns>
public HttpRequest ExpectingTextContent()
{
NetRequest.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue(MediaTypeNames.Text));
return this;
}
/// <summary>
/// Clear default Accept type
/// </summary>
/// <returns></returns>
public HttpRequest ExpectingNullContent()
{
NetRequest.Headers.Accept.Clear();
return this;
}
/// <summary>
/// Add/updates a cookie that wil be sent with request
/// </summary>
/// <param name="name"></param>
/// <param name="value"></param>
/// <returns></returns>
public HttpRequest SetCookie(string name, string value)
{
if (Cookies == null) Cookies = new NameValueCollection();
Cookies[name] = value;
return this;
}
/// <summary>
/// Adds or updates a header that will be sent with request.
/// The header collection is created lazily, consistent with
/// <see cref="SetCookie"/>, <see cref="SetQueryString"/> and <see cref="SetForm"/>;
/// previously, calling this before any header existed threw a
/// NullReferenceException because <c>Headers</c> is never initialized elsewhere.
/// </summary>
/// <param name="name">Header name</param>
/// <param name="value">Header value</param>
/// <returns>This request, for fluent chaining</returns>
public HttpRequest SetHeader(string name, string value)
{
    if (Headers == null) Headers = new NameValueCollection();
    Headers[name] = value;
    return this;
}
/// <summary>
/// Adds a query string parameter
/// </summary>
/// <param name="name"></param>
/// <param name="value"></param>
/// <returns></returns>
public HttpRequest SetQueryString(string name, string value = null)
{
if (Parameters == null) Parameters = new NameValueCollection();
Parameters[name] = value;
return this;
}
/// <summary>
/// Adds a form field for form based requests
/// </summary>
/// <param name="name"></param>
/// <param name="value"></param>
/// <returns></returns>
public HttpRequest SetForm(string name, string value)
{
if (FormFields == null) FormFields = new NameValueCollection();
FormFields[name] = value;
return this;
}
/// <summary>
/// Set string body for request
/// </summary>
/// <param name="body"></param>
/// <returns></returns>
public HttpRequest SetBody(string body)
{
this.BodyString = body;
return this;
}
/// <summary>
/// Set object body for request
/// </summary>
/// <param name="body"></param>
/// <returns></returns>
public HttpRequest SetBody(object body)
{
this.Body = body;
return this;
}
/// <summary>
/// Set HTTP method. Usually not used.
/// </summary>
/// <param name="method"></param>
/// <returns></returns>
public HttpRequest SetMethod(HttpMethod method)
{
this.NetRequest.Method = method;
return this;
}
internal string RequestAction { get; set; }
internal HttpRequest Build(string baseUrl)
{
baseUrl = Extensions.PathCombine(baseUrl,RequestAction);
if (Parameters != null && Parameters.Count > 0)
{
var queryString = "";
foreach (string param in Parameters.Keys)
{
if (queryString.Length != 0)
{
queryString += "&";
}
queryString += string.Format("{0}={1}", param, Parameters[param]);
}
if (queryString.Length > 0)
{
baseUrl = baseUrl + "?" + queryString;
}
}
this.NetRequest.RequestUri = new Uri(baseUrl);
this.NetRequest.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue(MediaTypeNames.Json));
if (!string.IsNullOrEmpty(this.BodyString))
{
NetRequest.Content = new StringContent(BodyString, System.Text.Encoding.UTF8, MediaTypeNames.Json);
}
else
{
if (this.Body != null)
{
switch (this.contentType)
{
case MediaTypeNames.Json:
this.BodyString = JsonConvert.SerializeObject(Body);
NetRequest.Content = new StringContent(BodyString, System.Text.Encoding.UTF8, this.contentType);
break;
case MediaTypeNames.Xml:
this.BodyString = Serialize.To.Xml(this.Body);
NetRequest.Content = new StringContent(BodyString, System.Text.Encoding.UTF8, this.contentType);
break;
case MediaTypeNames.Form:
this.BodyString = this.FormFields.ToFormUnencodedString();
NetRequest.Content = new FormUrlEncodedContent(this.FormFields.ToIEnumerable());
break;
case MediaTypeNames.Text:
this.BodyString = Serialize.To.Xml(this.Body);
NetRequest.Content = new StringContent(BodyString, System.Text.Encoding.UTF8, this.contentType);
break;
}
}
}
if (Headers != null)
{
for (int x = 0; x < Headers.Count; x++)
{
var key = Headers.Keys[x];
var value = Headers[x];
if (key.IsContentHeader())
{
if (NetRequest.Content != null)
{
NetRequest.Content.Headers.Add(key, value);
}
else
{
throw new InvalidOperationException($"Header: '{key}' is a content header and there is no content specified for request");
}
}
else
{
NetRequest.Headers.Add(key, value);
}
}
}
return this;
}
internal void setCookies(CookieContainer container, Uri cookieUri)
{
if (Cookies == null || Cookies.Count == 0) return;
if (container == null) container = new CookieContainer();
if (NetRequest == null || NetRequest.RequestUri == null)
{
throw new InvalidOperationException("Request's 'HttpRequest.Uri' property must be set before setting cookies on the request");
}
for (int x = 0; x < Cookies.Count; x++)
{
container.Add(new Cookie()
{
Domain = NetRequest.RequestUri.Host,
Path = "/",
Name = Cookies.Keys[x],
Value = Cookies[x]
});
}
}
public override string ToString()
{
var sb = new StringBuilder();
if (this.NetRequest != null)
{
sb.AppendLine("{0} {1} HTTP/{2}", this.NetRequest.Method.ToString(), this.NetRequest.RequestUri.PathAndQuery, this.NetRequest.Version.ToString());
sb.AppendLine("{0}: {1}", "Host", this.NetRequest.RequestUri.Host);
foreach (var hdr in this.NetRequest.Headers)
{
var key = hdr.Key;
var values = string.Join(";", hdr.Value.Select(v => v).ToArray());
sb.AppendLine("{0}: {1}", key, values);
}
if (this.NetRequest?.Content?.Headers != null)
{
foreach (var hdr in this.NetRequest.Content.Headers)
{
var key = hdr.Key;
var values = string.Join(";", hdr.Value.Select(v => v).ToArray());
sb.AppendLine("{0}: {1}", key, values);
}
}
sb.AppendLine();
if (this.BodyString != null)
{
sb.AppendLine(this.BodyString);
}
else
{
sb.AppendLine("--no body--");
}
}
return sb.ToString();
}
}
}
| mit |
cssho/simple-chat | SimpleChatClient/index.js | 2453 | window.$ = require('jquery');
global.jQuery = $;
var setting = require('./setting').get();
var Util = require('./util');
var socket = require('socket.io-client')(setting.server);
var WindowsBalloon = require('node-notifier').WindowsBalloon;
var notifier = new WindowsBalloon({
withFallback: false,
customPath: void 0
});
// Show a Windows balloon notification; clicking it refocuses the app window.
var createNotification = function (title, message) {
    notifier.notify({
        title: title,
        message: message,
        sound: false,
        time: 20000,
        wait: true
    }, function (error, response) {
        console.log(response);
    });
    // Attach the click handler only once: the original code re-registered it
    // on every notification, accumulating EventEmitter listeners without bound.
    if (!createNotification.clickBound) {
        createNotification.clickBound = true;
        notifier.on('click', function (notifierObject, options) {
            window.focus();
        });
    }
}
// Wire up the UI once the DOM is ready: message sending, incoming socket
// events, the "new message" scroll notice, and the member list.
$(function () {
    // Ctrl+Enter in the textarea sends the message.
    $('#message').keydown(function (e) {
        if (e.ctrlKey && e.keyCode == 13) {
            sendMessage()
        }
    });
    $('button').on('click', function () {
        sendMessage();
    });
    // The backlog is delivered once; jump straight to the newest entry.
    socket.once('log', function (data) {
        $('#log').append(data);
        window.scroll(0, $(document).height());
    });
    socket.on('msg', function (data) {
        recieveMessage(data);
    });
    // Clear the unread notice once the user scrolls back to the bottom.
    $(window).on('scroll', function () {
        if ($('#space').hasClass('notice') && checkBottom()) {
            $('#space').removeClass('notice');
        }
    });
    // Announce ourselves, then mirror join/leave events into the member list.
    socket.emit('add', setting.username);
    socket.on('add', function (data) {
        // Highlight our own entry differently from other members.
        if (document.getElementById(data.id) == null) {
            $('#member').append($('<span class="label username" id="' + data.id + '">' + data.name + '</span>')
                .addClass(data.id == socket.id ? 'label-primary' : 'label-success'));
        }
    });
    socket.on('remove', function (id) {
        if (document.getElementById(id) != null) {
            $('#' + id).remove();
        }
    });
});
// Read the composed message, normalize all line breaks to <br>, and emit it
// prefixed with the local username; then clear the input.
function sendMessage() {
    var text = $('#message').val();
    if (!text) {
        return;
    }
    var normalized = text
        .replace(/\r\n/g, '<br>')
        .replace(/(\n|\r)/g, '<br>');
    socket.emit('msg', setting.username + ' : <br>' + normalized);
    $('#message').val('');
}
// True when the viewport is scrolled all the way to the document bottom.
function checkBottom() {
    var viewportBottom = $(window).height() + $(window).scrollTop();
    return viewportBottom == $(document).height();
}
// Append an incoming message (with timestamp) to the log. If the view was
// already at the bottom, keep it pinned there; otherwise flag the unread
// notice. Finally raise a desktop notification.
// (Name kept as-is -- it is referenced by the socket wiring above.)
function recieveMessage(data) {
    var wasAtBottom = checkBottom();
    var stamp = '<p align="right" style="font-size:80%">' + Util.getDate() + '</p><hr>';
    $("#log").append(data + stamp);
    if (wasAtBottom) {
        window.scroll(0, $(document).height());
    } else {
        $('#space').addClass('notice');
    }
    createNotification('Simple Chat', data.replace('<br>', ' '));
}
Karasiq/scalajs-highcharts | src/main/scala/com/highcharts/config/SeriesIkhStatesSelectAnimation.scala | 1273 | /**
* Automatically generated file. Please do not edit.
* @author Highcharts Config Generator by Karasiq
* @see [[http://api.highcharts.com/highcharts]]
*/
package com.highcharts.config
import scalajs.js, js.`|`
import com.highcharts.CleanJsObject
import com.highcharts.HighchartsUtils._
/**
* @note JavaScript name: <code>series<ikh>-states-select-animation</code>
*/
@js.annotation.ScalaJSDefined
class SeriesIkhStatesSelectAnimation extends com.highcharts.HighchartsGenericObject {
/**
* <p>The duration of the hover animation in milliseconds. By
* default the hover state animates quickly in, and slowly back
* to normal.</p>
* @since 5.0.8
*/
val duration: js.UndefOr[Double] = js.undefined
}
object SeriesIkhStatesSelectAnimation {
/**
* @param duration <p>The duration of the hover animation in milliseconds. By. default the hover state animates quickly in, and slowly back. to normal.</p>
*/
def apply(duration: js.UndefOr[Double] = js.undefined): SeriesIkhStatesSelectAnimation = {
val durationOuter: js.UndefOr[Double] = duration
com.highcharts.HighchartsGenericObject.toCleanObject(new SeriesIkhStatesSelectAnimation {
override val duration: js.UndefOr[Double] = durationOuter
})
}
}
| mit |
inveniosoftware/invenio-indexer | invenio_indexer/__init__.py | 6623 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Record indexer for Invenio.
Invenio-Indexer is responsible for sending records for indexing in
Elasticsearch so that the records can be searched. Invenio-Indexer can either
send the records in bulk or individually. Bulk indexing is far superior in
performance if multiple records needs to be indexed at the price of delay. Bulk
indexing works by queuing records in a message queue, which is then consumed
and sent to Elasticsearch.
Initialization
--------------
First create a Flask application:
>>> from flask import Flask
>>> app = Flask('myapp')
>>> app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
You initialize Indexer like a normal Flask extension, however Invenio-Indexer
is dependent on Invenio-Records and Invenio-Search so you need to initialize
these extensions first:
>>> from invenio_db import InvenioDB
>>> ext_db = InvenioDB(app)
>>> from invenio_search import InvenioSearch
>>> ext_search = InvenioSearch(app)
>>> from invenio_records import InvenioRecords
>>> ext_records = InvenioRecords(app)
We now initialize Invenio-Indexer:
>>> from invenio_indexer import InvenioIndexer
>>> ext_indexer = InvenioIndexer(app)
In order for the following examples to work, you need to work within an Flask
application context so let's push one:
>>> ctx = app.app_context()
>>> ctx.push()
Also, for the examples to work we need to create the database tables and
Elasticsearch indexes (note, in this example we use an in-memory SQLite
database):
>>> from invenio_db import db
>>> db.create_all()
Indexing a record
-----------------
Let's start by creating a record that we would like to index:
>>> from invenio_db import db
>>> from invenio_records.api import Record
>>> record = Record.create({'title': 'A test'})
>>> db.session.commit()
Note, that you are responsible for ensuring that the record is committed to the
database, prior to sending it for indexing.
Now, let's index the record:
>>> from invenio_indexer.api import RecordIndexer
>>> indexer = RecordIndexer()
>>> res = indexer.index(record)
By default, records are sent to the Elasticsearch index defined by the
configuration variable ``INDEXER_DEFAULT_INDEX``. If the record however has a
``$schema`` attribute, the index is automatically determined from this. E.g.
the following record:
>>> r = Record({
... '$schema': 'http://example.org/records/record-v1.0.0.json'})
Would be indexed in the following Elasticsearch index/doctype:
>>> index, doc_type = indexer.record_to_index(record)
Bulk indexing
-------------
If you have many records to index, bulk indexing is far superior in speed to
single record indexing. Bulk indexing requires the existence of a queue on your
broker, so since this is the very first time we send any records for bulk
indexing, we will have to create this queue:
>>> from celery.messaging import establish_connection
>>> queue = app.config['INDEXER_MQ_QUEUE']
>>> with establish_connection() as conn:
... queue(conn).declare()
'indexer'
We can now send a record for bulk indexing:
>>> indexer.bulk_index([str(r.id)])
Above will send the record id to the queue on your broker and wait for the bulk
indexer to execute. This is normally done in the background by a Celery task
which can be started from the command line like e.g.:
.. code-block:: console
$ <instance cmd> index run
Note, you can achieve much higher indexing speeds, by having multiple processes
running ``process_bulk_queue`` concurrently. This can be achieved with e.g.:
.. code-block:: console
# Send 8 Celery tasks to bulk index messages from the "indexer" queue
$ <instance cmd> index run -d -c 8
Customizing record indexing
---------------------------
Record indexing can easily be customized using either:
* **JSONRef:** By default, all JSONRefs for each record is resolved prior to
indexing.
* **Signals:** Before each record is indexed the signal ``before_record_index``
is sent, in order to allow modification of the record. The signal can be used
to e.g. remove sensitive data and/or add extra data to the record.
JSONRef
~~~~~~~
JSONRefs inside the record are by default resolved prior to indexing the
record. For instance the value for the ``rel`` key will be replaced with the
referenced JSON object:
>>> r = Record.create({
... 'title': 'A ref',
... 'rel': {'$ref': 'http://dx.doi.org/10.1234/foo'}})
See Invenio-Records documentation for how to customize the JSONRef resolver
to resolve references locally. The JSONRefs resolving works on all indexed
records, and can be switched off using the configuration::
>>> app.config['INDEXER_REPLACE_REFS'] = False
Signal
~~~~~~
First write a signal receiver. In the example below, we remove the attribute
``_internal`` if it exists in the record:
>>> def indexer_receiver(sender, json=None, record=None,
... index=None, doc_type=None, arguments=None, **kwargs):
... if '_internal' in json:
... del json['_internal']
The receiver takes various parameters besides the sender (which is the Flask
application):
* ``json``: JSON is a Python dictionary dump of the record, and the actual
data that will be sent to the index. Modify this dictionary in order to
change the document.
* ``record``: The record from which the JSON was dumped.
* ``index``: The Elasticsearch index in which the record will be indexed.
* ``doc_type``: The Elasticsearch document type for the record.
* ``arguments``: The arguments that will be passed to the ``index()`` call.
Connecting the receiver to the signal is as simple as (do this e.g. in your
extension's ``init_app`` method):
>>> from invenio_indexer.signals import before_record_index
>>> res = before_record_index.connect(indexer_receiver, sender=app)
Receivers can be useful if you have rules that apply to all of your records.
If specific types of records have different rules (e.g. in case you had
"records" and "authors") you can use the
``before_record_index.dynamic_connect()`` function as so:
>>> # Only be applied to documents sent to the "authors-v1.0.0" index
>>> res = before_record_index.dynamic_connect(
... indexer_receiver, sender=app, index='authors-v1.0.0')
"""
from __future__ import absolute_import, print_function
from .ext import InvenioIndexer
from .proxies import current_record_to_index
from .version import __version__
__all__ = ('__version__', 'InvenioIndexer', 'current_record_to_index')
| mit |
punker76/code-samples | MahAppsMetroHamburgerMenu/HamburgerMenuApp.Core/SelectedItemToContentConverter.cs | 776 | using System;
using System.Globalization;
using System.Linq;
using System.Windows.Data;
namespace HamburgerMenuApp.Core
{
/// <summary>
/// Picks the content to display from two bound selections: the selected
/// menu item wins, falling back to the selected option item.
/// </summary>
public class SelectedItemToContentConverter : IMultiValueConverter
{
    public object Convert(object[] values, Type targetType, object parameter, CultureInfo culture)
    {
        // values[0] = selected menu item, values[1] = selected option item
        if (values == null || values.Length < 2)
        {
            return null;
        }

        return values[0] ?? values[1];
    }

    public object[] ConvertBack(object value, Type[] targetTypes, object parameter, CultureInfo culture)
    {
        // Conversion back is unsupported: answer Binding.DoNothing per target.
        var result = new object[targetTypes.Length];
        for (var i = 0; i < targetTypes.Length; i++)
        {
            result[i] = Binding.DoNothing;
        }

        return result;
    }
}
} | mit |
actaeon/resourced | executors/pagerduty.go | 1172 | package executors
import (
"encoding/json"
"github.com/dselans/pagerduty"
)
// init registers the PagerDuty executor factory under the "PagerDuty" key.
func init() {
	Register("PagerDuty", NewPagerDuty)
}
// NewPagerDuty builds a PagerDuty executor with its Data map pre-allocated.
func NewPagerDuty() IExecutor {
	return &PagerDuty{Data: make(map[string]interface{})}
}
type PagerDuty struct {
Base
Data map[string]interface{}
ServiceKey string
Description string
IncidentKey string
}
// Run submits a PagerDuty trigger event when the executor's conditions and
// thresholds allow it, and records the API response details on pd.Data.
// (The previous comment was a copy-paste from another executor.)
func (pd *PagerDuty) Run() error {
	pd.Data["Conditions"] = pd.Conditions

	if pd.IsConditionMet() && pd.LowThresholdExceeded() && !pd.HighThresholdExceeded() {
		event := pagerduty.NewTriggerEvent(pd.ServiceKey, pd.Description)
		if pd.IncidentKey != "" {
			event.IncidentKey = pd.IncidentKey
		}

		response, statusCode, err := pagerduty.Submit(event)
		if err != nil {
			// Bail out before touching response: it may be nil on error, and
			// the original code dereferenced it first, risking a panic.
			return err
		}

		pd.Data["IncidentKey"] = response.IncidentKey
		pd.Data["Status"] = response.Status
		pd.Data["StatusCode"] = statusCode
		pd.Data["Message"] = response.Message
		pd.Data["Errors"] = response.Errors
	}

	return nil
}
// ToJson serializes the executor's Data map to JSON bytes.
func (pd *PagerDuty) ToJson() ([]byte, error) {
	return json.Marshal(pd.Data)
}
| mit |
TombaX/BlogRails5 | app/controllers/pictures_controller.rb | 520 | class PicturesController < ApplicationController
# POST /pictures: persist the uploaded image and answer with its URL and id.
def create
  @picture = Picture.new(image_params)
  @picture.save
  respond_to do |format|
    format.json { render :json => { url: @picture.image.url(:large), picture_id: @picture.id } }
  end
end

# DELETE /pictures/:id: remove the picture record.
def destroy
  picture = Picture.find(params[:id])
  picture.destroy
  respond_to do |format|
    format.json { render json: { status: :ok } }
  end
end

private

# Strong parameters for uploads. Fixed: the original called `params_require`,
# which is not a method and raised NoMethodError on every create request;
# the Rails API is `params.require(...).permit(...)`.
def image_params
  params.require(:picture).permit(:image)
end
end | mit |
acmumn/payacm | main.go | 1063 | package main
import (
"fmt"
"log"
"net/http"
"os"
"github.com/gin-gonic/gin"
stripe "github.com/stripe/stripe-go"
)
// main reads configuration from the environment (listen port, Stripe keys,
// SMTP credentials), wires the Gin routes, and blocks serving HTTP.
func main() {
	// Get port to serve on.
	port := os.Getenv("PORT")
	if port == "" {
		log.Println("Using default port of 3000")
		port = "3000"
	}
	// Get the stripe key.
	stripe.Key = getenv("STRIPE_SECRET_KEY")
	// Fail out early if other variables are not defined.
	getenv("STRIPE_PUBLIC_KEY")
	getenv("SMTP_FROM")
	getenv("SMTP_HOST")
	getenv("SMTP_PASS")
	getenv("SMTP_PORT")
	getenv("SMTP_USER")
	// Create the router.
	r := gin.New()
	r.Use(gin.Logger())
	// Connect callbacks.
	r.StaticFile("/", "static/index.html")
	r.POST("/", pay)
	r.StaticFile("/static/main.js", "static/main.js")
	// The client fetches the Stripe publishable key at runtime.
	r.GET("/stripeKey.json", func(c *gin.Context) {
		c.JSON(http.StatusOK, getenv("STRIPE_PUBLIC_KEY"))
	})
	// Serve.
	if err := http.ListenAndServe(":"+port, r); err != nil {
		log.Fatal(err)
	}
}
// getenv returns the value of the required environment variable key,
// terminating the process via log.Fatal when it is unset or empty.
// (fmt.Sprintf is kept -- replacing it with log.Fatalf would leave the
// file's fmt import unused.)
func getenv(key string) string {
	val := os.Getenv(key)
	if val == "" {
		log.Fatal(fmt.Sprintf("Did not provide %s", key))
	}
	return val
}
| mit |
Pulpmedia/PulpmediaNgHttpBundle | DependencyInjection/Configuration.php | 1350 | <?php
namespace Pulpmedia\NgHttpBundle\DependencyInjection;
use Symfony\Component\Config\Definition\Builder\TreeBuilder;
use Symfony\Component\Config\Definition\ConfigurationInterface;
/**
* This is the class that validates and merges configuration from your app/config files.
*
* To learn more see {@link http://symfony.com/doc/current/cookbook/bundles/configuration.html}
*/
/**
 * Validates and merges the "pulpmedia_ng_http" app configuration.
 */
class Configuration implements ConfigurationInterface
{
    /**
     * {@inheritdoc}
     */
    public function getConfigTreeBuilder()
    {
        $builder = new TreeBuilder();
        $root = $builder->root('pulpmedia_ng_http');

        // Single "response.format" option selecting the response serialization.
        $root
            ->children()
                ->arrayNode("response")
                    ->children()
                        ->scalarNode("format")
                            ->info("Default format of responses (json or xml)")
                            ->defaultValue("json")
                        ->end()
                    ->end()
                ->end()
            ->end()
        ->end();

        return $builder;
    }
}
| mit |
ray6080/JavaPractice | src/main/java/cn/edu/ruc/dbiir/lambda/LambdaTest.java | 1154 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.edu.ruc.dbiir.lambda;
import java.util.function.Function;
/**
* @author jelly.guodong.jin@gmail.com
*/
public class LambdaTest {

    public static void main(String[] args) {
        // Bind the lambdas to named variables before handing them over.
        NameSupplier supplier = () -> "Jelly";
        Function<String, String> decorator = name -> "Mr. " + name;
        print(supplier, decorator);
    }

    // Applies the decorator to the supplied name and prints the result.
    private static void print(NameSupplier supplier, Function<String, String> decorator) {
        System.out.println(decorator.apply(supplier.g()));
    }

    // Single-abstract-method supplier of a name, usable as a lambda target.
    @FunctionalInterface
    private interface NameSupplier {
        String g();

        // Unused default method, kept for parity with the original interface.
        default void say(String name) {
            System.out.println("");
        }
    }
}
| mit |
Jereq/MC-TestMod | src/main/java/se/jereq/testmod/init/ModAchievements.java | 358 | package se.jereq.testmod.init;
import net.minecraft.stats.Achievement;
import net.minecraft.stats.AchievementList;
// Holds this mod's custom achievements.
public class ModAchievements {
    // "Test" achievement at grid position (-1, 2), iconed with the mod's test
    // block and gated behind vanilla openInventory. func_180788_c() is an
    // obfuscated builder call -- presumably registers the stat; confirm
    // against the MCP mappings for this Minecraft version.
    public static final Achievement test = new Achievement("achievement.testmod.test", "testmod.test", -1, 2, ModBlocks.testBlock, AchievementList.openInventory).func_180788_c();

    // No-op: referencing this class forces the static initializer above to run.
    public static void init() {}
}
| mit |
spywhere/Javatar | commands/creates/__init__.py | 58 | from .create_class import *
from .create_package import *
| mit |
elfrog/apib-editor | src/editor/editor-settings-view.js | 2009 | import React from 'react';
import PropTypes from 'prop-types';
import Modal from './components/modal';
import PropertyView from './components/property-view';
import Text from './components/property-controls/text';
import Number from './components/property-controls/number';
import Select from './components/property-controls/select';
import CheckBox from './components/property-controls/check-box';
// Modal settings dialog for the editor. It opens on the
// 'editor:showSettingsView' action event and reports every setting change
// upward through the onChange prop with a full, freshly-merged settings object.
export default class EditorSettingsView extends React.Component {
  static propTypes = {
    action: PropTypes.any.isRequired, // event emitter carrying editor action events
    settings: PropTypes.object.isRequired, // current settings values
    onChange: PropTypes.func // called with the complete new settings object
  };

  constructor(props) {
    super(props);
    // `open` controls the modal's visibility.
    this.state = { open: false };
  }

  componentDidMount() {
    this.props.action.on('editor:showSettingsView', this.openModal);
  }

  componentWillUnmount() {
    // Detach the listener so a stale handler cannot fire after unmount.
    this.props.action.off('editor:showSettingsView', this.openModal);
  }

  openModal = () => {
    this.setState({ open: true });
  }

  closeModal = () => {
    this.setState({ open: false });
  }

  // Merge one changed key into a copy of the settings and notify the parent.
  onSettingChange(key, value) {
    let newSettings = Object.assign({}, this.props.settings, { [key]: value });
    if (this.props.onChange) {
      this.props.onChange(newSettings);
    }
  }

  render() {
    let settings = this.props.settings;
    return <Modal open={this.state.open} header='Settings' onClose={this.closeModal}>
      <PropertyView>
        <Text label='Font' value={settings.font} onChange={value => this.onSettingChange('font', value)} />
        <Number label='Font Size' value={settings.fontSize} onChange={value => this.onSettingChange('fontSize', value)} />
        <Select
          label='Theme'
          options={[
            'Solarized Dark',
            'Solarized Light'
          ]}
          value={settings.theme}
          onChange={value => this.onSettingChange('theme', value)}
        />
        <CheckBox label='Vim Mode' value={settings.vimMode} onChange={value => this.onSettingChange('vimMode', value)} />
      </PropertyView>
    </Modal>;
  }
}
| mit |
xabbuh/DIConfigurationBundle | DependencyInjection/Configuration.php | 2445 | <?php
/*
* This file is part of the XabbuhDIConfigurationBundle package.
*
* (c) Christian Flothmann <christian.flothmann@xabbuh.de>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Xabbuh\DIConfigurationBundle\DependencyInjection;
use Symfony\Component\Config\Definition\Builder\TreeBuilder;
use Symfony\Component\Config\Definition\ConfigurationInterface;
/**
* @author Christian Flothmann <christian.flothmann@xabbuh.de>
*/
// Config tree for service definitions keyed by service id: each entry is
// either a bare class-name string or an array with "class" and "arguments".
class Configuration implements ConfigurationInterface
{
    /**
     * {@inheritdoc}
     */
    public function getConfigTreeBuilder()
    {
        $treeBuilder = new TreeBuilder();
        $rootNode = $treeBuilder->root('xabbuh_di_configuration');
        $rootNode
            ->useAttributeAsKey('id')
            ->prototype('array')
                // Shorthand: a plain string is normalized to array('class' => $v).
                ->beforeNormalization()
                    ->ifString()
                    ->then(function ($v) { return array('class' => $v); })
                ->end()
                ->children()
                    ->scalarNode('class')->end()
                    ->arrayNode('arguments')
                        // Assign a sequential "index" to each argument. A value
                        // given as array('index' => n, 'value' => ...) resets the
                        // running counter to n; plain values continue from there.
                        ->beforeNormalization()
                            ->always()
                            ->then(function ($v) {
                                $values = array();
                                $index = 0;
                                foreach ($v as $value) {
                                    if (is_array($value)) {
                                        $index = $value['index'];
                                        $value = $value['value'];
                                    }
                                    $values[] = array(
                                        'index' => $index++,
                                        'value' => $value,
                                    );
                                }
                                return $values;
                            })
                        ->end()
                        ->prototype('array')
                            ->children()
                                ->integerNode('index')->end()
                                ->scalarNode('value')->end()
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
        return $treeBuilder;
    }
}
| mit |
lamperi/aoc | 2016/9/solve.py | 3478 | import collections
import re
with open("input.txt") as file:
data = file.read()
def decompress(data, debug=False):
    """Expand version-1 ``(NxM)`` markers in ``data``.

    A marker ``(NxM)`` means: copy the next ``N`` characters verbatim
    (markers inside the copied chunk are NOT expanded) and repeat them
    ``M`` times.

    Improvements over the original character-by-character state machine:
    the scan is a direct marker parse, and a marker whose chunk is
    truncated at end-of-input still emits what is available instead of
    silently dropping the buffered characters.

    :param data: compressed string
    :param debug: when truthy, print a trace of each marker found
    :return: the decompressed string
    """
    out = []
    i = 0
    while i < len(data):
        ch = data[i]
        if ch == "(":
            # parse the "(NxM)" marker
            close = data.index(")", i)
            rep, tim = (int(part) for part in data[i + 1:close].split("x"))
            if debug:
                print("MARKER {} x {}".format(rep, tim))
            # the repeated chunk is copied verbatim, nested markers included
            chunk = data[close + 1:close + 1 + rep]
            out.append(chunk * tim)
            i = close + 1 + rep
        else:
            out.append(ch)
            i += 1
    return "".join(out)
# tests
print(decompress("ADVENT"))
print(decompress("A(1x5)BC"))
print(decompress("(3x3)XYZ"))
print(decompress("A(2x2)BCD(2x2)EFG"))
print(decompress("(6x1)(1x3)A"))
print(decompress("X(8x2)(3x3)ABCY"))
print("**** PART 1:")
print(len(decompress(data).strip()))
print("****")
def decompress_len(data, debug=False):
    """Return the decompressed length of ``data`` under version-2 rules.

    Unlike :func:`decompress`, markers inside a repeated chunk are
    themselves expanded, recursively. Only the length is computed -- the
    fully expanded string would be far too large to materialize.

    Replaces the original explicit stack-of-counters state machine with a
    direct recursive formulation: measure each chunk recursively and
    scale by its repeat count; every other character contributes 1.

    :param data: compressed string
    :param debug: when truthy, print a trace of each marker found
    :return: total decompressed length as an int
    """
    total = 0
    i = 0
    while i < len(data):
        if data[i] == "(":
            close = data.index(")", i)
            rep, tim = (int(part) for part in data[i + 1:close].split("x"))
            if debug:
                print("MARKER {} x {}".format(rep, tim))
            # recursively measure the chunk, then scale by the repeat count
            total += tim * decompress_len(data[close + 1:close + 1 + rep], debug)
            i = close + 1 + rep
        else:
            total += 1
            i += 1
    return total
print(decompress_len("(3x3)XYZ"))
print(len("XABCABCABCABCABCABCY"))
print(decompress_len("X(8x2)(3x3)ABCY"))
print(decompress_len("(27x12)(20x12)(13x14)(7x10)(1x12)A"))
print(decompress_len("(25x3)(3x3)ABC(2x3)XY(5x2)PQRSTX(18x9)(3x2)TWO(5x7)SEVEN"))
print("**** PART 2:")
print(decompress_len(data))
print("****")
| mit |
jneuendorf/what-should-i-eat | what_should_i_eat/static/what_should_i_eat/js/__javascript__/index.min.js | 22214 | 'use strict';var $jscomp={scope:{},getGlobal:function(f){return"undefined"!=typeof window&&window===f?f:"undefined"!=typeof global?global:f}};$jscomp.global=$jscomp.getGlobal(this);$jscomp.patches={};$jscomp.patch=function(f,n){($jscomp.patches[f]=$jscomp.patches[f]||[]).push(n);var q=$jscomp.global;f=f.split(".");for(var m=0;m<f.length-1&&q;m++)q=q[f[m]];f=f[f.length-1];q&&q[f]instanceof Function&&(q[f]=n(q[f]))};$jscomp.SYMBOL_PREFIX="jscomp_symbol_";
$jscomp.initSymbol=function(){$jscomp.initSymbol=function(){};if(!$jscomp.global.Symbol){$jscomp.global.Symbol=$jscomp.Symbol;var f=[],n=function(q){return function(m){f=[];m=q(m);for(var n=[],w=0,E=m.length;w<E;w++){var l;a:if(l=m[w],l.length<$jscomp.SYMBOL_PREFIX.length)l=!1;else{for(var g=0;g<$jscomp.SYMBOL_PREFIX.length;g++)if(l[g]!=$jscomp.SYMBOL_PREFIX[g]){l=!1;break a}l=!0}l?f.push(m[w]):n.push(m[w])}return n}};$jscomp.patch("Object.keys",n);$jscomp.patch("Object.getOwnPropertyNames",n);$jscomp.patch("Object.getOwnPropertySymbols",
function(q){return function(m){n.unused=Object.getOwnPropertyNames(m);f.push.apply(q(m));return f}})}};$jscomp.symbolCounter_=0;$jscomp.Symbol=function(f){return $jscomp.SYMBOL_PREFIX+f+$jscomp.symbolCounter_++};$jscomp.initSymbolIterator=function(){$jscomp.initSymbol();$jscomp.global.Symbol.iterator||($jscomp.global.Symbol.iterator=$jscomp.global.Symbol("iterator"));$jscomp.initSymbolIterator=function(){}};
function index(){function f(a){return 0<=["boolean","number"].indexOf(typeof a)||a instanceof Function||x(a)?a:!1}function n(a){this.iterable=a;this.index=0}function q(a){this.iterable=a;this.index=0}function m(a,b,d){void 0==b&&(b=a,a=0);void 0==d&&(d=1);if(0<d&&a>=b||0>d&&a<=b)return[];for(var e=[];0<d?a<b:a>b;a+=d)e.push(a);return e}function J(a){return K(m(x(a)),a)}function w(a){if(null==a||"object"==typeof a)return a;var b={},d;for(d in obj)a.hasOwnProperty(d)&&(b[d]=a[d]);return b}function E(a){if(null==
a||"object"==typeof a)return a;var b={},d;for(d in obj)a.hasOwnProperty(d)&&(b[d]=E(a[d]));return b}function l(a){return a?[].slice.apply(a):[]}function g(a){a=a?[].slice.apply(a):[];a.__class__=g;return a}function r(a){var b=[];if(a)for(var d=0;d<a.length;d++)b.add(a[d]);b.__class__=r;return b}function t(a){try{return a.__str__()}catch(b){try{return I(a)}catch(d){return String(a)}}}function M(){var a=[],b;for(b in this)A(b)||a.push(b);return a}function N(){var a=[],b;for(b in this)A(b)||a.push([b,
this[b]]);return a}function O(a){delete this[a]}function P(){for(var a in this)delete this[a]}function Q(a,b){a=this[a];return void 0==a?void 0==b?null:b:a}function R(a,b){var d=this[a];if(void 0!=d)return d;b=void 0==b?null:b;return this[a]=b}function S(a,b){var d=this[a];return void 0!=d?(delete this[a],d):b}function T(a){for(var b in a)this[b]=a[b]}function B(a){if(!a||a instanceof Array){var b={};if(a)for(var d=0;d<a.length;d++){var e=a[d];b[e[0]]=e[1]}}else b=a;Object.defineProperty(b,"__class__",
{value:B,enumerable:!1,writable:!0});Object.defineProperty(b,"py_keys",{value:M,enumerable:!1});Object.defineProperty(b,"__iter__",{value:function(){new n(this.py_keys())},enumerable:!1});$jscomp.initSymbol();$jscomp.initSymbolIterator();Object.defineProperty(b,Symbol.iterator,{value:function(){new q(this.py_keys())},enumerable:!1});Object.defineProperty(b,"py_items",{value:N,enumerable:!1});Object.defineProperty(b,"py_del",{value:O,enumerable:!1});Object.defineProperty(b,"py_clear",{value:P,enumerable:!1});
Object.defineProperty(b,"py_get",{value:Q,enumerable:!1});Object.defineProperty(b,"py_setdefault",{value:R,enumerable:!1});Object.defineProperty(b,"py_pop",{value:S,enumerable:!1});Object.defineProperty(b,"py_update",{value:T,enumerable:!1});return b}var c={},y=function(a,b,d){if(""!=b){b=b.split(".");for(var e=b.length,c=0;c<b.length;c++){if(!a.hasOwnProperty(b[c])){e=c;break}a=a[b[c]]}for(c=e;c<b.length;c++)a[b[c]]={},a=a[b[c]]}for(var g in d)a[g]=d[g]};c.__nest__=y;var F=function(a){a.__inited__||
(a.__all__.__init__(a.__all__),a.__inited__=!0);return a.__all__};c.__init__=F;var p=function(a,b,d){return a&&(a.hasOwnProperty("__class__")||"string"==typeof a||a instanceof String)?(d&&Object.defineProperty(a,d,{value:function(){var d=[].slice.apply(arguments);return b.apply(null,[a].concat(d))},writable:!0,enumerable:!0,configurable:!0}),function(){var d=[].slice.apply(arguments);return b.apply(null,[a].concat(d))}):b};c.__get__=p;var u={__name__:"type",__bases__:[],__new__:function(a,b,d,c){for(var k=
function(){var a=[].slice.apply(arguments);return k.__new__(a)},g=d.length-1;0<=g;g--){var f=d[g],h;for(h in f){var l=Object.getOwnPropertyDescriptor(f,h);Object.defineProperty(k,h,l)}}k.__metaclass__=a;k.__name__=b;k.__bases__=d;for(h in c)l=Object.getOwnPropertyDescriptor(c,h),Object.defineProperty(k,h,l);return k}};u.__metaclass__=u;c.py_metatype=u;var C={__init__:function(a){},__metaclass__:u,__name__:"object",__bases__:[],__new__:function(a){var b=Object.create(this,{__class__:{value:this,enumerable:!0}});
this.__init__.apply(null,[b].concat(a));return b}};c.object=C;var v=function(a,b,d,c){void 0==c&&(c=b[0].__metaclass__);return c.__new__(c,a,b,d)};c.__class__=v;c.__pragma__=function(){};y(c,"org.transcrypt.__base__",{__all__:{__inited__:!1,__init__:function(a){var b=v("__Envir__",[C],{get __init__(){return p(this,function(a){a.interpreter_name="python";a.transpiler_name="transcrypt";a.transpiler_version="3.6.3";a.target_subdir="__javascript__"})}}),d=b();a.__Envir__=b;a.__envir__=d}}});y(c,"org.transcrypt.__standard__",
{__all__:{__inited__:!1,__init__:function(a){var b=v("Exception",[C],{get __init__(){return p(this,function(a){var b={};if(arguments.length){var d=arguments.length-1;if(arguments[d]&&arguments[d].__class__==h){var c=arguments[d--],e;for(e in c)switch(e){case "self":a=c[e];break;default:b[e]=c[e]}b.__class__=null}d=g([].slice.apply(arguments).slice(1,d+1))}else d=g();a.__args__=d;try{a.stack=b.error.stack}catch(k){a.stack="No stack trace available"}})},get __repr__(){return p(this,function(a){return x(a.__args__)?
"{}{}".format(a.__class__.__name__,I(g(a.__args__))):"{}()".format(a.__class__.__name__)})},get __str__(){return p(this,function(a){return 1<x(a.__args__)?t(g(a.__args__)):x(a.__args__)?t(a.__args__[0]):""})}}),d=v("IterableError",[b],{get __init__(){return p(this,function(a,d){b.__init__(a,"Can't iterate over non-iterable",h({error:d}))})}}),c=v("StopIteration",[b],{get __init__(){return p(this,function(a,d){b.__init__(a,"Iterator exhausted",h({error:d}))})}}),k=v("ValueError",[b],{get __init__(){return p(this,
function(a,d){b.__init__(a,"Erroneous value",h({error:d}))})}}),f=v("KeyError",[b],{get __init__(){return p(this,function(a,d){b.__init__(a,"Invalid key",h({error:d}))})}}),l=v("AssertionError",[b],{get __init__(){return p(this,function(a,d,c){d?b.__init__(a,d,h({error:c})):b.__init__(a,h({error:c}))})}}),m=function(a,b,d){if("undefined"==typeof b||null!=b&&b.__class__==h)b=null;if("undefined"==typeof d||null!=d&&d.__class__==h)d=!1;if(arguments.length){var c=arguments.length-1;if(arguments[c]&&arguments[c].__class__==
h){var c=arguments[c--],e;for(e in c)switch(e){case "iterable":a=c[e];break;case "key":b=c[e];break;case "reverse":d=c[e]}}}b?a.sort(function(a,d){if(arguments.length){var c=arguments.length-1;if(arguments[c]&&arguments[c].__class__==h){var c=arguments[c--],e;for(e in c)switch(e){case "a":a=c[e];break;case "b":d=c[e]}}}return b(a)>b(d)?1:-1}):a.sort();d&&a.reverse()},n=v("__Terminal__",[C],{get __init__(){return p(this,function(a){a.buffer="";try{a.element=document.getElementById("__terminal__")}catch(b){a.element=
null}a.element&&(a.element.style.overflowX="auto",a.element.style.boxSizing="border-box",a.element.style.padding="5px",a.element.innerHTML="_")})},get print(){return p(this,function(a){var b=" ",d="\n";if(arguments.length){var c=arguments.length-1;if(arguments[c]&&arguments[c].__class__==h){var e=arguments[c--],k;for(k in e)switch(k){case "self":a=e[k];break;case "sep":b=e[k];break;case "end":d=e[k]}}var f=g([].slice.apply(arguments).slice(1,c+1))}else f=g();a.buffer="{}{}{}".format(a.buffer,b.join(function(){for(var a=
[],b=f,d=0;d<b.length;d++)a.append(t(b[d]));return a}()),d).__getslice__(-4096,null,1);a.element?(a.element.innerHTML=a.buffer.py_replace("\n","<br>"),a.element.scrollTop=a.element.scrollHeight):console.log(b.join(function(){for(var a=[],b=f,d=0;d<b.length;d++)a.append(t(b[d]));return a}()))})},get input(){return p(this,function(a,b){if(arguments.length){var d=arguments.length-1;if(arguments[d]&&arguments[d].__class__==h){var d=arguments[d--],c;for(c in d)switch(c){case "self":a=d[c];break;case "question":b=
d[c]}}}a.print("{}".format(b),h({end:""}));c=window.prompt("\n".join(a.buffer.py_split("\n").__getslice__(-16,null,1)));a.print(c);return c})}}),q=n();a.AssertionError=l;a.Exception=b;a.IterableError=d;a.KeyError=f;a.StopIteration=c;a.ValueError=k;a.__Terminal__=n;a.__sort__=m;a.__terminal__=q;a.filter=function(a,b){for(var d=[],c=0;c<b.length;c++){var e=b[c];a(e)&&d.append(e)}return d};a.map=function(a,b){for(var d=[],c=0;c<b.length;c++)d.append(a(b[c]));return d};a.sorted=function(a,b,d){if("undefined"==
typeof b||null!=b&&b.__class__==h)b=null;if("undefined"==typeof d||null!=d&&d.__class__==h)d=!1;if(arguments.length){var c=arguments.length-1;if(arguments[c]&&arguments[c].__class__==h){var c=arguments[c--],e;for(e in c)switch(e){case "iterable":a=c[e];break;case "key":b=c[e];break;case "reverse":d=c[e]}}}e=G(a)==B?w(a.py_keys()):w(a);m(e,b,d);return e}}}});y(c,"",F(c.org.transcrypt.__base__));u=c.__envir__;y(c,"",F(c.org.transcrypt.__standard__));var U=c.StopIteration,V=c.ValueError,W=c.AssertionError,
X=c.__sort__;c.print=c.__terminal__.print;c.input=c.__terminal__.input;u.executor_name=u.transpiler_name;c.main={__file__:""};c.__except__=null;var h=function(a){a.__class__=h;a.constructor=Object;return a};c.___kwargdict__=h;c.property=function(a,b){b||(b=function(){});return{get:function(){return a(this)},set:function(a){b(this,a)},enumerable:!0}};c.assert=function(a,b){if(!a)throw W(b,Error());};c.__merge__=function(a,b){var d={},c;for(c in a)d[c]=a[c];for(c in b)d[c]=b[c];return d};c.setattr=
function(a,b,d){a[b]=d};c.getattr=function(a,b){return a[b]};c.hasattr=function(a,b){return b in a};c.delattr=function(a,b){delete a[b]};c.__in__=function(a,b){return G(b)==B?b.hasOwnProperty(a):b.indexOf?-1<b.indexOf(a):b.hasOwnProperty(a)};var A=function(a){return a.startswith("__")&&a.endswith("__")||"constructor"==a||a.startswith("py_")};c.__specialattrib__=A;var x=function(a){if(a){var b=a.length;if(void 0==b){var b=0,d;for(d in a)A(d)||b++}return b}return 0};c.len=x;c.__t__=f;var z=function(a){return!!f(a)};
z.__name__="bool";c.bool=z;var H=function(a){if("inf"==a)return Infinity;if("-inf"==a)return-Infinity;if(isNaN(a))throw V(Error());return+a};H.__name__="float";c.float=H;var D=function(a){return H(a)|0};D.__name__="int";c.int=D;var G=function(a){var b=typeof a;if("object"==b)try{return a.__class__}catch(d){return b}else return"boolean"==b?z:"string"==b?t:"number"==b?0==a%1?D:H:null};c.py_typeof=G;var L=function(a,b){function d(a){if(a==b)return!0;for(var c=0;c<a.__bases__.length;c++)if(d(a.__bases__[c],
b))return!0;return!1}if(b instanceof Array){for(var c=0;c<b.length;c++)if(L(a,b[c]))return!0;return!1}try{return"__class__"in a?d(a.__class__):a instanceof b}catch(k){return a=G(a),a==b||a==z&&b==D}};c.isinstance=L;var I=function(a){try{return a.__repr__()}catch(c){try{return a.__str__()}catch(g){try{if(null==a)return"None";if(a.constructor==Object){var b="{",d=!1,e;for(e in a)if(!A(e)){var k=e.isnumeric()?e:"'"+e+"'";d?b+=", ":d=!0;try{b+=k+": "+a[e].__repr__()}catch(f){b+=k+": "+a[e].toString()}}return b+
"}"}return"boolean"==typeof a?a.toString().capitalize():a.toString()}catch(f){return console.log("ERROR: Could not evaluate repr (<object of type "+typeof a+">)"),console.log(f),"???"}}}};c.repr=I;c.chr=function(a){return String.fromCharCode(a)};c.org=function(a){return a.charCodeAt(0)};c.max=Math.max;c.min=Math.min;c.abs=Math.abs;c.round=function(a,b){if(b){var d=Math.pow(10,b);a*=d}var c=Math.round(a);.5==c-a&&c%2&&--c;b&&(c/=d);return c};n.prototype.__next__=function(){if(this.index<this.iterable.length)return this.iterable[this.index++];
throw U(Error());};q.prototype.next=function(){return this.index<this.iterable.py_keys.length?{value:this.index++,done:!1}:{value:void 0,done:!0}};c.py_reversed=function(a){a=a.slice();a.reverse();return a};var K=function(){var a=[].slice.call(arguments);return(0==a.length?[]:a.reduce(function(a,d){return a.length<d.length?a:d})).map(function(b,d){return a.map(function(a){return a[d]})})};c.zip=K;c.range=m;c.any=function(a){for(var b=0;b<a.length;b++)if(z(a[b]))return!0;return!1};c.all=function(a){for(var b=
0;b<a.length;b++)if(!z(a[b]))return!1;return!0};c.sum=function(a){for(var b=0,d=0;d<a.length;d++)b+=a[d];return b};c.enumerate=J;c.copy=w;c.deepcopy=E;c.list=l;Array.prototype.__class__=l;l.__name__="list";Array.prototype.__iter__=function(){return new n(this)};Array.prototype.__getslice__=function(a,b,d){0>a&&(a=this.length+a);null==b?b=this.length:0>b?b=this.length+b:b>this.length&&(b=this.length);for(var c=l([]);a<b;a+=d)c.push(this[a]);return c};Array.prototype.__setslice__=function(a,b,d,c){0>
a&&(a=this.length+a);null==b?b=this.length:0>b&&(b=this.length+b);if(null==d)Array.prototype.splice.apply(this,[a,b-a].concat(c));else for(var k=0;a<b;a+=d)this[a]=c[k++]};Array.prototype.__repr__=function(){if(this.__class__==r&&!this.length)return"set()";for(var a=this.__class__&&this.__class__!=l?this.__class__==g?"(":"{":"[",b=0;b<this.length;b++){b&&(a+=", ");try{a+=this[b].__repr__()}catch(d){a+=this[b].toString()}}this.__class__==g&&1==this.length&&(a+=",");return a+=this.__class__&&this.__class__!=
l?this.__class__==g?")":"}":"]"};Array.prototype.__str__=Array.prototype.__repr__;Array.prototype.append=function(a){this.push(a)};Array.prototype.clear=function(){this.length=0};Array.prototype.extend=function(a){this.push.apply(this,a)};Array.prototype.insert=function(a,b){this.splice(a,0,b)};Array.prototype.remove=function(a){a=this.indexOf(a);if(-1==a)throw KeyError(Error());this.splice(a,1)};Array.prototype.index=function(a){return this.indexOf(a)};Array.prototype.py_pop=function(a){return void 0==
a?this.pop():this.splice(a,1)[0]};Array.prototype.py_sort=function(){X.apply(null,[this].concat([].slice.apply(arguments)))};Array.prototype.__add__=function(a){return l(this.concat(a))};Array.prototype.__mul__=function(a){for(var b=this,d=1;d<a;d++)b=b.concat(this);return b};Array.prototype.__rmul__=Array.prototype.__mul__;c.tuple=g;g.__name__="tuple";c.set=r;r.__name__="set";Array.prototype.__bindexOf__=function(a){a+="";for(var b=0,d=this.length-1;b<=d;){var c=(b+d)/2|0,k=this[c]+"";if(k<a)b=c+
1;else if(k>a)d=c-1;else return c}return-1};Array.prototype.add=function(a){-1==this.indexOf(a)&&this.push(a)};Array.prototype.discard=function(a){a=this.indexOf(a);-1!=a&&this.splice(a,1)};Array.prototype.isdisjoint=function(a){this.sort();for(var b=0;b<a.length;b++)if(-1!=this.__bindexOf__(a[b]))return!1;return!0};Array.prototype.issuperset=function(a){this.sort();for(var b=0;b<a.length;b++)if(-1==this.__bindexOf__(a[b]))return!1;return!0};Array.prototype.issubset=function(a){return r(a.slice()).issuperset(this)};
Array.prototype.union=function(a){for(var b=r(this.slice().sort()),d=0;d<a.length;d++)-1==b.__bindexOf__(a[d])&&b.push(a[d]);return b};Array.prototype.intersection=function(a){this.sort();for(var b=r(),d=0;d<a.length;d++)-1!=this.__bindexOf__(a[d])&&b.push(a[d]);return b};Array.prototype.difference=function(a){a=r(a.slice().sort());for(var b=r(),d=0;d<this.length;d++)-1==a.__bindexOf__(this[d])&&b.push(this[d]);return b};Array.prototype.symmetric_difference=function(a){return this.union(a).difference(this.intersection(a))};
Array.prototype.py_update=function(){var a=[].concat.apply(this.slice(),arguments).sort();this.clear();for(var b=0;b<a.length;b++)a[b]!=a[b-1]&&this.push(a[b])};Array.prototype.__eq__=function(a){if(this.length!=a.length)return!1;this.__class__==r&&(this.sort(),a.sort());for(var b=0;b<this.length;b++)if(this[b]!=a[b])return!1;return!0};Array.prototype.__ne__=function(a){return!this.__eq__(a)};Array.prototype.__le__=function(a){return this.issubset(a)};Array.prototype.__ge__=function(a){return this.issuperset(a)};
Array.prototype.__lt__=function(a){return this.issubset(a)&&!this.issuperset(a)};Array.prototype.__gt__=function(a){return this.issuperset(a)&&!this.issubset(a)};c.str=t;String.prototype.__class__=t;t.__name__="str";String.prototype.__iter__=function(){new n(this)};String.prototype.__repr__=function(){return(-1==this.indexOf("'")?"'"+this+"'":'"'+this+'"').py_replace("\t","\\t").py_replace("\n","\\n")};String.prototype.__str__=function(){return this};String.prototype.capitalize=function(){return this.charAt(0).toUpperCase()+
this.slice(1)};String.prototype.endswith=function(a){return""==a||this.slice(-a.length)==a};String.prototype.find=function(a,b){return this.indexOf(a,b)};String.prototype.__getslice__=function(a,b,d){0>a&&(a=this.length+a);null==b?b=this.length:0>b&&(b=this.length+b);var c="";if(1==d)c=this.substring(a,b);else for(;a<b;a+=d)c=c.concat(this.charAt(a));return c};Object.defineProperty(String.prototype,"format",{get:function(){return p(this,function(a){var b=g([].slice.apply(arguments).slice(1)),d=0;
return a.replace(/\{(\w*)\}/g,function(a,c){""==c&&(c=d++);if(c==+c)return void 0==b[c]?a:t(b[c]);for(var f=0;f<b.length;f++)if("object"==typeof b[f]&&void 0!=b[f][c])return t(b[f][c]);return a})})},enumerable:!0});String.prototype.isnumeric=function(){return!isNaN(parseFloat(this))&&isFinite(this)};String.prototype.join=function(a){return a.join(this)};String.prototype.lower=function(){return this.toLowerCase()};String.prototype.py_replace=function(a,b,d){return this.split(a,d).join(b)};String.prototype.lstrip=
function(){return this.replace(/^\s*/g,"")};String.prototype.rfind=function(a,b){return this.lastIndexOf(a,b)};String.prototype.rsplit=function(a,b){if(void 0==a||null==a){a=/\s+/;var d=this.strip()}else d=this;if(void 0==b||-1==b)return d.split(a);d=d.split(a);return b<d.length?(b=d.length-b,[d.slice(0,b).join(a)].concat(d.slice(b))):d};String.prototype.rstrip=function(){return this.replace(/\s*$/g,"")};String.prototype.py_split=function(a,b){if(void 0==a||null==a){a=/\s+/;var d=this.strip()}else d=
this;if(void 0==b||-1==b)return d.split(a);d=d.split(a);return b<d.length?d.slice(0,b).concat([d.slice(b).join(a)]):d};String.prototype.startswith=function(a){return 0==this.indexOf(a)};String.prototype.strip=function(){return this.trim()};String.prototype.upper=function(){return this.toUpperCase()};String.prototype.__mul__=function(a){for(var b=this,d=1;d<a;d++)b+=this;return b};String.prototype.__rmul__=String.prototype.__mul__;c.dict=B;B.__name__="dict";Object.defineProperty(Function.prototype,
"__setdoc__",{value:function(a){this.__doc__=a;return this},enumerable:!1});c.__neg__=function(a){return"object"==typeof a&&"__neg__"in a?a.__neg__():-a};c.__matmul__=function(a,b){return a.__matmul__(b)};u=function(a,b){return"object"==typeof a&&"__pow__"in a?a.__pow__(b):"object"==typeof b&&"__rpow__"in b?b.__rpow__(a):Math.pow(a,b)};c.pow=u;c.pow=u;c.__mul__=function(a,b){return"object"==typeof a&&"__mul__"in a?a.__mul__(b):"object"==typeof b&&"__rmul__"in b?b.__rmul__(a):"string"==typeof a?a.__mul__(b):
"string"==typeof b?b.__rmul__(a):a*b};c.__div__=function(a,b){return"object"==typeof a&&"__div__"in a?a.__div__(b):"object"==typeof b&&"__rdiv__"in b?b.__rdiv__(a):a/b};c.__add__=function(a,b){return"object"==typeof a&&"__add__"in a?a.__add__(b):"object"==typeof b&&"__radd__"in b?b.__radd__(a):a+b};c.__sub__=function(a,b){return"object"==typeof a&&"__sub__"in a?a.__sub__(b):"object"==typeof b&&"__rsub__"in b?b.__rsub__(a):a-b};c.__eq__=function(a,b){return"object"==typeof a&&"__eq__"in a?a.__eq__(b):
a==b};c.__ne__=function(a,b){return"object"==typeof a&&"__ne__"in a?a.__ne__(b):a!=b};c.__lt__=function(a,b){return"object"==typeof a&&"__lt__"in a?a.__lt__(b):a<b};c.__le__=function(a,b){return"object"==typeof a&&"__le__"in a?a.__le__(b):a<=b};c.__gt__=function(a,b){return"object"==typeof a&&"__gt__"in a?a.__gt__(b):a>b};c.__ge__=function(a,b){return"object"==typeof a&&"__ge__"in a?a.__ge__(b):a>=b};c.__getitem__=function(a,b){return"object"==typeof a&&"__getitem__"in a?a.__getitem__(b):a[b]};c.__setitem__=
function(a,b,d){"object"==typeof a&&"__setitem__"in a?a.__setitem__(b,d):a[b]=d};c.__getslice__=function(a,b,d,c){return"object"==typeof a&&"__getitem__"in a?a.__getitem__([b,d,c]):a.__getslice__(b,d,c)};c.__setslice__=function(a,b,c,e,f){"object"==typeof a&&"__setitem__"in a?a.__setitem__([b,c,e],f):a.__setslice__(b,c,e,f)};c.__call__=function(){var a=[].slice.apply(arguments);return"object"==typeof a[0]&&"__call__"in a[0]?a[0].__call__.apply(a[1],a.slice(2)):a[0].apply(a[1],a.slice(2))};y(c,"itertools",
{__all__:{__inited__:!1,__init__:function(a){a.chain=function(){for(var a=[].slice.apply(arguments),c=[],e=0;e<a.length;e++)c=c.concat(a[e]);return l(c)}}}});(function(){var a=F(c.itertools).chain,b=v("SolarSystem",[C],{planets:function(){for(var b=[],c=J(g([g(["Mercury","hot",2240]),g(["Venus","sulphurous",6052]),g(["Earth","fertile",6378]),g(["Mars","reddish",3397]),g(["Jupiter","stormy",71492]),g(["Saturn","ringed",60268]),g(["Uranus","cold",25559]),g(["Neptune","very cold",24766])])),d=0;d<c.length;d++){var f=
c[d];b.append(l(a(f[1],g([f[0]+1]))))}return b}(),lines:g(["{} is a {} planet","The radius of {} is {} km","{} is planet nr. {} counting from the sun"]),get __init__(){return p(this,function(a){a.lineIndex=0})},get greet(){return p(this,function(a){a.planet=a.planets[D(Math.random()*x(a.planets))];document.getElementById("greet").innerHTML="Hello {}".format(a.planet[0]);a.explain()})},get explain(){return p(this,function(a){document.getElementById("explain").innerHTML=a.lines[a.lineIndex].format(a.planet[0],
a.planet[a.lineIndex+1]);var b;b=a.lineIndex+1;b="object"==typeof b&&"__mod__"in b?b.__mod__(3):(b%3+3)%3;a.lineIndex=b})}}),d=b();c.SolarSystem=b;c.chain=a;c.solarSystem=d})();return c}window.index=index();
| mit |
jonrimmer/angular | packages/compiler-cli/src/ngtsc/annotations/index.ts | 610 | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
export {ResourceLoader} from './src/api';
export {ComponentDecoratorHandler} from './src/component';
export {DirectiveDecoratorHandler} from './src/directive';
export {InjectableDecoratorHandler} from './src/injectable';
export {NgModuleDecoratorHandler} from './src/ng_module';
export {PipeDecoratorHandler} from './src/pipe';
export {CompilationScope, SelectorScopeRegistry} from './src/selector_scope';
| mit |
jlecker/rhythmbox-xchat-music-channel | xchat_music_channel/conf.py | 1230 | import gtk
import gconf
gconf_keys = {
'server': '/apps/rhythmbox/plugins/xchat_music_channel/server',
'channel': '/apps/rhythmbox/plugins/xchat_music_channel/channel',
}
class ConfDialog(object):
def __init__(self, builder_file):
self.gconf = gconf.client_get_default()
builder = gtk.Builder()
builder.add_from_file(builder_file)
self.dialog = builder.get_object('prefs_dialog')
self.server_entry = builder.get_object('server_entry')
server = self.gconf.get_string(gconf_keys['server'])
if server is None:
server = 'irc.freenode.net'
self.server_entry.set_text(server)
self.channel_entry = builder.get_object('channel_entry')
channel = self.gconf.get_string(gconf_keys['channel'])
if channel is None:
channel = '##rhythmbox_xchat_music_channel'
self.channel_entry.set_text(channel)
self.dialog.connect('response', self.response)
def response(self, dialog, response):
self.gconf.set_string(gconf_keys['server'], self.server_entry.get_text())
self.gconf.set_string(gconf_keys['channel'], self.channel_entry.get_text())
self.dialog.hide() | mit |
Akhenoth/Factorian | application/files/cache/0fea6a13c52b4d47/25368f24b045ca84/38a865804f8fdcb6/57cd99682e939275/8f3a01e2e3b200aa/f980d8ea6abd2ae6/29c9b3b1491ac67f/64c0febd1631a381.php | 375 | <?php
/* Cachekey: cache/stash_default/zend/zend_i18n_translator_messages_f81a55c2847a74d67d28e1ffce253c94/ */
/* Type: array */
/* Expiration: 2017-04-02T18:09:25+02:00 */
$loaded = true;
$expiration = 1491149365;
$data = array();
/* Child Type: NULL */
$data['return'] = unserialize(base64_decode('Tjs='));
/* Child Type: integer */
$data['createdOn'] = 1490717954;
| mit |
maxupeng/snippets | tool/pdf/pdfbookmark.py | 2364 | #!/usr/bin/python
# -*- coding:UTF-8 -*-
import os
import sys
import codecs
import argparse
import subprocess
chapters = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
parser = argparse.ArgumentParser(description='Set PDF bookmark')
parser.add_argument('input', help='input file')
parser.add_argument('bookmark', help='bookmark file')
parser.add_argument('-o', default='output.pdf', help='output file', dest='output')
args = parser.parse_args()
ret = subprocess.call(['pdftk', args.input, 'dump_data_utf8', 'output', 'dump_data_0'])
if ret != 0:
sys.exit(-1)
src_data_f = codecs.open('dump_data_0', 'r', 'UTF-8')
bookmark_f = codecs.open(args.bookmark, 'r', 'UTF-8')
dest_data_f = codecs.open('dump_data_1', 'w', 'UTF-8')
base = 0
level = 0
line = ''
while not line.startswith('NumberOfPages'):
line = src_data_f.readline()
dest_data_f.write(line)
for bline in bookmark_f:
bline = bline.replace('\t', ' ')
if bline.startswith('###'):
pair = bline.replace('###', '').split(':')
if len(pair) < 2:
print('Bookmark file Error: ' + bline)
sys.exit(-1)
if pair[0].strip() == 'base':
base = int(pair[1].strip())
else:
strip = bline.strip()
pre_level = level
level = 1 + bline.find(strip)
if level > pre_level:
for i in range(pre_level+1,level):
chapters[i] = 0
chapters[level] = chapters[level]+1;
chapter = ''
for i in range(1,level):
chapter = chapter + str(chapters[i]) + '.'
chapter = chapter + str(chapters[level])
items = strip.rpartition(' ')
title = items[0].strip()
page = base + int(items[2].strip())
dest_data_f.write('BookmarkBegin\n')
dest_data_f.write('BookmarkTitle: ' + chapter + ' ' + str(title) + '\n')
dest_data_f.write('BookmarkLevel: ' + str(level) + '\n')
dest_data_f.write('BookmarkPageNumber: ' + str(page) + '\n')
line = src_data_f.readline()
while line.startswith('Bookmark'):
line = src_data_f.readline()
while len(line) > 0:
dest_data_f.write(line)
line = src_data_f.readline()
src_data_f.close()
dest_data_f.close()
bookmark_f.close()
subprocess.call(['pdftk', args.input, 'update_info_utf8', 'dump_data_1', 'output', args.output])
os.remove('dump_data_0')
os.remove('dump_data_1')
| mit |
lukemadera/grunt-buildfiles | tasks/buildfiles - v2.js | 9917 | /**
Many grunt tasks use the wildcard * or ** symbols for deciding which files to include for that task. This is simple and easy BUT doesn't allow much flexibility since if you have a particular directory or file to exclude, it becomes fairly complicated to exclude that file. Furthermore, you have to define (hardcode) this file list in your actual grunt file and you may have to define it many times (or define different variations multiple times). This plugin allows you to define your files (css, js, html) ONCE in a separate config file and gives more flexibility around which files to use where - without relying on blanket file globbing or pattern matching rules. Basically, instead of including all files by default, this plugin does the opposite - it only includes the files you explicity tell it to. This is less error prone since files won't accidentally be included that shouldn't be since each file is specified.
Secondly, this plugin writes grunt templates for you (allowing you to specify a config file in one place then use it to add these config values in all file types (your css, js and html) so you have ONE source of truth. This is especially useful for file paths, which are often shared across your css (i.e. for LESS or SASS pre-processing) and javascript (for http requests or including resources). Can also be used server side so you only need ONE config.json file for your WHOLE app (rather than one config file per language - one for css, one for javascript, one for the server, etc.)
Common use cases:
- forming and stuffing a list of file to lint/jshint and/or minify/uglify (without double linting/uglifying external library files that have already been linted/minified and should NOT be done again). For use with a grunt jshint and/or uglify task.
- forming and stuffing a list of files to concat (for use with a grunt concat task)
- dynamically generating your index.html file with all your <link rel="stylesheet" > css tags, <script> js tags and <script> template cache html files/partials (i.e. for use with AngularJS to preload all your templates for better performance)
- generate other config files (across css, js, html) using grunt template files to stick in config variables defined in a config.json file (this way you don't need to separately set, hardcode and match config variables across css, js, etc.)
More specifically, this plugin takes a buildfilesArray javascript array as a config list of all javascript and css (and html) files and uses them to set the lint, concat, uglify files as well as writes grunt template files (including index.html) to include these resources appropriately.
Can handle separating external (already minified) files from the rest to avoid any issues with double minification. To use this, set the "dirsExt" key in the buildfilesArray to an array of the directories to NOT uglify/minify.
@toc
1. pull in grunt config
2. init and form file paths
3. set grunt.config paths (for use later in other grunt tasks)
4. write the actual file(s) using the grunt template(s)
@todo - figure out how to make uglify files key be dynamic rather than hardcoded..
*/
module.exports = function(grunt) {
grunt.registerTask("buildfiles", "Generate resource file names and build final files using grunt templates depending on server environment config", function() {
var ii;
/**
Pull in grunt config.
@toc 1.
*/
var conf =grunt.config('buildfiles');
var files =conf.buildfilesArray;
/**
Init filePaths and then build them (join directory and filenames toegether for each file)
@toc 2.
*/
/**
@property filePaths Will hold all the final files (joining the directory with the file name) by type. Each type (html, css, js) is an array of file paths.
'all' key is for BOTH custom and external/3rd party files
'custom' key is for custom files. These are just files that SHOULD be linted (and minified) (i.e. all files that are NOT in one of the 'dirsExt' directories). This is the OPPOSITE of the 'ext' key - whichever files are NOT in 'ext' will be in here.
'ext' key is for external / 3rd party files. These are just files that should NOT be linted and minfied (i.e. all files that ARE in one of the 'dirsExt' directories). This is the OPPOSITE of the 'custom' key - whichever files are NOT in 'custom' will be in here.
@type Object
*/
var filePaths ={
all: {
css: [],
js: [],
html: []
},
custom: {
js: []
},
ext: {
js: []
}
};
//build full file resource links (join directory with file to form full path)
for(var type in files) { //go through all resource types (css, js)
for(var dir in files[type].files) { //go through all directories
//see if this dir is a custom one (i.e. not an external/3rd party library directory)
var customDir =false;
if(files[type].dirsExt !==undefined) {
customDir =true;
for(var dd =0; dd<files[type].dirsExt.length; dd++) {
if(files[type].dirsExt[dd] ==dir) {
customDir =false;
break;
}
}
}
//go through each file and join it to the directory to form the full path
var dirPath =files[type].dirs[dir];
for(ii =0; ii<files[type].files[dir].length; ii++) { //go through each file in this directory
var curPathPart =dirPath+'/'+files[type].files[dir][ii]; //form the full path
//add to the main ('all') file group
filePaths.all[type].push(curPathPart);
//if a custom directory, add to the custom file group as well
if(customDir) {
if(type =='js') {
filePaths.custom[type].push(curPathPart);
}
}
//if NOT a custom directory (i.e. if an external / 3rd party directory), add to the ext file group as well
else {
if(type =='js') {
filePaths.ext[type].push(curPathPart);
}
}
}
}
}
/**
update/set grunt.config paths (for use later in other grunt tasks)
@toc 3.
*/
if(conf.configPaths !==undefined) {
var config, prefix, fileType, fileGroup, prefixedFilePaths;
for(config in conf.configPaths) { //iterate through each config path
prefix =conf.configPaths[config].prefix || ''; //default to no prefix
fileGroup =conf.configPaths[config].fileGroup || 'all'; //default to all (both custom and ext files)
// console.log('config: '+config+' prefix: '+prefix+' fileGroup: '+fileGroup);
for(fileType in conf.configPaths[config].files) { //iterate through each file type
//form new file paths array with prefix prepended
prefixedFilePaths =[];
for(ii =0; ii<filePaths[fileGroup][fileType].length; ii++) {
prefixedFilePaths[ii] =prefix+filePaths[fileGroup][fileType][ii];
}
//if want to add additional files to this grunt.config, add them now
if(conf.configPaths[config].additionalFiles !==undefined && conf.configPaths[config].additionalFiles.length >0) {
for(ii =0; ii<conf.configPaths[config].additionalFiles.length; ii++) {
prefixedFilePaths.push(conf.configPaths[config].additionalFiles[ii]);
}
}
//special case for uglify task //@todo - fix this..
if(conf.configPaths[config].uglify !==undefined && conf.configPaths[config].uglify) {
prefixedFilePaths ={
'<%= customMinifyFile %>': prefixedFilePaths
};
}
//actually set the grunt.config now that we have the final file paths (with the prefixes prepended)
for(ii=0; ii<conf.configPaths[config].files[fileType].length; ii++) {
// console.log(conf.configPaths[config].files[fileType][ii]+' '+prefixedFilePaths);
grunt.config(conf.configPaths[config].files[fileType][ii], prefixedFilePaths);
}
}
}
}
/**
write the actual file(s) using the grunt template(s). `ifOpts` are used with command line options to see which files (if any) to skip. ifOpts are treated as an `and` so if multiple are specified, ALL must match for the file to be written.
@toc 4.
*/
//will output which files are skiped and which are written
var outputFiles ={
skip: [],
write: [],
};
for(var ff in conf.files) {
//check to see if should write this file at all using 'ifOpts' param which corresponds to command line arguments (i.e. `--if=yes`) which correspond to grunt.option here.
var goTrig =true;
if(conf.files[ff].ifOpts !==undefined) {
//go through ALL ifOpts and find at least ONE that either is undefined or does not match, then set goTrig to false
for(ii =0; ii<conf.files[ff].ifOpts.length; ii++) {
if(grunt.option(conf.files[ff].ifOpts[ii].key) ===undefined || grunt.option(conf.files[ff].ifOpts[ii].key) != conf.files[ff].ifOpts[ii].val) {
goTrig =false;
outputFiles.skip.push('src: '+conf.files[ff].src);
// grunt.log.writeln('buildfiles SKIP file due to ifOpts: src: '+conf.files[ff].src);
break;
}
}
}
if(goTrig) {
var src =conf.files[ff].src;
var dest =conf.files[ff].dest;
var tmpl = grunt.file.read(src);
grunt.file.write(dest, grunt.template.process(tmpl));
// grunt.log.writeln('buildfiles writing file: src: '+src+' dest: '+dest);
// outputFiles.write.push('src: '+src+' dest: '+dest);
outputFiles.write.push(dest+' src: '+src);
}
}
//output message detailing which files were written and which were skipped
var msg ='\nbuildfiles writing files (if multiple files go to the same destination, the LAST one is the src that will have been used):\n';
if(outputFiles.skip.length >0) {
msg +='SKIPPED files (due to ifOpts):\n';
for(ii =0; ii<outputFiles.skip.length; ii++) {
msg+=(ii+1)+'. '+outputFiles.skip[ii]+'\n';
}
}
if(outputFiles.write.length >0) {
msg +='WRITTEN files:\n';
for(ii =0; ii<outputFiles.write.length; ii++) {
msg+=(ii+1)+'. '+outputFiles.write[ii]+'\n';
}
}
grunt.log.writeln(msg);
grunt.log.writeln('buildfiles done');
});
}; | mit |
ajanson/SCIRun | src/Interface/Modules/Render/Ospray/VolumeViewer.cpp | 30722 | // ======================================================================== //
// Copyright 2009-2017 Intel Corporation //
// //
// Licensed under the Apache License, Version 2.0 (the "License"); //
// you may not use this file except in compliance with the License. //
// You may obtain a copy of the License at //
// //
// http://www.apache.org/licenses/LICENSE-2.0 //
// //
// Unless required by applicable law or agreed to in writing, software //
// distributed under the License is distributed on an "AS IS" BASIS, //
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //
// See the License for the specific language governing permissions and //
// limitations under the License. //
// ======================================================================== //
#include <algorithm>
#include "VolumeViewer.h"
//#include "TransferFunctionEditor.h"
#include "IsosurfaceEditor.h"
#include "SliceEditor.h"
#include "ProbeWidget.h"
#include "OpenGLAnnotationRenderer.h"
#include <components/ospcommon/FileName.h>
#include "importer/Importer.h"
using namespace ospcommon;
// Construct the volume viewer widget: initializes all OSPRay state (renderer,
// lights, transfer function), loads any additional scene objects, and embeds
// the OSPRay render window into this widget's layout.
//
// params    - viewer-level settings (renderer type, fullscreen flag, ...)
// guiParams - externally created GUI widget handles and initial dimensions
// objParams - extra scene objects plus the preset bounding box
// parent    - parent widget; also resized to the requested dimensions
VolumeViewer::VolumeViewer(const OsprayViewerParameters& params, const OsprayGUIParameters& guiParams,
  const OsprayObjectParameters& objParams,
  QWidget* parent)
  : QWidget(parent),
  additionalObjects_(objParams.moreObjects),
  modelIndex(0),
  ownModelPerObject_(params.ownModelPerObject),
  boundingBox_(ospcommon::vec3f(0.f), ospcommon::vec3f(1.f)),
  presetBoundingBox_(objParams.presetBoundingBox),
  renderer(nullptr),
  rendererInitialized(false),
  // BUGFIX: planeMesh was never initialized. addInitialPlane() (the only
  // assignment) is skipped when no model states exist, after which setPlane()
  // and ~VolumeViewer() would read/ospRelease an indeterminate pointer.
  planeMesh(nullptr),
  transferFunction(nullptr),
  ambientLight(nullptr),
  directionalLight(nullptr),
  osprayWindow_(nullptr),
  annotationRenderer(nullptr),
  //transferFunctionEditor(nullptr),
  isosurfaceEditor(nullptr),
  autoRotationRate(0.025f),
  usePlane(-1),
  samplingRate(-1),
  adaptiveMaxSamplingRate(-1),
  spp(-1),
  shadows(-1),
  preIntegration(-1),
  aoSamples(-1),
  adaptiveSampling(-1),
  gradientShadingEnabled(-1)
{
  setLayout(new QVBoxLayout);

  // Default window size.
  // NOTE(review): QWidget::resize() takes (width, height) but is passed
  // (height, width) here -- confirm whether the swap is intentional.
  resize(guiParams.height, guiParams.width);
  parent->resize(guiParams.height, guiParams.width);

  // Create and configure the OSPRay state.
  initObjects(params.rendererType);
  postInitObjectConstruction(params, guiParams);
}
// Destructor: drop this viewer's references to every OSPRay object it owns
// so their reference counts can reach zero.
// NOTE(review): planeMesh is only assigned inside addInitialPlane(), which is
// not called when modelStates_ is empty -- verify it is initialized (e.g. to
// nullptr in the constructor) before this ospRelease runs.
VolumeViewer::~VolumeViewer()
{
ospRelease(renderer);
ospRelease(planeMesh);
ospRelease(ambientLight);
ospRelease(directionalLight);
ospRelease(transferFunction);
// Per-model state (models, volumes, slice/isosurface geometries).
for (auto& m : modelStates_)
m.release();
// Geometry objects handed in via OsprayObjectParameters::moreObjects.
for (auto& obj : additionalObjects_)
ospRelease(obj);
}
// Disabled (compiled out): re-applies object parameters after construction --
// swaps in new additional objects and preset bounds, reloads geometry, and
// recomputes world bounds for the render window.
#if 0
void VolumeViewer::setParameters(const OsprayObjectParameters& params)
{
additionalObjects_ = params.moreObjects;
qDebug() << "additionalObjects_" << additionalObjects_.size();
presetBoundingBox_ = params.presetBoundingBox;
loadAdditionalGeometries();
initPostObjects();
osprayWindow_->setWorldBounds(boundingBox_);
}
#endif
// Second construction phase, run after the OSPRay objects exist: creates the
// render window and auxiliary editors, then pushes the initial render
// settings (which also syncs the -1 "unset" sentinel members to real values).
void VolumeViewer::postInitObjectConstruction(const OsprayViewerParameters& params, const OsprayGUIParameters& guiParams)
{
// Create an OSPRay window and set it as the central widget, but don't let it start rendering until we're done with setup.
osprayWindow_ = new QOSPRayWindow(renderer, params.writeFramesFilename, this, guiParams.frameRateWidget);
layout()->addWidget(osprayWindow_);
//PRINT(boundingBox_);
// Set the window bounds based on the OSPRay world bounds.
osprayWindow_->setWorldBounds(boundingBox_);
// Connect the "play timesteps" timer.
connect(&playTimeStepsTimer, SIGNAL(timeout()), this, SLOT(nextTimeStep()));
{
//TODO: connect to new config widgets
lightEditor = new LightEditor(ambientLight, directionalLight,
guiParams.ambientLightIntensitySpinBox, guiParams.directionalLightIntensitySpinBox, guiParams.directionalLightAzimuthSlider, guiParams.directionalLightElevationSlider);
connect(lightEditor, SIGNAL(lightsChanged()), this, SLOT(render()));
probeWidget = new ProbeWidget(this);
}
if (params.fullScreen)
setWindowState(windowState() | Qt::WindowFullScreen);
// Apply initial render settings; each setter commits to OSPRay and records
// the value so redundant re-commits are avoided later.
setGradientShadingEnabled(true);
setAOSamples(1);
setAdaptiveSampling(true);
setPreIntegration(true);
setShadows(true);
setSPP(1);
setPlane(true);
}
// Return the world-space bounding box computed in initPostObjects().
ospcommon::box3f VolumeViewer::getBoundingBox()
{
return boundingBox_;
}
// Return the embedded OSPRay render window (may be null before
// postInitObjectConstruction() has run).
QOSPRayWindow *VolumeViewer::getWindow()
{
return osprayWindow_;
}
//
// TransferFunctionEditor *VolumeViewer::getTransferFunctionEditor()
// {
// return transferFunctionEditor;
// }
// Make model (time step) `index` the one being rendered: binds it to the
// renderer, propagates its first volume's value range to the editors, and
// finally enables rendering in the window.
void VolumeViewer::setModel(size_t index)
{
modelIndex = index;
// Set current model on the OSPRay renderer.
ospSetObject(renderer, "model", modelStates_[index].model);
ospCommit(renderer);
rendererInitialized = true;
//PRINT(modelStates_[index].volumes.size());
if (!modelStates_[index].volumes.empty())
{
// Update transfer function and isosurface editor data value range with the voxel range of the current model's first volume.
OSPVolume volume = modelStates_[index].volumes[0]->handle;
ospcommon::vec2f voxelRange = modelStates_[index].volumes[0]->voxelRange;
// A (0,0) range means the importer did not provide one; leave editors alone.
if(voxelRange != ospcommon::vec2f(0.f))
{
//transferFunctionEditor->setDataValueRange(voxelRange);
isosurfaceEditor->setDataValueRange(voxelRange);
}
// Update active volume on probe widget.
probeWidget->setVolume(modelStates_[index].volumes[0]->handle);
#if 0
// Update current filename information label.
if (ownModelPerObject_)
currentFilenameInfoLabel.setText("<b>Timestep " + QString::number(index) + QString("</b>: Data value range: [") + QString::number(voxelRange.x) + ", " + QString::number(voxelRange.y) + "]");
else
currentFilenameInfoLabel.setText("<b>Timestep " + QString::number(index) + QString("</b>: ") + QString(objectFileFilenames_[index].c_str()).split('/').back() + ". Data value range: [" + QString::number(voxelRange.x) + ", " + QString::number(voxelRange.y) + "]");
#endif
}
// Enable rendering on the OSPRay window.
osprayWindow_->setRenderingEnabled(true);
}
// Human-readable identifier for logging/diagnostics.
std::string VolumeViewer::toString() const
{
return "VolumeViewer";
}
// Toggle continuous rotation of the viewport camera.
void VolumeViewer::autoRotate(bool set)
{
  if (!osprayWindow_)
    return;

  // A rate of zero stops the rotation; a redraw kick is only needed
  // when rotation is being switched on.
  osprayWindow_->setRotationRate(set ? autoRotationRate : 0.);
  if (set)
    osprayWindow_->updateGL();
}
// Set the rotation speed used when auto-rotation is enabled (radians/frame;
// takes effect the next time autoRotate(true) is called).
void VolumeViewer::setAutoRotationRate(double rate)
{
autoRotationRate = rate;
}
// Advance to the next time step (model), wrapping around at the end.
// Invoked by the playTimeStepsTimer as well as directly.
void VolumeViewer::nextTimeStep()
{
  // BUGFIX: guard against modulo-by-zero -- this slot is timer-driven and
  // could fire before any model has been loaded.
  if (modelStates_.empty())
    return;

  modelIndex = (modelIndex + 1) % modelStates_.size();
  setModel(modelIndex);
  render();
}
// Start or stop the time-step animation (one model advance every 500 ms).
void VolumeViewer::playTimeSteps(bool animate)
{
  if (!animate)
  {
    playTimeStepsTimer.stop();
    return;
  }
  playTimeStepsTimer.start(500);
}
// Forward a slice-definition file to the slice editor, which parses it and
// emits slicesChanged() to update the scene.
void VolumeViewer::addSlice(std::string filename)
{
sliceEditor->addSlice(filename);
}
// Toggle the frame-rate overlay in the render window (no-op before the
// window exists).
void VolumeViewer::setShowFrameRate(bool on)
{
if (osprayWindow_)
osprayWindow_->setShowFrameRate(on);
}
// Load an external geometry file into every model. The entire body is
// currently compiled out (#if 0); when enabled it dispatches on the file
// extension to the various miniSG importers, falls back to the generic
// ospray::importer, and special-cases .dds horizons (colored through the
// first volume's transfer function).
void VolumeViewer::addGeometry(std::string filename)
{
#if 0
// For now we assume PLY geometry files. Later we can support other geometry formats.
// Get filename if not specified.
if(filename.empty())
filename = QFileDialog::getOpenFileName(this, tr("Load geometry"), ".", "Geometry files (*)").toStdString();
if(filename.empty())
return;
// Attempt to load the geometry through the TriangleMeshFile loader.
// OSPGeometry triangleMesh = ospNewGeometry("trianglemesh");
// If successful, commit the triangle mesh and add it to all models.
// if(TriangleMeshFile::importTriangleMesh(filename, triangleMesh) != NULL) {
ospcommon::FileName fn = filename;
ospray::miniSG::Model* msgModel = new miniSG::Model;
bool loadedSGScene = false;
// Extension-based dispatch to the miniSG importers.
if (fn.ext() == "stl") {
miniSG::importSTL(*msgModel,fn);
loadedSGScene = true;
} else if (fn.ext() == "msg") {
miniSG::importMSG(*msgModel,fn);
loadedSGScene = true;
} else if (fn.ext() == "tri") {
miniSG::importTRI(*msgModel,fn);
loadedSGScene = true;
} else if (fn.ext() == "xml") {
miniSG::importRIVL(*msgModel,fn);
loadedSGScene = true;
} else if (fn.ext() == "obj") {
miniSG::importOBJ(*msgModel,fn);
loadedSGScene = true;
} else if (fn.ext() == "hbp") {
miniSG::importHBP(*msgModel,fn);
loadedSGScene = true;
} else if (fn.ext() == "x3d") {
miniSG::importX3D(*msgModel,fn);
loadedSGScene = true;
} else if (fn.ext() == "astl") {
// miniSG::importSTL(msgAnimation,fn);
// loadedSGScene = true;
} else {
// Unknown extension: try the generic importer instead.
ospray::importer::Group *newStuff = ospray::importer::import(filename);
if (!newStuff) return;
if (newStuff->geometry.size() != 1) return;
OSPGeometry triangleMesh = newStuff->geometry[0]->handle;
// For now: if this is a DDS geometry, assume it is a horizon and its color should be mapped through the first volume's transfer function.
if(QString(filename.c_str()).endsWith(".dds") && modelStates_.size() > 0 && modelStates_[0].volumes.size() > 0) {
OSPMaterial material = ospNewMaterial(renderer, "default");
ospSet3f(material, "Kd", 1,1,1);
ospSetObject(material, "volume", modelStates_[0].volumes[0]->handle);
ospCommit(material);
ospSetMaterial(triangleMesh, material);
ospCommit(triangleMesh);
// Create an instance of the geometry and add the instance to the main model(s)--this prevents the geometry
// from being rebuilt every time the main model is committed (e.g. when slices / isosurfaces are manipulated)
OSPModel modelInstance = ospNewModel();
ospAddGeometry(modelInstance, triangleMesh);
ospCommit(modelInstance);
ospcommon::affine3f xfm = ospcommon::one;
OSPGeometry triangleMeshInstance = ospNewInstance(modelInstance, (osp::affine3f&)xfm);
ospCommit(triangleMeshInstance);
for(size_t i=0; i<modelStates_.size(); i++) {
ospAddGeometry(modelStates_[i].model, triangleMeshInstance);
ospCommit(modelStates_[i].model);
}
}
}
if (loadedSGScene)
{
// Convert each miniSG mesh to an OSPRay geometry with a default material,
// wrap it in its own model, then instance those models into every state.
std::vector<OSPModel> instanceModels;
for (size_t i = 0; i < msgModel->mesh.size(); i++) {
Ref<miniSG::Mesh> msgMesh = msgModel->mesh[i];
TriangleMeshSceneParser parser(ospray::cpp::Renderer(), "triangles");
auto ospMesh = parser.createOSPRayGeometry(msgModel, msgMesh.ptr);
OSPMaterial mat = ospNewMaterial(renderer, "OBJMaterial");
ospSet3f(mat,"Kd",.8f,.8f,.8f);
ospCommit(mat);
ospSetMaterial(ospMesh.handle(), mat);
ospCommit(ospMesh.handle());
cpp::Model model_i;
model_i.addGeometry(ospMesh);
model_i.commit();
instanceModels.push_back(model_i.handle());
}
for (size_t i = 0; i < msgModel->instance.size(); i++) {
msgModel->instance[i].xfm = msgModel->instance[i].xfm*ospcommon::affine3f::translate(ospcommon::vec3f(16,16,.1)); // hack for landing gear
OSPGeometry inst =
ospNewInstance(instanceModels[msgModel->instance[i].meshID],
reinterpret_cast<osp::affine3f&>(msgModel->instance[i].xfm));
ospCommit(inst);
// sceneModel->addGeometry(inst);
for(size_t i=0; i<modelStates_.size(); i++) {
ospAddGeometry(modelStates_[i].model, inst);
ospCommit(modelStates_[i].model);
}
}
}
// Force render.
render();
#endif
}
// Save the current framebuffer to a PNG. If `file` is empty the user is
// prompted with a save dialog; a ".png" suffix is appended when missing.
void VolumeViewer::screenshot(const QString& file)
{
// Print current camera view parameters (can be used on command line to recreate view)
qDebug() << "screenshot view parameters (use on command line to reproduce view): \n"
<< " " << osprayWindow_->getViewport()->toString();
QString filename(file);
// Get filename if not specified.
if (filename.isEmpty())
filename = QFileDialog::getSaveFileName(this, tr("Save screenshot"), ".", "PNG files (*.png)");
if (filename.isEmpty())
return;
// Make sure the filename has the proper extension.
if (!filename.endsWith(".png"))
filename += ".png";
// Grab the image.
auto image = osprayWindow_->grabFrameBuffer();
// Save the screenshot.
bool success = image.save(filename);
qDebug() << (success ? "saved screenshot to " : "failed saving screenshot ") << filename;
}
// Keyboard shortcuts: Esc closes the viewer; 'P' prints the current camera
// parameters; 'L' prints the current light parameters (both in a form that
// can be passed on the command line to reproduce the view).
void VolumeViewer::keyPressEvent(QKeyEvent * event)
{
  switch (event->key())
  {
  case Qt::Key_Escape:
    close();
    break;
  case Qt::Key_P:
    std::cout << "View parameters (use on command line to reproduce view): " << std::endl
              << " " << *(osprayWindow_->getViewport()) << std::endl;
    break;
  case Qt::Key_L:
    std::cout << "Light parameters (use on command line to reproduce view): " << std::endl
              << " " << *lightEditor << std::endl;
    break;
  default:
    break;
  }
}
// Re-commit every volume of every model (e.g. after the shared transfer
// function has changed).
void VolumeViewer::commitVolumes()
{
  for (auto& state : modelStates_)
    for (auto& volume : state.volumes)
      ospCommit(volume->handle);
}
// Restart progressive rendering: clear the accumulation buffer and schedule
// a redraw. Safe to call before the window exists.
void VolumeViewer::render()
{
if (osprayWindow_)
{
osprayWindow_->resetAccumulationBuffer();
osprayWindow_->updateGL();
}
}
// Enable/disable the OpenGL annotation overlay. The renderer object is
// created lazily on first enable and destroyed on disable (deleting it also
// breaks the Qt connection; delete on a null pointer is a no-op).
void VolumeViewer::setRenderAnnotationsEnabled(bool value)
{
if (value)
{
if (!annotationRenderer)
annotationRenderer = new OpenGLAnnotationRenderer(this);
// UniqueConnection prevents duplicate connections on repeated enables.
connect(osprayWindow_, SIGNAL(renderGLComponents()), annotationRenderer, SLOT(render()), Qt::UniqueConnection);
}
else
{
delete annotationRenderer;
annotationRenderer = NULL;
}
render();
}
// Allow the renderer to subsample during interaction (spp = -1) or force
// exactly one sample per pixel.
void VolumeViewer::setSubsamplingInteractionEnabled(bool value)
{
  const int samplesPerPixel = value ? -1 : 1;
  ospSet1i(renderer, "spp", samplesPerPixel);
  if (rendererInitialized)
    ospCommit(renderer);
}
// Enable/disable gradient (surface) shading on all volumes; no-op when the
// requested state is already active.
void VolumeViewer::setGradientShadingEnabled(bool value)
{
  if (gradientShadingEnabled == value)
    return;
  gradientShadingEnabled = value;

  for (auto& state : modelStates_)
  {
    for (auto& volume : state.volumes)
    {
      ospSet1i(volume->handle, "gradientShadingEnabled", value);
      ospCommit(volume->handle);
    }
  }
  render();
}
//! Set pre-integration flag on the transfer function.
// Enable/disable pre-integrated transfer-function lookups; no-op when the
// requested state is already active.
void VolumeViewer::setPreIntegration(bool value)
{
  if (preIntegration == value)
    return;
  preIntegration = value;

  ospSet1i(transferFunction, "preIntegration", value);
  ospCommit(transferFunction);
  render();
}
//! Set single-shade flag on all volumes.
// Set the single-shade flag on every volume of every model and re-render.
void VolumeViewer::setSingleShade(bool value)
{
  for (auto& state : modelStates_)
  {
    for (auto& volume : state.volumes)
    {
      ospSet1i(volume->handle, "singleShade", value);
      ospCommit(volume->handle);
    }
  }
  render();
}
// Enable/disable renderer shadows; no-op when the requested state is active.
void VolumeViewer::setShadows(bool value)
{
  if (shadows == value)
    return;
  shadows = value;

  ospSet1i(renderer, "shadowsEnabled", value);
  if (rendererInitialized)
    ospCommit(renderer);
  render();
}
// Show or hide the ground-plane mesh by adding/removing it from every model.
// usePlane starts at -1 ("unset") so the first call always takes effect.
void VolumeViewer::setPlane(bool st)
{
if (usePlane != st)
{
usePlane = st;
if (planeMesh)
{
for(size_t i=0; i<modelStates_.size(); i++)
{
// NOTE(review): this first commit (before the add/remove) looks
// redundant -- the second commit below publishes the change.
ospCommit(modelStates_[i].model);
if (usePlane)
ospAddGeometry(modelStates_[i].model, planeMesh);
else
ospRemoveGeometry(modelStates_[i].model, planeMesh);
ospCommit(modelStates_[i].model);
}
}
render();
}
}
// Set the ambient-occlusion weight on the renderer and re-render.
// (Unlike the other renderer setters, this one keeps no cached value.)
void VolumeViewer::setAOWeight(double value)
{
ospSet1f(renderer, "aoWeight", value);
if(rendererInitialized)
ospCommit(renderer);
render();
}
// Set the ambient-occlusion sample count; no-op when unchanged.
void VolumeViewer::setAOSamples(int value)
{
  if (aoSamples == value)
    return;
  aoSamples = value;

  ospSet1i(renderer, "aoSamples", value);
  if (rendererInitialized)
    ospCommit(renderer);
  render();
}
// Set the samples-per-pixel count; no-op when unchanged.
void VolumeViewer::setSPP(int value)
{
  if (spp == value)
    return;
  spp = value;

  ospSet1i(renderer, "spp", value);
  if (rendererInitialized)
    ospCommit(renderer);
  render();
}
//! Set adaptive-sampling scalar on all volumes.
// Set the adaptive-sampling scalar on every volume and re-render.
void VolumeViewer::setAdaptiveScalar(double value)
{
  for (auto& state : modelStates_)
  {
    for (auto& volume : state.volumes)
    {
      ospSet1f(volume->handle, "adaptiveScalar", value);
      ospCommit(volume->handle);
    }
  }
  render();
}
//! Set adaptive maximum sampling rate on all volumes.
// Set the adaptive maximum sampling rate on every volume; no-op when
// unchanged.
void VolumeViewer::setAdaptiveMaxSamplingRate(double value)
{
  if (adaptiveMaxSamplingRate == value)
    return;
  adaptiveMaxSamplingRate = value;

  for (auto& state : modelStates_)
  {
    for (auto& volume : state.volumes)
    {
      ospSet1f(volume->handle, "adaptiveMaxSamplingRate", value);
      ospCommit(volume->handle);
    }
  }
  render();
}
//! Set adaptive backtrack value on all volumes.
// Set the adaptive backtrack value on every volume and re-render.
void VolumeViewer::setAdaptiveBacktrack(double value)
{
  for (auto& state : modelStates_)
  {
    for (auto& volume : state.volumes)
    {
      ospSet1f(volume->handle, "adaptiveBacktrack", value);
      ospCommit(volume->handle);
    }
  }
  render();
}
//! Set adaptive sampling flag on all volumes.
// Enable/disable adaptive sampling on every volume; no-op when the requested
// state is already active.
void VolumeViewer::setAdaptiveSampling(bool value)
{
  if (adaptiveSampling == value)
    return;
  adaptiveSampling = value;

  for (auto& state : modelStates_)
  {
    for (auto& volume : state.volumes)
    {
      ospSet1i(volume->handle, "adaptiveSampling", value);
      ospCommit(volume->handle);
    }
  }
  render();
}
// Set the base ray sampling rate on every volume; no-op when unchanged.
void VolumeViewer::setSamplingRate(double value)
{
  if (samplingRate == value)
    return;
  samplingRate = value;

  for (auto& state : modelStates_)
  {
    for (auto& volume : state.volumes)
    {
      ospSet1f(volume->handle, "samplingRate", value);
      ospCommit(volume->handle);
    }
  }
  render();
}
// Apply an axis-aligned clipping box to every volume and re-render.
void VolumeViewer::setVolumeClippingBox(ospcommon::box3f value)
{
  for (auto& state : modelStates_)
  {
    for (auto& volume : state.volumes)
    {
      ospSet3fv(volume->handle, "volumeClippingBoxLower", &value.lower.x);
      ospSet3fv(volume->handle, "volumeClippingBoxUpper", &value.upper.x);
      ospCommit(volume->handle);
    }
  }
  render();
}
// Replace the slice geometries on every model with the given set of slices.
// Each slice plane is handed to OSPRay as the (a,b,c,d) coefficients of the
// plane equation ax + by + cz + d = 0.
void VolumeViewer::setSlices(std::vector<SliceParameters> sliceParameters)
{
  std::vector<ospcommon::vec4f> planes;
  for (const auto& slice : sliceParameters)
    planes.push_back(ospcommon::vec4f(slice.normal.x,
                                      slice.normal.y,
                                      slice.normal.z,
                                      -dot(slice.origin, slice.normal)));

  // BUGFIX: &planes[0] on an empty vector is undefined behavior (and this
  // function is legitimately called with zero slices to clear them); pass a
  // null source for the zero-element case.
  OSPData planesData = ospNewData(planes.size(), OSP_FLOAT4,
                                  planes.empty() ? nullptr : &planes[0].x);

  // Remove existing slice geometries from models.
  for (auto& state : modelStates_)
  {
    for (const auto& slice : state.slices)
      ospRemoveGeometry(state.model, slice->handle);
    state.slices.clear();
  }

  // Add new slices for each volume of each model. Later we can do this only
  // for the active model on time step change...
  for (auto& state : modelStates_)
  {
    if (!planes.empty())
    {
      for (const auto& volume : state.volumes)
      {
        OSPGeometry slicesGeometry = ospNewGeometry("slices");
        ospSetData(slicesGeometry, "planes", planesData);
        ospSetObject(slicesGeometry, "volume", volume->handle);
        ospCommit(slicesGeometry);
        ospAddGeometry(state.model, slicesGeometry);
        state.slices.push_back(std::make_shared<ModelState::Geometry>(slicesGeometry));
      }
    }
    ospCommit(state.model);
  }

  render();
}
// Replace the isosurface geometries on every model with surfaces at the
// given iso-values (an empty vector simply clears all isosurfaces).
void VolumeViewer::setIsovalues(const std::vector<float>& isovalues)
{
  // Remove existing isosurface geometries from models.
  for (auto& state : modelStates_)
  {
    for (const auto& iso : state.isosurfaces)
      ospRemoveGeometry(state.model, iso->handle);
    state.isosurfaces.clear();
  }

  // BUGFIX: &isovalues[0] on an empty vector is undefined behavior (the
  // isosurface editor can emit an empty list); pass a null source for the
  // zero-element case.
  OSPData isovaluesData = ospNewData(isovalues.size(), OSP_FLOAT,
                                     isovalues.empty() ? nullptr : isovalues.data());

  // Add new isosurfaces for each volume of each model. Later we can do this
  // only for the active model on time step change...
  for (auto& state : modelStates_)
  {
    if (!isovalues.empty())
    {
      for (const auto& vol : state.volumes)
      {
        OSPGeometry isosurfacesGeometry = ospNewGeometry("isosurfaces");
        ospSetData(isosurfacesGeometry, "isovalues", isovaluesData);
        ospSetObject(isosurfacesGeometry, "volume", vol->handle);
        ospCommit(isosurfacesGeometry);
        ospAddGeometry(state.model, isosurfacesGeometry);
        state.isosurfaces.push_back(std::make_shared<ModelState::Geometry>(isosurfacesGeometry));
      }
    }
    ospCommit(state.model);
  }

  render();
}
// Import geometries and volumes from `filename` via the generic OSPRay
// importer. In shared-model mode a single new model receives everything;
// otherwise loadGeometry/loadVolume each create their own model.
void VolumeViewer::importObjectsFromFile(const std::string &filename)
{
//PRINT(ownModelPerObject_);
if (!ownModelPerObject_)
{
// Create an OSPRay model and its associated model state.
modelStates_.push_back(ModelState(ospNewModel()));
}
std::unique_ptr<ospray::importer::Group> imported(ospray::importer::import(filename));
// NOTE(review): assert() is compiled out in release builds, so a failed
// import would dereference null below -- consider an explicit check.
assert(imported);
//PRINT(imported->geometry.size());
for (const auto& geom : imported->geometry)
{
loadGeometry(geom->handle);
}
for (const auto& vol : imported->volume)
{
loadVolume(vol->handle, vol->voxelRange, vol->bounds);
}
if (!ownModelPerObject_)
ospCommit(modelStates_.back().model);
}
// Commit `geom` and add it to the current model. In per-object mode a fresh
// model is created (and committed) for this geometry; otherwise the caller
// is responsible for committing the shared model.
void VolumeViewer::loadGeometry(OSPGeometry geom)
{
//PRINT(ownModelPerObject_);
//PRINT(modelStates_.size());
if (ownModelPerObject_)
modelStates_.push_back(ModelState(ospNewModel()));
ospCommit(geom);
// Add the loaded geometry to the model.
ospAddGeometry(modelStates_.back().model, geom);
if (ownModelPerObject_)
ospCommit(modelStates_.back().model);
//PRINT(modelStates_.size());
}
// Attach the shared transfer function to `vol`, add it to the current model,
// and record its bounds/voxel range in the model state. In per-object mode a
// fresh model is created (and committed) for this volume.
void VolumeViewer::loadVolume(OSPVolume vol, const vec2f& voxelRange, const box3f& bounds)
{
if (ownModelPerObject_)
modelStates_.push_back(ModelState(ospNewModel()));
assert(vol);
// For now we set the same transfer function on all volumes.
ospSetObject(vol, "transferFunction", transferFunction);
ospCommit(vol);
// Add the loaded volume(s) to the model.
ospAddVolume(modelStates_.back().model, vol);
assert(!bounds.empty());
// Add to volumes vector for the current model.
modelStates_.back().volumes.push_back(std::make_shared<ModelState::Volume>(vol, bounds, voxelRange));
if (ownModelPerObject_)
ospCommit(modelStates_.back().model);
}
// Full OSPRay setup sequence: global objects (renderer/lights/transfer
// function), then any extra geometries, then bounds + per-volume defaults.
void VolumeViewer::initObjects(const std::string &renderer_type)
{
globalInit(renderer_type);
loadAdditionalGeometries();
initPostObjects();
}
// Load the geometries supplied via OsprayObjectParameters::moreObjects.
// In shared-model mode one model is created up front to hold them all.
void VolumeViewer::loadAdditionalGeometries()
{
if (!ownModelPerObject_)
{
modelStates_.push_back(ModelState(ospNewModel()));
}
for (const auto& geom : additionalObjects_)
{
loadGeometry(geom);
}
}
void VolumeViewer::globalInit(const std::string &renderer_type)
{
// Create an OSPRay renderer.
renderer = ospNewRenderer(renderer_type.c_str());
exitOnCondition(renderer == NULL, "could not create OSPRay renderer object");
// Set renderer defaults (if not using 'aoX' renderers)
if (renderer_type[0] != 'a' && renderer_type[1] != 'o')
{
ospSet1i(renderer, "aoSamples", 1);
ospSet1i(renderer, "shadowsEnabled", 1);
ospSet1i(renderer, "aoTransparencyEnabled", 1);
}
// Create OSPRay ambient and directional lights. GUI elements will modify their parameters.
ambientLight = ospNewLight(renderer, "AmbientLight");
exitOnCondition(ambientLight == NULL, "could not create ambient light");
ospSet3f(ambientLight, "color", 0.3f, 0.5f, 1.f);
ospCommit(ambientLight);
directionalLight = ospNewLight(renderer, "DirectionalLight");
exitOnCondition(directionalLight == NULL, "could not create directional light");
ospSet3f(directionalLight, "color", 1.f, 0.9f, 0.4f);
ospCommit(directionalLight);
// Set the light sources on the renderer.
std::vector<OSPLight> lights;
lights.push_back(ambientLight);
lights.push_back(directionalLight);
ospSetData(renderer, "lights", ospNewData(lights.size(), OSP_OBJECT, &lights[0]));
// Create an OSPRay transfer function.
transferFunction = ospNewTransferFunction("piecewise_linear");
exitOnCondition(transferFunction == NULL, "could not create OSPRay transfer function object");
ospCommit(transferFunction);
}
// Half-extent of the fallback bounding box used when no models are loaded.
const float b = -3;
// Compute the world bounding box (from the first model's volumes, the preset
// box, or the fallback cube), create the ground plane when models exist, and
// apply a default specular color to every volume.
void VolumeViewer::initPostObjects()
{
boundingBox_ = ospcommon::empty;
if (!modelStates_.empty())
{
for (const auto& vol : modelStates_[0].volumes)
boundingBox_.extend(vol->boundingBox);
// Geometry-only scenes fall back to the caller-supplied preset box.
if (modelStates_[0].volumes.empty())
{
boundingBox_ = presetBoundingBox_;
}
addInitialPlane();
}
else
{
// No models at all: use a small symmetric cube (b is negative).
boundingBox_ = {{b,b,b},{-b,-b,-b}};
}
osp::vec3f specular = osp::vec3f{0.135f,0.135f,0.135f};
for (const auto& model : modelStates_)
{
for (const auto& vol : model.volumes)
{
ospSet3fv(vol->handle, "specular", &specular.x);
ospCommit(vol->handle);
}
}
}
// Build the very large two-triangle backdrop plane (positioned just above the
// scene bounding box), store it in planeMesh, and register it with the models
// via setPlane().
void VolumeViewer::addInitialPlane()
{
  OSPMaterial planeMaterial = ospNewMaterial(renderer,"default");
  ospSet3f(planeMaterial,"Kd",.5,.5,.5);
  ospSet3f(planeMaterial,"Ks",0,0,0);
  ospSet1f(planeMaterial,"Ns",0);
  ospCommit(planeMaterial);

  // A huge quad offset one unit above the top of the bounding box.
  osp::vec3f vertices[4];
  float ps = 100000.f;
  float py = boundingBox_.upper.y+1.f;

  vertices[0] = osp::vec3f{-ps, -ps, py};
  vertices[1] = osp::vec3f{-ps, ps, py};
  vertices[2] = osp::vec3f{ ps, -ps, py};
  vertices[3] = osp::vec3f{ ps, ps, py};

  planeMesh = ospNewGeometry("triangles");

  OSPData position = ospNewData(4, OSP_FLOAT3, &vertices[0]);
  ospCommit(position);
  ospSetData(planeMesh, "vertex", position);

  osp::vec3i triangles[2];
  triangles[0] = osp::vec3i{0,1,2};
  triangles[1] = osp::vec3i{1,2,3};

  OSPData index = ospNewData(2, OSP_INT3, &triangles[0]);
  ospCommit(index);
  ospSetData(planeMesh, "index", index);
  ospSetMaterial(planeMesh, planeMaterial);
  ospCommit(planeMesh);

  setPlane(usePlane);

  // The geometry holds its own references after ospSetData, so drop ours.
  // BUGFIX: 'position' was previously leaked -- only 'index' was released.
  ospRelease(index);
  ospRelease(position);
}
// Disabled (compiled out): batch-import every file listed in
// objectFileFilenames_.
#if 0
void VolumeViewer::loadObjectsFromFiles()
{
for (const auto& file : objectFileFilenames_)
importObjectsFromFile(file);
}
#endif
// Build the dockable editor widgets (transfer function, slices, isosurfaces,
// lights, probe) and toolbar actions. The entire body is currently compiled
// out (#if 0) -- the editors are wired up elsewhere.
// NOTE(review): within the disabled code, lightEditorDockWidget and
// probeDockWidget are referenced (tabifyDockWidget) before they are declared;
// this would need reordering if the block is ever re-enabled.
void VolumeViewer::initUserInterfaceWidgets()
{
#if 0
// Add the "add geometry" widget and callback.
QAction *addGeometryAction = new QAction("Add geometry", this);
connect(addGeometryAction, SIGNAL(triggered()), this, SLOT(addGeometry()));
toolbar->addAction(addGeometryAction);
// Create the transfer function editor dock widget, this widget modifies the transfer function directly.
QDockWidget *transferFunctionEditorDockWidget = new QDockWidget("Transfer Function", this);
transferFunctionEditor = new TransferFunctionEditor(transferFunction);
transferFunctionEditorDockWidget->setWidget(transferFunctionEditor);
connect(transferFunctionEditor, SIGNAL(committed()), this, SLOT(commitVolumes()));
connect(transferFunctionEditor, SIGNAL(committed()), this, SLOT(render()));
addDockWidget(Qt::LeftDockWidgetArea, transferFunctionEditorDockWidget);
// Set the transfer function editor widget to its minimum allowed height, to leave room for other dock widgets.
transferFunctionEditor->setMaximumHeight(transferFunctionEditor->minimumSize().height());
// Create slice editor dock widget.
QDockWidget *sliceEditorDockWidget = new QDockWidget("Slices", this);
sliceEditor = new SliceEditor(boundingBox_);
sliceEditorDockWidget->setWidget(sliceEditor);
connect(sliceEditor, SIGNAL(slicesChanged(std::vector<SliceParameters>)), this, SLOT(setSlices(std::vector<SliceParameters>)));
addDockWidget(Qt::LeftDockWidgetArea, sliceEditorDockWidget);
// Create isosurface editor dock widget.
QDockWidget *isosurfaceEditorDockWidget = new QDockWidget("Isosurfaces", this);
isosurfaceEditor = new IsosurfaceEditor();
isosurfaceEditorDockWidget->setWidget(isosurfaceEditor);
connect(isosurfaceEditor, SIGNAL(isovaluesChanged(std::vector<float>)), this, SLOT(setIsovalues(std::vector<float>)));
addDockWidget(Qt::LeftDockWidgetArea, isosurfaceEditorDockWidget);
// Default to showing transfer function tab widget.
transferFunctionEditorDockWidget->raise();
// Tabify dock widgets.
tabifyDockWidget(transferFunctionEditorDockWidget, sliceEditorDockWidget);
tabifyDockWidget(transferFunctionEditorDockWidget, isosurfaceEditorDockWidget);
tabifyDockWidget(transferFunctionEditorDockWidget, lightEditorDockWidget);
tabifyDockWidget(transferFunctionEditorDockWidget, probeDockWidget);
// Add the "screenshot" widget and callback.
QAction *screenshotAction = new QAction("Screenshot", this);
connect(screenshotAction, SIGNAL(triggered()), this, SLOT(screenshot()));
toolbar->addAction(screenshotAction);
// Create the light editor dock widget, this widget modifies the light directly.
QDockWidget *lightEditorDockWidget = new QDockWidget("Lights", this);
lightEditorDockWidget->setWidget(lightEditor);
addDockWidget(Qt::LeftDockWidgetArea, lightEditorDockWidget);
// Create the probe dock widget.
QDockWidget *probeDockWidget = new QDockWidget("Probe", this);
probeDockWidget->setWidget(probeWidget);
addDockWidget(Qt::LeftDockWidgetArea, probeDockWidget);
// Tabs on top.
setTabPosition(Qt::LeftDockWidgetArea, QTabWidget::North);
// Add the current OSPRay object file label to the bottom status bar.
statusBar()->addWidget(&currentFilenameInfoLabel);
#endif
}
// Toggle visibility of the ambient light and re-render.
void VolumeViewer::setAmbientLightVisible(bool visible)
{
ospSet1i(ambientLight, "isVisible", visible);
ospCommit(ambientLight);
render();
}
// Toggle visibility of the directional light and re-render.
void VolumeViewer::setDirectionalLightVisible(bool visible)
{
ospSet1i(directionalLight, "isVisible", visible);
ospCommit(directionalLight);
render();
}
// Set the ambient light color (RGB in [0,1]) and re-render.
void VolumeViewer::setAmbientLightColor(float r, float g, float b)
{
ospSet3f(ambientLight, "color", r, g, b);
ospCommit(ambientLight);
render();
}
// Set the directional light color (RGB in [0,1]) and re-render.
void VolumeViewer::setDirectionalLightColor(float r, float g, float b)
{
ospSet3f(directionalLight, "color", r, g, b);
ospCommit(directionalLight);
render();
}
| mit |
dannyfritz/funcdash | test/and.js | 303 | var test = require('tape');
var and = require('../logic/and');
// Truth-table driven checks for and(); non-boolean truthy/falsy inputs
// (1, false) are expected to coerce to a boolean result.
test('and', function (t) {
  var cases = [
    [true, true, true],
    [true, false, false],
    [false, true, false],
    [false, false, false],
    [1, 1, true],
    [1, false, false]
  ];
  cases.forEach(function (c) {
    t.equal(and(c[0], c[1]), c[2]);
  });
  t.end();
});
| mit |
talho/rollcall | app/controllers/rollcall/map_controller.rb | 3024 | class Rollcall::MapController < Rollcall::RollcallAppController
respond_to :json
layout false
# Absenteeism map data for the last week (8 days ago through yesterday),
# aggregated per district when params[:school_district] is given, otherwise
# per school. Renders JSON via respond_with.
def index
  @start = 8.days.ago.to_date
  @end = 1.days.ago.to_date
  @results = params[:school_district].present? ? get_by_school_district : get_by_school
  respond_with(@results, @start, @end)
end
private
def get_by_school_district
flat_results = Rollcall::SchoolDistrict
.joins("inner join (select report_date, district_id, sum(total_absent) as total_absent, sum(total_enrolled) as total_enrolled from rollcall_school_daily_infos inner join rollcall_schools on rollcall_schools.id = school_id group by report_date, district_id) as totals on totals.district_id = rollcall_school_districts.id")
.joins("inner join (select district_id, (atan2(avg(cos(gmap_lat * pi() / 180) * sin(gmap_lng * pi() / 180)), avg(cos(gmap_lat * pi() / 180) * cos(gmap_lng * pi() / 180)))) * 180 / pi() as gmap_lng, (atan2(avg(sin(gmap_lat * pi() / 180)), sqrt(avg(cos(gmap_lat * pi() / 180) * cos(gmap_lng * pi() / 180)) ^ 2 + avg(cos(gmap_lat * pi() / 180) * sin(gmap_lng * pi() / 180)) ^ 2))) * 180 / pi() as gmap_lat from rollcall_schools where district_id is not null group by district_id) as sdc on sdc.district_id = rollcall_school_districts.id")
.where("report_date >= ? and report_date <= ?", @start, @end)
.where("total_enrolled <> 0")
.select("name as display_name, sdc.gmap_lat, sdc.gmap_lng, rollcall_school_districts.id, report_date")
.select("round(Cast(total_absent as float) / Cast(total_enrolled as float) * 100) as weight")
.order("report_date, rollcall_school_districts.id")
.all
process_query flat_results
end
def get_by_school
flat_results = Rollcall::School
.joins("inner join rollcall_school_daily_infos i on i.school_id = rollcall_schools.id")
.where("report_date >= ? and report_date <= ?", @start, @end)
.where("total_enrolled <> 0")
.where("gmap_lat is not null and gmap_lng is not null")
.select("display_name, gmap_lat, gmap_lng, rollcall_schools.id, report_date")
.select("round(Cast(total_absent as float) / Cast(total_enrolled as float) * 100) as weight")
.order("report_date, rollcall_schools.id")
.all
process_query flat_results
end
def process_query(flat_results)
schools = Array.new
results = Array.new
if !flat_results.empty?
iterator_date = flat_results[0][:report_date]
flat_results.each_with_index do |s, i|
if flat_results.count - 1 == i
schools.push(s)
results.push({record_date: iterator_date, schools: schools})
end
if iterator_date != s.report_date
results.push({record_date: iterator_date, schools: schools})
schools = Array.new
iterator_date = s.report_date
else
schools.push(s)
end
end
end
return results
end
end | mit |
aggiedefenders/aggiedefenders.github.io | node_modules/re-base/tests/specs/firestore/reset.spec.js | 3281 | const Rebase = require('../../../src/rebase');
const React = require('react');
const ReactDOM = require('react-dom');
const firebase = require('firebase');
require('firebase/firestore');
var invalidEndpoints = require('../../fixtures/invalidEndpoints');
var dummyObjData = require('../../fixtures/dummyObjData');
var dummyArrayOfObjects = require('../../fixtures/dummyArrayOfObjects');
var invalidOptions = require('../../fixtures/invalidOptions');
var dummyArrData = require('../../fixtures/dummyArrData');
var firebaseConfig = require('../../fixtures/config');
describe('reset()', function() {
var base;
var testApp;
var collectionPath = 'testCollection';
var collectionRef;
var app;
beforeAll(() => {
testApp = firebase.initializeApp(firebaseConfig, 'DB_CHECK');
collectionRef = testApp.firestore().collection(collectionPath);
var mountNode = document.createElement('div');
mountNode.setAttribute('id', 'mount');
document.body.appendChild(mountNode);
});
afterAll(done => {
var mountNode = document.getElementById('mount');
mountNode.parentNode.removeChild(mountNode);
testApp.delete().then(done);
});
beforeEach(() => {
app = firebase.initializeApp(firebaseConfig);
var db = firebase.firestore(app);
base = Rebase.createClass(db);
});
afterEach(done => {
ReactDOM.unmountComponentAtNode(document.body);
Promise.all([
collectionRef.get().then(docs => {
const deleteOps = [];
docs.forEach(doc => {
deleteOps.push(doc.ref.delete());
});
return Promise.all(deleteOps);
}),
app.delete()
])
.then(done)
.catch(err => done.fail(err));
});
it('should remove listeners set by the app', done => {
class TestComponent extends React.Component {
constructor(props) {
super(props);
this.state = {
user: {}
};
}
componentDidMount() {
this.ref = base.bindCollection(`${collectionPath}`, {
context: this,
state: 'user'
});
base.reset();
collectionRef
.doc('testDoc')
.set({ user: 'abcdef' })
.then(() => {
setTimeout(done, 500);
});
}
componentDidUpdate() {
done.fail('listener should have been removed');
}
render() {
return <div>No Data</div>;
}
}
ReactDOM.render(<TestComponent />, document.getElementById('mount'));
});
it('should remove syncs set by the app', done => {
class TestComponent extends React.Component {
constructor(props) {
super(props);
this.state = {
user: {}
};
}
componentDidMount() {
this.ref = base.syncDoc(`${collectionPath}/testDoc`, {
context: this,
state: 'user'
});
base.reset();
collectionRef
.doc('testDoc')
.set({ user: 'abcdef' })
.then(() => {
setTimeout(done, 500);
});
}
componentDidUpdate() {
done.fail('Sync should have been removed');
}
render() {
return <div>No Data</div>;
}
}
ReactDOM.render(<TestComponent />, document.getElementById('mount'));
});
});
| mit |
stivalet/PHP-Vulnerability-test-suite | XSS/CWE_79/safe/CWE_79__fopen__func_FILTER-CLEANING-number_float_filter__Use_untrusted_data_script-side_DoubleQuoted_Expr.php | 1633 | <!--
Safe sample
input : use fopen to read /tmp/tainted.txt and put the first line in $tainted
Uses a number_float_filter via filter_var function
File : use of untrusted data in one side of a double quoted expression in a script
-->
<!--Copyright 2015 Bertrand STIVALET
Permission is hereby granted, without written agreement or royalty fee, to
use, copy, modify, and distribute this software and its documentation for
any purpose, provided that the above copyright notice and the following
three paragraphs appear in all copies of this software.
IN NO EVENT SHALL AUTHORS BE LIABLE TO ANY PARTY FOR DIRECT,
INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF AUTHORS HAVE
BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
AUTHORS SPECIFICALLY DISCLAIM ANY WARRANTIES INCLUDING, BUT NOT
LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE, AND NON-INFRINGEMENT.
THE SOFTWARE IS PROVIDED ON AN "AS-IS" BASIS AND AUTHORS HAVE NO
OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
MODIFICATIONS.-->
<!DOCTYPE html>
<html>
<head>
<script>
<?php
$handle = @fopen("/tmp/tainted.txt", "r");
if ($handle) {
if(($tainted = fgets($handle, 4096)) == false) {
$tainted = "";
}
fclose($handle);
} else {
$tainted = "";
}
$sanitized = filter_var($tainted, FILTER_SANITIZE_NUMBER_FLOAT);
if (filter_var($sanitized, FILTER_VALIDATE_FLOAT))
$tainted = $sanitized ;
else
$tainted = "" ;
echo "x=\"". $tainted."\"" ;
?>
</script>
</head>
<body>
<h1>Hello World!</h1>
</body>
</html> | mit |
dinhhuydh/rake_t | lib/rake_t.rb | 190 | require 'rake_t/engine'
require 'rails/rake_t'
require 'rake'
module RakeT
def self.all_tasks
Rake::Task.tasks
end
def self.load_tasks
Rails.application.load_tasks
end
end
| mit |
jirutka/gitlabhq | app/services/projects/lfs_pointers/lfs_import_service.rb | 3060 | # frozen_string_literal: true
# This service manages the whole worflow of discovering the Lfs files in a
# repository, linking them to the project and downloading (and linking) the non
# existent ones.
module Projects
module LfsPointers
class LfsImportService < BaseService
include Gitlab::Utils::StrongMemoize
HEAD_REV = 'HEAD'.freeze
LFS_ENDPOINT_PATTERN = /^\t?url\s*=\s*(.+)$/.freeze
LFS_BATCH_API_ENDPOINT = '/info/lfs/objects/batch'.freeze
LfsImportError = Class.new(StandardError)
def execute
return {} unless project&.lfs_enabled?
if external_lfs_endpoint?
# If the endpoint host is different from the import_url it means
# that the repo is using a third party service for storing the LFS files.
# In this case, we have to disable lfs in the project
disable_lfs!
return {}
end
get_download_links
rescue LfsDownloadLinkListService::DownloadLinksError => e
raise LfsImportError, "The LFS objects download list couldn't be imported. Error: #{e.message}"
end
private
def external_lfs_endpoint?
lfsconfig_endpoint_uri && lfsconfig_endpoint_uri.host != import_uri.host
end
def disable_lfs!
project.update(lfs_enabled: false)
end
def get_download_links
existent_lfs = LfsListService.new(project).execute
linked_oids = LfsLinkService.new(project).execute(existent_lfs.keys)
# Retrieving those oids not linked and which we need to download
not_linked_lfs = existent_lfs.except(*linked_oids)
LfsDownloadLinkListService.new(project, remote_uri: current_endpoint_uri).execute(not_linked_lfs)
end
def lfsconfig_endpoint_uri
strong_memoize(:lfsconfig_endpoint_uri) do
# Retrieveing the blob data from the .lfsconfig file
data = project.repository.lfsconfig_for(HEAD_REV)
# Parsing the data to retrieve the url
parsed_data = data&.match(LFS_ENDPOINT_PATTERN)
if parsed_data
URI.parse(parsed_data[1]).tap do |endpoint|
endpoint.user ||= import_uri.user
endpoint.password ||= import_uri.password
end
end
end
rescue URI::InvalidURIError
raise LfsImportError, 'Invalid URL in .lfsconfig file'
end
def import_uri
@import_uri ||= URI.parse(project.import_url)
rescue URI::InvalidURIError
raise LfsImportError, 'Invalid project import URL'
end
def current_endpoint_uri
(lfsconfig_endpoint_uri || default_endpoint_uri)
end
# The import url must end with '.git' here we ensure it is
def default_endpoint_uri
@default_endpoint_uri ||= begin
import_uri.dup.tap do |uri|
path = uri.path.gsub(%r(/$), '')
path += '.git' unless path.ends_with?('.git')
uri.path = path + LFS_BATCH_API_ENDPOINT
end
end
end
end
end
end
| mit |
agiamas/publify | spec/models/user_spec.rb | 11401 | require 'spec_helper'
describe User do
describe 'FactoryGirl Girl' do
it 'should user factory valid' do
create(:user).should be_valid
build(:user).should be_valid
end
it 'should multiple user factory valid' do
create(:user).should be_valid
create(:user).should be_valid
end
it 'salt should not be nil' do
User.salt.should == '20ac4d290c2293702c64b3b287ae5ea79b26a5c1'
end
end
context 'With the contents and users fixtures loaded' do
before(:each) do
User.stub(:salt).and_return('change-me')
end
it 'Calling User.authenticate with a valid user/password combo returns a user' do
alice = create(:user, :login => 'alice', :password => 'greatest')
User.authenticate('alice', 'greatest').should == alice
end
it 'User.authenticate(user,invalid) returns nil' do
create(:user, :login => 'alice', :password => 'greatest')
User.authenticate('alice', 'wrong password').should be_nil
end
it 'User.authenticate(inactive,valid) returns nil' do
create(:user, :login => 'alice', :state => 'inactive')
User.authenticate('inactive', 'longtest').should be_nil
end
it 'User.authenticate(invalid,whatever) returns nil' do
create(:user, :login => 'alice')
User.authenticate('userwhodoesnotexist', 'what ever').should be_nil
end
it 'The various article finders work appropriately' do
create(:blog)
tobi = create(:user)
7.times do
create(:article, :user => tobi)
end
create(:article, :published => false, :published_at => nil, :user => tobi)
tobi.articles.size.should == 8
tobi.articles.published.size.should == 7
end
it 'authenticate? works as expected' do
bob = create(:user, :login => 'bob', :password => 'testtest')
User.should be_authenticate('bob', 'testtest')
User.should_not be_authenticate('bob', 'duff password')
end
end
describe 'With a new user' do
before(:each) do
@user = User.new :login => 'not_bob'
@user.email = 'publify@publify.com'
set_password 'a secure password'
end
describe "the password" do
it 'can be just right' do
set_password 'Just right'
@user.should be_valid
end
{ 'too short' => 'x',
'too long' => 'repetitivepass' * 10,
'empty' => ''
}.each do |problematic, password|
it "cannot be #{problematic}" do
set_password password
@user.should_not be_valid
@user.errors['password'].should be_any
end
end
it "has to match confirmation" do
@user.password = "foo"
@user.password_confirmation = "bar"
@user.should_not be_valid
@user.errors['password'].should be_any
end
end
describe 'the login' do
it 'can be just right' do
@user.login = 'okbob'
@user.should be_valid
end
{ 'too short' => 'x',
'too long' => 'repetitivepass' * 10,
'empty' => ''
}.each do |problematic, login|
it "cannot be #{problematic}" do
@user.login = login
@user.should_not be_valid
@user.errors['login'].should be_any
end
end
end
it 'email cannot be blank' do
@user.email = ''
@user.should_not be_valid
end
describe "#display_name" do
it 'should not be blank' do
@user.display_name.should_not be_empty
end
end
def set_password(newpass)
@user.password = @user.password_confirmation = newpass
end
end
describe 'With a user in the database' do
before(:each) do
@olduser = create(:user)
end
it 'should not be able to create another user with the same login' do
login = @olduser.login
u = User.new(:login => login) {|u| u.password = u.password_confirmation = 'secure password'}
u.should_not be_valid
u.errors['login'].should be_any
end
end
describe 'Updating an existing user' do
before(:each) do
@user = create(:user)
set_password 'a secure password'
@user.save!
end
describe "the password" do
{ 'just right' => 'Just right',
'empty' => ''
}.each do |ok, password|
it "can be #{ok}" do
set_password password
@user.should be_valid
end
end
{ 'too short' => 'x',
'too long' => 'repetitivepass' * 10,
}.each do |problematic, password|
it "cannot be #{problematic}" do
set_password password
@user.should_not be_valid
@user.errors['password'].should be_any
end
end
it "has to match confirmation" do
@user.password = "foo"
@user.password_confirmation = "bar"
@user.should_not be_valid
@user.errors['password'].should be_any
end
it "is not actually changed when set to empty" do
set_password ''
@user.save!
User.authenticate(@user.login, '').should be_nil
User.authenticate(@user.login, 'a secure password').should == @user
end
end
describe "saving twice" do
it "should not change the password" do
(found = User.authenticate(@user.login, 'a secure password')).should == @user
found.save
found.save
User.authenticate(@user.login, 'a secure password').should == found
end
end
describe 'the login' do
it 'must not change' do
@user.login = 'not_bob'
@user.should_not be_valid
end
end
def set_password(newpass)
@user.password = @user.password_confirmation = newpass
end
end
describe "#initialize" do
it "accepts a settings field in its parameter hash" do
User.new({"firstname" => 'foo'})
end
end
describe '#admin?' do
it 'should return true if user is admin' do
admin = build(:user, :profile => build(:profile_admin, :label => Profile::ADMIN))
admin.should be_admin
end
it 'should return false if user is not admin' do
publisher = build(:user, :profile => build(:profile_publisher))
publisher.should_not be_admin
end
end
describe "set_author" do
it "uses user given param to set author AND user of article" do
article = Article.new
user = build(:user, login: 'Henri')
article.set_author(user)
article.author.should eq 'Henri'
article.user.should eq user
end
end
describe "generate_password!" do
it "respond to generate_password!" do
User.new.should respond_to(:generate_password!)
end
it "set a 7 char length password" do
user = User.new
user.should_receive(:rand).exactly(7).times.and_return(0)
user.should_receive(:password=).with('a' * 7)
user.generate_password!
end
end
describe "default_text_filter" do
it "returns user text_filter" do
blog = create(:blog)
user = build(:user)
expect(user.default_text_filter.name).to eq(blog.text_filter)
end
end
describe :first_and_last_name do
context "with first and last name" do
let(:user) { create(:user, firstname: 'Marlon', lastname: 'Brando') }
it { expect(user.first_and_last_name).to eq('Marlon Brando') }
end
context "with firstname without lastname" do
let(:user) { create(:user, firstname: 'Marlon', lastname: nil) }
it { expect(user.first_and_last_name).to eq('') }
end
end
describe :display_names do
context "with user without nickname, firstname, lastname" do
let(:user) { create(:user, nickname: nil, firstname: nil, lastname: nil) }
it { expect(user.display_names).to eq([user.login]) }
end
context "with user with nickname without firstname, lastname" do
let(:user) { create(:user, nickname: 'Bob', firstname: nil, lastname: nil) }
it { expect(user.display_names).to eq([user.login, user.nickname]) }
end
context "with user with firstname, without nickname, lastname" do
let(:user) { create(:user, nickname: nil, firstname: 'Robert', lastname: nil) }
it { expect(user.display_names).to eq([user.login, user.firstname]) }
end
context "with user with lastname, without nickname, firstname" do
let(:user) { create(:user, nickname: nil, firstname: nil, lastname: 'Redford') }
it { expect(user.display_names).to eq([user.login, user.lastname]) }
end
context "with user with firstname and lastname, witjout nickname" do
let(:user) { create(:user, nickname: nil, firstname: 'Robert', lastname: 'Redford') }
it { expect(user.display_names).to eq([user.login, user.firstname, user.lastname, "#{user.firstname} #{user.lastname}"]) }
end
end
describe "User's Twitter configuration" do
it "A user without twitter_oauth_token or twitter_oauth_token_secret should not have Twitter configured" do
user = build(:user, twitter_oauth_token:nil, twitter_oauth_token_secret:nil)
user.has_twitter_configured?.should == false
end
it "A user with an empty twitter_oauth_token and no twitter_oauth_token_secret should not have Twitter configured" do
user = build(:user, twitter_oauth_token: "", twitter_oauth_token_secret: nil)
user.has_twitter_configured?.should == false
end
it "A user with an empty twitter_oauth_token and an empty twitter_oauth_token_secret should not have Twitter configured" do
user = build(:user, twitter_oauth_token: "", twitter_oauth_token_secret: "")
user.has_twitter_configured?.should == false
end
it "A user with a twitter_oauth_token and no twitter_oauth_token_secret should not have Twitter configured" do
user = build(:user, twitter_oauth_token: "12345", twitter_oauth_token_secret: '')
user.has_twitter_configured?.should == false
end
it "A user with a twitter_oauth_token and an empty twitter_oauth_token_secret should not have Twitter configured" do
user = build(:user, twitter_oauth_token: "12345", twitter_oauth_token_secret: "")
user.has_twitter_configured?.should == false
end
it "A user with a twitter_oauth_token_secret and no twitter_oauth_token should not have Twitter configured" do
user = build(:user, twitter_oauth_token: "", twitter_oauth_token_secret: "67890")
user.has_twitter_configured?.should == false
end
it "A user with a twitter_oauth_token_secret and an empty twitter_oauth_token should not have Twitter configured" do
user = build(:user, twitter_oauth_token_secret: "67890", twitter_oauth_token: "")
user.has_twitter_configured?.should == false
end
it "A user with a twitter_oauth_token and a twitter_oauth_token_secret should have Twitter configured" do
user = build(:user, twitter_oauth_token: "12345", twitter_oauth_token_secret: "67890")
user.has_twitter_configured?.should == true
end
end
describe :can_access_to do
let(:profile) { create(:profile, modules: modules) }
let(:user) { create(:user, profile: profile) }
AccessControl.available_modules.each do |m|
context "without module #{m}" do
let(:modules) { [] }
it { expect(user.send("can_access_to_#{m}?")).to be_false }
end
context "with module #{m}" do
let(:modules) { [m] }
it { expect(user.send("can_access_to_#{m}?")).to be_true }
end
end
end
end
| mit |
LFGUI/LFGUI | Doxygen/html/search/enumvalues_1.js | 270 | var searchData=
[
['beam',['beam',['../namespacelfgui.html#a765b713f4249e73e28aaa97f10bf6285a5435eeb714f3a0739ca75b3b0eb8cfb3',1,'lfgui']]],
['busy',['busy',['../namespacelfgui.html#a765b713f4249e73e28aaa97f10bf6285a8bc1b2f84252c3df4edd53e4aad097a7',1,'lfgui']]]
];
| mit |
distributions-io/triangular-quantile | test/test.number.js | 1613 | /* global describe, it, require */
'use strict';
// MODULES //
var // Expectation library:
chai = require( 'chai' ),
// Check whether an element is a finite number
isFiniteNumber = require( 'validate.io-finite' ),
// Check whether an element is `NaN`
isnan = require( 'validate.io-nan' ),
// Module to be tested:
quantile = require( './../lib/number.js' );
// VARIABLES //
var expect = chai.expect,
assert = chai.assert;
// TESTS //
describe( 'number quantile', function tests() {
var validationData = require( './fixtures/number.json' ),
data = validationData.data,
expected = validationData.expected.map( function( d ) {
if (d === 'Inf' ) {
return Number.POSITIVE_INFINITY;
}
if ( d === '-Inf' ) {
return Number.NEGATIVE_INFINITY;
}
return d;
}),
a = validationData.a,
b = validationData.b,
c = validationData.c;
it( 'should export a function', function test() {
expect( quantile ).to.be.a( 'function' );
});
it( 'should evaluate the quantile function', function test() {
var actual;
for ( var i = 0; i < data.length; i++ ) {
actual = quantile( data[ i ], a, b, c );
if ( isFiniteNumber( actual ) && isFiniteNumber( expected[ i ] ) ) {
assert.closeTo( actual, expected[ i ] , 1e-12 );
}
}
});
it( 'should return `NaN` if provided `NaN` as input', function test() {
assert.isTrue( isnan( quantile( NaN, a, b, c ) ) );
});
it( 'should return `NaN` if provided a number outside [0,1]', function test() {
assert.isTrue( isnan( quantile( 1.1, a, b, c ) ) );
assert.isTrue( isnan( quantile( -0.1, a, b, c ) ) );
});
});
| mit |
vr-the-feedback/vr-the-feedback-unity | Assets/VRTheFeedback/Scripts/OggVorbisEncoder/Setup/Templates/BookBlocks/Stereo8/Coupled/Chapter0/Page5_0.cs | 937 | namespace OggVorbisEncoder.Setup.Templates.BookBlocks.Stereo8.Coupled.Chapter0
{
public class Page5_0 : IStaticCodeBook
{
public int Dimensions { get; } = 2;
public byte[] LengthList { get; } = {
1, 3, 3, 5, 5, 7, 6, 8, 8, 0, 0, 0, 7, 7, 7, 7,
8, 8, 0, 0, 0, 7, 7, 7, 7, 8, 9, 0, 0, 0, 8, 8,
8, 8, 9, 9, 0, 0, 0, 8, 8, 8, 8, 9, 9, 0, 0, 0,
9, 9, 8, 8,10,10, 0, 0, 0, 9, 9, 8, 8,10,10, 0,
0, 0,10,10, 9, 9,10,10, 0, 0, 0, 0, 0, 9, 9,10,
10,
};
public CodeBookMapType MapType { get; } = (CodeBookMapType)1;
public int QuantMin { get; } = -531628032;
public int QuantDelta { get; } = 1611661312;
public int Quant { get; } = 4;
public int QuantSequenceP { get; } = 0;
public int[] QuantList { get; } = {
4,
3,
5,
2,
6,
1,
7,
0,
8,
};
}
} | mit |
Studentmediene/RadioRevolt-API | src/models/episode.model.js | 1956 | /**
* Episode model
* @module models/Episode
*/
import slugify from 'slugify';
/**
* Episode model - create and export the database model for posts
* including all assosiations and classmethods assiciated with this model.
* @memberof module:models/Post
* @param {Object} sequelize description
* @param {Object} DataTypes description
*/
export default function (sequelize, DataTypes) {
const Episode = sequelize.define('episode', {
title: {
type: DataTypes.STRING,
allowNull: false,
defaultValue: ''
},
slug: {
type: DataTypes.STRING,
// Allow null because then Sequelize can set it to null,
// then receive the object and create slug based on
// id and title.
allowNull: true,
unique: true
},
lead: {
type: DataTypes.STRING(2048),
allowNull: false,
defaultValue: ''
},
podcastUrl: {
type: DataTypes.STRING,
allowNull: true,
validate: {
isUrl: true
}
},
soundUrl: {
type: DataTypes.STRING,
allowNull: true,
validate: {
isUrl: true
}
}
}, {
name: {
singular: 'episode',
plural: 'episodes'
},
hooks: {
afterCreate: episode => {
const slug = slugify(`${episode.get('title')} ${episode.get('id')}`);
episode.set('slug', slug);
episode.save();
}
},
classMethods: {
associate(models) {
Episode.belongsTo(models.Show, {
foreignKey: {
name: 'showId',
allowNull: true
}
});
}
}
}
);
return Episode;
}
| mit |
LouisK130/Orange-Cosmos-Roleplay | ocrp/entities/effects/kuhbewm/init.lua | 3845 | local Textures = {}
Textures.Glow1 = Material("sprites/light_glow02")
Textures.Glow2 = Material("sprites/flare1")
for k,mat in pairs(Textures) do
mat:SetInt("$spriterendermode",9)
mat:SetInt("$ignorez",1)
mat:SetInt("$illumfactor",8)
end
function EFFECT:Init(data)
local vOffset = data:GetOrigin()
if LocalPlayer():GetPos():Distance(vOffset) < 750 then
surface.PlaySound("ambient/explosions/explode_" .. math.random(1, 4) .. ".wav");
else
surface.PlaySound('ambient/explosions/explode_9.wav');
end
self.Scale = data:GetScale()
self.ScaleSlow = math.sqrt(self.Scale)
self.ScaleSlowest = math.sqrt(self.ScaleSlow)
self.Normal = data:GetNormal()
self.RightAngle = self.Normal:Angle():Right():Angle()
self.Position = data:GetOrigin() - 12*self.Normal
self.Position2 = self.Position + self.Scale*64*self.Normal
local CurrentTime = CurTime()
self.Duration = 0.5*self.Scale
self.KillTime = CurrentTime + self.Duration
self.GlowAlpha = 200
self.GlowSize = 100*self.Scale
self.FlashAlpha = 100
self.FlashSize = 0
local emitter = ParticleEmitter(self.Position)
for i=1,math.ceil(self.Scale*math.random(1,10)) do
end
for i=1,math.ceil(self.Scale*120) do
local vecang = VectorRand()*8
local particle = emitter:Add("particle/particle_smokegrenade",self.Position - vecang*9*k)
particle:SetColor(80,80,80,255)
particle:SetVelocity((math.Rand(50,400)*vecang)*self.Scale)
particle:SetDieTime(math.Rand(7,9)*self.Scale)
particle:SetAirResistance(150)
particle:SetStartAlpha(150)
particle:SetEndAlpha(0)
particle:SetStartSize(math.Rand(300,300)*self.ScaleSlow)
particle:SetEndSize(math.Rand(300,300)*self.ScaleSlow)
particle:SetRoll(math.Rand(20,80))
particle:SetRollDelta(0.6*math.random(-1,1))
end
for i=1,math.ceil(self.Scale*50) do
local vecang = VectorRand()*8
local particle = emitter:Add("Effects/fire_cloud"..math.random(1,2),self.Position - vecang*9*k)
particle:SetColor(200,200,200,255)
particle:SetVelocity((math.Rand(50,200)*vecang)*self.Scale)
particle:SetDieTime(math.Rand(0.5,2)*self.Scale)
particle:SetAirResistance(150)
particle:SetStartAlpha(150)
particle:SetEndAlpha(0)
particle:SetStartSize(math.Rand(25,300)*self.ScaleSlow)
particle:SetEndSize(math.Rand(25,300)*self.ScaleSlow)
particle:SetRoll(math.Rand(20,80))
particle:SetRollDelta(0.6*math.random(-1,1))
if math.random(1,10) == 1 then
local vecang = VectorRand()*8
local particle = emitter:Add("Effects/fire_embers"..math.random(1,3),self.Position - vecang*9*k)
particle:SetColor(200,200,200,255)
particle:SetVelocity((math.Rand(10,30)*vecang)*self.Scale)
particle:SetDieTime(math.Rand(8,10)*self.Scale)
particle:SetAirResistance(30)
particle:SetStartAlpha(255)
particle:SetEndAlpha(200)
particle:SetStartSize(math.Rand(2,10)*self.ScaleSlow)
particle:SetEndSize(math.Rand(2,10)*self.ScaleSlow)
particle:SetRoll(math.Rand(20,100))
particle:SetRollDelta(0.6*math.random(-3,3))
particle:SetGravity(Vector(0,0,200))
end
end
emitter:Finish()
end
function EFFECT:Think()
local TimeLeft = self.KillTime - CurTime()
local TimeScale = TimeLeft/self.Duration
local FTime = FrameTime()
if TimeLeft > 0 then
self.FlashAlpha = self.FlashAlpha - 200*FTime
self.FlashSize = self.FlashSize + 60000*FTime
self.GlowAlpha = 200*TimeScale
self.GlowSize = TimeLeft*self.Scale
return true
else
return false
end
end
function EFFECT:Render()
render.SetMaterial(Textures.Glow1)
render.DrawSprite(self.Position2,7000*self.GlowSize,5500*self.GlowSize,Color(255,240,220,self.GlowAlpha))
if self.FlashAlpha > 0 then
render.SetMaterial(Textures.Glow2)
render.DrawSprite(self.Position2,self.FlashSize,self.FlashSize,Color(255,245,215,self.FlashAlpha))
end
end
| mit |
Saeris/Scribe | src/models/lists/variations.js | 740 | import db from '../../config/bookshelf.config'
export default class Variations extends db.Model {
// Knex Schema Definitions
static fields(table) {
// Fields
table.bigInteger(`card`)
.comment(`The card associated with this variation.`)
.notNullable()
.unsigned()
.index(`variations_card`)
table.bigInteger(`variation`)
.comment(`The variation associated with this card.`)
.notNullable()
.unsigned()
.index(`variations_variation`)
// Timestamps
table.timestamps()
// Keys
table.primary([`card`, `variation`])
}
// Bookshelf Relation Definitions
get tableName() { return `variations` }
get hasTimestamps() { return true }
}
| mit |
will-gilbert/OSWf-OSWorkflow-fork | oswf/core/src/main/java/org/informagen/oswf/util/ValidationHelper.java | 661 | package org.informagen.oswf.util;
import org.informagen.oswf.exceptions.InvalidWorkflowDescriptorException;
import org.informagen.oswf.Validatable;
import java.util.Collection;
/**
* DOCUMENT ME!
*
* @author $author$
* @version $Revision: 1.2 $
*/
public class ValidationHelper {
// M E T H O D S -------------------------------------------------------------------------
public static void validate(Collection collection) throws InvalidWorkflowDescriptorException {
for(Object object : collection) {
if (object instanceof Validatable) {
((Validatable) object).validate();
}
}
}
}
| mit |
akaspin/bar | client/lists/links.go | 1790 | package lists
import (
"github.com/akaspin/bar/proto"
"path/filepath"
)
// Link from filename to manifest
type BlobLink struct {
proto.Manifest
Name string
}
// Link to chunk in blob
type ChunkLink struct {
Name string
proto.Chunk
}
// Reverse mapping from id to names
type IDMap map[proto.ID][]string
func (i IDMap) ToBlobMap(manifests []proto.Manifest) (res BlobMap) {
res = BlobMap{}
for _, manifest := range manifests {
if names, ok := i[manifest.ID]; ok {
for _, name := range names {
res[name] = manifest
}
}
}
return
}
func (i IDMap) IDs() (res []proto.ID) {
for id, _ := range i {
res = append(res, id)
}
return
}
// filename to manifest mapping
type BlobMap map[string]proto.Manifest
// Get unique chunk links
func (l BlobMap) GetChunkLinkSlice(chunkIDs []proto.ID) (res []ChunkLink) {
// make chunk ref
ref := map[proto.ID]struct{}{}
for _, v := range chunkIDs {
ref[v] = struct{}{}
}
var ok bool
for name, man := range l {
for _, chunk := range man.Chunks {
_, ok = ref[chunk.ID]
if ok {
res = append(res, ChunkLink{name, chunk})
delete(ref, chunk.ID)
if len(ref) == 0 {
return
}
}
}
}
return
}
// Get unique manifests
func (l BlobMap) GetManifestSlice() (res []proto.Manifest) {
ref := map[proto.ID]proto.Manifest{}
for _, m := range l {
ref[m.ID] = m
}
for _, v := range ref {
res = append(res, v)
}
return
}
func (l BlobMap) ToSlice() (res []BlobLink) {
for k, v := range l {
res = append(res, BlobLink{v, k})
}
return
}
func (l BlobMap) IDMap() (res IDMap) {
res = IDMap{}
for name, m := range l {
res[m.ID] = append(res[m.ID], name)
}
return
}
func (l BlobMap) Names() (res []string) {
for n, _ := range l {
res = append(res, filepath.FromSlash(n))
}
return
}
| mit |
AFR0N1NJAZ/Twilight-Forest-Rewrite | src/main/java/ninjaz/twilight/common/structures/darktower/TFDarkTowerPieces.java | 1477 | package twilightforest.structures.darktower;
import net.minecraft.world.gen.structure.MapGenStructureIO;
public class TFDarkTowerPieces
{
public TFDarkTowerPieces() {}
public static void registerPieces() {
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerBalcony.class, "TFDTBal");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerBeard.class, "TFDTBea");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerBossBridge.class, "TFDTBB");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerBossTrap.class, "TFDTBT");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerBridge.class, "TFDTBri");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerEntrance.class, "TFDTEnt");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerEntranceBridge.class, "TFDTEB");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerMain.class, "TFDTMai");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerMainBridge.class, "TFDTMB");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerRoof.class, "TFDTRooS");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerRoofAntenna.class, "TFDTRA");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerRoofCactus.class, "TFDTRC");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerRoofFourPost.class, "TFDTRFP");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerRoofRings.class, "TFDTRR");
MapGenStructureIO.func_143031_a(ComponentTFDarkTowerWing.class, "TFDTWin");
}
}
| mit |
Qymh/movieWeb | views/controllers/registerCtrl.js | 195 | angular.module('log',[])
.controller('logCtrl', function($scope) {
$scope.resett = function() {
$scope.log.name = '';
$scope.log.password = '';
$scope.log.passwordAgain = '';
}
}) | mit |
michelfernandes/crescer-2016-1 | src/modulo-08-java/Dia07/Carlos/modulo-08-java/aula3/src/main/java/br/com/crescer/aula3/run/RunDDL.java | 971 | package br.com.crescer.aula3.run;
import br.com.crescer.aula3.util.ConnectionFactory;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
/**
* @author Carlos H. Nonnemacher
*/
public class RunDDL {
public static void main(String[] args) {
final String ddl = "CREATE TABLE PESSOA ("
+ " ID_PESSOA NUMBER(19,0) NOT NULL, "
+ " NM_PESSOA VARCHAR2(50) NOT NULL, "
+ " PRIMARY KEY (ID_PESSOA) "
+ ")";
try (final Connection connection = ConnectionFactory.newConnection()) {
try (final Statement statement = connection.createStatement()) {
statement.executeUpdate(ddl);
} catch (final SQLException e) {
System.err.format("SQLException: %s", e);
}
} catch (SQLException e) {
System.err.format("SQLException: %s", e);
}
}
}
| mit |
rizwanniazigroupdocs/GroupDocs.Annotation-for-.NET | Showcases/GroupDocs-Annotation-Modern-Front-End/GroupDocs.Annotation for .NET/Global.asax.cs | 1063 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using System.Web.Routing;
using System.Web.Security;
using System.Web.SessionState;
using System.Web.Http;
using GroupDocs.Annotation;
namespace GroupDocs.Annotation_for.NET
{
public class Global : HttpApplication
{
private static string _licensePath = "E:\\GroupDocs\\Licenses\\GroupDocs.Total.lic";
void Application_Start(object sender, EventArgs e)
{
// Code that runs on application startup
AreaRegistration.RegisterAllAreas();
GlobalConfiguration.Configure(WebApiConfig.Register);
RouteConfig.RegisterRoutes(RouteTable.Routes);
License l = new License();
if (System.IO.File.Exists(_licensePath))
{
try
{
l.SetLicense(_licensePath);
}
catch (Exception)
{
throw;
}
}
}
}
} | mit |
AlphaHat/sdc | handler/HateHandler.go | 2800 | package handler
import (
"fmt"
"github.com/cinience/sdc/helper"
"github.com/cinience/sdc/lib"
"github.com/cinience/sdc/model"
"strconv"
"time"
)
type HateHandler struct {
lib.BaseHandler
}
func (self *HateHandler) Get() {
if helper.IsSpider(self.Ctx.Request.UserAgent()) != true {
name := self.GetString(":name")
id, _ := self.GetInt64(":id")
uid, _ := self.GetSession("userid").(int64)
if name == "question" {
if model.IsQuestionMark(uid, id) {
//self.Abort("304") <-白痴函数 妈的 难道这货不是用来设置状态号的?居然尼玛的直接panic!
self.Ctx.Output.SetStatus(304)
return
} else {
if qs, err := model.GetQuestion(id); err == nil {
qs.Hotdown = qs.Hotdown + 1
qs.Hotscore = helper.Qhot_QScore(qs.Hotup, qs.Hotdown)
qs.Hotvote = helper.Qhot_Vote(qs.Hotup, qs.Hotdown)
qs.Hotness = helper.Qhot(qs.Views, qs.ReplyCount, qs.Hotscore, model.GetAScoresByPid(id), qs.Created, qs.ReplyTime)
if _, err := model.PutQuestion(id, qs); err != nil {
fmt.Println("PutQuestion执行错误:", err)
} else {
model.SetQuestionMark(uid, id)
}
self.Ctx.WriteString(strconv.Itoa(int(qs.Hotscore)))
} else {
return
}
}
} else if name == "answer" {
if model.IsAnswerMark(uid, id) {
self.Ctx.Output.SetStatus(304)
return
} else {
if ans, err := model.GetAnswer(id); err == nil {
ans.Hotdown = ans.Hotdown + 1
ans.Views = ans.Views + 1
ans.Hotscore = helper.Qhot_QScore(ans.Hotup, ans.Hotdown)
ans.Hotvote = helper.Qhot_Vote(ans.Hotup, ans.Hotdown)
ans.Hotness = helper.Qhot(ans.Views, ans.ReplyCount, ans.Hotscore, ans.Views, ans.Created, ans.ReplyTime)
if _, err := model.PutAnswer(id, ans); err != nil {
fmt.Println("PutAnswer执行错误:", err)
} else {
model.SetAnswerMark(uid, id)
}
self.Ctx.WriteString(strconv.Itoa(int(ans.Hotscore)))
} else {
return
}
}
} else if name == "topic" {
if tp, err := model.GetTopic(id); err == nil {
tp.Hotdown = tp.Hotdown + 1
tp.Hotscore = helper.Hotness_Score(tp.Hotup, tp.Hotdown)
tp.Hotness = helper.Hotness(tp.Hotup, tp.Hotdown, time.Now())
model.PutTopic(id, tp)
//♠ 没用 ({{.article.Hotdown}})
self.Ctx.WriteString(strconv.Itoa(int(tp.Hotdown)))
} else {
return
}
} else if name == "node" {
if nd, err := model.GetNode(id); err == nil {
nd.Hotdown = nd.Hotdown + 1
nd.Hotscore = helper.Hotness_Score(nd.Hotup, nd.Hotdown)
nd.Hotness = helper.Hotness(nd.Hotup, nd.Hotdown, time.Now())
model.PutNode(id, nd)
self.Ctx.WriteString("node hated")
} else {
return
}
} else {
self.Ctx.Output.SetStatus(304)
}
} else {
self.Ctx.Output.SetStatus(401)
}
}
| mit |
jorexe/tpe-pod | api/src/main/java/mbaracus/query1/mr/Query1CombinerFactory.java | 880 | package mbaracus.query1.mr;
import com.hazelcast.mapreduce.Combiner;
import com.hazelcast.mapreduce.CombinerFactory;
import mbaracus.query1.model.AgeCount;
import mbaracus.query1.model.AgeType;
/**
* Created by jorexe on 21/11/16.
*/
public class Query1CombinerFactory implements CombinerFactory<AgeType, AgeCount, AgeCount> {
@Override
public Combiner<AgeCount, AgeCount> newCombiner(AgeType key) {
return new Combiner<AgeCount, AgeCount>() {
private int count;
@Override
public void combine(AgeCount value) {
this.count += value.count;
}
@Override
public AgeCount finalizeChunk() {
return new AgeCount(key, count);
}
@Override
public void reset() {
this.count = 0;
}
};
}
}
| mit |
saurabh1e/SuperFlaskSeed | web/src/admin_panel/admin_manager.py | 1069 | from flask_admin_impexp.admin_impexp import AdminImportExport
from src import admin, db
from src.user.models import User, Role, Permission, UserRole, Partner, PosOutlet, Address
from src.order.models import Order, Invoice, InlineItem, InlineItemTax
from src.user.schemas import UserSchema, UserRoleSchema
class MyModel(AdminImportExport):
pass
class PartnerAdmin(AdminImportExport):
inline_models = (Address,)
pass
admin.add_view(MyModel(User, session=db.session, schema=UserSchema))
admin.add_view(MyModel(Role, session=db.session))
admin.add_view(PartnerAdmin(Partner, session=db.session))
admin.add_view(MyModel(PosOutlet, session=db.session))
admin.add_view(MyModel(Address, session=db.session))
admin.add_view(MyModel(Order, session=db.session))
admin.add_view(MyModel(UserRole, session=db.session, schema=UserRoleSchema))
admin.add_view(MyModel(Permission, session=db.session))
admin.add_view(MyModel(Invoice, session=db.session))
admin.add_view(MyModel(InlineItem, session=db.session))
admin.add_view(MyModel(InlineItemTax, session=db.session))
| mit |
webino/JAWStats | js/jquery.flot.threshold.js | 3218 | /*
Flot plugin for thresholding data. Controlled through the option
"threshold" in either the global series options
series: {
threshold: {
below: number
color: colorspec
}
}
or in a specific series
$.plot($("#placeholder"), [{ data: [ ... ], threshold: { ... }}])
The data points below "below" are drawn with the specified color. This
makes it easy to mark points below 0, e.g. for budget data.
Internally, the plugin works by splitting the data into two series,
above and below the threshold. The extra series below the threshold
will have its label cleared and the special "originSeries" attribute
set to the original series. You may need to check for this in hover
events.
*/
(function($) {
var options = {
series: {threshold: null} // or { below: number, color: color spec}
};
function init(plot) {
function thresholdData(plot, s, datapoints) {
if (!s.threshold)
return;
var ps = datapoints.pointsize, i, x, y, p, prevp,
thresholded = $.extend({}, s); // note: shallow copy
thresholded.datapoints = {points: [], pointsize: ps};
thresholded.label = null;
thresholded.color = s.threshold.color;
thresholded.threshold = null;
thresholded.originSeries = s;
thresholded.data = [];
var below = s.threshold.below,
origpoints = datapoints.points,
addCrossingPoints = s.lines.show;
threspoints = [];
newpoints = [];
for (i = 0; i < origpoints.length; i += ps) {
x = origpoints[i]
y = origpoints[i + 1];
prevp = p;
if (y < below)
p = threspoints;
else
p = newpoints;
if (addCrossingPoints && prevp != p && x != null
&& i > 0 && origpoints[i - ps] != null) {
var interx = (x - origpoints[i - ps]) / (y - origpoints[i - ps + 1]) * (below - y) + x;
prevp.push(interx);
prevp.push(below);
for (m = 2; m < ps; ++m)
prevp.push(origpoints[i + m]);
p.push(null); // start new segment
p.push(null);
for (m = 2; m < ps; ++m)
p.push(origpoints[i + m]);
p.push(interx);
p.push(below);
for (m = 2; m < ps; ++m)
p.push(origpoints[i + m]);
}
p.push(x);
p.push(y);
}
datapoints.points = newpoints;
thresholded.datapoints.points = threspoints;
if (thresholded.datapoints.points.length > 0)
plot.getData().push(thresholded);
// FIXME: there are probably some edge cases left in bars
}
plot.hooks.processDatapoints.push(thresholdData);
}
$.plot.plugins.push({
init: init,
options: options,
name: 'threshold',
version: '1.0'
});
})(jQuery);
| mit |
Lingotek/translation-utility | python3/ltk/actions/action.py | 24789 | # Using the following encoding: utf-8
# Python 2
# from ConfigParser import ConfigParser, NoOptionError
# End Python 2
# Python 3
from configparser import ConfigParser, NoOptionError
# End Python 3
import requests
import os
import shutil
import fnmatch
import time
import getpass
import itertools
from ltk import exceptions
from ltk.apicalls import ApiCalls
from ltk.utils import *
from ltk.managers import DocumentManager, FolderManager
from ltk.constants import CONF_DIR, CONF_FN, SYSTEM_FILE, ERROR_FN
import json
from ltk.logger import logger
from ltk.git_auto import Git_Auto
from tabulate import tabulate
class Action:
def __init__(self, path, watch=False, timeout=60):
self.host = ''
self.access_token = ''
self.project_id = ''
self.project_name = ''
self.path = path
self.community_id = ''
self.workflow_id = '' # default workflow id; MT phase only
self.locale = ''
self.clone_option = 'on'
self.auto_format_option = ''
self.download_option = 'clone'
self.download_dir = None # directory where downloaded translation will be stored
self.watch_locales = set() # if specified, add these target locales to any files in the watch folder
self.git_autocommit = None
self.git_username = ''
self.git_password = ''
self.append_option = 'none'
self.locale_folders = {}
if not self._is_initialized():
raise exceptions.UninitializedError("This project is not initialized. Please run init command.")
self._initialize_self()
self.watch = watch
self.doc_manager = DocumentManager(self.path)
self.folder_manager = FolderManager(self.path)
self.timeout = timeout
self.api = ApiCalls(self.host, self.access_token, self.watch, self.timeout)
self.git_auto = Git_Auto(self.path)
self.error_file_name = os.path.join(self.path, CONF_DIR, ERROR_FN)
def _is_initialized(self):
actual_path = find_conf(self.path)
if not actual_path:
return False
self.path = os.path.join(actual_path, '')
if not is_initialized(self.path):
return False
return True
def _initialize_self(self):
config_file_name = os.path.join(self.path, CONF_DIR, CONF_FN)
conf_parser = ConfigParser()
conf_parser.read(config_file_name)
self.host = conf_parser.get('main', 'host')
self.access_token = conf_parser.get('main', 'access_token')
self.project_id = conf_parser.get('main', 'project_id')
self.community_id = conf_parser.get('main', 'community_id')
self.workflow_id = conf_parser.get('main', 'workflow_id')
self.locale = conf_parser.get('main', 'default_locale')
self.locale = self.locale.replace('_','-')
try:
if conf_parser.has_option('main', 'auto_format'):
self.auto_format_option = conf_parser.get('main', 'auto_format')
else:
self.update_config_file('auto_format', 'on', conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'project_name'):
self.project_name = conf_parser.get('main', 'project_name')
if conf_parser.has_option('main', 'download_folder'):
self.download_dir = conf_parser.get('main', 'download_folder')
else:
self.download_dir = None
self.update_config_file('download_folder', json.dumps(self.download_dir), conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'watch_locales'):
watch_locales = conf_parser.get('main', 'watch_locales')
if watch_locales:
self.watch_locales = set(watch_locales.split(','))
else:
# there are no watch locales, so set it to an empty set
self.watch_locales = set()
else:
self.watch_locales = set()
self.update_config_file('watch_locales', json.dumps(list(self.watch_locales)), conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'locale_folders'):
self.locale_folders = json.loads(conf_parser.get('main', 'locale_folders'))
locale_folders = {}
#for key, value in self.locale_folders.items():
# key = key.replace('_', '-');
# locale_folders[key] = value
#self.locale_folders = locale_folders
else:
self.locale_folders = {}
self.update_config_file('locale_folders', json.dumps(self.locale_folders), conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'download_option'):
self.download_option = conf_parser.get('main', 'download_option')
else:
self.download_option = 'clone'
self.update_config_file('download_option', self.download_option, conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'clone_option'):
self.clone_option = conf_parser.get('main', 'clone_option')
else:
self.clone_option = 'on'
self.update_config_file('clone_option', self.clone_option, conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'git_autocommit'):
self.git_autocommit = conf_parser.get('main', 'git_autocommit')
else:
self.git_autocommit = ''
self.update_config_file('git_autocommit', self.git_autocommit, conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'git_username'):
self.git_username = conf_parser.get('main', 'git_username')
else:
self.git_username = ''
self.update_config_file('git_username', self.git_username, conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'git_password'):
self.git_password = conf_parser.get('main', 'git_password')
else:
self.git_password = ''
self.update_config_file('git_password', self.git_password, conf_parser, config_file_name, "")
if conf_parser.has_option('main', 'append_option'):
self.append_option = conf_parser.get('main', 'append_option')
else:
self.append_option = 'none'
self.update_config_file('append_option', self.append_option, conf_parser, config_file_name, "")
except NoOptionError as e:
if not self.project_name:
self.api = ApiCalls(self.host, self.access_token)
project_info = self.api.get_project_info(self.community_id)
self.project_name = project_info[self.project_id]
config_file_name, conf_parser = self.init_config_file()
log_info = 'Updated project name'
self.update_config_file('project_name', self.project_name, conf_parser, config_file_name, log_info)
def _add_document(self, file_name, title, doc_id):
""" adds a document to db """
now = time.time()
# doc_id = json['properties']['id']
full_path = os.path.join(self.path, file_name)
last_modified = os.stat(full_path).st_mtime
self.doc_manager.add_document(title, now, doc_id, last_modified, now, file_name)
def _update_document(self, file_name):
""" updates a document in the db """
now = time.time()
file_path = os.path.join(self.path, file_name)
# sys_last_modified = os.stat(file_name).st_mtime
sys_last_modified = os.stat(file_path).st_mtime
entry = self.doc_manager.get_doc_by_prop('file_name', file_name)
doc_id = entry['id']
self.doc_manager.update_document('last_mod', now, doc_id)
self.doc_manager.update_document('sys_last_mod', sys_last_modified, doc_id)
# whenever a document is updated, it should have new translations
self.doc_manager.update_document('downloaded', [], doc_id)
def close(self):
self.doc_manager.close_db()
def open(self):
self.doc_manager.open_db()
def init_config_file(self):
config_file_name = os.path.join(self.path, CONF_DIR, CONF_FN)
conf_parser = ConfigParser()
conf_parser.read(config_file_name)
return config_file_name, conf_parser
def update_config_file(self, option, value, conf_parser, config_file_name, log_info):
try:
conf_parser.set('main', option, value)
with open(config_file_name, 'w') as new_file:
conf_parser.write(new_file)
self._initialize_self()
if (len(log_info)):
logger.info(log_info+"\n")
except IOError as e:
print(e.errno)
print(e)
def get_relative_path(self, path):
return get_relative_path(self.path, path)
def get_current_path(self, path):
cwd = os.getcwd()
if cwd in path:
path = path.replace(cwd,"")
return path
else:
cwd_relative_path = cwd.replace(self.path,"")
return path.replace(cwd_relative_path+os.sep,"")
def get_current_abs(self, path):
# print("orig path: "+str(path))
cwd = os.getcwd()
if cwd in path:
path = path.replace(cwd,"")
else:
# print("cwd: "+cwd)
# print("self.path: "+self.path)
cwd_relative_path = cwd.replace(self.path,"")
# print("cwd relative path: "+cwd_relative_path)
cwd_path = path.replace(cwd_relative_path+os.sep,"")
# print("cwd path: "+cwd_path)
path = cwd_path
# print("current path: "+path)
# print("abs path: "+os.path.abspath(path))
return os.path.abspath(path)
def norm_path(self, file_location):
# print("original path: "+str(file_location))
if file_location:
file_location = os.path.normpath(file_location)
# abspath=os.path.abspath(file_location)
# print("abspath: "+str(os.path.abspath(os.path.expanduser(file_location))))
# print("self.path: "+self.path)
# print("cwd: "+str(os.getcwd()))
norm_path = os.path.abspath(os.path.expanduser(file_location)).replace(self.path, '')
# print("normalized path: "+norm_path)
# print("joined path: "+str(os.path.join(self.path,file_location)))
# if file_location == ".." and self.path.rstrip('/') in norm_path:
# return norm_path.replace(self.path.rstrip('/'), '')
if file_location is not "." and ".." not in file_location and os.path.exists(os.path.join(self.path,file_location)):
# print("returning original path: "+str(file_location))
return file_location.replace(self.path, '')
elif ".." in file_location and file_location != "..":
# print("returning norm path: "+norm_path)
return norm_path.replace(self.path,'')
if not os.path.exists(os.path.join(self.path,norm_path)) and os.path.exists(os.path.join(self.path,file_location)):
# print("Starting path at project directory: "+file_location.replace(self.path, ''))
return os.path.abspath(os.path.expanduser(file_location.replace(self.path, ''))).replace(self.path, '')
elif file_location == "..":
return os.path.abspath(os.path.expanduser(file_location.replace(self.path, ''))).replace(self.path, '')
return norm_path
else:
return None
def get_docs_in_path(self, path):
files = get_files(path)
db_files = self.doc_manager.get_file_names()
docs = []
if files:
for file in files:
file_name = self.norm_path(file)
if file_name in db_files:
docs.append(self.doc_manager.get_doc_by_prop('file_name',file_name))
return docs
def get_doc_filenames_in_path(self, path):
files = get_files(path)
db_files = self.doc_manager.get_file_names()
docs = []
if files:
for file in files:
file_name = self.norm_path(file)
if file_name in db_files:
docs.append(file_name)
return docs
def get_doc_locales(self, doc_id, doc_name):
locales = []
response = self.api.document_translation_status(doc_id)
if response.status_code != 200:
if check_response(response) and response.json()['messages'] and 'No translations exist' in response.json()['messages'][0]:
return locales
if doc_name:
raise_error(response.json(), 'Failed to check target locales for document '+doc_name, True, doc_id)
else:
raise_error(response.json(), 'Failed to check target locales for document '+doc_id, True, doc_id)
try:
if 'entities' in response.json():
for entry in response.json()['entities']:
locales.append(entry['properties']['locale_code'])
except KeyError as e:
print("Error listing translations")
return
# return detailed_status
return locales
def is_locale_folder_taken(self, new_locale, path):
# Python 2
# for locale, folder in self.locale_folders.iteritems():
# End Python 2
# Python 3
for locale, folder in self.locale_folders.items():
# End Python 3
if path == folder and not locale == new_locale:
return locale
return False
def update_document_action(self, file_name, title=None, **kwargs):
try:
relative_path = self.norm_path(file_name)
entry = self.doc_manager.get_doc_by_prop('file_name', relative_path)
try:
document_id = entry['id']
except TypeError as e:
log_error(self.error_file_name, e)
logger.error("Document name specified for update doesn't exist: {0}".format(title))
return
if title:
response = self.api.document_update(document_id, file_name, title=title, **kwargs)
else:
response = self.api.document_update(document_id, file_name)
if response.status_code != 202:
raise_error(response.json(), "Failed to update document {0}".format(file_name), True)
self._update_document(relative_path)
return True
except Exception as e:
log_error(self.error_file_name, e)
if 'string indices must be integers' in str(e) or 'Expecting value: line 1 column 1' in str(e):
logger.error("Error connecting to Lingotek's TMS")
else:
logger.error("Error on updating document"+str(file_name)+": "+str(e))
def _target_action_db(self, to_delete, locales, document_id):
if to_delete:
curr_locales = self.doc_manager.get_doc_by_prop('id', document_id)['locales']
updated_locales = set(curr_locales) - set(locales)
self.doc_manager.update_document('locales', updated_locales, document_id)
else:
self.doc_manager.update_document('locales', list(locales), document_id)
def update_doc_locales(self, document_id):
try:
locale_map = self.import_locale_info(document_id)
locale_info = list(iter(locale_map))
except exceptions.RequestFailedError as e:
log_error(self.error_file_name, e)
locale_info = []
self.doc_manager.update_document('locales', locale_info, document_id)
def added_folder_of_file(self, file_path):
folders = self.folder_manager.get_file_names()
if not folders:
#print("not folders")
return
for folder in folders:
folder = os.path.join(self.path, folder)
if folder in file_path:
return folder
def get_new_name(self, file_name, curr_path):
i = 1
file_path = os.path.join(curr_path, file_name)
name, extension = os.path.splitext(file_name)
while os.path.isfile(file_path):
new_name = '{name}({i}){ext}'.format(name=name, i=i, ext=extension)
file_path = os.path.join(curr_path, new_name)
i += 1
return file_path
def import_locale_info(self, document_id, poll=False):
locale_progress = {}
response = self.api.document_translation_status(document_id)
if response.status_code != 200:
if poll or response.status_code == 404:
return {}
else:
# raise_error(response.json(), 'Failed to get locale details of document', True)
raise exceptions.RequestFailedError('Failed to get locale details of document')
try:
for entry in response.json()['entities']:
curr_locale = entry['properties']['locale_code']
curr_progress = int(entry['properties']['percent_complete'])
curr_locale = curr_locale.replace('-', '_')
locale_progress[curr_locale] = curr_progress
except KeyError:
pass
return locale_progress
def delete_local(self, title, document_id, message=None):
# print('local delete:', title, document_id)
if not title:
title = document_id
message = '{0} has been deleted locally'.format(title) if not message else message
try:
file_name = self.doc_manager.get_doc_by_prop('id', document_id)['file_name']
except TypeError:
logger.info('Document to remove not found in the local database')
return
try:
os.remove(os.path.join(self.path, file_name))
logger.info(message)
except OSError:
logger.info('Something went wrong trying to delete the local file')
def delete_local_translation(self, file_name):
try:
if not file_name:
logger.info('Please provide a valid file name')
logger.info('{0} (local translation) has been deleted'.format(self.get_relative_path(file_name)))
os.remove(os.path.join(self.path, file_name))
except OSError:
logger.info('Something went wrong trying to download the local translation')
def delete_local_path(self, path, message=None):
path = self.norm_path(path)
message = '{0} has been deleted locally.'.format(path) if not message else message
try:
os.remove(path)
logger.info(message)
except OSError:
logger.info('Something went wrong trying to delete the local file')
def raise_error(json, error_message, is_warning=False, doc_id=None, file_name=None):
try:
if json:
error = json['messages'][0]
file_name = file_name.replace("Status of ", "")
if file_name is not None and doc_id is not None:
error = error.replace(doc_id, file_name+" ("+doc_id+")")
# Sometimes api returns vague errors like 'Unknown error'
if error == 'Unknown error':
error = error_message
if not is_warning:
raise exceptions.RequestFailedError(error)
# warnings.warn(error)
logger.error(error)
except (AttributeError, IndexError):
if not is_warning:
raise exceptions.RequestFailedError(error_message)
# warnings.warn(error_message)
logger.error(error_message)
def is_initialized(project_path):
ltk_path = os.path.join(project_path, CONF_DIR)
if os.path.isdir(ltk_path) and os.path.isfile(os.path.join(ltk_path, CONF_FN)) and \
os.stat(os.path.join(ltk_path, CONF_FN)).st_size:
return True
return False
def choice_mapper(info):
mapper = {}
import operator
#sorted_info = sorted(info.iteritems(), key=operator.itemgetter(1))
sorted_info = sorted(info.items(), key = operator.itemgetter(1))
index = 0
for entry in sorted_info:
if entry[0] and entry[1]:
mapper[index] = {entry[0]: entry[1]}
index += 1
table = []
for k,v in mapper.items():
try:
for values in v:
table.append({
"ID": k,
"Name": v[values],
"UUID": values
})
except UnicodeEncodeError:
continue
print(tabulate(table, headers="keys"), "\n")
return mapper
def find_conf(curr_path):
"""
check if the conf folder exists in current directory's parent directories
"""
if os.path.isdir(os.path.join(curr_path, CONF_DIR)):
return curr_path
elif curr_path == os.path.abspath(os.sep):
return None
else:
return find_conf(os.path.abspath(os.path.join(curr_path, os.pardir)))
def printResponseMessages(response):
for message in response.json()['messages']:
logger.info(message)
def get_files(patterns):
""" gets all files matching pattern from root
pattern supports any unix shell-style wildcards (not same as RE) """
cwd = os.getcwd()
if isinstance(patterns,str):
patterns = [patterns]
allPatterns = []
if isinstance(patterns,list) or isinstance(patterns,tuple):
for pattern in patterns:
basename = os.path.basename(pattern)
if basename and basename != "":
allPatterns.extend(getRegexFiles(pattern,cwd))
else:
allPatterns.append(pattern)
else:
basename = os.path.basename(patterns)
if basename and basename != "":
allPatterns.extend(getRegexFiles(patterns,cwd))
else:
allPatterns.append(patterns)
matched_files = []
# print("all patterns: "+str(allPatterns))
for pattern in allPatterns:
path = os.path.abspath(pattern)
# print("looking at path "+str(path))
# check if pattern contains subdirectory
if os.path.exists(path):
if os.path.isdir(path):
for root, subdirs, files in os.walk(path):
# split_path = root.split(os.sep)
# print("split_path: {0}".format(split_path))
for file in files:
if not (("desktop.ini" in file) or ('Thumbs.db' in file) or ('ehthumbs.db' in file)): # don't add desktop.ini, Thumbs.db, or ehthumbs.db files
matched_files.append(os.path.join(root, file))
else:
matched_files.append(path)
# else:
# logger.info("File not found: "+pattern)
# subdir_pat, fn_pat = os.path.split(pattern)
# if not subdir_pat:
# for path, subdirs, files in os.walk(root):
# for fn in fnmatch.filter(files, pattern):
# matched_files.append(os.path.join(path, fn))
# else:
# for path, subdirs, files in os.walk(root):
# # print os.path.split(path)
# # subdir = os.path.split(path)[1] # get current subdir
# search_root = os.path.join(root, '')
# subdir = path.replace(search_root, '')
# # print subdir, subdir_pat
# if fnmatch.fnmatch(subdir, subdir_pat):
# for fn in fnmatch.filter(files, fn_pat):
# matched_files.append(os.path.join(path, fn))
if len(matched_files) == 0:
return None
return matched_files
def getRegexFiles(pattern,path):
dir_name = os.path.dirname(pattern)
if dir_name:
path = os.path.join(path,dir_name)
pattern_name = os.path.basename(pattern)
# print("path: "+path)
# print("pattern: "+str(pattern))
matched_files = []
if pattern_name and not "*" in pattern:
return [pattern]
for path, subdirs, files in os.walk(path):
for fn in fnmatch.filter(files, pattern):
matched_files.append(os.path.join(path, fn))
# print("matched files: "+str(matched_files))
return matched_files
def log_id_names(json):
"""
logs the id and titles from a json object
"""
ids = []
titles = []
for entity in json['entities']:
ids.append(entity['properties']['id'])
titles.append(entity['properties']['title'])
return ids, titles
| mit |
cyrilschumacher/prometheus-node-usage | test/metrics/builder/label/create.ts | 1227 | /// <reference types="mocha" />
/// <reference types="node" />
import * as chai from "chai";
import * as process from "process";
import * as sinon from "sinon";
import { createMetricLabels } from "../../../../src/metrics/builder/label/create";
describe("createMetricLabels()", () => {
it("should return metric labels", () => {
const labels = [{name: "label1", value: "value1"}, {name: "label2", value: "value2"}];
const metricLabels = createMetricLabels(labels);
chai.expect(metricLabels).to.be.equal("{label1=\"value1\",label2=\"value2\"}");
});
it("should return empty string", () => {
const metricLabels = createMetricLabels([]);
chai.expect(metricLabels).to.be.equal("");
});
it("should return empty string with empty label name", () => {
const labels = [{name: "", value: "value1"}];
const metricLabels = createMetricLabels(labels);
chai.expect(metricLabels).to.be.equal("{}");
});
it("should return empty string with empty label name", () => {
const labels = [{name: "label1", value: ""}];
const metricLabels = createMetricLabels(labels);
chai.expect(metricLabels).to.be.equal("{}");
});
});
| mit |
turbolent/ralph | core/ralph/repl/interactor.js | 15518 | require('ralph/core');
{
var $module = Object.create($moduleRoot);
{
($module)['%export'] = function B3(name__4, value__5) {
var B7 = (exports);
return(B7[name__4] = value__5);
};
{
($module)['%eval'] = function B6() {
return(eval((arguments[0])));
};
($module)['%export']('%eval', ($module)['%eval']);
}
}
}
var B8 = require('ralph/core');
{
var B9 = require('ralph/stream');
{
var B10 = require('ralph/format-out');
{
var B11 = require('ralph/compiler/environment');
{
var B12 = require('ralph/compiler/reader');
{
var B13 = require('ralph/compiler/compile');
{
var B14 = require('ws');
var B15 = require('readline');
}
}
}
}
}
}
{
($module)['make-interface'] = function B17() {
return(B15['createInterface']({
'input': (process.stdin),
'output': (process.stdout)
}));
};
B8['%annotate-function'](($module)['make-interface'], 'make-interface', false);
}
{
($module)['<interactor>'] = B8['%make-class'](B8['<object>'], {
'current-line': function B19() {
return('');
}
}, function LinteractorG__20() {
var B21 = (this);
return(B21['current-line'] = (undefined));
});
($module)['%export']('<interactor>', ($module)['<interactor>']);
}
($module)['initialize'] = B8['%make-method']('initialize', function B29(interactor__30) {
B8['%next-method'](B29)['apply']((null), arguments);
{
var interface__31 = ($module)['make-interface']();
{
B8['get-setter'](interactor__30, 'interface', interface__31);
{
var B32 = interface__31;
{
var B33 = 'close';
{
var B37 = function B34() {
var B35 = (process);
{
var B36 = 0;
return(B35['exit'](B36));
}
};
return(B32['on'](B33, B37));
}
}
}
}
}
}, false, ($module)['<interactor>'], ($module)['initialize']);
{
($module)['active?'] = function B39(interactor__40) {
return(B8['true?'](B8['get'](interactor__40, 'connection')));
};
B8['%annotate-function'](($module)['active?'], 'active?', false);
}
($module)['$incomplete'] = B8['make-object']();
{
var B42 = $K('string');
{
var B43 = $K('eof-error?');
{
var B44 = $K('eof-value');
{
var B45 = $K('if-incomplete');
{
($module)['read-line'] = function B46(interactor__47, line__48) {
var input_stream__49 = B8['make'](B9['<string-stream>'], B42, line__48);
return(B12['read'](input_stream__49, B8['get'](interactor__47, 'current-environment'), B43, false, B44, ($module)['$incomplete'], B45, ($module)['$incomplete']));
};
B8['%annotate-function'](($module)['read-line'], 'read-line', false);
}
}
}
}
}
{
($module)['on-each-line'] = function B54(interactor__55, handler__56) {
var B57 = B8['get'](interactor__55, 'interface');
{
var B58 = 'line';
{
var B59 = handler__56;
return(B57['on'](B58, B59));
}
}
};
B8['%annotate-function'](($module)['on-each-line'], 'on-each-line', false);
}
{
($module)['make-environment'] = function B61(name__62) {
var env__63 = B13['make-module-environment'](name__62);
{
B8['get-setter'](env__63, 'persistent?', false);
return(env__63);
}
};
B8['%annotate-function'](($module)['make-environment'], 'make-environment', false);
}
{
($module)['send-command'] = function B67(interactor__68, type__69) {
var data__70 = $SL.call(arguments, 2);
{
var B71 = B8['get'](interactor__68, 'connection');
{
var B72 = B8['as-json'](B8['apply'](B8['make-object'], 'type', type__69, data__70));
return(B71['send'](B72));
}
}
};
B8['%annotate-function'](($module)['send-command'], 'send-command', false);
}
{
($module)['perform-module-change'] = function B75(interactor__76, name__77) {
var B78 = B8['get'](interactor__76, 'environments', name__77);
{
var env__79 = false;
if (($T)(B78))
env__79 = B78;
else
env__79 = ($module)['make-environment'](name__77);
{
B8['get-setter'](interactor__76, 'envrionments', name__77, env__79);
return(B8['get-setter'](interactor__76, 'current-environment', env__79));
}
}
};
B8['%annotate-function'](($module)['perform-module-change'], 'perform-module-change', false);
}
{
{
($module)['change-module'] = function B81(interactor__82, name__83) {
($module)['perform-module-change'](interactor__82, name__83);
return(($module)['send-command'](interactor__82, 'change-module', 'name', name__83));
};
($module)['%export']('change-module', ($module)['change-module']);
}
B8['%annotate-function'](($module)['change-module'], 'change-module', false);
}
($module)['$handlers'] = B8['make-plain-object']();
{
($module)['handle-message'] = function B86(interactor__87, serialized_message__88) {
var message__89 = B8['parse-json'](serialized_message__88);
{
var B90 = B8['get'](($module)['$handlers'], B8['get'](message__89, 'type'));
if (($T)(B90)) {
var handler__91 = B90;
return(handler__91(interactor__87, message__89));
} else
return(false);
}
};
B8['%annotate-function'](($module)['handle-message'], 'handle-message', false);
}
{
($module)['handle-connection'] = function B99(interactor__100, connection__101) {
var B102 = connection__101;
{
var B103 = 'message';
{
var B104 = B8['curry'](($module)['handle-message'], interactor__100);
{
B102['on'](B103, B104);
{
var B105 = connection__101;
{
var B106 = 'close';
{
var B107 = B8['curry'](($module)['handle-close'], interactor__100);
{
B105['on'](B106, B107);
{
B8['get-setter'](interactor__100, 'connection', connection__101);
{
($module)['change-module'](interactor__100, 'ralph/core');
return(($module)['update-prompt!'](interactor__100));
}
}
}
}
}
}
}
}
}
};
B8['%annotate-function'](($module)['handle-connection'], 'handle-connection', false);
}
{
($module)['handle-close'] = function B109(interactor__110) {
B8['get-setter'](interactor__110, 'connection', false);
{
B8['get-setter'](interactor__110, 'current-environment', false);
return(($module)['update-prompt!'](interactor__110));
}
};
B8['%annotate-function'](($module)['handle-close'], 'handle-close', false);
}
{
($module)['start-server'] = function B112(interactor__113, port__114) {
var server__115 = B14['createServer']({ 'port': port__114 }, B8['curry'](($module)['handle-connection'], interactor__113));
return(B8['get-setter'](interactor__113, 'server', server__115));
};
B8['%annotate-function'](($module)['start-server'], 'start-server', false);
}
{
{
($module)['start-interactor'] = function B120(interactor__121) {
var B122 = $SL.call(arguments, 1);
{
var B123 = B8['%keys'](B122, { 'port': 2342 });
{
var port__124 = B123['port'];
{
($module)['start-server'](interactor__121, port__124);
{
($module)['on-each-line'](interactor__121, function B125(line__126) {
if (($T)(($module)['active?'](interactor__121)))
return(($module)['handle-line'](interactor__121, line__126));
else {
B10['format-out']('[inactive]\n');
return(($module)['update-prompt!'](interactor__121));
}
});
return(($module)['update-prompt!'](interactor__121));
}
}
}
}
};
($module)['%export']('start-interactor', ($module)['start-interactor']);
}
B8['%annotate-function'](($module)['start-interactor'], 'start-interactor', false);
}
{
($module)['append-line!'] = function B128(interactor__129, line__130) {
return(B8['get-setter'](interactor__129, 'current-line', B8['concatenate'](B8['get'](interactor__129, 'current-line'), '\n', line__130)));
};
B8['%annotate-function'](($module)['append-line!'], 'append-line!', false);
}
{
($module)['eval-in-module'] = function B132(interactor__133, code__134) {
return(($module)['send-command'](interactor__133, 'eval-in-module', 'code', code__134));
};
B8['%annotate-function'](($module)['eval-in-module'], 'eval-in-module', false);
}
B8['get-setter'](($module)['$handlers'], 'result', function B137(interactor__138, message__139) {
var B140 = message__139;
{
var result__141 = B140['result'];
{
B10['format-out']('%s\n', result__141);
return(($module)['update-prompt!'](interactor__138));
}
}
});
B8['get-setter'](($module)['$handlers'], 'exception', function B144(interactor__145, message__146) {
var B147 = message__146;
{
var stack__148 = B147['stack'];
{
B10['format-out']('%s\n', stack__148);
return(($module)['update-prompt!'](interactor__145));
}
}
});
B8['get-setter'](($module)['$handlers'], 'change-module', function B151(interactor__152, message__153) {
var B154 = message__153;
{
var name__155 = B154['name'];
return(($module)['perform-module-change'](interactor__152, name__155));
}
});
{
($module)['handle-line'] = function B159(interactor__160, line__161) {
($module)['append-line!'](interactor__160, line__161);
{
var B162 = interactor__160;
{
var current_line__163 = B162['current-line'];
{
var current_environment__164 = B162['current-environment'];
try {
{
var expression__165 = ($module)['read-line'](interactor__160, current_line__163);
{
var incompleteQ__166 = B8['=='](expression__165, ($module)['$incomplete']);
{
B8['get-setter'](interactor__160, 'incomplete?', incompleteQ__166);
if (($T)(incompleteQ__166))
return(($module)['update-prompt!'](interactor__160));
else {
var code__167 = B13['compile-to-string'](expression__165, current_environment__164);
{
($module)['eval-in-module'](interactor__160, code__167);
return(B8['get-setter'](interactor__160, 'current-line', ''));
}
}
}
}
}
} catch (B168) {
if (($T)(B8['instance?'](B168, B8['<error>']))) {
var condition__169 = B168;
{
B10['format-out']('%s\n', B8['get'](condition__169, 'stack'));
{
current_line__163 = '';
return(($module)['update-prompt!'](interactor__160));
}
}
} else
return(false);
}
}
}
}
};
B8['%annotate-function'](($module)['handle-line'], 'handle-line', false);
}
{
($module)['current-module-name'] = function B172(interactor__173) {
var B174 = B8['get'](interactor__173, 'current-environment');
if (($T)(B174)) {
var env__175 = B174;
return(B8['get'](env__175, 'module', 'name'));
} else
return(false);
};
B8['%annotate-function'](($module)['current-module-name'], 'current-module-name', false);
}
{
($module)['set-prompt!'] = function B179(interactor__180, prompt__181) {
var B182 = B8['get'](interactor__180, 'interface');
{
var B183 = prompt__181;
return(B182['setPrompt'](B183));
}
};
B8['%annotate-function'](($module)['set-prompt!'], 'set-prompt!', false);
}
{
($module)['prompt!'] = function B186(interactor__187) {
var B188 = B8['get'](interactor__187, 'interface');
return(B188['prompt']());
};
B8['%annotate-function'](($module)['prompt!'], 'prompt!', false);
}
{
($module)['update-prompt!'] = function B191(interactor__192) {
var B193 = ($module)['current-module-name'](interactor__192);
{
var name__194 = false;
if (($T)(B193))
name__194 = B193;
else
name__194 = '';
{
var B195 = false;
if (($T)(B8['get'](interactor__192, 'incomplete?')))
B195 = B8['concatenate'](B8['repeat-string'](' ', B8['-'](B8['size'](name__194), 2)), '... ');
else
B195 = B8['concatenate'](name__194, '> ');
{
($module)['set-prompt!'](interactor__192, B195);
return(($module)['prompt!'](interactor__192));
}
}
}
};
B8['%annotate-function'](($module)['update-prompt!'], 'update-prompt!', false);
}
| mit |
rranford/redesigned-barnacle | html/src/js/close_open.js | 4266 |
var margin = {top: 30, right: 70, bottom: 70, left: 40},
width = 750 - margin.left - margin.right,
height = 500 - margin.top - margin.bottom;
var x = d3.time.scale().range([0, width]);
var yopen = d3.scale.linear().range([height, 0]);
var yclose = d3.scale.linear().range([height, 0]);
function make_x_axis() {
return d3.svg.axis()
.scale(x)
.orient("bottom")
.ticks(d3.time.hours, 24)
.tickFormat(d3.time.format("%Y %b %d"))
.tickSize(-height, 0, 0);
}
function make_y_axis_left() {
return d3.svg.axis()
.scale(yopen)
.orient("left")
.ticks(5)
.tickSize(-width, 0, 0);
}
function make_y_axis_right() {
return d3.svg.axis()
.scale(yclose)
.orient("right")
.ticks(5);
}
var valueline_open = d3.svg.line()
.x(function (d) {
return x(d.datetime);
})
.y(function (d) {
return yopen(d.open);
});
var valueline_close = d3.svg.line()
.x(function (d) {
return x(d.datetime);
})
.y(function (d) {
return yclose(d.close);
});
var parseDate = d3.time.format("%d-%b-%y").parse;
var svg = d3.select("body")
.append("svg")
.attr("width", width + margin.left + margin.right)
.attr("height", height + margin.top + margin.bottom)
.append("g")
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
function render() {
d3.csv("../src/data/close_open.csv", function (error, data) {
data.forEach(function (d) {
d.datetime = parseDate(d.date);
d.close = +d.close;
d.open = +d.open;
});
x.domain(d3.extent(data, function (d) {
return d.datetime;
}));
yopen.domain([0, d3.max(data, function(d) {
return Math.max(d.close, d.open);
})]);
yclose.domain([0, d3.max(data, function(d) {
return Math.max(d.close, d.open);
})]);
svg.append("path")
.attr("class", "lineopen")
.attr("d", valueline_open(data));
svg.append("path")
.attr("class", "lineclose")
.attr("d", valueline_close(data));
svg.append("text")
.attr("transform", "translate(" + (width + 3) + "," + yopen(data[0].open) + ")")
.attr("dy", ".35em")
.attr("text-anchor", "start")
.style("fill", "darkblue")
.text("Open");
svg.append("text")
.attr("transform", "translate(" + (width + 3) + "," + yclose(data[0].close) + ")")
.attr("dy", ".35em")
.attr("text-anchor", "start")
.style("fill", "darkred")
.text("Close");
svg.append("g")
.attr("class", "x axis")
.attr("transform", "translate(0," + height + ")")
.call(make_x_axis())
.selectAll("text")
.style("text-anchor", "end")
.attr("dx", "-.8em")
.attr("dy", ".15em")
.attr("transform", "rotate(-65)");
svg.append("g")
.attr("class", "y axis")
.style("fill", "darkred")
.call(make_y_axis_left());
svg.append("g")
.attr("class", "y axis")
.attr("transform", "translate(" + width + " ,0)")
.style("fill", "darkblue")
.call(make_y_axis_right());
svg.append("text")
.attr("transform",
"translate(" + (width / 2) + " ," +
(height + margin.bottom) + ")")
.style("text-anchor", "middle")
.text("Date");
svg.append("text")
.attr("x", (width / 2))
.attr("y", 0 - (margin.top / 2))
.attr("text-anchor", "middle")
.style("font-size", "16px")
.style("text-decoration", "underline")
.text("Close Open Graph");
});
} | mit |
szelcsanyi/chef-sysctl | recipes/tune_vfs.rb | 218 | #
# Cookbook Name:: L7-sysctl
# Recipe:: tune_vfs
#
# Copyright 2015, Gabor Szelcsanyi <szelcsanyi.gabor@gmail.com>
L7_sysctl 'fs.file-max' do
value '402400'
end
L7_sysctl 'fs.dir-notify-enable' do
value '0'
end
| mit |
FloKnapp/faulancer | src/Form/Validator/Base/NotEmpty.php | 481 | <?php
namespace Faulancer\Form\Validator\Base;
use Faulancer\Form\Validator\AbstractValidator;
/**
* Class NotEmpty
*
* @package Form\Validator\Base
* @author Florian Knapp <office@florianknapp.de>
*/
class NotEmpty extends AbstractValidator
{
/** @var string */
protected $errorMessage = 'validator_empty_text';
/**
* @param mixed $data
*
* @return bool
*/
public function process($data)
{
return !empty($data);
}
} | mit |
coingecko/cryptoexchange | spec/exchanges/worldcore/integration/market_spec.rb | 2456 | require 'spec_helper'
RSpec.describe 'Worldcore integration specs' do
let(:client) { Cryptoexchange::Client.new }
let(:ltc_btc_pair) { Cryptoexchange::Models::MarketPair.new(base: 'LTC', target: 'BTC', market: 'worldcore', inst_id: 49) }
it 'has trade_page_url' do
trade_page_url = client.trade_page_url ltc_btc_pair.market, base: ltc_btc_pair.base, target: ltc_btc_pair.target
expect(trade_page_url).to eq "https://worldcore.trade/exchange/ltc_btc"
end
it 'fetch pairs' do
pairs = client.pairs('worldcore')
expect(pairs).not_to be_empty
pair = pairs.first
expect(pair.base).to_not be nil
expect(pair.target).to_not be nil
expect(pair.market).to eq 'worldcore'
end
it 'fetch ticker' do
ticker = client.ticker(ltc_btc_pair)
expect(ticker.base).to eq 'LTC'
expect(ticker.target).to eq 'BTC'
expect(ticker.market).to eq 'worldcore'
expect(ticker.last).to be_a Numeric
expect(ticker.low).to be_a Numeric
expect(ticker.high).to be_a Numeric
expect(ticker.volume).to be_a Numeric
expect(ticker.timestamp).to be nil
expect(ticker.payload).to_not be nil
end
it 'fetch order book' do
order_book = client.order_book(ltc_btc_pair)
expect(order_book.base).to eq 'LTC'
expect(order_book.target).to eq 'BTC'
expect(order_book.market).to eq 'worldcore'
expect(order_book.asks).to_not be_empty
expect(order_book.bids).to_not be_empty
expect(order_book.asks.first.price).to_not be_nil
expect(order_book.bids.first.amount).to_not be_nil
expect(order_book.bids.first.timestamp).to be_nil
expect(order_book.asks.count).to be > 5
expect(order_book.bids.count).to be > 5
expect(order_book.timestamp).to be_a Numeric
expect(2000..Date.today.year).to include(Time.at(order_book.timestamp).year)
expect(order_book.payload).to_not be nil
end
it 'fetch trade' do
trades = client.trades(ltc_btc_pair)
trade = trades.sample
expect(trades).to_not be_empty
expect(trade.base).to eq 'LTC'
expect(trade.target).to eq 'BTC'
expect(trade.market).to eq 'worldcore'
expect(trade.trade_id).to_not be_nil
expect(['buy', 'sell']).to include trade.type
expect(trade.price).to_not be_nil
expect(trade.amount).to_not be_nil
expect(trade.timestamp).to be_a Numeric
expect(2000..Date.today.year).to include(Time.at(trade.timestamp).year)
expect(trade.payload).to_not be nil
end
end
| mit |
mikeaustin/impulse-js | src/experiments/promise.js | 828 | var http = require("http");
http.ClientRequest.prototype.then = function (executor) {
return new Promise((resolve, reject) => {
this.on("response", function (response) {
response.setEncoding("utf8");
var body = "";
response.on("data", (data) => {
body += data;
});
response.on("end", () => {
resolve(executor(body));
});
});
this.on("error", (error) => reject(error));
});
};
http.get("http://mike-austin.com", (data) => {
console.log(data);
});
http.get("http://mike-austin.com").then((data) => {
console.log(data);
}).then(() => console.log("Done."));
/*
Promises are asynchronous
Continuations are lazy
Add-hoc chaining
Push vs pull
http.get("http://mike-austin.com") | data => console.log(data);
"a,b,c" | .split(",") | .toUpperCase();
*/
| mit |
pon-ekh/ptest | lib/main/SimpleMain.cpp | 324 | #include <ptest/main/SimpleMain.hpp>
#include "SimpleMainImpl.hpp"
namespace ptest {
namespace main {
bool
SimpleMain::run(const hiera::TestSuiteView& suite,
std::ostream& output,
std::ostream& error) {
return SimpleMainImpl::instance().run(suite, output, error);
}
} // namespace ptest::main
} // namespace ptest
| mit |
goodgamecoin/cryptop | pubkey.go | 1656 | package cryptop
import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/x509"
"crypto/x509/pkix"
"encoding/asn1"
"errors"
)
var (
oidPublicKeyRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 1}
oidPublicKeyDSA = asn1.ObjectIdentifier{1, 2, 840, 10040, 4, 1}
oidPublicKeyECDSA = asn1.ObjectIdentifier{1, 2, 840, 10045, 2, 1}
)
// https://golang.org/src/crypto/x509/x509.go
type publicKeyInfo struct {
Raw asn1.RawContent
Algorithm pkix.AlgorithmIdentifier
PublicKey asn1.BitString
}
// An extension for x509.ParsePKIXPublicKey() that supports more ECDSA curves
func ParsePKIXPublicKey(derBytes []byte) (interface{}, error) {
var pki publicKeyInfo
if rest, err := asn1.Unmarshal(derBytes, &pki); err != nil {
return nil, err
} else if len(rest) != 0 {
return nil, errors.New("x509: trailing data after ASN.1 of public-key")
}
if !oidPublicKeyECDSA.Equal(pki.Algorithm.Algorithm) {
return x509.ParsePKIXPublicKey(derBytes)
}
asn1Data := pki.PublicKey.RightAlign()
paramsData := pki.Algorithm.Parameters.FullBytes
namedCurveOID := new(asn1.ObjectIdentifier)
rest, err := asn1.Unmarshal(paramsData, namedCurveOID)
if err != nil {
return nil, err
}
if len(rest) != 0 {
return nil, errors.New("x509: trailing data after ECDSA parameters")
}
namedCurve := namedCurveFromOID(*namedCurveOID)
if namedCurve == nil {
return nil, errors.New("x509: unsupported elliptic curve")
}
x, y := elliptic.Unmarshal(namedCurve, asn1Data)
if x == nil {
return nil, errors.New("x509: failed to unmarshal elliptic curve point")
}
pub := &ecdsa.PublicKey{
Curve: namedCurve,
X: x,
Y: y,
}
return pub, nil
}
| mit |
laisiangtho/core | asset/js/script.Language.js | 8029 | var language= {
message: {
bible: 'Bible',
book: 'Book',
chapter: 'Chapter',
verse: 'Verse',
lookup: 'Lookup',
setting: 'Setting',
parallel: 'Parallel',
bookmark: 'Bookmark',
bookmarks: 'Bookmarks',
about: 'About',
note: 'Note',
todo: 'Todo',
more: 'More',
nameChapter: '{b} {c}',
nameVerse: '{b} {c}:{v}',
Loading: 'Loading',
Checking: 'Checking',
Searching: 'Searching',
Downloading: "Downloading",
PercentLoaded: "{Percent}%",
Discover: 'Discover!',
Paused: 'Paused!',
PleaseWait: 'Please wait!',
// isEmpty: 'Would you like to remove "{is}" from local?',
// IsNotReady: '{is} not ready yet, for using!',
isLocalRemove: 'Would you like to remove "{is}" from local?',
// isLocalAdd: 'Would you like to add "{is}" to local?',
// NotFound: 'not found',
// isNotFound: '"{is}" not found!',
// isNotFoundIn: '"{is}" not found in {in}!',
// isNotFoundInAt: '"{is}" not found in {in} at {at}!',
// Selection: 'Selection',
// AddBookmarks: 'Add Bookmarks',
// BookmarkedAlready: 'Bookmarked already!',
// Bookmarked: 'Bookmarked!',
// NoBookmarks: 'No bookmarks!',
// NoMatch: 'No match...',
noMatchFor: 'No match for {for}!',
// BookMustSelected: 'Book must be selected!',
// ChapterMustSelected: 'Chapter must be selected!',
// VerseMustSelected: 'Verse must be selected!',
// FoundV: 'Found {v} verses!',
// FoundBCV: 'Found book:{b}, chapter:{c} & verse:{v}!',
noBookSelected: 'No Book selected!',
// noBookSelected: 'You have not selected any Book!',
// noChapterSelected: 'You have not selected any chapter!',
// noVerseSelected: 'You have not selected any verse!',
// ShowMe: 'show me',
// Yes: 'Yes',
// No: 'No'
},
section: {
1: 'Law',
2: 'History',
3: 'Poetry',
4: 'Prophecy: Major Prophets',
5: 'Prophecy: Minor Prophets',
6: 'Gospels',
7: 'Historical',
8: 'Doctrinal: Gentiles',
9: 'Doctrinal: Individuals',
10: 'Doctrinal: The Jews',
11: 'Doctrinal: General Espitles'
},
testament: {
1: 'Old Testament',
2: 'New Testament'
},
book: {
1: 'Genesis',
2: 'Exodus',
3: 'Leviticus',
4: 'Numbers',
5: 'Deuteronomy',
6: 'Joshua',
7: 'Judges',
8: 'Ruth',
9: '1 Samuel',
10: '2 Samuel',
11: '1 Kings',
12: '2 Kings',
13: '1 Chronicles',
14: '2 Chronicles',
15: 'Ezra',
16: 'Nehemiah',
17: 'Esther',
18: 'Job',
19: 'Psalm',
20: 'Proverbs',
21: 'Ecclesiastes',
22: 'Song of Solomon',
23: 'Isaiah',
24: 'Jeremiah',
25: 'Lamentations',
26: 'Ezekiel',
27: 'Daniel',
28: 'Hosea',
29: 'Joel',
30: 'Amos',
31: 'Obadiah',
32: 'Jonah',
33: 'Micah',
34: 'Nahum',
35: 'Habakkuk',
36: 'Zephaniah',
37: 'Haggai',
38: 'Zechariah',
39: 'Malachi',
40: 'Matthew',
41: 'Mark',
42: 'Luke',
43: 'John',
44: 'Acts',
45: 'Romans',
46: '1 Corinthians',
47: '2 Corinthians',
48: 'Galatians',
49: 'Ephesians',
50: 'Philippians',
51: 'Colossians',
52: '1 Thessalonians',
53: '2 Thessalonians',
54: '1 Timothy',
55: '2 Timothy',
56: 'Titus',
57: 'Philemon',
58: 'Hebrews',
59: 'James',
60: '1 Peter',
61: '2 Peter',
62: '1 John',
63: '2 John',
64: '3 John',
65: 'Jude',
66: 'Revelation'
},
name: {
1: ['Genesis', 'Ge', 'Gen'],
2: ['Exodus', 'Ex', 'Exo', 'Exod'],
3: ['Leviticus', 'Le', 'Lev'],
4: ['Numbers', 'Nu', 'Num'],
5: ['Deuteronomy', 'De', 'Deu', 'Deut', 'Dt'],
6: ['Joshua', 'Js', 'Jos', 'Josh'],
7: ['Judges', 'Jg', 'Jdg', 'Ju', 'Jdgs', 'Judg'],
8: ['Ruth', 'Ru', 'Rut'],
9: ['1 Samuel', '1S', '1Sa', '1 Sam', '1Sam', '1 Sa', 'I Samuel', 'I Sam', 'I Sa', 'IS'],
10: ['2 Samuel', '2S', '2Sa', '2 Sam', '2Sam', '2 Sa', 'II Samuel', 'II Sam', 'II Sa', 'IIS'],
11: ['1 Kings', '1K', '1Ki', '1 Kin', '1Kin', '1 Ki', 'IK', 'I Kings', 'I Kin', 'I Ki', '1Kgs'],
12: ['2 Kings', '2K', '2Ki', '2 Kin', '2Kin', '2 Ki', 'IIK', 'II Kings', 'II Kin', 'II Ki', '2Kgs'],
13: ['1 Chronicles', '1C', '1Ch', '1 Chr', '1Chr', '1 Ch', 'ICh', 'I Chronicles', 'I Chr', 'I Ch'],
14: ['2 Chronicles', '2C', '2Ch', '2 Chr', '2Chr', '2 Ch', 'IICh', 'II Chronicles', 'II Chr', 'II Ch'],
15: ['Ezra', 'Ezr'],
16: ['Nehemiah', 'Ne', 'Neh'],
17: ['Esther', 'Es', 'Est', 'Esth'],
18: ['Job', 'Jb'],
19: ['Psalm', 'Ps', 'Psa'],
20: ['Proverbs', 'Pr', 'Pro', 'Prov'],
21: ['Ecclesiastes', 'Ec', 'Ecc', 'Eccl'],
22: ['Song of Songs', 'So', 'Sng','Sos', 'Song of Solomon', 'SOS', 'SongOfSongs', 'SongofSolomon', 'Song'],
23: ['Isaiah', 'Is', 'Isa'],
24: ['Jeremiah', 'Je', 'Jer'],
25: ['Lamentations', 'La', 'Lam', 'Lament'],
26: ['Ezekiel', 'Ek', 'Ezk', 'Ezek', 'Eze'],
27: ['Daniel', 'Da', 'Dan', 'Dl', 'Dnl'],
28: ['Hosea', 'Ho', 'Hos'],
29: ['Joel', 'Jl', 'Jol', 'Joe'],
30: ['Amos', 'Am', 'Amo'],
31: ['Obadiah', 'Ob', 'Oba', 'Obd', 'Odbh', 'Obad'],
32: ['Jonah', 'Jh', 'Jon', 'Jnh'],
33: ['Micah', 'Mi', 'Mic'],
34: ['Nahum', 'Na', 'Nam', 'Nah'],
35: ['Habakkuk', 'Hb', 'Hab', 'Hk', 'Habk'],
36: ['Zephaniah', 'Zp', 'Zep', 'Zeph', 'Ze'],
37: ['Haggia', 'Ha', 'Hag', 'Hagg'],
38: ['Zechariah', 'Zc', 'Zec', 'Zech'],
39: ['Malachi', 'Ml', 'Mal', 'Mlc'],
40: ['Matthew', 'Mt', 'Mat', 'Matt'],
41: ['Mark', 'Mk', 'Mrk', 'Mar'],
42: ['Luke', 'Lk', 'Luk', 'Lu'],
43: ['John', 'Jn', 'Jhn', 'Joh', 'Jo'],
44: ['Acts', 'Ac', 'Act'],
45: ['Romans', 'Ro', 'Rom', 'Rmn', 'Rmns'],
46: ['1 Corinthians', '1Co', '1 Cor', '1Cor', 'ICo', '1 Co', 'I Corinthians', 'I Cor', 'I Co'],
47: ['2 Corinthians', '2Co', '2 Cor', '2Cor', 'IICo', '2 Co', 'II Corinthians', 'II Cor', 'II Co'],
48: ['Galatians', 'Ga', 'Gal', 'Gltns'],
49: ['Ephesians', 'Ep', 'Eph', 'Ephn'],
50: ['Philippians', 'Pp', 'Php', 'Phi', 'Phil'],
51: ['Colossians', 'Co', 'Col', 'Colo', 'Cln', 'Clns'],
52: ['1 Thessalonians', '1Th', '1 Thess', '1Thess', 'ITh', '1 Thes', '1Thes', '1 The', '1The', '1 Th', 'I Thessalonians', 'I Thess', 'I The', 'I Th'],
53: ['2 Thessalonians', '2Th', '2 Thess', '2Thess', 'IITh', '2 Thes', '2Thes', '2 The', '2The', '2 Th', 'II Thessalonians', 'II Thess', 'II The', 'II Th'],
54: ['1 Timothy', '1Ti', '1 Tim', '1Tim', '1 Ti', 'ITi', 'I Timothy', 'I Tim', 'I Ti'],
55: ['2 Timothy', '2Ti', '2 Tim', '2Tim', '2 Ti', 'IITi', 'II Timothy', 'II Tim', 'II Ti'],
56: ['Titus', 'Ti', 'Tit', 'Tt', 'Ts'],
57: ['Philemon', 'Pm', 'Phm', 'Phile', 'Philm', 'Phlm'],
58: ['Hebrews', 'He', 'Heb', 'Hw'],
59: ['James', 'Jm', 'Jas', 'Jam', 'Ja'],
60: ['1 Peter', '1P', '1Pe', '1 Pet', '1Pet', 'IPe', 'I Peter', 'I Pet', 'I Pe'],
61: ['2 Peter', '2P', '2Pe', '2 Pet', '2Pet', 'IIP', 'II Peter', 'II Pet', 'II Pe'],
62: ['1 John', '1J', '1Jn', '1 Jn', '1 Jo', 'IJo', 'I John', 'I Jo', 'I Jn', '1John'],
63: ['2 John', '2J', '2Jn', '2 Jn', '2 Jo', 'IIJo', 'II John', 'II Jo', 'II Jn', '2John'],
64: ['3 John', '3J', '3Jn', '3 Jn', '3 Jo', 'IIIJo', 'III John', 'III Jo', 'III Jn', '3John'],
65: ['Jude','Jud'],
66: ['Revelation', 'Re', 'Rev', 'Rvltn']
}
// digit:{}
}; | mit |