repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
menify/sandbox | aql/benchmark/lib_75/class_5.cpp | 312 | #include "class_5.h"
#include "class_5.h"
#include "class_6.h"
#include "class_3.h"
#include "class_9.h"
#include "class_4.h"
#include <lib_54/class_4.h>
#include <lib_57/class_7.h>
#include <lib_27/class_5.h>
#include <lib_74/class_1.h>
#include <lib_72/class_8.h>
class_5::class_5() {}
class_5::~class_5() {}
| mit |
davidsantoso/active_merchant | test/remote/gateways/remote_worldpay_us_test.rb | 4207 | require 'test_helper'
class RemoteWorldpayUsTest < Test::Unit::TestCase
def setup
@gateway = WorldpayUsGateway.new(fixtures(:worldpay_us))
@amount = 100
@credit_card = credit_card('4446661234567892', :verification_value => '987')
@declined_card = credit_card('4000300011112220')
@check = check(:number => '12345654321')
@options = {
order_id: generate_unique_id,
billing_address: address,
description: 'Store Purchase'
}
end
def test_successful_purchase
response = @gateway.purchase(@amount, @credit_card, @options)
assert_success response
assert_equal 'Succeeded', response.message
end
def test_successful_purchase_on_backup_url
gateway = WorldpayUsGateway.new(fixtures(:worldpay_us).merge({ use_backup_url: true}))
response = gateway.purchase(@amount, @credit_card, @options)
assert_success response
assert_equal 'Succeeded', response.message
end
def test_failed_purchase
response = @gateway.purchase(@amount, @declined_card, @options)
assert_failure response
assert response.message =~ /DECLINED/
end
def test_successful_echeck_purchase
response = @gateway.purchase(@amount, @check, @options)
assert_equal 'Succeeded', response.message
assert_success response
end
def test_failed_echeck_purchase
response = @gateway.purchase(@amount, check(routing_number: '23433'), @options)
assert_failure response
assert response.message =~ /DECLINED/
end
def test_successful_authorize_and_capture
assert response = @gateway.authorize(@amount, @credit_card, @options)
assert_success response
assert_equal 'Succeeded', response.message
assert_match %r(^\d+\|.+$), response.authorization
assert capture = @gateway.capture(@amount, response.authorization)
assert_success capture
assert_equal 'Succeeded', capture.message
end
def test_failed_authorize
assert response = @gateway.authorize(@amount, @declined_card, @options)
assert_failure response
assert response.message =~ /DECLINED/
end
def test_successful_refund
assert response = @gateway.purchase(@amount, @credit_card, @options)
assert_success response
assert refund = @gateway.refund(@amount, response.authorization)
assert_success refund
assert_equal 'Succeeded', refund.message
end
def test_successful_void
auth = @gateway.authorize(@amount, @credit_card, @options)
assert_success auth
assert void = @gateway.void(auth.authorization)
assert_success void
end
def test_failed_void
response = @gateway.void('')
assert_failure response
end
def test_successful_verify
assert response = @gateway.verify(@credit_card, @options)
assert_success response
assert_equal 'Succeeded', response.message
assert_success response.responses.last, 'The void should succeed'
end
def test_failed_verify
bogus_card = credit_card('4424222222222222')
assert response = @gateway.verify(bogus_card, @options)
assert_failure response
assert response.message =~ /DECLINED/
end
def test_passing_billing_address
assert response = @gateway.purchase(@amount, @credit_card, @options.merge(:billing_address => address))
assert_success response
end
def test_invalid_login
gateway = WorldpayUsGateway.new(
:acctid => '',
:subid => '',
:merchantpin => ''
)
assert response = gateway.purchase(@amount, @credit_card, @options)
assert_failure response
assert response.message =~ /DECLINED/
end
def test_transcript_scrubbing
transcript = capture_transcript(@gateway) do
@gateway.purchase(@amount, @credit_card, @options)
end
transcript = @gateway.scrub(transcript)
assert_scrubbed(@credit_card.number, transcript)
assert_scrubbed(@credit_card.verification_value, transcript)
assert_scrubbed(@gateway.options[:merchantpin], transcript)
transcript = capture_transcript(@gateway) do
@gateway.purchase(@amount, @check, @options)
end
transcript = @gateway.scrub(transcript)
assert_scrubbed(@check.account_number, transcript)
assert_scrubbed(@gateway.options[:merchantpin], transcript)
end
end
| mit |
Tar-Minyatur/WorkTracker | src/main/java/de/tshw/worktracker/view/JTextFieldWithPlaceholder.java | 2922 | /******************************************************************************
* This file is part of WorkTracker, Copyright (c) 2015 Till Helge Helwig. *
* *
* WorkTracker is distributed under the MIT License, so feel free to do *
* whatever you want with application or code. You may notify the author *
* about bugs via http://github.com/Tar-Minyatur/WorkTracker/issues, but *
* be aware that he is not (legally) obligated to provide support. You are *
* using this software at your own risk. *
******************************************************************************/
package de.tshw.worktracker.view;
import javax.swing.*;
import java.awt.*;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
public class JTextFieldWithPlaceholder extends JTextField {
private static final Color disabledTextColor;
private static final Color activeTextColor;
static {
UIDefaults uiDefaults = UIManager.getDefaults();
disabledTextColor = uiDefaults.getColor("Label.disabledForeground");
activeTextColor = uiDefaults.getColor("TextField.foreground");
}
private String placeholder;
private boolean fieldIsEmpty;
public JTextFieldWithPlaceholder( String placeholder ) {
super("");
this.placeholder = placeholder;
this.fieldIsEmpty = true;
this.showPlaceholder(true);
this.setupComponent();
}
private void showPlaceholder( boolean show ) {
if ( show ) {
super.setText(this.placeholder);
this.setForeground(disabledTextColor);
} else {
if ( fieldIsEmpty ) {
super.setText("");
}
this.setForeground(activeTextColor);
}
}
private void setupComponent() {
this.addFocusListener(new FocusAdapter() {
@Override
public void focusGained( FocusEvent e ) {
super.focusGained(e);
updatePlaceholderStatus();
}
@Override
public void focusLost( FocusEvent e ) {
super.focusLost(e);
setFieldIsEmpty(getEnteredText().length() == 0);
updatePlaceholderStatus();
}
});
}
private void updatePlaceholderStatus() {
if ( this.fieldIsEmpty ) {
this.showPlaceholder(!this.hasFocus());
} else {
this.showPlaceholder(false);
}
}
private void setFieldIsEmpty( boolean fieldIsEmpty ) {
this.fieldIsEmpty = fieldIsEmpty;
}
private String getEnteredText() {
return super.getText();
}
@Override
public String getText() {
return fieldIsEmpty ? "" : getEnteredText();
}
public JTextFieldWithPlaceholder( String text, String placeholder ) {
super(text);
this.fieldIsEmpty = false;
this.placeholder = placeholder;
this.showPlaceholder(( text.length() == 0 ));
this.setupComponent();
}
@Override
public void setText( String text ) {
super.setText(text);
setFieldIsEmpty(( text == null ) || ( text.length() == 0 ));
updatePlaceholderStatus();
}
}
| mit |
ExperisIT-rav/Bifhi-release | find-idol/src/main/public/static/js/find/idol/public/idol-app.js | 504 | /*
* Copyright 2015 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
define([
'find/public/app',
'find/idol/public/idol-pages',
'find/idol/public/idol-navigation'
], function(BaseApp, Pages, Navigation) {
'use strict';
return BaseApp.extend({
Navigation: Navigation,
constructPages: function() {
return new Pages();
}
});
});
| mit |
btalaei/RescueCoin | src/qt/sendcoinsdialog.cpp | 18353 | #include "sendcoinsdialog.h"
#include "ui_sendcoinsdialog.h"
#include "init.h"
#include "walletmodel.h"
#include "addresstablemodel.h"
#include "bitcoinunits.h"
#include "addressbookpage.h"
#include "optionsmodel.h"
#include "sendcoinsentry.h"
#include "guiutil.h"
#include "askpassphrasedialog.h"
#include "coincontrol.h"
#include "coincontroldialog.h"
#include <QMessageBox>
#include <QTextDocument>
#include <QScrollBar>
#include <QClipboard>
SendCoinsDialog::SendCoinsDialog(QWidget *parent) :
QDialog(parent),
ui(new Ui::SendCoinsDialog),
model(0)
{
ui->setupUi(this);
#ifdef Q_OS_MAC // Icons on push buttons are very uncommon on Mac
ui->addButton->setIcon(QIcon());
ui->clearButton->setIcon(QIcon());
ui->sendButton->setIcon(QIcon());
#endif
#if QT_VERSION >= 0x040700
/* Do not move this to the XML file, Qt before 4.7 will choke on it */
ui->lineEditCoinControlChange->setPlaceholderText(tr("Enter a Rescuecoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)"));
#endif
addEntry();
connect(ui->addButton, SIGNAL(clicked()), this, SLOT(addEntry()));
connect(ui->clearButton, SIGNAL(clicked()), this, SLOT(clear()));
// Coin Control
ui->lineEditCoinControlChange->setFont(GUIUtil::bitcoinAddressFont());
connect(ui->pushButtonCoinControl, SIGNAL(clicked()), this, SLOT(coinControlButtonClicked()));
connect(ui->checkBoxCoinControlChange, SIGNAL(stateChanged(int)), this, SLOT(coinControlChangeChecked(int)));
connect(ui->lineEditCoinControlChange, SIGNAL(textEdited(const QString &)), this, SLOT(coinControlChangeEdited(const QString &)));
// Coin Control: clipboard actions
QAction *clipboardQuantityAction = new QAction(tr("Copy quantity"), this);
QAction *clipboardAmountAction = new QAction(tr("Copy amount"), this);
QAction *clipboardFeeAction = new QAction(tr("Copy fee"), this);
QAction *clipboardAfterFeeAction = new QAction(tr("Copy after fee"), this);
QAction *clipboardBytesAction = new QAction(tr("Copy bytes"), this);
QAction *clipboardPriorityAction = new QAction(tr("Copy priority"), this);
QAction *clipboardLowOutputAction = new QAction(tr("Copy low output"), this);
QAction *clipboardChangeAction = new QAction(tr("Copy change"), this);
connect(clipboardQuantityAction, SIGNAL(triggered()), this, SLOT(coinControlClipboardQuantity()));
connect(clipboardAmountAction, SIGNAL(triggered()), this, SLOT(coinControlClipboardAmount()));
connect(clipboardFeeAction, SIGNAL(triggered()), this, SLOT(coinControlClipboardFee()));
connect(clipboardAfterFeeAction, SIGNAL(triggered()), this, SLOT(coinControlClipboardAfterFee()));
connect(clipboardBytesAction, SIGNAL(triggered()), this, SLOT(coinControlClipboardBytes()));
connect(clipboardPriorityAction, SIGNAL(triggered()), this, SLOT(coinControlClipboardPriority()));
connect(clipboardLowOutputAction, SIGNAL(triggered()), this, SLOT(coinControlClipboardLowOutput()));
connect(clipboardChangeAction, SIGNAL(triggered()), this, SLOT(coinControlClipboardChange()));
ui->labelCoinControlQuantity->addAction(clipboardQuantityAction);
ui->labelCoinControlAmount->addAction(clipboardAmountAction);
ui->labelCoinControlFee->addAction(clipboardFeeAction);
ui->labelCoinControlAfterFee->addAction(clipboardAfterFeeAction);
ui->labelCoinControlBytes->addAction(clipboardBytesAction);
ui->labelCoinControlPriority->addAction(clipboardPriorityAction);
ui->labelCoinControlLowOutput->addAction(clipboardLowOutputAction);
ui->labelCoinControlChange->addAction(clipboardChangeAction);
fNewRecipientAllowed = true;
}
void SendCoinsDialog::setModel(WalletModel *model)
{
this->model = model;
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
{
entry->setModel(model);
}
}
if(model && model->getOptionsModel())
{
setBalance(model->getBalance(), model->getUnconfirmedBalance(), model->getImmatureBalance());
connect(model, SIGNAL(balanceChanged(qint64, qint64, qint64)), this, SLOT(setBalance(qint64, qint64, qint64)));
connect(model->getOptionsModel(), SIGNAL(displayUnitChanged(int)), this, SLOT(updateDisplayUnit()));
// Coin Control
connect(model->getOptionsModel(), SIGNAL(displayUnitChanged(int)), this, SLOT(coinControlUpdateLabels()));
connect(model->getOptionsModel(), SIGNAL(coinControlFeaturesChanged(bool)), this, SLOT(coinControlFeatureChanged(bool)));
connect(model->getOptionsModel(), SIGNAL(transactionFeeChanged(qint64)), this, SLOT(coinControlUpdateLabels()));
ui->frameCoinControl->setVisible(model->getOptionsModel()->getCoinControlFeatures());
coinControlUpdateLabels();
}
}
SendCoinsDialog::~SendCoinsDialog()
{
delete ui;
}
void SendCoinsDialog::on_sendButton_clicked()
{
QList<SendCoinsRecipient> recipients;
bool valid = true;
if(!model)
return;
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
{
if(entry->validate())
{
recipients.append(entry->getValue());
}
else
{
valid = false;
}
}
}
if(!valid || recipients.isEmpty())
{
return;
}
// Format confirmation message
QStringList formatted;
foreach(const SendCoinsRecipient &rcp, recipients)
{
#if QT_VERSION < 0x050000
formatted.append(tr("<b>%1</b> to %2 (%3)").arg(BitcoinUnits::formatWithUnit(BitcoinUnits::BTC, rcp.amount), Qt::escape(rcp.label), rcp.address));
#else
formatted.append(tr("<b>%1</b> to %2 (%3)").arg(BitcoinUnits::formatWithUnit(BitcoinUnits::BTC, rcp.amount), rcp.label.toHtmlEscaped(), rcp.address));
#endif
}
fNewRecipientAllowed = false;
QMessageBox::StandardButton retval = QMessageBox::question(this, tr("Confirm send coins"),
tr("Are you sure you want to send %1?").arg(formatted.join(tr(" and "))),
QMessageBox::Yes|QMessageBox::Cancel,
QMessageBox::Cancel);
if(retval != QMessageBox::Yes)
{
fNewRecipientAllowed = true;
return;
}
WalletModel::UnlockContext ctx(model->requestUnlock());
if(!ctx.isValid())
{
// Unlock wallet was cancelled
fNewRecipientAllowed = true;
return;
}
WalletModel::SendCoinsReturn sendstatus;
if (!model->getOptionsModel() || !model->getOptionsModel()->getCoinControlFeatures())
sendstatus = model->sendCoins(recipients);
else
sendstatus = model->sendCoins(recipients, CoinControlDialog::coinControl);
switch(sendstatus.status)
{
case WalletModel::InvalidAddress:
QMessageBox::warning(this, tr("Send Coins"),
tr("The recipient address is not valid, please recheck."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::InvalidAmount:
QMessageBox::warning(this, tr("Send Coins"),
tr("The amount to pay must be larger than 0."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::AmountExceedsBalance:
QMessageBox::warning(this, tr("Send Coins"),
tr("The amount exceeds your balance."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::AmountWithFeeExceedsBalance:
QMessageBox::warning(this, tr("Send Coins"),
tr("The total exceeds your balance when the %1 transaction fee is included.").
arg(BitcoinUnits::formatWithUnit(BitcoinUnits::BTC, sendstatus.fee)),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::DuplicateAddress:
QMessageBox::warning(this, tr("Send Coins"),
tr("Duplicate address found, can only send to each address once per send operation."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::TransactionCreationFailed:
QMessageBox::warning(this, tr("Send Coins"),
tr("Error: Transaction creation failed!"),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::TransactionCommitFailed:
QMessageBox::warning(this, tr("Send Coins"),
tr("Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here."),
QMessageBox::Ok, QMessageBox::Ok);
break;
case WalletModel::Aborted: // User aborted, nothing to do
break;
case WalletModel::OK:
accept();
CoinControlDialog::coinControl->UnSelectAll();
coinControlUpdateLabels();
break;
}
fNewRecipientAllowed = true;
}
void SendCoinsDialog::clear()
{
// Remove entries until only one left
while(ui->entries->count())
{
ui->entries->takeAt(0)->widget()->deleteLater();
}
addEntry();
updateRemoveEnabled();
ui->sendButton->setDefault(true);
}
void SendCoinsDialog::reject()
{
clear();
}
void SendCoinsDialog::accept()
{
clear();
}
SendCoinsEntry *SendCoinsDialog::addEntry()
{
SendCoinsEntry *entry = new SendCoinsEntry(this);
entry->setModel(model);
ui->entries->addWidget(entry);
connect(entry, SIGNAL(removeEntry(SendCoinsEntry*)), this, SLOT(removeEntry(SendCoinsEntry*)));
connect(entry, SIGNAL(payAmountChanged()), this, SLOT(coinControlUpdateLabels()));
updateRemoveEnabled();
// Focus the field, so that entry can start immediately
entry->clear();
entry->setFocus();
ui->scrollAreaWidgetContents->resize(ui->scrollAreaWidgetContents->sizeHint());
qApp->processEvents();
QScrollBar* bar = ui->scrollArea->verticalScrollBar();
if(bar)
bar->setSliderPosition(bar->maximum());
return entry;
}
void SendCoinsDialog::updateRemoveEnabled()
{
// Remove buttons are enabled as soon as there is more than one send-entry
bool enabled = (ui->entries->count() > 1);
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
{
entry->setRemoveEnabled(enabled);
}
}
setupTabChain(0);
coinControlUpdateLabels();
}
void SendCoinsDialog::removeEntry(SendCoinsEntry* entry)
{
entry->deleteLater();
updateRemoveEnabled();
}
QWidget *SendCoinsDialog::setupTabChain(QWidget *prev)
{
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
{
prev = entry->setupTabChain(prev);
}
}
QWidget::setTabOrder(prev, ui->addButton);
QWidget::setTabOrder(ui->addButton, ui->sendButton);
return ui->sendButton;
}
void SendCoinsDialog::setAddress(const QString &address)
{
SendCoinsEntry *entry = 0;
// Replace the first entry if it is still unused
if(ui->entries->count() == 1)
{
SendCoinsEntry *first = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(0)->widget());
if(first->isClear())
{
entry = first;
}
}
if(!entry)
{
entry = addEntry();
}
entry->setAddress(address);
}
void SendCoinsDialog::pasteEntry(const SendCoinsRecipient &rv)
{
if(!fNewRecipientAllowed)
return;
SendCoinsEntry *entry = 0;
// Replace the first entry if it is still unused
if(ui->entries->count() == 1)
{
SendCoinsEntry *first = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(0)->widget());
if(first->isClear())
{
entry = first;
}
}
if(!entry)
{
entry = addEntry();
}
entry->setValue(rv);
}
bool SendCoinsDialog::handleURI(const QString &uri)
{
SendCoinsRecipient rv;
// URI has to be valid
if (GUIUtil::parseBitcoinURI(uri, &rv))
{
CBitcoinAddress address(rv.address.toStdString());
if (!address.IsValid())
return false;
pasteEntry(rv);
return true;
}
return false;
}
void SendCoinsDialog::setBalance(qint64 balance, qint64 unconfirmedBalance, qint64 immatureBalance)
{
Q_UNUSED(unconfirmedBalance);
Q_UNUSED(immatureBalance);
if(!model || !model->getOptionsModel())
return;
int unit = model->getOptionsModel()->getDisplayUnit();
ui->labelBalance->setText(BitcoinUnits::formatWithUnit(unit, balance));
}
void SendCoinsDialog::updateDisplayUnit()
{
if(model && model->getOptionsModel())
{
// Update labelBalance with the current balance and the current unit
ui->labelBalance->setText(BitcoinUnits::formatWithUnit(model->getOptionsModel()->getDisplayUnit(), model->getBalance()));
}
}
// Coin Control: copy label "Quantity" to clipboard
void SendCoinsDialog::coinControlClipboardQuantity()
{
GUIUtil::setClipboard(ui->labelCoinControlQuantity->text());
}
// Coin Control: copy label "Amount" to clipboard
void SendCoinsDialog::coinControlClipboardAmount()
{
GUIUtil::setClipboard(ui->labelCoinControlAmount->text().left(ui->labelCoinControlAmount->text().indexOf(" ")));
}
// Coin Control: copy label "Fee" to clipboard
void SendCoinsDialog::coinControlClipboardFee()
{
GUIUtil::setClipboard(ui->labelCoinControlFee->text().left(ui->labelCoinControlFee->text().indexOf(" ")));
}
// Coin Control: copy label "After fee" to clipboard
void SendCoinsDialog::coinControlClipboardAfterFee()
{
GUIUtil::setClipboard(ui->labelCoinControlAfterFee->text().left(ui->labelCoinControlAfterFee->text().indexOf(" ")));
}
// Coin Control: copy label "Bytes" to clipboard
void SendCoinsDialog::coinControlClipboardBytes()
{
GUIUtil::setClipboard(ui->labelCoinControlBytes->text());
}
// Coin Control: copy label "Priority" to clipboard
void SendCoinsDialog::coinControlClipboardPriority()
{
GUIUtil::setClipboard(ui->labelCoinControlPriority->text());
}
// Coin Control: copy label "Low output" to clipboard
void SendCoinsDialog::coinControlClipboardLowOutput()
{
GUIUtil::setClipboard(ui->labelCoinControlLowOutput->text());
}
// Coin Control: copy label "Change" to clipboard
void SendCoinsDialog::coinControlClipboardChange()
{
GUIUtil::setClipboard(ui->labelCoinControlChange->text().left(ui->labelCoinControlChange->text().indexOf(" ")));
}
// Coin Control: settings menu - coin control enabled/disabled by user
void SendCoinsDialog::coinControlFeatureChanged(bool checked)
{
ui->frameCoinControl->setVisible(checked);
if (!checked && model) // coin control features disabled
CoinControlDialog::coinControl->SetNull();
}
// Coin Control: button inputs -> show actual coin control dialog
void SendCoinsDialog::coinControlButtonClicked()
{
CoinControlDialog dlg;
dlg.setModel(model);
dlg.exec();
coinControlUpdateLabels();
}
// Coin Control: checkbox custom change address
void SendCoinsDialog::coinControlChangeChecked(int state)
{
if (model)
{
if (state == Qt::Checked)
CoinControlDialog::coinControl->destChange = CBitcoinAddress(ui->lineEditCoinControlChange->text().toStdString()).Get();
else
CoinControlDialog::coinControl->destChange = CNoDestination();
}
ui->lineEditCoinControlChange->setEnabled((state == Qt::Checked));
ui->labelCoinControlChangeLabel->setVisible((state == Qt::Checked));
}
// Coin Control: custom change address changed
void SendCoinsDialog::coinControlChangeEdited(const QString & text)
{
if (model)
{
CoinControlDialog::coinControl->destChange = CBitcoinAddress(text.toStdString()).Get();
// label for the change address
ui->labelCoinControlChangeLabel->setStyleSheet("QLabel{color:black;}");
if (text.isEmpty())
ui->labelCoinControlChangeLabel->setText("");
else if (!CBitcoinAddress(text.toStdString()).IsValid())
{
ui->labelCoinControlChangeLabel->setStyleSheet("QLabel{color:red;}");
ui->labelCoinControlChangeLabel->setText(tr("Warning: Invalid Bitcoin address"));
}
else
{
QString associatedLabel = model->getAddressTableModel()->labelForAddress(text);
if (!associatedLabel.isEmpty())
ui->labelCoinControlChangeLabel->setText(associatedLabel);
else
{
CPubKey pubkey;
CKeyID keyid;
CBitcoinAddress(text.toStdString()).GetKeyID(keyid);
if (model->getPubKey(keyid, pubkey))
ui->labelCoinControlChangeLabel->setText(tr("(no label)"));
else
{
ui->labelCoinControlChangeLabel->setStyleSheet("QLabel{color:red;}");
ui->labelCoinControlChangeLabel->setText(tr("Warning: Unknown change address"));
}
}
}
}
}
// Coin Control: update labels
void SendCoinsDialog::coinControlUpdateLabels()
{
if (!model || !model->getOptionsModel() || !model->getOptionsModel()->getCoinControlFeatures())
return;
// set pay amounts
CoinControlDialog::payAmounts.clear();
for(int i = 0; i < ui->entries->count(); ++i)
{
SendCoinsEntry *entry = qobject_cast<SendCoinsEntry*>(ui->entries->itemAt(i)->widget());
if(entry)
CoinControlDialog::payAmounts.append(entry->getValue().amount);
}
if (CoinControlDialog::coinControl->HasSelected())
{
// actual coin control calculation
CoinControlDialog::updateLabels(model, this);
// show coin control stats
ui->labelCoinControlAutomaticallySelected->hide();
ui->widgetCoinControl->show();
}
else
{
// hide coin control stats
ui->labelCoinControlAutomaticallySelected->show();
ui->widgetCoinControl->hide();
ui->labelCoinControlInsuffFunds->hide();
}
}
| mit |
andrewboss/bitclone | template/bitcoin/src/qt/bitcoin.cpp | 10991 | /*
* W.J. van der Laan 2011-2012
*/
#include "bitcoingui.h"
#include "clientmodel.h"
#include "walletmodel.h"
#include "optionsmodel.h"
#include "guiutil.h"
#include "guiconstants.h"
#include "init.h"
#include "ui_interface.h"
#include "qtipcserver.h"
#include <QApplication>
#include <QMessageBox>
#include <QTextCodec>
#include <QLocale>
#include <QTranslator>
#include <QSplashScreen>
#include <QLibraryInfo>
#include <boost/interprocess/ipc/message_queue.hpp>
#include <boost/algorithm/string/predicate.hpp>
#if defined(BITCOIN_NEED_QT_PLUGINS) && !defined(_BITCOIN_QT_PLUGINS_INCLUDED)
#define _BITCOIN_QT_PLUGINS_INCLUDED
#define __INSURE__
#include <QtPlugin>
Q_IMPORT_PLUGIN(qcncodecs)
Q_IMPORT_PLUGIN(qjpcodecs)
Q_IMPORT_PLUGIN(qtwcodecs)
Q_IMPORT_PLUGIN(qkrcodecs)
Q_IMPORT_PLUGIN(qtaccessiblewidgets)
#endif
// Need a global reference for the notifications to find the GUI
static BitcoinGUI *guiref;
static QSplashScreen *splashref;
static void ThreadSafeMessageBox(const std::string& message, const std::string& caption, int style)
{
// Message from network thread
if(guiref)
{
bool modal = (style & CClientUIInterface::MODAL);
// in case of modal message, use blocking connection to wait for user to click OK
QMetaObject::invokeMethod(guiref, "error",
modal ? GUIUtil::blockingGUIThreadConnection() : Qt::QueuedConnection,
Q_ARG(QString, QString::fromStdString(caption)),
Q_ARG(QString, QString::fromStdString(message)),
Q_ARG(bool, modal));
}
else
{
printf("%s: %s\n", caption.c_str(), message.c_str());
fprintf(stderr, "%s: %s\n", caption.c_str(), message.c_str());
}
}
static bool ThreadSafeAskFee(int64 nFeeRequired, const std::string& strCaption)
{
if(!guiref)
return false;
if(nFeeRequired < MIN_TX_FEE || nFeeRequired <= nTransactionFee || fDaemon)
return true;
bool payFee = false;
QMetaObject::invokeMethod(guiref, "askFee", GUIUtil::blockingGUIThreadConnection(),
Q_ARG(qint64, nFeeRequired),
Q_ARG(bool*, &payFee));
return payFee;
}
static void ThreadSafeHandleURI(const std::string& strURI)
{
if(!guiref)
return;
QMetaObject::invokeMethod(guiref, "handleURI", GUIUtil::blockingGUIThreadConnection(),
Q_ARG(QString, QString::fromStdString(strURI)));
}
static void InitMessage(const std::string &message)
{
if(splashref)
{
splashref->showMessage(QString::fromStdString(message), Qt::AlignBottom|Qt::AlignHCenter, QColor(255,255,200));
QApplication::instance()->processEvents();
}
}
static void QueueShutdown()
{
QMetaObject::invokeMethod(QCoreApplication::instance(), "quit", Qt::QueuedConnection);
}
/*
Translate string to current locale using Qt.
*/
static std::string Translate(const char* psz)
{
return QCoreApplication::translate("bitcoin-core", psz).toStdString();
}
/* Handle runaway exceptions. Shows a message box with the problem and quits the program.
*/
static void handleRunawayException(std::exception *e)
{
PrintExceptionContinue(e, "Runaway exception");
QMessageBox::critical(0, "Runaway exception", BitcoinGUI::tr("A fatal error occured. ${bcl_name} can no longer continue safely and will quit.") + QString("\n\n") + QString::fromStdString(strMiscWarning));
exit(1);
}
#ifndef BITCOIN_QT_TEST
int main(int argc, char *argv[])
{
// TODO: implement URI support on the Mac.
#if !defined(MAC_OSX)
// Do this early as we don't want to bother initializing if we are just calling IPC
for (int i = 1; i < argc; i++)
{
if (boost::algorithm::istarts_with(argv[i], "bitcoin:"))
{
const char *strURI = argv[i];
try {
boost::interprocess::message_queue mq(boost::interprocess::open_only, BITCOINURI_QUEUE_NAME);
if (mq.try_send(strURI, strlen(strURI), 0))
// if URI could be sent to the message queue exit here
exit(0);
else
// if URI could not be sent to the message queue do a normal ${bcl_name}-Qt startup
break;
}
catch (boost::interprocess::interprocess_exception &ex) {
// don't log the "file not found" exception, because that's normal for
// the first start of the first instance
if (ex.get_error_code() != boost::interprocess::not_found_error)
{
printf("main() - boost interprocess exception #%d: %s\n", ex.get_error_code(), ex.what());
break;
}
}
}
}
#endif
// Internal string conversion is all UTF-8
QTextCodec::setCodecForTr(QTextCodec::codecForName("UTF-8"));
QTextCodec::setCodecForCStrings(QTextCodec::codecForTr());
Q_INIT_RESOURCE(bitcoin);
QApplication app(argc, argv);
// Install global event filter that makes sure that long tooltips can be word-wrapped
app.installEventFilter(new GUIUtil::ToolTipToRichTextFilter(TOOLTIP_WRAP_THRESHOLD, &app));
// Command-line options take precedence:
ParseParameters(argc, argv);
// ... then bitcoin.conf:
if (!boost::filesystem::is_directory(GetDataDir(false)))
{
fprintf(stderr, "Error: Specified directory does not exist\n");
return 1;
}
ReadConfigFile(mapArgs, mapMultiArgs);
// Application identification (must be set before OptionsModel is initialized,
// as it is used to locate QSettings)
app.setOrganizationName("${bcl_name}");
app.setOrganizationDomain("bitclone.net");
if(GetBoolArg("-testnet")) // Separate UI settings for testnet
app.setApplicationName("${bcl_name}-qt-testnet");
else
app.setApplicationName("${bcl_name}-qt");
// ... then GUI settings:
OptionsModel optionsModel;
// Get desired locale (e.g. "de_DE") from command line or use system locale
QString lang_territory = QString::fromStdString(GetArg("-lang", QLocale::system().name().toStdString()));
QString lang = lang_territory;
// Convert to "de" only by truncating "_DE"
lang.truncate(lang_territory.lastIndexOf('_'));
QTranslator qtTranslatorBase, qtTranslator, translatorBase, translator;
// Load language files for configured locale:
// - First load the translator for the base language, without territory
// - Then load the more specific locale translator
// Load e.g. qt_de.qm
if (qtTranslatorBase.load("qt_" + lang, QLibraryInfo::location(QLibraryInfo::TranslationsPath)))
app.installTranslator(&qtTranslatorBase);
// Load e.g. qt_de_DE.qm
if (qtTranslator.load("qt_" + lang_territory, QLibraryInfo::location(QLibraryInfo::TranslationsPath)))
app.installTranslator(&qtTranslator);
// Load e.g. bitcoin_de.qm (shortcut "de" needs to be defined in bitcoin.qrc)
if (translatorBase.load(lang, ":/translations/"))
app.installTranslator(&translatorBase);
// Load e.g. bitcoin_de_DE.qm (shortcut "de_DE" needs to be defined in bitcoin.qrc)
if (translator.load(lang_territory, ":/translations/"))
app.installTranslator(&translator);
// Subscribe to global signals from core
uiInterface.ThreadSafeMessageBox.connect(ThreadSafeMessageBox);
uiInterface.ThreadSafeAskFee.connect(ThreadSafeAskFee);
uiInterface.ThreadSafeHandleURI.connect(ThreadSafeHandleURI);
uiInterface.InitMessage.connect(InitMessage);
uiInterface.QueueShutdown.connect(QueueShutdown);
uiInterface.Translate.connect(Translate);
// Show help message immediately after parsing command-line options (for "-lang") and setting locale,
// but before showing splash screen.
if (mapArgs.count("-?") || mapArgs.count("--help"))
{
GUIUtil::HelpMessageBox help;
help.showOrPrint();
return 1;
}
QSplashScreen splash(QPixmap(":/images/splash"), 0);
if (GetBoolArg("-splash", true) && !GetBoolArg("-min"))
{
splash.show();
splash.setAutoFillBackground(true);
splashref = &splash;
}
app.processEvents();
app.setQuitOnLastWindowClosed(false);
try
{
// Regenerate startup link, to fix links to old versions
if (GUIUtil::GetStartOnSystemStartup())
GUIUtil::SetStartOnSystemStartup(true);
BitcoinGUI window;
guiref = &window;
if(AppInit2())
{
{
// Put this in a block, so that the Model objects are cleaned up before
// calling Shutdown().
optionsModel.Upgrade(); // Must be done after AppInit2
if (splashref)
splash.finish(&window);
ClientModel clientModel(&optionsModel);
WalletModel walletModel(pwalletMain, &optionsModel);
window.setClientModel(&clientModel);
window.setWalletModel(&walletModel);
// If -min option passed, start window minimized.
if(GetBoolArg("-min"))
{
window.showMinimized();
}
else
{
window.show();
}
// TODO: implement URI support on the Mac.
#if !defined(MAC_OSX)
// Place this here as guiref has to be defined if we dont want to lose URIs
ipcInit();
// Check for URI in argv
for (int i = 1; i < argc; i++)
{
if (boost::algorithm::istarts_with(argv[i], "bitcoin:"))
{
const char *strURI = argv[i];
try {
boost::interprocess::message_queue mq(boost::interprocess::open_only, BITCOINURI_QUEUE_NAME);
mq.try_send(strURI, strlen(strURI), 0);
}
catch (boost::interprocess::interprocess_exception &ex) {
printf("main() - boost interprocess exception #%d: %s\n", ex.get_error_code(), ex.what());
break;
}
}
}
#endif
app.exec();
window.hide();
window.setClientModel(0);
window.setWalletModel(0);
guiref = 0;
}
// Shutdown the core and it's threads, but don't exit ${bcl_name}-Qt here
Shutdown(NULL);
}
else
{
return 1;
}
} catch (std::exception& e) {
handleRunawayException(&e);
} catch (...) {
handleRunawayException(NULL);
}
return 0;
}
#endif // BITCOIN_QT_TEST
| mit |
YY030913/tg | packages/tagt-lib/server/functions/createRoom.js | 2561 | /* globals TAGT */
// Creates a new room of the given type, owned by `owner`, and subscribes all
// listed members to it.
//
// type     - room type flag; 'c' (public channel) additionally runs the
//            beforeCreateChannel/afterCreateChannel callbacks
// name     - desired room name (trimmed; validated against the configurable
//            UTF8_Names_Validation setting)
// owner    - username of the creating user
// members  - username or array of usernames to subscribe; the owner is added
//            automatically if missing
// readOnly - when true the room is read-only, system messages are disabled,
//            and members lacking the 'post-readonly' permission are muted
//
// Returns { rid: <new room id> }.
// Throws Meteor.Error for an invalid name, unknown owner, or duplicate name.
TAGT.createRoom = function(type, name, owner, members, readOnly) {
	name = s.trim(name);
	owner = s.trim(owner);
	// Accept a single username or an array transparently.
	members = [].concat(members);
	if (!name) {
		throw new Meteor.Error('error-invalid-name', 'Invalid name', { function: 'TAGT.createRoom' });
	}
	// Resolve the owner username to a user document (only username is needed).
	owner = TAGT.models.Users.findOneByUsername(owner, { fields: { username: 1 }});
	if (!owner) {
		throw new Meteor.Error('error-invalid-user', 'Invalid user', { function: 'TAGT.createRoom' });
	}
	let nameValidation;
	// Fall back to a conservative ASCII pattern when the configured setting
	// is not a valid regular expression.
	try {
		nameValidation = new RegExp('^' + TAGT.settings.get('UTF8_Names_Validation') + '$');
	} catch (error) {
		nameValidation = new RegExp('^[0-9a-zA-Z-_.]+$');
	}
	if (!nameValidation.test(name)) {
		throw new Meteor.Error('error-invalid-name', 'Invalid name', { function: 'TAGT.createRoom' });
	}
	let now = new Date();
	// The owner is always a member of the room they create.
	if (!_.contains(members, owner.username)) {
		members.push(owner.username);
	}
	// avoid duplicate names
	let room = TAGT.models.Rooms.findOneByName(name);
	if (room) {
		if (room.archived) {
			throw new Meteor.Error('error-archived-duplicate-name', 'There\'s an archived channel with name ' + name, { function: 'TAGT.createRoom', room_name: name });
		} else {
			throw new Meteor.Error('error-duplicate-channel-name', 'A channel with name \'' + name + '\' exists', { function: 'TAGT.createRoom', room_name: name });
		}
	}
	// Public channels get a pre-creation hook with the would-be room document.
	if (type === 'c') {
		TAGT.callbacks.run('beforeCreateChannel', owner, {
			t: 'c',
			name: name,
			ts: now,
			ro: readOnly === true,
			sysMes: readOnly !== true,
			usernames: members,
			u: {
				_id: owner._id,
				username: owner.username
			}
		});
	}
	room = TAGT.models.Rooms.createWithTypeNameUserAndUsernames(type, name, owner.username, members, {
		ts: now,
		ro: readOnly === true,
		sysMes: readOnly !== true
	});
	for (let username of members) {
		let member = TAGT.models.Users.findOneByUsername(username, { fields: { username: 1 }});
		// Silently skip usernames that do not resolve to an existing user.
		if (!member) {
			continue;
		}
		// make all room members muted by default, unless they have the post-readonly permission
		if (readOnly === true && !TAGT.authz.hasPermission(member._id, 'post-readonly')) {
			TAGT.models.Rooms.muteUsernameByRoomId(room._id, username);
		}
		let extra = { open: true };
		// Mark the room as already seen for the owner (ls = last seen).
		if (username === owner.username) {
			extra.ls = now;
		}
		TAGT.models.Subscriptions.createWithRoomAndUser(room, member, extra);
	}
	// The creator gets the 'owner' role scoped to this room.
	TAGT.authz.addUserRoles(owner._id, ['owner'], room._id);
	if (type === 'c') {
		// Run post-creation hooks asynchronously, outside this method call.
		Meteor.defer(() => {
			TAGT.callbacks.run('afterCreateChannel', owner, room);
		});
	}
	return {
		rid: room._id
	};
};
| mit |
ericalli/onlooker | lib/net/ping.rb | 312 | # By doing a "require 'net/ping'" you are requiring every subclass. If you
# want to require a specific ping type only, do "require 'net/ping/tcp'",
# for example.
#
# Put this file's directory first on the load path so the relative
# 'ping/...' requires below resolve no matter where the caller loaded us from.
$LOAD_PATH.unshift File.dirname(__FILE__)
# Load every ping implementation; requiring 'net/ping' pulls in all of them.
require 'ping/tcp'
require 'ping/udp'
require 'ping/icmp'
require 'ping/external'
require 'ping/http'
| mit |
bitcoin-s/bitcoin-s | node/src/main/scala/org/bitcoins/node/SpvNode.scala | 3859 | package org.bitcoins.node
import akka.actor.ActorSystem
import org.bitcoins.asyncutil.AsyncUtil
import org.bitcoins.chain.config.ChainAppConfig
import org.bitcoins.core.api.chain.ChainQueryApi.FilterResponse
import org.bitcoins.core.api.node.NodeType
import org.bitcoins.core.bloom.BloomFilter
import org.bitcoins.core.protocol.transaction.Transaction
import org.bitcoins.core.protocol.{BitcoinAddress, BlockStamp}
import org.bitcoins.core.util.Mutable
import org.bitcoins.node.config.NodeAppConfig
import org.bitcoins.node.models.Peer
import org.bitcoins.node.networking.peer.{
ControlMessageHandler,
DataMessageHandler
}
import scala.concurrent.Future
/** A light client node that speaks the SPV (BIP37 bloom filter) protocol.
  * Maintains a single mutable bloom filter which is loaded onto the first
  * peer on startup and refreshed as addresses/transactions are added.
  */
case class SpvNode(
    dataMessageHandler: DataMessageHandler,
    nodeConfig: NodeAppConfig,
    chainConfig: ChainAppConfig,
    actorSystem: ActorSystem,
    configPeersOverride: Vector[Peer] = Vector.empty)
    extends Node {
  require(nodeConfig.nodeType == NodeType.SpvNode,
          s"We need our SPV mode enabled to be able to construct a SPV node!")
  implicit override def system: ActorSystem = actorSystem
  implicit override def nodeAppConfig: NodeAppConfig = nodeConfig
  override def chainAppConfig: ChainAppConfig = chainConfig
  // Thread-safe holder for the current bloom filter; starts empty.
  private val _bloomFilter = new Mutable(BloomFilter.empty)
  /** The bloom filter currently associated with this node. */
  def bloomFilter: BloomFilter = _bloomFilter.atomicGet
  val controlMessageHandler = ControlMessageHandler(this)
  override def getDataMessageHandler: DataMessageHandler = dataMessageHandler
  override val peerManager: PeerManager = PeerManager(this, configPeersOverride)
  /** Replaces the bloom filter locally; does NOT push it to any peer. */
  def setBloomFilter(bloom: BloomFilter): SpvNode = {
    _bloomFilter.atomicSet(bloom)
    this
  }
  override def updateDataMessageHandler(
      dataMessageHandler: DataMessageHandler): SpvNode = {
    copy(dataMessageHandler = dataMessageHandler)
  }
  /** Updates our bloom filter to match the given TX
    *
    * @return SPV node with the updated bloom filter
    */
  def updateBloomFilter(transaction: Transaction): Future[SpvNode] = {
    logger.info(s"Updating bloom filter with transaction=${transaction.txIdBE}")
    val newBloom = _bloomFilter.atomicUpdate(transaction)(_.update(_))
    // we could send filteradd messages, but we would
    // then need to calculate all the new elements in
    // the filter. this is easier:-)
    for {
      _ <- peerMsgSenders(0).sendFilterClearMessage()
      _ <- peerMsgSenders(0).sendFilterLoadMessage(newBloom)
    } yield this
  }
  /** Updates our bloom filter to match the given address
    *
    * @return SPV node with the updated bloom filter
    */
  def updateBloomFilter(address: BitcoinAddress): Future[SpvNode] = {
    logger.info(s"Updating bloom filter with address=$address")
    val hash = address.hash
    _bloomFilter.atomicUpdate(hash)(_.insert(_))
    // A single element can be added incrementally with filteradd.
    val sentFilterAddF = peerMsgSenders(0).sendFilterAddMessage(hash)
    sentFilterAddF.map(_ => this)
  }
  /** Starts the node, waits until the first peer is connected, then loads
    * our bloom filter on that peer so it only relays relevant transactions.
    */
  override def start(): Future[SpvNode] = {
    for {
      node <- super.start()
      _ <- AsyncUtil.retryUntilSatisfiedF(() => isConnected(0))
      _ <- peerMsgSenders(0).sendFilterLoadMessage(bloomFilter)
    } yield {
      logger.info(
        s"Sending bloomfilter=${bloomFilter.hex} to ${peerManager.peers(0)}")
      node.asInstanceOf[SpvNode]
    }
  }
  /** Returns the block height of the given block stamp */
  override def getHeightByBlockStamp(blockStamp: BlockStamp): Future[Int] =
    chainApiFromDb().flatMap(_.getHeightByBlockStamp(blockStamp))
  private val cfErrMsg = "Compact filters are not supported in SPV mode"
  /** Gets the number of compact filters in the database */
  override def getFilterCount(): Future[Int] =
    Future.failed(new RuntimeException(cfErrMsg))
  // Compact filters (BIP157/158) belong to neutrino mode, not SPV.
  override def getFiltersBetweenHeights(
      startHeight: Int,
      endHeight: Int): Future[Vector[FilterResponse]] =
    Future.failed(new RuntimeException(cfErrMsg))
}
| mit |
oemdaro/mqtt-microservices-example | mqtt-server/redis/index.js | 137 | const Redis = require('ioredis')
const config = require('../config')
const redis = new Redis(config.redis.cache)
module.exports = redis
| mit |
vkhazin/play-framework-docker | target/scala-2.10/src_managed/main/controllers/routes.java | 648 | // @SOURCE:/Users/vkhazin/Projects/Samples/play-framework-docker/conf/routes
// @HASH:f94c37f141a2fd9c4c81ec2672feceaf0bd6b469
// @DATE:Fri Dec 18 18:02:00 EST 2015
package controllers;
public class routes {
public static final controllers.ReverseApplication Application = new controllers.ReverseApplication();
public static class javascript {
public static final controllers.javascript.ReverseApplication Application = new controllers.javascript.ReverseApplication();
}
public static class ref {
public static final controllers.ref.ReverseApplication Application = new controllers.ref.ReverseApplication();
}
}
| mit |
tpoikela/battles | lib/rot-js/fov/index.ts | 275 | import DiscreteShadowcasting from './discrete-shadowcasting';
import PreciseShadowcasting from './precise-shadowcasting';
import RecursiveShadowcasting from './recursive-shadowcasting';
export default { DiscreteShadowcasting, PreciseShadowcasting, RecursiveShadowcasting };
| mit |
vonWolfehaus/common-gulp-starter | build/config.js | 852 | var src = '../src/';
var dest = '../www/';
var jsFolder = src+'js/';
var js = {
entries: [
jsFolder+'main.js'
],
destFile: 'main.js',
all: jsFolder+'**/*.js',
libs: jsFolder+'lib/**/*.js'
};
module.exports = {
sass: {
src: src+'style/**/*.{sass,scss}',
cssLib: src+'style/lib',
dest: dest,
settings: {
indentedSyntax: true, // Enable .sass syntax
imagePath: 'img' // Used by the image-url helper
}
},
html: {
src: src+'htdocs/**',
dest: dest
},
images: {
src: src+'img/**',
dest: dest+'img'
},
browserify: {
entries: js.entries,
debug: false,
paths: ['./node_modules', jsFolder, js.libs], // allows you to omit './' when requiring local modules
extensions: ['.jsx', '.tag'],
noparse: js.libs,
},
js: js,
release: false, // set to true by passing `--dist` as a gulp argument
src: src,
dest: dest
};
| mit |
DrTr/Supportilla | test/unit/supportilla/subject_test.rb | 1109 | require 'test_helper'
module Supportilla
  # Unit tests for Supportilla::Subject: attribute presence, validation
  # rules, ticket ordering, and dependent destruction of tickets.
  class SubjectTest < ActiveSupport::TestCase
    def setup
      # Fixture subject shared by every test below.
      @subject = supportilla_subjects(:account)
    end
    test "responds to description" do
      assert_respond_to @subject, :description
      assert_respond_to @subject, :tickets
      assert_respond_to @subject, :activity
    end
    test "must be valid" do
      assert @subject.valid?
    end
    test "with invalid description" do
      # Blank descriptions and descriptions of 50+ characters are rejected.
      @subject.description = ""
      assert !@subject.valid?
      @subject.description = "a" * 50
      assert !@subject.valid?
    end
    test "with duplicate description " do
      # "Site content" presumably exists in the fixtures already, so the
      # uniqueness validation should fail — verify against the fixture file.
      @subject.description = "Site content"
      assert !@subject.valid?
    end
    test "with invalid activity" do
      @subject.activity = ""
      assert !@subject.valid?
    end
    test "have tickets" do
      # Tickets are expected in this exact order (oldest first).
      assert_equal @subject.tickets,
        [supportilla_tickets(:oldest), supportilla_tickets(:ticket)]
    end
    test "destroy tickets with itself" do
      # Destroying the subject should cascade to its two fixture tickets.
      assert_difference("Ticket.count", -2){ @subject.destroy }
    end
  end
end
| mit |
markolbert/Installers | LanHistoryInstaller/setup-ui/Properties/AssemblyInfo.cs | 2514 | using System.Reflection;
using System.Runtime.InteropServices;
using System.Windows;
using Microsoft.Tools.WindowsInstallerXml.Bootstrapper;
using Olbert.LanHistorySetupUI;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
//[assembly: AssemblyTitle("LanHistorySetupUI")]
//[assembly: AssemblyDescription("")]
//[assembly: AssemblyConfiguration("")]
//[assembly: AssemblyCompany("")]
//[assembly: AssemblyProduct("LanHistorySetupUI")]
//[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
//In order to begin building localizable applications, set
//<UICulture>CultureYouAreCodingWith</UICulture> in your .csproj file
//inside a <PropertyGroup>. For example, if you are using US english
//in your source files, set the <UICulture> to en-US. Then uncomment
//the NeutralResourceLanguage attribute below. Update the "en-US" in
//the line below to match the UICulture setting in the project file.
//[assembly: NeutralResourcesLanguage("en-US", UltimateResourceFallbackLocation.Satellite)]
[assembly:ThemeInfo(
ResourceDictionaryLocation.None, //where theme specific resource dictionaries are located
//(used if a resource is not found in the page,
// or application resource dictionaries)
ResourceDictionaryLocation.SourceAssembly //where the generic resource dictionary is located
//(used if a resource is not found in the page,
// app, or any theme specific resource dictionaries)
)]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
//[assembly: AssemblyVersion("1.0.0.0")]
//[assembly: AssemblyFileVersion("1.0.0.0")]
//WiX -- denotes which class is the Managed Bootstrapper
[assembly: BootstrapperApplication(typeof(LanHistorySetupApp))] | mit |
joelwurtz/symfony | src/Symfony/Component/Messenger/Transport/AmqpExt/AmqpReceiver.php | 4126 | <?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Messenger\Transport\AmqpExt;
use Symfony\Component\Messenger\Envelope;
use Symfony\Component\Messenger\Exception\LogicException;
use Symfony\Component\Messenger\Exception\MessageDecodingFailedException;
use Symfony\Component\Messenger\Exception\TransportException;
use Symfony\Component\Messenger\Transport\Receiver\MessageCountAwareInterface;
use Symfony\Component\Messenger\Transport\Receiver\ReceiverInterface;
use Symfony\Component\Messenger\Transport\Serialization\PhpSerializer;
use Symfony\Component\Messenger\Transport\Serialization\SerializerInterface;
/**
* Symfony Messenger receiver to get messages from AMQP brokers using PHP's AMQP extension.
*
* @author Samuel Roze <samuel.roze@gmail.com>
*
* @experimental in 4.2
*/
class AmqpReceiver implements ReceiverInterface, MessageCountAwareInterface
{
    // Serializer used to turn raw AMQP payloads into Envelope objects.
    private $serializer;
    // Wrapper around the ext-amqp connection/channel/queues.
    private $connection;
    public function __construct(Connection $connection, SerializerInterface $serializer = null)
    {
        $this->connection = $connection;
        // Default to PHP's native serialize()/unserialize() format.
        $this->serializer = $serializer ?? new PhpSerializer();
    }
    /**
     * {@inheritdoc}
     */
    public function get(): iterable
    {
        // Poll every configured queue once per call; each queue contributes
        // at most one envelope per invocation.
        foreach ($this->connection->getQueueNames() as $queueName) {
            yield from $this->getEnvelope($queueName);
        }
    }
    /**
     * Fetches at most one message from the given queue and yields it as an
     * Envelope stamped with an AmqpReceivedStamp (required later by
     * ack()/reject() to map the Envelope back to the broker delivery).
     *
     * AMQP-level failures are wrapped in TransportException. A message that
     * cannot be decoded is rejected first (so the broker does not redeliver
     * it forever) and the decoding exception is re-thrown.
     */
    private function getEnvelope(string $queueName): iterable
    {
        try {
            $amqpEnvelope = $this->connection->get($queueName);
        } catch (\AMQPException $exception) {
            throw new TransportException($exception->getMessage(), 0, $exception);
        }
        if (null === $amqpEnvelope) {
            // Queue is currently empty.
            return [];
        }
        try {
            $envelope = $this->serializer->decode([
                'body' => $amqpEnvelope->getBody(),
                'headers' => $amqpEnvelope->getHeaders(),
            ]);
        } catch (MessageDecodingFailedException $exception) {
            // invalid message of some type
            $this->rejectAmqpEnvelope($amqpEnvelope, $queueName);
            throw $exception;
        }
        yield $envelope->with(new AmqpReceivedStamp($amqpEnvelope, $queueName));
    }
    /**
     * {@inheritdoc}
     */
    public function ack(Envelope $envelope): void
    {
        try {
            $stamp = $this->findAmqpStamp($envelope);
            $this->connection->ack(
                $stamp->getAmqpEnvelope(),
                $stamp->getQueueName()
            );
        } catch (\AMQPException $exception) {
            throw new TransportException($exception->getMessage(), 0, $exception);
        }
    }
    /**
     * {@inheritdoc}
     */
    public function reject(Envelope $envelope): void
    {
        $stamp = $this->findAmqpStamp($envelope);
        $this->rejectAmqpEnvelope(
            $stamp->getAmqpEnvelope(),
            $stamp->getQueueName()
        );
    }
    /**
     * {@inheritdoc}
     */
    public function getMessageCount(): int
    {
        try {
            return $this->connection->countMessagesInQueues();
        } catch (\AMQPException $exception) {
            throw new TransportException($exception->getMessage(), 0, $exception);
        }
    }
    // Nacks the delivery with no flags (AMQP_NOPARAM), i.e. without
    // AMQP_REQUEUE, so the broker drops the message instead of redelivering.
    private function rejectAmqpEnvelope(\AMQPEnvelope $amqpEnvelope, string $queueName): void
    {
        try {
            $this->connection->nack($amqpEnvelope, $queueName, AMQP_NOPARAM);
        } catch (\AMQPException $exception) {
            throw new TransportException($exception->getMessage(), 0, $exception);
        }
    }
    // Extracts the AmqpReceivedStamp added by get(); envelopes that never
    // passed through this receiver cannot be acked/rejected here.
    private function findAmqpStamp(Envelope $envelope): AmqpReceivedStamp
    {
        $amqpReceivedStamp = $envelope->last(AmqpReceivedStamp::class);
        if (null === $amqpReceivedStamp) {
            throw new LogicException('No "AmqpReceivedStamp" stamp found on the Envelope.');
        }
        return $amqpReceivedStamp;
    }
}
| mit |
asobrien/randomOrg | randomorg/__init__.py | 111 | from __future__ import absolute_import
from ._rand_core import integers, sequence, string, quota # noqa: F401
| mit |
scottlaurent/accounting | src/Exceptions/JournalAlreadyExists.php | 182 | <?php
declare(strict_types=1);
namespace Scottlaurent\Accounting\Exceptions;
/**
 * Thrown when attempting to create a journal for an entity that already
 * has one attached.
 */
class JournalAlreadyExists extends BaseException
{
    public $message = 'Journal already exists.';
}
| mit |
dario01/domotics | app/js/view/mainView.js | 818 | "use strict";
// Top-level layout for the app: a left navigation column plus a main panel
// that shows the list of domotic devices.
define(['marionette', 'view/LeftNavView', 'view/DomoticListView', 'templates', 'model/DomoticCollectionModel'],
    // NOTE(review): the factory declares Dimmer and LightSwitch parameters but
    // the dependency array supplies only five modules, so both are always
    // undefined here — confirm whether two dependencies are missing.
    function(Marionette, LeftNavView, DomoticListView, templates, DomoticCollection, Dimmer, LightSwitch) {
        return Marionette.LayoutView.extend({
            template: templates['app/templates/main.hbs'],
            // Named regions resolved against selectors in the rendered template.
            regions: {
                leftNav: '#leftNav',
                mainPanel: '#mainPanel'
            },
            // Populate both regions just before the layout is displayed.
            onBeforeShow: function() {
                console.log('onBeforeShow mainView');
                this.leftNav.show(new LeftNavView());
                this.mainPanel.show(
                    new DomoticListView({
                        collection: new DomoticCollection()
                    })
                );
            }
        });
    }
);
| mit |
Heliex/PlastProd | app/cache/dev/twig/2a/47/0bff6caf161d0a25ec925d50e33ee2ad4a7afcdf711ec73d30ff750d7c09.php | 2266 | <?php
/* FOSUserBundle:Registration:email.txt.twig */
class __TwigTemplate_2a470bff6caf161d0a25ec925d50e33ee2ad4a7afcdf711ec73d30ff750d7c09 extends Twig_Template
{
public function __construct(Twig_Environment $env)
{
parent::__construct($env);
$this->parent = false;
$this->blocks = array(
'subject' => array($this, 'block_subject'),
'body_text' => array($this, 'block_body_text'),
'body_html' => array($this, 'block_body_html'),
);
}
protected function doDisplay(array $context, array $blocks = array())
{
// line 2
$this->displayBlock('subject', $context, $blocks);
// line 7
$this->displayBlock('body_text', $context, $blocks);
// line 12
$this->displayBlock('body_html', $context, $blocks);
}
// line 2
public function block_subject($context, array $blocks = array())
{
// line 4
echo $this->env->getExtension('translator')->trans("registration.email.subject", array("%username%" => $this->getAttribute((isset($context["user"]) ? $context["user"] : $this->getContext($context, "user")), "username", array()), "%confirmationUrl%" => (isset($context["confirmationUrl"]) ? $context["confirmationUrl"] : $this->getContext($context, "confirmationUrl"))), "FOSUserBundle");
echo "
";
}
// line 7
public function block_body_text($context, array $blocks = array())
{
// line 9
echo $this->env->getExtension('translator')->trans("registration.email.message", array("%username%" => $this->getAttribute((isset($context["user"]) ? $context["user"] : $this->getContext($context, "user")), "username", array()), "%confirmationUrl%" => (isset($context["confirmationUrl"]) ? $context["confirmationUrl"] : $this->getContext($context, "confirmationUrl"))), "FOSUserBundle");
echo "
";
}
// line 12
public function block_body_html($context, array $blocks = array())
{
}
public function getTemplateName()
{
return "FOSUserBundle:Registration:email.txt.twig";
}
public function getDebugInfo()
{
return array ( 48 => 12, 42 => 9, 39 => 7, 33 => 4, 30 => 2, 26 => 12, 24 => 7, 22 => 2,);
}
}
| mit |
furio/js-config-server | lib/repo-data-filters/filter.js | 588 | var Promise = require("bluebird");
var debugLog = require('debug')('js-config-server:lib-repo-data-filters');
var FilterData = module.exports = function FilterData( materializedFilters ) {
this.matFilters = materializedFilters;
};
FilterData.prototype.filterData = function(rawData){
try {
var finalData = this.matFilters.reduce(function(partialData, filterFun) {
return filterFun.filterData(partialData);
}, rawData);
return Promise.resolve(finalData);
} catch(err) {
debugLog(err);
return Promise.reject(err);
}
}; | mit |
Chainsawkitten/HymnToBeauty | src/Engine/Util/Input.cpp | 3072 | #include "Input.hpp"
#include <Utility/Log.hpp>
#include <map>
#include <GLFW/glfw3.h>
std::map<GLFWwindow*, InputHandler*> inputMap;
// GLFW C character callback; forwards typed Unicode code points to the
// InputHandler registered for this window in inputMap.
void characterCallback(GLFWwindow* window, unsigned int codePoint) {
    inputMap[window]->CharacterCallback(codePoint);
}
// GLFW C scroll callback; only the vertical offset is forwarded, the
// horizontal offset is ignored.
void scrollCallback(GLFWwindow* window, double xOffset, double yOffset) {
    inputMap[window]->ScrollCallback(yOffset);
}
InputHandler* InputHandler::activeInstance = nullptr;
// Creates an input handler bound to a GLFW window and registers the scroll
// and character callbacks on it. The instance is stored in the global
// inputMap so the C callbacks can route window events back to it.
InputHandler::InputHandler(GLFWwindow* window) {
    this->window = window;
    // Start with every button fully released.
    for (int i = 0; i < BUTTONS; i++) {
        buttonData[i].down = false;
        buttonData[i].released = false;
        buttonData[i].triggered = false;
    }
    // Init mouse state.
    glfwSetScrollCallback(window, scrollCallback);
    glfwSetCharCallback(window, characterCallback);
    inputMap[window] = this;
}
InputHandler* InputHandler::GetActiveInstance() {
return activeInstance;
}
void InputHandler::SetActive() {
activeInstance = this;
}
// Polls the current input state; intended to be called once per frame.
// Computes per-button down/triggered/released flags from all registered
// bindings, latches the scroll delta accumulated by the callbacks, refreshes
// the cursor position, and publishes the text typed since the last frame.
void InputHandler::Update() {
    // Latch scroll accumulated via ScrollCallback since the previous frame.
    lastScroll = scroll;
    scroll = 0.0;
    // Get button states.
    bool values[BUTTONS] = {};
    for (Binding binding : bindings) {
        bool value = false;
        switch (binding.device) {
        case KEYBOARD:
            if (glfwGetKey(window, binding.index) == GLFW_PRESS)
                value = true;
            break;
        case MOUSE:
            if (glfwGetMouseButton(window, binding.index) == GLFW_PRESS)
                value = true;
            break;
        default:
            break;
        }
        // A button counts as pressed if ANY of its bindings is active.
        if (!values[binding.button])
            values[binding.button] = value;
    }
    // Update triggered and released.
    for (int button = 0; button < BUTTONS; button++) {
        // triggered = newly pressed this frame; released = let go this frame.
        buttonData[button].triggered = !buttonData[button].down && values[button];
        buttonData[button].released = buttonData[button].down && !values[button];
        buttonData[button].down = values[button];
    }
    glfwGetCursorPos(window, &cursorX, &cursorY);
    // Publish text received via CharacterCallback since the last Update.
    text = tempText;
    tempText = "";
}
double InputHandler::GetCursorX() const {
return cursorX;
}
double InputHandler::GetCursorY() const {
return cursorY;
}
glm::vec2 InputHandler::GetCursorXY() const {
return glm::vec2(cursorX, cursorY);
}
bool InputHandler::GetScrollUp() const {
return lastScroll > 0.0;
}
bool InputHandler::GetScrollDown() const {
return lastScroll < 0.0;
}
void InputHandler::AssignButton(Button button, Device device, int index) {
Binding binding;
binding.button = button;
binding.device = device;
binding.index = index;
bindings.push_back(binding);
}
bool InputHandler::Pressed(Button button) const {
return buttonData[button].down;
}
bool InputHandler::Triggered(Button button) const {
return buttonData[button].triggered;
}
void InputHandler::CharacterCallback(unsigned int codePoint) {
tempText += static_cast<char>(codePoint);
}
void InputHandler::ScrollCallback(double yOffset) {
scroll += yOffset;
}
InputHandler* Input() {
return InputHandler::GetActiveInstance();
}
| mit |
inilotic/vims_crm_core | public/js/users/search.js | 1454 | $(document).ready(function() {
function formatRepo (repo) {
if (repo.loading) return repo.text;
var markup = '<div class="clearfix">' +
'<div class="col-sm-6">' + repo.login + '</div>';
if (repo.fio) {
markup += ' (' + repo.fio + ')';
}
markup += '</div>';
return markup;
}
function formatRepoSelection (repo) {
return repo.login || repo.text;
}
$(".js-user-apiFind").select2({
ajax: {
url: "?route=User/apiFind",
dataType: 'json',
delay: 250,
data: function (params) {
return {
q: params.term // search term
};
},
processResults: function (data, page) {
// parse the results into the format expected by Select2.
// since we are using custom formatting functions we do not need to
// alter the remote JSON data
return {
results: data.items
};
},
cache: true
},
escapeMarkup: function (markup) { return markup; }, // let our custom formatter work
minimumInputLength: 3,
templateResult: formatRepo, // omitted for brevity, see the source of this page
templateSelection: formatRepoSelection // omitted for brevity, see the source of this page
});
}) | mit |
myautotech/Artist | db/migrate/20150421175336_add_is_readed_column_to_comment.rb | 145 | class AddIsReadedColumnToComment < ActiveRecord::Migration
def change
add_column :comments, :is_readed, :boolean, default: false
end
end
| mit |
nikatruskawka/testioki | app/scripts/app.js | 2043 | /*jshint unused: vars */
define([
'angular',
'routeManager',
'ionicAngular',
'controllers/toc',
'controllers/task',
'controllers/definition',
'controllers/game',
'controllers/menu',
'controllers/summary',
'controllers/tutorial',
'controllers/back',
'controllers/testioki',
'angular-ui-router',
'angular-bootstrap',
'services/productList',
'services/unitList',
'services/tasksContent',
'services/checking',
'services/contentFixture',
'services/content',
'services/tutorialProvider',
'services/device',
'services/locationService',
'services/media',
'services/animation'
],
/*deps*/
function (angular, RouteManager)/*invoke*/ {
'use strict';
return angular.module('connectApp', [
'MenuCtrl',
'GameCtrl',
'TocCtrl',
'TaskCtrl',
'DefinitionCtrl',
'SummaryCtrl',
'TutorialCtrl',
'BackCtrl',
'MyApp',
'TasksContentService',
'ProductListService',
'UnitListService',
'CheckingService',
'LocationService',
'ContentFixture',
'Content',
'TutorialProvider',
'DeviceService',
'MediaService',
'AnimationService',
/*angJSDeps*/
'ionic',
'ngCookies',
'ngResource',
'ngSanitize',
'ngDialog',
'ui.bootstrap',
'ui.router',
'angular-progress-arc',
'Orbicular'
])
.config(function ($compileProvider) {
// Set the whitelist for certain URLs just to be safe
$compileProvider.aHrefSanitizationWhitelist(/^\s*(https?|ftp|mailto|file|tel):/);
})
.config(RouteManager);
}); | mit |
pugnusferreus/wamlibrary | app/controllers/sub_categories_controller.rb | 2404 | class SubCategoriesController < ApplicationController
before_filter :is_admin
# GET /sub_categories
# GET /sub_categories.xml
def index
@sub_categories = SubCategory.all
@categories = Category.all
respond_to do |format|
format.html # index.html.erb
format.xml { render :xml => @sub_categories }
end
end
def tojson
sub_category = SubCategory.new
render :json => sub_category.to_data_table
end
# GET /sub_categories/1
# GET /sub_categories/1.xml
def show
@sub_category = SubCategory.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.xml { render :xml => @sub_category }
end
end
# GET /sub_categories/new
# GET /sub_categories/new.xml
def new
@sub_category = SubCategory.new
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @sub_category }
end
end
# GET /sub_categories/1/edit
def edit
@sub_category = SubCategory.find(params[:id])
end
# POST /sub_categories
# POST /sub_categories.xml
def create
@sub_category = SubCategory.new(params[:sub_category])
respond_to do |format|
if @sub_category.save
format.html { redirect_to(@sub_category, :notice => 'Sub category was successfully created.') }
format.xml { render :xml => @sub_category, :status => :created, :location => @sub_category }
else
format.html { render :action => "new" }
format.xml { render :xml => @sub_category.errors, :status => :unprocessable_entity }
end
end
end
# PUT /sub_categories/1
# PUT /sub_categories/1.xml
def update
@sub_category = SubCategory.find(params[:id])
respond_to do |format|
if @sub_category.update_attributes(params[:sub_category])
format.html { redirect_to(@sub_category, :notice => 'Sub category was successfully updated.') }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @sub_category.errors, :status => :unprocessable_entity }
end
end
end
# DELETE /sub_categories/1
# DELETE /sub_categories/1.xml
def destroy
@sub_category = SubCategory.find(params[:id])
@sub_category.destroy
respond_to do |format|
format.html { redirect_to(sub_categories_url) }
format.xml { head :ok }
end
end
end
| mit |
tanordheim/ChopsUI-NG | ChopsUI/modules/datatexts/time.lua | 1255 | local E = unpack(select(2, ...)); -- Import: Engine
local module = E:NewModule("DataText_Time", "AceConsole-3.0", "AceEvent-3.0")
local frame = CreateFrame("Frame", "ChopsUI_DataText_Time", E.Panels.MinimapDataTextContainer1)
frame:SetAllPoints()
frame:EnableMouse(true)
frame:SetFrameStrata("BACKGROUND")
frame:SetFrameLevel(3)
frame:Show()
local text = frame:CreateFontString("ChopsUI_DataText_Time_Text", "OVERLAY")
text:SetFont(E.Media.Fonts.Normal, E.Settings.UI.DataTexts.FontSize)
text:SetJustifyH("CENTER")
frame.text = text
frame.text:SetAllPoints()
local int = 1
-- OnUpdate handler: refreshes the clock text roughly once per second.
-- `t` is the elapsed time in seconds since the previous frame; `int` is the
-- shared countdown upvalue declared above.
local function Update(self, t)
  int = int - t
  if int > 0 then return end
  -- Re-arm the throttle. The previous version never reset `int`, so once it
  -- hit zero the text was rebuilt on every single frame.
  int = 1

  -- date() already returns zero-padded %H/%M/%S fields, so the display
  -- string can be produced directly instead of tonumber()-ing each part and
  -- re-padding it by hand (which yielded the identical result).
  text:SetText(date("%H:%M:%S"))
end
function module:Initialize()
frame:SetScript("OnUpdate", Update)
Update(frame, 10)
end
E:RegisterModule(module:GetName())
| mit |
inquisive/keystonejs-site | public/systemjs/app/listen.js | 8132 | import React from 'react'
import $ from 'jquery'
import _ from 'lodash'
import {routes, config} from './config';
import {baseRoute, cleanPath, getFileName} from './common/util';
import Debug from 'debug'
import Gab from './common/gab'
let debug = Debug('keystone:app:common:listen');
let Routes = routes.map(v => {
return v.path;
});
export default (Component) => {
class Listeners extends React.Component {
constructor(props){
super(props)
this.displayName = 'Page Template'
const clean = getFileName(props.location.pathname)
this.state = {
route: clean.clean,
prev: clean.clean,
paths: clean
}
this._update = false
this._limiters = {}
}
render() {
// return React.cloneElement(Component, this.props)
return <Component {...this.props} {...this.state} />
}
componentWillReceiveProps(props) {
const clean = getFileName(props.location.pathname).clean
if(clean !== this.state.route) {
this.setState({
route: clean,
prev: this.state.route
});
this._update = true;
}
}
componentDidUpdate() {
if(this._update) {
this.onUpdate();
}
}
componentDidMount() {
this.onMount();
this.onUpdate();
}
/**
 * Per-render DOM maintenance for the docs pages:
 *  - scroll to top, or smooth-scroll to the URL's #anchor if present;
 *  - inject permalink / back-to-top anchors and a version switcher next
 *    to H2/H3/H4 headings inside .docs-content;
 *  - append "view on GitHub" source links to .addGitHubLink elements;
 *  - install the sticky side-menu scroll handler;
 *  - (re-)run Prism syntax highlighting (twice on /api pages, which need
 *    a delayed second pass).
 * NOTE(review): heavy direct jQuery DOM manipulation inside a React
 * component; statement order matters, so the code is left unchanged.
 */
onUpdate() {
    let thisComponent = this;
    // clear the flag so componentDidUpdate does not re-run this until re-set
    this._update = false;
    //debug('update listeners')
    window.scrollTo(0,0);
    // scroll to anchor
    // *only* if we have anchor on the url
    if(thisComponent.props.location.hash) {
        // smooth scroll to the anchor id
        const $anchor = $('a[name="' + thisComponent.props.location.hash.slice(1) + '"]');
        if($anchor.length === 1) {
            $('html, body').animate({
                scrollTop: $anchor.offset().top + 'px'
            }, 100, 'swing');
        }
    }
    // add anchor links and version switcher to certain elements
    $(".docs-content a[name]").each(function() {
        let $anchor = $(this),
            name = $anchor.attr("name")
        // permalink + back-to-top icons for this heading
        let link = '<a class="anchor" href="#' + name + '"><i class="entypo entypo-link"></i></a><a class="anchor" href="#top"><i class="entypo entypo-up"></i></a>'
        const $next = $anchor.next();
        // current docs version/path are stashed in hidden inputs by the page
        const version = $('#_version').val()
        const path = $('#_path').val()
        if(path) {
            // build "switch to:" links for every other documented version
            link += '<i class="anchor" style="padding-right: 0px;"><span>switch to:</span></i>';
            config.versions.forEach(function(v) {
                if(v !== version) {
                    const newpath = path.replace(version, v);
                    link += '<a style="padding-left: 8px;padding-right: 8px;"class="anchor" href="' + newpath + '/#' + name + '"><span>' + v + '</span></a>';
                }
            });
        }
        const $link = $(link);
        // only append links to H2/H3/H4 tags
        if (["H2", "H3", "H4"].indexOf($next.prop("tagName")) > -1) {
            $next.append($link);
        }
    });
    // add code links and dropdown html
    $(".docs-content .addGitHubLink").each(function(e) {
        const $this = $(this);
        const file = $this.data("file");
        const branch = $('#_branch').val();
        const append = '<div style="float:right" > /' + file + ' <a href="http://github.com/keystonejs/keystone/blob/' + branch + '/' + file + '" target="_blank"><i class="entypo entypo-social entypo-github"></i></a></div>';
        $this.append(append);
    });
    // sticky menu
    const $stickyMenu = $('.stickyMenu');
    const $docsFooter = $('.docs-footer');
    // .offset() is undefined when the menu is absent from this page
    if (!!$stickyMenu.offset()) {
        const stickyTop = $stickyMenu.offset().top;
        $(window).scroll(function() {
            const windowTop = $(window).scrollTop();
            if (stickyTop-40 < windowTop){
                // pin the menu; cap its height so it never overlaps the footer
                const docFooterView = $docsFooter[0].getBoundingClientRect();
                const height = (($(window).height() - docFooterView.top) < 0) ? '100%' : docFooterView.top + 'px';
                const width = $stickyMenu.parent().width()
                $stickyMenu.css({ direction: 'ltr', position: 'fixed', overflowY: 'auto', top: 0, marginTop: '0', paddingBottom: '80px', 'height': height, 'width': width });
            } else {
                $stickyMenu.css({ position:'static'});
            }
        });
    }
    // Run Prism highlighting
    // weird error on api pages needs a second run
    if(location.pathname.search('api') > -1) {
        debug('highlight again', location.pathname.search('api'))
        setTimeout(Prism.highlightAll,1000)
    }
    Prism.highlightAll()
} // end onUpdate
onMount() {
let thisComponent = this;
let _cached = {}
let create_cached = function(version) {
if(!_.isObject(_cached[version])) {
_cached[version] = {}
}
}
// catch code view click
$(document).on('click', '.loadCode', function(e) {
e.preventDefault()
const $this = $(this)
const target = $this.parent().data()
const $pre = $this.parent().next()
let pass = (go=true) => {
if(go) {
$pre.slideToggle();
}
}
const branch = $('#_branch').val();
create_cached(branch);
if($pre.css('display') === 'block') {
// just toggle close
pass()
} else if(_cached[branch][target.file]) {
// cached results so just toggle open
pass()
} else if(target.file) {
let url
let convert
if(target.com) {
convert = target.convert || 'js'
url = 'https://raw.githubusercontent.com/snowkeeper/keystonejs-site/site/' + target.file
} else {
url = 'https://raw.githubusercontent.com/keystonejs/keystone/BRANCH/FILE'.replace('BRANCH',branch).replace('FILE', target.file)
convert = 'js'
}
debug('github code cache',_cached, target)
fetch(url)
.then(r => r.text())
.then(results => {
_cached[branch][target.file] = Prism.highlight(results, Prism.languages[convert]);
debug(_cached)
$pre.html(_cached[branch][target.file])
pass()
})
.catch(e => debug('Fetch error',page,e))
} else {
pass(false)
}
});
// catch clicks for react-router
// to add links that bypass this measure add class '.notspa' or '.uselink'
$(document).on('click', 'a:not(.uselink, .notspa, .loadCode)', function(event) {
const $url = $(this)[0]
const myLocation = getFileName($url.href)
const filename = myLocation.clean
const url = $url.pathname + $url.search + myLocation.hash
debug('click information', location, myLocation, 'url', url, '$url', $url.hostname)
if(location.hostname !== $url.hostname) {
// not our link
return
}
// is this a routed link
if(myLocation.route.section !== '404' && (!$url.hash || thisComponent.state.route !== filename)) {
event.preventDefault()
debug('push history known route', url)
thisComponent.props.history.pushState(null, url)
return
}
// should this be a 404?
if(!myLocation.hash && $url.host === location.host) {
// this app is entirely SPA with defined routes, so this page is probably a 404, but also could be a development, dynamic or hidden page
event.preventDefault()
debug('push history unknown route', url)
thisComponent.props.history.pushState(null, url)
return
}
if(myLocation.hash && thisComponent.state.route === filename) {
// react-router is currently triggering a render on same page anchor links
// this can catch that and fake the move
// but we lose history
//event.preventDefault()
debug('fake scroll', url)
let $goto = $('a[name="' + myLocation.hashless + '"]')
if($goto.length) {
//location.hash = myLocation.hashless;
//$(document).scrollTop($goto.offset().top)
}
//thisComponent.props.history.replaceState(null, url)
return
}
debug('not our link so send away')
})
// x for clear
function tog(v){
return v ? 'addClass' : 'removeClass'
}
$(document).on('#searchBar input', '.clearable', function(){
$(this)[tog(this.value)]('x');
}).on('mousemove', '.x', function( e ){
$(this)[tog(this.offsetWidth-22 < e.clientX-this.getBoundingClientRect().left)]('onX');
}).on('touchstart click', '.onX', function( ev ){
ev.preventDefault();
$(this).removeClass('x onX').val('').change();
})
} // end onMount
}
Listeners.propTypes = {};
return Listeners
}
| mit |
nbsdx/abac | examples/scaling_tests/daisychain/likes_python/base/QUERY.py | 5116 | #!/usr/bin/env python
"""
Run the queries described in README
cmd: env keystore=`pwd` ./query.py
"""
import os
import sys
import ABAC
import time
import datetime
import math
debug=0
ctxt = ABAC.Context()
cred_count = 2 + 2 * #VAL#
def get_msec(e_time) :
    # Convert a datetime.timedelta to milliseconds, ignoring the .days
    # component (callers guard against deltas of a day or more via
    # extract_delta).
    # NOTE(review): this is Python 2 source (the surrounding script uses
    # print statements); under Python 2 `/1000` is integer division, so the
    # result is an int — confirm before porting to Python 3, where it would
    # become a float.
    msec_delta=0
    if( int(e_time.seconds) !=0 ) :
        msec_delta= int(e_time.seconds) *1000
    if( int(e_time.microseconds) !=0) :
        msec_delta = msec_delta + int(e_time.microseconds)/1000
    return msec_delta
def get_micro(e_time) :
    # Convert a datetime.timedelta to microseconds, ignoring the .days
    # component (callers guard against deltas of a day or more via
    # extract_delta).
    micro_delta=0
    if( int(e_time.seconds) !=0 ) :
        micro_delta= int(e_time.seconds) *1000000
    if( int(e_time.microseconds) !=0) :
        micro_delta = micro_delta + int(e_time.microseconds)
    return micro_delta
def extract_delta(starttime, endtime) :
    """Return the elapsed datetime.timedelta between two datetimes.

    Aborts the benchmark when the delta spans a day or more, because the
    downstream get_msec/get_micro converters only look at the seconds and
    microseconds fields.

    Bug fixes versus the original:
      * ``sys.stderr,write(...)`` built a tuple and then raised a
        NameError on ``write`` -- corrected to ``sys.stderr.write``.
      * the error message referenced an undefined variable ``msg`` --
        it now reports the offending delta itself.
    """
    elapsed_time = (endtime - starttime)
    # Only handle in seconds/microseconds
    if ( int(elapsed_time.days) != 0 ) :
        sys.stderr.write("elapsed time %s is longer than a day !!!" % elapsed_time)
        exit(1)
    return elapsed_time
# Keystore is the directory containing the principal credentials.
# Load existing principals and/or policy credentials
if (os.environ.has_key("keystore")) :
keystore=os.environ["keystore"]
starttime = datetime.datetime.now()
ctxt.load_directory(keystore)
endtime = datetime.datetime.now()
elapsed_load=extract_delta(starttime, endtime)
elapsed_msec=get_msec(elapsed_load)
sys.stderr.write("%d %d LOAD(msec)\n" % (cred_count,elapsed_msec))
else:
print("keystore is not set...")
exit(1)
##########################################################################
# dump the loaded principals/policies
#
fd=os.open("creds_dump",os.O_WRONLY|os.O_CREAT)
credentials = ctxt.credentials()
for cred in credentials:
string="%s <- %s" % (cred.head().string(), cred.tail().string())
os.write(fd,string)
os.write(fd,"\n")
os.close(fd)
##########################################################################
# Does JohnX likes John0 ?
# role = [keyid:JohnX].role:after
# p [Keyid:john0]
def goodQuery() :
    # Expected-success query: does John<N> like John0?
    # In the daisy-chain credential setup every JohnK "likes" John0, so the
    # ABAC prover should succeed.  Returns the elapsed datetime.timedelta.
    # NOTE(review): Python 2 source; ``#VAL#`` is a placeholder substituted
    # by the benchmark harness before this script is executed, so this file
    # is a template and not directly runnable.
    aid="John%s_ID.pem"% #VAL#
    aID=ABAC.ID(aid)
    bID=ABAC.ID("John0_ID.pem")
    print "\n===good============ johnN.likes <- john0 "
    starttime = datetime.datetime.now()
    # prove [JohnN].likes <- John0 against the loaded credential context
    (success, credentials) = ctxt.query("%s.likes" % aID.keyid(), bID.keyid())
    if success:
        print "success"
    else:
        print "failure"
    endtime = datetime.datetime.now()
    if(debug):
        print "good query start-> %s\n" % starttime
        print "good query end -> %s\n" % endtime
    # dump the proof chain returned by the prover
    for cred in credentials:
        print "%s <- %s" % (cred.head().string(), cred.tail().string())
    return extract_delta(starttime, endtime)
##########################################################################
# Does John0 likes JohnX ?
# role = [keyid:JohnX].role:after
# p [Keyid:john0]
def badQuery() :
    # Expected-failure query: does John0 like John<N>?
    # The daisy chain only runs one way, so the prover should fail.
    # Returns the elapsed datetime.timedelta.
    # NOTE(review): Python 2 source; ``#VAL#`` is substituted by the
    # benchmark harness before execution.
    bid="John%s_ID.pem"% #VAL#
    bID=ABAC.ID(bid)
    aID=ABAC.ID("John0_ID.pem")
    print "\n===bad============ john0.likes <- johnX "
    starttime = datetime.datetime.now()
    # prove [John0].likes <- JohnN against the loaded credential context
    (success, credentials) = ctxt.query("%s.likes" % aID.keyid(), bID.keyid())
    if success:
        print "success"
    else:
        print "failure"
    endtime = datetime.datetime.now()
    # dump whatever partial proof chain the prover returned
    for cred in credentials:
        print "%s <- %s" % (cred.head().string(), cred.tail().string())
    return extract_delta(starttime, endtime)
##############################################################
#skip the first one
e_time=goodQuery()
elapsed_micro=get_micro(e_time)
sys.stderr.write("%d %d GOOD_f(micro)\n" % (cred_count,elapsed_micro))
tlist=[]
k=100
while(k):
e_time=goodQuery()
elapsed_micro=get_micro(e_time)
k=k-1
tlist.append(elapsed_micro)
if(k==99):
sys.stderr.write("%d %d GOOD_s(micro)\n" % (cred_count,elapsed_micro))
if(debug):
sys.stderr.write("%d %d GOOD_%d(micro)\n" % (cred_count,elapsed_micro,k))
sum=0
for i in tlist:
sum=sum+i
ave=sum/100
dlist = [(x-ave) for x in tlist ]
slist = [ (x-ave)*(x-ave) for x in tlist]
sum=0
for i in slist:
sum=sum+i
sd=math.sqrt(sum/99)
sys.stderr.write("%d %d %d GOOD_t(micro)\n" % (cred_count,ave,sd))
sys.stderr.write("%d 100 %s GOOD_list(micro)\n" % (cred_count,tlist))
###############################################################
e_time=badQuery()
elapsed_micro=get_micro(e_time)
sys.stderr.write("%d %d BAD_f(micro)\n" % (cred_count,elapsed_micro))
tlist=[]
k=100
while(k):
e_time=badQuery()
elapsed_micro=get_micro(e_time)
tlist.append(elapsed_micro)
k=k-1
if(k==99):
sys.stderr.write("%d %d BAD_s(micro)\n" % (cred_count,elapsed_micro))
if(debug):
sys.stderr.write("%d %d BAD_%d(micro)\n" % (cred_count,elapsed_micro,k))
sum=0
for i in tlist:
sum=sum+i
ave=sum/100
dlist = [(x-ave) for x in tlist ]
slist = [ (x-ave)*(x-ave) for x in tlist]
sum=0
for i in slist:
sum=sum+i
sd=math.sqrt(sum/99)
sys.stderr.write("%d %d %d BAD_t(micro)\n" % (cred_count,ave, sd))
sys.stderr.write("%d 100 %s BAD_list(micro)\n" % (cred_count,tlist))
| mit |
csxiaoyaojianxian/JavaScriptStudy | 05-ajax/ajax_tool.js | 2637 | // ajax get 五部曲
// Perform an AJAX GET request (the five classic XHR steps).
// `data`, when given, must be a pre-encoded query string such as
// "name=jack&age=18"; it is appended to the URL.
function ajax_get(url,data) {
    // 1. create the asynchronous (XHR) object
    var ajax = new XMLHttpRequest();
    // 2. prepare method and URL
    // GET sends its data in the URL: xxx.php?name=jack&age=18,
    // so the query string has to be concatenated onto the URL here
    if (data) {
        // a value was supplied: build xxx.php?name=jack&age=18
        url+='?';
        url+=data;
    }else{
    }
    ajax.open('get',url);
    // 3. send the request
    ajax.send();
    // 4. register the state-change handler
    ajax.onreadystatechange = function () {
        // 5. when the response is complete (readyState 4) and OK (status 200),
        // consume the data / update the page
        if (ajax.readyState==4&& ajax.status==200) {
            console.log(ajax.responseText);
        }
    }
}
// ajax_post五部曲
// Perform an AJAX POST request (the five classic XHR steps).
// `data`, when given, must be a pre-encoded form body ("a=1&b=2").
function ajax_post(url,data) {
    // 1. create the asynchronous (XHR) object
    var ajax = new XMLHttpRequest();
    // 2. prepare method and URL
    ajax.open('post',url);
    // 3. set the request header required for form-encoded POST bodies
    ajax.setRequestHeader("Content-type","application/x-www-form-urlencoded");
    // 4. send the request
    if (data) {
        // for POST the data travels in the request body via send()
        ajax.send(data);
    }else{
        ajax.send();
    }
    // 5. register the state-change handler
    ajax.onreadystatechange = function () {
        // when the response is complete (readyState 4) and OK (status 200),
        // consume the data / update the page
        if (ajax.readyState==4&&ajax.status==200) {
            console.log(ajax.responseText);
        }
    }
}
// 将 get 跟post 封装到一起
/*
参数1:url
参数2:数据
参数3:请求的方法
参数4:数据成功获取以后 调用的方法
*/
// Combined GET/POST helper.
// param 1: url      request URL
// param 2: data     pre-encoded query string / form body ("a=1&b=2"), or falsy
// param 3: method   'get' or 'post'
// param 4: success  callback invoked with responseText once the data arrives
function ajax_tool(url,data,method,success) {
    // the asynchronous (XHR) object
    var ajax = new XMLHttpRequest();
    // GET and POST need slightly different handling
    if (method=='get') {
        // GET request: data is appended to the URL as a query string
        if (data) {
            // a value was supplied
            url+='?';
            url+=data;
        }else{
        }
        // set method and URL
        ajax.open(method,url);
        // just send
        ajax.send();
    }else{
        // POST request
        // for POST the URL is left untouched
        ajax.open(method,url);
        // the form-encoded request header is required
        ajax.setRequestHeader("Content-type","application/x-www-form-urlencoded");
        // data (if any) is sent in the request body via send()
        if (data) {
            // a value was supplied: send it in the body
            ajax.send(data);
        }else{
            // no payload, send as-is
            ajax.send();
        }
    }
    // register the state-change handler
    ajax.onreadystatechange = function () {
        // when the response has arrived, hand it to the caller
        if (ajax.readyState==4&&ajax.status==200) {
            // console.log(ajax.responseText);
            // expose the data to the outside world:
            // return ajax.responseText;
            // a `return` from this async handler would be useless --
            // when onreadystatechange fires the data is here,
            // so the caller instead passes in a function `success`
            // which receives the response text
            success(ajax.responseText);
        }
    }
}
NicolaSabino/Monitor_1.0 | app/src/main/java/com/nicola/monitor_10/MainActivity.java | 24663 | package com.nicola.monitor_10;
import android.app.AlarmManager;
import android.app.IntentService;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.content.res.ColorStateList;
import android.content.res.Resources;
import android.database.Cursor;
import android.graphics.Color;
import android.icu.text.DecimalFormat;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceManager;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.NotificationCompat;
import android.support.v4.content.ContextCompat;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v4.widget.CursorAdapter;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.view.menu.ActionMenuItemView;
import android.support.v7.view.menu.MenuView;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.view.Menu;
import android.view.MenuItem;
import android.view.ViewGroup;
import android.widget.ListView;
import android.widget.TextView;
import com.jjoe64.graphview.GraphView;
import com.jjoe64.graphview.series.DataPoint;
import com.jjoe64.graphview.series.LineGraphSeries;
import java.io.BufferedReader;
import java.io.FileReader;
import weka.classifiers.Classifier;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import static android.Manifest.permission.RECORD_AUDIO;
import static android.Manifest.permission.WRITE_EXTERNAL_STORAGE;
import static android.R.attr.key;
/**
* Main activity dell'applicazione
*/
public class MainActivity extends AppCompatActivity implements SharedPreferences.OnSharedPreferenceChangeListener {
private boolean stato;
private FloatingActionButton fab;
boolean playPauseState;
private Intent servizio;
private MenuView.ItemView s;
private DbManager db;
private LineGraphSeries<DataPoint> light,sound,movement;
private GraphView graph1,graph2,graph3;
private int currentGraphIndex;
private int frequenza ;
private int numeroDatiGrafico;
private boolean AsseX;
private boolean AsseY;
private boolean AiState;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
readSettings();
fabConfig();
initStartStop();
//settaggio dell'intent che gestisce l'acquisizione dati
this.servizio = new Intent(this,DataService.class);
//imposto il db manager
db = new DbManager(getApplicationContext());
popolaTabella();
light = new LineGraphSeries<DataPoint>();
sound = new LineGraphSeries<DataPoint>();
movement = new LineGraphSeries<DataPoint>();
graph1 = (GraphView) findViewById(R.id.graph1);
graph2 = (GraphView) findViewById(R.id.graph2);
graph3 = (GraphView) findViewById(R.id.graph3);
initGrafici();
currentGraphIndex = 0;
popolaGrafici();
LocalBroadcastManager.getInstance(this).registerReceiver(mMessageReciver,new IntentFilter("evento-popola-tabella"));
}
private void fabConfig(){
fab = (FloatingActionButton) findViewById(R.id.fab);
// se non è abilitata la classificazione rendo il fab cliccabile
if(!AiState) {
if(!stato){
fab .setBackgroundTintList(ColorStateList.valueOf(Color.rgb(255,193,7)));//ARANCIONE
fab .setImageDrawable(getResources().getDrawable(R.drawable.sun,getTheme()));
}else{
fab.setBackgroundTintList(ColorStateList.valueOf(Color.rgb(48,63,159)));//blu
fab.setImageDrawable(getResources().getDrawable(R.drawable.moon,getTheme()));
}
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
cambiastato(view);
}
});
}else {
fab .setBackgroundTintList(ColorStateList.valueOf(Color.parseColor("#FF5722")));//ARANCIONE
fab .setImageDrawable(getResources().getDrawable(R.drawable.ic_action_name,getTheme()));
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
MessageHelper.snak(view,"È attiva l'inteligenza artificiale");
}
});
}
}
private BroadcastReceiver mMessageReciver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
//ogni volta che ricevo un intent popolo la tabella
popolaTabella();
float l = Float.parseFloat(intent.getStringExtra("light"));
float s = Float.parseFloat(intent.getStringExtra("sound"));
float m = Float.parseFloat(intent.getStringExtra("motion"));
appendValuesTabella(l,s,m);
}
};
@Override
protected void onPostResume() {
super.onPostResume();
readSettings();
popolaTabella();
MessageHelper.log("RESUME", "popolo la tabella");
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
saveState();
}
@Override
protected void onDestroy() {
LocalBroadcastManager.getInstance(this).unregisterReceiver(mMessageReciver);
super.onDestroy();
}
/**
 * Toggles the user's sleep/wake state when the FAB is tapped: swaps the
 * FAB colour/icon, replaces the reminder notification and persists the
 * new state through the DbManager.
 *
 * @param view the view that triggered the click (used to anchor the snackbar)
 */
private void cambiastato(View view) {
    // if the user was asleep -> "good morning"
    if(stato){
        MessageHelper.snak(view,"Buongiorno");
        fab.setBackgroundTintList(ColorStateList.valueOf(Color.rgb(255,193,7)));//amber
        fab.setImageDrawable(getResources().getDrawable(R.drawable.sun,getTheme()));
        // replace the reminder notification ("remember to say goodnight")
        deleteNotification();
        generateNotification(this,"Ricorda di darmi la buonanotte \u263A",R.drawable.sun);
    }else{
        MessageHelper.snak(view,"Buonanotte");
        fab.setBackgroundTintList(ColorStateList.valueOf(Color.rgb(48,63,159)));//blue
        fab.setImageDrawable(getResources().getDrawable(R.drawable.moon,getTheme()));
        // replace the reminder notification ("remember to say good morning")
        deleteNotification();
        generateNotification(this,"Ricorda di darmi il buongiorno \u263b",R.drawable.moon);
    }
    MessageHelper.log("SWITCH_STATE", stato + " -> " + !stato);
    stato = !stato; //flip the user's sleep/wake state
    db.changeState(stato);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if(playPauseState){
getMenuInflater().inflate(R.menu.menu_main, menu);
}else{
getMenuInflater().inflate(R.menu.menu_main_pause, menu);
MessageHelper.toast(getApplicationContext(),"Acquisizione dati già in esecuzione");
}
this.s = (MenuView.ItemView) findViewById(R.id.alarmState);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
Intent intent = new Intent(this,SettingsActivity.class);
startActivity(intent);
return true;
}
if (id == R.id.alarmState){
if(checkPermission()){ //se ho i permessi per usare il microfono faccio partire il task in background
startStop();
return true;
}else{ //altrimenti
requestPermission();
return true;
}
}
return super.onOptionsItemSelected(item);
}
public void initStartStop(){
ActionMenuItemView x = (ActionMenuItemView) findViewById(R.id.alarmState);
if(playPauseState){
if (x != null) {
x.setIcon(getResources().getDrawable(android.R.drawable.ic_media_pause,getTheme()));
}else {
MessageHelper.log("ICONA","NULL");
}
}else {
if (x != null) {
x.setIcon(getResources().getDrawable(android.R.drawable.ic_media_play, getTheme()));
} else {
MessageHelper.log("ICONA", "NULL");
}
}
}
public void startStop(){
ActionMenuItemView x = (ActionMenuItemView) findViewById(R.id.alarmState);
if(playPauseState){
if (x != null) {
x.setIcon(getResources().getDrawable(android.R.drawable.ic_media_pause,getTheme()));
}else {
MessageHelper.log("ICONA","NULL");
}
//this.startService(servizio);
scheduleAlarm();
//messaggi
MessageHelper.log("TOOLBAR","Play -> inizio acquisizione dati");
MessageHelper.toast(getApplicationContext(),"Inizio acquisizione dati");
playPauseState = false;
saveState();
}else{
if (x != null) {
x.setIcon(getResources().getDrawable(android.R.drawable.ic_media_play,getTheme()));
}else {
MessageHelper.log("ICONA","NULL");
}
//this.stopService(servizio);
cancelAlarm();
//messaggi
MessageHelper.log("TOOLBAR","Pause -> acquisizione dati sospesa");
MessageHelper.toast(getApplicationContext(),"Acquisizione dati interrotta");
playPauseState = true;
saveState();
}
}
public void popolaTabella() {
//seleziona le info dal database e ripongo il risultato in un oggetto cursore
final Cursor crs = db.query();
CursorAdapter adapter = new CursorAdapter(this,crs,0) {
@Override
public View newView(Context context, Cursor cursor, ViewGroup parent) {
View v = getLayoutInflater().inflate(R.layout.custom_row_table,null);
return v;
}
@Override
public void bindView(View v, Context context, Cursor cursor) {
//prendo le informazioni dalla query
String id = crs.getString(crs.getColumnIndex(DatabaseStrings.FIELD_ID));
String light = crs.getString(crs.getColumnIndex(DatabaseStrings.FIELD_LIGHT));
String movement = crs.getString(crs.getColumnIndex(DatabaseStrings.FIELD_MOVEMENT));
String sound = crs.getString(crs.getColumnIndex(DatabaseStrings.FIELD_SOUND));
String charging = crs.getString(crs.getColumnIndex(DatabaseStrings.FIELD_CHARGING));
String locked = crs.getString(crs.getColumnIndex(DatabaseStrings.FIELD_LOCKED));
String time = crs.getString(crs.getColumnIndex(DatabaseStrings.FIELD_TIME));
String date = crs.getString(crs.getColumnIndex(DatabaseStrings.FIELD_DATE));
//stampo le informazioni nella riga della tabella
TextView elem8 = (TextView) v.findViewById(R.id.predictionContent);
TextView elem0 = (TextView) v.findViewById(R.id.idContent);
TextView elem1 = (TextView) v.findViewById(R.id.lightContent);
TextView elem2 = (TextView) v.findViewById(R.id.movContent);
TextView elem3 = (TextView) v.findViewById(R.id.soundContent);
TextView elem4 = (TextView) v.findViewById(R.id.chargingContent);
TextView elem5 = (TextView) v.findViewById(R.id.lockContent);
TextView elem6 = (TextView) v.findViewById(R.id.dateContent);
TextView elem7 = (TextView) v.findViewById(R.id.timeContent);
elem0.setText(id);
elem1.setText(light);
elem2.setText(movement);
elem3.setText(sound);
elem6.setText(date);
elem7.setText(time);
//modifico charging e loked
if(Boolean.parseBoolean(charging)){
elem4.setText("\u26A1");
}else{
elem4.setText("");
}
if(Boolean.parseBoolean(locked)){
elem5.setText("\u26BF");
}else{
elem5.setText("");
}
//se la classificazione è attivata popolo con la predizione altrimenti non scrivo niente
if(AiState) {
int NUMERO_DI_ATTRIBUTI = 6; // 5 + 1 classe
int NUMERO_DI_ISTANZE = 1;
// creo degli oggetti attributo
Attribute a0 = new Attribute("light");
Attribute a1 = new Attribute("sound");
Attribute a2 = new Attribute("movement");
Attribute a3 = new Attribute("locked");
Attribute a4 = new Attribute("charging");
Attribute aClass = new Attribute("state");
//creo un vettore di attributi
FastVector fastVector = new FastVector(NUMERO_DI_ATTRIBUTI);
fastVector.addElement(a0);
fastVector.addElement(a1);
fastVector.addElement(a2);
fastVector.addElement(a3);
fastVector.addElement(a4);
fastVector.addElement(aClass);
Instances testSet = new Instances("Insieme di istanze",fastVector,NUMERO_DI_ISTANZE);
testSet.setClassIndex(5);
//istanza per la classificazione sicuramente il risultato è 1
Instance instance = new DenseInstance(fastVector.size());
instance.setValue(a0,Double.parseDouble(light));
instance.setValue(a1,Double.parseDouble(sound));
instance.setValue(a2,Double.parseDouble(movement));
instance.setValue(a3,(Boolean.parseBoolean(locked)) ? 1 : 0);
instance.setValue(a4,(Boolean.parseBoolean(charging)) ? 1 : 0);
testSet.add(instance);
double prediction;
try {
Classifier classifier = (Classifier) weka.core.SerializationHelper.read(getAssets().open("M_S.model"));
prediction = classifier.classifyInstance(testSet.instance(0)); //predizione
}catch (Exception e) {
e.printStackTrace();
prediction = -1;
}
java.text.DecimalFormat df = new java.text.DecimalFormat("##.##");
if(prediction >= 0.5){ //è stato predetto che sono sveglio
elem8.setText("\u263C " + df.format(prediction*100) + "%");
}else{ // è stato predetto che sto dormento
elem8.setText("\u263D " + df.format(100 -(prediction*100)) +"%");
}
}else {
elem8.setText("");
}
}
};
ListView listaValori = (ListView) findViewById(R.id.listaValori);
listaValori.setAdapter(adapter);
}
public void initGrafici(){
// definisco lo stile dei grafici
light.setTitle("light");
light.setColor(Color.rgb(255,193,7));//ambra
light.setDrawBackground(true);
light.setBackgroundColor(Color.argb(60,255,193,7));
light.setDrawDataPoints(true);
light.setDataPointsRadius(8);
light.setThickness(5);
light.setAnimated(true);
movement.setTitle("movement");
movement.setColor(Color.rgb(230,74,25));//rosso
movement.setDrawBackground(true);
movement.setBackgroundColor(Color.argb(60,230,74,25));
movement.setDrawDataPoints(true);
movement.setDataPointsRadius(8);
movement.setThickness(5);
movement.setAnimated(true);
sound.setTitle("sound");
sound.setColor(Color.rgb(63,81,181));//blu
sound.setDrawBackground(true);
sound.setBackgroundColor(Color.argb(60,63,81,181));
sound.setDrawDataPoints(true);
sound.setDataPointsRadius(8);
sound.setThickness(5);
sound.setAnimated(true);
graph1.getViewport().setXAxisBoundsManual(true);
graph1.getViewport().setMinX(1);
graph1.getViewport().setMaxX(20);
graph1.getGridLabelRenderer().setHorizontalLabelsVisible(AsseX);
graph1.getGridLabelRenderer().setVerticalLabelsVisible(AsseY);
graph1.setTitleColor(Color.rgb(255,193,7));
graph2.getViewport().setXAxisBoundsManual(true);
graph2.getViewport().setMinX(1);
graph2.getViewport().setMaxX(20);
graph2.getGridLabelRenderer().setHorizontalLabelsVisible(AsseX);
graph2.getGridLabelRenderer().setVerticalLabelsVisible(AsseY);
graph2.setTitleColor(Color.rgb(48,63,159));
graph3.getViewport().setXAxisBoundsManual(true);
graph3.getViewport().setMinX(1);
graph3.getViewport().setMaxX(20);
graph3.getGridLabelRenderer().setHorizontalLabelsVisible(AsseX);
graph3.getGridLabelRenderer().setVerticalLabelsVisible(AsseY);
graph3.setTitleColor(Color.rgb(230,74,25));
graph1.setTitle("Light");
graph2.setTitle("Sound");
graph3.setTitle("Movement");
graph1.getViewport().setScrollable(true);
graph2.getViewport().setScrollable(true);
graph3.getViewport().setScrollable(true);
graph1.addSeries(light);
graph2.addSeries(sound);
graph3.addSeries(movement);
}
public void popolaGrafici(){
final Cursor crs = db.reverseQuery();
if (crs == null) {
// do nothing
} else {
if(crs.moveToFirst()){
for (int i=0;i<crs.getCount();i++) {
String id = crs.getString(0);
light.appendData(new DataPoint(Integer.valueOf(id),crs.getDouble(1)),true,numeroDatiGrafico);
sound.appendData(new DataPoint(Integer.valueOf(id),crs.getDouble(2)),true,numeroDatiGrafico);
movement.appendData(new DataPoint(Integer.valueOf(id),crs.getDouble(3)),true,numeroDatiGrafico);
crs.moveToNext();
currentGraphIndex=Integer.valueOf(id);
}
}
}
}
public void appendValuesTabella(float l,float s,float m){
currentGraphIndex++;
light.appendData(new DataPoint(currentGraphIndex,l),true,numeroDatiGrafico);
sound.appendData(new DataPoint(currentGraphIndex,s),true,numeroDatiGrafico);
movement.appendData(new DataPoint(currentGraphIndex,m),true,numeroDatiGrafico);
}
public boolean checkPermission(){
int result = ContextCompat.checkSelfPermission(getApplicationContext(),
RECORD_AUDIO);
return result == PackageManager.PERMISSION_GRANTED ;
}
private void requestPermission() {
ActivityCompat.requestPermissions(MainActivity.this, new
String[]{RECORD_AUDIO,WRITE_EXTERNAL_STORAGE}, 1);
}
@Override
public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) {
switch (requestCode) {
case 1:
boolean RecordPermission = grantResults[0] ==
PackageManager.PERMISSION_GRANTED;
if (RecordPermission) {
startStop();
MessageHelper.toast(this,"Permesso garantito!");
} else {
MessageHelper.toast(this,"Permesso negato");
}
break;
}
}
@Override
public void onBackPressed() {
saveState();
super.onBackPressed();
}
@Override
protected void onPause() {
super.onPause();
}
public void saveState(){
SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPref.edit();
editor.putBoolean("stato",stato);
editor.putBoolean("playPauseState",this.playPauseState);
editor.apply();
}
public void scheduleAlarm() {
// Costruisco un intent che eseguirà l'AlarmReceiver
Intent intent = new Intent(getApplicationContext(), MyAlarmReceiver.class);
// Creo un PendingIntent che verrà attivato quando l'allarme si spegne
final PendingIntent pIntent = PendingIntent.getBroadcast(this, MyAlarmReceiver.REQUEST_CODE,
intent, PendingIntent.FLAG_UPDATE_CURRENT);
long firstMillis = System.currentTimeMillis(); // alarm is set right away
AlarmManager alarm = (AlarmManager) this.getSystemService(Context.ALARM_SERVICE);
// First parameter is the type: ELAPSED_REALTIME, ELAPSED_REALTIME_WAKEUP, RTC_WAKEUP
// Interval can be INTERVAL_FIFTEEN_MINUTES, INTERVAL_HALF_HOUR, INTERVAL_HOUR, INTERVAL_DAY
alarm.setInexactRepeating(AlarmManager.RTC_WAKEUP,firstMillis,
1000 * 60 * frequenza , pIntent);
}
public void cancelAlarm() {
Intent intent = new Intent(getApplicationContext(), MyAlarmReceiver.class);
final PendingIntent pIntent = PendingIntent.getBroadcast(this, MyAlarmReceiver.REQUEST_CODE,
intent, PendingIntent.FLAG_UPDATE_CURRENT);
AlarmManager alarm = (AlarmManager) this.getSystemService(Context.ALARM_SERVICE);
alarm.cancel(pIntent);
}
public void readSettings(){
SharedPreferences sharedpreferences = PreferenceManager.getDefaultSharedPreferences(this);
SharedPreferences sharedPref = this.getPreferences(Context.MODE_PRIVATE);
String f = sharedpreferences.getString("freq","15");
String nD = sharedpreferences.getString("rend","100");
stato = sharedPref.getBoolean("stato",false);
playPauseState = sharedPref.getBoolean("playPauseState",true);
frequenza = Integer.parseInt(f);
numeroDatiGrafico = Integer.parseInt(nD);
AsseX = sharedpreferences.getBoolean("X",false);
AsseY = sharedpreferences.getBoolean("Y",false);
AiState = sharedpreferences.getBoolean("Classification",false);
}
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String s) {
    // Reschedule the sampling alarm when the user changes the frequency
    // preference while acquisition is running (playPauseState == false
    // means "running" in this class).
    // BUG FIX: the original compared the preference key to the int field
    // `frequenza` via String.equals(Integer), which is always false, so
    // the alarm was never rescheduled.  It now compares against the
    // actual preference key ("freq", see readSettings()) and refreshes
    // `frequenza` before rescheduling so the new interval takes effect.
    if ("freq".equals(s) && !playPauseState) {
        readSettings();   // pick up the new frequency value
        cancelAlarm();
        scheduleAlarm();
    }
}
private static void generateNotification(Context context, String message, int res){
int mNotificationId = 001;
Intent notificationIntent = new Intent(context, MainActivity.class);
notificationIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
PendingIntent intent = PendingIntent.getActivity(context, 0, notificationIntent, 0);
NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(context)
.setSmallIcon(res)
.setContentTitle(context.getString(R.string.app_name))
.setContentIntent(intent)
.setPriority(5) //private static final PRIORITY_HIGH = 5;
.setContentText(message)
.setAutoCancel(false);
NotificationManager mNotificationManager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
mNotificationManager.notify(001, mBuilder.build());
}
public void deleteNotification() {
NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.cancel(001);
}
}
| mit |
Pmovil/CN1WindowsPort | src/gnu/xml/dom/DomCharacterData_EmptyNodeList.cs | 1507 | // Automatically generated by xmlvm2csharp (do not edit).
using org.xmlvm;
namespace gnu.xml.dom {
public class DomCharacterData_2EmptyNodeList: global::java.lang.Object,global::org.w3c.dom.NodeList {
new public void @this(){
//XMLVM_BEGIN_WRAPPER[gnu.xml.dom.DomCharacterData$EmptyNodeList: void <init>()]
global::System.Object _r0_o = null;
_r0_o = this;
((global::java.lang.Object) _r0_o).@this();
return;
//XMLVM_END_WRAPPER[gnu.xml.dom.DomCharacterData$EmptyNodeList: void <init>()]
}
public virtual int getLength(){
//XMLVM_BEGIN_WRAPPER[gnu.xml.dom.DomCharacterData$EmptyNodeList: int getLength()]
global::org.xmlvm._nElement _r0;
global::System.Object _r1_o = null;
_r1_o = this;
_r0.i = 0;
return _r0.i;
//XMLVM_END_WRAPPER[gnu.xml.dom.DomCharacterData$EmptyNodeList: int getLength()]
}
public virtual global::System.Object item(int n1){
//XMLVM_BEGIN_WRAPPER[gnu.xml.dom.DomCharacterData$EmptyNodeList: org.w3c.dom.Node item(int)]
global::System.Object _r0_o = null;
global::System.Object _r1_o = null;
global::org.xmlvm._nElement _r2;
_r1_o = this;
_r2.i = n1;
_r0_o = null;
return (global::org.w3c.dom.Node) _r0_o;
//XMLVM_END_WRAPPER[gnu.xml.dom.DomCharacterData$EmptyNodeList: org.w3c.dom.Node item(int)]
}
//XMLVM_BEGIN_WRAPPER[gnu.xml.dom.DomCharacterData$EmptyNodeList]
//XMLVM_END_WRAPPER[gnu.xml.dom.DomCharacterData$EmptyNodeList]
} // end of class: DomCharacterData_2EmptyNodeList
} // end of namespace: gnu.xml.dom
| mit |
sserrot/champion_relationships | venv/Lib/site-packages/adodbapi/test/adodbapitest.py | 56111 | """ Unit tests version 2.6.1.0 for adodbapi"""
from __future__ import print_function
"""
adodbapi - A python DB API 2.0 interface to Microsoft ADO
Copyright (C) 2002 Henrik Ekelund
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Updates by Vernon Cole
"""
import unittest
import sys
import datetime
import decimal
import copy
import random
import string
try:
import win32com.client
win32 = True
except ImportError:
win32 = False
# run the configuration module.
import adodbapitestconfig as config # will set sys.path to find correct version of adodbapi
# in our code below, all our switches are from config.whatever
import tryconnection
import adodbapi
import adodbapi.apibase as api
try:
import adodbapi.ado_consts as ado_consts
except ImportError: #we are doing a shortcut import as a module -- so
try:
import ado_consts
except ImportError:
from adodbapi import ado_consts
# Python 2/3 compatibility shims for the string/bytes split.
if sys.version_info >= (3,0):
    # Py3: str is already unicode; latin1 is a 1:1 byte mapping, so this
    # round-trips any code point < 256 unchanged.
    def str2bytes(sval):
        return sval.encode("latin1")
    str = str  # keep a module-level alias so both branches define the same names
    long = int  # Py3 unified int/long
else:
    # Py2: a plain str is already bytes; only unicode values need encoding.
    def str2bytes(sval):
        if isinstance(sval, str):
            return sval
        return sval.encode("latin1")
try:
    bytes
except NameError:
    bytes = str  # pre-2.6 Pythons have no bytes built-in
def randomstring(length):
    """Return a random string of ``length`` ASCII letters.

    BUGFIX: the original ignored the ``length`` argument and always built a
    32-character string (hard-coded ``range(32)``); it now honours ``length``.
    """
    return ''.join(random.choice(string.ascii_letters) for _ in range(length))
class CommonDBTests(unittest.TestCase):
"Self contained super-simple tests in easy syntax, should work on everything between mySQL and Oracle"
def setUp(self):
self.engine = 'unknown'
def getEngine(self):
return self.engine
def getConnection(self):
raise NotImplementedError #"This method must be overriden by a subclass"
def getCursor(self):
return self.getConnection().cursor()
def testConnection(self):
crsr=self.getCursor()
assert crsr.__class__.__name__ == 'Cursor'
def testErrorHandlerInherits(self):
if not self.remote:
conn=self.getConnection()
mycallable=lambda connection,cursor,errorclass,errorvalue: 1
conn.errorhandler=mycallable
crsr=conn.cursor()
assert crsr.errorhandler==mycallable,"Error handler on crsr should be same as on connection"
def testDefaultErrorHandlerConnection(self):
if not self.remote:
conn=self.getConnection()
del conn.messages[:]
try:
conn.close()
conn.commit() #Should not be able to use connection after it is closed
except:
assert len(conn.messages)==1
assert len(conn.messages[0])==2
assert conn.messages[0][0]==api.ProgrammingError
def testOwnErrorHandlerConnection(self):
if self.remote: # ToDo: use "skip"
return
mycallable=lambda connection,cursor,errorclass,errorvalue: 1 #does not raise anything
conn=self.getConnection()
conn.errorhandler=mycallable
conn.close()
conn.commit() #Should not be able to use connection after it is closed
assert len(conn.messages)==0
conn.errorhandler=None #This should bring back the standard error handler
try:
conn.close()
conn.commit() #Should not be able to use connection after it is closed
except:
pass
#The Standard errorhandler appends error to messages attribute
assert len(conn.messages)>0,"Setting errorhandler to none should bring back the standard error handler"
def testDefaultErrorHandlerCursor(self):
crsr=self.getConnection().cursor()
if not self.remote:
del crsr.messages[:]
try:
crsr.execute("SELECT abbtytddrf FROM dasdasd")
except:
assert len(crsr.messages)==1
assert len(crsr.messages[0])==2
assert crsr.messages[0][0]==api.DatabaseError
def testOwnErrorHandlerCursor(self):
if self.remote: # ToDo: should be a "skip"
return
mycallable=lambda connection,cursor,errorclass,errorvalue: 1 #does not raise anything
crsr=self.getConnection().cursor()
crsr.errorhandler=mycallable
crsr.execute("SELECT abbtytddrf FROM dasdasd")
assert len(crsr.messages)==0
crsr.errorhandler=None #This should bring back the standard error handler
try:
crsr.execute("SELECT abbtytddrf FROM dasdasd")
except:
pass
#The Standard errorhandler appends error to messages attribute
assert len(crsr.messages)>0,"Setting errorhandler to none should bring back the standard error handler"
def testUserDefinedConversions(self):
if self.remote: ## Todo: should be a "skip"
return
try:
duplicatingConverter=lambda aStringField: aStringField*2
assert duplicatingConverter('gabba') == 'gabbagabba'
self.helpForceDropOnTblTemp()
conn=self.getConnection()
# the variantConversions attribute should not exist on a normal connection object
self.assertRaises(AttributeError, lambda x:conn.variantConversions[x],[2])
if not self.remote:
# create a variantConversions attribute on the connection
conn.variantConversions = copy.copy(api.variantConversions)
crsr=conn.cursor()
tabdef = "CREATE TABLE xx_%s (fldData VARCHAR(100) NOT NULL, fld2 VARCHAR(20))" % config.tmp
crsr.execute(tabdef)
crsr.execute("INSERT INTO xx_%s(fldData,fld2) VALUES('gabba','booga')" % config.tmp)
crsr.execute("INSERT INTO xx_%s(fldData,fld2) VALUES('hey','yo')" % config.tmp)
# change converter for ALL adoStringTypes columns
conn.variantConversions[api.adoStringTypes]=duplicatingConverter
crsr.execute("SELECT fldData,fld2 FROM xx_%s ORDER BY fldData" % config.tmp)
rows=crsr.fetchall()
row = rows[0]
self.assertEqual(row[0],'gabbagabba')
row = rows[1]
self.assertEqual(row[0],'heyhey')
self.assertEqual(row[1],'yoyo')
upcaseConverter=lambda aStringField: aStringField.upper()
assert upcaseConverter('upThis') == 'UPTHIS'
# now use a single column converter
rows.converters[1] = upcaseConverter # convert second column
self.assertEqual(row[0],'heyhey') # first will be unchanged
self.assertEqual(row[1],'YO') # second will convert to upper case
finally:
try:
del conn.variantConversions #Restore the default
except: pass
self.helpRollbackTblTemp()
def testUserDefinedConversionForExactNumericTypes(self):
# variantConversions is a dictionary of conversion functions
# held internally in adodbapi.apibase
#
# !!! this test intentionally alters the value of what should be constant in the module
# !!! no new code should use this example, to is only a test to see that the
# !!! deprecated way of doing this still works. (use connection.variantConversions)
#
if not self.remote and sys.version_info < (3,0): ### Py3 need different test
oldconverter = adodbapi.variantConversions[ado_consts.adNumeric] #keep old function to restore later
# By default decimal and "numbers" are returned as decimals.
# Instead, make numbers return as floats
try:
adodbapi.variantConversions[ado_consts.adNumeric] = adodbapi.cvtFloat
self.helpTestDataType("decimal(18,2)",'NUMBER',3.45,compareAlmostEqual=1)
self.helpTestDataType("numeric(18,2)",'NUMBER',3.45,compareAlmostEqual=1)
# now return strings
adodbapi.variantConversions[ado_consts.adNumeric] = adodbapi.cvtString
self.helpTestDataType("numeric(18,2)",'NUMBER','3.45')
# now a completly weird user defined convertion
adodbapi.variantConversions[ado_consts.adNumeric] = lambda x: '!!This function returns a funny unicode string %s!!'%x
self.helpTestDataType("numeric(18,2)",'NUMBER','3.45',
allowedReturnValues=['!!This function returns a funny unicode string 3.45!!'])
finally:
# now reset the converter to its original function
adodbapi.variantConversions[ado_consts.adNumeric]=oldconverter #Restore the original convertion function
def helpTestDataType(self,sqlDataTypeString,
DBAPIDataTypeString,
pyData,
pyDataInputAlternatives=None,
compareAlmostEqual=None,
allowedReturnValues=None):
self.helpForceDropOnTblTemp()
conn=self.getConnection()
crsr=conn.cursor()
tabdef= """
CREATE TABLE xx_%s (
fldId integer NOT NULL,
fldData """ % config.tmp + sqlDataTypeString + ")\n"
crsr.execute(tabdef)
#Test Null values mapped to None
crsr.execute("INSERT INTO xx_%s (fldId) VALUES (1)" % config.tmp)
crsr.execute("SELECT fldId,fldData FROM xx_%s" % config.tmp)
rs=crsr.fetchone()
self.assertEqual(rs[1],None) #Null should be mapped to None
assert rs[0]==1
#Test description related
descTuple=crsr.description[1]
assert descTuple[0] in ['fldData','flddata'], 'was "%s" expected "%s"'%(descTuple[0],'fldData')
if DBAPIDataTypeString=='STRING':
assert descTuple[1] == api.STRING, 'was "%s" expected "%s"'%(descTuple[1],api.STRING.values)
elif DBAPIDataTypeString == 'NUMBER':
assert descTuple[1] == api.NUMBER, 'was "%s" expected "%s"'%(descTuple[1],api.NUMBER.values)
elif DBAPIDataTypeString == 'BINARY':
assert descTuple[1] == api.BINARY, 'was "%s" expected "%s"'%(descTuple[1],api.BINARY.values)
elif DBAPIDataTypeString == 'DATETIME':
assert descTuple[1] == api.DATETIME, 'was "%s" expected "%s"'%(descTuple[1],api.DATETIME.values)
elif DBAPIDataTypeString == 'ROWID':
assert descTuple[1] == api.ROWID, 'was "%s" expected "%s"'%(descTuple[1],api.ROWID.values)
elif DBAPIDataTypeString == 'UUID':
assert descTuple[1] == api.OTHER, 'was "%s" expected "%s"'%(descTuple[1],api.OTHER.values)
else:
raise NotImplementedError #"DBAPIDataTypeString not provided"
#Test data binding
inputs=[pyData]
if pyDataInputAlternatives:
inputs.extend(pyDataInputAlternatives)
if str is str:
inputs = set(inputs) # removes redundant string==unicode tests
fldId=1
for inParam in inputs:
fldId+=1
try:
crsr.execute("INSERT INTO xx_%s (fldId,fldData) VALUES (?,?)" % config.tmp, (fldId, inParam))
except:
if self.remote:
for message in crsr.messages:
print(message)
else:
conn.printADOerrors()
raise
crsr.execute("SELECT fldData FROM xx_%s WHERE ?=fldID" % config.tmp, [fldId])
rs=crsr.fetchone()
if allowedReturnValues:
allowedTypes = tuple([type(aRV) for aRV in allowedReturnValues])
assert isinstance(rs[0],allowedTypes), \
'result type "%s" must be one of %s'%(type(rs[0]),allowedTypes)
else:
assert isinstance(rs[0] ,type(pyData)), \
'result type "%s" must be instance of %s'%(type(rs[0]),type(pyData))
if compareAlmostEqual and DBAPIDataTypeString == 'DATETIME':
iso1=adodbapi.dateconverter.DateObjectToIsoFormatString(rs[0])
iso2=adodbapi.dateconverter.DateObjectToIsoFormatString(pyData)
self.assertEqual(iso1, iso2)
elif compareAlmostEqual:
s = float(pyData)
v = float(rs[0])
assert abs(v-s)/s < 0.00001, \
"Values not almost equal recvd=%s, expected=%f" %(rs[0],s)
else:
if allowedReturnValues:
ok=False
self.assertTrue(rs[0] in allowedReturnValues,
'Value "%s" not in %s' % (repr(rs[0]), allowedReturnValues))
else:
self.assertEqual(rs[0], pyData,
'Values are not equal recvd="%s", expected="%s"' %(rs[0],pyData))
def testDataTypeFloat(self):
self.helpTestDataType("real",'NUMBER',3.45,compareAlmostEqual=True)
self.helpTestDataType("float",'NUMBER',1.79e37,compareAlmostEqual=True)
def testDataTypeDecmal(self):
self.helpTestDataType("decimal(18,2)",'NUMBER',3.45,
allowedReturnValues=['3.45','3,45',decimal.Decimal('3.45')])
self.helpTestDataType("numeric(18,2)",'NUMBER',3.45,
allowedReturnValues=['3.45','3,45',decimal.Decimal('3.45')])
self.helpTestDataType("decimal(20,2)",'NUMBER',444444444444444444,
allowedReturnValues=['444444444444444444.00', '444444444444444444,00',
decimal.Decimal('444444444444444444')])
if self.getEngine() == 'MSSQL':
self.helpTestDataType("uniqueidentifier",'UUID','{71A4F49E-39F3-42B1-A41E-48FF154996E6}',
allowedReturnValues=['{71A4F49E-39F3-42B1-A41E-48FF154996E6}'])
def testDataTypeMoney(self): #v2.1 Cole -- use decimal for money
if self.getEngine() == 'MySQL':
self.helpTestDataType("DECIMAL(20,4)",'NUMBER',decimal.Decimal('-922337203685477.5808'))
elif self.getEngine() == 'PostgreSQL':
self.helpTestDataType("money",'NUMBER',decimal.Decimal('-922337203685477.5808'),
compareAlmostEqual=True,
allowedReturnValues=[-922337203685477.5808,
decimal.Decimal('-922337203685477.5808')])
else:
self.helpTestDataType("smallmoney",'NUMBER',decimal.Decimal('214748.02'))
self.helpTestDataType("money",'NUMBER',decimal.Decimal('-922337203685477.5808'))
def testDataTypeInt(self):
if self.getEngine() != 'PostgreSQL':
self.helpTestDataType("tinyint",'NUMBER',115)
self.helpTestDataType("smallint",'NUMBER',-32768)
if self.getEngine() not in ['ACCESS','PostgreSQL']:
self.helpTestDataType("bit",'NUMBER',1) #Does not work correctly with access
if self.getEngine() in ['MSSQL','PostgreSQL']:
self.helpTestDataType("bigint",'NUMBER',3000000000,
allowedReturnValues=[3000000000, int(3000000000)])
self.helpTestDataType("int",'NUMBER',2147483647)
def testDataTypeChar(self):
for sqlDataType in ("char(6)","nchar(6)"):
self.helpTestDataType(sqlDataType,'STRING','spam ',allowedReturnValues=['spam','spam','spam ','spam '])
def testDataTypeVarChar(self):
if self.getEngine() == 'MySQL':
stringKinds = ["varchar(10)","text"]
elif self.getEngine() == 'PostgreSQL':
stringKinds = ["varchar(10)","text","character varying"]
else:
stringKinds = ["varchar(10)","nvarchar(10)","text","ntext"] #,"varchar(max)"]
for sqlDataType in stringKinds:
self.helpTestDataType(sqlDataType,'STRING','spam',['spam'])
def testDataTypeDate(self):
if self.getEngine() == 'PostgreSQL':
dt = "timestamp"
else:
dt = "datetime"
self.helpTestDataType(dt,'DATETIME',adodbapi.Date(2002,10,28),
compareAlmostEqual=True)
if self.getEngine() not in ['MySQL','PostgreSQL']:
self.helpTestDataType("smalldatetime",'DATETIME',adodbapi.Date(2002,10,28),
compareAlmostEqual=True)
if tag != 'pythontime' and self.getEngine() not in ['MySQL','PostgreSQL']: # fails when using pythonTime
self.helpTestDataType(dt,'DATETIME', adodbapi.Timestamp(2002,10,28,12,15,1),
compareAlmostEqual=True)
def testDataTypeBinary(self):
binfld = str2bytes('\x07\x00\xE2\x40*')
arv = [binfld, adodbapi.Binary(binfld), bytes(binfld)]
if self.getEngine() == 'PostgreSQL':
self.helpTestDataType("bytea",'BINARY',adodbapi.Binary(binfld),
allowedReturnValues=arv)
else:
self.helpTestDataType("binary(5)",'BINARY',adodbapi.Binary(binfld),
allowedReturnValues=arv)
self.helpTestDataType("varbinary(100)",'BINARY',adodbapi.Binary(binfld),
allowedReturnValues=arv)
if self.getEngine() != 'MySQL':
self.helpTestDataType("image",'BINARY',adodbapi.Binary(binfld),
allowedReturnValues=arv)
    def helpRollbackTblTemp(self):
        # Historical name: "rollback" is implemented as a forced DROP of the
        # scratch table, since some tests/connections run in autocommit mode.
        self.helpForceDropOnTblTemp()
def helpForceDropOnTblTemp(self):
conn=self.getConnection()
with conn.cursor() as crsr:
try:
crsr.execute("DROP TABLE xx_%s" % config.tmp)
if not conn.autocommit:
conn.commit()
except:
pass
def helpCreateAndPopulateTableTemp(self,crsr):
tabdef= """
CREATE TABLE xx_%s (
fldData INTEGER
)
""" % config.tmp
try: #EAFP
crsr.execute(tabdef)
except api.DatabaseError: # was not dropped before
self.helpForceDropOnTblTemp() # so drop it now
crsr.execute(tabdef)
for i in range(9): # note: this poor SQL code, but a valid test
crsr.execute("INSERT INTO xx_%s (fldData) VALUES (%i)" % (config.tmp, i))
# NOTE: building the test table without using parameter substitution
def testFetchAll(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("SELECT fldData FROM xx_%s" % config.tmp)
rs=crsr.fetchall()
assert len(rs)==9
#test slice of rows
i = 3
for row in rs[3:-2]: #should have rowid 3..6
assert row[0]==i
i+=1
self.helpRollbackTblTemp()
def testPreparedStatement(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.prepare("SELECT fldData FROM xx_%s" % config.tmp)
crsr.execute(crsr.command) # remembes the one that was prepared
rs=crsr.fetchall()
assert len(rs)==9
assert rs[2][0]==2
self.helpRollbackTblTemp()
def testWrongPreparedStatement(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.prepare("SELECT * FROM nowhere")
crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) # should execute this one, not the prepared one
rs=crsr.fetchall()
assert len(rs)==9
assert rs[2][0]==2
self.helpRollbackTblTemp()
def testIterator(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("SELECT fldData FROM xx_%s" % config.tmp)
for i,row in enumerate(crsr): # using cursor as an iterator, rather than fetchxxx
assert row[0]==i
self.helpRollbackTblTemp()
def testExecuteMany(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
seq_of_values = [ (111,) , (222,) ]
crsr.executemany("INSERT INTO xx_%s (fldData) VALUES (?)" % config.tmp, seq_of_values)
if crsr.rowcount==-1:
print(self.getEngine()+" Provider does not support rowcount (on .executemany())")
else:
self.assertEqual( crsr.rowcount,2)
crsr.execute("SELECT fldData FROM xx_%s" % config.tmp)
rs=crsr.fetchall()
assert len(rs)==11
self.helpRollbackTblTemp()
def testRowCount(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("SELECT fldData FROM xx_%s" % config.tmp)
if crsr.rowcount == -1:
#print("provider does not support rowcount on select")
pass
else:
self.assertEqual( crsr.rowcount,9)
self.helpRollbackTblTemp()
def testRowCountNoRecordset(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("DELETE FROM xx_%s WHERE fldData >= 5" % config.tmp)
if crsr.rowcount==-1:
print(self.getEngine()+" Provider does not support rowcount (on DELETE)")
else:
self.assertEqual( crsr.rowcount,4)
self.helpRollbackTblTemp()
def testFetchMany(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("SELECT fldData FROM xx_%s" % config.tmp)
rs=crsr.fetchmany(3)
assert len(rs)==3
rs=crsr.fetchmany(5)
assert len(rs)==5
rs=crsr.fetchmany(5)
assert len(rs)==1 #Asked for five, but there is only one left
self.helpRollbackTblTemp()
def testFetchManyWithArraySize(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("SELECT fldData FROM xx_%s" % config.tmp)
rs=crsr.fetchmany()
assert len(rs)==1 #arraysize Defaults to one
crsr.arraysize=4
rs=crsr.fetchmany()
assert len(rs)==4
rs=crsr.fetchmany()
assert len(rs)==4
rs=crsr.fetchmany()
assert len(rs)==0
self.helpRollbackTblTemp()
def testErrorConnect(self):
conn = self.getConnection()
kw = {}
if 'proxy_host' in conn.kwargs:
kw['proxy_host'] = conn.kwargs['proxy_host']
conn.close()
self.assertRaises(api.DatabaseError, self.db, 'not a valid connect string', kw)
def testRowIterator(self):
self.helpForceDropOnTblTemp()
conn=self.getConnection()
crsr=conn.cursor()
tabdef= """
CREATE TABLE xx_%s (
fldId integer NOT NULL,
fldTwo integer,
fldThree integer,
fldFour integer)
""" % config.tmp
crsr.execute(tabdef)
inputs = [(2,3,4),(102,103,104)]
fldId=1
for inParam in inputs:
fldId+=1
try:
crsr.execute("INSERT INTO xx_%s (fldId,fldTwo,fldThree,fldFour) VALUES (?,?,?,?)" % config.tmp,
(fldId,inParam[0],inParam[1],inParam[2]))
except:
if self.remote:
for message in crsr.messages:
print(message)
else:
conn.printADOerrors()
raise
crsr.execute("SELECT fldTwo,fldThree,fldFour FROM xx_%s WHERE ?=fldID" % config.tmp, [fldId])
rec = crsr.fetchone()
# check that stepping through an emulated row works
for j in range(len(inParam)):
assert rec[j] == inParam[j], 'returned value:"%s" != test value:"%s"'%(rec[j],inParam[j])
# check that we can get a complete tuple from a row
assert tuple(rec) == inParam, 'returned value:"%s" != test value:"%s"'%(repr(rec),repr(inParam))
# test that slices of rows work
slice1 = tuple(rec[:-1])
slice2 = tuple(inParam[0:2])
assert slice1 == slice2, 'returned value:"%s" != test value:"%s"'%(repr(slice1),repr(slice2))
# now test named column retrieval
assert rec['fldTwo'] == inParam[0]
assert rec.fldThree == inParam[1]
assert rec.fldFour == inParam[2]
# test array operation
# note that the fields vv vv vv are out of order
crsr.execute("select fldThree,fldFour,fldTwo from xx_%s" % config.tmp)
recs = crsr.fetchall()
assert recs[1][0] == 103
assert recs[0][1] == 4
assert recs[1]['fldFour'] == 104
assert recs[0,0] == 3
assert recs[0,'fldTwo'] == 2
assert recs[1,2] == 102
for i in range(1):
for j in range(2):
assert recs[i][j] == recs[i,j]
def testFormatParamstyle(self):
self.helpForceDropOnTblTemp()
conn=self.getConnection()
conn.paramstyle = 'format' #test nonstandard use of paramstyle
crsr=conn.cursor()
tabdef= """
CREATE TABLE xx_%s (
fldId integer NOT NULL,
fldData varchar(10),
fldConst varchar(30))
""" % config.tmp
crsr.execute(tabdef)
inputs = ['one','two','three']
fldId=2
for inParam in inputs:
fldId+=1
sql = "INSERT INTO xx_" + \
config.tmp + \
" (fldId,fldConst,fldData) VALUES (%s,'thi%s :may cause? trouble', %s)"
try:
crsr.execute(sql, (fldId,inParam))
except:
if self.remote:
for message in crsr.messages:
print(message)
else:
conn.printADOerrors()
raise
crsr.execute("SELECT fldData, fldConst FROM xx_" + config.tmp + " WHERE %s=fldID", [fldId])
rec = crsr.fetchone()
self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"' % (rec[0],inParam))
self.assertEqual(rec[1], "thi%s :may cause? trouble")
# now try an operation with a "%s" as part of a literal
sel = "insert into xx_" + config.tmp + " (fldId,fldData) VALUES (%s,'four%sfive')"
params = (20,)
crsr.execute(sel,params)
#test the .query implementation
assert '(?,' in crsr.query, 'expected:"%s" in "%s"'%('(?,',crsr.query)
#test the .command attribute
assert crsr.command == sel, 'expected:"%s" but found "%s"' % (sel, crsr.command)
#test the .parameters attribute
if not self.remote: # parameter list will be altered in transit
self.assertEqual(crsr.parameters, params)
#now make sure the data made it
crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=20" % config.tmp)
rec = crsr.fetchone()
self.assertEqual(rec[0], 'four%sfive')
def testNamedParamstyle(self):
self.helpForceDropOnTblTemp()
conn=self.getConnection()
crsr=conn.cursor()
crsr.paramstyle = 'named' #test nonstandard use of paramstyle
tabdef= """
CREATE TABLE xx_%s (
fldId integer NOT NULL,
fldData varchar(10))
""" % config.tmp
crsr.execute(tabdef)
inputs = ['four','five','six']
fldId=10
for inParam in inputs:
fldId+=1
try:
crsr.execute("INSERT INTO xx_%s (fldId,fldData) VALUES (:Id,:f_Val)" % config.tmp,
{"f_Val":inParam,'Id':fldId})
except:
if self.remote:
for message in crsr.messages:
print(message)
else:
conn.printADOerrors()
raise
crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=:Id" % config.tmp, {'Id':fldId})
rec = crsr.fetchone()
self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"'%(rec[0],inParam))
# now a test with a ":" as part of a literal
crsr.execute("insert into xx_%s (fldId,fldData) VALUES (:xyz,'six:five')" % config.tmp,{'xyz':30})
crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp)
rec = crsr.fetchone()
self.assertEqual(rec[0], 'six:five')
def testPyformatParamstyle(self):
self.helpForceDropOnTblTemp()
conn=self.getConnection()
crsr=conn.cursor()
crsr.paramstyle = 'pyformat' #test nonstandard use of paramstyle
tabdef= """
CREATE TABLE xx_%s (
fldId integer NOT NULL,
fldData varchar(10))
""" % config.tmp
crsr.execute(tabdef)
inputs = ['four', 'five', 'six']
fldId=10
for inParam in inputs:
fldId+=1
try:
crsr.execute("INSERT INTO xx_%s (fldId,fldData) VALUES (%%(Id)s,%%(f_Val)s)" % config.tmp,
{"f_Val": inParam, 'Id': fldId})
except:
if self.remote:
for message in crsr.messages:
print(message)
else:
conn.printADOerrors()
raise
crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=%%(Id)s" % config.tmp, {'Id':fldId})
rec = crsr.fetchone()
self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"'%(rec[0],inParam))
# now a test with a "%" as part of a literal
crsr.execute("insert into xx_%s (fldId,fldData) VALUES (%%(xyz)s,'six%%five')" % config.tmp,{'xyz': 30})
crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp)
rec = crsr.fetchone()
self.assertEqual(rec[0], 'six%five')
def testAutomaticParamstyle(self):
self.helpForceDropOnTblTemp()
conn=self.getConnection()
conn.paramstyle = 'dynamic' #test nonstandard use of paramstyle
crsr=conn.cursor()
tabdef= """
CREATE TABLE xx_%s (
fldId integer NOT NULL,
fldData varchar(10),
fldConst varchar(30))
""" % config.tmp
crsr.execute(tabdef)
inputs = ['one', 'two', 'three']
fldId=2
for inParam in inputs:
fldId+=1
try:
crsr.execute("INSERT INTO xx_" + config.tmp + \
" (fldId,fldConst,fldData) VALUES (?,'thi%s :may cause? troub:1e', ?)", (fldId,inParam))
except:
if self.remote:
for message in crsr.messages:
print(message)
else:
conn.printADOerrors()
raise
trouble = 'thi%s :may cause? troub:1e'
crsr.execute("SELECT fldData, fldConst FROM xx_" + config.tmp + " WHERE ?=fldID", [fldId])
rec = crsr.fetchone()
self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"'%(rec[0],inParam))
self.assertEqual(rec[1], trouble)
# inputs = [u'four',u'five',u'six']
fldId=10
for inParam in inputs:
fldId+=1
try:
crsr.execute("INSERT INTO xx_%s (fldId,fldData) VALUES (:Id,:f_Val)" % config.tmp,
{"f_Val":inParam,'Id':fldId})
except:
if self.remote:
for message in crsr.messages:
print(message)
else:
conn.printADOerrors()
raise
crsr.execute("SELECT fldData FROM xx_%s WHERE :Id=fldID" % config.tmp, {'Id':fldId})
rec = crsr.fetchone()
self.assertEqual(rec[0], inParam, 'returned value:"%s" != test value:"%s"'%(rec[0],inParam))
# now a test with a ":" as part of a literal -- and use a prepared query
ppdcmd = "insert into xx_%s (fldId,fldData) VALUES (:xyz,'six:five')" % config.tmp
crsr.prepare(ppdcmd)
crsr.execute(ppdcmd, {'xyz':30})
crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp)
rec = crsr.fetchone()
self.assertEqual(rec[0], 'six:five')
def testRollBack(self):
conn = self.getConnection()
crsr = conn.cursor()
assert not crsr.connection.autocommit, 'Unexpected beginning condition'
self.helpCreateAndPopulateTableTemp(crsr)
crsr.connection.commit() # commit the first bunch
crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp)
selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp
crsr.execute(selectSql)
rs = crsr.fetchall()
assert len(rs) == 1
self.conn.rollback()
crsr.execute(selectSql)
assert crsr.fetchone() == None, 'cursor.fetchone should return None if a query retrieves no rows'
crsr.execute('SELECT fldData from xx_%s' % config.tmp)
rs = crsr.fetchall()
assert len(rs) == 9, 'the original records should still be present'
self.helpRollbackTblTemp()
def testCommit(self):
try:
con2 = self.getAnotherConnection()
except NotImplementedError:
return # should be "SKIP" for ACCESS
assert not con2.autocommit, 'default should be manual commit'
crsr = con2.cursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp)
con2.commit()
selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp
crsr.execute(selectSql)
rs = crsr.fetchall()
assert len(rs) == 1
crsr.close()
con2.close()
conn = self.getConnection()
crsr = self.getCursor()
with conn.cursor() as crsr:
crsr.execute(selectSql)
rs = crsr.fetchall()
assert len(rs) == 1
assert rs[0][0] == 100
self.helpRollbackTblTemp()
def testAutoRollback(self):
try:
con2 = self.getAnotherConnection()
except NotImplementedError:
return # should be "SKIP" for ACCESS
assert not con2.autocommit, 'unexpected beginning condition'
crsr = con2.cursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp)
selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp
crsr.execute(selectSql)
rs = crsr.fetchall()
assert len(rs) == 1
crsr.close()
con2.close()
crsr = self.getCursor()
try:
crsr.execute(selectSql) # closing the connection should have forced rollback
row = crsr.fetchone()
except api.DatabaseError:
row = None # if the entire table disappeared the rollback was perfect and the test passed
assert row == None, 'cursor.fetchone should return None if a query retrieves no rows. Got %s' % repr(row)
self.helpRollbackTblTemp()
def testAutoCommit(self):
try:
ac_conn = self.getAnotherConnection({'autocommit': True})
except NotImplementedError:
return # should be "SKIP" for ACCESS
crsr = ac_conn.cursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp)
crsr.close()
with self.getCursor() as crsr:
selectSql = 'SELECT fldData from xx_%s' % config.tmp
crsr.execute(selectSql) # closing the connection should _not_ have forced rollback
rs = crsr.fetchall()
assert len(rs) == 10, 'all records should still be present'
ac_conn.close()
self.helpRollbackTblTemp()
def testSwitchedAutoCommit(self):
try:
ac_conn = self.getAnotherConnection()
except NotImplementedError:
return # should be "SKIP" for ACCESS
ac_conn.autocommit = True
crsr = ac_conn.cursor()
self.helpCreateAndPopulateTableTemp(crsr)
crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp)
crsr.close()
conn = self.getConnection()
ac_conn.close()
with self.getCursor() as crsr:
selectSql = 'SELECT fldData from xx_%s' % config.tmp
crsr.execute(selectSql) # closing the connection should _not_ have forced rollback
rs = crsr.fetchall()
assert len(rs) == 10, 'all records should still be present'
self.helpRollbackTblTemp()
def testExtendedTypeHandling(self):
class XtendString(str):
pass
class XtendInt(int):
pass
class XtendFloat(float):
pass
xs = XtendString(randomstring(30))
xi = XtendInt(random.randint(-100, 500))
xf = XtendFloat(random.random())
self.helpForceDropOnTblTemp()
conn = self.getConnection()
crsr = conn.cursor()
tabdef = """
CREATE TABLE xx_%s (
s VARCHAR(40) NOT NULL,
i INTEGER NOT NULL,
f REAL NOT NULL)""" % config.tmp
crsr.execute(tabdef)
crsr.execute("INSERT INTO xx_%s (s, i, f) VALUES (?, ?, ?)" % config.tmp, (xs, xi, xf))
crsr.close()
conn = self.getConnection()
with self.getCursor() as crsr:
selectSql = 'SELECT s, i, f from xx_%s' % config.tmp
crsr.execute(selectSql) # closing the connection should _not_ have forced rollback
row = crsr.fetchone()
self.assertEqual(row.s, xs)
self.assertEqual(row.i, xi)
self.assertAlmostEqual(row.f, xf)
self.helpRollbackTblTemp()
class TestADOwithSQLServer(CommonDBTests):
def setUp(self):
self.conn = config.dbSqlServerconnect(*config.connStrSQLServer[0], **config.connStrSQLServer[1])
self.conn.timeout = 30 # turn timeout back up
self.engine = 'MSSQL'
self.db = config.dbSqlServerconnect
self.remote = config.connStrSQLServer[2]
    def tearDown(self):
        # Best-effort cleanup: roll back any open transaction, then close the
        # connection. Both steps may legitimately fail (e.g. the test already
        # closed the connection), so errors are deliberately swallowed.
        try:
            self.conn.rollback()
        except:
            pass
        try:
            self.conn.close()
        except:
            pass
        self.conn=None
def getConnection(self):
return self.conn
def getAnotherConnection(self, addkeys=None):
keys = dict(config.connStrSQLServer[1])
if addkeys:
keys.update(addkeys)
return config.dbSqlServerconnect(*config.connStrSQLServer[0], **keys)
def testVariableReturningStoredProcedure(self):
crsr=self.conn.cursor()
spdef= """
CREATE PROCEDURE sp_DeleteMeOnlyForTesting
@theInput varchar(50),
@theOtherInput varchar(50),
@theOutput varchar(100) OUTPUT
AS
SET @theOutput=@theInput+@theOtherInput
"""
try:
crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting")
self.conn.commit()
except: #Make sure it is empty
pass
crsr.execute(spdef)
retvalues=crsr.callproc('sp_DeleteMeOnlyForTesting',('Dodsworth','Anne',' '))
assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0])
assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1])
assert retvalues[2]=='DodsworthAnne','%s is not "DodsworthAnne"'%repr(retvalues[2])
self.conn.rollback()
def testMultipleSetReturn(self):
crsr=self.getCursor()
self.helpCreateAndPopulateTableTemp(crsr)
spdef= """
CREATE PROCEDURE sp_DeleteMe_OnlyForTesting
AS
SELECT fldData FROM xx_%s ORDER BY fldData ASC
SELECT fldData From xx_%s where fldData = -9999
SELECT fldData FROM xx_%s ORDER BY fldData DESC
""" % (config.tmp, config.tmp, config.tmp)
try:
crsr.execute("DROP PROCEDURE sp_DeleteMe_OnlyForTesting")
self.conn.commit()
except: #Make sure it is empty
pass
crsr.execute(spdef)
retvalues=crsr.callproc('sp_DeleteMe_OnlyForTesting')
row=crsr.fetchone()
self.assertEqual(row[0], 0)
assert crsr.nextset() == True, 'Operation should succeed'
assert not crsr.fetchall(), 'Should be an empty second set'
assert crsr.nextset() == True, 'third set should be present'
rowdesc=crsr.fetchall()
self.assertEqual(rowdesc[0][0],8)
assert crsr.nextset() == None,'No more return sets, should return None'
self.helpRollbackTblTemp()
def testDatetimeProcedureParameter(self):
crsr=self.conn.cursor()
spdef= """
CREATE PROCEDURE sp_DeleteMeOnlyForTesting
@theInput DATETIME,
@theOtherInput varchar(50),
@theOutput varchar(100) OUTPUT
AS
SET @theOutput = CONVERT(CHARACTER(20), @theInput, 0) + @theOtherInput
"""
try:
crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting")
self.conn.commit()
except: #Make sure it is empty
pass
crsr.execute(spdef)
result = crsr.callproc('sp_DeleteMeOnlyForTesting', [adodbapi.Timestamp(2014,12,25,0,1,0), 'Beep', ' ' * 30])
assert result[2] == 'Dec 25 2014 12:01AM Beep', 'value was="%s"' % result[2]
self.conn.rollback()
def testIncorrectStoredProcedureParameter(self):
crsr=self.conn.cursor()
spdef= """
CREATE PROCEDURE sp_DeleteMeOnlyForTesting
@theInput DATETIME,
@theOtherInput varchar(50),
@theOutput varchar(100) OUTPUT
AS
SET @theOutput = CONVERT(CHARACTER(20), @theInput) + @theOtherInput
"""
try:
crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting")
self.conn.commit()
except: #Make sure it is empty
pass
crsr.execute(spdef)
# calling the sproc with a string for the first parameter where a DateTime is expected
result = tryconnection.try_operation_with_expected_exception(
(api.DataError,api.DatabaseError),
crsr.callproc,
['sp_DeleteMeOnlyForTesting'],
{'parameters': ['this is wrong', 'Anne', 'not Alice']}
)
if result[0]: # the expected exception was raised
assert '@theInput' in str(result[1]) or 'DatabaseError' in str(result), \
'Identifies the wrong erroneous parameter'
else:
assert result[0], result[1] # incorrect or no exception
self.conn.rollback()
class TestADOwithAccessDB(CommonDBTests):
def setUp(self):
self.conn = config.dbAccessconnect(*config.connStrAccess[0], **config.connStrAccess[1])
self.conn.timeout = 30 # turn timeout back up
self.engine = 'ACCESS'
self.db = config.dbAccessconnect
self.remote = config.connStrAccess[2]
def tearDown(self):
try:
self.conn.rollback()
except:
pass
try:
self.conn.close()
except:
pass
self.conn=None
def getConnection(self):
return self.conn
def getAnotherConnection(self, addkeys=None):
raise NotImplementedError('Jet cannot use a second connection to the database')
def testOkConnect(self):
c = self.db(*config.connStrAccess[0], **config.connStrAccess[1])
assert c != None
c.close()
class TestADOwithMySql(CommonDBTests):
def setUp(self):
self.conn = config.dbMySqlconnect(*config.connStrMySql[0], **config.connStrMySql[1])
self.conn.timeout = 30 # turn timeout back up
self.engine = 'MySQL'
self.db = config.dbMySqlconnect
self.remote = config.connStrMySql[2]
def tearDown(self):
try:
self.conn.rollback()
except:
pass
try:
self.conn.close()
except:
pass
self.conn=None
def getConnection(self):
return self.conn
def getAnotherConnection(self, addkeys=None):
keys = dict(config.connStrMySql[1])
if addkeys:
keys.update(addkeys)
return config.dbMySqlconnect(*config.connStrMySql[0], **keys)
def testOkConnect(self):
c = self.db(*config.connStrMySql[0], **config.connStrMySql[1])
assert c != None
# def testStoredProcedure(self):
# crsr=self.conn.cursor()
# try:
# crsr.execute("DROP PROCEDURE DeleteMeOnlyForTesting")
# self.conn.commit()
# except: #Make sure it is empty
# pass
# spdef= """
# DELIMITER $$
# CREATE PROCEDURE DeleteMeOnlyForTesting (onein CHAR(10), twoin CHAR(10), OUT theout CHAR(20))
# DETERMINISTIC
# BEGIN
# SET theout = onein //|| twoin;
# /* (SELECT 'a small string' as result; */
# END $$
# """
#
# crsr.execute(spdef)
#
# retvalues=crsr.callproc('DeleteMeOnlyForTesting',('Dodsworth','Anne',' '))
# print 'return value (mysql)=',repr(crsr.returnValue) ###
# assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0])
# assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1])
# assert retvalues[2]=='DodsworthAnne','%s is not "DodsworthAnne"'%repr(retvalues[2])
#
# try:
# crsr.execute("DROP PROCEDURE, DeleteMeOnlyForTesting")
# self.conn.commit()
# except: #Make sure it is empty
# pass
class TestADOwithPostgres(CommonDBTests):
def setUp(self):
self.conn = config.dbPostgresConnect(*config.connStrPostgres[0], **config.connStrPostgres[1])
self.conn.timeout = 30 # turn timeout back up
self.engine = 'PostgreSQL'
self.db = config.dbPostgresConnect
self.remote = config.connStrPostgres[2]
def tearDown(self):
try:
self.conn.rollback()
except:
pass
try:
self.conn.close()
except:
pass
self.conn=None
def getConnection(self):
return self.conn
def getAnotherConnection(self, addkeys=None):
keys = dict(config.connStrPostgres[1])
if addkeys:
keys.update(addkeys)
return config.dbPostgresConnect(*config.connStrPostgres[0], **keys)
def testOkConnect(self):
c = self.db(*config.connStrPostgres[0], **config.connStrPostgres[1])
assert c != None
# def testStoredProcedure(self):
# crsr=self.conn.cursor()
# spdef= """
# CREATE OR REPLACE FUNCTION DeleteMeOnlyForTesting (text, text)
# RETURNS text AS $funk$
# BEGIN
# RETURN $1 || $2;
# END;
# $funk$
# LANGUAGE SQL;
# """
#
# crsr.execute(spdef)
# retvalues = crsr.callproc('DeleteMeOnlyForTesting',('Dodsworth','Anne',' '))
# ### print 'return value (pg)=',repr(crsr.returnValue) ###
# assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0])
# assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1])
# assert retvalues[2]=='Dodsworth Anne','%s is not "Dodsworth Anne"'%repr(retvalues[2])
# self.conn.rollback()
# try:
# crsr.execute("DROP PROCEDURE, DeleteMeOnlyForTesting")
# self.conn.commit()
# except: #Make sure it is empty
# pass
class TimeConverterInterfaceTest(unittest.TestCase):
def testIDate(self):
assert self.tc.Date(1990,2,2)
def testITime(self):
assert self.tc.Time(13,2,2)
def testITimestamp(self):
assert self.tc.Timestamp(1990,2,2,13,2,1)
def testIDateObjectFromCOMDate(self):
assert self.tc.DateObjectFromCOMDate(37435.7604282)
def testICOMDate(self):
assert hasattr(self.tc,'COMDate')
def testExactDate(self):
d=self.tc.Date(1994,11,15)
comDate=self.tc.COMDate(d)
correct=34653.0
assert comDate == correct,comDate
def testExactTimestamp(self):
d=self.tc.Timestamp(1994,11,15,12,0,0)
comDate=self.tc.COMDate(d)
correct=34653.5
self.assertEqual( comDate ,correct)
d=self.tc.Timestamp(2003,5,6,14,15,17)
comDate=self.tc.COMDate(d)
correct=37747.593946759262
self.assertEqual( comDate ,correct)
def testIsoFormat(self):
d=self.tc.Timestamp(1994,11,15,12,3,10)
iso=self.tc.DateObjectToIsoFormatString(d)
self.assertEqual(str(iso[:19]) , '1994-11-15 12:03:10')
dt=self.tc.Date(2003,5,2)
iso=self.tc.DateObjectToIsoFormatString(dt)
self.assertEqual(str(iso[:10]), '2003-05-02')
if config.doMxDateTimeTest:
import mx.DateTime
class TestMXDateTimeConverter(TimeConverterInterfaceTest):
def setUp(self):
self.tc = api.mxDateTimeConverter()
def testCOMDate(self):
t=mx.DateTime.DateTime(2002,6,28,18,15,2)
cmd=self.tc.COMDate(t)
assert cmd == t.COMDate()
def testDateObjectFromCOMDate(self):
cmd=self.tc.DateObjectFromCOMDate(37435.7604282)
t=mx.DateTime.DateTime(2002,6,28,18,15,0)
t2=mx.DateTime.DateTime(2002,6,28,18,15,2)
assert t2>cmd>t
def testDate(self):
assert mx.DateTime.Date(1980,11,4)==self.tc.Date(1980,11,4)
def testTime(self):
assert mx.DateTime.Time(13,11,4)==self.tc.Time(13,11,4)
def testTimestamp(self):
t=mx.DateTime.DateTime(2002,6,28,18,15,1)
obj=self.tc.Timestamp(2002,6,28,18,15,1)
assert t == obj
import time
class TestPythonTimeConverter(TimeConverterInterfaceTest):
def setUp(self):
self.tc=api.pythonTimeConverter()
def testCOMDate(self):
mk = time.mktime((2002,6,28,18,15,1, 4,31+28+31+30+31+28,-1))
t=time.localtime(mk)
# Fri, 28 Jun 2002 18:15:01 +0000
cmd=self.tc.COMDate(t)
assert abs(cmd - 37435.7604282) < 1.0/24,"%f more than an hour wrong" % cmd
def testDateObjectFromCOMDate(self):
cmd=self.tc.DateObjectFromCOMDate(37435.7604282)
t1=time.gmtime(time.mktime((2002,6,28,0,14,1, 4,31+28+31+30+31+28,-1)))
#there are errors in the implementation of gmtime which we ignore
t2=time.gmtime(time.mktime((2002,6,29,12,14,2, 4,31+28+31+30+31+28,-1)))
assert t1<cmd<t2, '"%s" should be about 2002-6-28 12:15:01'%repr(cmd)
def testDate(self):
t1=time.mktime((2002,6,28,18,15,1, 4,31+28+31+30+31+30,0))
t2=time.mktime((2002,6,30,18,15,1, 4,31+28+31+30+31+28,0))
obj=self.tc.Date(2002,6,29)
assert t1< time.mktime(obj)<t2,obj
def testTime(self):
self.assertEqual( self.tc.Time(18,15,2),time.gmtime(18*60*60+15*60+2))
def testTimestamp(self):
t1=time.localtime(time.mktime((2002,6,28,18,14,1, 4,31+28+31+30+31+28,-1)))
t2=time.localtime(time.mktime((2002,6,28,18,16,1, 4,31+28+31+30+31+28,-1)))
obj=self.tc.Timestamp(2002,6,28,18,15,2)
assert t1< obj <t2,obj
class TestPythonDateTimeConverter(TimeConverterInterfaceTest):
def setUp(self):
self.tc = api.pythonDateTimeConverter()
def testCOMDate(self):
t=datetime.datetime( 2002,6,28,18,15,1)
# Fri, 28 Jun 2002 18:15:01 +0000
cmd=self.tc.COMDate(t)
assert abs(cmd - 37435.7604282) < 1.0/24,"more than an hour wrong"
def testDateObjectFromCOMDate(self):
cmd = self.tc.DateObjectFromCOMDate(37435.7604282)
t1 = datetime.datetime(2002,6,28,18,14,1)
t2 = datetime.datetime(2002,6,28,18,16,1)
assert t1 < cmd < t2, cmd
tx = datetime.datetime(2002,6,28,18,14,1,900000) # testing that microseconds don't become milliseconds
c1 = self.tc.DateObjectFromCOMDate(self.tc.COMDate(tx))
assert t1 < c1 < t2, c1
def testDate(self):
t1=datetime.date(2002,6,28)
t2=datetime.date(2002,6,30)
obj=self.tc.Date(2002,6,29)
assert t1< obj <t2,obj
def testTime(self):
self.assertEqual( self.tc.Time(18,15,2).isoformat()[:8],'18:15:02')
def testTimestamp(self):
t1=datetime.datetime(2002,6,28,18,14,1)
t2=datetime.datetime(2002,6,28,18,16,1)
obj=self.tc.Timestamp(2002,6,28,18,15,2)
assert t1< obj <t2,obj
suites=[]
suites.append( unittest.makeSuite(TestPythonDateTimeConverter,'test'))
if config.doMxDateTimeTest:
suites.append( unittest.makeSuite(TestMXDateTimeConverter,'test'))
if config.doTimeTest:
suites.append( unittest.makeSuite(TestPythonTimeConverter,'test'))
if config.doAccessTest:
suites.append( unittest.makeSuite(TestADOwithAccessDB,'test'))
if config.doSqlServerTest:
suites.append( unittest.makeSuite(TestADOwithSQLServer,'test'))
if config.doMySqlTest:
suites.append( unittest.makeSuite(TestADOwithMySql,'test'))
if config.doPostgresTest:
suites.append( unittest.makeSuite(TestADOwithPostgres,'test'))
class cleanup_manager(object):
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
config.cleanup(config.testfolder, config.mdb_name)
suite=unittest.TestSuite(suites)
if __name__ == '__main__':
mysuite = copy.deepcopy(suite)
with cleanup_manager():
defaultDateConverter = adodbapi.dateconverter
print(__doc__)
print("Default Date Converter is %s" %(defaultDateConverter,))
dateconverter = defaultDateConverter
tag = 'datetime'
unittest.TextTestRunner().run(mysuite)
if config.iterateOverTimeTests:
for test, dateconverter, tag in (
(config.doTimeTest,api.pythonTimeConverter, 'pythontime'),
(config.doMxDateTimeTest, api.mxDateTimeConverter, 'mx')):
if test:
mysuite = copy.deepcopy(suite) # work around a side effect of unittest.TextTestRunner
adodbapi.adodbapi.dateconverter = dateconverter()
print("Changed dateconverter to ")
print(adodbapi.adodbapi.dateconverter)
unittest.TextTestRunner().run(mysuite)
| mit |
liufeiit/tulip | net/src/test/java/io/netty/example/udt/echo/message/MsgEchoServerHandler.java | 1868 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.example.udt.echo.message;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.udt.nio.NioUdtProvider;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Handler implementation for the echo server.
*/
@Sharable
public class MsgEchoServerHandler extends ChannelInboundHandlerAdapter {
private static final Logger log = Logger.getLogger(MsgEchoServerHandler.class.getName());
@Override
public void exceptionCaught(final ChannelHandlerContext ctx,
final Throwable cause) {
log.log(Level.WARNING, "close the connection when an exception is raised", cause);
ctx.close();
}
@Override
public void channelActive(final ChannelHandlerContext ctx) throws Exception {
// log.info("ECHO active " + NioUdtProvider.socketUDT(ctx.channel()).toStringOptions());
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
ctx.write(msg);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
}
| mit |
mayuanyang/helloaurelia | src/helloworld/src/validation/validation.js | 1353 | import {ValidationRules} from 'aurelia-validation';
import {ValidationControllerFactory} from 'aurelia-validation';
import {inject} from 'aurelia-framework';
let model = {
require : "",
email : "",
username : ""
}
ValidationRules
.ensure('require')
.required()
.withMessage(`Require cannot be blank.`)
.ensure('email')
.required()
.email()
.ensure('username')
.required()
.minLength(10)
.maxLength(50)
.on(model);
@inject(ValidationControllerFactory)
export class Validation {
constructor(controllerFactory) {
this.title = 'Aurelia Validation';
this.description = "This pratice is to use the aurelia-validation to validate data";
this.items = [
{ description: "aurelia-validation" },
{ description: "Require field validator" },
{ description: "Email validator" },
{ description: "Min/max length validator" },
{ description: "Use ValidationControllerFactory" }
];
this.model = model;
this.controller = controllerFactory.createForCurrentScope();
}
validate(){
console.log(this.model);
console.log('validating');
this.controller.validate().then(result => {
if (result.valid) {
console.log('all good');
} else {
console.log(result);
}
});
}
} | mit |
novitskiy-aleksei/courses-gbook-sf2 | src/GBook/GuestBookBundle/GuestBookBundle.php | 131 | <?php
namespace GBook\GuestBookBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
class GuestBookBundle extends Bundle
{
}
| mit |
deboyblog/model-maker-pro | template/Official/Java/SpringBoot/Repository.java | 800 | package {{projectProps.path}}.repository;
import {{projectProps.path}}.domain.{{upperFirstAndDashToCamel name}};
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@Repository
public interface {{upperFirstAndDashToCamel name}}Repository extends IRepository<{{upperFirstAndDashToCamel name}}> {
@Modifying
@Query("update {{upperFirstAndDashToCamel name}} set del=1,delDtm=current_time where id = ?1")
void delete(Integer id);
@Transactional
@Modifying
@Query("update {{upperFirstAndDashToCamel name}} set del=1,delDtm=current_time where id in (?1)")
void deleteBatch(List<Integer> list);
}
| mit |
cosmo0920/ruroonga_client | src/lib.rs | 435 | extern crate hyper;
extern crate url;
extern crate json_flex;
#[cfg(feature="gqtp")]
extern crate byteorder;
mod http_request;
mod result_parser;
mod request_uri;
mod uri_base;
pub mod builtin;
#[cfg(feature="gqtp")]
mod gqtp_request;
pub use http_request::HTTPRequest;
pub use result_parser::{ResultParser, Rows};
pub use request_uri::RequestURI;
pub use uri_base::URIBase;
#[cfg(feature="gqtp")]
pub use gqtp_request::GQTPRequest;
| mit |
ionomy/ion | test/functional/bipdersig-p2p.py | 6437 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Copyright (c) 2018-2020 The Ion Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP66 (DER SIG).
Test that the DERSIG soft-fork activates at (regtest) height 1251.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import *
from test_framework.blocktools import create_coinbase, create_block
from test_framework.script import CScript
from io import BytesIO
DERSIG_HEIGHT = 1251
# Reject codes that we might receive in this test
REJECT_INVALID = 16
REJECT_OBSOLETE = 17
REJECT_NONSTANDARD = 64
# A canonical signature consists of:
# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
def unDERify(tx):
"""
Make the signature in vin 0 of a tx non-DER-compliant,
by adding padding after the S-value.
"""
scriptSig = CScript(tx.vin[0].scriptSig)
newscript = []
for i in scriptSig:
if (len(newscript) == 0):
newscript.append(i[0:-1] + b'\0' + i[-1:])
else:
newscript.append(i)
tx.vin[0].scriptSig = CScript(newscript)
def create_transaction(node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(signresult['hex'])))
return tx
class BIP66Test(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-whitelist=127.0.0.1', '-dip3params=9000:9000']]
self.setup_clean_chain = True
def run_test(self):
self.nodes[0].add_p2p_connection(NodeConnCB())
network_thread_start()
# wait_for_verack ensures that the P2P connection is fully up.
self.nodes[0].p2p.wait_for_verack()
self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
self.coinbase_blocks = self.nodes[0].generate(DERSIG_HEIGHT - 2)
self.nodeaddress = self.nodes[0].getnewaddress()
self.log.info("Test that a transaction with non-DER signature can still appear in a block")
spendtx = create_transaction(self.nodes[0], self.coinbase_blocks[0],
self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
tip = self.nodes[0].getbestblockhash()
block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT - 1), block_time)
block.nVersion = 2
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.nodes[0].p2p.send_and_ping(msg_block(block))
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
self.log.info("Test that blocks must now be at least version 3")
tip = block.sha256
block_time += 1
block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time)
block.nVersion = 2
block.rehash()
block.solve()
self.nodes[0].p2p.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
with mininode_lock:
assert_equal(self.nodes[0].p2p.last_message["reject"].code, REJECT_OBSOLETE)
assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'bad-version(0x00000002)')
assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
del self.nodes[0].p2p.last_message["reject"]
self.log.info("Test that transactions with non-DER signatures cannot appear in a block")
block.nVersion = 3
spendtx = create_transaction(self.nodes[0], self.coinbase_blocks[1],
self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
# First we show that this tx is valid except for DERSIG by getting it
# rejected from the mempool for exactly that reason.
assert_raises_rpc_error(-26, '64: non-mandatory-script-verify-flag (Non-canonical DER signature)', self.nodes[0].sendrawtransaction, bytes_to_hex_str(spendtx.serialize()), True)
# Now we verify that a block with this transaction is also invalid.
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.nodes[0].p2p.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
with mininode_lock:
# We can receive different reject messages depending on whether
# bitcoind is running with multiple script check threads. If script
# check threads are not in use, then transaction script validation
# happens sequentially, and bitcoind produces more specific reject
# reasons.
assert self.nodes[0].p2p.last_message["reject"].code in [REJECT_INVALID, REJECT_NONSTANDARD]
assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
# Generic rejection when a block is invalid
assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'block-validation-failed')
else:
assert b'Non-canonical DER signature' in self.nodes[0].p2p.last_message["reject"].reason
self.log.info("Test that a version 3 block with a DERSIG-compliant transaction is accepted")
block.vtx[1] = create_transaction(self.nodes[0],
self.coinbase_blocks[1], self.nodeaddress, 1.0)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.nodes[0].p2p.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
if __name__ == '__main__':
BIP66Test().main()
| mit |
mbrookes/material-ui | docs/src/pages/components/tables/SpanningTable.tsx | 2888 | import * as React from 'react';
import Table from '@material-ui/core/Table';
import TableBody from '@material-ui/core/TableBody';
import TableCell from '@material-ui/core/TableCell';
import TableContainer from '@material-ui/core/TableContainer';
import TableHead from '@material-ui/core/TableHead';
import TableRow from '@material-ui/core/TableRow';
import Paper from '@material-ui/core/Paper';
const TAX_RATE = 0.07;
function ccyFormat(num: number) {
return `${num.toFixed(2)}`;
}
function priceRow(qty: number, unit: number) {
return qty * unit;
}
function createRow(desc: string, qty: number, unit: number) {
const price = priceRow(qty, unit);
return { desc, qty, unit, price };
}
interface Row {
desc: string;
qty: number;
unit: number;
price: number;
}
function subtotal(items: readonly Row[]) {
return items.map(({ price }) => price).reduce((sum, i) => sum + i, 0);
}
const rows = [
createRow('Paperclips (Box)', 100, 1.15),
createRow('Paper (Case)', 10, 45.99),
createRow('Waste Basket', 2, 17.99),
];
const invoiceSubtotal = subtotal(rows);
const invoiceTaxes = TAX_RATE * invoiceSubtotal;
const invoiceTotal = invoiceTaxes + invoiceSubtotal;
export default function SpanningTable() {
return (
<TableContainer component={Paper}>
<Table sx={{ minWidth: 700 }} aria-label="spanning table">
<TableHead>
<TableRow>
<TableCell align="center" colSpan={3}>
Details
</TableCell>
<TableCell align="right">Price</TableCell>
</TableRow>
<TableRow>
<TableCell>Desc</TableCell>
<TableCell align="right">Qty.</TableCell>
<TableCell align="right">Unit</TableCell>
<TableCell align="right">Sum</TableCell>
</TableRow>
</TableHead>
<TableBody>
{rows.map((row) => (
<TableRow key={row.desc}>
<TableCell>{row.desc}</TableCell>
<TableCell align="right">{row.qty}</TableCell>
<TableCell align="right">{row.unit}</TableCell>
<TableCell align="right">{ccyFormat(row.price)}</TableCell>
</TableRow>
))}
<TableRow>
<TableCell rowSpan={3} />
<TableCell colSpan={2}>Subtotal</TableCell>
<TableCell align="right">{ccyFormat(invoiceSubtotal)}</TableCell>
</TableRow>
<TableRow>
<TableCell>Tax</TableCell>
<TableCell align="right">{`${(TAX_RATE * 100).toFixed(0)} %`}</TableCell>
<TableCell align="right">{ccyFormat(invoiceTaxes)}</TableCell>
</TableRow>
<TableRow>
<TableCell colSpan={2}>Total</TableCell>
<TableCell align="right">{ccyFormat(invoiceTotal)}</TableCell>
</TableRow>
</TableBody>
</Table>
</TableContainer>
);
}
| mit |
mbrookes/material-ui | docs/src/pages/components/breadcrumbs/IconBreadcrumbs.js | 1385 | import * as React from 'react';
import Typography from '@material-ui/core/Typography';
import Breadcrumbs from '@material-ui/core/Breadcrumbs';
import Link from '@material-ui/core/Link';
import HomeIcon from '@material-ui/icons/Home';
import WhatshotIcon from '@material-ui/icons/Whatshot';
import GrainIcon from '@material-ui/icons/Grain';
function handleClick(event) {
event.preventDefault();
console.info('You clicked a breadcrumb.');
}
export default function IconBreadcrumbs() {
return (
<div role="presentation" onClick={handleClick}>
<Breadcrumbs aria-label="breadcrumb">
<Link
underline="hover"
sx={{ display: 'flex', alignItems: 'center' }}
color="inherit"
href="/"
>
<HomeIcon sx={{ mr: 0.5 }} fontSize="inherit" />
Material-UI
</Link>
<Link
underline="hover"
sx={{ display: 'flex', alignItems: 'center' }}
color="inherit"
href="/getting-started/installation/"
>
<WhatshotIcon sx={{ mr: 0.5 }} fontSize="inherit" />
Core
</Link>
<Typography
sx={{ display: 'flex', alignItems: 'center' }}
color="text.primary"
>
<GrainIcon sx={{ mr: 0.5 }} fontSize="inherit" />
Breadcrumb
</Typography>
</Breadcrumbs>
</div>
);
}
| mit |
BookingSync/ember-data-partial-model | tests/dummy/app/mirage/fixtures/user_extendeds.js | 639 | export default [
{
id: 1,
name: 'joliss',
twitter: "joliss",
clients: [
]
},
{
id: 2,
name: 'igort',
twitter: "terzicigor",
clients: [
]
},
{
id: 3,
name: 'tchak',
twitter: "tchak13",
clients: [
]
},
{
id: 4,
name: 'wecc',
twitter: "ChristofferP",
clients: [
]
},
{
id: 5,
name: 'bmac',
twitter: "BezoMaxo",
clients: [
]
},
{
id: 6,
name: 'teddyzeenny',
twitter: "teddyzeenny",
clients: [
]
},
{
id: 7,
name: 'zencocoon',
twitter: "sebgrosjean",
clients: [
]
}
];
| mit |
stivalet/PHP-Vulnerability-test-suite | URF/CWE_601/unsafe/CWE_601__object-Array__func_htmlentities__header_file_name-sprintf_%s_simple_quote.php | 1642 | <?php
/*
Unsafe sample
input : get the field userData from the variable $_GET via an object, which store it in a array
sanitize : use of the function htmlentities. Sanitizes the query but has a high chance to produce unexpected results
construction : use of sprintf via a %s with simple quote
*/
/*Copyright 2015 Bertrand STIVALET
Permission is hereby granted, without written agreement or royalty fee, to
use, copy, modify, and distribute this software and its documentation for
any purpose, provided that the above copyright notice and the following
three paragraphs appear in all copies of this software.
IN NO EVENT SHALL AUTHORS BE LIABLE TO ANY PARTY FOR DIRECT,
INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF AUTHORS HAVE
BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
AUTHORS SPECIFICALLY DISCLAIM ANY WARRANTIES INCLUDING, BUT NOT
LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE, AND NON-INFRINGEMENT.
THE SOFTWARE IS PROVIDED ON AN "AS-IS" BASIS AND AUTHORS HAVE NO
OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
MODIFICATIONS.*/
class Input{
private $input;
public function getInput(){
return $this->input[1];
}
public function __construct(){
$this->input = array();
$this->input[0]= 'safe' ;
$this->input[1]= $_GET['UserData'] ;
$this->input[2]= 'safe' ;
}
}
$temp = new Input();
$tainted = $temp->getInput();
$tainted = htmlentities($tainted, ENT_QUOTES);
//flaw
$var = header(sprintf("Location: '%s'.php", $tainted));
?> | mit |
dwivivagoal/KuizMilioner | application/libraries/php-google-sdk/google/apiclient-services/src/Google/Service/CloudVideoIntelligence/GoogleCloudVideointelligenceV1beta1VideoContext.php | 1840 | <?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
class Google_Service_CloudVideoIntelligence_GoogleCloudVideointelligenceV1beta1VideoContext extends Google_Collection
{
protected $collection_key = 'segments';
public $labelDetectionMode;
public $labelDetectionModel;
protected $segmentsType = 'Google_Service_CloudVideoIntelligence_GoogleCloudVideointelligenceV1beta1VideoSegment';
protected $segmentsDataType = 'array';
public $shotChangeDetectionModel;
public function setLabelDetectionMode($labelDetectionMode)
{
$this->labelDetectionMode = $labelDetectionMode;
}
public function getLabelDetectionMode()
{
return $this->labelDetectionMode;
}
public function setLabelDetectionModel($labelDetectionModel)
{
$this->labelDetectionModel = $labelDetectionModel;
}
public function getLabelDetectionModel()
{
return $this->labelDetectionModel;
}
public function setSegments($segments)
{
$this->segments = $segments;
}
public function getSegments()
{
return $this->segments;
}
public function setShotChangeDetectionModel($shotChangeDetectionModel)
{
$this->shotChangeDetectionModel = $shotChangeDetectionModel;
}
public function getShotChangeDetectionModel()
{
return $this->shotChangeDetectionModel;
}
}
| mit |
stivalet/PHP-Vulnerability-test-suite | Injection/CWE_90/safe/CWE_90__backticks__whitelist_using_array__userByMail-concatenation_simple_quote.php | 1510 | <?php
/*
Safe sample
input : backticks interpretation, reading the file /tmp/tainted.txt
SANITIZE : use in_array to check if $tainted is in the white list
construction : concatenation with simple quote
*/
/*Copyright 2015 Bertrand STIVALET
Permission is hereby granted, without written agreement or royalty fee, to
use, copy, modify, and distribute this software and its documentation for
any purpose, provided that the above copyright notice and the following
three paragraphs appear in all copies of this software.
IN NO EVENT SHALL AUTHORS BE LIABLE TO ANY PARTY FOR DIRECT,
INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF AUTHORS HAVE
BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
AUTHORS SPECIFICALLY DISCLAIM ANY WARRANTIES INCLUDING, BUT NOT
LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE, AND NON-INFRINGEMENT.
THE SOFTWARE IS PROVIDED ON AN "AS-IS" BASIS AND AUTHORS HAVE NO
OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
MODIFICATIONS.*/
$tainted = `cat /tmp/tainted.txt`;
$legal_table = array("safe1", "safe2");
if (in_array($tainted, $legal_table, true)) {
$tainted = $tainted;
} else {
$tainted = $legal_table[0];
}
$query = "(&(objectCategory=person)(objectClass=user)(mail='". $tainted . "'))";
$ds=ldap_connect("localhost");
$r=ldap_bind($ds);
$sr=ldap_search($ds,"o=My Company, c=US", $query);
ldap_close($ds);
?> | mit |
sirprize/dojo-form-controls | MappedTextbox.js | 2394 | define([
"dojo/_base/declare",
"mijit/_WidgetBase",
"mijit/_TemplatedMixin",
"dojo/dom-construct"
], function (
declare,
_WidgetBase,
_TemplatedMixin,
domConstruct
) {
return declare([_WidgetBase, _TemplatedMixin], {
templateString: '<input type="text" data-dojo-attach-point="containerNode">',
name: '',
valueNode: null, // <input type="hidden"> holding the serialized value
_setNameAttr: function (name) {
if (this.valueNode) {
this.valueNode.name = name;
}
this._set('name', name);
},
_setValueAttr: function (v) {
var v = this._parseValue(v),
oldVal = this.get('value');
if (this.valueNode) {
this.valueNode.value = this._serializeValue(v);
}
this.domNode.value = this._formatValue(v);
this._set('value', v);
if (oldVal !== v) {
this.onChange(v);
}
},
_parseValue: function (v) {
return v;
},
_serializeValue: function (v) {
return v;
},
_formatValue: function (v) {
return v;
},
_attrToDom: function (attr, value, commands) {
// summary:
// the name must be set on the hidden field holding the serialized date to be submitted by a form.
// here we make sure that _WidgetBase::_attrToDom() doesn't set it on domNOde
if (attr !== 'name') {
this.inherited(arguments);
}
},
onChange: function (newValue) {},
_getDisplayValueAttr: function () {
return this.domNode.value;
},
startup: function () {
this.inherited(arguments);
this.valueNode = domConstruct.create('input', {
type: 'hidden',
name: this.get('name'),
value: this._serializeValue(this.get('value'))
}, this.domNode, 'after');
},
destroy: function () {
domConstruct.destroy(this.valueNode);
this.inherited(arguments);
}
});
}); | mit |
github/codeql | csharp/extractor/Semmle.Extraction.CIL/Entities/Attribute.cs | 3246 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection.Metadata;
namespace Semmle.Extraction.CIL.Entities
{
/// <summary>
/// Entity representing a CIL attribute.
/// </summary>
internal sealed class Attribute : UnlabelledEntity
{
private readonly CustomAttributeHandle handle;
private readonly CustomAttribute attrib;
private readonly IExtractedEntity @object;
public Attribute(Context cx, IExtractedEntity @object, CustomAttributeHandle handle) : base(cx)
{
attrib = cx.MdReader.GetCustomAttribute(handle);
this.handle = handle;
this.@object = @object;
}
public override bool Equals(object? obj)
{
return obj is Attribute attribute && handle.Equals(attribute.handle);
}
public override int GetHashCode() => handle.GetHashCode();
public override IEnumerable<IExtractionProduct> Contents
{
get
{
var constructor = (Method)Context.Create(attrib.Constructor);
yield return constructor;
yield return Tuples.cil_attribute(this, @object, constructor);
CustomAttributeValue<Type> decoded;
try
{
decoded = attrib.DecodeValue(new CustomAttributeDecoder(Context));
}
catch
{
Context.Extractor.Logger.Log(Util.Logging.Severity.Info,
$"Attribute decoding is partial. Decoding attribute {constructor.DeclaringType.GetQualifiedName()} failed on {@object}.");
yield break;
}
for (var index = 0; index < decoded.FixedArguments.Length; ++index)
{
var stringValue = GetStringValue(decoded.FixedArguments[index].Type, decoded.FixedArguments[index].Value);
yield return Tuples.cil_attribute_positional_argument(this, index, stringValue);
}
foreach (var p in decoded.NamedArguments)
{
var stringValue = GetStringValue(p.Type, p.Value);
yield return Tuples.cil_attribute_named_argument(this, p.Name!, stringValue);
}
}
}
private static string GetStringValue(Type type, object? value)
{
if (value is System.Collections.Immutable.ImmutableArray<CustomAttributeTypedArgument<Type>> values)
{
return "[" + string.Join(",", values.Select(v => GetStringValue(v.Type, v.Value))) + "]";
}
if (type.GetQualifiedName() == "System.Type" &&
value is Type t)
{
return t.GetQualifiedName();
}
return value?.ToString() ?? "null";
}
public static IEnumerable<IExtractionProduct> Populate(Context cx, IExtractedEntity @object, CustomAttributeHandleCollection attributes)
{
foreach (var attrib in attributes)
{
yield return new Attribute(cx, @object, attrib);
}
}
}
}
| mit |
Vardius/menu-bundle | Twig/MenuExtension.php | 1149 | <?php
/**
* This file is part of the vardius/menu-bundle package.
*
* Created by Rafał Lorenz <vardius@gmail.com>.
*/
namespace Vardius\Bundle\MenuBundle\Twig;
use Vardius\Bundle\MenuBundle\Manager\MenuManager;
/**
* Vardius\Bundle\MenuBundle\Twig\MenuExtension
*
* @author Rafał Lorenz <vardius@gmail.com>
*/
class MenuExtension extends \Twig_Extension
{
/** @var MenuManager */
protected $menuManager;
/**
* @param MenuManager $menuManager
*/
function __construct(MenuManager $menuManager)
{
$this->menuManager = $menuManager;
}
/**
* @return array
*/
public function getFilters()
{
return array(
new \Twig_SimpleFilter('vardius_menu', array($this, 'renderMenu')),
);
}
/**
* @param $name
* @param string $view
* @return string
*/
public function renderMenu($name, $view = null)
{
$menu = $this->menuManager->getMenu($name);
return $menu->render($view);
}
/**
* @return string
*/
public function getName()
{
return 'vardius_menu_extension';
}
}
| mit |
jesusreal/project-feature-sets-manager | app/src/views/project/projectActiveComponentsFactory.js | 5572 | (function () {
'use strict';
angular
.module('projectFeatureSetsManager')
.factory('ProjectActiveComponents', ProjectActiveComponentsFactory);
function ProjectActiveComponentsFactory (PROJECT_ACTIVE_COMPONENTS_DATA, FeatureSet, FeatureSetActiveComponents) {
return {
get: function(projectId){
var projectComponentsData = [];
var amountOfRows = PROJECT_ACTIVE_COMPONENTS_DATA.length;
for (var i=0; i<amountOfRows; i++) {
var projectIdMatch = (PROJECT_ACTIVE_COMPONENTS_DATA[i].projectId === projectId);
if (projectIdMatch) {
projectComponentsData.push(PROJECT_ACTIVE_COMPONENTS_DATA[i]);
}
}
return angular.copy(projectComponentsData);
},
getResourcesIdForComponentId: function(componentId){
var projectsId = [];
var amountOfRows = PROJECT_ACTIVE_COMPONENTS_DATA.length;
for (var i=0; i<amountOfRows; i++) {
var componentIdMatch = (PROJECT_ACTIVE_COMPONENTS_DATA[i].componentId === componentId);
if (componentIdMatch) {
projectsId.push(PROJECT_ACTIVE_COMPONENTS_DATA[i].projectId);
}
}
return angular.copy(projectsId);
},
insert: function(projectComponentsData, projectId) {
// projectComponentsData.id = Math.ceil(Math.random() * 1e3);
var amountOfComponentsToInsert = projectComponentsData.length;
for (var i=0; i<amountOfComponentsToInsert; i++) {
if (projectId!==undefined && !projectComponentsData[i].projectId) {
projectComponentsData[i].projectId = projectId;
}
PROJECT_ACTIVE_COMPONENTS_DATA.push(projectComponentsData[i]);
console.log(projectComponentsData[i]);
}
},
update: function(componentsToUpdate) {
var projectId = componentsToUpdate[0].projectId;
var amountOfRows = PROJECT_ACTIVE_COMPONENTS_DATA.length;
var returnVal = false;
var componentsId = [];
for (var i=0; i<componentsToUpdate.length; i++) {
componentsId.push(componentsToUpdate[i].componentId)
}
for (var i=0; i<amountOfRows; i++) {
var projectIdMatch = (PROJECT_ACTIVE_COMPONENTS_DATA[i].projectId === projectId);
var currentComponentId = PROJECT_ACTIVE_COMPONENTS_DATA[i].componentId;
var currentComponentIndexInComponentsToUpdate = componentsId.indexOf(currentComponentId);
var componentIdMatch = (currentComponentIndexInComponentsToUpdate !== -1);
if (projectIdMatch && componentIdMatch) {
PROJECT_ACTIVE_COMPONENTS_DATA[i].state = componentsToUpdate[currentComponentIndexInComponentsToUpdate].state;
returnVal = true;
}
}
return returnVal;
},
remove: function(componentsToRemove) {
console.log("ProjectActiveComponents::remove. BEFORE removing: " + PROJECT_ACTIVE_COMPONENTS_DATA.length);
var returnVal = false;
var componentsRemovedFromProject = false;
var projectId = componentsToRemove[0].projectId;
var componentsId = [];
for (var i=0; i<componentsToRemove.length; i++) {
componentsId.push(componentsToRemove[i].componentId)
}
for (var i=0; i<PROJECT_ACTIVE_COMPONENTS_DATA.length; i++) {
var projectIdMatch = (PROJECT_ACTIVE_COMPONENTS_DATA[i].projectId === projectId);
var currentComponentId = PROJECT_ACTIVE_COMPONENTS_DATA[i].componentId;
var componentIdMatch = (componentsId.indexOf(currentComponentId) !== -1);
if (projectIdMatch && componentIdMatch) {
PROJECT_ACTIVE_COMPONENTS_DATA.splice(i,1);
i = i - 1;
componentsRemovedFromProject = true;
}
}
console.log("ProjectActiveComponents::remove. AFTER removing: " + PROJECT_ACTIVE_COMPONENTS_DATA.length);
if (componentsRemovedFromProject) {
var featureSetsId = FeatureSet.getFeatureSetsIdForProject(projectId);
if (featureSetsId.length > 0){
var componentsRemovedFromFeatureSet =
FeatureSetActiveComponents.removeAfterRemovingFromProject(componentsId, featureSetsId);
if (componentsRemovedFromFeatureSet){
returnVal = true;
}
}
}
return returnVal;
},
removeAllForProject: function(projectId) {
console.log("ProjectActiveComponents::removeAllForProject. BEFORE removing: " + PROJECT_ACTIVE_COMPONENTS_DATA.length);
var returnVal = false;
for (var i=0; i<PROJECT_ACTIVE_COMPONENTS_DATA.length; i++) {
var projectIdMatch = (PROJECT_ACTIVE_COMPONENTS_DATA[i].projectId === projectId);
if (projectIdMatch) {
PROJECT_ACTIVE_COMPONENTS_DATA.splice(i,1);
i = i - 1;
returnVal = true;
}
}
console.log("ProjectActiveComponents::removeAllForProject. AFTER removing: " + PROJECT_ACTIVE_COMPONENTS_DATA.length);
return returnVal;
},
removeAfterRemovingFromGlobalTemplate: function(componentsId) {
console.log("ProjectActiveComponents::removeAfterRemovingFromGlobalTemplate. BEFORE removing: " +
PROJECT_ACTIVE_COMPONENTS_DATA.length);
for (var i=0; i<PROJECT_ACTIVE_COMPONENTS_DATA.length; i++) {
var currentComponentId = PROJECT_ACTIVE_COMPONENTS_DATA[i].componentId;
var componentIdMatch = (componentsId.indexOf(currentComponentId) !== -1);
if (componentIdMatch) {
PROJECT_ACTIVE_COMPONENTS_DATA.splice(i,1);
i = i - 1;
}
}
console.log("ProjectActiveComponents::removeAfterRemovingFromGlobalTemplate. AFTER removing: " +
PROJECT_ACTIVE_COMPONENTS_DATA.length);
FeatureSetActiveComponents.removeAfterRemovingFromGlobalTemplate(componentsId);
},
}
};
})(); | mit |
NobbZ/redox | kernel/schemes/debug.rs | 2732 | use alloc::boxed::Box;
use collections::string::String;
use scheduler::context::{context_switch, context_i, contexts_ptr};
use scheduler;
use schemes::{KScheme, Resource, Url};
use syscall::handle;
/// A debug resource
pub struct DebugResource {
pub scheme: *mut DebugScheme,
pub command: String,
pub line_toggle: bool,
}
impl Resource for DebugResource {
fn dup(&self) -> Option<Box<Resource>> {
Some(box DebugResource {
scheme: self.scheme,
command: self.command.clone(),
line_toggle: self.line_toggle,
})
}
fn url(&self) -> Url {
return Url::from_str("debug:");
}
fn read(&mut self, buf: &mut [u8]) -> Option<usize> {
if self.line_toggle {
self.line_toggle = false;
return Some(0);
}
if self.command.is_empty() {
loop {
unsafe {
let reenable = scheduler::start_no_ints();
// Hack!
if (*self.scheme).context >= (*contexts_ptr).len() ||
(*self.scheme).context < context_i {
(*self.scheme).context = context_i;
}
if (*self.scheme).context == context_i && (*::console).command.is_some() {
if let Some(ref command) = (*::console).command {
self.command = command.clone();
}
(*::console).command = None;
break;
}
scheduler::end_no_ints(reenable);
context_switch(false);
}
}
}
// TODO: Unicode
let mut i = 0;
while i < buf.len() && ! self.command.is_empty() {
buf[i] = unsafe { self.command.as_mut_vec().remove(0) };
i += 1;
}
if i > 0 && self.command.is_empty() {
self.line_toggle = true;
}
Some(i)
}
fn write(&mut self, buf: &[u8]) -> Option<usize> {
unsafe {
handle::do_sys_debug(buf.as_ptr(), buf.len());
}
return Some(buf.len());
}
fn sync(&mut self) -> bool {
true
}
}
pub struct DebugScheme {
pub context: usize,
}
impl DebugScheme {
pub fn new() -> Box<Self> {
box DebugScheme { context: 0 }
}
}
impl KScheme for DebugScheme {
fn scheme(&self) -> &str {
"debug"
}
fn open(&mut self, _: &Url, _: usize) -> Option<Box<Resource>> {
Some(box DebugResource {
scheme: self,
command: String::new(),
line_toggle: false,
})
}
}
| mit |
klpdotorg/dubdubdub | unittests/ivrs/tests.py | 7170 | from django.test import TestCase
from django.contrib.auth.models import Group
from rest_framework.test import APIRequestFactory
from users.models import User
from ivrs.api_views import SMSView
class SMSViewTests(TestCase):
def setUp(self):
user, created = User.objects.get_or_create(
email="testing@klp.org.in",
mobile_no="1234567890",
)
group = Group.objects.get(name="BFC")
group.user_set.add(user)
def test_reply_for_valid_input(self):
"""
SMSView should return a certain reply to be sent when it receives
a valid input
"""
print "Testing for valid input"
view = SMSView.as_view()
factory = APIRequestFactory()
request = factory.get(
'/api/v1/sms/',
{
'SmsSid':'1',
'From':'01234567890',
'To':'08039514048',
'Date':'2016-07-12 15:16:48',
'Body':'24657,1,1,1,2,2',
},
content_type='text/plain',
)
response = view(request)
self.assertEqual(
response.data,
'Response accepted. Your message was: 24657,1,1,1,2,2 received at: 2016-07-12 15:16:48'
)
def test_reply_for_invalid_input(self):
"""
SMSView should return a certain reply to be sent when it receives
an invalid input
"""
print "Testing for invalid input"
view = SMSView.as_view()
factory = APIRequestFactory()
bodies = [
'24657,1,1,1,2,2,2',
'wat',
'24657,1,2,,',
'24657,1,1,',
]
for body in bodies:
print "Testing input: " + body
request = factory.get(
'/api/v1/sms/',
{
'SmsSid':'2',
'From':'01234567890',
'To':'08039514048',
'Date':'2016-07-12 15:16:48',
'Body':body,
},
content_type='text/plain',
)
response = view(request)
self.assertEqual(
response.data,
'Error. Your response: ' + body + '. Expected response: 3885,1,1,1,2,2'
)
def test_reply_for_invalid_school_id(self):
"""
SMSView should return a certain reply to be sent when it receives
an invalid school_id
"""
print "Testing for invalid school_id"
view = SMSView.as_view()
factory = APIRequestFactory()
#body = 'ID 20,1,1,1,2,2'
bodies = [
'0,1,1,1,2,2',
u'ID 20,1,1,1,2,2',
]
for body in bodies:
print "Testing input: " + body
request = factory.get(
'/api/v1/sms/',
{
u'SmsSid':u'2',
u'From':u'01234567890',
u'To':u'08039514048',
u'Date':u"'2016-07-12 15:16:48'",
u'Body':body,
},
content_type='text/plain',
)
response = view(request)
self.assertEqual(
response.data,
'School ID ' + body.split(',').pop(0) + ' not found.'
)
def test_reply_for_blank_telephone(self):
"""
SMSView should return a certain reply to be sent when it receives
a blank telephone
"""
print "Testing for blank telephone"
view = SMSView.as_view()
factory = APIRequestFactory()
body = '0,1,1,1,2,2'
request = factory.get(
'/api/v1/sms/',
{
'SmsSid':'2',
'From':'',
'To':'08039514048',
'Date':'2016-07-12 15:16:48',
'Body':body,
},
content_type='text/plain',
)
response = view(request)
self.assertEqual(
response.data,
'Invalid phone number.'
)
def test_reply_for_invalid_answer_to_specific_question_number(self):
"""
SMSView should return a certain reply to be sent when it receives
an invalid input
"""
print "Testing for invalid input to specific question number"
view = SMSView.as_view()
factory = APIRequestFactory()
bodies = [
'24657,1,4,1,2,2',
'24657,1,1,4,2,2',
'24657,1,1,2,4,2',
'24657,1,1,1,2,4',
]
for count, body in enumerate(bodies):
print "Testing input: " + body
request = factory.get(
'/api/v1/sms/',
{
'SmsSid':'2',
'From':'01234567890',
'To':'08039514048',
'Date':'2016-07-12 15:16:48',
'Body':body,
},
content_type='text/plain',
)
response = view(request)
self.assertEqual(
response.data,
'Error at que.no: ' + str(count+2) + '. Your response was ' + body
)
def test_reply_for_logical_error(self):
"""
SMSView should return a certain reply to be sent when it receives
data that has a logical error.
"""
print "Testing for logical error in input"
view = SMSView.as_view()
factory = APIRequestFactory()
body = '24657,1,3,1,2,1'
print "Testing input: " + body
request = factory.get(
'/api/v1/sms/',
{
'SmsSid':'2',
'From':'01234567890',
'To':'08039514048',
'Date':'2016-07-12 15:16:48',
'Body':body,
},
content_type='text/plain',
)
response = view(request)
self.assertEqual(
response.data,
'Logical error.'
)
def test_reply_for_unregistered_number(self):
"""
SMSView should accept the SMS when it receives
data from a number that is not registered.
"""
print "Testing for unregistered number"
view = SMSView.as_view()
factory = APIRequestFactory()
body = '24657,1,1,1,2,2'
print "Testing input: " + body
request = factory.get(
'/api/v1/sms/',
{
'SmsSid':'2',
'From':'0111111',
'To':'08039514048',
'Date':'2016-07-12 15:16:48',
'Body':body,
},
content_type='text/plain',
)
response = view(request)
user = User.objects.get(mobile_no="111111")
self.assertEqual(user.mobile_no, "111111")
self.assertEqual(user.groups.get().name, "EV")
self.assertEqual(user.email, "dummy_111111@klp.org.in")
self.assertEqual(
response.data,
'Response accepted. Your message was: 24657,1,1,1,2,2 received at: 2016-07-12 15:16:48'
)
| mit |
tonygalmiche/is_plastigray | is_copy_other_database.py | 107155 | # -*- coding: utf-8 -*-
from openerp import tools
from openerp import api, fields, models, _
from openerp.exceptions import ValidationError, Warning
import xmlrpclib
from openerp.osv import osv
import unicodedata
from openerp import SUPERUSER_ID
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
import logging
_logger = logging.getLogger(__name__)
class is_database(models.Model):
_name = 'is.database'
_order='name'
name = fields.Char('Site' , required=True)
ip_server = fields.Char('Adresse IP' , required=False)
port_server = fields.Integer('Port' , required=False)
database = fields.Char('Base de données', required=False)
login = fields.Char('Login' , required=False)
password = fields.Char('Mot de passe' , required=False)
is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
@api.multi
def copy_other_database(self, obj):
try:
cr , uid, context = self.env.args
class_name=obj.__class__.__name__
database_lines = self.env['is.database'].search([])
for database in database_lines:
if database.database:
_logger.info(u'database='+str(database.database))
if class_name=='res.partner':
if obj.id==obj.is_adr_facturation.id:
raise osv.except_osv('Client recursif 2 !','')
DB = database.database
USERID = SUPERUSER_ID
DBLOGIN = database.login
USERPASS = database.password
DB_SERVER = database.ip_server
DB_PORT = database.port_server
sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
vals=False
if class_name=='res.partner':
vals = self.get_partner_vals(obj, DB, USERID, USERPASS, sock)
if vals:
ids = sock.execute(DB, USERID, USERPASS, class_name, 'search', [('is_database_origine_id', '=', obj.id),'|',('active','=',True),('active','=',False)], {})
if not ids:
if class_name=='res.partner':
search=[
('name' , '=', obj.name),
('parent_id' , '=', obj.parent_id.id or False),
('is_code' , '=', obj.is_code),
('is_adr_code', '=', obj.is_adr_code),
'|',('active','=',True),('active','=',False)
]
ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', search, {})
if ids:
sock.execute(DB, USERID, USERPASS, class_name, 'write', ids, vals, {})
created_id = ids[0]
else:
created_id = sock.execute(DB, USERID, USERPASS, class_name, 'create', vals, {})
except Exception as e:
raise osv.except_osv('Client recursif !','')
return True
@api.model
def get_state_id(self, state , DB, USERID, USERPASS, sock):
state_ids = sock.execute(DB, USERID, USERPASS, 'res.country.state', 'search', [('name', '=', state.name)], {})
if state_ids:
return state_ids[0]
else:
state_vals = {'name':state.name, 'code':state.code, 'country_id':state.country_id and state.country_id.id or False}
new_state_id = sock.execute(DB, USERID, USERPASS, 'res.country.state', 'create', state_vals, {})
return new_state_id
@api.model
def get_title(self, title , DB, USERID, USERPASS, sock):
title_ids = sock.execute(DB, USERID, USERPASS, 'res.partner.title', 'search', [('name', '=', title.name)], {})
if title_ids:
return title_ids[0]
else:
title_vals = {'name':title.name, 'shortcut':title.shortcut}
new_title_id = sock.execute(DB, USERID, USERPASS, 'res.partner.title', 'create', title_vals, {})
return new_title_id
@api.model
def get_is_secteur_activite(self, obj , DB, USERID, USERPASS, sock):
ids = sock.execute(DB, USERID, USERPASS, 'is.secteur.activite', 'search', [('is_database_origine_id', '=', obj.id)], {})
if not ids:
obj.copy_other_database_secteur_activite()
ids = sock.execute(DB, USERID, USERPASS, 'is.secteur.activite', 'search', [('is_database_origine_id', '=', obj.id)], {})
if ids:
return ids[0]
return False
@api.model
def create_check_categ(self, category, DB, USERID, USERPASS, sock):
category_ids = sock.execute(DB, USERID, USERPASS, 'res.partner.category', 'search', [('name', '=', category.name)], {})
if category_ids:
return category_ids[0]
else:
category_vals = {'name':category.name, 'parent_id':category.parent_id and self.create_check_categ(category.parent_id, DB, USERID, USERPASS, sock) or False}
categoty_id = sock.execute(DB, USERID, USERPASS, 'res.partner.category', 'create', category_vals, {})
return categoty_id
@api.model
def _get_category_id(self, category_line_ids, DB, USERID, USERPASS, sock):
categ_lst = []
for category in category_line_ids:
n_categ_id = self.create_check_categ(category, DB, USERID, USERPASS, sock)
categ_lst.append(n_categ_id)
return [(6, 0, categ_lst)]
def _get_child_ids(self, child_ids, DB, USERID, USERPASS, sock):
new_child_ids = []
flag = False
for child in child_ids:
dest_child_ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', child.id),'|',('active','=',True),('active','=',False)], {})
if dest_child_ids:
new_child_ids.append(dest_child_ids[0])
else:
child_vals = self.get_partner_vals(child, DB, USERID, USERPASS, sock)
child_created_id = sock.execute(DB, USERID, USERPASS, 'res.partner', 'create', child_vals, {})
new_child_ids.append(child_created_id)
return [(6,0,new_child_ids)]
@api.model
def get_partner_is_adr_facturation(self, partner, DB, USERID, USERPASS, sock):
partner_obj = self.pool.get('res.partner')
try:
ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', partner.id),'|',('active','=',True),('active','=',False)], {})
if not ids:
self.copy_other_database(partner)
ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', partner.id),'|',('active','=',True),('active','=',False)], {})
if ids:
return ids[0]
return False
except Exception as e:
raise osv.except_osv('Client recursif !','')
@api.model
def get_partner_parent_id(self, partner, DB, USERID, USERPASS, sock):
ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', partner.id),'|',('active','=',True),('active','=',False)], {})
if not ids:
parent_id=False
if partner.id:
parent_id=self.get_partner_parent_id(partner, DB, USERID, USERPASS, sock)
search=[
('name' , '=', partner.name),
('parent_id' , '=', parent_id),
('is_code' , '=', partner.is_code),
('is_adr_code', '=', partner.is_adr_code),
'|',('active','=',True),('active','=',False)
]
ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', search, {})
if ids:
id=ids[0]
else:
vals = self.get_partner_vals(partner, DB, USERID, USERPASS, sock)
id = sock.execute(DB, USERID, USERPASS, 'res.partner', 'create', vals, {})
return id
def get_is_transporteur_id(self, obj, DB, USERID, USERPASS, sock):
ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', obj.id),'|',('active','=',True),('active','=',False)], {})
if not ids:
self.copy_other_database(obj)
ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', obj.id),'|',('active','=',True),('active','=',False)], {})
if ids:
return ids[0]
return False
def get_is_type_contact(self, obj , DB, USERID, USERPASS, sock):
is_type_contact_ids = sock.execute(DB, USERID, USERPASS, 'is.type.contact', 'search', [('is_database_origine_id', '=', obj.id)], {})
if is_type_contact_ids:
return is_type_contact_ids[0]
else:
vals = {'name':tools.ustr(obj.name), 'is_database_origine_id':obj.id}
is_type_contact = sock.execute(DB, USERID, USERPASS, 'is.type.contact', 'create', vals, {})
return is_type_contact
def get_is_incoterm(self, obj, DB, USERID, USERPASS, sock):
is_incoterm_ids = sock.execute(DB, USERID, USERPASS, 'stock.incoterms', 'search', [('name', '=', tools.ustr(obj.name))], {})
if is_incoterm_ids:
return is_incoterm_ids[0]
else:
vals = {'name':tools.ustr(obj.name),'code':obj.code, 'active':obj.active}
is_incoterm = sock.execute(DB, USERID, USERPASS, 'stock.incoterms', 'create', vals, {})
return is_incoterm
def get_is_rib_id(self, obj , DB, USERID, USERPASS, sock):
is_rib_id = sock.execute(DB, USERID, USERPASS, 'res.partner.bank', 'search', [('acc_number', '=', obj.is_rib_id.acc_number)], {})
if is_rib_id:
return is_rib_id[0]
return False
def get_is_type_reglement(self, obj , DB, USERID, USERPASS, sock):
_logger.info(u'get_is_type_reglement : code='+str(obj.is_type_reglement.code))
res = sock.execute(DB, USERID, USERPASS, 'account.journal', 'search', [('code', '=', obj.is_type_reglement.code)], {})
if res:
return res[0]
return False
def get_user_id(self, obj , DB, USERID, USERPASS, sock):
user_id = sock.execute(DB, USERID, USERPASS, 'res.users', 'search', [('login', '=', obj.user_id.login)], {})
if user_id:
return user_id[0]
return False
def get_is_segment_achat(self, obj , DB, USERID, USERPASS, sock):
is_segment_achat_ids = sock.execute(DB, USERID, USERPASS, 'is.segment.achat', 'search', [('is_database_origine_id', '=', obj.id)], {})
if is_segment_achat_ids:
return is_segment_achat_ids[0]
else:
vals = {'name':tools.ustr(obj.name),'description':tools.ustr(obj.description), 'is_database_origine_id':obj.id}
is_segment_achat = sock.execute(DB, USERID, USERPASS, 'is.segment.achat', 'create', vals, {})
return is_segment_achat
def get_is_famille_achat_ids(self, obj_ids , DB, USERID, USERPASS, sock):
lst_is_famille_achat_ids = []
for obj in obj_ids:
famille_achat_ids = sock.execute(DB, USERID, USERPASS, 'is.famille.achat', 'search', [('is_database_origine_id', '=', obj.id)], {})
if famille_achat_ids:
lst_is_famille_achat_ids.append(famille_achat_ids[0])
else:
vals = {'is_database_origine_id':obj.id,'name':tools.ustr(obj.name),'description':obj.description, 'segment_id':self.get_is_segment_achat(obj.segment_id , DB, USERID, USERPASS, sock)}
is_famille_achat = sock.execute(DB, USERID, USERPASS, 'is.famille.achat', 'create', vals, {})
lst_is_famille_achat_ids.append(is_famille_achat)
return [(6,0,lst_is_famille_achat_ids)]
def get_is_site_livre_ids(self, obj_ids , DB, USERID, USERPASS, sock):
lst_site_livre_ids = []
for obj in obj_ids:
is_site_livre_ids = sock.execute(DB, USERID, USERPASS, 'is.site', 'search', [('is_database_origine_id', '=', obj.id)], {})
if is_site_livre_ids:
lst_site_livre_ids.append(is_site_livre_ids[0])
else:
vals = {'name':tools.ustr(obj.name), 'is_database_origine_id':obj.id}
lst_site_livre_id = sock.execute(DB, USERID, USERPASS, 'is.site', 'create', vals, {})
lst_site_livre_ids.append(lst_site_livre_id)
return [(6,0,lst_site_livre_ids)]
def get_is_transmission_cde(self, obj, DB, USERID, USERPASS, sock):
is_transmission_cde_ids = sock.execute(DB, USERID, USERPASS, 'is.transmission.cde', 'search', [('is_database_origine_id', '=', obj.id)], {})
if is_transmission_cde_ids:
return is_transmission_cde_ids[0]
else:
vals = {'name':tools.ustr(obj.name), 'is_database_origine_id':obj.id}
is_transmission_cde = sock.execute(DB, USERID, USERPASS, 'is.transmission.cde', 'create', vals, {})
return is_transmission_cde
return False
def get_is_norme(self, obj, DB, USERID, USERPASS, sock):
is_norme_ids = sock.execute(DB, USERID, USERPASS, 'is.norme.certificats', 'search', [('is_database_origine_id', '=', obj.id)], {})
if is_norme_ids:
return is_norme_ids[0]
else:
vals = {'name':tools.ustr(obj.name), 'is_database_origine_id':obj.id}
is_norme = sock.execute(DB, USERID, USERPASS, 'is.norme.certificats', 'create', vals, {})
return is_norme
return False
def get_is_certifications(self, obj_ids, DB, USERID, USERPASS, sock):
lst_is_certifications = []
for obj in obj_ids:
is_certifications_ids = sock.execute(DB, USERID, USERPASS, 'is.certifications.qualite', 'search', [('is_database_origine_id', '=', obj.id)], {})
if is_certifications_ids:
lst_is_certifications.append(is_certifications_ids[0])
else:
vals = {'is_norme':obj.is_norme and self.get_is_norme(obj.is_norme, DB, USERID, USERPASS, sock) or False,
'is_date_validation':obj.is_date_validation,
'is_database_origine_id':obj.id,
}
is_certifications = sock.execute(DB, USERID, USERPASS, 'is.certifications.qualite', 'create', vals, {})
lst_is_certifications.append(is_certifications)
return [(6,0,lst_is_certifications)]
def get_is_database_line_ids(self, partner , DB, USERID, USERPASS, sock):
lst_is_database_line_ids = []
obj_ids = partner.is_database_line_ids
for obj in obj_ids:
is_database_line_ids = sock.execute(DB, USERID, USERPASS, 'is.database', 'search', [('is_database_origine_id', '=', obj.id)], {})
if is_database_line_ids:
lst_is_database_line_ids.append(is_database_line_ids[0])
else:
vals = {'name':obj.name,
'is_database_origine_id':obj.id,
}
is_database_line_id = sock.execute(DB, USERID, USERPASS, 'is.database', 'create', vals, {})
lst_is_database_line_ids.append(is_database_line_id)
return [(6,0,lst_is_database_line_ids)]
@api.model
def get_partner_vals(self, partner, DB, USERID, USERPASS, sock):
partner_vals = {
'name': tools.ustr(partner.name),
'is_raison_sociale2' : partner.is_raison_sociale2,
'is_code' : partner.is_code,
'is_adr_code' : partner.is_adr_code,
'category_id' : partner.category_id and self._get_category_id(partner.category_id, DB, USERID, USERPASS, sock)or [],
'is_company' : partner.is_company,
'street' : partner.street,
'street2' : partner.street2,
'is_rue3' : partner.is_rue3,
'city' : partner.city,
'state_id' : partner.state_id and self.get_state_id(partner.state_id, DB, USERID, USERPASS, sock) or False,
'zip' : partner.zip,
'country_id' : partner.country_id.id or False,
'is_adr_facturation' : partner.is_adr_facturation and self.get_partner_is_adr_facturation(partner.is_adr_facturation, DB, USERID, USERPASS, sock) or False,
'website' : partner.website,
'function' : partner.function,
'phone' : partner.phone,
'mobile' : partner.mobile,
'fax' : partner.fax,
'email' : partner.email,
'title' : partner.title and self.get_title(partner.title , DB, USERID, USERPASS, sock) or False,
'is_secteur_activite': partner.is_secteur_activite and self.get_is_secteur_activite(partner.is_secteur_activite , DB, USERID, USERPASS, sock) or False,
'customer' : partner.customer,
'supplier' : partner.supplier,
'is_database_origine_id': partner.id,
'is_raison_sociale2' : partner.is_raison_sociale2,
'is_code' : partner.is_code,
'is_adr_code' : partner.is_adr_code,
'is_rue3' : partner.is_rue3,
'is_type_contact' : partner.is_type_contact and self.get_is_type_contact(partner.is_type_contact , DB, USERID, USERPASS, sock) or False,
'is_adr_groupe' : partner.is_adr_groupe,
'is_cofor' : partner.is_cofor,
'is_num_siret' : partner.is_num_siret,
'is_code_client' : partner.is_code_client,
'is_segment_achat' : partner.is_segment_achat and self.get_is_segment_achat(partner.is_segment_achat , DB, USERID, USERPASS, sock) or False,
'is_famille_achat_ids' : partner.is_famille_achat_ids and self.get_is_famille_achat_ids(partner.is_famille_achat_ids , DB, USERID, USERPASS, sock) or False,
'is_fournisseur_imp' : partner.is_fournisseur_imp,
'is_fournisseur_da_fg' : partner.is_fournisseur_da_fg,
'is_site_livre_ids' : partner.is_site_livre_ids and self.get_is_site_livre_ids(partner.is_site_livre_ids , DB, USERID, USERPASS, sock) or False,
'is_groupage' : partner.is_groupage,
'is_tolerance_delai' : partner.is_tolerance_delai,
'is_nb_jours_tolerance' : partner.is_nb_jours_tolerance,
'is_tolerance_quantite' : partner.is_tolerance_quantite,
'is_transmission_cde' : partner.is_transmission_cde and self.get_is_transmission_cde(partner.is_transmission_cde , DB, USERID, USERPASS, sock) or False,
'is_certifications' : partner.is_certifications and self.get_is_certifications(partner.is_certifications , DB, USERID, USERPASS, sock) or False,
'is_adr_liv_sur_facture' : partner.is_adr_liv_sur_facture,
'is_num_autorisation_tva': partner.is_num_autorisation_tva,
'is_caracteristique_bl' : partner.is_caracteristique_bl,
'is_mode_envoi_facture' : partner.is_mode_envoi_facture,
'is_database_line_ids' : self.get_is_database_line_ids(partner, DB, USERID, USERPASS, sock) or False,
'vat' : partner.vat,
'property_account_position' : partner.property_account_position.id,
'property_payment_term' : partner.property_payment_term.id,
'property_supplier_payment_term' : partner.property_supplier_payment_term.id,
'is_escompte' : partner.is_escompte.id,
'is_type_reglement' : partner.is_type_reglement and self.get_is_type_reglement(partner, DB, USERID, USERPASS, sock) or False,
'is_rib_id' : partner.is_rib_id and self.get_is_rib_id(partner, DB, USERID, USERPASS, sock) or False,
'user_id' : partner.user_id and self.get_user_id(partner, DB, USERID, USERPASS, sock) or False,
'active' : partner.active,
}
db_ids = self.env['is.database'].search([('database','=',DB)])
if db_ids:
is_database_line_ids = partner_vals.get('is_database_line_ids',[]) and partner_vals.get('is_database_line_ids',[])[0][2]
database_rec = sock.execute(DB, USERID, USERPASS, 'is.database', 'read', is_database_line_ids,['is_database_origine_id','name'], {})
origin_db_ids = []
if database_rec:
for db_rec in database_rec:
if db_rec.get('is_database_origine_id',False):
origin_db_ids.append(db_rec.get('is_database_origine_id'))
if db_ids[0].id not in origin_db_ids:
partner_vals.update({'active':False})
if partner.is_company:
partner_vals.update({'child_ids':partner.child_ids and self._get_child_ids(partner.child_ids, DB, USERID, USERPASS, sock) or [] })
return partner_vals
@api.multi
def write(self, vals):
try:
res=super(is_database, self).write(vals)
for obj in self:
if obj.database:
obj.copy_other_database_is_database()
return res
except Exception as e:
raise osv.except_osv(_('database!'),
_('(%s).') % str(e).decode('utf-8'))
@api.model
def create(self, vals):
try:
obj=super(is_database, self).create(vals)
obj.copy_other_database_is_database()
return obj
except Exception as e:
raise osv.except_osv(_('database!'),
_('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_is_database(self):
        """Replicate each ``is.database`` record in ``self`` to every remote
        database declared in ``is.database`` over XML-RPC.

        Per destination: the remote copy is matched by
        ``is_database_origine_id`` first, then by ``name``; a match is
        updated in place, otherwise a new remote record is created.
        """
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for obj in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                # NOTE(review): authenticates as SUPERUSER_ID with the stored
                # password; the 'login' field is only used in the guard above
                # — confirm this is intended.
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                vals = self.get_is_database_vals(obj, DB, USERID, USERPASS, sock)
                # Match by origin id first, then fall back to the name.
                ids = sock.execute(DB, USERID, USERPASS, 'is.database', 'search', [('is_database_origine_id', '=', obj.id)], {})
                if not ids:
                    ids = sock.execute(DB, USERID, USERPASS, 'is.database', 'search', [('name', '=', obj.name)], {})
                if ids:
                    sock.execute(DB, USERID, USERPASS, 'is.database', 'write', ids, vals, {})
                    created_id = ids[0]
                else:
                    created_id = sock.execute(DB, USERID, USERPASS, 'is.database', 'create', vals, {})
        return True
@api.model
def get_is_database_vals(self, obj, DB, USERID, USERPASS, sock):
vals ={
'name' : tools.ustr(obj.name),
'is_database_origine_id' : obj.id,
}
return vals
class res_partner(models.Model):
    """Partner replicated across the remote databases: keeps the id of the
    record it originates from and the list of sites it is shared with."""
    _inherit = 'res.partner'

    is_database_origine_id = fields.Integer("Id d'origine", readonly=True, select=True)
    is_database_line_ids = fields.Many2many('is.database','partner_database_rel','partner_id','database_id', string="Sites")

    @api.multi
    def write(self, vals):
        """Write the partner(s), then replicate each one to the remote
        databases.

        Also guards against a partner referencing itself as its own
        invoicing address (``is_adr_facturation``), which would make the
        record recursive.
        """
        for obj in self:
            _logger.info(u'write : partner='+str(obj.is_code)+u'/'+str(obj.is_adr_code))
            if 'is_adr_facturation' in vals:
                # Reset the invoicing address when it points to the record itself.
                if vals['is_adr_facturation']==obj.id:
                    vals['is_adr_facturation']=False
            else:
                if obj.is_adr_facturation.id==obj.id:
                    vals['is_adr_facturation']=False
        try:
            # Fix: write once for the whole recordset (the original called
            # super().write(vals) once per record, re-writing all records N
            # times and leaving 'res' unbound for an empty recordset), then
            # replicate each record individually.
            res = super(res_partner, self).write(vals)
            for obj in self:
                self.env['is.database'].copy_other_database(obj)
            return res
        except Exception as e:
            # NOTE(review): every failure (including replication errors) is
            # reported as a recursion error; the original message is lost.
            raise osv.except_osv('Client recursif !','')

    @api.model
    def create(self, vals):
        """Create the partner, then replicate it to the remote databases."""
        try:
            obj=super(res_partner, self).create(vals)
            self.env['is.database'].copy_other_database(obj)
        except Exception as e:
            raise osv.except_osv('Client recursif !','')
        return obj
class is_mold_project(models.Model):
    """Mold project replicated to the remote databases declared in
    ``is.database``; ``is_database_origine_id`` links a remote copy back
    to the local record it was created from."""
    _inherit = 'is.mold.project'
    # Id of the record this one was replicated from (0 on the origin side).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate every record to the remote databases.
        try:
            res=super(is_mold_project, self).write(vals)
            for obj in self:
                obj.copy_other_database_project()
            return res
        except Exception as e:
            raise osv.except_osv(_('Project!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.model
    def create(self, vals):
        # Create locally, then propagate the new record to the remote databases.
        try:
            obj=super(is_mold_project, self).create(vals)
            obj.copy_other_database_project()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Project!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_project(self):
        """Replicate each project in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        project_obj = self.env['is.mold.project']
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for project in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                project_vals = self.get_project_vals(project, DB, USERID, USERPASS, sock)
                # Update the remote copy when it exists, otherwise create it.
                ids = sock.execute(DB, USERID, USERPASS, 'is.mold.project', 'search', [('is_database_origine_id', '=', project.id)], {})
                if not ids:
                    ids = sock.execute(DB, USERID, USERPASS, 'is.mold.project', 'search', [('name', '=', project.name)], {})
                if ids:
                    sock.execute(DB, USERID, USERPASS, 'is.mold.project', 'write', ids, project_vals, {})
                    created_id = ids[0]
                else:
                    created_id = sock.execute(DB, USERID, USERPASS, 'is.mold.project', 'create', project_vals, {})
        return True
    @api.model
    def get_project_vals(self, project, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.mold.project'; relational fields
        # are remapped to the ids of the corresponding remote records.
        project_vals = {
            'name': project.name,
            'client_id' : self._get_client_id(project, DB, USERID, USERPASS, sock),
            'chef_projet_id' : self._get_chef_projet_id(project, DB, USERID, USERPASS, sock),
            'choix_modele' : project.choix_modele,
            'mold_ids' : self._get_mold_ids(project, DB, USERID, USERPASS, sock),
            'is_database_origine_id': project.id,
            'commentaire' : project.commentaire,
        }
        return project_vals
    @api.model
    def _get_client_id(self, project, DB, USERID, USERPASS, sock):
        # Remote partner id for the project's client (active or archived).
        if project.client_id:
            client_ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', project.client_id.id),'|',('active','=',True),('active','=',False)], {})
            if client_ids:
                return client_ids[0]
        return False
    @api.model
    def _get_chef_projet_id(self, project, DB, USERID, USERPASS, sock):
        # Remote user id matched by login (users carry no origin id).
        if project.chef_projet_id:
            chef_projet_ids = sock.execute(DB, USERID, USERPASS, 'res.users', 'search', [('login', '=', project.chef_projet_id.login)], {})
            if chef_projet_ids:
                return chef_projet_ids[0]
        return False
    def _get_mold_ids(self, project, DB, USERID, USERPASS, sock):
        # Remote ids of the project's molds; molds missing remotely are
        # silently skipped (no on-the-fly replication here).
        list_mold_ids =[]
        for mold in project.mold_ids:
            dest_mold_ids = sock.execute(DB, USERID, USERPASS, 'is.mold', 'search', [('is_database_origine_id', '=', mold.id)], {})
            if dest_mold_ids:
                list_mold_ids.append(dest_mold_ids[0])
        return [(6, 0, list_mold_ids)]
class is_dossierf(models.Model):
    """Manufacturing file ('dossier de fabrication') replicated to the
    remote databases declared in ``is.database``."""
    _inherit='is.dossierf'
    is_database_id = fields.Many2one('is.database', "Site")
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        # NOTE(review): unlike sibling classes, no try/except wrapper here —
        # replication errors bubble up unwrapped; confirm this is intended.
        res=super(is_dossierf, self).write(vals)
        for obj in self:
            obj.copy_other_database_dossierf()
        return res
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        obj=super(is_dossierf, self).create(vals)
        obj.copy_other_database_dossierf()
        return obj
    @api.multi
    def copy_other_database_dossierf(self):
        """Replicate each dossier in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for dossierf in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                dossierf_vals = self.get_dossierf_vals(dossierf, DB, USERID, USERPASS, sock)
                # Update the remote copy when it exists, otherwise create it.
                ids = sock.execute(DB, USERID, USERPASS, 'is.dossierf', 'search', [('is_database_origine_id', '=', dossierf.id)], {})
                if not ids:
                    ids = sock.execute(DB, USERID, USERPASS, 'is.dossierf', 'search', [('name', '=', dossierf.name)], {})
                if ids:
                    sock.execute(DB, USERID, USERPASS, 'is.dossierf', 'write', ids, dossierf_vals, {})
                    created_id = ids[0]
                else:
                    created_id = sock.execute(DB, USERID, USERPASS, 'is.dossierf', 'create', dossierf_vals, {})
        return True
    @api.model
    def get_dossierf_vals(self, dossierf, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.dossierf'; relational fields are
        # remapped to remote ids.
        dossierf_vals = {
            'name': dossierf.name,
            'designation':dossierf.designation,
            'project':self._get_project(dossierf, DB, USERID, USERPASS, sock),
            'mold_ids': self._get_mold_ids(dossierf, DB, USERID, USERPASS, sock),
            'is_database_origine_id':dossierf.id,
            'is_database_id':self._get_is_database_id(dossierf, DB, USERID, USERPASS, sock),
        }
        return dossierf_vals
    @api.model
    def _get_project(self, dossierf, DB, USERID, USERPASS, sock):
        # Remote project id; if it does not exist remotely yet, replicate the
        # project first and search again.
        if dossierf.project:
            project_ids = sock.execute(DB, USERID, USERPASS, 'is.mold.project', 'search', [('is_database_origine_id', '=', dossierf.project.id)], {})
            if not project_ids:
                dossierf.project.copy_other_database_project()
                project_ids = sock.execute(DB, USERID, USERPASS, 'is.mold.project', 'search', [('is_database_origine_id', '=', dossierf.project.id)], {})
            if project_ids:
                return project_ids[0]
        return False
    def _get_mold_ids(self, dossierf, DB, USERID, USERPASS, sock):
        # Remote ids of the dossier's molds; molds missing remotely are skipped.
        list_mold_ids =[]
        for mold in dossierf.mold_ids:
            dest_mold_ids = sock.execute(DB, USERID, USERPASS, 'is.mold', 'search', [('is_database_origine_id', '=', mold.id)], {})
            if dest_mold_ids:
                list_mold_ids.append(dest_mold_ids[0])
        return [(6, 0, list_mold_ids)]
    @api.model
    def _get_is_database_id(self, dossierf, DB, USERID, USERPASS, sock):
        # Remote id of the linked site, matched by origin id.
        if dossierf.is_database_id:
            ids = sock.execute(DB, USERID, USERPASS, 'is.database', 'search', [('is_database_origine_id', '=', dossierf.is_database_id.id)], {})
            if ids:
                return ids[0]
        return False
class is_mold(models.Model):
    """Mold replicated to the remote databases declared in ``is.database``."""
    _inherit = 'is.mold'
    is_database_id = fields.Many2one('is.database', "Site")
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        res=super(is_mold, self).write(vals)
        for obj in self:
            obj.copy_other_database_mold()
        return res
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        obj=super(is_mold, self).create(vals)
        obj.copy_other_database_mold()
        return obj
    @api.multi
    def copy_other_database_mold(self):
        """Replicate each mold in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for mold in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                mold_vals = self.get_mold_vals(mold, DB, USERID, USERPASS, sock)
                # Update the remote copy when it exists, otherwise create it.
                ids = sock.execute(DB, USERID, USERPASS, 'is.mold', 'search', [('is_database_origine_id', '=', mold.id)], {})
                if not ids:
                    ids = sock.execute(DB, USERID, USERPASS, 'is.mold', 'search', [('name', '=', mold.name)], {})
                if ids:
                    sock.execute(DB, USERID, USERPASS, 'is.mold', 'write', ids, mold_vals, {})
                    created_id = ids[0]
                else:
                    created_id = sock.execute(DB, USERID, USERPASS, 'is.mold', 'create', mold_vals, {})
        return True
    @api.model
    def get_mold_vals(self, mold, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.mold'; relational fields are
        # remapped to remote ids.
        mold_vals = {
            'name' : mold.name,
            'designation' : mold.designation,
            'project' : self._get_project(mold, DB, USERID, USERPASS, sock),
            'dossierf_id' : self._get_dossierf_id(mold, DB, USERID, USERPASS, sock),
            'dossierf_ids' : self._get_dossierf_ids(mold, DB, USERID, USERPASS, sock),
            'nb_empreintes' : mold.nb_empreintes,
            'moule_a_version' : mold.moule_a_version,
            'lieu_changement' : mold.lieu_changement,
            'temps_changement' : mold.temps_changement,
            'nettoyer' : mold.nettoyer,
            'nettoyer_vis' : mold.nettoyer_vis,
            'date_creation' : mold.date_creation,
            'date_fin' : mold.date_fin,
            'mouliste_id' : self._get_mouliste_id(mold, DB, USERID, USERPASS, sock),
            'carcasse' : mold.carcasse,
            'emplacement' : mold.emplacement or '',
            'type_dateur' : mold.type_dateur,
            'dateur_specifique': mold.dateur_specifique,
            'date_peremption' : mold.date_peremption,
            'qt_dans_moule' : mold.qt_dans_moule,
            'diametre_laiton' : mold.diametre_laiton,
            'diametre_fleche' : mold.diametre_fleche,
            'is_database_origine_id': mold.id,
            'is_database_id' : self._get_is_database_id(mold, DB, USERID, USERPASS, sock),
        }
        return mold_vals
    @api.model
    def _get_dossierf_id(self, mold, DB, USERID, USERPASS, sock):
        # Remote dossier id; replicate it first when missing remotely.
        if mold.dossierf_id:
            dossierf_ids = sock.execute(DB, USERID, USERPASS, 'is.dossierf', 'search', [('is_database_origine_id', '=', mold.dossierf_id.id)], {})
            # Fix: the original tested "if dossierf_ids:" here, replicating
            # the dossier only when it already existed remotely and never
            # when it was missing — inverted w.r.t. every sibling helper
            # (_get_project, _get_mouliste_id, is_famille_achat.get_segment_id).
            if not dossierf_ids:
                mold.dossierf_id.copy_other_database_dossierf()
                dossierf_ids = sock.execute(DB, USERID, USERPASS, 'is.dossierf', 'search', [('is_database_origine_id', '=', mold.dossierf_id.id)], {})
            if dossierf_ids:
                return dossierf_ids[0]
        return False
    def _get_dossierf_ids(self, mold, DB, USERID, USERPASS, sock):
        # Remote ids of the mold's dossiers; missing ones are skipped.
        list_dossierf_ids =[]
        for dossierf in mold.dossierf_ids:
            dest_dossierf_ids = sock.execute(DB, USERID, USERPASS, 'is.dossierf', 'search', [('is_database_origine_id', '=', dossierf.id)], {})
            if dest_dossierf_ids:
                list_dossierf_ids.append(dest_dossierf_ids[0])
        return [(6, 0, list_dossierf_ids)]
    @api.model
    def _get_project(self, mold, DB, USERID, USERPASS, sock):
        # Remote project id; replicate it first when missing remotely.
        if mold.project:
            project_ids = sock.execute(DB, USERID, USERPASS, 'is.mold.project', 'search', [('is_database_origine_id', '=', mold.project.id)], {})
            if not project_ids:
                mold.project.copy_other_database_project()
                project_ids = sock.execute(DB, USERID, USERPASS, 'is.mold.project', 'search', [('is_database_origine_id', '=', mold.project.id)], {})
            if project_ids:
                return project_ids[0]
        return False
    @api.model
    def _get_mouliste_id(self, mold, DB, USERID, USERPASS, sock):
        # Remote partner id of the mold maker (active or archived);
        # replicate the partner first when missing remotely.
        if mold.mouliste_id:
            mouliste_ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', mold.mouliste_id.id),'|',('active','=',True),('active','=',False)], {})
            if not mouliste_ids:
                self.env['is.database'].copy_other_database(mold.mouliste_id)
                mouliste_ids = sock.execute(DB, USERID, USERPASS, 'res.partner', 'search', [('is_database_origine_id', '=', mold.mouliste_id.id),'|',('active','=',True),('active','=',False)], {})
            if mouliste_ids:
                return mouliste_ids[0]
        return False
    @api.model
    def _get_is_database_id(self, mold, DB, USERID, USERPASS, sock):
        # Remote id of the linked site, matched by origin id.
        if mold.is_database_id:
            ids = sock.execute(DB, USERID, USERPASS, 'is.database', 'search', [('is_database_origine_id', '=', mold.is_database_id.id)], {})
            if ids:
                return ids[0]
        return False
class is_segment_achat(models.Model):
    """Purchasing segment replicated to the remote databases declared in
    ``is.database``."""
    _inherit = 'is.segment.achat'
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        try:
            res=super(is_segment_achat, self).write(vals)
            for obj in self:
                obj.copy_other_database_segment_achat()
            return res
        except Exception as e:
            raise osv.except_osv(_('Segment!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        try:
            obj=super(is_segment_achat, self).create(vals)
            obj.copy_other_database_segment_achat()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Segment!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_segment_achat(self):
        """Replicate each segment in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for segment in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                segment_achat_vals = self.get_segment_achat_vals(segment, DB, USERID, USERPASS, sock)
                # Update the remote copy when it exists, otherwise create it.
                dest_segment_achat_ids = sock.execute(DB, USERID, USERPASS, 'is.segment.achat', 'search', [('is_database_origine_id', '=', segment.id)], {})
                if not dest_segment_achat_ids:
                    dest_segment_achat_ids = sock.execute(DB, USERID, USERPASS, 'is.segment.achat', 'search', [('name', '=', segment.name)], {})
                if dest_segment_achat_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.segment.achat', 'write', dest_segment_achat_ids, segment_achat_vals, {})
                    segment_achat_created_id = dest_segment_achat_ids[0]
                else:
                    segment_achat_created_id = sock.execute(DB, USERID, USERPASS, 'is.segment.achat', 'create', segment_achat_vals, {})
        return True
    def _get_family_line(self, segment, DB, USERID, USERPASS, sock):
        # NOTE(review): dead code — not referenced by get_segment_achat_vals
        # or anything else visible in this file.
        lines = []
        for family_line in segment.family_line:
            lines.append(((0, 0, {'name':tools.ustr(family_line.name), 'description': family_line.description,})))
        return lines
    @api.model
    def get_segment_achat_vals(self, segment, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.segment.achat' record.
        segment_achat_vals ={
            'name' : tools.ustr(segment.name),
            'description': tools.ustr(segment.description),
            'is_database_origine_id':segment.id,
        }
        return segment_achat_vals
class is_famille_achat(models.Model):
    """Purchasing family replicated to the remote databases declared in
    ``is.database``."""
    _inherit = 'is.famille.achat'
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        try:
            res=super(is_famille_achat, self).write(vals)
            for obj in self:
                obj.copy_other_database_famille_achat()
            return res
        except Exception as e:
            raise osv.except_osv(_('Famille!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        try:
            obj=super(is_famille_achat, self).create(vals)
            obj.copy_other_database_famille_achat()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Famille!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_famille_achat(self):
        """Replicate each family in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for famille in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                famille_achat_vals = self.get_famille_achat_vals(famille, DB, USERID, USERPASS, sock)
                # Update the remote copy when it exists, otherwise create it.
                dest_famille_achat_ids = sock.execute(DB, USERID, USERPASS, 'is.famille.achat', 'search', [('is_database_origine_id', '=', famille.id)], {})
                if not dest_famille_achat_ids:
                    dest_famille_achat_ids = sock.execute(DB, USERID, USERPASS, 'is.famille.achat', 'search', [('name', '=', famille.name)], {})
                if dest_famille_achat_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.famille.achat', 'write', dest_famille_achat_ids, famille_achat_vals, {})
                    famille_achat_created_id = dest_famille_achat_ids[0]
                else:
                    famille_achat_created_id = sock.execute(DB, USERID, USERPASS, 'is.famille.achat', 'create', famille_achat_vals, {})
        return True
    @api.model
    def get_segment_id(self, famille, DB, USERID, USERPASS, sock):
        # Remote segment id; replicate the segment first when missing remotely.
        if famille.segment_id:
            segment_ids = sock.execute(DB, USERID, USERPASS, 'is.segment.achat', 'search', [('is_database_origine_id', '=', famille.segment_id.id)], {})
            if not segment_ids:
                famille.segment_id.copy_other_database_segment_achat()
                segment_ids = sock.execute(DB, USERID, USERPASS, 'is.segment.achat', 'search', [('is_database_origine_id', '=', famille.segment_id.id)], {})
            if segment_ids:
                return segment_ids[0]
        return False
    @api.model
    def get_famille_achat_vals(self, famille, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.famille.achat' record.
        famille_achat_vals ={
            'name' : tools.ustr(famille.name),
            'description': tools.ustr(famille.description),
            'segment_id' : self.get_segment_id(famille, DB, USERID, USERPASS, sock),
            'is_database_origine_id':famille.id,
        }
        return famille_achat_vals
class is_site(models.Model):
    """Site replicated to the remote databases declared in ``is.database``."""
    _inherit = 'is.site'

    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        """Write, then push the modified sites to the remote databases."""
        try:
            result = super(is_site, self).write(vals)
            for record in self:
                record.copy_other_database_is_site()
            return result
        except Exception as e:
            raise osv.except_osv(_('Site!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        """Create, then push the new site to the remote databases."""
        try:
            record = super(is_site, self).create(vals)
            record.copy_other_database_is_site()
            return record
        except Exception as e:
            raise osv.except_osv(_('Site!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_is_site(self):
        """Replicate every site in ``self`` to each configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr, uid, context = self.env.args
        context = dict(context)
        destinations = self.env['is.database'].search([])
        for site in self:
            for destination in destinations:
                # Destinations with incomplete connection settings are skipped.
                incomplete = (not destination.ip_server
                              or not destination.database
                              or not destination.port_server
                              or not destination.login
                              or not destination.password)
                if incomplete:
                    continue
                db_name = destination.database
                user_id = SUPERUSER_ID
                password = destination.password
                sock = xmlrpclib.ServerProxy(
                    'http://%s:%s/xmlrpc/object' % (destination.ip_server, destination.port_server))
                vals = self.get_is_site_vals(site, db_name, user_id, password, sock)
                remote_ids = sock.execute(db_name, user_id, password, 'is.site', 'search',
                                          [('is_database_origine_id', '=', site.id)], {})
                if not remote_ids:
                    remote_ids = sock.execute(db_name, user_id, password, 'is.site', 'search',
                                              [('name', '=', site.name)], {})
                if remote_ids:
                    sock.execute(db_name, user_id, password, 'is.site', 'write', remote_ids, vals, {})
                else:
                    sock.execute(db_name, user_id, password, 'is.site', 'create', vals, {})
        return True

    @api.model
    def get_is_site_vals(self, is_site, DB, USERID, USERPASS, sock):
        """Values pushed to the remote ``is.site`` record."""
        return {
            'name': tools.ustr(is_site.name),
            'is_database_origine_id': is_site.id
        }
class is_transmission_cde(models.Model):
    """Order-transmission mode replicated to the remote databases declared
    in ``is.database``."""
    _inherit = 'is.transmission.cde'
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        try:
            res=super(is_transmission_cde, self).write(vals)
            for obj in self:
                obj.copy_other_database_transmission_cde()
            return res
        except Exception as e:
            raise osv.except_osv(_('Transmission!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        try:
            obj=super(is_transmission_cde, self).create(vals)
            obj.copy_other_database_transmission_cde()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Transmission!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_transmission_cde(self):
        """Replicate each record in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for is_transmission in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                is_transmission_vals = self.get_is_transmission_vals(is_transmission, DB, USERID, USERPASS, sock)
                # Update the remote copy when it exists, otherwise create it.
                dest_is_transmission_ids = sock.execute(DB, USERID, USERPASS, 'is.transmission.cde', 'search', [('is_database_origine_id', '=', is_transmission.id)], {})
                if not dest_is_transmission_ids:
                    dest_is_transmission_ids = sock.execute(DB, USERID, USERPASS, 'is.transmission.cde', 'search', [('name', '=', is_transmission.name)], {})
                if dest_is_transmission_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.transmission.cde', 'write', dest_is_transmission_ids, is_transmission_vals, {})
                    is_transmission_created_id = dest_is_transmission_ids[0]
                else:
                    is_transmission_created_id = sock.execute(DB, USERID, USERPASS, 'is.transmission.cde', 'create', is_transmission_vals, {})
        return True
    @api.model
    def get_is_transmission_vals(self, is_transmission, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.transmission.cde' record.
        is_transmission_vals ={
            'name' : tools.ustr(is_transmission.name),
            'is_database_origine_id':is_transmission.id,
        }
        return is_transmission_vals
class is_norme_certificats(models.Model):
    """Certification standard replicated to the remote databases declared
    in ``is.database``."""
    _inherit = 'is.norme.certificats'
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        try:
            res=super(is_norme_certificats, self).write(vals)
            for obj in self:
                obj.copy_other_database_norme_certificats()
            return res
        except Exception as e:
            raise osv.except_osv(_('Norme!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        try:
            obj=super(is_norme_certificats, self).create(vals)
            obj.copy_other_database_norme_certificats()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Norme!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_norme_certificats(self):
        """Replicate each record in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for norme_certificats in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                norme_certificats_vals = self.get_is_norme_certificats_vals(norme_certificats, DB, USERID, USERPASS, sock)
                # Fix: the original passed the literal 1 instead of USERID on
                # this call — inconsistent with every other sock.execute in
                # the file (it only worked because USERID is SUPERUSER_ID).
                dest_norme_certificats_ids = sock.execute(DB, USERID, USERPASS, 'is.norme.certificats', 'search', [('is_database_origine_id', '=', norme_certificats.id)], {})
                if not dest_norme_certificats_ids:
                    dest_norme_certificats_ids = sock.execute(DB, USERID, USERPASS, 'is.norme.certificats', 'search', [('name', '=', norme_certificats.name)], {})
                if dest_norme_certificats_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.norme.certificats', 'write', dest_norme_certificats_ids, norme_certificats_vals, {})
                    norme_certificats_created_id = dest_norme_certificats_ids[0]
                else:
                    norme_certificats_created_id = sock.execute(DB, USERID, USERPASS, 'is.norme.certificats', 'create', norme_certificats_vals, {})
        return True
    @api.model
    def get_is_norme_certificats_vals(self, norme_certificats, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.norme.certificats' record.
        norme_certificats_vals ={
            'name' : tools.ustr(norme_certificats.name),
            'notation_fournisseur' : norme_certificats.notation_fournisseur,
            'commentaire' : tools.ustr(norme_certificats.commentaire or ''),
            'is_database_origine_id': norme_certificats.id,
        }
        return norme_certificats_vals
class is_certifications_qualite(models.Model):
    """Quality certification replicated to the remote databases declared
    in ``is.database``."""
    _inherit = 'is.certifications.qualite'
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        try:
            res=super(is_certifications_qualite, self).write(vals)
            for obj in self:
                obj.copy_other_database_certifications_qualite()
            return res
        except Exception as e:
            raise osv.except_osv(_('Qualite!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        try:
            obj=super(is_certifications_qualite, self).create(vals)
            obj.copy_other_database_certifications_qualite()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Qualite!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_certifications_qualite(self):
        """Replicate each record in ``self`` to every configured remote
        database over XML-RPC.

        Unlike the sibling classes, matching is done by origin id only —
        there is no fallback search on the name.
        """
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for certifications_qualite in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                certifications_qualite_vals = self.get_is_certifications_qualite_vals(certifications_qualite, DB, USERID, USERPASS, sock)
                dest_certifications_qualite_ids = sock.execute(DB, USERID, USERPASS, 'is.certifications.qualite', 'search', [('is_database_origine_id', '=', certifications_qualite.id)], {})
                if dest_certifications_qualite_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.certifications.qualite', 'write', dest_certifications_qualite_ids, certifications_qualite_vals, {})
                    certifications_qualite_created_id = dest_certifications_qualite_ids[0]
                else:
                    certifications_qualite_created_id = sock.execute(DB, USERID, USERPASS, 'is.certifications.qualite', 'create', certifications_qualite_vals, {})
        return True
    @api.model
    def _get_is_norme(self, certifications_qualite, DB, USERID, USERPASS, sock):
        # Remote standard id; replicate the standard first when missing remotely.
        if certifications_qualite.is_norme:
            is_norme_ids = sock.execute(DB, USERID, USERPASS, 'is.norme.certificats', 'search', [('is_database_origine_id', '=', certifications_qualite.is_norme.id)], {})
            if not is_norme_ids:
                certifications_qualite.is_norme.copy_other_database_norme_certificats()
                is_norme_ids = sock.execute(DB, USERID, USERPASS, 'is.norme.certificats', 'search', [('is_database_origine_id', '=', certifications_qualite.is_norme.id)], {})
            if is_norme_ids:
                return is_norme_ids[0]
        return False
    @api.model
    def _get_certificat_ids(self, certifications_qualite, DB, USERID, USERPASS, sock):
        # (0, 0, vals) commands recreating each attached certificate remotely.
        # NOTE(review): these commands only append — each replication write
        # presumably adds new copies of the certificates instead of replacing
        # the old ones; confirm against the remote field definition.
        certificat_data = []
        for certificat in certifications_qualite.is_certificat_ids:
            certificat_data.append(((0, 0, {'name':tools.ustr(certificat.name), 'datas':certificat.datas, 'res_model':certificat.res_model})))
        return certificat_data
    @api.model
    def get_is_certifications_qualite_vals(self, certifications_qualite, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.certifications.qualite' record.
        certifications_qualite_vals ={
            'is_norme' : self._get_is_norme(certifications_qualite, DB, USERID, USERPASS, sock),
            'is_date_validation':certifications_qualite.is_date_validation,
            'is_certificat_ids':self._get_certificat_ids(certifications_qualite, DB, USERID, USERPASS, sock),
            'is_database_origine_id':certifications_qualite.id,
        }
        return certifications_qualite_vals
class is_facturation_fournisseur_justification(models.Model):
    """Supplier-invoicing justification replicated to the remote databases
    declared in ``is.database``."""
    _inherit='is.facturation.fournisseur.justification'
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        try:
            res=super(is_facturation_fournisseur_justification, self).write(vals)
            for obj in self:
                obj.copy_other_database_fournisseur_justification()
            return res
        except Exception as e:
            raise osv.except_osv(_('Justification!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        try:
            obj=super(is_facturation_fournisseur_justification, self).create(vals)
            obj.copy_other_database_fournisseur_justification()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Justification!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_fournisseur_justification(self):
        """Replicate each record in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for justification in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                justification_vals = self.get_justification_vals(justification, DB, USERID, USERPASS, sock)
                # Update the remote copy when it exists, otherwise create it.
                dest_justification_ids = sock.execute(DB, USERID, USERPASS, 'is.facturation.fournisseur.justification', 'search', [('is_database_origine_id', '=', justification.id)], {})
                if not dest_justification_ids:
                    dest_justification_ids = sock.execute(DB, USERID, USERPASS, 'is.facturation.fournisseur.justification', 'search', [('name', '=', justification.name)], {})
                if dest_justification_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.facturation.fournisseur.justification', 'write', dest_justification_ids, justification_vals, {})
                    justification_created_id = dest_justification_ids[0]
                else:
                    justification_created_id = sock.execute(DB, USERID, USERPASS, 'is.facturation.fournisseur.justification', 'create', justification_vals, {})
        return True
    @api.model
    def get_justification_vals(self, justification, DB, USERID, USERPASS, sock):
        # Values pushed to the remote record.
        justification_vals ={
            'name' : tools.ustr(justification.name),
            'is_database_origine_id':justification.id,
        }
        return justification_vals
class is_secteur_activite(models.Model):
    """Business sector replicated to the remote databases declared in
    ``is.database``."""
    _inherit='is.secteur.activite'
    # Id of the record this one was replicated from.
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    @api.multi
    def write(self, vals):
        # Write locally, then propagate to the remote databases.
        try:
            res=super(is_secteur_activite, self).write(vals)
            for obj in self:
                obj.copy_other_database_secteur_activite()
            return res
        except Exception as e:
            raise osv.except_osv(_('Secteur!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.model
    def create(self, vals):
        # Create locally, then propagate to the remote databases.
        try:
            obj=super(is_secteur_activite, self).create(vals)
            obj.copy_other_database_secteur_activite()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Secteur!'),
                _('(%s).') % str(e).decode('utf-8'))
    @api.multi
    def copy_other_database_secteur_activite(self):
        """Replicate each record in ``self`` to every configured remote
        database over XML-RPC (matched by origin id, then by name)."""
        cr , uid, context = self.env.args
        context = dict(context)
        database_obj = self.env['is.database']
        database_lines = database_obj.search([])
        for activite in self:
            for database in database_lines:
                # Skip destinations whose connection settings are incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                DBLOGIN = database.login
                USERPASS = database.password
                DB_SERVER = database.ip_server
                DB_PORT = database.port_server
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (DB_SERVER, DB_PORT))
                activite_vals = self.get_activite_vals(activite, DB, USERID, USERPASS, sock)
                # Update the remote copy when it exists, otherwise create it.
                dest_activite_ids = sock.execute(DB, USERID, USERPASS, 'is.secteur.activite', 'search', [('is_database_origine_id', '=', activite.id)], {})
                if not dest_activite_ids:
                    dest_activite_ids = sock.execute(DB, USERID, USERPASS, 'is.secteur.activite', 'search', [('name', '=', activite.name)], {})
                if dest_activite_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.secteur.activite', 'write', dest_activite_ids, activite_vals, {})
                    activite_created_id = dest_activite_ids[0]
                else:
                    activite_created_id = sock.execute(DB, USERID, USERPASS, 'is.secteur.activite', 'create', activite_vals, {})
        return True
    @api.model
    def get_activite_vals(self, activite, DB, USERID, USERPASS, sock):
        # Values pushed to the remote 'is.secteur.activite' record.
        activite_vals ={
            'name' : tools.ustr(activite.name),
            'is_database_origine_id':activite.id,
        }
        return activite_vals
class is_type_contact(models.Model):
    """Replicates 'is.type.contact' records to every configured remote
    database over XML-RPC after each create/write."""
    _inherit = 'is.type.contact'

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_type_contact, self).write(vals)
            for obj in self:
                obj.copy_other_database_type_contact()
            return res
        except Exception as e:
            raise osv.except_osv(_('Contact!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_type_contact, self).create(vals)
            obj.copy_other_database_type_contact()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Contact!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_type_contact(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` first, then on ``name``.
        """
        database_lines = self.env['is.database'].search([])
        for type_contact in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                type_contact_vals = self.get_type_contact_vals(type_contact, DB, USERID, USERPASS, sock)
                dest_type_contact_ids = sock.execute(DB, USERID, USERPASS, 'is.type.contact', 'search', [('is_database_origine_id', '=', type_contact.id)], {})
                if not dest_type_contact_ids:
                    dest_type_contact_ids = sock.execute(DB, USERID, USERPASS, 'is.type.contact', 'search', [('name', '=', type_contact.name)], {})
                if dest_type_contact_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.type.contact', 'write', dest_type_contact_ids, type_contact_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.type.contact', 'create', type_contact_vals, {})
        return True

    @api.model
    def get_type_contact_vals(self, type_contact, DB, USERID, USERPASS, sock):
        """Values dictionary sent to the remote database for ``type_contact``."""
        return {
            'name': tools.ustr(type_contact.name),
            'is_database_origine_id': type_contact.id,
        }
class is_escompte(models.Model):
    """Replicates 'is.escompte' records to every configured remote database
    over XML-RPC after each create/write. The linked account is resolved on
    the remote side by its account code."""
    _inherit = 'is.escompte'

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_escompte, self).write(vals)
            for obj in self:
                obj.copy_other_database_is_escompte()
            return res
        except Exception as e:
            raise osv.except_osv(_('escompte!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_escompte, self).create(vals)
            obj.copy_other_database_is_escompte()
            return obj
        except Exception as e:
            raise osv.except_osv(_('escompte!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_is_escompte(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` first, then on ``name``.
        """
        database_lines = self.env['is.database'].search([])
        # NOTE: loop variable renamed from 'is_escompte' — the original
        # shadowed the class name inside this method.
        for escompte in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                is_escompte_vals = self.get_is_escompte_vals(escompte, DB, USERID, USERPASS, sock)
                dest_is_escompte_ids = sock.execute(DB, USERID, USERPASS, 'is.escompte', 'search', [('is_database_origine_id', '=', escompte.id)], {})
                if not dest_is_escompte_ids:
                    dest_is_escompte_ids = sock.execute(DB, USERID, USERPASS, 'is.escompte', 'search', [('name', '=', escompte.name)], {})
                if dest_is_escompte_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.escompte', 'write', dest_is_escompte_ids, is_escompte_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.escompte', 'create', is_escompte_vals, {})
        return True

    @api.model
    def _get_is_escompte_compte(self, is_escompte, DB, USERID, USERPASS, sock):
        """Remote id of the account matching ``is_escompte.compte`` by code,
        or False when unset / not found remotely."""
        if is_escompte.compte:
            dest_compte_ids = sock.execute(DB, USERID, USERPASS, 'account.account', 'search', [('code', '=', is_escompte.compte.code)], {})
            if dest_compte_ids:
                return dest_compte_ids[0]
        return False

    @api.model
    def get_is_escompte_vals(self, is_escompte, DB, USERID, USERPASS, sock):
        """Values dictionary sent to the remote database for ``is_escompte``."""
        return {
            'name': tools.ustr(is_escompte.name),
            'taux': is_escompte.taux,
            'compte': self._get_is_escompte_compte(is_escompte, DB, USERID, USERPASS, sock),
            'is_database_origine_id': is_escompte.id,
        }
class is_type_controle_gabarit(models.Model):
    """Replicates 'is.type.controle.gabarit' records to every configured
    remote database over XML-RPC after each create/write."""
    _inherit = "is.type.controle.gabarit"

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_type_controle_gabarit, self).write(vals)
            for obj in self:
                obj.copy_other_database_controle_gabarit()
            return res
        except Exception as e:
            raise osv.except_osv(_('Gabarit!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_type_controle_gabarit, self).create(vals)
            obj.copy_other_database_controle_gabarit()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Gabarit!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_controle_gabarit(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` first, then on ``name``.
        """
        database_lines = self.env['is.database'].search([])
        for gabarit in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                controle_gabarit_vals = self.get_type_controle_gabarit_vals(gabarit, DB, USERID, USERPASS, sock)
                dest_controle_gabarit_ids = sock.execute(DB, USERID, USERPASS, 'is.type.controle.gabarit', 'search', [('is_database_origine_id', '=', gabarit.id)], {})
                if not dest_controle_gabarit_ids:
                    dest_controle_gabarit_ids = sock.execute(DB, USERID, USERPASS, 'is.type.controle.gabarit', 'search', [('name', '=', gabarit.name)], {})
                if dest_controle_gabarit_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.type.controle.gabarit', 'write', dest_controle_gabarit_ids, controle_gabarit_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.type.controle.gabarit', 'create', controle_gabarit_vals, {})
        return True

    @api.model
    def get_type_controle_gabarit_vals(self, gabarit, DB, USERID, USERPASS, sock):
        """Values dictionary sent to the remote database for ``gabarit``."""
        return {
            'name': tools.ustr(gabarit.name),
            'is_database_origine_id': gabarit.id,
        }
class is_presse_classe(models.Model):
    """Replicates 'is.presse.classe' records to every configured remote
    database over XML-RPC after each create/write."""
    _inherit = 'is.presse.classe'

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_presse_classe, self).write(vals)
            for obj in self:
                obj.copy_other_database_presse_classe()
            return res
        except Exception as e:
            raise osv.except_osv(_('Classe!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_presse_classe, self).create(vals)
            obj.copy_other_database_presse_classe()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Classe!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_presse_classe(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` first, then on ``name``.
        """
        database_lines = self.env['is.database'].search([])
        for classe in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                presse_classe_vals = self.get_presse_classe_vals(classe, DB, USERID, USERPASS, sock)
                dest_presse_classe_ids = sock.execute(DB, USERID, USERPASS, 'is.presse.classe', 'search', [('is_database_origine_id', '=', classe.id)], {})
                if not dest_presse_classe_ids:
                    dest_presse_classe_ids = sock.execute(DB, USERID, USERPASS, 'is.presse.classe', 'search', [('name', '=', classe.name)], {})
                if dest_presse_classe_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.presse.classe', 'write', dest_presse_classe_ids, presse_classe_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.presse.classe', 'create', presse_classe_vals, {})
        return True

    @api.model
    def get_presse_classe_vals(self, classe, DB, USERID, USERPASS, sock):
        """Values dictionary sent to the remote database for ``classe``.

        (The original local variable was named ``controle_gabarit_vals`` —
        a copy-paste leftover from the gabarit model; renamed for clarity.)
        """
        return {
            'name': tools.ustr(classe.name),
            'is_database_origine_id': classe.id,
        }
class is_outillage_constructeur(models.Model):
    """Replicates 'is.outillage.constructeur' records to every configured
    remote database over XML-RPC after each create/write."""
    _inherit = 'is.outillage.constructeur'

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_outillage_constructeur, self).write(vals)
            for obj in self:
                obj.copy_other_database_outillage_constructeur()
            return res
        except Exception as e:
            raise osv.except_osv(_('Constructeur!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_outillage_constructeur, self).create(vals)
            obj.copy_other_database_outillage_constructeur()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Constructeur!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_outillage_constructeur(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` first, then on ``name``.
        """
        database_lines = self.env['is.database'].search([])
        for constructeur in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                outillage_constructeur_vals = self.get_outillage_constructeur_vals(constructeur, DB, USERID, USERPASS, sock)
                dest_outillage_constructeur_ids = sock.execute(DB, USERID, USERPASS, 'is.outillage.constructeur', 'search', [('is_database_origine_id', '=', constructeur.id)], {})
                if not dest_outillage_constructeur_ids:
                    dest_outillage_constructeur_ids = sock.execute(DB, USERID, USERPASS, 'is.outillage.constructeur', 'search', [('name', '=', constructeur.name)], {})
                if dest_outillage_constructeur_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.outillage.constructeur', 'write', dest_outillage_constructeur_ids, outillage_constructeur_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.outillage.constructeur', 'create', outillage_constructeur_vals, {})
        return True

    @api.model
    def get_outillage_constructeur_vals(self, constructeur, DB, USERID, USERPASS, sock):
        """Values dictionary sent to the remote database for ``constructeur``."""
        return {
            'name': tools.ustr(constructeur.name),
            'is_database_origine_id': constructeur.id,
        }
class is_famille_instrument(models.Model):
    """Replicates 'is.famille.instrument' records to every configured remote
    database over XML-RPC after each create/write."""
    _inherit = 'is.famille.instrument'

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_famille_instrument, self).write(vals)
            for obj in self:
                obj.copy_other_database_famille_instrument()
            return res
        except Exception as e:
            raise osv.except_osv(_('Instrument!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_famille_instrument, self).create(vals)
            obj.copy_other_database_famille_instrument()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Instrument!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_famille_instrument(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` first, then on ``name``.
        """
        database_lines = self.env['is.database'].search([])
        for instrument in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                famille_instrument_vals = self.get_famille_instrument_vals(instrument, DB, USERID, USERPASS, sock)
                dest_famille_instrument_ids = sock.execute(DB, USERID, USERPASS, 'is.famille.instrument', 'search', [('is_database_origine_id', '=', instrument.id)], {})
                if not dest_famille_instrument_ids:
                    dest_famille_instrument_ids = sock.execute(DB, USERID, USERPASS, 'is.famille.instrument', 'search', [('name', '=', instrument.name)], {})
                if dest_famille_instrument_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.famille.instrument', 'write', dest_famille_instrument_ids, famille_instrument_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.famille.instrument', 'create', famille_instrument_vals, {})
        return True

    @api.model
    def get_famille_instrument_vals(self, instrument, DB, USERID, USERPASS, sock):
        """Values dictionary sent to the remote database for ``instrument``.

        Text fields are normalized with ``tools.ustr`` (empty string when
        unset); boolean flags are passed through unchanged.
        """
        return {
            'name': tools.ustr(instrument.name or ''),
            'intensive': tools.ustr(instrument.intensive or ''),
            'moyenne': tools.ustr(instrument.moyenne or ''),
            'faible': tools.ustr(instrument.faible or ''),
            'tolerance': tools.ustr(instrument.tolerance or ''),
            'afficher_classe': instrument.afficher_classe,
            'afficher_type': instrument.afficher_type,
            'is_database_origine_id': instrument.id,
        }
class is_type_equipement(models.Model):
    """Replicates 'is.type.equipement' records to every configured remote
    database over XML-RPC after each create/write."""
    _inherit = 'is.type.equipement'

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_type_equipement, self).write(vals)
            for obj in self:
                obj.copy_other_database_type_equipement()
            return res
        except Exception as e:
            raise osv.except_osv(_('Equipement!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_type_equipement, self).create(vals)
            obj.copy_other_database_type_equipement()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Equipement!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_type_equipement(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` first, then on ``name``.
        """
        database_lines = self.env['is.database'].search([])
        for equipement in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                type_equipement_vals = self.get_type_equipement_vals(equipement, DB, USERID, USERPASS, sock)
                dest_type_equipement_ids = sock.execute(DB, USERID, USERPASS, 'is.type.equipement', 'search', [('is_database_origine_id', '=', equipement.id)], {})
                if not dest_type_equipement_ids:
                    dest_type_equipement_ids = sock.execute(DB, USERID, USERPASS, 'is.type.equipement', 'search', [('name', '=', equipement.name)], {})
                if dest_type_equipement_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.type.equipement', 'write', dest_type_equipement_ids, type_equipement_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.type.equipement', 'create', type_equipement_vals, {})
        return True

    @api.model
    def get_type_equipement_vals(self, equipement, DB, USERID, USERPASS, sock):
        """Values dictionary sent to the remote database for ``equipement``."""
        return {
            'name': tools.ustr(equipement.name),
            'is_database_origine_id': equipement.id,
        }
class is_emplacement_outillage(models.Model):
    """Replicates 'is.emplacement.outillage' records to every configured
    remote database over XML-RPC after each create/write."""
    _inherit = "is.emplacement.outillage"

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_emplacement_outillage, self).write(vals)
            for obj in self:
                obj.copy_other_database_emplacement_outillage()
            return res
        except Exception as e:
            raise osv.except_osv(_('Outillage!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_emplacement_outillage, self).create(vals)
            obj.copy_other_database_emplacement_outillage()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Outillage!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_emplacement_outillage(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` first, then on ``name``.
        """
        database_lines = self.env['is.database'].search([])
        for outillage in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                emplacement_outillage_vals = self.get_emplacement_outillage_vals(outillage, DB, USERID, USERPASS, sock)
                dest_emplacement_outillage_ids = sock.execute(DB, USERID, USERPASS, 'is.emplacement.outillage', 'search', [('is_database_origine_id', '=', outillage.id)], {})
                if not dest_emplacement_outillage_ids:
                    dest_emplacement_outillage_ids = sock.execute(DB, USERID, USERPASS, 'is.emplacement.outillage', 'search', [('name', '=', outillage.name)], {})
                if dest_emplacement_outillage_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.emplacement.outillage', 'write', dest_emplacement_outillage_ids, emplacement_outillage_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.emplacement.outillage', 'create', emplacement_outillage_vals, {})
        return True

    @api.model
    def get_emplacement_outillage_vals(self, outillage, DB, USERID, USERPASS, sock):
        """Values dictionary sent to the remote database for ``outillage``."""
        return {
            'name': tools.ustr(outillage.name),
            'is_database_origine_id': outillage.id,
        }
class is_presse(models.Model):
    """Replicates 'is.presse' (press machine) records to every configured
    remote database over XML-RPC after each create/write.

    Relational fields (emplacement, classe commerciale, puissance,
    constructeur, site) are resolved to their remote ids; the remote
    ``active`` flag is True only in the database that owns the press.
    """
    _inherit = 'is.presse'

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    active = fields.Boolean('Active', default=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_presse, self).write(vals)
            for obj in self:
                obj.copy_other_database_is_presse()
            return res
        except Exception as e:
            raise osv.except_osv(_('Presse!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_presse, self).create(vals)
            obj.copy_other_database_is_presse()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Presse!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_is_presse(self):
        """Create or update the counterpart of each press in every remote DB.

        Matching is done on ``is_database_origine_id`` only (no name
        fallback).  The ``'|' active True/False`` clause makes the remote
        search also see archived presses.
        """
        database_lines = self.env['is.database'].search([])
        for presse in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                is_presse_vals = self.get_is_presse_vals(presse, DB, USERID, USERPASS, sock)
                dest_is_presse_ids = sock.execute(DB, USERID, USERPASS, 'is.presse', 'search', [('is_database_origine_id', '=', presse.id),
                    '|', ('active', '=', True), ('active', '=', False)], {})
                if dest_is_presse_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.presse', 'write', dest_is_presse_ids, is_presse_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.presse', 'create', is_presse_vals, {})
        return True

    @api.model
    def get_is_presse_vals(self, presse, DB, USERID, USERPASS, sock):
        """Full values dictionary sent to the remote database for ``presse``.

        Most text-like fields go through ``tools.ustr(x or '')``; booleans
        and numeric fields are passed unchanged; many2one fields are mapped
        to remote ids via the ``_get_*`` helpers below.
        """
        is_presse_vals = {
            'name': tools.ustr(presse.name or ''),
            'designation': tools.ustr(presse.designation or ''),
            'classe': tools.ustr(presse.classe or ''),
            'emplacement': self._get_emplacement(presse, DB, USERID, USERPASS, sock),
            'classe_commerciale': self._get_classe_commerciale(presse, DB, USERID, USERPASS, sock),
            'puissance': self._get_puissance(presse, DB, USERID, USERPASS, sock),
            'puissance_reelle': tools.ustr(presse.puissance_reelle or ''),
            'type_de_presse': tools.ustr(presse.type_de_presse or ''),
            'constructeur': self._get_constructeur(presse, DB, USERID, USERPASS, sock),
            'num_construceur': tools.ustr(presse.num_construceur or ''),
            'type_commande': tools.ustr(presse.type_commande or ''),
            'annee': tools.ustr(presse.annee or ''),
            'energie': presse.energie,
            'volume_tremie': tools.ustr(presse.volume_tremie or ''),
            'volume_alimentateur': tools.ustr(presse.volume_alimentateur or ''),
            'dimension_col_h': tools.ustr(presse.dimension_col_h or ''),
            'dimension_col_v': tools.ustr(presse.dimension_col_v or ''),
            'diametre_colonne': tools.ustr(presse.diametre_colonne or ''),
            'epaisseur_moule': tools.ustr(presse.epaisseur_moule or ''),
            'faux_plateau': presse.faux_plateau,
            'epaisseur_faux_plateau': tools.ustr(presse.epaisseur_faux_plateau or ''),
            'epaisseur_moule_mini': tools.ustr(presse.epaisseur_moule_mini or ''),
            'epaisseur_moule_maxi': tools.ustr(presse.epaisseur_moule_maxi or ''),
            'dimension_plateau_h': tools.ustr(presse.dimension_plateau_h or ''),
            'dimension_plateau_v': tools.ustr(presse.dimension_plateau_v or ''),
            'dimension_hors_tout_haut': tools.ustr(presse.dimension_hors_tout_haut or ''),
            'dimension_hors_tout_bas': tools.ustr(presse.dimension_hors_tout_bas or ''),
            'coefficient_vis': tools.ustr(presse.coefficient_vis or ''),
            'diametre_vis': tools.ustr(presse.diametre_vis or ''),
            'type_clapet': tools.ustr(presse.type_clapet or ''),
            'volume_injectable': tools.ustr(presse.volume_injectable or ''),
            'presse_matiere': tools.ustr(presse.presse_matiere or ''),
            'course_ejection': tools.ustr(presse.course_ejection or ''),
            'course_ouverture': tools.ustr(presse.course_ouverture or ''),
            'diametre_centrage_moule': tools.ustr(presse.diametre_centrage_moule or ''),
            'diametre_centrage_presse': tools.ustr(presse.diametre_centrage_presse or ''),
            'hauteur_porte_sol': tools.ustr(presse.hauteur_porte_sol or ''),
            'bridage_rapide': tools.ustr(presse.bridage_rapide or ''),
            'diametre_bridage': tools.ustr(presse.diametre_bridage or ''),
            'pas_bridage': tools.ustr(presse.pas_bridage or ''),
            'type_huile_hydraulique': tools.ustr(presse.type_huile_hydraulique or ''),
            'volume_reservoir': tools.ustr(presse.volume_reservoir or ''),
            'longueur': presse.longueur,
            'largeur': presse.largeur,
            'hauteur': presse.hauteur,
            'puissance_electrique': tools.ustr(presse.puissance_electrique or ''),
            'type_huile_graissage': presse.type_huile_graissage,
            'puissance_electrique_chauffe': tools.ustr(presse.puissance_electrique_chauffe or ''),
            'nombre_noyau': tools.ustr(presse.nombre_noyau or ''),
            'compensation_cosinus': presse.compensation_cosinus,
            'nb_noyau_pf': tools.ustr(presse.nb_noyau_pf or ''),
            'nb_noyau_pm': tools.ustr(presse.nb_noyau_pm or ''),
            'nombre_circuit_haut': tools.ustr(presse.nombre_circuit_haut or ''),
            'diametre_passage_buse': tools.ustr(presse.diametre_passage_buse or ''),
            'zone_chauffe': tools.ustr(presse.zone_chauffe or ''),
            'poids': tools.ustr(presse.poids or ''),
            'site_id': self._get_site_id(presse, DB, USERID, USERPASS, sock),
            # Active only in the database the press actually belongs to.
            'active': presse.site_id and presse.site_id.database == DB and True or False,
            'is_database_origine_id': presse.id,
        }
        return is_presse_vals

    @api.model
    def _get_site_id(self, presse, DB, USERID, USERPASS, sock):
        """Remote id of the press's site, or False when unset/not found."""
        if presse.site_id:
            ids = sock.execute(DB, USERID, USERPASS, 'is.database', 'search', [('is_database_origine_id', '=', presse.site_id.id)], {})
            if ids:
                return ids[0]
        return False

    @api.model
    def _get_emplacement(self, presse, DB, USERID, USERPASS, sock):
        """Remote id of the emplacement; replicates it first when missing."""
        if presse.emplacement:
            emplacement_ids = sock.execute(DB, USERID, USERPASS, 'is.emplacement.outillage', 'search', [('is_database_origine_id', '=', presse.emplacement.id)], {})
            if not emplacement_ids:
                presse.emplacement.copy_other_database_emplacement_outillage()
                emplacement_ids = sock.execute(DB, USERID, USERPASS, 'is.emplacement.outillage', 'search', [('is_database_origine_id', '=', presse.emplacement.id)], {})
            if emplacement_ids:
                return emplacement_ids[0]
        return False

    @api.model
    def _get_classe_commerciale(self, presse, DB, USERID, USERPASS, sock):
        """Remote id of the commercial class; replicates it first when missing."""
        if presse.classe_commerciale:
            classe_commerciale_ids = sock.execute(DB, USERID, USERPASS, 'is.presse.classe', 'search', [('is_database_origine_id', '=', presse.classe_commerciale.id)], {})
            if not classe_commerciale_ids:
                presse.classe_commerciale.copy_other_database_presse_classe()
                classe_commerciale_ids = sock.execute(DB, USERID, USERPASS, 'is.presse.classe', 'search', [('is_database_origine_id', '=', presse.classe_commerciale.id)], {})
            if classe_commerciale_ids:
                return classe_commerciale_ids[0]
        return False

    @api.model
    def _get_puissance(self, presse, DB, USERID, USERPASS, sock):
        """Remote id of the power rating; replicates it first when missing."""
        if presse.puissance:
            puissance_ids = sock.execute(DB, USERID, USERPASS, 'is.presse.puissance', 'search', [('is_database_origine_id', '=', presse.puissance.id)], {})
            if not puissance_ids:
                presse.puissance.copy_other_database_presse_puissance()
                puissance_ids = sock.execute(DB, USERID, USERPASS, 'is.presse.puissance', 'search', [('is_database_origine_id', '=', presse.puissance.id)], {})
            if puissance_ids:
                return puissance_ids[0]
        return False

    @api.model
    def _get_constructeur(self, presse, DB, USERID, USERPASS, sock):
        """Remote id of the manufacturer; replicates it first when missing."""
        if presse.constructeur:
            constructeur_ids = sock.execute(DB, USERID, USERPASS, 'is.outillage.constructeur', 'search', [('is_database_origine_id', '=', presse.constructeur.id)], {})
            if not constructeur_ids:
                presse.constructeur.copy_other_database_outillage_constructeur()
                constructeur_ids = sock.execute(DB, USERID, USERPASS, 'is.outillage.constructeur', 'search', [('is_database_origine_id', '=', presse.constructeur.id)], {})
            if constructeur_ids:
                return constructeur_ids[0]
        return False
class is_prechauffeur(models.Model):
    """Replicates 'is.prechauffeur' (pre-heater) records to every configured
    remote database over XML-RPC after each create/write.

    Linked presses, molds and sites are resolved to their remote ids; the
    remote ``active`` flag is True only in the owning site's database.
    """
    _inherit = 'is.prechauffeur'

    # Id of the source record in the originating database (used for matching).
    is_database_origine_id = fields.Integer("Id d'origine", readonly=True)
    active = fields.Boolean('Active', default=True)

    @api.multi
    def write(self, vals):
        # Propagate every write to the remote databases; errors become popups.
        try:
            res = super(is_prechauffeur, self).write(vals)
            for obj in self:
                obj.copy_other_database_is_prechauffeur()
            return res
        except Exception as e:
            raise osv.except_osv(_('Prechauffeur!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.model
    def create(self, vals):
        # Replicate the record right after creation.
        try:
            obj = super(is_prechauffeur, self).create(vals)
            obj.copy_other_database_is_prechauffeur()
            return obj
        except Exception as e:
            raise osv.except_osv(_('Prechauffeur!'),
                _('(%s).') % str(e).decode('utf-8'))

    @api.multi
    def copy_other_database_is_prechauffeur(self):
        """Create or update the counterpart of each record in every remote DB.

        Matching is done on ``is_database_origine_id`` only; the
        ``'|' active True/False`` clause also matches archived records.
        """
        database_lines = self.env['is.database'].search([])
        for prechauffeur in self:
            for database in database_lines:
                # Skip remote databases whose connection data is incomplete.
                if not database.ip_server or not database.database or not database.port_server or not database.login or not database.password:
                    continue
                DB = database.database
                USERID = SUPERUSER_ID
                USERPASS = database.password
                sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % (database.ip_server, database.port_server))
                is_prechauffeur_vals = self.get_is_prechauffeur_vals(prechauffeur, DB, USERID, USERPASS, sock)
                dest_is_prechauffeur_ids = sock.execute(DB, USERID, USERPASS, 'is.prechauffeur', 'search', [('is_database_origine_id', '=', prechauffeur.id),
                    '|', ('active', '=', True), ('active', '=', False)], {})
                if dest_is_prechauffeur_ids:
                    sock.execute(DB, USERID, USERPASS, 'is.prechauffeur', 'write', dest_is_prechauffeur_ids, is_prechauffeur_vals, {})
                else:
                    sock.execute(DB, USERID, USERPASS, 'is.prechauffeur', 'create', is_prechauffeur_vals, {})
        return True

    @api.model
    def get_is_prechauffeur_vals(self, prechauffeur, DB, USERID, USERPASS, sock):
        """Full values dictionary sent to the remote database for
        ``prechauffeur``; relational fields mapped via the helpers below."""
        return {
            'name': tools.ustr(prechauffeur.name or ''),
            'presse_id': self._get_presse_id(prechauffeur, DB, USERID, USERPASS, sock),
            'constructeur': tools.ustr(prechauffeur.constructeur or ''),
            'marque': tools.ustr(prechauffeur.marque or ''),
            'type_prechauffeur': tools.ustr(prechauffeur.type_prechauffeur or ''),
            'num_serie': tools.ustr(prechauffeur.num_serie or ''),
            'date_fabrication': prechauffeur.date_fabrication,
            'poids': prechauffeur.poids,
            'longueur': prechauffeur.longueur,
            'largeur': prechauffeur.largeur,
            'hauteur': prechauffeur.hauteur,
            'type_fluide': prechauffeur.type_fluide,
            'temperature_maxi': prechauffeur.temperature_maxi,
            'puissance_installee': prechauffeur.puissance_installee,
            'puissance_chauffe': prechauffeur.puissance_chauffe,
            'puissance_refroidissement': prechauffeur.puissance_refroidissement,
            'debit_maximum': prechauffeur.debit_maximum,
            'pression_maximum': prechauffeur.pression_maximum,
            'commande_deportee': prechauffeur.commande_deportee,
            'option_depression': prechauffeur.option_depression,
            'mesure_debit': prechauffeur.mesure_debit,
            'site_id': self._get_site_id(prechauffeur, DB, USERID, USERPASS, sock),
            'moule_ids': self._get_moule_ids(prechauffeur, DB, USERID, USERPASS, sock),
            # Active only in the database the pre-heater's site belongs to.
            'active': prechauffeur.site_id and prechauffeur.site_id.database == DB and True or False,
            'is_database_origine_id': prechauffeur.id,
        }

    @api.model
    def _get_moule_ids(self, prechauffeur, DB, USERID, USERPASS, sock):
        """Remote many2many command for the linked molds (only those already
        replicated remotely are kept)."""
        ids = []
        for moule in prechauffeur.moule_ids:
            res = sock.execute(DB, USERID, USERPASS, 'is.mold', 'search', [('is_database_origine_id', '=', moule.id)], {})
            if res:
                ids.append(res[0])
        return [(6, 0, ids)]

    @api.model
    def _get_presse_id(self, prechauffeur, DB, USERID, USERPASS, sock):
        """Remote id of the linked press, or False when unset/not found."""
        if prechauffeur.presse_id:
            ids = sock.execute(DB, USERID, USERPASS, 'is.presse', 'search', [('is_database_origine_id', '=', prechauffeur.presse_id.id)], {})
            if ids:
                return ids[0]
        return False

    @api.model
    def _get_site_id(self, prechauffeur, DB, USERID, USERPASS, sock):
        """Remote id of the linked site, or False when unset/not found."""
        if prechauffeur.site_id:
            ids = sock.execute(DB, USERID, USERPASS, 'is.database', 'search', [('is_database_origine_id', '=', prechauffeur.site_id.id)], {})
            if ids:
                return ids[0]
        return False

    @api.model
    def _get_mold_id(self, prechauffeur, DB, USERID, USERPASS, sock):
        """Remote id of ``mold_id``; replicates the mold first when missing.

        NOTE(review): not referenced by get_is_prechauffeur_vals — kept for
        potential external callers.
        """
        if prechauffeur.mold_id:
            is_mold_ids = sock.execute(DB, USERID, USERPASS, 'is.mold', 'search', [('is_database_origine_id', '=', prechauffeur.mold_id.id)], {})
            if not is_mold_ids:
                prechauffeur.mold_id.copy_other_database_mold()
                is_mold_ids = sock.execute(DB, USERID, USERPASS, 'is.mold', 'search', [('is_database_origine_id', '=', prechauffeur.mold_id.id)], {})
            if is_mold_ids:
                return is_mold_ids[0]
        return False
| mit |
co-stig/opencompare | org.opencompare.core/src/org/opencompare/external/ExternalException.java | 395 | package org.opencompare.external;
public class ExternalException extends Exception {
private static final long serialVersionUID = 1L;
public ExternalException() {
}
public ExternalException(String message, Throwable cause) {
super(message, cause);
}
public ExternalException(String message) {
super(message);
}
public ExternalException(Throwable cause) {
super(cause);
}
}
| mit |
DarkRobin/finalproject | public/modules/addtodatabases/config/addtodatabases.client.routes.js | 830 | 'use strict';
//Setting up route
angular.module('addtodatabases').config(['$stateProvider',
function($stateProvider) {
// Addtodatabases state routing
$stateProvider.
state('listAddtodatabases', {
url: '/addtodatabases',
templateUrl: 'modules/addtodatabases/views/list-addtodatabases.client.view.html'
}).
state('createAddtodatabase', {
url: '/addtodatabases/create',
templateUrl: 'modules/addtodatabases/views/create-addtodatabase.client.view.html'
}).
//Chaneged route to my library
state('viewAddtodatabase', {
url: '/mylibrary',
templateUrl: 'modules/addtodatabases/views/view-addtodatabase.client.view.html'
}).
state('editAddtodatabase', {
url: '/addtodatabases/:addtodatabaseId/edit',
templateUrl: 'modules/addtodatabases/views/edit-addtodatabase.client.view.html'
});
}
]); | mit |
lordmos/blink | Source/web/WebGeolocationPosition.cpp | 2444 | /*
* Copyright (C) 2010 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "WebGeolocationPosition.h"
#include "modules/geolocation/GeolocationPosition.h"
using namespace WebCore;
namespace blink {
void WebGeolocationPosition::assign(double timestamp, double latitude, double longitude, double accuracy, bool providesAltitude, double altitude, bool providesAltitudeAccuracy, double altitudeAccuracy, bool providesHeading, double heading, bool providesSpeed, double speed)
{
m_private = GeolocationPosition::create(timestamp, latitude, longitude, accuracy, providesAltitude, altitude, providesAltitudeAccuracy, altitudeAccuracy, providesHeading, heading, providesSpeed, speed);
}
void WebGeolocationPosition::assign(const WebGeolocationPosition& other)
{
m_private = other.m_private;
}
void WebGeolocationPosition::reset()
{
m_private.reset();
}
WebGeolocationPosition& WebGeolocationPosition::operator=(PassRefPtr<GeolocationPosition> position)
{
m_private = position;
return *this;
}
WebGeolocationPosition::operator PassRefPtr<GeolocationPosition>() const
{
return m_private.get();
}
} // namespace blink
| mit |
rzane/zendesk_rails | spec/support/active_model.rb | 259 | shared_examples 'ActiveModel' do
include ActiveModel::Lint::Tests
def model
subject
end
ActiveModel::Lint::Tests.public_instance_methods.map { |m| m.to_s }.grep(/^test/).each do |m|
example m.gsub('_', ' ') do
send m
end
end
end
| mit |
bonusbits/bonusbits_awsapi_proxy | attributes/squid.rb | 199 | default['bonusbits_awsapi_proxy']['squid'].tap do |squid|
squid['user'] = 'squid'
squid['group'] = 'squid'
squid['rpm_filename'] = 'squid-3.5.11-1.el6.x86_64.rpm'
squid['port'] = '3128'
end
| mit |
oenstrom/anax-lite | src/DiceGame/Game.php | 3472 | <?php
namespace Oenstrom\DiceGame;
/**
* Class for handling the Dice game.
*/
class Game
{
/**
* @var integer $player the player "id".
* @var Round $round Round object.
* @var array $playerList List of players.
* @var string $winner The winner.
*/
private $player;
private $round;
private $playerList;
public $winner;
/**
* Constructor
*
* @param integer $number The number of players.
*/
public function __construct($number = 2)
{
$this->round = new \Oenstrom\DiceGame\Round();
$this->player = 0;
$this->winner = null;
$this->playerList = $this->createPlayers($number);
}
/**
* Create all the players.
*
* @param int $number The number of players to create.
*
* @return array as the player list.
*/
private function createPlayers($number)
{
$players = [];
for ($i = 1; $i <= $number; $i++) {
$players[] = [
"name" => "Spelare " . $i,
"score" => 0,
];
}
return $players;
}
/**
* Set next player in turn and set a new dice.
*/
private function nextPlayer()
{
$new = $this->player + 1;
$this->player = array_key_exists($new, $this->playerList) ? $new : 0;
}
/**
* Handle the incoming route.
*
* @param string as the route.
*/
public function handle($route, $roll = null)
{
$sameRound = true;
if ($route == "reset") {
$this->__construct(count($this->playerList));
} else if ($route == "roll") {
$sameRound = $this->round->roll($roll);
} else if ($route == "save") {
$score = $this->getCurrentScore();
if ($score > 0) {
$this->playerList[$this->player]["score"] += $score;
$sameRound = false;
}
}
if (!$sameRound) {
$this->round->newRound();
$this->nextPlayer();
}
}
/**
* Get the array of players.
*
* @return array as the player list.
*/
public function getPlayerList()
{
return $this->playerList;
}
/**
* Returns true if there is a player with a score of 100 or more.
*
* @return bool as winner or not.
*/
public function hasWinner()
{
foreach ($this->playerList as $player) {
if ($player["score"] >= 100) {
$this->winner = $player["name"];
return true;
}
}
return false;
}
/**
* Get the latest roll.
*
* @return int as the last roll.
*/
public function getLastRoll()
{
return $this->round->getLastRoll();
}
/**
* Get the current player's rolls as a comma seperated string.
*
* @return string as the rolls.
*/
public function getRolls()
{
return implode(", ", $this->round->getRolls());
}
/**
* Get the total score of the current player's rolls.
*
* @return int as the score.
*/
public function getCurrentScore()
{
return $this->round->getScore();
}
/**
* Get the current player name.
*
* @return string as the player name.
*/
public function getPlayer()
{
return $this->getPlayerList()[$this->player]["name"];
}
}
| mit |
32leaves/SDSL | execution/web/app/js_unzip.rb | 571 | module JSUnZip
class ZipFile
def initialize(blob)
@zip = `new JSUnzip(blob)`
`self.zip.readEntries()`
end
def is_zip?
`self.zip.isZipFile()`
end
def entries
result = {}
`for (var i = 0; i < self.zip.entries.length; i++) {`
entry = `self.zip.entries[i]`
if `entry.compressionMethod === 0`
result[`entry.fileName`] = `entry.data`
elsif `entry.compressionMethod === 8`
result[`entry.fileName`] = ` JSInflate.inflate(entry.data)`
end
`}`
result
end
end
end
| mit |
shashanksingh28/code-similarity | data/Java Cookbook 3rd Edition/javacooksrc/javacooksrc/main/java/io/Stdout.java | 775 | package io;
/**
* All the examples for the Standard output recipe.
* @author Ian F. Darwin, http://www.darwinsys.com/
*/
public class Stdout {
public static void main(String[] argv) {
// BEGIN main
Object anObject = new Object();
String myAnswer = "no";
int i = 42;
System.out.println("Hello, World of Java");
System.out.println("An object is " + anObject);
System.out.println("The answer is " + myAnswer + " at this time.");
System.out.println("The answer is " + i + '.');
System.out.println("The answer is " + i + ".");
System.out.println(i + '=' + " the answer.");
System.out.println(new StringBuffer("The answer is ").append(i).append('.'));
// END main
}
}
| mit |
alexandonian/ptutils | ptutils/utils/serialization/__init__.py | 7982 | """
Interfaces for serializing ptutils objects.
Usage::
from ptutils import serializers
json = serializers.serialize("json", some_queryset)
objects = list(serializers.deserialize("json", json))
"""
import importlib
from ptutils.serializers.base import SerializerDoesNotExist
# Built-in serializers
BUILTIN_SERIALIZERS = {
"xml": "ptutils.serializers.xml_serializer",
"python": "ptutils.serializers.python",
"json": "ptutils.serializers.json",
"yaml": "ptutils.serializers.pyyaml",
}
_serializers = {}
class BadSerializer:
"""
Stub serializer to hold exception raised during registration
This allows the serializer registration to cache serializers and if there
is an error raised in the process of creating a serializer it will be
raised and passed along to the caller when the serializer is used.
"""
internal_use_only = False
def __init__(self, exception):
self.exception = exception
def __call__(self, *args, **kwargs):
raise self.exception
def register_serializer(format, serializer_module, serializers=None):
"""Register a new serializer.
``serializer_module`` should be the fully qualified module name
for the serializer.
If ``serializers`` is provided, the registration will be added
to the provided dictionary.
If ``serializers`` is not provided, the registration will be made
directly into the global register of serializers. Adding serializers
directly is not a thread-safe operation.
"""
if serializers is None and not _serializers:
_load_serializers()
try:
module = importlib.import_module(serializer_module)
except ImportError as exc:
bad_serializer = BadSerializer(exc)
module = type('BadSerializerModule', (), {
'Deserializer': bad_serializer,
'Serializer': bad_serializer,
})
if serializers is None:
_serializers[format] = module
else:
serializers[format] = module
def unregister_serializer(format):
"Unregister a given serializer. This is not a thread-safe operation."
if not _serializers:
_load_serializers()
if format not in _serializers:
raise SerializerDoesNotExist(format)
del _serializers[format]
def get_serializer(format):
if not _serializers:
_load_serializers()
if format not in _serializers:
raise SerializerDoesNotExist(format)
return _serializers[format].Serializer
def get_serializer_formats():
if not _serializers:
_load_serializers()
return list(_serializers)
def get_public_serializer_formats():
if not _serializers:
_load_serializers()
return [k for k, v in _serializers.items() if not v.Serializer.internal_use_only]
def get_deserializer(format):
if not _serializers:
_load_serializers()
if format not in _serializers:
raise SerializerDoesNotExist(format)
return _serializers[format].Deserializer
def serialize(format, queryset, **options):
"""
Serialize a queryset (or any iterator that returns database objects) using
a certain serializer.
"""
s = get_serializer(format)()
s.serialize(queryset, **options)
return s.getvalue()
def deserialize(format, stream_or_string, **options):
"""
Deserialize a stream or a string. Return an iterator that yields ``(obj,
m2m_relation_dict)``, where ``obj`` is an instantiated -- but *unsaved* --
object, and ``m2m_relation_dict`` is a dictionary of ``{m2m_field_name :
list_of_related_objects}``.
"""
d = get_deserializer(format)
return d(stream_or_string, **options)
def _load_serializers():
"""
Register built-in and settings-defined serializers. This is done lazily so
that user code has a chance to (e.g.) set up custom settings without
needing to be careful of import order.
"""
global _serializers
serializers = {}
for format in BUILTIN_SERIALIZERS:
register_serializer(format, BUILTIN_SERIALIZERS[format], serializers)
# if hasattr(settings, "SERIALIZATION_MODULES"):
# for format in settings.SERIALIZATION_MODULES:
# register_serializer(format, settings.SERIALIZATION_MODULES[
# format], serializers)
_serializers = serializers
def sort_dependencies(app_list):
"""Sort a list of (app_config, models) pairs into a single list of models.
The single list of models is sorted so that any model with a natural key
is serialized before a normal model, and any model with a natural key
dependency has it's dependencies serialized first.
"""
# Process the list of models, and get the list of dependencies
model_dependencies = []
models = set()
for app_config, model_list in app_list:
if model_list is None:
model_list = app_config.get_models()
for model in model_list:
models.add(model)
# Add any explicitly defined dependencies
if hasattr(model, 'natural_key'):
deps = getattr(model.natural_key, 'dependencies', [])
if deps:
deps = [apps.get_model(dep) for dep in deps]
else:
deps = []
# Now add a dependency for any FK relation with a model that
# defines a natural key
for field in model._meta.fields:
if field.remote_field:
rel_model = field.remote_field.model
if hasattr(rel_model, 'natural_key') and rel_model != model:
deps.append(rel_model)
# Also add a dependency for any simple M2M relation with a model
# that defines a natural key. M2M relations with explicit through
# models don't count as dependencies.
for field in model._meta.many_to_many:
if field.remote_field.through._meta.auto_created:
rel_model = field.remote_field.model
if hasattr(rel_model, 'natural_key') and rel_model != model:
deps.append(rel_model)
model_dependencies.append((model, deps))
model_dependencies.reverse()
# Now sort the models to ensure that dependencies are met. This
# is done by repeatedly iterating over the input list of models.
# If all the dependencies of a given model are in the final list,
# that model is promoted to the end of the final list. This process
# continues until the input list is empty, or we do a full iteration
# over the input models without promoting a model to the final list.
# If we do a full iteration without a promotion, that means there are
# circular dependencies in the list.
model_list = []
while model_dependencies:
skipped = []
changed = False
while model_dependencies:
model, deps = model_dependencies.pop()
# If all of the models in the dependency list are either already
# on the final model list, or not on the original serialization list,
# then we've found another model with all it's dependencies
# satisfied.
found = True
for candidate in ((d not in models or d in model_list) for d in deps):
if not candidate:
found = False
if found:
model_list.append(model)
changed = True
else:
skipped.append((model, deps))
if not changed:
raise RuntimeError(
"Can't resolve dependencies for %s in serialized app list." %
', '.join(
'%s.%s' % (model._meta.app_label, model._meta.object_name)
for model, deps in sorted(skipped, key=lambda obj: obj[0].__name__)
)
)
model_dependencies = skipped
return model_list
| mit |
jLKisni/PitchItUp | application/modules/Default/views/main_header.php | 1853 | <!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="">
<meta name="author" content="">
<title>PitchITup | <?php echo $title; ?></title>
<!-- Bootstrap Core CSS -->
<link href="<?php echo base_url();?>assets/vendor/bootstrap/css/bootstrap.min.css" rel="stylesheet">
<!-- Custom Fonts -->
<link href="<?php echo base_url();?>assets/vendor/font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css">
<link href='https://fonts.googleapis.com/css?family=Open+Sans:300italic,400italic,600italic,700italic,800italic,400,300,600,700,800' rel='stylesheet' type='text/css'>
<link href='https://fonts.googleapis.com/css?family=Merriweather:400,300,300italic,400italic,700,700italic,900,900italic' rel='stylesheet' type='text/css'>
<link href="<?php echo base_url();?>assets/css/datatables/dataTables.bootstrap.css" rel="stylesheet" type="text/css" />
<!-- Plugin CSS -->
<link href="<?php echo base_url();?>assets/vendor/magnific-popup/magnific-popup.css" rel="stylesheet">
<!-- Theme CSS -->
<link href="<?php echo base_url();?>assets/css/creative.min.css" rel="stylesheet">
<style type="text/css">
body{
width:100%;
margin: 0;
min-height: auto;
-webkit-background-size: cover;
-moz-background-size: cover;
background-size: cover;
-o-background-size: cover;
background: url(<?php echo base_url();?>assets/img/header.jpg) center center;
}
</style>
<script src="<?php echo base_url();?>assets/vendor/jquery/jquery.min.js"></script>
</head>
<body id="page-top">
| mit |
EbTech/rust-algorithms | src/graph/flow.rs | 9318 | //! Maximum flows, matchings, and minimum cuts.
use super::{AdjListIterator, Graph};
/// Representation of a network flow problem with (optional) costs.
pub struct FlowGraph {
/// Owned graph, managed by this FlowGraph object.
pub graph: Graph,
/// Edge capacities.
pub cap: Vec<i64>,
/// Edge cost per unit flow.
pub cost: Vec<i64>,
}
impl FlowGraph {
/// An upper limit to the flow.
const INF: i64 = i64::MAX;
/// Initializes an flow network with vmax vertices and no edges.
pub fn new(vmax: usize, emax_hint: usize) -> Self {
Self {
graph: Graph::new(vmax, 2 * emax_hint),
cap: Vec::with_capacity(2 * emax_hint),
cost: Vec::with_capacity(2 * emax_hint),
}
}
/// Adds an edge with specified directional capacities and cost per unit of
/// flow. If only forward flow is allowed, rcap should be zero.
pub fn add_edge(&mut self, u: usize, v: usize, cap: i64, rcap: i64, cost: i64) {
self.cap.push(cap);
self.cap.push(rcap);
self.cost.push(cost);
self.cost.push(-cost);
self.graph.add_undirected_edge(u, v);
}
/// Dinic's algorithm to find the maximum flow from s to t where s != t.
/// Generalizes the Hopcroft-Karp maximum bipartite matching algorithm.
/// V^2E in general, min(V^(2/3),sqrt(E))E when all edges are unit capacity,
/// sqrt(V)E when all vertices are unit capacity as in bipartite graphs.
///
/// # Panics
///
/// Panics if the maximum flow is 2^63 or larger.
pub fn dinic(&self, s: usize, t: usize) -> (i64, Vec<i64>) {
let mut flow = vec![0; self.graph.num_e()];
let mut max_flow = 0;
loop {
let dist = self.dinic_search(s, &flow);
if dist[t] == Self::INF {
break;
}
// Keep track of adjacency lists to avoid revisiting blocked edges.
let mut adj_iters = (0..self.graph.num_v())
.map(|u| self.graph.adj_list(u).peekable())
.collect::<Vec<_>>();
max_flow += self.dinic_augment(s, t, Self::INF, &dist, &mut adj_iters, &mut flow);
}
(max_flow, flow)
}
// Compute BFS distances to restrict attention to shortest path edges.
fn dinic_search(&self, s: usize, flow: &[i64]) -> Vec<i64> {
let mut dist = vec![Self::INF; self.graph.num_v()];
let mut q = ::std::collections::VecDeque::new();
dist[s] = 0;
q.push_back(s);
while let Some(u) = q.pop_front() {
for (e, v) in self.graph.adj_list(u) {
if dist[v] == Self::INF && flow[e] < self.cap[e] {
dist[v] = dist[u] + 1;
q.push_back(v);
}
}
}
dist
}
// Pushes a blocking flow that increases the residual's s-t distance.
fn dinic_augment(
&self,
u: usize,
t: usize,
f: i64,
dist: &[i64],
adj: &mut [::std::iter::Peekable<AdjListIterator>],
flow: &mut [i64],
) -> i64 {
if u == t {
return f;
}
let mut df = 0;
while let Some(&(e, v)) = adj[u].peek() {
let rem_cap = (self.cap[e] - flow[e]).min(f - df);
if rem_cap > 0 && dist[v] == dist[u] + 1 {
let cf = self.dinic_augment(v, t, rem_cap, dist, adj, flow);
flow[e] += cf;
flow[e ^ 1] -= cf;
df += cf;
if df == f {
break;
}
}
// The current edge is either saturated or blocked.
adj[u].next();
}
df
}
/// After running maximum flow, use this to recover the dual minimum cut.
pub fn min_cut(&self, dist: &[i64]) -> Vec<usize> {
(0..self.graph.num_e())
.filter(|&e| {
let u = self.graph.endp[e ^ 1];
let v = self.graph.endp[e];
dist[u] < Self::INF && dist[v] == Self::INF
})
.collect()
}
/// Among all s-t maximum flows, finds one with minimum cost, assuming
/// s != t and no negative-cost cycles.
///
/// # Panics
///
/// Panics if the flow or cost overflow a 64-bit signed integer.
pub fn mcf(&self, s: usize, t: usize) -> (i64, i64, Vec<i64>) {
let mut pot = vec![0; self.graph.num_v()];
// Bellman-Ford deals with negative-cost edges at initialization.
for _ in 1..self.graph.num_v() {
for e in 0..self.graph.num_e() {
if self.cap[e] > 0 {
let u = self.graph.endp[e ^ 1];
let v = self.graph.endp[e];
pot[v] = pot[v].min(pot[u] + self.cost[e]);
}
}
}
let mut flow = vec![0; self.graph.num_e()];
let (mut min_cost, mut max_flow) = (0, 0);
loop {
let par = self.mcf_search(s, &flow, &mut pot);
if par[t] == None {
break;
}
let (dc, df) = self.mcf_augment(t, &par, &mut flow);
min_cost += dc;
max_flow += df;
}
(min_cost, max_flow, flow)
}
// Maintains Johnson's potentials to prevent negative-cost residual edges.
// This allows running Dijkstra instead of the slower Bellman-Ford.
fn mcf_search(&self, s: usize, flow: &[i64], pot: &mut [i64]) -> Vec<Option<usize>> {
let mut vis = vec![false; self.graph.num_v()];
let mut dist = vec![Self::INF; self.graph.num_v()];
let mut par = vec![None; self.graph.num_v()];
dist[s] = 0;
while let Some(u) = (0..self.graph.num_v())
.filter(|&u| !vis[u] && dist[u] < Self::INF)
.min_by_key(|&u| dist[u] - pot[u])
{
vis[u] = true;
pot[u] = dist[u];
for (e, v) in self.graph.adj_list(u) {
if dist[v] > dist[u] + self.cost[e] && flow[e] < self.cap[e] {
dist[v] = dist[u] + self.cost[e];
par[v] = Some(e);
}
}
}
par
}
// Pushes flow along an augmenting path of minimum cost.
fn mcf_augment(&self, t: usize, par: &[Option<usize>], flow: &mut [i64]) -> (i64, i64) {
let (mut dc, mut df) = (0, Self::INF);
let mut u = t;
while let Some(e) = par[u] {
df = df.min(self.cap[e] - flow[e]);
u = self.graph.endp[e ^ 1];
}
u = t;
while let Some(e) = par[u] {
flow[e] += df;
flow[e ^ 1] -= df;
dc += df * self.cost[e];
u = self.graph.endp[e ^ 1];
}
(dc, df)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_basic_flow() {
let mut graph = FlowGraph::new(3, 2);
graph.add_edge(0, 1, 4, 0, 0);
graph.add_edge(1, 2, 3, 0, 0);
let flow = graph.dinic(0, 2).0;
assert_eq!(flow, 3);
}
#[test]
fn test_min_cost_flow() {
let mut graph = FlowGraph::new(4, 4);
graph.add_edge(0, 1, 10, 0, -10);
graph.add_edge(1, 2, 7, 0, 8);
graph.add_edge(2, 3, 7, 0, 8);
graph.add_edge(1, 3, 7, 0, 10);
let (cost, flow, _) = graph.mcf(0, 3);
assert_eq!(cost, 18);
assert_eq!(flow, 10);
}
#[test]
fn test_max_matching() {
let mut graph = FlowGraph::new(14, 4);
let source = 0;
let sink = 13;
//Vertex indices of "left hand side" of bipartite graph go from [left_start, right_start)
let left_start = 1;
//Vertex indices of "right hand side" of bipartite graph go from [right_start, sink)
let right_start = 7;
//Initialize source / sink connections; both left & right have 6 nodes
for lhs_vertex in left_start..left_start + 6 {
graph.add_edge(source, lhs_vertex, 1, 0, 0);
}
for rhs_vertex in right_start..right_start + 6 {
graph.add_edge(rhs_vertex, sink, 1, 0, 0);
}
graph.add_edge(left_start + 0, right_start + 1, 1, 0, 0);
graph.add_edge(left_start + 0, right_start + 2, 1, 0, 0);
graph.add_edge(left_start + 2, right_start + 0, 1, 0, 0);
graph.add_edge(left_start + 2, right_start + 3, 1, 0, 0);
graph.add_edge(left_start + 3, right_start + 2, 1, 0, 0);
graph.add_edge(left_start + 4, right_start + 2, 1, 0, 0);
graph.add_edge(left_start + 4, right_start + 3, 1, 0, 0);
graph.add_edge(left_start + 5, right_start + 5, 1, 0, 0);
let (flow_amt, flow) = graph.dinic(source, sink);
assert_eq!(flow_amt, 5);
//L->R edges in maximum matching
let left_right_edges = flow
.into_iter()
.enumerate()
.filter(|&(_e, f)| f > 0)
//map to u->v
.map(|(e, _f)| (graph.graph.endp[e ^ 1], graph.graph.endp[e]))
//leave out source and sink nodes
.filter(|&(u, v)| u != source && v != sink)
.collect::<Vec<_>>();
assert_eq!(
left_right_edges,
vec![(1, 8), (3, 7), (4, 9), (5, 10), (6, 12)]
);
}
}
| mit |
tilap/weleanit-landing | src/js/misc/external-links-in-new-window.js | 286 | 'use strict';
/* global $*/
/* global document */
/* global location */
// Add target="_blank" to all external link.
$(document).ready(function() {
$('a').filter(function() {
return this.hostname && this.hostname !== location.hostname;
}).attr('target', '_blank');
}); | mit |
spzvtbg/TechModuleProgrammingFundamentals23.05.2017 | 10. Arrays - More Exercises/01. Last 3 Consecutive Equal Strings/EqualStrings.cs | 817 | namespace _01.Last_3_Consecutive_Equal_Strings
{
using System;
public class EqualStrings
{
public static void Main()
{
var givenStrings = Console.ReadLine().Split();
var stringCounter = 1;
var counter = givenStrings.Length - 1;
for (int index = counter; index >= 1; index--)
{
if (givenStrings[index] == givenStrings[index - 1])
{
stringCounter++;
}
else
{
stringCounter = 1;
}
if (stringCounter == 3)
{
Console.WriteLine("{0} {0} {0}", givenStrings[index]);
return;
}
}
}
}
}
| mit |
studyindenmark/clerkster | js/controllers/settings.js | 376 | function SettingsController($scope, $routeParams, api, user) {
$scope.user = user;
var timeout = null;
$scope.save = function() {
$scope.saved = false;
if (timeout) {
clearTimeout(timeout);
}
timeout = setTimeout(function() {
api.saveSettings({author: user.author}).success(function() {
$scope.saved = true;
});
}, 1000);
};
}
| mit |
amezhenin/ella | src/test/scala/ella/EllaServiceSpec.scala | 1263 | package ella
import org.specs2.mutable.Specification
import spray.testkit.Specs2RouteTest
import spray.http._
import StatusCodes._
class EllaServiceSpec extends Specification with Specs2RouteTest with EllaService {
def actorRefFactory = system
"Ella" should {
"return a greeting for GET requests to the root path" in {
Get() ~> route ~> check {
responseAs[String] must be equalTo "Hello world"
}
}
"return handle GET requests with integers in the URL path" in {
Get("/42") ~> route ~> check {
responseAs[String] must contain("42")
}
}
"return handle GET requests with string in the URL path, but only one" in {
Get("/asdf") ~> route ~> check {
responseAs[String] must contain("asdf")
}
Get("/asdf/asdf") ~> route ~> check {
handled must beFalse
}
}
"leave POST requests to other paths unhandled" in {
Post("/asdf") ~> route ~> check {
handled must beFalse
}
}
"return a MethodNotAllowed error for PUT requests to the root path" in {
Put() ~> sealRoute(route) ~> check {
status === MethodNotAllowed
responseAs[String] === "HTTP method not allowed, supported methods: GET"
}
}
}
}
| mit |
Tythos/hypyr | main.py | 2460 | """Pyglet/GLSL aliasing demo, evolved from pythonstuff.org
"""
from math import pi, sin, cos, sqrt
from random import random
from pyglet import gl, window, image, resource, clock, text, event, app
from os import path
from hypyr import particles, linal, shader, scene, data, solids
class HypyrApp(window.Window):
def __init__(self):
config = gl.Config(sample_buffers=1, samples=4, depth_size=16, double_buffer=True)
try:
super(HypyrApp, self).__init__(resizable=True, config=config, vsync=False, width=800, height=600)
except pyglet.window.NoSuchConfigException:
super(HypyrApp, self).__init__(resizable=True)
self.scene = scene.Thing()
self.camera = scene.Camera()
clock.schedule(self.update)
self.setupOpenGL()
def setupOpenGL(self):
gl.glClearColor(0., 0., 0., 1.)
gl.glColor4f(1.0, 0.0, 0.0, 0.5)
gl.glEnable(gl.GL_DEPTH_TEST)
#gl.glEnable(gl.GL_CULL_FACE)
gl.glEnable(gl.GL_BLEND)
gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
#gl.glPolygonMode(gl.GL_FRONT, gl.GL_FILL)
def on_resize(self, width, height):
if height == 0:
height = 1
gl.glViewport(0, 0, width, height)
gl.glMatrixMode(gl.GL_PROJECTION)
gl.glLoadIdentity()
self.camera.yFov_rad = self.camera.xFov_rad * height / width
self.camera.apply()
gl.glMatrixMode(gl.GL_MODELVIEW)
return event.EVENT_HANDLED
def update(self, dt):
self.scene.update(dt)
def on_draw(self):
gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT)
gl.glLoadIdentity()
self.camera.apply()
self.scene.chain()
def on_key_press(self, symbol, modifiers):
k = window.key
if symbol == k.R:
rot = linal.Vec3()
elif symbol == k.ESCAPE or symbol == k.Q:
app.exit()
return event.EVENT_HANDLED
elif symbol == k.A:
self.camera.sphericalRotation(-0.1, 0.)
particles.Sprite.isFirstPassRender = True
elif symbol == k.S:
self.camera.sphericalRotation(0., -0.1)
elif symbol == k.W:
self.camera.sphericalRotation(0., 0.1)
elif symbol == k.D:
self.camera.sphericalRotation(0.1, 0.)
elif symbol == k.EQUAL:
self.camera.zoom(0.9)
elif symbol == k.MINUS:
self.camera.zoom(1.1)
if __name__ == "__main__":
a = HypyrApp()
a.camera.eye = linal.Vec3(3., -1., 1.)
for i in range(16):
s = particles.Sprite()
s.position = linal.Vec3((i/8.-1.),random()-0.5,random()-0.5)
s.material.ambient_rgb = linal.Vec3(random(),random(),random())
s.size = 0.1 * random()
a.scene.children.append(s)
app.run()
| mit |
mob35/cyber-ttsm-r2 | www/scripts/problemCause.js | 8127 | (function(global) {
var ProblemCauseViewModel,
app = global.app = global.app || {};
ProblemCauseViewModel = kendo.data.ObservableObject.extend({
_isLoading: true,
userId: function() {
var cache = localStorage.getItem("profileData");
if (cache == null || cache == undefined) {
return null;
} else {
return JSON.parse(cache).userId;
}
},
initProblemCauseMaster: function() {
var that = this;
$("#lvProblemCauseMaster").kendoMobileListView({
dataSource: {
transport: {
read: function(operation) {
operation.success(JSON.parse(localStorage.getItem("problemCauseData")));
}
},
schema: {
data: "problemCauses"
}
},
template: $("#problem-cause-master-template").html(),
databound: function() {
that.hideLoading();
},
filterable: {
field: "description",
ignoreCase: true
},
click: function(e) {
that.selectPbC(e);
}
//virtualViewSize: 30,
//endlessScroll: true,
});
////console.log('lv Problemcause Master Loaded');
},
loadProblemCauseMaster: function() {
var that = this;
var lvProblemCauseMaster = $("#lvProblemCauseMaster").data("kendoMobileListView");
//lvProblemCauseMaster.reset();
app.application.view().scroller.reset();
//$("#lvProblemCauseMaster").kendoMobileListView({
// dataSource: problemCauseData
// },
// schema: {
// data: "problemCauses"
// },
//
// }),
// template: $("#problem-cause-template").html(),
//});
////console.log('lv Problemcause Master Loaded');
},
    // Click handler for a row of the problem-cause master list.
    // Rejects the tapped cause if its sub-id is already in the job's
    // "selectProblemC" data source; otherwise appends it, refreshes the
    // default solves via setProblemSolveRadio(), and navigates to the
    // job's problem-cause page.
    selectPbC: function(e) {
        //console.log("###### selectPbC #########");
        // State lives on the job view model, not on this view model.
        var that = app.jobService.viewModel;
        var selectItem = that.get("selectItem");
        var selectProblemC = that.get("selectProblemC");
        var flag = true; // true => tapped cause is not a duplicate
        //var pbc = [{"jobId": selectItem.jobId,
        //		"problemCauseMainId": e.problemCauseId,
        //		"problemCauseDesc": e.problemCauseDescription,
        //		"problemCauseSubId": e.subproCauseId,
        //		"problemCauseSubDesc": e.subproCauseDescription,
        //		"seqId":null,
        //		"levelCause":null,
        //		"problemCauseId":null
        //	}]
        if (selectProblemC != null && selectProblemC != undefined) {
            var data = selectProblemC.data();
            for (var i = 0; i < data.length; i++) {
                if (data[i].problemCauseSubId == e.dataItem.subProblemCauseId) {
                    flag = false;
                    e.preventDefault();
                    navigator.notification.alert("Duplicate problem cause.",
                        function() {}, "Error", 'OK');
                    // Loop-break idiom: forces the for-condition to fail.
                    i = data.length;
                }
            }
        } else {
            // First selection for this job: start an empty data source.
            selectProblemC = new kendo.data.DataSource();
        }
        if (flag) {
            selectItem.cntProblemCause++;
            var pbc = {
                "jobId": selectItem.jobId,
                "problemCauseMainId": e.dataItem.id,
                "problemCauseDesc": e.dataItem.description,
                "problemCauseSubId": e.dataItem.subProblemCauseId,
                "problemCauseSubDesc": e.dataItem.subProblemCauseDesc,
                "seqId": null,
                "levelCause": null,
                "problemCauseId": null
            };
            selectProblemC.pushCreate(pbc);
            // Persist back onto the job view model once the source settles.
            selectProblemC.fetch(function() {
                that.set("selectProblemC", selectProblemC);
            });
            app.problemCauseService.viewModel.setProblemSolveRadio();
            //SUBPRO_CAUSE_ID
            //SUBPRO_CAUSE_DESCRIPTION
            //SUBPRO_CAUSE_STATUS
            //SUBPRO_CAUSE_PRO_CAUSE_ID
            //PROBLEM_CAUSE_ID
            //PROBLEM_CAUSE_DESCRIPTION
            that.set("selectItem", selectItem);
            //that.set("selectPage", 2);
            app.application.navigate(
                '#job-problem-cause'
            );
        } else {
            //		navigator.notification.alert("Problem cause duplicate",
            //			function() {}, "Error", 'OK');
        }
    },
    // Recomputes the job's default problem-solve entries.
    // For every selected problem cause it filters the cached
    // "problemSolveData" down to solves whose description equals
    // "Temporary", then merges those (deduplicated by sub-cause id) into
    // the job's "selectProblemS" data source with process flag "N".
    // NOTE(review): heavy use of nested async fetch() callbacks — the
    // statement order here is load-bearing; do not reorder.
    setProblemSolveRadio: function() {
        var that = app.jobService.viewModel;
        var selectItem = that.get("selectItem");
        var problemSolveRadioData = null;
        var selectProblemC = that.get("selectProblemC");
        var selectProblemS = that.get("selectProblemS");
        // All candidate solves, read from the offline cache.
        var problemSolveData = new kendo.data.DataSource({
            transport: {
                read: function(operation) {
                    operation.success(JSON.parse(localStorage.getItem("problemSolveData")));
                }
            },
            schema: {
                data: "problemSolves"
            }
        });
        // OR-filter: any solve matching one of the selected sub-cause ids.
        var filter = {
            logic: "or",
            filters: []
        }
        if (selectProblemC != undefined && selectProblemC != null) {
            var data = selectProblemC.data();
            for (var i = 0; i < data.length; i++) {
                var filters = {
                    field: "subproblemCauseId",
                    operator: "eq",
                    value: data[i].problemCauseSubId
                };
                filter.filters.push(filters);
            }
            problemSolveData.filter(filter);
            problemSolveData.fetch(function() {
                // Second-stage source: only the "Temporary" solves of the view.
                problemSolveRadioData = new kendo.data.DataSource({
                    transport: {
                        read: function(operation) {
                            operation.success(problemSolveData.view())
                        }
                    },
                    filter: [{
                        field: "description",
                        operator: "eq",
                        value: "Temporary"
                    }]
                });
                ////console.log(JSON.stringify(problemSolveData));
                problemSolveRadioData.fetch(function() {
                    // Reuses the outer `data` variable for the filtered view.
                    data = problemSolveRadioData.view();
                    if (data.length > 0) {
                        var a = data.length;
                        for (var i = 0; i < a; i++) {
                            if (selectProblemS != undefined && selectProblemS != null) {
                                selectProblemS.fetch(function() {
                                    // NOTE(review): `dataS` has no `var` — it leaks
                                    // as an implicit global; confirm intentional.
                                    dataS = selectProblemS.data();
                                    if (dataS.length > 0) {
                                        var flagDup = false;
                                        var b = dataS.length;
                                        for (var j = 0; j < b; j++) {
                                            if (data[i].subproblemCauseId == dataS[j].subProblemCauseId) {
                                                // Re-declaration is hoisted onto the same
                                                // flagDup variable above.
                                                var flagDup = true;
                                                //return false;
                                                j = dataS.length; // loop-break idiom
                                            }
                                        }
                                        if (!flagDup) {
                                            var pbs = {
                                                "jobId": selectItem.jobId,
                                                "problemSolveId": data[i].id,
                                                "problemSolveDesc": data[i].subProblemCauseDesc + "-" + data[i].description,
                                                "processDesc": "",
                                                "subProblemCauseId": data[i].subproblemCauseId,
                                                "process": "N"
                                            };
                                            selectProblemS.pushCreate(pbs);
                                        }
                                    } else {
                                        // Existing source but empty: rebuild it seeded
                                        // with this solve.
                                        var pbs = [{
                                            "jobId": selectItem.jobId,
                                            "problemSolveId": data[i].id,
                                            "problemSolveDesc": data[i].subProblemCauseDesc + "-" + data[i].description,
                                            "processDesc": "",
                                            "subProblemCauseId": data[i].subproblemCauseId,
                                            "process": "N"
                                        }];
                                        selectProblemS = new kendo.data.DataSource({
                                            data: pbs
                                        });
                                    }
                                })
                            } else {
                                // No source yet: create one seeded with this solve.
                                var pbs = [{
                                    "jobId": selectItem.jobId,
                                    "problemSolveId": data[i].id,
                                    "problemSolveDesc": data[i].subProblemCauseDesc + "-" + data[i].description,
                                    "processDesc": "",
                                    "subProblemCauseId": data[i].subproblemCauseId,
                                    "process": "N"
                                }];
                                selectProblemS = new kendo.data.DataSource({
                                    data: pbs
                                });
                            }
                        }
                    }
                    // Publish the (possibly new) solve source back onto the
                    // job view model.
                    if (selectProblemS != null && selectProblemS != undefined) {
                        selectProblemS.fetch(function() {
                            that.set("selectProblemS", selectProblemS);
                        });
                    } else {
                        that.set("selectProblemS", new kendo.data.DataSource());
                    }
                });
            });
        }
        //
    },
    // Shows the global loading indicator. The _isLoading guard is disabled;
    // the call is always delegated to the kendo application.
    showLoading: function() {
        //if (this._isLoading) {
        app.application.showLoading();
        //}
    },
    // Hides the global loading indicator.
    hideLoading: function() {
        app.application.hideLoading();
    },
});
// View-service singleton registered on the app namespace; the init/show/hide
// members are kendo mobile view lifecycle hooks.
app.problemCauseService = {
    // "init": runs once; builds the master list view.
    init: function() {
        ////console.log("myteam init start");
        app.problemCauseService.viewModel.initProblemCauseMaster();
        ////console.log("myteam init end");
    },
    // "show"/"hide" hooks are currently no-ops (bodies commented out).
    show: function() {
        ////console.log("myteam show start");
        //app.problemCauseService.viewModel.showLoading();
        //app.problemCauseService.viewModel.loadProblemCauseMaster();
        //app.myService.viewModel.hideLoading(////console.logle.debug("myteam hide hide");
    },
    hide: function() {
        ////console.log("myteam hide start");
        //app.myService.viewModel.hideLoading();
        ////console.log("myteam hide hide");
    },
    viewModel: new ProblemCauseViewModel()
}
})(window);
| mit |
orb15/dicebag | src/main/java/net/orb15/dicebag/dice/D2.java | 247 | package net.orb15.dicebag.dice;
/**
 * A two-sided die (a coin flip). Rolling behavior is inherited from
 * {@link Die}, which produces values in [1, DIE_SIZE].
 */
public class D2 extends Die
{
    private static final int DIE_SIZE = 2;

    /** Creates a two-sided die. */
    public D2()
    {
        super(DIE_SIZE);
    }

    /**
     * Rolls the die and maps the result to a boolean.
     *
     * @return {@code true} when the roll is 2, {@code false} when it is 1
     */
    public boolean rollBool()
    {
        // Direct comparison replaces the redundant `(roll == 2) ? true : false`
        // and the magic number 2 (use the named constant instead).
        return super.roll() == DIE_SIZE;
    }
}
| mit |
buenadigital/SaaSPro | src/SaaSPro.Web/Application/Events.cs | 598 | using SaaSPro.Common.Helpers;
using SaaSPro.Common;
using System;
namespace SaaSPro.Web
{
public static class Events
{
static Lazy<IEventBus> _bus;
public static void Raise<TEvent>(TEvent evnt) where TEvent : IEvent
{
Ensure.Argument.NotNull(evnt, "evnt");
_bus?.Value.Publish(evnt);
}
public static void RegisterEventBus(Func<IEventBus> eventBusFactory)
{
Ensure.Argument.NotNull(eventBusFactory, "eventBusFactory");
_bus = new Lazy<IEventBus>(eventBusFactory);
}
}
} | mit |
napple-team/youwatana.be | frontend/server/index.js | 756 | const express = require('express')
const consola = require('consola')
const { Nuxt, Builder } = require('nuxt')
const app = express()
// Import and Set Nuxt.js options
const config = require('../nuxt.config.js')
// Dev mode (hot reload + in-memory build) unless NODE_ENV=production.
config.dev = process.env.NODE_ENV !== 'production'
// Boots the Nuxt renderer (building first in dev mode), mounts it as the
// sole Express middleware, and listens on the host/port from nuxt.config.js.
async function start () {
  // Init Nuxt.js
  const nuxt = new Nuxt(config)
  const { host, port } = nuxt.options.server
  // Build only in dev mode
  if (config.dev) {
    const builder = new Builder(nuxt)
    await builder.build()
  } else {
    await nuxt.ready()
  }
  // Give nuxt middleware to express
  app.use(nuxt.render)
  // Listen the server
  app.listen(port, host)
  consola.ready({
    message: `Server listening on http://${host}:${port}`,
    badge: true
  })
}
start()
| mit |
GETLIMS/LIMS-Backend | lims/workflows/migrations/0031_stepfieldproperty_measure_not_required.py | 479 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-03-08 09:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``measure_not_required`` boolean flag (default ``False``)
    to the ``StepFieldProperty`` model."""

    dependencies = [
        ('workflows', '0030_auto_20180306_1110'),
    ]

    operations = [
        migrations.AddField(
            model_name='stepfieldproperty',
            name='measure_not_required',
            field=models.BooleanField(default=False),
        ),
    ]
| mit |
poofee/fastFEM | gmsh-4.2.2/demos/api/t10.py | 1813 | # This file reimplements gmsh/tutorial/t10.geo in Python.
import gmsh
import math
model = gmsh.model
factory = model.geo
gmsh.initialize()
# Echo gmsh messages to the terminal.
gmsh.option.setNumber("General.Terminal", 1)
model.add("t10")
# Base characteristic (target mesh element) length.
lc = .15
# Unit-square geometry: four corner points plus an interior attractor point (5).
factory.addPoint(0.0,0.0,0, lc, 1)
factory.addPoint(1,0.0,0, lc, 2)
factory.addPoint(1,1,0, lc, 3)
factory.addPoint(0,1,0, lc, 4)
factory.addPoint(0.2,.5,0, lc, 5)
factory.addLine(1,2, 1);
factory.addLine(2,3, 2);
factory.addLine(3,4, 3);
factory.addLine(4,1, 4);
factory.addCurveLoop([1,2,3,4], 5)
factory.addPlaneSurface([5], 6)
# Field 1: distance from point 5 and from curve 2 (sampled at 100 nodes).
model.mesh.field.add("Distance", 1)
model.mesh.field.setNumbers(1, "NodesList", [5])
model.mesh.field.setNumber(1, "NNodesByEdge", 100)
model.mesh.field.setNumbers(1, "EdgesList", [2])
# Field 2: threshold on field 1 — size lc/30 inside DistMin, lc beyond DistMax.
model.mesh.field.add("Threshold", 2);
model.mesh.field.setNumber(2, "IField", 1);
model.mesh.field.setNumber(2, "LcMin", lc / 30)
model.mesh.field.setNumber(2, "LcMax", lc)
model.mesh.field.setNumber(2, "DistMin", 0.15)
model.mesh.field.setNumber(2, "DistMax", 0.5)
# Field 3: analytic size field over (x, y).
model.mesh.field.add("MathEval", 3)
model.mesh.field.setString(3, "F", "Cos(4*3.14*x) * Sin(4*3.14*y) / 10 + 0.101")
# Fields 4 and 5: distance from point 1, then its cube plus a floor term.
model.mesh.field.add("Distance", 4)
model.mesh.field.setNumbers(4, "NodesList", [1])
model.mesh.field.add("MathEval", 5);
model.mesh.field.setString(5, "F", "F4^3 + " + str(lc / 100))
# Field 6: constant size lc/15 inside the box [0.3,0.6]x[0.3,0.6], lc outside.
model.mesh.field.add("Box", 6)
model.mesh.field.setNumber(6, "VIn", lc / 15)
model.mesh.field.setNumber(6, "VOut", lc)
model.mesh.field.setNumber(6, "XMin", 0.3)
model.mesh.field.setNumber(6, "XMax", 0.6)
model.mesh.field.setNumber(6, "YMin", 0.3)
model.mesh.field.setNumber(6, "YMax", 0.6)
# Field 7: pointwise minimum of fields 2, 3, 5, 6 — the background size field.
model.mesh.field.add("Min", 7)
model.mesh.field.setNumbers(7, "FieldsList", [2, 3, 5, 6])
model.mesh.field.setAsBackgroundMesh(7)
factory.synchronize()
model.mesh.generate(2)
gmsh.write("t10.msh")
gmsh.finalize()
| mit |
isdaviddong/LineBotBook_Basic | LineBotExamples/WebHook_First/Global.asax.cs | 475 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Routing;
using System.Web.Security;
using System.Web.SessionState;
using System.Web.Http;
namespace WebHook_First
{
    /// <summary>
    /// ASP.NET application class; Application_Start runs once when the app
    /// first starts and wires up the Web API route configuration.
    /// </summary>
    public class Global : HttpApplication
    {
        void Application_Start(object sender, EventArgs e)
        {
            // Code that runs on application startup
            GlobalConfiguration.Configure(WebApiConfig.Register);
        }
    }
} | mit |
sayanee/word-overlap | lib/lib.js | 1804 | 'use strict';
var pluralize = require('pluralize');
var natural = require('natural');
var isNumber = require('is-number');
exports.sanitize = function sanitize(word) {
// matches common punctuations
// , / ? # ! $ % ^ & * ; : { } = _ ` ~ ( )
var punctuationRegex = /[,\/\?@#!$%\^&\*;:{}=_`~()]/g;
// matches a dot between 2 strings or words
var dotBetweenWords = /(\w+)\.(\w+)/;
// matches a hyphen between 2 strings or words
var dashBetweenWords = /(\w+)\-(\w+)/;
// matches a full stop
var fullStop = /(\w+)\./;
// matches a .com for websites
var dotcom = /\.com/;
return word
.replace(dotcom, '')
.replace(punctuationRegex, '')
.replace(dotBetweenWords, '$1 $2')
.replace(dashBetweenWords, '$1 $2')
.replace(fullStop, '$1');
}
exports.tokenize = function tokenize(word) {
return word.split(' ');
}
exports.removeEmptyString = function removeEmptyString(word) {
return word.length > 0;
}
exports.onlyUnique = function onlyUnique(value, index, self) {
return self.indexOf(value) === index;
}
exports.removeCommonWords = function removeCommonWords(words, common) {
return words.filter(function(element) {
return common.indexOf(element) < 0;
});
}
/** Returns `words` with numeric-looking entries removed (via is-number). */
function removeNumbers(words) {
  return words.filter(function(word) {
    return !isNumber(word);
  });
}
exports.removeNumbers = removeNumbers;
exports.getWordsWithMinLen = function getWordsWithMinLen(minLen, word) {
return word.length >= minLen;
}
/**
 * Singularizes every word, first registering each entry of `ignorePlurals`
 * as uncountable so pluralize leaves it untouched.
 */
function depluralize(words, ignorePlurals) {
  ignorePlurals.forEach(pluralize.addUncountableRule);
  return words.map(function(word) {
    return pluralize.singular(word);
  });
}
exports.depluralize = depluralize;
/** Applies the Porter stemmer (from `natural`) to every word. */
function stem(words) {
  return words.map(function(word) {
    return natural.PorterStemmer.stem(word);
  });
}
exports.stem = stem;
| mit |
Marinski/TelerikAcademy2015 | CSharp-1/Telerik Academy Exam 1 2013/1.2-4-8/2-4-8.cs | 1103 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
using System.Numerics;
namespace _1._2_4_8
{
class Program
{
static void Main()
{
//StreamReader reader = new StreamReader("..\\..\\input.txt");
//Console.SetIn(reader);
BigInteger a = int.Parse(Console.ReadLine());
BigInteger b = int.Parse(Console.ReadLine());
BigInteger c = int.Parse(Console.ReadLine());
BigInteger result = 0;
BigInteger r = 0;
if (b == 2)
{
r = a % c;
}
else if (b == 4)
{
r = a + c;
}
else if (b == 8)
{
r = a * c;
}
if (r % 4 == 0)
{
result = r / 4;
}
else
{
result = r % 4;
}
Console.WriteLine(result);
Console.WriteLine(r);
}
}
}
| mit |
daryllabar/DLaB.Xrm.XrmToolBoxTools | DLaB.CrmSvcUtilExtensions.Tests/CrmSvcUtilTests.cs | 4424 | using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using Microsoft.Crm.Services.Utility;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace DLaB.CrmSvcUtilExtensions.Tests
{
    /// <summary>
    /// Smoke tests that drive the CrmSvcUtil code-generation pipeline
    /// (entities, option sets, actions) end to end into a temp file.
    /// </summary>
    [TestClass]
    public class CrmSvcUtilTests
    {
        // Generates the early-bound entity file.
        [TestMethod]
        public void CreateTestEntityFile()
        {
            var factory = new ServiceFactory();
            var customizeDom = new CrmSvcUtilExtensions.Entity.CustomizeCodeDomService();
            var codeGen = new CrmSvcUtilExtensions.Entity.CustomCodeGenerationService(factory.GetService<ICodeGenerationService>());
            var filter = new CrmSvcUtilExtensions.Entity.CodeWriterFilterService(factory.GetService<ICodeWriterFilterService>());
            TestFileCreation(factory, customizeDom, codeGen, filter);
        }

        // Generates the option-set file.
        [TestMethod]
        public void CreateTestOptionSetFile()
        {
            var factory = new ServiceFactory();
            var customizeDom = new OptionSet.CustomizeCodeDomService(new Dictionary<string, string>());
            var codeGen = new OptionSet.CustomCodeGenerationService(factory.GetService<ICodeGenerationService>());
            var filter = new OptionSet.CodeWriterFilterService(factory.GetService<ICodeWriterFilterService>());
            TestFileCreation(factory, customizeDom, codeGen, filter);
        }

        // Generates the action file.
        [TestMethod]
        public void CreateTestActionFile()
        {
            var factory = new ServiceFactory();
            var customizeDom = new Action.CustomizeCodeDomService();
            var codeGen = new Action.CustomCodeGenerationService(factory.GetService<ICodeGenerationService>());
            var filter = new Action.CodeWriterFilterService(factory.GetService<ICodeWriterFilterService>());
            TestFileCreation(factory, customizeDom, codeGen, filter);
        }

        // Shared driver: registers the supplied services on the factory and
        // writes generated code to a temp file. Skipped unless a debugger is
        // attached or the "TestFileCreation" app setting is true.
        private static void TestFileCreation(ServiceFactory factory, ICustomizeCodeDomService customizeDom, ICodeGenerationService codeGen, ICodeWriterFilterService filter)
        {
            if (!Debugger.IsAttached && !ConfigHelper.GetAppSettingOrDefault("TestFileCreation", false))
            {
                return;
            }
            using (var tmp = TempDir.Create())
            {
                var fileName = Path.Combine(tmp.Name, Guid.NewGuid() + ".txt");
                try
                {
                    //factory.Add<ICustomizeCodeDomService>(new CustomizeCodeDomService(new Dictionary<string, string>
                    //{
                    //    { "url", @"https://allegient.api.crm.dynamics.com/XRMServices/2011/Organization.svc"},
                    //    { "namespace", @"Test.Xrm.Entities"},
                    //    { "out", fileName },
                    //    {"servicecontextname", "CrmContext"},
                    //    {"codecustomization", "DLaB.CrmSvcUtilExtensions.Entity.CustomizeCodeDomService,DLaB.CrmSvcUtilExtensions"},
                    //    {"codegenerationservice", "DLaB.CrmSvcUtilExtensions.Entity.CustomCodeGenerationService,DLaB.CrmSvcUtilExtensions" },
                    //    {"codewriterfilter", "DLaB.CrmSvcUtilExtensions.Entity.CodeWriterFilterService,DLaB.CrmSvcUtilExtensions"},
                    //    {"metadataproviderservice:", "DLaB.CrmSvcUtilExtensions.Entity.MetadataProviderService,DLaB.CrmSvcUtilExtensions"},
                    //    {"namingservice", "DLaB.CrmSvcUtilExtensions.NamingService,DLaB.CrmSvcUtilExtensions"},
                    //    {"username", "dlabar@allegient.com"},
                    //    {"password", "*********"}
                    //}));
                    factory.Add(customizeDom);
                    factory.Add(codeGen);
                    factory.Add(filter);
                    factory.Add<INamingService>(new NamingService(factory.GetService<INamingService>()));
                    factory.GetService<ICodeGenerationService>().Write(factory.GetMetadata(), "CS", fileName, "DLaB.CrmSvcUtilExtensions.UnitTest", factory.ServiceProvider);
                }
                catch (Exception ex)
                {
                    // Line for adding a debug breakpoint
                    var message = ex.Message;
                    if (message != null)
                    {
                        throw;
                    }
                }
            }
        }
    }
}
| mit |
mongodb/mongoid | lib/mongoid/errors/invalid_collection.rb | 438 | # frozen_string_literal: true
module Mongoid
module Errors
# This error is raised when trying to access a Mongo::Collection from an
# embedded document.
#
# @example Create the error.
# InvalidCollection.new(Address)
class InvalidCollection < MongoidError
def initialize(klass)
super(
compose_message("invalid_collection", { klass: klass.name })
)
end
end
end
end
| mit |
lam-vy-mulodo/thodanhxo | fuel/app/views/users/index.php | 5695 | <?php include __DIR__.'/../welcome/header.php';?>
<!-- Account-settings page for a logged-in user: top user menu, a combined
     login-info + candidate-info form, and the shared right-hand menu. -->
<div class="container padBot">
  <div class="row">
    <section class="span4-v">
      <div class="navbar navbar_ clearfix">
        <div class="navbar-inner">
          <div class="clearfix">
            <div class="nav-collapse">
              <ul class="user-menu-top">
                <li><a href="<?php echo _SITE_DOMAIN_;?>tho-can">Trang cần thợ</a></li>
                <li><a href="<?php echo _SITE_DOMAIN_;?>users/index">Tài khoản</a></li>
                <li><a href="<?php echo _SITE_DOMAIN_;?>users/list-ho-so">Tủ hồ sơ</a></li>
                <li><a href="<?php echo _SITE_DOMAIN_;?>users/logout"><i class="icon-signout"></i>Thoát!</a></li>
              </ul>
            </div>
          </div>
        </div>
      </div>
      <article class="contentSpan">
        <div class="holder">
          <h2>Thông tin tài khoản</h2>
          <em></em>
        </div>
        <div class="contentHolder">
          <!-- Posts to the job-seeker registration route. -->
          <form id="contact-form" action="<?php echo _SITE_DOMAIN_;?>dang-ki-tim-viec" method="post" enctype="multipart/form-data">
            <fieldset>
              <!-- Section 1: login credentials. -->
              <div> <h3>Thông tin đăng nhập</h3>
                <div class="form-div-1">
                  <p>Tên đăng nhập*</p>
                  <label class="name">
                    <input type="text" name="user" value="">
                    <br>
                  </label>
                </div>
                <div class="form-div-2">
                  <p>E-mail*</p>
                  <label class="email">
                    <input type="email" name="email" value="">
                    <br>
                  </label>
                </div>
                <div class="form-div-3">
                  <p>Điện thoại (* dùng để xác nhận )</p>
                  <label class="phone notRequired">
                    <input type="tel" name="phone" value="">
                    <br>
                  </label>
                </div>
                <div class="form-div-1">
                  <p>Mật khẩu</p>
                  <label class="phone notRequired">
                    <input type="password" name="pass" value="">
                    <br>
                  </label>
                </div>
                <div class="form-div-1">
                  <p>Nhập lại mật khẩu</p>
                  <label class="phone notRequired">
                    <input type="password" name="re_pass" value="">
                    <br>
                  </label>
                </div>
              </div>
              <div class="clearfix"></div>
              <hr>
              <!-- Section 2: candidate contact details. -->
              <div><h3>Thông tin ứng viên</h3>
                <div class="form-div-1">
                  <p>Họ tên*</p>
                  <label class="name">
                    <input type="text" name="u_name" value="">
                    <br>
                  </label>
                </div>
                <div class="form-div-2">
                  <p>E-mail*</p>
                  <label class="email">
                    <input type="email" name="u_email" value="">
                    <br>
                  </label>
                </div>
                <div class="form-div-3">
                  <p>Điện thoại liên lạc</p>
                  <label class="phone notRequired">
                    <input type="tel" name="u_phone" value="">
                    <br>
                  </label>
                </div>
                <div>
                  <p>Địa chỉ*</p>
                  <label>
                    <input type="text" name="u_address" maxlength="120" value="">
                    <br>
                  </label>
                </div>
              </div>
              <div class="group-field">
                <label class="label-form"></label>
                <div class="group-right">
                  <button type="submit" class="btn btn-big btn-primary">Cập nhật</button>
                </div>
              </div>
            </fieldset>
          </form>
        </div>
      </article>
    </section>
    <!-- right menu for user -->
    <?php include 'right.php'?>
  </div>
</div>
<?php include __DIR__.'/../welcome/footer.php';?> | mit |
kennyllau/CI_HMVC | application/migrations/007_add_priority_post_info_columns_to_categories.php | 479 | <?php
class Migration_Add_priority_post_info_columns_to_categories extends CI_Migration
{
    /**
     * Adds the `priority` (INT) and `posted_info` (TEXT) columns to the
     * store_categories table.
     */
    public function up()
    {
        $fields = array(
            'priority' => array(
                'type' => 'INT'),
            'posted_info' => array(
                'type' => 'TEXT')
        );
        $this->dbforge->add_column('store_categories', $fields);
    }

    /**
     * Reverts up() by dropping both columns.
     *
     * Fix: the original referenced an undefined local $fields (never set in
     * this scope) and passed it to drop_column(), which expects a single
     * column name per call — so the rollback could never work.
     */
    public function down()
    {
        $this->dbforge->drop_column('store_categories', 'priority');
        $this->dbforge->drop_column('store_categories', 'posted_info');
    }
}
| mit |
hpcslag/extenders | index.js | 1362 | 'use strict'
//node-module loading
var path = require('path');
var colors = require('colors');

//system configure
var process_dir = path.join(__dirname,'./test/'); //fake path

//lib module loading
var config_reader = require('./lib/config_reader.js');
var core_plugin_loader = require('./lib/core_plugin_loader.js');
var ex_plugin_loader = require('./lib/extenders_plugin_loader.js');

// Read the app configuration, log what will be loaded, then boot the core.
config_reader.readIndexConfig(process_dir).then(function(config){
    console.log("loading your application...".yellow);
    var manifest = "manifest name: " + config.manifest;
    console.log(manifest.green);
    console.log("loading your application ex_plugins...".yellow);
    for (var i = 0; i < config.ex_plugins.length; i++) {
        var app = "load : " + config.ex_plugins[i].app_name;
        console.log(app.green);
    }
    //loading core application
    load_core(config);
}).catch(function(error){
    // Fix: this was a second .then(), which receives the previous handler's
    // (undefined) return value — so the error branch never ran and real
    // rejections went unhandled. .catch() actually observes failures.
    console.log("Can't read your configure files.".red);
    process.exit();
});

// Loads the core application plugin, then hands it the extender plugins
// built against the core app's API surface.
function load_core(config){
    core_plugin_loader.main(config,function(application){
        var app = new application();
        load_ex_plugin(config,app.api,function(plugin){
            app.main(plugin);
        });
        app.run();
    });
}

// Loads the extender plugins and passes them to the callback.
function load_ex_plugin(config,api,cb){
    ex_plugin_loader.main(config,api,function(plugin){
        cb(plugin);
    });
}

// NOTE(review): name typo ("applicaton") kept intentionally — renaming would
// change the script's surface; currently unused and empty.
function process_applicaton(){
}
//check folder exists | mit |
magicien/JSceneKit | src/js/Foundation/NSKeyedUnarchiver.js | 23113 | 'use strict'
import NSCoder from './NSCoder'
import NSData from './NSData'
import _File from '../util/_File'
import _FileReader from '../util/_FileReader'
import _BinaryReader from '../util/_BinaryReader'
import _ClassList from '../util/_ClassList'
/*global Buffer*/
const _classForKey = new Map()
const _loadingSymbol = Symbol('loading')
// Internal stand-in for a binary-plist UID marker: a lazy reference into the
// parsed archive's $objects table, resolved only when .obj is read.
class _UID {
  constructor(unarchiver, value) {
    this._unarchiver = unarchiver
    this._value = value
  }
  // The raw object index stored in the archive.
  get value() {
    return this._value
  }
  // Dereferences the index against the parsed plist's $objects array.
  get obj() {
    return this._unarchiver._parsedObj.$objects[this._value]
  }
}
/**
* NSKeyedUnarchiver, a concrete subclass of NSCoder, defines methods for decoding a set of named objects (and scalar values) from a keyed archive. Such archives are produced by instances of the NSKeyedArchiver class.
* @access public
* @extends {NSCoder}
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver
*/
export default class NSKeyedUnarchiver extends NSCoder {
// Initializing a Keyed Unarchiver
/**
* Initializes the receiver for decoding an archive previously encoded by NSKeyedArchiver.
* @access public
* @constructor
* @param {Data} data - An archive previously encoded by NSKeyedArchiver.
* @desc When you finish decoding data, you should invoke finishDecoding(). This method throws an exception if data is not a valid archive.
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1410862-init
*/
constructor(data = null, options = new Map()) {
super()
// Unarchiving Data
/**
* Indicates whether the receiver requires all unarchived classes to conform to NSSecureCoding.
* @type {boolean}
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1410824-requiressecurecoding
*/
this._requiresSecureCoding = false
// Managing the Delegate
/**
* The receiver’s delegate.
* @type {?NSKeyedUnarchiverDelegate}
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1415688-delegate
*/
this.delegate = null
// Instance Properties
/**
*
* @type {NSCoder.DecodingFailurePolicy}
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1643164-decodingfailurepolicy
*/
this._decodingFailurePolicy = null
/**
* @access private
* @type {?_BinaryReader}
*/
this._reader = null
/**
* @access private
* @type {number}
*/
this._offsetSize = 0
/**
* @access private
* @type {number}
*/
this._objCount = 0
/**
* @access private
* @type {Object[]}
*/
this._offsetArray = []
/**
* @access private
* @type {Object}
*/
this._parsedObj = {}
/**
* @access private
* @type {Object[]}
*/
this._dataObj = []
this._resolveFunctions = []
/**
* @access private
* @type {string}
*/
this._filePath = null
/**
* @access private
* @type {?Object}
*/
this._refObj = null
/**
* @access private
* @type {boolean}
*/
this._decodingFinished = false
this._options = options
this._promises = []
if(data !== null){
this._reader = new _BinaryReader(data, true, 'utf8')
this._checkHeader()
this._parsedObj = this._parseBPlist()
}
}
copy() {
const coder = new NSKeyedUnarchiver()
coder._requiresSecureCoding = this._requiresSecureCoding
coder.delegate = this.delegate
coder._decodingFailurePolicy = this._decodingFailurePolicy
coder._reader = this._reader
coder._offsetSize = this._offsetSize
coder._objCount = this._objCount
coder._offsetArray = this._offsetArray
coder._parsedObj = this._parsedObj
coder._dataObj = this._dataObj
coder._resolveFunctions = this._resolveFunctions
coder._filePath = this._filePath
coder._refObj = this._refObj
coder._decodingFinished = this._decodingFinished
return coder
}
  // Unarchiving Data

  /**
   * Decodes and returns the object graph previously encoded by NSKeyedArchiver and stored in a given NSData object.
   * @access public
   * @param {Buffer} data - An object graph previously encoded by NSKeyedArchiver.
   * @param {?string} path - source path, kept so relative resources can be resolved later.
   * @param {Map} [options] - decoder options forwarded to the unarchiver.
   * @returns {?Object} -
   * @desc This method raises an invalidArchiveOperationException if data is not a valid archive.
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1413894-unarchiveobject
   */
  static unarchiveObjectWithData(data, path = null, options = new Map()) {
    const unarchiver = new NSKeyedUnarchiver(data, options)
    unarchiver._filePath = path
    // $top.root is a UID pointing at the root object in $objects.
    const topObjIndex = unarchiver._parsedObj.$top.root.value
    return unarchiver._parseClassAt(topObjIndex)
  }
_checkHeader() {
this._reader.seek(0)
const header = this._reader.readString(8)
if(header !== 'bplist00'){
throw new Error(`unsupported file format: ${header}`)
}
}
  /**
   * Reads the file at `path` and resolves with its raw contents as a
   * binary string (via the _File/_FileReader shims).
   * @access private
   * @param {string} path - file path to read
   * @param {Map} options - currently unused here; kept for symmetry with callers
   * @returns {Promise} - resolves with the file data, rejects with the reader error
   */
  static _getBufferOfFile(path, options) {
    // TODO: use 'await' to return Buffer instead of Promise
    const promise = new Promise((resolve, reject) => {
      const file = new _File([], path)
      const reader = new _FileReader()
      reader.onload = () => {
        const data = reader.result
        resolve(data)
      }
      reader.onerror = () => {
        reject(reader.error)
      }
      reader.readAsBinaryString(file)
    })
    return promise
  }
  /**
   * Reads the binary-plist trailer (sizes, object count, top-object index,
   * offset-table position), loads the offset table, then parses and returns
   * the top-level object.
   * @access private
   * @returns {Object} - the parsed top-level object
   */
  _parseBPlist() {
    const reader = this._reader
    // read basic info
    // The relevant trailer fields live in the last 26 bytes of the file.
    reader.seek(-26)
    const dataLen = reader.length
    // intSize: byte width of each offset-table entry;
    // _offsetSize: byte width of object references inside containers.
    const intSize = reader.readUnsignedByte()
    this._offsetSize = reader.readUnsignedByte()
    this._objCount = reader.readUnsignedLongLong()
    const topIndex = reader.readUnsignedLongLong()
    const tablePos = reader.readUnsignedLongLong()
    //console.log(`dataLen: ${dataLen}`)
    //console.log(`intSize: ${intSize}`)
    //console.log(`offsetSize: ${this._offsetSize}`)
    //console.log(`objCount: ${this._objCount}`)
    //console.log(`topIndex: ${topIndex}`)
    //console.log(`tablePos: ${tablePos}`)

    // Materialize the offset table: byte position of every object.
    this._offsetArray = []
    let pos = tablePos
    reader.seek(pos)
    const objCount = this._objCount
    for(let i=0; i<objCount; i++){
      const offset = reader.readInteger(intSize)
      this._offsetArray.push(offset)
    }

    return this._parseObjAtIndex(topIndex)
  }
_parseObjAtIndex(index) {
return this._parseObj(this._offsetArray[index])
}
  /**
   * Parses one binary-plist object at the given offset (or at the reader's
   * current position when offset is null). Each object starts with a marker
   * byte: the high nibble selects the type, the low nibble encodes the
   * size/length (0x0F meaning "length follows as an int object").
   * @access private
   * @param {?number} offset - byte offset to seek to, or null for "here"
   * @param {boolean} signed - whether integer payloads are read as signed
   * @returns {*} - null/boolean/number/Data/string/_UID/Array/Set/Object
   * @throws {Error} for unsupported or unknown markers (note: Date markers
   *   0x3x are recognized but not decoded, so they fall through to the
   *   trailing throw)
   */
  _parseObj(offset = null, signed = false) {
    const reader = this._reader
    if(offset !== null){
      reader.seek(offset)
    }
    const type = reader.readUnsignedByte()
    const type1 = type & 0xF0 // type nibble
    const type2 = type & 0x0F // size/length nibble
    //console.log(`parseObj: type: ${type1} ${type2}`)
    if(type1 === 0x00){
      // null, boolean
      if(type2 === 0){
        //console.log('    type: null')
        return null
      }else if(type2 === 8){
        //console.log('    type: boolean')
        return false
      }else if(type2 === 9){
        //console.log('    type: boolean')
        return true
      }
    }else if(type1 === 0x10){
      // Int: payload is 2^type2 bytes wide.
      const len = Math.pow(2, type2)
      //console.log('    type: integer ' + len)
      return reader.readInteger(len, signed)
    }else if(type1 === 0x20){
      // Float: 4 or 8 bytes.
      const len = Math.pow(2, type2)
      if(len === 4){
        //console.log('    type: float')
        return reader.readFloat()
      }else if(len === 8){
        //console.log('    type: double')
        return reader.readDouble()
      }
      throw new Error(`unsupported float size: ${len}`)
    }else if(type1 === 0x30){
      // Date (not implemented; falls through to the throw below)
      //console.log('    type: Date')
    }else if(type1 === 0x40){
      // Data
      const count = this._getDataSize(type2)
      //console.log(`    type: Data: length: ${count}`)
      return reader.readData(count)
    }else if(type1 === 0x50){
      // ASCII
      const count = this._getDataSize(type2)
      //console.log('    type: ascii ' + count)
      return reader.readString(count, 'ascii')
    }else if(type1 === 0x60){
      // UTF-16
      const count = this._getDataSize(type2)
      //console.log('    type: UTF-16 ' + count)
      return reader.readString(count, 'utf16be') // Big Endian might not be supported...
    }else if(type1 === 0x80){
      // UID: lazy reference into $objects, resolved via _UID.obj.
      const uid = reader.readInteger(type2 + 1, false)
      //console.log('    type: UID: ' + uid)
      return new _UID(this, uid)
    }else if(type1 === 0xA0){
      // Array: object refs are read first, then resolved recursively.
      const count = this._getDataSize(type2)
      //console.log('    type: array: ' + count)
      const arrIndex = []
      for(let i=0; i<count; i++){
        arrIndex.push(reader.readInteger(this._offsetSize, false))
      }
      const arr = arrIndex.map((index) => this._parseObjAtIndex(index))
      //console.log(`***arr.length: ${arr.length}`)
      return arr
    }else if(type1 === 0xC0){
      // Set: same layout as array, wrapped in a Set.
      const count = this._getDataSize(type2)
      const setIndex = []
      for(let i=0; i<count; i++){
        setIndex.push(reader.readInteger(this._offsetSize, false))
      }
      const arr = setIndex.map((index) => this._parseObjAtIndex(index))
      return new Set(arr)
    }else if(type1 === 0xD0){
      // Dictionary: all key refs, then all value refs, then resolve pairs.
      //console.log('    type: dictionary')
      const count = this._getDataSize(type2)
      const keyIndex = []
      const valueIndex = []
      for(let i=0; i<count; i++){
        keyIndex.push(reader.readInteger(this._offsetSize, false))
      }
      for(let i=0; i<count; i++){
        valueIndex.push(reader.readInteger(this._offsetSize, false))
      }
      const result = {}
      for(let i=0; i<count; i++){
        const key = this._parseObjAtIndex(keyIndex[i])
        //console.log('key: ' + key)
        const val = this._parseObjAtIndex(valueIndex[i])
        //console.log('val: ' + val)
        result[key] = val
      }
      return result
    }
    throw new Error(`unknown data type: ${type}`)
  }
_getDataSize(type2) {
let count = 0
if(type2 !== 0x0F){
count = type2
}else{
count = this._parseObj(null, false)
if(typeof count !== 'number'){
throw new Error('data size must be int type')
}
}
return count
}
  /**
   * Instantiates (and caches) the archived object at the given index.
   * Reference cycles are handled with a sentinel: if the object is already
   * being built, a Promise is returned that resolves once construction
   * completes.
   * @access private
   * @param {number} index - index into the $objects table
   * @returns {?Object|Promise} - the instance, or a Promise of it for cycles
   */
  _parseClassAt(index) {
    const obj = this._parsedObj.$objects[index]
    if(this._dataObj[index] === _loadingSymbol){
      // it seems to be a reference loop; return Promise
      return new Promise((resolve, reject) => {
        if(typeof this._resolveFunctions[index] === 'undefined'){
          this._resolveFunctions[index] = []
        }
        this._resolveFunctions[index].push(resolve)
      })
    }else if(typeof this._dataObj[index] !== 'undefined'){
      // Already instantiated: reuse the cached object.
      return this._dataObj[index]
    }
    // Mark as in-progress before recursing so cycles are detected.
    this._dataObj[index] = _loadingSymbol
    const data = this._parseClass(obj)
    this._dataObj[index] = data
    // Settle any Promises handed out while this object was being built.
    if(Array.isArray(this._resolveFunctions[index])){
      this._resolveFunctions[index].forEach((resolve) => {
        resolve(data)
      })
      delete this._resolveFunctions[index]
    }
    return data
  }
  /**
   * Instantiates one archived object: looks up the registered JS class for
   * the archived $classname and calls its initWithCoder with a scoped copy
   * of this unarchiver (the copy's _refObj is the raw archived dictionary).
   * @access private
   * @param {Object} obj - the raw archived dictionary
   * @returns {?Object} - the instance, or null when no class is registered
   */
  _parseClass(obj) {
    const className = obj.$class.obj.$classname
    //console.log(`parseClass ${className}`)
    const classObj = NSKeyedUnarchiver.classForClassName(className)
    if(classObj){
      const unarchiver = this.copy()
      unarchiver._refObj = obj
      return classObj.initWithCoder(unarchiver)
    }
    return null
  }

  /**
   * @access private
   * @param {Object} obj -
   * @param {Object} classObj - class expected to expose a static _initWithData
   * @returns {Object} -
   */
  _parseStruct(obj, classObj) {
    if(typeof classObj._initWithData !== 'function'){
      throw new Error(`${classObj.prototype.constructor.name} class doesn't have _initWithData function`)
    }
    return classObj._initWithData(obj)
  }
  /**
   * Decodes and returns the object graph previously encoded by NSKeyedArchiver written to the file at a given path.
   * @access public
   * @param {string} path - A path to a file that contains an object graph previously encoded by NSKeyedArchiver.
   * @param {Map} [options] - decoder options forwarded to the unarchiver.
   * @returns {Promise} - resolves with the decoded root object
   * @desc This method raises an invalidArgumentException if the file at path does not contain a valid archive.
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1417153-unarchiveobject
   */
  static unarchiveObjectWithFile(path, options = new Map()) {
    // Load the raw bytes first, then delegate to the data-based decoder.
    const promise = NSKeyedUnarchiver._getBufferOfFile(path, options)
    .then((data) => {
      return NSKeyedUnarchiver.unarchiveObjectWithData(data, path, options)
    })

    return promise
  }
  // Decoding Data

  /**
   * Returns a Boolean value that indicates whether the archive contains a value for a given key within the current decoding scope.
   * @access public
   * @param {string} key - A key in the archive within the current decoding scope. key must not be nil.
   * @returns {boolean} -
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1413564-containsvalue
   */
  containsValueForKey(key) {
    // _refObj is the raw archived dictionary for the object being decoded.
    return typeof this._refObj[key] !== 'undefined'
  }

  /**
   * Decodes a Boolean value associated with a given key.
   * @access public
   * @param {string} key - A key in the archive within the current decoding scope. key must not be nil.
   * @returns {boolean} -
   * @throws {Error} when called after finishDecoding()
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1413260-decodebool
   */
  decodeBoolForKey(key) {
    if(this._decodingFinished){
      throw new Error(`can't decode '${key}' after finishDecoding() is called`)
    }
    const value = this._getValueForKey(key)
    // Coerce whatever was archived to a strict boolean.
    return Boolean(value)
  }

  /**
   * Decodes a stream of bytes associated with a given key.
   * @access public
   * @param {string} key - A key in the archive within the current decoding scope. key must not be nil.
   * @param {?UnsafeMutablePointer<Int>} lengthp - Upon return, contains the number of bytes returned.
   * @returns {?UnsafePointer<UInt8>} -
   * @throws {Error} when called after finishDecoding()
   * @desc The returned value is a pointer to a temporary buffer owned by the receiver. The buffer goes away with the unarchiver, not the containing autorelease pool block. You must copy the bytes into your own buffer if you need the data to persist beyond the life of the receiver.
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1418091-decodebytes
   */
  decodeBytesForKeyReturnedLength(key, lengthp) {
    if(this._decodingFinished){
      throw new Error(`can't decode '${key}' after finishDecoding() is called`)
    }
    // NOTE(review): lengthp is accepted but never written — confirm callers
    // do not rely on it being populated.
    return this._getValueForKey(key)
  }

  /**
   * Decodes a double-precision floating-point value associated with a given key.
   * @access public
   * @param {string} key - A key in the archive within the current decoding scope. key must not be nil.
   * @returns {number} -
   * @throws {Error} when called after finishDecoding()
   * @desc If the archived value was encoded as single-precision, the type is coerced.
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1414963-decodedouble
   */
  decodeDoubleForKey(key) {
    if(this._decodingFinished){
      throw new Error(`can't decode '${key}' after finishDecoding() is called`)
    }
    return this._getValueForKey(key)
  }
/**
* Decodes a single-precision floating-point value associated with a given key.
* @access public
* @param {string} key - A key in the archive within the current decoding scope. key must not be nil.
* @returns {number} -
* @desc If the archived value was encoded as double precision, the type is coerced, loosing precision. If the archived value is too large for single precision, the method raises an NSRangeException.
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1412252-decodefloat
*/
decodeFloatForKey(key) {
if(this._decodingFinished){
throw new Error(`can't decode '${key}' after finishDecoding() is called`)
}
return this._getValueForKey(key)
}
/**
* Decodes and returns an int value that was previously encoded with encodeCInt(_:forKey:), encode(_:forKey:), encode(_:forKey:), or encode(_:forKey:) and associated with the string key.
* @access public
* @param {string} key -
* @returns {number} -
* @desc If the encoded integer does not fit into the default integer size, the method raises an NSRangeException. Subclasses must override this method if they perform keyed coding.
* @see https://developer.apple.com/documentation/foundation/nscoder/1411168-decodecint
*/
decodeCIntForKey(key) {
if(this._decodingFinished){
throw new Error(`can't decode '${key}' after finishDecoding() is called`)
}
return this._getValueForKey(key)
}
/**
* Decodes a 32-bit integer value associated with a given key.
* @access public
* @param {string} key - A key in the archive within the current decoding scope. key must not be nil.
* @returns {number} -
* @desc If the archived value was encoded with a different size but is still an integer, the type is coerced. If the archived value is too large to fit into a 32-bit integer, the method raises an NSRangeException.
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1416327-decodeint32
*/
decodeInt32ForKey(key) {
if(this._decodingFinished){
throw new Error(`can't decode '${key}' after finishDecoding() is called`)
}
return this._getValueForKey(key)
}
/**
* Decodes a 64-bit integer value associated with a given key.
* @access public
* @param {string} key - A key in the archive within the current decoding scope. key must not be nil.
* @returns {Int64} -
* @desc If the archived value was encoded with a different size but is still an integer, the type is coerced.
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1413288-decodeint64
*/
decodeInt64ForKey(key) {
if(this._decodingFinished){
throw new Error(`can't decode '${key}' after finishDecoding() is called`)
}
return this._getValueForKey(key)
}
  /**
   * Decodes and returns an object associated with a given key.
   * @access public
   * @param {string} key - A key in the archive within the current decoding scope. key must not be nil.
   * @param {?Map} options - forwarded to _parseClassAt — presumably decoder options; TODO confirm shape
   * @returns {?Object} -
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1409082-decodeobject
   */
  decodeObjectForKey(key, options) {
    if(this._decodingFinished){
      throw new Error(`can't decode '${key}' after finishDecoding() is called`)
    }
    const parsedObj = this._refObj[key]
    if(typeof parsedObj === 'string'){
      // Strings are stored inline; return them as-is.
      return parsedObj
    }else if(parsedObj instanceof _UID){
      // A _UID is an indirect reference into the archive's object table.
      const obj = parsedObj.obj
      if(typeof obj.$class !== 'undefined'){
        // $class marks an archived class instance: decode it fully.
        return this._parseClassAt(parsedObj.value, options)
      }
      return obj
    }
    throw new Error(`unknown data type for key ${key}: ${parsedObj}`)
  }
  /**
   * Returns a decoded property list for the specified key.
   * @access public
   * @param {string} key - The coder key.
   * @param {?Map} options - forwarded to decodeObjectForKey and the nested unarchive call.
   * @returns {?Object} -
   * @desc This method calls decodeObjectOfClasses:forKey: with a set allowing only property list types.
   *   The value stored under key must itself be a Buffer containing a nested
   *   keyed archive; it is decoded via unarchiveObjectWithData.
   * @see https://developer.apple.com/documentation/foundation/nscoder/1416284-decodepropertylist
   */
  decodePropertyListForKey(key, options) {
    if(this._decodingFinished){
      throw new Error(`can't decode '${key}' after finishDecoding() is called`)
    }
    const parsedObj = this.decodeObjectForKey(key, options)
    //console.log(`${key}: ${parsedObj.constructor.name}`)
    if(!(parsedObj instanceof Buffer)){
      throw new Error(`propertylist of key ${key} is not Buffer data`)
    }
    //console.log(`***header: ${parsedObj.toString('ascii', 0, 8)}`)
    //console.log(`length: ${parsedObj.length}`)
    //for(let i=0; i<8; i++){
    //  console.log(`${i}: ${parsedObj.readUIntBE(i, 1)}`)
    //}
    // Recursively unarchive the embedded plist data with the same file path.
    return NSKeyedUnarchiver.unarchiveObjectWithData(parsedObj, this._filePath, options)
  }
decodeObjectOfTypeForKey(type, key) {
if(this._decodingFinished){
throw new Error(`can't decode '${key}' after finishDecoding() is called`)
}
const parsedObj = this._refObj[key]
if(!(parsedObj instanceof Buffer)){
throw new Error(`value is not Buffer data for key: ${key}`)
}
return this._parseStruct(parsedObj, type)
}
get _fileName() {
if(this._filePath === null){
return null
}
const paths = this._filePath.split('/')
const fileName = paths.pop()
return fileName
}
get _directoryPath() {
if(this._filePath === null){
return null
}
const paths = this._filePath.split('/')
const fileName = paths.pop()
const directoryPath = paths.join('/') + '/'
return directoryPath
}
  /**
   * Tells the receiver that you are finished decoding objects.
   * @access public
   * @returns {void}
   * @desc Invoking this method allows the receiver to notify its delegate and to perform any final operations on the archive. Once this method is invoked, the receiver cannot decode any further values.
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1418233-finishdecoding
   */
  finishDecoding() {
    // All decode*ForKey methods throw once this flag is set.
    this._decodingFinished = true
  }
  // Managing Class Names
  /**
   * Adds a class translation mapping to the receiver whereby objects encoded with a given class name are decoded as instances of a given class instead.
   * @access public
   * @param {?Object} cls - The class with which to replace instances of the class named codedName.
   * @param {string} codedName - archived class name to translate.
   * @returns {void}
   * @desc When decoding, the receiver’s translation map overrides any translation that may also be present in the class’s map (see setClass(_:forClassName:)).
   *   NOTE(review): _classForKey appears to be a module-level map shared by all
   *   unarchivers (it is not an instance property) — confirm intended scope.
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1414659-setclass
   */
  static setClassForClassName(cls, codedName) {
    _classForKey.set(codedName, cls)
  }
/**
* Returns the class from which the receiver instantiates an encoded object with a given class name.
* @access public
* @param {string} codedName -
* @returns {?Object} -
* @desc The class’s separate translation map is not searched.
* @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1412476-class
*/
static classForClassName(codedName) {
const classObj = _classForKey.get(codedName)
if(classObj){
return classObj
}
return _ClassList.get(codedName)
}
  // Type Methods
  /**
   * Decodes the top-level object graph from the given archive data.
   * @access public
   * @param {NSData} data - archive bytes to decode.
   * @param {string} path - originating file path (used for relative resource lookup).
   * @param {Map} [options] - decoder options forwarded to unarchiveObjectWithData.
   * @returns {Object} - the unarchived object graph (not void; see body).
   * @throws {Error}
   * @see https://developer.apple.com/documentation/foundation/nskeyedunarchiver/1413622-unarchivetoplevelobjectwithdata
   */
  static unarchiveTopLevelObjectWithData(data, path = null, options = new Map()) {
    // what's different from unarchiveObjectWithData???
    // NOTE(review): currently a pure alias of unarchiveObjectWithData; Foundation
    // distinguishes the two by error propagation — confirm whether that matters here.
    return NSKeyedUnarchiver.unarchiveObjectWithData(data, path, options)
  }
_getValueForKey(key) {
const value = this._refObj[key]
if(value instanceof _UID){
return value.obj
}
return value
}
}
| mit |
axelpale/tresdb | migration/lib/dropCollections.js | 658 | const asyn = require('async');
// Drops every collection in the given database, one collection at a time.
module.exports = (db, callback) => {
  // Drop all collections in the given db.
  //
  // Parameters:
  //   db
  //     georap-db instance
  //   callback
  //     function (err) — invoked with an error on failure, or with no
  //     arguments once every collection has been dropped.
  //
  db.get()
    .listCollections({})
    .toArray((err, colls) => {
      if (err) {
        return callback(err);
      }
      const collNames = colls.map(c => c.name);
      // Sequential drop: eachSeries stops at the first failing collection.
      asyn.eachSeries(collNames, (collName, next) => {
        // Drop possibly existing collection before population.
        db.get().dropCollection(collName, next);
      }, (eachErr) => {
        if (eachErr) {
          return callback(eachErr);
        }
        return callback();
      });
    });
};
| mit |
gmamaladze/lego-rc-ms-iot | Lego.PowerFunctions.WebApi/SpiDeviceFactory.cs | 2098 | using System;
using System.Threading.Tasks;
using Windows.Devices.Enumeration;
using Windows.Devices.Spi;
namespace Lego.PowerFunctions.WebApi
{
/* Uncomment for Raspberry Pi 2 or 3 */
/* For Raspberry Pi 2 or 3, use SPI0 */
/* Line 0 maps to physical pin number 24 on the RPi2 or RPi3 */
internal static class SpiDeviceFactory
{
private const string SPI_CONTROLLER_NAME = "SPI0";
private const int SPI_CHIP_SELECT_LINE = 0;
internal static async Task<SpiDevice> InitSpi()
{
//Frequency is 38KHz in the protocol
const double tCarrier = 1/38.0f;
//Reality is that there is milliseconds 2us difference in the output as there is always milliseconds 2us bit on on SPI using MOSI
const double tUshort = tCarrier - 2e-3f;
//Calulate the outpout frenquency. Here = 16/(1/38 -2^-3) = 658KHz
var freq = Convert.ToInt32(16.0f/tUshort);
var settings = new SpiConnectionSettings(SPI_CHIP_SELECT_LINE)
{
ClockFrequency = 658000,
Mode = SpiMode.Mode3
}; /* Create SPI initialization settings */
/* Datasheet specifies maximum SPI clock frequency of 10MHz */
/* The display expects an idle-high clock polarity, we use Mode3
* to set the clock polarity and phase to: CPOL = 1, CPHA = 1
*/
var spiAqs = SpiDevice.GetDeviceSelector(SPI_CONTROLLER_NAME);
/* Find the selector string for the SPI bus controller */
var devicesInfo = await DeviceInformation.FindAllAsync(spiAqs);
/* Find the SPI bus controller device with our selector string */
return await SpiDevice.FromIdAsync(devicesInfo[0].Id, settings);
/* Create an SpiDevice with our bus controller and SPI settings */
}
}
} | mit |
GlukKazan/GlukKazan.github.io | xiangqi/scripts/gwangsanghui-invariant.js | 3044 | (function() {
var checkVersion = Dagaz.Model.checkVersion;
Dagaz.Model.checkVersion = function(design, name, value) {
  // Swallow our own feature flag; delegate every other name to the
  // previously installed checker.
  if (name == "gwangsanghui-invariant") return;
  checkVersion(design, name, value);
}
// Returns true when an enemy King standing inside the current player's palace
// zone (zone 0) faces a King directly along the "n" direction with no piece
// between them (the Janggi/Gwangsanghui "facing kings" configuration).
var isOpposite = function(design, board, king) {
  var pos = null;
  var player = null;
  // Find an enemy King located inside the mover's zone 0.
  _.each(design.allPositions(), function(p) {
     if (design.inZone(0, board.player, p)) {
         var piece = board.getPiece(p);
         if ((piece !== null) && (piece.player != board.player) && (piece.type == king)) {
             pos = p;
             player = piece.player;
         }
     }
  });
  if (pos !== null) {
      // Walk north from that King; the first occupied square decides:
      // another King means the kings face each other.
      var n = design.getDirection("n");
      var p = design.navigate(player, pos, n);
      while (p !== null) {
          var piece = board.getPiece(p);
          if (piece !== null) {
              if (piece.type == king) {
                  return true;
              }
              break;
          }
          p = design.navigate(player, p, n);
      }
  }
  return false;
}
// A draw arises when the kings faced each other both in the current position
// and in its parent position (requires a parent to compare against).
var isDraw = function(design, board, king) {
  if (board.parent === null) return false;
  return isOpposite(design, board, king) && isOpposite(design, board.parent, king);
}
// Wraps the stock goal checker: 0 (draw) for a sustained facing-kings
// configuration, +1/-1 when one side has lost all Kings, otherwise defers
// to the original implementation.
var checkGoals = Dagaz.Model.checkGoals;
Dagaz.Model.checkGoals = function(design, board, player) {
  var king = design.getPieceType("King");
  if (isDraw(design, board, king)) {
      return 0;
  }
  // Count surviving Kings for each side.
  var friends = 0;
  var enemies = 0;
  _.each(design.allPositions(), function(pos) {
      var piece = board.getPiece(pos);
      if ((piece !== null) && (piece.type == king)) {
          if (piece.player == player) {
              friends++;
          } else {
              enemies++;
          }
      }
  });
  if (enemies == 0) {
      return 1;
  }
  if (friends == 0) {
      return -1;
  }
  return checkGoals(design, board, player);
}
// Wraps the stock invariant checker with two variant-specific rules:
// 1) on the initial position, Cannons may not make simple (non-capturing)
//    moves — such moves are marked failed;
// 2) a simple move landing on a Forward/Backward piece captures it.
var CheckInvariants = Dagaz.Model.CheckInvariants;
Dagaz.Model.CheckInvariants = function(board) {
  var design = Dagaz.Model.design;
  var c  = design.getPieceType("Cannon");
  var ec = design.getPieceType("EastCannon");
  var wc = design.getPieceType("WestCannon");
  if (board.parent === null) {
      // Initial position only: forbid moving any Cannon variant.
      _.each(board.moves, function(m) {
          if ((m.actions.length == 1) && (m.actions[0][0] !== null)) {
              var pos = m.actions[0][0];
              var piece = board.getPiece(pos);
              if ((piece !== null) && ((piece.type == c) || (piece.type == ec) || (piece.type == wc))) {
                  m.failed = true;
              }
          }
      });
  }
  var fw = design.getPieceType("Forward");
  var bw = design.getPieceType("Backward");
  _.each(board.moves, function(m) {
      // A move with both a source and a destination: capture any
      // Forward/Backward piece sitting on the destination square.
      if ((m.actions.length == 1) && (m.actions[0][0] !== null) && (m.actions[0][1] !== null)) {
          var pos = m.actions[0][1][0];
          var piece = board.getPiece(pos);
          if ((piece !== null) &&
              ((piece.type == fw) || (piece.type == bw))) {
              m.capturePiece(pos);
          }
      }
  });
  CheckInvariants(board);
}
})();
| mit |
randywallace/expense-report-rails | app/controllers/employers_controller.rb | 2097 | class EmployersController < ApplicationController
before_filter :authenticate_user!
# GET /employers
# GET /employers.json
def index
@employers = Employer.all
respond_to do |format|
format.html # index.html.erb
format.json { render json: @employers }
end
end
# GET /employers/1
# GET /employers/1.json
def show
@employer = Employer.find(params[:id])
respond_to do |format|
format.html # show.html.erb
format.json { render json: @employer }
end
end
# GET /employers/new
# GET /employers/new.json
def new
@employer = Employer.new
respond_to do |format|
format.html # new.html.erb
format.json { render json: @employer }
end
end
# GET /employers/1/edit
def edit
@employer = Employer.find(params[:id])
end
# POST /employers
# POST /employers.json
def create
@employer = Employer.new(params[:employer])
respond_to do |format|
if @employer.save
format.html { redirect_to @employer, :flash => {:notice => 'Employer was successfully created.' }}
format.json { render json: @employer, status: :created, location: @employer }
else
format.html { render action: "new" }
format.json { render json: @employer.errors, status: :unprocessable_entity }
end
end
end
# PUT /employers/1
# PUT /employers/1.json
def update
@employer = Employer.find(params[:id])
respond_to do |format|
if @employer.update_attributes(params[:employer])
format.html { redirect_to @employer, :flash => {notice: 'Employer was successfully updated.' }}
format.json { head :no_content }
else
format.html { render action: "edit" }
format.json { render json: @employer.errors, status: :unprocessable_entity }
end
end
end
# DELETE /employers/1
# DELETE /employers/1.json
def destroy
@employer = Employer.find(params[:id])
@employer.destroy
respond_to do |format|
format.html { redirect_to employers_url }
format.json { head :no_content }
end
end
end
| mit |
trumpsilver/SNotepad- | NotePad++/Classes/TabPageStatusClass.cs | 3716 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Drawing;
using System.Windows.Forms;
/// <summary>
/// Tab page Status
/// </summary>
namespace NotePad__
{
partial class TabControlClass
{
//A list to store all the tabpages status
static List<TabPageStatus> listOfTabPageStatus = new List<TabPageStatus>();
/// <summary>
/// Get selected tab page status
/// </summary>
public static TabPageStatus CurrentTabPageStatus
{
get
{
foreach (TabPageStatus tabPageStatus in listOfTabPageStatus)
{
if (tabPageStatus.TabPage == tabControl.SelectedTab)
{
return tabPageStatus;
}
}
return null;
}
}
//a class to store tab status
public class TabPageStatus
{
private TabPage tabPage;
private string language;
private bool documentMapEnabled;
private bool canUndo;
private bool canRedo;
public TabPage TabPage
{
get
{
return tabPage;
}
set
{
tabPage = value;
}
}
public string Language
{
get
{
return language;
}
set
{
language = value;
}
}
public bool CanUndo
{
get
{
return canUndo;
}
set
{
canUndo = value;
}
}
public bool CanRedo
{
get
{
return canRedo;
}
set
{
canRedo = value;
}
}
public bool DocumentMapEnabled
{
get
{
return documentMapEnabled;
}
set
{
documentMapEnabled = value;
}
}
};
/// <summary>
/// Init tab page status
/// </summary>
/// <param name="tabPage"></param>
private static void InitTabPageStatus(TabPage tabPage)
{
TabPageStatus tabPageStatus = new TabPageStatus();
tabPageStatus.TabPage = tabPage;
tabPageStatus.Language = StylesClass.DefaultLanguage;
tabPageStatus.CanUndo = false;
tabPageStatus.CanRedo = false;
tabPageStatus.DocumentMapEnabled = false;
listOfTabPageStatus.Add(tabPageStatus);
}
/// <summary>
/// Delete status of current tabpage
/// </summary>
/// <param name="tabPage"></param>
public static void RemoveTabPageStatus(TabPage tabPage)
{
TabPageStatus tabStatusToDelete = null;
foreach (TabPageStatus tabPageStatus in listOfTabPageStatus)
{
if (tabPageStatus.TabPage == tabPage)
{
tabStatusToDelete = tabPageStatus;
}
}
if (tabStatusToDelete != null)
{
listOfTabPageStatus.Remove(tabStatusToDelete);
}
}
}
}
| mit |
tonimsc/dsaqt1516g1 | service/src/main/java/edu/upc/eetac/dsa/videostore/DAO/ResourcesDAOImpl.java | 3399 | package edu.upc.eetac.dsa.videostore.DAO;
import edu.upc.eetac.dsa.videostore.db.Database;
import edu.upc.eetac.dsa.videostore.entity.Resources;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
public class ResourcesDAOImpl implements ResourcesDAO {
    /**
     * Inserts a resource row for a movie and returns it re-read from the
     * database via {@link #getResource(String)}.
     *
     * @param peliculaid  movie identifier (key of the resource row)
     * @param recursopeli resource locator to store for the movie
     * @return the freshly inserted resource
     * @throws SQLException on any database error
     * @throws ResourcesAlreadyExistsException declared but never raised here —
     *         presumably a duplicate insert surfaces as SQLException instead;
     *         TODO confirm
     */
    @Override
    public Resources createResource(String peliculaid, String recursopeli) throws SQLException, ResourcesAlreadyExistsException{
        Connection connection = null;
        PreparedStatement stmt = null;
        try {
            connection = Database.getConnection();
            stmt = connection.prepareStatement(ResourcesDAOQuery.POST_RES);
            stmt.setString(1, peliculaid);
            stmt.setString(2, recursopeli);
            stmt.executeUpdate();
        } catch (SQLException e) {
            throw e;
        } finally {
            if (stmt != null) stmt.close();
            if (connection != null) {
                connection.close();
            }
        }
        // Re-read the row on a new connection so callers get the stored state.
        return getResource(peliculaid);
    }
@Override
public Resources updateResource(String peliculaid, String recursopeli) throws SQLException{
Connection connection = null;
PreparedStatement stmt = null;
try {
Resources resources = new Resources();
connection = Database.getConnection();
stmt = connection.prepareStatement(ResourcesDAOQuery.UPDATE_RES);
stmt.setString(1, recursopeli);
stmt.setString(2, peliculaid);
stmt.executeUpdate();
int rows = stmt.executeUpdate();
if (rows == 1)
resources = getResource(peliculaid);
return resources;
} catch (SQLException e) {
throw e;
} finally {
if (stmt != null) stmt.close();
if (connection != null) connection.close();
}
}
    /**
     * Fetches the resource row for the given movie id.
     *
     * @param peliculaid movie identifier to look up
     * @return the matching resource, or an empty {@link Resources} object
     *         (all fields null) when no row exists
     * @throws SQLException on any database error
     */
    @Override
    public Resources getResource(String peliculaid) throws SQLException{
        Connection connection = null;
        PreparedStatement stmt = null;
        Resources resources = new Resources();
        try {
            connection = Database.getConnection();
            stmt = connection.prepareStatement(ResourcesDAOQuery.GET_RES);
            stmt.setString(1, peliculaid);
            ResultSet rs = stmt.executeQuery();
            // At most one row is consumed; closing stmt also closes rs.
            if (rs.next()) {
                resources.setIdmovie(rs.getString("peliculaid"));
                resources.setResourcesmovie(rs.getString("recursopeli"));
            }
        } catch (SQLException e) {
            throw e;
        } finally {
            if (stmt != null) stmt.close();
            if (connection != null) connection.close();
        }
        return resources;
    }
@Override
public boolean deleteResource(String peliculaid) throws SQLException{
Connection connection = null;
PreparedStatement stmt = null;
try {
connection = Database.getConnection();
stmt = connection.prepareStatement(ResourcesDAOQuery.DELETE_RES);
stmt.setString(1, peliculaid);
int rows = stmt.executeUpdate();
return (rows == 1);
} catch (SQLException e) {
throw e;
} finally {
if (stmt != null) stmt.close();
if (connection != null) connection.close();
}
}
}
| mit |
jabdul/appland | src/module-appland/nls/conf.js | 738 | define({
"root": {
"module-appland": {
"projectName": 'Project Name',
"nameSpaces":['apl-'],
"labels": [{
"LABEL_0": "Appland",
"LABEL_1": "Overview",
"LABEL_2": "Statistics overview",
"LABEL_3": "Modules",
"LABEL_4": "Active Modules",
"LABEL_5": "Unit Tests",
"LABEL_6": "Passed",
"LABEL_7": "Failed",
"LABEL_8": "Builds",
"LABEL_9": "Active",
"LABEL_10": "Module ID",
"LABEL_11": "This is the monitoring console for all your modules. Future versions of this admin area will contain features to help you manage your modules."
}],
"pathToAssets": "../"
}
},
"zh-cn": true
}); | mit |
idreamsoft/ueditor | _src/adapter/editorui.js | 33470 | //ui跟编辑器的适配層
//那个按钮弹出是dialog,是下拉筐等都是在这个js中配置
//自己写的ui也要在这里配置,放到baidu.editor.ui下边,当编辑器实例化的时候会根据ueditor.config中的toolbars找到相应的进行实例化
(function () {
var utils = baidu.editor.utils;
var editorui = baidu.editor.ui;
var _Dialog = editorui.Dialog;
editorui.buttons = {};
editorui.Dialog = function (options) {
var dialog = new _Dialog(options);
dialog.addListener('hide', function () {
if (dialog.editor) {
var editor = dialog.editor;
try {
if (browser.gecko) {
var y = editor.window.scrollY,
x = editor.window.scrollX;
editor.body.focus();
editor.window.scrollTo(x, y);
} else {
editor.focus();
}
} catch (ex) {
}
}
});
return dialog;
};
var iframeUrlMap = {
'anchor':'~/dialogs/anchor/anchor.html',
'insertimage':'~/dialogs/image/image.html',
'link':'~/dialogs/link/link.html',
'spechars':'~/dialogs/spechars/spechars.html',
'searchreplace':'~/dialogs/searchreplace/searchreplace.html',
'map':'~/dialogs/map/map.html',
'insertvideo':'~/dialogs/video/video.html',
'help':'~/dialogs/help/help.html',
'preview':'~/dialogs/preview/preview.html',
'wordimage':'~/dialogs/wordimage/wordimage.html',
'attachment':'~/dialogs/attachment/attachment.html',
'edittip':'~/dialogs/table/edittip.html',
'edittable':'~/dialogs/table/edittable.html',
'edittd':'~/dialogs/table/edittd.html',
'snapscreen':'~/dialogs/snapscreen/snapscreen.html',
'scrawl':'~/dialogs/scrawl/scrawl.html',
'music':'~/dialogs/music/music.html',
'template':'~/dialogs/template/template.html'
};
//为工具栏添加按钮,以下都是统一的按钮触发命令,所以写在一起
var btnCmds = ['undo', 'redo', 'formatmatch',
'bold', 'italic', 'underline', 'fontborder', 'touppercase', 'tolowercase',
'strikethrough', 'subscript', 'superscript', 'source', 'indent', 'outdent',
'blockquote', 'pasteplain', 'pagebreak',
'selectall', 'print','horizontal', 'removeformat', 'time', 'date', 'unlink',
'insertparagraphbeforetable', 'insertrow', 'insertcol', 'mergeright', 'mergedown', 'deleterow',
'deletecol', 'splittorows', 'splittocols', 'splittocells', 'mergecells', 'deletetable', 'drafts'];
for (var i = 0, ci; ci = btnCmds[i++];) {
ci = ci.toLowerCase();
editorui[ci] = function (cmd) {
return function (editor) {
var ui = new editorui.Button({
className:'edui-for-' + cmd,
title:editor.options.labelMap[cmd] || editor.getLang("labelMap." + cmd) || '',
onclick:function () {
editor.execCommand(cmd);
},
theme:editor.options.theme,
showText:false
});
editorui.buttons[cmd] = ui;
editor.addListener('selectionchange', function (type, causeByUi, uiReady) {
var state = editor.queryCommandState(cmd);
if (state == -1) {
ui.setDisabled(true);
ui.setChecked(false);
} else {
if (!uiReady) {
ui.setDisabled(false);
ui.setChecked(state);
}
}
});
return ui;
};
}(ci);
}
//清除文档
editorui.cleardoc = function (editor) {
var ui = new editorui.Button({
className:'edui-for-cleardoc',
title:editor.options.labelMap.cleardoc || editor.getLang("labelMap.cleardoc") || '',
theme:editor.options.theme,
onclick:function () {
if (confirm(editor.getLang("confirmClear"))) {
editor.execCommand('cleardoc');
}
}
});
editorui.buttons["cleardoc"] = ui;
editor.addListener('selectionchange', function () {
ui.setDisabled(editor.queryCommandState('cleardoc') == -1);
});
return ui;
};
//排版,图片排版,文字方向
var typeset = {
'justify':['left', 'right', 'center', 'justify'],
'imagefloat':['none', 'left', 'center', 'right'],
'directionality':['ltr', 'rtl']
};
for (var p in typeset) {
(function (cmd, val) {
for (var i = 0, ci; ci = val[i++];) {
(function (cmd2) {
editorui[cmd.replace('float', '') + cmd2] = function (editor) {
var ui = new editorui.Button({
className:'edui-for-' + cmd.replace('float', '') + cmd2,
title:editor.options.labelMap[cmd.replace('float', '') + cmd2] || editor.getLang("labelMap." + cmd.replace('float', '') + cmd2) || '',
theme:editor.options.theme,
onclick:function () {
editor.execCommand(cmd, cmd2);
}
});
editorui.buttons[cmd] = ui;
editor.addListener('selectionchange', function (type, causeByUi, uiReady) {
ui.setDisabled(editor.queryCommandState(cmd) == -1);
ui.setChecked(editor.queryCommandValue(cmd) == cmd2 && !uiReady);
});
return ui;
};
})(ci)
}
})(p, typeset[p])
}
//字体颜色和背景颜色
for (var i = 0, ci; ci = ['backcolor', 'forecolor'][i++];) {
editorui[ci] = function (cmd) {
return function (editor) {
var ui = new editorui.ColorButton({
className:'edui-for-' + cmd,
color:'default',
title:editor.options.labelMap[cmd] || editor.getLang("labelMap." + cmd) || '',
editor:editor,
onpickcolor:function (t, color) {
editor.execCommand(cmd, color);
},
onpicknocolor:function () {
editor.execCommand(cmd, 'default');
this.setColor('transparent');
this.color = 'default';
},
onbuttonclick:function () {
editor.execCommand(cmd, this.color);
}
});
editorui.buttons[cmd] = ui;
editor.addListener('selectionchange', function () {
ui.setDisabled(editor.queryCommandState(cmd) == -1);
});
return ui;
};
}(ci);
}
var dialogBtns = {
noOk:['searchreplace', 'help', 'spechars','preview'],
ok:['attachment', 'anchor', 'link', 'insertimage', 'map', 'wordimage',
'insertvideo', 'edittip', 'edittable', 'edittd', 'scrawl', 'template', 'music']
};
for (var p in dialogBtns) {
(function (type, vals) {
for (var i = 0, ci; ci = vals[i++];) {
//todo opera下存在问题
if (browser.opera && ci === "searchreplace") {
continue;
}
(function (cmd) {
editorui[cmd] = function (editor, iframeUrl, title) {
iframeUrl = iframeUrl || (editor.options.iframeUrlMap || {})[cmd] || iframeUrlMap[cmd];
title = editor.options.labelMap[cmd] || editor.getLang("labelMap." + cmd) || '';
var dialog;
//没有iframeUrl不创建dialog
if (iframeUrl) {
dialog = new editorui.Dialog(utils.extend({
iframeUrl:editor.ui.mapUrl(iframeUrl),
editor:editor,
className:'edui-for-' + cmd,
title:title,
holdScroll: cmd === 'insertimage',
fullscreen: /preview/.test(cmd),
closeDialog:editor.getLang("closeDialog")
}, type == 'ok' ? {
buttons:[
{
className:'edui-okbutton',
label:editor.getLang("ok"),
editor:editor,
onclick:function () {
dialog.close(true);
}
},
{
className:'edui-cancelbutton',
label:editor.getLang("cancel"),
editor:editor,
onclick:function () {
dialog.close(false);
}
}
]
} : {}));
editor.ui._dialogs[cmd + "Dialog"] = dialog;
}
var ui = new editorui.Button({
className:'edui-for-' + cmd,
title:title,
onclick:function () {
if (dialog) {
switch (cmd) {
case "wordimage":
var images = editor.execCommand("wordimage");
if (images && images.length) {
dialog.render();
dialog.open();
}
break;
case "scrawl":
if (editor.queryCommandState("scrawl") != -1) {
dialog.render();
dialog.open();
}
break;
default:
dialog.render();
dialog.open();
}
}
},
theme:editor.options.theme,
disabled:(cmd == 'scrawl' && editor.queryCommandState("scrawl") == -1)
});
editorui.buttons[cmd] = ui;
editor.addListener('selectionchange', function () {
//只存在于右键菜单而无工具栏按钮的ui不需要检测状态
var unNeedCheckState = {'edittable':1};
if (cmd in unNeedCheckState)return;
var state = editor.queryCommandState(cmd);
if (ui.getDom()) {
ui.setDisabled(state == -1);
ui.setChecked(state);
}
});
return ui;
};
})(ci.toLowerCase())
}
})(p, dialogBtns[p]);
}
editorui.snapscreen = function (editor, iframeUrl, title) {
title = editor.options.labelMap['snapscreen'] || editor.getLang("labelMap.snapscreen") || '';
var ui = new editorui.Button({
className:'edui-for-snapscreen',
title:title,
onclick:function () {
editor.execCommand("snapscreen");
},
theme:editor.options.theme
});
editorui.buttons['snapscreen'] = ui;
iframeUrl = iframeUrl || (editor.options.iframeUrlMap || {})["snapscreen"] || iframeUrlMap["snapscreen"];
if (iframeUrl) {
var dialog = new editorui.Dialog({
iframeUrl:editor.ui.mapUrl(iframeUrl),
editor:editor,
className:'edui-for-snapscreen',
title:title,
buttons:[
{
className:'edui-okbutton',
label:editor.getLang("ok"),
editor:editor,
onclick:function () {
dialog.close(true);
}
},
{
className:'edui-cancelbutton',
label:editor.getLang("cancel"),
editor:editor,
onclick:function () {
dialog.close(false);
}
}
]
});
dialog.render();
editor.ui._dialogs["snapscreenDialog"] = dialog;
}
editor.addListener('selectionchange', function () {
ui.setDisabled(editor.queryCommandState('snapscreen') == -1);
});
return ui;
};
editorui.insertcode = function (editor, list, title) {
list = editor.options['insertcode'] || [];
title = editor.options.labelMap['insertcode'] || editor.getLang("labelMap.insertcode") || '';
// if (!list.length) return;
var items = [];
utils.each(list,function(key,val){
items.push({
label:key,
value:val,
theme:editor.options.theme,
renderLabelHtml:function () {
return '<div class="edui-label %%-label" >' + (this.label || '') + '</div>';
}
});
});
var ui = new editorui.Combox({
editor:editor,
items:items,
onselect:function (t, index) {
editor.execCommand('insertcode', this.items[index].value);
},
onbuttonclick:function () {
this.showPopup();
},
title:title,
initValue:title,
className:'edui-for-insertcode',
indexByValue:function (value) {
if (value) {
for (var i = 0, ci; ci = this.items[i]; i++) {
if (ci.value.indexOf(value) != -1)
return i;
}
}
return -1;
}
});
editorui.buttons['insertcode'] = ui;
editor.addListener('selectionchange', function (type, causeByUi, uiReady) {
if (!uiReady) {
var state = editor.queryCommandState('insertcode');
if (state == -1) {
ui.setDisabled(true);
} else {
ui.setDisabled(false);
var value = editor.queryCommandValue('insertcode');
if(!value){
ui.setValue(title);
return;
}
//trace:1871 ie下从源码模式切换回来时,字体会带单引号,而且会有逗号
value && (value = value.replace(/['"]/g, '').split(',')[0]);
ui.setValue(value);
}
}
});
return ui;
};
editorui.fontfamily = function (editor, list, title) {
list = editor.options['fontfamily'] || [];
title = editor.options.labelMap['fontfamily'] || editor.getLang("labelMap.fontfamily") || '';
if (!list.length) return;
for (var i = 0, ci, items = []; ci = list[i]; i++) {
var langLabel = editor.getLang('fontfamily')[ci.name] || "";
(function (key, val) {
items.push({
label:key,
value:val,
theme:editor.options.theme,
renderLabelHtml:function () {
return '<div class="edui-label %%-label" style="font-family:' +
utils.unhtml(this.value) + '">' + (this.label || '') + '</div>';
}
});
})(ci.label || langLabel, ci.val)
}
var ui = new editorui.Combox({
editor:editor,
items:items,
onselect:function (t, index) {
editor.execCommand('FontFamily', this.items[index].value);
},
onbuttonclick:function () {
this.showPopup();
},
title:title,
initValue:title,
className:'edui-for-fontfamily',
indexByValue:function (value) {
if (value) {
for (var i = 0, ci; ci = this.items[i]; i++) {
if (ci.value.indexOf(value) != -1)
return i;
}
}
return -1;
}
});
editorui.buttons['fontfamily'] = ui;
editor.addListener('selectionchange', function (type, causeByUi, uiReady) {
if (!uiReady) {
var state = editor.queryCommandState('FontFamily');
if (state == -1) {
ui.setDisabled(true);
} else {
ui.setDisabled(false);
var value = editor.queryCommandValue('FontFamily');
//trace:1871 ie下从源码模式切换回来时,字体会带单引号,而且会有逗号
value && (value = value.replace(/['"]/g, '').split(',')[0]);
ui.setValue(value);
}
}
});
return ui;
};
/**
 * Creates the font-size combo box for the toolbar.
 * @param editor owning editor instance
 * @param list   optional size list; falls back to editor.options.fontsize
 * @param title  ignored on input; recomputed from the editor's label map
 * @returns the Combox ui, or undefined when no sizes are configured
 */
editorui.fontsize = function (editor, list, title) {
    title = editor.options.labelMap['fontsize'] || editor.getLang("labelMap.fontsize") || '';
    list = list || editor.options['fontsize'] || [];
    if (!list.length) return;
    // One combo entry per size, previewed at its own pixel size.
    var entries = [];
    for (var idx = 0, total = list.length; idx < total; idx++) {
        var px = list[idx] + 'px';
        entries.push({
            label: px,
            value: px,
            theme: editor.options.theme,
            renderLabelHtml: function () {
                return '<div class="edui-label %%-label" style="line-height:1;font-size:' +
                    this.value + '">' + (this.label || '') + '</div>';
            }
        });
    }
    var combo = new editorui.Combox({
        editor: editor,
        items: entries,
        title: title,
        initValue: title,
        onselect: function (t, index) {
            editor.execCommand('FontSize', this.items[index].value);
        },
        onbuttonclick: function () {
            this.showPopup();
        },
        className: 'edui-for-fontsize'
    });
    editorui.buttons['fontsize'] = combo;
    // Mirror the size at the caret; skipped when uiReady is truthy.
    editor.addListener('selectionchange', function (type, causeByUi, uiReady) {
        if (uiReady) return;
        var state = editor.queryCommandState('FontSize');
        if (state == -1) {
            combo.setDisabled(true);
            return;
        }
        combo.setDisabled(false);
        combo.setValue(editor.queryCommandValue('FontSize'));
    });
    return combo;
};
/**
 * Creates the paragraph-format (block tag) combo box.
 * Entries come from editor.options.paragraph, a map of tag -> label.
 */
editorui.paragraph = function (editor, list, title) {
    title = editor.options.labelMap['paragraph'] || editor.getLang("labelMap.paragraph") || '';
    list = editor.options['paragraph'] || [];
    if (utils.isEmptyObject(list)) return;
    var entries = [];
    for (var tag in list) {
        entries.push({
            value: tag,
            label: list[tag] || editor.getLang("paragraph")[tag],
            theme: editor.options.theme,
            renderLabelHtml: function () {
                return '<div class="edui-label %%-label"><span class="edui-for-' + this.value + '">' + (this.label || '') + '</span></div>';
            }
        })
    }
    var combo = new editorui.Combox({
        editor: editor,
        items: entries,
        title: title,
        initValue: title,
        className: 'edui-for-paragraph',
        onselect: function (t, index) {
            editor.execCommand('Paragraph', this.items[index].value);
        },
        onbuttonclick: function () {
            this.showPopup();
        }
    });
    editorui.buttons['paragraph'] = combo;
    // Reflect the block format at the caret; fall back to the placeholder
    // title when the current value is not one of the configured entries.
    editor.addListener('selectionchange', function (type, causeByUi, uiReady) {
        if (uiReady) return;
        var state = editor.queryCommandState('Paragraph');
        if (state == -1) {
            combo.setDisabled(true);
            return;
        }
        combo.setDisabled(false);
        var value = editor.queryCommandValue('Paragraph');
        var match = combo.indexByValue(value);
        combo.setValue(match != -1 ? value : combo.initValue);
    });
    return combo;
};
// Custom styles (user-defined block formats)
editorui.customstyle = function (editor) {
    var list = editor.options['customstyle'] || [],
        title = editor.options.labelMap['customstyle'] || editor.getLang("labelMap.customstyle") || '';
    if (!list.length)return;
    var langCs = editor.getLang('customstyle');
    // The IIFE pins each descriptor `ck` per iteration; renderLabelHtml reads
    // `ck` from the closure rather than `this.value`.
    for (var i = 0, items = [], t; t = list[i++];) {
        (function (t) {
            var ck = {};
            ck.label = t.label ? t.label : langCs[t.name];
            ck.style = t.style;
            ck.className = t.className;
            ck.tag = t.tag;
            items.push({
                label:ck.label,
                value:ck,
                theme:editor.options.theme,
                // Preview: label wrapped in the style's own tag/class/inline style.
                renderLabelHtml:function () {
                    return '<div class="edui-label %%-label">' + '<' + ck.tag + ' ' + (ck.className ? ' class="' + ck.className + '"' : "")
                        + (ck.style ? ' style="' + ck.style + '"' : "") + '>' + ck.label + "<\/" + ck.tag + ">"
                        + '</div>';
                }
            });
        })(t);
    }
    var ui = new editorui.Combox({
        editor:editor,
        items:items,
        title:title,
        initValue:title,
        className:'edui-for-customstyle',
        onselect:function (t, index) {
            editor.execCommand('customstyle', this.items[index].value);
        },
        onbuttonclick:function () {
            this.showPopup();
        },
        // Item values are objects, so matching is done on the label instead.
        indexByValue:function (value) {
            for (var i = 0, ti; ti = this.items[i++];) {
                if (ti.label == value) {
                    return i - 1
                }
            }
            return -1;
        }
    });
    editorui.buttons['customstyle'] = ui;
    // Sync with selection: show the matching style, or reset to the title.
    editor.addListener('selectionchange', function (type, causeByUi, uiReady) {
        if (!uiReady) {
            var state = editor.queryCommandState('customstyle');
            if (state == -1) {
                ui.setDisabled(true);
            } else {
                ui.setDisabled(false);
                var value = editor.queryCommandValue('customstyle');
                var index = ui.indexByValue(value);
                if (index != -1) {
                    ui.setValue(value);
                } else {
                    ui.setValue(ui.initValue);
                }
            }
        }
    });
    return ui;
};
/**
 * Creates the insert-table grid-picker button.
 * Picking an R x C cell inserts a table with a 1px border.
 * @param iframeUrl unused; kept for API compatibility
 */
editorui.inserttable = function (editor, iframeUrl, title) {
    title = editor.options.labelMap['inserttable'] || editor.getLang("labelMap.inserttable") || '';
    var button = new editorui.TableButton({
        editor: editor,
        title: title,
        className: 'edui-for-inserttable',
        onpicktable: function (t, numCols, numRows) {
            editor.execCommand('InsertTable', {numRows:numRows, numCols:numCols, border:1});
        },
        onbuttonclick: function () {
            this.showPopup();
        }
    });
    editorui.buttons['inserttable'] = button;
    button.setDisabled; // eslint-disable-line no-unused-expressions
    editor.addListener('selectionchange', function () {
        button.setDisabled(editor.queryCommandState('inserttable') == -1);
    });
    return button;
};
/**
 * Creates the line-height menu button.
 * Values come from editor.options.lineheight; selecting a menu entry applies
 * it, clicking the button face re-applies the current (or last) value.
 */
editorui.lineheight = function (editor) {
    var sizes = editor.options.lineheight || [];
    if (!sizes.length) return;
    var entries = [];
    // Truthiness-terminated loop, matching the option list convention.
    for (var n = 0, size; size = sizes[n++];) {
        entries.push({
            // TODO: hard-coded label = raw value
            label: size,
            value: size,
            theme: editor.options.theme,
            onclick: function () {
                editor.execCommand("lineheight", this.value);
            }
        })
    }
    var menu = new editorui.MenuButton({
        editor: editor,
        className: 'edui-for-lineheight',
        title: editor.options.labelMap['lineheight'] || editor.getLang("labelMap.lineheight") || '',
        items: entries,
        onbuttonclick: function () {
            var value = editor.queryCommandValue('LineHeight') || this.value;
            editor.execCommand("LineHeight", value);
        }
    });
    editorui.buttons['lineheight'] = menu;
    editor.addListener('selectionchange', function () {
        var state = editor.queryCommandState('LineHeight');
        if (state == -1) {
            menu.setDisabled(true);
            return;
        }
        menu.setDisabled(false);
        // Strip a trailing "cm" unit before displaying the value.
        var value = editor.queryCommandValue('LineHeight');
        value && menu.setValue((value + '').replace(/cm/, ''));
        menu.setChecked(state)
    });
    return menu;
};
// Row-spacing (space above / below paragraph) menu buttons.
var rowspacings = ['top', 'bottom'];
for (var r = 0, ri; ri = rowspacings[r++];) {
    // IIFE pins `cmd` ('top' or 'bottom') for the factory it registers.
    (function (cmd) {
        editorui['rowspacing' + cmd] = function (editor) {
            var val = editor.options['rowspacing' + cmd] || [];
            if (!val.length) return null;
            for (var i = 0, ci, items = []; ci = val[i++];) {
                items.push({
                    label:ci,
                    value:ci,
                    theme:editor.options.theme,
                    onclick:function () {
                        editor.execCommand("rowspacing", this.value, cmd);
                    }
                })
            }
            var ui = new editorui.MenuButton({
                editor:editor,
                className:'edui-for-rowspacing' + cmd,
                title:editor.options.labelMap['rowspacing' + cmd] || editor.getLang("labelMap.rowspacing" + cmd) || '',
                items:items,
                onbuttonclick:function () {
                    var value = editor.queryCommandValue('rowspacing', cmd) || this.value;
                    editor.execCommand("rowspacing", value, cmd);
                }
            });
            // FIX: register under the factory's own name so lookups for
            // 'rowspacingtop'/'rowspacingbottom' succeed (consistent with the
            // sibling list-button factories); the legacy 'top'/'bottom' key is
            // kept for backward compatibility.
            editorui.buttons['rowspacing' + cmd] = ui;
            editorui.buttons[cmd] = ui;
            editor.addListener('selectionchange', function () {
                var state = editor.queryCommandState('rowspacing', cmd);
                if (state == -1) {
                    ui.setDisabled(true);
                } else {
                    ui.setDisabled(false);
                    // Strip a trailing '%' unit before displaying the value.
                    var value = editor.queryCommandValue('rowspacing', cmd);
                    value && ui.setValue((value + '').replace(/%/, ''));
                    ui.setChecked(state)
                }
            });
            return ui;
        }
    })(ri)
}
// Ordered / unordered list buttons
var lists = ['insertorderedlist', 'insertunorderedlist'];
for (var l = 0, cl; cl = lists[l++];) {
    // IIFE pins `cmd` for the factory registered per list type.
    (function (cmd) {
        editorui[cmd] = function (editor) {
            // vals maps list-style value -> label (label falls back to the lang pack).
            var vals = editor.options[cmd],
                _onMenuClick = function () {
                    editor.execCommand(cmd, this.value);
                }, items = [];
            for (var i in vals) {
                items.push({
                    label:vals[i] || editor.getLang()[cmd][i] || "",
                    value:i,
                    theme:editor.options.theme,
                    onclick:_onMenuClick
                })
            }
            var ui = new editorui.MenuButton({
                editor:editor,
                className:'edui-for-' + cmd,
                title:editor.getLang("labelMap." + cmd) || '',
                'items':items,
                // Button face re-applies the current (or default) list style.
                onbuttonclick:function () {
                    var value = editor.queryCommandValue(cmd) || this.value;
                    editor.execCommand(cmd, value);
                }
            });
            editorui.buttons[cmd] = ui;
            // Sync enabled/checked/value state with the caret position.
            editor.addListener('selectionchange', function () {
                var state = editor.queryCommandState(cmd);
                if (state == -1) {
                    ui.setDisabled(true);
                } else {
                    ui.setDisabled(false);
                    var value = editor.queryCommandValue(cmd);
                    ui.setValue(value);
                    ui.setChecked(state)
                }
            });
            return ui;
        };
    })(cl)
}
/**
 * Creates the fullscreen toggle button.
 */
editorui.fullscreen = function (editor, title) {
    title = editor.options.labelMap['fullscreen'] || editor.getLang("labelMap.fullscreen") || '';
    var toggle = new editorui.Button({
        className: 'edui-for-fullscreen',
        title: title,
        theme: editor.options.theme,
        onclick: function () {
            // Flip fullscreen on the editor shell, then mirror the new state
            // on the button itself.
            if (editor.ui) {
                editor.ui.setFullScreen(!editor.ui.isFullScreen());
            }
            this.setChecked(editor.ui.isFullScreen());
        }
    });
    editorui.buttons['fullscreen'] = toggle;
    editor.addListener('selectionchange', function () {
        var state = editor.queryCommandState('fullscreen');
        toggle.setDisabled(state == -1);
        toggle.setChecked(editor.ui.isFullScreen());
    });
    return toggle;
};
/**
 * Creates the auto-typeset button (one-click formatting cleanup).
 */
editorui.autotypeset = function (editor) {
    var button = new editorui.AutoTypeSetButton({
        editor: editor,
        title: editor.options.labelMap['autotypeset'] || editor.getLang("labelMap.autotypeset") || '',
        className: 'edui-for-autotypeset',
        onbuttonclick: function () {
            editor.execCommand('autotypeset')
        }
    });
    editorui.buttons['autotypeset'] = button;
    editor.addListener('selectionchange', function () {
        button.setDisabled(editor.queryCommandState('autotypeset') == -1);
    });
    return button;
};
/* Simple (single-file) upload plugin button */
editorui["simpleupload"] = function (editor) {
    var name = 'simpleupload',
        ui = new editorui.Button({
            className:'edui-for-' + name,
            title:editor.options.labelMap[name] || editor.getLang("labelMap." + name) || '',
            // Intentionally empty: click behaviour is wired by the upload
            // plugin via the 'simpleuploadbtnready' event below.
            onclick:function () {},
            theme:editor.options.theme,
            showText:false
        });
    editorui.buttons[name] = ui;
    // Once the toolbar DOM exists, hand the button's icon element to the
    // plugin — presumably so it can attach its file-input machinery there
    // (the listener lives elsewhere; confirm in the simpleupload plugin).
    editor.addListener('ready', function() {
        var b = ui.getDom('body'),
            iconSpan = b.children[0];
        editor.fireEvent('simpleuploadbtnready', iconSpan);
    });
    editor.addListener('selectionchange', function (type, causeByUi, uiReady) {
        var state = editor.queryCommandState(name);
        if (state == -1) {
            ui.setDisabled(true);
            ui.setChecked(false);
        } else {
            // Only refresh when the change did not originate from the UI.
            if (!uiReady) {
                ui.setDisabled(false);
                ui.setChecked(state);
            }
        }
    });
    return ui;
};
})();
| mit |
hpkns/laravel-node-seeder | src/NodeSeeder.php | 5818 | <?php namespace Hpkns\Seeder;
use Symfony\Component\Yaml\Yaml;
class NodeSeeder implements \ArrayAccess {

    use ArrayAccessible;

    /**
     * Folder the seeder reads from and writes to.
     *
     * @var string
     */
    protected $folder;

    /**
     * Base name (without extension) of the configuration file.
     *
     * @var string
     */
    protected $configFileName = 'conf';

    /**
     * A list of accepted formats, in lookup order.
     *
     * @var array
     */
    protected $confFormats = ['json', 'yml', 'php'];

    /**
     * A list of translations
     *
     * @var array
     */
    protected $translations = [];

    /**
     * Default attributes handed to each NodeTranslationSeeder.
     * (Declared explicitly; it was previously a dynamic property, which is
     * deprecated as of PHP 8.2.)
     *
     * @var array
     */
    protected $translationsDefault = [];

    /**
     * Initialize the instance
     *
     * @param  array $default               default node attributes
     * @param  array $translations_default  defaults for translation seeders
     * @return void
     */
    public function __construct(array $default = [], array $translations_default = [])
    {
        if( ! empty($default))
        {
            $this->withDefault($default);
        }

        $this->translationsDefault = $translations_default;
    }

    /**
     * Populate the seeder with the content of a folder
     *
     * @param  string $folder
     * @return NodeSeeder
     * @throws \Exception when the folder or its config file is missing
     */
    public function fromFolder($folder)
    {
        $this->folder = $folder;

        if( ! file_exists($folder) || ! is_dir($folder))
        {
            throw new \Exception("Folder {$folder} not found");
        }

        if(($configuration = $this->getConfiguration($folder)) !== false)
        {
            $this->withDefault((array)$configuration);
        }
        else throw new \Exception("No config found in folder {$folder}");

        if($translations = $this->readTranslations($folder))
        {
            $this->translations = $translations;
        }

        return $this;
    }

    /**
     * Return the folder containing the seeder.
     *
     * @return string
     */
    public function getFolder()
    {
        return $this->folder;
    }

    /**
     * Find and parse the config file, trying each supported format in order.
     *
     * @param  string $folder
     * @return mixed parsed configuration, or false when no config file exists
     */
    public function getConfiguration($folder)
    {
        foreach($this->confFormats as $format)
        {
            $path = "{$folder}/{$this->configFileName}.{$format}";

            if( ! file_exists($path)) continue;

            if($format == 'php')
            {
                return require($path);
            }

            $content = file_get_contents($path);

            if($format == 'json')
            {
                return json_decode($content);
            }

            if($format == 'yml')
            {
                return Yaml::parse($content);
            }
        }

        return false;
    }

    /**
     * Read every translation file in the folder. Anything that is not a
     * directory and does not start with the config file name is treated as a
     * translation file.
     *
     * @param  string $folder
     * @return array
     */
    public function readTranslations($folder)
    {
        $translations = [];

        foreach(glob("{$folder}/*") as $file)
        {
            if(strpos(basename($file), $this->configFileName) === 0 || is_dir($file))
            {
                continue;
            }

            $translations[] = ( new NodeTranslationSeeder($this->translationsDefault) )->fromFile($file);
        }

        return $translations;
    }

    /**
     * Add a translation
     *
     * @param  array $default
     * @return void
     */
    public function addTranslation(array $default = [])
    {
        $this->translations[] = new NodeTranslationSeeder($default);
    }

    /**
     * Return the translations
     *
     * @return array
     */
    public function getTranslations()
    {
        return $this->translations;
    }

    /**
     * Hook for post-processing serialized output; identity by default.
     *
     * @param  string $output
     * @return string
     */
    public function cleanOutput($output)
    {
        return $output;
    }

    /**
     * Return the string version of the node
     *
     * @return string
     */
    public function __toString()
    {
        return $this->toString('json');
    }

    /**
     * Recursively convert nested stdClass instances into plain arrays so the
     * structure can be serialized uniformly.
     *
     * @param  array $input
     * @return array
     */
    public function simplify(array $input)
    {
        foreach($input as $key => $value)
        {
            if(is_object($value) && $value instanceof \stdClass)
            {
                $input[$key] = (array)$value;
            }

            if(is_array($input[$key]))
            {
                $input[$key] = $this->simplify($input[$key]);
            }
        }

        return $input;
    }

    /**
     * Serialize the node's attributes.
     *
     * @param  string $format one of 'php', 'yml'/'yaml' or 'json' (default)
     * @return string
     */
    public function toString($format = 'json')
    {
        $content = $this->simplify($this->attributes);

        if($format == 'php')
        {
            $export = $this->cleanOutput(var_export($content, true));

            return "<?php\n\nreturn {$export};";
        }
        elseif($format == 'yml' || $format == 'yaml')
        {
            return Yaml::dump($content, 50);
        }
        else // Json
        {
            return json_encode($content);
        }
    }

    /**
     * Return the extension corresponding to the format
     *
     * @param  string $format
     * @return string
     */
    public function getExtension($format)
    {
        switch($format)
        {
            case 'yaml':
            case 'yml':
                return 'yml';
            case 'php':
                return 'php';
            default:
                return 'json';
        }
    }

    /**
     * Save the seeder config and all its translations into the target folder.
     *
     * @param  string|null $folder overrides the current folder when given
     * @param  string      $format config serialization format
     * @return void
     * @throws \Exception when no output folder can be determined
     */
    public function save($folder = null, $format = 'json')
    {
        if($folder)
        {
            $this->folder = $folder;
        }

        if( ! $this->folder) throw new \Exception("Cannot determine output folder");

        // Fix: create the *resolved* folder. The previous code called
        // mkdir($folder), which failed with null when save() relied on a
        // folder set before the call.
        if( ! file_exists($this->folder)) mkdir($this->folder);

        $path = "{$this->folder}/{$this->configFileName}." . $this->getExtension($format);

        file_put_contents($path, $this->toString($format));

        foreach($this->getTranslations() as $translation)
        {
            $path = "{$this->folder}/{$translation->locale}.md";

            $translation->save($path);
        }
    }
}
| mit |
rakasiwi23/currency-converter | app/screens/Options.js | 1687 | import React, { Component } from 'react';
import { ScrollView, StatusBar, Platform, Linking } from 'react-native';
import Icon from 'react-native-vector-icons/Ionicons';
import PropTypes from 'prop-types';
import { ListItem, Separator } from '../components/List';
import { connectAlert } from '../components/Alert';
const ICON_PREFIX = Platform.OS === 'ios' ? 'ios' : 'md';
const ICON_COLOR = '#868686';
const ICON_SIZE = 23;
class Options extends Component {
constructor(props) {
super(props);
this.handleThemesPress = this.handleThemesPress.bind(this);
this.handleSitePress = this.handleSitePress.bind(this);
}
handleThemesPress() {
this.props.navigation.navigate('Themes');
}
handleSitePress() {
Linking.openURL('http://fixer.io')
.catch(() => this.props.alertWithType('error', 'Sorry!', "Fixer.io can't be opened right now."));
}
render() {
return (
<ScrollView>
<StatusBar translucent={false} barStyle="default" />
<ListItem
text="Themes"
onPress={this.handleThemesPress}
customIcon={<Icon name={`${ICON_PREFIX}-arrow-forward`} color={ICON_COLOR} size={ICON_SIZE} />}
/>
<Separator />
<ListItem
text="Fixer.io"
onPress={this.handleSitePress}
customIcon={<Icon name={`${ICON_PREFIX}-link`} color={ICON_COLOR} size={ICON_SIZE} />}
/>
<Separator />
</ScrollView>
);
}
}
// Prop contract: `navigation` is injected by the navigator; `alertWithType`
// comes from the connectAlert HOC below.
Options.propTypes = {
  navigation: PropTypes.oneOfType([
    PropTypes.object,
  ]).isRequired,
  alertWithType: PropTypes.func,
};
// No-op default so the component also renders outside the alert provider.
Options.defaultProps = {
  alertWithType: () => null,
};
// Wrap with the alert HOC so dropdown alerts can be triggered from here.
export default connectAlert(Options);
| mit |
jiasir/redis-ha | redis_ha_installer.py | 2966 | #!/usr/bin/env python
# Install redis server
# Usage: sudo python redis_ha_installer.py [master|backup]
# Author: jiasir (Taio Jia)
# E-Mail: jiasir@icloud.com
# License: The MIT License
import os
import sys
import shutil
import logging
from command import Command
run = Command()
def usage():
    """Print the command-line usage string for this installer."""
    # print(...) with a single argument behaves identically on Python 2 and 3.
    print('Usage: sudo python redis_ha_installer.py <master|backup>')
def start_instace():
    """Start redis-server with /etc/redis/redis.conf and the keepalived service.

    NOTE(review): the name looks like a typo of ``start_instance``; kept as-is
    because main() calls it by this spelling.
    """
    run.execute_get_output('sudo', 'redis-server', '/etc/redis/redis.conf')
    run.execute_get_output('sudo', 'service', 'keepalived', 'start')
def install_keepalived():
    """Prepare the host and install keepalived via apt.

    Appends ``net.ipv4.ip_nonlocal_bind = 1`` to sysctl.conf (allows binding
    non-local addresses such as a floating VIP) and a ulimit raise to
    rc.local, then reloads sysctl and installs the package.
    """
    try:
        with open('/etc/sysctl.conf', 'a') as f:
            f.write('net.ipv4.ip_nonlocal_bind = 1')
    except IOError:
        # NOTE(review): this prints the IOError *class docstring*, not the
        # caught error, and continues anyway — probably meant to show the
        # actual exception.
        print IOError.__doc__
    try:
        with open('/etc/rc.local', 'a') as f:
            f.write('ulimit -SHn 65535')
    except IOError:
        print IOError.__doc__
    run.execute_get_output('sudo', 'sysctl', '-p')
    run.execute_get_output('sudo', 'ulimit', '-SHn', '65535')
    run.execute_get_output('sudo', 'apt-get', 'update')
    run.execute_get_output('sudo', 'apt-get', '-y', 'install', 'keepalived')
def install_redis():
    """Append ``vm.overcommit_memory = 1`` to sysctl.conf, reload sysctl and
    install the redis-server package via apt."""
    try:
        with open('/etc/sysctl.conf', 'a') as f:
            f.write('vm.overcommit_memory = 1')
    except IOError:
        # NOTE(review): prints the class docstring rather than the real error.
        print IOError.__doc__
    run.execute_get_output('sudo', 'sysctl', '-p')
    run.execute_get_output('sudo', 'apt-get', '-y', 'install', 'redis-server')
def _install_file(src, dst, notice):
    """Copy src to dst and report the created file on stdout."""
    shutil.copy(src, dst)
    print(notice)


def copy_keepalived_master_conf():
    """Install the MASTER keepalived configuration."""
    _install_file('conf/keepalived.conf.master', '/etc/keepalived/keepalived.conf',
                  '[OK] Create keepalived config file: /etc/keepalived/keepalived.conf')


def copy_keepalived_backup_conf():
    """Install the BACKUP keepalived configuration."""
    _install_file('conf/keepalived.conf.backup', '/etc/keepalived/keepalived.conf',
                  '[OK] Create keepalived config file: /etc/keepalived/keepalived.conf')


def copy_redis_master_conf():
    """Install the redis configuration for the master node."""
    _install_file('conf/redis.conf.master', '/etc/redis/redis.conf',
                  '[OK] Create redis config file: /etc/redis/redis.conf')


def copy_redis_slave_conf():
    """Install the redis configuration for the slave node."""
    _install_file('conf/redis.conf.slave', '/etc/redis/redis.conf',
                  '[OK] Create redis config file: /etc/redis/redis.conf')


def copy_fail_over_script():
    """Install the keepalived fail-over hook script."""
    _install_file('tools/redis.sh', '/var/lib/redis/redis.sh',
                  '[OK] Create fail-over script: /var/lib/redis/redis.sh')
def main():
    """Dispatch on the CLI argument: 'master' or 'backup'.

    Both paths install keepalived + redis and start the services; they
    differ only in which keepalived/redis config files are installed.
    """
    # NOTE(review): with no argument at all, main() silently does nothing;
    # consider printing usage in that case too.
    if len(sys.argv) > 1:
        option = sys.argv[1]
        if option == "master":
            install_keepalived()
            install_redis()
            copy_keepalived_master_conf()
            copy_redis_master_conf()
            copy_fail_over_script()
            start_instace()
        elif option == "backup":
            install_keepalived()
            install_redis()
            copy_keepalived_backup_conf()
            copy_redis_slave_conf()
            copy_fail_over_script()
            start_instace()
        else:
            usage()
if __name__ == '__main__':
    # Installing packages and writing under /etc requires root.
    if os.getuid() == 0:
        main()
    else:
        print 'You do not have permission'
        usage()
        exit()
JohnRyanTsai/LearnJSHardWay | js_fullstack/web_develop/main.js | 114 | 'use strict';
// Legacy koa v1 entry point, kept for reference:
// var koa_main = require('./koa');
// koa_main();
// Boot the app using the koa2 implementation.
var koa2_main = require('./koa2');
koa2_main();
ninjatronic/angular-kinvey | kinvey.js | 41787 | (function() {
'use strict';
angular
.module('kinvey', ['ngResource', 'base64'])
// this constant contains strings for url building
.constant('$kUrl', {
base: 'https://baas.kinvey.com/',
appdata: 'appdata/',
user: 'user/',
group: 'group/',
rpc: 'rpc/',
custom: 'custom/',
blob: 'blob/',
push: 'push/'
})
// this is the target Kinvey API version
.constant('$kVer', 3)
// this constant contains the live header objects
.constant('$kHead', {
user: {
'X-Kinvey-API-Version': '',
'Authorization': ''
},
basic: {
'X-Kinvey-API-Version': '',
'Authorization': ''
}
})
// these are error strings
.constant('$kErr', {
init: '$kinveyProvider.init requires an options object: {\'appId\':\'YOUR APP ID\',\'appSecret\':\'YOUR APP SECRET\'}',
alias: 'aliases must not attempt to overwrite $kinvey.'
})
        // Custom JSON serializer that behaves like angular.toJson but keeps
        // MongoDB operator keys: Angular's serializer drops every key starting
        // with '$', which would destroy query operators such as $gt or $in.
        .constant('$kSerialize', function(obj, pretty) {
            // Whitelist of Mongo operator keys that must survive serialization.
            var mongoOperators = [
                "$gt", "$gte", "$in", "$lt", "$lte", "$ne", "$nin", // comparison
                "$or", "$and", "$not", "$nor", // logical
                "$exists", "$type", // element
                "$mod", "$regex", "$where", //evaluation
                "$geoWithin", "$geoIntersects", "$near", "$nearSphere", //geospatial
                "$all", "$elemMatch", "$size", // array
                "$", "$elemMatch", "$slice" // projection
            ];
            function isWindow(obj) {
                return obj && obj.document && obj.location && obj.alert && obj.setInterval;
            }
            function isScope(obj) {
                return obj && obj.$evalAsync && obj.$watch;
            }
            // Mirrors Angular's internal toJsonReplacer, except whitelisted
            // '$'-prefixed keys are kept instead of being stripped.
            function toJsonReplacer(key, value) {
                var val = value;
                if (typeof key === 'string' && key.charAt(0) === '$') {
                    var isMongo = false;
                    angular.forEach(mongoOperators, function(op) {
                        if(op == key) {
                            isMongo = true;
                        }
                    });
                    if(!isMongo) {
                        val = undefined;
                    }
                } else if (isWindow(value)) {
                    val = '$WINDOW';
                } else if (value && document === value) {
                    val = '$DOCUMENT';
                } else if (isScope(value)) {
                    val = '$SCOPE';
                }
                return val;
            }
            if (typeof obj === 'undefined') return undefined;
            return JSON.stringify(obj, toJsonReplacer, pretty ? ' ' : null);
        })
.constant('$kStorageAdapter', function(storage) {
return {
get: function(key) {
return storage[key];
},
put: function(key, value) {
storage[key] = value;
},
remove: function(key) {
storage[key] = null;
}
};
})
.constant('$kRequire', function($module, $provider, $injector, $rootScope) {
// we need an isolated injector to instantiate the storage factory
// as it will confuse our app $injector otherwise
var modInjector = angular.injector(['ng', $module, 'kinvey']);
var $external = modInjector.instantiate($provider.$get);
var $externalScope = modInjector.get('$rootScope');
// $rootScope.$watch is what drives the dirty checking in $cookieStore
// since we injected it using an isolated injector (essentially an
// injector for a separate app) it has a separate $rootScope, so we need
// to ensure that whenever our $rootScope $applies it triggers the same
// on the isolated $rootScope
$rootScope.$watch(function() {
$externalScope.$apply();
});
return $external;
})
// this is the service provider
.provider('$kinvey', [
'$kUrl', '$kHead', '$kVer', '$kErr', '$kSerialize', '$kStorageAdapter', '$kRequire', '$base64', '$injector',
function($kUrl, $kHead, $kVer, $kErr, $kSerialize, $kStorageAdapter, $kRequire, $base64, $injector) {
// strap up the headers with the target API version
$kHead.user['X-Kinvey-API-Version'] = $kVer;
$kHead.basic['X-Kinvey-API-Version'] = $kVer;
var appKey;
var storageOption;
return {
init: function(options) {
// there are certain mandatory initialisation options
if(!options || !('appKey' in options) || !('appSecret' in options)) {
throw $kErr.init;
}
appKey = options.appKey;
storageOption = options.storage;
// strap up the headers with the basic authentication string
var auth = 'Basic '+$base64.encode(options.appKey+':'+options.appSecret);
$kHead.user.Authorization = auth;
$kHead.basic.Authorization = auth;
},
$get: ['$rootScope', '$resource', '$http', '$q', function($rootScope, $resource, $http, $q) {
var storageAdapter;
switch(storageOption) {
case 'cookies':
var $cookieStoreProvider = $injector.get('$cookieStoreProvider');
var $cookieStore = $kRequire('ngCookies', $cookieStoreProvider, $injector, $rootScope);
storageAdapter = $cookieStore;
break;
case 'local':
var $localStorageProvider = $injector.get('$localStorageProvider');
var $localStorage = $kRequire('ngStorage', $localStorageProvider, $injector, $rootScope);
storageAdapter = $kStorageAdapter($localStorage);
break;
default:
var temp = {};
storageAdapter = {
get: function(key) {
return temp[key];
},
put: function(key, value) {
temp[key] = value;
},
remove: function(key) {
temp[key] = undefined;
}
};
break;
}
/*
RETRIEVE THE LAST SESSION FROM COOKIES
*/
var oldToken = storageAdapter.get(appKey+':authToken');
if(oldToken) {
$kHead.user.Authorization = oldToken;
}
/*
CUSTOM HTTP TARGETS NOT GENERATED BY THE $resource DECLARATIONS
*/
var funcDefs = {
handshake: function() {
return {
method: 'GET',
url: $kUrl.base + $kUrl.appdata + appKey,
headers: $kHead.basic
};
},
rpc: function(endpoint, data) {
return {
method: 'POST',
url: $kUrl.base + $kUrl.rpc + appKey + '/' + $kUrl.custom + endpoint,
headers: $kHead.user,
data: data
};
},
upload: function(file, filedata, mimeType) {
return {
method: 'PUT',
url: file._uploadURL,
data: filedata,
headers: angular.extend({
'Content-Type': mimeType,
'Accept': undefined
}, file._requiredHeaders),
transformRequest: angular.identity
};
},
download: function(file) {
return {
method: 'GET',
url: file._downloadURL
};
},
saveFile: function(file, mimeType) {
return {
method: file._id ? 'PUT' : 'POST',
url: $kUrl.base + $kUrl.blob + appKey + (file._id ? '/'+file._id : ''),
headers: angular.extend({
'X-Kinvey-Content-Type': mimeType
}, $kHead.user),
data: file
};
}
};
/*
STRINGS FOR MONGO COMPATABILITY
*/
var mongoMethods = ['query', 'delete'];
/*
THESE METHODS PROVIDE WAYS TO AUGMENT WORKFLOW WITH COMMON ADDITIONS
*/
// decorates an acting promise function with a `$resource` style response structure
function augmentPromise(actor, orig) {
var deferred = $q.defer();
var retVal = orig || { };
if(!('$resolved' in retVal)) {
retVal.$resolved = false;
}
retVal.$promise = deferred.promise;
actor(retVal, deferred);
return retVal;
}
// provides a resolving function that manipulates a `$resource` style response structure
function augmentResolve(returningObj, deferred, transformResponse) {
return function(response) {
var publicResponse = transformResponse ? transformResponse(response) : response;
angular.extend(returningObj, publicResponse);
returningObj.$resolved = true;
deferred.resolve(publicResponse);
};
}
// provides a rejecting function that manipulates a `$resource` style response structure
function augmentReject(deferred, transformResponse) {
return function(response) {
var publicResponse = transformResponse ? transformResponse(response) : response;
deferred.reject(publicResponse);
};
}
// provides special serialization for methods that require mongo-friendly serialization
function augmentForMongo(resourceDef) {
angular.forEach(mongoMethods, function(method) {
var origMethod = resourceDef[method];
resourceDef[method] = function(a1, a2, a3, a4) {
if(a1 && 'query' in a1) {
a1.query = JSON.stringify(a1.query);
}
return origMethod(a1, a2, a3, a4);
};
});
var origGroup = resourceDef.group;
resourceDef.group = function(a1, a2, a3) {
if(a1.reduce) {
a1.reduce = a1.reduce.toString();
a1.reduce = a1.reduce.replace(/\n/g,'');
a1.reduce = a1.reduce.replace(/\s/g,'');
}
return origGroup(undefined, a1, a2, a3);
};
return resourceDef;
}
// augments the File `$resource` definition with extra, promise based methods
function augmentFileDef(resourceDef) {
resourceDef.prototype.$download = function() {
var file = this;
return augmentPromise(function(retVal, deferred) {
$http(funcDefs.download(file))
.then(
augmentResolve(retVal, deferred, getData),
augmentReject(deferred, getData));
});
};
resourceDef.prototype.$upload = function(filedata, mimeType) {
var file = this;
return augmentPromise(function(retVal, deferred) {
$http(funcDefs.upload(file, filedata, mimeType))
.then(
augmentResolve(retVal, deferred, getData),
augmentReject(deferred, getData));
});
};
resourceDef.prototype.$save = function(mimeType) {
var file = this;
return augmentPromise(function(retVal, deferred) {
$http(funcDefs.saveFile(flatten(file), mimeType))
.then(
augmentResolve(retVal, deferred, getFile),
augmentReject(deferred, getData));
}, file);
};
resourceDef.prototype.$reference = function() {
if(this._id) {
return {
_type: 'KinveyFile',
_id: this._id
};
}
};
resourceDef.upload = function(file, filedata, mimeType) {
return augmentPromise(function(retVal, deferred) {
$http(funcDefs.upload(file, filedata, mimeType))
.then(
augmentResolve(retVal, deferred, getData),
augmentReject(deferred, getData));
});
};
resourceDef.download = function(file) {
return augmentPromise(function(retVal, deferred) {
$http(funcDefs.download(file))
.then(
augmentResolve(retVal, deferred, getData),
augmentReject(deferred, getData));
});
};
resourceDef.save = function(file, mimeType) {
return augmentPromise(function(retVal, deferred) {
$http(funcDefs.saveFile(flatten(file), mimeType))
.then(
augmentResolve(retVal, deferred, getFile),
augmentReject(deferred, getData));
}, file);
};
return resourceDef;
}
// augments the Object `$resource` definition
function augmentObjectDef(className, resourceDef) {
resourceDef.save = function(obj) {
if(obj._id) {
return Object(className).update(obj);
} else {
return Object(className).create(obj);
}
};
resourceDef.prototype.$save = function(args) {
if(args && args._id && !this._id) {
this._id = args._id;
}
if(this._id) {
return this.$update(args);
} else {
return this.$create(args);
}
};
return resourceDef;
}
// augments a resource definition to provide a $reference instance-level method
function augmentReference(classname, resourceDef) {
resourceDef.prototype.$reference = function() {
if(this._id) {
return {
_type: 'KinveyRef',
_collection: classname,
_id: this._id
};
}
};
return resourceDef;
}
// gets the data component of a `$http` response object
function getData(response) {
return response.data;
}
// gets a File from a `$http` repsonse object
function getFile(response) {
return new File(getData(response));
}
/*
THESE METHODS PERFORM SIMPLE 'ROUNDTRIP' OPERATIONS
*/
// performs a simple handshake
function handshake() {
return augmentPromise(function(retVal, deferred) {
$http(funcDefs.handshake())
.then(
augmentResolve(retVal, deferred, getData),
augmentReject(deferred, getData));
});
}
// performs an rpc call
function rpc(endpoint, data) {
return augmentPromise(function(retVal, deferred) {
$http(funcDefs.rpc(endpoint, data))
.then(
augmentResolve(retVal, deferred, getData),
augmentReject(deferred, getData));
});
}
/*
DEALS WITH AUTOMATIC REFERENCE RESOLUTION AS PART OF SERIALIZATION
*/
function flatten(src) {
var dst = {};
dst = angular.copy(src);
angular.forEach(dst, function(value, key) {
if(typeof(value) === 'object' && value !== null) {
if(('$reference' in value) && (typeof(value.$reference) === 'function')) {
dst[key] = value.$reference();
} else {
dst[key] = flatten(value);
}
}
});
return dst;
}
/*
HERE BE `$resource` DEFINITIONS AND FACTORIES
*/
// Object `$resource` definition factory
var Object = function(className) {
return augmentReference(className,
augmentObjectDef(className,
augmentForMongo(
$resource($kUrl.base + $kUrl.appdata + appKey + '/' + className + '/:_id', {_id: '@_id'}, {
create: {
method: 'POST',
transformResponse: function(data) {
return new (Object(className))(angular.fromJson(data));
},
transformRequest: function(data) {
var flat = flatten(data);
return angular.toJson(flat);
},
headers: $kHead.user,
params: {
_id: ''
}
},
get: {
method: 'GET',
transformResponse: function(data) {
return new (Object(className))(angular.fromJson(data));
},
headers: $kHead.user
},
count: {
method: 'GET',
headers: $kHead.user,
params: {
_id: '_count'
}
},
update: {
method: 'PUT',
transformResponse: function(data) {
return new (Object(className))(angular.fromJson(data));
},
transformRequest: function(data) {
var flat = flatten(data);
return angular.toJson(flat);
},
headers: $kHead.user
},
delete: {
method: 'DELETE',
headers: $kHead.user
},
query: {
method: 'GET',
transformResponse: function(data) {
var retVal = [];
var objs = angular.fromJson(data);
angular.forEach(objs, function(obj) {
retVal.push(new (Object(className))(obj));
});
return retVal;
},
headers: $kHead.user,
isArray: true,
params: {
_id: ''
}
},
group: {
method: 'POST',
headers: $kHead.user,
isArray: true,
params: {
_id: '_group'
},
transformRequest: function(data) {
return $kSerialize(data);
}
}
}))));
};
// User `$resource` definition
var User =
augmentReference('user',
augmentForMongo(
$resource($kUrl.base + $kUrl.user + appKey + '/:_id', {_id: '@_id'} ,{
login: {
method: 'POST',
params: {
_id: 'login'
},
transformResponse: function(data) {
data = angular.fromJson(data);
if(!data.error) {
$kHead.user.Authorization = 'Kinvey '+data._kmd.authtoken;
storageAdapter.put(appKey+':authToken', 'Kinvey '+data._kmd.authtoken);
}
return new User(data);
},
transformRequest: function(data) {
return angular.toJson(flatten(data));
},
headers: $kHead.user
},
current: {
method: 'GET',
params: {
_id: '_me'
},
transformResponse: function(data) {
return new User(angular.fromJson(data));
},
headers: $kHead.user
},
logout: {
method: 'POST',
params: {
_id: '_logout'
},
transformResponse: function() {
$kHead.user.Authorization = $kHead.basic.Authorization;
storageAdapter.remove(appKey+':authToken');
},
headers: $kHead.user
},
signup: {
method: 'POST',
headers: $kHead.basic,
transformResponse: function(data) {
data = angular.fromJson(data);
if(!data.error) {
$kHead.user.Authorization = 'Kinvey '+data._kmd.authtoken;
storageAdapter.put(appKey+':authToken', 'Kinvey '+data._kmd.authtoken);
}
return new User(data);
},
transformRequest: function(data) {
return angular.toJson(flatten(data));
}
},
get: {
method: 'GET',
transformResponse: function(data) {
return new User(angular.fromJson(data));
},
headers: $kHead.user
},
lookup: {
method: 'POST',
transformResponse: function(data) {
var retVal = [];
data = angular.fromJson(data);
angular.forEach(data, function(user) {
retVal.push(new User(user));
});
return retVal;
},
headers: $kHead.user,
isArray:true,
params: {
_id: '_lookup'
}
},
save: {
method:'PUT',
transformResponse: function(data) {
return new User(angular.fromJson(data));
},
transformRequest: function(data) {
return angular.toJson(flatten(data));
},
headers: $kHead.user
},
query: {
url: $kUrl.base + $kUrl.user + appKey + '/?query=:query',
method:'GET',
transformResponse: function(data) {
var retVal = [];
data = angular.fromJson(data);
angular.forEach(data, function(user) {
retVal.push(new User(user));
});
return retVal;
},
headers: $kHead.user,
isArray:true,
params: { }
},
delete: {
method:'DELETE',
params: {
hard: true
},
headers: $kHead.user
},
suspend: {
method:'DELETE',
headers: $kHead.user
},
verifyEmail: {
method: 'POST',
headers: {
Authorization: $kHead.basic.Authorization,
'X-Kinvey-API-Version': $kHead.basic['X-Kinvey-API-Version'],
'Content-Type': undefined
},
url: $kUrl.base+$kUrl.rpc+appKey+'/:username:email/user-email-verification-initiate',
params: {
username: '@username',
email: '@email'
},
transformRequest: function() {
return '';
}
},
resetPassword: {
method: 'POST',
headers: $kHead.basic,
url: $kUrl.base+$kUrl.rpc+appKey+'/:username:email/user-password-reset-initiate',
params: {
username: '@username',
email: '@email'
},
transformRequest: function() {
return '';
}
},
checkUsernameExists: {
method: 'POST',
headers: $kHead.basic,
url: $kUrl.base+$kUrl.rpc+appKey+'/check-username-exists'
}
})));
// Group `$resource` definition
// CRUD endpoints for Kinvey user groups. All three actions authenticate
// with the logged-in user's headers ($kHead.user); the group id is bound
// from the object's own `_id` field.
var Group =
$resource($kUrl.base + $kUrl.group + appKey + '/:_id', {_id: '@_id'}, {
get: {
method: 'GET',
headers: $kHead.user
},
save: {
// NOTE(review): save maps to PUT (full replace), not POST — callers must
// supply `_id`; there is no create action on this resource.
method: 'PUT',
headers: $kHead.user
},
delete: {
method: 'DELETE',
headers: $kHead.user
}
});
// File `$resource` definition
var File =
augmentFileDef(
augmentForMongo(
$resource($kUrl.base + $kUrl.blob + appKey + '/:_id', {_id: '@_id'}, {
get: {
method: 'GET',
headers: $kHead.user,
transformResponse: function(data) {
return new File(angular.fromJson(data));
}
},
query: {
method:'GET',
headers: $kHead.user,
isArray:true,
params: {
_id: ''
},
transformResponse: function(data) {
var retVal = [];
angular.forEach(angular.fromJson(data), function(obj) {
retVal.push(new File(obj));
});
return retVal;
}
},
delete: {
method:'DELETE',
headers: $kHead.user
}
})));
// Push `$resource` definition
// Registers/unregisters the current device for push notifications via the
// Kinvey RPC-style endpoints; the verb is baked into each action's params.
var Push =
$resource($kUrl.base + $kUrl.push + appKey + '/:verb', {verb: '@verb'}, {
register: {
method: 'POST',
headers: $kHead.user,
params: {
verb: 'register-device'
}
},
unregister: {
method: 'POST',
headers: $kHead.user,
params: {
verb: 'unregister-device'
}
}
});
/*
THESE METHODS ALLOW ALIASES FOR OBJECT DEFINITIONS TO BE CREATED
*/
// verify that a critical method is not being overridden
// Guard: refuse an alias that would shadow a reserved public API member.
// Throws the library's alias error (with the offending name appended)
// when `alias` matches `protectedName`; otherwise does nothing.
function verifyAlias(alias, protectedName) {
  if (alias !== protectedName) {
    return;
  }
  throw $kErr.alias + protectedName;
}
// set up an alias
function alias(classname, aliasname) {
verifyAlias(aliasname, 'handshake');
verifyAlias(aliasname, 'User');
verifyAlias(aliasname, 'Group');
verifyAlias(aliasname, 'Object');
verifyAlias(aliasname, 'alias');
api[aliasname] = Object(classname);
}
/*
THIS STATEMENT RETURNS THE PUBLIC API
*/
var api = {
handshake: handshake,
User: User,
Group: Group,
Object: Object,
File: File,
Push: Push,
alias: alias,
rpc: rpc
};
return api;
}]
};
}]);
})(); | mit |
viridia/klendathu | client/src/nav/FilterLinks.tsx | 1321 | import * as React from 'react';
import * as qs from 'qs';
import { ProjectPrefs, PublicAccount, Project } from '../../../common/types/graphql';
import { QueryLink } from '../controls';
import styled from 'styled-components';
// Styled <ul> for the saved-filter links in the left navigation.
// Links inherit the theme's nav text color, underline on hover, and
// render bold (no underline) when their route is active.
const FilterList = styled.ul`
  margin: 0;
  padding-left: 30px;
  > li {
    margin: 4px 0;
    > a {
      color: ${props => props.theme.leftNavTextColor};
      text-decoration: none;
      &:hover {
        text-decoration: underline;
      }
      &.active {
        font-weight: bold;
        text-decoration: none;
      }
    }
  }
`;
interface Props {
project: Project;
account: PublicAccount;
prefs: ProjectPrefs;
}
function parseFilterString(query: string): any {
try {
return qs.parse(query, { ignoreQueryPrefix: true });
} catch (e) {
return {};
}
}
export function FilterLinks({ prefs, account, project }: Props) {
if (!account || !project) {
return null;
}
return (
<FilterList>
{prefs.filters.map(filter => (
<li key={filter.name}>
<QueryLink
to={`/${account.accountName}/${project.name}/${filter.view}`}
query={parseFilterString(filter.value)}
strict={true}
>
<span>{filter.name}</span>
</QueryLink>
</li>
))}
</FilterList>
);
}
| mit |
kstreith/csharp6 | StringInterpolation/CSharp5.cs | 1058 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace StringInterpolation
{
class CSharp5
{
    /// <summary>
    /// Validates every order line in the cart against the supplied limits,
    /// collecting all violations so the caller sees the full list at once
    /// rather than failing on the first problem.
    /// </summary>
    /// <param name="cart">The shopping cart whose order lines are checked.</param>
    /// <param name="maxQuantity">Maximum quantity allowed per line item.</param>
    /// <param name="maxLineTotal">Maximum monetary total (quantity * price) allowed per line item.</param>
    /// <exception cref="ValidationException">Thrown when one or more lines violate a limit.</exception>
    public static void ValidateCart(ShoppingCart cart, int maxQuantity, double maxLineTotal)
    {
        List<String> errors = new List<String>();
        foreach (var item in cart.Orders)
        {
            if (item.Quantity > maxQuantity)
            {
                // Fixed typo in the user-facing message: "greather" -> "greater".
                errors.Add(String.Format("'{0}' cannot be ordered in a quantity greater than {2}, you ordered, '{1}'", item.Name, item.Quantity, maxQuantity));
            }
            if (item.Quantity * item.Price > maxLineTotal)
            {
                errors.Add(String.Format("Attempted to order '{1:c}' worth of '{0}', max allowed is {2:c}", item.Name, item.Quantity * item.Price, maxLineTotal));
            }
        }
        // Raise a single exception carrying every collected violation.
        if (errors.Any())
        {
            throw new ValidationException(errors);
        }
    }
}
}
| mit |
avm99963/codejam | problem.php | 2849 | <?php
require_once("core.php");
if (getrole() > 0)
{
initi18n("problem");
$msg = "";
if (isset($_GET['msg']) && $_GET['msg'] == "empty")
$msg = '<p class="alert-danger">Por favor, rellena todos los campos</p>';
if (isset($_GET['msg']) && $_GET['msg'] == "nameunique")
$msg = '<p class="alert-danger">Un problema con este nombre ya existe</p>';
?>
<!DOCTYPE html>
<html>
<head>
<?php require ("head.php"); ?>
<title><?=i18n("problem", "title")?> – <?=$appname?></title>
<style>
#description {
/* Font */
font-family: sans-serif, Arial, Verdana, "Trebuchet MS";
font-size: 13px;
/* Text color */
color: #333;
}
</style>
</head>
<body>
<div class="content">
<?php require("nav.php"); ?>
<article>
<?php anuncio(); ?>
<?php require("sidebar.php"); ?>
<div class="text right large">
<?php
$query = mysqli_query($con, "SELECT * FROM problems WHERE id = '".(INT)$_GET["id"]."'");
if (mysqli_num_rows($query)) {
$row = mysqli_fetch_assoc($query);
$io = json_decode($row["io"], true);
?>
<h1><?=$row["name"]?></h1>
<div id="description"><?=$row["description"]?></div>
<h3><?=i18n("problem", "inputoutput")?></h3>
<h4><?=i18n("problem", "smallinputset")?></h4>
<p>
<?php
$echo = array();
for ($i = 1; $i < 4; $i++) {
$echo[] = '<a href="download.php?problem='.$row["id"].'&type=in'.$i.'_sinput"><span class="icon svg-ic_file_download_24px"></span></a> <a href="download.php?problem='.$row["id"].'&type=in'.$i.'_sinput">'.i18n("problem", "inputlabel", array($i)).'</a>';
$echo[] = '<a href="download.php?problem='.$row["id"].'&type=out'.$i.'_sinput"><span class="icon svg-ic_file_upload_24px"></span></a> <a href="download.php?problem='.$row["id"].'&type=out'.$i.'_sinput">'.i18n("problem", "outputlabel", array($i)).'</a>';
}
echo implode($echo, "<br>");
?>
</p>
<h4><?=i18n("problem", "largeinputset")?></h4>
<p><a href="download.php?problem=<?=$row["id"]?>&type=in_linput"><span class="icon svg-ic_file_download_24px"></span></a> <a href="download.php?problem=<?=$row["id"]?>&type=in_linput"><?=i18n("global", "input")?></a><br>
<a href="download.php?problem=<?=$row["id"]?>&type=out_linput"><span class="icon svg-ic_file_upload_24px"></span></a> <a href="download.php?problem=<?=$row["id"]?>&type=out_linput"><?=i18n("global", "output")?></a></p>
<?php
} else {
echo "<div class='alert-danger'>Este problema no existe :-/</div>";
}
?>
</div>
</article>
</div>
</body>
</html>
<?php
}
else
{
header('HTTP/1.0 404 Not Found');
}
?> | mit |
plumer/codana | tomcat_files/6.0.43/Http11NioProtocol.java | 30594 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote.http11;
import java.net.InetAddress;
import java.net.URLEncoder;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;
import javax.management.MBeanRegistration;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.coyote.AbstractProtocol;
import org.apache.coyote.ActionCode;
import org.apache.coyote.ActionHook;
import org.apache.coyote.Adapter;
import org.apache.coyote.RequestGroupInfo;
import org.apache.coyote.RequestInfo;
import org.apache.tomcat.util.modeler.Registry;
import org.apache.tomcat.util.net.AbstractEndpoint;
import org.apache.tomcat.util.net.NioChannel;
import org.apache.tomcat.util.net.NioEndpoint;
import org.apache.tomcat.util.net.NioEndpoint.Handler;
import org.apache.tomcat.util.net.jsse.JSSEImplementation;
import org.apache.tomcat.util.net.SecureNioChannel;
import org.apache.tomcat.util.net.SocketStatus;
import org.apache.tomcat.util.res.StringManager;
/**
* Abstract the protocol implementation, including threading, etc.
* Processor is single threaded and specific to stream-based protocols,
* will not fit Jk protocols like JNI.
*
* @author Remy Maucherat
* @author Costin Manolache
* @author Filip Hanik
*/
public class Http11NioProtocol extends AbstractProtocol implements MBeanRegistration
{
protected JSSEImplementation sslImplementation = null;
public Http11NioProtocol() {
cHandler = new Http11ConnectionHandler( this );
setSoLinger(Constants.DEFAULT_CONNECTION_LINGER);
setSoTimeout(Constants.DEFAULT_CONNECTION_TIMEOUT);
//setServerSoTimeout(Constants.DEFAULT_SERVER_SOCKET_TIMEOUT);
setTcpNoDelay(Constants.DEFAULT_TCP_NO_DELAY);
}
/**
* The string manager for this package.
*/
protected static StringManager sm =
StringManager.getManager(Constants.Package);
/** Pass config info
*/
public void setAttribute( String name, Object value ) {
if( log.isTraceEnabled())
log.trace(sm.getString("http11protocol.setattribute", name, value));
attributes.put(name, value);
}
public Object getAttribute( String key ) {
if( log.isTraceEnabled())
log.trace(sm.getString("http11protocol.getattribute", key));
return attributes.get(key);
}
public Iterator getAttributeNames() {
return attributes.keySet().iterator();
}
/**
* Set a property.
*/
public boolean setProperty(String name, String value) {
setAttribute(name, value); //store all settings
if ( name!=null && (name.startsWith("socket.") ||name.startsWith("selectorPool.")) ){
return ep.setProperty(name, value);
} else {
return ep.setProperty(name,value); //make sure we at least try to set all properties
}
}
/**
* Get a property
*/
public String getProperty(String name) {
return (String)getAttribute(name);
}
/** The adapter, used to call the connector
*/
public void setAdapter(Adapter adapter) {
this.adapter=adapter;
}
public Adapter getAdapter() {
return adapter;
}
/** Start the protocol
*/
public void init() throws Exception {
ep.setName(getName());
ep.setHandler(cHandler);
//todo, determine if we even need these
ep.getSocketProperties().setRxBufSize(Math.max(ep.getSocketProperties().getRxBufSize(),getMaxHttpHeaderSize()));
ep.getSocketProperties().setTxBufSize(Math.max(ep.getSocketProperties().getTxBufSize(),getMaxHttpHeaderSize()));
try {
ep.init();
sslImplementation = new JSSEImplementation();
} catch (Exception ex) {
log.error(sm.getString("http11protocol.endpoint.initerror"), ex);
throw ex;
}
if(log.isInfoEnabled())
log.info(sm.getString("http11protocol.init", getName()));
}
ObjectName tpOname;
ObjectName rgOname;
public void start() throws Exception {
if( this.domain != null ) {
try {
tpOname=new ObjectName
(domain + ":" + "type=ThreadPool,name=" + getName());
Registry.getRegistry(null, null)
.registerComponent(ep, tpOname, null );
} catch (Exception e) {
log.error("Can't register threadpool" );
}
rgOname=new ObjectName
(domain + ":type=GlobalRequestProcessor,name=" + getName());
Registry.getRegistry(null, null).registerComponent
( cHandler.global, rgOname, null );
}
try {
ep.start();
} catch (Exception ex) {
log.error(sm.getString("http11protocol.endpoint.starterror"), ex);
throw ex;
}
if(log.isInfoEnabled())
log.info(sm.getString("http11protocol.start", getName()));
}
public void pause() throws Exception {
try {
ep.pause();
} catch (Exception ex) {
log.error(sm.getString("http11protocol.endpoint.pauseerror"), ex);
throw ex;
}
if(log.isInfoEnabled())
log.info(sm.getString("http11protocol.pause", getName()));
}
public void resume() throws Exception {
try {
ep.resume();
} catch (Exception ex) {
log.error(sm.getString("http11protocol.endpoint.resumeerror"), ex);
throw ex;
}
if(log.isInfoEnabled())
log.info(sm.getString("http11protocol.resume", getName()));
}
public void destroy() throws Exception {
if(log.isInfoEnabled())
log.info(sm.getString("http11protocol.stop", getName()));
ep.destroy();
if( tpOname!=null )
Registry.getRegistry(null, null).unregisterComponent(tpOname);
if( rgOname != null )
Registry.getRegistry(null, null).unregisterComponent(rgOname);
}
// -------------------- Properties--------------------
protected NioEndpoint ep=new NioEndpoint();
protected final AbstractEndpoint getEndpoint() {
return ep;
}
protected boolean secure = false;
protected Hashtable attributes = new Hashtable();
private int maxKeepAliveRequests=100; // as in Apache HTTPD server
private int timeout = 300000; // 5 minutes as in Apache HTTPD server
private int maxSavePostSize = 4 * 1024;
private int maxHttpHeaderSize = 8 * 1024;
protected int processorCache = 200; //max number of Http11NioProcessor objects cached
private int socketCloseDelay=-1;
private boolean disableUploadTimeout = true;
private int socketBuffer = 9000;
private Adapter adapter;
private Http11ConnectionHandler cHandler;
/**
* Compression value.
*/
private String compression = "off";
private String noCompressionUserAgents = null;
private String restrictedUserAgents = null;
private String compressableMimeTypes = "text/html,text/xml,text/plain";
private int compressionMinSize = 2048;
private String server;
// -------------------- Pool setup --------------------
public void setPollerThreadCount(int count) {
ep.setPollerThreadCount(count);
}
public int getPollerThreadCount() {
return ep.getPollerThreadCount();
}
public void setSelectorTimeout(long timeout) {
ep.setSelectorTimeout(timeout);
}
public long getSelectorTimeout() {
return ep.getSelectorTimeout();
}
// *
public Executor getExecutor() {
return ep.getExecutor();
}
// *
public void setExecutor(Executor executor) {
ep.setExecutor(executor);
}
public void setUseExecutor(boolean useexec) {
ep.setUseExecutor(useexec);
}
public int getMaxThreads() {
return ep.getMaxThreads();
}
public void setMaxThreads( int maxThreads ) {
ep.setMaxThreads(maxThreads);
setAttribute("maxThreads", "" + maxThreads);
}
public void setThreadPriority(int threadPriority) {
ep.setThreadPriority(threadPriority);
setAttribute("threadPriority", "" + threadPriority);
}
public void setAcceptorThreadPriority(int threadPriority) {
ep.setAcceptorThreadPriority(threadPriority);
setAttribute("acceptorThreadPriority", "" + threadPriority);
}
public void setPollerThreadPriority(int threadPriority) {
ep.setPollerThreadPriority(threadPriority);
setAttribute("pollerThreadPriority", "" + threadPriority);
}
public int getThreadPriority() {
return ep.getThreadPriority();
}
public int getAcceptorThreadPriority() {
return ep.getAcceptorThreadPriority();
}
public int getPollerThreadPriority() {
return ep.getThreadPriority();
}
public boolean getUseSendfile() {
return ep.getUseSendfile();
}
public void setUseSendfile(boolean useSendfile) {
ep.setUseSendfile(useSendfile);
}
// -------------------- Tcp setup --------------------
public int getBacklog() {
return ep.getBacklog();
}
public void setBacklog( int i ) {
ep.setBacklog(i);
setAttribute("backlog", "" + i);
}
public int getPort() {
return ep.getPort();
}
public void setPort( int port ) {
ep.setPort(port);
setAttribute("port", "" + port);
}
public InetAddress getAddress() {
return ep.getAddress();
}
public void setAddress(InetAddress ia) {
ep.setAddress( ia );
setAttribute("address", "" + ia);
}
    /**
     * Builds the connector's registration name: "http-" plus the optional
     * bind address (leading '/' stripped, URL-encoded) and the port,
     * e.g. "http-127.0.0.1-8080" or "http-8080" when no address is set.
     * NOTE(review): uses the deprecated single-argument URLEncoder.encode,
     * which encodes with the platform default charset — consider the
     * two-argument form with an explicit encoding.
     */
    public String getName() {
        String encodedAddr = "";
        if (getAddress() != null) {
            encodedAddr = "" + getAddress();
            if (encodedAddr.startsWith("/"))
                encodedAddr = encodedAddr.substring(1);
            encodedAddr = URLEncoder.encode(encodedAddr) + "-";
        }
        return ("http-" + encodedAddr + ep.getPort());
    }
public boolean getTcpNoDelay() {
return ep.getTcpNoDelay();
}
public void setTcpNoDelay( boolean b ) {
ep.setTcpNoDelay( b );
setAttribute("tcpNoDelay", "" + b);
}
public boolean getDisableUploadTimeout() {
return disableUploadTimeout;
}
public void setDisableUploadTimeout(boolean isDisabled) {
disableUploadTimeout = isDisabled;
}
public int getSocketBuffer() {
return socketBuffer;
}
public void setSocketBuffer(int valueI) {
socketBuffer = valueI;
}
public String getCompression() {
return compression;
}
public void setCompression(String valueS) {
compression = valueS;
setAttribute("compression", valueS);
}
public int getMaxSavePostSize() {
return maxSavePostSize;
}
public void setMaxSavePostSize(int valueI) {
maxSavePostSize = valueI;
setAttribute("maxSavePostSize", "" + valueI);
}
public int getMaxHttpHeaderSize() {
return maxHttpHeaderSize;
}
public void setMaxHttpHeaderSize(int valueI) {
maxHttpHeaderSize = valueI;
setAttribute("maxHttpHeaderSize", "" + valueI);
}
public String getRestrictedUserAgents() {
return restrictedUserAgents;
}
public void setRestrictedUserAgents(String valueS) {
restrictedUserAgents = valueS;
setAttribute("restrictedUserAgents", valueS);
}
public String getNoCompressionUserAgents() {
return noCompressionUserAgents;
}
public void setNoCompressionUserAgents(String valueS) {
noCompressionUserAgents = valueS;
setAttribute("noCompressionUserAgents", valueS);
}
public String getCompressableMimeType() {
return compressableMimeTypes;
}
public void setCompressableMimeType(String valueS) {
compressableMimeTypes = valueS;
setAttribute("compressableMimeTypes", valueS);
}
public int getCompressionMinSize() {
return compressionMinSize;
}
public void setCompressionMinSize(int valueI) {
compressionMinSize = valueI;
setAttribute("compressionMinSize", "" + valueI);
}
public int getSoLinger() {
return ep.getSoLinger();
}
public void setSoLinger( int i ) {
ep.setSoLinger( i );
setAttribute("soLinger", "" + i);
}
public int getSoTimeout() {
return ep.getSoTimeout();
}
public void setSoTimeout( int i ) {
ep.setSoTimeout(i);
setAttribute("soTimeout", "" + i);
}
public String getProtocol() {
return getProperty("protocol");
}
public void setProtocol( String k ) {
setSecure(true);
setAttribute("protocol", k);
}
public boolean getSecure() {
return secure;
}
public void setSecure( boolean b ) {
ep.setSecure(b);
secure=b;
setAttribute("secure", "" + b);
}
public int getMaxKeepAliveRequests() {
return maxKeepAliveRequests;
}
/** Set the maximum number of Keep-Alive requests that we will honor.
*/
public void setMaxKeepAliveRequests(int mkar) {
maxKeepAliveRequests = mkar;
setAttribute("maxKeepAliveRequests", "" + mkar);
}
/**
* Return the Keep-Alive policy for the connection.
*/
public boolean getKeepAlive() {
return ((maxKeepAliveRequests != 0) && (maxKeepAliveRequests != 1));
}
/**
* Set the keep-alive policy for this connection.
*/
public void setKeepAlive(boolean keepAlive) {
if (!keepAlive) {
setMaxKeepAliveRequests(1);
}
}
public int getSocketCloseDelay() {
return socketCloseDelay;
}
public void setSocketCloseDelay( int d ) {
socketCloseDelay=d;
setAttribute("socketCloseDelay", "" + d);
}
public void setServer( String server ) {
this.server = server;
}
public String getServer() {
return server;
}
public int getTimeout() {
return timeout;
}
public void setTimeout( int timeouts ) {
timeout = timeouts;
setAttribute("timeout", "" + timeouts);
}
public void setProcessorCache(int processorCache) {
this.processorCache = processorCache;
}
public void setOomParachute(int oomParachute) {
ep.setOomParachute(oomParachute);
setAttribute("oomParachute",oomParachute);
}
// -------------------- SSL related properties --------------------
public String getKeystoreFile() { return ep.getKeystoreFile();}
public void setKeystoreFile(String s ) { ep.setKeystoreFile(s);}
public void setKeystore(String s) { setKeystoreFile(s);}
public String getKeystore(){ return getKeystoreFile();}
public String getKeyAlias() { return ep.getKeyAlias();}
public void setKeyAlias(String s ) { ep.setKeyAlias(s);}
public String getAlgorithm() { return ep.getAlgorithm();}
public void setAlgorithm(String s ) { ep.setAlgorithm(s);}
public void setClientauth(String s) {setClientAuth(s);}
public String getClientauth(){ return getClientAuth();}
public String getClientAuth() { return ep.getClientAuth();}
public void setClientAuth(String s ) { ep.setClientAuth(s);}
public String getKeystorePass() { return ep.getKeystorePass();}
public void setKeystorePass(String s ) { ep.setKeystorePass(s);}
public void setKeypass(String s) { setKeystorePass(s);}
public String getKeypass() { return getKeystorePass();}
public String getKeystoreType() { return ep.getKeystoreType();}
public void setKeystoreType(String s ) { ep.setKeystoreType(s);}
public String getKeytype() { return getKeystoreType();}
public void setKeytype(String s ) { setKeystoreType(s);}
public void setTruststoreFile(String f){ep.setTruststoreFile(f);}
public String getTruststoreFile(){return ep.getTruststoreFile();}
public void setTruststorePass(String p){ep.setTruststorePass(p);}
public String getTruststorePass(){return ep.getTruststorePass();}
public void setTruststoreType(String t){ep.setTruststoreType(t);}
public String getTruststoreType(){ return ep.getTruststoreType();}
public String getSslProtocol() { return ep.getSslProtocol();}
public void setSslProtocol(String s) { ep.setSslProtocol(s);}
public String getCiphers() { return ep.getCiphers();}
public void setCiphers(String s) { ep.setCiphers(s);}
public boolean getSSLEnabled() { return ep.isSSLEnabled(); }
public void setSSLEnabled(boolean SSLEnabled) { ep.setSSLEnabled(SSLEnabled); }
// Alias for sslEnabledProtocols
public void setProtocols(String k) {
setSslEnabledProtocols(k);
}
public void setSslEnabledProtocols(String k) {
ep.setSslEnabledProtocols(k);
}
/**
* When client certificate information is presented in a form other than
* instances of {@link java.security.cert.X509Certificate} it needs to be
* converted before it can be used and this property controls which JSSE
* provider is used to perform the conversion. For example it is used with
* the AJP connectors, the HTTP APR connector and with the
* {@link org.apache.catalina.valves.SSLValve}. If not specified, the
* default provider will be used.
*/
protected String clientCertProvider = null;
public String getClientCertProvider() { return clientCertProvider; }
public void setClientCertProvider(String s) { this.clientCertProvider = s; }
// -------------------- Connection handler --------------------
static class Http11ConnectionHandler implements Handler {
protected Http11NioProtocol proto;
protected static int count = 0;
protected RequestGroupInfo global = new RequestGroupInfo();
protected ConcurrentHashMap<NioChannel, Http11NioProcessor> connections =
new ConcurrentHashMap<NioChannel, Http11NioProcessor>();
protected ConcurrentLinkedQueue<Http11NioProcessor> recycledProcessors = new ConcurrentLinkedQueue<Http11NioProcessor>() {
protected AtomicInteger size = new AtomicInteger(0);
public boolean offer(Http11NioProcessor processor) {
boolean offer = proto.processorCache==-1?true:size.get() < proto.processorCache;
//avoid over growing our cache or add after we have stopped
boolean result = false;
if ( offer ) {
result = super.offer(processor);
if ( result ) {
size.incrementAndGet();
}
}
if (!result) deregister(processor);
return result;
}
public Http11NioProcessor poll() {
Http11NioProcessor result = super.poll();
if ( result != null ) {
size.decrementAndGet();
}
return result;
}
public void clear() {
Http11NioProcessor next = poll();
while ( next != null ) {
deregister(next);
next = poll();
}
super.clear();
size.set(0);
}
};
Http11ConnectionHandler(Http11NioProtocol proto) {
this.proto = proto;
}
public void releaseCaches() {
recycledProcessors.clear();
}
/**
* Use this only if the processor is not available, otherwise use
* {@link #release(NioChannel, Http11NioProcessor).
*/
public void release(NioChannel socket) {
Http11NioProcessor result = connections.remove(socket);
if ( result != null ) {
result.recycle();
recycledProcessors.offer(result);
}
}
public void release(NioChannel socket, Http11NioProcessor processor) {
connections.remove(socket);
processor.recycle();
recycledProcessors.offer(processor);
}
public SocketState event(NioChannel socket, SocketStatus status) {
Http11NioProcessor result = connections.get(socket);
SocketState state = SocketState.CLOSED;
if (result != null) {
if (log.isDebugEnabled()) log.debug("Http11NioProcessor.error="+result.error);
// Call the appropriate event
try {
state = result.event(status);
} catch (java.net.SocketException e) {
// SocketExceptions are normal
Http11NioProtocol.log.debug
(sm.getString
("http11protocol.proto.socketexception.debug"), e);
} catch (java.io.IOException e) {
// IOExceptions are normal
Http11NioProtocol.log.debug
(sm.getString
("http11protocol.proto.ioexception.debug"), e);
}
// Future developers: if you discover any other
// rare-but-nonfatal exceptions, catch them here, and log as
// above.
catch (Throwable e) {
// any other exception or error is odd. Here we log it
// with "ERROR" level, so it will show up even on
// less-than-verbose logs.
Http11NioProtocol.log.error
(sm.getString("http11protocol.proto.error"), e);
} finally {
if (state != SocketState.LONG) {
release(socket, result);
if (state == SocketState.OPEN) {
socket.getPoller().add(socket);
}
} else {
if (log.isDebugEnabled()) log.debug("Keeping processor["+result);
//add correct poller events here based on Comet stuff
NioEndpoint.KeyAttachment att = (NioEndpoint.KeyAttachment)socket.getAttachment(false);
socket.getPoller().add(socket,att.getCometOps());
}
}
}
return state;
}
public SocketState process(NioChannel socket) {
Http11NioProcessor processor = null;
try {
processor = connections.remove(socket);
if (processor == null) {
processor = recycledProcessors.poll();
}
if (processor == null) {
processor = createProcessor();
}
if (processor instanceof ActionHook) {
((ActionHook) processor).action(ActionCode.ACTION_START, null);
}
if (proto.ep.isSSLEnabled() && (proto.sslImplementation != null)) {
if (socket instanceof SecureNioChannel) {
SecureNioChannel ch = (SecureNioChannel)socket;
processor.setSslSupport(proto.sslImplementation.getSSLSupport(ch.getSslEngine().getSession()));
}else processor.setSslSupport(null);
} else {
processor.setSslSupport(null);
}
SocketState state = processor.process(socket);
if (state == SocketState.LONG) {
// In the middle of processing a request/response. Keep the
// socket associated with the processor.
connections.put(socket, processor);
socket.getPoller().add(socket);
} else if (state == SocketState.OPEN) {
// In keep-alive but between requests. OK to recycle
// processor. Continue to poll for the next request.
release(socket, processor);
socket.getPoller().add(socket);
} else {
// Connection closed. OK to recycle the processor.
release(socket, processor);
}
return state;
} catch (java.net.SocketException e) {
// SocketExceptions are normal
Http11NioProtocol.log.debug
(sm.getString
("http11protocol.proto.socketexception.debug"), e);
} catch (java.io.IOException e) {
// IOExceptions are normal
Http11NioProtocol.log.debug
(sm.getString
("http11protocol.proto.ioexception.debug"), e);
}
// Future developers: if you discover any other
// rare-but-nonfatal exceptions, catch them here, and log as
// above.
catch (Throwable e) {
// any other exception or error is odd. Here we log it
// with "ERROR" level, so it will show up even on
// less-than-verbose logs.
Http11NioProtocol.log.error
(sm.getString("http11protocol.proto.error"), e);
}
release(socket, processor);
return SocketState.CLOSED;
}
public Http11NioProcessor createProcessor() {
Http11NioProcessor processor = new Http11NioProcessor(
proto.ep.getSocketProperties().getRxBufSize(),
proto.ep.getSocketProperties().getTxBufSize(),
proto.maxHttpHeaderSize,
proto.ep);
processor.setAdapter(proto.adapter);
processor.setMaxKeepAliveRequests(proto.maxKeepAliveRequests);
processor.setTimeout(proto.timeout);
processor.setDisableUploadTimeout(proto.disableUploadTimeout);
processor.setCompressionMinSize(proto.compressionMinSize);
processor.setCompression(proto.compression);
processor.setNoCompressionUserAgents(proto.noCompressionUserAgents);
processor.setCompressableMimeTypes(proto.compressableMimeTypes);
processor.setRestrictedUserAgents(proto.restrictedUserAgents);
processor.setSocketBuffer(proto.socketBuffer);
processor.setMaxSavePostSize(proto.maxSavePostSize);
processor.setServer(proto.server);
register(processor);
return processor;
}
AtomicInteger registerCount = new AtomicInteger(0);
public void register(Http11NioProcessor processor) {
if (proto.getDomain() != null) {
synchronized (this) {
try {
registerCount.addAndGet(1);
if (log.isDebugEnabled()) log.debug("Register ["+processor+"] count="+registerCount.get());
RequestInfo rp = processor.getRequest().getRequestProcessor();
rp.setGlobalProcessor(global);
ObjectName rpName = new ObjectName
(proto.getDomain() + ":type=RequestProcessor,worker="
+ proto.getName() + ",name=HttpRequest" + count++);
Registry.getRegistry(null, null).registerComponent(rp, rpName, null);
rp.setRpName(rpName);
} catch (Exception e) {
log.warn("Error registering request");
}
}
}
}
public void deregister(Http11NioProcessor processor) {
if (proto.getDomain() != null) {
synchronized (this) {
try {
registerCount.addAndGet(-1);
if (log.isDebugEnabled()) log.debug("Deregister ["+processor+"] count="+registerCount.get());
RequestInfo rp = processor.getRequest().getRequestProcessor();
rp.setGlobalProcessor(null);
ObjectName rpName = rp.getRpName();
Registry.getRegistry(null, null).unregisterComponent(rpName);
rp.setRpName(null);
} catch (Exception e) {
log.warn("Error unregistering request", e);
}
}
}
}
}
protected static org.apache.juli.logging.Log log
= org.apache.juli.logging.LogFactory.getLog(Http11NioProtocol.class);
// -------------------- Various implementation classes --------------------
protected String domain;
protected ObjectName oname;
protected MBeanServer mserver;
public ObjectName getObjectName() {
return oname;
}
public String getDomain() {
return domain;
}
public int getProcessorCache() {
return processorCache;
}
public int getOomParachute() {
return ep.getOomParachute();
}
public ObjectName preRegister(MBeanServer server,
ObjectName name) throws Exception {
oname=name;
mserver=server;
domain=name.getDomain();
return name;
}
public void postRegister(Boolean registrationDone) {
}
public void preDeregister() throws Exception {
}
public void postDeregister() {
}
}
| mit |