text stringlengths 1 1.05M |
|---|
package fr.iv.calories.config;
import java.beans.PropertyVetoException;
import java.util.Properties;
import java.util.logging.Logger;
import javax.sql.DataSource;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.orm.hibernate5.HibernateTransactionManager;
import org.springframework.orm.hibernate5.LocalSessionFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.web.servlet.ViewResolver;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
import com.mchange.v2.c3p0.ComboPooledDataSource;
@Configuration
@EnableWebMvc
@EnableTransactionManagement
@ComponentScan(basePackages = "fr.iv.calories")
@PropertySource({"classpath:food-db.properties"})
@Configuration
@EnableWebMvc
@EnableTransactionManagement
@ComponentScan(basePackages = "fr.iv.calories")
@PropertySource({"classpath:food-db.properties"})
public class FoodContextAppConfig implements WebMvcConfigurer {

    /** Spring environment; all values come from classpath:food-db.properties. */
    @Autowired
    private Environment env;

    private final Logger logger = Logger.getLogger(getClass().getName());

    /**
     * Resolves logical view names returned by controllers to JSP files
     * under {@code /WEB-INF/view/}.
     */
    @Bean
    public ViewResolver viewResolver() {
        InternalResourceViewResolver viewResolver = new InternalResourceViewResolver();
        viewResolver.setPrefix("/WEB-INF/view/");
        viewResolver.setSuffix(".jsp");
        return viewResolver;
    }

    /**
     * C3P0 pooled DataSource for the food database.
     * Connection settings come from the {@code jdbc.*} properties,
     * pool sizing from the {@code connection.pool.*} properties.
     *
     * @throws IllegalStateException if a required property is missing
     */
    @Bean
    public DataSource myDataSource() {
        ComboPooledDataSource myDataSource = new ComboPooledDataSource();
        try {
            // Fail fast with a clear message instead of a late NPE when the
            // driver property is absent.
            myDataSource.setDriverClass(env.getRequiredProperty("jdbc.driver"));
        } catch (PropertyVetoException e) {
            // An unloadable JDBC driver class is unrecoverable at startup.
            throw new RuntimeException(
                "Invalid JDBC driver class: " + env.getProperty("jdbc.driver"), e);
        }
        logger.info("=====>>>url = " + env.getProperty("jdbc.url"));
        logger.info("=====>>>user = " + env.getProperty("jdbc.user"));
        // NOTE: the password is deliberately never logged.
        myDataSource.setJdbcUrl(env.getRequiredProperty("jdbc.url"));
        myDataSource.setUser(env.getRequiredProperty("jdbc.user"));
        myDataSource.setPassword(env.getProperty("jdbc.password"));
        myDataSource.setInitialPoolSize(getIntProperty("connection.pool.initialPoolSize"));
        myDataSource.setMinPoolSize(getIntProperty("connection.pool.minPoolSize"));
        myDataSource.setMaxPoolSize(getIntProperty("connection.pool.maxPoolSize"));
        myDataSource.setMaxIdleTime(getIntProperty("connection.pool.maxIdleTime"));
        return myDataSource;
    }

    /**
     * Hibernate SessionFactory wired to the pooled DataSource; entity
     * packages come from the {@code hibernate.packagesToScan} property.
     */
    @Bean
    public LocalSessionFactoryBean sessionFactory() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(myDataSource());
        sessionFactory.setPackagesToScan(env.getProperty("hibernate.packagesToScan"));
        sessionFactory.setHibernateProperties(getHibernateProperty());
        return sessionFactory;
    }

    /** Transaction manager bound to the Hibernate SessionFactory. */
    @Bean
    @Autowired
    public HibernateTransactionManager mytransactionManager(SessionFactory sessionFactory) {
        HibernateTransactionManager mytransactionManager = new HibernateTransactionManager();
        mytransactionManager.setSessionFactory(sessionFactory);
        return mytransactionManager;
    }

    /** Serves static assets under /resources/** directly, bypassing controllers. */
    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        registry.addResourceHandler("/resources/**")
                .addResourceLocations("/resources/");
    }

    /**
     * Builds the Hibernate properties passed to the SessionFactory.
     * BUGFIX: the show-SQL key must be "hibernate.show_sql" (Hibernate's
     * AvailableSettings.SHOW_SQL); the unprefixed "show_sql" key used before
     * was silently ignored by Hibernate.
     */
    private Properties getHibernateProperty() {
        Properties properties = new Properties();
        properties.setProperty("hibernate.show_sql", env.getRequiredProperty("hibernate.show_sql"));
        properties.setProperty("hibernate.dialect", env.getRequiredProperty("hibernate.dialect"));
        return properties;
    }

    /**
     * Reads a required integer property.
     *
     * @throws IllegalStateException if the property is missing or not an
     *     integer (previously this NPE'd or threw a bare
     *     NumberFormatException with no property name in the message)
     */
    private int getIntProperty(String nameProperty) {
        String value = env.getRequiredProperty(nameProperty);
        try {
            return Integer.parseInt(value.trim());
        } catch (NumberFormatException e) {
            throw new IllegalStateException(
                "Property '" + nameProperty + "' is not a valid integer: " + value, e);
        }
    }
}
|
#!/usr/bin/env python2.7
# Copyright 2015 <NAME> All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
OneResume
- Write your resume in YAML
- Output it to word, html, txt, etc
"""
import logging
import imp
import pkgutil
import pkg_resources
import sys
class Plugin(object):
    # Python 2-only self-registration base class: every subclass of Plugin is
    # recorded in Plugin.registered under its class name via the metaclass
    # below.  (Python 3 ignores the __metaclass__ attribute; the shebang above
    # pins this file to python 2.7.)
    class __metaclass__(type):
        def __init__(cls, name, base, attrs):
            if not hasattr(cls, 'registered'):
                # First class created is the Plugin base itself: create the
                # shared registry instead of registering it.
                cls.registered = {}
            else:
                # A concrete subclass: register it by class name.
                cls.registered[name] = cls

    @classmethod
    def load(cls):
        # Reset the registry, then import every plugin advertised under the
        # 'one_resume.plugins' entry-point group; importing a plugin module
        # defines its Plugin subclasses, which re-populates cls.registered
        # through the metaclass above.
        cls.registered = {}
        for plugin in pkg_resources.iter_entry_points('one_resume.plugins'):
            plugin.load()
|
#!/bin/bash
# Collect this geth node's enode URL and first IPv4 address into
# simbel/nodeInfo.ds for other scripts to consume.

node_file="$PWD/simbel/nodeInfo.ds"

# Start fresh; -f so a missing file is not an error (plain `rm` printed an
# error on the first run).  Paths are quoted so the script survives a
# working directory containing spaces.
rm -f "$node_file"

# Ask the geth console for admin.nodeInfo and pull out the enode URL.
output="$(./geth --datadir="$PWD/simbel/data" console <<< $'admin.nodeInfo')"
if [[ "$output" =~ \"enode[^,]* ]]; then
    echo "your enode is: ${BASH_REMATCH[0]}"
    echo "${BASH_REMATCH[0]}" >> "$node_file"
fi

# First IPv4 dotted-quad reported by ifconfig (each octet limited to 0-255).
output="$(ifconfig)"
result=$(echo "$output" | grep -oE "\b((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b" | head -1)
echo "your ip address is: ${result}"
echo "${result}" >> "$node_file"
|
import mysql.connector

# Demo script: connect to a local MySQL database, select every row from the
# `customers` table and print them.
# NOTE(review): host/user/password/database are hard-coded placeholders —
# move them to configuration or environment variables before real use, and
# close the cursor/connection when done.

# Connect to the database
mydb = mysql.connector.connect(
    host="localhost",
    user="yourusername",
    passwd="yourpassword",
    database="mydatabase"
)

# Create a cursor
mycursor = mydb.cursor()

# Execute a query
mycursor.execute("SELECT * FROM customers")

# Fetch all records (loads the full result set into memory)
records = mycursor.fetchall()

# Print records, one tuple per row
for record in records:
    print(record)
#include "peer.h"
#include "protocol.h"
#include "exception.h"
#include "torrent.h"
#include "server.h"
using namespace std;
namespace torrent
{
bool PeerInfo::operator==(const PeerInfo& rhs) const
{
return m_Address == rhs.m_Address && m_Port == rhs.m_Port;
}
PeerInfo::PeerInfo(const QHostAddress& address, quint16 port, const QString& peerId) : m_Address(address), m_Port(port), m_TrackedPeerId(peerId), m_AvailabilityAmount(0), m_DownloadedBytes(0), m_UploadedBytes(0), m_DownloadingSpeed(0), m_UploadingSpeed(0)
{
}
quint64 PeerInfo::GetDownloadingSpeed() const
{
return m_DownloadingSpeed;
}
quint64 PeerInfo::GetUploadingSpeed() const
{
return m_UploadingSpeed;
}
quint64 PeerInfo::GetDownloadedBytes() const
{
return m_DownloadedBytes;
}
quint64 PeerInfo::GetUploadedBytes() const
{
return m_UploadedBytes;
}
QHostAddress PeerInfo::GetHostAddress() const
{
return m_Address;
}
quint16 PeerInfo::GetPort() const
{
return m_Port;
}
QString PeerInfo::GetPeerId() const
{
return m_PeerId;
}
bool PeerInfo::HasPiece(quint32 index)
{
if (index >= (quint32)m_PeerPieces.size())
THROW;
return m_PeerPieces.testBit(index);
}
Peer::PeerState Peer::GetState()
{
return m_State;
}
void Peer::Choke()
{
SendChoke();
m_AmChoking = true;
}
void Peer::Unchoke()
{
SendUnchoke();
m_AmChoking = false;
}
void Peer::Interesting()
{
if (!m_AmInterested)
{
SendInterested();
m_AmInterested = true;
}
}
void Peer::NotInteresting()
{
if (m_AmInterested)
{
SendNotInterested();
m_AmInterested = false;
}
}
bool Peer::AmChoking()
{
return m_AmChoking;
}
bool Peer::AmInterested()
{
return m_AmInterested;
}
bool Peer::PeerChoking()
{
return m_PeerChoking;
}
bool Peer::PeerInterested()
{
return m_PeerInterested;
}
void Peer::RequestPiece(quint32 index, quint32 begin, quint32 length)
{
//CHECK(!m_OutcomingRequestList.contains(PiecePart(index, begin, length)));
SendRequest(index, begin, length);
//m_OutcomingRequestList.append(PiecePart(index, begin, length));
}
void Peer::CancelPiece(quint32 index, quint32 begin, quint32 length)
{
if (!m_OutcomingRequestList.contains(PiecePart(index, begin, length)))
THROW;
if (!m_OutcomingRequestList.removeOne(PiecePart(index, begin, length)))
THROW;
SendCancel(index, begin, length);
}
void Peer::UploadPiece(quint32 index, quint32 begin, quint32 length)
{
PiecePart piece(index, begin, length);
if (m_UploadingPieceParts.contains(piece))
THROW;
m_UploadingPieceParts.append(piece);
}
void Peer::Have(quint32 index)
{
if (m_State != torrent::Peer::PeerState::PS_RECVING)
return;
if (!SendHave(index))
THROW;
}
/*bool Peer::Process(quint32 deltaTime)
{
switch (m_State)
{
case PeerState::PS_NULL:
{
if (!Connection(deltaTime))
return false;
}
break;
case PeerState::PS_CONNECTED:
{
if (!TickTimers(deltaTime))
return false;
if (!ReciveHandshake())
return false;
}
break;
case PeerState::PS_HANDSHAKED:
{
if (m_Torrent->GetInfoHash() != m_Infohash)
return false;
if (!m_SentHandshake)
{
if (!SendHandshake(m_Torrent->GetInfoHash()))
return false;
if (!SendPeerID())
return false;
m_SentHandshake = true;
}
if (!TickTimers(deltaTime))
return false;
if (!RecivePeerID())
return false;
}
break;
case PeerState::PS_RECV_ID:
{
if (!m_PeerInfo->m_TrackedPeerId.isEmpty() && m_PeerInfo->m_TrackedPeerId != m_PeerInfo->m_PeerId)
return false;
if (m_PeerInfo->m_PeerId == m_Torrent->m_Server->GetPeerId())
return false;
m_State = PeerState::PS_RECVING;
if (!TickTimers(deltaTime))
return false;
if (m_Torrent->m_Pieces.count(true) > 0)
{
if (!SendBitField(m_Torrent->m_Pieces))
return false;
}
}
break;
case PeerState::PS_RECVING:
{
if (!TickTimers(deltaTime))
return false;
if (!SendPackets())
return false;
if (!RecivePackets())
return false;
}
break;
default:
THROW;
break;
}
return true;
}
*/
bool Peer::operator==(const PeerInfo& rhs) const
{
return *m_PeerInfo == rhs;
}
bool Peer::operator==(const Peer& rhs) const
{
return *m_PeerInfo == *rhs.m_PeerInfo;
}
Peer::Peer(QTcpSocket* socket) : m_State(PeerState::PS_CONNECTED), m_Torrent(nullptr), m_PeerInfo(new PeerInfo(socket->peerAddress(), socket->peerPort())), m_AmChoking(true), m_AmInterested(false), m_PeerChoking(true), m_PeerInterested(false), m_OutcomingRequestIndex(0), m_OutcomingRequestBegin(0),
m_Socket(socket), m_NextPacketLength(0), m_LocalKeepAliveTimer(this), m_RemoteKeepAliveTimer(this), m_SpeedTimer(0), m_DownloadedBytes(0), m_UploadedBytes(0), m_PrevDownloadedBytes(0), m_PrevUploadedBytes(0), m_SentHandshake(false), m_RequestSizePower(0), m_MaxRequestSizePower(2*PiecePartSize), m_DownloadingDeltaSpeed(0)
{
connect(&m_LocalKeepAliveTimer,SIGNAL(timeout()), SLOT(OnTimeout()));
m_LocalKeepAliveTimer.start(KeepAliveInterval);
connect(&m_RemoteKeepAliveTimer,SIGNAL(timeout()), SLOT(OnTimeout()));
m_RemoteKeepAliveTimer.start(KeepAliveInterval);
connect(m_Socket.data(),SIGNAL(readReady()), SLOT(OnReadReady()));
//connect(m_Socket.data(),SIGNAL(disconnect()), SIGNAL(NeedRemove()));
connect(m_Socket.data(),SIGNAL(error(QAbstractSocket::SocketError)), SLOT(OnError(QAbstractSocket::SocketError)));
}
Peer::Peer(Torrent* torrent, QSharedPointer<PeerInfo> peerInfo) : m_State(PeerState::PS_NULL), m_Torrent(torrent), m_PeerInfo(peerInfo), m_AmChoking(true), m_AmInterested(false), m_PeerChoking(true), m_PeerInterested(false), m_OutcomingRequestIndex(0), m_OutcomingRequestBegin(0),
m_Socket(nullptr), m_NextPacketLength(0), m_LocalKeepAliveTimer(this), m_RemoteKeepAliveTimer(this), m_SpeedTimer(0), m_DownloadedBytes(0), m_UploadedBytes(0), m_PrevDownloadedBytes(0), m_PrevUploadedBytes(0), m_SentHandshake(false), m_RequestSizePower(0), m_MaxRequestSizePower(2*PiecePartSize), m_DownloadingDeltaSpeed(0)
{
m_PeerInfo->m_PeerPieces.resize(torrent->PieceCount());
SetupConnection();
}
void Peer::OnReadReady()
{
CHECK(m_State != PeerState::PS_NULL);
ResetTimers();
if (m_State == PeerState::PS_CONNECTED)
{
if (!ReciveHandshake())
return;
}
if (m_State == PeerState::PS_HANDSHAKED)
{
if (m_Torrent->GetInfoHash() != m_Infohash)
{
emit NeedRemove();
return;
}
if (!m_SentHandshake)
{
if (!SendHandshake(m_Torrent->GetInfoHash()))
{
emit NeedRemove();
return;
}
if (!SendPeerID())
{
emit NeedRemove();
return;
}
m_SentHandshake = true;
}
if (!RecivePeerID())
return;
emit PeerIdRecived();
if (!m_PeerInfo->m_TrackedPeerId.isEmpty() && m_PeerInfo->m_TrackedPeerId != m_PeerInfo->m_PeerId)
{
emit NeedRemove();
return;
}
if (m_PeerInfo->m_PeerId == m_Torrent->m_Server->GetPeerId())
{
emit NeedRemove();
return;
}
m_State = PeerState::PS_RECVING;
if (m_Torrent->m_Pieces.count(true) > 0)
{
if (!SendBitField(m_Torrent->m_Pieces))
{
emit NeedRemove();
return;
}
}
emit DataTransferReady();
m_RequestSizePower += m_MaxRequestSizePower;
}
if (m_State == PeerState::PS_RECVING)
{
RecivePackets();
/* static quint32 index = 0;
static quint32 begin = 0;
while (m_OutcomingRequestList.size() < 2)
{
quint32 lengthRem = m_Torrent->PieceLengthAt(index) - begin;
quint32 length = qMin(lengthRem, PiecePartSize);
RequestPiece(index,begin,length);
begin += length;
if (lengthRem == length)
{
begin = 0;
index += 1;
if (index == m_Torrent->PieceCount())
{
qWarning("Downloaded!!!");
return;
}
}
}*/
}
}
void Peer::OnTimeout()
{
auto timer = sender();
if (timer == &m_LocalKeepAliveTimer)
{
emit NeedRemove();
}
else
{
CHECK( timer == &m_RemoteKeepAliveTimer );
emit NeedRemove();
}
}
void Peer::ResetTimers()
{
m_LocalKeepAliveTimer.stop();
m_LocalKeepAliveTimer.start();
m_RemoteKeepAliveTimer.stop();
m_RemoteKeepAliveTimer.start();
}
void Peer::UpdateSpeeds(quint32 deltaTime)
{
//m_PeerInfo->m_DownloadingSpeed = (m_DownloadedBytes - m_PeerInfo->m_DownloadedBytes) * 1000 / deltaTime;
quint32 prevDownloadingSpeed = m_PeerInfo->m_DownloadingSpeed;
m_PeerInfo->m_DownloadingSpeed = m_DownloadedBytes - m_PeerInfo->m_DownloadedBytes;
m_PeerInfo->m_DownloadedBytes = m_DownloadedBytes;
m_DownloadingDeltaSpeed = m_PeerInfo->m_DownloadingSpeed - prevDownloadingSpeed;
/* m_PeerInfo->m_UploadingSpeed = (m_UploadedBytes - m_PeerInfo->m_UploadedBytes) * 1000 / deltaTime;
m_PeerInfo->m_UploadedBytes = m_UploadedBytes;*/
}
void Peer::OnError(QAbstractSocket::SocketError error)
{
qInfo("Peer %s:%i SocketError %s", qPrintable(m_PeerInfo->GetHostAddress().toString()), m_PeerInfo->GetPort(), qPrintable(m_Socket->errorString()) );
emit NeedRemove();
}
void Peer::OnConnected()
{
connect(&m_LocalKeepAliveTimer,SIGNAL(timeout()), SLOT(OnTimeout()));
m_LocalKeepAliveTimer.start(KeepAliveInterval);
connect(&m_RemoteKeepAliveTimer,SIGNAL(timeout()), SLOT(OnTimeout()));
m_RemoteKeepAliveTimer.start(KeepAliveInterval);
if (!SendHandshake(m_Torrent->GetInfoHash()))
{
emit NeedRemove();
return;
}
if (!SendPeerID())
{
emit NeedRemove();
return;
}
m_SentHandshake = true;
m_State = PeerState::PS_CONNECTED;
}
// Drives outgoing block requests.  Only active in the steady data-transfer
// state (PS_RECVING); sends every queued-but-unsent request once the peer
// has unchoked us.
void Peer::Download()
{
if (m_State != PeerState::PS_RECVING)
{
return;
}
/*if (m_RequestSizePower == 0 || m_OutcomingRequestIndex == m_OutcomingRequestList.size())
return;*/
// Nothing new queued: bail out.
if (m_OutcomingRequestIndex == m_OutcomingRequestList.size())
return;
// NOTE(review): because of the early return just above, this condition can
// never be true here, so the NotInteresting() branch is dead code — confirm
// whether the early return or this branch reflects the intended behaviour.
if (m_OutcomingRequestIndex == m_OutcomingRequestList.size())
{
NotInteresting();
// emit ChooseAlgorithm();
}
else
{
if (AmInterested())
{
// We may only request blocks while the peer is not choking us.
if (PeerChoking())
return;
for (auto& piecePart : m_OutcomingRequestList)
{
SendRequest(piecePart.Index, piecePart.Begin, piecePart.Length);
}
// Mark the entire queue as sent (see RecivePiece, which only accepts
// blocks from the sent prefix).
m_OutcomingRequestIndex = m_OutcomingRequestList.size();
}
else
{
// Declare interest first; the actual requests go out on a later call.
Interesting();
}
}
}
bool Peer::TickTimers(quint32 deltaTime)
{
//if (m_RemoteKeepAliveTimer >= deltaTime)
//{
// m_RemoteKeepAliveTimer -= deltaTime;
//}
//else
//{
// m_RemoteKeepAliveTimer = 0;
//}
//if (m_LocalKeepAliveTimer >= deltaTime)
//{
// m_LocalKeepAliveTimer -= deltaTime;
//}
//else
//{
// m_LocalKeepAliveTimer = 0;
//}
//m_SpeedTimer += deltaTime;
//if (m_LocalKeepAliveTimer == 0)
//{
// if (!SendKeepAlive())
// return false;
//
//}
//if (m_RemoteKeepAliveTimer == 0)
//{
// return false;
//}
//if (m_SpeedTimer >= SpeedRefreshInterval)
//{
// m_PeerInfo->m_DownloadingSpeed = (m_DownloadedBytes - m_PeerInfo->m_DownloadedBytes) * 1000 / m_SpeedTimer;
// m_PeerInfo->m_DownloadedBytes = m_DownloadedBytes;
// m_PeerInfo->m_UploadingSpeed = (m_UploadedBytes - m_PeerInfo->m_UploadedBytes) * 1000 / m_SpeedTimer;
// m_PeerInfo->m_UploadedBytes = m_UploadedBytes;
// m_SpeedTimer = 0;
//}
return true;
}
// Sends the BitTorrent handshake: <pstrlen><pstr><8 reserved bytes><infohash>.
// 1 + 19 + 8 + 20 = 48 bytes; the 20-byte peer id is sent separately by
// SendPeerID().  Returns false if the socket did not accept the full buffer.
bool Peer::SendHandshake(const QByteArray& infohash)
{
qint8 buffer[48] = { 0, };
qint8* pbuffer = buffer;
pbuffer[0] = ProtocolIdSize;
// NOTE(review): assumes sizeof(ProtocolIdSize) == 1, which only holds if the
// constant is declared as a one-byte type — confirm in protocol.h.
pbuffer += sizeof(ProtocolIdSize);
memcpy(pbuffer, ProtocolId, ProtocolIdSize);
pbuffer += ProtocolIdSize;
// Reserved extension bytes, all zero.
memset(pbuffer, 0, 8);
pbuffer += 8;
memcpy(pbuffer, infohash.data(), infohash.size());
// NOTE(review): advances by the *member* m_Infohash's size rather than the
// `infohash` argument's; the pointer is unused afterwards so this is only a
// dead store, but it should presumably read infohash.size().
pbuffer += m_Infohash.size();
if (m_Socket->write((char*)buffer, sizeof(buffer)) != sizeof(buffer))
return false;
return true;
}
bool Peer::SendPeerID()
{
if (m_Socket->write(m_Torrent->m_Server->GetPeerId()) != PeerIdSize)
return false;
return true;
}
bool Peer::SendKeepAlive()
{
qint8 buffer[] = { 0, 0, 0, 0 };
if (m_Socket->write((char*)buffer, sizeof(buffer)) != sizeof(buffer))
return false;
return true;
}
bool Peer::SendChoke()
{
qint8 buffer[] = { 0, 0, 0, 1, 0 };
if (m_Socket->write((char*)buffer, sizeof(buffer)) != sizeof(buffer))
return false;
return true;
}
bool Peer::SendUnchoke()
{
qint8 buffer[] = { 0, 0, 0, 1, 1 };
if (m_Socket->write((char*)buffer, sizeof(buffer)) != sizeof(buffer))
return false;
return true;
}
bool Peer::SendInterested()
{
qint8 buffer[] = { 0, 0, 0, 1, 2 };
if (m_Socket->write((char*)buffer, sizeof(buffer)) != sizeof(buffer))
return false;
return true;
}
bool Peer::SendNotInterested()
{
qint8 buffer[] = { 0, 0, 0, 1, 3 };
if (m_Socket->write((char*)buffer, sizeof(buffer)) != sizeof(buffer))
return false;
return true;
}
bool Peer::SendHave(quint32 index)
{
qint8 buffer[4 + 1 + 4] = { 0, 0, 0, 5, 4 };
*(quint32*)(buffer + 5) = toNetworkData(index);
if (m_Socket->write((char*)buffer, sizeof(buffer)) != sizeof(buffer))
return false;
return true;
}
bool Peer::SendBitField(const QBitArray& bitArray)
{
QByteArray buffer(4 + 1 + m_Torrent->BitFieldByteSize(), 0);
quint8* pbuffer = (quint8*)buffer.data();
*(quint32*)pbuffer = toNetworkData(1 + m_Torrent->BitFieldByteSize());
pbuffer += sizeof(quint32);
*pbuffer = 5;
pbuffer += sizeof(quint8);
quint32 byte = 0;
quint32 bit = 0;
// The peer has the following pieces available.
for (int c = 0, size = bitArray.size(); c < size; c++)
{
pbuffer[byte] += (bitArray[c] << (7 - bit));
if (++bit == 8)
{
bit = 0;
++byte;
}
}
if (m_Socket->write(buffer) != buffer.size())
return false;
return true;
}
bool Peer::SendRequest(quint32 index, quint32 begin, quint32 length)
{
qint8 buffer[4 + 1 + sizeof(quint32)* 3] = { 0, 0, 0, 1 + sizeof(quint32)* 3, 6 };
qint32* pbuffer = (qint32*)&buffer[5];
pbuffer[0] = toNetworkData(index);
pbuffer[1] = toNetworkData(begin);
pbuffer[2] = toNetworkData(length);
m_Socket->write((char*)buffer, sizeof(buffer));
return true;
}
bool Peer::SendPiece(quint32 index, quint32 begin, quint32 length)
{
QByteArray buffer(4 + 9 + length, 0);
quint8* pbuffer = (quint8*)buffer.data();
*(quint32*)pbuffer = toNetworkData(9 + length);
pbuffer += sizeof(quint32);
*pbuffer = 7;
pbuffer += 1;
*(quint32*)pbuffer = toNetworkData(index);
pbuffer += sizeof(quint32);
*(quint32*)pbuffer = toNetworkData(begin);
pbuffer += sizeof(quint32);
m_Torrent->ReadPiecePart(index, begin, length, pbuffer);
if (m_Socket->write(buffer) != buffer.size())
return false;
return true;
}
bool Peer::SendCancel(quint32 index, quint32 begin, quint32 length)
{
qint8 buffer[4 + 1 + sizeof(quint32)* 3] = { 0, 0, 0, 1 + sizeof(quint32)* 3, 8 };
qint32* pbuffer = (qint32*)&buffer[5];
pbuffer[0] = toNetworkData(index);
pbuffer[1] = toNetworkData(begin);
pbuffer[2] = toNetworkData(length);
if (m_Socket->write((char*)buffer, sizeof(buffer)) != sizeof(buffer))
return false;
return true;
}
bool Peer::RecivePackets()
{
CHECK(m_Socket->isValid() && m_Socket->state() == QAbstractSocket::ConnectedState);
quint32 sum_length = 0;
do
{
if (!m_NextPacketLength)
{
if (m_Socket->bytesAvailable() < sizeof(m_NextPacketLength))
return false;
m_NextPacketLength = fromNetworkData((const char *)m_Socket->read(sizeof(m_NextPacketLength)));
}
if (m_Socket->bytesAvailable() < m_NextPacketLength)
return false;
if (m_NextPacketLength == 0)//KeepAlivePacket
{
return true;
}
QByteArray packet = m_Socket->read(m_NextPacketLength);
if (packet.size() != m_NextPacketLength)
{
emit NeedRemove();
return false;
}
switch ((PacketType)packet.at(0))
{
case PacketType::ChokePacket:
// We have been choked.
ReciveChoke();
break;
case PacketType::UnchokePacket:
// We have been unchoked.
ReciveUnchoke();
break;
case PacketType::InterestedPacket:
// The peer is interested in downloading.
ReciveInterested();
break;
case PacketType::NotInterestedPacket:
// The peer is not interested in downloading.
ReciveNotInterested();
break;
case PacketType::HavePacket:
{
// The peer has a new piece available.
quint32 index = fromNetworkData(&packet.data()[1]);
if (!ReciveHave(index))
{
emit NeedRemove();
return false;
}
}
break;
case PacketType::BitFieldPacket:
{
if (m_NextPacketLength - 1 != m_Torrent->BitFieldByteSize())
{
return false;
}
QBitArray pieces(m_Torrent->PieceCount());
quint32 bit = 0;
quint32 byte = 0;
for (quint32 c_piece = 0, size_piece = m_Torrent->PieceCount(); c_piece < size_piece; c_piece++)
{
if (packet[1 + byte] & (1 << (7 - bit)))
pieces[c_piece] = true;
else
pieces[c_piece] = false;
if (++bit == 8)
{
++byte;
bit = 0;
}
}
if (!ReciveBitField(pieces))
return false;
}
break;
case PacketType::RequestPacket:
{
// The peer requests a block.
quint32 index = fromNetworkData(&packet.data()[1]);
quint32 begin = fromNetworkData(&packet.data()[5]);
quint32 length = fromNetworkData(&packet.data()[9]);
if (!ReciveRequest(index, begin, length))
return false;
}
break;
case PacketType::PiecePacket:
{
quint32 index = quint32(fromNetworkData(&packet.data()[1]));
quint32 begin = quint32(fromNetworkData(&packet.data()[5]));
quint32 length = m_NextPacketLength - 9;
if (!RecivePiece(index, begin, length, (quint8*)packet.data() + 9))
return false;
// sum_length += length;
}
break;
case PacketType::CancelPacket:
{
// The peer cancels a block request.
quint32 index = fromNetworkData(&packet.data()[1]);
quint32 begin = fromNetworkData(&packet.data()[5]);
quint32 length = fromNetworkData(&packet.data()[9]);
if (!ReciveCancel(index, begin, length))
return false;
}
break;
default:
// Unsupported packet type; just ignore it.
break;
}
m_NextPacketLength = 0;
} while (m_Socket->bytesAvailable());
//m_RequestSizePower += sum_length;
//if (sum_length)
//{
// Download();
// //emit ChooseAlgorithm();
//}
return true;
}
bool Peer::ReciveHandshake()
{
CHECK(m_Socket->isValid() && m_Socket->state() == QAbstractSocket::ConnectedState);
if (m_Socket->bytesAvailable() < MinimalHeaderSize)
return false;
// Sanity check the protocol ID
QByteArray id = m_Socket->read(ProtocolIdSize + 1);
if (id.at(0) != ProtocolIdSize || !id.mid(1).startsWith((char*)ProtocolId))
{
emit NeedRemove();
return false;
}
// Discard 8 reserved bytes, then read the info hash and peer ID
(void)m_Socket->read(8);
// Read infoHash
m_Infohash = m_Socket->read(20);
m_State = PeerState::PS_HANDSHAKED;
emit InfohashRecived(m_Infohash);
return true;
}
bool Peer::RecivePeerID()
{
CHECK(m_Socket->isValid() && m_Socket->state() == QAbstractSocket::ConnectedState);
if (m_Socket->bytesAvailable() < PeerIdSize)
return false;
m_PeerInfo->m_PeerId = m_Socket->read(PeerIdSize);
m_State = PeerState::PS_RECV_ID;
return true;
}
void Peer::ReciveChoke()
{
if (!m_PeerChoking)
{
emit ChooseAlgorithm();
m_PeerChoking = true;
}
}
void Peer::ReciveUnchoke()
{
m_PeerChoking = false;
//Download();
}
// Handles an incoming "interested" message: the remote peer wants to
// download from us.
void Peer::ReciveInterested()
{
m_PeerInterested = true;
// NOTE(review): clearing m_PeerChoking here looks wrong — an "interested"
// message says nothing about the peer's choke state toward us, and the
// dedicated ReciveUnchoke() handler already clears this flag.  Confirm
// before relying on it.
m_PeerChoking = false;
}
void Peer::ReciveNotInterested()
{
m_PeerInterested = false;
}
bool Peer::ReciveHave(quint32 index)
{
if (index >= quint32(m_PeerInfo->m_PeerPieces.size()))
return false;
m_PeerInfo->m_PeerPieces.setBit(int(index));
//m_PeerInfo->m_PieceIndices << index;
m_Torrent->OnReciveHave(this,index);
// emit ChooseAlgorithm();
return true;
}
bool Peer::ReciveBitField(const QBitArray& pieces)
{
if (m_PeerInfo->m_PeerPieces.size() != pieces.size())
{
emit NeedRemove();
return false;
}
m_PeerInfo->m_PeerPieces = pieces;
//for (quint32 c = 0, size = m_Torrent->m_PieceCount; c < size; c++)
//{
// if (pieces[c])
// {
// m_PeerInfo->m_PieceIndices << c;
// }
//}
m_Torrent->OnReciveBitField(this,pieces);
// emit ChooseAlgorithm();
return true;
}
bool Peer::ReciveRequest(quint32 index, quint32 begin, quint32 length)
{
if (index >= (quint32)m_PeerInfo->m_PeerPieces.size())
{
emit NeedRemove();
return false;
}
if (length == 0)
{
emit NeedRemove();
return false;
}
if (m_Torrent->PieceOffset(index) + begin + length > m_Torrent->GetTotalBytes())
{
emit NeedRemove();
return false;
}
if (m_IncomingRequestList.contains(PiecePart(index, begin, length)))
return true;//ignoring
m_IncomingRequestList.append(PiecePart(index, begin, length));
return true;
}
// Handles an incoming "piece" payload.  The block is written to storage only
// if it matches a request we have actually sent; anything unexpected is
// silently ignored (returns true either way).
bool Peer::RecivePiece(quint32 index, quint32 begin, quint32 length, quint8* data)
{
//if (m_OutcomingRequestIndex >= m_OutcomingRequestList.size())
// return true;//ignoring
// Only the first m_OutcomingRequestIndex entries of the request list have
// been sent to the peer (see Download()); later entries are still pending,
// so only the sent prefix is scanned here.
for (quint32 piece = 0; piece < m_OutcomingRequestIndex; piece++ )
{
if (m_OutcomingRequestList[piece].Index == index && m_OutcomingRequestList[piece].Begin == begin && m_OutcomingRequestList[piece].Length == length)
{
m_Torrent->WritePiecePart(index, begin, length, data);
m_DownloadedBytes += length;
// Remove the satisfied request and keep the sent-prefix length in sync.
m_OutcomingRequestList.removeAt(piece);
--m_OutcomingRequestIndex;
return true;
}
}
return true;//ignoring
}
// Handles an incoming "cancel" message: the peer no longer wants the block
// (index, begin, length) it previously requested from us.  Malformed
// messages cause the connection to be flagged for removal (returns false);
// cancels for blocks we never queued are silently ignored.
bool Peer::ReciveCancel(quint32 index, quint32 begin, quint32 length)
{
    if (index >= (quint32)m_PeerInfo->m_PeerPieces.size())
    {
        emit NeedRemove();
        return false;
    }
    if (length == 0)
    {
        emit NeedRemove();
        return false;
    }
    // Bounds check: the cancelled range must lie inside the torrent.
    // BUGFIX: use '>' (not '>='), so a block ending exactly at the torrent's
    // last byte is accepted — matching the identical check in ReciveRequest();
    // the old '>=' wrongly dropped peers cancelling the final block.
    if (m_Torrent->PieceOffset(index) + begin + length > m_Torrent->GetTotalBytes())
    {
        emit NeedRemove();
        return false;
    }
    if (!m_IncomingRequestList.contains(PiecePart(index, begin, length)))
        return true; // Nothing pending for that range: ignore.
    if (!m_IncomingRequestList.removeOne(PiecePart(index, begin, length)))
        THROW;
    return true;
}
void Peer::SetupConnection()
{
m_Socket.reset(new QTcpSocket);
connect(m_Socket.data(), SIGNAL(connected()), SLOT(OnConnected()));
connect(m_Socket.data(),SIGNAL(readyRead()), SLOT(OnReadReady()));
//connect(m_Socket.data(),SIGNAL(disconnected()), SIGNAL(NeedRemove()));
connect(m_Socket.data(),SIGNAL(error(QAbstractSocket::SocketError)), SLOT(OnError(QAbstractSocket::SocketError)));
m_Socket->connectToHost(m_PeerInfo->m_Address, m_PeerInfo->m_Port);
}
bool Peer::SendPackets()
{
for (auto& part : m_UploadingPieceParts)
{
if (!SendPiece(part.Index, part.Begin, part.Length))
return false;
m_UploadedBytes += part.Length;
}
m_UploadingPieceParts.clear();
return true;
}
bool PiecePart::operator==(const PiecePart& rhs) const
{
return Index == rhs.Index && Begin == rhs.Begin && Length == rhs.Length;
}
PiecePart::PiecePart()
{
}
PiecePart::PiecePart(quint32 index, quint32 begin, quint32 length) : Index(index), Begin(begin), Length(length)
{
}
DataPiece::DataPiece(quint32 index, quint32 begin, quint32 length, quint8* data) : PiecePart(index, begin, length), Data((char*)data, length)
{
}
}
|
def insertionSort(arr):
    """Sort ``arr`` in place with insertion sort and return it.

    Stable, O(n^2) worst case; efficient for short or nearly-sorted input.
    """
    for idx in range(1, len(arr)):
        key = arr[idx]
        j = idx - 1
        # Shift every larger element one slot to the right until key's
        # insertion point in the sorted prefix arr[:idx] is exposed.
        while j >= 0 and arr[j] > key:
            arr[j + 1] = arr[j]
            j -= 1
        arr[j + 1] = key
    return arr
<reponame>hiNISAL/qrqr<gh_stars>0
import QRCodeGenerator from 'qrcode-generator';
/**
* *********************************************************************
* type start
*
*/
type TypeNumber =
| 0
| 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10
| 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20
| 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30
| 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40
;
type Mode = 'Numeric' | 'Alphanumeric' | 'Byte' /* Default */ | 'Kanji';
interface IOptions {
// 版本
version?: TypeNumber,
// 容错率
level?: 'L'|'M'|'Q'|'H',
// 码内容
content: string,
// 编码
mode?: Mode,
// 尺寸
size?: number,
}
/**
*
* type end
* *********************************************************************
*/
/**
 * Thin convenience wrapper around `qrcode-generator`: the QR model is built
 * once in the constructor, then rendered on demand as an HTML table, an
 * <img> element, a base64 data URL, or ASCII art.
 */
class QRSupporter {
  // Underlying qrcode-generator instance (untyped upstream).
  private qr;
  // Target rendered size in pixels; used to derive a per-module cell size
  // when the caller does not pass one explicitly.
  private size: number = 128;
  constructor(
    private readonly options: IOptions|string,
  ) {
    // Defaults; version 0 lets the library pick the smallest version that
    // fits the content.
    const defOpts: IOptions = {
      version: 0,
      level: 'H',
      content: '',
      mode: 'Byte',
      size: 128,
    };
    // A bare string argument is shorthand for { content: string }.
    if (typeof options === 'string') {
      options = {
        content: options,
      };
    }
    Object.assign(defOpts, options);
    const qr = QRCodeGenerator(defOpts.version!, defOpts.level!);
    qr.addData(defOpts.content);
    qr.make();
    this.qr = qr;
    this.size = defOpts.size!;
  }
  // Effective cell size: the caller's value when given, otherwise the
  // largest whole-pixel cell that fits `size` pixels across the module grid.
  private getCellSize(cellSize): number {
    if (!cellSize) {
      cellSize = Math.floor(this.size / this.qr.getModuleCount());
    }
    return cellSize;
  }
  // NOTE(review): despite the name, this returns the module count (modules
  // per side), not the QR "version" number — confirm intent before renaming.
  public version(): number {
    return this.qr.getModuleCount();
  }
  // Sets the target rendered size in pixels; chainable.
  public setSize(size: number) {
    this.size = size;
    return this;
  }
  // Renders the code as a detached <table> element.
  public table(margin: number = 0, cellSize?: number): HTMLTableElement {
    cellSize = this.getCellSize(cellSize);
    const el = document.createElement('div');
    el.innerHTML = this.qr.createTableTag(cellSize, margin);
    return el.querySelector('table')!;
  }
  // Renders the code as a detached <img> element (GIF data URL source).
  public img(margin: number = 0, cellSize?: number): HTMLImageElement {
    cellSize = this.getCellSize(cellSize);
    const el = document.createElement('div');
    el.innerHTML = this.qr.createImgTag(cellSize, margin);
    return el.querySelector('img')!;
  }
  // Returns the code as a base64 data URL string.
  public base64(margin: number = 0, cellSize?: number): string {
    cellSize = this.getCellSize(cellSize);
    return this.qr.createDataURL(cellSize, margin);
  }
  // Returns the code as ASCII art (for terminals/logs).
  public ascii(margin: number = 0, cellSize?: number): string {
    cellSize = this.getCellSize(cellSize);
    return this.qr.createASCII(cellSize, margin);
  }
}
export default QRSupporter;
|
//
// AbstractRegionMap.h
//
// Created by <NAME> on 1/22/13.
//
#import <Foundation/Foundation.h>
#import <CoreFoundation/CoreFoundation.h>
#define DEFAULT_CACHE_SIZE 1000
@class DSValue;
@interface AbstractDataStoreMap : NSObject
{
CFMutableDictionaryRef map;
DSValue *first;
DSValue *last;
int32_t size;
}
@property (atomic, readonly) CFMutableDictionaryRef map;
- (id) initWithSize:(int)maxSize;
- (id) initWithKeyCB:(CFDictionaryKeyCallBacks)kcb andWithValueCB:(CFDictionaryValueCallBacks)vcb;
- (BOOL) addValue:(id)value toKey:(id)key;
- (id) getValueForKey:(id)key;
- (id) removeValueForKey:(id)key;
- (NSInteger) getSize;
- (void) clear;
@end
@interface DSValue : NSObject <NSCoding>
@property (retain, nonatomic) NSString *lastAccessTime;
@property (retain, nonatomic) NSObject *value;
@property (retain, atomic) NSObject *key;
@property (retain, nonatomic) DSValue *prev;
@property (retain, nonatomic) DSValue *next;
-(id) initWithValue:(NSObject *)val andKey:(NSObject *)k;
@end
NSString *GFGenerateSystemTimeStampAsString(void); |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_DEVICE_INFO_H_
#define PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_DEVICE_INFO_H_
#include <string>
namespace camera_windows {
// Name and device ID information for a capture device.
// Name and device ID information for a capture device.
class CaptureDeviceInfo {
 public:
  CaptureDeviceInfo() {}
  virtual ~CaptureDeviceInfo() = default;

  // Disallow copy and move.
  CaptureDeviceInfo(const CaptureDeviceInfo&) = delete;
  CaptureDeviceInfo& operator=(const CaptureDeviceInfo&) = delete;

  // Builds a unique device name from display name and device id.
  // Format: "display_name <device_id>".
  std::string GetUniqueDeviceName() const;

  // Parses display name and device id from the unique device name format.
  // Format: "display_name <device_id>".
  // BUGFIX: the declaration was qualified as
  // `CaptureDeviceInfo::ParseDeviceInfoFromCameraName` inside the class
  // body; extra qualification on an in-class member declaration is
  // ill-formed C++ and rejected by GCC/Clang.
  bool ParseDeviceInfoFromCameraName(const std::string& camera_name);

  // Updates display name.
  void SetDisplayName(const std::string& display_name) {
    display_name_ = display_name;
  }

  // Updates device id.
  void SetDeviceID(const std::string& device_id) { device_id_ = device_id; }

  // Returns device id.
  std::string GetDeviceId() const { return device_id_; }

 private:
  std::string display_name_;
  std::string device_id_;
};
} // namespace camera_windows
#endif // PACKAGES_CAMERA_CAMERA_WINDOWS_WINDOWS_CAPTURE_DEVICE_INFO_H_
|
# Returns the largest element of +arr+ together with the index of its
# first occurrence, as a two-element array [value, index].
# An empty array yields [nil, nil].
def max_value_index(arr)
  highest = arr.max
  [highest, arr.index(highest)]
end
<filename>src/astra_core/astra_stream_reader.cpp
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Or<NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
#include "astra_stream_reader.hpp"
#include <algorithm>
#include <cassert>
#include <chrono>
#include <astra_core/capi/astra_core.h>
#include "astra_streamset_connection.hpp"
#include "astra_streamset.hpp"
#include "astra_logger.hpp"
#include "astra_cxx_compatibility.hpp"
namespace astra {
using namespace std::placeholders;
// Binds the reader to its owning streamset connection. The frame-ready
// callback is created lazily (see get_sc_frame_ready_callback).
stream_reader::stream_reader(streamset_connection& connection)
    : connection_(connection),
      scFrameReadyCallback_(nullptr)
{
}
// Tears down every stream connection this reader opened: unregister our
// frame-ready callback, destroy the connection via the owning streamset,
// and free the bookkeeping record.
stream_reader::~stream_reader()
{
    LOG_TRACE("astra.stream_reader", "destroying reader: %p", this);
    for (auto& pair : streamMap_)
    {
        reader_connection_data* data = pair.second;
        data->connection->unregister_frame_ready_callback(data->scFrameReadyCallbackId);
        connection_.get_streamSet()->destroy_stream_connection(data->connection);
        delete data;
    }
    streamMap_.clear();
}
// Returns the already-open connection registered under this stream
// descriptor, or nullptr when the reader has no such stream.
stream_connection* stream_reader::find_stream_of_type(astra_stream_desc_t& desc)
{
    const auto entry = streamMap_.find(desc);
    return entry != streamMap_.end()
        ? entry->second->connection
        : nullptr;
}
// Lazily builds (and caches) the callback handed to each stream_connection;
// it simply forwards connection frame-ready events to this reader.
stream_connection::FrameReadyCallback stream_reader::get_sc_frame_ready_callback()
{
    if (scFrameReadyCallback_ == nullptr)
    {
        // Captures `this`: the reader must outlive any registered connection
        // callbacks (the destructor unregisters them).
        scFrameReadyCallback_ = [this](stream_connection* sc, astra_frame_index_t frameIndex)
            { this->on_connection_frame_ready(sc, frameIndex); };
    }
    return scFrameReadyCallback_;
}
// Returns the connection for the given descriptor, opening one on demand.
// A newly opened connection is registered for frame-ready notification and
// tracked in streamMap_ with fresh bookkeeping (no frame seen yet).
stream_connection* stream_reader::get_stream(astra_stream_desc_t& desc)
{
    stream_connection* connection = find_stream_of_type(desc);
    if (connection)
    {
        return connection;
    }

    connection = connection_.get_streamSet()->create_stream_connection(desc);
    assert(connection != nullptr);

    astra_callback_id_t cbId = connection->register_frame_ready_callback(get_sc_frame_ready_callback());

    reader_connection_data* data = new reader_connection_data;
    data->connection = connection;
    data->scFrameReadyCallbackId = cbId;
    data->isNewFrameReady = false;
    data->currentFrameIndex = -1; // sentinel: no frame received yet

    streamMap_.insert(std::make_pair(desc, data));

    return connection;
}
// Returns the locked per-stream frame for the given descriptor, or nullptr
// when the reader is not currently locked or has no such stream.
astra_frame_t* stream_reader::get_subframe(astra_stream_desc_t& desc)
{
    // Subframes are only meaningful while the reader holds its lock.
    if (!locked_)
    {
        return nullptr;
    }

    stream_connection* conn = find_stream_of_type(desc);
    return conn != nullptr ? conn->lock() : nullptr;
}
// Registers a C-style frame-ready callback. The client tag is captured in a
// thunk so the raw function pointer receives it back on every invocation.
// Returns the signal id needed for unregister_frame_ready_callback.
astra_callback_id_t stream_reader::register_frame_ready_callback(astra_frame_ready_callback_t callback,
                                                                 void* clientTag)
{
    auto thunk = [clientTag, callback](astra_reader_t reader, astra_reader_frame_t frame)
        { callback(clientTag, reader, frame); };
    return frameReadySignal_ += thunk;
}
// Removes a previously registered callback and zeroes the caller's id so it
// cannot be (mis)used again.
void stream_reader::unregister_frame_ready_callback(astra_callback_id_t& callbackId)
{
    frameReadySignal_ -= callbackId;
    callbackId = 0;
}
// Busy-pumps astra_temp_update() until a frame becomes ready or the timeout
// elapses. Special timeout values: ASTRA_TIMEOUT_RETURN_IMMEDIATELY skips the
// pump loop entirely; ASTRA_TIMEOUT_FOREVER loops without a deadline.
stream_reader::block_result stream_reader::block_until_frame_ready_or_timeout(int timeoutMillis)
{
    LOG_TRACE("astra.stream_reader", "%p block_until_frame_ready_or_timeout", this);
    if (isFrameReadyForLock_)
    {
        return block_result::FRAMEREADY;
    }

    if (timeoutMillis != ASTRA_TIMEOUT_RETURN_IMMEDIATELY)
    {
        long long milliseconds = 0;
        std::chrono::steady_clock::time_point start, end;
        start = std::chrono::steady_clock::now();
        bool forever = timeoutMillis == ASTRA_TIMEOUT_FOREVER;
        do
        {
            // Pumping the plugin/update loop is what lets frames arrive;
            // isFrameReadyForLock_ is set from on_connection_frame_ready.
            astra_temp_update();
            if (isFrameReadyForLock_)
            {
                return block_result::FRAMEREADY;
            }
            end = std::chrono::steady_clock::now();
            std::chrono::duration<double> elapsed_seconds = end - start;
            milliseconds = std::chrono::duration_cast<std::chrono::milliseconds>(elapsed_seconds).count();
        } while (forever || milliseconds < timeoutMillis);
    }

    // Final check covers the RETURN_IMMEDIATELY path.
    return isFrameReadyForLock_ ? block_result::FRAMEREADY : block_result::TIMEOUT;
}
// Client-facing poll entry point: waits (up to timeoutMillis) for a frame and
// locks it for the caller. On timeout, readerFrame is nulled and
// ASTRA_STATUS_TIMEOUT returned. If the reader is already locked (e.g. from
// inside a FrameReady event), the wait is skipped and a frame is locked
// directly.
astra_status_t stream_reader::lock(int timeoutMillis, astra_reader_frame_t& readerFrame)
{
    LOG_TRACE("astra.stream_reader", "%p lock", this);
    if (!locked_)
    {
        stream_reader::block_result result = block_until_frame_ready_or_timeout(timeoutMillis);

        // Consume the ready flag regardless of outcome.
        isFrameReadyForLock_ = false;

        if (result == block_result::TIMEOUT)
        {
            readerFrame = nullptr;
            return ASTRA_STATUS_TIMEOUT;
        }
    }

    readerFrame = lock_frame_for_poll();

    return ASTRA_STATUS_SUCCESS;
}
// Client-facing unlock: validates the frame handle (non-null, not already
// returned, not an event-scoped frame — those are released automatically when
// the event callback returns), then returns it and possibly unlocks the
// underlying connections.
astra_status_t stream_reader::unlock(astra_reader_frame_t& readerFrame)
{
    LOG_TRACE("astra.stream_reader", "%p unlock", this);
    if (readerFrame == nullptr)
    {
        LOG_WARN("astra.stream_reader", "%p unlock with null frame parameter", this);
        assert(readerFrame != nullptr);
        return ASTRA_STATUS_INVALID_PARAMETER;
    }
    if (readerFrame->status == ASTRA_FRAME_STATUS_AVAILABLE)
    {
        LOG_WARN("astra.stream_reader", "%p readerFrame was closed more than once", this);
        assert(readerFrame->status != ASTRA_FRAME_STATUS_AVAILABLE);
        return ASTRA_STATUS_INVALID_OPERATION;
    }
    if (readerFrame->status == ASTRA_FRAME_STATUS_LOCKED_EVENT)
    {
        LOG_WARN("astra.stream_reader", "%p readerFrame from FrameReady event was closed manually", this);
        assert(readerFrame->status != ASTRA_FRAME_STATUS_LOCKED_EVENT);
        return ASTRA_STATUS_INVALID_OPERATION;
    }

    return unlock_frame_and_check_connections(readerFrame);
}
// Returns a locked frame to the pool, then releases the underlying stream
// connections if no other frames remain outstanding. Propagates any failure
// from returning the frame without touching the connections.
astra_status_t stream_reader::unlock_frame_and_check_connections(astra_reader_frame_t& readerFrame)
{
    LOG_TRACE("astra.stream_reader", "%p unlock_frame_and_check_connections", this);

    const astra_status_t rc = return_locked_frame(readerFrame);
    return rc == ASTRA_STATUS_SUCCESS ? unlock_connections_if_able() : rc;
}
// Locks a frame for delivery inside a FrameReady event. Such frames carry
// LOCKED_EVENT status so unlock() can reject manual closure (the event
// dispatcher releases them itself).
astra_reader_frame_t stream_reader::lock_frame_for_event_callback()
{
    LOG_TRACE("astra.stream_reader", "%p lock_frame_for_event_callback", this);
    ensure_connections_locked();

    astra_reader_frame_t frame = acquire_available_reader_frame();
    frame->status = ASTRA_FRAME_STATUS_LOCKED_EVENT;
    ++lockedFrameCount_;

    return frame;
}
// Locks a frame for a polling client (stream_reader::lock). LOCKED_POLL
// status means the client must return it via unlock().
astra_reader_frame_t stream_reader::lock_frame_for_poll()
{
    LOG_TRACE("astra.stream_reader", "%p lock_frame_for_poll", this);
    ensure_connections_locked();

    astra_reader_frame_t frame = acquire_available_reader_frame();
    frame->status = ASTRA_FRAME_STATUS_LOCKED_POLL;
    ++lockedFrameCount_;

    return frame;
}
// Hands out a frame record from the pool, reusing the first AVAILABLE one.
// If none is free, the pool grows by one; frame ids are pool indices, which
// return_locked_frame relies on for validation.
astra_reader_frame_t stream_reader::acquire_available_reader_frame()
{
    LOG_TRACE("astra.stream_reader", "%p acquire_reader_frame", this);
    for (auto& frame : frameList_)
    {
        if (frame->status == ASTRA_FRAME_STATUS_AVAILABLE)
        {
            return frame.get();
        }
    }

    //frameList_ empty or all frames locked already
    FramePtr newFrame = astra::make_unique<_astra_reader_frame>();
    newFrame->id = frameList_.size();
    newFrame->status = ASTRA_FRAME_STATUS_AVAILABLE;
    newFrame->reader = get_handle();

    astra_reader_frame_t framePtr = newFrame.get();
    frameList_.push_back(std::move(newFrame));

    return framePtr;
}
// Returns a frame to the pool after defensive validation: the handle must be
// non-null, belong to this reader, have an id inside the pool, not already be
// AVAILABLE, and be the exact pooled pointer for that id. On success the
// frame is marked AVAILABLE, the lock count drops, and the caller's handle is
// nulled.
astra_status_t stream_reader::return_locked_frame(astra_reader_frame_t& readerFrame)
{
    LOG_TRACE("astra.stream_reader", "%p return_locked_frame", this);
    if (lockedFrameCount_ == 0)
    {
        LOG_WARN("astra.stream_reader", "%p return_locked_frame too many times (lockedFrameCount == 0)", this);
        assert(lockedFrameCount_ != 0);
        return ASTRA_STATUS_INVALID_OPERATION;
    }
    if (readerFrame == nullptr)
    {
        LOG_WARN("astra.stream_reader", "%p return_locked_frame with null readerFrame parameter", this);
        assert(readerFrame != nullptr);
        return ASTRA_STATUS_INVALID_PARAMETER;
    }
    if (readerFrame->reader != get_handle())
    {
        LOG_WARN("astra.stream_reader", "%p return_locked_frame readerFrame closed on wrong stream_reader", this);
        assert(readerFrame->reader == get_handle());
        return ASTRA_STATUS_INVALID_OPERATION;
    }
    if (readerFrame->id >= static_cast<int>(frameList_.size()))
    {
        LOG_WARN("astra.stream_reader", "%p return_locked_frame readerFrame parameter with id greater than frameList size", this);
        assert(readerFrame->id < static_cast<int>(frameList_.size()));
        return ASTRA_STATUS_INVALID_PARAMETER;
    }
    if (readerFrame->status == ASTRA_FRAME_STATUS_AVAILABLE)
    {
        LOG_WARN("astra.stream_reader", "%p return_locked_frame frame status is already available", this);
        assert(readerFrame->status != ASTRA_FRAME_STATUS_AVAILABLE);
        return ASTRA_STATUS_INVALID_PARAMETER;
    }

    // Cross-check the handle against the pooled pointer for its id.
    astra_reader_frame_t checkFrame = frameList_[readerFrame->id].get();

    if (readerFrame != checkFrame)
    {
        LOG_WARN("astra.stream_reader", "%p return_locked_frame readerFrame parameter does not match pointer in frameList", this);
        assert(readerFrame == checkFrame);
        return ASTRA_STATUS_INVALID_PARAMETER;
    }

    checkFrame->status = ASTRA_FRAME_STATUS_AVAILABLE;
    --lockedFrameCount_;

    readerFrame = nullptr;
    return ASTRA_STATUS_SUCCESS;
}
// Idempotently locks every started stream connection so frame data stays
// stable while the reader hands out frames. No-op when already locked.
void stream_reader::ensure_connections_locked()
{
    LOG_TRACE("astra.stream_reader", "%p ensure_connections_locked locked_: %d", this, locked_);
    if (!locked_)
    {
        //LOG_INFO("astra.stream_reader", "locked run start");
        // Iterate by reference: the original copied each map pair by value,
        // inconsistent with every other streamMap_ loop in this file.
        for (auto& pair : streamMap_)
        {
            reader_connection_data* data = pair.second;
            //LOG_INFO("astra.stream_reader", "locking: %u", data->connection->get_stream()->get_description().type);
            if (data->connection->is_started())
            {
                //LOG_INFO("astra.stream_reader", "locked: %u", data->connection->get_stream()->get_description().type);
                data->connection->lock();
            }
        }
        //LOG_INFO("astra.stream_reader", "locked run end");
        locked_ = true;
    }
}
// Releases the connection locks once every pooled frame has been returned.
// If frames are still outstanding this is a successful no-op. The stale
// isNewFrameReady flags are cleared and lastFrameIndex_ advanced BEFORE
// unlocking, because unlocking may synchronously re-enter
// on_connection_frame_ready.
astra_status_t stream_reader::unlock_connections_if_able()
{
    LOG_TRACE("astra.stream_reader", "%p unlock_connections_if_able lockedFrameCount_: %d locked_: %d",
              this, lockedFrameCount_, locked_);
    if (!locked_)
    {
        LOG_WARN("astra.stream_reader", "%p unlock_connections_if_able called too many times (locked_ == false)", this);
        assert(locked_);
        return ASTRA_STATUS_INVALID_OPERATION;
    }

    if (lockedFrameCount_ > 0)
    {
        //don't unlock connections when there are outstanding frames
        return ASTRA_STATUS_SUCCESS;
    }

    //all frames should be available at this point
    for (auto& frame : frameList_)
    {
        if (frame->status != ASTRA_FRAME_STATUS_AVAILABLE)
        {
            LOG_WARN("astra.stream_reader", "%p unlock_connections_if_able called but not all frames have been returned", this);
            assert(frame->status == ASTRA_FRAME_STATUS_AVAILABLE);
        }
    }

    for(auto& pair : streamMap_)
    {
        reader_connection_data* data = pair.second;
        data->isNewFrameReady = false;
        // Track the newest frame index seen so stale events are ignored later.
        if (data->currentFrameIndex > lastFrameIndex_)
        {
            lastFrameIndex_ = data->currentFrameIndex;
        }
    }

    locked_ = false;

    //Do the connection unlock separately because unlock()
    //could call connection_frame_ready(...) again and we want to be ready
    for(auto& pair : streamMap_)
    {
        reader_connection_data* data = pair.second;
        if (data->connection->is_started())
        {
            data->connection->unlock();
        }
    }

    return ASTRA_STATUS_SUCCESS;
}
// Per-connection frame-ready handler (registered in get_stream). Records the
// new frame index for the connection and, when every started stream has a
// fresh frame, raises the reader-level FrameReady signal. Frames whose index
// is not newer than lastFrameIndex_ are ignored as stale.
void stream_reader::on_connection_frame_ready(stream_connection* connection, astra_frame_index_t frameIndex)
{
    // BUGFIX: the original passed streamMap_.size() and the stream type as
    // extra varargs with no matching conversion specifiers in the format
    // string — a printf-style format/argument mismatch (undefined behavior).
    LOG_TRACE("astra.stream_reader", "%p connection_frame_ready", this);
    if (frameIndex > lastFrameIndex_)
    {
        auto& desc = connection->get_description();

        auto pair = streamMap_.find(desc);
        if (pair != streamMap_.end())
        {
            //TODO optimization/special case -- if streamMap_.size() == 1, call raise_frame_ready() directly
            reader_connection_data* data = pair->second;
            data->isNewFrameReady = true;
            data->currentFrameIndex = frameIndex;
        }
        else
        {
            LOG_WARN("astra.stream_reader", "Unknown frame readied!");
        }

        check_for_all_frames_ready();
    }
}
// Raises the reader FrameReady signal only once ALL started streams have an
// unconsumed frame; streams that are not started are excluded from the
// readiness requirement.
void stream_reader::check_for_all_frames_ready()
{
    LOG_TRACE("astra.stream_reader", "%p check_for_all_frames_ready", this);
    bool allReady = true;
    for (auto& pair : streamMap_)
    {
        reader_connection_data* data = pair.second;
        if (!data->isNewFrameReady && data->connection->is_started())
        {
            //TODO the new frames may not be synced.
            //We need matching frame indices in the future
            allReady = false;
            break;
        }
    }

    if (allReady)
    {
        isFrameReadyForLock_ = true;
        raise_frame_ready();
    }
}
// Dispatches the FrameReady signal to registered clients. The frame is
// locked with event scope for the duration of the callbacks and released
// afterwards — unless a client (incorrectly) closed it manually, which is
// logged and skipped. Skips all locking when nobody is listening.
void stream_reader::raise_frame_ready()
{
    LOG_TRACE("astra.stream_reader", "%p raise_frame_ready", this);
    if (frameReadySignal_.slot_count() == 0)
    {
        //no clients to serve, don't bother locking and unlocking
        return;
    }

    astra_reader_t reader = get_handle();
    astra_reader_frame_t frame = lock_frame_for_event_callback();

    LOG_TRACE("astra.stream_reader", "%p raise_frame_ready raising frameReady signal", this);
    frameReadySignal_.raise(reader, frame);

    if (frame->status == ASTRA_FRAME_STATUS_AVAILABLE)
    {
        LOG_WARN("astra.stream_reader", "%p Frame was closed manually during stream_reader FrameReady callback", this);
    }
    else
    {
        // BUGFIX: the format string contains %p but the original passed no
        // argument for it — undefined behavior in printf-style logging.
        LOG_TRACE("astra.stream_reader", "%p raise_frame_ready unlocking frame", this);
        unlock_frame_and_check_connections(frame);
    }
}
}
|
<filename>lombok-plugin/src/main/java/de/plushnikov/intellij/plugin/processor/method/MethodProcessor.java
package de.plushnikov.intellij.plugin.processor.method;
import com.intellij.psi.PsiAnnotation;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiMethod;
import de.plushnikov.intellij.plugin.processor.Processor;
import org.jetbrains.annotations.NotNull;
import java.util.List;
/**
 * Processor variant invoked for Lombok annotations placed on methods.
 */
public interface MethodProcessor extends Processor {
  /**
   * Processes one annotated method and appends any generated PSI elements
   * (e.g. synthetic methods) to {@code target}.
   *
   * @param psiMethod     the annotated method
   * @param psiAnnotation the Lombok annotation found on the method
   * @param target        sink that receives the generated elements
   */
  void process(@NotNull PsiMethod psiMethod, @NotNull PsiAnnotation psiAnnotation, @NotNull List<? super PsiElement> target);
}
#!/bin/bash
# Thin wrapper: forward ALL command-line arguments to generative.py.
# "$@" preserves quoting (arguments containing spaces survive intact) and
# passes any number of arguments, unlike the previous unquoted $1..$6 form,
# which word-split arguments and silently dropped anything past the sixth.
exec python3 generative.py "$@"
pragma solidity ^0.8.0;
contract ApprovalContract {
    /// Reverts with "InvalidApprovalSignatureError" unless `signature` is a
    /// valid approval of `transactionHash` by `approverAddress`.
    function verifyApprovalSignature(bytes32 transactionHash, address approverAddress, bytes memory signature) public pure {
        // Implementing signature verification logic
        // Assume the existence of a library or function to verify the signature
        bool isValidSignature = verifySignature(transactionHash, approverAddress, signature);
        if (!isValidSignature) {
            revert("InvalidApprovalSignatureError");
        }
    }

    // Assume the existence of a function to verify the signature
    // NOTE(review): this stub has NO return statement, so it always yields the
    // default value `false` — meaning verifyApprovalSignature currently
    // reverts for EVERY input. Implement real ECDSA recovery (e.g. split
    // `signature` into v/r/s and compare ecrecover's output to
    // approverAddress) before use.
    function verifySignature(bytes32 transactionHash, address approverAddress, bytes memory signature) internal pure returns (bool) {
        // Implement the logic to verify the signature using ECDSA or other cryptographic methods
        // Return true if the signature is valid, false otherwise
        // Example:
        // return ecrecover(transactionHash, v, r, s) == approverAddress;
    }
}
#!/bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eo pipefail

## Get the directory of the build script
scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}"))
## cd to the parent directory, i.e. the root of the git repo
cd ${scriptDir}/..

# include common functions
source ${scriptDir}/common.sh

# Print out Java version
java -version
echo ${JOB_TYPE}

# attempt to install 3 times with exponential backoff (starting with 10 seconds)
retry_with_backoff 3 10 \
mvn install -B -V \
-DskipTests=true \
-Dclirr.skip=true \
-Denforcer.skip=true \
-Dmaven.javadoc.skip=true \
-Dgcloud.download.skip=true \
-T 1C

# if GOOGLE_APPLICATION_CREDENTIALS is specified as a relative path prepend Kokoro root directory onto it
if [[ ! -z "${GOOGLE_APPLICATION_CREDENTIALS}" && "${GOOGLE_APPLICATION_CREDENTIALS}" != /* ]]; then
export GOOGLE_APPLICATION_CREDENTIALS=$(realpath ${KOKORO_GFILE_DIR}/${GOOGLE_APPLICATION_CREDENTIALS})
fi

# Dispatch on JOB_TYPE; each branch records its own exit status so the
# script can finish its reporting steps before exiting with that status.
RETURN_CODE=0
set +e

case ${JOB_TYPE} in
test)
mvn test -B -Dclirr.skip=true -Denforcer.skip=true
RETURN_CODE=$?
;;
lint)
mvn com.coveo:fmt-maven-plugin:check
RETURN_CODE=$?
;;
javadoc)
mvn javadoc:javadoc javadoc:test-javadoc
RETURN_CODE=$?
;;
integration)
mvn -B ${INTEGRATION_TEST_ARGS} \
-Penable-integration-tests \
-DtrimStackTrace=false \
-Dclirr.skip=true \
-Denforcer.skip=true \
-fae \
verify
RETURN_CODE=$?
;;
samples)
SAMPLES_DIR=samples
# only run ITs in snapshot/ on presubmit PRs. run ITs in all 3 samples/ subdirectories otherwise.
if [[ ! -z ${KOKORO_GITHUB_PULL_REQUEST_NUMBER} ]]
then
SAMPLES_DIR=samples/snapshot
fi
if [[ -f ${SAMPLES_DIR}/pom.xml ]]
then
pushd ${SAMPLES_DIR}
mvn -B \
-Penable-samples \
-DtrimStackTrace=false \
-Dclirr.skip=true \
-Denforcer.skip=true \
-fae \
verify
RETURN_CODE=$?
popd
else
echo "no sample pom.xml found - skipping sample tests"
fi
;;
clirr)
mvn -B -Denforcer.skip=true clirr:check
RETURN_CODE=$?
;;
*)
# Unknown JOB_TYPE: fall through with RETURN_CODE=0.
;;
esac

if [ "${REPORT_COVERAGE}" == "true" ]
then
bash ${KOKORO_GFILE_DIR}/codecov.sh
fi

# fix output location of logs
bash .kokoro/coerce_logs.sh

if [[ "${ENABLE_BUILD_COP}" == "true" ]]
then
chmod +x ${KOKORO_GFILE_DIR}/linux_amd64/buildcop
${KOKORO_GFILE_DIR}/linux_amd64/buildcop -repo=googleapis/java-bigtable
fi

echo "exiting with ${RETURN_CODE}"
exit ${RETURN_CODE}
<filename>src/test/org/puremvc/multicore/ExampleSpec.js
import chai from "chai"
// Smoke test: confirms the mocha/chai harness itself is wired up.
describe("ExampleSpec", function () {
    it("works", function () {
        chai.assert.isTrue(true)
    })
})
|
package cyclops.function.enhanced;
import java.util.function.BinaryOperator;
/**
 * Bridges the JDK's {@link BinaryOperator} with cyclops' {@code Function2},
 * so one lambda can be used wherever either abstraction is expected.
 *
 * @param <T> the operand and result type
 */
public interface BinaryFunction<T> extends BinaryOperator<T>, Function2<T, T, T> {
}
|
/*
* Copyright 2016 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.difference.historybook.index.lucene;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import com.difference.historybook.index.Index;
import com.difference.historybook.index.IndexException;
import com.difference.historybook.index.IndexTest;
import com.difference.historybook.index.lucene.LuceneIndex;
/**
 * Runs the shared {@link IndexTest} contract suite against the Lucene-backed
 * index implementation.
 */
public class LuceneIndexTest extends IndexTest {

	/**
	 * Creates a fresh {@link LuceneIndex} rooted in a new temp directory, so
	 * each test run starts from an empty index.
	 *
	 * @throws IndexException if the temp directory cannot be created
	 */
	@Override
	public Index getIndex() throws IndexException {
		try {
			Path tempIndex = Files.createTempDirectory("historybook-test-");
			return new LuceneIndex(tempIndex);
		} catch (IOException e) {
			// Wrap so callers only deal with the index's own exception type.
			throw new IndexException(e);
		}
	}
}
|
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
public class SalesReason
{
    // Primary key. BUGFIX: CalculateTotalSalesAmount references this.Id,
    // which did not previously exist on this class (compile error).
    public int Id { get; set; }

    [DataType(DataType.DateTime)]
    [Display(Name = "Modified date")]
    public DateTime ModifiedDate { get; set; }

    public virtual ICollection<SalesOrderHeaderSalesReason> Sales_SalesOrderHeaderSalesReasons { get; set; }

    /// <summary>
    /// Sums the <c>SalesAmount</c> of every order in <paramref name="salesOrders"/>
    /// that is linked to this sales reason via its junction rows.
    /// </summary>
    /// <param name="salesOrders">Orders to inspect.</param>
    /// <returns>Total sales amount attributed to this reason.</returns>
    public decimal CalculateTotalSalesAmount(List<SalesOrderHeader> salesOrders)
    {
        // Assuming SalesOrderHeader has a property named SalesAmount
        decimal totalSalesAmount = salesOrders
            .Where(so => so.Sales_SalesOrderHeaderSalesReasons.Any(sr => sr.SalesReasonId == this.Id))
            .Sum(so => so.SalesAmount);
        return totalSalesAmount;
    }
}
public class SalesOrderHeader
{
    public decimal SalesAmount { get; set; }

    // BUGFIX: SalesReason.CalculateTotalSalesAmount queries
    // so.Sales_SalesOrderHeaderSalesReasons, but this navigation collection
    // was missing from the class, so that query could not compile.
    public virtual ICollection<SalesOrderHeaderSalesReason> Sales_SalesOrderHeaderSalesReasons { get; set; }
}
// Junction row linking a sales order to one of its sales reasons.
public class SalesOrderHeaderSalesReason
{
    // Foreign key of the associated SalesReason.
    public int SalesReasonId { get; set; }
}
import React, { Component } from 'react';
import Card from './Card';
import { CARD_TO_NUMBER, CLAIMED, SETS } from './Constants';
export default class CardGroup extends Component {
filterCards(cards, claims) {
// Keep the cards that do not fall under claimed half suits.
return (cards || []).filter((c) => {
return Object.keys(claims || {}).filter((h) => {
if (claims[h] === CLAIMED) {
if (c[1] === h[1] && SETS[h[0]].includes(c[0])) {
return true;
}
}
return false;
}).length === 0;
})
}
sortCard(a, b) {
if (CARD_TO_NUMBER[a] > CARD_TO_NUMBER[b]) return 1;
else if (CARD_TO_NUMBER[a] === CARD_TO_NUMBER[b]) return 0;
return -1;
}
render() {
const suited = {
'C': [],
'D': [],
'H': [],
'S': []
};
this.filterCards(this.props.cards, this.props.claims)
.forEach((c) => {
const suit = c[c.length - 1];
suited[suit].push(c);
});
for (let s in suited) {
suited[s].sort(this.sortCard);
}
let suitClass = "hand"
if (this.props.suitClass) {
suitClass += ' ' + this.props.suitClass;
}
return (
<div className={this.props.handClass}>
<div className={suitClass}>
{suited['C'].map((c) => <Card
key={'card-' + c}
clickCard={this.props.clickCard}
card={c} />)}
</div>
<div className={suitClass}>
{suited['D'].map((c) => <Card
key={'card-' + c}
clickCard={this.props.clickCard}
card={c} />)}
</div>
<div className={suitClass}>
{suited['H'].map((c) => <Card
key={'card-' + c}
clickCard={this.props.clickCard}
card={c} />)}
</div>
<div className={suitClass}>
{suited['S'].map((c) => <Card
key={'card-' + c}
clickCard={this.props.clickCard}
card={c} />)}
</div>
</div>
)
}
}
|
#!/usr/bin/env bash
set -e
set -o pipefail
# Print a status message to stderr.
info() {
    # Since this script is run in the background from the Makefile,
    # this apparently messes up the shell's ability to move the cursor
    # back to the leftmost column when a newline is printed. The
    # problem is fixed if we manually print a carriage return after
    # the message.
    echo "$@"$'\r' >&2
}
# Each loop iteration sleeps for 100ms, so this means an overall
# timeout of 5s.
for i in $(seq 50); do
    if curl -s localhost:6060 >/dev/null; then
        # Give some extra time for godoc to complete the initial scan.
        sleep 0.2
        url="http://localhost:6060/pkg/github.com/replit/upm/?m=all"
        if command -v xdg-open &>/dev/null; then
            info "godoc started; opening $url with xdg-open(1)"
            xdg-open "$url"
        elif command -v open &>/dev/null; then
            info "godoc started; opening $url with open(1)"
            open "$url"
        else
            info "please install either open(1) or xdg-open(1)"
            exit 1
        fi
        # BUGFIX: the browser was opened successfully, so exit 0 — the
        # original unconditionally exited 1 here, reporting failure even
        # on the success path.
        exit 0
    fi
    sleep 0.1
done
info "godoc failed to start listening on port 6060"
exit 1
|
<reponame>messari/chart-stacked-histogram
// Barrel file: re-exports the chart toggle components under their own names
// so consumers can import them from this directory in one statement.
export { default as ChartToggles } from './ChartToggles';
export { default as ToggleCrosshair } from './ToggleCrosshair';
export { default as ToggleDrawing } from './ToggleDrawing';
export { default as ToggleHUD } from './ToggleHUD';
export { default as TradeToggles } from './TradeToggles';
|
package features;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.noear.solon.test.SolonJUnit4ClassRunner;
import org.noear.solon.test.SolonTest;
import org.noear.weed.annotation.Db;
import webapp.DemoApp;
import webapp.dso.mapper.SqlMapper;
import webapp.model.AppxModel;
import java.util.Set;
/**
* @author noear 2021/2/3 created
*/
@RunWith(SolonJUnit4ClassRunner.class)
@SolonTest(DemoApp.class)
/**
 * Integration tests for {@code SqlMapper} queries, run inside a Solon test
 * container booted from {@code DemoApp}.
 */
public class Test0 {
    // Injected by the @Db annotation from the configured data source.
    @Db
    SqlMapper sqlMapper;

    /**
     * appx_get0 is expected to find no row (null), while appx_get must
     * return a positive value.
     */
    @Test
    public void test0() throws Exception{
        Integer tmp = sqlMapper.appx_get0();

        assert tmp == null;

        assert sqlMapper.appx_get() > 0;
    }

    /**
     * appx_getids2 should return exactly 4 distinct ids.
     */
    @Test
    public void test1() throws Exception{
        Set<Integer> sets = sqlMapper.appx_getids2();

        System.out.println(sets);

        assert sets.size() == 4;
    }
}
|
<reponame>daniel-baf/magazine-app
package BackendUtilities;
import com.google.gson.Gson;
import java.io.BufferedReader;
import java.io.IOException;
import java.lang.reflect.Type;
import java.sql.Date;
import java.time.LocalDate;
/**
* This class convert a JSON to object who extends from User this such as Admin,
* Reader or Editor
*
* @author jefemayoneso
*/
/**
 * JSON (de)serialization and date/number conversion helpers shared by the
 * backend endpoints. Wraps a single reusable {@link Gson} instance.
 *
 * @author jefemayoneso
 */
public class Parser {

    // Shared Gson instance; created once and reused for every conversion.
    private final Gson gson;

    /** Creates a parser backed by a default-configured Gson instance. */
    public Parser() {
        gson = new Gson();
    }

    /**
     * Deserializes a JSON string into an object of the given type.
     *
     * @param jsonString JSON text to parse
     * @param type       target type token
     * @return the deserialized object
     */
    public Object toObject(String jsonString, Type type) {
        return this.gson.fromJson(jsonString, type);
    }

    /**
     * Serializes any object to its JSON representation.
     *
     * @param src       object to serialize
     * @param typeOfSrc declared type of {@code src}
     * @return JSON text
     */
    public String toJSON(Object src, Type typeOfSrc) {
        return this.gson.toJson(src, typeOfSrc);
    }

    /** Serializes a plain string to a JSON string literal. */
    public String toJSON(String stg) {
        return this.gson.toJson(stg);
    }

    /**
     * Reads the full request body from the given reader.
     *
     * @param br reader wrapping the request stream
     * @return the concatenated body (lines joined without separators),
     *         or {@code null} on I/O failure
     */
    public String getBody(BufferedReader br) {
        try {
            // StringBuilder avoids the O(n^2) cost of the original
            // `body += line` string concatenation loop.
            StringBuilder body = new StringBuilder();
            String line = br.readLine();
            while (line != null) {
                body.append(line);
                line = br.readLine();
            }
            return body.toString();
        } catch (IOException e) {
            System.out.println("Error parsing request string from buffered reader at [Parsers].[ReaderBR]\n" + e.getMessage());
            return null;
        }
    }

    /**
     * Converts a {@link LocalDate} to a {@link java.sql.Date}.
     *
     * @param date local date, may be {@code null}
     * @return SQL date, or {@code null} if the conversion fails
     */
    public Date toDate(LocalDate date) {
        try {
            return Date.valueOf(date);
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Converts an ISO {@code yyyy-MM-dd} string to a {@link java.sql.Date}.
     *
     * @param date date string
     * @return SQL date, or {@code null} if the string is not parseable
     */
    public Date toDate(String date) {
        try {
            return Date.valueOf(date);
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Parses an ISO-8601 date string into a {@link LocalDate}.
     *
     * @param dateString ISO date string
     * @return the parsed date
     */
    public LocalDate toLocalDate(String dateString) {
        return LocalDate.parse(dateString);
    }

    /**
     * Converts a {@link java.sql.Date} to a {@link LocalDate}.
     *
     * @param date SQL date
     * @return the equivalent local date
     */
    public LocalDate toLocalDate(Date date) {
        return date.toLocalDate();
    }

    /**
     * Parses a decimal string into an {@link Integer}.
     *
     * @param parameter numeric string
     * @return the parsed integer
     */
    public Integer toInteger(String parameter) {
        return Integer.valueOf(parameter);
    }
}
|
from django.contrib.auth import authenticate, logout, login
from django.contrib.auth import get_user_model
from django.http import HttpResponse
from app.models import Label, Place, Guide, Question, Answer
from hashlib import md5
import json
import re
# --- Status codes and messages returned by the JSON API below. ---
# NOTE(review): several codes are reused for distinct conditions
# (QUERY_OK_CODE == ADD_OK_CODE == 203, HAD_LOGIN_CODE == NOT_LOGIN_CODE == 301)
# — confirm clients don't rely on the code alone to distinguish outcomes.

# 2xx-style: success
LOGIN_OK_CODE = 200
LOGIN_OK = 'Login success'
LOGOUT_OK_CODE = 201
LOGOUT_OK = 'Logout success'
REG_OK_CODE = 202
REG_OK = 'Regist success'
QUERY_OK_CODE = 203
QUERY_OK = ''
ADD_OK_CODE = 203
ADD_OK = 'Add success'
GET_OK_CODE = 204
GET_OK = ''
# 3xx-style: session state problems
HAD_LOGIN_CODE = 301
HAD_LOGIN = 'Had logined'
NOT_LOGIN_CODE = 301
NOT_LOGIN = 'Not login'
# 4xx-style: validation / lookup failures
NOT_ACTIVE_CODE = 401
NOT_ACTIVE = 'User Not Active'
NOT_MATCH_CODE = 402
NOT_MATCH = 'Username and Password not match'
DATE_ERR_CODE = 411
DATE_ERR = 'Datetime is not allow'
GENDER_ERR_CODE = 412
GENDER_ERR = 'Gender is not allow'
PHONE_ERR_CODE = 413
PHONE_ERR = 'Phone num is not allow'
EMAIL_ERR_CODE = 414
EMAIL_ERR = 'Email is not allow'
PHONE_EX_CODE = 421
PHONE_EX = 'Phone has already regist'
EMAIL_EX_CODE = 422
EMAIL_EX = 'Email has already regist'
UNAME_EX_CODE = 423
UNAME_EX = 'Username has already regist'
NAME_EX_CODE = 424
NAME_EX = 'This name is already exists'
KEY_ERR_CODE = 425
KEY_ERR = 'The Key Error'
ID_ERR_CODE = 426
ID_ERR = 'The ID Error'
TITLE_ERR_CODE = 427
TITLE_ERR = 'The Title Error'
PLACE_ERR_CODE = 428
PLACE_ERR = 'The Place Error'
LABEL_ERR_CODE = 429
LABEL_ERR = 'The Label Error'
NAME_ERR_CODE = 430
NAME_ERR = 'Name Error'
NAME_NEX_CODE = 431
NAME_NEX = 'Name Not exists'
# 5xx-style: request / server errors
INVALIED_CODE = 501
INVALIED = 'Not support this method'
UN_ERROR_CODE = 502
UN_ERROR = 'Something error'
def index(request):
    """Root endpoint: respond with a plain-text greeting."""
    greeting = "hello."
    return HttpResponse(greeting)
def test(request):
    """Health-check endpoint: confirm the app is serving requests."""
    message = "test ok"
    return HttpResponse(message)
def JSON(**kwargs):
    """Serialize the given keyword arguments to a JSON object string."""
    payload = dict(kwargs)
    return json.dumps(payload)
def user_logout(request):
    """Log the current session out.

    Returns JSON: LOGOUT_OK when a session existed, NOT_LOGIN otherwise.
    """
    # NOTE(review): is_authenticated is called as a method, which implies
    # Django < 1.10 (it became a property later) — confirm target version.
    if request.user.is_authenticated():
        logout(request)
        data = JSON(code=LOGOUT_OK_CODE, status=True, message=LOGOUT_OK)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=True, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def user_login(request):
    """Authenticate a user from POSTed username/password.

    Responses: HAD_LOGIN if already authenticated, LOGIN_OK (with user id and
    name) on success, NOT_ACTIVE for disabled accounts, NOT_MATCH on bad
    credentials, INVALIED for non-POST requests.
    """
    if request.user.is_authenticated():
        data = JSON(code=HAD_LOGIN_CODE, status=True, message=HAD_LOGIN)
        return HttpResponse(data, content_type="application/json")
    if request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                # message here is itself a JSON string embedded in the outer
                # payload (double-encoded) — clients must parse it twice.
                message = JSON(user_id=user.id, username=user.username)
                data = JSON(code=LOGIN_OK_CODE, status=True, message=message)
                login(request, user)
            else:
                data = JSON(code=NOT_ACTIVE_CODE, status=False,
                            message=NOT_ACTIVE)
        else:
            data = JSON(code=NOT_MATCH_CODE, status=False, message=NOT_MATCH)
    else:
        data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    return HttpResponse(data, content_type="application/json")
def user_register(request):
    """Register a new account from POST data.

    Validates birthday / gender / phone / email formats with regexes, then
    checks phone, email and username uniqueness before creating the user.
    Responds with the first failing check's error code, or REG_OK.
    """
    if request.user.is_authenticated():
        data = JSON(code=HAD_LOGIN_CODE, status=False, message=HAD_LOGIN)
    elif not request.method == 'POST':
        data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        username = request.POST.get('username')
        password = request.POST.get('password')
        email = request.POST.get('email')
        phone = request.POST.get('phone')
        gender = request.POST.get('gender')
        birthday = request.POST.get('birthday')
        # check format
        # \2 backreference forces the same separator (-, / or .) twice.
        if re.match(r'(\d{4}([-/\.])\d{2}\2\d{2})', birthday) is None:
            data = JSON(code=DATE_ERR_CODE, status=False, message=DATE_ERR)
        elif gender not in {'1', '0'}:
            data = JSON(code=GENDER_ERR_CODE, status=False, message=GENDER_ERR)
        elif re.match(r'(\+\d{1,3})?1\d{10}', phone) is None:
            data = JSON(code=PHONE_ERR_CODE, status=False, message=PHONE_ERR)
        elif re.match(r'[^@\s]+@([^@\s]+\.)+[^@\s]+', email) is None:
            data = JSON(code=EMAIL_ERR_CODE, status=False, message=EMAIL_ERR)
        # database search
        else:
            all_user = get_user_model().objects
            if all_user.filter(phone=phone).count() != 0:
                data = JSON(CODE=PHONE_EX_CODE, status=False, message=PHONE_EX)
            elif all_user.filter(email=email).count() != 0:
                data = JSON(CODE=EMAIL_EX_CODE, status=False, message=EMAIL_EX)
            elif all_user.filter(username=username).count() != 0:
                # NOTE(review): these three duplicate-checks use CODE= (upper
                # case) unlike code= elsewhere — clients see a different key;
                # confirm whether that is intended.
                data = JSON(CODE=UNAME_EX_CODE, status=False, message=UNAME_EX)
            else:
                app_user = get_user_model()
                try:
                    # Normalize the separator so Django's DateField accepts it.
                    birthday = birthday.replace('.', '-').replace('/', '-')
                    user = app_user.objects.create_user(username=username,
                                                        password=password,
                                                        email=email,
                                                        phone=phone,
                                                        gender=gender,
                                                        birthday=birthday)
                    message = JSON(user_id=user.id, username=user.username)
                    data = JSON(code=REG_OK_CODE, status=True, message=message)
                except Exception as e:
                    print(e)
                    data = JSON(code=UN_ERROR_CODE, status=False,
                                message=UN_ERROR)
    return HttpResponse(data, content_type="application/json")
def guide_add(request):
    """Create a Guide from POST data (title, content, place, label[], dates).

    Requires an authenticated session. Validates the title/place presence,
    both dates' formats, their ordering, and that the place id exists; labels
    are attached after the guide is saved (m2m needs a primary key first).
    """
    if request.user.is_authenticated():
        if request.method == 'POST':
            title = request.POST.get('title')
            content = request.POST.get('content')
            place = request.POST.get('place')
            label = request.POST.getlist('label[]')
            start_time = request.POST.get('start_time')
            end_time = request.POST.get('end_time')
            if len(title) == 0:
                data = JSON(code=TITLE_ERR_CODE, status=False,
                            message=TITLE_ERR)
            elif len(place) == 0:
                data = JSON(code=PLACE_ERR_CODE, status=False,
                            message=PLACE_ERR)
            elif re.match(r'(\d{4}([-/\.])\d{2}\2\d{2})', start_time) is None:
                data = JSON(code=DATE_ERR_CODE, status=False, message=DATE_ERR)
            elif re.match(r'(\d{4}([-/\.])\d{2}\2\d{2})', end_time) is None:
                data = JSON(code=DATE_ERR_CODE, status=False, message=DATE_ERR)
            elif start_time > end_time:
                # String comparison works here because both dates matched the
                # zero-padded yyyy-mm-dd pattern above.
                data = JSON(code=DATE_ERR_CODE, status=False, message=DATE_ERR)
            elif not Place.objects.filter(id=place):
                data = JSON(code=PLACE_ERR_CODE, status=False,
                            message=PLACE_ERR)
            else:
                label = Label.objects.filter(id__in=label)
                a = Guide(name=title, user=request.user,
                          place=Place.objects.get(id=place), content=content,
                          start_time=start_time, end_time=end_time)
                a.save()
                a.label.add(*label)
                data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def guide_id(request, _id):
    """Return the full detail of one Guide and bump its pageview counter.

    Responds with ID_ERR when no guide has the given id; requires login.
    """
    if request.user.is_authenticated():
        try:
            # filter(...)[0] raises IndexError (handled below) for a bad id.
            guide = Guide.objects.filter(id=_id)[0]
            labels = []
            for l in guide.label.all():
                labels.append(l.name)
            submit = str(guide.submit.strftime('%Y-%m-%d %H:%M:%S'))
            result = {'title': guide.name, 'username': guide.user.username,
                      'place': guide.place.name, 'labels': labels,
                      'start_time': str(guide.start_time),
                      'end_time': str(guide.end_time),
                      'content': guide.content, 'submit': submit,
                      'pageview': guide.pageview}
            # The response carries the pre-increment pageview; the stored
            # counter is incremented after building the payload.
            guide.pageview += 1
            guide.save()
            data = JSON(code=GET_OK_CODE, status=True, message=result)
        except IndexError:
            data = JSON(code=ID_ERR_CODE, status=False, message=ID_ERR)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def guide_list(request):
    """Return a page of guides (newest first) as a JSON list.

    POST parameters: ``start`` (offset into the list) and ``offset``
    (page size).
    """
    if request.user.is_authenticated():
        if request.method == 'POST':
            start = int(request.POST.get('start'))
            offset = int(request.POST.get('offset'))
            try:
                page = Guide.objects.order_by('-id')[start:start + offset]
            except IndexError:
                page = []
            rows = []
            for guide in page:
                # Gravatar-style avatar keyed on the MD5 of the author email.
                digest = md5()
                digest.update(guide.user.email.encode())
                img = 'http://gravatar.eqoe.cn/avatar/%s?size=48&default=identicon&rating=pg' % (digest.hexdigest())
                rows.append({
                    'id': guide.id,
                    'username': guide.user.username,
                    'title': guide.name,
                    'place': guide.place.name,
                    'pageview': guide.pageview,
                    'labels': [tag.name for tag in guide.label.all()],
                    'img': img,
                })
            data = JSON(code=QUERY_OK_CODE, status=True, message=rows)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def question_add(request):
    """Create a question from POSTed title/content/place/label[] fields."""
    if request.user.is_authenticated():
        if request.method == 'POST':
            title = request.POST.get('title')
            content = request.POST.get('content')
            place = request.POST.get('place')
            label_ids = request.POST.getlist('label[]')
            if len(title) == 0:
                data = JSON(code=TITLE_ERR_CODE, status=False,
                            message=TITLE_ERR)
            elif len(place) == 0:
                data = JSON(code=PLACE_ERR_CODE, status=False,
                            message=PLACE_ERR)
            elif not Place.objects.filter(id=place):
                # The place id must reference an existing row.
                data = JSON(code=PLACE_ERR_CODE, status=False,
                            message=PLACE_ERR)
            else:
                question = Question(title=title, user=request.user,
                                    place=Place.objects.get(id=place),
                                    content=content)
                question.save()
                # M2M rows need a saved instance, hence add() after save().
                question.label.add(*Label.objects.filter(id__in=label_ids))
                data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def question_id(request, _id):
    """Return one question plus all of its answers (newest first) as JSON."""
    if request.user.is_authenticated():
        try:
            question = Question.objects.filter(id=_id)[0]
            label_names = [tag.name for tag in question.label.all()]
            answers = []
            for answer in Answer.objects.filter(question=question).order_by('-submit'):
                # Gravatar-style avatar keyed on the answerer's email hash.
                digest = md5()
                digest.update(answer.user.email.encode())
                img = 'http://gravatar.eqoe.cn/avatar/%s?size=48&default=identicon&rating=pg' % (digest.hexdigest())
                answers.append({
                    'id': answer.id,
                    'username': answer.user.username,
                    'img': img,
                    'content': answer.content,
                    'submit': str(answer.submit.strftime('%Y-%m-%d %H:%M:%S')),
                })
            payload = {
                'title': question.title,
                'username': question.user.username,
                'place': question.place.name,
                'labels': label_names,
                'content': question.content,
                'submit': str(question.submit.strftime('%Y-%m-%d %H:%M:%S')),
                'answer': answers,
            }
            data = JSON(code=GET_OK_CODE, status=True, message=payload)
        except IndexError:
            data = JSON(code=ID_ERR_CODE, status=False, message=ID_ERR)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def question_comment(request, _id):
    """POST an answer ('content') to question ``_id``.

    Bug fix: the original had no branch for authenticated non-POST
    requests, so ``data`` was unbound and the view raised
    UnboundLocalError on a plain GET. Non-POST now returns INVALIED,
    consistent with the sibling views.
    """
    if request.user.is_authenticated():
        if request.method == 'POST':
            content = request.POST.get('content')
            try:
                question = Question.objects.filter(id=_id)[0]
                answer = Answer(user=request.user, question=question,
                                content=content)
                answer.save()
                data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
            except IndexError:
                data = JSON(code=ID_ERR_CODE, status=False, message=ID_ERR)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def question_list(request):
    """Return a page of questions (newest first) as a JSON list.

    POST parameters: ``start`` (offset) and ``offset`` (page size).
    Each row carries the question metadata, its labels, the answer
    count and a gravatar URL for the author.
    """
    if request.user.is_authenticated():
        if request.method == 'POST':
            start = int(request.POST.get('start'))
            offset = int(request.POST.get('offset'))
            try:
                ans = Question.objects.order_by('-id')[start:start + offset]
            except IndexError:
                ans = []
            result = []
            for i in ans:
                labels = []
                for l in i.label.all():
                    labels.append(l.name)
                m = md5()
                m.update(i.user.email.encode())
                # Perf fix: COUNT(*) in the database instead of fetching
                # every answer row just to len() the list.
                ans_count = Answer.objects.filter(question=i).count()
                img = 'http://gravatar.eqoe.cn/avatar/%s?size=48&default=identicon&rating=pg' % (m.hexdigest())
                _ = {'id': i.id, 'username': i.user.username, 'title': i.title,
                     'place': i.place.name, 'answer': ans_count,
                     'labels': labels, 'img': img}
                result.append(_)
            data = JSON(code=QUERY_OK_CODE, status=True, message=result)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def __id(request, _id, model):
    """Shared lookup: return the ``name`` of ``model`` row ``_id`` as JSON."""
    if not request.user.is_authenticated():
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    else:
        try:
            data = JSON(code=QUERY_OK_CODE, status=True,
                        message=model.objects.filter(id=_id)[0].name)
        except IndexError:
            # filter() returned an empty queryset -> unknown id.
            data = JSON(code=ID_ERR_CODE, status=False, message=ID_ERR)
    return HttpResponse(data, content_type="application/json")
def label_id(request, _id):
    # Thin wrapper: resolve a Label's name by primary key via __id().
    return __id(request, _id, Label)
def place_id(request, _id):
    # Thin wrapper: resolve a Place's name by primary key via __id().
    return __id(request, _id, Place)
def __list(request, model):
    """Shared listing: every ``model`` row as [{'id': ..., 'name': ...}]."""
    if not request.user.is_authenticated():
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    else:
        rows = list(model.objects.values('id', 'name'))
        data = JSON(code=QUERY_OK_CODE, status=True, message=rows)
    return HttpResponse(data, content_type="application/json")
def place_list(request):
    # Thin wrapper: list all Place rows via __list().
    return __list(request, Place)
def label_list(request):
    # Thin wrapper: list all Label rows via __list().
    return __list(request, Label)
def user_add_place(request):
    """Attach an existing Place (looked up by POSTed 'name') to the user.

    Bug fixes: the empty-name branch contained a duplicated
    ``data = data = ...`` assignment and wrongly reported
    ``status=True`` for an error response; both corrected.
    """
    if request.user.is_authenticated():
        if request.method == 'POST':
            if 'name' in request.POST:
                name = request.POST.get('name')
                if len(name) == 0:
                    data = JSON(code=NAME_ERR_CODE, status=False,
                                message=NAME_ERR)
                elif not Place.objects.filter(name=name):
                    # NAME_NEX = the named place does not exist.
                    data = JSON(code=NAME_NEX_CODE, status=False,
                                message=NAME_NEX)
                else:
                    request.user.place.add(Place.objects.get(name=name))
                    data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
            else:
                data = JSON(code=KEY_ERR_CODE, status=False, message=KEY_ERR)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def __add(request, model):
    """Shared creation: insert a new ``model`` row with the POSTed 'name'.

    Bug fixes: the empty-name branch contained a duplicated
    ``data = data = ...`` assignment and wrongly reported
    ``status=True`` for an error response; both corrected.
    """
    if request.user.is_authenticated():
        if request.method == 'POST':
            if 'name' in request.POST:
                name = request.POST.get('name')
                if len(name) == 0:
                    data = JSON(code=NAME_ERR_CODE, status=False,
                                message=NAME_ERR)
                elif model.objects.filter(name=name):
                    # NAME_EX = a row with this name already exists.
                    data = JSON(code=NAME_EX_CODE, status=False,
                                message=NAME_EX)
                else:
                    add = model(name=name)
                    add.save()
                    data = JSON(code=ADD_OK_CODE, status=True, message=ADD_OK)
            else:
                data = JSON(code=KEY_ERR_CODE, status=False, message=KEY_ERR)
        else:
            data = JSON(code=INVALIED_CODE, status=False, message=INVALIED)
    else:
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    return HttpResponse(data, content_type="application/json")
def label_add(request):
    # Thin wrapper: create a Label via the shared __add() helper.
    return __add(request, Label)
def place_add(request):
    # Thin wrapper: create a Place via the shared __add() helper.
    return __add(request, Place)
def user_info(request):
    """Return the logged-in user's profile (places, birthday, gender)."""
    if not request.user.is_authenticated():
        data = JSON(code=NOT_LOGIN_CODE, status=False, message=NOT_LOGIN)
    else:
        user = request.user
        payload = {
            'username': user.username,
            'id': user.id,
            'places': [place.name for place in user.place.all()],
            'birthday': str(user.birthday),
            'gender': user.gender,
        }
        data = JSON(code=GET_OK_CODE, status=True, message=payload)
    return HttpResponse(data, content_type="application/json")
<reponame>firmanm/boxroom-engine
module Boxroom
  # A file stored in a Boxroom folder. The binary payload is managed by
  # Paperclip (:attachment); this AR model holds the metadata.
  class UserFile < ActiveRecord::Base
    # Uploads live at <rails_root>/<uploads_path>/<env>/<id>/<style>/<id>.
    has_attached_file :attachment, path: ":rails_root/#{Boxroom.configuration.uploads_path}/:rails_env/:id/:style/:id", restricted_characters: Boxroom::RESTRICTED_CHARACTERS
    do_not_validate_attachment_file_type :attachment
    belongs_to :folder
    has_many :share_links, dependent: :destroy
    validates_attachment_presence :attachment, message: I18n.t(:blank, scope: [:activerecord, :errors, :messages])
    validates_presence_of :folder_id
    # A given file name may appear only once per folder.
    validates_uniqueness_of :attachment_file_name, scope: 'folder_id', message: I18n.t(:exists_already, scope: [:activerecord, :errors, :messages])
    # Reject names containing filesystem-hostile characters.
    validates_format_of :attachment_file_name, with: /\A[^\/\\\?\*:|"<>]+\z/, message: I18n.t(:invalid_characters, scope: [:activerecord, :errors, :messages])

    # Duplicate this record into target_folder and copy the stored bytes
    # on disk. Returns the new UserFile.
    def copy(target_folder)
      new_file = self.dup
      new_file.folder = target_folder
      new_file.save!
      #TODO: refactor to support S3
      path = "#{Rails.root}/#{Boxroom.configuration.uploads_path}/#{Rails.env}/#{new_file.id}/original"
      FileUtils.mkdir_p path
      FileUtils.cp_r self.attachment.path, "#{path}/#{new_file.id}"
      new_file
    end

    # Re-parent this file into target_folder; the stored bytes do not move.
    def move(target_folder)
      self.folder = target_folder
      save!
    end

    # File extension without the leading dot, e.g. "pdf".
    def extension
      File.extname(attachment_file_name)[1..-1]
    end
  end
end
#!/bin/sh
# Created by Samuele Buosi
# Fix: the shebang must be the very first line of the file for the
# kernel to honor it; it previously appeared after the author comment.
# Ask for the C file name (without .c), create it empty and executable.
echo "Nome file C da creare , senza .c" ; read nomefile
nomefile=$nomefile.c
>$nomefile
chmod 711 $nomefile
# Emit the standard prologue of the generated C program.
echo "/* Created by Samuele Buosi */" >>$nomefile
echo "#include <stdio.h>" >>$nomefile
echo "#include <stdlib.h>" >>$nomefile
echo "#include <unistd.h>" >>$nomefile
echo "#include <sys/wait.h>" >>$nomefile
echo "#include <fcntl.h>" >>$nomefile
echo "#include <stdarg.h>" >>$nomefile
echo "#include <string.h>" >>$nomefile
echo 'typedef int pipe_t[2]; /* tipo per la pipe */' >>$nomefile
echo '/* Definisco le variabili globali */' >>$nomefile
echo 'pipe_t *pipes; /* pipes */' >>$nomefile
echo 'int pid; /* appoggio per valore pid (pid unico)*/' >>$nomefile
echo 'int status; /* appoggio per wait */' >>$nomefile
echo 'int fd; /* per la gestione dei file creati */' >>$nomefile
echo 'int nfigli,npipe; /* numero di figli e numero di pipe da creare */' >>$nomefile
echo '/* abbinamento snprintf+write */' >>$nomefile
echo 'void zprintf(int fd, const char *fmt, ...);' >>$nomefile
echo "int main(int argc, char **argv) {" >>$nomefile
# Bug fix: the generated pipe/fork section iterates with `j`
# (for (j=0; j<npipe; j++)) but only `i` was declared, so the emitted
# C failed to compile whenever that section was enabled.
echo $'\t' 'int i, j;'>> $nomefile
# Ask for the minimum number of arguments (0 = no check) and, if
# requested, emit the argc check into the generated C file.
echo "Numero minimo di argomenti, 0 per nessun controllo"; read minargs
# `test -eq` exits with status 2 when the operand is not an integer;
# that is exploited here to validate the user's input.
test $minargs -eq 0 2> /dev/null
if test $? -eq 2
then
echo Valore non Valido
exit
fi
if test $minargs -gt 0
then
echo $'\t' '/* controllo numero di argomenti */'>> $nomefile
echo $'\t' "if (argc<"$minargs+1") {" >> $nomefile
echo $'\t' $'\t' 'zprintf(2,"Errore nel numero di argomenti\n");' >> $nomefile
echo $'\t' $'\t' "return(1);" >> $nomefile
echo $'\t'"}" >> $nomefile
echo $'\t''/*CONTROLLO PARAMETRI*/' >>$nomefile
fi
# Optionally emit validation of an integer parameter K (> 0).
# NOTE(review): the generated C uses K without declaring it; an
# `int K;` must be added by hand for the output to compile — confirm.
echo "K?, [0,1]"; read risp
if [ $risp -gt 0 ]
then
echo $'\t' 'K=atoi(argv[2]);' >> $nomefile
echo $'\t' '/* Controllo K */' >> $nomefile
echo $'\t' 'if(K <= 0)' >> $nomefile
echo $'\t' '{' >> $nomefile
echo $'\t' $'\t' 'zprintf(2,"Errore, K non strettamente positivo\n");' >> $nomefile
echo $'\t' $'\t' 'return 2;' >> $nomefile
echo $'\t' '}' >> $nomefile
fi
# Optionally emit validation of a single-character parameter C.
# NOTE(review): C is likewise undeclared in the generated code
# (needs `char *C;`) — confirm.
echo "C?, [0,1]"; read risp
if [ $risp -gt 0 ]
then
echo $'\t' '/* controllo che C sia un carattere */' >> $nomefile
echo $'\t' 'C=argv[argc-1]; /* Ultimo Argomento */' >> $nomefile
echo $'\t' 'if(strlen(C)!=1)' >> $nomefile
echo $'\t' '{' >> $nomefile
echo $'\t' $'\t' 'zprintf(2,"Errore, C non carattere singolo\n");' >> $nomefile
echo $'\t' $'\t' 'return 3;' >> $nomefile
echo $'\t' '}' >> $nomefile
fi
# Optionally emit validation of the child count (nfigli). The emitted
# comparison contains the placeholder /*Numero minimo*/ that must be
# filled in manually before the generated file compiles.
echo "Figli da sh?, [0,1]"; read risp
if [ $risp -gt 0 ]
then
echo $'\t' '/* Controllo su nfigli */' >> $nomefile
echo $'\t' 'nfigli=atoi(argv[argc-1]);' >> $nomefile
echo $'\t' 'if(nfigli</*Numero minimo*/)' >> $nomefile
echo $'\t' '{' >> $nomefile
echo $'\t' $'\t' 'zprintf(2,"Errore, numero nfigli non corretto\n");' >> $nomefile
echo $'\t' $'\t' 'return 4;' >> $nomefile
echo $'\t' '}' >> $nomefile
fi
# Optionally emit the whole pipe/fork/wait skeleton: allocate npipe
# pipes, fork nfigli children wired in a chain, then have the parent
# close its pipe ends and collect every child's exit status.
echo "pipe?, [0,1]"; read risp
if [ $risp -gt 0 ]
then
echo $'\t' '/* Allocazione di un array di pipe per la comunicazione fra figli */' >> $nomefile
echo $'\t' 'npipe=nfigli-1;' >> $nomefile
echo $'\t' 'pipes= (pipe_t*)malloc(sizeof(pipe_t)*npipe);' >> $nomefile
echo $'\t' 'if(pipes==NULL)' >> $nomefile
echo $'\t' '{' >> $nomefile
echo $'\t' $'\t' 'zprintf(2,"errore nella malloc del vettore di pipe\n");' >> $nomefile
echo $'\t' $'\t' 'return(5);' >> $nomefile
echo $'\t' '}' >> $nomefile
echo $'\t' '/* creazione pipe */' >> $nomefile
echo $'\t' 'for (i=0; i<npipe; i++)' >> $nomefile
echo $'\t' '{/* creazione della pipe */' >> $nomefile
echo $'\t' $'\t' 'if (pipe(pipes[i])!=0)' >> $nomefile
echo $'\t' $'\t' '{/*in caso di errore*/' >> $nomefile
echo $'\t' $'\t' $'\t' 'zprintf(2,"Errore, pipe fallita in %d\n",i);' >> $nomefile
echo $'\t' $'\t' $'\t' 'return(6);' >> $nomefile
echo $'\t' '}' >> $nomefile
echo $'\t' '}' >> $nomefile
# Each generated child closes the pipe ends it does not use, then
# calls figlio(). NOTE(review): the generated code references a
# figlio() function that this generator never emits (see the
# "child part missing" note below) — the output needs it added.
echo $'\t' '/*creazione figli*/' >> $nomefile
echo $'\t' 'for (i=0; i<nfigli; i++)' >> $nomefile
echo $'\t' '{' >> $nomefile
echo $'\t' $'\t' 'pid=fork();' >> $nomefile
echo $'\t' $'\t' 'switch(pid)' >> $nomefile
echo $'\t' $'\t' '{' >> $nomefile
echo $'\t' $'\t' $'\t' 'case 0: /* Figlio */' >> $nomefile
echo $'\t' $'\t' $'\t' '/*la prima cosa che i figli devono fare è chiudere il lato della pipe che non usano*/' >> $nomefile
echo $'\t' $'\t' $'\t' 'for (j=0; j<npipe; j++)' >> $nomefile
echo $'\t' $'\t' $'\t' '{' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' 'if (j!=(i-1)) /*collegamento diretto --> j!=i*/' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' '{ /* figlio[i] scrive su pipe[i-1] */' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' $'\t' 'close(pipes[j][1]);' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' '}' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' 'if (j!=i) /*collegamento diretto --> j!=(i-1)*/' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' '{ /* e legge da pipe[i] */' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' $'\t' 'close(pipes[j][0]);' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' '}' >> $nomefile
echo $'\t' $'\t' $'\t' '}' >> $nomefile
echo $'\t' $'\t' $'\t' '/*richiama la funzione figlio per fare quel che deve*/' >> $nomefile
echo $'\t' $'\t' $'\t' 'return (figlio(argv[i]));' >> $nomefile
echo $'\t' $'\t' $'\t' 'case -1:' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' 'zprintf(2,"Errore nella %d fork\n",i);' >> $nomefile
echo $'\t' $'\t' $'\t' $'\t' 'return(7);' >> $nomefile
echo $'\t' $'\t' '}' >> $nomefile
echo $'\t' '}' >> $nomefile
echo $'\t' '/* PADRE */' >> $nomefile
echo $'\t' 'for (i=0; i<npipe; i++)' >> $nomefile
echo $'\t' '{ /* per tutte le pipe */' >> $nomefile
echo $'\t' $'\t' ' close(pipes[i][0]); /* il padre non legge, chiudo il fd di lettura*/' >> $nomefile
echo $'\t' $'\t' ' close(pipes[i][1]); /* e non scrive, chiudo il fd di scrittura */' >> $nomefile
echo $'\t' '}' >> $nomefile
echo $'\t' '/*attende i figli per recuperare gli exit value*/' >> $nomefile
echo $'\t' 'for (i=0; i<nfigli; i++){' >> $nomefile
echo $'\t' $'\t' 'pid=wait(&status);' >> $nomefile
echo $'\t' $'\t' '/*attesa figli con recupero variabile di ritorno e controllo errore*/' >> $nomefile
echo $'\t' $'\t' 'if (WIFEXITED(status) == 0) {' >> $nomefile
echo $'\t' $'\t' $'\t' 'zprintf(1,"Figlio con pid: %d terminato in modo anomalo\n", pid);' >> $nomefile
echo $'\t' $'\t' '} else {' >> $nomefile
echo $'\t' $'\t' $'\t' 'zprintf(1,"Per il figlio con pid %d lo stato di EXIT e %d\n",pid,WEXITSTATUS(status));' >> $nomefile
echo $'\t' $'\t' '}' >> $nomefile
echo $'\t' '}' >> $nomefile
fi
echo $'\t' "return 0;" >>$nomefile
echo "}" >>$nomefile
# Child part (figlio) missing — not generated by this script.
# Emit zprintf(): vsnprintf into a buffer followed by write(), so the
# generated programs can format output without stdio buffering.
echo 'void zprintf(int fd, const char *fmt, ...) {' >>$nomefile
echo $'\t' '/* printf wrapper using write instead */' >>$nomefile
echo $'\t' 'static char msg[256];' >>$nomefile
echo $'\t' 'va_list ap;' >>$nomefile
echo $'\t' 'int n;' >>$nomefile
echo $'\t' 'va_start(ap, fmt);' >>$nomefile
echo $'\t' 'n=vsnprintf (msg, 256, fmt, ap);' >>$nomefile
echo $'\t' 'write(fd,msg,n);' >>$nomefile
echo $'\t' 'va_end(ap);' >>$nomefile
echo '}' >>$nomefile
|
import React from 'react';
import TokenBalance from '../TokenBalance';
import { TOKENS_BY_NETWORK } from '../TokenBalance/constants';
interface Props {
  chainId: number;
}

/** Renders a TokenBalance for every token configured for the given chain. */
const TokenList = ({ chainId }: Props) => {
  // Unknown chain ids simply render nothing.
  const tokens = TOKENS_BY_NETWORK[chainId] ?? [];
  return (
    <>
      {tokens.map(token => (
        <TokenBalance key={token.address} {...token} />
      ))}
    </>
  );
};

export default TokenList;
|
//package com.Card;
import com.Card.Card;
import com.Card.Deck;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Created by woramet on 12/20/15.
*/
public class DeckTest {
Deck deck;
@Before
public void setUp() throws Exception {
deck = new Deck();
}
@After
public void tearDown() throws Exception {
deck = null;
}
@Test
public void testDeckShouldContain52Cards() throws Exception {
assertEquals(52, deck.getCardCount());
}
@Test
public void testDrawCardFromTheTop() throws Exception {
Card card = deck.drawCardFromTheTop();
assertNotNull(card);
assertEquals(51, deck.getCardCount());
}
@Test
public void testCardShouldNotBeDrawnWhenDeckEmpty() throws Exception {
for(int i = 0 ; i < 52 ; i++) {
deck.drawCardFromTheTop();
}
assertEquals(true, deck.isEmpty());
assertNull(deck.drawCardFromTheTop());
}
@Test
public void testIsEmpty() throws Exception {
assertEquals(false, deck.isEmpty());
}
} |
<reponame>Professorvennie/Bronze-Age<filename>src/main/java/com/professorvennie/bronzeage/core/proxeys/CommonProxey.java
package com.professorvennie.bronzeage.core.proxeys;
import com.professorvennie.bronzeage.api.manual.IPage;
/**
* Created by ProfessorVennie on 10/21/2014 at 5:22 PM.
*/
public class CommonProxey {
    // Common/server-side proxy: rendering is a client-only concern, so
    // this is a deliberate no-op overridden by the client proxy.
    public void registerRenders() {
    }
    // No-op on the server; the client proxy opens the manual page.
    public void setPageToOpen(IPage page) {
    }
}
|
#!/bin/bash
# Build (once) an Apple Wallet pass for a polling place and print its path.
#
# Usage: pkpass.sh [address] [hours] [lat] [lng]
# Requires: node, ImageMagick `composite`, curl, openssl, $MAPBOX_API_KEY.

# `command -v` replaces the deprecated `which`; $(...) replaces backticks.
node=$(command -v node)
composite=$(command -v composite)
curl=$(command -v curl)
base_dir="/usr/local/aclu/voter-apple-wallet"
mapbox_api_url="https://api.mapbox.com/styles/v1/mapbox/light-v9/static"
zoom=15
pin="$base_dir/img/pin.png"
pin2x="$base_dir/img/pin@2x.png"
if [ $# -lt 4 ] ; then
    echo "Usage: pkpass.sh [address] [hours] [lat] [lng]"
    exit 1
fi
address=$1
hours=$2
lat=$3
lng=$4
# Cache key: SHA-1 of the address. openssl prints "(stdin)= <hex>";
# strip everything up to "= " to keep only the hex digest.
hash=$(echo -n "$address" | openssl sha1)
hash=${hash#*= }
if [ ! -f "$base_dir/passes/$hash/pass.pkpass" ] ; then
    dir="$base_dir/passes/$hash"
    cp -R "$base_dir/template" "$dir"
    # Fetch 1x and 2x static map tiles centered on the polling place.
    "$curl" -s "$mapbox_api_url/$lng,$lat,$zoom.0,0,0/375x144?access_token=$MAPBOX_API_KEY" > "$dir/map.png"
    "$curl" -s "$mapbox_api_url/$lng,$lat,$zoom.0,0,0/375x144@2x?access_token=$MAPBOX_API_KEY" > "$dir/map@2x.png"
    # Composite the location pin onto each map to produce the pass strips.
    "$composite" -gravity center "$pin" "$dir/map.png" "$dir/strip.png"
    "$composite" -gravity center "$pin2x" "$dir/map@2x.png" "$dir/strip@2x.png"
    "$node" "$base_dir/pkpass.js" "$hash" "$address" "$hours" "$lat" "$lng"
fi
echo "$base_dir/passes/$hash/pass.pkpass"
|
<reponame>raghaven447/gaeguli
"use strict";
// Thin WebSocket client for the demo's control channel. Assign the
// onproperty/onopen/onerror callbacks BEFORE calling connect(); the
// handlers are wired onto the socket as soon as it is created.
class Client {
    constructor() {
        // Called with the parsed message when the server pushes a
        // 'property' update.
        this.onproperty = undefined
        // Called once the socket is open.
        this.onopen = undefined
        // Called on any socket error.
        this.onerror = undefined
        // Underlying WebSocket; created lazily by connect().
        this.__ws = undefined
    }
    connect() {
        // Reconnect support: drop any previous socket first.
        if (this.__ws) {
            this.__ws.close()
        }
        this.__ws = new WebSocket(`ws://${window.location.host}/ws`)
        this.__ws.onopen = () => {
            if (this.onopen) {
                this.onopen()
            }
        }
        this.__ws.onmessage = message => {
            // All traffic is JSON; the 'msg' field is the message type.
            var msg = JSON.parse(message.data)
            switch (msg.msg) {
                case 'property':
                    this.onproperty(msg)
                    break
            }
        }
        this.__ws.onerror = (error) => {
            console.log(`websocket error: ${error.message}`)
            if (this.onerror) {
                this.onerror()
            }
        }
    }
    // Ask the server to start/stop streaming with the given codec.
    stream(state, codec) {
        this.__sendRequest('stream', { state: state, codec: codec })
    }
    // Push a single property change to the server.
    property(name, value) {
        this.__sendRequest('property', { name: name, value: value })
    }
    // Serialize {msg: type, ...args} and send it over the socket.
    __sendRequest(type, args) {
        var request = Object.assign({msg: type}, args)
        this.__ws.send(JSON.stringify(request))
    }
}
// UI glue: mirrors server-side property updates into the DOM controls
// (matched by element id) and forwards user changes back via Client.
export class AdaptorDemo {
    constructor() {
        this.__signaling = new Client()
        // Wire the handler BEFORE connect() so no early message is lost.
        this.__signaling.onproperty = msg => {
            var element = document.getElementById(msg.name)
            // Checkboxes carry their state in 'checked', not 'value'.
            if (element.type == "checkbox") {
                element.checked = msg.value
            } else {
                element.value = msg.value
            }
            // Fire 'change' so any listeners react to the remote update.
            element.dispatchEvent(new Event ('change'))
        }
        this.__signaling.connect()
    }
    property(name, value) {
        this.__signaling.property(name, value)
    }
    stream(state, codec) {
        this.__signaling.stream(state, codec)
    }
}
|
def sort_array(arr):
    """Sort ``arr`` in ascending order in place and return the same list.

    Replaces the original hand-rolled O(n^2) exchange sort with the
    built-in Timsort (``list.sort``); the observable behavior is
    identical — the list is mutated in place and returned.
    """
    arr.sort()
    return arr
# Demo: sort a sample list and print the result ([2, 3, 6, 8, 10]).
result = sort_array([6, 8, 3, 2, 10])
print(result)
from django.conf.urls import url
from django.contrib import admin
from rest_framework_jwt.views import obtain_jwt_token
from apps.meiduo_admin.views.admin_group import GroupView
from apps.meiduo_admin.views.admin_permission import PermissionView, GroupLIView
from apps.meiduo_admin.views.category import Cate3View
from apps.meiduo_admin.views.orders import OrderSet
from apps.meiduo_admin.views.sku import SKUGoodsView
from apps.meiduo_admin.views.sku import SKUSIMP
from apps.meiduo_admin.views.sku_image import ImageViewSet
from .views import statistical
from .views import specs
from .views import spu
from apps.meiduo_admin.views.uesrs import UserView
from apps.goods import views
from rest_framework.routers import SimpleRouter, DefaultRouter
# URL routes for the meiduo admin backend.
urlpatterns = [
    # JWT login endpoint.
    url(r'^authorizations/$', obtain_jwt_token),
    # Dashboard statistics endpoints.
    # NOTE(review): 'ToytalView' looks like a typo for TotalView — the
    # name must match the class in views.statistical; verify there.
    url(r'^statistical/total_count/$', statistical.ToytalView.as_view()),
    url(r'^statistical/day_increment/$', statistical.DayView.as_view()),
    url(r'^statistical/day_active/$', statistical.ActiveView.as_view()),
    url(r'^statistical/day_orders/$', statistical.OrderView.as_view()),
    url(r'^statistical/month_increment/$', statistical.MonthView.as_view()),
    url(r'^statistical/goods_day_views/$', statistical.GoodsView.as_view()),
    # User administration.
    url(r'^users/$', UserView.as_view()),
    # SPU / SKU helpers.
    url(r'^goods/simple/$', spu.SpuSimleView.as_view()),
    url(r'^goods/(?P<pk>\d+)/specs/$', specs.SpecOpTIONView.as_view()),
    url(r'^skus/categories/$', Cate3View.as_view()),
    url(r'^skus/simple/$', SKUSIMP.as_view()),
    url(r'^permission/simple/$', GroupLIView.as_view()),
]
# Router-generated CRUD routes.
ro = SimpleRouter()
ro.register('goods/specs',specs.SpecViewSet,base_name='specs')
urlpatterns += ro.urls
router = DefaultRouter()
router.register('skus/images',ImageViewSet,base_name='images')
router.register('skus', SKUGoodsView, base_name='skus')
router.register('orders', OrderSet, base_name='orders')
router.register('permission/perms', PermissionView, base_name='PermissionView')
# NOTE(review): this registration reuses base_name='PermissionView'
# from the line above; duplicate basenames break reverse() for these
# routes — likely intended to be something like 'GroupView'. Confirm
# before changing, since reverse lookups elsewhere may depend on it.
router.register('permission/groups', GroupView, base_name='PermissionView')
urlpatterns += router.urls
|
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.util.progress;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
/**
* Tests the {@link ProgressHelper} class.
*/
public class ProgressHelperTest {

	/**
	 * Tests progress reporting for a single seed.
	 *
	 * @throws InterruptedException if the simulation failed to execute
	 *         properly due to an interruption
	 */
	@Test
	public void testTimingSingleSeed() throws InterruptedException {
		test(1, 100000, 10000, 500);
	}

	/**
	 * Tests progress reporting for many seeds.
	 *
	 * @throws InterruptedException if the simulation failed to execute
	 *         properly due to an interruption
	 */
	@Test
	public void testTimingManySeeds() throws InterruptedException {
		test(10, 100000, 10000, 500);
	}

	/**
	 * Tests progress reporting for a single seed with fine-grain step sizes.
	 *
	 * @throws InterruptedException if the simulation failed to execute
	 *         properly due to an interruption
	 */
	@Test
	public void testTimingFineGrained() throws InterruptedException {
		test(1, 1000, 1, 50);
	}

	/**
	 * Tests if ProgressHelper functions correctly by simulating the execution
	 * of an algorithm.
	 *
	 * @param totalSeeds the total number of seeds to simulate
	 * @param maxNFE the maximum NFE per seed to simulate
	 * @param frequency the frequency of progress updates
	 * @param time the simulated time per step
	 * @throws InterruptedException if the simulation failed to execute
	 *         properly due to an interruption
	 */
	private void test(int totalSeeds, int maxNFE, int frequency, int time)
			throws InterruptedException {
		ProgressHelper helper = new ProgressHelper(null);
		// Collect every emitted event so counts/timing can be asserted below.
		final List<ProgressEvent> events = new ArrayList<ProgressEvent>();

		helper.addProgressListener(new ProgressListener() {

			@Override
			public void progressUpdate(ProgressEvent event) {
				events.add(event);
			}

		});

		helper.start(totalSeeds, maxNFE);

		for (int i = 0; i < totalSeeds; i++) {
			for (int j = 0; j <= maxNFE-frequency; j += frequency) {
				// Busy-wait to simulate 'time' ms of work per step
				// (Thread.sleep would be less precise here).
				long start = System.nanoTime();

				while (System.nanoTime() - start < time*1000000) {
					//loop for the given amount of time
				}

				helper.setCurrentNFE(j+frequency);
			}

			helper.nextSeed();
		}

		// One event per setCurrentNFE call plus one per nextSeed call.
		int expectedCount = totalSeeds * (maxNFE/frequency + 1);
		// nextSeed events take no simulated time, hence the subtraction.
		double expectedTime = ((expectedCount - totalSeeds) * time) / 1000.0;
		// Allow 5% tolerance on all wall-clock comparisons.
		double error = 0.05 * expectedTime;

		Assert.assertEquals(expectedCount, events.size());
		Assert.assertFalse(events.get(0).isSeedFinished());
		Assert.assertTrue(events.get(events.size() - 1).isSeedFinished());

		// test seed count
		Assert.assertEquals(1, events.get(0).getCurrentSeed());
		Assert.assertEquals(totalSeeds/2 + 1,
				events.get(events.size()/2).getCurrentSeed());
		Assert.assertEquals(totalSeeds,
				events.get(events.size() - 2).getCurrentSeed());

		// test elapsed time
		Assert.assertEquals(expectedTime / 2.0,
				events.get(events.size()/2 - 1).getElapsedTime(), error);
		Assert.assertEquals(expectedTime,
				events.get(events.size()-1).getElapsedTime(), error);

		// test remaining time
		Assert.assertEquals(expectedTime / 2.0,
				events.get(events.size()/2 - 1).getRemainingTime(), error);
		Assert.assertEquals(0.0,
				events.get(events.size() - 1).getRemainingTime(), error);
		// invariant: elapsed + remaining at the midpoint ~= total elapsed
		Assert.assertEquals(events.get(events.size()-1).getElapsedTime(),
				events.get(events.size()/2).getElapsedTime() +
				events.get(events.size()/2).getRemainingTime(), error);

		// test percent complete
		Assert.assertEquals(0.5,
				events.get(events.size()/2 - 1).getPercentComplete(), 0.05);
		Assert.assertEquals(1.0,
				events.get(events.size() - 1).getPercentComplete(), 0.05);

		// test constant attributes
		for (ProgressEvent event : events) {
			Assert.assertEquals(totalSeeds, event.getTotalSeeds());
			Assert.assertEquals(maxNFE, event.getMaxNFE());
		}
	}

	/**
	 * Tests if progress reporting handles situations where no change in NFE
	 * occurs.
	 *
	 * @throws InterruptedException if the simulation failed to execute
	 *         properly due to an interruption
	 */
	@Test
	public void testNoProgress() throws InterruptedException {
		ProgressHelper helper = new ProgressHelper(null);
		final List<ProgressEvent> events = new ArrayList<ProgressEvent>();

		helper.addProgressListener(new ProgressListener() {

			@Override
			public void progressUpdate(ProgressEvent event) {
				events.add(event);
			}

		});

		helper.start(10, 100000);
		helper.setCurrentNFE(0);
		Thread.sleep(1000);
		helper.setCurrentNFE(0);
		Thread.sleep(1000);
		helper.nextSeed();

		// With zero NFE progress the remaining time is undefined (NaN)
		// until the seed advances.
		Assert.assertEquals(3, events.size());
		Assert.assertTrue(Double.isNaN(events.get(0).getRemainingTime()));
		Assert.assertTrue(Double.isNaN(events.get(1).getRemainingTime()));
		Assert.assertTrue(events.get(2).getRemainingTime() > 0.0);
	}

	/**
	 * Tests if progress reporting handles the situation where no change in
	 * time occurs.
	 *
	 * @throws InterruptedException if the simulation failed to execute
	 *         properly due to an interruption
	 */
	@Test
	public void testNoTime() throws InterruptedException {
		ProgressHelper helper = new ProgressHelper(null);
		final List<ProgressEvent> events = new ArrayList<ProgressEvent>();

		helper.addProgressListener(new ProgressListener() {

			@Override
			public void progressUpdate(ProgressEvent event) {
				events.add(event);
			}

		});

		helper.start(10, 100000);
		helper.setCurrentNFE(0);
		helper.setCurrentNFE(50000);
		Thread.sleep(1000);
		helper.setCurrentNFE(100000);

		// First two updates arrive with (near) zero elapsed time, so no
		// rate can be computed and remaining time is NaN.
		Assert.assertEquals(3, events.size());
		Assert.assertTrue(Double.isNaN(events.get(0).getRemainingTime()));
		Assert.assertTrue(Double.isNaN(events.get(1).getRemainingTime()));
		Assert.assertTrue(events.get(2).getRemainingTime() > 0.0);
	}

}
|
def updateBitAlternative(c: int, val: int, pos: int) -> int:
    """Return ``c`` with the bit at index ``pos`` set to ``val`` (0 or 1).

    Bug fix: the previous implementation ignored ``pos`` entirely and
    computed ``(c << 1) | val`` (append a bit), which contradicts the
    name and signature. Now it clears the target bit with a mask and
    ORs in the new value shifted into place.
    """
    mask = ~(1 << pos)
    return (c & mask) | (val << pos)
<reponame>chapman-cs510-2017f/cw-03-cpcw2
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Name: <NAME> & <NAME>
# Student ID: 2298930 & 1450263
# Email: <EMAIL> & <EMAIL>
# Course: CS510 Fall 2017
# Assignment: Classwork 3
###
def fibonacci(n):
    """Return the first ``n`` Fibonacci numbers as a list ([1, 1, 2, ...]).

    Bug fixes: the original printed a warning for non-numeric input but
    kept going (crashing later on the comparison), and for ``n <= 0``
    it printed a warning yet fell through and returned ``[1, 1]``.
    Invalid input now reports the problem and returns an empty list.
    """
    try:
        n = int(n)
    except (TypeError, ValueError):
        print("this isn't a number")
        return []
    if n <= 0:
        print("you put in a negative number or 0")
        return []
    # Seed with the first two terms, then extend until n terms exist.
    seq = [1, 1]
    while len(seq) < n:
        seq.append(seq[-1] + seq[-2])
    # Slice handles n == 1, where the seed is one element too long.
    return seq[:n]
|
#!/bin/sh
# Conda build script for Trans-ABySS: install the bundled binaries,
# convert the Python 2 sources to Python 3 with 2to3, repackage the
# utilities as a `transabyss` package, and pip-install it.
set -x -e
TOPDIR=`pwd`
mkdir -p $PREFIX/bin
chmod 777 bin/*
cp bin/* $PREFIX/bin
# Move the entry-point scripts into scripts/ so 2to3 can rewrite them.
mkdir -p $TOPDIR/scripts
mv transabyss $TOPDIR/scripts/
mv transabyss-merge $TOPDIR/scripts/
cd $TOPDIR/scripts/
2to3 -w *
cd $TOPDIR/utilities
2to3 -w *
cd $TOPDIR
cd utilities
# The helpers import each other as `utilities`; rewrite those imports
# to the installed package name `transabyss`.
sed -i.bak 's/from utilities import/from transabyss import/' *.py
cd $TOPDIR
mkdir transabyss
cp -rf utilities/* transabyss/
cp $RECIPE_DIR/setup.py ./
python setup.py build
$PYTHON -m pip install . --ignore-installed --no-deps -vv
|
<gh_stars>1-10
"use strict";

// Auto-generated icon definition: an SVG speech bubble (64x64) with
// three dots, expressed as a nested {name, attribs, children} tree.
// Do not hand-edit the path data.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.speech_bubble_2 = void 0;
var speech_bubble_2 = {
  "viewBox": "0 0 64 64",
  "children": [{
    "name": "g",
    "attribs": {
      "id": "SPEECH_BUBBLE_2_1_",
      "enable-background": "new "
    },
    "children": [{
      "name": "g",
      "attribs": {
        "id": "SPEECH_BUBBLE_2"
      },
      "children": [{
        "name": "g",
        "attribs": {
          "id": "SPEECH_BUBBLE_2"
        },
        "children": [{
          "name": "g",
          "attribs": {},
          "children": [{
            "name": "g",
            "attribs": {},
            "children": [{
              "name": "path",
              "attribs": {
                "d": "M33.697,3.963c-18.83,0-33.738,10.996-33.738,24.56c0,7.681,4.609,14.54,12.274,19.043\r\n\t\t\t\tc1.736,1.02,1.934,8.675-7.831,13.297c0,0,13.825,2.18,22.591-8.902c2.167,0.311,4.409,0.975,6.704,0.975\r\n\t\t\t\tc18.83,0,30.307-10.848,30.307-24.412C64.004,14.959,52.527,3.963,33.697,3.963z M17,35c-3.314,0-6-2.686-6-6s2.686-6,6-6\r\n\t\t\t\ts6,2.686,6,6S20.314,35,17,35z M32,35c-3.314,0-6-2.686-6-6s2.686-6,6-6c3.314,0,6,2.686,6,6S35.314,35,32,35z M47,35\r\n\t\t\t\tc-3.314,0-6-2.686-6-6s2.686-6,6-6s6,2.686,6,6S50.314,35,47,35z"
              },
              "children": [{
                "name": "path",
                "attribs": {
                  "d": "M33.697,3.963c-18.83,0-33.738,10.996-33.738,24.56c0,7.681,4.609,14.54,12.274,19.043\r\n\t\t\t\tc1.736,1.02,1.934,8.675-7.831,13.297c0,0,13.825,2.18,22.591-8.902c2.167,0.311,4.409,0.975,6.704,0.975\r\n\t\t\t\tc18.83,0,30.307-10.848,30.307-24.412C64.004,14.959,52.527,3.963,33.697,3.963z M17,35c-3.314,0-6-2.686-6-6s2.686-6,6-6\r\n\t\t\t\ts6,2.686,6,6S20.314,35,17,35z M32,35c-3.314,0-6-2.686-6-6s2.686-6,6-6c3.314,0,6,2.686,6,6S35.314,35,32,35z M47,35\r\n\t\t\t\tc-3.314,0-6-2.686-6-6s2.686-6,6-6s6,2.686,6,6S50.314,35,47,35z"
                },
                "children": []
              }]
            }]
          }]
        }]
      }]
    }]
  }]
};
exports.speech_bubble_2 = speech_bubble_2;
#!/bin/bash
# List all Python source files (*.py, *.pyx, *.pxd) under the given
# directories; defaults to `src` and `tests` when none are supplied.
#
# Bug fix: "$@" and "${dirs[@]}" are now quoted so directory names
# containing spaces or glob characters survive word splitting.
dirs=("$@")
[ ${#dirs[@]} -eq 0 ] && dirs=(src tests)
find "${dirs[@]}" -name '*.py' -o -name '*.pyx' -o -name '*.pxd'
|
# File: Q (Python 2.4)
from direct.directnotify import DirectNotifyGlobal
from pirates.quest import QuestLadderDB
from pirates.uberdog import DistributedInventoryBase
from pirates.piratesbase.PLocalizer import NPCNames
class QuestStatus:
notify = DirectNotifyGlobal.directNotify.newCategory('QuestStatus')
    def __init__(self, av):
        """Track quest-ladder state for avatar ``av``.

        Ladder construction is deferred until the avatar's inventory
        arrives (requested asynchronously below via createLadders).
        """
        self.av = av
        # ladderId -> dynamic ladder copy built from QuestLadderDB DNA
        self.ladders = { }
        # container name -> quest-choice container currently offered
        self.choiceContainers = { }
        # async inventory fetch; createLadders runs when it resolves
        self.invRequest = DistributedInventoryBase.DistributedInventoryBase.getInventory(self.av.inventoryId, self.createLadders)
        self.NPCInteractMode = False
        self.cacheHistoryMode = False
        self.ladderDeleteList = []
        self.initialized = False
    def delete(self):
        """Tear down all ladder state and cancel any pending inventory fetch."""
        # NOTE(review): decompiler artifact — an except guarding a bare
        # ``pass`` can never fire, so QuestStatus_deleted is never set.
        # Likely a mangled double-delete guard; confirm against the
        # original source before changing.
        try:
            pass
        except:
            self.QuestStatus_deleted = 1
        self.choiceContainers = { }
        for ladder in self.ladders.values():
            ladder.destroy()
        self.ladders = { }
        self.ladderDeleteList = []
        self.av = None
        DistributedInventoryBase.DistributedInventoryBase.cancelGetInventory(self.invRequest)
def forceInit(self):
if not self.initialized:
inv = localAvatar.getInventory()
if inv:
self.createLadders(inv)
self.initialized = True
else:
self.notify.warning('inventory not available yet!')
def createLadders(self, inventory):
if inventory:
for quest in inventory.getQuestList():
self.assignQuest(quest)
ladderChoiceContainers = []
for ladder in self.ladders.values():
ladder.getChoiceContainers(ladderChoiceContainers)
choiceInts = self.av.getCurrentQuestChoiceContainers()
for container in ladderChoiceContainers:
containerInt = container.getQuestInt()
if containerInt in choiceInts:
self.choiceContainers[container.getName()] = container
continue
def assignQuest(self, quest, populateHistory = False):
questId = quest.getQuestId()
for (ladderId, ladderDNA) in QuestLadderDB.FameQuestLadderDict.items():
if ladderDNA.hasQuest(questId):
if not self.ladders.has_key(ladderId):
self.ladders[ladderId] = ladderDNA.constructDynamicCopy(self.av)
ladder = self.ladders[ladderId]
if ladder.linkQuest(quest):
if populateHistory:
questStub = ladder.getQuestStub(questId)
self.cacheHistoryMode = True
questStub.completePreviousContainers()
self.cacheHistoryMode = False
ladder.linkQuest(quest)
for (ladderId, ladderDNA) in QuestLadderDB.FortuneQuestLadderDict.items():
if ladderDNA.hasQuest(questId):
if not self.ladders.has_key(ladderId):
self.ladders[ladderId] = ladderDNA.constructDynamicCopy(self.av)
ladder = self.ladders[ladderId]
if ladder.linkQuest(quest):
if populateHistory:
questStub = ladder.getQuestStub(questId)
self.cacheHistoryMode = True
questStub.completePreviousContainers()
self.cacheHistoryMode = False
ladder.linkQuest(quest)
if populateHistory:
self.writeHistory()
def handleQuestDropped(self, droppedQuestId):
for ladder in self.ladders.values():
if ladder.hasQuest(droppedQuestId):
self.clearLadderFromHistory(ladder)
self.deleteLadder(ladder.getName())
continue
def handleLadderComplete(self, ladder):
ladderName = ladder.getName()
if not self.ladders.has_key(ladderName):
self.notify.warning('%s not in ladders dict!' % ladderName)
ladder.destroy()
else:
self.ladderDeleteList.append(ladderName)
def deleteLadder(self, ladderName):
if self.ladders.has_key(ladderName):
self.ladders[ladderName].destroy()
del self.ladders[ladderName]
else:
self.notify.warning('%s not in ladders dict!' % ladderName)
def getCurrentQuests(self):
inventory = self.av.getInventory()
if not inventory:
self.notify.warning('av: %s has no inventory!' % self.av.getDoId())
return []
quests = inventory.getQuestList()
if len(quests) == 0:
self.notify.warning('av: %s has no active quests!' % self.av.getDoId())
return []
return quests
def getCurrentQuest(self, questId):
quests = self.getCurrentQuests()
for q in quests:
if q.questId == questId:
return q
continue
def addCurrentQuestChoiceContainer(self, container):
nameInt = container.getQuestInt()
containers = self.av.getCurrentQuestChoiceContainers()
if nameInt in containers:
self.notify.warning('%d already in choice container list!' % nameInt)
return None
containers.append(nameInt)
if self.cacheHistoryMode == False:
self.av.b_setCurrentQuestChoiceContainers(containers)
else:
self.av.setCurrentQuestChoiceContainers(containers)
self.choiceContainers[container.getName()] = container
def removeCurrentQuestChoiceContainer(self, container):
nameInt = container.getQuestInt()
containers = self.av.getCurrentQuestChoiceContainers()
if nameInt in containers:
containers.remove(nameInt)
if self.cacheHistoryMode == False:
self.av.b_setCurrentQuestChoiceContainers(containers)
else:
self.av.setCurrentQuestChoiceContainers(containers)
name = container.getName()
if self.choiceContainers.has_key(name):
del self.choiceContainers[name]
def clearHistory(self):
ladderHistory = self.av.getQuestLadderHistory()
if len(ladderHistory):
self.av.b_setQuestLadderHistory([])
def writeHistory(self):
ladderHistory = self.av.getQuestLadderHistory()
self.av.b_setQuestLadderHistory(ladderHistory)
choiceContainers = self.av.getCurrentQuestChoiceContainers()
self.av.b_setCurrentQuestChoiceContainers(choiceContainers)
def removeFromHistory(self, ladder):
ladderHistory = self.av.getQuestLadderHistory()
ladderInt = ladder.getQuestInt()
if ladderInt in ladderHistory:
ladderHistory.remove(ladderInt)
self.av.setQuestLadderHistory(ladderHistory)
def getClearedLadderFromHistory(self, ladder, ladderHistory):
ladderInt = ladder.getQuestInt()
if ladderInt in ladderHistory:
ladderHistory.remove(ladderInt)
for container in ladder.getContainers():
ladderHistory = self.getClearedLadderFromHistory(container, ladderHistory)
return ladderHistory
def clearLadderFromHistory(self, ladder):
ladderHistory = self.av.getQuestLadderHistory()
ladderHistory = self.getClearedLadderFromHistory(ladder, ladderHistory)
self.av.setQuestLadderHistory(ladderHistory)
def updateHistory(self, completedContainer):
if completedContainer.isChoice():
self.removeCurrentQuestChoiceContainer(completedContainer)
ladderHistory = self.av.getQuestLadderHistory()
downstreamContainers = []
completedContainer.getDownstreamContainers(downstreamContainers)
for container in downstreamContainers:
questInt = container.getQuestInt()
if questInt in ladderHistory:
self.notify.warning('For avatar: %s; Purging child questInt: %s' % (self.av.doId, questInt))
ladderHistory.remove(questInt)
continue
newQuestInt = completedContainer.getQuestInt()
if newQuestInt not in ladderHistory:
ladderHistory.append(newQuestInt)
else:
self.notify.warning('%d already in ladder history' % newQuestInt)
if self.cacheHistoryMode == False:
self.av.b_setQuestLadderHistory(ladderHistory)
else:
self.av.setQuestLadderHistory(ladderHistory)
def _QuestStatus__getDeepestChoiceContainerWithGiver(self, giverId):
deepestContainer = None
for container in self.choiceContainers.values():
if giverId == container.getGiverId():
deepestContainer = container
continue
return deepestContainer
def getQuestOffersFromGiver(self, giverId):
quests = self.getCurrentQuests()
if not quests:
return (None, None, 0)
container = self._QuestStatus__getDeepestChoiceContainerWithGiver(giverId)
if not container:
return (None, None, 0)
offers = container.getValidContainers()
finalOffers = []
completedLadders = self.av.getQuestLadderHistory()
for offer in offers:
if offer.getQuestInt() not in completedLadders:
finalOffers = [
offer] + finalOffers
continue
totalOffers = container.getContainers()
numIncomplete = 0
for offer in totalOffers:
if offer.getQuestInt() not in completedLadders:
numIncomplete = numIncomplete + 1
continue
numAssignedIncomplete = numIncomplete - len(finalOffers)
if len(finalOffers) == 0:
pass
1
return (finalOffers, container, numAssignedIncomplete)
def hasLadderQuest(self, quest):
return self.hasLadderQuestId(quest.getQuestId())
def hasLadderQuestId(self, questId):
for currLadder in self.ladders.values():
if currLadder.hasQuest(questId):
return True
continue
return False
def getContainer(self, name):
for ladder in self.ladders.values():
ctr = ladder.getContainer(name)
if ctr:
return ctr
continue
def getLadderIdWithQuestId(self, questId):
for ladder in self.ladders.values():
if ladder.hasQuest(questId):
return ladder.getName()
continue
def hasQuestIdLadderId(self, questId, ladderId):
ladder = self.ladders.get(ladderId)
if ladder:
return ladder.hasQuest(questId)
else:
self.notify.warning('%s not in ladder list!' % ladderId)
return False
def getQuestStub(self, questId):
for currLadder in self.ladders.values():
stub = currLadder.getQuestStub(questId)
if stub:
return stub
continue
def getNextQuestId(self, questId):
for currLadder in self.ladders.values():
if currLadder.hasQuest(questId):
return currLadder.getNextQuestId(questId)
continue
def getSiblingQuestIds(self, questId):
for currLadder in self.ladders.values():
if currLadder.hasQuest(questId):
return currLadder.getSiblingQuestIds(questId)
continue
return []
def getCompletedContainer(self, questId, completedStubCount):
ladderId = self.av.questStatus.getLadderIdWithQuestId(questId)
container = self.ladders[ladderId].getContainer(questId)
lastCompleted = container
while lastCompleted.isComplete():
if lastCompleted.parent and lastCompleted.parent.isComplete():
lastCompleted = lastCompleted.parent
continue
return lastCompleted
ladderHistory = self.av.getQuestLadderHistory()
for containerInt in ladderHistory:
for ladder in self.ladders.values():
container = ladder.getContainerInt(containerInt)
if container:
includeSelf = True
if completedStubCount > 1:
if questId == container.getName():
self.notify.debug('getCompletedContainer().questId Excluded: %s' % questId)
self.notify.debug('getCompletedContainer().completedStubCount: %s' % completedStubCount)
includeSelf = False
if container and container.hasQuest(questId) and includeSelf:
ladderName = ladder.getName()
if ladderName in self.ladderDeleteList:
self.deleteLadder(ladderName)
self.ladderDeleteList.remove(ladderName)
self.notify.debug('getCompletedContainer().container.getName() Returned: %s' % container.getName())
return container
def dropSameLadderQuests(self, questId):
droppedQuests = []
for (ladderId, ladderDNA) in QuestLadderDB.FameQuestLadderDict.items():
if ladderDNA.hasQuest(questId):
currQuests = self.av.getInventory().getQuestList()
for quest in currQuests:
if ladderDNA.hasQuest(quest.getQuestId()):
droppedQuests.append(quest.getDeletedEventString())
simbase.air.questMgr.dropQuest(self.av, quest)
continue
for (ladderId, ladderDNA) in QuestLadderDB.FortuneQuestLadderDict.items():
if ladderDNA.hasQuest(questId):
currQuests = self.av.getInventory().getQuestList()
for quest in currQuests:
if ladderDNA.hasQuest(quest.getQuestId()):
droppedQuests.append(quest.getDeletedEventString())
simbase.air.questMgr.dropQuest(self.av, quest)
continue
return droppedQuests
def setNPCInteractMode(self, mode):
self.NPCInteractMode = mode
def getNPCInteractMode(self):
return self.NPCInteractMode
def getFortuneOffers(self, giverId):
offers = []
for ladder in QuestLadderDB.FortuneQuestLadderDict.values():
if giverId == ladder.getGiverId():
offers.append(ladder)
continue
return offers
|
"use strict";

const fs = require("fs");

// Read the page once at module load; every invocation serves the same bytes.
const html = fs.readFileSync(`${__dirname}/index.html`).toString();

// Function-handler entry point: (context, callback) -> callback(err, body).
module.exports = (context, callback) => callback(undefined, html);
|
#!/bin/sh
# Launch the codespark judge agent container.
#
# Usage: run-agent.sh [BUILD_VERSION]
#   BUILD_VERSION defaults to today's date (YYYYMMDD) when omitted.
BUILD_VERSION=$1
if [ -z "$BUILD_VERSION" ]
then
    BUILD_VERSION=$(date +"%Y%m%d")
fi

# Fix: quote the $BUILD_VERSION expansions so an unexpected value with
# whitespace cannot split the image-tag arguments.
# NOTE(review): REDIS_PASSWORD is committed in plain text here — consider
# sourcing it from the environment or a secrets store instead.
docker run -d \
    --restart=always \
    --name agent \
    -p 8001:8001 \
    -e MAPI_HOST=10.32.2.43:8000 \
    -e JUDGE_IMAGE_JAVASCRIPT="codespark-judge-javascript:1.0-$BUILD_VERSION" \
    -e CONCURRENT=4 \
    -e REDIS_PORT_6379_TCP_ADDR=10.32.2.43 \
    -e REDIS_PASSWORD=xA123456 \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v /proc:/host/proc \
    --link redis:redis \
    "codespark-judge-agent:1.0-$BUILD_VERSION"
<gh_stars>1-10
import os
import glob
from PIL import Image, ImageFile
from joblib import Parallel, delayed
from skimage import exposure
import numpy as np
from tqdm import tqdm
# --- Configuration: source and destination directory layout -----------------
inputfolder="source_data/data/" #enter the name of the folder where test and train folders are saved
output_folder="source_data/" #enter the directory where all the processed data is to be stored
#inside it 3 directories will be created to store, equalized, resized images, and npy files
#equalized images are already resized see line 39 if you don't want them resized
outputfolder_processed=os.path.join(output_folder,"processed/") #name of the output folder for processed images
outputfolder_resized=os.path.join(output_folder,"resized/") #name of the output folder for resized images
outputfolder_npy=os.path.join(output_folder,"npy/") #name of the output folder for npy files

# Cartesian product of split x class, e.g. ".../test/benign/". The trailing
# slashes matter: savenpyfiles derives the .npy name from path components.
#lists of test/benign test/malignant etc..
inputfolders=[os.path.join(inputfolder,i,j) for i in ["test/","train/"] for j in ["benign/","malignant/"]]
outputfolders_p=[os.path.join(outputfolder_processed,i,j) for i in ["test/","train/"] for j in ["benign/","malignant/"]]
outputfolders_r=[os.path.join(outputfolder_resized,i,j) for i in ["test/","train/"] for j in ["benign/","malignant/"]]

# this is for if output folders don't already exist, then to create them
for folder in outputfolders_p+outputfolders_r+[outputfolder_npy]:
    if not os.path.exists(folder):
        os.makedirs(folder)
def resize_image(path, outfolder_p, outfolder_r, res):
    """Save two variants of one image: a plain bilinear resize and a
    CLAHE-equalized copy (also resized).

    path        -- source image file
    outfolder_p -- destination for the equalized ("processed") variant
    outfolder_r -- destination for the plainly resized variant
    res         -- target size tuple passed to PIL's resize
    """
    name = os.path.basename(path)
    original = Image.open(path)

    # Plain resize of the untouched image.
    resized = original.resize(res, resample=Image.BILINEAR)

    # Adaptive histogram equalization on the full-resolution pixels; the
    # result is float in [0, 1], so scale back to 8-bit RGB.
    equalized = exposure.equalize_adapthist(
        np.asarray(original, dtype='uint8'), clip_limit=0.03)
    processed = Image.fromarray((equalized * 255).astype('uint8'), 'RGB')
    # comment next line to not resize equalized images
    processed = processed.resize(res, resample=Image.BILINEAR)

    processed.save(os.path.join(outfolder_p, name))
    resized.save(os.path.join(outfolder_r, name))
def process_images(inputfolders, outputfolders_p, outputfolders_r, res):
    """Run resize_image over every *.jpg in each of the four split/class
    folders, eight images at a time via joblib."""
    for folder, out_p, out_r in zip(inputfolders, outputfolders_p, outputfolders_r):
        images = glob.glob(os.path.join(folder, "*.jpg"))
        # Progress label like "test benign images".
        label = " ".join(folder.split('/')[-3:-1]) + " images"
        Parallel(n_jobs=8)(
            delayed(resize_image)(img, out_p, out_r, res)
            for img in tqdm(images, desc=label)
        )
def savenpyfiles(inputfolders, outfolder):
    """Stack every image in each folder into a single uint8 array and save
    it to outfolder as <variant>_<split>_<class>.npy."""
    for folder in inputfolders:
        # name for npy file is created based on where the data comes from
        npyfilename = "_".join(folder.split('/')[-4:-1]) + ".npy"

        def read(imname):
            return np.asarray(Image.open(imname).convert("RGB"))

        frames = [
            read(os.path.join(folder, filename))
            for filename in tqdm(os.listdir(folder), desc=npyfilename)
        ]
        np.save(os.path.join(outfolder, npyfilename),
                np.array(frames, dtype='uint8'))
if __name__=="__main__":
    #comment and uncomment functions based on requirement
    # Stage 1: equalize + resize every image to 112x112.
    process_images(inputfolders,outputfolders_p,outputfolders_r,res=(112,112))
    print("")
    # Stage 2: bundle each folder into a single .npy array per split/class.
    savenpyfiles(outputfolders_p,outputfolder_npy) #save npy for processed files
    savenpyfiles(outputfolders_r,outputfolder_npy) #save npy for resized files
    savenpyfiles(inputfolders,outputfolder_npy) #save npy for unprocessed files
|
<filename>driver/src/data/deepInsertService.ts
import { MetadataRepository, RecordRepository } from '../repositories';
import { DeepInsertResponse } from './deepInsertResponse';
import { Record } from './record';
/**
* Parses deep insert objects and returns references to all created records.
*
* @export
* @class DeepInsertService
*/
export default class DeepInsertService {
  private readonly recordRepository: RecordRepository;

  private readonly metadataRepository: MetadataRepository;

  /**
   * Creates an instance of DeepInsertService.
   * @param {MetadataRepository} metadataRepository A metadata repository.
   * @param {RecordRepository} recordRepository A record repository.
   * @memberof DeepInsertService
   */
  constructor(metadataRepository: MetadataRepository, recordRepository: RecordRepository) {
    this.metadataRepository = metadataRepository;
    this.recordRepository = recordRepository;
  }

  /**
   * A deep insert which returns a reference to all created records.
   *
   * Order of operations: (1) rewrite '@alias.bind' properties into
   * '@odata.bind' references against already-created records, (2) recursively
   * create many-to-one (lookup) records so their ids can be bound, (3) upsert
   * the root record, (4) recursively create one-to-many / many-to-many
   * collection records pointing back at the root.
   *
   * @param {string} logicalName The entity logical name of the root record.
   * @param {Record} record The deep insert object.
   * @param dataByAlias Previously created records keyed by their alias.
   * @returns {Promise<DeepInsertResponse>} An async result with references to created records.
   * @memberof DeepInsertService
   */
  public async deepInsert(
    logicalName: string,
    record: Record,
    dataByAlias: { [alias: string]: Xrm.LookupValue },
  ): Promise<DeepInsertResponse> {
    // NOTE(review): `record` is mutated in place (properties are deleted and
    // rewritten below), not copied — callers should not reuse the object.
    const recordToCreate = record;
    const associatedRecords: { alias?: string, reference: Xrm.LookupValue }[] = [];

    // Step 1: resolve alias bindings against records created earlier.
    const aliasedRecordsByNavProp = DeepInsertService.getAliasedLookups(recordToCreate);
    await Promise.all(Object.keys(aliasedRecordsByNavProp).map(async (aliasedRecordNavProp) => {
      const alias = recordToCreate[aliasedRecordNavProp] as string;
      const reference = dataByAlias[alias];
      if (!reference) {
        throw new Error(`Unable to bind ${aliasedRecordNavProp} as a record with the alias '${alias}' has not been created.`);
      }
      const set = await this.metadataRepository
        .getEntitySetForEntity(dataByAlias[alias].entityType);
      delete recordToCreate[aliasedRecordNavProp];
      recordToCreate[aliasedRecordNavProp.replace('@alias.bind', '@odata.bind')] = `/${set}(${dataByAlias[alias].id})`;
    }));

    // Step 2: create lookup (many-to-one) records first so the root record
    // can reference their ids.
    const lookupRecordsByNavProp = DeepInsertService.getManyToOneRecords(recordToCreate);
    const singleNavProps = Object.keys(lookupRecordsByNavProp);
    await Promise.all(singleNavProps.map(async (singleNavProp) => {
      const res = await this.createLookupRecord(
        logicalName, recordToCreate, lookupRecordsByNavProp, singleNavProp, dataByAlias,
      );
      associatedRecords.push(res.record, ...res.associatedRecords);
    }));

    // Step 3: strip collection properties, then upsert the root record.
    const collRecordsByNavProp = DeepInsertService.getOneToManyRecords(recordToCreate);
    Object.keys(collRecordsByNavProp).forEach((collNavProp) => delete recordToCreate[collNavProp]);
    const recordToCreateRef = await this.recordRepository.upsertRecord(logicalName, recordToCreate);

    // Step 4: create collection records referencing the now-existing root.
    await Promise.all(Object.keys(collRecordsByNavProp).map(async (collNavProp) => {
      const result = await this.createCollectionRecords(
        logicalName, recordToCreateRef, collRecordsByNavProp, collNavProp, dataByAlias,
      );
      associatedRecords.push(...result);
    }));

    return {
      associatedRecords,
      record: {
        alias: recordToCreate['@alias'] as string | undefined,
        reference: recordToCreateRef,
      },
    };
  }

  /**
   * Returns the properties bound by alias ('@alias.bind') rather than id.
   * NOTE(review): the values here are alias strings at runtime, but the
   * signature/casts say Record[] — this looks copied from
   * getOneToManyRecords; the typing is misleading though harmless.
   */
  private static getAliasedLookups(record: Record) {
    return Object.keys(record)
      .filter((key) => key.indexOf('@alias.bind') > -1)
      .reduce((prev, curr) => {
        // eslint-disable-next-line no-param-reassign
        prev[curr] = record[curr] as Record[];
        return prev;
      }, {} as { [navigationProperty: string]: Record[] });
  }

  /** Collection-valued (array) properties: one-to-many / many-to-many. */
  private static getOneToManyRecords(record: Record)
    : { [navigationProperty: string]: Record[] } {
    return Object.keys(record)
      .filter((key) => Array.isArray(record[key]))
      .reduce((prev, curr) => {
        // eslint-disable-next-line no-param-reassign
        prev[curr] = record[curr] as Record[];
        return prev;
      }, {} as { [navigationProperty: string]: Record[] });
  }

  /**
   * Nested single-object properties (lookups). Arrays, null, and Date values
   * are excluded — only plain child records qualify.
   */
  private static getManyToOneRecords(record: Record)
    : { [navigationProperty: string]: Record } {
    return Object.keys(record)
      .filter(
        (key) => typeof record[key] === 'object'
          && !Array.isArray(record[key])
          && record[key] !== null
          && !(record[key] instanceof Date),
      )
      .reduce((prev, curr) => {
        // eslint-disable-next-line no-param-reassign
        prev[curr] = record[curr] as Record;
        return prev;
      }, {} as { [navigationProperty: string]: Record });
  }

  /**
   * Deep-inserts a single lookup (many-to-one) child and rewrites the parent
   * entity's property into an '@odata.bind' reference to the created record.
   */
  private async createLookupRecord(
    logicalName: string,
    entity: Record,
    navigationPropertyMap: { [navigationProperty: string]: Record },
    singleNavProp: string,
    createdRecordsByAlias: { [alias: string]: Xrm.LookupValue },
  ): Promise<DeepInsertResponse> {
    const record = entity;
    delete record[singleNavProp];

    const entityName = await this.metadataRepository.getEntityForLookupProperty(
      logicalName, singleNavProp,
    );
    // Recurse: the child may itself contain nested records.
    const deepInsertResponse = await this.deepInsert(
      entityName, navigationPropertyMap[singleNavProp], createdRecordsByAlias,
    );
    const entitySet = await this.metadataRepository.getEntitySetForEntity(entityName);
    record[`${singleNavProp}@odata.bind`] = `/${entitySet}(${deepInsertResponse.record.reference.id})`;

    return deepInsertResponse;
  }

  /**
   * Dispatches a collection property to the one-to-many or many-to-many
   * creation path based on the relationship metadata.
   */
  private async createCollectionRecords(
    logicalName: string,
    parent: Xrm.LookupValue,
    navPropMap: { [navigationProperty: string]: Record[] },
    collNavProp: string,
    refsByAlias: { [alias: string]: Xrm.LookupValue },
  ): Promise<{ alias?: string, reference: Xrm.LookupValue }[]> {
    const relMetadata = await this.metadataRepository.getRelationshipMetadata(collNavProp);
    const set = await this.metadataRepository.getEntitySetForEntity(logicalName);

    if (DeepInsertService.isOneToManyMetadata(relMetadata)) {
      return this.createOneToManyRecords(
        relMetadata.ReferencingEntity, set, collNavProp, navPropMap, parent, refsByAlias,
      );
    }

    // Many-to-many: pick whichever side of the relationship isn't the parent.
    const entity = relMetadata.Entity1LogicalName !== logicalName
      ? relMetadata.Entity1LogicalName : relMetadata.Entity2LogicalName;

    return this.createManyToManyRecords(entity, collNavProp, navPropMap, parent, refsByAlias);
  }

  /**
   * Deep-inserts each child of a one-to-many collection, binding each child
   * back to the parent via the opposite (lookup) navigation property.
   */
  private async createOneToManyRecords(
    entity: string,
    entitySet: string,
    navProp: string,
    navPropMap: { [navProp: string]: Record[] },
    parent: Xrm.LookupValue,
    refsByAlias: { [alias: string]: Xrm.LookupValue },
  ): Promise<{ alias?: string, reference: Xrm.LookupValue }[]> {
    const oppNavProp = await this.metadataRepository
      .getLookupPropertyForCollectionProperty(navProp);

    const res = await Promise.all(navPropMap[navProp].map((oneToManyRecord) => {
      // eslint-disable-next-line no-param-reassign
      oneToManyRecord[`${oppNavProp}@odata.bind`] = `/${entitySet}(${parent.id})`;
      return this.deepInsert(entity, oneToManyRecord, refsByAlias);
    }));

    // Flatten each response into (created record + its associated records).
    return res.reduce<{ reference: Xrm.LookupValue; alias?: string; }[]>(
      (prev, curr) => prev.concat([curr.record, ...curr.associatedRecords]), [],
    );
  }

  /**
   * Deep-inserts each child of a many-to-many collection and associates it
   * with the parent through the relationship's navigation property.
   */
  private async createManyToManyRecords(
    entity: string,
    navProp: string,
    navPropMap: { [navProp: string]: Record[] },
    parent: Xrm.LookupValue,
    createdRecordsByAlias: { [alias: string]: Xrm.LookupValue },
  ): Promise<{ alias?: string, reference: Xrm.LookupValue }[]> {
    const result = await Promise.all(navPropMap[navProp].map(async (manyToManyRecord) => {
      const response = await this.deepInsert(entity, manyToManyRecord, createdRecordsByAlias);

      await this.recordRepository.associateManyToManyRecords(
        parent,
        [response.record.reference],
        navProp,
      );

      return [response.record, ...response.associatedRecords];
    }));

    // NOTE(review): Array.prototype.reduce with no initial value throws on an
    // empty array — presumably navPropMap[navProp] is never empty here.
    return result.reduce((prev, curr) => prev.concat(curr));
  }

  /** Type guard: narrows relationship metadata to the one-to-many shape. */
  private static isOneToManyMetadata(
    metadata: Xrm.Metadata.RelationshipMetadata,
  ): metadata is Xrm.Metadata.OneToNRelationshipMetadata {
    return metadata.RelationshipType === 'OneToManyRelationship';
  }
}
|
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#ifndef MODEL_ELECTRICLOADCENTERSTORAGELIIONNMCBATTERY_HPP
#define MODEL_ELECTRICLOADCENTERSTORAGELIIONNMCBATTERY_HPP
#include "ModelAPI.hpp"
#include "ElectricalStorage.hpp"
namespace openstudio {
namespace model {
class Schedule;
class ThermalZone;
namespace detail {
class ElectricLoadCenterStorageLiIonNMCBattery_Impl;
} // namespace detail
/** ElectricLoadCenterStorageLiIonNMCBattery is a ElectricalStorage that wraps the OpenStudio IDD object 'OS:ElectricLoadCenter:Storage:LiIonNMCBattery'. */
class MODEL_API ElectricLoadCenterStorageLiIonNMCBattery : public ElectricalStorage
{
 public:
  /** @name Constructors and Destructors */
  //@{

  explicit ElectricLoadCenterStorageLiIonNMCBattery(const Model& model);

  explicit ElectricLoadCenterStorageLiIonNMCBattery(const Model& model, const int numberofCellsinSeries, const int numberofStringsinParallel,
                                                    const double batteryMass, const double batterySurfaceArea);

  virtual ~ElectricLoadCenterStorageLiIonNMCBattery() {}

  //@}

  static IddObjectType iddObjectType();

  static std::vector<std::string> lifetimeModelValues();

  /** @name Getters */
  //@{

  // Convenience method to return the electricalLoadCenter on which it's assigned (optional)
  // In parent
  // boost::optional<ElectricLoadCenterDistribution> electricLoadCenterDistribution() const;

  Schedule availabilitySchedule() const;

  // Included in parent class, shouldn't need to define it here...
  //virtual boost::optional<ThermalZone> thermalZone() const override;

  double radiativeFraction() const;

  std::string lifetimeModel() const;

  int numberofCellsinSeries() const;

  int numberofStringsinParallel() const;

  double initialFractionalStateofCharge() const;

  double dctoDCChargingEfficiency() const;

  double batteryMass() const;

  double batterySurfaceArea() const;

  double batterySpecificHeatCapacity() const;

  double heatTransferCoefficientBetweenBatteryandAmbient() const;

  double fullyChargedCellVoltage() const;

  double cellVoltageatEndofExponentialZone() const;

  double cellVoltageatEndofNominalZone() const;

  double defaultNominalCellVoltage() const;

  double fullyChargedCellCapacity() const;

  double fractionofCellCapacityRemovedattheEndofExponentialZone() const;

  double fractionofCellCapacityRemovedattheEndofNominalZone() const;

  double chargeRateatWhichVoltagevsCapacityCurveWasGenerated() const;

  double batteryCellInternalElectricalResistance() const;

  //@}
  /** @name Setters */
  //@{

  bool setAvailabilitySchedule(Schedule& schedule);

  // Included in parent class, shouldn't need to define it...
  //virtual bool setThermalZone(ThermalZone& zone) override;
  //virtual void resetThermalZone() override;

  bool setRadiativeFraction(double radiativeFraction);

  bool setLifetimeModel(const std::string& lifetimeModel);

  bool setNumberofCellsinSeries(int numberofCellsinSeries);

  bool setNumberofStringsinParallel(int numberofStringsinParallel);

  bool setInitialFractionalStateofCharge(double initialFractionalStateofCharge);

  bool setDCtoDCChargingEfficiency(double dctoDCChargingEfficiency);

  bool setBatteryMass(double batteryMass);

  bool setBatterySurfaceArea(double batterySurfaceArea);

  bool setBatterySpecificHeatCapacity(double batterySpecificHeatCapacity);

  bool setHeatTransferCoefficientBetweenBatteryandAmbient(double heatTransferCoefficientBetweenBatteryandAmbient);

  // Fix: parameter was misnamed `fullyChargedCellCapacity` (copy-paste from
  // the capacity setter below); it sets the fully charged cell *voltage*.
  bool setFullyChargedCellVoltage(double fullyChargedCellVoltage);

  bool setCellVoltageatEndofExponentialZone(double cellVoltageatEndofExponentialZone);

  bool setCellVoltageatEndofNominalZone(double cellVoltageatEndofNominalZone);

  bool setDefaultNominalCellVoltage(double defaultNominalCellVoltage);

  bool setFullyChargedCellCapacity(double fullyChargedCellCapacity);

  bool setFractionofCellCapacityRemovedattheEndofExponentialZone(double fractionofCellCapacityRemovedattheEndofExponentialZone);

  bool setFractionofCellCapacityRemovedattheEndofNominalZone(double fractionofCellCapacityRemovedattheEndofNominalZone);

  bool setChargeRateatWhichVoltagevsCapacityCurveWasGenerated(double chargeRateatWhichVoltagevsCapacityCurveWasGenerated);

  bool setBatteryCellInternalElectricalResistance(double batteryCellInternalElectricalResistance);

  //@}
  /** @name Other */
  //@{

  //@}

 protected:
  /// @cond
  typedef detail::ElectricLoadCenterStorageLiIonNMCBattery_Impl ImplType;

  explicit ElectricLoadCenterStorageLiIonNMCBattery(std::shared_ptr<detail::ElectricLoadCenterStorageLiIonNMCBattery_Impl> impl);

  friend class detail::ElectricLoadCenterStorageLiIonNMCBattery_Impl;
  friend class Model;
  friend class IdfObject;
  friend class openstudio::detail::IdfObject_Impl;
  /// @endcond

 private:
  REGISTER_LOGGER("openstudio.model.ElectricLoadCenterStorageLiIonNMCBattery");
};
/** \relates ElectricLoadCenterStorageLiIonNMCBattery*/
typedef boost::optional<ElectricLoadCenterStorageLiIonNMCBattery> OptionalElectricLoadCenterStorageLiIonNMCBattery;
/** \relates ElectricLoadCenterStorageLiIonNMCBattery*/
typedef std::vector<ElectricLoadCenterStorageLiIonNMCBattery> ElectricLoadCenterStorageLiIonNMCBatteryVector;
} // namespace model
} // namespace openstudio
#endif // MODEL_ELECTRICLOADCENTERSTORAGELIIONNMCBATTERY_HPP
|
<filename>src/simple-tasks/Task38.java
/**
 * Task 38) Write a method that counts how often an element occurs in an array
 * For example:
 * int[] a = { 3, 5, 4, 6, 6, 2, 3, 4, 2, 3 }
 * countOccurs(a,3) // should return 3;
 */
public class Task38 {

    /**
     * Counts how often {@code target} occurs in {@code values}.
     *
     * @param values the array to search; must not be {@code null}
     * @param target the element to count
     * @return the number of elements in {@code values} equal to {@code target}
     */
    public static int countOccurs(int[] values, int target) {
        int count = 0;
        for (int value : values) {
            if (value == target) {
                count++;
            }
        }
        return count;
    }

    /** Small demonstration matching the task description. */
    public static void main(String[] args) {
        int[] a = { 3, 5, 4, 6, 6, 2, 3, 4, 2, 3 };
        System.out.println(countOccurs(a, 3)); // prints 3
    }
}
|
const fs = require('fs');
const path = require('path');
function YamlTemplateParser({ templatesPath }) {
  // List the templates directory, then load every file's text concurrently.
  async function parseYamlTemplatesFromPath() {
    const files = await fs.promises.readdir(templatesPath);
    return Promise.all(files.map(readTemplate));
  }

  // Read one template file and return its contents as a string.
  async function readTemplate(fileName) {
    const contents = await fs.promises.readFile(path.join(templatesPath, fileName));
    return contents.toString();
  }

  return {
    parseYamlTemplatesFromPath
  };
}
module.exports = {
YamlTemplateParser
} |
/// <summary>
/// Immutable snapshot of one city row (id, name, country code, population)
/// with a helper that renders it as an HTML table.
/// </summary>
public class City
{
    private int _id;
    private string _name;
    private string _code;
    private int _population;

    public City(int id, string name, string code, int pop)
    {
        _id = id;
        _name = name;
        _code = code;
        _population = pop;
    }

    public int GetCityId()
    {
        return _id;
    }

    public string GetCityName()
    {
        return _name;
    }

    public string GetCityCountryCode()
    {
        return _code;
    }

    public int GetCityPopulation()
    {
        return _population;
    }

    /// <summary>Renders this city as a two-row HTML table (header + values).</summary>
    public string GenerateHtmlTable()
    {
        // Literal layout kept identical to the original output.
        string htmlTable = "<table>\n" +
            " <tr>\n" +
            " <th>City ID</th>\n" +
            " <th>City Name</th>\n" +
            " <th>Country Code</th>\n" +
            " <th>Population</th>\n" +
            " </tr>\n" +
            " <tr>\n" +
            " <td>" + GetCityId() + "</td>\n" +
            " <td>" + GetCityName() + "</td>\n" +
            " <td>" + GetCityCountryCode() + "</td>\n" +
            " <td>" + GetCityPopulation() + "</td>\n" +
            " </tr>\n" +
            "</table>";
        return htmlTable;
    }
}
<reponame>suyash-naithani/apicurio-registry
/**
* @license
* Copyright 2020 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React from "react";
import {PureComponent, PureComponentProps, PureComponentState} from "../baseComponent";
import {Services} from "../../../services";
/**
* Properties
*/
export interface IfFeatureProps extends PureComponentProps {
feature: string;
is?: any;
isNot?: any;
children?: React.ReactNode;
}
/**
* State
*/
// tslint:disable-next-line:no-empty-interface
export interface IfFeatureState extends PureComponentState {
}
/**
* Wrapper around a set of arbitrary child elements and displays them only if the
* indicated feature matches the given criteria. Use this if you want to show/hide
* UI elements based on the configured application feature set.
*/
export class IfFeature extends PureComponent<IfFeatureProps, IfFeatureState> {
constructor(props: Readonly<IfFeatureProps>) {
super(props);
}
public render(): React.ReactElement {
if (this.accept()) {
return <React.Fragment children={this.props.children} />
} else {
return <React.Fragment />
}
}
protected initializeState(): IfFeatureState {
return {};
}
private accept(): boolean {
const features: any = Services.getConfigService().features();
const featureValue: any = features[this.props.feature];
if (this.props.is !== undefined) {
return featureValue === this.props.is;
} else if (this.props.isNot !== undefined) {
return featureValue !== this.props.isNot;
} else {
return featureValue !== undefined;
}
}
}
|
MainActivity.java
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
/**
 * Entry activity: inflates the main layout and, when the "next" button is
 * clicked, starts {@link SecondActivity}.
 */
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Wire the navigation button to open the second screen.
        View.OnClickListener openSecond = new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                startActivity(new Intent(MainActivity.this, SecondActivity.class));
            }
        };
        Button btnNext = (Button) findViewById(R.id.btnNext);
        btnNext.setOnClickListener(openSecond);
    }
}
SecondActivity.java
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
/**
 * Second screen of the app: simply inflates its layout; it has no behavior
 * of its own.
 */
public class SecondActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_second);
    }
}
// Copyright 2017 fatedier, <EMAIL>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vhost
import (
"bytes"
"io/ioutil"
"net/http"
frpLog "github.com/fatedier/frp/utils/log"
"github.com/fatedier/frp/utils/version"
)
var (
NotFoundPagePath = ""
)
const (
NotFound = `<!DOCTYPE html>
<html>
<head>
<title>Not Found</title>
<style>
body {
width: 35em;
margin: 0 auto;
font-family: Tahoma, Verdana, Arial, sans-serif;
}
</style>
</head>
<body>
<h1>The page you requested was not found.</h1>
<p>Sorry, the page you are looking for is currently unavailable.<br/>
Please try again later.</p>
<p>The server is powered by <a href="https://github.com/fatedier/frp">frp</a>.</p>
<p><em>Faithfully yours, frp.</em></p>
</body>
</html>
`
)
// getNotFoundPageContent returns the HTML body served for unmatched vhosts.
// When NotFoundPagePath is configured, the custom page is read from disk;
// on a read error (or with no path configured) the built-in NotFound page
// is returned instead.
func getNotFoundPageContent() []byte {
	if NotFoundPagePath == "" {
		return []byte(NotFound)
	}
	buf, err := ioutil.ReadFile(NotFoundPagePath)
	if err != nil {
		// Fall back to the built-in page rather than failing the request.
		frpLog.Warn("read custom 404 page error: %v", err)
		return []byte(NotFound)
	}
	return buf
}
// notFoundResponse builds the HTTP/1.0 404 response returned when no vhost
// matches the requested host.
func notFoundResponse() *http.Response {
	header := http.Header{}
	header.Set("server", "frp/"+version.Full())
	header.Set("Content-Type", "text/html")

	body := ioutil.NopCloser(bytes.NewReader(getNotFoundPageContent()))
	return &http.Response{
		Status:     "Not Found",
		StatusCode: 404,
		Proto:      "HTTP/1.0",
		ProtoMajor: 1,
		ProtoMinor: 0,
		Header:     header,
		Body:       body,
	}
}
// noAuthResponse builds the HTTP/1.1 401 response sent when a protected
// vhost is accessed without valid basic-auth credentials; the
// WWW-Authenticate header prompts the browser for a login.
func noAuthResponse() *http.Response {
	// Use http.Header/Set for consistency with notFoundResponse (Set also
	// canonicalizes the header key), instead of a raw map literal.
	header := make(http.Header)
	header.Set("WWW-Authenticate", `Basic realm="Restricted"`)
	return &http.Response{
		Status:     "401 Not authorized",
		StatusCode: 401,
		Proto:      "HTTP/1.1",
		ProtoMajor: 1,
		ProtoMinor: 1,
		Header:     header,
	}
}
|
<reponame>kuryaki/twilio-cli-core
const fs = require('fs-extra');
const path = require('path');
const shell = require('shelljs');
const MessageTemplates = require('./messaging/templates');
const CLI_NAME = 'twilio-cli';
/**
 * Plain value object for one stored project entry.
 * @param {string} id - identifier for this project entry
 * @param {string} accountSid - Twilio account SID the entry points at
 * @param {string} region - Twilio region (may be undefined)
 */
class ConfigDataProject {
  constructor(id, accountSid, region) {
    this.id = id;
    this.accountSid = accountSid;
    this.region = region;
  }
}
/**
 * In-memory model of the CLI configuration: the list of saved projects,
 * the id of the active project, and email settings.
 */
class ConfigData {
  constructor() {
    this.projects = [];
    this.email = {};
    this.activeProject = null;
  }

  /**
   * Builds an ephemeral project from TWILIO_* environment variables.
   * Returns undefined when TWILIO_ACCOUNT_SID is not set; an API key/secret
   * pair takes precedence over an auth token.
   */
  getProjectFromEnvironment() {
    const { TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN, TWILIO_API_KEY, TWILIO_API_SECRET } = process.env;

    if (!TWILIO_ACCOUNT_SID) {
      return;
    }

    if (TWILIO_API_KEY && TWILIO_API_SECRET) {
      return {
        // The '${...}' text is a literal id template, not interpolation.
        // eslint-disable-next-line no-template-curly-in-string
        id: '${TWILIO_API_KEY}/${TWILIO_API_SECRET}',
        accountSid: TWILIO_ACCOUNT_SID,
        apiKey: TWILIO_API_KEY,
        apiSecret: TWILIO_API_SECRET
      };
    }

    if (TWILIO_AUTH_TOKEN) {
      return {
        // eslint-disable-next-line no-template-curly-in-string
        id: '${TWILIO_ACCOUNT_SID}/${TWILIO_AUTH_TOKEN}',
        accountSid: TWILIO_ACCOUNT_SID,
        apiKey: TWILIO_ACCOUNT_SID,
        apiSecret: TWILIO_AUTH_TOKEN
      };
    }
  }

  /**
   * Resolves a project. With an explicit id, looks it up in the stored list;
   * otherwise tries the environment first and falls back to the active (or
   * first) stored project.
   */
  getProjectById(projectId) {
    if (projectId) {
      return this.projects.find(p => p.id === projectId);
    }
    const envProject = this.getProjectFromEnvironment();
    return envProject || this.getActiveProject();
  }

  /**
   * Returns the project whose id matches `activeProject`, else the first
   * stored project, else undefined when none are stored.
   */
  getActiveProject() {
    if (this.projects.length === 0) {
      return undefined;
    }
    let project;
    if (this.activeProject) {
      project = this.projects.find(p => p.id === this.activeProject);
    }
    return project || this.projects[0];
  }

  /** Removes the given project; clears the active marker if it pointed at it. */
  removeProject(projectToRemove) {
    this.projects = this.projects.filter(p => p.id !== projectToRemove.id);
    if (projectToRemove.id === this.activeProject) {
      this.activeProject = null;
    }
  }

  /** Adds a project, or updates accountSid/region in place when the id exists. */
  addProject(id, accountSid, region) {
    const existing = this.getProjectById(id);
    if (existing) {
      existing.accountSid = accountSid;
      existing.region = region;
    } else {
      this.projects.push(new ConfigDataProject(id, accountSid, region));
    }
  }

  /** Rehydrates this instance from a parsed config.json object. */
  loadFromObject(configObj) {
    this.email = configObj.email || {};
    this.activeProject = configObj.activeProject;
    configObj.projects = configObj.projects || [];
    configObj.projects.forEach(p => this.addProject(p.id, p.accountSid, p.region));
  }
}
/**
 * Loads and saves the CLI configuration file ("config.json") inside a given
 * directory.
 */
class Config {
  constructor(configDir) {
    this.configDir = configDir;
    this.filePath = path.join(configDir, 'config.json');
  }

  /**
   * Reads config.json into a ConfigData instance. Returns an empty
   * ConfigData when the file does not exist yet.
   */
  async load() {
    const configData = new ConfigData();
    if (fs.existsSync(this.filePath)) {
      configData.loadFromObject(await fs.readJSON(this.filePath));
    }
    return configData;
  }

  /**
   * Persists the given configuration (creating the config directory if
   * needed) and returns a user-facing confirmation message.
   */
  async save(userConfig) {
    // Migrate to 'fs.mkdirSync' with 'recursive: true' when no longer supporting Node8.
    shell.mkdir('-p', this.configDir);
    await fs.writeJSON(this.filePath, userConfig, { flag: 'w' });
    return MessageTemplates.configSaved({ path: this.filePath });
  }
}
// Public API of this module (ConfigDataProject is not exported).
module.exports = {
  CLI_NAME,
  Config,
  ConfigData
};
|
<filename>customs/test.js
// Manual smoke test: POST a JSON payload to the local /sendsms endpoint.
// (Production endpoint for reference: http://server.setlurs.com:8080/sendsms)
var request = require('request');

var body = { a: 'b' };
var options = {
  uri: 'http://localhost:8081/sendsms',
  method: 'POST',
  headers: {
    'content-type': 'application/json'
  },
  // BUG FIX: removed the bogus `path` option — `request` takes the full URL
  // from `uri`; `path` is not a supported option and was silently ignored.
  body: body,
  json: true
};

// Log the outcome so failures are visible instead of silently swallowed
// (the original call passed no callback, discarding all errors).
request(options, function (err, res) {
  if (err) {
    console.error('sendsms request failed:', err.message);
  } else {
    console.log('sendsms responded with status', res.statusCode);
  }
});
|
import tensorflow as tf
import tensorflow_hub as tf_hub

# Load a pre-trained English text-embedding module (NNLM, 128 dimensions)
# from TensorFlow Hub; downloaded on first use.
embedding = tf_hub.load('https://tfhub.dev/google/nnlm-en-dim128/2')

# Binary text classifier: raw string -> hub embedding -> two dense ReLU
# layers -> 2-way softmax.
model = tf.keras.Sequential([
    # Scalar string input: one text per example.
    tf.keras.layers.Input(shape=[], dtype=tf.string),
    # Pre-trained embedding layer loaded above.
    embedding,
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(64, activation='relu'),
    # Two output classes; pairs with sparse_categorical_crossentropy below.
    tf.keras.layers.Dense(2, activation='softmax')
])

# Integer class labels expected (not one-hot), hence the sparse loss.
model.compile(
    loss='sparse_categorical_crossentropy',
    optimizer='adam',
    metrics=['accuracy']
)

# NOTE(review): X_train and y_train are not defined anywhere in this file —
# they must be supplied by the surrounding context before this script can run.
model.fit(X_train,
          y_train,
          epochs=2)
#!/bin/bash
# Demo of bash suffix/prefix removal in parameter expansion.
VAR=hack.fun.book.txt
echo "VAR = $VAR"
# '%' is the non-greedy (shortest-match) operator:
# it strips the shortest suffix matching '.*', scanning from the right.
echo ${VAR%.*}
# '%%' is the greedy version: strips the longest suffix matching '.*'.
echo ${VAR%%.*}
# '#' strips the shortest prefix matching '*.', scanning from the left.
echo ${VAR#*.}
# '##' is the greedy version: strips the longest prefix matching '*.'.
echo ${VAR##*.}
|
<reponame>youssef-sourour/web-poker
package ar.com.tandilweb.room.orchestratorBridge.processors;
import java.io.IOException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import ar.com.tandilweb.exchange.UserAuthSchema;
import ar.com.tandilweb.exchange.backwardValidation.DataChallenge;
import ar.com.tandilweb.exchange.backwardValidation.Invalid;
import ar.com.tandilweb.exchange.backwardValidation.Unknown;
import ar.com.tandilweb.exchange.userAuth.BadRequest;
import ar.com.tandilweb.exchange.userAuth.Kicked;
import ar.com.tandilweb.exchange.userAuth.Rejected;
import ar.com.tandilweb.exchange.userAuth.Validated;
import ar.com.tandilweb.room.handlers.SessionHandler;
import ar.com.tandilweb.room.handlers.dto.UserData;
import ar.com.tandilweb.room.handlers.dto.UserDataStatus;
import ar.com.tandilweb.room.orchestratorBridge.processors.dtoDB.Users;
@Component
public class BackwardValidationProcessor extends OrchestratorGenericProcessor {

    public static Logger logger = LoggerFactory.getLogger(BackwardValidationProcessor.class);

    // ObjectMapper is thread-safe after construction; reuse a single instance
    // instead of allocating a new mapper for every incoming message.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    @Autowired
    private SessionHandler sessionHandler;

    /**
     * Handles the orchestrator's answer to a data challenge. When the answer
     * matches the session's user (same user id and same claim token) the
     * session is validated and activated and every other session of that user
     * is kicked; otherwise a BadRequest is sent back.
     *
     * @param schemaBody raw JSON body of the DataChallenge schema
     */
    public void processDataChallengeSchema(String schemaBody) {
        try {
            DataChallenge<Users> dataResponse =
                    OBJECT_MAPPER.readValue(schemaBody, new TypeReference<DataChallenge<Users>>() {});
            String sessionID = sessionHandler.getSessionByTransactionID(dataResponse.transactionID);
            UserData userData = sessionHandler.getUserDataBySession(sessionID);
            UserAuthSchema out;
            // The two original failure branches were identical; merged into one.
            if (userData.userID == dataResponse.idUser
                    && dataResponse.claimToken.equals(userData.lastChallenge.claimToken)) {
                out = new Validated();
                userData.status = UserDataStatus.ACTIVED;
                kickOtherSessions(sessionID, userData);
            } else {
                // TODO: count failed attempts, send FullRejected and block the IP.
                // FullRejected frej = new FullRejected();
                out = new BadRequest();
            }
            sessionHandler.sendToSessID("/userAuth/response", sessionID, out);
        } catch (IOException e) {
            logger.error("Error processing schema from orchestrator", e);
        }
    }

    // Kicks every session of the same user other than the freshly validated
    // one. An empty list simply skips the loop; no explicit size check needed.
    private void kickOtherSessions(String sessionID, UserData userData) {
        List<UserData> anotherSessions = sessionHandler.getAnotherSessions(sessionID, userData.userID);
        Kicked kicked = new Kicked();
        for (UserData anotherSession : anotherSessions) {
            anotherSession.status = UserDataStatus.KICKED;
            sessionHandler.sendToSessID("/userAuth/kick", anotherSession.sessID, kicked);
            sessionHandler.remove(anotherSession.sessID);
        }
    }

    /**
     * Handles an "invalid" backward-validation answer by rejecting the
     * corresponding client session.
     *
     * @param schemaBody raw JSON body of the Invalid schema
     * @throws JsonProcessingException declared for interface compatibility;
     *         JSON errors are in practice caught and logged below
     */
    public void processInvalidSchema(String schemaBody) throws JsonProcessingException {
        try {
            Invalid invalidResponse = OBJECT_MAPPER.readValue(schemaBody, Invalid.class);
            String sessionID = sessionHandler.getSessionByTransactionID(invalidResponse.transactionID);
            sendRejected(sessionID);
        } catch (IOException e) {
            logger.error("Error processing schema from orchestrator", e);
        }
    }

    /**
     * Handles an "unknown" backward-validation answer by rejecting the
     * corresponding client session.
     *
     * @param schemaBody raw JSON body of the Unknown schema
     * @throws JsonProcessingException declared for interface compatibility;
     *         JSON errors are in practice caught and logged below
     */
    public void processUnknownSchema(String schemaBody) throws JsonProcessingException {
        try {
            Unknown unknownResponse = OBJECT_MAPPER.readValue(schemaBody, Unknown.class);
            String sessionID = sessionHandler.getSessionByTransactionID(unknownResponse.transactionID);
            sendRejected(sessionID);
        } catch (IOException e) {
            logger.error("Error processing schema from orchestrator", e);
        }
    }

    // Shared tail of the Invalid/Unknown flows: send a Rejected message to
    // the resolved session.
    private void sendRejected(String sessionID) {
        Rejected reject = new Rejected();
        sessionHandler.sendToSessID("/userAuth/rejected", sessionID, reject);
    }
}
|
#! /bin/bash
# Tags and pushes dagster buildkite Docker images (ECR for integration
# images, Docker Hub otherwise).
#
# Usage: ./push.sh <python-version> <image-type>
#   e.g.: ./push.sh 3.7.4 integration

# Fail fast (and echo commands) before doing any work, not after pushd.
set -eux

ROOT=$(git rev-parse --show-toplevel)
pushd "$ROOT/.buildkite/images/docker/"

# Version of the buildkite image. Update this when you make significant changes to the image.
IMAGE_VERSION="v6"

if [ "$#" -ne 2 ]; then
    # BUG FIX: plain 'echo' does not expand '\n'; printf produces the
    # intended trailing blank line.
    printf 'Error: Must specify a Python version and image type.\n\n' 1>&2
    echo "Usage: ./push.sh 3.7.4 integration" 1>&2
    exit 1
fi

# e.g. 3.7.4
PYTHON_VERSION=$1
# e.g. 3
PYTHON_MAJOR_VERSION="${PYTHON_VERSION:0:1}"
# e.g. 37 — strip the dot from the major.minor prefix (pure bash, no sed subprocess)
PYTHON_MAJMIN="${PYTHON_VERSION:0:3}"
PYTHON_MAJMIN="${PYTHON_MAJMIN/./}"
IMAGE_TYPE=$2
TAG=$(date '+%Y-%m-%d')

# Variables are quoted throughout to survive unexpected whitespace.
if [ "$IMAGE_TYPE" == "integration" ]; then
    docker tag "dagster/buildkite-integration:py${PYTHON_VERSION}-${IMAGE_VERSION}" \
        "${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/buildkite-integration:py${PYTHON_VERSION}-${IMAGE_VERSION}"
    docker push "${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/buildkite-integration:py${PYTHON_VERSION}-${IMAGE_VERSION}"
else
    docker tag "dagster/dagster-py${PYTHON_MAJMIN}" "dagster/dagster-py${PYTHON_MAJMIN}:${TAG}"
    docker tag "dagster/dagster-py${PYTHON_MAJMIN}" "dagster/dagster-py${PYTHON_MAJMIN}:latest"
    docker push "dagster/dagster-py${PYTHON_MAJMIN}:${TAG}"
    docker push "dagster/dagster-py${PYTHON_MAJMIN}:latest"
fi
|
<gh_stars>0
package com.comp.admin.entities;
import java.util.Date;
/**
 * Plain data holder (JavaBean) for an account group: identity, grouping
 * info, contact details and create/update timestamps. All properties are
 * null until explicitly set.
 */
public class AccountGroup {

    private Integer id;
    private String userType;
    private String groupNo;
    private String groupName;
    private Integer level;
    private Integer pId;       // parent group id (presumably — confirm with schema)
    private String note;
    private String leader;
    private String contactPhone;
    private String email;
    private Date createTime;
    private Date updateTime;

    /** No-arg constructor, as required by bean-style frameworks. */
    public AccountGroup() {
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getUserType() {
        return userType;
    }

    public void setUserType(String userType) {
        this.userType = userType;
    }

    public String getGroupNo() {
        return groupNo;
    }

    public void setGroupNo(String groupNo) {
        this.groupNo = groupNo;
    }

    public String getGroupName() {
        return groupName;
    }

    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }

    public Integer getLevel() {
        return level;
    }

    public void setLevel(Integer level) {
        this.level = level;
    }

    public Integer getPId() {
        return pId;
    }

    public void setPId(Integer pId) {
        this.pId = pId;
    }

    public String getNote() {
        return note;
    }

    public void setNote(String note) {
        this.note = note;
    }

    public String getLeader() {
        return leader;
    }

    public void setLeader(String leader) {
        this.leader = leader;
    }

    public String getContactPhone() {
        return contactPhone;
    }

    public void setContactPhone(String contactPhone) {
        this.contactPhone = contactPhone;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }
}
/**
* Copyright 2017 <NAME>. Distributed under the MIT license.
*/
#include "greentop/account/StatementLegacyData.h"
namespace greentop {
namespace account {
/**
 * Full-field constructor: copies every statement attribute into the
 * corresponding member via the initializer list. Optional<> members remain
 * "invalid" (absent) when default-constructed optionals are passed.
 */
StatementLegacyData::StatementLegacyData(const Optional<double>& avgPrice,
    const Optional<double>& betSize,
    const std::string& betType,
    const std::string& betCategoryType,
    const std::string& commissionRate,
    const Optional<int64_t>& eventId,
    const Optional<int64_t>& eventTypeId,
    const std::string& fullMarketName,
    const Optional<double>& grossBetAmount,
    const std::string& marketName,
    const std::string& marketType,
    const std::tm& placedDate,
    const Optional<int64_t>& selectionId,
    const std::string& selectionName,
    const std::tm& startDate,
    const std::string& transactionType,
    const Optional<int64_t>& transactionId,
    const std::string& winLose) :
    avgPrice(avgPrice),
    betSize(betSize),
    betType(betType),
    betCategoryType(betCategoryType),
    commissionRate(commissionRate),
    eventId(eventId),
    eventTypeId(eventTypeId),
    fullMarketName(fullMarketName),
    grossBetAmount(grossBetAmount),
    marketName(marketName),
    marketType(marketType),
    placedDate(placedDate),
    selectionId(selectionId),
    selectionName(selectionName),
    startDate(startDate),
    transactionType(transactionType),
    transactionId(transactionId),
    winLose(winLose) {
}
/**
 * Populates this instance from an account-statement JSON object. Only
 * members present in the JSON are assigned; absent members keep their
 * current values. Dates are parsed from the fixed
 * "%Y-%m-%dT%H:%M:%S.000Z" layout.
 *
 * NOTE(review): strptime() only assigns the std::tm fields it actually
 * parses (e.g. tm_isdst is left untouched) — confirm placedDate/startDate
 * are zero-initialised before this call. strptime is POSIX-only and not
 * available on MSVC.
 */
void StatementLegacyData::fromJson(const Json::Value& json) {
    if (json.isMember("avgPrice")) {
        avgPrice = json["avgPrice"].asDouble();
    }
    if (json.isMember("betSize")) {
        betSize = json["betSize"].asDouble();
    }
    if (json.isMember("betType")) {
        betType = json["betType"].asString();
    }
    if (json.isMember("betCategoryType")) {
        betCategoryType = json["betCategoryType"].asString();
    }
    if (json.isMember("commissionRate")) {
        commissionRate = json["commissionRate"].asString();
    }
    if (json.isMember("eventId")) {
        eventId = json["eventId"].asInt64();
    }
    if (json.isMember("eventTypeId")) {
        eventTypeId = json["eventTypeId"].asInt64();
    }
    if (json.isMember("fullMarketName")) {
        fullMarketName = json["fullMarketName"].asString();
    }
    if (json.isMember("grossBetAmount")) {
        grossBetAmount = json["grossBetAmount"].asDouble();
    }
    if (json.isMember("marketName")) {
        marketName = json["marketName"].asString();
    }
    if (json.isMember("marketType")) {
        marketType = json["marketType"].asString();
    }
    if (json.isMember("placedDate")) {
        strptime(json["placedDate"].asString().c_str(), "%Y-%m-%dT%H:%M:%S.000Z", &placedDate);
    }
    if (json.isMember("selectionId")) {
        selectionId = json["selectionId"].asInt64();
    }
    if (json.isMember("selectionName")) {
        selectionName = json["selectionName"].asString();
    }
    if (json.isMember("startDate")) {
        strptime(json["startDate"].asString().c_str(), "%Y-%m-%dT%H:%M:%S.000Z", &startDate);
    }
    if (json.isMember("transactionType")) {
        transactionType = json["transactionType"].asString();
    }
    if (json.isMember("transactionId")) {
        transactionId = json["transactionId"].asInt64();
    }
    if (json.isMember("winLose")) {
        winLose = json["winLose"].asString();
    }
}
/**
 * Serialises this statement to a JSON object, emitting only members that
 * are set: Optional<> members when valid, strings when non-empty, and date
 * members when tm_year > 0. Dates are formatted as
 * "%Y-%m-%dT%H:%M:%S.000Z".
 *
 * NOTE(review): a field holding an empty string is indistinguishable from
 * an absent one and will be dropped from the output.
 */
Json::Value StatementLegacyData::toJson() const {
    Json::Value json(Json::objectValue);
    if (avgPrice.isValid()) {
        json["avgPrice"] = avgPrice.toJson();
    }
    if (betSize.isValid()) {
        json["betSize"] = betSize.toJson();
    }
    if (betType != "") {
        json["betType"] = betType;
    }
    if (betCategoryType != "") {
        json["betCategoryType"] = betCategoryType;
    }
    if (commissionRate != "") {
        json["commissionRate"] = commissionRate;
    }
    if (eventId.isValid()) {
        json["eventId"] = eventId.toJson();
    }
    if (eventTypeId.isValid()) {
        json["eventTypeId"] = eventTypeId.toJson();
    }
    if (fullMarketName != "") {
        json["fullMarketName"] = fullMarketName;
    }
    if (grossBetAmount.isValid()) {
        json["grossBetAmount"] = grossBetAmount.toJson();
    }
    if (marketName != "") {
        json["marketName"] = marketName;
    }
    if (marketType != "") {
        json["marketType"] = marketType;
    }
    if (placedDate.tm_year > 0) {
        // 25 bytes: "YYYY-MM-DDTHH:MM:SS.000Z" plus the terminating NUL.
        char buffer[25];
        strftime(buffer, 25,"%Y-%m-%dT%H:%M:%S.000Z", &placedDate);
        json["placedDate"] = std::string(buffer);
    }
    if (selectionId.isValid()) {
        json["selectionId"] = selectionId.toJson();
    }
    if (selectionName != "") {
        json["selectionName"] = selectionName;
    }
    if (startDate.tm_year > 0) {
        char buffer[25];
        strftime(buffer, 25,"%Y-%m-%dT%H:%M:%S.000Z", &startDate);
        json["startDate"] = std::string(buffer);
    }
    if (transactionType != "") {
        json["transactionType"] = transactionType;
    }
    if (transactionId.isValid()) {
        json["transactionId"] = transactionId.toJson();
    }
    if (winLose != "") {
        json["winLose"] = winLose;
    }
    return json;
}
/**
 * Always returns true — no field-level constraints are enforced for this
 * legacy record type.
 */
bool StatementLegacyData::isValid() const {
    return true;
}
// ---------------------------------------------------------------------------
// Trivial accessors: each getter returns a const reference to the stored
// member, each setter overwrites it. No validation is performed here.
// ---------------------------------------------------------------------------

const Optional<double>& StatementLegacyData::getAvgPrice() const {
    return avgPrice;
}
void StatementLegacyData::setAvgPrice(const Optional<double>& avgPrice) {
    this->avgPrice = avgPrice;
}

const Optional<double>& StatementLegacyData::getBetSize() const {
    return betSize;
}
void StatementLegacyData::setBetSize(const Optional<double>& betSize) {
    this->betSize = betSize;
}

const std::string& StatementLegacyData::getBetType() const {
    return betType;
}
void StatementLegacyData::setBetType(const std::string& betType) {
    this->betType = betType;
}

const std::string& StatementLegacyData::getBetCategoryType() const {
    return betCategoryType;
}
void StatementLegacyData::setBetCategoryType(const std::string& betCategoryType) {
    this->betCategoryType = betCategoryType;
}

const std::string& StatementLegacyData::getCommissionRate() const {
    return commissionRate;
}
void StatementLegacyData::setCommissionRate(const std::string& commissionRate) {
    this->commissionRate = commissionRate;
}

const Optional<int64_t>& StatementLegacyData::getEventId() const {
    return eventId;
}
void StatementLegacyData::setEventId(const Optional<int64_t>& eventId) {
    this->eventId = eventId;
}

const Optional<int64_t>& StatementLegacyData::getEventTypeId() const {
    return eventTypeId;
}
void StatementLegacyData::setEventTypeId(const Optional<int64_t>& eventTypeId) {
    this->eventTypeId = eventTypeId;
}

const std::string& StatementLegacyData::getFullMarketName() const {
    return fullMarketName;
}
void StatementLegacyData::setFullMarketName(const std::string& fullMarketName) {
    this->fullMarketName = fullMarketName;
}

const Optional<double>& StatementLegacyData::getGrossBetAmount() const {
    return grossBetAmount;
}
void StatementLegacyData::setGrossBetAmount(const Optional<double>& grossBetAmount) {
    this->grossBetAmount = grossBetAmount;
}

const std::string& StatementLegacyData::getMarketName() const {
    return marketName;
}
void StatementLegacyData::setMarketName(const std::string& marketName) {
    this->marketName = marketName;
}

const std::string& StatementLegacyData::getMarketType() const {
    return marketType;
}
void StatementLegacyData::setMarketType(const std::string& marketType) {
    this->marketType = marketType;
}

const std::tm& StatementLegacyData::getPlacedDate() const {
    return placedDate;
}
void StatementLegacyData::setPlacedDate(const std::tm& placedDate) {
    this->placedDate = placedDate;
}

const Optional<int64_t>& StatementLegacyData::getSelectionId() const {
    return selectionId;
}
void StatementLegacyData::setSelectionId(const Optional<int64_t>& selectionId) {
    this->selectionId = selectionId;
}

const std::string& StatementLegacyData::getSelectionName() const {
    return selectionName;
}
void StatementLegacyData::setSelectionName(const std::string& selectionName) {
    this->selectionName = selectionName;
}

const std::tm& StatementLegacyData::getStartDate() const {
    return startDate;
}
void StatementLegacyData::setStartDate(const std::tm& startDate) {
    this->startDate = startDate;
}

const std::string& StatementLegacyData::getTransactionType() const {
    return transactionType;
}
void StatementLegacyData::setTransactionType(const std::string& transactionType) {
    this->transactionType = transactionType;
}

const Optional<int64_t>& StatementLegacyData::getTransactionId() const {
    return transactionId;
}
void StatementLegacyData::setTransactionId(const Optional<int64_t>& transactionId) {
    this->transactionId = transactionId;
}

const std::string& StatementLegacyData::getWinLose() const {
    return winLose;
}
void StatementLegacyData::setWinLose(const std::string& winLose) {
    this->winLose = winLose;
}
}
}
|
// Appears to be auto-generated Doxygen navigation data for
// ConvertFp32NetworkToFp16.hpp — if so, it is regenerated by the docs build
// and should not be edited by hand.
var _convert_fp32_network_to_fp16_8hpp =
[
    [ "ConvertFp32NetworkToFp16Impl", "classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl.xhtml", "classarmnn_1_1optimizations_1_1_convert_fp32_network_to_fp16_impl" ],
    [ "Fp32NetworkToFp16Converter", "_convert_fp32_network_to_fp16_8hpp.xhtml#a86d19da62b6cfed3928f6fe7026f22fa", null ]
];
<filename>src/Middlewares/agentValidation.js
const Validator = require('validator');
const isEmpty = require('./is-Empty');
/**
 * Validates agent-registration input (CompanyName, CompanyDescription,
 * CompanyAddress). Returns { errors, isValid } where `errors` maps field
 * names to human-readable messages.
 *
 * FIXES: corrected typos in user-facing messages ("filed is reqired") and
 * made the length-violation messages state the actual constraint.
 */
const agentInputValidation = (data) => {
  const errors = {};

  // Normalize missing/empty inputs to '' so the string validators are safe to call.
  data.CompanyName = !isEmpty(data.CompanyName) ? data.CompanyName : '';
  data.CompanyDescription = !isEmpty(data.CompanyDescription) ? data.CompanyDescription : '';
  data.CompanyAddress = !isEmpty(data.CompanyAddress) ? data.CompanyAddress : '';

  // The length check runs first and the emptiness check second, so an empty
  // field reports the "required" message rather than the length one.
  if (!Validator.isLength(data.CompanyName, { min: 2, max: 200 })) {
    errors.CompanyName = 'Company name must be between 2 and 200 characters';
  }
  if (Validator.isEmpty(data.CompanyName)) {
    errors.CompanyName = 'Company name field is required';
  }

  if (!Validator.isLength(data.CompanyDescription, { min: 6, max: 200 })) {
    errors.CompanyDescription = 'Company description must be between 6 and 200 characters';
  }
  if (Validator.isEmpty(data.CompanyDescription)) {
    errors.CompanyDescription = 'Company description field is required';
  }

  if (!Validator.isLength(data.CompanyAddress, { min: 6, max: 200 })) {
    errors.CompanyAddress = 'Company address must be between 6 and 200 characters';
  }
  if (Validator.isEmpty(data.CompanyAddress)) {
    errors.CompanyAddress = 'Company address field is required';
  }

  return {
    errors,
    isValid: isEmpty(errors)
  };
};

module.exports = agentInputValidation;
|
import java.util.Scanner;
import java.io.File;
import java.io.FileOutputStream;
import java.io.ObjectOutputStream;
import java.io.FileInputStream;
import java.io.ObjectInputStream;
/**
 * Console application that runs a dice championship for up to 5 players:
 * add/remove players, play 13 rounds, show the scorecard, and persist the
 * state to "Campeonato.dat" via Java serialization.
 */
public class Campeonato {

    // ******************* STATE *******************
    private static int njogadores = 0; // control variable: number of registered players
    private static Jogador[] jogadores = new Jogador[5]; // players taking part in the championship
    // *********************************************

    public static void main(String[] args) {
        Scanner teclado = new Scanner(System.in); // Java's stdin reader (the "scanf" equivalent)
        String s;
        // Holds the MENU option selected by the user.
        char opcao;
        // Interactive options MENU loop.
        do
        {
            System.out.println("(a) Incluir Jogador");
            System.out.println("(b) Remover Jogador");
            System.out.println("(c) Iniciar/Reiniciar o Campeonato");
            System.out.println("(d) Mostrar a Cartela de Resultados");
            System.out.println("(e) Gravar os Dados do Campeonato em Arquivo");
            System.out.println("(f) Ler os Dados do Campeonato em Arquivo");
            System.out.println("(g) Sair da Aplicação");
            System.out.print("Entre com a opção do menu: ");
            s = teclado.next();
            opcao = s.charAt(0); // take the first character of the typed string
            switch(opcao) {
                // ADD PLAYER
                case 'a':
                {
                    System.out.println("");
                    // Enforce the maximum of 5 players.
                    if(njogadores < 5) {
                        String nome;
                        System.out.print("Nome do Jogador(a): ");
                        nome = teclado.nextLine(); // consume the newline left over by the previous read
                        nome = teclado.nextLine();
                        jogadores[njogadores] = new Jogador(nome);
                        njogadores++;
                    } else {
                        System.out.println("Não é possivel inserir mais jogadores !!!");
                    }
                    break;
                }
                // REMOVE PLAYER
                case 'b':
                {
                    if(njogadores != 0) {
                        int escolha; // 1-based index of the player to remove
                        System.out.println("");
                        // Show the players currently in the championship.
                        mostraJogadores(jogadores, njogadores);
                        System.out.print("Escolha o numero do jogador(a) a ser removido: ");
                        escolha = teclado.nextInt(); // read the selection
                        if(escolha >= 1 && escolha <= njogadores) {
                            int aux = njogadores; // snapshot so the loop bound is unaffected by the decrement below
                            for(int i = (escolha-1); i < (aux-1); i++){
                                jogadores[i] = jogadores[i+1]; // shift the remaining players toward the start
                            }
                            njogadores--;
                        } else {
                            System.out.println("Jogador inexistente . Existem apenas " + njogadores + " Jogadores presentes.");
                        }
                    } else {
                        System.out.println("");
                        System.out.println("Não há jogadores no Campeonato!");
                    }
                    // Null out the now-unused tail slots of the array.
                    for(int i = njogadores ; i<5 ; i++) {
                        jogadores[i] = null;
                    }
                    break;
                }
                // START OR RESTART THE CHAMPIONSHIP
                case 'c':
                {
                    System.out.println("");
                    int escolha;
                    if(njogadores == 0) {
                        System.out.println("Não há jogadores para iniciar o Campeonato.");
                    } else {
                        // RESTART: a previous championship already produced scores.
                        if(jogadores[0].chamaResultado() != -1) {
                            // chamaResultado() sums each player's points (-1 means no round was played yet).
                            String nome;
                            // Reset every player's score.
                            for(int i=0 ; i<njogadores ; i++) {
                                nome = jogadores[i].getNome();
                                jogadores[i] = new Jogador(nome); // re-construct to zero the player's score
                            }
                            System.out.println("Campeonato Reiniciado !!!");
                        }
                        for(int rodadas = 0; rodadas < 13; rodadas++) { // ROUNDS LOOP
                            for(int j=0; j <= njogadores-1; j++){
                                System.out.println("");
                                jogadores[j].jogarDados();
                                System.out.println("--------------------------------");
                                System.out.print("Para qual jogada deseja marcar: [1 - 13] ");
                                System.out.println(jogadores[j] + "?");
                                jogadores[j].jogadasFeitas(); // shows the plays menu with the choices already taken
                                // Re-prompt until the choice is valid (in range and not taken yet).
                                do{
                                    System.out.print("Sua escolha: ");
                                    escolha = teclado.nextInt();
                                }while(jogadores[j].verificaJogadas(escolha) == false);
                                // Record the chosen play for this player.
                                jogadores[j].chamajogadas(escolha);
                            }
                        }
                    }
                    break;
                }
                // SHOW THE RESULTS CARD
                case 'd':
                {
                    System.out.println("");
                    // Print the championship scorecard (when players exist).
                    mostraCartela(jogadores, njogadores);
                    break;
                }
                // SAVE CHAMPIONSHIP DATA
                case 'e':
                {
                    System.out.println("");
                    File arquivo = new File("Campeonato.dat");
                    try {
                        FileOutputStream fout = new FileOutputStream(arquivo);
                        ObjectOutputStream oos = new ObjectOutputStream(fout);
                        oos.writeObject(jogadores);
                        oos.flush();
                        oos.close();
                        fout.close();
                        System.out.println("Arquivo campeonato Gravado com sucesso !!!");
                    }catch(Exception ex) {
                        System.err.println("erro: " + ex.toString());
                    }
                    break;
                }
                // LOAD CHAMPIONSHIP DATA
                case 'f':
                {
                    System.out.println("");
                    File arquivo = new File("Campeonato.dat");
                    try {
                        FileInputStream fin = new FileInputStream(arquivo);
                        ObjectInputStream oin = new ObjectInputStream(fin);
                        jogadores = (Jogador[]) oin.readObject();
                        oin.close();
                        fin.close();
                        // Recount the non-null entries to restore njogadores.
                        int i=0;
                        for(Jogador p : jogadores) {
                            if(p != null){
                                i++;
                            }
                        }
                        njogadores = i;
                        System.out.println("Leitura dos dados feita com sucesso !!!");
                    }catch (Exception ex) {
                        System.err.println("erro: " + ex.toString());
                    }
                    break;
                }
                // QUIT THE MENU
                case 'g':
                {
                    // The loop condition below ends the application.
                    break;
                }
                // Any option outside the [a-g] range.
                default:
                {
                    System.out.println("\nOpção Invalida !!!");
                }
            }
            System.out.println("");
        }while(opcao != 'g');
    }

    // **************************************** HELPER FUNCTIONS ******************************************

    // Prints the numbered list of players currently registered.
    public static void mostraJogadores(Jogador[] j, int cont) {
        System.out.println("-- Lista de Jogadores --");
        for (int i = 0; i < cont; i++) {
            System.out.println("Jogador(a) " + (i+1) + ": " + j[i]);
        }
        System.out.println("------------------------");
    }

    // Prints the scorecard: one column per player, one row per play type,
    // plus each player's total at the bottom.
    public static void mostraCartela(Jogador[] jog, int nj) {
        if(nj == 0) {
            System.out.println("Não há jogadores para existir uma cartela de Campeonato.");
        } else {
            int i;
            String[] s = {"1", "2", "3", "4", "5", "6", "7(T)", "8(Q)", "9(F)", "10(S+)", "11(S-)", "12(G)", "13(X)"};
            System.out.println("-- Cartela de Resultados --");
            System.out.print("\t"); // header row
            // Player names across the top.
            for(i = 0 ; i < nj ; i++) {
                System.out.print(jog[i] + " ");
            }
            System.out.println("");
            // One row per play, with each player's score for it.
            for (i = 0 ; i < 13 ; i++) {
                System.out.print(s[i] + "\t");
                for (int j = 0 ; j < nj ; j++) {
                    // getFichaJogada(i): the player's recorded score for play i.
                    System.out.print(jog[j].getFichaJogada(i) + " \t");
                }
                System.out.println("");
            }
            System.out.println("--------------------------------------------------------");
            // Each player's total points.
            System.out.print("Total" + "\t");
            for(i = 0 ; i < nj ; i++) {
                System.out.print(jog[i].chamaResultado() + " \t");
            }
            System.out.println("");
        }
    }
}
<filename>FacebookHackerCup/LabelMaker.py
def label_for(letters, n):
    """Return the n-th label (1-indexed) over the alphabet `letters`.

    Labels are ordered first by length, then positionally by each character's
    index in `letters` — i.e. bijective base-k numbering with k = len(letters).

    BUG FIX: the original computed the label length with a cumulative-sum
    loop whose ``bf > N`` test was off by one, producing a label one
    character too long whenever N was the last label of its length
    (e.g. letters='ab', N=2 yielded 'bb' instead of 'b'). The standard
    bijective-base conversion below derives the length implicitly and is
    correct for every N >= 1.
    """
    k = len(letters)
    digits = []
    while n > 0:
        n -= 1
        digits.append(letters[n % k])
        n //= k
    return ''.join(reversed(digits))


if __name__ == '__main__':
    T = int(input())
    for case_num in range(1, T + 1):
        raw_letters, raw_n = input().split(' ')
        print("Case #%d: %s" % (case_num, label_for(raw_letters, int(raw_n))))
|
<reponame>ryanliu18/Sorting-Visualizer
/**
 * Runs heap sort on `array` (in place) and returns the list of animation
 * steps for the visualizer: comparison pairs [idxA, idxB] (each pushed twice
 * by heapify) and swap records — the literal "Swap" followed by
 * [idxA, newValueA, idxB, newValueB].
 *
 * BUG FIX: the heap-build loop previously started at `array.length/2 - 1`
 * WITHOUT flooring. For odd-length arrays that start index is fractional,
 * so the loop only ever visited fractional indices and never heapified any
 * real node, which could leave the result unsorted (e.g. [1, 3, 2] came out
 * as [2, 3, 1]). Math.floor restores the correct last-non-leaf start index.
 */
export function getHeapSortAnimations(array) {
  const animations = [];

  // Build a max-heap bottom-up, starting from the last non-leaf node.
  for (let i = Math.floor(array.length / 2) - 1; i >= 0; i--) {
    heapify(animations, array, array.length, i);
  }

  // Repeatedly move the current max (root) to the end of the unsorted
  // region, then restore the heap property on the shrunken heap.
  for (let i = array.length - 1; i >= 0; i--) {
    animations.push("Swap");
    animations.push([0, array[i], i, array[0]]);
    const temp = array[0];
    array[0] = array[i];
    array[i] = temp;
    heapify(animations, array, i, 0);
  }

  return animations;
}
/**
 * Sifts the element at `rootIdx` down until the max-heap property holds for
 * the subtree, considering only indices below `sizeOfHeap`. Each comparison
 * pushes its index pair twice (presumably highlight + revert in the
 * visualizer), and each swap pushes "Swap" followed by
 * [idxA, newValueA, idxB, newValueB].
 */
function heapify(animations, array, sizeOfHeap, rootIdx) {
  let largest = rootIdx;
  const left = 2 * rootIdx + 1;
  const right = 2 * rootIdx + 2;

  if (left < sizeOfHeap) {
    animations.push([left, largest]);
    animations.push([left, largest]);
    if (array[left] > array[largest]) {
      largest = left;
    }
  }

  if (right < sizeOfHeap) {
    animations.push([right, largest]);
    animations.push([right, largest]);
    if (array[right] > array[largest]) {
      largest = right;
    }
  }

  if (largest !== rootIdx) {
    animations.push("Swap");
    animations.push([rootIdx, array[largest], largest, array[rootIdx]]);
    const swapped = array[rootIdx];
    array[rootIdx] = array[largest];
    array[largest] = swapped;
    // Continue sifting down the subtree that received the smaller value.
    heapify(animations, array, sizeOfHeap, largest);
  }
}
/*
heapSort
function heapSort(array) {
// Total O(n) time to buildHeap
for (let i = array.length/2 -1; i >= 0; i--) {
heapify(animations,array,array.length,i);
}
// iterate over heap
for (let i = array.length -1; i >= 0; i--) {
//swap max value (array[0]) to end of array at index i
const temp = array[0];
array[0] = array[i];
array[i] = temp;
//heapify on reduced heap, to ensure next iteration works
heapify(animations,array,i,0)
}
}
function heapify(animations,array,sizeOfHeap,rootIdx) {
var largestOfRootAndChildrenIdx = rootIdx;
const leftChildIdx = 2*rootIdx+1;
const rightChildIdx = 2*rootIdx+2;
// if leftChildIdx is within array, and its value is larger than value at largestOfRootAndChildrenIdx, update
if (leftChildIdx < sizeOfHeap && array[leftChildIdx]> array[largestOfRootAndChildrenIdx]) {
largestOfRootAndChildrenIdx = leftChildIdx;
}
// if rightChildIdx is within array, and its value is larger than value at largestOfRootAndChildrenIdx, update
if (rightChildIdx < sizeOfHeap && array[rightChildIdx] > array[largestOfRootAndChildrenIdx]) {
largestOfRootAndChildrenIdx = rightChildIdx;
}
// if at least one of the previous 2 "if" statements were true
if (largestOfRootAndChildrenIdx !== rootIdx) {
//swap array[rootIdx] and array[largestOfRootAndChildrenIdx]
const temp = array[rootIdx];
array[rootIdx] = array[largestOfRootAndChildrenIdx];
array[largestOfRootAndChildrenIdx] = temp;
// recursive call on largest child as root
heapify(animations,array,sizeOfHeap,largestOfRootAndChildrenIdx);
}
}
*/ |
const notes = require('express').Router();
const { readFromFile, readAndAppend } = require('../helpers/fsUtils');
const uuid = require('../helpers/uuid');

// GET /notes — return every saved note from the JSON db file.
notes.get('/notes', (req, res) => {
  console.info(`${req.method} request received for notes`);
  readFromFile('./db/db.json').then((data) => res.json(JSON.parse(data)));
});

// POST /notes — validate and persist a new note.
notes.post('/notes', (req, res) => {
  console.info(`${req.method} request received to save notes`);
  // Destructuring assignment for the items in req.body
  const { title, text } = req.body;
  // A title is the only required property; text may be empty.
  if (title) {
    const newNote = {
      title,
      text,
      note_id: uuid(),
    };
    readAndAppend(newNote, './db/db.json');
    const response = {
      status: 'success',
      body: newNote,
    };
    res.json(response);
  } else {
    // Fix: previously this replied 200 OK on a validation failure, so
    // clients could not detect the error. 400 marks the bad request while
    // keeping the same response body.
    res.status(400).json('Error in posting note');
  }
});

module.exports = notes;
|
#!/bin/bash
# Abort on errors, unset variables and pipeline failures.
set -euo pipefail

# Supported Python interpreters, newest first (search order in find_python).
declare -a supported_versions=(python3.10 python3.9 python3.8 python3.7)
# Defaults; all of these can be overridden via CLI flags parsed in main().
declare install_path="$HOME/cloudkeeper"
declare python_cmd
declare git_install=false
declare dev_mode=false
declare venv=true
declare branch=main
# Parse CLI options, validate the configuration, then install/update
# Cloudkeeper (core components + collector plugins) into $install_path.
main() {
    echo "Cloudkeeper bootstrapper"
    # If run from inside a cloudkeeper checkout, install into that checkout.
    if grep "url =.*cloudkeeper.git" "$PWD/.git/config" > /dev/null 2>&1; then
        install_path="$PWD"
    fi
    local end_of_opt
    local positional=()
    while [[ $# -gt 0 ]]; do
        case "${end_of_opt:-}${1}" in
            -h|--help) usage 0 ;;
            --python) shift; python_cmd="${1:-}" ;;
            --path) shift; install_path="${1:-}" ;;
            --branch) shift; branch="${1:-}" ;;
            --no-venv) venv=false ;;
            --dev) dev_mode=true ;;
            --git) git_install=true ;;
            --) end_of_opt=1 ;;
            -*) invalid "$1" ;;
            *) positional+=("$1") ;;
        esac
        if [ $# -gt 0 ]; then
            shift
        fi
    done
    if [ ${#positional[@]} -gt 0 ]; then
        set -- "${positional[@]}"
    fi
    # Fix: the original `install_path=${install_path%%+(/)}` uses an extglob
    # pattern, but extglob is never enabled (`shopt -s extglob`), so the line
    # is a parse error / no-op. Strip trailing slashes portably instead.
    while [[ "$install_path" == */ ]]; do
        install_path="${install_path%/}"
    done
    if [ -z "${install_path:-}" ]; then
        echo "Invalid install path $install_path"
        exit 1
    fi
    if [ -z "${branch:-}" ]; then
        echo "Invalid branch"
        exit 1
    fi
    # Pick a Python interpreter unless one was given via --python.
    if [ -z "${python_cmd:-}" ]; then
        python_cmd="$(find_python)"
    fi
    if [ -z "${python_cmd:-}" ]; then
        echo -e "Could not find a compatible Python interpreter!\nSupported versions are" "${supported_versions[@]}"
        exit 1
    fi
    if ! type "$python_cmd" > /dev/null 2>&1; then
        echo -e "Unable to use Python interpreter $python_cmd"
        exit 1
    fi
    echo "Using $python_cmd"
    ensure_install_path
    if [ "$venv" = true ]; then
        activate_venv "$python_cmd"
    fi
    ensure_pip
    if [ "$dev_mode" = true ]; then
        install_dev
    fi
    install_cloudkeeper
    install_plugins
    echo -e "Install/Update completed.\nRun\n\tsource ${install_path}/venv/bin/activate\nto activate venv."
}
# usage: print the help text; when an exit code is passed, exit with it.
usage() {
    cat <<EOF
Usage: $(basename "$0") [options]
Valid options:
-h, --help show this help message and exit
--path <path> install directory (default: . if in cloudkeeper git repo else ~/cloudkeeper/)
--python <path> Python binary to use (default: search for best match)
--branch <branch> Git branch/tag to use (default: main)
--dev install development dependencies (default: false)
--no-venv do not create a Python venv for package installation (default: false)
--git install from remote Git instead of local repo (default: false)
EOF
    if [ -n "$1" ]; then
        exit "$1"
    fi
}
# invalid: report an unrecognized CLI argument, then print usage and exit 1.
invalid() {
    echo "ERROR: Unrecognized argument: $1" >&2
    usage 1
}
# ensure_install_path: create the install directory if needed and cd into it.
# All subsequent install steps run relative to this directory.
ensure_install_path() {
    echo "Using install path $install_path"
    mkdir -p "$install_path"
    cd "$install_path"
}
# find_python: print the first (newest) supported interpreter found on PATH.
# Prints nothing when none of the supported versions are available.
find_python() {
    local candidate
    for candidate in "${supported_versions[@]}"; do
        type "$candidate" > /dev/null 2>&1 && { echo "$candidate"; return 0; }
    done
}
# activate_venv: create (if absent) and activate a venv/ under the install
# path using the given interpreter. An existing venv is reused as-is.
activate_venv() {
    local python_cmd=$1
    if [ -d "venv/" ]; then
        echo -e "Virtual Python env already exists!\nRun\n\trm -rf venv/\nif you want to recreate it."
    else
        echo "Creating virtual Python env in venv/ using $python_cmd"
        "$python_cmd" -m venv venv
    fi
    echo "Activating venv"
    source venv/bin/activate
}
# ensure_pip: bootstrap pip via ensurepip if missing, then upgrade pip/wheel.
ensure_pip() {
    echo "Ensuring Python pip is available and up to date."
    if ! python -m pip help > /dev/null 2>&1; then
        python -m ensurepip -q -U
    fi
    pip install -q -U pip wheel
}
# install_dev: install the dev and test requirement sets, preferring the
# local checkout's files and falling back to the files on GitHub main.
# (Refactor: the two copy-pasted if/else branches collapsed into one loop;
# behavior is identical.)
install_dev() {
    echo "Installing development dependencies"
    local req
    for req in requirements-dev.txt requirements-test.txt; do
        if [ -f "ckcore/$req" ]; then
            pip install -q -r "ckcore/$req"
        else
            pip install -q -r "https://raw.githubusercontent.com/someengineering/cloudkeeper/main/ckcore/$req"
        fi
    done
}
# install_cloudkeeper: install every core Cloudkeeper component via pip_install.
install_cloudkeeper() {
    echo "Installing Cloudkeeper"
    local cloudkeeper_components=(cklib ckcore cksh ckworker ckmetrics)
    for component in "${cloudkeeper_components[@]}"; do
        pip_install "$component"
    done
}
# install_plugins: install every collector plugin (pip_install's second
# argument `true` selects the plugins/ path and plugin egg prefix).
install_plugins() {
    local collector_plugins=(aws gcp slack onelogin k8s onprem github example_collector)
    for plugin in "${collector_plugins[@]}"; do
        pip_install "$plugin" true
    done
}
# ensure_git: abort the install when git is not on PATH (needed for remote installs).
ensure_git() {
    if ! type git > /dev/null 2>&1; then
        echo "Git is not available in PATH - aborting install"
        exit 1
    fi
}
# pip_install: install one component or plugin.
#   $1 - package directory name
#   $2 - "true" when the package is a collector plugin (default false)
# Installs editable from the local checkout when the directory exists and
# --git was not given; otherwise installs from the remote repo at $branch.
pip_install() {
    local package=$1
    local plugin=${2:-false}
    local egg_prefix=""
    local path_prefix=""
    if [ "$plugin" = true ]; then
        path_prefix="plugins/"
        egg_prefix="cloudkeeper-plugin-"
    fi
    local package_name="${egg_prefix}${package}"
    # PyPI/egg names use dashes where directory names use underscores.
    package_name=${package_name//_/-}
    local relative_path="${path_prefix}${package}/"
    if [ -d "$relative_path" ] && [ "$git_install" = false ]; then
        echo "Installing $package_name editable from local path $relative_path"
        pip install -q --editable "$relative_path"
    else
        ensure_git
        # pip VCS URL: install the egg from a subdirectory of the repo at $branch.
        local git_repo="git+https://github.com/someengineering/cloudkeeper.git@${branch}#egg=${package_name}&subdirectory=${relative_path}"
        echo "Installing $package_name from remote $git_repo"
        pip install -q -U "$git_repo"
    fi
}
main "$@"
|
"""
Write a code to distribute the donation of 1000 USD equally to five charities
"""
donor_amount = 1000
num_charities = 5
donation_amount = donor_amount // num_charities
remaining_amount = donor_amount % num_charities
# Distribute the donation to the charities
for i in range(num_charities):
donation = donation_amount
if i == num_charities - 1:
donation += remaining_amount # Last charity gets the remaining amount
print('Donation to charity {}: {} USD'.format(i+1, donation)) |
<reponame>LarsGardien/HackerDogeAir
#include <stdbool.h>
#include <limits.h>
#include <time.h>
#include <libscrypt.h>
#include <kore/kore.h>
#include <kore/http.h>
#include <kore/pgsql.h>
#include "shared/shared_error.h"
#include "shared/shared_http.h"
#include "pages/admin/userlist/userlist_render.h"
#include "model/user.h"
#include "model/role.h"
#include "model/session.h"
#include "assets.h"
int admin_user_list(struct http_request *);
void admin_user_list_error_handler(struct http_request *, int);
/*
 * GET handler for the admin user-list page: loads every user and renders
 * the list. Returns KORE_RESULT_OK after responding; KORE_RESULT_ERROR for
 * any HTTP method other than GET.
 */
int
admin_user_list(struct http_request *req)
{
    uint32_t err = (SHARED_OK);
    Session session = (Session) {
        .identifier = NULL,
        .user_identifier = 0
    };
    UserListContext context = {
        .partial_context = { .session = &session } //TODO: fill from request cookie
    };
    /* NOTE(review): on failure this reports the error but neither returns
       nor jumps to exit, so the handler continues with an unresolved
       session — confirm whether a `goto exit` is missing here. */
    if ((err = shared_http_find_session_from_request(req, &context.partial_context.session)) != (SHARED_OK))
    {
        admin_user_list_error_handler(req, err);
    }
    switch(req->method)
    {
        case HTTP_METHOD_GET:
        {
            context.user_collection = user_get_all_users(&err);
            if(context.user_collection == NULL)
            {
                /* An empty result set is fine; any other error aborts. */
                switch(err)
                {
                    case (DATABASE_ENGINE_ERROR_NO_RESULTS):
                    case (SHARED_OK):
                        break;
                    default:
                        admin_user_list_error_handler(req, err);
                        goto exit;
                }
            }
            if((err = admin_user_list_render(&context)) != (SHARED_OK))
            {
                admin_user_list_error_handler(req, err);
                goto exit;
            }
            http_response_header(req, "content-type", "text/html");
            http_response(req, HTTP_STATUS_OK,
                context.partial_context.dst_context->string,
                strlen(context.partial_context.dst_context->string));
            goto exit;
        }
        default:
            return(KORE_RESULT_ERROR); //No methods besides GET exist on this page
    }
exit:
    /* Always release render buffers before returning. */
    admin_user_list_render_clean(&context);
    return (KORE_RESULT_OK);
}
/*
 * Maps page-specific error codes to responses. There are no page-specific
 * codes yet, so every error is delegated to the shared handler, which
 * redirects back to /admin.
 */
void
admin_user_list_error_handler(struct http_request *req, int errcode)
{
    bool handled = true;
    switch(errcode)
    {
        /* Add page-specific cases here; default marks the code unhandled. */
        default:
            handled = false;
    }
    if (!handled)
    {
        shared_error_handler(req, errcode, "/admin");
    }
}
#!/usr/bin/env bash
# deployment_tracker_table_exists: return 0 when the deployment_tracker table
# exists in ${deployment_db}, 1 otherwise.
# NOTE(review): ${db_binary} and the *_flag variables are intentionally left
# unquoted so empty flags vanish and multi-word flags split — confirm callers
# rely on this before quoting them.
# (Refactor: backticks -> $(...), and the if/return pair collapsed into a
# direct test whose exit status is the function's return value.)
deployment_tracker_table_exists() {
    _table_exists=$(${db_binary} -D ${deployment_db} ${server_flag} ${user_flag} ${port_flag} -N -s --skip-pager -e "
SELECT count(*)
FROM information_schema.tables
WHERE table_schema = '${deployment_db}'
AND table_name = 'deployment_tracker'
LIMIT 1;
")
    [ "$_table_exists" = '1' ]
}
|
package de.rieckpil.blog;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.json.JsonTest;
import org.springframework.boot.test.json.JacksonTester;
import org.springframework.boot.test.json.JsonContent;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Slice test for the JSON serialization of {@link CarDetails} using
 * Spring Boot's {@code @JsonTest} and {@code JacksonTester}.
 */
@JsonTest
class CarDetailsJsonTest {
    @Autowired
    private JacksonTester<CarDetails> json;
    @Test
    public void testSerialize() throws Exception {
        CarDetails carDetails = new CarDetails("Audi", "A3", "gray");
        JsonContent<CarDetails> result = this.json.write(carDetails);
        System.out.println(result);
        // NOTE(review): this asserts that a single string field "$.type"
        // contains ALL THREE constructor values — verify CarDetails really
        // serializes to one combined "type" field; otherwise the JSON path
        // and expectation look wrong and should be split per field.
        assertThat(result).extractingJsonPathStringValue("$.type").contains("Audi", "A3", "gray");
    }
}
|
import torch
def create_custom_optimizer(model, learning_rate):
    """Build an Adam optimizer over the model's trainable parameters only.

    Parameters frozen with ``requires_grad = False`` are excluded so the
    optimizer never updates them.

    :param model: torch.nn.Module whose parameters are optimized
    :param learning_rate: learning rate passed to Adam
    :return: a ``torch.optim.Adam`` instance
    """
    trainable = (p for p in model.parameters() if p.requires_grad)
    return torch.optim.Adam(trainable, lr=learning_rate)
#!/bin/bash
# Wait two minutes (presumably for dependent services to come up — TODO
# confirm) before launching the Swirlds platform jar from the current dir.
sleep 2m;
java -jar swirlds.jar
import { applyErrorConditions } from "./utils-service";
// Collects warnings for a link-style interaction. Currently the only check
// is a missing/invalid href; applyErrorConditions evaluates the `if`
// predicates and returns the matching error messages.
export function getWarnings(link = {}) {
  const possibleErrors = [{ if: hasNoHref(link), error: "Interaction has no href." }];
  return applyErrorConditions(possibleErrors);
}
export function hasNoHref(interaction) {
return !interaction.href || typeof interaction.href !== "string";
}
|
#!/bin/bash
# Create a preemptible GCE instance that bootstraps itself as a GitLab runner
# by fetching an Ansible playbook from the internal GitLab and running it.
# SECURITY(review): a GitLab private token is hard-coded in the startup
# script below — rotate it and inject it via secret manager/metadata instead
# of committing it.
# NOTE(review): the trailing "EOF" line inside the quoted startup-script
# looks like a leftover heredoc terminator and will run as a (failing)
# command on boot — confirm and remove.
gcloud compute instances create runner \
    --machine-type=n1-standard-1 \
    --preemptible \
    --tags=gitlab-runner \
    --subnet=gitlab \
    --image-family=docker-host \
    --boot-disk-size=10GB \
    --boot-disk-type=pd-standard \
    --boot-disk-device-name=runner \
    --metadata startup-script='#! /bin/bash
curl --header "Private-Token: sKhREoPTkB7idwvLtqpZ" http://10.164.0.2/homework/config/raw/master/gitlab-runner.yml > /tmp/gitlab-runner.yml
sudo ansible-playbook /tmp/gitlab-runner.yml
EOF'
|
<filename>deviceOnline/src/redis_opt.h
/*
* CRedisOpt.h
*
* Created on: 2012-11-5
* Author: yaowei
*/
#ifndef CREDISOPT_H_
#define CREDISOPT_H_
#include <set>
#include <hiredis/hiredis.h>
#include "defines.h"
/*
 * Thin wrapper around a hiredis connection exposing typed helpers for the
 * common Redis commands. The connection is owned by the caller and injected
 * via SetRedisContext(); all operations return true on success.
 * NOTE(review): this header uses std::string and std::vector but includes
 * neither <string> nor <vector> — presumably pulled in via defines.h; confirm.
 */
class CRedisOpt {
public:
    CRedisOpt();
    virtual ~CRedisOpt();
    // Inject the hiredis connection to operate on (not owned by this class).
    void SetRedisContext(redisContext* conn) { conn_ = conn;}
    bool SelectDB(int db_num);
    /* string operations */
    bool Set(const std::string& key, const std::string& value);
    bool Set(const std::string& key, const int value);
    bool Set(const std::string& key, const long value);
    bool Get(const std::string& key, std::string& value);
    bool MGet(const std::vector<std::string>& vec_key, const std::string& extend_keyname, std::vector<std::string>& vec_value);
    bool Incr(const std::string& key);
    /* list operations */
    bool LPush(const std::string& key, const std::vector<std::string>& vec_value);
    bool LPush(const std::string& key, const std::string& value);
    bool RPush(const std::string& key, const std::string& value);
    bool LPop(const std::string& key);
    bool LLen(const std::string& key, int& len);
    bool LRange(const std::string& key, std::vector<std::string>& vec_value);
    /* set operations */
    bool SAdd(const std::string& key, const std::vector<std::string>& vec_value);
    bool SAdd(const std::string& key, const std::string& value);
    bool SAdd(const std::string& key, const std::set<std::string>& set_value);
    bool SMembers(const std::string& key, std::vector<std::string>& vec_value);
    /* hash (map) operations */
    bool Hset(const std::string& key, const std::string& field, const std::string& value);
    bool Hset(const std::string& key, const std::string& field, const int value);
    bool Hget(const std::string& key, const std::string& field, std::string& value);
    bool Hdel(const std::string& key, const std::string& field);
    bool Hvals(const std::string& key, std::vector<std::string>& vec_value);
    bool HLen(const std::string& key, int& len);
    bool Del(const std::string& key);
    bool Expire(const std::string& key, const int sec);
    bool Exists(const std::string& key);
    // need version of redis not lower than 2.8.
    // The newest stable version the better.
    bool Scan(long long cursor, std::vector<std::string> &vec_keys, long long &next_cursor);
    bool Hscan(long long cursor, std::vector<std::string> &vec_keys, long long &next_cursor);
private:
    // Free a reply if non-null. (Nulling the local pointer afterwards has no
    // effect on the caller's copy — it only guards this function body.)
    void SafeFreeReplyObject(redisReply *reply)
    {
        if(reply)
        {
            freeReplyObject(reply);
            reply = NULL;
        }
    }
    // Free the connection if non-null; same local-pointer caveat as above.
    void SafeFreeRedisContext(redisContext *conn)
    {
        if(conn)
        {
            redisFree(conn);
            conn = NULL;
        }
    }
    redisContext *conn_;   // injected connection; not owned
    redisReply *reply_;    // scratch reply used by the .cc implementation
};
#endif /* CREDISOPT_H_ */
|
#!/bin/bash
# function: copy_file
function copy_file() {
FSIZE=$1
TESTNAME=$2
TMPFILE=data.tmp
RANDFILE=data.rand
DUPFILE=data.dup
dd if=/dev/zero of=$TMPFILE bs=$FSIZE count=1 status=noxfer >/dev/null 2>&1
openssl enc -aes-256-ctr -pass pass:"$(dd if=/dev/urandom bs=128 count=1 2>/dev/null | base64)" -nosalt < $TMPFILE > $RANDFILE
hdfs dfs -copyFromLocal $RANDFILE /$RANDFILE
hdfs dfs -copyToLocal /$RANDFILE ./$DUPFILE
diff $RANDFILE $DUPFILE
if [ $? != 0 ]; then
echo "$TESTNAME ... FAILED"
exit 1;
else
echo "$TESTNAME ... PASSED"
hdfs dfs -rm -f /$RANDFILE
rm -rf $TMPFILE $RANDFILE $DUPFILE
fi
}
# STEP 1: require page size and block size arguments.
if [ $# != 2 ]; then
    echo "$0 <pagesize> <blocksize>"
    exit 1;
fi
PGS=$1
BKS=$2
# STEP 2: round-trip files sized at fractions/multiples of the page size.
# (Refactor: backtick `expr` arithmetic replaced by $(( )); same values.)
echo "TEST ONE: small read/write"
HALF_=$((PGS / 2))
ONE_=$PGS
TWO_=$((PGS + PGS))
TWO_AND_HALF_=$((TWO_ + HALF_))
copy_file $HALF_ "copy-0.5-page"
copy_file $ONE_ "copy-1-page"
copy_file $TWO_ "copy-2-page"
copy_file $TWO_AND_HALF_ "copy-2.5-page"
# STEP 3: same pattern at block-size granularity.
echo ""
echo "TEST TWO: block read/write"
HALF_=$((BKS / 2))
ONE_=$BKS
TWO_=$((BKS + BKS))
TWO_AND_HALF_=$((TWO_ + HALF_))
copy_file $HALF_ "copy-0.5-block"
copy_file $ONE_ "copy-1-block"
copy_file $TWO_ "copy-2-block"
copy_file $TWO_AND_HALF_ "copy-2.5-block"
|
# Build, push and roll out the website image. Tag defaults to today's date;
# refuses to overwrite an already-pushed tag.
set -x
TAG=$1
if [[ -z "${TAG}" ]]
then
    TAG="v$(date '+%Y-%m-%d')"
fi
PREFIX=gcr.io/hots-cockroach/website
IMG=${PREFIX}:${TAG}
LATEST=${PREFIX}:latest
echo building "$TAG"
echo "$IMG"
# Idempotence guard: if the tag can already be pulled, bail out.
# (Refactor: `cmd; if [[ $? -eq 0 ]]` replaced by testing the command directly.)
if docker pull "$IMG" > /dev/null 2>&1
then
    echo 'image already exists'
    exit 1
fi
# From here on, any failure aborts the release.
set -e
docker build -t "$IMG" .
docker tag "$IMG" "$LATEST"
docker push "$IMG"
docker push "$LATEST"
kubectl set image deployment/website "website=${IMG}"
# Recycle the updatedb pods so they pick up the new image.
kubectl get po | grep updatedb | awk '{print $1}' | xargs kubectl delete po
|
package libs.trustconnector.scdp.smartcard.checkrule.tlv;
import libs.trustconnector.scdp.util.tlv.*;
import java.util.*;
import libs.trustconnector.scdp.util.tlv.TagList;
/**
 * Check rule that matches a single byte (optionally masked) at a fixed
 * offset inside a TLV value. The expected byte and mask are set via
 * {@link #setMatch}; without a match set, {@link #checkTLVValue} only
 * records the observed byte and reports success.
 */
public class ResponseTLVCheckRuleByte extends ResponseTLVCheckRule
{
    protected int retVlue;     // last byte read (masked); note: field name typo kept for compatibility
    protected int valueMask;   // NOTE(review): never read or written in this class — confirm it can be removed
    protected int expValue;    // expected byte value (set via setMatch)
    protected int expValueM;   // mask applied to the observed byte (default 0xFF)
    public ResponseTLVCheckRuleByte(final String name, final TagList tagPath, final int valueOff) {
        super(name, tagPath, valueOff);
        this.expValueM = 255;
    }
    public ResponseTLVCheckRuleByte(final String name, final TagList tagPath, final int valueOff, final Map<String, String> valueInfoMap) {
        super(name, tagPath, valueOff, valueInfoMap);
        this.expValueM = 255;
    }
    public ResponseTLVCheckRuleByte(final String name, final TagList tagPath, final int valueOff, final int valueMask) {
        super(name, tagPath, valueOff);
        this.expValueM = valueMask;
    }
    public ResponseTLVCheckRuleByte(final String name, final TagList tagPath, final int valueOff, final int valueMask, final Map<String, String> valueInfoMap) {
        super(name, tagPath, valueOff, valueInfoMap);
        this.expValueM = valueMask;
    }
    /**
     * Reads and masks the byte at {@code valueOff}; when a match is set,
     * returns whether it equals the expected value. A missing/short value
     * fails only when a match is expected.
     */
    @Override
    public boolean checkTLVValue(final byte[] value) {
        if (value != null && this.valueOff < value.length) {
            this.retVlue = (value[this.valueOff] & this.expValueM);
            this.retValue = String.format("%02X", this.retVlue);
            if (this.matchSet) {
                return (this.retVlue & 0xFF) == this.expValue;
            }
        }
        else if (this.matchSet) {
            return false;
        }
        return true;
    }
    /** Expect the byte to equal {@code byteValue} (full 0xFF mask stays in effect). */
    public void setMatch(final int byteValue) {
        this.matchSet = true;
        this.expValue = byteValue;
        super.expValue = String.format("%02X", byteValue);
    }
    /** Expect the masked byte to equal {@code byteValue} under {@code expMask}. */
    public void setMatch(final int byteValue, final int expMask) {
        this.matchSet = true;
        this.expValue = byteValue;
        this.expValueM = expMask;
        super.expValue = String.format("%02X", byteValue);
        super.dataMask = String.format("%02X", expMask);
    }
    /** Last observed (masked) byte as an unsigned value. */
    public int getReturnValue() {
        return this.retVlue & 0xFF;
    }
}
|
#!/bin/bash
. $(dirname "$0")/include.sh
# Here our remote repo is one that has been cloned from the original
# test repo, rather than the actual original test repo itself. We
# perform a repoint install, then commit something to the remote, then
# request a specific pin to the id that was just committed, and check
# that repoint install now updates to that id.
# Project file body: three libraries, one per VCS (hg/git/svn), unpinned.
libcontent=$(cat <<EOF
"A": {
"vcs": "hg",
"service": "testfile",
"repository": "A2"
},
"B": {
"vcs": "git",
"service": "testfile",
"repository": "B2"
},
"C": {
"vcs": "svn",
"service": "testfile",
"repository": "C2"
}
EOF
)
# Clone fresh "remote" copies of each test repo (A2/B2/C2).
( cd ../../testrepos
rm -rf A2 B2 C2 C2_checkout
hg clone A A2
git clone -bmaster B B2
cp -a C C2
svn co file://$(pwd)/C2 C2_checkout
)
prepare
write_project_file "$libcontent"
"$repoint" install
# Baseline ids after the initial install (hg short id, git sha, svn rev).
check_expected f94ae9d7e5c9 3199655c658ff337ce24f78c6d1f410f34f4c6f2 2
# Commit a new change to each remote so there is something to pin to.
( cd ../../testrepos
cd A2
echo 5 > file.txt
hg commit -m 5 -u testuser
cd ../B2
echo 5 > file-b.txt
git commit -a -m 5
cd ../C2_checkout
echo 5 > file.txt
svn commit -m 5
svn update
)
# Capture the freshly committed ids from each VCS.
newidA=$( cd ../../testrepos/A2 ; hg id | awk '{ print $1; }' )
newidB=$( cd ../../testrepos/B2 ; git rev-parse HEAD )
newidC=$( cd ../../testrepos/C2_checkout ; svn info | grep '^Revision:' | awk '{ print $2; }' )
# Same project file, now pinned to the new ids.
libcontent_pinned=$(cat <<EOF
"A": {
"vcs": "hg",
"service": "testfile",
"repository": "A2",
"pin": "$newidA"
},
"B": {
"vcs": "git",
"service": "testfile",
"repository": "B2",
"pin": "$newidB"
},
"C": {
"vcs": "svn",
"service": "testfile",
"repository": "C2",
"pin": "$newidC"
}
EOF
)
write_project_file "$libcontent_pinned"
"$repoint" install # always obeys lock file, so should do nothing here
check_expected f94ae9d7e5c9 3199655c658ff337ce24f78c6d1f410f34f4c6f2 2
# With the lock file gone, install must honour the new pins.
rm repoint-lock.json
"$repoint" install
check_expected $newidA $newidB $newidC
|
#!/usr/bin/env python3
import copy
import functools
from xml.etree import ElementTree as ET
from PyQt5 import QtCore, QtGui, QtWidgets, QtSvg
class DndState:
    """Base state of the widget's drag-and-drop state machine.

    Each mouse handler returns the next state object; the default
    implementation keeps the current state.
    """
    def __init__(self, view):
        self._view = view
    def view(self):
        # The DualShock4 widget this state operates on.
        return self._view
    def mousePressEvent(self, event): # pylint: disable=W0613
        return self
    def mouseReleaseEvent(self, event): # pylint: disable=W0613
        return self
    def mouseMoveEvent(self, event): # pylint: disable=W0613
        return self
class DndIdleState(DndState):
    """No interaction in progress; dispatches clicks to pads or buttons."""
    def mousePressEvent(self, event):
        # hitTest maps the clicked pixel to a control index; +256 is the
        # pad-translation index space (INDEX_*_TR = INDEX_* + 256).
        pixel = self.view().hitTest(event.pos())
        if pixel + 256 in self.view().pads():
            if event.button() == QtCore.Qt.LeftButton:
                return DndStartDraggingPadState(self.view(), event.pos(), pixel)
            if event.button() == QtCore.Qt.RightButton:
                # Right click re-centers the stick.
                self.view().setValue(pixel + 256, QtCore.QPointF(0, 0))
        elif event.button() == QtCore.Qt.LeftButton:
            if pixel in self.view().buttons():
                # Left click toggles a button.
                self.view().setValue(pixel, not self.view().value(pixel))
        return super().mousePressEvent(event)
class DndStartDraggingPadState(DndState):
    """Mouse pressed on a pad; waiting to see if this becomes a drag.

    Moving past the platform drag threshold starts a real drag; releasing
    before that toggles the pad's button instead.
    """
    def __init__(self, view, initial_pos, index):
        super().__init__(view)
        self._initialPos = initial_pos
        self._index = index
    def mouseMoveEvent(self, event):
        if (event.pos() - self._initialPos).manhattanLength() >= QtWidgets.QApplication.startDragDistance():
            return DndDraggingPadState(self.view(), self._initialPos, event.pos(), self._index)
        return super().mouseMoveEvent(event)
    def mouseReleaseEvent(self, event):
        # Released without dragging: treat as a simple button toggle.
        self.view().setValue(self._index, not self.view().value(self._index))
        return DndIdleState(self.view())
class DndDraggingPadState(DndState):
    """Actively dragging an analog pad; updates its translation each move."""
    def __init__(self, view, initial_pos, current_pos, index):
        super().__init__(view)
        self._initialPos = initial_pos
        self._currentPos = current_pos
        self._index = index
        # Pad offset at drag start; new offsets are relative to this.
        self._delta = view.value(index + 256)
    def mouseMoveEvent(self, event):
        self._currentPos = event.pos()
        # Convert the mouse delta into SVG coordinates and add the starting offset.
        delta = (self.view().mapToSvg(self._currentPos) - self.view().mapToSvg(self._initialPos)) + self._delta
        self.view().setValue(self._index + 256, delta)
        self.view().setStatus(self._index)
        return super().mouseMoveEvent(event)
    def mouseReleaseEvent(self, event):
        self.view().clearStatus()
        self.view().updateCoords()
        return DndIdleState(self.view())
class DualShock4(QtWidgets.QWidget):
    """Interactive DualShock 4 widget rendered from an SVG.

    Buttons are toggled by clicking, analog pads dragged with the mouse
    (handled by the Dnd* state machine), and L2/R2 are 0-255 sliders.
    Control states live in self._values keyed by the INDEX_* constants;
    pad translation entries use INDEX_* + 256 (the *_TR constants).
    """
    # Hit-test ids: the SVG is rendered once with each element filled with
    # its index color (see updateCoords), so a pixel lookup maps a click
    # back to its control.
    INDEX_RPAD = 1
    INDEX_RPAD_TR = 257
    INDEX_LPAD = 2
    INDEX_LPAD_TR = 258
    INDEX_PS = 3
    INDEX_SHARE = 4
    INDEX_OPTIONS = 5
    INDEX_TRIANGLE = 6
    INDEX_SQUARE = 7
    INDEX_CIRCLE = 8
    INDEX_CROSS = 9
    INDEX_DPADU = 10
    INDEX_DPADD = 11
    INDEX_DPADL = 12
    INDEX_DPADR = 13
    INDEX_TPAD = 14
    INDEX_L1 = 15
    INDEX_R1 = 16
    INDEX_L2 = 17
    INDEX_R2 = 18
    def __init__(self, parent):
        super().__init__(parent)
        with open('res/images/dualshock.svg', 'r') as fileobj:
            self._svg = ET.parse(fileobj)
        self._state = DndIdleState(self)
        # Control values: QPointF for pads, bool for buttons, int for triggers.
        self._values = {}
        for pad in self.pads():
            self._values[pad] = QtCore.QPointF(0, 0)
        for btn in self.buttons():
            self._values[btn] = False
        self._values[self.INDEX_L2] = self._values[self.INDEX_R2] = 0
        self._status = ''
        # Trigger sliders (0-255).
        self._l2 = QtWidgets.QSlider(QtCore.Qt.Vertical, self)
        self._l2.valueChanged.connect(functools.partial(self._valueChanged, self.INDEX_L2))
        self._l2.setMinimum(0)
        self._l2.setMaximum(255)
        self._r2 = QtWidgets.QSlider(QtCore.Qt.Vertical, self)
        self._r2.valueChanged.connect(functools.partial(self._valueChanged, self.INDEX_R2))
        self._r2.setMinimum(0)
        self._r2.setMaximum(255)
        self.updateCoords()
    def _valueChanged(self, index, value):
        # Slider callback: store the new trigger value.
        self.setValue(index, value)
    def pads(self):
        """Indices of the draggable analog pad translations."""
        return (
            self.INDEX_RPAD_TR,
            self.INDEX_LPAD_TR,
        )
    def buttons(self):
        """Indices of all toggleable (boolean) controls."""
        return (
            self.INDEX_RPAD,
            self.INDEX_LPAD,
            self.INDEX_PS,
            self.INDEX_SHARE,
            self.INDEX_OPTIONS,
            self.INDEX_TRIANGLE,
            self.INDEX_SQUARE,
            self.INDEX_CIRCLE,
            self.INDEX_CROSS,
            self.INDEX_DPADU,
            self.INDEX_DPADD,
            self.INDEX_DPADL,
            self.INDEX_DPADR,
            self.INDEX_TPAD,
            self.INDEX_L1,
            self.INDEX_R1,
        )
    def indexToId(self, index):
        """Map a control index to its element id inside the SVG document."""
        return {
            self.INDEX_RPAD: 'RPAD',
            self.INDEX_LPAD: 'LPAD',
            self.INDEX_PS: 'PS',
            self.INDEX_SHARE: 'SHARE',
            self.INDEX_OPTIONS: 'OPTIONS',
            self.INDEX_TRIANGLE: 'TRIANGLE',
            self.INDEX_SQUARE: 'SQUARE',
            self.INDEX_CIRCLE: 'CIRCLE',
            self.INDEX_CROSS: 'CROSS',
            self.INDEX_DPADU: 'DPADU',
            self.INDEX_DPADD: 'DPADD',
            self.INDEX_DPADL: 'DPADL',
            self.INDEX_DPADR: 'DPADR',
            self.INDEX_TPAD: 'TPAD',
            self.INDEX_RPAD_TR: 'RPAD',
            self.INDEX_LPAD_TR: 'LPAD',
            self.INDEX_L1: 'L1',
            self.INDEX_R1: 'R1',
        }[index]
    def value(self, index):
        return self._values[index]
    def setValue(self, index, value):
        """Store a control value, enforcing d-pad exclusivity and pad range."""
        if value:
            # Opposite d-pad directions cannot both be pressed.
            if index == self.INDEX_DPADU and self._values[self.INDEX_DPADD]:
                self._values[self.INDEX_DPADD] = False
            elif index == self.INDEX_DPADD and self._values[self.INDEX_DPADU]:
                self._values[self.INDEX_DPADU] = False
            elif index == self.INDEX_DPADL and self._values[self.INDEX_DPADR]:
                self._values[self.INDEX_DPADR] = False
            elif index == self.INDEX_DPADR and self._values[self.INDEX_DPADL]:
                self._values[self.INDEX_DPADL] = False
        if index in self.pads():
            # Clamp pad offsets to a radius of 16 SVG units.
            line = QtCore.QLineF(QtCore.QPointF(0, 0), value)
            if line.length() > 16:
                line = QtCore.QLineF.fromPolar(16, line.angle())
            self._values[index] = QtCore.QPointF(line.dx(), line.dy())
        else:
            self._values[index] = value
        self.update()
    def setStatus(self, index):
        """Show a pad's position in the status line, rescaled to 0..255."""
        if index in self.pads():
            pt = self._values[index]
            dx = min(255, (pt.x() + 16) * 8)
            dy = min(255, (pt.y() + 16) * 8)
            self._status = 'X: %d; Y: %d' % (dx, dy)
            self.update()
    def clearStatus(self):
        self._status = ''
        self.update()
    def hitTest(self, pt):
        """Return the control index under widget point `pt`, or None."""
        # The hit image is drawn bottom-center; translate into its space.
        dx = (self.width() - self._hit.width()) / 2
        dy = self.height() - self._hit.height()
        pt = (pt - QtCore.QPointF(dx, dy)).toPoint()
        if 0 <= pt.x() < self._hit.width() and 0 <= pt.y() < self._hit.height():
            # The pixel's RGB value IS the control index (see updateCoords).
            return self._hit.pixel(pt) & 0xFFFFFF
        return None
    def mapToSvg(self, pt):
        """Convert a widget point into the SVG's 600-unit coordinate space."""
        dx = (self.width() - self._hit.width()) / 2
        dy = self.height() - self._hit.height()
        return (QtCore.QPointF(pt) - QtCore.QPointF(dx, dy)) * 600 / self.width()
    def _fillElement(self, element, color, recurse=True):
        """Set (or add) `fill:#RRGGBB` in the element's style, optionally recursing."""
        if 'style' in element.attrib:
            parts = element.attrib['style'].split(';')
            for index, part in enumerate(parts):
                if part.startswith('fill:'):
                    parts[index] = 'fill:#%06x' % color
                    break
            else:
                parts.append('fill:#%06x' % color)
            element.attrib['style'] = ';'.join(parts)
        else:
            element.attrib['style'] = 'fill:#%06x' % color
        if recurse:
            for child in element:
                self._fillElement(child, color)
    def paintEvent(self, event): # pylint: disable=W0613
        # Re-render the SVG with current pad offsets and button highlights.
        xml = copy.deepcopy(self._svg)
        for pad in self.pads():
            xml.find('.//*[@id="%s"]' % self.indexToId(pad)).attrib['transform'] = 'translate(%f %f)' % (self._values[pad].x(), self._values[pad].y())
        for btn in self.buttons():
            self._fillElement(xml.find('.//*[@id="%s"]' % self.indexToId(btn)), (0xFF0000 if self._values[btn] else 0xcccccc), recurse=btn not in {self.INDEX_PS, self.INDEX_TPAD})
        img = QtGui.QImage(self._hit.width(), self._hit.height(), QtGui.QImage.Format_RGB888)
        painter = QtGui.QPainter(img)
        painter.setBackground(QtCore.Qt.white)
        painter.eraseRect(self.rect())
        renderer = QtSvg.QSvgRenderer(ET.tostring(xml.getroot()))
        renderer.render(painter)
        painter = QtGui.QPainter(self)
        painter.drawImage(QtCore.QPoint((self.width() - self._hit.width()) // 2, self.height() - self._hit.height()), img)
        if self._status:
            mt = painter.fontMetrics()
            rc = QtCore.QRectF(0, self.height() - mt.height() - 1, self.width(), mt.height())
            painter.drawText(rc, QtCore.Qt.AlignCenter, self._status)
    def updateCoords(self):
        """Rebuild the hit-test image: each control filled with its index color."""
        # Keep the SVG's 600x400 aspect ratio inside the widget.
        if self.width() / self.height() > 600 / 400:
            img = QtGui.QImage(600 * self.height() // 400, self.height(), QtGui.QImage.Format_RGB888)
        else:
            img = QtGui.QImage(self.width(), 400 * self.width() // 600, QtGui.QImage.Format_RGB888)
        xml = copy.deepcopy(self._svg)
        for index in self.buttons():
            self._fillElement(xml.find('.//*[@id="%s"]' % self.indexToId(index)), index)
        for index in self.pads():
            xml.find('.//*[@id="%s"]' % self.indexToId(index)).attrib['transform'] = 'translate(%f %f)' % (self._values[index].x(), self._values[index].y())
        painter = QtGui.QPainter(img)
        painter.setBackground(QtCore.Qt.white)
        painter.eraseRect(self.rect())
        renderer = QtSvg.QSvgRenderer(ET.tostring(xml.getroot()))
        renderer.render(painter)
        self._hit = img
    def resizeEvent(self, event):
        # Re-anchor the trigger sliders and rebuild the hit image.
        dx = (self.width() - self._hit.width()) // 2
        dy = self.height() - self._hit.height()
        self._l2.setGeometry(dx, dy, self._l2.sizeHint().width(), self._hit.height() // 4)
        self._r2.setGeometry(self.width() - dx - self._r2.sizeHint().width(), dy, self._r2.sizeHint().width(), self._hit.height() // 4)
        self.updateCoords()
        super().resizeEvent(event)
    def mousePressEvent(self, event):
        # Delegate to the state machine; the handler returns the next state.
        self._state = self._state.mousePressEvent(event)
    def mouseReleaseEvent(self, event):
        self._state = self._state.mouseReleaseEvent(event)
    def mouseMoveEvent(self, event):
        self._state = self._state.mouseMoveEvent(event)
if __name__ == '__main__':
    # Manual smoke test: show the controller widget standalone.
    app = QtWidgets.QApplication([])
    win = DualShock4(None)
    win.show()
    app.exec_()
|
<reponame>SolaceSamples/solace-samples-java-new<gh_stars>10-100
/*
* Copyright 2021 Solace Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.solace.samples.java.snippets;
import com.solace.messaging.MessagingService;
import com.solace.messaging.PubSubPlusClientException;
import com.solace.messaging.publisher.OutboundMessage;
import com.solace.messaging.publisher.OutboundMessageBuilder;
import com.solace.messaging.publisher.PersistentMessagePublisher;
import com.solace.messaging.publisher.PersistentMessagePublisher.MessagePublishReceiptListener;
import com.solace.messaging.resources.Topic;
import com.solace.messaging.util.Converter.ObjectToBytes;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
public class HowToPublishPersistentMessage {
/**
 * Builds and starts a persistent (guaranteed-delivery) message publisher.
 *
 * @param service       connected messaging service used to build the publisher
 * @param toDestination intended publish topic; currently unused by this
 *                      method — TODO confirm whether the parameter can go
 * @return a started publisher, ready to publish
 */
public static PersistentMessagePublisher createPersistentMessagePublisher(
    MessagingService service, Topic toDestination) {
  final PersistentMessagePublisher messagePublisher = service
      .createPersistentMessagePublisherBuilder()
      .build().start();
  // ready to go publisher
  return messagePublisher;
}
/**
 * Example how to create a topic
 *
 * @param topicName topic name/expression
 * @return topic instance to be used for publishing purposes
 */
public static Topic createATopic(String topicName) {
  return Topic.of(topicName);
}
/**
 * Publishes a raw byte-array payload without blocking; delivery
 * confirmations arrive asynchronously on the receipt listener.
 */
public static void publishByteMessageNonBlocking(
    final PersistentMessagePublisher messagePublisher, Topic toDestination) {
  // listener that processes all delivery confirmations/timeouts for all
  // messages being sent using the given instance of messagePublisher
  final MessagePublishReceiptListener deliveryConfirmationListener = (publishReceipt) -> {
    // process delivery confirmation for some message ...
  };
  // listen to all delivery confirmations for all messages being sent
  messagePublisher.setMessagePublishReceiptListener(deliveryConfirmationListener);
  // publishing a message (raw byte [] payload in this case)
  messagePublisher
      .publish("converted to bytes".getBytes(StandardCharsets.US_ASCII), toDestination);
}
public static void publishStringMessageNonBlocking(
final PersistentMessagePublisher messagePublisher, Topic toDestination) {
// listener that processes all delivery confirmations/timeouts for all messages all
// messages being send using given instance of messagePublisher
final MessagePublishReceiptListener deliveryConfirmationListener = (publishReceipt) -> {
// process delivery confirmation for some message ...
};
// listen to all delivery confirmations for all messages being send
messagePublisher.setMessagePublishReceiptListener(deliveryConfirmationListener);
// publishing a message (String payload in this case)
messagePublisher
.publish("Hello world", toDestination);
}
public static void publishTypedMessageNonBlocking(OutboundMessageBuilder messageBuilder,
final PersistentMessagePublisher messagePublisher, Topic toDestination) {
// listener that processes all delivery confirmations/timeouts for all messages all
// messages being send using given instance of messagePublisher
final MessagePublishReceiptListener deliveryConfirmationListener = (publishReceipt) -> {
// process delivery confirmation for some message ...
// there are different ways to correlate with a published message:
// - using message itself: publishReceipt.getMessage()
// - having access to user provided context: publishReceipt.getUserContext()
// ..
};
// listen to all delivery confirmations for all messages being send
messagePublisher.setMessagePublishReceiptListener(deliveryConfirmationListener);
final MyData data = new MyData("my message");
final ObjectToBytes<MyData> dto2ByteConverter = (pojo) -> {
return pojo.getName().getBytes(StandardCharsets.US_ASCII);
};
// publishing a message (typed business object payload in this case)
messagePublisher
.publish(messageBuilder.build(data, dto2ByteConverter), toDestination);
}
public static void publishTypedMessageWithExtendedMessagePropertiesNonBlocking(
OutboundMessageBuilder messageBuilder,
final PersistentMessagePublisher messagePublisher, Topic toDestination) {
// listener that processes all delivery confirmations/timeouts for all messages all
// messages being send using given instance of messagePublisher
final MessagePublishReceiptListener deliveryConfirmationListener = (publishReceipt) -> {
// process delivery confirmation for some message ...
// there are different ways to correlate with a published message:
// - using message itself: publishReceipt.getMessage()
// - having access to user provided context: publishReceipt.getUserContext()
// ..
};
// listen to all delivery confirmations for all messages being send
messagePublisher.setMessagePublishReceiptListener(deliveryConfirmationListener);
final MyData data = new MyData("my message");
final ObjectToBytes<MyData> dto2ByteConverter = (pojo) -> {
return pojo.getName().getBytes(StandardCharsets.US_ASCII);
};
final OutboundMessage message = messageBuilder
.withPriority(255).build(data, dto2ByteConverter);
// publishing a message (typed business object payload in this case)
messagePublisher
.publish(message, toDestination);
}
public static void correlateMessageOnBrokerAcknowledgementWithUserContextNonBlocking(
OutboundMessageBuilder messageBuilder,
final PersistentMessagePublisher messagePublisher, Topic toDestination) {
// listener that processes all delivery confirmations/timeouts for all messages all
// messages being send using given instance of messagePublisher
final MessagePublishReceiptListener publishConfirmationListener = (publishReceipt) -> {
final OutboundMessage acknowledgedMessage = publishReceipt.getMessage();
// corresponding context can be retrieved this way from a publish receipt
final Object processingContext = publishReceipt.getUserContext();
// when provided during message publishing
if (null != processingContext && processingContext instanceof MyContext) {
final MyContext myContext = (MyContext) processingContext;
// use 'myContext' and 'acknowledgedMessage' for processing/ failure check etc ...
}
};
// listen to all delivery confirmations for all messages being send
messagePublisher.setMessagePublishReceiptListener(publishConfirmationListener);
final ObjectToBytes<MyData> dto2ByteConverter = (pojo) -> {
return pojo.getName().getBytes(StandardCharsets.US_ASCII);
};
// message payload
final MyData dataABC = new MyData("message ABC");
// corresponding context
final MyContext contextForDataABC = new MyContext("Context for message ABC");
// publishing a message, providing context,
messagePublisher
.publish(messageBuilder.build(dataABC, dto2ByteConverter), toDestination,
contextForDataABC);
}
public static void checkForMessageAcknowledgementFailuresNonBlocking(
final PersistentMessagePublisher messagePublisher, Topic toDestination) {
// Listener that processes all publish confirmations/timeouts for all messages.
// Callback expected to be executed on a different thread then message was published on
final MessagePublishReceiptListener publishReceiptListener = (publishReceipt) -> {
final PubSubPlusClientException exceptionIfAnyOrNull = publishReceipt.getException();
if (null != exceptionIfAnyOrNull) {
// deal with a not acknowledged message ...
// there are different ways to correlate with a published message:
// - using message itself: publishReceipt.getMessage()
// - having access to user provided context: publishReceipt.getUserContext()
// ..
} else {
// process delivery confirmation for some message ...
}
};
// listen to all delivery confirmations for all messages being send
messagePublisher.setMessagePublishReceiptListener(publishReceiptListener);
// publish message...
// publishing a message (String payload in this case),
messagePublisher.publish("Hello world", toDestination);
}
public static void publishTypedMessageBlockingWaitingForDeliveryConfirmation(
OutboundMessageBuilder messageBuilder,
final PersistentMessagePublisher messagePublisher, Topic toDestination) {
final MyData data = new MyData("my blocking message");
final ObjectToBytes<MyData> dto2ByteConverter = (pojo) -> {
return pojo.getName().getBytes(StandardCharsets.US_ASCII);
};
// wait at the most for 20 seconds before considering that message is not delivered to the broker
final long deliveryConfirmationTimeOutInMilliseconds = 20000L;
// publishing a message (typed business object payload in this case), blocking
try {
messagePublisher
.publishAwaitAcknowledgement(messageBuilder.build(data, dto2ByteConverter),
toDestination, deliveryConfirmationTimeOutInMilliseconds);
} catch (InterruptedException e) {
// process InterruptedException
}
}
/**
* Basic example of a business object, for message payload
*/
static class MyData implements Serializable {
private static final long serialVersionUID = 1L;
private final String name;
MyData(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
/**
* Basic example of some context related to message post-processing (on acknowledgement)
*/
static class MyContext {
private final String name;
MyContext(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
}
|
//抽象工厂模式(Abstract Factory)
// CPU工厂接口
// Factory interface: each concrete CPU type knows how to "create" (announce) itself.
export interface CPUFactory {
  // Explicit void return type: the original omitted it, which is an
  // implicit `any` under strict TypeScript settings.
  createCPU(): void;
}
// IntelCPU工厂
// Concrete factory producing Intel CPUs.
export class IntelCPU implements CPUFactory {
  // Explicit void return type (was implicit `any` in the original).
  createCPU(): void {
    console.log("Intel CPU");
  }
}
// AMDCPU工厂
// Concrete factory producing AMD CPUs.
export class AMDCPU implements CPUFactory {
  // Explicit void return type (was implicit `any` in the original).
  createCPU(): void {
    console.log("AMD CPU");
  }
}
// 上面是一个工厂模式。下面也是一个工厂模式
// 创建抽象工厂类接口
// Abstract factory interface: the second level of the pattern —
// a factory that produces CPU factories.
export interface Provider {
  createCPUFactory():CPUFactory;
}
// Provider that yields Intel CPU factories.
// NOTE(review): "InterCPUFactory" looks like a typo for "IntelCPUFactory";
// the public name is kept unchanged for compatibility with callers.
export class InterCPUFactory implements Provider {
  createCPUFactory(): CPUFactory {
    const intelFactory: CPUFactory = new IntelCPU();
    return intelFactory;
  }
}
// Provider that yields AMD CPU factories.
export class AMDCPUFactory implements Provider {
  createCPUFactory(): CPUFactory {
    const amdFactory: CPUFactory = new AMDCPU();
    return amdFactory;
  }
}
export class AbstractFactoryTest {
public static test() {
// 创建一个生产CPU工厂的工厂
let cpufactory:Provider = new InterCPUFactory();
// 通过CPU工厂的工厂创建一个IntelCPU工厂
let intelcpu:CPUFactory = cpufactory.createCPUFactory();
// IntelCPU工厂生产intelCPU
intelcpu.createCPU();
}
} |
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Build out-of-tree: create the build directory if needed and enter it;
# abort the script if the directory cannot be entered.
mkdir -p build
cd build || exit
# Configure (cmake) and build (make) in the current directory.
# Prints a diagnostic and returns 1 on the first failing step; returns 0 on success.
function make_plugin() {
    cmake .. || { echo "cmake failed."; return 1; }
    (make) || { echo "make failed."; return 1; }
    return 0
}
# Run the build and report the result. Record the outcome so the script's exit
# status reflects a failed build (the original always exited 0, which hides
# failures from CI and calling scripts).
if make_plugin;
then
    echo "INFO: Build successfully."
    status=0
else
    echo "ERROR: Build failed."
    status=1
fi
cd - || exit
exit "$status"
|
#!/bin/sh
{
# NOTE: compile.sh looks for this file in order to determine if this is the Cuberite folder.
# Please modify compile.sh if you want to rename or remove this file.
# This file was chosen arbitrarily and it is a good enough indicator that we are in the Cuberite folder.
set -e
KERNEL=$(uname -s)
echo "Identifying kernel: $KERNEL"
if [ "$KERNEL" = "Linux" ]; then
	PLATFORM=$(uname -m)
	echo "Identifying platform: $PLATFORM"
	case "$PLATFORM" in
		"i686") DOWNLOADURL="https://github.com/nickc01/cuberite/releases/download/v1.0.0.0/Server.tar.gz" ;;
		"x86_64") DOWNLOADURL="https://github.com/nickc01/cuberite/releases/download/v1.0.0.0/Server.tar.gz" ;;
		# Assume that all arm devices are a raspi for now.
		arm*) DOWNLOADURL="https://download.cuberite.org/linux-armhf-raspbian/Cuberite.tar.gz" ;;
		# Allow install on Raspberry Pi 4 Ubuntu x64 (AArch64) using the ARM builds.
		"aarch64") DOWNLOADURL="https://download.cuberite.org/linux-aarch64/Cuberite.tar.gz" ;;
		# Fail fast on platforms we have no build for, instead of the original
		# behaviour of invoking curl with an empty URL.
		*) echo "Unsupported platform: $PLATFORM"; exit 1 ;;
	esac
elif [ "$KERNEL" = "Darwin" ]; then
	# All Darwins we care about are x86_64
	DOWNLOADURL="https://download.cuberite.org/darwin-x86_64/Cuberite.tar.gz"
#elif [ "$KERNEL" = "FreeBSD" ]; then
#	DOWNLOADURL="https://builds.cuberite.org/job/Cuberite%20FreeBSD%20x64%20Master/lastSuccessfulBuild/artifact/Cuberite.tar.gz"
else
	echo "Unsupported kernel."
	exit 1
fi
echo "Downloading precompiled binaries."
# Quote the URL so it is passed as a single argument (no word splitting/globbing).
curl -Ls "$DOWNLOADURL" | tar -xzf -
echo "Done."
echo "Cuberite is now installed, run using 'cd Server; ./Cuberite'."
}
|
<reponame>Product-Live/request.libary-1
'use strict';
const querystring = require('querystring');
class Response {
constructor(request, data) {
this._request = request;
this._data = data;
}
headers(key) {
let h = this._request.headers || {};
if (key) {
return h[key];
}
return h;
}
status() {
return Number(this._request.statusCode) || 0;
}
isStatus(n) {
return (Math.floor(this.status() / 100) === n);
}
isOkay() {
return this.isStatus(2);
}
body() {
return this._data;
}
parse() {
let content = this.headers()['content-type'] || '';
if (content && content.match('json')) {
try {
return JSON.parse(this._data.toString());
} catch(e) {
return this._data;
}
}
if (content && content.match('application/x-www-form-urlencoded')) {
return querystring.parse(this._data.toString());
}
return this._data;
}
}
module.exports = Response;
|
echo "running bootstrap script"
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2015:1458
#
# Security announcement date: 2015-07-22 06:40:58 UTC
# Script generation date: 2017-01-01 21:16:28 UTC
#
# Operating System: Red Hat 6
# Architecture: x86_64
#
# Vulnerable packages fixed in version:
# - libreoffice-debuginfo.i686:4.2.8.2-11.el6
# - autocorr-af.noarch:4.2.8.2-11.el6
# - autocorr-bg.noarch:4.2.8.2-11.el6
# - autocorr-ca.noarch:4.2.8.2-11.el6
# - autocorr-cs.noarch:4.2.8.2-11.el6
# - autocorr-da.noarch:4.2.8.2-11.el6
# - autocorr-de.noarch:4.2.8.2-11.el6
# - autocorr-en.noarch:4.2.8.2-11.el6
# - autocorr-es.noarch:4.2.8.2-11.el6
# - autocorr-fa.noarch:4.2.8.2-11.el6
# - autocorr-fi.noarch:4.2.8.2-11.el6
# - autocorr-fr.noarch:4.2.8.2-11.el6
# - autocorr-ga.noarch:4.2.8.2-11.el6
# - autocorr-hr.noarch:4.2.8.2-11.el6
# - autocorr-hu.noarch:4.2.8.2-11.el6
# - autocorr-it.noarch:4.2.8.2-11.el6
# - autocorr-ja.noarch:4.2.8.2-11.el6
# - autocorr-ko.noarch:4.2.8.2-11.el6
# - autocorr-lb.noarch:4.2.8.2-11.el6
# - autocorr-lt.noarch:4.2.8.2-11.el6
# - autocorr-mn.noarch:4.2.8.2-11.el6
# - autocorr-nl.noarch:4.2.8.2-11.el6
# - autocorr-pl.noarch:4.2.8.2-11.el6
# - autocorr-pt.noarch:4.2.8.2-11.el6
# - autocorr-ro.noarch:4.2.8.2-11.el6
# - autocorr-ru.noarch:4.2.8.2-11.el6
# - autocorr-sk.noarch:4.2.8.2-11.el6
# - autocorr-sl.noarch:4.2.8.2-11.el6
# - autocorr-sr.noarch:4.2.8.2-11.el6
# - autocorr-sv.noarch:4.2.8.2-11.el6
# - autocorr-tr.noarch:4.2.8.2-11.el6
# - autocorr-vi.noarch:4.2.8.2-11.el6
# - autocorr-zh.noarch:4.2.8.2-11.el6
# - libreoffice-opensymbol-fonts.noarch:4.2.8.2-11.el6
# - libreoffice-base.x86_64:4.2.8.2-11.el6
# - libreoffice-calc.x86_64:4.2.8.2-11.el6
# - libreoffice-core.x86_64:4.2.8.2-11.el6
# - libreoffice-debuginfo.x86_64:4.2.8.2-11.el6
# - libreoffice-draw.x86_64:4.2.8.2-11.el6
# - libreoffice-emailmerge.x86_64:4.2.8.2-11.el6
# - libreoffice-graphicfilter.x86_64:4.2.8.2-11.el6
# - libreoffice-headless.x86_64:4.2.8.2-11.el6
# - libreoffice-impress.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-af.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ar.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-as.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-bg.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-bn.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ca.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-cs.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-cy.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-da.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-de.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-dz.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-el.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-en.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-es.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-et.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-eu.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-fi.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-fr.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ga.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-gl.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-gu.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-he.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-hi.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-hr.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-hu.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-it.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ja.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-kn.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ko.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-lt.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-mai.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ml.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-mr.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ms.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-nb.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-nl.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-nn.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-nr.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-nso.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-or.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-pa.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-pl.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-pt-BR.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-pt-PT.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ro.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ru.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-sk.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-sl.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-sr.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ss.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-st.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-sv.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ta.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-te.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-th.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-tn.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-tr.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ts.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-uk.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ur.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-ve.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-xh.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-zh-Hans.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-zh-Hant.x86_64:4.2.8.2-11.el6
# - libreoffice-langpack-zu.x86_64:4.2.8.2-11.el6
# - libreoffice-math.x86_64:4.2.8.2-11.el6
# - libreoffice-ogltrans.x86_64:4.2.8.2-11.el6
# - libreoffice-pdfimport.x86_64:4.2.8.2-11.el6
# - libreoffice-pyuno.x86_64:4.2.8.2-11.el6
# - libreoffice-ure.x86_64:4.2.8.2-11.el6
# - libreoffice-wiki-publisher.x86_64:4.2.8.2-11.el6
# - libreoffice-writer.x86_64:4.2.8.2-11.el6
# - libreoffice-xsltfilter.x86_64:4.2.8.2-11.el6
# - libreoffice-gdb-debug-support.i686:4.2.8.2-11.el6
# - autocorr-is.noarch:4.2.8.2-11.el6
# - libreoffice.x86_64:4.2.8.2-11.el6
# - libreoffice-bsh.x86_64:4.2.8.2-11.el6
# - libreoffice-filters.x86_64:4.2.8.2-11.el6
# - libreoffice-gdb-debug-support.x86_64:4.2.8.2-11.el6
# - libreoffice-glade.x86_64:4.2.8.2-11.el6
# - libreoffice-librelogo.x86_64:4.2.8.2-11.el6
# - libreoffice-nlpsolver.x86_64:4.2.8.2-11.el6
# - libreoffice-rhino.x86_64:4.2.8.2-11.el6
# - libreoffice-sdk.x86_64:4.2.8.2-11.el6
# - libreoffice-sdk-doc.x86_64:4.2.8.2-11.el6
#
# Last versions recommended by the security team:
# - libreoffice-debuginfo.i686:4.2.8.2-11.el6_7.1
# - autocorr-af.noarch:4.2.8.2-11.el6
# - autocorr-bg.noarch:4.2.8.2-11.el6
# - autocorr-ca.noarch:4.2.8.2-11.el6
# - autocorr-cs.noarch:4.2.8.2-11.el6
# - autocorr-da.noarch:4.2.8.2-11.el6
# - autocorr-de.noarch:4.2.8.2-11.el6
# - autocorr-en.noarch:4.2.8.2-11.el6
# - autocorr-es.noarch:4.2.8.2-11.el6
# - autocorr-fa.noarch:4.2.8.2-11.el6
# - autocorr-fi.noarch:4.2.8.2-11.el6
# - autocorr-fr.noarch:4.2.8.2-11.el6
# - autocorr-ga.noarch:4.2.8.2-11.el6
# - autocorr-hr.noarch:4.2.8.2-11.el6
# - autocorr-hu.noarch:4.2.8.2-11.el6
# - autocorr-it.noarch:4.2.8.2-11.el6
# - autocorr-ja.noarch:4.2.8.2-11.el6
# - autocorr-ko.noarch:4.2.8.2-11.el6
# - autocorr-lb.noarch:4.2.8.2-11.el6
# - autocorr-lt.noarch:4.2.8.2-11.el6
# - autocorr-mn.noarch:4.2.8.2-11.el6
# - autocorr-nl.noarch:4.2.8.2-11.el6
# - autocorr-pl.noarch:4.2.8.2-11.el6
# - autocorr-pt.noarch:4.2.8.2-11.el6
# - autocorr-ro.noarch:4.2.8.2-11.el6
# - autocorr-ru.noarch:4.2.8.2-11.el6
# - autocorr-sk.noarch:4.2.8.2-11.el6
# - autocorr-sl.noarch:4.2.8.2-11.el6
# - autocorr-sr.noarch:4.2.8.2-11.el6
# - autocorr-sv.noarch:4.2.8.2-11.el6
# - autocorr-tr.noarch:4.2.8.2-11.el6
# - autocorr-vi.noarch:4.2.8.2-11.el6
# - autocorr-zh.noarch:4.2.8.2-11.el6
# - libreoffice-opensymbol-fonts.noarch:4.2.8.2-11.el6
# - libreoffice-base.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-calc.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-core.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-debuginfo.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-draw.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-emailmerge.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-graphicfilter.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-headless.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-impress.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-af.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ar.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-as.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-bg.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-bn.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ca.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-cs.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-cy.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-da.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-de.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-dz.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-el.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-en.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-es.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-et.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-eu.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-fi.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-fr.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ga.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-gl.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-gu.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-he.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-hi.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-hr.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-hu.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-it.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ja.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-kn.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ko.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-lt.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-mai.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ml.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-mr.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ms.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-nb.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-nl.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-nn.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-nr.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-nso.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-or.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-pa.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-pl.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-pt-BR.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-pt-PT.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ro.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ru.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-sk.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-sl.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-sr.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ss.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-st.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-sv.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ta.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-te.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-th.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-tn.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-tr.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ts.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-uk.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ur.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-ve.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-xh.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-zh-Hans.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-zh-Hant.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-langpack-zu.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-math.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-ogltrans.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-pdfimport.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-pyuno.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-ure.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-wiki-publisher.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-writer.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-xsltfilter.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-gdb-debug-support.i686:4.2.8.2-11.el6_7.1
# - autocorr-is.noarch:4.2.8.2-11.el6
# - libreoffice.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-bsh.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-filters.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-gdb-debug-support.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-glade.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-librelogo.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-nlpsolver.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-rhino.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-sdk.x86_64:4.2.8.2-11.el6_7.1
# - libreoffice-sdk-doc.x86_64:4.2.8.2-11.el6_7.1
#
# CVE List:
# - CVE-2015-1774
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Install the fixed package versions one at a time, in the same order and with
# the same per-package yum transaction behaviour as the original
# line-per-package form (a failure of one package does not stop the rest).
for pkg in \
    libreoffice-debuginfo.i686-4.2.8.2 \
    autocorr-af.noarch-4.2.8.2 \
    autocorr-bg.noarch-4.2.8.2 \
    autocorr-ca.noarch-4.2.8.2 \
    autocorr-cs.noarch-4.2.8.2 \
    autocorr-da.noarch-4.2.8.2 \
    autocorr-de.noarch-4.2.8.2 \
    autocorr-en.noarch-4.2.8.2 \
    autocorr-es.noarch-4.2.8.2 \
    autocorr-fa.noarch-4.2.8.2 \
    autocorr-fi.noarch-4.2.8.2 \
    autocorr-fr.noarch-4.2.8.2 \
    autocorr-ga.noarch-4.2.8.2 \
    autocorr-hr.noarch-4.2.8.2 \
    autocorr-hu.noarch-4.2.8.2 \
    autocorr-it.noarch-4.2.8.2 \
    autocorr-ja.noarch-4.2.8.2 \
    autocorr-ko.noarch-4.2.8.2 \
    autocorr-lb.noarch-4.2.8.2 \
    autocorr-lt.noarch-4.2.8.2 \
    autocorr-mn.noarch-4.2.8.2 \
    autocorr-nl.noarch-4.2.8.2 \
    autocorr-pl.noarch-4.2.8.2 \
    autocorr-pt.noarch-4.2.8.2 \
    autocorr-ro.noarch-4.2.8.2 \
    autocorr-ru.noarch-4.2.8.2 \
    autocorr-sk.noarch-4.2.8.2 \
    autocorr-sl.noarch-4.2.8.2 \
    autocorr-sr.noarch-4.2.8.2 \
    autocorr-sv.noarch-4.2.8.2 \
    autocorr-tr.noarch-4.2.8.2 \
    autocorr-vi.noarch-4.2.8.2 \
    autocorr-zh.noarch-4.2.8.2 \
    libreoffice-opensymbol-fonts.noarch-4.2.8.2 \
    libreoffice-base.x86_64-4.2.8.2 \
    libreoffice-calc.x86_64-4.2.8.2 \
    libreoffice-core.x86_64-4.2.8.2 \
    libreoffice-debuginfo.x86_64-4.2.8.2 \
    libreoffice-draw.x86_64-4.2.8.2 \
    libreoffice-emailmerge.x86_64-4.2.8.2 \
    libreoffice-graphicfilter.x86_64-4.2.8.2 \
    libreoffice-headless.x86_64-4.2.8.2 \
    libreoffice-impress.x86_64-4.2.8.2 \
    libreoffice-langpack-af.x86_64-4.2.8.2 \
    libreoffice-langpack-ar.x86_64-4.2.8.2 \
    libreoffice-langpack-as.x86_64-4.2.8.2 \
    libreoffice-langpack-bg.x86_64-4.2.8.2 \
    libreoffice-langpack-bn.x86_64-4.2.8.2 \
    libreoffice-langpack-ca.x86_64-4.2.8.2 \
    libreoffice-langpack-cs.x86_64-4.2.8.2 \
    libreoffice-langpack-cy.x86_64-4.2.8.2 \
    libreoffice-langpack-da.x86_64-4.2.8.2 \
    libreoffice-langpack-de.x86_64-4.2.8.2 \
    libreoffice-langpack-dz.x86_64-4.2.8.2 \
    libreoffice-langpack-el.x86_64-4.2.8.2 \
    libreoffice-langpack-en.x86_64-4.2.8.2 \
    libreoffice-langpack-es.x86_64-4.2.8.2 \
    libreoffice-langpack-et.x86_64-4.2.8.2 \
    libreoffice-langpack-eu.x86_64-4.2.8.2 \
    libreoffice-langpack-fi.x86_64-4.2.8.2 \
    libreoffice-langpack-fr.x86_64-4.2.8.2 \
    libreoffice-langpack-ga.x86_64-4.2.8.2 \
    libreoffice-langpack-gl.x86_64-4.2.8.2 \
    libreoffice-langpack-gu.x86_64-4.2.8.2 \
    libreoffice-langpack-he.x86_64-4.2.8.2 \
    libreoffice-langpack-hi.x86_64-4.2.8.2 \
    libreoffice-langpack-hr.x86_64-4.2.8.2 \
    libreoffice-langpack-hu.x86_64-4.2.8.2 \
    libreoffice-langpack-it.x86_64-4.2.8.2 \
    libreoffice-langpack-ja.x86_64-4.2.8.2 \
    libreoffice-langpack-kn.x86_64-4.2.8.2 \
    libreoffice-langpack-ko.x86_64-4.2.8.2 \
    libreoffice-langpack-lt.x86_64-4.2.8.2 \
    libreoffice-langpack-mai.x86_64-4.2.8.2 \
    libreoffice-langpack-ml.x86_64-4.2.8.2 \
    libreoffice-langpack-mr.x86_64-4.2.8.2 \
    libreoffice-langpack-ms.x86_64-4.2.8.2 \
    libreoffice-langpack-nb.x86_64-4.2.8.2 \
    libreoffice-langpack-nl.x86_64-4.2.8.2 \
    libreoffice-langpack-nn.x86_64-4.2.8.2 \
    libreoffice-langpack-nr.x86_64-4.2.8.2 \
    libreoffice-langpack-nso.x86_64-4.2.8.2 \
    libreoffice-langpack-or.x86_64-4.2.8.2 \
    libreoffice-langpack-pa.x86_64-4.2.8.2 \
    libreoffice-langpack-pl.x86_64-4.2.8.2 \
    libreoffice-langpack-pt-BR.x86_64-4.2.8.2 \
    libreoffice-langpack-pt-PT.x86_64-4.2.8.2 \
    libreoffice-langpack-ro.x86_64-4.2.8.2 \
    libreoffice-langpack-ru.x86_64-4.2.8.2 \
    libreoffice-langpack-sk.x86_64-4.2.8.2 \
    libreoffice-langpack-sl.x86_64-4.2.8.2 \
    libreoffice-langpack-sr.x86_64-4.2.8.2 \
    libreoffice-langpack-ss.x86_64-4.2.8.2 \
    libreoffice-langpack-st.x86_64-4.2.8.2 \
    libreoffice-langpack-sv.x86_64-4.2.8.2 \
    libreoffice-langpack-ta.x86_64-4.2.8.2 \
    libreoffice-langpack-te.x86_64-4.2.8.2 \
    libreoffice-langpack-th.x86_64-4.2.8.2 \
    libreoffice-langpack-tn.x86_64-4.2.8.2 \
    libreoffice-langpack-tr.x86_64-4.2.8.2 \
    libreoffice-langpack-ts.x86_64-4.2.8.2 \
    libreoffice-langpack-uk.x86_64-4.2.8.2 \
    libreoffice-langpack-ur.x86_64-4.2.8.2 \
    libreoffice-langpack-ve.x86_64-4.2.8.2 \
    libreoffice-langpack-xh.x86_64-4.2.8.2 \
    libreoffice-langpack-zh-Hans.x86_64-4.2.8.2 \
    libreoffice-langpack-zh-Hant.x86_64-4.2.8.2 \
    libreoffice-langpack-zu.x86_64-4.2.8.2 \
    libreoffice-math.x86_64-4.2.8.2 \
    libreoffice-ogltrans.x86_64-4.2.8.2 \
    libreoffice-pdfimport.x86_64-4.2.8.2 \
    libreoffice-pyuno.x86_64-4.2.8.2 \
    libreoffice-ure.x86_64-4.2.8.2 \
    libreoffice-wiki-publisher.x86_64-4.2.8.2 \
    libreoffice-writer.x86_64-4.2.8.2 \
    libreoffice-xsltfilter.x86_64-4.2.8.2 \
    libreoffice-gdb-debug-support.i686-4.2.8.2 \
    autocorr-is.noarch-4.2.8.2 \
    libreoffice.x86_64-4.2.8.2 \
    libreoffice-bsh.x86_64-4.2.8.2 \
    libreoffice-filters.x86_64-4.2.8.2 \
    libreoffice-gdb-debug-support.x86_64-4.2.8.2 \
    libreoffice-glade.x86_64-4.2.8.2 \
    libreoffice-librelogo.x86_64-4.2.8.2 \
    libreoffice-nlpsolver.x86_64-4.2.8.2 \
    libreoffice-rhino.x86_64-4.2.8.2 \
    libreoffice-sdk.x86_64-4.2.8.2 \
    libreoffice-sdk-doc.x86_64-4.2.8.2
do
    sudo yum install "$pkg" -y
done
|
<gh_stars>10-100
package com.pearson.docussandra.controller;
import com.pearson.docussandra.abstracttests.AbstractTableControllerTest;
import com.pearson.docussandra.testhelper.Fixtures;
import com.pearson.docussandra.testhelper.RestExpressManager;
/**
* Functional tests for the table controller class. ROUTE :
* /databases/{database}/tables/{table}
*
* @author https://github.com/JeffreyDeYoung
*/
public class TableControllerTest extends AbstractTableControllerTest
{
    /**
     * Constructor. Creates a new implementation of this test to be a functional
     * test (testing against a mock Cassandra).
     *
     * @throws Exception if the fixtures or the shared RestExpress server cannot
     *             be initialized.
     */
    public TableControllerTest() throws Exception
    {
        super(Fixtures.getInstance(true));
        // Ensure the shared RestExpress server is running before any test executes.
        // NOTE(review): the boolean argument presumably selects the mocked
        // (in-memory Cassandra) configuration — confirm against RestExpressManager.
        RestExpressManager.getManager().ensureRestExpressRunning(true);
    }
}
|
<filename>frontend/projects/storage/src/lib/storage-pipes/storage-node-to-name.pipe.ts
import {Pipe, PipeTransform} from '@angular/core';
import {StorageNode} from 'projects/storage/src/lib/entities/storage-node';
import * as _ from 'lodash';
@Pipe({
  name: 'storageNodeToName'
})
export class StorageNodeToNamePipe implements PipeTransform {

  /** Returns the node's display name: the last segment of its `/`-separated path. */
  transform(node: StorageNode, args?: any): string {
    const segments = _.split(node.path, '/');
    return _.last(segments);
  }
}
|
package com.mybatis.project.dao;
import com.mybatis.project.po.User;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.support.SqlSessionDaoSupport;
import java.util.Date;
import java.util.List;
/**
 * @Project: mybatis
 * @description: MyBatis-backed implementation of UserDao.
 * @author: sunkang
 * @create: 2018-10-07 15:10
 * @ModificationHistory who when What
 **/
public class UserDaoImpl implements UserDao {

    // SqlSessionFactory is supplied by the caller (constructor injection);
    // every DAO method opens its own short-lived SqlSession from it.
    private SqlSessionFactory sqlSessionFactory;

    public UserDaoImpl(SqlSessionFactory sqlSessionFactory) {
        this.sqlSessionFactory = sqlSessionFactory;
    }

    /**
     * Loads a single user via the "test.findUserById" mapped statement.
     *
     * @param id primary key of the user to fetch
     * @return the matching user, or whatever selectOne yields when absent
     */
    @Override
    public User findUserById(int id) throws Exception {
        // try-with-resources closes the session even when the query throws;
        // the previous code leaked the session on any exception.
        try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
            return sqlSession.selectOne("test.findUserById", id);
        }
    }

    /**
     * Finds users by name via the "test.findUserByName" mapped statement.
     *
     * @param name name (or name pattern, per the mapper) to search for
     * @return the list of matching users
     */
    @Override
    public List<User> findUserByName(String name) throws Exception {
        try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
            return sqlSession.selectList("test.findUserByName", name);
        }
    }

    /**
     * Inserts a user and commits the transaction.
     *
     * @param user the user to persist
     */
    @Override
    public void insertUser(User user) throws Exception {
        try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
            sqlSession.insert("test.insertUser", user);
            // commit before the session is auto-closed
            sqlSession.commit();
        }
    }

    /**
     * Deletes the user with the given id and commits the transaction.
     *
     * @param id primary key of the user to delete
     */
    @Override
    public void deleteUser(int id) throws Exception {
        try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
            sqlSession.delete("test.deleteUser", id);
            sqlSession.commit();
        }
    }
}
|
import React from "react";
import { Link } from "react-router-dom";
import Typography from "@material-ui/core/Typography";
import Container from "@material-ui/core/Container";
import Box from "@material-ui/core/Box";
import { useStyles } from "../theme/theme";
import Grid from "@material-ui/core/Grid";
// Static footer column definitions. Entries are either plain strings or
// ready-made anchor elements.
const footers = [
  {
    title: "Company",
    description: [
      // target="_blank" links carry rel="noopener noreferrer" so the opened
      // page cannot reach back through window.opener (reverse tabnabbing).
      <a href="/" target="_blank" rel="noopener noreferrer">Home</a>,
      <a href="/#/bakonPool" target="_blank" rel="noopener noreferrer">Pool</a>,
      <a href="/#/hardwareSpecs" >Hardware</a>,
    ],
  },
  {
    title: "Features",
    // Typo fixes: "Decentrilized" -> "Decentralized", "Unstopable" -> "Unstoppable".
    description: ["Bare Metal", "Decentralized Relays", "Unstoppable Infrastructure"],
  },
  {
    title: "Resources",
    description: [
      <a href="https://github.com/lowskidev/" target="_blank" rel="noopener noreferrer">GitHub</a> as any,
      <a href="https://twitter.com/lowskidev" target="_blank" rel="noopener noreferrer">Twitter</a>,
      <a href="https://lowski.dev" >Home</a>,
    ],
  }
];
/** Centered copyright line with a link back home and the current year. */
export function Copyright() {
  const year = new Date().getFullYear();
  return (
    <Typography variant="body2" color="textSecondary" align="center">
      {"Copyright © "}
      <Link to="home">LowSki.dev</Link>
      {" "}
      {year}
      {"."}
    </Typography>
  );
}
/** Site footer: one grid column per entry in `footers`, plus the copyright line. */
export function Footer() {
  const classes = useStyles();
  return (
    <Container maxWidth="md" component="footer" className={classes.footer}>
      <Grid container spacing={4} justify="space-evenly">
        {footers.map((footer) => (
          <Grid item xs={6} sm={3} key={footer.title}>
            <Typography variant="h6" color="textPrimary" gutterBottom>
              {footer.title}
            </Typography>
            <ul>
              {/* The key used to be the item itself; JSX elements are not
                  valid/stable React keys. The lists are static, so the
                  index is a safe key here. */}
              {footer.description.map((item, index) => (
                <li key={index}>
                  {item}
                </li>
              ))}
            </ul>
          </Grid>
        ))}
      </Grid>
      <Box mt={5}>
        <Copyright />
      </Box>
    </Container>
  );
}
|
import { Module } from '@nestjs/common';
import { ContractsController } from 'src/contracts/controllers/contracts.controller';
import { ContractsService } from 'src/contracts/services/contracts.service';
/** Nest module wiring the contracts controller to its service. */
@Module({
  providers: [ContractsService],
  controllers: [ContractsController],
})
export class ContractsModule {}
|
<gh_stars>0
// Build { label, key } option objects from [label, key] pairs.
const toOptions = (pairs) => pairs.map(([label, key]) => ({ label, key }))

// Form layout modes: inline, or 2/3/4 columns.
const formMode = toOptions([
  ['内联', 'inline'],
  ['两栏', '2'],
  ['三栏', '3'],
  ['四栏', '4'],
])

// Label alignment options.
const labelPos = toOptions([
  ['靠左', 'left'],
  ['靠右', 'right'],
])

// Button-row placement options.
const btnMode = toOptions([
  ['单独一行', 'block'],
  ['贴近表单', 'left'],
  ['贴近右侧', 'right'],
])

export { formMode, labelPos, btnMode }
def parse_and_build_engine(spec, PATH_LIST):
    """Import the engine module named `spec` and return its entry points.

    Args:
        spec: dotted module name passed to __import__.
        PATH_LIST: names forwarded as __import__'s `fromlist` (ensures the
            leaf module is returned for dotted names).

    Returns:
        Tuple (parse, build) — the module's `parse` and `build` attributes.

    Raises:
        Exception: when the module cannot be imported; the original
            ImportError is chained as the cause so the real reason
            (missing package, syntax error, ...) is not lost.
    """
    try:
        engine_module = __import__(name=spec, fromlist=PATH_LIST)
        parse = engine_module.parse
        build = engine_module.build
        return parse, build
    except ImportError as e:
        # `from e` preserves the underlying ImportError in the traceback;
        # the previous bare raise discarded it.
        raise Exception("[TEST GENERATION] : engine build failure !") from e
#!/bin/bash
#
# This script iterates through all .ts and .tsx files recursively starting from the
# given directory and adds eslint:disable to the start of each.
#
# Usage:
function usage() {
    # $0 already holds the path used to invoke the script; the old "./$0"
    # printed a doubled prefix (e.g. "./././script.sh").
    echo -e "usage: $0 path-to-files\n"
}
#
##########################################################################################

##########################################################################################
# Check Programs needed are installed
##########################################################################################
type find >/dev/null 2>&1 || {
    echo >&2 "find is required but is not installed. Aborting."
    exit 1
}

type sed >/dev/null 2>&1 || {
    echo >&2 "sed is required but is not installed. Aborting."
    exit 1
}

# Require the target directory argument; previously a missing argument was
# passed to find as an empty string and produced a confusing error.
if [ $# -lt 1 ]; then
    usage
    exit 1
fi

hostOS="$(uname -s)"

# Paths are quoted so directories containing spaces work.
if [ "${hostOS}" = "Darwin" ]; then
    # BSD sed requires an explicit (empty) backup suffix after -i and a
    # literal newline in the insert command.
    find "${1}" -name "*.ts" -exec sed -i '' \
'1i\
\/\* eslint-disable \*\/
' {} \;
    find "${1}" -name "*.tsx" -exec sed -i '' \
'1i\
\/\* eslint-disable \*\/
' {} \;
else
    # GNU sed accepts the insert command on a single line.
    find "${1}" -name "*.ts" -exec sed -i '1i \/\* eslint-disable \*\/' {} \;
    find "${1}" -name "*.tsx" -exec sed -i '1i \/\* eslint-disable \*\/' {} \;
fi
#!/usr/bin/env bash

# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.

# Fail fast, trace commands, and treat unset variables as errors.
set -xeuo pipefail

export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}"
source ${FEEDSTOCK_ROOT}/.scripts/logging_utils.sh

( endgroup "Start Docker" ) 2> /dev/null

( startgroup "Configuring conda" ) 2> /dev/null

export PYTHONUNBUFFERED=1
export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}"
export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support"
export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml"

# Point conda-build's working/output directory at the feedstock's
# build_artifacts folder (heredoc content must stay verbatim).
cat >~/.condarc <<CONDARC
conda-build:
  root-dir: ${FEEDSTOCK_ROOT}/build_artifacts
CONDARC

BUILD_CMD=build

conda install --yes --quiet "conda-forge-ci-setup=3" conda-build pip ${GET_BOA:-} -c conda-forge

# set up the condarc
setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

source run_conda_forge_build_setup

# Install the yum requirements defined canonically in the
# "recipe/yum_requirements.txt" file. After updating that file,
# run "conda smithy rerender" and this line will be updated
# automatically.
/usr/bin/sudo -n yum install -y mesa-libGL-devel xorg-x11-server-Xvfb dejavu-sans-mono-fonts xorg-x11-server-Xorg

# make the build number clobber
make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

( endgroup "Configuring conda" ) 2> /dev/null

# Debug mode: run `conda debug` and drop into an interactive shell instead
# of performing the full build/validate/upload flow below.
if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then
    if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then
        EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}"
    fi
    conda debug "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
        ${EXTRA_CB_OPTIONS:-} \
        --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml"

    # Drop into an interactive shell
    /bin/bash
else
    # Normal mode: build, then validate and (optionally) upload the outputs.
    conda $BUILD_CMD "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
        --suppress-variables ${EXTRA_CB_OPTIONS:-} \
        --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml"
    ( startgroup "Validating outputs" ) 2> /dev/null

    validate_recipe_outputs "${FEEDSTOCK_NAME}"

    ( endgroup "Validating outputs" ) 2> /dev/null

    ( startgroup "Uploading packages" ) 2> /dev/null

    if [[ "${UPLOAD_PACKAGES}" != "False" ]]; then
        upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
    fi

    ( endgroup "Uploading packages" ) 2> /dev/null
fi

( startgroup "Final checks" ) 2> /dev/null

# Marker file signalling to the CI wrapper that this build completed.
touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}"
<filename>morse.js
/**
 * Object : Morse
 * purpose : will change a character to morse or the other way around
 * - creates a reverse map which is most efficient for larger static (non-dynamic) objects
 * - converting the encoded string instead of per character (@see (object) Output)
 */
function Morse()
{
	// Bug fix: this call was commented out, so this.reverseMap was never
	// built and morseToString() crashed on its hasOwnProperty lookup.
	this.createReverse();
}
/**
 * function() createReverse creates reverse map (map of morse values and their respective character)
 * function(ch) {char} CharToMorse char -> morse
 * function(string) {string} stringToMorse string -> morse
 * function(string) {string} morseToString morse -> string
 */
Morse.prototype = {
	ToString : function(){},

	// Build this.reverseMap: morse-code string -> character.
	createReverse : function(){
		// Plain object instead of an array: the keys are morse strings
		// (".-", "-...", ...), not numeric indices.
		var reverseMap = {};
		for (var j in Morse.convert){
			if (!Object.prototype.hasOwnProperty.call(Morse.convert, j)) continue;
			reverseMap[Morse.convert[j]] = j;
		}
		this.reverseMap = reverseMap;
	},

	// Convert one character to morse; spaces pass through unchanged.
	// Throws for anything that is not a letter or a space.
	CharToMorse : function(ch){
		if (ch === " ") return ' ';
		if (!ch.match(/[a-z]/i)) throw "not a character";
		// Direct table lookup replaces the previous linear scan over
		// Morse.convert; same result for every accepted character.
		return Morse.convert[ch.toLowerCase()];
	},

	// Encode a whole string, separating letters with single spaces.
	stringToMorse : function(str){
		var result = "";
		for (var i = 0; i < str.length; i++)
		{
			result += this.CharToMorse(str.charAt(i));
			// Bug fix: the old test `i != str.length` was always true inside
			// the loop, so a stray trailing space was appended after the
			// final letter; comparing against length - 1 stops that.
			if (i != str.length - 1 && result.charAt(result.length-1) != " ") result += " ";
		}
		return result;
	},

	// Decode a morse string: single spaces separate letters, longer space
	// runs (turned into a "0" marker token) separate words.
	morseToString : function(str){
		var result = "";
		var morseStr = str.replace( / +/g, ' 0 ').split(" ");
		for (var i = 0; i < morseStr.length; i++) {
			if (morseStr[i] === "0") { result+=" "; continue;}
			if (this.reverseMap.hasOwnProperty(morseStr[i])){
				result += this.reverseMap[morseStr[i]];
			}
		}
		return result;
	}
};
/* Character -> morse-code lookup table (lowercase letters only). */
Morse.convert = {
	"a": ".-",    "b": "-...",  "c": "-.-.",  "d": "-..",
	"e": ".",     "f": "..-.",  "g": "--.",   "h": "....",
	"i": "..",    "j": ".---",  "k": "-.-",   "l": ".-..",
	"m": "--",    "n": "-.",    "o": "---",   "p": ".--.",
	"q": "--.-",  "r": ".-.",   "s": "...",   "t": "-",
	"u": "..-",   "v": "...-",  "w": ".--",   "x": "-..-",
	"y": "-.--",  "z": "--.."
};
|
/**
* @author TheTrueKuro
* @version 1.1
*
* Java class for common operations such as
* the sum or product of an array, multiplication of tuples
* and others.
*/
package Evomath;
import Evomath.Matrix;
public class Utilities {

    /** Sum of all entries of {@code d}; 0 for an empty array. */
    public static double sum(double[] d) {
        double s = 0;
        for (double v : d)
            s += v;
        return s;
    }

    /** Sum of all entries of {@code l}; 0 for an empty array. */
    public static long sum(long[] l) {
        long s = 0;
        for (long v : l)
            s += v;
        return s;
    }

    /** Sum of every cell of the matrix (dimensions taken from m.getSize()). */
    public static double sum(Matrix m) {
        double s = 0;
        for (int i = 0; i < m.getSize()[0]; i++)
            for (int j = 0; j < m.getSize()[1]; j++)
                s += m.get(i, j);
        return s;
    }

    /** Arithmetic mean of {@code d}; NaN for an empty array (0.0 / 0). */
    public static double average(double[] d) {
        double s = sum(d);
        return s / d.length;
    }

    /** Sum of an array of complex numbers, accumulated from a fresh ComplexNumber. */
    public static ComplexNumber sum(ComplexNumber[] c) {
        ComplexNumber s = new ComplexNumber();
        for (ComplexNumber z : c)
            s = ComplexNumber.add(s, z);
        return s;
    }

    /** Product of all entries of {@code d}; 1 for an empty array. */
    public static double product(double[] d) {
        double p = 1;
        for (double v : d)
            p *= v;
        return p;
    }

    /** Product of all entries of {@code l}; 1 for an empty array. */
    public static long product(long[] l) {
        long p = 1;
        for (long v : l)
            p *= v;
        return p;
    }

    /** Product of an array of complex numbers; null for an empty array. */
    public static ComplexNumber product(ComplexNumber[] c) {
        if (c.length < 1) return null;
        ComplexNumber p = new ComplexNumber(c[0]);
        for (int i = 1; i < c.length; i++)
            p = ComplexNumber.multiply(p, c[i]);
        return p;
    }

    /** Largest entry of {@code d}; throws on an empty array (reads d[0]). */
    public static double max(double[] d) {
        double max = d[0];
        for (int i = 1; i < d.length; i++) {
            // Keep the original `<` comparison (not Math.max) so behavior
            // around NaN entries is unchanged.
            max = max < d[i] ? d[i] : max;
        }
        return max;
    }

    /** Largest entry of {@code l}; throws on an empty array. */
    public static long max(long[] l) {
        long max = l[0];
        for (int i = 1; i < l.length; i++) {
            max = max < l[i] ? l[i] : max;
        }
        return max;
    }

    /** Index of the first maximum of {@code d}; throws on an empty array. */
    public static int indexOfMax(double[] d) {
        double max = d[0];
        int index = 0;
        for (int i = 1; i < d.length; i++) {
            if (max < d[i]) {
                max = d[i];
                index = i;
            }
        }
        return index;
    }

    /** Index of the first maximum of {@code l}; throws on an empty array. */
    public static int indexOfMax(long[] l) {
        long max = l[0];
        int index = 0;
        for (int i = 1; i < l.length; i++) {
            if (max < l[i]) {
                max = l[i];
                index = i;
            }
        }
        return index;
    }

    /** Index of the first entry equal (==) to {@code x}, or -1 if absent. */
    public static int indexOf(double x, double[] d) {
        for (int i = 0; i < d.length; i++)
            if (x == d[i])
                return i;
        return -1;
    }

    /** Index of the first entry equal to {@code x}, or -1 if absent. */
    public static int indexOf(long x, long[] l) {
        for (int i = 0; i < l.length; i++)
            if (x == l[i])
                return i;
        return -1;
    }

    /**
     * Index of the first entry {@code equals()} to {@code x}, or -1 if absent.
     * NOTE(review): throws NullPointerException when x is null — confirm
     * callers never pass null before changing.
     */
    public static int indexOf(Object x, Object[] arr) {
        for (int i = 0; i < arr.length; i++)
            if (x.equals(arr[i]))
                return i;
        return -1;
    }

    /**
     * Factorial of {@code f}, computed iteratively (the previous recursion
     * produced bit-identical results but one stack frame per step).
     * Returns -1 for negative input; silently overflows for f &gt; 20.
     */
    public static long fact(int f) {
        if (f < 0) return -1;
        long result = 1;
        for (int i = 2; i <= f; i++)
            result *= i;
        return result;
    }

    /**
     * Number of permutations: total * (total-1) * ... over {@code poss} factors.
     * Arguments are accepted in either order.
     */
    public static long perm(int poss, int total) {
        // Swap with a temporary instead of the old add/subtract trick —
        // same effect, much easier to read (and no overflow risk in the swap).
        if (poss > total) {
            int tmp = poss;
            poss = total;
            total = tmp;
        }
        long p = 1;
        for (int i = 0; i < poss; i++) {
            p *= total;
            total--;
        }
        return p;
    }

    /**
     * Number of combinations: perm(poss, total) / poss!.
     * Arguments are accepted in either order.
     */
    public static long combinations(int poss, int total) {
        if (poss > total) {
            int tmp = poss;
            poss = total;
            total = tmp;
        }
        long p = perm(poss, total);
        p /= fact(poss);
        return p;
    }

    /**
     * Element-wise product of two tuples; null when lengths differ.
     */
    public static double[] multiply(final double[] d1, final double[] d2) {
        if (d1.length != d2.length) return null;
        double[] d = new double[d1.length];
        for (int i = 0; i < d.length; i++)
            d[i] = d1[i] * d2[i];
        return d;
    }

    /** Element-wise product of two complex tuples; null when lengths differ. */
    public static ComplexNumber[] multiply(ComplexNumber[] c1, ComplexNumber[] c2) {
        if (c1.length != c2.length) return null;
        ComplexNumber[] c = new ComplexNumber[c1.length];
        for (int i = 0; i < c.length; i++)
            c[i] = ComplexNumber.multiply(c1[i], c2[i]);
        return c;
    }

    /**
     * Rounds {@code d} to {@code precision} decimal places by scaling,
     * adjusting, and truncating. Returns d unchanged when precision &lt; 1.
     * NOTE(review): fractions of exactly .5 round DOWN (the test is &gt; 0.5),
     * and negative values are truncated toward zero rather than rounded;
     * large d * 10^precision also overflows the int cast — confirm these are
     * acceptable before replacing with Math.round-based logic.
     */
    public static double round(double d, int precision) {
        if (precision < 1)
            return d;
        for (int i = 0; i < precision; i++)
            d *= 10;
        if (d - ((int) d) > 0.5)
            d++;
        d = (int) d;
        for (int i = 0; i < precision; i++)
            d /= 10;
        return d;
    }
}
|
/**
*
*/
package io.vilya.maia.core.context;
/**
 * Strategy for producing a String name from a bean class.
 *
 * @author erkea <<EMAIL>>
 *
 */
public interface BeanNameGenerator {

    /**
     * Generate a name for the given bean class.
     *
     * @param bean the class of the bean being named
     * @return the generated name
     */
    String generate(Class<?> bean);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.