| repo_name (string, 6–101 chars) | path (string, 4–300 chars) | text (string, 7–1.31M chars) |
|---|---|---|
hao-wang/Montage
|
js-test-suite/testsuite/8f936eaaa3a58e09d7b82246bd04634b.js
|
load("201224b0d1c296b45befd2285e95dd42.js");
function TestCase(n, d, e, a) {}
function reportCompare (expected, actual, description) {
new TestCase("", description, expected, actual);
}
new TestCase( "", "", 0, Number(new Number()) );
reportCompare(true, true);
evaluate("\
function TestCase(n, d, e, a) {}\
test_negation(-2147483648, 2147483648);\
test_negation(2147483647, -2147483647);\
function test_negation(value, expected) {\
reportCompare(expected, '', '-(' + value + ') == ' + expected);\
}");
|
neal-siekierski/kwiver
|
sprokit/tests/sprokit/pipeline/test_process_cluster.cxx
|
<reponame>neal-siekierski/kwiver
/*ckwg +29
* Copyright 2011-2017 by Kitware, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither name of Kitware, Inc. nor the names of any contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <test_common.h>
#include <vital/config/config_block.h>
#include <vital/plugin_loader/plugin_manager.h>
#include <sprokit/pipeline/edge.h>
#include <sprokit/pipeline/pipeline.h>
#include <sprokit/pipeline/pipeline_exception.h>
#include <sprokit/pipeline/process_cluster.h>
#include <sprokit/pipeline/process_cluster_exception.h>
#include <sprokit/pipeline/process_exception.h>
#include <memory>
#define TEST_ARGS ()
DECLARE_TEST_MAP();
int
main( int argc, char* argv[] )
{
CHECK_ARGS( 1 );
testname_t const testname = argv[1];
RUN_TEST( testname );
}
// ------------------------------------------------------------------
class empty_cluster :
public sprokit::process_cluster
{
public:
empty_cluster();
~empty_cluster();
};
// ------------------------------------------------------------------
IMPLEMENT_TEST( configure )
{
sprokit::process_cluster_t const cluster = std::make_shared< empty_cluster > ();
cluster->configure();
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( init )
{
sprokit::process_cluster_t const cluster = std::make_shared< empty_cluster > ();
cluster->configure();
cluster->init();
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( step )
{
sprokit::process_cluster_t const cluster = std::make_shared< empty_cluster > ();
cluster->configure();
cluster->init();
EXPECT_EXCEPTION( sprokit::process_exception,
cluster->step(),
"stepping a cluster" );
}
// ------------------------------------------------------------------
class sample_cluster :
public sprokit::process_cluster
{
public:
sample_cluster( kwiver::vital::config_block_sptr const& conf = kwiver::vital::config_block::empty_config() );
~sample_cluster();
void _declare_configuration_key( kwiver::vital::config_block_key_t const& key,
kwiver::vital::config_block_value_t const& def_,
kwiver::vital::config_block_description_t const& description_,
bool tunable_ );
void _map_config( kwiver::vital::config_block_key_t const& key, name_t const& name_, kwiver::vital::config_block_key_t const& mapped_key );
void _add_process( name_t const& name_, type_t const& type_,
kwiver::vital::config_block_sptr const& config = kwiver::vital::config_block::empty_config() );
void _map_input( port_t const& port, name_t const& name_, port_t const& mapped_port );
void _map_output( port_t const& port, name_t const& name_, port_t const& mapped_port );
void _connect( name_t const& upstream_name, port_t const& upstream_port,
name_t const& downstream_name, port_t const& downstream_port );
};
typedef std::shared_ptr< sample_cluster > sample_cluster_t;
// ------------------------------------------------------------------
IMPLEMENT_TEST( add_process )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "orphan" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name, type );
sprokit::processes_t const procs = cluster->processes();
if ( procs.empty() )
{
TEST_ERROR( "A cluster does not contain a process after adding one" );
// The remaining code won't be happy with an empty vector.
return;
}
if ( procs.size() != 1 )
{
TEST_ERROR( "A cluster has more processes than declared" );
}
sprokit::process_t const& proc = procs[0];
if ( proc->type() != type )
{
TEST_ERROR( "A cluster added a process of a different type than requested" );
}
// TODO: Get the mangled name.
if ( proc->name() == name )
{
TEST_ERROR( "A cluster did not mangle a processes name" );
}
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( duplicate_name )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "orphan" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name, type );
EXPECT_EXCEPTION( sprokit::duplicate_process_name_exception,
cluster->_add_process( name, type ),
"adding a process with a duplicate name to a cluster" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_config )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
kwiver::vital::config_block_key_t const key = kwiver::vital::config_block_key_t( "key" );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
cluster->_map_config( key, name, key );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_config_after_process )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
kwiver::vital::config_block_key_t const key = kwiver::vital::config_block_key_t( "key" );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "orphan" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name, type );
EXPECT_EXCEPTION( sprokit::mapping_after_process_exception,
cluster->_map_config( key, name, key ),
"mapping a configuration after the process has been added" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_config_no_exist )
{
kwiver::vital::plugin_manager::instance().load_all_plugins();
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
kwiver::vital::config_block_key_t const key = kwiver::vital::config_block_key_t( "key" );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "nnameame" );
cluster->_map_config( key, name, key );
EXPECT_EXCEPTION( sprokit::unknown_configuration_value_exception,
cluster->_add_process( name, type ),
"mapping an unknown configuration on a cluster" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_config_read_only )
{
kwiver::vital::plugin_manager::instance().load_all_plugins();
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
kwiver::vital::config_block_key_t const key = kwiver::vital::config_block_key_t( "key" );
cluster->_declare_configuration_key(
key,
kwiver::vital::config_block_value_t(),
kwiver::vital::config_block_description_t(),
true );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "orphan" );
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_key_t const mapped_key = kwiver::vital::config_block_key_t( "mapped_key" );
cluster->_map_config( key, name, mapped_key );
kwiver::vital::config_block_value_t const mapped_value = kwiver::vital::config_block_value_t( "old_value" );
conf->set_value( mapped_key, mapped_value );
conf->mark_read_only( mapped_key );
EXPECT_EXCEPTION( sprokit::mapping_to_read_only_value_exception,
cluster->_add_process( name, type, conf ),
"when mapping to a value which already has a read-only value" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_config_ignore_override )
{
kwiver::vital::plugin_manager::instance().load_all_plugins();
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
kwiver::vital::config_block_sptr const cluster_conf = kwiver::vital::config_block::empty_config();
cluster_conf->set_value( sprokit::process::config_name, cluster_name );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ( cluster_conf );
kwiver::vital::config_block_key_t const key = kwiver::vital::config_block_key_t( "key" );
kwiver::vital::config_block_value_t const tunable_value = kwiver::vital::config_block_value_t( "old_value" );
cluster->_declare_configuration_key(
key,
tunable_value,
kwiver::vital::config_block_description_t(),
true );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "expect" );
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_key_t const key_tunable = kwiver::vital::config_block_key_t( "tunable" );
kwiver::vital::config_block_key_t const key_expect = kwiver::vital::config_block_key_t( "expect" );
cluster->_map_config( key, name, key_expect );
kwiver::vital::config_block_value_t const tuned_value = kwiver::vital::config_block_value_t( "new_value" );
conf->set_value( key_tunable, tunable_value );
// The setting should be used from the mapping, not here.
conf->set_value( key_expect, tuned_value );
cluster->_add_process( name, type, conf );
sprokit::pipeline_t const pipeline = std::make_shared< sprokit::pipeline > ( kwiver::vital::config_block::empty_config() );
pipeline->add_process( cluster );
pipeline->setup_pipeline();
kwiver::vital::config_block_sptr const new_conf = kwiver::vital::config_block::empty_config();
// Fill a block so that the expect process gets reconfigured to do its check;
// if the block for it is empty, the check won't happen.
new_conf->set_value( cluster_name + kwiver::vital::config_block::block_sep + key, tuned_value );
pipeline->reconfigure( new_conf );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_input )
{
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
conf->set_value( sprokit::process::config_name, cluster_name );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ( conf );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "print_number" );
sprokit::process::port_t const port = sprokit::process::port_t( "cluster_number" );
sprokit::process::port_t const mapped_port = sprokit::process::port_t( "number" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name, type );
cluster->_map_input( port, name, mapped_port );
sprokit::process::connections_t const mappings = cluster->input_mappings();
if ( mappings.empty() )
{
TEST_ERROR( "A cluster does not contain an input mapping after adding one" );
// The remaining code won't be happy with an empty vector.
return;
}
if ( mappings.size() != 1 )
{
TEST_ERROR( "A cluster has more input mappings than declared" );
}
sprokit::process::connection_t const& mapping = mappings[0];
sprokit::process::port_addr_t const& up_addr = mapping.first;
sprokit::process::name_t const& up_name = up_addr.first;
sprokit::process::port_t const& up_port = up_addr.second;
if ( up_name != cluster_name )
{
TEST_ERROR( "A cluster input mapping\'s upstream name is not the cluster itself" );
}
if ( up_port != port )
{
TEST_ERROR( "A cluster input mapping\'s upstream port is not the one requested" );
}
sprokit::process::port_addr_t const& down_addr = mapping.second;
sprokit::process::name_t const& down_name = down_addr.first;
sprokit::process::port_t const& down_port = down_addr.second;
// TODO: Get the mangled name.
if ( down_name == name )
{
TEST_ERROR( "A cluster input mapping\'s downstream name was not mangled" );
}
if ( down_port != mapped_port )
{
TEST_ERROR( "A cluster input mapping\'s downstream port is not the one requested" );
}
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_input_twice )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "print_number" );
sprokit::process::port_t const port1 = sprokit::process::port_t( "cluster_number1" );
sprokit::process::port_t const port2 = sprokit::process::port_t( "cluster_number2" );
sprokit::process::port_t const mapped_port = sprokit::process::port_t( "number" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name, type );
cluster->_map_input( port1, name, mapped_port );
EXPECT_EXCEPTION( sprokit::port_reconnect_exception,
cluster->_map_input( port2, name, mapped_port ),
"mapping a second cluster port to a process input port" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_input_no_exist )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::port_t const port = sprokit::process::port_t( "port" );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
EXPECT_EXCEPTION( sprokit::no_such_process_exception,
cluster->_map_input( port, name, port ),
"mapping an input to a non-existent process" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_input_port_no_exist )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::port_t const port = sprokit::process::port_t( "no_such_port" );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "orphan" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name, type );
EXPECT_EXCEPTION( sprokit::no_such_port_exception,
cluster->_map_input( port, name, port ),
"mapping an input to a non-existent port" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_output )
{
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
conf->set_value( sprokit::process::config_name, cluster_name );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ( conf );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "numbers" );
sprokit::process::port_t const port = sprokit::process::port_t( "cluster_number" );
sprokit::process::port_t const mapped_port = sprokit::process::port_t( "number" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name, type );
cluster->_map_output( port, name, mapped_port );
sprokit::process::connections_t const mappings = cluster->output_mappings();
if ( mappings.empty() )
{
TEST_ERROR( "A cluster does not contain an output mapping after adding one" );
// The remaining code won't be happy with an empty vector.
return;
}
if ( mappings.size() != 1 )
{
TEST_ERROR( "A cluster has more output mappings than declared" );
}
sprokit::process::connection_t const& mapping = mappings[0];
sprokit::process::port_addr_t const& down_addr = mapping.second;
sprokit::process::name_t const& down_name = down_addr.first;
sprokit::process::port_t const& down_port = down_addr.second;
if ( down_name != cluster_name )
{
TEST_ERROR( "A cluster output mapping\'s downstream name is not the cluster itself" );
}
if ( down_port != port )
{
TEST_ERROR( "A cluster output mapping\'s downstream port is not the one requested" );
}
sprokit::process::port_addr_t const& up_addr = mapping.first;
sprokit::process::name_t const& up_name = up_addr.first;
sprokit::process::port_t const& up_port = up_addr.second;
// TODO: Get the mangled name.
if ( up_name == name )
{
TEST_ERROR( "A cluster output mapping\'s upstream name was not mangled" );
}
if ( up_port != mapped_port )
{
TEST_ERROR( "A cluster output mapping\'s upstream port is not the one requested" );
}
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_output_twice )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name1 = sprokit::process::name_t( "name1" );
sprokit::process::name_t const name2 = sprokit::process::name_t( "name2" );
sprokit::process::type_t const type = sprokit::process::type_t( "numbers" );
sprokit::process::port_t const port = sprokit::process::port_t( "cluster_number" );
sprokit::process::port_t const mapped_port = sprokit::process::port_t( "number" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name1, type );
cluster->_add_process( name2, type );
cluster->_map_output( port, name1, mapped_port );
EXPECT_EXCEPTION( sprokit::port_reconnect_exception,
cluster->_map_output( port, name2, mapped_port ),
"mapping a second port to a cluster output port" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_output_no_exist )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::port_t const port = sprokit::process::port_t( "port" );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
EXPECT_EXCEPTION( sprokit::no_such_process_exception,
cluster->_map_output( port, name, port ),
"mapping an output to a non-existent process" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( map_output_port_no_exist )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::port_t const port = sprokit::process::port_t( "no_such_port" );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "orphan" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name, type );
EXPECT_EXCEPTION( sprokit::no_such_port_exception,
cluster->_map_output( port, name, port ),
"mapping an output to a non-existent port" );
}
IMPLEMENT_TEST( connect )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name1 = sprokit::process::name_t( "name1" );
sprokit::process::name_t const name2 = sprokit::process::name_t( "name2" );
sprokit::process::type_t const type1 = sprokit::process::type_t( "numbers" );
sprokit::process::type_t const type2 = sprokit::process::type_t( "print_number" );
sprokit::process::port_t const port = sprokit::process::port_t( "number" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name1, type1 );
cluster->_add_process( name2, type2 );
cluster->_connect( name1, port, name2, port );
sprokit::process::connections_t const mappings = cluster->internal_connections();
if ( mappings.empty() )
{
TEST_ERROR( "A cluster does not contain an internal connection after adding one" );
// The remaining code won't be happy with an empty vector.
return;
}
if ( mappings.size() != 1 )
{
TEST_ERROR( "A cluster has more internal connections than declared" );
}
sprokit::process::connection_t const& mapping = mappings[0];
sprokit::process::port_addr_t const& down_addr = mapping.second;
sprokit::process::name_t const& down_name = down_addr.first;
sprokit::process::port_t const& down_port = down_addr.second;
sprokit::process::port_addr_t const& up_addr = mapping.first;
sprokit::process::name_t const& up_name = up_addr.first;
sprokit::process::port_t const& up_port = up_addr.second;
// TODO: Get the mangled name.
if ( up_name == name1 )
{
TEST_ERROR( "A cluster internal connection\'s upstream name was not mangled" );
}
if ( up_port != port )
{
TEST_ERROR( "A cluster internal connection\'s upstream port is not the one requested" );
}
// TODO: Get the mangled name.
if ( down_name == name2 )
{
TEST_ERROR( "A cluster internal connection\'s downstream name is not the cluster itself" );
}
if ( down_port != port )
{
TEST_ERROR( "A cluster internal connection\'s downstream port is not the one requested" );
}
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( connect_upstream_no_exist )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name1 = sprokit::process::name_t( "name1" );
sprokit::process::name_t const name2 = sprokit::process::name_t( "name2" );
sprokit::process::type_t const type = sprokit::process::type_t( "print_number" );
sprokit::process::port_t const port = sprokit::process::port_t( "number" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name2, type );
EXPECT_EXCEPTION( sprokit::no_such_process_exception,
cluster->_connect( name1, port, name2, port ),
"making a connection when the upstream process does not exist" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( connect_upstream_port_no_exist )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name1 = sprokit::process::name_t( "name1" );
sprokit::process::name_t const name2 = sprokit::process::name_t( "name2" );
sprokit::process::type_t const type1 = sprokit::process::type_t( "numbers" );
sprokit::process::type_t const type2 = sprokit::process::type_t( "print_number" );
sprokit::process::port_t const port1 = sprokit::process::port_t( "no_such_port" );
sprokit::process::port_t const port2 = sprokit::process::port_t( "number" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name1, type1 );
cluster->_add_process( name2, type2 );
EXPECT_EXCEPTION( sprokit::no_such_port_exception,
cluster->_connect( name1, port1, name2, port2 ),
"making a connection when the upstream port does not exist" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( connect_downstream_no_exist )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name1 = sprokit::process::name_t( "name1" );
sprokit::process::name_t const name2 = sprokit::process::name_t( "name2" );
sprokit::process::type_t const type = sprokit::process::type_t( "numbers" );
sprokit::process::port_t const port = sprokit::process::port_t( "number" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name1, type );
EXPECT_EXCEPTION( sprokit::no_such_process_exception,
cluster->_connect( name1, port, name2, port ),
"making a connection when the upstream process does not exist" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( connect_downstream_port_no_exist )
{
sample_cluster_t const cluster = std::make_shared< sample_cluster > ();
sprokit::process::name_t const name1 = sprokit::process::name_t( "name1" );
sprokit::process::name_t const name2 = sprokit::process::name_t( "name2" );
sprokit::process::type_t const type1 = sprokit::process::type_t( "numbers" );
sprokit::process::type_t const type2 = sprokit::process::type_t( "print_number" );
sprokit::process::port_t const port1 = sprokit::process::port_t( "number" );
sprokit::process::port_t const port2 = sprokit::process::port_t( "no_such_port" );
kwiver::vital::plugin_manager::instance().load_all_plugins();
cluster->_add_process( name1, type1 );
cluster->_add_process( name2, type2 );
EXPECT_EXCEPTION( sprokit::no_such_port_exception,
cluster->_connect( name1, port1, name2, port2 ),
"making a connection when the downstream port does not exist" );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( reconfigure_pass_tunable_mappings )
{
kwiver::vital::plugin_manager::instance().load_all_plugins();
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
kwiver::vital::config_block_sptr const cluster_conf = kwiver::vital::config_block::empty_config();
cluster_conf->set_value( sprokit::process::config_name, cluster_name );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ( cluster_conf );
kwiver::vital::config_block_key_t const key = kwiver::vital::config_block_key_t( "key" );
cluster->_declare_configuration_key(
key,
kwiver::vital::config_block_value_t(),
kwiver::vital::config_block_description_t(),
true );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "expect" );
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_key_t const key_tunable = kwiver::vital::config_block_key_t( "tunable" );
kwiver::vital::config_block_key_t const key_expect = kwiver::vital::config_block_key_t( "expect" );
cluster->_map_config( key, name, key_tunable );
kwiver::vital::config_block_value_t const tunable_value = kwiver::vital::config_block_value_t( "old_value" );
kwiver::vital::config_block_value_t const tuned_value = kwiver::vital::config_block_value_t( "new_value" );
conf->set_value( key_tunable, tunable_value );
conf->set_value( key_expect, tuned_value );
cluster->_add_process( name, type, conf );
sprokit::pipeline_t const pipeline = std::make_shared< sprokit::pipeline > ( kwiver::vital::config_block::empty_config() );
pipeline->add_process( cluster );
pipeline->setup_pipeline();
kwiver::vital::config_block_sptr const new_conf = kwiver::vital::config_block::empty_config();
new_conf->set_value( cluster_name + kwiver::vital::config_block::block_sep + key, tuned_value );
pipeline->reconfigure( new_conf );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( reconfigure_no_pass_untunable_mappings )
{
kwiver::vital::plugin_manager::instance().load_all_plugins();
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
kwiver::vital::config_block_sptr const cluster_conf = kwiver::vital::config_block::empty_config();
cluster_conf->set_value( sprokit::process::config_name, cluster_name );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ( cluster_conf );
kwiver::vital::config_block_key_t const key = kwiver::vital::config_block_key_t( "key" );
cluster->_declare_configuration_key(
key,
kwiver::vital::config_block_value_t(),
kwiver::vital::config_block_description_t(),
false );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "expect" );
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_key_t const key_tunable = kwiver::vital::config_block_key_t( "tunable" );
kwiver::vital::config_block_key_t const key_expect = kwiver::vital::config_block_key_t( "expect" );
cluster->_map_config( key, name, key_tunable );
kwiver::vital::config_block_value_t const tunable_value = kwiver::vital::config_block_value_t( "old_value" );
conf->set_value( key_tunable, tunable_value );
conf->set_value( key_expect, tunable_value );
cluster->_add_process( name, type, conf );
sprokit::pipeline_t const pipeline = std::make_shared< sprokit::pipeline > ( kwiver::vital::config_block::empty_config() );
pipeline->add_process( cluster );
pipeline->setup_pipeline();
kwiver::vital::config_block_sptr const new_conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_value_t const tuned_value = kwiver::vital::config_block_value_t( "new_value" );
new_conf->set_value( cluster_name + kwiver::vital::config_block::block_sep + key, tuned_value );
pipeline->reconfigure( new_conf );
}
// ------------------------------------------------------------------
IMPLEMENT_TEST( reconfigure_pass_extra )
{
kwiver::vital::plugin_manager::instance().load_all_plugins();
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
kwiver::vital::config_block_sptr const cluster_conf = kwiver::vital::config_block::empty_config();
cluster_conf->set_value( sprokit::process::config_name, cluster_name );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ( cluster_conf );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "expect" );
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_key_t const key_expect = kwiver::vital::config_block_key_t( "expect" );
kwiver::vital::config_block_key_t const key_expect_key = kwiver::vital::config_block_key_t( "expect_key" );
kwiver::vital::config_block_value_t const extra_key = kwiver::vital::config_block_value_t( "new_key" );
conf->set_value( key_expect, extra_key );
conf->set_value( key_expect_key, "true" );
cluster->_add_process( name, type, conf );
sprokit::pipeline_t const pipeline = std::make_shared< sprokit::pipeline > ( kwiver::vital::config_block::empty_config() );
pipeline->add_process( cluster );
pipeline->setup_pipeline();
kwiver::vital::config_block_sptr const new_conf = kwiver::vital::config_block::empty_config();
new_conf->set_value( cluster_name + kwiver::vital::config_block::block_sep + name + kwiver::vital::config_block::block_sep + extra_key, extra_key );
pipeline->reconfigure( new_conf );
}
// ------------------------------------------------------------------
// Verifies that a read-only ("tunable"-marked-untunable) key on a
// clustered process is NOT retuned by reconfigure() unless it is mapped:
// the key is marked read-only, so the new value must not reach it.
IMPLEMENT_TEST( reconfigure_tunable_only_if_mapped )
{
kwiver::vital::plugin_manager::instance().load_all_plugins();
// Build a cluster named "cluster" around a sample_cluster instance.
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
kwiver::vital::config_block_sptr const cluster_conf = kwiver::vital::config_block::empty_config();
cluster_conf->set_value( sprokit::process::config_name, cluster_name );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ( cluster_conf );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "expect" );
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_key_t const key_tunable = kwiver::vital::config_block_key_t( "tunable" );
kwiver::vital::config_block_key_t const key_expect = kwiver::vital::config_block_key_t( "expect" );
kwiver::vital::config_block_value_t const tunable_value = kwiver::vital::config_block_value_t( "old_value" );
conf->set_value( key_tunable, tunable_value );
// Read-only marking is what makes the key non-retunable here.
conf->mark_read_only( key_tunable );
// The "expect" process should keep seeing the old value.
conf->set_value( key_expect, tunable_value );
cluster->_add_process( name, type, conf );
sprokit::pipeline_t const pipeline = std::make_shared< sprokit::pipeline > ( kwiver::vital::config_block::empty_config() );
pipeline->add_process( cluster );
pipeline->setup_pipeline();
// Attempt to retune "cluster:name:tunable" directly (not via a mapping).
kwiver::vital::config_block_sptr const new_conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_value_t const tuned_value = kwiver::vital::config_block_value_t( "new_value" );
new_conf->set_value( cluster_name + kwiver::vital::config_block::block_sep + name + kwiver::vital::config_block::block_sep + key_tunable,
tuned_value );
pipeline->reconfigure( new_conf );
}
// ------------------------------------------------------------------
// Verifies reconfigure() behavior when a tunable cluster key is mapped
// onto an inner process key: retuning the cluster-level key "cluster:key"
// should flow through the mapping declared with _map_config().
IMPLEMENT_TEST( reconfigure_mapped_untunable )
{
kwiver::vital::plugin_manager::instance().load_all_plugins();
sprokit::process::name_t const cluster_name = sprokit::process::name_t( "cluster" );
kwiver::vital::config_block_sptr const cluster_conf = kwiver::vital::config_block::empty_config();
cluster_conf->set_value( sprokit::process::config_name, cluster_name );
sample_cluster_t const cluster = std::make_shared< sample_cluster > ( cluster_conf );
// Declare a tunable key on the cluster itself (last argument = tunable).
kwiver::vital::config_block_key_t const key = kwiver::vital::config_block_key_t( "key" );
kwiver::vital::config_block_value_t const tunable_value = kwiver::vital::config_block_value_t( "old_value" );
cluster->_declare_configuration_key(
key,
tunable_value,
kwiver::vital::config_block_description_t(),
true );
sprokit::process::name_t const name = sprokit::process::name_t( "name" );
sprokit::process::type_t const type = sprokit::process::type_t( "expect" );
kwiver::vital::config_block_sptr const conf = kwiver::vital::config_block::empty_config();
kwiver::vital::config_block_key_t const key_tunable = kwiver::vital::config_block_key_t( "tunable" );
kwiver::vital::config_block_key_t const key_expect = kwiver::vital::config_block_key_t( "expect" );
// Map the cluster key onto the inner process's "expect" key.
cluster->_map_config( key, name, key_expect );
kwiver::vital::config_block_value_t const tuned_value = kwiver::vital::config_block_value_t( "new_value" );
conf->set_value( key_tunable, tunable_value );
cluster->_add_process( name, type, conf );
sprokit::pipeline_t const pipeline = std::make_shared< sprokit::pipeline > ( kwiver::vital::config_block::empty_config() );
pipeline->add_process( cluster );
pipeline->setup_pipeline();
// Retune through the cluster-level key; the mapping forwards the value.
kwiver::vital::config_block_sptr const new_conf = kwiver::vital::config_block::empty_config();
new_conf->set_value( cluster_name + kwiver::vital::config_block::block_sep + key, tuned_value );
pipeline->reconfigure( new_conf );
}
// ------------------------------------------------------------------
// A cluster with no configuration at all — used to exercise the
// degenerate/empty case in the tests above.
empty_cluster
::empty_cluster()
: sprokit::process_cluster( kwiver::vital::config_block::empty_config() )
{
}
empty_cluster
::~empty_cluster()
{
}
// Test cluster constructed from an arbitrary configuration block.
sample_cluster
::sample_cluster( kwiver::vital::config_block_sptr const& conf )
: sprokit::process_cluster( conf )
{
}
sample_cluster
::~sample_cluster()
{
}
// The _-prefixed methods below simply re-export process_cluster's
// protected cluster-building API as public members so the tests can
// drive a cluster directly; each is a one-line delegation.
// Exposes declare_configuration_key(): declares a (possibly tunable)
// configuration key on the cluster itself.
void
sample_cluster
::_declare_configuration_key( kwiver::vital::config_block_key_t const& key,
kwiver::vital::config_block_value_t const& def_,
kwiver::vital::config_block_description_t const& description_,
bool tunable_ )
{
declare_configuration_key( key, def_, description_, tunable_ );
}
// Exposes map_config(): maps a cluster key onto a child process's key.
void
sample_cluster
::_map_config( kwiver::vital::config_block_key_t const& key,
name_t const& name_,
kwiver::vital::config_block_key_t const& mapped_key )
{
map_config( key, name_, mapped_key );
}
// Exposes add_process(): instantiates a child process inside the cluster.
void
sample_cluster
::_add_process( name_t const& name_,
type_t const& type_,
kwiver::vital::config_block_sptr const& config )
{
add_process( name_, type_, config );
}
// Exposes map_input(): maps a cluster input port to a child's input port.
void
sample_cluster
::_map_input( port_t const& port, name_t const& name_, port_t const& mapped_port )
{
map_input( port, name_, mapped_port );
}
// Exposes map_output(): maps a child's output port to a cluster output port.
void
sample_cluster
::_map_output( port_t const& port, name_t const& name_, port_t const& mapped_port )
{
map_output( port, name_, mapped_port );
}
// Exposes connect(): wires two child processes together inside the cluster.
void
sample_cluster
::_connect( name_t const& upstream_name, port_t const& upstream_port,
name_t const& downstream_name, port_t const& downstream_port )
{
connect( upstream_name, upstream_port, downstream_name, downstream_port );
}
|
applied-systems-biology/acaq5
|
jipipe-core/src/main/java/org/hkijena/jipipe/extensions/tables/display/OpenResultsTableInImageJDataOperation.java
|
/*
* Copyright by <NAME>, <NAME>
*
* Research Group Applied Systems Biology - Head: Prof. Dr. <NAME>
* https://www.leibniz-hki.de/en/applied-systems-biology.html
* HKI-Center for Systems Biology of Infection
* Leibniz Institute for Natural Product Research and Infection Biology - Hans Knöll Institute (HKI)
* Adolf-Reichwein-Straße 23, 07745 Jena, Germany
*
* The project code is licensed under BSD 2-Clause.
* See the LICENSE file provided with the code for the full license.
*/
package org.hkijena.jipipe.extensions.tables.display;
import ij.measure.ResultsTable;
import org.hkijena.jipipe.api.JIPipeProgressInfo;
import org.hkijena.jipipe.api.data.*;
import org.hkijena.jipipe.extensions.tables.datatypes.ResultsTableData;
import org.hkijena.jipipe.ui.JIPipeWorkbench;
import org.hkijena.jipipe.utils.PathUtils;
import org.hkijena.jipipe.utils.UIUtils;
import javax.swing.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
/**
 * {@link JIPipeDataDisplayOperation} that shows a {@link ResultsTableData}
 * in a native ImageJ results-table window.
 */
public class OpenResultsTableInImageJDataOperation implements JIPipeDataDisplayOperation {
/** @return the stable operation identifier used for registration/lookup. */
@Override
public String getId() {
return "jipipe:open-table-in-imagej";
}
/**
 * Shows the table in an ImageJ window titled {@code displayName}.
 * The data is duplicated before display — presumably so the ImageJ window
 * cannot mutate the stored table; confirm against the data-storage contract.
 */
@Override
public void display(JIPipeData data, String displayName, JIPipeWorkbench workbench, JIPipeDataSource source) {
((ResultsTableData) data.duplicate(new JIPipeProgressInfo())).getTable().show(displayName);
}
/** @return the human-readable menu label. */
@Override
public String getName() {
return "Open in ImageJ";
}
/** @return the tooltip/description shown for this operation. */
@Override
public String getDescription() {
return "Opens the table in ImageJ";
}
/** @return ordering hint for listing this operation; semantics defined by the framework. */
@Override
public int getOrder() {
return 100;
}
/** @return the ImageJ application icon for this operation. */
@Override
public Icon getIcon() {
return UIUtils.getIconFromResources("apps/imagej.png");
}
}
|
peterekepeter/libNIST
|
TestLibNistSts/CusumUnitTest.cpp
|
<reponame>peterekepeter/libNIST
#include "stdafx.h"
#include "CppUnitTest.h"
#include <vector>
#include "../LibNistSts/BitSequence.h"
#include "../LibNistSts/Parameters.h"
#include "../LibNistSts/Test.h"
#include "../LibNistSts/common/stat_fncs.h"
using namespace Microsoft::VisualStudio::CppUnitTestFramework;
namespace libNISTtest
{
TEST_CLASS(CumulativeSumsTest)
{
public:
	/// Builds an n-bit pseudo-random sequence from a small LCG.
	///
	/// Fixes two defects in the original helper:
	///  * 'lcg & 8 == 8' parsed as 'lcg & (8 == 8)' because '==' binds
	///    tighter than '&' in C++, so only bit 0 of the LCG was sampled;
	///  * a stray 'data.resize(256)' clamped every sequence to 256 bits,
	///    silently ignoring the requested length n.
	Nist::BitSequence GetSeqLcg(size_t n)
	{
		std::vector<bool> data(n);
		// make sequence of 'random'
		int lcg = 112437;
		for (auto& bit : data)
		{
			// Sample bit 3 of the LCG state (parenthesized on purpose).
			bit = (lcg & 8) == 8;
			lcg = lcg * 7612341 + 41235467;
		}
		return Nist::BitSequence(data);
	}
	// compare each version of cusum test: all three implementations must
	// agree (within tolerance) on p-values, partial sums and statistics.
	TEST_METHOD(CusumCompareVersions)
	{
		auto seq = GetSeqLcg(4096);
		// Pin the first three bits to a known state (as in the original).
		seq[0] = false;
		seq[1] = false;
		seq[2] = false;
		Nist::Parameters parameters;
		Nist::Results results1, results2, results3;
		Nist::Test test1(&seq, &parameters, &results1);
		Nist::Test test2(&seq, &parameters, &results2);
		Nist::Test test3(&seq, &parameters, &results3);
		CumulativeSums(test1);
		CumulativeSums2(test2);
		CumulativeSums3(test3);
		auto& result1 = test1.GetResults().cusum;
		auto& result2 = test2.GetResults().cusum;
		auto& result3 = test3.GetResults().cusum;
		Assert::AreEqual(result1.p_valueA, result2.p_valueA, 0.01);
		Assert::AreEqual(result1.p_valueA, result3.p_valueA, 0.01);
		Assert::AreEqual(result1.p_valueB, result2.p_valueB, 0.01);
		Assert::AreEqual(result1.p_valueB, result3.p_valueB, 0.01);
		Assert::AreEqual(result1.sum1A, result2.sum1A, 0.01);
		Assert::AreEqual(result1.sum2A, result2.sum2A, 0.01);
		Assert::AreEqual(result1.sum1B, result2.sum1B, 0.01);
		Assert::AreEqual(result1.sum2B, result2.sum2B, 0.01);
		// The original compared result2 twice here (copy-paste); version 3
		// is now checked as clearly intended.
		Assert::AreEqual(result1.z, result2.z, 0.01);
		Assert::AreEqual(result1.z, result3.z, 0.01);
		Assert::AreEqual(result1.zrev, result2.zrev, 0.01);
		Assert::AreEqual(result1.zrev, result3.zrev, 0.01);
	}
};
}
|
dmgerman/camel
|
components/camel-fop/src/test/java/org/apache/camel/component/fop/FopHelper.java
|
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
DECL|package|org.apache.camel.component.fop
package|package
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|component
operator|.
name|fop
package|;
end_package
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|IOException
import|;
end_import
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|StringWriter
import|;
end_import
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|Writer
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|pdfbox
operator|.
name|cos
operator|.
name|COSName
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|pdfbox
operator|.
name|pdmodel
operator|.
name|PDDocument
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|pdfbox
operator|.
name|pdmodel
operator|.
name|PDDocumentInformation
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|pdfbox
operator|.
name|text
operator|.
name|PDFTextStripper
import|;
end_import
begin_class
DECL|class|FopHelper
specifier|public
specifier|final
class|class
name|FopHelper
block|{
DECL|method|FopHelper ()
specifier|private
name|FopHelper
parameter_list|()
block|{ }
DECL|method|extractTextFrom (PDDocument document)
specifier|public
specifier|static
name|String
name|extractTextFrom
parameter_list|(
name|PDDocument
name|document
parameter_list|)
throws|throws
name|IOException
block|{
name|Writer
name|output
init|=
operator|new
name|StringWriter
argument_list|()
decl_stmt|;
name|PDFTextStripper
name|stripper
init|=
operator|new
name|PDFTextStripper
argument_list|()
decl_stmt|;
name|stripper
operator|.
name|writeText
argument_list|(
name|document
argument_list|,
name|output
argument_list|)
expr_stmt|;
return|return
name|output
operator|.
name|toString
argument_list|()
operator|.
name|trim
argument_list|()
return|;
block|}
DECL|method|getDocumentMetadataValue (PDDocument document, COSName name)
specifier|public
specifier|static
name|String
name|getDocumentMetadataValue
parameter_list|(
name|PDDocument
name|document
parameter_list|,
name|COSName
name|name
parameter_list|)
block|{
name|PDDocumentInformation
name|info
init|=
name|document
operator|.
name|getDocumentInformation
argument_list|()
decl_stmt|;
return|return
name|info
operator|.
name|getCOSObject
argument_list|()
operator|.
name|getString
argument_list|(
name|name
argument_list|)
return|;
block|}
DECL|method|decorateTextWithXSLFO (String text)
specifier|public
specifier|static
name|String
name|decorateTextWithXSLFO
parameter_list|(
name|String
name|text
parameter_list|)
block|{
return|return
literal|"<fo:root xmlns:fo=\"http://www.w3.org/1999/XSL/Format\">\n"
operator|+
literal|"<fo:layout-master-set>\n"
operator|+
literal|"<fo:simple-page-master master-name=\"only\">\n"
operator|+
literal|"<fo:region-body region-name=\"xsl-region-body\" margin=\"0.7in\" padding=\"0\" />\n"
operator|+
literal|"<fo:region-before region-name=\"xsl-region-before\" extent=\"0.7in\" />\n"
operator|+
literal|"<fo:region-after region-name=\"xsl-region-after\" extent=\"0.7in\" />\n"
operator|+
literal|"</fo:simple-page-master>\n"
operator|+
literal|"</fo:layout-master-set>\n"
operator|+
literal|"<fo:page-sequence master-reference=\"only\">\n"
operator|+
literal|"<fo:flow flow-name=\"xsl-region-body\">\n"
operator|+
literal|"<fo:block>"
operator|+
name|text
operator|+
literal|"</fo:block>\n"
operator|+
literal|"</fo:flow>\n"
operator|+
literal|"</fo:page-sequence>\n"
operator|+
literal|"</fo:root>"
return|;
block|}
block|}
end_class
end_unit
|
ed1rac/AulasEstruturasDados
|
2019/Structs/pontoFuncaoPorValor.c
|
#include <stdio.h>
#include <stdlib.h>
/* Função : Captura e imprime um ponto - Autor : Edkallenn - Data : 02/06/2016 - Obs: Usando passagem por valor */
/* A 2-D point with an optional one-character label. */
typedef struct ponto{
char letra; /* label character; not read by imprime()/captura() below */
float x;    /* x coordinate */
float y;    /* y coordinate */
}Ponto;
void imprime(Ponto);  /* prints a point's coordinates (pass-by-value) */
Ponto captura();      /* reads a point from stdin and returns it by value */
/* Reads one point from stdin, prints it, then waits for Enter before
 * exiting. */
int main(){
    Ponto p;
    int ch;

    p = captura();
    imprime(p);

    /* scanf() in captura() leaves the trailing newline in stdin, so a
     * single getchar() would return immediately instead of pausing.
     * Drain the rest of the current line first, then wait for a key. */
    while ((ch = getchar()) != '\n' && ch != EOF) {
    }
    getchar();
    return 0;
}
/* Prints the point's coordinates with two decimal places.
 * Note: the 'letra' field is not printed. */
void imprime(Ponto p){
printf("O ponto fornecido foi: (%.2f, %.2f)\n", p.x, p.y);
}
Ponto captura(){
Ponto b;
printf("Digite as coordenadas do ponto(x,y): ");
scanf("%f,%f", &b.x, &b.y);
return b;
}
|
lathroplabs/CodeLaboratoryAddin
|
node_modules/@fluentui/react/lib-amd/components/GroupedList/GroupHeader.js
|
// Compiled AMD output for the Fluent UI GroupHeader component.
// NOTE(review): this is generated library output (node_modules) — edit the
// TypeScript source, not this bundle.
// Exports GroupHeaderBase bound to its style function via styled(),
// registered under the 'GroupHeader' theming scope.
define(["require", "exports", "../../Utilities", "./GroupHeader.styles", "./GroupHeader.base"], function (require, exports, Utilities_1, GroupHeader_styles_1, GroupHeader_base_1) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GroupHeader = void 0;
exports.GroupHeader = Utilities_1.styled(GroupHeader_base_1.GroupHeaderBase, GroupHeader_styles_1.getStyles, undefined, {
scope: 'GroupHeader',
});
});
//# sourceMappingURL=GroupHeader.js.map
|
usamakhan049/assignment
|
src/Watermark/WatermarkAlignmentStateChangedMessage.h
|
<filename>src/Watermark/WatermarkAlignmentStateChangedMessage.h
// Copyright eeGeo Ltd (2012-2015), All Rights Reserved
#pragma once
#include <string>
namespace ExampleApp
{
    namespace Watermark
    {
        /// Immutable event message describing how the watermark should be
        /// aligned after a state change.
        class WatermarkAlignmentStateChangedMessage
        {
        public:
            /// @param shouldAlignBottom            align the watermark to the bottom edge
            /// @param shouldAlignBelowFloorDisplay align the watermark below the floor display
            WatermarkAlignmentStateChangedMessage(bool shouldAlignBottom,
                                                  bool shouldAlignBelowFloorDisplay)
                : m_shouldAlignBottom(shouldAlignBottom)
                , m_shouldAlignBelowFloorDisplay(shouldAlignBelowFloorDisplay)
            {
            }

            /// Whether the watermark should be aligned to the bottom edge.
            bool ShouldAlignBottom() const
            {
                return m_shouldAlignBottom;
            }

            /// Whether the watermark should sit below the floor display.
            bool ShouldAlignBelowFloorDisplay() const
            {
                return m_shouldAlignBelowFloorDisplay;
            }

        private:
            const bool m_shouldAlignBottom;
            const bool m_shouldAlignBelowFloorDisplay;
        };
    }
}
|
swingflip/C64_mini_UBOOT
|
u-boot-2011.09/drivers/video_sunxi/dev_disp.c
|
#include "dev_disp.h"
#include <asm/arch/timer.h>
fb_info_t g_fbi;
__disp_drv_t g_disp_drv;
#define __user
#define HZ 100
#define EFAULT 1
static __u32 lcd_flow_cnt[2] = {0};
static __bool lcd_op_finished[2] = {0};
struct timer_list lcd_timer[2];
static __bool lcd_op_start[2] = {0};
static unsigned int gbuffer[4096];
static __u32 suspend_output_type[2] = {0,0};
static __u32 suspend_status = 0;//0:normal; suspend_status&1 != 0:in early_suspend; suspend_status&2 != 0:in suspend;
static uint display_opens;
__u32 output_type = DISP_OUTPUT_TYPE_LCD;
void DRV_lcd_close_callback(void *parg);
/* Bootloader shim for the kernel's copy_from_user(): there is no separate
 * user address space here, so a plain memcpy suffices.
 * Always returns 0 (success). */
__s32 copy_from_user(void *dest, void* src, __u32 size)
{
memcpy(dest, src, size);
return 0;
}
/* Bootloader shim for the kernel's copy_to_user(); same flat-memory memcpy.
 * Always returns 0 (success). */
__s32 copy_to_user(void *dest, void* src, __u32 size)
{
memcpy(dest, src, size);
return 0;
}
/* Delays for 'ms' milliseconds by busy-waiting on udelay().
 * Always returns 0. */
__s32 disp_delay_ms(__u32 ms)
{
/* todo */
udelay(ms*1000);
return 0;
}
// [before][step_0][delay_0][step_1][delay_1]......[step_n-2][delay_n-2][step_n-1][delay_n-1][after]
/* Runs one step of the LCD open flow for screen 'sel' (passed as the
 * timer argument). Steps with delay == 0 are chained immediately by
 * direct recursion; steps with a non-zero delay re-arm lcd_timer[sel]
 * to call this function back later. After the last step,
 * bsp_disp_lcd_open_after() runs and lcd_op_finished[sel] is set so
 * DRV_lcd_check_open_finished() reports completion. */
void DRV_lcd_open_callback(void *parg)
{
__lcd_flow_t *flow;
__u32 sel = (__u32)parg;
__s32 i = lcd_flow_cnt[sel]++;  /* index of the step to execute now */
flow = bsp_disp_lcd_get_open_flow(sel);
if(i < flow->func_num)
{
flow->func[i].func(sel);
if(flow->func[i].delay == 0)
{
/* No delay requested: run the next step right away. */
DRV_lcd_open_callback((void*)sel);
}
else
{
/* Schedule the next step after the step's delay. */
lcd_timer[sel].data = sel;
lcd_timer[sel].expires = flow->func[i].delay;
lcd_timer[sel].function = DRV_lcd_open_callback;
add_timer(&lcd_timer[sel]);
}
}
else if(i == flow->func_num)
{
/* All steps done: run the post-open hook and mark completion. */
bsp_disp_lcd_open_after(sel);
lcd_op_finished[sel] = 1;
}
}
/* Starts the asynchronous LCD open sequence for screen 'sel': resets the
 * per-screen flow state, runs bsp_disp_lcd_open_before() and kicks off
 * the first DRV_lcd_open_callback() step. No-op when the LCD is not used
 * on this screen. Always returns 0. */
__s32 DRV_lcd_open(__u32 sel)
{
if(bsp_disp_lcd_used(sel))
{
lcd_flow_cnt[sel] = 0;
lcd_op_finished[sel] = 0;
lcd_op_start[sel] = 1;
init_timer(&lcd_timer[sel]);
bsp_disp_lcd_open_before(sel);
DRV_lcd_open_callback((void*)sel);
}
return 0;
}
/* Polls whether the open flow started by DRV_lcd_open() has completed.
 * Returns 1 when done (tearing down the timer and clearing the start
 * flag) or when no flow is running; returns 0 while steps are pending. */
__bool DRV_lcd_check_open_finished(__u32 sel)
{
if(bsp_disp_lcd_used(sel) && (lcd_op_start[sel] == 1))
{
if(lcd_op_finished[sel])
{
del_timer(&lcd_timer[sel]);
lcd_op_start[sel] = 0;
}
return lcd_op_finished[sel];
}
return 1;
}
/* Starts the asynchronous LCD close sequence for screen 'sel'; mirrors
 * DRV_lcd_open() but walks the close flow. ("close_befor" spelling is
 * the external BSP API name.) Always returns 0. */
__s32 DRV_lcd_close(__u32 sel)
{
if(bsp_disp_lcd_used(sel))
{
lcd_flow_cnt[sel] = 0;
lcd_op_finished[sel] = 0;
lcd_op_start[sel] = 1;
init_timer(&lcd_timer[sel]);
bsp_disp_lcd_close_befor(sel);
DRV_lcd_close_callback((void*)sel);
}
return 0;
}
/* Runs one step of the LCD close flow for screen 'sel'; same stepping/
 * timer scheme as DRV_lcd_open_callback() but over the close flow. */
void DRV_lcd_close_callback(void *parg)
{
__lcd_flow_t *flow;
__u32 sel = (__u32)parg;
__s32 i = lcd_flow_cnt[sel]++;  /* index of the step to execute now */
flow = bsp_disp_lcd_get_close_flow(sel);
if(i < flow->func_num)
{
flow->func[i].func(sel);
if(flow->func[i].delay == 0)
{
DRV_lcd_close_callback((void*)sel);
}
else
{
lcd_timer[sel].data = sel;
lcd_timer[sel].expires = flow->func[i].delay;
lcd_timer[sel].function = DRV_lcd_close_callback;
add_timer(&lcd_timer[sel]);
}
}
else if(i == flow->func_num)
{
bsp_disp_lcd_close_after(sel);
lcd_op_finished[sel] = 1;
}
}
/* Polls whether the close flow started by DRV_lcd_close() has completed;
 * same semantics as DRV_lcd_check_open_finished(). */
__bool DRV_lcd_check_close_finished(__u32 sel)
{
if(bsp_disp_lcd_used(sel) && (lcd_op_start[sel] == 1))
{
if(lcd_op_finished[sel])
{
del_timer(&lcd_timer[sel]);
lcd_op_start[sel] = 0;
}
return lcd_op_finished[sel];
}
return 1;
}
/* Enables HDMI video output via the HAL (synchronous variant).
 * Always returns 0. */
__s32 DRV_hdmi_open(void)
{
Hdmi_hal_video_enable_sync(1);
return 0;
}
/* Disables HDMI video output via the HAL. Always returns 0. */
__s32 DRV_hdmi_close(void)
{
Hdmi_hal_video_enable_sync(0);
return 0;
}
/* Translates a __disp_tv_mode_t into the HAL's HDMI mode constant and
 * applies it via Hdmi_hal_set_display_mode(). Returns -1 (after a
 * warning) for modes with no HDMI equivalent. */
__s32 DRV_hdmi_set_display_mode(__disp_tv_mode_t mode)
{
__u32 hdmi_mode;
switch(mode) {
case DISP_TV_MOD_480I:
hdmi_mode = HDMI1440_480I;
break;
case DISP_TV_MOD_576I:
hdmi_mode = HDMI1440_576I;
break;
case DISP_TV_MOD_480P:
hdmi_mode = HDMI480P;
break;
case DISP_TV_MOD_576P:
hdmi_mode = HDMI576P;
break;
case DISP_TV_MOD_720P_50HZ:
hdmi_mode = HDMI720P_50;
break;
case DISP_TV_MOD_720P_60HZ:
hdmi_mode = HDMI720P_60;
break;
case DISP_TV_MOD_1080I_50HZ:
hdmi_mode = HDMI1080I_50;
break;
case DISP_TV_MOD_1080I_60HZ:
hdmi_mode = HDMI1080I_60;
break;
case DISP_TV_MOD_1080P_24HZ:
hdmi_mode = HDMI1080P_24;
break;
case DISP_TV_MOD_1080P_50HZ:
hdmi_mode = HDMI1080P_50;
break;
case DISP_TV_MOD_1080P_60HZ:
hdmi_mode = HDMI1080P_60;
break;
case DISP_TV_MOD_1080P_24HZ_3D_FP:
hdmi_mode = HDMI1080P_24_3D_FP;
break;
case DISP_TV_MOD_720P_50HZ_3D_FP:
hdmi_mode = HDMI720P_50_3D_FP;
break;
case DISP_TV_MOD_720P_60HZ_3D_FP:
hdmi_mode = HDMI720P_60_3D_FP;
break;
default:
/* Unknown mode: reject rather than guess. */
__wrn("unsupported video mode %d when set display mode\n", mode);
return -1;
}
return Hdmi_hal_set_display_mode(hdmi_mode);
}
/* Queries whether the HAL supports the given TV mode; same mapping as
 * DRV_hdmi_set_display_mode(), except unknown modes fall back to
 * probing HDMI720P_50 instead of failing. */
__s32 DRV_hdmi_mode_support(__disp_tv_mode_t mode)
{
__u32 hdmi_mode;
switch(mode) {
case DISP_TV_MOD_480I:
hdmi_mode = HDMI1440_480I;
break;
case DISP_TV_MOD_576I:
hdmi_mode = HDMI1440_576I;
break;
case DISP_TV_MOD_480P:
hdmi_mode = HDMI480P;
break;
case DISP_TV_MOD_576P:
hdmi_mode = HDMI576P;
break;
case DISP_TV_MOD_720P_50HZ:
hdmi_mode = HDMI720P_50;
break;
case DISP_TV_MOD_720P_60HZ:
hdmi_mode = HDMI720P_60;
break;
case DISP_TV_MOD_1080I_50HZ:
hdmi_mode = HDMI1080I_50;
break;
case DISP_TV_MOD_1080I_60HZ:
hdmi_mode = HDMI1080I_60;
break;
case DISP_TV_MOD_1080P_24HZ:
hdmi_mode = HDMI1080P_24;
break;
case DISP_TV_MOD_1080P_50HZ:
hdmi_mode = HDMI1080P_50;
break;
case DISP_TV_MOD_1080P_60HZ:
hdmi_mode = HDMI1080P_60;
break;
case DISP_TV_MOD_1080P_24HZ_3D_FP:
hdmi_mode = HDMI1080P_24_3D_FP;
break;
case DISP_TV_MOD_720P_50HZ_3D_FP:
hdmi_mode = HDMI720P_50_3D_FP;
break;
case DISP_TV_MOD_720P_60HZ_3D_FP:
hdmi_mode = HDMI720P_60_3D_FP;
break;
default:
/* NOTE(review): unlike set_display_mode, unknown modes are probed
 * as 720p50 rather than rejected — confirm this asymmetry is wanted. */
hdmi_mode = HDMI720P_50;
break;
}
return Hdmi_hal_mode_support(hdmi_mode);
}
/* Returns the HDMI hot-plug-detect state reported by the HAL. */
__s32 DRV_hdmi_get_HPD_status(void)
{
return Hdmi_hal_get_HPD();
}
/* Passes the PLL selection and clock through to the HDMI HAL.
 * Always returns 0. */
__s32 DRV_hdmi_set_pll(__u32 pll, __u32 clk)
{
Hdmi_hal_set_pll(pll, clk);
return 0;
}
/* Display interrupt hook registered with the BSP; currently a stub
 * (see original "to do"). Always returns 0. */
__s32 disp_int_process(__u32 sel)
{
/* to do */
return 0;
}
/* Fills 'src_ops' with the BSP callbacks a panel driver needs: delays,
 * TCON/PWM/backlight/power control, pin configuration, DSI clocking,
 * panel-parameter access and IIC helpers. Always returns 0.
 * NOTE(review): 'src_ops' is dereferenced unconditionally — callers must
 * pass a valid pointer. */
int sunxi_disp_get_source_ops(struct sunxi_disp_source_ops *src_ops)
{
src_ops->sunxi_lcd_delay_ms = bsp_disp_lcd_delay_ms;
src_ops->sunxi_lcd_delay_us = bsp_disp_lcd_delay_us;
src_ops->sunxi_lcd_tcon_enable = bsp_disp_lcd_tcon_open;
src_ops->sunxi_lcd_tcon_disable = bsp_disp_lcd_tcon_close;
src_ops->sunxi_lcd_pwm_enable = bsp_disp_lcd_pwm_enable;
src_ops->sunxi_lcd_pwm_disable = bsp_disp_lcd_pwm_disable;
src_ops->sunxi_lcd_backlight_enable = bsp_disp_lcd_backlight_enable;
src_ops->sunxi_lcd_backlight_disable = bsp_disp_lcd_backlight_disable;
src_ops->sunxi_lcd_power_enable = bsp_disp_lcd_power_enable;
src_ops->sunxi_lcd_power_disable = bsp_disp_lcd_power_disable;
src_ops->sunxi_lcd_pin_cfg = bsp_disp_lcd_pin_cfg;
src_ops->sunxi_lcd_dsi_clk_enable = dsi_clk_enable;
src_ops->sunxi_lcd_get_driver_name = bsp_disp_lcd_get_driver_name;
src_ops->sunxi_lcd_set_panel_funs = bsp_disp_lcd_set_panel_funs;
src_ops->sunxi_lcd_get_panel_para = bsp_disp_lcd_get_panel_para;
src_ops->sunxi_lcd_iic_read = bsp_disp_lcd_iic_read;
src_ops->sunxi_lcd_iic_write = bsp_disp_lcd_iic_write;
return 0;
}
extern int lcd_init(void);
/* One-time display driver bring-up:
 *  - fills __disp_bsp_init_para with register base/size and IRQ numbers
 *    for every display sub-module (two register maps, selected by
 *    CONFIG_ARCH_SUN7IW1P1),
 *  - registers the interrupt hook and, on SoCs with HDMI support, the
 *    HDMI callbacks,
 *  - runs bsp_disp_init()/bsp_disp_open() and the panel lcd_init().
 * Sets display_opens so drv_disp_exit()/disp_ioctl() know the driver is
 * up. Always returns 0. */
__s32 drv_disp_init(void)
{
__disp_bsp_init_para para;
sunxi_pwm_init();
debug("====display init =====\n");
memset(&para, 0, sizeof(__disp_bsp_init_para));
/* Register map for non-SUN7IW1P1 SoCs. */
#if (!defined CONFIG_ARCH_SUN7IW1P1)
para.reg_base[DISP_MOD_BE0] = SUNXI_DE_BE0_BASE;
para.reg_size[DISP_MOD_BE0] = 0x9fc;
para.reg_base[DISP_MOD_BE1] = SUNXI_DE_BE1_BASE;
para.reg_size[DISP_MOD_BE1] = 0x9fc;
para.reg_base[DISP_MOD_FE0] = SUNXI_DE_FE0_BASE;
para.reg_size[DISP_MOD_FE0] = 0x22c;
para.reg_base[DISP_MOD_FE1] = SUNXI_DE_FE1_BASE;
para.reg_size[DISP_MOD_FE1] = 0x22c;
para.reg_base[DISP_MOD_LCD0] = SUNXI_LCD0_BASE;
para.reg_size[DISP_MOD_LCD0] = 0x3fc;
para.reg_base[DISP_MOD_LCD1] = SUNXI_LCD1_BASE;
para.reg_size[DISP_MOD_LCD1] = 0x3fc;
para.reg_base[DISP_MOD_CCMU] = SUNXI_CCM_BASE;
para.reg_size[DISP_MOD_CCMU] = 0x2dc;
para.reg_base[DISP_MOD_PIOC] = SUNXI_PIO_BASE;
para.reg_size[DISP_MOD_PIOC] = 0x27c;
para.reg_base[DISP_MOD_PWM] = SUNXI_PWM_BASE;
para.reg_size[DISP_MOD_PWM] = 0x3c;
para.reg_base[DISP_MOD_DEU0] = SUNXI_DE_DEU0_BASE;
para.reg_size[DISP_MOD_DEU0] = 0x60;
para.reg_base[DISP_MOD_DEU1] = SUNXI_DE_DEU1_BASE;
para.reg_size[DISP_MOD_DEU1] = 0x60;
para.reg_base[DISP_MOD_CMU0] = SUNXI_DE_BE0_BASE;
para.reg_size[DISP_MOD_CMU0] = 0xfc;
para.reg_base[DISP_MOD_CMU1] = SUNXI_DE_BE1_BASE;
para.reg_size[DISP_MOD_CMU1] = 0xfc;
para.reg_base[DISP_MOD_DRC0] = SUNXI_DE_DRC0_BASE;
para.reg_size[DISP_MOD_DRC0] = 0xfc;
para.reg_base[DISP_MOD_DRC1] = SUNXI_DE_DRC1_BASE;
para.reg_size[DISP_MOD_DRC1] = 0xfc;
para.reg_base[DISP_MOD_DSI0] = SUNXI_MIPI_DSI0_BASE;
para.reg_size[DISP_MOD_DSI0] = 0x2fc;
para.reg_base[DISP_MOD_DSI0_DPHY] = SUNXI_MIPI_DSI0_DPHY_BASE;
para.reg_size[DISP_MOD_DSI0_DPHY] = 0xfc;
para.reg_base[DISP_MOD_HDMI] = SUNXI_HDMI_BASE;
para.reg_size[DISP_MOD_HDMI] = 0x58c;
para.irq[DISP_MOD_BE0] = AW_IRQ_DEBE0;
para.irq[DISP_MOD_BE1] = AW_IRQ_DEBE1;
para.irq[DISP_MOD_FE0] = AW_IRQ_DEFE0;
para.irq[DISP_MOD_FE1] = AW_IRQ_DEFE1;
para.irq[DISP_MOD_DRC0] = AW_IRQ_DRC01;
para.irq[DISP_MOD_DRC1] = AW_IRQ_DEU01;
para.irq[DISP_MOD_LCD0] = AW_IRQ_LCD0;
para.irq[DISP_MOD_LCD1] = AW_IRQ_LCD1;
para.irq[DISP_MOD_DSI0] = AW_IRQ_MIPIDSI;
/* Register map for CONFIG_ARCH_SUN7IW1P1. */
#else
para.reg_base[DISP_MOD_BE0] = SUNXI_DE_BE0_BASE;
para.reg_size[DISP_MOD_BE0] = 0x5ff;
para.reg_base[DISP_MOD_BE1] = SUNXI_DE_BE1_BASE;
para.reg_size[DISP_MOD_BE1] = 0x5ff;
para.reg_base[DISP_MOD_FE0] = SUNXI_DE_FE0_BASE;
para.reg_size[DISP_MOD_FE0] = 0xa18;
para.reg_base[DISP_MOD_FE1] = SUNXI_DE_FE1_BASE;
para.reg_size[DISP_MOD_FE1] = 0xa18;
para.reg_base[DISP_MOD_LCD0] = SUNXI_LCD0_BASE;
para.reg_size[DISP_MOD_LCD0] = 0x800;
para.reg_base[DISP_MOD_LCD1] = SUNXI_LCD1_BASE;
para.reg_size[DISP_MOD_LCD1] = 0x800;
para.reg_base[DISP_MOD_CCMU] = SUNXI_CCM_BASE;
para.reg_size[DISP_MOD_CCMU] = 0x2dc;
para.reg_base[DISP_MOD_PIOC] = SUNXI_PIO_BASE;
para.reg_size[DISP_MOD_PIOC] = 0x27c;
para.reg_base[DISP_MOD_PWM] = SUNXI_PWM_BASE;
para.reg_size[DISP_MOD_PWM] = 0x3c;
para.reg_base[DISP_MOD_HDMI] = SUNXI_HDMI_BASE;
para.reg_size[DISP_MOD_HDMI] = 0x580;
para.reg_base[DISP_MOD_TVE0] = SUNXI_TVE0_BASE;
para.reg_size[DISP_MOD_TVE0] = 0x20c;
para.irq[DISP_MOD_BE0] = AW_IRQ_DEBE0;
para.irq[DISP_MOD_BE1] = AW_IRQ_DEBE1;
para.irq[DISP_MOD_FE0] = AW_IRQ_DEFE0;
para.irq[DISP_MOD_FE1] = AW_IRQ_DEFE1;
para.irq[DISP_MOD_LCD0] = AW_IRQ_LCD0;
para.irq[DISP_MOD_LCD1] = AW_IRQ_LCD1;
#endif
para.disp_int_process = disp_int_process;
/* HDMI hooks only on SoCs with HDMI support. */
#if ((defined CONFIG_SUN6I) || (defined CONFIG_ARCH_SUN8IW1P1) || (defined CONFIG_ARCH_SUN7IW1P1))
para.hdmi_open = DRV_hdmi_open;
para.hdmi_close = DRV_hdmi_close;
para.hdmi_set_mode = DRV_hdmi_set_display_mode;
para.hdmi_mode_support = DRV_hdmi_mode_support;
para.hdmi_get_HPD_status = DRV_hdmi_get_HPD_status;
para.hdmi_set_pll = DRV_hdmi_set_pll;
#endif
//para.hdmi_get_disp_func = disp_get_hdmi_func;
memset(&g_disp_drv, 0, sizeof(__disp_drv_t));
bsp_disp_init(&para);
#if ((defined CONFIG_SUN6I) || (defined CONFIG_ARCH_SUN8IW1P1) || (defined CONFIG_ARCH_SUN7IW1P1))
Hdmi_set_reg_base(0x01c16000);
Hdmi_hal_init();
#endif
bsp_disp_open();
lcd_init();
/* Disabled manual LCD bring-up / register-dump debug code. */
#if 0
if(0)
{
__disp_color_t bk_color;
printf("====DRV_lcd_open before ====\n");
DRV_lcd_open(0);
printf("====DRV_lcd_open after ====\n");
bsp_disp_print_reg(1, DISP_MOD_PIOC);
bsp_disp_print_reg(1, DISP_MOD_PWM);
bsp_disp_print_reg(1, DISP_MOD_CCMU);
bsp_disp_print_reg(1, DISP_MOD_LCD0);
bsp_disp_lcd_set_src(0,DISP_LCDC_SRC_WHITE);
printf("====lcd_white_src ====\n");
__msdelay(1000);
*(__u32*)0x1c0c0f4 = 0xffff;
bsp_disp_print_reg(1, DISP_MOD_LCD0);
//bsp_disp_lcd_set_src(0,DISP_LCDC_SRC_BLACK);
//printf("====lcd_black_src ====\n");
//__msdelay(1000);
bsp_disp_print_reg(1, DISP_MOD_LCD0);
bsp_disp_lcd_set_src(0,DISP_LCDC_SRC_DE_CH1);
printf("====lcd_ch1_src ====\n");
__msdelay(1000);
bk_color.red = 0xff;
bk_color.green = 0x00;
bk_color.blue = 0x00;
bsp_disp_set_bk_color(0,&bk_color);
printf("==== red bk color ====\n");
__msdelay(1000);
return 0;
bk_color.red = 0x00;
bk_color.green = 0xff;
bk_color.blue = 0x00;
bsp_disp_set_bk_color(0,&bk_color);
printf("==== red green color ====\n");
__msdelay(1000);
bk_color.red = 0x00;
bk_color.green = 0x00;
bk_color.blue = 0xff;
bsp_disp_set_bk_color(0,&bk_color);
printf("==== red bk color ====\n");
__msdelay(1000);
}
printf("====display init end ====\n");
#endif
printf("DRV_DISP_Init: opened\n");
display_opens = 1;
return 0;
}
/* Shuts the display driver down if it was opened; safe to call when not
 * initialized (prints a notice and returns 0). */
__s32 drv_disp_exit(void)
{
if(!display_opens)
{
printf("DRV_DISP_Exit: not open\n");
return 0;
}
display_opens = 0;
//bsp_disp_close();
bsp_disp_exit(g_disp_drv.exit_mode);
printf("DRV_DISP_Exit: closed\n");
return 0;
}
long disp_ioctl(void *hd, unsigned int cmd, void *arg)
{
unsigned long karg[4];
unsigned long ubuffer[4] = {0};
__s32 ret = 0;
if(!display_opens)
{
printf("de not open\n");
return -1;
}
if (copy_from_user((void*)karg,(void __user*)arg,4*sizeof(unsigned long)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ubuffer[0] = *(unsigned long*)karg;
ubuffer[1] = (*(unsigned long*)(karg+1));
ubuffer[2] = (*(unsigned long*)(karg+2));
ubuffer[3] = (*(unsigned long*)(karg+3));
if(cmd < DISP_CMD_FB_REQUEST)
{
if((ubuffer[0] != 0) && (ubuffer[0] != 1))
{
__wrn("para err in disp_ioctl, cmd = 0x%x,screen id = %d\n", cmd, (int)ubuffer[0]);
return -1;
}
}
if(suspend_status & 2)
{
__wrn("ioctl:%x fail when in suspend!\n", cmd);
return -1;
}
#if 0
if(cmd!=DISP_CMD_TV_GET_INTERFACE && cmd!=DISP_CMD_HDMI_GET_HPD_STATUS && cmd!=DISP_CMD_GET_OUTPUT_TYPE
&& cmd!=DISP_CMD_SCN_GET_WIDTH && cmd!=DISP_CMD_SCN_GET_HEIGHT
&& cmd!=DISP_CMD_VIDEO_SET_FB && cmd!=DISP_CMD_VIDEO_GET_FRAME_ID)
{
OSAL_PRINTF("cmd:0x%x,%ld,%ld\n",cmd, ubuffer[0], ubuffer[1]);
}
#endif
switch(cmd)
{
//----disp global----
case DISP_CMD_SET_BKCOLOR:
{
__disp_color_t para;
if(copy_from_user(¶, (void __user *)ubuffer[1],sizeof(__disp_color_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_set_bk_color(ubuffer[0], ¶);
break;
}
case DISP_CMD_SET_COLORKEY:
{
__disp_colorkey_t para;
if(copy_from_user(¶, (void __user *)ubuffer[1],sizeof(__disp_colorkey_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_set_color_key(ubuffer[0], ¶);
break;
}
case DISP_CMD_SET_PALETTE_TBL:
if((ubuffer[1] == 0) || ((int)ubuffer[3] <= 0))
{
__wrn("para invalid in disp ioctrl DISP_CMD_SET_PALETTE_TBL,buffer:0x%x, size:0x%x\n", (unsigned int)ubuffer[1], (unsigned int)ubuffer[3]);
return -1;
}
if(copy_from_user(gbuffer, (void __user *)ubuffer[1],ubuffer[3]))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_set_palette_table(ubuffer[0], (__u32 *)gbuffer, ubuffer[2], ubuffer[3]);
break;
case DISP_CMD_GET_PALETTE_TBL:
if((ubuffer[1] == 0) || ((int)ubuffer[3] <= 0))
{
__wrn("para invalid in disp ioctrl DISP_CMD_GET_PALETTE_TBL,buffer:0x%x, size:0x%x\n", (unsigned int)ubuffer[1], (unsigned int)ubuffer[3]);
return -1;
}
ret = bsp_disp_get_palette_table(ubuffer[0], (__u32 *)gbuffer, ubuffer[2], ubuffer[3]);
if(copy_to_user((void __user *)ubuffer[1], gbuffer,ubuffer[3]))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
case DISP_CMD_START_CMD_CACHE:
ret = bsp_disp_cmd_cache(ubuffer[0]);
break;
case DISP_CMD_EXECUTE_CMD_AND_STOP_CACHE:
ret = bsp_disp_cmd_submit(ubuffer[0]);
break;
case DISP_CMD_GET_OUTPUT_TYPE:
ret = bsp_disp_get_output_type(ubuffer[0]);
break;
case DISP_CMD_SCN_GET_WIDTH:
ret = bsp_disp_get_screen_width(ubuffer[0]);
break;
case DISP_CMD_SCN_GET_HEIGHT:
ret = bsp_disp_get_screen_height(ubuffer[0]);
break;
case DISP_CMD_SET_GAMMA_TABLE:
if((ubuffer[1] == 0) || ((int)ubuffer[2] <= 0))
{
__wrn("para invalid in disp ioctrl DISP_CMD_SET_GAMMA_TABLE,buffer:0x%x, size:0x%x\n", (unsigned int)ubuffer[1], (unsigned int)ubuffer[2]);
return -1;
}
if(copy_from_user(gbuffer, (void __user *)ubuffer[1],ubuffer[2]))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_set_gamma_table(ubuffer[0], (__u32 *)gbuffer, ubuffer[2]);
break;
case DISP_CMD_GAMMA_CORRECTION_ON:
ret = bsp_disp_gamma_correction_enable(ubuffer[0]);
break;
case DISP_CMD_GAMMA_CORRECTION_OFF:
ret = bsp_disp_gamma_correction_disable(ubuffer[0]);
break;
case DISP_CMD_SET_BRIGHT:
ret = bsp_disp_cmu_set_bright(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_GET_BRIGHT:
ret = bsp_disp_cmu_get_bright(ubuffer[0]);
break;
case DISP_CMD_SET_CONTRAST:
ret = bsp_disp_cmu_set_contrast(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_GET_CONTRAST:
ret = bsp_disp_cmu_get_contrast(ubuffer[0]);
break;
case DISP_CMD_SET_SATURATION:
ret = bsp_disp_cmu_set_saturation(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_GET_SATURATION:
ret = bsp_disp_cmu_get_saturation(ubuffer[0]);
break;
case DISP_CMD_SET_HUE:
ret = bsp_disp_cmu_set_hue(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_GET_HUE:
ret = bsp_disp_cmu_get_hue(ubuffer[0]);
break;
case DISP_CMD_ENHANCE_ON:
ret = bsp_disp_cmu_enable(ubuffer[0], 1);
break;
case DISP_CMD_ENHANCE_OFF:
ret = bsp_disp_cmu_enable(ubuffer[0], 0);
break;
case DISP_CMD_GET_ENHANCE_EN:
ret = bsp_disp_cmu_get_enable(ubuffer[0]);
break;
case DISP_CMD_SET_ENHANCE_MODE:
ret = bsp_disp_cmu_set_mode(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_GET_ENHANCE_MODE:
ret = bsp_disp_cmu_get_mode(ubuffer[0]);
break;
case DISP_CMD_SET_ENHANCE_WINDOW:
{
__disp_rect_t para;
if(copy_from_user(¶, (void __user *)ubuffer[1],sizeof(__disp_rect_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_cmu_set_window(ubuffer[0], ¶);
break;
}
case DISP_CMD_GET_ENHANCE_WINDOW:
{
__disp_rect_t para;
ret = bsp_disp_cmu_get_window(ubuffer[0], ¶);
if(copy_to_user((void __user *)ubuffer[1],¶, sizeof(__disp_layer_info_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_CAPTURE_SCREEN:
ret = bsp_disp_capture_screen(ubuffer[0], (__disp_capture_screen_para_t *)ubuffer[1]);
break;
case DISP_CMD_SET_SCREEN_SIZE:
ret = bsp_disp_set_screen_size(ubuffer[0], (__disp_rectsz_t*)ubuffer[1]);
break;
case DISP_CMD_DE_FLICKER_ON:
ret = bsp_disp_de_flicker_enable(ubuffer[0], 1);
break;
case DISP_CMD_DE_FLICKER_OFF:
ret = bsp_disp_de_flicker_enable(ubuffer[0], 0);
break;
case DISP_CMD_DRC_ON:
ret = bsp_disp_drc_enable(ubuffer[0], 1);
break;
case DISP_CMD_DRC_OFF:
ret = bsp_disp_drc_enable(ubuffer[0], 0);
break;
case DISP_CMD_GET_DRC_EN:
ret = bsp_disp_drc_get_enable(ubuffer[0]);
break;
case DISP_CMD_DRC_SET_WINDOW:
{
__disp_rect_t para;
if(copy_from_user(¶, (void __user *)ubuffer[1],sizeof(__disp_rect_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_drc_set_window(ubuffer[0], ¶);
break;
}
case DISP_CMD_DRC_GET_WINDOW:
{
__disp_rect_t para;
ret = bsp_disp_drc_get_window(ubuffer[0], ¶);
if(copy_to_user((void __user *)ubuffer[1], ¶,sizeof(__disp_rect_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
return ret;
break;
}
//----layer----
case DISP_CMD_LAYER_REQUEST:
ret = bsp_disp_layer_request(ubuffer[0], (__disp_layer_work_mode_t)ubuffer[1]);
break;
case DISP_CMD_LAYER_RELEASE:
ret = bsp_disp_layer_release(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_OPEN:
ret = bsp_disp_layer_open(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_CLOSE:
ret = bsp_disp_layer_close(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_FB:
{
__disp_fb_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_fb_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_layer_set_framebuffer(ubuffer[0], ubuffer[1], ¶);
//DRV_disp_wait_cmd_finish(ubuffer[0]);
break;
}
case DISP_CMD_LAYER_GET_FB:
{
__disp_fb_t para;
ret = bsp_disp_layer_get_framebuffer(ubuffer[0], ubuffer[1], ¶);
if(copy_to_user((void __user *)ubuffer[2], ¶,sizeof(__disp_fb_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_LAYER_SET_SRC_WINDOW:
{
__disp_rect_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_rect_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_layer_set_src_window(ubuffer[0],ubuffer[1], ¶);
//DRV_disp_wait_cmd_finish(ubuffer[0]);
break;
}
case DISP_CMD_LAYER_GET_SRC_WINDOW:
{
__disp_rect_t para;
ret = bsp_disp_layer_get_src_window(ubuffer[0],ubuffer[1], ¶);
if(copy_to_user((void __user *)ubuffer[2], ¶, sizeof(__disp_rect_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_LAYER_SET_SCN_WINDOW:
{
__disp_rect_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_rect_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_layer_set_screen_window(ubuffer[0],ubuffer[1], ¶);
//DRV_disp_wait_cmd_finish(ubuffer[0]);
break;
}
case DISP_CMD_LAYER_GET_SCN_WINDOW:
{
__disp_rect_t para;
ret = bsp_disp_layer_get_screen_window(ubuffer[0],ubuffer[1], ¶);
if(copy_to_user((void __user *)ubuffer[2], ¶, sizeof(__disp_rect_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_LAYER_SET_PARA:
{
__disp_layer_info_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_layer_info_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_layer_set_para(ubuffer[0], ubuffer[1], ¶);
//DRV_disp_wait_cmd_finish(ubuffer[0]);
break;
}
case DISP_CMD_LAYER_GET_PARA:
{
__disp_layer_info_t para;
ret = bsp_disp_layer_get_para(ubuffer[0], ubuffer[1], ¶);
if(copy_to_user((void __user *)ubuffer[2],¶, sizeof(__disp_layer_info_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_LAYER_TOP:
ret = bsp_disp_layer_set_top(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_BOTTOM:
ret = bsp_disp_layer_set_bottom(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_ALPHA_ON:
ret = bsp_disp_layer_alpha_enable(ubuffer[0], ubuffer[1], 1);
break;
case DISP_CMD_LAYER_ALPHA_OFF:
ret = bsp_disp_layer_alpha_enable(ubuffer[0], ubuffer[1], 0);
break;
case DISP_CMD_LAYER_SET_ALPHA_VALUE:
ret = bsp_disp_layer_set_alpha_value(ubuffer[0], ubuffer[1], ubuffer[2]);
//DRV_disp_wait_cmd_finish(ubuffer[0]);
break;
case DISP_CMD_LAYER_CK_ON:
ret = bsp_disp_layer_colorkey_enable(ubuffer[0], ubuffer[1], 1);
break;
case DISP_CMD_LAYER_CK_OFF:
ret = bsp_disp_layer_colorkey_enable(ubuffer[0], ubuffer[1], 0);
break;
case DISP_CMD_LAYER_SET_PIPE:
ret = bsp_disp_layer_set_pipe(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_ALPHA_VALUE:
ret = bsp_disp_layer_get_alpha_value(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_GET_ALPHA_EN:
ret = bsp_disp_layer_get_alpha_enable(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_GET_CK_EN:
ret = bsp_disp_layer_get_colorkey_enable(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_GET_PRIO:
ret = bsp_disp_layer_get_piro(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_GET_PIPE:
ret = bsp_disp_layer_get_pipe(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_SMOOTH:
ret = bsp_disp_layer_set_smooth(ubuffer[0], ubuffer[1],(__disp_video_smooth_t) ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_SMOOTH:
ret = bsp_disp_layer_get_smooth(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_BRIGHT:
ret = bsp_disp_cmu_layer_set_bright(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_BRIGHT:
ret = bsp_disp_cmu_layer_get_bright(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_CONTRAST:
ret = bsp_disp_cmu_layer_set_contrast(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_CONTRAST:
ret = bsp_disp_cmu_layer_get_contrast(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_SATURATION:
ret = bsp_disp_cmu_layer_set_saturation(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_SATURATION:
ret = bsp_disp_cmu_layer_get_saturation(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_HUE:
ret = bsp_disp_cmu_layer_set_hue(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_HUE:
ret = bsp_disp_cmu_layer_get_hue(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_ENHANCE_ON:
ret = bsp_disp_cmu_layer_enable(ubuffer[0], ubuffer[1], 1);
break;
case DISP_CMD_LAYER_ENHANCE_OFF:
ret = bsp_disp_cmu_layer_enable(ubuffer[0], ubuffer[1], 0);
break;
case DISP_CMD_LAYER_GET_ENHANCE_EN:
ret = bsp_disp_cmu_layer_get_enable(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_ENHANCE_MODE:
ret = bsp_disp_cmu_layer_set_mode(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_ENHANCE_MODE:
ret = bsp_disp_cmu_layer_get_mode(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_ENHANCE_WINDOW:
{
__disp_rect_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_rect_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_cmu_layer_set_window(ubuffer[0], ubuffer[1], ¶);
break;
}
case DISP_CMD_LAYER_GET_ENHANCE_WINDOW:
{
    __disp_rect_t para;

    ret = bsp_disp_cmu_layer_get_window(ubuffer[0], ubuffer[1], &para);
    /* BUG FIX: the copy size was sizeof(__disp_layer_info_t) while `para`
     * is a __disp_rect_t; the oversized copy leaks kernel stack contents
     * to userspace and can overrun the user buffer. */
    if(copy_to_user((void __user *)ubuffer[2], &para, sizeof(__disp_rect_t)))
    {
        __wrn("copy_to_user fail\n");
        return -EFAULT;
    }
    break;
}
case DISP_CMD_LAYER_VPP_ON:
ret = bsp_disp_deu_enable(ubuffer[0], ubuffer[1], 1);
break;
case DISP_CMD_LAYER_VPP_OFF:
ret = bsp_disp_deu_enable(ubuffer[0], ubuffer[1], 0);
break;
case DISP_CMD_LAYER_GET_VPP_EN:
ret = bsp_disp_deu_get_enable(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_LUMA_SHARP_LEVEL:
ret = bsp_disp_deu_set_luma_sharp_level(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_LUMA_SHARP_LEVEL:
ret = bsp_disp_deu_get_luma_sharp_level(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_CHROMA_SHARP_LEVEL:
ret = bsp_disp_deu_set_chroma_sharp_level(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_CHROMA_SHARP_LEVEL:
ret = bsp_disp_deu_get_chroma_sharp_level(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_WHITE_EXTEN_LEVEL:
ret = bsp_disp_deu_set_white_exten_level(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_WHITE_EXTEN_LEVEL:
ret = bsp_disp_deu_get_white_exten_level(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_SET_BLACK_EXTEN_LEVEL:
ret = bsp_disp_deu_set_black_exten_level(ubuffer[0], ubuffer[1], ubuffer[2]);
break;
case DISP_CMD_LAYER_GET_BLACK_EXTEN_LEVEL:
ret = bsp_disp_deu_get_black_exten_level(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_LAYER_VPP_SET_WINDOW:
{
__disp_rect_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_rect_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_deu_set_window(ubuffer[0], ubuffer[1], ¶);
break;
}
case DISP_CMD_LAYER_VPP_GET_WINDOW:
{
    __disp_rect_t para;

    ret = bsp_disp_deu_get_window(ubuffer[0], ubuffer[1], &para);
    if(copy_to_user((void __user *)ubuffer[2], &para, sizeof(__disp_rect_t)))
    {
        __wrn("copy_to_user fail\n");
        return -EFAULT;
    }
    /* Removed the redundant early `return ret;` (its `break` was
     * unreachable); the common `return ret` at the end of the function
     * yields the same value, consistent with the other GET handlers. */
    break;
}
//----scaler----
case DISP_CMD_SCALER_REQUEST:
ret = bsp_disp_scaler_request();
break;
case DISP_CMD_SCALER_RELEASE:
ret = bsp_disp_scaler_release(ubuffer[1]);
break;
case DISP_CMD_SCALER_EXECUTE:
{
__disp_scaler_para_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_scaler_para_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_scaler_start(ubuffer[1],¶);
break;
}
case DISP_CMD_SCALER_EXECUTE_EX:
{
__disp_scaler_para_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_scaler_para_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_scaler_start_ex(ubuffer[1],¶);
break;
}
//----hwc----
case DISP_CMD_HWC_OPEN:
ret = bsp_disp_hwc_enable(ubuffer[0], 1);
break;
case DISP_CMD_HWC_CLOSE:
ret = bsp_disp_hwc_enable(ubuffer[0], 0);
break;
case DISP_CMD_HWC_SET_POS:
{
__disp_pos_t para;
if(copy_from_user(¶, (void __user *)ubuffer[1],sizeof(__disp_pos_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_hwc_set_pos(ubuffer[0], ¶);
break;
}
case DISP_CMD_HWC_GET_POS:
{
__disp_pos_t para;
ret = bsp_disp_hwc_get_pos(ubuffer[0], ¶);
if(copy_to_user((void __user *)ubuffer[1],¶, sizeof(__disp_pos_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_HWC_SET_FB:
{
__disp_hwc_pattern_t para;
if(copy_from_user(¶, (void __user *)ubuffer[1],sizeof(__disp_hwc_pattern_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_hwc_set_framebuffer(ubuffer[0], ¶);
break;
}
case DISP_CMD_HWC_SET_PALETTE_TABLE:
if((ubuffer[1] == 0) || ((int)ubuffer[3] <= 0))
{
__wrn("para invalid in display ioctrl DISP_CMD_HWC_SET_PALETTE_TABLE,buffer:0x%x, size:0x%x\n", (unsigned int)ubuffer[1], (unsigned int)ubuffer[3]);
return -1;
}
if(copy_from_user(gbuffer, (void __user *)ubuffer[1],ubuffer[3]))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_hwc_set_palette(ubuffer[0], (void*)gbuffer, ubuffer[2], ubuffer[3]);
break;
//----video----
case DISP_CMD_VIDEO_START:
ret = bsp_disp_video_start(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_VIDEO_STOP:
ret = bsp_disp_video_stop(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_VIDEO_SET_FB:
{
__disp_video_fb_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_video_fb_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = bsp_disp_video_set_fb(ubuffer[0], ubuffer[1], ¶);
break;
}
case DISP_CMD_VIDEO_GET_FRAME_ID:
ret = bsp_disp_video_get_frame_id(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_VIDEO_GET_DIT_INFO:
{
__disp_dit_info_t para;
ret = bsp_disp_video_get_dit_info(ubuffer[0], ubuffer[1],¶);
if(copy_to_user((void __user *)ubuffer[2],¶, sizeof(__disp_dit_info_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
//----lcd----
case DISP_CMD_LCD_ON:
ret = DRV_lcd_open(ubuffer[0]);
output_type = DISP_OUTPUT_TYPE_LCD;
if(suspend_status != 0)
{
suspend_output_type[ubuffer[0]] = DISP_OUTPUT_TYPE_LCD;
}
break;
case DISP_CMD_LCD_OFF:
ret = DRV_lcd_close(ubuffer[0]);
if(suspend_status != 0)
{
suspend_output_type[ubuffer[0]] = DISP_OUTPUT_TYPE_NONE;
}
break;
case DISP_CMD_LCD_SET_BRIGHTNESS:
ret = bsp_disp_lcd_set_bright(ubuffer[0], ubuffer[1], 0);
break;
case DISP_CMD_LCD_GET_BRIGHTNESS:
ret = bsp_disp_lcd_get_bright(ubuffer[0]);
break;
// case DISP_CMD_LCD_CPUIF_XY_SWITCH:
// ret = bsp_disp_lcd_xy_switch(ubuffer[0], ubuffer[1]);
// break;
case DISP_CMD_LCD_SET_SRC:
ret = bsp_disp_lcd_set_src(ubuffer[0], (__disp_lcdc_src_t)ubuffer[1]);
break;
case DISP_CMD_LCD_USER_DEFINED_FUNC:
ret = bsp_disp_lcd_user_defined_func(ubuffer[0], ubuffer[1], ubuffer[2], ubuffer[3]);
break;
//----tv----
case DISP_CMD_TV_ON:
ret = bsp_disp_tv_open(ubuffer[0]);
if(suspend_status != 0)
{
suspend_output_type[ubuffer[0]] = DISP_OUTPUT_TYPE_TV;
}
break;
case DISP_CMD_TV_OFF:
ret = bsp_disp_tv_close(ubuffer[0]);
if(suspend_status != 0)
{
suspend_output_type[ubuffer[0]] = DISP_OUTPUT_TYPE_NONE;
}
break;
case DISP_CMD_TV_SET_MODE:
ret = bsp_disp_tv_set_mode(ubuffer[0], (__disp_tv_mode_t)ubuffer[1]);
break;
case DISP_CMD_TV_GET_MODE:
ret = bsp_disp_tv_get_mode(ubuffer[0]);
break;
case DISP_CMD_TV_AUTOCHECK_ON:
ret = bsp_disp_tv_auto_check_enable(ubuffer[0]);
break;
case DISP_CMD_TV_AUTOCHECK_OFF:
ret = bsp_disp_tv_auto_check_disable(ubuffer[0]);
break;
case DISP_CMD_TV_GET_INTERFACE:
if(suspend_status != 0)
{
ret = DISP_TV_NONE;
}
else
{
ret = bsp_disp_tv_get_interface(ubuffer[0]);
}
break;
case DISP_CMD_TV_SET_SRC:
ret = bsp_disp_tv_set_src(ubuffer[0], (__disp_lcdc_src_t)ubuffer[1]);
break;
case DISP_CMD_TV_GET_DAC_STATUS:
if(suspend_status != 0)
{
ret = 0;
}
else
{
ret = bsp_disp_tv_get_dac_status(ubuffer[0], ubuffer[1]);
}
break;
case DISP_CMD_TV_SET_DAC_SOURCE:
ret = bsp_disp_tv_set_dac_source(ubuffer[0], ubuffer[1], (__disp_tv_dac_source)ubuffer[2]);
break;
case DISP_CMD_TV_GET_DAC_SOURCE:
ret = bsp_disp_tv_get_dac_source(ubuffer[0], ubuffer[1]);
break;
//----hdmi----
case DISP_CMD_HDMI_ON:
ret = bsp_disp_hdmi_open(ubuffer[0]);
output_type = DISP_OUTPUT_TYPE_HDMI;
if(suspend_status != 0)
{
suspend_output_type[ubuffer[0]] = DISP_OUTPUT_TYPE_HDMI;
}
break;
case DISP_CMD_HDMI_OFF:
ret = bsp_disp_hdmi_close(ubuffer[0]);
if(suspend_status != 0)
{
suspend_output_type[ubuffer[0]] = DISP_OUTPUT_TYPE_NONE;
}
break;
case DISP_CMD_HDMI_SET_MODE:
ret = bsp_disp_hdmi_set_mode(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_HDMI_GET_MODE:
ret = bsp_disp_hdmi_get_mode(ubuffer[0]);
break;
case DISP_CMD_HDMI_GET_HPD_STATUS:
if(suspend_status != 0)
{
ret = 0;
}
else
{
ret = bsp_disp_hdmi_get_hpd_status(ubuffer[0]);
}
break;
case DISP_CMD_HDMI_SUPPORT_MODE:
ret = bsp_disp_hdmi_check_support_mode(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_HDMI_SET_SRC:
ret = bsp_disp_hdmi_set_src(ubuffer[0], (__disp_lcdc_src_t)ubuffer[1]);
break;
//----vga----
case DISP_CMD_VGA_ON:
ret = bsp_disp_vga_open(ubuffer[0]);
if(suspend_status != 0)
{
suspend_output_type[ubuffer[0]] = DISP_OUTPUT_TYPE_VGA;
}
break;
case DISP_CMD_VGA_OFF:
ret = bsp_disp_vga_close(ubuffer[0]);
if(suspend_status != 0)
{
suspend_output_type[ubuffer[0]] = DISP_OUTPUT_TYPE_NONE;
}
break;
case DISP_CMD_VGA_SET_MODE:
ret = bsp_disp_vga_set_mode(ubuffer[0], (__disp_vga_mode_t)ubuffer[1]);
break;
case DISP_CMD_VGA_GET_MODE:
ret = bsp_disp_vga_get_mode(ubuffer[0]);
break;
case DISP_CMD_VGA_SET_SRC:
ret = bsp_disp_vga_set_src(ubuffer[0], (__disp_lcdc_src_t)ubuffer[1]);
break;
//----sprite----
/*
case DISP_CMD_SPRITE_OPEN:
ret = bsp_disp_sprite_open(ubuffer[0]);
break;
case DISP_CMD_SPRITE_CLOSE:
ret = bsp_disp_sprite_close(ubuffer[0]);
break;
case DISP_CMD_SPRITE_SET_FORMAT:
ret = bsp_disp_sprite_set_format(ubuffer[0], (__disp_pixel_fmt_t)ubuffer[1], (__disp_pixel_seq_t)ubuffer[2]);
break;
case DISP_CMD_SPRITE_GLOBAL_ALPHA_ENABLE:
ret = bsp_disp_sprite_alpha_enable(ubuffer[0]);
break;
case DISP_CMD_SPRITE_GLOBAL_ALPHA_DISABLE:
ret = bsp_disp_sprite_alpha_disable(ubuffer[0]);
break;
case DISP_CMD_SPRITE_GET_GLOBAL_ALPHA_ENABLE:
ret = bsp_disp_sprite_get_alpha_enable(ubuffer[0]);
break;
case DISP_CMD_SPRITE_SET_GLOBAL_ALPHA_VALUE:
ret = bsp_disp_sprite_set_alpha_vale(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_GET_GLOBAL_ALPHA_VALUE:
ret = bsp_disp_sprite_get_alpha_value(ubuffer[0]);
break;
case DISP_CMD_SPRITE_SET_ORDER:
ret = bsp_disp_sprite_set_order(ubuffer[0], ubuffer[1],ubuffer[2]);
break;
case DISP_CMD_SPRITE_GET_TOP_BLOCK:
ret = bsp_disp_sprite_get_top_block(ubuffer[0]);
break;
case DISP_CMD_SPRITE_GET_BOTTOM_BLOCK:
ret = bsp_disp_sprite_get_bottom_block(ubuffer[0]);
break;
case DISP_CMD_SPRITE_SET_PALETTE_TBL:
if((ubuffer[1] == 0) || ((int)ubuffer[3] <= 0))
{
__wrn("para invalid in display ioctrl DISP_CMD_SPRITE_SET_PALETTE_TBL,buffer:0x%x, size:0x%x\n", (unsigned int)ubuffer[1], (unsigned int)ubuffer[3]);
return -1;
}
if(copy_from_user(gbuffer, (void __user *)ubuffer[1],ubuffer[3]))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_sprite_set_palette_table(ubuffer[0], (__u32 * )gbuffer,ubuffer[2],ubuffer[3]);
break;
case DISP_CMD_SPRITE_GET_BLOCK_NUM:
ret = bsp_disp_sprite_get_block_number(ubuffer[0]);
break;
case DISP_CMD_SPRITE_BLOCK_REQUEST:
{
__disp_sprite_block_para_t para;
if(copy_from_user(¶, (void __user *)ubuffer[1],sizeof(__disp_sprite_block_para_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_sprite_block_request(ubuffer[0], ¶);
break;
}
case DISP_CMD_SPRITE_BLOCK_RELEASE:
ret = bsp_disp_sprite_block_release(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_BLOCK_SET_SCREEN_WINDOW:
{
__disp_rect_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_rect_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_sprite_block_set_screen_win(ubuffer[0], ubuffer[1],¶);
break;
}
case DISP_CMD_SPRITE_BLOCK_GET_SCREEN_WINDOW:
{
__disp_rect_t para;
ret = bsp_disp_sprite_block_get_srceen_win(ubuffer[0], ubuffer[1],¶);
if(copy_to_user((void __user *)ubuffer[2],¶, sizeof(__disp_rect_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_SPRITE_BLOCK_SET_SOURCE_WINDOW:
{
__disp_rect_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_rect_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_sprite_block_set_src_win(ubuffer[0], ubuffer[1],¶);
break;
}
case DISP_CMD_SPRITE_BLOCK_GET_SOURCE_WINDOW:
{
__disp_rect_t para;
ret = bsp_disp_sprite_block_get_src_win(ubuffer[0], ubuffer[1],¶);
if(copy_to_user((void __user *)ubuffer[2],¶, sizeof(__disp_rect_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_SPRITE_BLOCK_SET_FB:
{
__disp_fb_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_fb_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_sprite_block_set_framebuffer(ubuffer[0], ubuffer[1],¶);
break;
}
case DISP_CMD_SPRITE_BLOCK_GET_FB:
{
__disp_fb_t para;
ret = bsp_disp_sprite_block_get_framebufer(ubuffer[0], ubuffer[1],¶);
if(copy_to_user((void __user *)ubuffer[2],¶, sizeof(__disp_fb_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_SPRITE_BLOCK_SET_TOP:
ret = bsp_disp_sprite_block_set_top(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_BLOCK_SET_BOTTOM:
ret = bsp_disp_sprite_block_set_bottom(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_BLOCK_GET_PREV_BLOCK:
ret = bsp_disp_sprite_block_get_pre_block(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_BLOCK_GET_NEXT_BLOCK:
ret = bsp_disp_sprite_block_get_next_block(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_BLOCK_GET_PRIO:
ret = bsp_disp_sprite_block_get_prio(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_BLOCK_OPEN:
ret = bsp_disp_sprite_block_open(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_BLOCK_CLOSE:
ret = bsp_disp_sprite_block_close(ubuffer[0], ubuffer[1]);
break;
case DISP_CMD_SPRITE_BLOCK_SET_PARA:
{
__disp_sprite_block_para_t para;
if(copy_from_user(¶, (void __user *)ubuffer[2],sizeof(__disp_sprite_block_para_t)))
{
__wrn("copy_from_user fail\n");
return -EFAULT;
}
ret = bsp_disp_sprite_block_set_para(ubuffer[0], ubuffer[1],¶);
break;
}
case DISP_CMD_SPRITE_BLOCK_GET_PARA:
{
__disp_sprite_block_para_t para;
ret = bsp_disp_sprite_block_get_para(ubuffer[0], ubuffer[1],¶);
if(copy_to_user((void __user *)ubuffer[2],¶, sizeof(__disp_sprite_block_para_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
*/
#ifdef __LINUX_OSAL__
//----framebuffer----
case DISP_CMD_FB_REQUEST:
{
__disp_fb_create_para_t para;
if(copy_from_user(¶, (void __user *)ubuffer[1],sizeof(__disp_fb_create_para_t)))
{
__wrn("copy_from_user fail %d \n",__LINE__);
return -EFAULT;
}
ret = Display_Fb_Request(ubuffer[0], ¶);
break;
}
case DISP_CMD_FB_RELEASE:
ret = Display_Fb_Release(ubuffer[0]);
break;
case DISP_CMD_FB_GET_PARA:
{
__disp_fb_create_para_t para;
ret = Display_Fb_get_para(ubuffer[0], ¶);
if(copy_to_user((void __user *)ubuffer[1],¶, sizeof(__disp_fb_create_para_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_GET_DISP_INIT_PARA:
{
__disp_init_t para;
ret = Display_get_disp_init_para(¶);
if(copy_to_user((void __user *)ubuffer[0],¶, sizeof(__disp_init_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
//----for test----
case DISP_CMD_MEM_REQUEST:
ret = disp_mem_request(ubuffer[0]);
break;
case DISP_CMD_MEM_RELASE:
ret = disp_mem_release(ubuffer[0]);
break;
case DISP_CMD_MEM_SELIDX:
g_disp_mm_sel = ubuffer[0];
break;
case DISP_CMD_MEM_GETADR:
ret = g_disp_mm[ubuffer[0]].mem_start;
break;
case DISP_CMD_MEM_GET_INFO:
{
__disp_mm_t para;
ret = disp_mem_get_info(ubuffer[0], ¶);
if(copy_to_user((void __user *)ubuffer[1],¶, sizeof(__disp_mm_t)))
{
__wrn("copy_to_user fail\n");
return -EFAULT;
}
break;
}
case DISP_CMD_SUSPEND:
{
    /* BUG FIX: `state` was passed to disp_suspend() uninitialized,
     * handing random stack contents to the suspend path. Zero-initialize
     * it so the callee sees a deterministic message. */
    pm_message_t state = { 0 };
    ret = disp_suspend(0, state);
    break;
}
case DISP_CMD_RESUME:
ret = disp_resume(0);
break;
#endif
case DISP_CMD_SET_EXIT_MODE:
ret = g_disp_drv.exit_mode = ubuffer[0];
break;
case DISP_CMD_LCD_CHECK_OPEN_FINISH:
ret = DRV_lcd_check_open_finished(ubuffer[0]);
break;
case DISP_CMD_LCD_CHECK_CLOSE_FINISH:
ret = DRV_lcd_check_close_finished(ubuffer[0]);
break;
case DISP_CMD_PRINT_REG:
ret = bsp_disp_print_reg(1, ubuffer[0]);
break;
default:
break;
}
return ret;
}
#define DELAY_ONCE_TIME (50)

/* Poll the LCD open/close completion status for screen `screen_id` until it
 * reports finished (1), reports an error (-1), or `timeout_ms` elapses.
 * `wait_close` selects which check to poll: nonzero polls
 * DRV_lcd_check_close_finished(), zero polls DRV_lcd_check_open_finished().
 * Returns 0 on success, -1 on error or timeout. */
static __s32 disp_wait_lcd_finish(__u32 screen_id, __s32 wait_close, __s32 timeout_ms)
{
    __s32 check_time = timeout_ms / DELAY_ONCE_TIME;
    __s32 ret;

    while (1)
    {
        ret = wait_close ? DRV_lcd_check_close_finished(screen_id)
                         : DRV_lcd_check_open_finished(screen_id);
        if (ret == 1)
        {
            return 0;
        }
        else if (ret == -1)
        {
            return -1;
        }
        __msdelay(DELAY_ONCE_TIME);
        check_time--;
        if (check_time <= 0)
        {
            return -1;
        }
    }
}

/* Boot-standby hook: closes the active output and gates display clocks on
 * BOOT_MOD_ENTER_STANDBY; re-enables clocks and reopens the output on
 * BOOT_MOD_EXIT_STANDBY. Returns 0 on success, -1 on error/timeout or an
 * unknown command.
 * NOTE(review): the LCD completion poll runs even on the HDMI path —
 * behavior preserved from the original; confirm this is intended. */
__s32 drv_disp_standby(__u32 cmd, void *pArg)
{
    if (cmd == BOOT_MOD_ENTER_STANDBY)
    {
        if (output_type == DISP_OUTPUT_TYPE_HDMI)
        {
            DRV_hdmi_close();
        }
        else
        {
            DRV_lcd_close(0);
        }
        if (disp_wait_lcd_finish(0, 1, 5000) != 0)
        {
            return -1;
        }
        bsp_disp_clk_off(3);
        return 0;
    }
    else if (cmd == BOOT_MOD_EXIT_STANDBY)
    {
        bsp_disp_clk_on(3);
        if (output_type == DISP_OUTPUT_TYPE_HDMI)
        {
            DRV_hdmi_open();
        }
        else
        {
            DRV_lcd_open(0);
        }
        return disp_wait_lcd_finish(0, 0, 5000);
    }
    return -1;
}
|
GodBastardNeil/somethings
|
projects/IPR_DIPLOM/diplom/project/frontend/public/js/apprentice_main.js
|
// Open a fresh socket to the server; do not try to reconnect automatically.
const socket = io.connect('', { 'forceNew': true, 'reconnection': false });

// When the connection drops, warn the user and reload the page shortly after.
socket.on('disconnect', () => {
    setTimeout(() => {
        alert("Потеряно соединение с сервером.");
        document.location.reload();
    }, 2000);
});

// The user login is the fifth segment of the page URL.
const userlog = window.location.href.split('/')[4];
socket.emit('take_user', userlog);
// Navigate the current tab to the test-taking page for this user.
async function do_test()
{
    const target = `/apprentice_do_test/${userlog}`;
    window.open(target, "_self");
}
// Navigate the current tab to the theory page for this user.
async function start_theory()
{
    const target = `/apprentice_theory/${userlog}`;
    window.open(target, "_self");
}
// Ask for confirmation, then leave to the landing page.
async function out_to_init() {
    const confirmed = confirm("Вы точно хотите выйти?");
    if (confirmed)
    {
        window.open("/init", "_self");
    }
}
// Fill in the taken / abandoned test counters received from the server.
socket.on('set_stat_from_data', (data) => {
    const taken = data['kol_n'];
    const abandoned = data['kol_e'];
    document.getElementById('howmany_tests').innerHTML += taken;
    document.getElementById('howmany_esc_tests').innerHTML += abandoned;
});

// Fill in the max / min / average score fields (scores arrive as fractions).
socket.on('set_balls_from_data', (data) => {
    const fields = [['max_ball', 'MAX'], ['min_ball', 'MIN'], ['sred_ball', 'SRED']];
    for (const [elementId, key] of fields) {
        document.getElementById(elementId).innerHTML += (data[key] * 100);
    }
});

// Append one statistics row for a completed test.
socket.on('addTestRow', (data) => {
    addTestRow(data['idтест'], data['лог_создатель'], data['отношение'], data['баллы']);
});
/**
 * Append one row (test id, creator login, attitude, score) to the
 * statistics table.
 *
 * Security fix: values come from the server (and ultimately from users),
 * so they are written with textContent rather than innerHTML to prevent
 * markup/script injection.
 */
async function addTestRow(id, creator_log, attitude, balls)
{
    let tbody = document.getElementById("table_of_stat_of_tests").getElementsByTagName("TBODY")[0];
    let row = document.createElement("TR");
    // One cell per column, in table order.
    for (const value of [id, creator_log, attitude, balls]) {
        let cell = document.createElement("TD");
        cell.textContent = value;
        row.appendChild(cell);
    }
    tbody.appendChild(row);
}
// Ask the server for this user's summary statistics.
async function get_main_stat()
{
    const eventName = 'get_stat';
    socket.emit(eventName, userlog);
}
// Ask the server for this user's score aggregates (max/min/average).
async function get_balls()
{
    const eventName = 'get_balls';
    socket.emit(eventName, userlog);
}
// как только документ прогрузится вызвать эти функции
document.addEventListener("DOMContentLoaded", function (event)
{
get_main_stat();
get_balls();
document.getElementById('do_test').addEventListener('click', () => do_test());
document.getElementById('start_theory').addEventListener('click', () => start_theory());
document.getElementById('out_to_init').addEventListener('click', () => out_to_init());
});
|
Site-Command/sourcegraph
|
enterprise/internal/codeintel/autoindex/inference/typescript.go
|
<reponame>Site-Command/sourcegraph
package inference
import (
"context"
"encoding/json"
"path/filepath"
"regexp"
"github.com/sourcegraph/sourcegraph/enterprise/internal/codeintel/autoindex/config"
)
const (
	// lsifTscImage is the container image used both for install steps and
	// as the indexer that runs lsif-tsc.
	lsifTscImage = "sourcegraph/lsif-node:autoindex"
	// nMuslCommand installs a musl-compatible Node build via `n`, picking
	// the version automatically from the repository's version hints.
	nMuslCommand = "N_NODE_MIRROR=https://unofficial-builds.nodejs.org/download/release n --arch x64-musl auto"
)

// lsifTscJobRecognizer infers lsif-tsc (TypeScript) index jobs from the
// list of paths in a repository.
type lsifTscJobRecognizer struct{}

// Compile-time assertion that lsifTscJobRecognizer implements IndexJobRecognizer.
var _ IndexJobRecognizer = lsifTscJobRecognizer{}

// lernaConfig models the subset of lerna.json this package reads.
type lernaConfig struct {
	NPMClient string `json:"npmClient"`
}

// packageJSONEngine models the optional "engines" block of package.json,
// used to detect whether a Node version can be derived for the repo.
type packageJSONEngine struct {
	Engines *struct {
		Node *string `json:"node"`
	} `json:"engines"`
}
// CanIndex reports whether at least one repository path is indexable by lsif-tsc.
func (r lsifTscJobRecognizer) CanIndex(paths []string, gitserver GitserverClientWrapper) bool {
	for _, candidate := range paths {
		if r.canIndexPath(candidate) {
			return true
		}
	}
	return false
}
// InferIndexJobs builds one lsif-tsc index job per indexable tsconfig.json
// path, with docker install steps for every ancestor directory that holds a
// package.json (yarn when a yarn.lock or a yarn-configured lerna.json is
// present, npm otherwise).
func (r lsifTscJobRecognizer) InferIndexJobs(paths []string, gitserver GitserverClientWrapper) (indexes []config.IndexJob) {
	for _, path := range paths {
		if !r.canIndexPath(path) {
			continue
		}
		// check first if anywhere along the ancestor path there is a lerna.json
		isYarn := checkLernaFile(path, paths, gitserver)
		var dockerSteps []config.DockerStep
		for _, dir := range ancestorDirs(path) {
			// Only directories with a package.json need an install step.
			if !contains(paths, filepath.Join(dir, "package.json")) {
				continue
			}
			var commands []string
			if isYarn || contains(paths, filepath.Join(dir, "yarn.lock")) {
				commands = append(commands, "yarn --ignore-engines")
			} else {
				commands = append(commands, "npm install")
			}
			dockerSteps = append(dockerSteps, config.DockerStep{
				Root: dir,
				Image: lsifTscImage,
				Commands: commands,
			})
		}
		var localSteps []string
		if checkCanDeriveNodeVersion(path, paths, gitserver) {
			// A Node version is derivable: install the matching musl Node
			// build before every docker step (and once as a local step).
			for i, step := range dockerSteps {
				step.Commands = append([]string{nMuslCommand}, step.Commands...)
				dockerSteps[i] = step
			}
			localSteps = append(localSteps, nMuslCommand)
		}
		// Reverse dockerSteps in place so install steps run in the opposite
		// order of the ancestorDirs traversal.
		// NOTE(review): presumably this makes outermost package.json
		// installs run first — confirm against ancestorDirs ordering.
		n := len(dockerSteps)
		for i := 0; i < n/2; i++ {
			dockerSteps[i], dockerSteps[n-i-1] = dockerSteps[n-i-1], dockerSteps[i]
		}
		indexes = append(indexes, config.IndexJob{
			Steps: dockerSteps,
			LocalSteps: localSteps,
			Root: dirWithoutDot(path),
			Indexer: lsifTscImage,
			IndexerArgs: []string{"lsif-tsc", "-p", "."},
			Outfile: "",
		})
	}
	return indexes
}
// checkCanDeriveNodeVersion reports whether a Node version can be derived
// for path: either an ancestor package.json declares engines.node, or an
// ancestor directory carries one of the conventional version marker files.
func checkCanDeriveNodeVersion(path string, paths []string, gitserver GitserverClientWrapper) bool {
	for _, dir := range ancestorDirs(path) {
		pkg := filepath.Join(dir, "package.json")
		if contains(paths, pkg) && hasEnginesField(pkg, gitserver) {
			return true
		}
		for _, marker := range []string{".nvmrc", ".node-version", ".n-node-version"} {
			if contains(paths, filepath.Join(dir, marker)) {
				return true
			}
		}
	}
	return false
}
// hasEnginesField reports whether the package.json at packageJSONPath
// declares an engines.node constraint. Fetch or parse failures count as
// "no engines field".
func hasEnginesField(packageJSONPath string, gitserver GitserverClientWrapper) (hasField bool) {
	raw, err := gitserver.RawContents(context.TODO(), packageJSONPath)
	if err != nil {
		return false
	}
	var pkg packageJSONEngine
	if err := json.Unmarshal(raw, &pkg); err != nil {
		return false
	}
	return pkg.Engines != nil && pkg.Engines.Node != nil
}
// checkLernaFile reports whether any ancestor directory of path carries a
// lerna.json whose npmClient is "yarn". Fetch or parse failures for one
// lerna.json simply move on to the next ancestor.
func checkLernaFile(path string, paths []string, gitserver GitserverClientWrapper) (isYarn bool) {
	for _, dir := range ancestorDirs(path) {
		lernaPath := filepath.Join(dir, "lerna.json")
		if !contains(paths, lernaPath) {
			continue
		}
		raw, err := gitserver.RawContents(context.TODO(), lernaPath)
		if err != nil {
			continue
		}
		var cfg lernaConfig
		if err := json.Unmarshal(raw, &cfg); err != nil {
			continue
		}
		if cfg.NPMClient == "yarn" {
			return true
		}
	}
	return false
}
// Patterns returns the filename suffix patterns whose presence makes a
// repository a candidate for TypeScript auto-indexing.
func (lsifTscJobRecognizer) Patterns() []*regexp.Regexp {
	names := []string{
		"tsconfig.json",
		"package.json",
		"lerna.json",
		"yarn.lock",
		".nvmrc",
		".node-version",
		".n-node-version",
	}
	patterns := make([]*regexp.Regexp, 0, len(names))
	for _, name := range names {
		patterns = append(patterns, suffixPattern(name))
	}
	return patterns
}
// canIndexPath reports whether path is a tsconfig.json that does not live
// under a blocked path segment (e.g. node_modules).
func (r lsifTscJobRecognizer) canIndexPath(path string) bool {
	// TODO(efritz) - check for javascript files
	if filepath.Base(path) != "tsconfig.json" {
		return false
	}
	return containsNoSegments(path, tscSegmentBlockList...)
}
// tscSegmentBlockList lists path segments under which tsconfig.json files
// are ignored (vendored dependencies), in addition to the shared
// segmentBlockList.
var tscSegmentBlockList = append([]string{
	"node_modules",
}, segmentBlockList...)
|
LTNGlobal-opensource/libatsc3
|
src/mmt/MMTExtractor.h
|
<gh_stars>10-100
#ifndef LIBATSC3_MMTEXTRACTOR_H
#define LIBATSC3_MMTEXTRACTOR_H

// FIX: the stdio include and using-directive previously sat *before* the
// include guard, so they were re-processed on every inclusion.
#include <stdio.h>

// NOTE(review): `using namespace std;` in a header leaks into every
// translation unit that includes it. Kept for compatibility (no std::
// names are used in this header); remove once dependents are audited.
using namespace std;

#include "atsc3_utils.h"
#include "atsc3_udp.h"
#include "atsc3_mmtp_packet_types.h"
#include "atsc3_mmtp_parser.h"

//TODO: remove this
#include "atsc3_lls_types.h"

/**
 * MMTExtractor - feeds MMT/MMTP transport packets into the core service
 * bridge, tracking the LLS/SLS session state needed to route them.
 */
class MMTExtractor {
public:
    MMTExtractor();

    // Process one raw MMT packet block.
    void atsc3_core_service_bridge_process_mmt_packet(block_t* packet);

    // Process one UDP packet for the given MMT asset/session.
    // Returns an int8_t status code — semantics defined in the .cpp; confirm there.
    int8_t atsc3_core_service_bridge_process_mmt_udp_packet(udp_packet_t* udp_packet, mmtp_asset_t* mmtp_asset, lls_sls_mmt_session_t* lls_sls_mmt_session);

private:
    atsc3_lls_slt_service_t* atsc3_lls_slt_service;

    //jjustman-2019-10-03 - context event callbacks...
    lls_slt_monitor_t* lls_slt_monitor = NULL;

    //mmtp/sls flow management
    lls_sls_mmt_monitor_t* lls_sls_mmt_monitor = NULL;
    atsc3_mmt_mfu_context_t* atsc3_mmt_mfu_context = NULL;
};

#endif //LIBATSC3_MMTEXTRACTOR_H
|
IvanildoPereira/ShopApp
|
backend/src/controllers/productsController.js
|
<gh_stars>0
const Product = require("../models/Product");
const User = require("../models/User");
const Comment = require("../models/Comment");
const HttpError = require("../models/http-error");
const { Op } = require("sequelize");
/**
 * List products with pagination, filtering and sorting.
 * Query params: page, perPage, category, sort ("recently"|"name"|"price"), search.
 * Optional route param userId restricts the listing to one seller.
 * Responds with { products, total, lastPage }.
 */
const getProducts = async (req, res, next) => {
  const { page, perPage, category, sort, search } = req.query;
  const userId = req.params.userId;

  // Zero-based row offset of the requested page.
  const pg = (page - 1) * perPage;

  let options = {
    attributes: ["id", "cover_img", "name", "price", "details", "categories"],
    offset: pg,
    limit: Number.parseInt(perPage, 10), // explicit radix — never rely on implicit base detection
    where: {},
    order: [],
  };

  if (userId) {
    options.where.user_id = userId;
  }
  if (search) {
    options.where.name = { [Op.like]: "%" + search + "%" };
  }
  if (category && category !== "All Products") {
    options.where.categories = category;
  }

  switch (sort) {
    case "name":
      options.order.push(["name", "ASC"]);
      break;
    case "price":
      options.order.push(["price", "ASC"]);
      break;
    case "recently":
    default:
      // Newest first is both the named and the fallback ordering.
      options.order.push(["createdAt", "DESC"]);
  }

  try {
    const { count, rows: products } = await Product.findAndCountAll(options);
    res.json({ products, total: count, lastPage: Math.ceil(count / perPage) });
  } catch (err) {
    const error = new HttpError("Can't load the Products!", 500);
    return next(error);
  }
};
/**
 * Fetch one product by id together with its comments (each including the
 * commenter's name and avatar). Responds with { product, comments }.
 */
const getProduct = async (req, res, next) => {
  const { id } = req.params;
  try {
    const product = await Product.findByPk(id);
    // Check existence before the comments query so a missing product does
    // not trigger a pointless second lookup.
    if (!product) return res.json("Não há produtos");
    const comments = await Comment.findAll({
      where: { product_id: id },
      include: [{ association: "user", attributes: ["avatar_img", "name"] }],
    });
    res.json({ product, comments });
  } catch (err) {
    // BUG FIX: DB failures previously escaped as unhandled promise
    // rejections; route them through the error middleware like the
    // sibling handlers do.
    return next(new HttpError("Can't load the Product!", 500));
  }
};
/**
 * Create a product owned by the authenticated user. Expects name, category,
 * price and details in the body plus 1-3 uploaded images (cover required).
 */
const createProduct = async (req, res, next) => {
  const { name, category, price, details } = req.body;

  const user = await User.findByPk(req.userData.id);
  if (!user) return res.status(404).json({ Message: "User doesn't exist!" });

  // BUG FIX: req.files[0].path used to throw outside the try block when no
  // file was uploaded, producing an unhandled rejection instead of a response.
  if (!req.files || req.files.length === 0) {
    return next(new HttpError("A cover image is required!", 422));
  }

  try {
    const product = await Product.create({
      cover_img: req.files[0].path,
      second_img: req.files[1] === undefined ? null : req.files[1].path,
      third_img: req.files[2] === undefined ? null : req.files[2].path,
      name,
      categories: category,
      price,
      details,
      user_id: req.userData.id,
    });
    res.json({ message: "Created with Success!", product });
  } catch (err) {
    const error = new HttpError("It wasn't possible save the new product!", 500);
    return next(error);
  }
};
/**
 * Update a product's name, category, price and details. Only the product's
 * owner may update it.
 */
const updateProduct = async (req, res, next) => {
  const productId = req.params.productId;
  const { name, category, price, details } = req.body;

  const user = await User.findByPk(req.userData.id);
  if (!user) return res.status(404).json({ Message: "User doesn't exist!" });

  try {
    const product = await Product.findByPk(productId);
    // BUG FIX: a missing product used to crash below on product.user_id.
    if (!product) return next(new HttpError("Product doesn't exist!", 404));
    if (product.user_id === user.id) {
      await product.update({
        name,
        categories: category,
        price,
        details,
      });
      res.json({ message: "Updated with Success!", product });
    } else {
      // BUG FIX: the message previously said "delete" in the update handler.
      return next(new HttpError("You're not allowed to update this product!", 403));
    }
  } catch (err) {
    // BUG FIX: HttpError was called without `new` and without a status code.
    const error = new HttpError("It wasn't possible save the new product!", 500);
    return next(error);
  }
};
// DELETE a product: only its owner may remove it. Deletion can fail with a
// FK violation when orders reference the product; that maps to the 500 below.
const deleteProduct = async (req, res, next) => {
  const productId = req.params.productId;
  const user = await User.findByPk(req.userData.id);
  if (!user) return res.status(404).json({ Message: "User doesn't exist!" });
  try {
    const product = await Product.findByPk(productId);
    // Guard: a missing product previously threw a TypeError that was
    // misreported as the unrelated "orders with this product" 500.
    if (!product) {
      return next(new HttpError("Product not found!", 404));
    }
    if (product.user_id === user.id) {
      await product.destroy();
      res.json({ message: 'Product deleted with success!' });
    } else {
      return next(new HttpError("You're not allowed to delete this product!", 403));
    }
  } catch (err) {
    const error = new HttpError("Can't delete the product, probably there are orders with this product! ", 500);
    return next(error);
  }
};
// Public controller API: same five handlers, exported in one statement.
// Object.assign onto `exports` is behaviorally identical to the individual
// `exports.x = x` assignments it replaces.
Object.assign(exports, {
  getProducts,
  getProduct,
  createProduct,
  updateProduct,
  deleteProduct,
});
|
koungkmitl/webpro-jwt-springboot
|
src/main/java/murraco/dto/StudentConferenceResponse.java
|
<filename>src/main/java/murraco/dto/StudentConferenceResponse.java
package murraco.dto;
import murraco.domain.Student;
import java.util.List;
/**
 * Response DTO pairing the list of students attending a conference with a
 * total amount. Plain bean (no-arg constructor plus getters/setters) so JSON
 * (de)serialization keeps working unchanged.
 */
public class StudentConferenceResponse {

    private List<Student> conferenceList;
    private int amount;

    /** Bean-style no-arg constructor, required by serializers. */
    public StudentConferenceResponse() {
    }

    /** Convenience constructor initializing both fields at once. */
    public StudentConferenceResponse(List<Student> conferenceList, int amount) {
        this.conferenceList = conferenceList;
        this.amount = amount;
    }

    public int getAmount() {
        return amount;
    }

    public void setAmount(int amount) {
        this.amount = amount;
    }

    public List<Student> getConferenceList() {
        return conferenceList;
    }

    public void setConferenceList(List<Student> conferenceList) {
        this.conferenceList = conferenceList;
    }
}
|
dgomesbr/felaguiar-trip
|
src/main/java/org/springframework/social/instagram/api/impl/LocationList.java
|
package org.springframework.social.instagram.api.impl;
import java.util.List;
import org.springframework.social.instagram.api.Location;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Thin deserialization wrapper binding the Instagram API's {@code data}
 * array into a typed list of locations.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class LocationList {

    private final List<Location> locations;

    public LocationList(@JsonProperty("data") List<Location> locations) {
        this.locations = locations;
    }

    /** The wrapped locations exactly as deserialized (may be null). */
    public List<Location> getList() {
        return locations;
    }
}
|
Kannanradhi/myrafrance
|
src/main/java/com/isteer/b2c/repository/ReportsOrderVM.java
|
<gh_stars>0
package com.isteer.b2c.repository;
import android.arch.lifecycle.LiveData;
import android.arch.lifecycle.ViewModel;
import android.arch.paging.LivePagedListBuilder;
import android.arch.paging.PagedList;
import com.isteer.b2c.dao.BillData_DAO;
import com.isteer.b2c.dao.PendingOrderData_DAO;
import com.isteer.b2c.model.BillData;
import com.isteer.b2c.model.OrderNewData;
import com.isteer.b2c.model.PendingOrderData;
/**
 * ViewModel exposing paged order lists to the reports screens.
 * The LiveData fields are null until one of the init(...) overloads runs.
 */
public class ReportsOrderVM extends ViewModel{
    // Date-filtered report orders; set by init(dao, fromdate, todate).
    public LiveData<PagedList<OrderNewData>> pendingOrderDataList;
    // Unfiltered report orders; set by init(dao).
    public LiveData<PagedList<OrderNewData>> pendingAllOrderDataList;
    // "Action" orders; set by init(dao).
    public LiveData<PagedList<OrderNewData>> ReportPendingAllOrderDataList;

    /**
     * Build the date-filtered paged list (page size 50).
     * NOTE(review): LivePagedListBuilder is used raw (unchecked) — assumes
     * the DAO returns a DataSource.Factory of OrderNewData; confirm.
     */
    public void init(PendingOrderData_DAO userDao, String fromdate,String todate) {
        pendingOrderDataList = (new LivePagedListBuilder(userDao.getReportOrderByDate(fromdate,todate), 50))
                .build();
    }

    /** Build the unfiltered report list and the action-order list (page size 50 each). */
    public void init(PendingOrderData_DAO userDao) {
        pendingAllOrderDataList = (new LivePagedListBuilder(userDao.getReportOrder(), 50))
                .build();
        ReportPendingAllOrderDataList = (new LivePagedListBuilder(userDao.getActionOrder(), 50))
                .build();
    }
}
|
slategroup/clay-kiln
|
lib/validators/mutations.js
|
import _ from 'lodash';
import { UPDATE_VALIDATION } from './mutationTypes';
/**
 * Vuex mutation handlers for the validation pane.
 */
export default {
  /**
   * Overwrite the stored validation results with a new snapshot and return
   * the (mutated) state, matching the original contract.
   */
  [UPDATE_VALIDATION](state, validationState) {
    _.set(state, 'validation', validationState);
    return state;
  }
};
|
ynsingh/brihaspati2
|
WEB-INF/src/java/org/iitk/brihaspati/modules/actions/ExtractAction.java
|
package org.iitk.brihaspati.modules.actions;
/*
* @(#)ExtractAction.java
*
* Copyright (c) 2005-2006 ETRG,IIT Kanpur.
* All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
 * Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL ETRG OR ITS CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL,SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*
* Contributors: Members of ETRG, I.I.T. Kanpur
*
*/
import java.io.File;
import java.util.Vector;
import java.util.StringTokenizer;
import java.util.Date;
import org.apache.velocity.context.Context;
import org.apache.turbine.util.RunData;
import org.apache.turbine.util.parser.ParameterParser;
import org.apache.turbine.services.servlet.TurbineServlet;
import org.apache.turbine.om.security.User;
import org.iitk.brihaspati.modules.utils.TotalFileCount;
import org.iitk.brihaspati.modules.utils.XmlWriter;
import org.iitk.brihaspati.modules.utils.TopicMetaDataXmlWriter;
import org.iitk.brihaspati.modules.utils.SystemIndependentUtil;
/**
 * Moves files the instructor selected from an unpacked zip into a course
 * topic's "Unpublished" area, or cancels and cleans up the unpack directory.
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 *
 */
public class ExtractAction extends SecureAction_Instructor
{
    /**
     * Cancel the extract workflow: delete the user's private unpack
     * directory and return to the publish-module screen.
     *
     * @param data RunData
     * @param context Context
     * @see SystemIndependentUtil Utils
     */
    public void doCancel(RunData data,Context context)
    {
        User user=data.getUser();
        ParameterParser pp=data.getParameters();
        String topic=pp.getString("topic","");
        String userLoginName=user.getName();
        String dir;
        dir=(String)user.getTemp("course_id");
        // Per-topic content directory under the course tree on disk.
        String filePath=data.getServletContext().getRealPath("/Courses")+"/"+dir+"/Content/"+topic;
        // Each user unpacks into Unpublished/<login>; cancelling removes it.
        File tempDirFilePath=new File(filePath+"/Unpublished/"+userLoginName);
        SystemIndependentUtil.deleteFile(tempDirFilePath);
        data.setScreenTemplate("call,CourseMgmt_User,PublishModule.vm");
    }
    /**
     * Move the files the user selected (form fields extract1..extractN) out
     * of their private unpack directory into the topic's Unpublished area,
     * skipping any file that already exists, then report full / partial /
     * no success through the template context.
     * @param data RunData
     * @param context Context
     * @see SystemIndependentUtil Utils
     */
    public void doExtract(RunData data,Context context) throws Exception
    {
        User user=data.getUser();
        ParameterParser pp=data.getParameters();
        String topic=pp.getString("topic","");
        int totalFiles=pp.getInt("totalfiles",0);
        String userLoginName=user.getName();
        String group,dir;
        group=dir=(String)user.getTemp("course_id");
        String filePath=data.getServletContext().getRealPath("/Courses")+"/"+dir+"/Content/"+topic; context.put("filePath",filePath);
        // NOTE(review): countFiles, fileseqno and group are never used below.
        int countFiles=0;
        int fileseqno[];
        int successfulUploadFilesCount=0;
        int totalFilesEntries=0;
        Vector failedFiles=new Vector();
        String fileItem;
        Vector fileList=new Vector();
        // NOTE(review): the loop condition is count < totalFiles, so field
        // "extract<totalFiles>" is never read — confirm the form counts from
        // 1 and this is not an off-by-one.
        for(int count=1;count<totalFiles;count++)
        {
            boolean fileExists=false;
            fileItem=pp.getString("extract"+count);
            String tempFile;
            if(fileItem!=null)
            {
                String temp=fileItem;
                // Strip any leading path, whichever separator the zip used.
                int indexbackslash=temp.lastIndexOf("\\");
                int indexfrontslash=temp.lastIndexOf("/");
                int index=indexfrontslash>indexbackslash?indexfrontslash:indexbackslash;
                ++totalFilesEntries;
                fileExists=false;
                tempFile=temp.substring(index+1);
                // A file "fails" if it already exists either published in the
                // topic dir or pending in Unpublished/.
                File uploadedFileInTopicDir=new File(filePath,tempFile);
                File uploadedFileInUnpub=new File(filePath+"/Unpublished/"+tempFile);
                if(uploadedFileInUnpub.exists() || uploadedFileInTopicDir.exists())
                {
                    fileExists=true;
                    failedFiles.addElement(tempFile);
                }
                if(fileExists)
                    continue;
                ++successfulUploadFilesCount;
                // Move from the user's private unpack dir into Unpublished/.
                // NOTE(review): renameTo's boolean result is ignored — a
                // failed rename is still counted as a success.
                File tempDirFilePath=new File(filePath+"/Unpublished/"+userLoginName+"/"+fileItem);
                tempDirFilePath.renameTo(uploadedFileInUnpub);
                fileList.addElement("extract"+count+" : "+fileItem);
            }
        }
        // Always discard the per-user unpack directory afterwards.
        File tempDirFilePath=new File(filePath+"/Unpublished/"+userLoginName);
        SystemIndependentUtil.deleteFile(tempDirFilePath);
        if(successfulUploadFilesCount>0)
        {
            if(successfulUploadFilesCount==totalFilesEntries)
            {
                // all the entries given were uploaded successfully
                context.put("uploadStatus","full");
            }
            else
            {
                // some of the entries given were uploaded successfully
                context.put("uploadStatus","partial");
                context.put("failedFiles",failedFiles);
            }
        }
        else
        {
            // nothing was uploaded
            context.put("uploadStatus","nothing");
            context.put("totalFilesEntries",(new TotalFileCount(totalFilesEntries) ) );
            context.put("failedFiles",failedFiles);
        }
        context.put("fileList",fileList);
    }
    /**
     * Default entry point: dispatches to doExtract when the submit button
     * "eventSubmit_doExtract" was pressed.
     * @param data RunData
     * @param context Context
     */
    public void doPerform(RunData data,Context context) throws Exception
    {
        String actionToPerform=data.getParameters().getString("actionName","");
        context.put("actionO",actionToPerform);
        if( actionToPerform.equals("eventSubmit_doExtract") )
        {
            doExtract(data,context);
        }
    }
}
|
ndsc-iot/jcbase
|
src/main/java/com/jcbase/model/SysUser.java
|
/**
* Copyright (c) 2011-2016, <NAME>(<EMAIL>).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jcbase.model;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import com.google.common.collect.Lists;
import com.jcbase.core.cache.CacheClearUtils;
import com.jcbase.core.model.Condition;
import com.jcbase.core.model.Operators;
import com.jcbase.core.util.CommonUtils;
import com.jcbase.core.util.IWebUtils;
import com.jcbase.core.util.MyDigestUtils;
import com.jcbase.core.view.InvokeResult;
import com.jcbase.model.base.BaseSysUser;
import com.jfinal.kit.StrKit;
import com.jfinal.plugin.activerecord.Db;
import com.jfinal.plugin.activerecord.Page;
/**
 * System user model (JFinal ActiveRecord) plus its query helpers:
 * login, role management, paging and lookup by name.
 * @author eason
 */
public class SysUser extends BaseSysUser<SysUser>
{
    /**
     * Serialization id.
     */
    private static final long serialVersionUID = -1982696969221258167L;
    // Shared stateless instance used for queries throughout the codebase.
    public static SysUser me = new SysUser();
    /**
     * Permission set: all resource URLs reachable by this user.
     */
    public Set<String> getPermissionSets() {
        return SysRes.me.getSysUserAllResUrl(this.getId());
    }
    /**
     * Whether the user has administrator rights (role_id 1 is hard-coded
     * as the admin role).
     */
    public boolean isAdmin(){
        long count=Db.queryLong("select count(*) from sys_user_role where role_id=? and user_id=?", 1,this.getId());
        return count>0?true:false;
    }
    /**
     * User login: matches name + SHA-digested password, rejects frozen
     * accounts (status == 2), and returns the redirect url on success.
     * @author eason
     * @param username
     * @param pwd
     * @return
     * @throws UnsupportedEncodingException
     */
    public InvokeResult login(String username, String pwd,HttpServletResponse response,HttpSession session,String url) {
        Set<Condition> conditions=new HashSet<Condition>();
        conditions.add(new Condition("name",Operators.EQ,username));
        conditions.add(new Condition("pwd",Operators.EQ,MyDigestUtils.shaDigestForPasswrod(pwd)));
        SysUser sysUser=this.get(conditions);
        if(sysUser==null){
            return InvokeResult.failure("用户名或密码不对");
        }
        // status == 2 means the account is frozen.
        if(sysUser.getInt("status")==2){
            return InvokeResult.failure("用户被冻结,请联系管理员");
        }
        //IWebUtils.setCurrentLoginSysUser(response,session,sysUser);
        Map<String,Object> data=new HashMap<String,Object>();
        data.put("url",url);
        return InvokeResult.success(data);
    }
    /**
     * List users (first page only, at most 20 rows).
     * @author eason
     * @param uid
     * @return
     */
    public List<SysUser> getSysUserList(int uid){
        return this.paginate(1, 20, "select *", "from sys_user ",uid).getList();
    }
    // Same as above but selecting ids only.
    public List<SysUser> getSysUserIdList(int uid){
        return this.paginate(1, 20, "select id", "from sys_user ",uid).getList();
    }
    /**
     * Bulk-set the status column for a comma-separated list of user ids.
     */
    public InvokeResult setVisible(String bids, Integer visible) {
        // List<Integer> i=CommonUtils.getIntegerListByStrs(bids);
        // if(i.contains(1)){
        // return InvokeResult.failure(-2,"超级管理员不能被修改");
        // }
        List<Integer> ids=new ArrayList<Integer>();
        if(bids.contains(",")){
            for(String aid : bids.split(",")){
                if(StrKit.notBlank(aid)){
                    ids.add(Integer.valueOf(aid));
                }
            }
        }else{
            if(StrKit.notBlank(bids)){
                ids.add(Integer.valueOf(bids));
            }
        }
        // NOTE(review): this truncation of bids looks like leftover code —
        // bids is not used again after this point; confirm and remove.
        if(bids.length()>0){
            bids=bids.subSequence(0, bids.length()-1).toString();
        }
        Set<Condition> conditions=new HashSet<Condition>();
        conditions.add(new Condition("id",Operators.IN,ids));
        Map<String,Object> newValues=new HashMap<String,Object>();
        newValues.put("status", visible);
        this.update(conditions, newValues);
        return InvokeResult.success();
    }
    /**
     * Whether a user with this name already exists.
     * @param name
     * @return
     */
    public boolean hasExist(String name){
        Set<Condition> conditions=new HashSet<Condition>();
        conditions.add(new Condition("name",Operators.EQ,name));
        long num=this.getCount(conditions);
        return num>0?true:false;
    }
    // Fetch a single user by exact name.
    public SysUser getByName(String name){
        Set<Condition> conditions=new HashSet<Condition>();
        conditions.add(new Condition("name",Operators.EQ,name));
        return this.get(conditions);
    }
    /**
     * Create a new user, or update the mutable fields of an existing one
     * when an id is given (name and password are not changed on update).
     * NOTE(review): the sysUser.set(...) line below was corrupted by an
     * automated redaction pass (<PASSWORD> placeholders) and is not valid
     * Java as-is — restore the original digest call from VCS history.
     */
    public InvokeResult save(Integer id,String username,String password,String des,String phone,String email,Integer operation_class_id,Integer station_id){
        if(null!=id){
            SysUser sysUser=this.findById(id);
            sysUser.set("des", des).set("phone", phone).set("email", email).set("operation_class_id", operation_class_id).set("station_id", station_id).update();
        }else {
            if(this.hasExist(username)){
                return InvokeResult.failure("用户名已存在");
            }else {
                if(StrKit.isBlank(password))password="<PASSWORD>";
                SysUser sysUser=new SysUser();
                sysUser.set("name", username).set("pwd", <PASSWORD>Passw<PASSWORD>(password)).set("createdate", new Date()).set("des", des).set("phone", phone).set("email", email).set("operation_class_id", operation_class_id).set("station_id", station_id).save();
            }
        }
        return InvokeResult.success();
    }
    /**
     * Replace the user's roles: delete all, then batch-insert the given
     * comma-separated role ids, and clear the cached user menus.
     * @param uid
     * @param roleIds
     * @return
     */
    public InvokeResult changeUserRoles(Integer uid,String roleIds){
        Db.update("delete from sys_user_role where user_id = ?", uid);
        List<String> sqlList=Lists.newArrayList();
        // NOTE(review): the insert statements are built by concatenation;
        // safe only because both values are integers — keep it that way.
        for(String roleId : roleIds.split(",")){
            if(CommonUtils.isNotEmpty(roleId)){
                sqlList.add("insert into sys_user_role (user_id,role_id) values ("+uid+","+Integer.valueOf(roleId)+")");
            }
        }
        Db.batch(sqlList, 5);
        CacheClearUtils.clearUserMenuCache();
        return InvokeResult.success();
    };
    /**
     * Change a user's password.
     * NOTE(review): newPwd is stored as-is — the caller must pass the
     * already-digested value; confirm against the login() digest.
     * @param uid
     * @param newPwd
     * @return
     */
    public InvokeResult savePwdUpdate(Integer uid, String newPwd) {
        // TODO Auto-generated method stub
        SysUser sysUser=SysUser.me.findById(uid);
        if(sysUser!=null){
            sysUser.set("pwd", newPwd).update();
            return InvokeResult.success();
        }else{
            return InvokeResult.failure(-2, "修改失败");
        }
    }
    /**
     * Paged user listing with role names and class/station names joined in.
     * NOTE(review): SQL INJECTION — keyword is concatenated (unquoted) into
     * the WHERE clause; parameterize via paginate's varargs instead.
     */
    public Page<SysUser> getSysUserPage(int page, int rows, String keyword,
            String orderbyStr) {
        StringBuffer sqlExceptSelect = null;
        String select="select su.*, (select group_concat(name) as roleNames from sys_role where id in(select role_id from sys_user_role where user_id=su.id)) as roleNames,(SELECT op_name FROM operation_class where su.operation_class_id=operation_class.id) AS op_name,(SELECT station_name FROM station WHERE station.id=su.station_id) AS station_name";
        // System.out.println("**************"+SysUserRole.dao.isHight(keyword));
        if(SysUserRole.dao.isHight(keyword)) {
            sqlExceptSelect=new StringBuffer("from sys_user su");
        }
        else {
            sqlExceptSelect=new StringBuffer("from sys_user su WHERE operation_class_id=(SELECT operation_class_id FROM sys_user WHERE `name`="+keyword+")");
        }
        return this.paginate(page, rows, select, sqlExceptSelect.toString());
    }
    /**
     * Fuzzy search by user name within the caller's visibility scope.
     * NOTE(review): SQL INJECTION — both keyword and name are concatenated
     * into the SQL; also remove the System.out debug prints before release.
     */
    public Page<SysUser> seachSysUserPage(int page, int rows, String keyword,
            String orderbyStr,String name) {
        StringBuffer sqlExceptSelect = null;
        String select="select su.*, (select group_concat(name) as roleNames from sys_role where id in(select role_id from sys_user_role where user_id=su.id)) as roleNames,(SELECT op_name FROM operation_class where su.operation_class_id=operation_class.id) AS op_name,(SELECT station_name FROM station WHERE station.id=su.station_id) AS station_name";
        System.out.println("&&&&&&&&&&&&&"+keyword);
        System.out.println("**************"+SysUserRole.dao.isOp(keyword));
        if(SysUserRole.dao.isOp(keyword)) {
            sqlExceptSelect=new StringBuffer("from sys_user su where name like '%"+name+"%'");
            System.out.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
        }
        else {
            sqlExceptSelect=new StringBuffer("from sys_user su WHERE operation_class_id=(SELECT operation_class_id FROM sys_user WHERE `name`="+keyword+") and `name` like '%"+name+"%'");
            System.out.println("2222222222222222222222");
        }
        return this.paginate(page, rows, select, sqlExceptSelect.toString());
    }
    /**
     * Look up a user's id by name.
     * NOTE(review): SQL INJECTION — name is concatenated into the query; use
     * a ? parameter. Also throws IndexOutOfBounds when the name is unknown.
     */
    public int getId(String name) {
        List<SysUser> sRecords = this.find("select id from sys_user where name='"+name+"'");
        int id=sRecords.get(0).getId();
        System.out.println(id+"This is ID!!!!!!!!!!!");
        return id;
    }
    /**
     * Whether the named user belongs to the given operation class; returns
     * the user row or null.
     */
    public SysUser getByOp(String name,int op_class){
        Set<Condition> conditions=new HashSet<Condition>();
        conditions.add(new Condition("name",Operators.EQ,name));
        conditions.add(new Condition("operation_class_id",Operators.EQ,op_class));
        return this.get(conditions);
    }
}
|
lijiahongda/22222
|
component/userPopup/userPopup.js
|
<filename>component/userPopup/userPopup.js
import {
post
} from '../../utils/util.js';
var app = getApp();
// Red-packet / coupon popup component. The parent page forwards its onLoad
// options through _onOption; the popup then routes users either to the
// phone-verification page (not logged in) or to the coupon "new zone" pages.
Component({
  /**
   * Component property list (none declared).
   */
  properties: {
  },
  created: function () {
    let that = this
    // that.getData()
  },
  /**
   * Initial component data.
   */
  data: {
    list: {},
    // isRedPopup: wx.getStorageSync('uid')?false:true
    isRedPopup:false,
  },
  /**
   * Component methods.
   */
  methods: {
    // Receive params passed from the parent page's onLoad.
    // type selects how the coupon info is unpacked (1: couponInfo with
    // countdown, 2: CouponInfo with fixed userLayer 2, else: couponInfo).
    _onOption: function (options, codeNumber,type) {
      let that = this
      console.log(options, codeNumber, '子组件option',type,'====')
      if (options.isPopUp == 1){
        that.setData({
          isRedPopup: true
        })
      }else{
        that.setData({
          isRedPopup: false
        })
      }
      if (type == 1){
        that.setData({
          urlType: options.urlType,
          userLayer: options.userLayer,
          couponInfo: options.couponInfo,
          couponid: options.couponInfo.couponid,
          popType: options.popType,
          codeNumber: codeNumber,
        })
        // end_dates of 0 means no expiry; otherwise start the countdown.
        if (options.couponInfo.end_dates != 0) {
          that.startTimer(options.couponInfo.end_dates - options.couponInfo.nowTime)
        }
      }else if(type == 2){
        console.log(that.data.userLayer)
        that.setData({
          popType: options.popType,
          userLayer: 2,
          urlType: options.urlType,
          couponInfo: options.CouponInfo,
          couponid: options.CouponInfo.couponid,
          codeNumber: codeNumber,
        })
        // if (options.CouponInfo.end_dates != 0) {
        //   that.startTimer(options.CouponInfo.end_dates - options.CouponInfo.nowTime)
        // }
      }else{
        that.setData({
          popType: options.popType,
          userLayer:type,
          couponInfo: options.couponInfo,
          couponid: options.couponInfo.couponid,
          codeNumber: codeNumber,
        })
        console.log(this.data.userLayer, this.data.popType)
      }
    },
    // Navigate to the phone verification-code page.
    VerificationCode: function () {
      let that = this
      console.log(that.data.codeNumber,'----------------')
      wx.navigateTo({
        // Fixed: the original URL was missing the '=' after 'codeNumber',
        // producing a garbled query string ("?codeNumber123...").
        url: '/page/Yuemall/pages/VerificationCode/VerificationCode?codeNumber=' + that.data.codeNumber
      })
    },
    // Close the big red-packet popup.
    closeisredPopup: function () {
      this.setData({
        isRedPopup: false
      })
    },
    // Tap handler: unauthenticated users go to verification, others to the
    // new-user zone; the popup is closed afterwards.
    go: function (e) {
      if (!wx.getStorageSync('uid')){
        this.VerificationCode()
      }else{
        wx.navigateTo({
          url: '/page/Yuemall/pages/newZone/newZone'
        })
        this.setData({
          isRedPopup: false
        })
      }
      console.log(e)
    },
    // Route to one of the two "new zone" pages depending on coupon type.
    goNew:function(e){
      console.log(e,'99999999999999')
      let type = e.currentTarget.dataset.type
      let couponid = e.currentTarget.dataset.couponid
      let urltype = e.currentTarget.dataset.urltype
      let userLayer = e.currentTarget.dataset.userlayer
      if (type == 4 || urltype == 1) {
        wx.navigateTo({
          url: '/page/Yuemall/pages/newZone/newZone?couponid=' + couponid + '&userLayer=' + userLayer
        })
      } else {
        wx.navigateTo({
          url: '/page/Yuemall/pages/newZoneN/newZone?couponid=' + couponid + '&userLayer=' + userLayer
        })
      }
      this.setData({
        isRedPopup: false
      })
    },
    // Countdown: ticks once per second, splitting totalSecond into
    // zero-padded day/hour/minute/second strings in data.
    startTimer: function (totalSecond) {
      console.log(totalSecond)
      let that = this
      var totalSecond = totalSecond;
      var interval = setInterval(function () {
        // Remaining seconds.
        var second = totalSecond;
        // Days.
        var dr = Math.floor((second) / 86400)
        var drStr = dr.toString();
        if (drStr.length == 1) drStr = '0' + drStr;
        // Hours.
        var hr = Math.floor((second - dr * 86400) / 3600);
        var hrStr = hr.toString();
        if (hrStr.length == 1) hrStr = '0' + hrStr;
        // Minutes.
        var min = Math.floor((second - dr * 86400 - hr * 3600) / 60);
        var minStr = min.toString();
        if (minStr.length == 1) minStr = '0' + minStr;
        // Seconds.
        var sec = second - dr * 86400 - hr * 3600 - min * 60;
        var secStr = sec.toString();
        if (secStr.length == 1) secStr = '0' + secStr;
        this.setData({
          countDownDay: drStr,
          countDownHour: hrStr,
          countDownMinute: minStr,
          countDownSecond: secStr,
        });
        totalSecond--;
        // Stop at zero and pin the display.
        if (totalSecond < 0) {
          clearInterval(interval);
          this.setData({
            countDownDay: '0',
            countDownHour: '00',
            countDownMinute: '00',
            countDownSecond: '00',
          });
        }
      }.bind(this), 1000);
    },
  }
})
|
scarybeasts/beebj
|
asm/asm_defs_host.h
|
#ifndef BEEBJIT_ASM_DEFS_HOST_H
#define BEEBJIT_ASM_DEFS_HOST_H
#include "asm_platform.h"
/* NOTE: K_BBC_MEM_RAW_ADDR varies between platforms, and is in
 * asm_platform.h
 */
/* Fixed offsets between the differently-permissioned host mappings of the
 * emulated BBC memory (raw, indirect read/write, full read/write).
 */
#define K_BBC_MEM_OFFSET_FROM_RAW 0x01000000
#define K_BBC_MEM_OFFSET_TO_WRITE_IND 0x01000000
#define K_BBC_MEM_OFFSET_TO_READ_FULL 0x02000000
#define K_BBC_MEM_OFFSET_TO_WRITE_FULL 0x03000000
#define K_BBC_MEM_OFFSET_READ_TO_WRITE 0x01000000
/* Absolute base addresses of each mapping, derived from the raw base. */
#define K_BBC_MEM_READ_IND_ADDR (K_BBC_MEM_RAW_ADDR + \
K_BBC_MEM_OFFSET_FROM_RAW)
#define K_BBC_MEM_WRITE_IND_ADDR (K_BBC_MEM_READ_IND_ADDR + \
K_BBC_MEM_OFFSET_TO_WRITE_IND)
#define K_BBC_MEM_READ_FULL_ADDR (K_BBC_MEM_READ_IND_ADDR + \
K_BBC_MEM_OFFSET_TO_READ_FULL)
#define K_BBC_MEM_WRITE_FULL_ADDR (K_BBC_MEM_READ_IND_ADDR + \
K_BBC_MEM_OFFSET_TO_WRITE_FULL)
/* Layout of the 6502 address space within a mapping. */
#define K_BBC_MEM_OS_ROM_OFFSET 0xC000
#define K_BBC_MEM_INACCESSIBLE_OFFSET 0xF000
#define K_BBC_MEM_INACCESSIBLE_LEN 0x1000
#define K_6502_ADDR_SPACE_SIZE 0x10000
#define K_6502_VECTOR_IRQ 0xFFFE
/* Byte offsets of fields within the context structure shared with asm. */
#define K_CONTEXT_OFFSET_STATE_6502 8
#define K_CONTEXT_OFFSET_DEBUG_ASM 16
#define K_CONTEXT_OFFSET_INTERP_ASM 24
#define K_CONTEXT_OFFSET_DEBUG_CALLBACK 32
#define K_CONTEXT_OFFSET_DEBUG_OBJECT 40
#define K_CONTEXT_OFFSET_INTERP_CALLBACK 48
#define K_CONTEXT_OFFSET_INTERP_OBJECT 56
#define K_CONTEXT_OFFSET_ABI_END 64
#define K_CONTEXT_OFFSET_DRIVER_END (K_CONTEXT_OFFSET_ABI_END + 40)
/* Byte offsets of registers within the saved 6502 state block. */
#define K_STATE_6502_OFFSET_REG_A 0
#define K_STATE_6502_OFFSET_REG_X 4
#define K_STATE_6502_OFFSET_REG_Y 8
#define K_STATE_6502_OFFSET_REG_S 12
#define K_STATE_6502_OFFSET_REG_PC 16
#define K_STATE_6502_OFFSET_REG_FLAGS 20
#define K_STATE_6502_OFFSET_REG_IRQ_FIRE 24
#define K_STATE_6502_OFFSET_REG_HOST_PC 28
#define K_STATE_6502_OFFSET_REG_HOST_FLAGS 32
#define K_STATE_6502_OFFSET_REG_HOST_VALUE 36
/* TODO: these are x64 backend specific, and don't belong here. */
/* Fixed-address lookup tables used by the x64 JIT, plus their offsets
 * relative to K_ASM_TABLE_ADDR.
 */
#define K_ASM_TABLE_ADDR 0x50000000
#define K_ASM_TABLE_6502_FLAGS_TO_X64 0x50000000
#define K_ASM_TABLE_6502_FLAGS_TO_MASK 0x50000100
#define K_ASM_TABLE_X64_FLAGS_TO_6502 0x50000200
#define K_ASM_TABLE_PAGE_WRAP_CYCLE_INV 0x50000300
#define K_ASM_TABLE_OF_TO_6502 0x50000500
#define K_ASM_TABLE_6502_FLAGS_TO_X64_OFFSET 0
#define K_ASM_TABLE_6502_FLAGS_TO_MASK_OFFSET 0x100
#define K_ASM_TABLE_X64_FLAGS_TO_6502_OFFSET 0x200
#define K_ASM_TABLE_PAGE_WRAP_CYCLE_INV_OFFSET 0x300
#define K_ASM_TABLE_OF_TO_6502_OFFSET 0x500
#endif /* BEEBJIT_ASM_DEFS_HOST_H */
|
jsdelivrbot/privosoft.github.io
|
jspm_packages/npm/lodash-compat@3.10.2/object/keysIn.js
|
<filename>jspm_packages/npm/lodash-compat@3.10.2/object/keysIn.js
/* */
var arrayEach = require('../internal/arrayEach'),
isArguments = require('../lang/isArguments'),
isArray = require('../lang/isArray'),
isFunction = require('../lang/isFunction'),
isIndex = require('../internal/isIndex'),
isLength = require('../internal/isLength'),
isObject = require('../lang/isObject'),
isString = require('../lang/isString'),
support = require('../support');
var arrayTag = '[object Array]',
boolTag = '[object Boolean]',
dateTag = '[object Date]',
errorTag = '[object Error]',
funcTag = '[object Function]',
numberTag = '[object Number]',
objectTag = '[object Object]',
regexpTag = '[object RegExp]',
stringTag = '[object String]';
var shadowProps = ['constructor', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable', 'toLocaleString', 'toString', 'valueOf'];
var errorProto = Error.prototype,
objectProto = Object.prototype,
stringProto = String.prototype;
var hasOwnProperty = objectProto.hasOwnProperty;
var objToString = objectProto.toString;
var nonEnumProps = {};
nonEnumProps[arrayTag] = nonEnumProps[dateTag] = nonEnumProps[numberTag] = {
'constructor': true,
'toLocaleString': true,
'toString': true,
'valueOf': true
};
nonEnumProps[boolTag] = nonEnumProps[stringTag] = {
'constructor': true,
'toString': true,
'valueOf': true
};
nonEnumProps[errorTag] = nonEnumProps[funcTag] = nonEnumProps[regexpTag] = {
'constructor': true,
'toString': true
};
nonEnumProps[objectTag] = {'constructor': true};
arrayEach(shadowProps, function(key) {
for (var tag in nonEnumProps) {
if (hasOwnProperty.call(nonEnumProps, tag)) {
var props = nonEnumProps[tag];
props[key] = hasOwnProperty.call(props, key);
}
}
});
function keysIn(object) {
if (object == null) {
return [];
}
if (!isObject(object)) {
object = Object(object);
}
var length = object.length;
length = (length && isLength(length) && (isArray(object) || isArguments(object) || isString(object)) && length) || 0;
var Ctor = object.constructor,
index = -1,
proto = (isFunction(Ctor) && Ctor.prototype) || objectProto,
isProto = proto === object,
result = Array(length),
skipIndexes = length > 0,
skipErrorProps = support.enumErrorProps && (object === errorProto || object instanceof Error),
skipProto = support.enumPrototypes && isFunction(object);
while (++index < length) {
result[index] = (index + '');
}
for (var key in object) {
if (!(skipProto && key == 'prototype') && !(skipErrorProps && (key == 'message' || key == 'name')) && !(skipIndexes && isIndex(key, length)) && !(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) {
result.push(key);
}
}
if (support.nonEnumShadows && object !== objectProto) {
var tag = object === stringProto ? stringTag : (object === errorProto ? errorTag : objToString.call(object)),
nonEnums = nonEnumProps[tag] || nonEnumProps[objectTag];
if (tag == objectTag) {
proto = objectProto;
}
length = shadowProps.length;
while (length--) {
key = shadowProps[length];
var nonEnum = nonEnums[key];
if (!(isProto && nonEnum) && (nonEnum ? hasOwnProperty.call(object, key) : object[key] !== proto[key])) {
result.push(key);
}
}
}
return result;
}
module.exports = keysIn;
|
ShaneJim/SpringBootSamples
|
personal-admin/src/main/java/com/shanejim/myweb/personaladmin/security/MyUserDetailsService.java
|
package com.shanejim.myweb.personaladmin.security;
import com.shanejim.myweb.personaldao.mapper.EmployeeMapper;
import com.shanejim.myweb.personalmodel.entity.Employee;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
/**
 * Spring Security UserDetailsService backed by the employee table:
 * looks up the stored (salted) password for a username.
 * @author: panshenjia
 * @create: 2018-10-30 16:46
 **/
@Component
public class MyUserDetailsService implements UserDetailsService {
    private Logger logger = LoggerFactory.getLogger(getClass());
    // Employee lookup by name.
    @Autowired
    EmployeeMapper employeeMapper;
    @Override
    public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
        logger.info("用户的用户名: {}", username);
        // TODO: load the password and authorities for this username from the DB.
        Employee employee = employeeMapper.selectByName(username);
        if (employee == null)
            throw new UsernameNotFoundException("用户名或密码错误!");
        // NOTE(review): every user receives the hard-coded "admintest"
        // authority — confirm a real role lookup is intended (see TODO above).
        List<GrantedAuthority> grantedAuthorities = new ArrayList<>();
        GrantedAuthority grantedAuthority = new SimpleGrantedAuthority("admintest");
        grantedAuthorities.add(grantedAuthority);
        // Wrap the user info and return it: username, stored password, authorities.
        //User user = new User(username, "123456", grantedAuthorities);
        //User user = new User(username, "123456", AuthorityUtils.commaSeparatedStringToAuthorityList("admin2"));
        // user.isCredentialsNonExpired()
        //return user;
        boolean isEnabled = true;
        boolean accountNonExpired = true;
        boolean credentialsNonExpired = true;
        boolean accountNonLocked = true;
        // SaltedUser carries the per-user salt so the password encoder can verify.
        return new SaltedUser(username, employee.getPassword(),
                isEnabled, accountNonExpired, credentialsNonExpired,
                accountNonLocked, grantedAuthorities, employee.getSalt());
    }
}
|
krnsk0/git_user_switcher
|
src/unsetConfig/unsetConfig.spec.js
|
<reponame>krnsk0/git_user_switcher<filename>src/unsetConfig/unsetConfig.spec.js
const { unsetConfig } = require('./unsetConfig');
const { unsetLocalGitUser } = require('./helpers');
const { UNSET_SUCCESSFUL, UNSET_FAILED } = require('../strings');
// unsetLocalGitUser is mocked to succeed on the first call and fail on the
// second. NOTE(review): this couples the two tests below to their execution
// order — confirm before reordering tests or running them in isolation.
jest.mock('./helpers', () => ({
  unsetLocalGitUser: jest
    .fn()
    .mockReturnValueOnce(true)
    .mockReturnValueOnce(false),
}));
describe('The unsetConfig function', () => {
  // Capture console.log per test and restore the real one afterwards.
  const log = console.log;
  beforeEach(() => {
    console.log = jest.fn();
  });
  afterEach(() => {
    console.log = log;
  });
  it('should call the unsetLocalConfig helper and log on success', async () => {
    await unsetConfig();
    expect(unsetLocalGitUser).toHaveBeenCalled();
    // First mocked call returns true -> success message is logged.
    expect(console.log.mock.calls[0][0]).toEqual(UNSET_SUCCESSFUL);
  });
  it('should call the unsetLocalConfig helper and log on failure', async () => {
    await unsetConfig();
    expect(unsetLocalGitUser).toHaveBeenCalled();
    // Second mocked call returns false -> failure message is logged.
    expect(console.log.mock.calls[0][0]).toEqual(UNSET_FAILED);
  });
});
|
senotrusov/rubymq
|
lib/rubymq/application/daemon.rb
|
<filename>lib/rubymq/application/daemon.rb
#
# Copyright 2007-2008 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Daemon wrapper around an application factory: derives the daemon name,
# adds the scheduler-state-file option, and boots the ORM before starting.
class RubyMQ::Application::Daemon < RubyMQ::AbstractApplication::Daemon
  def initialize factory, options
    @factory = factory
    @options = options
    super()
  end

  # Daemon name; when restricted via --only-endpoints the endpoint names are
  # appended so several restricted daemons of one factory can coexist.
  def name
    @options["only-endpoints"] ? "#{@factory.name}_#{@options["only-endpoints"].join("_")}".to_sym : @factory.name
  end

  # Register the extra --scheduler-state-file command-line option.
  def define_options(options)
    super(options)
    options.option "--scheduler-state-file FILE", "Scheduler state file. Default to 'log/\#{name}.scheduler.state', fallbacks to './\#{name}.scheduler.state', may be absolute path"
  end

  attr_reader :scheduler_state_file

  # Resolve the scheduler state file path: explicit option, then log/, then
  # the current directory when log/'s parent directory does not exist.
  def apply_options(options)
    super(options)
    @scheduler_state_file = options["scheduler-state-file"] || "log/#{name}.scheduler.state"
    @scheduler_state_file = "./#{name}.scheduler.state" unless File.directory?(File.dirname(@scheduler_state_file))
  end

  # Boot ActiveRecord and eagerly load application code before starting.
  def start
    RubyMQ.initialize_orm :activerecord
    GreedyLoader.run
    super
  end

  private

  # Build the concrete application instance from the injected factory.
  def produce_application
    @factory.produce(:reaper => @reaper, :logger => RubyMQ.logger, :daemon => self, :only_endpoints => @options["only-endpoints"])
  end
end
|
936748646/TCP
|
vectoring/derby/v1/raw/000317.java
|
/*
Derby - Class org.apache.derbyTesting.functionTests.tests.lang.holdCursorJava
Copyright 2002, 2005 The Apache Software Foundation or its licensors, as applicable.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.lang;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import org.apache.derby.tools.ij;
import org.apache.derby.tools.JDBCDisplayUtil;
/**
* Test hold cursor after commit
*/
/**
 * Test hold cursor after commit
 * <p>
 * Exercises JDBC 3.0 result-set holdability against Derby:
 * HOLD_CURSORS_OVER_COMMIT cursors must remain usable across commit while
 * CLOSE_CURSORS_AT_COMMIT cursors must not; the same is checked for result
 * sets returned by a stored procedure, plus the rule that the isolation
 * level cannot be changed while held cursors are open.
 */
public class holdCursorJava {

    public static void main (String args[])
    {
        try {
            /* Load the JDBC Driver class */
            // use the ij utility to read the property file and
            // make the initial connection.
            ij.getPropertyArg(args);
            Connection conn = ij.startJBMS();
            createAndPopulateTable(conn);
            //set autocommit to off after creating table and inserting data
            conn.setAutoCommit(false);
            testHoldability(conn,ResultSet.HOLD_CURSORS_OVER_COMMIT);
            testHoldability(conn,ResultSet.CLOSE_CURSORS_AT_COMMIT);
            testHoldCursorOnMultiTableQuery(conn);
            testIsolationLevelChange(conn);
            conn.close();
        } catch (Exception e) {
            System.out.println("FAIL -- unexpected exception "+e);
            JDBCDisplayUtil.ShowException(System.out, e);
            e.printStackTrace();
        }
    }

    //create table and insert couple of rows
    // Also registers the MYPROC stored procedure (backed by testProc below)
    // which returns two result sets for testStatementsInProcedure.
    private static void createAndPopulateTable(Connection conn) throws SQLException {
        Statement stmt = conn.createStatement();
        System.out.println("Creating table...");
        stmt.executeUpdate( "CREATE TABLE T1 (c11 int, c12 int)" );
        stmt.executeUpdate("INSERT INTO T1 VALUES(1,1)");
        stmt.executeUpdate("INSERT INTO T1 VALUES(2,1)");
        stmt.executeUpdate( "CREATE TABLE T2 (c21 int, c22 int)" );
        stmt.executeUpdate("INSERT INTO T2 VALUES(1,1)");
        stmt.executeUpdate("INSERT INTO T2 VALUES(1,2)");
        stmt.executeUpdate("INSERT INTO T2 VALUES(1,3)");
        stmt.execute("create table testtable1 (id integer, vc varchar(100))");
        stmt.execute("insert into testtable1 values (11, 'testtable1-one'), (12, 'testtable1-two')");
        stmt.execute("create table testtable2 (id integer, vc varchar(100))");
        stmt.execute("insert into testtable2 values (21, 'testtable2-one'), (22, 'testtable2-two')");
        stmt.execute("create procedure MYPROC() language java parameter style java external name " +
                "'org.apache.derbyTesting.functionTests.tests.lang.holdCursorJava.testProc' result sets 2");
        System.out.println("done creating table and inserting data.");
        stmt.close();
    }

    //test cursor holdability after commit on multi table query
    private static void testHoldCursorOnMultiTableQuery(Connection conn) throws Exception
    {
        Statement s;
        ResultSet rs;
        System.out.println("Start multi table query with holdability true test");
        s = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.HOLD_CURSORS_OVER_COMMIT );
        //open a cursor with multiple rows resultset
        rs = s.executeQuery("select t1.c11, t2.c22 from t1, t2 where t1.c11=t2.c21");
        rs.next();
        System.out.println("value of t2.c22 is " + rs.getString(2));
        conn.commit();
        rs.next(); //because holdability is true, should be able to navigate the cursor after commit
        System.out.println("value of t2.c22 is " + rs.getString(2));
        rs.close();
        System.out.println("Multi table query with holdability true test over");
    }

    //test cursor holdability after commit
    // Changing the isolation level must fail with SQLState X0X03 while a
    // held cursor is open, and succeed once all cursors are closed. Also
    // covers bug4385 (generated-keys result sets being re-used incorrectly).
    private static void testIsolationLevelChange(Connection conn) throws Exception
    {
        Statement s;
        ResultSet rs;
        System.out.println("Start isolation level change test");
        //set current isolation to read committed
        conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
        s = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY,
                ResultSet.HOLD_CURSORS_OVER_COMMIT );
        //open a cursor with multiple rows resultset
        rs = s.executeQuery("select * from t1");
        rs.next();
        //Changing to different isolation from the current isolation for connection
        //will give an exception because there are held cursors
        try {
            System.out.println("Switch isolation while there are open cursors");
            conn.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE);
        } catch (SQLException se) {
            System.out.println("Should see exceptions");
            String m = se.getSQLState();
            JDBCDisplayUtil.ShowSQLException(System.out,se);
            if ("X0X03".equals(m)) {
                System.out.println("PASS: Can't change isolation if they are open cursor");
            } else {
                System.out.println("FAIL: Shouldn't able to change isolation because there are open cursor");
            }
        }
        //Close open cursors and then try changing to different isolation.
        //It should work.
        rs.close();
        conn.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE);
        // set the default holdability for the Connection and try setting the isolation level
        conn.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
        conn.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
        conn.createStatement().executeUpdate("SET ISOLATION RS");
        // test for bug4385 - internal ResultSets were being re-used incorrectly
        // will occur in with JDBC 2.0,1.2 but the first statement I found that
        // failed was an insert with generated keys.
        conn.createStatement().executeUpdate("Create table bug4385 (i int not null primary key, c int generated always as identity)");
        conn.commit();
        PreparedStatement ps = conn.prepareStatement("insert into bug4385(i) values(?)", Statement.RETURN_GENERATED_KEYS);
        ps.setInt(1, 199);
        ps.executeUpdate();
        rs = ps.getGeneratedKeys();
        int count = 0;
        while (rs.next()) {
            rs.getInt(1);
            count++;
        }
        rs.close();
        if (count != 1)
            System.out.println("FAIL returned more than one row for generated keys");
        ps.setInt(1, 299);
        ps.executeUpdate();
        rs = ps.getGeneratedKeys();
        count = 0;
        while (rs.next()) {
            rs.getInt(1);
            count++;
        }
        if (count != 1)
            System.out.println("FAIL returned more than one row for generated keys on re-execution");
        rs.close();
        ps.close();
        conn.rollback();
        //switch back to default isolation & holdability
        conn.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
        System.out.println("Isolation level change test over");
        conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
    }

    //set connection holdability and test holdability of statements inside and outside procedures
    //test that holdability of statements always overrides holdability of connection
    private static void testHoldability(Connection conn,int holdability) throws SQLException{
        conn.setHoldability(holdability);
        switch(holdability){
            case ResultSet.HOLD_CURSORS_OVER_COMMIT:
                System.out.println("\ntestHoldability with HOLD_CURSORS_OVER_COMMIT\n");
                break;
            case ResultSet.CLOSE_CURSORS_AT_COMMIT:
                System.out.println("\ntestHoldability with CLOSE_CURSORS_AT_COMMIT\n");
                break;
        }
        testStatements(conn);
        testStatementsInProcedure(conn);
    }

    //test holdability of statements outside procedures
    private static void testStatements(Connection conn) throws SQLException{
        System.out.println("\ntestStatements()\n");
        //HOLD_CURSORS_OVER_COMMIT
        Statement st1 = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE ,
                ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT);
        ResultSet rs1 = st1.executeQuery("select * from testtable1");
        checkResultSet(rs1, "before");
        conn.commit();
        checkResultSet(rs1, "after");
        st1.close();
        //CLOSE_CURSORS_AT_COMMIT
        Statement st2 = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE ,
                ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT);
        ResultSet rs2 = st2.executeQuery("select * from testtable2");
        checkResultSet(rs2, "before");
        conn.commit();
        checkResultSet(rs2, "after");
        st2.close();
    }

    //test holdability of statements in procedures
    // cs1's result sets are consumed before commit; cs2's are accessed
    // after commit, so their behavior depends on the holdability set
    // inside testProc, not on the connection default.
    private static void testStatementsInProcedure(Connection conn) throws SQLException{
        System.out.println("\ntestStatementsInProcedure()\n");
        CallableStatement cs1 = conn.prepareCall("call MYPROC()");
        cs1.execute();
        do{
            checkResultSet(cs1.getResultSet(), "before");
        }while(cs1.getMoreResults());
        CallableStatement cs2 = conn.prepareCall("call MYPROC()");
        cs2.execute();
        conn.commit();
        do{
            checkResultSet(cs2.getResultSet(),"after");
        }while(cs2.getMoreResults());
        cs1.close();
        cs2.close();
    }

    //check if resultset is accessible
    // Prints the first row when the cursor is usable; a closed/absent
    // result set is reported as an EXPECTED condition, not a failure.
    private static void checkResultSet(ResultSet rs, String beforeOrAfter) throws SQLException{
        System.out.println("checkResultSet "+ beforeOrAfter + " commit");
        try{
            if(rs != null){
                rs.next();
                System.out.println(rs.getString(1) + ", " + rs.getString(2));
            }
            else{
                System.out.println("EXPECTED:ResultSet is null");
            }
        } catch(SQLException se){
            System.out.println("EXPECTED EXCEPTION:"+se.getMessage());
        }
    }

    //Java method for stored procedure
    // Returns one held and one non-held result set via the server-side
    // "jdbc:default:connection" nested connection.
    public static void testProc(ResultSet[] rs1, ResultSet[] rs2) throws Exception
    {
        Connection conn = DriverManager.getConnection("jdbc:default:connection");
        //HOLD_CURSORS_OVER_COMMIT
        Statement st1 = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE ,
                ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT);
        rs1[0] = st1.executeQuery("select * from testtable1");
        //CLOSE_CURSORS_AT_COMMIT
        Statement st2 = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE ,
                ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT);
        rs2[0] = st2.executeQuery("select * from testtable2");
    }
}
|
ychp/coding
|
common/src/main/java/com/ychp/coding/common/util/Encryption.java
|
package com.ychp.coding.common.util;
import com.google.common.base.Objects;
import com.sun.org.apache.xerces.internal.impl.dv.util.Base64;
import com.ychp.coding.common.exception.EncryptionException;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.UUID;
/**
* Desc:
* Author: <a href="<EMAIL>">应程鹏</a>
* Date: 16/7/31
*/
public class Encryption {
private static final String Algorithm = "DESede"; //定义 加密算法,可用 DES,DESede,Blowfish
private static final int KEY_LENGTH = 24; //密钥长度
/**
* 3des解密
* @param value 待加密字符串
* @param key 原始密钥字符串
*/
private static String Decrypt3DES(String value, String key) throws EncryptionException {
byte[] b = decryptMode(GetKeyBytes(key), Base64.decode(value));
return new String(b != null ? b : new byte[0]);
}
/**
* 3des加密
* @param value 待加密字符串
* @param key 原始密钥字符串
*/
public static String Encrypt3DES(String value, String key) throws EncryptionException {
return byte2Base64(encryptMode(GetKeyBytes(key), md5Encode(value, key).getBytes()));
}
/**
* 计算24位长的密码byte值,首先对原始密钥做MD5算hash值,再用前8位数据对应补全后8位
*/
private static byte[] GetKeyBytes(String strKey) throws EncryptionException {
if (null == strKey || strKey.length() < 1) {
throw new EncryptionException("encryption.key.not.empty");
}
MessageDigest alg;
try {
alg = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException e) {
throw new EncryptionException("encryption.getMd5.fail");
}
alg.update(strKey.getBytes());
byte[] bkey = alg.digest();
int start = bkey.length;
byte[] bkey24 = new byte[KEY_LENGTH];
System.arraycopy(bkey, 0, bkey24, 0, start);
System.arraycopy(bkey, 0, bkey24, start, KEY_LENGTH - start);
return bkey24;
}
/**
*
* @param keybyte 加密密钥,长度为24字节
* @param src 为被加密的数据缓冲区(源)
*/
private static byte[] encryptMode(byte[] keybyte, byte[] src) {
try {
//生成密钥
SecretKey deskey = new SecretKeySpec(keybyte, Algorithm); //加密
Cipher c1 = Cipher.getInstance(Algorithm);
c1.init(Cipher.ENCRYPT_MODE, deskey);
return c1.doFinal(src);
} catch (Exception e3) {
e3.printStackTrace();
}
return null;
}
/**
*
* @param keybyte 加密密钥,长度为24字节
* @param src 为被加密的数据缓冲区(源)
*/
private static byte[] decryptMode(byte[] keybyte, byte[] src) {
try {
//生成密钥
SecretKey deskey = new SecretKeySpec(keybyte, Algorithm);
//解密
Cipher c1 = Cipher.getInstance(Algorithm);
c1.init(Cipher.DECRYPT_MODE, deskey);
return c1.doFinal(src);
} catch (Exception e3) {
e3.printStackTrace();
}
return null;
}
/**
* 转换成base64编码
*/
private static String byte2Base64(byte[] b) {
return Base64.encode(b);
}
/***
* MD5加密 生成32位md5码
* @param inStr 待加密字符串
* @return 返回32位md5码
*/
private static String md5Encode(String inStr, String key) {
MessageDigest md5;
try {
String password = inStr + key.substring(0,8);
md5 = MessageDigest.getInstance("MD5");
byte[] byteArray = password.getBytes("UTF-8");
byte[] md5Bytes = md5.digest(byteArray);
StringBuilder hexValue = toHexString(md5Bytes);
return hexValue.toString().substring(0,16);
} catch (Exception e) {
e.printStackTrace();
return "";
}
}
/**
* 验证密码
*/
public static boolean checkPassword(String value, String key, String equStr){
String inStr = md5Encode(value, key);
String originStr = Decrypt3DES(equStr, key);
return Objects.equal(inStr, originStr);
}
public static String getSalt() {
return CustomerStringUtils.getRandomString(KEY_LENGTH);
}
/***
* MD5加密 生成32位md5码
* @param inStr 待加密字符串
* @return 返回32位md5码
*/
public static String md5Encode(String inStr) {
MessageDigest md5;
try {
md5 = MessageDigest.getInstance("MD5");
byte[] byteArray = inStr.getBytes("UTF-8");
byte[] md5Bytes = md5.digest(byteArray);
StringBuilder hexValue = toHexString(md5Bytes);
return hexValue.toString();
} catch (Exception e) {
e.printStackTrace();
return "";
}
}
private static StringBuilder toHexString(byte[] md5Bytes){
StringBuilder hexValue = new StringBuilder();
for (byte md5Byte : md5Bytes) {
int val = ((int) md5Byte) & 0xff;
if (val < 16) {
hexValue.append("0");
}
hexValue.append(Integer.toHexString(val));
}
return hexValue;
}
public static String factoryAppCode(){
return UUID.randomUUID().toString().replace("-","");
}
public static String factoryAppSecret(String appCode, String appName){
return md5Encode(appName + System.currentTimeMillis() + appCode);
}
public static void main(String[] args) throws Exception {
String salt = "grqx5iCM2Ma8KT9x1hja6acW";
System.out.println("key:" + salt);
String password = "<PASSWORD>";
System.out.println("password:" + md5Encode(password, salt));
password = <PASSWORD>(password, salt);
System.out.println("password:" + password);
password = <PASSWORD>(password, salt);
System.out.println("origin_password:" + password);
}
}
|
78182648/blibli-go
|
app/interface/main/dm2/model/advert.go
|
<filename>app/interface/main/dm2/model/advert.go<gh_stars>10-100
package model
import (
"encoding/json"
"fmt"
)
// resource id defined by advert
// Each platform has a main ad slot and an icon slot; Resource() joins the
// matching pair into the comma-separated string the advert API expects.
const (
	adRscIDIphone     = 2630
	adRscIDAndrod     = 2631
	adRscIDIphoneIcon = 2642
	adRscIDAndroidIcon = 2643
)
// Resource get resource by mobi_app.
// Returns "<ad_resource_id>,<icon_resource_id>" — the iOS pair for any
// iPhone/iPad client identifier, the Android pair otherwise.
func Resource(mobiApp string) (rsc string) {
	switch mobiApp {
	case "iphone", "ipad", "iphone_i":
		rsc = fmt.Sprintf("%d,%d", adRscIDIphone, adRscIDIphoneIcon)
	default:
		rsc = fmt.Sprintf("%d,%d", adRscIDAndrod, adRscIDAndroidIcon)
	}
	return
}
// ADReq advert request params
type ADReq struct {
	Aid      int64  `json:"aid"`
	Oid      int64  `json:"oid"`
	Mid      int64  `json:"mid"`
	Build    int64  `json:"build"`
	Buvid    string `json:"buvid"`
	ClientIP string `json:"ip"`
	MobiApp  string `json:"mobi_app"`
	ADExtra  string `json:"ad_extra"`
}

// ADResp advert response
// Icon holds the entry whose resource id is one of the icon slots; all
// other entries go to ADs (see (*AD).Convert).
type ADResp struct {
	Icon *ADInfo   `json:"icon,omitempty"`
	ADs  []*ADInfo `json:"ads_info,omitempty"`
}

// AD advert struct
type AD struct {
	RequestID string                      `json:"request_id,omitempty"`
	ADsInfo   map[int64]map[int64]*ADInfo `json:"ads_info,omitempty"` // resource_id --> source_id --> adinfo
}

// ADInfo advert info.
type ADInfo struct {
	// filed response from advert api
	Index     int             `json:"index,omitempty"`
	IsAd      bool            `json:"is_ad,omitempty"`
	CmMark    int             `json:"cm_mark,omitempty"`
	CardIndex int             `json:"card_index,omitempty"`
	ADInfo    json.RawMessage `json:"ad_info,omitempty"` // opaque payload, passed through verbatim
	// filed used in app
	RequestID  string `json:"request_id,omitempty"`
	ResourceID int64  `json:"resource_id,omitempty"`
	SourceID   int64  `json:"source_id,omitempty"`
	ClientIP   string `json:"client_ip,omitempty"`
	IsADLoc    bool   `json:"is_ad_loc,omitempty"`
}
// Convert convert AD to ADResp.
// Flattens the nested resource_id -> source_id -> *ADInfo map into a flat
// response, stamping each entry with the request id and client IP. Entries
// whose resource id is an icon slot populate ADResp.Icon; everything else
// is appended to ADResp.ADs.
func (a *AD) Convert(clientIP string) (res *ADResp) {
	res = new(ADResp)
	for rscID, adInfoMap := range a.ADsInfo {
		for srcID, adInfo := range adInfoMap {
			v := new(ADInfo)
			v.RequestID = a.RequestID
			v.ResourceID = rscID
			v.SourceID = srcID
			v.ClientIP = clientIP
			v.IsADLoc = true // 该字段服务端代码写死为true
			if adInfo != nil {
				v.Index = adInfo.Index
				v.IsAd = adInfo.IsAd
				v.CmMark = adInfo.CmMark
				v.CardIndex = adInfo.CardIndex
				// BUG FIX: this access previously sat outside the nil
				// check and panicked whenever adInfo was nil.
				if len(adInfo.ADInfo) > 0 {
					v.ADInfo = adInfo.ADInfo
				}
			}
			if v.ResourceID == adRscIDIphoneIcon || v.ResourceID == adRscIDAndroidIcon { // icon resouce id
				res.Icon = v
				continue
			}
			res.ADs = append(res.ADs, v)
		}
	}
	return
}
|
axxel/GPUImage
|
examples/FilterShowcase/FilterShowcase/ShowcaseFilterViewController.h
|
#import <UIKit/UIKit.h>
#import "GPUImage.h"
// Index of every filter demonstrated by the showcase app. The order is
// significant: the filter list maps row numbers onto these values, and
// GPUIMAGE_NUMFILTERS acts as the count sentinel, so it must stay last.
typedef enum {
    GPUIMAGE_SATURATION,
    GPUIMAGE_CONTRAST,
    GPUIMAGE_BRIGHTNESS,
    GPUIMAGE_EXPOSURE,
    GPUIMAGE_SHARPEN,
    GPUIMAGE_UNSHARPMASK,
    GPUIMAGE_TRANSFORM,
    GPUIMAGE_TRANSFORM3D,
    GPUIMAGE_CROP,
    GPUIMAGE_GAMMA,
    GPUIMAGE_HAZE,
    GPUIMAGE_SEPIA,
    GPUIMAGE_COLORINVERT,
    GPUIMAGE_GRAYSCALE,
    GPUIMAGE_THRESHOLD,
    GPUIMAGE_ADAPTIVETHRESHOLD,
    GPUIMAGE_PIXELLATE,
    GPUIMAGE_POLARPIXELLATE,
    GPUIMAGE_CROSSHATCH,
    GPUIMAGE_SOBELEDGEDETECTION,
    GPUIMAGE_SKETCH,
    GPUIMAGE_TOON,
    GPUIMAGE_CGA,
    GPUIMAGE_POSTERIZE,
    GPUIMAGE_KUWAHARA,
    GPUIMAGE_VIGNETTE,
    GPUIMAGE_GAUSSIAN,
    GPUIMAGE_GAUSSIAN_SELECTIVE,
    GPUIMAGE_FASTBLUR,
    GPUIMAGE_BOXBLUR,
    GPUIMAGE_SWIRL,
    GPUIMAGE_BULGE,
    GPUIMAGE_PINCH,
    GPUIMAGE_STRETCH,
    GPUIMAGE_DISSOLVE,
    GPUIMAGE_CHROMAKEY,
    GPUIMAGE_MULTIPLY,
    GPUIMAGE_OVERLAY,
    GPUIMAGE_LIGHTEN,
    GPUIMAGE_DARKEN,
    GPUIMAGE_COLORBURN,
    GPUIMAGE_COLORDODGE,
    GPUIMAGE_SCREENBLEND,
    GPUIMAGE_DIFFERENCEBLEND,
    GPUIMAGE_EXCLUSIONBLEND,
    GPUIMAGE_HARDLIGHTBLEND,
    GPUIMAGE_SOFTLIGHTBLEND,
    GPUIMAGE_CUSTOM,
    GPUIMAGE_FILECONFIG,
    GPUIMAGE_FILTERGROUP,
    GPUIMAGE_NUMFILTERS  // sentinel — keep last
} GPUImageShowcaseFilterType;

// View controller that runs live camera video through the filter chosen at
// init time and exposes a slider bound to the filter's main parameter.
@interface ShowcaseFilterViewController : UIViewController
{
    GPUImageVideoCamera *videoCamera;             // live capture source
    GPUImageOutput<GPUImageInput> *filter;        // active filter instance
    GPUImagePicture *sourcePicture;               // still-image input for blend filters
    GPUImageShowcaseFilterType filterType;        // which filter this controller shows
    GPUImageFilterPipeline *pipeline;             // used by the filter-group demo
    __unsafe_unretained UISlider *_filterSettingsSlider;
}

@property(readwrite, unsafe_unretained, nonatomic) IBOutlet UISlider *filterSettingsSlider;

// Initialization and teardown
- (id)initWithFilterType:(GPUImageShowcaseFilterType)newFilterType;
- (void)setupFilter;

// Filter adjustments
- (IBAction)updateFilterFromSlider:(id)sender;

@end
|
davidzou/WonderingWall
|
android/libcomm/src/com/wonderingwall/base/AbstractBaseService.java
|
<reponame>davidzou/WonderingWall<filename>android/libcomm/src/com/wonderingwall/base/AbstractBaseService.java<gh_stars>1-10
/**
* Project Name:libui
* File Name:BaseServiceImpl.java
* Package Name:com.wonderingwall.ui
* Date:Sep 18, 20145:10:22 PM
* Copyright (c) 2014, <EMAIL> All Rights Reserved.
*
*/
package com.wonderingwall.base;
import java.util.ArrayList;
import java.util.List;
import android.content.Intent;
import android.os.Bundle;
/**
* ClassName:BaseServiceImpl <br/>
* Function: TODO ADD FUNCTION. <br/>
* Reason: TODO ADD REASON. <br/>
* Date: Sep 18, 2014 5:10:22 PM <br/>
* @author DavidZou
* @version
* @see
*/
/**
 * ClassName:BaseServiceImpl <br/>
 * Function: lifecycle-aware base service that owns the data models backing
 * an Activity and defines the abstract request/cancel hooks subclasses
 * implement. <br/>
 * Date: Sep 18, 2014 5:10:22 PM <br/>
 * @author DavidZou
 * @version
 * @see
 */
public abstract class AbstractBaseService implements BaseService {

    /** The models what activity used data */
    // NOTE(review): raw ArrayList assigned to a wildcard-typed field —
    // new ArrayList<>() would avoid the unchecked warning.
    protected ArrayList<? extends BaseModel> _model = new ArrayList();

    @Override
    public <T extends BaseModel> void bind(T t) {
        // TODO bind the data object.
        // A service may hold several models backing different parts of the
        // UI (e.g. partially refreshed data vs. full-view data), each fed
        // by a potentially different request.
        // _model.add(t);
    }

    @Override
    public void onCreate(Bundle savedInstanceState, Intent intent) {
        if(savedInstanceState != null){
            // restore cached data after the Activity was recreated
            resetData(savedInstanceState);
        }else{
            // read from intent extras when launched fresh
            if(intent != null && intent.getExtras() != null){
                setExtraData(intent);
            }else{
                setDafaultData();
            }
        }
    }

    /**
     * Description: restores cached data.<br/>
     * Conditions: the Activity is being rebuilt after the system destroyed
     * it; {@code savedInstanceState} holds the previously saved state.<br/>
     *
     * @param savedInstanceState
     */
    protected abstract void resetData(Bundle savedInstanceState);

    /**
     * Description: initializes data.<br/>
     * Conditions: first creation of the Activity — populates the data
     * objects from the launch Intent's extras.<br/>
     *
     * @param intent
     */
    protected abstract void setExtraData(Intent intent);

    /**
     * Description: initializes default data.<br/>
     * Conditions: first creation of the Activity when the launch Intent
     * carried no usable extras.<br/>
     * NOTE(review): name keeps the historical "Dafault" spelling because
     * subclasses override it — renaming would break them.
     */
    protected abstract void setDafaultData();

    @Override
    public void onSaveInstanceState(Bundle outState) {
    }

    @Override
    public void onRestoreInstanceState(Bundle savedInstanceState) {
        // delegate to the same restore path used by onCreate
        resetData(savedInstanceState);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // TODO Auto-generated method stub
    }

    @Override
    public void onStart() {
    }

    /**
     * Description: performs a network data request — fast JSON requests,
     * not bulk image loads.<br/>
     * Cautions: only one such request should be in flight; a new request
     * replaces the previous one.<br/>
     *
     * @param refresh
     */
    protected abstract void request(boolean refresh);

    /**
     * Description: cancels the in-flight JSON data request.<br/>
     */
    protected abstract void cancel();

    /**
     * Description: requests an image.<br/>
     * Conditions: by default forces the request to start immediately
     * rather than waiting in the queue — e.g. when opening an image detail
     * view the image should load first.<br/>
     *
     * @param url image URL
     */
    protected abstract void requestImage(String url);

    /**
     * Description: cancels a specific image request.<br/>
     * Conditions: e.g. the user abandons a half-downloaded full-size
     * image; dropping the task saves bandwidth.<br/>
     */
    protected abstract void cancelImage();

    /**
     * Description: cancels or deprioritizes all outstanding image
     * requests.<br/>
     * Conditions: called on destroy/pause so the newest requests are sent
     * first and network use is reduced.<br/>
     */
    protected abstract void cancelImageAll();

    @Override
    public void onRestart() {
        // TODO Auto-generated method stub
    }

    @Override
    public void onResume() {
        // TODO Auto-generated method stub
    }

    @Override
    public void onPause() {
        // stop image traffic while the Activity is not visible
        cancelImageAll();
    }

    @Override
    public void onStop() {
        // TODO Auto-generated method stub
    }

    @Override
    public void onDestroy() {
        // TODO Auto-generated method stub
    }

    public List<? extends BaseModel> getModel() {
        return _model;
    }

    @SuppressWarnings("unchecked")
    public <T extends BaseModel> T getModel(int location){
        return (T) _model.get(location);
    }
}
|
Rose2073/RoseCppSource
|
Luogu/P1427.cpp
|
<gh_stars>1-10
#include<cstdio>
// Luogu P1427: read integers until the terminating 0, then print them in
// reverse, space-separated (the 0 sentinel itself is not printed).
int main(){
	int a[101], n = 0;
	// Read at most 101 values so oversized/malformed input cannot overflow
	// the buffer (the original loop read unconditionally and spun forever
	// on scanf failure). Stop on EOF/parse error or once the sentinel 0
	// has been stored.
	while (n < 101) {
		if (scanf("%d", &a[n]) != 1) break;
		if (a[n++] == 0) break;
	}
	// a[n-1] is the sentinel (when present); emit the payload backwards.
	for (int i = n - 2; i >= 0; i--)
		printf("%d ", a[i]);
	return 0;
}
|
rafajpet/cloud
|
resource-aggregate/cqrs/eventstore/mongodb/maintenance.go
|
package mongodb
import (
"context"
"errors"
"fmt"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
"github.com/plgd-dev/cloud/resource-aggregate/cqrs/eventstore/maintenance"
"go.mongodb.org/mongo-driver/bson"
)
const maintenanceCName = "maintenance"

// makeDbAggregateVersion builds the BSON document stored per aggregate:
// the aggregate id, its latest snapshot version, and the document _id
// (which is the aggregate id itself, see getID).
func makeDbAggregateVersion(task maintenance.Task) bson.M {
	return bson.M{
		aggregateIDKey: task.AggregateID,
		versionKey:     task.Version,
		idKey:          getID(task),
	}
}
// getID returns the maintenance-document _id for a task — the aggregate
// id, so there is exactly one record per aggregate.
func getID(task maintenance.Task) string {
	return task.AggregateID
}
// Insert stores (or updates) the information about the latest snapshot version per aggregate into the DB
// Uses an upsert keyed on the aggregate id with a "$lt" guard on the
// version, so an older version can never overwrite a newer one.
// NOTE(review): the error message mentions version, but only AggregateID
// is actually validated here — confirm whether Version == 0 is legal.
func (s *EventStore) Insert(ctx context.Context, task maintenance.Task) error {
	if task.AggregateID == "" {
		return errors.New("could not insert record - aggregate ID and/or version cannot be empty")
	}
	record := makeDbAggregateVersion(task)
	col := s.client.Database(s.DBName()).Collection(maintenanceCName)
	opts := options.UpdateOptions{}
	opts.SetUpsert(true)
	res, err := col.UpdateOne(ctx,
		bson.M{
			idKey: getID(task),
			versionKey: bson.M{
				"$lt": task.Version,
			},
		},
		bson.M{
			"$set": record,
		},
		&opts,
	)
	if err != nil {
		// a duplicate-key error means the $lt filter rejected the update,
		// i.e. the stored version is already >= task.Version
		if err == mongo.ErrNilDocument || IsDup(err) {
			return fmt.Errorf("could not insert record with aggregate ID %v, version %d - version is outdated - %w", task.AggregateID, task.Version, err)
		}
		return fmt.Errorf("could not insert record with aggregate ID %v, version %d - %w", task.AggregateID, task.Version, err)
	}
	if res.UpsertedCount != 1 && res.ModifiedCount != 1 {
		return fmt.Errorf("could not insert record with aggregate ID %v, version %d", task.AggregateID, task.Version)
	}
	return nil
}
// dbAggregateVersionIterator adapts a mongo cursor to the
// maintenance.TaskHandler iteration protocol.
type dbAggregateVersionIterator struct {
	iter *mongo.Cursor
}

// Next advances the cursor and fills task from the decoded record.
// Returns false when the cursor is exhausted, a record fails to decode,
// or a field has an unexpected BSON type. BUG FIX: the previous plain
// type assertions (dbRecord[...].(string) / .(int64)) panicked on
// malformed records; comma-ok assertions now fail gracefully instead.
func (i *dbAggregateVersionIterator) Next(ctx context.Context, task *maintenance.Task) bool {
	var dbRecord bson.M
	if !i.iter.Next(ctx) {
		return false
	}
	if err := i.iter.Decode(&dbRecord); err != nil {
		return false
	}
	aggregateID, ok := dbRecord[aggregateIDKey].(string)
	if !ok {
		return false
	}
	version, ok := dbRecord[versionKey].(int64)
	if !ok {
		return false
	}
	task.AggregateID = aggregateID
	task.Version = uint64(version)
	return true
}
// Err reports the first error encountered by the underlying cursor.
func (i *dbAggregateVersionIterator) Err() error {
	return i.iter.Err()
}
// Query retrieves the latest snapshot version per aggregate for thw number of aggregates specified by 'limit'
// The handler is invoked with an iterator over the records; the cursor is
// always closed, and a handler error takes precedence over a close error.
func (s *EventStore) Query(ctx context.Context, limit int, taskHandler maintenance.TaskHandler) error {
	opts := options.FindOptions{}
	opts.SetLimit(int64(limit))
	iter, err := s.client.Database(s.DBName()).Collection(maintenanceCName).Find(ctx, bson.M{}, &opts)
	// no documents at all is not an error — there is simply nothing to do
	if err == mongo.ErrNilDocument {
		return nil
	}
	if err != nil {
		return err
	}
	i := dbAggregateVersionIterator{
		iter: iter,
	}
	err = taskHandler.Handle(ctx, &i)
	errClose := iter.Close(ctx)
	if err == nil {
		return errClose
	}
	return err
}
// Remove deletes (the latest snapshot version) database record for a given aggregate ID
// The delete filter is the full record (id + aggregate id + version), so
// the call only succeeds when the stored version matches task.Version.
func (s *EventStore) Remove(ctx context.Context, task maintenance.Task) error {
	record := makeDbAggregateVersion(task)
	col := s.client.Database(s.DBName()).Collection(maintenanceCName)
	res, err := col.DeleteOne(ctx, record)
	if err != nil {
		return err
	}
	if res.DeletedCount != 1 {
		return fmt.Errorf("could not remove record with aggregate ID %s and/or version %d", task.AggregateID, task.Version)
	}
	return nil
}
|
ealonas/Markus
|
app/models/assignment.rb
|
<gh_stars>0
require 'csv_invalid_line_error'
# Assignment: an assessed piece of work with groupings, criteria, automated
# tests, section-specific due dates and an optional peer-review companion
# assignment. The declarative block below defines associations first, then
# validations.
class Assignment < ActiveRecord::Base
  include RepositoryHelper
  MIN_PEER_REVIEWS_PER_GROUP = 1

  # --- grading criteria (ordered) ---
  has_many :rubric_criteria,
           -> { order(:position) },
           class_name: 'RubricCriterion',
           dependent: :destroy
  has_many :flexible_criteria,
           -> { order(:position) },
           class_name: 'FlexibleCriterion',
           dependent: :destroy
  has_many :checkbox_criteria,
           -> { order(:position) },
           class_name: 'CheckboxCriterion',
           dependent: :destroy

  # --- automated testing ---
  has_many :test_support_files, dependent: :destroy
  accepts_nested_attributes_for :test_support_files, allow_destroy: true
  has_many :test_scripts, dependent: :destroy
  accepts_nested_attributes_for :test_scripts, allow_destroy: true

  has_many :annotation_categories,
           -> { order(:position) },
           class_name: 'AnnotationCategory',
           dependent: :destroy
  has_many :criterion_ta_associations,
           dependent: :destroy
  has_many :assignment_files,
           dependent: :destroy
  accepts_nested_attributes_for :assignment_files, allow_destroy: true
  validates_associated :assignment_files
  has_one :assignment_stat, dependent: :destroy
  accepts_nested_attributes_for :assignment_stat, allow_destroy: true
  validates_associated :assignment_stat
  # Because of app/views/main/_grade_distribution_graph.html.erb:25
  validates_presence_of :assignment_stat

  # Assignments can now refer to themselves, where this is null if there
  # is no parent (the same holds for the child peer reviews)
  belongs_to :parent_assignment, class_name: 'Assignment', inverse_of: :pr_assignment
  has_one :pr_assignment, class_name: 'Assignment', foreign_key: :parent_assignment_id, inverse_of: :parent_assignment
  has_many :peer_reviews, through: :groupings
  has_many :pr_peer_reviews, through: :parent_assignment, source: :peer_reviews

  # NOTE(review): duplicate declaration — has_many :annotation_categories
  # already appears above with identical options; the later one wins.
  has_many :annotation_categories,
           -> { order(:position) },
           class_name: 'AnnotationCategory',
           dependent: :destroy

  has_many :groupings
  has_many :ta_memberships, through: :groupings
  has_many :student_memberships, through: :groupings
  has_many :tokens, through: :groupings
  has_many :submissions, through: :groupings
  has_many :groups, through: :groupings
  has_many :notes, as: :noteable, dependent: :destroy
  has_many :section_due_dates
  accepts_nested_attributes_for :section_due_dates

  # --- validations ---
  validates_uniqueness_of :short_identifier, case_sensitive: true
  validates_numericality_of :group_min, only_integer: true, greater_than: 0
  validates_numericality_of :group_max, only_integer: true, greater_than: 0

  has_one :submission_rule, dependent: :destroy, inverse_of: :assignment
  accepts_nested_attributes_for :submission_rule, allow_destroy: true
  validates_associated :submission_rule
  validates_presence_of :submission_rule

  validates_presence_of :short_identifier
  validates_presence_of :description
  validates_presence_of :repository_folder
  validates_presence_of :due_date
  validates_presence_of :group_min
  validates_presence_of :group_max
  validates_presence_of :notes_count
  # NOTE(review): duplicate — :assignment_stat presence is already
  # validated above.
  validates_presence_of :assignment_stat
  # "validates_presence_of" for boolean values.
  validates_inclusion_of :allow_web_submits, in: [true, false]
  validates_inclusion_of :vcs_submit, in: [true, false]
  validates_inclusion_of :display_grader_names_to_students, in: [true, false]
  validates_inclusion_of :is_hidden, in: [true, false]
  validates_inclusion_of :has_peer_review, in: [true, false]
  validates_inclusion_of :assign_graders_to_criteria, in: [true, false]
  validates_inclusion_of :enable_test, in: [true, false]
  validates_inclusion_of :enable_student_tests, in: [true, false], if: :enable_test
  validates_inclusion_of :unlimited_tokens, in: [true, false], if: :enable_student_tests
  validates_presence_of :token_start_date, if: :enable_student_tests
with_options if: ->{ :enable_student_tests && !:unlimited_tokens } do |assignment|
assignment.validates :tokens_per_period,
presence: true,
numericality: { only_integer: true,
greater_than_or_equal_to: 0 }
assignment.validates :token_period,
presence: true,
numericality: { greater_than: 0 }
end
validate :minimum_number_of_groups
after_create :build_repository
before_save :reset_collection_time
# Call custom validator in order to validate the :due_date attribute
# date: true maps to DateValidator (custom_name: true maps to CustomNameValidator)
# Look in lib/validators/* for more info
validates :due_date, date: true
after_save :update_assigned_tokens
after_save :create_peer_review_assignment_if_not_exist
# Set the default order of assignments: in ascending order of due_date
default_scope { order('due_date ASC') }
def minimum_number_of_groups
if (group_max && group_min) && group_max < group_min
errors.add(:group_max, 'must be greater than the minimum number of groups')
false
end
end
# Are we past all the due dates for this assignment?
def past_all_due_dates?
# If no section due dates /!\ do not check empty? it could be wrong
unless self.section_due_dates_type
return !due_date.nil? && Time.zone.now > due_date
end
# If section due dates
self.section_due_dates.each do |d|
if !d.due_date.nil? && Time.zone.now > d.due_date
return true
end
end
false
end
# Return an array with names of sections past
def section_names_past_due_date
sections_past = []
unless self.section_due_dates_type
if !due_date.nil? && Time.zone.now > due_date
return sections_past << 'Due Date'
end
end
self.section_due_dates.each do |d|
if !d.due_date.nil? && Time.zone.now > d.due_date
sections_past << d.section.name
end
end
sections_past
end
# Whether or not this grouping is past its due date for this assignment.
# Uses the inviter's section deadline when per-section deadlines are enabled
# and the grouping's inviter belongs to a section; otherwise falls back to
# the assignment-wide check.
def grouping_past_due_date?(grouping)
  if section_due_dates_type && grouping &&
      grouping.inviter.section.present?
    section_due_date =
      SectionDueDate.due_date_for(grouping.inviter.section, self)
    !section_due_date.nil? && Time.zone.now > section_due_date
  else
    past_all_due_dates?
  end
end
# Effective due date for +section+. Falls back to the assignment-wide
# due_date when per-section deadlines are off or no section is given.
def section_due_date(section)
  if section_due_dates_type && section
    SectionDueDate.due_date_for(section, self)
  else
    due_date
  end
end
# Calculate the latest due date among all sections for the assignment.
def latest_due_date
  unless section_due_dates_type
    return due_date
  end
  # Consider every section deadline plus the global one; ignore nils.
  candidates = section_due_dates.map(&:due_date)
  candidates << due_date
  candidates.compact.max
end
# True once the submission rule's collection time (optionally computed for
# +section+) has passed.
def past_collection_date?(section=nil)
  Time.zone.now > submission_rule.calculate_collection_time(section)
end
# True once even the latest due date (across all sections) has passed.
def past_all_collection_dates?
  Time.zone.now > latest_due_date
end
# True if a remark-request deadline is set and has passed.
def past_remark_due_date?
  !remark_due_date.nil? && Time.zone.now > remark_due_date
end
# Return true if this is a group assignment; false otherwise
# NOTE(review): `invalid_override` appears to force group semantics even when
# group_max == 1 — confirm its intended meaning against the schema.
def group_assignment?
  invalid_override || group_max > 1
end
# Returns the group by the user for this assignment. If pending=true,
# it will return the group that the user has a pending invitation to.
# Returns nil if user does not have a group for this assignment, or if it is
# not a group assignment
# NOTE(review): the current fallback ignores the +pending+ flag entirely —
# it only resolves the accepted grouping (see FIXME below).
def group_by(uid, pending=false)
  return unless group_assignment?
  # condition = "memberships.user_id = ?"
  # condition += " and memberships.status != 'rejected'"
  # add non-pending status clause to condition
  # condition += " and memberships.status != 'pending'" unless pending
  # groupings.first(include: :memberships, conditions: [condition, uid]) #FIXME: needs schema update
  #FIXME: needs to be rewritten using a proper query...
  User.find(uid.id).accepted_grouping_for(id)
end
# Label used to identify this assignment in the notes UI.
def display_for_note
  short_identifier
end
# Returns the maximum possible mark for a particular assignment:
# the sum of max_mark over the criteria visible to +user_visibility+,
# rounded to two decimals.
def max_mark(user_visibility = :ta)
  get_criteria(user_visibility).map(&:max_mark).sum.round(2)
end
# calculates summary statistics of released results for this assignment
# Persists results_fails / results_zeros / results_average / results_median.
# Returns false when no marks have been released yet, otherwise the result
# of save.
def update_results_stats
  marks = Result.student_marks_by_assignment(id)
  # No marks released for this assignment.
  return false if marks.empty?
  # A "fail" is any mark below half of the maximum possible mark.
  self.results_fails = marks.count { |mark| mark < max_mark / 2.0 }
  self.results_zeros = marks.count(&:zero?)
  # Avoid division by 0.
  self.results_average, self.results_median =
    if max_mark.zero?
      [0, 0]
    else
      # Calculates average and median in percentage.
      [average(marks), median(marks)].map do |stat|
        (stat * 100 / max_mark).round(2)
      end
    end
  self.save
end
# Arithmetic mean of +marks+; 0 when the list is empty.
def average(marks)
  return 0 if marks.empty?
  marks.reduce(:+) / marks.size.to_f
end
# Median of +marks+; 0 for an empty list.
# NOTE(review): this indexes the middle elements without sorting, so it
# assumes +marks+ arrives already sorted (they come from
# Result.student_marks_by_assignment) — TODO confirm that query's ordering.
def median(marks)
  count = marks.size
  return 0 if count.zero?
  if count.even?
    # Even count: mean of the two middle values.
    average([marks[count/2 - 1], marks[count/2]])
  else
    marks[count/2]
  end
end
# The assignment to "feature" on the dashboard: the most recently due
# assignment inside the upcoming window, else the earliest-due one overall.
def self.get_current_assignment
  # start showing (or "featuring") the assignment 3 days before it's due
  # query uses Date.today + 4 because results from db seems to be off by 1
  current_assignment = Assignment.where('due_date <= ?', Date.today + 4)
                                 .reorder('due_date DESC').first
  if current_assignment.nil?
    current_assignment = Assignment.reorder('due_date ASC').first
  end
  current_assignment
end
# Recompute and persist the number of submissions whose remark request is
# still waiting to be marked (remark result in the :incomplete state).
def update_remark_request_count
  self.outstanding_remark_request_count = groupings.count do |grouping|
    submission = grouping.current_submission_used
    !submission.nil? && submission.has_remark? &&
      submission.remark_result.marking_state ==
        Result::MARKING_STATES[:incomplete]
  end
  self.save
end
# Sum of max_marks across all test scripts attached to this assignment.
def total_test_script_marks
  return test_scripts.sum("max_marks")
end
#total marks for scripts that are run on student request
def total_ror_script_marks
  return test_scripts.where("run_by_students" => true).sum("max_marks")
end
# Create a Group (auto-named, or named +new_group_name+) plus a Grouping
# tying it to this assignment. Raises if a group of that name already has a
# grouping for this assignment. Returns the new Grouping.
def add_group(new_group_name=nil)
  if group_name_autogenerated
    # Save first (without validation) so the record id exists to derive the
    # autogenerated name from.
    group = Group.new
    group.save(validate: false)
    group.group_name = group.get_autogenerated_group_name
    group.save
  else
    return if new_group_name.nil?
    # NOTE: assignment-in-condition is intentional — reuse an existing group
    # of the same name if one exists.
    if group = Group.where(group_name: new_group_name).first
      unless groupings.where(group_id: group.id).first.nil?
        raise "Group #{new_group_name} already exists"
      end
    else
      group = Group.create(group_name: new_group_name)
    end
  end
  group.set_repo_permissions
  Grouping.create(group: group, assignment: self)
end
# Clones the Groupings from the assignment with id assignment_id
# into self. Destroys any previously existing Groupings associated
# with this Assignment
# Memberships of hidden users are NOT cloned; a grouping is only cloned if
# at least one unhidden student member remains. Runs in a transaction so a
# failed save rolls everything back.
def clone_groupings_from(assignment_id)
  original_assignment = Assignment.find(assignment_id)
  self.transaction do
    # Copy the grouping-related settings from the source assignment.
    self.group_min = original_assignment.group_min
    self.group_max = original_assignment.group_max
    self.student_form_groups = original_assignment.student_form_groups
    self.group_name_autogenerated = original_assignment.group_name_autogenerated
    self.group_name_displayed = original_assignment.group_name_displayed
    self.groupings.destroy_all
    self.save
    self.reload
    original_assignment.groupings.each do |g|
      # Skip memberships belonging to hidden users.
      unhidden_student_memberships = g.accepted_student_memberships.select do |m|
        !m.user.hidden
      end
      unhidden_ta_memberships = g.ta_memberships.select do |m|
        !m.user.hidden
      end
      #create the memberships for any user that is not hidden
      unless unhidden_student_memberships.empty?
        #create the groupings
        grouping = Grouping.new
        grouping.group_id = g.group_id
        grouping.assignment_id = self.id
        grouping.admin_approved = g.admin_approved
        raise 'Could not save grouping' if !grouping.save
        all_memberships = unhidden_student_memberships + unhidden_ta_memberships
        all_memberships.each do |m|
          membership = Membership.new
          membership.user_id = m.user_id
          membership.type = m.type
          membership.membership_status = m.membership_status
          raise 'Could not save membership' if !(grouping.memberships << membership)
        end
        # Ensure all student members have permissions on their group repositories
        grouping.update_repository_permissions
      end
    end
  end
end
# Add a group and corresponding grouping as provided in
# the passed in Array.
# Format: [ groupname, repo_name, member, member, etc ]
# The groupname, repo_name must not pre exist, each member should exist and
# not belong to a different grouping for the same assignment.
# If these requirements are not satisfied, the group and the grouping is
# not created.
# Returns nil on success (including the "exact duplicate row" case) or an
# error-message string on failure. Raises CSVInvalidLineError for rows that
# cannot be reconciled with existing groups.
def add_csv_group(row)
  return if row.length.zero?
  begin
    row.map! { |item| item.strip }
  rescue NoMethodError
    # A nil cell means the CSV row is malformed.
    raise CSVInvalidLineError
  end
  group = Group.where(group_name: row.first).first
  unless group.nil?
    if group.repo_name != row[1]
      # CASE: Group already exists but the repo name is different
      duplicate_group_error = I18n.t('csv.group_with_different_repo',
                                     group_name: row[0])
      raise CSVInvalidLineError, duplicate_group_error
    else
      any_grouping = Grouping.find_by group_id: group.id
      if any_grouping.nil?
        # CASE: Group exists with same repo name but has no grouping
        #       associated with it for any assignment
        # Use existing group and create a new grouping between the existing
        # group and the given students and return without error
        add_new_grouping_for_group(row, group)
        return
      else
        grouping_for_current_assignment = group.grouping_for_assignment(id)
        if grouping_for_current_assignment.nil?
          if same_membership_as_csv_row?(row, any_grouping)
            # CASE: Group already exists with the same repo name and has a
            #       grouping for another assignment with the same membership
            # Use existing group and create a new grouping between the
            # existing group and the given students and return without error
            add_new_grouping_for_group(row, group)
            return
          else
            # CASE: Group already exists with the same repo name and has
            #       a grouping for another assignment BUT with different
            #       membership
            # The existing groupings and the current group is not compatible
            # Return an error.
            duplicate_group_error = I18n.t(
              'csv.group_with_different_membership_different_assignment',
              group_name: row[0])
            raise CSVInvalidLineError, duplicate_group_error
          end
        else
          if same_membership_as_csv_row?(row,
                                         grouping_for_current_assignment)
            # CASE: Group already exists with the same repo name and also has
            #       a grouping for the current assignment with the same
            #       membership
            # No new group or grouping created. Since the exact group given by
            # the csv file already exists treat this as a successful case
            # and don't return an error
            return
          else
            # CASE: Group already exists with the same repo name and has a
            #       grouping for the current assignment BUT the membership is
            #       different.
            # Return error since the membership is different
            duplicate_group_error = I18n.t(
              'csv.group_with_different_membership_current_assignment',
              group_name: row[0])
            raise CSVInvalidLineError, duplicate_group_error
          end
        end
      end
    end
  end
  # If any of the given members do not exist or is part of another group,
  # an error is returned without creating a group
  unless membership_unique?(row)
    if !errors[:groupings].blank?
      # groupings error set if a member is already in different group
      membership_error = I18n.t('csv.memberships_not_unique',
                                group_name: row[0],
                                student_user_name: errors.get(:groupings)
                                                         .first)
      errors.delete(:groupings)
    else
      # student_membership error set if a member does not exist
      membership_error = I18n.t(
        'csv.member_does_not_exist',
        group_name: row[0],
        student_user_name: errors.get(:student_memberships).first)
      errors.delete(:student_memberships)
    end
    return membership_error
  end
  # If this assignment is an individual assignment, then the repostiory
  # name is set to be the student's user name. If this assignment is a
  # group assignment then the repository name is taken from the csv file
  if is_candidate_for_setting_custom_repo_name?(row)
    repo_name = row[2]
  else
    repo_name = row[1]
  end
  # If a repository already exists with the same repo name as the one given
  # in the csv file, error is returned and the group is not created
  begin
    if repository_already_exists?(repo_name)
      repository_error = I18n.t('csv.repository_already_exists',
                                group_name: row[0],
                                repo_path: errors.get(:repo_name).last)
      errors.delete(:repo_name)
      return repository_error
    end
  rescue TypeError
    raise CSV::MalformedCSVError
  end
  # At this point we can be sure that the group_name, memberships and
  # the repo_name does not already exist. So we create the new group.
  group = Group.new
  group.group_name = row[0]
  group.repo_name = repo_name
  # Note: after_create hook build_repository might raise
  # Repository::RepositoryCollision. If it does, it adds the colliding
  # repo_name to errors.on_base. This is how we can detect repo
  # collisions here. Unfortunately, we can't raise an exception
  # here, because we still want the grouping to be created. This really
  # shouldn't happen anyway, because the lookup earlier should prevent
  # repo collisions e.g. when uploading the same CSV file twice.
  group.save
  unless group.errors[:base].blank?
    collision_error = I18n.t('csv.repo_collision_warning',
                             repo_name: group.errors.on_base,
                             group_name: row[0])
  end
  add_new_grouping_for_group(row, group)
  # nil on clean success; the collision warning string otherwise.
  collision_error
end
# All students who currently hold a membership in some grouping of this
# assignment.
def grouped_students
  student_memberships.map(&:user)
end
# All visible (non-hidden) students who are not in any grouping yet.
def ungrouped_students
  Student.where(hidden: false) - grouped_students
end
# Groupings that satisfy the assignment's rules: either admin-approved or
# holding at least group_min accepted members.
def valid_groupings
  groupings.includes(student_memberships: :user).select do |grouping|
    grouping.admin_approved ||
      grouping.student_memberships.count >= group_min
  end
end
# Complement of valid_groupings.
def invalid_groupings
  groupings - valid_groupings
end
# Groupings that have at least one TA assigned (deduplicated).
def assigned_groupings
  groupings.joins(:ta_memberships).includes(ta_memberships: :user).uniq
end
# Groupings that have no TA assigned yet.
def unassigned_groupings
  groupings - assigned_groupings
end
# Get a list of subversion client commands to be used for scripting
# One `svn checkout` line per grouping with a collected submission, pinned
# to that submission's revision.
def get_svn_checkout_commands
  self.groupings.map do |grouping|
    submission = grouping.current_submission_used
    next if submission.nil?
    "svn checkout -r #{submission.revision_number} " +
      "#{grouping.group.repository_external_access_url}/" +
      "#{repository_folder} \"#{grouping.group.group_name}\""
  end.compact
end
# Get a list of group_name, repo-url pairs
# Rendered as CSV text, one row per grouping.
def get_svn_repo_list
  CSV.generate do |csv|
    self.groupings.each do |grouping|
      group = grouping.group
      csv << [group.group_name,group.repository_external_access_url]
    end
  end
end
# Get a detailed CSV report of criteria based marks
# (includes each criterion, with it's out-of value) for this assignment.
# Produces CSV rows such as the following:
#   student_name,95.22222,3,4,2,5,5,4,0/2
# Criterion values should be read in pairs. I.e. 2,3 means 2 out-of 3.
# Last column are grace-credits.
def get_detailed_csv_report
  out_of = max_mark
  students = Student.all
  MarkusCSV.generate(students) do |student|
    result = [student.user_name]
    grouping = student.accepted_grouping_for(self.id)
    if grouping.nil? || !grouping.has_submission?
      # No grouping/no submission
      # total percentage, total_grade
      result.concat(['','0'])
      # mark, max_mark
      result.concat(Array.new(criteria_count, '').
                         zip(get_criteria.map(&:max_mark)).flatten)
      # extra-mark, extra-percentage
      result.concat(['',''])
    else
      # Fill in actual values, since we have a grouping
      # and a submission.
      submission = grouping.current_submission_used
      result.concat([submission.get_latest_result.total_mark / out_of * 100,
                     submission.get_latest_result.total_mark])
      get_marks_list(submission).each do |mark|
        result.concat(mark)
      end
      result.concat([submission.get_latest_result.get_total_extra_points,
                     submission.get_latest_result.get_total_extra_percentage])
    end
    # push grace credits info
    grace_credits_data = student.remaining_grace_credits.to_s + '/' + student.grace_credits.to_s
    result.push(grace_credits_data)
    result
  end
end
# Returns an array of [mark, max_mark].
# Missing or unset marks are rendered as '' so CSV columns stay aligned.
def get_marks_list(submission)
  get_criteria.map do |criterion|
    mark = submission.get_latest_result.marks.find_by(markable_id: criterion.id)
    [(mark.nil? || mark.mark.nil?) ? '' : mark.mark,
     criterion.max_mark]
  end
end
# Swap in +new_submission_rule+, destroying the previous rule when one
# exists, then persist the assignment.
def replace_submission_rule(new_submission_rule)
  self.submission_rule.destroy unless self.submission_rule.nil?
  self.submission_rule = new_submission_rule
  self.save
end
# Position to assign to the next criterion added to this assignment.
def next_criterion_position
  # We're using count here because this fires off a DB query, thus
  # grabbing the most up-to-date count of the criteria.
  get_criteria.count > 0 ? get_criteria.last.position + 1 : 1
end
# Returns a filtered list of criteria.
# +user_visibility+: :all, :ta, or :peer. +type+ and +options[:includes]+
# are forwarded to the per-visibility helpers.
def get_criteria(user_visibility = :all, type = :all, options = {})
  include_opt = options[:includes]
  case user_visibility
  when :all
    get_all_criteria(type, include_opt)
  when :ta
    get_ta_visible_criteria(type, include_opt)
  when :peer
    get_peer_visible_criteria(type, include_opt)
  end
end
# All criteria of the requested +type+ (:all, :rubric, :flexible or
# :checkbox), eager-loading +include_opt+ and ordered by position.
def get_all_criteria(type, include_opt)
  case type
  when :all
    # The three criterion kinds live in separate tables; merge then sort.
    combined = rubric_criteria.includes(include_opt) +
               flexible_criteria.includes(include_opt) +
               checkbox_criteria.includes(include_opt)
    combined.sort_by(&:position)
  when :rubric
    rubric_criteria.includes(include_opt).order(:position)
  when :flexible
    flexible_criteria.includes(include_opt).order(:position)
  when :checkbox
    checkbox_criteria.includes(include_opt).order(:position)
  end
end
# Criteria of +type+ that are visible to TAs.
def get_ta_visible_criteria(type, include_opt)
  get_all_criteria(type, include_opt).select(&:ta_visible)
end
# Criteria of +type+ that are visible to peer reviewers.
def get_peer_visible_criteria(type, include_opt)
  get_all_criteria(type, include_opt).select(&:peer_visible)
end
# Total number of criteria (all kinds) for this assignment.
def criteria_count
  get_criteria.size
end
# Returns an array with the number of groupings who scored between
# certain percentage ranges [0-5%, 6-10%, ...]
# intervals defaults to 20
def grade_distribution_as_percentage(intervals=20)
  distribution = Array.new(intervals, 0)
  out_of = max_mark
  # Nothing to bucket when the assignment is worth 0 marks.
  if out_of == 0
    return distribution
  end
  steps = 100 / intervals # number of percentage steps in each interval
  groupings = self.groupings.includes([{current_submission_used: :results}])
  groupings.each do |grouping|
    submission = grouping.current_submission_used
    if submission && submission.has_result?
      result = submission.get_latest_completed_result
      unless result.nil?
        percentage = (result.total_mark / out_of * 100).ceil
        # Bucket boundaries: 0% goes to the first bucket, >=100% to the
        # last; exact multiples of the step size belong to the bucket below.
        if percentage == 0
          distribution[0] += 1
        elsif percentage >= 100
          distribution[intervals - 1] += 1
        elsif (percentage % steps) == 0
          distribution[percentage / steps - 1] += 1
        else
          distribution[percentage / steps] += 1
        end
      end
    end
  end # end of groupings loop
  distribution
end
# Returns all the TAs associated with the assignment
def tas
  Ta.find(ta_memberships.map(&:user_id))
end
# Returns all the submissions that have been graded (completed)
# i.e. the latest result of each fully-marked grouping's used submission.
def graded_submission_results
  groupings.each_with_object([]) do |grouping, results|
    next unless grouping.marking_completed?
    submission = grouping.current_submission_used
    results.push(submission.get_latest_result) unless submission.nil?
  end
end
# Groupings that have made at least one submission.
def groups_submitted
  groupings.select(&:has_submission?)
end
# Number of groupings assigned overall, or to the TA +ta_id+.
# NOTE(review): the TA branch counts ta_memberships rows, which matches
# groupings only if a TA holds at most one membership per grouping — confirm.
def get_num_assigned(ta_id = nil)
  if ta_id.nil?
    groupings.size
  else
    ta_memberships.where(user_id: ta_id).size
  end
end
# Number of groupings whose marking is complete, overall or for TA +ta_id+.
# NOTE(review): `groupings.count(marking_completed: true)` passes a Hash to
# ActiveRecord's count — verify this filters as intended rather than being
# treated as a column argument (a where-clause form may be what was meant).
def get_num_marked(ta_id = nil)
  if ta_id.nil?
    groupings.count(marking_completed: true)
  else
    n = 0
    # Eager-load the associations marking_completed? walks to avoid N+1.
    ta_memberships.includes(grouping: [{current_submission_used: [:submitted_remark, :results]}]).where(user_id: ta_id).find_each do |x|
      x.grouping.marking_completed? && n += 1
    end
    n
  end
end
# Number of annotations on fully-marked submissions, overall or restricted
# to the groupings marked by TA +ta_id+.
def get_num_annotations(ta_id = nil)
  if ta_id.nil?
    num_annotations_all
  else
    n = 0
    ta_memberships.where(user_id: ta_id).find_each do |x|
      # Only count annotations once the grouping is fully marked.
      x.grouping.marking_completed? &&
        n += x.grouping.current_submission_used.annotations.size
    end
    n
  end
end
# Total annotations across all used submissions of this assignment whose
# results are submitted remarks or ordinary (non-remark) results.
def num_annotations_all
  groupings = Grouping.arel_table
  submissions = Submission.arel_table
  # Used submissions belonging to this assignment.
  subs = Submission.joins(:grouping)
                   .where(groupings[:assignment_id].eq(id)
                   .and(submissions[:submission_version_used].eq(true)))
  # Keep only submissions with a countable result.
  res = Result.submitted_remarks_and_all_non_remarks
              .where(submission_id: subs.pluck(:id))
  filtered_subs = subs.where(id: res.pluck(:submission_id))
  Annotation.joins(:submission_file)
            .where(submission_files:
                     { submission_id: filtered_subs.pluck(:id) }).size
end
# Mean number of annotations per fully-marked submission, optionally
# restricted to the groupings marked by TA +ta_id+. Rounded to 2 decimals;
# 0 when nothing has been marked yet.
def average_annotations(ta_id = nil)
  marked = get_num_marked(ta_id)
  if marked.zero?
    0.round(2)
  else
    (get_num_annotations(ta_id).to_f / marked).round(2)
  end
end
# Assign graders to a criterion for this assignment.
# Raise a CSVInvalidLineError if the criterion or a grader doesn't exist.
def add_graders_to_criterion(criterion_name, graders)
  criterion = get_criteria.find { |crit| crit.name == criterion_name }
  raise CSVInvalidLineError if criterion.nil?
  raise CSVInvalidLineError unless graders.all? { |g| Ta.exists?(user_name: g) }
  criterion.add_tas_by_user_name_array(graders)
end
# Returns the groupings of this assignment associated with the given section
# (matched via the grouping inviter's section).
def section_groupings(section)
  groupings.select do |grouping|
    grouping.inviter.present? &&
      grouping.inviter.has_section? &&
      grouping.inviter.section.id == section.id
  end
end
# True if at least one submission for this assignment has been collected.
def has_a_collected_submission?
  # exists? lets the database stop at the first matching row instead of
  # counting every collected submission (was `.count > 0`).
  submissions.where(submission_version_used: true).exists?
end
# Returns the groupings of this assignment that have no associated section
def sectionless_groupings
  groupings.select do |grouping|
    grouping.inviter.present? &&
      !grouping.inviter.has_section?
  end
end
# TODO: This is currently disabled until starter code is automatically added
# to groups.
def can_upload_starter_code?
  #groups.size == 0
  false
end
# Returns true if this is a peer review, meaning it has a parent assignment,
# false otherwise.
def is_peer_review?
  !parent_assignment_id.nil?
end
# Returns true if this is a parent assignment that has a child peer review
# assignment.
def has_peer_review_assignment?
  !pr_assignment.nil?
end
# after_save hook: when has_peer_review is enabled and no child peer-review
# assignment exists yet, create one mirroring this assignment's settings.
def create_peer_review_assignment_if_not_exist
  if has_peer_review and Assignment.where(parent_assignment_id: id).empty?
    peerreview_assignment = Assignment.new
    peerreview_assignment.parent_assignment = self
    peerreview_assignment.submission_rule = NoLateSubmissionRule.new
    peerreview_assignment.assignment_stat = AssignmentStat.new
    peerreview_assignment.token_period = 1
    peerreview_assignment.unlimited_tokens = false
    peerreview_assignment.short_identifier = short_identifier + '_pr'
    peerreview_assignment.description = description
    peerreview_assignment.repository_folder = repository_folder
    peerreview_assignment.due_date = due_date
    peerreview_assignment.is_hidden = true
    # We do not want to have the database in an inconsistent state, so we
    # need to have the database rollback the 'has_peer_review' column to
    # be false
    if not peerreview_assignment.save
      raise ActiveRecord::Rollback
    end
  end
end
### REPO ###
# Name of the starter-code repository for this assignment.
def repository_name
  "#{short_identifier}_starter_code"
end
# after_create hook: create this assignment's starter-code repository.
# Always returns true so the callback chain is never halted.
def build_repository
  # create repositories if and only if we are admin
  return true unless MarkusConfigurator.markus_config_repository_admin?
  # only create if we can add starter code
  return true unless can_upload_starter_code?
  begin
    Repository.get_class(MarkusConfigurator.markus_config_repository_type)
              .create(File.join(MarkusConfigurator.markus_config_repository_storage,
                                repository_name))
  rescue Repository::RepositoryCollision => e
    # log the collision
    # NOTE(review): this records self.repo_name on :base — confirm Assignment
    # actually responds to repo_name (repository_name may have been meant).
    errors.add(:base, self.repo_name)
    m_logger = MarkusLogger.instance
    m_logger.log("Creating repository '#{repository_name}' caused repository collision. " +
                 "Error message: '#{e.message}'",
                 MarkusLogger::ERROR)
  end
  true
end
# Return a repository object, if possible
# Opens a NEW handle on every call; callers are responsible for closing it
# (prefer access_repo). Raises when the repository does not exist on disk.
def repo
  repo_loc = File.join(MarkusConfigurator.markus_config_repository_storage, repository_name)
  if Repository.get_class(MarkusConfigurator.markus_config_repository_type).repository_exists?(repo_loc)
    Repository.get_class(MarkusConfigurator.markus_config_repository_type).open(repo_loc)
  else
    raise 'Repository not found and MarkUs not in authoritative mode!' # repository not found, and we are not repo-admin
  end
end
# Yields a repository object, if possible, and closes it after the block
# finishes — even when the block raises.
def access_repo
  # BUG FIX: `repo` opens a NEW handle on every call, so the previous
  # `yield repo; repo.close()` closed a second, freshly-opened handle and
  # leaked the one that was yielded; it also closed nothing on exception.
  repository = repo
  begin
    yield repository
  ensure
    repository.close()
  end
end
### /REPO ###
private
# Returns true if we are safe to set the repository name
# to a non-autogenerated value. Called by add_csv_group.
def is_candidate_for_setting_custom_repo_name?(row)
  # Repository name can be customized if
  # - this assignment is set up to allow external submits only
  # - group_max = 1
  # - there's only one student member in this row of the csv and
  # - the group name is equal to the only group member
  return false unless MarkusConfigurator.markus_config_repository_admin?
  return false unless self.allow_web_submits == false
  return false unless row.length == 3 && self.group_max == 1
  return false if row[2].blank?
  row[0] == row[2]
end
# before_save hook: let the submission rule recompute its collection time.
def reset_collection_time
  submission_rule.reset_collection_time
end
# after_save hook: propagate a change of tokens_per_period to every
# already-issued token (old value -> new value).
def update_assigned_tokens
  self.tokens.each do |t|
    t.update_tokens(tokens_per_period_was, tokens_per_period)
  end
end
# Create a Grouping joining +group+ to this assignment and add the students
# named in the CSV +row+ (columns 2..) as members. The first valid member
# becomes the inviter; repo-permission updates are deferred to a bulk pass.
def add_new_grouping_for_group(row, group)
  # Create a new Grouping for this assignment and the newly
  # crafted group
  grouping = Grouping.new(assignment: self, group: group)
  grouping.save
  # Form groups
  start_index_group_members = 2
  (start_index_group_members..(row.length - 1)).each do |i|
    student = Student.find_by user_name: row[i]
    if student
      if grouping.student_membership_number == 0
        # Add first valid member as inviter to group.
        grouping.group_id = group.id
        grouping.save # grouping has to be saved, before we can add members
        # We could call grouping.add_member, but it updates repo permissions
        # For performance reasons in the csv upload we will just create the
        # member here, and do the permissions update as a bulk operation.
        member = StudentMembership.new(
          user: student,
          membership_status: StudentMembership::STATUSES[:inviter],
          grouping: grouping)
        member.save
      else
        member = StudentMembership.new(
          user: student,
          membership_status: StudentMembership::STATUSES[:accepted],
          grouping: grouping)
        member.save
      end
    end
  end
end
#
# Return true if for each membership given, a corresponding student exists
# and if they are not part of a different grouping for the same assignment
#
# On failure, records the offending user name under errors[:groupings]
# (already grouped) or errors[:student_memberships] (unknown student) so the
# caller can build a precise message.
def membership_unique?(row)
  start_index_group_members = 2 # index where student names start in the row
  (start_index_group_members..(row.length - 1)).each do |i|
    student = Student.find_by user_name: row[i]
    if student
      unless student.accepted_grouping_for(id).nil?
        errors.add(:groupings, student.user_name)
        return false
      end
    else
      errors.add(:student_memberships, row[i])
      return false
    end
  end
  true
end
# Return true if the given membership in the csv row is the exact same as the
# membership of the given existing_grouping.
def same_membership_as_csv_row?(row, existing_grouping)
  start_index_group_members = 2 # index where student names start in the row
  # check if all the members given in the csv file exists and belongs to the
  # given grouping
  (start_index_group_members..(row.length - 1)).each do |i|
    student = Student.find_by user_name: row[i]
    if student.nil?
      # Student doesn't exist in the database
      # ==> membership cannot be the same
      return false
    end
    grouping = student
               .accepted_grouping_for(existing_grouping.assignment.id)
    if grouping.nil?
      # Student doesn't belong to a grouping for the given assignment
      # ==> membership cannot be the same
      return false
    elsif grouping.id != existing_grouping.id
      # Student belongs to a different grouping for the given assignment
      # ==> membership is different
      return false
    end
  end
  # BUG FIX: the size comparison used to sit INSIDE the loop above, so the
  # method returned after inspecting only the FIRST student in the row.
  # Check it once, after every row member has been verified: the grouping
  # must not contain extra students beyond those listed in the csv row.
  num_students_in_csv_row = row.length - start_index_group_members
  num_students_in_existing_grouping = existing_grouping.accepted_students.length
  num_students_in_csv_row == num_students_in_existing_grouping
end
end
|
mrkosterix/lua-commons
|
lua-commons-impl/lua-commons-impl-lua-5.2/lua-commons-impl-lua-tests/src/test/java/org/lua/commons/impl/nativelua/TestCoroutines.java
|
<reponame>mrkosterix/lua-commons
package org.lua.commons.impl.nativelua;
import org.lua.commons.nativeapi.LuaStateApi;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
 * Tests for Lua coroutine support (lua_resume / lua_yield / lua_yieldk)
 * driven through the native {@link LuaStateApi} JNI bridge.
 */
public class TestCoroutines {

    // Low-level Lua API wrapper used to drive the interpreter.
    private LuaStateApi state;
    // Opaque pointer to the native lua_State owned by each test.
    private long peer;

    @BeforeMethod
    public void initLua() {
        // Fresh state with the standard libraries for every test, so
        // coroutine status cannot leak between tests.
        state = new NativeLuaStateApiFactory().getLuaStateApi();
        peer = state._Lnewstate();
        state._Lopenlibs(peer);
    }

    @AfterMethod
    public void closeLua() {
        // Release the native state to avoid leaking interpreter memory.
        state._close(peer);
    }

    @Test
    public void testYield() {
        // Register yieldfunc (below) as a Lua global.
        state._pushjclosure(peer, this, "yieldfunc", 0);
        state._setglobal(peer, "yieldfunc");
        // foo concatenates via yieldfunc (which yields) and appends "!!".
        state._Ldostring(peer, "function foo(a, b)\nres = yieldfunc(a, b)\nreturn res .. \"!!\"\nend");
        state._getglobal(peer, "foo");
        state._pushstring(peer, "Hello, ");
        state._pushstring(peer, "world!");
        // First resume runs foo until yieldfunc yields one value.
        Assert.assertEquals(state._resume(peer, peer, 2), LuaStateApi.LUA_YIELD);
        Assert.assertEquals(state._gettop(peer), 1);
        // Second resume feeds the yielded value back as yieldfunc's result;
        // foo then finishes normally with "Hello, world!" .. "!!".
        Assert.assertEquals(state._resume(peer, peer, 1), LuaStateApi.LUA_OK);
        Assert.assertEquals(state._gettop(peer), 1);
        Assert.assertTrue(state._isstring(peer, -1));
        Assert.assertEquals(state._tostring(peer, -1), "Hello, world!!!");
    }

    /** Called from Lua: concatenates its two arguments and yields the result. */
    public int yieldfunc(long peer) {
        LuaStateApi state = new NativeLuaStateApi();
        state._concat(peer, 2);
        state._yield(peer, 1);
        return 0;
    }

    @Test
    public void testYieldk() {
        // Same flow as testYield, but the "!!" suffix is appended by the
        // yieldk continuation instead of by the Lua chunk.
        state._pushjclosure(peer, this, "yieldkfunc", 0);
        state._setglobal(peer, "yieldkfunc");
        state._Ldostring(peer, "function foo(a, b)\nres = yieldkfunc(a, b)\nreturn res\nend");
        state._getglobal(peer, "foo");
        state._pushstring(peer, "Hello, ");
        state._pushstring(peer, "world!");
        Assert.assertEquals(state._resume(peer, peer, 2), LuaStateApi.LUA_YIELD);
        Assert.assertEquals(state._gettop(peer), 1);
        // Resuming invokes yieldkcontinuation, which appends "!!".
        Assert.assertEquals(state._resume(peer, peer, 1), LuaStateApi.LUA_OK);
        Assert.assertEquals(state._gettop(peer), 1);
        Assert.assertTrue(state._isstring(peer, -1));
        Assert.assertEquals(state._tostring(peer, -1), "Hello, world!!!");
    }

    /** Called from Lua: concatenates its arguments, then yields with a continuation. */
    public int yieldkfunc(long peer) {
        LuaStateApi state = new NativeLuaStateApi();
        state._concat(peer, 2);
        state._yieldk(peer, 1, this, "yieldkcontinuation");
        return 0;
    }

    /** Continuation run on resume: appends "!!" to the yielded string. */
    public int yieldkcontinuation(long peer) {
        LuaStateApi state = new NativeLuaStateApi();
        state._pushstring(peer, "!!");
        state._concat(peer, 2);
        return 1;
    }
}
|
coderd-repos/eva
|
eva-front/src/api/system/department.js
|
<filename>eva-front/src/api/system/department.js
import request from '@/utils/request'
// Fetch the full department tree
export function fetchTree () {
  return request.post('/system/department/tree')
}
// Fetch the users belonging to a department
export function fetchUserList (data) {
  return request.post('/system/department/users', data)
}
// Create a new department
export function create (data) {
  return request.post('/system/department/create', data)
}
// Update an existing department by id
export function updateById (data) {
  return request.post('/system/department/updateById', data)
}
// Delete a single department by id
export function deleteById (id) {
  return request.get(`/system/department/delete/${id}`)
}
// Delete several departments in one request (ids passed as a query param)
export function deleteByIdInBatch (ids) {
  return request.get('/system/department/delete/batch', {
    params: {
      ids
    }
  })
}
|
julienchastang/thredds
|
opendap/src/main/java/opendap/util/SortedTable.java
|
<filename>opendap/src/main/java/opendap/util/SortedTable.java
/////////////////////////////////////////////////////////////////////////////
// This file is part of the "Java-DAP" project, a Java implementation
// of the OPeNDAP Data Access Protocol.
//
// Copyright (c) 2010, OPeNDAP, Inc.
// Copyright (c) 2002,2003 OPeNDAP, Inc.
//
// Author: <NAME> <<EMAIL>>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms,
// with or without modification, are permitted provided
// that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of the OPeNDAP nor the names of its contributors may
// be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/////////////////////////////////////////////////////////////////////////////
package opendap.util;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Vector;
/**
 * SortedTable looks exactly like Hashtable but preserves the insertion order
 * of elements. While this results in slower performance, it ensures that
 * the DAS will always be printed in the same order in which it was read.
 *
 * Implementation notes (legacy, do not "modernize" casually):
 * - Backed by two parallel raw {@code Vector}s (keys[i] maps to elements[i]),
 *   so lookups are O(n) linear scans rather than hashed.
 * - Raw (non-generic) types and the {@code Dictionary} base are kept for
 *   serialization compatibility (see serialVersionUID).
 * - Synchronization is per-method and not uniform: size/isEmpty/keys/elements
 *   are unsynchronized while get/put/remove are synchronized.
 */
public final class SortedTable extends Dictionary implements java.io.Serializable {
    static final long serialVersionUID = 1;

    // Parallel vectors: keys.get(i) is mapped to elements.get(i).
    private Vector keys, elements;

    public SortedTable() {
        keys = new Vector();
        elements = new Vector();
    }

    /**
     * Returns the number of keys in this table.
     */
    public int size() {
        return keys.size();
    }

    /**
     * Tests if this table is empty.
     */
    public boolean isEmpty() {
        return keys.isEmpty();
    }

    /**
     * Returns an enumeration of the keys in this table, in insertion order.
     */
    public Enumeration keys() {
        return keys.elements();
    }

    /**
     * Returns an enumeration of the values in this table, in insertion order.
     */
    public Enumeration elements() {
        return elements.elements();
    }

    /**
     * Returns the value to which the key is mapped in this table.
     * Linear scan: O(n) in the number of entries.
     *
     * @param key a key in this table.
     * @return the value to which the key is mapped, or null if the key is not
     *         mapped to any value in the table.
     */
    public synchronized Object get(Object key) {
        int index = keys.indexOf(key);

        if (index != -1)
            return elements.elementAt(index);
        else
            return null;
    }

    /**
     * Returns the key at the specified index (insertion position).
     *
     * @param index the index to return
     * @return the key at the specified index.
     */
    public synchronized Object getKey(int index) {
        return keys.elementAt(index);
    }

    /**
     * Returns the element at the specified index (insertion position).
     *
     * @param index the index to return
     * @return the element at the specified index.
     */
    public synchronized Object elementAt(int index) {
        return elements.elementAt(index);
    }

    /**
     * Maps the specified key to the specified value in this table.
     * Re-putting an existing key replaces the value in place, preserving the
     * key's original insertion position.
     *
     * @param key   the key
     * @param value the value
     * @return the previous value to which the key is mapped, or null if the
     *         key did not have a previous mapping.
     * @throws NullPointerException if the key or value is null.
     */
    public synchronized Object put(Object key, Object value) throws NullPointerException {
        if (key == null || value == null)
            throw new NullPointerException();

        int index = keys.indexOf(key);
        if (index != -1) {
            Object prev = elements.elementAt(index);
            elements.setElementAt(value, index);
            return prev;
        } else {
            keys.addElement(key);
            elements.addElement(value);
            return null;
        }
    }

    /**
     * Removes the key (and its corresponding value) from this table. If the
     * key is not in the table, do nothing.
     *
     * @param key the key to remove.
     * @return the value to which the key had been mapped, or null if the key
     *         did not have a mapping.
     */
    public synchronized Object remove(Object key) {
        int index = keys.indexOf(key);
        if (index != -1) {
            Object prev = elements.elementAt(index);
            keys.removeElementAt(index);
            elements.removeElementAt(index);
            return prev;
        } else {
            return null;
        }
    }

    /** Returns a Vector containing the elements in the SortedTable. This is
     used for more efficient implementation of opendap.dap.Util.uniqueNames() by
     opendap.dap.DDS.checkSemantics()
     Note: exposes the INTERNAL vector (no defensive copy) — callers must not
     mutate it.
     @return A Vector containing the elements in this SortedTable.
     */
    public Vector getElementVector() {
        return elements;
    }
}
|
bobheadlabs/sourcegraph
|
lib/batches/execution/cache/cache_test.go
|
package cache
import (
"fmt"
"os"
"testing"
"gopkg.in/yaml.v2"
"github.com/sourcegraph/sourcegraph/lib/batches"
"github.com/sourcegraph/sourcegraph/lib/batches/template"
)
const testExecutionCacheKeyEnv = "TEST_EXECUTION_CACHE_KEY_ENV"
func TestExecutionKey_RegressionTest(t *testing.T) {
	// This test is a regression that should fail when we change something that
	// influences the cache key generation, which would lead to busted caches.
	//
	// If this test fails and you're sure about the change, update the `want`
	// value below.
	var steps []batches.Step
	// Three representative steps: one with outputs, one with a conditional
	// `if`, and one whose env list references an external variable by name.
	if err := yaml.Unmarshal([]byte(`
- run: if [[ -f "package.json" ]]; then cat package.json | jq -j .name; fi
  container: jiapantw/jq-alpine:latest
  outputs:
    projectName:
      value: ${{ step.stdout }}
- run: echo "This only runs in automation-testing" >> message.txt
  container: alpine:3
  if: ${{ eq repository.name "github.com/sourcegraph/automation-testing" }}
- run: bar
  container: alpine:3
  env:
    - FILE_TO_CHECK: .tool-versions
    - `+testExecutionCacheKeyEnv+`
`), &steps); err != nil {
		t.Fatal(err)
	}
	// Every field below feeds into the cache key; changing any of them (or
	// the key derivation itself) must change the resulting hash.
	key := ExecutionKey{
		Repository: batches.Repository{
			ID:          "graphql-id",
			Name:        "github.com/sourcegraph/src-cli",
			BaseRef:     "refs/heads/f00b4r",
			BaseRev:     "c0mmit",
			FileMatches: []string{"aa.go"},
		},
		Path:                  "path/to/workspace",
		OnlyFetchWorkspace:    true,
		Steps:                 steps,
		BatchChangeAttributes: &template.BatchChangeAttributes{
			Name:        "Batch Change Name",
			Description: "Batch Change Description",
		},
	}
	have, err := key.Key()
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	// Golden value: the known-good cache key for the fixture above.
	want := "fsDKj1Uf1jNMhRXCJXE6nQ"
	if have != want {
		t.Fatalf("regression detected! cache key changed. have=%q, want=%q", have, want)
	}
}
func TestExecutionKeyWithEnvResolution(t *testing.T) {
	// Let's set up an array of steps that we can test with. One step will
	// depend on an environment variable outside the spec.
	var steps []batches.Step
	if err := yaml.Unmarshal([]byte(`
- run: foo
  env:
    FOO: BAR
- run: bar
  env:
    - FOO: BAR
    - `+testExecutionCacheKeyEnv+`
`), &steps); err != nil {
		t.Fatal(err)
	}
	// And now we can set up a key to work with.
	key := ExecutionKeyWithGlobalEnv{
		ExecutionKey: &ExecutionKey{
			Repository: batches.Repository{
				ID:          "graphql-id",
				Name:        "github.com/sourcegraph/src-cli",
				BaseRef:     "refs/heads/f00b4r",
				BaseRev:     "c0mmit",
				FileMatches: []string{"aa.go"},
			},
			Steps: steps,
		},
		// The ambient process environment; entries referenced by step env
		// lists are resolved out of this slice when the key is computed.
		GlobalEnv: os.Environ(),
	}
	// All righty. Let's get ourselves a baseline cache key here.
	initial, err := key.Key()
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	// Let's set an unrelated environment variable and ensure we still have the
	// same key.
	key.GlobalEnv = append(key.GlobalEnv, fmt.Sprintf("%s=%s", testExecutionCacheKeyEnv+"_UNRELATED", "foo"))
	have, err := key.Key()
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	if initial != have {
		t.Errorf("unexpected change in key: initial=%q have=%q", initial, have)
	}
	// Let's now set the environment variable referenced in the steps and verify
	// that the cache key does change.
	key.GlobalEnv = append(key.GlobalEnv, fmt.Sprintf("%s=%s", testExecutionCacheKeyEnv, "foo"))
	have, err = key.Key()
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	if initial == have {
		t.Errorf("unexpected lack of change in key: %q", have)
	}
	// And, just to be sure, let's change it again.
	// (Overwrite the last entry rather than appending a duplicate.)
	key.GlobalEnv[len(key.GlobalEnv)-1] = fmt.Sprintf("%s=%s", testExecutionCacheKeyEnv, "bar")
	again, err := key.Key()
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	if initial == again || have == again {
		t.Errorf("unexpected lack of change in key: %q", again)
	}
	// Finally, if we unset the environment variable again, we should get a key
	// that matches the initial key.
	key.GlobalEnv = key.GlobalEnv[:len(key.GlobalEnv)-1]
	have, err = key.Key()
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	if initial != have {
		t.Errorf("unexpected change in key: initial=%q have=%q", initial, have)
	}
}
|
superzhc/SuperzHadoop
|
superz-hadoop/superz-hadoop-spark/src/main/java/com/github/superzhc/hadoop/spark/DStreamDemo.java
|
package com.github.superzhc.hadoop.spark;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.bouncycastle.util.Strings;
import scala.Tuple2;
import java.util.Arrays;
import java.util.Iterator;
/**
 * Minimal Spark Streaming demo: reads lines from a local socket every three
 * seconds and prints each received RDD.
 * <p>
 * Created 2020-07-21 by superz.
 */
public class DStreamDemo
{
    public static void main(String[] args) throws InterruptedException {
        // local[2]: one thread for the socket receiver, one for processing.
        SparkConf conf = new SparkConf();
        conf.setAppName("superz").setMaster("local[2]");
        // 3-second micro-batch interval.
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(3));

        // Source: text lines from a socket on localhost:8090 (e.g. `nc -lk 8090`).
        JavaDStream<String> jds = jssc.socketTextStream("localhost", 8090);
        // Dump every micro-batch RDD to stdout.
        jds.foreachRDD(new VoidFunction<JavaRDD<String>>() {
            @Override
            public void call(JavaRDD<String> stringJavaRDD) throws Exception {
                System.out.println("遍历RDD,start...");
                stringJavaRDD.foreach(d-> System.out.println(d));
                System.out.println("遍历RDD,stop!");
            }
        });

        // Word-count variant, kept for reference but disabled.
//        JavaDStream<String> words = jds.flatMap(new FlatMapFunction<String, String>()
//        {
//            @Override
//            public Iterator<String> call(String s) throws Exception {
//                String[] ss = s.split(" ");
//                return Arrays.asList(ss).iterator();
//            }
//        });
//        JavaPairDStream<String, Integer> pairs = words.mapToPair(d -> new Tuple2(d, 1));
//        JavaPairDStream<String, Integer> wordCounts = pairs.reduceByKey((i1, i2) -> i1 + i2);
//        wordCounts.print();

        // Start the streaming job and block until it is terminated externally.
        jssc.start();
        jssc.awaitTermination();
    }
}
|
roti/lut
|
library/src/test/scala/roti/lut/Record3.scala
|
<filename>library/src/test/scala/roti/lut/Record3.scala
package roti.lut
import roti.lut.annotation.record
/**
 * Test for when the companion object already exists.
 *
 * The @record macro annotation must extend the pre-existing companion
 * object below instead of generating a fresh one.
 */
@record
trait Record3 extends Record {
  // Abstract accessors the macro is expected to implement.
  def id: Long
  def name: String
}
// Pre-existing companion object: exercises the @record macro's merge path.
// The hand-written apply must survive code generation.
object Record3 {
  def apply(s: String) = s
}
|
scalecube/reactor-aeron
|
reactor-aeron-benchmarks/src/main/java/reactor/aeron/ClientServerSends.java
|
<filename>reactor-aeron-benchmarks/src/main/java/reactor/aeron/ClientServerSends.java
package reactor.aeron;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.charset.Charset;
import org.agrona.DirectBuffer;
import reactor.core.CoreSubscriber;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxOperator;
public class ClientServerSends {

  /**
   * Main runner.
   *
   * <p>Connects an Aeron client to localhost:13000/13001, logs every inbound
   * payload as a string, and blocks until the connection and the resources
   * are disposed.
   *
   * @param args program arguments.
   */
  public static void main(String[] args) {
    // Aeron media-driver resources backed by a temporary directory.
    AeronResources resources = new AeronResources().useTmpDir().start().block();

    AeronClient.create(resources)
        .options("localhost", 13000, 13001)
        .handle(
            connection ->
                connection
                    .inbound()
                    .receive()
                    .as(ByteBufFlux::create)
                    .asString()
                    .log("receive")
                    // Keep the handler alive for the lifetime of the connection.
                    .then(connection.onDispose()))
        .connect()
        .block()
        // Tie resource shutdown to connection disposal, then block forever.
        .onDispose(resources)
        .onDispose()
        .block();
  }

  /**
   * Flux adapter that converts Aeron {@link DirectBuffer} payloads into Netty
   * {@link ByteBuf}s and exposes a string view.
   */
  static class ByteBufFlux extends FluxOperator<ByteBuf, ByteBuf> {

    public ByteBufFlux(Flux<? extends ByteBuf> source) {
      super(source);
    }

    public static ByteBufFlux create(Flux<DirectBuffer> directBufferFlux) {
      return new ByteBufFlux(
          directBufferFlux.map(
              buffer -> {
                // DirectBuffer may be reused by Aeron; copy the bytes out
                // before handing them downstream.
                byte[] bytes = new byte[buffer.capacity()];
                buffer.getBytes(0, bytes);
                return Unpooled.copiedBuffer(bytes);
              }));
    }

    @Override
    public void subscribe(CoreSubscriber<? super ByteBuf> actual) {
      source.subscribe(actual);
    }

    /** Decodes each buffer with the JVM default charset. */
    public Flux<String> asString() {
      return map(buffer -> buffer.toString(Charset.defaultCharset()));
    }
  }
}
|
serhiislobodian/peach-wallet-mobile
|
App/Containers/NfcPaymentScreen.js
|
import React, { Component } from 'react';
import { Image } from 'react-native';
import { SafeAreaView } from 'react-navigation';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import NfcActions from '../Redux/NfcRedux';
import Types from '../Config/Types';
import BackAwareComponent from '../Components/BackAwareComponent';
import Text from '../Components/Text';
// Styles
import styles from './Styles/NfcPaymentScreenStyle';
import { Images } from '../Themes';
// Full-screen prompt shown while waiting for an NFC payment tap.
// Leaving the screen (back / cross icon) cancels the pending NFC request.
class NfcPaymentScreen extends Component {
  // Reuse the back-aware header configuration (cross icon, back handling).
  static navigationOptions = ({ navigation }) => ({
    ...BackAwareComponent.navigationOptions({ navigation }),
  });

  static propTypes = {
    navigation: Types.NAVIGATION_PROPS().isRequired,
    // Dispatches the NFC cancel action; injected by connect() below.
    cancelNfc: PropTypes.func.isRequired,
  };

  // Cancel the in-flight NFC request before navigating away.
  goBack = () => {
    this.props.cancelNfc();
    this.props.navigation.goBack();
  };

  render() {
    return (
      <BackAwareComponent showCrossIcon goBack={this.goBack}>
        <SafeAreaView style={styles.contentContainer}>
          <Image style={styles.image} source={Images.nfcLarge} />
          <Text style={styles.textTitle}>HOLD NEAR READER</Text>
        </SafeAreaView>
      </BackAwareComponent>
    );
  }
}
// No Redux state is read by this screen.
function mapStateToProps() {
  return {};
}

// cancelNfc aborts the pending NFC payment request.
function mapDispatchToProps(dispatch) {
  return {
    cancelNfc: () => dispatch(NfcActions.nfcCancelRequest()),
  };
}

export default connect(
  mapStateToProps,
  mapDispatchToProps,
)(NfcPaymentScreen);
|
open-garden/garden-opendriveconverter
|
src/main/java/com/zipc/garden/webplatform/opendrive/converter/object/ObjectRoadMarks.java
|
package com.zipc.garden.webplatform.opendrive.converter.object;
import java.util.ArrayList;
// Plain data bean describing a road-mark object used by the OpenDRIVE
// converter. NOTE(review): field semantics (valid values of type/mode) are
// not visible here — presumably they mirror the OpenDRIVE roadMark element;
// confirm against the converter that populates this class.
public class ObjectRoadMarks {
    // Identifier of this road mark.
    private String name;
    // Road-mark type string (e.g. line style) — see note above.
    private String type;
    // Road-mark mode string — see note above.
    private String mode;
    // Geometry/appearance parameters for the mark.
    private ObjectRoadMarksParameters parameters;
    // Ordered segments that make up the mark.
    private ArrayList<ObjectRoadMarksSegments> segments;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getMode() {
        return mode;
    }

    public void setMode(String mode) {
        this.mode = mode;
    }

    public ObjectRoadMarksParameters getParameters() {
        return parameters;
    }

    public void setParameters(ObjectRoadMarksParameters parameters) {
        this.parameters = parameters;
    }

    public ArrayList<ObjectRoadMarksSegments> getSegments() {
        return segments;
    }

    public void setSegments(ArrayList<ObjectRoadMarksSegments> segments) {
        this.segments = segments;
    }
}
|
IanAWatson/LillyMol-4.0-Bazel
|
src/Molecule_Lib/mdl_molecule.cc
|
<reponame>IanAWatson/LillyMol-4.0-Bazel
#include <stdlib.h>
#include <memory>
#include "Foundational/iwmisc/misc.h"
#define COMPILING_MDL_CC
#include "mdl.h"
#include "substructure.h"
#include "molecule_to_query.h"
#define COMPILING_MDL_CC
#include "mdl_molecule.h"
#include "mdl_atom_record.h"
#include "molecule_to_query.h"
/*
  File-scope flag: when non-zero (the default), A and Q query atoms read
  from MDL files are expanded into atom lists.
*/

static int convert_a_and_q_atoms_to_atom_lists = 1;

void
set_convert_a_and_q_atoms_to_atom_lists(int s)
{
  convert_a_and_q_atoms_to_atom_lists = s;

  return;
}
/*
  File-scope flag: when non-zero, NOT atom lists are rewritten as the
  complementary list of organic elements. Off by default.
*/

static int convert_not_atom_lists_to_organic_lists = 0;

void
set_convert_not_atom_lists_to_organic_lists(int s)
{
  convert_not_atom_lists_to_organic_lists = s;

  return;
}
/*
  File-scope flag: when non-zero, chirality information encountered while
  reading MDL connection tables is discarded. Off by default.
*/

static int mdl_molecule_discard_chirality = 0;

void
set_mdl_molecule_discard_chirality(const int s)
{
  mdl_molecule_discard_chirality = s;

  return;
}
// Default constructor. Base classes (Molecule, MDL_File_Data) default
// construct; the optional IWMALLOC block is a heap-integrity check used
// only in debug builds.
MDL_Molecule::MDL_Molecule()
{
#ifdef USE_IWMALLOC
  cerr << "Checking MDL_Molecule\n";
  iwmalloc_check_all_malloced(stderr);
#endif

  return;
}
// Destructor. All owned data is released by the base-class destructors.
MDL_Molecule::~MDL_Molecule()
{
  return;
}
// Parse an atom alias record: an "A <n>" line naming a (1-based) atom,
// followed on the next input record by the alias text itself.
// Returns 1 on success, 0 on a malformed record or premature EOF.
int
MDL_Molecule::_parse_atom_alias(iwstring_data_source & input,
                                const const_IWSubstring & buffer)
{
  assert (buffer.starts_with("A "));

  const_IWSubstring token(buffer);
  token.remove_leading_chars(3);
  token.strip_leading_blanks();

  atom_number_t zatom;
  if (! token.numeric_value(zatom) || zatom < 1)
  {
    cerr << "MDL_Molecule::_parse_atom_alias: invalid atom number specification '" << buffer << "'\n";
    return 0;
  }

  zatom--;    // convert from 1-based file numbering

  // The alias text is on the following record.
  if (! input.next_record(token))
  {
    cerr << "MDL_Molecule::_parse_atom_alias:eof\n";
    return 0;
  }

  _mdl_atom[zatom]->set_alias(token);

  return 1;
}
// Dispatch one trailing connection-table record ("A ...", "M ...").
// Returns 1 if the record was recognised and handled, 0 otherwise.
// On any parse failure FATAL is set to 1; when the record is simply not
// one of ours, FATAL is 0 so the caller can try other handlers.
int
MDL_Molecule::_parse_M_record(iwstring_data_source & input,
                              const const_IWSubstring & buffer,
                              ::resizable_array_p<ISIS_Link_Atom> & ltmp,
                              int & fatal)
{
  // Atom alias: atom number here, alias text on the next input record.
  if (buffer.starts_with("A "))
  {
    if (! _parse_atom_alias(input, buffer))
    {
      fatal = 1;
      return 0;
    }

    return 1;
  }

  // Atom list (query atom).
  if (buffer.starts_with("M ALS"))
  {
    if (! _parse_atom_list(buffer))
    {
      fatal = 1;
      return 0;
    }

    return 1;
  }

  // Link atom.
  if (buffer.starts_with("M LIN"))
  {
    if (! _parse_link_record(buffer, ltmp))
    {
      fatal = 1;
      return 0;
    }

    return 1;    // was "return 1;;" - stray empty statement removed
  }

  // Formal charges.
  if (buffer.starts_with("M CHG"))
  {
    Aprop atom_properties[MAX_PAIRS];
    int tokens;

    if (! fill_atom_property_array(buffer, tokens, atom_properties))
    {
      fatal = 1;   // previously returned with FATAL uninitialised
      return 0;
    }

    if (0 == tokens)
      ;
    else if (! mdl_add_m_formal_charge(tokens, atom_properties))
    {
      fatal = 1;
      return 0;
    }

    return 1;
  }

  // Isotopes.
  if (buffer.starts_with("M ISO"))
  {
    Aprop atom_properties[MAX_PAIRS];
    int tokens;

    if (! fill_atom_property_array(buffer, tokens, atom_properties))
    {
      fatal = 1;   // previously returned with FATAL uninitialised
      return 0;
    }

    if (0 == tokens)
      ;
    else if (! mdl_add_m_isotope(tokens, atom_properties))
    {
      fatal = 1;
      return 0;
    }

    return 1;
  }

  // Unsaturation query specifications.
  if (buffer.starts_with("M UNS"))
  {
    Aprop atom_properties[MAX_PAIRS];
    int tokens;

    if (! fill_atom_property_array(buffer, tokens, atom_properties))
    {
      fatal = 1;   // previously returned with FATAL uninitialised
      return 0;
    }

    if (! _set_unsaturation_specifications(atom_properties, tokens))
    {
      fatal = 1;
      return 0;
    }

    return 1;
  }

  // Substitution count query specifications.
  if (buffer.starts_with("M SUB"))
  {
    Aprop atom_properties[MAX_PAIRS];
    int tokens;

    if (! fill_atom_property_array(buffer, tokens, atom_properties))
    {
      fatal = 1;   // previously returned with FATAL uninitialised
      return 0;
    }

    if (! _set_substitution_specifications(atom_properties, tokens))
    {
      fatal = 1;
      return 0;
    }

    return 1;
  }

  // Ring bond count query specifications.
  if (buffer.starts_with("M RBC"))
  {
    Aprop atom_properties[MAX_PAIRS];
    int tokens;

    if (! fill_atom_property_array(buffer, tokens, atom_properties))
    {
      fatal = 1;   // previously returned with FATAL uninitialised
      return 0;
    }

    if (! _set_ring_bond_specifications(atom_properties, tokens))
    {
      fatal = 1;
      return 0;
    }

    return 1;
  }

  fatal = 0;

  return 0;    // not recognised
}
int
MDL_Molecule::_parse_atom_list(const IWString & buffer)
{
assert (buffer.starts_with("M ALS"));
const_IWSubstring token;
if (! buffer.word(2, token))
{
cerr << "MDL_Molecule::_parse_atom_list:invalid record '" << buffer << "'\n";
return 0;
}
atom_number_t zatom;
if (! token.numeric_value(zatom) || zatom < 1)
{
cerr << "MDL_Molecule::_parse_atom_list:invalid atom number '" << buffer << "'\n";
return 0;
}
zatom--;
if (! _mdl_atom.ok_index(zatom))
{
cerr << "MDL_Molecule::_parse_atom_list:invalid atom number in '" << buffer << "', natoms = " << _mdl_atom.number_elements() << endl;
return 0;
}
return _mdl_atom[zatom]->build_atom_list(buffer);
}
int
MDL_Molecule::_parse_link_record(const IWString & buffer,
::resizable_array_p<ISIS_Link_Atom> & ltmp)
{
assert (buffer.starts_with("M LIN"));
Link_Atom * l = new Link_Atom;
atom_number_t a;
if (! l->initialise_from_mdl_record(buffer, Molecule::natoms(), a))
{
cerr << "MDL_Molecule::_parse_link_record:invalid link record '" << buffer << "'\n";
delete l;
return 0;
}
if (2 != Molecule::ncon(a))
{
cerr << "MDL_Molecule::_parse_link_record:link atoms must have 2 connections\n";
return 0;
}
// The bond types either side of the link atom must be the same
const MDL_Bond_Data * b1 = mdl_bond_between_atoms(a, l->a1());
const MDL_Bond_Data * b2 = mdl_bond_between_atoms(a, l->a2());
if (b1->btype() != b2->btype())
{
cerr << "MDL_Molecule::_parse_link_record:inconsitent bond types " << b1->btype() << " vs " << b2->btype() << '\n';
return 0;
}
if (b1->bond_topology() != b2->bond_topology())
{
cerr << "MDL_Molecule::_parse_link_record:inconsitent bond topologies " << b1->bond_topology() << " vs " << b2->bond_topology() << '\n';
return 0;
}
//cerr << "MDL_Molecule::_parse_link_record:set link atom bond type " << b1->btype() << ", bond topology " << b1->bond_topology() << endl;
l->set_bond_type(b1->btype());
l->set_bond_topology(b1->bond_topology());
_link_atom.add(l);
_mdl_atom[l->a1()]->increment_connections_lost();
_mdl_atom[l->a2()]->increment_connections_lost();
const IWString & s = atomi(a)->atomic_symbol();
if (! l->set_symbol(s))
{
cerr << "MDL_Molecule::_parse_link_record:invalid element??? '" << s << "'\n";
return 0;
}
l->set_mdl_atom_data(_mdl_atom[a]);
remove_atom(a); // the explicit link atom is no longer needed
return 1;
}
/*
Removing an atom is hard because of the arrays we must keep in sync
*/
// Remove atom ZATOM from the molecule while keeping the MDL-specific
// parallel arrays (_mdl_atom, _mdl_bond) and the link-atom records in sync
// with the Molecule's own atom and bond lists. Always returns 1.
int
MDL_Molecule::remove_atom(atom_number_t zatom)
{
  // Atoms that were bonded to ZATOM; their MDL atom data must be told
  // that a neighbour of this element went away.
  Set_of_Atoms check_ncon;

  atomic_number_t zremove = atomic_number(zatom);

  // Iterate edges backwards so removal by index remains valid, deleting
  // each incident bond from both the molecule and _mdl_bond.
  for (int i = Molecule::nedges() - 1; i >= 0; i--)
  {
    const Bond * b = Molecule::bondi(i);

    if (! b->involves(zatom))
      continue;

    check_ncon.add(b->other(zatom));

    Molecule::remove_bond(i);
    _mdl_bond.remove_item(i);
  }

  // Link atoms hold atom numbers, which shift when an atom disappears.
  for (int i = 0; i < _link_atom.number_elements(); i++)
  {
    _link_atom[i]->atom_is_being_removed(zatom);
  }

  // Notify former neighbours which element they lost.
  for (int i = 0; i < check_ncon.number_elements(); i++)
  {
    atom_number_t j = check_ncon[i];

    _mdl_atom[j]->connected_atom_is_being_removed(zremove);
  }

  _mdl_atom.remove_item(zatom);

#ifdef DEBUG_MDL_MOLECULE_REMOVE_ATOM
  cerr << "MDL_Molecule::remove_atom: removing atom " << zatom << " " << smarts_equivalent_for_atom(zatom) << endl;
#endif

  Molecule::remove_atom(zatom);

  // Fragment data is now stale.
  Molecule::invalidate_fragment_membership();

#ifdef DEBUG_MDL_MOLECULE_REMOVE_ATOM
  cerr << "After removing atom " << zatom << " " << smiles() << " natoms = " << natoms() << endl;
  cerr << this << endl;
#endif

  return 1;
}
// Remove every atom I for which TO_REMOVE[i] is non-zero, iterating from
// the highest index down so earlier removals do not shift later ones.
// NOTE(review): RC counts the atoms removed but the function returns the
// constant 1 — RC is computed and discarded. Possibly the intent was
// `return rc;`; left unchanged because callers may rely on the truthy
// return. TODO confirm with call sites.
int
MDL_Molecule::remove_atoms(const int * to_remove)
{
  int rc = 0;

  for (int i = Molecule::natoms() - 1; i >= 0; i--)
  {
    if (to_remove[i])
    {
      MDL_Molecule::remove_atom(i);
      rc++;
    }
  }

  return 1;
}
/*
If we are reading an MDL type file, we can process it here. Otherwise
go to the default processing
*/
// Read the next molecule from INPUT. MDL-family file types (SDF, MDL) are
// handled by our own reader so the query-related data is captured; any
// other type is delegated to the base class. Returns 1 on success.
int
MDL_Molecule::read_molecule_ds(iwstring_data_source & input,
                               FileType input_type)
{
//cerr << "MDL_Molecule::read_molecule_ds:type " << input_type << endl;

  if (FILE_TYPE_SDF != input_type && FILE_TYPE_MDL != input_type)
  {
    // Not an MDL format - plain Molecule reading, normalised to 0/1.
    if (! Molecule::read_molecule_ds(input, input_type))
      return 0;

    return 1;
  }

  return MDL_Molecule::read_molecule_mdl_ds(input);
}
/*
Sept 2014. Beware, automatically creating the A element means that if
there is a smarts like [AH] as an atom alias, it will break, because
the Aliphatic atom smarts token will get interpreted as element A.
We should think of a different way of handling these elements when
encountered...
*/
static const Element * element_a = NULL;
static const Element * element_q = NULL;
static const Element * element_l = NULL;
void
do_create_special_elements_for_mdl_stuff()
{
int isave = auto_create_new_elements();
if (0 == isave)
set_auto_create_new_elements(1);
element_a = get_element_from_symbol_no_case_conversion("A");
if (NULL == element_a)
element_a = create_element_with_symbol("A");
element_q = get_element_from_symbol_no_case_conversion("Q");
if (NULL == element_q)
element_q = create_element_with_symbol("Q");
element_l = get_element_from_symbol_no_case_conversion("L");
if (NULL == element_l)
element_l = create_element_with_symbol("L");
set_auto_create_new_elements(isave);
return;
}
/*
Very dismaying that there isn't code sharing between mdl.cc and
rxnfile.cc, but just too hard to figure out how to do it in a robust
and maintainable form!
*/
// Read one MDL connection table (molfile / SDF entry) from INPUT, filling
// both the Molecule base class and the MDL-specific per-atom/per-bond
// query data. If RETURN_ON_M_END is set, reading stops at "M END" instead
// of consuming records through "$$$$". Returns the number of atoms read
// (0 on failure).
int
MDL_Molecule::read_molecule_mdl_ds (iwstring_data_source & input,
                                    int return_on_m_end)
{
  // Lazily create the special A/Q/L query elements on first use.
  if (NULL == element_a)
    do_create_special_elements_for_mdl_stuff();

  const_IWSubstring buffer;

  // Header block: name line, a comment line, the third line (kept for
  // later echo), then the counts line.
  if (! input.next_record(buffer))
  {
    return 0;
  }

  set_name(buffer);

//cerr << "Name is '" << buffer << "'\n";

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::read_molecule_ds:eof reading header\n";
    return 0;
  }

//cerr << "2nd record '" << buffer << "'\n";

  if (! input.next_record(_third_line_of_input_sdf_file))
  {
    cerr << "MDL_Molecule::read_molecule_ds:eof reading header\n";
    return 0;
  }

//cerr << "Third line " << _third_line_of_input_sdf_file << "'\n";

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::read_molecule_ds:eof reading header\n";
    return 0;
  }

//cerr << "NA NB '" << buffer << "'\n";

  // Counts line: atom and bond counts in fixed-width fields.
  int na, nb;
  if (2 != int3d(buffer, na, nb))
  {
    cerr << "MDL_Molecule::read_molecule_ds: error from int3d '" << buffer << "'\n";
    cerr << "Line " << input.lines_read() << endl;
    return 0;
  }

  // A V3000 file has zero counts here and its own block structure.
  if (0 == na && 0 == nb && buffer.contains("V3000"))
    return _read_v3000(input);

  assert (na >= 0 && (nb >= 0));

  // Size the parallel _mdl_atom/_mdl_bond arrays.
  if (! allocate_arrays(na, nb))
    return 0;

  MDL_File_Supporting_Material * mdlfos = global_default_MDL_File_Supporting_Material();

  // Atom block: one record per atom.
  MDL_Atom_Record mdlar;

  for (int i = 0; i < na; i++)
  {
    if (! input.next_record(buffer))
    {
      cerr << "MDL_Molecule::read_molecule_ds:premature eof\n";
      return 0;
    }

    if (! mdlar.build(buffer))
    {
      cerr << "MDL_Molecule::read_molecule_ds:invalid record, line " << input.lines_read() << endl;
      cerr << buffer << endl;
      return 0;
    }

    const_IWSubstring s = mdlar.atomic_symbol();

    Atom * a = mdlar.create_atom();

    if (NULL == a)
    {
      cerr << "MDL_Molecule::read_molecule_ds:cannot create atom, line " << input.lines_read() << endl;
      return 0;
    }

    a->set_atom_map(mdlar.atom_map());

    Molecule::add(a);

    // Capture query-related per-atom data (charge, hcount, etc).
    _mdl_atom[i]->extract_info_from_mdl_file_record(mdlar);

//  cerr << " atom " << i << " chirality " << mdlar.astere() << endl;

    if (0 == mdlar.astere())    // the most common case, no chirality
      ;
    else if (mdl_molecule_discard_chirality)
      ;
    else if (! _mdl_atom_is_chiral_centre(Molecule::natoms() - 1, mdlar.astere(), *mdlfos))
    {
      cerr << "MDL_Molecule::read_molecule_ds:invalid chirality on line " << input.lines_read() << endl;
      cerr << buffer << endl;
      return 0;
    }
  }

  // Bond block: one record per bond; wedge information feeds chirality.
  int wedge_bonds_present = 0;

  MDL_Bond_Record mdlbr;

  for (int i = 0; i < nb; i++)
  {
    if (! input.next_record(buffer))
    {
      cerr << "MDL_Molecule::read_molecule_ds:premature eof\n";
      return 0;
    }

    if (! mdlbr.build(buffer, na, *mdlfos))
    {
      cerr << "MDL_Molecule::read_molecule_ds:invalid bond record, line " << input.lines_read() << endl;
      cerr << buffer << endl;
      return 0;
    }

    bond_type_t bt_for_molecule;
    if (! mdlbr.bond_type_for_molecule(bt_for_molecule))
    {
      cerr << "MDL_Molecule::read_molecule_ds:invalid bond type\n";
      return 0;
    }

    if (! _mdl_bond[i]->extract_info_from_mdl_file_record(mdlbr))
      return 0;

    Molecule::add_bond(mdlbr.a1(), mdlbr.a2(), bt_for_molecule, 1);

    if (mdlbr.bond_stereo())
    {
      _mdl_set_bond_directionality(mdlbr.a1(), mdlbr.a2(), mdlbr.bond_stereo());
      wedge_bonds_present++;
    }
  }

  if (nb > 0)
    check_bonding();

  // Resolve chirality from wedge bonds unless chirality is being ignored.
  if (ignore_all_chiral_information_on_input())
    Molecule::remove_all_chiral_centres();
  else if (mdl_molecule_discard_chirality)
    ;
  else if (wedge_bonds_present)
    discern_chirality_from_wedge_bonds();

  if (0 == chiral_centres())    // none to worry about
    ;
  else if (_complete_chiral_centres_from_mdl_files(*mdlfos))    // good
    ;
  else    // OOPS, bad chirality info
  {
    cerr << "MDL_Molecule::read_molecule_mdl_ds: erroneous chiral input '" << Molecule::name() << "'\n";
    Molecule::remove_all_chiral_centres();
    if (! ignore_incorrect_chiral_input())
      return 0;
  }

//int got_mend = 0;
//int got_dollars = 0;

  // Trailing records: "M ..." property lines through "M END" / "$$$$".
  ::resizable_array_p<ISIS_Link_Atom> ltmp;

  while (input.next_record(buffer))
  {
    if ("$$$$" == buffer)
    {
//    got_dollars = 1;
      break;
    }

    if ("M END" == buffer)
    {
//    got_mend = 1;
      if (return_on_m_end)
        break;
      else
        continue;
    }

    int fatal;
    if (Molecule::_common_parse_M_record(buffer, fatal))
      continue;
    else if (fatal)
      return 0;

    if (_parse_M_record(input, buffer, ltmp, fatal))
      continue;
    else if (fatal)
    {
      cerr << "MDL_Molecule::read_molecule_ds:fatal error, line " << input.lines_read() << endl;
      cerr << "'" << buffer << "'\n";
      return 0;
    }

    if (read_extra_text_info())
      Molecule::add_extra_text_info(buffer);
  }

  na = natoms();    // link atoms remove the atom, so NA may have changed. Recompute

  // Cache aromaticity in the per-atom MDL data.
  for (int i = 0; i < na; i++)
  {
    MDL_Atom_Data * mdla = _mdl_atom[i];

    if (Molecule::is_aromatic(i))
      mdla->set_aromatic(1);
    else
      mdla->set_aromatic(0);
  }

  // Optional post-processing of A/Q atoms and NOT lists.
  if (convert_a_and_q_atoms_to_atom_lists)
  {
    for (int i = 0; i < na; i++)
    {
      MDL_Atom_Data * mdla = _mdl_atom[i];

      const IWString & s = atomic_symbol(i);

      mdla->convert_a_or_q_atoms_to_atom_list(s);

      const IWString & a = mdla->alias();

      if (a.length())
        mdla->convert_a_or_q_atoms_to_atom_list(a);
    }
  }

  if (convert_not_atom_lists_to_organic_lists)
  {
    for (int i = 0; i < na; i++)
    {
      _mdl_atom[i]->convert_not_atom_lists_to_organic_lists();
    }
  }

#ifdef ECHO_ATOM_LIST_INFO
  cerr << "After reading\n";
  for (int i = 0; i < na; i++)
  {
    cerr << "Atom " << i << " atom list contains " << _mdl_atom[i]->atom_list().number_elements() << " elements\n";
  }
#endif

// For now, I'm ignoring the trailing records

  //reset_mdl_molecule_file_scope_variables();
  return na;
}
// Return the atom list associated with atom A, or NULL when that atom has
// no active atom list.
const ISIS_Atom_List *
MDL_Molecule::atom_list_for_atom (atom_number_t a) const
{
  assert (ok_atom_number(a));

  const ISIS_Atom_List & alist = _mdl_atom[a]->atom_list();

  return alist.active() ? &alist : NULL;
}
int
MDL_Molecule::_common_set_from_aprop (const Aprop * atom_properties,
int ntokens,
int * dest)
{
for (int i = 0; i < ntokens; i++)
{
const Aprop & a = atom_properties[i];
dest[a._atom_number - 1] = a._property;
}
return 1;
}
int
MDL_Molecule::_set_unsaturation_specifications (const Aprop * atom_properties,
int tokens)
{
for (int i = 0; i < tokens; i++)
{
const Aprop & a = atom_properties[i];
atom_number_t n = a._atom_number - 1;
_mdl_atom[n]->set_unsaturation(a._property);
}
return 1;
}
int
MDL_Molecule::_set_substitution_specifications (const Aprop * atom_properties,
int tokens)
{
for (int i = 0; i < tokens; i++)
{
const Aprop & a = atom_properties[i];
atom_number_t n = a._atom_number - 1;
_mdl_atom[n]->set_substitution(a._property);
}
return 1;
}
int
MDL_Molecule::_set_ring_bond_specifications (const Aprop * atom_properties,
int tokens)
{
for (int i = 0; i < tokens; i++)
{
const Aprop & a = atom_properties[i];
atom_number_t n = a._atom_number - 1;
_mdl_atom[n]->set_ring_bond(a._property);
}
return 1;
}
/*int
MDL_Molecule::initialise_mqs (Molecule_to_Query_Specifications & mqs) const
{
if (NULL == _hcount) // we've never been initialised
return 1;
int matoms = natoms();
mqs.set_hcount(_hcount, matoms);
mqs.set_h0designator(_h0designator, matoms);
mqs.set_unsaturation(_unsaturated, matoms);
mqs.set_substitution(_substitution, matoms);
mqs.set_ring_bonds(_ring_bond, matoms);
if (_atom_alias.number_elements())
mqs.set_atom_alias(_atom_alias);
if (_link_atom.number_elements())
mqs.set_link_atoms(_link_atom);
if (NULL != _bond_topology)
mqs.set_bond_topology (_bond_topology, nedges());
return 1;
}*/
/*
Removing atoms is very complex because we need to keep track of all the info in the arrays
*/
/*int
MDL_Molecule::remove_atoms (const int * to_remove)
{
int matoms = natoms();
int ne = nedges();
int * bond_being_lost = new_int(ne); std::unique_ptr<int[]> free_bond_being_lost(bond_being_lost);
for (int i = 0; i < ne; i++)
{
const Bond * b = bondi(i);
if (to_remove[b->a1()] || to_remove[b->a2()])
bond_being_lost[i] = 1;
}
int * atom_cross_reference = new_int(matoms); std::unique_ptr<int[]> free_atom_cross_reference(atom_cross_reference);
Molecule::remove_atoms(to_remove);
int ato = 0;
for (int i = 0; i < matoms; i++)
{
if (to_remove[i])
{
atom_cross_reference[i] = INVALID_ATOM_NUMBER;
continue;
}
if (ato != i)
{
_hcount[ato] = _hcount[i];
_h0designator[ato] = _hcount[i];
_unsaturated[ato] = _unsaturated[i];
_substitution[ato] = _substitution[i];
_ring_bond[ato] = _ring_bond[i];
}
atom_cross_reference[i] = ato;
ato++;
}
int bto = 0;
for (int i = 0; i < ne; i++)
{
if (bond_being_lost[i])
continue;
if (bto != i)
{
_bond_type_read_in[bto] = _bond_type_read_in[i];
_bond_topology[bto] = _bond_topology[i];
}
bto++;
}
for (int i = _atom_alias.number_elements() - 1; i >= 0; i--)
{
int a = _atom_alias[i]->atom_number();
if (to_remove[a])
{
_atom_alias.remove_item(i);
continue;
}
}
for (int i = _atom_list.number_elements() - 1; i >=0; i--)
{
atom_number_t a = _atom_list[i]->atom_number();
if (to_remove[a])
_atom_list.remove_item(i);
}
for (int i = 0; i < _link_atom.number_elements(); i++)
{
_link_atom[i]->adjust_atom_numbers(atom_cross_reference);
}
return 1;
}*/
/*
When we remove explicit hydrogens, we can increment the min_hcount value
of the atoms to which the H's were attached
*/
// Remove singly-connected explicit atoms of atomic number Z (normally 1,
// i.e. explicit Hydrogens). For each removed Hydrogen the anchor atom's
// minimum H count is incremented so query semantics are preserved; for
// other elements the anchor records a lost connection instead. Atoms that
// carry an atom list are never removed. Returns the number removed.
int
MDL_Molecule::remove_explicit_hydrogens(atomic_number_t z)
{
  int matoms = natoms();

  // Make sure the MDL arrays exist before we start mutating them.
  if (! arrays_allocated())
    build(*this);

  int number_hydrogens = Molecule::natoms(z);

  if (0 == number_hydrogens)
    return 0;

  int rc = 0;

  // Iterate backwards so removal by index stays valid.
  for (int i = matoms - 1; i >= 0; i--)
  {
    const MDL_Atom_Data * mdlad = _mdl_atom[i];

    const ISIS_Atom_List & l = mdlad->atom_list();   // don't delete atom lists that start with H

//  cerr << "Atom list on atom " << i << " contains " << l.number_elements() << " items\n";

    if (l.number_elements() > 0)
      continue;

    const Atom * ai = Molecule::atomi(i);

    if (z != ai->atomic_number())
      continue;

    if (1 != ai->ncon())    // I could actually handle the other cases, just messy and not needed
      continue;

    // The single neighbour - the atom that keeps the implicit information.
    atom_number_t o = ai->other(i, 0);

    if (1 == z)
    {
      _mdl_atom[o]->increment_min_hcount();
      _mdl_atom[o]->increment_explicit_hydrogen_atoms_removed();
    }
    else
      _mdl_atom[o]->increment_connections_lost();

    MDL_Molecule::remove_atom(i);

    rc++;

    if (rc == number_hydrogens)
      break;
  }

  return rc;
}
// Count atoms whose active atom list is a NOT list (i.e. not a normal
// positive list). Returns that count, 0 when none are present.
int
MDL_Molecule::not_atom_lists_present() const
{
  const int matoms = Molecule::natoms();

  int count = 0;

  for (int i = 0; i < matoms; ++i)
  {
    const MDL_Atom_Data * madi = MDL_File_Data::mdl_atom_data(i);

    const ISIS_Atom_List & iali = madi->atom_list();

    if (iali.active() && ! iali.normal_list())
      count++;
  }

  return count;
}
// Construct from a plain Molecule: copy the connection table, then derive
// the MDL-specific per-atom/per-bond arrays from the copied structure.
MDL_Molecule::MDL_Molecule(const Molecule & m) : Molecule(m)
{
  MDL_File_Data::build(*this);
}
// Copy constructor: both the molecular structure and the MDL query data
// are copied by the respective base classes.
MDL_Molecule::MDL_Molecule (const MDL_Molecule & m) : Molecule(m),
                                                      MDL_File_Data(m)
{
}
/*
Someone has a molecule with R groups. The atoms to which these
are attached are the only attachment points possible.
*/
// Someone has a molecule with R groups. The atoms to which these are
// attached are the only attachment points possible.
//
// Every R-group atom matched by RGROUP (must be singly connected) is
// removed; its anchor atom becomes a registered substitution point. When
// ENABLE_HYDROGEN_SUBSTITUENT is set the anchor may also remain
// unsubstituted (min_ncon is one lower). All other atoms are locked to
// their current connectivity. Returns the number of R groups processed.
int
MDL_Molecule::change_R_groups_to_substitutions (Element_Matcher & rgroup,
                                                int enable_hydrogen_substituent)
{
  int matoms = natoms();

  // First lock every atom to its current degree; attachment points are
  // relaxed below.
  for (int i = 0; i < matoms; i++)
  {
    _mdl_atom[i]->set_substitution(ncon(i));
  }

  int rc = 0;

  for (int i = 0; i < matoms; i++)
  {
    const Atom * ai = atomi(i);

    const Element * e = ai->element();

    if (! rgroup.matches(e))
      continue;

    if (1 != ai->ncon())    // possible limitation, hmmmm
      continue;

    // The anchor atom to which the R group was attached.
    atom_number_t j = ai->other(i, 0);

//  cerr << "Processing R group, bonded to atom " << j << ", has " << ncon(j) << " connections\n";

    if (enable_hydrogen_substituent)
    {
      _mdl_atom[j]->set_min_ncon(ncon(j) - 1);
      _mdl_atom[j]->set_substitution(0);
    }
    else
    {
      _mdl_atom[j]->set_min_ncon(ncon(j));
      _mdl_atom[j]->set_substitution(0);
    }

    _substitution_points.add(j);

    // Removing atom I shifts subsequent atom numbers; keep the recorded
    // substitution points consistent, then re-examine the same index.
    _substitution_points.adjust_for_loss_of_atom(i);

    MDL_Molecule::remove_atom(i);
    i--;
    matoms--;
    rc++;
  }

  if (0 == rc)
    cerr << "MDL_Molecule::change_R_groups_to_substitutions:none of the rgroup element matches matched '" << name() << "'\n";

  return rc;
}
// Convert every atom matched by RGROUP into the special "A" (match any)
// query element, relaxing its connectivity constraints. When
// ONLY_SUBSTITUENTS_AT_MATCHED_ATOMS is set, the converted atoms become
// the registered substitution points. The H count of each matched atom's
// first neighbour is relaxed so targets with explicit Hydrogens still
// match. Returns the number of atoms converted.
int
MDL_Molecule::change_R_groups_to_match_any_atom (Element_Matcher & rgroup,
                                                 int only_substituents_at_matched_atoms)
{
  const auto a = get_element_from_symbol_no_case_conversion("A");   // match any atom

  const int matoms = natoms();

  int rc = 0;

  for (int i = 0; i < matoms; i++)
  {
    const Atom * ai = atomi(i);

    const Element * e = ai->element();

    if (! rgroup.matches(e))
      continue;

    // Replace the R-group element with the wildcard element.
    Molecule::set_element(i, a);
    _mdl_atom[i]->set_min_ncon(ncon(i));
    _mdl_atom[i]->set_substitution(0);

    if (only_substituents_at_matched_atoms)
      _substitution_points.add(i);

    if (0 == ai->ncon())    // very hard to imagine
      continue;

// If we are searching a target with explicit Hydrogen atoms, we need to adjust hcount for our attached atom.
// If not searching a target with explicit H atoms, this just makes the query a little less specific

    const atom_number_t c = ai->other(i, 0);

    const int h = _mdl_atom[c]->hcount();

    if (h > 0)
      _mdl_atom[c]->set_min_hcount(h-1);    // remember the +1 business

    _mdl_atom[c]->set_hcount(0);

    rc++;
  }

  if (0 == rc)
    cerr << "MDL_Molecule::change_R_groups_to_match_any_atom:none of the rgroup element matches matched '" << name() << "'\n";

  return rc;
}
/*
  Isotopic labels mark the only atoms where substitution is allowed.
  Unlabelled atoms get their substitution pinned at the current
  connectivity; labelled atoms are opened up according to the global
  isotope interpretation settings, after which the isotope is erased.
  Returns the number of isotopically labelled atoms processed.
*/

int
MDL_Molecule::only_allow_substitutions_at_isotopic_atoms(const Molecule_to_Query_Specifications & mqs)
{
  int matoms = natoms();

  int rc = 0;

  for (int i = 0; i < matoms; i++)
  {
    const Atom * ai = atomi(i);

    int iso = ai->isotope();

    MDL_Atom_Data * mad = _mdl_atom[i];

#ifdef DEBUG_ONLY_ALLOW_SUBSTITUTIONS_AT_ISOTOPIC_ATOMS
    cerr << "Atom " << i << " is isotope " << iso << ", currently " << mad->substitution() << endl;
#endif

    if (0 == iso)   // unlabelled: freeze at current connectivity
    {
      mad->set_substitution(ai->ncon());
      continue;
    }

// A labelled atom needs an open valence to accept a substituent - warn if it has none

    if (hcount(i))
      ;
    else if (16 == atomic_number(i))   // can have any valence it wants
      ;
    else
      cerr << "MDL_Molecule::only_allow_substitutions_at_isotopic_atoms:no open valence in '" << name() << "', atom " << smarts_equivalent_for_atom(i) << endl;

    if (must_have_substituent_at_every_isotopic_atom())
    {
      if (0 != mad->substitution())   // already set
        ;
      else if (isotope_count_means_extra_connections())
        mad->set_substitution(ai->ncon() + iso);
      else
        mad->set_min_ncon(ai->ncon() + 1);
    }
    else
    {
      if (isotope_count_means_extra_connections())
        mad->set_min_ncon(ai->ncon() + iso);
      else
        mad->set_min_ncon(ai->ncon());

      mad->set_substitution(0);
    }

    if (mqs.environment_near_substitution_points_specified())   // do I need this test, or should it always be done
      _substitution_points.add(i);

    set_isotope(i, 0);   // the label has served its purpose

    rc++;
  }

#ifdef DEBUG_ONLY_ALLOW_SUBSTITUTIONS_AT_ISOTOPIC_ATOMS
  for (int i = 0; i < matoms; i++)
  {
    const MDL_Atom_Data * ma = _mdl_atom[i];

    cerr << " atom " << i << " substitution " << ma->substitution() << endl;
  }
#endif

  return rc;
}
/*
  The inverse of the function above: isotopically labelled atoms have
  their substitution frozen at the current connectivity (and the label
  removed), while unlabelled atoms remain open to substitution.
  Returns the number of unlabelled atoms.
*/

int
MDL_Molecule::only_allow_substitutions_at_non_isotopic_atoms()
{
  int matoms = natoms();

  int rc = 0;

  for (int i = 0; i < matoms; i++)
  {
    const Atom * ai = atomi(i);

    int iso = ai->isotope();

    MDL_Atom_Data * mad = _mdl_atom[i];

#ifdef DEBUG_ONLY_ALLOW_SUBSTITUTIONS_AT_ISOTOPIC_ATOMS
    cerr << "Atom " << i << " is isotope " << iso << ", currently " << mad->substitution() << endl;
#endif

    if (0 != iso)   // labelled, set max ncon
    {
      mad->set_substitution(ai->ncon());
      set_isotope(i, 0);
      continue;
    }

// unlabelled, warn if there is no open valence for a substituent

    if (hcount(i))
      ;
    else if (16 == atomic_number(i))   // can have any valence it wants
      ;
    else
      cerr << "MDL_Molecule::only_allow_substitutions_at_non isotopic_atoms:no open valence in '" << name() << "'\n";

    rc++;
  }

#ifdef DEBUG_ONLY_ALLOW_SUBSTITUTIONS_AT_ISOTOPIC_ATOMS
  for (int i = 0; i < matoms; i++)
  {
    const MDL_Atom_Data * ma = _mdl_atom[i];

    cerr << " atom " << i << " substitution " << ma->substitution() << endl;
  }
#endif

  return rc;
}
/*
  Attachment points are defined by matches to the substitutions_only_at
  query in MQS.  Every atom starts with substitution pinned at its
  current connectivity; atoms hit by the query are then opened up.
  Returns 0 if the query does not match, 1 otherwise.
*/

int
MDL_Molecule::determine_attachment_points_by_query(Molecule_to_Query_Specifications & mqs)
{
  Substructure_Results sresults;

  int nhits = mqs.substitutions_only_at().substructure_search(*this, sresults);

//cerr << "MDL_Molecule::determine_attachment_points_by_query:nhits = " << nhits << endl;

  if (0 == nhits)
  {
    cerr << "MDL_Molecule::determine_attachment_points_by_query:no hits to substitutions_only_at query\n";
    return 0;
  }

  int matoms = natoms();

// Default: no substitution anywhere

  for (int i = 0; i < matoms; i++)
  {
    _mdl_atom[i]->set_substitution(ncon(i));
  }

  for (int i = 0; i < nhits; i++)
  {
    const Set_of_Atoms * e = sresults.embedding(i);

    for (int j = 0; j < e->number_elements(); j++)
    {
      atom_number_t k = e->item(j);

      _mdl_atom[k]->set_substitution(0);   // matched atoms may be substituted

      if (0 == _mdl_atom[k]->min_ncon())
        _mdl_atom[k]->set_min_ncon(ncon(k));
    }

// should we also set the unsaturation flag?????

    if (mqs.environment_near_substitution_points_specified())
      _substitution_points.add_non_duplicated_elements(*e);
  }

  return 1;
}
/*
  Keep the Molecule and the parallel MDL file data arrays in
  sync when two atoms are interchanged.
*/

int
MDL_Molecule::swap_atoms (atom_number_t a1,
                          atom_number_t a2)
{
  Molecule::swap_atoms(a1, a2);

  return MDL_File_Data::swap_atoms(a1, a2);
}
/*
  Return the MDL bond data for the bond joining A1 and A2, or NULL
  if the atoms are not bonded.  _mdl_bond is parallel to the
  molecule's bond list, so the bond index can be used directly.
*/

const MDL_Bond_Data *
MDL_Molecule::mdl_bond_between_atoms (atom_number_t a1,
                                      atom_number_t a2) const
{
  int j = Molecule::which_bond(a1, a2);

  if (j < 0)
  {
    cerr << "MDL_Molecule::mdl_bond_between_atoms:no bond between " << a1 << " and " << a2 << endl;
    return NULL;
  }

  return _mdl_bond[j];
}
/*
  Append an atom of element E.  If the MDL query arrays are in use,
  a corresponding (default constructed) MDL_Atom_Data is appended so
  the arrays stay parallel with the atoms.
*/

int
MDL_Molecule::add (const Element * e)
{
  if (! Molecule::add(e))
    return 0;

  if (! arrays_allocated())
    return 1;

  MDL_Atom_Data * t = new MDL_Atom_Data();

  _mdl_atom.add(t);

  return 1;
}
/*
The atomic symbol is something other than a known atomic symbol
*/
/*
  Add an atom from an arbitrary symbol.  Short plain symbols become
  (possibly auto-created) elements; anything else - bracketed or long
  symbols - becomes a '*' placeholder atom.
  NOTE(review): auto element creation is switched on here and never
  restored - presumably deliberate, but confirm.
*/

int
MDL_Molecule::add_atom_based_on_symbol (const IWString & s)
{
  set_auto_create_new_elements(1);

  if (! s.starts_with('[') && s.length() <= 2)
  {
    const Element * e = get_element_from_symbol_no_case_conversion(s);

    return MDL_Molecule::add(e);
  }
  else   // hope that something in the MDL_Atom_Data specifies this
  {
    const Element * e = get_element_from_symbol_no_case_conversion("*");

    return MDL_Molecule::add(e);
  }
}
/*
  Add a bond to both the molecule (BOND_FOR_MOLECULE) and, when the
  query arrays exist, to the parallel MDL bond array (QUERY_BOND).
  A negative BOND_TOPOLOGY means "not specified".
*/

int
MDL_Molecule::add_bond (atom_number_t a1, atom_number_t a2,
                        bond_type_t bond_for_molecule,
                        bond_type_t query_bond,
                        int bond_topology)
{
  if (! Molecule::add_bond(a1, a2, bond_for_molecule))
    return 0;

  if (! arrays_allocated())
    return 1;

  MDL_Bond_Data * b = new MDL_Bond_Data();

  b->set_btype(query_bond);

  if (bond_topology >= 0)
    b->set_bond_topology(bond_topology);

  _mdl_bond.add(b);

  return 1;
}
/*
  Remove the bond joining A1 and A2 from both the molecule and the
  parallel MDL bond array.  Returns 0 if the atoms are not bonded.
*/

int
MDL_Molecule::remove_bond_between_atoms (atom_number_t a1, atom_number_t a2)
{
  int i = Molecule::which_bond(a1, a2);

  if (i < 0)
  {
    cerr << "MDL_Molecule::remove_bond_between_atoms:atoms " << a1 << " and " << a2 << " not bonded\n";
    return 0;
  }

  if (! Molecule::remove_bond_between_atoms(a1, a2))   // how could that fail?
    return 0;

  _mdl_bond.remove_item(i);

// We don't have any link atoms, this function was built for help enumerating link atoms

  return 1;
}
/*
  Overwrite the MDL atom data for atom I with a copy of *A,
  allocating the arrays first if that has not yet happened.
*/

int
MDL_Molecule::set_mdl_atom_data (int i, const MDL_Atom_Data * a)
{
  if (! arrays_allocated())
    build(*this);

  assert (_mdl_atom.ok_index(i));

  *(_mdl_atom[i]) = *a;

  return 1;
}
/*
  Read a V3000 connection table.  On entry the BEGIN CTAB record is the
  next record in INPUT.  The COUNTS record is parsed, the molecule and
  query arrays are sized, then the atom/bond reader does the rest.
*/

int
MDL_Molecule::_read_v3000 (iwstring_data_source & input)
{
  IWString buffer;

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::_read_v3000:no data\n";
    return 0;
  }

  if (! buffer.starts_with("M V30 BEGIN CTAB"))
  {
    cerr << "MDL_Molecule::_read_v3000:not V30 BEGIN CTAB '" << buffer << "'\n";
    return 0;
  }

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::_read_v3000:no counts\n";
    return 0;
  }

// M V30 COUNTS 4 3 0 0 0

  if (! buffer.starts_with("M V30 COUNTS ") || buffer.nwords() < 5)
  {
    cerr << "MDL_Molecule::_read_v3000:not counts '" << buffer << "'\n";
    return 0;
  }

  const_IWSubstring token = buffer.word(3);   // atom count column

  int na;
  if (! token.numeric_value(na) || na < 1)
  {
    cerr << "MDL_Molecule::_read_v3000:invalid atom count '" << buffer << "'\n";
    return 0;
  }

  token = buffer.word(4);   // bond count column

  int nb;
  if (! token.numeric_value(nb) || nb < 0)
  {
    cerr << "MDL_Molecule::_read_v3000:invalid bond count '" << buffer << "'\n";
    return 0;
  }

  _fill_empty_molecule_with_null_atoms(na);

  if (! allocate_arrays(na, nb))
    return 0;

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::_read_v3000:atom data missing\n";
    return 0;
  }

  if (! buffer.starts_with("M V30 BEGIN ATOM"))
  {
    cerr << "MDL_Molecule::_read_v3000:not BEGIN ATOM '" << buffer << "'\n";
    return 0;
  }

// V3000 files can contain long symbols (atom lists), temporarily permit them

  int isave = atomic_symbols_can_have_arbitrary_length();

  set_atomic_symbols_can_have_arbitrary_length(1);

  int rc = _read_v3000(input, na, nb);

  set_atomic_symbols_can_have_arbitrary_length(isave);

  return rc;
}
/*
  Read the ATOM and BOND blocks of a V3000 connection table.
  NA and NB are the counts already parsed from the COUNTS record;
  the molecule and query arrays have already been sized.
  Returns 1 on success, 0 on any parse failure.
*/

int
MDL_Molecule::_read_v3000(iwstring_data_source & input,
                          int na,
                          int nb)
{
  MDL_File_Supporting_Material * mdlfos = global_default_MDL_File_Supporting_Material();

  IWString buffer;

// Atom block: NA records of the form 'M V30 <ndx> <symbol> x y z ...'

  for (int i = 0; i < na; i++)
  {
    if (! read_next_v30_record(input, buffer))
    {
      cerr << "MDL_Molecule::_read_v3000:premature eof during atoms, expected " << na << endl;
      return 0;
    }

    if (! _parse_v30_atom_record(buffer, 0, *mdlfos))
    {
      cerr << "MDL_Molecule::_read_v3000:invalid atom record '" << buffer << "'\n";
      return 0;
    }

    const_IWSubstring token = buffer.word(2);   // atom index, 1 based

    int ndx;
    if (! token.numeric_value(ndx) || ndx < 1 || ndx > na)   // how could that happen?
      return 0;

    ndx--;

    token = buffer.word(3);   // atomic symbol, possibly an atom list

    if (! _convert_symbol_to_element(ndx, token))
    {
      cerr << "MDL_Molecule::_read_v3000:invalid atomic symbol '" << token << "'\n";
      cerr << buffer << endl;
      return 0;
    }

    if (! _look_for_atom_query_directives(ndx, buffer))
    {
      cerr << "MDL_Molecule::_read_v3000:invalid query specifiers '" << buffer << "'\n";
      return 0;
    }
  }

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::_read_v3000:premature eof after atoms\n";
    return 0;
  }

  if (! buffer.starts_with("M V30 END ATOM"))
  {
    cerr << "MDL_Molecule::_read_v3000:should be END ATOM '" << buffer << "'\n";
    return 0;
  }

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::_read_v3000:premature eof at bonds\n";
    return 0;
  }

  if (! buffer.starts_with("M V30 BEGIN BOND"))
  {
    cerr << "MDL_Molecule::_read_v3000:should be BEGIN BOND '" << buffer << "'\n";
    return 0;
  }

// aromatic_atom is indexed by atom number, so NA entries.  aromatic_bond
// is indexed by bond number in the loop below, so it must have NB entries.
// It was previously allocated with NA entries, overflowing whenever a
// molecule has more bonds than atoms (any ring system).
// NOTE(review): nb may be zero - assumed new_int tolerates a zero request.

  int * aromatic_atom = new_int(na); std::unique_ptr<int[]> free_aromatic_atom(aromatic_atom);
  int * aromatic_bond = new_int(nb); std::unique_ptr<int[]> free_aromatic_bond(aromatic_bond);

  for (int i = 0; i < nb; i++)
  {
    if (! read_next_v30_record(input, buffer))
    {
      cerr << "MDL_Molecule::_read_v3000:premature eof during bonds, expected " << nb << endl;
      return 0;
    }

    if (! _parse_v30_bond_record(buffer, aromatic_atom, aromatic_bond[i], 1))   // last arg means reading query file
    {
      cerr << "MDL_Molecule::_read_v3000:cannot parse bond record\n";
      cerr << buffer << endl;
      return 0;
    }

    _mdl_bond[i]->set_btype(bondi(i)->btype());   // keep query bond in sync with molecule bond

    if (! _look_for_bond_query_directives(i, buffer))
    {
      cerr << "MDL_Molecule::_read_v3000:invalid bond directives\n";
      cerr << buffer << endl;
      return 0;
    }
  }

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::_read_v3000:premature eof after bonds\n";
    return 0;
  }

  if (! buffer.starts_with("M V30 END BOND"))
  {
    cerr << "MDL_Molecule::_read_v3000:should be END BOND '" << buffer << "'\n";
    return 0;
  }

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::_read_v3000:premature eof near end\n";
    return 0;
  }

// Skip any optional blocks until the CTAB is closed

  while (! buffer.starts_with("M V30 END CTAB"))
  {
    if (! input.next_record(buffer))
    {
      cerr << "MDL_Molecule::_read_v3000:no END CTAB found\n";
      return 0;
    }
  }

  if (! input.next_record(buffer))
  {
    cerr << "MDL_Molecule::_read_v3000:end record missing\n";
    return 0;
  }

  if (! buffer.starts_with("M END"))
  {
    cerr << "MDL_Molecule::_read_v3000:not M END '" << buffer << "'\n";
    return 0;
  }

  return 1;
}
/*
  Placeholder - V3000 atom records are actually handled by
  _parse_v30_atom_record and _look_for_atom_query_directives.
*/

int
MDL_Molecule::_parse_v3000_atom_record (const const_IWSubstring & buffer)
{
  return 1;   // nothing to do
}
/*
  Interpret the symbol column of a V3000 atom record for atom NDX.
  A one or two character token is an element symbol; anything longer
  is assumed to be an atom list specification.
*/

int
MDL_Molecule::_convert_symbol_to_element (int ndx,
                                          const IWString & s)
{
  if (s.length() <= 2)
  {
    const Element * e = get_element_from_symbol_no_case_conversion(s);

    if (NULL == e)
    {
      cerr << "MDL_Molecule::_convert_symbol_to_element:cannot get element for '" << s << "'\n";
      return 0;
    }

    Molecule::set_element(ndx, e);

    return 1;
  }

  return _mdl_atom[ndx]->initialise_atom_list_from_symbol(s);
}
//M V30 3 N 2.075 -0.975 0 0 SUBST=1
int
MDL_Molecule::_look_for_atom_query_directives (int ndx,
const IWString & buffer)
{
int i = 0;
const_IWSubstring token;
int col = 0;
MDL_Atom_Data * mad = _mdl_atom[ndx];
while (buffer.nextword(token, i))
{
col++;
if (col < 7)
continue;
const_IWSubstring directive;
int v;
if (token.index('=') <= 0)
continue;
if (! token.split_into_directive_and_value(directive, '=', v))
continue;
if ("SUBST" == directive)
{
mad->set_substitution(v);
continue;
}
else if ("UNSAT" == directive)
{
mad->set_unsaturation(v);
continue;
}
else if ("RBCNT" == directive)
{
mad->set_ring_bond(v);
continue;
}
else
{
cerr << "MDL_Molecule::_look_for_query_directives:unrecognised directive '" << token << "'\n";
return 0;
}
}
return 1;
}
int
MDL_Molecule::_look_for_bond_query_directives (int ndx,
const IWString & buffer)
{
int i = 0;
const_IWSubstring token;
MDL_Bond_Data * b = _mdl_bond[ndx];
while (buffer.nextword(token, i))
{
if (token.index('=') <= 0)
continue;
const_IWSubstring directive;
int v;
if (! token.split_into_directive_and_value(directive, '=', v))
continue;
if ("TOPO" == directive)
{
b->set_bond_topology(v);
continue;
}
}
return 1;
}
/*
Atom lists cause problems because there is no atomic number
*/
int
MDL_Molecule::compute_aromaticity_handle_atom_lists ()
{
int matoms = natoms();
int rc = 0;
for (int i = 0; i < matoms; i++)
{
const MDL_Atom_Data * mdlat = mdl_atom_data(i);
const ISIS_Atom_List & alist = mdlat->atom_list();
if (! alist.active())
continue;
if (atomic_number(i) > 0)
continue;
set_element(i, alist.elementi(0));
rc++;
}
if (rc)
compute_aromaticity();
else
compute_aromaticity_if_needed();
return 1;
}
/*
  Set the substitution query value for atom A.
*/

void
MDL_Molecule::set_substitution (const atom_number_t a, const int s)
{
  _mdl_atom[a]->set_substitution(s);

  return;
}
/*
  Set the ring bond count query value for atom A.
*/

void
MDL_Molecule::set_ring_bond (const atom_number_t a, const int s)
{
  _mdl_atom[a]->set_ring_bond(s);

  return;
}
/*
  Restore all file scope state to its default, startup values -
  used by tests and by programs that process multiple inputs.
*/

void
reset_mdl_molecule_file_scope_variables()
{
  element_a = NULL;
  element_q = NULL;
  element_l = NULL;

  convert_a_and_q_atoms_to_atom_lists = 0;
  convert_not_atom_lists_to_organic_lists = 0;

  return;
}
|
tkamata-test/ydk-py
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_arp_cfg.py
|
""" Cisco_IOS_XR_ipv4_arp_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR ipv4\-arp package configuration.
This module contains definitions
for the following management objects\:
arp\: ARP configuration
arpgmp\: arpgmp
arp\-redundancy\: arp redundancy
This YANG module augments the
Cisco\-IOS\-XR\-ifmgr\-cfg
module with configuration data.
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class ArpEncapEnum(Enum):
    """
    ArpEncapEnum

    Arp encap

    .. data:: arpa = 1

    	Encapsulation type ARPA

    .. data:: srp = 4

    	Encapsulation type SRP

    .. data:: srpa = 5

    	Encapsulation type SRPA

    .. data:: srpb = 6

    	Encapsulation type SRPB

    """

    # Numeric values come from the YANG enumeration in the model.
    arpa = 1

    srp = 4

    srpa = 5

    srpb = 6


    @staticmethod
    def _meta_info():
        # Meta information is generated alongside the model bindings.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
        return meta._meta_table['ArpEncapEnum']
class ArpEntryEnum(Enum):
    """
    ArpEntryEnum

    Arp entry

    .. data:: static = 0

    	Static ARP entry type

    .. data:: alias = 1

    	Alias ARP entry type

    """

    # Numeric values come from the YANG enumeration in the model.
    static = 0

    alias = 1


    @staticmethod
    def _meta_info():
        # Meta information is generated alongside the model bindings.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
        return meta._meta_table['ArpEntryEnum']
class Arp(object):
    """
    ARP configuration

    .. attribute:: inner_cos

    	Configure inner cos values for arp packets
    	**type**\: int
    	**range:** 0..7

    .. attribute:: max_entries

    	Configure maximum number of safe ARP entries per line card
    	**type**\: int
    	**range:** 1..256000

    .. attribute:: outer_cos

    	Configure outer cos values for arp packets
    	**type**\: int
    	**range:** 0..7

    """

    _prefix = 'ipv4-arp-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # All leaves default to None (unset) until assigned by the user.
        self.inner_cos = None
        self.max_entries = None
        self.outer_cos = None

    @property
    def _common_path(self):
        # Absolute XPath of this top-level container.
        return '/Cisco-IOS-XR-ipv4-arp-cfg:arp'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # True when any leaf has been set.
        if not self.is_config():
            return False
        if self.inner_cos is not None:
            return True
        if self.max_entries is not None:
            return True
        if self.outer_cos is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
        return meta._meta_table['Arp']['meta_info']
class Arpgmp(object):
    """
    arpgmp

    .. attribute:: vrf

    	Per VRF configuration, for the default VRF use 'default'
    	**type**\: list of :py:class:`Vrf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.Arpgmp.Vrf>`

    """

    _prefix = 'ipv4-arp-cfg'
    _revision = '2015-11-09'

    def __init__(self):
        # vrf is a keyed YANG list - one entry per VRF name.
        self.vrf = YList()
        self.vrf.parent = self
        self.vrf.name = 'vrf'


    class Vrf(object):
        """
        Per VRF configuration, for the default VRF use
        'default'

        .. attribute:: vrf_name  <key>

        	VRF name
        	**type**\: str
        	**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+

        .. attribute:: entries

        	ARP static and alias entry configuration
        	**type**\: :py:class:`Entries <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.Arpgmp.Vrf.Entries>`

        """

        _prefix = 'ipv4-arp-cfg'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.vrf_name = None
            self.entries = Arpgmp.Vrf.Entries()
            self.entries.parent = self


        class Entries(object):
            """
            ARP static and alias entry configuration

            .. attribute:: entry

            	ARP static and alias entry configuration item
            	**type**\: list of :py:class:`Entry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.Arpgmp.Vrf.Entries.Entry>`

            """

            _prefix = 'ipv4-arp-cfg'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.entry = YList()
                self.entry.parent = self
                self.entry.name = 'entry'


            class Entry(object):
                """
                ARP static and alias entry configuration item

                .. attribute:: address  <key>

                	IP Address
                	**type**\: str
                	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                .. attribute:: encapsulation

                	Encapsulation type
                	**type**\: :py:class:`ArpEncapEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpEncapEnum>`

                .. attribute:: entry_type

                	Entry type
                	**type**\: :py:class:`ArpEntryEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpEntryEnum>`

                .. attribute:: interface

                	Interface name
                	**type**\: str
                	**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)

                .. attribute:: mac_address

                	MAC Address
                	**type**\: str
                	**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}

                """

                _prefix = 'ipv4-arp-cfg'
                _revision = '2015-11-09'

                def __init__(self):
                    self.parent = None
                    self.address = None
                    self.encapsulation = None
                    self.entry_type = None
                    self.interface = None
                    self.mac_address = None

                @property
                def _common_path(self):
                    # Path of this list entry; needs parent and the list key.
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    if self.address is None:
                        raise YPYModelError('Key property address is None')

                    return self.parent._common_path +'/Cisco-IOS-XR-ipv4-arp-cfg:entry[Cisco-IOS-XR-ipv4-arp-cfg:address = ' + str(self.address) + ']'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    if self.address is not None:
                        return True
                    if self.encapsulation is not None:
                        return True
                    if self.entry_type is not None:
                        return True
                    if self.interface is not None:
                        return True
                    if self.mac_address is not None:
                        return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
                    return meta._meta_table['Arpgmp.Vrf.Entries.Entry']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')

                return self.parent._common_path +'/Cisco-IOS-XR-ipv4-arp-cfg:entries'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.entry is not None:
                    for child_ref in self.entry:
                        if child_ref._has_data():
                            return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
                return meta._meta_table['Arpgmp.Vrf.Entries']['meta_info']

        @property
        def _common_path(self):
            if self.vrf_name is None:
                raise YPYModelError('Key property vrf_name is None')

            return '/Cisco-IOS-XR-ipv4-arp-cfg:arpgmp/Cisco-IOS-XR-ipv4-arp-cfg:vrf[Cisco-IOS-XR-ipv4-arp-cfg:vrf-name = ' + str(self.vrf_name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.vrf_name is not None:
                return True
            if self.entries is not None and self.entries._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
            return meta._meta_table['Arpgmp.Vrf']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-ipv4-arp-cfg:arpgmp'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.vrf is not None:
            for child_ref in self.vrf:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
        return meta._meta_table['Arpgmp']['meta_info']
class ArpRedundancy(object):
"""
arp redundancy
.. attribute:: redundancy
Configure parameter for ARP Geo redundancy
**type**\: :py:class:`Redundancy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy.Redundancy>`
**presence node**\: True
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.redundancy = None
class Redundancy(object):
"""
Configure parameter for ARP Geo redundancy
.. attribute:: enable
Enable Configure parameter for ARP Geo redundancy. Deletion of this object also causes deletion of all associated objects under ArpRedundancy
**type**\: :py:class:`Empty<ydk.types.Empty>`
**mandatory**\: True
.. attribute:: groups
Table of Group
**type**\: :py:class:`Groups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy.Redundancy.Groups>`
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.enable = None
self.groups = ArpRedundancy.Redundancy.Groups()
self.groups.parent = self
class Groups(object):
"""
Table of Group
.. attribute:: group
None
**type**\: list of :py:class:`Group <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy.Redundancy.Groups.Group>`
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.group = YList()
self.group.parent = self
self.group.name = 'group'
class Group(object):
"""
None
.. attribute:: group_id <key>
Group ID
**type**\: int
**range:** 1..32
.. attribute:: interface_list
List of Interfaces for this Group
**type**\: :py:class:`InterfaceList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy.Redundancy.Groups.Group.InterfaceList>`
**presence node**\: True
.. attribute:: peers
Table of Peer
**type**\: :py:class:`Peers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy.Redundancy.Groups.Group.Peers>`
.. attribute:: source_interface
Interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.group_id = None
self.interface_list = None
self.peers = ArpRedundancy.Redundancy.Groups.Group.Peers()
self.peers.parent = self
self.source_interface = None
class Peers(object):
"""
Table of Peer
.. attribute:: peer
None
**type**\: list of :py:class:`Peer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy.Redundancy.Groups.Group.Peers.Peer>`
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.peer = YList()
self.peer.parent = self
self.peer.name = 'peer'
class Peer(object):
"""
None
.. attribute:: prefix_string <key>
Neighbor IPv4 address
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.prefix_string = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.prefix_string is None:
raise YPYModelError('Key property prefix_string is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-arp-cfg:peer[Cisco-IOS-XR-ipv4-arp-cfg:prefix-string = ' + str(self.prefix_string) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.prefix_string is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
return meta._meta_table['ArpRedundancy.Redundancy.Groups.Group.Peers.Peer']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-arp-cfg:peers'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.peer is not None:
for child_ref in self.peer:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
return meta._meta_table['ArpRedundancy.Redundancy.Groups.Group.Peers']['meta_info']
class InterfaceList(object):
"""
List of Interfaces for this Group
.. attribute:: enable
Enable List of Interfaces for this Group. Deletion of this object also causes deletion of all associated objects under InterfaceList
**type**\: :py:class:`Empty<ydk.types.Empty>`
**mandatory**\: True
.. attribute:: interfaces
Table of Interface
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy.Redundancy.Groups.Group.InterfaceList.Interfaces>`
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.enable = None
self.interfaces = ArpRedundancy.Redundancy.Groups.Group.InterfaceList.Interfaces()
self.interfaces.parent = self
class Interfaces(object):
"""
Table of Interface
.. attribute:: interface
Interface for this Group
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_arp_cfg.ArpRedundancy.Redundancy.Groups.Group.InterfaceList.Interfaces.Interface>`
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface = YList()
self.interface.parent = self
self.interface.name = 'interface'
class Interface(object):
"""
Interface for this Group
.. attribute:: interface_name <key>
Interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: interface_id
Interface Id for the interface
**type**\: int
**range:** 1..65535
**mandatory**\: True
"""
_prefix = 'ipv4-arp-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
self.interface_id = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-arp-cfg:interface[Cisco-IOS-XR-ipv4-arp-cfg:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.interface_id is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
return meta._meta_table['ArpRedundancy.Redundancy.Groups.Group.InterfaceList.Interfaces.Interface']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-arp-cfg:interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
return meta._meta_table['ArpRedundancy.Redundancy.Groups.Group.InterfaceList.Interfaces']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-arp-cfg:interface-list'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.enable is not None:
return True
if self.interfaces is not None and self.interfaces._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
return meta._meta_table['ArpRedundancy.Redundancy.Groups.Group.InterfaceList']['meta_info']
@property
def _common_path(self):
if self.group_id is None:
raise YPYModelError('Key property group_id is None')
return '/Cisco-IOS-XR-ipv4-arp-cfg:arp-redundancy/Cisco-IOS-XR-ipv4-arp-cfg:redundancy/Cisco-IOS-XR-ipv4-arp-cfg:groups/Cisco-IOS-XR-ipv4-arp-cfg:group[Cisco-IOS-XR-ipv4-arp-cfg:group-id = ' + str(self.group_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.group_id is not None:
return True
if self.interface_list is not None and self.interface_list._has_data():
return True
if self.peers is not None and self.peers._has_data():
return True
if self.source_interface is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
return meta._meta_table['ArpRedundancy.Redundancy.Groups.Group']['meta_info']
# Generated YDK binding fragment: ArpRedundancy.Redundancy.Groups
@property
def _common_path(self):
    # Absolute path: this container sits directly under the redundancy node.
    return '/Cisco-IOS-XR-ipv4-arp-cfg:arp-redundancy/Cisco-IOS-XR-ipv4-arp-cfg:redundancy/Cisco-IOS-XR-ipv4-arp-cfg:groups'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # True when any entry of the `group` list carries data.
    if not self.is_config():
        return False
    if self.group is not None:
        for child_ref in self.group:
            if child_ref._has_data():
                return True
    return False

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
    return meta._meta_table['ArpRedundancy.Redundancy.Groups']['meta_info']
# Generated YDK binding fragment: ArpRedundancy.Redundancy
@property
def _common_path(self):
    return '/Cisco-IOS-XR-ipv4-arp-cfg:arp-redundancy/Cisco-IOS-XR-ipv4-arp-cfg:redundancy'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # Presence container: present flag, `enable` leaf, or populated groups.
    if not self.is_config():
        return False
    if self._is_presence:
        return True
    if self.enable is not None:
        return True
    if self.groups is not None and self.groups._has_data():
        return True
    return False

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
    return meta._meta_table['ArpRedundancy.Redundancy']['meta_info']
# Generated YDK binding fragment: ArpRedundancy (top-level container)
@property
def _common_path(self):
    return '/Cisco-IOS-XR-ipv4-arp-cfg:arp-redundancy'

def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    return True

def _has_data(self):
    # Delegates entirely to the single `redundancy` child container.
    if not self.is_config():
        return False
    if self.redundancy is not None and self.redundancy._has_data():
        return True
    return False

@staticmethod
def _meta_info():
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_arp_cfg as meta
    return meta._meta_table['ArpRedundancy']['meta_info']
|
rage/concepts
|
backend/src/index.js
|
import path from 'path'
import { GraphQLServer } from 'graphql-yoga'
import express from 'express'
import cors from 'cors'
import cookieParser from 'cookie-parser'
import './util/titleCase'
import { prisma } from '../schema/generated/prisma-client'
import { authenticate } from './middleware/authentication'
import { logError } from './util/errorLogger'
import resolvers from './resolvers'
import { progressAPI, pointsAPI } from './controllers/pointsAPI'
import { exportAPI, markdownExportAPI } from './controllers/exportAPI'
import { loginAPIRedirect, loginAPIAssert, loginAPIMetadata } from './controllers/loginAPI'
// GraphQL server options: endpoint paths and listen port (default 4000).
// Errors are logged through the shared error logger before being returned.
const options = {
  endpoint: '/graphql',
  playground: '/playground',
  subscriptions: '/subscription',
  port: process.env.PORT || 4000,
  formatError: logError
}

// GraphQL server wired to the generated schema; every resolver receives the
// prisma client plus the raw request in its context. The `authenticate`
// middleware runs before resolvers.
const server = new GraphQLServer({
  typeDefs: './schema/generated/schema.graphql',
  resolvers,
  context: req => ({
    prisma,
    ...req
  }),
  middlewares: [authenticate]
})

// Points for completions
server.express.get('/api/projects/:pid/courses/:cid/progress', cors(), progressAPI)
server.express.get('/api/projects/:pid/points', cors(), pointsAPI)
// Workspace export endpoints (JSON and markdown).
server.express.get('/api/workspace/:wid/export', cors(), exportAPI)
server.express.get('/api/workspace/:wid/markdown', cors(), markdownExportAPI)

// SAML API for Haka login
// Body parsing and cookie support must be registered before the SAML routes.
server.express.use(express.urlencoded({ extended: true }))
server.express.use(cookieParser())
server.express.get('/api/login', loginAPIRedirect)
server.express.post('/api/login/assert', loginAPIAssert)
// NOTE(review): SAML SP metadata is conventionally fetched with GET; this
// route is registered as POST — confirm against the IdP configuration.
server.express.post('/api/login/metadata', loginAPIMetadata)

// In production (or when FRONTEND_PATH is set) serve the built frontend and
// fall back to index.html so client-side routing works on refresh.
if (process.env.ENVIRONMENT === 'production' || process.env.FRONTEND_PATH) {
  const FRONTEND_PATH = process.env.FRONTEND_PATH || path.join(__dirname, '../../frontend/build')
  server.express.use(express.static(FRONTEND_PATH))
  server.express.get('*', (req, res) => {
    res.sendFile(path.join(FRONTEND_PATH, 'index.html'))
  })
}

server.start(options, () =>
  console.log(`Server is running on http://localhost:${process.env.PORT || 4000}`))
|
NTrevisani/cmssw
|
Geometry/HcalCommonData/src/HcalGeomParameters.cc
|
#include "Geometry/HcalCommonData/interface/HcalGeomParameters.h"

#include <algorithm>
#include <cmath>

#include "FWCore/MessageLogger/interface/MessageLogger.h"
#include "FWCore/Utilities/interface/Exception.h"
#include "DetectorDescription/Core/interface/DDutils.h"
#include "DetectorDescription/Core/interface/DDValue.h"
#include "DetectorDescription/Core/interface/DDFilter.h"
#include "DetectorDescription/Core/interface/DDSolid.h"
#include "DetectorDescription/Core/interface/DDConstant.h"
#include "DetectorDescription/Core/interface/DDVectorGetter.h"
#include "DetectorDescription/Core/interface/DDFilteredView.h"
#include "DetectorDescription/RegressionTest/interface/DDErrorDetection.h"
#include "CondFormats/GeometryObjects/interface/HcalParameters.h"
#include "CLHEP/Units/GlobalPhysicalConstants.h"
#include "CLHEP/Units/GlobalSystemOfUnits.h"
//#define EDM_ML_DEBUG
// Default constructor; only emits a trace message in debug builds.
HcalGeomParameters::HcalGeomParameters() {
#ifdef EDM_ML_DEBUG
  edm::LogVerbatim("HCalGeom") << "HcalGeomParameters::HcalGeomParameters ( const DDCompactView& cpv ) constructor";
#endif
}
// Destructor; only emits a trace message in debug builds.
HcalGeomParameters::~HcalGeomParameters() {
#ifdef EDM_ML_DEBUG
  edm::LogVerbatim("HCalGeom") << "HcalGeomParameters::destructed!!!";
#endif
}
// Appends the HO radial constants to |rHO|: first the minimum HO radius,
// then the eta boundary values, in order.
void HcalGeomParameters::getConstRHO(std::vector<double>& rHO) const {
  rHO.emplace_back(rminHO);
  for (const double etaValue : etaHO)
    rHO.emplace_back(etaValue);
}
std::vector<int> HcalGeomParameters::getModHalfHBHE(const int type) const {
std::vector<int> modHalf;
if (type == 0) {
modHalf.emplace_back(nmodHB);
modHalf.emplace_back(nzHB);
} else {
modHalf.emplace_back(nmodHE);
modHalf.emplace_back(nzHE);
}
return modHalf;
}
// Returns the index of the first occurrence of |element| in |array|, or
// array.size() as a "not found" sentinel (callers compare against size()).
// Uses std::find instead of a hand-rolled linear scan.
unsigned int HcalGeomParameters::find(int element, std::vector<int>& array) const {
  const auto pos = std::find(array.begin(), array.end(), element);
  return static_cast<unsigned int>(std::distance(array.begin(), pos));
}
// Pseudorapidity of the point (r, z): eta = -ln(tan(theta/2)) with
// theta = atan(r/z). Returns 0 when z == 0 to avoid division by zero.
double HcalGeomParameters::getEta(double r, double z) const {
  const double eta = (z != 0) ? -log(tan(0.5 * atan(r / z))) : 0;
#ifdef EDM_ML_DEBUG
  edm::LogVerbatim("HCalGeom") << "HcalGeomParameters::getEta " << r << " " << z << " ==> " << eta;
#endif
  return eta;
}
// Walks the DD filtered view and harvests HB/HE/HF geometry: per-layer mean
// radii / z positions, layer thicknesses, module and half-sector counts (from
// copy numbers), HO z planes and the HF half-length. Results are written into
// |php| and into data members (nzHB/nmodHB, nzHE/nmodHE, rminHO/etaHO, dzVcal).
void HcalGeomParameters::loadGeometry(const DDFilteredView& _fv, HcalParameters& php) {
  DDFilteredView fv = _fv;
  bool dodet = true, hf = false;
  // Per-layer accumulators, indexed by layer number (up to 20 layers):
  // counts (ib/ie), summed radii/z (rb/ze) and thicknesses (thkb/thke).
  std::vector<double> rb(20, 0.0), ze(20, 0.0), thkb(20, -1.0), thke(20, -1.0);
  std::vector<int> ib(20, 0), ie(20, 0);
  std::vector<int> izb, phib, ize, phie;
  std::vector<double> rxb;
#ifdef EDM_ML_DEBUG
  std::vector<double> rminHE(20, 0.0), rmaxHE(20, 0.0);
#endif
  php.rhoxHB.clear();
  php.zxHB.clear();
  php.dyHB.clear();
  php.dxHB.clear();
  php.layHB.clear();
  php.layHE.clear();
  php.zxHE.clear();
  php.rhoxHE.clear();
  php.dyHE.clear();
  php.dx1HE.clear();
  php.dx2HE.clear();
  dzVcal = -1.;
  while (dodet) {
    DDTranslation t = fv.translation();
    std::vector<int> copy = fv.copyNumbers();
    const DDSolid& sol = fv.logicalPart().solid();
    // Detector type and layer number are encoded in the last copy numbers.
    int idet = 0, lay = -1;
    int nsiz = (int)(copy.size());
    if (nsiz > 0)
      lay = copy[nsiz - 1] / 10;
    if (nsiz > 1)
      idet = copy[nsiz - 2] / 1000;
    // Half-dimensions of the current solid (box, trapezoid or tube).
    double dx = 0, dy = 0, dz = 0, dx1 = 0, dx2 = 0;
#ifdef EDM_ML_DEBUG
    double alp(0);
#endif
    if (sol.shape() == DDSolidShape::ddbox) {
      const DDBox& box = static_cast<DDBox>(fv.logicalPart().solid());
      dx = box.halfX();
      dy = box.halfY();
      dz = box.halfZ();
    } else if (sol.shape() == DDSolidShape::ddtrap) {
      const DDTrap& trp = static_cast<DDTrap>(fv.logicalPart().solid());
      dx1 = trp.x1();
      dx2 = trp.x2();
      dx = 0.25 * (trp.x1() + trp.x2() + trp.x3() + trp.x4());
      dy = 0.5 * (trp.y1() + trp.y2());
      dz = trp.halfZ();
#ifdef EDM_ML_DEBUG
      alp = 0.5 * (trp.alpha1() + trp.alpha2());
#endif
    } else if (sol.shape() == DDSolidShape::ddtubs) {
      const DDTubs& tub = static_cast<DDTubs>(fv.logicalPart().solid());
      dx = tub.rIn();
      dy = tub.rOut();
      dz = tub.zhalf();
    }
    if (idet == 3) {
      // HB
#ifdef EDM_ML_DEBUG
      edm::LogVerbatim("HCalGeom") << "HB " << sol.name() << " Shape " << sol.shape() << " Layer " << lay << " R "
                                   << t.Rho();
#endif
      if (lay >= 0 && lay < 20) {
        ib[lay]++;
        rb[lay] += t.Rho();
        if (thkb[lay] <= 0) {
          if (lay < 17)
            thkb[lay] = dx;
          else
            thkb[lay] = std::min(dx, dy);
        }
        if (lay < 17) {
          // Record each distinct radius only once (0.01 tolerance).
          bool found = false;
          for (double k : rxb) {
            if (std::abs(k - t.Rho()) < 0.01) {
              found = true;
              break;
            }
          }
          if (!found) {
            rxb.emplace_back(t.Rho());
            php.rhoxHB.emplace_back(t.Rho() * std::cos(t.phi()));
            php.zxHB.emplace_back(std::abs(t.z()));
            php.dyHB.emplace_back(2. * dy);
            php.dxHB.emplace_back(2. * dz);
            php.layHB.emplace_back(lay);
          }
        }
      }
      if (lay == 2) {
        // Collect distinct half-sector (iz) and module (fi) copy numbers.
        int iz = copy[nsiz - 5];
        int fi = copy[nsiz - 4];
        unsigned int it1 = find(iz, izb);
        if (it1 == izb.size())
          izb.emplace_back(iz);
        unsigned int it2 = find(fi, phib);
        if (it2 == phib.size())
          phib.emplace_back(fi);
      }
      if (lay == 18) {
        // HO scintillator layer: collect the sorted list of z planes.
        int ifi = -1, ich = -1;
        if (nsiz > 2)
          ifi = copy[nsiz - 3];
        if (nsiz > 3)
          ich = copy[nsiz - 4];
        double z1 = std::abs((t.z()) + dz);
        double z2 = std::abs((t.z()) - dz);
        if (std::abs(z1 - z2) < 0.01)
          z1 = 0;
        if (ifi == 1 && ich == 4) {
          if (z1 > z2) {
            double tmp = z1;
            z1 = z2;
            z2 = tmp;
          }
          // Insert the (z1, z2) pair keeping php.zHO sorted; skip duplicates.
          bool sok = true;
          for (unsigned int kk = 0; kk < php.zHO.size(); kk++) {
            if (std::abs(z2 - php.zHO[kk]) < 0.01) {
              sok = false;
              break;
            } else if (z2 < php.zHO[kk]) {
              php.zHO.resize(php.zHO.size() + 2);
              for (unsigned int kz = php.zHO.size() - 1; kz > kk + 1; kz = kz - 2) {
                php.zHO[kz] = php.zHO[kz - 2];
                php.zHO[kz - 1] = php.zHO[kz - 3];
              }
              php.zHO[kk + 1] = z2;
              php.zHO[kk] = z1;
              sok = false;
              break;
            }
          }
          if (sok) {
            php.zHO.emplace_back(z1);
            php.zHO.emplace_back(z2);
          }
#ifdef EDM_ML_DEBUG
          edm::LogVerbatim("HCalGeom") << "Detector " << idet << " Lay " << lay << " fi " << ifi << " " << ich << " z "
                                       << z1 << " " << z2;
#endif
        }
      }
    } else if (idet == 4) {
      // HE
#ifdef EDM_ML_DEBUG
      edm::LogVerbatim("HCalGeom") << "HE " << sol.name() << " Shape " << sol.shape() << " Layer " << lay << " Z "
                                   << t.z();
#endif
      if (lay >= 0 && lay < 20) {
        ie[lay]++;
        ze[lay] += std::abs(t.z());
        if (thke[lay] <= 0)
          thke[lay] = dz;
#ifdef EDM_ML_DEBUG
        double rinHE = t.Rho() * cos(alp) - dy;
        double routHE = t.Rho() * cos(alp) + dy;
        rminHE[lay] += rinHE;
        rmaxHE[lay] += routHE;
#endif
        // Record each distinct |z| only once (0.01 tolerance).
        bool found = false;
        for (double k : php.zxHE) {
          if (std::abs(k - std::abs(t.z())) < 0.01) {
            found = true;
            break;
          }
        }
        if (!found) {
          php.zxHE.emplace_back(std::abs(t.z()));
          php.rhoxHE.emplace_back(t.Rho() * std::cos(t.phi()));
          php.dyHE.emplace_back(dy * std::cos(t.phi()));
          // Correct trapezoid half-widths for the 10-degree sector geometry.
          dx1 -= 0.5 * (t.rho() - dy) * std::cos(t.phi()) * std::tan(10 * CLHEP::deg);
          dx2 -= 0.5 * (t.rho() + dy) * std::cos(t.phi()) * std::tan(10 * CLHEP::deg);
          php.dx1HE.emplace_back(-dx1);
          php.dx2HE.emplace_back(-dx2);
          php.layHE.emplace_back(lay);
        }
      }
      if (copy[nsiz - 1] == 21 || copy[nsiz - 1] == 71) {
        // Collect distinct half-sector (iz) and module (fi) copy numbers.
        int iz = copy[nsiz - 7];
        int fi = copy[nsiz - 5];
        unsigned int it1 = find(iz, ize);
        if (it1 == ize.size())
          ize.emplace_back(iz);
        unsigned int it2 = find(fi, phie);
        if (it2 == phie.size())
          phie.emplace_back(fi);
      }
    } else if (idet == 5) {
      // HF
      if (!hf) {
        // Extract the HF half-length once, from the first HF solid found.
        const std::vector<double>& paras = sol.parameters();
#ifdef EDM_ML_DEBUG
        edm::LogVerbatim("HCalGeom") << "HF " << sol.name() << " Shape " << sol.shape() << " Z " << t.z() << " with "
                                     << paras.size() << " Parameters";
        for (unsigned j = 0; j < paras.size(); j++)
          edm::LogVerbatim("HCalGeom") << "HF Parameter[" << j << "] = " << paras[j];
#endif
        if (sol.shape() == DDSolidShape::ddpolycone_rrz) {
          int nz = (int)(paras.size()) - 3;
          dzVcal = 0.5 * (paras[nz] - paras[3]);
          hf = true;
        } else if (sol.shape() == DDSolidShape::ddtubs || sol.shape() == DDSolidShape::ddcons) {
          dzVcal = paras[0];
          hf = true;
        }
      }
#ifdef EDM_ML_DEBUG
    } else {
      // Debug-only branch: log volumes that belong to none of HB/HE/HF.
      edm::LogVerbatim("HCalGeom") << "Unknown Detector " << idet << " for " << sol.name() << " Shape " << sol.shape()
                                   << " R " << t.Rho() << " Z " << t.z();
#endif
    }
    dodet = fv.next();
  }
  // Average the accumulated radii/z per layer and find the highest occupied
  // layer index for barrel (ibmx) and endcap (iemx).
  int ibmx = 0, iemx = 0;
  for (int i = 0; i < 20; i++) {
    if (ib[i] > 0) {
      rb[i] /= (double)(ib[i]);
      ibmx = i + 1;
    }
    if (ie[i] > 0) {
      ze[i] /= (double)(ie[i]);
      iemx = i + 1;
    }
#ifdef EDM_ML_DEBUG
    if (ie[i] > 0) {
      rminHE[i] /= (double)(ie[i]);
      rmaxHE[i] /= (double)(ie[i]);
    }
    edm::LogVerbatim("HCalGeom") << "Index " << i << " Barrel " << ib[i] << " " << rb[i] << " Endcap " << ie[i] << " "
                                 << ze[i] << ":" << rminHE[i] << ":" << rmaxHE[i];
#endif
  }
  // Fill gaps in the low layers by copying from the next populated layer.
  for (int i = 4; i >= 0; i--) {
    if (ib[i] == 0) {
      rb[i] = rb[i + 1];
      thkb[i] = thkb[i + 1];
    }
    if (ie[i] == 0) {
      ze[i] = ze[i + 1];
      thke[i] = thke[i + 1];
    }
#ifdef EDM_ML_DEBUG
    if (ib[i] == 0 || ie[i] == 0)
      edm::LogVerbatim("HCalGeom") << "Index " << i << " Barrel " << ib[i] << " " << rb[i] << " Endcap " << ie[i] << " "
                                   << ze[i];
#endif
  }
#ifdef EDM_ML_DEBUG
  for (unsigned int k = 0; k < php.layHB.size(); ++k)
    edm::LogVerbatim("HCalGeom") << "HB: " << php.layHB[k] << " R " << rxb[k] << " " << php.rhoxHB[k] << " Z "
                                 << php.zxHB[k] << " DY " << php.dyHB[k] << " DZ " << php.dxHB[k];
  for (unsigned int k = 0; k < php.layHE.size(); ++k)
    edm::LogVerbatim("HCalGeom") << "HE: " << php.layHE[k] << " R " << php.rhoxHE[k] << " Z " << php.zxHE[k]
                                 << " X1|X2 " << php.dx1HE[k] << "|" << php.dx2HE[k] << " DY " << php.dyHE[k];
  edm::LogVerbatim("HCalGeom") << "HcalGeomParameters: Maximum Layer for HB " << ibmx << " for HE " << iemx
                               << " extent " << dzVcal;
#endif
  // Copy the averaged per-layer values into the parameter struct.
  if (ibmx > 0) {
    php.rHB.resize(ibmx);
    php.drHB.resize(ibmx);
    for (int i = 0; i < ibmx; i++) {
      php.rHB[i] = rb[i];
      php.drHB[i] = thkb[i];
#ifdef EDM_ML_DEBUG
      edm::LogVerbatim("HCalGeom") << "HcalGeomParameters: php.rHB[" << i << "] = " << php.rHB[i] << " php.drHB[" << i
                                   << "] = " << php.drHB[i];
#endif
    }
  }
  if (iemx > 0) {
    php.zHE.resize(iemx);
    php.dzHE.resize(iemx);
    for (int i = 0; i < iemx; i++) {
      php.zHE[i] = ze[i];
      php.dzHE[i] = thke[i];
#ifdef EDM_ML_DEBUG
      edm::LogVerbatim("HCalGeom") << "HcalGeomParameters: php.zHE[" << i << "] = " << php.zHE[i] << " php.dzHE[" << i
                                   << "] = " << php.dzHE[i];
#endif
    }
  }
  nzHB = (int)(izb.size());
  nmodHB = (int)(phib.size());
#ifdef EDM_ML_DEBUG
  edm::LogVerbatim("HCalGeom") << "HcalGeomParameters::loadGeometry: " << nzHB << " barrel half-sectors";
  for (int i = 0; i < nzHB; i++)
    edm::LogVerbatim("HCalGeom") << "Section " << i << " Copy number " << izb[i];
  edm::LogVerbatim("HCalGeom") << "HcalGeomParameters::loadGeometry: " << nmodHB << " barrel modules";
  for (int i = 0; i < nmodHB; i++)
    edm::LogVerbatim("HCalGeom") << "Module " << i << " Copy number " << phib[i];
#endif
  nzHE = (int)(ize.size());
  nmodHE = (int)(phie.size());
#ifdef EDM_ML_DEBUG
  edm::LogVerbatim("HCalGeom") << "HcalGeomParameters::loadGeometry: " << nzHE << " endcap half-sectors";
  for (int i = 0; i < nzHE; i++)
    edm::LogVerbatim("HCalGeom") << "Section " << i << " Copy number " << ize[i];
  edm::LogVerbatim("HCalGeom") << "HcalGeomParameters::loadGeometry: " << nmodHE << " endcap modules";
  for (int i = 0; i < nmodHE; i++)
    edm::LogVerbatim("HCalGeom") << "Module " << i << " Copy number " << phie[i];
#endif
#ifdef EDM_ML_DEBUG
  edm::LogVerbatim("HCalGeom") << "HO has Z of size " << php.zHO.size();
  for (unsigned int kk = 0; kk < php.zHO.size(); kk++)
    edm::LogVerbatim("HCalGeom") << "ZHO[" << kk << "] = " << php.zHO[kk];
#endif
  // Derive the HO radial limits and eta boundaries when the geometry has the
  // HO layers (>= 18 barrel layers and at least 5 HO z planes).
  if (ibmx > 17 && php.zHO.size() > 4) {
    rminHO = php.rHB[17] - 100.0;
    etaHO[0] = getEta(0.5 * (php.rHB[17] + php.rHB[18]), php.zHO[1]);
    etaHO[1] = getEta(php.rHB[18] + php.drHB[18], php.zHO[2]);
    etaHO[2] = getEta(php.rHB[18] - php.drHB[18], php.zHO[3]);
    etaHO[3] = getEta(php.rHB[18] + php.drHB[18], php.zHO[4]);
  } else {
    rminHO = -1.0;
    etaHO[0] = etaHO[1] = etaHO[2] = etaHO[3] = 0;
  }
#ifdef EDM_ML_DEBUG
  edm::LogVerbatim("HCalGeom") << "HO Eta boundaries " << etaHO[0] << " " << etaHO[1] << " " << etaHO[2] << " "
                               << etaHO[3];
  edm::LogVerbatim("HCalGeom") << "HO Parameters " << rminHO << " " << php.zHO.size();
  for (unsigned int i = 0; i < php.zHO.size(); ++i)
    edm::LogVerbatim("HCalGeom") << " zho[" << i << "] = " << php.zHO[i];
#endif
}
|
ne-bknn/c_labs
|
1_lab/src/main.c
|
#include <stdio.h>
#include "lib.h"
#include <stdlib.h>
/*
 * Reads a jagged matrix from stdin (m rows, each with its own length <= n),
 * finds the rows with the most negative and most positive entries, swaps
 * them, and prints the matrix before and after.
 *
 * Fixes vs. previous version: the initial get_int() calls for m and n now
 * check for EOF (consistent with the per-row reads below), and the
 * "Recieved" typo in the inner-loop error message is corrected.
 */
int main() {
    // m - rows, n - columns
    int n, m, status;
    status = get_int(&m);
    if (status == 0) {
        print_error("Received unexpected EOF");
        exit(1);
    }
    print_debug("M: %d\n", m);
    status = get_int(&n);
    if (status == 0) {
        print_error("Received unexpected EOF");
        exit(1);
    }
    print_debug("N: %d\n", n);
    struct Matrix* pmatrix = create_matrix(m, n);
    if (NULL == pmatrix) {
        print_error("Failed to create matrix.");
        exit(1);
    }
    double temp;
    int len = -1;
    for (int i = 0; i < m; ++i) {
        /* Each row starts with its own length, then that many doubles. */
        status = get_int(&len);
        if (status == 0) {
            print_error("Received unexpected EOF");
            delete_matrix(pmatrix);
            exit(1);
        }
        if (len > n || len < 1) {
            print_error("Failed reading row length");
            delete_matrix(pmatrix);
            exit(1);
        }
        print_debug("Length of the row: %d\n", len);
        struct Row* row = create_row(len);
        if (NULL == row) {
            print_error("Failed to create row");
            delete_matrix(pmatrix);
            exit(1);
        }
        for (int j = 0; j < len; ++j) {
            status = get_double(&temp);
            if (status == 0) {
                print_error("Received unexpected EOF");
                delete_matrix(pmatrix);
                /* Row not yet owned by the matrix: free it manually. */
                free(row->numbers);
                free(row);
                exit(1);
            }
            row->numbers[j] = temp;
            if (temp < 0) {
                row->n_negative++;
            }
            if (temp > 0) {
                row->n_positive++;
            }
        }
        add_row_to_matrix(pmatrix, row, i);
    }
    print_matrix(pmatrix);
    print_pointers(pmatrix);
    /* Locate the rows with the most negatives / most positives and swap. */
    int index_neg_max = 0, index_pos_max = 0;
    get_min_max(pmatrix, &index_neg_max, &index_pos_max);
    print_debug("index_neg_max in main: %d\n", index_neg_max);
    print_debug("index_pos_max in main: %d\n", index_pos_max);
    swap(pmatrix, index_pos_max, index_neg_max);
    print_pointers(pmatrix);
    print_debug("%s", "===============================\n");
    print_matrix(pmatrix);
    delete_matrix(pmatrix);
    return 0;
}
|
FDA/precisionFDA
|
app/models/user.rb
|
<filename>app/models/user.rb
# == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# dxuser :string(255)
# private_files_project :string(255)
# public_files_project :string(255)
# private_comparisons_project :string(255)
# public_comparisons_project :string(255)
# schema_version :integer
# created_at :datetime not null
# updated_at :datetime not null
# org_id :integer
# first_name :string(255)
# last_name :string(255)
# email :string(255)
# normalized_email :string(255)
# last_login :datetime
# extras :text(65535)
# time_zone :string(255)
# review_app_developers_org :string(255) default("")
# user_state :integer default("enabled"), not null
# expiration :integer
# disable_message :string(255)
#
# Platform user. Wraps the DNAnexus identity (dxuser) plus local profile,
# org membership, admin roles, and the file/job synchronization helpers that
# reconcile local records with the DNAnexus platform state.
class User < ApplicationRecord
  include Auditor

  # E-mail validation pattern. Anchored with \A/\z instead of ^/$ so a value
  # containing embedded newlines cannot slip extra content past validation
  # (^/$ only match line boundaries in Ruby).
  EMAIL_FORMAT = %r{
    \A(([^<>()\[\]\\.,;:\s@\"]+(\.[^<>()\[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.
    [0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))\z
  }x.freeze

  # The "schema_version" field is used to denote the schema
  # associated with this user on the platform. Changing the
  # Rails schema (for example, adding a new whatever_project
  # field in user) should increase the current schema below
  # so that users who log in and whose schema_version is
  # lower will get migrated.
  CURRENT_SCHEMA = 1

  enum user_state: { enabled: 0, locked: 1, deactivated: 2 }

  # File states that must not be touched by the sync helpers below.
  SYNC_EXCLUDED_FILE_STATES = [
    UserFile::STATE_CLOSED,
    UserFile::STATE_COPYING,
    UserFile::STATE_REMOVING,
  ].freeze

  has_many :uploaded_files, class_name: "UserFile", dependent: :restrict_with_exception, as: "parent"
  has_many :user_files
  has_many :nodes
  has_many :assets
  has_many :comparisons
  has_many :notes
  has_many :apps
  has_many :app_series
  has_many :jobs
  has_many :discussions
  has_many :answers
  belongs_to :org
  has_many :licenses
  has_many :accepted_licenses
  has_many :admin_memberships, dependent: :destroy
  has_many :admin_groups, through: :admin_memberships
  has_many :space_memberships
  has_many :spaces, -> { where("space_memberships.active = ?", true) }, through: :space_memberships
  has_one :appathon
  has_many :meta_appathons
  has_one :expert, dependent: :destroy
  has_many :challenge_app_owners, class_name: "Challenge", foreign_key: "app_owner_id"
  has_many :submissions
  has_many :challenge_resources
  has_many :analyses
  has_one :usage_metric
  has_many :tasks
  has_many :workflows
  has_one :notification_preference
  has_one :profile, dependent: :destroy
  has_one :invitation, dependent: :nullify
  has_many :org_action_requests,
           inverse_of: :initiator,
           foreign_key: :initiator_id,
           dependent: :destroy

  store :extras, accessors: [:has_seen_guidelines], coder: JSON

  include Gravtastic
  gravtastic secure: true, default: "retro"

  acts_as_voter
  acts_as_followable
  acts_as_follower
  acts_as_tagger

  scope :real, -> { where.not(dxuser: CHALLENGE_BOT_DX_USER) }
  # NOTE(review): this scope selects users who HAVE logged in (last_login set),
  # i.e. it excludes pending users despite its name — confirm before renaming.
  scope :pending, -> { where.not(last_login: nil) }
  scope :belongs_to_org, ->(org_id) { where(org_id: org_id) }
  scope :site_admins, lambda {
    joins(:admin_groups).where(admin_groups: { role: AdminGroup::ROLE_SITE_ADMIN })
  }
  # Have the ability to create new review spaces and have full access to
  # activities available within reviewer and cooperative areas.
  scope :review_space_admins, lambda {
    joins(:admin_groups).where(admin_groups: { role: AdminGroup::ROLE_REVIEW_SPACE_ADMIN })
  }
  scope :challenge_admins, lambda {
    joins(:admin_groups).where(admin_groups: { role: AdminGroup::ROLE_CHALLENGE_ADMIN })
  }
  scope :challenge_evaluators, lambda {
    joins(:admin_groups).where(admin_groups: { role: AdminGroup::ROLE_CHALLENGE_EVALUATOR })
  }

  validates :first_name, length: { minimum: 2, message: "The first name must be at least two letters long." }, presence: true
  validates :last_name, length: { minimum: 2, message: "The last name must be at least two letters long." }, presence: true
  validates :email, presence: true, uniqueness: { case_sensitive: false }
  validates :disable_message, length: { maximum: 250, message: "Deactivation reason is too long (over 250 characters)" }

  # The special bot account used to run challenge evaluations.
  def self.challenge_bot
    find_by!(dxuser: CHALLENGE_BOT_DX_USER)
  end

  def active_leave_org_request
    org_action_requests.leave.find_by(org: org)
  end

  def challenge_bot?
    dxuser == CHALLENGE_BOT_DX_USER
  end

  # Local unique id ("user-<db id>") vs. platform id ("user-<dxuser>").
  def uid
    "user-#{id}"
  end

  def dxid
    "user-#{dxuser}"
  end

  def klass
    "user"
  end

  # Human-readable account status for the admin UI.
  def status
    if last_login.nil?
      "Pending"
    else
      case user_state
      when "enabled"
        "Active"
      when "deactivated"
        "Disabled"
      else
        "N/A"
      end
    end
  end

  # The challenge bot has no real org; return an empty Org stand-in.
  def org
    challenge_bot? ? Org.new : super
  end

  delegate :real_files, to: :user_files

  def singular?
    org_id.blank? || org.singular
  end

  def can_provision_accounts?
    !singular? && org.admin_id == id
  end

  def billto
    org.dxorg
  end

  # Returns all accessible space scopes.
  # @return [Array] Space scopes (UIDs).
  def space_uids
    Space.accessible_by(self).pluck(Arel.sql("concat('space-', spaces.id)"))
  end

  def activated?
    private_files_project.present? && last_login.present?
  end

  def username
    dxuser
  end

  def full_name
    "#{first_name} #{last_name}"
  end

  def initials
    "#{first_name[0]}#{last_name[0]}"
  end

  def select_text
    "#{username} (#{full_name.titleize}, #{org.name})"
  end

  def is_self(context)
    id == context.user_id
  end

  # True when the user's session exists and has not expired; any lookup
  # failure (e.g. no session) counts as logged out.
  def logged_in?
    !Session.find_by(user_id: id).expired?
  rescue StandardError
    false
  end

  def appathon_from_meta(meta_appathon)
    following_by_type("Appathon").find do |appathon|
      appathon.meta_appathon.uid == meta_appathon.uid
    end
  end

  def can_administer_site?
    admin_groups.any?(&:site?)
  end

  # Checks if a user can create spaces.
  # @return [Boolean] Returns true if a user can create spaces, false otherwise.
  def can_create_spaces?
    can_administer_site? || review_space_admin?
  end

  def is_challenge_evaluator?
    challenge_eval? || can_administer_site?
  end

  def challenge_eval?
    admin_groups.any?(&:challenge_eval?)
  end

  def review_space_admin?
    admin_groups.any?(&:space?)
  end

  # @param time_zone [String] new time zone
  def update_time_zone(time_zone)
    update(time_zone: time_zone) if Time.find_zone(time_zone)
  end

  def is_challenge_admin?
    can_administer_site? || admin_groups.any?(&:challenge_admin?)
  end

  def challenge_admin?
    admin_groups.any?(&:challenge_admin?)
  end

  # Selects users, according search string.
  # Users selected are the given org members and are not in 'pending' state.
  # @param search [String] - search string
  # @param org [String] - org handle string
  # @return [ActiveRecord::Relation<User>] - an array of users, searched by search string match.
  def self.org_members(search, org)
    org = Org.find_by(handle: org)
    org_id = org&.id
    query = "%" + sanitize_sql_like(search) + "%"
    users = User.arel_table
    where(users[:dxuser].matches(query).
      or(users[:first_name].matches(query)).
      or(users[:last_name].matches(query))).
      belongs_to_org(org_id).
      pending.
      limit(ORG_MEMBERS_SEARCH_LIMIT)
  end

  # Returns the match position (truthy) or nil, as =~ does.
  def self.validate_email(email)
    EMAIL_FORMAT =~ email
  end

  def self.validate_state(state, zip_code)
    Country.state_matches_zip_code?(state, zip_code)
  end

  def self.construct_username(first, last)
    "#{first.downcase.gsub(/[^a-z]/, '')}.#{last.downcase.gsub(/[^a-z]/, '')}"
  end

  def self.authserver_acceptable?(username)
    username.size >= 3 && username.size <= 255 && username =~ /^[a-z][0-9a-z_\.]{2,}$/
  end

  # Re-reads one challenge-bot file's state from the platform and updates it.
  def self.sync_challenge_file!(file_id)
    user = User.challenge_bot
    token = CHALLENGE_BOT_TOKEN
    file = user.uploaded_files.find(file_id) # Re-check file id
    return if SYNC_EXCLUDED_FILE_STATES.include?(file.state)

    result = DNAnexusAPI.new(token).call(
      "system",
      "describeDataObjects",
      objects: [file.dxid],
    )["results"][0]

    sync_file_state(result, file, user)
  end

  # Re-reads one of the current user's files from the platform and updates it.
  def self.sync_file!(context, file_id)
    return if context.guest?

    user = context.user
    file = user.uploaded_files.find(file_id) # Re-check file id
    token = context.token

    return if SYNC_EXCLUDED_FILE_STATES.include?(file.state)

    result = DNAnexusAPI.new(token).call(
      "system",
      "describeDataObjects",
      objects: [file.dxid],
    )["results"][0]

    sync_file_state(result, file, user)
  end

  def self.sync_files!(context)
    Auditor.suppress do
      return if context.guest?

      user = context.user
      token = context.token

      # Prefer "all.each_slice" to "find_batches" as the latter might not be transaction-friendly
      user.uploaded_files.
        where.not(state: SYNC_EXCLUDED_FILE_STATES).
        all.each_slice(1000) do |files|
        DNAnexusAPI.new(token).call(
          "system",
          "describeDataObjects",
          objects: files.map(&:dxid),
        )["results"].each_with_index do |result, i|
          sync_file_state(result, files[i], user)
        end
      end
    end
  end

  def self.sync_challenge_bot_files!(context)
    return if context.guest?

    user = User.challenge_bot
    token = CHALLENGE_BOT_TOKEN

    # Prefer "all.each_slice" to "find_batches" as the latter might not be transaction-friendly
    user.uploaded_files.where.not(state: SYNC_EXCLUDED_FILE_STATES).all.each_slice(1000) do |files|
      DNAnexusAPI.new(token).call(
        "system",
        "describeDataObjects",
        objects: files.map(&:dxid),
      )["results"].each_with_index do |result, i|
        sync_file_state(result, files[i], user)
      end
    end
  end

  def self.sync_asset!(context, file_id)
    return if context.guest?

    user = context.user
    token = context.token
    file = user.assets.find(file_id) # Re-check file id

    return if SYNC_EXCLUDED_FILE_STATES.include?(file.state)

    result = DNAnexusAPI.new(token).call(
      "system",
      "describeDataObjects",
      objects: [file.dxid],
    )["results"][0]

    sync_file_state(result, file, user)
  end

  def self.sync_assets!(context)
    return if context.guest?

    user = context.user
    token = context.token
    # Prefer "all.each_slice" to "find_batches" as the latter might not be transaction-friendly
    user.assets.where.not(state: SYNC_EXCLUDED_FILE_STATES).all.each_slice(1000) do |files|
      DNAnexusAPI.new(token).call("system", "describeDataObjects", objects: files.map(&:dxid))["results"].each_with_index do |result, i|
        sync_file_state(result, files[i], user)
      end
    end
  end

  def self.sync_challenge_job!(job_id)
    user = User.challenge_bot
    token = CHALLENGE_BOT_TOKEN
    job = user.jobs.find(job_id) # Re-check job id
    unless job.terminal?
      result = DNAnexusAPI.new(token).call("system", "findJobs",
        includeSubjobs: false,
        id: [job.dxid],
        project: user.private_files_project,
        parentJob: nil,
        parentAnalysis: nil,
        describe: true)["results"][0]
      sync_job_state(result, job, user, token)
    end
  end

  def self.sync_job!(context, job_id)
    return if context.guest?

    user = context.user
    token = context.token
    job = Job.accessible_by(context).find(job_id) # Re-check job id

    return if job.terminal?

    result = DNAnexusAPI.new(token).call("system", "findJobs",
      includeSubjobs: false,
      id: [job.dxid],
      project: job.project || user.private_files_project,
      parentJob: nil,
      parentAnalysis: job.analysis.try(:dxid),
      describe: true)["results"][0]

    return if result.blank?

    sync_job_state(result, job, user, token)
  end

  def self.sync_jobs!(context, jobs = Job.includes(:analysis), project = nil)
    return if context.guest?

    user_id = context.user_id
    token = context.token
    user = User.find(user_id)
    # Prefer "all.each_slice" to "find_batches" as the latter might not be transaction-friendly
    jobs.where(user_id: user_id).where.not(state: Job::TERMINAL_STATES).limit(SYNC_JOBS_LIMIT).each_slice(1000) do |jobs_batch|
      jobs_hash = jobs_batch.map { |j| [j.dxid, j] }.to_h
      jobs_hash.keys.each do |job_dxid|
        job_project = project || Job.find_by(dxid: job_dxid).project
        response = DNAnexusAPI.new(token).call(
          "system",
          "findJobs",
          includeSubjobs: false,
          id: [job_dxid],
          project: job_project || user.private_files_project,
          parentJob: nil,
          describe: true,
        )
        response["results"].each do |result|
          next if result.blank?

          sync_job_state(result, jobs_hash[result["id"]], user, token)
        end
      end
    end
  end

  def self.sync_challenge_jobs!
    user = User.challenge_bot
    # Prefer "all.each_slice" to "find_batches" as the latter might not be transaction-friendly
    Job.where(user_id: user.id).where.not(state: Job::TERMINAL_STATES).all.each_slice(1000) do |jobs|
      jobs_hash = jobs.map { |j| [j.dxid, j] }.to_h
      DNAnexusAPI.new(CHALLENGE_BOT_TOKEN).call("system", "findJobs",
        includeSubjobs: false,
        id: jobs_hash.keys,
        project: CHALLENGE_BOT_PRIVATE_FILES_PROJECT,
        parentJob: nil,
        parentAnalysis: nil,
        describe: true)["results"].each do |result|
        sync_job_state(result, jobs_hash[result["id"]], user, CHALLENGE_BOT_TOKEN)
      end
    end
  end

  def self.provision_params(id)
    user = find(id)
    {
      first_name: user.first_name,
      last_name: user.last_name,
      email: user.email,
    }
  end

  def self.user_helper_attribute(id, attribute)
    find(id)[attribute]
  end

  private

  # NOTE(review): `private` does not apply to `def self.` class methods, so the
  # helpers below are effectively public. Use `private_class_method` if they
  # must be hidden; left unchanged here to avoid breaking external callers.

  # Reconciles a local UserFile with the platform describe |result|: deletes
  # records the platform no longer knows, or advances the local state.
  def self.sync_file_state(result, file, user)
    if result["statusCode"] == 404
      # File was deleted by the DNAnexus stale file daemon; delete it on our end as well
      UserFile.transaction do
        # Use find_by(file.id) since file.reload may raise ActiveRecord::RecordNotFound
        file = UserFile.find_by(id: file.id)
        if file.present?
          Event::FileDeleted.create_for(file, user)
          file.destroy!
        end
      end
    elsif result["describe"].present?
      remote_state = result["describe"]["state"]
      # Only begin transaction if stale file detected
      if remote_state != file.state
        UserFile.transaction do
          old_file_state = file.state
          file.reload
          # confirm local file state is stale
          if remote_state != file.state
            if remote_state == UserFile::STATE_CLOSED
              file.update!(state: remote_state, file_size: result["describe"]["size"])
              Event::FileCreated.create_for(file, user)
            elsif remote_state == UserFile::STATE_CLOSING && file.state == UserFile::STATE_OPEN ||
                  remote_state == UserFile::STATE_ABANDONED
              file.update!(state: remote_state)
            else
              # NOTE we should never be here
              raise "File #{file.uid} had local state #{file.state} " \
                    "(previously #{old_file_state}) and remote state #{remote_state}"
            end
          end
        end
      end
    else
      # NOTE we should never be here
      raise "Unsupported response for file #{file.uid}: #{result}"
    end
  end

  # Reconciles a local Job with the platform describe |result|: on completion,
  # materializes the job's output files; otherwise just advances the state.
  def self.sync_job_state(result, job, user, token)
    state = result["describe"]["state"]
    # Only do anything if local job state is stale
    return if state == job.state

    if state == "done"
      # Use serialization to deep copy result since output will be modified
      output = JSON.parse(result["describe"]["output"].to_json)
      output_file_ids = []
      output_file_cache = []
      output.each_key do |key|
        # TODO: handle arrays later
        raise if output[key].is_a?(Array)
        next unless output[key].is_a?(Hash)
        raise unless output[key].key?("$dnanexus_link")

        output_file_id = output[key]["$dnanexus_link"]
        output_file_ids << output_file_id
        output[key] = output_file_id
      end
      output_file_ids.uniq!
      output_file_ids.each_slice(1000) do |slice_of_file_ids|
        DNAnexusAPI.new(token).call("system", "describeDataObjects", objects: slice_of_file_ids)["results"].each_with_index do |api_result, i|
          # Push avoids creating a new array as opposed to +/+=
          output_file_cache.push(
            dxid: slice_of_file_ids[i],
            project: job.project || user.private_files_project,
            name: api_result["describe"]["name"],
            state: "closed",
            description: "",
            user_id: user.id,
            scope: job.scope || "private",
            file_size: api_result["describe"]["size"],
            parent: job,
            parent_folder_id: job.local_folder_id,
          )
        end
      end

      # Job is done and outputs need to be created
      Job.transaction do
        job.reload
        if state != job.state
          output_file_cache.each do |output_file|
            user_file = UserFile.create!(output_file)
            if user_file.scope =~ /^space-(\d+)$/
              user_file.update(scoped_parent_folder_id: user_file.parent_folder_id)
            end
            Event::FileCreated.create_for(user_file, user)
          end
          job.run_outputs = output
          job.state = state
          job.describe = result["describe"]
          job.save!
          Event::JobClosed.create_for(job, user)
        end
      end

      if job.scope =~ /^space-(\d+)$/
        SpaceEventService.call(Regexp.last_match(1).to_i, user.id, nil, job, :job_completed)
      end
    else
      # Job state changed but not done (no outputs)
      Job.transaction do
        job.reload
        if state != job.state
          job.state = state
          job.describe = result["describe"]
          job.save!
          Event::JobClosed.create_for(job, user)
        end
      end
    end
  end

  # NOTE(review): declared after `private`; whether the alias keeps the
  # original's public visibility is Ruby-version dependent — confirm callers.
  alias_method :site_admin?, :can_administer_site?
end
|
Dobrynin91/jepria-showcase
|
module/JepRiaShowcase/App/gwt/src/java/com/technology/jep/jepriashowcase/allshopgoods/server/AllShopGoodsServerConstant.java
|
package com.technology.jep.jepriashowcase.allshopgoods.server;
import com.technology.jep.jepria.server.JepRiaServerConstant;
/**
 * Server-side constants for the AllShopGoods module.
 * Extends the shared JepRia server constants with module-specific values.
 */
public class AllShopGoodsServerConstant extends JepRiaServerConstant {
/** Fully qualified name of the resource bundle holding this module's text strings. */
public static final String RESOURCE_BUNDLE_NAME = "com.technology.jep.jepriashowcase.allshopgoods.shared.text.AllShopGoodsText";
/** JNDI name of the JDBC data source used by this module. */
public static final String DATA_SOURCE_JNDI_NAME = "jdbc/ITMDS";
}
|
moltenguy1/deusexmachina
|
DEM/Src/L3/AI/Memory/MemFactSmartObj.h
|
#pragma once
#ifndef __DEM_L3_AI_MEM_FACT_SMART_OBJ_H__
#define __DEM_L3_AI_MEM_FACT_SMART_OBJ_H__
#include <AI/Memory/MemFact.h>
#include <Data/StringID.h>
#include <mathlib/vector.h>
// Memory fact representing overseer, entity that controls this actor's behavoiur
namespace AI
{
typedef Ptr<class CStimulus> PStimulus;
// Memory fact recording a smart object the actor knows about: the stimulus
// that produced the knowledge and the smart object's type id.
// NOTE(review): the file header comment says "overseer", but the class name
// and TypeID field suggest smart objects; confirm which comment is stale.
class CMemFactSmartObj: public CMemFact
{
DeclareRTTI;
DeclareFactory(CMemFactSmartObj);
protected:
public:
// Stimulus this fact originated from. //???to CMemFact?
PStimulus pSourceStimulus; //???to CMemFact?
// Type identifier of the smart object.
CStrID TypeID;
//???Position to validate this fact by vision?
//???need validation or simple forgetting is enough?
// Compares this fact against a pattern, restricted to the fields in FieldMask.
virtual bool Match(const CMemFact& Pattern, CFlags FieldMask) const;
};
RegisterFactory(CMemFactSmartObj);
typedef Ptr<CMemFactSmartObj> PMemFactSmartObj;
}
#endif
|
jayfans3/example
|
liupeng/src/main/java/liupeng/Ch13_MobileMonitor/CreateHBaseIndex/ParseDataIntoHBase.java
|
package HBaseIndexAndQuery.CreateHBaseIndex;
import java.io.IOException;
import java.util.Calendar;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import HBaseIndexAndQuery.HBaseDao.HBaseDaoImp;
import HBaseIndexAndQuery.HBaseDao.HBaseFileID;
import HBaseIndexAndQuery.HBaseDao.HBaseFileLog;
import ParseCDR.ParseData;
import cn.cstor.cloud.hbase.cdr.CDRBase.CDRTable;
/**
 * Reads fixed-size CDR records from a file in HDFS and inserts them into
 * HBase through a protocol-specific {@code RequestParse} implementation
 * (BSSAP / IUCS / BICC).
 *
 * Typical usage (see {@link #main}): construct, {@code setCDRTableType()},
 * {@code addCDRInputFileName()}, then {@code doWithCDR()}.
 */
public class ParseDataIntoHBase implements ParseData{
// Absolute HDFS path of the input file, set via addCDRInputFileName().
private String fileName = null;
// Selected CDR protocol; determines the parser and record size.
private CDRTable.CDR type = null;
// Protocol-specific parser that writes records into HBase.
private RequestParse parse = null;
// Shared HBase DAO handed to the parser and the file log.
private HBaseDaoImp dao = null;
// HDFS client handle, opened once in the constructor.
private FileSystem fs = null;
// Size in bytes of a single CDR record for the selected protocol.
private int cdrSize = 0;
// Numeric id of the input file, stored with every inserted record.
private long fileNameID = 0;
// Name of a generated configuration file; written by the setter but never read here.
private String conFile ="";
// HDFS namenode URL. NOTE(review): hard-coded address; consider externalizing.
private String fsURL = "hdfs://192.168.1.8:9000";
/** Remembers the name of the generated configuration file (currently unused elsewhere in this class). */
public void setGenerateConfFile(String filename)
{
conFile = filename;
}
/**
 * Obtains the default HBase DAO and opens an HDFS FileSystem against the
 * hard-coded namenode. A connection failure is only logged, leaving
 * {@code fs} null; later calls would then fail with NullPointerException.
 */
public ParseDataIntoHBase( )
{
dao = HBaseDaoImp.GetDefaultDao();
Configuration fsConf = new Configuration();
fsConf.set("fs.default.name", fsURL);
try {
fs = FileSystem.get(fsConf);
} catch (IOException e1) {
e1.printStackTrace();
}
}
/** Records the (file name, file id) pair in the HBase file log table. */
public void LogRecordFileNameAndID( String fileName, long fileID )
{
HBaseFileLog log = new HBaseFileLog(dao);
log.InsertFileAndID(fileName, fileID);
}
/** Sets the HDFS input file path and its numeric id for subsequent parsing. */
public void addCDRInputFileName(String lName,long lfileNameID)
{
fileName = lName;
fileNameID = lfileNameID;
}
/**
 * Selects the CDR protocol, instantiating the matching parser and record
 * size. Parser construction failures are only logged; {@code parse} stays
 * null in that case.
 */
public void setCDRTableType( CDRTable.CDR ltype)
{
type = ltype;
if( type == CDRTable.CDR.BSSAP)
{
try {
System.out.println("my name is bssap");
parse = new BSSAPRequestParse(dao);
cdrSize = CDRTable.BSSAPSIZE;
} catch (IOException e) {
e.printStackTrace();
}
}else if( type == CDRTable.CDR.IUCS)
{
try {
parse = new IUCSRequestParse(dao);
cdrSize = CDRTable.IUCSSIZE;
} catch (IOException e) {
e.printStackTrace();
}
}else if( type == CDRTable.CDR.BICC)
{
try {
parse = new BICCRequestParse(dao);
cdrSize = CDRTable.BICCSIZE;
} catch (IOException e) {
e.printStackTrace();
}
}
}
/** Entry point required by ParseData; delegates to the HBase insert loop. */
public boolean doWithCDR()
{
return InserCDRIntoHBase();
}
// NOTE(review): method name has a typo ("Inser"); private, so safe to rename
// in a follow-up together with its single call site above.
private boolean InserCDRIntoHBase()
{
FSDataInputStream hdfsInStream = null;
try
{
if(!fs.exists(new Path(fileName)))
{
System.out.println( fileName + " is not exists in the HDFS,please check ");
// NOTE(review): closes the FileSystem handle only on this error path;
// FileSystem.get() may return a cached, shared instance, so closing it
// here can break other users -- verify intended lifecycle.
fs.close();
return false;
}
System.out.println(fileName);
hdfsInStream = fs.open(new Path(fileName));
int i = -1;
while(true)
{
if(hdfsInStream.available() > 0)
{
i++;
// NOTE(review): i*cdrSize is int arithmetic; for very large files this
// offset can overflow -- confirm expected file sizes.
parse.ParseAndInsert(fileNameID,i*cdrSize,hdfsInStream,cdrSize);
// Commit in batches of 10000 records.
if( i% 10000 == 0)
{
// NOTE(review): dead check -- parse was already dereferenced above,
// so it cannot be null here.
if(parse == null)
{
System.out.println("parse is null: before the parse");
}
parse.Commit();
}
}
else
{
break;
}
}
// Flush any records left in the final partial batch.
parse.Commit();
// NOTE(review): stream is closed here and again in the finally block
// (the reference is not nulled); the second close is redundant.
hdfsInStream.close();
return true;
}
catch(Exception e )
{
e.printStackTrace();
return false;
}
finally
{
try
{
if(hdfsInStream != null)
{
hdfsInStream.close();
}
}
catch(Exception e )
{
e.printStackTrace();
}
}
}
/** Manual smoke test: parse one hard-coded BSSAP file from HDFS. */
public static void main(String[] args) throws IOException {
System.out.println("Start Time: "
+ Calendar.getInstance().getTime().toString());
ParseDataIntoHBase a = new ParseDataIntoHBase();
a.setCDRTableType(CDRTable.CDR.BSSAP);
a.addCDRInputFileName("/ftest/bassap3/20110319/19/1300535993_933115_1300536000_283786_SMPBAK.dat",0);
a.doWithCDR();
}
}
|
gizemaltintas/online-education-tool
|
src/main/java/com/oet/application/usecases/manageAnswers/DTO/AssignmentDTO.java
|
package com.oet.application.usecases.manageAnswers.DTO;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.Setter;
/**
 * Transfer object describing one student's assignment status within a class,
 * including whether it was submitted and the feedback attached to it.
 */
@Builder
@Getter
@Setter
@AllArgsConstructor
public class AssignmentDTO {
// Identifier of the class the assignment belongs to.
private Long classId;
// Display name of the class.
private String className;
// Identifier of the student the row describes.
private Long studentId;
// Student's first name.
private String name;
// NOTE(review): field breaks camelCase ("Surname"); renaming would change the
// Lombok-generated builder method name (builder().Surname(...)), so callers
// must be migrated together with the rename.
private String Surname;
// Student number as displayed by the school.
private String studentNo;
// Title of the assigned article.
private String articleName;
// Whether the student has submitted the written work.
private Boolean isWritten;
// Feedback attachment links for this assignment.
private Feedback2DTO links;
// Whether feedback has already been given.
private Boolean feedbackGiven;
}
|
sho25/cxf
|
rt/databinding/aegis/src/test/java/org/apache/cxf/aegis/type/map/fortest/MapTest.java
|
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */
end_comment
begin_package
package|package
name|org
operator|.
name|apache
operator|.
name|cxf
operator|.
name|aegis
operator|.
name|type
operator|.
name|map
operator|.
name|fortest
package|;
end_package
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Map
import|;
end_import
begin_import
import|import
name|javax
operator|.
name|jws
operator|.
name|WebService
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|cxf
operator|.
name|aegis
operator|.
name|type
operator|.
name|map
operator|.
name|ns2
operator|.
name|ObjectWithAMapNs2
import|;
end_import
begin_comment
comment|/** * */
end_comment
begin_interface
annotation|@
name|WebService
argument_list|(
name|targetNamespace
operator|=
literal|"uri:org.apache.cxf.aegis.fortest.map"
argument_list|,
name|name
operator|=
literal|"MapTest"
argument_list|)
specifier|public
interface|interface
name|MapTest
block|{
name|ObjectWithAMap
name|returnObjectWithAMap
parameter_list|()
function_decl|;
name|void
name|takeMap
parameter_list|(
name|ObjectWithAMap
name|map
parameter_list|)
function_decl|;
name|Map
argument_list|<
name|String
argument_list|,
name|Long
argument_list|>
name|getMapStringToLong
parameter_list|()
function_decl|;
name|Map
argument_list|<
name|?
argument_list|,
name|?
argument_list|>
name|getRawMapStringToInteger
parameter_list|()
function_decl|;
name|Map
argument_list|<
name|Long
argument_list|,
name|String
argument_list|>
name|getMapLongToString
parameter_list|()
function_decl|;
name|ObjectWithAMapNs2
name|returnObjectWithAMapNs2
parameter_list|()
function_decl|;
name|void
name|takeMapNs2
parameter_list|(
name|ObjectWithAMapNs2
name|map
parameter_list|)
function_decl|;
block|}
end_interface
end_unit
|
Gerile3/My_Python
|
Small Apps/money_toss.py
|
#!/usr/bin/env python3
import random
def toss(money):
    """Play a coin-toss gambling game until the player quits or goes broke.

    A player wins 1$ for every head and loses 1.5$ for every tail.
    The game is over when the player's balance reaches 0$ (or below),
    or when the player chooses to cash out.

    Args:
        money: starting balance in dollars (game only runs while > 0).

    Returns:
        list: [final_balance, number_of_tosses].
    """
    count = 0
    while money > 0:
        count += 1
        coin = random.choice(["Heads", "Tails"])
        if coin == "Heads":
            money += 1
        else:
            money -= 1.5
        if money > 0:
            choice = input(f"{coin}! You now have {money}$. Press N to get your money, Enter to keep trying")
            # Only consult the player's answer while the game is still on.
            # (Previously `choice` was read even after a bust, which raised
            # NameError on a first-toss bust and otherwise reused a stale
            # answer from the previous round.)
            if choice.lower() == "n":
                break
        else:
            print(f"{coin}!, Wow you lost all your money! Better luck next time :(")
    return [money, count]
if __name__ == "__main__":
    # Interactive entry point: read the stake from stdin, play the game,
    # and report the final balance and toss count.
    money = float(input("How much money you are putting($): "))
    result = toss(money)
    print(f"You have started with {money}$ and ended up with {result[0]}$, you have tossed coin {result[1]} times.")
|
i-sourabh/dhis2-android-sdk
|
ui-bindings/src/main/java/org/hisp/dhis/client/sdk/ui/bindings/commons/DefaultSyncAdapter.java
|
<reponame>i-sourabh/dhis2-android-sdk
package org.hisp.dhis.client.sdk.ui.bindings.commons;
import android.accounts.Account;
import android.content.ContentProviderClient;
import android.content.SyncResult;
import android.os.Bundle;
import android.os.IBinder;
/**
 * Abstraction of the Android SyncAdapter contract used by the UI bindings.
 * Implementations perform account synchronisation work and expose the binder
 * a sync service returns from onBind().
 */
public interface DefaultSyncAdapter {
/** Performs a sync for the given account; outcome is reported through syncResult. */
void onPerformSync(Account account, Bundle extras, String authority, ContentProviderClient provider, SyncResult syncResult);
/** Returns the IBinder of the underlying sync adapter implementation. */
IBinder getSyncAdapterBinder();
}
|
krossenk/GoogleApiObjectivecClientREST
|
Source/GeneratedServices/GroupsSettings/GTLRGroupsSettingsObjects.h
|
// NOTE: This file was generated by the ServiceGenerator.
// ----------------------------------------------------------------------------
// API:
// Groups Settings API (groupssettings/v1)
// Description:
// Lets you manage permission levels and related settings of a group.
// Documentation:
// https://developers.google.com/google-apps/groups-settings/get_started
#if GTLR_BUILT_AS_FRAMEWORK
#import "GTLR/GTLRObject.h"
#else
#import "GTLRObject.h"
#endif
#if GTLR_RUNTIME_VERSION != 3000
#error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source.
#endif
NS_ASSUME_NONNULL_BEGIN
/**
* JSON template for Group resource
*/
@interface GTLRGroupsSettings_Groups : GTLRObject
/** Are external members allowed to join the group. */
@property(nonatomic, copy, nullable) NSString *allowExternalMembers;
/** Is google allowed to contact admins. */
@property(nonatomic, copy, nullable) NSString *allowGoogleCommunication;
/** If posting from web is allowed. */
@property(nonatomic, copy, nullable) NSString *allowWebPosting;
/** If the group is archive only */
@property(nonatomic, copy, nullable) NSString *archiveOnly;
/** Custom footer text. */
@property(nonatomic, copy, nullable) NSString *customFooterText;
/** Default email to which reply to any message should go. */
@property(nonatomic, copy, nullable) NSString *customReplyTo;
/** Default message deny notification message */
@property(nonatomic, copy, nullable) NSString *defaultMessageDenyNotificationText;
/**
* Description of the group
*
* Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
*/
@property(nonatomic, copy, nullable) NSString *descriptionProperty;
/** Email id of the group */
@property(nonatomic, copy, nullable) NSString *email;
/** Whether to include custom footer. */
@property(nonatomic, copy, nullable) NSString *includeCustomFooter;
/** If this groups should be included in global address list or not. */
@property(nonatomic, copy, nullable) NSString *includeInGlobalAddressList;
/** If the contents of the group are archived. */
@property(nonatomic, copy, nullable) NSString *isArchived;
/** The type of the resource. */
@property(nonatomic, copy, nullable) NSString *kind;
/**
* Maximum message size allowed.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *maxMessageBytes;
/** Can members post using the group email address. */
@property(nonatomic, copy, nullable) NSString *membersCanPostAsTheGroup;
/**
* Default message display font. Possible values are: DEFAULT_FONT
* FIXED_WIDTH_FONT
*/
@property(nonatomic, copy, nullable) NSString *messageDisplayFont;
/**
* Moderation level for messages. Possible values are: MODERATE_ALL_MESSAGES
* MODERATE_NON_MEMBERS MODERATE_NEW_MEMBERS MODERATE_NONE
*/
@property(nonatomic, copy, nullable) NSString *messageModerationLevel;
/** Name of the Group */
@property(nonatomic, copy, nullable) NSString *name;
/** Primary language for the group. */
@property(nonatomic, copy, nullable) NSString *primaryLanguage;
/**
* Whome should the default reply to a message go to. Possible values are:
* REPLY_TO_CUSTOM REPLY_TO_SENDER REPLY_TO_LIST REPLY_TO_OWNER REPLY_TO_IGNORE
* REPLY_TO_MANAGERS
*/
@property(nonatomic, copy, nullable) NSString *replyTo;
/** Should the member be notified if his message is denied by owner. */
@property(nonatomic, copy, nullable) NSString *sendMessageDenyNotification;
/** Is the group listed in groups directory */
@property(nonatomic, copy, nullable) NSString *showInGroupDirectory;
/**
* Moderation level for messages detected as spam. Possible values are: ALLOW
* MODERATE SILENTLY_MODERATE REJECT
*/
@property(nonatomic, copy, nullable) NSString *spamModerationLevel;
/**
* Permissions to add members. Possible values are: ALL_MANAGERS_CAN_ADD
* ALL_MEMBERS_CAN_ADD NONE_CAN_ADD
*/
@property(nonatomic, copy, nullable) NSString *whoCanAdd;
/**
* Permission to contact owner of the group via web UI. Possible values are:
* ANYONE_CAN_CONTACT ALL_IN_DOMAIN_CAN_CONTACT ALL_MEMBERS_CAN_CONTACT
* ALL_MANAGERS_CAN_CONTACT
*/
@property(nonatomic, copy, nullable) NSString *whoCanContactOwner;
/**
* Permissions to invite members. Possible values are: ALL_MEMBERS_CAN_INVITE
* ALL_MANAGERS_CAN_INVITE NONE_CAN_INVITE
*/
@property(nonatomic, copy, nullable) NSString *whoCanInvite;
/**
* Permissions to join the group. Possible values are: ANYONE_CAN_JOIN
* ALL_IN_DOMAIN_CAN_JOIN INVITED_CAN_JOIN CAN_REQUEST_TO_JOIN
*/
@property(nonatomic, copy, nullable) NSString *whoCanJoin;
/**
* Permission to leave the group. Possible values are: ALL_MANAGERS_CAN_LEAVE
* ALL_MEMBERS_CAN_LEAVE NONE_CAN_LEAVE
*/
@property(nonatomic, copy, nullable) NSString *whoCanLeaveGroup;
/**
* Permissions to post messages to the group. Possible values are:
* NONE_CAN_POST ALL_MANAGERS_CAN_POST ALL_MEMBERS_CAN_POST
* ALL_IN_DOMAIN_CAN_POST ANYONE_CAN_POST
*/
@property(nonatomic, copy, nullable) NSString *whoCanPostMessage;
/**
* Permissions to view group. Possible values are: ANYONE_CAN_VIEW
* ALL_IN_DOMAIN_CAN_VIEW ALL_MEMBERS_CAN_VIEW ALL_MANAGERS_CAN_VIEW
*/
@property(nonatomic, copy, nullable) NSString *whoCanViewGroup;
/**
* Permissions to view membership. Possible values are: ALL_IN_DOMAIN_CAN_VIEW
* ALL_MEMBERS_CAN_VIEW ALL_MANAGERS_CAN_VIEW
*/
@property(nonatomic, copy, nullable) NSString *whoCanViewMembership;
@end
NS_ASSUME_NONNULL_END
|
SYCstudio/OI
|
Practice/2018/2018.10.28/BZOJ3111.cpp
|
<filename>Practice/2018/2018.10.28/BZOJ3111.cpp<gh_stars>1-10
#include<iostream>
#include<cstdio>
#include<cstdlib>
#include<cstring>
#include<algorithm>
using namespace std;
// NOTE(review): both macros appear unused in the visible code.
#define ll long long
#define mem(Arr,x) memset(Arr,x,sizeof(Arr))
// Grid dimension bound (n, m <= 100, stored 1-indexed).
const int maxN=110;
// Bound on the segment count K after the K = 2k+1 transform in main().
const int maxK=25;
// Sentinel for "unreachable" DP states.
const int inf=1000000000;
// Grid size and the raw turn parameter read from stdin.
int n,m,K;
// Mp: input grid values; S: per-iteration column suffix sums (built in main()).
int Mp[maxN][maxN],S[maxN][maxN];
// F: DP table indexed [segments][row][column]; up/down: prefix maxima of F
// over rows, scanned from the bottom and the top respectively.
int F[maxK][maxN][maxN],up[maxK][maxN][maxN],down[maxK][maxN][maxN];
// Fixes the bottom row i of the shape, builds column sums S down to row i,
// then runs a column DP with l segments (l = 1..K where K = 2k+1) using
// up/down prefix maxima for O(1) transitions. Answer is the best F over all
// choices of bottom row.
// NOTE(review): the exact meaning of each DP state is inferred from the
// transitions; confirm against the original BZOJ 3111 statement.
int main(){
scanf("%d%d%d",&n,&m,&K);K=K+K+1;
for (int i=1;i<=n;i++) for (int j=1;j<=m;j++) scanf("%d",&Mp[i][j]);
int Ans=-inf;
for (int i=n;i>=1;i--){
//cout<<"i:"<<i<<endl;
// Reset all DP states touched for this bottom row.
for (int j=0;j<=i+1;j++) for (int k=0;k<=m+1;k++) for (int l=0;l<=K;l++) F[l][j][k]=-inf;
// S[j][k] = sum of column k from row j down to row i.
for (int j=1;j<=m;j++) S[i][j]=Mp[i][j];
for (int j=i-1;j>=1;j--) for (int k=1;k<=m;k++) S[j][k]=S[j+1][k]+Mp[j][k];
/*
cout<<"S:"<<endl;
for (int j=1;j<=i;j++){
for (int k=1;k<=m;k++)
cout<<S[j][k]<<" ";
cout<<endl;
}
//*/
// Seed the single-segment states along row i and column 1.
F[1][i][1]=Mp[i][1];
for (int j=2;j<=m;j++) F[1][i][j]=max(F[1][i][j-1]+Mp[i][j],Mp[i][j]);
for (int j=i-1;j>=1;j--) F[1][j][1]=F[1][j+1][1]+Mp[j][1];
// Initialize the prefix-maximum tables for column 1.
for (int l=1;l<=K;l++){
up[l][i][1]=F[l][i][1];
for (int j=i-1;j>=1;j--) up[l][j][1]=max(up[l][j+1][1],F[l][j][1]);
}
for (int l=1;l<=K;l++){
down[l][1][1]=F[l][1][1];
for (int j=2;j<=i;j++) down[l][j][1]=max(down[l][j-1][1],F[l][j][1]);
}
// Sweep columns left to right; odd l extends upward, even l downward.
for (int j=2;j<=m;j++){
for (int k=i;k>=1;k--)
for (int l=1;l<=K;l++){
if ((l==1)&&(k!=i)) F[l][k][j]=max(F[l][k][j],S[k][j]);
F[l][k][j]=max(F[l][k][j],F[l][k][j-1]+S[k][j]);
if ((l&1)&&(l>1)&&(k!=i)) F[l][k][j]=max(F[l][k][j],up[l-1][k+1][j-1]+S[k][j]);
if (((l&1)==0)&&(k!=1)) F[l][k][j]=max(F[l][k][j],down[l-1][k-1][j-1]+S[k][j]);
}
// Refresh the row prefix maxima for this column.
for (int l=1;l<=K;l++) up[l][i][j]=F[l][i][j],down[l][1][j]=F[l][1][j];
for (int k=i-1;k>=1;k--) for (int l=1;l<=K;l++) up[l][k][j]=max(up[l][k+1][j],F[l][k][j]);
for (int k=2;k<=i;k++) for (int l=1;l<=K;l++) down[l][k][j]=max(down[l][k-1][j],F[l][k][j]);
}
/*
cout<<"F"<<endl;
for (int j=1;j<=i;j++){
for (int k=1;k<=m;k++){
cout<<"(";
for (int l=1;l<=K;l++){
if (F[l][j][k]==-inf) cout<<"-inf";
else cout<<F[l][j][k];if (l!=K) cout<<" ";
}
cout<<") ";
}
cout<<endl;
}
cout<<"up"<<endl;
for (int j=1;j<=i;j++){
for (int k=1;k<=m;k++){
cout<<"(";
for (int l=1;l<=K;l++){
if (up[l][j][k]==-inf) cout<<"-inf";
else cout<<up[l][j][k];if (l!=K) cout<<" ";
}
cout<<") ";
}
cout<<endl;
}
cout<<"down"<<endl;
for (int j=1;j<=i;j++){
for (int k=1;k<=m;k++){
cout<<"(";
for (int l=1;l<=K;l++){
if (down[l][j][k]==-inf) cout<<"-inf";
else cout<<down[l][j][k];if (l!=K) cout<<" ";
}
cout<<") ";
}
cout<<endl;
}
//*/
// Best full-K state over every cell is a candidate answer for this bottom row.
for (int j=1;j<=i;j++) for (int k=1;k<=m;k++) Ans=max(Ans,F[K][j][k]);
}
printf("%d\n",Ans);return 0;
}
|
Commoble/magus
|
src/main/java/com/github/commoble/magus/CallbackRegistrar.java
|
<gh_stars>0
package com.github.commoble.magus;
import com.github.commoble.magus.api.CachedEntry;
import com.github.commoble.magus.api.serializablefunctions.CallbackFactory;
import com.github.commoble.magus.api.serializablefunctions.CallbackUtil;
import com.github.commoble.magus.content.SimpleCallbacks;
import com.github.commoble.magus.content.callbacks.CallbackKeys;
import com.github.commoble.magus.content.callbacks.CommandCallback;
import com.github.commoble.magus.content.callbacks.SpawnEntityCallback;
// deferred registries don't work with custom forge registries, have to register these the old-fashioned way
/**
 * Holds the mod's callback-factory registry entries and registers them
 * imperatively (deferred registries are not usable with custom Forge
 * registries, per the file-level comment).
 */
public class CallbackRegistrar
{
// Simple callback that ejects bees from an entity.
public static CachedEntry<CallbackFactory> BEE_SWARM;
// Callback factory that spawns an entity.
public static CachedEntry<CallbackFactory> SPAWN_ENTITY;
// Callback factory that runs a command.
public static CachedEntry<CallbackFactory> COMMAND;
/** Registers all callback factories; must be called during mod setup before the entries are used. */
public static void registerCallbacks()
{
BEE_SWARM = CallbackUtil.registerSimpleCallback(CallbackKeys.BEE_SWARM, SimpleCallbacks::ejectBeesFromEntity);
SPAWN_ENTITY = CallbackUtil.registerCallback(CallbackKeys.SPAWN_ENTITY, SpawnEntityCallback::new);
COMMAND = CallbackUtil.registerCallback(CallbackKeys.COMMAND_CALLBACK, CommandCallback::new);
}
}
|
Joon7891/Competitive-Programming
|
Miscellaneous/A Noisy Class.cpp
|
#include <bits/stdc++.h>
using namespace std;
// Upper bound on node ids (nodes are 1-indexed).
const int MAXN = 10001;
// N = number of nodes, M = number of edges (read in main()).
int N, M;
// Adjacency lists of the directed graph.
vector<int> adj[MAXN];
// v[a][b]: edge a->b already inserted (deduplication).
// NOTE(review): this matrix occupies ~10001*10001 bytes (~95 MB) of static
// storage; a hash set of packed edge ids would be far smaller -- confirm the
// judge's memory limit allows this.
bool v[MAXN][MAXN];
// p[i]: node i fully processed and proven cycle-free.
bool p[MAXN];
// t[i]: node i is on the current DFS stack.
bool t[MAXN];
// DFS cycle check: returns true iff no cycle is reachable from `node`.
// t[] marks nodes on the active recursion stack, p[] marks nodes whose
// entire reachable subgraph has already been proven acyclic.
bool topologicalCheck(int node)
{
    if (t[node]) return false;   // back edge to the active stack => cycle
    if (p[node]) return true;    // subtree already verified, skip

    t[node] = true;              // push onto the DFS stack
    for (int next : adj[node])
        if (!topologicalCheck(next))
            return false;        // propagate failure (caller exits immediately)
    t[node] = false;             // pop from the DFS stack
    p[node] = true;              // remember this subtree is acyclic
    return true;
}
// Reads a directed graph (N nodes, M edges) from stdin and prints "Y" when
// it contains no cycle, "N" otherwise.
int main()
{
cin >> N >> M;
int a, b;
for (int i = 0; i < M; ++i)
{
cin >> a >> b;
// Skip duplicate edges so the adjacency lists stay minimal.
if (v[a][b]) continue;
adj[a].push_back(b);
v[a][b] = 1;
}
// The graph may be disconnected: start a DFS from every unprocessed node.
for (int i = 1; i <= N; i++)
{
if (!p[i])
{
if (!topologicalCheck(i))
{
cout << "N" << endl;
return 0;
}
}
}
cout << "Y" << endl;
}
|
abreuboom/yap
|
node_modules/gatsby/node_modules/gatsby-cli/lib/structured-errors/construct-error.js
|
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _objectWithoutPropertiesLoose2 = _interopRequireDefault(require("@babel/runtime/helpers/objectWithoutPropertiesLoose"));
const Joi = require(`@hapi/joi`);
const stackTrace = require(`stack-trace`);
const errorSchema = require(`./error-schema`);
const _require = require(`./error-map`),
errorMap = _require.errorMap,
defaultError = _require.defaultError;
const _require2 = require(`../reporter/errors`),
sanitizeStructuredStackTrace = _require2.sanitizeStructuredStackTrace; // Merge partial error details with information from the errorMap
// Validate the constructed object against an error schema
// TODO: 'details' is not a descriptive name
const constructError = (_ref) => {
let _ref$details = _ref.details,
id = _ref$details.id,
otherDetails = (0, _objectWithoutPropertiesLoose2.default)(_ref$details, ["id"]);
const result = id && errorMap[id] || defaultError; // merge
const structuredError = Object.assign({
context: {}
}, otherDetails, {}, result, {
text: result.text(otherDetails.context),
stack: otherDetails.error ? sanitizeStructuredStackTrace(stackTrace.parse(otherDetails.error)) : null,
docsUrl: result.docsUrl || `https://gatsby.dev/issue-how-to`
});
if (id) {
structuredError.code = id;
} // validate
const _Joi$validate = Joi.validate(structuredError, errorSchema),
error = _Joi$validate.error;
if (error !== null) {
console.log(`Failed to validate error`, error);
process.exit(1);
}
return structuredError;
};
module.exports = constructError;
|
visit-dav/vis
|
src/avt/Expressions/General/avtCurlExpression.h
|
<filename>src/avt/Expressions/General/avtCurlExpression.h
// Copyright (c) Lawrence Livermore National Security, LLC and other VisIt
// Project developers. See the top-level LICENSE file for dates and other
// details. No copyright assignment is required to contribute to VisIt.
// ************************************************************************* //
// avtCurlExpression.h //
// ************************************************************************* //
#ifndef AVT_CURL_FILTER_H
#define AVT_CURL_FILTER_H
#include <avtMacroExpressionFilter.h>
// ****************************************************************************
// Class: avtCurlExpression
//
// Purpose:
// A filter that calculates the curl. The curl takes in a vector and
// produces a vector. This depends on several partial derivatives,
// which are accomplished using the gradient expression.
//
// Because we need to use other expressions, this is a derived type of
// the macro expression filter.
//
// curl of vector {u,v,w} = { grad(w)[1]-grad(v)[2],
// grad(u)[2]-grad(w)[0],
// grad(v)[0]-grad(u)[1] }
//
// Curl has the following physical interpretation:
// Imagine you have a pinwheel -- a disc with some squares placed along
// the disc in a direction orthogonal to the disc (so that air can spin
// the disc by pushing on the squares).
// If at some point (X,Y,Z) the curl is (a,b,c), then placing the disc so
// that it is normal to (a,b,c) will give the fastest possible rotational
// speed that is attainable by having the center of the pinwheel at
// (X,Y,Z).
//
// Also: <NAME> felt that we should define curl for 2D variables as
// well. In this case, only the third component of the vector will be
// non-zero, so we return a scalar (instead of a vector) in this case.
//
// Programmer: <NAME>
// Creation: December 27, 2004
//
// Modifications:
//
// <NAME>, Fri Aug 19 08:50:02 PDT 2005
// Move definition of GetVariableDimension to the .C file.
//
// ****************************************************************************
// Macro-expression filter computing the curl of a vector field; the math and
// the 2D scalar special case are described in the header comment above.
class EXPRESSION_API avtCurlExpression : public avtMacroExpressionFilter
{
public:
avtCurlExpression();
virtual ~avtCurlExpression();
// Type name reported to the expression framework.
virtual const char *GetType(void) { return "avtCurlExpression"; };
// Human-readable progress/description string.
virtual const char *GetDescription(void)
{ return "Calculating Curl"; };
protected:
// Output dimension: defined in the .C file (vector in 3D, scalar in 2D per
// the header comment).
virtual int GetVariableDimension();
// Emits the gradient-based macro expression that implements the curl.
virtual void GetMacro(std::vector<std::string> &,
std::string &, Expression::ExprType &);
};
#endif
|
pradyumna2905/yanab
|
db/fixtures/05_friendships.rb
|
<filename>db/fixtures/05_friendships.rb
# Seed fixture: establish an accepted friendship between two sample users.
# NOTE(review): both lookup emails were scrubbed to the same "<EMAIL>"
# placeholder, so john and mary may resolve to the same record (or nil);
# restore the real fixture addresses before running this seed.
john = User.find_by(email: '<EMAIL>')
mary = User.find_by(email: '<EMAIL>')
# Record an already-accepted friend request from john to mary...
john.friend_requests.create!(
first_name: mary.first_name,
email: mary.email,
status: FriendRequest::STATUSES[:accepted]
)
# ...and create the corresponding friendship association.
john.friends << mary
|
blgrossMS/xbox-live-api
|
Source/System/iOS/XBLiOSGlobalState.h
|
<filename>Source/System/iOS/XBLiOSGlobalState.h
//
// XBLiOSGlobalState.h
// XboxLiveServices
//
// Created by <NAME> on 5/3/16.
// Copyright © 2016 Microsoft Corporation. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
// Process-wide state shared by XSAPI on iOS: the view controller UI is
// launched from and the push-notification registration token.
@interface XBLiOSGlobalState : NSObject
/**
 Set the launch view controller to be used by XSAPI. This is held by a weak pointer and is not thread safe to set this.
 Typically you only need to set this once. If this is nil when XSAPI UI is launched, the application root view controller
 will be used
 @param viewController The view controller or nil to launch XSAPI from. Default is application root view controller
 */
+ (void)setLaunchViewController:(UIViewController * _Nullable)viewController;
/**
 Returns the launch view controller used by XSAPI to launch UI from. This is either the view controller set previously
 in the +setLaunchViewController method or the root view controller of the application
 @return The launch view controller used by XSAPI
 */
+ (UIViewController * _Nullable)launchViewController;
/**
 Set the notification registration token to be used by XSAPI.
 @param registrationToken Used to register for XBL Notifications
 */
+ (void)setNotificationRegistrationToken:(NSString * _Nullable)registrationToken;
/**
 Returns the notification registration token to be used by XSAPI
 @return registrationToken Used to register for XBL Notifications
 */
// NOTE(review): returns string_t (a C++ typedef) while the setter takes
// NSString*; this header therefore compiles only from Objective-C++
// translation units -- confirm that is intended.
+ (string_t)notificationRegistrationToken;
@end
|
carmon/Enduro
|
test/utilities_test/test_utilities_test.js
|
<reponame>carmon/Enduro
// * vendor dependencies
const expect = require('chai').expect
const path = require('path')
// * enduro dependencies
const local_enduro = require('../../index').quick_init()
const test_utilities = require(enduro.enduro_path + '/test/libs/test_utilities')
// Mocha suite exercising enduro's test utilities: boots a project, starts
// the server, fetches a session id, then tears everything down.
// NOTE(review): `enduro` is used as a global here -- presumably installed by
// quick_init() above; confirm against the enduro index module.
describe('Test utilites', function () {
// Server start/stop can be slow; allow up to 7 s per hook/test.
this.timeout(7000)
before(function () {
// Scaffold a throwaway project and start the enduro server.
return test_utilities.before(local_enduro, 'utilities test')
.then(() => {
return enduro.actions.start()
})
})
it('should get session id', function () {
// A non-empty session id proves the running server answered.
return test_utilities.get_sid()
.then((sid) => {
expect(sid).to.not.be.empty
})
})
after(function () {
// Stop the server before removing the scaffolded project.
return enduro.actions.stop_server()
.then(() => {
return test_utilities.after()
})
})
})
|
TheVinic/bootcamp-01-template-casa-do-codigo
|
src/main/java/com/itau/cdc/validator/ValidaCpfCnpj.java
|
package com.itau.cdc.validator;
import org.hibernate.validator.internal.constraintvalidators.hv.br.CNPJValidator;
import org.hibernate.validator.internal.constraintvalidators.hv.br.CPFValidator;
import org.springframework.http.HttpStatus;
import com.itau.cdc.configuration.exception.ApiErroException;
//4
/**
 * Validates a Brazilian tax document number, accepting either a CPF
 * (individual) or a CNPJ (company).
 * NOTE(review): relies on Hibernate Validator *internal* classes
 * (org.hibernate.validator.internal.*), which are not API-stable and may
 * break on upgrade; consider the public @CPF/@CNPJ constraints instead.
 */
public class ValidaCpfCnpj{
//1
/**
 * Returns the input unchanged when it is a valid CPF or CNPJ; otherwise
 * throws ApiErroException with HTTP 422.
 */
public String validaCpfCnpj(String cpfCnpj) {
//1
CPFValidator cpfValidator = new CPFValidator();
cpfValidator.initialize(null);
//1
CNPJValidator cnpjValidator = new CNPJValidator();
cnpjValidator.initialize(null);
//1
// Valid if EITHER validator accepts the value.
if(!cpfValidator.isValid(cpfCnpj, null) && !cnpjValidator.isValid(cpfCnpj, null)) {
throw new ApiErroException(HttpStatus.UNPROCESSABLE_ENTITY, "Documento (CPF ou CNPJ) não é válido.");
}
return cpfCnpj;
}
}
|
tumuyan/bluelineconsole
|
app/src/main/java/net/nhiroki/bluelineconsole/dataStore/persistent/URLEntry.java
|
package net.nhiroki.bluelineconsole.dataStore.persistent;
import android.content.Context;
import androidx.annotation.StringRes;
import net.nhiroki.bluelineconsole.R;
import net.nhiroki.bluelineconsole.lib.StringValidator;
/**
 * Persistent record of a user-defined URL command: the short name typed in
 * the console, its display name, and the URL it opens.
 */
public class URLEntry {
// Database row id.
public int id;
// Command name the user types; must be non-empty with no spaces.
public String name;
// Human-readable name shown in results; must be non-empty.
public String display_name;
// URL the command opens (validated against user URL preferences).
public String url_base;
// Whether a query string is appended to url_base when invoked.
public boolean has_query;
/**
 * Validates this entry against the naming and URL rules.
 *
 * @param context Android context used to read URL-validation preferences.
 * @return 0 when the entry is valid, otherwise the string resource id of
 *         the error message to display.
 */
public @StringRes int validate(Context context) {
// The command name must be non-empty and contain no spaces
// (previously checked with a hand-rolled char loop).
if (this.name.isEmpty() || this.name.indexOf(' ') >= 0) {
return R.string.error_invalid_command_name;
}
if (this.display_name.isEmpty()) {
return R.string.error_empty_display_name;
}
// URL validation honours the "arbitrary scheme" user preference when
// picking which error message to show.
if (! StringValidator.isValidURLAccepted(url_base, true, context)) {
return StringValidator.getPreferenceURLArbitrarySchemeAccepted(context) ? R.string.error_invalid_url_least_validation_for_web_arbitrary_schema
: R.string.error_invalid_url_least_validation_for_web;
}
return 0;
}
}
|
buptn/CCNxTomcat
|
ccn/org/ccnx/ccn/test/io/content/CCNNetworkObjectTestBase.java
|
/*
* A CCNx library test.
*
* Copyright (C) 2011, 2012 Palo Alto Research Center, Inc.
*
* This work is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License version 2 as published by the
* Free Software Foundation.
* This work is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details. You should have received a copy of the GNU General Public
* License along with this program; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
package org.ccnx.ccn.test.io.content;
import java.io.IOException;
import org.ccnx.ccn.CCNHandle;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.impl.support.ConcurrencyUtils.Waiter;
import org.ccnx.ccn.io.content.CCNNetworkObject;
import org.ccnx.ccn.io.content.CCNStringObject;
import org.ccnx.ccn.io.content.Collection;
import org.ccnx.ccn.io.content.Link;
import org.ccnx.ccn.io.content.LinkAuthenticator;
import org.ccnx.ccn.protocol.CCNTime;
import org.ccnx.ccn.protocol.ContentName;
import org.ccnx.ccn.protocol.PublisherID;
import org.ccnx.ccn.test.CCNTestBase;
import org.ccnx.ccn.test.Flosser;
/**
* Common code between CCNObjectTests
*/
public class CCNNetworkObjectTestBase extends CCNTestBase {
static final int UPDATE_TIMEOUT = 5000;
static final int MAX_REPO_WAIT = 1000;
static final int REPO_WAIT_INCR = 100;
static String stringObjName = "StringObject";
static String collectionObjName = "CollectionObject";
static String prefix = "CollectionObject-";
static ContentName [] ns = null;
static public byte [] contenthash1 = new byte[32];
static public byte [] contenthash2 = new byte[32];
static public byte [] publisherid1 = new byte[32];
static public byte [] publisherid2 = new byte[32];
static PublisherID pubID1 = null;
static PublisherID pubID2 = null;
static int NUM_LINKS = 15;
static LinkAuthenticator [] las = new LinkAuthenticator[NUM_LINKS];
static Link [] lrs = null;
static Collection small1;
static Collection small2;
static Collection empty;
static Collection big;
static String [] numbers = new String[]{"ONE", "TWO", "THREE", "FOUR", "FIVE", "SIX", "SEVEN", "EIGHT", "NINE", "TEN"};
static CCNHandle handle;
static Flosser flosser = null;
public <T> CCNTime saveAndLog(String name, CCNNetworkObject<T> ecd, CCNTime version, T data) throws IOException {
CCNTime oldVersion = ecd.getVersion();
ecd.save(version, data);
Log.info(Log.FAC_TEST, name + " Saved " + name + ": " + ecd.getVersionedName() + " (" + ecd.getVersion() + ", updated from " + oldVersion + ")" + " gone? " + ecd.isGone() + " data: " + ecd);
return ecd.getVersion();
}
public <T> CCNTime saveAsGoneAndLog(String name, CCNNetworkObject<T> ecd) throws IOException {
CCNTime oldVersion = ecd.getVersion();
ecd.saveAsGone();
Log.info(Log.FAC_TEST, "Saved " + name + ": " + ecd.getVersionedName() + " (" + ecd.getVersion() + ", updated from " + oldVersion + ")" + " gone? " + ecd.isGone() + " data: " + ecd);
return ecd.getVersion();
}
public CCNTime waitForDataAndLog(String name, CCNNetworkObject<?> ecd) throws IOException {
ecd.waitForData();
Log.info(Log.FAC_TEST, "Initial read " + name + ", name: " + ecd.getVersionedName() + " (" + ecd.getVersion() +")" + " gone? " + ecd.isGone() + " data: " + ecd);
return ecd.getVersion();
}
/**
 * Attempts to update the object (optionally at a specific name), logs whether
 * an update was found, and returns the resulting version.
 *
 * @param name       human-readable label used in the log line
 * @param ecd        the network object to update
 * @param updateName specific name to update at, or null for the latest version
 * @return the version after the update attempt
 * @throws IOException if the update fails
 */
public CCNTime updateAndLog(String name, CCNNetworkObject<?> ecd, ContentName updateName) throws IOException {
    final boolean updated = (null == updateName) ? ecd.update() : ecd.update(updateName, null);
    // Shared tail of both log messages: version, gone-flag, and data.
    final String state = " (" + ecd.getVersion() + ")" + " gone? " + ecd.isGone() + " data: " + ecd;
    if (updated) {
        Log.info(Log.FAC_TEST, "Updated " + name + ", to name: " + ecd.getVersionedName() + state);
    } else {
        final String where = (null != updateName) ? (" at name " + updateName) : "";
        Log.info(Log.FAC_TEST, "No update found for " + name + where + ", still: " + ecd.getVersionedName() + state);
    }
    return ecd.getVersion();
}
/**
 * Blocks until the string object's version equals {@code t}, waiting at most
 * UPDATE_TIMEOUT milliseconds (enforced by the Waiter helper).
 *
 * @param cso the string object whose version is being watched
 * @param t   the version to wait for
 * @throws Exception propagated from the Waiter/check machinery
 */
public void doWait(CCNStringObject cso, CCNTime t) throws Exception {
    new Waiter(UPDATE_TIMEOUT) {
        @Override
        protected boolean check(Object o, Object check) throws Exception {
            // Done once the object's version matches the expected time value.
            return ((CCNStringObject)o).getVersion().equals(check);
        }
    }.wait(cso, t);
}
}
|
kristenkotkas/moviediary
|
src/main/java/server/entity/Privilege.java
|
<reponame>kristenkotkas/moviediary<gh_stars>1-10
package server.entity;
/**
 * User privilege levels for the application. Currently only ADMIN is defined.
 */
public enum Privilege {
    ADMIN
}
|
uptutu/tkeel
|
cmd/rudder/root.go
|
/*
Copyright 2021 The tKeel Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"context"
"errors"
"fmt"
"os"
"os/signal"
"strings"
"syscall"
"github.com/emicklei/go-restful"
"github.com/spf13/cobra"
"github.com/tkeel-io/kit/app"
"github.com/tkeel-io/kit/log"
entity_v1 "github.com/tkeel-io/security/apirouter/entity/v1"
oauth_v1 "github.com/tkeel-io/security/apirouter/oauth/v1"
rbac_v1 "github.com/tkeel-io/security/apirouter/rbac/v1"
tenant_v1 "github.com/tkeel-io/security/apirouter/tenant/v1"
"github.com/tkeel-io/security/apiserver/filters"
security_dao "github.com/tkeel-io/security/models/dao"
"github.com/tkeel-io/security/models/entity"
oauth2_v1 "github.com/tkeel-io/tkeel/api/oauth2/v1"
plugin_v1 "github.com/tkeel-io/tkeel/api/plugin/v1"
repo "github.com/tkeel-io/tkeel/api/repo/v1"
"github.com/tkeel-io/tkeel/cmd"
t_dapr "github.com/tkeel-io/tkeel/pkg/client/dapr"
"github.com/tkeel-io/tkeel/pkg/client/openapi"
"github.com/tkeel-io/tkeel/pkg/config"
"github.com/tkeel-io/tkeel/pkg/hub"
"github.com/tkeel-io/tkeel/pkg/model/plugin"
"github.com/tkeel-io/tkeel/pkg/model/prepo"
"github.com/tkeel-io/tkeel/pkg/model/proute"
"github.com/tkeel-io/tkeel/pkg/repository"
"github.com/tkeel-io/tkeel/pkg/repository/helm"
"github.com/tkeel-io/tkeel/pkg/server"
"github.com/tkeel-io/tkeel/pkg/service"
)
var (
	// configFile is the path to an optional standalone config file (--config flag).
	configFile string
	// conf holds the effective rudder configuration (defaults, flags, then file).
	conf *config.Configuration
	// rudderApp is the application wrapper owning the HTTP and GRPC servers.
	rudderApp *app.App
)
// rootCmd wires up and runs the rudder service: it builds the HTTP/GRPC
// servers, the dapr client, the plugin/route/repo state operators, the
// repo hub, and registers all v1 services (plugin, oauth2, repo, security).
//
// Fix: the two type-assertion error messages in the repo-hub factory
// misspelled "invalid" as "invaild".
var rootCmd = &cobra.Command{
	Use: "rudder is the main component in the tKeel.",
	Short: `rudder is the main control component in the tkeel platform.
Used to manage plugins and tenants.`,
	PreRun: func(cmd *cobra.Command, args []string) {
		// An explicit config file overrides defaults/flags set in init().
		if configFile != "" {
			c, err := config.LoadStandaloneConfiguration(configFile)
			if err != nil {
				log.Fatal("fatal config load(%s): %s", configFile, err)
				// NOTE(review): unreachable if log.Fatal exits — confirm against tkeel-io/kit/log.
				os.Exit(-1)
			}
			conf = c
		}
		httpSrv := server.NewHTTPServer(conf.HTTPAddr)
		grpcSrv := server.NewGRPCServer(conf.GRPCAddr)
		rudderApp = app.New("rudder", &log.Conf{
			App:    "rudder",
			Level:  conf.Log.Level,
			Dev:    conf.Log.Dev,
			Output: conf.Log.Output,
		}, httpSrv, grpcSrv)
		{
			// init client.
			// dapr grpc client.
			daprGRPCClient, err := t_dapr.NewGPRCClient(10, "5s", conf.Dapr.GRPCPort)
			if err != nil {
				log.Fatal("fatal new dapr client: %s", err)
				os.Exit(-1)
			}
			openapiCli := openapi.NewDaprClient("rudder", daprGRPCClient)
			// init operator.
			pOp := plugin.NewDaprStateOperator(conf.Dapr.PrivateStateName, daprGRPCClient)
			prOp := proute.NewDaprStateOperator(conf.Dapr.PublicStateName, daprGRPCClient)
			riOp := prepo.NewDaprStateOperator(conf.Dapr.PrivateStateName, daprGRPCClient)
			// init repo hub.
			hub.Init(conf.Tkeel.WatchPluginRouteInterval, riOp,
				// Factory: build a helm-backed repository from connection info
				// plus (driver, namespace) passed as variadic string args.
				func(connectInfo *repository.Info,
					args ...interface{}) (repository.Repository, error) {
					if len(args) != 2 {
						return nil, errors.New("invalid arguments")
					}
					drive, ok := args[0].(string)
					if !ok {
						return nil, errors.New("invalid argument type")
					}
					namespace, ok := args[1].(string)
					if !ok {
						return nil, errors.New("invalid argument type")
					}
					repo, err := helm.NewHelmRepo(*connectInfo, helm.Driver(drive), namespace)
					if err != nil {
						return nil, fmt.Errorf("error new helm repo: %w", err)
					}
					return repo, nil
				},
				// Uninstaller: remove an installed plugin chart by id.
				func(pluginID string) error {
					repo, err := helm.NewHelmRepo(repository.Info{}, helm.Mem, conf.Tkeel.Namespace)
					if err != nil {
						return fmt.Errorf("error new helm repo: %w", err)
					}
					installer := helm.NewHelmInstallerQuick(pluginID, conf.Tkeel.Namespace, repo.Config())
					if err = installer.Uninstall(); err != nil {
						return fmt.Errorf("error uninstall(%s) err: %w", pluginID, err)
					}
					return nil
				}, helm.Mem, conf.Tkeel.Namespace)
			// init service.
			// plugin service.
			PluginSrvV1 := service.NewPluginServiceV1(conf.Tkeel, pOp, prOp, openapiCli)
			plugin_v1.RegisterPluginHTTPServer(httpSrv.Container, PluginSrvV1)
			plugin_v1.RegisterPluginServer(grpcSrv.GetServe(), PluginSrvV1)
			// oauth2 service.
			Oauth2SrvV1 := service.NewOauth2ServiceV1(conf.Tkeel.Secret, pOp)
			oauth2_v1.RegisterOauth2HTTPServer(httpSrv.Container, Oauth2SrvV1)
			oauth2_v1.RegisterOauth2Server(grpcSrv.GetServe(), Oauth2SrvV1)
			// repo service.
			repoSrv := service.NewRepoService()
			repo.RegisterRepoHTTPServer(httpSrv.Container, repoSrv)
			repo.RegisterRepoServer(grpcSrv.GetServe(), repoSrv)
			{
				// copy mysql configuration.
				conf.SecurityConf.RBAC.Adapter = conf.SecurityConf.Mysql
				// init security service.
				security_dao.SetUp(conf.SecurityConf.Mysql)
				// tenant.
				tenant_v1.RegisterToRestContainer(httpSrv.Container)
				// oauth2.
				oauth_v1.RegisterToRestContainer(httpSrv.Container, conf.SecurityConf.OAuth2)
				// rbac.
				rbac_v1.RegisterToRestContainer(httpSrv.Container, conf.SecurityConf.RBAC, conf.SecurityConf.OAuth2)
				// entity token.
				entityTokenOperator := entity.NewEntityTokenOperator(conf.Dapr.PrivateStateName, daprGRPCClient)
				if entityTokenOperator == nil {
					os.Exit(-1)
				}
				entity_v1.RegisterToRestContainer(httpSrv.Container, conf.SecurityConf.Entity, entityTokenOperator)
				// add auth role filter: require the admin role for the
				// /v1/tenants/users sub-tree only.
				tenantAdminRoleFilter := filters.AuthFilter(conf.SecurityConf.OAuth2, "admin")
				for _, ws := range httpSrv.Container.RegisteredWebServices() {
					if ws.RootPath() == "/v1/tenants" {
						ws.Filter(func(r1 *restful.Request, r2 *restful.Response, fc *restful.FilterChain) {
							if strings.HasPrefix(r1.Request.URL.Path, "/v1/tenants/users") {
								tenantAdminRoleFilter(r1, r2, fc)
								return
							}
							fc.ProcessFilter(r1, r2)
						})
					}
				}
			}
		}
	},
	Run: func(cmd *cobra.Command, args []string) {
		// Start the app, then block until SIGTERM/interrupt before stopping.
		if err := rudderApp.Run(context.TODO()); err != nil {
			log.Fatal("fatal rudder app run: %s", err)
			os.Exit(-2)
		}
		stop := make(chan os.Signal, 1)
		signal.Notify(stop, syscall.SIGTERM, os.Interrupt)
		<-stop
		if err := rudderApp.Stop(context.TODO()); err != nil {
			log.Fatal("fatal rudder app stop: %s", err)
			os.Exit(-3)
		}
	},
}
func init() {
	// Start from built-in defaults; command-line flags (and optionally the
	// --config file loaded in PreRun) override them.
	conf = config.NewDefaultConfiguration()
	conf.AttachCmdFlags(rootCmd.Flags().StringVar, rootCmd.Flags().BoolVar, rootCmd.Flags().IntVar)
	// --config falls back to the RUDDER_CONFIG environment variable.
	rootCmd.Flags().StringVar(&configFile, "config", getEnvStr("RUDDER_CONFIG", ""), "rudder config file path.")
	rootCmd.AddCommand(cmd.VersionCmd)
}
func getEnvStr(env string, defaultValue string) string {
v := os.Getenv(env)
if v == "" {
return defaultValue
}
return v
}
|
derekmerck/DIANA
|
packages/halibut/halibut/__init__.py
|
<reponame>derekmerck/DIANA
# Package metadata for halibut.
# NOTE(review): assigning to __name__ at module scope overwrites the name the
# import machinery set on this module — presumably intentional metadata here,
# but confirm, since it breaks `if __name__ == "__main__"` style checks.
__name__="halibut"
__version__ = "0.1.0"
# Author fields are redacted placeholders in this copy of the source.
__author__ = "<NAME>"
__author_email__ = "<EMAIL>"
|
rowandh/gdrive
|
vendor/github.com/soniakeys/bits/bits_test.go
|
<gh_stars>0
// Copyright 2017 <NAME>
// License MIT: http://opensource.org/licenses/MIT
package bits_test
import (
"fmt"
"reflect"
"testing"
"github.com/soniakeys/bits"
)
func ExampleNew() {
b := bits.New(80)
fmt.Printf("%#v\n", b)
// Output:
// bits.Bits{Num:80, Bits:[]uint64{0x0, 0x0}}
}
func ExampleNewGivens() {
b := bits.NewGivens(0, 63, 65, 2)
fmt.Println(b.Slice())
// Output:
// [0 2 63 65]
}
func ExampleBits_AllOnes() {
b := bits.New(5)
b.SetAll()
fmt.Println(b.AllOnes())
b.SetBit(2, 0)
fmt.Println(b.AllOnes())
// Output:
// true
// false
}
func ExampleBits_AllZeros() {
b := bits.New(5)
fmt.Println(b.AllZeros())
b.SetBit(2, 1)
fmt.Println(b.AllZeros())
// Output:
// true
// false
}
func ExampleBits_And() {
x := bits.NewGivens(3, 5, 6)
y := bits.NewGivens(4, 5, 6)
x.And(x, y)
fmt.Println(x.Slice())
// Output:
// [5 6]
}
func ExampleBits_AndNot() {
x := bits.NewGivens(3, 5, 6)
y := bits.NewGivens(4, 5, 6)
x.AndNot(x, y)
fmt.Println(x.Slice())
// Output:
// [3]
}
func ExampleBits_Bit() {
b := bits.NewGivens(0, 63, 65, 2)
for _, n := range []int{0, 1, 2, 63, 64, 65} {
fmt.Printf("bit %d: %d\n", n, b.Bit(n))
}
// Output:
// bit 0: 1
// bit 1: 0
// bit 2: 1
// bit 63: 1
// bit 64: 0
// bit 65: 1
}
func ExampleBits_ClearAll() {
x := bits.NewGivens(3, 5, 6)
fmt.Println(x)
x.ClearAll()
fmt.Println(x)
// Output:
// 1101000
// 0000000
}
func ExampleBits_ClearBits() {
b := bits.NewGivens(0, 2, 63, 65)
b.ClearBits(0, 63)
fmt.Println(b.Slice())
// Output:
// [2 65]
}
func ExampleBits_Equal() {
a := bits.NewGivens(1, 3)
b := bits.NewGivens(3, 1)
// Bits values with same bit numbers set compare equal
fmt.Println(a, b, a.Equal(b))
// Output:
// 1010 1010 true
}
func ExampleBits_IterateOnes() {
b := bits.NewGivens(0, 63, 65, 2)
b.IterateOnes(func(n int) bool {
fmt.Print(n, " ")
return true
})
fmt.Println()
// Output:
// 0 2 63 65
}
func ExampleBits_IterateZeros() {
b := bits.NewGivens(0, 63, 65, 2)
b.Not(b)
fmt.Println(b)
b.IterateZeros(func(n int) bool {
fmt.Print(n, " ")
return true
})
fmt.Println()
// Output:
// 010111111111111111111111111111111111111111111111111111111111111010
// 0 2 63 65
}
func ExampleBits_Not() {
x := bits.NewGivens(3, 5, 6)
x.Not(x)
fmt.Println(x.Slice())
// Output:
// [0 1 2 4]
}
func ExampleBits_OneFrom() {
b := bits.NewGivens(0, 63, 65, 2)
for n := 0; ; n++ {
n = b.OneFrom(n)
if n < 0 {
break
}
fmt.Print(n, " ")
}
fmt.Println()
// Output:
// 0 2 63 65
}
func ExampleBits_OneFrom_sieve() {
	// Sieve of Eratosthenes over [0, q*q): start with every bit set,
	// clear 0 and 1 (not prime), then clear multiples of each prime.
	q := 8
	n := q * q
	b := bits.New(n)
	b.SetAll()
	b.ClearBits(0, 1)
	// OneFrom advances p to the next still-set (still-prime) bit.
	for p := 2; p < q; p = b.OneFrom(p + 1) {
		for c := p + p; c < n; c += p {
			b.SetBit(c, 0) // c is a composite multiple of p
		}
	}
	fmt.Println(b.Slice())
	// Output:
	// [2 3 5 7 11 13 17 19 23 29 31 37 41 43 47 53 59 61]
}
func ExampleBits_Or() {
x := bits.NewGivens(3, 5, 6)
y := bits.NewGivens(4, 5, 6)
x.Or(x, y)
fmt.Println(x.Slice())
// Output:
// [3 4 5 6]
}
func ExampleBits_OnesCount() {
b := bits.NewGivens(0, 2, 128)
fmt.Println(b.OnesCount())
// Output:
// 3
}
func ExampleBits_Set() {
x := bits.NewGivens(0, 2)
var z bits.Bits
z.Set(x)
fmt.Println(z.Slice())
// Output:
// [0 2]
}
func ExampleBits_SetAll() {
b := bits.New(5)
b.SetAll()
fmt.Println(b.Slice())
// Output:
// [0 1 2 3 4]
}
func ExampleBits_SetBit() {
b := bits.New(5)
b.SetBit(0, 1)
b.SetBit(2, 1)
fmt.Println(b.Slice())
// Output:
// [0 2]
}
func ExampleBits_SetBits() {
b := bits.NewGivens(2, 65)
b.SetBits(0, 63)
fmt.Println(b.Slice())
// Output:
// [0 2 63 65]
}
func ExampleBits_Single() {
x := bits.NewGivens(0, 2)
y := bits.NewGivens(129)
var z bits.Bits
fmt.Println(x.OnesCount(), "bits, single =", x.Single())
fmt.Println(y.OnesCount(), "bit, single =", y.Single())
fmt.Println(z.OnesCount(), "bits, single =", z.Single())
// Output:
// 2 bits, single = false
// 1 bit, single = true
// 0 bits, single = false
}
func ExampleBits_Slice() {
b := bits.NewGivens(0, 63, 65, 2)
fmt.Println(b.Slice())
// Output:
// [0 2 63 65]
}
func ExampleBits_String() {
b := bits.New(66)
b.SetBits(0, 2, 63, 64)
fmt.Println("bit 65 bit 0")
fmt.Println("| |")
fmt.Println("v v")
fmt.Println(b.String())
fmt.Println(b)
// Output:
// bit 65 bit 0
// | |
// v v
// 011000000000000000000000000000000000000000000000000000000000000101
// 011000000000000000000000000000000000000000000000000000000000000101
}
func ExampleBits_Xor() {
x := bits.NewGivens(3, 5, 6)
y := bits.NewGivens(4, 5, 6)
x.Xor(x, y)
fmt.Println(x.Slice())
// Output:
// [3 4]
}
func ExampleBits_ZeroFrom() {
b := bits.NewGivens(0, 63, 65, 2)
b.Not(b)
fmt.Println(b)
for n := 0; ; n++ {
n = b.ZeroFrom(n)
if n < 0 {
break
}
fmt.Print(n, " ")
}
fmt.Println()
// Output:
// 010111111111111111111111111111111111111111111111111111111111111010
// 0 2 63 65
}
// Tests probe some boundary conditions and push coverage to 100%
func TestNew(t *testing.T) {
// test that proper length slice is allocated
for _, tc := range []struct{ n, l int }{
{0, 0},
{1, 1},
{64, 1},
{65, 2},
} {
b := bits.New(tc.n)
if len(b.Bits) != tc.l {
t.Fatal("len(b.Bits) = ", len(b.Bits), " want ", tc.l)
}
}
// test that negative bit number panics
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
bits.New(-1)
}
func TestNewGivens(t *testing.T) {
// test negative bit number panics
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
bits.NewGivens(0, -1, 3)
}
func TestAllOnes(t *testing.T) {
// exercise early return
b := bits.NewGivens(63, 64)
b.Not(b)
if b.AllOnes() {
t.Fatal("real problem")
}
}
func TestAllZeros(t *testing.T) {
// exercise early return
b := bits.NewGivens(63, 64)
if b.AllZeros() {
t.Fatal("real problem")
}
}
func TestAnd(t *testing.T) {
// test allocate z if Num is wrong size
x := bits.New(1)
z := bits.Bits{}
z.And(x, x)
if z.Num != 1 || len(z.Bits) != 1 {
t.Fatal("z not allocated to size of args")
}
// test different Nums panic
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
z.And(z, bits.Bits{})
}
func TestAndNot(t *testing.T) {
// test allocate z if Num is wrong size
x := bits.New(1)
z := bits.Bits{}
z.AndNot(x, x)
if z.Num != 1 || len(z.Bits) != 1 {
t.Fatal("z not allocated to size of args")
}
// test different Nums panic
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
z.AndNot(z, bits.Bits{})
}
func TestBit(t *testing.T) {
b := bits.New(4)
// test negative bit number panics
func() {
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
b.Bit(-1)
}()
// test number out of range number panics
func() {
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
b.Bit(4)
}()
}
func TestEqual(t *testing.T) {
// test empty Bits
var a bits.Bits
if !a.Equal(a) {
t.Fatal("empty")
}
// test unequal first word
a = bits.NewGivens(200)
if a.Equal(bits.NewGivens(0, 200)) {
t.Fatal("nope 0")
}
// test unequal last word
a = bits.NewGivens(200)
if a.Equal(bits.NewGivens(199, 200)) {
t.Fatal("nope 199")
}
// test unqual Nums
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
a.Equal(bits.Bits{})
}
func TestIterateOnes(t *testing.T) {
// test visitor abort
b := bits.NewGivens(20)
if b.IterateOnes(func(int) bool { return false }) {
t.Fatal("but, but")
}
// test 1 after Num
b.Num = 10
b.IterateOnes(func(int) bool {
t.Fatal("just no")
return false
})
}
func TestIterateZeros(t *testing.T) {
// test visitor abort
b := bits.NewGivens(20)
b.Not(b)
if b.IterateZeros(func(int) bool { return false }) {
t.Fatal("but, but")
}
// test 0 after Num
b.Num = 10
b.IterateZeros(func(int) bool {
t.Fatal("just no")
return false
})
}
func TestNot(t *testing.T) {
// test allocation
var z bits.Bits
z.Not(bits.New(5))
if z.Num != 5 {
t.Fatal("z5")
}
}
func TestOneFrom(t *testing.T) {
// test 1 bit >= Num
b := bits.NewGivens(15)
b.Num = 8
if b.OneFrom(0) != -1 {
t.Fatal("iterate past Num")
}
// test all words zeros
b = bits.New(100)
if b.OneFrom(0) != -1 {
t.Fatal("no 100")
}
// test first word 0
b.SetBit(99, 1)
if b.OneFrom(0) != 99 {
t.Fatal("no 99")
}
// test 1 after Num
b.Num = 90
if b.OneFrom(0) != -1 {
t.Fatal("no 90")
}
}
func TestOr(t *testing.T) {
// test allocate z if Num is wrong size
x := bits.New(1)
z := bits.Bits{}
z.Or(x, x)
if z.Num != 1 || len(z.Bits) != 1 {
t.Fatal("z not allocated to size of args")
}
// test different Nums panic
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
z.Or(z, bits.Bits{})
}
func TestSetBit(t *testing.T) {
// primitive test, independent of other methods
b := bits.New(80)
for _, tc := range []struct {
pos, x int
bits []uint64
}{
{0, 1, []uint64{1, 0}},
{2, 1, []uint64{5, 0}},
{63, 1, []uint64{0x8000000000000005, 0}},
{65, 1, []uint64{0x8000000000000005, 2}},
{63, 0, []uint64{5, 2}},
{0, 0, []uint64{4, 2}},
} {
b.SetBit(tc.pos, tc.x)
if !reflect.DeepEqual(b.Bits, tc.bits) {
t.Fatal("got ", b.Bits, " want ", tc.bits)
}
}
// test set out of range
func() {
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
b.SetBit(-1, 1)
}()
func() {
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
b.SetBit(80, 1)
}()
}
func TestSetBits(t *testing.T) {
b := bits.New(10)
// test set out of range
defer func() {
if recover() == nil {
t.Fatal("panic expected")
}
}()
b.SetBits(1, 10)
}
func TestSingle(t *testing.T) {
// test early return
b := bits.NewGivens(7, 8, 78)
if b.Single() {
t.Fatal(78)
}
}
// TestSlice covers Slice on a Bits value whose single word is all zeros.
func TestSlice(t *testing.T) {
	// test zero word
	b := bits.New(5)
	if len(b.Slice()) != 0 {
		// Was panic("of nothing"); use t.Fatal like every other test in
		// this file so the failure is reported through the testing framework.
		t.Fatal("of nothing")
	}
}
// TestString covers String on a zero-value Bits (Num == 0), which must
// stringify to the empty string.
func TestString(t *testing.T) {
	// test Num == 0
	var b bits.Bits
	// Was `s > ""`; `s != ""` states the intended non-emptiness check directly
	// (the two are equivalent for strings, but != is the idiomatic form).
	if s := b.String(); s != "" {
		t.Fatal(s)
	}
}
// TestXor mirrors TestAnd/TestAndNot/TestOr: Xor must allocate the receiver
// when its size differs from the arguments, and panic on mismatched Nums.
func TestXor(t *testing.T) {
	// test allocate z if Num is wrong size
	x := bits.New(1)
	z := bits.Bits{}
	z.Xor(x, x)
	if z.Num != 1 || len(z.Bits) != 1 {
		t.Fatal("z not allocated to size of args")
	}
	// test different Nums panic
	defer func() {
		// Match the `recover() == nil` pattern used by the sibling tests
		// (the recovered value itself was never used here).
		if recover() == nil {
			t.Fatal("panic expected")
		}
	}()
	z.Xor(z, bits.Bits{})
}
func TestZeroFrom(t *testing.T) {
// test 0 bit >= Num
b := bits.NewGivens(15)
b.Not(b)
b.Num = 8
if b.ZeroFrom(0) != -1 {
t.Fatal("iterate past Num")
}
// test all words ones
b = bits.New(100)
b.SetAll()
if b.ZeroFrom(0) != -1 {
t.Fatal("no 100")
}
// test first word 1s
b.SetBit(99, 0)
if b.ZeroFrom(0) != 99 {
t.Fatal("no 99")
}
// test 0 after Num
b.Num = 90
if b.ZeroFrom(0) != -1 {
t.Fatal("no 90")
}
}
|
mengyangbai/leetcode
|
array/countRangeSum.py
|
<filename>array/countRangeSum.py
import bisect
class Solution(object):
    def countRangeSum(self, nums, lower, upper):
        """
        Count index pairs (i, j) with i <= j and lower <= sum(nums[i..j]) <= upper.

        Strategy: running prefix sums + a Fenwick tree over the sorted distinct
        prefix sums (coordinate compression). For each prefix sum S, count the
        previously inserted prefix sums P with S - upper <= P <= S - lower,
        then insert S. O(n log n) overall.

        :type nums: List[int]
        :type lower: int
        :type upper: int
        :rtype: int
        """
        sums = nums[:]
        # In-place running prefix sums: sums[x] = nums[0] + ... + nums[x].
        for x in range(1, len(sums)):
            sums[x] += sums[x - 1]
        # Compress the prefix-sum values into Fenwick tree positions.
        osums = sorted(set(sums))
        ft = FenwickTree(len(osums))
        ans = 0
        for sumi in sums:
            # Count earlier prefix sums lying in [sumi - upper, sumi - lower].
            left = bisect.bisect_left(osums, sumi - upper)
            right = bisect.bisect_right(osums, sumi - lower)
            # The boolean term counts the subarray that starts at index 0
            # (no earlier prefix exists for it): lower <= sumi <= upper.
            ans += ft.sum(right) - ft.sum(left) + (lower <= sumi <= upper)
            # Record sumi at its 1-based compressed position for later queries.
            ft.add(bisect.bisect_right(osums, sumi), 1)
        return ans
class FenwickTree(object):
    """Binary indexed tree over positions 1..n supporting point updates
    and prefix-sum queries, each in O(log n)."""

    def __init__(self, n):
        self.n = n
        self.sums = [0] * (n + 1)  # 1-based; slot 0 is unused

    def add(self, x, val):
        """Add val at position x (1-based)."""
        index = x
        while index <= self.n:
            self.sums[index] += val
            index += self.lowbit(index)

    def lowbit(self, x):
        """Lowest set bit of x (the Fenwick step size)."""
        return x & -x

    def sum(self, x):
        """Sum of positions 1..x (0 when x <= 0)."""
        total, index = 0, x
        while index > 0:
            total += self.sums[index]
            index -= self.lowbit(index)
        return total
|
rhpvorderman/galaxy
|
client/src/components/History/providers/CollectionContentProvider/CollectionContentProvider.js
|
<filename>client/src/components/History/providers/CollectionContentProvider/CollectionContentProvider.js
import { DatasetCollection } from "../../model";
import { ContentProvider, processContentStreams } from "../ContentProvider";
import { collectionPayload } from "./collectionPayload";
export default {
    // Content provider for the items inside a single dataset collection;
    // builds on the generic ContentProvider mixin.
    mixins: [ContentProvider],
    computed: {
        // Normalize the `parent` prop into a DatasetCollection model instance
        // (callers may pass either the model or its raw props).
        dsc() {
            if (this.parent instanceof DatasetCollection) {
                return this.parent;
            }
            return new DatasetCollection(this.parent);
        },
    },
    watch: {
        // Reset the stored scroll position when a different collection is shown.
        dsc(newDsc, oldDsc) {
            if (!(newDsc.id == oldDsc.id)) {
                this.resetScrollPos();
            }
        },
    },
    methods: {
        // Wire params/parent/scroll-position sources into content payloads.
        // NOTE(review): params$/scrollPos$/watch$ presumably come from the
        // ContentProvider mixin (observable-style streams) — confirm there.
        initStreams() {
            const { debouncePeriod, pageSize, params$, scrollPos$, debug } = this;
            const parent$ = this.watch$("dsc", true);
            const sources = { params$, parent$, scrollPos$ };
            const settings = { debouncePeriod, pageSize, debug };
            return processContentStreams(collectionPayload, sources, settings);
        },
    },
};
|
mvladev/gardenctl
|
vendor/google.golang.org/genproto/googleapis/ads/googleads/v0/enums/access_reason.pb.go
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/ads/googleads/v0/enums/access_reason.proto
package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v0/enums"
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// Enum describing possible access reasons.
type AccessReasonEnum_AccessReason int32
const (
// Not specified.
AccessReasonEnum_UNSPECIFIED AccessReasonEnum_AccessReason = 0
// Used for return value only. Represents value unknown in this version.
AccessReasonEnum_UNKNOWN AccessReasonEnum_AccessReason = 1
// The entity is owned by the user.
AccessReasonEnum_OWNED AccessReasonEnum_AccessReason = 2
// The entity is shared to the user.
AccessReasonEnum_SHARED AccessReasonEnum_AccessReason = 3
// The entity is licensed to the user.
AccessReasonEnum_LICENSED AccessReasonEnum_AccessReason = 4
// The user subscribed to the entity.
AccessReasonEnum_SUBSCRIBED AccessReasonEnum_AccessReason = 5
// The entity is accessible to the user.
AccessReasonEnum_AFFILIATED AccessReasonEnum_AccessReason = 6
)
var AccessReasonEnum_AccessReason_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "OWNED",
3: "SHARED",
4: "LICENSED",
5: "SUBSCRIBED",
6: "AFFILIATED",
}
var AccessReasonEnum_AccessReason_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"OWNED": 2,
"SHARED": 3,
"LICENSED": 4,
"SUBSCRIBED": 5,
"AFFILIATED": 6,
}
func (x AccessReasonEnum_AccessReason) String() string {
return proto.EnumName(AccessReasonEnum_AccessReason_name, int32(x))
}
func (AccessReasonEnum_AccessReason) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_access_reason_757eb5f4e3d691fd, []int{0, 0}
}
// Indicates the way the entity such as user list is related to a user.
type AccessReasonEnum struct {
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *AccessReasonEnum) Reset() { *m = AccessReasonEnum{} }
func (m *AccessReasonEnum) String() string { return proto.CompactTextString(m) }
func (*AccessReasonEnum) ProtoMessage() {}
func (*AccessReasonEnum) Descriptor() ([]byte, []int) {
return fileDescriptor_access_reason_757eb5f4e3d691fd, []int{0}
}
func (m *AccessReasonEnum) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_AccessReasonEnum.Unmarshal(m, b)
}
func (m *AccessReasonEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_AccessReasonEnum.Marshal(b, m, deterministic)
}
func (dst *AccessReasonEnum) XXX_Merge(src proto.Message) {
xxx_messageInfo_AccessReasonEnum.Merge(dst, src)
}
func (m *AccessReasonEnum) XXX_Size() int {
return xxx_messageInfo_AccessReasonEnum.Size(m)
}
func (m *AccessReasonEnum) XXX_DiscardUnknown() {
xxx_messageInfo_AccessReasonEnum.DiscardUnknown(m)
}
var xxx_messageInfo_AccessReasonEnum proto.InternalMessageInfo
func init() {
proto.RegisterType((*AccessReasonEnum)(nil), "google.ads.googleads.v0.enums.AccessReasonEnum")
proto.RegisterEnum("google.ads.googleads.v0.enums.AccessReasonEnum_AccessReason", AccessReasonEnum_AccessReason_name, AccessReasonEnum_AccessReason_value)
}
func init() {
proto.RegisterFile("google/ads/googleads/v0/enums/access_reason.proto", fileDescriptor_access_reason_757eb5f4e3d691fd)
}
var fileDescriptor_access_reason_757eb5f4e3d691fd = []byte{
// 298 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4e, 0xc2, 0x30,
0x1c, 0xc6, 0x1d, 0x08, 0xea, 0x1f, 0xa2, 0xb5, 0x77, 0x0e, 0xf2, 0x00, 0xdd, 0x8c, 0x47, 0x4f,
0x1d, 0x2d, 0xd8, 0x48, 0x0a, 0xd9, 0x1c, 0x24, 0x66, 0x89, 0x99, 0xdb, 0xd2, 0x98, 0xb0, 0x15,
0x57, 0xe1, 0x0d, 0x7c, 0x11, 0x8f, 0x3e, 0x86, 0x47, 0x9f, 0xca, 0xac, 0x13, 0xc2, 0x45, 0x2f,
0xcd, 0xd7, 0x7e, 0xfd, 0xb5, 0xff, 0xef, 0x83, 0x6b, 0xa5, 0xb5, 0x5a, 0xe5, 0x6e, 0x92, 0x19,
0xb7, 0x91, 0xb5, 0xda, 0x7a, 0x6e, 0x5e, 0x6e, 0x0a, 0xe3, 0x26, 0x69, 0x9a, 0x1b, 0xf3, 0x54,
0xe5, 0x89, 0xd1, 0x25, 0x59, 0x57, 0xfa, 0x4d, 0xe3, 0x41, 0x73, 0x8f, 0x24, 0x99, 0x21, 0x7b,
0x84, 0x6c, 0x3d, 0x62, 0x91, 0xe1, 0xbb, 0x03, 0x88, 0x5a, 0x2c, 0xb0, 0x14, 0x2f, 0x37, 0xc5,
0xf0, 0x15, 0xfa, 0x87, 0x67, 0xf8, 0x02, 0x7a, 0x91, 0x0c, 0xe7, 0x7c, 0x24, 0xc6, 0x82, 0x33,
0x74, 0x84, 0x7b, 0x70, 0x12, 0xc9, 0x7b, 0x39, 0x5b, 0x4a, 0xe4, 0xe0, 0x33, 0xe8, 0xcc, 0x96,
0x92, 0x33, 0xd4, 0xc2, 0x00, 0xdd, 0xf0, 0x8e, 0x06, 0x9c, 0xa1, 0x36, 0xee, 0xc3, 0xe9, 0x54,
0x8c, 0xb8, 0x0c, 0x39, 0x43, 0xc7, 0xf8, 0x1c, 0x20, 0x8c, 0xfc, 0x70, 0x14, 0x08, 0x9f, 0x33,
0xd4, 0xa9, 0xf7, 0x74, 0x3c, 0x16, 0x53, 0x41, 0x1f, 0x38, 0x43, 0x5d, 0xff, 0xcb, 0x81, 0xab,
0x54, 0x17, 0xe4, 0xdf, 0x69, 0xfd, 0xcb, 0xc3, 0xb1, 0xe6, 0x75, 0xbe, 0xb9, 0xf3, 0xe8, 0xff,
0x32, 0x4a, 0xaf, 0x92, 0x52, 0x11, 0x5d, 0x29, 0x57, 0xe5, 0xa5, 0x4d, 0xbf, 0x2b, 0x69, 0xfd,
0x62, 0xfe, 0xe8, 0xec, 0xd6, 0xae, 0x1f, 0xad, 0xf6, 0x84, 0xd2, 0xcf, 0xd6, 0x60, 0xd2, 0x3c,
0x45, 0x33, 0x43, 0x1a, 0x59, 0xab, 0x85, 0x47, 0xea, 0x5a, 0xcc, 0xf7, 0xce, 0x8f, 0x69, 0x66,
0xe2, 0xbd, 0x1f, 0x2f, 0xbc, 0xd8, 0xfa, 0xcf, 0x5d, 0xfb, 0xe9, 0xcd, 0x4f, 0x00, 0x00, 0x00,
0xff, 0xff, 0xd0, 0x24, 0x60, 0xa5, 0xa7, 0x01, 0x00, 0x00,
}
|
vonaxs/EdgeAdmin
|
web/public/js/components/server/http-cache-config-box.js
|
Vue.component("http-cache-config-box", {
props: ["v-cache-config", "v-is-location", "v-is-group", "v-cache-policy"],
data: function () {
let cacheConfig = this.vCacheConfig
if (cacheConfig == null) {
cacheConfig = {
isPrior: false,
isOn: false,
addStatusHeader: true,
addAgeHeader: false,
enableCacheControlMaxAge: false,
cacheRefs: [],
purgeIsOn: false,
purgeKey: "",
disablePolicyRefs: false
}
}
if (cacheConfig.cacheRefs == null) {
cacheConfig.cacheRefs = []
}
return {
cacheConfig: cacheConfig,
moreOptionsVisible: false,
enablePolicyRefs: !cacheConfig.disablePolicyRefs
}
},
watch: {
enablePolicyRefs: function (v) {
this.cacheConfig.disablePolicyRefs = !v
}
},
methods: {
isOn: function () {
return ((!this.vIsLocation && !this.vIsGroup) || this.cacheConfig.isPrior) && this.cacheConfig.isOn
},
isPlus: function () {
return Tea.Vue.teaIsPlus
},
generatePurgeKey: function () {
let r = Math.random().toString() + Math.random().toString()
let s = r.replace(/0\./g, "")
.replace(/\./g, "")
let result = ""
for (let i = 0; i < s.length; i++) {
result += String.fromCharCode(parseInt(s.substring(i, i + 1)) + ((Math.random() < 0.5) ? "a" : "A").charCodeAt(0))
}
this.cacheConfig.purgeKey = result
},
showMoreOptions: function () {
this.moreOptionsVisible = !this.moreOptionsVisible
},
changeStale: function (stale) {
this.cacheConfig.stale = stale
}
},
template: `<div>
<input type="hidden" name="cacheJSON" :value="JSON.stringify(cacheConfig)"/>
<table class="ui table definition selectable">
<prior-checkbox :v-config="cacheConfig" v-if="vIsLocation || vIsGroup"></prior-checkbox>
<tbody v-show="(!vIsLocation && !vIsGroup) || cacheConfig.isPrior">
<tr v-show="!vIsGroup">
<td>缓存策略</td>
<td>
<div v-if="vCachePolicy != null">{{vCachePolicy.name}} <link-icon :href="'/servers/components/cache/policy?cachePolicyId=' + vCachePolicy.id"></link-icon>
<p class="comment">使用当前服务所在集群的设置。</p>
</div>
<span v-else class="red">当前集群没有设置缓存策略,当前配置无法生效。</span>
</td>
</tr>
<tr>
<td class="title">开启缓存</td>
<td>
<div class="ui checkbox">
<input type="checkbox" v-model="cacheConfig.isOn"/>
<label></label>
</div>
</td>
</tr>
</tbody>
<tbody v-show="isOn()">
<tr>
<td colspan="2">
<a href="" @click.prevent="showMoreOptions"><span v-if="moreOptionsVisible">收起选项</span><span v-else>更多选项</span><i class="icon angle" :class="{up: moreOptionsVisible, down:!moreOptionsVisible}"></i></a>
</td>
</tr>
</tbody>
<tbody v-show="isOn() && moreOptionsVisible">
<tr>
<td>使用默认缓存条件</td>
<td>
<checkbox v-model="enablePolicyRefs"></checkbox>
<p class="comment">选中后使用系统中已经定义的默认缓存条件。</p>
</td>
</tr>
<tr>
<td>添加X-Cache Header</td>
<td>
<checkbox v-model="cacheConfig.addStatusHeader"></checkbox>
<p class="comment">选中后自动在响应Header中增加<code-label>X-Cache: BYPASS|MISS|HIT|PURGE</code-label>。</p>
</td>
</tr>
<tr>
<td>添加Age Header</td>
<td>
<checkbox v-model="cacheConfig.addAgeHeader"></checkbox>
<p class="comment">选中后自动在响应Header中增加<code-label>Age: [有效时间秒数]</code-label>。</p>
</td>
</tr>
<tr>
<td>支持源站控制有效时间</td>
<td>
<checkbox v-model="cacheConfig.enableCacheControlMaxAge"></checkbox>
<p class="comment">选中后表示支持源站在Header中设置的<code-label>Cache-Control: max-age=[有效时间秒数]</code-label>。</p>
</td>
</tr>
<tr>
<td class="color-border">允许PURGE</td>
<td>
<checkbox v-model="cacheConfig.purgeIsOn"></checkbox>
<p class="comment">允许使用PURGE方法清除某个URL缓存。</p>
</td>
</tr>
<tr v-show="cacheConfig.purgeIsOn">
<td class="color-border">PURGE Key *</td>
<td>
<input type="text" maxlength="200" v-model="cacheConfig.purgeKey"/>
<p class="comment"><a href="" @click.prevent="generatePurgeKey">[随机生成]</a>。需要在PURGE方法调用时加入<code-label>Edge-Purge-Key: {{cacheConfig.purgeKey}}</code-label> Header。只能包含字符、数字、下划线。</p>
</td>
</tr>
</tbody>
</table>
<div v-if="isOn() && moreOptionsVisible && isPlus()">
<h4>过时缓存策略</h4>
<http-cache-stale-config :v-cache-stale-config="cacheConfig.stale" @change="changeStale"></http-cache-stale-config>
</div>
<div v-show="isOn()" style="margin-top: 1em">
<h4>缓存条件</h4>
<http-cache-refs-config-box :v-cache-config="cacheConfig" :v-cache-refs="cacheConfig.cacheRefs" ></http-cache-refs-config-box>
</div>
<div class="margin"></div>
</div>`
})
|
gstavosanchez/tytus
|
parser/team15/TytusDB_G15/main.py
|
from tkinter import *
from tkinter import ttk
import random
from gramatica import parse
from principal import *
import ts as TS
from expresiones import *
from instrucciones import *
from ast import *
instrucciones_Global = []
root = Tk()
w, h = root.winfo_screenwidth(), root.winfo_screenheight()
root.geometry("%dx%d+0+0" % (w, h))
root.title("TytusDB - Query Tools")
global selected
selected = False
# ACTIONS
def analizar(txt):
    """Parse and execute the full editor buffer `txt`, then render the result
    in the output table."""
    global instrucciones_Global
    # NOTE(review): `g` is not defined in this file; presumably it is exported
    # by the star import from `principal` — confirm.
    instrucciones = g.parse(txt)
    instrucciones_Global = instrucciones
    ts_global = TS.TablaDeSimbolos()  # fresh symbol table per run
    salida = procesar_instrucciones(instrucciones, ts_global)
    print("analizando...")
    print(txt)
    salida_table(2,salida)
    #parse(txt)
def analizar_select(e):
    """Parse and execute only the text currently selected in the editor.

    `e` is the Tk event argument (unused); the menu/toolbar bindings call this
    with False instead.
    """
    global selected
    # NOTE(review): Text.selection_get() raises TclError when nothing is
    # selected; that case is not handled here — confirm whether a guard or
    # try/except is wanted.
    if my_text.selection_get():
        global instrucciones_Global
        selected = my_text.selection_get()
        print(selected)
        instrucciones = g.parse(selected)
        instrucciones_Global = instrucciones
        ts_global = TS.TablaDeSimbolos()
        salida = procesar_instrucciones(instrucciones, ts_global)
        salida_table(2,salida)
def generarReporteAST():
    """Generate the AST report for the most recently parsed instruction list
    (stored in instrucciones_Global by analizar/analizar_select)."""
    global instrucciones_Global
    astGraph = AST()
    astGraph.generarAST(instrucciones_Global)
def graficar_TS():
    '''Render the symbol-table report. Currently a stub: the ts_graph() call
    is commented out, so this function is a no-op.'''
    #ts_graph()
# --- Top-level widget layout: toolbar, editor with both scrollbars ---
toolbar_frame = Frame(root)
toolbar_frame.pack(fill = X)
text_frame = Frame(root)
text_frame.pack(pady=5)
# VERTICAL SCROLL BAR
text_scroll = Scrollbar(text_frame)
text_scroll.pack(side = RIGHT, fill = Y)
# HORIZONTAL SCROLL BAR
hor_scroll = Scrollbar(text_frame, orient = 'horizontal')
hor_scroll.pack(side = BOTTOM, fill = X)
# Editor height scales with the screen height.
my_text_h = int(h * 0.028)
my_text = Text(text_frame, width=w, height=my_text_h, selectforeground="black", undo=True, yscrollcommand=text_scroll.set, wrap = "none", xscrollcommand = hor_scroll.set)
my_text.pack()
text_scroll.config(command = my_text.yview)
hor_scroll.config(command = my_text.xview)
#MENU
my_menu = Menu(root)
root.config(menu = my_menu)
file_menu = Menu(my_menu, tearoff = False)
my_menu.add_cascade(label = "Archivo", menu = file_menu)
file_menu.add_command(label = "Analizar", command = lambda: analizar(my_text.get("1.0",'end-1c')))
file_menu.add_command(label = "Analizar Query" , command = lambda: analizar_select(False))
file_menu.add_command(label = "Save")  # NOTE(review): no command wired yet
file_menu.add_separator()
file_menu.add_command(label = "Exit", command = root.quit)
reportes_menu = Menu(my_menu, tearoff = False)
my_menu.add_cascade(label = "Reportes", menu = reportes_menu)
reportes_menu.add_command(label = "Tabla de Simbolos", command = lambda: graficar_TS())
reportes_menu.add_command(label = "AST", command = lambda: generarReporteAST())
reportes_menu.add_command(label = "Errores")  # NOTE(review): no command wired yet
# Toolbar buttons mirroring the two "Analizar" menu entries.
analizar_button = Button(toolbar_frame)
photoCompila = PhotoImage(file="iconos/all.png")
analizar_button.config(image=photoCompila, width="50", height="50", activebackground="black",command = lambda: analizar(my_text.get("1.0",'end-1c')))
analizar_button.grid(row = 0, column = 0, sticky = W)
analizar_step_step = Button(toolbar_frame)
photoCompila1 = PhotoImage(file="iconos/select.png")
analizar_step_step.config(image=photoCompila1, width="50", height="50", activebackground="black",command = lambda: analizar_select(False))
analizar_step_step.grid(row = 0, column = 1, sticky = W)
def salida_table(salida, textoSalida):
    """Render the output panel below the editor.

    salida == 1 builds a demo Treeview grid filled with random placeholder
    data; any other value shows ``textoSalida`` in a read-only Text widget.
    The previous panel (if any) is destroyed first so panels don't stack.
    """
    if salida == 1:
        global salida_frame
        try:
            global salida_frame
            # Drop the previous table panel, if one exists.
            salida_frame.destroy()
        except:
            pass
        salida_frame = LabelFrame(root, text = "Salida")
        salida_frame.pack(fill = X)
        for widget in salida_frame.winfo_children():
            widget.destroy()
        # Random column count -> placeholder grid only, not real query output.
        global random_numero
        random_numero = random.randint(5, 10)
        prueba_columna = []
        i = 1
        while i < random_numero:
            prueba_columna.append(i)
            i += 1
        print(prueba_columna)
        my_tree = ttk.Treeview(salida_frame, columns=prueba_columna)
        my_tree.pack(side=LEFT)
        my_tree.place(x=0, y=0)
        for record in prueba_columna:
            # print(record-1)
            # Column #0 is the narrow row-number column; the rest get labels.
            if record == 1:
                my_tree.column("#" + str(record - 1), stretch=False, width=40)
                my_tree.heading("#" + str(record - 1), text = " ")
            else:
                my_tree.column("#" + str(record - 1), stretch=False, width=100)
                my_tree.heading("#" + str(record - 1), text = "Label" + str(record - 1))
        yscrollbar = ttk.Scrollbar(salida_frame, orient = "vertical", command=my_tree.yview)
        yscrollbar.pack(side = RIGHT, fill = Y)
        xscrollbar = ttk.Scrollbar(salida_frame, orient="horizontal", command = my_tree.xview)
        xscrollbar.pack(side=BOTTOM, fill = X)
        my_tree.configure(yscrollcommand=yscrollbar.set, xscrollcommand = xscrollbar.set)
        # Fill the grid with 49 dummy rows.
        data = []
        j = 1
        while j < 50:
            data.append(["Usuario" + str(j), "Password" + str(j), j])
            j += 1
        count = 1
        for record in data:
            my_tree.insert(parent = '', index = 'end', iid=count, text = str(count), values = (record[0], record[1], record[2]))
            count += 1
        my_tree.pack(fill = X)
    else:
        global salida_frame1
        try:
            global salida_frame1
            # Drop the previous text panel, if one exists.
            salida_frame1.destroy()
        except:
            pass
        salida_frame1 = LabelFrame(root, text = "Salida")
        salida_frame1.pack(fill = X)
        my_text1 = Text(salida_frame1)
        my_text1.pack(fill=X)
        my_text1.delete(1.0, "end")
        my_text1.insert(1.0, textoSalida)
        # Read-only: the user should not edit the reported output.
        my_text1.config(state=DISABLED)
root.mainloop()
|
mingmwang/parquet-mr
|
parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectInputOutputFormat.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.avro;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.avro.Schema;
import org.apache.avro.reflect.Nullable;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.Union;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.parquet.column.ColumnReader;
import org.apache.parquet.filter.ColumnPredicates;
import org.apache.parquet.filter.ColumnRecordFilter;
import org.apache.parquet.filter.RecordFilter;
import org.apache.parquet.filter.UnboundRecordFilter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.Thread.sleep;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestReflectInputOutputFormat {
private static final Logger LOG = LoggerFactory.getLogger(TestReflectInputOutputFormat.class);
/** A car service record (date + mechanic), nested inside {@link Car#serviceHistory}. */
public static class Service {
  private long date;
  private String mechanic;

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Service service = (Service) o;
    // Objects.equals is null-safe, unlike the previous mechanic.equals(...) call
    // which threw NPE when mechanic was unset.
    return date == service.date && Objects.equals(mechanic, service.mechanic);
  }

  @Override
  public int hashCode() {
    // Consistent with equals; Objects.hash tolerates a null mechanic.
    return Objects.hash(date, mechanic);
  }
}
/** Engine variants; exercises Avro enum mapping and the push-down predicate below. */
public static enum EngineType {
  DIESEL, PETROL, ELECTRIC
}
/** Engine description embedded in {@link Car}; value-compared in the round-trip tests. */
public static class Engine {
  private EngineType type;
  private float capacity;
  private boolean hasTurboCharger;

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Engine engine = (Engine) o;
    // Float.compare handles NaN correctly; plain == would not.
    return Float.compare(engine.capacity, capacity) == 0
        && hasTurboCharger == engine.hasTurboCharger
        && type == engine.type;
  }

  @Override
  public int hashCode() {
    // Objects.hash is null-safe (the old type.hashCode() threw NPE for null type).
    return Objects.hash(type, capacity, hasTurboCharger);
  }
}
/** One of the two concrete {@link Extra} union members (see the @Union annotation). */
public static class Stereo extends Extra {
  private String make;
  private int speakers;

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Stereo stereo = (Stereo) o;
    // Objects.equals is null-safe, unlike the previous make.equals(...) call.
    return speakers == stereo.speakers && Objects.equals(make, stereo.make);
  }

  @Override
  public int hashCode() {
    return Objects.hash(make, speakers);
  }
}
/** The other concrete {@link Extra} union member; used by {@code nextRecord}. */
public static class LeatherTrim extends Extra {
  private String colour;

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    LeatherTrim that = (LeatherTrim) o;
    // Null-safe comparison; the old colour.equals(...) threw NPE on unset colour.
    return Objects.equals(colour, that.colour);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(colour);
  }
}
// Marker base class; @Union tells Avro reflect which concrete subtypes
// (or null, via Void.class) may appear in Car.optionalExtra.
@Union({Void.class, Stereo.class, LeatherTrim.class})
public static class Extra {}
/**
 * Full test record written to / read back from Parquet via Avro reflection.
 * serviceHistory is @Nullable so the reflect schema allows a missing list.
 */
public static class Car {
  private long year;
  private String registration;
  private String make;
  private String model;
  private byte[] vin;
  private int doors;
  private Engine engine;
  private Extra optionalExtra = null;
  @Nullable
  private List<Service> serviceHistory = null;

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Car car = (Car) o;
    // Objects.equals makes every reference-field comparison null-safe;
    // Arrays.equals compares vin by content, not identity.
    return year == car.year
        && doors == car.doors
        && Objects.equals(registration, car.registration)
        && Objects.equals(make, car.make)
        && Objects.equals(model, car.model)
        && Arrays.equals(vin, car.vin)
        && Objects.equals(engine, car.engine)
        && Objects.equals(optionalExtra, car.optionalExtra)
        && Objects.equals(serviceHistory, car.serviceHistory);
  }

  @Override
  public int hashCode() {
    int result = Objects.hash(year, registration, make, model, doors,
        engine, optionalExtra, serviceHistory);
    // Fold in the byte[] by content to stay consistent with equals.
    return 31 * result + Arrays.hashCode(vin);
  }
}
/**
 * Projected view of {@link Car} (engine/year/vin only, make forced nullable);
 * used to verify schema projection plus a narrower Avro read schema.
 */
public static class ShortCar {
  @Nullable
  private String make = null;
  private Engine engine;
  private long year;
  private byte[] vin;

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ShortCar shortCar = (ShortCar) o;
    // Null-safe field comparisons; vin compared by content.
    return year == shortCar.year
        && Objects.equals(engine, shortCar.engine)
        && Objects.equals(make, shortCar.make)
        && Arrays.equals(vin, shortCar.vin);
  }

  @Override
  public int hashCode() {
    int result = Objects.hash(make, engine, year);
    return 31 * result + Arrays.hashCode(vin);
  }
}
// Avro schemas derived from the reflection-based classes above.
public static final Schema CAR_SCHEMA = ReflectData.get()//AllowNulls.INSTANCE
    .getSchema(Car.class);
public static final Schema SHORT_CAR_SCHEMA = ReflectData.get()//AllowNulls.INSTANCE
    .getSchema(ShortCar.class);
/**
 * Builds the i-th deterministic test Car: even i gets an ELECTRIC engine,
 * odd i PETROL, and every fourth car carries a one-entry service history.
 * Determinism lets the read tests reconstruct the expected record by index.
 */
public static Car nextRecord(int i) {
  Car car = new Car();
  car.doors = 2;
  car.make = "Tesla";
  car.model = String.format("Model X v%d", i % 2);
  car.vin = String.format("1VXBR12EXCP%06d", i).getBytes();
  car.year = 2014 + i;
  car.registration = "California";
  LeatherTrim trim = new LeatherTrim();
  trim.colour = "black";
  car.optionalExtra = trim;
  Engine engine = new Engine();
  engine.capacity = 85.0f;
  engine.type = (i % 2) == 0 ? EngineType.ELECTRIC : EngineType.PETROL;
  engine.hasTurboCharger = false;
  car.engine = engine;
  if (i % 4 == 0) {
    Service service = new Service();
    service.date = 1374084640;
    service.mechanic = "<NAME>";
    car.serviceHistory = Lists.newArrayList();
    car.serviceHistory.add(service);
  }
  return car;
}
/** Writer-side mapper: ignores its text input and emits ten deterministic Cars. */
public static class MyMapper extends Mapper<LongWritable, Text, Void, Car> {
  @Override
  public void run(Context context) throws IOException, InterruptedException {
    for (int i = 0; i < 10; i++) {
      context.write(null, nextRecord(i));
    }
  }
}
/** Reader-side identity mapper for Car records; drops nulls from predicate push-down. */
public static class MyMapper2 extends Mapper<Void, Car, Void, Car> {
  @Override
  protected void map(Void key, Car car, Context context) throws IOException, InterruptedException {
    // Note: Car can be null because of predicate pushdown defined by an UnboundedRecordFilter (see below)
    if (car != null) {
      context.write(null, car);
    }
  }
}
/** Identity mapper for the projected ShortCar read path; drops filtered-out nulls. */
public static class MyMapperShort extends
    Mapper<Void, ShortCar, Void, ShortCar> {
  @Override
  protected void map(Void key, ShortCar car, Context context)
      throws IOException, InterruptedException {
    // Note: Car can be null because of predicate pushdown defined by an
    // UnboundedRecordFilter (see below)
    if (car != null) {
      context.write(null, car);
    }
  }
}
/**
 * Push-down predicate keeping only records whose engine.type column equals
 * ELECTRIC; non-matching records are delivered to the mapper as null.
 */
public static class ElectricCarFilter implements UnboundRecordFilter {
  private final UnboundRecordFilter filter;

  public ElectricCarFilter() {
    // FIX: EngineType is a nested type of this class, so the qualified name
    // "org.apache.parquet.avro.EngineType" does not resolve (there is no such
    // top-level class); use the simple name, which is in scope here.
    filter = ColumnRecordFilter.column("engine.type", ColumnPredicates.equalTo(EngineType.ELECTRIC));
  }

  @Override
  public RecordFilter bind(Iterable<ColumnReader> readers) {
    return filter.bind(readers);
  }
}
// Shared fixtures: Hadoop configuration plus input/intermediate/output paths.
final Configuration conf = new Configuration();
// Any existing text file works as write-job input; MyMapper ignores its records.
final Path inputPath = new Path("src/test/java/org/apache/parquet/avro/TestReflectInputOutputFormat.java");
final Path parquetPath = new Path("target/test/hadoop/TestReflectInputOutputFormat/parquet");
final Path outputPath = new Path("target/test/hadoop/TestReflectInputOutputFormat/out");
/**
 * Runs before each test: configures reflect-based Avro read/write support,
 * cleans output directories, and runs a map-only job that writes the ten
 * {@code nextRecord} Cars to {@code parquetPath}.
 */
@Before
public void createParquetFile() throws Exception {
  // set up readers and writers not in MR
  conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, false);
  AvroReadSupport.setAvroDataSupplier(conf, ReflectDataSupplier.class);
  AvroWriteSupport.setAvroDataSupplier(conf, ReflectDataSupplier.class);
  final FileSystem fileSystem = parquetPath.getFileSystem(conf);
  fileSystem.delete(parquetPath, true);
  fileSystem.delete(outputPath, true);
  {
    final Job job = new Job(conf, "write");
    // input not really used
    TextInputFormat.addInputPath(job, inputPath);
    job.setInputFormatClass(TextInputFormat.class);
    job.setMapperClass(TestReflectInputOutputFormat.MyMapper.class);
    job.setNumReduceTasks(0);
    job.setOutputFormatClass(AvroParquetOutputFormat.class);
    AvroParquetOutputFormat.setOutputPath(job, parquetPath);
    AvroParquetOutputFormat.setSchema(job, CAR_SCHEMA);
    AvroParquetOutputFormat.setAvroDataSupplier(job, ReflectDataSupplier.class);
    waitForJob(job);
  }
}
/**
 * Round-trip test: reads the prepared Parquet file with an ELECTRIC-only
 * push-down filter and a projection that drops optionalExtra, re-writes the
 * records, then verifies each surviving Car against nextRecord(lineNumber*2).
 */
@Test
public void testReadWrite() throws Exception {
  conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, false);
  final Job job = new Job(conf, "read");
  job.setInputFormatClass(AvroParquetInputFormat.class);
  AvroParquetInputFormat.setInputPaths(job, parquetPath);
  // Test push-down predicates by using an electric car filter
  AvroParquetInputFormat.setUnboundRecordFilter(job, ElectricCarFilter.class);
  // Test schema projection by dropping the optional extras
  Schema projection = Schema.createRecord(CAR_SCHEMA.getName(),
      CAR_SCHEMA.getDoc(), CAR_SCHEMA.getNamespace(), false);
  List<Schema.Field> fields = Lists.newArrayList();
  for (Schema.Field field : ReflectData.get().getSchema(Car.class).getFields()) {
    if (!"optionalExtra".equals(field.name())) {
      // Fields cannot be reused across schemas, so rebuild each one.
      fields.add(new Schema.Field(field.name(), field.schema(), field.doc(),
          field.defaultValue(), field.order()));
    }
  }
  projection.setFields(fields);
  AvroParquetInputFormat.setRequestedProjection(job, projection);
  job.setMapperClass(TestReflectInputOutputFormat.MyMapper2.class);
  job.setNumReduceTasks(0);
  job.setOutputFormatClass(AvroParquetOutputFormat.class);
  AvroParquetOutputFormat.setOutputPath(job, outputPath);
  AvroParquetOutputFormat.setSchema(job, CAR_SCHEMA);
  waitForJob(job);
  final Path mapperOutput = new Path(outputPath.toString(),
      "part-m-00000.parquet");
  final AvroParquetReader<Car> out = new AvroParquetReader<Car>(conf, mapperOutput);
  Car car;
  Car previousCar = null;
  int lineNumber = 0;
  while ((car = out.read()) != null) {
    if (previousCar != null) {
      // Testing reference equality here. The "model" field should be dictionary-encoded.
      assertTrue(car.model == previousCar.model);
    }
    // Make sure that predicate push down worked as expected
    if (car.engine.type == EngineType.PETROL) {
      fail("UnboundRecordFilter failed to remove cars with PETROL engines");
    }
    // Note we use lineNumber * 2 because of predicate push down
    Car expectedCar = nextRecord(lineNumber * 2);
    // We removed the optional extra field using projection so we shouldn't
    // see it here...
    expectedCar.optionalExtra = null;
    assertEquals("line " + lineNumber, expectedCar, car);
    ++lineNumber;
    previousCar = car;
  }
  out.close();
}
/**
 * Like {@link #testReadWrite()} but reads through a narrower projection
 * (engine/year/vin only) combined with the ShortCar read schema, verifying
 * that the omitted "make" comes back null and the kept fields round-trip.
 */
@Test
public void testReadWriteChangedCar() throws Exception {
  conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, false);
  final Job job = new Job(conf, "read changed/short");
  job.setInputFormatClass(AvroParquetInputFormat.class);
  AvroParquetInputFormat.setInputPaths(job, parquetPath);
  // Test push-down predicates by using an electric car filter
  AvroParquetInputFormat.setUnboundRecordFilter(job, ElectricCarFilter.class);
  // Test schema projection by dropping the engine, year, and vin (like ShortCar),
  // but making make optional (unlike ShortCar)
  Schema projection = Schema.createRecord(CAR_SCHEMA.getName(),
      CAR_SCHEMA.getDoc(), CAR_SCHEMA.getNamespace(), false);
  List<Schema.Field> fields = Lists.newArrayList();
  for (Schema.Field field : CAR_SCHEMA.getFields()) {
    // No make!
    if ("engine".equals(field.name()) || "year".equals(field.name()) || "vin".equals(field.name())) {
      fields.add(new Schema.Field(field.name(), field.schema(), field.doc(),
          field.defaultValue(), field.order()));
    }
  }
  projection.setFields(fields);
  AvroParquetInputFormat.setRequestedProjection(job, projection);
  AvroParquetInputFormat.setAvroReadSchema(job, SHORT_CAR_SCHEMA);
  job.setMapperClass(TestReflectInputOutputFormat.MyMapperShort.class);
  job.setNumReduceTasks(0);
  job.setOutputFormatClass(AvroParquetOutputFormat.class);
  AvroParquetOutputFormat.setOutputPath(job, outputPath);
  AvroParquetOutputFormat.setSchema(job, SHORT_CAR_SCHEMA);
  waitForJob(job);
  final Path mapperOutput = new Path(outputPath.toString(), "part-m-00000.parquet");
  final AvroParquetReader<ShortCar> out = new AvroParquetReader<ShortCar>(conf, mapperOutput);
  ShortCar car;
  int lineNumber = 0;
  while ((car = out.read()) != null) {
    // Make sure that predicate push down worked as expected
    // Note we use lineNumber * 2 because of predicate push down
    Car expectedCar = nextRecord(lineNumber * 2);
    // We removed the optional extra field using projection so we shouldn't see it here...
    assertNull(car.make);
    assertEquals(car.engine, expectedCar.engine);
    assertEquals(car.year, expectedCar.year);
    assertArrayEquals(car.vin, expectedCar.vin);
    ++lineNumber;
  }
  out.close();
}
/** Submits the job, polls every 100ms until it completes, and throws on failure. */
private void waitForJob(Job job) throws Exception {
  job.submit();
  while (!job.isComplete()) {
    LOG.debug("waiting for job {}", job.getJobName());
    sleep(100);
  }
  LOG.info("status for job {}: {}", job.getJobName(), (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
  if (!job.isSuccessful()) {
    throw new RuntimeException("job failed " + job.getJobName());
  }
}
/** Cleans up both the intermediate Parquet file and the job output after each test. */
@After
public void deleteOutputFile() throws IOException {
  final FileSystem fileSystem = parquetPath.getFileSystem(conf);
  fileSystem.delete(parquetPath, true);
  fileSystem.delete(outputPath, true);
}
}
|
inaat/HRMFRONTEND
|
src/redux/session/reducer.js
|
<filename>src/redux/session/reducer.js
import {
SESSION_GET_LIST,
SESSION_GET_LIST_SUCCESS,
SESSION_GET_LIST_ERROR,
SESSION_ADD_ITEM,
SESSION_ADD_ITEM_SUCCESS,
SESSION_ADD_ITEM_ERROR,
SESSION_DELETE_ITEM,
SESSION_DELETE_SUCCESS,
SESSION_DELETE_ITEM_ERROR,
SESSION_UPDATE_ITEM,
SESSION_UPDATE_ITEM_SUCCESS,
SESSION_UPDATE_ITEM_ERROR,
SESSION_DELETE_CHECK,
} from "../actions";
// Initial slice state: no items loaded yet, no error,
// sessionloading=false while a request is in flight / not yet resolved.
const INIT_STATE = {
  sessionItems: null,
  sessionerror: "",
  sessionloading: false,
};
/**
 * Session (work-schedule) reducer.
 *
 * State shape:
 *   sessionItems   - array of schedule records (null until the first load)
 *   sessionerror   - last error payload, if any
 *   sessionloading - true once the last request finished (success or error)
 */
export default (state = INIT_STATE, action) => {
  switch (action.type) {
    case SESSION_GET_LIST:
      return { ...state, sessionloading: false };
    case SESSION_GET_LIST_SUCCESS:
      return {
        ...state,
        sessionloading: true,
        sessionItems: action.payload,
      };
    case SESSION_GET_LIST_ERROR:
      return { ...state, sessionloading: true, sessionerror: action.payload };
    case SESSION_ADD_ITEM:
      return { ...state, sessionloading: false };
    case SESSION_ADD_ITEM_SUCCESS: {
      // BUG FIX: the new record used to be built as `let item = []` (an Array
      // with ad-hoc properties, which does not survive serialization) and was
      // spliced into state.sessionItems in place, mutating Redux state.
      // Build a plain object and return a new array instead.
      const item = {
        id: action.payload.id,
        sched_name_eng: action.payload.sched_name_eng,
        sched_name_arab: action.payload.sched_name_arab,
        start_time: action.payload.start_time,
        end_time: action.payload.end_time,
        late_minutes: action.payload.late_minutes,
        early_minutes: action.payload.early_minutes,
        check_in: action.payload.check_in,
        check_out: action.payload.check_out,
        overtime: action.payload.overtime,
        flex_time: action.payload.flex_time,
        hpd: action.payload.hpd,
        day_off: action.payload.day_off,
        ignore_aw: action.payload.ignore_aw,
        absent_factor: action.payload.absent_factor,
        in_begin: action.payload.in_begin,
        out_begin: action.payload.out_begin,
        in_end: action.payload.in_end,
        out_end: action.payload.out_end,
        extra_mins_overtime: action.payload.extra_mins_overtime,
        no_log_penality: action.payload.no_log_penality,
        monthly_grace_period: action.payload.monthly_grace_period,
        no_late: action.payload.no_late,
        no_absent: action.payload.no_absent,
        no_ot: action.payload.no_ot,
      };
      return {
        ...state,
        sessionloading: true,
        sessionItems: [item, ...state.sessionItems],
      };
    }
    case SESSION_ADD_ITEM_ERROR:
      return { ...state, sessionloading: true, sessionerror: action.payload };
    case SESSION_DELETE_ITEM:
      return {
        ...state,
        sessionloading: false,
      };
    case SESSION_DELETE_CHECK:
      return {
        ...state,
        sessionloading: true,
      };
    case SESSION_DELETE_SUCCESS: {
      // NOTE(review): the id lives at action.payload.payload here, unlike the
      // other branches — confirm against the delete action creator.
      const remaining = state.sessionItems.filter(
        (x) => x.id !== action.payload.payload
      );
      return {
        ...state,
        sessionloading: true,
        sessionItems: remaining,
      };
    }
    case SESSION_DELETE_ITEM_ERROR:
      return { ...state, sessionloading: true, sessionerror: action.payload };
    case SESSION_UPDATE_ITEM:
      return {
        ...state,
        sessionloading: false,
      };
    case SESSION_UPDATE_ITEM_SUCCESS: {
      // Replace the matching record immutably, copying every editable field
      // from the payload.
      const updated = state.sessionItems.map((item) => {
        if (item.id !== action.payload.id) {
          return item;
        }
        return {
          ...item,
          sched_name_arab: action.payload.sched_name_arab,
          sched_name_eng: action.payload.sched_name_eng,
          start_time: action.payload.start_time,
          end_time: action.payload.end_time,
          late_minutes: action.payload.late_minutes,
          early_minutes: action.payload.early_minutes,
          check_in: action.payload.check_in,
          check_out: action.payload.check_out,
          overtime: action.payload.overtime,
          flex_time: action.payload.flex_time,
          hpd: action.payload.hpd,
          day_off: action.payload.day_off,
          ignore_aw: action.payload.ignore_aw,
          absent_factor: action.payload.absent_factor,
          in_begin: action.payload.in_begin,
          out_begin: action.payload.out_begin,
          in_end: action.payload.in_end,
          out_end: action.payload.out_end,
          extra_mins_overtime: action.payload.extra_mins_overtime,
          no_log_penality: action.payload.no_log_penality,
          monthly_grace_period: action.payload.monthly_grace_period,
          no_late: action.payload.no_late,
          no_absent: action.payload.no_absent,
          no_ot: action.payload.no_ot,
        };
      });
      return {
        ...state,
        sessionloading: true,
        sessionItems: updated,
      };
    }
    case SESSION_UPDATE_ITEM_ERROR:
      return { ...state, sessionloading: true, sessionerror: action.payload };
    default:
      return { ...state };
  }
};
|
Anteru/nsl
|
nsl/passes/ComputeTypes.py
|
from collections import OrderedDict
from nsl import ast, types, Errors, Visitor, LinearIR
from enum import Enum
from typing import List
def ParseSwizzleMask(mask):
    '''Parse a swizzle mask into a list of element indices, starting
    at 0.

    Both xyzw and rgba component names are accepted; they map onto the
    same index range 0..3.
    '''
    componentIndex = {
        'x': 0, 'r': 0,
        'y': 1, 'g': 1,
        'z': 2, 'b': 2,
        'w': 3, 'a': 3,
    }
    return [componentIndex[component] for component in mask]
def ComputeSwizzleType(inType, mask):
    '''Compute the resulting type of a swizzle operation.

    @param inType: Must be a PrimitiveType
    @param mask: A valid swizzle mask

    A one-component mask collapses to the scalar component type; longer
    masks produce a vector of the component type with the mask's length.
    '''
    assert isinstance(inType, types.Type)
    componentType = inType.GetComponentType()
    if len(mask) == 1:
        return componentType
    return types.VectorType(componentType, len(mask))
class FunctionVisitationPass(Enum):
    # Two-phase function handling: first register all signatures so calls can
    # resolve regardless of declaration order, then visit the bodies.
    Register = 0
    Visit = 1
class ComputeTypeVisitor(Visitor.DefaultVisitor):
    """AST pass that resolves and annotates the type of every declaration and
    expression. The visitation context is a stack of lexical scopes; ctx[-1]
    is always the innermost scope."""

    def GetContext(self) -> List[types.Scope]:
        # Seed the context with the module-level scope.
        return [self.scope]

    def __init__(self):
        self.ok = True  # cleared on failure; queried by the enclosing pass
        self.scope = types.Scope ()
        self.__loader = LinearIR.FilesystemModuleLoader()

    def v_StructureDefinition(self, decl, ctx):
        # Build a StructType from the field declarations and register it in
        # the enclosing scope so later declarations can refer to it by name.
        assert isinstance(decl, ast.StructureDefinition)
        scope = ctx[-1]
        fields = OrderedDict ()
        for field in decl.GetFields ():
            self.v_Visit(field, ctx)
            fields [field.GetName ()] = field.GetType ()
        structType = types.StructType(decl.GetName (), fields)
        scope.RegisterType (decl.GetName (), structType)
        decl.SetType (structType)

    # Each statement kind below opens a fresh nested scope for the duration
    # of its children, then pops it again.
    def v_CompoundStatement(self, stmt, ctx):
        ctx.append (types.Scope (ctx[-1]))
        stmt.AcceptVisitor(self, ctx)
        ctx.pop()

    def v_ForStatement(self, stmt, ctx):
        ctx.append (types.Scope (ctx[-1]))
        stmt.AcceptVisitor(self, ctx)
        ctx.pop()

    def v_DoStatement(self, stmt, ctx):
        ctx.append (types.Scope (ctx[-1]))
        stmt.AcceptVisitor(self, ctx)
        ctx.pop()

    def v_WhileStatement(self, stmt, ctx):
        ctx.append (types.Scope (ctx[-1]))
        stmt.AcceptVisitor(self, ctx)
        ctx.pop()

    def v_IfStatement(self, stmt, ctx):
        ctx.append (types.Scope (ctx[-1]))
        stmt.AcceptVisitor(self, ctx)
        ctx.pop()

    def _GetClassScopeForMemberAccess(self, scope, expr):
        # NOTE(review): not referenced anywhere in this pass as shown —
        # confirm external callers before removing.
        return scope.GetFieldType(expr.GetMemberAccess().GetParent ().GetName())

    def _ProcessExpression(self, expr, scope):
        """Recursively resolve and annotate the type of ``expr``.

        Access trees (member/array access) are processed top-down so the
        parent's type is known first; everything else is processed bottom-up
        so operand types are available when resolving operators/calls.
        Returns the resolved type of ``expr``."""
        assert isinstance(expr, ast.Expression), 'Expression {1} has type {0} which is not an expression type'.format(type(expr), expr)
        # We type-cast here so we can process access trees separately
        if isinstance(expr, ast.VariableAccessExpression):
            p = expr.GetParent ()
            # Figure out the parent type
            self._ProcessExpression(p, scope)
            if isinstance (expr, ast.MemberAccessExpression):
                parentType = p.GetType()
                if parentType.IsPrimitive ():
                    if parentType.IsVector () or parentType.IsScalar ():
                        # We allow swizzling of vector and scalar types
                        expr.SetType (ComputeSwizzleType(parentType, expr.GetMember ().GetName ()))
                        expr.SetSwizzle(True)
                    else:
                        Errors.ERROR_CANNOT_SWIZZLE_PRIMITIVE_TYPE.Raise ()
                elif parentType.IsAggregate():
                    expr.SetType (parentType.GetMembers ().GetFieldType (expr.GetMember ().GetName ()))
                else:
                    Errors.ERROR_CANNOT_SWIZZLE_TYPE.Raise (parentType)
                expr.GetMember ().SetType (expr.GetType ())
            elif isinstance (expr, ast.ArrayExpression):
                self._ProcessExpression (expr.GetExpression (), scope)
                # The index expression must be scalar.
                if not expr.GetExpression ().GetType ().IsScalar ():
                    Errors.ERROR_ARRAY_ACCESS_WITH_NONSCALAR.Raise (expr.GetExpression ().GetType ())
                parentType = p.GetType()
                nestedSize = parentType.GetSize ()
                if isinstance(parentType, types.MatrixType):
                    # Array access on matrix returns a vector
                    arrayType = types.VectorType(parentType.GetComponentType (),
                        parentType.GetColumnCount())
                    expr.SetType(arrayType)
                elif len(nestedSize) > 1:
                    # Drop one dimension from the array
                    arrayType = types.ArrayType (parentType.GetComponentType (), nestedSize [1:])
                    expr.SetType (arrayType)
                else:
                    # We've reached the last dimension (array is 1D now), so
                    # return the element type
                    expr.SetType (p.GetType ().GetComponentType ())
        elif isinstance(expr, ast.PrimaryExpression):
            # Simply check the name
            expr.SetType (scope.GetFieldType (expr.GetName ()))
        else:
            # Walk through all children
            for c in expr:
                self._ProcessExpression(c, scope)
            # during the walking up, we can compute the expression
            # type as well
            if isinstance(expr, ast.CallExpression):
                # As we know the parameter types now, we can finally resolve
                # overloaded functions
                expr.ResolveType (scope)
                expr.SetType (expr.function.GetReturnType())
            elif isinstance (expr, ast.BinaryExpression):
                expr.ResolveType (expr.GetLeft().GetType(), expr.GetRight().GetType())
                expr.SetType (expr.GetOperator ().GetReturnType ())
            elif isinstance (expr, ast.AffixExpression):
                # Prefix/postfix ops keep the operand's type.
                expr.SetType (expr.children[0].GetType ())
        return expr.GetType ()

    def v_VariableDeclaration(self, decl, ctx):
        # Register the variable in the current scope and type-check its
        # initializer, if present.
        assert isinstance(decl, ast.VariableDeclaration)
        scope = ctx[-1]
        scope.RegisterVariable (decl.GetName (),
            decl.ResolveType (scope))
        if decl.HasInitializerExpression():
            self._ProcessExpression(decl.GetInitializerExpression (),
                scope)

    def v_Expression(self, expr, ctx):
        self._ProcessExpression(expr, ctx[-1])

    def __RegisterFunction(self, func, ctx):
        # Resolve and record the function's signature so call sites can bind
        # to it before its body has been visited.
        assert isinstance(func, ast.Function)
        func.ResolveType (ctx [-1])
        func.GetType ().Resolve (ctx[-1])
        funcType = func.GetType()
        ctx[-1].RegisterFunction (funcType.GetName (), funcType)

    def v_Function(self, func, ctx):
        '''Computes the function type and processes all statements.'''
        assert isinstance(func, ast.Function)
        scope = types.Scope (ctx[-1])
        ctx.append (scope)
        # Arguments become variables in the function's own scope.
        for (name, argType) in func.GetType ().GetArgumentTypes().items ():
            scope.RegisterVariable (name, argType)
        self.v_Visit (func.GetBody(), ctx)
        ctx.pop ()

    def v_Module(self, module: ast.Module, ctx: List[types.Scope]):
        import pickle  # NOTE(review): unused here — candidate for removal
        # Module imports are added first, so the symbols exported from a module
        # are available to everyone
        for importedModule in module.GetImports():
            irModule = self.__loader.Load(importedModule)
            for moduleType in irModule.Metadata['types']:
                assert isinstance(moduleType, types.Type)
                ctx[-1].RegisterType(moduleType.GetName(), moduleType)
            for func in irModule.Metadata['functions']:
                assert isinstance(func, types.Function)
                ctx[-1].RegisterFunction(func.GetName(), func)
        # Must visit types first
        for programType in module.GetTypes ():
            self.v_Visit (programType, ctx)
        for decl in module.GetDeclarations ():
            self.v_Visit (decl, ctx)
        # Two-phase: register every function signature before visiting any
        # body, so declaration order does not matter for call resolution.
        for func in module.GetFunctions ():
            self.__RegisterFunction(func, ctx)
        for func in module.GetFunctions():
            self.v_Visit (func, ctx)
import nsl.Pass
class ComputeTypesPass(nsl.Pass.Pass):
    """Pipeline pass wrapping ComputeTypeVisitor; runs type resolution over a
    parsed module and reports success via the visitor's ``ok`` flag."""

    def __init__(self):
        # NOTE(review): os/pickle appear unused; nsl.parser may be imported
        # for side effects ("register default functions and types") —
        # confirm before removing any of these.
        import os, pickle, nsl.parser
        # register default functions and types
        self.visitor = ComputeTypeVisitor ()

    def GetName (self):
        return 'compute-types'

    def Process (self, root: ast.Module, ctx=None,output=None):
        # ctx/output are part of the generic Pass interface; unused here.
        self.visitor.Visit (root)
        return self.visitor.ok
def GetPass():
    # Factory entry point used by the pass pipeline.
    return ComputeTypesPass ()
|
Knightwalker/Knowledgebase
|
19_JavaScript/2020_JS_Advanced/06_DomManipulations/Lab/01. List-Of-Items/app.js
|
<reponame>Knightwalker/Knowledgebase
// Append the text from #newItemText as a new <li> inside #items,
// then clear the input for the next entry.
function addItem() {
    const itemsList = document.getElementById("items");
    const textInput = document.getElementById("newItemText");

    const listItem = document.createElement("li");
    listItem.textContent = textInput.value;
    itemsList.appendChild(listItem);

    textInput.value = "";
}
|
socialsoftware/edition
|
microfrontend/src/microfrontends/user/User_DISPATCHER.js
|
<reponame>socialsoftware/edition
import React from 'react'
import { Route, Switch } from 'react-router-dom'
import Login from './pages/Login';
import Register from './pages/Register';
import ChangePassword from './pages/ChangePassword';
import '../../resources/css/user/User.css'
// Routes the /auth/* paths to the matching account page. Every page receives
// the shared i18n messages and the login callback; ChangePassword also gets
// the current user.
const User_DISPATCHER = (props) => {
    return (
        <Switch>
            <Route path="/auth/signin">
                <Login messages={props.messages} onLogin={props.onLogin}/>
            </Route>
            <Route path="/auth/signup">
                <Register messages={props.messages} onLogin={props.onLogin}/>
            </Route>
            <Route path="/auth/changePassword">
                <ChangePassword messages={props.messages} onLogin={props.onLogin} user={props.user}/>
            </Route>
        </Switch>
    )
}
export default User_DISPATCHER
|
mhamaneamogh50/All_Python_Pro
|
for loop.py
|
<reponame>mhamaneamogh50/All_Python_Pro<gh_stars>0
# Read a count from the user and print 0 .. g-1, one number per line.
g = int(input("Enter number"))
for b in range(g):  # FIX: the for statement was missing its trailing colon (SyntaxError)
    print(b)
|
mccool/elastic-builder
|
src/queries/full-text-queries/mono-field-query-base.js
|
'use strict';
const has = require('lodash.has');
const isNil = require('lodash.isnil');
const FullTextQueryBase = require('./full-text-query-base');
/**
 * The `MonoFieldQueryBase` provides support for common options used across
 * various full text query implementations with single search field.
 *
 * **NOTE:** Instantiating this directly should not be required.
 * However, if you wish to add a custom implementation for whatever reason,
 * this class could be extended.
 *
 * @param {string} queryType
 * @param {string=} field The document field to query against
 * @param {string=} queryString The query string
 *
 * @extends FullTextQueryBase
 */
class MonoFieldQueryBase extends FullTextQueryBase {
    // eslint-disable-next-line require-jsdoc
    constructor(queryType, field, queryString) {
        super(queryType, queryString);

        if (!isNil(field)) {
            this._field = field;
        }
    }

    /**
     * Sets the field to search on.
     *
     * @param {string} field
     * @returns {MonoFieldQueryBase} returns `this` so that calls can be chained.
     */
    field(field) {
        this._field = field;
        return this;
    }

    /**
     * Override default `toJSON` to return DSL representation of the Full text query
     * class instance.
     *
     * @override
     * @returns {Object} returns an Object which maps to the elasticsearch query DSL
     * @throws {Error} If no query string has been set.
     */
    toJSON() {
        // recursiveToJSON doesn't seem to be required here.
        // Revisit this.. Smells a little bit
        if (!has(this._queryOpts, 'query')) {
            throw new Error('Query string is required for full text query!');
        }

        // When `query` is the only option, collapse to the shorthand form;
        // otherwise emit the full options object under the field name.
        const optCount = Object.keys(this._queryOpts).length;
        const fieldOpts = optCount === 1 ? this._queryOpts.query : this._queryOpts;

        return {
            [this.queryType]: {
                [this._field]: fieldOpts
            }
        };
    }
}

module.exports = MonoFieldQueryBase;
|
esastack/esa-cabin
|
cabin-container/src/main/java/io/esastack/cabin/container/service/deploy/BizModuleFactoryServiceImpl.java
|
<gh_stars>10-100
/*
* Copyright 2021 OPPO ESA Stack Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.esastack.cabin.container.service.deploy;
import io.esastack.cabin.api.service.deploy.BizModuleFactoryService;
import io.esastack.cabin.api.service.loader.BizModuleClassLoaderParam;
import io.esastack.cabin.api.service.loader.ClassLoaderService;
import io.esastack.cabin.common.exception.CabinRuntimeException;
import io.esastack.cabin.common.util.CabinStringUtil;
import io.esastack.cabin.container.domain.BizModule;
import java.net.URL;
import static io.esastack.cabin.common.constant.Constants.CABIN_UNIT_TEST_MAIN_CLASSNAME;
import static io.esastack.cabin.common.constant.Constants.CABIN_UNIT_TEST_MAIN_METHOD;
/**
 * Builds {@link BizModule} instances from a set of classpath URLs and the
 * launch arguments. Arguments are laid out as
 * {@code [mainClass, mainMethod, realArgs...]}.
 */
public class BizModuleFactoryServiceImpl implements BizModuleFactoryService<BizModule> {

    private volatile ClassLoaderService classLoaderService;

    @SuppressWarnings("unused")
    public void setClassLoaderService(final ClassLoaderService service) {
        classLoaderService = service;
    }

    /**
     * Creates a biz module backed by a freshly created biz class loader.
     *
     * @param bizUrls classpath entries of the biz module
     * @param args    launch arguments; slot 0 is the main class, slot 1 the
     *                main method, the remainder are passed through
     * @throws CabinRuntimeException on class loader creation failure
     */
    @Override
    public BizModule createModule(final URL[] bizUrls, final String[] args) throws CabinRuntimeException {
        final boolean unitTestMode = isUnitTestLaunch(args);
        final BizModuleClassLoaderParam loaderParam = BizModuleClassLoaderParam.newBuilder()
                .urls(bizUrls)
                .isUnitTest(unitTestMode)
                .build();
        final ClassLoader bizClassLoader = classLoaderService.createBizModuleClassLoader(loaderParam);
        return BizModule.newBuilder()
                .name("Biz Identity")
                .urls(bizUrls)
                .mainClass(args[0])
                .mainMethod(args[1])
                .arguments(getRealArguments(args))
                .classLoader(bizClassLoader)
                .unitTest(unitTestMode)
                .build();
    }

    /** True when the main class/method markers indicate a unit-test launch. */
    private static boolean isUnitTestLaunch(final String[] args) {
        return CabinStringUtil.isNotBlank(args[0])
                && CabinStringUtil.isNotBlank(args[1])
                && args[0].equals(CABIN_UNIT_TEST_MAIN_CLASSNAME)
                && args[1].equals(CABIN_UNIT_TEST_MAIN_METHOD);
    }

    /** Strips the two leading main-class/main-method slots from the args. */
    private String[] getRealArguments(final String[] args) {
        assert args.length >= 2;
        final int realCount = args.length - 2;
        final String[] realArgs = new String[realCount];
        System.arraycopy(args, 2, realArgs, 0, realCount);
        return realArgs;
    }
}
|
Lin1225/vtk_v5.10.0
|
Wrapping/Python/PyVTKMutableObject.cxx
|
<gh_stars>1-10
/*=========================================================================
Program: Visualization Toolkit
Module: PyVTKMutableObject.cxx
Copyright (c) <NAME>, <NAME>, <NAME>
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
/*-----------------------------------------------------------------------
The PyVTKMutableObject was created in Sep 2010 by <NAME>.
This class is a proxy for immutable python objects like int, float,
and string. It allows these objects to be passed to VTK methods that
require a ref.
-----------------------------------------------------------------------*/
#include "PyVTKMutableObject.h"
#include "vtkPythonUtil.h"
// Silence warning like
// "dereferencing type-punned pointer will break strict-aliasing rules"
// it happens because this kind of expression: (long *)&ptr
// pragma GCC diagnostic is available since gcc>=4.2
#if defined(__GNUG__) && (__GNUC__>4) || (__GNUC__==4 && __GNUC_MINOR__>=2)
#pragma GCC diagnostic ignored "-Wstrict-aliasing"
#endif
//--------------------------------------------------------------------
// methods for adding this type to a module
#if defined(WIN32)
extern "C" { __declspec( dllexport ) void PyVTKAddFile_mutable(PyObject *, const char *); }
#else
extern "C" { void PyVTKAddFile_mutable(PyObject *, const char *); }
#endif
//--------------------------------------------------------------------
const char *PyVTKMutableObject_Doc =
"A mutable wrapper for immutable objects.\n\n"
"This wrapper class is needed when a VTK method returns a value\n"
"in an argument that has been passed by reference. By calling\n"
"\"m = vtk.mutable(a)\" on a value, you can create a mutable proxy\n"
"to that value. The value can be changed by calling \"m.set(b)\".\n";
//--------------------------------------------------------------------
// helper method: make sure than an object is usable
// Return a NEW reference to an object that a mutable may wrap:
// the argument itself for numbers/strings, the inner value of
// another mutable, or a number produced via nb_index / nb_float.
// Returns NULL with a TypeError set on failure.
static PyObject *PyVTKMutableObject_CompatibleObject(PyObject *opn)
{
  PyNumberMethods *nb = opn->ob_type->tp_as_number;

  // Directly wrappable immutable types: take a new reference.
  if (PyFloat_Check(opn) ||
      PyLong_Check(opn) ||
      PyInt_Check(opn) ||
#ifdef Py_USING_UNICODE
      PyUnicode_Check(opn) ||
#endif
      PyString_Check(opn))
    {
    Py_INCREF(opn);
    }
  // Another mutable: wrap its inner value, not the wrapper.
  else if (PyVTKMutableObject_Check(opn))
    {
    opn = ((PyVTKMutableObject *)opn)->value;
    Py_INCREF(opn);
    }
#if PY_VERSION_HEX >= 0x02050000
  // Integer-like objects (Python >= 2.5): convert via nb_index,
  // which already returns a new reference.
  else if (nb && nb->nb_index)
    {
    opn = nb->nb_index(opn);
    if (opn == 0 || (!PyLong_Check(opn) && !PyInt_Check(opn)))
      {
      PyErr_SetString(PyExc_TypeError,
                      "nb_index should return integer object");
      return NULL;
      }
    }
#endif
  // Float-like objects: convert via nb_float (new reference).
  else if (nb && nb->nb_float)
    {
    opn = nb->nb_float(opn);
    if (opn == 0 || !PyFloat_Check(opn))
      {
      PyErr_SetString(PyExc_TypeError,
                      "nb_float should return float object");
      return NULL;
      }
    }
  else
    {
    PyErr_SetString(PyExc_TypeError,
                    "a numeric or string object is required");
    return NULL;
    }

  return opn;
}
//--------------------------------------------------------------------
// methods from C
// Return a BORROWED reference to the wrapped value, or NULL with a
// TypeError set when 'self' is not a PyVTKMutableObject.
PyObject *PyVTKMutableObject_GetValue(PyObject *self)
{
  if (PyVTKMutableObject_Check(self))
    {
    return ((PyVTKMutableObject *)self)->value;
    }
  else
    {
    PyErr_SetString(PyExc_TypeError, "a vtk.mutable() object is required");
    }

  return NULL;
}
// Replace the wrapped value.  On success this STEALS the caller's
// reference to 'val' (the old value is decref'd); on failure (-1)
// 'val' is untouched and the caller keeps ownership.  A mutable may
// only change value within its category: numeric stays numeric,
// string stays string.
int PyVTKMutableObject_SetValue(PyObject *self, PyObject *val)
{
  if (PyVTKMutableObject_Check(self))
    {
    PyObject **op = &((PyVTKMutableObject *)self)->value;

    // New value is numeric: accept only if the current value is too.
    if (PyFloat_Check(val) ||
        PyLong_Check(val) ||
        PyInt_Check(val))
      {
      if (PyFloat_Check(*op) ||
          PyLong_Check(*op) ||
          PyInt_Check(*op))
        {
        Py_DECREF(*op);
        *op = val;
        return 0;
        }
      PyErr_SetString(PyExc_TypeError,
                      "cannot set a string mutable to a numeric value");
      }
    // New value is a string: accept only if the current value is too.
    else if (
#ifdef Py_USING_UNICODE
      PyUnicode_Check(val) ||
#endif
      PyString_Check(val))
      {
      if (
#ifdef Py_USING_UNICODE
        PyUnicode_Check(*op) ||
#endif
        PyString_Check(*op))
        {
        Py_DECREF(*op);
        *op = val;
        return 0;
        }
      PyErr_SetString(PyExc_TypeError,
                      "cannot set a numeric mutable to a string value");
      }
    else
      {
      PyErr_SetString(PyExc_TypeError,
                      "a float, long, int, or string is required");
      }
    }
  else
    {
    PyErr_SetString(PyExc_TypeError, "a vtk.mutable() object is required");
    }

  return -1;
}
//--------------------------------------------------------------------
// methods from python
// Python method mutable.get(): return the wrapped value.
static PyObject *PyVTKMutableObject_Get(PyObject *self, PyObject *args)
{
  if (PyArg_ParseTuple(args, (char*)":get"))
    {
    // GetValue returns a borrowed reference; incref before returning
    // it to the interpreter.
    PyObject *ob = PyVTKMutableObject_GetValue(self);
    Py_INCREF(ob);
    return ob;
    }

  return NULL;
}
// Python method mutable.set(value): replace the wrapped value.
// CompatibleObject returns a NEW reference; SetValue steals it only
// on success, so on failure we must release it ourselves.  The
// original code leaked that reference whenever SetValue failed
// (e.g. assigning a number to a string mutable).
static PyObject *PyVTKMutableObject_Set(PyObject *self, PyObject *args)
{
  PyObject *opn;

  if (PyArg_ParseTuple(args, (char*)"O:set", &opn))
    {
    opn = PyVTKMutableObject_CompatibleObject(opn);

    if (opn)
      {
      if (PyVTKMutableObject_SetValue(self, opn) == 0)
        {
        Py_INCREF(Py_None);
        return Py_None;
        }
      // SetValue did not take ownership: drop the reference that
      // CompatibleObject handed us to avoid a leak.
      Py_DECREF(opn);
      }
    }

  return NULL;
}
static PyMethodDef PyVTKMutableObject_Methods[] = {
{(char*)"get", PyVTKMutableObject_Get, 1, (char *)"Get the stored value."},
{(char*)"set", PyVTKMutableObject_Set, 1, (char *)"Set the stored value."},
{ NULL, NULL, 0, NULL }
};
//--------------------------------------------------------------------
// Macros used for defining protocol methods
#define REFOBJECT_INTFUNC(prot, op) \
static int PyVTKMutableObject_##op(PyObject *ob) \
{ \
ob = ((PyVTKMutableObject *)ob)->value; \
return Py##prot##_##op(ob); \
}
#define REFOBJECT_SIZEFUNC(prot, op) \
static Py_ssize_t PyVTKMutableObject_##op(PyObject *ob) \
{ \
ob = ((PyVTKMutableObject *)ob)->value; \
return Py##prot##_##op(ob); \
}
#define REFOBJECT_INDEXFUNC(prot, op) \
static PyObject *PyVTKMutableObject_##op(PyObject *ob, Py_ssize_t i) \
{ \
ob = ((PyVTKMutableObject *)ob)->value; \
return Py##prot##_##op(ob, i); \
}
#define REFOBJECT_INDEXSETFUNC(prot, op) \
static int PyVTKMutableObject_##op(PyObject *ob, Py_ssize_t i, PyObject *o) \
{ \
ob = ((PyVTKMutableObject *)ob)->value; \
return Py##prot##_##op(ob, i, o); \
}
#define REFOBJECT_SLICEFUNC(prot, op) \
static PyObject *PyVTKMutableObject_##op(PyObject *ob, Py_ssize_t i, Py_ssize_t j) \
{ \
ob = ((PyVTKMutableObject *)ob)->value; \
return Py##prot##_##op(ob, i, j); \
}
#define REFOBJECT_SLICESETFUNC(prot, op) \
static int PyVTKMutableObject_##op(PyObject *ob, Py_ssize_t i, Py_ssize_t j, PyObject *o) \
{ \
ob = ((PyVTKMutableObject *)ob)->value; \
return Py##prot##_##op(ob, i, j, o); \
}
#define REFOBJECT_UNARYFUNC(prot, op) \
static PyObject *PyVTKMutableObject_##op(PyObject *ob) \
{ \
ob = ((PyVTKMutableObject *)ob)->value; \
return Py##prot##_##op(ob); \
}
#define REFOBJECT_BINARYFUNC(prot, op) \
static PyObject *PyVTKMutableObject_##op(PyObject *ob1, PyObject *ob2) \
{ \
if (PyVTKMutableObject_Check(ob1)) \
{ \
ob1 = ((PyVTKMutableObject *)ob1)->value; \
} \
if (PyVTKMutableObject_Check(ob2)) \
{ \
ob2 = ((PyVTKMutableObject *)ob2)->value; \
} \
return Py##prot##_##op(ob1, ob2); \
}
#define REFOBJECT_INPLACEFUNC(prot, op) \
static PyObject *PyVTKMutableObject_InPlace##op(PyObject *ob1, PyObject *ob2) \
{ \
PyVTKMutableObject *ob = (PyVTKMutableObject *)ob1; \
PyObject *obn;\
ob1 = ob->value; \
if (PyVTKMutableObject_Check(ob2)) \
{ \
ob2 = ((PyVTKMutableObject *)ob2)->value; \
} \
obn = Py##prot##_##op(ob1, ob2); \
if (obn) \
{ \
ob->value = obn; \
Py_DECREF(ob1); \
Py_INCREF(ob); \
return (PyObject *)ob; \
} \
return 0; \
}
#define REFOBJECT_INPLACEIFUNC(prot, op) \
static PyObject *PyVTKMutableObject_InPlace##op(PyObject *ob1, Py_ssize_t i) \
{ \
PyVTKMutableObject *ob = (PyVTKMutableObject *)ob1; \
PyObject *obn;\
ob1 = ob->value; \
obn = Py##prot##_##op(ob1, i); \
if (obn) \
{ \
ob->value = obn; \
Py_DECREF(ob1); \
Py_INCREF(ob); \
return (PyObject *)ob; \
} \
return 0; \
}
// Ternary protocol trampoline (e.g. nb_power): unwrap any of the
// three operands that is a mutable before delegating.  Fixed: the
// third operand was unwrapped under a check of the *second* operand
// (Check(ob2) guarding the ob3 unwrap), so a mutable passed as the
// third argument of pow() leaked through as the wrapper object.
#define REFOBJECT_TERNARYFUNC(prot, op) \
static PyObject *PyVTKMutableObject_##op(PyObject *ob1, PyObject *ob2, PyObject *ob3) \
{ \
  if (PyVTKMutableObject_Check(ob1)) \
    { \
    ob1 = ((PyVTKMutableObject *)ob1)->value; \
    } \
  if (PyVTKMutableObject_Check(ob2)) \
    { \
    ob2 = ((PyVTKMutableObject *)ob2)->value; \
    } \
  if (PyVTKMutableObject_Check(ob3)) \
    { \
    ob3 = ((PyVTKMutableObject *)ob3)->value; \
    } \
  return Py##prot##_##op(ob1, ob2, ob3); \
}
#define REFOBJECT_INPLACETFUNC(prot, op) \
static PyObject *PyVTKMutableObject_InPlace##op(PyObject *ob1, PyObject *ob2, PyObject *ob3) \
{ \
PyVTKMutableObject *ob = (PyVTKMutableObject *)ob1; \
PyObject *obn; \
ob1 = ob->value; \
if (PyVTKMutableObject_Check(ob2)) \
{ \
ob2 = ((PyVTKMutableObject *)ob2)->value; \
} \
if (PyVTKMutableObject_Check(ob3)) \
{ \
ob3 = ((PyVTKMutableObject *)ob3)->value; \
} \
obn = Py##prot##_##op(ob1, ob2, ob3); \
if (obn) \
{ \
ob->value = obn; \
Py_DECREF(ob1); \
Py_INCREF(ob); \
return (PyObject *)ob; \
} \
return 0; \
}
//--------------------------------------------------------------------
// Number protocol
// Truth value: defer to the wrapped object.
static int PyVTKMutableObject_NonZero(PyObject *ob)
{
  ob = ((PyVTKMutableObject *)ob)->value;
  return PyObject_IsTrue(ob);
}

// Classic numeric coercion (Python 2): ob1 is always the mutable;
// unwrap it (and ob2 if it is also a mutable), then let the
// interpreter coerce the raw values.
static int PyVTKMutableObject_Coerce(PyObject **ob1, PyObject **ob2)
{
  *ob1 = ((PyVTKMutableObject *)*ob1)->value;

  if (PyVTKMutableObject_Check(*ob2))
    {
    *ob2 = ((PyVTKMutableObject *)*ob2)->value;
    }

  return PyNumber_CoerceEx(ob1, ob2);
}

// hex(): use PyNumber_ToBase on Python >= 2.6, else fall back to the
// wrapped type's own nb_hex slot.
static PyObject *PyVTKMutableObject_Hex(PyObject *ob)
{
  ob = ((PyVTKMutableObject *)ob)->value;
#if PY_VERSION_HEX >= 0x02060000
  return PyNumber_ToBase(ob, 16);
#else
  if (ob->ob_type->tp_as_number &&
      ob->ob_type->tp_as_number->nb_hex)
    {
    return ob->ob_type->tp_as_number->nb_hex(ob);
    }

  PyErr_SetString(PyExc_TypeError,
                  "hex() argument can't be converted to hex");
  return NULL;
#endif
}

// oct(): same strategy as hex(), base 8 / nb_oct slot.
static PyObject *PyVTKMutableObject_Oct(PyObject *ob)
{
  ob = ((PyVTKMutableObject *)ob)->value;
#if PY_VERSION_HEX >= 0x02060000
  return PyNumber_ToBase(ob, 8);
#else
  if (ob->ob_type->tp_as_number &&
      ob->ob_type->tp_as_number->nb_oct)
    {
    return ob->ob_type->tp_as_number->nb_oct(ob);
    }

  PyErr_SetString(PyExc_TypeError,
                  "oct() argument can't be converted to oct");
  return NULL;
#endif
}
REFOBJECT_BINARYFUNC(Number,Add)
REFOBJECT_BINARYFUNC(Number,Subtract)
REFOBJECT_BINARYFUNC(Number,Multiply)
REFOBJECT_BINARYFUNC(Number,Divide)
REFOBJECT_BINARYFUNC(Number,Remainder)
REFOBJECT_BINARYFUNC(Number,Divmod)
REFOBJECT_TERNARYFUNC(Number,Power)
REFOBJECT_UNARYFUNC(Number,Negative)
REFOBJECT_UNARYFUNC(Number,Positive)
REFOBJECT_UNARYFUNC(Number,Absolute)
// NonZero
REFOBJECT_UNARYFUNC(Number,Invert)
REFOBJECT_BINARYFUNC(Number,Lshift)
REFOBJECT_BINARYFUNC(Number,Rshift)
REFOBJECT_BINARYFUNC(Number,And)
REFOBJECT_BINARYFUNC(Number,Or)
REFOBJECT_BINARYFUNC(Number,Xor)
// Coerce
REFOBJECT_UNARYFUNC(Number,Int)
REFOBJECT_UNARYFUNC(Number,Long)
REFOBJECT_UNARYFUNC(Number,Float)
// Hex
// Oct
#if PY_VERSION_HEX >= 0x02000000
REFOBJECT_INPLACEFUNC(Number,Add)
REFOBJECT_INPLACEFUNC(Number,Subtract)
REFOBJECT_INPLACEFUNC(Number,Multiply)
REFOBJECT_INPLACEFUNC(Number,Divide)
REFOBJECT_INPLACEFUNC(Number,Remainder)
REFOBJECT_INPLACETFUNC(Number,Power)
REFOBJECT_INPLACEFUNC(Number,Lshift)
REFOBJECT_INPLACEFUNC(Number,Rshift)
REFOBJECT_INPLACEFUNC(Number,And)
REFOBJECT_INPLACEFUNC(Number,Or)
REFOBJECT_INPLACEFUNC(Number,Xor)
#endif
#if PY_VERSION_HEX >= 0x02020000
REFOBJECT_BINARYFUNC(Number,FloorDivide)
REFOBJECT_BINARYFUNC(Number,TrueDivide)
REFOBJECT_INPLACEFUNC(Number,FloorDivide)
REFOBJECT_INPLACEFUNC(Number,TrueDivide)
#endif
#if PY_VERSION_HEX >= 0x02050000
REFOBJECT_UNARYFUNC(Number,Index)
#endif
//--------------------------------------------------------------------
static PyNumberMethods PyVTKMutableObject_AsNumber = {
PyVTKMutableObject_Add, // nb_add
PyVTKMutableObject_Subtract, // nb_subtract
PyVTKMutableObject_Multiply, // nb_multiply
PyVTKMutableObject_Divide, // nb_divide
PyVTKMutableObject_Remainder, // nb_remainder
PyVTKMutableObject_Divmod, // nb_divmod
PyVTKMutableObject_Power, // nb_power
PyVTKMutableObject_Negative, // nb_negative
PyVTKMutableObject_Positive, // nb_positive
PyVTKMutableObject_Absolute, // nb_absolute
PyVTKMutableObject_NonZero, // nb_nonzero
PyVTKMutableObject_Invert, // nb_invert
PyVTKMutableObject_Lshift, // nb_lshift
PyVTKMutableObject_Rshift, // nb_rshift
PyVTKMutableObject_And, // nb_and
PyVTKMutableObject_Xor, // nb_xor
PyVTKMutableObject_Or, // nb_or
PyVTKMutableObject_Coerce, // nb_coerce
PyVTKMutableObject_Int, // nb_int
PyVTKMutableObject_Long, // nb_long
PyVTKMutableObject_Float, // nb_float
PyVTKMutableObject_Oct, // nb_oct
PyVTKMutableObject_Hex, // nb_hex
#if PY_VERSION_HEX >= 0x02000000
PyVTKMutableObject_InPlaceAdd, // nb_inplace_add
PyVTKMutableObject_InPlaceSubtract, // nb_inplace_subtract
PyVTKMutableObject_InPlaceMultiply, // nb_inplace_multiply
PyVTKMutableObject_InPlaceDivide, // nb_inplace_divide
PyVTKMutableObject_InPlaceRemainder, // nb_inplace_remainder
PyVTKMutableObject_InPlacePower, // nb_inplace_power
PyVTKMutableObject_InPlaceLshift, // nb_inplace_lshift
PyVTKMutableObject_InPlaceRshift, // nb_inplace_rshift
PyVTKMutableObject_InPlaceAnd, // nb_inplace_and
PyVTKMutableObject_InPlaceXor, // nb_inplace_xor
PyVTKMutableObject_InPlaceOr, // nb_inplace_or
#endif
#if PY_VERSION_HEX >= 0x02020000
PyVTKMutableObject_FloorDivide, // nb_floor_divide
PyVTKMutableObject_TrueDivide, // nb_true_divide
PyVTKMutableObject_InPlaceFloorDivide, // nb_inplace_floor_divide
PyVTKMutableObject_InPlaceTrueDivide, // nb_inplace_true_divide
#endif
#if PY_VERSION_HEX >= 0x02050000
PyVTKMutableObject_Index, // nb_index
#endif
};
// Disable sequence and mapping protocols until a subtype is made
#if 0
//--------------------------------------------------------------------
// Sequence protocol
#if PY_MAJOR_VERSION >= 2
REFOBJECT_SIZEFUNC(Sequence,Size)
#else
REFOBJECT_SIZEFUNC(Sequence,Length)
#endif
REFOBJECT_BINARYFUNC(Sequence,Concat)
REFOBJECT_INDEXFUNC(Sequence,Repeat)
REFOBJECT_INDEXFUNC(Sequence,GetItem)
REFOBJECT_SLICEFUNC(Sequence,GetSlice)
REFOBJECT_INDEXSETFUNC(Sequence,SetItem)
REFOBJECT_SLICESETFUNC(Sequence,SetSlice)
#if PY_VERSION_HEX >= 0x02000000
REFOBJECT_INPLACEFUNC(Sequence,Concat)
REFOBJECT_INPLACEIFUNC(Sequence,Repeat)
#endif
//--------------------------------------------------------------------
static PySequenceMethods PyVTKMutableObject_AsSequence = {
#if PY_MAJOR_VERSION >= 2
PyVTKMutableObject_Size, // sq_length
#else
PyVTKMutableObject_Length, // sq_length
#endif
PyVTKMutableObject_Concat, // sq_concat
PyVTKMutableObject_Repeat, // sq_repeat
PyVTKMutableObject_GetItem, // sq_item
PyVTKMutableObject_GetSlice, // sq_slice
PyVTKMutableObject_SetItem, // sq_ass_item
PyVTKMutableObject_SetSlice, // sq_ass_slice
#if PY_VERSION_HEX >= 0x02000000
0, // sq_contains
PyVTKMutableObject_InPlaceConcat, // sq_inplace_concat
PyVTKMutableObject_InPlaceRepeat, // sq_inplace_repeat
#endif
};
//--------------------------------------------------------------------
// Mapping protocol
static PyObject *
PyVTKMutableObject_GetMapItem(PyObject *ob, PyObject *key)
{
ob = ((PyVTKMutableObject *)ob)->value;
return PyObject_GetItem(ob, key);
}
static int
PyVTKMutableObject_SetMapItem(PyObject *ob, PyObject *key, PyObject *o)
{
ob = ((PyVTKMutableObject *)ob)->value;
return PyObject_SetItem(ob, key, o);
}
//--------------------------------------------------------------------
static PyMappingMethods PyVTKMutableObject_AsMapping = {
#if PY_MAJOR_VERSION >= 2
PyVTKMutableObject_Size, // mp_length
#else
PyVTKMutableObject_Length, // mp_length
#endif
PyVTKMutableObject_GetMapItem, // mp_subscript
PyVTKMutableObject_SetMapItem, // mp_ass_subscript
};
#endif
//--------------------------------------------------------------------
// Buffer protocol
static Py_ssize_t PyVTKMutableObject_GetReadBuf(
PyObject *op, Py_ssize_t segment, void **ptrptr)
{
char text[80];
PyBufferProcs *pb;
op = ((PyVTKMutableObject *)op)->value;
pb = op->ob_type->tp_as_buffer;
if (pb && pb->bf_getreadbuffer)
{
return op->ob_type->tp_as_buffer->bf_getreadbuffer(
op, segment, ptrptr);
}
sprintf(text, "type \'%.20s\' does not support readable buffer access",
op->ob_type->tp_name);
PyErr_SetString(PyExc_TypeError, text);
return -1;
}
static Py_ssize_t PyVTKMutableObject_GetWriteBuf(
PyObject *op, Py_ssize_t segment, void **ptrptr)
{
char text[80];
PyBufferProcs *pb;
op = ((PyVTKMutableObject *)op)->value;
pb = op->ob_type->tp_as_buffer;
if (pb && pb->bf_getwritebuffer)
{
return op->ob_type->tp_as_buffer->bf_getwritebuffer(
op, segment, ptrptr);
}
sprintf(text, "type \'%.20s\' does not support writeable buffer access",
op->ob_type->tp_name);
PyErr_SetString(PyExc_TypeError, text);
return -1;
}
static Py_ssize_t
PyVTKMutableObject_GetSegCount(PyObject *op, Py_ssize_t *lenp)
{
char text[80];
PyBufferProcs *pb;
op = ((PyVTKMutableObject *)op)->value;
pb = op->ob_type->tp_as_buffer;
if (pb && pb->bf_getsegcount)
{
return op->ob_type->tp_as_buffer->bf_getsegcount(op, lenp);
}
sprintf(text, "type \'%.20s\' does not support buffer access",
op->ob_type->tp_name);
PyErr_SetString(PyExc_TypeError, text);
return -1;
}
#if PY_VERSION_HEX >= 0x02050000
static Py_ssize_t PyVTKMutableObject_GetCharBuf(
PyObject *op, Py_ssize_t segment, char **ptrptr)
#else
static Py_ssize_t PyVTKMutableObject_GetCharBuf(
PyObject *op, Py_ssize_t segment, const char **ptrptr)
#endif
{
char text[80];
PyBufferProcs *pb;
op = ((PyVTKMutableObject *)op)->value;
pb = op->ob_type->tp_as_buffer;
if (pb && pb->bf_getcharbuffer)
{
return op->ob_type->tp_as_buffer->bf_getcharbuffer(
op, segment, ptrptr);
}
sprintf(text, "type \'%.20s\' does not support character buffer access",
op->ob_type->tp_name);
PyErr_SetString(PyExc_TypeError, text);
return -1;
}
static PyBufferProcs PyVTKMutableObject_AsBuffer = {
PyVTKMutableObject_GetReadBuf, // bf_getreadbuffer
PyVTKMutableObject_GetWriteBuf, // bf_getwritebuffer
PyVTKMutableObject_GetSegCount, // bf_getsegcount
PyVTKMutableObject_GetCharBuf, // bf_getcharbuffer
#if PY_VERSION_HEX >= 0x02060000
0, // bf_getbuffer
0 // bf_releasebuffer
#endif
};
//--------------------------------------------------------------------
// Object protocol
// tp_dealloc: drop our owning reference to the wrapped value, then
// free the wrapper object itself.
static void PyVTKMutableObject_Delete(PyObject *ob)
{
  Py_DECREF(((PyVTKMutableObject *)ob)->value);
#if PY_MAJOR_VERSION >= 2
  PyObject_Del(ob);
#else
  PyMem_DEL(ob);
#endif
}
// tp_repr: format as "<typename>(<repr of wrapped value>)".  A small
// stack buffer handles the common case; longer reprs fall back to a
// temporary heap allocation.
static PyObject *PyVTKMutableObject_Repr(PyObject *ob)
{
  char textspace[128];
  PyObject *r = 0;
  const char *name = ob->ob_type->tp_name;
  PyObject *s = PyObject_Repr(((PyVTKMutableObject *)ob)->value);
  if (s)
    {
    const char *text = PyString_AsString(s);
    // +3 covers '(', ')' and the terminating NUL.
    size_t n = strlen(name) + strlen(text) + 3;
    char *cp = textspace;
    if (n > 128) { cp = (char *)malloc(n); }
    sprintf(cp, "%s(%s)", name, text);
    r = PyString_FromString(cp);
    if (n > 128) { free(cp); }
    Py_DECREF(s);
    }
  return r;
}
// tp_str: delegate str() to the wrapped value.
static PyObject *PyVTKMutableObject_Str(PyObject *ob)
{
  return PyObject_Str(((PyVTKMutableObject *)ob)->value);
}
#if PY_VERSION_HEX >= 0x02010000
// Rich comparison (Python >= 2.1): unwrap either operand that is a
// mutable and compare the underlying values.
static PyObject *PyVTKMutableObject_RichCompare(
  PyObject *ob1, PyObject *ob2, int opid)
{
  if (PyVTKMutableObject_Check(ob1))
    {
    ob1 = ((PyVTKMutableObject *)ob1)->value;
    }
  if (PyVTKMutableObject_Check(ob2))
    {
    ob2 = ((PyVTKMutableObject *)ob2)->value;
    }

  return PyObject_RichCompare(ob1, ob2, opid);
}
#else
// Classic three-way comparison for older interpreters.
static int PyVTKMutableObject_Compare(PyObject *ob1, PyObject *ob2)
{
  if (PyVTKMutableObject_Check(ob1))
    {
    ob1 = ((PyVTKMutableObject *)ob1)->value;
    }
  if (PyVTKMutableObject_Check(ob2))
    {
    ob2 = ((PyVTKMutableObject *)ob2)->value;
    }

  return PyObject_Compare(ob1, ob2);
}
#endif
// tp_getattro: look up the attribute on the wrapper first (its own
// methods / generic attributes); if that fails with AttributeError
// and the name is not dunder-ish, fall through to the wrapped value.
static PyObject *PyVTKMutableObject_GetAttr(PyObject *self, PyObject *attr)
{
  char text[128];
  char *name = PyString_AsString(attr);
  PyObject *a;

#if PY_VERSION_HEX < 0x02020000
  // Pre-2.2 path: emulate __name__/__doc__/__methods__/__members__
  // and the method table lookup by hand.
  PyMethodDef *meth;

  if (name[0] == '_')
    {
    if (strcmp(name, "__name__") == 0)
      {
      return PyString_FromString(self->ob_type->tp_name);
      }
    if (strcmp(name, "__doc__") == 0)
      {
      return PyString_FromString(self->ob_type->tp_doc);
      }
    if (strcmp(name,"__methods__") == 0)
      {
      meth = PyVTKMutableObject_Methods;
      PyObject *lst;
      int i, n;

      // Count the methods, then build a sorted list of their names.
      for (n = 0; meth && meth[n].ml_name; n++)
        {
        ;
        }

      if ((lst = PyList_New(n)) != NULL)
        {
        meth = PyVTKMutableObject_Methods;
        for (i = 0; i < n; i++)
          {
          PyList_SetItem(lst, i, PyString_FromString(meth[i].ml_name));
          }
        PyList_Sort(lst);
        }
      return lst;
      }

    if (strcmp(name, "__members__") == 0)
      {
      PyObject *lst;
      if ((lst = PyList_New(4)) != NULL)
        {
        PyList_SetItem(lst, 0, PyString_FromString("__doc__"));
        PyList_SetItem(lst, 1, PyString_FromString("__members__"));
        PyList_SetItem(lst, 2, PyString_FromString("__methods__"));
        PyList_SetItem(lst, 3, PyString_FromString("__name__"));
        }
      return lst;
      }
    }

  for (meth = PyVTKMutableObject_Methods; meth && meth->ml_name; meth++)
    {
    if (strcmp(name, meth->ml_name) == 0)
      {
      return PyCFunction_New(meth, self);
      }
    }
#else
  // 2.2+ path: let the generic machinery try first, keeping any
  // non-AttributeError exception intact.
  a = PyObject_GenericGetAttr(self, attr);
  if (a || !PyErr_ExceptionMatches(PyExc_AttributeError))
    {
    return a;
    }
  PyErr_Clear();
#endif

  // Delegate non-underscore names to the wrapped value.
  if (name[0] != '_')
    {
    a = PyObject_GetAttr(((PyVTKMutableObject *)self)->value, attr);

    if (a || !PyErr_ExceptionMatches(PyExc_AttributeError))
      {
      return a;
      }
    PyErr_Clear();
    }

  // %.20s / %.80s truncation keeps the message within text[128].
  sprintf(text, "'%.20s' object has no attribute '%.80s'",
          self->ob_type->tp_name, name);
  PyErr_SetString(PyExc_AttributeError, text);
  return NULL;
}
// tp_new: mutable(value).  Rejects keyword arguments, converts the
// single positional argument via CompatibleObject (which returns a
// new reference that the wrapper then owns).
static PyObject *PyVTKMutableObject_New(
  PyTypeObject *, PyObject *args, PyObject *kwds)
{
  PyObject *o;

  if (kwds && PyDict_Size(kwds))
    {
    PyErr_SetString(PyExc_TypeError,
                    "mutable() does not take keyword arguments");
    return NULL;
    }

  if (PyArg_ParseTuple(args, (char *)"O:mutable", &o))
    {
    o = PyVTKMutableObject_CompatibleObject(o);

    if (o)
      {
#if PY_MAJOR_VERSION >= 2
      // NOTE(review): the PyObject_New result is not NULL-checked; an
      // allocation failure here would dereference NULL — confirm
      // whether upstream relies on allocation never failing.
      PyVTKMutableObject *self = PyObject_New(PyVTKMutableObject, &PyVTKMutableObject_Type);
#else
      PyVTKMutableObject *self = PyObject_NEW(PyVTKMutableObject, &PyVTKMutableObject_Type);
#endif
      self->value = o;
      return (PyObject *)self;
      }
    }

  return NULL;
}
//--------------------------------------------------------------------
PyTypeObject PyVTKMutableObject_Type = {
PyObject_HEAD_INIT(&PyType_Type)
0,
(char*)"vtk.mutable", // tp_name
sizeof(PyVTKMutableObject), // tp_basicsize
0, // tp_itemsize
PyVTKMutableObject_Delete, // tp_dealloc
0, // tp_print
0, // tp_getattr
0, // tp_setattr
#if PY_VERSION_HEX >= 0x02010000
0, // tp_compare
#else
PyVTKMutableObject_Compare, // tp_compare
#endif
PyVTKMutableObject_Repr, // tp_repr
&PyVTKMutableObject_AsNumber, // tp_as_number
0, // tp_as_sequence
0, // tp_as_mapping
#if PY_VERSION_HEX >= 0x02060000
PyObject_HashNotImplemented, // tp_hash
#else
0, // tp_hash
#endif
0, // tp_call
PyVTKMutableObject_Str, // tp_string
PyVTKMutableObject_GetAttr, // tp_getattro
0, // tp_setattro
&PyVTKMutableObject_AsBuffer, // tp_as_buffer
#if PY_VERSION_HEX >= 0x02020000
Py_TPFLAGS_CHECKTYPES |
#endif
Py_TPFLAGS_DEFAULT, // tp_flags
(char*)PyVTKMutableObject_Doc, // tp_doc
0, // tp_traverse
0, // tp_clear
#if PY_VERSION_HEX >= 0x02010000
PyVTKMutableObject_RichCompare, // tp_richcompare
#else
0, // tp_richcompare
#endif
0, // tp_weaklistoffset
#if PY_VERSION_HEX >= 0x02020000
0, // tp_iter
0, // tp_iternext
PyVTKMutableObject_Methods, // tp_methods
0, // tp_members
0, // tp_getset
0, // tp_base
0, // tp_dict
0, // tp_descr_get
0, // tp_descr_set
0, // tp_dictoffset
0, // tp_init
0, // tp_alloc
PyVTKMutableObject_New, // tp_new
#if PY_VERSION_HEX >= 0x02030000
PyObject_Del, // tp_free
#else
_PyObject_Del, // tp_free
#endif
0, // tp_is_gc
0, // tp_bases
0, // tp_mro
0, // tp_cache
0, // tp_subclasses
0, // tp_weaklist
#endif
VTK_WRAP_PYTHON_SUPRESS_UNINITIALIZED
};
//--------------------------------------------------------------------
// Classic new method
#if PY_VERSION_HEX < 0x02020000
// Classic (pre-2.2) constructor: a plain function "mutable" that
// forwards to the tp_new implementation.  Fixes two defects: the
// type object must be passed by address (PyVTKMutableObject_New
// takes a PyTypeObject*), and the PyMethodDef initializer had a
// stray closing brace ("{...} };") that could not compile.
static PyObject *PyVTKMutableObject_ClassicNew(PyObject *, PyObject *args)
{
  return PyVTKMutableObject_New(&PyVTKMutableObject_Type, args, 0);
}

static PyMethodDef PyVTKMutableObject_NewMethod =
  {(char*)"mutable", PyVTKMutableObject_ClassicNew, 1,
   (char*)PyVTKMutableObject_Doc };
#endif
//--------------------------------------------------------------------
// Exported method for adding this type to a module's dict
void PyVTKAddFile_mutable(
PyObject *dict, const char *)
{
#if PY_VERSION_HEX < 0x2020000
PyObject *o = PyCFunction_New(&PyVTKMutableObject_NewMethod, Py_None);
#else
PyObject *o = (PyObject *)&PyVTKMutableObject_Type;
#endif
if (o && PyDict_SetItemString(dict, (char *)"mutable", o) != 0)
{
Py_DECREF(o);
}
}
|
masssoud/hr-plus
|
jobs/models/__init__.py
|
<gh_stars>0
from .category import Category
from .job_posting import JobPosting
from .applicant import Applicant, ApplicantHistory
from .comment import Comment
|
haikelfazzani/instant-file-sharing
|
src/components/Faq.js
|
import React from 'react';
const faqQuestions = [
{ title: 'What Is Instant?', desc: 'Peer to peer file sharing.' },
{ title: 'How much does Insant cost?', desc: 'Insant is completely free for life.' },
];
export default function Faq() {
return (<div className="text-uppercase ltsp2 mt-5 mb-5">
<div className="b-title">
<h3>asked questions</h3>
</div>
{faqQuestions.map((q, i) => <details key={i} className="mb-2">
<summary className="bg-yellow mb-1 cp">{q.title}</summary>
<h5 className="p-10 m-0 bg-white">{q.desc}</h5>
</details>)}
</div>);
}
|
jack-gannon/Personal-Site
|
src/pages/contact.js
|
import React from "react"
import { graphql } from "gatsby"
import Layout from "../components/layout"
import SEO from "../components/seo"
import styled from "styled-components"
import { breakpoints } from "../utils/breakpoints"
import GetInTouch from "../components/about/GetInTouch"
import StayConnected from "../components/about/StayConnected"
// Contact page: renders the "stay connected" and "get in touch"
// sections side by side (stacked on small screens, see
// ContactContainer below).
class Contact extends React.Component {
  render() {
    // `data` is the result of this file's exported GraphQL page query.
    const { data } = this.props
    const siteTitle = data.site.siteMetadata.title

    return (
      <Layout location={this.props.location} title={siteTitle}>
        <SEO title="Contact" />
        <ContactContainer>
          <StayConnected />
          <GetInTouch />
        </ContactContainer>
      </Layout>
    )
  }
}
const ContactContainer = styled.div`
display: flex;
flex-direction: column;
& .contact-container:first-child {
margin-bottom: 1rem;
}
@media (min-width: ${breakpoints.desktop.small}) {
flex-direction: row;
justify-content: space-between;
& .contact-container:first-child {
flex-basis: 45%;
margin-right: 2rem;
margin-bottom: 0rem;
}
}
`
export default Contact
export const pageQuery = graphql`
query {
site {
siteMetadata {
title
}
}
}
`
|
seventy-three/gameofcode-app
|
gameofcode/data/src/main/java/lu/ing/gameofcode/services/BusDataGenerator.java
|
package lu.ing.gameofcode.services;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Deque;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import lu.ing.gameofcode.geojson.*;
import lu.ing.gameofcode.geojson.GeoJsonData;
import lu.ing.gameofcode.geojson.GeoJsonDataType;
import lu.ing.gameofcode.model.BusData;
import lu.ing.gameofcode.model.BusLine;
import lu.ing.gameofcode.model.BusPath;
import lu.ing.gameofcode.model.BusStop;
/**
* Created by patrice on 09.04.16.
*/
public class BusDataGenerator {
    // Extracts the bus line code from layer names shaped like "Ligne-18 ...".
    public static Pattern BUS_CODE_PATTERN = Pattern.compile("Ligne-([^ ]+) .*");
    // Parses a stop description into (line number, stop name, directions);
    // see the sample input on the next comment line.
    public static Pattern BUS_STOP_PATTERN = Pattern.compile("Ligne autobus: ([0-9]+) / (.*)<br>Direction\\(s\\): (.*)");
    // Ligne autobus: 9 / Martyrs Quai 1<br>Direction(s): Cents-Waassertuerm

    // Raw GeoJSON features supplied at construction time.
    private List<GeoJsonData> geoJsonDataList;
    // Accumulated result built while interpreting the features.
    private BusData busData = new BusData();
    /**
     * Manual smoke test: downloads the open data, runs the generator,
     * and dumps the stops of line "18" to stdout.
     *
     * @throws IOException if reading the remote data fails
     */
    public static void main(String... args) throws IOException {
        System.out.println("------------------------------------------");
        System.out.println("- Read data from http://opendata.vdl.lu");
        System.out.println("------------------------------------------");
        GeoJsonParser parser = new GeoJsonParser();
        List<GeoJsonData> jsonData = parser.readData(null);
        System.out.println("------------------------------------------");
        System.out.println("- Generate bus data");
        System.out.println("------------------------------------------");
        BusDataGenerator generator = new BusDataGenerator(jsonData);
        generator.interpretBusLines();
        System.out.println("------------------------------------------");
        for (BusLine line : generator.getBusData().getLines()) {
            if ("18".equals(line.getCode())) {
                for (BusStop stop : line.getWay()) {
                    System.out.println(stop);
                }
            }
        }
    }
    /**
     * @param geoJsonDataList parsed GeoJSON features to interpret;
     *                        kept by reference, not copied
     */
    public BusDataGenerator(List<GeoJsonData> geoJsonDataList) {
        this.geoJsonDataList = geoJsonDataList;
    }
public BusData getBusData() {
for (BusPath path : busData.getPaths()) {
String name = path.getParentBusStop().getName();
if ("<NAME> 3".equals(name)) {
path.setLatitude(6.170159);
path.setLongitude(49.634316);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.168955);
path.setLongitude(49.635584);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.16917);
path.setLongitude(49.637525);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.165017);
path.setLongitude(49.637292);
} else if ("<NAME> 2".equals(name)) {
path.setLatitude(6.161368);
path.setLongitude(49.632635);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.157332);
path.setLongitude(49.629615);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.151489);
path.setLongitude(49.627179);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.145385);
path.setLongitude(49.62477);
} else if ("B.E.I.".equals(name)) {
path.setLatitude(6.13885);
path.setLongitude(49.620077);
} else if ("<NAME> Quai 2".equals(name)) {
path.setLatitude(6.126768);
path.setLongitude(49.615664);
} else if ("<NAME>ai 2".equals(name)) {
path.setLatitude(6.126049);
path.setLongitude(49.610971);
} else if ("Martyrs Quai 2".equals(name)) {
path.setLatitude(6.128512);
path.setLongitude(49.60654);
} else if ("Paris / Zitha Quai 3".equals(name)) {
path.setLatitude(6.130778);
path.setLongitude(49.603843);
} else if ("<NAME>ai 102".equals(name)) {
path.setLatitude(6.133205);
path.setLongitude(49.600814);
} else if ("Alsace".equals(name)) {
path.setLatitude(6.131119);
path.setLongitude(49.596472);
} else if ("Lascombes".equals(name)) {
path.setLatitude(6.125519);
path.setLongitude(49.59426);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.120988);
path.setLongitude(49.595141);
} else if ("Gaasperecherbierg".equals(name)) {
path.setLatitude(6.116988);
path.setLongitude(49.593038);
} else if ("Plantin".equals(name)) {
path.setLatitude(6.116422);
path.setLongitude(49.589739);
} else if ("<NAME>'Or".equals(name)) {
path.setLatitude(6.115334);
path.setLongitude(49.583446);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.112988);
path.setLongitude(49.579446);
} else if ("Kockelscheuer, Camping".equals(name)) {
path.setLatitude(6.113213);
path.setLongitude(49.573226);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.108538);
path.setLongitude(49.565369);
} else if ("Kockelscheuer, Camping".equals(name)) {
path.setLatitude(6.113213);
path.setLongitude(49.573226);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.112988);
path.setLongitude(49.579446);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.115334);
path.setLongitude(49.583446);
} else if ("Raiffeisen".equals(name)) {
path.setLatitude(6.116907);
path.setLongitude(49.586673);
} else if ("Plantin".equals(name)) {
path.setLatitude(6.116422);
path.setLongitude(49.589739);
} else if ("Gaasperecherbierg".equals(name)) {
path.setLatitude(6.115338);
path.setLongitude(49.583449);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.120988);
path.setLongitude(49.595141);
} else if ("Lascombes".equals(name)) {
path.setLatitude(6.125519);
path.setLongitude(49.59426);
} else if ("Alsace".equals(name)) {
path.setLatitude(6.131119);
path.setLongitude(49.596472);
} else if ("<NAME> Quai 1".equals(name)) {
path.setLatitude(6.133034);
path.setLongitude(49.599807);
} else if ("Paris / Zitha Quai 1".equals(name)) {
path.setLatitude(6.130778);
path.setLongitude(49.603843);
} else if ("<NAME>ai 1".equals(name)) {
path.setLatitude(6.126049);
path.setLongitude(49.610971);
} else if ("<NAME> 1".equals(name)) {
path.setLatitude(6.126049);
path.setLongitude(49.610971);
} else if ("<NAME>ai 1".equals(name)) {
path.setLatitude(6.126768);
path.setLongitude(49.615664);
} else if ("B.E.I.".equals(name)) {
path.setLatitude(6.13885);
path.setLongitude(49.620077);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.145385);
path.setLongitude(49.62477);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.151489);
path.setLongitude(49.627179);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.157332);
path.setLongitude(49.629615);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.161368);
path.setLongitude(49.632635);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.165017);
path.setLongitude(49.637292);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.16917);
path.setLongitude(49.637525);
} else if ("<NAME>".equals(name)) {
path.setLatitude(6.168955);
path.setLongitude(49.635584);
}
}
return busData;
}
public void interpretBusLines() {
for (GeoJsonData geoJsonData : geoJsonDataList) {
// Keep only bus lines for now
if (geoJsonData.getType() == GeoJsonDataType.BUS_LINE) {
BusLine busLine = interpretBusLine(geoJsonData);
convertUnits(busLine);
busData.addLine(busLine);
for (BusStop stop : busLine.getWay()) {
busData.addPath(stop.getPath());
}
}
}
}
public BusLine interpretBusLine(GeoJsonData geoJsonData) {
BusLine busLine = new BusLine();
busLine.setName(geoJsonData.getName());
Matcher busCodeMatcher = BUS_CODE_PATTERN.matcher(busLine.getName());
busLine.setCode(busCodeMatcher.matches() ? busCodeMatcher.group(1) : "???");
// Interpret bus routes
// Separate places and paths
List<GeoJsonItemPlace> places = new ArrayList<>();
Deque<GeoJsonItemPath> paths = new LinkedList<>();
Deque<GeoJsonItemPath> stops = new LinkedList<>();
for (GeoJsonItem item : geoJsonData.getItems()) {
if (item instanceof GeoJsonItemPlace) {
places.add((GeoJsonItemPlace) item);
} else if (item instanceof GeoJsonItemPath) {
if (((GeoJsonItemPath) item).getPoints().size() > 1) {
paths.add((GeoJsonItemPath) item);
} else {
stops.add((GeoJsonItemPath) item);
}
}
}
List<GeoJsonItemPath> way1 = extractAndSortOneWay(paths);
// TODO add both terminus
/*
System.out.println("Way1 _______________");
for (GeoJsonItemPath path : way1) {
System.out.println(path);
}
*/
if (paths.isEmpty()) {
System.out.println("All ok: " + busLine.getName());
} else {
System.err.println("Paths not empty: " + busLine.getName());
}
String firstDirection = null;
List<BusStop> busStops = new ArrayList<>();
for (GeoJsonItemPath path : way1) {
BusStop busStop = new BusStop(busLine);
BusPath way1Path = new BusPath(busStop);
way1Path.setDistance(path.getDistance());
GeoJsonItemPath.GeoJsonItemPoint point = path.getPoints().getFirst();
way1Path.setLatitude(point.getLatitude());
way1Path.setLongitude(point.getLongitude());
way1Path.setAltitude(point.getAltitude());
busStop.setPath(way1Path);
busStops.add(busStop);
for (GeoJsonItemPath stop : stops) {
if (point.equals(stop.getPoints().getFirst())) {
Matcher matcher = BUS_STOP_PATTERN.matcher(stop.getName());
if (matcher.matches()) {
busStop.setName(matcher.group(2));
String direction = matcher.group(3).trim();
String[] directions = direction.split("/");
if (directions.length > 1) {
direction = directions[0].trim();
}
if (firstDirection == null) {
firstDirection = direction;
}
busStop.setDirection(directions.length > 1 ? BusStop.Direction.BOTH : (direction.equals(firstDirection) ? BusStop.Direction.WAY1 : BusStop.Direction.WAY2));
} else {
busStop.setName(stop.getName());
}
}
}
}
busLine.setWay(busStops);
return busLine;
}
private List<GeoJsonItemPath> extractAndSortOneWay(Deque<GeoJsonItemPath> paths) {
LinkedList<GeoJsonItemPath> extracted = new LinkedList<>();
GeoJsonItemPath place = paths.poll();
extracted.add(place);
boolean found;
GeoJsonItemPath.GeoJsonItemPoint firstPoint = place.getPoints().getFirst();
GeoJsonItemPath.GeoJsonItemPoint lastPoint = place.getPoints().getLast();
// To the left
do {
found = false;
Iterator<GeoJsonItemPath> iter = paths.iterator();
while (!found && iter.hasNext()) {
GeoJsonItemPath path = iter.next();
GeoJsonItemPath.GeoJsonItemPoint point = path.getPoints().getLast();
if (firstPoint.equals(point)) {
extracted.addFirst(path);
iter.remove();
firstPoint = path.getPoints().getFirst();
found = true;
}
}
} while(found);
// To the right
do {
found = false;
Iterator<GeoJsonItemPath> iter = paths.iterator();
while (!found && iter.hasNext()) {
GeoJsonItemPath path = iter.next();
GeoJsonItemPath.GeoJsonItemPoint point = path.getPoints().getFirst();
if (lastPoint.equals(point)) {
extracted.addLast(path);
iter.remove();
lastPoint = path.getPoints().getLast();
found = true;
}
}
} while(found);
return extracted;
}
public void convertUnits(BusLine busLine) {
for (BusStop busStop : busLine.getWay()) {
BusPath path = busStop.getPath();
path.setTimeBike(UnitsConvertor.distanceToTimeBike(path.getDistance()));
path.setTimeFoot(UnitsConvertor.distanceToTimeFoot(path.getDistance()));
path.setTimeBus(UnitsConvertor.distanceToTimeBus(path.getDistance()));
}
}
}
|
player1537-forks/spack
|
var/spack/repos/builtin/packages/r-rbibutils/package.py
|
<reponame>player1537-forks/spack
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RRbibutils(RPackage):
    """Convert Between Bibliography Formats.

    Converts between a number of bibliography formats, including 'BibTeX',
    'BibLaTeX' and 'Bibentry'. Includes a port of the 'bibutils' utilities by
    <NAME> <https://sourceforge.net/projects/bibutils/>. Supports all
    bibliography formats and character encodings implemented in 'bibutils'."""

    cran = "rbibutils"

    # CRAN release checksums; keep newest first — Spack prefers the
    # highest-priority (first) matching version by default.
    version('2.2.7', sha256='7c9e6719556b8caa9fb58743b717e89f45e8e7018371bf16f07dc3c1f96a55c5')
    version('2.0', sha256='03d13abee321decb88bc4e7c9f27276d62a4a880fa72bb6b86be91885010cfed')

    # Minimum R version stated by the package DESCRIPTION.
    depends_on('r@2.10:', type=('build', 'run'))
|
NazarethCollege/heweb2017-devops-presentation
|
sites/tweetheat/src/backend/vendor/src/github.com/youtube/vitess/go/vt/topo/replication.go
|
/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package topo
import (
log "github.com/golang/glog"
"github.com/golang/protobuf/proto"
"golang.org/x/net/context"
"github.com/youtube/vitess/go/trace"
"github.com/youtube/vitess/go/vt/logutil"
"github.com/youtube/vitess/go/vt/topo/topoproto"
topodatapb "github.com/youtube/vitess/go/vt/proto/topodata"
)
// ShardReplicationInfo is the companion structure for ShardReplication.
// It embeds the protobuf record and remembers which (cell, keyspace, shard)
// it was read from, since the proto itself does not carry that context.
type ShardReplicationInfo struct {
	*topodatapb.ShardReplication
	cell     string
	keyspace string
	shard    string
}
// NewShardReplicationInfo is for topo.Server implementations to
// create the structure
func NewShardReplicationInfo(sr *topodatapb.ShardReplication, cell, keyspace, shard string) *ShardReplicationInfo {
	info := new(ShardReplicationInfo)
	info.ShardReplication = sr
	info.cell = cell
	info.keyspace = keyspace
	info.shard = shard
	return info
}
// Cell returns the cell for a ShardReplicationInfo
func (sri *ShardReplicationInfo) Cell() string {
	return sri.cell
}

// Keyspace returns the keyspace for a ShardReplicationInfo
func (sri *ShardReplicationInfo) Keyspace() string {
	return sri.keyspace
}

// Shard returns the shard for a ShardReplicationInfo
func (sri *ShardReplicationInfo) Shard() string {
	return sri.shard
}
// GetShardReplicationNode finds a node for a given tablet.
// It returns ErrNoNode if no entry matches the alias.
func (sri *ShardReplicationInfo) GetShardReplicationNode(tabletAlias *topodatapb.TabletAlias) (*topodatapb.ShardReplication_Node, error) {
	nodes := sri.Nodes
	for i := range nodes {
		if proto.Equal(nodes[i].TabletAlias, tabletAlias) {
			return nodes[i], nil
		}
	}
	return nil, ErrNoNode
}
// UpdateShardReplicationRecord is a low level function to add / update an
// entry to the ShardReplication object. It also drops duplicate entries
// for the same tablet alias, and returns ErrNoUpdateNeeded when the record
// is already in the desired state.
func UpdateShardReplicationRecord(ctx context.Context, ts Server, keyspace, shard string, tabletAlias *topodatapb.TabletAlias) error {
	span := trace.NewSpanFromContext(ctx)
	span.StartClient("TopoServer.UpdateShardReplicationFields")
	span.Annotate("keyspace", keyspace)
	span.Annotate("shard", shard)
	span.Annotate("tablet", topoproto.TabletAliasString(tabletAlias))
	defer span.Finish()

	return ts.UpdateShardReplicationFields(ctx, tabletAlias.Cell, keyspace, shard, func(sr *topodatapb.ShardReplication) error {
		// Rebuild the node list from scratch. Not very efficient, but easy
		// to read, and removes duplicate entries if any.
		var (
			kept     = make([]*topodatapb.ShardReplication_Node, 0, len(sr.Nodes)+1)
			seen     bool
			modified bool
		)
		for _, node := range sr.Nodes {
			if proto.Equal(node.TabletAlias, tabletAlias) {
				if seen {
					log.Warningf("Found a second ShardReplication_Node for tablet %v, deleting it", tabletAlias)
					modified = true
					continue
				}
				seen = true
			}
			kept = append(kept, node)
		}
		if !seen {
			kept = append(kept, &topodatapb.ShardReplication_Node{TabletAlias: tabletAlias})
			modified = true
		}
		if !modified {
			return ErrNoUpdateNeeded
		}
		sr.Nodes = kept
		return nil
	})
}
// RemoveShardReplicationRecord is a low level function to remove an
// entry from the ShardReplication object.
func RemoveShardReplicationRecord(ctx context.Context, ts Server, cell, keyspace, shard string, tabletAlias *topodatapb.TabletAlias) error {
	return ts.UpdateShardReplicationFields(ctx, cell, keyspace, shard, func(sr *topodatapb.ShardReplication) error {
		// Keep every node except the one matching the alias.
		kept := make([]*topodatapb.ShardReplication_Node, 0, len(sr.Nodes))
		for _, node := range sr.Nodes {
			if proto.Equal(node.TabletAlias, tabletAlias) {
				continue
			}
			kept = append(kept, node)
		}
		sr.Nodes = kept
		return nil
	})
}
// FixShardReplication will fix the first problem it encounters within
// a ShardReplication object: a tablet that no longer exists, or one whose
// keyspace/shard/cell no longer matches the record. At most one bad entry
// is removed per call.
func FixShardReplication(ctx context.Context, ts Server, logger logutil.Logger, cell, keyspace, shard string) error {
	sri, err := ts.GetShardReplication(ctx, cell, keyspace, shard)
	if err != nil {
		return err
	}

	for _, n := range sri.Nodes {
		ti, err := ts.GetTablet(ctx, n.TabletAlias)
		switch {
		case err == ErrNoNode:
			logger.Warningf("Tablet %v is in the replication graph, but does not exist, removing it", n.TabletAlias)
			return RemoveShardReplicationRecord(ctx, ts, cell, keyspace, shard, n.TabletAlias)
		case err != nil:
			// unknown error, we probably don't want to continue
			return err
		}

		if ti.Keyspace != keyspace || ti.Shard != shard || ti.Alias.Cell != cell {
			logger.Warningf("Tablet '%v' is in the replication graph, but has wrong keyspace/shard/cell, removing it", ti.Tablet)
			return RemoveShardReplicationRecord(ctx, ts, cell, keyspace, shard, n.TabletAlias)
		}
		logger.Infof("Keeping tablet %v in the replication graph", n.TabletAlias)
	}
	logger.Infof("All entries in replication graph are valid")
	return nil
}
|
bzxy/cydia
|
iOSOpenDev/frameworks/ChatKit.framework/Headers/CKMessagePartComposeResourcePool.h
|
/**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/ChatKit.framework/ChatKit
*/
#import <ChatKit/XXUnknownSuperclass.h>
// Class-dump-generated declaration of a private ChatKit class.
// NOTE(review): semantics below are inferred from selector names only —
// this appears to pool compose-image resources keyed by a provider ID that
// is checked out/returned via +requestProviderID / +relinquishProviderID:.
// Confirm against actual ChatKit behavior before relying on it.
@interface CKMessagePartComposeResourcePool : XXUnknownSuperclass {
}
+ (int)requestProviderID; // 0x451bd
+ (void)relinquishProviderID:(int)anId; // 0x450f1
+ (void)addComposeImages:(id)images withID:(int)anId forProvider:(int)provider; // 0x45005
+ (void)removeComposeImagesWithID:(int)anId forProvider:(int)provider; // 0x44f25
+ (void)removeAllPartsForProviderID:(int)providerID; // 0x44e4d
+ (id)composeImagesWithPartID:(int)partID providerID:(int)anId; // 0x44d39
@end
|
unexpectedBy/dcrd
|
dcrjson/dcrwalletextresults.go
|
// Copyright (c) 2015 The btcsuite developers
// Copyright (c) 2015 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package dcrjson
// GetMultisigOutInfoResult models the data returned from the getmultisigoutinfo
// command.
type GetMultisigOutInfoResult struct {
	Address      string   `json:"address"`
	RedeemScript string   `json:"redeemscript"`
	M            uint8    `json:"m"`
	N            uint8    `json:"n"`
	Pubkeys      []string `json:"pubkeys"`
	TxHash       string   `json:"txhash"`
	BlockHeight  uint32   `json:"blockheight"`
	BlockHash    string   `json:"blockhash"`
	Spent        bool     `json:"spent"`
	SpentBy      string   `json:"spentby"`
	SpentByIndex uint32   `json:"spentbyindex"`
	Amount       float64  `json:"amount"`
}

// GetSeedResult models the data returned from the getseed
// command.
type GetSeedResult struct {
	Seed string `json:"seed"`
}

// GetMasterPubkeyResult models the data returned from the getmasterpubkey
// command.
type GetMasterPubkeyResult struct {
	MasterPubkey string `json:"key"`
}

// GetTicketMaxPriceResult models the data returned from the getticketmaxprice
// command.
type GetTicketMaxPriceResult struct {
	Price float64 `json:"price"`
}

// GetTicketsResult models the data returned from the gettickets
// command.
type GetTicketsResult struct {
	Hashes []string `json:"hashes"`
}

// RedeemMultiSigOutResult models the data returned from the redeemmultisigout
// command.
type RedeemMultiSigOutResult struct {
	Hex      string                    `json:"hex"`
	Complete bool                      `json:"complete"`
	Errors   []SignRawTransactionError `json:"errors,omitempty"`
}

// RedeemMultiSigOutsResult models the data returned from the redeemmultisigouts
// command.
type RedeemMultiSigOutsResult struct {
	Results []RedeemMultiSigOutResult `json:"results"`
}

// SendToMultiSigResult models the data returned from the sendtomultisig
// command.
type SendToMultiSigResult struct {
	TxHash       string `json:"txhash"`
	Address      string `json:"address"`
	RedeemScript string `json:"redeemscript"`
}

// SignedTransaction is a signed transaction resulting from a signrawtransactions
// command.
type SignedTransaction struct {
	SigningResult SignRawTransactionResult `json:"signingresult"`
	Sent          bool                     `json:"sent"`
	TxHash        *string                  `json:"txhash,omitempty"`
}

// SignRawTransactionsResult models the data returned from the signrawtransactions
// command.
type SignRawTransactionsResult struct {
	Results []SignedTransaction `json:"results"`
}
|
mgoyal2-atl/atlassian-slack-integration-server
|
confluence-slack-server-integration-plugin/src/main/java/com/atlassian/confluence/plugins/slack/spacetochannel/model/EventInfo.java
|
package com.atlassian.confluence.plugins.slack.spacetochannel.model;
import com.atlassian.confluence.user.ConfluenceUser;
import java.util.Objects;
import java.util.Set;
/**
 * Immutable value object describing a Team Calendars event that is being
 * relayed to Slack: who triggered it, which space/calendar it belongs to,
 * its name, description, invitees and time window.
 */
public class EventInfo {
    private final ConfluenceUser trigger;
    private final String spaceKey;
    private final String typeName;
    private final String calendarName;
    private final String description;
    private final String name;
    private final Set<ConfluenceUser> invitees;
    // Epoch millis, presumably — TODO confirm against the calendar API.
    private final long startTime;
    private final long endTime;
    private final String timeZoneId;
    private final boolean allDay;

    public EventInfo(final ConfluenceUser trigger,
                     final String spaceKey,
                     final String typeName,
                     final String calendarName,
                     final String description,
                     final String name,
                     final Set<ConfluenceUser> invitees,
                     final long startTime,
                     final long endTime,
                     final String timeZoneId,
                     final boolean allDay) {
        this.trigger = trigger;
        this.spaceKey = spaceKey;
        this.typeName = typeName;
        this.calendarName = calendarName;
        this.description = description;
        this.name = name;
        this.invitees = invitees;
        this.startTime = startTime;
        this.endTime = endTime;
        this.timeZoneId = timeZoneId;
        this.allDay = allDay;
    }

    public ConfluenceUser getTrigger() {
        return trigger;
    }

    public String getSpaceKey() {
        return spaceKey;
    }

    public String getTypeName() {
        return typeName;
    }

    public String getCalendarName() {
        return calendarName;
    }

    public String getName() {
        return name;
    }

    public String getDescription() {
        return description;
    }

    public boolean isAllDay() {
        return allDay;
    }

    public Set<ConfluenceUser> getInvitees() {
        return invitees;
    }

    public long getStartTime() {
        return startTime;
    }

    public long getEndTime() {
        return endTime;
    }

    public String getTimeZoneId() {
        return timeZoneId;
    }

    // equals/hashCode cover ALL fields; keep them in sync when adding fields.
    @Override
    public boolean equals(final Object o) {
        if (this == o) return true;
        if (!(o instanceof EventInfo)) return false;
        final EventInfo eventInfo = (EventInfo) o;
        return startTime == eventInfo.startTime &&
                endTime == eventInfo.endTime &&
                allDay == eventInfo.allDay &&
                Objects.equals(trigger, eventInfo.trigger) &&
                Objects.equals(spaceKey, eventInfo.spaceKey) &&
                Objects.equals(typeName, eventInfo.typeName) &&
                Objects.equals(calendarName, eventInfo.calendarName) &&
                Objects.equals(description, eventInfo.description) &&
                Objects.equals(name, eventInfo.name) &&
                Objects.equals(invitees, eventInfo.invitees) &&
                Objects.equals(timeZoneId, eventInfo.timeZoneId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(trigger, spaceKey, typeName, calendarName, description, name, invitees, startTime, endTime, timeZoneId, allDay);
    }

    @Override
    public String toString() {
        return "EventInfo{" +
                "trigger=" + trigger +
                ", spaceKey='" + spaceKey + '\'' +
                ", typeName='" + typeName + '\'' +
                ", calendarName='" + calendarName + '\'' +
                ", description='" + description + '\'' +
                ", name='" + name + '\'' +
                ", invitees=" + invitees +
                ", startTime=" + startTime +
                ", endTime=" + endTime +
                ", timeZoneId='" + timeZoneId + '\'' +
                ", allDay=" + allDay +
                '}';
    }
}
|
jamesanto/scala
|
test/files/run/t7850c.scala
|
// Testing that isEmpty and get are viewed with `memberType` from `Casey1`.
// Regression test: the name-based extractor's isEmpty/get must be resolved
// via memberType on Casey1, so the abstract A/B are seen as Boolean/String.
trait T[A, B >: Null] { def isEmpty: A = false.asInstanceOf[A]; def get: B = null}
class Casey1() extends T[Boolean, String]
// Name-based extractor: unapply returns the object itself, relying on its
// (inherited, type-instantiated) isEmpty/get members.
object Casey1 { def unapply(a: Casey1) = a }

object Test {
  def main(args: Array[String]): Unit = {
    // Pattern match should bind x to c.get ("null" string value here).
    val c @ Casey1(x) = new Casey1()
    assert(x == c.get)
  }
}
|
shaojiankui/iOS10-Runtime-Headers
|
PrivateFrameworks/CoreParsec.framework/PARRankerSearchRequestParameters.h
|
<filename>PrivateFrameworks/CoreParsec.framework/PARRankerSearchRequestParameters.h
/* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/CoreParsec.framework/CoreParsec
*/
// RuntimeBrowser-generated declaration of a private CoreParsec class.
// Plain parameter bag for ranker search requests: topic count, a score
// threshold and a score bin size (semantics inferred from names —
// NOTE(review): confirm against CoreParsec usage).
@interface PARRankerSearchRequestParameters : NSObject {
    unsigned long long _numTopics;
    double _thresholdTopicScore;
    unsigned long long _topicScoreBinSize;
}

@property (nonatomic) unsigned long long numTopics;
@property (nonatomic) double thresholdTopicScore;
@property (nonatomic) unsigned long long topicScoreBinSize;

+ (id)defaultValue;
+ (id)responseFromJSON:(id)arg1;

- (unsigned long long)numTopics;
- (void)setNumTopics:(unsigned long long)arg1;
- (void)setThresholdTopicScore:(double)arg1;
- (void)setTopicScoreBinSize:(unsigned long long)arg1;
- (double)thresholdTopicScore;
- (unsigned long long)topicScoreBinSize;
@end
|
767214481/Summer
|
src/test/java/com/swingfrog/summer/test/ecsgameserver/module/player/base/PlayerBeanComponent.java
|
package com.swingfrog.summer.test.ecsgameserver.module.player.base;
import com.swingfrog.summer.ecs.component.AbstractBeanComponent;
import com.swingfrog.summer.test.ecsgameserver.module.player.Player;
/**
 * Base class for player-scoped ECS components backed by a single bean.
 *
 * @param <B> the bean type stored by this component; keyed by the player's
 *            Long id via the AbstractBeanComponent superclass.
 */
public abstract class PlayerBeanComponent<B extends PlayerBean> extends AbstractBeanComponent<Long, B, Player> {

    public PlayerBeanComponent(Player entity) {
        super(entity);
    }
}
|
googleinterns/gail-dyn
|
third_party/a2c_ppo_acktr/collect_tarsim_traj.py
|
<reponame>googleinterns/gail-dyn
# MIT License
#
# Copyright (c) 2017 <NAME> and (c) 2020 Google LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
import os
from typing import *
import sys
import time
import numpy as np
import torch
import gym
import my_pybullet_envs
import random
from matplotlib import pyplot as plt
import pickle
import joblib
from third_party.a2c_ppo_acktr.envs import VecPyTorch, make_vec_envs
from third_party.a2c_ppo_acktr.utils import get_render_func, get_vec_normalize
from third_party.a2c_ppo_acktr.arguments import parse_args_with_unknown
from gan.utils import *
def plot_avg_dis_prob(args, avg_reward_list, dxs):
    """Plot per-step discriminator rewards (top) and dx values (bottom),
    and persist both the raw rewards (.npy) and the figure (.png) under
    ./imgs, named after args.env_name.

    Blocks on plt.show() and a final input() prompt (interactive use only).
    """
    env_name = args.env_name
    _, axs = plt.subplots(2, 1)
    axs[0].plot(avg_reward_list)
    # plt.title('Average Dis Reward, Env: {}'.format(env_name))
    plt.xlabel('steps')
    # plt.ylabel('average reward')
    axs[1].plot(dxs)
    # Persist BEFORE plt.show(): with non-interactive backends show() can
    # close the figure, so a savefig() afterwards writes a blank image.
    np.save(os.path.join('./imgs', env_name + '_avg_dreward.npy'), np.array(avg_reward_list))
    plt.savefig(os.path.join('./imgs', env_name + '_avg_dreward.png'))
    plt.show()
    input("press enter plt")
def plot_avg_dis_prob_2(args, avg_reward_list, avg_reward_list_2, dxs):
    """Like plot_avg_dis_prob, but overlays two reward series on the top
    axes. Only the FIRST series is written to the .npy file (kept for
    backward compatibility with the single-series variant).

    Blocks on plt.show() and a final input() prompt (interactive use only).
    """
    env_name = args.env_name
    _, axs = plt.subplots(2, 1)
    axs[0].plot(avg_reward_list)
    axs[0].plot(avg_reward_list_2)
    # plt.title('Average Dis Reward, Env: {}'.format(env_name))
    plt.xlabel('steps')
    # plt.ylabel('average reward')
    axs[1].plot(dxs)
    # Persist BEFORE plt.show(): with non-interactive backends show() can
    # close the figure, so a savefig() afterwards writes a blank image.
    np.save(os.path.join('./imgs', env_name + '_avg_dreward.npy'), np.array(avg_reward_list))
    plt.savefig(os.path.join('./imgs', env_name + '_avg_dreward.png'))
    plt.show()
    input("press enter plt")
# --- Command-line interface -------------------------------------------------
# Script-level code: builds the CLI, then hands unknown flags through to the
# environment constructor via parse_args_with_unknown.
sys.path.append("third_party")

parser = argparse.ArgumentParser(description="RL")
parser.add_argument(
    "--seed", type=int, default=1, help="random seed (default: 1)"
)
parser.add_argument(
    "--env-name",
    default="HumanoidSwimmerEnv-v1",
    help="environment to load and test on",
)
parser.add_argument(
    "--src-env-name",
    default="",
    help="environment to transfer policy from ("" if same as test env)",
)
parser.add_argument(
    "--load-dir",
    default="./trained_models/",
    help="directory to save agent logs (default: ./trained_models/)",
)
parser.add_argument(
    "--save-traj",
    type=int,
    default=0,
    help="whether to save traj tuples",
)
parser.add_argument(
    "--num-trajs",
    type=int,
    default=200,
    help="how many trajs to rollout/store",
)
parser.add_argument(
    "--save-path",
    default="./tmp.pkl",
    help="where the traj tuples are stored",
)
# parser.add_argument(
#     "--load-dis",
#     type=int,
#     default=0,
#     help="whether to load gail discriminator for debugging",
# )
# parser.add_argument(
#     "--enlarge-act-range",
#     type=float,
#     default=0.15,
#     help="add white noise to action during rollout",
# )
parser.add_argument(
    "--non-det",
    type=int,
    default=0,
    help="whether to use a non-deterministic policy, 1 true 0 false",
)
parser.add_argument(
    "--iter",
    type=int,
    default=None,
    help="which iter pi to test"
)
parser.add_argument(
    "--r-thres",
    type=int,
    default=4000,
    help="The threshold reward value above which it is considered a success.",
)
# extra_dict collects any unrecognized --key value pairs for the env factory.
args, extra_dict = parse_args_with_unknown(parser)
# --- Environment / policy setup ---------------------------------------------
np.set_printoptions(precision=2, suppress=None, threshold=sys.maxsize)

# Seed everything for a reproducible rollout.
torch.manual_seed(args.seed)
np.random.seed(args.seed)
random.seed(args.seed)

is_cuda = False
device = "cuda" if is_cuda else "cpu"
args.det = not args.non_det

# If render is provided, use that. Otherwise, turn it on.
if "render" not in extra_dict:
    extra_dict["render"] = True

# Single (non-vectorized) environment; extra CLI flags are forwarded here.
env = make_vec_envs(
    args.env_name,
    args.seed + 1000,
    1,
    None,
    None,
    device=device,
    allow_early_resets=False,
    **extra_dict,
)
# dont know why there are so many wrappers in make_vec_envs...
env_core = env.venv.venv.envs[0].env.env

# Policy may have been trained on a different (source) env name.
if args.src_env_name == "":
    env_name_transfer = args.env_name
else:
    env_name_transfer = args.src_env_name

actor_critic, ob_rms, recurrent_hidden_states, masks \
    = load(args.load_dir, env_name_transfer, is_cuda, args.iter)

# discri = None
# if args.load_dis:
#     discri = load_gail_discriminator(args.load_dir, env_name_transfer, is_cuda, args.iter)

# Dump the saved observation-normalization stats for manual inspection
# (blocks on input()).
if ob_rms:
    print(ob_rms.mean)
    print(ob_rms.var)
    print(ob_rms.count)
    input("ob_rms")

# Freeze normalization and install the training-time running stats.
vec_norm = get_vec_normalize(env)
if vec_norm is not None:
    vec_norm.eval()
    vec_norm.ob_rms = ob_rms

# Rollout bookkeeping.
all_trajs = {}
cur_traj = []
cur_traj_idx = 0

obs = env.reset()
# print("obs", obs)
# input("reset, press enter")

done = False
reward_total = 0
list_rewards = []
list_traj_lengths = []
list_r_per_step = []
dist = 0
last_dist = 0
dis_probs_imaginary = None
dis_probs_real = None
dxs = []
# if args.load_dis:
#     dis_probs_imaginary = []
#     dis_probs_real = []
# --- Main rollout loop -------------------------------------------------------
# Steps the policy until args.num_trajs episodes have completed, collecting
# per-episode rewards and (optionally) state-action-state windows.
while True:
    # try:
    #     env_core.reset_counter = 5000
    # except:
    #     pass
    with torch.no_grad():
        # value, action, _, recurrent_hidden_states = actor_critic.act(
        #     obs, recurrent_hidden_states, masks, deterministic=True
        # )
        # action += torch.normal(torch.zeros(action.size()), 0.1).to(device)
        # print(action)
        value, action, _, recurrent_hidden_states = actor_critic.act(
            obs, recurrent_hidden_states, masks, deterministic=args.det
        )
    # # TODO, name duplicate
    # # TODO parameter space noise
    # # xx% noise before tanh
    # action += (torch.rand(action.size()).to(device) - 0.5) * (args.enlarge_act_range * 2)
    # # print(action)
    # if args.save_traj:
    #     tuple_sas = []
    #     obs_feat = replace_obs_with_feat(obs, is_cuda, feat_select_func, return_tensor=False)
    #     tuple_sas.append(obs_feat[0])  # only one process env
    #
    #     # save clamped action (note: dyn envs might have action larger than 1)
    #     action = action.clamp(-1., 1)
    # print("obs", obs)
    # print("act", torch.tanh(action))
    # if args.load_dis:
    #     obs_feat = replace_obs_with_feat(obs, is_cuda, feat_select_func, return_tensor=True)
    #     dis_state = torch.cat((obs_feat, obs[:, env_core.behavior_obs_len:]), 1)
    # Obser reward and next obs
    obs, reward, done, info = env.step(action)
    list_r_per_step.append(reward)
    if args.save_traj:
        sas_window = info[0]["sas_window"]  # info[0] because 1-core dummy vec env.
        # tuple_sas.append(list(unwrap(action, is_cuda=is_cuda)))
        #
        # obs_feat = replace_obs_with_feat(obs, is_cuda, feat_select_func, return_tensor=False)
        # tuple_sas.append(obs_feat[0])
        next_obs = list(unwrap(obs, is_cuda=is_cuda))
        # print(sas_window)
        cur_traj.append(sas_window)
    # if args.load_dis:
    #     dis_action = replace_obs_with_feat(obs, is_cuda, feat_select_func, return_tensor=True)
    #     dis_r = discri.predict_prob_single_step(dis_state, dis_action)
    #     dis_probs_real.append(unwrap(dis_r, is_cuda=is_cuda))
    #     if len(dis_probs_real)>20 and np.mean(dis_probs_real[-20:]) < 0.4:
    #         done = True
    #         env.reset()
    #     try:
    #         obs_i = env_core.return_imaginary_obs()
    #         dis_action = obs_i[:env_core.behavior_obs_len]  # dis action is next state
    #         dis_action = wrap(dis_action, is_cuda=is_cuda)
    #         dis_action = replace_obs_with_feat(dis_action, is_cuda, feat_select_func, return_tensor=True)
    #         dis_r = discri.predict_prob_single_step(dis_state, dis_action)
    #         dis_probs_imaginary.append(unwrap(dis_r, is_cuda=is_cuda))
    #     except:
    #         pass
    # dxs.append(env_core.get_ave_dx())
    # Camera tracking / distance readout only exists on Bullet locomotion envs.
    try:
        env_core.cam_track_torso_link()
        last_dist = dist
        dist = env_core.get_dist()
    except:
        print("not bullet locomotion env")
    reward_total += reward.cpu().numpy()[0][0]
    if done:
        list_rewards.append(reward_total)
        list_traj_lengths.append(len(list_r_per_step))
        print(
            f"{args.load_dir}\t"
            f"tr: {reward_total:.1f}\t"
            f"x: {last_dist:.2f}\t"
            f"tr_ave: {reward_total/len(list_r_per_step):.2f}\t"
            f"total_per_step_r_ave: {np.sum(list_rewards)/np.sum(list_traj_lengths):.2f}\t"
        )
        reward_total = 0.0
        # env_core.reset_counter = 0
        cur_traj_idx += 1
        # NOTE(review): the index is incremented BEFORE storing, so keys in
        # all_trajs start at 1, and the final episode (the one that triggers
        # the break) is never stored — confirm this is intended.
        if cur_traj_idx >= args.num_trajs:
            break
        if args.save_traj:
            print(np.array(cur_traj).shape)
            all_trajs[cur_traj_idx] = cur_traj
            cur_traj = []
        # if args.load_dis:
        #     print(
        #         f"{np.array(dis_probs_real).mean()}\t"
        #     )
        #     # plot_avg_dis_prob_2(args, dis_probs_imaginary, dis_probs_real, list_r_per_step)
        #     dis_probs_imaginary = []
        #     dis_probs_real = []
        # else:
        #     # plot_avg_dis_prob(args, list_r_per_step, dxs)
        #     pass
        list_r_per_step = []
        dxs = []
    # Recurrent-policy mask: 0 resets the hidden state at episode boundaries.
    masks.fill_(0.0 if done else 1.0)
# --- Persist trajectories and show a reward histogram ------------------------
# Written unconditionally; when --save-traj is 0 this pickles an empty dict.
with open(args.save_path, "wb") as handle:
    # print(all_trajs)
    pickle.dump(all_trajs, handle, protocol=pickle.HIGHEST_PROTOCOL)
    # joblib.dump(all_trajs, handle)

# Histogram of total per-episode rewards in fixed 50-wide bins up to 1950.
bins_list = np.arange(40) * 50.0
print(bins_list)
plt.hist(list_rewards, alpha=0.5, label='r hist', bins=bins_list)
plt.legend(loc='upper right')
plt.show()
|
bold-commerce/checkout-react-components
|
src/components/product/index.js
|
<reponame>bold-commerce/checkout-react-components<gh_stars>1-10
// Public entry point of the product component folder: re-exports the
// Product component as a named export.
// eslint-disable-next-line import/prefer-default-export
export { default as Product } from './Product';
|
smallpdf/Leanplum-Android-SDK
|
AndroidSDKTests/src/androidTest/java/com/leanplum/ScreenshotTestRunner.java
|
<gh_stars>10-100
package com.leanplum;
import android.os.Bundle;
import androidx.test.runner.AndroidJUnitRunner;
import com.facebook.testing.screenshot.ScreenshotRunner;
/**
 * Instrumentation test runner that wires Facebook's screenshot-testing
 * library into the standard AndroidJUnitRunner lifecycle.
 */
public class ScreenshotTestRunner extends AndroidJUnitRunner {

    @Override
    public void onCreate(Bundle args) {
        // Initialize the screenshot plumbing BEFORE the instrumentation
        // starts, so tests can record screenshots from the first case.
        ScreenshotRunner.onCreate(this, args);
        super.onCreate(args);
    }

    @Override
    public void finish(int resultCode, Bundle results) {
        // Flush/teardown screenshot state before the run is finalized.
        ScreenshotRunner.onDestroy();
        super.finish(resultCode, results);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.