text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Author: yeho <lj2007331 AT gmail.com>
# BLOG: https://blog.linuxeye.cn
#
# Notes: OneinStack for CentOS/RedHat 6+ Debian 7+ and Ubuntu 12+
#
# Project home page:
# https://oneinstack.com
# https://github.com/lj2007331/oneinstack
# Build and install jemalloc from the bundled source tarball.
# No-op when /usr/local/lib/libjemalloc.so already exists; on build failure
# the whole installer is aborted (kill -9 $$).
Install_Jemalloc() {
if [ ! -e "/usr/local/lib/libjemalloc.so" ]; then
pushd "${oneinstack_dir}/src" > /dev/null
tar xjf jemalloc-${jemalloc_ver}.tar.bz2
pushd jemalloc-${jemalloc_ver} > /dev/null
./configure
make -j ${THREAD} && make install
popd > /dev/null
if [ -f "/usr/local/lib/libjemalloc.so" ]; then
# 64-bit CentOS keeps system libraries in /usr/lib64; everything else uses /usr/lib.
# ([ a -a b ] is obsolescent per POSIX; use two tests joined with &&.)
if [ "${OS_BIT}" == '64' ] && [ "${OS}" == 'CentOS' ]; then
ln -s /usr/local/lib/libjemalloc.so.2 /usr/lib64/libjemalloc.so.1
else
ln -s /usr/local/lib/libjemalloc.so.2 /usr/lib/libjemalloc.so.1
fi
# Ensure the dynamic linker searches /usr/local/lib, then refresh its cache.
grep -q /usr/local/lib /etc/ld.so.conf.d/*.conf 2> /dev/null || echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf
ldconfig
echo "${CSUCCESS}jemalloc module installed successfully! ${CEND}"
rm -rf jemalloc-${jemalloc_ver}
else
echo "${CFAILURE}jemalloc install failed, Please contact the author! ${CEND}"
kill -9 $$
fi
popd > /dev/null
fi
}
|
def unescapeHTML(s):
    """Replace the five basic HTML character entities in ``s`` with their literal characters.

    Handles ``&lt;``, ``&gt;``, ``&quot;``, ``&#39;`` and ``&amp;``.
    ``&amp;`` is deliberately replaced last so that double-escaped input such
    as ``&amp;lt;`` yields ``&lt;`` rather than being unescaped twice to ``<``.

    Args:
        s: string possibly containing HTML entities.

    Returns:
        The string with all known entities replaced.
    """
    # Ordered list, not a dict: '&amp;' must come last (see docstring).
    replacements = [
        ("&lt;", "<"),
        ("&gt;", ">"),
        ("&quot;", "\""),
        ("&#39;", "'"),
        ("&amp;", "&"),
    ]
    for entity, char in replacements:
        s = s.replace(entity, char)
    return s
import { Quote } from 'yahoo-finance2/dist/esm/src/modules/quote';
// DTO for the resource-mail job: identifies the recipient and carries the
// stock quotes to render into the message body.
export class SendResourceMailDto {
userName: string; // recipient's display name
userEmail: string; // recipient's e-mail address
quotes: Quote[]; // yahoo-finance2 quote objects to include in the mail
}
|
// Declaration for the window.sk global variable.
import { SkPerfConfig } from '../json';
// Augment the global Window type so TypeScript accepts reads of `window.sk`,
// which the page injects before the app scripts run.
declare global {
interface Window {
sk: {
// Performance-tracing configuration consumed by the perf instrumentation.
perf: SkPerfConfig;
};
}
}
|
#!/bin/sh
# Run the model-conversion pipeline inside the project virtualenv.
# -e: abort immediately if any step fails, so a broken conversion
# is never fed into the TFLite test.
set -e
. venv/bin/activate
python3 model_converter.py
python3 tflite_test.py
|
#!/bin/sh
# Build every test/benchmark binary for the digest implementations.
# -x: echo each command; -e: stop at the first failed compile instead of
# silently producing a partial set of binaries.
set -ex
# Flags common to every target.
CXXFLAGS="-std=c++17"
g++ DigestOOP.cc test_oop.cc $CXXFLAGS -lcrypto -o oop
g++ DigestOOP2.cc test_oop.cc $CXXFLAGS -lcrypto -o oop2
g++ DigestTMP.cc test_oop.cc $CXXFLAGS -lcrypto -o tmp
g++ test_evp.cc $CXXFLAGS -lcrypto -o evp
g++ test_evp2.cc $CXXFLAGS -lcrypto -o evp2
g++ bench.cc DigestOOP.cc $CXXFLAGS -lbenchmark -lpthread -lcrypto -O2 -o bench
|
<gh_stars>0
package com.m2m.shgs.android;
import android.app.Application;
import android.content.Context;
import com.alibaba.android.arouter.launcher.ARouter;
import com.ayvytr.logger.L;
import com.base.manager.ActivityStack;
import com.common.base.BaseApplication;
import com.common.config.AppConfig;
import com.http.ApiClient;
import com.tencent.mmkv.MMKV;
import com.tencent.mmkv.MMKVLogLevel;
import java.util.concurrent.Executors;
/**
 * Application subclass: performs one-time process initialization (Bugly crash
 * reporting, ARouter navigation, logging, the HTTP API client and MMKV
 * key-value storage).
 * </br>
 * Date: 2018/11/29 11:10
 *
 * @author hemin
 */
public class MyApplication extends BaseApplication {
// public static boolean isServerTrusted = false;
// private static SSLContext sslContext = null;
@Override
protected void init() {
super.init();
// Initialize Bugly crash reporting (currently a no-op, see initBugly).
initBugly(getApplicationContext());
AppConfig.init(MyBuildConfig.env_mode);
// Initialize Umeng analytics (disabled)
// initUM(getApplicationContext());
ActivityStack.registerCallback(this);
initArouter(this);
initLogger();
initApiClient();
MMKV.initialize(getApplicationContext());
// Verbose MMKV logging only in debug builds.
MMKV.setLogLevel(AppConfig.DEBUG ? MMKVLogLevel.LevelInfo : MMKVLogLevel.LevelNone);
// // Android 7.0 camera/file-URI exposure workaround (disabled):
// StrictMode.VmPolicy.Builder builder = new StrictMode.VmPolicy.Builder();
// StrictMode.setVmPolicy(builder.build());
// LanguageWrapper.attachBaseContext(this);
// builder.detectFileUriExposure();
// NOTE(review): creates a single-thread executor with an empty task and never
// shuts it down -- looks like leftover scaffolding; confirm before removing.
Executors.newSingleThreadExecutor().execute(new Runnable() {
@Override
public void run() {
}
});
}
/** Points the shared ApiClient at the configured base URL with the header interceptor. */
@Override
public void initApiClient() {
ApiClient.init(AppConfig.getBaseHttpUrl(), new HeaderInterceptor());
}
/** Configures the "L" logger: output enabled only in debug builds, fixed tag. */
private void initLogger() {
L.settings().showLog(AppConfig.DEBUG);
L.settings().tag("SmartSecurity");
}
/** Initializes ARouter; verbose log/debug mode is enabled only for debug builds. */
private void initArouter(Application context) {
if (AppConfig.DEBUG) {
ARouter.openLog();
ARouter.openDebug();
}
ARouter.init(context);
}
/**
 * Intended to initialize Bugly crash reporting; every CrashReport call is
 * commented out, so this is currently a no-op (isDebug is computed but unused).
 */
private void initBugly(Context context) {
boolean isDebug = AppConfig.DEBUG;
// // Get the current package name
// String packageName = context.getPackageName();
// // Get the current process name
// String processName = DeviceUtil.getProcessName(android.os.Process.myPid());
// // Report only from the main process
// CrashReport.UserStrategy strategy = new CrashReport.UserStrategy(context);
// strategy.setUploadProcess(processName == null || processName.equals(packageName));
// // Initialize Bugly
// CrashReport.initCrashReport(context, "?", isDebug, strategy);
}
}
|
#include "pch-cpp.hpp"
#ifndef _MSC_VER
# include <alloca.h>
#else
# include <malloc.h>
#endif
#include <limits>
#include <stdint.h>
// IL2CPP codegen helper: invokes a zero-argument virtual method through the
// object's vtable slot and returns its result of type R.
template <typename R>
struct VirtFuncInvoker0
{
typedef R (*Func)(void*, const RuntimeMethod*);
// Resolves the per-slot invoke data for `obj`, then calls through its
// method pointer with the associated RuntimeMethod metadata.
static inline R Invoke (Il2CppMethodSlot slot, RuntimeObject* obj)
{
const VirtualInvokeData& invokeData = il2cpp_codegen_get_virtual_invoke_data(slot, obj);
return ((Func)invokeData.methodPtr)(obj, invokeData.method);
}
};
// System.Action`1<UnityEngine.AsyncOperation>
struct Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31;
// System.Char[]
struct CharU5BU5D_t7B7FC5BC8091AA3B9CB0B29CDD80B5EE9254AA34;
// System.Delegate[]
struct DelegateU5BU5D_t677D8FE08A5F99E8EE49150B73966CD6E9BF7DB8;
// System.Type[]
struct TypeU5BU5D_t85B10489E46F06CEC7C4B1CCBD0E01FAB6649755;
// UnityEngine.AssetBundle
struct AssetBundle_t4D34D7FDF0F230DC641DC1FCFA2C0E7E9E628FA4;
// UnityEngine.AssetBundleCreateRequest
struct AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A;
// UnityEngine.AssetBundleRequest
struct AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A;
// System.AsyncCallback
struct AsyncCallback_tA7921BEF974919C46FF8F9D9867C567B200BB0EA;
// UnityEngine.AsyncOperation
struct AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86;
// System.Reflection.Binder
struct Binder_t2BEE27FD84737D1E79BC47FD67F6D3DD2F2DDA30;
// System.DelegateData
struct DelegateData_t17DD30660E330C49381DAA99F934BE75CB11F288;
// System.IAsyncResult
struct IAsyncResult_tC9F97BF36FCF122D29D3101D80642278297BF370;
// System.Reflection.MemberFilter
struct MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81;
// System.Reflection.MethodInfo
struct MethodInfo_t;
// UnityEngine.Object
struct Object_tF2F3778131EFF286AF62B7B013A170F95A91571A;
// System.String
struct String_t;
// System.Type
struct Type_t;
// System.Void
struct Void_t700C6383A2A510C2CF4DD86DABD5CA9FF70ADAC5;
IL2CPP_EXTERN_C RuntimeClass* Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cpp_TypeInfo_var;
IL2CPP_EXTERN_C RuntimeClass* Object_tF2F3778131EFF286AF62B7B013A170F95A91571A_il2cpp_TypeInfo_var;
IL2CPP_EXTERN_C const RuntimeMethod* AssetBundleCreateRequest_get_assetBundle_m608C1516A7DC8E4B1F9D63EDCF6EE8D6C2CFF013_RuntimeMethod_var;
IL2CPP_EXTERN_C const RuntimeMethod* AssetBundleRequest_get_asset_mB0A96FBC026D143638E467DEB37228ACD55F1813_RuntimeMethod_var;
IL2CPP_EXTERN_C const RuntimeMethod* AssetBundle__ctor_mCE6DB7758AAD0EDDB044FC67C5BC7EC987BF3F71_RuntimeMethod_var;
struct Delegate_t_marshaled_com;
struct Delegate_t_marshaled_pinvoke;
IL2CPP_EXTERN_C_BEGIN
IL2CPP_EXTERN_C_END
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winvalid-offsetof"
#pragma clang diagnostic ignored "-Wunused-variable"
#endif
// <Module>
struct U3CModuleU3E_t7988782848E87B4A172D2199D2BAC372F93351FD
{
public:
public:
};
// System.Object
struct Il2CppArrayBounds;
// System.Array
// System.Reflection.MemberInfo
struct MemberInfo_t : public RuntimeObject
{
public:
public:
};
// System.String
struct String_t : public RuntimeObject
{
public:
// System.Int32 System.String::m_stringLength
int32_t ___m_stringLength_0;
// System.Char System.String::m_firstChar
Il2CppChar ___m_firstChar_1;
public:
inline static int32_t get_offset_of_m_stringLength_0() { return static_cast<int32_t>(offsetof(String_t, ___m_stringLength_0)); }
inline int32_t get_m_stringLength_0() const { return ___m_stringLength_0; }
inline int32_t* get_address_of_m_stringLength_0() { return &___m_stringLength_0; }
inline void set_m_stringLength_0(int32_t value)
{
___m_stringLength_0 = value;
}
inline static int32_t get_offset_of_m_firstChar_1() { return static_cast<int32_t>(offsetof(String_t, ___m_firstChar_1)); }
inline Il2CppChar get_m_firstChar_1() const { return ___m_firstChar_1; }
inline Il2CppChar* get_address_of_m_firstChar_1() { return &___m_firstChar_1; }
inline void set_m_firstChar_1(Il2CppChar value)
{
___m_firstChar_1 = value;
}
};
struct String_t_StaticFields
{
public:
// System.String System.String::Empty
String_t* ___Empty_5;
public:
inline static int32_t get_offset_of_Empty_5() { return static_cast<int32_t>(offsetof(String_t_StaticFields, ___Empty_5)); }
inline String_t* get_Empty_5() const { return ___Empty_5; }
inline String_t** get_address_of_Empty_5() { return &___Empty_5; }
inline void set_Empty_5(String_t* value)
{
___Empty_5 = value;
Il2CppCodeGenWriteBarrier((void**)(&___Empty_5), (void*)value);
}
};
// System.ValueType
struct ValueType_tDBF999C1B75C48C68621878250DBF6CDBCF51E52 : public RuntimeObject
{
public:
public:
};
// Native definition for P/Invoke marshalling of System.ValueType
struct ValueType_tDBF999C1B75C48C68621878250DBF6CDBCF51E52_marshaled_pinvoke
{
};
// Native definition for COM marshalling of System.ValueType
struct ValueType_tDBF999C1B75C48C68621878250DBF6CDBCF51E52_marshaled_com
{
};
// UnityEngine.YieldInstruction
struct YieldInstruction_tB0B4E05316710E51ECCC1E57174C27FE6DEBBEAF : public RuntimeObject
{
public:
public:
};
// Native definition for P/Invoke marshalling of UnityEngine.YieldInstruction
struct YieldInstruction_tB0B4E05316710E51ECCC1E57174C27FE6DEBBEAF_marshaled_pinvoke
{
};
// Native definition for COM marshalling of UnityEngine.YieldInstruction
struct YieldInstruction_tB0B4E05316710E51ECCC1E57174C27FE6DEBBEAF_marshaled_com
{
};
// System.Enum
struct Enum_t23B90B40F60E677A8025267341651C94AE079CDA : public ValueType_tDBF999C1B75C48C68621878250DBF6CDBCF51E52
{
public:
public:
};
struct Enum_t23B90B40F60E677A8025267341651C94AE079CDA_StaticFields
{
public:
// System.Char[] System.Enum::enumSeperatorCharArray
CharU5BU5D_t7B7FC5BC8091AA3B9CB0B29CDD80B5EE9254AA34* ___enumSeperatorCharArray_0;
public:
inline static int32_t get_offset_of_enumSeperatorCharArray_0() { return static_cast<int32_t>(offsetof(Enum_t23B90B40F60E677A8025267341651C94AE079CDA_StaticFields, ___enumSeperatorCharArray_0)); }
inline CharU5BU5D_t7B7FC5BC8091AA3B9CB0B29CDD80B5EE9254AA34* get_enumSeperatorCharArray_0() const { return ___enumSeperatorCharArray_0; }
inline CharU5BU5D_t7B7FC5BC8091AA3B9CB0B29CDD80B5EE9254AA34** get_address_of_enumSeperatorCharArray_0() { return &___enumSeperatorCharArray_0; }
inline void set_enumSeperatorCharArray_0(CharU5BU5D_t7B7FC5BC8091AA3B9CB0B29CDD80B5EE9254AA34* value)
{
___enumSeperatorCharArray_0 = value;
Il2CppCodeGenWriteBarrier((void**)(&___enumSeperatorCharArray_0), (void*)value);
}
};
// Native definition for P/Invoke marshalling of System.Enum
struct Enum_t23B90B40F60E677A8025267341651C94AE079CDA_marshaled_pinvoke
{
};
// Native definition for COM marshalling of System.Enum
struct Enum_t23B90B40F60E677A8025267341651C94AE079CDA_marshaled_com
{
};
// System.IntPtr
struct IntPtr_t
{
public:
// System.Void* System.IntPtr::m_value
void* ___m_value_0;
public:
inline static int32_t get_offset_of_m_value_0() { return static_cast<int32_t>(offsetof(IntPtr_t, ___m_value_0)); }
inline void* get_m_value_0() const { return ___m_value_0; }
inline void** get_address_of_m_value_0() { return &___m_value_0; }
inline void set_m_value_0(void* value)
{
___m_value_0 = value;
}
};
struct IntPtr_t_StaticFields
{
public:
// System.IntPtr System.IntPtr::Zero
intptr_t ___Zero_1;
public:
inline static int32_t get_offset_of_Zero_1() { return static_cast<int32_t>(offsetof(IntPtr_t_StaticFields, ___Zero_1)); }
inline intptr_t get_Zero_1() const { return ___Zero_1; }
inline intptr_t* get_address_of_Zero_1() { return &___Zero_1; }
inline void set_Zero_1(intptr_t value)
{
___Zero_1 = value;
}
};
// System.Void
struct Void_t700C6383A2A510C2CF4DD86DABD5CA9FF70ADAC5
{
public:
union
{
struct
{
};
uint8_t Void_t700C6383A2A510C2CF4DD86DABD5CA9FF70ADAC5__padding[1];
};
public:
};
// UnityEngine.AsyncOperation
struct AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86 : public YieldInstruction_tB0B4E05316710E51ECCC1E57174C27FE6DEBBEAF
{
public:
// System.IntPtr UnityEngine.AsyncOperation::m_Ptr
intptr_t ___m_Ptr_0;
// System.Action`1<UnityEngine.AsyncOperation> UnityEngine.AsyncOperation::m_completeCallback
Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31 * ___m_completeCallback_1;
public:
inline static int32_t get_offset_of_m_Ptr_0() { return static_cast<int32_t>(offsetof(AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86, ___m_Ptr_0)); }
inline intptr_t get_m_Ptr_0() const { return ___m_Ptr_0; }
inline intptr_t* get_address_of_m_Ptr_0() { return &___m_Ptr_0; }
inline void set_m_Ptr_0(intptr_t value)
{
___m_Ptr_0 = value;
}
inline static int32_t get_offset_of_m_completeCallback_1() { return static_cast<int32_t>(offsetof(AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86, ___m_completeCallback_1)); }
inline Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31 * get_m_completeCallback_1() const { return ___m_completeCallback_1; }
inline Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31 ** get_address_of_m_completeCallback_1() { return &___m_completeCallback_1; }
inline void set_m_completeCallback_1(Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31 * value)
{
___m_completeCallback_1 = value;
Il2CppCodeGenWriteBarrier((void**)(&___m_completeCallback_1), (void*)value);
}
};
// Native definition for P/Invoke marshalling of UnityEngine.AsyncOperation
struct AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86_marshaled_pinvoke : public YieldInstruction_tB0B4E05316710E51ECCC1E57174C27FE6DEBBEAF_marshaled_pinvoke
{
intptr_t ___m_Ptr_0;
Il2CppMethodPointer ___m_completeCallback_1;
};
// Native definition for COM marshalling of UnityEngine.AsyncOperation
struct AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86_marshaled_com : public YieldInstruction_tB0B4E05316710E51ECCC1E57174C27FE6DEBBEAF_marshaled_com
{
intptr_t ___m_Ptr_0;
Il2CppMethodPointer ___m_completeCallback_1;
};
// System.Reflection.BindingFlags
struct BindingFlags_tAAAB07D9AC588F0D55D844E51D7035E96DF94733
{
public:
// System.Int32 System.Reflection.BindingFlags::value__
int32_t ___value___2;
public:
inline static int32_t get_offset_of_value___2() { return static_cast<int32_t>(offsetof(BindingFlags_tAAAB07D9AC588F0D55D844E51D7035E96DF94733, ___value___2)); }
inline int32_t get_value___2() const { return ___value___2; }
inline int32_t* get_address_of_value___2() { return &___value___2; }
inline void set_value___2(int32_t value)
{
___value___2 = value;
}
};
// System.Delegate
struct Delegate_t : public RuntimeObject
{
public:
// System.IntPtr System.Delegate::method_ptr
Il2CppMethodPointer ___method_ptr_0;
// System.IntPtr System.Delegate::invoke_impl
intptr_t ___invoke_impl_1;
// System.Object System.Delegate::m_target
RuntimeObject * ___m_target_2;
// System.IntPtr System.Delegate::method
intptr_t ___method_3;
// System.IntPtr System.Delegate::delegate_trampoline
intptr_t ___delegate_trampoline_4;
// System.IntPtr System.Delegate::extra_arg
intptr_t ___extra_arg_5;
// System.IntPtr System.Delegate::method_code
intptr_t ___method_code_6;
// System.Reflection.MethodInfo System.Delegate::method_info
MethodInfo_t * ___method_info_7;
// System.Reflection.MethodInfo System.Delegate::original_method_info
MethodInfo_t * ___original_method_info_8;
// System.DelegateData System.Delegate::data
DelegateData_t17DD30660E330C49381DAA99F934BE75CB11F288 * ___data_9;
// System.Boolean System.Delegate::method_is_virtual
bool ___method_is_virtual_10;
public:
inline static int32_t get_offset_of_method_ptr_0() { return static_cast<int32_t>(offsetof(Delegate_t, ___method_ptr_0)); }
inline Il2CppMethodPointer get_method_ptr_0() const { return ___method_ptr_0; }
inline Il2CppMethodPointer* get_address_of_method_ptr_0() { return &___method_ptr_0; }
inline void set_method_ptr_0(Il2CppMethodPointer value)
{
___method_ptr_0 = value;
}
inline static int32_t get_offset_of_invoke_impl_1() { return static_cast<int32_t>(offsetof(Delegate_t, ___invoke_impl_1)); }
inline intptr_t get_invoke_impl_1() const { return ___invoke_impl_1; }
inline intptr_t* get_address_of_invoke_impl_1() { return &___invoke_impl_1; }
inline void set_invoke_impl_1(intptr_t value)
{
___invoke_impl_1 = value;
}
inline static int32_t get_offset_of_m_target_2() { return static_cast<int32_t>(offsetof(Delegate_t, ___m_target_2)); }
inline RuntimeObject * get_m_target_2() const { return ___m_target_2; }
inline RuntimeObject ** get_address_of_m_target_2() { return &___m_target_2; }
inline void set_m_target_2(RuntimeObject * value)
{
___m_target_2 = value;
Il2CppCodeGenWriteBarrier((void**)(&___m_target_2), (void*)value);
}
inline static int32_t get_offset_of_method_3() { return static_cast<int32_t>(offsetof(Delegate_t, ___method_3)); }
inline intptr_t get_method_3() const { return ___method_3; }
inline intptr_t* get_address_of_method_3() { return &___method_3; }
inline void set_method_3(intptr_t value)
{
___method_3 = value;
}
inline static int32_t get_offset_of_delegate_trampoline_4() { return static_cast<int32_t>(offsetof(Delegate_t, ___delegate_trampoline_4)); }
inline intptr_t get_delegate_trampoline_4() const { return ___delegate_trampoline_4; }
inline intptr_t* get_address_of_delegate_trampoline_4() { return &___delegate_trampoline_4; }
inline void set_delegate_trampoline_4(intptr_t value)
{
___delegate_trampoline_4 = value;
}
inline static int32_t get_offset_of_extra_arg_5() { return static_cast<int32_t>(offsetof(Delegate_t, ___extra_arg_5)); }
inline intptr_t get_extra_arg_5() const { return ___extra_arg_5; }
inline intptr_t* get_address_of_extra_arg_5() { return &___extra_arg_5; }
inline void set_extra_arg_5(intptr_t value)
{
___extra_arg_5 = value;
}
inline static int32_t get_offset_of_method_code_6() { return static_cast<int32_t>(offsetof(Delegate_t, ___method_code_6)); }
inline intptr_t get_method_code_6() const { return ___method_code_6; }
inline intptr_t* get_address_of_method_code_6() { return &___method_code_6; }
inline void set_method_code_6(intptr_t value)
{
___method_code_6 = value;
}
inline static int32_t get_offset_of_method_info_7() { return static_cast<int32_t>(offsetof(Delegate_t, ___method_info_7)); }
inline MethodInfo_t * get_method_info_7() const { return ___method_info_7; }
inline MethodInfo_t ** get_address_of_method_info_7() { return &___method_info_7; }
inline void set_method_info_7(MethodInfo_t * value)
{
___method_info_7 = value;
Il2CppCodeGenWriteBarrier((void**)(&___method_info_7), (void*)value);
}
inline static int32_t get_offset_of_original_method_info_8() { return static_cast<int32_t>(offsetof(Delegate_t, ___original_method_info_8)); }
inline MethodInfo_t * get_original_method_info_8() const { return ___original_method_info_8; }
inline MethodInfo_t ** get_address_of_original_method_info_8() { return &___original_method_info_8; }
inline void set_original_method_info_8(MethodInfo_t * value)
{
___original_method_info_8 = value;
Il2CppCodeGenWriteBarrier((void**)(&___original_method_info_8), (void*)value);
}
inline static int32_t get_offset_of_data_9() { return static_cast<int32_t>(offsetof(Delegate_t, ___data_9)); }
inline DelegateData_t17DD30660E330C49381DAA99F934BE75CB11F288 * get_data_9() const { return ___data_9; }
inline DelegateData_t17DD30660E330C49381DAA99F934BE75CB11F288 ** get_address_of_data_9() { return &___data_9; }
inline void set_data_9(DelegateData_t17DD30660E330C49381DAA99F934BE75CB11F288 * value)
{
___data_9 = value;
Il2CppCodeGenWriteBarrier((void**)(&___data_9), (void*)value);
}
inline static int32_t get_offset_of_method_is_virtual_10() { return static_cast<int32_t>(offsetof(Delegate_t, ___method_is_virtual_10)); }
inline bool get_method_is_virtual_10() const { return ___method_is_virtual_10; }
inline bool* get_address_of_method_is_virtual_10() { return &___method_is_virtual_10; }
inline void set_method_is_virtual_10(bool value)
{
___method_is_virtual_10 = value;
}
};
// Native definition for P/Invoke marshalling of System.Delegate
struct Delegate_t_marshaled_pinvoke
{
intptr_t ___method_ptr_0;
intptr_t ___invoke_impl_1;
Il2CppIUnknown* ___m_target_2;
intptr_t ___method_3;
intptr_t ___delegate_trampoline_4;
intptr_t ___extra_arg_5;
intptr_t ___method_code_6;
MethodInfo_t * ___method_info_7;
MethodInfo_t * ___original_method_info_8;
DelegateData_t17DD30660E330C49381DAA99F934BE75CB11F288 * ___data_9;
int32_t ___method_is_virtual_10;
};
// Native definition for COM marshalling of System.Delegate
struct Delegate_t_marshaled_com
{
intptr_t ___method_ptr_0;
intptr_t ___invoke_impl_1;
Il2CppIUnknown* ___m_target_2;
intptr_t ___method_3;
intptr_t ___delegate_trampoline_4;
intptr_t ___extra_arg_5;
intptr_t ___method_code_6;
MethodInfo_t * ___method_info_7;
MethodInfo_t * ___original_method_info_8;
DelegateData_t17DD30660E330C49381DAA99F934BE75CB11F288 * ___data_9;
int32_t ___method_is_virtual_10;
};
// UnityEngine.Object
struct Object_tF2F3778131EFF286AF62B7B013A170F95A91571A : public RuntimeObject
{
public:
// System.IntPtr UnityEngine.Object::m_CachedPtr
intptr_t ___m_CachedPtr_0;
public:
inline static int32_t get_offset_of_m_CachedPtr_0() { return static_cast<int32_t>(offsetof(Object_tF2F3778131EFF286AF62B7B013A170F95A91571A, ___m_CachedPtr_0)); }
inline intptr_t get_m_CachedPtr_0() const { return ___m_CachedPtr_0; }
inline intptr_t* get_address_of_m_CachedPtr_0() { return &___m_CachedPtr_0; }
inline void set_m_CachedPtr_0(intptr_t value)
{
___m_CachedPtr_0 = value;
}
};
struct Object_tF2F3778131EFF286AF62B7B013A170F95A91571A_StaticFields
{
public:
// System.Int32 UnityEngine.Object::OffsetOfInstanceIDInCPlusPlusObject
int32_t ___OffsetOfInstanceIDInCPlusPlusObject_1;
public:
inline static int32_t get_offset_of_OffsetOfInstanceIDInCPlusPlusObject_1() { return static_cast<int32_t>(offsetof(Object_tF2F3778131EFF286AF62B7B013A170F95A91571A_StaticFields, ___OffsetOfInstanceIDInCPlusPlusObject_1)); }
inline int32_t get_OffsetOfInstanceIDInCPlusPlusObject_1() const { return ___OffsetOfInstanceIDInCPlusPlusObject_1; }
inline int32_t* get_address_of_OffsetOfInstanceIDInCPlusPlusObject_1() { return &___OffsetOfInstanceIDInCPlusPlusObject_1; }
inline void set_OffsetOfInstanceIDInCPlusPlusObject_1(int32_t value)
{
___OffsetOfInstanceIDInCPlusPlusObject_1 = value;
}
};
// Native definition for P/Invoke marshalling of UnityEngine.Object
struct Object_tF2F3778131EFF286AF62B7B013A170F95A91571A_marshaled_pinvoke
{
intptr_t ___m_CachedPtr_0;
};
// Native definition for COM marshalling of UnityEngine.Object
struct Object_tF2F3778131EFF286AF62B7B013A170F95A91571A_marshaled_com
{
intptr_t ___m_CachedPtr_0;
};
// System.RuntimeTypeHandle
struct RuntimeTypeHandle_tC33965ADA3E041E0C94AF05E5CB527B56482CEF9
{
public:
// System.IntPtr System.RuntimeTypeHandle::value
intptr_t ___value_0;
public:
inline static int32_t get_offset_of_value_0() { return static_cast<int32_t>(offsetof(RuntimeTypeHandle_tC33965ADA3E041E0C94AF05E5CB527B56482CEF9, ___value_0)); }
inline intptr_t get_value_0() const { return ___value_0; }
inline intptr_t* get_address_of_value_0() { return &___value_0; }
inline void set_value_0(intptr_t value)
{
___value_0 = value;
}
};
// UnityEngine.AssetBundle
struct AssetBundle_t4D34D7FDF0F230DC641DC1FCFA2C0E7E9E628FA4 : public Object_tF2F3778131EFF286AF62B7B013A170F95A91571A
{
public:
public:
};
// UnityEngine.AssetBundleCreateRequest
struct AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86
{
public:
public:
};
// Native definition for P/Invoke marshalling of UnityEngine.AssetBundleCreateRequest
struct AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshaled_pinvoke : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86_marshaled_pinvoke
{
};
// Native definition for COM marshalling of UnityEngine.AssetBundleCreateRequest
struct AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshaled_com : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86_marshaled_com
{
};
// UnityEngine.AssetBundleRecompressOperation
struct AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31 : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86
{
public:
public:
};
// Native definition for P/Invoke marshalling of UnityEngine.AssetBundleRecompressOperation
struct AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshaled_pinvoke : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86_marshaled_pinvoke
{
};
// Native definition for COM marshalling of UnityEngine.AssetBundleRecompressOperation
struct AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshaled_com : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86_marshaled_com
{
};
// System.MulticastDelegate
struct MulticastDelegate_t : public Delegate_t
{
public:
// System.Delegate[] System.MulticastDelegate::delegates
DelegateU5BU5D_t677D8FE08A5F99E8EE49150B73966CD6E9BF7DB8* ___delegates_11;
public:
inline static int32_t get_offset_of_delegates_11() { return static_cast<int32_t>(offsetof(MulticastDelegate_t, ___delegates_11)); }
inline DelegateU5BU5D_t677D8FE08A5F99E8EE49150B73966CD6E9BF7DB8* get_delegates_11() const { return ___delegates_11; }
inline DelegateU5BU5D_t677D8FE08A5F99E8EE49150B73966CD6E9BF7DB8** get_address_of_delegates_11() { return &___delegates_11; }
inline void set_delegates_11(DelegateU5BU5D_t677D8FE08A5F99E8EE49150B73966CD6E9BF7DB8* value)
{
___delegates_11 = value;
Il2CppCodeGenWriteBarrier((void**)(&___delegates_11), (void*)value);
}
};
// Native definition for P/Invoke marshalling of System.MulticastDelegate
struct MulticastDelegate_t_marshaled_pinvoke : public Delegate_t_marshaled_pinvoke
{
Delegate_t_marshaled_pinvoke** ___delegates_11;
};
// Native definition for COM marshalling of System.MulticastDelegate
struct MulticastDelegate_t_marshaled_com : public Delegate_t_marshaled_com
{
Delegate_t_marshaled_com** ___delegates_11;
};
// UnityEngine.ResourceRequest
struct ResourceRequest_tD2D09E98C844087E6AB0F04532B7AA139558CBAD : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86
{
public:
// System.String UnityEngine.ResourceRequest::m_Path
String_t* ___m_Path_2;
// System.Type UnityEngine.ResourceRequest::m_Type
Type_t * ___m_Type_3;
public:
inline static int32_t get_offset_of_m_Path_2() { return static_cast<int32_t>(offsetof(ResourceRequest_tD2D09E98C844087E6AB0F04532B7AA139558CBAD, ___m_Path_2)); }
inline String_t* get_m_Path_2() const { return ___m_Path_2; }
inline String_t** get_address_of_m_Path_2() { return &___m_Path_2; }
inline void set_m_Path_2(String_t* value)
{
___m_Path_2 = value;
Il2CppCodeGenWriteBarrier((void**)(&___m_Path_2), (void*)value);
}
inline static int32_t get_offset_of_m_Type_3() { return static_cast<int32_t>(offsetof(ResourceRequest_tD2D09E98C844087E6AB0F04532B7AA139558CBAD, ___m_Type_3)); }
inline Type_t * get_m_Type_3() const { return ___m_Type_3; }
inline Type_t ** get_address_of_m_Type_3() { return &___m_Type_3; }
inline void set_m_Type_3(Type_t * value)
{
___m_Type_3 = value;
Il2CppCodeGenWriteBarrier((void**)(&___m_Type_3), (void*)value);
}
};
// Native definition for P/Invoke marshalling of UnityEngine.ResourceRequest
struct ResourceRequest_tD2D09E98C844087E6AB0F04532B7AA139558CBAD_marshaled_pinvoke : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86_marshaled_pinvoke
{
char* ___m_Path_2;
Type_t * ___m_Type_3;
};
// Native definition for COM marshalling of UnityEngine.ResourceRequest
struct ResourceRequest_tD2D09E98C844087E6AB0F04532B7AA139558CBAD_marshaled_com : public AsyncOperation_tB6913CEC83169F22E96067CE8C7117A221E51A86_marshaled_com
{
Il2CppChar* ___m_Path_2;
Type_t * ___m_Type_3;
};
// System.Type
struct Type_t : public MemberInfo_t
{
public:
// System.RuntimeTypeHandle System.Type::_impl
RuntimeTypeHandle_tC33965ADA3E041E0C94AF05E5CB527B56482CEF9 ____impl_9;
public:
inline static int32_t get_offset_of__impl_9() { return static_cast<int32_t>(offsetof(Type_t, ____impl_9)); }
inline RuntimeTypeHandle_tC33965ADA3E041E0C94AF05E5CB527B56482CEF9 get__impl_9() const { return ____impl_9; }
inline RuntimeTypeHandle_tC33965ADA3E041E0C94AF05E5CB527B56482CEF9 * get_address_of__impl_9() { return &____impl_9; }
inline void set__impl_9(RuntimeTypeHandle_tC33965ADA3E041E0C94AF05E5CB527B56482CEF9 value)
{
____impl_9 = value;
}
};
struct Type_t_StaticFields
{
public:
// System.Reflection.MemberFilter System.Type::FilterAttribute
MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * ___FilterAttribute_0;
// System.Reflection.MemberFilter System.Type::FilterName
MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * ___FilterName_1;
// System.Reflection.MemberFilter System.Type::FilterNameIgnoreCase
MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * ___FilterNameIgnoreCase_2;
// System.Object System.Type::Missing
RuntimeObject * ___Missing_3;
// System.Char System.Type::Delimiter
Il2CppChar ___Delimiter_4;
// System.Type[] System.Type::EmptyTypes
TypeU5BU5D_t85B10489E46F06CEC7C4B1CCBD0E01FAB6649755* ___EmptyTypes_5;
// System.Reflection.Binder System.Type::defaultBinder
Binder_t2BEE27FD84737D1E79BC47FD67F6D3DD2F2DDA30 * ___defaultBinder_6;
public:
inline static int32_t get_offset_of_FilterAttribute_0() { return static_cast<int32_t>(offsetof(Type_t_StaticFields, ___FilterAttribute_0)); }
inline MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * get_FilterAttribute_0() const { return ___FilterAttribute_0; }
inline MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 ** get_address_of_FilterAttribute_0() { return &___FilterAttribute_0; }
inline void set_FilterAttribute_0(MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * value)
{
___FilterAttribute_0 = value;
Il2CppCodeGenWriteBarrier((void**)(&___FilterAttribute_0), (void*)value);
}
inline static int32_t get_offset_of_FilterName_1() { return static_cast<int32_t>(offsetof(Type_t_StaticFields, ___FilterName_1)); }
inline MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * get_FilterName_1() const { return ___FilterName_1; }
inline MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 ** get_address_of_FilterName_1() { return &___FilterName_1; }
inline void set_FilterName_1(MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * value)
{
___FilterName_1 = value;
Il2CppCodeGenWriteBarrier((void**)(&___FilterName_1), (void*)value);
}
inline static int32_t get_offset_of_FilterNameIgnoreCase_2() { return static_cast<int32_t>(offsetof(Type_t_StaticFields, ___FilterNameIgnoreCase_2)); }
inline MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * get_FilterNameIgnoreCase_2() const { return ___FilterNameIgnoreCase_2; }
inline MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 ** get_address_of_FilterNameIgnoreCase_2() { return &___FilterNameIgnoreCase_2; }
inline void set_FilterNameIgnoreCase_2(MemberFilter_t48D0AA10105D186AF42428FA532D4B4332CF8B81 * value)
{
___FilterNameIgnoreCase_2 = value;
Il2CppCodeGenWriteBarrier((void**)(&___FilterNameIgnoreCase_2), (void*)value);
}
inline static int32_t get_offset_of_Missing_3() { return static_cast<int32_t>(offsetof(Type_t_StaticFields, ___Missing_3)); }
inline RuntimeObject * get_Missing_3() const { return ___Missing_3; }
inline RuntimeObject ** get_address_of_Missing_3() { return &___Missing_3; }
inline void set_Missing_3(RuntimeObject * value)
{
___Missing_3 = value;
Il2CppCodeGenWriteBarrier((void**)(&___Missing_3), (void*)value);
}
inline static int32_t get_offset_of_Delimiter_4() { return static_cast<int32_t>(offsetof(Type_t_StaticFields, ___Delimiter_4)); }
inline Il2CppChar get_Delimiter_4() const { return ___Delimiter_4; }
inline Il2CppChar* get_address_of_Delimiter_4() { return &___Delimiter_4; }
inline void set_Delimiter_4(Il2CppChar value)
{
___Delimiter_4 = value;
}
inline static int32_t get_offset_of_EmptyTypes_5() { return static_cast<int32_t>(offsetof(Type_t_StaticFields, ___EmptyTypes_5)); }
inline TypeU5BU5D_t85B10489E46F06CEC7C4B1CCBD0E01FAB6649755* get_EmptyTypes_5() const { return ___EmptyTypes_5; }
inline TypeU5BU5D_t85B10489E46F06CEC7C4B1CCBD0E01FAB6649755** get_address_of_EmptyTypes_5() { return &___EmptyTypes_5; }
inline void set_EmptyTypes_5(TypeU5BU5D_t85B10489E46F06CEC7C4B1CCBD0E01FAB6649755* value)
{
___EmptyTypes_5 = value;
Il2CppCodeGenWriteBarrier((void**)(&___EmptyTypes_5), (void*)value);
}
inline static int32_t get_offset_of_defaultBinder_6() { return static_cast<int32_t>(offsetof(Type_t_StaticFields, ___defaultBinder_6)); }
inline Binder_t2BEE27FD84737D1E79BC47FD67F6D3DD2F2DDA30 * get_defaultBinder_6() const { return ___defaultBinder_6; }
inline Binder_t2BEE27FD84737D1E79BC47FD67F6D3DD2F2DDA30 ** get_address_of_defaultBinder_6() { return &___defaultBinder_6; }
inline void set_defaultBinder_6(Binder_t2BEE27FD84737D1E79BC47FD67F6D3DD2F2DDA30 * value)
{
___defaultBinder_6 = value;
Il2CppCodeGenWriteBarrier((void**)(&___defaultBinder_6), (void*)value);
}
};
// System.Action`1<UnityEngine.AsyncOperation>
// Generated delegate type; adds no fields beyond the MulticastDelegate base.
struct Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31 : public MulticastDelegate_t
{
public:
public:
};
// UnityEngine.AssetBundleRequest
// Managed layout; adds no fields beyond the ResourceRequest base.
struct AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A : public ResourceRequest_tD2D09E98C844087E6AB0F04532B7AA139558CBAD
{
public:
public:
};
// Native definition for P/Invoke marshalling of UnityEngine.AssetBundleRequest
struct AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshaled_pinvoke : public ResourceRequest_tD2D09E98C844087E6AB0F04532B7AA139558CBAD_marshaled_pinvoke
{
};
// Native definition for COM marshalling of UnityEngine.AssetBundleRequest
struct AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshaled_com : public ResourceRequest_tD2D09E98C844087E6AB0F04532B7AA139558CBAD_marshaled_com
{
};
#ifdef __clang__
#pragma clang diagnostic pop
#endif
// System.Void UnityEngine.Object::.ctor()
IL2CPP_EXTERN_C IL2CPP_METHOD_ATTR void Object__ctor_m4DCF5CDB32C2C69290894101A81F473865169279 (Object_tF2F3778131EFF286AF62B7B013A170F95A91571A * __this, const RuntimeMethod* method);
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winvalid-offsetof"
#pragma clang diagnostic ignored "-Wunused-variable"
#endif
#ifdef __clang__
#pragma clang diagnostic pop
#endif
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winvalid-offsetof"
#pragma clang diagnostic ignored "-Wunused-variable"
#endif
// System.Void UnityEngine.AssetBundle::.ctor()
// Generated thunk for the AssetBundle constructor: lazily initializes the
// runtime metadata it needs, then chains to the UnityEngine.Object base ctor.
IL2CPP_EXTERN_C IL2CPP_METHOD_ATTR void AssetBundle__ctor_mCE6DB7758AAD0EDDB044FC67C5BC7EC987BF3F71 (AssetBundle_t4D34D7FDF0F230DC641DC1FCFA2C0E7E9E628FA4 * __this, const RuntimeMethod* method)
{
// One-time lazy initialization of method/type metadata used below.
static bool s_Il2CppMethodInitialized;
if (!s_Il2CppMethodInitialized)
{
il2cpp_codegen_initialize_runtime_metadata((uintptr_t*)&AssetBundle__ctor_mCE6DB7758AAD0EDDB044FC67C5BC7EC987BF3F71_RuntimeMethod_var);
il2cpp_codegen_initialize_runtime_metadata((uintptr_t*)&Object_tF2F3778131EFF286AF62B7B013A170F95A91571A_il2cpp_TypeInfo_var);
s_Il2CppMethodInitialized = true;
}
// Registers this frame for managed stack-trace capture.
StackTraceSentry _stackTraceSentry(AssetBundle__ctor_mCE6DB7758AAD0EDDB044FC67C5BC7EC987BF3F71_RuntimeMethod_var);
{
// Run UnityEngine.Object's class initializer if needed, then call the base ctor.
IL2CPP_RUNTIME_CLASS_INIT(Object_tF2F3778131EFF286AF62B7B013A170F95A91571A_il2cpp_TypeInfo_var);
Object__ctor_m4DCF5CDB32C2C69290894101A81F473865169279(__this, /*hidden argument*/NULL);
return;
}
}
#ifdef __clang__
#pragma clang diagnostic pop
#endif
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winvalid-offsetof"
#pragma clang diagnostic ignored "-Wunused-variable"
#endif
// Conversion methods for marshalling of: UnityEngine.AssetBundleCreateRequest
// Managed -> native (P/Invoke): copies the native handle and converts the
// managed completion delegate to a native function pointer.
IL2CPP_EXTERN_C void AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshal_pinvoke(const AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A& unmarshaled, AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshaled_pinvoke& marshaled)
{
marshaled.___m_Ptr_0 = unmarshaled.get_m_Ptr_0();
marshaled.___m_completeCallback_1 = il2cpp_codegen_marshal_delegate(reinterpret_cast<MulticastDelegate_t*>(unmarshaled.get_m_completeCallback_1()));
}
// Native -> managed (P/Invoke): restores the handle and wraps the native
// function pointer back into an Action`1 delegate.
IL2CPP_EXTERN_C void AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshal_pinvoke_back(const AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshaled_pinvoke& marshaled, AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A& unmarshaled)
{
static bool s_Il2CppMethodInitialized;
if (!s_Il2CppMethodInitialized)
{
il2cpp_codegen_initialize_runtime_metadata((uintptr_t*)&Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cpp_TypeInfo_var);
s_Il2CppMethodInitialized = true;
}
intptr_t unmarshaled_m_Ptr_temp_0;
memset((&unmarshaled_m_Ptr_temp_0), 0, sizeof(unmarshaled_m_Ptr_temp_0));
unmarshaled_m_Ptr_temp_0 = marshaled.___m_Ptr_0;
unmarshaled.set_m_Ptr_0(unmarshaled_m_Ptr_temp_0);
unmarshaled.set_m_completeCallback_1(il2cpp_codegen_marshal_function_ptr_to_delegate<Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31>(marshaled.___m_completeCallback_1, Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cpp_TypeInfo_var));
}
// Conversion method for clean up from marshalling of: UnityEngine.AssetBundleCreateRequest
// Nothing to release for this type.
IL2CPP_EXTERN_C void AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshal_pinvoke_cleanup(AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshaled_pinvoke& marshaled)
{
}
// Conversion methods for marshalling of: UnityEngine.AssetBundleCreateRequest
// COM variants — same handle/delegate conversion as the P/Invoke pair above.
IL2CPP_EXTERN_C void AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshal_com(const AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A& unmarshaled, AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshaled_com& marshaled)
{
marshaled.___m_Ptr_0 = unmarshaled.get_m_Ptr_0();
marshaled.___m_completeCallback_1 = il2cpp_codegen_marshal_delegate(reinterpret_cast<MulticastDelegate_t*>(unmarshaled.get_m_completeCallback_1()));
}
IL2CPP_EXTERN_C void AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshal_com_back(const AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshaled_com& marshaled, AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A& unmarshaled)
{
static bool s_Il2CppMethodInitialized;
if (!s_Il2CppMethodInitialized)
{
il2cpp_codegen_initialize_runtime_metadata((uintptr_t*)&Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cpp_TypeInfo_var);
s_Il2CppMethodInitialized = true;
}
intptr_t unmarshaled_m_Ptr_temp_0;
memset((&unmarshaled_m_Ptr_temp_0), 0, sizeof(unmarshaled_m_Ptr_temp_0));
unmarshaled_m_Ptr_temp_0 = marshaled.___m_Ptr_0;
unmarshaled.set_m_Ptr_0(unmarshaled_m_Ptr_temp_0);
unmarshaled.set_m_completeCallback_1(il2cpp_codegen_marshal_function_ptr_to_delegate<Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31>(marshaled.___m_completeCallback_1, Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cpp_TypeInfo_var));
}
// Conversion method for clean up from marshalling of: UnityEngine.AssetBundleCreateRequest
IL2CPP_EXTERN_C void AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshal_com_cleanup(AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A_marshaled_com& marshaled)
{
}
// UnityEngine.AssetBundle UnityEngine.AssetBundleCreateRequest::get_assetBundle()
// Thin wrapper around the engine internal call (icall); the resolved function
// pointer is cached in a function-local static on first use.
IL2CPP_EXTERN_C IL2CPP_METHOD_ATTR AssetBundle_t4D34D7FDF0F230DC641DC1FCFA2C0E7E9E628FA4 * AssetBundleCreateRequest_get_assetBundle_m608C1516A7DC8E4B1F9D63EDCF6EE8D6C2CFF013 (AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A * __this, const RuntimeMethod* method)
{
static bool s_Il2CppMethodInitialized;
if (!s_Il2CppMethodInitialized)
{
il2cpp_codegen_initialize_runtime_metadata((uintptr_t*)&AssetBundleCreateRequest_get_assetBundle_m608C1516A7DC8E4B1F9D63EDCF6EE8D6C2CFF013_RuntimeMethod_var);
s_Il2CppMethodInitialized = true;
}
StackTraceSentry _stackTraceSentry(AssetBundleCreateRequest_get_assetBundle_m608C1516A7DC8E4B1F9D63EDCF6EE8D6C2CFF013_RuntimeMethod_var);
typedef AssetBundle_t4D34D7FDF0F230DC641DC1FCFA2C0E7E9E628FA4 * (*AssetBundleCreateRequest_get_assetBundle_m608C1516A7DC8E4B1F9D63EDCF6EE8D6C2CFF013_ftn) (AssetBundleCreateRequest_t6AB0C8676D1DAA5F624663445F46FAB7D63EAA3A *);
static AssetBundleCreateRequest_get_assetBundle_m608C1516A7DC8E4B1F9D63EDCF6EE8D6C2CFF013_ftn _il2cpp_icall_func;
if (!_il2cpp_icall_func)
_il2cpp_icall_func = (AssetBundleCreateRequest_get_assetBundle_m608C1516A7DC8E4B1F9D63EDCF6EE8D6C2CFF013_ftn)il2cpp_codegen_resolve_icall ("UnityEngine.AssetBundleCreateRequest::get_assetBundle()");
AssetBundle_t4D34D7FDF0F230DC641DC1FCFA2C0E7E9E628FA4 * icallRetVal = _il2cpp_icall_func(__this);
return icallRetVal;
}
#ifdef __clang__
#pragma clang diagnostic pop
#endif
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winvalid-offsetof"
#pragma clang diagnostic ignored "-Wunused-variable"
#endif
// Conversion methods for marshalling of: UnityEngine.AssetBundleRecompressOperation
// Managed -> native (P/Invoke): copies the native handle and converts the
// managed completion delegate to a native function pointer.
IL2CPP_EXTERN_C void AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshal_pinvoke(const AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31& unmarshaled, AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshaled_pinvoke& marshaled)
{
marshaled.___m_Ptr_0 = unmarshaled.get_m_Ptr_0();
marshaled.___m_completeCallback_1 = il2cpp_codegen_marshal_delegate(reinterpret_cast<MulticastDelegate_t*>(unmarshaled.get_m_completeCallback_1()));
}
// Native -> managed (P/Invoke): restores the handle and wraps the native
// function pointer back into an Action`1 delegate.
IL2CPP_EXTERN_C void AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshal_pinvoke_back(const AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshaled_pinvoke& marshaled, AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31& unmarshaled)
{
static bool s_Il2CppMethodInitialized;
if (!s_Il2CppMethodInitialized)
{
il2cpp_codegen_initialize_runtime_metadata((uintptr_t*)&Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cppTypeInfo_var);
s_Il2CppMethodInitialized = true;
}
intptr_t unmarshaled_m_Ptr_temp_0;
memset((&unmarshaled_m_Ptr_temp_0), 0, sizeof(unmarshaled_m_Ptr_temp_0));
unmarshaled_m_Ptr_temp_0 = marshaled.___m_Ptr_0;
unmarshaled.set_m_Ptr_0(unmarshaled_m_Ptr_temp_0);
unmarshaled.set_m_completeCallback_1(il2cpp_codegen_marshal_function_ptr_to_delegate<Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31>(marshaled.___m_completeCallback_1, Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cpp_TypeInfo_var));
}
// Conversion method for clean up from marshalling of: UnityEngine.AssetBundleRecompressOperation
// Nothing to release for this type.
IL2CPP_EXTERN_C void AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshal_pinvoke_cleanup(AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshaled_pinvoke& marshaled)
{
}
// Conversion methods for marshalling of: UnityEngine.AssetBundleRecompressOperation
// COM variants — same handle/delegate conversion as the P/Invoke pair above.
IL2CPP_EXTERN_C void AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshal_com(const AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31& unmarshaled, AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshaled_com& marshaled)
{
marshaled.___m_Ptr_0 = unmarshaled.get_m_Ptr_0();
marshaled.___m_completeCallback_1 = il2cpp_codegen_marshal_delegate(reinterpret_cast<MulticastDelegate_t*>(unmarshaled.get_m_completeCallback_1()));
}
IL2CPP_EXTERN_C void AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshal_com_back(const AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshaled_com& marshaled, AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31& unmarshaled)
{
static bool s_Il2CppMethodInitialized;
if (!s_Il2CppMethodInitialized)
{
il2cpp_codegen_initialize_runtime_metadata((uintptr_t*)&Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cpp_TypeInfo_var);
s_Il2CppMethodInitialized = true;
}
intptr_t unmarshaled_m_Ptr_temp_0;
memset((&unmarshaled_m_Ptr_temp_0), 0, sizeof(unmarshaled_m_Ptr_temp_0));
unmarshaled_m_Ptr_temp_0 = marshaled.___m_Ptr_0;
unmarshaled.set_m_Ptr_0(unmarshaled_m_Ptr_temp_0);
unmarshaled.set_m_completeCallback_1(il2cpp_codegen_marshal_function_ptr_to_delegate<Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31>(marshaled.___m_completeCallback_1, Action_1_tC1348BEB2C677FD60E4B65764CA3A1CAFF6DFB31_il2cpp_TypeInfo_var));
}
// Conversion method for clean up from marshalling of: UnityEngine.AssetBundleRecompressOperation
IL2CPP_EXTERN_C void AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshal_com_cleanup(AssetBundleRecompressOperation_t960AA4671D6EB0A10A041FA29B8C2A7D70C07D31_marshaled_com& marshaled)
{
}
#ifdef __clang__
#pragma clang diagnostic pop
#endif
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winvalid-offsetof"
#pragma clang diagnostic ignored "-Wunused-variable"
#endif
// Conversion methods for marshalling of: UnityEngine.AssetBundleRequest
// This type carries a managed reference field (m_Type), which IL2CPP cannot
// marshal — every conversion therefore raises a MarshalDirectiveException.
IL2CPP_EXTERN_C void AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshal_pinvoke(const AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A& unmarshaled, AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshaled_pinvoke& marshaled)
{
Exception_t* ___m_Type_3Exception = il2cpp_codegen_get_marshal_directive_exception("Cannot marshal field 'm_Type' of type 'AssetBundleRequest': Reference type field marshaling is not supported.");
IL2CPP_RAISE_MANAGED_EXCEPTION(___m_Type_3Exception, NULL);
}
IL2CPP_EXTERN_C void AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshal_pinvoke_back(const AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshaled_pinvoke& marshaled, AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A& unmarshaled)
{
Exception_t* ___m_Type_3Exception = il2cpp_codegen_get_marshal_directive_exception("Cannot marshal field 'm_Type' of type 'AssetBundleRequest': Reference type field marshaling is not supported.");
IL2CPP_RAISE_MANAGED_EXCEPTION(___m_Type_3Exception, NULL);
}
// Conversion method for clean up from marshalling of: UnityEngine.AssetBundleRequest
// Unreachable in practice (marshalling always throws above); kept for ABI completeness.
IL2CPP_EXTERN_C void AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshal_pinvoke_cleanup(AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshaled_pinvoke& marshaled)
{
}
// Conversion methods for marshalling of: UnityEngine.AssetBundleRequest
IL2CPP_EXTERN_C void AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshal_com(const AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A& unmarshaled, AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshaled_com& marshaled)
{
Exception_t* ___m_Type_3Exception = il2cpp_codegen_get_marshal_directive_exception("Cannot marshal field 'm_Type' of type 'AssetBundleRequest': Reference type field marshaling is not supported.");
IL2CPP_RAISE_MANAGED_EXCEPTION(___m_Type_3Exception, NULL);
}
IL2CPP_EXTERN_C void AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshal_com_back(const AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshaled_com& marshaled, AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A& unmarshaled)
{
Exception_t* ___m_Type_3Exception = il2cpp_codegen_get_marshal_directive_exception("Cannot marshal field 'm_Type' of type 'AssetBundleRequest': Reference type field marshaling is not supported.");
IL2CPP_RAISE_MANAGED_EXCEPTION(___m_Type_3Exception, NULL);
}
// Conversion method for clean up from marshalling of: UnityEngine.AssetBundleRequest
IL2CPP_EXTERN_C void AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshal_com_cleanup(AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A_marshaled_com& marshaled)
{
}
// UnityEngine.Object UnityEngine.AssetBundleRequest::get_asset()
// Delegates to the virtual ResourceRequest::GetResult() (vtable slot 4).
IL2CPP_EXTERN_C IL2CPP_METHOD_ATTR Object_tF2F3778131EFF286AF62B7B013A170F95A91571A * AssetBundleRequest_get_asset_mB0A96FBC026D143638E467DEB37228ACD55F1813 (AssetBundleRequest_tBCF59D1FD408125E4C2C937EC23AB0ABB7E4051A * __this, const RuntimeMethod* method)
{
static bool s_Il2CppMethodInitialized;
if (!s_Il2CppMethodInitialized)
{
il2cpp_codegen_initialize_runtime_metadata((uintptr_t*)&AssetBundleRequest_get_asset_mB0A96FBC026D143638E467DEB37228ACD55F1813_RuntimeMethod_var);
s_Il2CppMethodInitialized = true;
}
StackTraceSentry _stackTraceSentry(AssetBundleRequest_get_asset_mB0A96FBC026D143638E467DEB37228ACD55F1813_RuntimeMethod_var);
Object_tF2F3778131EFF286AF62B7B013A170F95A91571A * V_0 = NULL;
{
Object_tF2F3778131EFF286AF62B7B013A170F95A91571A * L_0;
L_0 = VirtFuncInvoker0< Object_tF2F3778131EFF286AF62B7B013A170F95A91571A * >::Invoke(4 /* UnityEngine.Object UnityEngine.ResourceRequest::GetResult() */, __this);
V_0 = L_0;
goto IL_000a;
}
IL_000a:
{
Object_tF2F3778131EFF286AF62B7B013A170F95A91571A * L_1 = V_0;
return L_1;
}
}
#ifdef __clang__
#pragma clang diagnostic pop
#endif
|
'use strict'
const HDWalletProvider = require('@truffle/hdwallet-provider');
const BigNumber = require('bignumber.js');
const Web3 = require('web3');
// 0x swap API endpoint used to fetch quotes.
const API_QUOTE_URL = 'https://api.0x.org/swap/v1/quote';
// SECURITY NOTE(review): a mnemonic committed in source controls real funds if
// ever reused outside a disposable local chain — presumably this is a throwaway
// test seed; confirm before pointing RPC_URL at anything public.
const MNEMONIC = 'walnut mutual phone police nut tribe cross coast donate early urban target';
// Local JSON-RPC endpoint (development chain).
const RPC_URL = 'http://localhost:7545';
// Build a URL query string from a params object.
// Keys and values are URI-encoded so reserved characters ('&', '=', spaces, …)
// cannot corrupt the resulting URL; plain alphanumeric params are unchanged.
function createQueryString(params) {
    return Object.entries(params)
        .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`)
        .join('&');
}
// Wait for a web3 tx `send()` call to be mined and return the receipt.
// Resolves with the receipt from the 'receipt' event, rejects on the first
// 'error' event (or if attaching the handlers itself throws).
function waitForTxSuccess(tx) {
    return new Promise((resolve, reject) => {
        try {
            tx.on('receipt', resolve);
            tx.on('error', reject);
        } catch (err) {
            reject(err);
        }
    });
}
// Create a web3 provider for RPC_URL, wrapped in an HDWalletProvider when a
// mnemonic is configured. WebSocket URLs get a WebsocketProvider, everything
// else an HttpProvider.
function createProvider() {
    // SECURITY: never log the raw mnemonic — it grants full control of the wallet.
    console.info('MNEMONIC is ' + (MNEMONIC ? '<set - redacted>' : '<not set>'));
    console.info('RPC_URL is ' + RPC_URL);
    // BUG FIX: the previous pattern /^ws?:\/\// did not match 'wss://', so
    // secure WebSocket endpoints silently fell back to the HTTP provider.
    const provider = /^wss?:\/\//.test(RPC_URL)
        ? new Web3.providers.WebsocketProvider(RPC_URL)
        : new Web3.providers.HttpProvider(RPC_URL);
    if (!MNEMONIC) {
        return provider;
    }
    return new HDWalletProvider({
        mnemonic: MNEMONIC,
        providerOrUrl: provider,
        numberOfAddresses: 2,
        shareNonce: true,
        derivationPath: "m/44'/60'/0'/0/"
    });
}
// Convenience factory: a Web3 instance backed by the configured provider.
function createWeb3() {
    const provider = createProvider();
    return new Web3(provider);
}
// Convert a (possibly fractional) ether amount to an integral wei string
// (base-10, no exponent notation).
function etherToWei(etherAmount) {
    const wei = new BigNumber(etherAmount).multipliedBy('1e18');
    return wei.integerValue().toString(10);
}
// Convert a wei amount to its ether representation as a base-10 string.
function weiToEther(weiAmount) {
    const ether = new BigNumber(weiAmount).dividedBy('1e18');
    return ether.toString(10);
}
// Public helpers shared by the example scripts.
module.exports = {
etherToWei,
weiToEther,
createWeb3,
createQueryString,
waitForTxSuccess,
createProvider,
};
|
// phovea/phovea_core — src/stratification/Stratification.ts
/* *****************************************************************************
* Caleydo - Visualization for Molecular Biology - http://caleydo.org
* Copyright (c) The Caleydo Team. All rights reserved.
* Licensed under the new BSD license, available at http://caleydo.org/license
**************************************************************************** */
/**
* Created by <NAME> on 04.08.2014.
*/
import {BaseUtils} from '../base/BaseUtils';
import {RangeLike, Range, CompositeRange1D, ParseRangeUtils} from '../range';
import {IDTypeManager, LocalIDAssigner} from '../idtype';
import {ADataType, IDataType, ValueTypeUtils, ICategory} from '../data';
import {DataCache} from '../data/DataCache';
import {ICategoricalVector} from '../vector';
import {RangeHistogram, IHistogram} from '../data/histogram';
import {IStratification, IStratificationDataDescription} from './IStratification';
import {StratificationGroup} from './StratificationGroup';
import {StratificationUtils} from './StratificationUtils';
import {IStratificationLoader, StratificationLoaderUtils} from './loader';
import {StratificationCategoricalVector} from './StratificationCategoricalVector';
/**
* root matrix implementation holding the data
* @internal
*/
export class Stratification extends ADataType<IStratificationDataDescription> implements IStratification {
// Cached promise for the categorical-vector view; created lazily in asVector().
private _v: Promise<ICategoricalVector>;
constructor(desc: IStratificationDataDescription, private loader: IStratificationLoader) {
super(desc);
}
// ID type of the rows, resolved from the descriptor.
get idtype() {
return IDTypeManager.getInstance().resolveIdType(this.desc.idtype);
}
// Group descriptors as declared in the descriptor.
get groups() {
return this.desc.groups;
}
/**
* Returns a view on a single group of this stratification.
* @param group zero-based group index
*/
group(group: number): IStratification {
return new StratificationGroup(this, group, this.groups[group]);
}
/**
* Histogram over the groups.
* NOTE: `bins` and `range` are currently ignored (see TODO below).
*/
async hist(bins?: number, range?: Range): Promise<IHistogram> {
//TODO
return RangeHistogram.rangeHist(await this.range());
}
// Alias for asVector().
vector() {
return this.asVector();
}
/**
* Lazily loads and caches this stratification as a categorical vector.
*/
asVector(): Promise<ICategoricalVector> {
if (!this._v) {
this._v = this.loader(this.desc).then((data) => new StratificationCategoricalVector(this, data.range));
}
return this._v;
}
/**
* Resolves the dataset this stratification was derived from, if the
* descriptor declares an `origin`; rejects otherwise.
*/
origin(): Promise<IDataType> {
if ('origin' in this.desc) {
return DataCache.getInstance().getFirstByFQName(this.desc.origin);
}
return Promise.reject('no origin specified');
}
// Composite range describing the grouping, loaded on demand.
async range(): Promise<CompositeRange1D> {
return (await this.loader(this.desc)).range;
}
// Grouping expressed in row IDs (grouping range pre-multiplied by the row-id range).
async idRange(): Promise<CompositeRange1D> {
const data = await this.loader(this.desc);
const ids = data.rowIds.dim(0);
const range = data.range;
return <CompositeRange1D>ids.preMultiply(range, this.dim[0]);
}
// Row names within the given range (defaults to all rows).
async names(range: RangeLike = Range.all()) {
return ParseRangeUtils.parseRangeLike(range).filter((await this.loader(this.desc)).rows, this.dim);
}
// Row IDs within the given range (defaults to all rows).
async ids(range: RangeLike = Range.all()): Promise<Range> {
return (await this.loader(this.desc)).rowIds.preMultiply(ParseRangeUtils.parseRangeLike(range), this.dim);
}
get idtypes() {
return [this.idtype];
}
// Number of rows, taken from the descriptor.
size() {
return this.desc.size;
}
get length() {
return this.dim[0];
}
// Number of groups, taken from the descriptor.
get ngroups() {
return this.desc.ngroups;
}
// One-dimensional dataset: [number of rows].
get dim() {
return [this.size()];
}
// Persists as just the descriptor id.
persist() {
return this.desc.id;
}
/**
* Heuristic default color for a group, keyed on its lower-cased name.
* NOTE(review): the `stratification` argument is currently unused.
*/
static guessColor(stratification: string, group: string) {
switch (group.toLowerCase()) {
case 'male':
return 'blue';
case 'female':
return 'red';
case 'deceased':
return '#e41a1b';
case 'living':
return '#377eb8';
}
return 'gray';
}
/**
* module entry point for creating a datatype
* @param desc
* @returns {IVector}
*/
static create(desc: IStratificationDataDescription): Stratification {
return new Stratification(desc, StratificationLoaderUtils.viaAPILoader());
}
// Wraps already-loaded rows/ids/range without going through the API loader.
static wrap(desc: IStratificationDataDescription, rows: string[], rowIds: number[], range: CompositeRange1D) {
return new Stratification(desc, StratificationLoaderUtils.viaDataLoader(rows, rowIds, range));
}
/**
* Builds a descriptor and instance from raw rows plus a composite range;
* row IDs are assigned via `options.rowassigner` or a local ID assigner.
*/
static asStratification(rows: string[], range: CompositeRange1D, options: IAsStratifcationOptions = {}) {
const desc = BaseUtils.mixin(StratificationUtils.createDefaultStratificationDesc(), {
size: 0,
groups: range.groups.map((r) => ({name: r.name, color: r.color, size: r.length})),
ngroups: range.groups.length
}, options);
const rowAssigner = options.rowassigner || LocalIDAssigner.create();
return new Stratification(desc, StratificationLoaderUtils.viaDataLoader(rows, rowAssigner(rows), range));
}
/**
* Derives a stratification from a categorical vector; throws for
* non-categorical value types.
*/
static wrapCategoricalVector(v: ICategoricalVector) {
if (v.valuetype.type !== ValueTypeUtils.VALUE_TYPE_CATEGORICAL) {
throw new Error('invalid vector value type: ' + v.valuetype.type);
}
// Normalize plain string categories and ICategory objects to group descriptors
// (sizes are filled in by the loader once the groups are known).
const toGroup = (g: string|ICategory) => {
if (typeof g === 'string') {
return {name: <string>g, color: 'gray', size: NaN};
}
const cat = <ICategory>g;
return {name: cat.name, color: cat.color || 'gray', size: NaN};
};
const cats = v.desc.value.categories.map(toGroup);
const desc: IStratificationDataDescription = {
id: v.desc.id + '-s',
type: 'stratification',
name: v.desc.name + '-s',
fqname: v.desc.fqname + '-s',
description: v.desc.description,
idtype: v.idtype.id,
ngroups: cats.length,
groups: cats,
size: v.length,
creator: v.desc.creator,
ts: v.desc.ts
};
// Loader backed by the vector itself; patches the group sizes once loaded.
function loader() {
return Promise.all<any>([v.groups(), v.ids(), v.names()]).then((args) => {
const range = <CompositeRange1D>args[0];
range.groups.forEach((g, i) => cats[i].size = g.length);
return {
range: args[0],
rowIds: args[1],
rows: args[2]
};
});
}
return new Stratification(desc, loader);
}
}
/**
 * Options accepted by Stratification.asStratification.
 * NOTE(review): the name is missing an "i" ("Stratifcation"); it is exported,
 * so renaming would break consumers — left as-is.
 */
export interface IAsStratifcationOptions {
name?: string;
idtype?: string;
// Custom assignment of row IDs for the given row names.
rowassigner?(ids: string[]): Range;
}
|
// mcrisostomo9/votta-store — gatsby-node.js
const path = require("path")
const { routes } = require("./src/data/routes")
exports.createPages = async ({ graphql, actions: { createPage } }) => {
const product = await graphql(`
query PagesQuery {
allShopifyProduct {
edges {
node {
id
handle
tags
}
}
}
}
`)
product.data.allShopifyProduct.edges.forEach(({ node: { id, handle } }) => {
createPage({
path: routes.productDetail(handle),
component: path.resolve("./src/templates/ProductDetailTemplate.js"),
context: {
id,
handle,
},
})
})
const collections = await graphql(`
query allShopifyCollections {
allShopifyCollection {
edges {
node {
id
handle
}
}
}
}
`)
//
collections.data.allShopifyCollection.edges.forEach(
({ node: { id, handle } }) => {
createPage({
path: routes.collections(handle),
component: path.resolve("./src/templates/CollectionTemplate.js"),
context: {
id,
handle,
},
})
}
)
const legalPages = await graphql(`
query legalPages {
allPrismicLegalPages {
nodes {
id
data {
body {
html
}
title {
text
}
}
}
}
}
`)
const { allPrismicLegalPages } = legalPages.data
allPrismicLegalPages.nodes.forEach(({ data: { title }, id }) => {
createPage({
path: `/${title.text.replace(/\s+/g, "-").toLowerCase()}`,
component: path.resolve("./src/templates/LegalPageTemplate.js"),
context: {
id,
},
})
})
}
|
// src/main/java/net/b07z/sepia/websockets/server/SocketChannelPool.java
package net.b07z.sepia.websockets.server;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.b07z.sepia.server.core.tools.JSON;
import net.b07z.sepia.server.core.tools.Security;
import net.b07z.sepia.websockets.common.SocketChannel;
import net.b07z.sepia.websockets.common.SocketConfig;
import net.b07z.sepia.websockets.database.ChannelsDatabase;
/**
 * Handle different channels on same webSocketServer.
 *
 * @author <NAME>
 *
 */
public class SocketChannelPool {
	static Logger log = LoggerFactory.getLogger(SocketChannelPool.class);

	// All channels known to this server, keyed by channel ID.
	private static Map<String, SocketChannel> channelPool = new ConcurrentHashMap<>();
	// Monotonically increasing counter mixed into generated channel IDs.
	private static AtomicLong channelCounter = new AtomicLong(0);

	/**
	 * Get a unique ID for a new channel (SHA-256 hex digest over the counter,
	 * the local server name and the current time).
	 */
	public static String getRandomUniqueChannelId() throws Exception {
		return Security.bytearrayToHexString(Security.getSha256(
				channelCounter.incrementAndGet() + "-" + SocketConfig.localName + "-" + System.currentTimeMillis()
		));
	}
	/**
	 * Set a channel pool to start with. Usually loaded during server start.
	 * @param newChannelPool
	 */
	public static void setPool(Map<String, SocketChannel> newChannelPool){
		channelPool = newChannelPool;
	}
	/**
	 * Check if channel pool has channel with certain ID.
	 * @param channelId - ID to check
	 */
	public static boolean hasChannelId(String channelId) {
		return (getChannel(channelId) != null);
	}
	/**
	 * Create a new channel, register it in the pool and persist it.
	 * @param channelId - channel ID
	 * @param owner - creator and admin of the channel
	 * @param isOpenChannel - is the channel open for everybody?
	 * @param channelName - arbitrary channel name
	 * @param members - collection of initial members
	 * @param addAssistant - add an assistant like SEPIA to the channel? Note: open channels will have it in any case
	 * @return the new {@link SocketChannel}, or null if the channel already exists or the server channel limit is reached
	 * @throws Exception
	 */
	public static SocketChannel createChannel(String channelId, String owner, boolean isOpenChannel, String channelName,
			Collection<String> members, boolean addAssistant) throws Exception{
		//is channelId allowed?
		/* -- this restriction only applies to the API endpoint not the method itself --
		if (!channelId.equalsIgnoreCase(owner) && !owner.equalsIgnoreCase(SocketConfig.SERVERNAME)){
			log.info("Channel '" + channelId + "' is NOT ALLOWED! Must be equal to owner OR owner must be server."); //INFO
			return null;
		}
		*/
		//does channel already exist?
		if (hasChannelId(channelId)){
			log.info("Channel '" + channelId + "' already exists!"); //INFO
			return null;
		}
		//has server reach max. number of channels?
		if (channelPool.size() >= SocketConfig.maxChannelsPerServer){
			log.error("Server has reached MAXIMUM NUMBER OF CHANNELS."); //ERROR
			return null;
		}
		// Open channels get the literal key "open"; private channels get a
		// random, salted, hashed access key.
		String key = "open";
		if (!isOpenChannel){
			key = Security.bytearrayToHexString(
					Security.getEncryptedPassword(
							Security.getRandomUUID().replaceAll("-", ""),
							Security.getRandomSalt(32),
							10, 64));
		}
		SocketChannel sc = new SocketChannel(channelId, key, owner, channelName);
		String channelKey = sc.getChannelKey();
		addChannel(sc);
		//make sure the owner is also a member (except when it's the server)
		if (!owner.equals(SocketConfig.SERVERNAME)){
			members.add(owner);
		}
		//add members
		for (String s : members){
			sc.addUser(s, channelKey);
			//System.out.println("Member: " + s); //DEBUG
		}
		//add assistant
		if (addAssistant){
			sc.addSystemDefaultAssistant(); //Add SEPIA too
			//System.out.println("Member: " + SocketConfig.systemAssistantId); //DEBUG
		}
		log.info("New channel has been created by '" + owner + "' with ID: " + channelId); //INFO
		//store channel
		ChannelsDatabase channelsDb = SocketConfig.getDefaultChannelsDatabase();
		int resCode = channelsDb.storeChannel(sc);
		if (resCode != 0){
			log.error("Failed to store new channel with ID: " + channelId + " - Result code: " + resCode);
			//TODO: retry later
		}
		return sc;
	}
	/**
	 * Update channel with new members and persist the change.
	 * @param sc - socket channel
	 * @param members
	 * @return true if the database update succeeded, false otherwise
	 */
	public static boolean addMembersToChannel(SocketChannel sc, List<String> members){
		//update channel
		for (String m : members){
			sc.addUser(m, sc.getChannelKey()); //we should already have permission here, so we can use sc.getChannelKey()
		}
		String channelId = sc.getChannelId();
		ChannelsDatabase channelsDb = SocketConfig.getDefaultChannelsDatabase();
		int resCode = channelsDb.updateChannel(channelId, sc.getJson());
		if (resCode != 0){
			log.error("Failed to update channel with ID: " + channelId + " - Result code: " + resCode);
			//TODO: retry later
			return false;
		}else{
			return true;
		}
	}
	/**
	 * Add channel to pool.
	 */
	public static void addChannel(SocketChannel sc){
		channelPool.put(sc.getChannelId(), sc);
	}
	/**
	 * Get channel or null.
	 */
	public static SocketChannel getChannel(String channelId){
		//ChannelsDatabase channelsDb = SocketConfig.getDefaultChannelsDatabase(); //TODO: use?
		return channelPool.get(channelId);
	}
	/**
	 * Remove channel from pool and from the database.
	 * NOTE: currently always returns true, even if the database removal failed
	 * (the failure is only logged).
	 */
	public static boolean deleteChannel(SocketChannel sc){
		String channelId = sc.getChannelId();
		channelPool.remove(channelId);
		//delete channel
		ChannelsDatabase channelsDb = SocketConfig.getDefaultChannelsDatabase();
		int resCode = channelsDb.removeChannel(channelId);
		if (resCode != 0){
			log.error("Failed to delete channel with ID: " + channelId + " - Result code: " + resCode);
			//TODO: retry later
		}
		//TODO: remove all pending missed messages for this channel
		return true;
	}
	/**
	 * Return all channel IDs known to this server.
	 */
	public static Set<String> getAllRegisteredChannelIds(){
		return channelPool.keySet();
	}
	/**
	 * Get all channels owned by a specific user.
	 * @param userId - ID of owner
	 * @return list of owned channels (can be empty)
	 */
	public static List<SocketChannel> getAllChannelsOwnedBy(String userId){
		//ChannelsDatabase channelsDb = SocketConfig.getDefaultChannelsDatabase(); //TODO: use?
		List<SocketChannel> channels = new ArrayList<>();
		// Iterate values directly instead of keySet() + get() per key.
		for (SocketChannel sc : channelPool.values()) {
			if (sc.getOwner().equalsIgnoreCase(userId)) {
				channels.add(sc);
			}
		}
		return channels;
	}
	/**
	 * Get all channels that have a specific user as a member.
	 * @param userId - member ID to check
	 * @param includePublic - include public channels
	 * @return list of available channels (can be empty)
	 */
	public static List<SocketChannel> getAllChannelsAvailableTo(String userId, boolean includePublic){
		//ChannelsDatabase channelsDb = SocketConfig.getDefaultChannelsDatabase(); //TODO: use?
		List<SocketChannel> channels = new ArrayList<>();
		for (SocketChannel sc : channelPool.values()) {
			if ((includePublic && sc.isOpen()) || sc.isUserMemberOfChannel(userId)) {
				channels.add(sc);
			}
		}
		return channels;
	}
	/**
	 * Convert list of channels to JSONArray for client.
	 * @param channels - list of channels
	 * @param receiverUserId - ID of receiver or null. If not null some fields are modified if its equal to channel owner (e.g. key).
	 * @return JSONArray expected to be compatible with client channel list
	 */
	public static JSONArray convertChannelListToClientArray(List<SocketChannel> channels, String receiverUserId){
		JSONArray channelsArray = new JSONArray();
		for (SocketChannel sc : channels){
			JSONObject channelJson = JSON.make(
					"id", sc.getChannelId(),
					"name", sc.getChannelName(),
					"owner", sc.getOwner(),
					"server", sc.getServerId(),
					"isOpen", sc.isOpen()
			);
			if (sc.getOwner().equals(receiverUserId)){
				JSON.put(channelJson, "key", sc.getChannelKey()); //add key so the owner can generate invite links
			}
			JSON.add(channelsArray, channelJson);
		}
		return channelsArray;
	}
}
|
#include "Config.h"
#include "StorageSqlite.h"
#include "utils/NumberFormatter.h"
namespace Storage
{
extern StorageSqlite storageSqlite;
// Open (or create) the database file; reports success as a boolean.
bool StorageSqlite::Open( const char * filename )
{
    return sqlite3_open(filename, &_sqlite) == SQLITE_OK;
}
// Close the handle and reset it so a repeated Close() is harmless.
void StorageSqlite::Close()
{
    if(_sqlite == NULL)
    {
        return;
    }
    sqlite3_close(_sqlite);
    _sqlite = NULL;
}
// Create a table (if it does not already exist) from a NULL-terminated
// StructDef array. Per column, bits of `option` select constraints:
//   option & 3 == 1  -> part of the composite PRIMARY KEY
//   option & 3 == 2  -> part of the composite UNIQUE constraint
//   option & 4       -> NOT NULL
// NOTE(review): the NOT NULL branch is an `else if`, so a column flagged as
// key/unique never receives NOT NULL — confirm this is intentional (in
// SQLite, non-INTEGER PRIMARY KEY columns may still store NULL unless
// declared NOT NULL).
bool StorageSqlite::Create( const char * tablename, StructDef * structs )
{
if(_sqlite == NULL || structs == 0)
{
return false;
}
std::string query = "CREATE TABLE IF NOT EXISTS ";
query = query + tablename + " (";
//accumulate key/unique column names; appended as table constraints below
std::string keys = "";
std::string unis = "";
for(uint i = 0; structs[i].name != NULL; i ++)
{
if(i > 0)
query += ", ";
query = query + structs[i].name + ' ' + getTypeName(structs[i].type);
if((structs[i].option & 3) == 1)
{
if(keys.length() > 0)
keys += ", ";
keys += structs[i].name;
}
else if((structs[i].option & 3) == 2)
{
if(unis.length() > 0)
unis += ", ";
unis += structs[i].name;
}
else if((structs[i].option & 4) == 4)
{
query += " NOT NULL";
}
}
//composite constraints go after the column list
if(keys.length() > 0)
{
query += ", PRIMARY KEY(";
query += keys;
query += ")";
}
if(unis.length() > 0)
{
query += ", UNIQUE(";
query += unis;
query += ")";
}
query += ")";
return doExec(query.c_str());
}
/**
 * Map a storage data type to the SQLite column type used in CREATE TABLE.
 * Unknown types fall back to BLOB.
 * Fix: removed the unreachable `return "BLOB";` after the switch — every
 * path already returns via a case or the default.
 */
const char * StorageSqlite::getTypeName( Storage::DataType t )
{
    switch(t)
    {
    case Storage::Int:
        return "INTEGER(20)";
    case Storage::Real:
        return "REAL";
    case Storage::String:
        return "TEXT";
    default:
        return "BLOB";
    }
}
// Execute a statement that produces no result rows (DDL/DML).
// NOTE(review): the error-message argument is NULL, so failure details are
// discarded — only the boolean outcome is reported to callers.
bool StorageSqlite::doExec( const char* exec )
{
return sqlite3_exec(_sqlite, exec, NULL, NULL, NULL) == SQLITE_OK;
}
/**
 * Run a row-returning statement and append every row to `result`.
 *
 * @param query  SQL text (single statement)
 * @param col    number of columns to read per row
 * @param result receives one std::vector<DataDef> per row
 * @return true when the statement prepared successfully (even with 0 rows)
 *
 * Fix: the prepared statement was never finalized, leaking one statement
 * handle per query; sqlite3_finalize is now called before returning.
 */
bool StorageSqlite::doQuery( const char* query, uint col, std::vector<std::vector<DataDef> >& result)
{
    sqlite3_stmt * stmt;
    const char * tail;
    if(sqlite3_prepare_v2(_sqlite, query, -1, &stmt, &tail) != SQLITE_OK)
    {
        return false;
    }
    while(sqlite3_step(stmt) == SQLITE_ROW)
    {
        result.resize(result.size() + 1);
        std::vector<DataDef>& row = result.back();
        row.resize(col);
        for(uint i = 0; i < col; i ++)
        {
            switch(sqlite3_column_type(stmt, i))
            {
            case SQLITE_NULL:
                // SQL NULL is represented as integer 0.
                row[i].SetInt(0L);
                break;
            case SQLITE_INTEGER:
                row[i].SetInt(sqlite3_column_int64(stmt, i));
                break;
            case SQLITE_FLOAT:
                row[i].SetReal(sqlite3_column_double(stmt, i));
                break;
            case SQLITE_TEXT:
                {
                    row[i].type = Storage::String;
                    uint size = sqlite3_column_bytes(stmt, i);
                    if(size > 0)
                    {
                        row[i].SetString((const char *)sqlite3_column_text(stmt, i));
                    }
                    else
                    {
                        row[i].SetString("");
                    }
                }
                break;
            default:
                {
                    // BLOB (or anything unexpected): copy the raw bytes.
                    uint size = sqlite3_column_bytes(stmt, i);
                    if(size > 0)
                    {
                        row[i].SetBinary((const byte *)sqlite3_column_blob(stmt, i), size);
                    }
                    else
                    {
                        row[i].bVal = NULL;
                        row[i].bSize = 0;
                    }
                }
                break;
            }
        }
    }
    sqlite3_finalize(stmt);
    return true;
}
/**
 * Insert-or-replace one row into `tablename`.
 * `colnames` is NULL-terminated; data[j] supplies the value for colnames[j].
 *
 * Fix: string values are now emitted as standard single-quoted SQL literals
 * with embedded quotes doubled. The old code wrapped values in double
 * quotes with no escaping, which broke on values containing '"' (and
 * double-quoted tokens can even be parsed as identifiers by SQLite).
 *
 * @return 1 on success, 0 on failure
 */
uint StorageSqlite::Set( const char * tablename, const char ** colnames, DataDef * data )
{
    std::string query = "REPLACE INTO ";
    query = query + tablename + " (";
    uint i;
    for(i = 0; colnames[i] != NULL; i ++)
    {
        if(i > 0)
            query += ", ";
        query += colnames[i];
    }
    query += ") VALUES (";
    uint len = i;
    for(uint j = 0; j < len; j ++)
    {
        if(j > 0)
            query += ", ";
        switch(data[j].type)
        {
        case Storage::Int:
            Utils::NumberFormatter::append(query, (Utils::Int64)data[j].lVal);
            break;
        case Storage::Real:
            Utils::NumberFormatter::append(query, data[j].rVal, 2);
            break;
        case Storage::String:
            {
                // Escape by doubling every single quote, then wrap in '...'.
                std::string escaped(data[j].sVal);
                std::string::size_type pos = 0;
                while((pos = escaped.find('\'', pos)) != std::string::npos)
                {
                    escaped.insert(pos, 1, '\'');
                    pos += 2;
                }
                query += '\'';
                query += escaped;
                query += '\'';
            }
            break;
        default:
            // Binary blob as a hex literal: X'AABB...'.
            query += "X'";
            {
                uint bSize = data[j].bSize;
                for(uint k = 0; k < bSize; k ++)
                {
                    Utils::NumberFormatter::appendHex(query, data[j].bVal[k], 2);
                }
            }
            query += "'";
            break;
        }
    }
    query += ")";
    return doExec(query.c_str()) ? 1 : 0;
}
// Fetch every row of `tablename`, selecting the NULL-terminated list of
// column names; rows are appended to `result`.
bool StorageSqlite::GetAll( const char * tablename, const char ** colnames, std::vector<std::vector<DataDef> >& result )
{
    std::string query("SELECT ");
    uint count = 0;
    while(colnames[count] != NULL)
    {
        if(count > 0)
        {
            query += ", ";
        }
        query += colnames[count];
        count ++;
    }
    query += " FROM ";
    query += tablename;
    return doQuery(query.c_str(), count, result);
}
// Like GetAll(), but restricted by a raw SQL WHERE condition.
// An empty or NULL condition degrades to a full-table fetch.
bool StorageSqlite::GetMatch( const char * tablename, const char ** colnames, const char * matchcond, std::vector<std::vector<DataDef> >& result )
{
    if(matchcond == NULL || matchcond[0] == 0)
    {
        return GetAll(tablename, colnames, result);
    }
    std::string query("SELECT ");
    uint count = 0;
    while(colnames[count] != NULL)
    {
        if(count > 0)
        {
            query += ", ";
        }
        query += colnames[count];
        count ++;
    }
    query += " FROM ";
    query += tablename;
    query += " WHERE ";
    query += matchcond;
    return doQuery(query.c_str(), count, result);
}
// Delete every row of the table; the table structure is kept.
void StorageSqlite::RemoveAll( const char * tablename )
{
    std::string query = std::string("DELETE FROM ") + tablename;
    doExec(query.c_str());
}
// Delete the rows matching a raw SQL WHERE condition.
// An empty or NULL condition means "remove everything".
void StorageSqlite::RemoveMatch( const char * tablename, const char * cond )
{
    if(cond == NULL || cond[0] == 0)
    {
        RemoveAll(tablename);
        return;
    }
    std::string query = std::string("DELETE FROM ") + tablename + " WHERE " + cond;
    doExec(query.c_str());
}
// Start an explicit transaction; pair with Commit().
void StorageSqlite::Begin()
{
doExec("BEGIN");
}
// Commit the transaction opened by Begin().
void StorageSqlite::Commit()
{
doExec("COMMIT");
}
// Rebuild the database file to reclaim free pages (can be slow on large files).
void StorageSqlite::Optimize()
{
doExec("VACUUM");
}
}
|
<filename>modules/jooby-apitool/src/test/java/apps/Controller1059.java
package apps;
import org.jooby.mvc.GET;
import org.jooby.mvc.Path;
import javax.inject.Inject;
//NOTE(review): this class is an apitool test fixture — the Javadoc blocks
//below are parsed by the test harness, so their wording is intentionally
//left exactly as-is.
/**
 * Top level comment.
 */
@Path("/test")
public class Controller1059 {
@Inject
public Controller1059() {
}
/**
 * Say hi.
 * @return Hi.
 */
@GET
public String salute() {
return "Hi";
}
/**
 * Say X.
 * @return Hi.
 */
@GET
@Path("/x")
public String salutex() {
return "Hi";
}
}
|
$(document).on('ready', function () {
get_lista_taller_semestre($('#talleres_semestre_div .panel-heading h3.actual').data('id'));
$('#calendar_actividades').fullCalendar({
header: {
left: 'prev,next',
center: '',
right: 'agendaWeek,agendaDay'
},
defaultView: 'agendaWeek',
theme: true,
editable: true,
minTime: 7,
maxTime: 20,
height: 650,
date: 1,
month: 6,
year: 2013,
columnFormat: {
month: 'ddd',
week: 'ddd',
day: 'dddd'
},
allDaySlot: false,
allDayText: 'Sin asignar',
dayNames: ['Domingo', 'Lunes', 'Martes', 'Miercoles', 'Jueves', 'Viernes', 'Sabado'],
dayNamesShort: ['Dom', 'Lun', 'Mar', 'Mie', 'Jue', 'Vie', 'Sab'],
monthNames: ['Enero', 'Febrero', 'Marzo', 'Abril', 'Mayo', 'Junio', 'Julio', 'Agosto', 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre'],
monthNamesShort: ['Ene', 'Feb', 'Mar', 'Abr', 'Mayo', 'Jun', 'Jul', 'Ago', 'Sept', 'Oct', 'Nov', 'Dic'],
weekMode: 'variable',
handleWindowResize: false,
defaultEventMinutes: 120,
weekends: false,
droppable: true,
weekNumberTitle: 'Sem',
slotMinutes: 15,
slotEventOverlap: false,
buttonText: {
today: 'Hoy',
month: 'Mes',
week: 'Semana',
day: 'Dia'
},
drop: function (date, allDay, jsEvent, ui) {
var $this = $(this);
var copiedEventObject = $.extend({}, $this.data('eventObject'));
copiedEventObject.start = date;
copiedEventObject.end = new Date(date.getFullYear(), date.getMonth(), date.getDate(), date.getHours(), date.getMinutes() + copiedEventObject.duration, 0, 0);
copiedEventObject.allDay = allDay;
var inicio = '';
var termino = '';
if (!allDay) {
inicio = copiedEventObject.start.getHours() + ':' + copiedEventObject.start.getMinutes() + ':00';
termino = copiedEventObject.end.getHours() + ':' + copiedEventObject.end.getMinutes() + ':00';
}
var data_ajax = 'id=' + copiedEventObject.id_actividad
+ '&dia=' + date.getDay() +
'&termino=' + termino +
'&inicio=' + inicio;
$.ajax({
url: base_url + 'admin/taller_semestre_horario/insert/',
data: data_ajax,
type: 'POST',
dataType: 'json',
success: function (data) {
if (data.status === "MSG") {
if (data.type === 'success') {
copiedEventObject.id = data.id;
$('#calendar_actividades').fullCalendar('renderEvent', copiedEventObject, true);
} else {
alerts(data.type, data.message);
}
}
}
});
},
eventMouseRemove: function (event) {
$.ajax({
url: base_url + 'admin/taller_semestre_horario/delete/' + event.id,
type: 'POST',
dataType: 'json',
success: function (data) {
if (data.status === "MSG") {
if (data.type === 'success') {
$('#calendar_actividades').fullCalendar('removeEvents', event.id);
} else {
alerts(data.type, data.message);
}
}
}
});
},
eventDrop: function (event, dayDelta, minuteDelta, allDay, revertFunc, jsEvent, ui, view) {
update_event(event);
},
eventResize: function (event, dayDelta, minuteDelta, revertFunc, jsEvent, ui, view) {
update_event(event);
},
viewRender: function (currentView) {
currentView.start;
var inicio_date = new Date(2013, 6, 1, 0, 0, 0, 0);
var fin_date = new Date(2013, 6, 5, 20, 59, 0, 0);
if (currentView.start < inicio_date) {
$('#calendar_actividades').fullCalendar('gotoDate', fin_date);
} else if (currentView.start > fin_date) {
$('#calendar_actividades').fullCalendar('gotoDate', inicio_date);
}
},
dayClick: function (date, allDay, jsEvent, view) {
if (view.name === 'agendaWeek') {
$('#calendar_actividades').fullCalendar('changeView', 'agendaDay');
$('#calendar_actividades').fullCalendar('gotoDate', date);
} else {
$('#calendar_actividades').fullCalendar('changeView', 'agendaWeek');
}
}
});
$('#table_talleres .btn-editar').on('click', function () {
var $tr = $(this).closest('tr');
$('#talleres_form').attr('action', base_url + 'admin/talleres_semestre/update/' + $tr.data('id'));
$('#add_talleres_semestre_modal').modal('show');
$('#talleres_form input[name="taller"]').val($tr.find('td').eq(0).text());
});
$('#taller_semestre_form').on('submit', function (event) {
event.preventDefault();
$.ajax({
url: $(this).attr('action'),
data: $(this).serialize(),
type: 'POST',
dataType: 'json',
success: function (data) {
if (data.status === "MSG") {
if (data.type === 'success') {
if (data.tipo == 0) {
var html = '<div data-event="0" class="dragg_div" data-id="' + data.id + '">' +
'<div style="display: block">' +
'<button type="button" class="btn btn-link btn-sm pull-right btn-remove"><span class="glyphicon glyphicon-remove"></span></button>' +
'<button type="button" class="btn btn-link btn-sm pull-right btn-editar"><span class="glyphicon glyphicon-pencil"></span></button>' +
'<span class="glyphicon glyphicon-move move_dragg"></span></div>' +
'<div class="content_dragg">' +
'<div class="taller_span">Taller: ' + data.datos.taller + '</div>' +
'<div class="profesor_span">Profesor: ' + data.datos.nombre + ' ' + data.datos.paterno + ' ' + data.datos.materno + '</div>' +
'<div class="salon_span">Salón: ' + data.datos.salon + '</div> ' +
'<div class="grupo_span">Grupo: ' + data.datos.grupo + '</div> ' +
'</div>' +
'</div>';
$('#talleres_semestre_div .panel-body').append(html);
$('#talleres_semestre_div .panel-body .dragg_div').each(function () {
if ($(this).data('event') == 0) {
addEventDragg($(this));
}
});
} else {
$('#talleres_semestre_div .panel-body .dragg_div').each(function () {
if ($(this).data('id') == data.id) {
$(this).find('.taller_span').data('id', data.datos.taller_id);
$(this).find('.taller_span').text('Taller: ' + data.datos.taller);
$(this).find('.profesor_span').data('id', data.datos.profesor_id);
$(this).find('.profesor_span').text('Profesor: ' + data.datos.nombre + ' ' + data.datos.paterno + ' ' + data.datos.materno);
$(this).find('.salon_span').data('id', data.datos.salon_id);
$(this).find('.salon_span').html('Salón: ' + data.datos.salon);
$(this).find('.cupo_span').data('id', data.datos.cupo);
$(this).find('.cupo_span').text('Cupo: ' + data.datos.cupo);
$(this).find('.grupo_span').data('id', data.datos.grupo);
$(this).find('.grupo_span').text('Grupo: ' + data.datos.grupo);
}
});
}
$('#add_talleres_semestre_modal').modal('hide');
}
alerts(data.type, data.message);
}
}
});
});
$('#salones_select').on('change', function () {
$('#cupo_input').val($("#salones_select option:selected").data('cupo'));
});
$('.btn-agregar-taller-sem').on('click', function () {
$('#taller_semestre_form select').val(0);
$('#taller_semestre_form #cupo_input').val("");
$('#taller_semestre_form #grupo_input').val("");
$('#taller_semestre_form #can_input_a').prop("checked" , false);
$('#taller_semestre_form #can_input_ex').prop("checked" , false);
$('#taller_semestre_form #can_input_t').prop("checked" , false);
$('#taller_semestre_form #can_input_et').prop("checked" , false);
$('#taller_semestre_form').attr('action', base_url + 'admin/talleres_semestre/insert');
$('#add_talleres_semestre_modal h3').text('Agregar nuevo');
$('#add_talleres_semestre_modal').modal('show');
});
$('#talleres_semestre_div .panel-heading .pull-left').on('click', function () {
var index = $('#talleres_semestre_div .panel-heading h3.actual').data('index');
if (index > 1) {
$('#talleres_semestre_div .panel-heading h3').removeClass('actual');
$('#talleres_semestre_div .panel-heading h3').hide();
$('#talleres_semestre_div .panel-heading h3.title_' + (parseInt(index) - 1)).addClass('actual');
$('#talleres_semestre_div .panel-heading h3.title_' + (parseInt(index) - 1)).show();
get_lista_taller_semestre($('#talleres_semestre_div .panel-heading h3.title_' + (parseInt(index) - 1)).data('id'));
}
});
$('#talleres_semestre_div .panel-heading .pull-right').on('click', function () {
var index = $('#talleres_semestre_div .panel-heading h3.actual').data('index');
if (index < $('#talleres_semestre_div .panel-heading').data('count')) {
$('#talleres_semestre_div .panel-heading h3').removeClass('actual');
$('#talleres_semestre_div .panel-heading h3').hide();
$('#talleres_semestre_div .panel-heading h3.title_' + (parseInt(index) + 1)).addClass('actual');
$('#talleres_semestre_div .panel-heading h3.title_' + (parseInt(index) + 1)).show();
get_lista_taller_semestre($('#talleres_semestre_div .panel-heading h3.title_' + (parseInt(index) + 1)).data('id'));
}
});
});
/**
 * Persist a calendar event's new day/time after a drag or resize.
 * Sends the schedule (weekday, start, end) for the event's activity to the
 * server; on failure an alert is shown, on success nothing else happens.
 */
function update_event(event) {
    var inicio = event.start.getHours() + ':' + event.start.getMinutes() + ':00';
    var termino = event.end.getHours() + ':' + event.end.getMinutes() + ':00';
    var payload = 'id=' + event.id_actividad +
        '&dia=' + event.start.getDay() +
        '&termino=' + termino +
        '&inicio=' + inicio;
    $.ajax({
        url: base_url + 'admin/taller_semestre_horario/update/' + event.id,
        type: 'POST',
        dataType: 'json',
        data: payload,
        success: function (response) {
            if (response.status !== "MSG") {
                return;
            }
            if (response.type !== 'success') {
                alerts(response.type, response.message);
            }
        }
    });
}
/**
 * Turn a workshop card in the side panel into a draggable calendar source
 * and wire up its edit/remove buttons. Marks the element with
 * data-event=1 so callers can skip already-initialized cards.
 */
function addEventDragg($element) {
$element.data('event', 1);
$element.draggable({
helper: 'clone',
zIndex: 2000,
handle: '.move_dragg',
// Trim the dragged clone down to the essential info while dragging.
start: function (event, ui) {
ui.helper.css("background-color", "#eee");
ui.helper.css("width", "373px");
ui.helper.find('.action_dragg').remove();
ui.helper.find('.salon_span').hide();
ui.helper.find('.grupo_span').hide();
}
});
// The card's inner HTML becomes the calendar event title on drop; the
// fullCalendar drop handler reads this 'eventObject' payload.
var texto_long = $element.find('.content_dragg').html();
$element.data('eventObject', {
title: texto_long,
allDay: true,
duration: 120,
min_duration: 5,
durationEditable: true,
startEditable: true,
editable: true,
end: null,
id_actividad: $element.data('id')
});
// Edit: fetch the workshop, pre-fill the modal form in edit mode.
$element.find('.btn-editar').on('click', function () {
var $padre = $(this).closest('.dragg_div');
$.ajax({
url: base_url + 'admin/talleres_semestre/get/' + $(this).closest('.dragg_div').data('id'),
type: 'GET',
dataType: 'json',
success: function (data) {
if (data.status === "OK") {
$('#taller_semestre_form #taller_select').val(data.taller.taller_id);
$('#taller_semestre_form #profesores_select').val(data.taller.profesor_id);
$('#taller_semestre_form #salones_select').val(data.taller.salon_id);
$('#taller_semestre_form #cupo_input').val(data.taller.cupo);
$('#taller_semestre_form #grupo_input').val(data.taller.grupo);
$('#taller_semestre_form #can_input_a').prop("checked" , (data.taller.puede_alumno == 1) ? true:false);
$('#taller_semestre_form #can_input_ex').prop("checked" , (data.taller.puede_exalumno == 1) ? true:false);
$('#taller_semestre_form #can_input_t').prop("checked" , (data.taller.puede_trabajador == 1) ? true:false);
$('#taller_semestre_form #can_input_et').prop("checked" , (data.taller.puede_externo == 1) ? true:false);
$('#taller_semestre_form').attr('action', base_url + 'admin/talleres_semestre/update/' + data.taller.id);
$('#add_talleres_semestre_modal h3').text('Editar');
$('#add_talleres_semestre_modal').modal('show');
} else {
alerts(data.type, data.message);
}
}
});
});
// Remove: delete on the server, then drop the card from the panel.
$element.find('.btn-remove').on('click', function () {
var $dragg = $(this).closest('.dragg_div');
$.ajax({
url: base_url + 'admin/talleres_semestre/delete/' + $dragg.data('id'),
type: 'POST',
dataType: 'json',
success: function (data) {
if (data.status === "MSG") {
if (data.type === 'success') {
$dragg.remove();
}
alerts(data.type, data.message);
}
}
});
});
}
function get_lista_taller_semestre(semestre_id) {
$.ajax({
url: base_url + 'admin/talleres_semestre/get_talleres/' + semestre_id,
type: 'POST',
dataType: 'json',
success: function (data) {
if (data.status === "OK") {
$('#talleres_semestre_div .panel-body').html(data.content);
$.ajax({
url: base_url + 'admin/talleres_semestre/get_talleres_group/' + semestre_id,
type: 'POST',
dataType: 'json',
success: function (data1) {
if (data1.status === "OK") {
var html_select = '';
$.each(data1.talleres, function (key, event) {
html_select += '<option value="' + event.taller_id + '">' + event.taller + '</option>';
$('#calendar_actividades .fc-header-center').html('<select class="multiselect" multiple="multiple">' + html_select + '</select>');
$('#calendar_actividades .fc-header-center .multiselect').multiselect({
enableFiltering: true,
enableCaseInsensitiveFiltering: true,
maxHeight: 310,
filterPlaceholder: 'Buscar',
buttonWidth: '230px',
nonSelectedText: 'Sin Selección',
onChange: function (element, checked) {
get_by_semestre_taller();
}
});
});
}
}
});
$("#semestre_input_hidden").val(semestre_id);
$('.dragg_div').each(function () {
if ($(this).data('event') == 0) {
addEventDragg($(this));
}
});
} else {
alerts(data.type, data.message);
}
}
});
}
/**
 * Re-render the calendar from the multiselect filter: fetch the scheduled
 * sessions of every checked workshop for the current semester and replace
 * all calendar events with them. With nothing checked the calendar is
 * simply cleared.
 */
function get_by_semestre_taller() {
var data_html = '';
$('#calendar_actividades .fc-header-center .multiselect-container input[type="checkbox"]').each(function () {
if ($(this).is(':checked')) {
data_html += 'talleres[]=' + $(this).val() + '&';
}
});
if (data_html !== '') {
var semestre_id = $('#talleres_semestre_div .panel-heading h3.actual').data('id');
$.ajax({
url: base_url + 'admin/taller_semestre_horario/get_by_semestre/' + semestre_id,
type: 'POST',
data: data_html,
dataType: 'json',
success: function (data) {
if (data.status === "OK") {
$('#calendar_actividades').fullCalendar('removeEvents');
$.each(data.talleres, function (key, event) {
var titulo = '<div class="taller_span">Taller: ' + event.taller + '</div>' +
'<div class="profesor_span">Profesor: ' + event.nombre + " " + event.paterno + " " + event.materno + '</div>' +
'<div class="salon_span">Salón: ' + event.salon + '</div>' +
'<div class="salon_span">Grupo: ' + event.grupo + '</div>';
// Times come as "HH:MM[:SS]" strings; dates are pinned to the
// July 2013 template week used by the calendar (event.dia = day).
var end_array = event.termino.split(':');
var end = new Date(2013, 6, event.dia, end_array[0], end_array[1], 0, 0);
var start_array = event.inicio.split(':');
var start = new Date(2013, 6, event.dia, start_array[0], start_array[1], 0, 0);
var _event = {
id: event.id,
title: titulo,
end: end,
id_actividad: event.taller_semestre_id,
min_duration: 5,
duration: 120,
durationEditable: true,
startEditable: true,
editable: true,
start: start,
allDay: false
};
$('#calendar_actividades').fullCalendar('renderEvent', _event, true);
});
} else {
alerts(data.type, data.message);
}
}
});
} else {
$('#calendar_actividades').fullCalendar('removeEvents');
}
}
<reponame>AnDamazio/book4u-api
/**
 * Flat DTO grouping a user's personal, credential and address data.
 * NOTE(review): `password` and `token` live next to plain contact fields —
 * confirm this object is never serialized wholesale into logs/API responses.
 */
export class PersonalData {
cpf: string; // presumably the Brazilian CPF taxpayer ID — format not validated here
cellphone: string;
telephone: string;
email: string;
password: string; // credential — handle with care
token: string; // auth/session token
streetName: string;
complement: string;
district: string;
houseNumber: string;
zipCode: string;
city: string;
state: string;
}
|
#!/bin/bash
# Change into the script's own directory so relative paths resolve predictably.
cd "${BASH_SOURCE%/*}" || exit
# List every wallet.dat under any dot-directory in $HOME, plus Komodo asset
# chains, sorted by size (largest first) — useful for locating wallets.
ls -lahS ~/.*/wallet.dat ~/.komodo/*/wallet.dat
|
# Cross-evaluate promoter-prediction models against our dataset and summarize
# each run with analysis.py.
# Fix: use `mkdir -p` so the script is re-runnable — plain `mkdir` fails when
# the directories already exist (and -p also creates the parent `evaluation`).
mkdir -p evaluation/cnnprom
# CNNPROM trained model on DPROM dataset
python test/cnnprom.py --output evaluation/cnnprom/ours.csv --input models/ours/dataframe.csv
python analysis.py --output evaluation/cnnprom/ours_results/ --results evaluation/cnnprom/ours.csv
mkdir -p evaluation/dprom
# DPROM trained model on CNNPROM dataset
python test/dprom.py --output evaluation/dprom/ours.csv --input models/ours/dataframe.csv
python analysis.py --output evaluation/dprom/ours_results/ --results evaluation/dprom/ours.csv
mkdir -p evaluation/icnn
# ICNN trained model on CNNPROM dataset
python test/icnn.py --output evaluation/icnn/ours.csv --input models/ours/dataframe.csv
python analysis.py --output evaluation/icnn/ours_results/ --results evaluation/icnn/ours.csv
//Renders a searched CASE expression back to SQL-like text, e.g.
//"(CASE WHEN <cond> THEN <value> ... ELSE <default>)".
//NOTE(review): no closing "END" keyword is appended before the ")" — confirm
//the consumer of this string does not require standard CASE ... END syntax.
class CustomExpressionVisitor implements ExpressionVisitor {
@Override
public String visitSearchedCaseExpression(SearchedCaseExpression node, List<Expression> parameters) {
//space-separated parts wrapped in parentheses
var parts = new StringJoiner(" ", "(", ")");
parts.add("CASE");
//each WHEN clause renders itself via this visitor
for (WhenClause whenClause : node.getWhenClauses()) {
parts.add(whenClause.accept(this, parameters));
}
//ELSE branch only when a default value exists
if (node.getDefaultValue() != null) {
parts.add("ELSE");
parts.add(node.getDefaultValue().accept(this, parameters));
}
return parts.toString();
}
// Other visit methods for different types of expressions
}
"use strict";
let playg = new Playground();
let kim = document.querySelector("#selectkim");
kim.addEventListener("click", function(event) {
let launch = document.querySelector("#launch")
launch.style.display = 'none';
playg.setPlayer1();
});
let donald = document.querySelector("#selectdonald");
donald.addEventListener("click", function(event) {
let launch = document.querySelector("#launch");
launch.style.display = 'none';
playg.setPlayer2();
});
let kiss1 = document.querySelector("#kiss1")
kiss1.addEventListener("click", function (event) {
playg.action("kiss");
prog();
});
let hug1 = document.querySelector("#hug1")
hug1.addEventListener("click", function (event) {
playg.action("hug");
prog();
});
let insult1 = document.querySelector("#insult1")
insult1.addEventListener("click", function (event) {
playg.action("insult");
prog();
});
let threaten1 = document.querySelector("#threaten1")
threaten1.addEventListener("click", function (event) {
playg.action("threaten");
prog();
});
let bomb1 = document.querySelector("#bomb1")
bomb1.addEventListener("click", function(event){
playg.action("bomb");
prog();
});
|
# Inspect every SSL certificate referenced by the copied httpd.conf for the
# domains being migrated; warn when AutoSSL-issued or expired certs are found.
# Relies on globals set elsewhere: $dir (copy root), $domainlist, and the
# helpers ec, errorlogit, say_ok.
sslcert_check() { # print cert info for all ssls in httpd.conf
ec yellow "Checking for SSL Certificates..."
if grep SSLCertificateFile $dir/usr/local/apache/conf/httpd.conf | grep -qv cpanel.pem; then
# get a list of certs and start counting
local i=0
local now=$(date +"%s")
for domain in $domainlist; do
# match cert paths in both dotted (www.domain.crt) and underscored
# (www_domain_crt / combined) naming schemes
for crt in $(egrep 'SSLCertificateFile.*/(www\.)?'$domain'(\.crt|\/combined){1}' $dir/usr/local/apache/conf/httpd.conf | awk '{print $2}'; egrep 'SSLCertificateFile.*/(www_)?'$(echo $domain | tr '.' '_')'(\.crt|\/combined){1}' $dir/usr/local/apache/conf/httpd.conf | awk '{print $2}'); do
# check with and without www, make temp file for multiple parses
local crtout=$(mktemp)
# stop echoing certs, saves screen space on larger migrations
#ec white $dir$crt
openssl x509 -noout -in $dir$crt -issuer -subject -dates > $crtout
# set variable if using autossl
grep -q -e "O=Let's Encrypt" -e "O=cPanel, Inc." $crtout && usingautossl=1
local enddate=$(date -d "$(grep notAfter $crtout | cut -d\= -f2)" +"%s")
# determine if any certs are expired
[ "$enddate" -lt "$now" ] && local expiredcert=1
rm -f $crtout
i=$(($i + 1))
done
done
ec yellow "There were $i certificates located for domains being migrated."
[ $usingautossl ] && ec red "The source server seems to be using AutoSSL!"
[ $expiredcert ] && ec red "The source server is using some expired SSL certificates." | errorlogit 4
say_ok
else
ec yellow "No SSL Certificates found in httpd.conf."
fi
}
|
<gh_stars>0
// Package version string, exported for runtime introspection/logging.
export const version = "providers/5.6.4";
|
#include "catch.hpp"
#include <maybe/result.hpp>
// Simple value wrapper used as the "ok" payload type in the tests below.
class A final {
public:
    // Take by value and move into the member — avoids an extra copy when the
    // caller passes an rvalue (the old code always copied).
    A(std::string value) : value(std::move(value))
    {
    }
    std::string value;
};
// Second value wrapper, distinct from A so the tests can verify type-changing
// and_then chains.
class B final {
public:
    // Take by value and move into the member — avoids an extra copy when the
    // caller passes an rvalue (the old code always copied).
    B(std::string value) : value(std::move(value))
    {
    }
    std::string value;
};
using maybe::result;
// Behavioral tests for result<T,E>::and_then: the continuation runs only when
// the previous result is ok (including void results), and an error value
// propagates unchanged without invoking the continuation.
TEST_CASE("result_and_then")
{
SECTION("chains another function returning different result if previous one was successful")
{
auto a = result<A, int>::ok(A("hello"));
auto b = a.and_then([](A v) { return result<B, int>::ok(B(v.value + " world")); });
REQUIRE(b);
REQUIRE(b.ok_value().value == "hello world");
}
SECTION("chains another function returning different void result if previous one was successful")
{
auto a = result<A, int>::ok(A("hello"));
auto b = a.and_then([](A v) { return result<void, int>::ok(); });
REQUIRE(b);
}
SECTION("chains another function returning different result if previous one was successful void")
{
auto a = result<void, int>::ok();
auto b = a.and_then([]() { return result<B, int>::ok(B("Void world")); });
REQUIRE(b);
REQUIRE(b.ok_value().value == "Void world");
}
// Error propagation: the lambda must not run; the original error is kept.
SECTION(
"should not run another function returning different result if previous one returned error")
{
auto a = result<A, int>::err(43);
auto b = a.and_then([](A v) { return result<B, int>::ok(B(v.value + " world")); });
REQUIRE(!b);
REQUIRE(43 == b.err_value());
}
SECTION(
"should not run another function returning different void result if previous one returned error")
{
auto a = result<A, int>::err(43);
auto b = a.and_then([](A v) { return result<void, int>::ok(); });
REQUIRE(!b);
REQUIRE(43 == b.err_value());
}
SECTION(
"should not run another function returning different result if previous void one returned error")
{
auto a = result<void, int>::err(43);
auto b = a.and_then([]() { return result<B, int>::ok(B("Void world")); });
REQUIRE(!b);
REQUIRE(43 == b.err_value());
}
}
#!/bin/bash
# Lower the screen backlight by 5% (intended as a hotkey/keybinding target).
xbacklight -dec 5
|
<gh_stars>1-10
/*
* Author: <NAME>
* github: https://github.com/Tomp0801/Micro-RTSP-Audio
*
* Based on Micro-RTSP library for video streaming by Kevin Hester:
*
* https://github.com/geeksville/Micro-RTSP
*
* Copyright 2018 <NAME>, <EMAIL> (MIT License)
*/
#pragma once
#include "AudioStreamer.h"
/**
* Creates an RTSP Server to listen for client connections and start sessions
*/
class RTSPServer {
private:
TaskHandle_t serverTaskHandle; // FreeRTOS task running serverThread()
TaskHandle_t sessionTaskHandle; // FreeRTOS task running sessionThread()
SOCKET MasterSocket; // our masterSocket(socket that listens for RTSP client connections)
SOCKET ClientSocket; // RTSP socket to handle an client
sockaddr_in ServerAddr; // server address parameters
sockaddr_in ClientAddr; // address parameters of a new RTSP client
int port; // port that the RTSP Server listens on
int core; // the ESP32 core number the RTSP runs on (0 or 1)
// NOTE(review): no code in this header updates numClients — confirm the
// implementation file maintains it.
int numClients = 0; // number of connected clients
AudioStreamer * streamer; // AudioStreamer object that acts as a source for data streams
public:
/**
* Creates a new RTSP server
* @param streamer AudioStreamer object that acts as a source for data streams
* @param port port that the RTSP Server should listen on (default 8554)
* @param core the ESP32 core number the RTSP runs on (0 or 1, default 1)
*/
RTSPServer(AudioStreamer * streamer, int port = 8554, int core = 1);
/**
* Starts running the server in a new asynchronous Task
* @return 0 on success, or error number
*/
int runAsync();
// Handle of the listener task created by runAsync().
TaskHandle_t getTaskHandle() { return serverTaskHandle; };
private:
/**
* Routine for main server thread, listens for new clients
*/
static void serverThread(void* server_obj);
/**
* Routine for a session if it is started
*/
static void sessionThread(void* server_obj);
};
import operator


def filterList(nums, filter):
    """Return the elements of ``nums`` that satisfy a comparison filter.

    ``filter`` is a string of the form ``"<op> <value>"`` (e.g. ``"> 0"``)
    where ``<op>`` is one of ``<`` ``<=`` ``>`` ``>=`` ``==`` ``!=``.

    Security fix: the original implementation ran ``eval(str(num) + filter)``
    for every element, executing arbitrary code if the filter string is
    untrusted. The common comparison forms are now parsed safely via the
    ``operator`` module; anything else still falls through to ``eval`` for
    backward compatibility, but that path must never see untrusted input.
    """
    comparators = {
        '<=': operator.le, '>=': operator.ge, '==': operator.eq,
        '!=': operator.ne, '<': operator.lt, '>': operator.gt,
    }
    text = filter.strip()
    # Two-character operators first so '<=' is not misread as '<'.
    for symbol in ('<=', '>=', '==', '!=', '<', '>'):
        if text.startswith(symbol):
            try:
                threshold = float(text[len(symbol):].strip())
            except ValueError:
                break  # not a simple numeric comparison; use the fallback
            compare = comparators[symbol]
            return [num for num in nums if compare(num, threshold)]
    # Fallback for arbitrary expressions, preserved for backward compatibility.
    # SECURITY: eval() executes arbitrary code — never pass untrusted filters.
    return [num for num in nums if eval(str(num) + filter)]


result = filterList([1, 2, 5, 6, -4, 7, 8], '> 0')
print(result)
# Output: [1, 2, 5, 6, 7, 8]
import gql from 'graphql-tag'
import { FETCH_RECENT_CONTACTS } from 'store/constants'
// GraphQL document: the `first` most recent connections, each carrying the
// person's identity, avatar, and their first community membership.
export const RecentContactsQuery = gql`
  query RecentContactsQuery ($first: Int) {
    connections (first: $first) {
      items {
        id
        person {
          id
          name
          avatarUrl
          memberships (first: 1) {
            id
            community {
              id
              name
            }
          }
        }
        type
        updatedAt
      }
    }
  }
`
/**
 * Build the redux action that fetches recent contacts via GraphQL.
 * @param query GraphQL document (defaults to RecentContactsQuery)
 * @param first maximum number of connections to fetch (default 20)
 * @return action consumed by the graphql middleware; results are extracted
 *         into the 'PersonConnection' model.
 */
export function fetchRecentContacts (query = RecentContactsQuery, first = 20) {
  return {
    type: FETCH_RECENT_CONTACTS,
    graphql: {
      query,
      variables: { first }
    },
    meta: { extractModel: 'PersonConnection' }
  }
}
export default fetchRecentContacts
package com.carson.cloud.business.service;
import com.carson.cloud.business.entity.UserEntity;
//Service-layer contract for user lookup, registration and password changes.
public interface UserService {
/**
 * Look up a user by login name.
 * @param username login name to search for
 * @return the matching user — presumably null when none exists; TODO confirm implementation
 */
public UserEntity findUserByName(String username);
/**
 * Look up a user by primary key.
 * @param id user ID
 * @return the matching user — presumably null when none exists; TODO confirm implementation
 */
public UserEntity findUserById(String id);
/**
 * Persist a new user.
 * @param user the user to register
 */
public void registUser(UserEntity user);
/**
 * Change the password of the user identified by username.
 * @param password new password
 * @param username login name of the user to update
 * @return the updated user entity
 */
public UserEntity updatePassword(String password,String username);
}
|
<reponame>tenebrousedge/ruby-packer<gh_stars>1-10
require File.expand_path('../../../../spec_helper', __FILE__)
# Specs for Encoding::Converter.search_convpath: the returned conversion path
# is an Array of [source, destination] Encoding pairs (plus decorator strings
# such as "crlf_newline" when requested).
with_feature :encoding do
describe "Encoding::Converter.search_convpath" do
before :all do
# Precompute the conversion path for every ordered pair of known
# encodings; pairs with no path contribute an empty Array.
@perms = Encoding.name_list.permutation(2).map do |pair|
Encoding::Converter.search_convpath(pair.first, pair.last) rescue []
end
end
it "returns an Array" do
Encoding::Converter.search_convpath('ASCII', 'EUC-JP').\
should be_an_instance_of(Array)
end
it "returns each encoding pair as a sub-Array" do
cp = Encoding::Converter.search_convpath('ASCII', 'EUC-JP')
cp.first.should be_an_instance_of(Array)
cp.first.size.should == 2
end
it "returns each encoding as an Encoding object" do
cp = Encoding::Converter.search_convpath('ASCII', 'EUC-JP')
cp.first.first.should be_an_instance_of(Encoding)
cp.first.last.should be_an_instance_of(Encoding)
end
it "returns multiple encoding pairs when direct conversion is impossible" do
# ASCII -> Big5 has no direct converter; it routes through UTF-8.
cp = Encoding::Converter.search_convpath('ascii','Big5')
cp.size.should == 2
cp.first.should == [Encoding::US_ASCII, Encoding::UTF_8]
cp.last.should == [Encoding::UTF_8, Encoding::Big5]
end
it "sets the last element of each pair to the first element of the next" do
@perms.each do |convpath|
next if convpath.size == 1
convpath.each_with_index do |pair, idx|
break if idx == convpath.size - 1
pair.last.should == convpath[idx+1].first
end
end
end
it "only lists a source encoding once" do
@perms.each do |convpath|
next if convpath.size < 2
seen = Hash.new(false)
convpath.each_with_index do |pair, idx|
seen.key?(pair.first).should be_false if idx > 0
seen[pair.first] = true
end
end
end
it "indicates if crlf_newline conversion would occur" do
# The decorator appears as a plain String at the end of the path.
cp = Encoding::Converter.search_convpath(
"ISo-8859-1", "EUC-JP", {crlf_newline: true})
cp.last.should == "crlf_newline"
cp = Encoding::Converter.search_convpath(
"ASCII", "UTF-8", {crlf_newline: false})
cp.last.should_not == "crlf_newline"
end
it "raises an Encoding::ConverterNotFoundError if no conversion path exists" do
lambda do
Encoding::Converter.search_convpath(
Encoding::ASCII_8BIT, Encoding::Emacs_Mule)
end.should raise_error(Encoding::ConverterNotFoundError)
end
end
end
|
import React from 'react';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { StyleSheet, Text, View, TouchableOpacity } from 'react-native';
import { translateCard } from '../actions/translateCard';
import { nextCard } from '../actions/nextCard';
// Bind the two card actions so the component can dispatch them as props.
const mapDispatchToProps = (dispatch) => {
return {
nextCard: bindActionCreators(nextCard, dispatch),
translateCard: bindActionCreators(translateCard, dispatch)
}
}
// Expose the current card's text/translation and the two visibility flags
// (translation shown; rating buttons shown) from the store.
const mapStateToProps = state => ({
textSource: state.textSource,
textDest: state.textDest,
isShowTranslate: state.isShowTranslate,
isShowButtonsContainer: state.isShowButtonsContainer
});
class Card extends React.Component {
_onGetAnswer() {
console.log('translateCard');
this.props.translateCard();
};
_onAgain() {
this.props.nextCard(1);
};
_onHard() {
this.props.nextCard(2);
};
_onEasy() {
this.props.nextCard(3);
};
render() {
let text = this.props.isShowTranslate ? this.props.textDest : '?';
return (
<View style={ { flex: 1 } }>
<View style={styles.cardContainer}>
<View style={styles.wordContainer}>
<Text style={styles.text}>{this.props.textSource}</Text>
</View>
<TouchableOpacity style={[styles.wordContainer, styles.noBorder]} onPress={this._onGetAnswer.bind(this)}>
<Text style={styles.text}>{text}</Text>
</TouchableOpacity>
</View>
{ this.props.isShowButtonsContainer ?
<View style={styles.rateContainer}>
<TouchableOpacity style={[styles.button, styles.againButton]} onPress={this._onAgain.bind(this)}>
<Text style={ styles.buttonText }>Again</Text>
</TouchableOpacity>
<TouchableOpacity style={[styles.button, styles.hardButton]} onPress={this._onHard.bind(this)}>
<Text style={ styles.buttonText }>Hard</Text>
</TouchableOpacity>
<TouchableOpacity style={[styles.button, styles.easyButton]} onPress={this._onEasy.bind(this)}>
<Text style={ styles.buttonText }>Easy</Text>
</TouchableOpacity>
</View>
: null }
</View>
);
}
}
export default connect(mapStateToProps, mapDispatchToProps)(Card);
// Layout and theming for the Card component.
const styles = StyleSheet.create({
  // Upper 90% of the screen: the two word panels.
  cardContainer: {
    flex: 0.9
  },
  // Bottom 10%: the Again/Hard/Easy rating row.
  rateContainer: {
    flex: 0.1,
    flexDirection: 'row',
    justifyContent: 'space-around'
  },
  wordContainer: {
    flex: 0.5,
    alignItems: 'center',
    justifyContent: 'center',
    borderBottomWidth: 1,
    borderBottomColor: '#d6d7da',
  },
  // Applied to the lower (translation) panel so the divider is not doubled.
  noBorder: {
    borderBottomWidth: 0
  },
  text: {
    fontWeight: 'bold',
    fontSize: 30,
    color: "white"
  },
  button: {
    flex: 0.33,
    justifyContent: 'center',
    alignItems: 'center'
  },
  // Rating buttons: red / orange / green.
  againButton: {
    backgroundColor: '#E74C3C'
  },
  hardButton: {
    backgroundColor: '#D68910'
  },
  easyButton: {
    backgroundColor: '#28B463'
  },
  buttonText:
  {
    color: 'white'
  }
});
#pragma once

// Default vertex shader.
// FIX: the original declared "#version 150" while using the pre-1.50
// keywords "attribute" and "varying", which do not compile on a
// core-profile context; rewritten with the in/out qualifiers GLSL 1.50
// requires. Attribute names are unchanged, so existing
// glBindAttribLocation / glGetAttribLocation calls still work.
#define DEFAULT_VS \
"#version 150 \n" \
\
"uniform mat4 projectionMatrix;\n" \
"uniform mat4 modelViewMatrix;\n" \
"in vec3 vertexPosition;\n" \
"in vec2 aTexCoord;\n" \
\
"out vec2 TexCoord;\n" \
\
"void main()\n" \
"{\n" \
"gl_Position = projectionMatrix * modelViewMatrix * vec4(vertexPosition, 1.0);\n" \
"TexCoord = aTexCoord;\n" \
"}\n"
// Default fragment shader: modulates the bound texture by a uniform color.
// FIX: replaced the pre-1.50 "varying" qualifier and the gl_FragColor
// builtin (both invalid under "#version 150" core) with an `in` input and
// a user-declared `out vec4` — which is automatically bound to draw
// buffer 0 when it is the only fragment output.
#define DEFAULT_FS \
"#version 150 \n" \
\
"uniform vec4 color;\n" \
"uniform sampler2D customTexture;\n" \
\
"in vec2 TexCoord;\n" \
\
"out vec4 fragColor;\n" \
\
"void main()\n" \
"{\n" \
"fragColor = texture(customTexture, TexCoord) * color;\n" \
"}\n"
const sharp = require('sharp')
// Rasterize an SVG string at high density so later resizes stay sharp.
const svgSharp = (svg) => {
  return sharp(Buffer.from(svg), { density: 500 });
};
// Rasterize the SVG and pad it with a 20px transparent border on every side,
// returning a PNG buffer.
const svgToExtendedPng = async (svg) => {
  const border = {
    top: 20,
    right: 20,
    bottom: 20,
    left: 20,
    background: '#00000000',
  };
  return svgSharp(svg).extend(border).toFormat('png').toBuffer();
};
// Composite any transparency onto a solid white background.
const flattenPng = (png) =>
  sharp(png).flatten({ background: '#ffffff' }).toBuffer();
// Render an SVG into a padded, white-backed PNG suitable for an <img> tag.
const getImageForImg = async (svg) => {
  const padded = await svgToExtendedPng(svg);
  return flattenPng(padded);
};
// Render an SVG into a PNG whose aspect ratio is clamped to what social
// preview crawlers accept, padding (never cropping) the image to fit.
const getImageForSocialPreview = async (svg) => {
  // Accepted width/height ratio bounds.
  const MIN_RATIO = 1 / 3;
  const MAX_RATIO = 4;

  const extendedPng = await svgToExtendedPng(svg);
  const { width, height } = await sharp(extendedPng).metadata();

  let targetWidth = width;
  let targetHeight = height;
  const ratio = targetWidth / targetHeight;

  if (ratio > MAX_RATIO) {
    // Too wide: grow the canvas vertically up to the maximum ratio.
    targetHeight = Math.round(targetWidth / MAX_RATIO);
  }
  if (ratio < MIN_RATIO) {
    // Too tall: grow the canvas horizontally up to the minimum ratio.
    targetWidth = Math.round(targetHeight * MIN_RATIO);
  }

  const resized = await sharp(extendedPng)
    .resize({
      height: targetHeight,
      width: targetWidth,
      fit: 'contain',
      background: '#00000000',
    })
    .toFormat('png')
    .toBuffer();

  return flattenPng(resized);
};
// Transcode a PNG buffer to JPEG.
const pngToJpeg = (pngBuffer) =>
  sharp(pngBuffer).toFormat(sharp.format.jpeg).toBuffer();
// Transcode a PNG buffer to WebP.
// (A .resize(500) step was deliberately left disabled in the original.)
const pngToWebp = (pngBuffer) => {
  return sharp(pngBuffer).toFormat(sharp.format.webp).toBuffer();
};
// Public API: <img>-tag rendering, social-preview rendering, and PNG transcoding.
module.exports = {
  getImageForImg,
  getImageForSocialPreview,
  pngToJpeg,
  pngToWebp,
}
|
<gh_stars>10-100
package com.yoga.weixinapp.controller;
import com.yoga.admin.shiro.OperatorToken;
import com.yoga.admin.shiro.SuperAdminUser;
import com.yoga.core.base.BaseController;
import com.yoga.core.base.BaseDto;
import com.yoga.core.data.ApiResult;
import com.yoga.core.data.ResultConstants;
import com.yoga.core.exception.IllegalArgumentException;
import com.yoga.core.utils.NumberUtil;
import com.yoga.core.utils.StringUtil;
import com.yoga.operator.user.model.User;
import com.yoga.setting.annotation.Settable;
import com.yoga.tenant.tenant.model.Tenant;
import com.yoga.tenant.tenant.service.TenantService;
import com.yoga.weixinapp.ao.SettingConfig;
import com.yoga.weixinapp.dto.SaveSettingDto;
import com.yoga.weixinapp.dto.WexinBindDto;
import com.yoga.weixinapp.dto.WexinLoginDto;
import com.yoga.weixinapp.service.WxmpService;
import com.yoga.weixinapp.service.WxmpUserService;
import com.yoga.weixinapp.shiro.WechatToken;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authz.annotation.RequiresAuthentication;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.util.ThreadContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import springfox.documentation.annotations.ApiIgnore;
import javax.validation.Valid;
/**
 * Admin controller for the WeChat mini-program integration: exposes the
 * app-id/app-secret settings page plus JSON endpoints for WeChat login,
 * account binding/unbinding, and access-token retrieval.
 */
@Settable
@Api(tags = "小程序接入")
@Controller("weixinAppController")
@RequestMapping("/admin/weixinapp")
@Settable(module = WxmpService.ModuleName, key = WxmpService.Key_AppState, name = "微信小程序-小程序发布状态", placeHolder = "developer为开发版;trial为体验版;formal为正式版")
public class WeixinappController extends BaseController {
    @Autowired
    private WxmpService wxmpService;
    @Autowired
    private WxmpUserService wxmpUserService;
    @Autowired
    private SuperAdminUser superAdminUser;
    @Autowired
    private TenantService tenantService;

    /** Renders the mini-program developer-ID settings page for the tenant. */
    @ApiIgnore
    @RequiresAuthentication
    @Settable(module = WxmpService.ModuleName, key = WxmpService.Key_Setting, name = "微信小程序-开发者ID")
    @RequestMapping("/setting")
    public String setting(ModelMap model, @Valid BaseDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        SettingConfig config = wxmpService.getSetting(dto.getTid());
        model.put("setting", config);
        return "/admin/weixinapp/setting";
    }

    /**
     * Logs a WeChat user in: exchanges the mini-program code for an openid and
     * authenticates a fresh Shiro subject with it.
     * Returns the Shiro session id as the API token; error code -20 means the
     * openid has not yet been bound to a local user.
     */
    @ResponseBody
    @ApiOperation("微信用户登录")
    @PostMapping("/weixin/login.json")
    public ApiResult<String> login(@Valid @ModelAttribute WexinLoginDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        String openid = wxmpUserService.getOpenidByCode(dto.getTid(), dto.getCode());
        WechatToken wxToken = new WechatToken(dto.getTid(), openid);
        Subject subject = (new Subject.Builder()).buildSubject();
        // -1 disables session expiry for WeChat-authenticated sessions.
        subject.getSession().setTimeout(-1);
        ThreadContext.bind(subject);
        try {
            subject.login(wxToken);
        } catch (AuthenticationException e) {
            // -20 tells the client to start the bind flow.
            return new ApiResult<>(-20, "用户未绑定");
        }
        String token = subject.getSession().getId().toString();
        return new ApiResult<>(token);
    }

    /**
     * Binds a WeChat openid to a local operator account.
     * Optionally resolves the tenant from a "site" value (numeric id first,
     * then tenant code), authenticates username/password, and rejects admin
     * users as well as users lacking the "web.login" permission.
     * Returns the Shiro session id on success.
     */
    @ResponseBody
    @ApiOperation("微信用户绑定")
    @PostMapping("/weixin/bind.json")
    public ApiResult<String> bind(@Valid @ModelAttribute WexinBindDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        if (StringUtil.isNotBlank(dto.getSite())) {
            String site = dto.getSite();
            // Only the host label before the first dot identifies the tenant.
            if (site.contains(".")) site = site.substring(0, site.indexOf("."));
            Long tid = NumberUtil.optLong(site);
            Tenant tenant = null;
            if (tid != null) tenant = tenantService.get(tid);
            if (tenant == null) tenant = tenantService.getByCode(site);
            if (tenant == null) throw new IllegalArgumentException("站点不存在!");
            dto.setTid(tenant.getId());
        }
        String openId = wxmpUserService.getOpenidByCode(dto.getTid(), dto.getCode());
        OperatorToken token = new OperatorToken(dto.getTid(), dto.getUsername(), dto.getPassword());
        Subject subject = (new Subject.Builder()).buildSubject();
        // 15-day session timeout, in milliseconds.
        subject.getSession().setTimeout(15 * 24 * 3600 * 1000);
        ThreadContext.bind(subject);
        try {
            subject.login(token);
        } catch (AuthenticationException e) {
            return new ApiResult<>(ResultConstants.ERROR_FORBIDDEN, "用户不存在或者密码错误");
        }
        if (superAdminUser.isAdmin(dto.getUsername()) || !subject.isPermitted("web.login")) {
            subject.logout();
            return new ApiResult<>(ResultConstants.ERROR_FORBIDDEN, "您没有前台登录的权限!");
        }
        User user = User.getLoginUser();
        wxmpUserService.bindUser(dto.getTid(), openId, user.getId());
        return new ApiResult<>(subject.getSession().getId().toString());
    }

    /**
     * Unbinds the current session's WeChat openid from its user, then logs out.
     * NOTE(review): assumes "openId" and "user" were stored as session
     * attributes during login — confirm against the Shiro realm implementation.
     */
    @ResponseBody
    @RequiresAuthentication
    @ApiOperation("微信用户解除绑定")
    @PostMapping("/weixin/unbind.json")
    public ApiResult unbind(@ModelAttribute BaseDto dto) {
        String openId = (String) SecurityUtils.getSubject().getSession().getAttribute("openId");
        User user = (User) SecurityUtils.getSubject().getSession().getAttribute("user");
        wxmpUserService.unbindUser(dto.getTid(), openId, user.getId());
        SecurityUtils.getSubject().logout();
        return new ApiResult();
    }

    /** Returns the tenant's WeChat access token (cached; not force-refreshed). */
    @ResponseBody
    @ApiOperation("微信访问Token")
    @PostMapping("/weixin/token.json")
    public ApiResult<String> token(@Valid @ModelAttribute WexinLoginDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        String token = wxmpService.getToken(dto.getTid(), false);
        return new ApiResult<>(token);
    }

    /** Persists the appId/appSecret mini-program settings for the tenant. */
    @ApiIgnore
    @ResponseBody
    @RequiresAuthentication
    @PostMapping("/setting/save.json")
    public ApiResult fillSetting(@Valid @ModelAttribute SaveSettingDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        SettingConfig config = new SettingConfig(dto.getAppId(), dto.getAppSecret());
        wxmpService.saveSetting(dto.getTid(), config);
        return new ApiResult();
    }
}
|
# completion for some directories
# (_files -W <dir> -/ completes only directories relative to <dir>)
_mcd() { _files -W $PWD -/; }
compdef _mcd mcd

cdgit() { cd ~git/$1; }
_cdgit() { _files -W ~git -/; }
compdef _cdgit cdgit

cdmp() { cd ~git/mediaport3/$1; }
_cdmp() { _files -W ~git/mediaport3 -/; }
compdef _cdmp cdmp

# setup completion
unsetopt menu_complete # do not autoselect the first completion entry
unsetopt flowcontrol
setopt auto_menu # show completion menu on succesive tab press
setopt complete_in_word
setopt always_to_end

# treat no punctuation as part of a word (finer word-by-word line editing)
WORDCHARS=''
# some defaults
zstyle ':completion:*' completer _expand _complete _ignored _approximate
zstyle ':completion:*' use-perl on
# pick up newly installed commands without restarting the shell
zstyle ':completion:*' rehash yes
# allow roughly one typo per three characters in approximate matching
zstyle -e ':completion:*:approximate:*' max-errors 'reply=( $(( ($#PREFIX + $#SUFFIX) / 3 )) )'
zstyle ':completion:*:descriptions' format "- %d -"
# FIX: removed the stray '}' that followed %e. (Both plain formats here are
# later overridden by the colorized styles at the end of this file.)
zstyle ':completion:*:corrections' format "- %d - (errors %e)"
zstyle ':completion:*:default' list-prompt '%S%M matches%s'
zstyle ':completion:*' group-name ''
zstyle ':completion:*:manuals' separate-sections true
zstyle ':completion:*:manuals.(^1*)' insert-sections true
# Use caching so that commands like apt and dpkg complete are useable
zstyle ':completion::complete:*' use-cache 1
zstyle ':completion::complete:*' cache-path $ZSH/cache/

# enable menu completion
zstyle ':completion:*' menu select

# enable verbose completion; descriptions like: '-a -- list entries starting with .'
zstyle ':completion:*' verbose yes
zstyle ':completion:*:-command-:*:' verbose no

# default menu selection for a few commands
zstyle ':completion:*:*:(kill*|man|git|e|c|cd*):*' menu yes

# remove current commandline form possible completions
zstyle ':completion:*:(rm|kill):*' ignore-line yes

# completion order
zstyle ':completion:*:complete:-command-:*:*' tag-order aliases commands functions builtins reserved-words
zstyle ':completion:*:complete:-command-:*:*' group-order aliases functions builtins commands reserved-words

# use known_hosts for hostname completion
# NOTE(review): the computed _global_ssh_hosts/_ssh_hosts arrays are commented
# out of the hosts array below, so only a pre-existing $hosts is used — confirm
# whether the known_hosts parsing is still wanted.
[ -r /etc/ssh/ssh_known_hosts ] && _global_ssh_hosts=(${${${${(f)"$(</etc/ssh/ssh_known_hosts)"}:#[\|]*}%%\ *}%%,*}) || _ssh_hosts=()
[ -r ~/.ssh/known_hosts ] && _ssh_hosts=(${${${${(f)"$(<$HOME/.ssh/known_hosts)"}:#[\|]*}%%\ *}%%,*}) || _ssh_hosts=()
hosts=(
#"$_global_ssh_hosts[@]"
#"$_ssh_hosts[@]"
$hosts
)
zstyle ':completion:*:hosts' hosts $hosts

# complete only the current user's processes, with pid/user/command columns
zstyle ':completion:*:*:*:*:processes' command "ps -u `whoami` -o pid,user,comm -w -w"
zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([0-9]#) ([0-9a-z-]#)*=01;34=0=01'

# Don't complete uninteresting users
zstyle ':completion:*:*:*:*:users' ignored-patterns \
        apache bin cacti canna clamav daemon \
        dbus distcache dovecot fax ftp games gdm gkrellmd gopher \
        hacluster haldaemon halt hsqldb ident junkbust ldap lp mail \
        mailman mailnull mldonkey mongod mysql nagios \
        named netdump news nfsnobody nobody nscd ntp nut nx openvpn \
        operator pcap postfix postgres privoxy pulse pvm quagga radvd \
        rpc rpcuser rpm shutdown squid sshd sync uucp vcsa webmail xfs \
        'qmail*' \
        '_*'
# ... unless we really want to.
zstyle '*' single-ignored show

# Colors
zstyle ':completion:*' list-colors ${(s.:.)LS_COLORS}
# colorize parameters with uncommon names
zstyle ':completion:*:parameters' list-colors "=[^a-zA-Z]*=$color[red]"
# colorize aliases
zstyle ':completion:*:aliases' list-colors "=*=$color[green]"
# colorize _* functions
zstyle ':completion:*:functions' list-colors "=_*=$color[red]"
# colorize original input
zstyle ':completion:*:original' list-colors "=*=$color[red];$color[bold]"
# highlight reserved words like 'select' or 'end'
zstyle ':completion:*:reserved-words' list-colors "=*=$color[red]"
# colorize hostname completion
zstyle ':completion:*:*:*:*:hosts' list-colors "=*=$color[green]"
# colorize username completion
zstyle ':completion:*:*:*:*:users' list-colors "=*=$color[red]"

# description in selection menu
# (these colorized formats supersede the plain ones defined earlier)
zstyle ':completion:*:descriptions' format "- %{${fg_bold[magenta]}%}%d%{${reset_color}%} -"
zstyle ':completion:*:messages' format "- %{${fg_bold[yellow]}%}%d%{${reset_color}%} -"
zstyle ':completion:*:corrections' format "- %{${fg_bold[magenta]}%}%d%{${reset_color}%} - (%{${fg[yellow]}%}errors %e%{${reset_color}%})"
zstyle ':completion:*:default' \
        select-prompt \
        "Match %m%  Line %l%{${fg_no_bold[red]}%} %p%{${reset_color}%}"
zstyle ':completion:*:default' \
        list-prompt \
        "Line %l%{${fg_no_bold[red]}%} Continue?%{${reset_color}%}"
zstyle ':completion:*:warnings' \
        format \
        "- %{${fg_bold[red]}%}no match%{${reset_color}%} - %{${fg_no_bold[yellow]}%}%d%{${reset_color}%}"
zstyle ':completion:*' group-name ''
|
#!/bin/bash
# Download CIFAR10 and CIFAR100 dataset
cd data
wget https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
wget https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz
tar -xvf cifar-10-python.tar.gz
tar -xvf cifar-100-python.tar.gz
rm cifar-10-python.tar.gz
rm cifar-100-python.tar.gz

# Format
cd ../
python data_formatting.py --dataset cifar10
python generate_splits.py --dataset cifar10
python data_formatting.py --dataset cifar100
python generate_splits.py --dataset cifar100

# Format MNIST and SVHN datasets
# NOTE(review): the CIFAR steps above invoke data_formatting.py from the repo
# root while the MNIST/SVHN steps below invoke data/data_formatting.py — one
# of the two paths is presumably wrong; confirm where the scripts live.
# NOTE(review): train_32x32.mat/test_32x32.mat are removed but never
# downloaded here — presumably fetched by the formatting step itself.
python data/data_formatting.py --dataset mnist
python data/generate_splits.py --dataset mnist
rm data/train_32x32.mat
rm data/test_32x32.mat
python data/data_formatting.py --dataset svhn
python data/generate_splits.py --dataset svhn
|
/**
 * Match this string against a Graphite-style glob, where '*' matches exactly
 * one dot-delimited path segment. Returns the match array, or null when the
 * glob does not match.
 * FIX: literal glob characters that are regex metacharacters (+, ?, (, ),
 * [, ], etc.) are now escaped so they cannot change the pattern's meaning.
 */
String.prototype.graphiteGlob = function(glob) {
  var regex = '^';
  for (var i = 0; i < glob.length; i++) {
    var c = glob.charAt(i);
    switch (c) {
      case '*':
        // One metric path segment: anything except a dot.
        regex += '[^.]+';
        break;
      case '.':
        regex += '\\.';
        break;
      default:
        // Escape any regex metacharacter so it matches literally.
        regex += c.replace(/[-[\]{}()+?^$|\\]/g, '\\$&');
    }
  }
  regex += '$';
  return this.match(regex);
}
/*
if (!"stats.dfs4.timer".graphiteGlob('stats.*.timer')) {
console.log('fail 1');
}
if ("stats.dfs4.timer".graphiteGlob('statsd.*.timer')) {
console.log('fail 2');
}
if ("stats.dfs4.foo.timer".graphiteGlob('stats.*.timer')) {
console.log('fail 3');
}
*/
|
-- Create the table for storing people's information
CREATE TABLE People (
    Id INT PRIMARY KEY,
    -- FIX: removed the trailing comma that preceded ')' — a syntax error in
    -- standard SQL. Add further person columns before the closing paren.
    Name VARCHAR(100) NOT NULL
);
-- Create the table for storing contact details
-- NOTE(review): Telefono is stored as INT — leading zeros and '+' prefixes
-- are lost and long numbers can overflow; a VARCHAR column is usually safer.
CREATE TABLE ContactDetails (
    Id INT PRIMARY KEY,
    Telefono INT NOT NULL,
    -- Each contact row belongs to exactly one person.
    PersonaId INT NOT NULL,
    FOREIGN KEY (PersonaId) REFERENCES People(Id)
);
#!/bin/bash
# Computes the top/bottom screen space occupied by running lemonbar instances
# on each display and records the offsets in the orw config.
# Flags:
#   -d    after updating the config, print the offset deltas
#   -p    only print the detected bar geometries
#   -o    only print the per-display offsets
#   -c N  block until exactly N lemonbar processes are running

while getopts :podc: flag; do
	case $flag in
		d) delta=true;;
		p) print=true;;
		o) offset=true;;
		#c) count=$OPTARG;;
		c)
			until (($(ps -C lemonbar -o pid= | wc -l) == OPTARG)); do
				sleep 0.1
			done;;
	esac
done

#if [[ $flag ]]; then
#	if [[ $flag == d ]]; then
#		bar_name=$OPTARG
#	else
#		[[ $flag == p ]] && print=true || offset=true
#	fi
#fi

#[[ $print || $offset || $bar_name ]] || sleep 1
#[[ $print || $offset || $delta ]] || sleep 1
config=~/.config/orw/config

# Build displays=( [index]="min_x max_x" ) from the display_* lines in the
# config: the "xy" line supplies the x origin, the "size" line the width.
eval $(awk -F '[_ ]' '/^display/ {
	if($3 == "xy") o = o " [" $2 "]=\"" $4
	else if($3 == "size") o = o " " $4 "\""
} END { print "displays=( " o " )" }' $config)

# Collect the full command line of every running lemonbar instance.
eval bars=( $(ps -C lemonbar -o args= | awk '{ ab = ab " \"" $0 "\"" } END { print ab }') )

for display in ${!displays[*]}; do
	top_offset=0 bottom_offset=0
	read min max <<< ${displays[$display]}

	# For each bar whose geometry falls inside this display's x range, track
	# the largest bottom-anchored and top-anchored extents.
	while read name position bar_x bar_y bar_widht bar_height frame; do
		#if ((adjustable_width)); then
		#	echo here $name
		#	read bar_width bar_height bar_x bar_y < ~/.config/orw/bar/geometries/$name
		#fi

		current_bar_height=$((bar_y + bar_height + frame))
		#[[ $bar_name == $name ]] && delta_min=$current_bar_height delta_position=$position

		if ((position)); then
			((current_bar_height > top_offset)) && top_offset=$current_bar_height
		else
			((current_bar_height > bottom_offset)) && bottom_offset=$current_bar_height
		fi

		[[ $print && $name ]] && echo $name $position $bar_x $bar_y $bar_widht $bar_height $frame
	done <<< $(for bar in "${bars[@]}"; do
		awk -F '[- ]' '{
			p = ($(NF - 6) == "b") ? 0 : 1
			split($(NF - 3), g, "[x+]")
			x = g[3]; y = g[4]; w = g[1]; h = g[2]
			ff = (p) ? 7 : 9
			fw = ($(NF - ff) == "r") ? $(NF - (ff - 1)) * 2 : 0
			bn = $NF
		} {
			#if(nr && NR == nr + 1 && x >= '$bar_min' && x + w <= '$bar_max') {
			if(x >= '$min' && x + w <= '$max') {
				#aw = (/-w [a-z]/) ? 1 : 0
				#print bn, p, x, y, w, h, aw, fw
				print bn, p, x, y, w, h, fw
			}
		}' <<< "$bar"; done)

	#if ((delta_min)); then
	#	((delta_position)) && delta_max=$top_offset || delta_max=$bottom_offset
	#	echo $((delta_max - delta_min))
	#fi

	[[ $offset ]] && echo display_$display $top_offset $bottom_offset

	# Unless only printing, write the offsets back into the config in place
	# and (with -d) report the signed change of each offset.
	[[ $print ]] ||
		awk -i inplace '
			BEGIN {
				to = '${top_offset:-0}'
				bo = '${bottom_offset:-0}'
			}

			/display_'$display'_offset/ {
				if("'$delta'") {
					#td = ($2 > to) ? $2 - to : to - $2
					#bd = ($3 > bo) ? $3 - bo : bo - $3
					#td = gensub("^-", "", 1, $2 - to)
					#bd = gensub("^-", "", 1, $3 - bo)
					#ts = ($2 > to) ? "+" : "-"
					#bs = ($3 > bo) ? "+" : "-"
					#if($2 > to) ts = "+"
					#if($3 > bo) bs = "+"
					td = $2 - to
					bd = $3 - bo
					if(td > 0) ts = "+"
					if(bd > 0) bs = "+"
				}

				$2 = to
				$3 = bo
			} { print } END { print ts td, bs bd }' $config

	unset current_bar_height {top,bottom}_offset delta_{min,max,position}
done
exit

# NOTE(review): everything below this `exit` is unreachable — it looks like
# earlier work-in-progress kept for reference. Consider deleting it.
#done | awk '{ ab = ab " " $0 } END { print ab }')

eval read -a bar_properties <<< $(for bar in "${bars[@]}"; do
	awk -F '[- ]' '{
		p = ($(NF - 6) == "b") ? 0 : 1
		split($(NF - 3), g, "[x+]")
		x = g[3]; y = g[4]; w = g[1]; h = g[2]
		ff = (p) ? 7 : 9
		fw = ($(NF - ff) == "r") ? $(NF - (ff - 1)) * 2 : 0
		bn = $NF
	} {
		#if(nr && NR == nr + 1 && x >= '$bar_min' && x + w <= '$bar_max') {
		if(x >= '$min' && x + w <= '$max') {
			aw = (/-w [a-z]/) ? 1 : 0
			print "\"" bn, p, x, y, w, h, aw, fw "\""
		}
	}' <<< "$bar"
done | awk '{ ab = ab " " $0 } END { print ab }')
#}' <<< "$bar" | \

echo ${#bar_properties[*]}
exit

	while read name position bar_x bar_y bar_widht bar_height adjustable_width frame; do
		if ((adjustable_width)); then
			read bar_width bar_height bar_x bar_y < ~/.config/orw/bar/geometries/$bar_name
		fi

		current_bar_height=$((bar_y + bar_height + frame))

		if ((position)); then
			((current_bar_height > top_offset)) && top_offset=$current_bar_height
		else
			((current_bar_height > bottom_offset)) && bottom_offset=$current_bar_height
		fi

		echo -n to $top_offset, bo $bottom_offset
	done
	done

	echo $display:
	echo $top_offset
	echo $bottom_offset

	unset current_bar_height {top,bottom}_offset
done

#echo ${#bars[*]}
#echo ${bars[1]}
|
<filename>engines/csv_importer/lib/csv_importer.rb
require 'csv_importer/engine'
# Imports people from a remote CSV feed into CsvImporter::User records.
class WebImport
  def initialize(url)
    @url = url
  end

  # Fetch the CSV from the URL supplied to the constructor and import it.
  # FIX: the original ignored @url and always downloaded a hard-coded
  # assets.cahootify.com URL, making the constructor argument meaningless.
  def call
    csv_string = open(@url).read.force_encoding('UTF-8')
    string_to_users(csv_string)
  end

  # Parses CSV text (expects a header row) and creates a User for each row
  # that has both a name and an email address, then prints a summary.
  # NOTE(review): every failed save is counted as a duplicate — confirm that
  # uniqueness validation is the only way CsvImporter::User.create can fail.
  def string_to_users(csv_string)
    counter = 0
    duplicate_counter = 0
    CSV.parse(csv_string, headers: true, header_converters: :symbol) do |row|
      next unless row[:name].present? && row[:email_address].present?
      user = CsvImporter::User.create row.to_h
      if user.persisted?
        counter += 1
      else
        duplicate_counter += 1
      end
      if user.errors.any?
        p "Email duplicate record: #{user.email_address} - #{user.errors.full_messages.join(',')}"
      end
    end
    p "Imported #{counter} users, #{duplicate_counter} duplicate rows ain't added in total"
  end
end
/* globals CreateHTML, CreateMethodProperty */
// B.2.3.6 String.prototype.fixed ( )
// Legacy Annex B method: wraps the string's HTML representation in a "tt"
// element via the shared CreateHTML helper.
CreateMethodProperty(String.prototype, 'fixed', function fixed() {
	// The this value is the string to wrap; no attribute is added.
	var stringValue = this;
	return CreateHTML(stringValue, "tt", "", "");
});
#!/bin/bash -e
# Breaks (or, with "REVERT", repairs) connectivity from one random
# non-monitoring node to the Kubernetes API server, then waits until the
# node's Ready condition reflects the change.

if [ "$1" == "REVERT" ]; then
  NODES=($(kubectl get nodes -l "purpose notin (monitoring)" | grep -v ' Ready ' | cut -d' ' -f1))
else
  NODES=($(kubectl get nodes -l 'purpose notin (monitoring)' | tail -n +2 | grep ' Ready ' | cut -d' ' -f1))
fi

# Pick one candidate node at random.
INDEX=$(($RANDOM % ${#NODES[@]}))
NODE=${NODES[$INDEX]}

MASTER_ENDPOINT=$(kubectl get endpoints kubernetes -o jsonpath='{.subsets[0].addresses[0].ip}')

if [ "$1" == "REVERT" ]; then
  echo "Reverting breaking node: ${NODE}"
  # Delete every matching REJECT rule — the break step may have run repeatedly.
  gcloud compute ssh ${NODE} -- "while \$(sudo iptables --delete OUTPUT --destination ${MASTER_ENDPOINT} --jump REJECT); do :; done"
  EXPECTED_READY_STATE="True"
else
  echo "Breaking node: ${NODE}"
  gcloud compute ssh ${NODE} -- sudo iptables --insert OUTPUT --destination ${MASTER_ENDPOINT} --jump REJECT
  EXPECTED_READY_STATE="Unknown"
fi

NODE_CURRENT_READY_STATE=$(kubectl get node ${NODE} -o jsonpath='{.status.conditions[?(@.type=="Ready")].status}')
# FIX: quote both operands of the comparison — an empty status previously
# made `[` fail with a syntax error (and the script to exit under -e).
while [ "${NODE_CURRENT_READY_STATE}" != "${EXPECTED_READY_STATE}" ] ; do
  echo "Waiting for node state READY=${EXPECTED_READY_STATE}, current state READY=${NODE_CURRENT_READY_STATE}"
  sleep 3
  NODE_CURRENT_READY_STATE=$(kubectl get node ${NODE} -o jsonpath='{.status.conditions[?(@.type=="Ready")].status}')
done;
echo "Node status: ${NODE_CURRENT_READY_STATE}"
|
import React from 'react';
import { useState, useEffect } from 'react';
import axios from 'axios';
function App() {
const [items, setItems] = useState([]);
useEffect(() => {
axios
.get('https://api.example.com/items')
.then(response => {
const result = response.data;
setItems(result);
})
.catch(error => console.log(error));
}, []);
return (
<div>
<h1>Items</h1>
<ul>
{items.map(item => (
<li>{item.name}</li>
))}
</ul>
</div>
);
}
export default App; |
class Solution:
    # @param n, an integer
    # @return an integer
    def reverseBits(self, n):
        """Reverse the 32 low-order bits of n and return the result."""
        reversed_bits = 0
        for shift in range(32):
            # Append bit `shift` of n (LSB first) to the running result.
            reversed_bits = (reversed_bits << 1) | ((n >> shift) & 1)
        return reversed_bits
def minimax(board, depth, maximizingPlayer):
    """Plain minimax search returning the best achievable evaluation.

    NOTE(review): the recursive calls pass `board` unchanged — `move` is
    never applied before recursing, so every child evaluates the same
    position. Presumably a `makeMove(board, move)` (or apply/undo pair) is
    missing; confirm against the rest of the project.
    NOTE(review): relies on gameOver/evalGameOver/evalState/getLegalMoves
    and `inf` being defined elsewhere (e.g. `from math import inf`).
    """
    # Base Case: Game over
    if gameOver(board):
        return evalGameOver(board)
    # Base Case: Max Depth Reached
    if depth == 0:
        return evalState(board)
    # Maximizing Player
    if maximizingPlayer:
        bestValue = -inf
        for move in getLegalMoves(board):
            # Recursively search child states
            value = minimax(board, depth -1, False)
            # Pick the maximum value
            bestValue = max(bestValue, value)
        return bestValue
    # Minimizing Player
    else:
        bestValue = inf
        for move in getLegalMoves(board):
            # Recursively search child states
            value = minimax(board, depth -1, True)
            # Pick the minimum value
            bestValue = min(bestValue, value)
        return bestValue
//src/services/factory.service.ts
import { Request, Response } from "express";
import Order from "../database/orders/orders.model";
import {
ORDERS_STATUS_PENDING,
ORDERS_STATUS_PRODUCTION,
} from "../constants/miniApi.constants";
import RequestUtil from "./requestUtil.service";
import { Status } from "../database/orders/orders.types";
/**
 * Express handlers used by the factory: fetching freshly-pending orders,
 * recording item serial numbers, and updating an order's status.
 */
export class FactoryService {
  /**
   * Returns all pending orders and moves them into production.
   * FIX: the original awaited Order.find() outside the try block, so a
   * query failure escaped the handler instead of producing the 500 reply.
   */
  public async getNewOrders(req: Request, res: Response): Promise<Response> {
    try {
      const orders = await Order.find({ status: ORDERS_STATUS_PENDING });
      await Promise.all(
        orders.map(async (order) => {
          order.status = ORDERS_STATUS_PRODUCTION;
          await order.save();
        })
      );
      return res
        .status(200)
        .json(RequestUtil.apiSuccessResponse("New orders found.", { orders }));
    } catch (error) {
      return res
        .status(500)
        .json(RequestUtil.apiErrorResponse("Cannot get new orders.", error));
    }
  }

  /**
   * Assigns serial numbers to an order's items.
   * Body fields: orderID (string) and serials (JSON string mapping
   * item._id -> serial number).
   */
  public async setSerial(req: Request, res: Response): Promise<Response> {
    const { miss, extra, ok } = RequestUtil.checkFields(
      ["orderID", "serials"],
      Object.entries(req.body).length !== 0 ? Object.keys(req.body) : []
    );
    if (!ok) {
      return res
        .status(400)
        .json(RequestUtil.apiFieldsErrorReponse(miss, extra));
    }
    const orderID: string = req.body.orderID;
    // NOTE(review): JSON.parse throws on malformed input and is not caught
    // here — confirm upstream validation, or move it inside the try below.
    const serials: Record<string, string> = JSON.parse(req.body.serials);
    try {
      const order = await Order.findById(orderID).exec();
      if (!order) {
        return res
          .status(404)
          .json(RequestUtil.apiErrorResponse("Order not found."));
      }
      try {
        // The per-item updates are synchronous, so a plain loop suffices
        // (the original wrapped them in a needless Promise.all).
        order.items.forEach((item, index) => {
          if (item._id && serials[item._id]) {
            order.items[index].serial = serials[item._id];
          }
        });
        await order.save();
        return res
          .status(200)
          .json(
            RequestUtil.apiSuccessResponse(
              "Serial successfully set.",
              order.toJSON()
            )
          );
      } catch (error) {
        return res
          .status(500)
          .json(
            RequestUtil.apiErrorResponse(
              `Cannot update order ${orderID}`,
              error
            )
          );
      }
    } catch (error) {
      return res
        .status(500)
        .json(
          RequestUtil.apiErrorResponse(`Cannot get order ${orderID}`, error)
        );
    }
  }

  /**
   * Updates an order's status.
   * Body fields: orderID (string) and status (one of the values accepted by
   * checkStatus below).
   */
  public async updateStatus(req: Request, res: Response): Promise<Response> {
    const { miss, extra, ok } = RequestUtil.checkFields(
      ["orderID", "status"],
      Object.entries(req.body).length !== 0 ? Object.keys(req.body) : []
    );
    if (!ok) {
      return res
        .status(400)
        .json(RequestUtil.apiFieldsErrorReponse(miss, extra));
    }
    const orderID: string = req.body.orderID;
    const status: Status = req.body.status;
    if (!this.checkStatus(status)) {
      return res
        .status(500)
        .json(RequestUtil.apiErrorResponse(`Invalide status "${status}".`));
    }
    try {
      const order = await Order.findById(orderID).exec();
      if (!order) {
        return res
          .status(404)
          .json(RequestUtil.apiErrorResponse("Order not found."));
      }
      order.status = status;
      await order.save();
      return res
        .status(200)
        .json(
          RequestUtil.apiSuccessResponse(
            "Order status successfully updated.",
            order.toJSON()
          )
        );
    } catch (error) {
      return res
        .status(500)
        .json(
          RequestUtil.apiErrorResponse(`Cannot update order ${orderID}`, error)
        );
    }
  }

  /** True when `status` is one of the statuses the factory may set. */
  private checkStatus(status: string): boolean {
    return ["Pending", "Production", "Complete", "Shipped"].includes(status);
  }
}
|
#!/bin/bash

########################################
#                                      #
#   Install packages for Ubuntu 14.04  #
#                                      #
########################################

# Marker file: the apt/MySQL provisioning block below runs only once.
IS_INSTALLED=".installed"

# Setup Timezone
TIMEZONE="Asia/Taipei"

# Setup packages and version
PACKAGES_LIST="
apache2
curl
git
mysql-server
php5
php5-mysql
php5-xdebug
vim
"

# Flatten the package list into a single space-separated string.
PACKAGES=""
for package in $PACKAGES_LIST
do
    PACKAGES="$PACKAGES $package"
done

# is root?
if [ "`whoami`" != "root" ]; then
    echo "You may use root permission!"
    exit 1
fi

# set time zone
ln -sf /usr/share/zoneinfo/$TIMEZONE /etc/localtime

# update time
ntpdate time.stdtime.gov.tw

if [ ! -e $IS_INSTALLED ];then
    touch $IS_INSTALLED

    # update
    apt-get update -y

    # Preseed the MySQL root password so the install is non-interactive.
    export DEBIAN_FRONTEND=noninteractive
    debconf-set-selections <<< 'mysql-server-5.5 mysql-server/root_password password password'
    debconf-set-selections <<< 'mysql-server-5.5 mysql-server/root_password_again password password'

    # install packages
    apt-get install -y $PACKAGES

    # enable rewrite
    a2enmod rewrite
    sed -i 's/AllowOverride None/AllowOverride All/g' /etc/apache2/apache2.conf

    # set MySQL password and domain
    mysql -uroot -ppassword -e 'USE mysql; UPDATE `user` SET `Host`="%" WHERE `User`="root" AND `Host`="localhost"; DELETE FROM `user` WHERE `Host` != "%" AND `User`="root"; FLUSH PRIVILEGES;'
    mysql -uroot -ppassword -e 'CREATE DATABASE `default` DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_unicode_ci;'
    mysql -uroot -ppassword -e 'CREATE DATABASE `testing` DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_unicode_ci;'

    # modified mysql config: listen on all interfaces, not just localhost
    sed -i 's/127\.0\.0\.1/0\.0\.0\.0/g' /etc/mysql/my.cnf

    # modified php.ini
    sed -i "s/^;date.timezone =.*/date.timezone = $TIMEZONE/g" /etc/php5/apache2/php.ini

    # install composer
    curl -sS https://getcomposer.org/installer | php
    mv composer.phar /usr/local/bin/composer
fi

# modified php.ini: full error reporting for development
sed -i "s/^error_reporting =.*/error_reporting = E_ALL \| E_STRICT/g" /etc/php5/apache2/php.ini
sed -i "s/^display_errors =.*/display_errors = On/g" /etc/php5/apache2/php.ini
sed -i "s/^display_startup_errors =.*/display_startup_errors = On/g" /etc/php5/apache2/php.ini

# FIX: removed the stray "modified nginx default site" step — this script
# installs apache2 (not nginx) and $DEFAULT_SITE was never defined anywhere
# in it, so the step only wrote an empty file into a non-existent directory.

# restart services
service apache2 restart
service mysql restart
|
import os
import sys
from os import path

# Walk up the directory tree until the 'Heron' package directory is found, so
# its grandparent can be put on sys.path for the `from Heron import ...` below.
current_dir = path.dirname(path.abspath(__file__))
while path.split(current_dir)[-1] != r'Heron':
    parent_dir = path.dirname(current_dir)
    if parent_dir == current_dir:
        # FIX: path.dirname() of a filesystem root returns the root itself,
        # so the original loop never terminated when this file was not
        # located under a 'Heron' directory.
        raise RuntimeError("Could not find a 'Heron' directory above %s" % __file__)
    current_dir = parent_dir
sys.path.insert(0, path.dirname(current_dir))

from Heron import general_utils as gu

# Absolute path of this script; used below to locate the worker executable.
Exec = os.path.abspath(__file__)

# <editor-fold desc="The following code is called from the GUI process as part of the generation of the node.
# It is meant to create node specific elements (not part of a generic node).
# This is where a new nodes individual elements should be defined">

"""
Properties of the generated Node
"""
BaseName = 'Arducam Quadrascopic Camera'
NodeAttributeNames = ['Parameters', 'Frame Out']
NodeAttributeType = ['Static', 'Output']
ParameterNames = ['Cam Index', 'FourCC', 'Exposure', 'Gain', 'Trigger Mode', 'Get Sub Camera', 'Sub Camera Scale',
                  'Save File', 'Timestamp', 'FPS of File']
ParameterTypes = ['int', 'str', 'int', 'int', 'bool', 'list', 'float', 'str', 'bool', 'int']
ParametersDefaultValues = [0, 'FMP4', -1, -1, False, ['0', '1', '2', '3', '4'], 1.0, '', False, 40]
WorkerDefaultExecutable = os.path.join(os.path.dirname(Exec), 'arducam_quadrascopic_camera_worker.py')
# </editor-fold>

# <editor-fold desc="The following code is called as its own process when the editor starts the graph">
if __name__ == "__main__":
    camera_com = gu.start_the_source_communications_process(NodeAttributeType, NodeAttributeNames)
    gu.register_exit_signals(camera_com.on_kill)
    camera_com.start_ioloop()
# </editor-fold>
|
#!/bin/bash

# TODOs
# 1. Add validation for each steps and abort the test if steps fails

# Build environment `Docker image` has all prerequisite setup and credentials are being passed using AWS system manager
# Region and Kubernetes version default to us-east-1 / 1.13 when unset.
CLUSTER_REGION=${CLUSTER_REGION:-us-east-1}
CLUSTER_VERSION=${CLUSTER_VERSION:-1.13}

# Define the list of optional subnets for the EKS test cluster
CLUSTER_PUBLIC_SUBNETS=${CLUSTER_PUBLIC_SUBNETS:-}
CLUSTER_PRIVATE_SUBNETS=${CLUSTER_PRIVATE_SUBNETS:-}

# Verbose trace of commands, helpful since test iteration takes a long time.
set -x
# Delete every SageMaker operator CRD object so no jobs are left running.
function delete_tests {
    # Stop jobs so we can do PrivateLink test.
    kubectl delete hyperparametertuningjob --all
    kubectl delete trainingjob --all
    kubectl delete batchtransformjob --all
    kubectl delete hostingdeployment --all
    kubectl delete model --all
}
# A function to delete cluster, if cluster was not launched this will fail, so test will fail ultimately too
function cleanup {
    # We want to run every command in this function, even if some fail.
    set +e

    # Dump operator logs for debugging before tearing anything down.
    echo "Controller manager logs:"
    kubectl -n sagemaker-k8s-operator-system logs "$(kubectl get pods -n sagemaker-k8s-operator-system | grep sagemaker-k8s-operator-controller-manager | awk '{print $1}')" manager

    # Describe, if the test fails the Additional field might have more helpful info.
    echo "trainingjob description:"
    kubectl describe trainingjob

    delete_tests

    if [ -z "${USE_EXISTING_CLUSTER}" ]
    then
        # Tear down the cluster if we set it up.
        echo "USE_EXISTING_CLUSTER is false, tearing down cluster we created."
        eksctl delete cluster --name "${cluster_name}" --region "${CLUSTER_REGION}"
    fi
}
# Set the trap to clean up resources
# In case of error or normal exit delete the cluster
trap cleanup EXIT
# If any command fails, exit the script with an error code.
set -e
# Output the commit SHA for logging sake
echo "Launching canary test for ${COMMIT_SHA}"
# Launch EKS cluster if we need to and define cluster_name,CLUSTER_REGION.
echo "Launching the cluster"
cluster_name="sagemaker-k8s-canary-"$(date '+%Y-%m-%d-%H-%M-%S')""
if [ -z "${USE_EXISTING_CLUSTER}" ]
then
eksctl_args=( --nodes 1 --node-type=c5.xlarge --timeout=40m --region "${CLUSTER_REGION}" --auto-kubeconfig --version "${CLUSTER_VERSION}" )
[ "${CLUSTER_PUBLIC_SUBNETS}" != "" ] && eksctl_args+=( --vpc-public-subnets="${CLUSTER_PUBLIC_SUBNETS}" )
[ "${CLUSTER_PRIVATE_SUBNETS}" != "" ] && eksctl_args+=( --vpc-private-subnets="${CLUSTER_PRIVATE_SUBNETS}" )
eksctl create cluster "${cluster_name}" "${eksctl_args[@]}"
echo "Setting kubeconfig"
export KUBECONFIG="/root/.kube/eksctl/clusters/${cluster_name}"
else
cluster_name="non-ephemeral-cluster"
aws eks update-kubeconfig --name "${cluster_name}" --region "${CLUSTER_REGION}"
fi
# Download the CRD
tar -xf sagemaker-k8s-operator.tar.gz
# jump to the root dir of operator
pushd sagemaker-k8s-operator
# Setup the PATH for smlogs
mv smlogs-plugin/linux.amd64/kubectl-smlogs /usr/bin/kubectl-smlogs
# Goto directory that holds the CRD
pushd sagemaker-k8s-operator-install-scripts
# Since OPERATOR_AWS_SECRET_ACCESS_KEY and OPERATOR_AWS_ACCESS_KEY_ID defined in task definition, we will not create new user
./setup_awscreds
echo "Deploying the operator"
kustomize build config/default | kubectl apply -f -
popd
popd
echo "Waiting for controller pod to be Ready"
# Wait to increase chance that pod is ready
# TODO: Should upgrade kubectl to version that supports `kubectl wait pods --all`
sleep 60
# Run the integration test file
./run_all_sample_canary_tests.sh

delete_tests

# Send results back to results bucket.
# Quote every expansion: an unquoted $FILE_NAME/$RESULT_BUCKET would be
# word-split or glob-expanded by the shell; also prefer $(...) to backticks.
FILE_NAME=$(TZ=UTC date +%Y-%m-%d-%H-%M-%S)
touch "/tmp/${FILE_NAME}"
aws s3 cp "/tmp/${FILE_NAME}" "s3://${RESULT_BUCKET}/${CLUSTER_REGION}/${FILE_NAME}"
|
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.ui.factory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.eclipse.draw2d.ColorConstants;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.editparts.AbstractConnectionEditPart;
import org.junit.Before;
import org.junit.Test;
import com.archimatetool.editor.ui.factory.diagram.DiagramConnectionUIProvider;
import com.archimatetool.model.IArchimatePackage;
import junit.framework.JUnit4TestAdapter;
/**
 * UI-provider tests for plain diagram (line) connections.
 */
public class LineConnectionUIProviderTests extends AbstractGraphicalObjectUIProviderTests {

    /** Allows these JUnit 4 tests to run under a JUnit 3 runner. */
    public static junit.framework.Test suite() {
        return new JUnit4TestAdapter(LineConnectionUIProviderTests.class);
    }

    /** Runs before each test (despite the name), wiring up the provider under test. */
    @Before
    public void runOnceBeforeAllTests() {
        provider = new DiagramConnectionUIProvider();
        expectedClass = IArchimatePackage.eINSTANCE.getDiagramModelConnection();
    }

    // Consistency fix: the two overrides below were missing @Test while their
    // sibling overrides carried it; annotate all four uniformly.
    @Override
    @Test
    public void testCreateEditPart() {
        EditPart editPart = provider.createEditPart();
        assertTrue(editPart instanceof AbstractConnectionEditPart);
    }

    @Override
    @Test
    public void testGetDefaultColor() {
        assertEquals(ColorConstants.black, getProvider().getDefaultColor());
    }

    @Override
    @Test
    public void testGetDefaultLineColor() {
        assertEquals(ColorConstants.black, getProvider().getDefaultLineColor());
    }

    @Override
    @Test
    public void testGetDefaultSize() {
        assertEquals(new Dimension(-1, -1), getProvider().getDefaultSize());
    }
}
|
<filename>src/app/loggedin/league/view/index.ts<gh_stars>0
import { ViewLeagueComponent } from './view-league.component';
import { LinkSpotifyForm } from './link-spotify.form';
// Components bundled together for the "view league" feature; consumed by the
// declaring module/route configuration.
export const VIEW_LEAGUE_COMPONENTS: Array<any> = [
  ViewLeagueComponent, LinkSpotifyForm
];
|
<gh_stars>1-10
package dev.arkav.openoryx.game.models;
/**
 * Mutable holder for the client's current game-session state.
 * All fields are public and written directly by the networking code.
 */
public class GameState {
    /** Client build version reported to the server. */
    public String buildVersion;
    public int characterId;
    public int gameId;
    public byte[] key;
    public int keyTime;
    public String connectionGuid;

    /**
     * Creates a state with the placeholder values used before any real
     * session data has arrived from the server.
     */
    public GameState(String buildVersion) {
        this.buildVersion = buildVersion;
        this.connectionGuid = "";
        this.key = new byte[0];
        this.keyTime = -1;
        this.gameId = -2;
        this.characterId = 0;
    }
}
|
package org.usfirst.frc5112.Robot2017V3.commands;
import edu.wpi.first.wpilibj.command.Command;
/**
 * A command that performs no work: every lifecycle hook immediately reports
 * completion, so the scheduler treats it as instantly finished. Useful as a
 * placeholder/default command binding.
 */
public class DoNothingAtAll extends Command {

    public DoNothingAtAll() {
        // No subsystem requirements: this command touches no hardware.
    }

    protected void initialize() {
        end();
    }

    protected void execute() {
        end();
    }

    protected boolean isFinished() {
        // Finish on the very first scheduler pass.
        return true;
    }

    protected void end() {
        // Nothing to clean up.
    }

    protected void interrupted() {
        end();
    }
}
|
require 'rubygems'
require 'bundler/setup'
Bundler.require(:default)
require "mygame/version"
require "mygame/models/player"
require "mygame/controllers/window"
require "mygame/models/zorder"
require "mygame/models/star"
module Mygame
  class << self
    # Entry point: build the game window and hand control to its event loop.
    def start
      Window.new.show
    end
  end
end
|
'use strict'
var test = require('tape')
, extract = require('../lib')
test('works without params: "y=7"',function(t) {
var parsed = extract('[]y=7')
t.deepEqual( parsed.content, 'y=7', 'content = "y=7"')
t.deepEqual( parsed.params, {}, 'params = ' + JSON.stringify({}) )
t.end()
})
test('extracts a parameter from a string: "[width=200][name=some equation]y=7"',function(t) {
var parsed = extract('[width=200][name=some equation]y=7')
t.deepEqual( parsed.content, 'y=7', 'content = "y=7"')
t.deepEqual( parsed.params, {width: '200', name: 'some equation'}, 'params = ' + JSON.stringify({width: '200', name: 'some equation'}) )
t.end()
})
test('a leading space yields no parameters: " [width=200][name=some equation]y=7"',function(t) {
var parsed = extract(' [width=200][name=some equation]y=7')
t.deepEqual( parsed.content, ' [width=200][name=some equation]y=7', 'content = " [width=200][name=some equation]y=7"')
t.deepEqual( parsed.params, {}, 'params = ' + JSON.stringify({}) )
t.end()
})
test('allows escaped hashes: "[width=\\[200\\]][name=some equation]y=7"',function(t) {
var parsed = extract('[width=\\[200\\]][name=some equation]y=7')
t.deepEqual( parsed.content, 'y=7', 'content = "y=7"')
t.deepEqual( parsed.params, {width: '[200]', name: 'some equation'}, 'params = ' + JSON.stringify({width: '[200]', name: 'some equation'}) )
t.end()
})
test('allows bare params: "[displaystyle]y=7"',function(t) {
var parsed = extract('[displaystyle]y=7')
t.deepEqual( parsed.content, 'y=7', 'content = "y=7"')
t.deepEqual( parsed.params, {displaystyle: true}, 'params = ' + JSON.stringify({displaystyle:true}) )
t.end()
})
// Bug fix: the test title and expectation message promise a trailing
// [foo=bar] parameter, but the original called extract('[displaystyle]y=7')
// and asserted params without foo — a copy-paste of the previous test.
test('allows bare params followed by regular: "[displaystyle][foo=bar]y=7"',function(t) {
  var parsed = extract('[displaystyle][foo=bar]y=7')
  t.deepEqual( parsed.content, 'y=7', 'content = "y=7"')
  t.deepEqual( parsed.params, {displaystyle: true, foo: 'bar'}, 'params = ' + JSON.stringify({displaystyle: true, foo: 'bar'}) )
  t.end()
})
test('allows empty hashes: "[]y=7"',function(t) {
var parsed = extract('[]y=7')
t.deepEqual( parsed.content, 'y=7', 'content = "y=7"')
t.deepEqual( parsed.params, {}, 'params = ' + JSON.stringify({}) )
t.end()
})
test('handles hashes with a single-character value: "[name=x]y=7"',function(t) {
var parsed = extract('[name=x]y=7')
t.deepEqual( parsed.content, 'y=7', 'content = "y=7"')
t.deepEqual( parsed.params, {name: 'x'}, 'params = ' + JSON.stringify({name: 'x'}) )
t.end()
})
|
import React from 'react';
import AirdropView from '~components/airdrop/Airdrop';
// Page component for the airdrop claim flow: heading, short description and
// the claim widget (AirdropView) in a width-constrained container.
export function AirdropPage() {
  return (
    <div className="airdrop">
      <div className="title text-center text-3xl pb-3 text-white font-semibold">
        Airdrop
      </div>
      <div className="describe text-center text-sm pb-5 text-white">
        Claim Ref token
      </div>
      {/* Narrow on desktop, near-full-width on small screens */}
      <section className="w-1/3 md:w-5/6 xs:w-11/12 m-auto">
        <AirdropView />
      </section>
    </div>
  );
}
|
<filename>src/lib/utils.ts
import {
baseTestResultType, FamousList,
IDescList,
IDescWithRange,
IDescWithStatus, IFamous,
IOctant,
ITendency
} from '../types/types';
import { octantCodeList } from './UserResult';
/**
 * Returns the description matching the intensity value, plus a band status.
 * @param value - intensity value
 * @param descList - list of descriptions from the API
 */
export function getDescByRange(value: number, descList: IDescList): IDescWithStatus {
  let desc = '';
  let index = null;
  // Find the first option whose (lower, upper] range contains the value.
  // eslint-disable-next-line functional/no-loop-statement
  for (let i = 0; i < descList.options.length; i++) {
    if (value > (descList.options[i].range[0]) && value <= (descList.options[i].range[1])) {
      desc = descList.options[i].desc;
      index = i;
      break;
    }
  }
  // Status: 0 for the lowest band, 2 for the highest band, 1 otherwise.
  // Bug fix: the original compared index against options.length, which an
  // index can never equal, so status 2 was unreachable; the last option is
  // at options.length - 1. (index stays null when no range matched → status 1.)
  const status = index === 0 ? 0 : (index === descList.options.length - 1 ? 2 : 1);
  return { title: descList.title, desc, status };
}
/**
 * Returns the index of the first description whose (lower, upper] range
 * contains the intensity value, or -1 when no range matches.
 * @param value - intensity value
 * @param descList - list of descriptions from the API
 */
export function getIndexByRange(value: number, descList: readonly IDescWithRange[]): number {
  return descList.findIndex(({ range }) => value > range[0] && value <= range[1]);
}
/**
 * Picks the key description matching an intensity value.
 * @param value - intensity value
 * @param results - candidate descriptions, ordered from weakest to strongest
 * @param range - breakpoints compared against the intensity; defaults to [.2, .5, .8] (i.e. 20%, 50%, 80%)
 */
export function getKeyResult(value: number, results: readonly string[], range = [ .2, .5, .8 ]): string {
  // Guard-clause form: return as soon as the value falls below a breakpoint.
  if (value < range[0]) return results[0];
  if (value < range[1]) return results[1];
  if (value < range[2]) return results[2];
  return results[3];
}
/**
 * Computes the user's result for a 5x5 test.
 * Verbal labels are attached elsewhere via language-dependent mapping.
 * @param testResult
 * @returns The user's psychological profile (8 tendencies, e.g. anxiety, lability, etc.).
 */
export function getPersonProfile(testResult: baseTestResultType): readonly ITendency[] {
  // For each scale, accumulate positive answers and the magnitude of
  // negative answers separately: [negative, positive].
  const values = testResult.map(item => {
    let pos = 0;
    let neg = 0;
    item.forEach(value => {
      if (value > 0) {
        pos += value;
      } else {
        neg += value * -1;
      }
    });
    return [ neg, pos ];
  });
  // Map scale halves onto the 8 fixed tendency slots. The pairing below is
  // the test's scoring key — NOTE(review): verify against the test spec.
  return [
    { index: 0, value: values[1][0] },
    { index: 1, value: values[4][0] },
    { index: 2, value: values[0][0] },
    { index: 3, value: values[2][1] },
    { index: 4, value: values[1][1] },
    { index: 5, value: values[4][1] },
    { index: 6, value: values[0][1] },
    { index: 7, value: values[2][0] }
  ];
}
/**
 * Computes the user's portrait for a 5x5 test.
 * Verbal labels are attached elsewhere via language-dependent mapping.
 * @param profile
 * @returns The user's psychological portrait (8 octants/sectors, e.g. trendsetter, conservative, etc.).
 */
export function getPersonPortrait(profile: readonly ITendency[]): readonly IOctant[] {
  const codeList = octantCodeList;
  const axisValues = profile.map(item => item.value);
  const axisValuesReversed = [ ...axisValues ].reverse();
  // sinus of 45 degrees
  const sin45 = 0.7071;
  // Each octant value is the triangle area spanned by two adjacent axes:
  // a * b * sin(45°) / 2.
  const octantsValues = [];
  // eslint-disable-next-line functional/no-loop-statement
  for (let i = 0; i < axisValuesReversed.length; i++) {
    if (i === axisValues.length - 1) {
      // Wrap-around: the last octant pairs the final axis with the first.
      // eslint-disable-next-line functional/immutable-data
      octantsValues.unshift(axisValuesReversed[i] * axisValuesReversed[0] * sin45 / 2);
      break;
    }
    // eslint-disable-next-line functional/immutable-data
    octantsValues.push(axisValuesReversed[i] * axisValuesReversed[i + 1] * sin45 / 2);
  }
  //octant names begin with aggression and go in reverse order. So, change order values
  const swappedValues = [ ...octantsValues.slice(4), ...octantsValues.slice(0, 4) ];
  // Round to 2 decimals for presentation and attach the octant codes.
  return swappedValues.map((value, i) => {
    return { code: codeList[i], index: i, value: Number(value.toFixed(2)) };
  });
}
/**
 * Picks which famous person matches the given octant's intensity.
 * @param octant - the octant to look up
 * @param famousList - list of famous people from the API
 * @param sex - user's sex: 0 - male, 1 - female, 2 - other
 * @param range - breakpoints splitting the intensity into three tiers; defaults to [0, 42.35, 140, 1000]
 */
export function getFamous(octant: IOctant, famousList: FamousList, sex = 0, range = [ 0, 42.35, 140, 1000 ]): IFamous | null {
  //sex: 0 - male, 1 - female, 2 - some else
  const value = octant.value;
  // Out-of-range intensities have no matching entry.
  if (value < range[0] || value > range[3]) {
    return null;
  }
  if (value >= range[0] && value < range[1]) {
    return {
      person: famousList[octant.index][0][sex],
      // Picture id encodes octant index, tier (0-2) and sex.
      picture: `${octant.index}_0_${sex}`
    };
  } else if (value >= range[1] && value < range[2]) {
    return {
      person: famousList[octant.index][1][sex],
      picture: `${octant.index}_1_${sex}`
    };
  }
  return {
    person: famousList[octant.index][2][sex],
    picture: `${octant.index}_2_${sex}`
  };
}
|
import {Component, OnInit, ViewChild} from '@angular/core';
import {SchedulingService} from '@app/scheduling/scheduling.service';
import { extend } from '@syncfusion/ej2-base';
import {ScheduleComponent} from '@syncfusion/ej2-ng-schedule';
import {scheduleData} from '@app/scheduling/datasource';
import { EventSettingsModel} from '@syncfusion/ej2-ng-schedule';
import { ChangeEventArgs } from '@syncfusion/ej2-ng-calendars';
@Component({
  selector: 'app-scheduling',
  templateUrl: './scheduling.component.html',
  styleUrls: ['./scheduling.component.scss']
})
export class SchedulingComponent implements OnInit {

  // Date the scheduler opens on (Feb 15, 2018 — JS months are 0-based).
  public selectedDate: Date = new Date(2018, 1, 15);

  // Copy the demo data so UI edits don't mutate the shared fixture —
  // presumably a deep copy (4th arg of extend); confirm with ej2-base docs.
  public eventSettings: EventSettingsModel = { dataSource: <Object[]>extend([], scheduleData, null, true) };

  @ViewChild('schedule')
  public schedule: ScheduleComponent;

  // Keeps the schedule view in sync with the date picker.
  onDateChange(args: ChangeEventArgs): void {
    this.schedule.selectedDate = args.value;
  }

  constructor(private schedulingService: SchedulingService) { }

  ngOnInit() {
  }
}
|
def add(num1, num2):
    """Return the sum of the two operands."""
    total = num1 + num2
    return total
def subtract(num1, num2):
    """Return num1 minus num2."""
    difference = num1 - num2
    return difference
def multiply(num1, num2):
    """Return the product of the two operands."""
    product = num1 * num2
    return product
def divide(num1, num2):
    """Return num1 divided by num2 (true division).

    Raises ZeroDivisionError when num2 is 0, exactly like the `/` operator.
    """
    quotient = num1 / num2
    return quotient
print("Welcome to the interactive command line calculator")

# Table-driven dispatch: maps the operation name to (function, prompt verb),
# replacing one near-identical branch per operation.
OPERATIONS = {
    "add": (add, "added"),
    "subtract": (subtract, "subtracted"),
    "multiply": (multiply, "multiplied"),
    "divide": (divide, "divided"),
}

while True:
    print("Enter the operation - add, subtract, multiply, divide:")
    operation = input()
    if operation in OPERATIONS:
        func, verb = OPERATIONS[operation]
        print("Enter the two numbers to be %s:" % verb)
        try:
            num1 = float(input())
            num2 = float(input())
        except ValueError:
            # Robustness fix: non-numeric input used to crash the whole loop.
            print("Invalid number!")
            continue
        try:
            print("Result:", func(num1, num2))
        except ZeroDivisionError:
            # Robustness fix: dividing by zero used to crash the whole loop.
            print("Cannot divide by zero!")
    else:
        print("Invalid operation!")
<reponame>jmosro/trazactivo<filename>src/main/java/com/ipec/trazactivo/repository/ActivoDao.java
package com.ipec.trazactivo.repository;
import com.ipec.trazactivo.model.Activo;
import com.ipec.trazactivo.model.ActivoPK;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data JPA repository for {@link Activo} entities keyed by the
 * composite key {@link ActivoPK}. CRUD and paging operations are inherited
 * from {@link JpaRepository}; no custom queries are declared.
 */
public interface ActivoDao extends JpaRepository<Activo, ActivoPK>{
}
|
<gh_stars>1-10
// Default options for the step-wizard plugin; consumers override any subset.
export default {
  // Zero-based index of the step shown first.
  startAt: 0,
  showBackButton: true,
  showFooterButtons: true,
  // Lifecycle hooks; $.noop assumes a global jQuery `$` is in scope.
  onInit: $.noop,
  onDestroy: $.noop,
  onFinish: $.noop,
  // Called on step change — presumably returning false vetoes the change;
  // confirm with the plugin's dispatch code.
  onChange() { return true; },
  // Selectors locating the wizard's parts inside the host element.
  stepSelector: '.step-steps > li',
  contentSelector: '.step-content > .step-tab-panel',
  footerSelector: '.step-footer',
  buttonSelector: 'button',
  // CSS state classes applied to step items.
  activeClass: 'active',
  doneClass: 'done',
  errorClass: 'error',
};
|
<gh_stars>1-10
package api
import (
"cf"
"cf/models"
"cf/net"
)
// FakeUserRepository is a test double for the user repository: each method
// records the arguments it was called with in the matching fields below, and
// canned results/behaviors are configured through the remaining fields.
type FakeUserRepository struct {
	// FindByUsername: recorded input plus canned result / not-found toggle.
	FindByUsernameUsername   string
	FindByUsernameUserFields models.UserFields
	FindByUsernameNotFound   bool

	// ListUsers*: recorded scope plus canned role -> users results.
	ListUsersOrganizationGuid string
	ListUsersSpaceGuid        string
	ListUsersByRole           map[string][]models.UserFields

	// Create: recorded credentials plus already-exists toggle.
	CreateUserUsername string
	CreateUserPassword string
	CreateUserExists   bool

	// Delete: recorded target.
	DeleteUserGuid string

	// Org role set/unset: recorded arguments.
	SetOrgRoleUserGuid         string
	SetOrgRoleOrganizationGuid string
	SetOrgRoleRole             string

	UnsetOrgRoleUserGuid         string
	UnsetOrgRoleOrganizationGuid string
	UnsetOrgRoleRole             string

	// Space role set/unset: recorded arguments.
	SetSpaceRoleUserGuid  string
	SetSpaceRoleOrgGuid   string
	SetSpaceRoleSpaceGuid string
	SetSpaceRoleRole      string

	UnsetSpaceRoleUserGuid  string
	UnsetSpaceRoleSpaceGuid string
	UnsetSpaceRoleRole      string
}
func (repo *FakeUserRepository) FindByUsername(username string) (user models.UserFields, apiResponse net.ApiResponse) {
repo.FindByUsernameUsername = username
user = repo.FindByUsernameUserFields
if repo.FindByUsernameNotFound {
apiResponse = net.NewNotFoundApiResponse("User not found")
}
return
}
func (repo *FakeUserRepository) ListUsersInOrgForRole(orgGuid string, roleName string) ([]models.UserFields, net.ApiResponse) {
repo.ListUsersOrganizationGuid = orgGuid
return repo.ListUsersByRole[roleName], net.NewApiResponseWithStatusCode(200)
}
func (repo *FakeUserRepository) ListUsersInSpaceForRole(spaceGuid string, roleName string) ([]models.UserFields, net.ApiResponse) {
repo.ListUsersSpaceGuid = spaceGuid
return repo.ListUsersByRole[roleName], net.NewApiResponseWithStatusCode(200)
}
func (repo *FakeUserRepository) Create(username, password string) (apiResponse net.ApiResponse) {
repo.CreateUserUsername = username
repo.CreateUserPassword = password
if repo.CreateUserExists {
apiResponse = net.NewApiResponse("User already exists", cf.USER_EXISTS, 400)
}
return
}
func (repo *FakeUserRepository) Delete(userGuid string) (apiResponse net.ApiResponse) {
repo.DeleteUserGuid = userGuid
return
}
func (repo *FakeUserRepository) SetOrgRole(userGuid, orgGuid, role string) (apiResponse net.ApiResponse) {
repo.SetOrgRoleUserGuid = userGuid
repo.SetOrgRoleOrganizationGuid = orgGuid
repo.SetOrgRoleRole = role
return
}
func (repo *FakeUserRepository) UnsetOrgRole(userGuid, orgGuid, role string) (apiResponse net.ApiResponse) {
repo.UnsetOrgRoleUserGuid = userGuid
repo.UnsetOrgRoleOrganizationGuid = orgGuid
repo.UnsetOrgRoleRole = role
return
}
func (repo *FakeUserRepository) SetSpaceRole(userGuid, spaceGuid, orgGuid, role string) (apiResponse net.ApiResponse) {
repo.SetSpaceRoleUserGuid = userGuid
repo.SetSpaceRoleOrgGuid = orgGuid
repo.SetSpaceRoleSpaceGuid = spaceGuid
repo.SetSpaceRoleRole = role
return
}
func (repo *FakeUserRepository) UnsetSpaceRole(userGuid, spaceGuid, role string) (apiResponse net.ApiResponse) {
repo.UnsetSpaceRoleUserGuid = userGuid
repo.UnsetSpaceRoleSpaceGuid = spaceGuid
repo.UnsetSpaceRoleRole = role
return
}
|
use quick_xml::Reader;
use quick_xml::events::Event;
use std::io::BufRead;
/// A single GPS track sample: position in degrees plus elevation.
///
/// Bug fix: `main` formats a `Trackpoint` with `{:?}`, which requires the
/// `Debug` trait; without this derive the file does not compile.
#[derive(Debug)]
struct Trackpoint {
    latitude: f64,
    longitude: f64,
    elevation: f64,
}
/// Consumes XML events from `reader` until the closing `</Trackpoint>` tag,
/// collecting the Latitude/Longitude/Elevation child values.
///
/// Errors when the element ends before all three fields were seen, or on EOF.
fn parse_trackpoint<R: BufRead>(reader: &mut Reader<R>, buf: &mut Vec<u8>) -> Result<Trackpoint, String> {
    // Each field is unknown until its element has been parsed.
    let mut latitude: Option<f64> = None;
    let mut longitude: Option<f64> = None;
    let mut elevation: Option<f64> = None;
    loop {
        buf.clear();
        match reader.read_event(buf) {
            Ok(Event::Start(ref e)) => {
                // Tag names are matched case-sensitively against the bytes below.
                match e.name() {
                    b"Latitude" => {
                        latitude = Some(parse_value(reader, buf)?);
                    }
                    b"Longitude" => {
                        longitude = Some(parse_value(reader, buf)?);
                    }
                    b"Elevation" => {
                        elevation = Some(parse_value(reader, buf)?);
                    }
                    _ => (),
                }
            }
            // End of the trackpoint: succeed only if every field was found.
            Ok(Event::End(ref e)) if e.name() == b"Trackpoint" => {
                if let (Some(lat), Some(lon), Some(elev)) = (latitude, longitude, elevation) {
                    return Ok(Trackpoint {
                        latitude: lat,
                        longitude: lon,
                        elevation: elev,
                    });
                } else {
                    return Err("Incomplete trackpoint data".to_string());
                }
            }
            Ok(Event::Eof) => {
                return Err("Unexpected end of file".to_string());
            }
            // Text/whitespace/other events are skipped. NOTE(review): read
            // errors (Err(_)) also land here and are silently ignored, which
            // could loop forever on a persistent error — confirm intent.
            _ => (),
        }
    }
}
/// Reads the text content following the element just opened and parses it as f64.
///
/// NOTE(review): this discards one event, then calls `read_text(b"Value", ..)`
/// even though the surrounding elements are Latitude/Longitude/Elevation, not
/// <Value> — looks adapted from a TCX-style schema; verify against the
/// quick-xml version in use before relying on it.
fn parse_value<R: BufRead>(reader: &mut Reader<R>, buf: &mut Vec<u8>) -> Result<f64, String> {
    buf.clear();
    reader.read_event(buf).map_err(|e| e.to_string())?;
    let value = reader.read_text(b"Value", buf).map_err(|e| e.to_string())?;
    value.parse().map_err(|e| e.to_string())
}
/// Demonstrates parsing one hard-coded trackpoint and printing the outcome.
fn main() {
    // Example usage
    let xml_data = r#"
<Trackpoint>
<Latitude>37.7749</Latitude>
<Longitude>-122.4194</Longitude>
<Elevation>61.0</Elevation>
</Trackpoint>
"#;
    let mut reader = Reader::from_str(xml_data);
    // Strip insignificant whitespace around text nodes.
    reader.trim_text(true);
    let mut buf = Vec::new();
    let trackpoint = parse_trackpoint(&mut reader, &mut buf);
    // Report success or the parse error without panicking.
    match trackpoint {
        Ok(tp) => println!("Parsed trackpoint: {:?}", tp),
        Err(e) => eprintln!("Error parsing trackpoint: {}", e),
    }
}
<reponame>angelxehg/angelxehg.github.io
import React from "react"
// Metadata describing a rendered SVG icon.
export interface IconMeta {
  // color?: string
  // Presumably toggles filled vs. outline rendering — confirm at usage sites.
  fill?: boolean
  style?: React.CSSProperties
  // Raw SVG path data (the "d" attribute) for the icon glyph.
  svgPath: string
}

// A navigable link with its icon, as consumed by nav/list components.
export interface LinkMeta {
  name: string
  // Optional label shown in place of `name` — confirm with consumers.
  displayName?: string
  icon: IconMeta
  href: string
}
|
import base64
class IdentificationError(Exception):
    """Raised when the HTTP_AUTHORIZATION header is missing or malformed."""
    pass


class BasicAuthIdentifier:
    """Extracts 'username:password' credentials from a Basic auth header."""

    def identify(self, headers):
        """Return the decoded credential string from headers['HTTP_AUTHORIZATION'].

        Raises IdentificationError when the header is absent or empty, is not
        of the form 'Basic <base64>', or the payload fails base64/UTF-8 decoding.
        """
        auth_header = headers.get('HTTP_AUTHORIZATION')
        if not auth_header:
            raise IdentificationError("Invalid credentials in HTTP_AUTHORIZATION header")
        auth_parts = auth_header.split()
        if len(auth_parts) != 2 or auth_parts[0] != 'Basic':
            raise IdentificationError("Invalid credentials in HTTP_AUTHORIZATION header")
        credentials = auth_parts[1]
        try:
            decoded_credentials = base64.b64decode(credentials).decode('utf-8')
            return decoded_credentials
        except (TypeError, ValueError):
            # Bug fix: b64decode raises binascii.Error (a ValueError subclass)
            # on bad padding, which the original (TypeError, UnicodeDecodeError)
            # tuple missed. ValueError also covers UnicodeDecodeError.
            raise IdentificationError("Invalid credentials in HTTP_AUTHORIZATION header")
# Test cases.
# Fix: the original try/except blocks passed silently when identify() raised
# nothing at all; each now fails explicitly via an `else` clause.
identifier = BasicAuthIdentifier()

# Test case 1: Missing HTTP_AUTHORIZATION header
try:
    identifier.identify({})
except IdentificationError as e:
    assert str(e) == "Invalid credentials in HTTP_AUTHORIZATION header"
else:
    raise AssertionError("expected IdentificationError for missing header")

# Test case 2: Empty HTTP_AUTHORIZATION header
try:
    identifier.identify({'HTTP_AUTHORIZATION': ''})
except IdentificationError as e:
    assert str(e) == "Invalid credentials in HTTP_AUTHORIZATION header"
else:
    raise AssertionError("expected IdentificationError for empty header")

# Test case 3: Malformed credentials in HTTP_AUTHORIZATION header
try:
    identifier.identify({'HTTP_AUTHORIZATION': 'Basic \x000'})
except IdentificationError as e:
    assert str(e) == "Invalid credentials in HTTP_AUTHORIZATION header"
else:
    raise AssertionError("expected IdentificationError for malformed credentials")

# Test case 4: Valid credentials in HTTP_AUTHORIZATION header
credentials = base64.standard_b64encode('username:password'.encode('utf-8')).decode('utf-8')
assert identifier.identify({'HTTP_AUTHORIZATION': 'Basic ' + credentials}) == 'username:password'
def solve_quadratic_equation(b, a, c):
    """Solve a*x**2 + b*x + c = 0 over the reals.

    Returns [x1, x2] with the '+' root first, or [None, None] when the
    discriminant is negative (no real roots). The unusual (b, a, c) argument
    order is kept for caller compatibility.
    """
    # Bug fix: the original called sqrt() without importing it, raising
    # NameError whenever the discriminant was non-negative. Local import
    # keeps the function self-contained.
    from math import sqrt

    disc = (b * b) - (4 * a * c)
    if disc < 0:
        return [None, None]
    # Quadratic formula: x = (-b ± sqrt(disc)) / (2a)
    root = sqrt(disc)
    x1 = (-b + root) / (2 * a)
    x2 = (-b - root) / (2 * a)
    return [x1, x2]
<reponame>vitalics/playwright<gh_stars>1-10
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React from 'react';
import { expect, test } from '@playwright/experimental-ct-react';
import { Source } from './source';
test.use({ viewport: { width: 500, height: 500 } });
const javascriptSnippet = `import { test, expect } from '@playwright/test';
test('basic test', async ({ page }) => {
await page.goto('https://playwright.dev/');
const title = page.locator('.navbar__inner .navbar__title');
await expect(title).toHaveText('Playwright');
});
`;
const pythonSnippet = `import asyncio
from playwright.async_api import async_playwright
async def main():
async with async_playwright() as p:
# Works across chromium, firefox and webkit
browser = await p.chromium.launch(headless=False)
asyncio.run(main())
`;
const javaSnippet = `import com.microsoft.playwright.*;
public class Example {
public static void main(String[] args) {
try (Playwright playwright = Playwright.create()) {
BrowserType chromium = playwright.chromium();
Browser browser = chromium.launch(new BrowserType.LaunchOptions().setHeadless(false));
}
}
}
`;
const csharpSnippet = `
using Microsoft.Playwright;
using System.Threading.Tasks;
class Program
{
public static async Task Main()
{
using var playwright = await Playwright.CreateAsync();
await playwright.Chromium.LaunchAsync(new BrowserTypeLaunchOptions
{
Headless = false
});
}
}
`;
test('highlight JavaScript', async ({ mount }) => {
const component = await mount(<Source text={javascriptSnippet} language='javascript'></Source>);
await expect(component.locator('text="async"').first()).toHaveClass('hljs-keyword');
});
test('highlight Python', async ({ mount }) => {
const component = await mount(<Source text={pythonSnippet} language='python'></Source>);
await expect(component.locator('text="async"').first()).toHaveClass('hljs-keyword');
});
test('highlight Java', async ({ mount }) => {
const component = await mount(<Source text={javaSnippet} language='java'></Source>);
await expect(component.locator('text="public"').first()).toHaveClass('hljs-keyword');
});
test('highlight C#', async ({ mount }) => {
const component = await mount(<Source text={csharpSnippet} language='csharp'></Source>);
await expect(component.locator('text="public"').first()).toHaveClass('hljs-keyword');
});
test('highlight lines', async ({ mount }) => {
const component = await mount(<Source text={javascriptSnippet} language='javascript' highlight={[
{ line: 4, type: 'running' },
{ line: 5, type: 'paused' },
{ line: 6, type: 'error' },
]}></Source>);
await expect(component.locator('.source-line-running')).toContainText('goto');
await expect(component.locator('.source-line-paused')).toContainText('title');
await expect(component.locator('.source-line-error')).toContainText('expect');
});
|
<reponame>shreyasbapat/app
package kubernetes
import (
"fmt"
composev1beta1 "github.com/docker/compose-on-kubernetes/api/client/clientset/typed/compose/v1beta1"
composev1beta2 "github.com/docker/compose-on-kubernetes/api/client/clientset/typed/compose/v1beta2"
"github.com/docker/compose-on-kubernetes/api/compose/v1beta1"
"github.com/docker/compose-on-kubernetes/api/compose/v1beta2"
"github.com/docker/compose-on-kubernetes/api/labels"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
corev1 "k8s.io/client-go/kubernetes/typed/core/v1"
"k8s.io/client-go/rest"
)
// StackClient talks to a kubernetes compose component.
type StackClient interface {
StackConverter
CreateOrUpdate(s Stack, childResources []childResource) error
Delete(name string) error
Get(name string) (Stack, error)
List(opts metav1.ListOptions) ([]Stack, error)
IsColliding(servicesClient corev1.ServiceInterface, s Stack) error
}
// stackV1Beta1 implements stackClient interface and talks to compose component v1beta1.
type stackV1Beta1 struct {
stackV1Beta1Converter
stacks composev1beta1.StackInterface
}
func newStackV1Beta1(config *rest.Config, namespace string) (*stackV1Beta1, error) {
client, err := composev1beta1.NewForConfig(config)
if err != nil {
return nil, err
}
return &stackV1Beta1{stacks: client.Stacks(namespace)}, nil
}
// CreateOrUpdate stores the stack via the v1beta1 API: an existing stack has
// its compose file replaced, otherwise a new Stack object is created. On
// success the supplied child resources are re-parented to the stack through
// an owner reference (so they are garbage-collected with it); on failure
// they are deleted instead.
func (s *stackV1Beta1) CreateOrUpdate(internalStack Stack, childResources []childResource) error {
	// If it already exists, update the stack
	var (
		stack *v1beta1.Stack
		err   error
	)
	if stack, err = s.stacks.Get(internalStack.Name, metav1.GetOptions{}); err == nil {
		stack.Spec.ComposeFile = internalStack.ComposeFile
		stack, err = s.stacks.Update(stack)
	} else {
		// Or create it
		stack, err = s.stacks.Create(stackToV1beta1(internalStack))
	}
	if err != nil {
		// Roll back so no orphaned child resources are left behind.
		deleteChildResources(childResources)
		return err
	}
	blockOwnerDeletion := true
	isController := true
	// Mark the stack as the controlling owner of every child resource.
	return setChildResourcesOwner(childResources, metav1.OwnerReference{
		APIVersion:         v1beta1.SchemeGroupVersion.String(),
		Kind:               "Stack",
		Name:               stack.Name,
		UID:                stack.UID,
		BlockOwnerDeletion: &blockOwnerDeletion,
		Controller:         &isController,
	})
}
func (s *stackV1Beta1) Delete(name string) error {
return s.stacks.Delete(name, &metav1.DeleteOptions{})
}
func (s *stackV1Beta1) Get(name string) (Stack, error) {
stackBeta1, err := s.stacks.Get(name, metav1.GetOptions{})
if err != nil {
return Stack{}, err
}
return stackFromV1beta1(stackBeta1)
}
func (s *stackV1Beta1) List(opts metav1.ListOptions) ([]Stack, error) {
list, err := s.stacks.List(opts)
if err != nil {
return nil, err
}
stacks := make([]Stack, len(list.Items))
for i := range list.Items {
stack, err := stackFromV1beta1(&list.Items[i])
if err != nil {
return nil, err
}
stacks[i] = stack
}
return stacks, nil
}
// IsColliding verifies that services defined in the stack collides with already deployed services
func (s *stackV1Beta1) IsColliding(servicesClient corev1.ServiceInterface, st Stack) error {
for _, srv := range st.getServices() {
if err := verify(servicesClient, st.Name, srv); err != nil {
return err
}
}
return nil
}
// verify checks whether the service is already present in kubernetes.
// If we find the service by name but it doesn't have our label or it has a different value
// than the stack name for the label, we fail (i.e. it will collide)
func verify(services corev1.ServiceInterface, stackName string, service string) error {
	svc, err := services.Get(service, metav1.GetOptions{})
	if err == nil {
		// The service exists: it only belongs to us if it carries the
		// stack-name label with this stack's name as the value.
		if key, ok := svc.ObjectMeta.Labels[labels.ForStackName]; ok {
			if key != stackName {
				return fmt.Errorf("service %s already present in stack named %s", service, key)
			}
			return nil
		}
		return fmt.Errorf("service %s already present in the cluster", service)
	}
	// NOTE(review): any lookup error (including transient API errors) is
	// treated as "no collision" — confirm this is intentional.
	return nil
}
// stackV1Beta2 implements stackClient interface and talks to compose component v1beta2.
type stackV1Beta2 struct {
stackV1Beta2Converter
stacks composev1beta2.StackInterface
}
func newStackV1Beta2(config *rest.Config, namespace string) (*stackV1Beta2, error) {
client, err := composev1beta2.NewForConfig(config)
if err != nil {
return nil, err
}
return &stackV1Beta2{stacks: client.Stacks(namespace)}, nil
}
func (s *stackV1Beta2) CreateOrUpdate(internalStack Stack, childResources []childResource) error {
// If it already exists, update the stack
var (
stack *v1beta2.Stack
err error
)
if stack, err = s.stacks.Get(internalStack.Name, metav1.GetOptions{}); err == nil {
stack.Spec = internalStack.Spec
stack, err = s.stacks.Update(stack)
} else {
// Or create it
stack, err = s.stacks.Create(stackToV1beta2(internalStack))
}
if err != nil {
deleteChildResources(childResources)
return err
}
blockOwnerDeletion := true
isController := true
return setChildResourcesOwner(childResources, metav1.OwnerReference{
APIVersion: v1beta2.SchemeGroupVersion.String(),
Kind: "Stack",
Name: stack.Name,
UID: stack.UID,
BlockOwnerDeletion: &blockOwnerDeletion,
Controller: &isController,
})
}
func (s *stackV1Beta2) Delete(name string) error {
return s.stacks.Delete(name, &metav1.DeleteOptions{})
}
func (s *stackV1Beta2) Get(name string) (Stack, error) {
stackBeta2, err := s.stacks.Get(name, metav1.GetOptions{})
if err != nil {
return Stack{}, err
}
return stackFromV1beta2(stackBeta2), nil
}
func (s *stackV1Beta2) List(opts metav1.ListOptions) ([]Stack, error) {
list, err := s.stacks.List(opts)
if err != nil {
return nil, err
}
stacks := make([]Stack, len(list.Items))
for i := range list.Items {
stacks[i] = stackFromV1beta2(&list.Items[i])
}
return stacks, nil
}
// IsColliding is handle server side with the compose api v1beta2, so nothing to do here
func (s *stackV1Beta2) IsColliding(servicesClient corev1.ServiceInterface, st Stack) error {
return nil
}
|
import { Button, Form, Typography } from 'antd';
import TextEditor from 'lib/components/TextEditor';
import React from 'react';
import { useDispatch } from 'react-redux';
import { useParams } from 'react-router-dom';
import { addAnswer } from 'store/modules/answers/answers.actions';
const { Item } = Form;
const { Title } = Typography;

// Route parameters for this view: the id of the post being answered.
interface MatchProps {
  id: string;
}

// Shape of the values produced by the answer form on submit.
interface ICommentCreate {
  body: string;
}

// Rich-text form rendered under a post that lets the user submit an answer.
// Dispatches addAnswer with the post id taken from the route, then clears
// the editor.
export const PostCreateAnswer = () => {
  const [form] = Form.useForm();
  const { id } = useParams<MatchProps>();
  const dispatch = useDispatch();

  const onSubmit = (values: ICommentCreate) => {
    dispatch(addAnswer({ body: values.body, postId: id }));
    // Reset after dispatch so the editor is emptied for the next answer.
    form.resetFields();
  };

  return (
    <>
      <Title level={5}>Your Answer</Title>
      <Form layout="vertical" form={form} onFinish={onSubmit}>
        <Item
          name="body"
          rules={[
            {
              required: true,
              message: 'Please enter correct answer',
            },
          ]}
        >
          {/* @ts-ignore -- TextEditor's props don't line up with antd's
              injected value/onChange; presumably compatible at runtime. */}
          <TextEditor />
        </Item>
        <Item>
          <Button htmlType="submit" type="primary">
            Post Your Answer
          </Button>
        </Item>
      </Form>
    </>
  );
};
|
using UnityEditor;
using UnityEngine;
[CustomPropertyDrawer(typeof(NoiseSettings))]
public class NoiseSettingsDrawer : PropertyDrawer
{
    /// <summary>
    /// Draws the filter-type popup, then the settings sub-object that matches
    /// the selected filter type (Simple or Rigid) directly below it.
    /// </summary>
    public override void OnGUI(Rect position, SerializedProperty property, GUIContent label)
    {
        EditorGUI.BeginProperty(position, label, property);
        SerializedProperty filterType = property.FindPropertyRelative("filterType");
        SerializedProperty simpleNoiseSettings = property.FindPropertyRelative("simpleNoiseSettings");
        SerializedProperty rigidNoiseSettings = property.FindPropertyRelative("rigidNoiseSettings");
        EditorGUI.PropertyField(position, filterType, true);
        // Move below the filter-type row before drawing the nested settings.
        position.y += EditorGUIUtility.singleLineHeight + EditorGUIUtility.standardVerticalSpacing;
        if (filterType.enumValueIndex == (int)NoiseSettings.FilterType.Simple)
        {
            EditorGUI.PropertyField(position, simpleNoiseSettings, true);
        }
        else if (filterType.enumValueIndex == (int)NoiseSettings.FilterType.Rigid)
        {
            EditorGUI.PropertyField(position, rigidNoiseSettings, true);
        }
        EditorGUI.EndProperty();
    }

    /// <summary>
    /// Reserves one line for the filter-type popup plus the height of
    /// whichever settings sub-object is active; must mirror OnGUI's layout.
    /// </summary>
    public override float GetPropertyHeight(SerializedProperty property, GUIContent label)
    {
        SerializedProperty filterType = property.FindPropertyRelative("filterType");
        SerializedProperty simpleNoiseSettings = property.FindPropertyRelative("simpleNoiseSettings");
        SerializedProperty rigidNoiseSettings = property.FindPropertyRelative("rigidNoiseSettings");
        float height = EditorGUIUtility.singleLineHeight + EditorGUIUtility.standardVerticalSpacing;
        if (filterType.enumValueIndex == (int)NoiseSettings.FilterType.Simple)
        {
            height += EditorGUI.GetPropertyHeight(simpleNoiseSettings, true);
        }
        else if (filterType.enumValueIndex == (int)NoiseSettings.FilterType.Rigid)
        {
            height += EditorGUI.GetPropertyHeight(rigidNoiseSettings, true);
        }
        return height;
    }
}
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from unittest import TestCase
from uw_nws import NWS
from uw_nws.models import Channel
from uw_nws.utilities import fdao_nws_override
from uw_nws.exceptions import InvalidUUID
from restclients_core.exceptions import DataFailureException
from datetime import datetime
@fdao_nws_override
class NWSTestChannel(TestCase):
    """Tests for the NWS channel lookup APIs against the file-based mock DAO.

    Note: uses ``assertEqual`` throughout; the ``assertEquals`` alias is
    deprecated and was removed in Python 3.12.
    """

    def test_channel_by_channel_id(self):
        nws = NWS()
        channel = nws.get_channel_by_channel_id(
            "b779df7b-d6f6-4afb-8165-8dbe6232119f")
        self._assert_channel(channel)

    def test_channel_by_channel_id_exceptions(self):
        nws = NWS()
        # Malformed UUIDs are rejected before any request is made.
        self.assertRaises(InvalidUUID, nws.get_channel_by_channel_id, "abc")
        # Well-formed but unknown UUIDs surface the backend failure.
        self.assertRaises(
            DataFailureException, nws.get_channel_by_channel_id,
            "00000000-d6f6-4afb-8165-8dbe6232119f")

    def test_channel_sln(self):
        nws = NWS()
        channels = nws.get_channels_by_sln(
            "uw_student_courseavailable", "12345")
        self.assertEqual(len(channels), 1)
        self.assertRaises(
            DataFailureException, nws.get_channels_by_sln,
            "uw_student_courseavailable", "00000")

    def test_channel_sln_and_term(self):
        nws = NWS()
        channels = nws.get_channels_by_sln_year_quarter(
            "uw_student_courseavailable", "12345", 2012, "autumn")
        self.assertEqual(len(channels), 1)
        self.assertRaises(
            DataFailureException, nws.get_channels_by_sln_year_quarter,
            "uw_student_courseavailable", "12345", 2013, "summer")

    def test_active_channels_by_year_quarter(self):
        nws = NWS()
        dt = datetime(2013, 5, 31, 0, 0, 0)
        channels = nws.get_active_channels_by_year_quarter(
            "uw_student_courseavailable", 2013, 'spring', expires=dt)
        self.assertEqual(len(channels), 1)
        self.assertRaises(
            DataFailureException, nws.get_active_channels_by_year_quarter,
            "uw_student_courseavailable", 2013, 'summer', expires=dt)

    def _assert_channel(self, channel):
        """Check every field of the known fixture channel."""
        self.assertEqual(
            channel.channel_id, "b779df7b-d6f6-4afb-8165-8dbe6232119f")
        self.assertEqual(channel.surrogate_id, "2012,autumn,cse,100,w")
        self.assertEqual(channel.type, "uw_student_courseavailable")
        self.assertEqual(channel.name, "FLUENCY IN INFORMATION TECHNOLOGY")
        self.assertEqual(channel.description, (
            "Introduces skills, concepts, and capabilities necessary to "
            "effectively use information technology. Includes logical "
            "reasoning, managing complexity, operation of computers and "
            "networks, and contemporary applications such as effective web "
            "searching and database manipulation, ethical aspects, and "
            "social impacts of information technology. Offered: jointly "
            "with INFO 100.\n"))
|
#!/bin/bash
# Register the "templating" Helm chart repository hosted on the private
# Harbor instance (nip.io wildcard DNS pointing at 34.89.193.23).
helm repo add templating https://harbor.34.89.193.23.nip.io/chartrepo/andrew
<reponame>smagill/opensphere-desktop
package io.opensphere.core.util;
/** An arbitrator which will dictate whether phased commits are required. */
@FunctionalInterface
public interface PhasedChangeArbitrator
{
/**
* Tell whether phased commits are required.
*
* @return <code>true</code> when phased commits are required.
*/
boolean isPhasedCommitRequired();
}
|
<filename>src/main/java/org/felix/ml/sampling/filter/single/RelMaxPosFilter.java
package org.felix.ml.sampling.filter.single;
import org.felix.ml.sampling.FilterContext;
import org.felix.ml.sampling.SamplePackage;
import org.felix.ml.sampling.ScoreResult;
import org.felix.ml.sampling.exception.ConfigException;
import org.felix.ml.sampling.exception.FilterException;
import org.felix.ml.sampling.util.FilterUtil;
import org.felix.ml.sampling.util.QueryUtil;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.felix.ml.sampling.util.Constant.POS_TAG;
/**
 * Position filter anchored on the last ("max position") sample that matches a
 * configured class label or binary sample flag. Configured with exactly one of
 * the query parameters {@code label=<int>} or {@code sample=<bool>}.
 */
public class RelMaxPosFilter extends PosFilter {

    /** Expected class label to anchor on; mutually exclusive with {@link #sample}. */
    private Integer label;
    /** Expected binary decision to anchor on; mutually exclusive with {@link #label}. */
    private Boolean sample;

    /** Builds the standard configuration-error exception for this filter. */
    private ConfigException badConfig(String param) {
        return new ConfigException(
                String.format("wrong config for %s,param:%s", getClass().getSimpleName(), param));
    }

    public void init(String param) throws ConfigException {
        Map<String, String[]> map = QueryUtil.parseUrlQueryString(param);
        String[] labels = map.get("label");
        String[] samples = map.get("sample");
        // Exactly one of "label" / "sample" must be supplied, with a single value.
        if (labels == null && samples == null)
            throw badConfig(param);
        if (labels != null && samples != null)
            throw badConfig(param);
        if (labels != null) {
            if (labels.length > 1)
                throw badConfig(param);
            try {
                label = Integer.parseInt(labels[0]);
            } catch (Exception e) {
                throw badConfig(param);
            }
        }
        if (samples != null) {
            if (samples.length > 1)
                throw badConfig(param);
            // Boolean.parseBoolean never throws: anything other than "true"
            // (case-insensitive) parses to false, so no try/catch is needed.
            sample = Boolean.parseBoolean(samples[0]);
        }
        dynamic = true;
        super.init(param);
    }

    public Integer getLabel() {
        return label;
    }

    public void setLabel(Integer label) {
        this.label = label;
    }

    public Boolean getSample() {
        return sample;
    }

    public void setSample(Boolean sample) {
        this.sample = sample;
    }

    @Override
    public List<Integer> doFilter(SamplePackage spackage, ScoreResult scoreResult, FilterContext context) {
        if (label != null) {
            // Anchor on the last remaining sample whose predicted label matches.
            List<Integer> selList = spackage.cloneFilterBefore();
            Map<Integer, Integer> id2pos = FilterUtil.posMap(POS_TAG, selList, spackage.getList());
            Integer maxPos = FilterUtil.findLast(label, selList, id2pos, scoreResult.getLabelMap());
            if (maxPos == null)
                return new ArrayList<Integer>();
            setPos(maxPos);
        } else if (sample != null) {
            // Anchor on the last remaining sample whose binary decision matches.
            List<Integer> selList = spackage.cloneFilterBefore();
            Map<Integer, Integer> id2pos = FilterUtil.posMap(POS_TAG, selList, spackage.getList());
            Integer maxPos = FilterUtil.findLast(sample, selList, id2pos, scoreResult.getBinMap());
            if (maxPos == null)
                return new ArrayList<Integer>();
            setPos(maxPos);
        } else {
            // Unconfigured: mark this filter invalid and pass samples through.
            valid = false;
            return spackage.cloneFilterBefore();
        }
        return super.doFilter(spackage, scoreResult, context);
    }

    public void preFilter(SamplePackage spackage, ScoreResult scoreResult, FilterContext context) throws FilterException {
        // 9999 appears to be a sentinel; the effective anchor position is
        // recomputed per call in doFilter — TODO confirm against PosFilter.
        this.pos = 9999;
        super.preFilter(spackage, scoreResult, context);
    }
}
|
#!/bin/bash
# Script Name: AtoMiC Muximux Menu
# Presents a whiptail menu for managing Muximux and dispatches to the matching
# helper script. Expects $BACKTITLE, $SCRIPTPATH, $LINES, $COLUMNS, $NETLINES
# to be set by the calling AtoMiC toolkit environment.
SUBCHOICE=$(whiptail --title "AtoMiC Toolkit - Manage Muximux" \
    --menu "What would you like to do?" --backtitle "$BACKTITLE" \
    --fb --cancel-button "Exit" $LINES $COLUMNS "$NETLINES" \
    "Install" "Install Muximux" \
    "Uninstall" "Uninstall Muximux" \
    "Manual Update" "Manually update Muximux" \
    "Access Details" "View Muximux access details" \
    "Go Back" "Back to Main Menu" 3>&1 1>&2 2>&3)

# whiptail exits non-zero when the user picks Exit/ESC.
exitstatus=$?
if [[ $exitstatus = 0 ]]; then
    # Loads $APPNAME and friends used by the sourced handlers below.
    source "$SCRIPTPATH/muximux/muximux-constants.sh"
    case "$SUBCHOICE" in
        "Install" ) source "$SCRIPTPATH/$APPNAME/$APPNAME-installer.sh" ;;
        "Uninstall" ) source "$SCRIPTPATH/$APPNAME/$APPNAME-uninstaller.sh" ;;
        "Manual Update" ) source "$SCRIPTPATH/$APPNAME/$APPNAME-update.sh" ;;
        "Access Details" ) source "$SCRIPTPATH/inc/app-access-details.sh" ;;
        "Go Back" ) source "$SCRIPTPATH/menus/menu-administration-tools.sh" ;;
        *) source "$SCRIPTPATH/inc/invalid-option.sh" ;;
    esac
else
    # User cancelled: show the goodbye screen and leave.
    source "$SCRIPTPATH/inc/thankyou.sh"
    echo
    sleep 1
    exit 0
fi
#!/bin/zsh
# Copyright 2016 Dylan Baker
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Pull in the XDG_* exports used throughout this bootstrap.
source zsh/zsh.d/G01-export.zsh

# Solve the zsh bootstrap problem locally, since every user might not want this
# setup.
echo "export ZDOTDIR=$XDG_CONFIG_HOME/zsh" > ~/.zshenv

# Create directories for zsh
mkdir -p $XDG_DATA_HOME/zsh
mkdir -p $XDG_DATA_HOME/zsh/zkbd
mkdir -p $XDG_DATA_HOME/zsh/completions

# tmuxinator still has no XDG support
if [[ ! -e ~/.tmuxinator ]]; then
    ln -sf $XDG_CONFIG_HOME/tmuxinator ~/.tmuxinator
fi

# python
mkdir -p $XDG_CACHE_HOME/python-eggs

# ccache
mkdir -p $XDG_CACHE_HOME/ccache

# less
mkdir -p $XDG_DATA_HOME/less

# git
mkdir -p $XDG_DATA_HOME/git/templates

# nvim
mkdir -p $XDG_CACHE_HOME/nvim/{backup,swap,undo}

# try to get vundle, you'll need git
if [[ ! -d $XDG_DATA_HOME/nvim/bundle/Vundle.vim ]]; then
    git clone https://github.com/VundleVim/Vundle.vim.git $XDG_DATA_HOME/nvim/bundle/Vundle.vim 1>/dev/null
    nvim +PluginInstall +qall
fi

# Try to get antigen, you'll need git for this too
if [[ ! -d $XDG_DATA_HOME/zsh/bundle ]]; then
    git clone https://github.com/zsh-users/antigen.git $XDG_DATA_HOME/zsh/bundle/antigen.git 1>/dev/null
fi

# Add some other helpers: clone each tool once and symlink its entry point
# into ~/.local/bin.
mkdir -p $XDG_DATA_HOME/repos
mkdir -p $HOME/.local/bin/
if [[ ! -d $XDG_DATA_HOME/repos/jsontidy ]]; then
    git clone https://github.com/dcbaker/jsontidy $XDG_DATA_HOME/repos/jsontidy 1>/dev/null
    ln -s $XDG_DATA_HOME/repos/jsontidy/jsontidy.py $HOME/.local/bin/
fi
if [[ ! -d $XDG_DATA_HOME/repos/git-ck ]]; then
    git clone https://github.com/dcbaker/git-ck $XDG_DATA_HOME/repos/git-ck 1>/dev/null
    ln -s $XDG_DATA_HOME/repos/git-ck/git-ck $HOME/.local/bin/
    ln -s $XDG_DATA_HOME/repos/git-ck/zsh/_git-ck $XDG_DATA_HOME/zsh/completions/
fi
if [[ ! -d $XDG_DATA_HOME/repos/pip-update ]]; then
    git clone https://github.com/dcbaker/pip-update.git $XDG_DATA_HOME/repos/pip-update
    ln -s $XDG_DATA_HOME/repos/pip-update/pip-update $HOME/.local/bin/
fi
if [[ ! -d $XDG_DATA_HOME/repos/tig-rebase ]]; then
    git clone https://github.com/Nattfarinn/tig-rebase.git $XDG_DATA_HOME/repos/tig-rebase
    ln -s $XDG_DATA_HOME/repos/tig-rebase/tig-rebase.sh $HOME/.local/bin/tig-rebase
fi

# pyenv installs itself into ~/.pyenv via its upstream installer script.
if [[ ! -d $HOME/.pyenv ]]; then
    curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash
fi
/**
 * Signature/interface for a `CampaignData` object
 * @see https://developer.apple.com/documentation/apple_news/metadata/campaigndata
 */
export interface CampaignData {
  // Arbitrary string keys, each mapping to a list of string values.
  [key: string]: string[];
}
# Version under test, taken from the package ident's third path segment
# (origin/name/version).
expected_version="$(echo "${TEST_PKG_IDENT}" | cut -d/ -f3)"

@test "make matches version ${expected_version}" {
  actual_version="$(hab pkg exec "${TEST_PKG_IDENT}" make --version | grep "GNU Make" | awk '{print $3}')"
  diff <( echo "$actual_version" ) <( echo "${expected_version}" )
}

@test "Make simple task" {
  run hab pkg exec "$TEST_PKG_IDENT" make --directory "$BATS_TEST_DIRNAME/fixtures/" ci-test
  # `run` swallows the exit code; assert it explicitly so a failing make
  # target cannot pass just because the expected text appeared in output.
  [ "$status" -eq 0 ]
  grep -q "Make in CI" <<< "$output"
}
|
import numpy as np
from PIL import Image
from io import BytesIO
from base64 import b64encode
def array_to_base64(arr: np.ndarray) -> str:
    """Encode an image array as a JPEG and return it as a base64 data URI."""
    buffer = BytesIO()
    Image.fromarray(arr).save(buffer, 'JPEG')
    encoded = b64encode(buffer.getvalue()).decode()
    return 'data:image/jpeg;base64,' + encoded
#!/bin/bash
set -e

# version_le A B...: succeeds when A is STRICTLY lower than the highest of the
# given versions (sort -V picks the max; we test that A is not that max).
# Despite the "_le" name, equal versions make this return failure.
function version_le() { test "$(echo "$@" | tr " " "\n" | sort -V | tail -n 1)" != "$1"; }

# List of devices
targets='raspberry-pi raspberry-pi2 beaglebone-black intel-edison intel-nuc via-vab820-quad zynq-xz702 odroid-c1 odroid-xu4 parallella nitrogen6x hummingboard ts4900 colibri-imx6dl apalis-imx6q ts7700 raspberrypi3 artik5 artik10 beaglebone-green-wifi qemux86 qemux86-64 beaglebone-green cybertan-ze250 artik710 am571x-evm up-board kitra710 imx6ul-var-dart kitra520 jetson-tx2 iot2000 jetson-tx1 generic-armv7ahf generic-aarch64 bananapi-m1-plus'
# List of archs
targets+=' armv7hf armel i386 amd64 aarch64'
fedora_targets=' raspberry-pi2 beaglebone-black via-vab820-quad zynq-xz702 odroid-c1 odroid-xu4 parallella nitrogen6x hummingboard ts4900 colibri-imx6dl apalis-imx6q raspberrypi3 artik5 artik10 beaglebone-green-wifi beaglebone-green intel-nuc qemux86-64 artik710 am571x-evm kitra710 up-board imx6ul-var-dart kitra520 jetson-tx2 jetson-tx1 armv7hf amd64 aarch64 generic-armv7ahf generic-aarch64 bananapi-m1-plus '
goVersions='1.4.3 1.5.4 1.6.4 1.7.5 1.8.7 1.9.4 1.10'
resinUrl="http://resin-packages.s3.amazonaws.com/golang/v\$GO_VERSION/go\$GO_VERSION.linux-#{TARGET_ARCH}.tar.gz"
golangUrl="https://storage.googleapis.com/golang/go\$GO_VERSION.linux-#{TARGET_ARCH}.tar.gz"

for target in $targets; do
	# Map each target to the Go binary flavour it downloads (plus, where
	# available, the Alpine and Fedora flavours). Targets sharing a pattern
	# list previously had byte-identical case arms; they are collapsed here.
	case "$target" in
		'armv7hf'|'generic-armv7ahf'|'bananapi-m1-plus'|'raspberry-pi2'|'raspberrypi3'|'beaglebone-black'|'beaglebone-green-wifi'|'beaglebone-green'|'via-vab820-quad'|'zynq-xz702'|'odroid-c1'|'odroid-xu4'|'parallella'|'nitrogen6x'|'hummingboard'|'ts4900'|'colibri-imx6dl'|'apalis-imx6q'|'am571x-evm'|'artik5'|'artik10'|'kitra520'|'imx6ul-var-dart'|'ccon-01')
			binary_url=$resinUrl
			binary_arch='armv7hf'
			alpine_binary_url=$resinUrl
			alpine_binary_arch='alpine-armhf'
			fedora_binary_url=$resinUrl
			fedora_binary_arch='armv7hf'
			;;
		'aarch64'|'jetson-tx2'|'jetson-tx1'|'artik710'|'kitra710'|'generic-aarch64')
			binary_url=$resinUrl
			binary_arch='aarch64'
			alpine_binary_url=$resinUrl
			alpine_binary_arch='alpine-aarch64'
			fedora_binary_url=$resinUrl
			fedora_binary_arch='aarch64'
			;;
		'amd64'|'intel-nuc'|'qemux86-64'|'up-board')
			binary_url=$golangUrl
			binary_arch='amd64'
			alpine_binary_url=$resinUrl
			alpine_binary_arch='alpine-amd64'
			fedora_binary_url=$golangUrl
			fedora_binary_arch='amd64'
			;;
		'i386'|'intel-edison'|'qemux86')
			binary_url=$golangUrl
			binary_arch='386'
			alpine_binary_url=$resinUrl
			alpine_binary_arch='alpine-i386'
			;;
		'cybertan-ze250'|'iot2000')
			binary_url=$resinUrl
			binary_arch='i386'
			alpine_binary_url=$resinUrl
			alpine_binary_arch='alpine-i386'
			;;
		'armel')
			binary_url=$resinUrl
			binary_arch='armel'
			;;
		'raspberry-pi')
			binary_url=$resinUrl
			binary_arch='armv6hf'
			alpine_binary_url=$resinUrl
			alpine_binary_arch='alpine-armhf'
			;;
		'ts7700')
			binary_url=$resinUrl
			binary_arch='armel'
			# Not supported yet
			#alpine_binary_url=$resinUrl
			#alpine_binary_arch='alpine-armhf'
			;;
	esac

	for goVersion in $goVersions; do
		baseVersion=$(expr match "$goVersion" '\([0-9]*\.[0-9]*\)')
		# These i386 boards only have Alpine-compatible builds from v1.6 on.
		if ([ $target == "cybertan-ze250" ] || [ $target == "iot2000" ]) && ( version_le $goVersion "1.6" ); then
			continue
		fi
		# Only v1.6 and higher aarch64 binaries are available.
		if [ $binary_arch == "aarch64" ] && ( version_le $goVersion "1.6" ); then
			continue
		fi

		# Debian.
		# Extract checksum
		checksum=$(grep " go$goVersion.linux-$binary_arch.tar.gz" SHASUMS256.txt)
		debian_dockerfilePath=$target/debian/$baseVersion
		mkdir -p $debian_dockerfilePath
		sed -e s~#{FROM}~resin/$target-buildpack-deps:jessie~g \
			-e s~#{BINARY_URL}~$binary_url~g \
			-e s~#{GO_VERSION}~$goVersion~g \
			-e s~#{CHECKSUM}~"$checksum"~g \
			-e s~#{TARGET_ARCH}~$binary_arch~g Dockerfile.tpl > $debian_dockerfilePath/Dockerfile
		cp go-wrapper $debian_dockerfilePath/
		mkdir -p $debian_dockerfilePath/stretch
		sed -e s~#{FROM}~resin/$target-buildpack-deps:stretch~g \
			-e s~#{BINARY_URL}~$binary_url~g \
			-e s~#{GO_VERSION}~$goVersion~g \
			-e s~#{CHECKSUM}~"$checksum"~g \
			-e s~#{TARGET_ARCH}~$binary_arch~g Dockerfile.tpl > $debian_dockerfilePath/stretch/Dockerfile
		cp go-wrapper $debian_dockerfilePath/stretch/
		mkdir -p $debian_dockerfilePath/onbuild
		sed -e s~#{FROM}~resin/$target-golang:$goVersion~g Dockerfile.onbuild.tpl > $debian_dockerfilePath/onbuild/Dockerfile
		# Only for RPI1 target
		if [ $target == "raspberry-pi" ]; then
			base_image="resin/rpi-raspbian:jessie"
		else
			base_image="resin/$target-debian:jessie"
		fi
		mkdir -p $debian_dockerfilePath/slim
		sed -e s~#{FROM}~$base_image~g \
			-e s~#{BINARY_URL}~$binary_url~g \
			-e s~#{GO_VERSION}~$goVersion~g \
			-e s~#{CHECKSUM}~"$checksum"~g \
			-e s~#{TARGET_ARCH}~$binary_arch~g Dockerfile.slim.tpl > $debian_dockerfilePath/slim/Dockerfile
		cp go-wrapper $debian_dockerfilePath/slim/

		# Fedora
		if [[ $fedora_targets == *" $target "* ]]; then
			fedora_dockerfilePath=$target/fedora/$baseVersion
			mkdir -p $fedora_dockerfilePath
			sed -e s~#{FROM}~resin/$target-fedora-buildpack-deps:latest~g \
				-e s~#{BINARY_URL}~$fedora_binary_url~g \
				-e s~#{GO_VERSION}~$goVersion~g \
				-e s~#{CHECKSUM}~"$checksum"~g \
				-e s~#{TARGET_ARCH}~$fedora_binary_arch~g Dockerfile.tpl > $fedora_dockerfilePath/Dockerfile
			cp go-wrapper $fedora_dockerfilePath/
			mkdir -p $fedora_dockerfilePath/25
			sed -e s~#{FROM}~resin/$target-fedora-buildpack-deps:25~g \
				-e s~#{BINARY_URL}~$fedora_binary_url~g \
				-e s~#{GO_VERSION}~$goVersion~g \
				-e s~#{CHECKSUM}~"$checksum"~g \
				-e s~#{TARGET_ARCH}~$fedora_binary_arch~g Dockerfile.tpl > $fedora_dockerfilePath/25/Dockerfile
			cp go-wrapper $fedora_dockerfilePath/25/
			mkdir -p $fedora_dockerfilePath/onbuild
			sed -e s~#{FROM}~resin/$target-fedora-golang:$goVersion~g Dockerfile.onbuild.tpl > $fedora_dockerfilePath/onbuild/Dockerfile
			mkdir -p $fedora_dockerfilePath/slim
			sed -e s~#{FROM}~resin/$target-fedora:latest~g \
				-e s~#{BINARY_URL}~$fedora_binary_url~g \
				-e s~#{GO_VERSION}~$goVersion~g \
				-e s~#{CHECKSUM}~"$checksum"~g \
				-e s~#{TARGET_ARCH}~$fedora_binary_arch~g Dockerfile.fedora.slim.tpl > $fedora_dockerfilePath/slim/Dockerfile
			cp go-wrapper $fedora_dockerfilePath/slim/
		fi

		# Alpine.
		# TS7700 not supported yet
		if [ $target == "ts7700" ] || [ $target == "armel" ]; then
			continue
		fi
		# Alpine images are published under "armhf" rather than "armv7hf".
		# Use a separate variable instead of overwriting $target: the old
		# in-place rename leaked into the next goVersion iteration and sent
		# the Debian/Fedora output for the armv7hf target into armhf/ dirs.
		alpine_target=$target
		if [ $target == "armv7hf" ]; then
			alpine_target='armhf'
		fi
		# Extract checksum
		checksum=$(grep " go$goVersion.linux-$alpine_binary_arch.tar.gz" SHASUMS256.txt)
		alpine_dockerfilePath=$alpine_target/alpine/$baseVersion
		mkdir -p $alpine_dockerfilePath
		sed -e s~#{FROM}~"resin/$alpine_target-alpine-buildpack-deps:latest"~g \
			-e s~#{BINARY_URL}~"$alpine_binary_url"~g \
			-e s~#{GO_VERSION}~"$goVersion"~g \
			-e s~#{CHECKSUM}~"$checksum"~g \
			-e s~#{TARGET_ARCH}~"$alpine_binary_arch"~g Dockerfile.alpine.tpl > $alpine_dockerfilePath/Dockerfile
		cp go-wrapper $alpine_dockerfilePath/
		mkdir -p $alpine_dockerfilePath/slim
		sed -e s~#{FROM}~"resin/$alpine_target-alpine:latest"~g \
			-e s~#{BINARY_URL}~"$alpine_binary_url"~g \
			-e s~#{GO_VERSION}~"$goVersion"~g \
			-e s~#{CHECKSUM}~"$checksum"~g \
			-e s~#{TARGET_ARCH}~"$alpine_binary_arch"~g Dockerfile.alpine.slim.tpl > $alpine_dockerfilePath/slim/Dockerfile
		cp go-wrapper $alpine_dockerfilePath/slim/
		mkdir -p $alpine_dockerfilePath/onbuild
		sed -e s~#{FROM}~resin/$alpine_target-alpine-golang:$goVersion~g Dockerfile.onbuild.tpl > $alpine_dockerfilePath/onbuild/Dockerfile
		mkdir -p $alpine_dockerfilePath/edge
		sed -e s~#{FROM}~"resin/$alpine_target-alpine-buildpack-deps:edge"~g \
			-e s~#{BINARY_URL}~"$alpine_binary_url"~g \
			-e s~#{GO_VERSION}~"$goVersion"~g \
			-e s~#{CHECKSUM}~"$checksum"~g \
			-e s~#{TARGET_ARCH}~"$alpine_binary_arch"~g Dockerfile.alpine.tpl > $alpine_dockerfilePath/edge/Dockerfile
		cp go-wrapper $alpine_dockerfilePath/edge/
	done
done
|
<gh_stars>1-10
import React from 'react'
import styled from 'styled-components'
const Styles = styled.span`
padding:8px;
.tagWrapepr{
padding:5px;
background-color:#684656;
color:#f2f2f3;
border-radius:7px;
}
`
const Tag = ({ tagName, ...props }) => {
return (
<Styles {...props}>
<span className="tagWrapepr">
#{tagName}
</span>
</Styles>
)
}
export default Tag |
<filename>app/src/main/java/com/github/starter/core/secrets/SecretsProperties.java
package com.github.starter.core.secrets;
import com.github.skhatri.mounted.model.SecretConfiguration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
 * Holder for externalized secret settings bound from the {@code secrets.*}
 * configuration prefix (i.e. {@code secrets.config.*} maps onto
 * {@link SecretConfiguration}).
 */
@ConfigurationProperties(prefix = "secrets")
@Component
public class SecretsProperties {
    /** Bound secret configuration; populated by Spring's property binding. */
    private SecretConfiguration config;

    public SecretConfiguration getConfig() {
        return config;
    }

    public void setConfig(SecretConfiguration config) {
        this.config = config;
    }
}
|
<filename>src/cpp-ethereum/test/tools/jsontests/TransactionTests.h
// Aleth: Ethereum C++ client, tools and libraries.
// Copyright 2015-2019 Aleth Authors.
// Licensed under the GNU General Public License, Version 3.
/// @file
/// Transaction test functions.
#pragma once
#include <test/tools/libtesteth/TestSuite.h>
#include <boost/filesystem/path.hpp>
namespace dev
{
namespace test
{
/// Test suite for transaction tests; runs or (re)generates JSON fixtures.
class TransactionTestSuite: public TestSuite
{
    /// Executes the tests described by _input, or generates filled fixtures
    /// when _fillin is true; returns the resulting JSON value.
    json_spirit::mValue doTests(json_spirit::mValue const& _input, bool _fillin) const override;
    /// Folder holding the filled test fixtures for this suite.
    boost::filesystem::path suiteFolder() const override;
    /// Folder holding the filler files used to generate the fixtures.
    boost::filesystem::path suiteFillerFolder() const override;
};
}
}
|
# Write your solution here
def line(number, text):
    """Print a line of `number` characters: the first character of `text`,
    or "*" when `text` is empty."""
    fill = text[0] if text else "*"
    print(fill * number)

# You can test your function by calling it within the following block
if __name__ == "__main__":
    line(5, "x")
# Powerlevel9k prompt configuration: mode, segment selection, icons, colors.
# if [[ $(host_os) = FreeBSD ]] ; then
#   POWERLEVEL9K_MODE=""
# else
#   POWERLEVEL9K_MODE="nerdfont-complete"
# fi
POWERLEVEL9K_MODE="nerdfont-complete"
# Hide false-positive(?) warning on FreeBSD
[ $host_os = "FreeBSD" ] && POWERLEVEL9K_IGNORE_TERM_COLORS=true

####
#### Context (user + host)
####
POWERLEVEL9K_CONTEXT_TEMPLATE=%n:%m
POWERLEVEL9K_ALWAYS_SHOW_CONTEXT=false
POWERLEVEL9K_ALWAYS_SHOW_USER=false
# State Meaning
# DEFAULT You are a normal user
# ROOT You are the root user
# SUDO You are using elevated rights
# REMOTE_SUDO You are SSH'ed into the machine and have elevated rights
# REMOTE You are SSH'ed into the machine
POWERLEVEL9K_CONTEXT_DEFAULT_BACKGROUND='black'
POWERLEVEL9K_CONTEXT_DEFAULT_FOREGROUND='grey9'

####
#### Icons
####
POWERLEVEL9K_OS_ICON_BACKGROUND='black'
POWERLEVEL9K_ROOT_ICON="\uF09C"
POWERLEVEL9K_USER_ICON="" # "\uF415"
POWERLEVEL9K_SUDO_ICON=$'\uF09C'
POWERLEVEL9K_HOST_ICON=""
POWERLEVEL9K_SSH_ICON=""
# Default user only for my local machine
DEFAULT_USER=Lukas

###
### Folder truncation
###
POWERLEVEL9K_SHORTEN_STRATEGY="truncate_to_first_and_last" # "truncate_to_unique" # "truncate_middle"
POWERLEVEL9K_SHORTEN_DIR_LENGTH=1

####
#### Multiline
####
POWERLEVEL9K_PROMPT_ON_NEWLINE=true
# POWERLEVEL9K_RPROMPT_ON_NEWLINE=false
# POWERLEVEL9K_RPROMPT_ON_NEWLINE=false
# POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=""
POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX="%F{blue}\u256D\u2500%F{white}"
POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX="%F{blue}\u2570\uf460%F{white} "
# POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX="%F{blue}\u2570%F{cyan}\uF460%F{073}\uF460%F{109}\uF460%f "
POWERLEVEL9K_PROMPT_ADD_NEWLINE=true
# POWERLEVEL9K_LEFT_SEGMENT_END_SEPARATOR=""

###
### colors (per-segment background/foreground overrides)
###
POWERLEVEL9K_USER_DEFAULT_BACKGROUND='black'
POWERLEVEL9K_USER_DEFAULT_FOREGROUND='grey9'
POWERLEVEL9K_USER_ROOT_BACKGROUND='black'
POWERLEVEL9K_USER_ROOT_FOREGROUND='red'
POWERLEVEL9K_HOST_LOCAL_BACKGROUND='black'
POWERLEVEL9K_HOST_LOCAL_FOREGROUND='grey9'
POWERLEVEL9K_HOST_REMOTE_FOREGROUND='white'
POWERLEVEL9K_HOST_REMOTE_BACKGROUND='black'
POWERLEVEL9K_DIR_WRITABLE_FORBIDDEN_BACKGROUND="black"
POWERLEVEL9K_DIR_WRITABLE_FORBIDDEN_FOREGROUND="red"
POWERLEVEL9K_DIR_HOME_BACKGROUND="blue"
POWERLEVEL9K_DIR_HOME_FOREGROUND="black"
POWERLEVEL9K_DIR_DEFAULT_BACKGROUND="blue"
POWERLEVEL9K_DIR_DEFAULT_FOREGROUND="black"
POWERLEVEL9K_DIR_HOME_SUBFOLDER_BACKGROUND="blue"
POWERLEVEL9K_DIR_HOME_SUBFOLDER_FOREGROUND="black"
POWERLEVEL9K_VCS_CLEAN_BACKGROUND="green"
POWERLEVEL9K_VCS_CLEAN_FOREGROUND="black"
POWERLEVEL9K_VCS_MODIFIED_BACKGROUND="yellow"
POWERLEVEL9K_VCS_MODIFIED_FOREGROUND="black"
POWERLEVEL9K_VCS_UNTRACKED_BACKGROUND="red"
POWERLEVEL9K_VCS_UNTRACKED_FOREGROUND="black"
#POWERLEVEL9K_LEFT_SEGMENT_SEPARATOR=$''
#POWERLEVEL9K_RIGHT_SEGMENT_SEPARATOR=$''
#POWERLEVEL9K_LEFT_SUBSEGMENT_SEPARATOR=''

###
### Segments (what shows on the left / right of the prompt)
###
POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(
    os_icon
    # ssh
    context_joined
    root_indicator_joined
    # user_joined host_joined
    dir_writable dir
    # newline
    vcs
    # newline
    # os_icon
    # battery
)
POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(
    status
    command_execution_time
    anaconda
    virtualenv
    background_jobs
    # disk_usage
    # ram
    time
    # date
)
# For debugging etc, to manually print specific segments/characters
# echo $(print_icon 'LEFT_SEGMENT_SEPARATOR')
# Run the IceNLP PP_errors parser stage on input file $1, writing to $2.
java -Xmx256M -classpath ../../../dist/IceNLPCore.jar is.iclt.icenlp.core.iceparser.PP_errors $1 > $2
|
<reponame>smagill/opensphere-desktop<filename>open-sphere-base/core/src/main/java/io/opensphere/core/util/collections/SetProvider.java
package io.opensphere.core.util.collections;
import java.util.Collection;
import java.util.Set;
/**
 * Interface for a facility that provides sets.
 *
 * @param <E> The type of elements in the provided collections.
 */
public interface SetProvider<E> extends CollectionProvider<E>
{
    /** @return a new set. */
    @Override
    Set<E> get();

    /**
     * @param contents the elements to populate the set with.
     * @return a new set containing the given contents.
     */
    @Override
    Set<E> get(Collection<? extends E> contents);

    /**
     * @param size a sizing hint for the new set.
     * @return a new set sized for {@code size} elements.
     */
    @Override
    Set<E> get(int size);

    /**
     * @return an empty set; whether it is shared or modifiable is
     *         implementation-defined.
     */
    @Override
    Set<E> getEmpty();
}
|
package au.org.noojee.irrigation.views;
import java.security.SecureRandom;
import java.time.LocalDateTime;
import java.util.Base64;
import java.util.Base64.Encoder;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.mail.EmailException;
import org.apache.commons.mail.SimpleEmail;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.vaadin.navigator.View;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.VaadinService;
import com.vaadin.server.VaadinServletRequest;
import com.vaadin.ui.Button;
import com.vaadin.ui.FormLayout;
import com.vaadin.ui.Notification;
import com.vaadin.ui.Notification.Type;
import com.vaadin.ui.TextField;
import com.vaadin.ui.UI;
import au.org.noojee.irrigation.PiGationConfig;
import au.org.noojee.irrigation.dao.UserDao;
import au.org.noojee.irrigation.entities.User;
/**
 * Vaadin view that lets a user request a password-reset link.
 *
 * The user enters an email address; if it matches a known {@link User}, a
 * random URL-safe token (valid for 10 minutes) is attached to the user and a
 * reset link containing the token is emailed to them.
 */
@SuppressWarnings("serial")
public class ForgottenPasswordView extends FormLayout implements View
{
    static private final Logger logger = LogManager.getLogger();
    // Navigator name under which this view is registered.
    static public String NAME = "ForgottenPasswordView";
    // CRLF line ending for the plain-text email body.
    private static final String NEWLINE = "\r\n";
    // Cryptographically strong source for reset tokens.
    SecureRandom random = new SecureRandom();
    private TextField emailAddress;

    /** Builds the simple form: an email field and a Send button. */
    public ForgottenPasswordView()
    {
        emailAddress = new TextField("Email Address");
        this.addComponent(emailAddress);
        emailAddress.setPlaceholder("Enter your email address");
        emailAddress.setWidth(100, Unit.MM);
        Button btnSend = new Button("Send");
        this.addComponent(btnSend);
        btnSend.addClickListener(l -> send());
    }

    /**
     * Looks up the user by the entered email address, stores a fresh token with
     * a 10-minute expiry on the user, and emails them a reset link built from
     * the current request URL.
     */
    private void send()
    {
        String body = "Click the following link to reset your password." + NEWLINE;
        String token = generateToken();
        VaadinRequest vaadinRequest = VaadinService.getCurrentRequest();
        HttpServletRequest httpServletRequest = ((VaadinServletRequest)vaadinRequest).getHttpServletRequest();
        // Reset link points at ResetPasswordView with the token as a path segment.
        String requestUrl = httpServletRequest.getRequestURL().toString();
        String link = requestUrl + "/ResetPasswordView/" + token;
        body += link + NEWLINE;
        // Finder user by email
        UserDao daoUser = new UserDao();
        User user = daoUser.getByEmailAddress(emailAddress.getValue().trim());
        if (user == null)
        {
            Notification.show("Error", "The entered email address is unknown.", Type.ERROR_MESSAGE);
        }
        else
        {
            // NOTE(review): the token and expiry are set on the entity but no DAO
            // save/update call is visible here — unless the entity is managed and
            // auto-flushed, the token may never be persisted. Verify.
            user.setSecurityToken(token);
            user.setTokenExpiryDate(LocalDateTime.now().plusMinutes(10));
            sendEmail(user, "Email reset", body);
            // WARNING_MESSAGE is presumably used for its longer display time — confirm.
            Notification.show("Reset sent", "A reset link has been sent to the entered email address.", Type.WARNING_MESSAGE);
            UI.getCurrent().getNavigator().navigateTo(LoginView.NAME);
        }
    }

    /** @return a 20-byte random token, Base64 URL-safe encoded without padding. */
    private String generateToken()
    {
        byte bytes[] = new byte[20];
        random.nextBytes(bytes);
        Encoder encoder = Base64.getUrlEncoder().withoutPadding();
        String token = encoder.encodeToString(bytes);
        return token;
    }

    /**
     * Sends a plain-text email via the configured SMTP server.
     * Failures are logged and swallowed — the caller is not informed.
     */
    private void sendEmail(User user, String subject, String body)
    {
        SimpleEmail email = new SimpleEmail();
        email.setHostName(PiGationConfig.SELF.getSmtpServer());
        email.setSmtpPort(PiGationConfig.SELF.getSmtpPort());
        try
        {
            // NOTE(review): "<EMAIL>" looks like a redacted placeholder, not a
            // valid sender address — most SMTP servers will reject it. Verify.
            email.setFrom("<EMAIL>");
            email.setSubject(subject);
            email.setMsg(body);
            email.addTo(user.getEmailAddress());
            email.send();
        }
        catch (EmailException e)
        {
            logger.error(e, e);
        }
    }
}
|
#!/usr/bin/env bash
# Abort on any command failure, including failures inside a pipeline.
set -eo pipefail
SCRIPT_VERSION=3.1 # Build script version (change this to re-build the CICD image)
##########################################################################
# This is the EOSIO automated install script for Linux and Mac OS.
# This file was downloaded from https://github.com/EOSIO/eos
#
# Copyright (c) 2017, Respective Authors all rights reserved.
#
# After June 1, 2018 this software is available under the following terms:
#
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# https://github.com/EOSIO/eos/blob/master/LICENSE
##########################################################################
function usage() {
   # Print the option summary to stderr, then exit with failure.
   # Fix: the program name is now consumed via %s; previously $0 was expanded
   # inside the format string AND passed again as an extra printf argument with
   # no conversion left to consume it (a dead argument that can cause the
   # format to be re-emitted, per printf's format-reuse rule).
   printf "Usage: %s OPTION...
-P Build with pinned clang and libcxx
-o TYPE Build <Debug|Release|RelWithDebInfo|MinSizeRel> (default: Release)
-s NAME Core Symbol Name <1-7 characters> (default: SYS)
-b DIR Use pre-built boost in DIR
-i DIR Directory to use for installing dependencies & EOSIO (default: $HOME)
-y Noninteractive mode (answers yes to every prompt)
-c Enable Code Coverage
-d Generate Doxygen
-m Build MongoDB dependencies
\\n" "$0" 1>&2
   exit 1
}
TIME_BEGIN=$( date -u +%s )

# Parse command-line flags; any invalid usage prints help and exits.
if [ $# -ne 0 ]; then
  while getopts "o:s:b:i:ycdhmP" opt; do
    case "${opt}" in
      o )
        options=( "Debug" "Release" "RelWithDebInfo" "MinSizeRel" )
        if [[ "${options[*]}" =~ "${OPTARG}" ]]; then
          CMAKE_BUILD_TYPE=$OPTARG
        else
          echo "Invalid argument: ${OPTARG}" 1>&2
          usage
        fi
        ;;
      s )
        # Core symbol name must be 1-7 characters.
        # Fix: reject an EMPTY name too — the original tested [ -z "${#OPTARG}" ],
        # but ${#OPTARG} expands to a length (e.g. "0"), which is never the empty
        # string, so `-s ""` slipped through validation.
        if [ "${#OPTARG}" -gt 7 ] || [ -z "$OPTARG" ]; then
          echo "Invalid argument: ${OPTARG}" 1>&2
          usage
        else
          CORE_SYMBOL_NAME=$OPTARG
        fi
        ;;
      b )
        BOOST_LOCATION=$OPTARG
        ;;
      i )
        INSTALL_LOCATION=$OPTARG
        ;;
      y )
        NONINTERACTIVE=true
        PROCEED=true
        ;;
      c )
        ENABLE_COVERAGE_TESTING=true
        ;;
      d )
        ENABLE_DOXYGEN=true
        ;;
      m )
        ENABLE_MONGO=true
        ;;
      P )
        PIN_COMPILER=true
        ;;
      h )
        usage
        ;;
      ? )
        echo "Invalid Option!" 1>&2
        usage
        ;;
      : )
        echo "Invalid Option: -${OPTARG} requires an argument." 1>&2
        usage
        ;;
      * )
        usage
        ;;
    esac
  done
fi
export CURRENT_WORKING_DIR=$(pwd) # relative path support
# Ensure we're in the repo root and not inside of scripts
cd $( dirname "${BASH_SOURCE[0]}" )/..
# Load eosio specific helper functions
. ./scripts/helpers/eosio.sh
$VERBOSE && echo "Build Script Version: ${SCRIPT_VERSION}"
echo "EOSIO Version: ${EOSIO_VERSION_FULL}"
echo "$( date -u )"
echo "User: ${CURRENT_USER}"
# echo "git head id: %s" "$( cat .git/refs/heads/master )"
echo "Current branch: $( execute git rev-parse --abbrev-ref HEAD 2>/dev/null )"
# NOTE(review): `set -i` is an unusual way to request interactive behavior and
# is silently ignored by some shells — confirm it still has the intended effect.
( [[ ! $NAME == "Ubuntu" ]] && [[ ! $ARCH == "Darwin" ]] ) && set -i # Ubuntu doesn't support interactive mode since it uses dash + Some folks are having this issue on Darwin; colors aren't supported yet anyway
# Ensure sudo is available (only if not using the root user)
ensure-sudo
# Test that which is on the system before proceeding
ensure-which
# Prevent a non-git clone from running
ensure-git-clone
# Prompt user for installation path (Set EOSIO_INSTALL_DIR)
install-directory-prompt
# If the same version has already been installed...
previous-install-prompt
# Prompt user and asks if we should install mongo or not
prompt-mongo-install
# Setup directories and envs we need (must come last)
setup
execute cd $REPO_ROOT
# Submodules need to be up to date
ensure-submodules-up-to-date
# Check if cmake already exists
( [[ -z "${CMAKE}" ]] && [[ ! -z $(command -v cmake 2>/dev/null) ]] ) && export CMAKE=$(command -v cmake 2>/dev/null) && export CMAKE_CURRENT_VERSION=$($CMAKE --version | grep -E "cmake version[[:blank:]]*" | sed 's/.*cmake version //g')
# If it exists, check that it's > required version +
# (versions are compared by padding each dotted component to 3 digits, e.g. 3.10.2 -> 003010002)
if [[ ! -z $CMAKE_CURRENT_VERSION ]] && [[ $((10#$( echo $CMAKE_CURRENT_VERSION | awk -F. '{ printf("%03d%03d%03d\n", $1,$2,$3); }' ))) -lt $((10#$( echo $CMAKE_REQUIRED_VERSION | awk -F. '{ printf("%03d%03d%03d\n", $1,$2,$3); }' ))) ]]; then
   export CMAKE=
   if [[ $ARCH == 'Darwin' ]]; then
      echo "${COLOR_RED}The currently installed cmake version ($CMAKE_CURRENT_VERSION) is less than the required version ($CMAKE_REQUIRED_VERSION). Cannot proceed."
      exit 1
   else
      echo "${COLOR_YELLOW}The currently installed cmake version ($CMAKE_CURRENT_VERSION) is less than the required version ($CMAKE_REQUIRED_VERSION). We will be installing $CMAKE_VERSION.${COLOR_NC}"
   fi
fi
# Use existing cmake on system (either global or specific to eosio)
# Setup based on architecture
if [[ $ARCH == "Linux" ]]; then
   export CMAKE=${CMAKE:-${EOSIO_INSTALL_DIR}/bin/cmake}
   [[ ! -e /etc/os-release ]] && print_supported_linux_distros_and_exit
   case $NAME in
      "Amazon Linux AMI" | "Amazon Linux")
         echo "${COLOR_CYAN}[Ensuring YUM installation]${COLOR_NC}"
         FILE="${REPO_ROOT}/scripts/eosio_build_amazonlinux.sh"
         ;;
      "CentOS Linux")
         FILE="${REPO_ROOT}/scripts/eosio_build_centos.sh"
         ;;
      "Ubuntu")
         FILE="${REPO_ROOT}/scripts/eosio_build_ubuntu.sh"
         ;;
      *) print_supported_linux_distros_and_exit;;
   esac
   CMAKE_PREFIX_PATHS="${EOSIO_INSTALL_DIR}"
fi
if [ "$ARCH" == "Darwin" ]; then
   # opt/gettext: remcli requires Intl, which requires gettext; it's keg only though and we don't want to force linking: https://github.com/EOSIO/eos/issues/2240#issuecomment-396309884
   # EOSIO_INSTALL_DIR/lib/cmake: mongo_db_plugin.cpp:25:10: fatal error: 'bsoncxx/builder/basic/kvp.hpp' file not found
   CMAKE_PREFIX_PATHS="/usr/local/opt/gettext;${EOSIO_INSTALL_DIR}"
   FILE="${SCRIPT_DIR}/eosio_build_darwin.sh"
   export CMAKE=${CMAKE}
fi
# Find and replace OPT_DIR in pinned_toolchain.cmake, then move it into build dir
# NOTE(review): `&>` also redirects sed's stderr into the generated .cmake file,
# so any sed error text would corrupt it — confirm `>` alone wasn't intended.
execute bash -c "sed -e 's~@~$OPT_DIR~g' $SCRIPT_DIR/pinned_toolchain.cmake &> $BUILD_DIR/pinned_toolchain.cmake"
echo "${COLOR_CYAN}====================================================================================="
echo "======================= ${COLOR_WHITE}Starting EOSIO Dependency Install${COLOR_CYAN} ===========================${COLOR_NC}"
execute cd $SRC_DIR
set_system_vars # JOBS, Memory, disk space available, etc
echo "Architecture: ${ARCH}"
. $FILE # Execute OS specific build file
execute cd $REPO_ROOT
echo ""
echo "${COLOR_CYAN}========================================================================"
echo "======================= ${COLOR_WHITE}Starting EOSIO Build${COLOR_CYAN} ===========================${COLOR_NC}"
if $VERBOSE; then
   echo "CXX: $CXX"
   echo "CC: $CC"
fi
execute cd $BUILD_DIR
# LOCAL_CMAKE_FLAGS
$ENABLE_MONGO && LOCAL_CMAKE_FLAGS="-DBUILD_MONGO_DB_PLUGIN=true ${LOCAL_CMAKE_FLAGS}" # Enable Mongo DB Plugin if user has enabled -m
if $PIN_COMPILER; then
   CMAKE_PREFIX_PATHS="${CMAKE_PREFIX_PATHS};${LLVM_ROOT}"
   LOCAL_CMAKE_FLAGS="${PINNED_TOOLCHAIN} -DCMAKE_PREFIX_PATH='${CMAKE_PREFIX_PATHS}' ${LOCAL_CMAKE_FLAGS}"
else
   LOCAL_CMAKE_FLAGS="-DCMAKE_CXX_COMPILER='${CXX}' -DCMAKE_C_COMPILER='${CC}' -DCMAKE_PREFIX_PATH='${CMAKE_PREFIX_PATHS}' ${LOCAL_CMAKE_FLAGS}"
fi
$ENABLE_DOXYGEN && LOCAL_CMAKE_FLAGS="-DBUILD_DOXYGEN='${DOXYGEN}' ${LOCAL_CMAKE_FLAGS}"
$ENABLE_COVERAGE_TESTING && LOCAL_CMAKE_FLAGS="-DENABLE_COVERAGE_TESTING='${ENABLE_COVERAGE_TESTING}' ${LOCAL_CMAKE_FLAGS}"
# Configure and build EOSIO itself.
execute bash -c "$CMAKE -DCMAKE_BUILD_TYPE='${CMAKE_BUILD_TYPE}' -DCORE_SYMBOL_NAME='${CORE_SYMBOL_NAME}' -DCMAKE_INSTALL_PREFIX='${EOSIO_INSTALL_DIR}' ${LOCAL_CMAKE_FLAGS} '${REPO_ROOT}'"
execute make -j$JOBS
execute cd $REPO_ROOT 1>/dev/null
TIME_END=$(( $(date -u +%s) - $TIME_BEGIN ))
echo " _______ _______ _______ _________ _______"
echo "( ____ \( ___ )( ____ __ __ ( ___ )"
echo "| ( \/| ( ) || ( \/ ) ( | ( ) |"
echo "| (__ | | | || (_____ | | | | | |"
echo "| __) | | | |(_____ ) | | | | | |"
echo "| ( | | | | ) | | | | | | |"
echo "| (____/\| (___) |/\____) |___) (___| (___) |"
echo "(_______/(_______)\_______)\_______/(_______)"
echo "=============================================${COLOR_NC}"
echo "${COLOR_GREEN}EOSIO has been successfully built. $(($TIME_END/3600)):$(($TIME_END%3600/60)):$(($TIME_END%60))"
echo "${COLOR_GREEN}You can now install using: ${SCRIPT_DIR}/eosio_install.sh${COLOR_NC}"
echo "${COLOR_YELLOW}Uninstall with: ${SCRIPT_DIR}/eosio_uninstall.sh${COLOR_NC}"
echo ""
echo "${COLOR_CYAN}If you wish to perform tests to ensure functional code:${COLOR_NC}"
if $ENABLE_MONGO; then
   echo "${BIN_DIR}/mongod --dbpath ${MONGODB_DATA_DIR} -f ${MONGODB_CONF} --logpath ${MONGODB_LOG_DIR}/mongod.log &"
   PATH_TO_USE=" PATH=\$PATH:$OPT_DIR/mongodb/bin"
fi
echo "cd ${BUILD_DIR} &&${PATH_TO_USE} make test" # PATH is set as currently 'mongo' binary is required for the mongodb test
echo ""
resources
|
<reponame>saviorocha/freeCodeCamp-study
import React, { useState } from 'react';
import { AiOutlineMinus, AiOutlinePlus } from 'react-icons/ai';
const Question = () => {
return <h2>question component</h2>;
};
export default Question;
|
<reponame>vvydier/misk-web<gh_stars>10-100
import { logDebug, execute, handleCommand, npmRunScript } from "../utils"
// Yargs command metadata: `zip` packages a tab's source code.
export const command = "zip"
export const desc = "zip source code for tab\n"

// Logs the command being invoked, then delegates to the tab's npm "zip" script.
export const handlerFn = async (...args: any) => {
  logDebug(command, desc)
  execute(npmRunScript(command), ...args)
}

// Yargs entry point; handleCommand presumably wraps handlerFn with shared
// error handling — confirm in ../utils.
export const handler = async (yargs: any) => handleCommand(yargs, handlerFn)
|
class CustomBinaryNumber:
    """Wrap an integer and render it as binary digits without the '0b' prefix."""

    def __init__(self, num):
        # The integer value to display in base 2.
        self.BitNum = num

    def _binary(self):
        # format(n, 'b') gives the digits without the '0b' prefix and, unlike
        # the previous bin(n)[2:], renders negatives correctly: bin(-5)[2:]
        # yielded 'b101', format(-5, 'b') yields '-101'. Identical for n >= 0.
        return format(self.BitNum, "b")

    def __repr__(self):
        return self._binary()

    def __str__(self):
        return self._binary()
def test_custom_binary_number(num):
    """Wrap num in a CustomBinaryNumber and return its (repr, str) pair."""
    wrapped = CustomBinaryNumber(num)
    return repr(wrapped), str(wrapped)
#!/bin/bash
# Fail the build when an installed dependency carries a GPL-family license.
set -euo pipefail

# License table for every package in the current poetry environment.
LICENSES=$(poetry run pip-licenses)
# NOTE(review): a plain 'GPL' match also catches 'LGPL' entries, which are often
# acceptable — confirm that is intended. '|| true' keeps grep's "no match" exit
# status (1) from tripping errexit.
INCOMPATIBLE_LIBS=$(echo "$LICENSES" | grep 'GPL' || true)

if [[ -z $INCOMPATIBLE_LIBS ]]; then
  exit 0
else
  echo "The following libraries were found which are not compatible with this project's license:"
  echo "$INCOMPATIBLE_LIBS"
  exit 1
fi
|
-- Return the single oldest person (ties are broken arbitrarily by the engine).
SELECT Name, Age FROM Person ORDER BY Age DESC LIMIT 1;
#!/usr/bin/env bash
# Strict mode: exit on error, failed pipeline stage, or unset variable.
set -o errexit
set -o pipefail
set -o nounset

# Start the Celery beat scheduler for the project task app at INFO log level
# (beat only schedules periodic tasks; workers run as separate processes).
celery -A big_data_for_education.taskapp beat -l INFO
|
// Guard helper: throws when `value` is strictly null (undefined passes).
// `label` is interpolated into the error message to identify the value.
function assertNotNullable(value, label) {
  if (value !== null) {
    return;
  }
  throw new Error(`Expected value (${label}) not to be nullable, actually null.`);
}
// Jest test: the helper must throw its descriptive message for a null value.
test('Throw error if value is null.', () => {
  expect(() => assertNotNullable(null, 'fuga'))
    .toThrowError('Expected value (fuga) not to be nullable, actually null.');
});
package com.honyum.elevatorMan.net.base;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * REST endpoint paths, server response codes, and work-order/repair state
 * flags shared by the app's networking layer.
 *
 * NOTE(review): much of the original (Chinese) javadoc had been copy-pasted
 * between unrelated constants and did not describe them; it has been replaced
 * with section comments derived from the constant names and URL paths —
 * verify any non-obvious mapping against the server-side API.
 *
 * Transliterated terms appearing in names (per the original comments):
 * "baoxiu" = repair report, "weixiugong" = repair worker, "maint" = maintenance.
 */
public class NetConstant {

    // ---- Dashboard / index statistics and work-order management ----
    public static final String UPDATEWORKORDER= "/workOrder/updateWorkOrder";
    public static final String GETINDEXWEIXIUGONGLINE= "/getIndexWeiXiuGongLine";
    public static final String GETINDEXALARMREPORTELEVATORLINE= "/getIndexAlarmReportElevatorLine";
    public static final String GETINDEXBAOXIUREPORTELEVATORLINE= "/getIndexBaoXiuReportElevatorLine";
    public static final String GETINDEXWORKORDERREPORTELEVATORLINE= "/getIndexWorkOrderReportElevatorLine";
    public static final String GETINDEXBAOXIUREPORT= "/getIndexBaoXiuReport";
    public static final String GETINDEXWORKORDERREPORT= "/getIndexWorkOrderReport";
    public static final String GETINDEXALARMREPORT= "/getIndexAlarmReport";
    public static final String GETINDEXKPI= "/getIndexKPI";
    public static final String WORKORDERGETWORKORDERCOUNTBYCOMMUNITY= "/workOrder/getWorkOrderCountByCommunity";
    public static final String REDISTRIBUTE= "/workOrder/reDistribute";
    public static final String GETCONTRACTPAYMENTINFOBYID= "/getContractPaymentInfoById";
    /** Unread-notification ("red hint") indicator. */
    public static final String GETUNREAD= "/getRedHint";

    // ---- Maintenance plans and repair-report (baoxiu) process ----
    public static final String GETSTATSTATE = "/workOrder/getStatState";
    public static final String GETMAINPLAN = "/getMainPlanInfoById";
    public static final String COMMITMAINPLAN = "/commitMainPlan";
    public static final String GETBAOXIULISTBYSTATE = "/getBaoxiuListByState";
    // /**
    //  * (kept, disabled) maintenance list by branch
    //  */
    // public static final String GETMAINTLISTBYBRANCHID = "/getMaintListByBranchId";
    public static final String CONFIRM_BAOXIU_PROCESS = "/confirmBaoxiuProcess";
    /** Elevator detail by elevator id. */
    public static final String GET_ELEVATOR_BY_ELEVATOR_ID = "/getElevatorByElevatorId";
    public static final String GETBAOXIUDETAILBYID = "/getBaoxiuDetailById";
    public static final String ASSIGNBAOXIUUSER = "/assignBaoxiuUser";
    public static final String GETUSERLISTBYBAOXIUID = "/getUserListByBaoxiuId";

    // ---- Work orders (repair worker side) ----
    public static final String UNDEALCOUNT = "/undealCount";
    public static final String GET_BAOXIU_LIST = "/workOrder/getBaoxiuListByRepair";
    public static final String GET_MAINT_ITEM= "/workOrder/getMaintItemByOrderId";
    /** Fault type looked up by code (contrast GET_REPAIR_TYPE_LIST below). */
    public static final String GET_FAULT_TYPE = "/getFaultTypeByCode";
    public static final String SUCCESS_BAOXIU_WORK_ORDER = "/workOrder/successBaoxiuWorkOrder";
    public static final String SUCCESS_MAINT_WORK_ORDER = "/workOrder/successMaintWorkOrder";
    public static final String GET_WORK_ORDERS_BY_REPAIR="/workOrder/getWorkOrdersByRepair";
    public static final String CANCEL_WORK_ORDER = "/workOrder/cancelWorkOrder";
    public static final String GET_MAINT_LIST_BY_REPAIR = "/workOrder/getMaintListByRepair";
    public static final String GET_CONTRACT = "/workOrder/getContract";
    public static final String EDIT_WORK_ORDER = "/workOrder/editWorkOrderDate";
    public static final String ADD_WORK_ORDER = "/workOrder/addWorkOrder";

    // ---- Approval workflow ----
    public static final String GETAPPROVEHISTORYDATAGRID = "/getApproveHistoryDataGrid";
    public static final String GETPROCESSPATH = "/getProcessPath";
    public static final String SELECTUSERDATAGRID = "/selectUserDataGrid";
    public static final String COMMITNEXTPROCESSNODE = "/commitNextProcessNode";

    // ---- Repair-report management ----
    /** Elevator fault types (note path differs from GET_FAULT_TYPE above). */
    public static final String GET_REPAIR_TYPE_LIST = "/getFaultType";
    public static final String GET_BAOXIU_BY_ROIE_ID = "/getBaoxiuByRoleId";
    public static final String ADD_BAOXIU = "/addBaoxiu";
    public static final String GET_ELEVATOR_LIFT_NUM = "/getElevatorByLiftNumAndNumber";
    public static final String SAVE_BAOXIU_PROCESS = "/saveBaoxiuProcess";
    /** Pending ("to do") items data grid. */
    public static final String UNDEALDATAGRID = "/undealDataGrid";

    // ---- Contracts ----
    public static final String GET_CONTRACT_BY_ROLEID = "/getContractByRoleId";
    public static final String GET_CONTRACT_INFO_BY_ID = "/getContractInfoById";

    // ---- Chat, H5 pages, elevators and communities ----
    public static final String GET_CHATUSERBYALARMID = "/getChatUserByAlarmId";
    /** Company-registration protocol page (H5). */
    public static final String COMPANY_APPLY = "/h5/regProtocol";
    public static final String GET_ELEVATOR_BY_COMMUNITYID = "/getElevatorByCommunityID";
    public static final String GET_COMMUNITY_BY_DISTANCE = "/getCommunityByDistance";
    public static final String EDIT_ERROR_REASON = "/editElevatorError";
    /** Top-level rescue fault types. */
    public static final String TOP_FAULT_TYPE = "/getTopFaultType";
    /** Error reasons for a bad elevator QR-code scan. */
    public static final String ERROR_REASON = "/getErrorReason";
    /** Elevator looked up by its identification (lift) number. */
    public static final String ELE_RECORD_BYID = "/getElevatorByLiftNum";
    public static final String ELE_RECORD_UPDATE = "/editElevatorPosition";

    // ---- Attendance (sign in / sign out / leave) ----
    public static final String SIGN = "/addAttendance";
    /** Cancel leave / edit an attendance record. */
    public static final String SIGN_CANCEL = "/editAttendance";
    public static final String SIGN_GET = "/getAttendance";

    // ---- Uploads, insurance and company application ----
    /** Upload a log file. */
    public static final String UPLOAD_FILE = "/uploadFile";
    public static final String GET_POLICY = "/getPolicyUserByUserInfo";
    public static final String UPDATE_APPLIY = "/editAppliy";
    public static final String GET_APPLIY = "/getAppliyByuserId";
    public static final String ADD_APPLY = "/addAppliy";
    /** SMS verification code (small-owner flow). */
    public static final String SMS_CODE = "/getSMSCodeBySmallOwner";
    public static final String EDIT_INSURANCE = "/editInsuranceByUser";
    public static final String GET_DISTANCE = "/getReachDistanceBybranch";
    public static final String GET_PERSON_INFO = "/getUserById";
    public static final String GET_PERSONS_LIST = "/getUserWorkBybranch";
    /** Count of branch personnel currently online. */
    public static final String GET_PERSONS = "/getUserWorkOnLineCountBybranch";

    // ---- "e-ladder" (easy ladder), ads and misc ----
    public static final String EASY_LADDER_ORDERBYUSER = "/getEasyLadderOrderByUser";
    public static final String EASY_LADDER_URL = "/getEasyLadderUrl";
    /** Property company confirms a repair task is complete (same path as URL_CONFIRM_ALARM). */
    public static final String PROPERTY_CONFIRM = "/propertyConfirmComplete";
    public static final String UPLOAD_VIDEO = "/uploadVideo";
    public static final String YI_ZHU = "/h5/yiZhuIndexPage";
    public static final String NY_YI_ZHU = "/h5/myYiZhuPage";
    /** Maintenance-company user registration. */
    public static final String REG_COMPANY = "/registerMaintUser";
    /** Advertisement banner (small-owner flow). */
    public static final String GET_BANNER = "/getAdvertisementBySmallOwner";
    /** Elevator general-knowledge articles by type. */
    public static final String GET_KNOWLEDGE_BYKNTYPE = "/getKnowledgeByKntype";

    // ---- Server response codes ----
    /** Request succeeded. */
    public static final String RSP_CODE_SUC_0 = "0";
    /** Login timed out (e.g. after a server restart). */
    public static final String RSP_CODE_TIME_OUT = "-2";
    /** The account was logged in from another device. */
    public static final String RSP_CODE_OTHER_LOGIN = "-3";

    // ---- Session / alarm & rescue flow ----
    public static final String URL_LOG_IN = "/login";
    /** Alarm detail. */
    public static final String URL_ALARM_INFO = "/getAlarmDetail";
    public static final String URL_PROCESS_ALARM = "/getProcessByAlarmId";
    /** Upload the worker's current location. */
    public static final String URL_REPORT_LOCATION = "/updateLocation";
    /** Worker accepts a rescue task. */
    public static final String URL_ACCEPT_ALARM = "/userAcceptAlarm";
    /** Worker reports rescue progress state. */
    public static final String URL_REPORT_STATE = "/saveProcessRecord";
    /** Most recent alarm for a property user. */
    public static final String URL_ALARM_LIST_ONE = "/getOneAlarmListByUserId";
    /** Alarm list for a property user. */
    public static final String URL_ALARM_LIST = "/getAlarmListByUserId";
    /** Alarm list for a repair worker. */
    public static final String URL_WORKER_ALARM_LIST = "/getAlarmListByRepairUserId";
    /** Property user's elevator (project) list. */
    public static final String URL_PROJECT_LIST = "/getElevatorList";
    /** Property user raises an alarm. */
    public static final String URL_REPORT_ALARM = "/alarm";
    /** Alarm handling status (assigned repair workers). */
    public static final String URL_ALARM_STATE = "/getRepairListByAlarmId";
    /** Worker reports an unexpected situation en route. */
    public static final String URL_WORKER_EXCEPT = "/unexpectedByUser";
    /** Property confirms the rescue is complete (same path as PROPERTY_CONFIRM). */
    public static final String URL_CONFIRM_ALARM = "/propertyConfirmComplete";
    public static final String URL_VERSION_CHECK = "/checkVersion";
    public static final String URL_LOG_OUT = "/logout";
    /** Repair-worker locations near the alarm site. */
    public static final String URL_WORKERS = "/getNearUserLocation";
    /** Heartbeat / online time report. */
    public static final String URL_REPORT_TIME = "/online";

    // ---- Maintenance plans (worker and property sides) ----
    public static final String URL_GET_LIFT_INFO1 = "/getMaintenanceByElevatorId";
    public static final String URL_GET_LIFT_INFO = "/getMainElevatorListBJ";
    public static final String URL_GET_LIFT_INFO2 = "/getMainElevatorList";
    /** Worker submits a finished maintenance result. */
    public static final String URL_REPORT_MAIN = "/finishMain";
    /** Worker submits a new maintenance plan. */
    public static final String URL_REPORT_PLAN = "/newNewMainPlan";
    /** Temporary (cached/draft) maintenance plan. */
    public static final String MAIN_PLAN_CACHE = "/mainPlanCache";
    public static final String URL_MODIFY_PLAN = "/updateMainPlan";
    public static final String URL_DELETE_PLAN = "/removeMainPlan";
    /** Repair-worker registration. */
    public static final String URL_WORKER_REGISTER = "/registerRepair";
    /** Property: maintenance plans for its elevators. */
    public static final String URL_PRO_PLAN_LIST = "/getMainPlanByPropertyId";
    /** Property: approve/reject a maintenance plan. */
    public static final String URL_PRO_REPORT_PLAN_RESULT = "/verifyMainPlan";
    /** Property: finished maintenance awaiting confirmation. */
    public static final String URL_PRO_GET_FINISH_RESULT = "/getFinishedMainList";
    /** Property: maintenance history. */
    public static final String URL_PRO_GET_MAIN_HISTORY = "/getHistoryMainList";
    /** Property: maintenance detail. */
    public static final String URL_PRO_GET_MAIN_DETAIL = "/getMainDetail";

    // ---- Account / profile ----
    public static final String URL_MODIFY_INFO = "/editUser";
    public static final String URL_PWD_MODIFY = "/editUserPwd";
    /** Upload the user's avatar. */
    public static final String URL_UPLOAD_ICON = "/updateLoadPic";
    /** Maintenance-company (branch) list. */
    public static final String URL_REQUEST_COMPANY = "/getBranchs";
    public static final String URL_RESET_PWD = "/resetPWD";
    /** Hanbang "YDT" video parameters. */
    public static final String URL_REQUEST_YDT = "/getYdtParam";
    /** Acknowledgement after receiving a push notification. */
    public static final String URL_NOTIFY_FEEDBACK = "/alarmFeedback";
    public static final String URL_ALARM_UNASSIGNED = "/getAlarmListByReceiveAndUnassign";
    /** Report the worker's regular work address. */
    public static final String URL_REPORT_WORK_PLACE = "/saveOrUpdateResidentAddress";
    /** Report the worker's home address. */
    public static final String URL_REPORT_HOME_PLACE = "/saveOrUpdateFamilyAddress";

    // ---- Chat and media uploads ----
    public static final String UPLOAD_AUDIO = "/uploadAudio";
    public static final String SEND_CHAT = "/addChat";
    public static final String GET_CHAT_LIST = "/getChatList";
    /** Upload a certificate image. */
    public static final String UPLOAD_CERT = "/updateLoadCert";

    // ---- Property company ----
    public static final String GET_PROPERTY_ADDRESS_LIST = "/getPropertyAddressList";
    public static final String ADD_PROPERTY_ADDRESS = "/addPropertyAddress";
    public static final String GET_ALL_COMMUNITYS_BY_PROPERTY = "/getAllCommunitysByProperty";
    public static final String REGISTER_PROPERTY = "/propertyRegist";
    /** Record that the property contacted the maintenance company. */
    public static final String ADD_CONTACT_MAINT = "/addContactMaint";

    // ---- Repair worker: repair reports ----
    /** Repair reports currently in progress for this worker. */
    public static final String GET_UNDERWAY_REPAIR = "/getRepairByWorker";
    /** Completed repair reports for this worker. */
    public static final String GET_HISTORY_REPAIR = "/getRepairByWorkerComplete";
    public static final String COMMIT_REPAIR_DESCRIBE = "/editRepairByWorker";
    /** Property chat channels. */
    public static final String URL_CHAT_CHANNEL = "/getChatChannels";
    /** Upload a signature image. */
    public static final String URL_UPLOAD_SIGN = "/updateLoadAutograph";
    /** Property rejects maintenance as unqualified. */
    public static final String URL_MAINT_FAILED = "/backMaint";

    // ---- Maintenance service orders / tasks ----
    public static final String URL_MAINT_SERVICE = "/getMaintOrderByWorker";
    public static final String URL_MAINT_TASK = "/getMaintOrderProcessByMaintOrder";
    /** Worker departed for a maintenance task. */
    public static final String URL_MAINT_TASK_START = "/editMaintOrderProcessWorkerSetOut";
    /** Worker arrived at a maintenance task. */
    public static final String URL_MAINT_TASK_ARRIVE = "/editMaintOrderProcessWorkerArrive";
    /** Worker finished a maintenance task. */
    public static final String URL_MAINT_TASK_FINISH = "/editMaintOrderProcessWorkerFinish";
    /** Worker cannot finish a maintenance task. */
    public static final String URL_MAINT_TASK_UNFINISH = "/editMaintOrderProcessWorkerUnableFinish";
    /** Create a maintenance plan/process. */
    public static final String URL_MAINT_SERVICE_ADD = "/addMaintOrderProcess";

    // ---- Repair ("fix") orders / tasks ----
    public static final String URL_TASK_ADD = "/addRepairOrderProcess";
    /** Add repair price details. */
    public static final String URL_FIX_PAY_ADD = "/addPriceDetails";
    public static final String URL_FIX_FINISH = "/editRepairOrderProcessWorkerFinish";
    public static final String URL_FIX_ARRIVE = "/editRepairOrderProcessWorkerArrive";
    public static final String URL_FIX_START = "/editRepairOrderProcessWorkerSetOut";
    public static final String URL_FIX_TASK = "/getRepairOrderProcessByRepairOrder";

    // ---- Branch / company queries ----
    /** Department plus its sub-departments. */
    public static final String URL_DEP_LIST = "/getBranchsByBranchId";
    /** Repair workers in a department. */
    public static final String URL_WORKER_LIST = "/getWorkListByBranchId";
    public static final String URL_MAINT_LIST = "/getMaintListByBranchId";
    public static final String URL_MAINT_LIST_ELE = "/getMaintListByElevatorId";
    public static final String URL_ALARM_LIST_BRANCH = "/getAlarmListByBranchId";
    public static final String URL_COMPANY_NHMENTENANCE = "/getMaintOrderProcessByBranchIdOnState";
    public static final String URL_COMPANY_NHMENTENANCE_LIST = "/getPaymentByBranchId";
    // /**
    //  * (kept, disabled)
    //  */
    public static final String URL_FIX_LIST = "/getRepairOrderByWorker";
    public static final String URL_COMPANY_FIX = "/getRepairOrderProcessByBranchIdOnState";
    public static final String URL_COMPANY_FIX_LIST = "/getRepairOrderByBranchId";
    public static final String UP_LOAD_IMG = "/uploadImg";
    public static final String GET_ADVERTISEMENT_DETAIL = "/getAdvertisementDetail";
    /** Same path as URL_PROCESS_ALARM above. */
    public static final String GET_PROCESS_BY_ALARMID = "/getProcessByAlarmId";

    // ---- Maintenance-service state flags (string codes used by the server) ----
    // newly added (client-side placeholder)
    public static final String ADD_STATE = "-1";
    // awaiting confirmation
    public static final String UNENSURE_STATE = "0";
    // confirmed
    public static final String ENSURED_STATE = "1";
    // worker departed
    public static final String START_STATE = "2";
    // worker arrived
    public static final String ARRIVE_STATE = "3";
    // finished
    public static final String FINISH_STATE = "4";
    // evaluated
    public static final String EVA_STATE = "5";
    // could not be finished
    public static final String UN_FINISH = "6";
    // default paging: first page
    public static final int PAGE = 1;
    // default paging: rows per page
    public static final int ROWS = 100;
    //end
    // ---- Repair ("fix") task state flags ----
    // not yet departed
    public static final String FIX_BEFORE_START = "1";
    // departed
    public static final String FIX_STARTED = "2";
    // arrived
    public static final String FIX_ARRIVED = "3";
    // inspection finished
    public static final String FIX_LOOK_FINISHED = "5";
    // repair finished
    public static final String FIX_FIX_FINISH = "6";
    //end
    // ---- Repair completion kinds: 5 = inspection done, 6 = repair done ----
    public static final String FIX_LOOK = "5";
    public static final String FIX_FINISH = "6";
    //end
    // Payment details generated; used to decide whether the repair may be completed.
    public static final String FIX_PAYMENT_END = "7";
    //
    // ---- Maintenance attendance ----
    // query sign-in state
    public static final String GET_ATTENDANCE_MAINTENANCE="/getAttendanceMaintenanceByUserId";
    // sign in
    public static final String ADD_ATTENDANCE_MAINTENANCE = "/addAttendanceMaintenance";

    // ---- Later additions (annotated; nullability annotations on constants are
    //      unusual — presumably produced by a Kotlin-to-Java conversion, verify) ----
    @Nullable
    public static final String GETSELECT = "/workOrder/selectAssistant";
    @Nullable
    public static final String GETWORKORDERSBYID = "/workOrder/getWorkOrdersById";
    @Nullable
    public static final String GETWORKORDERMAINTAINITEMBYWORKORDERID="/workOrder/getWorkOrderMaintainItemByWorkOrderId";
    @Nullable
    public static final String GETBAOXIUORMAINTENANCEBYBIZID= "/workOrder/getBaoXiuOrMaintenanceByBizId";
    @Nullable
    public static final String GETWORKORDERBYBIZIDORBIZTYPE = "/workOrder/getWorkOrderByBizIdOrBizType";
    @Nullable
    public static final String GETDRAFTMAINTENANCEDEALLIST = "/getDraftMaintenanceDealList";
    @Nullable
    public static final String GETDEPARTMENT = "/comm/getDepartment";
    @Nullable
    public static final String GETCOMMUNITY = "/comm/getCommunity";
    @Nullable
    public static final String GETSTATUS = "/comm/getStatus";
    /** Same path as WORKORDERGETWORKORDERCOUNTBYCOMMUNITY above. */
    @Nullable
    public static final String GETWORKORDERCOUNTBYCOMMUNITY = "/workOrder/getWorkOrderCountByCommunity";
    @NotNull
    public static final String GETDRAFTMAINTENANCEDEALINFO = "/getDraftMaintenanceDealInfo";
    @Nullable
    public static final Object DELETEMAINTENANCEDEALINFO = "/deleteMaintenanceDealInfo";
}
|
#!/bin/sh
# Modify 256-color palette in supported terminals
# Script adapted from https://github.com/chriskempson/base16-shell
set -o errexit
# hex2rgb "#rrggbb" -> "rr/gg/bb" (the form OSC 4 palette escapes expect).
# BUGFIX(review): rewritten with cut instead of ${s:1:2}; substring parameter
# expansion is a bashism and this script declares #!/bin/sh (may be dash).
hex2rgb() {
  local hex
  hex=${1#\#}
  printf '%s/%s/%s\n' \
    "$(printf %s "$hex" | cut -c1-2)" \
    "$(printf %s "$hex" | cut -c3-4)" \
    "$(printf %s "$hex" | cut -c5-6)"
}
# Bail out on the raw linux console, which has no 256-color palette.
if [ "${TERM%%-*}" = 'linux' ]; then
  # This script doesn't support linux console
  return 2>/dev/null || exit 0
fi

# Choose escape-sequence templates for this terminal:
#   printf_template        - set palette slot N (OSC 4)
#   printf_template_var    - set special color N (foreground/background/cursor)
#   printf_template_custom - iTerm2 proprietary sequences
#
# BUGFIX(review): the original test was
#   [[ "$TERM" =~ "^(tmux|screen).*" && -n "$TMUX" ]]
# Inside [[ ]], a quoted right-hand side of =~ is matched as a literal string,
# so the tmux branch could never fire; [[ is also a bashism under #!/bin/sh.
# A portable case statement implements the intended prefix match.
case "$TERM" in
  tmux*|screen*) _in_screen_like=1 ;;
  *) _in_screen_like= ;;
esac
if [ -n "$_in_screen_like" ] && [ -n "$TMUX" ]; then
  # tell tmux to pass the escape sequences through
  # (Source: http://permalink.gmane.org/gmane.comp.terminal-emulators.tmux.user/1324)
  printf_template="\033Ptmux;\033\033]4;%d;rgb:%s\007\033\\"
  printf_template_var="\033Ptmux;\033\033]%d;rgb:%s\007\033\\"
  printf_template_custom="\033Ptmux;\033\033]%s%s\007\033\\"
elif [ "${TERM%%-*}" = "screen" ]; then
  # GNU screen (screen, screen-256color, screen-256color-bce)
  printf_template="\033P\033]4;%d;rgb:%s\007\033\\"
  printf_template_var="\033P\033]%d;rgb:%s\007\033\\"
  printf_template_custom="\033P\033]%s%s\007\033\\"
else
  printf_template="\033]4;%d;rgb:%s\033\\"
  printf_template_var="\033]%d;rgb:%s\033\\"
  printf_template_custom="\033]%s%s\033\\"
fi
unset _in_screen_like

# Solarized Dark palette (base16 slots + accent colors).
printf "$printf_template" 235 "$(hex2rgb "#002b36")"
printf "$printf_template" 236 "$(hex2rgb "#073642")"
printf "$printf_template" 242 "$(hex2rgb "#586e75")"
printf "$printf_template" 66  "$(hex2rgb "#657b83")"
printf "$printf_template" 246 "$(hex2rgb "#839496")"
printf "$printf_template" 247 "$(hex2rgb "#93a1a1")"
printf "$printf_template" 254 "$(hex2rgb "#eee8d5")"
printf "$printf_template" 230 "$(hex2rgb "#fdf6e3")"
printf "$printf_template" 136 "$(hex2rgb "#b58900")"
printf "$printf_template" 166 "$(hex2rgb "#cb4b16")"
printf "$printf_template" 160 "$(hex2rgb "#dc322f")"
printf "$printf_template" 162 "$(hex2rgb "#d33682")"
printf "$printf_template" 61  "$(hex2rgb "#6c71c4")"
printf "$printf_template" 32  "$(hex2rgb "#268bd2")"
printf "$printf_template" 37  "$(hex2rgb "#2aa198")"
printf "$printf_template" 106 "$(hex2rgb "#859900")"

# foreground / background / cursor color
# if [ -n "$ITERM_SESSION_ID" ]; then
#   # iTerm2 proprietary escape codes
#   printf "$printf_template_custom" Pg 93a1a1 # foreground
#   printf "$printf_template_custom" Ph 002b36 # background
#   printf "$printf_template_custom" Pi 93a1a1 # bold color
#   printf "$printf_template_custom" Pj 586e75 # selection color
#   printf "$printf_template_custom" Pk 93a1a1 # selected text color
#   printf "$printf_template_custom" Pl 93a1a1 # cursor
#   printf "$printf_template_custom" Pm 002b36 # cursor text
# else
#   printf "$printf_template_var" 10 "$color_foreground"
#   printf "$printf_template_var" 11 "$color_background"
#   printf "$printf_template_var" 12 "$color_cursor"
# fi

unset printf_template
unset printf_template_var
unset printf_template_custom
|
// Source repository: licjus/terraform-provider-ochk
// Code generated by go-swagger; DO NOT EDIT.
package reservations
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
)
// NewReservationListUsingGETParams creates a new ReservationListUsingGETParams
// object initialized with the default request timeout (cr.DefaultTimeout).
func NewReservationListUsingGETParams() *ReservationListUsingGETParams {
	return &ReservationListUsingGETParams{timeout: cr.DefaultTimeout}
}

// NewReservationListUsingGETParamsWithTimeout creates a new
// ReservationListUsingGETParams object with a caller-supplied request timeout.
func NewReservationListUsingGETParamsWithTimeout(timeout time.Duration) *ReservationListUsingGETParams {
	return &ReservationListUsingGETParams{timeout: timeout}
}

// NewReservationListUsingGETParamsWithContext creates a new
// ReservationListUsingGETParams object bound to the given request context.
func NewReservationListUsingGETParamsWithContext(ctx context.Context) *ReservationListUsingGETParams {
	return &ReservationListUsingGETParams{Context: ctx}
}

// NewReservationListUsingGETParamsWithHTTPClient creates a new
// ReservationListUsingGETParams object that will use a custom HTTP client.
func NewReservationListUsingGETParamsWithHTTPClient(client *http.Client) *ReservationListUsingGETParams {
	return &ReservationListUsingGETParams{HTTPClient: client}
}
/*ReservationListUsingGETParams contains all the parameters to send to the API endpoint
for the reservation list using g e t operation typically these are written to a http.Request
*/
type ReservationListUsingGETParams struct {

	/*SubtenantID
	  subtenantId path parameter identifying whose reservations to list
	*/
	SubtenantID string

	// timeout bounds the whole request; set via WithTimeout/SetTimeout.
	timeout time.Duration
	// Context cancels the in-flight request; set via WithContext/SetContext.
	Context context.Context
	// HTTPClient overrides the transport; set via WithHTTPClient/SetHTTPClient.
	HTTPClient *http.Client
}
// WithTimeout sets the request timeout and returns the params for chaining.
func (o *ReservationListUsingGETParams) WithTimeout(timeout time.Duration) *ReservationListUsingGETParams {
	o.timeout = timeout
	return o
}

// SetTimeout sets the request timeout.
func (o *ReservationListUsingGETParams) SetTimeout(timeout time.Duration) {
	o.timeout = timeout
}

// WithContext sets the request context and returns the params for chaining.
func (o *ReservationListUsingGETParams) WithContext(ctx context.Context) *ReservationListUsingGETParams {
	o.Context = ctx
	return o
}

// SetContext sets the request context.
func (o *ReservationListUsingGETParams) SetContext(ctx context.Context) {
	o.Context = ctx
}

// WithHTTPClient sets a custom HTTP client and returns the params for chaining.
func (o *ReservationListUsingGETParams) WithHTTPClient(client *http.Client) *ReservationListUsingGETParams {
	o.HTTPClient = client
	return o
}

// SetHTTPClient sets a custom HTTP client.
func (o *ReservationListUsingGETParams) SetHTTPClient(client *http.Client) {
	o.HTTPClient = client
}

// WithSubtenantID sets the subtenantId path parameter and returns the params
// for chaining.
func (o *ReservationListUsingGETParams) WithSubtenantID(subtenantID string) *ReservationListUsingGETParams {
	o.SubtenantID = subtenantID
	return o
}

// SetSubtenantID sets the subtenantId path parameter.
func (o *ReservationListUsingGETParams) SetSubtenantID(subtenantID string) {
	o.SubtenantID = subtenantID
}
// WriteToRequest writes these params to a swagger request: applies the
// configured timeout and substitutes SubtenantID into the URL path.
func (o *ReservationListUsingGETParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {

	if err := r.SetTimeout(o.timeout); err != nil {
		return err
	}
	// res collects validation errors; generator boilerplate that stays empty
	// here because the single parameter write below returns early on failure.
	var res []error

	// path param subtenantId
	if err := r.SetPathParam("subtenantId", o.SubtenantID); err != nil {
		return err
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
|
#!/bin/sh
# Integration tests for qcow2.c via catqcow2 using the output from qemu-img.
# Emit a TAP "Bail out!" line with the given reason and abort the whole run.
bail () {
    echo "Bail out! $*"
    exit 1
}
# result [0|1] <desc> -- emit one TAP test line and track failures.
# Increments the shared $counter, prints "ok"/"not ok", sets $failed on a
# non-zero result or when more tests run than start() planned ($countermax).
# IMPROVEMENT(review): POSIX arithmetic expansion replaces the archaic
# `expr` subprocess; positional parameters are quoted.
result () { # [0|1] <desc>
    counter=$((${counter:-0} + 1))
    if [ "$1" -eq 0 ]; then
        echo "ok $counter - $2"
    else
        echo "not ok $counter - $2"
        failed=1
    fi
    if [ "$counter" -gt "$countermax" ]; then
        echo "too many tests!" >&2
        failed=1
    fi
}
# start <n> -- begin a TAP run that expects <n> tests.
# Prints the TAP plan line ("1..n"), resets the per-run state used by
# result(), and installs an EXIT trap that turns any recorded failure
# into a non-zero exit status for the whole script.
start () { # number of tests
countermax=$1 counter=0 failed=
echo 1..$1
trap '[ $failed ] && exit 1' 0
}
# Thin wrapper program around qcow2.c
SUT=./catqcow2

start 3

# Precondition checks
qemu-img --version >/dev/null || bail "missing qemu-img"
[ -x "$SUT" ] || bail "missing $SUT"

tmp=${TMPDIR:-/tmp}/check

# 1. An "empty" 4MB qcow file dumps to 4MB of NULs
# - use qemu-img to make an empty 4MB qcow2 file
qemu-img create -q -f qcow2 "$tmp.zero4M.qcow2" 4194304
# - use dd to create a 4MB file of NULs
dd if=/dev/zero bs=1024 count=4096 of="$tmp.zero4M.raw" status=none
# - catqcow2 the .qcow2 file and compare it against the raw
$SUT <"$tmp.zero4M.qcow2" | cmp -s - "$tmp.zero4M.raw"
# - exit code should be success
result $? "empty 4M .qcow2 dumps to 4M of NULs"

# 2. A "random" 8MB input file converted will dump to its original data.
# - create the start of the random 8MB file
cat catqcow2 README.md >"$tmp.blah8M.raw"
# - keep growing the file by doubling it until it exceeds 8MB in size
while [ "$(wc -c <"$tmp.blah8M.raw")" -lt 8388608 ]; do
    # making semi-random input
    cat "$tmp.blah8M.raw" "$tmp.blah8M.raw" > "$tmp.blah8M.raw2"
    mv "$tmp.blah8M.raw2" "$tmp.blah8M.raw"
done
# - trim the file back to 8MB
dd bs=1048576 count=8 status=none <"$tmp.blah8M.raw" >"$tmp.blah8M.raw2"
mv "$tmp.blah8M.raw2" "$tmp.blah8M.raw"
# - convert it to .qcow2 using qemu-img
qemu-img convert -q -f raw -O qcow2 "$tmp.blah8M.raw" "$tmp.blah8M.qcow2"
# - check that what we extract using catqcow2 is what we put in
$SUT "$tmp.blah8M.qcow2" | cmp -s - "$tmp.blah8M.raw"
result $? "random 8M as .qcow2 dumps to same input"

# 3. Same again, but with a smaller cluster size.
# (This may fail with "too fine" on some systems)
# - convert random input to .qcow2 with a 4 KiB cluster size
# BUGFIX(review): the original comments and TAP description said "4M(B)",
# but cluster_size=4096 is 4 KiB, not 4 MiB.
qemu-img convert -q -f raw -O qcow2 -o cluster_size=4096 \
    "$tmp.blah8M.raw" "$tmp.blah8M.qcow2"
# - check that what we extract using catqcow2 is what we put in
$SUT "$tmp.blah8M.qcow2" | cmp -s - "$tmp.blah8M.raw"
result $? "random 8M as .qcow2 dumps to same input, cluster_size=4k"

# Clean up temporary files
rm -f "$tmp.zero4M.raw"
rm -f "$tmp.zero4M.qcow2"
rm -f "$tmp.blah8M.raw"
rm -f "$tmp.blah8M.qcow2"
|
# Set of functions for testing docker images in OpenShift using 'oc' command
# ct_os_get_status
# --------------------
# Returns status of all objects to make debugging easier.
function ct_os_get_status() {
oc get all
oc status
}
# ct_os_print_logs
# --------------------
# Returns status of all objects and logs from all pods.
# Uses process substitution (bash-only) so the while loop runs in the
# current shell.
function ct_os_print_logs() {
ct_os_get_status
while read pod_name; do
echo "INFO: printing logs for pod ${pod_name}"
oc logs ${pod_name}
done < <(oc get pods --no-headers=true -o custom-columns=NAME:.metadata.name)
}
# ct_os_enable_print_logs
# --------------------
# Enables automatic printing of pod logs on ERR.
# set -E makes the ERR trap inherited by functions and subshells.
function ct_os_enable_print_logs() {
set -E
trap ct_os_print_logs ERR
}
# ct_get_public_ip
# --------------------
# Best-effort guess of the IP this node is reachable at from other machines.
# Walks the IPv4 addresses reported by `hostname -I`, matching and removing
# them pattern by pattern, ordered from most-local (loopback, private ranges)
# to most-generic; the last pattern that still matches something wins, which
# de-prioritizes addresses commonly used for local-only networking.
function ct_get_public_ip() {
  local all_ips
  local candidate
  local pattern
  local public_ip=''
  all_ips=$(hostname -I)
  for pattern in '127\.0\.0\.1' '192\.168\.[0-9\.]*' '172\.[0-9\.]*' \
                 '10\.[0-9\.]*' '[0-9\.]*' ; do
    candidate=$(echo "${all_ips}" | grep -oe "${pattern}")
    if [ -n "${candidate}" ] ; then
      all_ips=$(echo "${all_ips}" | sed -e "s/${candidate}//")
      public_ip="${candidate}"
    fi
  done
  if [ -z "${public_ip}" ] ; then
    echo "ERROR: public IP could not be guessed." >&2
    return 1
  fi
  echo "${public_ip}"
}
# ct_os_run_in_pod POD_NAME CMD
# --------------------
# Runs [cmd] in the pod specified by [pod_name] (the full pod name, not a
# prefix).
# Arguments: pod_name - full name of the pod
# Arguments: cmd - command to be run in the pod
function ct_os_run_in_pod() {
local pod_name="$1" ; shift
oc exec "$pod_name" -- "$@"
}
# ct_os_get_service_ip SERVICE_NAME
# --------------------
# Returns the cluster IP of the service specified by [service_name].
# Arguments: service_name - name of the service
# NOTE(review): scrapes `oc get -o yaml` with grep/cut and only accepts
# addresses in 172.30.0.0/16 (presumably the default service network) --
# fragile if the cluster uses a different service CIDR.
function ct_os_get_service_ip() {
local service_name="${1}" ; shift
oc get "svc/${service_name}" -o yaml | grep clusterIP | \
cut -d':' -f2 | grep -oe '172\.30\.[0-9\.]*'
}
# ct_os_get_all_pods_status
# --------------------
# Returns the readiness flag and full name of every pod, one per line.
function ct_os_get_all_pods_status() {
oc get pods -o custom-columns=Ready:status.containerStatuses[0].ready,NAME:.metadata.name
}
# ct_os_get_all_pods_name
# --------------------
# Returns the full name of all pods.
function ct_os_get_all_pods_name() {
oc get pods --no-headers -o custom-columns=NAME:.metadata.name
}
# ct_os_get_pod_status POD_PREFIX
# --------------------
# Returns the readiness flag ("true"/"false") of the first pod matching
# [pod_prefix].
# Note: Ignores -build and -deploy pods
# Arguments: pod_prefix - prefix or whole ID of the pod
function ct_os_get_pod_status() {
local pod_prefix="${1}" ; shift
ct_os_get_all_pods_status | grep -e "${pod_prefix}" | grep -Ev "(build|deploy)$" \
| awk '{print $1}' | head -n 1
}
# ct_os_get_pod_name POD_PREFIX
# --------------------
# Returns the full name of pods specified by prefix [pod_prefix].
# Note: Ignores -build and -deploy pods
# Arguments: pod_prefix - prefix or whole ID of the pod
function ct_os_get_pod_name() {
local pod_prefix="${1}" ; shift
ct_os_get_all_pods_name | grep -e "^${pod_prefix}" | grep -Ev "(build|deploy)$"
}
# ct_os_get_pod_ip POD_NAME
# --------------------
# Returns the ip of the pod specified by [pod_name].
# Arguments: pod_name - full name of the pod
function ct_os_get_pod_ip() {
local pod_name="${1}"
oc get pod "$pod_name" --no-headers -o custom-columns=IP:status.podIP
}
# ct_os_check_pod_readiness POD_PREFIX STATUS
# --------------------
# Checks whether the pod's readiness flag equals the expected value.
# Arguments: pod_prefix - prefix or whole ID of the pod
# Arguments: status - expected status (true, false)
function ct_os_check_pod_readiness() {
local pod_prefix="${1}" ; shift
local status="${1}" ; shift
test "$(ct_os_get_pod_status ${pod_prefix})" == "${status}"
}
# ct_os_wait_pod_ready POD_PREFIX TIMEOUT
# --------------------
# Wait maximum [timeout] seconds for the pod becoming ready, polling
# every 3 seconds (uses bash's SECONDS counter).
# Arguments: pod_prefix - prefix or whole ID of the pod
# Arguments: timeout - how many seconds to wait
function ct_os_wait_pod_ready() {
local pod_prefix="${1}" ; shift
local timeout="${1}" ; shift
SECONDS=0
echo -n "Waiting for ${pod_prefix} pod becoming ready ..."
while ! ct_os_check_pod_readiness "${pod_prefix}" "true" ; do
echo -n "."
[ ${SECONDS} -gt ${timeout} ] && echo " FAIL" && return 1
sleep 3
done
echo " DONE"
}
# ct_os_wait_rc_ready POD_PREFIX TIMEOUT
# --------------------
# Wait maximum [timeout] seconds for the rc/statefulset matching [pod_prefix]
# to have its desired number of replicas ready (polls every 3 seconds).
# Arguments: pod_prefix - prefix of the replication controller
# Arguments: timeout - how many seconds to wait
function ct_os_wait_rc_ready() {
  local pod_prefix="${1}" ; shift
  local timeout="${1}" ; shift
  SECONDS=0
  # BUGFIX(review): the substitution used to start with "$((oc get ...",
  # which a shell may try to parse as arithmetic expansion; the space in
  # "$( (" unambiguously forces command substitution over a subshell.
  # Columns 2 and 3 of the headerless output (desired vs. current replicas
  # for rc output -- TODO confirm the same holds for statefulsets) must match.
  echo -n "Waiting for ${pod_prefix} rc becoming ready ..."
  while ! test "$( (oc get --no-headers statefulsets; oc get --no-headers rc) 2>/dev/null \
      | grep "^${pod_prefix}" | awk '$2==$3 {print "ready"}')" == "ready" ; do
    echo -n "."
    [ ${SECONDS} -gt ${timeout} ] && echo " FAIL" && return 1
    sleep 3
  done
  echo " DONE"
}
# ct_os_deploy_pure_image IMAGE [ENV_PARAMS, ...]
# --------------------
# Runs [image] in the openshift and optionally specifies env_params
# as environment variables to the image.
# Arguments: image - prefix or whole ID of the pod to run the cmd in
# Arguments: env_params - environment variables parameters for the images.
function ct_os_deploy_pure_image() {
local image="${1}" ; shift
# ignore error exit code, because oc new-app returns error when image exists
oc new-app ${image} "$@" || :
# let openshift cluster to sync to avoid some race condition errors
sleep 3
}
# ct_os_deploy_s2i_image IMAGE APP [ENV_PARAMS, ... ]
# --------------------
# Runs an s2i build of [app] on top of [image] in the openshift and
# optionally specifies env_params as environment variables to the image.
# Arguments: image - prefix or whole ID of the pod to run the cmd in
# Arguments: app - url or local path to git repo with the application sources.
# Arguments: env_params - environment variables parameters for the images.
function ct_os_deploy_s2i_image() {
local image="${1}" ; shift
local app="${1}" ; shift
# ignore error exit code, because oc new-app returns error when image exists
oc new-app "${image}~${app}" "$@" || :
# let openshift cluster to sync to avoid some race condition errors
sleep 3
}
# ct_os_deploy_template_image TEMPLATE [ENV_PARAMS, ...]
# --------------------
# Processes and instantiates a template in the openshift, optionally giving
# env_params to use specific values in the template.
# Arguments: template - path/url of the template file
# Arguments: env_params - environment variables parameters for the template.
# Example usage: ct_os_deploy_template_image mariadb-ephemeral-template.yaml \
#                DATABASE_SERVICE_NAME=mysql-57-centos7 \
#                DATABASE_IMAGE=mysql-57-centos7 \
#                MYSQL_USER=testu \
#                MYSQL_PASSWORD=testp \
#                MYSQL_DATABASE=testdb
function ct_os_deploy_template_image() {
local template="${1}" ; shift
oc process -f "${template}" "$@" | oc create -f -
# let openshift cluster to sync to avoid some race condition errors
sleep 3
}
# _ct_os_get_uniq_project_name
# --------------------
# Returns a unique name for an OpenShift project ("test-<number>").
# Keeps drawing random suffixes until a scratch directory under /var/tmp can
# be created; successful mkdir guarantees uniqueness on this host.
function _ct_os_get_uniq_project_name() {
  local suffix
  while : ; do
    suffix=${RANDOM}
    if mkdir /var/tmp/os-test-${suffix} &>/dev/null ; then
      echo test-${suffix}
      return
    fi
  done
}
# ct_os_new_project [PROJECT]
# --------------------
# Creates a new project in the OpenShift using the 'oc' command.
# Arguments: project - project name, uses a new random name if omitted
# Expects 'oc' command that is properly logged in to the OpenShift cluster.
# Not using mktemp, because we cannot use uppercase characters.
function ct_os_new_project() {
local project_name="${1:-$(_ct_os_get_uniq_project_name)}" ; shift || :
oc new-project ${project_name}
# let openshift cluster to sync to avoid some race condition errors
sleep 3
}
# ct_os_delete_project [PROJECT]
# --------------------
# Deletes the specified project in the OpenShift.
# Arguments: project - project name, uses the current project if omitted
function ct_os_delete_project() {
local project_name="${1:-$(oc project -q)}" ; shift || :
oc delete project "${project_name}"
}
# ct_os_docker_login
# --------------------
# Logs the local docker daemon into the OpenShift internal registry
# (172.30.1.1:5000) as the "developer" user, using the current oc token.
# Returns 0 on success, 1 if all attempts fail.
function ct_os_docker_login() {
  # docker login fails with "404 page not found" error sometimes, just try it more times
  # IMPROVEMENT(review): modern $() instead of backticks; loop variable is
  # named and local; token substitution quoted.
  local attempt
  for attempt in $(seq 12) ; do
    docker login -u developer -p "$(oc whoami -t)" 172.30.1.1:5000 && return 0 || :
    sleep 5
  done
  return 1
}
# ct_os_upload_image IMAGE [IMAGESTREAM]
# --------------------
# Uploads image from local registry to the OpenShift internal registry.
# Arguments: image - image name to upload
# Arguments: imagestream - name and tag to use for the internal registry.
#                          In the format of name:tag ($image_name:latest by default)
function ct_os_upload_image() {
local input_name="${1}" ; shift
# strip any registry/namespace prefix from the image name
local image_name=${input_name##*/}
local imagestream=${1:-$image_name:latest}
# target lives in the current project's namespace of the internal registry
local output_name="172.30.1.1:5000/$(oc project -q)/$imagestream"
ct_os_docker_login
docker tag ${input_name} ${output_name}
docker push ${output_name}
}
# ct_os_install_in_centos
# --------------------
# Installs the packages needed to run an OpenShift Origin cluster on CentOS.
function ct_os_install_in_centos() {
  yum install -y centos-release-openshift-origin
  # BUGFIX(review): origin-clients was listed twice in the original command.
  yum install -y wget git net-tools bind-utils iptables-services bridge-utils \
    bash-completion origin-clients docker
}
# ct_os_cluster_up [DIR, IS_PUBLIC, CLUSTER_VERSION]
# --------------------
# Runs the local OpenShift cluster using 'oc cluster up' and logs in as developer.
# Arguments: dir - directory to keep configuration data in, random if omitted
# Arguments: is_public - sets either private or public hostname for web-UI,
#                        use "true" for allow remote access to the web-UI,
#                        "false" is default
# Arguments: cluster_version - version of the OpenShift cluster to use, empty
#                              means default version of `oc`; example value: 3.7;
#                              also can be specified outside by OC_CLUSTER_VERSION
function ct_os_cluster_up() {
ct_os_cluster_running && echo "Cluster already running. Nothing is done." && return 0
mkdir -p /var/tmp/openshift
local dir="${1:-$(mktemp -d /var/tmp/openshift/os-data-XXXXXX)}" ; shift || :
# NOTE(review): the default expands to the literal five characters 'false'
# including the single quotes; harmless since only the exact string "true"
# enables the public hostname below, but worth cleaning up.
local is_public="${1:-'false'}" ; shift || :
local default_cluster_version=${OC_CLUSTER_VERSION:-}
local cluster_version=${1:-${default_cluster_version}} ; shift || :
# allow docker to talk to the insecure internal registry
if ! grep -qe '--insecure-registry.*172\.30\.0\.0' /etc/sysconfig/docker ; then
sed -i "s|OPTIONS='|OPTIONS='--insecure-registry 172.30.0.0/16 |" /etc/sysconfig/docker
fi
# drop host protections that interfere with the local cluster
systemctl stop firewalld
setenforce 0
iptables -F
systemctl restart docker
local cluster_ip="127.0.0.1"
[ "${is_public}" == "true" ] && cluster_ip=$(ct_get_public_ip)
if [ -n "${cluster_version}" ] ; then
# if $cluster_version is not set, we simply use oc that is available
ct_os_set_path_oc "${cluster_version}"
fi
mkdir -p ${dir}/{config,data,pv}
# 'oc cluster up' flags differ between oc releases; the first pattern
# presumably targets 3.10+ (--base-dir), the second older 3.x
# (--host-*-dir) -- TODO confirm the exact cutover release.
case $(oc version| head -n 1) in
"oc v3.1"?.*)
oc cluster up --base-dir="${dir}/data" --public-hostname="${cluster_ip}"
;;
"oc v3."*)
oc cluster up --host-data-dir="${dir}/data" --host-config-dir="${dir}/config" \
--host-pv-dir="${dir}/pv" --use-existing-config --public-hostname="${cluster_ip}"
;;
*)
echo "ERROR: Unexpected oc version." >&2
return 1
;;
esac
oc version
oc login -u system:admin
oc project default
# wait for the core infrastructure to come up before handing control back
ct_os_wait_rc_ready docker-registry 180
ct_os_wait_rc_ready router 30
oc login -u developer -p developer
# let openshift cluster to sync to avoid some race condition errors
sleep 3
}
# ct_os_cluster_down
# --------------------
# Shuts down the local OpenShift cluster using 'oc cluster down'.
function ct_os_cluster_down() {
oc cluster down
}
# ct_os_cluster_running
# --------------------
# Returns 0 if a local oc cluster is running (exit status only, output
# discarded).
function ct_os_cluster_running() {
oc cluster status &>/dev/null
}
# ct_os_set_path_oc OC_VERSION
# --------------------
# This is a trick that helps using correct version of the `oc`:
# The input is version of the openshift in format v3.6.0 etc.
# If the currently available version of oc is not of this version,
# it first takes a look into /usr/local/oc-<ver>/bin directory,
# and if not found there it downloads the community release from github.
# In the end the PATH variable is changed, so the other tests can still use just 'oc'.
# Arguments: oc_version - X.Y part of the version of OSE (e.g. 3.9)
function ct_os_set_path_oc() {
  local oc_version=$(ct_os_get_latest_ver $1)
  local oc_path

  if oc version | grep -q "oc ${oc_version%.*}." ; then
    echo "Binary oc found already available in version ${oc_version}: $(which oc) Doing nothing."
    return 0
  fi

  # first check whether we already have oc available in /usr/local
  local installed_oc_path="/usr/local/oc-${oc_version%.*}/bin"
  if [ -x "${installed_oc_path}/oc" ] ; then
    oc_path="${installed_oc_path}"
    echo "Binary oc found in ${installed_oc_path}" >&2
  else
    # oc not available in /usr/local, try to download it from github (community release)
    oc_path="/tmp/oc-${oc_version}-bin"
    ct_os_download_upstream_oc "${oc_version}" "${oc_path}"
  fi
  # BUGFIX(review): this used to be `[ -z "${oc_path}/oc" ]`, which is never
  # true (the string always contains "/oc"); test executability instead.
  # Error messages now go to stderr (were redirected to >&1).
  if [ ! -x "${oc_path}/oc" ] ; then
    echo "ERROR: oc not found installed, nor downloaded" >&2
    return 1
  fi
  export PATH="${oc_path}:${PATH}"
  if ! oc version | grep -q "oc ${oc_version%.*}." ; then
    echo "ERROR: something went wrong, oc located at ${oc_path}, but oc of version ${oc_version} not found in PATH ($PATH)" >&2
    return 1
  else
    echo "PATH set correctly, binary oc found in version ${oc_version}: $(which oc)"
  fi
}
# ct_os_get_latest_ver VERSION_PART_X
# --------------------
# Expands an X.Y version into the newest existing vX.Y.Z release tag by
# probing openshift/origin release tags on github (Z tried from 3 down to 0).
# Arguments: vxy - X.Y part of the version
# Returns the vX.Y.Z variant of the version, or 1 when no tag exists.
function ct_os_get_latest_ver(){
  local vxy="v$1"
  local vz
  for vz in 3 2 1 0 ; do
    if curl -sif "https://github.com/openshift/origin/releases/tag/${vxy}.${vz}" >/dev/null ; then
      echo "${vxy}.${vz}"
      return 0
    fi
  done
  echo "ERROR: version ${vxy} not found in https://github.com/openshift/origin/tags" >&2
  return 1
}
# ct_os_download_upstream_oc OC_VERSION OUTPUT_DIR
# --------------------
# Downloads a particular version of openshift-origin-client-tools from
# github into specified output directory.
# Arguments: oc_version - version of OSE (e.g. v3.7.2)
# Arguments: output_dir - output directory
function ct_os_download_upstream_oc() {
  local oc_version=$1
  local output_dir=$2

  # check whether we already have the binary in place
  [ -x "${output_dir}/oc" ] && return 0

  mkdir -p "${output_dir}"
  # using html output instead of https://api.github.com/repos/openshift/origin/releases/tags/${oc_version},
  # because API is limited for number of queries if not authenticated
  # IMPROVEMENT(review): tarball is now local (it used to leak into the
  # caller's scope) and an empty scrape aborts early instead of composing a
  # bogus download URL.
  local tarball
  tarball=$(curl -si "https://github.com/openshift/origin/releases/tag/${oc_version}" | grep -o -e "openshift-origin-client-tools-${oc_version}-[a-f0-9]*-linux-64bit.tar.gz" | head -n 1)
  if [ -z "${tarball}" ] ; then
    echo "ERROR: could not determine client-tools tarball name for ${oc_version}" >&2
    return 1
  fi

  # download, unpack the binaries and then put them into output directory
  echo "Downloading https://github.com/openshift/origin/releases/download/${oc_version}/${tarball} into ${output_dir}/" >&2
  curl -sL https://github.com/openshift/origin/releases/download/${oc_version}/"${tarball}" | tar -C "${output_dir}" -xz
  mv -f "${output_dir}"/"${tarball%.tar.gz}"/* "${output_dir}/"

  rmdir "${output_dir}"/"${tarball%.tar.gz}"
}
# ct_os_test_s2i_app_func IMAGE APP CONTEXT_DIR CHECK_CMD [OC_ARGS]
# --------------------
# Runs [image] and [app] in the openshift and optionally specifies env_params
# as environment variables to the image. Then checks the container by an
# arbitrary command given as argument (such an argument may include the <IP>
# string, which will be replaced with the actual service IP). The whole test
# runs inside a freshly created project that is deleted at the end.
# Arguments: image - prefix or whole ID of the pod to run the cmd in (compulsory)
# Arguments: app - url or local path to git repo with the application sources (compulsory)
# Arguments: context_dir - sub-directory inside the repository with the application sources (compulsory)
# Arguments: check_command - CMD line that checks whether the container works (compulsory; '<IP>' will be replaced with actual IP)
# Arguments: oc_args - all other arguments are used as additional parameters for the `oc new-app`
#                      command, typically environment variables (optional)
# Returns: 0 when the check command succeeded against the deployed app, 1 otherwise.
function ct_os_test_s2i_app_func() {
local image_name=${1}
local app=${2}
local context_dir=${3}
local check_command=${4}
local oc_args=${5:-}
local image_name_no_namespace=${image_name##*/}
local service_name="${image_name_no_namespace}-testing"
local image_tagged="${image_name_no_namespace}:testing"
if [ $# -lt 4 ] || [ -z "${1}" -o -z "${2}" -o -z "${3}" -o -z "${4}" ]; then
echo "ERROR: ct_os_test_s2i_app_func() requires at least 4 arguments that cannot be emtpy." >&2
return 1
fi
ct_os_new_project
# Create a specific imagestream tag for the image so that oc cannot use anything else
ct_os_upload_image "${image_name}" "${image_tagged}"
local app_param="${app}"
if [ -d "${app}" ] ; then
# for local directory, we need to copy the content, otherwise too smart os command
# pulls the git remote repository instead
app_param=$(ct_obtain_input "${app}")
fi
ct_os_deploy_s2i_image "${image_tagged}" "${app_param}" \
--context-dir="${context_dir}" \
--name "${service_name}" \
${oc_args}
if [ -d "${app}" ] ; then
# in order to avoid weird race seen sometimes, let's wait shortly
# before starting the build explicitly
sleep 5
oc start-build "${service_name}" --from-dir="${app_param}"
fi
ct_os_wait_pod_ready "${service_name}" 300
local ip=$(ct_os_get_service_ip "${service_name}")
local check_command_exp=$(echo "$check_command" | sed -e "s/<IP>/$ip/g")
echo " Checking APP using $check_command_exp ..."
local result=0
eval "$check_command_exp" || result=1
if [ $result -eq 0 ] ; then
echo " Check passed."
else
echo " Check failed."
fi
# tear the test project down regardless of the check outcome
ct_os_delete_project
return $result
}
# ct_os_test_s2i_app IMAGE APP CONTEXT_DIR EXPECTED_OUTPUT [PORT, PROTOCOL, RESPONSE_CODE, OC_ARGS, ... ]
# --------------------
# Convenience wrapper around ct_os_test_s2i_app_func that checks the app by
# an HTTP request (ct_test_response) against the service IP.
# Arguments: image - prefix or whole ID of the pod to run the cmd in (compulsory)
# Arguments: app - url or local path to git repo with the application sources (compulsory)
# Arguments: context_dir - sub-directory inside the repository with the application sources (compulsory)
# Arguments: expected_output - PCRE regular expression that must match the response body (compulsory)
# Arguments: port - which port to use (optional; default: 8080)
# Arguments: protocol - which protocol to use (optional; default: http)
# Arguments: response_code - what http response code to expect (optional; default: 200)
# Arguments: oc_args - all other arguments are used as additional parameters for the `oc new-app`
#                      command, typically environment variables (optional)
function ct_os_test_s2i_app() {
local image_name=${1}
local app=${2}
local context_dir=${3}
local expected_output=${4}
local port=${5:-8080}
local protocol=${6:-http}
local response_code=${7:-200}
local oc_args=${8:-}
if [ $# -lt 4 ] || [ -z "${1}" -o -z "${2}" -o -z "${3}" -o -z "${4}" ]; then
echo "ERROR: ct_os_test_s2i_app() requires at least 4 arguments that cannot be emtpy." >&2
return 1
fi
ct_os_test_s2i_app_func "${image_name}" \
"${app}" \
"${context_dir}" \
"ct_test_response '${protocol}://<IP>:${port}' '${response_code}' '${expected_output}'" \
"${oc_args}"
}
# ct_os_test_template_app_func IMAGE APP IMAGE_IN_TEMPLATE CHECK_CMD [OC_ARGS]
# --------------------
# Runs [image] and [app] in the openshift and optionally specifies env_params
# as environment variables to the image. Then check the container by arbitrary
# function given as argument (such an argument may include <IP> string,
# that will be replaced with actual IP).
# Arguments: image_name - prefix or whole ID of the pod to run the cmd in (compulsory)
# Arguments: template - url or local path to a template to use (compulsory)
# Arguments: name_in_template - image name used in the template
# Arguments: check_command - CMD line that checks whether the container works (compulsory; '<IP>' will be replaced with actual IP)
# Arguments: oc_args - all other arguments are used as additional parameters for the `oc new-app`
# command, typically environment variables (optional)
# Arguments: other_images - some templates need other image to be pushed into the OpenShift registry,
# specify them in this parameter as "<image>|<tag>", where "<image>" is a full image name
# (including registry if needed) and "<tag>" is a tag under which the image should be available
# in the OpenShift registry.
function ct_os_test_template_app_func() {
local image_name=${1}
local template=${2}
local name_in_template=${3}
local check_command=${4}
local oc_args=${5:-}
local other_images=${6:-}
if [ $# -lt 4 ] || [ -z "${1}" -o -z "${2}" -o -z "${3}" -o -z "${4}" ]; then
echo "ERROR: ct_os_test_template_app_func() requires at least 4 arguments that cannot be emtpy." >&2
return 1
fi
local service_name="${name_in_template}-testing"
local image_tagged="${name_in_template}:${VERSION}"
ct_os_new_project
# Create a specific imagestream tag for the image so that oc cannot use anything else
ct_os_upload_image "${image_name}" "${image_tagged}"
# upload also other images, that template might need (list of pairs in the format <image>|<tag>
local images_tags_a
local i_t
for i_t in ${other_images} ; do
echo "${i_t}"
IFS='|' read -ra image_tag_a <<< "${i_t}"
docker pull "${image_tag_a[0]}"
ct_os_upload_image "${image_tag_a[0]}" "${image_tag_a[1]}"
done
local local_template=$(ct_obtain_input "${template}")
oc new-app ${local_template} \
-p NAME="${service_name}" \
-p NAMESPACE="$(oc project -q)" \
${oc_args}
oc start-build "${service_name}"
ct_os_wait_pod_ready "${service_name}" 300
local ip=$(ct_os_get_service_ip "${service_name}")
local check_command_exp=$(echo "$check_command" | sed -e "s/<IP>/$ip/g")
echo " Checking APP using $check_command_exp ..."
local result=0
eval "$check_command_exp" || result=1
if [ $result -eq 0 ] ; then
echo " Check passed."
else
echo " Check failed."
fi
ct_os_delete_project
return $result
}
# params:
# ct_os_test_template_app IMAGE APP IMAGE_IN_TEMPLATE EXPECTED_OUTPUT [PORT, PROTOCOL, RESPONSE_CODE, OC_ARGS, ... ]
# --------------------
# Runs [image] and [app] in the openshift and optionally specifies env_params
# as environment variables to the image. Then check the http response.
# Arguments: image_name - prefix or whole ID of the pod to run the cmd in (compulsory)
# Arguments: template - url or local path to a template to use (compulsory)
# Arguments: name_in_template - image name used in the template
# Arguments: expected_output - PCRE regular expression that must match the response body (compulsory)
# Arguments: port - which port to use (optional; default: 8080)
# Arguments: protocol - which protocol to use (optional; default: http)
# Arguments: response_code - what http response code to expect (optional; default: 200)
# Arguments: oc_args - all other arguments are used as additional parameters for the `oc new-app`
# command, typically environment variables (optional)
# Arguments: other_images - some templates need other image to be pushed into the OpenShift registry,
# specify them in this parameter as "<image>|<tag>", where "<image>" is a full image name
# (including registry if needed) and "<tag>" is a tag under which the image should be available
# in the OpenShift registry.
function ct_os_test_template_app() {
  # Thin argument-checking wrapper around ct_os_test_template_app_func that
  # builds an HTTP response check from port/protocol/response-code/regexp.
  #
  # Args: image_name template name_in_template expected_output
  #       [port=8080] [protocol=http] [response_code=200] [oc_args] [other_images]
  # Returns: 1 on missing/empty mandatory arguments, otherwise the result of
  #          ct_os_test_template_app_func.
  local image_name=${1}
  local template=${2}
  local name_in_template=${3}
  local expected_output=${4}
  local port=${5:-8080}
  local protocol=${6:-http}
  local response_code=${7:-200}
  local oc_args=${8:-}
  local other_images=${9:-}

  # The first four arguments are mandatory and must be non-empty.
  # Separate [ ] tests (instead of the obsolescent `-o`) short-circuit, so
  # ${1}..${4} are not even expanded when fewer than 4 arguments were given.
  if [ $# -lt 4 ] || [ -z "${1}" ] || [ -z "${2}" ] || [ -z "${3}" ] || [ -z "${4}" ]; then
    echo "ERROR: ct_os_test_template_app() requires at least 4 arguments that cannot be empty." >&2
    return 1
  fi

  # Delegate to the generic checker; the literal <IP> token is substituted
  # with the service IP by the callee once the application pod is ready.
  ct_os_test_template_app_func "${image_name}" \
                               "${template}" \
                               "${name_in_template}" \
                               "ct_test_response '${protocol}://<IP>:${port}' '${response_code}' '${expected_output}'" \
                               "${oc_args}" \
                               "${other_images}"
}
# ct_os_test_image_update IMAGE IS CHECK_CMD OC_ARGS
# --------------------
# Runs an image update test with [image] uploaded to [is] imagestream
# and checks the services using an arbitrary function provided in [check_cmd].
# Arguments: image - prefix or whole ID of the pod to run the cmd in (compulsory)
# Arguments: is - imagestream to upload the images into (compulsory)
# Arguments: check_cmd - command to be run to check functionality of created services (compulsory)
# Arguments: oc_args - arguments to use during oc new-app (compulsory)
ct_os_test_image_update() {
  # Verifies that an application survives an image update: deploy the
  # currently released image from the registry, run the supplied check,
  # then retag the freshly built local image into the same imagestream
  # (triggering a redeploy) and run the same check again.
  #
  # Args: image istag check_cmd -- remaining args are passed to `oc new-app`.
  # The check command may contain the literal token <IP>, which is replaced
  # with the service IP before execution.
  local img=$1; shift
  local stream_tag=$1; shift
  local check_cmd_template=$1; shift
  local app_name=${img##*/}
  local registry="" released_image="" endpoint_ip="" resolved_check=""

  registry=$(ct_registry_from_os "$OS")
  released_image="$registry/$img"

  echo "Running image update test for: $img"
  ct_os_new_project

  # Seed the imagestream with the currently released image from the registry.
  docker pull "$released_image:latest" 2>/dev/null
  ct_os_upload_image "$released_image" "$stream_tag"

  # Deploy the example application on top of the released image; the
  # remaining caller arguments go to `oc new-app` untouched.
  oc new-app "$@" --name "$app_name"
  ct_os_wait_pod_ready "$app_name" 60

  # First functional check, against the released image.
  endpoint_ip=$(ct_os_get_service_ip "$app_name")
  resolved_check=${check_cmd_template//<IP>/$endpoint_ip}
  ct_assert_cmd_success "$resolved_check"

  # Retag the freshly built image into the imagestream and wait for the
  # second deployment to come up.
  ct_os_upload_image "$img" "$stream_tag"
  ct_os_wait_pod_ready "${app_name}-2" 60

  # Second functional check, against the updated image.
  endpoint_ip=$(ct_os_get_service_ip "$app_name")
  resolved_check=${check_cmd_template//<IP>/$endpoint_ip}
  ct_assert_cmd_success "$resolved_check"

  ct_os_delete_project
}
# NOTE(review): the following lines are non-script residue (dataset-viewer
# page text accidentally appended to the file); commented out so the file
# remains valid shell. Preserved verbatim below:
# |
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.