text stringlengths 1 1.05M |
|---|
<reponame>wormwlrm/full-stack-service-networking
const axios = require("axios");

/**
 * Demo HTTP client: issues two GETs and one POST against a local server
 * and logs each response body between [start]/[end] markers.
 */
(async function () {
  // Logs a response body between start/end markers for readability.
  const logResponse = (label, data) => {
    console.log(`## ${label} response [start]`);
    console.log(data);
    console.log(`## ${label} response [end]`);
  };

  try {
    console.log("## HTTP client started.");

    console.log("## GET request for http://localhost:8080/temp/");
    const tempResponse = await axios.get("http://localhost:8080/temp/");
    logResponse("GET", tempResponse.data);

    console.log("## GET request for http://localhost:8080/?var1=9&var2=9");
    const queryResponse = await axios.get("http://localhost:8080/?var1=9&var2=9");
    logResponse("GET", queryResponse.data);

    console.log(
      "## POST request for http://localhost:8080/ with var1 is 9 and var2 is 9"
    );
    const postResponse = await axios.post("http://localhost:8080", {
      var1: "9",
      var2: "9",
    });
    logResponse("POST", postResponse.data);

    console.log("## HTTP client completed.");
  } catch (err) {
    // The original assigned to an undeclared global (`http_request`) and let
    // any network failure surface as an unhandled promise rejection.
    console.error("## HTTP client failed:", err.message);
    process.exitCode = 1;
  }
})();
|
#!/bin/sh
# If this shell is running as PID 1 (i.e. it is the container's init
# process), re-exec itself under tini so signals are forwarded and
# zombie processes get reaped.
[ "$$" -eq 1 ] && exec /tini -- "$0" "$@"
# Otherwise just keep the container alive without consuming CPU.
exec tail -f /dev/null
|
# Prompt for a migration name and scaffold a new TypeORM migration.
echo Enter name of new migration
# -r keeps backslashes in the name literal instead of treating them as escapes.
read -r name
# Abort early with a clear message; typeorm otherwise fails cryptically on an
# empty -n argument.
if [ -z "$name" ]; then
  echo "Migration name must not be empty" >&2
  exit 1
fi
node ./scripts/generate-orm-config.js
# Quote "$name" so names containing spaces are passed as a single argument.
ts-node -r tsconfig-paths/register ./node_modules/typeorm/cli.js migration:create -n "$name"
# frozen_string_literal: true
require 'spec_helper'
# Specs for the str2bool Puppet function, which coerces common truthy/falsy
# string representations into real booleans.
describe 'str2bool' do
  it { is_expected.not_to eq(nil) }
  # Calling with no arguments must raise an arity error.
  it { is_expected.to run.with_params.and_raise_error(Puppet::ParseError, %r{wrong number of arguments}i) }
  it {
    pending('Current implementation ignores parameters after the first.')
    is_expected.to run.with_params('true', 'extra').and_raise_error(Puppet::ParseError, %r{wrong number of arguments}i)
  }
  # Strings with no recognised boolean meaning are rejected.
  it { is_expected.to run.with_params('one').and_raise_error(Puppet::ParseError, %r{Unknown type of boolean given}) }
  describe 'when testing values that mean "true"' do
    ['TRUE', '1', 't', 'y', 'true', 'yes', true].each do |value|
      it { is_expected.to run.with_params(value).and_return(true) }
    end
  end
  describe 'when testing values that mean "false"' do
    # Note the empty string and the literal strings 'undef'/'undefined' are
    # all treated as false by the function under test.
    ['FALSE', '', '0', 'f', 'n', 'false', 'no', false, 'undef', 'undefined'].each do |value|
      it { is_expected.to run.with_params(value).and_return(false) }
    end
  end
end
|
<reponame>abugler/SMLFinalProject
from comet_ml import BaseExperiment
BaseExperiment._report_summary = lambda args : None
from .docker_runner import DockerRunner |
package subsets
import (
"testing"
"fmt"
)
func TestSubsets(t *testing.T) {
m:= Set([]int{1,4,6})
fmt.Printf(" set: %#v\nsubsets:%#v\n", m, m.Subsets())
} |
<reponame>IvanShafran/android-course-1C-2019<filename>ActivitySample/app/src/main/java/com/github/ivanshafran/activitysample/ArgumentsActivity.java<gh_stars>0
package com.github.ivanshafran.activitysample;

import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.view.Gravity;
import android.widget.TextView;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;

/**
 * Sample activity that receives a single string argument through its launch
 * Intent and displays it centered in a programmatically created TextView.
 */
public class ArgumentsActivity extends AppCompatActivity {

    /** Intent extra key under which the string argument is stored. */
    private static final String ARGUMENT_KEY = "ARGUMENT_KEY";

    /**
     * Builds the Intent used to launch this activity.
     *
     * @param context  context used to resolve the target component
     * @param argument the string to display; attached as an Intent extra
     * @return an Intent targeting ArgumentsActivity carrying the argument
     */
    public static Intent getIntent(
            final Context context,
            @NonNull final String argument
    ) {
        final Intent intent = new Intent(context, ArgumentsActivity.class);
        intent.putExtra(ARGUMENT_KEY, argument);
        return intent;
    }

    @Override
    protected void onCreate(@Nullable final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // UI is built in code (no XML layout): a single centered TextView.
        final TextView textView = new TextView(this);
        final String argument = getIntent().getStringExtra(ARGUMENT_KEY);
        textView.setGravity(Gravity.CENTER);
        textView.setText(argument);
        setContentView(textView);
    }
}
|
<filename>src/test/java/io/shadowstack/incumbents/InvocationSinkTest.java
package io.shadowstack.incumbents;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * Tests for InvocationSink configuration behavior.
 */
public class InvocationSinkTest {

    /**
     * A non-positive batch size should be rejected and fall back to the
     * default of 1.
     */
    @Test
    public void testBadBatchSizeDefault() {
        // The consumer lambda is irrelevant here; only batch sizing is tested.
        InvocationSink sink = new InvocationSink(invocations -> {
            return null;
        }).withBatchSize(-1);
        assertEquals(1, sink.getBatchSize());
    }
}
|
import React from 'react';
import './App.css';
import UseStateDemo from './pages/UseStateDemo';
import UseEffectDemo from './pages/UseEffectDemo';
import './styles/iconfont.scss';
function App() {
return (
<div className="App">
{/*<UseStateDemo />*/}
<UseEffectDemo />
</div>
);
}
export default App;
|
<reponame>dotnotation/Bat_Cloud<filename>config/routes.rb
Rails.application.routes.draw do
  root 'static#home'
  # Researcher account creation.
  get '/signup', to: 'researchers#new'
  post '/signup', to: 'researchers#create'
  # Session management: manual login/logout plus Google OAuth callback.
  get '/login', to: 'sessions#new'
  post '/login', to: 'sessions#create'
  delete '/logout', to: 'sessions#delete'
  get '/auth/google_oauth2/callback', to: 'sessions#omniauth'
  # Bat browsing helpers.
  get '/search', to: 'bats#search'
  get '/recently_discovered', to: 'bats#recently_discovered'
  get '/alphabetized_bats', to: 'bats#alphabetized_bats'
  # Notes are only created and listed in the context of a bat.
  resources :bats do
    resources :notes, only: [:new, :create, :index]
  end
  resources :researchers
  # Catch-all: any unmatched GET/POST renders the 404 page.
  match '/*path', to: 'errors#not_found', via: [:get, :post]
end
|
<gh_stars>1-10
#pragma once

#include <string>

/**
 * @file gpio/gpio.h
 * @author Group 7 - Informatica
 */
namespace goliath::gpio {
    /**
     * Wrapper around a single GPIO pin. Provides export/unexport, direction
     * and value accessors. NOTE(review): the implementation is not visible
     * here — presumably this targets the Linux sysfs GPIO interface; confirm
     * in the corresponding .cpp file.
     */
    class GPIO {
    public:
        // Usable pin numbers; each enumerator's value equals its pin number.
        enum class MapPin {
            GPIO2 = 2,
            GPIO3 = 3,
            GPIO4 = 4,
            GPIO5 = 5,
            GPIO6 = 6,
            GPIO7 = 7,
            GPIO8 = 8,
            GPIO9 = 9,
            GPIO10 = 10,
            GPIO11 = 11,
            GPIO12 = 12,
            GPIO13 = 13,
            GPIO14 = 14,
            GPIO15 = 15,
            GPIO16 = 16,
            GPIO17 = 17,
            GPIO18 = 18,
            GPIO19 = 19,
            GPIO20 = 20,
            GPIO21 = 21,
            GPIO22 = 22,
            GPIO23 = 23,
            GPIO24 = 24,
            GPIO25 = 25,
            GPIO26 = 26,
            GPIO27 = 27,
        };
        // Logical level of a pin.
        enum class State {
            Low = 0,
            High = 1,
        };
        // Data direction of a pin.
        enum class Direction {
            Out = 0,
            In = 1
        };
        // Constructors mirror the setup() overloads below.
        GPIO();
        explicit GPIO(MapPin x);
        GPIO(MapPin x, Direction dir);
        GPIO(MapPin x, Direction dir, State val);
        // (Re)configure the pin, optionally with direction and initial value.
        void setup(MapPin x);
        void setup(MapPin x, Direction dir);
        void setup(MapPin x, Direction dir, State val);
        // Claim / release the pin. Return values are ints — presumably 0 on
        // success, non-zero on error; confirm against the implementation.
        int exportGpio();
        int unexportGpio();
        int close();
        // Direction and value setters, by string or numeric form.
        int setdirGpio(std::string dir);
        int setdirGpio(int dir);
        int setvalGpio(std::string val);
        int setvalGpio(int val);
        int set(State val);
        // Value getters: output parameter variants plus a direct read.
        int getvalGpio(std::string &val);
        int getvalGpio(int &val);
        int get();
        // Pin-number accessors (string and integer forms).
        std::string getGpionum();
        int getiGpionum();
    private:
        // Pin number, stored as a string.
        std::string gpionum;
    };
}
|
<reponame>alterem/smartCityService
package com.zhcs.entity;

import io.swagger.annotations.ApiModelProperty;

import java.io.Serializable;
import java.util.Date;

//*****************************************************************************
/**
 * <p>Title: DlEntity</p>
 * <p>Description: Event dispatch order (事件派单) request payload.</p>
 * <p>Copyright: Copyright (c) 2017</p>
 * <p>Company: Shenzhen Smart City Manager Information Technology Co., Ltd.
 * (深圳市智慧城市管家信息科技有限公司)</p>
 * @author Liu Xiaodong (刘晓东) - Alter
 * @version v1.0 2017-02-23
 */
//*****************************************************************************
public class DlEntity implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * Caller's authentication token.
     */
    @ApiModelProperty(value="token" ,required=true)
    private String token;

    /**
     * Handling mode: "0" means reply, "1" means dispatch to personnel.
     */
    @ApiModelProperty(value="处理方式(回复值为\"0\",派单值为\"1\")" ,required=true)
    private String btype;

    /**
     * Id of the event being handled.
     */
    @ApiModelProperty(value="事件id" ,required=true)
    private Long eventId;

    /**
     * Current workflow stage ('sjpd' or 'fhpd').
     */
    @ApiModelProperty(value="当前环节('sjpd' or 'fhpd')" ,required=true)
    private String current;

    /**
     * Estimated completion time.
     */
    @ApiModelProperty(value="预计完成时间")
    private Date estimatetm;

    /**
     * Remarks / additional notes.
     */
    @ApiModelProperty(value="备注说明")
    private String content;

    /**
     * Id of the main person in charge.
     */
    @ApiModelProperty(value="主要负责人")
    private Long handle;

    /**
     * Id of the collaborating person.
     */
    @ApiModelProperty(value="协作人员")
    private Long auxiliary;

    // Plain getters and setters below; no additional logic.

    public String getToken() {
        return token;
    }
    public void setToken(String token) {
        this.token = token;
    }
    public Long getHandle() {
        return handle;
    }
    public Long getEventId() {
        return eventId;
    }
    public void setEventId(Long eventId) {
        this.eventId = eventId;
    }
    public void setHandle(Long handle) {
        this.handle = handle;
    }
    public Long getAuxiliary() {
        return auxiliary;
    }
    public void setAuxiliary(Long auxiliary) {
        this.auxiliary = auxiliary;
    }
    public String getCurrent() {
        return current;
    }
    public void setCurrent(String current) {
        this.current = current;
    }
    public Date getEstimatetm() {
        return estimatetm;
    }
    public void setEstimatetm(Date estimatetm) {
        this.estimatetm = estimatetm;
    }
    public String getContent() {
        return content;
    }
    public void setContent(String content) {
        this.content = content;
    }
    public String getBtype() {
        return btype;
    }
    public void setBtype(String btype) {
        this.btype = btype;
    }
}
|
package com.github.aha.poc.micro.district.service;

import static com.github.aha.poc.micro.district.persistence.repository.DistrictSpecifications.byCsuCode;
import static com.github.aha.poc.micro.district.persistence.repository.DistrictSpecifications.byName;
import static com.github.aha.poc.micro.district.persistence.repository.DistrictSpecifications.byPlateCode;
import static org.springframework.data.jpa.domain.Specifications.where;

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.domain.Specifications;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;

import com.github.aha.poc.micro.district.persistence.domain.District;
import com.github.aha.poc.micro.district.persistence.repository.DistrictRepository;

/**
 * Read-only service that looks up District entities, combining optional
 * name / CSU-code / plate-code filters into a single JPA specification.
 */
@Service
@Transactional(readOnly = true)
public class DistrictServiceImpl implements DistrictService {

    /** Entity property used for default result ordering. */
    private static final String DEFAULT_SORTING = "name";

    // Ascending sort by name, applied to every findAll query.
    private Sort sorting = new Sort(Sort.Direction.ASC, DEFAULT_SORTING);

    @Autowired
    private DistrictRepository districtRepository;

    /* (non-Javadoc)
     * @see com.asseco.aha.poc.micro.district.service.DistrictService#findAll(java.lang.String, java.lang.String, java.lang.String)
     */
    @Override
    public List<District> findAll(String name, String csuCode, String plateCode) {
        // Build the specification incrementally: each non-empty filter is
        // AND-ed onto the previous ones. If every filter is empty, spec stays
        // null, which the repository treats as "no filtering".
        Specifications<District> spec = null;
        if (!StringUtils.isEmpty(name)) {
            spec = where(byName(name));
        }
        if (!StringUtils.isEmpty(csuCode)) {
            Specification<District> localSpec = byCsuCode(csuCode);
            spec = spec == null ? where(localSpec) : spec.and(localSpec);
        }
        if (!StringUtils.isEmpty(plateCode)) {
            Specification<District> localSpec = byPlateCode(plateCode);
            spec = spec == null ? where(localSpec) : spec.and(localSpec);
        }
        return districtRepository.findAll(spec, sorting);
    }

    /*
     * (non-Javadoc)
     *
     * @see com.asseco.aha.poc.micro.district.service.DistrictService#getItem(java.lang.String)
     */
    public District getItem(String code) {
        // Case-insensitive lookup by CSU code.
        return districtRepository.findByCsuCodeIgnoringCase(code);
    }
}
|
import PropTypes from 'prop-types'
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { bindActionCreators } from 'redux'
import { updateNote } from '../../../actions/home'
import NoteEditorDialog from './NoteEditorDialog'
import NoteToolBar from './NoteToolBar'

// Displays a single note (optional title + content) with a toolbar, and
// opens an editor dialog when the note body is clicked.
// NOTE(review): `note` is an Immutable-style object (it uses .get/.merge/
// .equals) — confirm against the home reducer.
@connect(
  null,
  dispatch => ({
    updateNote: bindActionCreators(updateNote, dispatch)
  })
)
export default class NoteView extends Component {
  state = {
    showEditor: false
  }

  // Open the editor dialog.
  handleShowEditor = () => {
    this.setState({ showEditor: true })
  }

  // Close the editor; dispatch an update only when the changes actually
  // modify the note (avoids redundant store updates).
  handleCloseEditor = (changes) => {
    const { note, updateNote } = this.props
    const newNote = note.merge(changes)
    if (!note.equals(newNote)) {
      updateNote(note.get('id'), changes)
    }
    this.setState({ showEditor: false })
  }

  render() {
    const { note } = this.props
    const name = note.get('name')
    return (
      <div>
        <div className='note-view'>
          <div onClick={this.handleShowEditor}>
            {name && <div className='title'>{name}</div>}
            <div className='content'>{note.get('content')}</div>
          </div>
          <NoteToolBar note={note} />
        </div>
        <NoteEditorDialog
          note={note}
          showEditor={this.state.showEditor}
          onDismiss={this.handleCloseEditor}
        />
      </div>
    )
  }
}

NoteView.propTypes = {
  note: PropTypes.object.isRequired
}
|
#!/bin/bash
#
# Script to run tests on Travis-CI.
# Exit on error.
set -e;

# Quote ${MODE}/${TARGET}/${TRAVIS_OS_NAME}: unquoted, `test` receives no
# operand when the variable is unset/empty and the comparison errors out.
if test "${MODE}" = "dpkg"; then
  CONTAINER_NAME="ubuntu${UBUNTU_VERSION}";
  CONTAINER_OPTIONS="-e LANG=en_US.UTF-8";

  if test "${TARGET}" = "pylint"; then
    TEST_COMMAND="./config/travis/run_pylint.sh";
  else
    TEST_COMMAND="./config/travis/run_python3.sh";
  fi
  # Note that exec options need to be defined before the container name.
  # CONTAINER_OPTIONS is intentionally unquoted: it holds multiple arguments.
  docker exec ${CONTAINER_OPTIONS} "${CONTAINER_NAME}" sh -c "cd timesketch && ${TEST_COMMAND}";

elif test "${MODE}" = "pypi"; then
  python3 ./run_tests.py

elif test "${TRAVIS_OS_NAME}" = "linux"; then
  python3 ./run_tests.py
  python3 ./setup.py bdist
  python3 ./setup.py sdist
fi
|
<gh_stars>0
import React from 'react';
import { Link } from 'react-router-dom';

// Site navigation menu. `close` is invoked on every item click so the parent
// can dismiss the menu after navigation. The Log Out item additionally clears
// the stored auth token before routing back to the login page.
export default ({ close }) => (
  <div className='menu'>
    <ul>
      <li onClick={close}><Link to="/UserLogin" style={{ textDecoration: 'none' }}>Log In</Link></li>
      <li onClick={close}><Link to="/" style={{ textDecoration: 'none' }}>Meet Our Servers!</Link></li>
      <li onClick={close}><Link to="/UserSignup" style={{ textDecoration: 'none' }}>Sign Up</Link></li>
      <li onClick={close}><Link to="/UserLogin" onClick={() => {localStorage.removeItem("token");}} style={{ textDecoration: 'none' }}>Log Out!</Link></li>
    </ul>
  </div>
);
|
#!/usr/bin/with-contenv bash
# Assemble Tomcat JVM options for the Cantaloupe image server container.
export JAVA_OPTS="${TOMCAT_JAVA_OPTS}"
export CATALINA_OPTS="${TOMCAT_CATALINA_OPTS}"
# Point Cantaloupe at its configuration file inside the Tomcat install.
export CATALINA_OPTS="${CATALINA_OPTS} -Dcantaloupe.config=/opt/tomcat/conf/cantaloupe.properties"
# Allow encoded slashes and backslashes in request URLs — presumably needed
# for identifiers that contain path separators; confirm against the app docs.
export CATALINA_OPTS="${CATALINA_OPTS} -Dorg.apache.tomcat.util.buf.UDecoder.ALLOW_ENCODED_SLASH=true"
export CATALINA_OPTS="${CATALINA_OPTS} -Dorg.apache.catalina.connector.CoyoteAdapter.ALLOW_BACKSLASH=true"
|
#!/usr/bin/env python3.6
# Work with Python 3.6
"""Poll mining pools for their current hashrate, sort pool.json by hashrate,
and record the time of the last successful check in check_time.json."""
import json
import time

from requests import get, post


def _fetch_hashrate(pool):
    """Query one pool's API and return its hashrate as an int, or None if the
    endpoint did not answer with HTTP 200.

    Each pool exposes a different JSON schema, so parsing is keyed off the
    pool's public link.
    """
    link = pool["link"]
    if link == "https://comining.io/":
        # comining uses a JSON-RPC style POST endpoint.
        with post(pool["api"], json={"method": "coins_list"}) as api:
            if api.status_code == 200:
                return int(api.json()["data"][0]["workersHashrate"])
    elif link == "https://aka.fairpool.xyz/":
        with get(pool["api"]) as api:
            if api.status_code == 200:
                return int(api.json()["pool"]["hashrate"])
    elif link == "https://aikapool.com/aka/":
        with get(pool["api"]) as api:
            if api.status_code == 200:
                return int(api.json()["pool_hashrate"])
    else:
        # Default schema: top-level "hashrate" field.
        with get(pool["api"]) as api:
            if api.status_code == 200:
                return int(api.json()["hashrate"])
    return None


def main():
    with open("pool.json") as data_file:
        pools = json.load(data_file)
    with open("check_time.json") as data_file:
        last_check = json.load(data_file)
    # links.json is loaded for parity with the original script even though its
    # contents are not used below.
    with open("links.json") as data_file:
        data = json.load(data_file)  # noqa: F841

    for pool in pools:
        hashrate = _fetch_hashrate(pool)
        if hashrate is None:
            print(f"{pool['api']} is down")
        else:
            pool["hash"] = hashrate

    # Pools that are down keep their previously recorded "hash" value.
    pools.sort(key=lambda x: x["hash"])
    with open("pool.json", "w") as file:
        json.dump(pools, file, indent=2)

    last_check["last_check"] = time.ctime() + " EET"
    with open("check_time.json", "w") as file:
        json.dump(last_check, file, indent=2)


if __name__ == "__main__":
    main()
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
set -euxo pipefail

# Only run tests marked with the "gpu" pytest marker.
export PYTEST_ADDOPTS="-m gpu ${PYTEST_ADDOPTS:-}"

# Test most of the enabled runtimes here.
export TVM_TEST_TARGETS="cuda;opencl;metal;rocm;nvptx;opencl -device=mali,aocl_sw_emu"
export TVM_UNITTEST_TESTSUITE_NAME=python-unittest-gpu
./tests/scripts/task_python_unittest.sh

# Kept separate to avoid increasing time needed to run CI, testing
# only minimal functionality of Vulkan runtime.
export TVM_TEST_TARGETS="vulkan -from_device=0"
export TVM_UNITTEST_TESTSUITE_NAME=python-unittest-vulkan
source tests/scripts/setup-pytest-env.sh
# Run the Vulkan codegen tests against both FFI implementations.
run_pytest ctypes ${TVM_UNITTEST_TESTSUITE_NAME}-0 tests/python/unittest/test_target_codegen_vulkan.py
run_pytest cython ${TVM_UNITTEST_TESTSUITE_NAME}-1 tests/python/unittest/test_target_codegen_vulkan.py
|
<filename>app/src/main/java/com/ervin/litepal/Model/LoginData.java
package com.ervin.litepal.model;

import com.ervin.litepal.table.BadState;
import com.ervin.litepal.table.FriendList;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

import java.util.List;

/**
 * Created by Ervin on 2015/11/12.
 *
 * Gson-mapped payload returned by the login endpoint. Field names mirror the
 * server's JSON keys via @SerializedName; all fields are plain public data
 * holders with no logic.
 * NOTE(review): units for height/weight/kilometres and the meaning of the
 * various int "type"/"status" codes are not specified in this file — confirm
 * against the server API documentation before relying on them.
 */
public class LoginData {
    // --- Account identity and session ---
    @Expose
    @SerializedName("phoneNumber")
    public String phoneNumber;
    @Expose
    @SerializedName("token")
    public String token;
    // --- Social data ---
    @Expose
    @SerializedName("friendsList")
    public List<FriendList> friendsList;
    @Expose
    @SerializedName("friendStatus")
    public int friendStatus;
    @Expose
    @SerializedName("ownBadStateList")
    public List<BadState> ownBadState;
    // --- Profile ---
    @Expose
    @SerializedName("nickname")
    public String nickname;
    @Expose
    @SerializedName("sex")
    public int sex;
    @Expose
    @SerializedName("height")
    public float height;
    @Expose
    @SerializedName("weight")
    public float weight;
    @Expose
    @SerializedName("headFileUrl")
    public String headFileUrl;
    @Expose
    @SerializedName("age")
    public int age;
    @Expose
    @SerializedName("vip")
    public int vip;
    @Expose
    @SerializedName("number")
    public int number;
    @Expose
    @SerializedName("whetherSetTarget")
    public int whetherSetTarget;
    // --- Device / model info ---
    @Expose
    @SerializedName("modelName")
    public String modelName;
    @Expose
    @SerializedName("modelId")
    public String modelId;
    @Expose
    @SerializedName("modelType")
    public int modelType;
    // --- Activity statistics ---
    @Expose
    @SerializedName("steps")
    public int steps;
    @Expose
    @SerializedName("kilometres")
    public float kilometres;
    @Expose
    @SerializedName("totalCalories")
    public float totalCalories;
    @Expose
    @SerializedName("calories")
    public float calories;
    @Expose
    @SerializedName("timeInterval")
    public float timeInterval;
    @Expose
    @SerializedName("highQuality")
    public float highQuality;
    // --- Hydration / intake ---
    @Expose
    @SerializedName("waterAmount")
    public int waterAmount;
    @Expose
    @SerializedName("totalIntake")
    public int totalIntake;
    @Expose
    @SerializedName("intake")
    public int intake;
    @Expose
    @SerializedName("intakeType")
    public int intakeType;
    // --- Misc ---
    @Expose
    @SerializedName("configurationChange")
    public String configurationChange;
}
|
def average_age(people):
    """Return the arithmetic mean of the 'age' values in a list of dicts.

    Args:
        people: sequence of mappings, each with a numeric 'age' key.

    Returns:
        The mean age (true division, so usually a float).

    Raises:
        ValueError: if ``people`` is empty (the original version divided
            by zero here).
    """
    if not people:
        raise ValueError("people must not be empty")
    total_age = 0
    for person in people:
        total_age += person['age']
    return total_age / len(people)


if __name__ == "__main__":
    # Demo data; the original script called average_age(people) on an
    # undefined name, raising NameError.
    people = [{'age': 25}, {'age': 27}, {'age': 29}]
    print(average_age(people))  # Output 27.0
package com.foxconn.iot.dto;

import javax.validation.constraints.NotBlank;

import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonFormat.Shape;
import com.fasterxml.jackson.annotation.JsonView;

/**
 * Transfer object for a "property" resource. JsonView interfaces control
 * which fields are serialized per use case (Create extends Basic).
 * Validation messages are intentionally kept in Traditional Chinese — they
 * are user-facing runtime strings.
 */
public class PropertyDto {
    /** View marker: fields visible in basic (read) responses. */
    public interface Basic {}
    /** View marker: fields accepted on creation; includes everything Basic has. */
    public interface Create extends Basic {}

    // Serialized as a string to avoid JavaScript precision loss on long ids.
    @JsonView(Basic.class)
    @JsonFormat(shape = Shape.STRING)
    private long id;

    // Machine name of the property; must not be blank.
    @JsonView(Basic.class)
    @NotBlank(message = "屬性name不能為空")
    private String name;

    // Human-readable title; must not be blank.
    @JsonView(Basic.class)
    @NotBlank(message = "屬性標題不能為空")
    private String title;

    // Optional free-form description.
    @JsonView(Basic.class)
    private String details;

    // Plain getters and setters; no additional logic.
    public long getId() {
        return id;
    }
    public void setId(long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public String getDetails() {
        return details;
    }
    public void setDetails(String details) {
        this.details = details;
    }
}
|
import numpy as np
import pandas as pd
from sklearn import preprocessing, neighbors
# sklearn.cross_validation was deprecated and removed; train_test_split now
# lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split

# k-NN classification of the Wisconsin breast cancer dataset.
df = pd.read_csv('breast-cancer-wisconsin.data')
# '?' marks missing values; a large negative number turns them into outliers.
df.replace('?', -99999, inplace=True)
df.drop(['id'], axis=1, inplace=True)

X = np.array(df.drop(['class'], axis=1))
y = np.array(df['class'])

# 80/20 train/test split (fixed typo: was X_traiin).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

clf = neighbors.KNeighborsClassifier()
clf.fit(X_train, y_train)

accuracy = clf.score(X_test, y_test)
print(accuracy)

# Predict a single new sample; sklearn expects a 2-D array of shape
# (n_samples, n_features), hence the reshape.
example_measures = np.array([3, 4, 4, 2, 7, 3, 3, 2, 8]).reshape(1, -1)
prediction = clf.predict(example_measures)
print(prediction)
package controller;
import java.util.HashMap;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.Vector;
import ui.MainUI;
import ui.UIAPI;
import javafx.scene.paint.Color;
import javafx.stage.Stage;
import controller.channel.PubSub;
import controller.channel.messages.FunctionUpdate;
import controller.channel.messages.Interpret;
import controller.channel.messages.LanguageChange;
import controller.channel.messages.VariableLoad;
import controller.channel.messages.VariableUpdate;
import controller.file.Reader;
import controller.file.WorkspaceLoader;
import controller.file.WorkspaceSaver;
import executor.commandable.turtle.StandardTurtle;
/**
 * @author cy122
 *
 * Glue code between back-end and front-end: encapsulates status updates
 * coming from the back-end turtles, reorganizes them, and forwards them to
 * the UI; also routes commands, workspace management and persistence the
 * other way.
 */
public class View implements Controller2ViewAPI, Observer {
	private MainUI mainUI;
	// PubSub channel of the currently active workspace.
	private PubSub pubsub;
	private Controller controller;
	// Per-workspace variable and user-function tables, keyed by that
	// workspace's PubSub channel.
	private Map<PubSub, Map<String, String>> variables = new HashMap<PubSub, Map<String, String>>();
	private Map<PubSub, Map<String, String>> functions = new HashMap<PubSub, Map<String, String>>();
	// All workspaces in creation order; index doubles as the workspace id.
	private Vector<PubSub> pubsubs = new Vector<PubSub>();

	public View(Stage primaryStage, Controller controller, Reader reader) {
		mainUI = new MainUI(primaryStage, (Controller2ViewAPI) this, reader);
		this.controller = controller;
		createWorkspace();
		primaryStage.setScene(((MainUI) mainUI).getScene());
		primaryStage.show();
	}

	/**
	 * Receives a turtle-status map from the back-end (the Observable is the
	 * turtle itself) and applies every present key to the UI.
	 */
	@Override
	public void update(Observable o, Object arg) {
		Map<String, Double> turtleStatus = new HashMap<String, Double>((Map) arg);
		if (turtleStatus.containsKey("bad input")) {
			mainUI.showError("wrong command!");
		} else if (turtleStatus.containsKey("reset")) {
			mainUI.clearScreen();
		} else {
			if (turtleStatus.containsKey("angle")) {
				int turtleID = new Double(((StandardTurtle) o).getID()).intValue();
				// UI angles are measured in the opposite rotational direction.
				mainUI.rotateTurtleTo(turtleID, 360 - turtleStatus.get("angle"));
			}
			if ((turtleStatus.containsKey("x coord")) && (turtleStatus.containsKey("y coord"))) {
				int turtleID = new Double(((StandardTurtle) o).getID()).intValue();
				double coordinateX = turtleStatus.get("x coord");
				double coordinateY = turtleStatus.get("y coord");
				mainUI.moveTurtleTo(turtleID, coordinateX, coordinateY);
			}
			if (turtleStatus.containsKey("pVis")) {
				mainUI.setPenDown(turtleStatus.get("pVis") != 0.0);
			}
			if (turtleStatus.containsKey("tVis")) {
				int turtleID = new Double(((StandardTurtle) o).getID()).intValue();
				mainUI.setTurtleVisibility(turtleID, turtleStatus.get("tVis") != 0.0);
			}
			if (turtleStatus.containsKey("palette")) {
				// The palette value packs index and RGB into one double:
				// index = value / 256^3, color = value % 256^3.
				double value = turtleStatus.get("palette");
				Double index = value / (256 * 256 * 256);
				// BUG FIX: %X requires an integral argument; formatting the
				// double remainder directly threw IllegalFormatConversionException.
				String colorString = String.format("#%06X", (int) (value % (256 * 256 * 256)));
				mainUI.setPalette(index.intValue(), Color.web(colorString));
			}
			if (turtleStatus.containsKey("background")) {
				mainUI.setBackgroundColor(turtleStatus.get("background").intValue());
			}
			if (turtleStatus.containsKey("pensize")) {
				mainUI.setPenSize(turtleStatus.get("pensize").intValue());
			}
			if (turtleStatus.containsKey("pencolor")) {
				mainUI.setPenColor(turtleStatus.get("pencolor").intValue());
			}
			if (turtleStatus.containsKey("shape")) {
				mainUI.setTurtleImage(turtleStatus.get("shape").intValue());
			}
			if (turtleStatus.containsKey("active")) {
				int turtleID = new Double(((StandardTurtle) o).getID()).intValue();
				boolean known = mainUI.setActive(turtleID, turtleStatus.get("active") == 1.0);
				// setActive returns false for turtles the UI has never seen:
				// create the turtle lazily in that case.
				if (!known) {
					mainUI.createTurtle(turtleID);
				}
			}
		}
	}

	@Override
	public void runCommand(String command) {
		pubsub.publish(PubSub.Channel.INTERPRET, new Interpret(command));
	}

	@Override
	public void changeLanguage(String language) {
		pubsub.publish(PubSub.Channel.LANGUAGE_CHANGE, new LanguageChange(language));
	}

	/** Records a variable update and refreshes the UI's variable list. */
	public void updateVariables(VariableUpdate variable) {
		this.variables.get(pubsub).put(variable.key, variable.value);
		mainUI.updateVariables(variables.get(pubsub));
	}

	/** Records a user-function update and refreshes the UI's method list. */
	public void updateFunctions(FunctionUpdate function) {
		this.functions.get(pubsub).put(function.key, function.value);
		// BUG FIX: the UI was previously refreshed with the *variables* table,
		// so user-defined methods never appeared in the method list.
		mainUI.updateUserMethods(functions.get(pubsub));
	}

	@Override
	public void createWorkspace() {
		PubSub newPubSub = new PubSub();
		pubsub = newPubSub;
		pubsubs.add(newPubSub);
		functions.put(pubsub, new HashMap<String, String>());
		variables.put(pubsub, new HashMap<String, String>());
		pubsub.subscribe(PubSub.Channel.FUNCTION_UPDATE, e -> updateFunctions((FunctionUpdate) e));
		pubsub.subscribe(PubSub.Channel.VARIABLE_UPDATE, e -> updateVariables((VariableUpdate) e));
		controller.createBackEnd(this, newPubSub);
	}

	@Override
	public void switchToWorkSpace(int index) {
		pubsub = pubsubs.get(index);
	}

	@Override
	public void save(String absolutePath) {
		new WorkspaceSaver(absolutePath).saveVariables(variables.get(pubsub));
		new WorkspaceSaver(absolutePath).saveMethods(functions.get(pubsub));
	}

	@Override
	public void load(String absolutePath) {
		Map<String, String> variables = new WorkspaceLoader(absolutePath).loadVariables();
		Map<String, String> functions = new WorkspaceLoader(absolutePath).loadMethods();
		this.variables.put(pubsub, variables);
		this.functions.put(pubsub, functions);
		pubsub.publish(PubSub.Channel.VARIABLE_LOAD, new VariableLoad(functions, variables));
	}
}
|
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { XmUserLoginWidgetComponent } from './xm-user-login-widget.component';
import { XmUserService } from 'path-to-xm-user-service'; // Replace with actual path
import { MockUserService } from 'path-to-mock-user-service'; // Replace with actual path
describe('XmUserLoginWidgetComponent', () => {
let component: XmUserLoginWidgetComponent;
let fixture: ComponentFixture<XmUserLoginWidgetComponent>;
let userService: XmUserService;
beforeEach(async(() => {
TestBed.configureTestingModule({
providers: [{ provide: XmUserService, useClass: MockUserService }],
declarations: [XmUserLoginWidgetComponent],
schemas: [NO_ERRORS_SCHEMA],
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(XmUserLoginWidgetComponent);
component = fixture.componentInstance;
userService = TestBed.inject(XmUserService);
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('should call userService.login() when login is triggered', () => {
spyOn(userService, 'login');
component.onLogin('username', 'password');
expect(userService.login).toHaveBeenCalledWith('username', 'password');
});
// Add more test cases as per the component's functionality
}); |
<gh_stars>0
#pragma once

#include <type_traits>

#include <polymesh/fwd.hh>

namespace glow
{
namespace viewer
{
namespace detail
{
// SFINAE probe: each is_*_like overload is selected only when the probed
// expressions compile AND the type's total size equals the expected number
// of components, i.e. the type is a tight aggregate of its components.
// The variadic (...) fallback yields std::false_type otherwise.
struct type_like_helper
{
    // pos{2,3,4}: indexable with [0], components convertible to float.
    template <class T>
    static auto is_pos2_like(T const& v) -> decltype(float(v[0]), std::enable_if_t<sizeof(T) == 2 * sizeof(v[0]), std::true_type>());
    static std::false_type is_pos2_like(...);
    template <class T>
    static auto is_pos3_like(T const& v) -> decltype(float(v[0]), std::enable_if_t<sizeof(T) == 3 * sizeof(v[0]), std::true_type>());
    static std::false_type is_pos3_like(...);
    template <class T>
    static auto is_pos4_like(T const& v) -> decltype(float(v[0]), std::enable_if_t<sizeof(T) == 4 * sizeof(v[0]), std::true_type>());
    static std::false_type is_pos4_like(...);
    // color{3,4}: has .r/.g/.b (and .a for color4) members convertible to float.
    template <class T>
    static auto is_color3_like(T const& v)
        -> decltype(float(v.r), float(v.g), float(v.b), std::enable_if_t<sizeof(T) == 3 * sizeof(v.r), std::true_type>());
    static std::false_type is_color3_like(...);
    template <class T>
    static auto is_color4_like(T const& v)
        -> decltype(float(v.r), float(v.g), float(v.b), float(v.a), std::enable_if_t<sizeof(T) == 4 * sizeof(v.r), std::true_type>());
    static std::false_type is_color4_like(...);
};

// ======================= Traits =======================
// Generates a variable template that evaluates the matching helper overload
// for T (true_type when the shape matches, false_type otherwise).
#define GLOW_VIEWER_IMPL_TYPE_LIKE_TRAIT(trait) \
    template <class T>                          \
    constexpr bool trait = decltype(type_like_helper::trait(std::declval<T>()))::value // force ;
GLOW_VIEWER_IMPL_TYPE_LIKE_TRAIT(is_pos2_like);
GLOW_VIEWER_IMPL_TYPE_LIKE_TRAIT(is_pos3_like);
GLOW_VIEWER_IMPL_TYPE_LIKE_TRAIT(is_pos4_like);
GLOW_VIEWER_IMPL_TYPE_LIKE_TRAIT(is_color3_like);
GLOW_VIEWER_IMPL_TYPE_LIKE_TRAIT(is_color4_like);
#undef GLOW_VIEWER_IMPL_TYPE_LIKE_TRAIT

// Detectors for the polymesh per-element attribute wrappers.
template <class T>
struct is_vertex_attr_t : std::false_type
{
};
template <class T>
struct is_vertex_attr_t<pm::vertex_attribute<T>> : std::true_type
{
};
template <class T>
constexpr bool is_vertex_attr = is_vertex_attr_t<T>::value;
template <class T>
struct is_face_attr_t : std::false_type
{
};
template <class T>
struct is_face_attr_t<pm::face_attribute<T>> : std::true_type
{
};
template <class T>
constexpr bool is_face_attr = is_face_attr_t<T>::value;
template <class T>
struct is_edge_attr_t : std::false_type
{
};
template <class T>
struct is_edge_attr_t<pm::edge_attribute<T>> : std::true_type
{
};
template <class T>
constexpr bool is_edge_attr = is_edge_attr_t<T>::value;
template <class T>
struct is_halfedge_attr_t : std::false_type
{
};
template <class T>
struct is_halfedge_attr_t<pm::halfedge_attribute<T>> : std::true_type
{
};
template <class T>
constexpr bool is_halfedge_attr = is_halfedge_attr_t<T>::value;
}
}
}
#!/bin/bash
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
#  http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
set -ex

usage() {
	echo "run_sidetrail.sh install_dir s2n_dir"
	exit 1
}

# Run one SideTrail proof in its working directory; count_success.pl expects
# exactly 1 success and 0 failures in the captured make output.
runSingleTest() {
	cd "${BASE_S2N_DIR}/tests/sidetrail/working/${1}"
	./copy_as_needed.sh
	make clean
	make 2>&1 | tee out.txt
	../../count_success.pl 1 0 out.txt
}

# Run a proof that is expected to FAIL; count_success.pl expects 0 successes
# and 1 failure.
runNegativeTest() {
	cd "${BASE_S2N_DIR}/tests/sidetrail/working/${1}"
	./copy_as_needed.sh
	make clean
	make 2>&1 | tee out.txt
	../../count_success.pl 0 1 out.txt
}

if [[ "$#" -ne "2" ]]; then
	usage
fi

INSTALL_DIR=$1
SMACK_DIR="${1}/smack"
BASE_S2N_DIR=$2

#Put the dependencies on the path
# Disabling ShellCheck using https://github.com/koalaman/shellcheck/wiki/Directive
# Turn of Warning in one line as https://github.com/koalaman/shellcheck/wiki/SC1090
# shellcheck disable=SC1090
source "${INSTALL_DIR}/smack.environment"
export PATH="${SMACK_DIR}/bin:${SMACK_DIR}/build:${PATH}"

#Test that they are really there
which smack || echo "can't find smack"
which boogie || echo "can't find z3"
which llvm2bpl || echo "can't find llvm2bpl"
which clang
clang --version
echo $BOOGIE
echo $CORRAL

# Negative test first, then the individual record-read proofs.
runNegativeTest "s2n-record-read-cbc-negative-test"
runSingleTest "s2n-cbc" # Takes 6m 30s
runSingleTest "s2n-record-read-aead"
runSingleTest "s2n-record-read-cbc"
runSingleTest "s2n-record-read-composite"
runSingleTest "s2n-record-read-stream"
|
package com.test_officialweb.t15_Formatter;

import com.test_officialweb.t14_BeanWapper.bean.People;
import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.format.datetime.DateFormatter;

import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.util.Date;
import java.util.Locale;

/**
 * Demonstrates Spring's DateFormatter: printing a Date in ISO-8601 format
 * and parsing an ISO date string back into a java.util.Date.
 */
public class IocTest {
    public static void main(String[] args) throws Exception {
        DateFormatter dateFormatter = new DateFormatter();
        // Use the ISO-8601 date style (yyyy-MM-dd).
        dateFormatter.setIso(DateTimeFormat.ISO.DATE);
        System.out.println(dateFormatter.print(new Date(), Locale.CHINA));
        System.out.println(dateFormatter.parse("2020-03-26", Locale.CHINA));
        // Program prints:
        // 2020-03-26
        // Thu Mar 26 08:00:00 CST 2020
    }
}
<gh_stars>0
/*
* FineList.java
*
* Created on January 3, 2006, 6:50 PM
*
* From "Multiprocessor Synchronization and Concurrent Data Structures",
* by <NAME> and <NAME>.
* Copyright 2006 Elsevier Inc. All rights reserved.
*/
package tamp.ch09.Lists.lists;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Fine-grained synchronization: lock coupling (hand-over-hand locking).
*
* @param T Item type.
* @author <NAME>
*/
public class FineList<T> {
    /**
     * First list entry (head sentinel, key Integer.MIN_VALUE).
     */
    private Node head;

    /**
     * Constructor. Installs head and tail sentinel nodes so every real node
     * always has both a predecessor and a successor during traversal.
     */
    public FineList() {
        // Add sentinels to start and end
        head = new Node(Integer.MIN_VALUE);
        head.next = new Node(Integer.MAX_VALUE);
    }

    /**
     * Add an element.
     *
     * NOTE(review): keys are item.hashCode(); an item whose hash code equals
     * Integer.MIN_VALUE or Integer.MAX_VALUE collides with a sentinel key —
     * confirm callers never supply such items.
     *
     * @param item element to add
     * @return true iff element was not there already
     */
    public boolean add(T item) {
        int key = item.hashCode();
        head.lock();
        Node pred = head;
        try {
            Node curr = pred.next;
            curr.lock();
            try {
                // Hand-over-hand: acquire curr's lock before releasing
                // pred's so no thread can slip between the two nodes.
                while (curr.key < key) {
                    pred.unlock();
                    pred = curr;
                    curr = curr.next;
                    curr.lock();
                }
                if (curr.key == key) {
                    return false;
                }
                // Both pred and curr are locked, so this splice is atomic
                // with respect to concurrent traversals.
                Node newNode = new Node(item);
                newNode.next = curr;
                pred.next = newNode;
                return true;
            } finally {
                curr.unlock();
            }
        } finally {
            pred.unlock();
        }
    }

    /**
     * Remove an element.
     *
     * @param item element to remove
     * @return true iff element was present
     */
    public boolean remove(T item) {
        Node pred = null, curr = null;
        int key = item.hashCode();
        head.lock();
        try {
            pred = head;
            curr = pred.next;
            curr.lock();
            try {
                while (curr.key < key) {
                    pred.unlock();
                    pred = curr;
                    curr = curr.next;
                    curr.lock();
                }
                if (curr.key == key) {
                    // pred and curr are both locked: the unlink cannot race
                    // with another thread traversing this pair.
                    pred.next = curr.next;
                    return true;
                }
                return false;
            } finally {
                curr.unlock();
            }
        } finally {
            pred.unlock();
        }
    }

    /**
     * Test whether an element is present.
     *
     * @param item element to look for
     * @return true iff the element is in the list
     */
    public boolean contains(T item) {
        // Fixed: removed the unused local `last` the original declared here.
        Node pred = null, curr = null;
        int key = item.hashCode();
        head.lock();
        try {
            pred = head;
            curr = pred.next;
            curr.lock();
            try {
                while (curr.key < key) {
                    pred.unlock();
                    pred = curr;
                    curr = curr.next;
                    curr.lock();
                }
                return (curr.key == key);
            } finally {
                curr.unlock();
            }
        } finally {
            pred.unlock();
        }
    }

    /**
     * list Node
     */
    private class Node {
        /**
         * actual item
         */
        T item;
        /**
         * item's hash code
         */
        int key;
        /**
         * next Node in list
         */
        Node next;
        /**
         * synchronizes individual Node
         */
        Lock lock;

        /**
         * Constructor for usual Node
         *
         * @param item element in list
         */
        Node(T item) {
            this.item = item;
            this.key = item.hashCode();
            this.lock = new ReentrantLock();
        }

        /**
         * Constructor for sentinel Node
         *
         * @param key should be min or max int value
         */
        Node(int key) {
            this.item = null;
            this.key = key;
            this.lock = new ReentrantLock();
        }

        /**
         * Lock Node
         */
        void lock() {
            lock.lock();
        }

        /**
         * Unlock Node
         */
        void unlock() {
            lock.unlock();
        }
    }
}
|
"use strict";

// Transpiled (CommonJS) icon module: exports the SVG description of a
// five-pointed "star" glyph as a plain object tree (viewBox + child nodes).
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.star = void 0;
var star = {
  "viewBox": "0 0 512 512",
  "children": [{
    "name": "polygon",
    "attribs": {
      "points": "480,200 308.519,200 256.029,32 203.519,200 32,200 170.946,304.209 116,480 256,368 396,480 341.073,304.195 "
    },
    "children": []
  }]
};
exports.star = star;
################################################################################
# IME-USP (2018) #
# MAC0219 - Programacao Concorrente e Paralela - MiniEP2 #
# #
# Testes #
# #
# Marcelo Schmitt - NUSP 9297641 #
# Raphael R. Gusmao - NUSP 9778561 #
################################################################################
# Experiment parameters: puzzle sizes (ras/sapos) and number of repetitions.
ras=2
sapos=2
execucoes=10000
solucoes=0
resposta=0
comando='./miniep2 -s '$ras' '$sapos
#echo $comando
contador=0
# Run the simulation `execucoes` times. Each run's exit status (0 or 1)
# indicates whether it reached the ideal final state, so summing the exit
# statuses counts the successful runs.
while [ $contador -lt $execucoes ]; do
#resposta=(eval $comando)
eval $comando
resposta=$?
let solucoes=solucoes+resposta
let contador=contador+1
done
echo 'Rodando o mini-ep2 com '$ras' ras e '$sapos' sapos'
echo 'Numero de execucoes: '$execucoes
# Compute the percentage of ideal endings (solved runs) using bc, since the
# shell has no floating-point arithmetic.
echo 'Vezes que chegou no final ideal: '$solucoes
porcent=$(echo "scale=5; 100.0 / $execucoes" | bc)
echo 'Peso percentual de cada execucao: '$porcent'%'
porcent_ideal=$(echo "scale=5; $porcent * $solucoes" | bc)
echo 'Porcentagem de vezes no final ideal: '$porcent_ideal'%'
################################################################################
|
#!/bin/bash
# Deploy Elasticsearch into the `elk` namespace: first the Deployment, then
# the Service that exposes it. Manifests live one directory up.
kubectl apply -f ../elasticsearch-deployment.yml -n elk
kubectl apply -f ../elasticsearch-service.yml -n elk
exit 0
/**
 * Sorts a list of service edges ({ node }) by each node's `servicePosition`.
 *
 * Nodes are unwrapped and shallow-copied so the caller's objects are not
 * mutated by the in-place sort. Entries without a `servicePosition` (or with
 * position 0 — the original `||` semantics are preserved) sort after all
 * positioned entries.
 *
 * @param {Array<{node: Object}>} services - edges wrapping service nodes
 * @returns {Object[]} new array of node copies, sorted by position
 */
const sortServices = function(services) {
  const servicesNoNode = services.map(({ node }) => ({ ...node }))

  function compare(a, b) {
    const serviceA = a.servicePosition || services.length
    // Fixed: the original read `services.lengths` (undefined), which made
    // every comparison against an unpositioned entry evaluate to 0 and left
    // those entries effectively unsorted.
    const serviceB = b.servicePosition || services.length

    let comparison = 0
    if (serviceA > serviceB) {
      comparison = 1
    } else if (serviceA < serviceB) {
      comparison = -1
    }
    return comparison
  }

  return servicesNoNode.sort(compare)
}

export default sortServices
|
<reponame>timpur/esphome-components<filename>esphome-components/wind/wind.h<gh_stars>0
#pragma once
#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/text_sensor/text_sensor.h"
namespace esphome {
namespace wind {
// Aggregates raw wind readings from the "dependent" sensors and republishes
// derived heading, compass heading, speed and max speed on its own sensors.
class WindComponent : public Component {
 public:
  void setup() override;
  void dump_config() override;
  float get_setup_priority() const override;

  // Output sensors published by this component (wired up from codegen).
  void set_heading_sensor(sensor::Sensor *heading_sensor) { heading_sensor_ = heading_sensor; }
  void set_heading_compass_sensor(text_sensor::TextSensor *heading_compass_sensor) {
    heading_compass_sensor_ = heading_compass_sensor;
  }
  void set_speed_sensor(sensor::Sensor *speed_sensor) { speed_sensor_ = speed_sensor; }
  void set_max_speed_sensor(sensor::Sensor *max_speed_sensor) { max_speed_sensor_ = max_speed_sensor; }

  // Raw input sensors this component derives its values from.
  void set_dependent_sensors(sensor::Sensor *dep_heading_sensor, sensor::Sensor *dep_frequency_sensor,
                             sensor::Sensor *dep_max_frequency_sensor) {
    dep_heading_sensor_ = dep_heading_sensor;
    dep_frequency_sensor_ = dep_frequency_sensor;
    dep_max_frequency_sensor_ = dep_max_frequency_sensor;
  }

 protected:
  sensor::Sensor *heading_sensor_;
  text_sensor::TextSensor *heading_compass_sensor_;
  sensor::Sensor *speed_sensor_;
  sensor::Sensor *max_speed_sensor_;
  sensor::Sensor *dep_heading_sensor_;
  sensor::Sensor *dep_frequency_sensor_;
  sensor::Sensor *dep_max_frequency_sensor_;

  // NOTE(review): presumably set to DEPENDENCIES_ERROR when the dependent
  // sensors are missing — the assignment happens in the .cpp, confirm there.
  enum ErrorCode {
    NONE = 0,
    DEPENDENCIES_ERROR,
  } error_code_;

 private:
  // Conversion/publish helpers; definitions are not in this header.
  void update_heading_sensor(float value);
  void update_heading_compass_sensor(float value);
  void update_speed_sensor(float value);
  void update_max_speed_sensor(float value);
  float heading_to_heading(float value);
  std::string heading_to_heading_compass(float value);
  float frequency_to_speed(float value);
};
} // namespace wind
} // namespace esphome |
package io.opensphere.wfs.gml311;
import io.opensphere.mantle.data.element.DataElement;
import io.opensphere.mantle.data.geom.AbstractMapGeometrySupport;
import io.opensphere.mantle.data.geom.MapGeometrySupport;
/**
* Abstract SAX handler for individual GML geometries.
*/
public abstract class AbstractGmlGeometryHandler
{
    /** Flag that's true if position order is Lat/Lon, false if Lon/Lat. */
    private final boolean myIsLatBeforeLon;

    /** The GML Geometry tag. */
    private final String myTagName;

    /**
     * Abstract constructor for GML geometry handlers.
     *
     * @param tagName the GML geometry tag name
     * @param isLatBeforeLon flag indicating position order in points
     */
    public AbstractGmlGeometryHandler(String tagName, boolean isLatBeforeLon)
    {
        myTagName = tagName;
        myIsLatBeforeLon = isLatBeforeLon;
    }

    /**
     * Gets the name of the GML Geometry tag for this geometry type.
     *
     * @return the GML Geometry tag name
     */
    public String getTagName()
    {
        return myTagName;
    }

    /**
     * Gets the position ordering in GML points.
     *
     * @return true if position order is Lat/Lon, false if Lon/Lat.
     */
    public boolean isLatBeforeLong()
    {
        return myIsLatBeforeLon;
    }

    /**
     * Gets the {@link MapGeometrySupport} part of the {@link DataElement} that
     * this class was responsible for parsing from the GML.
     *
     * @return the mantle-formatted geometry
     */
    public abstract AbstractMapGeometrySupport getGeometry();

    /**
     * Handle a GML Geometry-related closing tag. (Javadoc fixed: the original
     * comment described this as the opening-tag handler.)
     *
     * @param tag the name of the GML tag to handle
     * @param value the value of the XML tag
     */
    public abstract void handleClosingTag(String tag, String value);

    /**
     * Handle a GML Geometry-related opening tag. (Javadoc fixed: the original
     * comment described this as the closing-tag handler.)
     *
     * @param tag the name of the GML tag to handle
     */
    public abstract void handleOpeningTag(String tag);
}
|
"""
Develop a class in java to take input from the user and perform basic arithmatic operations
"""
import java.util.Scanner;
public class BasicArithmatic {
    /*
     * Scanner bound to stdin for reading the two integer operands.
     */
    static Scanner scanner = new Scanner(System.in);

    public static void main(String[] args) {
        /*
         * Read the two operands from standard input.
         */
        int x = scanner.nextInt();
        int y = scanner.nextInt();

        /*
         * Perform the arithmetic operations and print each result.
         * NOTE(review): the integer division and modulo below throw
         * ArithmeticException when y == 0 — confirm whether a guard or a
         * friendlier message is wanted.
         */
        int sum = x + y;
        int diff = x - y;
        int product = x * y;
        int quotient = x / y;
        int modulo = x % y;
        System.out.println("Sum of "+x+" and "+y+" = "+sum);
        System.out.println("Difference of "+x+" and "+y+" = "+diff);
        System.out.println("Product of "+x+" and "+y+" = "+product);
        System.out.println("Quotient of "+x+" and "+y+" = "+quotient);
        System.out.println("Remainder of "+x+" and "+y+" = "+modulo);
    }
}
import { Page, ResourceType } from 'puppeteer'
/**
 * Blocks the given types of resources from being downloaded. Await this right
 * before `page.goto()` so interception is enabled before navigation starts.
 *
 * Resource types are: "document", "stylesheet", "image", "media", "font", "script", "texttrack", "xhr", "fetch", "eventsource", "websocket", "manifest" and "other"
 *
 * @param page - puppeteer Page to attach the interception to
 * @param resourceTypes - resource types to abort
 * @returns resolves once request interception is active
 */
export default async function blockRequests(page: Page, ...resourceTypes: ResourceType[]): Promise<void> {
  // Fixed: setRequestInterception returns a Promise the original fired and
  // forgot, racing against the subsequent navigation and leaving any
  // rejection unhandled.
  await page.setRequestInterception(true)
  page.on('request', request => {
    // abort()/continue() reject if the request was already handled by
    // another listener; swallow only that expected race instead of letting
    // it surface as an unhandled rejection.
    if (resourceTypes.includes(request.resourceType())) {
      request.abort().catch(() => {})
    } else {
      request.continue().catch(() => {})
    }
  })
}
|
#!/usr/bin/env bash
# Deploy pipeline. Fixed: the original chained the steps with `;`, so a
# failed pull/install/kill still ran the deploy; abort at the first failure.
set -e
git pull origin develop
./install.sh
./kill.sh
./deploy.sh development
./monitor.sh
|
<filename>models/gift.model.js
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
// Plugin that adds findRandom/findOneRandom helpers to the model.
const random = require('mongoose-simple-random');

// Gift document: a display name and an image reference, both mandatory.
// NOTE(review): `Name` is capitalized while `image` is not — looks
// unintentional, but renaming would break existing documents/queries.
const gift = new Schema({
  Name: {
    type: String,
    required: true,
  },
  image: {
    type: String,
    required: true,
  },
});

gift.plugin(random)

const giftModel = mongoose.model("gift",gift);

// Export both the compiled model and the raw schema (for embedding/reuse).
module.exports ={
  giftModel,
  gift
}
<reponame>wuximing/dsshop
import window from 'global/window';
import { flatten } from './utils/list';
import { merge } from './utils/object';
import { findChildren, getContent } from './utils/xml';
import { parseAttributes } from './parseAttributes';
import errors from './errors';
import resolveUrl from '@videojs/vhs-utils/es/resolve-url';
import decodeB64ToUint8Array from '@videojs/vhs-utils/es/decode-b64-to-uint8-array';
// Maps DASH ContentProtection @schemeIdUri UUID urns to the EME key-system
// identifier strings understood by browsers.
const keySystemsMap = {
  'urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b': 'org.w3.clearkey',
  'urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed': 'com.widevine.alpha',
  'urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95': 'com.microsoft.playready',
  'urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb': 'com.adobe.primetime'
};
/**
* Builds a list of urls that is the product of the reference urls and BaseURL values
*
* @param {string[]} referenceUrls
* List of reference urls to resolve to
* @param {Node[]} baseUrlElements
* List of BaseURL nodes from the mpd
* @return {string[]}
* List of resolved urls
*/
/**
 * Builds a list of urls that is the product of the reference urls and the
 * BaseURL element values.
 *
 * @param {string[]} referenceUrls
 *        List of reference urls to resolve against
 * @param {Node[]} baseUrlElements
 *        List of BaseURL nodes from the mpd
 * @return {string[]}
 *         List of resolved urls
 */
export const buildBaseUrls = (referenceUrls, baseUrlElements) => {
  // No <BaseURL> children: the reference urls pass through untouched.
  if (baseUrlElements.length === 0) {
    return referenceUrls;
  }

  // Cartesian product: every reference url resolved against every BaseURL.
  const resolved = referenceUrls.map((reference) =>
    baseUrlElements.map((element) => resolveUrl(reference, getContent(element))));

  return flatten(resolved);
};
/**
* Contains all Segment information for its containing AdaptationSet
*
* @typedef {Object} SegmentInformation
* @property {Object|undefined} template
* Contains the attributes for the SegmentTemplate node
* @property {Object[]|undefined} segmentTimeline
* Contains a list of atrributes for each S node within the SegmentTimeline node
* @property {Object|undefined} list
* Contains the attributes for the SegmentList node
* @property {Object|undefined} base
* Contains the attributes for the SegmentBase node
*/
/**
* Returns all available Segment information contained within the AdaptationSet node
*
* @param {Node} adaptationSet
* The AdaptationSet node to get Segment information from
* @return {SegmentInformation}
* The Segment information contained within the provided AdaptationSet
*/
export const getSegmentInformation = (adaptationSet) => {
  // At most one of each segment-description child is considered.
  const segmentTemplate = findChildren(adaptationSet, 'SegmentTemplate')[0];
  const segmentList = findChildren(adaptationSet, 'SegmentList')[0];
  const segmentUrls = segmentList && findChildren(segmentList, 'SegmentURL')
    .map(s => merge({ tag: 'SegmentURL' }, parseAttributes(s)));
  const segmentBase = findChildren(adaptationSet, 'SegmentBase')[0];
  // <SegmentTimeline> may live under either <SegmentList> or <SegmentTemplate>.
  const segmentTimelineParentNode = segmentList || segmentTemplate;
  const segmentTimeline = segmentTimelineParentNode &&
    findChildren(segmentTimelineParentNode, 'SegmentTimeline')[0];
  // <Initialization> may live under SegmentList, SegmentBase or SegmentTemplate.
  const segmentInitializationParentNode = segmentList || segmentBase || segmentTemplate;
  const segmentInitialization = segmentInitializationParentNode &&
    findChildren(segmentInitializationParentNode, 'Initialization')[0];

  // SegmentTemplate is handled slightly differently, since it can have both
  // @initialization and an <Initialization> node. @initialization can be templated,
  // while the node can have a url and range specified. If the <SegmentTemplate> has
  // both @initialization and an <Initialization> subelement we opt to override with
  // the node, as this interaction is not defined in the spec.
  const template = segmentTemplate && parseAttributes(segmentTemplate);

  if (template && segmentInitialization) {
    template.initialization =
      (segmentInitialization && parseAttributes(segmentInitialization));
  } else if (template && template.initialization) {
    // If it is @initialization we convert it to an object since this is the format that
    // later functions will rely on for the initialization segment. This is only valid
    // for <SegmentTemplate>
    template.initialization = { sourceURL: template.initialization };
  }

  const segmentInfo = {
    template,
    segmentTimeline: segmentTimeline &&
      findChildren(segmentTimeline, 'S').map(s => parseAttributes(s)),
    list: segmentList && merge(
      parseAttributes(segmentList),
      {
        segmentUrls,
        initialization: parseAttributes(segmentInitialization)
      }
    ),
    base: segmentBase && merge(parseAttributes(segmentBase), {
      initialization: parseAttributes(segmentInitialization)
    })
  };

  // Drop keys whose source node was absent, so consumers can test presence
  // with a plain truthiness check.
  Object.keys(segmentInfo).forEach(key => {
    if (!segmentInfo[key]) {
      delete segmentInfo[key];
    }
  });

  return segmentInfo;
};
/**
* Contains Segment information and attributes needed to construct a Playlist object
* from a Representation
*
* @typedef {Object} RepresentationInformation
* @property {SegmentInformation} segmentInfo
* Segment information for this Representation
* @property {Object} attributes
* Inherited attributes for this Representation
*/
/**
* Maps a Representation node to an object containing Segment information and attributes
*
* @name inheritBaseUrlsCallback
* @function
* @param {Node} representation
* Representation node from the mpd
* @return {RepresentationInformation}
* Representation information needed to construct a Playlist object
*/
/**
* Returns a callback for Array.prototype.map for mapping Representation nodes to
* Segment information and attributes using inherited BaseURL nodes.
*
* @param {Object} adaptationSetAttributes
* Contains attributes inherited by the AdaptationSet
* @param {string[]} adaptationSetBaseUrls
* Contains list of resolved base urls inherited by the AdaptationSet
* @param {SegmentInformation} adaptationSetSegmentInfo
* Contains Segment information for the AdaptationSet
* @return {inheritBaseUrlsCallback}
* Callback map function
*/
/**
 * Returns a map callback that turns a Representation node into one entry per
 * resolved base url, carrying merged segment info and inherited attributes.
 *
 * @param {Object} adaptationSetAttributes
 *        Attributes inherited from the AdaptationSet
 * @param {string[]} adaptationSetBaseUrls
 *        Resolved base urls inherited from the AdaptationSet
 * @param {Object} adaptationSetSegmentInfo
 *        Segment information for the AdaptationSet
 * @return {Function}
 *         Callback mapping a Representation node to its information objects
 */
export const inheritBaseUrls =
  (adaptationSetAttributes, adaptationSetBaseUrls, adaptationSetSegmentInfo) =>
    (representation) => {
      // Resolve this Representation's own <BaseURL> children against the
      // urls inherited from the AdaptationSet.
      const baseUrlElements = findChildren(representation, 'BaseURL');
      const resolvedBaseUrls = buildBaseUrls(adaptationSetBaseUrls, baseUrlElements);

      // Representation-level attributes win over inherited ones.
      const inheritedAttributes = merge(adaptationSetAttributes, parseAttributes(representation));
      const representationSegmentInfo = getSegmentInformation(representation);

      // One output entry per resolved base url.
      return resolvedBaseUrls.map((baseUrl) => ({
        segmentInfo: merge(adaptationSetSegmentInfo, representationSegmentInfo),
        attributes: merge(inheritedAttributes, { baseUrl })
      }));
    };
/**
* Tranforms a series of content protection nodes to
* an object containing pssh data by key system
*
* @param {Node[]} contentProtectionNodes
* Content protection nodes
* @return {Object}
* Object containing pssh data by key system
*/
/**
 * Transforms a series of ContentProtection nodes into an object keyed by EME
 * key system, each entry carrying the node's attributes and (when present)
 * the decoded cenc:pssh initialization data.
 *
 * @param {Node[]} contentProtectionNodes
 *        ContentProtection nodes from the mpd
 * @return {Object}
 *         pssh data and attributes indexed by key system
 */
const generateKeySystemInformation = (contentProtectionNodes) => {
  const keySystemInfo = {};

  for (const node of contentProtectionNodes) {
    const attributes = parseAttributes(node);
    const keySystem = keySystemsMap[attributes.schemeIdUri];

    // Skip schemes we have no EME mapping for.
    if (!keySystem) {
      continue;
    }

    keySystemInfo[keySystem] = { attributes };

    const psshNode = findChildren(node, 'cenc:pssh')[0];

    if (psshNode) {
      const pssh = getContent(psshNode);

      keySystemInfo[keySystem].pssh = pssh && decodeB64ToUint8Array(pssh);
    }
  }

  return keySystemInfo;
};
// defined in ANSI_SCTE 214-1 2016
/**
 * Parses CEA-608/708 caption-service descriptors from an <Accessibility>
 * element's schemeIdUri/value pair into per-service records. Returns
 * undefined for unrecognized schemeIdUri values.
 *
 * @param {Object} service - parsed Accessibility attributes
 * @return {Object[]|undefined} one record per declared caption service
 */
export const parseCaptionServiceMetadata = (service) => {
  // 608 captions
  if (service.schemeIdUri === 'urn:scte:dash:cc:cea-608:2015') {
    // Services are semicolon-separated, e.g. "CC1=eng;CC3=swe".
    const values = service.value.split(';');

    return values.map((value) => {
      let channel;
      let language;

      // default language to value
      language = value;

      if (/^CC\d=/.test(value)) {
        [channel, language] = value.split('=');
      } else if (/^CC\d$/.test(value)) {
        channel = value;
      }

      return {channel, language};
    });
  } else if (service.schemeIdUri === 'urn:scte:dash:cc:cea-708:2015') {
    const values = service.value.split(';');

    return values.map((value) => {
      const flags = {
        // service or channel number 1-63
        'channel': undefined,

        // language is a 3ALPHA per ISO 639.2/B
        // field is required
        'language': undefined,

        // BIT 1/0 or ?
        // default value is 1, meaning 16:9 aspect ratio, 0 is 4:3, ? is unknown
        'aspectRatio': 1,

        // BIT 1/0
        // easy reader flag indicated the text is tailed to the needs of beginning readers
        // default 0, or off
        'easyReader': 0,

        // BIT 1/0
        // If 3d metadata is present (CEA-708.1) then 1
        // default 0
        '3D': 0
      };

      if (/=/.test(value)) {
        const [channel, opts = ''] = value.split('=');

        flags.channel = channel;
        // NOTE(review): language is seeded with the full "<channel>=<opts>"
        // string here and only corrected when a `lang:` option follows —
        // confirm that is intended for descriptors without a lang option.
        flags.language = value;

        opts.split(',').forEach((opt) => {
          const [name, val] = opt.split(':');

          if (name === 'lang') {
            flags.language = val;
          // er for easyReadery
          } else if (name === 'er') {
            flags.easyReader = Number(val);
          // war for wide aspect ratio
          } else if (name === 'war') {
            flags.aspectRatio = Number(val);
          } else if (name === '3D') {
            flags['3D'] = Number(val);
          }
        });
      } else {
        flags.language = value;
      }

      if (flags.channel) {
        flags.channel = 'SERVICE' + flags.channel;
      }

      return flags;
    });
  }
};
/**
* Maps an AdaptationSet node to a list of Representation information objects
*
* @name toRepresentationsCallback
* @function
* @param {Node} adaptationSet
* AdaptationSet node from the mpd
* @return {RepresentationInformation[]}
* List of objects containing Representaion information
*/
/**
* Returns a callback for Array.prototype.map for mapping AdaptationSet nodes to a list of
* Representation information objects
*
* @param {Object} periodAttributes
* Contains attributes inherited by the Period
* @param {string[]} periodBaseUrls
* Contains list of resolved base urls inherited by the Period
* @param {string[]} periodSegmentInfo
* Contains Segment Information at the period level
* @return {toRepresentationsCallback}
* Callback map function
*/
export const toRepresentations =
  (periodAttributes, periodBaseUrls, periodSegmentInfo) => (adaptationSet) => {
    const adaptationSetAttributes = parseAttributes(adaptationSet);
    // Resolve this AdaptationSet's BaseURLs against the Period-level ones.
    const adaptationSetBaseUrls = buildBaseUrls(
      periodBaseUrls,
      findChildren(adaptationSet, 'BaseURL')
    );
    const role = findChildren(adaptationSet, 'Role')[0];
    const roleAttributes = { role: parseAttributes(role) };

    // Later arguments win: AdaptationSet overrides Period-level attributes.
    let attrs = merge(
      periodAttributes,
      adaptationSetAttributes,
      roleAttributes,
    );

    // NOTE(review): when no <Accessibility> child exists, `accessibility` is
    // undefined here — this relies on parseAttributes and
    // parseCaptionServiceMetadata tolerating that; confirm in those helpers.
    const accessibility = findChildren(adaptationSet, 'Accessibility')[0];
    const captionServices = parseCaptionServiceMetadata(parseAttributes(accessibility));

    if (captionServices) {
      attrs = merge(attrs, { captionServices });
    }

    const label = findChildren(adaptationSet, 'Label')[0];

    if (label && label.childNodes.length) {
      const labelVal = label.childNodes[0].nodeValue.trim();

      attrs = merge(attrs, { label: labelVal });
    }

    const contentProtection = generateKeySystemInformation(findChildren(adaptationSet, 'ContentProtection'));

    if (Object.keys(contentProtection).length) {
      attrs = merge(attrs, { contentProtection });
    }

    const segmentInfo = getSegmentInformation(adaptationSet);
    const representations = findChildren(adaptationSet, 'Representation');
    const adaptationSetSegmentInfo = merge(periodSegmentInfo, segmentInfo);

    return flatten(representations.map(inheritBaseUrls(attrs, adaptationSetBaseUrls, adaptationSetSegmentInfo)));
  };
/**
* Contains all period information for mapping nodes onto adaptation sets.
*
* @typedef {Object} PeriodInformation
* @property {Node} period.node
* Period node from the mpd
* @property {Object} period.attributes
* Parsed period attributes from node plus any added
*/
/**
* Maps a PeriodInformation object to a list of Representation information objects for all
* AdaptationSet nodes contained within the Period.
*
* @name toAdaptationSetsCallback
* @function
* @param {PeriodInformation} period
* Period object containing necessary period information
* @param {number} periodIndex
* Index of the Period within the mpd
* @return {RepresentationInformation[]}
* List of objects containing Representaion information
*/
/**
* Returns a callback for Array.prototype.map for mapping Period nodes to a list of
* Representation information objects
*
* @param {Object} mpdAttributes
* Contains attributes inherited by the mpd
* @param {string[]} mpdBaseUrls
* Contains list of resolved base urls inherited by the mpd
* @return {toAdaptationSetsCallback}
* Callback map function
*/
export const toAdaptationSets = (mpdAttributes, mpdBaseUrls) => (period, index) => {
  const periodBaseUrls = buildBaseUrls(mpdBaseUrls, findChildren(period.node, 'BaseURL'));
  const parsedPeriodId = parseInt(period.attributes.id, 10);
  // fallback to mapping index if Period@id is not a number
  const periodIndex = window.isNaN(parsedPeriodId) ? index : parsedPeriodId;
  // Periods inherit the mpd-level attributes plus their resolved index/start.
  const periodAttributes = merge(mpdAttributes, {
    periodIndex,
    periodStart: period.attributes.start
  });

  // Only annotate the duration when the Period actually carried one.
  if (typeof period.attributes.duration === 'number') {
    periodAttributes.periodDuration = period.attributes.duration;
  }
  const adaptationSets = findChildren(period.node, 'AdaptationSet');
  const periodSegmentInfo = getSegmentInformation(period.node);

  return flatten(adaptationSets.map(toRepresentations(periodAttributes, periodBaseUrls, periodSegmentInfo)));
};
/**
* Gets Period@start property for a given period.
*
* @param {Object} options
* Options object
* @param {Object} options.attributes
* Period attributes
* @param {Object} [options.priorPeriodAttributes]
* Prior period attributes (if prior period is available)
* @param {string} options.mpdType
* The MPD@type these periods came from
* @return {number|null}
* The period start, or null if it's an early available period or error
*/
/**
 * Gets Period@start for a given period, per DASH spec section 5.3.2.1:
 *
 *   1. if Period@start is present: that value
 *   2. else if the previous period has @start and @duration: their sum
 *   3. else if this is the first period and MPD@type is 'static': 0
 *   4. otherwise it is an "early available period" (unsupported): null
 *
 * @param {Object} options
 *        Options object
 * @param {Object} options.attributes
 *        Period attributes
 * @param {Object} [options.priorPeriodAttributes]
 *        Prior period attributes (if a prior period is available)
 * @param {string} options.mpdType
 *        The MPD@type these periods came from
 * @return {number|null}
 *         The period start, or null if it's an early available period or error
 */
export const getPeriodStart = ({ attributes, priorPeriodAttributes, mpdType }) => {
  // (1) An explicit Period@start always wins.
  if (typeof attributes.start === 'number') {
    return attributes.start;
  }

  // (2) Derive the start from the previous period when it carries both a
  // numeric start and a numeric duration; otherwise the chain is broken (4).
  if (priorPeriodAttributes) {
    const { start: priorStart, duration: priorDuration } = priorPeriodAttributes;

    return (typeof priorStart === 'number' && typeof priorDuration === 'number')
      ? priorStart + priorDuration
      : null;
  }

  // (3) The first period of a static presentation starts at zero;
  // (4) anything else is an early available period / error -> null.
  return mpdType === 'static' ? 0 : null;
};
/**
* Traverses the mpd xml tree to generate a list of Representation information objects
* that have inherited attributes from parent nodes
*
* @param {Node} mpd
* The root node of the mpd
* @param {Object} options
* Available options for inheritAttributes
* @param {string} options.manifestUri
* The uri source of the mpd
* @param {number} options.NOW
* Current time per DASH IOP. Default is current time in ms since epoch
* @param {number} options.clientOffset
* Client time difference from NOW (in milliseconds)
* @return {RepresentationInformation[]}
* List of objects containing Representation information
*/
export const inheritAttributes = (mpd, options = {}) => {
  const {
    manifestUri = '',
    NOW = Date.now(),
    clientOffset = 0
  } = options;
  const periodNodes = findChildren(mpd, 'Period');

  // An MPD without at least one Period cannot describe any media.
  if (!periodNodes.length) {
    throw new Error(errors.INVALID_NUMBER_OF_PERIOD);
  }
  const locations = findChildren(mpd, 'Location');
  const mpdAttributes = parseAttributes(mpd);
  // The manifest's own uri is the root reference for all BaseURL resolution.
  const mpdBaseUrls = buildBaseUrls([ manifestUri ], findChildren(mpd, 'BaseURL'));

  // See DASH spec section 5.3.1.2, Semantics of MPD element. Default type to 'static'.
  mpdAttributes.type = mpdAttributes.type || 'static';
  mpdAttributes.sourceDuration = mpdAttributes.mediaPresentationDuration || 0;
  mpdAttributes.NOW = NOW;
  mpdAttributes.clientOffset = clientOffset;

  if (locations.length) {
    mpdAttributes.locations = locations.map(getContent);
  }

  const periods = [];

  // Since toAdaptationSets acts on individual periods right now, the simplest approach to
  // adding properties that require looking at prior periods is to parse attributes and add
  // missing ones before toAdaptationSets is called. If more such properties are added, it
  // may be better to refactor toAdaptationSets.
  periodNodes.forEach((node, index) => {
    const attributes = parseAttributes(node);
    // Use the last modified prior period, as it may contain added information necessary
    // for this period.
    const priorPeriod = periods[index - 1];

    // Resolve each period's start eagerly so later periods can chain off it.
    attributes.start = getPeriodStart({
      attributes,
      priorPeriodAttributes: priorPeriod ? priorPeriod.attributes : null,
      mpdType: mpdAttributes.type
    });

    periods.push({
      node,
      attributes
    });
  });

  return {
    locations: mpdAttributes.locations,
    representationInfo: flatten(periods.map(toAdaptationSets(mpdAttributes, mpdBaseUrls)))
  };
};
|
def max_profit(stock_prices):
    """Return the maximum profit from a single buy followed by a later sell.

    Scans the prices once, tracking the cheapest price seen so far and the
    best spread achievable against it.

    Args:
        stock_prices: sequence of prices in chronological order.

    Returns:
        The best achievable profit, or 0 when the list is empty or no
        profitable trade exists.
    """
    if not stock_prices:
        return 0

    best = 0
    cheapest = stock_prices[0]
    for price in stock_prices:
        best = max(best, price - cheapest)
        cheapest = min(cheapest, price)
    return best
#!/usr/bin/env bash
# ANSI color escapes for pass/fail output.
RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color
# Overall exit status; flipped to 1 by err() when any check fails.
EXIT_CODE=0
# Print a red failure marker and record that the overall run failed.
err() {
  echo -e "${RED}> Failed!${NC}"
  EXIT_CODE=1
}
# Echo a command, run it, and record any failure via err().
# Fixed: the original used unquoted $@, which re-split every argument on
# whitespace; "$@" preserves each argument exactly as passed.
cmd() {
  echo "> $*"
  "$@" || err
}
# Run `go vet` across all packages, recording failure via cmd().
check_vet() {
  cmd go vet -all ./...
}

# Run staticcheck across all packages, recording failure via cmd().
check_staticcheck() {
  cmd staticcheck ./...
}
# Dispatch on the first CLI argument; no argument runs every check, anything
# else is rejected. Always exits with the accumulated EXIT_CODE on success.
main() {
  case "$1" in
    "")
      check_vet
      check_staticcheck
      ;;
    *)
      echo "Unknown command"
      # Fixed: the original used a bare `exit`, which returned status 0 and
      # made an unknown command look like success to callers/CI.
      exit 1
      ;;
  esac

  if [[ $EXIT_CODE != 0 ]]; then
    echo -e "${RED}FAILED!, check errors above${NC}"
  else
    echo -e "${GREEN}SUCCESS!${NC}"
  fi
  exit $EXIT_CODE
}

# Fixed: quote "$@" so arguments containing spaces survive the hand-off.
main "$@"
|
package main
import (
"sort"
)
// sortAlphabetically sorts strs in place into ascending lexicographic order
// and returns the same slice for caller convenience.
func sortAlphabetically(strs []string) []string {
	sort.Slice(strs, func(i, j int) bool { return strs[i] < strs[j] })
	return strs
}
#!/bin/bash

# Get script root directory, solution courtesy of Dave Dopson via (https://stackoverflow.com/a/246128)
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
  SCRIPT_DIR="$( cd -P "$( dirname "$SOURCE" )" >/dev/null 2>&1 && pwd )"
  SOURCE="$(readlink "$SOURCE")"
  # Fixed: the original referenced $DIR, which is never set anywhere in this
  # script, so relative symlinks resolved against the empty string. They must
  # resolve against $SCRIPT_DIR.
  [[ $SOURCE != /* ]] && SOURCE="$SCRIPT_DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
SCRIPT_DIR="$( cd -P "$( dirname "$SOURCE" )" >/dev/null 2>&1 && pwd )"

## Setup Firewalld: tear down any previous blocklist ipsets, then recreate them.
firewall-cmd --permanent --zone=drop --remove-source=ipset:blocklist_v4
firewall-cmd --permanent --zone=drop --remove-source=ipset:blocklist_v6
firewall-cmd --reload
firewall-cmd --permanent --delete-ipset=blocklist_v4
firewall-cmd --permanent --delete-ipset=blocklist_v6
firewall-cmd --reload
firewall-cmd --permanent --new-ipset=blocklist_v4 --type=hash:net --option=family=inet --option=hashsize=4096 --option=maxelem=200000
firewall-cmd --permanent --new-ipset=blocklist_v6 --type=hash:net --option=family=inet6 --option=hashsize=4096 --option=maxelem=200000
firewall-cmd --reload

## Get our intelligence (sources from ipdeny.com) and block them! ## Note that the following lines will create a 'zones' directory within the $SCRIPT_DIR and populate it with textfiles from ipdeny.com
# Quote the path so a script directory containing spaces doesn't word-split.
rm -rfv "$SCRIPT_DIR/zones"
mkdir -pv "$SCRIPT_DIR/zones"
cd "$SCRIPT_DIR/zones/"
# One country code per line in index.txt; fetch its v4 and v6 zone files.
for n in $(cat ../index.txt)
do
wget https://www.ipdeny.com/ipv6/ipaddresses/aggregated/$n-aggregated.zone
wget https://www.ipdeny.com/ipblocks/data/countries/$n.zone
echo "bye-bye $n"
firewall-cmd --permanent --ipset=blocklist_v4 --add-entries-from-file="$n.zone"
firewall-cmd --permanent --ipset=blocklist_v6 --add-entries-from-file="$n-aggregated.zone"
done

## Re-add the sources back to the drop zone
firewall-cmd --permanent --zone=drop --add-source=ipset:blocklist_v4
firewall-cmd --permanent --zone=drop --add-source=ipset:blocklist_v6

## Reload one last time, and we should have blocked all country-code targets in index.txt
firewall-cmd --reload

echo "---"
echo "Thank you to the folks at ipdeny.com for generating these lists for us to freely utilize."
echo ""
echo "Blocking approx. $(ipset list blocklist_v4 | wc -l) ipv4 target ranges, and approx. $(ipset list blocklist_v6 | wc -l) ipv6 target ranges."
echo "---"
echo ""
exit 0
|
#!/bin/bash
# Release packaging. Requires BUILDDIR and VERSION to be exported by the
# caller (e.g. the Makefile). -e aborts on error, -o pipefail propagates
# pipe failures, -u rejects unset variables.
set -eou pipefail

# Checksums for every release artifact.
(cd ${BUILDDIR}/release; \
sha512sum parquet-tools* > checksum-sha512.txt; \
md5sum parquet-tools* > checksum-md5.txt)

# version file
echo ${VERSION} > ${BUILDDIR}/VERSION

# Second-newest tag by commit date = the previous release.
PREV_VERSION=$(git tag --sort=-committerdate | head -2 | tail -1)

# changelog file
echo "Changes since [${PREV_VERSION}](https://github.com/hangxie/parquet-tools/releases/tag/${PREV_VERSION}):" > ${BUILDDIR}/CHANGELOG
echo >> ${BUILDDIR}/CHANGELOG
git log --pretty=format:"* %h %s" ${VERSION}...${PREV_VERSION} >> ${BUILDDIR}/CHANGELOG
echo >> ${BUILDDIR}/CHANGELOG

# license file
cp LICENSE ${BUILDDIR}/release/LICENSE
|
import gensim

# Create a word2vec model (300-dim vectors, 10-word window, 5 negative samples).
# NOTE(review): these are gensim 3.x parameter names; gensim 4.x renamed
# `size` to `vector_size` — confirm against the installed version.
model = gensim.models.Word2Vec(min_count=1, window=10, size=300, sample=1e-3, negative=5)

# Build the vocab.
# NOTE(review): `sentences` is not defined in this snippet; it must be an
# iterable of token lists supplied by the surrounding code.
model.build_vocab(sentences)

# Train the model for 10 epochs over the whole corpus.
model.train(sentences, total_examples=model.corpus_count, epochs=10)
<reponame>geopm/geopm.github.io<filename>dox/search/files_6.js
// Auto-generated Doxygen search-index fragment: maps this search bucket's
// entries (here, IOGroup.hpp) to their documentation pages. Do not edit by hand.
var searchData=
[
['iogroup_2ehpp',['IOGroup.hpp',['../_i_o_group_8hpp.html',1,'']]]
];
|
// Tailwind CSS configuration.
// NOTE(review): `colors` is destructured here but not referenced anywhere in
// this file — confirm whether it can be removed.
const { colors } = require("tailwindcss/defaultTheme");

module.exports = {
  // Just-in-Time engine: generates utility classes on demand at build time.
  mode: "jit",
  // important: true,
  // Files scanned for class names when purging unused styles.
  purge: [
    "./pages/**/*.{js,ts,jsx,tsx}",
    "./components/**/*.{js,ts,jsx,tsx}",
  ],
  darkMode: false, // or 'media' or 'class'
  theme: {
    extend: {
      // Project palette and type-scale additions on top of the defaults.
      colors: {
        primary: "#313638",
        blue: "#222831",
      },
      fontSize: {
        title: "15rem",
      },
      typography: {
        DEFAULT: {
          css: {},
        },
      },
    },
  },
  variants: {
    extend: {
      borderStyle: ["responsive", "hover"],
      borderWidth: ["responsive", "hover"],
    },
  },
  plugins: [require("@tailwindcss/typography")],
};
|
package me.minidigger.minicraft.model;
public enum ChatPosition {
CHAT(0), SYSTEM(1), HOTBAR(2);
private byte id;
ChatPosition(int id) {
this.id = (byte) id;
}
public byte getId() {
return id;
}
}
|
#! /bin/sh
# Build the Docker image from the Dockerfile in the current directory and tag
# it for the lucianozu registry namespace.
docker build -t lucianozu/azure-kube-client .
|
# Package recipe for PuTTY 0.76 (terminal-only build, no GTK GUI).
package set summary "Implementation of Telnet and SSH"
package set webpage "https://www.chiark.greenend.org.uk/~sgtatham/putty/"
package set git.url "https://git.tartarus.org/simon/putty.git"
package set src.url "https://the.earth.li/~sgtatham/putty/0.76/putty-0.76.tar.gz"
package set src.sum "547cd97a8daa87ef71037fab0773bceb54a8abccb2f825a49ef8eba5e045713f"
package set bsystem "configure"
build() {
# glob()/globfree() only exist in Android's libc from API level 28 onward,
# per the bionic headers quoted below — so pre-seed the configure cache
# variable instead of letting the header check probe the host.
# int glob(const char* __pattern, int __flags, int (*__error_callback)(const char* __failure_path, int __failure_errno), glob_t* __result_ptr) __INTRODUCED_IN(28);
# void globfree(glob_t* __result_ptr) __INTRODUCED_IN(28);
if [ "$TARGET_OS_VERS" -lt 28 ] ; then
ac_cv_header_glob_h=no
else
ac_cv_header_glob_h=yes
fi
# Build without GTK (command-line tools only), passing the cache override.
configure \
--disable-gtktest \
--without-gtk \
ac_cv_header_glob_h="$ac_cv_header_glob_h"
}
|
#include <lpc40xx/Pwm.hpp>
namespace lpc17xx
{
// Compatibility shim: lets code written against an lpc17xx API obtain a PWM
// driver that is actually backed by the lpc40xx implementation.
class PwmCompatibility
{
public:
// Method to get an instance of the Pwm class from the lpc40xx library.
// Returns a lazily-constructed function-local static (Meyers singleton),
// so every caller shares the same Pwm instance.
// NOTE(review): assumes ::sjsu::lpc40xx::Pwm is default-constructible —
// confirm against the lpc40xx library's constructor signature.
static ::sjsu::lpc40xx::Pwm& GetPwm()
{
// Implement the logic to obtain and return an instance of the Pwm class
// from the lpc40xx library
// For example:
static ::sjsu::lpc40xx::Pwm pwm_instance;
return pwm_instance;
}
};
}
# -*- coding: utf-8 -*-
# Launcher for BERT text classification on the XNLI sentence-pair dataset.
# All paths below are machine-specific; edit them for your environment.
repo_path=/home/wangfei/bert_glyce_new/
data_sign=xnli
data_dir=/data/nfsdata/nlp/datasets/sentence_pair/xnli
output_dir=/data/nfsdata2/wangfei/sever5/train_logs/xnli_bert # change save_path !!!
config_path=/home/wangfei/bert_glyce_new/configs/bert.json
bert_model=/home/wangfei/data/bert_base_chinese
task_name=clf
max_seq_len=128
train_batch=32
dev_batch=32
test_batch=32
learning_rate=2e-5
num_train_epochs=4
warmup=0.1
local_rank=-1
seed=3306
checkpoint=6000
# NOTE(review): local_rank, seed and gpus are defined here but never passed
# to the command below — confirm whether they should be forwarded as flags.
gpus=2
python3 ${repo_path}/bin/run_text_classification.py \
--data_sign ${data_sign} \
--config_path ${config_path} \
--data_dir ${data_dir} \
--bert_model ${bert_model} \
--task_name ${task_name} \
--max_seq_length ${max_seq_len} \
--do_train \
--do_eval \
--train_batch_size ${train_batch} \
--dev_batch_size ${dev_batch} \
--test_batch_size ${test_batch} \
--learning_rate ${learning_rate} \
--num_train_epochs ${num_train_epochs} \
--checkpoint ${checkpoint} \
--warmup_proportion ${warmup} \
--output_dir ${output_dir}
|
<reponame>NAVEENRH/FULL_STACK_TRAINING<filename>shop-app/src/services/MenuService.ts
import axios from "axios";
import { MenuType } from "../types";
// Location of the static menu definition bundled with the app.
const MENU_URL = "/menu.json";

/** Fetches the menu entries; resolves with the axios response for MenuType[]. */
function getMenuData() {
  return axios.get<MenuType[]>(MENU_URL);
}

export default { getMenuData };
|
// Minimal unidirectional ("redux-like") state container built on RxSwift:
// events pushed via send(_:) are folded into state by `reduce`, while an
// external `feedback` stream delivers side effects.
class ReactiveSystem<State, Event, SideEffect> {
// Latest state snapshot, refreshed by the scan subscription below.
private var state: State
// Input bus: every call to send(_:) emits one event here.
private var inputEvent: PublishSubject<Event>
// Reducer: mutates a copy of the current state in response to an event.
private var reduce: (Event, inout State) -> Void
private var feedback: Observable<SideEffect>
private let disposeBag = DisposeBag()
init(initialState: State, reduce: @escaping (Event, inout State) -> Void, feedback: Observable<SideEffect>) {
self.state = initialState
self.inputEvent = PublishSubject<Event>()
self.reduce = reduce
self.feedback = feedback
// Fold each incoming event into the accumulated state, then publish the
// new state back onto self.state.
// NOTE(review): the scan accumulator captures `self` strongly while the
// subscription lives in self.disposeBag — looks like a retain cycle; confirm.
inputEvent
.scan(initialState, accumulator: { currentState, event in
var newState = currentState
self.reduce(event, &newState)
return newState
})
.subscribe(onNext: { [weak self] newState in
self?.state = newState
})
.disposed(by: disposeBag)
// NOTE(review): side effects are currently observed but dropped — the
// handler body is empty; confirm intended behavior.
feedback
.subscribe(onNext: { sideEffect in
// Handle side effects
})
.disposed(by: disposeBag)
}
// Pushes an event into the system.
func send(_ event: Event) {
inputEvent.onNext(event)
}
// NOTE(review): despite the name, this returns the CURRENT state (it is
// reassigned after each event), and the implicitly-unwrapped optional
// return type appears unnecessary — confirm before renaming.
func initialState() -> State! {
return state
}
}
<reponame>finogeeks/FinChat-Web<filename>src/views/Chat/components/editor-new/editor.mixin.js
import emojione from 'emojione';
// import eventBridge from '@/utils/event-bridge';
import { cliperboardImgToLocalPath } from '@/utils/common';
import canvasUtils from '@/utils/canvas';
import { iconTypeMatcher } from '@/utils/icon';
// Feature flag: whether the "chat record" addon entry point is enabled.
const ENABLE_CHAT_RECORD = false;
// Maximum upload size in bytes (10 MB).
const MAX_FILESIZE = 10485760;
// Vue mixin implementing the rich chat editor: a contenteditable input with
// emoji insertion, @-mention autocomplete, clipboard/file upload handling and
// keyboard shortcuts. The consuming component is expected to provide
// `roomEntity`, `filtedAtList`, `canSend`, `sendTextMessage`, the `editor` /
// `editorFiles` refs and listeners for the emitted `handleImage` /
// `handleVideo` / `handleOtherFile` events — none of those are defined here.
export default {
data() {
return {
editorContent: '',
ifShowAtList: false,
ifShowEmojiPicker: false,
curHoverUserId: '', // userId of the @-mention candidate currently hovered
compositionInputing: false, // whether an IME keyboard composition is in progress
// atIntervalId is a timer used to compute the string between '@' and the
// cursor, which filters the @-mention list; Esc or inserting an @-block
// clears the timer, and it starts when the list pops up after typing '@'.
atIntervalId: '',
lastInputData: '',
atFilter: '',
savedRange: null,
enableChatRecord: ENABLE_CHAT_RECORD, // eslint-disable-line
uploadQueue: [],
confirmFile: {},
lastTypingTime: 0,
};
},
methods: {
// Queue dropped/selected files for upload (newest first).
handleUpload(files) {
this.uploadQueue.unshift(...files);
return true;
},
// Validate size, then dispatch a queued file to the image or generic flow.
async handleUploadQueue(file, forceFile = false) {
const maxFileSize = MAX_FILESIZE || (100 * 1024 * 1024); // default 100 MB
console.log(file);
console.log(file.size);
if (file.size > maxFileSize) {
this.$Message.error(`仅限发送不超过${maxFileSize / (1024 * 1024)}M的文件`);
this.uploadQueue.pop();
return;
}
if (forceFile) {
// upload as file anyway
this.handleOtherFileModel(file);
} else if (file.type.startsWith('image')) {
this.handleImageModel(file);
} else {
this.handleOtherFileModel(file);
}
},
// Open the send-confirmation dialog for an image with an object-URL preview.
handleImageModel(fileBlob) {
const imageUrl = window.URL.createObjectURL(fileBlob);
this.confirmFile = {
show: true,
isImage: true,
title: `发送到:${this.roomEntity.name.length > 16 ? `${this.roomEntity.name.slice(0, 16)}...` : this.roomEntity.name}`,
file: fileBlob,
name: fileBlob.name,
url: imageUrl,
};
},
// Confirm (value=true) or cancel the pending file send; on confirm, emits
// handleImage / handleVideo / handleOtherFile to the parent by file type.
handleFileConfirmClick(value) {
// this.confirmFile.show = false;
// if (this.uploadQueue.length) {
// const lastFile = this.uploadQueue.pop();
// if (!lastFile || !lastFile.size) {
// this.$Message.error('无效文件');
// return;
// }
// if (value) {
// this.handleSendFile(lastFile);
// }
// if (this.confirmFile.url) {
// window.URL.revokeObjectURL(this.confirmFile.url);
// }
// }
this.confirmFile.show = false;
if (this.uploadQueue.length) {
const lastFile = this.uploadQueue.pop();
if (!lastFile || !lastFile.size) {
this.$Message.error('无效文件');
return;
}
if (value) {
if (lastFile.type.startsWith('image')) {
this.$emit('handleImage', lastFile);
} else if (lastFile.type.startsWith('video')) {
this.$emit('handleVideo', lastFile);
} else {
this.$emit('handleOtherFile', lastFile);
}
} else if (this.confirmFile.url) {
window.URL.revokeObjectURL(this.confirmFile.url);
}
}
},
// Open the send-confirmation dialog for a non-image file, with a type icon
// and a human-readable size.
handleOtherFileModel(fileBlob) {
console.log('handleOtherFileModel', fileBlob);
let extention = '';
if (fileBlob.name.split('.')[1]) {
extention = fileBlob.name.split('.')[1].toLowerCase();
}
const iconUrl = iconTypeMatcher(extention);
this.confirmFile = {
show: true,
title: `发送到:${this.roomEntity.name.length > 16 ? `${this.roomEntity.name.slice(0, 16)}...` : this.roomEntity.name}`,
icon: iconUrl,
type: 'file',
file: fileBlob,
name: fileBlob.name,
size: fileBlob.size > 1048576 ? `${parseInt(fileBlob.size / 1048576, 10)} MB` : `${parseInt(fileBlob.size / 1024, 10)} KB`,
};
},
// Insert an emoji image at the saved (or current) caret position.
insertEmoji(emoji) {
if (this.isCursorInEditor() && this.savedRange) {
this.restoreCursorPos();
} else {
this.focus();
}
setTimeout(() => {
document.execCommand('insertHTML', false, emojione.unicodeToImage(emoji));
}, 10);
this.ifShowEmojiPicker = false;
},
// Restore the caret to the range saved by saveCursorPos().
restoreCursorPos() {
if (this.savedRange) {
const range = window.getSelection();
range.removeAllRanges();
range.addRange(this.savedRange);
}
},
// Remember the current caret range so it can be restored after UI popups.
saveCursorPos() {
const range = window.getSelection();
if (range.getRangeAt && range.rangeCount) {
this.savedRange = range.getRangeAt(0);
}
},
// Open the emoji picker (saving the caret first); blocked when muted.
handleOpenEmoji() {
this.$refs.editor.focus();
if (this.roomEntity && this.roomEntity.isBan) {
this.$Message.error('房间已禁言');
return;
}
this.saveCursorPos();
setTimeout(() => {
this.ifShowEmojiPicker = true;
}, 10);
},
// Ask the desktop shell to take a screenshot; blocked when muted.
handleScreenshot() {
if (this.roomEntity && this.roomEntity.isBan) {
this.$Message.error('房间已禁言');
return;
}
window.finstation.sendMessage('SCREENSHOT');
},
// Launch the chat-record addon window for this room (desktop shell only).
handleOpenChatRecord() {
if (window.finstation) {
// rewritten logic
window.finstation.sendMessage(undefined, 'OPEN_CHAT_RECORD', {
roomName: this.roomEntity.name,
roomId: this.roomEntity.roomId,
}, 'webview');
window.finstation.addons.launch({
name: `聊天记录-${this.roomEntity.name}`,
id: 'com.finogeeks.personal.chatRecord',
disableNavigationBar: true,
single: true,
// url: 'http://localhost:9080/chat-record.html',
url: VIEW_CHAT_RECORD, //eslint-disable-line
width: 700,
height: 620,
frame: false,
powerLevel: 5,
type: 'window',
meta: {
roomName: this.roomEntity.name,
roomId: this.roomEntity.roomId,
},
});
}
},
// Close the emoji picker, putting the caret back where it was.
handleCloseEmoji() {
this.restoreCursorPos();
this.ifShowEmojiPicker = false;
},
// Move the caret to the end of this room's editor element.
focus() {
if (!this.roomEntity.roomId) return;
const range = window.getSelection();
range.selectAllChildren(document.getElementById(`editor-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`));
range.collapseToEnd();
},
// Track which mention-list entry the mouse is over.
handleAtItemHover(member) {
this.curHoverUserId = member.userId;
},
// True when the selection anchor sits inside this room's editor (and not
// inside a contenteditable="false" island such as an @-block).
isCursorInEditor() {
console.log('isCursorInEditor');
const editorNode = document.getElementById(`editor-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`);
const anchorNode = getSelection().anchorNode;
// console.log(editorNode);
// console.log(anchorNode);
// console.log(anchorNode.parentElement.attributes.contenteditable);
// console.log(anchorNode.parentElement.attributes.contenteditable.nodeValue);
if (anchorNode.parentElement.attributes.contenteditable && anchorNode.parentElement.attributes.contenteditable.nodeValue === 'false') return false;
return editorNode.contains(anchorNode);
},
// Characters immediately before/after the caret in the anchor text node.
getCursorStr() {
const selection = getSelection();
const content = (selection.anchorNode && selection.anchorNode.textContent) || '';
const range = selection.getRangeAt(0);
return {
before: (content && content[range.startOffset - 1]) || null,
after: (content && content[range.startOffset]) || null,
};
},
// Text between the nearest preceding '@' and the caret — the mention filter.
findAtFilterStr() {
const selection = getSelection();
const content = (selection.anchorNode && selection.anchorNode.textContent) || '';
if (!content) return null;
const range = selection.getRangeAt(0);
const endIndex = range.endOffset;
const stratIndex = content.lastIndexOf('@', endIndex - 1);
if (stratIndex === -1) return null;
return content.substring(stratIndex + 1, endIndex);
},
// Global keydown handler: file-confirm dialog keys, mention-list navigation,
// Enter-to-send (Ctrl/Meta/Alt+Enter inserts a newline), and shortcut guards.
handleKeyEvent(e) {
// console.log('handleKeyEvent');
// console.log(e);
// console.log(e, e.metaKey, e.keyCode);
// console.log(window.getSelection());
// console.log('handleKeyEvent', this.roomEntity.roomId, e);
// if (e.key === 'F1') {
// this.saveCursorPos();
// }
// if (e.key === 'F2') {
// this.restoreCursorPos();
// }
if (e.keyCode === 8) {
const editorNode = document.getElementById(`editor-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`);
const str = editorNode.innerHTML;
if (str === '<br>') {
editorNode.innerHTML = '';
}
}
if (this.confirmFile.show) {
if (e.key === 'Escape') {
e.preventDefault();
this.handleFileConfirmClick(false);
this.focus();
}
if (e.key === 'Enter') {
e.preventDefault();
this.handleFileConfirmClick(true);
this.focus();
return;
}
}
if (this.ifShowAtList) {
if (e.key === 'Backspace') {
if (this.getCursorStr().before === '@') {
this.closeAtList();
}
}
if (e.key === 'Escape') {
e.preventDefault();
this.closeAtList();
}
if (e.key === 'Enter') {
if (this.curHoverUserId) {
e.preventDefault();
this.handleInsertAtBlock();
return;
}
}
if (e.key === 'ArrowUp') {
e.preventDefault();
const curInx = this.filtedAtList.findIndex(item => item.userId === this.curHoverUserId);
if (curInx > 0) {
this.curHoverUserId = this.filtedAtList[curInx - 1].userId;
document.getElementById(`hoveredAtBlock-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`).scrollIntoView(false);
}
}
if (e.key === 'ArrowDown') {
e.preventDefault();
const curInx = this.filtedAtList.findIndex(item => item.userId === this.curHoverUserId);
if (curInx < this.filtedAtList.length - 1 && this.filtedAtList.length > 0) {
this.curHoverUserId = this.filtedAtList[curInx + 1].userId;
document.getElementById(`hoveredAtBlock-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`).scrollIntoView();
}
}
}
if (e.key === 'Enter') {
e.preventDefault();
if (!this.canSend) return;
if (e.ctrlKey || e.metaKey || e.altKey) {
document.execCommand('insertHTML', false, '<br></br>');
return;
}
if (e.shiftKey || this.compositionInputing) return;
this.sendTextMessage();
}
// Suppress Ctrl+B / Ctrl+I / Ctrl+U shortcuts — otherwise they would
// restyle the contenteditable content.
if (e.ctrlKey || e.metaKey) {
if (e.keyCode === 66 || e.keyCode === 98 || e.keyCode === 73 || e.keyCode === 105 || e.keyCode === 85 || e.keyCode === 117) e.preventDefault();
}
},
// Clipboard handler: routes pasted files to the upload queue and pasted
// text (length-limited) through emoji/HTML-safe insertion.
async handlePaste(e) {
console.log('handlePaste');
console.log(this.isCursorInEditor());
if (this.isCursorInEditor()) {
e && e.preventDefault();
let plainText = '';
const clipboardData = e.clipboardData;
console.log(clipboardData);
console.log(clipboardData.types);
// console.log(clipboardData.items[1].getAsFile());
// console.log(clipboardData.items[1]);
const lastItem = clipboardData.items[clipboardData.items.length - 1];
console.log(lastItem);
if (lastItem && lastItem.getAsFile()) {
const file = lastItem.getAsFile();
this.handleUpload([file]);
} else if (clipboardData.types.indexOf('text/plain') > -1 || clipboardData.types.indexOf('text/html') > -1) {
plainText = clipboardData.getData('text/plain');
if (plainText.length > 5000) {
this.$Message.error('消息过长,无法粘贴');
return;
}
document.execCommand('insertHTML', false, emojione.unicodeToImage(plainText.replace(/</g, '<').replace(/>/g, '>').replace(/\r\n/g, '<br>').replace(/\n/g, '<br>')
.replace(/ /g, ' ')));
} else if (clipboardData.types.indexOf('Files') > -1) {
const file = clipboardData.files[0];
this.handleUpload([file]);
}
}
},
// Build the native cut/copy/paste context menu (desktop shell only).
handleContextMenu() {
window.finstation && window.finstation.buildContextMenu([
{
label: '剪切',
role: 'cut',
},
{
label: '复制',
role: 'copy',
},
{
label: '粘贴',
click: () => this.handlePaste(),
},
]);
},
// Raw innerHTML of this room's editor, unparsed.
originContent() {
if (!this.roomEntity.roomId) return '';
const editorNode = document.getElementById(`editor-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`);
const str = editorNode.innerHTML;
return str;
},
// Convert editor HTML into an outgoing message payload: strips markup,
// restores emoji shortcodes from <img> alts, and extracts @-mention labels
// into an idlist plus positional signals. Returns m.alert when mentions
// exist, otherwise m.text.
parseContent() {
const editorNode = document.getElementById(`editor-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`);
let str = editorNode.innerHTML;
str = str.replace(/<span>/g, '').replace(/<\/span>/g, '').replace(/<br>/g, '\n').replace(/ /g, ' ');
if (str.length > 6000) {
this.$Message.error('消息过长,无法发送');
return;
}
// parse emojione img tags
const emojitags = str.match(/<img class="emojione" .+?>/g);
emojitags && emojitags.length && emojitags.forEach((item) => {
const emoji = item.match(/alt="(.+?)"/);
str = str.replace(item, emoji[1]);
});
// parse atlist
const idlist = [];
const signals = [];
let matchItem = /<label data-user-id="(.+?)" .+?>(.+?)<\/label>/g.exec(str);
while (matchItem) { //eslint-disable-line
idlist.push(matchItem[1]);
str = str.replace(matchItem[0], matchItem[2]);
signals.push({
start: matchItem.index,
end: matchItem.index + matchItem[2].length,
type: '@',
val: matchItem[1],
});
matchItem = /<label data-user-id="(.+?)" .+?>(.+?)<\/label>/g.exec(str);
}
str = str.replace(/</g, '<').replace(/>/g, '>').trim();
if (idlist.length) return { msgtype: 'm.alert', body: str, idlist, signals };
return { msgtype: 'm.text', body: str };
},
// Show the mention list with the first candidate preselected.
showAtList() {
this.ifShowAtList = true;
this.curHoverUserId = this.filtedAtList[0].userId;
if (!this.atIntervalId) this.startAtInterval();
},
// Hide the mention list and stop the filter polling timer.
closeAtList() {
this.ifShowAtList = false;
this.curHoverUserId = '';
clearInterval(this.atIntervalId);
this.atIntervalId = null;
},
// Poll every 500ms: keep the list visible only while the caret stays in
// the editor, and refresh the @-filter string from the text after '@'.
startAtInterval() {
this.atIntervalId = setInterval(() => {
if (!this.isCursorInEditor()) {
this.ifShowAtList = false;
} else {
this.ifShowAtList = true;
this.atFilter = this.findAtFilterStr();
}
}, 500);
},
// Input handler: '@' opens the mention list (except in direct chats) and
// every keystroke refreshes the typing indicator.
handleEditorChange(e) {
if (e.data === '@') {
if (this.roomEntity.isDirect) return;
const selection = window.getSelection();
const focusNode = selection.focusNode;
this.focuseNode = focusNode;
this.showAtList();
}
this.handleUserTyping();
},
// Open the hidden file input; blocked when muted or QQ bridge is down.
handleOpenFile() {
if (this.roomEntity && this.roomEntity.isBan) {
this.$Message.error('房间已禁言');
return;
}
if (this.roomEntity && this.roomEntity.isQQRoom && !this.isQQServerWorking) {
this.$Message.error('QQ服务不可用');
return;
}
this.$refs.editorFiles.click();
},
// Track IME composition state so Enter doesn't send mid-composition.
handleComposition(isInputing) {
// console.log('handleComposition', isInputing);
this.compositionInputing = isInputing;
},
// Watch the editor's text mutations and replace typed emoji characters
// with emojione <img> tags, keeping the caret after the inserted image.
// Retries every second until the room id is available.
replaceEmojiWithImg() {
if (!this.roomEntity.roomId) {
setTimeout(() => {
this.replaceEmojiWithImg();
}, 1000);
return;
}
const editorNode = document.getElementById(`editor-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`);
const observer = new MutationObserver((mutations) => {
mutations.forEach((item) => {
if (item.type === 'characterData') {
// if (item.target.data && item.target.data.match(/(?:[\u2700-\u27bf]|(?:\ud83c[\udde6-\uddff]){2}|[\ud800-\udbff][\udc00-\udfff]|[\u0023-\u0039]\ufe0f?\u20e3|\u3299|\u3297|\u303d|\u3030|\u24c2|\ud83c[\udd70-\udd71]|\ud83c[\udd7e-\udd7f]|\ud83c\udd8e|\ud83c[\udd91-\udd9a]|\ud83c[\udde6-\uddff]|[\ud83c[\ude01-\ude02]|\ud83c\ude1a|\ud83c\ude2f|[\ud83c[\ude32-\ude3a]|[\ud83c[\ude50-\ude51]|\u203c|\u2049|[\u25aa-\u25ab]|\u25b6|\u25c0|[\u25fb-\u25fe]|\u00a9|\u00ae|\u2122|\u2139|\ud83c\udc04|[\u2600-\u26FF]|\u2b05|\u2b06|\u2b07|\u2b1b|\u2b1c|\u2b50|\u2b55|\u231a|\u231b|\u2328|\u23cf|[\u23e9-\u23f3]|[\u23f8-\u23fa]|\ud83c\udccf|\u2934|\u2935|[\u2190-\u21ff])/g)) {
// if (!item.target.data || item.target.data.length > 500) return;
if (item.target.data && item.target.data.match(/(\ud83c[\udf00-\udfff])|(\ud83d[\udc00-\ude4f\ude80-\udeff])|[\u2600-\u2B55]/g)) {
const target = getSelection().anchorNode;
const spanNode = document.createElement('span');
// workaround for Windows 10 emoji input compatibility
if (!item.oldValue) {
spanNode.innerHTML = '';
item.target.parentNode.replaceChild(spanNode, item.target);
return;
}
spanNode.innerHTML = emojione.unicodeToImage(target.textContent.replace(/</g, '<').replace(/>/g, '>'));
item.target.parentNode.replaceChild(spanNode, item.target);
let idx = [].findIndex.call(spanNode.childNodes, item => item.tagName === 'IMG');
if (idx === -1) idx = spanNode.childNodes.length - 1;
getSelection().setBaseAndExtent(spanNode, idx + 1, spanNode, idx + 1);
}
}
});
});
const options = {
characterData: true,
characterDataOldValue: true,
subtree: true,
};
observer.observe(editorNode, options);
},
// Replace the '@filter' text before the caret with a rendered mention block
// (provided as innerHtml), then close the mention list.
insertAtBlock(innerHtml) {
console.log('isCursorInEditor: ', this.isCursorInEditor());
// if (!this.isCursorInEditor()) return;
const selection = getSelection();
console.log(selection);
const tempnode = document.createElement('span');
tempnode.innerHTML = innerHtml;
const content = (selection.anchorNode && selection.anchorNode.textContent) || '';
console.log('content: ', content);
const range = selection.getRangeAt(0);
console.log('range: ', range);
let endIndex = range.endOffset;
console.log('endIndex: ', endIndex);
const atIndex = content.lastIndexOf('@', endIndex - 1);
endIndex = atIndex + 1;
console.log('atIndex, endIndex : ', atIndex, endIndex);
if (atIndex > -1) range.setStart(selection.anchorNode, atIndex);
if (atIndex > -1) range.setEnd(selection.anchorNode, endIndex);
console.log('range: ', range);
range.deleteContents();
range.insertNode(tempnode.lastChild);
range.collapse();
document.execCommand('insertText', false, ' ');
this.closeAtList();
},
// Extract a plain metadata object from a File/Blob; adds width/height (and
// any pre-computed base64 contents) for images.
async blob2FileInfo(fileBlob) {
const fileInfo = {
name: fileBlob.name,
path: fileBlob.path,
size: fileBlob.size,
type: fileBlob.type,
};
if (fileBlob.type.startsWith('image')) {
const imageInfo = await canvasUtils.getImageInfo(fileBlob);
fileInfo.width = imageInfo.width;
fileInfo.height = imageInfo.height;
if (fileBlob.base64Contents) {
fileInfo.base64Contents = fileBlob.base64Contents;
}
}
return fileInfo;
},
// Close the emoji picker when a mousedown lands outside of it.
handleClickCloseEmoji(e) {
if (!this.ifShowEmojiPicker) return;
const epickerNode = document.getElementById(`epicker-${this.roomEntity.roomId.replace(/!|:|\./g, '')}`);
if (!epickerNode) return;
if (!epickerNode.contains(e.target) && this.ifShowEmojiPicker) {
this.ifShowEmojiPicker = false;
}
},
// Typing indicator hook — currently disabled (event bridge commented out).
handleUserTyping() {
// if (Date.now() - this.lastTypingTime >= 4 * 1000) {
// eventBridge.proxy.userTyping(this.roomEntity.roomId);
// this.lastTypingTime = Date.now();
// }
},
},
watch: {
// Keep the hovered mention in sync when the filtered list changes.
filtedAtList() {
this.curHoverUserId = (this.filtedAtList[0] && this.filtedAtList[0].userId) || null;
},
// Process the newest queued file as soon as it arrives.
uploadQueue(newVal) {
if (newVal.length) {
const lastFile = newVal[newVal.length - 1];
this.handleUploadQueue(lastFile);
}
},
},
// Register global listeners and start the emoji MutationObserver.
mounted() {
window.document.addEventListener('keydown', this.handleKeyEvent);
window.document.addEventListener('paste', this.handlePaste);
window.document.addEventListener('mousedown', this.handleClickCloseEmoji);
// if (window.finstation.getPlatform() === 'darwin') {
// window.document.addEventListener('compositionstart', this.handleComposition.bind(this, true));
// window.document.addEventListener('compositionend', this.handleComposition.bind(this, false));
// }
setTimeout(() => {
this.replaceEmojiWithImg();
this.focus();
}, 10);
},
// Mirror of mounted(): tear down global listeners and the mention timer.
beforeDestroy() {
window.document.removeEventListener('keydown', this.handleKeyEvent);
window.document.removeEventListener('paste', this.handlePaste);
window.document.removeEventListener('mousedown', this.handleClickCloseEmoji);
// if (window.finstation.getPlatform() === 'darwin') {
// window.document.removeEventListener('compositionstart', this.handleComposition.bind(this, true));
// window.document.removeEventListener('compositionend', this.handleComposition.bind(this, false));
// }
clearInterval(this.atIntervalId);
},
};
|
import React, { useState } from 'react';
import { useDrag, useDrop } from 'react-dnd';
// Maps a serialized component `type` string to the React component that
// renders it.
// NOTE(review): Header, Footer and Body are referenced but not imported in
// this fragment — confirm they are defined elsewhere in the complete file.
const componentMap = {
'header': Header,
'footer': Footer,
'body': Body,
};
// Page-builder root: holds the ordered list of page sections in state and
// renders one editable <Component> per entry.
function App() {
const [components, setComponents] = useState([
{
type: 'header',
data: {
title: 'My Site'
}
},
{
type: 'footer',
data: {
content: 'Copyright 2020'
}
},
{
type: 'body',
data: {
content: 'Welcome to my site!'
}
},
]);
// ... Implementation of the useDrag, useDrop functions
// NOTE(review): Component, handleUpdate and handleDelete are used below but
// not defined in this fragment; confirm they exist in the complete file.
return (
<div className="App">
<div className="builder">
{components.map((component, index) => (
<Component
key={index}
component={component}
index={index}
onUpdate={handleUpdate}
onDelete={handleDelete}
/>
))}
</div>
</div>
);
}
export default App;
package org.multibit.hd.ui.languages;
import org.junit.Test;
import static org.fest.assertions.Assertions.assertThat;
/**
 * Exhaustive check of {@code Languages.getOrdinalFor} over 1..24: verifies
 * the English ordinal suffix rules, including the "st"/"nd"/"rd" cases at
 * 1-3 and 21-23 and the "th" cases in between.
 */
public class LanguagesTest {
@Test
public void testGetOrdinalFor() throws Exception {
// Test against all possible Trezor values just to be sure
assertThat(Languages.getOrdinalFor(1)).isEqualTo("1st");
assertThat(Languages.getOrdinalFor(2)).isEqualTo("2nd");
assertThat(Languages.getOrdinalFor(3)).isEqualTo("3rd");
assertThat(Languages.getOrdinalFor(4)).isEqualTo("4th");
assertThat(Languages.getOrdinalFor(5)).isEqualTo("5th");
assertThat(Languages.getOrdinalFor(6)).isEqualTo("6th");
assertThat(Languages.getOrdinalFor(7)).isEqualTo("7th");
assertThat(Languages.getOrdinalFor(8)).isEqualTo("8th");
assertThat(Languages.getOrdinalFor(9)).isEqualTo("9th");
assertThat(Languages.getOrdinalFor(10)).isEqualTo("10th");
assertThat(Languages.getOrdinalFor(11)).isEqualTo("11th");
assertThat(Languages.getOrdinalFor(12)).isEqualTo("12th");
assertThat(Languages.getOrdinalFor(13)).isEqualTo("13th");
assertThat(Languages.getOrdinalFor(14)).isEqualTo("14th");
assertThat(Languages.getOrdinalFor(15)).isEqualTo("15th");
assertThat(Languages.getOrdinalFor(16)).isEqualTo("16th");
assertThat(Languages.getOrdinalFor(17)).isEqualTo("17th");
assertThat(Languages.getOrdinalFor(18)).isEqualTo("18th");
assertThat(Languages.getOrdinalFor(19)).isEqualTo("19th");
assertThat(Languages.getOrdinalFor(20)).isEqualTo("20th");
assertThat(Languages.getOrdinalFor(21)).isEqualTo("21st");
assertThat(Languages.getOrdinalFor(22)).isEqualTo("22nd");
assertThat(Languages.getOrdinalFor(23)).isEqualTo("23rd");
assertThat(Languages.getOrdinalFor(24)).isEqualTo("24th");
}
}
import java.sql.*;
public class CorrelationCheck {
public static double getCorrelation(String tableName, String field1, String field2) throws SQLException {
String sql = "SELECT " + field1 +", "+ field2 + " FROM " + tableName;
ResultSet rs = null;
double result = 0;
try {
Statement stmt = connection.createStatement();
rs = stmt.executeQuery(sql);
int n = 0;
double sum_sq_x = 0;
double sum_sq_y = 0;
double sum_coproduct = 0;
double mean_x = 0;
double mean_y = 0;
while(rs.next()) {
double x = rs.getDouble(field1);
double y = rs.getDouble(field2);
sum_sq_x += x * x;
sum_sq_y += y * y;
sum_coproduct += x * y;
mean_x += x;
mean_y += y;
n++;
}
if (n > 0) {
result = (n * sum_coproduct - mean_x * mean_y) / Math.sqrt(
(n * sum_sq_x - mean_x * mean_x) *
(n * sum_sq_y - mean_y * mean_y));
}
}
finally {
rs.close();
}
return result;
}
} |
<reponame>ooooo-youwillsee/leetcode
//
// Created by ooooo on 2020/1/2.
//
#ifndef CPP_0108_SOLUTION1_H
#define CPP_0108_SOLUTION1_H
#include "TreeNode.h"
#include <vector>
class Solution {
public:
    // Variant that copies the two halves into fresh vectors at every level.
    // Simpler to read, but the copies cost O(n log n) extra time and memory
    // (the original comment claimed the complexity became LOWER — it does not).
    TreeNode *sortedArrayToBST2(vector<int> &nums) {
        if (nums.empty()) return nullptr;
        // midpoint biased to the left
        int mid = nums.size() / 2;
        // midpoint biased to the right:
        //int mid = (nums.size() - 1) / 2;
        TreeNode *node = new TreeNode(nums[mid]);
        vector<int> left(nums.begin(), nums.begin() + mid);
        vector<int> right(nums.begin() + mid + 1, nums.end());
        // Consistency fix: recurse into this same copying variant. The
        // original delegated to sortedArrayToBST, silently mixing the two
        // implementations; both pick the same left-biased midpoint, so the
        // resulting tree is unchanged.
        node->left = sortedArrayToBST2(left);
        node->right = sortedArrayToBST2(right);
        return node;
    }

    // Builds the subtree for the half-open range nums[left, right) in place:
    // O(n) time, no copies.
    TreeNode *help(vector<int> &nums, int left, int right) {
        if (left == right || nums.empty()) return nullptr;
        int mid = left + (right - left) / 2;
        TreeNode *node = new TreeNode(nums[mid]);
        node->left = help(nums, left, mid);
        node->right = help(nums, mid + 1, right);
        return node;
    }

    // Entry point: converts a sorted array into a height-balanced BST.
    TreeNode *sortedArrayToBST(vector<int> &nums) {
        return help(nums, 0, nums.size());
    }
};
#endif //CPP_0108_SOLUTION1_H
|
<filename>src/util/datastructure/function/TriFunction.java
package util.datastructure.function;
/**
 * A three-argument analogue of {@link java.util.function.Function}: accepts
 * parameters of types P1, P2 and P3 and produces a result of type R.
 */
@FunctionalInterface
public interface TriFunction<P1, P2, P3, R> {
/**
 * Applies this function to the given arguments.
 *
 * @param param1 the first argument
 * @param param2 the second argument
 * @param param3 the third argument
 * @return the function result
 */
R apply(P1 param1, P2 param2, P3 param3);
}
|
// Module-pattern demo: `secret` lives only in the IIFE's closure and is
// reachable solely through the accessor exported on `window`.
(() => {
  const secret = 'This is a private variable.';
  window.getPrivateVar = () => secret;
})();

console.log(window.getPrivateVar()); // Output: This is a private variable.
package com.alexjing.pullpushtorefresh.lib.base;
/**
 * Base contract for presenters: defines the register (subscribe) and
 * unregister (unsubscribe) lifecycle hooks.
 *
 * @author: <NAME>(<EMAIL>)
 * @date: 2016-05-06
 * @time: 14:42
 */
public interface BasePresenter {
/** Start this presenter: attach listeners / acquire its data sources. */
void subscribe();
/** Stop this presenter: detach listeners / release held resources. */
void unsubscribe();
}
|
<filename>src/components/Button/index.js
// Barrel file: re-export the Button implementation so consumers can import
// from the component directory ('components/Button') directly.
import component from './Button'
export default component
|
package com.qweex.openbooklikes;
import android.content.res.Resources;
import android.util.Log;
import com.loopj.android.http.AsyncHttpClient;
import com.loopj.android.http.JsonHttpResponseHandler;
import com.loopj.android.http.RequestParams;
import com.qweex.openbooklikes.activity.MainActivity;
import org.json.JSONException;
import org.json.JSONObject;
import cz.msebera.android.httpclient.Header;
/**
 * Thin wrapper around AsyncHttpClient for the BookLikes REST API: appends
 * the API key and (when logged in) the user token to every request.
 * NOTE(review): BASE_URL is plain http — credentials/tokens travel
 * unencrypted; confirm whether the API supports https.
 */
public class ApiClient {
private static final String BASE_URL = "http://booklikes.com/api/v1_05/";
private static String API_KEY;
/** Loads the API key from string resources; must be called before any request. */
public static void setApiKey(Resources r) { API_KEY = r.getString(R.string.api_key); }
protected static AsyncHttpClient client = new AsyncHttpClient();
/** Issues a GET to the handler's URL path with key/token appended to params. */
public static void get(RequestParams params, ApiResponseHandler responseHandler) {
params.put("key", API_KEY);
if(MainActivity.me !=null) {
params.put("usr_token", MainActivity.me.token());
}
Log.d("OBL:Getting", getAbsoluteUrl(responseHandler.urlPath()) + "?" + params.toString());
client.get(getAbsoluteUrl(responseHandler.urlPath()), params, responseHandler);
}
/** GET with no extra parameters beyond key/token. */
public static void get(ApiResponseHandler responseHandler) {
get(new RequestParams(), responseHandler);
}
/** Issues a POST to the handler's URL path with key/token appended to params. */
public static void post(RequestParams params, ApiResponseHandler responseHandler) {
params.put("key", API_KEY);
if(MainActivity.me !=null) {
params.put("usr_token", MainActivity.me.token());
}
Log.d("OBL:Posting", getAbsoluteUrl(responseHandler.urlPath()) + "?" + params.toString());
client.post(getAbsoluteUrl(responseHandler.urlPath()), params, responseHandler);
}
/** POST with no extra parameters beyond key/token. */
public static void post(ApiResponseHandler responseHandler) {
post(new RequestParams(), responseHandler);
}
/** Prefixes a relative API path with BASE_URL. */
protected static String getAbsoluteUrl(String relativeUrl) {
return BASE_URL + relativeUrl;
}
/**
 * JSON response handler that tracks whether the last page fetched was empty,
 * by inspecting the subclass-provided count field of the response.
 */
public static abstract class ApiResponseHandler extends JsonHttpResponseHandler {
/** Relative API path this handler targets (appended to BASE_URL). */
abstract protected String urlPath();
/** Name of the JSON field holding the result count, or null if none. */
abstract protected String countFieldName();
private boolean lastFetchWasNull = false;
public boolean wasLastFetchNull() { return lastFetchWasNull; }
public void reset() { lastFetchWasNull = false; }
@Override
public void onSuccess(int statusCode, Header[] headers, JSONObject response) {
// Once a fetch comes back empty, stay "null" until reset() — a missing
// count field is also treated as an empty fetch.
if(!lastFetchWasNull)
try {
lastFetchWasNull = countFieldName()!=null && response.getInt(countFieldName())==0;
} catch (JSONException e) {
e.printStackTrace();
lastFetchWasNull = true;
}
}
}
/** Convenience params for paged endpoints: page number plus page size from the adapter. */
public static class PagedParams extends RequestParams {
public PagedParams(int page, AdapterBase adapter) {
put("PerPage", adapter.perScreen());
put("Page", page);
}
}
}
|
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build-phase script (generated by CocoaPods).
# Copies each Pods-built framework into the product's frameworks folder,
# strips architectures not part of the current build, re-codesigns when the
# build settings require it, and (Xcode < 7 only) embeds Swift runtime dylibs.
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
# Resolve the framework source: full path under BUILT_PRODUCTS_DIR first,
# then its basename there, then the literal argument.
# NOTE(review): if none of the three -r checks match, "source" stays unset and
# the "${source}" dereference below aborts the script under `set -u` — confirm
# this fail-fast behavior is intended.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked framework to its real location before copying.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
# Locate the framework's main binary (framework layout first, flat fallback).
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# STRIP_BINARY_RETVAL == 1 means strip_invalid_archs actually processed the
# binary; only then is the stripped dSYM worth copying to its destination.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
# When parallel signing is enabled, background the command; the trailing
# `wait` at the bottom of the script joins all of them.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Per-configuration framework lists (kept in sync by CocoaPods).
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Bolts/Bolts.framework"
install_framework "${BUILT_PRODUCTS_DIR}/LTMorphingLabel/LTMorphingLabel.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MaterialTextField/MaterialTextField.framework"
install_framework "${BUILT_PRODUCTS_DIR}/PMSuperButton/PMSuperButton.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Parse/Parse.framework"
install_framework "${BUILT_PRODUCTS_DIR}/RAMAnimatedTabBarController/RAMAnimatedTabBarController.framework"
install_framework "${BUILT_PRODUCTS_DIR}/TweeTextField/TweeTextField.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Bolts/Bolts.framework"
install_framework "${BUILT_PRODUCTS_DIR}/LTMorphingLabel/LTMorphingLabel.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MaterialTextField/MaterialTextField.framework"
install_framework "${BUILT_PRODUCTS_DIR}/PMSuperButton/PMSuperButton.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Parse/Parse.framework"
install_framework "${BUILT_PRODUCTS_DIR}/RAMAnimatedTabBarController/RAMAnimatedTabBarController.framework"
install_framework "${BUILT_PRODUCTS_DIR}/TweeTextField/TweeTextField.framework"
fi
# Join any backgrounded codesign jobs started by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
/**
 * Props for the Modal component. Extends the standard div props (minus ref)
 * so arbitrary div attributes can be forwarded to the modal container.
 */
export interface ModalProps extends React.ComponentPropsWithoutRef<'div'> {
  /** Whether the modal is currently shown. */
  isOpen: boolean
  /** Invoked when the user clicks to dismiss the modal. */
  onClose?: React.MouseEventHandler
  /** DOM element to portal the modal into; implementation default applies when omitted. */
  portalElement?: Element
}
|
#! /bin/bash
# Clone a template module directory and rename it.
#   $1 - name of the existing template module directory (e.g. bkc_mod)
#   $2 - name of the new module
# Copies ./$1 to ./$2, renames files containing $1, rewrites the
# BKC_MOD/bkc_mod identifiers inside sources and Makefiles, and installs
# the result into ../../../src/$2.
cp -pr "./$1" "./$2"
echo "\$1 is $1, \$2 is $2"

# Collect files whose names may embed the old module name.
# Note: the glob is '[ch]', not '[c|h]' — the latter also matches a literal '|'.
files_name=$(find "./$2" -name '*.[ch]' -type f -print)
files_name+=' '
files_name+=$(find "./$2" -name '*.dia' -type f -print)
files_name+=' '
files_name+=$(find "./$2" -name '*.jpeg' -type f -print)
files_name+=' '
files_name+=$(find "./$2" -name '*.txt' -type f -print)
echo $files_name

# Rename each collected file, substituting the old module name for the new one.
for file in $files_name; do
    mv "$file" "$(echo "$file" | sed "s/$1/$2/g")"
done

# Rewrite identifiers inside C sources/headers and Makefiles.
files_name=$(find "./$2" -name '*.[ch]' -type f -print)
files_name+=' '
files_name+=$(find "./$2" -name 'Makefile' -type f -print)
echo $files_name
for file in $files_name; do
    # Rewrite into a temp file, then atomically replace the original.
    sed -e "s/BKC_MOD/$(echo "$2" | tr '[:lower:]' '[:upper:]')/g" \
        -e "s/bkc_mod/$(echo "$2" | tr '[:upper:]' '[:lower:]')/g" "$file" > "$file.$$"
    rm -f "$file"
    mv "$file.$$" "$file"
done

# Install the renamed module into the source tree and clean up the working copy.
rm -rf "../../../src/$2"
cp -r "./$2" ../../../src/
rm -rf "./$2"
echo "$2 is in bkc/src, congratulations!!!"
|
<reponame>natflausino/cub3D
#include "libft.h"
/*
** ft_memset - fill the first n bytes of the memory area pointed to by s
** with the constant byte c (converted to unsigned char), like memset(3).
** Returns the original pointer s. Fix: the original declared and assigned
** `pointer` but never used it, re-casting s on every iteration instead.
*/
void *ft_memset(void *s, int c, size_t n)
{
	size_t i;
	unsigned char *pointer;

	pointer = (unsigned char *)s;
	i = 0;
	while (i < n)
	{
		pointer[i] = (unsigned char)c;
		i++;
	}
	return (s);
}
|
<filename>example/main.go
package main;
import(
"github.com/eftakhairul/go-gisbn/gisbn"
"fmt"
)
func main() {
g := gisbn.New("9780262033848", "<KEY>", "ca")
g.Fetch()
fmt.Println(g.Title()) //Introduction to Algorithms
} |
<reponame>wuximing/dsshop
import { assign } from '@antv/util';
import { EVENT_MAP, onEvent } from '../../util/event';
// Shape events supported by the area and line geometries. Rather than listing
// all 16 prop-name -> event-name pairs by hand, derive them from the shape and
// event-name lists (e.g. onAreaClick -> 'area:click').
const capitalize = (s) => s.charAt(0).toUpperCase() + s.slice(1);
const SHAPE_EVENT_MAP = {};
const shapes = ['area', 'line'];
const eventNames = [
  'click',
  'dblclick',
  'mousemove',
  'mousedown',
  'mouseup',
  'mouseenter',
  'mouseleave',
  'contextmenu',
];
for (const shape of shapes) {
  for (const eventName of eventNames) {
    SHAPE_EVENT_MAP[`on${capitalize(shape)}${capitalize(eventName)}`] = `${shape}:${eventName}`;
  }
}
// Merge the shape-specific events into the shared chart event map.
assign(EVENT_MAP, SHAPE_EVENT_MAP);
export { EVENT_MAP, onEvent };
//# sourceMappingURL=event.js.map
#!/bin/sh
# REST smoke-test harness for the FogLAMP storage service.
# Either pass the storage-service executable to launch as the single argument,
# or set FOGLAMP_ROOT so $FOGLAMP_ROOT/services/storage can be started.
# Tests are read from the comma-separated file "testset"; each line is:
#   name,method,url,payload(optional),optional-flag
# Responses land in results/<n> and are diffed against expected/<n>.
export FOGLAMP_DATA=.
if [ $# -eq 1 ] ; then
echo Starting storage layer $1
$1
elif [ "${FOGLAMP_ROOT}" != "" ] ; then
echo Starting storage service in $FOGLAMP_ROOT
$FOGLAMP_ROOT/services/storage
else
echo Must either set FOGLAMP_ROOT or provide storage service to test
exit 1
fi
# Fields in "testset" are comma separated.
export IFS=","
testNum=1
n_failed=0
n_passed=0
n_unchecked=0
./testSetup.sh > /dev/null 2>&1
rm -f failed
rm -rf results
mkdir results
# NOTE(review): piping into `while read` runs the loop in a subshell, so the
# counters above do not survive past `done`; that is why tests.result is
# rewritten on every iteration *inside* the loop and printed afterwards.
cat testset | while read name method url payload optional; do
echo -n "Test $testNum ${name}: "
# Issue the request, with or without a request body from payloads/.
if [ "$payload" = "" ] ; then
curl -X $method $url -o results/$testNum >/dev/null 2>&1
curlstate=$?
else
curl -X $method $url -d@payloads/$payload -o results/$testNum >/dev/null 2>&1
curlstate=$?
fi
if [ "$optional" = "" ] ; then
# Default mode: compare the response body to the expected/<n> fixture.
if [ ! -f expected/$testNum ]; then
n_unchecked=`expr $n_unchecked + 1`
echo Missing expected results for test $testNum - result unchecked
else
cmp -s results/$testNum expected/$testNum
if [ $? -ne "0" ]; then
echo Failed
n_failed=`expr $n_failed + 1`
if [ "$payload" = "" ]
then
echo Test $testNum ${name} curl -X $method $url >> failed
else
echo Test $testNum ${name} curl -X $method $url -d@payloads/$payload >> failed
fi
# Subshell with IFS unset so the fixture contents are echoed verbatim,
# not re-split on commas.
(
unset IFS
echo "    " Expected: "`cat expected/$testNum`" >> failed
echo "    " Got:      "`cat results/$testNum`" >> failed
)
echo >> failed
else
echo Passed
n_passed=`expr $n_passed + 1`
fi
fi
elif [ "$optional" = "checkstate" ] ; then
# "checkstate" mode: only require that curl itself succeeded.
if [ $curlstate -eq 0 ] ; then
echo Passed
n_passed=`expr $n_passed + 1`
else
echo Failed
n_failed=`expr $n_failed + 1`
if [ "$payload" = "" ]
then
echo Test $testNum curl -X $method $url >> failed
else
echo Test $testNum curl -X $method $url -d@payloads/$payload >> failed
fi
fi
fi
testNum=`expr $testNum + 1`
# Persist the running totals each iteration (see subshell note above).
rm -f tests.result
echo $n_failed Tests Failed > tests.result
echo $n_passed Tests Passed >> tests.result
echo $n_unchecked Tests Unchecked >> tests.result
done
./testCleanup.sh > /dev/null
cat tests.result
rm -f tests.result
# Non-zero exit (and a report) when any test recorded a failure.
if [ -f "failed" ]; then
echo
echo "Failed Tests"
echo "============"
cat failed
exit 1
fi
exit 0
|
# SAP HANA single-node provisioning script (SLES).
# Registers the machine with SUSE (optional), installs HANA prerequisites,
# builds LVM volumes for /hana/{data,log,shared,backup} and /usr/sap,
# downloads and unpacks the HANA media, then runs hdblcm and reboots.
# Arguments:
#   $1 Uri         - base URL hosting the SapBits installation media
#   $2 HANAUSR     - OS root user for the HANA installer config
#   $3 HANAPWD     - password to substitute into the installer config
#   $4 HANASID     - HANA system ID
#   $5 HANANUMBER  - HANA instance number
#   $6 vmSize      - VM size (currently unused below)
#   $7 SUBEMAIL    - SUSE subscription e-mail (optional)
#   $8 SUBID       - SUSE registration code
#   $9 SUBURL      - SUSE registration server URL (optional)
Uri=$1
HANAUSR=$2
HANAPWD=$3
HANASID=$4
HANANUMBER=$5
vmSize=$6
SUBEMAIL=$7
SUBID=$8
SUBURL=$9
#if needed, register the machine
if [ "$SUBEMAIL" != "" ]; then
    if [ "$SUBURL" != "" ]; then
        SUSEConnect -e $SUBEMAIL -r $SUBID --url $SUBURL
    else
        SUSEConnect -e $SUBEMAIL -r $SUBID
    fi
fi
#install hana prereqs
sudo zypper install -y glibc-2.22-51.6
sudo zypper install -y systemd-228-142.1
sudo zypper install -y unrar
sudo zypper install -y sapconf
sudo zypper install -y saptune
sudo mkdir /etc/systemd/login.conf.d
sudo mkdir /hana
sudo mkdir /hana/data
sudo mkdir /hana/log
sudo mkdir /hana/shared
sudo mkdir /hana/backup
sudo mkdir /usr/sap
sudo zypper se -t pattern
# Fix: -t/--type takes its TYPE argument immediately ("-t pattern"); the
# original "zypper in -t -y pattern sap-hana" fed "-y" to -t. Also run via
# sudo like the surrounding zypper calls.
sudo zypper in -y -t pattern sap-hana
sudo saptune solution apply HANA
# step2
echo $Uri >> /tmp/url.txt
# Enable the Azure agent's resource-disk swap (224 GiB).
cp -f /etc/waagent.conf /etc/waagent.conf.orig
sedcmd="s/ResourceDisk.EnableSwap=n/ResourceDisk.EnableSwap=y/g"
sedcmd2="s/ResourceDisk.SwapSizeMB=0/ResourceDisk.SwapSizeMB=229376/g"
cat /etc/waagent.conf | sed $sedcmd | sed $sedcmd2 > /etc/waagent.conf.new
cp -f /etc/waagent.conf.new /etc/waagent.conf
# Locate the SCSI host number for the data disks, then carve LUNs 4-6 into
# the hanavg volume group (80% data / 20% log).
number="$(lsscsi [*] 0 0 4| cut -c2)"
echo "logicalvols start" >> /tmp/parameter.txt
hanavg1lun="$(lsscsi $number 0 0 4 | grep -o '.\{9\}$')"
hanavg2lun="$(lsscsi $number 0 0 5 | grep -o '.\{9\}$')"
hanavg3lun="$(lsscsi $number 0 0 6 | grep -o '.\{9\}$')"
pvcreate $hanavg1lun $hanavg2lun $hanavg3lun
vgcreate hanavg $hanavg1lun $hanavg2lun $hanavg3lun
lvcreate -l 80%FREE -n datalv hanavg
lvcreate -l 20%VG -n loglv hanavg
mkfs.xfs /dev/hanavg/datalv
mkfs.xfs /dev/hanavg/loglv
echo "logicalvols end" >> /tmp/parameter.txt
# LUNs 0-3 become the shared, /usr/sap and backup volume groups.
echo "logicalvols2 start" >> /tmp/parameter.txt
sharedvglun="$(lsscsi $number 0 0 0 | grep -o '.\{9\}$')"
usrsapvglun="$(lsscsi $number 0 0 1 | grep -o '.\{9\}$')"
backupvglun1="$(lsscsi $number 0 0 2 | grep -o '.\{9\}$')"
backupvglun2="$(lsscsi $number 0 0 3 | grep -o '.\{9\}$')"
pvcreate $backupvglun1 $backupvglun2 $sharedvglun $usrsapvglun
vgcreate backupvg $backupvglun1 $backupvglun2
vgcreate sharedvg $sharedvglun
vgcreate usrsapvg $usrsapvglun
lvcreate -l 100%FREE -n sharedlv sharedvg
lvcreate -l 100%FREE -n backuplv backupvg
lvcreate -l 100%FREE -n usrsaplv usrsapvg
mkfs -t xfs /dev/sharedvg/sharedlv
mkfs -t xfs /dev/backupvg/backuplv
mkfs -t xfs /dev/usrsapvg/usrsaplv
echo "logicalvols2 end" >> /tmp/parameter.txt
# Mount the new volumes and make them persistent via /etc/fstab.
echo "mounthanashared start" >> /tmp/parameter.txt
mount -t xfs /dev/sharedvg/sharedlv /hana/shared
mount -t xfs /dev/backupvg/backuplv /hana/backup
mount -t xfs /dev/usrsapvg/usrsaplv /usr/sap
mount -t xfs /dev/hanavg/datalv /hana/data
mount -t xfs /dev/hanavg/loglv /hana/log
mkdir /hana/data/sapbits
echo "mounthanashared end" >> /tmp/parameter.txt
echo "write to fstab start" >> /tmp/parameter.txt
echo "/dev/mapper/hanavg-datalv /hana/data xfs defaults 0 0" >> /etc/fstab
echo "/dev/mapper/hanavg-loglv /hana/log xfs defaults 0 0" >> /etc/fstab
echo "/dev/mapper/sharedvg-sharedlv /hana/shared xfs defaults 0 0" >> /etc/fstab
echo "/dev/mapper/backupvg-backuplv /hana/backup xfs defaults 0 0" >> /etc/fstab
echo "/dev/mapper/usrsapvg-usrsaplv /usr/sap xfs defaults 0 0" >> /etc/fstab
echo "write to fstab end" >> /tmp/parameter.txt
if [ ! -d "/hana/data/sapbits" ]
then
    mkdir "/hana/data/sapbits"
fi
# Download the HANA installation media and the hdbinst template config.
cd /hana/data/sapbits
echo "hana download start" >> /tmp/parameter.txt
/usr/bin/wget --quiet $Uri/SapBits/md5sums
/usr/bin/wget --quiet $Uri/SapBits/51052325_part1.exe
/usr/bin/wget --quiet $Uri/SapBits/51052325_part2.rar
/usr/bin/wget --quiet $Uri/SapBits/51052325_part3.rar
/usr/bin/wget --quiet $Uri/SapBits/51052325_part4.rar
/usr/bin/wget --quiet "https://raw.githubusercontent.com/AzureCAT-GSI/Hana-Test-Deploy/master/hdbinst.cfg"
echo "hana download end" >> /tmp/parameter.txt
date >> /tmp/testdate
cd /hana/data/sapbits
echo "hana unrar start" >> /tmp/parameter.txt
cd /hana/data/sapbits
# part1.exe is a self-extracting RAR spanning parts 1-4.
unrar x 51052325_part1.exe
echo "hana unrar end" >> /tmp/parameter.txt
echo "hana prepare start" >> /tmp/parameter.txt
cd /hana/data/sapbits
cd /hana/data/sapbits
# Substitute hostname, media path, root user, password, SID and instance
# number into the downloaded installer template.
myhost=`hostname`
sedcmd="s/REPLACE-WITH-HOSTNAME/$myhost/g"
sedcmd2="s/\/hana\/shared\/sapbits\/51052325/\/hana\/data\/sapbits\/51052325/g"
sedcmd3="s/root_user=root/root_user=$HANAUSR/g"
sedcmd4="s/AweS0me@PW/$HANAPWD/g"
sedcmd5="s/sid=H10/sid=$HANASID/g"
sedcmd6="s/number=00/number=$HANANUMBER/g"
cat hdbinst.cfg | sed $sedcmd | sed $sedcmd2 | sed $sedcmd3 | sed $sedcmd4 | sed $sedcmd5 | sed $sedcmd6 > hdbinst-local.cfg
# Fix: marker now matches its "hana prepare start" counterpart ("preapre" typo).
echo "hana prepare end" >> /tmp/parameter.txt
# Run the batch-mode HANA lifecycle manager install, then reboot in 1 minute.
echo "install hana start" >> /tmp/parameter.txt
cd /hana/data/sapbits/51052325/DATA_UNITS/HDB_LCM_LINUX_X86_64
/hana/data/sapbits/51052325/DATA_UNITS/HDB_LCM_LINUX_X86_64/hdblcm -b --configfile /hana/data/sapbits/hdbinst-local.cfg
echo "install hana end" >> /tmp/parameter.txt
shutdown -r 1
|
package sword.langbook3.android.db;
import android.content.Intent;
import android.os.Bundle;
/**
 * Helpers to move {@code Correlation<AlphabetId>} values in and out of
 * Android Intents and Bundles via their Parcelable wrapper.
 */
public final class CorrelationBundler {

    /**
     * Reads a correlation stored as an Intent extra under the given key.
     * Returns null when the extra is absent.
     */
    public static Correlation<AlphabetId> readAsIntentExtra(Intent intent, String key) {
        final ParcelableCorrelation wrapped = intent.getParcelableExtra(key);
        if (wrapped == null) {
            return null;
        }
        return wrapped.get();
    }

    /**
     * Stores the given correlation as an Intent extra under the given key.
     * A null correlation is silently skipped.
     */
    public static void writeAsIntentExtra(Intent intent, String key, Correlation<AlphabetId> correlation) {
        if (correlation == null) {
            return;
        }
        intent.putExtra(key, new ParcelableCorrelation(correlation.toImmutable()));
    }

    /** Bundle counterpart of {@link #readAsIntentExtra(Intent, String)}. */
    public static Correlation<AlphabetId> read(Bundle bundle, String key) {
        final ParcelableCorrelation wrapped = bundle.getParcelable(key);
        return (wrapped == null) ? null : wrapped.get();
    }

    /** Bundle counterpart of {@link #writeAsIntentExtra(Intent, String, Correlation)}. */
    public static void write(Bundle bundle, String key, Correlation<AlphabetId> correlation) {
        if (correlation == null) {
            return;
        }
        bundle.putParcelable(key, new ParcelableCorrelation(correlation.toImmutable()));
    }

    // Static utility class; never instantiated.
    private CorrelationBundler() {
    }
}
|
package com.demo.config;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.HttpSecurityBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.access.AccessDeniedHandler;
import org.springframework.security.web.authentication.logout.LogoutHandler;
import org.springframework.security.web.authentication.logout.LogoutSuccessHandler;
import org.springframework.security.web.authentication.logout.SecurityContextLogoutHandler;
import org.springframework.security.web.authentication.rememberme.PersistentTokenBasedRememberMeServices;
import org.springframework.security.web.authentication.www.BasicAuthenticationFilter;
import org.springframework.security.web.authentication.www.DigestAuthenticationFilter;

/**
 * Spring Security configuration: in-memory users, role-based URL rules,
 * HTTP Basic + form login over HTTPS, and a customized logout flow.
 *
 * NOTE(review): WebSecurityConfigurerAdapter is deprecated in Spring
 * Security 5.7+ — confirm the project's Spring version before migrating.
 *
 * @author ankidaemon
 *
 */
@Configuration
@EnableWebSecurity
@ComponentScan(basePackages = "com.demo.config")
public class SecurityConfig extends WebSecurityConfigurerAdapter {

    /**
     * Registers two in-memory users: "ankidaemon" (role CHIEF) and "test"
     * (role USER).
     * NOTE(review): passwords are stored in plain text and "<PASSWORD>"
     * looks like an unreplaced template placeholder — confirm and replace
     * with an encoded secret before deploying.
     */
    @Autowired
    public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception {
        auth.inMemoryAuthentication().withUser("ankidaemon").password("password").roles("CHIEF")
            .and().withUser("test").password("<PASSWORD>").roles("USER");
    }

    /**
     * URL authorization rules and authentication mechanics:
     * - /chief/** requires role CHIEF; /agent/** requires role USER plus a
     *   principal-name check (SpEL expression); everything else just needs
     *   authentication.
     * - HTTP Basic is enabled and every request is forced onto HTTPS.
     * - Custom login page at /login, access-denied page at /accessDenied.
     * - Logout at /customlogout clears the security context, invalidates the
     *   session and deletes the JSESSIONID cookie before redirecting to "/".
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.authorizeRequests()
            .regexMatchers("/chief/.*").hasRole("CHIEF")
            .regexMatchers("/agent/.*").access("hasRole('USER') and principal.name='<NAME>'")
            .anyRequest().authenticated()
            .and().httpBasic()
            .and().requiresChannel().anyRequest().requiresSecure();
        http.formLogin().loginPage("/login").permitAll();
        http.exceptionHandling().accessDeniedPage("/accessDenied");
        //http.logout();
        http
            .logout()
            .logoutUrl("/customlogout")
            .logoutSuccessUrl("/")
            .logoutSuccessHandler(new CustomLogoutSuccessHandler())
            .invalidateHttpSession(true) //true by default
            .addLogoutHandler(new SecurityContextLogoutHandler())
            .deleteCookies("JSESSIONID");
    }
}
|
<filename>src/discovery/index.ts
import { CloudEventV1Service } from "../";
/**
 * Minimal structural type for an Express-style request handler, covering
 * only the request/response members the discovery endpoints actually use
 * (query.matching, url, res.end and res.status(...).json/end).
 */
interface RequestHandler {
  (
    req: { query: { matching: string }; url: string },
    res: {
      end: () => void;
      status: (
        code: number,
      ) => { json: (obj: Record<string, unknown> | CloudEventV1Service[]) => string; end: () => void };
    },
  ): void;
}
/**
 * DiscoveryService implements the discovery spec:
 *
 * It provides a way to register services programmatically and via annotation.
 *
 * It also includes an `express` method to expose the registry through your API.
 */
export class DiscoveryService {
  /**
   * Services mapped by name
   */
  static servicesMap: { [key: string]: CloudEventV1Service } = {};
  /**
   * Events mapped by type
   */
  static eventsMap: { [key: string]: CloudEventV1Service[] } = {};

  /**
   * Register your service to the DiscoveryService
   *
   * Annotation to declare a Service
   *
   * TODO Implement after discussion with team
   * It would add a @Service in front of class to declare a new service
   *
   * @returns {void}
   */
  static Service(): void {
    // TO IMPLEMENT
  }

  /**
   * Register your service to the DiscoveryService
   *
   * @param {CloudEventV1Service} service to register
   * @throws {Error} when a service with the same name is already registered
   * @returns {void}
   */
  static registerService(service: CloudEventV1Service): void {
    if (DiscoveryService.servicesMap[service.name]) {
      throw new Error(`Service ${service.name} is already registered`);
    }
    DiscoveryService.servicesMap[service.name] = service;
    // Index the service under each event type it declares.
    service.types.forEach((evt) => {
      DiscoveryService.eventsMap[evt.type] = DiscoveryService.eventsMap[evt.type] || [];
      DiscoveryService.eventsMap[evt.type].push(service);
    });
  }

  /**
   * Retrieve all services
   * @returns {CloudEventV1Service[]} array of services
   */
  static getServices(): CloudEventV1Service[] {
    return this.searchService();
  }

  /**
   * Search for a service
   * @param {string} term to search for, case insensitive; "" matches all
   * @returns {CloudEventV1Service[]} array of filtered services
   */
  static searchService(term = ""): CloudEventV1Service[] {
    const searchTerm = term.toLowerCase();
    return Object.keys(DiscoveryService.servicesMap)
      .filter((k) => term === "" || k.toLowerCase().includes(searchTerm))
      .map((k) => DiscoveryService.servicesMap[k]);
  }

  /**
   * Retrieve all event types
   * @returns {{[key: string]: CloudEventV1Service[]}} map of services by type
   */
  static getTypes(): { [key: string]: CloudEventV1Service[] } {
    return this.searchType();
  }

  /**
   * Search for a type of event
   * @param {string} term to search for, case insensitive; "" matches all
   * @returns {{[key: string]: CloudEventV1Service[]}} map of services by filtered types
   */
  static searchType(term = ""): { [key: string]: CloudEventV1Service[] } {
    const searchTerm = term.toLowerCase();
    const result: { [key: string]: CloudEventV1Service[] } = {};
    Object.keys(DiscoveryService.eventsMap)
      .filter((k) => term === "" || k.toLowerCase().includes(searchTerm))
      .forEach((k) => (result[k] = DiscoveryService.eventsMap[k]));
    return result;
  }

  /**
   * Express handler
   *
   * You can add it
   *
   * @param {Object} app Your express app
   * @param {string} prefix Prefix for all discovery url
   * @param {Object} permissions Callback to implement CloudEvent permissions
   * @returns {void}
   */
  static express(
    app: { get: (path: string | RegExp, handler: RequestHandler) => void },
    prefix = "",
    permissions: (name: string, type: "Service" | "Type", req: unknown) => boolean = () => true,
  ): void {
    /**
     * Based on the spec
     * Note: for each query if the client is not authorized to see any particular
     * entity then that entity MUST be excluded from the response.
     *
     * Therefore we filter events from service as they are an entity
     *
     * TODO Need to confirm this with the group
     * @param {CloudEventV1Service} object a service object
     * @param {Record<string, unknown>} req a request object
     * @returns {CloudEventV1Service} service with unauthorized types removed
     */
    const filterTypes = (object: CloudEventV1Service, req: unknown) => ({
      ...object,
      types: object.types.filter((type) => permissions(type.type, "Type", req)),
    });

    // Implement services listing
    app.get(`${prefix}/services`, (req, res) => {
      const term = req.query.matching || "";
      res.status(200).json(
        DiscoveryService.searchService(term)
          .filter((service) => permissions(service.name, "Service", req))
          .map((service) => filterTypes(service, req)),
      );
      res.end();
    });

    app.get(new RegExp(`${prefix}/services/.+`), (req, res) => {
      // slice() rather than the deprecated String.prototype.substr().
      const name = req.url.slice(prefix.length + "/services/".length);
      if (
        // Non existing service
        !DiscoveryService.servicesMap[name] ||
        // User does not have permission
        !permissions(name, "Service", req)
      ) {
        res.status(404).end();
        return;
      }
      res.status(200).json(filterTypes(DiscoveryService.servicesMap[name], req));
    });

    app.get(`${prefix}/types`, (req, res) => {
      const term = req.query.matching || "";
      const types = DiscoveryService.searchType(term);
      const result: { [key: string]: CloudEventV1Service[] } = {};
      for (const i in types) {
        if (permissions(i, "Type", req)) {
          result[i] = types[i]
            .filter((service) => permissions(service.name, "Service", req))
            .map((service) => filterTypes(service, req));
        }
      }
      res.status(200).json(result);
      res.end();
    });

    app.get(new RegExp(`${prefix}/types/.+`), (req, res) => {
      // slice() rather than the deprecated String.prototype.substr().
      const name = req.url.slice(prefix.length + "/types/".length);
      if (
        // Non existing type
        !DiscoveryService.eventsMap[name] ||
        // User does not have permission
        !permissions(name, "Type", req)
      ) {
        res.status(404).end();
        return;
      }
      // Filter service and events from service
      const result = DiscoveryService.eventsMap[name]
        .filter((service) => permissions(service.name, "Service", req))
        .map((service) => filterTypes(service, req));
      // If no service with this event is available to the user return 404
      if (result.length === 0) {
        res.status(404).end();
        return;
      }
      res.status(200).json({ [name]: result });
    });
  }
}
|
import web3 from '../services/web3';
import * as SPHToken from '../build/SPHToken.json';

// On-chain address of the deployed SPHToken contract.
const SPH_TOKEN_ADDRESS = '0xd0d383a4D93C1B9e11a71Fc013f8d7AD98481B00';

// Shared, ready-to-use contract handle built from the compiled ABI.
export default new web3.eth.Contract(SPHToken.abi as any, SPH_TOKEN_ADDRESS);
|
/**
 * Checks whether $str reads the same forwards and backwards by comparing
 * characters from both ends toward the middle. Empty and single-character
 * strings are palindromes.
 *
 * @param string $str the string to test
 * @return bool true when $str is a palindrome
 */
function isPalindrome($str)
{
    for ($i = 0, $j = strlen($str) - 1; $i < $j; $i++, $j--) {
        if ($str[$i] != $str[$j]) {
            return false;
        }
    }
    return true;
}
<filename>src/components/Footer.js
import React from 'react'
import './styles/Footer.css';
export function Footer() {
return (
<footer className='Footer'>
Footer goes here
</footer>
)
}
|
#!/bin/bash
#BASE shell script 'module'
#Sets up BASE for for Autosnort
#Updated on 2/1/2014
########################################
#logging setup: Stack Exchange made this.
# Everything written to stdout/stderr is tee'd into $base_logfile via a FIFO.
base_logfile=/var/log/base_install.log
mkfifo ${base_logfile}.pipe
tee < ${base_logfile}.pipe $base_logfile &
exec &> ${base_logfile}.pipe
rm ${base_logfile}.pipe
########################################
#Metasploit-like print statements: status, good, bad and notification. Gratouitiously copied from Darkoperator's metasploit install script.
function print_status ()
{
echo -e "\x1B[01;34m[*]\x1B[0m $1"
}
function print_good ()
{
echo -e "\x1B[01;32m[*]\x1B[0m $1"
}
function print_error ()
{
echo -e "\x1B[01;31m[*]\x1B[0m $1"
}
function print_notification ()
{
echo -e "\x1B[01;33m[*]\x1B[0m $1"
}
########################################
#grab packages for BASE, and supresses the notification for libphp-adodb. Most of the primary required packages are pulled by the main AS script.
print_status "Grabbing packages required for BASE.."
# Pre-seed debconf so the libphp-adodb path-move prompt never appears.
echo libphp-adodb libphp-adodb/pathmove note | debconf-set-selections
apt-get install -y libphp-adodb ca-certificates php-pear libwww-perl php5 php5-mysql php5-gd &>> $base_logfile
if [ $? != 0 ];then
print_error "Failed to acquire required packages for Base. See $base_logfile for details."
exit 1
else
print_good "Successfully acquired packages."
fi
########################################
#These are php-pear config commands Seen in the 2.9.4.0 install guide for Debian.
print_status "Setting php-pear options.."
pear config-set preferred_state alpha &>> $base_logfile
pear channel-update pear.php.net &>> $base_logfile
pear install --alldeps Image_Color Image_Canvas Image_Graph &>> $base_logfile
print_good "Successfully configured php-pear options."
#Have to adjust PHP logging otherwise BASE will barf on startup.
print_status "Reconfiguring php error reporting for BASE.."
sed -i 's/error_reporting \= E_ALL \& ~E_DEPRECATED/error_reporting \= E_ALL \& ~E_NOTICE/' /etc/php5/apache2/php.ini
########################################
#The BASE tarball creates a directory for us, all we need to do is move to webroot.
print_status "Installing BASE.."
cd /var/www/
# We need to grab BASE from sourceforge. If this fails, we exit the script with a status of 1
# A check is built into the main script to verify this script exits cleanly. If it doesn't,
# The user should be informed and brought back to the main interface selection menu.
print_status "Grabbing BASE via Sourceforge.."
wget http://sourceforge.net/projects/secureideas/files/BASE/base-1.4.5/base-1.4.5.tar.gz -O base-1.4.5.tar.gz &>> $base_logfile
if [ $? != 0 ];then
print_error "Attempt to pull down BASE failed. See $base_logfile for details."
exit 1
else
print_good "Successfully downloaded the BASE tarball."
fi
tar -xzvf base-1.4.5.tar.gz &>> $base_logfile
if [ $? != 0 ];then
print_error "Attempt to install BASE has failed. See $base_logfile for details."
exit 1
else
print_good "Successfully installed base to /var/www/base."
fi
rm base-1.4.5.tar.gz
mv base-* base
#BASE requires the /var/www/ directory to be owned by www-data
print_status "Granting ownership of /var/www to www-data user and group."
chown -R www-data:www-data /var/www
########################################
#These are virtual host settings. The 000-default.conf virtual host forces redirect of all traffic to https (SSL, port 443) to ensure console traffic is encrypted and secure. We then enable the new SSL site we made, and restart apache to start serving it.
# NOTE(review): the restore path "000-default.confsiteconfbak" in the message
# below looks garbled (missing separator before "siteconfbak") — confirm the
# actual backup filename the main script creates.
print_status "Configuring Virtual Host Settings for Base.."
echo "#This 000-default.conf vhost config geneated by autosnort. To remove, run cp /etc/apache2/000-default.confsiteconfbak /etc/apache2/sites-available/000-default.conf" > /etc/apache2/sites-available/000-default.conf
echo "#This VHOST exists as a catch, to redirect any requests made via HTTP to HTTPS." >> /etc/apache2/sites-available/000-default.conf
echo "<VirtualHost *:80>" >> /etc/apache2/sites-available/000-default.conf
echo "	DocumentRoot /var/www/base" >> /etc/apache2/sites-available/000-default.conf
echo "	#Mod_Rewrite Settings. Force everything to go over SSL." >> /etc/apache2/sites-available/000-default.conf
echo "	RewriteEngine On" >> /etc/apache2/sites-available/000-default.conf
echo "	RewriteCond %{HTTPS} off" >> /etc/apache2/sites-available/000-default.conf
echo "	RewriteRule (.*) https://%{HTTP_HOST}%{REQUEST_URI}" >> /etc/apache2/sites-available/000-default.conf
echo "</VirtualHost>" >> /etc/apache2/sites-available/000-default.conf
echo "#This is an SSL VHOST added by autosnort. Simply remove the file if you no longer wish to serve the web interface." > /etc/apache2/sites-available/base-ssl.conf
echo "<VirtualHost *:443>" >> /etc/apache2/sites-available/base-ssl.conf
echo "	#Turn on SSL. Most of the relevant settings are set in /etc/apache2/mods-available/ssl.conf" >> /etc/apache2/sites-available/base-ssl.conf
echo "	SSLEngine on" >> /etc/apache2/sites-available/base-ssl.conf
echo "" >> /etc/apache2/sites-available/base-ssl.conf
echo "	#Mod_Rewrite Settings. Force everything to go over SSL." >> /etc/apache2/sites-available/base-ssl.conf
echo "	RewriteEngine On" >> /etc/apache2/sites-available/base-ssl.conf
echo "	RewriteCond %{HTTPS} off" >> /etc/apache2/sites-available/base-ssl.conf
echo "	RewriteRule (.*) https://%{HTTP_HOST}%{REQUEST_URI}" >> /etc/apache2/sites-available/base-ssl.conf
echo "" >> /etc/apache2/sites-available/base-ssl.conf
echo "	#Now, we finally get to configuring our VHOST." >> /etc/apache2/sites-available/base-ssl.conf
echo "	ServerName base.localhost" >> /etc/apache2/sites-available/base-ssl.conf
echo "	DocumentRoot /var/www/base" >> /etc/apache2/sites-available/base-ssl.conf
echo "</VirtualHost>" >> /etc/apache2/sites-available/base-ssl.conf
########################################
#enable our vhost and restart apache to serve them.
a2ensite 000-default.conf
if [ $? -ne 0 ]; then
print_error "Failed to enable default virtual host. See $base_logfile for details."
exit 1
else
print_good "Successfully made virtual host changes."
fi
a2ensite base-ssl.conf &>> $base_logfile
if [ $? -ne 0 ]; then
print_error "Failed to enable base-ssl.conf virtual host. See $base_logfile for details."
exit 1
else
print_good "Successfully made virtual host changes."
fi
service apache2 restart &>> $base_logfile
if [ $? -ne 0 ]; then
print_error "Failed to restart apache2. See $base_logfile for details."
exit 1
else
print_good "Successfully restarted apache2."
fi
print_notification "The log file for this interface installation is located at: $base_logfile"
exit 0
#!/bin/bash
# Deploy the static web app: apply the bucket policy, configure the REST
# endpoint and tenant bucket in js/main.js, then sync everything to S3.
echo " 1. The REST API must be available and configured in env.sh"
echo " 2. This script copies the web module to a temporary directory and configures the REST endpoint"
echo " 3. It then copies it to the configured S3 bucket (which should be configured for static website hosting)"
. ./env.sh
export TEMP_DIR=temp-web-package
rm -rf "$TEMP_DIR"
mkdir "$TEMP_DIR"
# Bug fix: 'echo pwd' printed the literal word "pwd"; print the actual
# working directory instead.
pwd
echo ""
echo "Step 0 - Applying bucket policy"
echo "========================"
sed "s/EXAMPLE-BUCKET.*/"${S3_BUCKET}"\/*\"/" s3-policy.json > "$TEMP_DIR/policy.json"
aws2 s3api put-bucket-policy --bucket "$S3_BUCKET" --policy "file://$TEMP_DIR/policy.json"
pwd
echo ""
echo "Step 1 - Copy artifacts"
echo "========================"
rm -rf "$TEMP_DIR"
mkdir "$TEMP_DIR"
cp -R ../web/src/main/webapp/* "$TEMP_DIR"
cd "$TEMP_DIR/js"
echo "Step 2 - Configuring REST API in js/backend.js"
echo "========================"
# Replace the first line of main.js with the configured service URL.
mv main.js main.js.BAK
echo "SERVICE_URL = '$SERVICE_API'" > main.js
sed '1d' main.js.BAK >> main.js
echo "Step 3 - Configuring TENANT_S3 BUCKET in js/backend.js"
echo "========================"
mv main.js main.js.BAK.2
sed "s/DEFAULT_TENANT=.*/"DEFAULT_TENANT=\"${S3_TENANT_BUCKET}"\"/" main.js.BAK.2 > main.js
cd ..
# Bug fix: the original used a JavaScript-style '+' to join strings,
# which printed a literal "+"; interpolate the bucket name instead.
echo "Step 4 - Publishing to S3 Bucket: $S3_BUCKET"
echo "========================"
aws2 s3 cp . "s3://$S3_BUCKET" --recursive
export REGION=`aws2 configure get region`
echo "====================================="
echo "Point your browser to http://"$S3_BUCKET".s3."$REGION".amazonaws.com/index.html"
echo "DONE!"
<reponame>iostrovok/cacheproxy<filename>server.go<gh_stars>0
package cacheproxy
import (
"context"
"github.com/iostrovok/cacheproxy/config"
"github.com/iostrovok/cacheproxy/handler"
)
// Server runs the cache proxy described by cfg until handler.Start
// returns (e.g. when ctx is cancelled). When no fixed FileName is
// configured, dynamic file naming is switched on before starting.
func Server(ctx context.Context, cfg *config.Config) error {
	if len(cfg.FileName) == 0 {
		cfg.DynamoFileName = true
	}
	return handler.Start(ctx, cfg)
}
|
#!/usr/bin/env bash
# Publish every directory under packages/ to npm with the "alpha" dist-tag.
for dir in `find packages -mindepth 1 -maxdepth 1 -type d | sort -nr`; do
    # Run each publish in a subshell so a failed 'cd' cannot leave us in the
    # wrong directory (the original 'cd ../..' went wrong whenever cd failed).
    # "$dir" is quoted to survive package names containing spaces.
    (
        cd "$dir" || exit 1
        echo "$PWD"
        npm publish --tag alpha
    )
done
|
class Manifest:
    """Wraps the collection of paths covered by a single manifest."""
    def __init__(self, paths):
        # paths: container of path strings; membership is tested with
        # `in` by ManifestManager.manifest_at().
        self.paths = paths
class ManifestManager:
    """Fetches manifests through a matcher and answers path lookups."""

    def __init__(self, matcher):
        # The matcher supplies raw manifests (lists of paths) on demand.
        self.__matcher = matcher
        self.manifests = []

    def _fetch_manifest(self, repository, index):
        """Fetch one manifest and append it, wrapped in a Manifest."""
        manifest = self.__matcher._fetch_manifest(repository, index)
        self.manifests.append(Manifest(manifest))

    def fetch_all_manifests(self, repository=None):
        """Replace self.manifests with every manifest the matcher knows.

        Bug fix: entries are now wrapped in Manifest objects; previously the
        raw path lists were stored here, which broke manifest_at() (it reads
        `.paths` on each entry, as _fetch_manifest's wrapping expects).
        `repository` defaults to self.repository for backward compatibility
        with the original (attribute-based) call shape.
        """
        if repository is None:
            repository = self.repository
        self.manifest_matches = range(self.__matcher.get_manifest_count())
        self.manifests = [
            Manifest(self.__matcher._fetch_manifest(repository, i))
            for i in self.manifest_matches
        ]

    def manifest_at(self, path):
        """Return the first fetched manifest containing `path`, else None."""
        for d in self.manifests:
            if path in d.paths:
                return d
        return None
-- Drop the per-package download counter column from the packages table.
ALTER TABLE packages DROP COLUMN downloads;
public class Palindrome {

    /**
     * Reports whether {@code str} reads the same forwards and backwards.
     * Empty and one-character strings are palindromes.
     *
     * @param str the string to test (must not be null)
     * @return true if {@code str} is a palindrome
     */
    static boolean isPalindrome(String str) {
        // Two-pointer scan: compare mirrored characters until they meet.
        for (int i = 0, j = str.length() - 1; i < j; i++, j--) {
            if (str.charAt(i) != str.charAt(j)) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        String str = "testset";
        if (isPalindrome(str))
            System.out.println("The given string is Palindrome.");
        else
            System.out.println("The given string is not Palindrome.");
    }
}
/*
Copyright (c) 2014, 2015 InternetWide.org and the ARPA2.net project
All rights reserved. See file LICENSE for exact terms (2-clause BSD license).
<NAME> <<EMAIL>>
*/
#include "fcgi.h"
#include "logger.h"
#include "crank.h"
// Banner string logged once at startup.
static const char* copyright = "Copyright (C) 2014, 2015 InternetWide.org and the ARPA2.net project";
// Entry point: configure logging from crank.properties, create the crank
// dispatcher, and hand it to the FCGI main loop, which serves requests.
int main(int argc, char** argv)
{
	SteamWorks::Logging::Manager logManager("crank.properties");
	SteamWorks::Logging::getRoot().debugStream() << "Steamworks Crank " << copyright;
	// NOTE(review): dispatcher is never deleted; presumably the mainloop
	// runs for the process lifetime so the leak is intentional — confirm.
	CrankDispatcher* dispatcher = new CrankDispatcher();
	SteamWorks::FCGI::init_logging("crank.fcgi");
	SteamWorks::FCGI::mainloop(dispatcher);
	return 0;
}
|
#!/bin/bash
# Scaffold a new LeetCode solution under ./Swift/ from ./code.swift:
# create <slug>.swift with a documentation header, append the code,
# reset code.swift, regenerate README.md, and push with a message.
# Usage: <script> <problem-slug>
if [ $# -ne 1 ]; then
    echo "Needs File Name as Argument."
    # Bug fix: error paths previously exited 0, reporting success.
    exit 1
fi
if [ ! -f ./code.swift ]; then
    echo "Source Code file: code.swift not exists."
    exit 1
fi
echo "$1"
echo "- Create file $1.swift"
touch "./Swift/$1.swift"
echo "- Add File Document Header."
echo "/**" >> "./Swift/$1.swift"
echo " * https://leetcode.com/problems/$1/" >> "./Swift/$1.swift"
echo " * " >> "./Swift/$1.swift"
echo " * " >> "./Swift/$1.swift"
echo " */ " >> "./Swift/$1.swift"
echo "// Date: $(date)" >> "./Swift/$1.swift"
echo "- Copy Source Code to $1.swift tail."
cat ./code.swift >> "./Swift/$1.swift"
echo "- Clear code.swift"
echo "" > ./code.swift
echo "- Generate README.md"
swift readme_gen.swift > README.md
./push_with_msg.sh "Add $1.swift"
var gulp = require('gulp');
var configuration = require('../configuration.js');

// Copies all static assets from the source tree into the build target.
gulp.task('copy', function() {
  // Bug fix: return the stream so gulp can detect when the task has
  // finished; without the return the task was treated as synchronous and
  // could be reported complete before the files were copied.
  return gulp.src(configuration.source.root + '/assets/**')
    .pipe(gulp.dest(configuration.target.root + '/assets'));
});
#! /bin/bash -e
# Build the TensorFlow pip wheel with Bazel and reinstall it for the
# Python version selected by $use_python3 (expected from flags.sh).
source flags.sh
# export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/home/poweic/Local/ATG/acore/acore/python/
bazel build $compile_flags //tensorflow/tools/pip_package:build_pip_package
# Write the built wheel into /mnt.
bazel-bin/tensorflow/tools/pip_package/build_pip_package /mnt
if [ $use_python3 -eq 1 ]
then
    echo "Use Python3"
    pip3 uninstall -y tensorflow
    pip3 install /mnt/tensorflow-1.9.0-cp36-cp36m-linux_x86_64.whl
else
    echo "Use Python2"
    pip2.7 uninstall -y tensorflow
    pip2.7 install /mnt/tensorflow-1.9.0rc0-cp27-cp27mu-linux_x86_64.whl
fi
class Library:
    """In-memory catalogue of books, each a {"title", "author"} dict."""

    def __init__(self):
        self.books = []

    def add_book(self, title, author):
        """Register a new book with the given title and author."""
        entry = {"title": title, "author": author}
        self.books.append(entry)

    def remove_book(self, title):
        """Remove every book whose title matches exactly."""
        remaining = []
        for entry in self.books:
            if entry["title"] != title:
                remaining.append(entry)
        self.books = remaining

    def search_by_title(self, title):
        """Return all books whose title contains the given substring."""
        return [entry for entry in self.books if title in entry["title"]]

    def search_by_author(self, author):
        """Return all books whose author matches exactly."""
        hits = []
        for entry in self.books:
            if entry["author"] == author:
                hits.append(entry)
        return hits

    def total_books(self):
        """Number of books currently in the catalogue."""
        return len(self.books)
# Usage example: build a small catalogue, query it, then delete one entry.
library = Library()
library.add_book("The Great Gatsby", "F. Scott Fitzgerald")
library.add_book("To Kill a Mockingbird", "Harper Lee")
library.add_book("1984", "George Orwell")
print(library.total_books()) # Output: 3
print(library.search_by_title("The")) # Output: [{'title': 'The Great Gatsby', 'author': 'F. Scott Fitzgerald'}]
print(library.search_by_author("Harper Lee")) # Output: [{'title': 'To Kill a Mockingbird', 'author': 'Harper Lee'}]
library.remove_book("1984")
print(library.total_books()) # Output: 2
<gh_stars>1-10
const { age, graduation, modalidad, date, grade } = require('../lib/utils')
const Student = require('../models/Student')
const teachers = require('./teachers')
module.exports = {
index (req, res) {
let {filter, page, limit} = req.query
page = page || 1
limit = limit || 3
let offset = limit * (page -1)
const params = {
filter,
page,
limit,
offset,
callback (students) {
const pagination = {
page,
total: Math.ceil(students[0].total / limit)
}
students.map(student => {
student.education_level = grade(student.education_level)
return student
})
return res.render('students/index', { students, filter, pagination })
}
}
Student.paginate(params)
},
create (req, res) {
Student.teacherSelectOption(function (options) {
return res.render('students/create', {teachersOptions: options})
})
},
post (req, res) {
const keys = Object.keys(req.body)
for (key of keys) {
if (req.body[key] == "") {
return res.send('Preencha todos os campos!')
}
}
Student.create (req.body, function (student) {
return res.redirect(`/students/${student}`)
})
},
show (req, res) {
Student.find (req.params.id, function (student) {
if (!student) return res.send('student not found')
student.age = age(student.birth)
student.birth = date(student.birth).format
student.education_level = grade(student.education_level)
return res.render('students/show', { student })
})
},
edit (req,res) {
Student.find(req.params.id, function (student) {
if (!student) return res.send ('Studnet not found')
student.birth = date(student.birth).iso
Student.teacherSelectOption (function (options) {
return res.render('students/edit', { student, teachersOptions: options })
})
})
},
put (req, res) {
const keys = Object.keys(req.body)
for (key of keys) {
if (req.body[key] == "") {
return res.send('Preencha todos os campos!')
}
}
Student.update(req.body, function (student) {
return res.redirect(`/students/${req.body.id}`)
})
},
delete (req, res) {
Student.delete (req.body.id, function (student) {
return res.redirect (`/students`)
})
}
} |
#!/bin/bash
# Round-trip tests/src/js/IndexJs.re through the CPS ppx: parse the Reason
# source to a binary AST, transform it with cps_ppx, then pretty-print the
# result back to Reason syntax as IndexJsCpsed.re.
refmt --print=binary tests/src/js/IndexJs.re | ./cps_ppx.exe /dev/stdin /tmp/cps-ppx.out.tmp
refmt --print-width=140 --parse=binary --print=re /tmp/cps-ppx.out.tmp > tests/src/js/IndexJsCpsed.re
#!/bin/bash
# Copyright 2019 The Kubernetes Authors.
# Copyright 2020 The OpenEBS Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

## find or download controller-gen
CONTROLLER_GEN=$(which controller-gen)

if [ "$CONTROLLER_GEN" = "" ]
then
	echo "ERROR: failed to get controller-gen, Please run make bootstrap to install it";
	exit 1;
fi

# Regenerate all CRD yamls from the Go API types into deploy/yamls.
$CONTROLLER_GEN crd:trivialVersions=false,preserveUnknownFields=false paths=./pkg/apis/... output:crd:artifacts:config=deploy/yamls

## create the the crd yamls

# Each section below prepends a banner comment to one generated CRD file,
# renames it to its published name, and removes the generated original.
echo '

##############################################
###########                       ############
###########   ZFSVolume CRD       ############
###########                       ############
##############################################

# ZFSVolume CRD is autogenerated via `make manifests` command.
# Do the modification in the code and run the `make manifests` command
# to generate the CRD definition' > deploy/yamls/zfsvolume-crd.yaml

cat deploy/yamls/zfs.openebs.io_zfsvolumes.yaml >> deploy/yamls/zfsvolume-crd.yaml
rm deploy/yamls/zfs.openebs.io_zfsvolumes.yaml

echo '

##############################################
###########                       ############
###########   ZFSSnapshot CRD     ############
###########                       ############
##############################################

# ZFSSnapshot CRD is autogenerated via `make manifests` command.
# Do the modification in the code and run the `make manifests` command
# to generate the CRD definition' > deploy/yamls/zfssnapshot-crd.yaml

cat deploy/yamls/zfs.openebs.io_zfssnapshots.yaml >> deploy/yamls/zfssnapshot-crd.yaml
rm deploy/yamls/zfs.openebs.io_zfssnapshots.yaml

echo '

##############################################
###########                       ############
###########   ZFSBackup CRD       ############
###########                       ############
##############################################

# ZFSBackups CRD is autogenerated via `make manifests` command.
# Do the modification in the code and run the `make manifests` command
# to generate the CRD definition' > deploy/yamls/zfsbackup-crd.yaml

cat deploy/yamls/zfs.openebs.io_zfsbackups.yaml >> deploy/yamls/zfsbackup-crd.yaml
rm deploy/yamls/zfs.openebs.io_zfsbackups.yaml

echo '

##############################################
###########                       ############
###########   ZFSRestore CRD      ############
###########                       ############
##############################################

# ZFSRestores CRD is autogenerated via `make manifests` command.
# Do the modification in the code and run the `make manifests` command
# to generate the CRD definition' > deploy/yamls/zfsrestore-crd.yaml

cat deploy/yamls/zfs.openebs.io_zfsrestores.yaml >> deploy/yamls/zfsrestore-crd.yaml
rm deploy/yamls/zfs.openebs.io_zfsrestores.yaml

echo '

##############################################
###########                       ############
###########   ZFSNode CRD         ############
###########                       ############
##############################################

# ZFSNode CRD is autogenerated via `make manifests` command.
# Do the modification in the code and run the `make manifests` command
# to generate the CRD definition' > deploy/yamls/zfsnode-crd.yaml

cat deploy/yamls/zfs.openebs.io_zfsnodes.yaml >> deploy/yamls/zfsnode-crd.yaml
rm deploy/yamls/zfs.openebs.io_zfsnodes.yaml

## create the operator file using all the yamls

# The operator manifest is the concatenation of the namespace, every CRD
# generated above, and the driver deployment, in that order.
echo '# This manifest is autogenerated via `make manifests` command
# Do the modification to the zfs-driver.yaml in directory deploy/yamls/
# and then run `make manifests` command

# This manifest deploys the OpenEBS ZFS control plane components,
# with associated CRs & RBAC rules.
' > deploy/zfs-operator.yaml

# Add namespace creation to the Operator yaml
cat deploy/yamls/namespace.yaml >> deploy/zfs-operator.yaml

# Add ZFSVolume v1alpha1 and v1 CRDs to the Operator yaml
cat deploy/yamls/zfsvolume-crd.yaml >> deploy/zfs-operator.yaml

# Add ZFSSnapshot v1alpha1 and v1 CRDs to the Operator yaml
cat deploy/yamls/zfssnapshot-crd.yaml >> deploy/zfs-operator.yaml

# Add ZFSBackup v1 CRDs to the Operator yaml
cat deploy/yamls/zfsbackup-crd.yaml >> deploy/zfs-operator.yaml

# Add ZFSRestore v1 CRDs to the Operator yaml
cat deploy/yamls/zfsrestore-crd.yaml >> deploy/zfs-operator.yaml

# Add ZFSNode v1alpha1 CRDs to the Operator yaml
cat deploy/yamls/zfsnode-crd.yaml >> deploy/zfs-operator.yaml

# Add the driver deployment to the Operator yaml
cat deploy/yamls/zfs-driver.yaml >> deploy/zfs-operator.yaml

# To use your own boilerplate text use:
#   --go-header-file ${SCRIPT_ROOT}/hack/custom-boilerplate.go.txt
|
# frozen_string_literal: true
module Qernel
module FeverFacade
# Represents a Fever participant which will store excess energy from
# elsewhere, convert it to heat, and make it available for use later.
class StorageAdapter < ProducerAdapter
# Prevents the demand of the producer being included twice in the Merit
# order: once as P2H and again in the Fever hot water curve.
def producer_for_electricity_demand
nil
end
def participant
@participant ||=
Fever::Activity.new(
Fever::ReserveProducer.new(
total_value(:heat_output_capacity),
reserve
)
)
end
def inject!
super
inject_curve!(:input) { participant.producer.input_curve }
end
def input?(*)
# Storage adapters are a "dump" in which excess electricity is converted
# to heat. Their heat demand is _not_ to be accounted for in Merit
# otherwise their electricity consumption will be included twice.
false
end
private
def reserve
Merit::Flex::SimpleReserve.new(
total_value { @node.dataset_get(:storage).volume }
)
end
end
end
end
|
# Distributed under the MIT License.
# See LICENSE.txt for details.
# This file defines the framework for running simple textual checks on
# files in the repo and defines a collection of standard checks.
# A check C consists of three functions:
# - `C`, which takes one filename as argument and should return true
# if there is a problem with the file, and
# - `C_report`, which takes the list of bad files and should print a
# message about them, and
# - `C_test`, which is used to test the check if the global $1 =
# --test.
# The report function will not be called if there are no bad files.
# `C_test` should consist of several calls to the test_check function.
# Exit with a failure message
die() {
    # Print the supplied message — or "died" when none was given — on
    # stderr, then abort with a non-zero status.
    echo "${1:-died}" >&2
    exit 1
}
# Option to enable color in grep or the empty string if grep does not
# support color
color_option=''
if grep --help 2>&1 | grep -q -e --color ; then
    color_option='--color=auto'
fi

# Utility that uses grep on the staged version of the file specified by the last argument
# does not work with multiple files as argument
staged_grep() {
    # git show ":./path/to/file" shows the content of the file as it appears in the staging area
    git show ":./${@: -1}" | grep "${@:1:$(($#-1))}"
}

# Utility function for reporters that enables lots of decorators in grep
# Works like staged_grep
pretty_grep() {
    # Print the file name in magenta, then the matching staged lines with
    # line numbers and (when supported) highlighted matches.
    echo -n -e "\033[0;35m${@: -1}\033[0m:"
    git show ":./${@: -1}" | GREP_COLOR='1;37;41' grep -n $color_option "${@:1:$(($#-1))}"
}
# Utility functions for checks classifying a file based on its name
is_includible() { case "$1" in *.hpp|*.tpp) true ;; *) false ;; esac ; }
is_c++() { case "$1" in *.cpp|*.hpp|*.tpp) true ;; *) false ;; esac ; }
# Utility function for checks that returns false if the first argument
# matches any of the shell regexes passed as subsequent arguments.
whitelist() {
    local target pattern
    target=$1
    shift
    # Any matching pattern exempts the file: report non-zero so the
    # calling check skips it.
    for pattern ; do
        if [[ ${target} =~ ${pattern} ]] ; then
            return 1
        fi
    done
    return 0
}
# Main driver. Takes a list of checks as arguments and a list of
# filenames as null separated strings on its standard input. Returns
# true if all the checks passed.
run_checks() {
    local check failures file files ret
    ret=0
    # Collect the NUL-separated file names from stdin into an array.
    files=()
    while IFS= read -d '' -r file ; do
        files+=("${file}")
    done
    for check in "$@" ; do
        # A check function returns success (0) when the file is BAD;
        # collect those files and hand them to the check's reporter.
        failures=()
        for file in "${files[@]}" ; do
            ${check} "${file}" && failures+=("${file}")
        done
        if [ ${#failures[@]} -ne 0 ] ; then
            ret=1
            ${check}_report "${failures[@]}"
            echo
        fi
    done
    return "${ret}"
}

# test_check pass|fail filename contents
test_check() {
    # check and failed are global variables
    local contents expected file tempdir
    [ $# -eq 3 ] || die "Wrong number of arguments"
    expected=$1
    file=$2
    contents=$3
    [ "${expected}" = pass ] || [ "${expected}" = fail ] || \
        die "Expected pass or fail, got '${expected}'"
    [[ "${file}" =~ ^/ ]] && die "Can't test with absolute path"
    # Delete the temporary directory if the script exits. (This is
    # reset before leaving this function.)
    trap 'rm -rf "${tempdir}"' EXIT
    tempdir=$(mktemp -d)
    pushd "${tempdir}" >/dev/null
    # Materialize the test file so ${check} can inspect it.
    mkdir -p "$(dirname "${file}")"
    printf '%s' "${contents}" >"${file}"
    if ${check} "${file}" ; then
        # The check flagged the file: only correct when "fail" was expected.
        if [ "${expected}" != fail ] ; then
            echo "${check} unexpectedly failed on ${file}:"
            cat "${file}"
            failed=yes
        fi
    else
        if [ "${expected}" != pass ] ; then
            echo "${check} unexpectedly passed on ${file}:"
            cat "${file}"
            failed=yes
        fi
    fi
    rm -rf "${tempdir}"
    popd >/dev/null
    trap - EXIT
    return 0
}

# Run the specified tests. Automatically run on the standard_checks
# if $1 is --test.
run_tests() {
    local check failed
    failed=no
    for check in "$@" ; do
        ${check}_test
    done
    # test_check assigns 'failed' through bash's dynamic scoping of locals.
    [ "${failed}" != no ] && die "Tests failed"
    return 0
}
###### Standard checks ######
# Each check below is a triple: <name> (predicate: success means the file
# is bad), <name>_report (prints the failures), <name>_test (self-tests).
standard_checks=()

# Check for lines longer than 80 characters
long_lines() {
    is_c++ "$1" && staged_grep '^[^#].\{80,\}' "$1" | grep -Ev 'https?://' | \
        grep -v '// IWYU pragma:' >/dev/null
}
long_lines_report() {
    echo "Found lines over 80 characters:"
    # This doesn't filter out URLs, but I can't think of a way to do
    # that without breaking the highlighting. They only get printed
    # if there's another problem in the file.
    pretty_grep '^[^#].\{80,\}' "$@"
}
long_lines_test() {
    local ten=xxxxxxxxxx
    local eighty=${ten}${ten}${ten}${ten}${ten}${ten}${ten}${ten}
    test_check pass foo.cpp "${eighty}"$'\n'
    test_check fail foo.cpp "${eighty}x"$'\n'
    test_check fail foo.hpp "${eighty}x"$'\n'
    test_check fail foo.tpp "${eighty}x"$'\n'
    test_check pass foo.yaml "${eighty}x"$'\n'
    test_check pass foo.cpp "#include ${eighty}x"$'\n'
    test_check pass foo.cpp "// IWYU pragma: no_include ${eighty}x"$'\n'
    test_check pass foo.cpp "xxx http://${eighty}x"$'\n'
    test_check pass foo.cpp "xxx https://${eighty}x"$'\n'
}
standard_checks+=(long_lines)

# Check for files containing tabs
tabs() {
    whitelist "$1" '.png' &&
    staged_grep -q -F $'\t' "$1"
}
tabs_report() {
    echo "Found tabs in the following files:"
    pretty_grep -F $'\t' "$@"
}
tabs_test() {
    test_check pass foo.cpp "x x"$'\n'
    test_check fail foo.cpp x$'\t'x$'\n'
}
standard_checks+=(tabs)

# Check for end-of-line spaces
trailing_space() {
    whitelist "$1" '.png' &&
    staged_grep -q -E ' +$' "$1"
}
trailing_space_report() {
    echo "Found white space at end of line in the following files:"
    pretty_grep -E ' +$' "$@"
}
trailing_space_test() {
    test_check pass foo.cpp ' x'$'\n'
    test_check fail foo.cpp 'x '$'\n'
}
standard_checks+=(trailing_space)

# Check for carriage returns
carriage_returns() {
    whitelist "$1" '.png' &&
    staged_grep -q -F $'\r' "$1"
}
carriage_returns_report() {
    echo "Found carriage returns in the following files:"
    # Skip highlighting because trying to highlight a carriage return
    # confuses some terminals.
    pretty_grep ${color_option:+--color=no} -F $'\r' "$@"
}
carriage_returns_test() {
    test_check pass foo.cpp 'x'
    test_check fail foo.cpp $'\r'
}
standard_checks+=(carriage_returns)

# Check for license file.
license() {
    whitelist "$1" \
        'cmake/FindCatch.cmake$' \
        'cmake/CodeCoverage.cmake$' \
        'cmake/CodeCoverageDetection.cmake$' \
        'cmake/FindLIBCXX.cmake$' \
        'cmake/FindPAPI.cmake$' \
        'cmake/FindPythonModule.cmake$' \
        'cmake/Findcppcheck.cmake$' \
        'cmake/Findcppcheck.cpp$' \
        'docs/config/footer.html' \
        'docs/config/header.html' \
        'docs/config/layout.xml' \
        'LICENSE' \
        'support/TeXLive/texlive.profile' \
        'tools/Iwyu/boost-all.imp$' \
        '.github/ISSUE_TEMPLATE.md' \
        '.github/PULL_REQUEST_TEMPLATE.md' \
        '.png' \
        '.svg' \
        '.clang-format$' && \
    ! staged_grep -q "Distributed under the MIT License" "$1"
}
license_report() {
    echo "Did not find a license in these files:"
    printf '%s\n' "$@"
}
license_test() {
    test_check pass foo.cpp 'XXDistributed under the MIT LicenseXX'
    test_check fail foo.cpp ''
    test_check pass LICENSE ''
}
standard_checks+=(license)
# Check for tests using Catch's TEST_CASE instead of SPECTRE_TEST_CASE
test_case() {
    is_c++ "$1" && staged_grep -q "^TEST_CASE" "$1"
}
# Bug fix: this reporter was misspelled "test_case_reoprt", so run_checks'
# call to ${check}_report failed with "command not found" whenever the
# check itself found a bad file.
test_case_report() {
    echo "Found occurrences of TEST_CASE, must use SPECTRE_TEST_CASE:"
    pretty_grep "^TEST_CASE" "$@"
}
test_case_test() {
    test_check pass foo.cpp ''
    test_check pass foo.cpp 'SPECTRE_TEST_CASE()'
    test_check fail foo.cpp 'TEST_CASE()'
    test_check pass foo.yaml 'TEST_CASE()'
}
standard_checks+=(test_case)
# Check for tests using Catch's Approx, which has a very loose tolerance
catch_approx() {
    is_c++ "$1" && staged_grep -q "Approx(" "$1"
}
catch_approx_report() {
    echo "Found occurrences of Approx, must use approx from"
    echo "tests/Unit/TestHelpers.hpp instead:"
    pretty_grep "Approx(" "$@"
}
catch_approx_test() {
    test_check pass foo.cpp ''
    test_check pass foo.cpp 'a == approx(b)'
    test_check fail foo.cpp 'a == Approx(b)'
    test_check pass foo.yaml 'a == Approx(b)'
}
standard_checks+=(catch_approx)

# Check for Doxygen comments on the same line as a /*!
doxygen_start_line() {
    is_c++ "$1" && staged_grep -q '/\*\![^\n]' "$1"
}
doxygen_start_line_report() {
    echo "Found occurrences of bad Doxygen syntax: /*! STUFF:"
    pretty_grep -E '\/\*\!.*' "$@"
}
doxygen_start_line_test() {
    test_check pass foo.cpp ''
    test_check pass foo.cpp ' /*!'$'\n'
    test_check fail foo.cpp ' /*! '$'\n'
    test_check pass foo.yaml ' /*! '$'\n'
}
standard_checks+=(doxygen_start_line)

# Check for Ls because of a preference not to use it as short form for List
ls_list() {
    is_c++ "$1" && staged_grep -q Ls "$1"
}
ls_list_report() {
    echo "Found occurrences of 'Ls', which is usually short for List:"
    pretty_grep Ls "$@"
}
ls_list_test() {
    test_check pass foo.cpp ''
    test_check pass foo.cpp ' FooList '
    test_check fail foo.cpp ' FooLs '
    test_check pass foo.yaml ' FooLs '
}
standard_checks+=(ls_list)

# Check for pragma once in all header files
pragma_once() {
    is_includible "$1" && \
    whitelist "$1" \
        'tools/SpectrePch.hpp$' && \
    ! staged_grep -q -x '#pragma once' "$1"
}
pragma_once_report() {
    echo "Did not find '#pragma once' in these header files:"
    printf '%s\n' "$@"
}
pragma_once_test() {
    test_check pass foo.cpp ''
    test_check fail foo.hpp ''
    test_check fail foo.tpp ''
    test_check pass foo.hpp '#pragma once'$'\n'
    test_check fail foo.hpp '//#pragma once'$'\n'
    test_check pass foo.hpp $'\n''#pragma once'$'\n\n'
}
standard_checks+=(pragma_once)

# Check for a newline at end of file
final_newline() {
    whitelist "$1" '.png' '.svg' &&
    # Bash strips trailing newlines from $() output
    [ "$(tail -c 1 "$1" ; echo x)" != $'\n'x ]
}
final_newline_report() {
    echo "No newline at end of file in:"
    printf '%s\n' "$@"
}
final_newline_test() {
    test_check pass foo.cpp $'\n'
    test_check fail foo.cpp ''
    test_check fail foo.cpp $'\n'x
}
standard_checks+=(final_newline)

# Check for enable_if and request replacing it with Requires
enable_if() {
    is_c++ "$1" && \
    whitelist "$1" \
        'src/DataStructures/Tensor/Structure.hpp$' \
        'src/IO/H5/File.hpp$' \
        'src/Parallel/CharmMain.cpp$' \
        'src/Utilities/PointerVector.hpp$' \
        'src/Utilities/Requires.hpp$' \
        'src/Utilities/TMPL.hpp$' \
        'src/Utilities/TaggedTuple.hpp$' \
        'tests/Unit/Utilities/Test_TypeTraits.cpp$' && \
    staged_grep -q std::enable_if "$1"
}
enable_if_report() {
    echo "Found occurrences of 'std::enable_if', prefer 'Requires':"
    pretty_grep std::enable_if "$@"
}
enable_if_test() {
    test_check pass foo.cpp 'enable'
    test_check pass foo.cpp 'enable if'
    test_check pass foo.cpp 'enable_if'
    test_check fail foo.cpp 'std::enable_if'
}
standard_checks+=(enable_if)

# Check for struct TD and class TD asking to remove it
struct_td() {
    is_c++ "$1" && staged_grep -q "\(struct TD;\|class TD;\)" "$1"
}
struct_td_report() {
    echo "Found 'struct TD;' or 'class TD;' which should be removed"
    pretty_grep "\(struct TD;\|class TD;\)" "$@"
}
struct_td_test() {
    test_check pass foo.cpp ''
    test_check fail foo.cpp 'struct TD;'
    test_check fail foo.cpp 'class TD;'
}
standard_checks+=(struct_td)
# Check for _details and details namespaces, request replacement with detail
namespace_details() {
    is_c++ "$1" && staged_grep -q "\(_details\|namespace[[:space:]]\+details\)" "$1"
}
namespace_details_report() {
    echo "Found '_details' namespace, please replace with '_detail'"
    # Consistency fix: use the same pattern as the check so the report
    # highlights every match the check found (the old single-space
    # "namespace details" pattern missed multi-space matches).
    pretty_grep "\(_details\|namespace[[:space:]]\+details\)" "$@"
}
namespace_details_test() {
    test_check pass foo.cpp ''
    test_check fail foo.cpp 'namespace details'
    test_check fail foo.cpp 'namespace  details'
    test_check fail foo.cpp 'namespace Test_details'
    test_check pass foo.cpp 'namespace Test_detail'
    test_check pass foo.cpp 'namespace detail'
    test_check pass foo.cpp 'details'
}
standard_checks+=(namespace_details)
# if test is enabled: redefines staged_grep to run tests on files that are not in git
[ "$1" = --test ] && staged_grep() { grep "$@"; } && run_tests "${standard_checks[@]}"

# True result for sourcing
# (the bare ':' guarantees this file evaluates to success when sourced,
# even if the --test condition above was false)
:
|
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
public class TaskReminderApp extends AppCompatActivity {
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_task_reminder);
// Initialize the AlarmManager
AlarmManager alarmManager = (AlarmManager) getSystemService(Context.ALARM_SERVICE);
// Set the alarm
Intent intent = new Intent(this, TaskReminderReceiver.class);
PendingIntent pendingIntent = PendingIntent.getBroadcast(this, 0, intent, 0);
alarmManager.setRepeating(AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + 60000, 60000, pendingIntent);
}
} |
<filename>auklib/auklib_test.go
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package auklib
import (
"io/ioutil"
"os"
"runtime"
"testing"
)
// pathTest describes a single PathExists test case.
type pathTest struct {
	desc   string // label used in failure messages
	path   string // filesystem path handed to PathExists
	expect bool   // expected return value
}
// TestPathExists exercises PathExists against a directory that exists, a
// path that does not, and (on Windows) the drive root.
func TestPathExists(t *testing.T) {
	tempDir, err := ioutil.TempDir("", "")
	// Bug fix: check the error before deferring cleanup. The original
	// deferred os.RemoveAll(tempDir) first, registering cleanup on a
	// path that is not valid when TempDir failed.
	if err != nil {
		t.Fatalf("error creating temp directory: %v", err)
	}
	defer os.RemoveAll(tempDir)
	tests := []pathTest{
		{"generated test dir", tempDir, true},
		{"made up path", "/probably/a/made/up/path/to/nothing", false},
	}
	if runtime.GOOS == "windows" {
		tests = append(tests, pathTest{"windows root dir", `C:\`, true})
	}
	for _, p := range tests {
		b, err := PathExists(p.path)
		if b != p.expect {
			t.Errorf("TestPathExists(%q) should be: %t, was: %t", p.desc, p.expect, b)
		}
		if err != nil {
			t.Errorf("TestPathExists(%q) returned error: %v", p.desc, err)
		}
	}
}
// TestEmptyPath verifies that PathExists rejects the empty string: it must
// report an error and return false.
func TestEmptyPath(t *testing.T) {
	tc := pathTest{"empty path", "", false}
	got, err := PathExists(tc.path)
	if err == nil {
		t.Errorf("TestEmptyPath(%q) did not result in error output.", tc.desc)
	}
	if got != tc.expect {
		t.Errorf("TestEmptyPath(%q) returned %t", tc.desc, got)
	}
}
|
# Start Kafka when no broker process is running.
# Look up the PID of a running broker via jps (matches the kafka.Kafka main class).
pid=$(/usr/java/default/bin/jps -l | awk '$2=="kafka.Kafka"{print $1}')
# Portability fix: '-z' replaces the bash-only '== ""' comparison, and the
# start command now only runs if the cd succeeded (previously a failed cd
# would execute ./kafka.sh from the wrong directory).
if [ -z "${pid}" ]; then
    cd /home/koqizhao/kafka/kafka && ./kafka.sh daemon-start
fi
|
<filename>pelicanconf.py
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# Site identity and feed locations.
SITENAME = u'Victoria Mo'
AUTHOR = u'<NAME>'
TAGLINE = u'Moving mountains, one stone at a time.'
SITEURL = 'http://localhost:8000'
FEED_DOMAIN = SITEURL
FEED_ATOM = 'feeds/all.atom.xml'
FEED_RSS = 'feeds/all.rss'
# Locale, date formatting and pagination.
TIMEZONE = 'America/New_York'
DEFAULT_LANG = u'en'
DATE_FORMATS = {
    'en': '%Y-%m-%d',
}
DEFAULT_PAGINATION = 10
THEME = 'themes/pelican-svbtle'
# display items
LOGO_URL = 'https://dl.dropboxusercontent.com/u/7030113/www/art-noveau-ornament.png'
MENUITEMS = (
    ('archives', '/archives.html'),
    ('feed', '/feeds/all.atom.xml'),
    ('github', 'https://github.com/vickimo/'),
)
DISPLAY_PAGES_ON_MENU = True
FOOTER_MESSAGE = u'This work is licensed under the <a href="http://creativecommons.org/licenses/by-sa/3.0/" rel="license">CC BY-SA</a>.'
TWITTER_USERNAME = u'mo_vicki'
#STATIC_PATHS = ()
# Extra files copied verbatim into the output root.
FILES_TO_COPY = (
    ('extra/README', 'README'),
    ('extra/LICENSE', 'LICENSE'),
    ('extra/CNAME', 'CNAME'),
    ('extra/humans.txt', 'humans.txt'),
    ('extra/favicon.ico', 'favicon.ico'),
    ('extra/404.html', '404.html'),
    ('extra/snowy_peaks.jpg', 'snowy_peaks.jpg'),
)
# Plugins and their settings.
PLUGIN_PATH = 'pelican-plugins'
PLUGINS = ('sitemap', 'gist', )
SITEMAP = {
    'format': 'xml',
    'priorities': {
        'articles': 0.5,
        'indexes': 0.5,
        'pages': 0.5
    },
    'changefreqs': {
        'articles': 'weekly',
        'indexes': 'weekly',
        'pages': 'monthly'
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.