text stringlengths 1 1.05M |
|---|
<reponame>shasjSrv/assistantApp<filename>app/src/main/java/com/example/jzy/helloword/util/TTS.java
package com.example.jzy.helloword.util;
import android.os.Environment;
import android.util.Log;
import com.example.jzy.helloword.ChatActivity;
import com.example.jzy.helloword.HomePageActivity;
import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.InitListener;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.SpeechSynthesizer;
import com.iflytek.cloud.SynthesizerListener;
/**
* Created by jzy on 8/8/17.
*/
public class TTS {

    /** iFlytek speech synthesizer instance; created in the constructor. */
    private SpeechSynthesizer mTts;

    // Set to false when SDK initialization fails.
    // NOTE(review): this flag is written but never read — callers have no way
    // to detect a failed init; consider exposing a getter.
    private boolean initSuccess = true;

    /**
     * Creates the synthesizer bound to the application context obtained from
     * HomePageActivity. Initialization result is reported asynchronously via
     * the InitListener below.
     */
    public TTS(){
        mTts = SpeechSynthesizer.createSynthesizer(HomePageActivity.getContext(), new InitListener() {
            @Override
            public void onInit(int code) {
                Log.d(HomePageActivity.TAG, "InitListener init() code = " + code);
                if (code != ErrorCode.SUCCESS) {
                    // Message text is user-facing (Chinese): "initialization failed, error code".
                    HomePageActivity.showTip("初始化失败,错误码:" + code);
                    initSuccess = false;
                }
            }
        });
    }

    /**
     * Re-applies the synthesis parameters and speaks the given text.
     *
     * @param answerText   text to synthesize
     * @param mTtsListener callback for synthesis progress/completion
     */
    public void startSpeaking(String answerText,SynthesizerListener mTtsListener){
        setParamTTS();
        mTts.startSpeaking(answerText, mTtsListener);
    }

    /** Configures voice, speed, volume, engine, stream type and output file. */
    private void setParamTTS() {
        // Clear any previously set parameters
        mTts.setParameter(SpeechConstant.PARAMS, null);
        // Synthesis settings
        mTts.setParameter(SpeechConstant.VOICE_NAME, "nannan");// voice / speaker name
        mTts.setParameter(SpeechConstant.SPEED, "50");// speech rate
        mTts.setParameter(SpeechConstant.VOLUME, "80");// volume, range 0~100
        mTts.setParameter(SpeechConstant.ENGINE_TYPE, SpeechConstant.TYPE_CLOUD); // use the cloud engine
        // Audio stream type for playback.
        // NOTE(review): "3" presumably corresponds to AudioManager.STREAM_MUSIC — confirm.
        mTts.setParameter(SpeechConstant.STREAM_TYPE, "3");
        // Interrupt music playback while speaking; default is true
        mTts.setParameter(SpeechConstant.KEY_REQUEST_FOCUS, "true");
        // Audio save path; pcm and wav formats are supported. Writing to the SD
        // card requires the WRITE_EXTERNAL_STORAGE permission.
        // Note: the AUDIO_FORMAT parameter requires an updated SDK version to take effect.
        mTts.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
        mTts.setParameter(SpeechConstant.TTS_AUDIO_PATH, Environment.getExternalStorageDirectory() + "/msc/tts.wav");
    }

    /**
     * Stops any ongoing synthesis and releases the synthesizer.
     * NOTE(review): the name is a typo for "destroy", but it is part of the
     * public API — renaming requires updating all callers.
     */
    public void destory(){
        mTts.stopSpeaking();
        mTts.destroy();
    }
}
|
from mymodule import funproject2,funproject3,funproject4
inp= int(raw_input("which page do you want to see?? \n Press 1 for watching Mp4-Videos \n Press 2 for Groceries \n Press 3 for news"))
if inp==1:
funproject2.main()
elif inp==2:
funproject3.main()
elif inp==3:
funproject4.main()
else:
print "Invalid choice!!!"
|
// Determine and print the smaller of the two numbers.
const num1 = 2;
const num2 = 3;
const minimum = Math.min(num1, num2);
console.log(minimum);
# Fahrenheit readings to convert.
temperatures_fahrenheit = [32, 64, 78, -10]

# Element-wise conversion: C = (F - 32) * 5 / 9.
temperatures_celsius = list(map(lambda fahrenheit: (fahrenheit - 32) * 5 / 9, temperatures_fahrenheit))

print(temperatures_celsius)
import {Deck, OrthographicView} from '@deck.gl/core'
import {interpolateICurve, GeomNode, GeomGraph, Point, GeomEdge} from 'msagl-js'
import NodeLayer from './layers/node-layer'
import EdgeLayer from './layers/edge-layer'
export default class Renderer {
  deck: any
  geomGraph?: GeomGraph

  /**
   * Creates the deck.gl instance with a single orthographic view and renders
   * the given graph (if any) once deck has finished loading.
   * (Removed a leftover debug `console.log(geomGraph)` from the original.)
   */
  constructor(geomGraph?: GeomGraph) {
    this.deck = new Deck({
      views: [new OrthographicView({})],
      initialViewState: {
        // @ts-ignore
        target: [0, 0, 0],
        zoom: 0,
      },
      controller: true,
      onLoad: () => this.update(),
    })
    this.geomGraph = geomGraph
  }

  /** Replaces the rendered graph; re-renders immediately if deck is ready. */
  setGraph(geomGraph?: GeomGraph) {
    this.geomGraph = geomGraph
    if (this.deck.layerManager) {
      // loaded
      this.update()
    }
  }

  /** Rebuilds the node/edge layers from the current graph and recenters the view. */
  update() {
    const {geomGraph} = this
    if (!geomGraph) return
    const center = geomGraph.boundingBox.center
    const nodeLayer = new NodeLayer<GeomNode>({
      id: 'nodes',
      data: Array.from(geomGraph.shallowNodes()),
      background: true,
      getPosition: (n) => [n.center.x, n.center.y],
      getText: (n) => n.id,
      getBorderWidth: 1,
      getSize: 14,
      // @ts-ignore
      sizeUnits: 'common',
      sizeMaxPixels: 24,
      _subLayerProps: {
        // Background box is absolutely positioned
        background: {
          getSize: 1,
          sizeScale: 1,
          sizeMinPixels: 0,
          sizeMaxPixels: Number.MAX_SAFE_INTEGER,
        },
      },
    })
    const edgeLayer = new EdgeLayer<GeomEdge>({
      id: 'edges',
      data: Array.from(geomGraph.edges()),
      getPath: (e) => interpolateICurve(e.curve, 0.5).map((p) => [p.x, p.y]),
      getColor: (_) => [255 * Math.random(), 128, 255 * Math.random()],
      //getArrowSize: (e)=>e.edgeGeometry.targetArrowhead.length,
      getArrowType: 'none',
      getWidth: 1,
      opacity: 0.5,
    })
    this.deck.setProps({
      initialViewState: {
        target: [center.x, center.y, 0],
        zoom: 0,
      },
      layers: [edgeLayer, nodeLayer],
    })
  }
}
|
<gh_stars>10-100
package amora.backend.actors
import scala.util.Try
import org.apache.jena.query.ResultSetRewindable
import akka.actor.Actor
import akka.actor.ActorLogging
import amora.api.SparqlModel
import amora.api.Turtle
import amora.backend.Content
import amora.backend.Logger
import amora.backend.indexer.Indexer
class IndexerActor extends Actor with ActorLogging {
  import IndexerMessage._

  // Expose Akka's `log` under a different name so the anonymous Logger below
  // can reach it without its own `log` member shadowing it.
  private def akkaLog = log

  // Adapts amora's Logger interface onto Akka logging. Only the four level
  // methods are supported; the remaining members throw because they are not
  // used on this path (`isClosed` stays false so logging keeps flowing).
  private val logger = new Logger {
    override def debug(msg: String): Unit = akkaLog.debug(msg)
    override def warning(msg: String): Unit = akkaLog.warning(msg)
    override def info(msg: String): Unit = akkaLog.info(msg)
    override def error(msg: String, t: Throwable): Unit = akkaLog.error(t, msg)
    override def log = throw new UnsupportedOperationException
    override def logLevel = throw new UnsupportedOperationException
    override def logLevel_=(level: Logger.LogLevel) = throw new UnsupportedOperationException
    override def close() = throw new UnsupportedOperationException
    override def isClosed = false
  }

  private val indexer = new Indexer(Content.ModelName, logger)
  private val config = context.system.settings.config
  // In test mode the index lives in memory; otherwise it is persisted at the
  // configured storage path.
  private val testMode = config.getBoolean("app.test-mode")
  private val dataset =
    if (testMode)
      indexer.mkInMemoryDataset
    else
      indexer.mkDataset(config.getString("app.storage.index-dataset"))

  log.info("Indexer created dataset at: " + (if (testMode) "<memory>" else config.getString("app.storage.index-dataset")))
  // One-time initialization of the dataset at actor construction time.
  indexer.writeDataset(dataset)(indexer.startupIndexer)

  // Every request is answered with a scala.util.Try so failures travel back
  // to the sender instead of crashing the actor.
  override def receive = {
    case RunQuery(query) ⇒
      sender ! Try(handleQuery(query))
    case RunUpdate(query) ⇒
      sender ! Try(handleUpdate(query))
    case RunConstruct(query) ⇒
      sender ! Try(handleConstruct(query))
    case RunTurtleUpdate(query) ⇒
      sender ! Try(handleTurtleUpdate(query))
    case RunNlq(query) ⇒
      sender ! Try(handleNlq(query))
    case GetHeadCommit ⇒
      sender ! Try(headCommit())
    case ListCommits ⇒
      sender ! Try(listCommits())
    case ShowCommit(commit) ⇒
      sender ! Try(showCommit(commit))
  }

  // Release the underlying dataset when the actor stops.
  override def postStop() = {
    dataset.close()
  }

  /** Runs a SPARQL SELECT query under a read lock and returns its result set. */
  def handleQuery(query: String): ResultSetRewindable = {
    log.info(s"Handle SPARQL query:\n$query")
    indexer.readDataset(dataset) { dataset ⇒
      indexer.withModel(dataset) { model ⇒
        indexer.withQueryService(model, query)
      }
    }
  }

  /** Runs a SPARQL CONSTRUCT query under a read lock and returns the built model. */
  def handleConstruct(query: String): SparqlModel = {
    log.info(s"Handle SPARQL construct query:\n$query")
    indexer.readDataset(dataset) { dataset ⇒
      indexer.withModel(dataset) { model ⇒
        indexer.withConstructService(model, query)
      }
    }
  }

  /** Applies a SPARQL UPDATE under a write lock; the update result is discarded. */
  def handleUpdate(query: String): Unit = {
    log.info(s"Handle SPARQL update:\n$query")
    indexer.writeDataset(dataset) { dataset ⇒
      indexer.withModel(dataset) { model ⇒
        indexer.withUpdateService(model, query)(_ ⇒ ())
      }
    }
  }

  /** Writes the given Turtle document into the model under a write lock. */
  def handleTurtleUpdate(query: String): Unit = {
    log.info(s"Handle Turtle update:\n$query")
    indexer.writeDataset(dataset) { dataset ⇒
      indexer.withModel(dataset) { model ⇒
        indexer.writeAs(dataset, model, Turtle, query)
      }
    }
  }

  /**
   * Answers a natural language query.
   * NOTE(review): unlike the other query paths this takes the WRITE lock
   * (writeDataset) — presumably askNlq may mutate the model; confirm.
   */
  def handleNlq(query: String): String = {
    log.info(s"Handle natural language query:\n$query")
    indexer.writeDataset(dataset) { dataset ⇒
      indexer.withModel(dataset) { model ⇒
        indexer.askNlq(model, query)
      }
    }
  }

  /** Returns the head commit id, or the empty string when there is none. */
  def headCommit(): String = {
    indexer.readDataset(dataset) { dataset ⇒
      indexer.withModel(dataset) { model ⇒
        indexer.headCommit(model).getOrElse("")
      }
    }
  }

  /** Lists all known commit ids. */
  def listCommits(): List[String] = {
    indexer.readDataset(dataset) { dataset ⇒
      indexer.withModel(dataset) { model ⇒
        indexer.listCommits(model)
      }
    }
  }

  /** Returns the model associated with the given commit. */
  def showCommit(commit: String): SparqlModel = {
    indexer.readDataset(dataset) { dataset ⇒
      indexer.showCommit(dataset, commit)
    }
  }
}
/** Protocol of messages understood by IndexerActor; replies are wrapped in scala.util.Try. */
sealed trait IndexerMessage
object IndexerMessage {
  /** Run a SPARQL SELECT query. */
  case class RunQuery(query: String) extends IndexerMessage
  /** Run a SPARQL UPDATE. */
  case class RunUpdate(query: String) extends IndexerMessage
  /** Run a SPARQL CONSTRUCT query. */
  case class RunConstruct(query: String) extends IndexerMessage
  /** Write a Turtle document into the index. */
  case class RunTurtleUpdate(query: String) extends IndexerMessage
  /** Run a natural language query. */
  case class RunNlq(query: String) extends IndexerMessage
  /** Ask for the head commit id. */
  case object GetHeadCommit extends IndexerMessage
  /** Ask for the list of all commit ids. */
  case object ListCommits extends IndexerMessage
  /** Ask for the model of a specific commit. */
  case class ShowCommit(commit: String) extends IndexerMessage
}
|
import test from 'ava';
import camelcaseKeys from '.';
// Unit tests for camelcaseKeys: basic conversion plus the `exclude`, `deep`,
// `stopPaths` and `pascalCase` options, and array/non-object input handling.

test('main', t => {
  t.true(camelcaseKeys({'foo-bar': true}).fooBar);
});

// `exclude` accepts both literal strings and regular expressions.
test('exclude option', t => {
  t.true(camelcaseKeys({'--': true}, {exclude: ['--']})['--']);
  t.deepEqual(camelcaseKeys({'foo-bar': true}, {exclude: [/^f/]}), {'foo-bar': true});
});

// `deep` recurses into nested objects and objects inside arrays.
test('deep option', t => {
  t.deepEqual(
    // eslint-disable-next-line camelcase
    camelcaseKeys({foo_bar: true, obj: {one_two: false, arr: [{three_four: true}]}}, {deep: true}),
    {fooBar: true, obj: {oneTwo: false, arr: [{threeFour: true}]}}
  );
});

// `stopPaths` halts the deep conversion below the listed (pre-conversion) key paths.
test('stopPaths option', t => {
  t.deepEqual(
    // eslint-disable-next-line camelcase
    camelcaseKeys({foo_bar: true, obj: {one_two: false, arr: [{three_four: true}]}}, {deep: true, stopPaths: ['obj']}),
    // eslint-disable-next-line camelcase
    {fooBar: true, obj: {one_two: false, arr: [{three_four: true}]}}
  );
  t.deepEqual(
    // eslint-disable-next-line camelcase
    camelcaseKeys({foo_bar: true, obj: {one_two: false, arr: [{three_four: true}]}}, {deep: true, stopPaths: ['obj.arr']}),
    // eslint-disable-next-line camelcase
    {fooBar: true, obj: {oneTwo: false, arr: [{three_four: true}]}}
  );
  t.deepEqual(
    // eslint-disable-next-line camelcase
    camelcaseKeys({q_w_e: [[{foo_bar: 1}, {one_two: 2}, {foo_bar: 3, one_two: 4}]]}, {deep: true, stopPaths: ['q_w_e.foo_bar']}),
    {qWE: [[{fooBar: 1}, {oneTwo: 2}, {fooBar: 3, oneTwo: 4}]]}
  );
  t.deepEqual(
    // eslint-disable-next-line camelcase
    camelcaseKeys({a_b: 1, a_c: {c_d: 1, c_e: {e_f: 1}}}, {deep: true, stopPaths: ['a_c.c_e']}),
    // eslint-disable-next-line camelcase
    {aB: 1, aC: {cD: 1, cE: {e_f: 1}}}
  );
});

test('pascalCase option only', t => {
  t.true(camelcaseKeys({'new-foo-bar': true}, {pascalCase: true}).NewFooBar);
});

test('pascalCase and deep options', t => {
  t.deepEqual(
    // eslint-disable-next-line camelcase
    camelcaseKeys({p_foo_bar: true, p_obj: {p_two: false, p_arr: [{p_three_four: true}]}}, {deep: true, pascalCase: true}),
    {PFooBar: true, PObj: {PTwo: false, PArr: [{PThreeFour: true}]}}
  );
});

// Arrays of non-objects (here: nested string arrays) pass through untouched.
test('handles nested arrays', t => {
  t.deepEqual(
    // eslint-disable-next-line camelcase
    camelcaseKeys({q_w_e: [['a', 'b']]}, {deep: true}),
    {qWE: [['a', 'b']]}
  );
});

// A top-level array input converts each element's keys.
test('accepts an array of objects', t => {
  t.deepEqual(
    // eslint-disable-next-line camelcase
    camelcaseKeys([{foo_bar: true}, {bar_foo: false}, {'bar-foo': 'false'}]),
    [{fooBar: true}, {barFoo: false}, {barFoo: 'false'}]
  );
});

test('different pascalCase option values', t => {
  // eslint-disable-next-line camelcase
  t.true(camelcaseKeys({foo_bar_UPPERCASE: true}).fooBarUppercase);
  // eslint-disable-next-line camelcase
  t.true(camelcaseKeys({foo_bar_UPPERCASE: true}, {pascalCase: true}).FooBarUppercase);
  t.deepEqual(
    camelcaseKeys({'p-foo-bar': true, 'p-obj': {'p-two': false, 'p-arr': [{'p-three-four': true}]}}, {deep: true, pascalCase: true}),
    {PFooBar: true, PObj: {PTwo: false, PArr: [{PThreeFour: true}]}}
  );
  t.deepEqual(
    camelcaseKeys({'p-foo-bar': true, 'p-obj': {'p-two': false, 'p-arr': [{'p-three-four': true}]}}, {deep: true}),
    {pFooBar: true, pObj: {pTwo: false, pArr: [{pThreeFour: true}]}}
  );
});

test('handle array of non-objects', t => {
  const input = ['name 1', 'name 2'];
  t.deepEqual(
    camelcaseKeys(input),
    input
  );
});

test('handle array of non-objects with `deep` option', t => {
  const input = ['name 1', 'name 2'];
  t.deepEqual(
    camelcaseKeys(input, {deep: true}),
    input
  );
});
|
<reponame>sitewhere/sitewhere-java-api
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.rest.model.device.support;
import com.sitewhere.rest.model.common.Location;
/**
* Entry used in InterpolatedAssignmentHistory.
*/
public class DeviceAssignmentHistoryEntry {
/** Time slot (in ms.) */
private long timeSlot;
/** Location information */
private Location location;
public long getTimeSlot() {
return timeSlot;
}
public void setTimeSlot(long timeSlot) {
this.timeSlot = timeSlot;
}
public Location getLocation() {
return location;
}
public void setLocation(Location location) {
this.location = location;
}
} |
package com.mzapps.app.cotoflix.Adapter;
import android.content.Context;
import android.content.Intent;
import androidx.recyclerview.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RatingBar;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.mzapps.app.cotoflix.Activitys.DetailsMovie_Activity;
import com.mzapps.app.cotoflix.Activitys.DetailsTV_Activity;
import com.mzapps.app.cotoflix.MainActivity;
import com.mzapps.app.cotoflix.Model.Movie;
import com.mzapps.app.cotoflix.R;
import com.mzapps.app.cotoflix.Utility.DownloadImage;
import java.util.List;
import me.samthompson.bubbleactions.BubbleActions;
import me.samthompson.bubbleactions.Callback;
/**
 * RecyclerView adapter that renders a list of movies/TV shows with poster,
 * title and rating, opens the matching details screen on tap, and offers a
 * favourite/share/hide bubble menu on long press.
 */
public class MoviesAdapter extends RecyclerView.Adapter<MoviesAdapter.MovieViewHolder> {

    private List<Movie> movies;
    private Context mContext;
    // Click counter: an interstitial ad is shown on every second item click.
    private int counter = 1;

    /** Holds the per-item views looked up once at view-holder creation time. */
    public static class MovieViewHolder extends RecyclerView.ViewHolder {
        TextView movieTitle;
        TextView rating;
        ImageView thumbnail;
        RatingBar ratingBar;
        ProgressBar vote_average_progressbar;

        public MovieViewHolder(View v) {
            super(v);
            movieTitle = v.findViewById(R.id.title);
            rating = v.findViewById(R.id.rating);
            thumbnail = v.findViewById(R.id.thumbnail);
            ratingBar = v.findViewById(R.id.ratingbar);
            vote_average_progressbar = v.findViewById(R.id.vote_average_progressbar);
        }
    }

    public MoviesAdapter(List<Movie> movies, Context mContext) {
        this.movies = movies;
        this.mContext = mContext;
    }

    @Override
    public MoviesAdapter.MovieViewHolder onCreateViewHolder(ViewGroup parent,
            int viewType) {
        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.list_item_movie, parent, false);
        return new MovieViewHolder(view);
    }

    @Override
    public void onBindViewHolder(final MovieViewHolder holder, final int position) {
        final Movie movie = movies.get(position);
        // A null title marks the item as a TV result; fall back to its original name.
        if(movie.getTitle()==null) {
            holder.movieTitle.setText(movie.getOriginal_name());
        }else {
            holder.movieTitle.setText(movie.getTitle());
        }
        holder.rating.setText(movie.getVoteAverage().toString());
        // Vote average is on a 0-10 scale; the star bar shows 0-5.
        holder.ratingBar.setRating(movie.getVoteAverage().floatValue()/2);
        //=================================================
        // Progress bar expects 0-100, so scale the 0-10 average by 10.
        holder.vote_average_progressbar.setProgress(movie.getVoteAverage().intValue()*10);
        // NOTE(review): plain-http poster URL — cleartext traffic is blocked by
        // default on Android 9+; confirm the network security config allows it
        // or switch to https.
        Glide.with(mContext).load("http://image.tmdb.org/t/p/w342"+movies.get(position).getPosterPath()).into(holder.thumbnail);
        // Tap opens the TV or movie details screen depending on the item type.
        holder.itemView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if(movie.getTitle() == null){
                    Activity(movie,mContext, DetailsTV_Activity.TAG_DetailsTV_Fragment);
                    ShowInterstitials();
                }else {
                    Activity(movie,mContext, DetailsMovie_Activity.TAG_DetailsMovie_Fragment);
                    ShowInterstitials();
                }
            }
        });
        // Long press shows the bubble action menu (favourite / share / hide).
        holder.itemView.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(final View view) {
                BubbleActions.on(view)
                        .addAction("Star", R.drawable.if_heart_119_111093, new Callback() {
                            @Override
                            public void doAction() {
                                //new RequestMovie(movie.getId().toString(),movie.getTitle()+" : "+movie.getReleaseDate().substring(0, 4),mContext);
                                Toast.makeText(view.getContext(), "Favourite!", Toast.LENGTH_SHORT).show();
                            }
                        })
                        .addAction("Share", R.drawable.if_share4_216719, new Callback() {
                            @Override
                            public void doAction() {
                                // share(movie.getPosterPath());
                                new DownloadImage(mContext,movie.getPosterPath()).shareX();
                                Toast.makeText(view.getContext(), "Wait for share!", Toast.LENGTH_SHORT).show();
                            }
                        })
                        .addAction("Hide", R.drawable.if_icon_close_round_211651, new Callback() {
                            @Override
                            public void doAction() {
                                Toast.makeText(view.getContext(), "Hide pressed!", Toast.LENGTH_SHORT).show();
                            }
                        })
                        .show();
                return false;
            }
        });
    }

    @Override
    public int getItemCount() {
        return movies.size();
    }

    // Starts the details activity matching the given fragment TAG, passing the
    // movie as a serialized extra.
    private void Activity(Movie movie,Context mContext, String TAG) {
        if (TAG.equals(DetailsTV_Activity.TAG_DetailsTV_Fragment)){
            Intent intent = new Intent(mContext, DetailsTV_Activity.class);
            intent.putExtra("movie", movie);
            mContext.startActivity(intent);
        }else if (TAG.equals(DetailsMovie_Activity.TAG_DetailsMovie_Fragment)){
            Intent intent = new Intent(mContext, DetailsMovie_Activity.class);
            intent.putExtra("movie", movie);
            mContext.startActivity(intent);
        }
    }

    // Shows the preloaded interstitial ad on every second call, then resets.
    private void ShowInterstitials(){
        if (counter == 2){
            if (MainActivity.mInterstitialAd.isLoaded()) {
                MainActivity.mInterstitialAd.show();
            }
            counter = 1;
        }else {
            counter++;
        }
    }
}
|
#!/bin/bash
# Prints the liumapp banner, then removes the local v4.2.0 docker images for
# the admin client, server and eureka components.

echo '============================================================='
echo '$ $'
echo '$ liumapp $'
echo '$ $'
echo '$ $'
echo '$ email: liumapp.com@gmail.com $'
echo '$ homePage: http://www.liumapp.com $'
echo '$ Github: https://github.com/liumapp $'
echo '$ $'
echo '============================================================='
echo '.'

for component in admin-client admin-server admin-eureka; do
  docker rmi "liumapp/${component}:v4.2.0"
done
|
<reponame>seraekim/mobile-table<gh_stars>1-10
package com.ccmedia.mbtable.util;
import java.io.UnsupportedEncodingException;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;
public class AES256Util {
private String iv = "7104zz1329964c66";
private String key = "7104zz1329964c667104ee1329964c66";
private Key keySpec;
public AES256Util() throws UnsupportedEncodingException {
byte[] keyBytes = new byte[32];
byte[] b = key.getBytes("UTF-8");
int len = b.length;
if (len > keyBytes.length)
len = keyBytes.length;
System.arraycopy(b, 0, keyBytes, 0, len);
SecretKeySpec keySpec = new SecretKeySpec(keyBytes, "AES");
this.keySpec = keySpec;
}
public String encryptAES(String str) throws java.io.UnsupportedEncodingException, NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeyException, InvalidAlgorithmParameterException, IllegalBlockSizeException, BadPaddingException {
Cipher c = Cipher.getInstance("AES/CBC/PKCS5Padding");
c.init(Cipher.ENCRYPT_MODE, keySpec, new IvParameterSpec(iv.getBytes()));
byte[] encrypted = c.doFinal(str.getBytes("UTF-8"));
String enStr = new String(Base64.encodeBase64(encrypted));
return enStr;
}
public String decryptAES(String str) throws java.io.UnsupportedEncodingException, NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeyException, InvalidAlgorithmParameterException, IllegalBlockSizeException, BadPaddingException {
Cipher c = Cipher.getInstance("AES/CBC/PKCS5Padding");
c.init(Cipher.DECRYPT_MODE, keySpec, new IvParameterSpec(iv.getBytes("UTF-8")));
byte[] byteStr = Base64.decodeBase64(str.getBytes());
return new String(c.doFinal(byteStr), "UTF-8");
}
// public static void main(String[] args) throws UnsupportedEncodingException, InvalidKeyException, NoSuchAlgorithmException, NoSuchPaddingException, InvalidAlgorithmParameterException, IllegalBlockSizeException, BadPaddingException {
// AES256Util a = new AES256Util();
// System.out.println(a.encryptAES("root"));
// }
} |
import os
import shutil
def create_symbolic_links(root_dir, links_dir='/semantic_web_links'):
    """Create a symlink in ``links_dir`` for every file found under ``root_dir``.

    Args:
        root_dir: Directory tree to walk for files.
        links_dir: Destination directory for the symlinks; created if missing.
            Defaults to '/semantic_web_links' for backward compatibility.

    Note:
        Files sharing a basename in different subdirectories collide on the
        link name; the later one fails with ``FileExistsError``, which (like
        any other ``OSError``) is reported and skipped rather than raised.
    """
    # exist_ok avoids the check-then-create race of the original
    # `if not exists: makedirs` sequence.
    os.makedirs(links_dir, exist_ok=True)

    for root, dirs, files in os.walk(root_dir):
        for file in files:
            file_path = os.path.join(root, file)
            link_path = os.path.join(links_dir, file)
            try:
                os.symlink(file_path, link_path)
                print(f"Created symbolic link for {file} in {links_dir}")
            except OSError as e:
                # Best-effort: report the failure and keep linking the rest.
                print(f"Error creating symbolic link for {file}: {e}")
# Usage
create_symbolic_links('/semantic_web') |
<reponame>Crazychicken563/RhythmGameCharterAI<gh_stars>0
import numpy as np
from os import listdir
from os.path import isfile, join, isdir
import cv2
import soundfile as sf
import csv
import pickle as pkl
from aubio import source, onset
import markovify
def main2(directory):
    """For every audio file in `directory`: detect onsets with aubio, generate
    a token sequence from a Markov model, and write the result as a Beat Saber
    'Normal.dat' choreography file.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; confirm nesting against the original before relying on it.
    """
    files = [join(directory, f) for f in listdir(directory) if isfile(join(directory, f))]
    #beat_samples = [join('./beat_samples', f) for f in listdir('./beat_samples') if isfile(join('./beat_samples', f))]
    for file in files:
        print(file)
        try:
            with open(file, 'rb') as f:
                ## NOTE For reconstruction, this data needs to be added to *items*
                #sample = pkl.load(f)
                name = file # this is needed for reconstruction of the song
                s = source(file)
                data, samplerate = sf.read(file)
                #samplerate = sample['samplerate']
                # Song length in seconds (frames / sample rate).
                songlength = data.shape[0]/samplerate
                win_s = 256 # fft size
                hop_s = win_s // 2 # hop size
                #o = onset("default", win_s, hop_s, samplerate)
                o = onset('specdiff', 512*8, 512, samplerate=samplerate)
                # list of onsets, in samples
                onsets = []
                # total number of frames read
                total_frames = 0
                while True:
                    samples, read = s()
                    if o(samples):
                        # Stop collecting onsets within the last 2 seconds.
                        if(o.get_last_s() >= songlength - 2): # something is wrong w this api, so lets kill the onsets manually
                            break
                        print('Beat Detected:', o.get_last_s())
                        onsets.append(o.get_last())
                    total_frames += read
                # yeah not dealing with 48000 right now
                assert samplerate == 44100
                print(len(onsets))
                # Build a Markov text model and sample a sentence with at least
                # one token per detected onset.
                with open('markov.txt', 'r') as f2:
                    sentences = f2.readline()
                text_model = markovify.Text(sentences)
                sentence = 'hi there'
                while(len(sentence.split(' ')) < len(onsets)):
                    sentence = text_model.make_sentence(max_words=len(onsets)*3)
                mapping = sentence.split(' ')
                print(name, samplerate)
                # Use an existing Normal.dat as a template for metadata.
                # NOTE(review): eval() on file contents — fine for trusted local
                # files only.
                with open('./output/Normal.dat', 'r') as f6:
                    line = f6.readline()
                choreo = eval(line)
                choreo['_obstacles'] = [] # set the obstacles to empty, since we didnt predict those
                choreo['_notes'] = [] # clear both the notes and events
                choreo['_events'] = []
                # Each onset becomes one note; the digits of the corresponding
                # Markov token encode type/cut direction/line index/layer.
                for idx, entry in enumerate(onsets):
                    mapp = mapping[idx]
                    time = entry/samplerate
                    cutDirection = int(mapp[1])
                    lineLayer = int(mapp[3])
                    lineIndex = int(mapp[2])
                    type = int(mapp[0])
                    note = {'_cutDirection': cutDirection, '_lineIndex': lineIndex, '_lineLayer': lineLayer, '_time': time, '_type': type}
                    # we're just going to randomly assign lighting
                    event = {'_time': time, '_type': type, '_value': cutDirection}
                    choreo['_notes'].append(note)
                    choreo['_events'].append(event)
                # Serialize back as JSON-ish text (single->double quotes, no spaces).
                with open('./output/Normal.dat', 'w') as f4:
                    f4.write(repr(choreo).replace('\'', '\"').replace(' ', ''))
        except Exception as f:
            # NOTE(review): swallows every error per-file and continues with
            # the next file; only the message is printed.
            print(f)

if __name__ == "__main__":
    directory = './samples_infer/'
    #main(directory)
    main2(directory) #second preprocessing needed
<filename>src/main/java/mastermind/views/console/menu/SavedGameSelectMenu.java
package mastermind.views.console.menu;
import mastermind.controllers.StartController;
import mastermind.views.console.menu.command.SavedGameSelectCommand;
public class SavedGameSelectMenu extends Menu {

    /**
     * Builds the menu with one selection command per saved game name reported
     * by the start controller.
     */
    public SavedGameSelectMenu(StartController startController) {
        super();
        for (String savedGameName : startController.getSavedGamesNames()) {
            this.addCommand(new SavedGameSelectCommand(savedGameName, startController));
        }
    }
}
|
#!/bin/sh
. ./trace.sh
. ./sql.sh
get_txns_by_watchlabel(){
  # Returns, as JSON, the transactions seen for all watched addresses that
  # belong to the watched xpub labeled ${1}. ${2} optionally caps the number
  # of rows (default 10).
  trace "Entering get_txns_by_watchlabel() for label ${1}..."

  local label_txns
  # NOTE(review): ${1} and ${2} are interpolated directly into the SQL text —
  # a label containing a single quote breaks the query (SQL injection).
  # Confirm labels are validated upstream or switch to parameterized queries.
  query=$(cat <<-HERE
SELECT w32.label, w.address, tx.txid, tx.confirmations,tx.blockheight, wtxn.vout, wtxn.amount, tx.blockhash, tx.blocktime, tx.timereceived
FROM watching_by_pub32 as w32
INNER JOIN watching AS w ON w32.id = w.watching_by_pub32_id
INNER JOIN watching_tx AS wtxn ON w.id = wtxn.watching_id
INNER JOIN tx AS tx ON wtxn.tx_id = tx.id
WHERE w32.label='${1}'
LIMIT ${2-10} OFFSET 0
HERE
)
  label_txns=$(sql "$query")
  returncode=$?
  trace_rc ${returncode}

  # Convert the pipe-separated rows returned by sql() into a JSON array.
  label_txns_json=$(echo "$label_txns" | jq -Rcsn '
{"label_txns":
[inputs
| . / "\n"
| (.[] | select(length > 0) | . / "|") as $input
| {"label": $input[0], "address": $input[1], "txid": $input[2], "confirmations": $input[3], "blockheight": $input[4], "v_out": $input[5], "amount": $input[6], "blockhash": $input[7], "blocktime": $input[8], "timereceived": $input[9]}
]
}
')
  echo "$label_txns_json"

  return ${returncode}
}
get_unused_addresses_by_watchlabel(){
  # Returns, as JSON, the watched addresses of the xpub labeled ${1} that have
  # no associated transaction yet, ordered by derivation index. ${2} optionally
  # caps the number of rows (default 10).
  trace "Entering get_unused_addresses_by_watchlabel() for label ${1}..."

  local label_unused_addrs
  # NOTE(review): ${1} and ${2} are interpolated directly into the SQL text —
  # same injection concern as get_txns_by_watchlabel(); confirm upstream
  # validation.
  query=$(cat <<-HERE
SELECT w32.id, w32.label, w32.pub32, w.pub32_index, w.address
FROM watching as w
INNER JOIN watching_by_pub32 AS w32 ON w.watching_by_pub32_id = w32.id
WHERE w32.label='${1}'
AND NOT EXISTS (
SELECT 1 FROM watching_tx WHERE watching_id = w.id
)
ORDER BY w.pub32_index ASC
LIMIT ${2-10} OFFSET 0
HERE
)
  label_unused_addrs=$(sql "$query")
  returncode=$?
  trace_rc ${returncode}

  # Convert the pipe-separated rows returned by sql() into a JSON array.
  label_unused_addrs_json=$(echo "$label_unused_addrs" | jq -Rcsn '
{"label_unused_addresses":
[inputs
| . / "\n"
| (.[] | select(length > 0) | . / "|") as $input
| {"pub32_watch_id": $input[0], "pub32_label": $input[1], "pub32" : $input[2], "address_pub32_index": $input[3], "address": $input[4]}
]
}
')
  echo "$label_unused_addrs_json"

  return ${returncode}
}
getactivewatches() {
  # Emits {"watches":[...]} containing every single-address watch that is
  # still active (watching set, not yet called back at 1 conf).
  trace "Entering getactivewatches()..."

  local watches
  # Let's build the string directly with dbms instead of manipulating multiple strings afterwards, it's faster.
  # {"id":"${id}","address":"${address}","imported":"${imported}","unconfirmedCallbackURL":"${cb0conf_url}","confirmedCallbackURL":"${cb1conf_url}","watching_since":"${timestamp}"}
  watches=$(sql "SELECT '{\"id\":' || id || ',\"address\":\"' || address || '\",\"imported\":' || imported || ',\"unconfirmedCallbackURL\":' || CASE WHEN callback0conf IS NULL THEN 'null' ELSE ('\"' || callback0conf || '\"') END || ',\"confirmedCallbackURL\":' || CASE WHEN callback1conf IS NULL THEN 'null' ELSE ('\"' || callback1conf || '\"') END || ',\"label\":\"' || COALESCE(label, '') || '\",\"watching_since\":\"' || inserted_ts || '\"}' FROM watching WHERE watching AND NOT calledback1conf ORDER BY id")
  returncode=$?
  trace_rc ${returncode}

  # Stream the rows out as a JSON array, inserting commas between elements.
  local notfirst=false
  echo -n "{\"watches\":["
  local IFS=$'\n'
  for row in ${watches}
  do
    if ${notfirst}; then
      echo ","
    else
      notfirst=true
    fi
    trace "[getactivewatches] row=${row}"
    echo -n "${row}"
  done
  echo "]}"

  return ${returncode}
}
getactivewatchesbyxpub() {
  # Thin wrapper: list active watches whose parent xpub matches ${1}
  # (filters on the pub32 column).
  trace "Entering getactivewatchesbyxpub()..."

  local rc
  getactivewatchesxpub "pub32" "${1}"
  rc=$?
  trace_rc ${rc}
  return ${rc}
}
getactivewatchesbylabel() {
  # Thin wrapper: list active watches whose parent xpub label matches ${1}
  # (filters on the label column).
  trace "Entering getactivewatchesbylabel()..."

  local rc
  getactivewatchesxpub "label" "${1}"
  rc=$?
  trace_rc ${rc}
  return ${rc}
}
getactivewatchesxpub() {
  # Emits {"watches":[...]} for all active derived-address watches whose
  # parent watching_by_pub32 row matches ${1}='${2}' (column name / value).
  # NOTE(review): ${2} is interpolated directly into the SQL — same injection
  # concern as the other query helpers; confirm upstream validation.
  trace "Entering getactivewatchesxpub()..."

  local where=${1}
  trace "[getactivewatchesxpub] where=${where}"
  local value=${2}
  trace "[getactivewatchesxpub] value=${value}"
  local watches

  # Let's build the string directly with dbms instead of manipulating multiple strings afterwards, it's faster.
  # {"id":"${id}","address":"${address}","imported":"${imported}","unconfirmedCallbackURL":"${cb0conf_url}","confirmedCallbackURL":"${cb1conf_url}","watching_since":"${timestamp}","derivation_path":"${derivation_path}","pub32_index":"${pub32_index}"}
  watches=$(sql "SELECT '{\"id\":' || w.id || ',\"address\":\"' || address || '\",\"imported\":' || imported || ',\"unconfirmedCallbackURL\":' || CASE WHEN w.callback0conf IS NULL THEN 'null' ELSE ('\"' || w.callback0conf || '\"') END || ',\"confirmedCallbackURL\":' || CASE WHEN w.callback1conf IS NULL THEN 'null' ELSE ('\"' || w.callback1conf || '\"') END || ',\"watching_since\":\"' || w.inserted_ts || '\",\"derivation_path\":\"' || derivation_path || '\",\"pub32_index\":' || pub32_index || '}' FROM watching w, watching_by_pub32 w32 WHERE watching_by_pub32_id = w32.id AND w32.${where} = '${value}' AND w.watching AND NOT calledback1conf ORDER BY w.id")
  returncode=$?
  trace_rc ${returncode}

  # Stream the rows out as a JSON array, inserting commas between elements.
  local notfirst=false
  echo -n "{\"watches\":["
  local IFS=$'\n'
  for row in ${watches}
  do
    if ${notfirst}; then
      echo ","
    else
      notfirst=true
    fi
    trace "[getactivewatchesxpub] row=${row}"
    echo -n "${row}"
  done
  echo "]}"

  return ${returncode}
}
getactivexpubwatches() {
  # Emits {"watches":[...]} containing every active xpub-level watch
  # (one row per watched extended public key).
  trace "Entering getactivexpubwatches()..."

  local watches
  # Let's build the string directly with dbms instead of manipulating multiple strings afterwards, it's faster.
  # {"id":"${id}","pub32":"${pub32}","label":"${label}","derivation_path":"${derivation_path}","last_imported_n":${last_imported_n},"unconfirmedCallbackURL":"${cb0conf_url}","confirmedCallbackURL":"${cb1conf_url}","watching_since":"${timestamp}"}
  watches=$(sql "SELECT '{\"id\":' || id || ',\"pub32\":\"' || pub32 || '\",\"label\":\"' || label || '\",\"derivation_path\":\"' || derivation_path || '\",\"last_imported_n\":' || last_imported_n || ',\"unconfirmedCallbackURL\":' || CASE WHEN callback0conf IS NULL THEN 'null' ELSE ('\"' || callback0conf || '\"') END || ',\"confirmedCallbackURL\":' || CASE WHEN callback1conf IS NULL THEN 'null' ELSE ('\"' || callback1conf || '\"') END || ',\"watching_since\":\"' || inserted_ts || '\"}' FROM watching_by_pub32 WHERE watching ORDER BY id")
  returncode=$?
  trace_rc ${returncode}

  # Stream the rows out as a JSON array, inserting commas between elements.
  local notfirst=false
  echo -n "{\"watches\":["
  local IFS=$'\n'
  for row in ${watches}
  do
    if ${notfirst}; then
      echo ","
    else
      notfirst=true
    fi
    trace "[getactivexpubwatches] row=${row}"
    echo -n "${row}"
  done
  echo "]}"

  return ${returncode}
}
|
#!/bin/bash
# Git pre-commit hook that prevents committing with trailing
# whitespace or a blank line at EOL.
#
# Based on
# http://madebyted.com/blog/fight-whitespace-with-git/

# Pick the diff base: HEAD when it exists, otherwise (initial commit)
# git's well-known empty-tree object id.
if git rev-parse --verify HEAD >/dev/null 2>&1; then
    against=HEAD
else
    against=4b825dc642cb6eb9a060e54bf8d69288fbee4904
fi

# --check prints the offending file names and exits non-zero on
# whitespace errors; propagate that exit code.
git diff-index --check --cached "$against" --
status=$?

if [ "$status" -ne 0 ]; then
    echo "To commit anyway, use --no-verify"
    exit "$status"
fi
|
var cssbeautify = require('gulp-cssbeautify');
var gulp = require('gulp');
var imagemin = require('gulp-imagemin');
var jsprettify = require('gulp-jsbeautifier');
var path = require('path');
var pngcrush = require('imagemin-pngcrush');
var ROOT = path.join(__dirname, '..');
// Beautifies all CSS sources in place, skipping the aui-css output files.
gulp.task('format-css', function() {
    var files = [
        'src/**/*.css',
        '!src/aui-css/css/*.css'
    ];

    return gulp.src(files, { cwd: ROOT })
        .pipe(cssbeautify())
        .pipe(gulp.dest(path.join(ROOT, 'src/')));
});

// Formats JS sources in place using the repo's .jsbeautifyrc, excluding
// build output and generated loader/alias files.
gulp.task('format-js', function() {
    var configFile = path.join(ROOT, '.jsbeautifyrc');

    var files = [
        'src/**/*.js',
        '!build/**/*.js',
        '!src/aui-base/js/aui-aliases.js',
        '!src/aui-base/js/aui-loader.js',
        '!src/yui/js/*.js'
    ];

    return gulp.src(files, { cwd: ROOT })
        .pipe(jsprettify({
            config: configFile
        }))
        .pipe(gulp.dest(path.join(ROOT, 'src/')));
});

// Recompresses all PNGs in place (lossless) with imagemin + pngcrush.
gulp.task('format-img', function() {
    return gulp.src('src/**/*.png', { cwd: ROOT })
        .pipe(imagemin({
            progressive: true,
            svgoPlugins: [{
                removeViewBox: false
            }],
            use: [pngcrush()]
        }))
        .pipe(gulp.dest(path.join(ROOT, 'src/')));
});

// Umbrella task. NOTE(review): array-of-dependencies syntax is gulp 3.x;
// gulp 4 would need gulp.parallel(...) — confirm the pinned gulp version.
gulp.task('format', ['format-css', 'format-js', 'format-img']);
<reponame>nicholasjackson/event-sauce
package queue

import (
	"time"

	"github.com/nicholasjackson/sorcery/entities"
)

// Queue abstracts the backing message queue used by the application.
type Queue interface {
	// Add enqueues a raw payload under the given event name.
	Add(eventName string, payload string) error
	// AddEvent enqueues an event along with the callback URL to invoke
	// when it is processed.
	AddEvent(event *entities.Event, callback string) error
	// StartConsuming begins consuming from the queue, invoking callback
	// for each received item. NOTE(review): `size` is presumably the max
	// batch size per poll — confirm against the implementations.
	StartConsuming(size int, pollInterval time.Duration, callback func(callbackItem interface{}))
}
|
<reponame>dheerajbharsiya/MetalJsLearing
import './modal.scss';
import './CloseHeader.js';
import ModalBasicBody from './templates/modal-basic-body.js';
import templates from './Modal.soy.js';
import Component from 'metal-component';
import Soy from 'metal-soy';

// Soy-rendered modal dialog component (metal.js).
class Modal extends Component {
    // Hide the modal; mutating `shown` triggers a re-render.
    close() {
        this.shown = false;
    }

    // Lifecycle hook: after each render, mount the basic body template
    // into the #modal-body element.
    rendered() {
        let modalBody = document.getElementById('modal-body');
        new ModalBasicBody({name:'dheeraj'}, modalBody);
        //new ModalBasicBody({element: '#modal-body'});
    }
}
Soy.register(Modal, templates);

Modal.STATE = {
    body: {
        value: ''
    },
    header: {
        value: 'Default header'
    },
    shown: {
        // The default value is `false` (the original comment wrongly said
        // `true`): the modal starts hidden.
        value: false
    }
};

export default Modal;
|
package bibliotecaUFMA;

/**
 * Registers a new user (with an associated account) in a library.
 */
public class CadastroUsuarioFunction {

    /**
     * Creates an account and a user record and stores the user in the
     * library's user map, keyed by id.
     *
     * @param b         target library; registration fails when null
     * @param senha     account password
     * @param id        user id (also the map key)
     * @param nome      first name
     * @param sobrenome last name
     * @param cpf       national id number
     * @return true when the user was stored, false otherwise
     */
    public boolean cadastro(biblioteca b, int senha, int id, String nome, String sobrenome, long cpf) {
        if (b == null) {
            return false;
        }
        try {
            // `new` never returns null, so the original's null checks on
            // the freshly constructed objects were dead code and removed.
            contaUsuario userAccount = new contaUsuario(senha, id);
            usuario user = new usuario(nome, sobrenome, cpf, userAccount);
            b.getUsuarios().put(id, user);
            return true;
        } catch (Exception excessao) {
            // Preserve the original best-effort contract: any failure
            // (constructor or map access) maps to a false result.
            return false;
        }
    }
}
|
// Custom youtube-audio-stream with bitrate, start time option and reference to ffmpeg process
import * as ytdl from 'ytdl-core';
import * as FFmpeg from 'fluent-ffmpeg';
import * as through from 'through2';
import * as xtend from 'xtend';
import * as fs from 'fs';

/**
 * Stream the audio track of a YouTube video through ffmpeg.
 *
 * @param uri                 YouTube video URL/id, passed to ytdl-core.
 * @param opt                 options merged over the defaults below; `file`
 *                            redirects output to a file instead of a stream.
 * @param startTimeInSeconds  optional seek offset applied before encoding.
 * @returns the output stream and the underlying ffmpeg command object.
 */
export default function streamify(uri, opt?, startTimeInSeconds?) {
    opt = xtend(
        {
            videoFormat: 'mp4',
            quality: 'lowest',
            audioFormat: 'mp3',
            filter: filterVideo,
            applyOptions() {},
        },
        opt,
    );

    const video = ytdl(uri, opt);

    // Keep only formats in the requested container that carry audio.
    function filterVideo(format) {
        return format.container === opt.videoFormat && format.audioEncoding;
    }

    // Write to a file when opt.file is given, otherwise a passthrough stream.
    const stream = opt.file ? fs.createWriteStream(opt.file) : through();

    const ffmpeg = FFmpeg(video);
    opt.applyOptions(ffmpeg);

    // fluent-ffmpeg methods mutate and return the same command object, so
    // the optional seek can be applied up front instead of duplicating the
    // bitrate/format/pipe chain in both branches as the original did.
    if (startTimeInSeconds) {
        ffmpeg.setStartTime(startTimeInSeconds);
    }
    const output = ffmpeg
        .audioBitrate(128)
        .format(opt.audioFormat)
        .pipe(stream);

    video.on('info', stream.emit.bind(stream, 'info'));
    // output.on('error', video.end.bind(video));
    output.on('error', stream.emit.bind(stream, 'error'));

    return {
        stream,
        ffmpeg,
    };
}
|
import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestRegressor

# Load the dataset
data = pd.read_csv('data.csv')

# Derive calendar features from the raw date column.
data['date'] = pd.to_datetime(data['date'])
data['month'] = data['date'].dt.month
data['day'] = data['date'].dt.day
data['day_of_week'] = data['date'].dt.dayofweek
data['year'] = data['date'].dt.year

# Define input and output features
feature_cols = ['month', 'day', 'day_of_week', 'year', 'type', 'location']
X = data[feature_cols]
y = data['attendees']

# One-hot encode categorical features
cat_features = ['type', 'location']
X = pd.get_dummies(X, columns=cat_features)

# Build and fit the model
model = RandomForestRegressor(n_estimators=100)
model.fit(X, y)

# Test our input.
# BUG FIX: the original built the test frame with columns=X.columns — the
# *encoded* dummy column names — which no longer match the six raw values.
# Build it with the raw feature names, encode, then align to the training
# columns (unseen categories become all-zero dummy columns).
test_input = [[2, 28, 0, 2021, 'Party', 'San Francisco, CA']]
test_df = pd.DataFrame(test_input, columns=feature_cols)
encoded_test_input = pd.get_dummies(test_df, columns=cat_features).reindex(columns=X.columns, fill_value=0)
prediction = model.predict(encoded_test_input)[0]
print(f'We predict a total of {prediction:.2f} people will attend the event.')
package com.zutubi.android.ant;
/**
 * Thrown when a manifest cannot be parsed.
 *
 * @see Manifest
 */
public class ParseException extends Exception {
    private static final long serialVersionUID = 7467884156539661067L;

    /**
     * Creates an exception describing a parse failure.
     *
     * @param message details of what went wrong
     */
    public ParseException(final String message) {
        super(message);
    }

    /**
     * Creates an exception for a parse failure triggered by an underlying
     * exception.
     *
     * @param message details of what went wrong
     * @param cause   the exception that triggered this one
     */
    public ParseException(final String message, final Throwable cause) {
        super(message, cause);
    }
}
|
#!/bin/bash
set -x
set -e

# Download and build the LAPACK 3.8.0 reference implementation, installing
# the resulting static libraries into "$1/lib/".
. $DOWNLOAD_TOOL -u https://www.cp2k.org/static/downloads/lapack-3.8.0.tgz
cd ${JARVIS_TMP}
# BUG FIX: clean the directory we are actually about to extract; the
# original removed stale "lapack-3.0", leaving any previous 3.8.0 tree in
# place to pollute the fresh extraction.
rm -rf lapack-3.8.0
tar -xvf ${JARVIS_DOWNLOAD}/lapack-3.8.0.tgz
cd lapack-3.8.0
cp make.inc.example make.inc
make -j
# BUG FIX: -p keeps a pre-existing lib/ directory from aborting the script
# under `set -e`.
mkdir -p $1/lib/
cp *.a $1/lib/
|
##
# The file to be sourced if you're using zsh and want tab-completion.
##

# Source the plugin and completion functions.
source "${0:h}/cdc.sh"

##
# Add completion arguments.
# Each -X"[...]" pair registers a flag with its help text. The `- name`
# entries start mutually-exclusive argument sets, and the final positional
# spec completes against the repositories reported by _cdc_repo_list
# (defined in cdc.sh).
_cdc() {
    _arguments -s \
        -D"[Debug mode for when unexpected things are happening.]" \
        - help \
        -h"[Print this help.]" \
        - no_other_args \
        -n"[cd to the current directory in the stack.]" \
        -p"[cd to previous directory and pop from the stack]" \
        -t"[Toggle between the last two directories in the stack.]" \
        -i"[List all directories that are to be ignored.]" \
        -l"[List all directories that are cdc-able.]" \
        -L"[List all directories in which to search.]" \
        -d"[List the directories in stack.]" \
        - allow_arg \
        -u"[Push the directory onto the stack.]" \
        -U"[Do not push the directory onto the stack.]" \
        -r"[Only cdc to repositories.]" \
        -R"[cd to any directory, even it is not a repository.]" \
        -a"[cd to the directory even if it is ignored.]" \
        -s"[Re-source the config file ('~/.cdcrc')]" \
        -w"[Print directory location instead of changing to it]" \
        1::"[Directory to cd]:($(_cdc_repo_list))"
}

##
# Define completions.
compdef '_cdc' cdc
|
def calculate_deformation(force, length, area=2.0, elastic_modulus=30e6):
    """Axial deformation of a prismatic bar: delta = F * L / (A * E).

    The cross-section and material are now parameters (defaulting to the
    original hard-coded values) so the function works for any bar.

    Args:
        force: applied axial force (lb); sign carries through to the result.
        length: bar length (in).
        area: cross-sectional area (in^2). Default 2.0, as before.
        elastic_modulus: Young's modulus (psi). Default 30e6, as before.

    Returns:
        Elongation of the bar (in).
    """
    return (force * length) / (area * elastic_modulus)
#!/bin/bash
#
# Copyright (C) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Regenerates Kubernetes-style informers for every origin API group listed
# below, using the vendored informer-gen tool.
source "$(dirname "${BASH_SOURCE}")/lib/init.sh"

os::build::setup_env

os::util::ensure::built_binary_exists 'informer-gen' 'vendor/k8s.io/kubernetes/staging/src/k8s.io/code-generator/cmd/informer-gen'

# list of package to generate informers for
packages=(
  github.com/openshift/origin/pkg/authorization/apis/authorization
  github.com/openshift/origin/pkg/build/apis/build
  github.com/openshift/origin/pkg/apps/apis/apps
  github.com/openshift/origin/pkg/image/apis/image
  github.com/openshift/origin/pkg/oauth/apis/oauth
  github.com/openshift/origin/pkg/project/apis/project
  github.com/openshift/origin/pkg/quota/apis/quota
  github.com/openshift/origin/pkg/route/apis/route
  github.com/openshift/origin/pkg/network/apis/network
  github.com/openshift/origin/pkg/security/apis/security
  github.com/openshift/origin/pkg/template/apis/template
  github.com/openshift/origin/pkg/user/apis/user
)

# Runs informer-gen for a single API package; extra args pass through.
function generate_informers_for() {
  local package="$1";shift
  echo "-- Generating informers for ${package} ..."
  # The group package sits two directories above the apis package.
  grouppkg=$(realpath --canonicalize-missing --relative-to=$(pwd) ${package}/../..)
  informer-gen --logtostderr \
    --go-header-file=hack/boilerplate.txt \
    --input-dirs="${package}" \
    --output-package="${grouppkg}/generated/informers" \
    --internal-clientset-package "${grouppkg}/generated/internalclientset" \
    --listers-package "${grouppkg}/generated/listers" \
    "$@"
}

verify="${VERIFY:-}"

# remove the old informers (skipped in verify-only mode)
for pkg in "${packages[@]}"; do
  if [[ -z "${verify}" ]]; then
    grouppkg=$(realpath --canonicalize-missing --relative-to=$(pwd) ${pkg}/../..)
    go list -f '{{.Dir}}' "${grouppkg}/generated/informers/..." | xargs rm -rf
  fi
done

for pkg in "${packages[@]}"; do
  generate_informers_for "${pkg}"
done
|
#!/usr/bin/env bash
set -e

# Bootstrap dotfiles: clone the bare repo into ~/.dotfiles.git and check the
# tracked files out directly into $HOME, then apply macOS tweaks.
git clone --bare https://github.com/jawang35/dotfiles.git "${HOME}/.dotfiles.git"
git --git-dir="${HOME}/.dotfiles.git" --work-tree="${HOME}" checkout
git --git-dir="${HOME}/.dotfiles.git" --work-tree="${HOME}" submodule update --init --recursive
# Bare clones default to fetching nothing; restore the normal refspec.
git --git-dir="${HOME}/.dotfiles.git" --work-tree="${HOME}" config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"

# macOS-only system preferences.
if [ "$(uname -s)" == Darwin ]; then
    # Fast keyboard repeat
    defaults write NSGlobalDomain KeyRepeat -int 2
    defaults write NSGlobalDomain InitialKeyRepeat -int 15

    # Disable annoying automatic keyboard settings
    defaults write NSGlobalDomain NSAutomaticCapitalizationEnabled -bool false
    defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false
    defaults write NSGlobalDomain NSAutomaticPeriodSubstitutionEnabled -bool false
    defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false
    defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false

    # Dark mode
    defaults write AppleInterfaceStyle -string Dark

    # Disable startup sound
    sudo nvram SystemAudioVolume=" "

    # Cloudflare/APNIC DNS
    networksetup -setdnsservers Wi-Fi 1.1.1.1 1.0.0.1 2606:4700:4700::1111 2606:4700:4700::1001

    # Install Homebrew bundle if brew is available.
    if command -v brew > /dev/null 2>&1; then
        brew update && brew bundle --file="${HOME}/.config/brew/Brewfile"
    fi
fi

# shellcheck source=.bash_profile
source "${HOME}/.bash_profile"
|
class Stack:
    """Simple LIFO stack backed by a Python list.

    The top of the stack is the end of the underlying list, so push/pop
    are amortized O(1).
    """

    def __init__(self):
        # Internal storage; last element is the top of the stack.
        self.items = []

    def push(self, item):
        """Place ``item`` on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item.

        Raises:
            IndexError: if the stack is empty (list.pop semantics).
        """
        return self.items.pop()

    def peek(self):
        """Return the top item without removing it.

        Raises:
            IndexError: if the stack is empty.
        """
        return self.items[-1]

    def is_empty(self):
        """Return True when the stack holds no items."""
        # Idiomatic truthiness check instead of comparing against [].
        return not self.items
#!/bin/bash
#SBATCH --job-name=PW_run
#SBATCH --output=out.txt
#SBATCH --ntasks-per-node=28
#SBATCH --nodes=1
#SBATCH --time=168:00:00
#SBATCH -p extended-28core

# Slurm batch script: runs the penguin-watch JAGS model (2-model.R) on a
# single 28-core node with a 7-day wall-clock limit.
module load shared
module load R/3.6.2
module load gcc-stack
module load openblas/dynamic/0.2.18
module load lapack/gcc/64/3.6.0
module load JAGS/4.3.0
module load gnu-parallel/6.0

# User-level R library path for packages installed outside the module tree.
export R_LIBS=/gpfs/home/cyoungflesh/R_libs

cd /gpfs/home/cyoungflesh/penguin_watch_model/Scripts
Rscript 2-model.R
|
<gh_stars>0
package IO;

import java.util.ArrayList;

/**
 * Created by IntelliJ IDEA.
 * User: swyna
 * Date: Jun 3, 2011
 * Time: 1:58:27 AM
 * To change this template use File | Settings | File Templates.
 */
/**
 * Persists user records to "users.dat", one user per line as four
 * space-separated fields.
 *
 * NOTE(review): the individual fields must never contain spaces, or
 * load() will mis-split the line and silently drop the record — confirm
 * with callers what the four fields hold.
 */
public class UserFileIO extends FileIO {

    /**
     * Writes every user to users.dat. Each entry is assumed to be an
     * array of at least four strings; extra elements are ignored.
     */
    public void save(ArrayList<String[]> users) {
        setFile("users.dat");
        initOutput();
        for (String[] i : users) {
            write(i[0] + " " + i[1] + " " + i[2] + " " + i[3]);
        }
        closeOutput();
    }

    /**
     * Reads users.dat back into memory. Lines that do not split into
     * exactly four space-separated fields are silently skipped.
     *
     * @return the list of 4-element user records (possibly empty)
     */
    public ArrayList<String[]> load() {
        ArrayList<String[]> users = new ArrayList<String[]>();
        setFile("users.dat");
        initInput();
        String line = read();
        while (line != null) {
            String[] lineData = line.split(" ");
            if (lineData.length == 4) {
                users.add(new String[]{lineData[0], lineData[1], lineData[2], lineData[3]});
            }
            line = read();
        }
        closeInput();
        return users;
    }
}
|
<filename>tools/virtualDisk/test/test_virtualDisk/test_virtualDisk.java
import java.io.File;
import java.util.ArrayList;

/**
 * Smoke test for virtualDisk: derives a base directory from the current
 * working directory and constructs a virtualDisk rooted there.
 */
public class test_virtualDisk {
    // Base directory the virtual disk is created under.
    private static String baseLocation;

    public static void main(String[] args) {
        int i;
        ArrayList<String> subBaseLocation;
        // Split the working directory into path components, then rebuild
        // the path with the last four components dropped.
        // NOTE(review): the loop keeps components 0..(size-5), i.e. climbs
        // four directory levels above user.dir — confirm that matches the
        // expected repo layout.
        test_virtualDisk.baseLocation = System.getProperty("user.dir");
        subBaseLocation = fileToolset.pathParser(baseLocation);
        test_virtualDisk.baseLocation = "";
        for (i = 0; i < subBaseLocation.size() - 5; i++) {
            test_virtualDisk.baseLocation += subBaseLocation.get(i) + File.separator;
        }
        // Append the final kept component without a trailing separator.
        test_virtualDisk.baseLocation += subBaseLocation.get(i);
        virtualDisk vdisk = new virtualDisk(test_virtualDisk.baseLocation);
    }
}
|
#!/bin/bash
#
# Copyright 2019 held jointly by the individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Benchmark harness: times several ways of converting/counting/filtering
# ADAM Parquet alignment data by running scala scripts through adam-shell.
# Each script reads $INPUT and (where applicable) writes $OUTPUT; output is
# discarded and only the `time` result matters.
set -x

#ADAM="adam-shell"
ADAM="../../adam/bin/adam-shell"
#ADAM="adam-shell --conf spark.kryo.registrationRequired=true"

SAMPLE="NA12878.alignedHg38.duplicateMarked.baseRealigned"

# Conversion benchmarks: Parquet alignments -> BAM.
export INPUT="$SAMPLE.alignments.adam"
export OUTPUT="$SAMPLE.out.bam"

time ($ADAM -i convert_parquet_alignments_adam_dataset.scala &> /dev/null)
rm -Rf $OUTPUT
time ($ADAM -i convert_parquet_alignments_adam_rdd.scala &> /dev/null)
rm -Rf $OUTPUT
time ($ADAM -i convert_parquet_alignments_disq_adam.scala &> /dev/null)
rm -Rf $OUTPUT
#time ($ADAM -i convert_parquet_alignments_disq_convert.scala &> /dev/null)
#rm -Rf $SAMPLE.alignments.adam

# CRAM conversion is disabled pending the linked upstream issue.
export OUTPUT="$SAMPLE.out.cram"

# https://github.com/bigdatagenomics/adam/issues/2214
#time ($ADAM -i convert_parquet_alignments_adam_dataset.scala &> /dev/null)
#rm -Rf $OUTPUT
#time ($ADAM -i convert_parquet_alignments_adam_rdd.scala &> /dev/null)
#rm -Rf $OUTPUT
#time ($ADAM -i convert_parquet_alignments_disq_adam.scala &> /dev/null)
#rm -Rf $OUTPUT
#time ($ADAM -i convert_parquet_alignments_disq_convert.scala &> /dev/null)
#rm -Rf $SAMPLE.alignments.adam

# Count and filter benchmarks (read-only; no cleanup needed).
time ($ADAM -i count_parquet_alignments_adam_dataframe.scala &> /dev/null)
time ($ADAM -i count_parquet_alignments_adam_dataset.scala &> /dev/null)
time ($ADAM -i count_parquet_alignments_adam_rdd.scala &> /dev/null)
time ($ADAM -i count_parquet_alignments_sparksql.scala &> /dev/null)

time ($ADAM -i filter_parquet_alignments_adam_dataset.scala &> /dev/null)
time ($ADAM -i filter_parquet_alignments_adam_rdd.scala &> /dev/null)
time ($ADAM -i filter_parquet_alignments_sparksql.scala &> /dev/null)

time ($ADAM -i range_filter_parquet_alignments_adam_dataset.scala &> /dev/null)
time ($ADAM -i range_filter_parquet_alignments_adam_rdd.scala &> /dev/null)
time ($ADAM -i range_filter_parquet_alignments_sparksql.scala &> /dev/null)

# Partitioned range filters use the 1m-binned dataset; run twice each to
# observe warm-cache behavior.
export INPUT="$SAMPLE.alignments.1m.adam"

time ($ADAM -i range_filter_parquet_alignments_adam_partitioned_dataset.scala &> /dev/null)
time ($ADAM -i range_filter_parquet_alignments_adam_partitioned_rdd.scala &> /dev/null)
time ($ADAM -i range_filter_parquet_alignments_adam_partitioned_dataset.scala &> /dev/null)
time ($ADAM -i range_filter_parquet_alignments_adam_partitioned_rdd.scala &> /dev/null)

# Read/write round-trip benchmarks: Parquet in, Parquet out.
export INPUT="$SAMPLE.alignments.adam"
export OUTPUT="$SAMPLE.out.alignments.adam"

time ($ADAM -i rw_parquet_alignments_adam_dataframe.scala &> /dev/null)
rm -Rf $OUTPUT
time ($ADAM -i rw_parquet_alignments_adam_dataset.scala &> /dev/null)
rm -Rf $OUTPUT
time ($ADAM -i rw_parquet_alignments_adam_rdd.scala &> /dev/null)
rm -Rf $OUTPUT
|
#!/bin/bash
# Send SIGTERM to every process whose `ps aux` line matches the pattern
# given as $1.
#
# BUG FIX: the original left $user unset when no argument was supplied,
# turning the pipeline into `grep ""` — which matches EVERY process and
# tries to kill them all. It also matched its own grep process, and ran
# `kill` with no arguments when nothing matched. Require an argument,
# exclude the grep itself, and use `xargs -r` to skip an empty kill.
if [[ -z "$1" ]]; then
    echo "usage: $0 <pattern>" >&2
    exit 1
fi
user="$1"

ps aux | grep "$user" | grep -v grep | awk '{print $2}' | xargs -r kill -15
#ifndef LEXUTIL_H
#define LEXUTIL_H

#include <stdbool.h>

/* Maximum identifier length accepted by lex_readIdent. */
#define EG_MAX_IDENT_LG 48

/*
 * Lexical analyzer designed for parsing configuration files.
 * It works on one file at a time; it is meant to be used by other
 * modules that perform the syntactic analysis on top of it.
 */

/*****************************************************************************
 **********State maintained by the analyzer over the course of a scan*********
 ******************************************************************************/

/* Current character, as read by lex_readChar. */
extern char lex_CurrentChar;
/* Current line of the lexical scan within the file. */
extern int lex_LineCount;
/* Becomes 0 at end of file or on error; nonzero while scanning is OK. */
extern int lex_EtatOK;

/*****************************************************************************
 ************************LEXICAL ANALYSIS FUNCTIONS****************************
 ******************************************************************************/

/* Attempts to open the given file.
 * The file is opened for lexical analysis; all subsequent calls to this
 * module's functions refer to it.
 * PARAMS:
 *   text file to open.
 *   log id to write messages to (0 = none).
 * RETURNS:
 *   true(1) on success, false(0) on failure.
 */
bool lex_openFile(char*, int);

/* Closes the file currently read by the analyzer.
 * RETURNS:
 *   1 if OK, 0 otherwise.
 *   Also returns 1 when no file was open in the analyzer.
 */
bool lex_closeFile();

/* Reads one character. Increments the line counter on a newline.
 * Returns the value of lex_EtatOK. */
int lex_readChar();

/* Reads whitespace up to a non-blank character (1) or EOF (0).
 * Also consumes comments (introduced by #) up to the newline. */
int lex_readWhites();

/* Skips a line, stopping early on any non-commented character.
 * Returns 1 if the line was fully skipped, 0 if a character was hit. */
int lex_readToNewline();

/* Reads until the given character is met (does not consume it).
 * Returns 1 if the character was reached, 0 otherwise (EOF). */
int lex_readUntil(char);

/* On a syntax error, skips at least one line to resynchronize. */
void lex_errorLoop();

/* Tries to read an identifier, stored into the given string on success.
 * Returns 0 on error, 1 if OK. */
bool lex_readIdent(char*);

/* Reads a string that may contain any character; ALLOCATES THE MEMORY.
 * Returns the string length, -1 on error. */
int lex_readString(char**);

/* Reads an integer. 1 on success, 0 on error. No allocation. */
bool lex_readInt(int*);

/* Reads a float. Requires a '.' in the number. */
bool lex_readFloat(float*);

/* Reads a boolean: 0 or 1, true or false (case-insensitive). */
bool lex_readBool(bool*);

#endif
|
<gh_stars>0
package config_test

import (
	"encoding/json"
	"encoding/xml"
	"fmt"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
	"gopkg.in/yaml.v2"

	. "go.octolab.org/toolkit/config"
)

// TestSecret verifies that a Secret never leaks its raw value through any
// of the common serialization or formatting paths: %#v and %s printing,
// and JSON/XML/YAML marshalling.
func TestSecret(t *testing.T) {
	type password struct {
		XMLName struct{} `json:"-" xml:"password" yaml:"-"`
		Value   Secret   `json:"password" xml:"value,attr" yaml:"password"`
	}
	secret := password{Value: "secret"}

	// Each case produces bytes from the struct in a different way; the
	// assertion below only requires that the raw secret is absent.
	tests := map[string]struct {
		marshal func(password) ([]byte, error)
	}{
		"print by `%#v`": {
			func(pass password) ([]byte, error) {
				str := fmt.Sprintf("%#v", pass)
				return []byte(str), nil
			},
		},
		"print by `%s`": {
			func(pass password) ([]byte, error) {
				str := fmt.Sprintf("%s", pass.Value) //nolint:gosimple
				return []byte(str), nil
			},
		},
		"json marshal": {
			func(pass password) ([]byte, error) {
				return json.Marshal(pass)
			},
		},
		"xml marshal": {
			func(pass password) ([]byte, error) {
				return xml.Marshal(pass)
			},
		},
		"yaml marshal": {
			func(pass password) ([]byte, error) {
				return yaml.Marshal(pass)
			},
		},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			raw, err := test.marshal(secret)
			assert.NoError(t, err)
			// The serialized form must not contain the cleartext secret.
			assert.False(t, strings.Contains(string(raw), string(secret.Value)))
		})
	}
}
|
require "culerity"
require "forwardable"
require "webrat/core/save_and_open_page"

module Webrat #:nodoc:
  # Minimal response wrapper exposing only the page body.
  class CulerityResponse
    attr_reader :body

    def initialize(body)
      @body = body
    end
  end

  # Webrat session driven by Culerity (Celerity browser over a remote
  # JRuby process). Browser and server handles are kept in globals so a
  # single browser is shared across sessions and cleaned up at exit.
  class CuleritySession #:nodoc:
    include Webrat::SaveAndOpenPage
    extend Forwardable

    def initialize(*args) # :nodoc:
    end

    def response
      CulerityResponse.new(response_body)
    end

    def current_url
      container.url
    end

    # Navigates to url, resetting any nested scopes first.
    # NOTE(review): http_method and data are currently ignored.
    def visit(url = nil, http_method = :get, data = {})
      reset
      # TODO querify data
      container.goto(absolute_url(url))
    end
    webrat_deprecate :visits, :visit

    def click_link_within(selector, text_or_title_or_id)
      within(selector) do
        click_link(text_or_title_or_id)
      end
    end
    webrat_deprecate :clicks_link_within, :click_link_within

    def reload
      reset
      container.refresh
    end
    webrat_deprecate :reloads, :reload

    def clear_cookies
      container.clear_cookies
    end

    def execute_script(source)
      container.execute_script(source)
    end

    # Innermost active scope; falls back to the whole page.
    def current_scope
      scopes.last || base_scope
    end

    def scopes
      @_scopes ||= []
    end

    def base_scope
      @_base_scope ||= CulerityScope.new(container)
    end

    # Runs the block with the scope narrowed to the element matching the
    # CSS selector, restoring the previous scope afterwards.
    def within(selector)
      xpath = Webrat::XML.css_to_xpath(selector).first
      scope = CulerityScope.new(container.element_by_xpath(xpath))
      scopes.push(scope)
      ret = yield
      scopes.pop
      return ret
    end

    # Scopes to the named frame. With a block, behaves like #within;
    # without one, pushes the scope and returns it (caller must pop).
    def within_frame(name)
      scope = CulerityScope.new(container.frame(:name => name))
      scopes.push(scope)
      if block_given?
        ret = yield
        scopes.pop
        return ret
      end
      scope
    end

    # Metaprogramming helper: defines each named method to forward to the
    # current scope and then wait for the browser to settle.
    def self.delegate_and_wait(*methods)
      for method in methods
        module_eval(<<-RUBY, __FILE__, __LINE__+1)
          def #{method}(*args, &block)
            result = current_scope.__send__(:#{method}, *args, &block)
            container.wait
            result
          end
        RUBY
      end
    end

    delegate_and_wait :check, :checks
    delegate_and_wait :choose, :chooses
    delegate_and_wait :click_button, :clicks_button
    delegate_and_wait :click_link, :clicks_link
    delegate_and_wait :fill_in, :fills_in
    delegate_and_wait :attach_file
    delegate_and_wait :field_by_xpath
    delegate_and_wait :field_labeled
    delegate_and_wait :field_with_id
    delegate_and_wait :response_body
    delegate_and_wait :select, :selects
    delegate_and_wait :select_date, :selects_date
    delegate_and_wait :uncheck, :unchecks

    protected

    # Lazily boots the app server on first browser access.
    def container
      setup unless $setup_done
      browser
    end

    def browser
      @_browser ||= begin
        $browser ||= ::Culerity::RemoteBrowserProxy.new(server, {:browser => :firefox, :log_level => :off})
        $browser.clear_cookies
        $browser
      end
    end

    def server
      $server ||= ::Culerity::run_server
    end

    # Resolves relative paths against the configured application host.
    def absolute_url(url) #:nodoc:
      if url =~ Regexp.new('^https?://')
        url
      elsif url =~ Regexp.new('^/')
        "#{current_host}#{url}"
      else
        "#{current_host}/#{url}"
      end
    end

    def current_host
      @_current_host ||= [Webrat.configuration.application_address, Webrat.configuration.application_port].join(":")
    end

    def setup #:nodoc:
      silence_stream(STDOUT) do
        Webrat.start_app_server
      end
      teardown_at_exit
      $setup_done = true
    end

    # Registers cleanup of the app server, browser and Culerity server.
    def teardown_at_exit #:nodoc:
      at_exit do
        silence_stream(STDOUT) do
          Webrat.stop_app_server
        end
        $browser.exit if $browser
        $server.close if $server
      end
    end

    private

    # Drops all scope state (used on navigation/reload).
    def reset
      @_scopes = nil
      @_base_scope = nil
    end
  end
end
|
package main

import "fmt"

// Fibonacci prints, space separated, every Fibonacci number that does
// not exceed max.
func Fibonacci(max int) {
	a, b := 0, 1
	for a <= max {
		fmt.Print(a, " ")
		a, b = b, a+b
	}
}

func main() {
	Fibonacci(10)
}
#!/bin/bash
#Entrypoint script that sets up Vault config required for Goldfish
#API calls were converted from:
#- https://github.com/Caiyeon/goldfish/blob/master/vagrant/policies/goldfish.hcl
#- https://github.com/Caiyeon/goldfish/wiki/Production-Deployment#1-prepare-vault-only-needs-to-be-done-once
#
#Based of https://github.com/Caiyeon/goldfish/blob/master/docker/entrypoint.sh

#Be verbose for demo
set -x

CA_CERT="--cacert ${VAULT_CACERT}"

# Authenticate to Vault with this pod's Kubernetes service-account JWT
# and extract a client token for the "demo3" role.
JWT=`cat /run/secrets/kubernetes.io/serviceaccount/token`
VAULT_TOKEN=`curl -v ${CA_CERT} ${VAULT_ADDR}/v1/auth/kubernetes/login -d "{\"jwt\":\"$JWT\", \"role\":\"demo3\"}" | jq .auth.client_token | tr -d \"`

#One place for curl options
CURL_OPT="-v ${CA_CERT} -H X-Vault-Token:${VAULT_TOKEN}"

#TLS for goldfish
# Issue a server certificate; write cert+issuing CA bundle and key to /certs.
PKI_DATA=`curl ${CURL_OPT} ${VAULT_ADDR}/v1/pki/issue/goldfish-tls -d '{"common_name":"goldfish","ip_sans":"1.1.1.11"}' | jq .data`
echo $PKI_DATA | jq -r .certificate > /certs/cert_bundle.pem
echo $PKI_DATA | jq -r .issuing_ca>> /certs/cert_bundle.pem
echo $PKI_DATA | jq -r .private_key > /certs/key.pem

# Pin the AppRole role-id used by goldfish.
curl ${CURL_OPT} ${VAULT_ADDR}/v1/auth/approle/role/goldfish/role-id -d '{"role_id":"goldfish"}'

# initialize transit key. This is not strictly required but is proper procedure
curl ${CURL_OPT} -X POST ${VAULT_ADDR}/v1/transit/keys/goldfish

# production goldfish needs a generic secret endpoint to hot reload settings from. See Configuration page for details
curl ${CURL_OPT} ${VAULT_ADDR}/v1/secret/goldfish/conf -d '{"DefaultSecretPath":"secret/", "TransitBackend":"transit", "UserTransitKey":"usertransit", "ServerTransitKey":"goldfish", "BulletinPath":"secret/goldfish/msg/"}'

#Generate token to start Goldfish with
# The secret-id is response-wrapped (1h TTL) and handed over via a file.
WRAPPED_TOKEN=`curl ${CURL_OPT} --header "X-Vault-Wrap-TTL: 3600" -X POST ${VAULT_ADDR}/v1/auth/approle/role/goldfish/secret-id | jq -r .wrap_info.token`
echo $WRAPPED_TOKEN > /secrets/wrapped_token
"""
Test cases for PyEval
<NAME>, July 2018
"""
import unittest
from pyeval_expression import Expression
class TestPyEval(unittest.TestCase):
"""
Validation of Expression and Operator classes.
No setup function is needed
"""
def test_positive_operand_expression(self):
"""
Tests a single positive operand expression
"""
expr = Expression("53")
self.assertEqual("53 ", expr.result(), "ERROR: Positive operand")
def test_negative_operand_expression(self):
"""
Tests a single negative operand expression
"""
expr = Expression("-53")
self.assertEqual("-53 ", expr.result(), "ERROR: Negative operand")
def test_double_term_expression(self):
"""
Tests a set of double term expressions
"""
expr = Expression("53+2")
self.assertEqual(
"53 2 + ", expr.result(), "ERROR: Double positive term expression"
)
expr = Expression("-53+2")
self.assertEqual(
"-53 2 + ",
expr.result(),
"ERROR: Negative/positive term expression",
)
expr = Expression("53+-2")
self.assertEqual(
"53 -2 + ",
expr.result(),
"ERROR: Positive/negative term expression",
)
expr = Expression("-53+-2")
self.assertEqual(
"-53 -2 + ",
expr.result(),
"ERROR: Double negative term expression",
)
def test_double_term_operands(self):
"""
Tests a set of operands
"""
expr = Expression("53+2")
self.assertEqual(
"53 2 + ", expr.result(), "ERROR: Additive expression"
)
expr = Expression("53-2")
self.assertEqual(
"53 2 - ", expr.result(), "ERROR: Subtrative expression"
)
expr = Expression("53*2")
self.assertEqual(
"53 2 * ", expr.result(), "ERROR: Multiplicative expression"
)
expr = Expression("53/2")
self.assertEqual("53 2 / ", expr.result(), "ERROR: Divide expression")
def test_triple_term_expression(self):
"""
Tests a set of triple term expressions
"""
expr = Expression("53+2+37")
self.assertEqual(
"53 2 37 + + ", expr.result(), "ERROR: Add/Add expression"
)
expr = Expression("53+2*37")
self.assertEqual(
"53 2 37 * + ", expr.result(), "ERROR: Add/Multiply expression"
)
expr = Expression("53*2+37")
self.assertEqual(
"53 2 * 37 + ", expr.result(), "ERROR: Multiply/Add expression"
)
expr = Expression("53*2*37")
self.assertEqual(
"53 2 37 * * ",
expr.result(),
"ERROR: Multiply/Multiply expression",
)
def test_whitespace_expression(self):
"""
Tests a set of expressions with a variety of whitespace
"""
expr = Expression("53+2+37")
self.assertEqual(
"53 2 37 + + ", expr.result(), "ERROR: No whitespace expression"
)
expr = Expression("53 + 2 + 37")
self.assertEqual(
"53 2 37 + + ",
expr.result(),
"ERROR: Infixed whitespace expression",
)
expr = Expression(" 53+2+37 ")
self.assertEqual(
"53 2 37 + + ",
expr.result(),
"ERROR: Pre/post-fixed whitespace expression",
)
expr = Expression(" 53 + 2 + 37 ")
self.assertEqual(
"53 2 37 + + ",
expr.result(),
"ERROR: Pre/post/in-fixed whitespace expression",
)
expr = Expression(" 53 + 2 + 37 ")
self.assertEqual(
"53 2 37 + + ",
expr.result(),
"ERROR: Multiple whitespace expression",
)
# This test should throw an exception - spaces in between operands
# should give an error
with self.assertRaises(SyntaxError):
expr = Expression(" 53 + - 2 + 37 ")
expr.parse()
|
#!/usr/bin/env bash
set -euo pipefail

# Fetch (or update) the deploy-script repo into tmp/deploy-script and pin
# the working tree to the latest origin/master.
mkdir -p tmp

echo 'Fetching deploy-script...'
# Clone only on the first run; later runs reuse the existing checkout.
[[ -d tmp/deploy-script ]] || git clone git@github.com:toggl/deploy-script.git tmp/deploy-script

cd tmp/deploy-script
git fetch
git checkout master
# Discard any local changes so the script always runs what's on master.
git reset --hard origin/master
|
import tensorflow as tf

# Importing the dataset (MNIST: 60k train / 10k test 28x28 grayscale digits)
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()

# Reshaping the data to fit the model (add the single channel dimension).
# NOTE(review): pixel values are left in [0, 255]; most pipelines also
# normalize with x / 255.0, which usually trains much better — confirm
# whether skipping normalization was intentional.
x_train = x_train.reshape(60000, 28, 28, 1)
x_test = x_test.reshape(10000, 28, 28, 1)

# Building the model: one conv+pool block, then a dense classifier over
# the 10 digit classes.
model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(32, kernel_size = (3, 3), activation='relu', input_shape=(28, 28, 1)),
    tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax')
])

# Compiling the model; the sparse loss matches the integer (non-one-hot)
# labels returned by load_data().
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy']
)

# Training the model
model.fit(x=x_train,y=y_train, epochs=5)

# Evaluating the model
model.evaluate(x_test, y_test)
// Maps a selected object to the property-section class that should render
// it. NOTE(review): the referenced types (EObject, EditPart,
// PictogramElement, ITypeMapper) suggest the Eclipse/Graphiti tabbed
// properties framework — confirm against the declaring plugin.
public class CustomTypeMapper implements ITypeMapper {

    // Returns the property-section class for the given selection; order
    // matters since the checks are instanceof-based.
    public Class<?> mapType(Object object) {
        if (object instanceof EObject) {
            // Map EObject to its corresponding property section
            return EObjectPropertySection.class;
        } else if (object instanceof EditPart) {
            // Map EditPart to its corresponding property section
            return EditPartPropertySection.class;
        } else if (object instanceof PictogramElement) {
            // Map PictogramElement to its corresponding property section
            return PictogramElementPropertySection.class;
        } else {
            // Default mapping for other types of objects
            return DefaultPropertySection.class;
        }
    }
}
<reponame>HMS-Core/hms-drive-serverdemo<gh_stars>10-100
/*
* Copyright 2020. Huawei Technologies Co., Ltd. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.huawei.drive.demo.files;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.huawei.drive.demo.utils.HttpClientUtil;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
/**
*
*
* @author drive
* @since
*/
public class FilesCopy {
public static void main(String[] args) throws IOException {
String url = "https://drive.cloud.hicloud.com/drive/v1/files";
String fileId = "AAAKRqd2AIYCkUs1SedB9ewNSzNJ6gCAA";
String access_token = "<KEY>;
JSONObject fileInfo = filesCopy(url, access_token, fileId);
System.out.println(fileInfo.toJSONString());
}
private static JSONObject filesCopy(String url, String access_token, String fileId) throws IOException {
StringBuilder stringBuilder = new StringBuilder("");
stringBuilder.append("/").append(fileId).append("/").append("copy");
stringBuilder.append("?").append("fields=*");
HttpPost httpPost = new HttpPost(url + stringBuilder);
httpPost.setHeader("Authorization","Bearer " + access_token);
httpPost.setHeader("Content-Type", "application/json");
httpPost.setHeader("Accept", "application/json");
JSONObject jsonParam = new JSONObject();
jsonParam.put("fileName", "HWFileCopy001");
StringEntity entity = new StringEntity(jsonParam.toString());
entity.setContentType("application/json");
httpPost.setEntity(entity);
CloseableHttpResponse response = HttpClientUtil.getClient().execute(httpPost);
try {
HttpEntity responseEntity = response.getEntity();
String ret = responseEntity != null ? EntityUtils.toString(responseEntity) : null;
JSONObject jsonObject = (JSONObject) JSON.parse(ret);
EntityUtils.consume(responseEntity);
return jsonObject;
}finally {
response.close();
}
}
}
|
#!/bin/bash -v
# Launches a CWL run of the Medicaid pipeline inside the "webserver"
# container, using a fresh timestamped run directory.
rundir=/opt/airflow/cwl_rundir/medicaid-years/run-`date +%Y-%m-%d-%H-%M`
docker exec webserver bash -c "source /root/anaconda/etc/profile.d/conda.sh && conda activate nsaph && mkdir -p ${rundir} && cd ${rundir} && cwl-runner /opt/airflow/project/cms/src/cwl/medicaid.cwl --database /opt/airflow/project/database.ini --connection_name nsaph_cms --input /data/incoming/medicaid/selected/ "
#!/bin/bash
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This script generates common_words.gperf. See README.md for more info.

# Where to download the full wordlist if needed.
FULL_WORDLIST_URL=https://norvig.com/ngrams/count_1w.txt
# Where the wordlist is, or should be, stored on disk.
FULL_WORDLIST_PATH=${FULL_WORDLIST_PATH:-count_1w.txt}
# Where the list of brands found in the common word list is found.
BRAND_WORDLIST=${BRAND_WORDLIST:-brands_in_common_words.list}
# Where to store the output file.
OUTPUT_PATH=${OUTPUT_PATH:-common_words.gperf}

set -e

if [ ! -e "$FULL_WORDLIST_PATH" ]; then
    echo "= Fetching wordlist"
    wget -q -O "$FULL_WORDLIST_PATH" "$FULL_WORDLIST_URL"
    USING_TEMPORARY_WORDLIST=1
else
    echo "= Using provided wordlist"
fi

echo "= Generating regular expressions"
REGEX_TMPFILE=$(mktemp)
# Anchor each brand name so only whole-word matches are filtered out.
sed 's/^/^/; s/$/$/' "$BRAND_WORDLIST" > "$REGEX_TMPFILE"

echo "= Generating gperf list"
# Keep words longer than 2 chars, drop brand names, take the 10k most
# frequent, and wrap them in gperf's keyword-section syntax.
awk 'length($1) > 2 {print $1}' "$FULL_WORDLIST_PATH" \
    | grep -v -f "$REGEX_TMPFILE" \
    | head -n 10000 | sort \
    | awk 'BEGIN { print "%%" } { print $0", 0" } END { print "%%" }' \
    > "$OUTPUT_PATH"

echo "= Cleaning up"
rm "$REGEX_TMPFILE"
# BUG FIX: the original ended with `[ $USING_TEMPORARY_WORDLIST ] && rm ...`,
# so whenever the caller supplied a wordlist the test failed as the last
# command and the whole script exited with status 1 despite succeeding.
if [ -n "${USING_TEMPORARY_WORDLIST:-}" ]; then
    rm "$FULL_WORDLIST_PATH"
fi
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Multiply-included message file, hence no include guard.
#include "build/build_config.h"
#include "content/common/content_export.h"
#include "content/common/content_param_traits.h"
#include "content/public/common/common_param_traits.h"
#include "ipc/ipc_channel_handle.h"
#include "ipc/ipc_message_macros.h"
#include "ui/gfx/native_widget_types.h"
#include "ui/gfx/rect.h"
#include "webkit/glue/webcursor.h"
#if defined(OS_POSIX)
#include "base/file_descriptor_posix.h"
#endif
#undef IPC_MESSAGE_EXPORT
#define IPC_MESSAGE_EXPORT CONTENT_EXPORT
#define IPC_MESSAGE_START PluginMsgStart
// Parameters sent with PluginMsg_Init to create/initialize a plugin instance:
// the containing native window, the plugin and page URLs, the <embed>/<object>
// attribute name/value pairs, and the hosting render view's routing id.
IPC_STRUCT_BEGIN(PluginMsg_Init_Params)
  IPC_STRUCT_MEMBER(gfx::NativeViewId, containing_window)
  IPC_STRUCT_MEMBER(GURL, url)
  IPC_STRUCT_MEMBER(GURL, page_url)
  // arg_names/arg_values are parallel vectors: arg_values[i] is the value of
  // the attribute named arg_names[i].
  IPC_STRUCT_MEMBER(std::vector<std::string>, arg_names)
  IPC_STRUCT_MEMBER(std::vector<std::string>, arg_values)
  IPC_STRUCT_MEMBER(bool, load_manually)
  IPC_STRUCT_MEMBER(int, host_render_view_routing_id)
IPC_STRUCT_END()
// Parameters for a URL request issued by the plugin through the renderer
// (PluginHostMsg_URLRequest): target URL, HTTP method, link target, request
// body, and flags controlling notification/popup/redirect behavior.
IPC_STRUCT_BEGIN(PluginHostMsg_URLRequest_Params)
  IPC_STRUCT_MEMBER(std::string, url)
  IPC_STRUCT_MEMBER(std::string, method)
  IPC_STRUCT_MEMBER(std::string, target)
  IPC_STRUCT_MEMBER(std::vector<char>, buffer)
  IPC_STRUCT_MEMBER(int, notify_id)
  IPC_STRUCT_MEMBER(bool, popups_allowed)
  IPC_STRUCT_MEMBER(bool, notify_redirects)
IPC_STRUCT_END()
// Response metadata delivered to the plugin for a resource request
// (PluginMsg_DidReceiveResponse / PluginMsg_DidReceiveManualResponse).
IPC_STRUCT_BEGIN(PluginMsg_DidReceiveResponseParams)
  IPC_STRUCT_MEMBER(unsigned long, id)
  IPC_STRUCT_MEMBER(std::string, mime_type)
  IPC_STRUCT_MEMBER(std::string, headers)
  IPC_STRUCT_MEMBER(uint32, expected_length)
  IPC_STRUCT_MEMBER(uint32, last_modified)
  IPC_STRUCT_MEMBER(bool, request_is_seekable)
IPC_STRUCT_END()
// Geometry update for a plugin: window and clip rects, transparency, and the
// TransportDIB back buffers used by windowless plugins
// (windowless_buffer_index selects which of buffer0/buffer1 is current).
IPC_STRUCT_BEGIN(PluginMsg_UpdateGeometry_Param)
  IPC_STRUCT_MEMBER(gfx::Rect, window_rect)
  IPC_STRUCT_MEMBER(gfx::Rect, clip_rect)
  IPC_STRUCT_MEMBER(bool, transparent)
  IPC_STRUCT_MEMBER(TransportDIB::Handle, windowless_buffer0)
  IPC_STRUCT_MEMBER(TransportDIB::Handle, windowless_buffer1)
  IPC_STRUCT_MEMBER(int, windowless_buffer_index)
  IPC_STRUCT_MEMBER(TransportDIB::Handle, background_buffer)
IPC_STRUCT_END()
//-----------------------------------------------------------------------------
// PluginProcess messages
// These are messages sent from the browser to the plugin process.

// Tells the plugin process to create a new channel for communication with a
// given renderer. The channel name is returned in a
// PluginProcessHostMsg_ChannelCreated message. The renderer ID is passed so
// that the plugin process reuses an existing channel to that process if it
// exists. This ID is a unique opaque identifier generated by the browser
// process.
// NOTE(review): off_the_record presumably flags an incognito-profile
// renderer — confirm against the handler.
IPC_MESSAGE_CONTROL2(PluginProcessMsg_CreateChannel,
                     int /* renderer_id */,
                     bool /* off_the_record */)

// Tells the plugin process to notify every connected renderer of the pending
// shutdown, so we don't mistake it for a crash.
IPC_MESSAGE_CONTROL0(PluginProcessMsg_NotifyRenderersOfPendingShutdown)
//-----------------------------------------------------------------------------
// PluginProcessHost messages
// These are messages sent from the plugin process to the browser process.

// Response to a PluginProcessMsg_CreateChannel message.
IPC_MESSAGE_CONTROL1(PluginProcessHostMsg_ChannelCreated,
                     IPC::ChannelHandle /* channel_handle */)

#if defined(OS_WIN)
// Destroys the given window's parent on the UI thread.
IPC_MESSAGE_CONTROL2(PluginProcessHostMsg_PluginWindowDestroyed,
                     HWND /* window */,
                     HWND /* parent */)
#endif

#if defined(USE_X11)
// On X11, the mapping between NativeViewId and X window ids
// is known only to the browser. This message lets the plugin process
// ask about a NativeViewId that was provided by the renderer.
// It will get 0 back if it's a bogus input.
IPC_SYNC_MESSAGE_CONTROL1_1(PluginProcessHostMsg_MapNativeViewId,
                            gfx::NativeViewId /* input: native view id */,
                            gfx::PluginWindowHandle /* output: X window id */)
#endif

#if defined(OS_MACOSX)
// On Mac OS X, we need the browser to keep track of plugin windows so
// that it can add and remove them from stacking groups, hide and show the
// menu bar, etc. We pass the window rect for convenience so that the
// browser can easily tell if the window is fullscreen.

// Notifies the browser that the plugin has selected a window (i.e., brought
// it to the front and wants it to have keyboard focus).
IPC_MESSAGE_CONTROL3(PluginProcessHostMsg_PluginSelectWindow,
                     uint32 /* window ID */,
                     gfx::Rect /* window rect */,
                     bool /* modal */)

// Notifies the browser that the plugin has shown a window.
IPC_MESSAGE_CONTROL3(PluginProcessHostMsg_PluginShowWindow,
                     uint32 /* window ID */,
                     gfx::Rect /* window rect */,
                     bool /* modal */)

// Notifies the browser that the plugin has hidden a window.
IPC_MESSAGE_CONTROL2(PluginProcessHostMsg_PluginHideWindow,
                     uint32 /* window ID */,
                     gfx::Rect /* window rect */)

// Notifies the browser that a plugin instance has requested a cursor
// visibility change.
IPC_MESSAGE_CONTROL1(PluginProcessHostMsg_PluginSetCursorVisibility,
                     bool /* cursor visibility */)
#endif
//-----------------------------------------------------------------------------
// Plugin messages
// These are messages sent from the renderer process to the plugin process.

// Tells the plugin process to create a new plugin instance with the given
// id. A corresponding WebPluginDelegateStub is created which hosts the
// WebPluginDelegateImpl.
IPC_SYNC_MESSAGE_CONTROL1_1(PluginMsg_CreateInstance,
                            std::string /* mime_type */,
                            int /* instance_id */)

// The WebPluginDelegateProxy sends this to the WebPluginDelegateStub in its
// destructor, so that the stub deletes the actual WebPluginDelegateImpl
// object that it's hosting.
IPC_SYNC_MESSAGE_CONTROL1_0(PluginMsg_DestroyInstance,
                            int /* instance_id */)

// Synchronously obtains a new route id from the plugin process.
IPC_SYNC_MESSAGE_CONTROL0_1(PluginMsg_GenerateRouteID,
                            int /* id */)

// The messages below all map to WebPluginDelegate methods.
IPC_SYNC_MESSAGE_ROUTED1_1(PluginMsg_Init,
                           PluginMsg_Init_Params,
                           bool /* result */)

// Used to synchronously request a paint for windowless plugins.
IPC_SYNC_MESSAGE_ROUTED1_0(PluginMsg_Paint,
                           gfx::Rect /* damaged_rect */)

// Sent by the renderer after it paints from its backing store so that the
// plugin knows it can send more invalidates.
IPC_MESSAGE_ROUTED0(PluginMsg_DidPaint)

// Synchronously fetches the route id of the plugin's scriptable object.
IPC_SYNC_MESSAGE_ROUTED0_1(PluginMsg_GetPluginScriptableObject,
                           int /* route_id */)

// Gets the form value of the plugin instance synchronously.
IPC_SYNC_MESSAGE_ROUTED0_2(PluginMsg_GetFormValue,
                           string16 /* value */,
                           bool /* success */)

// Notifies the plugin that a frame load it requested finished, with the
// NPAPI reason code and the notify id of the originating request.
IPC_MESSAGE_ROUTED3(PluginMsg_DidFinishLoadWithReason,
                    GURL /* url */,
                    int /* reason */,
                    int /* notify_id */)

// Updates the plugin location.
IPC_MESSAGE_ROUTED1(PluginMsg_UpdateGeometry,
                    PluginMsg_UpdateGeometry_Param)

// A synchronous version of above.
IPC_SYNC_MESSAGE_ROUTED1_0(PluginMsg_UpdateGeometrySync,
                           PluginMsg_UpdateGeometry_Param)

// Sets or clears keyboard focus on the plugin instance (synchronous).
IPC_SYNC_MESSAGE_ROUTED1_0(PluginMsg_SetFocus,
                           bool /* focused */)

// Forwards an input event to the plugin; returns whether the plugin handled
// it and the cursor it wants shown.
IPC_SYNC_MESSAGE_ROUTED1_2(PluginMsg_HandleInputEvent,
                           IPC::WebInputEventPointer /* event */,
                           bool /* handled */,
                           WebCursor /* cursor type*/)

// Informs the plugin whether the containing content area has focus.
IPC_MESSAGE_ROUTED1(PluginMsg_SetContentAreaFocus,
                    bool /* has_focus */)
#if defined(OS_WIN)
// Notifies the plugin of an updated IME composition: the composition text,
// its clause segmentation, the target clause, and the caret position.
// FIX(consistency): the comma separating the third argument used to sit
// before its /* target */ comment (`std::vector<int>, /* target */`) unlike
// every other message in this file; moved after the comment. The
// preprocessed token stream is unchanged.
IPC_MESSAGE_ROUTED4(PluginMsg_ImeCompositionUpdated,
                    string16 /* text */,
                    std::vector<int> /* clauses */,
                    std::vector<int> /* target */,
                    int /* cursor_position */)

// Notifies the plugin that the IME composition was committed as |text|.
IPC_MESSAGE_ROUTED1(PluginMsg_ImeCompositionCompleted,
                    string16 /* text */)
#endif
#if defined(OS_MACOSX)
// Informs the plugin whether its containing window has focus.
IPC_MESSAGE_ROUTED1(PluginMsg_SetWindowFocus,
                    bool /* has_focus */)

// Tells the plugin its container was hidden.
IPC_MESSAGE_ROUTED0(PluginMsg_ContainerHidden)

// Tells the plugin its container was shown, with the window/view frames and
// whether it has focus.
IPC_MESSAGE_ROUTED3(PluginMsg_ContainerShown,
                    gfx::Rect /* window_frame */,
                    gfx::Rect /* view_frame */,
                    bool /* has_focus */)

// Notifies the plugin that the frames of its window/view changed.
IPC_MESSAGE_ROUTED2(PluginMsg_WindowFrameChanged,
                    gfx::Rect /* window_frame */,
                    gfx::Rect /* view_frame */)

// Mac variant of the IME commit notification (see OS_WIN section above).
IPC_MESSAGE_ROUTED1(PluginMsg_ImeCompositionCompleted,
                    string16 /* text */)
#endif

// Synchronously informs the plugin that a request is about to be sent
// (e.g. after a redirect), with the status code observed so far.
IPC_SYNC_MESSAGE_ROUTED3_0(PluginMsg_WillSendRequest,
                           unsigned long /* id */,
                           GURL /* url */,
                           int /* http_status_code */)

// Delivers response metadata for resource request |id|.
IPC_MESSAGE_ROUTED1(PluginMsg_DidReceiveResponse,
                    PluginMsg_DidReceiveResponseParams)

// Delivers a chunk of response data for resource request |id|.
IPC_MESSAGE_ROUTED3(PluginMsg_DidReceiveData,
                    unsigned long /* id */,
                    std::vector<char> /* buffer */,
                    int /* data_offset */)

// Resource request |id| completed successfully.
IPC_MESSAGE_ROUTED1(PluginMsg_DidFinishLoading,
                    unsigned long /* id */)

// Resource request |id| failed.
IPC_MESSAGE_ROUTED1(PluginMsg_DidFail,
                    unsigned long /* id */)

// Delivers the result of a javascript: URL evaluation to the plugin.
IPC_MESSAGE_ROUTED4(PluginMsg_SendJavaScriptStream,
                    GURL /* url */,
                    std::string /* result */,
                    bool /* success */,
                    int /* notify_id */)

// Response/data/completion for the manual (full-page) data stream.
IPC_MESSAGE_ROUTED2(PluginMsg_DidReceiveManualResponse,
                    GURL /* url */,
                    PluginMsg_DidReceiveResponseParams)
IPC_MESSAGE_ROUTED1(PluginMsg_DidReceiveManualData,
                    std::vector<char> /* buffer */)
IPC_MESSAGE_ROUTED0(PluginMsg_DidFinishManualLoading)
IPC_MESSAGE_ROUTED0(PluginMsg_DidManualLoadFail)

// Reply for a plugin-initiated URL request (see PluginHostMsg_URLRequest).
IPC_MESSAGE_ROUTED3(PluginMsg_HandleURLRequestReply,
                    unsigned long /* resource_id */,
                    GURL /* url */,
                    int /* notify_id */)

// Reply for a plugin-initiated HTTP byte-range request.
IPC_MESSAGE_ROUTED2(PluginMsg_HTTPRangeRequestReply,
                    unsigned long /* resource_id */,
                    int /* range_request_id */)

// Signal/reset the modal-dialog event associated with |containing_window|.
IPC_MESSAGE_CONTROL1(PluginMsg_SignalModalDialogEvent,
                     gfx::NativeViewId /* containing_window */)
IPC_MESSAGE_CONTROL1(PluginMsg_ResetModalDialogEvent,
                     gfx::NativeViewId /* containing_window */)

#if defined(OS_MACOSX)
// This message, used only on 10.6 and later, transmits the "fake"
// window handle allocated by the browser on behalf of the renderer
// to the GPU plugin.
IPC_MESSAGE_ROUTED1(PluginMsg_SetFakeAcceleratedSurfaceWindowHandle,
                    gfx::PluginWindowHandle /* window */)
#endif

// Asks the plugin to clear stored data for |site| matching |flags|, no older
// than |max_age|; answered by PluginHostMsg_ClearSiteDataResult.
IPC_MESSAGE_CONTROL3(PluginMsg_ClearSiteData,
                     std::string /* site */,
                     uint64 /* flags */,
                     uint64 /* max_age */)
//-----------------------------------------------------------------------------
// PluginHost messages
// These are messages sent from the plugin process to the renderer process.
// They all map to the corresponding WebPlugin methods.

// Sends the plugin window information to the renderer.
// The window parameter is a handle to the window if the plugin is a windowed
// plugin. It is NULL for windowless plugins.
IPC_SYNC_MESSAGE_ROUTED1_0(PluginHostMsg_SetWindow,
                           gfx::PluginWindowHandle /* window */)

#if defined(OS_WIN)
// The modal_loop_pump_messages_event parameter is an event handle which is
// passed in for windowless plugins and is used to indicate if messages
// are to be pumped in sync calls to the plugin process. Currently used
// in HandleEvent calls.
IPC_SYNC_MESSAGE_ROUTED2_0(PluginHostMsg_SetWindowlessData,
                           HANDLE /* modal_loop_pump_messages_event */,
                           gfx::NativeViewId /* dummy_activation_window*/)

// Send the IME status retrieved from a windowless plug-in. A windowless plug-in
// uses the IME attached to a browser process as a renderer does. A plug-in
// sends this message to control the IME status of a browser process. I would
// note that a plug-in sends this message to a renderer process that hosts this
// plug-in (not directly to a browser process) so the renderer process can
// update its IME status.
IPC_MESSAGE_ROUTED2(PluginHostMsg_NotifyIMEStatus,
                    int /* input_type */,
                    gfx::Rect /* caret_rect */)
#endif

// Asks the renderer to start a URL request on the plugin's behalf.
IPC_MESSAGE_ROUTED1(PluginHostMsg_URLRequest,
                    PluginHostMsg_URLRequest_Params)

// Cancels an in-flight resource request.
IPC_MESSAGE_ROUTED1(PluginHostMsg_CancelResource,
                    int /* id */)

// Asks the renderer to repaint the given rect of the plugin area.
IPC_MESSAGE_ROUTED1(PluginHostMsg_InvalidateRect,
                    gfx::Rect /* rect */)

// Synchronously binds the window's script NPObject / the plugin element
// NPObject to the given route id.
IPC_SYNC_MESSAGE_ROUTED1_1(PluginHostMsg_GetWindowScriptNPObject,
                           int /* route id */,
                           bool /* success */)
IPC_SYNC_MESSAGE_ROUTED1_1(PluginHostMsg_GetPluginElement,
                           int /* route id */,
                           bool /* success */)

// Synchronously resolves the proxy configuration for |url|.
IPC_SYNC_MESSAGE_ROUTED1_2(PluginHostMsg_ResolveProxy,
                           GURL /* url */,
                           bool /* result */,
                           std::string /* proxy list */)

// Cookie access on behalf of the plugin.
IPC_MESSAGE_ROUTED3(PluginHostMsg_SetCookie,
                    GURL /* url */,
                    GURL /* first_party_for_cookies */,
                    std::string /* cookie */)
IPC_SYNC_MESSAGE_ROUTED2_1(PluginHostMsg_GetCookies,
                           GURL /* url */,
                           GURL /* first_party_for_cookies */,
                           std::string /* cookies */)

// Asks the renderer to cancel the document load.
IPC_MESSAGE_ROUTED0(PluginHostMsg_CancelDocumentLoad)

// Starts an HTTP byte-range request; answered by
// PluginMsg_HTTPRangeRequestReply.
IPC_MESSAGE_ROUTED3(PluginHostMsg_InitiateHTTPRangeRequest,
                    std::string /* url */,
                    std::string /* range_info */,
                    int /* range_request_id */)

// Pauses/resumes delivery of data for |resource_id|.
IPC_MESSAGE_ROUTED2(PluginHostMsg_DeferResourceLoading,
                    unsigned long /* resource_id */,
                    bool /* defer */)

// Reports a script exception message back to the renderer.
IPC_SYNC_MESSAGE_CONTROL1_0(PluginHostMsg_SetException,
                            std::string /* message */)

// The plugin process is about to shut down.
IPC_MESSAGE_CONTROL0(PluginHostMsg_PluginShuttingDown)

#if defined(OS_MACOSX)
// Reports a plugin focus change to the renderer.
IPC_MESSAGE_ROUTED1(PluginHostMsg_FocusChanged,
                    bool /* focused */)

// Asks the renderer to start IME for the plugin.
IPC_MESSAGE_ROUTED0(PluginHostMsg_StartIme)

//----------------------------------------------------------------------
// Legacy Core Animation plugin implementation rendering directly to screen.

// This message, used in Mac OS X 10.5 and earlier, is sent from the plug-in
// process to the renderer process to indicate that the plug-in allocated a
// new TransportDIB that holds the GPU's rendered image. This information is
// then forwarded to the browser process via a similar message.
IPC_MESSAGE_ROUTED4(PluginHostMsg_AcceleratedSurfaceSetTransportDIB,
                    gfx::PluginWindowHandle /* window */,
                    int32 /* width */,
                    int32 /* height */,
                    TransportDIB::Handle /* handle to the TransportDIB */)

// Synthesize a fake window handle for the plug-in to identify the instance
// to the browser, allowing mapping to a surface for hardware accelleration
// of plug-in content. The browser generates the handle which is then set on
// the plug-in. |opaque| indicates whether the content should be treated as
// opaque.
IPC_MESSAGE_ROUTED1(PluginHostMsg_BindFakePluginWindowHandle,
                    bool /* opaque */)

// This message, used only on 10.6 and later, is sent from the plug-in process
// to the renderer process to indicate that the plugin allocated a new
// IOSurface object of the given width and height. This information is then
// forwarded on to the browser process.
//
// NOTE: the original intent was to pass a mach port as the IOSurface
// identifier but it looks like that will be a lot of work. For now we pass an
// ID from IOSurfaceGetID.
IPC_MESSAGE_ROUTED4(PluginHostMsg_AcceleratedSurfaceSetIOSurface,
                    gfx::PluginWindowHandle /* window */,
                    int32 /* width */,
                    int32 /* height */,
                    uint64 /* surface_id */)

// On the Mac, shared memory can't be allocated in the sandbox, so
// the TransportDIB used by the plug-in for rendering has to be allocated
// and managed by the browser. This is a synchronous message, use with care.
IPC_SYNC_MESSAGE_ROUTED1_1(PluginHostMsg_AllocTransportDIB,
                           size_t /* requested memory size */,
                           TransportDIB::Handle /* output: DIB handle */)

// Since the browser keeps handles to the allocated transport DIBs, this
// message is sent to tell the browser that it may release them when the
// renderer is finished with them.
IPC_MESSAGE_ROUTED1(PluginHostMsg_FreeTransportDIB,
                    TransportDIB::Id /* DIB id */)

// This message notifies the renderer process (and from there the
// browser process) that the plug-in swapped the buffers associated
// with the given "window", which should cause the browser to redraw
// the various plug-ins' contents.
IPC_MESSAGE_ROUTED2(PluginHostMsg_AcceleratedSurfaceBuffersSwapped,
                    gfx::PluginWindowHandle /* window */,
                    uint64 /* surface_handle */)

//----------------------------------------------------------------------
// New Core Animation plugin implementation rendering via compositor.

// Notifies the renderer process that this plugin will be using the
// accelerated rendering path.
IPC_MESSAGE_ROUTED0(PluginHostMsg_AcceleratedPluginEnabledRendering)

// Notifies the renderer process that the plugin allocated a new
// IOSurface into which it is rendering. The renderer process forwards
// this IOSurface to the GPU process, causing it to be bound to a
// texture from which the compositor can render. Any previous
// IOSurface allocated by this plugin must be implicitly released by
// the receipt of this message.
IPC_MESSAGE_ROUTED3(PluginHostMsg_AcceleratedPluginAllocatedIOSurface,
                    int32 /* width */,
                    int32 /* height */,
                    uint32 /* surface_id */)

// Notifies the renderer process that the plugin produced a new frame
// of content into its IOSurface, and therefore that the compositor
// needs to redraw.
IPC_MESSAGE_ROUTED0(PluginHostMsg_AcceleratedPluginSwappedIOSurface)
#endif

// Result of PluginMsg_ClearSiteData.
IPC_MESSAGE_CONTROL1(PluginHostMsg_ClearSiteDataResult,
                     bool /* success */)

// Answer to PluginMsg_WillSendRequest redirect handling: whether the
// redirect for |resource_id| is allowed to proceed.
IPC_MESSAGE_ROUTED2(PluginHostMsg_URLRedirectResponse,
                    bool /* allow */,
                    int /* resource_id */)
//-----------------------------------------------------------------------------
// NPObject messages
// These are messages used to marshall NPObjects. They are sent both from the
// plugin to the renderer and from the renderer to the plugin.
// Each maps to the corresponding NPAPI NPObject operation.

// Releases the remote NPObject.
IPC_SYNC_MESSAGE_ROUTED0_0(NPObjectMsg_Release)

// NPN_HasMethod.
IPC_SYNC_MESSAGE_ROUTED1_1(NPObjectMsg_HasMethod,
                           content::NPIdentifier_Param /* name */,
                           bool /* result */)

// NPN_Invoke / NPN_InvokeDefault (is_default selects the default method).
IPC_SYNC_MESSAGE_ROUTED3_2(NPObjectMsg_Invoke,
                           bool /* is_default */,
                           content::NPIdentifier_Param /* method */,
                           std::vector<content::NPVariant_Param> /* args */,
                           content::NPVariant_Param /* result_param */,
                           bool /* result */)

// NPN_HasProperty.
IPC_SYNC_MESSAGE_ROUTED1_1(NPObjectMsg_HasProperty,
                           content::NPIdentifier_Param /* name */,
                           bool /* result */)

// NPN_GetProperty.
IPC_SYNC_MESSAGE_ROUTED1_2(NPObjectMsg_GetProperty,
                           content::NPIdentifier_Param /* name */,
                           content::NPVariant_Param /* property */,
                           bool /* result */)

// NPN_SetProperty.
IPC_SYNC_MESSAGE_ROUTED2_1(NPObjectMsg_SetProperty,
                           content::NPIdentifier_Param /* name */,
                           content::NPVariant_Param /* property */,
                           bool /* result */)

// NPN_RemoveProperty.
IPC_SYNC_MESSAGE_ROUTED1_1(NPObjectMsg_RemoveProperty,
                           content::NPIdentifier_Param /* name */,
                           bool /* result */)

// NPN_Invalidate.
IPC_SYNC_MESSAGE_ROUTED0_0(NPObjectMsg_Invalidate)

// NPN_Enumerate: returns the object's property identifiers.
IPC_SYNC_MESSAGE_ROUTED0_2(NPObjectMsg_Enumeration,
                           std::vector<content::NPIdentifier_Param> /* value */,
                           bool /* result */)

// NPN_Construct.
IPC_SYNC_MESSAGE_ROUTED1_2(NPObjectMsg_Construct,
                           std::vector<content::NPVariant_Param> /* args */,
                           content::NPVariant_Param /* result_param */,
                           bool /* result */)

// NPN_Evaluate: evaluates |script| against the object.
IPC_SYNC_MESSAGE_ROUTED2_2(NPObjectMsg_Evaluate,
                           std::string /* script */,
                           bool /* popups_allowed */,
                           content::NPVariant_Param /* result_param */,
                           bool /* result */)
|
#!/bin/sh
# Benchmark setup (Phoronix-Test-Suite style): extract the test video and
# write a small launcher script named `rav1e`.
7z x Bosphorus_1920x1080_120fps_420_8bit_YUV_Y4M.7z
# The launcher is written via a double-quoted echo, so everything is expanded
# NOW except the escaped \$NUM_CPU_CORES and \$LOG_FILE, which are left
# literal for the benchmark harness to substitute when the script runs.
echo "#!/bin/sh
./rav1e-20191023.exe Bosphorus_1920x1080_120fps_420_8bit_YUV.y4m --threads \$NUM_CPU_CORES --tile-rows 4 --tile-cols 4 --output output --limit 60 > log.out 2>&1
rm -f output
tr -s '\r' '\n' < log.out > \$LOG_FILE" > rav1e
chmod +x rav1e
|
<reponame>navikt/sykepengesoknad
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import { Link } from 'react-router';
import { sykepengesoknad as sykepengesoknadPt, soknadPt } from '../../propTypes';
import logger from '../../logging';
import { getUrlTilSoknad } from '../../utils/urlUtils';
// Fallback view shown when an application (søknad) is in an invalid state:
// on mount it logs the broken state, then asks the user to start over.
class StartIgjen extends Component {
    componentDidMount() {
        const { soknad } = this.props;
        // Applications from the old platform have no soknadstype; tag them
        // explicitly so the log entry identifies the originating platform.
        const type = soknad.soknadstype ? soknad.soknadstype : 'ARBEIDSTAKER_GAMMEL_PLATTFORM';
        // Only new-platform applications carry a question list worth logging.
        const sporsmalsliste = soknad.soknadstype ? JSON.stringify(soknad.sporsmal) : null;
        logger.error({
            message: `Ugyldig tilstand i søknad av typen ${type} med ID: ${soknad.id}`,
            sporsmalsliste,
        });
    }
    render() {
        const { soknad } = this.props;
        // User-facing text is Norwegian by design; the link restarts the form.
        return (<div className="panel">
            <div className="hode hode--informasjon">
                <h1 className="hode__tittel">Oops, nå har vi mistet dataene dine</h1>
                <p className="hode__melding">
                    Derfor må du dessverre <Link
                        className="lenke"
                        to={getUrlTilSoknad(soknad.id)}>fylle ut søknaden på nytt</Link>.
                </p>
            </div>
        </div>);
    }
}
// Accepts either the legacy or the new application shape.
StartIgjen.propTypes = {
    soknad: PropTypes.oneOfType([sykepengesoknadPt, soknadPt]),
};
export default StartIgjen;
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase: copies each built framework into
# the app bundle, strips invalid architectures and (optionally) re-signs.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active platform/toolchain.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies $1 (a .framework path) into the app's frameworks folder, strips
# architectures not in VALID_ARCHS, re-signs if required, and (pre-Xcode 7)
# embeds the Swift runtime dylibs the binary links against.
install_framework()
{
  # Resolve the source: prefer the exact path under BUILT_PRODUCTS_DIR, then
  # its basename there, then the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  # The executable may live inside the .framework bundle or directly in the
  # destination (non-framework case).
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # otool lists the @rpath/libswift* dependencies; exit preserves otool's
    # status through the pipeline.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# $1: path to sign. No-op unless an identity is expanded and code signing is
# both required and allowed by the build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# $1: binary to thin in place. Removes every architecture slice that is not
# listed in VALID_ARCHS (lipo rewrites the file), then reports what was
# stripped.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the pod framework for the active build configuration. Debug and
# Release embed the same framework; a case statement keeps the two
# previously-duplicated branches in one place (behavior unchanged).
case "$CONFIGURATION" in
  Debug|Release)
    install_framework "$BUILT_PRODUCTS_DIR/AppAnalyticsObjC/AppAnalyticsObjC.framework"
    ;;
esac
|
<gh_stars>0
/*
Copyright IBM Corp. 2017 All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package experiments
import (
"flag"
"github.com/Yunpeng-J/HLF-2.2/core/ledger/kvledger/benchmark/chainmgmt"
)
// txConf captures the transaction related configurations.
// numTotalTxs specifies the total transactions that should be executed and committed across chains.
// numParallelTxsPerChain specifies the parallel transactions on each of the chains.
// numWritesPerTx specifies the number of keys to write in each transaction.
// numReadsPerTx specifies the number of keys to read in each transaction. Note: this parameter should
// match numWritesPerTx for normal benchmarks; it can be set to zero to make batch update measurements.
type txConf struct {
	numTotalTxs            int
	numParallelTxsPerChain int
	numWritesPerTx         int
	numReadsPerTx          int
}
// dataConf captures the data related configurations.
// numKVs specifies the number of total key-values across chains.
// kvSize specifies the size of a key-value (in bytes).
// useJSON specifies whether the stored value is in JSON format (relevant for CouchDB).
type dataConf struct {
	numKVs  int
	kvSize  int
	useJSON bool
}
// configuration captures all the configurations for an experiment.
// For details of individual configuration, see comments on the specific type.
type configuration struct {
	chainMgrConf *chainmgmt.ChainMgrConf
	batchConf    *chainmgmt.BatchConf
	dataConf     *dataConf
	txConf       *txConf
}
// emptyConf returns an empty configuration with all nested structs allocated
// (zero-valued), so callers can dereference any section without nil checks.
func emptyConf() *configuration {
	return &configuration{
		chainMgrConf: &chainmgmt.ChainMgrConf{},
		batchConf:    &chainmgmt.BatchConf{},
		txConf:       &txConf{},
		dataConf:     &dataConf{},
	}
}
// confFromTestParams consumes the parameters passed by an experiment
// and returns the configuration loaded with the parsed param values.
// Flags not present in testParams keep the zero values from emptyConf;
// flag.ExitOnError terminates the process on a malformed parameter.
func confFromTestParams(testParams []string) *configuration {
	conf := emptyConf()
	flags := flag.NewFlagSet("testParams", flag.ExitOnError)
	// chainMgrConf
	dataDir := flags.String("DataDir", conf.chainMgrConf.DataDir, "Dir for ledger data")
	numChains := flags.Int("NumChains", conf.chainMgrConf.NumChains, "Number of chains")
	// txConf
	numParallelTxsPerChain := flags.Int("NumParallelTxPerChain",
		conf.txConf.numParallelTxsPerChain, "Number of TxSimulators concurrently on each chain")
	numTotalTxs := flags.Int("NumTotalTx",
		conf.txConf.numTotalTxs, "Number of total transactions")
	numWritesPerTx := flags.Int("NumWritesPerTx",
		conf.txConf.numWritesPerTx, "number of keys written in each Tx")
	numReadsPerTx := flags.Int("NumReadsPerTx",
		conf.txConf.numReadsPerTx, "number of keys to read in each Tx")
	// batchConf
	batchSize := flags.Int("BatchSize",
		conf.batchConf.BatchSize, "number of Txs in each batch")
	// dataConf
	numKVs := flags.Int("NumKVs",
		conf.dataConf.numKVs, "the keys are named as key_0, key_1,... upto key_(NumKVs-1)")
	kvSize := flags.Int("KVSize",
		conf.dataConf.kvSize, "size of the key-value in bytes")
	useJSON := flags.Bool("UseJSONFormat", conf.dataConf.useJSON, "should CouchDB use JSON for values")
	flags.Parse(testParams)
	// Copy the parsed values back into the nested configuration structs.
	conf.chainMgrConf.DataDir = *dataDir
	conf.chainMgrConf.NumChains = *numChains
	conf.txConf.numParallelTxsPerChain = *numParallelTxsPerChain
	conf.txConf.numTotalTxs = *numTotalTxs
	conf.txConf.numWritesPerTx = *numWritesPerTx
	conf.txConf.numReadsPerTx = *numReadsPerTx
	conf.batchConf.BatchSize = *batchSize
	conf.dataConf.numKVs = *numKVs
	conf.dataConf.kvSize = *kvSize
	conf.dataConf.useJSON = *useJSON
	return conf
}
|
const express = require('express')
const app = express()
const port = 9000
const fs = require('fs')
const path = require('path')
const bodyParser = require("body-parser");
var cors = require('cors')
const dree = require('dree')
// dree scan options: follow symlinks, limit recursion depth.
const options = {
    followLinks: true,
    depth: 5
};
// Data locations (relative to the server's working directory).
const current = "./data/current/"
const projects = "./data/projects";
const templates = "./data/templates";
const uikits = "./data/uikits";
const build = "/app/sveltekit/src/lib/pages";
const local = "./data";
// Large limits: whole page/project JSON definitions are posted in one body.
app.use(bodyParser.json({ limit: '50mb' }));
app.use(bodyParser.urlencoded({ limit: '50mb', extended: true }));
app.use(express.json({limit: '50mb'}));
app.use(cors())
// return templates
// app.get ( '/templates' , ( req , res ) => {
// const tree = dree.scan ( path.resolve ( templates) )
// const files = dree.parseTree ( tree , options )
// res.json ( tree )
// })
// GET / — return the current project's config.json.
// NOTE(review): require() caches the module after the first call, so changes
// to config.json on disk are not re-read until the process restarts —
// confirm this is intended (fs.readFileSync would re-read each request).
app.get('/' , ( req, res ) => {
    const rawdata = require( path.resolve ( current ) + '/config.json' )
    res.json ( rawdata )
})
// GET /tree/:name — return the dree directory tree of ./data/<name>.
// NOTE(review): :name is interpolated into a filesystem path without
// validation; a "../" segment would escape the data dir — confirm callers
// are trusted or add sanitization.
app.get ( '/tree/:name' , ( req , res ) => {
    const tree = dree.scan ( path.resolve ( local ) + '/' + req.params.name )
    res.json ( tree )
})
//return data folders
// app.get('/folders', (req, res) => {
// const files = fs.readdirSync ( path.resolve ( local ) )
// res.json ( files )
// })
// app.get('/folders/:name', (req, res) => {
// let searchPath = req.params.name
// req.query.folder ? searchPath += '/' + req.query.folder : null
// const files = fs.readdirSync ( path.resolve ( local ) + '/' + searchPath )
// res.json ( files )
// })
// app.get('/file/:folder/:name' , ( req , res ) => {
// const rawdata = fs.readFileSync ( path.resolve ( local ) + '/' + req.params.folder + '/' + req.params.name )
// res.json ( JSON.parse(rawdata) )
// })
// GET /file?path=... — respond with the parsed JSON contents of the file at
// the given path, or an empty object when no path query param was supplied.
app.get('/file', (req, res) => {
    const filePath = req.query.path
    if (!filePath) {
        res.json({})
        return
    }
    const contents = fs.readFileSync(path.resolve(filePath))
    res.json(JSON.parse(contents))
})
// GET /folder?path=... — echo the requested folder path back as
// { path: ... }, or an empty object when no path query param was supplied.
app.get('/folder', (req, res) => {
    const folderPath = req.query.path
    res.json(folderPath ? { path: folderPath } : {})
})
// app.get('/dir' , (req, res) => {
// console.log ( req.params.path )
// res.json ( {name:req.params.path} )
// //let stream = fs.mkdirSync( path.resolve ( req.params.path ) )
// //res.send ( req.body )
// })
// app.get('/projects', (req, res) => {
// const files = fs.readdirSync ( path.resolve ( projects ) )
// res.json ( files )
// })
// app.get('/projects/:name', (req, res) => {
// const rawdata = fs.readFileSync ( path.resolve ( projects ) + '/' + req.params.name )
// res.json ( JSON.parse(rawdata) )
// })
// app.get('/templates', (req, res) => {
// const files = fs.readdirSync ( path.resolve ( templates ) )
// console.log ( files )
// res.json ( files )
// })
// app.get('/templates/:name', (req, res) => {
// const rawdata = fs.readFileSync ( path.resolve ( templates ) + '/' + req.params.name )
// res.json ( JSON.parse(rawdata) )
// })
// app.get('/uikits', (req, res) => {
// const files = fs.readdirSync ( path.resolve ( uikits ) )
// res.json ( files )
// })
// app.get('/uikits/:name', (req, res) => {
// const rawdata = fs.readFileSync ( path.resolve ( uikits ) + '/' + req.params.name )
// res.json ( JSON.parse(rawdata) )
// })
// POST /template/:name — persist the JSON request body as
// ./data/templates/<name>.json and echo the body back to the client.
app.post('/template/:name' , (req, res) => {
    let stream = fs.createWriteStream( path.resolve ( templates ) + '/' + req.params.name + '.json' )
    stream.once('open', function(fd) {
        stream.write(JSON.stringify(req.body));
        stream.end();
    });
    // Responds immediately; the write completes asynchronously.
    res.send ( req.body )
})
// POST /project/:name — persist the JSON request body as a project file and
// echo it back.
// BUG FIX: the previous code called path.resolve(req.params.path), but this
// route's only parameter is :name, so req.params.path was always undefined
// and path.resolve(undefined) threw on every request. Now an explicit
// body-supplied target path is honored when present; otherwise the body is
// written to ./data/projects/<name>.json (the original, commented-out intent).
app.post('/project/:name' , (req, res) => {
    const target = req.body.path
        ? path.resolve(req.body.path)
        : path.resolve(projects) + '/' + req.params.name + '.json'
    let stream = fs.createWriteStream(target)
    stream.once('open', function(fd) {
        stream.write(JSON.stringify(req.body));
        stream.end();
    });
    // Responds immediately; the write completes asynchronously.
    res.send ( req.body )
})
// POST /uikit/:name — persist the JSON request body as
// ./data/uikits/<name>.json and echo the body back to the client.
app.post('/uikit/:name', (req, res) => {
    const target = path.resolve(uikits) + '/' + req.params.name + '.json'
    const stream = fs.createWriteStream(target)
    stream.once('open', () => {
        stream.write(JSON.stringify(req.body))
        stream.end()
    })
    // Responds immediately; the write completes asynchronously.
    res.send(req.body)
})
// POST /file/save — write the JSON request body to the path named by
// req.body.path and echo the body back.
// NOTE(review): the client controls the destination path; confirm this
// server only runs in a trusted/local environment.
app.post('/file/save' , (req, res) => {
    let stream = fs.createWriteStream( path.resolve ( req.body.path ) )
    stream.once('open', function(fd) {
        stream.write(JSON.stringify(req.body));
        stream.end();
    });
    res.send ( req.body )
})
// Bind on all interfaces so the server is reachable from outside a container.
app.listen(port,'0.0.0.0',(err)=>{
    if ( err ){
        console.log ( err )
    } else {
        console.log("server is listening on port " , port );
    }
})
<filename>demos/VariantesDePhrases/VariantesDePhrases.js
// Global state for the demo page.
var struct;                          // current jsRealB structure (source text)
var $langue,$generer,$tab;           // jQuery handles: language select, generate button, table
var $struct,$tbody,$infos,$message;  // jQuery handles: structure input, table body, info/message areas
// All sentence-type flags supported; `types` holds the per-language subset.
var allTypes=["neg","contr","pas","prog","perf","int","mod"];
var types; // changes depending on the language
// Multi-valued flags: the possible values for interrogative ("int") and
// modality ("mod") sentence types.
var pos={"int":["yon","wos","was","wod","wad","woi","wai","whe","why","whn","how","muc"],
    "mod":["poss","perm","nece","obli","will"]}
var nb,ex; // generated-sentence counter; examples
// Manage a three-state checkbox (cycles indeterminate -> unchecked -> checked).
// Adapted from https://css-tricks.com/indeterminate-checkboxes/
function threeStatesCB() {
    // readOnly is (ab)used as a marker that the box was in the indeterminate state.
    if (this.readOnly) this.checked=this.readOnly=false;
    else if (!this.checked) this.readOnly=this.indeterminate=true;
}
// Generate one sentence: append a table row to $tab showing the value of
// each type flag in obj, then the realization of expression s with those
// flags applied. Increments the global sentence counter nb.
function generer(s,$tab,obj){
    // console.log("generer(%o)",obj);
    var $tr=$("<tr/>");
    for (var i = 0; i < types.length; i++) {
        var v=obj[types[i]];
        $tr.append(v?("<td>"+v+"</td>"):"<td/>");
    }
    // Clone so applying .typ() does not mutate the shared expression.
    $tr.append("<td>"+s.clone().typ(obj)+"</td>");
    $tab.append($tr);
    nb++;
}
// Recursively generate every combination of type flag values.
// types: the flags still to assign; obj accumulates the assignment so far.
// For each flag: always try false, and — if its checkbox is checked or
// indeterminate — also try its positive value(s). Multi-valued flags (those
// in `pos`) try only their first value when indeterminate, all values when
// fully checked.
function genererTypes(s,$tab,types,obj){
    // console.log("genererTypes(%o,%o)",types,obj);
    if (types.length==0){
        generer(s,$tab,obj)
    } else {
        var type=types[0]
        obj[type]=false;
        genererTypes(s,$tab,types.slice(1),obj);
        var $cbType=$("#cb-"+type)
        if ($cbType.is(":checked") || $cbType.prop("indeterminate")){
            if (type in pos){ // several possible values
                if ($cbType.prop("indeterminate")){ // only do the first one
                    obj[type]=pos[type][0];
                    genererTypes(s,$tab,types.slice(1),obj);
                } else {
                    var poss=pos[type]
                    for (var i = 0; i < poss.length; i++) {
                        obj[type]=poss[i];
                        genererTypes(s,$tab,types.slice(1),obj);
                    }
                }
            } else { // only true and false as alternatives
                obj[type]=true;
                genererTypes(s,$tab,types.slice(1),obj);
            }
        }
    }
}
// Rebuild the result table header for the given language: one checkbox column
// per applicable type flag plus a realization column, and reset the body.
function genererHeader(lang){
$tab.empty();
types=allTypes.slice(); // copy all the types
if (lang=="fr"){// no "perf" nor "contr" in French, remove them from types
types.splice(types.indexOf("perf"),1)
types.splice(types.indexOf("contr"),1)
}
// generate the table header
var $thead=$("<thead></thead>");
var $tr=$("<tr></tr>");
$thead.append($tr);
for (var i = 0; i < types.length; i++) {
var type=types[i];
var cb = '<input type="checkbox" checked="checked" id="cb-'+type+'"/> ';
$tr.append("<th class='flag'>"+cb+type+"</th>")
}
$tr.append(lang=="fr"?"<th>Réalisation (<span id='nbSentences'/> phrases)</th>":
"<th>Realization (<span id='nbSentences'/> sentences)</th>");
$tab.append($thead);
$tbody=$("<tbody></tbody>");
$tab.append($tbody);
// patch for three state checkboxes: multi-valued flags start indeterminate
for (type in pos){
$("#cb-"+type).prop("checked",false).prop("indeterminate",true).click(threeStatesCB);
}
}
function genererStruct(struct,lang){
$tbody.empty();
// évaluer les phrases
try {
nb=0;
genererTypes(eval(struct),$tbody,types,{});
$message.html(" ")
$("#nbSentences").text(nb);
} catch (err) {
$message.text((lang=="fr"?"Erreur dans la structure jsRealB: ":
"Error in jsRealB: ")+err.message);
}
}
// Switch the UI to the currently selected language: show that language's
// example menu, rebuild the table header and load its first example.
function showLangTextArea() {
    if ($langue.val() == "en") {
        $sentMenuFr.hide();
        $sentMenuEn.show();
        genererHeader("en");
        loadSentence($sentMenuEn, examplesEn);
    } else {
        $sentMenuEn.hide();
        $sentMenuFr.show();
        genererHeader("fr");
        loadSentence($sentMenuFr, examplesFr);
    }
}
// Populate a <select> with one option per example: the option text is the
// realized sentence, the value is the example's index.
function createSentenceMenu(lang,examples,$menu){
// select the jsRealB lexicon/rules for the language before eval'ing
if (lang=="en")loadEn();else loadFr();
for (var i = 0; i < examples.length; i++) {
var expr=examples[i].expr;
var sent=eval(expr).toString();
examples[i].sent=sent; // cache the realization on the example
var $option=$(`<option value="${i}">`+sent+"</option>");
if (i==0)$option.attr("selected","selected");
$menu.append($option);
}
}
function loadSentence($menu,examples){
var index=parseInt($menu.val());
ex=examples[index]
$struct.val(ex.expr.trim());
if (ex.ref!==undefined){
$infos.html('<a target="_blank" href="'+ex.url+'">'+ex.ref+'</a> '+ex.no);
}
$("tbody",$tab).empty();
$("#nbSentences").text("-");
$message.html(" ")
}
// Page initialization: cache jQuery handles, build both example menus,
// wire up the language/example/generate controls, and show the default language.
$(document).ready(function() {
$sentMenuEn=$("#sentMenuEn");
$sentMenuFr=$("#sentMenuFr");
$struct=$("#struct");
$langue=$("#langue");
$infos=$("#infos");
$tab=$("#tab");
$message=$("#message");
$langue.change(showLangTextArea);
$generer=$("#generer");
createSentenceMenu("en",examplesEn,$sentMenuEn);
createSentenceMenu("fr",examplesFr,$sentMenuFr);
$sentMenuEn.change(function(){loadSentence($sentMenuEn,examplesEn)});
$sentMenuFr.change(function(){loadSentence($sentMenuFr,examplesFr)});
showLangTextArea();
// generate button: load the proper lexicon then realize all variants
$generer.click(function(e){
if ($langue.val()=="en"){
loadEn();
genererStruct($struct.val(),"en")
} else {
loadFr();
genererStruct($struct.val(),"fr")
}
});
});
|
package com.github.chen0040.leetcode.day09.easy;
/**
* Created by xschen on 4/8/2017.
*
* summary:
* Write a function that takes a string as input and reverse only the vowels of a string.
*
* link: https://leetcode.com/problems/reverse-vowels-of-a-string/description/
*/
public class ReverseVowels {

    public static void main(String[] args) {
        Solution solver = new Solution();
        System.out.println(solver.reverseVowels("hello"));
        System.out.println(solver.reverseVowels("leetcode"));
    }

    public static class Solution {

        /**
         * Returns {@code s} with its vowels reversed among themselves;
         * consonants keep their original positions.
         */
        public String reverseVowels(String s) {
            char[] chars = s.toCharArray();
            int lo = 0;
            int hi = chars.length - 1;
            // Classic two-pointer sweep: advance each pointer to the next
            // vowel, swap, repeat until the pointers meet.
            while (lo < hi) {
                while (lo < hi && !isVowel(chars[lo])) {
                    lo++;
                }
                while (lo < hi && !isVowel(chars[hi])) {
                    hi--;
                }
                if (lo < hi) {
                    char tmp = chars[lo];
                    chars[lo] = chars[hi];
                    chars[hi] = tmp;
                    lo++;
                    hi--;
                }
            }
            return new String(chars);
        }

        // True when c is an ASCII vowel in either case.
        private boolean isVowel(char c) {
            return "AEIOUaeiou".indexOf(c) >= 0;
        }
    }
}
|
class MerchantParamsProcessor:
    """Simple name/value store for merchant parameters."""

    def __init__(self):
        # Backing dict mapping parameter name -> value.
        self.merchant_params = {}

    def add_parameter(self, name, value):
        """Add the parameter ``name``, overwriting any existing value."""
        self.merchant_params[name] = value

    def get_parameter_value(self, name):
        """Return the value for ``name``, or None when absent."""
        # dict.get already defaults to None; no explicit default needed.
        return self.merchant_params.get(name)

    def remove_parameter(self, name):
        """Remove ``name`` if present; a no-op when it is absent."""
        # pop with a default replaces the membership-test-then-del pattern
        # (shorter, and a single dict operation instead of two).
        self.merchant_params.pop(name, None)
<!-- Minimal landing page for "Start-up XYZ": centered logo and two buttons. -->
<html>
<head>
<title>Start-up XYZ</title>
<style>
/* Center everything and give the page vertical breathing room */
body {
text-align: center;
margin: 40px 0;
}
/* Large orange wordmark used as the logo */
#logo {
font-size: 55px;
color: #F39C12;
}
#button-box {
margin-top: 20px;
}
/* Shared teal button style for both links */
.button {
background-color: #16A085;
border: none;
color: white;
padding: 15px 32px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: 16px;
}
</style>
</head>
<body>
<div id="logo">Start-up XYZ</div>
<!-- Placeholder links; "#" hrefs to be replaced with real pages -->
<div id="button-box">
<a href="#" class="button">Contact Us</a>
<a href="#" class="button">About Us</a>
</div>
</body>
</html>
package edu.wpi.first.gradlerio.deploy.roborio;
import java.io.File;
import java.util.Optional;
import java.util.Set;
import javax.inject.Inject;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileTree;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.util.PatternFilterable;
import org.gradle.api.tasks.util.PatternSet;
import edu.wpi.first.deployutils.deploy.artifact.FileCollectionArtifact;
import edu.wpi.first.deployutils.deploy.context.DeployContext;
import edu.wpi.first.deployutils.deploy.target.RemoteTarget;
import edu.wpi.first.gradlerio.deploy.FRCDeployPlugin;
/**
 * Deploy artifact that pushes JNI native libraries (resolved from a Gradle
 * {@link Configuration}) to the roboRIO's library deploy directory, then runs
 * ldconfig so the dynamic linker picks them up.
 */
public class FRCJNILibraryArtifact extends FileCollectionArtifact {
// Configuration whose resolved files are deployed.
private Property<Configuration> configuration;
// When true, each resolved file is treated as a zip and unpacked (filtered).
private boolean zipped;
// Pattern filter applied to zip contents when zipped == true.
private PatternFilterable filter;
@Inject
public FRCJNILibraryArtifact(String name, RemoteTarget target) {
super(name, target);
getDirectory().set(FRCDeployPlugin.LIB_DEPLOY_DIR);
filter = new PatternSet();
configuration = target.getProject().getObjects().property(Configuration.class);
// Only deploy when file resolution produced a non-empty set.
setOnlyIf(ctx -> {
return getFiles().isPresent() && !getFiles().get().isEmpty() && !getFiles().get().getFiles().isEmpty();
});
// Resolve the configuration off the worker thread, just before deploy.
getPreWorkerThread().add(cfg -> {
if (!configuration.isPresent()) {
return;
}
getFiles().set(computeFiles());
});
// After deploying, fix ownership and refresh the linker cache on target.
getPostdeploy().add(ctx -> {
FRCDeployPlugin.ownDirectory(ctx, FRCDeployPlugin.LIB_DEPLOY_DIR);
ctx.execute("ldconfig");
});
}
public Property<Configuration> getConfiguration() {
return configuration;
}
public boolean isZipped() {
return zipped;
}
public void setZipped(boolean zipped) {
this.zipped = zipped;
}
public PatternFilterable getFilter() {
return filter;
}
@Override
public void deploy(DeployContext arg0) {
// No extra behavior; kept as an explicit extension point over the base class.
super.deploy(arg0);
}
/**
 * Resolves the configuration to the set of files to deploy; when zipped,
 * the union of each archive's contents matching {@code filter}.
 */
public FileCollection computeFiles() {
Set<File> configFileCaches = configuration.get().getResolvedConfiguration().getFiles();
if (zipped) {
Optional<FileTree> allFiles = configFileCaches.stream().map(file -> getTarget().getProject().zipTree(file).matching(filter)).filter(x -> x != null).reduce((a, b) -> a.plus(b));
if (allFiles.isPresent()) {
return allFiles.get();
} else {
// Empty configuration: deploy nothing.
return getTarget().getProject().files();
}
} else {
return getTarget().getProject().files(configFileCaches);
}
}
}
|
#!/usr/bin/env bash
# ESPnet-style ASR recipe driver: defines the dataset splits and configs,
# then delegates all stages to the shared asr.sh pipeline.
# Set bash to 'debug' mode, it will exit on :
# -e 'error', -u 'undefined variable', -o ... 'error in pipeline', -x 'print commands',
set -e
set -u
set -o pipefail
# Dataset splits used for training, validation, and evaluation.
train_set=train
train_dev="dev_all"
test_set="dev_all test"
# Training / decoding configuration files.
asr_config=conf/train_asr.yaml
lm_config=conf/train_lm.yaml
inference_config=conf/decode_asr.yaml
# 8 kHz audio; 1000-unit BPE vocabulary.
fs=8k
nbpe=1000
# Run the full pipeline; extra CLI args ("$@") are forwarded to asr.sh.
./asr.sh \
--ngpu 1 \
--audio_format "flac.ark" \
--local_data_opts "--stage 0" \
--use_lm false \
--lm_config "${lm_config}" \
--fs ${fs} \
--token_type bpe \
--nbpe $nbpe \
--feats_type raw \
--speed_perturb_factors "0.9 1.0 1.1" \
--asr_config "${asr_config}" \
--inference_config "${inference_config}" \
--train_set "${train_set}" \
--valid_set "${train_dev}" \
--test_sets "${test_set}" \
--gpu_inference true \
--inference_nj 10 \
--bpe_train_text "data/${train_set}/text" \
--lm_train_text "data/${train_set}/text" "$@"
import pandas as pd

# NOTE(review): `numbers` is not defined in this snippet — it must be supplied
# by the surrounding code (presumably a sequence of numeric values); confirm.
data = pd.DataFrame(numbers, columns=['Numbers'])

# Provide descriptive statistics
print(data.describe())

# BUG FIX: the expected output below was pasted as bare text, which made this
# file a SyntaxError; it is kept for reference but commented out.
#        Numbers
# count  7.000000
# mean   3.371429
# std    0.754857
# min    2.500000
# 25%    2.800000
# 50%    3.100000
# 75%    4.200000
# max    4.200000
package com.hapramp.ui.fragments;
import android.content.Context;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.hapramp.R;
import com.hapramp.datastore.DataStore;
import com.hapramp.datastore.callbacks.UserFeedCallback;
import com.hapramp.steem.models.Feed;
import com.hapramp.views.feedlist.FeedListView;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
/**
 * Fragment showing a user's blog feed with pull-to-refresh and endless
 * scrolling (cursor pagination via last author/permlink).
 */
public class UserBlogFragment extends Fragment implements UserFeedCallback, FeedListView.FeedListViewListener {
    @BindView(R.id.feedListView)
    FeedListView feedListView;
    Unbinder unbinder;
    private DataStore dataStore;
    private String mUsername;
    // Pagination cursor: author/permlink of the last feed item received.
    private String last_author;
    private String last_permlink;

    public UserBlogFragment() {
        // Required empty public constructor
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        dataStore = new DataStore();
        setRetainInstance(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_user_blog, container, false);
        unbinder = ButterKnife.bind(this, view);
        feedListView.setFeedListViewListener(this);
        feedListView.initialLoading();
        fetchAllPosts();
        return view;
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        unbinder.unbind();
    }

    @Override
    public void onDetach() {
        super.onDetach();
    }

    private void fetchAllPosts() {
        dataStore.requestUserBlog(mUsername, false, this);
    }

    /** Must be called before the fragment loads; the user whose blog to show. */
    public void setUsername(String username) {
        this.mUsername = username;
    }

    @Override
    public void onFeedsFetching() {
    }

    @Override
    public void onUserFeedsAvailable(List<Feed> feeds, boolean isFreshData, boolean isAppendable) {
        if (feedListView != null) {
            if (isAppendable) {
                // First item of an appended page repeats the pagination cursor
                // item (presumably — confirm against DataStore); drop it.
                if (feeds.size() > 0) {
                    feeds.remove(0);
                }
                feedListView.loadedMoreFeeds(feeds);
            } else {
                feedListView.feedsRefreshed(feeds);
            }
            // Advance the pagination cursor to the last item received.
            int size = feeds.size();
            if (feeds.size() > 0) {
                last_author = feeds.get(size - 1).getAuthor();
                last_permlink = feeds.get(size - 1).getPermlink();
            }
        }
    }

    @Override
    public void onUserFeedFetchError(String err) {
        if (feedListView != null) {
            feedListView.failedToRefresh("");
        }
    }

    @Override
    public void onRetryFeedLoading() {
    }

    @Override
    public void onRefreshFeeds() {
        refreshAllPosts();
    }

    @Override
    public void onLoadMoreFeeds() {
        // BUG FIX: last_author is null until the first page arrives; the old
        // code called last_author.length() unguarded, crashing with an NPE if
        // load-more fired before any feed was received.
        if (last_author != null && last_author.length() > 0) {
            dataStore.requestUserBlog(mUsername, last_author, last_permlink, this);
        }
    }

    @Override
    public void onHideCommunityList() {
    }

    @Override
    public void onShowCommunityList() {
    }

    private void refreshAllPosts() {
        dataStore.requestUserBlog(mUsername, true, this);
    }
}
|
<gh_stars>0
# ActiveRecord model for a gym class; columns come from the DB schema and no
# validations or associations are declared here yet.
class GymClass < ApplicationRecord
end
|
import React, { useState } from 'react';
// Renders the `message` prop as a heading.
// NOTE(review): useState(props.message) captures only the *initial* prop —
// later changes to props.message will NOT update the display (setMessage is
// unused). Confirm this snapshot behavior is intended.
const WelcomeMessage = (props) => {
const [message, setMessage] = useState(props.message);
return (
<div>
<h1>{message}</h1>
</div>
);
};
// Usage
<WelcomeMessage message="Welcome to my app!" /> |
<gh_stars>0
# frozen_string_literal: true
require 'set' # ruby std lib
require 'bisect' # insert into a sorted array
require 'tzinfo' # timezone information
require 'mt-uv-rays/scheduler/time'
module MTUV
# Base class for scheduled events. Each event is itself a promise that is
# notified on every trigger; subclasses decide when @next_scheduled advances.
class ScheduledEvent < ::MTLibuv::Q::DeferredPromise
# Note:: Comparable should not effect Hashes
# it will however effect arrays
include Comparable
attr_reader :created
attr_reader :last_scheduled
attr_reader :next_scheduled
attr_reader :trigger_count
def initialize(scheduler)
# Create a dummy deferrable
reactor = scheduler.reactor
defer = reactor.defer
# Record a backtrace of where the schedule was created
@trace = caller
# Setup common event variables
@scheduler = scheduler
@created = reactor.now
@last_scheduled = @created
@trigger_count = 0
# init the promise
super(reactor, defer)
end
# Provide relevant inspect information
def inspect
insp = String.new("#<#{self.class}:#{"0x00%x" % (self.__id__ << 1)} ")
insp << "trigger_count=#{@trigger_count} "
insp << "config=#{info} " if self.respond_to?(:info, true)
insp << "next_scheduled=#{to_time(@next_scheduled)} "
insp << "last_scheduled=#{to_time(@last_scheduled)} created=#{to_time(@created)}>"
insp
end
alias_method :to_s, :inspect
# Convert an internal (reactor-relative, millisecond) time to epoch seconds.
def to_time(internal_time)
if internal_time
((internal_time + @scheduler.time_diff) / 1000).to_i
end
end
# required for comparable: events sort by their next trigger time
def <=>(anOther)
@next_scheduled <=> anOther.next_scheduled
end
# reject the promise (the scheduler's `finally` hook then unschedules us)
def cancel
@defer.reject(:cancelled)
end
# notify listeners of the event
def trigger
@trigger_count += 1
@defer.notify(@reactor.now, self)
end
end
# A scheduled event that fires exactly once, at an absolute reactor time.
class OneShot < ScheduledEvent
def initialize(scheduler, at)
super(scheduler)
@next_scheduled = at
end
# Updates the scheduled time
def update(time)
@last_scheduled = @reactor.now
# Try to interpret `time` as a relative period first, falling back to an
# absolute date/time (parse_at raises if that is invalid too).
parsed_time = Scheduler.parse_in(time, :quiet)
if parsed_time.nil?
# Parse at will throw an error if time is invalid
parsed_time = Scheduler.parse_at(time) - @scheduler.time_diff
else
parsed_time += @last_scheduled
end
@next_scheduled = parsed_time
@scheduler.reschedule(self)
end
# Runs the event and cancels the schedule
def trigger
super()
# resolving the promise causes the scheduler to drop this event
@defer.resolve(:triggered)
end
end
# A repeating scheduled event. @every is either an integer millisecond
# period or a parsed CRON schedule object.
class Repeat < ScheduledEvent
    def initialize(scheduler, every)
        super(scheduler)
        @every = every
        next_time
    end

    # Update the time period of the repeating event
    #
    # @param every [String] a standard CRON job line or a human readable string representing a time period.
    def update(every, timezone: nil)
        time = Scheduler.parse_in(every, :quiet) || Scheduler.parse_cron(every, :quiet, timezone: timezone)
        # BUG FIX: this interpolation previously referenced an undefined local
        # `o`, so an unparsable input raised NameError instead of the intended
        # ArgumentError. (The @param doc also named a nonexistent `schedule`.)
        raise ArgumentError.new("couldn't parse \"#{every}\"") if time.nil?
        @every = time
        reschedule
    end

    # removes the event from the schedule
    def pause
        @paused = true
        @scheduler.unschedule(self)
    end

    # reschedules the event to the next time period
    # can be used to reset a repeating timer
    def resume
        @paused = false
        @last_scheduled = @reactor.now
        reschedule
    end

    # Runs the event and reschedules
    def trigger
        super()
        @reactor.next_tick do
            # Do this next tick to avoid needless scheduling
            # if the event is stopped in the callback
            reschedule
        end
    end

    protected

    # Advance @next_scheduled by one period from now.
    def next_time
        @last_scheduled = @reactor.now
        if @every.is_a? Integer
            @next_scheduled = @last_scheduled + @every
        else
            # must be a cron
            @next_scheduled = (@every.next.to_f * 1000).to_i - @scheduler.time_diff
        end
    end

    # Re-insert into the scheduler unless paused.
    def reschedule
        unless @paused
            next_time
            @scheduler.reschedule(self)
        end
    end

    # Used by ScheduledEvent#inspect.
    def info
        "repeat:#{@every.inspect}"
    end
end
# Maintains a sorted queue of ScheduledEvents on top of a single libuv timer,
# re-arming the timer whenever the head of the queue changes.
class Scheduler
attr_reader :reactor
attr_reader :time_diff
attr_reader :next
def initialize(reactor)
@reactor = reactor
@schedules = Set.new
@scheduled = []
@next = nil # Next schedule time
@timer = nil # Reference to the timer
# Not really required when used correctly
@critical = Mutex.new
# Every hour we should re-calibrate this (just in case)
calibrate_time
@calibrate = @reactor.timer do
calibrate_time
@calibrate.start(3600000)
end
@calibrate.start(3600000)
@calibrate.unref
end
# As the libuv time is taken from an arbitrary point in time we
# need to roughly synchronize between it and ruby's Time.now
def calibrate_time
@reactor.update_time
@time_diff = (Time.now.to_f * 1000).to_i - @reactor.now
end
# Create a repeating event that occurs each time period
#
# @param time [String] a human readable string representing the time period. 3w2d4h1m2s for example.
# @param callback [Proc] a block or method to execute when the event triggers
# @return [::MTUV::Repeat]
def every(time, &block)
ms = Scheduler.parse_in(time)
event = Repeat.new(self, ms)
event.progress &block if block_given?
schedule(event)
event
end
# Create a one off event that occurs after the time period
#
# @param time [String] a human readable string representing the time period. 3w2d4h1m2s for example.
# @param callback [Proc] a block or method to execute when the event triggers
# @return [::MTUV::OneShot]
def in(time, &block)
ms = @reactor.now + Scheduler.parse_in(time)
event = OneShot.new(self, ms)
event.progress &block if block_given?
schedule(event)
event
end
# Create a one off event that occurs at a particular date and time
#
# @param time [String, Time] a representation of a date and time that can be parsed
# @param callback [Proc] a block or method to execute when the event triggers
# @return [::MTUV::OneShot]
def at(time, &block)
ms = Scheduler.parse_at(time) - @time_diff
event = OneShot.new(self, ms)
event.progress &block if block_given?
schedule(event)
event
end
# Create a repeating event that uses a CRON line to determine the trigger time
#
# @param schedule [String] a standard CRON job line.
# @param callback [Proc] a block or method to execute when the event triggers
# @return [::MTUV::Repeat]
def cron(schedule, timezone: nil, &block)
ms = Scheduler.parse_cron(schedule, timezone: timezone)
event = Repeat.new(self, ms)
event.progress &block if block_given?
schedule(event)
event
end
# Schedules an event for execution
#
# @param event [ScheduledEvent]
def reschedule(event)
# Check promise is not resolved
return if event.resolved?
@critical.synchronize {
# Remove the event from the scheduled list and ensure it is in the schedules set
if @schedules.include?(event)
remove(event)
else
@schedules << event
end
# optimal algorithm for inserting into an already sorted list
Bisect.insort(@scheduled, event)
# Update the timer
check_timer
}
end
# Removes an event from the schedule
#
# @param event [ScheduledEvent]
def unschedule(event)
@critical.synchronize {
# Only call delete and update the timer when required
if @schedules.include?(event)
@schedules.delete(event)
remove(event)
check_timer
end
}
end
private
# Remove an element from the array
# (identity comparison, since <=> makes equal-time events compare equal)
def remove(obj)
position = nil
@scheduled.each_index do |i|
# object level comparison
if obj.equal? @scheduled[i]
position = i
break
end
end
@scheduled.slice!(position) unless position.nil?
end
# First time schedule we want to bind to the promise
def schedule(event)
reschedule(event)
event.finally do
unschedule event
end
end
# Ensures the current timer, if any, is still
# accurate by checking the head of the schedule
def check_timer
@reactor.update_time
existing = @next
schedule = @scheduled.first
@next = schedule.nil? ? nil : schedule.next_scheduled
if existing != @next
# lazy load the timer
if @timer.nil?
new_timer
else
@timer.stop
end
if not @next.nil?
in_time = @next - @reactor.now
# Ensure there are never negative start times
if in_time > 3
@timer.start(in_time)
else
# Effectively next tick
@timer.start(0)
end
end
end
end
# Is called when the libuv timer fires
def on_timer
@critical.synchronize {
schedule = @scheduled.shift
@schedules.delete(schedule)
schedule.trigger
# execute schedules that are within 3ms of this event
# Basic timer coalescing..
now = @reactor.now + 3
while @scheduled.first && @scheduled.first.next_scheduled <= now
schedule = @scheduled.shift
@schedules.delete(schedule)
schedule.trigger
end
check_timer
}
end
# Provide some assurances on timer failure
# (recreates the timer and restarts it if it ever errors out)
def new_timer
@timer = @reactor.timer { on_timer }
@timer.finally do
new_timer
unless @next.nil?
@timer.start(@next)
end
end
end
end
|
package com.yin.springboot.mybatis.mapper;
import com.yin.springboot.mybatis.domain.OmsOrderReturnReason;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for the order-return-reason table; standard single-row CRUD
 * plus batch insert/update operations.
 */
public interface OmsOrderReturnReasonMapper {
int deleteByPrimaryKey(Long id);
int insert(OmsOrderReturnReason record);
int insertOrUpdate(OmsOrderReturnReason record);
int insertOrUpdateSelective(OmsOrderReturnReason record);
// "Selective" variants touch only non-null fields of the record.
int insertSelective(OmsOrderReturnReason record);
OmsOrderReturnReason selectByPrimaryKey(Long id);
int updateByPrimaryKeySelective(OmsOrderReturnReason record);
int updateByPrimaryKey(OmsOrderReturnReason record);
int updateBatch(List<OmsOrderReturnReason> list);
int batchInsert(@Param("list") List<OmsOrderReturnReason> list);
}
<gh_stars>1-10
/*
* Copyright 2003-2015, Haiku, Inc.
* Distributed under the terms of the MIT license.
*
* Authors:
* Sikosis, <NAME>
* yourpalal, <NAME>
*/
#include "MediaWindow.h"
#include <stdio.h>
#include <Application.h>
#include <Autolock.h>
#include <Button.h>
#include <CardLayout.h>
#include <Catalog.h>
#include <Debug.h>
#include <Deskbar.h>
#include <IconUtils.h>
#include <LayoutBuilder.h>
#include <Locale.h>
#include <MediaRoster.h>
#include <MediaTheme.h>
#include <Resources.h>
#include <Roster.h>
#include <Screen.h>
#include <ScrollView.h>
#include <SeparatorView.h>
#include <SpaceLayoutItem.h>
#include <StorageKit.h>
#include <String.h>
#include <TextView.h>
#include "Media.h"
#include "MediaIcons.h"
#include "MidiSettingsView.h"
#undef B_TRANSLATION_CONTEXT
#define B_TRANSLATION_CONTEXT "Media Window"
const uint32 ML_SELECTED_NODE = 'MlSN';
const uint32 ML_RESTART_THREAD_FINISHED = 'MlRF';
// Visitor that walks the media list and applies `fAction` (e.g. mark as
// default input/output) to the one NodeListItem matching the target item.
class NodeListItemUpdater : public MediaListItem::Visitor {
public:
typedef void (NodeListItem::*UpdateMethod)(bool);
NodeListItemUpdater(NodeListItem* target, UpdateMethod action)
:
fComparator(target),
fAction(action)
{
}
// Non-node items are ignored.
virtual void Visit(AudioMixerListItem*){}
virtual void Visit(DeviceListItem*){}
virtual void Visit(MidiListItem*){}
virtual void Visit(NodeListItem* item)
{
// comparator result == 0 means `item` is the target; flag is set
// true on the match and false on every other node item
item->Accept(fComparator);
(item->*(fAction))(fComparator.result == 0);
}
private:
NodeListItem::Comparator fComparator;
UpdateMethod fAction;
};
// Starts empty; media notifications for a later-set node are delivered to
// notifyHandler (see SetTo()).
MediaWindow::SmartNode::SmartNode(const BMessenger& notifyHandler)
:
fNode(NULL),
fMessenger(notifyHandler)
{
}
// Releases the held node (stops watching + ReleaseNode) if any.
MediaWindow::SmartNode::~SmartNode()
{
_FreeNode();
}
// Replace the held node with one described by `info`: reuse a live instance
// of that dormant node if the roster already has one, otherwise instantiate
// it, then start watching it for media events.
void
MediaWindow::SmartNode::SetTo(const dormant_node_info* info)
{
_FreeNode();
if (!info)
return;
fNode = new media_node();
BMediaRoster* roster = BMediaRoster::Roster();
// Get the node statically if it is already running
status_t status = B_OK;
media_node_id node_id;
if (roster->GetInstancesFor(info->addon, info->flavor_id, &node_id) == B_OK)
status = roster->GetNodeFor(node_id, fNode);
else
status = roster->InstantiateDormantNode(*info, fNode, B_FLAVOR_IS_GLOBAL);
if (status != B_OK) {
fprintf(stderr, "SmartNode::SetTo error with node %" B_PRId32
": %s\n", fNode->node, strerror(status));
}
// NOTE(review): StartWatching is attempted even when the node could not be
// acquired above — confirm whether watching an invalid node is intended.
status = roster->StartWatching(fMessenger, *fNode, B_MEDIA_WILDCARD);
if (status != B_OK) {
fprintf(stderr, "SmartNode::SetTo can't start watching for"
" node %" B_PRId32 "\n", fNode->node);
}
}
// Replace the held node with a copy of an already-acquired media_node and
// start watching it for media events.
void
MediaWindow::SmartNode::SetTo(const media_node& node)
{
_FreeNode();
fNode = new media_node(node);
BMediaRoster* roster = BMediaRoster::Roster();
roster->StartWatching(fMessenger, *fNode, B_MEDIA_WILDCARD);
}
// True when a node is currently held.
bool
MediaWindow::SmartNode::IsSet()
{
return fNode != NULL;
}
// Returns a copy of the held node, or a default-constructed (unset)
// media_node when nothing is held.
MediaWindow::SmartNode::operator media_node()
{
if (fNode)
return *fNode;
media_node node;
return node;
}
// Stop watching and release the held node, then free our copy of it.
void
MediaWindow::SmartNode::_FreeNode()
{
	if (!IsSet())
		return;

	BMediaRoster* roster = BMediaRoster::Roster();
	if (roster != NULL) {
		status_t status = roster->StopWatching(fMessenger,
			*fNode, B_MEDIA_WILDCARD);
		if (status != B_OK) {
			fprintf(stderr, "SmartNode::_FreeNode can't unwatch"
				" media services for node %" B_PRId32 "\n", fNode->node);
		}

		// BUG FIX: capture ReleaseNode()'s own result. Previously the stale
		// StopWatching() status was re-checked here, so a failed release was
		// never reported (and a failed unwatch was reported twice with the
		// wrong message).
		status = roster->ReleaseNode(*fNode);
		if (status != B_OK) {
			fprintf(stderr, "SmartNode::_FreeNode can't release"
				" node %" B_PRId32 "\n", fNode->node);
		}
	}
	delete fNode;
	fNode = NULL;
}
// #pragma mark -
// Builds the window UI and subscribes to media-server lifecycle
// notifications (mirrored by StopWatching calls in the destructor).
MediaWindow::MediaWindow(BRect frame)
:
BWindow(frame, B_TRANSLATE_SYSTEM_NAME("Media"), B_TITLED_WINDOW,
B_ASYNCHRONOUS_CONTROLS | B_AUTO_UPDATE_SIZE_LIMITS),
fCurrentNode(BMessenger(this)),
fParamWeb(NULL),
fAudioInputs(5, true),
fAudioOutputs(5, true),
fVideoInputs(5, true),
fVideoOutputs(5, true),
fInitCheck(B_OK),
fRestartThread(-1),
fRestartAlert(NULL)
{
_InitWindow();
BMediaRoster* roster = BMediaRoster::Roster();
roster->StartWatching(BMessenger(this, this),
B_MEDIA_SERVER_STARTED);
roster->StartWatching(BMessenger(this, this),
B_MEDIA_SERVER_QUIT);
}
// Persists the window frame to the settings file and unsubscribes from the
// media-server notifications registered in the constructor.
MediaWindow::~MediaWindow()
{
	_EmptyNodeLists();
	_ClearParamView();

	// Save the current window frame so it can be restored next launch.
	char buffer[512];
	BRect rect = Frame();
	PRINT_OBJECT(rect);
	snprintf(buffer, 512, "# MediaPrefs Settings\n rect = %i,%i,%i,%i\n",
		int(rect.left), int(rect.top), int(rect.right), int(rect.bottom));

	BPath path;
	if (find_directory(B_USER_SETTINGS_DIRECTORY, &path) == B_OK) {
		path.Append(SETTINGS_FILE);
		BFile file(path.Path(), B_READ_WRITE | B_CREATE_FILE | B_ERASE_FILE);
		if (file.InitCheck() == B_OK)
			file.Write(buffer, strlen(buffer));
	}

	BMediaRoster* roster = BMediaRoster::CurrentRoster();
	roster->StopWatching(BMessenger(this, this),
		B_MEDIA_SERVER_STARTED);
	// BUG FIX: this must StopWatching B_MEDIA_SERVER_QUIT; the destructor
	// previously called StartWatching() again, leaking the subscription the
	// constructor registered instead of removing it.
	roster->StopWatching(BMessenger(this, this),
		B_MEDIA_SERVER_QUIT);
}
// B_OK when _InitMedia() succeeded during construction.
status_t
MediaWindow::InitCheck()
{
return fInitCheck;
}
// Show the parameter web of the given dormant node in the content area.
void
MediaWindow::SelectNode(const dormant_node_info* node)
{
fCurrentNode.SetTo(node);
_MakeParamView();
fTitleView->SetLabel(node->name);
}
// Switch the content area to the audio settings card.
void
MediaWindow::SelectAudioSettings(const char* title)
{
fContentLayout->SetVisibleItem(fContentLayout->IndexOfView(fAudioView));
fTitleView->SetLabel(title);
}
// Switch the content area to the video settings card.
void
MediaWindow::SelectVideoSettings(const char* title)
{
fContentLayout->SetVisibleItem(fContentLayout->IndexOfView(fVideoView));
fTitleView->SetLabel(title);
}
// Show the system audio mixer's parameter web in the content area.
void
MediaWindow::SelectAudioMixer(const char* title)
{
media_node mixerNode;
BMediaRoster* roster = BMediaRoster::Roster();
roster->GetAudioMixer(&mixerNode);
fCurrentNode.SetTo(mixerNode);
_MakeParamView();
fTitleView->SetLabel(title);
}
// Switch the content area to the MIDI settings card.
void
MediaWindow::SelectMidiSettings(const char* title)
{
fContentLayout->SetVisibleItem(fContentLayout->IndexOfView(fMidiView));
fTitleView->SetLabel(title);
}
// Mark `node` as the default input in the list: the visitor sets the flag on
// the matching NodeListItem and clears it on every other node item.
void
MediaWindow::UpdateInputListItem(MediaListItem::media_type type,
const dormant_node_info* node)
{
NodeListItem compareTo(node, type);
NodeListItemUpdater updater(&compareTo, &NodeListItem::SetDefaultInput);
for (int32 i = 0; i < fListView->CountItems(); i++) {
MediaListItem* item = static_cast<MediaListItem*>(fListView->ItemAt(i));
item->Accept(updater);
}
fListView->Invalidate();
}
// Same sweep as UpdateInputListItem, but for the default-output flag.
void
MediaWindow::UpdateOutputListItem(MediaListItem::media_type type,
const dormant_node_info* node)
{
NodeListItem compareTo(node, type);
NodeListItemUpdater updater(&compareTo, &NodeListItem::SetDefaultOutput);
for (int32 i = 0; i < fListView->CountItems(); i++) {
MediaListItem* item = static_cast<MediaListItem*>(fListView->ItemAt(i));
item->Accept(updater);
}
fListView->Invalidate();
}
// Warn if a media-services restart is still in flight, then quit the app.
// NOTE(review): the alert's Go() result is ignored — its only button is
// "Quit anyway", so the quit proceeds regardless; confirm that is intended.
bool
MediaWindow::QuitRequested()
{
if (fRestartThread > 0) {
BString text(B_TRANSLATE("Quitting Media now will stop the "
"restarting of the media services. Flaky or unavailable media "
"functionality is the likely result."));
fRestartAlert = new BAlert(B_TRANSLATE("Warning!"), text,
B_TRANSLATE("Quit anyway"), NULL, NULL,
B_WIDTH_AS_USUAL, B_OFFSET_SPACING, B_WARNING_ALERT);
fRestartAlert->Go();
}
// Stop watching the MediaRoster
fCurrentNode.SetTo(NULL);
be_app->PostMessage(B_QUIT_REQUESTED);
return true;
}
// Dispatch window messages: restart-thread lifecycle, list selection,
// and media-server start/quit notifications.
void
MediaWindow::MessageReceived(BMessage* message)
{
switch (message->what) {
// restart worker finished: drop the thread id and rebuild the node lists
case ML_RESTART_THREAD_FINISHED:
fRestartThread = -1;
_InitMedia(false);
break;
case ML_RESTART_MEDIA_SERVER:
{
fRestartThread = spawn_thread(&MediaWindow::_RestartMediaServices,
"restart_thread", B_NORMAL_PRIORITY, this);
if (fRestartThread < 0)
fprintf(stderr, "couldn't create restart thread\n");
else
resume_thread(fRestartThread);
break;
}
// the parameter web changed or the user picked a list item: let the
// selected item configure the content area
case B_MEDIA_WEB_CHANGED:
case ML_SELECTED_NODE:
{
PRINT_OBJECT(*message);
MediaListItem* item = static_cast<MediaListItem*>(
fListView->ItemAt(fListView->CurrentSelection()));
if (item == NULL)
break;
fCurrentNode.SetTo(NULL);
_ClearParamView();
item->AlterWindow(this);
break;
}
// media server came or went: rebuild everything
case B_MEDIA_SERVER_STARTED:
case B_MEDIA_SERVER_QUIT:
{
PRINT_OBJECT(*message);
_InitMedia(false);
break;
}
default:
BWindow::MessageReceived(message);
break;
}
}
// #pragma mark - private
// Build the static UI (list + title + card layout with the three settings
// views), then run the first _InitMedia() pass and show the window.
void
MediaWindow::_InitWindow()
{
fListView = new BListView("media_list_view");
fListView->SetSelectionMessage(new BMessage(ML_SELECTED_NODE));
fListView->SetExplicitMinSize(BSize(140, B_SIZE_UNSET));
// Add ScrollView to Media Menu for pretty border
BScrollView* scrollView = new BScrollView("listscroller",
fListView, 0, false, false, B_FANCY_BORDER);
// Create the Views
fTitleView = new BSeparatorView(B_HORIZONTAL, B_FANCY_BORDER);
fTitleView->SetLabel(B_TRANSLATE("Audio settings"));
fTitleView->SetFont(be_bold_font);
// card layout shows exactly one settings view at a time
fContentLayout = new BCardLayout();
new BView("content view", 0, fContentLayout);
fContentLayout->Owner()->SetViewUIColor(B_PANEL_BACKGROUND_COLOR);
fContentLayout->SetExplicitMaxSize(BSize(B_SIZE_UNLIMITED, B_SIZE_UNSET));
fAudioView = new AudioSettingsView();
fContentLayout->AddView(fAudioView);
fVideoView = new VideoSettingsView();
fContentLayout->AddView(fVideoView);
fMidiView = new MidiSettingsView();
fContentLayout->AddView(fMidiView);
// Layout all views
BLayoutBuilder::Group<>(this, B_HORIZONTAL)
.SetInsets(B_USE_WINDOW_SPACING)
.Add(scrollView, 0.0f)
.AddGroup(B_VERTICAL)
.SetInsets(0, 0, 0, 0)
.Add(fTitleView)
.Add(fContentLayout);
// Start the window
fInitCheck = _InitMedia(true);
if (fInitCheck != B_OK)
PostMessage(B_QUIT_REQUESTED);
else if (IsHidden())
Show();
}
// (Re)build the node lists and list view from the media roster. On the
// `first` call, offers to launch the media server when it is unreachable;
// on later calls, tries to preserve the audio/video selection.
status_t
MediaWindow::_InitMedia(bool first)
{
status_t err = B_OK;
BMediaRoster* roster = BMediaRoster::Roster(&err);
if (first && err != B_OK) {
BAlert* alert = new BAlert("start_media_server",
B_TRANSLATE("Could not connect to the media server.\n"
"Would you like to start it ?"),
B_TRANSLATE("Quit"),
B_TRANSLATE("Start media server"), NULL,
B_WIDTH_AS_USUAL, B_WARNING_ALERT);
alert->SetShortcut(0, B_ESCAPE);
if (alert->Go() == 0)
return B_ERROR;
Show();
launch_media_server();
}
Lock();
// remember whether the audio item (index 0 after sorting) was selected
bool isVideoSelected = true;
if (!first && fListView->ItemAt(0) != NULL
&& fListView->ItemAt(0)->IsSelected())
isVideoSelected = false;
while (fListView->CountItems() > 0)
delete fListView->RemoveItem((int32)0);
_EmptyNodeLists();
// Grab Media Info
_FindNodes();
// Add video nodes first. They might have an additional audio
// output or input, but still should be listed as video node.
_AddNodeItems(fVideoOutputs, MediaListItem::VIDEO_TYPE);
_AddNodeItems(fVideoInputs, MediaListItem::VIDEO_TYPE);
_AddNodeItems(fAudioOutputs, MediaListItem::AUDIO_TYPE);
_AddNodeItems(fAudioInputs, MediaListItem::AUDIO_TYPE);
fAudioView->AddOutputNodes(fAudioOutputs);
fAudioView->AddInputNodes(fAudioInputs);
fVideoView->AddOutputNodes(fVideoOutputs);
fVideoView->AddInputNodes(fVideoInputs);
// build our list view
DeviceListItem* audio = new DeviceListItem(B_TRANSLATE("Audio settings"),
MediaListItem::AUDIO_TYPE);
fListView->AddItem(audio);
MidiListItem* midi = new MidiListItem(B_TRANSLATE("MIDI Settings"));
fListView->AddItem(midi);
MediaListItem* video = new DeviceListItem(B_TRANSLATE("Video settings"),
MediaListItem::VIDEO_TYPE);
fListView->AddItem(video);
MediaListItem* mixer = new AudioMixerListItem(B_TRANSLATE("Audio mixer"));
fListView->AddItem(mixer);
fListView->SortItems(&MediaListItem::Compare);
_UpdateListViewMinWidth();
// Set default nodes for our setting views
media_node defaultNode;
dormant_node_info nodeInfo;
int32 outputID;
BString outputName;
if (roster->GetAudioInput(&defaultNode) == B_OK) {
roster->GetDormantNodeFor(defaultNode, &nodeInfo);
fAudioView->SetDefaultInput(&nodeInfo);
// this causes our listview to be updated as well
}
if (roster->GetAudioOutput(&defaultNode, &outputID, &outputName) == B_OK) {
roster->GetDormantNodeFor(defaultNode, &nodeInfo);
fAudioView->SetDefaultOutput(&nodeInfo);
fAudioView->SetDefaultChannel(outputID);
// this causes our listview to be updated as well
}
if (roster->GetVideoInput(&defaultNode) == B_OK) {
roster->GetDormantNodeFor(defaultNode, &nodeInfo);
fVideoView->SetDefaultInput(&nodeInfo);
// this causes our listview to be updated as well
}
if (roster->GetVideoOutput(&defaultNode) == B_OK) {
roster->GetDormantNodeFor(defaultNode, &nodeInfo);
fVideoView->SetDefaultOutput(&nodeInfo);
// this causes our listview to be updated as well
}
// first run selects the mixer; later runs restore audio/video selection
if (first)
fListView->Select(fListView->IndexOf(mixer));
else if (isVideoSelected)
fListView->Select(fListView->IndexOf(video));
else
fListView->Select(fListView->IndexOf(audio));
Unlock();
return B_OK;
}
/*
 * Populate the four cached node lists with every physical audio/video
 * input and output known to the media roster, for both raw and encoded
 * formats.
 */
void
MediaWindow::_FindNodes()
{
	_FindNodes(B_MEDIA_RAW_AUDIO, B_PHYSICAL_OUTPUT, fAudioOutputs);
	_FindNodes(B_MEDIA_RAW_AUDIO, B_PHYSICAL_INPUT, fAudioInputs);
	_FindNodes(B_MEDIA_ENCODED_AUDIO, B_PHYSICAL_OUTPUT, fAudioOutputs);
	_FindNodes(B_MEDIA_ENCODED_AUDIO, B_PHYSICAL_INPUT, fAudioInputs);
	_FindNodes(B_MEDIA_RAW_VIDEO, B_PHYSICAL_OUTPUT, fVideoOutputs);
	_FindNodes(B_MEDIA_RAW_VIDEO, B_PHYSICAL_INPUT, fVideoInputs);
	_FindNodes(B_MEDIA_ENCODED_VIDEO, B_PHYSICAL_OUTPUT, fVideoOutputs);
	_FindNodes(B_MEDIA_ENCODED_VIDEO, B_PHYSICAL_INPUT, fVideoInputs);
}
/*
 * Query the media roster for dormant nodes matching the given media type
 * and node kind, appending a heap-allocated copy of each node's info to
 * `into`.  `kind` must include B_PHYSICAL_OUTPUT or B_PHYSICAL_INPUT;
 * anything else is silently ignored.
 */
void
MediaWindow::_FindNodes(media_type type, uint64 kind, NodeList& into)
{
	dormant_node_info nodeInfo[64];
	int32 nodeInfoCount = 64;

	media_format format;
	media_format* nodeInputFormat = NULL;
	media_format* nodeOutputFormat = NULL;
	format.type = type;

	// output nodes must be BBufferConsumers => they have an input format
	// input nodes must be BBufferProducers => they have an output format
	if ((kind & B_PHYSICAL_OUTPUT) != 0)
		nodeInputFormat = &format;
	else if ((kind & B_PHYSICAL_INPUT) != 0)
		nodeOutputFormat = &format;
	else
		return;

	BMediaRoster* roster = BMediaRoster::Roster();
	if (roster->GetDormantNodes(nodeInfo, &nodeInfoCount, nodeInputFormat,
		nodeOutputFormat, NULL, kind) != B_OK) {
		// TODO: better error reporting!
		fprintf(stderr, "error\n");
		return;
	}

	for (int32 i = 0; i < nodeInfoCount; i++) {
		PRINT(("node : %s, media_addon %i, flavor_id %i\n",
			nodeInfo[i].name, (int)nodeInfo[i].addon,
			(int)nodeInfo[i].flavor_id));

		// Copy the stack entry onto the heap; presumably NodeList takes
		// ownership of these pointers — confirm against _EmptyNodeLists().
		dormant_node_info* info = new dormant_node_info();
		strlcpy(info->name, nodeInfo[i].name, B_MEDIA_NAME_LENGTH);
		info->flavor_id = nodeInfo[i].flavor_id;
		info->addon = nodeInfo[i].addon;
		into.AddItem(info);
	}
}
/*
 * Append a NodeListItem of the given media type to the list view for every
 * node in `list` that is not represented there yet.
 */
void
MediaWindow::_AddNodeItems(NodeList& list, MediaListItem::media_type type)
{
	for (int32 i = 0; i < list.CountItems(); i++) {
		dormant_node_info* info = list.ItemAt(i);
		// Skip nodes that already have a list view entry.
		if (_FindNodeListItem(info) != NULL)
			continue;
		fListView->AddItem(new NodeListItem(info, type));
	}
}
/*
 * Clear all four cached dormant-node lists.
 * NOTE(review): _FindNodes() allocates each dormant_node_info with `new`;
 * whether MakeEmpty() deletes the entries depends on NodeList's ownership
 * semantics — confirm there is no leak here.
 */
void
MediaWindow::_EmptyNodeLists()
{
	fAudioOutputs.MakeEmpty();
	fAudioInputs.MakeEmpty();
	fVideoOutputs.MakeEmpty();
	fVideoInputs.MakeEmpty();
}
/*
 * Return the existing NodeListItem in the list view that matches the given
 * dormant node info, or NULL if the node is not listed yet.  Each item is
 * probed as both an audio and a video item, since the same physical node
 * may have been added as either kind.
 */
NodeListItem*
MediaWindow::_FindNodeListItem(dormant_node_info* info)
{
	NodeListItem audioItem(info, MediaListItem::AUDIO_TYPE);
	NodeListItem videoItem(info, MediaListItem::VIDEO_TYPE);

	NodeListItem::Comparator audioComparator(&audioItem);
	NodeListItem::Comparator videoComparator(&videoItem);

	for (int32 i = 0; i < fListView->CountItems(); i++) {
		MediaListItem* item = static_cast<MediaListItem*>(fListView->ItemAt(i));
		// Comparator::result == 0 signals a match (visitor pattern).
		item->Accept(audioComparator);
		if (audioComparator.result == 0)
			return static_cast<NodeListItem*>(item);
		item->Accept(videoComparator);
		if (videoComparator.result == 0)
			return static_cast<NodeListItem*>(item);
	}
	return NULL;
}
void
MediaWindow::_UpdateListViewMinWidth()
{
float width = 0;
for (int32 i = 0; i < fListView->CountItems(); i++) {
BListItem* item = fListView->ItemAt(i);
width = max_c(width, item->Width());
}
fListView->SetExplicitMinSize(BSize(width, B_SIZE_UNSET));
fListView->InvalidateLayout();
}
/*
 * Thread entry point for restarting the media services.  `data` is the
 * MediaWindow that spawned the thread.  Shuts the media server down,
 * dismisses the progress alert (if still open), and notifies the window.
 */
status_t
MediaWindow::_RestartMediaServices(void* data)
{
	MediaWindow* window = (MediaWindow*)data;
	shutdown_media_server();

	// Quit the alert only if it still exists and can be locked.
	if (window->fRestartAlert != NULL
		&& window->fRestartAlert->Lock()) {
		window->fRestartAlert->Quit();
	}
	return window->PostMessage(ML_RESTART_THREAD_FINISHED);
}
/*
 * Remove the currently visible parameter view (and its parameter web) from
 * the content layout.  The permanent audio/video/midi settings views are
 * deliberately left untouched.
 */
void
MediaWindow::_ClearParamView()
{
	BLayoutItem* item = fContentLayout->VisibleItem();
	if (!item)
		return;

	BView* view = item->View();
	// Only node-specific parameter views are owned here and safe to delete.
	if (view != fVideoView && view != fAudioView && view != fMidiView) {
		fContentLayout->RemoveItem(item);
		delete item;
		delete view;
		delete fParamWeb;
		fParamWeb = NULL;
	}
}
/*
 * Build and show a parameter view for the currently selected media node.
 * Falls back to the "no controls" placeholder when the node has no
 * parameter web or the media theme cannot produce a view for it.
 */
void
MediaWindow::_MakeParamView()
{
	if (!fCurrentNode.IsSet())
		return;

	fParamWeb = NULL;
	BMediaRoster* roster = BMediaRoster::Roster();
	if (roster->GetParameterWebFor(fCurrentNode, &fParamWeb) == B_OK) {
		BRect hint(fContentLayout->Frame());
		BView* paramView = BMediaTheme::ViewFor(fParamWeb, &hint);
		if (paramView) {
			fContentLayout->AddView(paramView);
			// The freshly added view is the last layout item; show it.
			fContentLayout->SetVisibleItem(fContentLayout->CountItems() - 1);
			return;
		}
	}
	_MakeEmptyParamView();
}
/*
 * Show a centered "no controls" placeholder in the content area, used when
 * a node exposes no parameter web.
 */
void
MediaWindow::_MakeEmptyParamView()
{
	fParamWeb = NULL;

	BStringView* stringView = new BStringView("noControls",
		B_TRANSLATE("This hardware has no controls."));

	// Let the label claim all available space and center it within it.
	BSize unlimited(B_SIZE_UNLIMITED, B_SIZE_UNLIMITED);
	stringView->SetExplicitMaxSize(unlimited);
	BAlignment centered(B_ALIGN_HORIZONTAL_CENTER,
		B_ALIGN_VERTICAL_CENTER);
	stringView->SetExplicitAlignment(centered);
	stringView->SetAlignment(B_ALIGN_CENTER);

	fContentLayout->AddView(stringView);
	fContentLayout->SetVisibleItem(fContentLayout->CountItems() - 1);

	// Draw the text in a disabled tint of the panel background color.
	rgb_color panel = stringView->LowColor();
	stringView->SetHighColor(tint_color(panel,
		B_DISABLED_LABEL_TINT));
}
|
package freeProjects;
import java.util.Scanner;
/**
 * Simple console calculator with Turkish prompts: reads two integers and an
 * operation choice (1=add, 2=subtract, 3=multiply, 4=divide), then prints
 * the result to stdout.
 */
public class HesapMakinesi {
	public static void main(String[] args) {
		Scanner scan = new Scanner(System.in);
		int sayi1, sayi2, secim;
		System.out.println("İlk sayiyi giriniz: ");
		sayi1 = scan.nextInt();
		System.out.println("\nİkinci sayıyı giriniz : ");
		sayi2 = scan.nextInt();
		System.out.println("\n Lütfen yapmak istediğiniz işlemi seçiniz :");
		System.out.println("1-Toplama\n2-Çıkarma\n3-Çarpma\n4-Bölme");
		System.out.print("Seçiminiz : ");
		secim = scan.nextInt();
		if (secim == 1) {
			System.out.println("Toplama: " + (sayi1 + sayi2));
		} else if (secim == 2) {
			System.out.println("Çıkarma : " + (sayi1 - sayi2));
		} else if (secim == 3) {
			System.out.println("Çarpma : " + (sayi1 * sayi2));
		} else if (secim == 4) {
			if (sayi2 == 0) {
				// BUG FIX: the original printed this warning and then divided
				// anyway, throwing ArithmeticException on division by zero.
				System.out.println("ikinci sayı 0'a eşittir ve sonuç belirsizdir");
			} else {
				System.out.println("Bölme : " + (sayi1 / sayi2));
			}
		}
		// Release the stdin-backed scanner.
		scan.close();
	}
}
|
<filename>site/shadyside/bytecodeparser/fields/FieldPool.java
package site.shadyside.bytecodeparser.fields;
import java.util.ArrayList;
import java.util.List;
import site.shadyside.bytecodeparser.parse.classParser.ClassParser;
/**
 * Collects the {@link Field}s parsed for a single class.
 */
public class FieldPool {
	/** The class these fields belong to. */
	private ClassParser parentClass;
	/** Parsed fields, in insertion order. */
	private final List<Field> fields = new ArrayList<>();

	public FieldPool(ClassParser parent) {
		this.parentClass = parent;
	}

	/** Appends a parsed field to this pool. */
	public void addField(Field field) {
		fields.add(field);
	}

	/** Returns the live (mutable) list of fields collected so far. */
	public List<Field> getFields() {
		return fields;
	}
}
|
<reponame>tiagoceluppi/backstage-plugin-bitbucket
export type { PipelineSummary, BitbucketCIApi } from './BitbucketCIApi';
export { BitbucketCIApiRef } from './BitbucketCIApi';
export { BitbucketCIClient } from './BitbucketCIClient'; |
<filename>features/FEATURE_BLE/targets/TARGET_NORDIC/TARGET_NRF5/source/btle/btle_clock.h
/*
* Copyright (c) 2016 Nordic Semiconductor ASA
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* 2. Redistributions in binary form, except as embedded into a Nordic Semiconductor ASA
* integrated circuit in a product or a software update for such product, must reproduce
* the above copyright notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of Nordic Semiconductor ASA nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific prior
* written permission.
*
* 4. This software, with or without modification, must only be used with a
* Nordic Semiconductor ASA integrated circuit.
*
* 5. Any software provided in binary or object form under this license must not be reverse
* engineered, decompiled, modified and/or disassembled.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef _BTLE_CLOCK_H_
#define _BTLE_CLOCK_H_
#include "nrf5x_lf_clk_helper.h"
/**
* @brief Module that generates settings for the low-frequency (LF) clock configuration.
*
* This module provides macros that are generated from the mbed config system macros.
*
*
*
* As a result, this module provides the following: @n
* - literal value LFCLK_CONF_SOURCE @n
* - literal value LFCLK_CONF_ACCURACY @n
* - literal value LFCLK_CONF_RC_CTIV @n
* - literal value LFCLK_CONF_RC_TEMP_CTIV
*/
#include "nrf_sdm.h"

/* Accuracy used when the configuration supplies none. */
#define DEFAULT_LFCLK_CONF_ACCURACY NRF_CLOCK_LF_XTAL_ACCURACY_20_PPM

/* Upper bound for the RC calibration timer interval (in 250 ms units);
 * differs per chip family. */
#ifdef NRF52
#define MAX_LFCLK_CONF_RC_CTIV 32
#else
#define MAX_LFCLK_CONF_RC_CTIV 64
#endif
#define MAX_LFCLK_CONF_RC_TEMP_CTIV 33

#define DEFAULT_LFCLK_CONF_RC_CTIV 16 // Check temperature every 16 * 250ms.
#define DEFAULT_LFCLK_CONF_RC_TEMP_CTIV 1 // Only calibrate if temperature has changed.

/* Internal identifiers for the possible mbed LF clock source settings
 * (values of MBED_CONF_NORDIC_NRF_LF_CLOCK_SRC). */
#define NRF_LF_SRC_XTAL 2
#define NRF_LF_SRC_SYNTH 3
#define NRF_LF_SRC_RC 4
/*
 * Translate the mbed configuration into the SoftDevice LF clock settings:
 * LFCLK_CONF_SOURCE, LFCLK_CONF_RC_CTIV, LFCLK_CONF_RC_TEMP_CTIV and
 * LFCLK_CONF_ACCURACY.
 */
#if MBED_CONF_NORDIC_NRF_LF_CLOCK_SRC == NRF_LF_SRC_RC
#define LFCLK_CONF_SOURCE NRF_CLOCK_LF_SRC_RC
#ifdef MBED_CONF_NORDIC_NRF_LF_CLOCK_CALIB_TIMER_INTERVAL
#define LFCLK_CONF_RC_CTIV MBED_CONF_NORDIC_NRF_LF_CLOCK_CALIB_TIMER_INTERVAL
#else
#define LFCLK_CONF_RC_CTIV DEFAULT_LFCLK_CONF_RC_CTIV
#endif
#ifdef MBED_CONF_NORDIC_NRF_LF_CLOCK_CALIB_MODE_CONFIG
#define LFCLK_CONF_RC_TEMP_CTIV MBED_CONF_NORDIC_NRF_LF_CLOCK_CALIB_MODE_CONFIG
#else
#define LFCLK_CONF_RC_TEMP_CTIV DEFAULT_LFCLK_CONF_RC_TEMP_CTIV
#endif
#if (LFCLK_CONF_RC_CTIV < 1) || (LFCLK_CONF_RC_CTIV > MAX_LFCLK_CONF_RC_CTIV)
#error Calibration timer interval out of range!
#endif
/* FIX: use the named bound instead of the magic literal 33 so the check
 * cannot drift from MAX_LFCLK_CONF_RC_TEMP_CTIV. */
#if (LFCLK_CONF_RC_TEMP_CTIV < 0) || (LFCLK_CONF_RC_TEMP_CTIV > MAX_LFCLK_CONF_RC_TEMP_CTIV)
#error Number/mode of LF RC calibration intervals out of range!
#endif
#elif MBED_CONF_NORDIC_NRF_LF_CLOCK_SRC == NRF_LF_SRC_SYNTH
#define LFCLK_CONF_SOURCE NRF_CLOCK_LF_SRC_SYNTH
#define LFCLK_CONF_RC_CTIV 0 // Must be 0 if source is not NRF_CLOCK_LF_SRC_RC.
#define LFCLK_CONF_RC_TEMP_CTIV 0 // Must be 0 if source is not NRF_CLOCK_LF_SRC_RC.
#ifdef MBED_CONF_NORDIC_LF_CLOCK_HF_SYNTH_ACCURACY
#define LFCLK_CONF_ACCURACY MBED_CONF_NORDIC_LF_CLOCK_HF_SYNTH_ACCURACY
#endif
#else // default is NRF_LF_SRC_XTAL (external crystal) — FIX: old comment wrongly said SYNTH
#define LFCLK_CONF_SOURCE NRF_CLOCK_LF_SRC_XTAL
#define LFCLK_CONF_RC_CTIV 0 // Must be 0 if source is not NRF_CLOCK_LF_SRC_RC.
#define LFCLK_CONF_RC_TEMP_CTIV 0 // Must be 0 if source is not NRF_CLOCK_LF_SRC_RC.
#ifdef MBED_CONF_NORDIC_LF_CLOCK_XTAL_ACCURACY
#define LFCLK_CONF_ACCURACY MBED_CONF_NORDIC_LF_CLOCK_XTAL_ACCURACY
#endif
#endif

#ifndef LFCLK_CONF_ACCURACY
#define LFCLK_CONF_ACCURACY DEFAULT_LFCLK_CONF_ACCURACY
#endif

#if (LFCLK_CONF_ACCURACY > NRF_CLOCK_LF_XTAL_ACCURACY_20_PPM) || (LFCLK_CONF_ACCURACY < NRF_CLOCK_LF_XTAL_ACCURACY_250_PPM)
#error Low frequency clock accuracy out of range!
#endif
#endif //_BTLE_CLOCK_H_
|
<filename>src/components/Nav.js
import React, { useEffect, useState } from "react";
// import { useNavigate } from "react-router-dom";
import { getStorage, ref, getDownloadURL } from "firebase/storage";
import noAvatar from "../assets/avatars/sample-10.png";
import { isMobile } from "react-device-detect";
// Icons
import {
UserAddIcon,
QuestionMarkCircleIcon,
DotsHorizontalIcon,
SearchIcon,
} from "@heroicons/react/outline";
export default function Nav(props) {
const [data, setData] = useState({});
// const nav = useNavigate();
useEffect(() => {
if (props.userData.avatar) {
const storage = getStorage();
const pathReference = ref(
storage,
"general/assets/avatars/" + props.userData.avatar
);
getDownloadURL(pathReference).then((url) => {
console.log("Requested avatar URL.");
setData((d) => ({
...d,
user: {
avatar: url,
name: { f: props.userData.fname, l: props.userData.lname },
},
}));
});
} else {
setData((d) => ({
...d,
user: {
avatar: false,
name: { f: props.userData.fname, l: props.userData.lname },
},
}));
}
}, [props.avatar, props.userData]);
if (Object.keys(data).length === 0) {
return (
<div className="flex items-center justify-between p-5 animate-pulse">
<div className="flex items-center gap-2">
<div className="w-8 h-8 bg-slate-100 rounded-full" />
<div className="h-3 w-20 bg-slate-100 rounded-full" />
</div>
<div className="w-4 h-4 rounded-full bg-slate-10" />
</div>
);
}
return (
<div className="flex flex-col w-screen">
<div className="flex items-center w-full p-5 justify-between">
<div className="flex items-center gap-5">
<div className="flex gap-3 items-center">
<button
className="rounded-full w-8 h-8 cursor-pointer hover:ring-4 transition"
onClick={props.logout}
>
<img
src={data.user.avatar ? data.user.avatar : noAvatar}
alt="User avatar"
className="w-full h-full"
/>
</button>
<p className="text-slate-900">
<span className="font-bold bg-gradient-to-r from-blue-300 to-pink-500 text-transparent bg-clip-text">
CappCamp
</span>
<span className="font-medium"> di {data.user.name.f}</span>
</p>
</div>
<button className="items-center gap-2 text-slate-900 dark:text-white bg-gray-100 dark:bg-gray-700 py-2 px-3 rounded-lg hover:ring-4 hover:ring-gray-200 transition hidden md:flex">
<UserAddIcon className="w-4" />
<span className="font-medium text-sm">Invita amici</span>
</button>
{!isMobile && (
<div className="group hidden md:flex items-center px-2 text-sm rounded-lg bg-slate-100 overflow-hidden">
<SearchIcon className="w-4 text-slate-800 opacity-50 group-focus-within:opacity-100 transition-opacity" />
<input
type="text"
className="py-2 px-3 text-sm outline-none bg-transparent w-64 min-w-min"
placeholder="Cerca per titolo, autore, tags..."
/>
</div>
)}
</div>
{/* Navigation bar: other buttons */}
<div className="flex items-center gap-4 text-slate-900">
<button title="Menu">
<DotsHorizontalIcon className="w-6 aspect-square" />
</button>
<button title="Help">
<QuestionMarkCircleIcon className="w-6 aspect-square" />
</button>
</div>
</div>
{isMobile && (
<div className="flex px-4 pb-4">
<div className="group flex items-center px-2 text-sm rounded-lg bg-slate-100 overflow-hidden w-full">
<SearchIcon className="w-4 text-slate-800 opacity-50 group-focus-within:opacity-100 transition-opacity" />
<input
type="text"
className="py-2 px-3 text-sm outline-none bg-transparent w-64 min-w-min"
placeholder="Cerca per titolo, autore, tags..."
/>
</div>
</div>
)}
</div>
);
}
|
/**
* Copyright (c) 2010 MongoDB, Inc. <http://mongodb.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For questions and comments about this product, please see the project page at:
*
* http://github.com/mongodb/casbah
*
*/
package com.mongodb.casbah
import com.mongodb.{MongoCredential => JavaMongoCredential}
/**
 * Helper object for creating Java driver `MongoCredential` instances.
 *
 * @since 2.6
 * @see com.mongodb.MongoCredential
 */
object MongoCredential {

  /**
   * Creates a MongoCredential instance for the GSSAPI SASL mechanism.
   *
   * @param userName the user name
   * @return the credential
   */
  @deprecated("Please use MongoCredential.createGSSAPICredential", "2.7")
  def apply(userName: String): JavaMongoCredential =
    JavaMongoCredential.createGSSAPICredential(userName)

  /**
   * Creates a MongoCredential instance for the MongoDB Challenge Response protocol.
   *
   * @param userName the user name
   * @param database the source of the user name, typically a database name
   * @param password the password
   * @return the credential
   */
  @deprecated("Please use MongoCredential.createMongoCRCredential", "2.7")
  def apply(userName: String, database: String, password: Array[Char]): JavaMongoCredential =
    JavaMongoCredential.createMongoCRCredential(userName, database, password)

  /**
   * Creates a MongoCredential instance for the GSSAPI SASL mechanism.
   *
   * @param userName the user name
   * @return the credential
   */
  def createGSSAPICredential(userName: String): JavaMongoCredential =
    JavaMongoCredential.createGSSAPICredential(userName)

  /**
   * Creates a MongoCredential instance for the MongoDB Challenge Response protocol.
   *
   * @param userName the user name
   * @param database the source of the user name, typically a database name
   * @param password the password
   * @return the credential
   */
  def createMongoCRCredential(userName: String, database: String, password: Array[Char]): JavaMongoCredential =
    JavaMongoCredential.createMongoCRCredential(userName, database, password)

  /**
   * Creates a MongoCredential instance for the MongoDB X.509 protocol.
   *
   * @param userName the non-null user name
   * @return the credential
   */
  def createMongoX509Credential(userName: String): JavaMongoCredential =
    JavaMongoCredential.createMongoX509Credential(userName)

  /**
   * Creates a MongoCredential instance for the PLAIN SASL mechanism.
   *
   * @param userName the non-null user name
   * @param source the source where the user is defined. This can be either `"\$external"` or the name of a database.
   * @param password the password
   * @return the credential
   */
  def createPlainCredential(userName: String, source: String, password: Array[Char]): JavaMongoCredential =
    JavaMongoCredential.createPlainCredential(userName, source, password)
}
|
#!/bin/sh -e
# Commit and push a "merged up to <SHA>" marker for a kernel tree.
# Reads BRANCH/BUILD/KERNEL_SHA/prev_KERNEL_SHA (and presumably KERNEL_TAG —
# TODO confirm) from ./version.sh in the current directory.
DIR=$PWD
git_bin=$(which git)
repo="https://github.com/torvalds/linux/commit"
compare="https://github.com/torvalds/linux/compare"

if [ -e ${DIR}/version.sh ]; then
	# Clear any values inherited from the environment before sourcing.
	unset BRANCH
	unset BUILD
	unset prev_KERNEL_SHA
	unset KERNEL_SHA
	. ${DIR}/version.sh

	# Default branch when version.sh does not set one.
	if [ ! "${BRANCH}" ] ; then
		BRANCH="master"
	fi

	# Include a GitHub compare link only when the previous SHA is known.
	if [ "x${prev_KERNEL_SHA}" = "x" ] ; then
		${git_bin} commit -a -m "${KERNEL_TAG}${BUILD}: merge to: ${repo}/${KERNEL_SHA}" -s
	else
		${git_bin} commit -a -m "${KERNEL_TAG}${BUILD}: merge to: ${repo}/${KERNEL_SHA}" -m "Compare: ${compare}/${prev_KERNEL_SHA}...${KERNEL_SHA}" -s
	fi

	echo "log: git push origin ${BRANCH}"
	${git_bin} push origin ${BRANCH}
fi
|
<gh_stars>0
import web3 from './web3';
import CampaignFactory from '../build/contracts/CampaignFactory.json';
// Singleton Contract handle for the deployed CampaignFactory.
const instance = new web3.eth.Contract(
  // BUG FIX: the fallback was `|| {}`; JSON.parse stringifies a non-string
  // argument, so it received "[object Object]" and threw a SyntaxError.
  // Fall back to an empty-ABI JSON string instead.
  // NOTE(review): newer truffle/solc artifacts expose `abi` rather than
  // `interface` — confirm which one this build output provides.
  JSON.parse(CampaignFactory.interface || '[]'),
  '0xfC57b9c20c31537cE4326091B5e7b9ca14F9012C'
);

export default instance;
|
#!/bin/bash
# Incremental build driver: runs `npm install` / `npm run build` only when
# package.json, tsconfig.json or the sources are newer than the timestamp
# recorded in mod_time.txt.  `clean` removes build output; `reset` also
# removes node_modules.

# Resolve the real directory of this script, following symlinks.
source="${BASH_SOURCE[0]}"
while [ -h "$source" ]; do
    dir="$(cd -P "$(dirname "$source")" >/dev/null 2>&1 && pwd -P)"
    source="$(readlink "$source")"
    [[ $source != /* ]] && source="$dir/$source"
done
directory="$(cd -P "$(dirname "$source")" >/dev/null 2>&1 && pwd -P)"
root=$(pwd -P)

# Parse arguments: -DD=<deps> plus an optional bare config word.
for i in "$@"; do
    case $i in
        -DD=*)
            dependencies="${i#*=}"
            shift
            ;;
        *)
            config="$i"
            shift
            ;;
    esac
done

# Pick a stat invocation whose "Modify" line is string-comparable.
# BUG FIX: the BSD format string was wrapped in double quotes inside the
# single-quoted assignment; after word splitting stat received literal
# quote characters as part of the format.
if [ "$(uname -s)" = "Darwin" ]; then
    STAT='stat -x -t %Y%m%d%H%M%S'
else
    STAT='stat'
fi

if [ "$config" = "clean" ]; then
    rm -rf library
    rm -f mod_time.txt
    exit 0
fi
if [ "$config" = "reset" ]; then
    rm -rf library
    rm -f mod_time.txt
    rm -rf node_modules
    rm -f package-lock.json
    exit 0
fi

"$directory/configure.sh"

# npm install when node_modules is missing or package.json is newer than
# the last recorded build.
if [ ! -d "node_modules" ]; then
    UPDATE_NODE=1
else
    if [ ! -f "mod_time.txt" ]; then
        UPDATE_NODE=1
    else
        # BUG FIX: quote the expanded paths so a checkout directory with
        # spaces does not word-split ($STAT itself must stay unquoted).
        pt="$($STAT "$directory/package.json" | grep Modify | awk '{print $2 $3}')"
        mt="$($STAT mod_time.txt | grep Modify | awk '{print $2 $3}')"
        if [ "$pt" \> "$mt" ]; then
            UPDATE_NODE=1
        fi
    fi
fi
if [ "$UPDATE_NODE" = "1" ]; then
    UPDATE_BUILD=1
    npm install
fi

# Rebuild when the newest file in source/ is newer than the newest file in
# library/.  NOTE(review): xargs without -0 still breaks on filenames with
# spaces — confirm the source tree's naming before hardening further.
if [ ! -d "library" ]; then
    UPDATE_BUILD=1
else
    st="$(find source/ -type f | xargs $STAT | grep Modify | awk '{print $2 $3}' | sort -r | head -1)"
    lt="$(find library/ -type f | xargs $STAT | grep Modify | awk '{print $2 $3}' | sort -r | head -1)"
    if [ "$st" \> "$lt" ]; then
        UPDATE_BUILD=1
    fi
fi
if [ ! -f "mod_time.txt" ]; then
    UPDATE_BUILD=1
else
    pt="$($STAT "$directory/tsconfig.json" | grep Modify | awk '{print $2 $3}')"
    mt="$($STAT mod_time.txt | grep Modify | awk '{print $2 $3}')"
    if [ "$pt" \> "$mt" ]; then
        UPDATE_BUILD=1
    fi
fi
if [ "$UPDATE_BUILD" = "1" ]; then
    if [ -d library ]; then
        rm -rf library
    fi
    npm run build
    echo "timestamp" > mod_time.txt
fi
|
#!/bin/bash
# Debug helper: dump the CircleCI pull-request environment variables.
echo "CIRCLE_PULL_REQUEST: " $CIRCLE_PULL_REQUEST
echo "CIRCLE_PULL_REQUESTS: " $CIRCLE_PULL_REQUESTS
echo "CIRCLE_PR_NUMBER: " $CIRCLE_PR_NUMBER
# NOTE(review): the unconditional failure exit looks intentional (keeping the
# job red while debugging) — confirm before reusing this script elsewhere.
exit 1
|
<filename>modules/caas/api/src/main/java/io/cattle/platform/api/hostapi/HostApiProxyTokenManager.java
package io.cattle.platform.api.hostapi;
import io.cattle.platform.api.auth.Policy;
import io.cattle.platform.api.resource.AbstractNoOpResourceManager;
import io.cattle.platform.api.utils.ApiUtils;
import io.cattle.platform.core.constants.AgentConstants;
import io.cattle.platform.core.dao.AgentDao;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.server.context.ServerContext;
import io.cattle.platform.token.TokenService;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.exception.ValidationErrorException;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import io.github.ibuildthecloud.gdapi.validation.ValidationErrorCodes;
import java.util.HashMap;
import java.util.Map;
/**
 * Resource manager that issues signed tokens agents use to reach the host
 * API proxy over a websocket, after verifying the agent actually manages
 * the host it claims (by reported UUID).
 */
public class HostApiProxyTokenManager extends AbstractNoOpResourceManager {

    /** Error code returned when the calling agent cannot be resolved. */
    private static final String VERIFY_AGENT = "CantVerifyAgent";

    TokenService tokenService;
    AgentDao agentDao;
    ObjectManager objectManager;

    public HostApiProxyTokenManager(TokenService tokenService, AgentDao agentDao, ObjectManager objectManager) {
        super();
        this.tokenService = tokenService;
        this.agentDao = agentDao;
        this.objectManager = objectManager;
    }

    /**
     * Builds a {@link HostApiProxyTokenImpl} containing a signed token for the
     * validated host plus the websocket URL the agent should connect to.
     */
    @Override
    public Object create(String type, ApiRequest request) {
        HostApiProxyToken p = request.proxyRequestObject(HostApiProxyToken.class);
        String hostUuid = validate(p);
        HostApiProxyTokenImpl token = new HostApiProxyTokenImpl();
        token.setToken(getToken(hostUuid));
        token.setReportedUuid(hostUuid);
        // Base URL: either the configured custom API host or the request's own
        // response URL with the scheme switched to websocket.
        StringBuilder buffer = new StringBuilder();
        if (ServerContext.isCustomApiHost()) {
            buffer.append(ServerContext.getHostApiBaseUrl(ServerContext.BaseProtocol.WEBSOCKET));
        } else {
            buffer.append(request.getResponseUrlBase().replaceFirst("http", "ws"));
        }
        if (buffer.length() <= 0) {
            throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, "CantConstructUrl");
        }
        // Strip a trailing slash before appending the backend path.
        if ('/' == buffer.charAt(buffer.length() - 1)) {
            buffer.deleteCharAt(buffer.length() - 1);
        }
        String url = buffer.append(HostApiUtils.HOST_API_PROXY_BACKEND.get()).toString();
        token.setUrl(url);
        return token;
    }

    /** Signs a token whose payload carries the host's reported UUID. */
    protected String getToken(String reportedUuid) {
        Map<String, Object> data = new HashMap<>();
        data.put(AgentConstants.REPORTED_UUID, reportedUuid);
        return tokenService.generateToken(data);
    }

    /**
     * Verifies the calling agent exists and manages a host with the reported
     * UUID; returns that host's canonical UUID.
     *
     * @throws ClientVisibleException when the agent cannot be resolved
     * @throws ValidationErrorException when the agent has no such host
     */
    protected String validate(HostApiProxyToken proxyToken) {
        String reportedUuid = proxyToken.getReportedUuid();
        Policy policy = ApiUtils.getPolicy();
        Agent agent = objectManager.loadResource(Agent.class, policy.getOption(Policy.AGENT_ID));
        if (agent == null) {
            throw new ClientVisibleException(ResponseCodes.FORBIDDEN, VERIFY_AGENT);
        }
        Map<String, Host> hosts = agentDao.getHosts(agent);
        Host host = hosts.get(reportedUuid);
        if (host == null) {
            throw new ValidationErrorException(ValidationErrorCodes.INVALID_REFERENCE, AgentConstants.REPORTED_UUID);
        }
        return host.getUuid();
    }
}
|
<gh_stars>1-10
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.StringTokenizer;
/**
 *
 * @author minchoba
 * Baekjoon 10819: maximize the sum of absolute differences
 *
 * @see https://www.acmicpc.net/problem/10819/
 *
 */
public class Boj10819 {
	private static final int SIZE = 10;
	private static final int INF = 1_000;

	public static void main(String[] args) throws Exception {
		// Read the input through a buffered reader
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		int N = Integer.parseInt(br.readLine());
		int[] arr = new int[SIZE];
		int[] tmpBig = new int[SIZE];
		int[] tmpSmall = new int[SIZE];
		Arrays.fill(arr, INF);
		StringTokenizer st = new StringTokenizer(br.readLine());
		for (int i = 0; i < N; i++) {
			arr[i] = Integer.parseInt(st.nextToken());
		}
		int loop = 0;
		if (N % 2 == 0) loop = N / 2;
		else loop = N / 2 + 1;
		int idx = 1;
		Arrays.sort(arr);
		// Build two alternating small/large arrangements of the sorted values.
		for (int i = 0; i < loop; i++) {
			tmpBig[idx] = arr[i];
			tmpBig[idx + 1] = arr[N - 1 - i]; // put the smallest value first
			tmpSmall[idx] = arr[N - 1 - i];
			tmpSmall[idx + 1] = arr[i]; // put the largest value first
			// place values alternately in big/small order
			idx += 2;
		}
		tmpBig[0] = tmpBig[N]; // move the trailing value to the very front
		tmpBig[N] = 0;
		tmpSmall[0] = tmpSmall[N];
		tmpSmall[N] = 0;
		int max = Math.max(getSum(tmpBig, N - 1), getSum(tmpSmall, N - 1)); // keep the larger of the two arrangements
		System.out.println(max); // print the result
	}

	/**
	 *
	 * @param arr array to evaluate
	 * @param loop sum differences up to index N - 2
	 * @return the maximum sum for this arrangement
	 */
	private static int getSum(int[] arr, int loop) {
		int sum = 0;
		for(int i = 0; i < loop; i++) {
			int A = arr[i] - arr[i + 1];
			if(A < 0) A = -A; // flip negative differences to positive
			sum += A;
		}
		return sum;
	}
}
|
import * as React from 'react';
import { SymLink } from './SymLink';
import { InputGroup, Col, Row, FormControl } from 'react-bootstrap';
export let hostFolder = '.';
export function SymLinks(props: any) {
return props.folders.map((v: any) => {
return <SymLink name={v} />;
});
}
/**
 * Input row for entering the host directory.  The typed value is mirrored
 * into the module-level mutable `hostFolder` export so other modules can
 * read the latest value without prop drilling.
 */
export class HostFolder extends React.Component {
  // Controlled-input state, seeded from the current module-level value.
  state = { val: hostFolder };
  props: any;
  render() {
    return (
      <Row>
        <Col lg={12} md={12} xs={12}>
          <InputGroup>
            <InputGroup.Prepend>
              <InputGroup.Text className="bg-dark text-white">
                Enter Host Directory
              </InputGroup.Text>
            </InputGroup.Prepend>
            <FormControl
              className="bg-dark text-white"
              value={this.state.val}
              onChange={(event: any) => {
                // Keep local state and the shared module variable in sync.
                this.setState({ val: event.target.value });
                hostFolder = event.target.value;
              }}
            />
          </InputGroup>
        </Col>
      </Row>
    );
  }
}
|
class CustomException(Exception):
    """Application error carrying a numeric code and an optional message.

    Attributes mirror the constructor arguments:
    - code: numeric/string error code identifying the failure.
    - msg: optional human-readable detail (None when not provided).
    """

    def __init__(self, code, msg=None):
        # Initialize the base Exception with no args (as before), so
        # str(self) stays empty and self.args stays ().
        super().__init__()
        self.code, self.msg = code, msg
def fibonacci(n):
    """Advance the Fibonacci pair (0, 1) by n steps and return the second
    element.

    Note the indexing: fibonacci(0) == 1 and fibonacci(10) == 89, matching
    the original implementation.
    """
    prev, curr = 0, 1
    steps = 0
    while steps < n:
        prev, curr = curr, prev + curr
        steps += 1
    return curr


print(fibonacci(10))
/// Maps `value` into [0, 1] relative to `range`: values below the range
/// yield 0, values at or above its upper bound yield 1, and everything in
/// between is linearly interpolated. Traps on a zero-width range.
func normalizeValue(_ value: CGFloat, inRange range: ClosedRange<CGFloat>) -> CGFloat {
    let lower = range.lowerBound
    let upper = range.upperBound
    guard lower != upper else {
        // Avoid division by zero when the range is invalid
        fatalError("Invalid range: lower bound is equal to upper bound")
    }
    switch value {
    case ..<lower:
        return 0.0
    case upper...:
        return 1.0
    default:
        return (value - lower) / (upper - lower)
    }
}
#!/bin/bash
# Verify NSM connectivity: every client pod must be able to ping its
# vpn-gateway / ucnf endpoint addresses.  Exits non-zero if any ping fails.
kubectl wait -n default --timeout=150s --for condition=Ready --all pods
# Ping all the things!
EXIT_VAL=0

# --- Kernel-interface clients (simple-client pods) --------------------------
for nsc in $(kubectl get pods -o=name | grep simple-client | sed 's@.*/@@'); do
    echo "===== >>>>> PROCESSING ${nsc} <<<<< ==========="
    for i in {1..10}; do
        echo Try ${i}
        for ip in $(kubectl exec -it "${nsc}" -- ip addr | grep inet | awk '{print $2}'); do
            # A 10.60.3.x address faces the VPN gateway; its peer is the
            # next host address up.
            if [[ "${ip}" == 10.60.3.* ]];then
                lastSegment=$(echo "${ip}" | cut -d . -f 4 | cut -d / -f 1)
                nextOp=$((lastSegment + 1))
                targetIp="10.60.3.${nextOp}"
                endpointName="vpn-gateway-nse"
            fi
            if [ -n "${targetIp}" ]; then
                if kubectl exec -it "${nsc}" -- ping -A -c 10 "${targetIp}" ; then
                    echo "NSC ${nsc} with IP ${ip} pinging ${endpointName} TargetIP: ${targetIp} successful"
                    PingSuccess="true"
                else
                    echo "NSC ${nsc} with IP ${ip} pinging ${endpointName} TargetIP: ${targetIp} unsuccessful"
                    EXIT_VAL=1
                fi
                # Second leg: the address on the far side of the VPN.
                targetIp="10.60.2.2"
                if kubectl exec -it "${nsc}" -- ping -A -c 10 "${targetIp}" ; then
                    echo "NSC ${nsc} with IP ${ip} pinging ${endpointName} TargetIP: ${targetIp} successful"
                    PingSuccess="true"
                else
                    echo "NSC ${nsc} with IP ${ip} pinging ${endpointName} TargetIP: ${targetIp} unsuccessful"
                    EXIT_VAL=1
                fi
                unset targetIp
                unset endpointName
            fi
        done
        if [ -n "${PingSuccess}" ]; then
            break
        fi
        sleep 2
    done
    if [ -z "${PingSuccess}" ]; then
        EXIT_VAL=1
        echo "+++++++==ERROR==ERROR=============================================================================+++++"
        echo "NSC ${nsc} failed ping to a vpn-gateway NetworkService"
        kubectl get pod "${nsc}" -o wide
        echo "POD ${nsc} Network dump -------------------------------"
        kubectl exec -ti "${nsc}" -- ip addr
        echo "+++++++==ERROR==ERROR=============================================================================+++++"
    else
        # BUG FIX: this success message used to print unconditionally, even
        # after a recorded failure.
        echo "All check OK. NSC ${nsc} behaving as expected."
    fi
    unset PingSuccess
done

# --- VPP/memif clients (ucnf-client pods) -----------------------------------
for nsc in $(kubectl get pods -o=name | grep -E "ucnf-client" | sed 's@.*/@@'); do
    echo "===== >>>>> PROCESSING ${nsc} <<<<< ==========="
    for i in {1..10}; do
        echo Try ${i}
        for ip in $(kubectl exec -it "${nsc}" -- vppctl show int addr | grep L3 | awk '{print $2}'); do
            if [[ "${ip}" == 10.60.3.* ]];then
                lastSegment=$(echo "${ip}" | cut -d . -f 4 | cut -d / -f 1)
                nextOp=$((lastSegment + 1))
                targetIp="10.60.3.${nextOp}"
                endpointName="ucnf-endpoint"
            fi
            if [ -n "${targetIp}" ]; then
                # Prime the pump, its normal to get a packet loss due to arp
                kubectl exec -it "${nsc}" -- vppctl ping "${targetIp}" repeat 10 > /dev/null 2>&1
                OUTPUT=$(kubectl exec -it "${nsc}" -- vppctl ping "${targetIp}" repeat 3)
                echo "${OUTPUT}"
                RESULT=$(echo "${OUTPUT}" | grep "packet loss" | awk '{print $6}')
                if [ "${RESULT}" = "0%" ]; then
                    echo "NSC ${nsc} with IP ${ip} pinging ${endpointName} TargetIP: ${targetIp} successful"
                    # BUG FIX: the original reset EXIT_VAL=0 here, which
                    # masked any failure recorded earlier in the run.
                    PingSuccess="true"
                else
                    echo "NSC ${nsc} with IP ${ip} pinging ${endpointName} TargetIP: ${targetIp} unsuccessful"
                    EXIT_VAL=1
                fi
                targetIp="10.60.2.2"
                kubectl exec -it "${nsc}" -- vppctl ping "${targetIp}" repeat 10 > /dev/null 2>&1
                OUTPUT=$(kubectl exec -it "${nsc}" -- vppctl ping "${targetIp}" repeat 3)
                echo "${OUTPUT}"
                RESULT=$(echo "${OUTPUT}" | grep "packet loss" | awk '{print $6}')
                if [ "${RESULT}" = "0%" ]; then
                    echo "NSC ${nsc} with IP ${ip} pinging ${endpointName} TargetIP: ${targetIp} successful"
                    PingSuccess="true"
                else
                    echo "NSC ${nsc} with IP ${ip} pinging ${endpointName} TargetIP: ${targetIp} unsuccessful"
                    EXIT_VAL=1
                fi
                unset targetIp
                unset endpointName
            fi
        done
        if [ -n "${PingSuccess}" ]; then
            break
        fi
        sleep 2
    done
    if [ -z "${PingSuccess}" ]; then
        EXIT_VAL=1
        echo "+++++++==ERROR==ERROR=============================================================================+++++"
        echo "NSC ${nsc} failed to connect to an icmp-responder NetworkService"
        kubectl get pod "${nsc}" -o wide
        echo "POD ${nsc} Network dump -------------------------------"
        kubectl exec -ti "${nsc}" -- vppctl show int
        kubectl exec -ti "${nsc}" -- vppctl show int addr
        kubectl exec -ti "${nsc}" -- vppctl show memif
        echo "+++++++==ERROR==ERROR=============================================================================+++++"
    fi
    unset PingSuccess
done
exit ${EXIT_VAL}
|
#!/bin/bash
# Stop on error
#
set -e
# Log execution
#
#set -ex
# Build configuration: which remix to produce ("beam" or "kettle") and where
# the input artifacts live.
BUILD_TYPE=$1
SOFT_DIR=/home/kettle/software
KETTLE_BUILD=8.2.0.7-719
BASE_FILE=$SOFT_DIR/pdi-ce-${KETTLE_BUILD}.zip
REMIX_VERSION=${KETTLE_BUILD}-REMIX
TMP_DIR_BASE=/tmp
KETTLE_FOLDER=${TMP_DIR_BASE}/data-integration
# Plugins stripped from the base distribution.
# fix: the list previously contained four entries twice (kettle-drools5-plugin,
# lucid-db-streaming-loader-plugin, ms-access-plugins, pdi-teradata-tpt-plugin);
# rm -rf is idempotent so deduplicating is behavior-neutral.
PLUGINS_TO_DELETE_LIST="kettle-openerp-plugin kettle-shapefilereader-plugin kettle-version-checker kettle-drools5-plugin lucid-db-streaming-loader-plugin ms-access-plugins pdi-teradata-tpt-plugin kettle-palo-plugin platform-utils-plugin"
ENGINE_CONFIG_PATCH=$SOFT_DIR/pdi-engine-configuration-${KETTLE_BUILD}.zip
BEAM_PLUGIN_FILE=$SOFT_DIR/kettle-beam-1.0.0-beta-2.15.0.zip
CARTE_PATCH_FILE=data-integration-static-folder.gz
# Make sure the base release file exists
# fix: this guard previously tested $BEAM_PLUGIN_FILE (copy-paste from the
# beam-plugin check below), so a missing base archive was not caught here.
#
if [ ! -f "$BASE_FILE" ]
then
    echo The base Kettle release file \"$BASE_FILE\" couldn\'t be found
    exit 1
fi
# Select output artifact names and validate build-type-specific inputs.
if [ "$BUILD_TYPE" = "beam" ]
then
    ################################################################
    # BEAM options
    ################################################################
    REMIX_ZIP=kettle-neo4j-remix-beam-${REMIX_VERSION}.zip
    REMIX_TGZ=kettle-neo4j-remix-beam-${REMIX_VERSION}.tgz
    REMIX_LOG=kettle-neo4j-remix-beam-${REMIX_VERSION}.log
    # Make sure the beam plugin can be found
    #
    if [ ! -f "$BEAM_PLUGIN_FILE" ]
    then
        echo The beam plugin file \"$BEAM_PLUGIN_FILE\" couldn\'t be found
        exit 1
    fi
    # Make sure the engine patch file exists
    #
    if [ ! -f "$ENGINE_CONFIG_PATCH" ]
    then
        echo The engine configuration patch file \"$ENGINE_CONFIG_PATCH\" couldn\'t be found
        exit 1
    fi
    ################################################################
    # Kettle options
    ################################################################
elif [ "$BUILD_TYPE" = "kettle" ]
then
    REMIX_ZIP=kettle-neo4j-remix-${REMIX_VERSION}.zip
    REMIX_TGZ=kettle-neo4j-remix-${REMIX_VERSION}.tgz
    REMIX_LOG=kettle-neo4j-remix-${REMIX_VERSION}.log
else
    echo Specify \"beam\" or \"kettle\" as build type
    # fix: a bare "exit" returns the status of the preceding echo (0), so an
    # invalid build type looked like success to callers; signal failure.
    exit 1
fi
# Build log lives next to the extraction folder; it is uploaded at the end.
LOGFILE=${TMP_DIR_BASE}/${REMIX_LOG}
# Truncate (or create) the build log.
> $LOGFILE
################################################################
# Start the build
################################################################
echo Remix build start >> ${LOGFILE}
echo Remix version : ${REMIX_VERSION} >> ${LOGFILE}
echo Remix date : $(date '+%F %H:%M:%S') >> ${LOGFILE}
# Start from a clean extraction folder.
if [ -d /tmp/data-integration ]
then
    rm -rf /tmp/data-integration
fi
# Unzip the BASE_FILE
#
echo Extracting base archive ${BASE_FILE} >> ${LOGFILE}
unzip -q $BASE_FILE -d /tmp/
# The stock PDI archive ships without the static/ folder Carte needs; restore it.
echo Patching the missing static folder for Carte with ${CARTE_PATCH_FILE} >> ${LOGFILE}
tar -xzf ${CARTE_PATCH_FILE} -C /tmp/data-integration/
# Get rid of a bunch of plugins...
#
for plugin in ${PLUGINS_TO_DELETE_LIST}
do
    echo Removing plugin ${plugin} >> ${LOGFILE}
    rm -rf $KETTLE_FOLDER/plugins/${plugin}
done
# Beam options
#
if [ "$BUILD_TYPE" = "beam" ]
then
    # Install the Kettle Beam plugin
    #
    unzip -q -o $BEAM_PLUGIN_FILE -d $KETTLE_FOLDER/plugins
    echo Installed $BEAM_PLUGIN_FILE >> ${LOGFILE}
    # Patch the run configuration
    #
    unzip -q -o $ENGINE_CONFIG_PATCH -d $KETTLE_FOLDER
    echo Patched to add the Beam Run Configuration >> ${LOGFILE}
fi
# Stage the helper scripts next to the extraction folder so they can run there.
cp getLatestSamples.sh $TMP_DIR_BASE
cp getLatestBase.sh $TMP_DIR_BASE
cp getLatestSpoonGit.sh $TMP_DIR_BASE
# get the latest samples
#
cd $TMP_DIR_BASE
./getLatestSamples.sh >> ${LOGFILE}
unzip -q -o kettle-plugin-examples.zip -d $KETTLE_FOLDER
# Return to the original working directory, silently.
cd - > /dev/null
# Fetch the latest build of a plugin from GitHub and unpack it into the Kettle
# plugins folder.
#   $1 = repository slug (owner/repo), $2 = artifact base name
# (refactor: this fetch+unzip pair was previously repeated verbatim nine times)
install_latest_plugin() {
    ./getLatestBase.sh "$1" "$2" >> ${LOGFILE}
    unzip -q -o $TMP_DIR_BASE/"$2"-latest.zip -d $KETTLE_FOLDER/plugins
}
# Latest Azure plugins
install_latest_plugin mattcasters/kettle-azure-event-hubs kettle-azure-event-hubs
# Latest Data Set plugins
install_latest_plugin mattcasters/pentaho-pdi-dataset pentaho-pdi-dataset
# Latest Kettle debug plugin
install_latest_plugin mattcasters/kettle-debug-plugin kettle-debug-plugin
# Latest Neo4j plugins
install_latest_plugin knowbi/knowbi-pentaho-pdi-neo4j-output Neo4JOutput
# Latest Kettle Neo4j Logging plugin
install_latest_plugin mattcasters/kettle-neo4j-logging kettle-neo4j-logging
# Latest Kettle Metastore plugin
install_latest_plugin mattcasters/kettle-metastore kettle-metastore
# Latest needful things & install Maitre: the plugin also ships the Maitre
# launchers and two jars that must live in the distribution's top level / lib.
install_latest_plugin mattcasters/kettle-needful-things kettle-needful-things
cp $KETTLE_FOLDER/plugins/kettle-needful-things/Maitre.bat $KETTLE_FOLDER
cp $KETTLE_FOLDER/plugins/kettle-needful-things/maitre.sh $KETTLE_FOLDER
cp $KETTLE_FOLDER/plugins/kettle-needful-things/kettle-needful-things-*.jar $KETTLE_FOLDER/lib
cp $KETTLE_FOLDER/plugins/kettle-needful-things/lib/picocli-*.jar $KETTLE_FOLDER/lib
# GitSpoon! (uses its own fetch script, so it stays outside the helper)
#
./getLatestSpoonGit.sh >> ${LOGFILE}
unzip -q -o $TMP_DIR_BASE/pdi-git-plugin-latest.zip -d $KETTLE_FOLDER/plugins
# The Environment plugin
install_latest_plugin mattcasters/kettle-environment kettle-environment
# The Load Text From File plugin
install_latest_plugin mattcasters/load-text-from-file-plugin load-text-from-file-plugin
################################################################
# Packaging
################################################################
# Correct other writeable permissions
#
cd $KETTLE_FOLDER
chmod -R o-w *
chmod 770 ../data-integration
echo file permissions fixed >> ${LOGFILE}
# Patch Spoon.sh, get rid of silly warnings: drop the removed MaxPermSize JVM
# flag and export SKIP_WEBKITGTK_CHECK right after UBUNTU_MENUPROXY.
#
< spoon.sh \
  sed 's/ -XX:MaxPermSize=256m//g' \
  | sed 's/export UBUNTU_MENUPROXY=0/export UBUNTU_MENUPROXY=0\n\n# Skip WebkitGTK warning\n#\nexport SKIP_WEBKITGTK_CHECK=1\n/g' \
  > spoon.sh.new
mv spoon.sh spoon.sh.orig
mv spoon.sh.new spoon.sh
chmod +x spoon.sh
echo patched \"spoon.sh\" >> ${LOGFILE}
# Now zip it back up...
#
cd $TMP_DIR_BASE
if [ -f "$REMIX_ZIP" ]
then
    rm -f "$REMIX_ZIP"
fi
if [ -f "$REMIX_TGZ" ]
then
    rm -f "$REMIX_TGZ"
fi
################################################################
# Packaging
################################################################
echo Building remix archive ${REMIX_ZIP} >> ${LOGFILE}
zip -q -r "$REMIX_ZIP" data-integration
echo Building remix archive ${REMIX_TGZ} >> ${LOGFILE}
tar -czf "$REMIX_TGZ" data-integration
################################################################
# Upload to AWS
################################################################
echo Uploading archive to s3://kettle-neo4j/$REMIX_ZIP >> ${LOGFILE}
s3cmd put "$REMIX_ZIP" s3://kettle-neo4j/ --multipart-chunk-size-mb=4096
s3cmd setacl s3://kettle-neo4j/"$REMIX_ZIP" --acl-public
echo Uploading archive to s3://kettle-neo4j/$REMIX_TGZ >> ${LOGFILE}
s3cmd put "$REMIX_TGZ" s3://kettle-neo4j/ --multipart-chunk-size-mb=4096
s3cmd setacl s3://kettle-neo4j/"$REMIX_TGZ" --acl-public
echo Remix build done >> ${LOGFILE}
# The log file lives in $TMP_DIR_BASE (the current directory), so the bare
# filename resolves correctly here.
s3cmd put "$REMIX_LOG" s3://kettle-neo4j/ --multipart-chunk-size-mb=4096
s3cmd setacl s3://kettle-neo4j/"$REMIX_LOG" --acl-public
cd -
|
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "gf2d_element_list.h"
#include "simple_logger.h"
/**
 * Compute the on-screen position of the i'th child of a list element.
 * Wrapping lists tile items row-major (horizontal) or column-major (vertical);
 * non-wrapping lists stack items along the single axis.
 * Returns (0,0) when element or its list data is NULL.
 */
Vector2D gf2d_element_get_item_position(Element* element, int i)
{
    ListElement* list;
    Vector2D position = { 0 };
    int itemsPerLine;
    if (!element)return position;
    list = (ListElement*)element->data;
    if (!list)return position;
    if ((list->listStyle == LS_Horizontal) && (list->wraps))
    {
        itemsPerLine = element->bounds.w / list->itemSize.x;
        /* fix: when bounds are smaller than one item the capacity truncated to
           zero and the modulo/division below divided by zero; clamp to 1 */
        if (itemsPerLine < 1)itemsPerLine = 1;
        position.x = element->bounds.x + ((i % itemsPerLine) * list->itemSize.x);
        position.y = element->bounds.y + ((i / itemsPerLine) * list->itemSize.y);
        return position;
    }
    if ((list->listStyle == LS_Vertical) && (list->wraps))
    {
        itemsPerLine = element->bounds.h / list->itemSize.y;
        if (itemsPerLine < 1)itemsPerLine = 1;/* same zero-capacity guard */
        position.x = element->bounds.x + ((i / itemsPerLine) * list->itemSize.x);
        position.y = element->bounds.y + ((i % itemsPerLine) * list->itemSize.y);
        return position;
    }
    if (list->listStyle == LS_Horizontal)
    {
        position.x = element->bounds.x + (i * list->itemSize.x);
        position.y = element->bounds.y;
        return position;
    }
    if (list->listStyle == LS_Vertical)
    {
        position.x = element->bounds.x;
        position.y = element->bounds.y + (i * list->itemSize.y);
        return position;
    }
    return position;
}
/**
 * Draw every child of a list element at its computed slot, shifted by offset.
 * NULL element/list data or NULL children are skipped silently.
 */
void gf2d_element_list_draw(Element* element, Vector2D offset)
{
    ListElement* listData;
    Element* child;
    Vector2D where;
    int total, index;
    if (!element)return;
    listData = (ListElement*)element->data;
    if (!listData)return;
    total = gfc_list_get_count(listData->list);
    for (index = 0; index < total; index++)
    {
        child = (Element*)gfc_list_get_nth(listData->list, index);
        if (!child)continue;
        where = gf2d_element_get_item_position(element, index);
        vector2d_add(where, where, offset);
        gf2d_element_draw(child, where);
    }
}
/**
 * Update every child of a list element at its slot position (plus offset) and
 * collect any elements the children report as updated.
 * @return a newly allocated List of updated elements, or NULL if none.
 *         Caller owns the returned list.
 */
List* gf2d_element_list_update(Element* element, Vector2D offset)
{
    ListElement* list;
    Vector2D position;
    int count, i;
    Element* e;
    List* ret = NULL;
    List* updated;
    if (!element)return NULL;
    list = (ListElement*)element->data;
    if (!list)return NULL;
    /* fix: removed a stray vector2d_add(position, offset, element->bounds)
       here — `position` was unconditionally recomputed per item below, and the
       call mixed a Rect into a Vector2D addition */
    count = gfc_list_get_count(list->list);
    for (i = 0; i < count; i++)
    {
        e = (Element*)gfc_list_get_nth(list->list, i);
        if (!e)continue;
        position = gf2d_element_get_item_position(element, i);
        vector2d_add(position, position, offset);
        updated = gf2d_element_update(e, position);
        if (updated != NULL)
        {
            if (ret == NULL)
            {
                ret = gfc_list_new();
            }
            /* merge the child's results into ours and free the child's list */
            gfc_list_concat_free(ret, updated);
        }
    }
    return ret;
}
/**
 * Free a list element's data: every child element, the child list itself,
 * then the ListElement. Safe to call with NULL element or NULL data.
 */
void gf2d_element_list_free(Element* element)
{
    ListElement* listData;
    Element* child;
    int total, index;
    if (!element)return;
    listData = (ListElement*)element->data;
    if (listData == NULL)return;
    /*for each item, free it*/
    total = gfc_list_get_count(listData->list);
    for (index = 0; index < total; index++)
    {
        child = (Element*)gfc_list_get_nth(listData->list, index);
        if (!child)continue;
        gf2d_element_free(child);
    }
    gfc_list_delete(listData->list);
    free(listData);
}
/**
 * Allocate a zero-initialized ListElement with an empty child list.
 * @return the new ListElement, or NULL on allocation failure (logged).
 */
ListElement* gf2d_element_list_new()
{
    /* calloc gives the same zeroed memory the old malloc+memset produced */
    ListElement* listData = (ListElement*)calloc(1, sizeof(ListElement));
    if (listData == NULL)
    {
        slog("failed to allocate memory for list");
        return NULL;
    }
    listData->list = gfc_list_new();
    return listData;
}
/**
 * Allocate a ListElement configured with layout parameters.
 * @param itemSize size of one item slot (drives slot positions)
 * @param ls       LS_Horizontal or LS_Vertical
 * @param wraps    nonzero to wrap items into rows/columns
 * @param scrolls  nonzero to allow scrolling
 * @return the configured ListElement, or NULL on allocation failure.
 */
ListElement* gf2d_element_list_new_full(Vector2D itemSize, ListStyle ls, int wraps, int scrolls)
{
    ListElement* listData = gf2d_element_list_new();
    if (listData == NULL)return NULL;
    vector2d_copy(listData->itemSize, itemSize);
    listData->listStyle = ls;
    listData->wraps = wraps;
    listData->scrolls = scrolls;
    return listData;
}
/**
 * Turn a generic Element into a list element by attaching list data and
 * wiring the list draw/update/free callbacks.
 */
void gf2d_element_make_list(Element* e, ListElement* list)
{
    if ((e == NULL) || (list == NULL))return;// no op
    e->type = ET_List;
    e->data = list;
    e->free_data = gf2d_element_list_free;
    e->draw = gf2d_element_list_draw;
    e->update = gf2d_element_list_update;
}
/**
 * Remove (but do not free) an item from a list element's child list.
 * No-op when any argument or the element's list data is NULL.
 */
void gf2d_element_list_remove_item(Element* e, Element* item)
{
    ListElement* list;
    if ((!e) || (!item))return;// no op
    list = (ListElement*)e->data;
    /* fix: guard NULL data before dereferencing, consistent with the other
       list functions in this file */
    if (!list)return;
    gfc_list_delete_data(list->list, (void*)item);
}
/**
 * Append an item to a list element's child list; the list takes ownership
 * (items are freed by gf2d_element_list_free).
 * No-op when any argument or the element's list data is NULL.
 */
void gf2d_element_list_add_item(Element* e, Element* item)
{
    ListElement* list;
    if ((!e) || (!item))return;// no op
    list = (ListElement*)e->data;
    /* fix: guard NULL data before dereferencing, consistent with the other
       list functions in this file */
    if (!list)return;
    gfc_list_append(list->list, (void*)item);
}
/**
 * Configure an element as a list from JSON and load its child elements.
 * Expected keys: "style" ("horizontal"/"vertical"), "wraps" (bool),
 * "scrolls" (bool), "item_size" (vector2), "elements" (array of element
 * configs). Missing keys fall back to zeroed defaults.
 */
void gf2d_element_load_list_from_config(Element* e, SJson* json)
{
    SJson* value = NULL;
    SJson* item = NULL;
    ListElement* list = NULL;
    Vector2D vector = { 0 };
    ListStyle ls = 0;
    int i, count;
    const char* style = NULL;
    /* short int: sj_get_bool_value writes into these — NOTE(review): assumes
       the sjson API takes short int* for bools; confirm against sj_get_bool_value */
    short int wraps = 0, scrolls = 0;
    if ((!e) || (!json))
    {
        slog("call missing parameters");
        return;
    }
    value = sj_object_get_value(json, "style");
    style = sj_get_string_value(value);
    if (style)
    {
        if (strcmp(style, "horizontal") == 0)
        {
            ls = LS_Horizontal;
        }
        if (strcmp(style, "vertical") == 0)
        {
            ls = LS_Vertical;
        }
    }
    value = sj_object_get_value(json, "wraps");
    sj_get_bool_value(value, &wraps);
    value = sj_object_get_value(json, "scrolls");
    sj_get_bool_value(value, &scrolls);
    value = sj_object_get_value(json, "item_size");
    sj_value_as_vector2d(value, &vector);
    /* build the list data and install it on the element */
    list = gf2d_element_list_new_full(vector, ls, wraps, scrolls);
    gf2d_element_make_list(e, list);
    /* recursively load each child element config and append it */
    value = sj_object_get_value(json, "elements");
    count = sj_array_get_count(value);
    for (i = 0; i < count; i++)
    {
        item = sj_array_get_nth(value, i);
        if (!item)continue;
        gf2d_element_list_add_item(e, gf2d_element_load_from_config(item));
    }
}
/*eol@eof*/ |
-- ***************************************************************************
-- File: 12_6.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
SPOOL 12_6.lis
SET SERVEROUTPUT ON SIZE 1000000
DECLARE
   -- Local buffers receiving the session's registered module/action names
   lv_module_txt VARCHAR2(48);
   lv_action_txt VARCHAR2(32);
BEGIN
   -- Read Original Values and Display
   DBMS_APPLICATION_INFO.READ_MODULE(lv_module_txt, lv_action_txt);
   DBMS_OUTPUT.PUT_LINE('Before Module: ' || lv_module_txt ||
      CHR(9) || ' Action: ' || lv_action_txt);
   -- Register this block's module/action for the session.
   -- NOTE(review): 'BLock' is a typo, but it is a runtime string (program
   -- output), so it is left unchanged here.
   DBMS_APPLICATION_INFO.SET_MODULE('PL/SQL BLock',
      'Testing DBMS_APPLICATION_INFO');
   -- Read Changed Values and Display
   DBMS_APPLICATION_INFO.READ_MODULE(lv_module_txt, lv_action_txt);
   DBMS_OUTPUT.PUT_LINE('After Module: ' || lv_module_txt ||
      CHR(9) || ' Action: ' || lv_action_txt);
END;
/
SPOOL OFF
|
// Doxygen-generated search index data — do not edit by hand; regenerated on
// every documentation build.
var searchData=
[
  ['xpsr_5ftype',['xPSR_Type',['../unionxPSR__Type.html',1,'']]]
];
|
<gh_stars>10-100
#!/usr/bin/python
# -*- coding:utf-8 -*-
from setuptools import setup

# Packaging metadata for the KuMEX (KuCoin Futures) Python SDK.
setup(
    name='kumex-python',
    version='v2.0.5',
    # NOTE(review): 'kumex/marke_data' looks like a typo of 'market_data', but
    # it must match the actual directory name on disk — confirm before changing.
    packages=['kumex', 'kumex/base_request', 'kumex/marke_data', 'kumex/trade', 'kumex/user',
              'kumex/websocket', 'kumex/ws_token'],
    license="MIT",
    author='Grape',
    author_email="<EMAIL>",
    url='https://github.com/Kucoin/kumex-python-sdk',
    description="kumex-api-sdk",
    # Runtime dependencies: REST via requests, streaming via websockets.
    install_requires=['requests', 'websockets'],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
|
export LIB_NAME=AlignedAlloc
# Make Arduino library directory (and parent directories, if necessary).
mkdir -p "${PREFIX}"/include/Arduino
rc=$?; if [[ $rc != 0 ]]; then exit $rc; fi
# Copy library source files into Arduino library directory.
# fix: mkdir/cp failures were previously ignored — only the rm below was
# checked — so a failed copy could still report success.
cp -ra "${SRC_DIR}" "${PREFIX}"/include/Arduino/${LIB_NAME}
rc=$?; if [[ $rc != 0 ]]; then exit $rc; fi
# Drop the build script from the installed copy.
rm "${PREFIX}"/include/Arduino/${LIB_NAME}/build.sh
rc=$?; if [[ $rc != 0 ]]; then exit $rc; fi
|
#!/usr/bin/env bash
set -euo pipefail

# Build the buildpack binary for Linux; -s -w strips symbol/DWARF tables and
# osusergo avoids cgo for user lookups, keeping the binary static.
GOOS="linux" go build -ldflags='-s -w' -tags osusergo -o bin/main github.com/paketo-buildpacks/leiningen/cmd/main

# Optionally strip the binary further (STRIP=true in the environment).
if [ "${STRIP:-false}" != "false" ]; then
  strip bin/main
fi

# Optionally compress with the tool named in COMPRESS (e.g. upx).
if [ "${COMPRESS:-none}" != "none" ]; then
  $COMPRESS bin/main
fi

# The buildpack API expects build/detect entry points; both are the same binary.
ln -fs main bin/build
ln -fs main bin/detect
|
package api
import "testing"
// shortURLTests lists representative inputs for createShortURL: plain URLs
// plus arbitrary strings, since the function accepts any string.
var shortURLTests = []struct {
	n string
}{
	{"http://example.com"},
	{"http://google.com"},
	{"ASDFGHJKL"},
	{"1234567!@#$%^ASDFGH"},
}
// TestCreateShortURL verifies that createShortURL is deterministic (the same
// input always yields the same short form) and never returns its input
// unchanged.
func TestCreateShortURL(t *testing.T) {
	for _, tc := range shortURLTests {
		url := tc.n
		s1 := createShortURL(url)
		s2 := createShortURL(url)
		// fix: bare t.Fail() gave no indication of which input failed or why;
		// t.Errorf reports context and still continues with remaining cases.
		if s1 != s2 {
			t.Errorf("createShortURL(%q) not deterministic: got %q and %q", url, s1, s2)
		}
		if s1 == url {
			t.Errorf("createShortURL(%q) returned its input unshortened", url)
		}
	}
}
|
<gh_stars>0
package com.qht.common.util;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* set方法
* @author yangtonggan
* @date 2017-1-7
*/
/**
 * Reflection helpers that resolve JavaBean getter/setter {@link Method}s for a
 * named property via {@link PropertyDescriptor}.
 *
 * @author yangtonggan
 * @date 2017-1-7
 */
public class GetSetMethodUtil {

    private static final Logger logger = LoggerFactory.getLogger(GetSetMethodUtil.class);

    /**
     * Resolve the getter (read) method of a bean property.
     *
     * @param c         the bean class
     * @param fieldName the property name
     * @return the property's read method
     */
    public static Method obtainGetMethod(Class<?> c, String fieldName) {
        try {
            return new PropertyDescriptor(fieldName, c).getReadMethod();
        } catch (Exception e) {
            logger.debug(e.getMessage(), e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Resolve the setter (write) method of a bean property.
     *
     * @param c         the bean class
     * @param fieldName the property name
     * @return the property's write method
     */
    public static Method obtainSetMethod(Class<?> c, String fieldName) {
        try {
            return new PropertyDescriptor(fieldName, c).getWriteMethod();
        } catch (Exception e) {
            logger.debug(e.getMessage(), e);
            throw new RuntimeException(e);
        }
    }
}
|
/*
* #%L
* Common package for I/O and related utilities
* %%
* Copyright (C) 2005 - 2016 Open Microscopy Environment:
* - Board of Regents of the University of Wisconsin-Madison
* - Glencoe Software, Inc.
* - University of Dundee
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package loci.common.utests;
import static org.testng.AssertJUnit.assertEquals;
import java.io.EOFException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import loci.common.Constants;
import loci.common.HandleException;
import loci.common.URLHandle;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
* Unit tests for the loci.common.URLHandle class.
*
* @see loci.common.URLHandle
*/
/**
 * Unit tests for the loci.common.URLHandle class.
 *
 * @see loci.common.URLHandle
 */
public class URLHandleTest {

  // -- Fields --

  // Handle under test; each method gets a fresh temp file containing
  // "hello, world!\n" (14 bytes).
  private URLHandle fileHandle;

  private static final boolean IS_WINDOWS = System.getProperty("os.name").startsWith("Windows");

  // -- Setup methods --

  @BeforeMethod
  public void setup() throws IOException {
    File tmpFile = File.createTempFile("urlhandle", "tmp");
    tmpFile.deleteOnExit();
    // fix: try-with-resources — the stream previously leaked if write() threw
    try (FileOutputStream out = new FileOutputStream(tmpFile)) {
      out.write("hello, world!\n".getBytes(Constants.ENCODING));
    }
    String path = tmpFile.getAbsolutePath();
    // non-Windows absolute paths start with '/', so "file://" + path yields a
    // valid file URL; Windows paths need "file:/" instead
    fileHandle = new URLHandle((IS_WINDOWS ? "file:/" : "file://") + path);
  }

  // -- Test methods --

  @Test
  public void testLength() throws IOException {
    assertEquals(14, fileHandle.length());
  }

  @Test
  public void testSequentialReadByte() throws IOException {
    fileHandle.seek(0);
    // bytes of "hello, world!\n" in order
    assertEquals(0x68, fileHandle.readByte());
    assertEquals(0x65, fileHandle.readByte());
    assertEquals(0x6c, fileHandle.readByte());
    assertEquals(0x6c, fileHandle.readByte());
    assertEquals(0x6f, fileHandle.readByte());
    assertEquals(0x2c, fileHandle.readByte());
    assertEquals(0x20, fileHandle.readByte());
    assertEquals(0x77, fileHandle.readByte());
    assertEquals(0x6f, fileHandle.readByte());
    assertEquals(0x72, fileHandle.readByte());
    assertEquals(0x6c, fileHandle.readByte());
    assertEquals(0x64, fileHandle.readByte());
    assertEquals(0x21, fileHandle.readByte());
    assertEquals(0x0a, fileHandle.readByte());
  }

  @Test
  public void testSequentialReadShort() throws IOException {
    fileHandle.seek(0);
    assertEquals(0x6865, fileHandle.readShort());
    assertEquals(0x6c6c, fileHandle.readShort());
    assertEquals(0x6f2c, fileHandle.readShort());
    assertEquals(0x2077, fileHandle.readShort());
    assertEquals(0x6f72, fileHandle.readShort());
    assertEquals(0x6c64, fileHandle.readShort());
    assertEquals(0x210a, fileHandle.readShort());
  }

  @Test
  public void testSequentialReadInt() throws IOException {
    fileHandle.seek(0);
    assertEquals(0x68656c6c, fileHandle.readInt());
    assertEquals(0x6f2c2077, fileHandle.readInt());
    assertEquals(0x6f726c64, fileHandle.readInt());
  }

  @Test
  public void testSequentialReadLong() throws IOException {
    fileHandle.seek(0);
    assertEquals(0x68656c6c6f2c2077L, fileHandle.readLong());
  }

  @Test
  public void testSeekForwardReadByte() throws IOException {
    fileHandle.seek(5);
    assertEquals(0x2c, fileHandle.readByte());
  }

  @Test
  public void testSeekForwardReadShort() throws IOException {
    fileHandle.seek(5);
    assertEquals(0x2c20, fileHandle.readShort());
  }

  @Test
  public void testSeekForwardReadInt() throws IOException {
    fileHandle.seek(5);
    assertEquals(0x2c20776f, fileHandle.readInt());
  }

  @Test
  public void testSeekForwardReadLong() throws IOException {
    fileHandle.seek(5);
    assertEquals(0x2c20776f726c6421L, fileHandle.readLong());
  }

  @Test
  public void testSeekBackReadByte() throws IOException {
    fileHandle.seek(13);
    fileHandle.seek(7);
    assertEquals(0x77, fileHandle.readByte());
  }

  @Test
  public void testSeekBackReadShort() throws IOException {
    fileHandle.seek(13);
    fileHandle.seek(7);
    assertEquals(0x776f, fileHandle.readShort());
  }

  @Test
  public void testSeekBackReadInt() throws IOException {
    fileHandle.seek(13);
    fileHandle.seek(7);
    assertEquals(0x776f726c, fileHandle.readInt());
  }

  @Test
  public void testSeekBackReadLong() throws IOException {
    fileHandle.seek(13);
    fileHandle.seek(5);
    assertEquals(0x2c20776f726c6421L, fileHandle.readLong());
  }

  // Seeking past EOF (file is 14 bytes) and then reading must raise EOFException.
  @Test (expectedExceptions = {EOFException.class})
  public void testEOF() throws IOException {
    fileHandle.seek(16);
    fileHandle.readByte();
  }

  // URLHandle is read-only; writes must raise HandleException.
  @Test (expectedExceptions = {HandleException.class})
  public void testWrite() throws IOException {
    fileHandle.write(0);
  }
}
|
package dirtree
import (
"path/filepath"
"testing"
"testing/fstest"
)
// TestPrintMode_format exercises PrintMode.format for each mode flag and file
// type against fixture paths under testdata/dir. `want` is the exact formatted
// column string, including trailing padding.
func TestPrintMode_format(t *testing.T) {
	root := filepath.Join("testdata", "dir")
	dirA := filepath.Join(root, "A")
	file1 := filepath.Join(root, "A", "file1")
	symfile1 := filepath.Join(root, "A", "symfile1")
	symdirA := filepath.Join(root, "A", "B", "symdirA")
	tests := []struct {
		name     string
		mode     PrintMode
		root     string
		fullpath string
		ft       filetype
		want     string
		wantErr  bool
	}{
		{
			name: "mode=ModeType/file1",
			mode: ModeType,
			root: root, fullpath: file1, ft: typeFile,
			want: "f ",
		},
		{
			name: "mode=ModeSize/file1",
			mode: ModeSize,
			root: root, fullpath: file1, ft: typeFile,
			want: "13b ",
		},
		{
			name: "mode=ModeStd/file1",
			mode: ModeDefault,
			root: root, fullpath: file1, ft: typeFile,
			want: "f 13b ",
		},
		{
			name: "mode=ModeAll/file1",
			mode: ModeAll,
			root: root, fullpath: file1, ft: typeFile,
			want: "f 13b crc=0451ac5e ",
		},
		{
			name: "mode=ModeStd/dirA",
			mode: ModeDefault,
			root: root, fullpath: dirA, ft: typeDir,
			want: "d ",
		},
		{
			name: "mode=ModeType/symfile1",
			mode: ModeDefault,
			root: root, fullpath: symfile1, ft: typeOther,
			want: "? ",
		},
		{
			name: "mode=ModeType/symdirA",
			mode: ModeDefault,
			root: root, fullpath: symdirA, ft: typeOther,
			want: "? ",
		},
		{
			name: "mode=ModeCRC32/file1",
			mode: ModeCRC32,
			root: root, fullpath: file1, ft: typeFile,
			want: "crc=0451ac5e ",
		},
		{
			name: "mode=ModeCRC32/dirA",
			mode: ModeCRC32,
			root: root, fullpath: dirA, ft: typeDir,
			want: "crc=n/a ",
		},
		{
			name: "mode=ModeCRC32/symfile1",
			mode: ModeCRC32,
			root: root, fullpath: symfile1, ft: typeOther,
			want: "crc=n/a ",
		},
		// Error cases
		{
			name: "mode=ModeAll/do-not-exist",
			mode: ModeAll,
			root: root, fullpath: "do-not-exist", ft: typeOther,
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// NOTE(review): format is called with a nil fsys and without tt.root;
			// the root field appears to be fixture bookkeeping only — confirm.
			got, err := tt.mode.format(nil, tt.fullpath, tt.ft)
			if (err != nil) != tt.wantErr {
				t.Errorf("PrintMode.format() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Errorf("format error\ngot :%q\nwant:%q", got, tt.want)
			}
		})
	}
}
func Test_checksumNA(t *testing.T) {
	// Verify that checksum does not fail on error and that instead, it returns
	// the string returned by checksumNA. Errors are caught before.
	// Case 1: nil filesystem — checksum must degrade to the N/A marker.
	t.Run("fsys=nil", func(t *testing.T) {
		if got := checksum(nil, "do-not-exist"); got != checksumNA() {
			t.Errorf("checksum() = %v, want %v", got, checksumNA())
		}
	})
	// Case 2: valid but empty in-memory filesystem — missing path, same result.
	t.Run("fsys=MapFS", func(t *testing.T) {
		if got := checksum(fstest.MapFS{}, "do-not-exist"); got != checksumNA() {
			t.Errorf("checksum() = %v, want %v", got, checksumNA())
		}
	})
}
|
# Import the necessary libraries
from behave import given, when, then

# Define the BDD scenario for adding two numbers
@given("I have two numbers {a:d} and {b:d}")
def step_given_two_numbers(context, a, b):
    """Store the two integers parsed from the step text on the scenario context."""
    context.a = a
    context.b = b

@when("I add the numbers")
def step_when_add_numbers(context):
    """Compute the sum of the previously stored numbers."""
    context.sum = context.a + context.b

@then("I print the addition result")
def step_then_print_result(context):
    """Print the addition result."""
    print(f"Sum of {context.a} and {context.b} is: {context.sum}")

# Run the BDD scenario
# This will execute the step implementations and print the addition result
<reponame>nimoqqq/roses
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
*
* 1.请不要删除和修改根目录下的LICENSE文件。
* 2.请不要删除和修改Guns源码头部的版权声明。
* 3.请保留源码和相关描述文件的项目出处,作者声明等。
* 4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns
* 5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns
* 6.若您的项目无法满足以上几点,可申请商业授权
*/
package cn.stylefeng.roses.kernel.system.modular.user.service;
import cn.stylefeng.roses.kernel.system.api.UserOrgServiceApi;
import cn.stylefeng.roses.kernel.system.api.pojo.user.request.UserOrgRequest;
import cn.stylefeng.roses.kernel.system.modular.user.entity.SysUserOrg;
import com.baomidou.mybatisplus.extension.service.IService;
import java.util.List;
/**
* 用户组织机构关联信息
*
* @author fengshuonan
* @date 2020/12/19 22:17
*/
/**
 * Service for user–organization association records.
 *
 * @author fengshuonan
 * @date 2020/12/19 22:17
 */
public interface SysUserOrgService extends IService<SysUserOrg>, UserOrgServiceApi {

    /**
     * Create a user–organization association.
     *
     * @param userOrgResponse request parameters
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    void add(UserOrgRequest userOrgResponse);

    /**
     * Create a user–organization association.
     *
     * @param userId user id
     * @param orgId  organization id
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    void add(Long userId, Long orgId);

    /**
     * Create a user–organization association with a position.
     *
     * @param userId     user id
     * @param orgId      organization id
     * @param positionId position id
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    void add(Long userId, Long orgId, Long positionId);

    /**
     * Delete an association.
     *
     * @param userOrgResponse request parameters
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    void del(UserOrgRequest userOrgResponse);

    /**
     * Delete all associations of a user.
     *
     * @param userId user id
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    void delByUserId(Long userId);

    /**
     * Update an association.
     *
     * @param userOrgResponse request parameters
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    void edit(UserOrgRequest userOrgResponse);

    /**
     * Update an association.
     *
     * @param userId user id
     * @param orgId  organization id
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    void edit(Long userId, Long orgId);

    /**
     * Update an association with a position.
     *
     * @param userId     user id
     * @param orgId      organization id
     * @param positionId position id
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    void edit(Long userId, Long orgId, Long positionId);

    /**
     * Fetch a single association.
     *
     * @param userOrgResponse request parameters
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    SysUserOrg detail(UserOrgRequest userOrgResponse);

    /**
     * Query a list of associations.
     *
     * @param userOrgResponse request parameters
     * @author chenjinlong
     * @date 2021/1/26 12:52
     */
    List<SysUserOrg> findList(UserOrgRequest userOrgResponse);
}
|
// Service-worker precache manifest — auto-generated by the build (workbox/CRA
// style); revisions are content hashes. Do not edit by hand.
self.__precacheManifest = (self.__precacheManifest || []).concat([
  {
    "revision": "adaf14e7ce5d8c25aca33489ffbe1bfb",
    "url": "/index.html"
  },
  {
    "revision": "7b1052cdbbfbb1ccca48",
    "url": "/static/css/main.aa9ae84a.chunk.css"
  },
  {
    "revision": "93985b26476462c54900",
    "url": "/static/js/2.f5a89e1f.chunk.js"
  },
  {
    "revision": "eacdcc46c2547a6a176d37719fbe19d4",
    "url": "/static/js/2.f5a89e1f.chunk.js.LICENSE.txt"
  },
  {
    "revision": "7b1052cdbbfbb1ccca48",
    "url": "/static/js/main.ad8b6d46.chunk.js"
  },
  {
    "revision": "65ded9f3214c8d22d007",
    "url": "/static/js/runtime-main.4759d232.js"
  },
  {
    "revision": "1b6c8068fd4dd474242f06b4439dd79e",
    "url": "/static/media/info.1b6c8068.svg"
  },
  {
    "revision": "f727a9be7cb1fc9d6cceb97c280f3255",
    "url": "/static/media/tree-icon.f727a9be.svg"
  },
  {
    "revision": "9852740d0efc4d46c35329110bd8c559",
    "url": "/static/media/tree10.9852740d.svg"
  },
  {
    "revision": "529d52c823f352a47573b5ec7bd3cec8",
    "url": "/static/media/tree5.529d52c8.svg"
  },
  {
    "revision": "6bcb438e0d877749aa7633d37602cb3e",
    "url": "/static/media/tree5.6bcb438e.nwk"
  },
  {
    "revision": "bb52a25aabb80ec90d82fae598b6b0e7",
    "url": "/static/media/tree6.bb52a25a.eps"
  },
  {
    "revision": "be7f672277f9eb031075dbcfa902b108",
    "url": "/static/media/tree6.be7f6722.svg"
  },
  {
    "revision": "d7e5d6c4f997b332208e716d4eb1e058",
    "url": "/static/media/tree6.d7e5d6c4.nwk"
  },
  {
    "revision": "95ca4c0061a6906969f08a543caa4cdb",
    "url": "/static/media/tree7.95ca4c00.svg"
  },
  {
    "revision": "a1c3e0d3e0a04022c863e9168b6c2e54",
    "url": "/static/media/tree8.a1c3e0d3.svg"
  },
  {
    "revision": "50b7d926ae3081595c845a12ccfe8b52",
    "url": "/static/media/tree9.50b7d926.svg"
  },
  {
    "revision": "615a28cb59f511be71f1baeb3742d70f",
    "url": "/static/media/watermark.1a9fac0d.png"
  }
]);
#include <cstdio> //c¿é¤J¿é¥X
//#include <fstream>
//#include <sstream> //cmd
//#include <conio.h> //if(kbhit())
//#include <windows.h>
//#include <ctime> //time srand(time(NULL));
#include <algorithm> //sort(n,n+k) reverse
//#include <iomanip> //¤p¼Æ fixed<<setprecision or setw
//#include <cstdlib> //system
//#include <cmath>
//#include <iostream>
using namespace std;
// For each test case: read n stick lengths and print the smallest perimeter of
// any valid triangle that can be formed, or 2147483647 (INT_MAX) if none.
int main(){
    int n;
    // scanf returns EOF at end of input; equivalent to the ~scanf idiom.
    while(scanf("%d", &n) != EOF){
        int side[n];  // VLA, as in the original (GCC extension)
        for(int i = 0; i < n; i++)scanf("%d", &side[i]);
        sort(side, side + n);
        int best = 2147483647;  // sentinel printed when no triangle exists
        for(int i = 0; i + 2 < n; i++){
            for(int j = i + 1; j + 1 < n; j++){
                // sorted order: side[i] <= side[j] <= side[j+1], so only the
                // triangle inequality on the largest side needs checking
                if(side[i] + side[j] > side[j + 1]){
                    int perimeter = side[i] + side[j] + side[j + 1];
                    if(perimeter < best)best = perimeter;
                    // for this i, any later j only yields a larger perimeter
                    break;
                }
            }
        }
        printf("%d\n", best);
    }
}
echo "starting the script"
# Create a local conda env in ./env and activate it.
# NOTE(review): without -y, `conda create` prompts for confirmation — this
# script appears to expect interactive use; confirm before automating.
echo "**** Creating and activating env****"
conda create --prefix ./env && source activate ./env
echo "**** Installing dependencies****"
pip install -r requirements.txt
# PyTorch with CUDA 11.3 from the official pytorch channel.
echo "**** Installing Pytorch*****"
conda install pytorch torchvision torchaudio cudatoolkit=11.3 -c pytorch
# Snapshot the resolved environment for reproducibility.
echo "****exporting the env in conda.yaml file"
conda env export > conda.yaml
echo "*****Instalation completed*******"
|
// https://youtu.be/w-kcXRj0Tu4
// https://piazza.com/class/j63w1pdyopf7kj?cid=54
// Evaluate a postfix (RPN) expression given as a string of single characters.
// Non-operator characters are pushed as-is; '+' and '*' pop two values, coerce
// them to numbers, and push the result. Returns -1 if an operator ever fires
// with fewer than two values on the stack; otherwise returns whatever is left
// on top of the stack (undefined for an empty input string).
function stackMachine( str ) {
    const ops = {
        '+': ( a, b ) => ( +a ) + ( +b ),
        '*': ( a, b ) => ( +a ) * ( +b )
    };
    const stack = [];
    let underflow = false;
    for ( const ch of str ) {
        const op = ops[ ch ];
        if ( !op ) {
            stack.push( ch );
            continue;
        }
        // flag underflow but keep going, matching the original behavior
        if ( stack.length < 2 ) underflow = true;
        stack.push( op( stack.pop(), stack.pop() ) );
    }
    return underflow ? -1 : stack.pop();
}
|
require( 'should' );
var _ = require( 'lodash' ),
Vector = require( '../src/vector.js' )( 'nodeid' );
describe( 'when version vector has a single node', function() {
    describe( 'when comparing compatible but equal vectors', function() {
        // Two identical single-node vectors should compare as equal.
        const left = new Vector( 'a:1' );
        const right = new Vector( 'a:1' );
        it( 'should be equal', function() {
            left.compare( right ).should.equal( 'equal' );
        } );
    } );
    describe( 'when comparing compatible but unequal vectors', function() {
        // Same node with a lower counter — left is strictly behind right.
        const left = new Vector( 'a:1' );
        const right = new Vector( 'a:2' );
        it( 'should be lesser', function() {
            left.compare( right ).should.equal( 'lesser' );
        } );
    } );
} );
describe( 'when incrementing vector', function() {
    // Fixed: the inner describe title was copy-pasted from the comparison
    // suite ('when comparing compatible but equal vectors') and did not
    // describe what these tests actually verify — increment behavior.
    describe( 'when incrementing vectors', function() {
        var v1 = new Vector( 'a:1;b:2' ),
            v2 = new Vector( 'nodeid:1;b:1' );
        v1.increment();
        v2.increment();
        it( 'should increment missing node', function() {
            // 'nodeid' was absent from v1; increment() adds it (ending at 2
            // per the module's contract asserted here).
            v1.toString().should.equal( 'a:1;b:2;nodeid:2' );
            v1.versions[ 'nodeid' ].should.equal( 2 );
        } );
        it( 'should increment existing node', function() {
            // 'nodeid' was already present in v2 at 1; increment bumps it to 2.
            v2.toString().should.equal( 'b:1;nodeid:2' );
            v2.versions[ 'nodeid' ].should.equal( 2 );
        } );
    } );
    // NOTE(review): the suite below repeats a comparison test from the
    // single-node suite and does not exercise increment(); kept as-is to
    // avoid dropping coverage, but consider relocating it.
    describe( 'when comparing compatible but unequal vectors', function() {
        var v1 = new Vector( 'a:1' ),
            v2 = new Vector( 'a:2' );
        it( 'should be lesser', function() {
            v1.compare( v2 ).should.equal( 'lesser' );
        } );
    } );
} );
describe( 'when version vector has multiple nodes', function() {
    describe( 'when comparing equal vectors', function() {
        // Node order in the serialized form must not affect equality.
        var v1 = new Vector( 'a:1;b:2' ),
            v2 = new Vector( 'b:2;a:1' );
        it( 'should be equal', function() {
            v1.compare( v2 ).should.equal( 'equal' );
        } );
    } );
    describe( 'when comparing incompatible vectors', function() {
        // Disjoint node sets with no shared history compare as diverged.
        var v1 = new Vector( 'a:1' ),
            v2 = new Vector( 'b:2' );
        it( 'should be diverged', function() {
            v1.compare( v2 ).should.equal( 'diverged' );
        } );
    } );
    // Fixed: an exact duplicate of the 'when comparing equal vectors'
    // suite (same vectors, same assertion) was removed here.
    describe( 'when comparing a greater vector', function() {
        // v1 dominates v2 on node 'a' (2 > 1) and matches on 'b'.
        var v1 = new Vector( 'a:2;b:2' ),
            v2 = new Vector( 'b:2;a:1' );
        it( 'should be greater', function() {
            v1.compare( v2 ).should.equal( 'greater' );
        } );
    } );
    describe( 'when comparing a greater vector because of additional nodes', function() {
        // v1 carries an extra node 'c' on top of an otherwise-equal vector.
        var v1 = new Vector( 'a:1;b:2;c:1' ),
            v2 = new Vector( 'b:2;a:1' );
        it( 'should be greater', function() {
            v1.compare( v2 ).should.equal( 'greater' );
        } );
    } );
} );
describe( 'when merging vectors', function() {
    // Merging unions the node sets of both vectors and, per the assertion
    // below, also bumps the local 'nodeid' entry on the merged vector.
    const target = new Vector( 'a:2;b:3' );
    const incoming = new Vector( 'b:3;c:2' );
    target.merge( incoming );
    it( 'should include all nodes and increment local node', function() {
        target.toString().should.equal( 'a:2;b:3;c:2;nodeid:2' );
    } );
} );
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.