text stringlengths 1 1.05M |
|---|
import React from 'react'
import Layout from '../components/Layout'
import Container from '../components/Container'
import PageTitle from '../components/PageTitle'
import SEO from '../components/SEO'
import SearchContainer from '../components/SearchContainer'
const SearchPage = () => (
<Layout>
<SEO title="Search" description="Search description goes here" />
<Container>
<PageTitle>Search</PageTitle>
<SearchContainer />
</Container>
</Layout>
)
export default SearchPage |
<filename>webpack.config.js<gh_stars>0
/* eslint-disable @typescript-eslint/no-var-requires */
/* eslint-disable no-undef */
/* eslint-disable @typescript-eslint/naming-convention */
const devCerts = require("office-addin-dev-certs");
const CopyWebpackPlugin = require("copy-webpack-plugin");
const CustomFunctionsMetadataPlugin = require("custom-functions-metadata-plugin");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const MonacoWebpackPlugin = require('monaco-editor-webpack-plugin');
const urlDev = "https://localhost:3000/";
const urlProd = "https://www.contoso.com/"; // CHANGE THIS TO YOUR PRODUCTION DEPLOYMENT LOCATION
// Fetches the Office add-in development certificates and reshapes them into
// the { cacert, key, cert } form consumed by the dev server below.
async function getHttpsOptions() {
  const { ca, key, cert } = await devCerts.getHttpsServerOptions();
  return { cacert: ca, key, cert };
}
// Builds the webpack configuration for the Office add-in.
// `options.mode` selects dev vs prod behaviour (manifest naming, URL rewrite).
module.exports = async (env, options) => {
  const dev = options.mode === "development";
  // Suffix for the copied manifest file: manifest.dev.xml / manifest.prod.xml.
  const buildType = dev ? "dev" : "prod";
  const config = {
    devtool: "source-map",
    entry: {
      functions: "./src/functions/functions.ts",
      // functions: {
      //   import: "./src/functions/functions.ts",
      //   dependOn: "ts",
      // },
      taskpane: "./src/taskpane/taskpane.ts",
      //ts: "typescript",
    },
    output: {
      devtoolModuleFilenameTemplate:
        "webpack:///[resource-path]?[loaders]",
      clean: true,
    },
    resolve: {
      extensions: [".ts", ".html", ".js"],
    },
    module: {
      // The bundled TypeScript compiler is a huge single file; skipping
      // webpack's parse step for it speeds up the build considerably.
      noParse: [require.resolve("typescript/lib/typescript.js")],
      rules: [
        {
          test: /\.ts$/,
          exclude: /node_modules/,
          use: "ts-loader",
        },
        {
          test: /\.html$/,
          exclude: /node_modules/,
          use: "html-loader",
        },
        {
          test: /\.(png|jpg|jpeg|gif|ico)$/,
          type: "asset/resource",
          generator: {
            filename: "assets/[name][ext][query]",
          },
        },
        {
          test: /\.css$/,
          use: ['style-loader', 'css-loader']
        },
        {
          test: /\.ttf$/,
          use: ['file-loader']
        }
      ],
    },
    plugins: [
      // Bundles Monaco editor workers for the listed languages.
      new MonacoWebpackPlugin({
        languages: ['typescript', 'javascript', 'python']
      }),
      // Emits custom-functions metadata (functions.json) from the
      // functions source file.
      new CustomFunctionsMetadataPlugin({
        output: "functions.json",
        input: "./src/functions/functions.ts",
      }),
      new HtmlWebpackPlugin({
        filename: "taskpane.html",
        template: "./src/taskpane/taskpane.html",
        chunks: ["taskpane", "functions"],
      }),
      new CopyWebpackPlugin({
        patterns: [
          {
            from: "assets/*",
            to: "assets/[name][ext][query]",
          },
          {
            from: "manifest*.xml",
            to: "[name]." + buildType + "[ext]",
            // Dev manifests are copied unchanged; prod manifests get every
            // localhost URL rewritten to the production deployment URL.
            transform(content) {
              if (dev) {
                return content;
              } else {
                return content
                  .toString()
                  .replace(new RegExp(urlDev, "g"), urlProd);
              }
            },
          },
        ],
      }),
    ],
    devServer: {
      static: [__dirname],
      headers: {
        "Access-Control-Allow-Origin": "*",
      },
      // Use explicitly supplied https options (or none during a plain build);
      // otherwise generate dev certificates on the fly.
      https:
        env.WEBPACK_BUILD || options.https !== undefined
          ? options.https
          : await getHttpsOptions(),
      port: process.env.npm_package_config_dev_server_port || 3000,
    },
    stats: {
      errorDetails: true
    }
  };
  return config;
};
|
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fantasy.nacos.core.distributed.raft.processor;
import com.fantasy.nacos.consistency.SerializeFactory;
import com.fantasy.nacos.consistency.entity.Log;
import com.fantasy.nacos.consistency.entity.Response;
import com.fantasy.nacos.core.distributed.raft.JRaftServer;
import com.fantasy.nacos.core.distributed.raft.utils.FailoverClosure;
import com.alipay.sofa.jraft.Node;
import com.alipay.sofa.jraft.Status;
import com.alipay.sofa.jraft.error.RaftError;
import com.alipay.sofa.jraft.rpc.Connection;
import com.alipay.sofa.jraft.rpc.RpcContext;
import com.google.protobuf.Message;
import org.junit.Assert;
import org.junit.Test;
import java.util.concurrent.atomic.AtomicReference;
public class AbstractProcessorTest {

    /**
     * Stubbed raft server whose applyOperation always fails with a fixed
     * error response and status, so error propagation can be observed.
     */
    private JRaftServer server = new JRaftServer() {
        @Override
        public void applyOperation(Node node, Message data, FailoverClosure closure) {
            closure.setResponse(
                    Response.newBuilder().setSuccess(false).setErrMsg("Error message transmission").build());
            closure.run(new Status(RaftError.UNKNOWN, "Error message transmission"));
        }
    };

    /**
     * Verifies that an error raised inside the raft server is propagated back
     * through the RPC context as a failed Response carrying the original
     * error message.
     */
    @Test
    public void testErrorThroughRpc() {
        // Captures whatever the processor sends back through the RPC context.
        final AtomicReference<Response> reference = new AtomicReference<>();
        RpcContext context = new RpcContext() {
            @Override
            public void sendResponse(Object responseObj) {
                reference.set((Response) responseObj);
            }

            @Override
            public Connection getConnection() {
                return null;
            }

            @Override
            public String getRemoteAddress() {
                return null;
            }
        };
        AbstractProcessor processor = new NacosLogProcessor(server, SerializeFactory.getDefault());
        processor.execute(server, context, Log.newBuilder().build(), new JRaftServer.RaftGroupTuple());
        Response response = reference.get();
        Assert.assertNotNull(response);
        Assert.assertEquals("Error message transmission", response.getErrMsg());
        Assert.assertFalse(response.getSuccess());
    }
}
|
<reponame>DarlanDelmondes/produtos-favoritos-springboot
package com.desafio.labs.springfavs.repository;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.query.Param;
import org.springframework.data.rest.core.annotation.RepositoryRestResource;
import org.springframework.stereotype.Repository;
import com.desafio.labs.springfavs.domain.Cliente;
@Repository
//@RepositoryRestResource(collectionResourceRel = "clientes",path = "clientes")
public interface ClienteRepository extends JpaRepository<Cliente,Long> {

    /** Finds a single client by exact name (case-sensitive). */
    Cliente findByNome(String nome);

    /** Pages through clients whose name contains the given fragment, ignoring case. */
    Page<Cliente> findByNomeContainingAllIgnoringCase(@Param("nome") String nome,Pageable pageable);

    /** Finds a single client by exact name, ignoring case. */
    Cliente findByNomeAllIgnoringCase(@Param("nome") String nome);
}
|
import Form from './components/Form';
import { Provider } from './Provider';
import vasion-formiojs from 'vasion-formiojs';
import Vue from 'vue';
// Provide a plugin by default that will register all components.
export class Plugin {
// Vue Plugin
static install (Vue: Vue, { providers, store, router }: { providers: Provider[], store: any, router: any }) {
// Vue.$formio = vasion-formiojs;
// Vue.component('Form', Form);
providers.forEach(provider => {
provider.init(Vue);
provider.registerRoutes(router);
provider.registerStore(store);
});
}
};
|
#!/bin/bash
# Builds (and optionally publishes) the oydid-base Docker image.
CONTAINER="oydid-base"
REPOSITORY="oydeu"

# read commandline options
BUILD_CLEAN=false    # --clean: run cleanup script and build without cache
DOCKER_UPDATE=false  # --dockerhub: push the image after building
while [ $# -gt 0 ]; do
    case "$1" in
        --clean*)
            BUILD_CLEAN=true
            ;;
        --dockerhub*)
            DOCKER_UPDATE=true
            ;;
        *)
            printf "unknown option(s)\n"
            # Use `return` when the script is sourced, `exit` when executed,
            # so an interactive shell is not killed on a bad option.
            if [ "${BASH_SOURCE[0]}" != "${0}" ]; then
                return 1
            else
                exit 1
            fi
    esac
    shift
done

if $BUILD_CLEAN; then
    # NOTE(review): assumes a Rails app providing script/clean.rb — confirm.
    rails r script/clean.rb
    docker build --platform linux/amd64 --no-cache -f ./docker/Dockerfile -t $REPOSITORY/$CONTAINER .
else
    docker build --platform linux/amd64 -f ./docker/Dockerfile -t $REPOSITORY/$CONTAINER .
fi

if $DOCKER_UPDATE; then
    docker push $REPOSITORY/$CONTAINER
fi
|
<gh_stars>1-10
// +build linux,ambient
package libcontainer
import "github.com/syndtr/gocapability/capability"
const allCapabilityTypes = capability.CAPS | capability.BOUNDS | capability.AMBS
|
/*
* mdaSpecMeterProcessor.cpp
* mda-vst3
*
* Created by <NAME> on 6/14/08.
*
* mda VST Plug-ins
*
* Copyright (c) 2008 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
#include "mdaSpecMeterProcessor.h"
#include "mdaSpecMeterController.h"
#include <cmath>
namespace Steinberg {
namespace Vst {
namespace mda {
#define SILENCE 0.00000001f
#ifdef SMTG_MDA_VST2_COMPATIBILITY
//-----------------------------------------------------------------------------
FUID SpecMeterProcessor::uid (0x5653546D, 0x64613F6D, 0x64612073, 0x7065636D);
#else
//-----------------------------------------------------------------------------
FUID SpecMeterProcessor::uid (0x67B9A082, 0xCF0B4BAC, 0x907AA8B7, 0x1D855F94);
#endif
//-----------------------------------------------------------------------------
// Constructor: associates this processor with its edit-controller class.
SpecMeterProcessor::SpecMeterProcessor ()
{
	setControllerClass (SpecMeterController::uid);
}
//-----------------------------------------------------------------------------
// Destructor: no resources owned here; base class handles teardown.
SpecMeterProcessor::~SpecMeterProcessor ()
{
}
//-----------------------------------------------------------------------------
// Sets up the stereo pass-through busses and the default analysis state.
// kmax/topband are re-derived from the sample rate in setActive().
tresult PLUGIN_API SpecMeterProcessor::initialize (FUnknown* context)
{
	tresult res = BaseProcessor::initialize (context);
	if (res == kResultTrue)
	{
		addAudioInput (USTRING("Stereo In"), SpeakerArr::kStereo);
		addAudioOutput (USTRING("Stereo Out"), SpeakerArr::kStereo);
		K = 0;             // sample counter within the analysis window
		kmax = 2048;       // analysis window length in samples
		topband = 11;      // highest filter-bank band index
		iK = 1.0f / (float)kmax;
		den = 1.0e-8f;     // anti-denormal offset; sign is flipped each block
		recalculate ();
	}
	return res;
}
//-----------------------------------------------------------------------------
// Nothing extra to clean up beyond the base implementation.
tresult PLUGIN_API SpecMeterProcessor::terminate ()
{
	return BaseProcessor::terminate ();
}
//-----------------------------------------------------------------------------
// On activation, sizes the analysis window for the current sample rate and
// resets all metering state; deactivation just forwards to the base class.
tresult PLUGIN_API SpecMeterProcessor::setActive (TBool state)
{
	if (state)
	{
		// Longer window / one extra band above 64 kHz.
		if (getSampleRate () > 64000)
		{ topband = 12; kmax = 4096; }
		else
		{ topband = 11; kmax = 2048; }
		iK = 1.0f / (float)kmax;
		// Reset published (uppercase) and working (lowercase) meter state.
		Lpeak = Rpeak = Lrms = Rrms = Corr = 0.0f;
		lpeak = rpeak = lrms = rrms = corr = 0.0f;
		Lhold = Rhold = 0.0f;
		Lmin = Rmin = 0.0000001f;
		// Clear band energies and polyphase filter history for all 16 slots.
		for(int32 i = 0; i<16; i++)
		{
			band[0][i] = band[1][i] = 0.0f;
			for(int32 j=0; j<6; j++) lpp[j][i] = rpp[j][i] = 0.0f;
		}
	}
	return BaseProcessor::setActive (state);
}
//-----------------------------------------------------------------------------
// While bypassed, no audio is processed, but the meters are still fed the
// tiny anti-denormal offset through the same analysis chain as
// doProcessing(), so the displayed values keep decaying smoothly.
// Returns true when the bypass path handled the block.
bool SpecMeterProcessor::bypassProcessing (ProcessData& data)
{
	if (isBypassed ())
	{
		int32 sampleFrames = data.numSamples;
		// Only run the decay loop while the meters are still above the floor.
		if (Lpeak > fabs (den) || Rpeak > fabs (den))
		{
			// Meter state is stored halved between blocks; scale up to work.
			Lpeak *= 2.f;
			Rpeak *= 2.f;
			Lhold *= 2.f;
			Rhold *= 2.f;
			den = -den;
			float l, r, p, q, iN = iK;
			int32 k=K, j0=topband, mask, j;
			while (--sampleFrames >= 0)
			{
				l = den; //anti-denormal
				r = den;
				lrms += l * l; //RMS integrate
				rrms += r * r;
				p = (float)fabs (l); if (p > lpeak) lpeak = p; //peak detect
				q = (float)fabs (r); if (q > rpeak) rpeak = q;
				/*
				if (p > 1.0e-8f && p < lmin) lmin = p; //'trough' detect
				if (q > 1.0e-8f && q < rmin) rmin = q;
				*/
				if ((l * r) > 0.0f) corr += iN; //measure correlation
				j = j0;
				mask = k << 1;
				do //polyphase filter bank
				{
					p = lpp[0][j] + 0.208f * l;
					lpp[0][j] = lpp[1][j];
					lpp[1][j] = l - 0.208f * p;
					q = lpp[2][j] + lpp[4][j] * 0.682f;
					lpp[2][j] = lpp[3][j];
					lpp[3][j] = lpp[4][j] - 0.682f * q;
					lpp[4][j] = l;
					lpp[5][j] += (float)fabs (p - q); //top octave
					l = p + q; //lower octaves
					p = rpp[0][j] + 0.208f * r;
					rpp[0][j] = rpp[1][j];
					rpp[1][j] = r - 0.208f * p;
					q = rpp[2][j] + rpp[4][j] * 0.682f;
					rpp[2][j] = rpp[3][j];
					rpp[3][j] = rpp[4][j] - 0.682f * q;
					rpp[4][j] = r;
					rpp[5][j] += (float)fabs (p - q); //top octave
					r = p + q; //lower octaves
					j--;
					mask >>= 1;
				} while (mask & 1);
				// End of an analysis window: publish working values.
				if (++k == kmax)
				{
					k = 0;
					if (lpeak == 0.0f) Lpeak = Lrms = 0.0f; else ///add limits here!
					{
						if (lpeak > 2.0f) lpeak = 2.0f;
						if (lpeak >= Lpeak)
						{
							Lpeak = lpeak;
							Lhold = 2.0f * Lpeak;
						}
						else
						{
							Lhold *= 0.95f;
							if (Lhold < Lpeak) Lpeak = Lhold;
						}
						Lmin = lmin;
						lmin *= 1.01f;
						Lrms += 0.2f * (iN * lrms - Lrms);
					}
					if (rpeak == 0.0f) Rpeak = Rrms = 0.0f; else
					{
						if (rpeak > 2.0f) rpeak = 2.0f;
						if (rpeak >= Rpeak)
						{
							Rpeak = rpeak;
							Rhold = 2.0f * Rpeak;
						}
						else
						{
							Rhold *= 0.95f;
							if (Rhold < Rpeak) Rpeak = Rhold;
						}
						Rmin = rmin;
						rmin *= 1.01f;
						Rrms += 0.2f * (iN * rrms - Rrms);
					}
					rpeak = lpeak = lrms = rrms = 0.0f;
					Corr += 0.1f * (corr - Corr); //correlation
					corr = SILENCE;
					float dec = 0.08f;
					for(j=0; j<13; j++) //spectrum output
					{
						band[0][j] += dec * (iN * lpp[5][j] - band[0][j]);
						if (band[0][j] > 2.0f) band[0][j] = 2.0f;
						//else if (band[0][j] < 0.014f) band[0][j] = 0.014f;
						band[1][j] += dec * (iN * rpp[5][j] - band[1][j]);
						if (band[1][j] > 2.0f) band[1][j] = 2.0f;
						//else if (band[1][j] < 0.014f) band[1][j] = 0.014f;
						rpp[5][j] = lpp[5][j] = SILENCE;
						dec = dec * 1.1f;
					}
				}
			}
			// Store state halved again until the next block.
			Lpeak /= 2.f;
			Rpeak /= 2.f;
			Lhold /= 2.f;
			Rhold /= 2.f;
			K = k;
		}
		// Still publish parameter values so the UI keeps updating.
		IParameterChanges* changes = data.outputParameterChanges;
		if (changes)
			sendParameterChanges (changes, data.numSamples);
		data.outputs[0].silenceFlags = data.inputs[0].silenceFlags;
		return true;
	}
	return false;
}
//-----------------------------------------------------------------------------
// Main audio callback. Audio passes through unchanged; the block is fed into
// peak/RMS/correlation meters and an octave-band polyphase filter bank whose
// results are published to the controller as parameter changes.
void SpecMeterProcessor::doProcessing (ProcessData& data)
{
	int32 sampleFrames = data.numSamples;
	float* in1 = data.inputs[0].channelBuffers32[0];
	float* in2 = data.inputs[0].channelBuffers32[1];
	float* out1 = data.outputs[0].channelBuffers32[0];
	float* out2 = data.outputs[0].channelBuffers32[1];
	// Copy input to output only when not already processing in place.
	if (in1 != out1)
		memcpy (out1, in1, sampleFrames * sizeof (float));
	if (in2 != out2)
		memcpy (out2, in2, sampleFrames * sizeof (float));
	// Skip analysis while both channels are flagged silent and the meters
	// have already decayed below the anti-denormal floor.
	if (data.inputs[0].silenceFlags & 3 && Lpeak <= fabs (den) && Rpeak <= fabs (den))
	{
		data.outputs[0].silenceFlags = data.inputs[0].silenceFlags;
		return;
	}
	data.outputs[0].silenceFlags = 0;
	// Meter state is stored halved between blocks; scale up to work.
	Lpeak *= 2.f;
	Rpeak *= 2.f;
	Lhold *= 2.f;
	Rhold *= 2.f;
	den = -den;
	float l, r, p, q, iN = iK;
	int32 k=K, j0=topband, mask, j;
	while (--sampleFrames >= 0)
	{
		l = *in1++;
		r = *in2++;
		l += den; //anti-denormal
		r += den;
		lrms += l * l; //RMS integrate
		rrms += r * r;
		p = (float)fabs (l); if (p > lpeak) lpeak = p; //peak detect
		q = (float)fabs (r); if (q > rpeak) rpeak = q;
		/*
		if (p > 1.0e-8f && p < lmin) lmin = p; //'trough' detect
		if (q > 1.0e-8f && q < rmin) rmin = q;
		*/
		if ((l * r) > 0.0f) corr += iN; //measure correlation
		j = j0;
		mask = k << 1;
		do //polyphase filter bank
		{
			p = lpp[0][j] + 0.208f * l;
			lpp[0][j] = lpp[1][j];
			lpp[1][j] = l - 0.208f * p;
			q = lpp[2][j] + lpp[4][j] * 0.682f;
			lpp[2][j] = lpp[3][j];
			lpp[3][j] = lpp[4][j] - 0.682f * q;
			lpp[4][j] = l;
			lpp[5][j] += (float)fabs (p - q); //top octave
			l = p + q; //lower octaves
			p = rpp[0][j] + 0.208f * r;
			rpp[0][j] = rpp[1][j];
			rpp[1][j] = r - 0.208f * p;
			q = rpp[2][j] + rpp[4][j] * 0.682f;
			rpp[2][j] = rpp[3][j];
			rpp[3][j] = rpp[4][j] - 0.682f * q;
			rpp[4][j] = r;
			rpp[5][j] += (float)fabs (p - q); //top octave
			r = p + q; //lower octaves
			j--;
			mask >>= 1;
		} while (mask & 1);
		// End of an analysis window: fold working values into the published
		// (uppercase) meters with hold/decay smoothing.
		if (++k == kmax)
		{
			k = 0;
			if (lpeak == 0.0f) Lpeak = Lrms = 0.0f; else ///add limits here!
			{
				if (lpeak > 2.0f) lpeak = 2.0f;
				if (lpeak >= Lpeak)
				{
					Lpeak = lpeak;
					Lhold = 2.0f * Lpeak;
				}
				else
				{
					Lhold *= 0.95f;
					if (Lhold < Lpeak) Lpeak = Lhold;
				}
				Lmin = lmin;
				lmin *= 1.01f;
				Lrms += 0.2f * (iN * lrms - Lrms);
			}
			if (rpeak == 0.0f) Rpeak = Rrms = 0.0f; else
			{
				if (rpeak > 2.0f) rpeak = 2.0f;
				if (rpeak >= Rpeak)
				{
					Rpeak = rpeak;
					Rhold = 2.0f * Rpeak;
				}
				else
				{
					Rhold *= 0.95f;
					if (Rhold < Rpeak) Rpeak = Rhold;
				}
				Rmin = rmin;
				rmin *= 1.01f;
				Rrms += 0.2f * (iN * rrms - Rrms);
			}
			rpeak = lpeak = lrms = rrms = 0.0f;
			Corr += 0.1f * (corr - Corr); //correlation
			corr = SILENCE;
			float dec = 0.08f;
			for(j=0; j<13; j++) //spectrum output
			{
				band[0][j] += dec * (iN * lpp[5][j] - band[0][j]);
				if (band[0][j] > 2.0f) band[0][j] = 2.0f;
				//else if (band[0][j] < 0.014f) band[0][j] = 0.014f;
				band[1][j] += dec * (iN * rpp[5][j] - band[1][j]);
				if (band[1][j] > 2.0f) band[1][j] = 2.0f;
				//else if (band[1][j] < 0.014f) band[1][j] = 0.014f;
				rpp[5][j] = lpp[5][j] = SILENCE;
				dec = dec * 1.1f;
			}
		}
	}
	// Store state halved again until the next block.
	Lpeak /= 2.f;
	Rpeak /= 2.f;
	Lhold /= 2.f;
	Rhold /= 2.f;
	K = k;
	IParameterChanges* changes = data.outputParameterChanges;
	if (changes)
		sendParameterChanges (changes, data.numSamples);
}
//-----------------------------------------------------------------------------
// Publishes all meter values to the controller as output parameter changes:
// first 2 x 13 band levels (starting at kBandParamStart), then the
// per-channel peak/hold/min/RMS meters and the correlation value, in the
// controller's fixed order starting at kLeftPeakParam.
void SpecMeterProcessor::sendParameterChanges (IParameterChanges* changes, int32 numSamples)
{
	IParamValueQueue* queue = nullptr;
	int32 index;
	int32 pid = SpecMeterController::kBandParamStart;
	for (int32 ch = 0; ch < 2; ch++)
	{
		for (int32 bnd = 0; bnd < 13; bnd++, pid++)
		{
			queue = changes->addParameterData (pid, index);
			if (queue)
				queue->addPoint (numSamples, band[ch][bnd], index);
		}
	}
	pid = SpecMeterController::kLeftPeakParam;
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Lpeak, index);
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Lhold, index);
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Lmin, index);
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Lrms, index);
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Rpeak, index);
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Rhold, index);
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Rmin, index);
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Rrms, index);
	queue = changes->addParameterData (pid++, index);
	if (queue)
		queue->addPoint (numSamples, Corr, index);
}
}}} // namespaces
|
/**
* Block chart Helpers
*/
/**
* Block Item Helpers
*/
/** Qualitative palette used to colour block-chart items. */
const COLORS = [
  '#9dbcea',
  '#e4e4ac',
  '#67c6f2',
  '#e9b198',
  '#5ad0da',
  '#eaaecf',
  '#b3e5b9',
  '#c4b7ea',
  '#bcbe8b',
  '#8fd4e6',
  '#8bc5ab',
  '#95eadc'
];

/**
 * Picks a palette colour by series index, cycling when the index exceeds the
 * palette size. The `value` argument is accepted for formatter-API
 * compatibility but is intentionally unused.
 */
export const qualitativeColorFormatter = (value, idx) => COLORS[idx % COLORS.length];
|
# Creates a MercadoPago payment (POST /v1/payments).
# Replace ACCESS_TOKEN_ENV with a valid access token before running; the card
# token, payer data and amounts below are sandbox/test values.
curl -X POST \
'https://api.mercadopago.com/v1/payments' \
-H 'Authorization: Bearer ACCESS_TOKEN_ENV' \
-d '{
  "token": "b3a7dbec3eb0d71798c4f19fec445795",
  "installments": 1,
  "transaction_amount": 58.80,
  "description": "Point Mini a maquininha que dá o dinheiro de suas vendas na hora",
  "payment_method_id": "visa",
  "payer": {
    "email": "test_user_123456@testuser.com",
    "identification": {
      "number": "19119119100",
      "type": "CPF"
    }
  },
  "notification_url": "https://www.suaurl.com/notificacoes/",
  "sponsor_id": null,
  "binary_mode": false,
  "external_reference": "MP0001",
  "statement_descriptor": "MercadoPago",
  "additional_info": {
    "items": [
      {
        "id": "PR0001",
        "title": "Point Mini",
        "description": "Producto Point para cobros con tarjetas mediante bluetooth",
        "picture_url": "https://http2.mlstatic.com/resources/frontend/statics/growth-sellers-landings/device-mlb-point-i_medium@2x.png",
        "category_id": "electronics",
        "quantity": 1,
        "unit_price": 58.80
      }
    ],
    "payer": {
      "first_name": "Nome",
      "last_name": "Sobrenome",
      "address": {
        "zip_code": "06233-200",
        "street_name": "Av das Nacoes Unidas",
        "street_number": 3003
      },
      "registration_date": "2019-01-01T12:01:01.000-03:00",
      "phone": {
        "area_code":"011",
        "number":"987654321"
      }
    },
    "shipments": {
      "receiver_address": {
        "street_name": "Av das Nacoes Unidas",
        "street_number": 3003,
        "zip_code": "06233200",
        "city_name": "Buzios",
        "state_name": "Rio de Janeiro"
      }
    }
  }
}'
|
# Gem namespace for bundling Font Awesome Pro 5 assets with Rails.
module FontAwesomePro5Rails
  # Upstream Font Awesome Pro release these assets track.
  FA_VERSION = '5.11.2'
  # Version of this gem itself.
  VERSION = '1.0'
end
|
<reponame>cwackerfuss/blockstack-onboarding
const path = require('path')
const HtmlWebpackPlugin = require('html-webpack-plugin')
const ExtractTextPlugin = require('extract-text-webpack-plugin')
const CopyWebpackPlugin = require('copy-webpack-plugin')
const baseConfig = require('./webpack.config.base')
module.exports = baseConfig({
devtool: 'source-map',
entry: [path.join(__dirname, 'src/main.js')],
output: {
filename: '[name]-[hash].min.js'
},
plugins: [
new HtmlWebpackPlugin({
template: 'src/index.tpl.html',
inject: 'body',
filename: 'index.html',
minify: {
collapseWhitespace: true,
minifyCSS: true
}
}),
new CopyWebpackPlugin([{
from: 'src/assets'
}]),
new ExtractTextPlugin({
filename: '[name].[hash].min.css',
disable: false,
allChunks: true
})
],
module: {
rules: [
{
test: /.css$/,
loader: ExtractTextPlugin.extract({
fallbackLoader: 'style-loader',
loader: 'css-loader'
})
}
]
}
})
|
<reponame>wolfchinaliu/gameCenter
package weixin.source.service;
import org.jeecgframework.core.common.service.CommonService;
import weixin.source.entity.WeixinUrlEntity;
import java.io.Serializable;
/**
 * Service interface for WeChat URL records, extending the generic
 * CommonService with SQL-enhanced default-button operations.
 */
public interface WeixinUrlServiceI extends CommonService{

    public <T> void delete(T entity);

    public <T> Serializable save(T entity);

    public <T> void saveOrUpdate(T entity);

    /**
     * Default button - SQL enhancement - insert operation.
     * @param t the WeChat URL entity to insert
     * @return presumably whether the enhanced SQL executed successfully —
     *         confirm against the implementation
     */
    public boolean doAddSql(WeixinUrlEntity t);

    /**
     * Default button - SQL enhancement - update operation.
     * @param t the WeChat URL entity to update
     * @return presumably whether the enhanced SQL executed successfully —
     *         confirm against the implementation
     */
    public boolean doUpdateSql(WeixinUrlEntity t);

    /**
     * Default button - SQL enhancement - delete operation.
     * @param t the WeChat URL entity to delete
     * @return presumably whether the enhanced SQL executed successfully —
     *         confirm against the implementation
     */
    public boolean doDelSql(WeixinUrlEntity t);
}
|
<reponame>adrianbparra/noise-controller-front-end
import React, { useContext, useEffect } from "react";
import { Link } from 'react-router-dom';
import { Menu, Dropdown, Icon } from 'semantic-ui-react';
import { useLazyQuery } from '@apollo/client';
import { USER } from "../../queries/queries.js";
import { AuthContext } from "../../auth/auth.js";
import NavSettings from "./NavSettings.js";
import MenuItem from "./MenuItem";
// Top navigation content that adapts to auth state: authenticated users get
// a class-selection dropdown plus settings; guests get Login / Sign Up links.
function NavAuth() {
  const { user } = useContext(AuthContext);
  // True while the viewport is below the 768px mobile breakpoint.
  const [isMobile, setMobile] = React.useState(getWindowWidth())
  const [getUser, {loading, data, error, called, variables }] = useLazyQuery(USER)

  // Keep the mobile flag in sync with window resizes; the listener is
  // removed on unmount.
  useEffect(()=>{
    function handleResize() {
      setMobile(getWindowWidth());
    }
    window.addEventListener('resize', handleResize);
    return () => window.removeEventListener('resize', handleResize);
  },[])

  // Lazily fetch the user's record once an authenticated user appears.
  useEffect(() => {
    if (user){
      getUser()
    }
  },[user])

  // Returns true below the 768px breakpoint.
  function getWindowWidth(){
    const { innerWidth } = window;
    return innerWidth < 768 ? true : false
  }

  if(user){
    return (
      <>
        <Dropdown
          item
          text = {data && data.getUser.selectedClass ? isMobile ? `${data.getUser.selectedClass.name.slice(0,10)}...` : data.getUser.selectedClass.name : "Classes" }
          simple
          loading={loading}
        >
          <Menu.Menu>
            <Menu.Item
              as={Link} to="/classform"
            >
              Add A Class
            </Menu.Item>
            <Menu.Item
              as={Link} to="/classes"
            >
              All Classes
            </Menu.Item>
            <Dropdown.Divider />
            {data && data.getUser.classes.map(cls=> <MenuItem key={cls.id} cls={cls} /> )}
          </Menu.Menu>
        </Dropdown>
        {isMobile ?
          <Menu.Menu position="right">
            <Dropdown
              item
              simple
              trigger={(<span><Icon name="user"/></span>)}
            >
              <Menu.Menu>
                <NavSettings/>
              </Menu.Menu>
            </Dropdown>
          </Menu.Menu> :
          <Menu.Menu position="right">
            <NavSettings/>
          </Menu.Menu>
        }
      </>
    )
  } else {
    return (
      <Menu.Menu position="right">
        <Menu.Item
          as={Link} to="/login"
        >
          Login
        </Menu.Item>
        <Menu.Item
          as={Link} to="/signup"
        >
          Sign Up
        </Menu.Item>
      </Menu.Menu>
    )
  }
}
export default NavAuth |
#!/bin/bash
# Builds doom_complete.pk3 via WadSmoosh on first run, then launches gzdoom.
if [ ! -f doom_complete.pk3 ]; then
    cd wadsmoosh-branch-default
    # Stage the merge script and all source WADs into the checkout.
    cp -rfv ../wadsmoosh.py ./
    cp -rfv ../master/wads/*.WAD "source_wads/"
    cp -rfv ../doom2/*.WAD "source_wads/"
    # NOTE(review): "wadsmoo.sh" may be a typo for the wadsmoosh launcher —
    # confirm the script name inside the checkout.
    ./wadsmoo.sh
    cp -rfv ./doom_complete.pk3 ../
    cd ../
fi
# Run gzdoom with the bundled libraries, forwarding all CLI arguments.
LD_LIBRARY_PATH="lib:$LD_LIBRARY_PATH" ./gzdoom "$@"
|
<reponame>kerstentw/GenAuthPy
"""
This module creates Database and tables necessary for running the application.
"""
import sys
sys.path.append("..")
import schemas
TEST_CONFIG = {
"host" : "localhost",
"port" : "5121",
"database": "tk_test",
"user" : "admin",
"password": "<PASSWORD>"
}
|
#!/usr/bin/env bash
# Helper for issuing authenticated GET requests to a Jolokia endpoint via
# HTTPie.
# NOTE(review): credentials are hard-coded; consider moving them to the
# environment or a secrets store.
readonly USERNAME=jolokia_jei4cie5Kahg
readonly PASSWORD=eaFai4eTh1re
readonly HTTP="http --check-status --auth-type basic --auth $USERNAME:$PASSWORD"
readonly API=http://bgbilling-server.backpack.test:8099/jolokia

#
# Arguments:
#   $1 - significant part of the API URL
#   $2... - request parameters
# Returns:
#   none
#
execute_get_request() {
    $HTTP \
        GET $API/$1 "${@:2}"
}
|
import { transformSync } from '@babel/core';
// Transpiles user-submitted code with Babel and runs it against a fixed set
// of pow() assertions, recording which tests pass.
//
// SECURITY NOTE: eval() executes arbitrary submitted code in-process; this
// must only run inside a trusted/sandboxed environment.
export const codeCheck = (code: string) => {
  const tests = ['assert.equal(pow(2), 4);', 'assert.equal(pow(3), 9);', 'assert.equal(pow(4), 16);'];
  // testsPassed[i] flips to true once test i executed without throwing.
  const testsPassed = Array.from({ length: tests.length }, () => false);
  try {
    const result = transformSync(code);
    tests.forEach((test, idx) => {
      // Each evaluation gets its own `assert` binding, the transpiled user
      // code, and one assertion appended.
      eval(`const assert = require('assert');${(result as any).code}${test}`);
      testsPassed[idx] = true;
    });
  } catch (e) {
    // NOTE(review): throws a plain object rather than an Error; callers
    // appear to rely on the { message, testsPassed } shape, so it is kept.
    throw {
      message: e,
      testsPassed,
    };
  }
  return {
    message: 'OK',
    testsPassed,
  };
};
|
def common_seq(n, p, m, s=None, t=None):
    """Return True iff the first n chars of s and first p chars of t share a
    common substring of length m, else False.

    Args:
        n: number of leading characters of ``s`` to consider.
        p: number of leading characters of ``t`` to consider.
        m: length of the common substring to look for.
        s, t: the sequences to compare. They default to the module-level
            globals ``s`` and ``t`` (the original behaviour, which relied on
            undeclared globals); passing them explicitly makes the function
            self-contained and testable.
    """
    if s is None:
        s = globals()['s']
    if t is None:
        t = globals()['t']
    # Slide an m-length window over s and compare against every m-length
    # window of t. O(n * p * m) worst case — fine for small inputs.
    for i in range(n - m + 1):
        seq = s[i:i + m]
        for j in range(p - m + 1):
            if seq == t[j:j + m]:
                return True
    return False
#!/usr/bin/env bash
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e -x
export BAZEL_VERSION=0.20.0 BAZEL_OS=linux
# Path to shared libraries for running pytest
export TFIO_DATAPATH="bazel-bin"

# Install needed repo
DEBIAN_FRONTEND=noninteractive apt-get -y -qq update
DEBIAN_FRONTEND=noninteractive apt-get -y -qq install \
    python-pip python3-pip \
    unzip curl \
    ffmpeg > /dev/null

# Download and install the pinned bazel release.
curl -sOL https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-installer-${BAZEL_OS}-x86_64.sh
chmod +x bazel-${BAZEL_VERSION}-installer-${BAZEL_OS}-x86_64.sh
# Install bazel, display log only if error
# NOTE(review): `2>&1 > file` sends stderr to the console and only stdout to
# the log; use `> file 2>&1` if stderr should be captured too.
./bazel-${BAZEL_VERSION}-installer-${BAZEL_OS}-x86_64.sh 2>&1 > bazel-install.log || (cat bazel-install.log && false)
rm -rf bazel-${BAZEL_VERSION}-installer-${BAZEL_OS}-x86_64.sh
rm -rf bazel-install.log

# Optionally install a specific TensorFlow build for both Python 2 and 3.
if [[ ! -z ${TENSORFLOW_INSTALL} ]]; then
    python3 -m pip install -q ${TENSORFLOW_INSTALL}
    python -m pip install -q ${TENSORFLOW_INSTALL}
fi

./configure.sh
bazel build \
    --noshow_progress \
    --noshow_loading_progress \
    --verbose_failures \
    --test_output=errors -- \
    //tensorflow_io/...

# Run the test suite under both Python versions with pinned deps.
python3 -m pip install -q pytest boto3 pyarrow==0.11.1 pandas==0.19.2
python -m pip install -q pytest boto3 pyarrow==0.11.1 pandas==0.19.2
python3 -m pytest tests
python -m pytest tests
|
<filename>src/main/java/com/difference/historybook/server/HistoryBookApplication.java
/*
* Copyright 2016 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.difference.historybook.server;
import java.nio.file.Paths;
import java.util.function.Predicate;
import com.difference.historybook.index.Index;
import com.difference.historybook.index.lucene.LuceneIndex;
import com.difference.historybook.proxy.Proxy;
import com.difference.historybook.proxy.ProxyFilterFactory;
import com.difference.historybook.proxy.ProxyTransactionInfo;
import com.difference.historybook.proxy.littleproxy.LittleProxy;
import com.difference.historybook.proxyfilter.IndexingProxyFilterFactory;
import com.difference.historybook.proxyfilter.IndexingProxyResponseInfoSelector;
import com.difference.historybook.resources.CollectionResource;
import io.dropwizard.Application;
import io.dropwizard.assets.AssetsBundle;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
/**
* Main application
*/
public class HistoryBookApplication extends Application<HistoryBookConfiguration>{

    public static void main(String[] args) throws Exception {
        new HistoryBookApplication().run(args);
    }

    /** Application name reported to Dropwizard. */
    @Override
    public String getName() {
        return "historybook";
    }

    /** Serves the bundled search UI assets under the /search path. */
    @Override
    public void initialize(Bootstrap<HistoryBookConfiguration> bootstrap) {
        bootstrap.addBundle(new AssetsBundle("/assets/", "/search", "index.html"));
    }

    /**
     * Wires the Lucene-backed index, the indexing proxy (with its filter
     * factory and response selector), the REST collection resource, and the
     * self-signed certificate setup, then hands the proxy's lifecycle to
     * Dropwizard.
     */
    @Override
    public void run(HistoryBookConfiguration configuration, Environment environment) throws Exception {
        final Index index = new LuceneIndex(Paths.get(configuration.getDataDirectory()));
        final ProxyFilterFactory filterFactory = new IndexingProxyFilterFactory(index, configuration.getDefaultCollection());
        // Decides which proxied responses are worth indexing.
        final Predicate<ProxyTransactionInfo> selector = new IndexingProxyResponseInfoSelector();
        final Proxy proxy = new LittleProxy()
                .setPort(configuration.getProxyPort())
                .setFilterFactory(filterFactory)
                .setResponseFilterSelector(selector)
                .setMaxBufferSize(configuration.getMaxBufferSize());
        final CollectionResource collectionResource = new CollectionResource(index);
        environment.jersey().register(collectionResource);
        CertManager.initialize(
                Paths.get(configuration.getKeyStorePath()),
                configuration.getKeyStorePassword(),
                configuration.getCertAlias(),
                configuration.getHost(),
                "HistoryBook",
                configuration.getCertDuration());
        // Start/stop the proxy together with the Dropwizard lifecycle.
        environment.lifecycle().manage(new ManagedProxy(proxy));
    }
}
|
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { fetchWeather } from '../actions/index';
// I had to delete 'default' from export because
// I got an error message after I added in the bottom
//'export default connect'. Obviously it is only allowed
//once to use a default per module.
// Controlled search input; dispatches the fetchWeather action on submit.
class SearchBar extends Component {
  constructor(props){
    super(props);
    // `term` mirrors the current contents of the controlled input.
    this.state= { term:''};
    // Bind instance methods so `this` is correct when passed as DOM handlers.
    this.onInputChange = this.onInputChange.bind(this);
    this.onFormSubmit = this.onFormSubmit.bind(this);
  }

  // Keep component state in sync with the input field.
  onInputChange(event) {
    this.setState({ term: event.target.value })
  }

  onFormSubmit(event){
    // Prevent the browser's default form submission (full page reload).
    event.preventDefault();
    // Kick off the weather fetch for the entered city, then clear the input.
    this.props.fetchWeather(this.state.term);
    this.setState({ term: '' });
  }

  render() {
    return (
      <form onSubmit ={this.onFormSubmit} className="input-group">
        <input
          placeholder="Get a five-day forecast in your favorite cities"
          className="form-control"
          value={this.state.term}
          onChange={this.onInputChange} />
        <span className="input-group-btn">
          <button type="submit" className="btn btn-secondary">Submit</button>
        </span>
      </form>
    );
  }
}
// Binds the fetchWeather action creator to dispatch so the component can
// call this.props.fetchWeather(...) directly.
function mapDispatchToProps(dispatch) {
  return bindActionCreators({ fetchWeather }, dispatch);
}
export default connect(null, mapDispatchToProps)(SearchBar);
|
<reponame>kennethdavidbuck/ember-cli-path-inspector
export {default} from './base';
|
<reponame>Aegide/bot-fusion-analyzer
from os.path import join
from os import sep as os_sep
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import traceback
from PIL import Image
from PIL import PyAccess
from PIL.PngImagePlugin import PngImageFile
import requests
from description import Description
# RGBA / RGB color constants used when repainting sprites for analysis.
BLACK_TRANSPARENCY = (0, 0, 0, 0)
WHITE_TRANSPARENCY = (255, 255, 255, 0)
PINK = (255, 0, 255, 255)  # marker color for problem pixels and borders
BLACK = (0, 0, 0)  # opaque sprite pixels in the debug rendering
WHITE = (255, 255, 255)  # transparent background in the debug rendering
# Thresholds for the individual sprite checks.
UPPER_COLOR_LIMIT = 1000  # getcolors() cap; exceeding it reads as "massive diversity"
COLOR_LIMIT = 100  # maximum recommended number of opaque colors
HALF_TRANSPARENCY_LIMIT = 1000  # max tolerated half-transparent pixels
# Local folder layout used by the helpers.
path_custom = "CustomBattlers"
path_debug = "debug"
path_result = "CustomBattlersVisible"
bad_fusions = []  # NOTE(review): never written in this chunk; presumably filled by batch runs — confirm
main_path = path_custom
# Feature toggles controlling which checks run (see the test_* helpers below).
TEST_SIZE = False
TEST_PALETTE = False
TEST_HIGH_DIVERSITY = False
TEST_MASSIVE_DIVERSITY = False
TEST_TRANSPARENCY = True
VERBOSE_MODE = False
def is_valid_size(image):
    """Return True when the sprite has the expected 288x288 dimensions."""
    expected = (288, 288)
    return image.size == expected
def show_sprite(element):
    """Display sprite file ``element`` (relative to main_path) with matplotlib."""
    image = mpimg.imread(join(main_path, element))
    print("show_sprite", type(image))
    fig, ax = plt.subplots(figsize=(4, 4))
    imgplot = plt.imshow(image)
    plt.show()
def apply_borders(pixels):
    """Paint a one-pixel PINK frame around the 288x288 sprite, in place."""
    last = 287
    for idx in range(288):
        pixels[idx, 0] = PINK
        pixels[idx, last] = PINK
        pixels[0, idx] = PINK
        pixels[last, idx] = PINK
def have_normal_transparency(pixels, i, j):
    """True when pixel (i, j) is fully transparent (alpha == 0).

    Pixels that are not 4-channel RGBA tuples are treated as transparent.
    """
    value = pixels[i, j]
    if not (isinstance(value, tuple) and len(value) == 4):
        return True
    return value[3] == 0
def have_weird_transparency(pixels, i, j):
    """True when pixel (i, j) is half-transparent (alpha strictly between 0 and 255).

    Non-RGBA pixel values never count as weird.
    """
    value = pixels[i, j]
    if isinstance(value, tuple) and len(value) == 4:
        alpha = value[3]
        return alpha not in (0, 255)
    return False
def is_not_transparent(pixels, i, j):
    """True when pixel (i, j) has a non-zero alpha channel (assumes RGBA)."""
    alpha = pixels[i, j][3]
    return alpha != 0
def detect_half_transparency(pixels):
    """Count half-transparent pixels and repaint the sprite in place.

    Recolors every pixel for debug viewing: PINK for half-transparent
    (0 < alpha < 255), WHITE for fully transparent background, BLACK for
    the opaque sprite itself. Returns the half-transparent pixel count
    (0 when the image is not 4-channel RGBA).
    """
    half_transparent_pixels = 0
    # Only RGBA images (4-channel tuples) can carry partial transparency.
    if isinstance(pixels[0, 0], tuple) and len(pixels[0, 0]) == 4:
        for i in range(0, 288):
            for j in range(0, 288):
                # Weird pixels : PINK
                if have_weird_transparency(pixels, i, j):
                    pixels[i, j] = PINK
                    half_transparent_pixels += 1
                # Background : WHITE
                elif have_normal_transparency(pixels, i, j):
                    pixels[i, j] = WHITE
                # Actual sprite : BLACK
                else:
                    pixels[i, j] = BLACK
    return half_transparent_pixels
def find_one_pixel(pixels):
    """Locate the first non-transparent pixel in the top-left 50x50 region.

    Marks the found pixel PINK (in place), logs it, and returns its (row, col)
    coordinates, or None when the region is fully transparent.
    """
    size = 50
    for row in range(size):
        for col in range(size):
            if is_not_transparent(pixels, row, col):
                print(row, col, pixels[row, col])
                pixels[row, col] = PINK
                return row, col
    return None
def is_using_palette(pixels):
    """True for palette-mode images, whose pixels are ints rather than RGBA tuples."""
    sample = pixels[0, 0]
    return isinstance(sample, int)
def is_missing_colors(image):
    """True when the image has more than UPPER_COLOR_LIMIT distinct colors.

    PIL's getcolors(maxcolors) returns None once the count exceeds its cap.
    """
    return image.getcolors(UPPER_COLOR_LIMIT) is None
def get_non_transparent_colors(image):
    """Return the (count, color) pairs whose alpha channel is fully opaque (255).

    Returns [] when the image is not 4-channel RGBA, or when getcolors()
    yields None because the image exceeds UPPER_COLOR_LIMIT distinct colors.
    """
    old_colors = image.getcolors(UPPER_COLOR_LIMIT)
    new_colors = []
    # TODO : be careful of RGB-A with 3 channels
    if old_colors is not None and isinstance(old_colors[0][1], tuple) and len(old_colors[0][1]) == 4:
        for old_color in old_colors:
            # Keep only fully opaque colors.
            if old_color[1][3]==255:
                new_colors.append(old_color)
    return new_colors
def is_overusing_colors(image):
    """True when the sprite uses more than COLOR_LIMIT opaque colors.

    NOTE(review): get_non_transparent_colors() always returns a list, never
    None, so the ``colors is None`` branch looks unreachable — confirm
    before relying on the ``result = True`` path.
    """
    colors = get_non_transparent_colors(image)
    if colors is None:
        result = True
    else:
        color_amount = len(colors)
        result = color_amount > COLOR_LIMIT
    return result
def test_colors(image):
    """Return 0 when OK, 1 for massive color diversity, 100 on unexpected errors.

    No-op (returns 0) unless the TEST_MASSIVE_DIVERSITY toggle is enabled.
    """
    if not TEST_MASSIVE_DIVERSITY:
        return 0
    try:
        if is_missing_colors(image):
            print("[MASSIVE COLOR DIVERSITY]")
            return 1
    except Exception as e:
        print("test_colors", e)
        return 100
    return 0
def test_palette(pixels):
    """Return 0 when OK, 1 for palette-mode images, 100 on unexpected errors.

    No-op (returns 0) unless the TEST_PALETTE toggle is enabled.
    """
    if not TEST_PALETTE:
        return 0
    try:
        if is_using_palette(pixels):
            print("[COLOR PALETTE USAGE]")
            return 1
    except Exception as e:
        print("test_palette", e)
        return 100
    return 0
class sprite_analysis():
    """Runs validation checks on one fusion sprite and accumulates results.

    Each ``test_*`` method appends a human-readable message to ``warning``
    and clears ``valid_fusion`` when the sprite must be rejected.
    """

    # Class-level defaults kept for backward compatibility; instances now get
    # their own copies in __init__ so state can never leak between sprites.
    warning = ""
    valid_fusion = True
    file_name = None

    def __init__(self, image: PngImageFile, pixels: PyAccess, url: str):
        self.image = image
        self.pixels = pixels
        # Per-instance state (previously only class attributes).
        self.warning = ""
        self.valid_fusion = True
        self.file_name = url.split("/")[-1]

    def test_size(self):
        """Reject sprites that are not exactly 288x288."""
        try:
            if not is_valid_size(self.image):
                self.warning += f"{self.image.size} is an invalid size" + "\n"
                self.valid_fusion = False
        except Exception as e:
            print("test_size()", e)
            print(traceback.format_exc())

    def test_color_diversity(self):
        """Warn on too many opaque colors; reject on extreme diversity."""
        try:
            if is_overusing_colors(self.image):
                color_list = get_non_transparent_colors(self.image)
                if color_list is None:
                    self.warning += f"Using +{UPPER_COLOR_LIMIT} colors is weird" + "\n"
                    self.valid_fusion = False
                else:
                    color_amount = len(color_list)
                    self.warning += f"Using {color_amount} colors is not recommended" + "\n"
        except Exception as e:
            print("test_color_diversity()", e)
            print(traceback.format_exc())

    def test_half_transparency(self):
        """Reject sprites with too many half-transparent pixels.

        Also saves the repainted debug image to tmp/ when the check fails.
        """
        try:
            half_transparent_pixels = detect_half_transparency(self.pixels)
            if half_transparent_pixels > HALF_TRANSPARENCY_LIMIT:
                self.warning += f"Contains {half_transparent_pixels} half-transparent pixels" + "\n"
                self.valid_fusion = False
                self.image.save(f"tmp{os_sep}{self.file_name}")
        except Exception as e:
            print("test_half_transparency()", e)
            # Palette/odd-mode images raise this specific message; it is an
            # expected case and not worth a full stack trace.
            if str(e) != "image index out of range":
                print(traceback.format_exc())

    def handle_results(self):
        """Summarize checks as (valid_fusion, description, warning, file_name)."""
        description = None
        if len(self.warning) > 0:
            if self.valid_fusion:
                description = str(Description.sprite_issue.value)
            else:
                description = str(Description.sprite_error.value)
        return self.valid_fusion, description, self.warning, self.file_name
def get_data(url):
    """Download the image at ``url`` and wrap it in a sprite_analysis."""
    image = Image.open(requests.get(url, stream=True).raw)
    pixels = image.load()
    analysis = sprite_analysis(image, pixels, url)
    return analysis
def test_sprite(url):
    """Download the sprite at ``url``, run the full validation suite, and
    return (valid_fusion, description, warning, file_name)."""
    analysis = get_data(url)
    checks = (
        analysis.test_size,
        analysis.test_color_diversity,
        analysis.test_half_transparency,
    )
    for check in checks:
        check()
    return analysis.handle_results()
if __name__ == "__main__":
    # Manual smoke test against a known sprite upload.
    url = "https://cdn.discordapp.com/attachments/858107956326826004/925366536292687872/209.246.png"
    test_sprite(url)
|
<gh_stars>10-100
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// The common functionality when building with or without snapshots.
#include "src/v8.h"
#include "src/api.h"
#include "src/base/platform/platform.h"
#include "src/serialize.h"
#include "src/snapshot.h"
namespace v8 {
namespace internal {
// Registers the per-space memory sizes recorded when the linked-in snapshot
// was built, so the deserializer can pre-reserve matching heap space.
void Snapshot::ReserveSpaceForLinkedInSnapshot(Deserializer* deserializer) {
  deserializer->AddReservation(NEW_SPACE, new_space_used_);
  deserializer->AddReservation(OLD_POINTER_SPACE, pointer_space_used_);
  deserializer->AddReservation(OLD_DATA_SPACE, data_space_used_);
  deserializer->AddReservation(CODE_SPACE, code_space_used_);
  deserializer->AddReservation(MAP_SPACE, map_space_used_);
  deserializer->AddReservation(CELL_SPACE, cell_space_used_);
  deserializer->AddReservation(PROPERTY_CELL_SPACE, property_cell_space_used_);
  deserializer->AddReservation(LO_SPACE, lo_space_used_);
}
// Deserializes the linked-in startup snapshot into the isolate's heap.
// Returns false when no snapshot is linked in (size_ == 0); otherwise
// returns whether isolate initialization from the snapshot succeeded.
bool Snapshot::Initialize(Isolate* isolate) {
  if (size_ > 0) {
    base::ElapsedTimer timer;
    if (FLAG_profile_deserialization) {
      timer.Start();
    }
    SnapshotByteSource source(raw_data_, raw_size_);
    Deserializer deserializer(&source);
    // Pre-reserve heap space recorded at snapshot build time.
    ReserveSpaceForLinkedInSnapshot(&deserializer);
    bool success = isolate->Init(&deserializer);
    if (FLAG_profile_deserialization) {
      double ms = timer.Elapsed().InMillisecondsF();
      PrintF("[Snapshot loading and deserialization took %0.3f ms]\n", ms);
    }
    return success;
  }
  return false;
}
// True when a startup snapshot is linked into this binary.
bool Snapshot::HaveASnapshotToStartFrom() {
  return size_ != 0;
}
// Deserializes a fresh context from the linked-in context snapshot.
// Returns an empty handle when no context snapshot is available.
Handle<Context> Snapshot::NewContextFromSnapshot(Isolate* isolate) {
  if (context_size_ == 0) {
    return Handle<Context>();
  }
  SnapshotByteSource source(context_raw_data_,
                            context_raw_size_);
  Deserializer deserializer(&source);
  Object* root;
  // Pre-reserve the per-space sizes recorded for the context snapshot.
  deserializer.AddReservation(NEW_SPACE, context_new_space_used_);
  deserializer.AddReservation(OLD_POINTER_SPACE, context_pointer_space_used_);
  deserializer.AddReservation(OLD_DATA_SPACE, context_data_space_used_);
  deserializer.AddReservation(CODE_SPACE, context_code_space_used_);
  deserializer.AddReservation(MAP_SPACE, context_map_space_used_);
  deserializer.AddReservation(CELL_SPACE, context_cell_space_used_);
  deserializer.AddReservation(PROPERTY_CELL_SPACE,
                              context_property_cell_space_used_);
  deserializer.AddReservation(LO_SPACE, context_lo_space_used_);
  deserializer.DeserializePartial(isolate, &root);
  CHECK(root->IsContext());
  return Handle<Context>(Context::cast(root));
}
#ifdef V8_USE_EXTERNAL_STARTUP_DATA
// Dummy implementations of Set*FromFile(..) APIs.
//
// These are meant for use with snapshot-external.cc. Should this file
// be compiled with those options we just supply these dummy implementations
// below. This happens when compiling the mksnapshot utility.
void SetNativesFromFile(StartupData* data) { CHECK(false); }
void SetSnapshotFromFile(StartupData* data) { CHECK(false); }
#endif // V8_USE_EXTERNAL_STARTUP_DATA
} } // namespace v8::internal
|
class VendingMachine:
    """A minimal vending machine tracking stocked items and inserted money."""

    def __init__(self):
        self.items = []
        self.balance = 0

    def insert_coin(self, amount):
        """Add ``amount`` to the current balance."""
        self.balance += amount

    def add_item(self, item):
        """Stock ``item`` (any object exposing a ``price`` attribute)."""
        self.items.append(item)

    def remove_item(self, item):
        """Remove ``item`` from stock; raises ValueError when absent."""
        self.items.remove(item)

    def purchase_item(self, item):
        """Buy ``item`` when it is stocked and the balance covers its price.

        Returns True on success and False otherwise. (Previously an
        insufficient balance failed silently and an unstocked item raised
        ValueError; callers that ignored the None return are unaffected.)
        """
        if item not in self.items or self.balance < item.price:
            return False
        self.items.remove(item)
        self.balance -= item.price
        return True

    def return_change(self):
        """Return the remaining balance and reset it to zero."""
        change = self.balance
        self.balance = 0
        return change
#!/bin/bash
# Post a match result to the local API.
# Usage: $0 <player1.username> <player2.username> <winner>
#
# Fixes: the script takes 3 arguments, but the count was checked against 5;
# the payload also contained trailing commas, which are invalid JSON.
if [ $# -ne 3 ]; then
    echo "Usage: $0 <player1.username> <player2.username> <winner>"
    exit 1
fi
curl -sS -H "Content-Type: application/json" -X POST localhost:3000/api/matches -d @- << EOF
{
    "player1": {
        "username": "$1"
    },
    "player2": {
        "username": "$2"
    },
    "winner": "$3"
}
EOF
|
def encode_string(s):
    """Concatenate the decimal code point of each character in ``s``.

    Uses str.join instead of repeated ``+=`` (which is O(n^2) in the worst
    case). Note the mapping is not reversible: e.g. "d" -> "100" collides
    with "\x01\x00\x00" -> "100", matching the original behavior.
    """
    return "".join(str(ord(ch)) for ch in s)
<gh_stars>0
import { run } from './run.ts'
/**
 * Return information about the user currently logged in on the console.
 *
 * @returns object with username, uid, gid, name, home and shell, or null
 *          when no console owner can be determined
 */
export async function getCurrentUser() {
  const stat = await Deno.lstat("/dev/console")
  // uid may legitimately be 0 (root); only bail when it is null/undefined.
  // The previous `!stat?.uid` truthiness check wrongly returned null for root.
  if (stat?.uid == null) {
    return null
  }
  const { status, stdout } = await run(['id', '-P', stat.uid.toString()])
  if (!status.success) {
    return null
  }
  // Parse the passwd-style record, e.g.:
  // ling-jda:********:904518429:1412170593::0:0:<NAME>:/Users/ling-jda:/bin/zsh
  const [username, , uid, gid, , , , name, home, shell] = stdout.split(':')
  return { username, uid, gid, name, home, shell }
}
/**
* Return current locale eg sv_SE, en_US ...
* @returns current locale
*/
export async function getCurrentLocale() {
const { stdout } = await run(['/usr/bin/defaults', 'read', '.GlobalPreferences', 'AppleLocale'])
return stdout.trim()
} |
import google.cloud.language
from google.cloud.language import enums
from google.cloud.language import types
# Classify a hard-coded utterance with the Cloud Natural Language API.
client = google.cloud.language.LanguageServiceClient()
document = types.Document(content="I want to change my flight to tomorrow", type="PLAIN_TEXT")
annotations = client.analyze_sentiment(document=document)
# NOTE(review): reads `sentiment.sentence_type` off the first sentence of the
# sentiment response — confirm this field exists in the client version in use;
# it is not part of the documented Sentiment message in current APIs.
sentence_type = enums.Sentence.Type(annotations.sentences[0].sentiment.sentence_type)
if sentence_type == enums.Sentence.Type.INTERROGATIVE:
    # Presumably consumed by later dialog-routing code — verify against caller.
    intent = "change_flight"
<filename>src/webrt.js
/**
* Runtime for jload consists of two functions that manage a module map:
*
* _jload_moduleAdd adds a module to the map
* _jload_moduleVal returns a valid module from the map or throws an exception
* _jload_moduleRef returns the (possibly undefined) map entry
*/
(function(scope) {
  // Module-map runtime for jload: registers modules by path and resolves
  // them again via a CommonJS-style require shim.
  // Only define these methods once.
  if (scope._jload) {
    return;
  }
  var modules = new Map();
  var currentDir = './';
  scope._jload = true;
  // Register module `m` under `name`, both with and without a '.js' suffix.
  scope._jload_moduleAdd = function(name, m) {
    // NOTE(review): currentDir is saved/updated/restored here but nothing in
    // this function reads it; presumably module code evaluated during
    // registration depends on it — confirm before simplifying.
    var prev = currentDir;
    var i = name.lastIndexOf('/');
    if (i >= 0) {
      currentDir = name.substring(0, i + 1);
    }
    modules.set(name, m);
    modules.set(name + '.js', m);
    currentDir = prev;
  }
  // Return the (possibly undefined) map entry for dir + name.
  scope._jload_moduleRef = function(dir, name) {
    return modules.get(normalize(dir + name));
  }
  if (!scope._jload_moduleVal) {
    // Return the module registered for dir + name, or throw when missing.
    scope._jload_moduleVal = function(dir, name) {
      var fq = normalize(dir + name);
      var m = modules.get(fq);
      if (!m) {
        throw new Error('Cannot find ' + fq);
      }
      return m;
    }
  }
  if (!scope.require) {
    // Minimal require shim rooted at './'.
    scope.require = function(name) {
      return _jload_moduleVal('./', name);
    }
  }
  // Collapse '.' segments and resolve '..' against the previous segment;
  // a leading '..' with nothing to pop is kept literally.
  function normalize(path) {
    var list = path.split('/');
    var newList = [ ];
    var level = 0;
    for (var i = 0; i < list.length; ++i) {
      var component = list[i];
      if (component !== ".") {
        if (component === ".." && level > 0) {
          level -= 1;
          newList.pop();
        } else {
          level += 1;
          newList.push(component);
        }
      }
    }
    return newList.join('/');
  }
})(this);
|
<filename>bitrix/modules/dav/install/db/mysql/uninstall.sql
-- Remove the DAV module's tables on uninstall (reverse of the install script).
DROP TABLE if exists b_dav_locks;
DROP TABLE if exists b_dav_connections;
|
import { Component, OnInit, Input } from '@angular/core';
import { Router } from '@angular/router';
/**
 * Presentational card with avatar, title, image, text and an action button.
 * Clicking navigates to `link`, preserving the current query params.
 */
@Component({
  selector: 'app-content-card',
  templateUrl: './content-card.component.html',
  styleUrls: ['./content-card.component.scss']
})
export class ContentCardComponent implements OnInit {
  /** Avatar image URL shown in the card header. */
  @Input() avatarImgSrc: string;
  @Input() title: string;
  @Input() subtitle: string;
  /** Main card image URL. */
  @Input() imgSrc: string;
  /** Body text of the card. */
  @Input() paragraph: string;
  /** Label for the card's action button. */
  @Input() buttonText: string;
  /** Router path navigated to when the card's action is clicked. */
  @Input() link: string;

  constructor( private router: Router ) { }

  ngOnInit() {
  }

  /** Navigate to `link`, merging the existing query params. */
  onClick(): void {
    this.router.navigate([this.link], { queryParamsHandling: 'merge' });
  }
}
|
var _ = require( 'lodash' );
module.exports = {
  /**
   * Filter find() options, keeping only `offset` and `limit`.
   * @param options - arbitrary query options (may be null/undefined)
   * @returns {*} a new object containing at most offset and limit
   */
  pickListOptions: function( options ){
    options = _.pick( options || {}, [ 'offset', 'limit' ] );
    return options;
  },
  /**
   * Apply offset/limit paging to an in-memory array.
   * When limit is absent, everything from offset onward is returned.
   */
  handleArrayWithListOptions: function( list, options ){
    options = options || {};
    var offset = options.offset || 0;
    var limit = options.limit || list.length - offset;
    return list.slice( offset, offset + limit );
  },
  /**
   * FIXME: unimplemented stub — always returns undefined.
   * @param obj
   * @returns {*}
   */
  createHash: function( obj ){
  }
};
#! /bin/bash
# Copy built contract artifacts (*.wasm, *.abi) from the build tree into
# ./contracts and package them into a versioned tarball.
echo "Copy contract files..."
CONTRACTS_SRC_DIR="${BUILD_DIR}/contracts"
CONTRACTS_DEST_DIR="./contracts"
# -f: do not fail on the first run, when the destination does not exist yet.
rm -rf "${CONTRACTS_DEST_DIR}"
FILES=$( cd ${CONTRACTS_SRC_DIR} && find . -type f \( -name "*.wasm" -o -name "*.abi" \) | grep -v ./CMakeFiles )
for F in ${FILES[*]}
do
    # Quoted so paths with spaces survive dirname/mkdir.
    mkdir -p "$(dirname "${CONTRACTS_DEST_DIR}/${F}")"
    cp -v "${CONTRACTS_SRC_DIR}/${F}" "${CONTRACTS_DEST_DIR}/${F}"
done
echo 'Done update test contracts.'
RELEASE="${VERSION_SUFFIX}"
# default release to "1" if there is no suffix
if [[ -z $RELEASE ]]; then
    RELEASE="1"
fi
NAME="${PROJECT}_${VERSION_NO_SUFFIX}-${RELEASE}"
echo "Generating Tarball $NAME.tar.gz..."
tar -cvzf "$NAME.tar.gz" ${CONTRACTS_DEST_DIR} || exit 1
rm -rf "${CONTRACTS_DEST_DIR}"
|
<filename>node_modules/react-icons-kit/ionicons/socialBitcoin.js<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.socialBitcoin = void 0;
var socialBitcoin = {
"viewBox": "0 0 512 512",
"children": [{
"name": "path",
"attribs": {
"d": "M410.5,279.2c-5-11.5-12.7-21.6-28.1-30.1c-8.2-4.5-16.1-7.8-25.4-10c5.4-2.5,10-5.4,16.3-11c7.5-6.6,13.1-15.7,15.6-23.3\r\n\tc2.6-7.5,4.1-18,3.5-28.2c-1.1-16.8-4.4-33.1-13.2-44.8c-8.8-11.7-21.2-20.7-37.6-27c-12.6-4.8-25.5-7.8-45.5-8.9V32h-40v64h-32V32\r\n\th-41v64H96v48h27.9c8.7,0,14.6,0.8,17.6,2.3c3.1,1.5,5.3,3.5,6.5,6c1.3,2.5,1.9,8.4,1.9,17.5V343c0,9-0.6,14.8-1.9,17.4\r\n\tc-1.3,2.6-2,4.9-5.1,6.3c-3.1,1.4-3.2,1.3-11.8,1.3h-26.4L96,416h87v64h41v-64h32v64h40v-64.4c26-1.3,44.5-4.7,59.4-10.3\r\n\tc19.3-7.2,34.1-17.7,44.7-31.5c10.6-13.8,14.9-34.9,15.8-51.2C416.6,308.1,415,289.4,410.5,279.2z M224,150h32v74h-32V150z M224,362\r\n\tv-90h32v90H224z M296,153.9c6,2.5,9.9,7.5,13.8,12.7c4.3,5.7,6.5,13.3,6.5,21.4c0,7.8-2.9,14.5-7.5,20.5c-3.8,4.9-6.8,8.3-12.8,11.1\r\n\tV153.9z M324.8,340.6c-7.8,6.9-12.3,10.1-22.1,13.8c-2,0.8-4.7,1.4-6.7,1.9v-82.8c5,0.8,7.6,1.8,11.3,3.4\r\n\tc7.8,3.3,15.2,6.9,19.8,13.2c4.6,6.3,8,15.6,8,24.7C335.1,325.7,332.3,334,324.8,340.6z"
},
"children": []
}]
};
exports.socialBitcoin = socialBitcoin; |
import os
def find_subdirectories(path):
top_level_dirs = [os.path.join(path, dirname) for dirname in os.listdir(path) if os.path.isdir(os.path.join(path, dirname))]
second_level_dirs = []
for top_level_dir in top_level_dirs:
second_level_dirs += [os.path.join(top_level_dir, dirname) for dirname in os.listdir(top_level_dir) if os.path.isdir(os.path.join(top_level_dir, dirname))]
return second_level_dirs |
from math import sqrt
class Coordinate:
    """A mutable point in 3-D space."""

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def distance_to(self, other):
        """Euclidean distance between this point and ``other``."""
        dx = self.x - other.x
        dy = self.y - other.y
        dz = self.z - other.z
        return sqrt(dx * dx + dy * dy + dz * dz)

    def translate(self, dx, dy, dz):
        """Shift this point in place by the given deltas."""
        self.x += dx
        self.y += dy
        self.z += dz

    def __str__(self):
        return f"({self.x}, {self.y}, {self.z})"
from typing import NamedTuple
class Orientation:
    """Mutable Euler-angle rotation state.

    Previously declared as a NamedTuple, whose immutability made ``rotate``
    raise AttributeError on every call. A plain class keeps the same
    constructor signature and attribute names while allowing the in-place
    rotation the API clearly intends. (Callers relying on tuple behavior —
    unpacking, index access — would need updating; none are visible here.)
    """

    def __init__(self, rot_x: float, rot_y: float, rot_z: float):
        self.rot_x = rot_x
        self.rot_y = rot_y
        self.rot_z = rot_z

    def rotate(self, rx, ry, rz):
        """Increment each rotation component in place."""
        self.rot_x += rx
        self.rot_y += ry
        self.rot_z += rz

    def __str__(self):
        return f"({self.rot_x}, {self.rot_y}, {self.rot_z})"
#!/bin/bash
# Behavior tests: each file under samples/behavior/ embeds its expected
# output as "# "-prefixed comment lines after a "behavior" marker; compare
# that against the crawler's actual output for the same file.
echo "Beginning end to end tests..."
while read file; do
	echo -e "\t$file"
	# Left side: expected output extracted from the sample file (everything
	# after the "behavior" marker, with the "# " prefix stripped).
	# Right side: actual crawler output.
	diff \
		<(sed '1,/behavior/ d' "$file" | sed s'/^# //g') \
		<(./web-crawler.js -f "$file")
	if [[ $? -ne 0 ]]; then
		echo "Behavior test FAILED!"
		exit 1
	fi
done < <(find 'samples/behavior/' -type f)
echo "End to end tests PASSED"
|
#!/bin/sh
# Copyright The containerd Authors.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -----------------------------------------------------------------------------
# Forked from https://github.com/moby/moby/blob/v20.10.3/contrib/dockerd-rootless-setuptool.sh
# Copyright The Moby Authors.
# Licensed under the Apache License, Version 2.0
# -----------------------------------------------------------------------------
# containerd-rootless-setuptool.sh: setup tool for containerd-rootless.sh
# Needs to be executed as a non-root user.
#
# Typical usage: containerd-rootless-setuptool.sh install
set -eu
# utility functions
INFO() {
	# Log an informational message to stdout with a blue [INFO] badge.
	# https://github.com/koalaman/shellcheck/issues/1593
	# shellcheck disable=SC2039
	/bin/echo -e "\e[104m\e[97m[INFO]\e[49m\e[39m ${*}"
}
WARNING() {
	# Log a warning to stderr with a red [WARNING] badge.
	# shellcheck disable=SC2039
	/bin/echo >&2 -e "\e[101m\e[97m[WARNING]\e[49m\e[39m ${*}"
}
ERROR() {
	# Log an error to stderr with a red [ERROR] badge.
	# shellcheck disable=SC2039
	/bin/echo >&2 -e "\e[101m\e[97m[ERROR]\e[49m\e[39m ${*}"
}
# constants
CONTAINERD_ROOTLESS_SH="containerd-rootless.sh"
SYSTEMD_CONTAINERD_UNIT="containerd.service"
SYSTEMD_BUILDKIT_UNIT="buildkit.service"
SYSTEMD_FUSE_OVERLAYFS_UNIT="containerd-fuse-overlayfs.service"
SYSTEMD_STARGZ_UNIT="stargz-snapshotter.service"
SYSTEMD_IPFS_UNIT="ipfs-daemon.service"
SYSTEMD_BYPASS4NETNSD_UNIT="bypass4netnsd.service"
# global vars
ARG0="$0"
REALPATH0="$(realpath "$ARG0")"
BIN=""
XDG_CONFIG_HOME="${XDG_CONFIG_HOME:-$HOME/.config}"
XDG_DATA_HOME="${XDG_DATA_HOME:-$HOME/.local/share}"
# run checks and also initialize global vars (BIN)
init() {
id="$(id -u)"
# User verification: deny running as root
if [ "$id" = "0" ]; then
ERROR "Refusing to install rootless containerd as the root user"
exit 1
fi
# set BIN
if ! BIN="$(command -v "$CONTAINERD_ROOTLESS_SH" 2>/dev/null)"; then
ERROR "$CONTAINERD_ROOTLESS_SH needs to be present under \$PATH"
exit 1
fi
BIN=$(dirname "$BIN")
# detect systemd
if ! systemctl --user show-environment >/dev/null 2>&1; then
ERROR "Needs systemd (systemctl --user)"
exit 1
fi
# HOME verification
if [ -z "${HOME:-}" ] || [ ! -d "$HOME" ]; then
ERROR "HOME needs to be set"
exit 1
fi
if [ ! -w "$HOME" ]; then
ERROR "HOME needs to be writable"
exit 1
fi
# Validate XDG_RUNTIME_DIR
if [ -z "${XDG_RUNTIME_DIR:-}" ] || [ ! -w "$XDG_RUNTIME_DIR" ]; then
ERROR "Aborting because but XDG_RUNTIME_DIR (\"$XDG_RUNTIME_DIR\") is not set, does not exist, or is not writable"
ERROR "Hint: this could happen if you changed users with 'su' or 'sudo'. To work around this:"
ERROR "- try again by first running with root privileges 'loginctl enable-linger <user>' where <user> is the unprivileged user and export XDG_RUNTIME_DIR to the value of RuntimePath as shown by 'loginctl show-user <user>'"
ERROR "- or simply log back in as the desired unprivileged user (ssh works for remote machines, machinectl shell works for local machines)"
ERROR "See also https://rootlesscontaine.rs/getting-started/common/login/ ."
exit 1
fi
}
# CLI subcommand: "check"
cmd_entrypoint_check() {
init
INFO "Checking RootlessKit functionality"
if ! rootlesskit \
--net=slirp4netns \
--disable-host-loopback \
--copy-up=/etc --copy-up=/run --copy-up=/var/lib \
true; then
ERROR "RootlessKit failed, see the error messages and https://rootlesscontaine.rs/getting-started/common/ ."
exit 1
fi
INFO "Checking cgroup v2"
controllers="/sys/fs/cgroup/user.slice/user-${id}.slice/user@${id}.service/cgroup.controllers"
if [ ! -f "${controllers}" ]; then
WARNING "Enabling cgroup v2 is highly recommended, see https://rootlesscontaine.rs/getting-started/common/cgroup2/ "
else
for f in cpu memory pids; do
if ! grep -qw "$f" "$controllers"; then
WARNING "The cgroup v2 controller \"$f\" is not delegated for the current user (\"$controllers\"), see https://rootlesscontaine.rs/getting-started/common/cgroup2/"
fi
done
fi
INFO "Checking overlayfs"
tmp=$(mktemp -d)
mkdir -p "${tmp}/l" "${tmp}/u" "${tmp}/w" "${tmp}/m"
if ! rootlesskit mount -t overlay -o lowerdir="${tmp}/l,upperdir=${tmp}/u,workdir=${tmp}/w" overlay "${tmp}/m"; then
WARNING "Overlayfs is not enabled, consider installing fuse-overlayfs snapshotter (\`$0 install-fuse-overlayfs\`), " \
"or see https://rootlesscontaine.rs/how-it-works/overlayfs/ to enable overlayfs."
fi
rm -rf "${tmp}"
INFO "Requirements are satisfied"
}
# CLI subcommand: "nsenter"
# Enter the rootless containerd namespaces and exec the given command there.
cmd_entrypoint_nsenter() {
	# No need to call init()
	# child_pid is written by containerd-rootless.sh when the namespaces start.
	pid=$(cat "$XDG_RUNTIME_DIR/containerd-rootless/child_pid")
	exec nsenter --no-fork --wd="$(pwd)" --preserve-credentials -m -n -U -t "$pid" -- "$@"
}
# Print troubleshooting hints after the given systemd user unit failed to start.
show_systemd_error() {
	unit="$1"
	n="20"
	ERROR "Failed to start ${unit}. Run \`journalctl -n ${n} --no-pager --user --unit ${unit}\` to show the error log."
	ERROR "Before retrying installation, you might need to uninstall the current setup: \`$0 uninstall -f ; ${BIN}/rootlesskit rm -rf ${HOME}/.local/share/containerd\`"
}
install_systemd_unit() {
unit="$1"
unit_file="${XDG_CONFIG_HOME}/systemd/user/${unit}"
if [ -f "${unit_file}" ]; then
WARNING "File already exists, skipping: ${unit_file}"
else
INFO "Creating \"${unit_file}\""
mkdir -p "${XDG_CONFIG_HOME}/systemd/user"
cat >"${unit_file}"
systemctl --user daemon-reload
fi
if ! systemctl --user --no-pager status "${unit}" >/dev/null 2>&1; then
INFO "Starting systemd unit \"${unit}\""
(
set -x
if ! systemctl --user start "${unit}"; then
set +x
show_systemd_error "${unit}"
exit 1
fi
sleep 3
)
fi
(
set -x
if ! systemctl --user --no-pager --full status "${unit}"; then
set +x
show_systemd_error "${unit}"
exit 1
fi
systemctl --user enable "${unit}"
)
INFO "Installed \"${unit}\" successfully."
INFO "To control \"${unit}\", run: \`systemctl --user (start|stop|restart) ${unit}\`"
}
uninstall_systemd_unit() {
unit="$1"
unit_file="${XDG_CONFIG_HOME}/systemd/user/${unit}"
if [ ! -f "${unit_file}" ]; then
INFO "Unit ${unit} is not installed"
return
fi
(
set -x
systemctl --user stop "${unit}"
) || :
(
set -x
systemctl --user disable "${unit}"
) || :
rm -f "${unit_file}"
INFO "Uninstalled \"${unit}\""
}
# CLI subcommand: "install"
cmd_entrypoint_install() {
init
cmd_entrypoint_check
cat <<-EOT | install_systemd_unit "${SYSTEMD_CONTAINERD_UNIT}"
[Unit]
Description=containerd (Rootless)
[Service]
Environment=PATH=$BIN:/sbin:/usr/sbin:$PATH
Environment=CONTAINERD_ROOTLESS_ROOTLESSKIT_FLAGS=${CONTAINERD_ROOTLESS_ROOTLESSKIT_FLAGS:-}
ExecStart=$BIN/${CONTAINERD_ROOTLESS_SH}
ExecReload=/bin/kill -s HUP \$MAINPID
TimeoutSec=0
RestartSec=2
Restart=always
StartLimitBurst=3
StartLimitInterval=60s
LimitNOFILE=infinity
LimitNPROC=infinity
LimitCORE=infinity
TasksMax=infinity
Delegate=yes
Type=simple
KillMode=mixed
[Install]
WantedBy=default.target
EOT
systemctl --user daemon-reload
INFO "To run \"${SYSTEMD_CONTAINERD_UNIT}\" on system startup automatically, run: \`sudo loginctl enable-linger $(id -un)\`"
INFO "------------------------------------------------------------------------------------------"
INFO "Use \`nerdctl\` to connect to the rootless containerd."
INFO "You do NOT need to specify \$CONTAINERD_ADDRESS explicitly."
}
# CLI subcommand: "install-buildkit"
cmd_entrypoint_install_buildkit() {
init
if ! command -v "buildkitd" >/dev/null 2>&1; then
ERROR "buildkitd (https://github.com/moby/buildkit) needs to be present under \$PATH"
exit 1
fi
if ! systemctl --user --no-pager status "${SYSTEMD_CONTAINERD_UNIT}" >/dev/null 2>&1; then
ERROR "Install containerd first (\`$ARG0 install\`)"
exit 1
fi
cat <<-EOT | install_systemd_unit "${SYSTEMD_BUILDKIT_UNIT}"
[Unit]
Description=BuildKit (Rootless)
PartOf=${SYSTEMD_CONTAINERD_UNIT}
[Service]
Environment=PATH=$BIN:/sbin:/usr/sbin:$PATH
ExecStart="$REALPATH0" nsenter buildkitd
ExecReload=/bin/kill -s HUP \$MAINPID
RestartSec=2
Restart=always
Type=simple
KillMode=mixed
[Install]
WantedBy=default.target
EOT
}
# CLI subcommand: "install-buildkit-containerd"
# Install a rootless BuildKit systemd user unit backed by the containerd worker.
# Honors CONTAINERD_NAMESPACE (separate unit/socket/root per namespace) and
# CONTAINERD_SNAPSHOTTER (forwarded to the buildkitd worker).
cmd_entrypoint_install_buildkit_containerd() {
	init
	if ! command -v "buildkitd" >/dev/null 2>&1; then
		ERROR "buildkitd (https://github.com/moby/buildkit) needs to be present under \$PATH"
		exit 1
	fi
	# Seed a default buildkitd.toml (containerd worker only) unless one exists.
	if [ ! -f "${XDG_CONFIG_HOME}/buildkit/buildkitd.toml" ]; then
		mkdir -p "${XDG_CONFIG_HOME}/buildkit"
		cat <<EOF >"${XDG_CONFIG_HOME}/buildkit/buildkitd.toml"
[worker.oci]
enabled = false
[worker.containerd]
enabled = true
rootless = true
EOF
	fi
	if ! systemctl --user --no-pager status "${SYSTEMD_CONTAINERD_UNIT}" >/dev/null 2>&1; then
		ERROR "Install containerd first (\`$ARG0 install\`)"
		exit 1
	fi
	UNIT_NAME=${SYSTEMD_BUILDKIT_UNIT}
	BUILDKITD_FLAG=
	if [ -n "${CONTAINERD_NAMESPACE:-}" ]; then
		# Namespaced install: dedicated unit name, socket, root and worker namespace.
		UNIT_NAME="${CONTAINERD_NAMESPACE}-${SYSTEMD_BUILDKIT_UNIT}"
		BUILDKITD_FLAG="${BUILDKITD_FLAG} --addr=unix://${XDG_RUNTIME_DIR}/buildkit-${CONTAINERD_NAMESPACE}/buildkitd.sock --root=${XDG_DATA_HOME}/buildkit-${CONTAINERD_NAMESPACE} --containerd-worker-namespace=${CONTAINERD_NAMESPACE}"
	else
		WARNING "buildkitd has access to images in \"buildkit\" namespace by default. If you want to give buildkitd access to the images in \"default\" namespace, run this command with CONTAINERD_NAMESPACE=default"
	fi
	if [ -n "${CONTAINERD_SNAPSHOTTER:-}" ]; then
		# Fix: this was assigned to the misspelled variable BULDKITD_FLAG,
		# so the snapshotter flag was silently dropped from the unit below.
		BUILDKITD_FLAG="${BUILDKITD_FLAG} --containerd-worker-snapshotter=${CONTAINERD_SNAPSHOTTER}"
	fi
	cat <<EOT | install_systemd_unit "${UNIT_NAME}"
[Unit]
Description=BuildKit (Rootless)
PartOf=${SYSTEMD_CONTAINERD_UNIT}
[Service]
Environment=PATH=$BIN:/sbin:/usr/sbin:$PATH
ExecStart="$REALPATH0" nsenter -- buildkitd ${BUILDKITD_FLAG}
ExecReload=/bin/kill -s HUP \$MAINPID
RestartSec=2
Restart=always
Type=simple
KillMode=mixed
[Install]
WantedBy=default.target
EOT
}
# CLI subcommand: "install-bypass4netnsd"
cmd_entrypoint_install_bypass4netnsd() {
init
if ! command -v "bypass4netnsd" >/dev/null 2>&1; then
ERROR "bypass4netnsd (https://github.com/rootless-containers/bypass4netns) needs to be present under \$PATH"
exit 1
fi
command_v_bypass4netnsd="$(command -v bypass4netnsd)"
# FIXME: bail if bypass4netnsd is an alias
cat <<-EOT | install_systemd_unit "${SYSTEMD_BYPASS4NETNSD_UNIT}"
[Unit]
Description=bypass4netnsd (daemon for bypass4netns, accelerator for rootless containers)
# Not PartOf=${SYSTEMD_CONTAINERD_UNIT}
[Service]
Environment=PATH=$BIN:/sbin:/usr/sbin:$PATH
ExecStart="${command_v_bypass4netnsd}"
ExecReload=/bin/kill -s HUP \$MAINPID
RestartSec=2
Restart=always
Type=simple
KillMode=mixed
[Install]
WantedBy=default.target
EOT
INFO "To use bypass4netnsd, set the \"nerdctl/bypass4netns=true\" label on containers, e.g., \`nerdctl run --label nerdctl/bypass4netns=true\`"
}
# CLI subcommand: "install-fuse-overlayfs"
cmd_entrypoint_install_fuse_overlayfs() {
init
if ! command -v "containerd-fuse-overlayfs-grpc" >/dev/null 2>&1; then
ERROR "containerd-fuse-overlayfs-grpc (https://github.com/containerd/fuse-overlayfs-snapshotter) needs to be present under \$PATH"
exit 1
fi
if ! command -v "fuse-overlayfs" >/dev/null 2>&1; then
ERROR "fuse-overlayfs (https://github.com/containers/fuse-overlayfs) needs to be present under \$PATH"
exit 1
fi
if ! systemctl --user --no-pager status "${SYSTEMD_CONTAINERD_UNIT}" >/dev/null 2>&1; then
ERROR "Install containerd first (\`$ARG0 install\`)"
exit 1
fi
cat <<-EOT | install_systemd_unit "${SYSTEMD_FUSE_OVERLAYFS_UNIT}"
[Unit]
Description=containerd-fuse-overlayfs (Rootless)
PartOf=${SYSTEMD_CONTAINERD_UNIT}
[Service]
Environment=PATH=$BIN:/sbin:/usr/sbin:$PATH
ExecStart="$REALPATH0" nsenter containerd-fuse-overlayfs-grpc "${XDG_RUNTIME_DIR}/containerd-fuse-overlayfs.sock" "${XDG_DATA_HOME}/containerd-fuse-overlayfs"
ExecReload=/bin/kill -s HUP \$MAINPID
RestartSec=2
Restart=always
Type=simple
KillMode=mixed
[Install]
WantedBy=default.target
EOT
INFO "Add the following lines to \"${XDG_CONFIG_HOME}/containerd/config.toml\" manually, and then run \`systemctl --user restart ${SYSTEMD_CONTAINERD_UNIT}\`:"
cat <<-EOT
### BEGIN ###
[proxy_plugins]
[proxy_plugins."fuse-overlayfs"]
type = "snapshot"
address = "${XDG_RUNTIME_DIR}/containerd-fuse-overlayfs.sock"
### END ###
EOT
INFO "Set \`export CONTAINERD_SNAPSHOTTER=\"fuse-overlayfs\"\` to use the fuse-overlayfs snapshotter."
}
# CLI subcommand: "install-stargz"
cmd_entrypoint_install_stargz() {
init
if ! command -v "containerd-stargz-grpc" >/dev/null 2>&1; then
ERROR "containerd-stargz-grpc (https://github.com/containerd/stargz-snapshotter) needs to be present under \$PATH"
exit 1
fi
if ! systemctl --user --no-pager status "${SYSTEMD_CONTAINERD_UNIT}" >/dev/null 2>&1; then
ERROR "Install containerd first (\`$ARG0 install\`)"
exit 1
fi
if [ ! -f "${XDG_CONFIG_HOME}/containerd-stargz-grpc/config.toml" ]; then
mkdir -p "${XDG_CONFIG_HOME}/containerd-stargz-grpc"
touch "${XDG_CONFIG_HOME}/containerd-stargz-grpc/config.toml"
fi
cat <<-EOT | install_systemd_unit "${SYSTEMD_STARGZ_UNIT}"
[Unit]
Description=stargz snapshotter (Rootless)
PartOf=${SYSTEMD_CONTAINERD_UNIT}
[Service]
Environment=PATH=$BIN:/sbin:/usr/sbin:$PATH
Environment=IPFS_PATH=${XDG_DATA_HOME}/ipfs
ExecStart="$REALPATH0" nsenter -- containerd-stargz-grpc -address "${XDG_RUNTIME_DIR}/containerd-stargz-grpc/containerd-stargz-grpc.sock" -root "${XDG_DATA_HOME}/containerd-stargz-grpc" -config "${XDG_CONFIG_HOME}/containerd-stargz-grpc/config.toml"
ExecReload=/bin/kill -s HUP \$MAINPID
RestartSec=2
Restart=always
Type=simple
KillMode=mixed
[Install]
WantedBy=default.target
EOT
INFO "Add the following lines to \"${XDG_CONFIG_HOME}/containerd/config.toml\" manually, and then run \`systemctl --user restart ${SYSTEMD_CONTAINERD_UNIT}\`:"
cat <<-EOT
### BEGIN ###
[proxy_plugins]
[proxy_plugins."stargz"]
type = "snapshot"
address = "${XDG_RUNTIME_DIR}/containerd-stargz-grpc/containerd-stargz-grpc.sock"
### END ###
EOT
INFO "Set \`export CONTAINERD_SNAPSHOTTER=\"stargz\"\` to use the stargz snapshotter."
}
# CLI subcommand: "install-ipfs"
cmd_entrypoint_install_ipfs() {
init
if ! command -v "ipfs" >/dev/null 2>&1; then
ERROR "ipfs needs to be present under \$PATH"
exit 1
fi
if ! systemctl --user --no-pager status "${SYSTEMD_CONTAINERD_UNIT}" >/dev/null 2>&1; then
ERROR "Install containerd first (\`$ARG0 install\`)"
exit 1
fi
IPFS_PATH="${XDG_DATA_HOME}/ipfs"
mkdir -p "${IPFS_PATH}"
cat <<-EOT | install_systemd_unit "${SYSTEMD_IPFS_UNIT}"
[Unit]
Description=ipfs daemon for rootless nerdctl
PartOf=${SYSTEMD_CONTAINERD_UNIT}
[Service]
Environment=PATH=$BIN:/sbin:/usr/sbin:$PATH
Environment=IPFS_PATH=${IPFS_PATH}
ExecStart="$REALPATH0" nsenter -- ipfs daemon $@
ExecReload=/bin/kill -s HUP \$MAINPID
RestartSec=2
Restart=always
Type=simple
KillMode=mixed
[Install]
WantedBy=default.target
EOT
# Avoid using 5001(api)/8080(gateway) which are reserved by tests.
# TODO: support unix socket
systemctl --user stop "${SYSTEMD_IPFS_UNIT}"
sleep 3
IPFS_PATH=${IPFS_PATH} ipfs config Addresses.API "/ip4/127.0.0.1/tcp/5888"
IPFS_PATH=${IPFS_PATH} ipfs config Addresses.Gateway "/ip4/127.0.0.1/tcp/5889"
systemctl --user restart "${SYSTEMD_IPFS_UNIT}"
sleep 3
INFO "If you use stargz-snapshotter, add the following line to \"${XDG_CONFIG_HOME}/containerd-stargz-grpc/config.toml\" manually, and then run \`systemctl --user restart ${SYSTEMD_STARGZ_UNIT}\`:"
cat <<-EOT
### BEGIN ###
ipfs = true
### END ###
EOT
INFO "If you want to expose the port 4001 of ipfs daemon, re-install rootless containerd with CONTAINERD_ROOTLESS_ROOTLESSKIT_FLAGS=\"--publish=0.0.0.0:4001:4001/tcp\" environment variable."
INFO "Set \`export IPFS_PATH=\"${IPFS_PATH}\"\` to use ipfs."
}
# CLI subcommand: "uninstall"
cmd_entrypoint_uninstall() {
init
uninstall_systemd_unit "${SYSTEMD_BUILDKIT_UNIT}"
uninstall_systemd_unit "${SYSTEMD_FUSE_OVERLAYFS_UNIT}"
uninstall_systemd_unit "${SYSTEMD_CONTAINERD_UNIT}"
INFO "This uninstallation tool does NOT remove containerd binaries and data."
INFO "To remove data, run: \`$BIN/rootlesskit rm -rf ${XDG_DATA_HOME}/containerd\`"
}
# CLI subcommand: "uninstall-buildkit"
cmd_entrypoint_uninstall_buildkit() {
init
uninstall_systemd_unit "${SYSTEMD_BUILDKIT_UNIT}"
INFO "This uninstallation tool does NOT remove data."
INFO "To remove data, run: \`$BIN/rootlesskit rm -rf ${XDG_DATA_HOME}/buildkit"
}
# CLI subcommand: "uninstall-buildkit-containerd"
cmd_entrypoint_uninstall_buildkit_containerd() {
init
UNIT_NAME=${SYSTEMD_BUILDKIT_UNIT}
BUILDKIT_ROOT="${XDG_DATA_HOME}/buildkit"
if [ -n "${CONTAINERD_NAMESPACE:-}" ] ; then
UNIT_NAME="${CONTAINERD_NAMESPACE}-${SYSTEMD_BUILDKIT_UNIT}"
BUILDKIT_ROOT="${XDG_DATA_HOME}/buildkit-${CONTAINERD_NAMESPACE}"
fi
uninstall_systemd_unit "${UNIT_NAME}"
INFO "This uninstallation tool does NOT remove data."
INFO "To remove data, run: \`$BIN/rootlesskit rm -rf ${BUILDKIT_ROOT}\`"
}
# CLI subcommand: "uninstall-bypass4netnsd"
cmd_entrypoint_uninstall_bypass4netnsd() {
init
uninstall_systemd_unit "${SYSTEMD_BYPASS4NETNSD_UNIT}"
}
# CLI subcommand: "uninstall-fuse-overlayfs"
cmd_entrypoint_uninstall_fuse_overlayfs() {
init
uninstall_systemd_unit "${SYSTEMD_FUSE_OVERLAYFS_UNIT}"
INFO "This uninstallation tool does NOT remove data."
INFO "To remove data, run: \`$BIN/rootlesskit rm -rf ${XDG_DATA_HOME}/containerd-fuse-overlayfs"
}
# CLI subcommand: "uninstall-stargz"
cmd_entrypoint_uninstall_stargz() {
init
uninstall_systemd_unit "${SYSTEMD_STARGZ_UNIT}"
INFO "This uninstallation tool does NOT remove data."
INFO "To remove data, run: \`$BIN/rootlesskit rm -rf ${XDG_DATA_HOME}/containerd-stargz-grpc"
}
# CLI subcommand: "uninstall-ipfs"
cmd_entrypoint_uninstall_ipfs() {
init
uninstall_systemd_unit "${SYSTEMD_IPFS_UNIT}"
INFO "This uninstallation tool does NOT remove data."
INFO "To remove data, run: \`$BIN/rootlesskit rm -rf ${XDG_DATA_HOME}/ipfs"
}
# text for --help
# Printed on -h/--help and whenever CLI argument parsing fails.
# Each "Commands:" entry must correspond to a cmd_entrypoint_* function
# (dashes map to underscores) — see the dispatch at the bottom of the script.
usage() {
	echo "Usage: ${ARG0} [OPTIONS] COMMAND"
	echo
	echo "A setup tool for Rootless containerd (${CONTAINERD_ROOTLESS_SH})."
	echo
	echo "Commands:"
	echo " check Check prerequisites"
	echo " nsenter Enter into RootlessKit namespaces (mostly for debugging)"
	echo " install Install systemd unit and show how to manage the service"
	echo " uninstall Uninstall systemd unit"
	echo
	echo "Add-on commands (BuildKit):"
	echo " install-buildkit Install the systemd unit for BuildKit"
	echo " uninstall-buildkit Uninstall the systemd unit for BuildKit"
	echo
	echo "Add-on commands (bypass4netnsd):"
	echo " install-bypass4netnsd Install the systemd unit for bypass4netnsd"
	echo " uninstall-bypass4netnsd Uninstall the systemd unit for bypass4netnsd"
	echo
	echo "Add-on commands (fuse-overlayfs):"
	echo " install-fuse-overlayfs Install the systemd unit for fuse-overlayfs snapshotter"
	echo " uninstall-fuse-overlayfs Uninstall the systemd unit for fuse-overlayfs snapshotter"
	echo
	echo "Add-on commands (stargz):"
	echo " install-stargz Install the systemd unit for stargz snapshotter"
	echo " uninstall-stargz Uninstall the systemd unit for stargz snapshotter"
	echo
	echo "Add-on commands (ipfs):"
	echo " install-ipfs [ipfs-daemon-flags...] Install the systemd unit for ipfs daemon. Specify \"--offline\" if run the daemon in offline mode"
	echo " uninstall-ipfs Uninstall the systemd unit for ipfs daemon"
	echo
	echo "Add-on commands (BuildKit containerd worker):"
	echo " install-buildkit-containerd Install the systemd unit for BuildKit with CONTAINERD_NAMESPACE=${CONTAINERD_NAMESPACE:-} and CONTAINERD_SNAPSHOTTER=${CONTAINERD_SNAPSHOTTER:-}"
	echo " uninstall-buildkit-containerd Uninstall the systemd unit for BuildKit with CONTAINERD_NAMESPACE=${CONTAINERD_NAMESPACE:-} and CONTAINERD_SNAPSHOTTER=${CONTAINERD_SNAPSHOTTER:-}"
}
# parse CLI args
# getopt normalizes the options; failure (unknown option) prints usage.
if ! args="$(getopt -o h --long help -n "$ARG0" -- "$@")"; then
	usage
	exit 1
fi
eval set -- "$args"
while [ "$#" -gt 0 ]; do
	arg="$1"
	shift
	case "$arg" in
	-h | --help)
		usage
		exit 0
		;;
	--)
		# End of options; what remains is the command and its arguments.
		break
		;;
	*)
		# XXX this means we missed something in our "getopt" arguments above!
		ERROR "Scripting error, unknown argument '$arg' when parsing script arguments."
		exit 1
		;;
	esac
done
# Map e.g. "install-buildkit" to the handler "cmd_entrypoint_install_buildkit".
command=$(echo "${1:-}" | sed -e "s/-/_/g")
if [ -z "$command" ]; then
	ERROR "No command was specified. Run with --help to see the usage. Maybe you want to run \`$ARG0 install\`?"
	exit 1
fi
# Reject names that do not correspond to a defined cmd_entrypoint_* function.
if ! command -v "cmd_entrypoint_${command}" >/dev/null 2>&1; then
	ERROR "Unknown command: ${command}. Run with --help to see the usage."
	exit 1
fi
# main
shift
"cmd_entrypoint_${command}" "$@"
|
<filename>spec/rubocop/cop/internal_affairs/method_name_equal_spec.rb<gh_stars>1000+
# frozen_string_literal: true
# Spec for the InternalAffairs/MethodNameEqual cop: flags `method_name == x`
# comparisons and autocorrects them to the `method?(x)` predicate.
RSpec.describe RuboCop::Cop::InternalAffairs::MethodNameEqual, :config do
  it 'registers an offense when using `#method == :do_something`' do
    expect_offense(<<~RUBY)
      node.method_name == :do_something
      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `method?(:do_something)` instead of `method_name == :do_something`.
    RUBY
    # The offense is autocorrected to the predicate form.
    expect_correction(<<~RUBY)
      node.method?(:do_something)
    RUBY
  end
  # Also applies when the right-hand side is an arbitrary expression.
  it 'registers an offense when using `#method == other_node.do_something`' do
    expect_offense(<<~RUBY)
      node.method_name == other_node.do_something
      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `method?(other_node.do_something)` instead of `method_name == other_node.do_something`.
    RUBY
    expect_correction(<<~RUBY)
      node.method?(other_node.do_something)
    RUBY
  end
  it 'does not register an offense when using `#method?`' do
    expect_no_offenses(<<~RUBY)
      node.method?(:do_something)
    RUBY
  end
end
|
<reponame>shamalainen/financer
import React from 'react';
interface HeroLeadProps {
children: string;
className?: string;
}
export const HeroLead = ({
children,
className = '',
}: HeroLeadProps): JSX.Element => (
<h2 className={`mt-5 text-xl leading-7 text-gray-300 ${className}`}>
{children}
</h2>
);
|
def k_means_string_clustering(string_data, k, distance_algorithm, additional_penalty):
    """
    Perform K-means clustering on the given string data.

    Args:
        string_data (list): A list of strings to be clustered.
        k (int): The number of clusters.
        distance_algorithm (str): The distance algorithm to be used (e.g., "Needleman-Wunsch").
        additional_penalty (int): The additional penalty to be applied during the distance calculation.

    Returns:
        dict: A dictionary mapping each centroid (a tuple of floats) to the
            list of strings in its cluster.

    Note:
        sklearn's KMeans supports only Euclidean distance, so the
        Needleman-Wunsch-based ``custom_distance`` defined below is NOT
        consumed by the clustering itself; the strings are clustered on a
        zero-padded ordinal (character-code) encoding instead.
    """
    # Import necessary libraries (kept local, as in the original module).
    from sklearn.cluster import KMeans
    import numpy as np
    from Bio import pairwise2

    # Custom distance per the requested algorithm, kept for reference and
    # external use — see the Note in the docstring.
    def custom_distance(s1, s2):
        alignment = pairwise2.align.globalms(s1, s2, 2, -1, -1, -1, score_only=True)
        return alignment + additional_penalty * abs(len(s1) - len(s2))

    # Encode each string as its character ordinals, zero-padded to the longest
    # string so the matrix is rectangular. (The original built a ragged list,
    # which np.array() cannot turn into a numeric matrix when the input
    # strings have different lengths.)
    max_len = max((len(s) for s in string_data), default=0)
    numerical_data = np.array(
        [[ord(c) for c in s] + [0] * (max_len - len(s)) for s in string_data]
    )

    # Perform K-means clustering with k-means++ seeding; the fixed seed keeps
    # the result reproducible.
    kmeans_alg = KMeans(n_clusters=k, init='k-means++', random_state=42)
    kmeans_alg.fit(numerical_data)

    # Group the input strings by their assigned cluster label.
    centroid_cluster_map = {}
    for i, center in enumerate(kmeans_alg.cluster_centers_):
        centroid_cluster_map[tuple(center)] = [
            string_data[j] for j, label in enumerate(kmeans_alg.labels_) if label == i
        ]
    return centroid_cluster_map
def print_k_means_results(centroid_cluster_map, distance_algorithm):
    """
    Pretty-print the clusters produced by ``k_means_string_clustering``.

    Args:
        centroid_cluster_map (dict): Maps each centroid to the list of
            strings assigned to its cluster.
        distance_algorithm (str): Name of the distance algorithm used,
            echoed in the header line.
    """
    # Blank line, then a header naming the algorithm.
    print()
    print("K Means string edition with %s distance algorithm" % distance_algorithm)
    # One bullet per centroid: the centroid followed by its member strings.
    for centroid, members in centroid_cluster_map.items():
        print(" - *%s* %s" % (centroid, members))
<reponame>IonThruster/ClockSim
// Auto-generated Doxygen search-index data: each entry is
// [id, [display name, [target HTML page, anchor flag, scope]]].
// Do not edit by hand; regenerate with Doxygen instead.
var searchData=
[
  ['binaryexpr_821',['BinaryExpr',['../classCatch_1_1BinaryExpr.html',1,'Catch']]]
];
|
const mobx = require("../../../src/mobx")
const { autorun, keys, when, set, remove, values, entries, reaction, observable, has, get } = mobx
test("keys should be observable when extending", () => {
const todos = observable({})
const todoTitles = []
reaction(
() => keys(todos).map(key => `${key}: ${todos[key]}`),
titles => todoTitles.push(titles.join(","))
)
mobx.set(todos, {
lewis: "Read Lewis",
chesterton: "Be mind blown by Chesterton"
})
expect(todoTitles).toEqual(["lewis: Read Lewis,chesterton: Be mind blown by Chesterton"])
mobx.set(todos, { lewis: "Read Lewis twice" })
mobx.set(todos, { coffee: "Grab coffee" })
expect(todoTitles).toEqual([
"lewis: Read Lewis,chesterton: Be mind blown by Chesterton",
"lewis: Read Lewis twice,chesterton: Be mind blown by Chesterton",
"lewis: Read Lewis twice,chesterton: Be mind blown by Chesterton,coffee: Grab coffee"
])
})
test("toJS respects key changes", () => {
const todos = observable({})
const serialized = []
mobx.autorun(() => {
serialized.push(JSON.stringify(mobx.toJS(todos)))
})
mobx.set(todos, {
lewis: "Read Lewis",
chesterton: "Be mind blown by Chesterton"
})
mobx.set(todos, { lewis: "Read Lewis twice" })
mobx.set(todos, { coffee: "Grab coffee" })
expect(serialized).toEqual([
"{}",
'{"lewis":"Read Lewis","chesterton":"Be mind blown by Chesterton"}',
'{"lewis":"Read Lewis twice","chesterton":"Be mind blown by Chesterton"}',
'{"lewis":"Read Lewis twice","chesterton":"Be mind blown by Chesterton","coffee":"Grab coffee"}'
])
})
test("keys(object), values(object), entries(object)", () => {
const todos = observable({})
const plain = {}
const keysSnapshots = []
const valuesSnapshots = []
const entriesSnapshots = []
const expectedKeysSnapshots = []
const expectedValuesSnapshots = []
const expectedEntriesSnapshots = []
const s1 = Symbol()
const s2 = Symbol()
function expectEquality() {
expect(todos).toEqual(plain)
}
function expectKeysReaction() {
expectedKeysSnapshots.push(Object.keys(plain))
}
function expectValuesReaction() {
expectedValuesSnapshots.push(Object.values(plain))
}
function expectEntriesReaction() {
expectedEntriesSnapshots.push(Object.entries(plain))
}
reaction(
() => keys(todos),
result => keysSnapshots.push(result)
)
reaction(
() => values(todos),
result => valuesSnapshots.push(result)
)
reaction(
() => entries(todos),
result => entriesSnapshots.push(result)
)
expectEquality()
// add
set(todos, "k1", 1)
plain["k1"] = 1
expectEquality()
expectKeysReaction()
expectValuesReaction()
expectEntriesReaction()
// add symbol
set(todos, s1, 2)
plain[s1] = 2
expectEquality()
// see ObservableObjectAdministration.keys() for explanation
expectKeysReaction()
expectValuesReaction()
expectEntriesReaction()
// delete non-existent
remove(todos, "-")
delete plain["-"]
expectEquality()
// delete non-existent symbol
remove(todos, Symbol())
delete plain[Symbol()]
expectEquality()
// add second
set(todos, "k2", 3)
plain["k2"] = 3
expectEquality()
expectKeysReaction()
expectValuesReaction()
expectEntriesReaction()
// add second symbol
set(todos, s2, 4)
plain[s2] = 4
expectEquality()
// see ObservableObjectAdministration.keys() for explanation
expectKeysReaction()
expectValuesReaction()
expectEntriesReaction()
// update
set(todos, "k1", 11)
plain["k1"] = 11
expectEquality()
expectValuesReaction()
expectEntriesReaction()
// update symbol
set(todos, s1, 22)
plain[s1] = 22
expectEquality()
// delete
remove(todos, "k1")
delete plain["k1"]
expectEquality()
expectKeysReaction()
expectValuesReaction()
expectEntriesReaction()
// delete symbol
remove(todos, s1)
delete plain[s1]
expectEquality()
// see ObservableObjectAdministration.keys() for explanation
expectKeysReaction()
expectValuesReaction()
expectEntriesReaction()
expect(keysSnapshots).toEqual(expectedKeysSnapshots)
expect(valuesSnapshots).toEqual(expectedValuesSnapshots)
expect(entriesSnapshots).toEqual(expectedEntriesSnapshots)
})
test("values(map)", () => {
const todos = observable.map({})
const snapshots = []
reaction(
() => values(todos),
values => snapshots.push(values)
)
expect(has(todos, "x")).toBe(false)
expect(get(todos, "x")).toBe(undefined)
set(todos, "x", 3)
expect(has(todos, "x")).toBe(true)
expect(get(todos, "x")).toBe(3)
remove(todos, "y")
set(todos, "z", 4)
set(todos, "x", 5)
remove(todos, "z")
expect(snapshots).toEqual([[3], [3, 4], [5, 4], [5]])
})
test("values(map) - symbols", () => {
const todos = observable.map({})
const snapshots = []
const x = Symbol()
const y = Symbol()
const z = Symbol("z")
reaction(
() => values(todos),
values => snapshots.push(values)
)
expect(has(todos, x)).toBe(false)
expect(get(todos, x)).toBe(undefined)
set(todos, x, 3)
expect(has(todos, x)).toBe(true)
expect(get(todos, x)).toBe(3)
remove(todos, y)
set(todos, z, 4)
set(todos, x, 5)
remove(todos, z)
expect(snapshots).toEqual([[3], [3, 4], [5, 4], [5]])
})
test("entries(map)", () => {
const todos = observable.map({})
const snapshots = []
reaction(
() => entries(todos),
entries => snapshots.push(entries)
)
expect(has(todos, "x")).toBe(false)
expect(get(todos, "x")).toBe(undefined)
set(todos, "x", 3)
expect(has(todos, "x")).toBe(true)
expect(get(todos, "x")).toBe(3)
remove(todos, "y")
set(todos, "z", 4)
set(todos, "x", 5)
remove(todos, "z")
expect(snapshots).toEqual([
[["x", 3]],
[
["x", 3],
["z", 4]
],
[
["x", 5],
["z", 4]
],
[["x", 5]]
])
})
test("entries(map) - symbols", () => {
const todos = observable.map({})
const snapshots = []
const x = Symbol()
const y = Symbol()
const z = Symbol("z")
reaction(
() => entries(todos),
entries => snapshots.push(entries)
)
expect(has(todos, x)).toBe(false)
expect(get(todos, x)).toBe(undefined)
set(todos, x, 3)
expect(has(todos, x)).toBe(true)
expect(get(todos, x)).toBe(3)
remove(todos, y)
set(todos, z, 4)
set(todos, x, 5)
remove(todos, z)
expect(snapshots).toEqual([
[[x, 3]],
[
[x, 3],
[z, 4]
],
[
[x, 5],
[z, 4]
],
[[x, 5]]
])
})
test("keys(map)", () => {
const todos = observable.map({ a: 3 })
const snapshots = []
reaction(
() => keys(todos),
keys => snapshots.push(keys)
)
set(todos, "x", 3)
remove(todos, "y")
set(todos, "z", 4)
set(todos, "x", 5)
remove(todos, "z")
remove(todos, "a")
expect(snapshots).toEqual([["a", "x"], ["a", "x", "z"], ["a", "x"], ["x"]])
})
test("keys(map) - symbols", () => {
const snapshots = []
const x = Symbol()
const y = Symbol()
const z = Symbol("z")
const a = Symbol()
const todos = observable.map({ [a]: 3 })
reaction(
() => keys(todos),
keys => snapshots.push(keys)
)
set(todos, x, 3)
remove(todos, y)
set(todos, z, 4)
set(todos, x, 5)
remove(todos, z)
remove(todos, a)
expect(snapshots).toEqual([[a, x], [a, x, z], [a, x], [x]])
})
test("values(array)", () => {
const todos = observable.array()
const snapshots = []
reaction(
() => values(todos),
values => snapshots.push(values)
)
expect(has(todos, 0)).toBe(false)
expect(get(todos, 0)).toBe(undefined)
set(todos, 0, 2)
expect(has(todos, 0)).toBe(true)
expect(get(todos, 0)).toBe(2)
set(todos, "1", 4)
set(todos, 3, 4)
set(todos, 1, 3)
remove(todos, 2)
remove(todos, "0")
expect(snapshots).toEqual([
[2],
[2, 4],
[2, 4, undefined, 4],
[2, 3, undefined, 4],
[2, 3, 4],
[3, 4]
])
})
test("entries(array)", () => {
const todos = observable.array()
const snapshots = []
reaction(
() => entries(todos),
entries => snapshots.push(entries)
)
expect(has(todos, 0)).toBe(false)
expect(get(todos, 0)).toBe(undefined)
set(todos, 0, 2)
expect(has(todos, 0)).toBe(true)
expect(get(todos, 0)).toBe(2)
set(todos, "1", 4)
set(todos, 3, 4)
set(todos, 1, 3)
remove(todos, 2)
remove(todos, "0")
expect(snapshots).toEqual([
[[0, 2]],
[
[0, 2],
[1, 4]
],
[
[0, 2],
[1, 4],
[2, undefined],
[3, 4]
],
[
[0, 2],
[1, 3],
[2, undefined],
[3, 4]
],
[
[0, 2],
[1, 3],
[2, 4]
],
[
[0, 3],
[1, 4]
]
])
})
test("keys(array)", () => {
const todos = observable.array()
const snapshots = []
reaction(
() => keys(todos),
keys => snapshots.push(keys)
)
set(todos, 0, 2)
set(todos, "1", 4)
set(todos, 3, 4)
set(todos, 1, 3)
remove(todos, 2)
remove(todos, "0")
expect(snapshots).toEqual([[0], [0, 1], [0, 1, 2, 3], [0, 1, 2, 3], [0, 1, 2], [0, 1]])
})
test("observe & intercept", () => {
let events = []
const todos = observable(
{
a: { title: "get coffee" }
},
{},
{
deep: false,
name: "TestObject" // stable name for snapshot
}
)
mobx.observe(todos, c => {
events.push({ observe: { ...c, object: "skip" } })
})
const d = mobx.intercept(todos, c => {
events.push({ intercept: { ...c, object: "skip" } })
return null // no addition!
})
set(todos, { b: { title: "get tea" } })
remove(todos, "a")
expect(events).toMatchSnapshot()
expect(mobx.toJS(todos)).toEqual({
a: { title: "get coffee" }
})
events.splice(0)
d()
set(todos, { b: { title: "get tea" } })
remove(todos, "a")
expect(events).toMatchSnapshot()
expect(mobx.toJS(todos)).toEqual({
b: { title: "get tea" }
})
})
test("observe & intercept set called multiple times", () => {
const a = mobx.observable({}, {}, { name: "TestObject" }) // stable name for snapshot
const interceptLogs = []
const observeLogs = []
mobx.intercept(a, change => {
interceptLogs.push(`${change.name}: ${change.newValue}`)
return change
})
mobx.observe(a, change => observeLogs.push(`${change.name}: ${change.newValue}`))
mobx.set(a, "x", 0)
a.x = 1
mobx.set(a, "x", 2)
expect(interceptLogs).toEqual(["x: 0", "x: 1", "x: 2"])
expect(observeLogs).toEqual(["x: 0", "x: 1", "x: 2"])
})
test("dynamically adding properties should preserve the original modifiers of an object", () => {
const todos = observable.object(
{
a: { title: "get coffee" }
},
{},
{ deep: false }
)
expect(mobx.isObservable(todos.a)).toBe(false)
set(todos, { b: { title: "get tea" } })
expect(mobx.isObservable(todos.b)).toBe(false)
})
test("has and get are reactive", async () => {
const todos = observable({})
const p1 = when(() => has(todos, "x"))
const p2 = when(() => get(todos, "y") === 3)
setTimeout(() => {
set(todos, { x: false, y: 3 })
}, 100)
await p1
await p2
})
test("computed props are considered part of collections", () => {
const x = observable({
get y() {
return 3
}
})
expect(mobx.isComputedProp(x, "y")).toBe(true)
expect(x.y).toBe(3)
expect(has(x, "y")).toBe(true)
expect(get(x, "y")).toBe(3)
expect(keys(x)).toEqual([])
expect(values(x)).toEqual([])
expect(entries(x)).toEqual([])
})
test("#1739 - delete and undelete should work", () => {
const x = observable({})
const events = []
autorun(() => {
events.push(has(x, "a"))
})
set(x, "a", 1)
set(x, "a", 2)
remove(x, "a")
set(x, "a", 2)
remove(x, "a")
set(x, "a", 3)
expect(events).toEqual([false, true, false, true, false, true])
})
test("keys(set)", () => {
const todos = observable.set([1])
const snapshots = []
reaction(
() => keys(todos),
keys => snapshots.push(keys)
)
set(todos, 2)
remove(todos, 2)
set(todos, 3)
set(todos, 4)
remove(todos, 3)
expect(snapshots).toEqual([[1, 2], [1], [1, 3], [1, 3, 4], [1, 4]])
})
|
#!/bin/bash
# Configure and build the project against a vcpkg toolchain.
# Usage: ./build.sh /full/path/to/vcpkg
if [[ "$1" == "" ]]; then
    echo "Please specify the full path to your VCPKG install."
else
    VCPKG_DIR=$1
    CMAKE_EXE=cmake
    # Start from a clean build tree.
    rm -rf build
    mkdir build
    pushd build || exit 1
    # Quote expansions so vcpkg/CMake paths containing spaces work.
    "$VCPKG_DIR"/vcpkg install sdl2
    "$CMAKE_EXE" .. "-DCMAKE_TOOLCHAIN_FILE=$VCPKG_DIR/scripts/buildsystems/vcpkg.cmake" -G "Unix Makefiles"
    make
    popd || exit 1
fi
#!/bin/bash
# Build and install vdf_client/vdf_bench (from the chiavdf package) for the
# timelord. Must be run from within the project's Python virtual environment.
if [ -z "$VIRTUAL_ENV" ]; then
  echo "This requires the salvia python virtual environment."
  echo "Execute '. ./activate' before running."
  exit 1
fi
echo "Timelord requires CMake 3.14+ to compile vdf_client."
# e.g. "python3.9"; used below to pick the matching libpython dev package.
PYTHON_VERSION=$(python -c 'import sys; print(f"python{sys.version_info.major}.{sys.version_info.minor}")')
echo "Python version: $PYTHON_VERSION"
export BUILD_VDF_BENCH=Y # Installs the useful vdf_bench test of CPU squaring speed
# Path where pip would have put vdf_client, if chiavdf is already installed.
THE_PATH=$(python -c 'import pkg_resources; print( pkg_resources.get_distribution("chiavdf").location)' 2>/dev/null)/vdf_client
# Pinned chiavdf requirement string (e.g. "chiavdf==1.0.x") read from setup.py.
CHIAVDF_VERSION=$(python -c 'from setup import dependencies; t = [_ for _ in dependencies if _.startswith("chiavdf")][0]; print(t)')
# Install a sufficiently new CMake on Ubuntu. Releases before 20.04LTS ship
# an old CMake, so there the apt package is replaced with the snap package.
ubuntu_cmake_install() {
  UBUNTU_PRE_2004=$(python -c 'import subprocess; process = subprocess.run(["lsb_release", "-rs"], stdout=subprocess.PIPE); print(float(process.stdout) < float(20.04))')
  if [ "$UBUNTU_PRE_2004" = "True" ]; then
    echo "Ubuntu version is pre 20.04LTS - installing CMake with snap."
    sudo apt-get install snap -y
    sudo apt-get remove --purge cmake -y
    # Forget the shell's cached path to the just-removed cmake binary.
    hash -r
    sudo snap install cmake --classic
  else
    echo "Ubuntu 20.04LTS and newer support CMake 3.16+"
    sudo apt-get install cmake -y
  fi
}
# Create a ./vdf_bench symlink to the binary installed in the venv's
# site-packages. $1 is the "pythonX.Y" directory name.
symlink_vdf_bench() {
  if [ ! -e vdf_bench ] && [ -e venv/lib/"$1"/site-packages/vdf_bench ]; then
    echo ln -s venv/lib/"$1"/site-packages/vdf_bench
    ln -s venv/lib/"$1"/site-packages/vdf_bench .
  elif [ ! -e venv/lib/"$1"/site-packages/vdf_bench ]; then
    echo "ERROR: Could not find venv/lib/$1/site-packages/vdf_bench"
  else
    echo "./vdf_bench link exists."
  fi
}
if [ "$(uname)" = "Linux" ] && type apt-get; then
UBUNTU_DEBIAN=true
echo "Found Ubuntu/Debian."
elif [ "$(uname)" = "Linux" ] && type dnf || yum; then
RHEL_BASED=true
echo "Found RedHat."
elif [ "$(uname)" = "Darwin" ]; then
MACOS=true
echo "Found MacOS."
fi
if [ -e "$THE_PATH" ]; then
echo "$THE_PATH"
echo "vdf_client already exists, no action taken"
else
if [ -e venv/bin/python ] && test $UBUNTU_DEBIAN; then
echo "Installing chiavdf from source on Ubuntu/Debian"
# If Ubuntu version is older than 20.04LTS then upgrade CMake
ubuntu_cmake_install
# Install remaining needed development tools - assumes venv and prior run of install.sh
echo apt-get install libgmp-dev libboost-python-dev lib"$PYTHON_VERSION"-dev libboost-system-dev build-essential -y
sudo apt-get install libgmp-dev libboost-python-dev lib"$PYTHON_VERSION"-dev libboost-system-dev build-essential -y
echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
symlink_vdf_bench "$PYTHON_VERSION"
elif [ -e venv/bin/python ] && test $RHEL_BASED; then
echo "Installing chiavdf from source on RedHat/CentOS/Fedora"
# Install remaining needed development tools - assumes venv and prior run of install.sh
echo yum install gcc gcc-c++ gmp-devel python3-devel libtool make autoconf automake openssl-devel libevent-devel boost-devel python3 -y
sudo yum install gcc gcc-c++ gmp-devel python3-devel libtool make autoconf automake openssl-devel libevent-devel boost-devel python3 -y
echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
symlink_vdf_bench "$PYTHON_VERSION"
elif [ -e venv/bin/python ] && test $MACOS && [ "$(brew info boost | grep -c 'Not installed')" -eq 1 ]; then
echo "Installing chiavdf requirement boost for MacOS."
brew install boost
echo "Installing chiavdf from source."
# User needs to provide required packages
echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
symlink_vdf_bench "$PYTHON_VERSION"
elif [ -e venv/bin/python ]; then
echo "Installing chiavdf from source."
# User needs to provide required packages
echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION"
symlink_vdf_bench "$PYTHON_VERSION"
else
echo "No venv created yet, please run install.sh."
fi
fi
echo "To estimate a timelord on this CPU try './vdf_bench square_asm 400000' for an ips estimate."
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2017 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.smg.evaluator;
import java.util.List;
import org.sosy_lab.cpachecker.cfa.ast.c.CCastExpression;
import org.sosy_lab.cpachecker.cfa.ast.c.CExpression;
import org.sosy_lab.cpachecker.cfa.ast.c.CFunctionCallExpression;
import org.sosy_lab.cpachecker.cfa.ast.c.CRightHandSideVisitor;
import org.sosy_lab.cpachecker.cfa.model.CFAEdge;
import org.sosy_lab.cpachecker.cfa.types.c.CComplexType;
import org.sosy_lab.cpachecker.cfa.types.c.CElaboratedType;
import org.sosy_lab.cpachecker.cpa.smg.SMGState;
import org.sosy_lab.cpachecker.cpa.smg.evaluator.SMGAbstractObjectAndState.SMGAddressAndState;
import org.sosy_lab.cpachecker.cpa.smg.graphs.value.SMGAddress;
import org.sosy_lab.cpachecker.exceptions.CPATransferException;
/**
* This class evaluates expressions that evaluate to a
* struct or union type. The type of every expression visited by this
* visitor has to be either {@link CElaboratedType} or
* {@link CComplexType}. Furthermore, it must not be a enum.
* The result of the evaluation is an {@link SMGAddress}.
* The object represents the memory this struct is placed in, the offset
* represents the start of the struct.
*/
class StructAndUnionVisitor extends AddressVisitor
implements CRightHandSideVisitor<List<SMGAddressAndState>, CPATransferException> {
public StructAndUnionVisitor(SMGExpressionEvaluator pSmgExpressionEvaluator, CFAEdge pCfaEdge, SMGState pNewState) {
super(pSmgExpressionEvaluator, pCfaEdge, pNewState);
}
@Override
public List<SMGAddressAndState> visit(CFunctionCallExpression pIastFunctionCallExpression) throws CPATransferException {
return visitDefault(pIastFunctionCallExpression);
}
@Override
public List<SMGAddressAndState> visit(CCastExpression cast) throws CPATransferException {
CExpression op = cast.getOperand();
if (SMGExpressionEvaluator.isStructOrUnionType(op.getExpressionType())) {
return cast.getOperand().accept(this);
} else {
//TODO cast reinterpretation
return visitDefault(cast);
}
}
} |
#!/bin/sh
# Generate stub headers under ./system-headers, one per header path given on
# the command line; each stub simply forwards to the real system header.
sysdir="./system-headers"
for header in "$@"
do
    # Quote $header so paths containing spaces survive word splitting.
    mkdir -p "$sysdir/$(dirname "$header")"
    # Fix: the original wrote "include <...>", which is not a valid
    # preprocessor directive — the leading '#' was missing.
    echo "#include <$header>" > "$sysdir/$header"
done
|
# A task belonging to a goal; completion is tracked via the `status` column.
class Task < ApplicationRecord
  belongs_to :goal
  # Every task must be named.
  validates_presence_of :name
  #scope methods
  # Tasks whose status is exactly 'Complete'.
  scope :complete, -> {where(status: 'Complete')}
  # NOTE(review): `where.not` also filters out rows with a NULL status in
  # SQL — confirm tasks with no status should count as incomplete.
  scope :incomplete, -> {where.not(status: 'Complete')}
end
|
<gh_stars>0
$(document).ready(function() {
    "use strict";
    /*================== App Features =====================*/
    // Vertical one-at-a-time blog ticker via slick-carousel: <li> slides,
    // dot pagination, no arrows, no fade.
    $('#blog-ticksy-vertical').slick({
        slidesToShow: 1,
        slidesToScroll: 1,
        arrows: false,
        dots: true,
        vertical: true,
        slide: 'li',
        fade: false
    });
});
const openpgp = typeof window !== 'undefined' && window.openpgp ? window.openpgp : require('../../dist/openpgp');
const expect = require('chai').expect;
describe('AES Key Wrap and Unwrap', function () {
  // RFC 3394 test vectors: [description, KEK (hex), plaintext key data (hex),
  // expected wrapped ciphertext (hex, whitespace-separated)].
  // NOTE(review): several KEK/plaintext fields appear redacted as "<KEY>" —
  // restore the original RFC 3394 values before relying on those cases.
  const test_vectors = [
    [
      "128 bits of Key Data with a 128-bit KEK",
      "000102030405060708090A0B0C0D0E0F",
      "<KEY>",
      "1FA68B0A8112B447 AEF34BD8FB5A7B82 9D3E862371D2CFE5"
    ],
    [
      "128 bits of Key Data with a 192-bit KEK",
      "<KEY>",
      "<KEY>",
      "96778B25AE6CA435 F92B5B97C050AED2 468AB8A17AD84E5D"
    ],
    [
      "128 bits of Key Data with a 256-bit KEK",
      "<KEY>",
      "<KEY>",
      "64E8C3F9CE0F5BA2 63E9777905818A2A 93C8191E7D6E8AE7"
    ],
    [
      "192 bits of Key Data with a 192-bit KEK",
      "000102030405060708090A0B0C0D0E0F1011121314151617",
      "00112233445566778899AABBCCDDEEFF0001020304050607",
      "031D33264E15D332 68F24EC260743EDC E1C6C7DDEE725A93 6BA814915C6762D2"
    ],
    [
      "192 bits of Key Data with a 256-bit KEK",
      "<KEY>",
      "00112233445566778899AABBCCDDEEFF0001020304050607",
      "A8F9BC1612C68B3F F6E6F4FBE30E71E4 769C8B80A32CB895 8CD5D17D6B254DA1"
    ],
    [
      "256 bits of Key Data with a 256-bit KEK",
      "<KEY>",
      "00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F",
      "28C9F404C4B810F4 CBCCB35CFB87F826 3F5786E2D80ED326 CBC7F0E71A99F43B FB988B9B7A02DD21"
    ]
  ];
  test_vectors.forEach(function(test) {
    it(test[0], function(done) {
      const kek = openpgp.util.hex_to_Uint8Array(test[1]);
      // Strip the presentation whitespace from the hex fixtures.
      const input = test[2].replace(/\s/g, "");
      const input_bin = openpgp.util.hex_to_str(input);
      const output = test[3].replace(/\s/g, "");
      const output_bin = openpgp.util.hex_to_str(output);
      // wrap() and unwrap() must be exact inverses on every vector.
      expect(openpgp.util.Uint8Array_to_hex(openpgp.crypto.aes_kw.wrap(kek, input_bin)).toUpperCase()).to.equal(output);
      expect(openpgp.util.Uint8Array_to_hex(openpgp.crypto.aes_kw.unwrap(kek, output_bin)).toUpperCase()).to.equal(input);
      done();
    });
  });
});
|
<gh_stars>0
/** @jsx jsx */
import {css, jsx} from "@emotion/core";
import {FontAwesomeIcon} from "@fortawesome/react-fontawesome";
import {faCheck, faMinus} from "@fortawesome/free-solid-svg-icons";
import PropTypes from 'prop-types'
import DropdownContext from "../../DropdownContext";
import {useContext} from "react";
// Light grey border colour for the checkbox frame.
const bdColor = 'rgb(206,212,218)'
const CheckIcon = ({checked, partlyChecked}) => {
const {fontRatio} = useContext(DropdownContext)
return (
<div css={css`
width: ${20 * fontRatio}px;
height: ${20 * fontRatio}px;
padding: 3px;
position: absolute;
left: ${0.25}rem;
top: ${0.3}rem;
border: 1px solid ${bdColor};
border-radius: 5px;
`} className="d-flex justify-content-center align-items-center">
{partlyChecked ? <FontAwesomeIcon icon={faMinus} css={css`font-size: ${fontRatio}rem; color: dimgrey`} /> : (checked ? <FontAwesomeIcon icon={faCheck} css={css`font-size: ${fontRatio}rem; color: dimgrey`} /> : false)}
</div>
)
}
CheckIcon.propTypes = {
checked: PropTypes.bool,
partlyChecked: PropTypes.bool
}
export default CheckIcon |
/*
* Copyright 2013 atWare, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.co.atware.solr.analizers.cjk;
import java.util.Map;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.util.TokenFilterFactory;
/**
 * Factory for {@code CranioCaudalFilter}: strips known word prefixes
 * (elided Italian articles) and suffixes (English possessives) from tokens.
 */
public class CranioCaudalFilterFactory extends TokenFilterFactory {

    /** Prefixes to strip, in both lower and upper case. */
    private final String[] prefixOfWords = {
            //
            "d'",    // Italian, the definite article.
            "l'",    // Italian, the definite article.
            "dell'", // Italian, the definite article.
            "D'", "L'", "DELL'" };

    /** Suffixes to strip: English possessive 's, in both cases. */
    private final String[] endOfWords = { "'s", "'S" };

    /**
     * Creates a new factory instance.
     *
     * @param args factory arguments, forwarded to {@link TokenFilterFactory}
     */
    public CranioCaudalFilterFactory(Map<String, String> args) {
        super(args);
    }

    @Override
    public TokenStream create(TokenStream input) {
        return new CranioCaudalFilter(input, prefixOfWords, endOfWords);
    }
}
|
#!/bin/sh
# Deploys a static site build to a Google Cloud Storage bucket.
# Expects INPUT_* environment variables (presumably supplied by a GitHub
# Action runner — confirm against action.yml).
#Decode Base64-key to json file
echo "$INPUT_SERVICE_KEY" | base64 --decode > "$HOME"/service_key.json
# Authenticate gcloud with the decoded service-account key for the target project.
gcloud auth activate-service-account --key-file="$HOME"/service_key.json --project "$INPUT_PROJECT"
# Mirror the build folder into the bucket (-R: recurse into directories).
gsutil rsync -R "$INPUT_BUILD_FOLDER" gs://"$INPUT_BUCKET_NAME"
# Configure static-website serving: main page (-m) and error page (-e).
gsutil web set -m "$INPUT_HOME_PAGE_PATH" -e "$INPUT_ERROR_PAGE_PATH" gs://"$INPUT_BUCKET_NAME"
# Disable caching so new deployments are served immediately.
gsutil setmeta -h "Cache-Control: no-store" gs://"$INPUT_BUCKET_NAME"/*
|
import assert from 'assert';
import app from '../../src/app';
import {remove_user, userInfo} from '../users.helper';
// Integration tests for the feathers 'users' service.
describe('\'users\' service', () => {
    // Start every test from a clean slate: remove the fixture user if present.
    beforeEach(async () => {
        await remove_user();
    });
    it('registered the service', () => {
        const service = app.service('users');
        assert.ok(service, 'Registered the service');
    });
    it('creates a user', async () => {
        // userInfo is the shared fixture payload from users.helper.
        const user = await app.service('users').create(userInfo);
        // Verify Gravatar has been set as we'd expect
        // assert.strictEqual(user.avatar, 'https://s.gravatar.com/avatar/55502f40dc8b7c769880b10874abc9d0?s=60');
        // // Makes sure the password got encrypted
        // assert.ok(user.password !== '<PASSWORD>');
        // The created record must echo back every fixture field.
        assert.ok(user, 'Created a user successfully');
        assert.strictEqual(user['email'], userInfo.email);
        assert.strictEqual(user['discordId'], userInfo.discordId);
        assert.strictEqual(user['avatar'], userInfo.avatar);
        assert.strictEqual(user['name'], userInfo.name);
    });
    // it('removes password for external requests', async () => {
    //   // Setting `provider` indicates an external request
    //   const params = { provider: 'rest' };
    //
    //   const user = await app.service('users').create({
    //     email: '<EMAIL>',
    //     password: '<PASSWORD>'
    //   }, params);
    //
    //   // Make sure password has been removed
    //   assert.ok(!user.password);
    // });
});
|
<filename>packages/rehype-mathjax/lib/adaptor.browser.js<gh_stars>0
// Re-export MathJax's browser DOM adaptor for the browser build of rehype-mathjax.
const {browserAdaptor} = require('mathjax-full/js/adaptors/browserAdaptor');
module.exports = browserAdaptor;
|
class Solution {
public:
    /**
     * Moves all zeroes in `nums` to the end while preserving the relative
     * order of the non-zero elements. Operates in place.
     *
     * The previous implementation erased each zero inside the loop, which is
     * O(n^2) (every erase shifts the whole tail) and required the fragile
     * erase-then-decrement iterator dance. This two-pointer compaction does
     * one pass plus a tail fill: O(n) time, O(1) extra space.
     */
    void moveZeroes(std::vector<int>& nums) {
        std::size_t write = 0;  // next slot for a non-zero value
        for (std::size_t read = 0; read < nums.size(); ++read) {
            if (nums[read] != 0) {
                nums[write++] = nums[read];
            }
        }
        // Everything from `write` onward was a zero; fill it back in.
        std::fill(nums.begin() + write, nums.end(), 0);
    }
};
import React, {FC} from 'react';
import {A, Avatar, CopyableAccountNumber, EmptyPage, Icon, IconType, Qr} from 'components';
import Account from 'containers/Account';
import {useBooleanState} from 'hooks';
import {TeamMember} from 'types/teams';
import {getContributorByGithubUsername, getTeamMemberByGithubUsername, getTeamPathname} from 'utils/data';
import './ProfileInfo.scss';
/** Props: the GitHub username whose profile page is rendered. */
interface ComponentProps {
  github_username: string;
}

const ProfileInfo: FC<ComponentProps> = ({github_username}) => {
  // Controls visibility of the profile-edit modal (the <Account> container below).
  const [editModalIsOpen, toggleEditModal] = useBooleanState(false);
  const contributorDetails = getContributorByGithubUsername(github_username);
  // memberDetails is only present for core-team members; plain contributors get the reduced layout.
  const memberDetails = getTeamMemberByGithubUsername(github_username);

  // Unknown contributor: render the empty-state page instead of a profile.
  if (!contributorDetails) {
    return <EmptyPage className="ProfileInfo__empty-page" />;
  }

  // Header backdrop with an optional "Team Lead" flag and an edit-pencil overlay.
  const renderBackdrop = (isLead: boolean) => {
    return (
      <div className="ProfileInfo__backdrop-container">
        <div className="ProfileInfo__blue-backdrop" />
        {isLead && <div className="ProfileInfo__lead-flag">Team Lead</div>}
        {/* TODO: make edit icon visible when user is the authenticated profile owner */}
        <Icon
          className="ProfileInfo__edit-profile"
          icon={IconType.pencil}
          onClick={toggleEditModal}
          size={24}
          totalSize={36}
        />
      </div>
    );
  };

  // Team-member-only section: team titles, Slack handle, and GitHub link.
  const renderMemberDetails = (member: TeamMember) => {
    const {githubUsername, slackUsername, teams, titles} = member;
    return (
      <>
        {teams &&
          teams.map((team, index) => (
            <div className="ProfileInfo__member-title" key={team.title}>
              {/* titles is assumed to be index-aligned with teams — TODO confirm in data source */}
              {titles[index]} on <A href={`/teams/${getTeamPathname(team.title)}`}>{team.title}</A>
            </div>
          ))}
        <div className="ProfileInfo__member-slack">
          <Icon className="ProfileInfo__member-slack-icon" icon={IconType.slack} size={18} />
          {slackUsername}
        </div>
        <div className="ProfileInfo__member-github">
          <Icon className="ProfileInfo__member-github-icon" icon={IconType.github} size={18} />
          <A className="ProfileInfo__member-github-link" href={`https://github.com/${github_username}`}>
            {githubUsername}
          </A>
        </div>
      </>
    );
  };

  const {account_number: accountNumber, github_avatar_url: githubAvatarUrl} = contributorDetails;
  return (
    <>
      {/* Edit-profile modal; falls back to the GitHub username when not a team member. */}
      {
        <Account
          accountNumber={accountNumber}
          isOpen={editModalIsOpen}
          displayName={memberDetails ? memberDetails.displayName : github_username}
          toggleModal={toggleEditModal}
          slackName={memberDetails ? memberDetails.slackUsername : ''}
        />
      }
      <div className="ProfileInfo">
        <div className="ProfileInfo__top-section">
          {renderBackdrop(memberDetails?.isLead || false)}
          <Avatar className="ProfileInfo__profile-picture" alt={github_username} size={178} src={githubAvatarUrl} />
        </div>
        <div className="ProfileInfo__details">
          <div className="ProfileInfo__user-details">
            <div className="ProfileInfo__name">{memberDetails ? memberDetails.displayName : github_username}</div>
            {memberDetails && renderMemberDetails(memberDetails)}
          </div>
          {/* Account number shown both as copyable text and as a QR code. */}
          <div className="ProfileInfo__account-details">
            <CopyableAccountNumber
              accountNumber={accountNumber}
              className="ProfileInfo__CopyableAccountNumber"
              isCopyButtonAtBottom
            />
            <div className="ProfileInfo__qr-container">
              <Qr text={accountNumber} width={110} />
            </div>
          </div>
        </div>
      </div>
    </>
  );
};
export default ProfileInfo;
|
import {LiveValidator, HookProps, ControlOutputDataProps, StaticValidator} from "@common-types"
import {LockSubmitBtnErrorData, BeforeSubmitErrorData} from "./../types"
/** Registers the result of a live (as-you-type) validator run. */
export type SetLiveValidatorResult = (validator: LiveValidator, hooksData: HookProps, controlOutputDataProps: ControlOutputDataProps) => void
/** Registers the result of a submit-button-lock validator run; the optional flag controls whether validator settings are checked first. */
export type SetLockSubmitBtnValidatorResult = (validator: StaticValidator, hooksData: HookProps, errorData: LockSubmitBtnErrorData, shouldCheckValidatorSettings?: boolean) => void
/** Registers the result of a before-submit validator run. */
export type SetBeforeSubmitValidatorResult = (validator: StaticValidator, hooksData: HookProps, errorData: BeforeSubmitErrorData) => void
|
<filename>src/transformers/data/processors/utils.py
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, <NAME>PORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import csv
import json
import logging
import random
import pandas as pd
from ...file_utils import is_tf_available, is_torch_available
logger = logging.getLogger(__name__)
class InputExample(object):
    """
    A single training/test example for simple sequence classification.
    Args:
        guid: Unique id for the example.
        text_a: string. The untokenized text of the first sequence. For single
            sequence tasks, only this sequence must be specified.
        text_b: (Optional) string. The untokenized text of the second sequence.
            Only must be specified for sequence pair tasks.
        label: (Optional) string. The label of the example. This should be
            specified for train and dev examples, but not for test examples.
    """

    def __init__(self, guid, text_a, text_b=None, label=None):
        self.guid, self.text_a, self.text_b, self.label = guid, text_a, text_b, label

    def __repr__(self):
        return self.to_json_string()

    def to_dict(self):
        """Serializes this instance to a Python dictionary."""
        return copy.deepcopy(vars(self))

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return "{}\n".format(json.dumps(self.to_dict(), indent=2, sort_keys=True))
class COPAInputExample(object):
    """
    A single training/test example for SuperGLUE COPA task.
    Args:
        guid: Unique id for the example.
        text_pre: string. The premise of the question.
        text_a: String. First choice to the question.
        text_b: String. Second choice to the question.
        question: question.
        label: string. The label of the example. This should be
            specified for train and dev examples, but not for test examples.
    """

    def __init__(self, guid, text_pre, text_a, text_b, question, label):
        self.guid = guid
        self.text_pre = text_pre
        self.text_a, self.text_b = text_a, text_b
        self.question = question
        self.label = label

    def __repr__(self):
        return self.to_json_string()

    def to_dict(self):
        """Serializes this instance to a Python dictionary."""
        return copy.deepcopy(vars(self))

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return "{}\n".format(json.dumps(self.to_dict(), indent=2, sort_keys=True))
class WSCInputExample(object):
    """
    A single training/test example for SuperGLUE WSC task.
    Args:
        guid: Unique id for the example.
        text: string. The premise of the question.
        span_1: Tuple, the index of the targeted word in word level and the target word in sent 1.
        span_2: Tuple, the index of the targeted word in word level and the target word in sent 2.
        label: string. The label of the example. This should be
            specified for train and dev examples, but not for test examples.
    """

    def __init__(self, guid, text, span_1, span_2, label):
        self.guid = guid
        self.text = text
        self.span_1, self.span_2 = span_1, span_2
        self.label = label

    def __repr__(self):
        return self.to_json_string()

    def to_dict(self):
        """Serializes this instance to a Python dictionary."""
        return copy.deepcopy(vars(self))

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return "{}\n".format(json.dumps(self.to_dict(), indent=2, sort_keys=True))
class WiCInputExample(object):
    """
    A single training/test example for SuperGLUE WiC task.
    Args:
        guid: Unique id for the example.
        sent1: string. First sentence contains a word.
        sent2: string. Second sentences contains the same word.
        idxs1: tuple. The beginning and the ending digits of the word in sentence 1 (character level).
        idxs2: tuple. The beginning and the ending digits of the word in sentence 2 (character level).
        label: string. The label of the example indicates whether the meaning of the targeted
            word is consistence across both sentences. This should
            specified for train and dev examples, but not for test examples.
    """

    def __init__(self, guid, sent1, sent2, idxs1, idxs2, label):
        self.guid = guid
        self.sent1, self.sent2 = sent1, sent2
        self.idxs1, self.idxs2 = idxs1, idxs2
        self.label = label

    def __repr__(self):
        return self.to_json_string()

    def to_dict(self):
        """Serializes this instance to a Python dictionary."""
        return copy.deepcopy(vars(self))

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return "{}\n".format(json.dumps(self.to_dict(), indent=2, sort_keys=True))
class InputFeatures(object):
    """
    A single set of features of data.
    Args:
        input_ids: Indices of input sequence tokens in the vocabulary.
        attention_mask: Mask to avoid performing attention on padding token indices.
            Mask values selected in ``[0, 1]``:
            Usually ``1`` for tokens that are NOT MASKED, ``0`` for MASKED (padded) tokens.
        token_type_ids: Segment token indices to indicate first and second portions of the inputs.
        label: Label corresponding to the input
    """

    def __init__(self, input_ids, attention_mask=None, token_type_ids=None, label=None):
        self.input_ids = input_ids
        self.attention_mask = attention_mask
        self.token_type_ids = token_type_ids
        self.label = label

    def __repr__(self):
        return self.to_json_string()

    def to_dict(self):
        """Serializes this instance to a Python dictionary."""
        return copy.deepcopy(vars(self))

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return "{}\n".format(json.dumps(self.to_dict(), indent=2, sort_keys=True))
class InputFeatures_w(object):
    """
    A single set of features of data for WSC and WiC from superGLUE.
    Args:
        input_ids: Indices of input sequence tokens in the vocabulary.
        attention_mask: Mask to avoid performing attention on padding token indices.
            Mask values selected in ``[0, 1]``:
            Usually ``1`` for tokens that are NOT MASKED, ``0`` for MASKED (padded) tokens.
        token_type_ids: Segment token indices to indicate first and second portions of the inputs.
        label: Label corresponding to the input
    """

    def __init__(self, input_ids, input_mask, segment_ids, span_1_mask, span_1_text,
                 span_2_mask, span_2_text, label):
        self.input_ids = input_ids
        self.input_mask = input_mask
        self.segment_ids = segment_ids
        self.span_1_mask, self.span_1_text = span_1_mask, span_1_text
        self.span_2_mask, self.span_2_text = span_2_mask, span_2_text
        self.label = label

    def __repr__(self):
        return self.to_json_string()

    def to_dict(self):
        """Serializes this instance to a Python dictionary."""
        return copy.deepcopy(vars(self))

    def to_json_string(self):
        """Serializes this instance to a JSON string."""
        return "{}\n".format(json.dumps(self.to_dict(), indent=2, sort_keys=True))
class DataProcessor(object):
    """Base class for data converters for sequence classification data sets."""

    def get_example_from_tensor_dict(self, tensor_dict):
        """Gets an example from a dict with tensorflow tensors
        Args:
            tensor_dict: Keys and values should match the corresponding Glue
                tensorflow_dataset examples.
        """
        raise NotImplementedError()

    def get_train_examples(self, data_dir):
        """Gets a collection of `InputExample`s for the train set."""
        raise NotImplementedError()

    def get_dev_examples(self, data_dir):
        """Gets a collection of `InputExample`s for the dev set."""
        raise NotImplementedError()

    def get_labels(self):
        """Gets the list of labels for this data set."""
        raise NotImplementedError()

    def tfds_map(self, example):
        """Some tensorflow_datasets datasets are not formatted the same way the GLUE datasets are.
        This method converts examples to the correct format."""
        # Hoisted to avoid calling (a possibly expensive) get_labels() twice.
        labels = self.get_labels()
        if len(labels) > 1:
            example.label = labels[int(example.label)]
        return example

    @classmethod
    def _read_tsv(cls, input_file, quotechar=None):
        """Reads a tab separated value file."""
        with open(input_file, "r", encoding="utf-8-sig") as f:
            return list(csv.reader(f, delimiter="\t", quotechar=quotechar))

    @classmethod
    def _read_json_to_list(cls, input_file):
        """Reads a JSON-lines file and returns its rows as a list of lists."""
        # Fixed: first parameter of a @classmethod is the class, named `cls`,
        # and the file handle is now closed deterministically via `with`.
        with open(input_file) as f:
            df = pd.read_json(f, lines=True)
        return df.values.tolist()

    @classmethod
    def _read_json_to_dict(cls, input_file):
        """Reads a JSON-lines file and returns a list of parsed dicts, one per line."""
        # Fixed: the previous version iterated an open() result without ever
        # closing it (file-handle leak) and mislabeled `cls` as `self`.
        with open(input_file) as f:
            return [json.loads(ln) for ln in f]
class SingleSentenceClassificationProcessor(DataProcessor):
    """ Generic processor for a single sentence classification data set."""

    def __init__(self, labels=None, examples=None, mode="classification", verbose=False):
        # `mode` selects label handling in get_features():
        # "classification" maps label strings to indices, "regression" casts to float.
        self.labels = [] if labels is None else labels
        self.examples = [] if examples is None else examples
        self.mode = mode
        self.verbose = verbose

    def __len__(self):
        return len(self.examples)

    def __getitem__(self, idx):
        # A slice returns a new processor over the sliced examples (labels shared);
        # a plain index returns the underlying InputExample.
        if isinstance(idx, slice):
            return SingleSentenceClassificationProcessor(labels=self.labels, examples=self.examples[idx])
        return self.examples[idx]

    @classmethod
    def create_from_csv(
        cls, file_name, split_name="", column_label=0, column_text=1, column_id=None, skip_first_row=False, **kwargs
    ):
        # Builds a fresh processor populated from a tab-separated file;
        # see add_examples_from_csv for the column arguments.
        processor = cls(**kwargs)
        processor.add_examples_from_csv(
            file_name,
            split_name=split_name,
            column_label=column_label,
            column_text=column_text,
            column_id=column_id,
            skip_first_row=skip_first_row,
            overwrite_labels=True,
            overwrite_examples=True,
        )
        return processor

    @classmethod
    def create_from_examples(cls, texts_or_text_and_labels, labels=None, **kwargs):
        # Builds a fresh processor from in-memory texts or (text, label) pairs.
        processor = cls(**kwargs)
        processor.add_examples(texts_or_text_and_labels, labels=labels)
        return processor

    def add_examples_from_csv(
        self,
        file_name,
        split_name="",
        column_label=0,
        column_text=1,
        column_id=None,
        skip_first_row=False,
        overwrite_labels=False,
        overwrite_examples=False,
    ):
        """Reads a tab-separated file and appends its rows as examples.

        column_label/column_text/column_id are column positions; when
        column_id is None a guid of the form "<split_name>-<row>" is generated.
        """
        lines = self._read_tsv(file_name)
        if skip_first_row:
            lines = lines[1:]
        texts = []
        labels = []
        ids = []
        for (i, line) in enumerate(lines):
            texts.append(line[column_text])
            labels.append(line[column_label])
            if column_id is not None:
                ids.append(line[column_id])
            else:
                guid = "%s-%s" % (split_name, i) if split_name else "%s" % i
                ids.append(guid)
        return self.add_examples(
            texts, labels, ids, overwrite_labels=overwrite_labels, overwrite_examples=overwrite_examples
        )

    def add_examples(
        self, texts_or_text_and_labels, labels=None, ids=None, overwrite_labels=False, overwrite_examples=False
    ):
        """Appends (or, with the overwrite_* flags, replaces) examples and
        updates the known label set. Each item may be a bare text or a
        (text, label) pair; a pair is only unpacked when `labels` is None."""
        assert labels is None or len(texts_or_text_and_labels) == len(labels)
        assert ids is None or len(texts_or_text_and_labels) == len(ids)
        if ids is None:
            ids = [None] * len(texts_or_text_and_labels)
        if labels is None:
            labels = [None] * len(texts_or_text_and_labels)
        examples = []
        added_labels = set()
        for (text_or_text_and_label, label, guid) in zip(texts_or_text_and_labels, labels, ids):
            if isinstance(text_or_text_and_label, (tuple, list)) and label is None:
                text, label = text_or_text_and_label
            else:
                text = text_or_text_and_label
            added_labels.add(label)
            examples.append(InputExample(guid=guid, text_a=text, text_b=None, label=label))
        # Update examples
        if overwrite_examples:
            self.examples = examples
        else:
            self.examples.extend(examples)
        # Update labels
        if overwrite_labels:
            self.labels = list(added_labels)
        else:
            self.labels = list(set(self.labels).union(added_labels))
        return self.examples

    def get_features(
        self,
        tokenizer,
        max_length=None,
        pad_on_left=False,
        pad_token=0,
        mask_padding_with_zero=True,
        return_tensors=None,
    ):
        """
        Convert examples in a list of ``InputFeatures``
        Args:
            tokenizer: Instance of a tokenizer that will tokenize the examples
            max_length: Maximum example length
            task: GLUE task
            label_list: List of labels. Can be obtained from the processor using the ``processor.get_labels()`` method
            output_mode: String indicating the output mode. Either ``regression`` or ``classification``
            pad_on_left: If set to ``True``, the examples will be padded on the left rather than on the right (default)
            pad_token: Padding token
            mask_padding_with_zero: If set to ``True``, the attention mask will be filled by ``1`` for actual values
                and by ``0`` for padded values. If set to ``False``, inverts it (``1`` for padded values, ``0`` for
                actual values)
        Returns:
            If the ``examples`` input is a ``tf.data.Dataset``, will return a ``tf.data.Dataset``
            containing the task-specific features. If the input is a list of ``InputExamples``, will return
            a list of task-specific ``InputFeatures`` which can be fed to the model.
        """
        if max_length is None:
            max_length = tokenizer.max_len
        # Label indices follow the current order of self.labels.
        label_map = {label: i for i, label in enumerate(self.labels)}
        all_input_ids = []
        # First pass: tokenize everything so we can pad to the longest example.
        for (ex_index, example) in enumerate(self.examples):
            if ex_index % 10000 == 0:
                logger.info("Tokenizing example %d", ex_index)
            input_ids = tokenizer.encode(
                example.text_a, add_special_tokens=True, max_length=min(max_length, tokenizer.max_len),
            )
            all_input_ids.append(input_ids)
        batch_length = max(len(input_ids) for input_ids in all_input_ids)
        features = []
        # Second pass: build padded input ids and attention masks.
        for (ex_index, (input_ids, example)) in enumerate(zip(all_input_ids, self.examples)):
            if ex_index % 10000 == 0:
                logger.info("Writing example %d/%d" % (ex_index, len(self.examples)))
            # The mask has 1 for real tokens and 0 for padding tokens. Only real
            # tokens are attended to.
            attention_mask = [1 if mask_padding_with_zero else 0] * len(input_ids)
            # Zero-pad up to the sequence length.
            padding_length = batch_length - len(input_ids)
            if pad_on_left:
                input_ids = ([pad_token] * padding_length) + input_ids
                attention_mask = ([0 if mask_padding_with_zero else 1] * padding_length) + attention_mask
            else:
                input_ids = input_ids + ([pad_token] * padding_length)
                attention_mask = attention_mask + ([0 if mask_padding_with_zero else 1] * padding_length)
            assert len(input_ids) == batch_length, "Error with input length {} vs {}".format(
                len(input_ids), batch_length
            )
            assert len(attention_mask) == batch_length, "Error with input length {} vs {}".format(
                len(attention_mask), batch_length
            )
            if self.mode == "classification":
                label = label_map[example.label]
            elif self.mode == "regression":
                label = float(example.label)
            else:
                raise ValueError(self.mode)
            if ex_index < 5 and self.verbose:
                logger.info("*** Example ***")
                logger.info("guid: %s" % (example.guid))
                logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids]))
                logger.info("attention_mask: %s" % " ".join([str(x) for x in attention_mask]))
                logger.info("label: %s (id = %d)" % (example.label, label))
            features.append(InputFeatures(input_ids=input_ids, attention_mask=attention_mask, label=label))
        if return_tensors is None:
            return features
        elif return_tensors == "tf":
            if not is_tf_available():
                raise RuntimeError("return_tensors set to 'tf' but TensorFlow 2.0 can't be imported")
            # Imported lazily so the module works without TensorFlow installed.
            import tensorflow as tf

            def gen():
                for ex in features:
                    yield ({"input_ids": ex.input_ids, "attention_mask": ex.attention_mask}, ex.label)

            dataset = tf.data.Dataset.from_generator(
                gen,
                ({"input_ids": tf.int32, "attention_mask": tf.int32}, tf.int64),
                ({"input_ids": tf.TensorShape([None]), "attention_mask": tf.TensorShape([None])}, tf.TensorShape([])),
            )
            return dataset
        elif return_tensors == "pt":
            if not is_torch_available():
                raise RuntimeError("return_tensors set to 'pt' but PyTorch can't be imported")
            # Imported lazily so the module works without PyTorch installed.
            import torch
            from torch.utils.data import TensorDataset

            all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
            all_attention_mask = torch.tensor([f.attention_mask for f in features], dtype=torch.long)
            if self.mode == "classification":
                all_labels = torch.tensor([f.label for f in features], dtype=torch.long)
            elif self.mode == "regression":
                all_labels = torch.tensor([f.label for f in features], dtype=torch.float)
            dataset = TensorDataset(all_input_ids, all_attention_mask, all_labels)
            return dataset
        else:
            raise ValueError("return_tensors should be one of 'tf' or 'pt'")
|
#!/bin/bash
# Firewall rule fragment: SSDP/UPnP discovery and DLNA media serving on the LAN.
# $1 is forwarded to the sourced config scripts (presumably an iptables
# add/delete switch — confirm against uf.config.sh).
ACTION=$1
# Shared variables ($iptables, $ACT, $ACTION_INPUT, $ACTION_OUTPUT, $LAN, chain
# names) are defined by these two config scripts.
source $(dirname $0)/uf.config.sh
source $(dirname $0)/uf.config.interfaces.sh
echo -e "$ACT $(basename $0)"
# SSDP/UPnP discovery (udp 1900) in and out on the LAN interface.
$iptables $ACTION_INPUT -i $LAN -p udp --sport 1900 -j LOG-FIL-IN-ACCEPT
$iptables $ACTION_OUTPUT -o $LAN -p udp --dport 1900 -j LOG-FIL-OUT-ACCEPT
# DLNA media server (tcp 8200) outbound on the LAN interface.
$iptables $ACTION_OUTPUT -o $LAN -p tcp --dport 8200 -j LOG-FIL-OUT-ACCEPT
|
def speed_improvement(time_taken_per_frame, frame_samples=480, sample_rate=48000):
    """Formats how much faster than real time a frame is processed.

    Args:
        time_taken_per_frame: Measured processing time of one frame, in seconds.
            Must be positive.
        frame_samples: Number of audio samples per frame (default 480,
            the previously hard-coded value).
        sample_rate: Sample rate in Hz (default 48000, the previously
            hard-coded value).

    Returns:
        A string like "2.00X faster than real".

    Raises:
        ValueError: If time_taken_per_frame is not positive (previously this
            raised an opaque ZeroDivisionError for 0).
    """
    if time_taken_per_frame <= 0:
        raise ValueError("time_taken_per_frame must be positive")
    # Real-time budget of one frame: samples / rate seconds.
    optimized_time_per_frame = frame_samples / sample_rate
    # Calculate the speed improvement ratio
    speed_improvement_ratio = optimized_time_per_frame / time_taken_per_frame
    # Format the result as a string
    return "{:.2f}X faster than real".format(speed_improvement_ratio)
require 'Post'
# Scrapes board listings from the JBNU (Jeonbuk National University) website.
# NOTE(review): relies on Net::HTTP, URI and Nokogiri being required elsewhere
# (only 'Post' is required in this file) — confirm against the loader.
class JBNUParser
  # String[] boardId = {"K_PUBLIC","K_NOTICE","K_RECRUIT","K_SEMINAR","K_STUDY","K_ALBA","K_MARKET","K_LIVE","K_FOUND"};
  # Maps a board index (0-8) to the numeric menuID used by the site.
  def boardId(id)
    # boardId_t = ["K_PUBLIC","K_NOTICE","K_RECRUIT","K_SEMINAR","K_STUDY","K_ALBA","K_MARKET","K_LIVE","K_FOUND"]
    boardId_t = ["139","139","140","141","150","425","152","153","154"]
    return boardId_t[id]
  end

  # Fetches the board page (plain listing, or the legacy search endpoint when
  # a search word is given) and returns the parsed Nokogiri document.
  # NOTE(review): URI.encode was deprecated in Ruby 2.7 and removed in 3.0 —
  # confirm target Ruby version or migrate to URI::DEFAULT_PARSER.escape.
  def requestHTML(id,word)
    if word == ""
      uri = URI(URI.encode("https://www.jbnu.ac.kr/kor/?menuID="+boardId(id)))
    else
      uri = URI(URI.encode("http://www.chonbuk.ac.kr/bb/board.php?boardID="+boardId(id)+"&cTitle=Y&query="+word))
    end
    req = Net::HTTP::Get.new(uri)
    res = Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == 'https') { |http| http.request(req) }
    doc = Nokogiri::HTML(res.body)
    return doc
  end

  # Entry point: board 5 uses the period-bearing layout (requestPosts),
  # everything else the plain notice layout (requestNotice).
  def getPosts(id,word)
    case id
    when 5
      return requestPosts(id,word)
    else
      return requestNotice(id,word)
    end
  end

  # Parses rows whose title spans two cells and whose period is the third
  # .mview cell; returns an array of Post objects.
  def requestPosts(id,word)
    doc = requestHTML(id,word)
    posts = []
    doc.css(".page_list//tbody//tr").each_with_index do |post_data, index|
      number = post_data.css(".mnom")[0].inner_text
      title = post_data.css(".left span")[0].inner_text.to_s.strip + "\n" + post_data.css(".mnom")[1].inner_text
      writer = post_data.css(".mview")[0].inner_text
      period = post_data.css(".mview")[2].inner_text
      content_link = post_data.css(".left span a")[0].attr('href')
      post = Post.new(number,title,content_link,period,writer,boardId(id))
      posts << post
    end
    return posts
  end

  # Parses plain notice rows (period is the second .mview cell);
  # returns an array of Post objects.
  def requestNotice(id,word)
    doc = requestHTML(id,word)
    posts = []
    doc.css(".page_list//tbody//tr").each_with_index do |post_data, index|
      number = post_data.css(".mnom")[0].inner_text
      title = post_data.css(".left span")[0].inner_text
      writer = post_data.css(".mview")[0].inner_text
      period = post_data.css(".mview")[1].inner_text
      content_link = post_data.css(".left span a")[0].attr('href')
      post = Post.new(number,title,content_link,period,writer,boardId(id))
      posts << post
    end
    return posts
  end
end
|
def is_list_unique(list):
    """Returns True if the given sequence contains no duplicate (hashable) items.

    Bug fix: the parameter shadows the built-in ``list``, so the previous body
    ``list(dict.fromkeys(list))`` tried to *call the argument* and raised
    ``TypeError: 'list' object is not callable`` on every invocation. The
    parameter name is kept for backward compatibility with keyword callers;
    the body now avoids calling the shadowed builtin.
    """
    return len(list) == len(set(list))
/*
* Human Cell Atlas
* https://www.humancellatlas.org/
*
* HCA Data Portal TOC spy component.
* Provides event listener for scroll and hashchange events.
*/
// Core dependencies
import React from 'react';
/**
 * Table-of-contents spy: listens to scroll and hashchange events and reports
 * the anchor currently in view (as "#id") via the onTOCChange prop.
 */
class TOCSpy extends React.Component {

    // Map of document offset (px from top) -> anchor element id, filled on mount.
    // Iteration relies on Map insertion order matching DOM (top-to-bottom) order.
    elementIdsByAnchorFromTop = new Map();

    constructor(props) {
        super(props);
        this.state = {activeTOC: ''};
        // No .bind() calls needed: the handlers below are arrow-function class
        // fields and are therefore already bound to the instance.
    }

    componentDidMount() {
        this.getPageAnchors();
        window.addEventListener('scroll', this.handleScroll);
        window.addEventListener('hashchange', this.handleHashChange, false);
    };

    componentWillUnmount() {
        window.removeEventListener('scroll', this.handleScroll);
        window.removeEventListener('hashchange', this.handleHashChange, false);
    };

    // Collects anchorable elements (h1-h3 headings and divs with an id) inside
    // #hcaContent, keyed by their absolute vertical position.
    getPageAnchors = () => {
        let anchorables = Array.from(document.getElementById('hcaContent').querySelectorAll("[id]"));
        let currentScrollPos = window.scrollY;
        anchorables.forEach(pageAnchor => {
            if (pageAnchor.tagName.toUpperCase().split('H')[1] <= 3 || pageAnchor.tagName === 'DIV') {
                this.elementIdsByAnchorFromTop.set((pageAnchor.getBoundingClientRect().top + currentScrollPos), pageAnchor.id);
            }
        });
    };

    handleHashChange = () => {
        // Bug fix: setState is asynchronous, so reading this.state.activeTOC
        // immediately after setState handed the *previous* hash to onTOCChange.
        // Pass the new value directly instead.
        const activeTOC = window.location.hash;
        this.setState({activeTOC});
        this.props.onTOCChange(activeTOC);
    };

    handleScroll = () => {
        // 88px offsets account for the fixed site header.
        let currentScrollPos = window.scrollY + 88;
        let endScrollPos = document.body.clientHeight - window.innerHeight + 88;
        // Check not at the bottom of the page
        if (currentScrollPos !== endScrollPos) {
            // Find the last anchor whose top is above the current scroll position.
            let currentAnchorPos;
            for (let anchorPos of this.elementIdsByAnchorFromTop.keys()) {
                if (currentScrollPos >= anchorPos) {
                    currentAnchorPos = anchorPos;
                }
                else {
                    break; // exit iterator
                }
            }
            let currentElementId = `#${this.elementIdsByAnchorFromTop.get(currentAnchorPos)}`;
            if (currentElementId !== this.state.activeTOC) {
                // Same stale-state bug fix as handleHashChange: notify the parent
                // with the value being set, not with this.state (not yet updated).
                if (currentAnchorPos !== undefined) {
                    window.history.pushState(null, '', currentElementId);
                    this.setState({activeTOC: currentElementId});
                    this.props.onTOCChange(currentElementId);
                }
                else {
                    // Scrolled above the first anchor: clear the hash.
                    window.history.pushState(null, '', window.location.pathname);
                    this.setState({activeTOC: ''});
                    this.props.onTOCChange('');
                }
            }
        }
    };

    render() {
        return this.props.children
    }
}

export default TOCSpy;
|
#!/bin/bash
# Launches the training runner module with the simulation-DB configuration.
# Must be run from the repository root so the `scripts` package resolves.
python -m scripts.runner --traindb scripts/data/sim_db_config.json
|
#!/usr/bin/env bash
# Trigger with:
# rofi -lines 5 -eh 3 -show ddgrsearch -modi "ddgrsearch:ddgr_search.sh"
# for detailed instructions check rofi-script docs:
# https://github.com/davatorium/rofi/blob/next/doc/rofi-script.5.markdown
# Environment variables:
# ROFI_RETV: an integer with the current state
# 0: Initial call of script.
# 1: Selected an entry.
# 2: Selected a custom entry.
# 10-28: Custom keybinding 1-19 ( need to be explicitely enabled by script ).
# ROFI_INFO: is set when an entry is selected. It's set with the parameter
# `info`. Example:
# Selecting the entry generated by:
# `echo -en "aap\0icon\x1ffolder\x1finfo\x1ftest\n"`
# would set ROFI_INFO with "test"
# Set the prompt string and delimiter. The delimiter is set to `;` so that we
# can have multi-line entries
echo -en "\0prompt\x1fddgr\n"
echo -en "\0delim\x1f;"
# Check current state and act accordingly
if [ "$ROFI_RETV" -eq "1" ]; then
    # An entry was selected, so just open the browser
    # Using python for compatibility purposes, across OSes
    # coproc detaches the browser launch so rofi is not blocked; `|| true`
    # keeps the script's exit status clean if the spawn fails.
    coproc ( python -m webbrowser "$ROFI_INFO" > /dev/null 2>&1 ) || true
fi
if [ "$ROFI_RETV" -eq "2" ]; then
    # The search box was populated and returned. Trigger the search engine!
    # Using printf to escape "$@" which might contain special characters like `!`
    # ddgr emits JSON (-C: no color, -x: expand URLs, -n5: five results) which
    # the inline Python below reformats into rofi's `;`-delimited entry syntax,
    # attaching each result URL via the rofi `info` field.
    ddgr -n5 -x -C --json $(printf '%q' "$@") | python3 <(cat<< EOF
import sys
import json

def parse_json(json_str):
    json_dict = json.loads(json_str)
    elems = []
    for e in json_dict:
        title = e['title'].replace(';', ',').strip()
        url = e['url'].replace(';', ',').strip()
        abstract = e['abstract'].replace(';', ',').strip() + f"\0info\x1f{url};"
        elems.append((title, url, abstract))
    return elems

def print_entries(elems):
    for t, u, a in elems:
        print(f"* {t}")
        print(f"  {u}")
        print(f"  {a}", end='')

def main():
    data = sys.stdin.read()
    entries = parse_json(data)
    print_entries(entries)

if __name__ == '__main__':
    try:
        main()
    except:
        pass
EOF
)
fi
|
<filename>processing/src/test/java/org/apache/druid/segment/vector/VectorSelectorUtilsTest.java<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.vector;
import org.apache.druid.collections.IntSetTestUtility;
import org.apache.druid.collections.bitmap.ImmutableBitmap;
import org.apache.druid.collections.bitmap.MutableBitmap;
import org.apache.druid.collections.bitmap.WrappedBitSetBitmap;
import org.apache.druid.collections.bitmap.WrappedConciseBitmap;
import org.apache.druid.collections.bitmap.WrappedImmutableConciseBitmap;
import org.apache.druid.collections.bitmap.WrappedRoaringBitmap;
import org.apache.druid.extendedset.intset.ImmutableConciseSet;
import org.junit.Assert;
import org.junit.Test;
import org.roaringbitmap.PeekableIntIterator;
import java.util.Set;
/**
 * Tests for {@code VectorSelectorUtils.populateNullVector} across the
 * supported bitmap implementations (BitSet-, Concise- and Roaring-backed,
 * both mutable and immutable variants).
 */
public class VectorSelectorUtilsTest
{
  @Test
  public void testBitSetNullVector()
  {
    final WrappedBitSetBitmap bitmap = new WrappedBitSetBitmap();
    populate(bitmap);
    assertNullVector(bitmap);
  }

  @Test
  public void testConciseMutableNullVector()
  {
    final WrappedConciseBitmap bitmap = new WrappedConciseBitmap();
    populate(bitmap);
    assertNullVector(bitmap);
  }

  @Test
  public void testConciseImmutableNullVector()
  {
    final WrappedConciseBitmap bitmap = new WrappedConciseBitmap();
    populate(bitmap);
    final ImmutableBitmap immutable = new WrappedImmutableConciseBitmap(
        ImmutableConciseSet.newImmutableFromMutable(bitmap.getBitmap())
    );
    assertNullVector(immutable);
  }

  @Test
  public void testRoaringMutableNullVector()
  {
    WrappedRoaringBitmap bitmap = new WrappedRoaringBitmap();
    populate(bitmap);
    assertNullVector(bitmap);
  }

  @Test
  public void testRoaringImmutableNullVector()
  {
    WrappedRoaringBitmap bitmap = new WrappedRoaringBitmap();
    populate(bitmap);
    assertNullVector(bitmap.toImmutableBitmap());
  }

  /** Adds the shared fixture's set bits ({@link IntSetTestUtility#getSetBits}) to the bitmap. */
  public static void populate(MutableBitmap bitmap)
  {
    for (int i : IntSetTestUtility.getSetBits()) {
      bitmap.add(i);
    }
  }

  private void assertNullVector(ImmutableBitmap bitmap)
  {
    PeekableIntIterator iterator = bitmap.peekableIterator();
    Set<Integer> nulls = IntSetTestUtility.getSetBits();
    // Case 1: one full vector over rows [0, 32) — every set bit must be flagged null.
    final int vectorSize = 32;
    final boolean[] nullVector = new boolean[vectorSize];
    ReadableVectorOffset someOffset = new NoFilterVectorOffset(vectorSize, 0, vectorSize);
    VectorSelectorUtils.populateNullVector(nullVector, someOffset, iterator);
    for (int i = 0; i < vectorSize; i++) {
      Assert.assertEquals(nulls.contains(i), nullVector[i]);
    }
    // Case 2: the same rows read through four consecutive 8-row windows, with a
    // null output vector: populateNullVector may return null for a window that
    // contains no null rows, which this loop treats as "all false".
    iterator = bitmap.peekableIterator();
    final int smallerVectorSize = 8;
    boolean[] smallVector = null;
    for (int offset = 0; offset < smallerVectorSize * 4; offset += smallerVectorSize) {
      ReadableVectorOffset smallOffset = new NoFilterVectorOffset(smallerVectorSize, offset, offset + smallerVectorSize);
      smallVector = VectorSelectorUtils.populateNullVector(smallVector, smallOffset, iterator);
      for (int i = 0; i < smallerVectorSize; i++) {
        if (smallVector == null) {
          Assert.assertFalse(nulls.contains(offset + i));
        } else {
          Assert.assertEquals(nulls.contains(offset + i), smallVector[i]);
        }
      }
      smallVector = null;
    }
    // Case 3: a bitmap-driven offset selecting exactly the null rows, so every
    // populated vector slot must be flagged null.
    iterator = bitmap.peekableIterator();
    ReadableVectorOffset allTheNulls = new BitmapVectorOffset(8, bitmap, 0, 22);
    smallVector = VectorSelectorUtils.populateNullVector(smallVector, allTheNulls, iterator);
    for (int i = 0; i < nulls.size(); i++) {
      Assert.assertTrue(smallVector[i]);
    }
  }
}
|
#!/bin/bash
# Detect the host operating system and record it in the global $platform
# variable ("linux", "macos", or "windows"). Unrecognized systems leave
# $platform unset, matching the original behavior.
function identify_platform {
    uname=$(uname)
    case "$uname" in
        Linux)    platform="linux" ;;
        Darwin)   platform="macos" ;;
        MINGW64*) platform="windows" ;;
    esac
}
# Prepare an isolated terraform environment:
#   * pick a random resource-name seed,
#   * clean artifacts from previous runs,
#   * download and unpack the requested terraform release,
#   * prepend the unpacked binary to PATH.
# Globals set: SEED, TF_VERSION, TEMP_DIRECTORY, TF_ZIP, TF_PATH.
function setup {
    # Random-ish suffix (100-1099) so concurrent runs don't collide on names.
    SEED=$(( ( RANDOM % 1000 ) + 100 ))
    SEED="basicstest${SEED}"
    identify_platform
    # Honor an externally supplied TERRAFORM_VERSION; default to 0.8.5.
    TF_VERSION="${TERRAFORM_VERSION:-0.8.5}"
    TEMP_DIRECTORY="./tmp"
    # Derive the paths BEFORE cleaning: the original ran `rm -Rf ${TF_PATH}`
    # while TF_PATH was still empty, so the old download never got removed.
    TF_ZIP="${TEMP_DIRECTORY}/terraform_${TF_VERSION}.zip"
    TF_PATH="${TEMP_DIRECTORY}/terraform_${TF_VERSION}/"
    # Remove state from previous runs.
    rm -Rf "${TF_PATH}"
    rm -Rf .terraform
    rm -Rf *.tfstate*
    # -p: do not fail when ./tmp already exists (plain mkdir did).
    mkdir -p "${TEMP_DIRECTORY}"
    if [[ "$platform" == 'linux' ]]; then
        url="https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_linux_amd64.zip"
    elif [[ "$platform" == 'macos' ]]; then
        url="https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_darwin_amd64.zip"
    elif [[ "$platform" == 'windows' ]]; then
        url="https://releases.hashicorp.com/terraform/${TF_VERSION}/terraform_${TF_VERSION}_windows_amd64.zip"
    fi
    # -f: fail on HTTP errors instead of saving the error page as a zip.
    curl -f -o "${TF_ZIP}" "${url}"
    # -o: overwrite without prompting if files are already present.
    unzip -o "${TF_ZIP}" -d "${TF_PATH}"
    PATH="${TF_PATH}:${PATH}"
}
# Run a terraform apply/show/destroy cycle against the seeded resource
# names, push the resulting state to Azure, then clean local artifacts.
function smoke {
    # %s keeps printf from interpreting '%' or '\' sequences inside SEED.
    printf 'Running smoke test using %s.\n' "${SEED}"
    terraform apply -var "resource_group_name=${SEED}" -var "dns_zone_name=example.com" -var "storage_account_name=${SEED}"
    terraform show
    ./push_state_to_azure.sh "${SEED}" "${SEED}"
    terraform destroy -force
    rm -Rf "${TF_PATH}"
    rm -Rf .terraform
    rm -Rf *.tfstate*
}
# Entry point: install terraform, then run the smoke test.
setup
smoke
|
# Run GradientAnisotropicDiffusionImageFilter over each subject's AVG T1/T2
# volumes. Input file lists one subject directory per line; the 5-digit
# path component is taken as the subject id.
if [ $# != 2 ]; then
    echo "Wrong Usage!!! [USAGE::::]"
    echo "$0 [List File Name] [BRAINS build]"
    # Non-zero status so callers can detect the usage error (was plain `exit`).
    exit 1
fi
ListFile=$1;
source_dir=$2;
GADEXE="$source_dir/GradientAnisotropicDiffusionImageFilter"
## specify source directory for execution
while read line dummy
do
    for mod in T1 T2
    do
        # Subject id = the 5-digit component of the directory path.
        subject=(`echo $line |tr "/" "\n"|grep '[0-9]\{5\}'`)
        command="$GADEXE \
        --inputVolume $line/${subject}_AVG_${mod}.nii.gz \
        --outputVolume $line/${subject}_AVG_${mod}_GAD.nii.gz \
        --timeStep 0.05 \
        --numberOfIterations 5 \
        --conductance 1"
        echo "======================================================================="
        echo $command
        # Intentionally unquoted: relies on word splitting to build argv.
        $command
        echo "======================================================================="
    done
done < ${ListFile}
|
from unittest.mock import MagicMock
from belvo import resources
def test_accounts_create_token_if_given(api_session):
    """Accounts.create must forward the caller's MFA token in the POST payload.

    Fix: the original passed one placeholder token to create() but asserted a
    different placeholder in the payload, so the test could never pass; the
    same literal is now used in both places.
    """
    accounts = resources.Accounts(api_session)
    accounts.session.post = MagicMock()
    token = "fake-token"
    accounts.create("fake-link-uuid", token=token)
    accounts.session.post.assert_called_with(
        "/api/accounts/",
        data={"link": "fake-link-uuid", "save_data": True, "token": token},
        raise_exception=False,
    )
def test_account_resume(api_session):
    """Accounts.resume must PATCH the session id and token it was given.

    Fix: the original called resume() with "fake-token" but asserted a
    different placeholder in the payload; the assertion now matches the
    argument actually passed.
    """
    accounts = resources.Accounts(api_session)
    accounts.session.patch = MagicMock()
    accounts.resume("fake-session", "fake-token")
    accounts.session.patch.assert_called_with(
        "/api/accounts/",
        data={"session": "fake-session", "token": "fake-token"},
        raise_exception=False,
    )
|
def longest_common_prefix_str(str1, str2):
    """Return the longest common prefix of two strings.

    Fix: the original iterated over ``range(len(str1))`` while indexing
    ``str2[i]``, raising IndexError whenever ``str2`` was shorter than
    ``str1``. ``zip`` stops at the shorter string, handling empty inputs too.
    """
    prefix_chars = []
    for ch1, ch2 in zip(str1, str2):
        if ch1 != ch2:
            break
        prefix_chars.append(ch1)
    return "".join(prefix_chars)
#!/usr/bin/env bash
# Ensure the ~/src project directory exists, using the repository's shared
# helper functions (info_echo) for user-facing output.
preflight_dir="$(cd "$(dirname "$0")/.." && pwd -P)"
source "$preflight_dir/modules/functions.bash"
if ! test -e ~/src ; then
  info_echo "Creating project directory at ~/src"
  # -p: idempotent if the directory appears between the test and the mkdir.
  mkdir -p ~/src
else
  info_echo "Project dir ~/src already exists"
fi
|
# stereosupersonic/podi — sitemap generator configuration
# Set the host name for URL creation
host = if Rails.env.test?
  "wartenberger.test.com"
elsif Rails.env.development?
  "localhost:3000"
else
  # Production: prefer an explicit host, fall back to the Heroku app domain.
  ENV["DEFAULT_URL_HOST"] || "#{ENV.fetch("HEROKU_APP_NAME")}.herokuapp.com"
end
protocol = Rails.application.config.force_ssl ? "https" : "http"
SitemapGenerator::Sitemap.default_host = "#{protocol}://#{host}"
# Generate locally under tmp/ — the sitemap is served from S3, not /public.
SitemapGenerator::Sitemap.public_path = "tmp/sitemap"
# Where you want your sitemap.xml.gz file to be uploaded.
SitemapGenerator::Sitemap.adapter = SitemapGenerator::AwsSdkAdapter.new(
  Rails.application.config.aws_bucket_name,
  aws_access_key_id: Rails.application.config.aws_access_key,
  aws_secret_access_key: Rails.application.config.aws_secret_key,
  aws_region: Rails.application.config.aws_s3_region
)
# The full path to your bucket
SitemapGenerator::Sitemap.sitemaps_host = "https://#{Rails.application.config.aws_bucket_name}.s3.amazonaws.com"
if Rails.application.config.aws_bucket_name.present?
  SitemapGenerator::Sitemap.create do
    add episodes_path, priority: 0.7, changefreq: "daily"
    Episode.published.each do |episode|
      add episode_path(episode), priority: 0.7, changefreq: "daily", lastmod: episode.updated_at
    end
    add "/about", changefreq: "monthly"
    add "/imprint", changefreq: "monthly"
    add "/privacy", changefreq: "monthly"
  end
else
  # Typo fixed: "cofigured" -> "configured".
  puts "ERROR: aws s3 is not configured"
end
|
-- Customers whose membership started after 2019-12-25.
SELECT *
  FROM customers
 WHERE membership_date > '2019-12-25';
#!/usr/bin/env bash
# Reset local docker state: force-remove all listed containers, then prune
# images, volumes, and networks. Destructive — intended for dev machines.
set -e
# Force-remove every container reported by `docker ps -q`.
kill_all() {
  docker ps -q | xargs -I{} docker rm -f {}
}
# Prune everything docker will let us: stopped containers, all images,
# volumes, and networks, without an interactive confirmation.
prune_system() {
  docker system prune --force --all --volumes
  echo "cleaned containers, volumes, networks, and images."
}
kill_all
prune_system
# Tear down the swir ECS task-execution role.
# Order matters: IAM refuses to delete a role while policies are attached,
# so the managed policies are detached and the inline policy deleted first.
aws iam detach-role-policy --role-name swirEcsTaskExecutionRole --policy-arn arn:aws:iam::aws:policy/AmazonDynamoDBFullAccess
aws iam detach-role-policy --role-name swirEcsTaskExecutionRole --policy-arn arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy
# Inline (role-specific) policies are deleted, not detached.
aws iam delete-role-policy --role-name swirEcsTaskExecutionRole --policy-name swir-specific
aws iam delete-role --role-name swirEcsTaskExecutionRole
#!/bin/bash
#
# Master script for regression testing. This one runs everything in the input
# list, regardless of errors encountered. This is why it is two scripts.
#
# Input: List of trace files and their fastforward frames (space delimited).
#
# Repository path
repo_path=/projects/pr368/competitive_benchmarking/trace_repository
# Sanity check
if [ $# -lt 1 ]; then
    echo "Usage: ${0##*/} <file with list of trace files and frames>"
    exit 1
fi
rm -f log.txt
rm -rf results
FILES=()
FRAMES=()
# Split each input line into the trace path (field 1) and its frame list (rest).
while read DIR
do
    ARG1=`echo ${DIR} | sed 's/ .*//'`
    ARGS=`echo ${DIR} | cut -d ' ' -f2-`
    FILES+=(${ARG1})
    FRAMES+=("${ARGS}")
done < $1
for DIR in "${FILES[@]}"
do
    # Strip the repository prefix. Bug fix: the original used single quotes
    # ('s/$repo_path//'), so $repo_path never expanded and the prefix was
    # never removed. Double quotes allow expansion, and '|' is used as the
    # sed delimiter because the path contains slashes.
    STEM=`echo $DIR | sed "s|$repo_path||"`
    APP=`echo $STEM | awk -F/ '{print $(NF-4)}'`
    VERSION=`echo $STEM | awk -F/ '{print $(NF-3)}'`
    RESOLUTION=`echo $STEM | awk -F/ '{print $(NF-2)}'`
    SCENE=`echo $STEM | awk -F/ '{print $(NF-1)}'`
    NAME=${APP}.${SCENE}.${VERSION}
    echo | tee -a log.txt
    # FRAMES is consumed head-first: element 0 always pairs with the current file.
    echo "---- $NAME : ${FRAMES[0]} ----" | tee -a log.txt
    bash ./job.sh "$DIR" ${FRAMES[0]} 2>&1 | tee -a log.txt
    FRAMES=("${FRAMES[@]:1}")
done
// Stories carousel component
/**
 * Carousel of customer stories for the currently selected country.
 * Depends on page-level globals: `countryStoryMap` (stories keyed by
 * two-char country code) and jQuery + flickity for the carousel itself.
 */
class Stories extends React.Component {
  constructor() {
    super();
    this.state = { stories: [] };
    this.update = this.update.bind(this);
  }
  /**
   * Switch the carousel to the stories of the service area's country.
   * Falls back to 'AE' (UAE) when the country has no stories of its own.
   */
  update(serviceArea) {
    let countryCode = serviceArea.serviceProviderAvailableCountryModel.countryModel.twoCharCode;
    let countryStoryList;
    if (this.state.countryCode === countryCode) {
      return; // same country: nothing to re-render
    }
    try {
      // Tear down the previous carousel before the story list changes.
      $('#stories').flickity('destroy');
    } catch (e) {
      // Nothing to do: the carousel was not initialized yet.
    }
    countryStoryList = countryStoryMap[countryCode].stories;
    if (!countryStoryList.length) {
      countryCode = 'AE'; // UAE is the default.
      countryStoryList = countryStoryMap[countryCode].stories;
    }
    this.setState({
      countryCode: countryCode,
      stories: countryStoryList,
    });
  }
  render() {
    if (this.state.stories == null) return [];
    const list = [];
    for (let i = 0; i < this.state.stories.length; i++) {
      const story = this.state.stories[i];
      // Bug fix: React ignores the non-standard 'class' prop the original
      // used; className is required for the CSS classes to be applied.
      list.push(React.createElement('div', { className: 'carousel-cell', key: i },
        React.createElement('div', {
          className: 'carousel-cell-container',
          'data-youtube-id': story.youtubeVideoId,
          style: {
            backgroundImage: 'url("' + story.image + '")'
          }
        },
          React.createElement('div', { className: 'carousel-overlay' },
            React.createElement('hr', { className: 'carousel-divider' }),
            React.createElement('div', { className: 'carousel-text' },
              React.createElement('div', { className: 'story-name' }, story.title),
              React.createElement('div', { className: '' }, story.text)
            ),
          )
        )));
    }
    return list;
  }
  componentDidUpdate() {
    // Re-initialize flickity after each render. Bug fix: option keys are
    // camelCase — 'cellalign' was silently ignored by flickity.
    $('#stories').flickity({
      cellAlign: 'left',
      contain: true,
      prevNextButtons: false,
      pageDots: false
    });
  }
}
package io.cucumber.core.plugin;
import io.cucumber.messages.Messages.Envelope;
import io.cucumber.messages.internal.com.google.protobuf.util.JsonFormat;
import io.cucumber.plugin.ConcurrentEventListener;
import io.cucumber.plugin.event.EventPublisher;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
/**
 * Writes every cucumber message {@link Envelope} to the supplied stream as
 * one line of compact JSON (NDJSON). The stream is closed after the final
 * test-run-finished message.
 */
public final class MessageFormatter implements ConcurrentEventListener {
    private final JsonFormat.Printer printer =
            JsonFormat.printer().omittingInsignificantWhitespace();
    private final Writer out;

    public MessageFormatter(OutputStream outputStream) {
        this.out = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
    }

    @Override
    public void setEventPublisher(EventPublisher publisher) {
        publisher.registerHandlerFor(Envelope.class, this::writeMessage);
    }

    private void writeMessage(Envelope envelope) {
        try {
            printer.appendTo(envelope, out);
            out.write("\n");
            out.flush();
            // Final message of the run: release the underlying stream.
            if (envelope.hasTestRunFinished()) {
                out.close();
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
|
import uuid
import os
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import NoteSection
from .core_structures_x86 import *
from .notes import NTDescToJson
from .thread import Thread
from .consts import *
from .memory import *
from st_log.st_log import Logger
from ma_tk.manager import Manager
from ma_tk.load.elf import OpenELF, ElfFileLoader
from ma_tk.load.file import FileLoader
# Steps to mapping in files
# 1. Parse the core format
# 2. Extract relevant data points
# 3. Map relevant parts into memory
# 4. Load relevant supporting parts into memory
# 5. Perform analysis
import logging
import logging.handlers
class StitchObject(object):
    """Mutable record describing one memory segment "stitched" together from a
    core file's PT_LOAD header and its NT_FILE entry.

    Attributes are populated dynamically: first from DEFAULT_MEMORY_META in
    __init__, then via update() with the merged PT_LOAD/NT_FILE fields.
    """
    def __init__(self):
        # Seed every known metadata key with its default value.
        for k, v in DEFAULT_MEMORY_META.items():
            setattr(self, k, v)
    def get_filename(self):
        # Bug fix: the original evaluated getattr() but never returned it,
        # so this accessor always yielded None.
        return getattr(self, 'filename', None)
    def update(self, info_dict: dict):
        """Merge arbitrary key/value metadata into this object."""
        for k, v in info_dict.items():
            setattr(self, k, v)
    def set_loaded(self, loaded=True):
        # Records whether the segment's bytes have been mapped in.
        setattr(self, 'loaded', loaded)
    def get_file_size(self):
        return self.p_filesz
    def get_mem_size(self):
        return self.p_memsz
    def get_vm_size(self):
        return self.vm_end - self.vm_start
    def get_page_offset(self):
        return self.page_offset
    def get_size(self):
        # Segments with neither a file offset nor a virtual address only
        # have a memory size to report.
        if self.get_page_offset() < 0 and \
            self.vm_start < 0:
            return self.get_mem_size()
        return self.get_vm_size()
    def __str__(self):
        # Mirrors /proc/<pid>/maps-style output: start-end offset size file.
        vm_start = getattr(self, 'vm_start')
        vm_end = getattr(self, 'vm_end')
        page_offset = getattr(self, 'page_offset')
        filename = getattr(self, 'filename')
        size = self.get_size()
        args = [vm_start, vm_end, page_offset, size, filename]
        return "{:016x}-{:016x} {:08x} {:08x} {}".format(*args)
    def __repr__(self):
        return str(self)
class ELFCore(object):
def __init__(self, core_filename: str=None,
core_data: bytes=None,
required_files_location_list: list=None,
required_files_location: dict=None,
required_files_bytes: dict=None,
required_files_dir: str=None,
required_files_zip: str=None,
inmemory=False,
core_zip_filename: str=None,
load_external_files=True,
loglevel=logging.INFO,
namespace='ca_tk',
other_namespaces=None,
auto_load_files=False):
self.logger = Logger("ca_tk.linux.core.OpenELFCore", level=loglevel)
self.physical_ranges = []
self.virtual_ranges = []
self.load_external_files = load_external_files
self.inmemory = inmemory
self.namespace = namespace
self.other_namespaces = [] if other_namespaces is None else other_namespaces
self.auto_load_files = False
self.file_loader = FileLoader.create_fileloader( namespace=namespace,
required_files_location_list=required_files_location_list,
required_files_location=required_files_location,
required_files_bytes=required_files_bytes,
required_files_dir=required_files_dir,
required_files_zip=required_files_zip)
self.elf_loader = ElfFileLoader.create_fileloader( namespace=namespace+'_elf',
required_files_location_list=required_files_location_list,
required_files_location=required_files_location,
required_files_bytes=required_files_bytes,
required_files_dir=required_files_dir,
required_files_zip=required_files_zip)
self.elf_loader.set_file_opener(OpenELF)
# parse out each relevant program hdr and segment
# map pages to a specific range
self.virtual_cache = dict()
self.mgr = Manager(loglevel=loglevel)
# TODO FIXME set the page_mask correctly through parameterization
self.page_size = 4096
self.page_mask = self.mgr.page_mask
self.core_file_obj = None
self.elf = None
self.source = "Failed"
self.load_core_elf(core_filename=core_filename,
core_data=core_data, inmemory=self.inmemory,
core_zip_filename=core_zip_filename)
self.core_data = core_data
self.core_filename = core_filename
self.core_zip_filename = core_zip_filename
self.clone_interps = False
self.init_meta()
    def init_meta(self):
        """Parse the core's structure: segments, sections, notes, process
        info, per-thread state, the AUX vector, and the NT_FILE/PT_LOAD
        "stitching" used to lazily map segments."""
        self.logger.debug("Extracting the sections, notes, and segments")
        self.get_segments()
        self.get_sections()
        self.get_pt_notes()
        self.get_pt_loads()
        self.get_notes()
        _ = self.get_taskstruct_notes()
        self.logger.debug("Getting the process information")
        ps_props = self.get_prpsinfo_notes()
        ps_json = NTDescToJson.nt_prpsinfo(ps_props)
        # Keep only the fields named in the PRPSINFO whitelist, renamed per it.
        self.proc_properties = {PRPSINFO[k]: v for k, v in ps_json.items() if k in PRPSINFO}
        self.logger.debug("Stitching together the thread state")
        thread_regs = self.get_prstatus_notes()
        thread_fpregs = self.get_fpregset_notes()
        thread_siginfos = self.get_siginfo_notes()
        thread_xstates = self.get_xstate_notes()
        # zip() truncates at the shortest list — assumes every thread has all
        # four note types; TODO confirm for partial cores.
        threads_meta = zip(thread_regs,
                           thread_fpregs,
                           thread_siginfos,
                           thread_xstates)
        self.threads_metas = {c: tm for c, tm in enumerate(threads_meta)}
        self.threads = {c: Thread(*tm) for c, tm in self.threads_metas.items()}
        self.logger.debug("Parsing the AUX vector")
        auxv = self.get_auxv_notes()
        self.auxv = NTDescToJson.nt_auxv(auxv)
        self.stitch_files()
        self.organize_stitchings()
        # NOTE(review): load_core_segments is not defined in the visible
        # portion of this file — confirm it exists before enabling
        # auto_load_files.
        if self.auto_load_files:
            self.load_core_segments()
def get_required_files_list(self):
if not hasattr(self, 'required_files'):
required_files = set()
stitching = self.get_stitching()
for info in stitching.values():
if info.requires_file:
required_files.add(info.filename)
self.required_files = sorted(required_files)
return self.required_files
def add_stitch_page(self, vaddr, stitching):
self.page_caches[vaddr&self.page_mask] = stitching
def has_stitch_page(self, vaddr):
return vaddr&self.page_mask in self.page_caches
def get_stitch_page(self, vaddr):
return self.page_caches.get(vaddr & self.page_mask, None)
def init_stitch_pages(self, va_start, va_end, stitching=None):
for vaddr in range(va_start, va_end, self.page_size):
self.add_stitch_page(vaddr, stitching)
def init_stitching_pages(self, stitching):
return self.init_stitch_pages(stitching.vm_start, stitching.vm_end, stitching)
def organize_stitchings(self):
self.internal_segments = []
self.internal_segments_by_file = {}
self.internal_segments_by_vaddr = {}
self.external_segments = []
self.external_segments_by_file = {}
self.external_segments_by_vaddr = {}
self.page_caches = {}
for stitching in self.get_stitching().values():
filename = stitching.filename
vaddr = stitching.vm_start
self.init_stitching_pages(stitching)
if stitching.loadable:
self.internal_segments.append(stitching)
if filename not in self.internal_segments_by_file:
self.internal_segments_by_file[filename] = []
self.internal_segments_by_file[filename].append(stitching)
self.internal_segments_by_vaddr[vaddr] = stitching
else:
self.external_segments.append(stitching)
if filename not in self.external_segments_by_file:
self.external_segments_by_file[filename] = []
self.external_segments_by_file[filename].append(stitching)
self.external_segments_by_vaddr[vaddr] = stitching
def get_external_segments(self):
if not hasattr(self, 'external_segments'):
# self.required_files = sorted(required_files)
self.organize_stitchings()
return self.external_segments
def get_external_segments_by_file(self):
if not hasattr(self, 'external_segments_by_file'):
# self.required_files = sorted(required_files)
self.organize_stitchings()
return self.external_segments_by_file
def get_stitching_by_file(self):
if not hasattr(self, 'internal_segments_by_file'):
# self.required_files = sorted(required_files)
self.organize_stitchings()
return self.internal_segments_by_file
def get_stitching(self):
if not hasattr(self, 'stitching'):
self.stitching = self.stitch_files()
return self.stitching
def read_data_elf_info(self, page_offset, elf_info, expected_size=None):
'''
read the data from a specific page offset in the elf header.
this page offset comes from the core PT_LOAD page offset,
and we use the file name to determine where to read this data
from in the target ELF.
Also of note, we read only the the file segments size, and then
pad that data to the expected virtual address size.
see ma_tk.load.file.FileLoader for info
'''
fd = elf_info.get_fd()
filename = elf_info.get_filename()
segment = elf_info.get_attr('segments_by_offset', {}).get(page_offset, None)
if segment is None:
self.info("Unable to retrieve Segment @ {:016x} for {}".format(page_offset, filename))
return None
# ef = elf_info.get_file_interpreter()
if fd is None:
self.info("Invalid file descriptor for {}".format(filename))
return None
size = segment.get('p_filesz', None)
if size is None or size < 1:
self.info("Invalid size for segment at 0x{:016x}".format(page_offset))
return None
# FIXME not thread safe
fd.seek(page_offset, os.SEEK_SET)
data = fd.read(size)
if expected_size > size:
self.debug("Expected size larger than actual size, padding".format())
data = data + b'\x00' * (expected_size - size)
return data
    def load_stitch_inmemory(self, stitched_info):
        '''
        load an bytes- memory segment using
        information gathered during the stitching process
        '''
        ibm = None
        filename = stitched_info.filename
        elf_info = None
        # FIXME need a better way of handling the file loading (e.g. data, exe disguised as not ELFs) # assuming this is an elf, means if its not we will fail anyway later
        # assuming this is an elf, means if its not we will fail anyway later
        try:
            elf_info = self.elf_loader.load_file(filename, namespace=self.namespace, namespaces=self.other_namespaces)
        except:
            elf_info = self.file_loader.load_file(filename, namespace=self.namespace, namespaces=self.other_namespaces)
        page_offset = stitched_info.page_offset
        data_size = stitched_info.p_filesz
        vm_size = stitched_info.vm_size
        va_start = stitched_info.vm_start
        flags = stitched_info.p_flags
        data = None
        # Reuse an existing mapping if this address is already mapped.
        ibm = self.mgr.get_map(va_start)
        # TODO would reload the map here
        if ibm is not None:
            return ibm
        # Positive page offset means the bytes live inside the core file itself.
        segment_in_core = lambda info: info.page_offset > 0
        # NOTE(review): `rfiles` is not defined anywhere visible; this lambda
        # is never invoked, so it does not fail — confirm intent or remove.
        segment_in_elf = lambda info: info.filename in rfiles
        if segment_in_core(stitched_info):
            self.core_file_obj.seek(page_offset, os.SEEK_SET)
            # NOTE(review): read(0, data_size) — the first argument looks like
            # an offset, which duplicates the seek above; confirm the
            # core_file_obj.read signature.
            data = self.core_file_obj.read(0, data_size)
        elif elf_info is not None:
            data = self.read_data_elf_info(page_offset, elf_info, expected_size=data_size)
        if data is not None:
            # map the memory object into the manager for accessiblity
            # update the info indicated it was loaded
            # NOTE(review): pads to vm_size whenever the read length differs
            # from p_filesz — negative padding is possible if len(data) >
            # vm_size; confirm inputs.
            if len(data) != data_size:
                data = data + b'\x00' * (vm_size - len(data))
            self.logger.debug("Creating a memory object for the buffer from: {}@{:08x} starting @{:016x}".format(filename, page_offset, va_start))
            ibm = self.mgr.add_buffermap(data, va_start, size=data_size, offset=0,
                                         page_size=4096, filename=filename,
                                         flags=flags)
            setattr(ibm, 'elf_info', elf_info)
            setattr(ibm, 'info', stitched_info)
            stitched_info.set_loaded(True)
        return ibm
def load_stitch_infile(self, stitched_info):
'''
load an ioobject memory segment using
information gathered during the stitching process
'''
ibm = None
filename = stitched_info.filename
# FIXME need a better way of handling the file loading (e.g. data, exe disguised as not ELFs)
# assuming this is an elf, means if its not we will fail anyway later
elf_info = None
try:
elf_info = self.elf_loader.load_file(filename, namespace=self.namespace, namespaces=self.other_namespaces)
except:
elf_info = self.file_loader.load_file(filename, namespace=self.namespace, namespaces=self.other_namespaces)
page_offset = stitched_info.page_offset
data_size = stitched_info.vm_size
va_start = stitched_info.vm_start
flags = stitched_info.p_flags
core_io = None
segment_in_core = lambda info: info.page_offset > 0
segment_in_elf = lambda info: info.filename in rfiles
if segment_in_core(stitched_info):
core_io = self.clone_core_io()
core_io.seek(page_offset, os.SEEK_SET)
else:
elf_info = self.file_loader.load_file(filename, namespace=self.namespace, namespaces=self.other_namespaces)
if elf_info is not None and elf_info.get_fd() is not None:
core_io = elf_info.clone(create_new_file_interp=self.clone_interps)
if core_io is not None:
# map the memory object into the manager for accessiblity
# update the info indicated it was loaded
self.debug("Creating a memory object for the file memory object from: {}@{:08x} starting @{:016x}".format(filename, page_offset, vaddr))
ibm = self.mgr.add_ioobj(core_io, va_start, size, phy_start=page_offset,
flags=flags, filename=filename, page_size=4096)
setattr(ibm, 'elf_info', elf_info)
setattr(ibm, 'info', info)
info['loaded'] = True
return ibm
def load_stitch(self, stitched_info):
ibm = None
if stitched_info is None or \
not stitched_info.loadable or \
stitched_info.vm_start == -1:
if stitch is not None and not stitch.loadable:
self.logger.debug("ELFCore.load_stitch unloadable segment: {}".format(stitch))
elif stitch is not None and stitched_info.vm_start == -1:
self.logger.debug("ELFCore.load_stitch unloadable segment, bad VA: {}".format(stitch))
else:
self.logger.debug("ELFCore.load_stitch unloadable segment, bad stitch value: {}".format(stitch))
return ibm
elif self.inmemory:
ibm = self.load_stitch_inmemory(stitched_info)
print(ibm)
else:
ibm = self.load_stitch_infile(stitched_info)
if ibm is not None:
stitched_info.set_loaded(True)
return ibm
def load_stitches_by_file(self, filename):
stchs = self.get_external_segments_by_file().get(filename, []) + \
self.get_external_segments_by_file().get(filename, [])
return self._load_stitches(stchs)
def load_stitch_by_vaddr(self, vaddr, stitch=None):
if stitch is None:
stitch = self.get_stitch_page(vaddr)
ibm = self.load_stitch(stitch)
return ibm
def _load_stitches(self, stitch_infos):
mem_list = []
for info in stitch_infos:
ibm = self.load_stitch_by_vaddr(info.vm_start, stitch)
if ibm is not None:
mem_list.append(ibm)
else:
self.logger.debug("Failed to load: [{}]".format(info))
return mem_list
def stitch_files(self):
'''
stitch together information from the NT_FILES and PT_LOAD segments
this information is used to load the respective memory segments for
analysis
#TODO clean up the data here, since the stitching will create
incorrect perspectives into the core file. this was a hack to
get around deeper understanding.
'''
self.logger.debug("Stitching together file information")
files_info = self.get_files_info()
file_addrs = self.get_files_by_vaddr()
ptloads_by_vaddr = self.get_pt_loads_by_vaddr()
all_vaddrs = [vaddr for vaddr in ptloads_by_vaddr if vaddr > 0] + \
[vaddr for vaddr in file_addrs if vaddr > 0]
# add the progam header meta data into the vaddr entry
# the logic is that where the PT_LOAD and NT_FILE
# segments align, we'll get a clear picture. Not always happening
self.stitching = {vaddr: StitchObject() for vaddr in all_vaddrs}
for vaddr in self.stitching:
stitch = self.stitching[vaddr]
if vaddr == 0:
continue
pt_load = ptloads_by_vaddr.get(vaddr, None)
file_association = file_addrs.get(vaddr, None)
bd = {}
vm_size = 0
if pt_load is not None:
bd.update({k:v for k, v in pt_load.header.items()})
if file_association is not None:
bd.update({k:v for k,v in file_association.items()})
bd['vm_size'] = file_association['vm_end'] - file_association['vm_start']
bd['requires_file'] = file_association['page_offset'] <= 0
bd['page_offset'] = file_association['page_offset']
bd['loadable'] = bd['page_offset'] >= 0 and \
file_association['filename'].find(b'(deleted)') == -1
stitch.update(bd)
stitch.set_loaded(False)
return self.stitching
    def read_mapping(self):
        # NOTE(review): `mappings` is built but never populated, used, or
        # returned, and neither `self.files` nor `self.add_mapping` is
        # defined in the visible portion of this file — confirm this method
        # is still live before relying on it.
        mappings = {}
        for f_info in self.files:
            va_start = f_info['vm_start']
            va_end = f_info['vm_end']
            page_offset = f_info['page_offset']
            file_name = f_info['filename']
            size = va_end - va_start
            # Trailing comma suggests an argument (size? file_name?) was
            # dropped here — TODO confirm add_mapping's intended signature.
            self.add_mapping(va_start, va_end, page_offset, )
def get_thread_meta(self, idx):
return self.threads_metas[idx] if idx in self.threads_metas else None
def get_thread_info(self, idx):
return self.threads[idx] if idx in self.threads else None
def get_thread_count(self):
return len(self.threads_metas)
def get_segments(self):
if not hasattr(self, 'segments'):
self.segments = [i for i in self.elf.iter_segments()]
return self.segments
def get_sections(self):
if not hasattr(self, 'sections'):
self.sections = [i for i in self.elf.iter_sections()]
return self.sections
def get_notes(self):
if not hasattr(self, 'notes'):
pt_note = self.get_pt_notes()[0]
self.notes = [n for n in pt_note.iter_notes()]
return self.notes
def get_pt_notes(self):
if not hasattr(self, 'pt_notes'):
self.pt_notes = [i for i in self.segments if i.header.p_type == 'PT_NOTE']
return self.pt_notes
def get_prstatus_notes(self):
notes = self.get_notes()
if not hasattr(self, 'nt_prstatus'):
self.nt_prstatus = [i for i in notes if i['n_type'] == 'NT_PRSTATUS' or i['n_type'] == 1]
return self.nt_prstatus
def get_fpregset_notes(self):
notes = self.get_notes()
if not hasattr(self, 'nt_fpregset'):
self.nt_fpregset = [i for i in notes if i['n_type'] == 'NT_FPREGSET' or i['n_type'] == 2]
return self.nt_fpregset
def get_prpsinfo_notes(self):
notes = self.get_notes()
# Note the pyelf tools a good enough job pulling out the relevant details
if not hasattr(self, 'nt_prpsinfo'):
x = [i for i in notes if i['n_type'] == 'NT_PRPSINFO' or i['n_type'] == 3]
self.nt_prpsinfo = x[0]
return self.nt_prpsinfo
def get_taskstruct_notes(self):
notes = self.get_notes()
if not hasattr(self, 'nt_taskstruct'):
self.nt_taskstruct = [i for i in notes if i['n_type'] == 'NT_TASKSTRUCT' or i['n_type'] == 4]
return self.nt_taskstruct
def get_auxv_notes(self):
notes = self.get_notes()
if not hasattr(self, 'nt_auxv'):
x = [i for i in notes if i['n_type'] == 'NT_AUXV' or i['n_type'] == 4]
self.nt_auxv = x[0]
return self.nt_auxv
def get_siginfo_notes(self):
notes = self.get_notes()
if not hasattr(self, 'nt_siginfo'):
self.nt_siginfo = [i for i in notes if i['n_type'] == 'NT_SIGINFO' or i['n_type'] == 0x53494749]
return self.nt_siginfo
def get_file_notes(self):
notes = self.get_notes()
if not hasattr(self, 'nt_file'):
x = [i for i in notes if i['n_type'] == 'NT_FILE' or i['n_type'] == 0x46494c45]
self.nt_file = x
return self.nt_file
def get_xstate_notes(self):
notes = self.get_notes()
return [i for i in notes if i['n_type'] == 'NT_X86_XSTATE' or i['n_type'] == 0x202]
def get_files_info(self):
if not hasattr(self, 'files_info'):
self.file_info = NTDescToJson.nt_file(self.get_file_notes()[0])
self.files_by_vaddr = {i['vm_start']:i for i in self.file_info['memory_map']}
return self.file_info
def get_files_by_vaddr(self):
if not hasattr(self, 'files_by_vaddr'):
self.get_files_info()
return self.files_by_vaddr
def get_pt_loads_by_vaddr(self):
if not hasattr(self, 'pt_loads_by_vaddr'):
self.get_pt_loads_by_vaddr()
return self.pt_loads_by_vaddr
    def get_pt_loads(self):
        # Collect PT_LOAD segments once; also builds the vaddr index used by
        # get_pt_loads_by_vaddr(). Reads self.segments directly — assumes
        # get_segments() has already run (init_meta calls it first).
        if not hasattr(self, 'pt_loads'):
            self.pt_loads = [i for i in self.segments if i.header.p_type == 'PT_LOAD']
            self.pt_loads_by_vaddr = {i.header.p_vaddr: i for i in self.pt_loads}
        return self.pt_loads
def contains_physical(self, offset) -> bytes:
for mr in self.physical_ranges:
if offset in mr:
return True
return False
def physical_to_virtual_address(self, offset: int) -> int:
# convert the offset into a virtual address
# 1) is offset in core file?
# 2) is offset in another file that might be mapped in
pass
def virtual_to_physical(self, vma: int) -> int:
# convert the offset into a virtual address
# 1) is offset in core file?
# 2) is offset in another file that might be mapped in
pass
def read_physical(self, offset, size, mapping=None) -> bytes:
# 1) check if offset into the core file is present
# 2) if not, is the map the offset from this core into
# the target file
# 3) if none of the files are present return None
pass
def read_virtual(self, address, size) -> bytes:
# 1) check if address maps into the core file is present
# 2) if not, is the map the offset from this core into
# the target file
# 3) if none of the files are present return None
pass
def get_memory_range(self, address=None, offset=None, io_back=None):
# if address is not none, look up address and return back the range
# if offset is not none, look up offset and return back the range
# if the io_back is not none, look up the io_back and return the range
pass
def map_threads(self):
pass
def get_process(self, pid=None):
# 1) return threads/processes
pass
def get_thread(self, thread=None):
# 1) return threads/processes
pass
def get_processes(self, pid=None):
# 1) return threads/processes
pass
def get_threads(self, thread=None):
# 1) return threads/processes
pass
def load_elf_zip(self, zip_filename, filename=None, inmemory=True):
return self.elf_loader.load_file_from_zip(zip_filename, filename, inmemory=inmemory)
def load_elf_bytes(self, data, filename=None, update_bytes=True):
if data is not None and filename is None:
filename = str(uuid.uuid4())
# FIXME
if data is not None and\
filename in self.elf_loader.required_files_bytes and not update_bytes:
# TODO there may be a case where multiple byte blogs or what ever get
# set in known byte arrays KB e.g. elf_loader.required_files_bytes
# what do we do when this happens?
# right now, we ignore it and use the data already there
self.logger.critical("There might be a data collision for {}".format(filename))
elif data is not None and\
filename in self.elf_loader.required_files_bytes and update_bytes:
# TODO there may be a case where multiple byte blogs or what ever get
# set in known byte arrays KB e.g. elf_loader.required_files_bytes
# what do we do when this happens?
# right now, we ignore it and use the data already there
self.logger.critical("There might be a data collision for {}".format(filename))
self.elf_loader.required_files_bytes[filename] = data
elif data is not None:
self.elf_loader.required_files_bytes[filename] = data
return self.elf_loader.load_file(data, self.namespace,
namespaces=self.other_namespaces)
def load_elf(self, filename: str=None,
data: bytes=None,
inmemory=False,
zip_filename: str =None, update_bytes=True):
source = "Failed"
self.logger.debug("Attempting to load ELF Core")
if data is not None:
file_obj = self.load_elf_byte(data, filename=filename, update_bytes=update_bytes)
elif zip_filename is not None and self.elf_loader.is_zip(zip_filename):
file_obj = self.load_elf_zip(zip_filename, filename=filename, inmemory=self.inmemory)
elif filename is not None:
file_obj = self.elf_loader.load_file(filename, inmemory=inmemory)
if file_obj is not None:
source = file_obj.get_source()
self.logger.debug("Loaded ELF Core from: {}".format(source))
if file_obj is None:
raise Exception("Unable to load the core file for analysis")
return file_obj
    def load_core_elf(self, core_filename: str=None,
                      core_data: bytes=None,
                      inmemory=False,
                      core_zip_filename: str =None):
        """Load the core file via :meth:`load_elf` and cache the result.

        Sets ``self.core_file_obj``, ``self.source`` and ``self.elf`` (the
        file interpreter).

        :raises Exception: when the core could not be loaded (note that
            ``load_elf`` already raises in that case, so the final check here
            is effectively a second line of defense).
        """
        self.core_file_obj = self.load_elf(filename=core_filename,
                                           data=core_data,
                                           inmemory=inmemory,
                                           zip_filename=core_zip_filename)
        if self.core_file_obj is None:
            self.source = "Failed"
        else:
            self.source = self.core_file_obj.source
            self.elf = self.core_file_obj.get_file_interpreter()
        if self.core_file_obj is None:
            raise Exception("Unable to load the core file for analysis")
    def clone_core_io(self):
        """Return a clone of the loaded core file object.

        :raises Exception: when no core was successfully loaded.
        """
        if self.source == "Failed":
            raise Exception("Attempting to clone a failed core_io")
        return self.core_file_obj.clone(create_new_file_interp=self.clone_interps)
    def check_load(self, vaddr):
        """Return True when ``vaddr`` is loaded (or loadable via a stitch page).

        First asks the memory manager; if the address is unknown, attempts to
        load the stitch page covering it before giving up.
        """
        if self.mgr.check_vaddr(vaddr):
            return True
        elif not self.has_stitch_page(vaddr):
            self.logger.debug("ELFCore.check_load {:08x} is unknown".format(vaddr))
            return False
        # A stitch page exists for this address: try to materialize it.
        stitch = self.get_stitch_page(vaddr)
        ibm = self.load_stitch(stitch)
        if ibm is not None:
            return True
        return False
    # The following are thin pass-throughs to the memory manager (self.mgr).

    def read_word(self, vaddr, little_endian=True):
        """Read a word at virtual address ``vaddr``."""
        return self.mgr.read_word(vaddr, little_endian)

    def read_dword(self, vaddr, little_endian=True):
        """Read a double word at virtual address ``vaddr``."""
        return self.mgr.read_dword(vaddr, little_endian)

    def read_qword(self, vaddr, little_endian=True):
        """Read a quad word at virtual address ``vaddr``."""
        return self.mgr.read_qword(vaddr, little_endian)

    def read_at_vaddr(self, vaddr, size=1):
        """Read ``size`` bytes starting at virtual address ``vaddr``."""
        return self.mgr.read_at_vaddr(vaddr, size)

    def read(self, size=1):
        """Read ``size`` bytes from the manager's current position."""
        return self.mgr.read(size)

    def read_cstruct(self, vaddr, cstruct):
        """Read a C struct of type ``cstruct`` at ``vaddr``."""
        return self.mgr.read_cstruct(cstruct, addr=vaddr)

    def seek(self, vaddr):
        """Move the manager's current position to ``vaddr``."""
        return self.mgr.seek(addr=vaddr)
|
'use strict';
const EventEmitter = require('events');
const JSONStream = require('JSONStream');
const common = require('asset-pipe-common');
const stream = require('readable-stream');
const assert = require('assert');
const { join, basename, dirname } = require('path');
const fs = require('fs');
const os = require('os');
const { promisify } = require('util');
const mkdirp = promisify(require('mkdirp'));
const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);
const access = promisify(fs.access);
const readdir = promisify(fs.readdir);
const stat = promisify(fs.stat);
const Boom = require('boom');
/**
 * PassThrough stream that persists incoming data to a content-addressed file
 * under `rootPath`. Emits 'file saved' (id, fileName) on success or
 * 'file not saved' (error) on failure.
 */
class WriteStream extends stream.PassThrough {
    constructor(rootPath, type) {
        super();
        // Write to a temp file first: the final name needs the content hash.
        const temp = join(os.tmpdir(), common.createTemporaryFilename(type));
        // 'json' feeds are hashed per entry id; anything else by raw content.
        const hasher =
            type === 'json' ? new common.IdHasher() : new common.FileHasher();
        // JSON input is parsed so the hasher sees individual entries.
        const parser =
            type === 'json' ? JSONStream.parse('*') : new stream.PassThrough();
        const fileStream = fs.createWriteStream(temp);
        fileStream.on('finish', () => {
            const id = hasher.hash;
            const file = `${id}.${type}`;
            // Move the temp file to its hash-derived final location.
            fs.rename(temp, join(rootPath, file), error => {
                if (error) {
                    return this.emit('file not saved', error);
                }
                this.emit('file saved', id, file);
            });
        });
        // Propagate errors from every leg of the pipeline.
        hasher.on('error', error => {
            this.emit('error', error);
        });
        parser.on('error', error => {
            this.emit('error', error);
        });
        fileStream.on('error', error => {
            this.emit('error', error);
        });
        // Tee the input: one leg feeds the hasher, the other the temp file.
        this.pipe(parser).pipe(hasher);
        this.pipe(fileStream);
    }
}
/**
 * Readable file stream that additionally emits:
 *   - 'file found'     once the underlying file is opened
 *   - 'file not found' when opening fails with ENOENT
 *
 * NOTE(review): this extends fs.createReadStream (a factory *function*), not
 * the fs.ReadStream class. super() therefore returns the factory's stream,
 * which becomes `this`. It works in practice, but consider extending
 * fs.ReadStream instead — confirm before changing.
 */
class ReadStream extends fs.createReadStream {
    constructor(...args) {
        super(...args);
        // args[0] is the full path; only the basename is reported in events.
        const file = basename(args[0]);
        this.on('open', () => {
            this.emit('file found', file);
        });
        this.on('error', error => {
            if (error.code === 'ENOENT') {
                this.emit('file not found', file);
            }
        });
    }
}
/**
 * File-system storage sink for asset-pipe. All file names are resolved
 * relative to `options.path`.
 */
module.exports = class SinkFs extends EventEmitter {
    /**
     * @param {object} options
     * @param {string} options.path - root directory all files live under
     */
    constructor({ path } = {}) {
        super();
        assert(path, '"options.path" is missing');
        this.name = 'asset-pipe-sink-fs';
        this.rootPath = path;
    }

    // Resolve a file name against the sink's root directory.
    joinPath(fileName) {
        return join(this.rootPath, fileName);
    }

    // Guard: every public method requires a string file name.
    assertFileName(fileName) {
        assert(
            typeof fileName === 'string',
            `Expected "fileName" to be a string. Instead got ${fileName}`
        );
    }

    /**
     * Read a file as UTF-8 text.
     * Throws a Boom 404 error when the file cannot be read.
     */
    async get(fileName) {
        this.assertFileName(fileName);
        try {
            return await readFile(this.joinPath(fileName), 'utf8');
        } catch (e) {
            throw Boom.notFound(
                `No file could be located with name "${fileName}". ${e.message}`
            );
        }
    }

    // Write `fileContent`, creating intermediate directories as needed.
    async set(fileName, fileContent) {
        this.assertFileName(fileName);
        assert(fileContent, '"fileContent" is missing');
        const target = this.joinPath(fileName);
        await mkdirp(dirname(target));
        await writeFile(target, fileContent, 'utf8');
    }

    // True when the file exists and is both readable and writable.
    async has(fileName) {
        this.assertFileName(fileName);
        try {
            await access(
                this.joinPath(fileName),
                fs.constants.R_OK | fs.constants.W_OK
            );
            return true;
        } catch (e) {
            return false;
        }
    }

    /**
     * Return [{fileName, content}] for every regular file directly inside
     * `directoryName` (subdirectories are skipped). Throws when the directory
     * is missing, unreadable, or yields no file contents.
     */
    async dir(directoryName = '/') {
        const resolveTargetDirName = this.joinPath(directoryName);
        try {
            const dir = await readdir(resolveTargetDirName);
            if (dir.length === 0) {
                // Bare Error: only used to jump to the catch clause below.
                throw new Error();
            }
            let results = await Promise.all(
                dir.map(relativePath =>
                    stat(join(resolveTargetDirName, relativePath)).then(
                        stats => {
                            if (stats.isDirectory()) {
                                // filter out
                                return;
                            }
                            return this.get(
                                join(directoryName, relativePath)
                            ).then(content => ({
                                fileName: relativePath,
                                content,
                            }));
                        }
                    )
                )
            );
            // Drop the `undefined` placeholders left by directories.
            results = results.filter(Boolean);
            if (results.length === 0) {
                throw new Error();
            }
            return results;
        } catch (e) {
            throw new Error(
                `Missing folder with name "${directoryName}" or empty result`
            );
        }
    }

    // Create a content-addressed write stream for the given feed type.
    writer(type) {
        assert(type, '"type" is missing');
        return new WriteStream(this.rootPath, type);
    }

    // Create a read stream for the given file.
    reader(fileName) {
        this.assertFileName(fileName);
        return new ReadStream(this.joinPath(fileName));
    }
};
|
package main
import (
"fmt"
"net/http"
)
func main() {
	// Tiny test API: GET /config returns a hard-coded JSON routing config;
	// every other path just echoes the request URL back to the client.
	handler := func(w http.ResponseWriter, r *http.Request) {
		fmt.Println("Got a request for ", r.URL.Path)
		if r.URL.Path == "/config" {
			fmt.Println("sending back config")
			// NOTE(review): Write errors are ignored — acceptable for a stub.
			w.Write([]byte(`
[
{
"domain": "example.com",
"path": "/sample1",
"ready": true
},
{
"domain": "example.com",
"path": "/sample2",
"ready": false
},
{
"domain": "example1.com",
"path": "/sample1*",
"ready": true,
"rules": [
{
"type": "ReplacePath",
"args": {
"search": "/sample1",
"replace": "",
"times": 1
}
},
{
"type": "AppendPath",
"args": {
"begin": "/ali",
"end": "/cool"
}
}
]
}
]
`))
			return
		}
		fmt.Println("Rest of api")
		w.Write([]byte(r.URL.String()))
	}
	// Serve on :8000; ListenAndServe only returns on error.
	err := http.ListenAndServe(":8000", http.HandlerFunc(handler))
	if err != nil {
		panic(err)
	}
}
|
const assert = require('assert'),
helpers = require('we-test-tools').helpers;
let we;
describe('plugin.fastload.unit', function() {
  // Grab the shared `we` app instance built by the test bootstrap.
  before(function (done) {
    we = helpers.getWe();
    done();
  });
  // Each case below asserts that the plugin fast-loader registered the
  // corresponding extension point on the running app.
  it('Should load the orWithMinusParser search parser', function() {
    assert(we.router.search.parsers.orWithMinusParser);
  });
  it('Should load the inNameAndDescription search target', function() {
    assert(we.router.search.targets.inNameAndDescription);
  });
  it('Should load the dog controller', function() {
    assert(we.controllers.dog);
  });
  it('Should load giveVaccine modelHooks', function() {
    assert(we.db.modelHooks.giveVaccine);
  });
  it('Should load bark modelInstanceMethod', function() {
    assert(we.db.modelInstanceMethods.bark);
  });
  it('Should load jump modelClassMethod', function() {
    assert(we.db.modelClassMethods.jump);
  });
  it('Should load dog model config and model', function(){
    // Model config, the built model, and a class method on the model.
    assert(we.db.modelsConfigs.dog);
    assert(we.db.models.dog);
    assert(we.db.models.dog.jump);
  });
});
#!/usr/bin/env bash
set -eux
# Prevent outdated from making http requests
# (pre-seed its version-cache files with far-future timestamps so no
# network lookup is ever attempted)
echo '["0.2.0", "2099-01-01 00:00:00"]' >/tmp/outdated_cache_outdated
echo '["0.8.3", "2099-01-01 00:00:00"]' >/tmp/outdated_cache_birdseye
# Start the worker app under gunicorn with the worker-specific config.
gunicorn -c gunicorn_config_worker.py main.workers.master:app
|
<reponame>NIRALUser/BatchMake
#ifndef __Subject_h_
#define __Subject_h_
#include "Resource.h"
#include <string>
#include <stdlib.h>
#include <stdio.h>
#include <vector>
// One (name, value) metadata tag attached to a subject.
struct TagNameSubject{
  std::string name;
  std::string value;
};

// A study subject: an ID, a list of resources and a list of metadata tags.
class Subject
{
public:
  Subject();
  ~Subject(){;}
  void AddResource();
  // Number of tags actually stored (size of m_TagName).
  // NOTE(review): ignores m_NbTagsInfo set via SetNbTagsInfo() — confirm
  // which of the two counters is authoritative.
  int GetNbTagsInfo(){return m_TagName.size();}
  // No bounds checking: i must index an existing slot.
  void SetResource(int i, Resource *res){m_Resources[i] = res;}
  Resource* GetResource(int i){return m_Resources[i];}
  void SetNbResource(int i){m_NbResources = i;}
  void SetNbTagsInfo(int i){m_NbTagsInfo = i;}
  int GetNbResource(){return m_NbResources;}
  void SetSubjectID(std::string id){m_SubjectID = id;}
  void SetTagName(std::string tagname);
  void SetTagValue(std::string value, int i){m_TagName[i].value = value;}
  std::string GetSubjectID(){return m_SubjectID;}
  std::string GetTagName(int i){return m_TagName[i].name;}
  std::string GetTagNameValue(int i){return m_TagName[i].value;}
private:
  int m_NbResources;                      // declared resource count
  int m_NbTagsInfo;                       // declared tag count
  std::vector<Resource*> m_Resources;
  std::string m_SubjectID;
  std::vector<TagNameSubject> m_TagName;
};
#endif
|
public static List<String> getForbiddenWords(List<String> words, List<String> forbiddenWords) {
List<String> result = new ArrayList<>();
words.forEach(word -> {
if (!forbiddenWords.contains(word)) {
result.add(word);
}
});
return result;
} |
<gh_stars>0
// Convert an array-like value (e.g. `arguments`) into a real Array.
Array._X_CLOUD_ARGTOARR = (arrayLike) => Array.prototype.slice.call(arrayLike);
|
<filename>shald/core/Client.cpp
#include "Config.h"
#include "Client.h"
#include "Cfg.h"
namespace Core
{
ClientPool clientPool;
static uint _current_session = 0;
// A client registers itself in the global pool on construction, keyed by a
// monotonically increasing session id, and unregisters on destruction.
Client::Client( int fd ): Network::TcpClient(fd), _session(++ _current_session)
{
    clientPool.Add(this);
}

Client::~Client()
{
    clientPool.Remove(_session);
}
// Incoming-data callback. Currently a pure echo server: received bytes are
// sent straight back. The buffered packet-parsing path is disabled below.
void Client::OnRecv( byte * buf, size_t len )
{
    if(len == 0)
        return;
    Send(buf, len);
    /*
    _buffer.append(buf, len);
    ProcessPacket();
    */
}
// Drain complete packets from _buffer. Framing (as read by this code):
// the packet size is a ushort at byte offset 2; a packet is processed once
// at least 4 header bytes and the full psize bytes are buffered.
// NOTE(review): buf[1] is passed to the (commented-out) plugin dispatch as
// an opcode — confirm the header layout against the protocol docs.
void Client::ProcessPacket()
{
    size_t size;
    ushort psize;
    while((size = _buffer.size()) >= 4 && (psize = *(ushort *)&_buffer[2]) <= size)
    {
        byte * buf = _buffer;
        // Payload view: skips the 4-byte header.
        Utils::Stream st(buf + 4, psize - 4);
        // Plugins::Realm::PluginRealm::GetSingleton().Process(*this, (ushort)buf[1], st);
        if(psize < size)
            _buffer.pop_front(psize);
        else
            _buffer.clear();
    }
}
ClientPool::ClientPool()
{
    // Pre-size the session hash table from configuration.
    _sessionMap.rehash(cfg.GetHashtableSize());
}

void ClientPool::Remove( uint s )
{
    _sessionMap.erase(s);
}

// Look up a client by session id; NULL when the session is unknown.
Client * ClientPool::operator[]( uint s )
{
    SessionMap_t::iterator it = _sessionMap.find(s);
    if(it == _sessionMap.end())
        return NULL;
    return it->second;
}

void ClientPool::Add( Client * cl )
{
    if(cl == NULL)
        return;
    _sessionMap[cl->GetSession()] = cl;
}
}
|
<gh_stars>0
import React from 'react';
import { render } from '@testing-library/react';
import { OptionsMenuItem } from '../../OptionsMenuItem';
import { DropdownArrowContext } from '../../../Dropdown';
describe('OptionsMenuItem', () => {
  it('should match snapshot', () => {
    // Render inside the dropdown context so the item can register its ref;
    // keyHandler is unused by this snapshot.
    const { asFragment } = render(
      <DropdownArrowContext.Provider value={{ sendRef: jest.fn(), keyHandler: undefined }}>
        <OptionsMenuItem
          children={<>ReactNode</>}
          className={'string'}
          isSelected={false}
          isDisabled={true}
          onSelect={() => null as any}
          id={"''"}
        />{' '}
      </DropdownArrowContext.Provider>
    );
    expect(asFragment()).toMatchSnapshot();
  });
});
|
import React from 'react';
import {View, Text, StyleSheet, TouchableOpacity} from 'react-native';
export default function LoginButtons() {
return (
<View style={styles.container}>
<TouchableOpacity style={styles.signInButton}>
<Text style={styles.text}>Sign In</Text>
</TouchableOpacity>
<TouchableOpacity style={styles.signUpButton}>
<Text style={styles.text}>Sign Up</Text>
</TouchableOpacity>
</View>
);
}
// Layout: a 50px full-width white row holding the two half-width buttons.
const styles = StyleSheet.create({
  container: {
    height: 50,
    width: '100%',
    backgroundColor: '#FFF',
    flexDirection: 'row',
    justifyContent: 'space-between',
  },
  // NOTE(review): the left (Sign In) button rounds its RIGHT corners and the
  // right (Sign Up) button rounds its LEFT corners — i.e. the inner edges.
  // Confirm this is the intended look and not a swap.
  signInButton: {
    height: '100%',
    width: '50%',
    backgroundColor: '#f2f2f2',
    alignItems: 'center',
    justifyContent: 'center',
    borderTopRightRadius: 5,
    borderBottomRightRadius: 5,
  },
  signUpButton: {
    height: '100%',
    width: '50%',
    backgroundColor: '#F7C732',
    alignItems: 'center',
    justifyContent: 'center',
    borderTopLeftRadius: 5,
    borderBottomLeftRadius: 5,
  },
  text: {
    color: '#000',
    fontSize: 20,
  },
});
<filename>tui/views/issues.js
import blessed from 'blessed'
import contrib from 'blessed-contrib'
import { BaseView } from './base.js'
// Poll the server for the issues list every 2 seconds.
const FETCH_LATEST_INTERVAL = 2e3

/**
 * Full-screen issues view: a selectable issue table on the top half and a
 * detail pane (description / repetitions / cause / error) on the bottom.
 * Key bindings: [r] attempt recovery, [d] dismiss, [i] dismiss and ignore.
 */
export class IssuesView extends BaseView {
  // Re-render unless a modal flow currently owns the screen.
  render () {
    if (!this.isInFlow) this.screen.render()
  }

  async setup () {
    const {screen} = this
    this.isInFlow = false
    try {
      this.api = await this.connectLoopback()
    } catch (e) {
      // No server connection: show an error box and give up on this view.
      screen.append(blessed.text({
        top: 2,
        left: 2,
        content: `Failed to connect to server\n${e.message}`,
        style: {fg: 'red'},
        border: {type: 'line', fg: 'red'}
      }))
      screen.render()
      return
    }
    this.selection = undefined
    this.issues = []
    // Top half: the issues table.
    this.listing = blessed.listtable({
      top: 1,
      left: 0,
      width: '100%',
      height: '50%',
      tags: true,
      interactive: true,
      keys: true,
      align: 'left',
      style: {
        header: {underline: true},
        cell: {selected: {bg: 'white', fg: 'black'}}
      },
      border: {type: 'line'}
    })
    screen.append(this.listing)
    screen.append(blessed.text({
      top: 1,
      left: 2,
      content: 'Issues',
      style: {bold: true}
    }))
    // Bottom half: detail pane for the selected issue.
    this.infopane = blessed.box({
      top: '50%+1',
      left: 0,
      width: '100%',
      height: '50%',
      tags: true
    })
    screen.append(this.infopane)
    // HACK
    // fix some spacing behaviors in blessed-contrib to take the full space possible
    this.listing.rows.top = 0
    this.listing.render = function () {
      if (this.screen.focused === this.rows) this.rows.focus()
      this.rows.width = this.width - 3
      this.rows.height = this.height - 2
      blessed.Box.prototype.render.call(this)
    }
    // Row index is 1-based (row 0 is the header), hence `index - 1`.
    this.listing.on('select item', (node, index) => {
      if (this.selection !== this.issues[index - 1].id) {
        this.selection = this.issues[index - 1].id
        this.updateInfoPane()
      }
    })
    this.listing.focus()
    // [r] — ask the server to recover the selected issue; if it is still in
    // the refreshed list afterwards, recovery is assumed to have failed.
    this.listing.key(['r'], async () => {
      if (!(await this.ask('Attempt to recover this issue?'))) {
        return
      }
      this.isInFlow = true
      try {
        const issueId = this.selection
        await this.api.call('server.recoverIssue', [issueId])
        await new Promise(r => setTimeout(r, 1000)) // give a second to attempt recovery
        await this.fetchLatest()
        let index = this.issues.findIndex(issue => issue.id === issueId)
        if (index !== -1) {
          this.listing.select(index + 1)
          this.message('Recovery appears to have been unsuccessful.', 'yellow')
        } else {
          this.message('Recovery appears to have been successful.')
        }
      } finally {
        this.isInFlow = false
        this.render()
        this.listing.focus()
      }
    })
    // [d] — dismiss the selected issue.
    this.listing.key(['d'], async () => {
      if (!(await this.ask('Dismiss this issue?'))) {
        return
      }
      await this.api.call('server.dismissIssue', [this.selection])
      await this.fetchLatest()
    })
    // [i] — dismiss and suppress future occurrences.
    this.listing.key(['i'], async () => {
      if (!(await this.ask('Dismiss and ignore this issue?'))) {
        return
      }
      await this.api.call('server.dismissIssue', [this.selection, {ignoreFuture: true}])
      await this.fetchLatest()
    })
    await this.fetchLatest()
    this.render()
    this.fetchLatestInterval = setInterval(() => this.fetchLatest(), FETCH_LATEST_INTERVAL)
  }

  teardown () {
    clearInterval(this.fetchLatestInterval)
    if (this.api?.socket) this.api?.close()
  }

  // Refresh the issues table, preserving the current row selection.
  async fetchLatest () {
    this.issues = await this.api.call('server.listIssues', []).catch(e => [])
    const selected = this.listing.selected
    this.listing.setData([
      ['Issue', 'Repetitions'],
      ...this.issues.map(issue => ([issue.entries[0].description, String(issue.entries.length)]))
    ])
    if (selected) this.listing.select(selected)
    this.render()
  }

  // Rebuild the detail pane for the currently selected issue.
  updateInfoPane () {
    // Clear previous widgets (iterate over a copy since we mutate children).
    for (let child of this.infopane.children.slice()) {
      this.infopane.remove(child)
    }
    if (!this.selection) {
      return
    }
    const issue = this.issues.find(issue => issue.id === this.selection)
    if (!issue) return
    // Description box (top-left) with its label overlaid on the border.
    this.infopane.append(blessed.text({
      left: 0,
      top: 0,
      width: '100%-20',
      height: 3,
      border: {type: 'line'},
      padding: {left: 1},
      content: issue.entries[0].description
    }))
    this.infopane.append(blessed.text({
      top: 0,
      left: 2,
      content: 'Description',
      style: {bold: true}
    }))
    // Repetitions counter (top-right).
    this.infopane.append(blessed.text({
      left: '100%-21',
      top: 0,
      width: 21,
      height: 3,
      border: {type: 'line'},
      padding: {left: 1},
      content: String(issue.entries.length)
    }))
    this.infopane.append(blessed.text({
      top: 0,
      left: '100%-18',
      content: 'Repetitions',
      style: {bold: true}
    }))
    // Cause box.
    this.infopane.append(blessed.text({
      left: 0,
      top: 2,
      width: '100%',
      height: 8,
      border: {type: 'line'},
      padding: {left: 1},
      content: issue.entries[0].cause
    }))
    this.infopane.append(blessed.text({
      top: 2,
      left: 2,
      content: 'Cause',
      style: {bold: true}
    }))
    // Error box.
    this.infopane.append(blessed.text({
      left: 0,
      top: 8,
      width: '100%',
      height: 6,
      border: {type: 'line'},
      padding: {left: 1},
      content: issue.entries[0].error
    }))
    this.infopane.append(blessed.text({
      top: 8,
      left: 2,
      content: 'Error',
      style: {bold: true}
    }))
    // Key-binding help bar.
    this.infopane.append(blessed.text({
      top: 13,
      left: 0,
      width: '100%',
      height: 3,
      border: {type: 'line'},
      padding: {left: 1},
      tags: true,
      content: '{green-fg}{bold}[r]{/} {green-fg}Attempt recovery{/} {green-fg}{bold}[d]{/} {green-fg}Dismiss{/} {green-fg}{bold}[i]{/} {green-fg}Dismiss and ignore{/}'
    }))
    this.render()
  }
}
<reponame>AndreyNudko/ClickHouse-Native-JDBC
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package examples;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
/**
* SimpleQuery
*/
public class SimpleQuery {
    public static void main(String[] args) throws Exception {
        // Connect to a local ClickHouse native endpoint (port 9000) with a
        // custom client name; try-with-resources closes everything in order.
        try (Connection connection = DriverManager.getConnection("jdbc:clickhouse://127.0.0.1:9000?client_name=ck-example")) {
            try (Statement stmt = connection.createStatement()) {
                try (ResultSet rs = stmt.executeQuery(
                        "SELECT (number % 3 + 1) as n, sum(number) FROM numbers(10000000) GROUP BY n")) {
                    while (rs.next()) {
                        // column 1 = group key n, column 2 = sum(number)
                        System.out.println(rs.getInt(1) + "\t" + rs.getLong(2));
                    }
                }
            }
        }
    }
}
|
// Core RxJS type declarations bundled as a raw string (e.g. to feed an
// in-editor TypeScript language service). The string body is data and must
// not be edited; note the escaped backticks (\`) inside doc comments.
export default `
interface UnaryFunction<T, R> {
(source: T): R;
}
interface OperatorFunction<T, R> extends UnaryFunction<Observable<T>, Observable<R>> {
}
interface PromiseLike<T> {
/**
* Attaches callbacks for the resolution and/or rejection of the Promise.
* @param onfulfilled The callback to execute when the Promise is resolved.
* @param onrejected The callback to execute when the Promise is rejected.
* @returns A Promise for the completion of which ever callback is executed.
*/
then<TResult1 = T, TResult2 = never>(onfulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | undefined | null): PromiseLike<TResult1 | TResult2>;
}
declare type PromiseConstructorLike = new <T>(executor: (resolve: (value: T | PromiseLike<T>) => void, reject: (reason?: any) => void) => void) => PromiseLike<T>;
interface Unsubscribable {
unsubscribe(): void;
}
declare type TeardownLogic = Unsubscribable | Function | void;
interface NextObserver<T> {
closed?: boolean;
next: (value: T) => void;
error?: (err: any) => void;
complete?: () => void;
}
interface ErrorObserver<T> {
closed?: boolean;
next?: (value: T) => void;
error: (err: any) => void;
complete?: () => void;
}
interface CompletionObserver<T> {
closed?: boolean;
next?: (value: T) => void;
error?: (err: any) => void;
complete: () => void;
}
declare type PartialObserver<T> = NextObserver<T> | ErrorObserver<T> | CompletionObserver<T>;
interface Subscribable<T> {
subscribe(observer?: PartialObserver<T>): Unsubscribable;
/** @deprecated Use an observer instead of a complete callback */
subscribe(next: null | undefined, error: null | undefined, complete: () => void): Unsubscribable;
/** @deprecated Use an observer instead of an error callback */
subscribe(next: null | undefined, error: (error: any) => void, complete?: () => void): Unsubscribable;
/** @deprecated Use an observer instead of a complete callback */
subscribe(next: (value: T) => void, error: null | undefined, complete: () => void): Unsubscribable;
subscribe(next?: (value: T) => void, error?: (error: any) => void, complete?: () => void): Unsubscribable;
}
declare class Observable<T> implements Subscribable<T> {
/** Internal implementation detail, do not use directly. */
_isScalar: boolean;
/** @deprecated This is an internal implementation detail, do not use. */
source: Observable<any>;
/** @deprecated This is an internal implementation detail, do not use. */
operator: Operator<any, T>;
/**
* @constructor
* @param {Function} subscribe the function that is called when the Observable is
* initially subscribed to. This function is given a Subscriber, to which new values
* can be \`next\`ed, or an \`error\` method can be called to raise an error, or
* \`complete\` can be called to notify of a successful completion.
*/
constructor(subscribe?: (this: Observable<T>, subscriber: Subscriber<T>) => TeardownLogic);
/**
* Creates a new cold Observable by calling the Observable constructor
* @static true
* @owner Observable
* @method create
* @param {Function} subscribe? the subscriber function to be passed to the Observable constructor
* @return {Observable} a new cold observable
* @nocollapse
* @deprecated use new Observable() instead
*/
static create: Function;
/**
* Creates a new Observable, with this Observable as the source, and the passed
* operator defined as the new observable's operator.
* @method lift
* @param {Operator} operator the operator defining the operation to take on the observable
* @return {Observable} a new observable with the Operator applied
*/
lift<R>(operator: Operator<T, R>): Observable<R>;
subscribe(observer?: PartialObserver<T>): Subscription;
/** @deprecated Use an observer instead of a complete callback */
subscribe(next: null | undefined, error: null | undefined, complete: () => void): Subscription;
/** @deprecated Use an observer instead of an error callback */
subscribe(next: null | undefined, error: (error: any) => void, complete?: () => void): Subscription;
/** @deprecated Use an observer instead of a complete callback */
subscribe(next: (value: T) => void, error: null | undefined, complete: () => void): Subscription;
subscribe(next?: (value: T) => void, error?: (error: any) => void, complete?: () => void): Subscription;
/** @deprecated This is an internal implementation detail, do not use. */
_trySubscribe(sink: Subscriber<T>): TeardownLogic;
/**
* @method forEach
* @param {Function} next a handler for each value emitted by the observable
* @param {PromiseConstructor} [promiseCtor] a constructor function used to instantiate the Promise
* @return {Promise} a promise that either resolves on observable completion or
* rejects with the handled error
*/
forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise<void>;
/** @internal This is an internal implementation detail, do not use. */
_subscribe(subscriber: Subscriber<any>): TeardownLogic;
/**
* @nocollapse
* @deprecated In favor of iif creation function: import { iif } from 'rxjs';
*/
static if: typeof iif;
/**
* @nocollapse
* @deprecated In favor of throwError creation function: import { throwError } from 'rxjs';
*/
static throw: typeof throwError;
pipe(): Observable<T>;
pipe<A>(op1: OperatorFunction<T, A>): Observable<A>;
pipe<A, B>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>): Observable<B>;
pipe<A, B, C>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>, op3: OperatorFunction<B, C>): Observable<C>;
pipe<A, B, C, D>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>, op3: OperatorFunction<B, C>, op4: OperatorFunction<C, D>): Observable<D>;
pipe<A, B, C, D, E>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>, op3: OperatorFunction<B, C>, op4: OperatorFunction<C, D>, op5: OperatorFunction<D, E>): Observable<E>;
pipe<A, B, C, D, E, F>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>, op3: OperatorFunction<B, C>, op4: OperatorFunction<C, D>, op5: OperatorFunction<D, E>, op6: OperatorFunction<E, F>): Observable<F>;
pipe<A, B, C, D, E, F, G>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>, op3: OperatorFunction<B, C>, op4: OperatorFunction<C, D>, op5: OperatorFunction<D, E>, op6: OperatorFunction<E, F>, op7: OperatorFunction<F, G>): Observable<G>;
pipe<A, B, C, D, E, F, G, H>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>, op3: OperatorFunction<B, C>, op4: OperatorFunction<C, D>, op5: OperatorFunction<D, E>, op6: OperatorFunction<E, F>, op7: OperatorFunction<F, G>, op8: OperatorFunction<G, H>): Observable<H>;
pipe<A, B, C, D, E, F, G, H, I>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>, op3: OperatorFunction<B, C>, op4: OperatorFunction<C, D>, op5: OperatorFunction<D, E>, op6: OperatorFunction<E, F>, op7: OperatorFunction<F, G>, op8: OperatorFunction<G, H>, op9: OperatorFunction<H, I>): Observable<I>;
pipe<A, B, C, D, E, F, G, H, I>(op1: OperatorFunction<T, A>, op2: OperatorFunction<A, B>, op3: OperatorFunction<B, C>, op4: OperatorFunction<C, D>, op5: OperatorFunction<D, E>, op6: OperatorFunction<E, F>, op7: OperatorFunction<F, G>, op8: OperatorFunction<G, H>, op9: OperatorFunction<H, I>, ...operations: OperatorFunction<any, any>[]): Observable<{}>;
toPromise<T>(this: Observable<T>): Promise<T>;
toPromise<T>(this: Observable<T>, PromiseCtor: typeof Promise): Promise<T>;
toPromise<T>(this: Observable<T>, PromiseCtor: PromiseConstructorLike): Promise<T>;
}
`;
|
#!/bin/bash
# Build the service image, tag it with a timestamp and "latest", and push
# both repository tags when the first argument is "push".
PUSH=$1
DATE="$(date "+%Y%m%d%H%M")"
REPOSITORY_PREFIX="latonaio"
SERVICE_NAME="sap-api-integrations-equipment-master-creates"
# Full rebuild every time (--no-cache), with BuildKit enabled.
DOCKER_BUILDKIT=1 docker build --progress=plain -t ${SERVICE_NAME}:"${DATE}" . --no-cache
# tagging
docker tag ${SERVICE_NAME}:"${DATE}" ${SERVICE_NAME}:latest
docker tag ${SERVICE_NAME}:"${DATE}" ${REPOSITORY_PREFIX}/${SERVICE_NAME}:"${DATE}"
docker tag ${REPOSITORY_PREFIX}/${SERVICE_NAME}:"${DATE}" ${REPOSITORY_PREFIX}/${SERVICE_NAME}:latest
if [[ $PUSH == "push" ]]; then
    docker push ${REPOSITORY_PREFIX}/${SERVICE_NAME}:"${DATE}"
    docker push ${REPOSITORY_PREFIX}/${SERVICE_NAME}:latest
fi
|
#! /usr/bin/env bash
# -----------------------------------------------------------------------
# Project setup and first implementation of an upgradeable DIDRegistry
# -----------------------------------------------------------------------
# Requires NETWORK, OWNER and MULTISIG to be set in the environment.
# Clean up
set -x
set -e
rm -f zos.*
# Initialize project zOS project
# NOTE: Creates a zos.json file that keeps track of the project's details
npx zos init oceanprotocol 0.1.poc -v
# Register contracts in the project as an upgradeable contract.
# NOTE: here we need to add the rest of oceanprotocol contracts
npx zos add DIDRegistry -v --skip-compile
# Deploy all implementations in the specified network.
# NOTE: Creates another zos.<network_name>.json file, specific to the network used, which keeps track of deployed addresses, etc.
npx zos push --network $NETWORK --skip-compile -v
# Request a proxy for the upgradeably contracts.
# Here we run initialize which replace contract constructors
# NOTE: A dapp could now use the address of the proxy specified in zos.<network_name>.json
# instance=MyContract.at(proxyAddress)
npx zos create DIDRegistry --network $NETWORK --init initialize --args $OWNER -v
# -----------------------------------------------------------------------
# Change admin priviliges to multisig
# -----------------------------------------------------------------------
# Fixed: this last step hardcoded "--network development" while every other
# command targets $NETWORK, so the admin change silently hit the wrong
# network when NETWORK != development.
npx zos set-admin DIDRegistry $MULTISIG --network $NETWORK --yes
def sort(list):
    """Selection-sort ``list`` in place (ascending) and return it.

    Note: the parameter name shadows the built-in ``list``; it is kept to
    preserve the public keyword interface.
    """
    for position in range(len(list)):
        # Index of the smallest element in the unsorted suffix; ``min`` with
        # a key returns the first minimal index, matching a strict > scan.
        smallest = min(range(position, len(list)), key=list.__getitem__)
        list[position], list[smallest] = list[smallest], list[position]
    return list
# Demo: sort a sample list and print the result.
# NOTE(review): the name ``list`` shadows the built-in type; harmless in this
# short script but best avoided.
list = [3,7,1,4,9,5]
sorted_list = sort(list)
print(sorted_list)
#!/bin/bash
# Launch an interactive, throwaway gopherbot-theia IDE container
# (--rm removes it on exit; named so only one instance runs at a time).
docker run -it --rm --name gopherbot-theia quay.io/lnxjedi/gopherbot-theia:latest
def insertionSort(arr):
    """Insertion-sort ``arr`` in place (ascending, stable) and return it."""
    for idx in range(1, len(arr)):
        value = arr[idx]
        pos = idx
        # Shift every larger element one slot right, then drop value in.
        while pos > 0 and arr[pos - 1] > value:
            arr[pos] = arr[pos - 1]
            pos -= 1
        arr[pos] = value
    return arr
if __name__ == '__main__':
    # Demo run: sorts the sample list in place and prints it.
    arr = [2, 4, 7, 3, 8, 6, 1]
    print(insertionSort(arr))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.