text stringlengths 1 1.05M |
|---|
<reponame>ghackett/ProviderOne
/*
* This file has been auto-generated by ProviderOne
*
* Copyright (C) 2011 GroupMe, Inc.
*/
package com.groupme.providerone.sample.database.autogen.util;
import android.content.ContentValues;
import android.database.DatabaseUtils;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteDatabaseCorruptException;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteStatement;
import android.os.Build;
import android.text.TextUtils;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
public class PlatformDatabaseUtils {
public static final int CONFLICT_NONE = SQLiteDatabase.CONFLICT_NONE;
public static final int CONFLICT_ROLLBACK = SQLiteDatabase.CONFLICT_ROLLBACK;
public static final int CONFLICT_ABORT = SQLiteDatabase.CONFLICT_ABORT;
public static final int CONFLICT_FAIL = SQLiteDatabase.CONFLICT_FAIL;
public static final int CONFLICT_IGNORE = SQLiteDatabase.CONFLICT_IGNORE;
public static final int CONFLICT_REPLACE = SQLiteDatabase.CONFLICT_REPLACE;
private static final String[] CONFLICT_VALUES = new String[] {
"", " OR ROLLBACK ", " OR ABORT ", " OR FAIL ", " OR IGNORE ", " OR REPLACE "
};
SQLiteOpenHelper mDatabase;
public PlatformDatabaseUtils(SQLiteOpenHelper db) {
mDatabase = db;
}
public int update(String table, ContentValues values, String where, String[] whereArgs) {
SQLiteDatabase db = mDatabase.getWritableDatabase();
return db.update(table, values, where, whereArgs);
}
public void execSQL(String query) {
SQLiteDatabase db = mDatabase.getWritableDatabase();
db.execSQL(query);
}
public long insertWithOnConflict(String table, String nullColumnHack, ContentValues values, int algorithm) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) {
return insert(table, nullColumnHack, values, algorithm);
} else {
return legacyInsert(table, nullColumnHack, values, algorithm);
}
}
private long legacyInsert(String table, String nullColumnHack, ContentValues initialValues, int algorithm) {
SQLiteDatabase db = mDatabase.getWritableDatabase();
StringBuilder sql = new StringBuilder();
sql.append("INSERT");
sql.append(CONFLICT_VALUES[algorithm]);
sql.append(" INTO ");
sql.append(table);
StringBuilder values = new StringBuilder();
Set<Map.Entry<String, Object>> entrySet = null;
if (initialValues != null && initialValues.size() > 0) {
entrySet = initialValues.valueSet();
Iterator<Map.Entry<String, Object>> entriesIter = entrySet.iterator();
boolean needsSeparator = false;
sql.append("(");
while (entriesIter.hasNext()) {
if (needsSeparator) {
sql.append(", ");
values.append(", ");
}
needsSeparator = true;
Map.Entry<String, Object> entry = entriesIter.next();
sql.append(entry.getKey());
values.append("?");
}
sql.append(")");
} else {
sql.append("(").append(nullColumnHack).append(") ");
values.append("NULL");
}
sql.append(" VALUES (");
sql.append(values);
sql.append(");");
SQLiteStatement statement = null;
try {
statement = db.compileStatement(sql.toString());
if (entrySet != null) {
int size = entrySet.size();
Iterator<Map.Entry<String, Object>> entriesIter = entrySet.iterator();
for (int i = 0; i < size; i++) {
Map.Entry<String, Object> entry = entriesIter.next();
DatabaseUtils.bindObjectToProgram(statement, i + 1, entry.getValue());
}
}
return statement.executeInsert();
} catch (SQLiteDatabaseCorruptException e) {
throw e;
} finally {
if (statement != null) {
statement.close();
}
}
}
private long insert(String table, String nullColumnHack, ContentValues values, int algorithm) {
SQLiteDatabase db = mDatabase.getWritableDatabase();
return db.insertWithOnConflict(table, nullColumnHack, values, algorithm);
}
public int updateWithOnConflict(String table, ContentValues values, String whereClause, String[] whereArgs, int algorithm) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) {
return update(table, values, whereClause, whereArgs, algorithm);
} else {
return legacyUpdate(table, values, whereClause, whereArgs, algorithm);
}
}
private int legacyUpdate(String table, ContentValues values, String whereClause, String[] whereArgs, int algorithm) {
SQLiteDatabase db = mDatabase.getWritableDatabase();
StringBuilder sql = new StringBuilder();
sql.append("UPDATE ");
sql.append(CONFLICT_VALUES[algorithm]);
sql.append(table);
sql.append(" SET ");
Set<Map.Entry<String, Object>> entrySet = values.valueSet();
Iterator<Map.Entry<String, Object>> entriesIter = entrySet.iterator();
while (entriesIter.hasNext()) {
Map.Entry<String, Object> entry = entriesIter.next();
sql.append(entry.getKey());
sql.append("=?");
if (entriesIter.hasNext()) {
sql.append(",");
}
}
if (!TextUtils.isEmpty(whereClause)) {
sql.append(" WHERE ");
sql.append(whereClause);
}
SQLiteStatement statement = null;
try {
statement = db.compileStatement(sql.toString());
int bindArg = 1;
int size = entrySet.size();
entriesIter = entrySet.iterator();
for (int i = 0; i < size; i++) {
Map.Entry<String, Object> entry = entriesIter.next();
DatabaseUtils.bindObjectToProgram(statement, bindArg, entry.getValue());
bindArg++;
}
if (whereArgs != null) {
size = whereArgs.length;
for (int i = 0; i < size; i++) {
statement.bindString(bindArg, whereArgs[i]);
bindArg++;
}
}
statement.execute();
return -1;
} catch (SQLiteDatabaseCorruptException e) {
throw e;
} catch (SQLException e) {
throw e;
} finally {
if (statement != null) {
statement.close();
}
}
}
private int update(String table, ContentValues values, String whereClause, String[] whereArgs, int algorithm) {
SQLiteDatabase db = mDatabase.getWritableDatabase();
return db.updateWithOnConflict(table, values, whereClause, whereArgs, algorithm);
}
}
|
# Generated by "generate_commands.py"
# Fine-tunes ALBERT-base-v2 on SQuAD, optionally distributed via Horovod.
USE_HOROVOD=${1:-0} # Horovod flag. 0 --> not use horovod, 1 --> use horovod
VERSION=${2:-2.0} # SQuAD Version
DTYPE=${3:-float32} # Default training data type
# Fixed hyper-parameters for this model/run.
MODEL_NAME=google_albert_base_v2
BATCH_SIZE=4
NUM_ACCUMULATED=3  # gradient accumulation steps; effective batch = BATCH_SIZE * NUM_ACCUMULATED * num_workers
EPOCHS=3
LR=2e-05
WARMUP_RATIO=0.1
WD=0.01
MAX_SEQ_LENGTH=512
MAX_GRAD_NORM=1.0
LAYERWISE_DECAY=-1  # -1 disables layer-wise learning-rate decay
# Prepare the Data
nlp_data prepare_squad --version ${VERSION}
# Locate run_squad.py relative to this script's own directory.
RUN_SQUAD_PATH=$(dirname "$0")/../run_squad.py
# Run the script
if [ ${USE_HOROVOD} -eq 0 ];
then
    RUN_COMMAND="python3 ${RUN_SQUAD_PATH} --gpus 0,1,2,3"
else
    RUN_COMMAND="horovodrun -np 4 -H localhost:4 python3 ${RUN_SQUAD_PATH} --comm_backend horovod"
fi
# NOTE(review): "fintune" below looks like a typo for "finetune", but renaming
# it would change the output directory path, so it is left untouched.
${RUN_COMMAND} \
    --model_name ${MODEL_NAME} \
    --data_dir squad \
    --output_dir fintune_${MODEL_NAME}_squad_${VERSION} \
    --version ${VERSION} \
    --do_eval \
    --do_train \
    --batch_size ${BATCH_SIZE} \
    --num_accumulated ${NUM_ACCUMULATED} \
    --layerwise_decay ${LAYERWISE_DECAY} \
    --epochs ${EPOCHS} \
    --lr ${LR} \
    --warmup_ratio ${WARMUP_RATIO} \
    --wd ${WD} \
    --max_seq_length ${MAX_SEQ_LENGTH} \
    --max_grad_norm ${MAX_GRAD_NORM} \
    --dtype ${DTYPE} \
    --overwrite_cache
|
<reponame>kdubiel/bh-events
import { BaseController } from 'interfaces';

/**
 * Controller contract for this module. Currently identical to
 * BaseController; declared separately so module-specific members can be
 * added later without touching call sites.
 */
export interface Controller extends BaseController {}
|
/*
* Copyright 2018 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.examples.arch;
import android.os.Bundle;
import android.support.annotation.MainThread;
import android.support.design.widget.FloatingActionButton;
import android.support.v7.app.AppCompatActivity;
/**
 * Demonstrates retaining a background worker across configuration changes
 * without a ViewModel, using onRetainCustomNonConfigurationInstance().
 */
public class ArchExampleActivity extends AppCompatActivity {
    private FloatingActionButton backgroundJobStartStop;
    private BackgroundTask backgroundTask;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_arch_example);
        setupViews();
        // Recover the task retained across a configuration change, if any.
        backgroundTask = (BackgroundTask) getLastCustomNonConfigurationInstance();
        if (backgroundTask == null) { // this could also live inside a ViewModel, a singleton job queue, etc.
            backgroundTask = new BackgroundTask();
            backgroundTask.start(); // this task will update items in Realm on a background thread.
        }
        updateJobButton();
        // Only add the fragment on a cold start; after a rotation the
        // fragment manager restores it automatically.
        if (savedInstanceState == null) {
            getSupportFragmentManager().beginTransaction()
                    .add(R.id.container, PersonListFragment.create())
                    .addToBackStack(null)
                    .commit();
        }
    }

    @Override
    public Object onRetainCustomNonConfigurationInstance() {
        return backgroundTask; // retain background task through config changes without ViewModel.
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // isFinishing() distinguishes a real exit from a configuration change,
        // where the retained task must keep running.
        if (isFinishing()) {
            if(backgroundTask.isStarted()) {
                backgroundTask.stop(); // make sure job is stopped when exiting the app
            }
        }
    }

    @Override
    public void onBackPressed() {
        // Finish rather than popping the last back-stack entry, so the user
        // never sees an empty container.
        if (getSupportFragmentManager().getBackStackEntryCount() <= 1) {
            finish();
        } else {
            super.onBackPressed();
        }
    }

    /** Wires the FAB to toggle the background task and refresh its icon. */
    @MainThread
    private void setupViews() {
        backgroundJobStartStop = findViewById(R.id.backgroundJobStartStop);
        backgroundJobStartStop.setOnClickListener(v -> {
            if (!backgroundTask.isStarted()) {
                backgroundTask.start();
            } else {
                backgroundTask.stop();
            }
            updateJobButton();
        });
    }

    /** Shows a stop icon while the task runs, a play icon otherwise. */
    private void updateJobButton() {
        if (backgroundTask.isStarted()) {
            backgroundJobStartStop.setImageResource(R.drawable.ic_stop_black_24dp);
        } else {
            backgroundJobStartStop.setImageResource(R.drawable.ic_play_arrow_black_24dp);
        }
    }
}
|
import { DeepPartial, Theme } from "@chakra-ui/react";
import { mode } from "@chakra-ui/theme-tools";
// Chakra UI theme override for the Button component.
// Both variants share the pill shape, uppercase label, and the same
// yellow/purple hover and active palette; mode(light, dark) resolves a
// value per the current color mode.
const Button: DeepPartial<Theme["components"]["Button"]> = {
  variants: {
    // Filled button: violet in light mode, purple in dark mode.
    solid: (props: any) => ({
      bg: mode("violet.300", "purple.500")(props),
      color: mode("purple.700", "violet.50")(props),
      borderRadius: "full",
      textTransform: "uppercase",
      _hover: {
        boxShadow: "lg",
        fontWeight: "bold",
        background: "yellow.500",
        color: "purple.500",
      },
      _active: {
        color: "purple.500",
        bg: "yellow.800",
      },
    }),
    // Outlined button: transparent body, border/text match the solid bg.
    outline: (props: any) => ({
      borderRadius: "full",
      borderWidth: "1px",
      textTransform: "uppercase",
      borderColor: mode("violet.300", "purple.500")(props),
      color: mode("violet.300", "purple.500")(props),
      _hover: {
        boxShadow: "lg",
        fontWeight: "bold",
        background: "yellow.500",
        borderColor: "purple.500",
        color: "purple.500",
      },
      _active: {
        color: "purple.500",
        bg: "yellow.800",
      },
    }),
  },
};
export default Button;
|
#!/bin/bash
# Uninstall helper: removes the SecureGate desktop entry and the Nemo
# context-menu action installed by the package.
PATH_NEMO_CONTEXTMENU=/usr/share/nemo/actions/ContextMenu.nemo_action
PATH_APPLICATIONS_SECUREGATE=/usr/share/applications/SecureGate.desktop
PATH_OPENNETLINK=/opt/hanssak/opennetlink

# Removes the given path if it exists as a file or directory.
# BUG FIX: every use of $1 is now quoted; the original's unquoted expansions
# would word-split or glob paths containing spaces or wildcard characters,
# potentially deleting the wrong files with rm -rf.
function CheckToRemoveFileAndDirectory()
{
    if [ $# -ne 1 ]; then
        echo "Need by 1 parameter"
    elif [ -f "$1" ]; then
        echo "Remove File : $1"
        rm -rf "$1"
    elif [ -d "$1" ]; then
        echo "Remove Directory : $1"
        rm -rf "$1"
    fi;
}

# Remove: start desktop
CheckToRemoveFileAndDirectory "$PATH_APPLICATIONS_SECUREGATE"
# Remove: nemo action
CheckToRemoveFileAndDirectory "$PATH_NEMO_CONTEXTMENU"
# Update Fail
#CheckToRemoveFileAndDirectory "$PATH_OPENNETLINK"
|
import numpy as np
import os
import shutil
import glob
import JSONHelper
import quaternion
import argparse
import os.path as osp
import pickle
import align_utils as utils
def loadMesh(name):
    """Parse a Wavefront OBJ file and return (vertices, faces) arrays.

    Only 'v' and 'f' records are honored. Face entries may use the
    'v/vt/vn' syntax, in which case only the vertex index is kept, and
    only the first three indices of each face are read (quads truncated).
    """
    vertices = []
    faces = []
    with open(name, 'r') as meshIn:
        for raw in meshIn:
            line = raw.strip()
            # Skip blanks and records too short to carry data.
            if len(line) <= 2:
                continue
            if line[0:2] == 'v ':
                coords = [float(tok) for tok in line.split(' ')[1:4]]
                vertices.append(np.array(coords).reshape([1, 3]))
            elif line[0:2] == 'f ':
                idxs = [int(tok.split('/')[0]) for tok in line.split(' ')[1:4]]
                faces.append(np.array(idxs).reshape([1, 3]))
    return np.concatenate(vertices, axis=0), np.concatenate(faces, axis=0)
def writeMesh(name, vertices, faces):
    """Write a triangle mesh to an OBJ file.

    vertices: (N, 3) float array; faces: (M, 3) array of 1-based indices.
    """
    with open(name, 'w') as meshOut:
        for row in vertices:
            meshOut.write('v %.3f %.3f %.3f\n' % (row[0], row[1], row[2]))
        for row in faces:
            meshOut.write('f %d %d %d\n' % (row[0], row[1], row[2]))
def writeScene(name, boxes):
    """Write several (vertices, faces, ...) groups into one OBJ file.

    Face indices of each group are shifted by the number of vertices
    already written, so the 1-based indices stay valid in the merged mesh.
    """
    with open(name, 'w') as meshOut:
        offset = 0
        for group in boxes:
            verts, faces = group[0], group[1]
            for row in verts:
                meshOut.write('v %.3f %.3f %.3f\n' % (row[0], row[1], row[2]))
            for row in faces:
                meshOut.write('f %d %d %d\n' %
                              (row[0] + offset, row[1] + offset, row[2] + offset))
            offset += verts.shape[0]
def computeBox(vertices):
    """Axis-aligned bounding box of a vertex array.

    Returns (corners, faces): 8 corners as float32 in a fixed order
    (bottom ring first: min/max X at minY, then the matching top ring),
    and 12 triangles as int32 with 1-based indices into the corners.
    """
    mins = vertices.min(axis=0)
    maxs = vertices.max(axis=0)
    minX, minY, minZ = mins[0], mins[1], mins[2]
    maxX, maxY, maxZ = maxs[0], maxs[1], maxs[2]
    # Corner order matters: downstream code indexes corners 0, 1 and 3 to
    # recover the box's X and Z axes.
    corner_rows = [
        [minX, minY, minZ], [maxX, minY, minZ], [maxX, minY, maxZ], [minX, minY, maxZ],
        [minX, maxY, minZ], [maxX, maxY, minZ], [maxX, maxY, maxZ], [minX, maxY, maxZ],
    ]
    corners = np.array(corner_rows, dtype=np.float32)
    tri_rows = [
        [1, 2, 3], [1, 3, 4],   # bottom
        [5, 7, 6], [5, 8, 7],   # top
        [1, 6, 2], [1, 5, 6],   # sides
        [2, 7, 3], [2, 6, 7],
        [3, 8, 4], [3, 7, 8],
        [4, 5, 1], [4, 8, 5],
    ]
    faces = np.array(tri_rows, dtype=np.int32)
    return corners, faces
def computeTransform(vertices, t, q, s):
    """Apply scale, rotation and translation (in that order) to vertices.

    Args:
        vertices: (N, 3) array.
        t: translation as a length-3 sequence, or None.
        q: quaternion as a length-4 sequence (w, x, y, z), or None.
        s: per-axis scale as a length-3 sequence, or None.

    Returns:
        (vertices, trans, rotMat, scale) — the transformed vertices plus the
        applied translation (3,), rotation matrix (3, 3) and scale (3,).

    BUG FIX: the original referenced ``trans``/``rotMat``/``scale``
    unconditionally at the return, so passing None for any of t/q/s raised
    NameError. Each now defaults to the identity transform. ``!= None``
    comparisons were also replaced with ``is not None``.
    """
    scale = np.ones(3, dtype=np.float32)
    rotMat = np.eye(3)
    trans = np.zeros(3, dtype=np.float32)
    if s is not None:
        scale = np.array(s, dtype=np.float32).reshape(1, 3)
        vertices = vertices * scale
    if q is not None:
        q = np.quaternion(q[0], q[1], q[2], q[3])
        rotMat = quaternion.as_rotation_matrix(q)
        # Snap rotations that keep Y roughly vertical to an exact +/-Y axis,
        # zeroing any residual tilt out of the rotation matrix.
        if np.abs(rotMat[1, 1]) > 0.5:
            d = rotMat[1, 1]
            rotMat[:, 1] = 0
            rotMat[1, :] = 0
            rotMat[1, 1] = -1 if d < 0 else 1
        vertices = np.matmul(rotMat, vertices.transpose()).transpose()
    if t is not None:
        trans = np.array(t, dtype=np.float32).reshape(1, 3)
        vertices = vertices + trans
    return vertices, trans.squeeze(), rotMat, scale.squeeze()
def checkOverlapApproximate(bverts1, bverts2):
    """Approximate XZ-footprint overlap test between two boxes.

    Projects box 2's bottom four corners onto box 1's local X/Z axes
    (derived from box 1's corners 0, 1 and 3) and reports True when more
    than 70% of box 2's projected footprint lies inside box 1's footprint.
    """
    axis_1 = (bverts1[1, :] - bverts1[0, :]).reshape(1, 3)
    axis_2 = (bverts1[3, :] - bverts1[0, :]).reshape(1, 3)
    xLen = np.sqrt(np.sum(axis_1 * axis_1))
    zLen = np.sqrt(np.sum(axis_2 * axis_2))
    origin = bverts1[0, :]
    rel = bverts2[0:4, :] - origin
    # Scalar projections of box 2's bottom corners onto box 1's axes.
    xCoord = np.sum(rel * axis_1 / xLen, axis=1)
    zCoord = np.sum(rel * axis_2 / zLen, axis=1)
    minX, maxX = xCoord.min(), xCoord.max()
    minZ, maxZ = zCoord.min(), zCoord.max()
    xOverlap = min(maxX, xLen) - max(minX, 0)
    zOverlap = min(maxZ, zLen) - max(minZ, 0)
    if xOverlap < 0 or zOverlap < 0:
        return False
    areaTotal = (maxX - minX) * (maxZ - minZ)
    if (xOverlap * zOverlap) / areaTotal > 0.7:
        return True
    return False
def findSupport(lverts, boxes, cats):
    """Infer which boxes rest on which, and which rest on the floor.

    Args:
        lverts: layout vertices; their minimum Y is taken as the floor height.
        boxes: per-object lists whose first element is the (8, 3) corner array.
        cats: category-id strings. NOTE(review): indexed as cats[n] below, so
            it must be parallel to ``boxes`` — verify callers populate it.

    Returns:
        (floorList, boxList): indices of floor-supported boxes, and for each
        box n the list of box indices that rest on top of it.
    """
    # Find support for every object
    boxList = []
    for n in range(0, len(boxes)):
        bList = []
        # Top surface height of candidate supporter n.
        top = boxes[n][0][:, 1].max()
        for m in range(0, len(boxes)):
            if m != n:
                bverts = boxes[m][0]
                minY, maxY = bverts[:, 1].min(), bverts[:, 1].max()
                bottom = minY
                # m rests on n when m's bottom is near n's top, both relative
                # to m's own height and within 1 unit absolutely.
                if np.abs(top - bottom) < 0.75 * (maxY - minY) and np.abs(top - bottom) < 1:
                    isOverlap = checkOverlapApproximate(boxes[n][0], boxes[m][0])
                    if isOverlap:
                        if m < n:
                            # presumably avoids recording a mutual support
                            # pair in both directions — TODO confirm intent
                            if not n in boxList[m]:
                                bList.append(m)
                        else:
                            bList.append(m)
        boxList.append(bList)
    # Find objects on floor
    floorList = []
    floorHeight = lverts[:, 1].min()
    for n in range(0, len(boxes)):
        isSupported = False
        for bList in boxList:
            if n in bList:
                isSupported = True
                break
        if not isSupported:
            # NOTE(review): these three category ids are special-cased —
            # presumably categories that may hang above the floor, so they
            # must additionally be close to it; confirm against the taxonomy.
            if cats[n] == '03046257' or cats[n] == '03636649' or cats[n] == '02808440':
                bverts = boxes[n][0]
                minY, maxY = bverts[:, 1].min(), bverts[:, 1].max()
                if np.abs(minY - floorHeight) < 1.5 * (maxY - minY) and np.abs(minY - floorHeight) < 1:
                    floorList.append(n)
            else:
                floorList.append(n)
    return floorList, boxList
def adjustHeightBoxes(boxId, boxes, cads, boxList):
    """Recursively lift each box supported by ``boxId`` onto its top face.

    For every box n resting on boxId, translate its bounding box and CAD
    mesh vertically so n's bottom sits exactly on boxId's top, record the
    translation as a ('t', delta) tuple on both entries, then recurse into
    whatever rests on n.

    BUG FIX: the original contained a copy-pasted duplicate of the record/
    recurse lines, which appended the ('t', ...) record to ``cads[n]`` twice
    and recursed up to three times per child, multiply-translating all
    descendants. The duplicates are removed.
    """
    top = boxes[boxId][0][:, 1].max()
    for n in boxList[boxId]:
        bverts = boxes[n][0]
        bottom = bverts[:, 1].min()
        delta = np.array([0, top - bottom, 0]).reshape(1, 3)
        boxes[n][0] = boxes[n][0] + delta
        cads[n][0] = cads[n][0] + delta
        boxes[n].append(('t', delta.squeeze()))
        cads[n].append(('t', delta.squeeze()))
        if len(boxList[n]) != 0:
            adjustHeightBoxes(n, boxes, cads, boxList)
    return
def adjustHeight(lverts, boxes, cads, floorList, boxList):
    """Drop every floor-supported object onto the floor plane.

    The floor height is the minimum Y of the layout vertices. Each box in
    ``floorList`` (and its CAD mesh) is translated so its bottom touches the
    floor, the translation is recorded as a ('t', delta) tuple on both
    entries, and anything stacked on it is restacked recursively.
    """
    # Adjust the height
    floorHeight = lverts[:, 1].min()
    for idx in floorList:
        bottom = boxes[idx][0][:, 1].min()
        shift = np.array([0, floorHeight - bottom, 0]).reshape(1, 3)
        boxes[idx][0] = boxes[idx][0] + shift
        boxes[idx].append(('t', shift.squeeze()))
        cads[idx][0] = cads[idx][0] + shift
        cads[idx].append(('t', shift.squeeze()))
        if len(boxList[idx]) != 0:
            adjustHeightBoxes(idx, boxes, cads, boxList)
    return
def checkPointInPolygon(wallVertices, v):
    """Ray-casting point-in-polygon test in the XZ plane.

    wallVertices: sequence of 3D points whose X/Z coordinates trace the
    polygon; v: a 3D query point. Returns True when (v[0], v[2]) lies
    inside the polygon.
    """
    xs = [pt[0] for pt in wallVertices]
    zs = [pt[2] for pt in wallVertices]
    x, z = v[0], v[2]
    inside = False
    j = len(wallVertices) - 1
    for i in range(len(wallVertices)):
        # Does edge (i, j) straddle the horizontal line through z?
        crosses = (zs[i] < z <= zs[j]) or (zs[j] < z <= zs[i])
        if crosses:
            # X coordinate where the edge intersects that line; each
            # crossing to the left of the point toggles the parity.
            xIntersect = xs[i] + (z - zs[i]) / (zs[j] - zs[i]) * (xs[j] - xs[i])
            if xIntersect < x:
                inside = not inside
        j = i
    return inside
def calLineParam(pt1, pt2 ):
###Calculate line parameters
x1, z1 = pt1
x2, z2 = pt2
a = z1 - z2
b = x2 - x1
c = z2 * x1 - x2 * z1
return a, b, c
def findNearestPt(w1, w2, pts):
    """Orthogonal projection of 2D point ``pts`` onto the infinite line
    through wall endpoints w1 and w2 (all given as (x, z))."""
    x1, z1 = w1
    x2, z2 = w2
    # Implicit line a*x + b*z + c = 0 through w1-w2 (same math as
    # calLineParam, inlined here).
    a = z1 - z2
    b = x2 - x1
    c = z2 * x1 - x2 * z1
    x, z = pts
    norm = a * a + b * b
    new_x = (b * (b * x - a * z) - a * c) / norm
    new_z = (a * (a * z - b * x) - b * c) / norm
    return np.array([new_x, new_z])
def findNearestWall(pt, wallVertices):
    ###Find nearest wall of a point
    # Works in the XZ plane. Returns (minD, result): the distance to the
    # nearest wall segment (or, failing that, the nearest wall corner) and
    # the closest point on it as a 2D (x, z) array.
    minD, result = 100, None  # 100 acts as a large sentinel upper bound
    pt = np.array([pt[0], pt[2]], dtype=np.float32)
    j = len(wallVertices) - 1
    for i in range(len(wallVertices)):
        w1 = np.array([wallVertices[i][0], wallVertices[i][2]], dtype=np.float32)
        w2 = np.array([wallVertices[j][0], wallVertices[j][2]], dtype=np.float32)
        # Perpendicular distance from pt to the line through w1-w2,
        # computed from whichever endpoint is nearer to pt.
        if np.linalg.norm(w1 - pt) < np.linalg.norm(w2 - pt):
            d = np.linalg.norm(np.cross(w2 - w1, w1 - pt)) / np.linalg.norm(w2 - w1)
        else:
            d = np.linalg.norm(np.cross(w2 - w1, w2 - pt)) / np.linalg.norm(w2 - w1)
        if d < minD:
            nearestPt = findNearestPt(w1, w2, pt)
            denom, nom = w1 - w2, w1 - nearestPt
            # Avoid division by zero when the segment is axis-aligned by
            # substituting the nonzero component into the zero one.
            if (np.sum(denom == 0)):
                denom[denom == 0] = denom[denom != 0]
            check = nom / denom
            # Accept only projections that fall between the two endpoints
            # (parameter strictly inside (0, 1) on average).
            if np.mean(check) < 1 and np.mean(check) > 0:
                minD = d
                result = nearestPt
        j = i
    # Fall back to the nearest wall corner if it beats every segment.
    for i in range(len(wallVertices)):
        w1 = np.array([wallVertices[i][0], wallVertices[i][2]], dtype=np.float32)
        d = np.linalg.norm(w1 - pt)
        if d < minD:
            minD = d
            result = w1
    return minD, result
def moveBox(record):
    """Translation (dx, dz) that moves the offending 3D corner ``pt`` onto
    ``nearestPt``, which is a 2D (x, z) wall point."""
    pt, nearestPt = record
    dx = nearestPt[0] - pt[0]
    dz = nearestPt[1] - pt[2]
    return (dx, dz)
def moveBoxInWall(cverts, bboxes, cads, threshold=0.3):
    """Push boxes that poke through the room contour back inside it.

    Args:
        cverts: contour vertices; those near the floor height define the
            wall polygon in the XZ plane.
        bboxes: per-object lists whose first element is the corner array;
            translated in place, with a ('t', trans) record appended.
        cads: matching CAD entries, translated identically.
        threshold: translations longer than this flag the layout as
            low-quality via the second return value.

    Returns:
        (isMove, isBeyondRange): whether anything moved, and whether any
        required move exceeded ``threshold``.

    BUG FIX: the original iterated the module-level global ``boxes`` instead
    of the ``bboxes`` parameter; it only worked because callers happened to
    pass that global in. The parameter is now used throughout.
    """
    # Wall polygon: contour vertices lying on (or very near) the floor plane.
    wallVertices = []
    floorHeight = cverts[:, 1].min()
    for n in range(0, cverts.shape[0]):
        vert = cverts[n, :]
        if np.abs(vert[1] - floorHeight) < 0.1:
            wallVertices.append(vert)
    isMove = False
    isBeyondRange = False
    for n in range(0, len(bboxes)):
        box = bboxes[n]
        maxD, record = 0, None
        bverts = box[0]
        # Find the corner sticking furthest outside the wall polygon.
        for m in range(0, bverts.shape[0]):
            v = bverts[m, :]
            if not checkPointInPolygon(wallVertices, v):
                d, nearestPt = findNearestWall(v, wallVertices)
                if maxD < d:
                    record = (v, nearestPt)
                    maxD = d
        if record is not None:
            t_x, t_z = moveBox(record)
            trans = np.array([t_x, 0, t_z], dtype=np.float32)
            if np.linalg.norm(trans) > threshold:
                isBeyondRange = True
            if np.linalg.norm(trans) >= 1e-7:
                isMove = True
                # Nudge slightly past the wall so the corner lands strictly inside.
                direc = trans / np.linalg.norm(trans)
                trans = trans + direc * 0.04
                bboxes[n][0] = bboxes[n][0] + trans.reshape(1, 3)
                bboxes[n].append(('t', trans.squeeze()))
                cads[n][0] = cads[n][0] + trans.reshape(1, 3)
                cads[n].append(('t', trans.squeeze()))
    return isMove, isBeyondRange
if __name__ == '__main__':
    # Align Scan2CAD CAD models inside their ScanNet room layouts: load each
    # scan's layout and models, settle objects onto their supporters and the
    # floor, push stragglers back inside the walls, and write the results.
    parser = argparse.ArgumentParser()
    parser.add_argument('--out', default="./xml/", help="outDir of xml file")
    parser.add_argument('--isOutputAll', action='store_true')
    parser.add_argument('--threshold', type=float, default=0.3, help='the threshold to decide low quality mesh.')
    parser.add_argument('--rs', type=int, default=0, help='the starting point')
    parser.add_argument('--re', type=int, default=1600, help='the end point')
    opt = parser.parse_args()

    params = JSONHelper.read("./Parameters.json")
    filename_json = params["scan2cad"]
    shapeNetRoot = params["shapenetAbs"]
    layoutRoot = params["scannet_layoutAbs"]

    # Truncate the low-quality log at the start of every run.
    with open('lowQualLayout.txt', 'w') as fOut:
        fOut.write('#Low quality layouts\n')

    sceneCnt = 0
    for r in JSONHelper.read(filename_json):
        # BUG FIX: the original incremented sceneCnt AFTER the range check's
        # `continue`, so with --rs > 0 the counter never advanced and every
        # scene was skipped. Track the index before testing the range.
        sceneIdx = sceneCnt
        sceneCnt += 1
        if not (sceneIdx >= opt.rs and sceneIdx < opt.re):
            continue
        id_scan = r["id_scan"]
        print('%d: %s' % (sceneCnt, id_scan))
        outDir = osp.abspath(opt.out + "/" + id_scan)
        os.system('mkdir -p %s' % outDir)

        layOutFile = osp.join(layoutRoot, id_scan, id_scan + '.obj')
        contourFile = osp.join(layoutRoot, id_scan, id_scan + '_contour.obj')

        # One scan-level transform applied to both the layout and contour.
        t = r['trs']['translation']
        q = r['trs']['rotation']
        s = r['trs']['scale']

        lverts, lfaces = loadMesh(layOutFile)
        # (a no-op self-assignment of lverts columns was removed here)
        lverts, trans, rot, scale = computeTransform(lverts, t, q, s)
        layout = [lverts, lfaces, ('s', scale), ('rot', rot), ('t', trans)]

        cverts, cfaces = loadMesh(contourFile)
        cverts, trans, rot, scale = computeTransform(cverts, t, q, s)

        boxes = []
        cads = []
        cats = []
        # Load the shapes
        for model in r["aligned_models"]:
            t = model["trs"]["translation"]
            q = model["trs"]["rotation"]
            s = model["trs"]["scale"]
            id_cad = model["id_cad"]
            catid_cad = model["catid_cad"]
            cad_file = osp.join(shapeNetRoot, catid_cad, id_cad, 'alignedNew.obj')
            if not osp.isfile(cad_file):
                continue
            vertices, faces = loadMesh(cad_file)
            bverts, bfaces = computeBox(vertices)
            bverts, trans, rot, scale = computeTransform(bverts, t, q, s)
            vertices, _, _, _ = computeTransform(vertices, t, q, s)
            boxes.append([bverts, bfaces, ('s', scale), ('rot', rot), ('t', trans)])
            cads.append([vertices, faces, ('s', scale), ('rot', rot), ('t', trans)])
            # BUG FIX: cats was never populated although findSupport indexes
            # cats[n] for every box, which raised IndexError on any scene
            # with unsupported objects. Keep it parallel to `boxes`.
            cats.append(catid_cad)

        # NOTE(review): 'scenBoxes' looks like a typo for 'sceneBoxes', but it
        # is an on-disk path; renaming would break downstream consumers.
        sceneDir = osp.join(outDir, 'scenBoxes')
        os.system('mkdir %s' % sceneDir)
        sceneOrigName = osp.join(sceneDir, 'boxesOrigin.obj')
        writeScene(sceneOrigName, boxes + [layout])
        if opt.isOutputAll:
            sceneOrigName = osp.join(sceneDir, 'cadsOrigin.obj')
            writeScene(sceneOrigName, cads + [layout])

        # Build the relationship and adjust heights
        floorList, boxList = findSupport(lverts, boxes, cats)
        adjustHeight(lverts, boxes, cads, floorList, boxList)

        # Push the boxes to be inside the room (iterate until stable, at
        # most 5 passes, or until a move exceeds the threshold).
        isMove, isBeyondRange = moveBoxInWall(cverts, boxes, cads, opt.threshold)
        cnt = 0
        while isMove and not isBeyondRange:
            isMove, isBeyondRange = moveBoxInWall(cverts, boxes, cads, opt.threshold)
            print('IterNum %d' % cnt)
            cnt += 1
            if cnt == 5 or not isMove or isBeyondRange:
                break

        if isBeyondRange:
            # The furniture cannot fit inside the walls within tolerance:
            # record the scan as a low-quality layout.
            with open('lowQualLayout.txt', 'a') as fOut:
                fOut.write('%s\n' % id_scan)
        else:
            # Write new boxes
            sceneNewName = osp.join(sceneDir, 'boxesNew.obj')
            writeScene(sceneNewName, boxes + [layout])
            sceneNewName = osp.join(sceneDir, 'cadsNew.obj')
            writeScene(sceneNewName, cads + [layout])
|
#!/usr/bin/env bash
# shellcheck disable=SC1090,SC2154
# Copyright 2020 Istio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Doc test for the circuit-breaking task: abort on any error, unset
# variable, or failure within a pipeline.
set -e
set -u
set -o pipefail

# snips.sh supplies the snip_* commands extracted from the task page;
# samples.sh supplies the helpers (startup_httpbin_sample, _verify_*, ...).
source "${REPO_ROOT}/content/en/docs/tasks/traffic-management/circuit-breaking/snips.sh"
source "${REPO_ROOT}/tests/util/samples.sh"

kubectl label namespace default istio-injection=enabled --overwrite

# Launch the httpbin sample
startup_httpbin_sample

# Create destination rule
snip_configuring_the_circuit_breaker_1

# Confirm destination rule set
_verify_elided snip_configuring_the_circuit_breaker_2 "$snip_configuring_the_circuit_breaker_2_out"

# Deploy fortio client
snip_adding_a_client_1
_wait_for_deployment default fortio-deploy

# Make one call to httpbin
_verify_contains snip_adding_a_client_3 "HTTP/1.1 200 OK"

# FIXME / TODO: These tests previously relied on checking that the
# percentage of 200 and 503 responses fell within a given range. That
# turned out to be flaky, so for now they are only checking that both
# 200 and 503 responses are recorded, and ignoring the number of each.
# That should be fixed at some point.
#
# Original PR: https://github.com/istio/istio.io/pull/6609
# Temporary fix: https://github.com/istio/istio.io/pull/7043
# Issue: https://github.com/istio/istio.io/issues/7074

# Make requests with 2 connections
_verify_lines snip_tripping_the_circuit_breaker_1 "
+ Code 200 :
+ Code 503 :
"

# Make requests with 3 connections
_verify_lines snip_tripping_the_circuit_breaker_3 "
+ Code 200 :
+ Code 503 :
"

# Query the istio-proxy stats; "..." wildcards match the actual counts.
expected="cluster.outbound|8000||httpbin.istio-io-circuitbreaker.svc.cluster.local.circuit_breakers.default.rq_pending_open: ...
cluster.outbound|8000||httpbin.istio-io-circuitbreaker.svc.cluster.local.circuit_breakers.high.rq_pending_open: ...
cluster.outbound|8000||httpbin.istio-io-circuitbreaker.svc.cluster.local.upstream_rq_pending_active: ...
cluster.outbound|8000||httpbin.istio-io-circuitbreaker.svc.cluster.local.upstream_rq_pending_failure_eject: ...
cluster.outbound|8000||httpbin.istio-io-circuitbreaker.svc.cluster.local.upstream_rq_pending_overflow: ...
cluster.outbound|8000||httpbin.istio-io-circuitbreaker.svc.cluster.local.upstream_rq_pending_total: ..."
_verify_like snip_tripping_the_circuit_breaker_5 "$expected"
|
<filename>src/main/java/com/neusoft/mapper/ContactRecordMapper.java
package com.neusoft.mapper;
import com.neusoft.entity.ContactRecord;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
/**
 * <p>
 * MyBatis-Plus mapper interface for contact/interaction records
 * (original Chinese doc: "交往记录 Mapper 接口"); inherits the standard
 * CRUD operations from {@code BaseMapper}.
 * </p>
 *
 * @author CDHong
 * @since 2018-11-22
 */
public interface ContactRecordMapper extends BaseMapper<ContactRecord> {
}
|
package com.ibm.socialcrm.notesintegration.files.dialogs;
/****************************************************************
* IBM OpenSource
*
* (C) Copyright IBM Corp. 2012
*
* Licensed under the Apache License v2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
***************************************************************/
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import org.apache.commons.json.JSONArray;
import org.apache.commons.json.JSONException;
import org.apache.commons.json.JSONObject;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.layout.GridLayoutFactory;
import org.eclipse.jface.resource.JFaceColors;
import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.jface.util.Geometry;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.RowLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.ProgressBar;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.events.HyperlinkAdapter;
import org.eclipse.ui.forms.events.HyperlinkEvent;
import org.eclipse.ui.forms.widgets.Hyperlink;
import org.eclipse.ui.progress.UIJob;
import com.ibm.socialcrm.notesintegration.core.CorePluginActivator;
import com.ibm.socialcrm.notesintegration.core.DocumentInfo;
import com.ibm.socialcrm.notesintegration.core.SugarAccount;
import com.ibm.socialcrm.notesintegration.core.SugarContact;
import com.ibm.socialcrm.notesintegration.core.SugarOpportunity;
import com.ibm.socialcrm.notesintegration.core.utils.SugarWebservicesOperations;
import com.ibm.socialcrm.notesintegration.ui.connector.AssociateData;
import com.ibm.socialcrm.notesintegration.ui.dashboardpanels.SugarItemsDashboard;
import com.ibm.socialcrm.notesintegration.ui.utils.UiUtils;
import com.ibm.socialcrm.notesintegration.utils.ConstantStrings;
import com.ibm.socialcrm.notesintegration.utils.GenericUtils;
import com.ibm.socialcrm.notesintegration.utils.SFAImageManager;
import com.ibm.socialcrm.notesintegration.utils.UtilsPlugin;
import com.ibm.socialcrm.notesintegration.utils.UtilsPluginNLSKeys;
import com.ibm.socialcrm.notesintegration.utils.GenericUtils.SugarType;
/**
* When we detect name conflicts with file uploads, we can use this dialog to help the user resolve the conflicts by renaming the files for upload or removing them from the upload operation.
*/
public class FileUploadConflictResolutionDialog extends Dialog {
    // Key under which each progress row stores its id; see createProgressIndicator()/removeProgressIndicator().
    private static final String PROGRESS_BAR_ID = "progressBarId"; //$NON-NLS-1$
    private Shell _shell;
    private Composite _parent;
    private Composite _errorComposite;       // top banner (icon + conflict message)
    private Label _errorMsgLabel;            // red validation message, toggled via setControlVisible()
    private Label _versionLabel;             // current version of the existing document
    private Label _newVersionLabel;          // bumped version shown for the "new version" option
    private Label _noAssociationLabel;       // shown when the document has no Sugar associations
    private Text _renameText;                // editable name for the "rename" option
    private Button _newVersionButton;
    private Label _newVersionNameLabel;
    private Button _renameButton;
    private Composite _associationComposite; // populated asynchronously by retrieveAssociationData()
    private Composite _progressComposite = null;
    private SelectionListener _newVersionSelectionListener;
    private Color _errorCompositeColor;      // lazily created; disposed with _errorComposite
    private Font _normalFont;                // cached hyperlink font, see getSmallerFont()
    private int _maxLabelWidth;              // cached by getMaxLabelWidth(); -1 means "not computed"
    private File _file;
    /*
     * contains orig. document information retrieved from Connections server... Later, the GetDocumentRelationships API will update the sugarDocumentID field.
     */
    private DocumentInfo _documentInfo;
    // contains association information returned from the GetDocumentRelationships API.
    // Outer key: Connections UUID; inner key: Sugar type; value: associated items.
    private Map<String, Map<String, List<AssociateData>>> _sugarEntries = new HashMap<String, Map<String, List<AssociateData>>>();
    /**
     * contains the new document information. If user cancelled this dialog, _newDocumentInfo will remain null.
     */
    private DocumentInfo _newDocumentInfo = null;
    // Document names already in use; a rename must avoid these (see isNameTaken()).
    private List<String> _namesTaken = null;
    /**
     * I realize this constructor is a bit strange, but there are times when we may want to have a different display name to present to the user than the actual file. Hence, we pass in a map of File
     * object to displayNames
     *
     * @param shell parent shell
     * @param file the file being uploaded
     * @param documentInfo the existing server document the upload conflicts with
     * @param namesTaken document names already in use, for rename validation
     */
    public FileUploadConflictResolutionDialog(Shell shell, File file, DocumentInfo documentInfo, List<String> namesTaken) {
        super(shell);
        // Modeless with resize/max so the user can keep working in the workbench.
        setShellStyle(SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MODELESS);
        this._shell = shell;
        this._file = file;
        this._documentInfo = documentInfo;
        this._namesTaken = namesTaken;
    }
    /**
     * Installs shell-level listeners: ESC closes the dialog, and resizes are
     * recorded into the shell's GridData hints so later pack() calls keep the
     * user-chosen size.
     */
    private void setShellListeners() {
        // Close the shell when the user presses ESC
        getShell().addListener(SWT.Traverse, new Listener() {
            public void handleEvent(Event e) {
                if (e.detail == SWT.TRAVERSE_ESCAPE) {
                    close();
                }
            }
        });
        // Listener to listen for shell resize
        getShell().addListener(SWT.Resize, new Listener() {
            public void handleEvent(Event e) {
                // Don't save the preferences when the user maximizes the size
                if (!getShell().getMaximized()) {
                    // Set the explicit size of the parent composite so that if we add a progress composite, the shell
                    // expands to accommodate the new widget rather than shrink the parentComposite within the shell.
                    Point point = getShell().getSize();
                    if (getShell() != null && !getShell().isDisposed()) {
                        // The GridData was set in configureShell().
                        ((GridData) getShell().getLayoutData()).widthHint = point.x;
                        ((GridData) getShell().getLayoutData()).heightHint = point.y;
                    }
                }
            }
        });
    }
    @Override
    protected void configureShell(Shell shell) {
        super.configureShell(shell);
        // This GridData is what the resize listener in setShellListeners() updates.
        shell.setLayoutData(GridDataFactory.fillDefaults().create());
        shell.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_TITLE));
    }
    /**
     * Builds the dialog body: error banner, file/options section, validation
     * message area and hidden progress area, then kicks off the asynchronous
     * association lookup.
     */
    @Override
    protected Control createDialogArea(Composite parent) {
        _parent = parent;
        parent = (Composite) super.createDialogArea(parent);
        createErrorComposite(parent);
        parent.setLayout(GridLayoutFactory.fillDefaults().numColumns(1).margins(10, 10).create());
        buildFileComposite(parent);
        parent.setBackground(JFaceColors.getBannerBackground(Display.getDefault()));
        createErrorMsgComposite(parent);
        createProgressComposite();
        // Asynchronous; fills _associationComposite when the web service returns.
        retrieveAssociationData();
        setShellListeners();
        return parent;
    }
    /**
     * Helper method to create the error composite (banner with error icon and
     * the single-conflict message).
     *
     * @param parent the dialog-area composite
     */
    private void createErrorComposite(Composite parent) {
        _errorComposite = new Composite(parent, SWT.BORDER);
        _errorComposite.setLayout(GridLayoutFactory.fillDefaults().numColumns(2).equalWidth(false).margins(10, 10).create());
        _errorComposite.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).create());
        // Dispose the lazily created banner color together with the banner itself.
        _errorComposite.addDisposeListener(new DisposeListener() {
            @Override
            public void widgetDisposed(DisposeEvent arg0) {
                getErrorCompositeColor().dispose();
            }
        });
        Label errorIcon = new Label(_errorComposite, SWT.NONE);
        errorIcon.setImage(SFAImageManager.getImage(SFAImageManager.ERROR_ICON));
        errorIcon.setLayoutData(GridDataFactory.fillDefaults().align(SWT.BEGINNING, SWT.BEGINNING).create());
        Label errorLabel = new Label(_errorComposite, SWT.WRAP);
        errorLabel.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_SINGLE_CONFLICT));
        errorLabel.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).hint(620, SWT.DEFAULT).create());
        UiUtils.recursiveSetBackgroundColor(_errorComposite, getErrorCompositeColor());
    }
    /*
     * error message: red validation label shown/hidden via setControlVisible()
     * as the user interacts with the options.
     */
    private void createErrorMsgComposite(Composite parent) {
        _errorMsgLabel = new Label(parent, SWT.WRAP);
        _errorMsgLabel.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).hint(620, SWT.DEFAULT).create());
        _errorMsgLabel.setForeground(Display.getDefault().getSystemColor(SWT.COLOR_RED));
        _errorMsgLabel.setBackground(JFaceColors.getBannerBackground(Display.getDefault()));
        // NOTE(review): passing true leaves the (still empty) label visible
        // initially; confirm whether this was meant to be false.
        setControlVisible(_errorMsgLabel, true);
    }
    /**
     * Builds the file section: the current document name and version, its
     * Sugar associations (populated asynchronously), and the two resolution
     * options — upload as a new version, or rename the document.
     *
     * @param parent the dialog-area composite
     */
    private void buildFileComposite(final Composite parent) {
        final Composite fileComposite = new Composite(parent, SWT.NONE);
        fileComposite.setLayout(GridLayoutFactory.fillDefaults().equalWidth(false).numColumns(3).spacing(15, 5).create());
        fileComposite.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).create());
        // Document:
        Label documentLabel = new Label(fileComposite, SWT.NONE);
        documentLabel.setLayoutData(GridDataFactory.fillDefaults().create());
        documentLabel.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_DOCUMENT));
        Label documentNameLabel = new Label(fileComposite, SWT.NONE);
        documentNameLabel.setLayoutData(GridDataFactory.fillDefaults().grab(false, false).create());
        documentNameLabel.setText(_documentInfo == null ? ConstantStrings.EMPTY_STRING : _documentInfo.getDocumentName());
        _versionLabel = new Label(fileComposite, SWT.NONE);
        _versionLabel.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).create());
        _versionLabel.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_VERSION,
                new String[]{_documentInfo == null ? ConstantStrings.EMPTY_STRING : _documentInfo.getVersion()}));
        // Associations:
        Label associationLabel = new Label(fileComposite, SWT.NONE);
        associationLabel.setLayoutData(GridDataFactory.fillDefaults().grab(false, false).create());
        associationLabel.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_ASSOCIATIONS));
        _associationComposite = new Composite(fileComposite, SWT.NONE);
        _associationComposite.setLayout(GridLayoutFactory.fillDefaults().numColumns(1).margins(0, 0).spacing(0, 0).create());
        _associationComposite.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).span(2, 1).create());
        _noAssociationLabel = new Label(_associationComposite, SWT.WRAP);
        _noAssociationLabel.setLayoutData(GridDataFactory.fillDefaults().hint(570, SWT.DEFAULT).grab(true, false).create());
        _noAssociationLabel.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_NO_ASSOCIATIONS));
        setControlVisible(_noAssociationLabel, false);
        // will create association items after getDocumentRelationships WS is done.
        // createAssociationItems(_associationComposite);
        Composite optionComposite = new Composite(fileComposite, SWT.NONE);
        Rectangle margins = Geometry.createDiffRectangle(0, 0, 15, 15);
        optionComposite.setLayout(GridLayoutFactory.fillDefaults().numColumns(3).extendedMargins(margins).create());
        optionComposite.setLayoutData(GridDataFactory.fillDefaults().span(3, 1).grab(true, false).create());
        // Create a new version
        _newVersionButton = new Button(optionComposite, SWT.RADIO);
        _newVersionButton.setLayoutData(GridDataFactory.fillDefaults().create());
        _newVersionButton.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_CREATE_NEW_VERSION));
        addNewVersionButtonListener();
        _newVersionNameLabel = new Label(optionComposite, SWT.NONE);
        _newVersionNameLabel.setLayoutData(GridDataFactory.fillDefaults().create());
        _newVersionNameLabel.setText(_documentInfo == null ? ConstantStrings.EMPTY_STRING : _documentInfo.getDocumentName());
        _newVersionLabel = new Label(optionComposite, SWT.NONE);
        _newVersionLabel.setLayoutData(GridDataFactory.fillDefaults().create());
        _newVersionLabel.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_VERSION, new String[]{getOneUpVersion()}));
        // Rename the document
        _renameButton = new Button(optionComposite, SWT.RADIO);
        _renameButton.setLayoutData(GridDataFactory.fillDefaults().create());
        _renameButton.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_CONFLICT_RENAME_DOCUMENT));
        addRenameButtonListener();
        _renameText = new Text(optionComposite, SWT.BORDER);
        _renameText.setLayoutData(GridDataFactory.fillDefaults().span(2, 1).grab(true, false).create());
        _renameText.setBackground(null);
        _renameText.setText(_documentInfo == null ? ConstantStrings.EMPTY_STRING : _documentInfo.getDocumentName());
        // Disabled until the user selects the rename radio button.
        _renameText.setEnabled(false);
        addRenameTextListener();
        UiUtils.recursiveSetBackgroundColor(fileComposite, JFaceColors.getBannerBackground(Display.getDefault()));
    }
    /**
     * Wires the "create new version" radio button: selecting it enables the
     * version labels, disables the rename text field, and clears any pending
     * validation message. The listener is removed again on dispose.
     */
    private void addNewVersionButtonListener() {
        _newVersionSelectionListener = new SelectionListener() {
            @Override
            public void widgetDefaultSelected(SelectionEvent arg0) {
                // Not used for radio buttons.
            }
            @Override
            public void widgetSelected(SelectionEvent arg0) {
                _newVersionNameLabel.setEnabled(_newVersionButton.getSelection());
                _newVersionLabel.setEnabled(_newVersionButton.getSelection());
                _renameText.setEnabled(!_newVersionButton.getSelection());
                setControlVisible(_errorMsgLabel, false);
                getShell().pack(true);
                getShell().layout(true);
            }
        };
        _newVersionButton.addSelectionListener(_newVersionSelectionListener);
        _newVersionButton.addDisposeListener(new DisposeListener() {
            @Override
            public void widgetDisposed(DisposeEvent arg0) {
                if (_newVersionSelectionListener != null) {
                    _newVersionButton.removeSelectionListener(_newVersionSelectionListener);
                }
            }
        });
    }
    /**
     * Wires the "rename" radio button: selecting it enables the rename text
     * field, disables the new-version labels, and clears any pending
     * validation message. The listener is removed again on dispose.
     */
    private void addRenameButtonListener() {
        final SelectionListener renameSelectionListener = new SelectionListener() {
            @Override
            public void widgetDefaultSelected(SelectionEvent arg0) {
                // Not used for radio buttons.
            }
            @Override
            public void widgetSelected(SelectionEvent arg0) {
                _renameText.setEnabled(_renameButton.getSelection());
                _newVersionNameLabel.setEnabled(!_renameButton.getSelection());
                _newVersionLabel.setEnabled(!_renameButton.getSelection());
                setControlVisible(_errorMsgLabel, false);
                getShell().pack(true);
                getShell().layout(true);
            }
        };
        _renameButton.addSelectionListener(renameSelectionListener);
        _renameButton.addDisposeListener(new DisposeListener() {
            @Override
            public void widgetDisposed(DisposeEvent arg0) {
                if (renameSelectionListener != null) {
                    _renameButton.removeSelectionListener(renameSelectionListener);
                }
            }
        });
    }
    /**
     * Hides the validation message as soon as the user edits the rename text,
     * so stale "name taken" errors disappear while typing. The listener is
     * removed again on dispose.
     */
    private void addRenameTextListener() {
        final ModifyListener renameTextModifyListener = new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent arg0) {
                if (_errorMsgLabel.isVisible()) {
                    setControlVisible(_errorMsgLabel, false);
                    getShell().pack(true);
                    getShell().layout(true);
                }
            }
        };
        _renameText.addModifyListener(renameTextModifyListener);
        _renameText.addDisposeListener(new DisposeListener() {
            @Override
            public void widgetDisposed(DisposeEvent arg0) {
                if (renameTextModifyListener != null) {
                    _renameText.removeModifyListener(renameTextModifyListener);
                }
            }
        });
    }
    /**
     * Shows or hides a control and excludes it from grid layout when hidden,
     * so a hidden control does not reserve blank space in the dialog.
     */
    private void setControlVisible(Control c, boolean b) {
        ((GridData) c.getLayoutData()).exclude = !b;
        c.setVisible(b);
    }
private String getOneUpVersion() {
String oneUpX = ConstantStrings.EMPTY_STRING;
int oneUp = 1;
try {
if (_documentInfo != null) {
String s = _documentInfo.getVersion();
oneUp = Integer.valueOf(s).intValue() + 1;
oneUpX = String.valueOf(oneUp);
}
} catch (Exception e) {
}
return oneUpX;
}
    /**
     * Renders one association row: a type label ("Accounts:", ...) followed by
     * a wrapping row of hyperlinks, one per associated Sugar item. Clicking a
     * link opens the item in the browser while a progress row is shown.
     *
     * @param parent container (normally _associationComposite)
     * @param type Sugar type id for every item in this row
     * @param items associated items of that type
     */
    private void createAssociationItems(Composite parent, final String type, List<AssociateData> items) {
        // Reset the cache so the label column width is recomputed for this render.
        _maxLabelWidth = -1;
        _maxLabelWidth = getMaxLabelWidth();
        Composite associateComposite = new Composite(parent, SWT.NONE);
        associateComposite.setLayout(GridLayoutFactory.fillDefaults().spacing(0, 0).numColumns(2).create());
        associateComposite.setLayoutData(GridDataFactory.fillDefaults().indent(0, 0).grab(true, true).create());
        Label accountsLabel = new Label(associateComposite, SWT.NONE);
        // Fixed width so the label columns of all association rows line up.
        accountsLabel.setLayoutData(GridDataFactory.fillDefaults().hint(_maxLabelWidth, SWT.DEFAULT).indent(0, SWT.DEFAULT).span(1, 1).create());
        accountsLabel.setBackground(associateComposite.getBackground());
        accountsLabel.setText(getTypeWithColon(type, items.size()));
        Composite linkComposite = new Composite(associateComposite, SWT.NONE);
        linkComposite.setLayoutData(GridDataFactory.fillDefaults().align(SWT.FILL, SWT.TOP).hint(570, SWT.DEFAULT).grab(true, false).indent(10, 0).create());
        linkComposite.setBackground(associateComposite.getBackground());
        // RowLayout so the link items flow and wrap like text.
        RowLayout layout = new RowLayout();
        layout.wrap = true;
        layout.pack = true;
        linkComposite.setLayout(layout);
        for (int i = 0; i < items.size(); i++) {
            // put image and item name in a Composite, so, wrapping logic will treat both as 1 unit.
            Composite itemComposite = new Composite(linkComposite, SWT.NONE);
            itemComposite.setLayout(GridLayoutFactory.fillDefaults().spacing(0, 0).numColumns(2).create());
            itemComposite.setBackground(associateComposite.getBackground());
            Label popoutLinkLabel = new Label(itemComposite, SWT.NONE);
            popoutLinkLabel.setImage(SFAImageManager.getImage(SFAImageManager.EXTERNAL_LINK));
            popoutLinkLabel.setLayoutData(GridDataFactory.fillDefaults().indent(10, 0).hint(15, SWT.DEFAULT).create());
            popoutLinkLabel.setToolTipText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.OPEN_IN_SUGAR));
            // Don't forget to pack me
            popoutLinkLabel.pack();
            final Hyperlink accountsLink = new Hyperlink(itemComposite, SWT.NONE);
            accountsLink.setForeground(SugarItemsDashboard.getInstance().getBusinessCardLinkColor());
            accountsLink.setBackground(associateComposite.getBackground());
            accountsLink.setFont(getSmallerFont() /* SugarItemsDashboard.getInstance().getNormalFontForBusinessCardData() */); // acw-???
            // Comma-separate every item except the last.
            StringBuffer sb = new StringBuffer(items.get(i).getName());
            sb.append((i == (items.size() - 1)) ? ConstantStrings.EMPTY_STRING : ConstantStrings.COMMA);
            sb.append(ConstantStrings.SPACE);
            accountsLink.setText(sb.toString());
            // don't forget to pack me
            accountsLink.pack();
            final String id = items.get(i).getId();
            // Only items with a Sugar id become clickable links.
            if (id != null && !id.equalsIgnoreCase(ConstantStrings.EMPTY_STRING)) {
                final HyperlinkAdapter accountsLinkListener = new HyperlinkAdapter() {
                    @Override
                    public void linkActivated(HyperlinkEvent evt) {
                        final String progressId = createProgressIndicator(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UI_ITEM_RETRIEVING_PROGRESS_MESSAGE));
                        if (type.equalsIgnoreCase(SugarType.CONTACTS.getType())) {
                            SugarContact contact = new SugarContact(id, null);
                            GenericUtils.launchUrlInPreferredBrowser(contact.getSugarUrl(), true);
                        } else if (type.equalsIgnoreCase(SugarType.OPPORTUNITIES.getType())) {
                            SugarOpportunity oppty = new SugarOpportunity(id, null);
                            GenericUtils.launchUrlInPreferredBrowser(oppty.getSugarUrl(), true);
                        } else if (type.equalsIgnoreCase(SugarType.ACCOUNTS.getType())) {
                            SugarAccount account = new SugarAccount(id, null);
                            GenericUtils.launchUrlInPreferredBrowser(account.getSugarUrl(), true);
                        }
                        UIJob removeProgressIndicatorUIjob = new UIJob("Remove Progress Indicator") //$NON-NLS-1$
                        {
                            @Override
                            public IStatus runInUIThread(IProgressMonitor arg0) {
                                removeProgressIndicator(progressId);
                                return Status.OK_STATUS;
                            }
                        };
                        // Set rule so the job will be executed in the correct
                        // order.
                        removeProgressIndicatorUIjob.setRule(UiUtils.DISPLAY_SUGAR_ITEM_BY_ID_JOB_RULE);
                        removeProgressIndicatorUIjob.schedule();
                    }
                };
                accountsLink.addHyperlinkListener(accountsLinkListener);
                accountsLink.addDisposeListener(new DisposeListener() {
                    @Override
                    public void widgetDisposed(DisposeEvent arg0) {
                        if (accountsLinkListener != null) {
                            accountsLink.removeHyperlinkListener(accountsLinkListener);
                        }
                    }
                });
            }
            itemComposite.pack();
        }
        UiUtils.recursiveSetBackgroundColor(parent, JFaceColors.getBannerBackground(Display.getDefault()));
        parent.layout(true);
        _parent.layout(true);
        _shell.layout();
    }
    // Font look tiny in Mac for some reason. So if we're on a Mac, bump the font size up
    private int macFontSizeAdjustment = GenericUtils.isMac() ? 4 : 0;
    /**
     * Returns the font used for the association hyperlinks, caching it in the
     * JFace font registry on first use.
     * NOTE(review): the registry key stays "Arial-10-normal" even though the
     * Mac adjustment makes the actual size 14 — confirm no other code relies
     * on this key meaning 10pt before renaming it.
     */
    public Font getSmallerFont() {
        if (_normalFont == null) {
            String fontName = "Arial-10-normal"; //$NON-NLS-1$
            if (JFaceResources.getFontRegistry().hasValueFor(fontName)) {
                _normalFont = JFaceResources.getFontRegistry().get(fontName);
            } else {
                JFaceResources.getFontRegistry().put(fontName, new FontData[]{new FontData("Arial", 10 + macFontSizeAdjustment, SWT.NORMAL)}); //$NON-NLS-1$
                _normalFont = JFaceResources.getFontRegistry().get(fontName);
            }
        }
        return _normalFont;
    }
    // Create progress bar composite in the Shell. Will set it to visible when
    // the Progress Bar
    // Indicator is needed.
    private void createProgressComposite() {
        _progressComposite = new Composite(getShell(), SWT.NONE);
        _progressComposite.setLayout(GridLayoutFactory.fillDefaults().create());
        _progressComposite.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).create());
        // Start hidden and excluded from layout; createProgressIndicator() reveals it.
        ((GridData) (_progressComposite.getLayoutData())).exclude = true;
        _progressComposite.setVisible(false);
        getShell().layout(true);
    }
    /*
     * Create a progress section with the given message. This method will return an id of the newly created section. This id should be passed into removeProgressBar when the operation completes.
     *
     * @param message text shown next to the indeterminate progress bar
     *
     * @return id of the new progress row, to be passed to removeProgressIndicator()
     */
    public String createProgressIndicator(final String message) {
        // Timestamp-based id; uniquely identifies this row for later removal.
        final String id = "progessBar_" + System.currentTimeMillis(); //$NON-NLS-1$
        // syncExec: may be called from a background job, and the id must be
        // returned only after the row exists.
        Display.getDefault().syncExec(new Runnable() {
            @Override
            public void run() {
                // Defer layout while the row is assembled to avoid flicker.
                _progressComposite.setLayoutDeferred(true);
                if (!_progressComposite.isVisible()) {
                    _progressComposite.setVisible(true);
                    ((GridData) (_progressComposite.getLayoutData())).exclude = false;
                }
                Composite composite = new Composite(_progressComposite, SWT.NONE);
                composite.setLayout(GridLayoutFactory.fillDefaults().numColumns(2).equalWidth(false).margins(5, 5).create());
                composite.setLayoutData(GridDataFactory.fillDefaults().grab(true, false).create());
                // Tag the row with its id so removeProgressIndicator() can find it.
                composite.setData(PROGRESS_BAR_ID, id);
                Label label = new Label(composite, SWT.WRAP);
                label.setText(message);
                label.setLayoutData(GridDataFactory.fillDefaults().grab(false, false).create());
                ProgressBar progressBar = new ProgressBar(composite, SWT.INDETERMINATE);
                progressBar.setLayoutData(GridDataFactory.fillDefaults().indent(10, 0).grab(true, false).create());
                _progressComposite.setLayoutDeferred(false);
                _progressComposite.layout(true);
                getShell().layout(true);
                getShell().pack(true);
            }
        });
        return id;
    }
    /**
     * Removes the progress row created with the given id and hides the whole
     * progress area once no rows remain. Safe to call from any thread.
     *
     * @param id value returned by createProgressIndicator()
     */
    public void removeProgressIndicator(final String id) {
        Display.getDefault().asyncExec(new Runnable() {
            @Override
            public void run() {
                if (_progressComposite != null && !_progressComposite.isDisposed()) {
                    for (Control control : _progressComposite.getChildren()) {
                        Object storedId = control.getData(PROGRESS_BAR_ID);
                        if (storedId != null && storedId.equals(id)) {
                            control.dispose();
                        }
                    }
                    // Collapse the area when the last row is gone.
                    if (_progressComposite.getChildren().length == 0) {
                        _progressComposite.setVisible(false);
                        ((GridData) (_progressComposite.getLayoutData())).exclude = true;
                    }
                    _progressComposite.layout(true);
                    getShell().pack(true);
                    getShell().layout(true);
                }
            }
        });
    }
private String getTypeWithColon(String type, int itemSize) {
String typeWithColon = null;
if (type != null) {
if (type.equalsIgnoreCase(SugarType.CONTACTS.getType())) {
if (itemSize > 1) {
typeWithColon = UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.CONTACTS_LABEL_STRING);
} else {
typeWithColon = UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.CONTACT_LABEL_STRING);
}
} else if (type.equalsIgnoreCase(SugarType.OPPORTUNITIES.getType())) {
if (itemSize > 1) {
typeWithColon = UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.OPPORTUNITIES_LABEL_STRING);
} else {
typeWithColon = UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.OPPORTUNITY_LABEL_STRING);
}
} else if (type.equalsIgnoreCase(SugarType.ACCOUNTS.getType())) {
if (itemSize > 1) {
typeWithColon = UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.ACCOUNTS_LABEL_STRING);
} else {
typeWithColon = UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.ACCOUNT_LABEL_STRING);
}
}
}
return typeWithColon;
}
    @Override
    protected void createButtonsForButtonBar(Composite buttonBar) {
        super.createButtonsForButtonBar(buttonBar);
        // Attach the validation listener to the freshly created OK button.
        updateButtons();
    }
    /**
     * Attaches validation to the OK button. On OK: if "new version" is chosen,
     * builds _newDocumentInfo reusing the existing Sugar document id; if
     * "rename" is chosen, validates the new name against the taken-names list
     * and builds a fresh version-1 document info. Closes only when valid.
     */
    private void updateButtons() {
        Button okButton = getButton(IDialogConstants.OK_ID);
        okButton.addSelectionListener(new SelectionListener() {
            @Override
            public void widgetDefaultSelected(SelectionEvent arg0) {
                // Not used for buttons.
            }
            @Override
            public void widgetSelected(SelectionEvent arg0) {
                // 0 = valid, 1 = rename collides with an existing name.
                int isError = 0;
                if (_newVersionButton != null && !_newVersionButton.isDisposed() && _newVersionButton.getSelection() && _newVersionLabel != null && !_newVersionLabel.isDisposed()) {
                    // NOTE(review): _newVersionLabel.getText() is the localized
                    // "Version x" label, not the bare version number — confirm
                    // downstream consumers of DocumentInfo.getVersion() expect this.
                    _newDocumentInfo = new DocumentInfo(_documentInfo.getDocumentName(), _newVersionLabel.getText(), _documentInfo.getConnectionsUUID());
                    _newDocumentInfo.setSugarDocumentID(_documentInfo.getSugarDocumentID());
                    isError = 0;
                } else if (_renameButton != null && !_renameButton.isDisposed() && _renameButton.getSelection()) {
                    if (_renameText != null && !_renameText.isDisposed()) {
                        if (isNameTaken(_renameText.getText())) {
                            isError = 1;
                        } else {
                            // Renamed documents start over at version 1 with no Sugar id.
                            _newDocumentInfo = new DocumentInfo(_renameText.getText(), "1", _documentInfo.getConnectionsUUID()); //$NON-NLS-1$
                            _newDocumentInfo.setSugarDocumentID(null);
                            isError = 0;
                        }
                    }
                }
                if (isError > 0) {
                    setControlVisible(_errorMsgLabel, true);
                    String errormsg = null;
                    if (isError == 1) {
                        errormsg = UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPLOAD_COINFLICT_ERROR_MSG1);
                    }
                    if (errormsg != null) {
                        _errorMsgLabel.setText(errormsg);
                    }
                    getShell().pack(true);
                    getShell().layout(true);
                } else {
                    close();
                }
            }
        });
    }
public void okPressed() {
/* DO NOTHING HERE!!! */
// this.close();
}
private boolean isNameTaken(String s) {
boolean isTaken = false;
if (_namesTaken == null || _namesTaken.isEmpty()) {
isTaken = false;
} else if (s != null && _namesTaken.contains(s)) {
isTaken = true;
}
return isTaken;
}
    /**
     * Lazily creates the pale-red banner background color. Disposed by the
     * dispose listener registered in createErrorComposite().
     */
    private Color getErrorCompositeColor() {
        if (_errorCompositeColor == null) {
            _errorCompositeColor = new Color(Display.getDefault(), 250, 228, 222);
        }
        return _errorCompositeColor;
    }
    /**
     * Shows a progress row, launches the background GetDocumentRelationships
     * call, and schedules a UI job (ordered after it via the shared job rule)
     * that renders the association rows, adjusts the options when the document
     * has no Sugar id, and removes the progress row.
     */
    private void retrieveAssociationData() {
        final String progressId = createProgressIndicator(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UI_ITEM_RETRIEVING_PROGRESS_MESSAGE));
        retrieveAssociationDataWebServices();
        UIJob updateUIAndRemoveProgressIndicatorUIjob = new UIJob("Update UI And Remove Progress Indicator") //$NON-NLS-1$
        {
            @Override
            public IStatus runInUIThread(IProgressMonitor arg0) {
                if (getAssociatedEntries() != null && !getAssociatedEntries().isEmpty()) {
                    boolean isNoAssociation = true;
                    // One association row per Sugar type that has items.
                    Set<Entry<String, List<AssociateData>>> entryset = getAssociatedEntries().entrySet();
                    Iterator<Entry<String, List<AssociateData>>> it = entryset.iterator();
                    while (it.hasNext()) {
                        Entry<String, List<AssociateData>> entry = (Entry<String, List<AssociateData>>) it.next();
                        String type = entry.getKey();
                        if (entry.getValue() != null && !entry.getValue().isEmpty()) {
                            List<AssociateData> names = entry.getValue();
                            createAssociationItems(_associationComposite, type, names);
                            isNoAssociation = false;
                        }
                    }
                    if (isNoAssociation) {
                        setControlVisible(_noAssociationLabel, true);
                    }
                } else {
                    setControlVisible(_noAssociationLabel, true);
                }
                // if no sugar id, disable the new version option
                if (_documentInfo.getSugarDocumentID() == null) {
                    _newVersionButton.setEnabled(false);
                    _newVersionButton.setSelection(false);
                    _newVersionNameLabel.setEnabled(false);
                    _newVersionLabel.setEnabled(false);
                    // Rename becomes the only (and pre-selected) choice.
                    _renameButton.setSelection(true);
                    _renameText.setEnabled(true);
                }
                _associationComposite.layout();
                removeProgressIndicator(progressId);
                return Status.OK_STATUS;
            }
        };
        // Set rule so the job will be executed in the correct
        // order.
        updateUIAndRemoveProgressIndicatorUIjob.setRule(UiUtils.DISPLAY_SUGAR_ITEM_BY_ID_JOB_RULE);
        updateUIAndRemoveProgressIndicatorUIjob.schedule();
    }
    /**
     * Runs the GetDocumentRelationships web service call for this document's
     * Connections UUID in a background job and feeds the raw JSON response to
     * processGetDocumentRelationships().
     */
    private void retrieveAssociationDataWebServices() {
        Job job = new Job("retrieveAssociationDataWebServices") //$NON-NLS-1$
        {
            @Override
            protected IStatus run(IProgressMonitor arg0) {
                List<String> uuids = new ArrayList<String>();
                uuids.add(_documentInfo.getConnectionsUUID());
                String out = SugarWebservicesOperations.getInstance().getDocumentRelationships("connectionsid", uuids); //$NON-NLS-1$
                processGetDocumentRelationships(out);
                return Status.OK_STATUS;
            }
        };
        // Setting job rule so jobs following this rule will be executed in the correct order.
        job.setRule(UiUtils.DISPLAY_SUGAR_ITEM_BY_ID_JOB_RULE);
        job.schedule();
    }
    /**
     * Parses the GetDocumentRelationships JSON response, records the first
     * Sugar document id onto _documentInfo, and fills _sugarEntries with the
     * associated opportunities and accounts keyed by Connections UUID.
     * Malformed JSON is ignored; JSON access errors are logged.
     *
     * @param output raw JSON string returned by the web service
     */
    public void processGetDocumentRelationships(String output) {
        JSONObject jsonObject = null;
        try {
            jsonObject = new JSONObject(output);
        } catch (JSONException e) {
            // End gracefully.
        }
        if (jsonObject != null) {
            try {
                if (jsonObject.containsKey("result")) { //$NON-NLS-1$
                    JSONObject resultObj = jsonObject.getJSONObject("result"); //$NON-NLS-1$
                    // Each key under "result" is a Connections document UUID.
                    Set<String> keySet = resultObj.keySet();
                    if (!keySet.isEmpty()) {
                        Iterator<String> it = keySet.iterator();
                        while (it.hasNext()) {
                            String key = it.next();
                            Object obj = resultObj.get(key);
                            if (obj instanceof JSONObject) {
                                JSONObject associateObject = (JSONObject) obj;
                                Map<String, List<AssociateData>> sugarEntry = new HashMap<String, List<AssociateData>>();
                                // get document sugar id
                                StringBuffer sb = new StringBuffer(ConstantStrings.EMPTY_STRING);
                                if (associateObject.has("SugarIDs") && associateObject.get("SugarIDs") instanceof JSONArray) { //$NON-NLS-1$ //$NON-NLS-2$
                                    JSONArray array = (JSONArray) associateObject.get("SugarIDs"); //$NON-NLS-1$
                                    for (int i = 0; i < array.length(); i++) {
                                        sb.append(i == 0 ? ConstantStrings.EMPTY_STRING : ConstantStrings.COMMA).append(array.get(i).toString());
                                        // For now, we will only take the first sugar id
                                        if (i == 0) {
                                            break;
                                        }
                                    }
                                    if (sb.length() > 0) {
                                        _documentInfo.setSugarDocumentID(sb.toString());
                                    }
                                }
                                // get associted oppties
                                // (a JSONArray here means "no entries"; only a JSONObject carries id->name pairs)
                                JSONObject opptyObject = null;
                                if (associateObject.get("Opportunities") instanceof JSONArray) { //$NON-NLS-1$
                                } else {
                                    opptyObject = associateObject.getJSONObject("Opportunities"); //$NON-NLS-1$
                                }
                                sugarEntry.put(SugarType.OPPORTUNITIES.getType(), extractSugarItems(opptyObject));
                                // get associated accounts
                                // HashMap accountsHashMap = new HashMap();
                                JSONObject accountObject = null;
                                if (associateObject.get("Accounts") instanceof JSONArray) { //$NON-NLS-1$
                                } else {
                                    accountObject = associateObject.getJSONObject("Accounts"); //$NON-NLS-1$
                                }
                                sugarEntry.put(SugarType.ACCOUNTS.getType(), extractSugarItems(accountObject));
                                _sugarEntries.put(key, sugarEntry);
                            }
                        }
                    }
                }
            } catch (JSONException e) {
                UtilsPlugin.getDefault().logException(e, CorePluginActivator.PLUGIN_ID);
            }
        }
    }
    /**
     * Converts an id-to-name JSON map into AssociateData entries.
     * Returns an empty list for a null/empty object or on any parse error
     * (errors are logged).
     *
     * @param object JSON object whose keys are Sugar ids and values are names; may be null
     */
    private List<AssociateData> extractSugarItems(JSONObject object) {
        List<AssociateData> associateDatas = new ArrayList<AssociateData>();
        try {
            if (object != null && !object.isEmpty()) {
                Iterator<String> it = object.keySet().iterator();
                while (it.hasNext()) {
                    String id = it.next();
                    String name = object.getString(id);
                    AssociateData associateData = new AssociateData(name, ConstantStrings.EMPTY_STRING, id, true);
                    associateDatas.add(associateData);
                }
            }
        } catch (Exception e) {
            UtilsPlugin.getDefault().logException(e, CorePluginActivator.PLUGIN_ID);
        }
        return associateDatas;
    }
    /**
     * @return association data keyed by Connections UUID; never null.
     */
    public Map<String, Map<String, List<AssociateData>>> getSugarEntries() {
        if (_sugarEntries == null) {
            _sugarEntries = new HashMap<String, Map<String, List<AssociateData>>>();
        }
        return _sugarEntries;
    }
    /**
     * @return the type-to-items map for this dialog's document, or null if the
     *         relationship lookup has not (yet) produced data for it.
     */
    public Map<String, List<AssociateData>> getAssociatedEntries() {
        return getSugarEntries().get(_documentInfo.getConnectionsUUID());
    }
    /**
     * @return the resolution chosen by the user, or null if the dialog was cancelled.
     */
    public DocumentInfo getNewDocumentInfo() {
        return _newDocumentInfo;
    }
protected int getMaxLabelWidth() {
if (_maxLabelWidth == -1) {
StringBuffer sb = new StringBuffer(ConstantStrings.EMPTY_STRING);
boolean isFirst = true;
Iterator<String> it = getAssociatedEntries().keySet().iterator();
while (it.hasNext()) {
String type = it.next();
int itemsize = getAssociatedEntries().get(type).size();
if (itemsize > 0) {
if (type != null && type.equals(SugarType.ACCOUNTS.getType())) {
sb.append(isFirst ? ConstantStrings.EMPTY_STRING : ConstantStrings.COMMA).append(
(itemsize >= 1 ? UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.ACCOUNTS_LABEL_STRING) : UtilsPlugin.getDefault().getResourceString(
UtilsPluginNLSKeys.ACCOUNT_LABEL_STRING)));
} else if (type != null && type.equals(SugarType.OPPORTUNITIES.getType())) {
sb.append(isFirst ? ConstantStrings.EMPTY_STRING : ConstantStrings.COMMA).append(
(itemsize >= 1 ? UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.OPPORTUNITIES_LABEL_STRING) : UtilsPlugin.getDefault().getResourceString(
UtilsPluginNLSKeys.OPPORTUNITY_LABEL_STRING)));
} else if (type != null && type.equals(SugarType.CONTACTS.getType())) {
sb.append(isFirst ? ConstantStrings.EMPTY_STRING : ConstantStrings.COMMA).append(
(itemsize >= 1 ? UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.CONTACTS_LABEL_STRING) : UtilsPlugin.getDefault().getResourceString(
UtilsPluginNLSKeys.CONTACT_LABEL_STRING)));
}
if (isFirst) {
isFirst = false;
}
}
}
String[] associationLabels = sb.toString().split(ConstantStrings.COMMA);
Point point = computeMaxSize(_parent, associationLabels);
if (point != null) {
_maxLabelWidth = point.x;
_maxLabelWidth += 18; // Add a buffer to improve spacing
}
}
return _maxLabelWidth;
}
public Point computeMaxSize(Composite parent, String[] arrays) {
int width = -1;
int height = -1;
if (parent == null || arrays == null || arrays.length == 0) {
return null;
}
GC gc = new GC(parent);
gc.setFont(SugarItemsDashboard.getInstance().getBusinessCardLabelFont());
for (int i = 0; i < arrays.length; i++) {
Point size = gc.textExtent(arrays[i]); // or textExtent
width = Math.max(width, size.x);
height = Math.max(height, size.y);
}
gc.dispose();
return new Point(width, height);
}
}
|
import * as dynamoDbLib from '../libs/dynamodb-lib'
import { success, failure } from '../libs/response-lib'
import uuid from 'uuid'
/**
 * Lambda handler: writes one item block to the DynamoDB table named in the
 * request body. Returns the stored item on success, { status: false } on failure.
 */
export async function main(event, context) {
  // API Gateway delivers the body as a JSON string; direct invocations may
  // already pass a parsed object.
  const data = typeof event.body === 'string' ? JSON.parse(event.body) : event.body

  // Accept the correctly spelled key, but fall back to the historical
  // misspelling ("BlockDescripton") that existing clients may still send.
  const blockDescription =
    data.Item.BlockDescription !== undefined
      ? data.Item.BlockDescription
      : data.Item.BlockDescripton

  const params = {
    TableName: data.TableName,
    Item: {
      Team: data.Item.Team,
      // Suffix with a time-based (v1) UUID so repeated blocks get unique keys.
      BlockUuid: `${data.Item.Block}_` + uuid.v1(),
      Block: data.Item.Block,
      BlockDescription: blockDescription,
      ItemHeader: data.Item.ItemHeader,
      ItemText: data.Item.ItemText,
      // Audit fields derived from the caller's Cognito identity.
      CreatedBy: event.requestContext.identity.cognitoIdentityId,
      CreatedAt: Date.now(),
      LastUpdatedBy: event.requestContext.identity.cognitoIdentityId,
    },
  }
  try {
    await dynamoDbLib.call('put', params)
    return success(params.Item)
  } catch (e) {
    console.log(e)
    return failure({ status: false })
  }
}
|
/**
* Bolt
* statements/UpdateSelect
*
* Copyright (c) 2017 <NAME>
*
* This software is released under the MIT License.
* http://opensource.org/licenses/mit-license.php
*
* @author <NAME>
*/
package com.sopranoworks.bolt.statements
import com.google.cloud.spanner.{Mutation, TransactionContext}
import com.google.cloud.spanner.TransactionRunner.TransactionCallable
import com.sopranoworks.bolt.values.SubqueryValue
import com.sopranoworks.bolt.{Bolt, ColumnReader, QueryContext, Where}
import scala.collection.JavaConversions._
/**
 * Implements "UPDATE table SET (cols) = (subquery) WHERE ..." for Spanner:
 * resolves the target rows' primary keys from the WHERE clause, evaluates the
 * subquery, and emits one update Mutation per (target row, subquery row).
 */
case class UpdateSelect(nut:Bolt.Nut,qc:QueryContext,tableName:String,columns:java.util.List[String],subquery:SubqueryValue,where:Where,hint:String) extends Update {
  private def _updateSelect(tableName: String, columns:java.util.List[String], subquery: SubqueryValue, where: Where, tr: TransactionContext, hint:String):Unit = {
    val tbl = nut.database.table(tableName).get
    // Primary-key columns may not be assigned by an UPDATE.
    tbl.primaryKey.columns.foreach(k => if (columns.contains(k.name)) throw new RuntimeException(s"${k.name} is primary key"))
    // keys: primary-key column metadata; values: key values of each matched row.
    val (keys, values) = _getTargetKeysAndValues(tr, tbl, where.whereStmt, Nil, hint)
    val res = subquery.eval.asInstanceOf[SubqueryValue].results
    val reader = new ColumnReader {}
    val ml = res.map {
      r =>
        values.zip(r).map {
          case (keyValues, vs) =>
            // Each subquery row must supply exactly one value per target column.
            if (columns.length != vs.getColumnCount)
              throw new RuntimeException(
                s"Column count mismatch: UPDATE targets ${columns.length} column(s) but subquery row has ${vs.getColumnCount}")
            val m = Mutation.newUpdateBuilder(tableName)
            // Identify the row by its primary-key values.
            keys.zip(keyValues).foreach {
              case (k, v) =>
                v.setTo(m, k.name)
            }
            // Assign subquery columns to target columns positionally.
            var idx = 0
            columns.foreach {
              col =>
                reader.getColumn(vs, idx).setTo(m, col)
                idx += 1
            }
            m.build()
        }
    }
    ml.foreach {
      mm =>
        nut.addMutations(mm)
    }
  }
  /**
   * Runs inside the current transaction when one exists; otherwise starts a
   * fresh transaction and re-enters execute().
   */
  def execute():Unit = {
    nut.transactionContext match {
      case Some(tr) =>
        _updateSelect(tableName, columns, subquery, where, tr, hint)
      case None =>
        Option(nut.dbClient).foreach(
          _ => nut.beginTransaction(_ => execute())
        )
    }
  }
}
|
window.addEventListener('load',function() {
$(document).ready(function() {
$('#livefilterdemo').liveFilter({
delay: 200, // how long between keystroke and filter
analyticsLogging: false, // log to google analytics through foundationExtendEBI.js
fitlerTargetCustomDiv: 'div.live-filter-target-granularity',
defaultText: 'Type to filter these paper references',
noMatches: '<p>No matching papers found.</p><a class="button" href="#">You could add a link to advanced search</a> '
// for further plugin option guidance, see: https://ebi.emblstatic.net/web_guidelines/EBI-Framework/v1.3/libraries/LiveFilter/js/jquery.liveFilter.js
});
});
});
|
module Payshares
  module Horizon
    # Wraps a problem-document style error returned by Horizon, providing
    # typed accessors for the standard fields and collecting any
    # non-standard attributes under #meta.
    class Problem
      include Contracts

      # attributes: hash of problem fields; missing standard fields get the
      # defaults below. Unknown keys are exposed through #meta.
      def initialize(attributes)
        @attributes = attributes.reverse_merge({
          type: "about:blank",
          title: "Unknown Error",
          status: 500,
        })
        # BUGFIX: the original used the mutating #except!, which stripped the
        # standard keys out of @attributes itself, so every reader below
        # returned nil. The non-mutating #except leaves @attributes intact.
        @meta = @attributes.except(:type, :title, :status, :detail, :instance)
      end

      Contract None => String
      def type
        @attributes[:type]
      end

      Contract None => String
      def title
        @attributes[:title]
      end

      Contract None => Num
      def status
        @attributes[:status]
      end

      Contract None => String
      def detail
        @attributes[:detail]
      end

      Contract None => String
      def instance
        @attributes[:instance]
      end

      Contract None => HashOf[String, Any]
      def meta
        # BUGFIX: was `@attributes[:instance]`, returning the instance field
        # instead of the collected metadata.
        @meta
      end
    end
  end
end
|
#include <iostream>
using namespace std;

// Reads one character from stdin and reports whether it is a lower case
// letter, an upper case letter, a digit, or something else.
int main() {
    char userCh;
    cout << "Please enter a character:" << endl;
    cin >> userCh;

    // Pick the category text once, then print a single message.
    const char* category;
    if (userCh >= 'a' && userCh <= 'z') {
        category = " is a lower case letter";
    } else if (userCh >= 'A' && userCh <= 'Z') {
        category = " is an upper case letter";
    } else if (userCh >= '0' && userCh <= '9') {
        category = " is a digit";
    } else {
        category = " is not an alpha-numeric character";
    }
    cout << userCh << category << endl;
    return 0;
}
#!/bin/bash
#
# libjingle
# Copyright 2013 Google Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Wrapper script for running the Java tests under this directory. This script
# will only work if it has been massaged by the build action and placed in
# the PRODUCT_DIR (e.g. out/Debug).
# Exit with error immediately if any subcommand fails.
set -e

# Change directory to the PRODUCT_DIR (e.g. out/Debug).
# Quoted so paths containing spaces do not word-split (ShellCheck SC2086).
cd -P "$(dirname "$0")"

if [ -z "$LD_PRELOAD" ]; then
  echo "LD_PRELOAD isn't set. It should be set to something like "
  echo "/usr/lib/x86_64-linux-gnu/libpulse.so.0. I will now refuse to run "
  echo "to protect you from the consequences of your folly."
  exit 1
fi

# Build the test classpath; quote every expansion for space-safety.
export CLASSPATH="$(pwd)/junit-4.11.jar"
CLASSPATH="$CLASSPATH:$(pwd)/libjingle_peerconnection_test.jar"
CLASSPATH="$CLASSPATH:$(pwd)/libjingle_peerconnection.jar"

# This sets java.library.path so our lookup of libpeerconnection.so works.
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$(pwd):$(pwd)/lib:$(pwd)/lib.target"

# The RHS value is replaced by the build action that copies this script to
# <(PRODUCT_DIR), using search-and-replace by the build action.
export JAVA_HOME=GYP_JAVA_HOME
"${JAVA_HOME}/bin/java" -Xcheck:jni -classpath "$CLASSPATH" \
  junit.textui.TestRunner org.webrtc.PeerConnectionTestJava
|
import { clickTarget, startTimeline } from "@jspsych/test-utils";
import { initJsPsych } from "jspsych";

import audioButtonResponse from ".";

jest.useFakeTimers();

// skip this until we figure out how to mock the audio loading
describe.skip("audio-button-response", () => {
  test("on_load event triggered after page setup complete", async () => {
    const timeline = [
      {
        type: audioButtonResponse,
        stimulus: "mymp3.mp3",
        prompt: "foo",
        choices: ["choice1"],
        on_load: () => {
          // BUGFIX: the original asserted on the garbled string "ffgfgoo";
          // the trial's prompt is "foo", so that is what the page must
          // contain once on_load fires.
          expect(getHTML()).toContain("foo");
          clickTarget(displayElement.querySelector("button"));
        },
      },
    ];

    const jsPsych = initJsPsych({
      use_webaudio: false,
    });

    const { getHTML, finished, displayElement } = await startTimeline(timeline, jsPsych);

    // After the trial finishes, the prompt should be gone.
    expect(getHTML()).not.toContain("foo");

    await finished;
  });
});
|
def process_trade_orders(trade_orders):
    """Summarize a list of trade orders.

    Each order is a dict with at least the keys ``order_type`` ("sell" or
    "buy"), ``rate`` and ``pending_amount``.

    Returns a dict with the order count, the average sell rate, the total
    pending buy amount, and the order with the highest rate (``None`` when
    ``trade_orders`` is empty — the original raised ValueError from ``max``).
    """
    total_trade_orders = len(trade_orders)

    sell_orders = [order for order in trade_orders if order["order_type"] == "sell"]
    total_sell_rate = sum(order["rate"] for order in sell_orders)
    # Guard against division by zero when there are no sell orders.
    average_sell_rate = total_sell_rate / len(sell_orders) if sell_orders else 0

    buy_orders = [order for order in trade_orders if order["order_type"] == "buy"]
    total_pending_buy_amount = sum(order["pending_amount"] for order in buy_orders)

    # max() raises ValueError on an empty sequence; report None instead.
    highest_rate_order = (
        max(trade_orders, key=lambda order: order["rate"]) if trade_orders else None
    )

    return {
        "total_trade_orders": total_trade_orders,
        "average_sell_rate": average_sell_rate,
        "total_pending_buy_amount": total_pending_buy_amount,
        "highest_rate_order": highest_rate_order,
    }
#!/bin/bash
#
# A simple and minimal test for deploy.sh
set -ex

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
TMPDIR=$(mktemp -d /tmp/tmp.deploy-test-XXXX)

export KUBEFLOW_DEPLOY=false

# Run deploy.sh from the temp dir; quote paths so spaces cannot word-split.
cd "${TMPDIR}"
"${DIR}/deploy.sh"

# deploy.sh is expected to create the ksonnet app directory.
EXPECTED_APP_DIR="${TMPDIR}/kubeflow_ks_app"
if [[ ! -d "${EXPECTED_APP_DIR}" ]]; then
  echo "${EXPECTED_APP_DIR} was not created"
  exit 1
fi
class BankAccount {
    var numCuenta: String

    init(numCuenta: String) {
        self.numCuenta = numCuenta
    }

    /// Returns true only when the account number is exactly 10 characters:
    /// 3 leading letters followed by 7 digits.
    func validateAccountNumber(accountNumber: String) -> Bool {
        guard accountNumber.count == 10 else {
            return false
        }
        // Split into the 3-character alphabetic prefix and the numeric tail.
        let splitIndex = accountNumber.index(accountNumber.startIndex, offsetBy: 3)
        let letters = accountNumber[..<splitIndex]
        let digits = accountNumber[splitIndex...]
        // Reject when the prefix contains anything other than letters.
        guard letters.rangeOfCharacter(from: CharacterSet.letters.inverted) == nil else {
            return false
        }
        // Valid when the tail contains only decimal digits.
        return digits.rangeOfCharacter(from: CharacterSet.decimalDigits.inverted) == nil
    }
}
<filename>test/applications/draw.test.js
/**
* @fileOverview
* Vows tests for the Draw example application.
*
* These are cluster tests using Express, Redis, and multiple Thywill
* processes.
*/
var tools = require('../lib/tools');

// Obtain a test suit that launches Thywill instances in child processes.
var suite = tools.application.vowsSuite('Application: Draw', {
  applicationName: 'draw',
  // The initial batches load the application page and then connect via
  // Socket.IO. The matches are checked against the page contents. Here
  // we're looking at the templates that should be included.
  pageMatches: [
    '<div id="title">{{title}}</div>'
  ],
  // Data for the processes to launch.
  processData: [
    {
      port: 10078,
      clusterMemberId: 'alpha'
    },
    {
      port: 10079,
      clusterMemberId: 'beta'
    }
  ],
  // Set a long timeout for actions, because things sometimes lag on a small
  // server when running a bunch of tests.
  defaultTimeout: 5000
});

// Test the functionality for sending notice of a drawn item and having
// the same data broadcast to the other client.
//
// Send fake data, since we're not actually processing the front end stuff.
var sendMessage = { segments: [] };
var responseMessage = { segments: [] };
tools.application.addSendAndAwaitResponsesBatch('Draw line data message and response', suite, {
  applicationId: 'draw',
  // actionIndex/responseIndexes pick which launched processes send and
  // receive — presumably indexes into processData above; TODO confirm.
  actionIndex: 0,
  responseIndexes: 1,
  sendMessage: sendMessage,
  responseMessage: responseMessage
});

// Ensure that clients are closed and child processes are killed.
tools.application.closeVowsSuite(suite);

//-----------------------------------------------------------
// Exports - Vows test suite
//-----------------------------------------------------------

module.exports.suite = suite;
|
# Rebuild Laravel caches after deployment.
php artisan clear-compiled
php artisan optimize:clear
composer dump-autoload
php artisan optimize

# SECURITY NOTE(review): the comments below embed plaintext credentials in
# the repository. They should be rotated and moved to a secrets store.
#&EC@1%Fc34 //clave de registro
#Soportapp&EC@1%Fc34 admin //clave y usuario de la base de datos
#Soportapp&EC@1%Fc34 admin@soportapp.tk //clave y usuario admin Web

# MySQL account setup.
# BUGFIX: the third statement originally used 'localhost ' (trailing space),
# which creates an account for a different, unreachable host.
CREATE USER 'usuario'@'localhost' IDENTIFIED BY 'password';
ALTER USER 'usuario'@'localhost' IDENTIFIED WITH mysql_native_password BY 'mypassword';
CREATE USER 'usuario'@'localhost' IDENTIFIED WITH mysql_native_password BY 'your_password';
GRANT select,insert,update,delete ON soportapp.users TO admin@localhost;
GRANT select,insert,update,delete ON soportapp.order_services TO admin@localhost;
GRANT select,insert,update,delete ON soportapp.clients TO admin@localhost;
|
#!/bin/bash

app_name="landmark"
# This should be sufficient to enable ML and BigQuery, along with GCS (Storage)
service="storage-mike"

# Additional configuration parameters: varies depending on which service
# Ref. https://cloud.google.com/iam/docs/understanding-roles
#
# ML, BigQuery: -c '{"role": "viewer"}'
# Storage (GCS): -c '{"role": "editor"}'

# Quote the variables so names containing spaces cannot word-split.
cf bind-service "$app_name" "$service" -c '{"role": "editor"}'
<gh_stars>0
// TypeScript-emitted helper: shallow-merges the own enumerable properties of
// each source argument into the first argument (Object.assign fallback for
// older runtimes). Transpiled output — do not edit by hand.
var __assign = (this && this.__assign) || function () {
    __assign = Object.assign || function(t) {
        for (var s, i = 1, n = arguments.length; i < n; i++) {
            s = arguments[i];
            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
                t[p] = s[p];
        }
        return t;
    };
    return __assign.apply(this, arguments);
};
// TypeScript-emitted helper backing object rest destructuring: returns a copy
// of s's own enumerable properties (including symbols) that are NOT listed in
// e. Transpiled output — do not edit by hand.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
import React from 'react';
import PropTypes from 'prop-types';
import { Box, Button, Heading, ResponsiveContext, } from 'grommet';
import { Menu } from 'grommet-icons/icons/Menu';
/** Collapsible side bar component
* ```$ npm install grommet-controls
* import { Sidebar } from 'grommet-controls';
* <Sidebar title='My title'>
...
* </Sidebar>
* ```
*/
var Sidebar = function (_a) {
var title = _a.title, width = _a.width, children = _a.children, collapsible = _a.collapsible, rest = __rest(_a, ["title", "width", "children", "collapsible"]);
var _b = React.useState(undefined), collapsed = _b[0], setCollapsed = _b[1];
var BtnToggle = function (_a) {
var view = _a.view;
return (collapsible ? React.createElement(Button, { icon: React.createElement(Menu, null), onClick: function () { return setCollapsed(view !== 'collapsed'); } }) : null);
};
return (React.createElement(ResponsiveContext.Consumer, null, function (size) { return ((collapsible && size === 'small' && collapsed === undefined) || collapsed === true ? (React.createElement(Box, { align: 'start' },
React.createElement(BtnToggle, { view: 'collapsed' }))) : (React.createElement(Box, __assign({ width: width }, rest),
React.createElement(Box, { pad: { bottom: 'small' }, gap: 'small', direction: 'row', align: 'center' },
React.createElement(BtnToggle, { view: 'expanded' }),
title && (React.createElement(Box, { flex: false, tag: 'header' }, typeof title === 'string'
? (React.createElement(Heading, { margin: 'none', level: 3 }, title))
: title))),
children))); }));
};
Sidebar.defaultProps = {
title: undefined,
width: 'medium',
collapsible: true,
};
Sidebar.propTypes = {
title: PropTypes.node,
width: PropTypes.string,
collapsible: PropTypes.bool,
};
export { Sidebar };
|
<gh_stars>0
import {UX} from '@salesforce/command';
import {Optional} from '@salesforce/ts-types';
import {DescribeSObjectResult} from 'jsforce/describe-result';
import {sfdx} from '../sfdx';
// username -> sobject -> describe
// username -> sobject -> describe
const describeSObjectResultCache = new Map<string, Map<string, DescribeSObjectResult>>();

interface DescribeSObjectOptions {
  // Reuse a previously cached describe result when available (default true).
  useCache?: boolean;
  // Optional UX used to show a spinner while the describe call runs.
  ux?: Optional<UX>;
}

/**
 * Describes an sObject in the target org, caching the result per username.
 * Passing `useCache: false` forces a fresh describe call.
 */
export async function describeSObject(
  sObject: string, targetUsername: string, options: DescribeSObjectOptions = {}
): Promise<DescribeSObjectResult> {
  const {useCache = true, ux} = options;
  const cache = describeSObjectResultCache.get(targetUsername) || new Map();
  describeSObjectResultCache.set(targetUsername, cache);
  if (!cache.has(sObject) || !useCache) {
    if (ux) {
      ux.startSpinner(`Describing ${sObject} sObject`);
    }
    try {
      cache.set(sObject, await sfdx.force.schema.sobject.describe({
        quiet: true,
        sobjecttype: sObject,
        targetusername: targetUsername
      }));
    } finally {
      // BUGFIX: stop the spinner even when the describe call rejects, so the
      // CLI does not keep spinning after an error.
      if (ux) {
        ux.stopSpinner();
      }
    }
  }
  return cache.get(sObject);
}
|
from typing import Dict, TypedDict


class UserActionSystemMessageContent(TypedDict):
    """A single (user action, system message) pairing."""

    user_action: str
    message_content: str


# Maps a user-action name to the system message shown for it.
# BUGFIX: this dict was annotated as UserActionSystemMessageContent, but its
# keys are action names ("login", ...), not that TypedDict's fields
# ("user_action", "message_content") — Dict[str, str] is the matching type.
user_action_message_map: Dict[str, str] = {
    "login": "Welcome back!",
    "logout": "You have been successfully logged out.",
    "purchase": "Thank you for your purchase.",
    "update_profile": "Your profile has been updated."
}

# Accessing the message content for a specific user action
print(user_action_message_map["login"])  # Output: Welcome back!
print(user_action_message_map["purchase"])  # Output: Thank you for your purchase.
<filename>LeetCode/source/PlusOne.cpp
// PlusOne
// 2021.12.28
// Easy
// Plus One: given a non-negative number represented as a vector of decimal
// digits (most significant first), add one and return the digit vector.
class Solution
{
public:
    vector<int> plusOne(vector<int>& digits)
    {
        int len = digits.size();
        // Bump the least significant digit; no carry means we're done.
        digits[len - 1] += 1;
        if (digits[len - 1] < 10)
        {
            return digits;
        }
        // Propagate carries from the least significant digit upward.
        for (int i = len - 1; i > 0; --i)
        {
            if (digits[i] > 9)
            {
                carry(i, digits);
            }
        }
        // A leading digit still > 9 means the result gained a digit: 1 followed
        // by zeros (e.g. 999 + 1 = 1000).
        if (digits[0] > 9)
        {
            vector<int> widened(len + 1, 0);
            widened[0] = 1;
            return widened;
        }
        return digits;
    }

    // Zeroes the digit at index and adds one to the next more significant digit.
    void carry(int index, vector<int>& digits)
    {
        digits[index] = 0;
        digits[index - 1] += 1;
    }
};
|
<reponame>adarshaacharya/csoverflow<gh_stars>10-100
// env variable config
// Load .env only outside production, where real env vars are injected.
if (process.env.NODE_ENV !== 'production') {
  require('dotenv').config();
}

import app from './app';
import { databaseGenerate } from './config/database.config';

// db — establish the database connection before the server starts listening.
databaseGenerate();

const hostname = 'localhost';
const PORT = process.env.PORT || 5000;

const handleListening = () => console.log(`✅ Listening on: http://${hostname}:${PORT}`);

app.listen(PORT, handleListening);
|
// Supported log presentation modes.
export type LogMode = 'live' | 'interactive' | 'grouped';
#!/sbin/sh
#
# /system/addon.d/50-cm.sh
# During a CM14.0 upgrade, this script backs up /system/etc/hosts,
# /system is formatted and reinstalled, then the file is restored.
#
. /tmp/backuptool.functions

# Print the files (relative to /system) this addon preserves across an
# upgrade, one per line, optionally followed by a replacement path.
list_files() {
cat <<EOF
etc/hosts
EOF
}

# $S and $C come from backuptool.functions sourced above — presumably the
# /system mount point and the backup staging dir; TODO confirm.
case "$1" in
  backup)
    list_files | while read FILE DUMMY; do
      backup_file $S/"$FILE"
    done
  ;;
  restore)
    list_files | while read FILE REPLACEMENT; do
      R=""
      [ -n "$REPLACEMENT" ] && R="$S/$REPLACEMENT"
      [ -f "$C/$S/$FILE" ] && restore_file $S/"$FILE" "$R"
    done
  ;;
  pre-backup)
    # Stub
  ;;
  post-backup)
    # Stub
  ;;
  pre-restore)
    # Stub
  ;;
  post-restore)
    # Stub
  ;;
esac
import { Component, ChangeDetectorRef, trigger, state, style, transition, animate} from '@angular/core';
import { TranslateService } from '@ngx-translate/core';
import { MatButtonModule, MatCheckboxModule} from '@angular/material';
import { MatCardModule} from '@angular/material/card';
import { MatSidenavModule} from '@angular/material/sidenav';
import { MatToolbarModule} from '@angular/material/toolbar';
import { MatSnackBarModule} from '@angular/material/snack-bar';
import { ActivatedRoute, RouterModule, Routes, Router } from '@angular/router';
import {MediaMatcher} from '@angular/cdk/layout';
import { AboutComponent } from './about/about.component';
import { HomeComponent } from './home/home.component';
import { SettingsComponent } from './settings/settings.component';
import { GroupsComponent } from './groups/groups.component';
@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.css']
})
export class AppComponent {
  // Media query used to toggle the mobile layout at 600px.
  mobileQuery: MediaQueryList;
  rootPage: any = HomeComponent;
  title = 'Meetups';
  // Translated homepage title, resolved asynchronously in the constructor.
  homepage: any;
  nextEvent: any;
  categoryName: any;
  numberOfEvents: string;

  constructor(translate: TranslateService,private router: Router,
    changeDetectorRef: ChangeDetectorRef, media: MediaMatcher){
    // Default the UI language to English.
    let defLng = 'en';
    translate.setDefaultLang(defLng);
    translate.use(defLng);
    translate.get('HOMEPAGE_TITLE').subscribe((res: string) => {
      this.homepage = res;
    });
    // Re-run change detection whenever the viewport crosses the breakpoint.
    this.mobileQuery = media.matchMedia('(max-width: 600px)');
    this._mobileQueryListener = () => changeDetectorRef.detectChanges();
    this.mobileQuery.addListener(this._mobileQueryListener);
  }

  // NOTE(review): declared without `implements OnDestroy`; consider adding
  // the interface so this lifecycle hook is type-checked.
  ngOnDestroy(): void {
    this.mobileQuery.removeListener(this._mobileQueryListener);
  }

  private _mobileQueryListener: () => void;
}
|
'use strict';

jasmine.getFixtures().fixturesPath = "base/test/fixtures";

// Verifies that a non-"filter" query param with the same length ("flight")
// does not trigger filtering of unrelated items.
describe('jquery.filterjitsu.js not filterjitsu search query test suite', function () {
  var $fj;

  beforeEach(function () {
    loadFixtures('template-only-filter-params.html');
    // HACK (marcus): the following line of code is needed to mock search query params with phantom js
    // http://stackoverflow.com/questions/2494213/changing-window-location-without-triggering-refresh
    //
    // here we set the search query param to `flight` because it has the same number of characters
    // as `filter`. We are testing that the .slice(0, 6) to remove `filter` is only removing
    // the word filter
    window.history.replaceState( {}, '', 'http://localhost:8080/context.html?flight-type=Water');
    $fj = $.fn.filterjitsu();
  });

  it('should not hide [data-type="Land"] items ', function () {
    expect($('[data-filterable][data-filter-type="Land"]')).toBeVisible();
  });
});
|
-- monthly_cost(price, length): returns price * length.
-- NOTE(review): the name suggests a per-month figure, but price * length is
-- the total over `length` months — confirm the intended semantics with callers.
CREATE OR REPLACE FUNCTION monthly_cost(price float, length int)
RETURNS float AS $$
BEGIN
    RETURN price * length;
END; $$
LANGUAGE plpgsql;

-- Smoke test: 10 * 6 = 60.
SELECT monthly_cost(10, 6);
#!/usr/bin/env bash

# Source tag: repo slug with slashes replaced by underscores, plus the Travis
# build number. Target tag: the commit SHA.
SOURCE_TAG_PREFIX="${TRAVIS_REPO_SLUG//\//_}_$TRAVIS_BUILD_NUMBER"
TARGET_TAG_PREFIX="$TRAVIS_COMMIT"

# NOTE(review): the embedded single quotes are literal characters inside these
# values; they are only stripped if the consumer eval's the string — confirm
# how the output of this script is used.
SOURCE_OPTIONS="--source-docker-repository-name '$BUILD_DOCKER_REPOSITORY' --source-docker-username '$BUILD_DOCKER_USERNAME' --source-docker-password '$BUILD_DOCKER_PASSWORD' --source-docker-tag-prefix '$SOURCE_TAG_PREFIX'"
TARGET_OPTIONS="--target-docker-repository-name '$BUILD_DOCKER_REPOSITORY' --target-docker-username '$BUILD_DOCKER_USERNAME' --target-docker-password '$BUILD_DOCKER_PASSWORD' --target-docker-tag-prefix '$TARGET_TAG_PREFIX'"

echo "$SOURCE_OPTIONS $TARGET_OPTIONS"
// Package nutriscore provides utilities for calculating nutritional score and
// Nutri-Score.
// More about-score: https://en.wikipedia.org/wiki/Nutri-Score
package nutriscore
// ScoreType is the type of the scored product
type ScoreType int

const (
	// Food is used when calculating nutritional score for general food items
	Food ScoreType = iota
	// Beverage is used when calculating nutritional score for beverages
	Beverage
	// Water is used when calculating nutritional score for water
	Water
	// Cheese is used for calculating the nutritional score for cheeses
	Cheese
)

// Letter grades indexed by the point value from getPointsFromRange.
var scoreToLetter = []string{"A", "B", "C", "D", "E"}

// Descending threshold tables consumed by getPointsFromRange: the more
// thresholds a value exceeds, the more points it scores.
var energyLevels = []float64{3350, 3015, 2680, 2345, 2010, 1675, 1340, 1005, 670, 335}
var sugarsLevels = []float64{45, 40, 36, 31, 27, 22.5, 18, 13.5, 9, 4.5}
var saturatedFattyAcidsLevels = []float64{10, 9, 8, 7, 6, 5, 4, 3, 2, 1}
var sodiumLevels = []float64{900, 810, 720, 630, 540, 450, 360, 270, 180, 90}
var fibreLevels = []float64{4.7, 3.7, 2.8, 1.9, 0.9}
var proteinLevels = []float64{8, 6.4, 4.8, 3.2, 1.6}

// Beverages use their own, stricter energy and sugar scales.
var energyLevelsBeverage = []float64{270, 240, 210, 180, 150, 120, 90, 60, 30, 0}
var sugarsLevelsBeverage = []float64{13.5, 12, 10.5, 9, 7.5, 6, 4.5, 3, 1.5, 0}

// NutritionalScore contains the numeric nutritional score value and type of product
type NutritionalScore struct {
	Value     int
	Positive  int
	Negative  int
	ScoreType ScoreType
}

// EnergyKJ represents the energy density in kJ/100g
type EnergyKJ float64

// SugarGram represents amount of sugars in grams/100g
type SugarGram float64

// SaturatedFattyAcidsGram represents amount of saturated fatty acids in grams/100g
type SaturatedFattyAcidsGram float64

// SodiumMilligram represents amount of sodium in mg/100g
type SodiumMilligram float64

// FruitsPercent represents fruits, vegetables, pulses, nuts, and rapeseed, walnut and olive oils
// as percentage of the total
type FruitsPercent float64

// FibreGram represents amount of fibre in grams/100g
type FibreGram float64

// ProteinGram represents amount of protein in grams/100g
type ProteinGram float64
// EnergyFromKcal converts energy density from kcal to EnergyKJ
func EnergyFromKcal(kcal float64) EnergyKJ {
	const kJPerKcal = 4.184
	return EnergyKJ(kcal * kJPerKcal)
}

// SodiumFromSalt converts salt mg/100g content to sodium content
func SodiumFromSalt(saltMg float64) SodiumMilligram {
	// Salt is 2.5 times its sodium content by mass.
	const saltToSodiumRatio = 2.5
	return SodiumMilligram(saltMg / saltToSodiumRatio)
}
// GetPoints returns the nutritional score
func (e EnergyKJ) GetPoints(st ScoreType) int {
	// Beverages score energy on their own scale.
	if st == Beverage {
		return getPointsFromRange(float64(e), energyLevelsBeverage)
	}
	return getPointsFromRange(float64(e), energyLevels)
}

// GetPoints returns the nutritional score
func (s SugarGram) GetPoints(st ScoreType) int {
	// Beverages score sugars on their own scale.
	if st == Beverage {
		return getPointsFromRange(float64(s), sugarsLevelsBeverage)
	}
	return getPointsFromRange(float64(s), sugarsLevels)
}

// GetPoints returns the nutritional score
func (sfa SaturatedFattyAcidsGram) GetPoints(st ScoreType) int {
	return getPointsFromRange(float64(sfa), saturatedFattyAcidsLevels)
}

// GetPoints returns the nutritional score
func (s SodiumMilligram) GetPoints(st ScoreType) int {
	return getPointsFromRange(float64(s), sodiumLevels)
}

// GetPoints returns the nutritional score
func (f FruitsPercent) GetPoints(st ScoreType) int {
	// Beverages earn more points for the same fruit content.
	if st == Beverage {
		if f > 80 {
			return 10
		} else if f > 60 {
			return 4
		} else if f > 40 {
			return 2
		}
		return 0
	}
	if f > 80 {
		return 5
	} else if f > 60 {
		return 2
	} else if f > 40 {
		return 1
	}
	return 0
}

// GetPoints returns the nutritional score
func (f FibreGram) GetPoints(st ScoreType) int {
	return getPointsFromRange(float64(f), fibreLevels)
}

// GetPoints returns the nutritional score
func (p ProteinGram) GetPoints(st ScoreType) int {
	return getPointsFromRange(float64(p), proteinLevels)
}
// NutritionalData represents the source nutritional data used for the calculation
type NutritionalData struct {
	Energy              EnergyKJ
	Sugars              SugarGram
	SaturatedFattyAcids SaturatedFattyAcidsGram
	Sodium              SodiumMilligram
	Fruits              FruitsPercent
	Fibre               FibreGram
	Protein             ProteinGram
	// IsWater is not read by the visible scoring code; callers appear to pass
	// ScoreType Water instead — TODO confirm whether this field is still used.
	IsWater bool
}
// GetNutritionalScore calculates the nutritional score for nutritional data n of type st
func GetNutritionalScore(n NutritionalData, st ScoreType) NutritionalScore {
	value := 0
	positive := 0
	negative := 0
	// Water is always graded A page 30
	if st != Water {
		fruitPoints := n.Fruits.GetPoints(st)
		fibrePoints := n.Fibre.GetPoints(st)
		// Negative points: energy, sugars, saturated fat, sodium.
		// Positive points: fruits, fibre, protein.
		negative = n.Energy.GetPoints(st) + n.Sugars.GetPoints(st) + n.SaturatedFattyAcids.GetPoints(st) + n.Sodium.GetPoints(st)
		positive = fruitPoints + fibrePoints + n.Protein.GetPoints(st)
		if st == Cheese {
			// Cheeses always use (negative - positive) page 29
			value = negative - positive
		} else {
			// page 27
			// With high negative points and low fruit points, protein is
			// excluded from the positive side.
			if negative >= 11 && fruitPoints < 5 {
				value = negative - fibrePoints - fruitPoints
			} else {
				value = negative - positive
			}
		}
	}
	return NutritionalScore{
		Value:     value,
		Positive:  positive,
		Negative:  negative,
		ScoreType: st,
	}
}
// GetNutriScore returns the Nutri-Score rating
func (ns NutritionalScore) GetNutriScore() string {
	if ns.ScoreType == Food {
		// The thresholds map score ranges to letters A–E (higher is worse).
		return scoreToLetter[getPointsFromRange(float64(ns.Value), []float64{18, 10, 2, -1})]
	}
	// Water is always graded A.
	if ns.ScoreType == Water {
		return scoreToLetter[0]
	}
	// Beverages and cheeses use a tighter letter scale.
	return scoreToLetter[getPointsFromRange(float64(ns.Value), []float64{9, 5, 1, -2})]
}
// getPointsFromRange scores v against the descending thresholds in steps:
// it returns len(steps)-i for the first (largest) threshold steps[i] that v
// strictly exceeds, or 0 when v exceeds none of them.
func getPointsFromRange(v float64, steps []float64) int {
	for i := 0; i < len(steps); i++ {
		if v > steps[i] {
			return len(steps) - i
		}
	}
	return 0
}
|
#!/bin/bash -e

. /etc/os-release

print_usage() {
    echo "build_reloc.sh --clean --nodeps"
    echo " --clean clean build directory"
    echo " --nodeps skip installing dependencies"
    echo " --version V product-version-release string (overriding SCYLLA-VERSION-GEN)"
    exit 1
}

CLEAN=
NODEPS=
VERSION_OVERRIDE=
while [ $# -gt 0 ]; do
    case "$1" in
        "--clean")
            CLEAN=yes
            shift 1
            ;;
        "--nodeps")
            NODEPS=yes
            shift 1
            ;;
        "--version")
            VERSION_OVERRIDE="$2"
            shift 2
            ;;
        *)
            print_usage
            ;;
    esac
done

VERSION=$(./SCYLLA-VERSION-GEN ${VERSION_OVERRIDE:+ --version "$VERSION_OVERRIDE"})
# the former command should generate build/SCYLLA-PRODUCT-FILE and some other version
# related files
# $(...) instead of backticks; quote expansions so values cannot word-split.
PRODUCT=$(cat build/SCYLLA-PRODUCT-FILE)

is_redhat_variant() {
    [ -f /etc/redhat-release ]
}
is_debian_variant() {
    [ -f /etc/debian_version ]
}

if [ ! -e reloc/build_reloc.sh ]; then
    echo "run build_reloc.sh in top of scylla dir"
    exit 1
fi

if [ "$CLEAN" = "yes" ]; then
    rm -rf build target
fi

if [ -f "build/$PRODUCT-tools-package.tar.gz" ]; then
    rm "build/$PRODUCT-tools-package.tar.gz"
fi

if [ -z "$NODEPS" ]; then
    sudo ./install-dependencies.sh
fi

printf "version=%s" "$VERSION" > build.properties
ant jar
dist/debian/debian_files_gen.py
scripts/create-relocatable-package.py --version "$VERSION" "build/$PRODUCT-tools-package.tar.gz"
|
// Fetch the currently authenticated user's profile and log the basic fields.
fetch('/api/user/current', {
  method: 'GET',
  credentials: 'same-origin',
  headers: new Headers({
    'Content-Type': 'application/json',
    'Accept': 'application/json'
  })
})
  .then(response => {
    // Surface HTTP-level failures instead of trying to parse an error body.
    if (!response.ok) {
      throw new Error(`Request failed: ${response.status}`)
    }
    return response.json()
  })
  .then(data => {
    console.log(data.username)
    console.log(data.first_name)
    console.log(data.last_name)
    console.log(data.email)
  })
  .catch(error => {
    // Without this, network/HTTP errors became unhandled promise rejections.
    console.error(error)
  })
#!/bin/bash

note=$1

# Integer comparison operators:
# -lt <, -gt >, -ge >=, -le <=, -eq or ==, -ne or !=
if [[ $note -lt 60 ]] # text must not touch the [[ or the ]]
then
    echo "echec"
elif test $note -eq 60 # `test` behaves like [[ ]]; use whichever you prefer
# `then` may go on the same line, but that requires a '; ' before it
then
    echo "swishh"
else
    echo "bravo Charlie!"
fi

# Switch/case
read lettre
case $lettre in
    c)
        echo cest un c
        ;;
    d)
        echo cest un d
        ;;
    [1-8])
        echo un chiffre entre 1 et 8
        ;;
    [[:lower:]])
        echo cest une minuscule
        ;;
    [[:upper:]])
        echo cest une majuscule
        ;;
    *) # this arm is the default
        ;;
esac

# String comparison
# here == and != can be used directly
if test $lettre != "a" # != and == work with strings
then
    echo "la lettre nest pas a"
fi
|
package com.acgist.snail.pojo.bean;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.acgist.snail.format.BEncodeDecoder;
import com.acgist.snail.utils.NetUtils;
import com.acgist.snail.utils.StringUtils;
/**
* <p>种子信息</p>
*
* @author acgist
*/
/**
 * <p>Torrent metadata parsed from a .torrent file.</p>
 *
 * @author acgist
 */
public final class Torrent implements Serializable {

	private static final long serialVersionUID = 1L;

	private static final Logger LOGGER = LoggerFactory.getLogger(Torrent.class);

	/**
	 * <p>Comment key: {@value}</p>
	 */
	public static final String ATTR_COMMENT = "comment";
	/**
	 * <p>UTF-8 comment key: {@value}</p>
	 */
	public static final String ATTR_COMMENT_UTF8 = "comment.utf-8";
	/**
	 * <p>Encoding key: {@value}</p>
	 */
	public static final String ATTR_ENCODING = "encoding";
	/**
	 * <p>Creator key: {@value}</p>
	 */
	public static final String ATTR_CREATED_BY = "created by";
	/**
	 * <p>Creation time key: {@value}</p>
	 */
	public static final String ATTR_CREATION_DATE = "creation date";
	/**
	 * <p>Tracker server key: {@value}</p>
	 */
	public static final String ATTR_ANNOUNCE = "announce";
	/**
	 * <p>Tracker server list key: {@value}</p>
	 */
	public static final String ATTR_ANNOUNCE_LIST = "announce-list";
	/**
	 * <p>File info key: {@value}</p>
	 */
	public static final String ATTR_INFO = "info";
	/**
	 * <p>DHT nodes key: {@value}</p>
	 */
	public static final String ATTR_NODES = "nodes";

	// ============== data carried by the torrent file itself ============== //

	/**
	 * <p>Comment</p>
	 */
	private String comment;
	/**
	 * <p>UTF-8 comment</p>
	 */
	private String commentUtf8;
	/**
	 * <p>Encoding</p>
	 */
	private String encoding;
	/**
	 * <p>Creator</p>
	 */
	private String createdBy;
	/**
	 * <p>Creation time</p>
	 */
	private Long creationDate;
	/**
	 * <p>Tracker server</p>
	 */
	private String announce;
	/**
	 * <p>Tracker server list</p>
	 */
	private List<String> announceList;
	/**
	 * <p>File info</p>
	 */
	private TorrentInfo info;
	/**
	 * <p>DHT nodes</p>
	 */
	private Map<String, Integer> nodes;

	// ============== transient state (not part of the torrent file) ============== //

	/**
	 * <p>InfoHash</p>
	 * <p>Saved when the torrent file finishes loading, so the hash is not
	 * recomputed later (recomputation could yield a wrong value).</p>
	 */
	private transient InfoHash infoHash;

	protected Torrent() {
	}
	/**
	 * <p>Reads torrent metadata from a bencoded document.</p>
	 *
	 * @param decoder bencode decoder holding the torrent document
	 *
	 * @return parsed torrent metadata
	 */
	public static final Torrent valueOf(BEncodeDecoder decoder) {
		Objects.requireNonNull(decoder, "种子信息为空");
		final Torrent torrent = new Torrent();
		// original encoding declared by the file; also used to decode the comment
		final String encoding = decoder.getString(ATTR_ENCODING);
		torrent.setEncoding(encoding);
		torrent.setComment(decoder.getString(ATTR_COMMENT, encoding));
		torrent.setCommentUtf8(decoder.getString(ATTR_COMMENT_UTF8));
		torrent.setCreatedBy(decoder.getString(ATTR_CREATED_BY));
		torrent.setCreationDate(decoder.getLong(ATTR_CREATION_DATE));
		torrent.setAnnounce(decoder.getString(ATTR_ANNOUNCE));
		// read the tracker server list
		final List<Object> announceList = decoder.getList(ATTR_ANNOUNCE_LIST);
		torrent.setAnnounceList(readAnnounceList(announceList));
		// read the file info dictionary
		final Map<String, Object> info = decoder.getMap(ATTR_INFO);
		torrent.setInfo(TorrentInfo.valueOf(info, encoding));
		// read the DHT nodes
		final List<Object> nodes = decoder.getList(ATTR_NODES);
		torrent.setNodes(readNodes(nodes));
		return torrent;
	}
	/**
	 * <p>Gets the task name.</p>
	 * <p>Prefers {@link TorrentInfo#getNameUtf8()}; falls back to
	 * {@link TorrentInfo#getName()} when the UTF-8 name is empty.</p>
	 *
	 * @return task name
	 */
	public String name() {
		String name = this.info.getNameUtf8();
		if(StringUtils.isEmpty(name)) {
			name = this.info.getName();
		}
		return name;
	}

	/**
	 * <p>Gets the InfoHash.</p>
	 *
	 * @return InfoHash
	 */
	public InfoHash infoHash() {
		return this.infoHash;
	}

	/**
	 * <p>Sets the InfoHash.</p>
	 *
	 * @param infoHash InfoHash
	 */
	public void infoHash(InfoHash infoHash) {
		this.infoHash = infoHash;
	}
/**
* <p>获取Tracker服务器列表</p>
* <p>每个元素都是一个list,每个list里面包含一个Tracker服务器地址。</p>
*
* @param announceList Tracker服务器数据
*
* @return Tracker服务器列表
*/
private static final List<String> readAnnounceList(List<Object> announceList) {
if(announceList == null) {
return new ArrayList<>(0);
}
return announceList.stream()
.flatMap(value -> {
final List<?> values = (List<?>) value;
return values.stream();
})
.map(value -> StringUtils.getString(value))
.collect(Collectors.toList());
}
	/**
	 * <p>Reads the DHT nodes.</p>
	 * <p>Each element is itself a list containing a node's host and port.</p>
	 *
	 * @param nodes raw DHT node data
	 *
	 * @return DHT nodes (host to port), in input order
	 */
	private static final Map<String, Integer> readNodes(List<Object> nodes) {
		if(nodes == null) {
			return new LinkedHashMap<>();
		}
		return nodes.stream()
			.map(value -> {
				final List<?> values = (List<?>) value;
				if(values.size() == 2) {
					final String host = StringUtils.getString(values.get(0));
					final Long port = (Long) values.get(1);
					if(StringUtils.isNumeric(host)) {
						// compact form: host is an integer-encoded IP address
						return Map.entry(
							NetUtils.intToIP(Integer.parseInt(host)),
							NetUtils.portToInt(port.shortValue())
						);
					} else {
						// plain host string
						return Map.entry(host, port.intValue());
					}
				} else {
					// malformed entry: log and drop (filtered out below)
					LOGGER.warn("DHT节点错误:{}", value);
					return null;
				}
			})
			.filter(Objects::nonNull)
			.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (a, b) -> b, LinkedHashMap::new));
	}
	// ============== GETTER SETTER ============== //

	/**
	 * <p>Gets the comment.</p>
	 *
	 * @return comment
	 */
	public String getComment() {
		return this.comment;
	}

	/**
	 * <p>Sets the comment.</p>
	 *
	 * @param comment comment
	 */
	public void setComment(String comment) {
		this.comment = comment;
	}

	/**
	 * <p>Gets the UTF-8 comment.</p>
	 *
	 * @return UTF-8 comment
	 */
	public String getCommentUtf8() {
		return this.commentUtf8;
	}

	/**
	 * <p>Sets the UTF-8 comment.</p>
	 *
	 * @param commentUtf8 UTF-8 comment
	 */
	public void setCommentUtf8(String commentUtf8) {
		this.commentUtf8 = commentUtf8;
	}

	/**
	 * <p>Gets the encoding.</p>
	 *
	 * @return encoding
	 */
	public String getEncoding() {
		return this.encoding;
	}

	/**
	 * <p>Sets the encoding.</p>
	 *
	 * @param encoding encoding
	 */
	public void setEncoding(String encoding) {
		this.encoding = encoding;
	}

	/**
	 * <p>Gets the creator.</p>
	 *
	 * @return creator
	 */
	public String getCreatedBy() {
		return this.createdBy;
	}

	/**
	 * <p>Sets the creator.</p>
	 *
	 * @param createdBy creator
	 */
	public void setCreatedBy(String createdBy) {
		this.createdBy = createdBy;
	}

	/**
	 * <p>Gets the creation time.</p>
	 *
	 * @return creation time
	 */
	public Long getCreationDate() {
		return this.creationDate;
	}

	/**
	 * <p>Sets the creation time.</p>
	 *
	 * @param creationDate creation time
	 */
	public void setCreationDate(Long creationDate) {
		this.creationDate = creationDate;
	}

	/**
	 * <p>Gets the tracker server.</p>
	 *
	 * @return tracker server
	 */
	public String getAnnounce() {
		return this.announce;
	}

	/**
	 * <p>Sets the tracker server.</p>
	 *
	 * @param announce tracker server
	 */
	public void setAnnounce(String announce) {
		this.announce = announce;
	}

	/**
	 * <p>Gets the tracker server list.</p>
	 *
	 * @return tracker server list
	 */
	public List<String> getAnnounceList() {
		return this.announceList;
	}

	/**
	 * <p>Sets the tracker server list.</p>
	 *
	 * @param announceList tracker server list
	 */
	public void setAnnounceList(List<String> announceList) {
		this.announceList = announceList;
	}

	/**
	 * <p>Gets the file info.</p>
	 *
	 * @return file info
	 */
	public TorrentInfo getInfo() {
		return this.info;
	}

	/**
	 * <p>Sets the file info.</p>
	 *
	 * @param info file info
	 */
	public void setInfo(TorrentInfo info) {
		this.info = info;
	}

	/**
	 * <p>Gets the DHT nodes.</p>
	 *
	 * @return DHT nodes
	 */
	public Map<String, Integer> getNodes() {
		return this.nodes;
	}

	/**
	 * <p>Sets the DHT nodes.</p>
	 *
	 * @param nodes DHT nodes
	 */
	public void setNodes(Map<String, Integer> nodes) {
		this.nodes = nodes;
	}

}
|
package test;
/**
 * @Class: InterfaceA
 * @Description: Example interface demonstrating abstract method declarations.
 * @Author: hubohua
 * @CreateDate: 2018/8/15
 */
public interface InterfaceA {
	void a();
	// Interfaces may also declare default methods, which implementers need not override:
	// default String getName() {
	//     return "";
	// }
}
|
package com.nortal.spring.cw.core.xml.jaxb;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import javax.activation.DataHandler;
import javax.xml.bind.attachment.AttachmentUnmarshaller;
import org.apache.commons.lang3.StringUtils;
import org.springframework.oxm.UnmarshallingFailureException;
import org.springframework.oxm.mime.MimeContainer;
import org.springframework.util.FileCopyUtils;
/**
* @author <NAME> <<EMAIL>>
* @since 22.10.2013
*/
public class EpmAttachmentUnmarshaller extends AttachmentUnmarshaller {

	/** Container holding the MIME attachments of the message being unmarshalled. */
	private final MimeContainer mimeContainer;

	public EpmAttachmentUnmarshaller(MimeContainer mimeContainer) {
		this.mimeContainer = mimeContainer;
	}

	/**
	 * Resolves a content id to the matching MIME attachment.
	 *
	 * <p>Strips the marshaller's CID prefix (when present), URL-decodes the
	 * remainder, and wraps it in angle brackets as required by the MIME
	 * attachment lookup.</p>
	 */
	@Override
	public DataHandler getAttachmentAsDataHandler(String cid) {
		if (StringUtils.startsWithIgnoreCase(cid, EpmJaxb2Marshaller.CID)) {
			cid = cid.substring(EpmJaxb2Marshaller.CID.length());
			try {
				cid = URLDecoder.decode(cid, "UTF-8");
			} catch (UnsupportedEncodingException ignored) {
				// UTF-8 support is guaranteed by the JVM spec; this cannot happen.
			}
		}
		cid = "<" + cid + ">";
		return mimeContainer.getAttachment(cid);
	}

	/**
	 * Reads the attachment identified by {@code cid} fully into memory.
	 *
	 * @throws UnmarshallingFailureException when the attachment stream cannot be read
	 */
	@Override
	public byte[] getAttachmentAsByteArray(String cid) {
		try {
			DataHandler dataHandler = getAttachmentAsDataHandler(cid);
			return FileCopyUtils.copyToByteArray(dataHandler.getInputStream());
		} catch (IOException e) {
			throw new UnmarshallingFailureException("EpmAttachmentUnmarshaller.getAttachmentAsByteArray: Couldn't read attachment", e);
		}
	}
}
|
#!/bin/bash
set -e

# Print usage for the supported sub-commands.
show_help() {
echo """
Commands
---------------------------------------------------------
bash : run bash
eval : eval shell command
build : build the app [arg: path to cmake folder]
run : run the application [arg: path to program]
test : test the application
ftest : build, then test the application
frun : build, then run the application
"""
}

# Configure and build inside ./build. Runs in a subshell so the caller's
# working directory is unchanged (previously this left the shell in ./build,
# which ftest/frun silently depended on).
build(){
(
cd "./build"
cmake ..
cmake --build .
)
}

case "$1" in
bash )
bash
;;
eval )
eval "${@:2}"
;;
build )
build
;;
test )
eval "./build/tests/tests ${@:2}"
;;
run )
eval "./build/main/app ${@:2}"
;;
ftest )
# Paths are now relative to the repo root since build() no longer changes cwd.
build && eval "./build/tests/tests ${@:2}"
;;
frun )
build && eval "./build/main/app ${@:2}"
;;
* )
show_help
;;
esac
|
public class StepsActivity extends WearableActivity implements SensorEventListener {
private SensorManager mSensorManager;
private Sensor mStepSensor;
private int numSteps = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
mStepSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_STEP_COUNTER);
}
@Override
public void onSensorChanged(SensorEvent event) {
if(event.sensor.getType() == Sensor.TYPE_STEP_COUNTER) {
numSteps += event.values[0];
}
}
@Override
protected void onResume() {
super.onResume();
mSensorManager.registerListener(this, mStepSensor, SensorManager.SENSOR_DELAY_NORMAL);
}
@Override
protected void onPause() {
super.onPause();
mSensorManager.unregisterListener(this);
}
} |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import React from 'react';
import { render } from 'enzyme';
import { requiredProps } from '../../../test';
import { EuiHeaderLinks, GUTTER_SIZES } from './header_links';
// Snapshot coverage for EuiHeaderLinks rendering and its popover/gutter config.
describe('EuiHeaderLinks', () => {
  test('is rendered', () => {
    const component = render(<EuiHeaderLinks {...requiredProps} />);
    expect(component).toMatchSnapshot();
  });
  // Every supported gutter size should produce a stable snapshot.
  describe('gutterSize', () => {
    GUTTER_SIZES.forEach((gutterSize) => {
      test(`${gutterSize} is rendered`, () => {
        const component = render(<EuiHeaderLinks gutterSize={gutterSize} />);
        expect(component).toMatchSnapshot();
      });
    });
  });
  describe('popover props', () => {
    test('is rendered', () => {
      const component = render(
        <EuiHeaderLinks
          popoverBreakpoints={['xs', 's', 'm', 'l', 'xl']}
          popoverButtonProps={{
            iconType: 'bolt',
            className: 'customButtonClass',
          }}
          popoverProps={{ anchorClassName: 'customAnchorClass' }}
        />
      );
      expect(component).toMatchSnapshot();
    });
    // popoverBreakpoints={'none'} disables the collapsed popover entirely.
    test('is never rendered with "none"', () => {
      const component = render(<EuiHeaderLinks popoverBreakpoints={'none'} />);
      expect(component).toMatchSnapshot();
    });
  });
});
|
Model.Form.User = Model.Form.extend({
  class_name: 'Model.Form.User',

  // Builds the edit form for the model: one text input per visible attribute,
  // plus a message container and Back/Delete buttons. Returns the form element.
  edit: function()
  {
    var m = this.model;
    var div = $('<div/>');
    $(m.attributes).each(function(i, a) {
      // Hidden attributes are not editable.
      if (a.type == 'hidden')
        return;
      div.append(
        $('<div/>')
          .attr('id', m.name + '_' + m.id + '_' + a.name + '_container')
          .click(function() { m.edit_attribute(a.name); })
          .append($('<input/>')
            .attr('placeholder', a.nice_name)
            .val(a.value)
          )
      );
    });
    div.append($('<div/>').attr('id', this.message))
      .append($('<p/>')
        .append($('<input/>').attr('type', 'button').val('Back').click(function() { caboose_station.close_url('/pages/'+m.id+'/redirect'); }))
        .append(' ')
        .append($('<input/>').attr('type', 'button').val('Delete ' + m.name).click(function() { m.ajax_delete(); }))
      );
    return div;
  }
});
|
<gh_stars>0
/*******************************************************************************
* Copyright 2017 Dell Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
* @microservice: core-clients-go library
* @author: <NAME>, Dell
* @version: 0.5.0
*******************************************************************************/
package metadataclients
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"github.com/edgexfoundry/edgex-go/core/domain/models"
"io/ioutil"
"net/http"
"net/url"
"strconv"
)
// Shared error values returned by the metadata clients.
var (
	ErrResponseNil error = errors.New("Problem connecting to metadata - response was nil") // fixed typo: "reponse"
	ErrNotFound    error = errors.New("Item not found")
)
// AddressableClient interacts with the addressable section of metadata.
type AddressableClient struct {
	url string
}

// DeviceClient interacts with the device section of metadata.
type DeviceClient struct {
	url string
}

// CommandClient interacts with the command section of metadata.
type CommandClient struct {
	url string
}

// ServiceClient interacts with the device service section of metadata.
type ServiceClient struct {
	url string
}

// DeviceProfileClient interacts with the device profile section of metadata.
type DeviceProfileClient struct {
	url string
}
// NewAddressableClient returns an AddressableClient bound to the given metadata URL.
func NewAddressableClient(metaDbAddressableUrl string) AddressableClient {
	return AddressableClient{url: metaDbAddressableUrl}
}

// NewDeviceClient returns a DeviceClient bound to the given metadata URL.
func NewDeviceClient(metaDbDeviceUrl string) DeviceClient {
	return DeviceClient{url: metaDbDeviceUrl}
}

// NewCommandClient returns a CommandClient bound to the given metadata URL.
func NewCommandClient(metaDbCommandUrl string) CommandClient {
	return CommandClient{url: metaDbCommandUrl}
}

// NewServiceClient returns a ServiceClient bound to the given metadata URL.
func NewServiceClient(metaDbServiceUrl string) ServiceClient {
	return ServiceClient{url: metaDbServiceUrl}
}

// NewDeviceProfileClient returns a DeviceProfileClient bound to the given metadata URL.
func NewDeviceProfileClient(metaDbDeviceProfileUrl string) DeviceProfileClient {
	return DeviceProfileClient{url: metaDbDeviceProfileUrl}
}
// makeRequest executes req, logging (but still returning) any transport error.
func makeRequest(req *http.Request) (*http.Response, error) {
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println(err)
	}
	return resp, err
}
// getBody drains and returns the response body; on failure the read error is
// logged and returned alongside an empty slice.
func getBody(resp *http.Response) ([]byte, error) {
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err)
		return []byte{}, err
	}
	return body, nil
}
// ***************** ADDRESSABLE CLIENT METHODS ***********************

// Add posts a new addressable to metadata and returns the ID assigned to it.
// A non-200 response is surfaced as an error built from the response body.
func (a *AddressableClient) Add(addr *models.Addressable) (string, error) {
	// Marshal the addressable to JSON
	jsonStr, err := json.Marshal(addr)
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	// Use the shared makeRequest helper for consistency with the other clients.
	req, err := http.NewRequest("POST", a.url, bytes.NewReader(jsonStr))
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil.Error())
		return "", ErrResponseNil
	}
	defer resp.Body.Close()
	// Get the response body
	bodyBytes, err := getBody(resp)
	if err != nil {
		fmt.Println(err.Error())
		return "", err
	}
	bodyString := string(bodyBytes)
	if resp.StatusCode != 200 {
		// bodyString is already a string; no redundant conversion needed.
		return "", errors.New(bodyString)
	}
	return bodyString, nil
}
// decodeAddressable decodes an Addressable from the response body.
// Receiver renamed from d to a for consistency with the other AddressableClient methods.
func (a *AddressableClient) decodeAddressable(resp *http.Response) (models.Addressable, error) {
	dec := json.NewDecoder(resp.Body)
	addr := models.Addressable{}
	err := dec.Decode(&addr)
	if err != nil {
		fmt.Println(err)
	}
	return addr, err
}
// TODO: make method signatures consistent wrt to error return value
// ie. use it everywhere, or not at all!
// AddressableForName fetches the addressable registered under the given name.
func (a *AddressableClient) AddressableForName(name string) (models.Addressable, error) {
	req, err := http.NewRequest("GET", a.url+"/name/"+url.QueryEscape(name), nil)
	if err != nil {
		fmt.Println(err)
		return models.Addressable{}, err
	}
	resp, err := makeRequest(req)
	// Check the transport error before touching resp (resp is nil on failure).
	if err != nil {
		// Printf, not Println: Println ignores the %s verb and would print it literally.
		fmt.Printf("AddressableForName makeRequest failed: %s\n", err)
		return models.Addressable{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return models.Addressable{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return models.Addressable{}, err
		}
		bodyString := string(bodyBytes)
		fmt.Println(bodyString)
		return models.Addressable{}, errors.New(bodyString)
	}
	return a.decodeAddressable(resp)
}
// ***************** DEVICE CLIENT METHODS ***********************
// decodeDeviceSlice decodes a slice of devices from the response body.
func (d *DeviceClient) decodeDeviceSlice(resp *http.Response) ([]models.Device, error) {
	dec := json.NewDecoder(resp.Body)
	dSlice := []models.Device{}
	err := dec.Decode(&dSlice)
	if err != nil {
		fmt.Println(err)
	}
	return dSlice, err
}

// decodeDevice decodes a single device from the response body.
func (d *DeviceClient) decodeDevice(resp *http.Response) (models.Device, error) {
	dec := json.NewDecoder(resp.Body)
	dev := models.Device{}
	err := dec.Decode(&dev)
	if err != nil {
		fmt.Println(err)
	}
	return dev, err
}
// Device fetches a single device by its id.
func (d *DeviceClient) Device(id string) (models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/"+id, nil)
	if err != nil {
		fmt.Println(err)
		return models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return models.Device{}, errors.New(bodyString)
	}
	return d.decodeDevice(resp)
}

// Devices fetches the list of all devices.
func (d *DeviceClient) Devices() ([]models.Device, error) {
	req, err := http.NewRequest("GET", d.url, nil)
	if err != nil {
		fmt.Println(err)
		return []models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return []models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Device{}, errors.New(bodyString)
	}
	return d.decodeDeviceSlice(resp)
}
// DeviceForName fetches a single device by its name.
func (d *DeviceClient) DeviceForName(name string) (models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/name/"+url.QueryEscape(name), nil)
	if err != nil {
		fmt.Println(err)
		return models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return models.Device{}, errors.New(bodyString)
	}
	return d.decodeDevice(resp)
}

// DevicesByLabel fetches the devices carrying the given label.
func (d *DeviceClient) DevicesByLabel(label string) ([]models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/label/"+url.QueryEscape(label), nil)
	if err != nil {
		fmt.Println(err)
		return []models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return []models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Device{}, errors.New(bodyString)
	}
	return d.decodeDeviceSlice(resp)
}
// DevicesForService fetches the devices attached to the service with the given id.
func (d *DeviceClient) DevicesForService(serviceId string) ([]models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/service/"+serviceId, nil)
	if err != nil {
		fmt.Println(err)
		return []models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return []models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Device{}, errors.New(bodyString)
	}
	return d.decodeDeviceSlice(resp)
}

// DevicesForServiceByName fetches the devices attached to the named service.
func (d *DeviceClient) DevicesForServiceByName(serviceName string) ([]models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/servicename/"+url.QueryEscape(serviceName), nil)
	if err != nil {
		fmt.Println(err)
		return []models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return []models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Device{}, errors.New(bodyString)
	}
	return d.decodeDeviceSlice(resp)
}
// DevicesForProfile fetches the devices using the profile with the given id.
func (d *DeviceClient) DevicesForProfile(profileId string) ([]models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/profile/"+profileId, nil)
	if err != nil {
		fmt.Println(err)
		return []models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return []models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Device{}, errors.New(bodyString)
	}
	return d.decodeDeviceSlice(resp)
}

// DevicesForProfileByName fetches the devices using the named profile.
func (d *DeviceClient) DevicesForProfileByName(profileName string) ([]models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/profilename/"+url.QueryEscape(profileName), nil)
	if err != nil {
		fmt.Println(err)
		return []models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return []models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Device{}, errors.New(bodyString)
	}
	return d.decodeDeviceSlice(resp)
}
// DevicesForAddressable fetches the devices bound to the addressable with the given id.
func (d *DeviceClient) DevicesForAddressable(addressableId string) ([]models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/addressable/"+addressableId, nil)
	if err != nil {
		fmt.Println(err)
		return []models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return []models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Device{}, errors.New(bodyString)
	}
	return d.decodeDeviceSlice(resp)
}

// DevicesForAddressableByName fetches the devices bound to the named addressable.
func (d *DeviceClient) DevicesForAddressableByName(addressableName string) ([]models.Device, error) {
	req, err := http.NewRequest("GET", d.url+"/addressablename/"+url.QueryEscape(addressableName), nil)
	if err != nil {
		fmt.Println(err)
		return []models.Device{}, err
	}
	// Make the request and get response
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return []models.Device{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Device{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Non-200: surface the response body as the error
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Device{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Device{}, errors.New(bodyString)
	}
	return d.decodeDeviceSlice(resp)
}
// Add posts a new device to metadata and returns the ID assigned to it.
// A non-200 response is surfaced as an error built from the response body.
func (d *DeviceClient) Add(dev *models.Device) (string, error) {
	jsonStr, err := json.Marshal(dev)
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	req, err := http.NewRequest("POST", d.url, bytes.NewReader(jsonStr))
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return "", err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return "", ErrResponseNil
	}
	defer resp.Body.Close()
	// Get the body (read unconditionally: it is the ID on success, the error text otherwise)
	bodyBytes, err := getBody(resp)
	if err != nil {
		fmt.Println(err.Error())
		return "", err
	}
	bodyString := string(bodyBytes)
	if resp.StatusCode != 200 {
		fmt.Println(bodyString)
		return "", errors.New(bodyString)
	}
	return bodyString, nil
}
// Update PUTs the given device to metadata, replacing the stored definition.
// A non-200 response is surfaced as an error built from the response body.
func (d *DeviceClient) Update(dev models.Device) error {
	jsonStr, err := json.Marshal(&dev)
	if err != nil {
		fmt.Println(err)
		return err
	}
	req, err := http.NewRequest("PUT", d.url, bytes.NewReader(jsonStr))
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	return nil
}
// UpdateLastConnected sets the lastConnected timestamp (epoch millis) for the
// device with the given id.
func (d *DeviceClient) UpdateLastConnected(id string, time int64) error {
	req, err := http.NewRequest("PUT", d.url+"/"+id+"/lastconnected/"+strconv.FormatInt(time, 10), nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	return nil
}

// UpdateLastConnectedByName sets the lastConnected timestamp (epoch millis)
// for the device with the given name.
func (d *DeviceClient) UpdateLastConnectedByName(name string, time int64) error {
	req, err := http.NewRequest("PUT", d.url+"/name/"+url.QueryEscape(name)+"/lastconnected/"+strconv.FormatInt(time, 10), nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	return nil
}
// UpdateLastReported sets the lastReported timestamp (epoch millis) for the
// device with the given id.
func (d *DeviceClient) UpdateLastReported(id string, time int64) error {
	req, err := http.NewRequest("PUT", d.url+"/"+id+"/lastreported/"+strconv.FormatInt(time, 10), nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	return nil
}
// UpdateLastReportedByName sets the lastReported timestamp (epoch millis) for
// the device with the given name.
func (d *DeviceClient) UpdateLastReportedByName(name string, time int64) error {
	req, err := http.NewRequest("PUT", d.url+"/name/"+url.QueryEscape(name)+"/lastreported/"+strconv.FormatInt(time, 10), nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	// err is provably nil here; return nil explicitly for consistency with
	// the sibling update methods.
	return nil
}
// UpdateOpState sets the operational state for the device with the given id.
func (d *DeviceClient) UpdateOpState(id string, opState string) error {
	req, err := http.NewRequest("PUT", d.url+"/"+id+"/opstate/"+opState, nil)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	// err is provably nil here; return nil explicitly for consistency with
	// the sibling update methods.
	return nil
}
// UpdateOpStateByName sets the operational state for the device with the given name.
func (d *DeviceClient) UpdateOpStateByName(name string, opState string) error {
	req, err := http.NewRequest("PUT", d.url+"/name/"+url.QueryEscape(name)+"/opstate/"+opState, nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	return nil
}
// UpdateAdminState sets the admin state for the device with the given id.
func (d *DeviceClient) UpdateAdminState(id string, adminState string) error {
	req, err := http.NewRequest("PUT", d.url+"/"+id+"/adminstate/"+adminState, nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	return nil
}
// UpdateAdminStateByName sets the admin state for the device with the given name.
func (d *DeviceClient) UpdateAdminStateByName(name string, adminState string) error {
	req, err := http.NewRequest("PUT", d.url+"/name/"+url.QueryEscape(name)+"/adminstate/"+adminState, nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	// err is provably nil here; return nil explicitly for consistency with
	// the sibling update methods.
	return nil
}
// Delete removes the device with the given id from metadata.
func (d *DeviceClient) Delete(id string) error {
	req, err := http.NewRequest("DELETE", d.url+"/id/"+id, nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	return nil
}

// DeleteByName removes the device with the given name from metadata.
func (d *DeviceClient) DeleteByName(name string) error {
	req, err := http.NewRequest("DELETE", d.url+"/name/"+url.QueryEscape(name), nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return err
		}
		bodyString := string(bodyBytes)
		return errors.New(bodyString)
	}
	return nil
}
// ************************** COMMAND CLIENT METHODS ****************************
// decodeCommand decodes a single command from the response body.
func (c *CommandClient) decodeCommand(resp *http.Response) (models.Command, error) {
	dec := json.NewDecoder(resp.Body)
	com := models.Command{}
	err := dec.Decode(&com)
	if err != nil {
		fmt.Println(err)
	}
	return com, err
}

// decodeCommandSlice decodes a slice of commands from the response body.
func (c *CommandClient) decodeCommandSlice(resp *http.Response) ([]models.Command, error) {
	dec := json.NewDecoder(resp.Body)
	comSlice := []models.Command{}
	err := dec.Decode(&comSlice)
	if err != nil {
		fmt.Println(err)
	}
	return comSlice, err
}
// Command fetches a single command by its id.
func (c *CommandClient) Command(id string) (models.Command, error) {
	req, err := http.NewRequest("GET", c.url+"/"+id, nil)
	if err != nil {
		fmt.Println(err)
		return models.Command{}, err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return models.Command{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return models.Command{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return models.Command{}, err
		}
		bodyString := string(bodyBytes)
		return models.Command{}, errors.New(bodyString)
	}
	return c.decodeCommand(resp)
}
// Commands fetches the list of all commands.
func (c *CommandClient) Commands() ([]models.Command, error) {
	req, err := http.NewRequest("GET", c.url, nil)
	if err != nil {
		fmt.Println(err)
		return []models.Command{}, err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return []models.Command{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Command{}, ErrResponseNil
	}
	// Close the body on every path; it was previously never closed here,
	// leaking the underlying connection.
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Command{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Command{}, errors.New(bodyString)
	}
	return c.decodeCommandSlice(resp)
}
// CommandsForName fetches every command registered under the given name.
func (c *CommandClient) CommandsForName(name string) ([]models.Command, error) {
	// Escape the name before embedding it in the URL path, consistent with
	// every other name-based lookup in this package.
	req, err := http.NewRequest("GET", c.url+"/name/"+url.QueryEscape(name), nil)
	if err != nil {
		fmt.Println(err)
		return []models.Command{}, err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return []models.Command{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return []models.Command{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Get the response body
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return []models.Command{}, err
		}
		bodyString := string(bodyBytes)
		return []models.Command{}, errors.New(bodyString)
	}
	return c.decodeCommandSlice(resp)
}
// Add posts a new command to metadata and returns the ID assigned to it.
// A non-200 response is surfaced as an error built from the response body.
func (c *CommandClient) Add(com *models.Command) (string, error) {
	jsonStr, err := json.Marshal(com)
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	req, err := http.NewRequest("POST", c.url, bytes.NewReader(jsonStr))
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	resp, err := makeRequest(req)
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return "", ErrResponseNil
	}
	defer resp.Body.Close()
	// Get the response body (read unconditionally: it is the ID on success,
	// the error text otherwise)
	bodyBytes, err := getBody(resp)
	if err != nil {
		fmt.Println(err.Error())
		return "", err
	}
	bodyString := string(bodyBytes)
	if resp.StatusCode != 200 {
		return "", errors.New(bodyString)
	}
	return bodyString, nil
}
// Update sends the JSON-encoded command via PUT. A nil response or any
// transport/encoding error is logged and returned; a non-200 status is
// converted into an error built from the response body text.
func (c *CommandClient) Update(com models.Command) error {
	payload, err := json.Marshal(&com)
	if err != nil {
		fmt.Println(err)
		return err
	}
	request, err := http.NewRequest("PUT", c.url, bytes.NewReader(payload))
	if err != nil {
		fmt.Println(err)
		return err
	}
	response, err := makeRequest(request)
	if err != nil {
		fmt.Println(err)
		return err
	}
	if response == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer response.Body.Close()
	if response.StatusCode == 200 {
		return nil
	}
	// Non-200: the body carries the service's error text.
	raw, err := getBody(response)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	return errors.New(string(raw))
}
// Delete removes the command with the given id (DELETE <url>/id/<id>).
// Errors are logged and returned; a non-200 status becomes an error built
// from the response body text.
func (c *CommandClient) Delete(id string) error {
	request, err := http.NewRequest("DELETE", c.url+"/id/"+id, nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	response, err := makeRequest(request)
	if err != nil {
		fmt.Println(err)
		return err
	}
	if response == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer response.Body.Close()
	if response.StatusCode == 200 {
		return nil
	}
	raw, err := getBody(response)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	return errors.New(string(raw))
}
// ********************** SERVICE CLIENT METHODS **************************
// decodeDeviceService reads a single DeviceService from the JSON response
// body. Decode errors are logged and returned alongside the (possibly
// partially populated) value.
func (s *ServiceClient) decodeDeviceService(resp *http.Response) (models.DeviceService, error) {
	var ds models.DeviceService
	err := json.NewDecoder(resp.Body).Decode(&ds)
	if err != nil {
		fmt.Println(err)
	}
	return ds, err
}
// UpdateLastConnected records the last-connected timestamp (epoch value)
// for the device service with the given id via a parameterless PUT.
func (s *ServiceClient) UpdateLastConnected(id string, time int64) error {
	endpoint := s.url + "/" + id + "/lastconnected/" + strconv.FormatInt(time, 10)
	request, err := http.NewRequest("PUT", endpoint, nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	response, err := makeRequest(request)
	if err != nil {
		fmt.Println(err)
		return err
	}
	if response == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer response.Body.Close()
	if response.StatusCode == 200 {
		return nil
	}
	// Non-200: return the service's error text.
	raw, err := getBody(response)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	return errors.New(string(raw))
}
// UpdateLastReported records the last-reported timestamp (epoch value)
// for the device service with the given id via a parameterless PUT.
func (s *ServiceClient) UpdateLastReported(id string, time int64) error {
	endpoint := s.url + "/" + id + "/lastreported/" + strconv.FormatInt(time, 10)
	request, err := http.NewRequest("PUT", endpoint, nil)
	if err != nil {
		fmt.Println(err)
		return err
	}
	response, err := makeRequest(request)
	if err != nil {
		fmt.Println(err)
		return err
	}
	if response == nil {
		fmt.Println(ErrResponseNil)
		return ErrResponseNil
	}
	defer response.Body.Close()
	if response.StatusCode == 200 {
		return nil
	}
	// Non-200: return the service's error text.
	raw, err := getBody(response)
	if err != nil {
		fmt.Println(err.Error())
		return err
	}
	return errors.New(string(raw))
}
// Add registers a new device service and returns the id text from the
// response body; a non-200 status is printed and returned as an error.
//
// NOTE(review): this posts with a plain http.Client instead of going
// through makeRequest like the other methods — confirm whether
// makeRequest adds headers (e.g. auth) that are intentionally skipped.
func (s *ServiceClient) Add(ds *models.DeviceService) (string, error) {
	payload, err := json.Marshal(ds)
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	httpClient := &http.Client{}
	response, err := httpClient.Post(s.url, "application/json", bytes.NewReader(payload))
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	if response == nil {
		fmt.Println(ErrResponseNil)
		return "", ErrResponseNil
	}
	defer response.Body.Close()
	// The body is the new id on success and the error text otherwise.
	raw, err := getBody(response)
	if err != nil {
		fmt.Println(err.Error())
		return "", err
	}
	text := string(raw)
	if response.StatusCode != 200 {
		fmt.Println(text)
		return "", errors.New(text)
	}
	return text, nil
}
// DeviceServiceForName fetches the device service registered under name.
//
// On a transport error, nil response, or non-200 status the error is
// logged and returned with a zero DeviceService.
func (s *ServiceClient) DeviceServiceForName(name string) (models.DeviceService, error) {
	req, err := http.NewRequest("GET", s.url+"/name/"+name, nil)
	if err != nil {
		fmt.Printf("DeviceServiceForName NewRequest failed: %v\n", err)
		return models.DeviceService{}, err
	}
	resp, err := makeRequest(req)
	// Bug fix: check the transport error before the nil-response check.
	// Previously a failed request (which leaves resp nil) was reported as
	// ErrResponseNil, masking the real cause; Close was also deferred
	// before the error check.
	if err != nil {
		fmt.Printf("DeviceServiceForName makeRequest failed: %v\n", err)
		return models.DeviceService{}, err
	}
	if resp == nil {
		fmt.Println(ErrResponseNil)
		return models.DeviceService{}, ErrResponseNil
	}
	defer resp.Body.Close()
	if resp.StatusCode != 200 {
		// Surface the server-provided error text.
		bodyBytes, err := getBody(resp)
		if err != nil {
			fmt.Println(err.Error())
			return models.DeviceService{}, err
		}
		bodyString := string(bodyBytes)
		return models.DeviceService{}, errors.New(bodyString)
	}
	return s.decodeDeviceService(resp)
}
// ***************** DEVICE PROFILE METHODS *************************
// Add posts a new device profile to metadata and returns the id text the
// service responds with; a non-200 reply is printed and returned as an
// error built from the response body.
func (dpc *DeviceProfileClient) Add(dp *models.DeviceProfile) (string, error) {
	payload, err := json.Marshal(dp)
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	httpClient := &http.Client{}
	response, err := httpClient.Post(dpc.url, "application/json", bytes.NewReader(payload))
	if err != nil {
		fmt.Println(err)
		return "", err
	}
	if response == nil {
		fmt.Println(ErrResponseNil)
		return "", ErrResponseNil
	}
	defer response.Body.Close()
	// The body is the new id on success and the error text otherwise.
	raw, err := getBody(response)
	if err != nil {
		fmt.Println(err.Error())
		return "", err
	}
	text := string(raw)
	if response.StatusCode != 200 {
		fmt.Println(text)
		return "", errors.New(text)
	}
	return text, nil
}
|
#!/bin/bash
# SLURM batch script: runs one sequence-tagging experiment (sigmoid
# activation) on the cluster. Directives below configure the scheduler.
#SBATCH -J Act_sigmoid_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args: activation, hidden size, optimizer, depth, dropout,
# learning rate, embedding type, final arg — NOTE(review): meanings are
# presumed from PE-my.py's CLI; confirm against that script.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py sigmoid 174 Nadam 3 0.5245884720146486 0.0020337057375237233 orth 0.3
|
def replace():
    """Walk through a multi-step file-edit workflow, reporting progress.

    Tracks the furthest step reached in ``actual`` so the ``finally``
    clause can report where the flow stopped.
    """
    steps = {1: 'Extracting content', 2: 'Starting edition', 3: 'Getting quantity'}
    actual = 1
    try:
        # get_path() is defined elsewhere in this module — presumably
        # returns the path of the file to edit; confirm against caller.
        file = get_path()
        print(steps[1])
        content = open(file).read()
        # First line whose second character is a space; raises IndexError
        # when no line matches (handled below).
        line = [x for x in content.split('\n') if ' ' in x[:1]][0]
        actual = 2
        # NOTE(review): steps[2] ('Starting edition') is never printed —
        # confirm whether a print(steps[2]) is missing here.
        # Additional steps
        # Step 3: Getting quantity
        print(steps[3])
        # Perform the replacement or editing of the specific line here
        # Step 4: Saving changes
        print("Saving changes")
    except FileNotFoundError:
        print("File not found")
        actual = 1
    except IndexError:
        print("Error extracting line")
        actual = 2
    except Exception as e:
        print(f"An error occurred: {e}")
        actual = 3
    finally:
        # Always report the last step reached, success or failure.
        print(f"Actual step: {actual}")
<filename>Android/app/src/main/java/eu/rasus/fer/rasus/chatsPreview/AllChatsPreviewFragment.java
package eu.rasus.fer.rasus.chatsPreview;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import android.widget.TextView;
import com.github.nkzawa.emitter.Emitter;
import com.github.nkzawa.socketio.client.Socket;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnItemClick;
import eu.rasus.fer.rasus.Application;
import eu.rasus.fer.rasus.HttpsConstants;
import eu.rasus.fer.rasus.R;
import eu.rasus.fer.rasus.RestApi;
import eu.rasus.fer.rasus.chat.ChatActivity;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
public class AllChatsPreviewFragment extends Fragment {
@BindView(R.id.chat_item_container)
ListView itemContainer;
@BindView(R.id.no_chats_text)
TextView noChats;
private ChatPreviewAdapter chatPreviewAdapter;
private Gson gson;
private Retrofit retrofit;
private RestApi api;
private Call<List<ChatPreview>> call;
private Emitter.Listener handleIncomingMessage = new Emitter.Listener() {
@Override
public void call(final Object... args) {
call.clone().enqueue(new Callback<List<ChatPreview>>() {
@Override
public void onResponse(final Call<List<ChatPreview>> call, final Response<List<ChatPreview>> response) {
List<ChatPreview> chats = new ArrayList<ChatPreview>();
for (ChatPreview chat : response.body()) {
if (chat.lastMessageText != null) {
chats.add(chat);
}
}
chatPreviewAdapter = new ChatPreviewAdapter(getActivity(), chats);
itemContainer.setTranscriptMode(ListView.TRANSCRIPT_MODE_ALWAYS_SCROLL);
if (response.body().size() == 0) {
noChats.setVisibility(View.VISIBLE);
} else {
noChats.setVisibility(View.INVISIBLE);
}
itemContainer.setAdapter(chatPreviewAdapter);
}
@Override
public void onFailure(final Call<List<ChatPreview>> call, final Throwable t) {
}
});
}
};
public Socket socket;
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_chats_preview, container, false);
ButterKnife.bind(this, view);
gson =
new GsonBuilder()
.registerTypeAdapter(ChatPreview.class, new ChatPreviewDeserializer())
.create();
retrofit = new Retrofit.Builder().baseUrl(HttpsConstants.ADDRES).client(HttpsConstants.getUnsafeOkHttpClient()).addConverterFactory(GsonConverterFactory.create(gson))
.build();
api = retrofit.create(RestApi.class);
call = api.getAllMessages(Application.TOKEN);
call.enqueue(new Callback<List<ChatPreview>>() {
@Override
public void onResponse(final Call<List<ChatPreview>> call, final Response<List<ChatPreview>> response) {
List<ChatPreview> chats = new ArrayList<ChatPreview>();
for (ChatPreview chat : response.body()) {
if (chat.lastMessageText != null) {
chats.add(chat);
}
}
chatPreviewAdapter = new ChatPreviewAdapter(getActivity(), chats);
itemContainer.setTranscriptMode(ListView.TRANSCRIPT_MODE_ALWAYS_SCROLL);
if (response.body().size() == 0) {
noChats.setVisibility(View.VISIBLE);
} else {
noChats.setVisibility(View.INVISIBLE);
}
itemContainer.setAdapter(chatPreviewAdapter);
}
@Override
public void onFailure(final Call<List<ChatPreview>> call, final Throwable t) {
}
});
socket = Application.SOCEKT;
socket.on("message", handleIncomingMessage);
return view;
}
@OnItemClick(R.id.chat_item_container)
public void openChat(final AdapterView<?> adapter, final View view, final int position, final long id) {
Intent intent = new Intent(getActivity(), ChatActivity.class);
intent.putExtra("CHAT_PREVIEW_ITEM", ((ChatPreview) adapter.getItemAtPosition(position)));
startActivity(intent);
FragmentTransaction ft = getFragmentManager().beginTransaction();
ft.remove(this).commit();
}
}
|
import os
from datetime import timedelta
from celery import Celery, platforms
from kombu import Exchange, Queue
from config import (
get_broker_and_backend,
get_redis_master
)
# Allow Celery to run as root (required inside the deployment container).
platforms.C_FORCE_ROOT = True
# Log file paths for the worker and the beat scheduler, under <root>/logs.
worker_log_path = os.path.join(os.path.dirname(os.path.dirname(__file__)) + '/logs', 'celery.log')
beat_log_path = os.path.join(os.path.dirname(os.path.dirname(__file__)) + '/logs', 'beat.log')
# Fetch broker and backend settings:
#   broker  - the message queue tasks are dispatched through
#   backend - the queue that receives task results
broker_and_backend = get_broker_and_backend()
# Task modules registered with the app.
# NOTE(review): the beat schedule below references
# 'tasks.dialogue.execute_dialogue_task', but 'tasks.dialogue' is absent
# from this include list - confirm whether it should be added.
tasks = [
    'tasks.login', 'tasks.user', 'tasks.search', 'tasks.home', 'tasks.comment',
    'tasks.repost', 'tasks.downloader', 'tasks.praise'
]
if isinstance(broker_and_backend, list):
    broker, backend = broker_and_backend
    # include: the modules containing the task definitions
    app = Celery('weibo_task', include=tasks, broker=broker, backend=backend)
else:
    # Redis Sentinel: a single connection string plus the master name.
    # NOTE(review): no result backend is configured on this path - verify
    # results are not required when Sentinel is in use.
    master = get_redis_master()
    app = Celery('weibo_task', include=tasks, broker=broker_and_backend)
    app.conf.update(
        BROKER_TRANSPORT_OPTIONS={'master_name': master},
    )
# Main Celery configuration: serialization, log files, beat schedule, queues.
app.conf.update(
    CELERY_TIMEZONE='Asia/Shanghai',
    CELERY_ENABLE_UTC=True,
    CELERYD_LOG_FILE=worker_log_path,
    CELERYBEAT_LOG_FILE=beat_log_path,
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_TASK_SERIALIZER='json',
    CELERY_RESULT_SERIALIZER='json',
    # Periodic tasks driven by celery beat.
    CELERYBEAT_SCHEDULE={
        'login_task': {
            # Task name (module path of the task function)
            'task': 'tasks.login.execute_login_task',
            # Scheduling interval
            'schedule': timedelta(hours=20),
            # Extra options: anything Task.apply_async accepts
            'options': {'queue': 'login_queue', 'routing_key': 'for_login'}
        },
        'user_task': {
            'task': 'tasks.user.execute_user_task',
            'schedule': timedelta(minutes=3),
            'options': {'queue': 'user_crawler', 'routing_key': 'for_user_info'}
        },
        'search_task': {
            'task': 'tasks.search.execute_search_task',
            'schedule': timedelta(hours=2),
            'options': {'queue': 'search_crawler', 'routing_key': 'for_search_info'}
        },
        'home_task': {
            'task': 'tasks.home.execute_home_task',
            'schedule': timedelta(hours=10),
            'options': {'queue': 'home_crawler', 'routing_key': 'home_info'}
        },
        'comment_task': {
            'task': 'tasks.comment.execute_comment_task',
            'schedule': timedelta(hours=10),
            'options': {'queue': 'comment_crawler', 'routing_key': 'comment_info'}
        },
        'repost_task': {
            'task': 'tasks.repost.execute_repost_task',
            'schedule': timedelta(hours=10),
            'options': {'queue': 'repost_crawler', 'routing_key': 'repost_info'}
        },
        # NOTE(review): 'tasks.dialogue' is not in the app's include list
        # above - workers may not have this task registered; confirm.
        'dialogue_task': {
            'task': 'tasks.dialogue.execute_dialogue_task',
            'schedule': timedelta(hours=10),
            'options': {'queue': 'dialogue_crawler', 'routing_key': 'dialogue_info'}
        },
    },
    # Declared queues; several (e.g. fans_followers, praise_*) have no beat
    # entry and are presumably filled by tasks at runtime - confirm.
    CELERY_QUEUES=(
        Queue('login_queue', exchange=Exchange('login_queue', type='direct'), routing_key='for_login'),
        Queue('user_crawler', exchange=Exchange('user_crawler', type='direct'), routing_key='for_user_info'),
        Queue('search_crawler', exchange=Exchange('search_crawler', type='direct'), routing_key='for_search_info'),
        Queue('fans_followers', exchange=Exchange('fans_followers', type='direct'), routing_key='for_fans_followers'),
        Queue('home_crawler', exchange=Exchange('home_crawler', type='direct'), routing_key='home_info'),
        Queue('ajax_home_crawler', exchange=Exchange('ajax_home_crawler', type='direct'), routing_key='ajax_home_info'),
        Queue('comment_crawler', exchange=Exchange('comment_crawler', type='direct'), routing_key='comment_info'),
        Queue('comment_page_crawler', exchange=Exchange('comment_page_crawler', type='direct'),
              routing_key='comment_page_info'),
        Queue('praise_crawler', exchange=Exchange('praise_crawler', type='direct'), routing_key='praise_info'),
        Queue('praise_page_crawler', exchange=Exchange('praise_page_crawler', type='direct'),
              routing_key='praise_page_info'),
        Queue('repost_crawler', exchange=Exchange('repost_crawler', type='direct'), routing_key='repost_info'),
        Queue('repost_page_crawler', exchange=Exchange('repost_page_crawler', type='direct'),
              routing_key='repost_page_info'),
        Queue('dialogue_crawler', exchange=Exchange('dialogue_crawler', type='direct'), routing_key='dialogue_info'),
        Queue('dialogue_page_crawler', exchange=Exchange('dialogue_page_crawler', type='direct'),
              routing_key='dialogue_page_info'),
        Queue('download_queue', exchange=Exchange('download_queue', type='direct'), routing_key='for_download'),
    ),
)
|
# ModelSim/Questa simulation setup: create the default working library.
vlib work
|
import tensorflow as tf
from common.ops import shape_ops
def mean_stddev_op(input_tensor, axis):
    """Return the (mean, stddev) of ``input_tensor`` along ``axis``.

    Both outputs have the reduced shape (``axis`` removed), matching the
    original contract.
    """
    # Bug fix: keep the reduced axis while centring the data. Without
    # keepdims, `input_tensor - mean` fails to broadcast whenever `axis`
    # is not the leading dimension (e.g. a (2, 3) input with axis=1 would
    # subtract a (2,)-shaped mean and raise a shape error).
    mean_keepdims = tf.reduce_mean(input_tensor, axis=axis, keepdims=True)
    variance = tf.reduce_mean(tf.square(input_tensor - mean_keepdims), axis=axis)
    stddev = tf.sqrt(variance)
    # Squeeze back to the reduced shape the callers expect.
    mean = tf.squeeze(mean_keepdims, axis=axis)
    return mean, stddev
# Register the custom op with TensorFlow
# NOTE(review): tf.RegisterGradient keys on a graph op *type* string, but
# no op of type "MeanStddevOp" is created anywhere visible here - confirm
# this gradient is actually looked up (normally gradient_override_map is
# required for py_func-based ops).
@tf.RegisterGradient("MeanStddevOp")
def _mean_stddev_op_grad(op, grad_mean, grad_stddev):
    # Backprop the two incoming gradients through the forward expression.
    input_tensor = op.inputs[0]
    axis = op.get_attr("axis")
    # NOTE(review): concatenating grad_mean and grad_stddev along `axis`
    # assumes their shapes are compatible - verify against the op's
    # actual output shapes.
    grad_input = tf.gradients(tf.concat([grad_mean, grad_stddev], axis=axis), input_tensor)
    # Second return is the gradient w.r.t. `axis` (none - it is not a
    # differentiable input).
    return grad_input, None
# Create a TensorFlow operation from the custom op function
def mean_stddev(input_tensor, axis, name=None):
    # Wraps the Python implementation as a graph op (TF1-style name_scope).
    # NOTE(review): tf.py_func's public signature has no `grad` keyword
    # argument, so this call raises a TypeError as written - confirm the
    # TF version in use; custom py_func gradients normally require
    # gradient_override_map instead.
    with tf.name_scope(name, "MeanStddevOp", [input_tensor]) as name:
        return tf.py_func(mean_stddev_op, [input_tensor, axis], [tf.float32, tf.float32], name=name, grad=_mean_stddev_op_grad)
# Test the custom op on a 2x3 constant, reducing across columns (axis=1).
# NOTE(review): tf.Session is TF1-only; under TF2 this script fails unless
# run through tf.compat.v1 with eager execution disabled.
input_data = tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
mean, stddev = mean_stddev(input_data, axis=1)
with tf.Session() as sess:
    mean_val, stddev_val = sess.run([mean, stddev])
    print("Mean:", mean_val)
    print("Standard Deviation:", stddev_val)
def removeSmallestNums(arr, count=4):
    """Remove the ``count`` smallest numbers from ``arr`` in place.

    The list is sorted ascending as a side effect, the first ``count``
    elements are dropped, and the (mutated) list is returned.
    ``count`` defaults to 4 for backward compatibility; lists shorter than
    ``count`` become empty (slice deletion never raises).
    """
    arr.sort()
    # Slicing is safe for short lists: del [] when len(arr) <= count.
    del arr[:count]
    return arr
# Test array
arr = [5, 3, 9, 15, 1, 20, 7]
# Call the function: sorts ascending, then drops the four smallest in place
removed_arr = removeSmallestNums(arr)
# Print the modified array.
# Fixed comment: sorted order is [1, 3, 5, 7, 9, 15, 20], so removing the
# first four leaves [9, 15, 20] (the previous "[15, 20, 7]" was wrong).
print(removed_arr)  # [9, 15, 20]
// Event loop for a multithreaded programming language
// Drains one event per iteration, then blocks until more work arrives.
// NOTE(review): isDone/isQueueEmpty/getNextEvent/executeEvent/
// waitForNextEvent are defined elsewhere; this loop appears to assume a
// single consumer thread (no locking between the emptiness check and the
// dequeue) - confirm before reuse.
while (!isDone()) {
    if (!isQueueEmpty()) {
        // Get the next event in the queue
        Event event = getNextEvent();
        // Execute the event
        executeEvent(event);
    }
    // Wait for the next event
    waitForNextEvent();
}
#!/usr/bin/env bash
#
# (C) Copyright IBM Corp. 2020 All Rights Reserved.
#
# Script install Redis Operator through the Operator Lifecycle Manager (OLM) or via command line (CLI)
# application of kubernetes manifests in both an online and offline airgap environment. This script can be invoked using
# `cloudctl`, a command line tool to manage Container Application Software for Enterprises (CASEs), or directly on an
# uncompressed CASE archive. Running the script through `cloudctl case launch` has added benefit of pre-requisite validation
# and verification of integrity of the CASE. Cloudctl download and usage instructions are available at [github.com/IBM/cloud-pak-cli](https://github.com/IBM/cloud-pak-cli).
#
# Pre-requisites:
# oc or kubectl installed
# sed installed
# CASE tgz downloaded & uncompressed
# authenticated to cluster
#
# Parameters are documented within print_usage function.
# ***** GLOBALS *****
# ----- DEFAULTS -----
# Command line tooling & path
kubernetesCLI="oc"
scriptName=$(basename "$0")
scriptDir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Script invocation defaults for parms populated via cloudctl
action="install"
caseJsonFile=""
casePath="${scriptDir}/../../.."
caseName="ibm-cloud-databases-redis"
inventory="redisOperator"
instance=""
# - optional parameter / argument defaults
dryRun=""                  # becomes "--dry-run" when --dryRun is passed
deleteCRDs=0               # 1 when --deleteCRDs is passed
namespace=""
registry=""
pass=""
secret=""
user=""
inputcasedir=""
cr_system_status="betterThanYesterday"
recursive_catalog_install=0
# - variables specific to catalog/operator installation
caseCatalogName="ibm-cloud-databases-redis-operator-catalog"
catalogNamespace="openshift-marketplace"
channelName="v1.0"
# NOTE(review): despite the name, the default is a *tag* suffix
# (":latest"); --catalogDigest replaces it with "@<digest>" - confirm
# consumers handle both forms.
catalogDigest=":latest"
# Display usage information and exit with the given return code
# (defaults to 1 when no code is specified).
print_usage() {
  # Determine context of call (via cloudctl or script directly) based on
  # presence of the canonical json parameter.
  if [ -z "$caseJsonFile" ]; then
    usage="${scriptName} --casePath <CASE-PATH>"
    caseParmDesc="--casePath value, -c value : root directory of the extracted CASE file to parse"
    toleranceParm=""
    toleranceParmDesc=""
  else
    usage="cloudctl case launch --case <CASE-PATH>"
    caseParmDesc="--case value, -c value : local path or URL containing the CASE file to parse"
    toleranceParm="--tolerance tolerance"
    toleranceParmDesc="
  --tolerance value, -t value : tolerance level for validating the CASE
                                 0 - maximum validation (default)
                                 1 - reduced validation"
  fi
  # Help text. Typo fixes relative to the original: validation, cluster,
  # directory, --recursive (the actual flag name), sample custom resource.
  echo "
USAGE: ${usage} --inventory inventoryItemOfLauncher --action launchAction --instance instance
                  --args \"args\" --namespace namespace ${toleranceParm}
OPTIONS:
   --action value, -a value    : the name of the action item launched
   --args value, -r value      : arguments specific to action (see 'Action Parameters' below).
   ${caseParmDesc}
   --instance value, -i value  : name of instance of target application (release)
   --inventory value, -e value : name of the inventory item launched
   --namespace value, -n value : name of the target namespace
   ${toleranceParmDesc}
 ARGS per Action:
    configure-creds-airgap
      --registry               : source/target container image registry (required)
      --user                   : login user name for the container image registry (required)
      --pass                   : login password for the container image registry (required)
    configure-cluster-airgap
      --dryRun                 : simulate configuration of cluster for airgap
      --inputDir               : path to saved CASE directory
      --registry               : target container image registry (required)
    mirror-images
      --dryRun                 : simulate configuration of cluster for airgap
      --inputDir               : path to saved CASE directory
      --registry               : target container image registry (required)
    install-catalog:
      --registry               : target container image registry (required)
      --recursive              : recursively install dependent catalogs
      --inputDir               : path to saved CASE directory (required if --recursive is set)
    install-operator:
      --channelName            : name of channel for subscription (packagemanifest default used if not specified)
      --secret                 : name of existing image pull secret for the container image registry
      --registry               : container image registry (required if pass|user specified)
      --user                   : login user name for the container image registry (required if registry|pass specified)
      --pass                   : login password for the container image registry (required if registry|user specified)
    install-operator-native:
      --secret                 : name of existing image pull secret for the container image registry
      --registry               : container image registry (required if pass|user specified)
      --user                   : login user name for the container image registry (required if registry|pass specified)
      --pass                   : login password for the container image registry (required if registry|user specified)
    uninstall-catalog          : uninstalls the catalog source and operator group
      --recursive              : recursively uninstall dependent catalogs
      --inputDir               : path to saved CASE directory (required if --recursive is set)
    uninstall-operator         : delete the operator deployment via OLM
    uninstall-operator-native  : deletes the operator deployment via native way
      --deleteCRDs             : deletes CRD's associated with this operator (if not set, crds won't get deleted)
    apply-custom-resources     : creates the sample custom resource
      --systemStatus           : status to display
    delete-custom-resources    : deletes the sample custom resource
"
  # Exit with the supplied code, or 1 when called without one.
  if [ -z "$1" ]; then
    exit 1
  else
    exit "$1"
  fi
}
# ***** ARGUMENT CHECKS *****
# Validates that the required parameters were specified for script invocation
check_cli_args() {
  # Verify required parameters were specified and are valid (including environment setup)
  # - case path
  [[ -z "${casePath}" ]] && { err_exit "The case path parameter was not specified."; }
  [[ ! -f "${casePath}/case.yaml" ]] && { err_exit "No case.yaml in the root of the specified case path parameter."; }
  # Verify kubernetes connection and namespace
  check_kube_connection
  [[ -z "${namespace}" ]] && { err_exit "The namespace parameter was not specified."; }
  if ! $kubernetesCLI get namespace "${namespace}" >/dev/null; then
    err_exit "Unable to retrieve namespace specified ${namespace}"
  fi
  # Verify dynamic args are valid (show as many issues on invocation as possible)
  # NOTE(review): parse_dynamic_args is called here without an argument, so
  # its "$1" is empty and the loop is a no-op on this path - confirm whether
  # the --args string should be forwarded (it may be parsed elsewhere).
  parse_dynamic_args
}
# Parses the args (--args) parameter if any are specified.
# The single string argument is split on spaces into an array, then walked
# with an explicit index so value-taking flags can consume the next token.
parse_dynamic_args() {
  # Temporarily switch IFS to a space so `read -ra` splits the string.
  _IFS=$IFS
  IFS=" "
  read -ra arr <<<"${1}"
  IFS="$_IFS"
  # Sentinel empty element terminates the while loop below.
  arr+=("")
  idx=0
  v="${arr[${idx}]}"
  while [ "$v" != "" ]; do
    case $v in
    # Enable debug from cloudctl invocation
    --debug)
      idx=$((idx + 1))
      set -x
      ;;
    # Boolean flags: no value token follows.
    --dryRun)
      dryRun="--dry-run"
      ;;
    --deleteCRDs)
      deleteCRDs=1
      ;;
    # Value-taking flags: advance idx to consume the following token.
    --channelName)
      idx=$((idx + 1))
      v="${arr[${idx}]}"
      channelName="${v}"
      ;;
    --catalogDigest)
      idx=$((idx + 1))
      v="${arr[${idx}]}"
      catalogDigest="@${v}"
      ;;
    --registry)
      idx=$((idx + 1))
      v="${arr[${idx}]}"
      registry="${v}"
      ;;
    --inputDir)
      idx=$((idx + 1))
      v="${arr[${idx}]}"
      inputcasedir="${v}"
      ;;
    --user)
      idx=$((idx + 1))
      v="${arr[${idx}]}"
      user="${v}"
      ;;
    --pass)
      idx=$((idx + 1))
      v="${arr[${idx}]}"
      pass="${v}"
      ;;
    --secret)
      idx=$((idx + 1))
      v="${arr[${idx}]}"
      secret="${v}"
      ;;
    --systemStatus)
      idx=$((idx + 1))
      v="${arr[${idx}]}"
      cr_system_status="${v}"
      ;;
    --recursive)
      recursive_catalog_install=1
      ;;
    --help)
      print_usage 0
      ;;
    *)
      err_exit "Invalid Option ${v}" >&2
      ;;
    esac
    # Advance to the next flag token.
    idx=$((idx + 1))
    v="${arr[${idx}]}"
  done
}
# Validates that the required args were specified for install action
validate_install_args() {
  # Verify arguments required per install method were provided
  echo "Checking install arguments"
  # Validate secret arguments provided are valid combination and
  # either create or check for existence of secret in cluster.
  if [[ -n "${registry}" || -n "${user}" || -n "${pass}" ]]; then
    # check_secret_params (defined elsewhere) presumably validates that
    # registry/user/pass/secret are all set - confirm before relying on it.
    check_secret_params
    # set -e: abort immediately if the secret cannot be created.
    set -e
    $kubernetesCLI create secret docker-registry "${secret}" \
      --docker-server="${registry}" \
      --docker-username="${user}" \
      --docker-password="${pass}" \
      --docker-email="${user}" \
      --namespace "${namespace}"
    set +e
  elif [[ -n ${secret} ]]; then
    # Only a secret name supplied: it must already exist in the namespace.
    if ! $kubernetesCLI get secrets "${secret}" -n "${namespace}" >/dev/null 2>&1; then
      err "Secret $secret does not exist, either create one or supply additional registry parameters to create one"
      print_usage 1
    fi
  fi
}
# Validates the arguments for the (un)install-catalog actions. The mode
# argument ("uninstall") relaxes the --registry requirement.
validate_install_catalog() {
  local mode="$1"
  echo "Checking arguments for install-catalog action"
  # Installing needs a target registry; uninstalling does not.
  [[ "${mode}" != "uninstall" && -z "${registry}" ]] && {
    err "'--registry' must be specified with the '--args' parameter"
    print_usage 1
  }
  # A recursive (un)install needs the saved CASE directory.
  [[ ${recursive_catalog_install} -eq 1 && -z "${inputcasedir}" ]] && {
    err "'--inputDir' must be specified with the '--args' parameter when '--recursive' is set"
    print_usage 1
  }
}
# Validates that the registry credentials (--registry/--user/--pass) were
# all supplied before touching the local authentication store; reports
# every missing flag, then shows usage if any were absent.
validate_configure_creds_airgap_args() {
  local missing=0
  if [[ -z "${registry}" ]]; then
    missing=1
    err "'--registry' must be specified with the '--args' parameter"
  fi
  if [[ -z "${user}" ]]; then
    missing=1
    err "'--user' must be specified with the '--args' parameter"
  fi
  if [[ -z "${pass}" ]]; then
    missing=1
    err "'--pass' must be specified with the '--args' parameter"
  fi
  # Print usage (and exit) if any parameter was missing.
  [[ ${missing} -eq 1 ]] && { print_usage 1; }
}
# Validates the arguments needed to configure the cluster for airgap
# (--registry and --inputDir); reports every missing flag, then shows
# usage if any were absent.
validate_configure_cluster_airgap_args() {
  local missing=0
  if [[ -z "${registry}" ]]; then
    missing=1
    err "'--registry' must be specified with the '--args' parameter"
  fi
  if [[ -z "${inputcasedir}" ]]; then
    missing=1
    err "'--inputDir' must be specified with the '--args' parameter"
  fi
  # Print usage (and exit) if any parameter was missing.
  [[ ${missing} -eq 1 ]] && { print_usage 1; }
}
# Aborts the deployment when the given file does not exist.
validate_file_exists() {
  local file=$1
  if [[ ! -f ${file} ]]; then
    err_exit "${file} is missing, exiting deployment."
  fi
}
# ***** END ARGUMENT CHECKS *****
# ***** UTILS *****
# Minimal test helper: fails with a diagnostic when the actual value does
# not equal the expected one. (Variables stay global, matching the
# original behavior.)
assert() {
  testname="$1"
  got=$2
  want=$3
  [ "$got" = "$want" ] || err_exit "got $got, but want $want : ${testname} failed"
}
# ***** END UTILS *****
# ***** ACTIONS *****
# ----- CONFIGURE ACTIONS -----
# Add / update local authentication store with user/password specified (~/.airgap/secrets/<registy>.json)
configure_creds_airgap() {
  echo "-------------Configuring authentication secret-------------"
  # Requires --registry/--user/--pass; exits via print_usage otherwise.
  validate_configure_creds_airgap_args
  # Create registry secret for user information provided (delegated to
  # the bundled airgap.sh helper).
  "${scriptDir}"/airgap.sh registry secret -c -u "${user}" -p "${pass}" "${registry}"
}
# Append secret to Global Cluster Pull Secret (pull-secret in openshif-config)
configure_cluster_pull_secret() {
  echo "-------------Configuring cluster pullsecret-------------"
  # Only update the global pull secret if an authentication entry for this
  # registry already exists in the local store (created by
  # configure_creds_airgap); otherwise skip with a notice.
  if "${scriptDir}"/airgap.sh registry secret -l | grep "${registry}"; then
    "${scriptDir}"/airgap.sh cluster update-pull-secret --registry "${registry}" "${dryRun}"
  else
    echo "Skipping configuring cluster pullsecret: No authentication exists for ${registry}"
  fi
}
# Apply an ImageContentSourcePolicy that redirects this CASE's images to
# the mirror registry (delegated to airgap.sh).
configure_content_image_source_policy() {
  echo "-------------Configuring imagecontentsourcepolicy-------------"
  "${scriptDir}"/airgap.sh cluster apply-image-policy \
    --name "${caseName}" \
    --dir "${inputcasedir}" \
    --registry "${registry}" "${dryRun}"
}
# Apply ImageContentSourcePolicy required for airgap
configure_cluster_airgap() {
  echo "-------------Configuring cluster for airgap-------------"
  # Requires --registry and --inputDir; exits via print_usage otherwise.
  validate_configure_cluster_airgap_args
  configure_cluster_pull_secret
  configure_content_image_source_policy
}
# ----- MIRROR ACTIONS -----
# Mirror required images into the target registry for airgap installs
# (reuses the configure-cluster-airgap argument validation since the same
# flags are required).
mirror_images() {
  echo "-------------Mirroring images-------------"
  validate_configure_cluster_airgap_args
  "${scriptDir}"/airgap.sh image mirror \
    --dir "${inputcasedir}" \
    --to-registry "${registry}" "${dryRun}"
}
# ----- INSTALL ACTIONS -----
# Installs the catalog source and operator group of any dependencies.
# This CASE has no dependencies, so this is a no-op placeholder kept for
# the --recursive flow.
install_dependent_catalogs() {
  echo "No dependency"
}
# Installs the catalog source and operator group
install_catalog() {
  validate_install_catalog
  # install all catalogs of subcases first (no-op for this CASE)
  if [[ ${recursive_catalog_install} -eq 1 ]]; then
    install_dependent_catalogs
  fi
  echo "-------------Installing catalog source-------------"
  local catsrc_file="${casePath}"/inventory/"${inventory}"/files/op-olm/catalog_source.yaml
  local opgrp_file="${casePath}"/inventory/"${inventory}"/files/op-olm/operator_group.yaml
  # Verify expected yaml files for install exist
  validate_file_exists "${catsrc_file}"
  validate_file_exists "${opgrp_file}"
  # Extract the image reference currently in the catalog source yaml.
  local catsrc_image_orig=$(grep "image:" "${catsrc_file}" | awk '{print$2}')
  # Replace the original registry host with the local/mirror registry
  # (strip everything up to the first '/', then prefix with ${registry}).
  local catsrc_image_mod="${registry}/$(echo "${catsrc_image_orig}" | sed -e "s/[^/]*\///")"
  # apply catalog source with the rewritten image reference
  sed <"${catsrc_file}" "s|${catsrc_image_orig}|${catsrc_image_mod}|g" | $kubernetesCLI apply -f -
  echo "-------------Installing operator group-------------"
  # apply operator group, substituting the target namespace
  sed <"${opgrp_file}" "s|REPLACE_NAMESPACE|${namespace}|g" | $kubernetesCLI apply -n "${namespace}" -f -
}
# Install utilizing default OLM method
install_operator() {
  # Verify arguments are valid
  validate_install_args
  # Proceed with install
  echo "-------------Installing via OLM-------------"
  [[ ! -f "${casePath}"/inventory/"${inventory}"/files/op-olm/subscription.yaml ]] && { err_exit "Missing required subscription yaml, exiting deployment."; }
  # The subscription requires the catalog source to be present first.
  if ! $kubernetesCLI get catsrc "${caseCatalogName}" -n "${catalogNamespace}"; then
    # Fixed error message (previously read "expected ... expected to be
    # installed namespace ...").
    err_exit "expected catalog source '${caseCatalogName}' to be installed in namespace '${catalogNamespace}'"
  fi
  # - subscription: substitute namespace and channel, then apply
  sed <"${casePath}"/inventory/"${inventory}"/files/op-olm/subscription.yaml "s|REPLACE_NAMESPACE|${namespace}|g" | sed "s|REPLACE_CHANNEL_NAME|$channelName|g" | $kubernetesCLI apply -n "${namespace}" -f -
}
# Install utilizing default CLI method (no OLM): applies the service
# account, CRDs, role, role binding, and operator deployment directly.
install_operator_native() {
  # Verify arguments are valid
  validate_install_args
  # Proceed with install
  echo "-------------Installing native-------------"
  # Verify expected yaml files for install exist (error-message typos
  # fixed: "service account", "role binding").
  [[ ! -f "${casePath}"/inventory/"${inventory}"/files/op-cli/service_account.yaml ]] && { err_exit "Missing required service account yaml, exiting deployment."; }
  [[ ! -f "${casePath}"/inventory/"${inventory}"/files/op-cli/role.yaml ]] && { err_exit "Missing required role yaml, exiting deployment."; }
  [[ ! -f "${casePath}"/inventory/"${inventory}"/files/op-cli/role_binding.yaml ]] && { err_exit "Missing required role binding yaml, exiting deployment."; }
  [[ ! -f "${casePath}"/inventory/"${inventory}"/files/op-cli/operator.yaml ]] && { err_exit "Missing required operator yaml, exiting deployment."; }
  # Apply yaml files, substituting variable input as required
  # - service account (inject the image pull secret name)
  sed <"${casePath}"/inventory/"${inventory}"/files/op-cli/service_account.yaml "s|REPLACE_SECRET|$secret|g" | $kubernetesCLI apply -n "${namespace}" -f -
  # - crds
  for crdYaml in "${casePath}"/inventory/"${inventory}"/files/op-cli/*_crd.yaml; do
    $kubernetesCLI apply -n "${namespace}" -f "${crdYaml}"
  done
  # - role
  $kubernetesCLI apply -n "${namespace}" -f "${casePath}"/inventory/"${inventory}"/files/op-cli/role.yaml
  # - role binding (inject the target namespace)
  sed <"${casePath}"/inventory/"${inventory}"/files/op-cli/role_binding.yaml "s|REPLACE_NAMESPACE|${namespace}|g" | $kubernetesCLI apply -n "${namespace}" -f -
  # - operator
  $kubernetesCLI apply -n "${namespace}" -f "${casePath}"/inventory/"${inventory}"/files/op-cli/operator.yaml
}
# install operand custom resources
apply_custom_resources() {
  echo "-------------Applying custom resources-------------"
  local cr="${casePath}"/inventory/"${inventory}"/files/redis.databases.cloud.ibm.com_v1_redissentinel_cr.yaml
  [[ ! -f ${cr} ]] && { err_exit "Missing required ${cr}, exiting deployment."; }
  # set -e: abort if the substitution or apply fails.
  set -e
  # Overwrite the sample's systemStatus line with the requested status
  # (--systemStatus, default "betterThanYesterday") before applying.
  sed <"${cr}" "s|systemStatus.*|systemStatus: ${cr_system_status}|g" | $kubernetesCLI apply -n "$namespace" -f -
  set +e
}
# ----- UNINSTALL ACTIONS -----
uninstall_dependent_catalogs() {
  # This CASE has no sub-case catalogs; nothing to remove.
  printf '%s\n' "No dependencies"
}
# deletes the catalog source and operator group
uninstall_catalog() {
  # Re-use the install-time validation so required context is present.
  validate_install_catalog "uninstall"
  # uninstall all catalogs of subcases first
  if [[ ${recursive_catalog_install} -eq 1 ]]; then
    uninstall_dependent_catalogs
  fi
  local catsrc_file="${casePath}"/inventory/"${inventory}"/files/op-olm/catalog_source.yaml
  local opgrp_file="${casePath}"/inventory/"${inventory}"/files/op-olm/operator_group.yaml
  echo "-------------Uninstalling catalog source-------------"
  $kubernetesCLI delete -f "${catsrc_file}" --ignore-not-found=true
  echo "-------------Uninstalling operator group-------------"
  $kubernetesCLI delete -f "${opgrp_file}" --ignore-not-found=true
}
# Uninstall operator installed via OLM
uninstall_operator() {
  echo "-------------Uninstalling operator-------------"
  # Find installed CSV *before* deleting the subscription; the CSV was created
  # by the subscription but OLM does not garbage-collect it with it.
  csvName=$($kubernetesCLI get subscription "${caseCatalogName}"-subscription -o go-template --template '{{.status.installedCSV}}' -n "${namespace}" --ignore-not-found=true)
  # Remove the subscription
  $kubernetesCLI delete subscription "${caseCatalogName}-subscription" -n "${namespace}" --ignore-not-found=true
  # Remove the CSV which was generated by the subscription but does not get garbage collected
  [[ -n "${csvName}" ]] && { $kubernetesCLI delete clusterserviceversion "${csvName}" -n "${namespace}" --ignore-not-found=true; }
  # Remove the operatorGroup
  $kubernetesCLI delete OperatorGroup "${caseCatalogName}-group" -n "${namespace}" --ignore-not-found=true
  # delete crds only when the caller opted in (CRD deletion destroys operand data)
  if [[ $deleteCRDs -eq 1 ]]; then
    for crdYaml in "${casePath}"/inventory/"${inventory}"/files/op-cli/*_crd.yaml; do
      $kubernetesCLI delete -f "${crdYaml}" --ignore-not-found=true
    done
  fi
}
# Uninstall operator installed via CLI
uninstall_operator_native() {
  echo "-------------Uninstalling operator-------------"
  # Verify expected yaml files for uninstall exist and delete the resources
  # each one declares; missing files are silently skipped (already best-effort).
  [[ -f "${casePath}/inventory/${inventory}/files/op-cli/service_account.yaml" ]] && { $kubernetesCLI delete -n "${namespace}" -f "${casePath}/inventory/${inventory}/files/op-cli/service_account.yaml" --ignore-not-found=true; }
  [[ -f "${casePath}/inventory/${inventory}/files/op-cli/role.yaml" ]] && { $kubernetesCLI delete -n "${namespace}" -f "${casePath}/inventory/${inventory}/files/op-cli/role.yaml" --ignore-not-found=true; }
  [[ -f "${casePath}/inventory/${inventory}/files/op-cli/role_binding.yaml" ]] && { $kubernetesCLI delete -n "${namespace}" -f "${casePath}/inventory/${inventory}/files/op-cli/role_binding.yaml" --ignore-not-found=true; }
  [[ -f "${casePath}/inventory/${inventory}/files/op-cli/operator.yaml" ]] && { $kubernetesCLI delete -n "${namespace}" -f "${casePath}/inventory/${inventory}/files/op-cli/operator.yaml" --ignore-not-found=true; }
  # - crds: opt-in only, since removing CRDs destroys any remaining operand data
  if [[ $deleteCRDs -eq 1 ]]; then
    echo "deleting crds"
    for crdYaml in "${casePath}"/inventory/"${inventory}"/files/op-cli/*_crd.yaml; do
      $kubernetesCLI delete -n "${namespace}" -f "${crdYaml}" --ignore-not-found=true
    done
  fi
}
delete_custom_resources() {
  echo "-------------Deleting custom resources-------------"
  # Same CR template used by apply_custom_resources.
  local cr
  cr="${casePath}/inventory/${inventory}/files/redis.databases.cloud.ibm.com_v1_redissentinel_cr.yaml"
  if [[ ! -f ${cr} ]]; then
    err_exit "Missing required ${cr}, exiting deployment."
  fi
  $kubernetesCLI delete -n "${namespace}" -f "${cr}"
}
# ***** END ACTIONS *****
# Verifies that we have a connection to the Kubernetes cluster
# Verifies that we have a working CLI and a connection to the Kubernetes cluster.
check_kube_connection() {
  # Check if the default oc CLI is available and if not fall back to kubectl.
  command -v $kubernetesCLI >/dev/null 2>&1 || { kubernetesCLI="kubectl"; }
  # Bug fix: this called 'err_exut' (typo), which would itself die with
  # "command not found" instead of printing the intended error message.
  command -v $kubernetesCLI >/dev/null 2>&1 || { err_exit "No kubernetes cli found - tried oc and kubectl"; }
  # Query apiservices to verify connectivity
  if ! $kubernetesCLI get apiservices >/dev/null 2>&1; then
    # Developer note: A kubernetes CLI should be included in your prereqs.yaml as a client prereq if it is required for your script.
    err_exit "Verify that $kubernetesCLI is installed and you are connected to a Kubernetes cluster."
  fi
}
# Run the action specified
# Dispatch the CLI-selected action to its implementation function.
run_action() {
  echo "Executing inventory item ${inventory}, action ${action} : ${scriptName}"
  case $action in
  configureCredsAirgap)
    configure_creds_airgap
    ;;
  configureClusterAirgap)
    configure_cluster_airgap
    ;;
  installCatalog)
    install_catalog
    ;;
  installOperator)
    install_operator
    ;;
  installOperatorNative)
    install_operator_native
    ;;
  mirrorImages)
    mirror_images
    ;;
  uninstallCatalog)
    uninstall_catalog
    ;;
  uninstallOperator)
    uninstall_operator
    ;;
  uninstallOperatorNative)
    uninstall_operator_native
    ;;
  applyCustomResources)
    apply_custom_resources
    ;;
  deleteCustomResources)
    delete_custom_resources
    ;;
  *)
    # Unknown action: report and show usage (print_usage exits with the given code).
    err "Invalid Action ${action}" >&2
    print_usage 1
    ;;
  esac
}
# Error reporting functions
# Write an error message to stderr without terminating the script.
err() {
  printf '[ERROR] %s\n' "$1" >&2
}
# Write an error message to stderr and abort with a non-zero status.
err_exit() {
  printf '[ERROR] %s\n' "$1" >&2
  exit 1
}
# Parse CLI parameters
# Parse CLI parameters. Each flag consumes the following token as its value.
while [ "${1-}" != "" ]; do
  case $1 in
  # Supported parameters for cloudctl & direct script invocation
  --casePath | -c)
    shift
    casePath="${1}"
    ;;
  --caseJsonFile)
    shift
    caseJsonFile="${1}"
    ;;
  --inventory | -e)
    shift
    inventory="${1}"
    ;;
  --action | -a)
    shift
    action="${1}"
    ;;
  --namespace | -n)
    shift
    namespace="${1}"
    ;;
  --instance | -i)
    shift
    instance="${1}"
    ;;
  --args | -r)
    # Free-form key=value arguments, decoded by parse_dynamic_args.
    shift
    parse_dynamic_args "${1}"
    ;;
  # Additional supported parameters for direct script invocation ONLY
  --help)
    print_usage 0
    ;;
  --debug)
    # Turn on shell command tracing for the remainder of the run.
    set -x
    ;;
  *)
    echo "Invalid Option ${1}" >&2
    exit 1
    ;;
  esac
  shift
done
# Execution order: validate the parsed arguments, then dispatch the action.
check_cli_args
run_action
|
<reponame>LiuFang07/bk-cmdb
// Copyright 2012-2018 <NAME>. All rights reserved.
// Use of this source code is governed by a MIT-license.
// See http://olivere.mit-license.org/license.txt for details.
package elastic
import (
"context"
"encoding/json"
"fmt"
"net/url"
"github.com/olivere/elastic/uritemplates"
)
// XPackWatcherDeactivateWatchService enables you to deactivate a currently active watch.
// See https://www.elastic.co/guide/en/elasticsearch/reference/6.7/watcher-api-deactivate-watch.html.
type XPackWatcherDeactivateWatchService struct {
	client        *Client     // client used to perform the HTTP request
	pretty        bool        // if true, ask for an indented JSON response
	watchId       string      // required: ID of the watch to deactivate
	masterTimeout string      // optional explicit timeout for the master-node connection
	bodyJson      interface{} // NOTE(review): never set by any method of this service — appears unused; confirm
	bodyString    string      // NOTE(review): never set by any method of this service — appears unused; confirm
}
// NewXPackWatcherDeactivateWatchService returns a service, bound to the given
// client, for deactivating watches via the X-Pack Watcher API.
func NewXPackWatcherDeactivateWatchService(client *Client) *XPackWatcherDeactivateWatchService {
	s := &XPackWatcherDeactivateWatchService{client: client}
	return s
}
// WatchId sets the identifier of the watch that should be deactivated.
func (s *XPackWatcherDeactivateWatchService) WatchId(id string) *XPackWatcherDeactivateWatchService {
	s.watchId = id
	return s
}
// MasterTimeout sets an explicit operation timeout for the connection to the master node.
func (s *XPackWatcherDeactivateWatchService) MasterTimeout(timeout string) *XPackWatcherDeactivateWatchService {
	s.masterTimeout = timeout
	return s
}
// Pretty asks Elasticsearch to indent the JSON response for readability.
func (s *XPackWatcherDeactivateWatchService) Pretty(enabled bool) *XPackWatcherDeactivateWatchService {
	s.pretty = enabled
	return s
}
// buildURL assembles the endpoint path and query-string parameters for the request.
func (s *XPackWatcherDeactivateWatchService) buildURL() (string, url.Values, error) {
	// Substitute the watch ID into the endpoint template.
	path, err := uritemplates.Expand("/_xpack/watcher/watch/{watch_id}/_deactivate", map[string]string{"watch_id": s.watchId})
	if err != nil {
		return "", url.Values{}, err
	}

	// Collect the optional query parameters.
	params := url.Values{}
	if s.pretty {
		params.Set("pretty", "true")
	}
	if s.masterTimeout != "" {
		params.Set("master_timeout", s.masterTimeout)
	}
	return path, params, nil
}
// Validate ensures all required fields are set before the request is sent.
func (s *XPackWatcherDeactivateWatchService) Validate() error {
	var missing []string
	if s.watchId == "" {
		missing = append(missing, "WatchId")
	}
	if len(missing) == 0 {
		return nil
	}
	return fmt.Errorf("missing required fields: %v", missing)
}
// Do executes the deactivate-watch operation: validates the service state,
// builds the URL, performs a PUT against the cluster, and decodes the body.
func (s *XPackWatcherDeactivateWatchService) Do(ctx context.Context) (*XPackWatcherDeactivateWatchResponse, error) {
	// Check pre-conditions
	if err := s.Validate(); err != nil {
		return nil, err
	}
	// Get URL for request
	path, params, err := s.buildURL()
	if err != nil {
		return nil, err
	}
	// Get HTTP response
	res, err := s.client.PerformRequest(ctx, PerformRequestOptions{
		Method: "PUT",
		Path:   path,
		Params: params,
	})
	if err != nil {
		return nil, err
	}
	// Return operation response, decoded from the raw JSON body.
	ret := new(XPackWatcherDeactivateWatchResponse)
	if err := json.Unmarshal(res.Body, ret); err != nil {
		return nil, err
	}
	return ret, nil
}
// XPackWatcherDeactivateWatchResponse is the response of XPackWatcherDeactivateWatchService.Do.
type XPackWatcherDeactivateWatchResponse struct {
	// Status holds the watch status as reported by Elasticsearch.
	Status *XPackWatchStatus `json:"status"`
}
|
"use strict";
const parse5 = require('parse5');
const expect = require('chai').expect;
const GeneratorContext = require('../../lib/generator-context');
module.exports = {
element: function (fragment) {
let documentFragment = parse5.parseFragment(fragment, {treeAdapter: parse5.treeAdapters.htmlparser2});
return documentFragment.childNodes[0];
},
registersElement(processor, kind, types, actions, assertions) {
let context = new GeneratorContext(null, '');
processor.process(context);
let element = context.page.elements[0];
assertElement(element, kind, types, actions, assertions);
return context;
},
assertElement: assertElement,
};
// Verify an element's class plus its registered types, actions, and assertions.
function assertElement(element, kind, types, actions, assertions) {
  expect(element).to.be.an.instanceof(kind);
  const asArray = (iterable) => [...iterable];
  expect(asArray(element.types)).to.have.members(types);
  expect(asArray(element.actions)).to.have.members(actions);
  expect(asArray(element.assertions)).to.have.members(assertions);
}
#!/usr/bin/env bash
# -*- coding:utf-8 -*-
# Author: Donny You(donnyyou@163.com)
# Generate train & val data for COCO keypoint (pose) training.
#
# Bug fix: the original shebang was '#!/usr/bin bash', which is not a valid
# interpreter path and breaks direct execution of the script.
export PYTHONPATH='/home/donny/Projects/PytorchCV'

# NOTE(review): INPUT_SIZE is defined but never used below — confirm whether
# coco_pose_generator.py should receive it as an argument.
INPUT_SIZE=368

# Source COCO dataset layout (COCO_DIR already ends with '/').
COCO_DIR='/home/donny/DataSet/MSCOCO/'
COCO_TRAIN_IMG_DIR=${COCO_DIR}'train2017'
COCO_VAL_IMG_DIR=${COCO_DIR}'val2017'
COCO_ANNO_DIR=${COCO_DIR}'annotations/'
TRAIN_ANNO_FILE=${COCO_ANNO_DIR}'person_keypoints_train2017.json'
VAL_ANNO_FILE=${COCO_ANNO_DIR}'person_keypoints_val2017.json'

# Output directories for the generated keypoint data.
SAVE_DIR='/home/donny/DataSet/COCO_KPTS/'
TRAIN_ROOT_DIR=${SAVE_DIR}'train'
VAL_ROOT_DIR=${SAVE_DIR}'val'

python coco_pose_generator.py --root_dir $TRAIN_ROOT_DIR \
                              --ori_anno_file $TRAIN_ANNO_FILE \
                              --ori_img_dir $COCO_TRAIN_IMG_DIR

python coco_pose_generator.py --root_dir $VAL_ROOT_DIR \
                              --ori_anno_file $VAL_ANNO_FILE \
                              --ori_img_dir $COCO_VAL_IMG_DIR
import {reactive} from "vue";
import {AddressForm} from "@/views/order/orderList/components/addAddress/interface";
// Composable that owns the reactive address-form state and exposes a
// handler for merging partial updates coming from the form component.
export function useForm() {
  const formData = reactive({
    form: {} as AddressForm,
  });

  // Shallow-merge the incoming fields into the existing form object.
  const handleFormChange = (data: any) => {
    Object.assign(formData.form, data);
  };

  return {
    formData,
    handleFormChange,
  };
}
|
#pragma once
#include <bond/core/bond_version.h>
#if BOND_VERSION < 0x0800
#error This file was generated by a newer version of the Bond compiler and is incompatible with your version of the Bond library.
#endif
#if BOND_MIN_CODEGEN_VERSION > 0x0c01
#error This file was generated by an older version of the Bond compiler and is incompatible with your version of the Bond library.
#endif
#include <bond/core/config.h>
#include <bond/core/containers.h>
namespace deprecated
{
namespace bondmeta
{
// Auto-generated Bond struct whose two fields hold the struct's own metadata
// (unqualified and fully-qualified name). Both fields are derived rather than
// payload, so they are deliberately excluded from comparison and swap.
struct HasMetaFields
{
    std::string full_name;  // qualified name, set to "deprecated.bondmeta.HasMetaFields"
    std::string name;       // unqualified name, set to "HasMetaFields"

    HasMetaFields()
    {
        InitMetadata("HasMetaFields", "deprecated.bondmeta.HasMetaFields");
    }

    // Copy constructor: copies only the source strings' allocators; the
    // actual field values are (re)established by InitMetadata below.
    HasMetaFields(const HasMetaFields& other)
        : full_name(other.full_name.get_allocator()),
          name(other.name.get_allocator())
    {
        InitMetadata("HasMetaFields", "deprecated.bondmeta.HasMetaFields");
    }

    HasMetaFields(HasMetaFields&& other)
        : full_name(std::move(other.full_name)),
          name(std::move(other.name))
    {
        InitMetadata("HasMetaFields", "deprecated.bondmeta.HasMetaFields");
    }

    // Copy-and-swap assignment (parameter intentionally taken by value).
    HasMetaFields& operator=(HasMetaFields other)
    {
        other.swap(*this);
        return *this;
    }

    // Both fields are skipped, so any two instances compare equal.
    bool operator==(const HasMetaFields&) const
    {
        return true
            /* skip bond_meta::full_name field 'full_name' */
            /* skip bond_meta::name field 'name' */;
    }

    bool operator!=(const HasMetaFields& other) const
    {
        return !(*this == other);
    }

    // Metadata fields are intentionally not swapped.
    void swap(HasMetaFields&)
    {
        using std::swap;
        /* skip bond_meta::full_name field 'full_name' */
        /* skip bond_meta::name field 'name' */
    }

    struct Schema;

protected:
    // Store the generated names into the instance fields.
    void InitMetadata(const char*name0, const char*qual_name)
    {
        this->name = name0;
        this->full_name = qual_name;
    }
};
// ADL-visible free swap so generic code (the std::swap two-step idiom)
// dispatches to the member-wise swap implementation.
inline void swap(::deprecated::bondmeta::HasMetaFields& left, ::deprecated::bondmeta::HasMetaFields& right)
{
    left.swap(right);
}
} // namespace bondmeta
} // namespace deprecated
|
#!/bin/bash
# Run a single dieharder test with a fixed seed (-S) so the run is
# reproducible. NOTE(review): -d selects the test index and -g the generator
# index; confirm what 3 and 18 map to in the installed dieharder build.
dieharder -d 3 -g 18 -S 3107955950
// There are n houses in a city connected by exactly n-1 roads, so there is exactly one
// shortest path from any house to any other house. The houses are numbered from 1 to n.
// Since Christmas is about to come, Santa has decided to hide gifts in these houses.
// Santa will come to the city for M consecutive days. Each day he will come to a house `a`
// first and will walk to house `b`, hiding a gift in each house that comes on the path.
// Input format:
// first line: N = number of houses, M = number of days Santa will visit the city
// next N-1 lines contain two integers u and v; there is a road between u and v
// next M lines contain two integers a and b representing the starting and ending house of each visit
// Example
// input :
// 4 2
// 1 2
// 2 3
// 2 4
// 1 4
// 3 4
// output :
// 2
// Explaination :
// 1 !
// |
// |
// 2 . !
// / \
// / \
// 3. 4. !
// this is a graph or can be considered as nary tree as per line 16 there is a path between 1 and 4 as 1,2,4 hence on first day santa
// visits this path and as per on line 17 there is a path between 3 and 4 as 3 2 4 hence on both day santa visits house 2 and 4 and
// hence maximum visit is of house 2
import java.util.ArrayList;
import java.util.*;
/** A node in the undirected house graph, treated as a rooted tree during BFS. */
class Node {
    public Node parent;                                   // parent assigned while walking from the BFS root
    public List<Node> children = new ArrayList<Node>();   // adjacent houses (edges are added in both directions)
    public int data;                                      // house number carried by this node
    boolean visited = false;                              // BFS bookkeeping flag

    public Node(int nodeName) {
        data = nodeName;
    }

    /** Link {@code childNode} under this node and record this node as its parent. */
    public void addChild(Node childNode) {
        childNode.parent = this;
        children.add(childNode);
    }

    /** Adjacency list of this node. */
    public List<Node> getchilds() {
        return children;
    }

    public Node getParent() {
        return parent;
    }

    public void setParent(Node parent) {
        this.parent = parent;
    }
}
public class narytree {
    // nodeList.get(i) is the Node for house i (index 0 is an unused placeholder).
    static ArrayList<Node> nodeList = new ArrayList<>();
    // arr[i] counts how many times house i was visited across all days.
    static int[] arr;

    /**
     * Walks parent links from {@code Start} (the day's destination) back up to
     * {@code node} (the day's starting house), printing each house on the path
     * and incrementing its visit counter. The start house itself is NOT
     * counted here; main() compensates with {@code arr[src]++}.
     */
    public static void pathPrint(Node node, Node Start, List<Node> track) {
        if (Start.getParent() != node) {
            pathPrint(node, Start.getParent(), track); // recurse toward the start of the path
        }
        System.out.print(Start.data + " ");
        arr[Start.data]++;
    }

    /**
     * BFS from {@code node} to assign parent pointers, prints/counts the path
     * to house {@code value} via {@link #pathPrint}, then runs a second BFS to
     * clear the visited flags so the graph is reusable for the next day.
     */
    public static boolean path(Node node, int value, List<Node> track) {
        LinkedList<Node> queue = new LinkedList<>();
        queue.add(node);
        while (!queue.isEmpty()) {
            Node presentNode = queue.remove(0);
            presentNode.visited = true;
            for (Node neighbor : presentNode.getchilds()) {
                if (!neighbor.visited) {
                    neighbor.visited = true;
                    queue.add(neighbor);
                    neighbor.setParent(presentNode);
                }
            }
        }
        pathPrint(node, nodeList.get(value), track);
        // Second BFS: reset the visited flags set above.
        LinkedList<Node> queue1 = new LinkedList<>();
        queue1.add(node);
        while (!queue1.isEmpty()) {
            Node presentNode = queue1.remove(0);
            presentNode.visited = false;
            for (Node neighbor : presentNode.getchilds()) {
                if (neighbor.visited) {
                    neighbor.visited = false;
                    queue1.add(neighbor);
                }
            }
        }
        return true;
    }

    public static void main(String args[]) {
        Scanner sc = new Scanner(System.in);
        int nodes = sc.nextInt();
        int max = 0;    // house number with the most visits
        int maxval = 0; // its visit count
        arr = new int[nodes + 1];
        int traversals = sc.nextInt();
        List<Node> track = new ArrayList<>();
        nodeList.add(new Node(0)); // placeholder so houses are 1-indexed
        for (int i = 1; i <= nodes; i++) {
            nodeList.add(new Node(i));
        }
        for (int i = 0; i < nodes - 1; i++) {
            int a = sc.nextInt();
            int b = sc.nextInt();
            nodeList.get(a).addChild(nodeList.get(b));
            nodeList.get(b).addChild(nodeList.get(a));
        }
        for (int i = 0; i < traversals; i++) {
            int src = sc.nextInt();
            int dst = sc.nextInt();
            path(nodeList.get(src), dst, track);
            System.out.println();
            arr[src]++; // pathPrint stops before the source, so count it here
        }
        // Bug fix: the original compared arr[i] against 'max' (the index being
        // tracked) instead of 'maxval' (the best count seen so far), so the
        // reported most-visited house could be wrong. Compare counts, then
        // remember the winning index.
        for (int i = 0; i < arr.length; i++) {
            if (arr[i] > maxval) {
                maxval = arr[i];
                max = i;
            }
        }
        System.out.println(max + " ");
    }
}
|
def delete_odd_numbers(arr):
    """Return a new list containing only the even numbers from ``arr``.

    The input list is not modified and the relative order of the surviving
    elements is preserved. Negative values work too, since ``n % 2 == 0`` is
    true exactly for even integers in Python.
    """
    return [n for n in arr if n % 2 == 0]
# Quick demonstration: keeps only the even values from the sample list.
arr = [1, 2, 3, 4, 5, 6]
print(delete_odd_numbers(arr))  # expected output: [2, 4, 6]
#!/usr/bin/env bash
# Rebuild the docker-compose stack and reset the application database.
# Order matters: caches are cleared before permissions are (re)opened, and the
# database is dropped/recreated before migrations run.
set -e
docker-compose up --remove-orphans --build -d
# Clear Symfony dev/test caches (as root, since cache dirs may be root-owned).
docker-compose exec --user root php rm -rf /var/www/html/var/cache/dev /var/www/html/var/cache/test
docker-compose exec --user www-data php chmod -R 777 /var/www/html/var
# Recreate the database from scratch and apply all migrations.
docker-compose exec --user www-data php bin/console doctrine:database:drop -n --force
docker-compose exec --user www-data php bin/console doctrine:database:create -n
docker-compose exec --user www-data php bin/console doctrine:migrations:migrate -n -q
# Print (but do not execute) any remaining schema drift for inspection.
docker-compose exec --user www-data php bin/console doctrine:schema:update --dump-sql
from __future__ import absolute_import, unicode_literals
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework.test import APITestCase
from common.tests.mixins import UserMixin
from permissions.classes import Permission
from smart_settings.classes import Namespace
from user_management.tests import (
TEST_ADMIN_PASSWORD, TEST_ADMIN_USERNAME, TEST_USER_USERNAME,
TEST_USER_PASSWORD
)
class BaseAPITestCase(UserMixin, APITestCase):
    """
    API test case class that invalidates permissions and smart settings
    between tests, and provides thin wrappers around the test client verbs
    that accept either a resolvable ``viewname`` or a literal ``path``.
    """
    def setUp(self):
        super(BaseAPITestCase, self).setUp()
        # Reset cached settings/permissions so each test starts from a clean slate.
        Namespace.invalidate_cache_all()
        Permission.invalidate_cache()

    def tearDown(self):
        self.client.logout()
        super(BaseAPITestCase, self).tearDown()

    def delete(self, viewname=None, path=None, *args, **kwargs):
        # 'data'/'follow' are popped so the remaining kwargs go to reverse().
        data = kwargs.pop('data', {})
        follow = kwargs.pop('follow', False)
        if viewname:
            path = reverse(viewname=viewname, *args, **kwargs)
        return self.client.delete(
            path=path, data=data, follow=follow
        )

    def get(self, viewname=None, path=None, *args, **kwargs):
        data = kwargs.pop('data', {})
        follow = kwargs.pop('follow', False)
        if viewname:
            path = reverse(viewname=viewname, *args, **kwargs)
        return self.client.get(
            path=path, data=data, follow=follow
        )

    def login(self, username, password):
        """Log in via the test client and assert the session is authenticated."""
        logged_in = self.client.login(username=username, password=password)
        user = get_user_model().objects.get(username=username)
        self.assertTrue(logged_in)
        self.assertTrue(user.is_authenticated)
        return user.is_authenticated

    def login_user(self):
        # Bug fix: the committed source contained a redacted '<PASSWORD>'
        # placeholder, which is not valid Python. Restored the imported
        # TEST_USER_PASSWORD constant.
        self.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)

    def login_admin_user(self):
        # Bug fix: same redaction as login_user; restored TEST_ADMIN_PASSWORD.
        self.login(username=TEST_ADMIN_USERNAME, password=TEST_ADMIN_PASSWORD)

    def logout(self):
        self.client.logout()

    def patch(self, viewname=None, path=None, *args, **kwargs):
        data = kwargs.pop('data', {})
        follow = kwargs.pop('follow', False)
        if viewname:
            path = reverse(viewname=viewname, *args, **kwargs)
        return self.client.patch(
            path=path, data=data, follow=follow
        )

    def post(self, viewname=None, path=None, *args, **kwargs):
        data = kwargs.pop('data', {})
        follow = kwargs.pop('follow', False)
        if viewname:
            path = reverse(viewname=viewname, *args, **kwargs)
        return self.client.post(
            path=path, data=data, follow=follow
        )

    def put(self, viewname=None, path=None, *args, **kwargs):
        data = kwargs.pop('data', {})
        follow = kwargs.pop('follow', False)
        if viewname:
            path = reverse(viewname=viewname, *args, **kwargs)
        return self.client.put(
            path=path, data=data, follow=follow
        )
|
<gh_stars>0
package oj;
/**
* date: 2017/02/04 14:01.
* author: <NAME>
*/
/**
* Given a sorted array, remove the duplicates in place such that each element appear only once
* and return the new length.
* Do not allocate extra space for another array, you must do this in place with constant memory.
*
* For example,
* Given input array nums = [1,1,2],
* Your function should return length = 2, with the first two elements of nums being 1 and 2 respectively.
* It doesn't matter what you leave beyond the new length.
*/
public class RemoveDuplicates_26 {
public int removeDuplicates(int[] nums) {
int result = nums.length;
int index = 0;
while (index < result) {
boolean isPlus = true;
for (int i = 0; i < index; i++) {
if (nums[index] == nums[i]) {
isPlus = false;
result--;
for (int j = index; j < result; j++) {
nums[j] = nums[j+1];
}
}
}
if (isPlus) {
index++;
}
//
// System.out.println("****index:" + index + "****");
// for (int i = 0; i < nums.length; i++) {
// System.out.print(nums[i] + "->");
// }
// System.out.println();
}
return result;
}
public static void main(String[] args) {
int[] nums = {1,1,2,3,4,5,2,3,6,7,8,7};
int result = new RemoveDuplicates_26().removeDuplicates(nums);
System.out.println(result);
for (int i = 0; i < nums.length; i++) {
System.out.print(nums[i] + "->");
}
}
}
|
package com.tweetapp.app.service;
import com.tweetapp.app.dao.entity.User;
import org.springframework.stereotype.Service;
import java.util.List;
// NOTE(review): @Service on an interface is unusual — Spring normally expects
// the annotation on the implementing class. Confirm the implementation is
// itself registered as a bean.
@Service
public interface UserService {
    /** Returns every registered user. */
    List<User> getAllUsers();
    /** Returns users matching the given username search term
     *  (presumably a substring/like match — confirm against the DAO query). */
    List<User> getSearchedUsers(String username);
    /** Returns the details of a single user identified by {@code userId}. */
    User getUserDetails(String userId);
}
|
<reponame>alailsonko/relay-nextjs
/** @type {import('@docusaurus/types').DocusaurusConfig} */
module.exports = {
  title: 'relay-nextjs',
  tagline: 'Relay Hooks integration for Next.js apps',
  // Site is served from a GitHub Pages project page, hence the baseUrl prefix.
  url: 'https://reverecre.github.io/relay-nextjs',
  baseUrl: '/relay-nextjs/',
  onBrokenLinks: 'throw',
  onBrokenMarkdownLinks: 'warn',
  favicon: 'img/favicon.ico',
  organizationName: 'RevereCRE', // Usually your GitHub org/user name.
  projectName: 'relay-nextjs', // Usually your repo name.
  themeConfig: {
    navbar: {
      title: 'relay-nextjs',
      items: [
        {
          to: 'docs/',
          activeBasePath: 'docs',
          label: 'Docs',
          position: 'left',
        },
        {
          href: 'https://github.com/RevereCRE/relay-nextjs',
          label: 'GitHub',
          position: 'right',
        },
      ],
    },
    footer: {
      copyright: `Copyright © ${new Date().getFullYear()} Revere CRE, Inc. Built with Docusaurus.`,
    },
  },
  presets: [
    [
      '@docusaurus/preset-classic',
      {
        docs: {
          sidebarPath: require.resolve('./sidebars.js'),
          // "Edit this page" links point at the website/ folder on master.
          editUrl:
            'https://github.com/RevereCRE/relay-nextjs/edit/master/website/',
        },
        theme: {
          customCss: require.resolve('./src/css/custom.css'),
        },
      },
    ],
  ],
};
|
module HandlePolicyNotification
  # We need to consider several factors here to arrive at our decision:
  # - New policy, or continuation policy?
  # - If a continuation policy exists, what has changed about it?
  # - What are the dispositions of the other interacting policies that would
  #   affect the action we should take, and what should we do to those policies?
  # - Is there a potential policy that would make this behave as a renewal?
  #   (TODO: this last bullet was truncated in the original — confirm intent.)
class DeterminePolicyActions
include Interactor
# Context requires:
# - continuation_policy (either a Policy or nil)
# - member_detail_collection (array of HandlePolicyNotification::MemberDetails)
# - interacting_policies (array of Policy)
# - renewal_policies (array of Policy)
# - carrier_switch_renewals (array of Policy)
# Context outputs:
# - primary_policy_action (a HandlePolicyNotification::PolicyAction)
# - other_policy_actions (array of HandlePolicyNotification::PolicyAction)
def call
other_policy_actions = []
primary_policy_action = nil
if context.interacting_policies.empty? && context.renewal_policies.any?
if !context.continuation_policy.nil?
# 'Active' renewal
# not_yet_supported("active renewal")
primary_policy_action = build_passive_renewal_on(
context.policy_details,
context.member_detail_collection,
context.plan_details,
context.broker_details,
context.employer_details)
else
# 'Passive' renewal
primary_policy_action = build_passive_renewal_on(
context.policy_details,
context.member_detail_collection,
context.plan_details,
context.broker_details,
context.employer_details)
end
elsif context.interacting_policies.empty? && context.renewal_policies.empty?
if context.carrier_switch_renewals.any?
not_yet_supported("carrier switch renewal")
else
not_yet_supported("initial enrollment")
end
elsif context.interacting_policies.any? && context.renewal_policies.empty?
# Plan change, add, or remove
not_yet_supported("change on active policy")
elsif context.interacting_policies.any? && context.renewal_policies.any?
primary_policy_action = build_passive_renewal_on(
context.policy_details,
context.member_detail_collection,
context.plan_details,
context.broker_details,
context.employer_details)
# not_yet_supported("change with possible renewal")
end
context.primary_policy_action = primary_policy_action
context.other_policy_actions = other_policy_actions
end
def not_yet_supported(kind)
context.processing_errors.errors.add(:event_kind, "we don't yet handle #{kind} events")
context.fail!
end
def build_initial_enrollment_on(policy_details, member_detail_collection, plan_details, broker_details, employer_details)
event_on_all_members(policy_details, member_detail_collection, plan_details, broker_details, employer_details, "initial")
end
def build_passive_renewal_on(policy_details, member_detail_collection, plan_details, broker_details,employer_details)
event_on_all_members(policy_details, member_detail_collection, plan_details, broker_details, employer_details, "renew")
end
def event_on_all_members(policy_details, member_detail_collection, plan_details, broker_details, employer_details, action)
member_changes = build_member_changes(member_detail_collection)
HandlePolicyNotification::PolicyAction.new({
:action => action,
:policy_details => policy_details,
:member_changes => member_changes,
:plan_details => plan_details,
:broker_details => broker_details,
:employer_details => employer_details,
:transmit => true
})
end
def build_member_changes(member_detail_collection)
member_detail_collection.map do |md|
HandlePolicyNotification::MemberChange.new({
:member_id => md.member_id
})
end
end
end
end
|
<reponame>WeebHiroyuki/disgo
package main
import (
"fmt"
"net/http"
"os"
"os/signal"
"strconv"
"syscall"
"time"
"github.com/PaesslerAG/gval"
"github.com/sirupsen/logrus"
"github.com/DisgoOrg/disgo"
"github.com/DisgoOrg/disgo/api"
"github.com/DisgoOrg/disgo/api/events"
)
// Embed colors used for command feedback (decimal RGB).
const red = 16711680
const orange = 16562691
const green = 65280

// Runtime configuration, sourced from environment variables.
var token = os.Getenv("token")
var guildID = api.Snowflake(os.Getenv("guild_id"))
var adminRoleID = api.Snowflake(os.Getenv("admin_role_id"))
var testRoleID = api.Snowflake(os.Getenv("test_role_id"))
var emoteID = api.Snowflake(os.Getenv("test_emote_id"))
var logger = logrus.New()
var client = http.DefaultClient
// main wires up the example bot: builds the disgo client, registers the event
// listeners, (optionally) registers guild slash commands, connects to the
// gateway, and blocks until a termination signal arrives.
func main() {
	logger.SetLevel(logrus.DebugLevel)
	logger.Info("starting ExampleBot...")
	logger.Infof("disgo %s", api.Version)

	dgo, err := disgo.NewBuilder(token).
		SetLogger(logger).
		SetRawGatewayEventsEnabled(true).
		SetHTTPClient(client).
		SetGatewayIntents(api.GatewayIntentsGuilds, api.GatewayIntentsGuildMessages, api.GatewayIntentsGuildMembers, api.GatewayIntentsGuildWebhooks).
		SetMemberCachePolicy(api.MemberCachePolicyAll).
		AddEventListeners(&events.ListenerAdapter{
			OnRawGateway:         rawGatewayEventListener,
			OnGuildAvailable:     guildAvailListener,
			OnGuildMessageCreate: messageListener,
			OnCommand:            commandListener,
			OnButtonClick:        buttonClickListener,
		}).
		Build()
	if err != nil {
		logger.Fatalf("error while building disgo instance: %s", err)
		return
	}

	// The block below is kept as a reference for one-time guild command
	// registration (commands and per-role permissions); it is intentionally
	// commented out so it does not re-register on every start.
	/*rawCmds := []api.CommandCreate{
		{
			Name:              "eval",
			Description:       "runs some go code",
			DefaultPermission: true,
			Options: []api.CommandOption{
				{
					Type:        api.CommandOptionTypeString,
					Name:        "code",
					Description: "the code to eval",
					Required:    true,
				},
			},
		},
		{
			Name:              "test",
			Description:       "test test test test test test",
			DefaultPermission: true,
		},
		{
			Name:              "say",
			Description:       "says what you say",
			DefaultPermission: true,
			Options: []api.CommandOption{
				{
					Type:        api.CommandOptionTypeString,
					Name:        "message",
					Description: "What to say",
					Required:    true,
				},
			},
		},
		{
			Name:              "addrole",
			Description:       "This command adds a role to a member",
			DefaultPermission: true,
			Options: []api.CommandOption{
				{
					Type:        api.CommandOptionTypeUser,
					Name:        "member",
					Description: "The member to add a role to",
					Required:    true,
				},
				{
					Type:        api.CommandOptionTypeRole,
					Name:        "role",
					Description: "The role to add to a member",
					Required:    true,
				},
			},
		},
		{
			Name:              "removerole",
			Description:       "This command removes a role from a member",
			DefaultPermission: true,
			Options: []api.CommandOption{
				{
					Type:        api.CommandOptionTypeUser,
					Name:        "member",
					Description: "The member to removes a role from",
					Required:    true,
				},
				{
					Type:        api.CommandOptionTypeRole,
					Name:        "role",
					Description: "The role to removes from a member",
					Required:    true,
				},
			},
		},
	}

	// using the api.RestClient directly to avoid the guild needing to be cached
	cmds, err := dgo.RestClient().SetGuildCommands(dgo.ApplicationID(), guildID, rawCmds...)
	if err != nil {
		logger.Errorf("error while registering guild commands: %s", err)
	}

	var cmdsPermissions []api.SetGuildCommandPermissions
	for _, cmd := range cmds {
		var perms api.CommandPermission
		if cmd.Name == "eval" {
			perms = api.CommandPermission{
				ID:         adminRoleID,
				Type:       api.CommandPermissionTypeRole,
				Permission: true,
			}
		} else {
			perms = api.CommandPermission{
				ID:         testRoleID,
				Type:       api.CommandPermissionTypeRole,
				Permission: true,
			}
		}
		cmdsPermissions = append(cmdsPermissions, api.SetGuildCommandPermissions{
			ID:          cmd.ID,
			Permissions: []api.CommandPermission{perms},
		})
	}
	if _, err = dgo.RestClient().SetGuildCommandsPermissions(dgo.ApplicationID(), guildID, cmdsPermissions...); err != nil {
		logger.Errorf("error while setting command permissions: %s", err)
	}*/

	err = dgo.Connect()
	if err != nil {
		logger.Fatalf("error while connecting to discord: %s", err)
	}

	defer dgo.Close()

	logger.Infof("ExampleBot is now running. Press CTRL-C to exit.")
	// Block until a termination signal is received.
	// NOTE(review): os.Kill (SIGKILL) cannot be caught and could be dropped
	// from this list without changing behavior.
	s := make(chan os.Signal, 1)
	signal.Notify(s, syscall.SIGINT, syscall.SIGTERM, os.Interrupt, os.Kill)
	<-s
}
// guildAvailListener logs each guild as it becomes available to the shard.
func guildAvailListener(event events.GuildAvailableEvent) {
	logger.Printf("guild loaded: %s", event.Guild.ID)
}
// rawGatewayEventListener dumps the raw JSON payload of interaction-create
// gateway events to stdout (debug aid; requires SetRawGatewayEventsEnabled).
func rawGatewayEventListener(event events.RawGatewayEvent) {
	if event.Type == api.GatewayEventInteractionCreate {
		println(string(event.RawPayload))
	}
}
// buttonClickListener demonstrates the four interaction response types,
// keyed off the custom ID assigned when the buttons were created.
func buttonClickListener(event events.ButtonClickEvent) {
	switch event.CustomID() {
	case "test1":
		// Immediate reply with a new message.
		_ = event.Respond(api.InteractionResponseTypeChannelMessageWithSource,
			api.NewMessageCreateBuilder().
				SetContent(event.CustomID()).
				Build(),
		)
	case "test2":
		// Acknowledge now, reply later (shows a "thinking" state).
		_ = event.Respond(api.InteractionResponseTypeDeferredChannelMessageWithSource, nil)
	case "test3":
		// Acknowledge a component interaction without any visible change.
		_ = event.Respond(api.InteractionResponseTypeDeferredUpdateMessage, nil)
	case "test4":
		// Edit the message the button is attached to.
		_ = event.Respond(api.InteractionResponseTypeUpdateMessage,
			api.NewMessageCreateBuilder().
				SetContent(event.CustomID()).
				Build(),
		)
	}
}
// commandListener dispatches the bot's slash commands: a gval-based "eval",
// an echo command ("say"), a button/file demo ("test") and role management
// ("addrole"/"removerole").
//
// Fixes over the previous revision:
//   - "test": the error from os.Open was discarded, which would hand a nil
//     reader to AddFile; it is now logged and the command aborted.
//   - "removerole": raw color literals 65280/16711680 replaced with the
//     package-level green/red constants used by every other command.
func commandListener(event events.CommandEvent) {
	switch event.CommandName {
	case "eval":
		// Evaluation can be slow, so do it off the event goroutine and edit
		// the placeholder embed with the result when done.
		go func() {
			code := event.Option("code").String()
			embed := api.NewEmbedBuilder().
				SetColor(orange).
				AddField("Status", "...", true).
				AddField("Time", "...", true).
				AddField("Code", "```go\n"+code+"\n```", false).
				AddField("Output", "```\n...\n```", false)
			_ = event.Reply(api.NewMessageCreateBuilder().SetEmbeds(embed.Build()).Build())
			start := time.Now()
			output, err := gval.Evaluate(code, map[string]interface{}{
				"disgo": event.Disgo(),
				"dgo":   event.Disgo(),
				"event": event,
			})
			elapsed := time.Since(start)
			embed.SetField(1, "Time", strconv.Itoa(int(elapsed.Milliseconds()))+"ms", true)
			if err != nil {
				_, err = event.Interaction.EditOriginal(api.NewMessageUpdateBuilder().
					SetEmbeds(embed.
						SetColor(red).
						SetField(0, "Status", "Failed", true).
						SetField(3, "Output", "```"+err.Error()+"```", false).
						Build(),
					).
					Build(),
				)
				if err != nil {
					logger.Errorf("error sending interaction response: %s", err)
				}
				return
			}
			_, err = event.Interaction.EditOriginal(api.NewMessageUpdateBuilder().
				SetEmbeds(embed.
					SetColor(green).
					SetField(0, "Status", "Success", true).
					SetField(3, "Output", "```"+fmt.Sprintf("%+v", output)+"```", false).
					Build(),
				).
				Build(),
			)
			if err != nil {
				logger.Errorf("error sending interaction response: %s", err)
			}
		}()
	case "say":
		// Echo the user's message; mentions are cleared so the bot cannot be
		// used to ping roles/users.
		_ = event.Reply(api.NewMessageCreateBuilder().
			SetContent(event.Option("message").String()).
			ClearAllowedMentions().
			Build(),
		)
	case "test":
		reader, err := os.Open("gopher.png")
		if err != nil {
			// Previously ignored; a nil reader would have reached AddFile.
			logger.Errorf("error while opening gopher.png: %s", err)
			return
		}
		_ = event.Reply(api.NewMessageCreateBuilder().
			SetContent("test message").
			AddFile("gopher.png", reader).
			SetComponents(
				api.NewActionRow(
					api.NewPrimaryButton("test1", "test1", nil, false),
					api.NewPrimaryButton("test2", "test2", nil, false),
					api.NewPrimaryButton("test3", "test3", nil, false),
					api.NewPrimaryButton("test4", "test4", nil, false),
				),
			).
			Build(),
		)
	case "addrole":
		user := event.Option("member").User()
		role := event.Option("role").Role()
		err := event.Disgo().RestClient().AddMemberRole(*event.Interaction.GuildID, user.ID, role.ID)
		if err == nil {
			_ = event.Reply(api.NewMessageCreateBuilder().AddEmbeds(
				api.NewEmbedBuilder().SetColor(green).SetDescriptionf("Added %s to %s", role, user).Build(),
			).Build())
		} else {
			_ = event.Reply(api.NewMessageCreateBuilder().AddEmbeds(
				api.NewEmbedBuilder().SetColor(red).SetDescriptionf("Failed to add %s to %s", role, user).Build(),
			).Build())
		}
	case "removerole":
		user := event.Option("member").User()
		role := event.Option("role").Role()
		err := event.Disgo().RestClient().RemoveMemberRole(*event.Interaction.GuildID, user.ID, role.ID)
		if err == nil {
			_ = event.Reply(api.NewMessageCreateBuilder().AddEmbeds(
				api.NewEmbedBuilder().SetColor(green).SetDescriptionf("Removed %s from %s", role, user).Build(),
			).Build())
		} else {
			_ = event.Reply(api.NewMessageCreateBuilder().AddEmbeds(
				api.NewEmbedBuilder().SetColor(red).SetDescriptionf("Failed to remove %s from %s", role, user).Build(),
			).Build())
		}
	}
}
// messageListener implements a few plain-text chat commands on guild
// messages: ping/pong, a self-editing test message, and a DM check that
// reacts with a check mark or cross depending on success.
func messageListener(event events.GuildMessageCreateEvent) {
	// Ignore bots and messages without text content (embeds, attachments only).
	if event.Message.Author.IsBot || event.Message.Content == nil {
		return
	}
	switch *event.Message.Content {
	case "ping":
		_, _ = event.Message.Reply(api.NewMessageCreateBuilder().SetContent("pong").SetAllowedMentions(&api.AllowedMentions{RepliedUser: false}).Build())
	case "pong":
		_, _ = event.Message.Reply(api.NewMessageCreateBuilder().SetContent("ping").SetAllowedMentions(&api.AllowedMentions{RepliedUser: false}).Build())
	case "test":
		// Edit flow runs async so the 2s sleeps don't block the listener.
		go func() {
			sent, sendErr := event.MessageChannel().SendMessage(api.NewMessageCreateBuilder().SetContent("test").Build())
			if sendErr != nil {
				logger.Errorf("error while sending file: %s", sendErr)
				return
			}
			time.Sleep(time.Second * 2)
			embed := api.NewEmbedBuilder().SetDescription("edit").Build()
			sent, _ = sent.Edit(api.NewMessageUpdateBuilder().SetContent("edit").SetEmbeds(embed, embed).Build())
			time.Sleep(time.Second * 2)
			_, _ = sent.Edit(api.NewMessageUpdateBuilder().SetContent("").SetEmbeds(api.NewEmbedBuilder().SetDescription("edit2").Build()).Build())
		}()
	case "dm":
		go func() {
			channel, dmErr := event.Message.Author.OpenDMChannel()
			if dmErr != nil {
				_ = event.Message.AddReaction("❌")
				return
			}
			if _, dmErr = channel.SendMessage(api.NewMessageCreateBuilder().SetContent("helo").Build()); dmErr == nil {
				_ = event.Message.AddReaction("✅")
			} else {
				_ = event.Message.AddReaction("❌")
			}
		}()
	}
}
|
#!/bin/bash
# Copyright 2016 Daniel Nüst
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Container entry point: prints the installed geo-software inventory, stages
# the model/script files configured via environment variables, then runs the
# QGIS model headlessly under Xvfb. All path expansions are now quoted so
# values containing spaces survive word splitting (shellcheck SC2086).

# remove previous log file if running interactive container
rm -f "$QGIS_LOGFILE"

# Print out QGIS and other library versions, based on Dockerfile
echo "### model.sh ### Full list of installed software:"
dpkg -l
echo "### model.sh ### Installed software versions:"
python --version
gdalmanage --version
saga_cmd --version --version 2>&1 | head -1
grass --version 2>&1 | head -1
echo "Orfeo Toolbox (OTB)" $(otbcli_BandMath 2>&1 | grep 'version')
python -c 'import qgis.utils; print "QGIS: %s" % qgis.utils.QGis.QGIS_VERSION'
echo "QGIS processing plugin:" $(
cat /usr/share/qgis/python/plugins/processing/metadata.txt | grep "version")

# We expect the container is started with one model file configured via environment variable
mkdir -p "$QGIS_USER_MODELDIR"
echo "### model.sh ### Using QGIS model file" $(ls "$QGIS_MODELFILE")
cp "$QGIS_MODELFILE" "$QGIS_USER_MODELDIR/docker.model"
echo "### model.sh ### Model files is directory" "$QGIS_USER_MODELDIR"":" $(ls "$QGIS_USER_MODELDIR")

# We expect the container is started with script files configured via environment variable
mkdir -p "$QGIS_USER_SCRIPTDIR"
if [ -f "$QGIS_SCRIPTFILE" ]; then
    cp "$QGIS_SCRIPTFILE" "$QGIS_USER_SCRIPTDIR"
    echo "### model.sh ### Script files directory contents" "$QGIS_USER_SCRIPTDIR"":" $(ls "$QGIS_USER_SCRIPTDIR")
fi

# Run QGIS headless, see https://marc.info/?l=qgis-developer&m=141824118828451&w=2 using X Window Virtual Framebuffer
# We expect the actual model file to be configured via environment variable
echo " "
echo "### model.sh ### Running model now"
xvfb-run -e "$XVFB_LOGFILE" python "$QGIS_MODELSCRIPT"
#echo " "
#echo "### model.sh ### xvfb log:"
#cat $XVFB_LOGFILE
echo " "
echo "### model.sh ### QGIS processing log:"
cat "$QGIS_PROCESSING_LOGFILE"
echo " "
echo "### model.sh ### QGIS log:"
cat "$QGIS_LOGFILE"
echo " "
echo "### model.sh ### Workspace directory contents:"
tree /workspace
echo " "
echo "### model.sh ### Result files are available in the container as a subdirectory of " "$QGIS_RESULT"
echo "### model.sh ### To copy this directory to your local host, run 'docker cp <container name or id>:"$QGIS_RESULT" <path on host>'"
|
import { Pipe, PipeTransform } from '@angular/core';
@Pipe({
  name: 'yearSpan'
})
export class YearSpanPipe implements PipeTransform {
  /**
   * Formats a span of years for display.
   *
   * @param yearMin first year of the span
   * @param yearMax last year of the span; the sentinel 9999 means "ongoing"
   *   and is replaced with the current year
   * @returns "YYYY" when the span collapses to one year, otherwise "YYYY - YYYY"
   */
  transform(yearMin: number, yearMax: number): string {
    const endYear = yearMax === 9999 ? new Date().getFullYear() : yearMax;
    return yearMin === endYear ? String(yearMin) : `${yearMin} - ${endYear}`;
  }
}
|
/** Raw paging inputs; `currentPage`/`perPage` may be untrusted strings. */
interface ICalculatePagingInputs {
  currentPage?: number | string; // user input
  perPage?: number | string; // user input
  perPageDefault?: number;
  perPageMaximum?: number;
  perPageMinimum?: number;
  totalItems: number;
}
/** Normalized paging state; all fields are valid integers. */
export interface ICalculatePagingOutputs {
  currentPage: number;
  itemsPerPage: number;
  totalPages: number;
}
export const PAGING_DEFAULTS = {
  PER_PAGE: 5,
  PER_PAGE_MAXIMUM: 50,
  PER_PAGE_MINIMUM: 1,
};
/**
 * Parses user-supplied numeric input; returns null when the value is
 * missing, empty or not a number (previously a non-numeric string produced
 * NaN, which slipped past every comparison and leaked into the result).
 */
const parseUserInt = (value?: number | string): number | null => {
  if (value == null || value === '') {
    return null;
  }
  const parsed = parseInt(value.toString(), 10);
  return Number.isNaN(parsed) ? null : parsed;
};
/**
 * Calculates correct paging state based on potentially invalid / missing
 * user input by using sensible defaults and constraints.
 * @param params raw paging inputs (e.g. straight from a query string)
 * @returns normalized paging state; all zeros when there are no items
 */
export const calculatePaging = (
  params: ICalculatePagingInputs
): ICalculatePagingOutputs => {
  // No items: report an empty, zeroed paging state.
  if (params.totalItems <= 0) {
    return {
      currentPage: 0,
      itemsPerPage: 0,
      totalPages: 0,
    };
  }
  const perPageMinimum = Math.abs(
    params.perPageMinimum || PAGING_DEFAULTS.PER_PAGE_MINIMUM
  );
  const perPageDefault = Math.abs(
    params.perPageDefault || Math.max(perPageMinimum, PAGING_DEFAULTS.PER_PAGE)
  );
  const perPageMaximum = Math.abs(
    params.perPageMaximum ||
      Math.min(params.totalItems, PAGING_DEFAULTS.PER_PAGE_MAXIMUM)
  );
  // Missing/invalid perPage falls back to the default, then clamps to range.
  let itemsPerPage = parseUserInt(params.perPage);
  if (itemsPerPage == null) {
    itemsPerPage = perPageDefault;
  } else if (itemsPerPage < perPageMinimum) {
    itemsPerPage = perPageMinimum;
  } else if (itemsPerPage > perPageMaximum) {
    itemsPerPage = perPageMaximum;
  }
  const totalPages = Math.ceil(params.totalItems / itemsPerPage);
  // Missing/invalid or too-large page snaps to the last page; <= 0 to the first.
  let currentPage = parseUserInt(params.currentPage);
  if (currentPage == null || currentPage > totalPages) {
    currentPage = totalPages;
  } else if (currentPage <= 0) {
    currentPage = 1;
  }
  return {
    currentPage,
    itemsPerPage,
    totalPages,
  };
};
|
<filename>kindi/bitmap.go<gh_stars>1-10
// Copyright (c) 2011 <NAME>. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * The name Uwe Hoffmann may not be used to endorse or promote
// products derived from this software without specific prior written
// permission.
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package kindi
import (
"image"
"image/color"
_ "image/jpeg"
"image/png"
"io"
)
// EncodePNG hides payload in the least significant bits of m and writes the
// resulting image to w as a PNG.
func EncodePNG(w io.Writer, payload []byte, m image.Image) error {
	steg := newNRGBAImageLSBReaderWriter(m)
	if err := writeLengthEncoded(steg, payload); err != nil {
		return err
	}
	return png.Encode(w, steg.m)
}
// DecodePNG reads a PNG from rin and extracts the payload hidden in the
// least significant bits of its pixels.
func DecodePNG(rin io.Reader) ([]byte, error) {
	img, err := png.Decode(rin)
	if err != nil {
		return nil, err
	}
	steg := newNRGBAImageLSBReaderWriter(img)
	return readLengthEncoded(steg)
}
// nrgbaImageLSBReaderWriter streams bytes in and out of the least
// significant bits of an NRGBA image's R, G and B channels (alpha is never
// touched).
//
//	m    - working copy of the image, translated to a zero origin
//	x, y - current pixel position of the bit cursor
//	q    - channel within the pixel: 0=R, 1=G, 2=B (-1 before first use)
type nrgbaImageLSBReaderWriter struct {
	m       *image.NRGBA
	x, y, q int
}
// newNRGBAImageLSBReaderWriter copies im into a fresh NRGBA image whose
// bounds start at the origin and returns a bit cursor positioned just before
// the first channel of the first pixel.
func newNRGBAImageLSBReaderWriter(im image.Image) *nrgbaImageLSBReaderWriter {
	b := im.Bounds()
	rv := &nrgbaImageLSBReaderWriter{
		m: image.NewNRGBA(image.Rect(0, 0, b.Max.X-b.Min.X, b.Max.Y-b.Min.Y)),
		x: 0,
		y: 0,
		q: -1, // first advance moves the cursor onto channel 0 (R)
	}
	for srcY := b.Min.Y; srcY < b.Max.Y; srcY++ {
		for srcX := b.Min.X; srcX < b.Max.X; srcX++ {
			rv.m.Set(srcX-b.Min.X, srcY-b.Min.Y, im.At(srcX, srcY))
		}
	}
	return rv
}
// reset rewinds the bit cursor to just before the first channel of the
// first pixel, so the image can be re-read or re-written from the start.
func (it *nrgbaImageLSBReaderWriter) reset() {
	it.x = 0
	it.y = 0
	it.q = -1
}
// Read fills p with bytes reassembled from the least significant bits of the
// image's R, G and B channels, least-significant bit first. It returns
// io.EOF when the image is exhausted; p may be partially filled in that
// case (n bytes are valid).
//
// Changes: idiomatic `for j := range p` (was `for j, _ :=`), dropped the
// redundant zero initializer, and renamed the local that shadowed the
// `color` package to `px`.
func (it *nrgbaImageLSBReaderWriter) Read(p []byte) (n int, err error) {
	n = 0
	for j := range p {
		var rv byte
		var i uint8
		for i = 0; i < 8; i++ {
			// Advance the channel cursor; wrap to the next pixel, then the
			// next row, when a pixel's three usable channels are consumed.
			it.q++
			if it.q == 3 {
				it.q = 0
				it.x++
				if it.x == it.m.Rect.Max.X {
					it.x = it.m.Rect.Min.X
					it.y++
					if it.y == it.m.Rect.Max.Y {
						return n, io.EOF
					}
				}
			}
			px := it.m.At(it.x, it.y).(color.NRGBA)
			var colorByte byte
			switch it.q {
			case 0:
				colorByte = px.R
			case 1:
				colorByte = px.G
			case 2:
				colorByte = px.B
			}
			rv = rv | ((colorByte & 1) << i)
		}
		p[j] = rv
		n++
	}
	return n, nil
}
// setLSB returns val with its least significant bit forced to bit
// (bit must be 0 or 1; any nonzero handling matches the original: only
// exactly 1 sets the bit).
func setLSB(val, bit byte) byte {
	if bit == 1 {
		return val | 1
	}
	return val &^ 1 // clear bit 0; identical to val & 0xfe
}
// Write embeds the bytes of p into the least significant bits of the
// image's R, G and B channels, least-significant bit first — the mirror of
// Read. It returns io.EOF when the image runs out of capacity; n counts
// only the fully written bytes.
func (it *nrgbaImageLSBReaderWriter) Write(p []byte) (n int, err error) {
	n = 0
	for _, v := range p {
		var i uint8
		for i = 0; i < 8; i++ {
			// Advance the channel cursor; wrap to the next pixel/row when a
			// pixel's three usable channels are consumed.
			it.q++
			if it.q == 3 {
				it.q = 0
				it.x++
				if it.x == it.m.Rect.Max.X {
					it.x = it.m.Rect.Min.X
					it.y++
					if it.y == it.m.Rect.Max.Y {
						return n, io.EOF
					}
				}
			}
			// NOTE: local `color` shadows the color package after this line;
			// the type assertion itself still resolves against the package.
			color := it.m.At(it.x, it.y).(color.NRGBA)
			switch it.q {
			case 0:
				color.R = setLSB(color.R, (v>>i)&1)
			case 1:
				color.G = setLSB(color.G, (v>>i)&1)
			case 2:
				color.B = setLSB(color.B, (v>>i)&1)
			}
			it.m.Set(it.x, it.y, color)
		}
		n++
	}
	return n, nil
}
|
<gh_stars>1-10
package structure.adapter;
/**
 * Target interface of the adapter-pattern example: anything that can behave
 * like a cat (meow and eat). The redundant {@code public} modifiers were
 * removed — interface members are implicitly public.
 */
public interface Cat {
    /** Makes the cat sound. */
    void miao();

    /** Eats food. */
    void eat();
}

/**
 * Plain {@link Cat} implementation used as the reference behaviour in the
 * adapter demo; both methods print to stdout.
 */
class WildCat implements Cat {
    @Override
    public void miao() {
        System.out.println("喵喵叫");
    }

    @Override
    public void eat() {
        System.out.println("吃饭");
    }
}
;
define(function (require) {
var d3 = require('d3');
var $ = require('jquery');
// Builds an animated D3 percentage-donut inside `element`, initialized to
// `percents` (0-100). Returns a small controller object: updateData(percents)
// animates to a new value, updateSizes() re-measures the container, and
// destroy() detaches the window resize handler.
function plotted(element, percents) {
var el = d3.select(element);
// el.classed('plot', true);
//el.html('');
var t = 2 * Math.PI; // http://tauday.com/tau-manifesto
// Layout measurements; all recomputed from the container in calculateVariables().
var offsetWidth,
offsetHeight,
width,
height,
padding,
donutWidth,
outerRadius,
innerRadius,
fontSize,
isIncreased = true;
percents = percents || 0;
// Mutable datum shared between the label text and the arc tween.
var data = {
val: percents
};
// Center label: up/down arrows, the numeric value and a '%' suffix.
var plotLabel = el.append('div').attr('class', 'plot-label');
plotLabel.classed('up', isIncreased);
plotLabel.classed('down', !isIncreased);
//arrows
plotLabel.append('span').attr('class', 'arrow up').text('▲');
plotLabel.append('span').attr('class', 'arrow down').text('▼');
var arrow = plotLabel.selectAll('.arrow');
var text = plotLabel.append('span').attr('class', 'value').datum(data);
var points = plotLabel.append('span').attr('class', 'points').text('%');
// An arc function with all values bound except the endAngle. So, to compute an
// SVG path string for a given angle, we pass an object with an endAngle
// property to the `arc` function, and it will return the corresponding string.
var arc = d3.svg.arc()
.startAngle(0);
// Create the SVG container, and apply a transform such that the origin is the
// center of the canvas. This way, we don't need to position arcs individually.
var svgRoot = el.append("svg");
var svg = svgRoot.append("g");
// Add the background arc, from 0 to 100% (τ).
var background = svg.append("path").datum({endAngle: t});
// Add the foreground arc in orange, currently showing 12.7%.
var foreground = svg.append("path");
onResize();
// Every so often, start a transition to a new random angle. Use transition.call
// (identical to selection.call) so that we can encapsulate the logic for
// tweening the arc in a separate function below.
/* setInterval(function() {
updateData(Math.random() * 100);
}, 1500);*/
// Creates a tween on the specified transition's "d" attribute, transitioning
// any selected arcs from their current angle to the specified new angle.
function arcTween(transition, perc) {
data.val = perc;
text.text(function (d) {
return Math.round(d.val);
});
var newAngle = perc / 100 * t;
// The function passed to attrTween is invoked for each selected element when
// the transition starts, and for each element returns the interpolator to use
// over the course of transition. This function is thus responsible for
// determining the starting angle of the transition (which is pulled from the
// element's bound datum, d.endAngle), and the ending angle (simply the
// newAngle argument to the enclosing function).
transition.attrTween("d", function (d) {
//console.log(newAngle, d);
// To interpolate between the two angles, we use the default d3.interpolate.
// (Internally, this maps to d3.interpolateNumber, since both of the
// arguments to d3.interpolate are numbers.) The returned function takes a
// single argument t and returns a number between the starting angle and the
// ending angle. When t = 0, it returns d.endAngle; when t = 1, it returns
// newAngle; and for 0 < t < 1 it returns an angle in-between.
var interpolate = d3.interpolate(d.endAngle, newAngle);
// Arrow direction reflects whether the new value is higher than the old one.
isIncreased = d.endAngle <= newAngle;
plotLabel.classed('up', isIncreased);
plotLabel.classed('down', !isIncreased);
// The return value of the attrTween is also a function: the function that
// we want to run for each tick of the transition. Because we used
// attrTween("d"), the return value of this last function will be set to the
// "d" attribute at every tick. (It's also possible to use transition.tween
// to run arbitrary code for every tick, say if you want to set multiple
// attributes from a single function.) The argument t ranges from 0, at the
// start of the transition, to 1, at the end.
return function (t) {
// Calculate the current arc angle based on the transition time, t. Since
// the t for the transition and the t for the interpolate both range from
// 0 to 1, we can pass t directly to the interpolator.
//
// Note that the interpolated angle is written into the element's bound
// data object! This is important: it means that if the transition were
// interrupted, the data bound to the element would still be consistent
// with its appearance. Whenever we start a new arc transition, the
// correct starting angle can be inferred from the data.
d.endAngle = interpolate(t);
// Lastly, compute the arc path given the updated data! In effect, this
// transition uses data-space interpolation: the data is interpolated
// (that is, the end angle) rather than the path string itself.
// Interpolating the angles in polar coordinates, rather than the raw path
// string, produces valid intermediate arcs during the transition.
return arc(d);
};
});
}
// Re-measure the container and push the new sizes into every element.
function onResize() {
calculateVariables();
updateSizes();
}
// Derives all layout measurements from the container's current size,
// keeping the donut square and scaling paddings/fonts proportionally.
function calculateVariables() {
offsetWidth = $(el.node()).width() || 400;
offsetHeight = $(el.node()).height() || 400;
console.log('offsetWidth, offsetHeight:', offsetWidth, offsetHeight);
width = offsetWidth > offsetHeight ? offsetHeight : offsetWidth;
height = width;
padding = width / 20;
donutWidth = width / 20;
outerRadius = (width - padding * 2) / 2;
innerRadius = outerRadius - donutWidth;
fontSize = innerRadius * 2 / 3;
}
// Applies the measurements computed above to the arc, SVG, label and arrows.
function updateSizes() {
arc
.innerRadius(innerRadius)
.outerRadius(outerRadius);
svgRoot
.attr("width", width)
.attr("height", height);
svg
.attr("transform", "translate(" + width / 2 + "," + height / 2 + ")");
text
.attr("font-weight", "bold")
.style("font-size", fontSize + "px")
.style("line-height", fontSize + "px")
.text(function (d) {
return Math.round(d.val);
});
plotLabel
.style('width', innerRadius * 2 + 'px')
.style('left', padding + donutWidth + 'px')
.style('top', padding + outerRadius - fontSize / 2 + 'px');
arrow
.style('font-size', fontSize / 3 + 'px');
points
.style('font-size', fontSize / 3 + 'px');
background
.style("fill", "#ddd")
.attr("d", arc);
foreground
.datum({endAngle: data.val / 100 * t})
.style("fill", "rgb(2, 108, 163)")
.attr("d", arc);
}
// Animates the foreground arc (and label) to a new percentage.
function updateData(percents) {
foreground.transition()
.duration(750)
.call(arcTween, percents);
}
// Debounced window-resize handling (50ms) so rapid resizes re-layout once.
var resizeTimeout = null;
var onResizeWindow = function(){
clearTimeout(resizeTimeout);
resizeTimeout = setTimeout(onResize, 50);
};
$(window).on('resize', onResizeWindow);
// Public API of the chart instance.
return {
updateData: updateData,
updateSizes: onResize,
destroy: function(){
$(window).off('resize', onResizeWindow);
}
}
}
// Angular directive wrapper: binds the D3 donut to dashboard model data
// supplied via the `plot-data` attribute.
require('../../../ngModule').directive('plotPercentsChart', function () {
return {
restrict: 'EA',
templateUrl: '/app/controls/plots/percentsChart/percentsChart.html',
scope: {
model: '=plotData'
},
link: function($scope, element, attrs){
// Local mirror of the plotted value; the controller below keeps it fresh.
$scope.data = {
val: 0
};
$scope.plot = plotted(element.find('.percents-chart')[0], $scope.data.val);
$scope.$watch('data.val', function(){
$scope.plot.updateData($scope.data.val);
});
$scope.destroyPlot = function(){
$scope.plot.destroy();
}
},
controller: function ($scope) {
$scope.config = $scope.model.options;
// Projects the configured key of the newest data row into the donut value.
// NOTE(review): value/groupBy are accepted but unused here — presumably for
// interface parity with sibling plot controllers; confirm before removing.
$scope.updateData = function(key, value, groupBy){
var lastItem = $scope.model.data.length > 0 ? $scope.model.data[$scope.model.data.length - 1] : $scope.model.data[0];
$scope.data.val = lastItem ? parseInt(lastItem[key], 10) || 0 : 0;
};
// Recompute whenever the configuration or the underlying data changes.
$scope.$watch('config.key', function () {
$scope.updateData($scope.config.key, $scope.config.value, $scope.config.groupBy);
});
$scope.$watch('config.value', function () {
$scope.updateData($scope.config.key, $scope.config.value, $scope.config.groupBy);
});
$scope.$watch('model.data', function(){
$scope.updateData($scope.config.key, $scope.config.value, $scope.config.groupBy);
});
// Re-measure after the dashboard finishes resizing the widget (next tick).
$scope.$watch('model.size', function(){
setTimeout(function() {
$scope.plot.updateSizes();
}, 0);
});
$scope.$on('$destroy', function(){
$scope.destroyPlot();
})
}
};
});
}); |
# Serve the parrot-manager frontend using its "start-prod" npm script,
# run from the parrot-manager-frontend package directory.
npm run start-prod --prefix parrot-manager-frontend
|
import twint

# Configure the twint search: query, result cap, and in-memory storage so the
# fetched tweets can be filtered afterwards.
config = twint.Config()
config.Search = "programming"  # Replace with the desired search query
config.Limit = 100  # Limit the number of tweets to fetch
config.Store_object = True  # Store the tweets in a list for filtering

# Run the search (results accumulate in twint.output.tweets_list).
twint.run.Search(config)

# Keep only high-engagement tweets: more than 1000 likes AND 500 retweets.
popular_tweets = [
    tweet
    for tweet in twint.output.tweets_list
    if tweet.likes > 1000 and tweet.retweets > 500
]

# Average the engagement counts, guarding against an empty result set.
likes_total = sum(tweet.likes for tweet in popular_tweets)
retweets_total = sum(tweet.retweets for tweet in popular_tweets)
average_likes = likes_total / len(popular_tweets) if popular_tweets else 0
average_retweets = retweets_total / len(popular_tweets) if popular_tweets else 0

print(f"Average likes: {average_likes}")
print(f"Average retweets: {average_retweets}")
-- All employees whose address is Tampa, ordered by department ID so that
-- members of the same department are listed together.
SELECT * FROM employees
WHERE address = 'Tampa'
ORDER BY DeptID ASC;
# Controller for listing, inspecting, and driving the AASM lifecycle of Jobs.
class JobsController < ApplicationController
  before_filter :require_user

  # GET /jobs — paginated job list, newest first.
  def index
    @jobs = Job.paginate(:all, :page => params[:page], :order => "id DESC")
  end

  # GET /jobs/:id — show a single job; expose error details when present.
  def show
    @job = Job.find(params[:id])
    if @job.data["error"]
      @error_message = @job.data["error"]["message"]
      @error_exit_code = @job.data["error"]["exit_code"]
    end
  end

  # Fires the AASM state event named by params[:status] on the job.
  def update_status
    job = Job.find(params[:id])
    if job.aasm_current_state == :paused && params[:status] == 'pause'
      # "pause" on an already-paused job toggles it back to waiting.
      job.unpause
      params[:status] = 'waiting'
    elsif job.aasm_events_for_current_state.include? params[:status].intern
      # The event name is validated against the allowed AASM events above, so
      # a plain send is safe. (Replaces eval("job.#{params[:status]}"), which
      # interpolated raw request input into executable Ruby code.)
      job.send(params[:status])
    else
      flash[:error] = "Job status could not be updated!"
      redirect_to jobs_path
      return
    end
    # NOTE(review): save! raises on failure, so the else branch below is
    # effectively unreachable; kept to preserve existing behavior.
    if job.save!
      flash[:notice] = "Job #{job.aasm_current_state}!"
    else
      flash[:error] = "Job status could not be updated!"
    end
    redirect_to jobs_path
  end
end
|
package com.arduino.propertyofss.arduinolearning.draglistview.sample;
import android.os.Bundle;
import java.util.concurrent.TimeUnit;
/**
 * Fragment that replays the user's queued robot actions over Bluetooth.
 *
 * <p>Each entry in {@code AddedFunctions} (inherited from
 * {@code BoardFragment}) is matched by its display name and translated into
 * the single-character command codes understood by the Arduino sketch.
 *
 * <p>Changes over the previous revision: the ~180-line if/else chain is
 * decomposed into helpers; "Run N Seconds" no longer throws
 * {@code IndexOutOfBoundsException} when it is the first queued action; and
 * the thread's interrupt status is restored after {@code InterruptedException}.
 */
public class ActionList extends BoardFragment {

    // Single-character command codes understood by the Arduino sketch.
    private static final String CMD_FORWARD = "1";
    private static final String CMD_BACKWARD = "2";
    private static final String CMD_RIGHT = "3";
    private static final String CMD_LEFT = "4";
    private static final String CMD_STOP = "5";
    private static final String CMD_OBJECT_DETECT = "6";

    /** Settle delay after sending a stop command, in milliseconds. */
    private static final long STOP_SETTLE_MILLIS = 400;
    /** Duration of a ~90 degree turn, in milliseconds. */
    private static final long TURN_90_MILLIS = 660;
    /** Duration of a ~180 degree turn, in milliseconds. */
    private static final long TURN_180_MILLIS = 1320;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHasOptionsMenu(true);
    }

    /**
     * Replays every queued action in order, writing the corresponding
     * command bytes to the Arduino board over the Bluetooth connection.
     */
    public static void listOfActions() {
        for (int x = 0; x < AddedFunctions.size(); x++) {
            String action = AddedFunctions.get(x).second;
            switch (action) {
                case "Go Straight":
                    timedMove(CMD_FORWARD, 250, "forward");
                    break;
                case "Turn Left":
                    timedMove(CMD_LEFT, TURN_90_MILLIS, "left");
                    break;
                case "Turn Right":
                    timedMove(CMD_RIGHT, TURN_90_MILLIS, "right");
                    break;
                case "Go Backwards":
                    timedMove(CMD_BACKWARD, 250, "backward");
                    break;
                case "Run 1 Seconds":
                    runForSeconds(x, 1);
                    break;
                case "Run 2 Seconds":
                    runForSeconds(x, 2);
                    break;
                case "Run 5 Seconds":
                    runForSeconds(x, 5);
                    break;
                case "Object Detection":
                    write(CMD_OBJECT_DETECT);
                    break;
                case "Stop":
                    write(CMD_STOP);
                    break;
                case "Right Turn 180 Degrees":
                    timedMove(CMD_RIGHT, TURN_180_MILLIS, "turn 180");
                    break;
                case "Left Turn 180 Degrees":
                    timedMove(CMD_LEFT, TURN_180_MILLIS, "turn 180");
                    break;
                case "Zig Zag":
                    runSequence("Zig zac", "Zig zac not done",
                            new String[] {CMD_RIGHT, CMD_FORWARD, CMD_LEFT, CMD_FORWARD, CMD_RIGHT, CMD_FORWARD},
                            new long[] {330, 2000, TURN_90_MILLIS, 2000, TURN_90_MILLIS, 2000});
                    break;
                case "Comeback":
                    runSequence("Comeback done", "Comeback not done",
                            new String[] {CMD_FORWARD, CMD_RIGHT, CMD_FORWARD, CMD_RIGHT},
                            new long[] {2000, TURN_180_MILLIS, 2000, TURN_180_MILLIS});
                    break;
                default:
                    // Unknown action names are skipped, matching the original
                    // if/else chain which had no final else branch.
                    break;
            }
        }
    }

    /** Sends a single command string to the board. */
    private static void write(String command) {
        Bluetooth.mConnectedThread.write(command);
    }

    /** Stops the motors and waits for the board to settle. */
    private static void stopMotors() throws InterruptedException {
        write(CMD_STOP);
        TimeUnit.MILLISECONDS.sleep(STOP_SETTLE_MILLIS);
    }

    /**
     * Runs {@code command} for {@code runMillis}, then stops, logging
     * "&lt;label&gt; done" on success or "&lt;label&gt; not done" when interrupted.
     */
    private static void timedMove(String command, long runMillis, String label) {
        write(command);
        try {
            TimeUnit.MILLISECONDS.sleep(runMillis);
            stopMotors();
            System.out.println(label + " done");
        } catch (InterruptedException e) {
            System.out.println(label + " not done");
            Thread.currentThread().interrupt(); // restore interrupt status
            e.printStackTrace();
        }
    }

    /**
     * "Run N Seconds" repeats the PREVIOUS direction (forward/backward) for
     * the given duration. Bounds-checked: the original indexed x - 1
     * unconditionally and threw when this was the first queued action.
     */
    private static void runForSeconds(int index, long seconds) {
        try {
            if (index > 0) {
                String previous = AddedFunctions.get(index - 1).second;
                if (previous.equals("Go Straight")) {
                    write(CMD_FORWARD);
                } else if (previous.equals("Go Backwards")) {
                    write(CMD_BACKWARD);
                }
            }
            TimeUnit.SECONDS.sleep(seconds);
            stopMotors();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Plays a fixed command/duration sequence, stopping between steps, then
     * prints {@code successMessage}; prints {@code failureMessage} if
     * interrupted mid-sequence.
     */
    private static void runSequence(String successMessage, String failureMessage,
                                    String[] commands, long[] durations) {
        try {
            for (int i = 0; i < commands.length; i++) {
                write(commands[i]);
                TimeUnit.MILLISECONDS.sleep(durations[i]);
                stopMotors();
            }
            System.out.println(successMessage);
        } catch (InterruptedException e) {
            System.out.println(failureMessage);
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }
}
|
#!/usr/bin/env bash
# Much of this is an adaptation of the corresponding script in swift-nio:
# https://github.com/apple/swift-nio
# ============================================================================
#
# Builds jazzy API documentation for every module and, on CI, publishes the
# result to the gh-pages branch.
#
# NOTE: the script uses bash arrays and `local`, so the shebang must be bash —
# the previous `#!/usr/bin/env sh` failed on POSIX shells.

set -e

MODULES=(Futures FuturesSync)
REPO_SLUG=dfunckt/swift-futures
REPO_URL=https://github.com/${REPO_SLUG}
BASE_URL=https://dfunckt.github.io/swift-futures
OUTPUT_DIR="docs/"

# Exact tag when building a release; current branch name otherwise.
VERSION=$(git describe --exact-match --abbrev=0 --tags || git rev-parse --abbrev-ref HEAD)
echo "Building docs for version '${VERSION}'"

JAZZY_ARGS=(
    --config .jazzy.yml
    --github_url "${REPO_URL}"
    --github-file-prefix "${REPO_URL}/tree/${VERSION}"
    --xcodebuild-arguments USE_SWIFT_RESPONSE_FILE=NO
)

# Writes the cross-linking README used as a module's docs landing page.
make_module_readme() {
    local readme_path="$1"
    cat > "${readme_path}" <<"EOF"
# Futures
Futures comprises several modules:
EOF
    for m in "${MODULES[@]}"; do
        echo "- [${m}](../${m}/index.html)" >>"${readme_path}"
    done
}

# Runs jazzy for a single module into ${OUTPUT_DIR}${VERSION}/<module>/.
make_module_docs() {
    local module="$1"
    local module_readme="$2"
    args=(
        "${JAZZY_ARGS[@]}"
        --module "${module}"
        --module-version "${VERSION}"
        --title "${module} Reference (${VERSION})"
        --readme "${module_readme}"
        --root-url "${BASE_URL}/${OUTPUT_DIR}${VERSION}/${module}"
        --output "${OUTPUT_DIR}${VERSION}/${module}/"
    )
    jazzy "${args[@]}"
}

# Commits the generated docs to gh-pages and pushes, then restores the
# original branch.
publish() {
    local branch_name=$(git rev-parse --abbrev-ref HEAD)
    local git_author=$(git --no-pager show -s --format='%an <%ae>' HEAD)
    git fetch --depth=1 origin +gh-pages:gh-pages
    git checkout gh-pages
    rm -rf "${OUTPUT_DIR}latest"
    cp -r "${OUTPUT_DIR}${VERSION}" "${OUTPUT_DIR}latest"
    git add --all "${OUTPUT_DIR}"
    local latest_url="${OUTPUT_DIR}${VERSION}/Futures/index.html"
    echo '<html><head><meta http-equiv="refresh" content="0; url='"${latest_url}"'" /></head></html>' >index.html
    git add index.html
    touch .nojekyll
    git add .nojekyll
    local changes=$(git diff-index --name-only HEAD)
    if test -n "$changes"; then
        git commit --author="${git_author}" -m "Publish API reference for ${VERSION}"
        git push origin gh-pages
    else
        echo "no changes detected"
    fi
    git checkout -f "${branch_name}"
}

mkdir -p "${OUTPUT_DIR}${VERSION}"
for module in "${MODULES[@]}"; do
    readme="${OUTPUT_DIR}${VERSION}/${module}.md"
    make_module_readme "$readme"
    make_module_docs "$module" "$readme"
done

# Only publish from CI. CI is quoted and defaulted so an unset variable no
# longer makes `test` fail with "unary operator expected".
if test "${CI:-}" = true; then
    publish
fi
using System;
using System.Reflection;
using UnityEngine;
using Sisus.Vexe.FastReflection;
/// <summary>
/// Reflection helpers for reading and writing named properties on
/// UnityEngine objects via Vexe FastReflection delegates. Both methods
/// return an explanatory message string when the property is missing or
/// lacks the required accessor.
/// </summary>
public class UnityObjectPropertyAccessor
{
    /// <summary>
    /// Reads the named property from <paramref name="obj"/>; returns the
    /// value, or a message string when unreadable.
    /// </summary>
    public static object GetPropertyValue(Object obj, string propertyName)
    {
        Type objectType = obj.GetType();
        PropertyInfo property = objectType.GetProperty(propertyName);
        // Guard clause: bail out early when there is nothing to read.
        if (property == null || !property.CanRead)
        {
            return $"Property '{propertyName}' does not exist or is not readable for object of type '{objectType}'.";
        }
        FastProperty fastProperty = property.GetFastGetter();
        return fastProperty.Get(obj);
    }

    /// <summary>
    /// Writes <paramref name="newValue"/> into the named property of
    /// <paramref name="obj"/>; returns a status message either way.
    /// </summary>
    public static string SetPropertyValue(Object obj, string propertyName, object newValue)
    {
        Type objectType = obj.GetType();
        PropertyInfo property = objectType.GetProperty(propertyName);
        // Guard clause: bail out early when there is nothing to write.
        if (property == null || !property.CanWrite)
        {
            return $"Property '{propertyName}' does not exist or is not writable for object of type '{objectType}'.";
        }
        FastProperty fastProperty = property.GetFastSetter();
        fastProperty.Set(obj, newValue);
        return $"Property '{propertyName}' set to '{newValue}' for object of type '{objectType}'.";
    }
}
#!/bin/bash
# Run the test suite once against each bundled libzstd build for the current
# platform (processor / distro / release), failing fast on the first error.
PROCESSOR_TYPE=$(uname -p)
DISTRIBUTOR_ID=$(lsb_release -i -s | tr '[:upper:]' '[:lower:]')
RELEASE=$(lsb_release -r -s)
LIBRARIES_DIRECTORY="./spec/libzstd/${PROCESSOR_TYPE}/${DISTRIBUTOR_ID}/${RELEASE}"
if [ -d "$LIBRARIES_DIRECTORY" ]; then
  # Quote the expansions: the original left the glob and $library unquoted,
  # which breaks on paths containing whitespace. Also test the command's exit
  # status directly instead of the '[ "$?" -ne 0 ]' antipattern.
  for library in "${LIBRARIES_DIRECTORY}"/*; do
    if ! bundle exec rake ZSTANDARD_LIBRARY="$library"; then
      exit 1
    fi
  done
else
  echo "There are no bundled libraries for the current system."
  exit 1
fi
#!/usr/bin/env bash
# Generate pgbouncer.ini and users.txt from the DATABASE_URL-style config vars
# named in PGBOUNCER_URLS, and export a matching <NAME>_PGBOUNCER URL pointing
# at the local pgbouncer listener (127.0.0.1:6000) for each of them.
POSTGRES_URLS=${PGBOUNCER_URLS:-DATABASE_URL}
POOL_MODE=${PGBOUNCER_POOL_MODE:-transaction}
SERVER_RESET_QUERY=${PGBOUNCER_SERVER_RESET_QUERY}
# n numbers the client-side database aliases (db1, db2, ...).
n=1
# if the SERVER_RESET_QUERY and pool mode is session, pgbouncer recommends DISCARD ALL be the default
# http://pgbouncer.projects.pgfoundry.org/doc/faq.html#_what_should_my_server_reset_query_be
if [ -z "${SERVER_RESET_QUERY}" ] && [ "$POOL_MODE" == "session" ]; then
SERVER_RESET_QUERY="DISCARD ALL;"
fi
# Append the [pgbouncer] section. The unquoted heredoc delimiter is deliberate:
# it lets the ${VAR:-default} expansions below be substituted into the file.
cat >> /app/vendor/pgbouncer/pgbouncer.ini << EOFEOF
[pgbouncer]
listen_addr = 127.0.0.1
listen_port = 6000
auth_type = md5
auth_file = /app/vendor/pgbouncer/users.txt
server_tls_sslmode = prefer
server_tls_protocols = secure
server_tls_ciphers = HIGH:!ADH:!AECDH:!LOW:!EXP:!MD5:!3DES:!SRP:!PSK:@STRENGTH
; When server connection is released back to pool:
; session - after client disconnects
; transaction - after transaction finishes
; statement - after statement finishes
pool_mode = ${POOL_MODE}
server_reset_query = ${SERVER_RESET_QUERY}
max_client_conn = ${PGBOUNCER_MAX_CLIENT_CONN:-100}
default_pool_size = ${PGBOUNCER_DEFAULT_POOL_SIZE:-1}
min_pool_size = ${PGBOUNCER_MIN_POOL_SIZE:-0}
reserve_pool_size = ${PGBOUNCER_RESERVE_POOL_SIZE:-1}
reserve_pool_timeout = ${PGBOUNCER_RESERVE_POOL_TIMEOUT:-5.0}
server_lifetime = ${PGBOUNCER_SERVER_LIFETIME:-3600}
server_idle_timeout = ${PGBOUNCER_SERVER_IDLE_TIMEOUT:-600}
log_connections = ${PGBOUNCER_LOG_CONNECTIONS:-1}
log_disconnections = ${PGBOUNCER_LOG_DISCONNECTIONS:-1}
log_pooler_errors = ${PGBOUNCER_LOG_POOLER_ERRORS:-1}
stats_period = ${PGBOUNCER_STATS_PERIOD:-60}
ignore_startup_parameters = ${PGBOUNCER_IGNORE_STARTUP_PARAMETERS}
query_wait_timeout = ${PGBOUNCER_QUERY_WAIT_TIMEOUT:-120}
[databases]
EOFEOF
# One iteration per config-var NAME (e.g. DATABASE_URL) listed in POSTGRES_URLS.
for POSTGRES_URL in $POSTGRES_URLS
do
# Indirect expansion: fetch the value of the variable named by $POSTGRES_URL.
eval POSTGRES_URL_VALUE=\$$POSTGRES_URL
# Split postgres[ql]://user:pass@host:port/dbname into its five components.
IFS=':' read DB_USER DB_PASS DB_HOST DB_PORT DB_NAME <<< $(echo $POSTGRES_URL_VALUE | perl -lne 'print "$1:$2:$3:$4:$5" if /^postgres(?:ql)?:\/\/([^:]*):([^@]*)@(.*?):(.*?)\/(.*?)$/')
# pgbouncer md5 auth_file format: "md5" + md5(password + username).
DB_MD5_PASS="md5"`echo -n ${DB_PASS}${DB_USER} | md5sum | awk '{print $1}'`
CLIENT_DB_NAME="db${n}"
echo "Setting ${POSTGRES_URL}_PGBOUNCER config var"
if [ "$PGBOUNCER_PREPARED_STATEMENTS" == "false" ]
then
export ${POSTGRES_URL}_PGBOUNCER=postgres://$DB_USER:$DB_PASS@127.0.0.1:6000/$CLIENT_DB_NAME?prepared_statements=false
else
export ${POSTGRES_URL}_PGBOUNCER=postgres://$DB_USER:$DB_PASS@127.0.0.1:6000/$CLIENT_DB_NAME
fi
cat >> /app/vendor/pgbouncer/users.txt << EOFEOF
"$DB_USER" "$DB_MD5_PASS"
EOFEOF
cat >> /app/vendor/pgbouncer/pgbouncer.ini << EOFEOF
$CLIENT_DB_NAME= host=$DB_HOST dbname=$DB_NAME port=$DB_PORT
EOFEOF
let "n += 1"
done
# Config contains credentials: strip group/other permissions.
chmod go-rwx /app/vendor/pgbouncer/*
int[] myArray = {2, 4, 5, 6, 8}
public int binarySearch(int[] array, int key) {
int low = 0;
int high = array.length - 1;
while (low <= high) {
int mid = (low + high) / 2;
if (key == array[mid]) {
return mid;
}
if (key < array[mid]) {
high = mid - 1;
} else {
low = mid + 1;
}
}
return -1;
} |
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build-phase script.
# NOTE(review): despite the /bin/sh shebang this uses bash-only features
# ('function', arrays, 'set -o pipefail'); on macOS build hosts /bin/sh is
# bash-compatible, so it works in practice.
set -e
set -u
set -o pipefail
# Report the failing script line through Xcode's error format.
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1 - path (or basename) of a .framework, resolved against BUILT_PRODUCTS_DIR.
# Side effects: rsyncs the framework into the app's Frameworks folder, strips
# architectures not being built, re-codesigns, and (Xcode < 7 only) embeds the
# Swift runtime dylibs the binary links against.
install_framework()
{
# Prefer the built product; fall back to its basename, then the literal path.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
# Resolve the executable inside the copied bundle (or a bare dylib/symlink).
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# $1 - path to a .framework.dSYM bundle; copied into DERIVED_FILES_DIR,
# stripped of non-built architectures, then moved to DWARF_DSYM_FOLDER_PATH.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# NOTE(review): if the 'file' check above fails, STRIP_BINARY_RETVAL keeps the
# value left by a previous call (initialized to 0 at the top of the script) —
# confirm that falling into the 'touch' branch is the intended behavior then.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Copies the bcsymbolmap files of a vendored framework
# $1 - path to a .bcsymbolmap file; copied into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # Fix/consistency: escape the inner quotes (as install_framework and
    # install_dsym do) so the logged command shows the quoting actually used
    # by the rsync invocation instead of having the shell consume it.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# $1 - path to the bundle/binary to sign. No-op unless a code-sign identity is
# expanded and signing is both required and allowed by the build settings.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
# When parallel signing is enabled the command is backgrounded; the script's
# trailing 'wait' collects these jobs.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# $1 - path to a fat binary. Removes (in place, via lipo) every architecture
# slice not present in the ARCHS being built. Communicates its result through
# STRIP_BINARY_RETVAL: 1 = binary processed, 0 = no matching architectures.
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary"
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/MineModule/MineModule.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/MineModule/MineModule.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
#!/bin/sh
# Download and install V2Ray
# Fetches the latest linux-64 release, installs the v2ray/v2ctl binaries,
# writes a vmess-over-websocket config from $PORT and $UUID, and runs v2ray
# in the foreground.
# NOTE(review): v2ray-core v5+ release zips no longer ship a separate 'v2ctl'
# binary — confirm the pinned release still contains it before relying on it.
echo "go here for test"
mkdir /tmp/v2ray
curl -L -H "Cache-Control: no-cache" -o /tmp/v2ray/v2ray.zip https://github.com/v2fly/v2ray-core/releases/latest/download/v2ray-linux-64.zip
unzip /tmp/v2ray/v2ray.zip -d /tmp/v2ray
#rm before install
rm -rf /usr/local/bin/v2ray
rm -rf /usr/local/bin/v2ctl
rm -rf /usr/local/etc/v2ray
#install new
install -m 755 /tmp/v2ray/v2ray /usr/local/bin/v2ray
install -m 755 /tmp/v2ray/v2ctl /usr/local/bin/v2ctl
# Remove temporary directory
rm -rf /tmp/v2ray
# V2Ray new configuration
install -d /usr/local/etc/v2ray
# Unquoted heredoc delimiter is deliberate: $PORT and $UUID must expand.
cat << EOF > /usr/local/etc/v2ray/config.json
{
  "inbounds": [
    {
      "port": $PORT,
      "protocol": "vmess",
      "settings": {
        "clients": [
          {
            "id": "$UUID",
            "alterId": 64
          }
        ],
        "disableInsecureEncryption": true
      },
      "streamSettings": {
        "network": "ws"
      }
    }
  ],
  "outbounds": [
    {
      "protocol": "freedom"
    },
    {
      "protocol": "blackhole",
      "tag": "blocked"
    },
    {
      "protocol": "socks",
      "tag": "sockstor",
      "settings": {
        "servers": [
          {
            "address": "127.0.0.1",
            "port": 9050
          }
        ]
      }
    }
  ]
}
EOF
# Run V2Ray
/usr/local/bin/v2ray -config /usr/local/etc/v2ray/config.json
#nohup tor & \
# /usr/local/bin/v2ray -config /usr/local/etc/v2ray/config.json
echo "finish sh cmd"
|
<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.common.config;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StreamCorruptedException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.sshd.common.PropertyResolverUtils;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.io.NoCloseInputStream;
import org.apache.sshd.common.util.io.NoCloseReader;
import org.apache.sshd.common.util.net.SshdSocketAddress;
/**
* @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
* @see <a href="https://www.freebsd.org/cgi/man.cgi?query=ssh_config&sektion=5">ssh_config(5)</a>
*/
public final class ConfigFileReaderSupport {
// A '#' starts a comment; a line whose first character is '#' is skipped entirely.
public static final char COMMENT_CHAR = '#';
public static final String COMPRESSION_PROP = "Compression";
public static final String DEFAULT_COMPRESSION = CompressionConfigValue.NO.getName();
public static final String MAX_SESSIONS_CONFIG_PROP = "MaxSessions";
public static final int DEFAULT_MAX_SESSIONS = 10;
public static final String PUBKEY_AUTH_CONFIG_PROP = "PubkeyAuthentication";
public static final String DEFAULT_PUBKEY_AUTH = "yes";
public static final boolean DEFAULT_PUBKEY_AUTH_VALUE = parseBooleanValue(DEFAULT_PUBKEY_AUTH);
public static final String PASSWORD_AUTH_CONFIG_PROP = "PasswordAuthentication";
public static final String DEFAULT_PASSWORD_AUTH = "yes";
public static final boolean DEFAULT_PASSWORD_AUTH_VALUE = parseBooleanValue(DEFAULT_PASSWORD_AUTH);
public static final String KBD_INTERACTIVE_CONFIG_PROP = "KbdInteractiveAuthentication";
public static final String DEFAULT_KBD_INTERACTIVE_AUTH = "yes";
public static final boolean DEFAULT_KBD_INTERACTIVE_AUTH_VALUE = parseBooleanValue(DEFAULT_KBD_INTERACTIVE_AUTH);
public static final String PREFERRED_AUTHS_CONFIG_PROP = "PreferredAuthentications";
public static final String LISTEN_ADDRESS_CONFIG_PROP = "ListenAddress";
public static final String DEFAULT_BIND_ADDRESS = SshdSocketAddress.IPV4_ANYADDR;
public static final String PORT_CONFIG_PROP = "Port";
public static final String KEEP_ALIVE_CONFIG_PROP = "TCPKeepAlive";
public static final boolean DEFAULT_KEEP_ALIVE = true;
public static final String USE_DNS_CONFIG_PROP = "UseDNS";
// NOTE: the usual default is TRUE
public static final boolean DEFAULT_USE_DNS = true;
public static final String AUTH_KEYS_FILE_CONFIG_PROP = "AuthorizedKeysFile";
public static final String MAX_AUTH_TRIES_CONFIG_PROP = "MaxAuthTries";
public static final int DEFAULT_MAX_AUTH_TRIES = 6;
public static final String MAX_STARTUPS_CONFIG_PROP = "MaxStartups";
public static final int DEFAULT_MAX_STARTUPS = 10;
public static final String LOGIN_GRACE_TIME_CONFIG_PROP = "LoginGraceTime";
public static final long DEFAULT_LOGIN_GRACE_TIME = TimeUnit.SECONDS.toMillis(120);
public static final String KEY_REGENERATE_INTERVAL_CONFIG_PROP = "KeyRegenerationInterval";
public static final long DEFAULT_REKEY_TIME_LIMIT = TimeUnit.HOURS.toMillis(1L);
// see http://manpages.ubuntu.com/manpages/precise/en/man5/sshd_config.5.html
public static final String CIPHERS_CONFIG_PROP = "Ciphers";
// see http://manpages.ubuntu.com/manpages/precise/en/man5/sshd_config.5.html
public static final String MACS_CONFIG_PROP = "MACs";
// see http://manpages.ubuntu.com/manpages/precise/en/man5/sshd_config.5.html
public static final String KEX_ALGORITHMS_CONFIG_PROP = "KexAlgorithms";
// see http://linux.die.net/man/5/ssh_config
public static final String HOST_KEY_ALGORITHMS_CONFIG_PROP = "HostKeyAlgorithms";
// see http://manpages.ubuntu.com/manpages/precise/en/man5/sshd_config.5.html
public static final String LOG_LEVEL_CONFIG_PROP = "LogLevel";
public static final LogLevelValue DEFAULT_LOG_LEVEL = LogLevelValue.INFO;
// see https://www.freebsd.org/cgi/man.cgi?query=sshd_config&sektion=5
public static final String SYSLOG_FACILITY_CONFIG_PROP = "SyslogFacility";
public static final SyslogFacilityValue DEFAULT_SYSLOG_FACILITY = SyslogFacilityValue.AUTH;
public static final String SUBSYSTEM_CONFIG_PROP = "Subsystem";
// Utility class - instantiation is forbidden.
private ConfigFileReaderSupport() {
throw new UnsupportedOperationException("No instance");
}
/**
 * Reads an OpenSSH-style configuration file from a filesystem path.
 *
 * @param path    the file to read
 * @param options options passed to {@link Files#newInputStream(Path, OpenOption...)}
 * @return the parsed properties
 * @throws IOException if reading or parsing fails
 */
public static Properties readConfigFile(Path path, OpenOption... options) throws IOException {
try (InputStream input = Files.newInputStream(path, options)) {
return readConfigFile(input, true);
}
}
/**
 * Reads an OpenSSH-style configuration file from a URL (the stream is closed).
 */
public static Properties readConfigFile(URL url) throws IOException {
try (InputStream input = url.openStream()) {
return readConfigFile(input, true);
}
}
/**
 * Reads the configuration from a stream, decoding as UTF-8.
 *
 * @param input     the stream to read
 * @param okToClose whether the stream may be closed when done; when {@code false}
 *                  the stream is wrapped so the close is swallowed
 */
public static Properties readConfigFile(InputStream input, boolean okToClose) throws IOException {
try (Reader reader = new InputStreamReader(
NoCloseInputStream.resolveInputStream(input, okToClose), StandardCharsets.UTF_8)) {
return readConfigFile(reader, true);
}
}
/**
 * Reads the configuration from a character reader.
 *
 * @param reader    the reader to consume
 * @param okToClose whether the reader may be closed when done
 */
public static Properties readConfigFile(Reader reader, boolean okToClose) throws IOException {
try (BufferedReader buf = new BufferedReader(NoCloseReader.resolveReader(reader, okToClose))) {
return readConfigFile(buf);
}
}
/**
 * Reads the configuration file contents into a {@link Properties} instance. <B>Note:</B> multiple keys value are
 * concatenated using a comma - it is up to the caller to know which keys are expected to have multiple values and
 * handle the split accordingly
 *
 * @param rdr The {@link BufferedReader} for reading the file
 * @return The read properties
 * @throws IOException If failed to read or malformed content
 */
public static Properties readConfigFile(BufferedReader rdr) throws IOException {
Properties props = new Properties();
int lineNumber = 1;
for (String line = rdr.readLine(); line != null; line = rdr.readLine(), lineNumber++) {
line = GenericUtils.replaceWhitespaceAndTrim(line);
if (GenericUtils.isEmpty(line)) {
continue;
}
// Whole-line comment: skip; trailing comment: keep only the prefix.
int pos = line.indexOf(COMMENT_CHAR);
if (pos == 0) {
continue;
}
if (pos > 0) {
line = line.substring(0, pos);
line = line.trim();
}
/*
 * Some options use '=', others use ' ' - try both NOTE: we do not validate the format for each option
 * separately
 */
pos = line.indexOf(' ');
if (pos < 0) {
pos = line.indexOf('=');
}
if (pos < 0) {
throw new StreamCorruptedException("No delimiter at line " + lineNumber + ": " + line);
}
String key = line.substring(0, pos);
String value = line.substring(pos + 1).trim();
// see if need to concatenate multi-valued keys
String prev = props.getProperty(key);
if (!GenericUtils.isEmpty(prev)) {
value = prev + "," + value;
}
props.setProperty(key, value);
}
return props;
}
/**
 * @param v Checks if the value is "yes", "y", "on", "t" or
 * "true".
 * @return The result - <B>Note:</B> {@code null}/empty values are interpreted as {@code false}
 * @see PropertyResolverUtils#TRUE_VALUES
 */
public static boolean parseBooleanValue(String v) {
if (GenericUtils.isEmpty(v)) {
return false;
}
return PropertyResolverUtils.TRUE_VALUES.contains(v);
}
/**
 * Returns a "yes" or "no" value based on the input parameter
 *
 * @param flag The required state
 * @return "yes" if {@code true}, "no" otherwise
 */
public static String yesNoValueOf(boolean flag) {
return flag ? "yes" : "no";
}
}
|
#!/bin/bash -e
# shellcheck disable=SC2119,SC1091
# Execute one sub-stage directory (globals: SUB_STAGE_DIR, SCRIPT_DIR,
# STAGE_WORK_DIR, CLEAN). Files named NN-<kind> (NN = 00..99) are processed in
# order: debconf selections, package lists (-nr / -au / plain), quilt patch
# dirs, host-side run scripts, and chroot-side run scripts.
run_sub_stage()
{
log "Begin ${SUB_STAGE_DIR}"
pushd "${SUB_STAGE_DIR}" > /dev/null
for i in {00..99}; do
if [ -f "${i}-debconf" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-debconf"
on_chroot << EOF
debconf-set-selections <<SELEOF
$(cat "${i}-debconf")
SELEOF
EOF
log "End ${SUB_STAGE_DIR}/${i}-debconf"
fi
if [ -f "${i}-packages-nr" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-packages-nr"
PACKAGES="$(sed -f "${SCRIPT_DIR}/remove-comments.sed" < "${i}-packages-nr")"
if [ -n "$PACKAGES" ]; then
on_chroot << EOF
apt-get install --no-install-recommends -y $PACKAGES
EOF
fi
log "End ${SUB_STAGE_DIR}/${i}-packages-nr"
fi
if [ -f "${i}-packages-au" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-packages-au"
PACKAGES="$(sed -f "${SCRIPT_DIR}/remove-comments.sed" < "${i}-packages-au")"
if [ -n "$PACKAGES" ]; then
on_chroot << EOF
apt-get install --allow-unauthenticated -y $PACKAGES
EOF
fi
log "End ${SUB_STAGE_DIR}/${i}-packages-au"
fi
if [ -f "${i}-packages" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-packages"
PACKAGES="$(sed -f "${SCRIPT_DIR}/remove-comments.sed" < "${i}-packages")"
if [ -n "$PACKAGES" ]; then
on_chroot << EOF
apt-get install -y $PACKAGES
EOF
fi
log "End ${SUB_STAGE_DIR}/${i}-packages"
fi
if [ -d "${i}-patches" ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-patches"
pushd "${STAGE_WORK_DIR}" > /dev/null
if [ "${CLEAN}" = "1" ]; then
rm -rf .pc
rm -rf ./*-pc
fi
QUILT_PATCHES="${SUB_STAGE_DIR}/${i}-patches"
# Keep per-sub-stage quilt state in its own dir, with .pc symlinked to it.
SUB_STAGE_QUILT_PATCH_DIR="$(basename "$SUB_STAGE_DIR")-pc"
mkdir -p "$SUB_STAGE_QUILT_PATCH_DIR"
ln -snf "$SUB_STAGE_QUILT_PATCH_DIR" .pc
if [ -e "${SUB_STAGE_DIR}/${i}-patches/EDIT" ]; then
echo "Dropping into bash to edit patches..."
bash
fi
quilt upgrade
# quilt push exit code 2 means "nothing (more) to apply" - treat as success.
RC=0
quilt push -a || RC=$?
case "$RC" in
0|2)
;;
*)
false
;;
esac
popd > /dev/null
log "End ${SUB_STAGE_DIR}/${i}-patches"
fi
# NOTE(review): the two file tests below leave ${i}-run.sh unquoted,
# unlike the tests above - consider quoting for consistency.
if [ -x ${i}-run.sh ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-run.sh"
./${i}-run.sh
log "End ${SUB_STAGE_DIR}/${i}-run.sh"
fi
if [ -f ${i}-run-chroot.sh ]; then
log "Begin ${SUB_STAGE_DIR}/${i}-run-chroot.sh"
on_chroot < ${i}-run-chroot.sh
log "End ${SUB_STAGE_DIR}/${i}-run-chroot.sh"
fi
done
popd > /dev/null
log "End ${SUB_STAGE_DIR}"
}
# Execute one stage directory (global: STAGE_DIR). Sets up the stage work/rootfs
# dirs, queues the stage for image export when EXPORT_IMAGE is present, runs
# prerun.sh and each non-skipped sub-stage, and records PREV_* for the next stage.
run_stage(){
log "Begin ${STAGE_DIR}"
STAGE="$(basename "${STAGE_DIR}")"
pushd "${STAGE_DIR}" > /dev/null
# Make sure nothing is left mounted from a previous run before (re)using it.
unmount "${WORK_DIR}/${STAGE}"
STAGE_WORK_DIR="${WORK_DIR}/${STAGE}"
ROOTFS_DIR="${STAGE_WORK_DIR}"/rootfs
if [ ! -f SKIP_IMAGES ]; then
if [ -f "${STAGE_DIR}/EXPORT_IMAGE" ]; then
EXPORT_DIRS="${EXPORT_DIRS} ${STAGE_DIR}"
fi
fi
if [ ! -f SKIP ]; then
if [ "${CLEAN}" = "1" ]; then
if [ -d "${ROOTFS_DIR}" ]; then
rm -rf "${ROOTFS_DIR}"
fi
fi
if [ -x prerun.sh ]; then
log "Begin ${STAGE_DIR}/prerun.sh"
./prerun.sh
log "End ${STAGE_DIR}/prerun.sh"
fi
# Sub-stages run in directory order; a SKIP file disables one.
for SUB_STAGE_DIR in ${STAGE_DIR}/*; do
if [ -d "${SUB_STAGE_DIR}" ] &&
[ ! -f "${SUB_STAGE_DIR}/SKIP" ]; then
run_sub_stage
fi
done
fi
unmount "${WORK_DIR}/${STAGE}"
# Exported so the following stage can layer on top of this one's rootfs.
PREV_STAGE="${STAGE}"
PREV_STAGE_DIR="${STAGE_DIR}"
PREV_ROOTFS_DIR="${ROOTFS_DIR}"
popd > /dev/null
log "End ${STAGE_DIR}"
}
if [ "$(id -u)" != "0" ]; then
echo "Please run as root" 1>&2
exit 1
fi
if [ -f config ]; then
source config
fi
if [ -z "${IMG_NAME}" ]; then
echo "IMG_NAME not set" 1>&2
exit 1
fi
export USE_QEMU="${USE_QEMU:-0}"
export IMG_DATE="${IMG_DATE:-"$(date +%Y-%m-%d)"}"
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
export SCRIPT_DIR="${BASE_DIR}/scripts"
export WORK_DIR="${WORK_DIR:-"${BASE_DIR}/work/${IMG_DATE}-${IMG_NAME}"}"
export DEPLOY_DIR=${DEPLOY_DIR:-"${BASE_DIR}/deploy"}
export LOG_FILE="${WORK_DIR}/build.log"
export BASE_DIR
export CLEAN
export IMG_NAME
export APT_PROXY
export STAGE
export STAGE_DIR
export STAGE_WORK_DIR
export PREV_STAGE
export PREV_STAGE_DIR
export ROOTFS_DIR
export PREV_ROOTFS_DIR
export IMG_SUFFIX
export NOOBS_NAME
export NOOBS_DESCRIPTION
export EXPORT_DIR
export EXPORT_ROOTFS_DIR
export QUILT_PATCHES
export QUILT_NO_DIFF_INDEX=1
export QUILT_NO_DIFF_TIMESTAMPS=1
export QUILT_REFRESH_ARGS="-p ab"
# shellcheck source=scripts/common
source "${SCRIPT_DIR}/common"
# shellcheck source=scripts/dependencies_check
source "${SCRIPT_DIR}/dependencies_check"
dependencies_check "${BASE_DIR}/depends"
mkdir -p "${WORK_DIR}"
log "Begin ${BASE_DIR}"
for STAGE_DIR in "${BASE_DIR}/stage"*; do
run_stage
done
CLEAN=1
for EXPORT_DIR in ${EXPORT_DIRS}; do
STAGE_DIR=${BASE_DIR}/export-image
# shellcheck source=/dev/null
source "${EXPORT_DIR}/EXPORT_IMAGE"
EXPORT_ROOTFS_DIR=${WORK_DIR}/$(basename "${EXPORT_DIR}")/rootfs
run_stage
if [ "${USE_QEMU}" != "1" ]; then
if [ -e "${EXPORT_DIR}/EXPORT_NOOBS" ]; then
# shellcheck source=/dev/null
source "${EXPORT_DIR}/EXPORT_NOOBS"
STAGE_DIR="${BASE_DIR}/export-noobs"
run_stage
fi
fi
done
if [ -x postrun.sh ]; then
log "Begin postrun.sh"
cd "${BASE_DIR}"
./postrun.sh
log "End postrun.sh"
fi
log "End ${BASE_DIR}"
|
import RPi.GPIO as GPIO
import time
# Set GPIO pin as an output type.
def setupValve():
    """Configure the solenoid-valve GPIO pin as an output, driven low (closed).

    Returns:
        int: the BOARD-numbered pin controlling the valve.
    """
    # BOARD numbering: 22 is the physical header position, not the BCM number.
    pin = 22
    GPIO.setmode(GPIO.BOARD)
    GPIO.setup(pin, GPIO.OUT)
    GPIO.output(pin, GPIO.LOW)
    print("Valve initialized.")
    return pin
# Perform valve functionality test
# You should hear an audible click
# when the solenoid triggers
def runTest():
    """Cycle the valve open/closed five times, then release the GPIO pins.

    Side effects: configures the GPIO, energizes the solenoid 5 times for
    ~1 second each, and calls GPIO.cleanup() so pins are freed afterwards.
    """
    valve = setupValve()
    count = 0
    print("Starting Test")
    while (count < 5):
        GPIO.output(valve, GPIO.HIGH)
        time.sleep(1)
        GPIO.output(valve, GPIO.LOW)
        count += 1
    print("Test complete.")
    GPIO.cleanup()
    print("Clean exit.")
# Main function to pour beer
def pourBeer(valve=None):
    """Open the valve for a fixed 5-second pour, then close it.

    Args:
        valve: GPIO pin of an already-initialized valve. If None, the valve is
            initialized via setupValve(). (Fix: the original accepted a valve
            argument but immediately overwrote it with setupValve(), so a
            caller-supplied pin was silently ignored.)
    """
    if valve is None:
        valve = setupValve()
    GPIO.output(valve, GPIO.HIGH)
    # Need to test exact time in order to pour beer perfectly
    print("Pouring a cold one...")
    time.sleep(5)
    print("Beer poured. Stay thirsty my friend!")
    GPIO.output(valve, GPIO.LOW)
|
//
// ZYCycleFlowLayout.h
// Investank
//
// Created by 史泽东 on 2019/1/14.
// Copyright © 2019 史泽东. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
/// Flow layout subclass used by a cyclic (carousel-style) collection view.
/// NOTE(review): behavior lives in the .m, which is not visible here - the
/// purpose above is inferred from the class name; confirm against the
/// implementation before relying on it.
@interface ZYCycleFlowLayout : UICollectionViewFlowLayout
@end
NS_ASSUME_NONNULL_END
|
<filename>signalFile_test.go
package main
import (
"golang.org/x/exp/inotify"
"os"
"testing"
"time"
)
// Test file exists
// NOTE(review): os.NewFile(0, name) wraps fd 0 (stdin) purely to carry the
// file name; presumably only File.Name() is used by Touch/Exists/Remove -
// confirm against the signalFile implementation.
func TestExistsTrue(test *testing.T) {
testFile := new(signalFile)
testFile.File = os.NewFile(0, "./testFileExistsTrue.file")
// Create
testFile.Touch()
if !testFile.Exists() {
test.Fatal("Got file doesn't exist, but it does exist")
}
// Cleanup
testFile.Remove()
}
// Test file does not exist
func TestExistsFalse(test *testing.T) {
testFile := new(signalFile)
testFile.File = os.NewFile(0, "./testFileExistsFalse.file")
// Remove any existing
testFile.Remove()
if testFile.Exists() {
test.Fatal("Got file exists, but it does not exist")
}
}
// Test new file creation
func TestTouchNew(test *testing.T) {
testFile := new(signalFile)
testFile.File = os.NewFile(0, "./testFileTouchNew.file")
// Remove any existing
testFile.Remove()
// Create
testFile.Touch()
// Verify directly on the filesystem rather than through Exists().
if _, err := os.Stat(testFile.File.Name()); err != nil {
test.Fatal("File was not created")
}
// Cleanup
testFile.Remove()
}
// Test existing file update: Touch() on an existing file must bump its mtime.
func TestTouchExisting(test *testing.T) {
testFile := new(signalFile)
testFile.File = os.NewFile(0, "./testFileTouch.file")
// Create
testFile.Touch()
// Test
file, err := os.Stat(testFile.File.Name())
if err != nil {
test.Fatal(err)
}
mtime := file.ModTime()
// Sleep past typical filesystem timestamp granularity before re-touching.
time.Sleep(500 * time.Millisecond)
testFile.Touch()
file, err = os.Stat(testFile.File.Name())
if err != nil {
test.Fatal(err)
}
if file.ModTime() == mtime {
test.Fatalf("Mtime was not updated, mtime: %s file.ModTime(): %s", mtime, file.ModTime())
} else {
test.Logf("Mtime was updated from %s to %s", mtime, file.ModTime())
}
// Cleanup
testFile.Remove()
}
// Test Remove: after Remove() the file must no longer stat successfully.
func TestRemove(test *testing.T) {
testFile := new(signalFile)
testFile.File = os.NewFile(0, "./testFileRemove.file")
testFile.Touch()
testFile.Remove()
if _, err := os.Stat(testFile.File.Name()); err == nil {
test.Fatal("File was not removed")
}
}
// Test file creation - inotify.IN_CLOSE_WRITE
// WaitForSignal must deliver on testFile.Channel within 1s of the file being
// written. NOTE(review): golang.org/x/exp/inotify has been removed from x/exp
// upstream - consider fsnotify if this still needs to build on modern Go.
func TestWaitForSignalCreate(test *testing.T) {
testFile := new(signalFile)
testFile.File = os.NewFile(0, "./testFileCreate.file")
testFile.Signal = inotify.IN_CLOSE_WRITE
testFile.Channel = make(chan bool)
defer close(testFile.Channel)
// Ensure file doesn't exist
testFile.Remove()
// Setup watch
go testFile.WaitForSignal()
// Setup timeout
timeout := make(chan bool)
defer close(timeout)
go func() {
time.Sleep(1 * time.Second)
timeout <- true
}()
// Create file
testFile.Touch()
// Setup channel receive
select {
case <-testFile.Channel:
test.Logf("Received create signal for test file %s", testFile.File.Name())
case <-timeout:
test.Fatalf("Received no create signal after 1 second for test file %s", testFile.File.Name())
}
// Cleanup
testFile.Remove()
}
// Test file deletion - inotify.IN_DELETE
// Mirror of TestWaitForSignalCreate: the watch must fire within 1s of Remove().
func TestWaitForSignalDelete(test *testing.T) {
testFile := new(signalFile)
testFile.File = os.NewFile(0, "./testFileDelete.file")
testFile.Signal = inotify.IN_DELETE
testFile.Channel = make(chan bool)
defer close(testFile.Channel)
// Create file
testFile.Touch()
// Setup watch
go testFile.WaitForSignal()
// Setup timeout
timeout := make(chan bool)
defer close(timeout)
go func() {
time.Sleep(1 * time.Second)
timeout <- true
}()
// Remove file
testFile.Remove()
// Setup channel receive
select {
case <-testFile.Channel:
test.Logf("Received remove signal for test file %s", testFile.File.Name())
case <-timeout:
test.Fatalf("Received no remove signal after 1 second for test file %s", testFile.File.Name())
}
}
|
<reponame>Skyhark-Projects/golang-bic-from-iban<filename>bic/banks.go
package bic
// Bank is one entry of the BIC lookup table.
// NOTE(review): Start/End are presumably an inclusive bank-code range used by
// the IBAN-to-BIC mapping - confirm against the data generator; only Swift is
// exercised by the code visible in this file.
type Bank struct {
Country string
City string
Start int
End int
Name string
Swift string
}
// banks is the lookup table; empty here, presumably populated by generated
// data elsewhere in the package.
var banks = []Bank{}
func GetSwiftBank(swift string) *Bank {
for _, bank := range banks {
if bank.Swift == swift {
return &bank
}
}
return nil
} |
<filename>src/scripts/drawables/ui/LevelName.ts
import { UiDepths, UI_SCALE } from '../../helpers/constants';
import globalState from '../../worldstate/index';
// Screen-space anchor (pre-UI_SCALE) for the level-name label.
const X_POSITION = 12;
const Y_POSITION = 146;
// True when a map tile should be treated as visible: non-null, tinted above
// near-black, and carrying a real tile index.
// NOTE(review): this helper is not referenced anywhere in this file - it looks
// copied from the Minimap implementation; confirm before removing.
const isTileVisible = (tile: Phaser.Tilemaps.Tile) => {
// tslint:disable-next-line: no-magic-numbers
return tile && tile.tint > 0x010101 && tile.index % 1000 > -1;
};
/**
 * Screen-fixed text label showing the current level's title, looked up from
 * globalState.roomAssignment by the current level key (empty when unknown).
 * NOTE(review): the class is named Minimap inside LevelName.ts - presumably a
 * copy-paste leftover; renaming to LevelName would match the file (it is the
 * default export, so importers are unaffected).
 */
export default class Minimap extends Phaser.GameObjects.Text {
constructor(scene: Phaser.Scene) {
super(scene, X_POSITION * UI_SCALE, Y_POSITION * UI_SCALE, '', {
color: 'white',
fontSize: `${6 * UI_SCALE}pt`,
fontFamily: 'endlessDungeon',
});
// Pin to the camera (scroll factor 0) and anchor at the top-left corner.
this.setScrollFactor(0);
scene.add.existing(this);
this.setOrigin(0);
this.setText(globalState.roomAssignment[globalState.currentLevel]?.title || '');
this.setDepth(UiDepths.UI_BACKGROUND_LAYER);
}
}
|
import java.util.Arrays;
public class Videotest extends MiniJava {
// ANSI escape sequences for coloring console test output (red = failure,
// green = success, reset restores the default color).
public static final String ANSI_RED = "\u001B[31m";
public static final String ANSI_GREEN = "\u001B[32m";
public static final String ANSI_RESET = "\u001B[0m";
/** Prints a failure message framed by a red banner on the console. */
public static void fehler(String meldung) {
System.out.println(ANSI_RED + "%%%%%%%%%%%% Fehler %%%%%%%%%%%%%%%");
System.out.println(meldung);
System.out.println("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%" + ANSI_RESET);
}
/** Prints a success message in green on the console. */
public static void korrekt(String meldung) {
System.out.println(ANSI_GREEN + meldung + ANSI_RESET);
}
// Renders a Video as "Video{titel='...', id=..., genres=[...]}".
// NOTE(review): the "id=" field prints v.getTitel() a second time - almost
// certainly meant to call the Video's id accessor instead; the Video class is
// not visible here, so confirm its API and fix.
public static String printVideo(Video v) {
return "Video{" + "titel='" + v.getTitel() + '\'' + ", id=" + v.getTitel() + ", genres="
+ Arrays.toString(v.getGenres()) + '}';
}
public static String printVideosammlung(Videosammlung vhs) {
String s = "";
Video[] videos = vhs.getVideos();
for (int i = 0; i < videos.length; i++) {
if (videos[i] == null) {
s += "[_____]\n";
} else {
s += "[" + printVideo(videos[i]) + "]\n";
}
}
return s;
}
// Mainmethod zum testen der Video und Videosammlung Klassen
public static void main(String[] args) {
String titel;
// for (int i = 0; i < 2; i++) {
// titel = readString("Was ist der Titel des " + (i + 1) + ". Video?\n");
// int n;
// do {
// n = read("Wieviele Genres wollen Sie zu " + titel + " hinzufügen [1-5].");
// } while(n < 1 || n > 5);
// String[] genres = new String[n];
// for (int j = 0; j < n; j++) {
// genres[j] = readString("Bitte geben Sie das " + (j + 1) + ". Genre für " + titel + " ein.");
// }
//
// Video v = new Video(titel);
// for (int j = 0; j < n; j++) {
// v.addGenre(genres[j]);
// }
// writeLineConsole(printVideo(v));
// }
// Testen der Videosammlunng Klasse
// testProgram ob ich die korrekte Anzahl einfügen kann
Videosammlung vhs = new Videosammlung(20);
for (int i = 0; i < 20; i++) {
Video v = new Video("titel-" + i);
for (int j = 0; j < (i % 5); j++) {
v.addGenre("g-" + (j + 1));
}
int result = vhs.addVideo(v);
if (result == -1) {
fehler("Das " + i + ". Video konnte nicht in die Videosammlung aufgenommen werden.");
}
}
korrekt("Die ersten 20 Videos wurden erfolgreich aufgenommen in die Sammlung.");
writeLineConsole(printVideosammlung(vhs));
Video v = new Video("Darf nicht reinpassen");
int result = vhs.addVideo(v);
if (result != -1) {
fehler("Das 21. Video wurde aufgenommen, ob wohl kein Platz mehr sein sollte.");
} else {
korrekt("Das 21. Video wurde korrekt abgelehnt, nicely done.");
}
// Tests für die Verkaufen Methode
// Das 21. Video darf nicht gefunden werden
v = vhs.verkaufen(21);
if (v != null) {
fehler("Das 21. Video existiert, ob wohl nur für 20 Platz ist...");
} else {
korrekt("Das 21. Video wurde korrekterweise nicht gefunden.");
}
v = new Video("Darf nicht reinpassen");
v.addGenre("genre-testProgram");
result = vhs.addVideo(v);
if (result != -1) {
fehler("Das 21. Video wurde aufgenommen, ob wohl kein Platz mehr sein sollte.");
} else {
korrekt("Das 21. Video wurde korrekt abgelehnt, nicely done.");
}
v = vhs.verkaufen("Darf nicht reinpassen");
if (v != null) {
fehler("Das 21. Video existiert, ob wohl nur für 20 Platz ist...");
} else {
korrekt("Das 21. Video wurde korrekterweise nicht gefunden.");
}
// einige videos verkaufen und prüfen ob sie wirklich verschwunden sind
vhs.verkaufen(5);
vhs.verkaufen(0);
vhs.verkaufen(11);
if (vhs.verkaufen("titel-11") == null) {
korrekt("Das korrekte Video wurde verkauft :).");
} else {
fehler("Das falsche Video wurde verkauft.");
}
if (vhs.verkaufen("titel-5") == null) {
korrekt("Das korrekte Video wurde verkauft :).");
} else {
fehler("Das falsche Video wurde verkauft.");
}
if (vhs.verkaufen("titel-0") == null) {
korrekt("Das korrekte Video wurde verkauft :).");
} else {
fehler("Das falsche Video wurde verkauft.");
}
if (vhs.verkaufen(11) == null) {
korrekt("Das korrekte Video wurde verkauft :).");
} else {
fehler("Das falsche Video wurde verkauft.");
}
vhs.verkaufen("titel-7");
if (vhs.verkaufen(7) == null) {
korrekt("Das korrekte Video wurde verkauft :).");
} else {
fehler("Das falsche Video wurde verkauft.");
}
// verbleibende testen
if (vhs.getVerbleibende() == 4) {
korrekt("Es ist die korrekte Anzahl an verbleibenden Plätzen.");
} else {
fehler("Es ist die inkorrekte Anzahl an verbleibenden Plätzen.");
}
//
String[] titelVergleich = new String[] {"titel-4", "titel-9", "titel-14", "titel-19"};
String[] videos = vhs.videosInGenre("g-4");
if (Arrays.equals(videos, titelVergleich)) {
korrekt("Es werden die richtigen Titel entsprechend des Genres ausgegeben.");
} else {
fehler("Es werden die falschen Titel zurück gegeben.");
}
}
}
|
<reponame>scenarioo/scenarioo-js
import fs from 'fs';
import assert from 'assert';
import isArray from 'lodash/isArray';
import Q from 'q';
/**
 * Reads the file at filePath, flattens it (strips leading indentation and all
 * newlines/tabs), and asserts it contains every expected snippet.
 * Accepts a single snippet or an array of snippets; returns a Q promise.
 */
function assertXmlContent(filePath, expectedContents) {
  const snippets = Array.isArray(expectedContents) ? expectedContents : [expectedContents];
  return Q.nfcall(fs.readFile, filePath, 'utf-8')
    .then(raw => {
      // Drop indentation at each line start, then remove every line break and tab.
      const flattened = raw
        .replace(/^[ \t]*/gm, '')
        .replace(/(?:\r\n|\r|\n|\t)/g, '');
      for (const snippet of snippets) {
        assert(flattened.indexOf(snippet) > -1, 'Given xml is expected to contain "' + snippet + '"\n' + flattened);
      }
    });
}
// Resolves with the fs.stat result when the file exists; the returned Q
// promise rejects (failing the caller's assertion chain) when it does not.
function assertFileExists(filePath) {
return Q.nfcall(fs.stat, filePath);
}
// Public API of this assertion-helper module.
export default {
  assertXmlContent,
  assertFileExists,
};
|
def filter_data(data, criteria):
    """Return the items of ``data`` whose value for a key matches a target.

    Args:
        data: iterable of dict-like objects (indexable by ``criteria[0]``).
        criteria: two-element sequence ``(key, expected_value)``.

    Returns:
        list: the matching items, in their original order.

    Raises:
        KeyError: if an item lacks the requested key (unchanged from the
            original behavior).
    """
    # Unpack once for clarity; a comprehension replaces the manual
    # append-in-a-loop accumulation.
    key, expected = criteria
    return [obj for obj in data if obj[key] == expected]
# Demo: keep only the people whose age is exactly 20 and print the result.
people = [{'name':'John','age':23}, {'name':'Jane','age':27},{'name':'Adam','age':20}]
filtered_people = filter_data(people, ['age', 20])
print(filtered_people)
<filename>uva/00614.cc
// https://uva.onlinejudge.org/external/6/614.pdf
#include<bits/stdc++.h>
using namespace std;
using vi=vector<int>;
using vvi=vector<vi>;
// UVa 614: DFS from cell (a,b) to (c,d) in a walled maze, numbering the cells
// on the successful path, marking dead ends, then printing the maze as ASCII.
//
// Wall encoding per cell e[i][j] (grounded in the movement checks below):
//   bit value 1 -> wall on the EAST side of the cell
//   bit value 2 -> wall on the SOUTH side of the cell
// Path array f[i][j]: 0 = unvisited, >0 = step number on the found path,
// -1 = visited dead end (printed as "???").
int main(){
// One maze per iteration; t counts mazes for the "Maze %d" header.
for(int t=0;;t++){
int n,m,a,b,c,d;
// n x m grid, entry (a,b), exit (c,d) -- converted to 0-based just below.
cin>>n>>m>>a>>b>>c>>d;
if(!n)break;
a--;b--;c--;d--;
vvi e(n,vi(m)),f(n,vi(m));
for(int i=0;i<n;i++)
for(int j=0;j<m;j++)
cin>>e[i][j];
// Recursive DFS: stamp the current cell with step number k, try the four
// neighbours, and on total failure re-stamp the cell as -1 (dead end).
function<bool(int,int,int)>dfs=[&](int i,int j,int k){
f[i][j]=k;
if(i==c&&j==d)return 1;
// Direction order l = 0..3: west, north, east, south.
int y[]={0,-1,0,1};
int x[]={-1,0,1,0};
for(int l=0;l<4;l++){
// West/north moves consult the neighbour's east/south wall bit (the cell
// that "owns" the shared edge); east/south moves consult this cell's bits.
if(l==0&&(!j||e[i][j-1]&1))continue;
else if(l==1&&(!i||e[i-1][j]&2))continue;
else if(l==2&&(e[i][j]&1))continue;
else if(l==3&&(e[i][j]&2))continue;
int u=i+y[l],v=j+x[l];
if(u>=0&&u<n&&v>=0&&v<m&&!f[u][v])
if(dfs(u,v,k+1))
return 1;
}
f[i][j]=-1;
return 0;
};
dfs(a,b,1);
// Render: for each row, first the horizontal wall line ("+---" above cells
// whose upper neighbour has a south wall, and always along the top border),
// then the cell line with vertical walls and step numbers / "???" / blanks.
printf("Maze %d\n\n",t+1);
for(int i=0;i<n;i++){
for(int j=0;j<m;j++)
if(!i||(e[i-1][j]&2))cout<<"+---";
else cout<<"+ ";
cout<<"+\n";
for(int j=0;j<m;j++){
if(!j||e[i][j-1]&1)cout<<"|";
else cout<<" ";
if(f[i][j]>0)printf("% 3d",f[i][j]);
else if(f[i][j]<0)printf("???");
else printf(" ");
}
cout<<"|\n";
}
// Bottom border, then the blank lines the judge expects between mazes.
for(int j=0;j<m;j++)cout<<"+---";
cout<<"+\n\n\n";
}
}
|
#!/usr/bin/env bash
#
# This file is part of PHP CS Fixer (https://github.com/FriendsOfPHP/PHP-CS-Fixer).
#
# Copyright (c) 2012-2019 Fabien Potencier
# Dariusz Rumiński
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Fail fast: exit on errors, unset variables, and failed pipeline members;
# -E lets ERR traps fire inside functions and subshells too.
set -Eeuo pipefail
# Collect "file:line:content" records for every line ending in whitespace,
# skipping caches, logs, build output, VCS metadata, vendored code, and the
# fixtures/tests that intentionally contain trailing whitespace.
files_with_trailing_whitespaces=$(
find . \
-type f \
-not -name "*.cache" \
-not -name "*.log" \
-not -path "./.composer/*" \
-not -path "./build/*" \
-not -path "./.git/*" \
-not -path "./vendor/*" \
-not -path "./tests/e2e/*" \
-not -path "./tests/phpunit/Fixtures/Files/phpunit/format-whitespace/original-phpunit.xml" \
-not -path "./tests/phpunit/StringNormalizerTest.php" \
-not -path "./tests/phpunit/StrTest.php" \
-exec grep -EIHn "\\s$" {} \;
)
# Any match: print a red banner, then reformat each grep record into a
# human-readable "in <file> at line <n>" message with the offending trailing
# run highlighted (red background), and fail the build.
if [[ "$files_with_trailing_whitespaces" ]]
then
printf '\033[97;41mTrailing whitespaces detected:\033[0m\n';
e=$(printf '\033');
echo "${files_with_trailing_whitespaces}" \
| sed -E "s/^\\.\\/([^:]+):([0-9]+):(.*[^\\t ])?([\\t ]+)$/${e}[0;31m - in ${e}[0;33m\\1${e}[0;31m at line ${e}[0;33m\\2\\n ${e}[0;31m>${e}[0m \\3${e}[41;1m\\4${e}[0m/";
exit 1;
fi
printf '\033[0;32mNo trailing whitespaces detected.\033[0m\n';
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.