text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Build the JRamCloud JNI bridge: compile the Java classes if needed, generate
# the JNI header with javah, then compile/link the shared library against a
# RAMCloud checkout.
#
# Fixes over the original: all path variables are quoted (paths with spaces no
# longer break word-splitting) and `...` command substitution is replaced by
# the nestable $(...) form.

if [ -z "${JAVA_HOME}" ]; then
    echo "JAVA_HOME not set"
    # Prefer the Oracle JDK directory unless the installed java is OpenJDK.
    OPENJDK=$(java -version 2>&1 | grep OpenJDK)
    if [ -z "${OPENJDK}" -a -d "/usr/lib/jvm/java-7-oracle" ]; then
        JAVA_HOME=/usr/lib/jvm/java-7-oracle
    else
        JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64
    fi
    echo "Assuming JAVA_HOME=${JAVA_HOME}"
fi

# fail on command error
set -e
# echo back each command
set -x

JAVA_HOME=${JAVA_HOME:-/usr/lib/jvm/java-7-oracle}
export ONOS_HOME=${ONOS_HOME:-$(cd "$(dirname "$0")"; cd ..; pwd)}
export RAMCLOUD_HOME=${RAMCLOUD_HOME:-~/ramcloud}
RAMCLOUD_BRANCH=${RAMCLOUD_BRANCH:-master}

# Compile only when the target class is missing (cheap incremental check).
if [ ! -e "${ONOS_HOME}/target/classes/edu/stanford/ramcloud/JRamCloud.class" ]; then
    mvn -f "${ONOS_HOME}/pom.xml" compile -T 1C
fi

# Generate the JNI header for the Java binding class.
javah -cp "${ONOS_HOME}/target/classes" -o "${ONOS_HOME}/src/main/cpp/edu_stanford_ramcloud_JRamCloud.h" edu.stanford.ramcloud.JRamCloud

# Build the native bridge library against the RAMCloud headers/objects.
g++ -g -Wall -O3 -shared -fPIC -std=c++0x -I"${JAVA_HOME}/include/" -I"${JAVA_HOME}/include/linux" -I"${RAMCLOUD_HOME}/src/" -I"${RAMCLOUD_HOME}/obj.${RAMCLOUD_BRANCH}/" -I"${RAMCLOUD_HOME}/logcabin/" -I"${RAMCLOUD_HOME}/gtest/include/" -L"${RAMCLOUD_HOME}/obj.${RAMCLOUD_BRANCH}" -o "${ONOS_HOME}/lib/libedu_stanford_ramcloud_JRamCloud.so" "${ONOS_HOME}/src/main/cpp/edu_stanford_ramcloud_JRamCloud.cc" -lramcloud
|
package main.methodNoLongerThrowsCheckedException;
/**
 * Test fixture interface whose single method used to declare a checked
 * exception in an earlier revision; the name records that the {@code throws}
 * clause has been removed (presumably for a binary/source-compatibility
 * test — confirm against the test harness that consumes this package).
 */
public interface IMethodNoLongerThrowsCheckedException {
    /** Returns an int; no longer declares any checked exception. */
    int noLongerThrowsExcep();
}
|
<reponame>vladfr/gosaas<filename>client/src/auth/authGuard.ts
import { getInstance } from './auth'
import { NavigationGuard } from 'vue-router'
export const authGuard: NavigationGuard = (to, from, next) => {
const authService = getInstance()
const fn = () => {
// Unwatch loading
/*eslint no-use-before-define: off */
/*eslint @typescript-eslint/no-use-before-define: off */
unwatch && unwatch()
// If the user is authenticated, continue with the route
if (authService.isAuthenticated) {
return next()
}
// Otherwise, log in
authService.loginWithRedirect({ appState: { targetUrl: to.fullPath } })
}
// If loading has already finished, check our auth state using `fn()`
if (!authService.loading) {
return fn()
}
// Watch for the loading property to change before we check isAuthenticated
const unwatch = authService.$watch('loading', (loading: boolean) => {
if (loading === false) {
return fn()
}
})
} |
/**
 * Returns the longest common subsequence (LCS) of [str1] and [str2] using the
 * classic O(n*m) dynamic-programming table followed by a traceback.
 *
 * `table[i][j]` holds the LCS length of the prefixes `str1[0 until i]` and
 * `str2[0 until j]`; row/column 0 stay zero as the empty-prefix base case.
 * When several LCS of equal length exist, the traceback prefers moving up
 * (`i--`) on ties, matching the original implementation's tie-break.
 *
 * Returns the empty string when either input is empty or the strings share
 * no characters.
 */
fun longestCommonSubsequence(str1: String, str2: String): String {
    val table = Array(str1.length + 1) { IntArray(str2.length + 1) }
    for (i in 1..str1.length) {
        for (j in 1..str2.length) {
            table[i][j] = if (str1[i - 1] == str2[j - 1]) {
                table[i - 1][j - 1] + 1
            } else {
                kotlin.math.max(table[i][j - 1], table[i - 1][j])
            }
        }
    }
    // Trace back from the bottom-right corner, collecting matched characters
    // in reverse order.
    var i = str1.length
    var j = str2.length
    val result = StringBuilder()
    while (i > 0 && j > 0) {
        when {
            str1[i - 1] == str2[j - 1] -> {
                result.append(str1[i - 1])
                i--
                j--
            }
            table[i - 1][j] > table[i][j - 1] -> i--
            else -> j--
        }
    }
    return result.reverse().toString()
}
// Example usage (the original had a bare top-level `println(...)` here that
// referenced undefined `str1`/`str2` and would not compile):
//   longestCommonSubsequence("ABAZDC", "BACBAD")  // -> "ABAD"
<filename>app/src/main/java/nigelhenshaw/com/cameraintenttutorial/CamaraIntentActivity.java
package nigelhenshaw.com.cameraintenttutorial;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.provider.MediaStore;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.util.Log;
import android.util.LruCache;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.Toast;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
/**
 * Camera2-based capture activity: shows a live preview on a {@link TextureView},
 * locks auto-focus when {@link #takePhoto(View)} is tapped, captures a still
 * JPEG via an {@link ImageReader}, saves it on a background thread into a
 * public "image gallery" folder, and finally launches {@code RenameActivity}
 * with the file location.
 *
 * NOTE(review): the misspelled class name ("Camara") is kept — it is likely
 * referenced from the manifest and layout resources.
 */
public class CamaraIntentActivity extends Activity {
    // Request code for the legacy external-camera-app intent path (now only
    // used by the commented-out code in takePhoto/onActivityResult).
    private static final int ACTIVITY_START_CAMERA_APP = 0;
    // Capture-callback states: normal preview vs. waiting for an AF lock
    // before taking the still image.
    private static final int STATE_PREVIEW = 0;
    private static final int STATE__WAIT_LOCK = 1;
    private int mState = STATE_PREVIEW;
    private ImageView mPhotoCapturedImageView;
    // Absolute path of the most recently created image file (set by
    // createImageFile, consumed by nextIntent).
    private String mImageFileLocation = "";
    private String GALLERY_LOCATION = "image gallery";
    private File mGalleryFolder;
    // NOTE(review): initialized in onCreate() but never read anywhere in this
    // class — looks like leftover bitmap-cache scaffolding.
    private static LruCache<String, Bitmap> mMemoryCache;
    private Size mPreviewSize;
    private String mCameraId;
    private TextureView mTextureView;

    // Opens the camera as soon as the TextureView's surface becomes available
    // (the onResume path when the surface is not yet ready).
    private TextureView.SurfaceTextureListener mSurfaceTextureListener =
            new TextureView.SurfaceTextureListener() {
                @Override
                public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
                    setupCamera(width, height);
                    openCamera();
                }
                @Override
                public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
                }
                @Override
                public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
                    return false;
                }
                @Override
                public void onSurfaceTextureUpdated(SurfaceTexture surface) {
                }
            };

    private CameraDevice mCameraDevice;
    // Starts the preview session once the camera device is opened; releases
    // the device on disconnect/error.
    private CameraDevice.StateCallback mCameraDeviceStateCallback
            = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            mCameraDevice = camera;
            createCameraPreviewSession();
            // Toast.makeText(getApplicationContext(), "Camera Opened!", Toast.LENGTH_SHORT).show();
        }
        @Override
        public void onDisconnected(CameraDevice camera) {
            camera.close();
            mCameraDevice = null;
        }
        @Override
        public void onError(CameraDevice camera, int error) {
            camera.close();
            mCameraDevice = null;
        }
    };

    private CaptureRequest mPreviewCaptureRequest;
    private CaptureRequest.Builder mPreviewCaptureRequestBuilder;
    private CameraCaptureSession mCameraCaptureSession;
    // Drives the small state machine: while in STATE__WAIT_LOCK, waits for the
    // AF state to report "focused and locked" and then captures a still image.
    private CameraCaptureSession.CaptureCallback mSessionCaptureCallback
            = new CameraCaptureSession.CaptureCallback() {
        private void process(CaptureResult result) {
            switch (mState) {
                case STATE_PREVIEW:
                    // Do nothing
                    break;
                case STATE__WAIT_LOCK:
                    // NOTE(review): afState may be null on some devices, and
                    // devices without AF may never report FOCUSED_LOCKED
                    // (only AF_STATE_INACTIVE) — confirm on target hardware.
                    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                    if (afState == CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED) {
                        /*
                        unLockFocus();
                        Toast.makeText(getApplicationContext(), "Focus Lock Successful", Toast.LENGTH_SHORT).show();
                        */
                        captureStillImage();
                    }
                    break;
            }
        }
        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
            super.onCaptureStarted(session, request, timestamp, frameNumber);
        }
        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
            super.onCaptureCompleted(session, request, result);
            process(result);
        }
        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
            super.onCaptureFailed(session, request, failure);
            Toast.makeText(getApplicationContext(), "Focus Lock Unsuccessful", Toast.LENGTH_SHORT).show();
        }
    };

    // Background thread/handler used for camera callbacks and image saving.
    private HandlerThread mBackgroundThread;
    private Handler mBackgroundHandler;
    // Destination file for the next capture; static so the static ImageSaver
    // can reach it. NOTE(review): shared static state — only safe while a
    // single capture is in flight at a time.
    private static File mImageFile;
    private ImageReader mImageReader;
    // Saves each captured JPEG off the UI thread as soon as it is available.
    private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage()));
                }
            };

    /**
     * Writes the single JPEG plane of an {@link Image} to {@link #mImageFile},
     * always closing the image and the stream (the Image must be closed or the
     * ImageReader's buffer is leaked).
     */
    private static class ImageSaver implements Runnable {
        private final Image mImage;
        private ImageSaver(Image image) {
            mImage = image;
        }
        @Override
        public void run() {
            // JPEG images carry their whole payload in plane 0.
            ByteBuffer byteBuffer = mImage.getPlanes()[0].getBuffer();
            byte[] bytes = new byte[byteBuffer.remaining()];
            byteBuffer.get(bytes);
            FileOutputStream fileOutputStream = null;
            try {
                fileOutputStream = new FileOutputStream(mImageFile);
                fileOutputStream.write(bytes);
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                mImage.close();
                if (fileOutputStream != null) {
                    try {
                        fileOutputStream.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Full-screen, portrait-only, keep-screen-on capture UI.
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_camara_intent);
        createImageGallery();
        // Size the (currently unused) bitmap cache to 1/10 of the max heap,
        // measured in kilobytes.
        final int maxMemorySize = (int) Runtime.getRuntime().maxMemory() / 1024;
        final int cacheSize = maxMemorySize / 10;
        mMemoryCache = new LruCache<String, Bitmap>(cacheSize) {
            @Override
            protected int sizeOf(String key, Bitmap value) {
                return value.getByteCount() / 1024;
            }
        };
        mTextureView = (TextureView) findViewById(R.id.textureView);
    }

    @Override
    public void onResume() {
        super.onResume();
        openBackgroundThread();
        // If the surface survived a pause/resume cycle, reuse it immediately;
        // otherwise wait for onSurfaceTextureAvailable.
        if (mTextureView.isAvailable()) {
            setupCamera(mTextureView.getWidth(), mTextureView.getHeight());
            openCamera();
        } else {
            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        }
    }

    @Override
    public void onPause() {
        // Release the camera before the background thread that services it.
        closeCamera();
        closeBackgoundThread();
        super.onPause();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_camara_intent, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * onClick handler (wired from the layout): creates the destination file
     * and starts the focus-lock -> still-capture sequence.
     */
    public void takePhoto(View view) {
        /*
        Intent callCameraApplicationIntent = new Intent();
        callCameraApplicationIntent.setAction(MediaStore.ACTION_IMAGE_CAPTURE);
        File photoFile = null;
        try {
            photoFile = createImageFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
        callCameraApplicationIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(photoFile));
        startActivityForResult(callCameraApplicationIntent, ACTIVITY_START_CAMERA_APP);
        */
        try {
            mImageFile = createImageFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
        lockFocus();
    }

    // Legacy result handler for the external-camera-app path; currently a
    // no-op because that path is disabled above.
    protected void onActivityResult (int requestCode, int resultCode, Intent data) {
        if (requestCode == ACTIVITY_START_CAMERA_APP && resultCode == RESULT_OK) {
            // Toast.makeText(this, "Picture taken successfully", Toast.LENGTH_SHORT).show();
            // Bundle extras = data.getExtras();
            // Bitmap photoCapturedBitmap = (Bitmap) extras.get("data");
            // mPhotoCapturedImageView.setImageBitmap(photoCapturedBitmap);
            // Bitmap photoCapturedBitmap = BitmapFactory.decodeFile(mImageFileLocation);
            // mPhotoCapturedImageView.setImageBitmap(photoCapturedBitmap);
            // setReducedImageSize();
        }
    }

    /** Creates the public Pictures/"image gallery" folder if it is missing. */
    private void createImageGallery() {
        File storageDirectory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
        mGalleryFolder = new File(storageDirectory, GALLERY_LOCATION);
        if (!mGalleryFolder.exists()) {
            mGalleryFolder.mkdirs();
        }
    }

    /**
     * Creates a uniquely named, timestamped JPEG file in the gallery folder
     * and records its absolute path in {@link #mImageFileLocation}.
     *
     * NOTE(review): SimpleDateFormat without an explicit Locale — fine for a
     * file name, but lint will flag it.
     */
    File createImageFile() throws IOException {
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
        String imageFileName = "IMAGE_" + timeStamp + "_";
        File image = File.createTempFile(imageFileName, ".jpg", mGalleryFolder);
        mImageFileLocation = image.getAbsolutePath();
        return image;
    }

    /**
     * Picks the first non-front-facing camera, sets up an ImageReader at the
     * largest available JPEG size, and chooses a preview size matching the
     * TextureView dimensions.
     */
    private void setupCamera(int width, int height) {
        CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        try {
            for (String cameraId : cameraManager.getCameraIdList()) {
                CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
                // Skip front-facing cameras; we want the rear camera.
                if (cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) ==
                        CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }
                StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                // Largest JPEG output the sensor supports, by pixel area.
                Size largestImageSize = Collections.max(
                        Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                        new Comparator<Size>() {
                            @Override
                            public int compare(Size lhs, Size rhs) {
                                // NOTE(review): width*height is int arithmetic;
                                // could overflow for very large sensors before
                                // Long.signum sees it.
                                return Long.signum(lhs.getWidth() * lhs.getHeight() -
                                        rhs.getWidth() * rhs.getHeight());
                            }
                        }
                );
                // Single-image reader: one still capture in flight at a time.
                mImageReader = ImageReader.newInstance(largestImageSize.getWidth(),
                        largestImageSize.getHeight(),
                        ImageFormat.JPEG,
                        1);
                mImageReader.setOnImageAvailableListener(mOnImageAvailableListener,
                        mBackgroundHandler);
                mPreviewSize = getPreferredPreviewSize(map.getOutputSizes(SurfaceTexture.class), width, height);
                mCameraId = cameraId;
                return;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the smallest available preview size that is at least as large as
     * the view in both dimensions (swapping width/height for portrait views);
     * falls back to the first advertised size when nothing qualifies.
     */
    private Size getPreferredPreviewSize(Size[] mapSizes, int width, int height) {
        List<Size> collectorSizes = new ArrayList<>();
        for (Size option : mapSizes) {
            if (width > height) {
                if (option.getWidth() > width &&
                        option.getHeight() > height) {
                    collectorSizes.add(option);
                }
            } else {
                // Portrait view: camera sizes are landscape-oriented, so
                // compare against the swapped dimensions.
                if (option.getWidth() > height &&
                        option.getHeight() > width) {
                    collectorSizes.add(option);
                }
            }
        }
        if (collectorSizes.size() > 0) {
            return Collections.min(collectorSizes, new Comparator<Size>() {
                @Override
                public int compare(Size lhs, Size rhs) {
                    return Long.signum(lhs.getWidth() * lhs.getHeight() - rhs.getWidth() * rhs.getHeight());
                }
            });
        }
        return mapSizes[0];
    }

    /** Opens the camera chosen by setupCamera on the background handler. */
    private void openCamera() {
        CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        try {
            cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /** Releases session, device and reader (safe to call when already closed). */
    private void closeCamera() {
        if (mCameraCaptureSession != null) {
            mCameraCaptureSession.close();
            mCameraCaptureSession = null;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (mImageReader != null) {
            mImageReader.close();
            mImageReader = null;
        }
    }

    /**
     * Creates the preview capture session targeting both the TextureView
     * surface (for display) and the ImageReader surface (for stills), then
     * starts a repeating preview request.
     */
    private void createCameraPreviewSession () {
        try {
            SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture();
            surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            Surface previewSurface = new Surface(surfaceTexture);
            mPreviewCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewCaptureRequestBuilder.addTarget(previewSurface);
            mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(CameraCaptureSession session) {
                            // The camera may already have been closed while the
                            // session was being configured.
                            if (mCameraDevice == null) {
                                return;
                            }
                            try {
                                mPreviewCaptureRequest = mPreviewCaptureRequestBuilder.build();
                                mCameraCaptureSession = session;
                                mCameraCaptureSession.setRepeatingRequest(
                                        mPreviewCaptureRequest,
                                        mSessionCaptureCallback,
                                        mBackgroundHandler
                                );
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }
                        @Override
                        public void onConfigureFailed(CameraCaptureSession session) {
                            Toast.makeText(
                                    getApplicationContext(),
                                    "create camera session failed!",
                                    Toast.LENGTH_SHORT
                            ).show();
                        }
                    }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /** Starts the background thread that services camera callbacks. */
    private void openBackgroundThread() {
        mBackgroundThread = new HandlerThread("Camera2 background thread");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /** Stops the background thread and waits for it to finish. */
    private void closeBackgoundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Triggers an auto-focus lock; the result is observed in
     * mSessionCaptureCallback.process(), which captures the still once the AF
     * state reaches FOCUSED_LOCKED.
     */
    private void lockFocus() {
        try {
            mState = STATE__WAIT_LOCK;
            mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CaptureRequest.CONTROL_AF_TRIGGER_START);
            mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),
                    mSessionCaptureCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /** Cancels the AF trigger and returns the state machine to preview. */
    private void unLockFocus() {
        try {
            mState = STATE_PREVIEW;
            mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
            mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(),
                    mSessionCaptureCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Issues a still-capture request to the ImageReader surface; on
     * completion, unlocks focus and moves on to the rename screen.
     * NOTE(review): no JPEG_ORIENTATION is set, so the saved image may be
     * rotated on devices whose sensor is not aligned with portrait.
     */
    private void captureStillImage() {
        try {
            CaptureRequest.Builder captureStillBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureStillBuilder.addTarget(mImageReader.getSurface());
            CameraCaptureSession.CaptureCallback captureCallback =
                    new CameraCaptureSession.CaptureCallback() {
                        @Override
                        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
                            super.onCaptureCompleted(session, request, result);
                            /*
                            Toast.makeText(getApplicationContext(),
                            "Image Captured!", Toast.LENGTH_SHORT).show();
                            */
                            unLockFocus();
                            Log.d("Rename Activity", "Start of intent calling ");
                            nextIntent();
                            Log.d("Rename Activity", "end of intent calling ");
                        }
                    };
            mCameraCaptureSession.capture(
                    captureStillBuilder.build(), captureCallback, null
            );
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Launches RenameActivity, passing the captured file's path and the
     * gallery folder as a two-element String array extra ("File_Address").
     */
    private void nextIntent(){
        String[] transferData=new String[2];
        Log.d("Rename Activity", "in nextintent ");
        transferData[0]=mImageFileLocation;
        Log.d("Rename Activity", "in nextintent1 ");
        transferData[1] = mGalleryFolder.toString();
        Intent renameFileActivity = new Intent(this,RenameActivity.class);
        renameFileActivity.putExtra("File_Address",transferData);
        startActivity(renameFileActivity);
    }
}
|
<gh_stars>1-10
'use strict';

const mongoose = require('mongoose');
const config = require('../config');

// Use native promises instead of mongoose's deprecated mpromise.
mongoose.Promise = global.Promise;

// Global mocha hooks: open one shared database connection before the whole
// suite runs, and tear it down once every test has finished.
before((done) => {
  mongoose.connect(config.dbConnection, done);
});

after((done) => {
  mongoose.disconnect(done);
});
|
/**
 * SPIR-V `OpCapability` operand values, mirroring the SPIR-V specification's
 * Capability enumeration. Numeric values are fixed by the spec and must not
 * be changed; several names are intentional aliases sharing one value (a
 * capability promoted from a vendor/KHR extension into core keeps its old
 * token, e.g. StorageBuffer16BitAccess / StorageUniformBufferBlock16).
 * Gaps in the numbering (e.g. 16, 26) are values reserved or unused by the
 * spec. Blocks at 4422+, 5008+, 5249+, 5568+ come from KHR, AMD, NV and
 * INTEL extensions respectively.
 */
export enum Capability {
    // --- Core capabilities ---
    Matrix = 0,
    Shader = 1,
    Geometry = 2,
    Tessellation = 3,
    Addresses = 4,
    Linkage = 5,
    Kernel = 6,
    Vector16 = 7,
    Float16Buffer = 8,
    Float16 = 9,
    Float64 = 10,
    Int64 = 11,
    Int64Atomics = 12,
    ImageBasic = 13,
    ImageReadWrite = 14,
    ImageMipmap = 15,
    Pipes = 17,
    Groups = 18,
    DeviceEnqueue = 19,
    LiteralSampler = 20,
    AtomicStorage = 21,
    Int16 = 22,
    TessellationPointSize = 23,
    GeometryPointSize = 24,
    ImageGatherExtended = 25,
    StorageImageMultisample = 27,
    UniformBufferArrayDynamicIndexing = 28,
    SampledImageArrayDynamicIndexing = 29,
    StorageBufferArrayDynamicIndexing = 30,
    StorageImageArrayDynamicIndexing = 31,
    ClipDistance = 32,
    CullDistance = 33,
    ImageCubeArray = 34,
    SampleRateShading = 35,
    ImageRect = 36,
    SampledRect = 37,
    GenericPointer = 38,
    Int8 = 39,
    InputAttachment = 40,
    SparseResidency = 41,
    MinLod = 42,
    Sampled1D = 43,
    Image1D = 44,
    SampledCubeArray = 45,
    SampledBuffer = 46,
    ImageBuffer = 47,
    ImageMSArray = 48,
    StorageImageExtendedFormats = 49,
    ImageQuery = 50,
    DerivativeControl = 51,
    InterpolationFunction = 52,
    TransformFeedback = 53,
    GeometryStreams = 54,
    StorageImageReadWithoutFormat = 55,
    StorageImageWriteWithoutFormat = 56,
    MultiViewport = 57,
    SubgroupDispatch = 58,
    NamedBarrier = 59,
    PipeStorage = 60,
    GroupNonUniform = 61,
    GroupNonUniformVote = 62,
    GroupNonUniformArithmetic = 63,
    GroupNonUniformBallot = 64,
    GroupNonUniformShuffle = 65,
    GroupNonUniformShuffleRelative = 66,
    GroupNonUniformClustered = 67,
    GroupNonUniformQuad = 68,
    ShaderLayer = 69,
    ShaderViewportIndex = 70,
    // --- KHR extension capabilities ---
    FragmentShadingRateKHR = 4422,
    SubgroupBallotKHR = 4423,
    DrawParameters = 4427,
    SubgroupVoteKHR = 4431,
    StorageBuffer16BitAccess = 4433,
    StorageUniformBufferBlock16 = 4433,
    StorageUniform16 = 4434,
    UniformAndStorageBuffer16BitAccess = 4434,
    StoragePushConstant16 = 4435,
    StorageInputOutput16 = 4436,
    DeviceGroup = 4437,
    MultiView = 4439,
    VariablePointersStorageBuffer = 4441,
    VariablePointers = 4442,
    AtomicStorageOps = 4445,
    SampleMaskPostDepthCoverage = 4447,
    StorageBuffer8BitAccess = 4448,
    UniformAndStorageBuffer8BitAccess = 4449,
    StoragePushConstant8 = 4450,
    DenormPreserve = 4464,
    DenormFlushToZero = 4465,
    SignedZeroInfNanPreserve = 4466,
    RoundingModeRTE = 4467,
    RoundingModeRTZ = 4468,
    RayQueryProvisionalKHR = 4471,
    RayQueryKHR = 4472,
    RayTraversalPrimitiveCullingKHR = 4478,
    RayTracingKHR = 4479,
    // --- AMD extension capabilities ---
    Float16ImageAMD = 5008,
    ImageGatherBiasLodAMD = 5009,
    FragmentMaskAMD = 5010,
    StencilExportEXT = 5013,
    ImageReadWriteLodAMD = 5015,
    Int64ImageEXT = 5016,
    ShaderClockKHR = 5055,
    // --- NV (and related EXT) extension capabilities ---
    SampleMaskOverrideCoverageNV = 5249,
    GeometryShaderPassthroughNV = 5251,
    ShaderViewportIndexLayerEXT = 5254,
    ShaderViewportIndexLayerNV = 5254,
    ShaderViewportMaskNV = 5255,
    ShaderStereoViewNV = 5259,
    PerViewAttributesNV = 5260,
    FragmentFullyCoveredEXT = 5265,
    MeshShadingNV = 5266,
    ImageFootprintNV = 5282,
    FragmentBarycentricNV = 5284,
    ComputeDerivativeGroupQuadsNV = 5288,
    FragmentDensityEXT = 5291,
    ShadingRateNV = 5291,
    GroupNonUniformPartitionedNV = 5297,
    ShaderNonUniform = 5301,
    ShaderNonUniformEXT = 5301,
    RuntimeDescriptorArray = 5302,
    RuntimeDescriptorArrayEXT = 5302,
    InputAttachmentArrayDynamicIndexing = 5303,
    InputAttachmentArrayDynamicIndexingEXT = 5303,
    UniformTexelBufferArrayDynamicIndexing = 5304,
    UniformTexelBufferArrayDynamicIndexingEXT = 5304,
    StorageTexelBufferArrayDynamicIndexing = 5305,
    StorageTexelBufferArrayDynamicIndexingEXT = 5305,
    UniformBufferArrayNonUniformIndexing = 5306,
    UniformBufferArrayNonUniformIndexingEXT = 5306,
    SampledImageArrayNonUniformIndexing = 5307,
    SampledImageArrayNonUniformIndexingEXT = 5307,
    StorageBufferArrayNonUniformIndexing = 5308,
    StorageBufferArrayNonUniformIndexingEXT = 5308,
    StorageImageArrayNonUniformIndexing = 5309,
    StorageImageArrayNonUniformIndexingEXT = 5309,
    InputAttachmentArrayNonUniformIndexing = 5310,
    InputAttachmentArrayNonUniformIndexingEXT = 5310,
    UniformTexelBufferArrayNonUniformIndexing = 5311,
    UniformTexelBufferArrayNonUniformIndexingEXT = 5311,
    StorageTexelBufferArrayNonUniformIndexing = 5312,
    StorageTexelBufferArrayNonUniformIndexingEXT = 5312,
    RayTracingNV = 5340,
    VulkanMemoryModel = 5345,
    VulkanMemoryModelKHR = 5345,
    VulkanMemoryModelDeviceScope = 5346,
    VulkanMemoryModelDeviceScopeKHR = 5346,
    PhysicalStorageBufferAddresses = 5347,
    PhysicalStorageBufferAddressesEXT = 5347,
    ComputeDerivativeGroupLinearNV = 5350,
    RayTracingProvisionalKHR = 5353,
    CooperativeMatrixNV = 5357,
    FragmentShaderSampleInterlockEXT = 5363,
    FragmentShaderShadingRateInterlockEXT = 5372,
    ShaderSMBuiltinsNV = 5373,
    FragmentShaderPixelInterlockEXT = 5378,
    DemoteToHelperInvocationEXT = 5379,
    // --- INTEL extension capabilities ---
    SubgroupShuffleINTEL = 5568,
    SubgroupBufferBlockIOINTEL = 5569,
    SubgroupImageBlockIOINTEL = 5570,
    SubgroupImageMediaBlockIOINTEL = 5579,
    IntegerFunctions2INTEL = 5584,
    FunctionPointersINTEL = 5603,
    IndirectReferencesINTEL = 5604,
    SubgroupAvcMotionEstimationINTEL = 5696,
    SubgroupAvcMotionEstimationIntraINTEL = 5697,
    SubgroupAvcMotionEstimationChromaINTEL = 5698,
    FPGAMemoryAttributesINTEL = 5824,
    UnstructuredLoopControlsINTEL = 5886,
    FPGALoopControlsINTEL = 5888,
    KernelAttributesINTEL = 5892,
    FPGAKernelAttributesINTEL = 5897,
    BlockingPipesINTEL = 5945,
    FPGARegINTEL = 5948,
    AtomicFloat32AddEXT = 6033,
    AtomicFloat64AddEXT = 6034,
    // Sentinel: maximum 32-bit enum value.
    Max = 0x7fffffff,
}
#!/bin/sh
#
# Script to handle VirtualBox installation on a Linux host.
#
# Copyright (C) 2013-2015 Oracle Corporation
#
# This file is part of VirtualBox Open Source Edition (OSE), as
# available from http://www.virtualbox.org. This file is free software;
# you can redistribute it and/or modify it under the terms of the GNU
# General Public License (GPL) as published by the Free Software
# Foundation, in version 2 as it comes in the "COPYING" file of the
# VirtualBox OSE distribution. VirtualBox OSE is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
#
# This script is invoked as part of the installation of VirtualBox on a Linux
# host system (see next paragraph for details). When we install using the
# makeself installer it will be executed at installation time, whereas with RPM
# or deb packages it will be executed by the %install section of the template,
# respectively the binary rule from the rules file when the package is created.
# The plan is to gradually move anything here which is identical across the
# three installer types and to put new things in here straight away. We should
# maintain an uninstall.sh script in parallel, but this will only be needed by
# the makeself installer as the other two deal with that automatically. Perhaps
# once we have made some progress at factoring out common bits in the three
# installers we can even try doing it accross other POSIX platforms.
#
# The general idea (mine at least) of how this works/should work is that the
# build system installs everything needed for VirtualBox to run (provided all
# drivers it needs are currently loaded) into the output bin/ folder. The
# Makeself installer duplicates this folder as /opt/VirtualBox (or whatever),
# the other two as <prefix>/lib/virtualbox (again, or whatever). The installer
# then installs scripts into <prefix>/bin to start binaries in the duplicated
# main folder, builds drivers which are put into the kernel module directories
# and creates init/start-up scripts which load drivers and/or start binaries in
# the main folder. As mentioned above, this installation is done at the time
# the package is created for RPM/deb packages and shouldn't create anything
# which is not supposed to be included in a package file list (other things can
# be done in a post-install step).
# Clean up before we start.
cr="
"
tab=" "
IFS=" ${cr}${tab}"
'unset' -f unalias
'unalias' -a 2>/dev/null
'unset' -f command
PATH=/bin:/sbin:/usr/bin:/usr/sbin:$PATH
# Exit on any errors.
set -e
# Get the folder we are installed to, as we need other files there. May be
# relative to the current directory.
# I hope we never install to a folder with a new-line at the end of the name,
# but the code should(!) still work.
INSTALL_SOURCE=`expr "$0" : '\(.*/\)[^/][^/]*/*'`".."
. "${INSTALL_SOURCE}/scripts/generated.sh"
# Default settings values.  Each of these can be overridden by a command-line
# option parsed in the while/case loop below; empty string means "feature
# enabled" for the NO_* flags (they are tested with `test -z`/`test -n`).
## Root of installation target file system.
ROOT=""
## Prefix to install to.
RELATIVE_PREFIX="/usr"
## Package name.
PACKAGE="VirtualBox"
## Init script folder. Scripts will not be installed at this stage if empty.
INIT_FOLDER=""
## Do not install Qt front-end-related files.
NO_QT=""
## Do a headless installation.
HEADLESS=""
## Do not install the web service.
NO_WEB_SERVICE=""
## OSE installation - does this still mean anything?
OSE=""
## Do not create <prefix>/share/<package>.
NO_SHARE_PACKAGE=""
## Delete the helpers and scripts folders.
NO_HELPERS=""
## The command to use to run the Python interpreter.
PYTHON_COMMAND="python"
## The folder where the main VirtualBox binaries and libraries will be, relative
# to <prefix>.
INSTALL_FOLDER="/lib/${PACKAGE}"
# Parse the command line.  Options taking a value consume it with an extra
# `shift`; unknown options abort.  The missing-value check used to be
# duplicated verbatim for every value-taking option — it is factored into
# check_argument below, with the error message and exit status unchanged.

## Abort with the original error message unless the option has a value.
#  $1: number of arguments left on the command line ("$#" at the call site)
#  $2: the option currently being parsed (used in the message)
check_argument()
{
    test "${1}" -gt 1 ||
    {
        echo "${2} requires at least one argument." >&2
        exit 1
    }
}

while test "${#}" -gt 0; do
    case $1 in
        --prefix)
            check_argument "${#}" "${1}"
            RELATIVE_PREFIX="${2}"
            shift
            ;;
        --root)
            check_argument "${#}" "${1}"
            ROOT="${2}"
            shift
            ;;
        --package)
            check_argument "${#}" "${1}"
            PACKAGE="${2}"
            shift
            ;;
        --init-folder)
            check_argument "${#}" "${1}"
            INIT_FOLDER="${2}"
            shift
            ;;
        --no-qt)
            NO_QT="true"
            ;;
        --headless)
            HEADLESS="true"
            ;;
        --no-web-service)
            NO_WEB_SERVICE="true"
            ;;
        --ose)
            OSE="true"
            ;;
        --no-share-package)
            NO_SHARE_PACKAGE="true"
            ;;
        --no-helpers)
            NO_HELPERS="true"
            ;;
        --no-vbox-img)
            NO_VBOX_IMG="true"
            ;;
        --python-command)
            check_argument "${#}" "${1}"
            PYTHON_COMMAND="${2}"
            shift
            ;;
        --install-folder)
            check_argument "${#}" "${1}"
            INSTALL_FOLDER="${2}"
            shift
            ;;
        *)
            echo "Unknown argument ${1}."
            exit 1
            ;;
    esac
    shift
done
PREFIX="${ROOT}${RELATIVE_PREFIX}"
# Note: install(1) is not POSIX, but it is available on Linux, Darwin and all
# Solaris versions I could check (the oldest was 2.6). It is a BSD command.
install -d -g 0 -o 0 "${PREFIX}/bin"
install -d -g 0 -o 0 "${PREFIX}/share/applications"
# We use this as our base install folder.
test -z "${NO_QT}" &&
    mv "${INSTALL_SOURCE}/virtualbox.desktop" "${PREFIX}/share/applications/"
install -d -g 0 -o 0 "${PREFIX}/share/pixmaps"
test -z "${NO_QT}" &&
    install -d -g 0 -o 0 "${PREFIX}/share/icons/hicolor"
test -z "${NO_QT}" &&
    cp "${INSTALL_SOURCE}/icons/128x128/virtualbox.png" "${PREFIX}/share/pixmaps/"
# Sort the per-size icon folders into the hicolor theme layout; mimetype
# icons go to .../mimetypes, the application icon to .../apps.
test -z "${NO_QT}" &&
    for i in "${INSTALL_SOURCE}/icons/"*; do
        # Extract the final path component ("NxN/") of the icon folder.
        folder=`expr "${i}/" : '.*/\([^/][^/]*/\)/*'`
        if test -f "${i}/virtualbox."*; then
            install -d -g 0 -o 0 "${PREFIX}/share/icons/hicolor/${folder}/apps"
            mv "${i}/virtualbox."* "${PREFIX}/share/icons/hicolor/${folder}/apps"
        fi
        install -d -g 0 -o 0 "${PREFIX}/share/icons/hicolor/${folder}/mimetypes"
        mv "${i}/"* "${PREFIX}/share/icons/hicolor/${folder}/mimetypes" 2>/dev/null || true
        rmdir "${i}"
    done
test -z "${NO_QT}" &&
    rmdir "${INSTALL_SOURCE}/icons"
if test -w "${INSTALL_SOURCE}/virtualbox.xml"; then
    install -d -g 0 -o 0 "${PREFIX}/share/mime/packages"
    mv "${INSTALL_SOURCE}/virtualbox.xml" "${PREFIX}/share/mime/packages/"
fi
mv "${INSTALL_SOURCE}/VBox.png" "${PREFIX}/share/pixmaps/"
# The `test -n "${FLAG}" && test ! -r ...` pairs below are build sanity
# assertions: when a feature is disabled, its files must not be present in
# the source tree.  Under `set -e` a failing second test aborts the script.
test -n "${NO_QT}" &&
    test ! -r ${INSTALL_SOURCE}/VBoxTestOGL
test -n "${NO_QT}" &&
    test ! -r ${INSTALL_SOURCE}/nls
install -D -g 0 -o 0 -m 644 "${INSTALL_SOURCE}/VBox.sh" "${PREFIX}/bin/VBox"
rm "${INSTALL_SOURCE}/VBox.sh"
# Install the Python SDK bindings; run in a subshell so the cd and the
# exported VBOX_INSTALL_PATH do not leak into the rest of the script.
(
    cd "${INSTALL_SOURCE}/sdk/installer"
    export VBOX_INSTALL_PATH="${RELATIVE_PREFIX}${INSTALL_FOLDER}"
    "${PYTHON_COMMAND}" "vboxapisetup.py" install --root "${ROOT}" --prefix "${RELATIVE_PREFIX}"
)
rm -rf ${INSTALL_SOURCE}/sdk/installer
test -n "${HEADLESS}" &&
    test ! -r "${INSTALL_SOURCE}/VBoxSDL"
test -n "${NO_QT}" &&
    test ! -r "${INSTALL_SOURCE}/VirtualBox"
test -n "${NO_WEB_SERVICE}" &&
    test ! -r "${INSTALL_SOURCE}/vboxwebsrv"
test -n "${NO_VBOX_IMG}" &&
    test ! -r "${INSTALL_SOURCE}/vbox-img"
test -n "${NO_WEB_SERVICE}" &&
    test ! -r "${INSTALL_SOURCE}/webtest"
test -r "${INSTALL_SOURCE}/VBoxDTrace" &&
    mv "${INSTALL_SOURCE}/VBoxDTrace" "${PREFIX}/bin"
mv "${INSTALL_SOURCE}/VBoxTunctl" "${PREFIX}/bin"
test -n "${OSE}" || test -n "${NO_QT}" &&
    test ! -r ${INSTALL_SOURCE}/kchmviewer
test -z "${OSE}" && test -z "${HEADLESS}" &&
    mv "${INSTALL_SOURCE}/rdesktop-vrdp" "${PREFIX}/bin"
# Shared data: guest additions ISO, kernel module sources, translations etc.
if test -z "${NO_SHARE_PACKAGE}"; then
    install -d -g 0 -o 0 "${PREFIX}/share/${PACKAGE}"
    mv "${INSTALL_SOURCE}/VBoxSysInfo.sh" "${PREFIX}/share/${PACKAGE}"
    mv "${INSTALL_SOURCE}/VBoxCreateUSBNode.sh" "${PREFIX}/share/${PACKAGE}"
    mv "${INSTALL_SOURCE}/src" "${PREFIX}/share/${PACKAGE}"
    test -z "${NO_QT}" &&
        mv "${INSTALL_SOURCE}/nls" "${PREFIX}/share/${PACKAGE}"
    mv "${INSTALL_SOURCE}/additions/VBoxGuestAdditions.iso" "${PREFIX}/share/${PACKAGE}"
    # The original code did not fail if this file did not exist.
    test -z "${OSE}" && test -f "${INSTALL_SOURCE}/rdesktop-vrdp.tar.gz" &&
        mv "${INSTALL_SOURCE}/rdesktop-vrdp.tar.gz" "${PREFIX}/share/${PACKAGE}"
    test -z "${OSE}" && test -z "${HEADLESS}" &&
        mv "${INSTALL_SOURCE}/rdesktop-vrdp-keymaps" "${PREFIX}/share/${PACKAGE}"
    # NOTE(review): assumes "${PREFIX}/src" already exists — confirm the
    # packaging step creates it before this symlink/move.
    ln -s "../share/virtualbox/src/vboxhost" "${PREFIX}/src/vboxhost-${VBOX_VERSION_STRING}"
else
    mv "${INSTALL_SOURCE}/src/vboxhost" "${PREFIX}/src/vboxhost-${VBOX_VERSION_STRING}"
fi
# Create the CamelCase launcher links plus all-lowercase convenience aliases.
test -z "${NO_QT}" && ln -s "VBox" "${PREFIX}/bin/VirtualBox"
test -z "${NO_QT}" && ln -sf "VBox" "${PREFIX}/bin/virtualbox"
ln -s "VBox" "${PREFIX}/bin/VBoxManage"
ln -sf "VBox" "${PREFIX}/bin/vboxmanage"
test -z "${HEADLESS}" && ln -s "VBox" "${PREFIX}/bin/VBoxSDL"
test -z "${HEADLESS}" && ln -sf "VBox" "${PREFIX}/bin/vboxsdl"
test -z "${OSE}" && ln -s "VBox" "${PREFIX}/bin/VBoxVRDP"
ln -s "VBox" "${PREFIX}/bin/VBoxHeadless"
ln -sf "VBox" "${PREFIX}/bin/vboxheadless"
ln -s "VBox" "${PREFIX}/bin/VBoxBalloonCtrl"
ln -sf "VBox" "${PREFIX}/bin/vboxballoonctrl"
ln -s "VBox" "${PREFIX}/bin/VBoxAutostart"
ln -s "VBox" "${PREFIX}/bin/vboxautostart"
test -z "${NO_WEB_SERVICE}" && ln -s "VBox" "${PREFIX}/bin/vboxwebsrv"
echo "NO_VBOX_IMG = ${NO_VBOX_IMG}"
test -z "${NO_VBOX_IMG}" && ln -sv "${RELATIVE_PREFIX}${INSTALL_FOLDER}/vbox-img" "${PREFIX}/bin/vbox-img"
rmdir ${INSTALL_SOURCE}/additions
rm "${INSTALL_SOURCE}/scripts/install.sh"
## @todo Move this to a make file.
install -d -g 0 -o 0 "${INSTALL_SOURCE}/ExtensionPacks"
# For now.
test -n "${NO_HELPERS}" &&
    rm -r "${INSTALL_SOURCE}/helpers"
# And the very last bit.
test -n "${NO_HELPERS}" &&
    rm -r "${INSTALL_SOURCE}/scripts"
# Explicit success: the last `test -n ... &&` list above would otherwise make
# the script's exit status non-zero when NO_HELPERS is unset.
exit 0
|
impl<'a> Iterator for ColumnIterator<'a> {
type Item = &'a [Cell];
fn next(&mut self) -> Option<Self::Item> {
if self.x < self.game.board.len() {
let column = self.game.board.iter().map(|row| &row[self.x]).collect::<Vec<_>>();
self.x += 1;
Some(column.as_slice())
} else {
None
}
}
} |
<filename>sa-security/src/main/java/com/sa/security/JsonAuthHandler.java
/*******************************************************************************
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* Contributors:
* <NAME> - Initial contribution and API
******************************************************************************/
package com.sa.security;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.authentication.AuthenticationFailureHandler;
import org.springframework.security.web.authentication.AuthenticationSuccessHandler;
import org.springframework.security.web.authentication.logout.LogoutSuccessHandler;
import org.springframework.stereotype.Component;
/**
* The <code>JsonAuthHandler</code>
*
* @author <NAME>
* @version 1.0
* @since 1.0
*/
@Component
public class JsonAuthHandler implements AuthenticationSuccessHandler, AuthenticationFailureHandler, LogoutSuccessHandler {

    /**
     * Writes a {@code {"success": true}} JSON body with HTTP 200 after a
     * successful login.
     * <p>
     * FIX: the responses previously carried no Content-Type header, so
     * clients could misinterpret the JSON payload; it is now set explicitly.
     */
    @Override
    public void onAuthenticationSuccess(HttpServletRequest request, HttpServletResponse response, Authentication auth)
            throws IOException, ServletException {
        response.setStatus(HttpServletResponse.SC_OK);
        response.setContentType("application/json");
        response.getWriter().print("{\"success\": true}");
        response.getWriter().flush();
    }

    /**
     * Writes a {@code {"success": false}} JSON body with HTTP 401 when
     * authentication fails.
     */
    @Override
    public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response,
            AuthenticationException exception) throws IOException, ServletException {
        response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
        response.setContentType("application/json");
        response.getWriter().print("{\"success\": false}");
        response.getWriter().flush();
    }

    /**
     * Writes a success JSON body with HTTP 200 after logout.
     */
    @Override
    public void onLogoutSuccess(HttpServletRequest request, HttpServletResponse response, Authentication authentication)
            throws IOException, ServletException {
        response.setStatus(HttpServletResponse.SC_OK);
        response.setContentType("application/json");
        response.getWriter().print("{\"success\":true}");
        response.getWriter().flush();
    }
}
|
<filename>dispatch-service/src/main/java/xcode/springcloud/dispatchservice/LocationService.java
package xcode.springcloud.dispatchservice;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * REST contract for driver location tracking and nearest-driver lookup.
 */
public interface LocationService {

    /** Returns the last known location of the driver with the given id. */
    @RequestMapping(value = "/drivers/{id}/location", method = RequestMethod.GET, produces = "application/json")
    public Location getDriverLocation(
            @PathVariable("id") String id);

    /**
     * Creates or replaces the location of the driver with the given id.
     * The request body may be omitted ({@code required = false}).
     */
    @RequestMapping(value = "/drivers/{id}/location", method = RequestMethod.POST)
    public Location createOrUpdate(
            @PathVariable("id") String id,
            @RequestBody(required = false) Location inputLocation);

    /**
     * Finds the driver nearest to the given location hash.
     * NOTE(review): expirationInSec arrives as a String with an empty
     * default — confirm the implementation parses/validates it.
     */
    @RequestMapping(value = "/find", method = RequestMethod.GET)
    public Location findNearestDriver(
            @RequestParam(value = "locationHash", defaultValue = "") String locationHash,
            @RequestParam(value = "expirationInSec", defaultValue = "") String expirationInSec);
}
|
import random

# Number of random values to draw.
length = 7
# Draw `length` integers uniformly from the inclusive range [0, 100].
# `_` marks the loop variable as unused (the original bound it to `x`).
random_ints = [random.randint(0, 100) for _ in range(length)]
print(random_ints)
# Build a word-frequency table from the file "textonly":
#   1. strip the punctuation characters , . : ! " - and flatten newlines
#      to spaces into tmpfile;
#   2. with awk, treat each space-separated token as a record and count it;
#   3. sort the "word: count" lines into result.txt.
# NOTE(review): tmpfile is left behind afterwards — confirm that is intended.
cat textonly | tr -d ',.:!"-' | tr '\n' ' ' >tmpfile
awk 'BEGIN{RS=" "} {++w[$0]} END{for(a in w) if(a!="") print a": "w[a]}' tmpfile | sort >result.txt
|
#!/bin/bash -e
# usage: collect_llvm_coverage.sh <directory of .profraw files>
# ctest must have been called with LLVM_PROFILE_FILE=<build directory>/coverage/coverage_%p.profraw
DIR=$1
# make a file for codecov
# Merge all per-process raw profiles into a single indexed profile.
llvm-profdata merge -o $DIR/coverage/coverage.profdata $DIR/coverage/coverage_*.profraw
# Collect each test binary as an "-object" argument for llvm-cov.
# NOTE(review): `find` is called with a single path and no predicates, so it
# prints that path (plus its contents if it is a directory) — confirm
# $DIR/bin/stat_bench_test_units is the intended binary/directory.
for a in $(find $DIR/bin/stat_bench_test_units); do
    opts="$opts -object $a"
done
# Plain-text line-coverage report (consumed by codecov).
llvm-cov show -ignore-filename-regex='(test|.conan)/*' -instr-profile=$DIR/coverage/coverage.profdata $opts \
    >$DIR/coverage/coverage.txt
# HTML report for local browsing.
llvm-cov show -ignore-filename-regex='(test|.conan)/*' -instr-profile=$DIR/coverage/coverage.profdata $opts \
    -format=html -output-dir=$DIR/coverage/html
# output summary to stdout
llvm-cov report -ignore-filename-regex='(test|.conan)/*' -instr-profile=$DIR/coverage/coverage.profdata $opts |
    tee $DIR/coverage/coverage_summary.txt
|
import numpy as np
def average_luminance(frame: np.ndarray) -> float:
    """Return the mean ITU-R BT.601 luma of an image.

    Args:
        frame: H x W x 3 uint8/float array in BGR channel order (channel 0
            is weighted as blue below, matching the OpenCV convention).

    Returns:
        The average luma Y = 0.299*R + 0.587*G + 0.114*B as a Python float.
    """
    B = frame[:, :, 0]
    G = frame[:, :, 1]
    R = frame[:, :, 2]
    Y = 0.299 * R + 0.587 * G + 0.114 * B
    # BUG FIX: np.mean returns a NumPy scalar (np.float64); cast so the
    # annotated `-> float` return type actually holds.
    return float(np.mean(Y))
# Sample image frame: a 2x2 BGR image with pure-blue, pure-green,
# pure-red and white pixels.
frame = np.array([[[255, 0, 0], [0, 255, 0]],
                  [[0, 0, 255], [255, 255, 255]]], dtype=np.uint8)
# Call the function
avg_luminance = average_luminance(frame)
# Output
# Per-pixel luma is 0.114*255, 0.587*255, 0.299*255 and 255.0, whose mean
# is 127.5 (the previous "195.325" comment did not match the computation).
print(avg_luminance)  # Output: 127.5
using System.Collections;
using UnityEngine;
// Tracks the endless-mode score and triggers the boss death sequence when
// score is added.
// NOTE(review): _Anim, _AddingScore, audio, GuardianDeath, Guardian,
// PreDeathEffect and EndlessEnemySystem are not defined in this snippet —
// confirm they exist on this component / elsewhere in the project.
// IEnumerator requires `using System.Collections;` at the top of the file.
public class EndlessScoringSystem : MonoBehaviour
{
    // Global running score; mutation is restricted to this component.
    public static int Score { get; private set; }

    // Adds to the score and kicks off the boss death effects/audio.
    public void AddScore(int amount)
    {
        Score += amount;
        EndlessEnemySystem.BossDying = true;
        _Anim.enabled = false; // Assuming _Anim is a reference to the boss's animation component
        _AddingScore = true; // Assuming _AddingScore is a flag used for scoring effects
        audio.PlayOneShot(GuardianDeath); // Assuming audio is a reference to the audio source
        StartCoroutine(PlayPreDeathEffect());
    }

    // Waits 6 seconds, then spawns the pre-death effect 15 units above the
    // boss's position.
    private IEnumerator PlayPreDeathEffect()
    {
        yield return new WaitForSeconds(6f);
        Vector3 effectsPos = new Vector3(Guardian.transform.position.x, (Guardian.transform.position.y + 15), Guardian.transform.position.z);
        GameObject preDeathEffect = Instantiate(PreDeathEffect, effectsPos, Quaternion.identity);
        // Assuming PreDeathEffect is a prefab for the pre-death effect
        // Assuming Guardian is a reference to the boss game object
    }
}
#!/bin/bash
# Build the vino controller images, deploy them, and wait for the
# controller-manager deployment to appear and finish rolling out.
set -xe
sudo snap install kustomize && sudo snap install go --classic
make docker-build-controller
make docker-build-vino-builder
make deploy
kubectl get po -A
#Wait for vino controller manager Pod.
count=0
until [[ $(kubectl -n vino-system get deployment -l control-plane=controller-manager 2>/dev/null) ]]; do
  count=$((count + 1))
  if [[ ${count} -eq "120" ]]; then
    echo ' Timed out waiting for vino controller manager deployment to exist'
    # BUG FIX: `return` is only valid inside a function or a sourced
    # script; at the top level of an executed script it errors out.
    # `exit 1` fails the script with the intended status.
    exit 1
  fi
  sleep 2
done
kubectl -n vino-system rollout status deployment vino-controller-manager --timeout=240s
'use strict';
// Registers the `bd.names` Angular module with the fullName directive and
// exports the module's name (the conventional shape for angular + CommonJS
// composition, so consumers can list it as a dependency).
module.exports = require('angular')
  .module('bd.names', [])
  .directive('fullName', require('./full-name'))
  .name;
|
<reponame>Yyassin/SystemsProgramming
/**
* Message Queue Wrapper Header
* @Author: <NAME>
* @Date: November 23, 2021
*/
#include "MessageQueueWrapper.h"
/* Create (or open) the System V message queue identified by `key`,
 * readable/writable by everyone (0666). Returns the queue id, or -1 on
 * error with errno set. */
int message_queue_create(key_t key)
{
    return msgget(key, IPC_CREAT | 0666);
}

/* Blocking send of `msg` on queue `qid`.
 * NOTE(review): MAX_TEXT is passed as msgsz — msgsnd expects the size of
 * the message payload excluding the leading mtype field; confirm MAX_TEXT
 * is defined accordingly in MessageQueueWrapper.h. */
int message_queue_send(int qid, Message* msg)
{
    return msgsnd(qid, (void *)msg, MAX_TEXT, 0);
}

/* Blocking receive of the next message of the given `type` into `msg`.
 * Returns the number of bytes copied, or -1 on error. */
int message_queue_receive(int qid, Message* msg, long type)
{
    return msgrcv(qid, (void *)msg, MAX_TEXT, type, 0);
}

/* Remove the queue and wake any blocked readers/writers. */
int message_queue_delete(int qid)
{
    return msgctl(qid, IPC_RMID, 0);
}
|
def is_anagram(str1, str2):
    """Return True if str1 and str2 are anagrams of each other.

    The check is case- and whitespace-sensitive: the strings are anagrams
    exactly when their sorted character sequences are equal.
    """
    # The comparison already yields the required bool, so the original
    # `if ...: return True / else: return False` scaffolding is dropped.
    return sorted(str1) == sorted(str2)
// ThreePoints.js
// Immutable-ish value object for a point in 3-D space.
export default class ThreePoints {
  constructor(x, y, z) {
    // Store the three coordinates as own enumerable properties.
    Object.assign(this, { x, y, z });
  }

  // Returns the coordinates as an [x, y, z] triple.
  getCoordinates() {
    const { x, y, z } = this;
    return [x, y, z];
  }
}
#!/bin/sh
# Launcher for the swagger-codegen Scala async client generator.
SCRIPT="$0"
# Ask scala for the project's Scala version (printed by bin/Version.scala).
SCALA_RUNNER_VERSION=$(scala ./bin/Version.scala)
# Resolve symlinks so APP_DIR is derived from the script's real location.
while [ -h "$SCRIPT" ] ; do
  ls=`ls -ld "$SCRIPT"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=`dirname "$SCRIPT"`/"$link"
  fi
done
# APP_DIR defaults to the script's parent directory unless already set to
# an existing directory in the environment.
if [ ! -d "${APP_DIR}" ]; then
  APP_DIR=`dirname "$SCRIPT"`/..
  APP_DIR=`cd "${APP_DIR}"; pwd`
fi
cd $APP_DIR
# if you've executed sbt assembly previously it will use that instead.
ags="com.wordnik.swagger.codegen.ScalaAsyncClientGenerator $@"
if [ -f $APP_DIR/target/scala-$SCALA_RUNNER_VERSION/swagger-codegen.jar ]; then
  scala -cp target/scala-$SCALA_RUNNER_VERSION/swagger-codegen.jar $ags
else
  echo "Please set scalaVersion := \"$SCALA_RUNNER_VERSION\" in build.sbt and run ./sbt assembly"
fi
|
#!/bin/bash
# Builds the project in all three CMake configurations, aborting the chain
# on the first failure (-e), with unset-variable and pipeline safety.
set -euo pipefail

# On any failing command, print a message and drop into an interactive
# shell so the failure can be inspected (the trap keeps the window open).
function on_error {
  echo "Something failed..."
  $SHELL
}
trap on_error ERR

./build.sh Debug
./build.sh Release
./build.sh RelWithDebInfo
<reponame>m-llo/UTA-AUS-FSF-PT-12-2020-U-C
const router = require('express').Router();
const { Traveller, Location, Trips } = require('../../models');

// GET all travellers, each with their associated locations and trips.
router.get('/', async (req, res) => {
  try {
    const travellerData = await Traveller.findAll({
      include: [{ model: Location }, { model: Trips }],
    });
    res.status(200).json(travellerData);
  } catch (err) {
    res.status(500).json(err);
  }
});

// GET a single traveller by primary key.
// BUG FIX: this handler referenced Driver/License/Car/driverData, none of
// which are imported or defined in this module (a copy-paste from another
// router, guaranteed ReferenceError at request time). It now queries the
// Traveller model with the same includes as the list route.
router.get('/:id', async (req, res) => {
  try {
    const travellerData = await Traveller.findByPk(req.params.id, {
      include: [{ model: Location }, { model: Trips }],
    });
    if (!travellerData) {
      res.status(404).json({ message: 'No driver found with that id!' });
      return;
    }
    res.status(200).json(travellerData);
  } catch (err) {
    res.status(500).json(err);
  }
});

module.exports = router;
#!/bin/sh
# Plane puller: extracts LINESTRING tracks for specific aircraft IDs from
# per-day data dumps into one output file per aircraft.
SRC_DIR=${1:-""}
ID_FILE=${2:-"surveillance_planes.txt"}
DST_DIR=${3:-"results"}
cat <<EOF
Plane puller: a script for extracting specific flights from data dumps
EOF
# Interactive confirmation of the three paths (pre-filled with the
# defaults / positional arguments).
read -e -p "Input data directory: " -i "$SRC_DIR" SRC_DIR
read -e -p "Input plane ID file: " -i "$ID_FILE" ID_FILE
read -e -p "Output directory: " -i "$DST_DIR" DST_DIR
if [ ! -e "$SRC_DIR" ]; then
    echo "Source dir $SRC_DIR doesn't exist?"
    exit
fi
if [ ! -e "$ID_FILE" ]; then
    echo "ID file '$ID_FILE' doesn't exist? exiting"
    exit
fi
if [ ! -e "$DST_DIR" ]; then
    echo "$DST_DIR doesn't exist, creating"
    mkdir $DST_DIR
fi
#Reset all output files
while read line; do
    ID=$line
    if [ -e "$DST_DIR/$ID.txt" ]; then
        rm "$DST_DIR/$ID.txt"
    fi
done < $ID_FILE
#Run through all input files
# For every dump file, append each plane's LINESTRING rows (with the
# leading columns stripped) to that plane's per-ID output file.
files="${SRC_DIR}/*.txt"
for i in $files; do
    echo "Working on file $i"
    while read line; do
        ID=$line
        echo "  Doing plane $ID"
        grep $ID $i | sed -e 's/.*LINESTRING/LINESTRING/' >>$DST_DIR/$ID.txt
    done < $ID_FILE
done
exit
# NOTE(review): everything below this `exit` is unreachable — it is an
# earlier iteration of the same loop kept for reference only.
while read line; do
    ID=$line
    echo "Working on $ID"
    if [ -e "$DST_DIR/$ID.txt" ]; then
        rm "$DST_DIR/$ID.txt"
    fi
    files="${SRC_DIR}/*.txt"
    #for i in `grep -i $ID $SRC_DIR/*.txt | sed -e 's/:.*$/ /'`; do
    for i in $files; do
        echo looking at $i
        grep $ID $i | sed -e 's/.*LINESTRING/LINESTRING/'
        #grep $ID $SRC_DIR/$i | sed -e 's/.*LINESTRING/LINESTRING/' >>$DST_DIR/$ID.txt
    done
done < $ID_FILE
#ID=ADC362
#for i in `grep -i $ID *.txt | sed -e 's/:.*$/ /'`; do
#  grep $ID ../$i | sed -e 's/.*LINESTRING/LINESTRING/'
#done
#!/bin/sh
# Re-sync submodule remote URLs from .gitmodules, then pull the latest
# master branch in every submodule.
git submodule sync
git submodule foreach git pull origin master
|
import numpy as np
class HiddenGate:
    """Applies a configurable elementwise nonlinearity to its input.

    Only "tanh" and "sigmoid" are supported; anything else raises
    ValueError at forward time.
    """

    def __init__(self, hidden_size, input_size, nonlinearity):
        self.hidden_size = hidden_size
        self.input_size = input_size
        self.nonlinearity = nonlinearity

    def forward(self, input_data):
        """Apply the configured nonlinearity to ``input_data`` elementwise."""
        name = self.nonlinearity
        if name == "tanh":
            return np.tanh(input_data)
        if name == "sigmoid":
            # Numerically standard logistic function.
            return 1 / (1 + np.exp(-input_data))
        raise ValueError("Unsupported nonlinearity function")
class SentenceStateGate:
    """Computes forget/input/output gate pre-activations from a hidden state.

    NOTE: weight matrices (and biases) are drawn fresh from ``np.random`` on
    every call, exactly as in the original toy implementation, so outputs
    are random unless the global seed is fixed.
    """

    def __init__(self, hidden_size, input_size, bias):
        self.hidden_size = hidden_size
        self.input_size = input_size
        self.bias = bias

    def _gate(self, hidden_state):
        """One gate: random (hidden, input) projection plus optional bias.

        BUG FIX: ``hidden_state @ W`` has shape (input_size,), but the
        original added a bias of shape (hidden_size,), which raises a
        broadcasting error whenever hidden_size != input_size (e.g. the
        original example used 10 vs 5). The bias now matches the projected
        shape.
        """
        g = np.dot(hidden_state, np.random.randn(self.hidden_size, self.input_size))
        if self.bias:
            g += np.random.randn(self.input_size)
        return g

    def forward(self, hidden_state, input_data):
        """Return (g_f, g_i, g_o), each of shape (input_size,).

        ``input_data`` is accepted for interface compatibility but — as in
        the original — is not used in the computation.
        """
        g_f = self._gate(hidden_state)
        g_i = self._gate(hidden_state)
        g_o = self._gate(hidden_state)
        return g_f, g_i, g_o
# Example usage
hidden_size = 10
input_size = 5
bias = True
hidden_gate = HiddenGate(hidden_size, input_size, nonlinearity="tanh")
sentence_gate = SentenceStateGate(hidden_size, input_size, bias)
hidden_state = np.random.randn(hidden_size)
input_data = np.random.randn(input_size)
# NOTE(review): with the SentenceStateGate defined above, each gate is a
# (5,)-shaped projection while the bias added to it is (10,)-shaped, so
# this call raises a broadcasting ValueError as written — the gate's bias
# shape needs to match the projection before this example can run.
g_f, g_i, g_o = sentence_gate.forward(hidden_state, input_data)
gate_output = hidden_gate.forward(g_f + g_i + g_o)
print(gate_output)
<gh_stars>0
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { ActionResult } from '../models/action-result-model';
import { ConfigService } from './config.service';
const CONTROLLER = 'WF_DEFOR'
@Injectable({
  providedIn: 'root'
})
// Thin HTTP client for the WF_DEFOR controller of the RwfEditr API.
export class FormDetailService {

  constructor(private http:HttpClient,private configService:ConfigService) { }

  // Fetches the detail-form list for the given container id (for_cont),
  // wrapped in the API's ActionResult envelope.
  GetDetailFormList(for_cont:number){
    return this.http.get<ActionResult<any[]>>(`${this.configService.config.apiRwfEditrUrl}/${CONTROLLER}?for_cont=${for_cont}`);
  }
}
|
class DatabaseUpdater:
    """Applies schema/data patches to an MSSQL database."""

    def __init__(self, connection_string, database, logger):
        """Validate the connection string and record the target database.

        Raises:
            ValueError: if the connection string is not an MSSQL one.
                (FIX: the original raised a bare Exception; ValueError is a
                subclass, so existing ``except Exception`` callers still work
                while new callers can catch the specific type.)
        """
        if 'mssql' not in connection_string:
            raise ValueError('Wrong connection string, it should contain mssql word')
        self.connection_string = connection_string
        self.database = database
        self.logger = logger
        self.server = None  # Server connection; established later, not here.

    def _create_patch_table_if_not_exists(self, database):
        """Create the patch bookkeeping table in ``database`` if missing.

        Should return True if the table already existed, False if it was
        created. Implementation depends on the specific MSSQL access layer.
        BUG FIX: the original body contained only comments, which is a
        SyntaxError in Python; the stub now raises until implemented.
        """
        raise NotImplementedError

    def __del__(self):
        # Best-effort disconnect; self.server stays None until a
        # connection has been established.
        if self.server:
            self.server.ConnectionContext.Disconnect()

    def update(self):
        """Run the pending database update operations.

        BUG FIX: the original comment-only body was a SyntaxError; the stub
        raises until the update logic is implemented.
        """
        raise NotImplementedError
<filename>tests/dummy/app/router.js
import Ember from 'ember';
import config from './config/environment';
// Dummy-app router: three top-level "inspect" sections, each with the same
// three nested child routes, used to exercise the addon's navigation.
var Router = Ember.Router.extend({
  // locationType (history/hash/none) comes from config/environment.
  location: config.locationType
});

export default Router.map(function() {
  this.route('inspect', function () {
    this.route('one', function () {
      this.route('one');
      this.route('two');
      this.route('three');
    });
    this.route('two', function () {
      this.route('one');
      this.route('two');
      this.route('three');
    });
    this.route('three', function () {
      this.route('one');
      this.route('two');
      this.route('three');
    });
  });
});
|
<reponame>LarsBehrenberg/e-wallet<gh_stars>0
import React from 'react';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { Grid, Container, Card } from '@material-ui/core';
export default function LivePreviewExample() {
return (
<>
<Card className="mb-spacing-6-x2">
<Container className="py-5">
<Grid container spacing={0}>
<Grid item lg={6}>
<div className="feature-box py-3">
<div className="font-size-xxl text-primary rounded-circle">
<FontAwesomeIcon icon={['fas', 'bomb']} />
</div>
<h3 className="font-size-lg font-weight-bold mt-2">Widgets</h3>
<p className="text-black-50 mt-3">
But I must explain to you how all this mistaken idea of
denouncing pleasure and praising pain was born.
</p>
</div>
</Grid>
<Grid item lg={6}>
<div className="feature-box py-3">
<div className="font-size-xxl text-danger rounded-circle">
<FontAwesomeIcon icon={['fas', 'network-wired']} />
</div>
<h3 className="font-size-lg font-weight-bold mt-2">
Components
</h3>
<p className="text-black-50 mt-3">
The master-builder of human happiness. No one rejects,
dislikes, or avoids pleasure itself, because it is pleasure.
</p>
</div>
</Grid>
<Grid item lg={6}>
<div className="feature-box py-3">
<div className="font-size-xxl text-success rounded-circle">
<FontAwesomeIcon icon={['fas', 'birthday-cake']} />
</div>
<h3 className="font-size-lg font-weight-bold mt-2">Blocks</h3>
<p className="text-black-50 mt-3">
Who are so beguiled and demoralized by the charms of pleasure
of the moment, so blinded by desire, that they cannot foresee.
</p>
</div>
</Grid>
<Grid item lg={6}>
<div className="feature-box py-3">
<div className="font-size-xxl text-warning rounded-circle">
<FontAwesomeIcon icon={['fas', 'bus-alt']} />
</div>
<h3 className="font-size-lg font-weight-bold mt-2">Pages</h3>
<p className="text-black-50 mt-3">
Which toil and pain can procure him some great pleasure. To
take a trivial example, which of us avoids pleasure.
</p>
</div>
</Grid>
</Grid>
</Container>
</Card>
</>
);
}
|
<reponame>hmedal/speu2<filename>src/objects/experiments.py
'''
Created on Jul 11, 2016
@author: hmedal
'''
import os
import xml.etree.cElementTree as ET
import unittest
import json
from src.objects import computationalresource, outputtable
def convertHoursToTimeString(hours):
    """Format a duration given in hours as "H:MM:SS" (hours unpadded)."""
    total = hours * 3600
    # Derive the three fields directly via floor division / modulo instead
    # of the chained divmod calls of the original; "%d" truncates any
    # fractional part exactly as before.
    return "%d:%02d:%02d" % (total // 3600, (total // 60) % 60, total % 60)
def get_params_string_name(parametersDictionary, paramsThatChanged = None):
    # Build the parameter suffix used in experiment file names: 'base' for
    # the unmodified configuration, otherwise one "_<short>-<value>"
    # segment per changed parameter, with short names looked up from
    # ../expr_scripts_for_paper/params_shortNames.json (relative to cwd).
    paramsString = ''
    # NOTE(review): the file handle opened here is never closed explicitly.
    shortNamesDict = json.loads(open('../expr_scripts_for_paper/params_shortNames.json').read())
    if paramsThatChanged is None or len(paramsThatChanged) == 0:
        paramsString = 'base'
    else:
        for paramName in paramsThatChanged:
            paramsString += '_' + shortNamesDict[paramName] + '-' + str(parametersDictionary[paramName])
    return paramsString
def get_filename_from_params_dict(parametersDictionary, paramsThatChanged=None):
    # Base file name for an experiment: "<infModelName>_<params-suffix>".
    # BUG FIX: the original hard-coded `paramsThatChanged = None` in the
    # call below, silently ignoring this function's own argument, so every
    # experiment was named with the 'base' suffix; it is now forwarded.
    infModelName = parametersDictionary['signal']['infModelName']
    return infModelName + "_" + get_params_string_name(parametersDictionary, paramsThatChanged=paramsThatChanged)
def createOutputFileNameFromParamsDict(parametersDictionary, exprBatchName, paramsThatChanged = None):
    # Path under ../output/ where the experiment's stdout is captured.
    # NOTE(review): exprBatchName is accepted but unused here.
    return '../output/' + get_filename_from_params_dict(parametersDictionary, paramsThatChanged) + '.out'
def createExprFileFromParamsDict(paramsDict, exprBatchName, paramsThatChanged = None):
    # Serialize the experiment parameters to ../exprFiles/<name>.json and
    # return that path. (Python 2 module — note the print statement.)
    # NOTE(review): exprBatchName is accepted but unused here.
    print "paramsDict before write", paramsDict
    exprFileName = '../exprFiles/' + get_filename_from_params_dict(paramsDict, paramsThatChanged) + '.json'
    with open(exprFileName, 'w') as exprFileObject:
        json.dump(paramsDict, exprFileObject, indent=4, sort_keys=True)
    return exprFileName
class OptimizationExperiment(object):
'''
For the purpose of printing out output
'''
def __init__(self, scriptCall, computationalResource, outputTable, exprName, parametersDictionary = None,
paramsThatChanged = None, outputFileName = None, exprFile = None):
'''
Constructor
'''
self.scriptCall = scriptCall
self.compResource = computationalResource
self.outputTable = outputTable
self.exprName = exprName
if outputFileName is None:
if parametersDictionary is None:
raise Exception("parametersDictionary may not be None if outputFileName is None")
self.outputFileName = createOutputFileNameFromParamsDict(parametersDictionary, exprName, paramsThatChanged)
else:
self.outputFileName = outputFileName
if exprFile is None:
if parametersDictionary is None:
raise Exception("parametersDictionary may not be None if exprFile is None")
self.exprFile = createExprFileFromParamsDict(parametersDictionary, exprName, paramsThatChanged)
else:
self.exprFile = exprFile
self.schedulerCommandFileOutputFilePath = '../runFiles/' + \
get_filename_from_params_dict(parametersDictionary,
paramsThatChanged) + '.pbs'
self.saveSchedulerCommandFile(self.schedulerCommandFileOutputFilePath)
def saveSchedulerCommandFile(self, schedulerCommandFileOutputFilePath, isLastJob = False, fileType = 'pbs'):
if fileType is 'pbs':
print "printing to ", schedulerCommandFileOutputFilePath, " ", self.compResource.orgFund
f = open(schedulerCommandFileOutputFilePath, 'w')
myStr = ""
if self.compResource.orgFund != 'unsponsored':
myStr += "#PBS -A " + self.compResource.orgFund + "\n"
myStr += "#PBS -N " + self.exprName + "\n"
myStr += "#PBS -q " + self.compResource.queue.name + "\n"
myStr += "\n"
myStr += "#PBS -j oe\n"
myStr += "\n"
myStr += "#PBS -M <EMAIL>\n" # send me email when job aborts (with an error)
if isLastJob:
myStr += "#PBS -m ae\n"
else:
myStr += "#PBS -m a\n"
myStr += "#PBS -o " + self.exprName +".$PBS_JOBID\n"
myStr += "#PBS -l nodes=1:ppn=" + str(self.compResource.numThreadsToUse) + "\n"
myStr += "#PBS -l walltime=" + str(convertHoursToTimeString(self.compResource.queue.maxtime)) + "\n"
myStr += "\n"
myStr += "cd $PBS_O_WORKDIR\n"
myStr += 'export PYTHONPATH="$PYTHONPATH:/work/hmedal/code/wnopt_cavs3"' + "\n"
myStr += self.scriptCall + " -e " + self.exprFile + " > " + self.outputFileName
f.write(myStr)
else:
raise Exception('invalid type')
class OptimizationExperimentBatch(object):
''''''
def __init__(self, computationalResource, filepathForBatch):
self.computationalResource = computationalResource
self.experimentsList = []
self.filepathForBatch = filepathForBatch
def addOptimizationExperiment(self, experiment):
self.experimentsList.append(experiment)
def writeBatchScript(self):
if self.computationalResource.type is 'local':
self.writeBatchScript_Local()
elif self.computationalResource.type is 'remote':
self.writeBatchScript_Remote()
else:
raise Exception('type is unknown')
def writeBatchScript_Local(self):
f = open(self.filepathForBatch, 'w')
myStr = "#!/bin/bash\n"
for experiment in self.experimentsList:
myStr += experiment.schedulerCommandFileOutputFilePath + "\n"
f.write(myStr)
os.system("chmod a+x " + self.filepathForBatch)
def writeBatchScript_Remote(self):
f = open(self.filepathForBatch, 'w')
myStr = "#!/bin/sh\n"
myStr += ". ~/.bashrc"
myStr += "\n"
print "printing " + str(len(self.experimentsList)) + " experiments in batch script"
for experiment in self.experimentsList:
print "experiment", experiment, experiment.compResource
myStr += experiment.compResource.schedulerCommand + " " + \
experiment.schedulerCommandFileOutputFilePath + "\n"
print "writing to file"
f.write(myStr)
def runBatchScript(self):
print "running batch script..."
os.system('ssh <EMAIL> "cd /work/hmedal/code/wnopt_cavs/exprBatchScripts; '
+ self.filepathForBatch)
print "...batch script ran"
def writeAndRun(self):
print "printing batch script..."
self.writeBatchScript()
print "...batch script printed"
self.runBatchScript() |
// Static profile record for the team leader; plain object literal with
// quoted keys (JSON-style) consumed elsewhere in the app.
let leader_info = {
    "name": "Diana Prince",
    "job": "Leader"
};
<reponame>Isaquehg/algorithms_and_data_structures<gh_stars>0
#include <iostream>
using namespace std;
// Reads a count followed by that many integers from stdin, then prints how
// many of them are both positive and even.
int main(){
    int tam; // number of values to read
    cin >> tam;

    // The original allocated `new int[tam]` and walked it twice with raw
    // pointers; each value is only inspected once, so we count while
    // reading — no allocation, no leak risk.
    int pospar = 0; // count of positive, even values
    for (int i = 0; i < tam; i++) {
        int valor;
        cin >> valor;
        if (valor > 0 && valor % 2 == 0)
            pospar++;
    }

    cout << pospar << endl;
    return 0;
}
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/importer/ImportType.java
package io.opensphere.core.importer;
/**
* The Enum ImportType.
*/
/**
 * The Enum ImportType.
 *
 * Distinguishes the kind of source an importer is asked to ingest.
 */
public enum ImportType
{
    /** A single file. */
    FILE,

    /** A group of related files imported together. */
    FILE_GROUP,

    /** A resource fetched from a URL. */
    URL
}
|
// Author : XuBenHao
// Version : 1.0.0
// Mail : <EMAIL>
// Copyright : XuBenHao 2020 - 2030
#ifndef DATA_STRUCT_DYNQUEUE_H
#define DATA_STRUCT_DYNQUEUE_H
#include "header.h"
#include "doublelist.h"
namespace NDataStruct
{
    // FIFO queue backed by a doubly linked list (DoubleList).
    // Elements enter at the tail (In) and leave from the head (Out/Peek).
    // Copying a queue copies the underlying list.
    template <typename T>
    class DynQueue
    {
    public:
        DynQueue();
        virtual ~DynQueue();
        DynQueue(const DynQueue& dqA_);
        DynQueue& operator=(const DynQueue& dqA_);

        // Append nValue_ at the tail of the queue.
        void In(const T& nValue_);
        // Remove and return the head element.
        // NOTE(review): throws a C-string literal ("queue is empty") when
        // empty — callers must catch const char*, not std::exception.
        T Out();
        // Return (without removing) the head element; same throw behavior
        // as Out() on an empty queue.
        T Peek() const;
        bool IsEmpty() const;
        int Size() const;

        // Returns a copy of the underlying list.
        DoubleList<T> GetList() const
        {
            return m_List;
        }
    private:
        DoubleList<T> m_List;  // storage; head of list == head of queue
    };

    template <typename T>
    DynQueue<T>::DynQueue()
    {
    }

    template <typename T>
    DynQueue<T>::~DynQueue()
    {
    }

    // Copy constructor: deep behavior delegated to DoubleList's operator=.
    template <typename T>
    DynQueue<T>::DynQueue(const DynQueue& dqA_)
    {
        m_List = dqA_.m_List;
    }

    // Copy assignment with self-assignment guard.
    template <typename T>
    DynQueue<T>& DynQueue<T>::operator=(const DynQueue& dqA_)
    {
        if (this == &dqA_)
        {
            return *this;
        }
        m_List = dqA_.m_List;
        return *this;
    }

    template <typename T>
    void DynQueue<T>::In(const T& nValue_)
    {
        m_List.InsertLast(nValue_);
    }

    template <typename T>
    T DynQueue<T>::Out()
    {
        if (IsEmpty())
        {
            throw "queue is empty";
        }
        // Copy the head value out before unlinking the node.
        typename DoubleList<T>::Node* _pFirst = m_List.GetFirst();
        T _nValue = _pFirst->GetValue();
        m_List.DeleteFirst();
        return _nValue;
    }

    template <typename T>
    T DynQueue<T>::Peek() const
    {
        if (IsEmpty())
        {
            throw "queue is empty";
        }
        typename DoubleList<T>::Node* _pFirst = m_List.GetFirst();
        return _pFirst->GetValue();
    }

    template <typename T>
    bool DynQueue<T>::IsEmpty() const
    {
        return m_List.IsEmpty();
    }

    template <typename T>
    int DynQueue<T>::Size() const
    {
        return m_List.GetSize();
    }
}
|
// Read configuration from the data-* attributes of this script's own tag:
// data-my_var_1 = API base URL, data-my_var_2 = schedule (horario) id.
var this_js_script = $('script[src*=apphorariodetalle]');
var my_var_1 = this_js_script.attr('data-my_var_1');
if (typeof my_var_1 === "undefined") {
    var my_var_1 = 'some_default_value';
}
var my_var_2 = this_js_script.attr('data-my_var_2');
if (typeof my_var_2 === "undefined") {
    var my_var_2 = 'some_default_value';
}
Vue.config.devtools = true;
Vue.component('timepicker', window.VueTimepicker.default);
// Root Vue instance for the schedule-detail page (#appd).
var v = new Vue({
    el: '#appd',
    data: {
        url: my_var_1,          // API base URL
        idhorario: my_var_2,    // id of the schedule being edited
        // Visibility flags for the add/edit modals (one pair per entity
        // type: materia, recreo/receso, hora sin clase).
        addModal: false,
        addModalRecreo: false,
        addModalHoraSinClase: false,
        editModalHoraSinClase: false,
        editModal: false,
        editModalRecreo: false,
        editModalSinClases: false,
        cargando: false,        // spinner flag while a request is in flight
        error: false,           // set when server-side validation fails
        //deleteModal:false,
        horarios: [],
        dias: [],
        // One array of schedule rows per weekday.
        lunes: [],
        martes: [],
        miercoles: [],
        jueves: [],
        viernes: [],
        materias: [],
        search: {text: ''},
        emptyResult: false,
        // Form model for the "add" modals; serialized via formData().
        newHorario: {
            idhorario: my_var_2,
            iddia: '',
            titulo: '',
            idmateria: '',
            idprofesormateria: '',
            horainicial: '',
            horafinal: '',
            urlvideoconferencia: '',
            numeroanfitrion:'',
            smserror: ''},
        chooseHorario: {},      // row currently selected for editing
        formValidate: [],       // server-side validation messages
        successMSG: ''
    },
    // On startup, load the full week of schedule rows plus the weekday and
    // subject catalogs used by the form selects.
    created() {
        this.showAll();
        this.showAllLunes();
        this.showAllMartes();
        this.showAllMiercoles();
        this.showAllJueves();
        this.showAllViernes();
        this.showAllDias();
        this.showAllMaterias();
    },
    methods: {
        // Modal openers: these only show the corresponding Bootstrap modal
        // via jQuery; the Vue *Modal flags in data are managed separately.
        modelAgregarMateria() {
            $('#addMateria').modal('show');
        },
        modelEditMateria() {
            $('#editMateria').modal('show');
        },
        modelAgregarHoraSinClase() {
            $('#addModalHoraSinClase').modal('show');
        },
        modelEditHoraSinClase() {
            $('#editModalSinClases').modal('show');
        },
        modelAgregarRecreo() {
            $('#addModalRecreo').modal('show');
        },
        modelEditRecreo() {
            $('#editModalRecreo').modal('show');
        },
showAll() {
axios.get(this.url + "Horario/showAll/").then(function (response) {
if (response.data.horarios == null) {
v.noResult()
} else {
response.data.horarios;
}
})
},
        // Per-day loaders: each fetches the rows of this schedule for one
        // weekday (path suffix 1=Monday ... 5=Friday) and stores them on
        // the matching data array.
        showAllLunes() {
            axios.get(this.url + "Horario/showAllDiaHorario/" + this.idhorario + "/1")
                .then(response => (this.lunes = response.data.horarios));
        },
        // Loads the weekday catalog used by the form selects.
        showAllDias() {
            axios.get(this.url + "Horario/showAllDias/")
                .then(response => (this.dias = response.data.dias));
        },
        showAllMartes() {
            axios.get(this.url + "Horario/showAllDiaHorario/" + this.idhorario + "/2")
                .then(response => (this.martes = response.data.horarios));
        },
        showAllMiercoles() {
            axios.get(this.url + "Horario/showAllDiaHorario/" + this.idhorario + "/3")
                .then(response => (this.miercoles = response.data.horarios));
        },
        showAllJueves() {
            axios.get(this.url + "Horario/showAllDiaHorario/" + this.idhorario + "/4")
                .then(response => (this.jueves = response.data.horarios));
        },
        showAllViernes() {
            axios.get(this.url + "Horario/showAllDiaHorario/" + this.idhorario + "/5")
                .then(response => (this.viernes = response.data.horarios));
        },
        // Loads the subject (materia) catalog used by the form selects.
        showAllMaterias() {
            axios.get(this.url + "Horario/showAllMaterias/")
                .then(response => (this.materias = response.data.materias));
        },
addHorario() {
v.cargando = true;
v.error = false;
var formData = v.formData(v.newHorario);
axios.post(this.url + "Horario/addMateriaHorario", formData).then(function (response) {
if (response.data.error) {
v.formValidate = response.data.msg;
v.error = true;
v.cargando = false;
} else {
swal({
position: 'center',
type: 'success',
title: 'Exito!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
}
})
},
addReceso() {
v.cargando = true;
v.error = false;
var formData = v.formData(v.newHorario);
axios.post(this.url + "Horario/addReceso", formData).then(function (response) {
if (response.data.error) {
v.formValidate = response.data.msg;
v.cargando = false;
v.error = true;
} else {
swal({
position: 'center',
type: 'success',
title: 'Exito!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
}
})
},
addHoraSinClases() {
v.cargando = true;
v.error = false;
var formData = v.formData(v.newHorario);
axios.post(this.url + "Horario/addHoraSinClases", formData).then(function (response) {
if (response.data.error) {
v.formValidate = response.data.msg;
v.cargando = false;
v.error = true;
} else {
swal({
position: 'center',
type: 'success',
title: 'Exito!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
}
})
},
updateHorario() {
v.cargando = true;
v.error = false;
var formData = v.formData(v.chooseHorario);
axios.post(this.url + "Horario/updateMateriaHorario", formData).then(function (response) {
if (response.data.error) {
v.formValidate = response.data.msg;
v.cargando = false;
v.error = true;
} else {
//v.successMSG = response.data.success;
swal({
position: 'center',
type: 'success',
title: 'Modificado!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
}
})
},
updateReceso() {
v.cargando = true;
v.error = false;
var formData = v.formData(v.chooseHorario);
axios.post(this.url + "Horario/updateReceso", formData).then(function (response) {
if (response.data.error) {
v.formValidate = response.data.msg;
v.cargando = false;
v.error = true;
} else {
//v.successMSG = response.data.success;
swal({
position: 'center',
type: 'success',
title: 'Modificado!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
}
})
},
updateHoraSinClases() {
v.cargando = true;
v.error = false;
var formData = v.formData(v.chooseHorario);
axios.post(this.url + "Horario/updateHoraSinClases", formData).then(function (response) {
if (response.data.error) {
v.formValidate = response.data.msg;
v.cargando = false;
v.error = true;
} else {
//v.successMSG = response.data.success;
swal({
position: 'center',
type: 'success',
title: 'Modificado!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
}
})
},
        // Reloads the five weekday arrays after any mutation; the catalogs
        // (dias, materias) and the unfiltered list are left as-is since
        // they do not change with row edits.
        cargar() {
            //this.showAll();
            this.showAllLunes();
            this.showAllMartes();
            this.showAllMiercoles();
            this.showAllJueves();
            this.showAllViernes();
            //this.showAllDias();
            //this.showAllMaterias();
        },
deleteHorario(id) {
Swal.fire({
title: '¿Eliminar Elemente?',
text: "Realmente desea eliminar el Elemente.",
type: 'question',
showCancelButton: true,
confirmButtonColor: '#3085d6',
cancelButtonColor: '#d33',
confirmButtonText: 'Eliminar',
cancelButtonText: 'Cancelar'
}).then((result) => {
if (result.value) {
axios.get(this.url + "Horario/deleteHorarioMateria", {
params: {
id: id
}
}).then(function (response) {
if (response.data.error == false) {
//v.noResult()
swal({
position: 'center',
type: 'success',
title: 'Eliminado!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
} else {
swal("Información", response.data.msg.msgerror, "info")
}
}).catch((error) => {
swal("Información", "No se puede eliminar el Elemente", "info")
})
}
})
},
deleteSinClases(id) {
Swal.fire({
title: '¿Eliminar Elemente?',
text: "Realmente desea eliminar el Elemente.",
type: 'question',
showCancelButton: true,
confirmButtonColor: '#3085d6',
cancelButtonColor: '#d33',
confirmButtonText: 'Eliminar',
cancelButtonText: 'Cancelar'
}).then((result) => {
if (result.value) {
axios.get(this.url + "Horario/deleteSinClases", {
params: {
id: id
}
}).then(function (response) {
if (response.data.error == false) {
//v.noResult()
swal({
position: 'center',
type: 'success',
title: 'Eliminado!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
} else {
swal("Información", response.data.msg.msgerror, "info")
}
}).catch((error) => {
swal("Información", "No se puede eliminar el Elemente", "info")
})
}
})
},
deleteReceso(id) {
Swal.fire({
title: '¿Eliminar Elemente?',
text: "Realmente desea eliminar el Elemente.",
type: 'question',
showCancelButton: true,
confirmButtonColor: '#3085d6',
cancelButtonColor: '#d33',
confirmButtonText: 'Eliminar',
cancelButtonText: 'Cancelar'
}).then((result) => {
if (result.value) {
axios.get(this.url + "Horario/deleteReceso", {
params: {
id: id
}
}).then(function (response) {
if (response.data.error == false) {
//v.noResult()
swal({
position: 'center',
type: 'success',
title: 'Eliminado!',
showConfirmButton: false,
timer: 1500
});
v.clearAll();
v.clearMSG();
v.cargar();
} else {
swal("Información", response.data.msg.msgerror, "info")
}
}).catch((error) => {
swal("Información", "No se puede eliminar el Elemente", "info")
})
}
})
},
formData(obj) {
var formData = new FormData();
for (var key in obj) {
formData.append(key, obj[key]);
}
return formData;
},
// Remember the slot the user clicked so later edit/update/delete actions
// know their target. The console.log looks like leftover debugging output.
selectHorario(horario) {
    v.chooseHorario = horario;
    console.log(v.chooseHorario);
},
// Clear the success banner shortly after it is shown.
clearMSG() {
    setTimeout(function () {
        v.successMSG = ''
    }, 1500); // hide the success message after 1.5 s
},
// Close every open modal, reset the new-slot form model and clear all UI
// state flags (loading, error, modal visibility).
clearAll() {
    $('#addMateria').modal('hide');
    $('#addModalHoraSinClase').modal('hide');
    $('#addModalRecreo').modal('hide');
    $('#editMateria').modal('hide');
    $('#editModalRecreo').modal('hide');
    $('#editModalSinClases').modal('hide');
    v.newHorario = {
        // NOTE(review): `my_var_2` looks like a corrupted or placeholder
        // token (it is not defined in this chunk) — confirm the intended
        // default, likely the currently selected timetable id.
        idhorario: my_var_2,
        iddia: '',
        titulo: '',
        idmateria: '',
        horainicial: '',
        horafinal: '',
        urlvideoconferencia: '',
        numeroanfitrion:'',
        smserror: ''};
    v.formValidate = false;
    v.addModal = false;
    v.editModal = false;
    v.passwordModal = false;
    v.deleteModal = false;
    v.addModalRecreo = false;
    v.editModalRecreo = false;
    v.addModalHoraSinClase = false;
    v.editModalHoraSinClase = false;
    v.editModalSinClases = false;
    v.cargando = false;
    v.error = false;
    // v.refresh()
}
}
})
|
/**
 * Builds an AST node representing `func.call(this)`.
 * NOTE(review): JSCall / JSAttribute / JSThis are assumed to be AST node
 * constructors provided elsewhere in this project — confirm.
 */
function simulateInvocation(func) {
    // `func.call` attribute lookup, invoked with `this` as the sole argument.
    const callee = JSAttribute(func, 'call');
    return JSCall(callee, [JSThis()]);
}
# Import necessary modules
from django.db import models
from django.conf import settings
import hoover.contrib.twofactor.models # Assuming the random_code function is defined in this module
# Define the Invitation model
class Invitation(models.Model):
    """One-time invitation code tied to a single user account."""
    # Explicit auto-increment primary key (Django would add one implicitly).
    id = models.AutoField(primary_key=True)
    # Random invitation code; generator lives in
    # hoover.contrib.twofactor.models.random_code (passed as callable default).
    code = models.CharField(max_length=200, default=hoover.contrib.twofactor.models.random_code)
    # Timestamp set once, when the row is first created.
    generated = models.DateTimeField(auto_now_add=True)
    # Each user holds at most one invitation; deleting the user deletes it.
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    def __str__(self):
        return f"Invitation {self.id} - Code: {self.code}, Generated: {self.generated}, User: {self.user}"
<filename>lib/cretonne/meta/isa/intel/defs.py
"""
Intel definitions.
Commonly used definitions.
"""
from __future__ import absolute_import
from cdsl.isa import TargetISA, CPUMode
import base.instructions
from . import instructions as x86
from base.immediates import floatcc
# Target ISA combining the shared base instruction group with the
# x86-specific instruction group.
ISA = TargetISA('intel', [base.instructions.GROUP, x86.GROUP])
# CPU modes for 32-bit and 64-bit operation.
I64 = CPUMode('I64', ISA)
I32 = CPUMode('I32', ISA)
# The set of floating point condition codes that are directly supported.
# Other condition codes need to be reversed or expressed as two tests.
supported_floatccs = [
    floatcc.ord,
    floatcc.uno,
    floatcc.one,
    floatcc.ueq,
    floatcc.gt,
    floatcc.ge,
    floatcc.ult,
    floatcc.ule]
|
#!/bin/bash
# Release helper: imports GPG key material, optionally pins the version to the
# Travis tag, then signs and deploys artifacts to Sonatype.
# Requires SONATYPE_USERNAME / SONATYPE_PASSWORD in the environment.
cd "$(dirname "$0")"/..
if [ -z "${SONATYPE_USERNAME}" ]
then
    echo "ERROR! Please set SONATYPE_USERNAME and SONATYPE_PASSWORD environment variable"
    exit 1
fi
if [ -z "${SONATYPE_PASSWORD}" ]
then
    echo "ERROR! Please set SONATYPE_PASSWORD environment variable"
    exit 1
fi
# Bug fix: the expansions below were unquoted, so a multi-line base64 value
# would be word-split and re-joined with spaces, which `base64 --decode`
# rejects. Quote them to preserve the value verbatim.
if [ -n "${GPG_SECRET_KEYS}" ]
then
    echo "${GPG_SECRET_KEYS}" | base64 --decode | "${GPG_EXECUTABLE}" --import
fi
if [ -n "${GPG_OWNERTRUST}" ]
then
    echo "${GPG_OWNERTRUST}" | base64 --decode | "${GPG_EXECUTABLE}" --import-ownertrust
fi
if [ -n "${TRAVIS_TAG}" ]
then
    echo "travis tag is set -> updating pom.xml <version> attribute to ${TRAVIS_TAG}"
    mvn --settings .travis/settings.xml org.codehaus.mojo:versions-maven-plugin:2.1:set -DnewVersion="${TRAVIS_TAG}" 1>/dev/null 2>/dev/null
else
    echo "no travis tag is set, hence keeping the snapshot version in pom.xml"
fi
mvn clean deploy --settings .travis/settings.xml -B -U -P release
SUCCESS=$?
if [ ${SUCCESS} -eq 0 ]
then
    echo "successfully deployed the jars to nexus"
fi
exit ${SUCCESS}
#!/bin/bash
##For More Information:http://blog.shvetsov.com/2013/02/access-android-app-data-without-root.html
##Script is Written By udit7395
##HOW TO USE: ./getDataWithoutRoot.sh <packagename>
#NOTE: This method doesn't work if application developer has explicitly disabled ability
#to backup his app by setting android:allowBackup="false" in the application manifest.
if [ -z "$1" ]
then
    echo "No packagename supplied"
    echo "Input as ----> ./getDataWithoutRoot.sh <packagename>"
else
    # Output directory named <package>_<timestamp> next to the script's cwd.
    path=$(pwd)/$1_$(date +%d_%m_%Y_%H_%M_%S)
    mkdir "$path"
    mkdir "$path/databases"
    mkdir "$path/sharedprefrences"
    mkdir backup
    cd backup || exit
    echo "Do not enter any Password"
    adb backup -f data.ab -noapk "$1"
    # Bug fix: previously a missing data.ab only skipped the "File found!"
    # message and the script then failed confusingly at the dd step. Abort
    # early and clean up instead.
    if [ ! -f data.ab ]; then
        echo "Backup file data.ab was not created - aborting."
        cd ..
        rm -rf backup
        exit 1
    fi
    echo "File found!"
    # Strip the 24-byte Android backup header and inflate the zlib stream.
    # Bug fix: the old one-liner used Python 2 text-mode I/O (sys.stdin.read /
    # sys.stdout.write), which corrupts or crashes on binary data under
    # Python 3. Use python3 with the binary buffer interface.
    dd if=data.ab bs=1 skip=24 | python3 -c "import zlib,sys;sys.stdout.buffer.write(zlib.decompress(sys.stdin.buffer.read()))" | tar -xvf -
    cd ..
    echo "$path"
    rsync --progress backup/apps/"$1"/db/*.db "$path"/databases
    rsync --progress backup/apps/"$1"/sp/*.xml "$path"/sharedprefrences
    rm -rf backup
fi
<reponame>osidorkin/Brunel<gh_stars>0
/*
* Copyright (c) 2015 IBM Corporation and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.brunel.util;
import org.brunel.action.Action;
import org.brunel.build.d3.D3Builder;
import org.brunel.build.util.BuilderOptions;
import org.brunel.data.Dataset;
import org.brunel.data.io.CSV;
import org.brunel.model.VisItem;
import com.google.gson.Gson;
/**
* Brunel integration methods provided for services and other languages. Only primitives are used for language integration methods
*
* Note, these methods currently assume a single dataset.
*
*/
/**
 * Brunel integration methods provided for services and other languages.
 * Only primitives are used for language integration methods.
 *
 * Note: these methods currently assume a single dataset.
 */
public class D3Integration {

    /** Shared serializer for result payloads. */
    private static final Gson gson = new Gson();

    /**
     * Create and return the Brunel results serialized as a JSON String.
     *
     * @param data      the data as a CSV String
     * @param brunelSrc the brunel syntax
     * @param width     the desired width for the visualization
     * @param height    the desired height for the visualization
     * @param visId     an identifier used in the SVG tag that will contain the visualization
     * @return a String that is JSON containing the Brunel JS, CSS and interactive control metadata.
     */
    //Note: This method is called from other languages.
    //Do not modify this method signature without checking all language integrations.
    public static String createBrunelJSON(String data, String brunelSrc, int width, int height, String visId) {
        return gson.toJson(createBrunelResult(data, brunelSrc, width, height, visId));
    }

    /**
     * Create and return the Brunel results as a Gson-serializable object
     * holding the generated JS, the CSS overrides and the control metadata.
     *
     * @param data      the data as a CSV String
     * @param brunelSrc the brunel syntax
     * @param width     the desired width for the visualization
     * @param height    the desired height for the visualization
     * @param visId     an identifier used in the SVG tag that will contain the visualization
     * @return a Gson serializable object containing the Brunel JS, CSS and interactive control metadata.
     */
    public static BrunelD3Result createBrunelResult(String data, String brunelSrc, int width, int height, String visId) {
        D3Builder builder = makeD3(makeBrunelData(data), brunelSrc, width, height, visId);
        BrunelD3Result result = new BrunelD3Result();
        result.css = builder.getStyleOverrides();
        result.js = builder.getVisualization().toString();
        result.controls = builder.getControls();
        return result;
    }

    /**
     * Creates a D3Builder that has built the d3 output for the given Brunel action.
     * Any failure is wrapped in an IllegalArgumentException naming the action text.
     */
    public static D3Builder makeD3(Dataset data, String actionText, int width, int height, String visId) {
        try {
            BuilderOptions options = new BuilderOptions();
            options.visIdentifier = visId;
            D3Builder builder = D3Builder.make(options);
            builder.build(makeVisItem(data, actionText), width, height);
            return builder;
        } catch (Exception ex) {
            ex.printStackTrace();
            throw new IllegalArgumentException("Could not execute Brunel: " + actionText, ex);
        }
    }

    /** Parses CSV text into a Dataset; an empty string yields null. */
    private static Dataset makeBrunelData(String data) {
        if (data.isEmpty()) return null;
        try {
            return Dataset.make(CSV.read(data));
        } catch (Exception e) {
            throw new IllegalArgumentException("Could not create data as CSV from content", e);
        }
    }

    /** Parses the Brunel action and applies it to the dataset when one is present. */
    private static VisItem makeVisItem(Dataset brunel, String actionText) {
        Action action = Action.parse(actionText);
        return brunel == null ? action.apply() : action.apply(brunel);
    }
}
|
public static boolean isPrime(int num) {
for(int i = 2; i <= Math.sqrt(num); i++) {
if(num % i == 0) {
return false;
}
}
return true;
} |
#!/usr/bin/env bash
# CI bootstrap: install PHP dependencies, prepare the MySQL test database,
# run migrations and load all fixtures non-interactively.
composer install
mysql -e 'CREATE DATABASE IF NOT EXISTS test;'
cp tests/app/config/db.mysql.php.dist tests/app/config/db.php
php tests/app/yii migrate --interactive=0
# Bug fix: the asterisk was unquoted, so the shell expanded it as a glob over
# the current directory's files instead of passing "*" (all fixtures) to yii.
php tests/app/yii fixture/load "*" --interactive=0
|
<filename>js/init.js<gh_stars>0
// Materialize + page bootstrap wrapped in a jQuery namespace IIFE.
(function($){
    $(function(){
        $('.sidenav').sidenav();
        $('.parallax').parallax();
        $(document).ready(function(){
            $('.collapsible').collapsible();
        });
    }); // end of document ready
    document.addEventListener('DOMContentLoaded', function() {
        var elems = document.querySelectorAll('select');
        // Bug fix: `options` was never defined, so this line threw a
        // ReferenceError and aborted initialization. Pass an empty options
        // object (Materialize defaults) instead.
        var instances = M.FormSelect.init(elems, {});
    });
    // Or with jQuery
    $(document).ready(function(){
        $('select').formSelect();
    });
    // Inject the curator.io published feed script asynchronously.
    (function(){
        var i, e, d = document, s = "script";i = d.createElement("script");i.async = 1;
        i.src = "https://cdn.curator.io/published/e64659c9-4f4a-4852-8eaf-1b8d5e5e52f6.js";
        e = d.getElementsByTagName(s)[0];e.parentNode.insertBefore(i, e);
    })();
})(jQuery); // end of jQuery name space
|
from ctdcal import fit_ctd
import numpy as np
import pandas as pd
import pytest
@pytest.mark.parametrize("xN, yN", [(1, 0), (0, 1), (1, 1), (2, 1), (1, 2)])
def test_multivariate_fit(xN, yN):
    """multivariate_fit: coefficient count, coefficient naming, input validation."""
    data = [0.0, 0.0]
    coef_names = ["x", "y"]
    expected_named = [f"x{i}" for i in range(1, xN + 1)] + [
        f"y{i}" for i in range(1, yN + 1)
    ]
    # Number of coefs produced is xN + yN plus the constant offset (all zero here).
    np.testing.assert_allclose(
        fit_ctd.multivariate_fit(data, (data, xN), (data, yN)), np.zeros(xN + yN + 1)
    )
    # Coefficients are named from the supplied prefixes, plus the "c0" offset.
    fit = fit_ctd.multivariate_fit(data, (data, xN), (data, yN), coef_names=coef_names)
    assert all(name in fit for name in expected_named)
    assert "c0" in fit
    # Mismatched lengths of inputs vs. coef_names raise ValueError.
    with pytest.raises(ValueError):
        fit_ctd.multivariate_fit(data, (data, xN), (data, yN), coef_names=["x"])
    # Non-tuple inputs raise TypeError.
    with pytest.raises(TypeError):
        fit_ctd.multivariate_fit(data, (data, xN), [data, yN])
def test_apply_polyfit():
    """apply_polyfit: polynomial corrections with and without dependent variables."""
    y = np.array([1, 2, 3])
    # Corrections from the primary coefficients only.
    primary_cases = [
        ((0,), y),
        ((1, 0), y + 1),
        ((0, 1), y + y),
        ((0, 0.5), y + 0.5 * y),
        ((0, 0, 1), y + y ** 2),
    ]
    for coefs, expected in primary_cases:
        np.testing.assert_array_equal(fit_ctd.apply_polyfit(y, coefs), expected)
    # Corrections involving a dependent variable; int and float coefs agree.
    dependent_cases = [
        ((0,), (0,), y),
        ((0,), (1,), y + y),
        ((0,), (1.0,), y + y),
        ((0.0,), (1,), y + y),
        ((0.0,), (1.0,), y + y),
        ((0,), (0, 1), y + y ** 2),
    ]
    for coefs, dep_coefs, expected in dependent_cases:
        np.testing.assert_array_equal(
            fit_ctd.apply_polyfit(y, coefs, (y, dep_coefs)), expected
        )
    # The dependent-variable argument must be a tuple, not a list.
    with pytest.raises(TypeError):
        fit_ctd.apply_polyfit(y, (0,), [y, (0,)])
|
#!/bin/bash
# Build and install Apache httpd 2.2.22 from the bundled source tarball,
# creating the unprivileged www user first. Must run as root.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
# Check if user is root
if [ $(id -u) != "0" ]; then
    echo "Error: You must be root to run this script, please use root to install lnmp"
    exit 1
fi
# Create the www user/group only if it does not exist yet.
# Bug fix: the old check ran a bare `id www` (noisy output) and then tested
# `` `echo $?` `` — fragile and pointless. Test the exit status directly and
# silence the lookup.
if ! id www >/dev/null 2>&1
then
    groupadd www
    useradd -s /sbin/nologin -g www www
fi
cd ./packages
tar zxvf httpd-2.2.22.tar.gz
cd httpd-2.2.22/
./configure --prefix=/usr/local/apache --enable-so --enable-rewrite
make && make install
# Bug fix: the source tree was "removed" from *inside* the extracted
# directory, so `rm -rf httpd-2.2.22` matched nothing and the tree was left
# behind. Step out first, delete it, then return to the original directory.
cd ..
rm -rf httpd-2.2.22
cd ..
\cp -rpv conf/httpd.conf /usr/local/apache/conf/httpd.conf
mkdir -p /usr/local/apache/conf/vhost
#include "iioservice/libiioservice_ipc/sensor_client.h"
#include <memory>
// Classify the raw payload returned by retrieveSensorData() for |sensorId|
// into a human-readable status string. Markers are checked in priority
// order — error first, then low_battery, then normal — so a payload
// containing several markers reports the most severe one.
std::string processSensorData(const std::string& sensorId) {
    const std::string raw = retrieveSensorData(sensorId);
    if (raw.find("error") != std::string::npos) {
        return "Error: Sensor data retrieval failed";
    }
    if (raw.find("low_battery") != std::string::npos) {
        return "Warning: Low battery detected";
    }
    if (raw.find("normal") != std::string::npos) {
        return "Sensor data is normal";
    }
    // No recognized marker in the payload.
    return "Unknown sensor data";
}
<filename>src/sentry/static/sentry/app/views/organizationIntegrations/constants.tsx
import {DocumentIntegration} from 'app/types';
// Installation-status labels shown on integration cards.
export const INSTALLED = 'Installed' as const;
export const NOT_INSTALLED = 'Not Installed' as const;
export const PENDING = 'Pending' as const;
export const LEARN_MORE = 'Learn More' as const;
// Theme color token used to render each installation status.
export const COLORS = {
  [INSTALLED]: 'success',
  [NOT_INSTALLED]: 'gray500',
  [PENDING]: 'orange300',
  [LEARN_MORE]: 'gray500',
} as const;
/**
 * Integrations in the integration directory should be sorted by their popularity (weight). The weights should reflect the relative popularity of each integration are hardcoded.
 */
export const POPULARITY_WEIGHT: {
  [key: string]: number;
} = {
  // First-party-integrations
  slack: 50,
  github: 20,
  jira: 10,
  bitbucket: 10,
  gitlab: 10,
  pagerduty: 10,
  vsts: 10,
  jira_server: 10,
  bitbucket_server: 10,
  github_enterprise: 10,
  // Sentry-apps
  clubhouse: 9,
  rookout: 9,
  clickup: 9,
  amixr: 9,
  split: 9,
  // Plugins
  webhooks: 10,
  asana: 8,
  trello: 8,
  heroku: 8,
  pivotal: 8,
  twilio: 8,
  pushover: 5,
  redmine: 5,
  phabricator: 5,
  opsgenie: 5,
  teamwork: 5,
  victorops: 5,
  sessionstack: 5,
  segment: 2,
  'amazon-sqs': 2,
  splunk: 2,
  //doc integrations
  fullstory: 8,
  datadog: 8,
  msteams: 8,
  netlify: 8,
  asayer: 8,
  rocketchat: 8,
  bitbucket_release_pipe: 8,
} as const;
// Static catalog of documentation-only integrations (no in-app install flow;
// each entry links out to external docs/source/issue trackers).
export const documentIntegrationList: DocumentIntegration[] = [
  {
    slug: 'fullstory',
    name: 'FullStory',
    author: '<NAME>',
    docUrl: 'https://www.npmjs.com/package/@sentry/fullstory',
    description:
      'The Sentry-FullStory integration seamlessly integrates the Sentry and FullStory platforms. When you look at a browser error in Sentry, you will see a link to the FullStory session replay at that exact moment in time. When you are watching a FullStory replay and your user experiences an error, you will see a link that will take you to that error in Sentry.',
    features: [
      {
        featureGate: 'session-replay',
        description:
          'Links Sentry errors to the FullStory session replay and vice-versa.',
      },
    ],
    resourceLinks: [
      {
        title: 'Documentation',
        url: 'https://www.npmjs.com/package/@sentry/fullstory',
      },
      {title: 'View Source', url: 'https://github.com/getsentry/sentry-fullstory'},
      {
        title: 'Report Issue',
        url: 'https://github.com/getsentry/sentry-fullstory/issues',
      },
    ],
  },
  {
    slug: 'datadog',
    name: 'Datadog',
    author: 'Datadog',
    docUrl: 'https://docs.datadoghq.com/integrations/sentry/',
    description:
      'Quickly discover relationships between production apps and systems performance. See correlations between Sentry events and metrics from infra services like AWS, Elasticsearch, Docker, and Kafka can save time detecting sources of future spikes.',
    features: [
      {
        featureGate: 'incident-management',
        description:
          'Manage incidents and outages by sending Sentry notifications to DataDog.',
      },
      {
        featureGate: 'alert-rule',
        description:
          'Configure Sentry rules to trigger notifications based on conditions you set through the Sentry webhook integration.',
      },
    ],
    resourceLinks: [
      {title: 'Documentation', url: 'https://docs.datadoghq.com/integrations/sentry/'},
    ],
  },
  {
    slug: 'msteams',
    name: 'Microsoft Teams',
    author: 'Microsoft',
    docUrl:
      'https://appsource.microsoft.com/en-us/product/office/WA104381566?src=office&tab=Overview',
    description:
      "Microsoft Teams is a hub for teamwork in Office 365. Keep all your team's chats, meetings, files, and apps together in one place.",
    features: [
      {
        featureGate: 'chat',
        description: 'Get Sentry notifications in Microsoft Teams.',
      },
      {
        featureGate: 'alert-rule',
        description:
          'Configure Sentry rules to trigger notifications based on conditions you set through the Sentry webhook integration.',
      },
    ],
    resourceLinks: [
      {
        title: 'Documentation',
        url:
          'https://appsource.microsoft.com/en-us/product/office/WA104381566?src=office&tab=Overview',
      },
    ],
  },
  {
    slug: 'asayer',
    name: 'Asayer',
    author: '<NAME>',
    docUrl: 'https://docs.asayer.io/integrations/sentry',
    description:
      'Asayer is a session replay tool for developers. Replay each user session alongside your front/backend logs and other data spread across your stack so you can immediately find, reproduce and fix bugs faster.',
    features: [
      {
        featureGate: 'session-replay',
        description:
          'By integrating Sentry with Asayer, you can see the moments that precede and that lead up to each problem. You can sync your Sentry logs alongside your session replay, JS console and network activity to gain complete visibility over every issue that affect your users.',
      },
    ],
    resourceLinks: [
      {title: 'Documentation', url: 'https://docs.asayer.io/integrations/sentry'},
    ],
  },
  {
    slug: 'rocketchat',
    name: 'Rocket.Chat',
    author: 'Rocket.Chat',
    docUrl: 'https://rocket.chat/docs/administrator-guides/integrations/sentry/',
    description:
      'Rocket.Chat is a free and open-source team chat collaboration platform that allows users to communicate securely in real-time across devices on the web, desktop or mobile and to customize their interface with a range of plugins, themes, and integrations with other key software.',
    features: [
      {
        featureGate: 'chat',
        description: 'Get Sentry notifications in Rocket.Chat.',
      },
      {
        featureGate: 'alert-rule',
        description:
          'Configure Sentry rules to trigger notifications based on conditions you set through the Sentry webhook integration.',
      },
    ],
    resourceLinks: [
      {
        title: 'Documentation',
        url: 'https://rocket.chat/docs/administrator-guides/integrations/sentry/',
      },
    ],
  },
  {
    slug: 'netlify',
    name: 'Netlify Build Plugin',
    author: '<NAME>',
    docUrl: 'https://www.npmjs.com/package/@sentry/netlify-build-plugin',
    description:
      'The Sentry Netlify build plugin automatically uploads source maps and notifies Sentry of new releases being deployed to your site after it finishes building in Netlify.',
    features: [
      {
        featureGate: 'release-management',
        description: 'Notify Sentry of new releases being deployed.',
      },
    ],
    resourceLinks: [
      {
        title: 'Documentation',
        url: 'https://www.npmjs.com/package/@sentry/netlify-build-plugin',
      },
      {
        title: 'View Source',
        url: 'https://github.com/getsentry/sentry-netlify-build-plugin',
      },
      {
        title: 'Report Issue',
        url: 'https://github.com/getsentry/sentry-netlify-build-plugin/issues',
      },
    ],
  },
  {
    slug: 'bitbucket_release_pipe',
    name: 'Bitbucket Release Pipe',
    author: '<NAME>',
    docUrl:
      'https://bitbucket.org/product/features/pipelines/integrations?p=sentryio/sentry-new-release',
    description:
      'Notify Sentry of any Bitbucket Pipelines builds to automatically manage releases and quickly surface any errors associated with a given build. **Requirement:** Bitbucket source code integration must be installed for the release pipe to work.',
    features: [
      {
        featureGate: 'release-management',
        description: 'Notify Sentry of new releases being deployed.',
      },
    ],
    resourceLinks: [
      {
        title: 'View Source',
        url: 'https://bitbucket.org/sentryio/sentry-new-release/src/master/',
      },
      {
        title: 'Report Issue',
        url: 'https://bitbucket.org/sentryio/sentry-new-release/issues',
      },
    ],
  },
];
// Same catalog keyed by slug for O(1) lookup.
export const documentIntegrations: {
  [key: string]: DocumentIntegration;
} = Object.fromEntries(
  documentIntegrationList.map(integration => [integration.slug, integration])
);
|
package javafx.scene.control.skin;
import com.sun.javafx.scene.control.behavior.TextFieldBehavior;
import javafx.scene.control.TextField;
/**
* Text field skin.
*
* (empty as we rely on the target toolkit for now)
*/
public class TextFieldSkin extends TextInputControlSkin<TextField, TextFieldBehavior> {

    /**
     * This group contains the text, caret, and selection rectangle.
     * It is clipped. The textNode, selectionHighlightPath, and
     * caret are each translated individually when horizontal
     * translation is needed to keep the caretPosition visible.
     */
    private final ToolkitTextBox textGroup; // WebFx change

    /**
     * Create a new TextFieldSkin with the default behavior.
     * (Password-field-specific behavior is commented out — the target
     * toolkit handles masking for now.)
     * @param textField not null
     */
    public TextFieldSkin(final TextField textField) {
        this(textField, /*(textField instanceof PasswordField)
            ? new PasswordFieldBehavior((PasswordField)textField)
            :*/ new TextFieldBehavior(textField));
    }

    /**
     * Create a new TextFieldSkin with an explicit behavior, wiring the
     * toolkit-backed text box into the skin's children.
     */
    public TextFieldSkin(final TextField textField, final TextFieldBehavior behavior) {
        super(textField, behavior);
        textGroup = new ToolkitTextBox(textField);
        getChildren().add(textGroup);
    }

    // Min height collapses to the preferred height: the text box has a fixed
    // natural height for a single line of text.
    @Override protected double computeMinHeight(double width, double topInset, double rightInset, double bottomInset, double leftInset) {
        return computePrefHeight(width, topInset, rightInset, bottomInset, leftInset);
    }

    // Preferred height is the inner text box's preferred height plus insets.
    @Override protected double computePrefHeight(double width, double topInset, double rightInset, double bottomInset, double leftInset) {
        return topInset + textGroup.prefHeight(width) + bottomInset;
    }

    // Max height defers to the control's own preferred height.
    @Override protected double computeMaxHeight(double width, double topInset, double rightInset, double bottomInset, double leftInset) {
        return getSkinnable().prefHeight(width);
    }
}
|
#!/bin/bash
# ePSXe emulator is property of ePSXe team, http://epsxe.com/, under Proprietary license.
# ePSXe64Ubuntu.sh and formerly e64u.sh scripts are property of Brandon Lee Camilleri ( blc / brandleesee / Yrvyne , https://twitter.com/brandleesee , https://www.reddit.com/user/Yrvyne/ )
# ePSXe64Ubuntu.sh and formerly e64u.sh scripts are protected under the vestiges of GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007.
# Disclaimer: Brandon Lee Camilleri ( blc / brandleesee / Yrvyne ) does not assume any responsibilities and shall not be held liable should ePSXe64Ubuntu.sh, e64u.sh, shaders.zip, .ePSXe.svg CHANGELOG.md and/or README.md fail in their intended purpose, attempt and usage and/or break the system/s being used on.
# Brandon Lee Camilleri ( blc / brandleesee / Yrvyne ) can be reached on brandon.camilleri.90@gmail.com
# ePSXe64Ubuntu repository can be found at https://github.com/brandleesee/ePSXe64Ubuntu
# Leave anything with ~ unquoted so it expands properly. This lets us handle complicated home directory locations
ver="11.6"                          # script version shown in the greeting
ins="ePSXe205linux_x64.zip"         # upstream ePSXe archive to download
hme=~                               # user home directory
hid=~/.epsxe                        # ePSXe config/data directory
bkp=~/ePSXe_backups/$(date "+%F-%T-%Z")   # timestamped backup destination
cor=~/.local/share/applications     # per-user .desktop launcher directory
exe=~/.local/bin/ePSXe              # installed (possibly patched) executable
dls="https://raw.githubusercontent.com/brandleesee/ePSXe64Ubuntu/master"  # asset base URL
opt=("Download" "Restore from backup")   # shader-menu options (plus "Do nothing")
PS3="Choose from 1 to 3 above. "    # select-menu prompt
PROTO="http"                        # protocol for Ubuntu package mirror
MIRROR="archive.ubuntu.com"         # Ubuntu package mirror host
# Verify that the file given in $1 exists and its SHA-256 digest equals $2.
# Prints a colored status line; exits the whole script on a missing file or
# a digest mismatch.
check_sha256sum() {
    file=$1
    expected=$2
    if [ ! -f "${file}" ]; then
        tput setaf 1; echo " ERROR: File ${file} doesn't exist"; tput sgr0
        exit 1
    fi
    # sha256sum prints "<64-hex-digest>  <name>"; the first 64 bytes are the digest.
    actual="$(sha256sum "${file}" | head -c 64)"
    if [ "${actual}" = "${expected}" ]; then
        tput setaf 2; echo "${file} matches provided sha256sum"; tput sgr0
    else
        tput setaf 1; echo " ERROR: ${file} doesn't match provided sha256sum"; tput sgr0
        exit 1
    fi
}
# Greet the user and explain the one manual step (pinning the launcher icon).
tput setaf 2; echo "Welcome to ePSXe64Ubuntu.sh script, $ver."; tput sgr0
tput setaf 1; echo "When ePSXe window appears on screen:"; tput sgr0
tput setaf 1; echo " Right click on icon in Dash/Dock/Panel"; tput sgr0
tput setaf 1; echo " Add to Favorites/Lock"; tput sgr0
tput setaf 1; echo " CLOSE ePSXe GUI to continue with the script."; tput sgr0
tput setaf 2; echo "Script started."; tput sgr0
sudo apt-get update
sudo apt-get -y install wget sed
# xxd was provided by vim-common on older distros
sudo apt-get -y install xxd || sudo apt-get -y install vim-common
# Install ubuntu 18.04 version of openssl1.0.0 if it's not known to our version of our distribution
if ! apt-cache show libssl1.0.0 2>/dev/null|grep -q '^Package: libssl1.0.0$'
then
    filename="libssl1.0.0_1.0.2n-1ubuntu5_amd64.deb"
    # Bug fix: "$(unknown)" invoked a nonexistent command and produced empty
    # paths/URLs; the intended expansion is the package file name.
    tempfile="/tmp/${filename}"
    filehash="fcadc659174561b7a925e4f17e9de7451f4fb556a032fea1ed2ff800ed3a285e"
    wget "${PROTO}://${MIRROR}/ubuntu/pool/main/o/openssl1.0/${filename}" -O "${tempfile}"
    check_sha256sum "${tempfile}" "${filehash}"
    sudo dpkg --force-depends -i "${tempfile}"
    sudo apt-get -y install -f
    rm "${tempfile}"
fi
# Same for the ecm tool when the distro no longer packages it.
if ! apt-cache show ecm 2>/dev/null|grep -q '^Package: ecm$'
then
    filename="ecm_1.03-1build1_amd64.deb"
    tempfile="/tmp/${filename}"
    filehash="3889b926bcaed64bfc66f20c27f943e63ec41c701d1d50682b21f06f95d6fcfd"
    wget "${PROTO}://${MIRROR}/ubuntu/pool/universe/c/cmdpack/${filename}" -O "${tempfile}"
    check_sha256sum "${tempfile}" "${filehash}"
    sudo dpkg --force-depends -i "${tempfile}"
    sudo apt-get -y install -f
    rm "${tempfile}"
fi
# Installs required packages per OS. On libcurl4 systems, extract libcurl3's
# shared object manually so both curl ABIs can coexist.
if apt-cache show libcurl4 2>/dev/null|grep -q '^Package: libcurl4$'
then
    sudo apt-get -y install libncurses5 libsdl-ttf2.0-0 libssl1.0.0 ecm unzip
    filename="libcurl3_7.58.0-2ubuntu2_amd64.deb"
    tempfile="/tmp/${filename}"
    filehash="26d8e98614a55013b35afac465081ec17c9d931ee11f648bca7c3cbaefb404af"
    wget "${PROTO}://${MIRROR}/ubuntu/pool/main/c/curl3/${filename}" -O "${tempfile}"
    check_sha256sum "${tempfile}" "${filehash}"
    sudo mkdir /tmp/libcurl3
    sudo dpkg-deb -x "${tempfile}" /tmp/libcurl3
    sudo cp -vn /tmp/libcurl3/usr/lib/x86_64-linux-gnu/libcurl.so.4.5.0 /usr/lib/x86_64-linux-gnu/libcurl.so.3
    sudo rm -rf /tmp/libcurl3
    rm "${tempfile}"
else
    sudo apt-get -y install libcurl3 libsdl-ttf2.0-0 libssl1.0.0 ecm unzip
fi
# Back-up function: move any existing config dir aside into a timestamped folder.
if [ -d "$hid" ]; then
    mkdir -p "$bkp"
    mv "$hid" "$bkp"
fi
# Removes duplicate of ePSXe executable
if [ -e "$exe" ]; then
    rm -rf "$exe"
fi
# Downloads Icon
mkdir -p "$hme/.local/share/ePSXe"
wget -q "$dls/.ePSXe.svg" -O "$hme/.local/share/ePSXe/ePSXe.svg"
# Checks and creates icon data for Dash/Dock/Panel
if [ -e "$cor/ePSXe.desktop" ]; then
    rm -rf "$cor/ePSXe.desktop"
fi
echo "[Desktop Entry]" > "/tmp/ePSXe.desktop"
{
    echo "Type=Application"
    echo "Terminal=false"
    echo "Exec=$exe"
    echo "Name=ePSXe"
    echo "Comment=Created using ePSXe64Ubuntu from https://github.com/brandleesee"
    echo "Icon=$hme/.local/share/ePSXe/ePSXe.svg"
    echo "Categories=Game;Emulator;"
} >> "/tmp/ePSXe.desktop"
mkdir -p "$cor"
mv "/tmp/ePSXe.desktop" "$cor/ePSXe.desktop"
# Sets up ePSXe: fetch the archive (https first, http fallback) and unpack it.
wget -q "https://www.epsxe.com/files/$ins" -P "/tmp" || wget -q "http://www.epsxe.com/files/$ins" -P "/tmp"
unzip -qq "/tmp/$ins" -d "/tmp"
mkdir -p "$(dirname $exe)"
if apt-cache show libcurl4 2>/dev/null|grep -q '^Package: libcurl4$'
then
    # On libcurl4 systems, binary-patch the executable via a hex dump so it
    # links against libcurl.so.3 (installed above).
    # NOTE(review): the sed replaces one fixed line of the xxd dump — it only
    # works for this exact binary build; the checksum below guards against drift.
    xxd /tmp/epsxe_x64 /tmp/epsxe_x64.xxd
    sed -i '6434c \00019210: 2e73 6f2e 3300 6375 726c 5f65 6173 795f .so.3.curl_easy_' /tmp/epsxe_x64.xxd
    xxd -r /tmp/epsxe_x64.xxd "$exe"
    rm -f /tmp/epsxe_x64.xxd
    if ! sha256sum -c --quiet <(echo "45fb1ee4cb21a5591de64e1a666e4c3cacb30fcc308f0324dc5b2b57767e18ee $exe")
    then
        tput setaf 1; echo "WARNING: patched $exe did not match checksum, using original executable instead"; tput sgr0
        cp -f /tmp/epsxe_x64 "$exe"
    fi
    rm -f /tmp/epsxe_x64
else
    mv "/tmp/epsxe_x64" "$exe"
fi
chmod +x "$exe"
# Launch once so the user can pin the icon; the script resumes when the GUI closes.
"$exe"
# Transfers docs folder to .epsxe
mkdir -p "$hid"
mv "/tmp/docs" "$hid"
# Activates BIOS HLE by rewriting fixed lines of the generated config.
# NOTE(review): line numbers 11/14 are tied to this ePSXe version's epsxerc layout.
sed -i '11c \BiosPath = ' "$hid/epsxerc"
sed -i '14c \BiosHLE = 1' "$hid/epsxerc"
# Restores Back-Up (per-subfolder so the freshly written config is kept).
if [ -d "$bkp/.epsxe" ]; then
    cp -r "$bkp/.epsxe/bios/." "$hid/bios"
    cp -r "$bkp/.epsxe/cheats/." "$hid/cheats"
    cp -r "$bkp/.epsxe/config/." "$hid/config"
    cp -r "$bkp/.epsxe/configs/." "$hid/configs"
    cp -r "$bkp/.epsxe/covers/." "$hid/covers"
    cp -r "$bkp/.epsxe/docs/." "$hid/docs"
    cp -r "$bkp/.epsxe/idx/." "$hid/idx"
    cp -r "$bkp/.epsxe/info/." "$hid/info"
    cp -r "$bkp/.epsxe/memcards/." "$hid/memcards"
    cp -r "$bkp/.epsxe/patches/." "$hid/patches"
    cp -r "$bkp/.epsxe/plugins/." "$hid/plugins"
    cp -r "$bkp/.epsxe/sstates/." "$hid/sstates"
fi
# Function for Shaders: interactive menu — download fresh, restore backup, or skip.
tput setaf 2; echo "Shaders Menu"; tput sgr0
select opt in "${opt[@]}" "Do nothing"; do
    case "$REPLY" in
        1 )
            wget -q "$dls/shaders.zip" -P "/tmp"
            unzip -qq "/tmp/shaders.zip" -d "$hid/shaders"
            echo "This choice has downloaded shaders from ePSXe64Ubuntu repository.";
            break
            ;;
        2 )
            cp -r "$bkp/.epsxe/shaders/." "$hid/shaders"
            break
            ;;
        $(( ${#opt[@]}+1 )) ) echo "This choice has left the shaders folder empty."; break;;
        *) echo "Invalid option. Choose from 1 to 3.";continue;;
    esac
done
# Removes clutter
rm -rf "/tmp/$ins"
rm -rf "/tmp/shaders.zip"
tput setaf 2; echo "Script finished."; tput sgr0
|
<filename>src/components/ui/stories/modal.stories.tsx
import React from "react";
import { Modal } from "../Modal";
import { Typography } from "@mui/material";
// import { action } from "@storybook/addon-actions";
// Prefer addon-control
// Props shared by every story variant (currently none).
const defaultProps = {};
// Storybook metadata: places these stories under VN/design-system/Modal.
export default {
  title: "VN/design-system/Modal",
  component: Modal,
  // decoractors: [(storyFn) => <div>{storyFn()}</div>
};
// Default story: Modal rendered with storybook-controlled args spread in.
export const basic = (args) => (
  <Modal {...defaultProps} {...args}>
    <div>
      <Typography>"I am an humble modal content.</Typography>
    </div>
  </Modal>
);
basic.story = {
  name: "default props",
  // decorators: [...],
  // parameters: {...}
};
/** deprecated: we no longer used styled-components modifiers library
export const vulcan = () => (
<Modal modifiers={["vulcan"]} {...defaultProps}>
<div>
<Typography>"I am a Vulcan modal, very orange."</Typography>
</div>
</Modal>
);
*/
|
#!/bin/bash
# Release script for simpleflatmapper: prepares the release on Java 8, then
# performs it once per JDK (7, 9, 8) against the same staging repository.
# Each javaN helper switches the system JDK via update-alternatives and
# exports the matching JAVA_HOME.
function java9 {
    sudo update-alternatives --set java /usr/lib/jvm/java-9-oracle/bin/java;export JAVA_HOME=/usr/lib/jvm/java-9-oracle
}
function java8 {
    sudo update-alternatives --set java /usr/lib/jvm/java-8-oracle/jre/bin/java;export JAVA_HOME=/usr/lib/jvm/java-8-oracle
}
function java7 {
    sudo update-alternatives --set java /usr/lib/jvm/java-7-oracle/jre/bin/java;export JAVA_HOME=/usr/lib/jvm/java-7-oracle
}
function java6 {
    sudo update-alternatives --set java /usr/lib/jvm/java-6-oracle/jre/bin/java;export JAVA_HOME=/usr/lib/jvm/java-6-oracle
}
#echo "change versions"
#exit
java8
rm release.properties
# Release/development versions and the pre-created Sonatype staging repo id.
REL=6.0.7
DEV=6.0.8-SNAPSHOT
REPOID=orgsimpleflatmapper-1633
mvn --batch-mode -Dtag=sfm-parent-$REL -Pdev release:prepare \
                 -DreleaseVersion=$REL \
                 -DdevelopmentVersion=$DEV
# Stash release.properties so each per-JDK perform can reuse it.
cp release.properties tmp/release.properties
#GPG_TTY=$(tty)
#export GPG_TTY
java7
cp tmp/release.properties .
mvn release:perform -Darguments="-DstagingRepositoryId=$REPOID"
java9
cp tmp/release.properties .
# JDK 9 needs --add-opens flags for reflection used during the build.
export MAVEN_OPTS="--add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.text=ALL-UNNAMED --add-opens java.desktop/java.awt.font=ALL-UNNAMED "
mvn release:perform -Darguments="-DstagingRepositoryId=$REPOID"
unset MAVEN_OPTS
java8
cp tmp/release.properties .
mvn release:perform -Darguments="-DstagingRepositoryId=$REPOID"
|
import { Range, TextEdit, Position } from 'vscode-languageserver-types';

// Re-export the LSP primitives so consumers only need this module.
export { Range, TextEdit, Position };

/**
 * Marker interface for completion participants (no members yet).
 */
export interface ICompletionParticipant {
}

/**
 * Read-only lookup over the known databases.
 * The find* variants return null when the entity does not exist; the
 * get* variants return lists (possibly empty).
 */
export interface IDatabaseServices {
    getDatabaseList(): IDatabase[];
    getTables(db: string): ITable[];
    getColumns(db: string, table: string): IColumn[];
    findDatabase(db: string): IDatabase | null;
    findTable(db: string, table: string): ITable | null;
    findColumn(db: string, table: string, col: string): IColumn | null;
}

/** A database and its tables; arbitrary extra metadata may be attached. */
export interface IDatabase {
    name: string;
    tables: ITable[];
    [key: string]: any;
}

/** A table and its columns; arbitrary extra metadata may be attached. */
export interface ITable {
    name: string;
    columns: IColumn[];
    [key: string]: any;
}

/** A single column; arbitrary extra metadata may be attached. */
export interface IColumn {
    name: string;
    [key: string]: any;
}
|
/*
* Copyright (c) 2019-2021. <NAME> and others.
* https://github.com/mfvanek/pg-index-health
*
* This file is a part of "pg-index-health" - a Java library for
* analyzing and maintaining indexes health in PostgreSQL databases.
*
* Licensed under the Apache License 2.0
*/
package io.github.mfvanek.pg.common.health.logger;
import javax.annotation.Nonnull;
/**
 * A two-part key under which a health-check value is logged.
 * Implementations provide a main key and a sub key; both are non-null.
 */
@SuppressWarnings("WeakerAccess")
public interface LoggingKey {

    /** Returns the main key name; never null. */
    @Nonnull
    String getKeyName();

    /** Returns the sub key name; never null. */
    @Nonnull
    String getSubKeyName();
}
|
/*
* Copyright © 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ltgt.gradle.apt;
import groovy.util.Node;
import groovy.util.NodeList;
import java.io.File;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.internal.HasConvention;
import org.gradle.api.plugins.ExtensionAware;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.tasks.SourceSet;
import org.gradle.plugins.ide.idea.GenerateIdeaModule;
import org.gradle.plugins.ide.idea.IdeaPlugin;
import org.gradle.plugins.ide.idea.model.IdeaModel;
import org.gradle.plugins.ide.idea.model.IdeaModule;
import org.gradle.plugins.ide.idea.model.IdeaProject;
/**
 * Gradle plugin that wires annotation processing into IntelliJ IDEA:
 * adds generated-source directories and annotation-processor
 * configurations to the IDEA module model, and enables annotation
 * processing in the generated .ipr of the root project.
 */
public class AptIdeaPlugin implements Plugin<Project> {
  // True when Gradle runs as part of an IDEA project import: the IDE sets
  // both "idea.active" and "idea.version" system properties in that case.
  private static final boolean isIdeaImport =
      Boolean.getBoolean("idea.active") && System.getProperty("idea.version") != null;

  // Reflection-based class availability check.
  // NOTE(review): not referenced anywhere in this block — confirm whether
  // it is still needed before removing.
  private static boolean classExists(String name) {
    try {
      Class.forName(name);
      return true;
    } catch (ClassNotFoundException e) {
      return false;
    }
  }

  /**
   * Applies the apt and idea plugins, then configures the IDEA module
   * (for Java projects only) and the IDEA project model.
   */
  @Override
  public void apply(final Project project) {
    project.getPlugins().apply(AptPlugin.class);
    project.getPlugins().apply(IdeaPlugin.class);
    // Module-level configuration only applies once the Java plugin is present.
    project
        .getPlugins()
        .withType(
            JavaPlugin.class,
            javaPlugin -> {
              JavaPluginConvention javaConvention =
                  project.getConvention().getPlugin(JavaPluginConvention.class);
              SourceSet mainSourceSet =
                  javaConvention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME);
              SourceSet testSourceSet =
                  javaConvention.getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME);
              configureIdeaModule(project, mainSourceSet, testSourceSet);
            });
    configureIdeaProject(project);
  }

  /**
   * Registers the "apt" extension on the IDEA module and, after project
   * evaluation, adds generated-sources directories and the
   * annotation-processor configurations to the IDEA module model.
   */
  private void configureIdeaModule(
      Project project, final SourceSet mainSourceSet, final SourceSet testSourceSet) {
    final IdeaModule ideaModule = project.getExtensions().getByType(IdeaModel.class).getModule();
    final ModuleApt apt = new ModuleApt();
    ((ExtensionAware) ideaModule).getExtensions().add("apt", apt);
    // Deferred to afterEvaluate so user configuration of the extension is seen.
    project.afterEvaluate(
        project1 -> {
          if (apt.isAddGeneratedSourcesDirs()) {
            Collection<File> mainGeneratedSourcesDirs =
                AptPlugin.IMPL.getGeneratedSourcesDirs(mainSourceSet.getOutput()).getFiles();
            Collection<File> testGeneratedSourcesDirs =
                AptPlugin.IMPL.getGeneratedSourcesDirs(testSourceSet.getOutput()).getFiles();
            // For some reason, modifying the existing collections doesn't work.
            // We need to copy the values and then assign it back.
            if (!mainGeneratedSourcesDirs.isEmpty()) {
              ideaModule.setSourceDirs(
                  addToSet(ideaModule.getSourceDirs(), mainGeneratedSourcesDirs));
              ideaModule.setGeneratedSourceDirs(
                  addToSet(ideaModule.getGeneratedSourceDirs(), mainGeneratedSourcesDirs));
            }
            if (!testGeneratedSourcesDirs.isEmpty()) {
              ideaModule.setTestSourceDirs(
                  addToSet(ideaModule.getTestSourceDirs(), testGeneratedSourcesDirs));
              ideaModule.setGeneratedSourceDirs(
                  addToSet(ideaModule.getGeneratedSourceDirs(), testGeneratedSourcesDirs));
            }
          }
          if (apt.isAddAptDependencies()) {
            final Configuration annotationProcessor =
                project1
                    .getConfigurations()
                    .getByName(
                        AptPlugin.IMPL.getAnnotationProcessorConfigurationName(mainSourceSet));
            final Configuration testAnnotationProcessor =
                project1
                    .getConfigurations()
                    .getByName(
                        AptPlugin.IMPL.getAnnotationProcessorConfigurationName(testSourceSet));
            // Main deps scope differs between IDEA import and plain Gradle (see ModuleApt).
            getScope(ideaModule, apt.getMainDependenciesScope(), "plus").add(annotationProcessor);
            getScope(ideaModule, "TEST", "plus").add(testAnnotationProcessor);
            AptPlugin.IMPL.configureTasks(
                project1,
                GenerateIdeaModule.class,
                generateIdeaModule ->
                    generateIdeaModule.dependsOn(annotationProcessor, testAnnotationProcessor));
          }
        });
  }

  // Returns a new set containing sourceDirs plus dirs (copy-then-assign,
  // see the comment in configureIdeaModule).
  private static Set<File> addToSet(Set<File> sourceDirs, Collection<File> dirs) {
    Set<File> newSet = new LinkedHashSet<>(sourceDirs);
    newSet.addAll(dirs);
    return newSet;
  }

  // Looks up the "plus"/"minus" configuration list of the given IDEA scope.
  @SuppressWarnings("NullAway")
  private static Collection<Configuration> getScope(
      IdeaModule ideaModule, String scope, String plusOrMinus) {
    return ideaModule.getScopes().get(scope).get(plusOrMinus);
  }

  /**
   * On the root project only: rewrites the CompilerConfiguration component
   * of the generated .ipr so annotation processing is enabled with output
   * directories matching Gradle's generated-sources layout.
   */
  private void configureIdeaProject(final Project project) {
    if (project.getParent() == null) {
      final IdeaProject ideaProject =
          project.getExtensions().getByType(IdeaModel.class).getProject();
      final ProjectAptConvention apt = new ProjectAptConvention();
      ((HasConvention) ideaProject).getConvention().getPlugins().put("net.ltgt.apt-idea", apt);
      ideaProject
          .getIpr()
          .withXml(
              xmlProvider -> {
                if (!apt.isConfigureAnnotationProcessing()) {
                  return;
                }
                for (Object it : (NodeList) xmlProvider.asNode().get("component")) {
                  Node compilerConfiguration = (Node) it;
                  if (!Objects.equals(
                      compilerConfiguration.attribute("name"), "CompilerConfiguration")) {
                    continue;
                  }
                  // Replace any existing annotationProcessing nodes with ours.
                  for (Object n : (NodeList) compilerConfiguration.get("annotationProcessing")) {
                    compilerConfiguration.remove((Node) n);
                  }
                  Node annotationProcessing =
                      compilerConfiguration.appendNode("annotationProcessing");
                  Map<String, Object> profileAttributes = new LinkedHashMap<>();
                  profileAttributes.put("name", "Default");
                  profileAttributes.put("enabled", true);
                  profileAttributes.put("default", true);
                  Node profile = annotationProcessing.appendNode("profile", profileAttributes);
                  // XXX: this assumes that all subprojects use the same name for their
                  // buildDir
                  profile.appendNode(
                      "sourceOutputDir",
                      Collections.singletonMap(
                          "name",
                          project.relativePath(project.getBuildDir())
                              + "/generated/sources/annotationProcessor/java/"
                              + SourceSet.MAIN_SOURCE_SET_NAME));
                  profile.appendNode(
                      "sourceTestOutputDir",
                      Collections.singletonMap(
                          "name",
                          project.relativePath(project.getBuildDir())
                              + "/generated/sources/annotationProcessor/java/"
                              + SourceSet.TEST_SOURCE_SET_NAME));
                  profile.appendNode(
                      "outputRelativeToContentRoot", Collections.singletonMap("value", true));
                  profile.appendNode(
                      "processorPath", Collections.singletonMap("useClasspath", true));
                }
              });
    }
  }

  /** "apt" extension attached to the IDEA module. */
  public static class ModuleApt {
    private boolean addGeneratedSourcesDirs = true;
    private boolean addAptDependencies = true;
    // Gradle integration in IDEA uses COMPILE scope
    private String mainDependenciesScope = isIdeaImport ? "COMPILE" : "PROVIDED";

    public boolean isAddGeneratedSourcesDirs() {
      return addGeneratedSourcesDirs;
    }

    public void setAddGeneratedSourcesDirs(boolean addGeneratedSourcesDirs) {
      this.addGeneratedSourcesDirs = addGeneratedSourcesDirs;
    }

    public boolean isAddAptDependencies() {
      return addAptDependencies;
    }

    public void setAddAptDependencies(boolean addAptDependencies) {
      this.addAptDependencies = addAptDependencies;
    }

    public String getMainDependenciesScope() {
      return mainDependenciesScope;
    }

    public void setMainDependenciesScope(String mainDependenciesScope) {
      this.mainDependenciesScope = Objects.requireNonNull(mainDependenciesScope);
    }
  }

  /** Project-level convention controlling .ipr annotation processing. */
  public static class ProjectAptConvention {
    private boolean configureAnnotationProcessing = true;

    public boolean isConfigureAnnotationProcessing() {
      return configureAnnotationProcessing;
    }

    public void setConfigureAnnotationProcessing(boolean configureAnnotationProcessing) {
      this.configureAnnotationProcessing = configureAnnotationProcessing;
    }
  }
}
|
# Termux build recipe for exiftool (Perl-based metadata reader/writer).
TERMUX_PKG_HOMEPAGE=https://www.sno.phy.queensu.ca/~phil/exiftool/index.html
TERMUX_PKG_DESCRIPTION="Utility for reading, writing and editing meta information in a wide variety of files."
TERMUX_PKG_LICENSE="Artistic-License-2.0"
TERMUX_PKG_MAINTAINER="Leonid Plyushch <leonid.plyushch@gmail.com>"
TERMUX_PKG_VERSION=11.85
TERMUX_PKG_SRCURL="https://www.sno.phy.queensu.ca/~phil/exiftool/Image-ExifTool-$TERMUX_PKG_VERSION.tar.gz"
TERMUX_PKG_SHA256=8b0aaa8e080adfc8736c3b179c140ad3c05dc58a84540f1e56772ce129a8f897
TERMUX_PKG_DEPENDS="perl"

# Installs the launcher script and the bundled Perl module trees into the
# versioned site_perl directory of the currently packaged perl.
termux_step_make_install() {
    # Change this after package 'perl' was upgraded.
    local current_perl_version=5.30.1
    # Executable launcher (0700 — owner only).
    install -Dm700 "$TERMUX_PKG_SRCDIR"/exiftool "$TERMUX_PREFIX"/bin/exiftool
    # Strip bundled POD documentation before copying the modules.
    find "$TERMUX_PKG_SRCDIR"/lib -name "*.pod" -delete
    mkdir -p "$TERMUX_PREFIX/lib/perl5/site_perl/${current_perl_version}"
    # Replace any previously installed Image/File module trees.
    rm -rf "$TERMUX_PREFIX/lib/perl5/site_perl/${current_perl_version}"/{Image,File}
    cp -a "$TERMUX_PKG_SRCDIR"/lib/{Image,File} "$TERMUX_PREFIX/lib/perl5/site_perl/${current_perl_version}/"
}
|
#pragma once
#include <typed-geometry/feature/basic.hh>
#include <typed-geometry/functions/objects/triangulation.hh>
namespace tg
{
/// calls on_triangle for each triangle of the objects triangulation
/// on_triangle: (tg::triangle) -> void
/// (delegates to triangulation_of(obj); only participates in overload
/// resolution for objects providing a triangulation, cf. has_triangulation_of)
template <class Obj, class OnTriangle, std::enable_if_t<has_triangulation_of<Obj>, int> = 0>
constexpr void triangulate(Obj const& obj, OnTriangle&& on_triangle)
{
    for (auto&& t : triangulation_of(obj))
        on_triangle(t);
}
/// computes a uv triangulation of the given sphere and calls on_triangle with each triangle
/// on_triangle: (tg::triangle) -> void
/// NOTE: currently recomputes a lot of sin/cos and thus is not the fastest
/// NOTE: normal_of(t) points outwards
/// NOTE: segs_u must be >= 3, segs_v must be >= 2
/// layout: v == 0 is the north pole (+y), v == segs_v the south pole (-y);
/// u wraps around the equator (u == segs_u is folded back onto u == 0).
template <class ScalarT, class TraitsT, class OnTriangle>
void triangulate_uv(sphere<3, ScalarT, 3, TraitsT> const& s, int segs_u, int segs_v, OnTriangle&& on_triangle)
{
    TG_ASSERT(segs_u >= 3);
    TG_ASSERT(segs_v >= 2);

    // TODO: some caching of sin/cos
    using dir_t = dir<3, ScalarT>;

    // unit direction from sphere center for grid coordinate (u, v);
    // poles are special-cased so all pole triangles share one exact vertex
    auto const dir_of = [&](int u, int v) -> dir_t {
        if (v == 0)
            return dir_t(0, 1, 0);
        else if (v == segs_v)
            return dir_t(0, -1, 0);

        auto [su, cu] = tg::sin_cos(tau<ScalarT> * (u == segs_u ? 0 : u) / ScalarT(segs_u));
        auto [sv, cv] = tg::sin_cos(pi<ScalarT> * v / ScalarT(segs_v));
        return dir_t(sv * su, cv, sv * cu);
    };
    // surface position for grid coordinate (u, v)
    auto const pos_of = [&](int u, int v) {
        auto d = dir_of(u, v);
        return s.center + d * s.radius;
    };

    // cap u (triangle fan around the north pole)
    {
        auto p0 = pos_of(0, 0);
        for (auto i = 0; i < segs_u; ++i)
        {
            auto p1 = pos_of(i, 1);
            auto p2 = pos_of(i + 1, 1);
            on_triangle(tg::triangle3(p0, p1, p2));
        }
    }

    // inner grid (each quad split into two triangles)
    for (auto j = 1; j < segs_v - 1; ++j)
    {
        for (auto i = 0; i < segs_u; ++i)
        {
            auto p00 = pos_of(i + 0, j + 0);
            auto p01 = pos_of(i + 0, j + 1);
            auto p10 = pos_of(i + 1, j + 0);
            auto p11 = pos_of(i + 1, j + 1);
            on_triangle(tg::triangle3(p00, p01, p11));
            on_triangle(tg::triangle3(p00, p11, p10));
        }
    }

    // cap v (triangle fan around the south pole; winding flipped so
    // triangles keep facing outwards)
    {
        auto p0 = pos_of(0, segs_v);
        for (auto i = 0; i < segs_u; ++i)
        {
            auto p1 = pos_of(i, segs_v - 1);
            auto p2 = pos_of(i + 1, segs_v - 1);
            on_triangle(tg::triangle3(p0, p2, p1));
        }
    }
}
}
|
<filename>src/components/uploader/Uploader.utils.ts
import type { FileRejection } from 'react-dropzone';
import type { IntlShape } from 'react-intl';
import type { UploaderProps as CapUploaderProps } from "@cap-collectif/ui";
// Shape of the JSON the upload endpoint returns for one stored file.
export type ApiFileInfo = {
    id: string
    name: string
    size: string
    url: string
    type: string
}

// One file, several files (multiple mode), or nothing selected yet.
export type UploaderValue = ApiFileInfo | ApiFileInfo[] | null | undefined;

// Props of this wrapper: the design-system Uploader props minus the ones
// this component manages itself.
export interface UploaderProps
    extends Omit<CapUploaderProps, 'wording' | 'isInvalid' | 'isRequired' | 'onDrop'> {
    onDrop?: CapUploaderProps['onDrop'];
    uploadURI?: string;
    onChange?: (value: UploaderValue) => void
}

// Rejection codes emitted by react-dropzone.
export enum ErrorCode {
    FileInvalidType = 'file-invalid-type',
    FileTooLarge = 'file-too-large',
    FileTooSmall = 'file-too-small',
    TooManyFiles = 'too-many-files',
}

// One message, several messages (multiple mode), or no error.
export type UploaderError = string | string[] | null;
export type UploaderWarning = string | null;
// POSTs each file as multipart form-data to `uploadURI` and resolves with
// the server's metadata for every file, in input order.
// NOTE(review): network or JSON failures are not caught here — one failed
// upload rejects the whole Promise.all; confirm callers handle that.
export async function uploadFiles (files: File[], uploadURI: string): Promise<ApiFileInfo[]> {
    const allFilesUpload: Promise<ApiFileInfo>[] = files.map(file => {
        const formData = new FormData();
        formData.append('file', file);
        return fetch(uploadURI, {
            method: 'POST',
            credentials: 'same-origin',
            headers: {},
            body: formData,
        })
            .then(response => response.json())
            // Pick only the fields we expose; drops any extra server fields.
            .then((res: ApiFileInfo) => ({
                id: res.id,
                name: res.name,
                size: res.size,
                url: res.url,
                type: res.type,
            }));
    });
    return Promise.all(allFilesUpload).then((values: ApiFileInfo[]) => values);
}
// Maps dropzone rejections to translated, user-facing messages.
// Only the FIRST error of each rejected file is considered; rejections
// whose code is not covered by the active `rules` produce no message
// (empty strings are filtered out).
const getErrorsFile = (
    fileRejections: FileRejection[],
    intl: IntlShape,
    rules: {
        maxSize?: UploaderProps['maxSize'],
        format?: UploaderProps['format'],
    },
): string[] => {
    return fileRejections.map(fileWithError => {
        const mainError = fileWithError.errors[0];
        if (rules?.maxSize && mainError.code === ErrorCode.FileTooLarge) {
            // NOTE(review): `size` is the raw byte count — confirm the
            // 'error-image-too-big' translation expects bytes.
            const fileSize = fileWithError.file.size;
            return intl.formatMessage({ id: 'error-image-too-big' }, { size: fileSize });
        } else if (rules?.format && mainError.code === ErrorCode.FileInvalidType) {
            // "image/png" -> "png"
            const fileExtension = fileWithError.file.type.split('/')[1];
            const fileName = fileWithError.file.name;
            return intl.formatMessage({ id: 'error-file-not-supported' }, { fileName, fileExtension });
        }
        return '';
    }).filter(Boolean);
}
// Translates dropzone rejections and pushes them into `setError`.
// In multiple mode the whole (possibly empty) list is reported; in single
// mode only the first message is.
export const handleErrors = (
    fileRejections: FileRejection[],
    setError: (error: UploaderError) => void,
    multiple: boolean,
    intl: IntlShape,
    rules: {
        maxSize?: UploaderProps['maxSize'],
        format?: UploaderProps['format'],
    },
): void => {
    const errors = getErrorsFile(fileRejections, intl, rules);
    if (multiple) setError(errors)
    // Fix: when no message was produced, report "no error" (null) instead
    // of passing `undefined`, which is outside the UploaderError type.
    else setError(errors[0] ?? null)
};
// Emits a non-blocking quality/size warning for the first dropped file:
// - below `minResolution` (checked asynchronously once the image loads)
// - or heavier than 1.5 MB.
// Non-image files never warn.
export const handleWarning = (
    files: File[],
    setWarning: (warning: string) => void,
    intl: IntlShape,
    minResolution?: UploaderProps['minResolution'],
): void => {
    // Fix: guard against an empty drop — files[0] was dereferenced
    // unconditionally and threw on an empty array.
    if (files.length === 0) return;
    const isImage = getFileType(files[0].type) === 'image';
    if (!isImage) return;
    if (minResolution) {
        const img = new Image();
        img.onload = function () {
            if (img.width && img.height && minResolution.width && minResolution.height) {
                if (img.width < minResolution.width || img.height < minResolution.height)
                    setWarning(intl.formatMessage({ id: 'warning-image-quality' }));
            }
        };
        img.src = URL.createObjectURL(files[0]);
    } else if (files[0].size > mgtob(1.5)) {
        setWarning(intl.formatMessage({ id: 'warning-image-size-big' }));
    }
};
// Returns the broad kind of a format string:
// - MIME types keep their type part      ("image/png" -> "image")
// - dotted extensions drop the first dot (".png" -> "png", ".tar.gz" -> "tar")
// - anything else is returned unchanged.
export function getFileType (format: string): string {
    const slashAt = format.search(/^(.+?)\//) === 0 ? format.indexOf('/') : -1
    if (slashAt !== -1) {
        return format.slice(0, slashAt)
    }
    if (format.startsWith('.')) {
        return format.slice(1).split('.')[0]
    }
    return format
}
// Converts megabytes (binary: 1 MB = 1024 * 1024 bytes) to bytes.
export function mgtob (megas: number): number {
    const BYTES_PER_MEGABYTE = 1024 * 1024
    return megas * BYTES_PER_MEGABYTE
}
export function btomg (bytes: number): number {
return Math.round((bytes / 1024 / 1024) * 10) / 10
} |
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# (Build environment consumed by the Flutter iOS/Xcode build phase scripts.)
export "FLUTTER_ROOT=C:\src\flutter"
export "FLUTTER_APPLICATION_PATH=C:\Users\Claud\Documents\EngSoft\LocalSales\local_sales"
export "FLUTTER_TARGET=lib\main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build\ios"
export "FLUTTER_FRAMEWORK_DIR=C:\src\flutter\bin\cache\artifacts\engine\ios"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
|
#! /bin/bash
# X session autostart: launch desktop helpers in the background.
# compton: compositor with a user config file
compton --config ~/.config/compton/compton.conf &
# nitrogen: restore the previously selected wallpaper
nitrogen --restore &
# urxvtd: urxvt terminal daemon (see urxvtd(1) for -q -o -f)
urxvtd -q -o -f &
|
package org.agmip.translators.soil;
import static java.lang.Float.parseFloat;
import java.util.ArrayList;
import java.util.HashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Helper routines shared by the soil-layer reducers: thickness computation,
 * per-variable defaults, and merging of initial-condition data.
 */
public class LayerReducerUtil {

    public static final Logger log = LoggerFactory.getLogger(LayerReducerUtil.class);

    /** Value returned by {@link #defaultValue(String)} for unknown keys. */
    private static final String UNKNOWN_DEFAULT_VALUE = "0.0";

    /** Per-key defaults; built once instead of on every defaultValue() call. */
    private static final HashMap<String, String> DEFAULT_VALUES = createDefaultValues();

    private static HashMap<String, String> createDefaultValues() {
        HashMap<String, String> defaults = new HashMap<String, String>();
        defaults.put("slcly", "12.6");
        defaults.put("salb", "0.25");
        defaults.put("slphw", "6.2");
        defaults.put("sksat", "0.0");
        defaults.put("caco3", "0.0");
        defaults.put("sloc", "0.1");
        defaults.put("slll", "0.0");
        defaults.put("icnh4", "0.0");
        defaults.put("icno3", "0.0");
        defaults.put("ich2o", "0.0");
        return defaults;
    }

    /**
     * Compute soil layer thickness.
     *
     * Rewrites each layer's SLLB (cumulative base depth) as the layer's own
     * thickness (current base minus previous base), as the STICS soil
     * representation expects. The input list and its maps are not mutated.
     *
     * @param soilsData soil layers ordered from top to bottom
     * @return a new list of copied layers with SLLB replaced by thickness
     */
    public static ArrayList<HashMap<String, String>> computeSoilLayerSize(ArrayList<HashMap<String, String>> soilsData) {
        float deep = 0.0f;
        ArrayList<HashMap<String, String>> newSoilsData = new ArrayList<HashMap<String, String>>();
        for (HashMap<String, String> currentSoil : soilsData) {
            // Copy so the caller's map is left untouched.
            HashMap<String, String> newCurrentSoil = new HashMap<String, String>(currentSoil);
            // Specific for stics soil data representation.
            // Float.toString replaces the deprecated `new Float(...).toString()`
            // boxing; the produced text is identical.
            newCurrentSoil.put(LayerReducer.SLLB, Float.toString(parseFloat(currentSoil.get(LayerReducer.SLLB)) - deep));
            deep = parseFloat(currentSoil.get(LayerReducer.SLLB));
            newSoilsData.add(newCurrentSoil);
        }
        return newSoilsData;
    }

    /**
     * TODO replace this function by the DOME
     *
     * @param key variable name to look up
     * @return the default for the key, or "0.0" when the key is unknown
     */
    public static String defaultValue(String key) {
        String value = DEFAULT_VALUES.get(key);
        return value != null ? value : UNKNOWN_DEFAULT_VALUE;
    }

    /**
     * Merges initial-condition rows into the matching soil layers, in place.
     * Rows are matched positionally and validated by comparing ICBL against
     * SLLB; mismatches and missing rows are logged, not thrown.
     */
    public static void mergeSoilAndInitializationData(ArrayList<HashMap<String, String>> soilsData, ArrayList<HashMap<String, String>> initData) {
        int index = 0;
        log.debug("Init data size : " + initData.size());
        log.debug("Soil data size : " + soilsData.size());
        for (HashMap<String, String> soilData : soilsData) {
            if (index >= initData.size()) {
                log.error("Unable to merge soil information, initial condition information unavailable");
                break;
            }
            if (initData.get(index).get(SAReducerDecorator.ICBL).equals(soilData.get(LayerReducer.SLLB))) {
                soilData.putAll(initData.get(index));
            } else {
                log.error("Unable to merge soil information, inconsistent soil information");
            }
            index = index + 1;
        }
    }
}
|
import cmuiTabbar from './tabbar.vue';
import List from '../base/list.js';
// Register globally so templates can use <cmui-tabbar> directly.
Vue.component('cmui-tabbar',cmuiTabbar);

/**
 * Imperative tabbar factory with a jQuery-like variadic signature.
 *
 * Getter forms:
 *   TabBar()          -> new List('tabbar')
 *   TabBar(vueInst)   -> new List('tabbar', vueInst)   (single Vue instance)
 *
 * Setter form: any mix of positional arguments, dispatched by type:
 *   Array of booleans          -> nav flags
 *   any other Array            -> items
 *   String ".sel" / "#sel"     -> parent selector
 *   String top/right/bottom/left -> position
 *   String "flex" / "auto"     -> col mode
 *   Number                     -> col count
 *   jQuery object              -> parent
 *   Function                   -> itemClick handler
 *   plain Object               -> explicit options (wins over the above)
 *
 * Returns the mounted Vue instance, or undefined when the parent
 * selector matches nothing.
 */
function TabBar(){
    // get
    if(!arguments.length){
        return new List('tabbar');
    }
    if(arguments.length==1&&arguments[0]._isVue){
        return new List('tabbar',arguments[0])
    }
    // set
    // Start from the component's declared prop defaults, then overlay the
    // factory-level defaults below.
    let defaultOptions=_(cmuiTabbar.props).mapValues(o=>_.get(o,'default')).defaults({
        items:[],
        parent:'body',
        className:'',
        itemClick:null,
        extra:'',
        extraClick:null
    }).value()
    // Type-based dispatch of the positional arguments (see doc above).
    _.forEach(arguments,arg=>{
        if(_.isArray(arg)){
            if(_.every(arg,_.isBoolean)){
                defaultOptions.nav=arg;
            }else{
                defaultOptions.items=arg;
            }
        }else if(_.isString(arg)){
            if(/^\.|\#/.test(arg)){
                defaultOptions.parent=arg
            }else if(_.includes(['top','right','bottom','left'],arg)){
                defaultOptions.position=arg
            }else if(_.includes(['flex','auto'],arg)){
                defaultOptions.col=arg
            }
        }else if(_.isNumber(arg)){
            defaultOptions.col=arg;
        }else if(arg instanceof jQuery){
            defaultOptions.parent=arg
        }else if(_.isFunction(arg)){
            defaultOptions.itemClick=arg
        }
    })
    // An explicit options object takes precedence over positional args.
    const options=_.defaults(_.find(arguments,_.isPlainObject),defaultOptions)
    // Template is interpolated once; reactive state is bound via `data` below.
    const tpl=$(`
        <cmui-tabbar
            class="${options.className||''}"
            :active-index="${options.activeIndex}"
            position="${options.position}"
            :nav="nav"
            :col="col"
            :watch="list"
            @item-click="itemClick"
            @extra-click="extraClick"
        >
            <div slot="extra" v-for="item in extraList" v-html="item"></div>
            <cmui-tabbar-item v-for="(item,index) in list" :key="index">
                <div v-html="item.title"></div><div slot="content" v-html="item.content"></div>
            </cmui-tabbar-item>
        </cmui-tabbar>
    `);
    const parent=$(options.parent);
    if(parent.length){
        $(options.parent).append(tpl);
        return new Vue({
            el:tpl[0],
            data:function(){
                return {
                    list:options.items,
                    col:options.col,
                    nav:options.nav,
                    extraList:[].concat(options.extra)
                }
            },
            methods:{
                itemClick:_.isFunction(options.itemClick)?options.itemClick:null,
                extraClick:_.isFunction(options.extraClick)?options.extraClick:null,
            }
        })
    }
}
export default TabBar
<reponame>mtomko/geoducks
package org.marktomko.geoducks.util
import org.scalatest.{FlatSpec, Matchers}
class UtilTest extends FlatSpec with Matchers {

  // Array overload: fills the provided array with the delimited tokens
  // and returns the number of tokens written.
  "fastSplit" should "split a string into an array" in {
    val s = "1,2,33,444"
    val a = Array.ofDim[String](4)
    fastSplit(s, ',', a) should be (4)
    a should be (Array("1", "2", "33", "444"))
  }

  // List overload: returns the tokens directly.
  "fastSplit" should "split a string into a list" in {
    //      0123456789
    val s = "1,2,33,444"
    fastSplit(s, ',') should be (List("1", "2", "33", "444"))
  }
}
|
def evaluate_polynomial(degree, coefficients, x=1):
    """Evaluate a polynomial of the given degree at the point ``x``.

    ``coefficients`` are ordered from the highest power down to the
    constant term: ``degree=2, coefficients=[3, 2, 1]`` represents
    ``3*x**2 + 2*x + 1``.

    Bug fix: the original body referenced a global ``x`` that was never
    defined, so every call raised ``NameError``. The evaluation point is
    now an explicit parameter with a backward-compatible default of 1
    (existing two-argument calls keep working and return the sum of the
    coefficients).

    :param degree: degree of the polynomial (must satisfy
        ``len(coefficients) >= degree + 1``)
    :param coefficients: coefficients, highest power first
    :param x: point at which to evaluate; defaults to 1
    :return: the polynomial's value at ``x``
    """
    value = 0
    for i in range(degree + 1):
        # coefficients[i] multiplies x**(degree - i)
        power = degree - i
        value += coefficients[i] * pow(x, power)
    return value


# Demo: 3*1 + 2*1 + 1 == 6 with the default evaluation point x=1.
print(evaluate_polynomial(2, [3, 2, 1]))
#!/bin/bash
# Slurm batch job: one Double-DDPG training run on MountainCarContinuous-v0
# (epsilon-greedy exploration, seed 4, run 6). Directives below are parsed
# by sbatch, not executed by bash.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M	       	# memory per node
#SBATCH --time=24:00:00           # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_MountainCarContinuous-v0_ddpg_softcopy_epsilon_greedy_seed4_run6_%N-%j.out  # %N for node name, %j for jobID

# Cluster toolchain modules required by the TensorFlow build.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn

# CPU-only TensorFlow virtualenv.
source ~/tf_cpu/bin/activate

python ./ddpg_discrete_action.py --env MountainCarContinuous-v0  --random-seed 4 --exploration-strategy epsilon_greedy --summary-dir ../Double_DDPG_Results_no_monitor/continuous/MountainCarContinuous-v0/ddpg_softcopy_epsilon_greedy_seed4_run6  --continuous-act-space-flag  --double-ddpg-flag
package fetch
// The following code was sourced and modified from the
// https://github.com/andrew-d/goscrape package governed by MIT license.
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"math"
"net/http"
"net/http/cookiejar"
"net/url"
"strconv"
"strings"
"time"
"github.com/mafredri/cdp"
"github.com/mafredri/cdp/devtool"
"github.com/mafredri/cdp/protocol/dom"
"github.com/mafredri/cdp/protocol/network"
"github.com/mafredri/cdp/protocol/page"
"github.com/mafredri/cdp/protocol/runtime"
"github.com/mafredri/cdp/rpcc"
"github.com/slotix/dataflowkit/errs"
"github.com/spf13/viper"
"golang.org/x/net/publicsuffix"
"golang.org/x/sync/errgroup"
)
// Type represents types of fetcher
type Type string

// Fetcher types
const (
	// Base fetcher is used for downloading html web page using Go standard library's http
	Base Type = "Base"
	// Headless chrome is used to download content from JS driven web pages.
	// NOTE(review): declared without an explicit Type — as an untyped string
	// constant it still compares against Type values, but `Chrome Type = "Chrome"`
	// would match Base's declaration style.
	Chrome = "Chrome"
)

// Fetcher is the interface that must be satisfied by things that can fetch
// remote URLs and return their contents.
//
// Note: Fetchers may or may not be safe to use concurrently. Please read the
// documentation for each fetcher for more details.
type Fetcher interface {
	// Fetch is called to retrieve HTML content of a document from the remote server.
	Fetch(request Request) (io.ReadCloser, error)
	// Cookie management, used to persist and restore per-user sessions.
	getCookieJar() http.CookieJar
	setCookieJar(jar http.CookieJar)
	getCookies(u *url.URL) ([]*http.Cookie, error)
	setCookies(u *url.URL, cookies []*http.Cookie) error
}

// Request struct contains request information sent to Fetchers
type Request struct {
	// Type defines Fetcher type. It may be "chrome" or "base". Defaults to "base".
	Type string `json:"type"`
	// URL to be retrieved
	URL string `json:"url"`
	// HTTP method : GET, POST
	Method string
	// FormData is a string value for passing formdata parameters.
	//
	// For example it may be used for processing pages which require authentication
	//
	// Example:
	//
	// "auth_key=880ea6a14ea49e853634fbdc5015a024&referer=http%3A%2F%2Fexample.com%2F&ips_username=user&ips_password=<PASSWORD>&rememberMe=1"
	//
	FormData string `json:"formData,omitempty"`
	// UserToken identifies user to keep personal cookies information.
	UserToken string `json:"userToken"`
	// Actions contains the list of action we have to perform on page
	Actions string `json:"actions"`
}
// BaseFetcher is a Fetcher that uses the Go standard library's http
// client to fetch URLs.
type BaseFetcher struct {
	client *http.Client
}

// ChromeFetcher is used to fetch Java Script rendeded pages.
type ChromeFetcher struct {
	// cdpClient talks the Chrome Debugging Protocol; set up per Fetch call.
	cdpClient *cdp.Client
	// client is used for the devtools HTTP endpoint (and optional proxy).
	client *http.Client
	// cookies captured from the page after navigation.
	cookies []*http.Cookie
}
// newFetcher creates a Fetcher instance for downloading a web page,
// choosing the implementation from the given fetcher type. An unknown
// type is a programming error and panics via the logger.
func newFetcher(t Type) Fetcher {
	if t == Base {
		return newBaseFetcher()
	}
	if t == Chrome {
		return newChromeFetcher()
	}
	logger.Panic(fmt.Sprintf("unhandled type: %#v", t))
	panic("unreachable")
}
// newBaseFetcher creates instances of newBaseFetcher{} to fetch
// a page content from regular websites as-is
// without running js scripts on the page.
// An optional PROXY setting routes all traffic through the given URL.
// Returns nil when the proxy URL cannot be parsed or the cookie jar
// cannot be created — callers must handle a nil fetcher.
func newBaseFetcher() *BaseFetcher {
	var client *http.Client
	proxy := viper.GetString("PROXY")
	if len(proxy) > 0 {
		proxyURL, err := url.Parse(proxy)
		if err != nil {
			logger.Error(err.Error())
			return nil
		}
		transport := &http.Transport{Proxy: http.ProxyURL(proxyURL)}
		client = &http.Client{Transport: transport}
	} else {
		client = &http.Client{}
	}
	f := &BaseFetcher{
		client: client,
	}
	// Public-suffix-aware jar: prevents cookies from being set for
	// effective top-level domains.
	jarOpts := &cookiejar.Options{PublicSuffixList: publicsuffix.List}
	var err error
	f.client.Jar, err = cookiejar.New(jarOpts)
	if err != nil {
		return nil
	}
	return f
}
// Fetch retrieves document from the remote server.
// The response body is transcoded to UTF-8 before being returned so
// downstream parsers can assume a single encoding.
func (bf *BaseFetcher) Fetch(request Request) (io.ReadCloser, error) {
	resp, err := bf.response(request)
	if err != nil {
		return nil, err
	}
	// Converting fetched content to UTF-8
	utf8Res, _, _, err := readerToUtf8Encoding(resp.Body)
	if err != nil {
		return nil, err
	}
	return utf8Res, nil
}
// response returns the *http.Response after fetching the document with
// BaseFetcher. Requests with FormData are always sent as
// x-www-form-urlencoded POSTs (r.Method is ignored in that case);
// otherwise r.Method/r.URL are used as given.
func (bf *BaseFetcher) response(r Request) (*http.Response, error) {
	// URL validation
	if _, err := url.ParseRequestURI(r.getURL()); err != nil {
		return nil, err
	}
	var err error
	var req *http.Request
	if r.FormData == "" {
		req, err = http.NewRequest(r.Method, r.URL, nil)
		if err != nil {
			return nil, err
		}
	} else {
		// if form data exists send POST request
		formData := parseFormData(r.FormData)
		req, err = http.NewRequest("POST", r.URL, strings.NewReader(formData.Encode()))
		if err != nil {
			return nil, err
		}
		req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
		req.Header.Add("Content-Length", strconv.Itoa(len(formData.Encode())))
	}
	//TODO: Add UA to requests
	//req.Header.Add("User-Agent", "Dataflow kit - https://github.com/slotix/dataflowkit")
	return bf.doRequest(req)
}
// doRequest executes req and hands back the response when the server
// answered 200 OK. Any other status is converted into an errs.StatusError
// carrying the status code and its standard textual description.
func (bf *BaseFetcher) doRequest(req *http.Request) (*http.Response, error) {
	resp, err := bf.client.Do(req)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != 200 {
		return nil, errs.StatusError{
			resp.StatusCode,
			errors.New(http.StatusText(resp.StatusCode)),
		}
	}
	return resp, nil
}
// getCookieJar returns the jar managed by the fetcher's http.Client.
func (bf *BaseFetcher) getCookieJar() http.CookieJar { //*cookiejar.Jar {
	return bf.client.Jar
}

// setCookieJar replaces the client's cookie jar (used to restore a
// previously persisted user session).
func (bf *BaseFetcher) setCookieJar(jar http.CookieJar) {
	bf.client.Jar = jar
}

// getCookies returns the cookies the jar would send to u; never errors.
func (bf *BaseFetcher) getCookies(u *url.URL) ([]*http.Cookie, error) {
	return bf.client.Jar.Cookies(u), nil
}

// setCookies stores cookies for u in the client's jar; never errors.
func (bf *BaseFetcher) setCookies(u *url.URL, cookies []*http.Cookie) error {
	bf.client.Jar.SetCookies(u, cookies)
	return nil
}
// parseFormData converts a raw, &-separated form-data string into
// url.Values. Tokens are kept as-is (no URL-decoding): the encoded form
// is forwarded unchanged by the fetchers.
//
// Fixes over the original: each pair is split on the FIRST '=' only, so
// values that themselves contain '=' (e.g. base64 padding) survive, and
// tokens without any '=' no longer cause an index-out-of-range panic —
// they are recorded with an empty value.
func parseFormData(fd string) url.Values {
	//"auth_key=880ea6a14ea49e853634fbdc5015a024&referer=http%3A%2F%2Fexample.com%2F&ips_username=usr&ips_password=<PASSWORD>&rememberMe=0"
	formData := url.Values{}
	for _, pair := range strings.Split(fd, "&") {
		kv := strings.SplitN(pair, "=", 2)
		if len(kv) == 2 {
			formData.Add(kv[0], kv[1])
		} else {
			formData.Add(kv[0], "")
		}
	}
	return formData
}
// Static type assertion: BaseFetcher must satisfy the Fetcher interface.
var _ Fetcher = &BaseFetcher{}
// newChromeFetcher returns a ChromeFetcher whose HTTP client (used to
// reach the devtools endpoint) optionally goes through the PROXY setting.
// Returns nil when the proxy URL cannot be parsed.
// NOTE(review): duplicates newBaseFetcher's proxy setup, and unlike
// newBaseFetcher no cookie jar is installed here — confirm that is
// intentional (cookies are handled via load/saveCookies in Fetch).
func newChromeFetcher() *ChromeFetcher {
	var client *http.Client
	proxy := viper.GetString("PROXY")
	if len(proxy) > 0 {
		proxyURL, err := url.Parse(proxy)
		if err != nil {
			logger.Error(err.Error())
			return nil
		}
		transport := &http.Transport{Proxy: http.ProxyURL(proxyURL)}
		client = &http.Client{Transport: transport}
	} else {
		client = &http.Client{}
	}
	f := &ChromeFetcher{
		client: client,
	}
	return f
}
// LogCodec captures the output from writing RPC requests and reading
// responses on the connection. It implements rpcc.Codec via
// WriteRequest and ReadResponse. Used when CHROME_TRACE is enabled to
// echo the raw CDP traffic to stdout.
type LogCodec struct{ conn io.ReadWriter }

// WriteRequest marshals v into a buffer, writes its contents onto the
// connection and logs it.
func (c *LogCodec) WriteRequest(req *rpcc.Request) error {
	var buf bytes.Buffer
	if err := json.NewEncoder(&buf).Encode(req); err != nil {
		return err
	}
	fmt.Printf("SEND: %s", buf.Bytes())
	_, err := c.conn.Write(buf.Bytes())
	if err != nil {
		return err
	}
	return nil
}

// ReadResponse unmarshals from the connection into v whilst echoing
// what is read into a buffer for logging.
func (c *LogCodec) ReadResponse(resp *rpcc.Response) error {
	var buf bytes.Buffer
	// TeeReader lets the decoder consume the stream while we keep a copy.
	if err := json.NewDecoder(io.TeeReader(c.conn, &buf)).Decode(resp); err != nil {
		return err
	}
	fmt.Printf("RECV: %s\n", buf.String())
	return nil
}
// Fetch retrieves document from the remote server. It returns web page content along with cache and expiration information.
// Flow: validate URL -> open a fresh devtools target on the Chrome
// instance from the CHROME setting -> enable CDP domains -> restore
// cookies -> navigate (GET, or POST when FormData is set) -> run page
// actions -> persist cookies -> return the page's outer HTML.
func (f *ChromeFetcher) Fetch(request Request) (io.ReadCloser, error) {
	// URL validation
	if _, err := url.ParseRequestURI(strings.TrimSpace(request.getURL())); err != nil {
		return nil, err
	}
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	devt := devtool.New(viper.GetString("CHROME"), devtool.WithClient(f.client))
	//https://github.com/mafredri/cdp/issues/60
	//pt, err := devt.Get(ctx, devtool.Page)
	pt, err := devt.Create(ctx)
	if err != nil {
		return nil, err
	}
	var conn *rpcc.Conn
	if viper.GetBool("CHROME_TRACE") {
		// Wrap the connection so all CDP traffic is echoed (see LogCodec).
		newLogCodec := func(conn io.ReadWriter) rpcc.Codec {
			return &LogCodec{conn: conn}
		}
		// Connect to WebSocket URL (page) that speaks the Chrome Debugging Protocol.
		conn, err = rpcc.DialContext(ctx, pt.WebSocketDebuggerURL, rpcc.WithCodec(newLogCodec))
	} else {
		conn, err = rpcc.DialContext(ctx, pt.WebSocketDebuggerURL)
	}
	if err != nil {
		fmt.Println(err)
		return nil, err
	}
	defer conn.Close() // Cleanup.
	defer devt.Close(ctx, pt)
	// Create a new CDP Client that uses conn.
	f.cdpClient = cdp.NewClient(conn)
	if err = runBatch(
		// Enable all the domain events that we're interested in.
		func() error { return f.cdpClient.DOM.Enable(ctx) },
		func() error { return f.cdpClient.Network.Enable(ctx, nil) },
		func() error { return f.cdpClient.Page.Enable(ctx) },
		func() error { return f.cdpClient.Runtime.Enable(ctx) },
	); err != nil {
		return nil, err
	}
	// Restore any previously persisted cookies before navigating.
	err = f.loadCookies()
	if err != nil {
		return nil, err
	}
	domLoadTimeout := 60 * time.Second
	if request.FormData == "" {
		err = f.navigate(ctx, f.cdpClient.Page, "GET", request.getURL(), "", domLoadTimeout)
	} else {
		formData := parseFormData(request.FormData)
		err = f.navigate(ctx, f.cdpClient.Page, "POST", request.getURL(), formData.Encode(), domLoadTimeout)
	}
	if err != nil {
		return nil, err
	}
	// Action failures are logged but do not abort the fetch.
	if err := f.runActions(ctx, request.Actions); err != nil {
		logger.Warn(err.Error())
	}
	u, err := url.Parse(request.getURL())
	if err != nil {
		return nil, err
	}
	f.cookies, err = f.saveCookies(u, &ctx)
	if err != nil {
		return nil, err
	}
	// Fetch the document root node. We can pass nil here
	// since this method only takes optional arguments.
	doc, err := f.cdpClient.DOM.GetDocument(ctx, nil)
	if err != nil {
		return nil, err
	}
	// Get the outer HTML for the page.
	result, err := f.cdpClient.DOM.GetOuterHTML(ctx, &dom.GetOuterHTMLArgs{
		NodeID: &doc.Root.NodeID,
	})
	if err != nil {
		return nil, err
	}
	readCloser := ioutil.NopCloser(strings.NewReader(result.OuterHTML))
	return readCloser, nil
}
// runActions decodes actionsJSON — a JSON array of single-entry
// {actionType: params} objects — and executes every action in order
// against this fetcher. It returns the first error encountered, whether
// from decoding, constructing an action, or executing it.
//
// BUG FIX: the original returned after the FIRST successfully constructed
// action (so later actions never ran) and silently ignored NewAction errors.
func (f *ChromeFetcher) runActions(ctx context.Context, actionsJSON string) error {
	if len(actionsJSON) == 0 {
		return nil
	}
	acts := []map[string]json.RawMessage{}
	err := json.Unmarshal([]byte(actionsJSON), &acts)
	if err != nil {
		return err
	}
	for _, actionMap := range acts {
		for actionType, params := range actionMap {
			action, err := NewAction(actionType, params)
			if err != nil {
				return err
			}
			if err := action.Execute(ctx, f); err != nil {
				return err
			}
		}
	}
	return nil
}
// setCookieJar installs jar as the cookie jar of the underlying HTTP client.
func (f *ChromeFetcher) setCookieJar(jar http.CookieJar) {
	f.client.Jar = jar
}
// getCookieJar returns the cookie jar of the underlying HTTP client.
func (f *ChromeFetcher) getCookieJar() http.CookieJar {
	return f.client.Jar
}
// Static type assertion: compile-time check that *ChromeFetcher satisfies Fetcher.
var _ Fetcher = &ChromeFetcher{}
// navigate drives the page to the given URL and waits for the load event.
// method "GET" navigates directly; anything else enables request
// interception so interceptRequest can rewrite the outgoing document
// request into a form-encoded POST carrying formData. An error is returned
// when the document load fails; the outer ctx being canceled returns nil.
func (f *ChromeFetcher) navigate(ctx context.Context, pageClient cdp.Page, method, url string, formData string, timeout time.Duration) error {
	// Give the page a moment to settle before returning to the caller.
	defer time.Sleep(750 * time.Millisecond)
	ctxTimeout, cancelTimeout := context.WithTimeout(context.Background(), timeout)
	// BUG FIX: cancelTimeout was previously only called on the ctx.Done
	// path, leaking the timeout context on every other exit.
	defer cancelTimeout()
	// Make sure Page events are enabled.
	err := pageClient.Enable(ctxTimeout)
	if err != nil {
		return err
	}
	// Subscribe to load-finished and load-failed before navigating so no
	// event can be missed.
	loadEventFired, err := pageClient.LoadEventFired(ctxTimeout)
	if err != nil {
		return err
	}
	defer loadEventFired.Close()
	loadingFailed, err := f.cdpClient.Network.LoadingFailed(ctxTimeout)
	if err != nil {
		return err
	}
	defer loadingFailed.Close()
	if method == "GET" {
		_, err = pageClient.Navigate(ctxTimeout, page.NewNavigateArgs(url))
		if err != nil {
			return err
		}
	} else {
		// POST: intercept the matching outgoing request and let
		// interceptRequest rewrite it to carry the form body.
		pattern := network.RequestPattern{URLPattern: &url}
		patterns := []network.RequestPattern{pattern}
		f.cdpClient.Network.SetCacheDisabled(ctxTimeout, network.NewSetCacheDisabledArgs(true))
		interArgs := network.NewSetRequestInterceptionArgs(patterns)
		err = f.cdpClient.Network.SetRequestInterception(ctxTimeout, interArgs)
		if err != nil {
			return err
		}
		kill := make(chan bool)
		go f.interceptRequest(ctxTimeout, url, formData, kill)
		_, err = pageClient.Navigate(ctxTimeout, page.NewNavigateArgs(url))
		if err != nil {
			return err
		}
		kill <- true
	}
	select {
	case <-loadEventFired.Ready():
		_, err = loadEventFired.Recv()
		if err != nil {
			return err
		}
	case <-loadingFailed.Ready():
		reply, err := loadingFailed.Recv()
		if err != nil {
			return err
		}
		// Canceled sub-resource loads are ignored; only a failed document
		// load counts as a navigation failure.
		canceled := reply.Canceled != nil && *reply.Canceled
		if !canceled && reply.Type == network.ResourceTypeDocument {
			return errs.StatusError{400, errors.New(reply.ErrorText)}
		}
	case <-ctx.Done():
		return nil
	}
	return nil
}
// setCookies replaces the fetcher's cached cookie set. The url parameter is
// currently unused: cookies are applied to the browser later via loadCookies.
func (f *ChromeFetcher) setCookies(u *url.URL, cookies []*http.Cookie) error {
	f.cookies = cookies
	return nil
}
// loadCookies pushes every cookie cached on the fetcher into the browser via
// the Network domain, so subsequent navigations send them. It stops and
// returns the first error reported by the browser.
func (f *ChromeFetcher) loadCookies() error {
	for _, cookie := range f.cookies {
		args := network.SetCookieArgs{
			Name:     cookie.Name,
			Value:    cookie.Value,
			Path:     &cookie.Path,
			Domain:   &cookie.Domain,
			HTTPOnly: &cookie.HttpOnly,
			Secure:   &cookie.Secure,
		}
		// Only forward an expiry when one is set; the CDP field counts
		// whole seconds since the Unix epoch.
		if !cookie.Expires.IsZero() {
			args.Expires = network.TimeSinceEpoch(cookie.Expires.Sub(time.Unix(0, 0)) / time.Second)
		}
		if _, err := f.cdpClient.Network.SetCookie(context.Background(), &args); err != nil {
			return err
		}
	}
	return nil
}
// getCookies returns the fetcher's cached cookies; u is currently unused.
func (f *ChromeFetcher) getCookies(u *url.URL) ([]*http.Cookie, error) {
	return f.cookies, nil
}
// saveCookies reads the browser's cookies for u, converts them to
// *http.Cookie, removes them from the browser, and returns the converted
// slice so they can be re-applied on the next Fetch via loadCookies.
func (f *ChromeFetcher) saveCookies(u *url.URL, ctx *context.Context) ([]*http.Cookie, error) {
	ncookies, err := f.cdpClient.Network.GetCookies(*ctx, &network.GetCookiesArgs{URLs: []string{u.String()}})
	if err != nil {
		return nil, err
	}
	cookies := []*http.Cookie{}
	for _, c := range ncookies.Cookies {
		c1 := http.Cookie{
			Name:     c.Name,
			Value:    c.Value,
			Path:     c.Path,
			/* Expires: expire, */
			Domain:   c.Domain,
			HttpOnly: c.HTTPOnly,
			Secure:   c.Secure,
		}
		// CDP reports expiry as fractional seconds since the epoch; a
		// negative value marks a session cookie with no expiry.
		if c.Expires > -1 {
			sec, dec := math.Modf(c.Expires)
			expire := time.Unix(int64(sec), int64(dec*(1e9)))
			/* logger.Info(expire.String())
			logger.Info(expire.Format("2006-01-02 15:04:05")) */
			c1.Expires = expire
		}
		cookies = append(cookies, &c1)
		// Delete the cookie from the browser after capturing it —
		// presumably so state is carried only through f.cookies between
		// fetches; confirm this is intentional before changing.
		domain := string(c1.Domain)
		Url := u.String()
		f.cdpClient.Network.DeleteCookies(*ctx, &network.DeleteCookiesArgs{Name: c.Name, Domain: &domain, URL: &Url, Path: &c1.Path})
	}
	return cookies, nil
}
// interceptRequest services Network.requestIntercepted events until told to
// stop via the kill channel. The originating document request (URL equal to
// originURL and not a redirect) is rewritten into a form-encoded POST
// carrying formData; images, stylesheets, and excluded URLs are aborted;
// everything else is allowed through unchanged. Intended to run as a
// goroutine started by navigate.
func (f *ChromeFetcher) interceptRequest(ctx context.Context, originURL string, formData string, kill chan bool) {
	var sig = false
	cl, err := f.cdpClient.Network.RequestIntercepted(ctx)
	if err != nil {
		panic(err)
	}
	defer cl.Close()
	for {
		// sig is set on kill or on a stream error; the loop exits here.
		if sig {
			return
		}
		select {
		case <-cl.Ready():
			r, err := cl.Recv()
			if err != nil {
				logger.Error(err.Error())
				sig = true
				continue
			}
			lengthFormData := len(formData)
			if lengthFormData > 0 && r.Request.URL == originURL && r.RedirectURL == nil {
				// Rewrite the intercepted navigation into a POST with the
				// supplied url-encoded body and matching headers.
				interceptedArgs := network.NewContinueInterceptedRequestArgs(r.InterceptionID).
					SetMethod("POST").
					SetPostData(formData)
				headers, _ := json.Marshal(map[string]string{
					"Content-Type":   "application/x-www-form-urlencoded",
					"Content-Length": strconv.Itoa(lengthFormData),
				})
				interceptedArgs.Headers = headers
				if err = f.cdpClient.Network.ContinueInterceptedRequest(ctx, interceptedArgs); err != nil {
					logger.Error(err.Error())
					sig = true
					continue
				}
			} else {
				interceptedArgs := network.NewContinueInterceptedRequestArgs(r.InterceptionID)
				// Abort heavyweight sub-resources that scraping never needs.
				if r.ResourceType == network.ResourceTypeImage || r.ResourceType == network.ResourceTypeStylesheet || isExclude(r.Request.URL) {
					interceptedArgs.SetErrorReason(network.ErrorReasonAborted)
				}
				if err = f.cdpClient.Network.ContinueInterceptedRequest(ctx, interceptedArgs); err != nil {
					logger.Error(err.Error())
					sig = true
					continue
				}
				continue
			}
		case <-kill:
			// break only leaves the select; the sig flag above performs
			// the actual loop exit on the next iteration.
			sig = true
			break
		}
	}
}
// isExclude reports whether origin contains any substring listed in the
// EXCLUDERES configuration entry (resource URLs to abort while intercepting).
func isExclude(origin string) bool {
	excludeRes := viper.GetStringSlice("EXCLUDERES")
	for _, res := range excludeRes {
		// Idiomatic substring test instead of strings.Index(...) != -1.
		if strings.Contains(origin, res) {
			return true
		}
	}
	return false
}
// RunJSFromFile reads a JavaScript file from path, appends
// entryPointFunction (a call expression that invokes the script's entry
// point), compiles the result in the page's runtime, and executes it,
// awaiting any returned promise.
//
// BUG FIX: errors are now returned to the caller instead of panicking —
// the function already declared an error return that was never used.
func (f ChromeFetcher) RunJSFromFile(ctx context.Context, path string, entryPointFunction string) error {
	exp, err := ioutil.ReadFile(path)
	if err != nil {
		return err
	}
	exp = append(exp, entryPointFunction...)
	compileReply, err := f.cdpClient.Runtime.CompileScript(ctx, &runtime.CompileScriptArgs{
		Expression:    string(exp),
		PersistScript: true,
	})
	if err != nil {
		return err
	}
	awaitPromise := true
	_, err = f.cdpClient.Runtime.RunScript(ctx, &runtime.RunScriptArgs{
		ScriptID:     *compileReply.ScriptID,
		AwaitPromise: &awaitPromise,
	})
	return err
}
// removeNodes deletes all provided nodeIDs from the DOM.
// func removeNodes(ctx context.Context, domClient cdp.DOM, nodes ...dom.NodeID) error {
// var rmNodes []runBatchFunc
// for _, id := range nodes {
// arg := dom.NewRemoveNodeArgs(id)
// rmNodes = append(rmNodes, func() error { return domClient.RemoveNode(ctx, arg) })
// }
// return runBatch(rmNodes...)
// }
// runBatchFunc is the function signature accepted by runBatch.
type runBatchFunc func() error

// runBatch starts every supplied function concurrently and blocks until all
// have finished, returning the first error encountered (nil if none failed).
func runBatch(fn ...runBatchFunc) error {
	var group errgroup.Group
	for _, task := range fn {
		group.Go(task)
	}
	return group.Wait()
}
// getURL returns the request URL with surrounding whitespace and any
// trailing slashes removed.
func (req Request) getURL() string {
	trimmed := strings.TrimSpace(req.URL)
	return strings.TrimRight(trimmed, "/")
}
// Host parses the (normalized) request URL and returns its host component,
// propagating any parse error.
func (req Request) Host() (string, error) {
	parsed, err := url.Parse(req.getURL())
	if err != nil {
		return "", err
	}
	return parsed.Host, nil
}
|
#!/bin/bash
# Copyright (c) 2013, Sibt ul Hussain <sibt.ul.hussain at gmail dot com>
# All rights reserved.
# Released under BSD License
# -------------------------
# For license terms please see license.lic
# Script for Computing LBP, LTP & LQP Features...
# feature_type[ lbp or ltp or lqp or lbp+ltp] path_of_file_containing_list_of_images[./face-recog-100x170.txt] dir[directory path for storing feature files.] optional: path_of_a_separate_file_containing_list_of_images_for_codebook_learning> e.g bash computeFeatures.sh lqp ~/dataset/lfw-list.txt ~/experiments/data/ ~/dataset/lfw-list-view1.txt"
count=1                              # running index of launched feature jobs
srcdir=${PWD}                        # directory the script was started from
exefile=${srcdir}/build/mainFeatures # mainFeatures-large
efname="runExp.sh"                   # helper script copied into each experiment dir
nthreads=1 #number of threads
suffix=""                            # optional suffix appended to experiment dir names
# computeLQP: run the feature-extraction binary once for every combination of
# patch size (psize) and codebook size (cbsize), creating one experiment
# directory per run under $mdir. Relies on the caller having set: ofname,
# psize, cbsize, codetype, ptype, norm, addcmd, mdir, suffix.
computeLQP()
{
    ftype=14   # NOTE(review): unused — FeatureType is passed literally below
    addcmd=" ${addcmd} --FeatureType=14 --CodeBook-DMetric=0 --ClusteringRounds=10 --Patch-PruneCount=10 --Patch-PatchStride=1"
    #a
    # for dirname in ${ofname[@]}
    # do
    dirname=$ofname
    echo ${dirname}
    for patchsize in ${psize[@]}
    do
        for cbsizevar in ${cbsize[@]}
        do
            cd $mdir
            dname="${dirname}-PatchSize-${patchsize}-CodeType${codetype}-${cbsizevar}-${suffix}"
            echo "Running Dir Name"
            mkdir $dname
            cp ${exefile} ${srcdir}/${efname} ${dname} #
            echo "Training $dname "
            cd $dname
            echo "${cbsizevar} run.sh ${cbsizevar} ${patchsize} ${ptype[$count]} ${codetype}"
            naddcmd="--Normalization=${norm} --CodeBookSize=${cbsizevar} --PatchType=${ptype} --Patch-PatchSize=${patchsize} --Patch-CodingType=${codetype} ${addcmd}"
            echo "Running Process Number = $count"
            # tvar=$(($count%$nthreads))
            # echo " tvar="$tvar
            # Every nthreads-th job runs in the foreground (with wait) to
            # throttle the number of concurrent background jobs.
            if [ $(($count%$nthreads)) -eq 0 ]
            then
                echo "Blocking Call "
                sh ${efname} $exefile "${naddcmd}"
                wait
            else
                sh ${efname} $exefile "${naddcmd}"&
            fi
            # sh removefile.sh
            count=$(($count+1))
        done
    done
}
# lqp: sweep LQP feature-extraction experiments over normalization, patch
# type, and LTP tolerance values, calling computeLQP for each combination.
# The second ("Hor+Vert+Diag+ADiag") sweep currently has its computeLQP call
# commented out, so it only prints what it would run.
lqp()
{
    #Best results on view1 are found using Tolerance=7 and Disk size of 7
    #echo "Running Using LTP Features"
    #computeLQP #
    for norm in 1
    do
        echo "Running Disc Split Only"
        foldname="Circ-Split-${fname}" # original(HOG
        codetype=4 # code type for circular > 16 = 2
        psize=(7)
        # cbsize=(100 150 200)
        cbsize=(150)
        tcount=0
        for ptype in 2 # horizontal5 diagonal 9 combined
        do
            for tol in 7 5
            do
                echo "Running Using LQP Features"
                # NOTE(review): foldname is a scalar, so ${foldname[${tcount}]}
                # always expands to element 0 — confirm that is intended.
                ofname="${foldname[${tcount}]}-norm-${norm}-ptype-${ptype}-tol-${tol}"
                addcmd=" --LTP-Tolerance=${tol} ${taddcmd} "
                computeLQP #
            done
            tcount=$(($tcount+1))
        done
        # Split-LTP
        echo "Running Horizontal+Vertical+Diag+ADiag Only"
        foldname=Hor+Vert+Diag+ADiag # original(HOG
        codetype=4 # code type for circular > 16 = 2
        psize=7
        cbsize=(150)
        tcount=0
        for ptype in 9 # horizontal5 diagonal 9 combined
        do
            for tol in 5 7
            do
                echo "Running Using LQP Features"
                ofname="${foldname[${tcount}]}-norm-${norm}-ptype-${ptype}-tol-${tol}"
                addcmd=" --LTP-Tolerance=${tol}"
                # computeLQP #
            done
            tcount=$(($tcount+1))
        done
    done
}
# lbp: run a single LBP/LTP feature-extraction experiment in directory
# $dname under $mdir. Relies on the caller having set: dname, addcmd, mdir,
# suffix; uses the same foreground/background throttling as computeLQP.
lbp()
{
    echo "Feature Destination Dir=${mdir} "
    cd $mdir
    echo "Running Process Number = $count"
    echo "Running in Dir Name=${dname}"
    dname="${dname}${suffix}"
    mkdir $dname
    cp ${exefile} ${srcdir}/${efname} ${dname} #
    echo "Training in $dname "
    cd $dname
    # tvar=$(($count%$nthreads))
    # echo " tvar="$tvar
    # Every nthreads-th job blocks so at most nthreads jobs run at once.
    if [ $(($count%$nthreads)) -eq 0 ]
    then
        echo "Blocking Call "
        sh ${efname} $exefile "${addcmd}"
        wait
    else
        sh ${efname} $exefile "${addcmd}"&
    fi
    count=$(($count+1))
}
# Main entry point: validate arguments, prepare the output directory, then
# dispatch to the requested feature type (lqp / lbp / ltp / lbp+ltp).
if [ $# -lt 3 ]
then
    echo "Error Wrong Number of Arguments: <$0 feature_type [lbp or ltp or lqp or lbp+ltp] path_of_file_containing_list_of_images [./face-recog-100x170.txt] dir [directory path for storing feature files.] optional: path_of_a_separate_file_containing_list_of_images_for_codebook_learning> e.g
 bash computeFeatures.sh lqp ~/dataset/lfw-list.txt ~/experiments/data/ ~/dataset/lfw-list-view1.txt"
    exit
    # return
fi
# $3: destination directory for feature files (created if missing).
mdir=$3
if [ ! -d $mdir ]
then
    echo "Making Directory ${mdir}"
    mkdir -p ${mdir}
fi
# $2: training image list; $4 (optional): separate validation/codebook list.
tfile=$2
if [ $# -eq 3 ]
then
    vfile=${tfile}
else
    vfile=$4
fi
echo "---------------------------------- Computing Features --------------------"
echo "Computing $1 Features .... "
echo "Number of threads = ${nthreads} "
# Dispatch on the requested feature type ($1), case-insensitively.
if [ $1 = "lqp" -o $1 = "LQP" ]
then
    fname="100x170"
    taddcmd=" --Win-Width=80 --Win-Height=150 --Validation-File=${vfile} --TrainingFile=${tfile} "
    lqp
    #call lbp & ltp computation.
elif [ $1 = "lbp" -o $1 = "LBP" ]
then
    norm=1
    dname="lbp-norm-${norm}"
    addcmd=" --Win-Width=80 --Win-Height=150 --Validation-File=${vfile} --FeatureType=1 "
    lbp
elif [ $1 = "ltp" -o $1 = "LTP" ]
then
    norm=1
    for tol in 5 7
    do
        dname="ltp-norm-${norm}-tol-${tol}"
        addcmd=" --Win-Width=80 --Win-Height=150 --Validation-File=${vfile} --FeatureType=2 --LTP-Tolerance=${tol} "
        lbp
    done
elif [ $1 = "lbp+ltp" -o $1 = "LBP+LTP" ]
then
    norm=1
    for tol in 5 7
    do
        dname="lbp+ltp-norm-${norm}-tol-${tol}"
        addcmd=" --Win-Width=80 --Win-Height=150 --Validation-File=${vfile} --FeatureType=3 --LTP-Tolerance=${tol} "
        lbp
    done
fi
|
<gh_stars>0
/* Public interface of the editor module. */
#ifndef __BU_EDITOR_H__
#define __BU_EDITOR_H__

/* Initialise the editor subsystem; call once before any other editor use. */
void editor_init();

#endif
|
<?php
/**
 * Print the Fibonacci sequence from F(0) through F($num) inclusive.
 *
 * The first two terms (0 and 1) are always present, matching the
 * original behavior even when $num < 1.
 *
 * @param int $num Index of the last Fibonacci number to generate.
 */
function calculateFibonacci($num) {
    // Seed terms F(0) and F(1).
    $sequence = array(0, 1);

    // Extend the sequence one term at a time; the loop body never runs
    // when $num < 2, preserving the original guard's behavior.
    for ($position = 2; $position <= $num; $position++) {
        $sequence[$position] = $sequence[$position - 1] + $sequence[$position - 2];
    }

    // Emit the whole sequence.
    print_r($sequence);
}

$num = 8;
calculateFibonacci($num);
?>
# Output
Array ( [0] => 0 [1] => 1 [2] => 1 [3] => 2 [4] => 3 [5] => 5 [6] => 8 [7] => 13 [8] => 21 ) |
#!/bin/bash
# Deploys the website stack: ensures the devops S3 bucket exists, builds and
# deploys the SAM template, syncs the static frontend, then smoke-tests the
# Lambda. Reads config from ../config.json.

WEBSITE_DOMAIN_NAME=`jq -r .WebsiteDomainName < ../config.json`
STACK_NAME=`jq -r .CloudformationStackName < ../config.json`
DEVOPS_BUCKET_NAME=devops-`aws sts get-caller-identity | jq -r .Account`-`aws configure get region`

#Creates Devlops bucket if it doesn't exist
aws s3api head-bucket --bucket $DEVOPS_BUCKET_NAME \
|| aws s3 mb s3://$DEVOPS_BUCKET_NAME && \
aws s3api wait bucket-exists \
 --bucket $DEVOPS_BUCKET_NAME

echo Cloudformation Deploy...
sam build \
 --template-file ./template.yaml
# BUG FIX: the --parameter-overrides line used to end with a stray "\",
# which swallowed the following echo line into the sam deploy command.
sam deploy \
 --s3-bucket $DEVOPS_BUCKET_NAME \
 --s3-prefix $STACK_NAME/ \
 --stack-name $STACK_NAME \
 --capabilities CAPABILITY_NAMED_IAM \
 --parameter-overrides WebsiteDomainName=$WEBSITE_DOMAIN_NAME

echo Sync Website Files...
aws s3 sync frontend s3://$WEBSITE_DOMAIN_NAME/

echo "Invoke the Lambda:"
aws lambda invoke \
 --function-name MyLambdaFunction \
 --invocation-type RequestResponse \
 response.json > /dev/null \
&& jq -r . < response.json \
&& rm response.json
|
#!/bin/bash
#COBALT -t 0:30:00
#COBALT -n 1
#COBALT -A OceanClimate_2

# This software is open source software available under the BSD-3 license.
#
# Copyright (c) 2020 Triad National Security, LLC. All rights reserved.
# Copyright (c) 2020 Lawrence Livermore National Security, LLC. All rights
# reserved.
# Copyright (c) 2020 UT-Battelle, LLC. All rights reserved.
#
# Additional copyright and license information can be found in the LICENSE file
# distributed with this code, or at
# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/master/LICENSE

# Load the unified E3SM analysis environment; disable HDF5 file locking,
# which is problematic on this shared filesystem.
source /lus/theta-fs0/projects/ccsm/acme/tools/e3sm-unified/load_latest_e3sm_unified.sh
export HDF5_USE_FILE_LOCKING=FALSE

# MPAS/ACME job to be analyzed, including paths to simulation data and
# observations. Change this name and path as needed
run_config_file="config.20190301.GMPAS-DIB-IAF-ISMF.T62_oRRS30to10v3wLI.theta"
# NOTE: the following section will OVERWRITE values specified within the config file named above
# one parallel task per node by default
parallel_task_count=12
# ncclimo can run with 1 (serial) or 12 (bck) threads
ncclimo_mode=bck

if [ ! -f $run_config_file ]; then
    echo "File $run_config_file not found!"
    exit 1
fi

# This is a config file generated just for this job with the output directory,
# command prefix and parallel task count from above.
job_config_file=config.output.$COBALT_JOBID

# write out the config file specific to this job
cat <<EOF > $job_config_file
[execute]
# options related to executing parallel tasks

# the number of parallel tasks (1 means tasks run in serial, the default)
parallelTaskCount = $parallel_task_count

# the parallelism mode in ncclimo ("serial" or "bck")
# Set this to "bck" (background parallelism) if running on a machine that can
# handle 12 simultaneous processes, one for each monthly climatology.
ncclimoParallelMode = $ncclimo_mode

EOF

# if using the mpas_analysis conda package instead of the git repo, remove
# "python -m"
python -m mpas_analysis $run_config_file $job_config_file
|
<reponame>vieiraeduardos/easy-management
class User():
    """Plain value object describing a user account record."""

    def __init__(self, id=0, code=0, name="", email="", password="", type="", createdAt=""):
        """Store every field verbatim; all parameters default to empty values."""
        (self.id, self.code, self.name, self.email,
         self.password, self.type, self.createdAt) = (
            id, code, name, email, password, type, createdAt)
|
from django.db import models
from rss_feeds.models import Feed
from django.contrib.auth.models import User
class Category(models.Model):
    """A per-user, per-feed article category with an article counter."""

    # Human-readable category label.
    category = models.CharField(max_length=255)
    # Number of items currently counted under this category.
    count = models.IntegerField(default=0)
    # Owning feed; deleting the feed cascades to its categories.
    feed = models.ForeignKey(Feed, on_delete=models.CASCADE)
    # Owning user; deleting the user cascades to their categories.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
/*
Sushi: fast image loading previews.
Version: 0.9
Author: <NAME>
Contact: <EMAIL>
Website: https://tommy144.wordpress.com/
The MIT License (MIT)
Copyright (c) 2015 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
// (approx) Two dimensional Gaussian function evaluated at (x, y),
// where 'theta' is the standard deviation.
function twoDimensionalGaussian(x, y, theta) {
    var variance = theta * theta;
    var norm = 1.0 / (2 * Math.PI * variance);
    var exponent = -1.0 * (x * x + y * y) / (2 * variance);
    return norm * Math.pow(Math.E, exponent);
}
// Maps center-relative (x, y) coordinates onto the flat index of an
// n-by-n matrix stored row-major, with (0, 0) at the matrix center.
function mapOntoCenteredMatrix(x, y, n) {
    var mid = Math.floor(n / 2);
    var centerIndex = mid + mid * n;
    return centerIndex + x + y * n;
}
// Generates a normalized two dimensional Gaussian kernel of width n whose
// weights sum to one.
function normalGaussianMatrix(n, theta) {
    var matrix = new Array(n * n);
    var total = 0;
    var mid = Math.floor(n / 2);
    // Accumulate raw Gaussian weights (same x-outer/y-inner order as the
    // original so the floating-point total is bit-identical).
    for (var x = -mid; x <= mid; x++) {
        for (var y = -mid; y <= mid; y++) {
            var weight = twoDimensionalGaussian(x, y, theta);
            total += weight;
            matrix[mapOntoCenteredMatrix(x, y, n)] = weight;
        }
    }
    var cells = n * n;
    for (var i = 0; i < cells; i++) {
        matrix[i] = matrix[i] / total; // normalize.
    }
    return matrix;
}
// Indexes a 1d array as a 2d array of width w. The h parameter is unused
// but retained for interface compatibility with callers.
function index2dArray(x, y, w, h) {
    return y * w + x;
}
// Returns the flat index of the pixel offset by (x, y) from flat index i in
// a w*h image, reflecting the offset back inward (mirror-style edge
// handling) whenever it would land outside the image.
function xyOffsetIndex(i, x, y, w, h) {
    var index = i + x + y * w;
    var absx = (i % w) + x;
    // Off a horizontal edge AND the top/bottom: mirror both offsets.
    if (absx < 0 && index < 0 || absx >= w && index >= w * h) {
        return i - x - y * w;
    }
    if (absx < 0 || absx >= w) { // off the left/right edge: mirror x only.
        return i - x + y * w;
    }
    //var absy = index - (w * y)
    if (index < 0 || index >= w * h) { // off the top/bottom: mirror y only.
        return i + x - y * w;
    }
    return index;
}
// Performs an in-place gaussian blur over RGBA pixel data (SLOW!).
// data: flat RGBA byte array; w/h: image size in pixels; n: kernel width;
// theta: standard deviation; phi: per-sample brightness multiplier.
function filterGaussian(data, w, h, n, theta, phi) {
    var sizeOfPixel = 4;
    var M = normalGaussianMatrix(n, theta);
    var dataLength = w * h * sizeOfPixel;
    var nmid = Math.floor(n / 2);
    for (var i = 0; i < dataLength; i += 4) {
        var sumR = 0;
        var sumG = 0;
        var sumB = 0;
        for (var x = -nmid; x <= nmid; x++) {
            for (var y = -nmid; y <= nmid; y++) {
                var epsilon = M[mapOntoCenteredMatrix(x, y, n)];
                var dataIndex = xyOffsetIndex(i / 4, x, y, w, h);
                // NOTE(review): the original special-cased pixel 255 with a
                // mirrored y offset; preserved as-is — confirm intent.
                if (i / 4 === 255) {
                    dataIndex = xyOffsetIndex(i / 4, x, -y, w, h);
                }
                dataIndex *= sizeOfPixel;
                sumR += data[dataIndex + 0] * epsilon * phi;
                sumG += data[dataIndex + 1] * epsilon * phi;
                sumB += data[dataIndex + 2] * epsilon * phi;
            }
        }
        // BUG FIX: these writes used to sit inside the x loop, committing
        // partial kernel sums into the image while it was still being read
        // (including by the current pixel's own x=0,y=0 sample).
        data[i + 0] = sumR;
        data[i + 1] = sumG;
        data[i + 2] = sumB;
    }
    return data;
}
// Explodes (nearest-neighbour upscales) an RGBA pixel array from w*h to
// roughly targetw*targeth by drawing each source pixel as a wfac*vfac square.
function explodeImage(data, w, h, targetw, targeth) {
    var wfac = Math.floor(targetw / w);
    var vfac = Math.floor(targeth / h);
    var imgdata = new ImageData(targetw, Math.floor(targeth));
    for (var i = 0; i < data.length; i += 4) {
        var R = data[i + 0];
        var G = data[i + 1];
        var B = data[i + 2];
        var A = data[i + 3];
        // BUG FIX: removed a dead `var x = i % w * wfac` assignment that was
        // immediately shadowed by the correct computation below.
        var index = i / 4;
        var x = index % w;
        var y = Math.floor(index / w);
        drawSquare(imgdata.data, x * wfac, y * vfac, wfac, vfac, R, G, B, A, targetw);
    }
    return imgdata;
}
// Draws a filled w-by-h square of color (R, G, B, A) with its top-left
// corner at pixel (x, y), onto an RGBA buffer whose row width is `width`
// pixels.
function drawSquare(data, x, y, w, h, R, G, B, A, width) {
    var rowStride = width * 4;
    var start = x * 4 + y * rowStride;
    for (var row = 0; row < h; row++) {
        var base = start + row * rowStride;
        for (var col = 0; col < w; col++) {
            var p = base + col * 4;
            data[p] = R;
            data[p + 1] = G;
            data[p + 2] = B;
            data[p + 3] = A;
        }
    }
}
// Encodes image data to a base64 string: each array element is coerced to
// text and encoded as its own btoa chunk, then all chunks are joined.
function encodeBase64String(data, w, h) {
    var chunks = [];
    for (var i = 0; i < data.length; i++) {
        chunks.push(btoa(data[i]));
    }
    return chunks.join("");
}
// Decodes image from b64 string.
function decodeBase64Image(s, w, h) {
var imgdata = new ImageData(w, h);
for (var i = 0; i < s.length; i+=4) {
imgdata.data[i / 4] = atob(s.substring(i, i + 4));
}
return imgdata;
}
|
#!/bin/bash
# Grid Engine batch script: trains an MNIST model with hetseq across a
# 16-GPU world (4 nodes x 4 GPUs); this submission is rank 12.
#$-m abe
#$-M yding4@nd.edu
#$-q gpu@qa-xp-004 # specify the queue
#$-l gpu_card=4
#$-N node4gpu16_mnist_sub3

# Put the project toolchain first on PATH / LD_LIBRARY_PATH.
export PATH=/afs/crc.nd.edu/user/y/yding4/Transformer/bin:$PATH
export LD_LIBRARY_PATH=/afs/crc.nd.edu/user/y/yding4/Transformer/lib:$LD_LIBRARY_PATH

DIST=/scratch365/yding4/hetseq
# Rendezvous address of the rank-0 node for distributed initialization.
AD=tcp://10.32.82.207:11111

python3 ${DIST}/train.py \
    --task mnist --optimizer adadelta --lr-scheduler PolynomialDecayScheduler \
    --data /scratch365/yding4/mnist/MNIST/processed/ --clip-norm 100 \
    --max-sentences 64 --fast-stat-sync --max-epoch 20 --update-freq 1 \
    --valid-subset test --num-workers 4 \
    --warmup-updates 0 --total-num-update 50000 --lr 1.01 \
    --distributed-init-method ${AD} --distributed-world-size 16 \
    --distributed-gpus 4 --distributed-rank 12 --save-dir node4gpu16
#include <iostream>
#include <string>

// Global variables: current position on an unbounded 2D grid.
int xPos = 0;
int yPos = 0;

// Function prototypes
void movePos(char dir);

// Interactive loop: print the current position, read a compass direction
// from stdin, and move one step. Loops forever (terminate with EOF/Ctrl-C;
// note EOF leaves `dir` unread, so the loop keeps spinning).
int main() {
    while (true) {
        std::cout << "You are at (" << xPos << ", " << yPos << ")\n";
        std::cout << "Which direction do you want to move? (N/S/E/W)\n";
        char dir;
        std::cin >> dir;
        movePos(dir);
    }
    return 0;
}
// Moves the global (xPos, yPos) position one step in the given compass
// direction; any character other than N/S/E/W leaves the position unchanged.
void movePos(char dir) {
    if (dir == 'N') {
        ++yPos;
    } else if (dir == 'S') {
        --yPos;
    } else if (dir == 'E') {
        ++xPos;
    } else if (dir == 'W') {
        --xPos;
    }
}
<reponame>PeterJCLaw/srcomp-ts
import test from 'ava';
import fetchMock from 'fetch-mock';
import { SRComp } from './srcomp';
import { MatchType } from './types';
// Reset all registered fetch mocks after every test case so stubs do not
// leak between tests.
test.afterEach(() => {
  fetchMock.restore();
});
// Verifies that SRComp.getMatches fetches /matches and converts every
// ISO-8601 timestamp string into a Date, and the raw type string into the
// MatchType enum, leaving all other fields untouched.
test('srcomp.getMatches', async (t) => {
  // Raw payload exactly as the HTTP API returns it (timestamps as strings).
  const rawData = {
    last_scored: 160,
    matches: [
      {
        arena: 'Simulator',
        display_name: 'Final (#160)',
        num: 160,
        scores: {
          game: { HRS3: 8, SPA: 36 },
          normalised: { HRS3: 2, SPA: 4 },
          ranking: { HRS3: 2, SPA: 1 },
        },
        teams: ['SPA', 'HRS3'],
        times: {
          game: {
            end: '2021-05-01T13:33:00+01:00',
            start: '2021-05-01T13:31:00+01:00',
          },
          slot: {
            end: '2021-05-01T13:33:30+01:00',
            start: '2021-05-01T13:30:00+01:00',
          },
          staging: {
            closes: '2021-05-01T13:29:00+01:00',
            opens: '2021-05-01T13:26:00+01:00',
            signal_shepherds: { Shepherd: '2021-05-01T13:28:00+01:00' },
            signal_teams: '2021-05-01T13:28:00+01:00',
          },
        },
        type: 'knockout',
      },
    ],
  };
  fetchMock.mock(
    'https://studentrobotics.org/comp-api/matches',
    JSON.stringify(rawData)
  );

  // Same match, but with parsed Date objects and the MatchType enum value.
  const expected = [
    {
      arena: 'Simulator',
      display_name: 'Final (#160)',
      num: 160,
      scores: {
        game: { HRS3: 8, SPA: 36 },
        normalised: { HRS3: 2, SPA: 4 },
        ranking: { HRS3: 2, SPA: 1 },
      },
      teams: ['SPA', 'HRS3'],
      times: {
        game: {
          end: new Date('2021-05-01T13:33:00+01:00'),
          start: new Date('2021-05-01T13:31:00+01:00'),
        },
        slot: {
          end: new Date('2021-05-01T13:33:30+01:00'),
          start: new Date('2021-05-01T13:30:00+01:00'),
        },
        staging: {
          closes: new Date('2021-05-01T13:29:00+01:00'),
          opens: new Date('2021-05-01T13:26:00+01:00'),
          signal_shepherds: { Shepherd: new Date('2021-05-01T13:28:00+01:00') },
          signal_teams: new Date('2021-05-01T13:28:00+01:00'),
        },
      },
      type: MatchType.Knockout,
    },
  ];
  const srcomp = new SRComp('https://studentrobotics.org/comp-api');
  const matches = await srcomp.getMatches();
  t.deepEqual(expected, matches);
});
// Verifies that SRComp.getTeams fetches /teams and returns the `teams`
// mapping (keyed by TLA) verbatim, with no transformation applied.
test('srcomp.getTeams', async (t) => {
  const teamData = {
    name: '<NAME>',
    tla: 'SRZ',
    league_pos: 13,
    location: {
      name: 'the-venue',
      get: '/comp-api/locations/the-venue',
    },
    scores: {
      game: 6,
      league: 7,
    },
    get: '/comp-api/teams/SRZ',
  };
  const teamsData = { SRZ: teamData };
  fetchMock.mock(
    'https://studentrobotics.org/comp-api/teams',
    JSON.stringify({ teams: teamsData })
  );

  const srcomp = new SRComp('https://studentrobotics.org/comp-api');
  const teams = await srcomp.getTeams();
  t.deepEqual(teamsData, teams);
});
|
# Minimal re-implementation of a few Enumerable-style predicates over the
# host object's +list+ method, which must return an array-like collection.
module MyEnumerable
  # True if the block is truthy for every element. Short-circuits on the
  # first falsy result (the original always scanned the entire list).
  def all?
    list.each do |item|
      return false unless yield(item)
    end
    true
  end

  # True if the block is truthy for at least one element; short-circuits
  # on the first truthy result.
  def any?
    list.each do |item|
      return true if yield(item)
    end
    false
  end

  # Returns a new array of the elements for which the block is truthy.
  def filter
    selected = []
    list.each do |item|
      selected.push(item) if yield(item)
    end
    selected
  end
end
|
class Node:
    """Singly linked list node holding one value."""

    def __init__(self, data):
        self.data = data  # payload stored at this node
        self.next = None  # link to the following node; None marks the end


class Stack:
    """LIFO stack backed by a singly linked list of Node objects."""

    def __init__(self):
        # Top of the stack; None means the stack is empty.
        self.head = None

    def is_empty(self):
        """Return True when the stack holds no elements."""
        # Direct boolean expression instead of the verbose if/else form.
        return self.head is None

    def push(self, data):
        """Place a new element on top of the stack."""
        # Linking to the current head works for the empty case too
        # (new_node.next simply stays None), so no branch is needed.
        new_node = Node(data)
        new_node.next = self.head
        self.head = new_node

    def pop(self):
        """Remove and return the top element, or None when empty."""
        if self.head is None:
            return None
        popped = self.head.data
        self.head = self.head.next
        return popped
#!/bin/bash
# Starts an aria2c download daemon with JSON-RPC enabled for remote control.

## Set the variable below to your Aria password
ARIA_RPC_SECRET="puss"

## This is the maximum number of download jobs that will be active at a time. Note that this does not affect the number of concurrent *uploads*
MAX_CONCURRENT_DOWNLOADS=5

## The port that RPC will listen on
RPC_LISTEN_PORT=8210

# Launch detached (--daemon=true); RPC not exposed on all interfaces
# (--rpc-listen-all=false) and torrent seeding kept to a bare minimum
# (--seed-time=0.01, 1 KB/s overall upload cap).
aria2c --enable-rpc --rpc-listen-all=false --rpc-listen-port $RPC_LISTEN_PORT --max-concurrent-downloads=$MAX_CONCURRENT_DOWNLOADS --max-connection-per-server=10 --rpc-max-request-size=1024M --seed-time=0.01 --min-split-size=10M --follow-torrent=mem --split=10 --rpc-secret=$ARIA_RPC_SECRET --max-overall-upload-limit=1 --daemon=true

echo "Aria2c daemon started"
import { List, ListItem, ListItemText, Typography } from "@mui/material";
import Box from "@mui/system/Box";
import { RecipeProduct } from "../../common/models/recipe.form";
import { SetLanguageText } from "../../services/i18n/languageManager";
// Props for IngredientView: the recipe products to render as ingredients.
export interface IngredientViewProps {
  recipeProducts:Array<RecipeProduct>;
};

// Renders the localized "ingredients" heading followed by one MUI list item
// per recipe product (product name + formatted measurement text).
export const IngredientView = (props: IngredientViewProps) => {
  const textValue = SetLanguageText;

  // Index keys are acceptable here: the list is render-only and the
  // component never reorders it in place.
  const displayListItem = (recipeProduct: RecipeProduct, index:number ) => <ListItem key={index}>
    <ListItemText
      primary={`${recipeProduct.name}`}
      secondary={PrintMeassureText(recipeProduct)}/>
  </ListItem>

  return <Box sx={{ padding: '20px' }}>
    <Typography variant="h6" gutterBottom component="div">
      {textValue('ingredients')}
    </Typography>
    <List>
      {props.recipeProducts?.map(displayListItem)}
    </List>
  </Box>;
}
export const PrintMeassureText = (recipeProduct : RecipeProduct) => {
let text = '';
if(recipeProduct.quantity > 0)
text += ` ${recipeProduct.quantity}`;
if(recipeProduct.fractionary)
text += ` ${recipeProduct.fractionary}`;
text += ` ${recipeProduct.measureType}`
return text;
} |
#!/bin/bash
# Usage: <script> <app-name> <platform> [dev]
# Compiles the app (dev or production mode), packages it with
# electron-packager into ../built, then runs the post-build script.

if [[ "$3" == "dev" ]]; then
    sh compile.sh $1;
else
    sh compile.sh $1 nodev;
fi

echo "Building app ...";
./node_modules/.bin/electron-packager ./ $1 --out=../built --overwrite --platform=$2;

cd scripts;
node build.js $1;
#!/bin/bash
# CI script: versions and publishes lerna-managed packages, then pushes the
# resulting tags. Requires INPUT_BUMP, INPUT_EMAIL, INPUT_USERNAME,
# INPUT_REGISTRY, NPM_AUTH_TOKEN, and the GITHUB_* action variables.
set -euo pipefail

if [ -z "$INPUT_BUMP" ]
then
  echo "bump input not specified."
  # BUG FIX: "exit -1" is not a valid exit status (it wraps to 255 and some
  # shells reject it outright); use the conventional failure status 1.
  exit 1
fi

# Generate environment variables
REMOTE_REPO="https://${GITHUB_ACTOR}:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git"

# Configure the root user's npmrc file
NPM_CONFIG_FILE="${NPM_CONFIG_FILE-"$HOME/.npmrc"}"
NPM_HOST="${NPM_HOST-registry.npmjs.org}"
NPM_STRICT_SSL="${NPM_STRICT_SSL-true}"
NPM_SCHEMA="https"
if ! $NPM_STRICT_SSL; then
  NPM_SCHEMA="http"
fi

printf "//%s/:_authToken=%s\\nregistry=%s\\nstrict-ssl=%s" "$NPM_HOST" "$NPM_AUTH_TOKEN" "${NPM_SCHEMA}://$NPM_HOST" "${NPM_STRICT_SSL}" > "$NPM_CONFIG_FILE"
chmod 0600 "$NPM_CONFIG_FILE"

# Configure source control
git config http.sslVerify false
git config user.email "$INPUT_EMAIL"
git config user.name "$INPUT_USERNAME"
git remote add kwsites $REMOTE_REPO
git show-ref
git fetch kwsites --unshallow --tags
git branch --verbose
echo "tags"
git tag
git status

# Run lerna
node_modules/.bin/lerna run build --since
node_modules/.bin/lerna version --git-remote=kwsites --yes $INPUT_BUMP
node_modules/.bin/lerna publish --git-remote=kwsites --registry=$INPUT_REGISTRY --yes from-git

echo "tags"
git tag
git status
git push kwsites HEAD --tags
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
* to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
* AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
* OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef BIGNUMBER_H
#define BIGNUMBER_H
#include <vector>
#include <string>
#include <iostream>
/**
* BigNumber class
*/
class BigNumber {
public:
    //@{
    /**
     * BigNumber constructor
     * @param number - The initial value of the BigNumber
     */
    BigNumber(std::string number);
    BigNumber(long long number);
    //@}
    /**
     * Add another BigNumber to the current instance
     * @param other - The other BigNumber
     * @return The sum of the two BigNumbers
     */
    BigNumber add(BigNumber other);
    /**
     * Subtract another BigNumber from the current instance
     * @param other - The other BigNumber
     * @return The difference of the two BigNumbers
     */
    BigNumber subtract(BigNumber other);
    /**
     * Multiply the current instance by another BigNumber
     * @param other - The other BigNumber
     * @return The product of the two BigNumbers
     */
    BigNumber multiply(BigNumber other);
    /**
     * Divide the current instance by another BigNumber
     * @param other - The other BigNumber
     * @return The quotient of the two BigNumbers
     */
    BigNumber divide(BigNumber other);
    /**
     * Raise the current instance to the power of an exponent
     * @param exponent - The power to be raised by
     * @return - The resulting BigNumber after exponentiation
     */
    BigNumber pow(int exponent);
    /**
     * Get the string value of the current instance
     * @return The BigNumber as a string
     */
    std::string getString();
    /**
     * Set the value of the current instance with a string
     * @param newStr - The new value for the BigNumber
     * @return The BigNumber with the new value
     */
    BigNumber setString(const std::string &newStr);
    /**
     * Negates the current instance
     * @return The BigNumber after negation
     */
    BigNumber negate();
    /**
     * Strip leading zero digits from the current instance's representation
     * @return The BigNumber without leading zeros
     */
    BigNumber trimLeadingZeros();
    //@{
    /**
     * Check if another BigNumber is equal to the current instance
     * @param other - The other BigNumber
     * @return True if equal, otherwise false
     */
    bool equals(const BigNumber &other);
    bool equals(const long long &other);
    bool equals(const std::string &other);
    //@}
    /**
     * Get the number of digits in the current instance
     * @return The number of digits
     */
    unsigned int digits();
    /**
     * Get whether or not the current instance is a negative number
     * @return True if negative, otherwise false
     */
    bool isNegative() const;
    /**
     * Get whether or not the current instance is a positive number
     * @return True if positive, otherwise false
     */
    // NOTE(review): isNegative() is const-qualified but isPositive()/isEven()/
    // equals() are not; const-qualifying them would be a breaking header change
    // for the existing implementation file, so it is only flagged here.
    bool isPositive();
    /**
     * Get whether or not the current instance is an even number
     * @return True if even, otherwise false
     */
    bool isEven();
    /**
     * Get whether or not the current instance is an odd number
     * @return True if odd, otherwise false
     */
    bool isOdd();
    /**
     * Get the absolute value of the current instance
     * @return The absolute value of the BigNumber
     */
    BigNumber abs() const;
    /**
     * Output stream operator
     * @param os The output stream
     * @param num The current instance
     * @return The output stream with the current instance
     */
    friend std::ostream &operator<<(std::ostream &os, const BigNumber &num);
    //@{
    /**
     * Addition operator
     * @param b1 - The current instance
     * @param b2 - The number being added
     * @return The sum of the two numbers
     */
    friend BigNumber operator+(BigNumber b1, const BigNumber &b2);
    friend BigNumber operator+(BigNumber b1, const long long &b2);
    friend BigNumber operator+(BigNumber b1, const std::string &b2);
    //@}
    //@{
    /**
     * Subtraction operator
     * @param b1 - The current instance
     * @param b2 - The number being subtracted
     * @return The difference of the two numbers
     */
    friend BigNumber operator-(BigNumber b1, const BigNumber &b2);
    friend BigNumber operator-(BigNumber b1, const long long &b2);
    friend BigNumber operator-(BigNumber b1, const std::string &b2);
    //@}
    //@{
    /**
     * Multiplication operator
     * @param b1 - The current instance
     * @param b2 - The number being multiplied by
     * @return The product of the two numbers
     */
    friend BigNumber operator*(BigNumber b1, const BigNumber &b2);
    friend BigNumber operator*(BigNumber b1, const long long &b2);
    friend BigNumber operator*(BigNumber b1, const std::string &b2);
    //@}
    //@{
    /**
     * Division operator
     * @param b1 - The current instance
     * @param b2 - The number being divided by
     * @return The quotient of the two numbers
     */
    friend BigNumber operator/(BigNumber b1, const BigNumber &b2);
    friend BigNumber operator/(BigNumber b1, const long long &b2);
    friend BigNumber operator/(BigNumber b1, const std::string &b2);
    //@}
    /**
     * Exponent operator
     * @param b1 - The current instance
     * @param b2 - The exponent
     * @return The value after exponentiation
     */
    friend BigNumber operator^(BigNumber b1, const int &b2);
    //@{
    /**
     * Equality operator
     * @param b1 - The current instance
     * @param b2 - Another value
     * @return True if equal, otherwise false
     */
    friend bool operator==(BigNumber b1, const BigNumber &b2);
    friend bool operator==(BigNumber b1, const long long &b2);
    friend bool operator==(BigNumber b1, const std::string &b2);
    //@}
    /**
     * Greater-than operator
     * @param b1 - The current instance
     * @param b2 - Another BigNumber
     * @return True if current instance is greater, otherwise false
     */
    friend bool operator>(BigNumber b1, const BigNumber &b2);
    /**
     * Less-than operator
     * @param b1 - The current instance
     * @param b2 - Another BigNumber
     * @return True if current instance is less, otherwise false
     */
    friend bool operator<(BigNumber b1, const BigNumber &b2);
    /**
     * Greater-than or equal-to operator
     * @param b1 - The current instance
     * @param b2 - Another BigNumber
     * @return True if current instance is greater or equal, otherwise false
     */
    friend bool operator>=(BigNumber b1, const BigNumber &b2);
    /**
     * Less-than or equal-to operator
     * @param b1 - The current instance
     * @param b2 - Another BigNumber
     * @return True if current instance is less or equal, otherwise false
     */
    friend bool operator<=(BigNumber b1, const BigNumber &b2);
    //@{
    /**
     * Assignment operator
     * @param other - The new value for the BigNumber
     * @return A BigNumber containing the new value
     */
    BigNumber& operator=(const BigNumber &other);
    BigNumber& operator=(const long long &other);
    BigNumber& operator=(const std::string &other);
    //@}
    //@{
    /**
     * Addition assignment operator\n
     * Adds and assigns a value to the current instance
     * @param other - The value being added
     * @return The new value after addition and assignment
     */
    BigNumber& operator+=(const BigNumber &other);
    BigNumber& operator+=(const long long &other);
    BigNumber& operator+=(const std::string &other);
    //@}
    //@{
    /**
     * Subtraction assignment operator\n
     * Subtracts and assigns a value to the current instance
     * @param other - The value being subtracted
     * @return The new value after subtraction and assignment
     */
    BigNumber& operator-=(const BigNumber &other);
    BigNumber& operator-=(const long long &other);
    BigNumber& operator-=(const std::string &other);
    //@}
    //@{
    /**
     * Multiplication assignment operator\n
     * Multiplies and assigns a value to the current instance
     * @param other - The value being multiplied
     * @return The new value after multiplication and assignment
     */
    BigNumber& operator*=(const BigNumber &other);
    BigNumber& operator*=(const long long &other);
    BigNumber& operator*=(const std::string &other);
    //@}
    //@{
    /**
     * Division assignment operator\n
     * Divides and assigns a value to the current instance
     * @param other - The value being divided
     * @return The new value after division and assignment
     */
    BigNumber& operator/=(const BigNumber &other);
    BigNumber& operator/=(const long long &other);
    BigNumber& operator/=(const std::string &other);
    //@}
    /**
     * Pre-increment operator
     * @return The incremented BigNumber
     */
    BigNumber& operator++();
    /**
     * Pre-decrement operator
     * @return The decremented BigNumber
     */
    BigNumber& operator--();
    /**
     * Post-increment operator
     * @return The incremented BigNumber
     */
    BigNumber operator++(int);
    /**
     * Post-decrement operator
     * @return The decremented BigNumber
     */
    BigNumber operator--(int);
    /**
     * The index operator
     * @param index The position being looked at
     * @return The number at the specified position in the BigNumber string
     */
    unsigned int operator[](int index);
private:
    std::string _numberString;  //The big number represented as a string
    //Methods
    // Type-specific helpers backing the public arithmetic overloads:
    // the "ll" suffix takes a long long operand, "str" a string operand.
    BigNumber addll(const long long &other);
    BigNumber addstr(const std::string &other);
    BigNumber subtractll(const long long &other);
    BigNumber subtractstr(const std::string &other);
    BigNumber multiplyll(const long long &other);
    BigNumber multiplystr(const std::string &other);
    BigNumber dividell(const long long &other);
    BigNumber dividestr(const std::string &other);
};
#endif |
import { makeStyles } from "@material-ui/core/styles"

// Styles for the error page: a full-viewport, flex-centered layout on a warm
// background, with centered, capitalized error text.
const useStyles = makeStyles((theme) => {
  return {
    // Full-height wrapper that centers its children both ways.
    root: {
      display: "flex",
      justifyContent: "center",
      alignItems: "center",
      height: "100vh",
      backgroundColor: "#cb997e",
    },
    // Horizontally centers the error content.
    errorPage: {
      display: "flex",
      justifyContent: "center",
    },
    // Centered, capitalized message block.
    errorContainer: {
      textAlign: "center",
      textTransform: "capitalize",
    },
  }
})

export default useStyles
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# Exports the Flutter/Xcode build environment (SDK location, app path, build
# identifiers, and feature flags) consumed by the iOS build scripts.
export "FLUTTER_ROOT=/sysroot/home/harpreet/lib/flutter/stable"
export "FLUTTER_APPLICATION_PATH=/sysroot/home/harpreet/AndroidStudioProjects/flutter/HR-Management-and-Geo-Attendance-System"
export "COCOAPODS_PARALLEL_CODE_SIGN=true"
export "FLUTTER_TARGET=lib/main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build/ios"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
export "DART_OBFUSCATION=false"
export "TRACK_WIDGET_CREATION=false"
export "TREE_SHAKE_ICONS=false"
export "PACKAGE_CONFIG=.packages"
|
<filename>StarTrekArena/js/player.js
let player;
// Constructor for a player character: records the chosen class and base stats.
// Every new player starts with a fixed phaser power of 10.
function Player(classType, health, intelligence, strength, agility){
    Object.assign(this, { classType, health, intelligence, strength, agility });
    this.phaserPower = 10;
}
let PlayerMoves = {
calcAttack: function() {
//TODO: add randomness so top doesn't always get first
let getPlayerAgility = player.agility * player.intelligence;
let getEnemyAgility = enemy.agility * enemy.intelligence;
let playerAttack = function() {
let baseDamage = player.strength * player.agility;
let offsetDamage = Math.floor(Math.random() * Math.floor(10));
let outputDamage = baseDamage + offsetDamage;
let numberOfHits = Math.floor(Math.random() * Math.floor(player.agility/10)/2) + 1;
return [outputDamage, numberOfHits];
}
let enemyAttack = function() {
let baseDamage = enemy.strength * enemy.agility;
let offsetDamage = Math.floor(Math.random() * Math.floor(10));
let outputDamage = baseDamage + offsetDamage;
let numberOfHits = Math.floor(Math.random() * Math.floor(enemy.agility/10)/2) + 1;
return [outputDamage, numberOfHits];
}
let getPlayerHealth = document.querySelector(".health-player");
let getEnemyHealth = document.querySelector(".health-enemy");
if (getPlayerAgility >= getEnemyAgility) {
let playerAttackValues = playerAttack();
let totalDamage = playerAttackValues[0] * playerAttackValues[1];
enemy.health =- totalDamage;
alert("You hit " + playerAttackValues[0] + " damage " +
playerAttackValues[1] + " times.");
if (enemy.health <= 0){
alert("You win! Refresh to play again");
getEnemyHealth.innerHTML = 'Health: 0';
getPlayerHealth.innerHTML = 'Health: ' + player.health;
} else {
getEnemyHealth.innerHTML = 'Health: ' + enemy.health;
//enemy attacks
let enemyAttackValues = enemyAttack();
let totalDamage = enemyAttackValues[0] * enemyAttackValues[1];
player.health =- totalDamage;
alert("Enemy hit " + enemyAttackValues[0] + " damage " + enemyAttackValues[1] + " times.");
if (player.health <= 0) {
alert("You loose! Refresh browser to play again");
getPlayerHealth.innerHTML = "Health: 0";
getEnemyHealth.innerHTML = "Health: " + enemy.health;
} else {
getPlayerHealth.innerHTML = "Health: " + player.health;
}
}
}
else if (getEnemyAgility > getPlayerAgility) {
let enemyAttackValues = enemyAttack();
let totalDamage = enemyAttackValues[0] * enemyAttackValues[1];
player.health =- totalDamage;
alert("Enemy hit " + enemyAttackValues[0] + " damage " +
enemyAttackValues[1] + " times.");
if (player.health <= 0){
alert("You loose! Refresh to play again");
getPlayerHealth.innerHTML = 'Health: 0';
getEnemyHealth.innerHTML = 'Health: ' + enemy.health;
} else {
getPlayerHealth.innerHTML = 'Health: ' + player.health;
//player attacks
let playerAttackValues = playerAttack();
let totalDamage = playerAttackValues[0] * playerAttackValues[1];
enemy.health =- totalDamage;
alert("You hit " + playerAttackValues[0] + " damage " + playerAttackValues[1] + " times.");
if (enemy.health <= 0) {
alert("You win! Refresh browser to play again");
getEnemyHealth.innerHTML = "Health: 0";
getPlayerHealth.innerHTML = "Health: " + player.health;
} else {
getEnemyHealth.innerHTML = "Health: " + enemy.health;
}
}
}
}
} |
#!/bin/sh
#
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

# Required inputs. BUG FIX: with `set -o nounset`, bare ${PKG} aborts with
# "unbound variable" before the friendly message can print; the ${VAR:-}
# default form lets the emptiness check itself run safely.
if [ -z "${PKG:-}" ]; then
    echo "PKG must be set"
    exit 1
fi
if [ -z "${ARCH:-}" ]; then
    echo "ARCH must be set"
    exit 1
fi
if [ -z "${VERSION:-}" ]; then
    echo "VERSION must be set"
    exit 1
fi

# Build fully static binaries for the requested architecture, stamping the
# release version into the package's version variable via the linker.
export CGO_ENABLED=0
export GOARCH="${ARCH}"

go install \
    -installsuffix "static" \
    -ldflags "-X ${PKG}/pkg/version.VERSION=${VERSION}" \
    ./...
|
<gh_stars>1-10
"use strict";
const Boom = require(`boom`);
const Promise = require(`bluebird`);
const Users = require(`../modules/users/model`);
const Errors = require(`./errors`);
class Prerequisites {
  /**
   * confirmRecordExists(model[, mode, requestKey, databasekey])
   *
   * Returns a HAPI pre-req package configured to
   * to fetch all matching records of the passed `model`,
   * using data from the route parameters or the request payload
   * to build the query.
   *
   * @param {Object} model - Bookshelf model for querying
   * @param {Object} config - Contains configuration options for the query:
   * mode {String} - 'param' or 'payload' (if omitted, returns all records)
   * requestKey {String} - key of the param/payload
   * databaseKey {String} - key of the database model
   *
   * @return {Promise} - Promise fullfilled when the record is found
   */
  static confirmRecordExists(model, config) {
    config = config || {};
    // When no explicit database column is given, assume it matches the
    // request param/payload key.
    const dbKey = config.databaseKey || config.requestKey;
    const requestKey = config.requestKey;
    return {
      assign: `records`,
      method(req, reply) {
        const queryOptions = {};
        // Without a requestKey the query has no WHERE clause and fetches
        // every record of the model.
        if (requestKey) {
          queryOptions.where = {};
          if (config.mode === `param`) {
            queryOptions.where[dbKey] = req.params[requestKey];
          } else {
            queryOptions.where[dbKey] = req.payload[requestKey];
          }
        }
        let fetchOptions = {};
        if (config.columns) {
          fetchOptions = { columns: config.columns };
        }
        const result = model
          .query(queryOptions)
          .fetchAll(fetchOptions)
          .then(function(records) {
            // fetchAll resolves with an (possibly empty) collection, so an
            // explicit length check is needed to 404 on no matches.
            if (records.length === 0) {
              throw Boom.notFound(null, {
                debug: true,
                error: `resource not found`
              });
            }
            return records;
          })
          .catch(Errors.generateErrorResponse);
        return reply(result);
      }
    };
  }
  /**
   * validateUser()
   *
   * Ensures that the user sending the request exists in the
   * current context. This means that the user should have hit the
   * /users/login route first
   *
   * @return {Promise} - Promise fullfilled when the user has been found
   */
  static validateUser() {
    return {
      assign: `user`,
      method(request, reply) {
        const result = Users.query({
          where: {
            name: request.auth.credentials.username
          }
        })
          .fetch()
          .then(function(authenticatedUser) {
            if (!authenticatedUser) {
              // This case means our auth logic failed unexpectedly
              throw Boom.badImplementation(null, {
                error: `authenticated user doesn't exist (mayday!)`
              });
            }
            return authenticatedUser;
          })
          .catch(Errors.generateErrorResponse);
        return reply(result);
      }
    };
  }
  /**
   * validateOwnership()
   *
   * Ensures the authenticated user is the owner of the
   * resource being manipulated or requested.
   *
   * Expects request.pre.records (from confirmRecordExists) and
   * request.pre.user (from validateUser) to already be populated.
   *
   * @return {Promise} - Promise fullfilled when the user has been confirmed to
   * be the owner of the resource requested
   */
  static validateOwnership() {
    return {
      method(request, reply) {
        // Only the first matching record is checked for ownership.
        const resource = request.pre.records.models[0];
        const authenticatedUser = request.pre.user;
        const result = Promise.resolve()
          .then(function() {
            // Check if the resource is the owning user, otherwise fetch
            // the user it's owned by
            if (resource.tableName === `users`) {
              return resource;
            }
            return resource
              .user()
              .query({})
              .fetch()
              .then(function(owner) {
                if (!owner) {
                  // This should never ever happen
                  throw Boom.badImplementation(null, {
                    error: `An owning user can't be found (mayday!)`
                  });
                }
                return owner;
              });
          })
          .then(function(owner) {
            if (owner.get(`id`) !== authenticatedUser.get(`id`)) {
              throw Boom.unauthorized(null, {
                debug: true,
                error: `User doesn't own the resource requested`
              });
            }
          })
          .catch(Errors.generateErrorResponse);
        return reply(result);
      }
    };
  }
  /**
   * validateCreationPermission([foreignKey, model])
   *
   * Ensures the authenticated user is the owner of the
   * resource being created.
   *
   * @param {Object} foreignKey - Foreign key of an existing resource
   * @param {Object} model - Bookshelf model for querying
   *
   * @return {Promise} - Promise fullfilled when the user has been confirmed to
   * be the owner of the resource being created
   */
  static validateCreationPermission(foreignKey, model) {
    return {
      method(request, reply) {
        const result = Users.query({
          where: {
            name: request.auth.credentials.username
          }
        })
          .fetch()
          .then(function(userRecord) {
            if (!userRecord) {
              // This case means our auth logic failed unexpectedly
              throw Boom.badImplementation(null, {
                error: `User doesn't exist!`
              });
            }
            // Check to see if there's a direct reference to `user_id` in the payload
            if (!foreignKey) {
              if (userRecord.get(`id`) !== request.payload.user_id) {
                throw Boom.unauthorized(null, {
                  debug: true,
                  error: `User doesn't own the resource being referenced`
                });
              }
              return;
            }
            // Otherwise resolve the referenced record and compare its owner.
            const query = { where: {} };
            query.where.id = request.payload[foreignKey];
            return model.query(query)
              .fetch()
              .then(function(record) {
                if (!record) {
                  throw Boom.notFound(null, {
                    debug: true,
                    error: `Foreign key doesn't reference an existing record`
                  });
                }
                if (userRecord.get(`id`) !== record.get(`user_id`)) {
                  throw Boom.unauthorized(null, {
                    debug: true,
                    error: `User doesn't own the resource being referenced`
                  });
                }
              });
          })
          .catch(Errors.generateErrorResponse);
        reply(result);
      }
    };
  }
  /**
   * trackTemporaryFile()
   *
   * Stores the path to a temporary file in req.app for clearing after a request completes
   * and in req.pre for use in the handler
   *
   * @return {String} - Path to the temporary file
   */
  static trackTemporaryFile() {
    return {
      assign: `tmpFile`,
      /* eslint no-unused-vars: ["error", { "argsIgnorePattern": "^reply$" }]*/
      method(request, reply) {
        const buffer = request.payload.buffer;
        // Store the paths for after the request completes
        request.app.tmpFile = buffer.path;
        reply(buffer.path);
      }
    };
  }
}

module.exports = Prerequisites;
|
<reponame>Skarlso/hubble
// Copyright 2019 Authors of Hubble
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package printer
import (
"encoding/json"
"fmt"
"os"
"strings"
"text/tabwriter"
pb "github.com/cilium/hubble/api/v1/observer"
"github.com/cilium/hubble/pkg/format"
"github.com/cilium/cilium/pkg/monitor/api"
"github.com/francoispqt/gojay"
"github.com/gogo/protobuf/types"
"github.com/google/gopacket/layers"
)
// Encoder for flows.
type Encoder interface {
	Encode(v interface{}) error
}

// Printer for flows.
type Printer struct {
	opts        Options
	line        int               // flows written so far; gates header/divider output
	tw          *tabwriter.Writer // only set for TabOutput mode
	jsonEncoder Encoder           // only set for JSONOutput mode
}
// New Printer.
func New(fopts ...Option) *Printer {
	// default options
	opts := Options{
		output: TabOutput,
		w:      os.Stdout,
		werr:   os.Stderr,
	}
	// apply optional parameters
	for _, fopt := range fopts {
		fopt(&opts)
	}
	p := &Printer{
		opts: opts,
	}
	switch opts.output {
	case TabOutput:
		// initialize tabwriter since it's going to be needed
		p.tw = tabwriter.NewWriter(opts.w, 2, 0, 3, ' ', 0)
	case JSONOutput:
		// gojay is the default JSON encoder; encoding/json is opt-in via options.
		if opts.withJSONEncoder {
			p.jsonEncoder = json.NewEncoder(p.opts.w)
		} else {
			p.jsonEncoder = gojay.NewEncoder(p.opts.w)
		}
	}
	return p
}
const (
tab = "\t"
newline = "\n"
)
// Close flushes any buffered tabwriter output; for output modes that do not
// use the tabwriter it is a no-op.
func (p *Printer) Close() error {
	if p.tw == nil {
		return nil
	}
	return p.tw.Flush()
}
// WriteErr writes the given msg, followed by a newline, to the err writer
// defined in the printer.
func (p *Printer) WriteErr(msg string) error {
	// Fprintf formats straight into the writer; the previous
	// Fprint(w, Sprintf(...)) built an intermediate string for no benefit
	// and is flagged by go vet's printf check.
	_, err := fmt.Fprintf(p.opts.werr, "%s\n", msg)
	return err
}
// getPorts returns the source and destination ports of the flow formatted as
// strings. Both are empty when the flow has no L4 layer or the protocol is
// neither TCP nor UDP.
func getPorts(f *pb.Flow) (string, string) {
	if f.L4 == nil {
		return "", ""
	}
	switch f.L4.Protocol.(type) {
	case *pb.Layer4_TCP:
		return format.TCPPort(layers.TCPPort(f.L4.GetTCP().SourcePort)), format.TCPPort(layers.TCPPort(f.L4.GetTCP().DestinationPort))
	case *pb.Layer4_UDP:
		return format.UDPPort(layers.UDPPort(f.L4.GetUDP().SourcePort)), format.UDPPort(layers.UDPPort(f.L4.GetUDP().DestinationPort))
	default:
		return "", ""
	}
}
// getHostNames renders the flow's source and destination endpoints via
// format.Hostname, enriched with namespace/pod names and DNS names when
// present. Returns empty strings when the flow carries no IP information.
func getHostNames(f *pb.Flow) (string, string) {
	var srcNamespace, dstNamespace, srcPodName, dstPodName string
	if f == nil || f.IP == nil {
		return "", ""
	}
	if f.Source != nil {
		srcNamespace = f.Source.Namespace
		srcPodName = f.Source.PodName
	}
	if f.Destination != nil {
		dstNamespace = f.Destination.Namespace
		dstPodName = f.Destination.PodName
	}
	srcPort, dstPort := getPorts(f)
	src := format.Hostname(f.IP.Source, srcPort, srcNamespace, srcPodName, f.SourceNames)
	dst := format.Hostname(f.IP.Destination, dstPort, dstNamespace, dstPodName, f.DestinationNames)
	return src, dst
}
// getTimestamp formats the flow's capture time for display, returning "N/A"
// when the flow is nil or its protobuf timestamp cannot be converted.
func getTimestamp(f *pb.Flow) string {
	if f == nil {
		return "N/A"
	}
	ts, err := types.TimestampFromProto(f.Time)
	if err != nil {
		return "N/A"
	}
	return format.MaybeTime(&ts)
}
// getFlowType renders a short, human-readable event type for a flow:
// "<l7proto>-<type>" for L7 flows, the drop reason for dropped flows, and the
// trace observation point otherwise. "UNKNOWN" when no event type is present.
func getFlowType(f *pb.Flow) string {
	if f == nil || f.EventType == nil {
		return "UNKNOWN"
	}
	if f.L7 != nil {
		// "l7" is the generic fallback when the record type is unrecognized.
		l7Protocol := "l7"
		l7Type := strings.ToLower(f.GetL7().Type.String())
		switch f.L7.GetRecord().(type) {
		case *pb.Layer7_Http:
			l7Protocol = "http"
		case *pb.Layer7_Dns:
			l7Protocol = "dns"
		case *pb.Layer7_Kafka:
			l7Protocol = "kafka"
		}
		return l7Protocol + "-" + l7Type
	}
	if f.Verdict == pb.Verdict_DROPPED {
		return api.DropReason(uint8(f.EventType.SubType))
	}
	return api.TraceObservationPoint(uint8(f.EventType.SubType))
}
// WriteProtoFlow writes pb.Flow into the output writer.
func (p *Printer) WriteProtoFlow(f *pb.Flow) error {
	switch p.opts.output {
	case TabOutput:
		// Emit the column header once, before the first flow.
		if p.line == 0 {
			_, err := fmt.Fprint(p.tw,
				"TIMESTAMP", tab,
				"SOURCE", tab,
				"DESTINATION", tab,
				"TYPE", tab,
				"VERDICT", tab,
				"SUMMARY", newline,
			)
			if err != nil {
				return err
			}
		}
		src, dst := getHostNames(f)
		_, err := fmt.Fprint(p.tw,
			getTimestamp(f), tab,
			src, tab,
			dst, tab,
			getFlowType(f), tab,
			f.Verdict.String(), tab,
			f.Summary, newline,
		)
		if err != nil {
			return fmt.Errorf("failed to write out packet: %v", err)
		}
	case DictOutput:
		// Separate consecutive records with a divider (skipped before the first).
		if p.line != 0 {
			// TODO: line length?
			_, err := fmt.Fprintln(p.opts.w, "------------")
			if err != nil {
				return err
			}
		}
		src, dst := getHostNames(f)
		// this is a little crude, but will do for now. should probably find the
		// longest header and auto-format the keys
		_, err := fmt.Fprint(p.opts.w,
			" TIMESTAMP: ", getTimestamp(f), newline,
			" SOURCE: ", src, newline,
			"DESTINATION: ", dst, newline,
			" TYPE: ", getFlowType(f), newline,
			" VERDICT: ", f.Verdict.String(), newline,
			" SUMMARY: ", f.Summary, newline,
		)
		if err != nil {
			return fmt.Errorf("failed to write out packet: %v", err)
		}
	case CompactOutput:
		src, dst := getHostNames(f)
		_, err := fmt.Fprintf(p.opts.w,
			"%s [%s]: %s -> %s %s %s (%s)\n",
			getTimestamp(f),
			f.NodeName,
			src,
			dst,
			getFlowType(f),
			f.Verdict.String(),
			f.Summary,
		)
		if err != nil {
			return fmt.Errorf("failed to write out packet: %v", err)
		}
	case JSONOutput:
		// Drop the raw payload before encoding; it is not part of the JSON output.
		f.Payload = nil
		// NOTE(review): this early return skips the p.line++ below — harmless
		// for JSON since line only gates headers/dividers, but worth knowing.
		return p.jsonEncoder.Encode(f)
	}
	p.line++
	return nil
}
|
import io.reactivex.Scheduler;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
public class CustomScheduler {
private Scheduler mainThreadScheduler;
private Scheduler computationThreadScheduler;
private Scheduler ioThreadScheduler;
public CustomScheduler() {
mainThreadScheduler = AndroidSchedulers.mainThread();
computationThreadScheduler = Schedulers.computation();
ioThreadScheduler = Schedulers.io();
}
public void scheduleOnMainThread(Runnable task) {
mainThreadScheduler.scheduleDirect(task);
}
public void scheduleOnComputationThread(Runnable task) {
computationThreadScheduler.scheduleDirect(task);
}
public void scheduleOnIOThread(Runnable task) {
ioThreadScheduler.scheduleDirect(task);
}
// Additional methods for handling custom scheduling logic can be added here
} |
package models
import "github.com/astaxie/beego/orm"
type Tag struct {
Id int64
Name string
}
// AddTag persists a new tag record; the generated id is discarded.
// BUG FIX: beego's Ormer.Insert requires a pointer to the model — passing the
// Tag by value made the insert fail at runtime.
func AddTag(tag Tag) error {
	o := orm.NewOrm()
	_, err := o.Insert(&tag)
	return err
}
// GetArticleTag returns the tags attached to the given article.
// BUG FIX: QuerySeter is immutable — Filter returns a new QuerySeter, and the
// original code discarded that return value, so the article_id condition was
// silently dropped and every tag was returned.
func GetArticleTag(articleId int64) ([]*Tag, error) {
	o := orm.NewOrm()
	qs := o.QueryTable("tag").Filter("article_id", articleId)
	tagList := make([]*Tag, 0)
	_, err := qs.All(&tagList)
	return tagList, err
}
// GetTags fetches every tag in the table.
func GetTags() ([]Tag, error) {
	allTags := make([]Tag, 0)
	_, err := orm.NewOrm().QueryTable("tag").All(&allTags)
	return allTags, err
}
// RenameTag updates the name of the tag identified by tagId; it returns an
// error if the tag cannot be loaded or the update fails.
func RenameTag(tagId int64, tagName string) error {
	o := orm.NewOrm()
	var tag Tag
	if err := o.QueryTable("tag").Filter("id", tagId).One(&tag); err != nil {
		return err
	}
	tag.Name = tagName
	_, err := o.Update(&tag)
	return err
}
// DeleteTag removes a tag and every article/tag association pointing at it.
func DeleteTag(tagId int64) error {
	o := orm.NewOrm()
	tag := Tag{
		Id: tagId,
	}
	if _, err := o.Delete(&tag); err != nil {
		return err
	}
	article_tags := make([]*ArticleTag, 0)
	qs := o.QueryTable("article_tag").Filter("tag_id", tagId)
	if _, err := qs.All(&article_tags); err != nil {
		return err
	}
	for _, x := range article_tags {
		// BUG FIX: the original discarded o.Delete(x)'s error and then
		// re-checked a stale err variable, so association-delete failures
		// were silently ignored.
		if _, err := o.Delete(x); err != nil {
			return err
		}
	}
	return nil
}
func FindTags(key string) ([]Tag,error){
var tags []Tag
var err error
o := orm.NewOrm()
qs := o.QueryTable("tag").Filter("name__iexact",key)
_,err = qs.All(&tags)
return tags,err
} |
'use strict';

Object.defineProperty(exports, "__esModule", {
  value: true
});

// Babel-compiled module: configures a single linkify-it instance with the
// full TLD list and exports one matcher function.
var _linkifyIt = require('linkify-it');

var _linkifyIt2 = _interopRequireDefault(_linkifyIt);

var _tlds = require('tlds');

var _tlds2 = _interopRequireDefault(_tlds);

// Babel interop helper: normalizes CommonJS vs. ES-module default exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

var linkify = new _linkifyIt2.default();
linkify.tlds(_tlds2.default);

// Returns linkify-it's match results for `text` (an array of matches, or
// null when nothing link-like is found).
exports.default = function (text) {
  return linkify.match(text);
};
<reponame>jfsnowden/etcher
/*
* Copyright 2017 resin.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict'
const stream = require('readable-stream')
const crypto = require('crypto')
const xxhash = require('xxhash')
const _ = require('lodash')
/**
* @summary Create an instance of ChecksumStream
* @name ChecksumStream
* @class
*/
class ChecksumStream extends stream.Transform {
  /**
   * @summary Create an instance of ChecksumStream
   * @name ChecksumStream
   * @class
   * @param {Object} options - options
   * @param {String[]} options.algorithms - hash algorithms
   * @example
   * var checksum = new ChecksumStream({
   *   algorithms: [ 'md5' ]
   * })
   *
   * checksum.once('checksum', (checksum) => {
   *   // checksum: {
   *   //   md5: '55a4eb779e08f604c41ba1cbfff47ada'
   *   // }
   * })
   *
   * fs.createReadStream( 'os-image.img' )
   *   .pipe( checksum )
   *   .pipe( fs.createWriteStream( '/dev/rdisk2' ) )
   *   .once( 'finish', () => { ... })
   */
  constructor (options = {}) {
    super(options)
    this.results = {}
    this.algorithms = options.algorithms || []
    // One hash stream per requested algorithm; _transform fans chunks out to all.
    this.hashes = _.map(this.algorithms, (algorithm) => {
      return this._createHash(algorithm)
    })
  }

  /**
   * @summary Create & pipe to the Hash streams
   * @private
   * @param {String[]} algorithm - hash algorithm
   * @returns {Stream}
   * @example
   * const hash = this._createHash(algorithm)
   */
  _createHash (algorithm) {
    let hash = null

    if (algorithm === 'xxhash') {
      // Seed value 0x45544348 = ASCII "ETCH"
      const seed = 0x45544348
      // BUG FIX: Node.js reports 64-bit ARM as 'arm64' (process.arch never
      // yields 'aarch64'), so ARM64 hosts previously fell back to the 32-bit
      // hash. 'aarch64' is kept as a defensive extra match.
      const is64Bit = process.arch === 'x64' || process.arch === 'arm64' || process.arch === 'aarch64'
      hash = new xxhash.Stream(seed, is64Bit ? 64 : 32)
    } else {
      hash = _.attempt(crypto.createHash, algorithm)
    }

    // crypto.createHash throws on unknown algorithms; surface that with the
    // algorithm name appended for easier debugging.
    if (_.isError(hash)) {
      hash.message += ` "${algorithm}"`
      throw hash
    }

    /**
     * @summary Check for all checksums to have been calculated
     * @private
     * @example
     * hash.once('end', check)
     */
    const check = () => {
      if (_.keys(this.results).length === this.algorithms.length) {
        this.emit('checksum', _.clone(this.results))
      }
    }

    hash.once('error', (error) => {
      return this.emit('error', error)
    })

    hash.once('readable', () => {
      this.results[algorithm] = hash.read().toString('hex')
      check()
    })

    return hash
  }

  /**
   * @summary Pass through chunks
   * @private
   * @param {Buffer} chunk - chunk
   * @param {String} encoding - encoding
   * @param {Function} next - callback
   * @example
   * checksumStream.write(buffer)
   */
  _transform (chunk, encoding, next) {
    for (let index = 0; index < this.hashes.length; index += 1) {
      this.hashes[index].write(chunk)
    }
    next(null, chunk)
  }

  /**
   * @summary End the hash streams once this stream ends
   * @private
   * @param {Function} done - callback
   * @example
   * checksumStream.end()
   */
  _flush (done) {
    for (let index = 0; index < this.hashes.length; index += 1) {
      this.hashes[index].end()
    }
    done()
  }
}
module.exports = ChecksumStream
|
<reponame>Nebulis/blog
import {
extraLargeStart,
extraLargeStartSize,
largeEnd,
largeStart,
largeStartSize,
maxWidthExtraLargeContainer,
maxWidthLargeContainer,
maxWidthMediumContainer,
mediumEnd,
mediumStart,
mediumStartSize,
smallEnd,
} from "../core/variables"
import { ExtraImageLinkProps } from "../../types/shared"
import { useWindowSize } from "../hooks/useWindowSize"
import { ApplicationLink } from "../core/links/link"
import React, { ComponentType } from "react"
import { css, jsx } from "@emotion/react"
import styled from "@emotion/styled"
// Horizontal gap (px) between country cards; also used by CountryContainer's
// width computation.
const margin = 20

// Card styling: per-breakpoint margins so rows pack 2/3/4 cards, a centered
// uppercase title over the image, and a dark overlay that deepens on hover.
// (No comments inside the template literal — they would leak into the CSS.)
const style = css`
  @media (min-width: ${mediumStart}) and (max-width: ${mediumEnd}) {
    &:nth-of-type(odd) {
      margin-right: ${margin}px;
    }
  }
  @media (min-width: ${largeStart}) and (max-width: ${largeEnd}) {
    margin-right: ${margin}px;
    &:nth-of-type(3n) {
      margin-right: 0;
    }
  }
  @media (min-width: ${extraLargeStart}) {
    margin-right: ${margin}px;
    &:nth-of-type(4n) {
      margin-right: 0;
    }
  }
  &:last-of-type {
    margin-right: 0;
  }
  position: relative;
  .title {
    position: absolute;
    top: 50%;
    left: 50%;
    transform: translate(-50%, -50%);
    z-index: 10;
    color: white;
    text-transform: uppercase;
    font-weight: bold;
    font-size: 1.3rem;
    transition: all 0.2s linear;
    width: 100%;
    padding: 0 1rem;
    text-align: center;
  }
  .overlay {
    position: absolute;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background-color: black;
    opacity: 0.2;
    z-index: 5;
    pointer-events: none;
    transition: all 0.2s linear;
  }
  &:hover .overlay {
    opacity: 0.4;
  }
  &:hover .title {
    font-size: 1.4rem;
  }
`
// Clickable country card: a linked image with a centered uppercase title and
// a darkening overlay on hover. The card's width is computed from the current
// window width and the container breakpoints.
export const CountryContainer: React.FunctionComponent<{
  title: string
  image: ComponentType<ExtraImageLinkProps>
  imageProps?: ExtraImageLinkProps
  to: string
  // use a default value otherwise typescript not happy :)
}> = ({ title, image: Image, to, imageProps = { image: "" } }) => {
  const { windowWidth } = useWindowSize()
  const maxAllowedWidth = 300
  // margin * 3 (and others) is not completely correct, we should multiply by the number of images displayed, but it's ok
  // Cards per row by breakpoint: 4 (extra large), 3 (large), 2 (medium),
  // full window width otherwise.
  const computedWidth =
    windowWidth > extraLargeStartSize
      ? (maxWidthExtraLargeContainer - margin * 3) / 4
      : windowWidth > largeStartSize
      ? (maxWidthLargeContainer - margin * 2) / 3
      : windowWidth > mediumStartSize
      ? (maxWidthMediumContainer - margin) / 2
      : windowWidth
  // Cap the card width at maxAllowedWidth pixels.
  const width = computedWidth > maxAllowedWidth ? `${maxAllowedWidth}px` : `${computedWidth}px`
  const { css: customCss, ...restImageProps } = imageProps
  return (
    <ApplicationLink to={to} css={style} className="custom-link mb3">
      <div className="title">{title}</div>
      <div className="overlay" />
      {jsx(Image, {
        fluidObject: { aspectRatio: 4 / 3 },
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore looks like the value is transformed to something different :)
        css: css`
          ${customCss}
          width: ${width};
        `,
        ...restImageProps,
      })}
    </ApplicationLink>
  )
}
// Flex wrapper that lays country cards out in centered, wrapping rows;
// collapses to a CSS grid on small screens.
export const CountriesContainer = styled.div`
  display: flex;
  flex-wrap: wrap;
  justify-content: center;
  @media (max-width: ${smallEnd}) {
    display: grid;
  }
`
|
class MenuItem:
    """A navigation menu entry with a display name, a URL, and a submenu flag."""

    def __init__(self, name, url, is_submenu):
        # Display label, link target, and whether the URL is relative to a base.
        self.name = name
        self.url = url
        self.is_submenu = is_submenu

    def get_full_url(self, base_url):
        """Return the item's URL: submenu entries are joined onto ``base_url``
        with a slash; all other entries return their URL unchanged."""
        return f"{base_url}/{self.url}" if self.is_submenu else self.url
# Example usage
# get_full_url() joins submenu URLs onto the base URL; non-submenu URLs are
# returned unchanged.
item1 = MenuItem('Home', '/home', False)
item2 = MenuItem('About', 'https://example.com/about', False)
item3 = MenuItem('Products', 'products', True)
base_url = 'https://example.com'
print(item1.get_full_url(base_url))  # Output: /home
print(item2.get_full_url(base_url))  # Output: https://example.com/about
print(item3.get_full_url(base_url))  # Output: https://example.com/products
#!/bin/bash
# Run a single dieharder RNG test: -d selects the test (206), -g the
# generator (5), and -S fixes the seed so the run is reproducible.
dieharder -d 206 -g 5 -S 326285884
|
#!/usr/bin/env bash
set -e

# Resolve the repository root (parent of this script's directory) and work there.
d="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$d"

# BUG FIX: the original printed the usage message but did not exit, so the
# script carried on and generated a post with an empty title.
if [ "$#" -lt 1 ]; then
    echo "Usage: make-show.sh post-title"
    exit 1
fi

# GNU date: full timestamp for the front matter, date-only prefix for the filename.
date="$(date --rfc-3339=seconds)"
date_prefix="$(echo "$date" | cut -d' ' -f1)"

title="$1"
# Slugify: ASCII-fold, collapse non-alphanumeric runs to '-', strip leading/
# trailing dashes, lowercase.
# BUG FIX: the trim pattern used '\|' — a literal pipe under sed -E (ERE) —
# so edge dashes were never removed; ERE alternation is a bare '|'.
title_slug="$(echo "$title" | iconv -t ascii//TRANSLIT | sed -E 's/[^a-zA-Z0-9]+/-/g' | sed -E 's/^-+|-+$//g' | tr A-Z a-z)"

filename="$d/_posts/$date_prefix-$title_slug.md"

# Only scaffold the post if it does not exist yet.
if [ ! -f "$filename" ]; then
cat > "$filename" <<NEW_POST
---
author: mogria
layout: post
title: $title
date: $date
categories: news
---
NEW_POST
else
echo "WARN: already exists"
fi

# Open the post for editing; fall back to vi when EDITOR is unset so set -e
# does not kill the script on an empty command.
"${EDITOR:-vi}" "$filename"
echo "$filename"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.