text
stringlengths
1
1.05M
package org.moskito.control.ui.resource.configuration;

import org.moskito.control.config.ConnectorType;

import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * JAXB bean describing one configured connector entry.
 * The connector factory reads {@link #getType()} and {@link #getClassName()}
 * to decide which implementation class to instantiate.
 */
@XmlRootElement
public class ConnectorConfigBean {

    /** Connector type; used by the factory to create an instance. */
    @XmlElement
    private ConnectorType type;

    /** Fully qualified class name of the connector implementation to instantiate. */
    @XmlElement
    private String className;

    public ConnectorType getType() {
        return type;
    }

    public void setType(ConnectorType type) {
        this.type = type;
    }

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }
}
#!/usr/bin/env bash
# Run every script in scripts/ sequentially.
for script in scripts/*; do
    # Guard against an unmatched glob (literal 'scripts/*') and skip
    # non-file entries such as subdirectories.
    [ -f "$script" ] || continue
    # Quote the path so filenames containing spaces are passed intact.
    bash "$script"
done
// NOTE(review): the leading "<reponame>..." token below is a dataset/corpus artifact that
// precedes the package declaration — it is not valid Java and must be removed before compiling.
// Camera2-based capture activity: previews via a TextureView, captures both JPEG and RAW (DNG)
// frames, saves them on a background HandlerThread, and shows saved files in a RecyclerView gallery.
<reponame>mabeto5p/Progresspicture package nigelhenshaw.com.cameraintenttutorial; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Camera; import android.graphics.ImageFormat; import android.view.LayoutInflater; import android.view.ViewGroup; import android.widget.Button; import android.widget.PopupWindow; import android.graphics.Matrix; import android.graphics.RectF; import android.graphics.SurfaceTexture; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraDevice; import android.hardware.camera2.CameraManager; import android.hardware.camera2.CaptureFailure; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.CaptureResult; import android.hardware.camera2.DngCreator; import android.hardware.camera2.TotalCaptureResult; import android.hardware.camera2.params.StreamConfigurationMap; import android.media.Image; import android.media.ImageReader; import android.net.Uri; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.HandlerThread; import android.os.Looper; import android.os.Message; import android.provider.MediaStore; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.LruCache; import android.util.Size; import android.util.SparseIntArray; import android.view.Gravity; import android.view.Menu; import android.view.MenuItem; import android.view.Surface; import android.view.TextureView; import android.view.View; import android.widget.ImageView; import android.widget.RelativeLayout; import android.widget.Toast; import android.widget.Toolbar; import java.io.File; import 
java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.List; public class CamaraIntentActivity extends Activity implements RecyclerViewClickPositionInterface { private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); static { ORIENTATIONS.append(Surface.ROTATION_0, 90); ORIENTATIONS.append(Surface.ROTATION_90, 0); ORIENTATIONS.append(Surface.ROTATION_180, 270); ORIENTATIONS.append(Surface.ROTATION_270, 180); } private static final String IMAGE_FILE_LOCATION = "image_file_location"; private static final int ACTIVITY_START_CAMERA_APP = 0; private static final int STATE_PREVIEW = 0; private static final int STATE__WAIT_LOCK = 1; private static final int STATE__PICTURE_CAPTURED = 2; private int mState = STATE_PREVIEW; private ImageView mPhotoCapturedImageView; private String mImageFileLocation = ""; private File mGalleryFolder; private File mRawGalleryFolder; private static LruCache<String, Bitmap> mMemoryCache; private RecyclerView mRecyclerView; private Size mPreviewSize; private String mCameraId; private CameraCharacteristics mCameraCharacteristics; private CaptureResult mCaptureResult; private TextureView mTextureView; private TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() { @Override public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { setupCamera(width, height); transformImage(width, height); openCamera(); } @Override public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { } @Override public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { return false; } @Override public void onSurfaceTextureUpdated(SurfaceTexture surface) { } }; private CameraDevice mCameraDevice; private CameraDevice.StateCallback 
mCameraDeviceStateCallback = new CameraDevice.StateCallback() { @Override public void onOpened(CameraDevice camera) { mCameraDevice = camera; createCameraPreviewSession(); // Toast.makeText(getApplicationContext(), "Camera Opened!", Toast.LENGTH_SHORT).show(); } @Override public void onDisconnected(CameraDevice camera) { camera.close(); mCameraDevice = null; } @Override public void onError(CameraDevice camera, int error) { camera.close(); mCameraDevice = null; } }; private CaptureRequest mPreviewCaptureRequest; private CaptureRequest.Builder mPreviewCaptureRequestBuilder; private CameraCaptureSession mCameraCaptureSession; private CameraCaptureSession.CaptureCallback mSessionCaptureCallback = new CameraCaptureSession.CaptureCallback() { private void process(CaptureResult result) { switch(mState) { case STATE_PREVIEW: // Do nothing break; case STATE__WAIT_LOCK: Integer afState = result.get(CaptureResult.CONTROL_AF_STATE); if(afState == CaptureRequest.CONTROL_AF_STATE_FOCUSED_LOCKED || afState == CaptureRequest.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) { /* unLockFocus(); Toast.makeText(getApplicationContext(), "Focus Lock Successful", Toast.LENGTH_SHORT).show(); */ mState = STATE__PICTURE_CAPTURED; captureStillImage(); } break; } } @Override public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) { super.onCaptureStarted(session, request, timestamp, frameNumber); } @Override public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) { super.onCaptureCompleted(session, request, result); process(result); } @Override public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { super.onCaptureFailed(session, request, failure); Toast.makeText(getApplicationContext(), "Focus Lock Unsuccessful", Toast.LENGTH_SHORT).show(); } }; private HandlerThread mBackgroundThread; private Handler mBackgroundHandler; private final 
// UI-thread handler: messages posted to it invoke swapImageAdapter() so the gallery
// refreshes after ImageSaver finishes writing a capture to disk.
Handler mUiHandler = new Handler(Looper.getMainLooper()) { @Override public void handleMessage(Message msg) { super.handleMessage(msg); swapImageAdapter(); } }; private static File mImageFile; private static File mRawImageFile; private ImageReader mImageReader; private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() { @Override public void onImageAvailable(ImageReader reader) { mBackgroundHandler.post(new ImageSaver(mActivity, reader.acquireNextImage(), mUiHandler, mCaptureResult, mCameraCharacteristics)); } }; private ImageReader mRawImageReader; private final ImageReader.OnImageAvailableListener mOnRawImageAvailableListener = new ImageReader.OnImageAvailableListener() { @Override public void onImageAvailable(ImageReader reader) { mBackgroundHandler.post(new ImageSaver(mActivity, reader.acquireNextImage(), mUiHandler, mCaptureResult, mCameraCharacteristics)); } }; private static Uri mRequestingAppUri; private Activity mActivity; @Override public void getRecyclerViewAdapterPosition(int position) { // Toast.makeText(this, Integer.toString(position), Toast.LENGTH_SHORT).show(); Intent sendFileAddressIntent = new Intent(this, SingleImageActivity.class); sendFileAddressIntent.putExtra(IMAGE_FILE_LOCATION, sortFilesToLatest(mGalleryFolder)[position].toString()); startActivity(sendFileAddressIntent); } private static class ImageSaver implements Runnable { private final Image mImage; private final Activity mActivity; private final Handler mHandler; private final CaptureResult mCaptureResult; private final CameraCharacteristics mCameraCharacteristics; private ImageSaver(Activity activity, Image image, Handler handler, CaptureResult captureResult, CameraCharacteristics cameraCharacteristics) { mActivity = activity; mImage = image; mHandler = handler; mCaptureResult = captureResult; mCameraCharacteristics = cameraCharacteristics; } @Override public void run() { int format = mImage.getFormat(); switch(format) { 
// ImageSaver.run(): JPEG frames are copied from plane 0 into mImageFile; RAW_SENSOR frames are
// written through DngCreator into mRawImageFile. Both paths close the Image in a finally block.
case ImageFormat.JPEG: ByteBuffer byteBuffer = mImage.getPlanes()[0].getBuffer(); byte[] bytes = new byte[byteBuffer.remaining()]; byteBuffer.get(bytes); FileOutputStream fileOutputStream = null; try { fileOutputStream = new FileOutputStream(mImageFile); fileOutputStream.write(bytes); } catch (IOException e) { e.printStackTrace(); } finally { mImage.close(); if(fileOutputStream != null) { try { fileOutputStream.close(); } catch (IOException e) { e.printStackTrace(); } } if(mRequestingAppUri != null) { mRequestingAppUri = null; mActivity.setResult(RESULT_OK); mActivity.finish(); } Message message = mHandler.obtainMessage(); message.sendToTarget(); } break; case ImageFormat.RAW_SENSOR: DngCreator dngCreator = new DngCreator(mCameraCharacteristics, mCaptureResult); FileOutputStream rawFileOutputStream = null; try { rawFileOutputStream = new FileOutputStream(mRawImageFile); dngCreator.writeImage(rawFileOutputStream, mImage); } catch (IOException e) { e.printStackTrace(); } finally { mImage.close(); if(rawFileOutputStream != null) { try { rawFileOutputStream.close(); } catch (IOException e) { e.printStackTrace(); } } } break; } } } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_camara_intent); Intent intent = getIntent(); String action = intent.getAction(); if(MediaStore.ACTION_IMAGE_CAPTURE.equals(action)) { mRequestingAppUri = intent.getParcelableExtra(MediaStore.EXTRA_OUTPUT); } mActivity = this; createImageGallery(); mRecyclerView = (RecyclerView) findViewById(R.id.galleryRecyclerView); GridLayoutManager layoutManager = new GridLayoutManager(this, 1); layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL); mRecyclerView.setLayoutManager(layoutManager); RecyclerView.Adapter imageAdapter = new ImageAdapter(sortFilesToLatest(mGalleryFolder), this); mRecyclerView.setAdapter(imageAdapter); final int maxMemorySize = (int) Runtime.getRuntime().maxMemory() / 1024; final int cacheSize 
= maxMemorySize / 10; mMemoryCache = new LruCache<String, Bitmap>(cacheSize) { @Override protected int sizeOf(String key, Bitmap value) { return value.getByteCount() / 1024; } }; mTextureView = (TextureView) findViewById(R.id.textureView); } @Override public void onResume() { super.onResume(); openBackgroundThread(); if(mTextureView.isAvailable()) { setupCamera(mTextureView.getWidth(), mTextureView.getHeight()); transformImage(mTextureView.getWidth(), mTextureView.getHeight()); openCamera(); } else { mTextureView.setSurfaceTextureListener(mSurfaceTextureListener); } } @Override public void onPause() { closeCamera(); closeBackgoundThread(); super.onPause(); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_camara_intent, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. 
// takePhoto() now drives the Camera2 lockFocus() path; the older ACTION_IMAGE_CAPTURE
// intent flow is kept commented out inside it for reference.
int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } public void takePhoto(View view) { /* Intent callCameraApplicationIntent = new Intent(); callCameraApplicationIntent.setAction(MediaStore.ACTION_IMAGE_CAPTURE); File photoFile = null; try { photoFile = createImageFile(); } catch (IOException e) { e.printStackTrace(); } callCameraApplicationIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(photoFile)); startActivityForResult(callCameraApplicationIntent, ACTIVITY_START_CAMERA_APP); */ lockFocus(); } protected void onActivityResult (int requestCode, int resultCode, Intent data) { if(requestCode == ACTIVITY_START_CAMERA_APP && resultCode == RESULT_OK) { // Toast.makeText(this, "Picture taken successfully", Toast.LENGTH_SHORT).show(); // Bundle extras = data.getExtras(); // Bitmap photoCapturedBitmap = (Bitmap) extras.get("data"); // mPhotoCapturedImageView.setImageBitmap(photoCapturedBitmap); // Bitmap photoCapturedBitmap = BitmapFactory.decodeFile(mImageFileLocation); // mPhotoCapturedImageView.setImageBitmap(photoCapturedBitmap); // setReducedImageSize(); RecyclerView.Adapter newImageAdapter = new ImageAdapter(sortFilesToLatest(mGalleryFolder), this); mRecyclerView.swapAdapter(newImageAdapter, false); } } private void createImageGallery() { File storageDirectory = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES); mGalleryFolder = new File(storageDirectory, "JPEG Images"); mRawGalleryFolder = new File(storageDirectory, "Raw Images"); if(!mGalleryFolder.exists()) { mGalleryFolder.mkdirs(); } if(!mRawGalleryFolder.exists()) { mRawGalleryFolder.mkdirs(); } } File createImageFile() throws IOException { String timeStamp = new SimpleDateFormat("yyyyMMdd").format(new Date()); String imageFileName = "JPEG_" + timeStamp + "_"; File image = File.createTempFile(imageFileName, ".jpg", mGalleryFolder); mImageFileLocation = 
image.getAbsolutePath(); return image; } File createRawImageFile() throws IOException { String timeStamp = new SimpleDateFormat("yyyyMMdd").format(new Date()); String imageFileName = "RAW_" + timeStamp + "_"; File image = File.createTempFile(imageFileName, ".dng", mRawGalleryFolder); mImageFileLocation = image.getAbsolutePath(); return image; } void setReducedImageSize() { int targetImageViewWidth = mPhotoCapturedImageView.getWidth(); int targetImageViewHeight = mPhotoCapturedImageView.getHeight(); BitmapFactory.Options bmOptions = new BitmapFactory.Options(); bmOptions.inJustDecodeBounds = true; BitmapFactory.decodeFile(mImageFileLocation, bmOptions); int cameraImageWidth = bmOptions.outWidth; int cameraImageHeight = bmOptions.outHeight; int scaleFactor = Math.min(cameraImageWidth/targetImageViewWidth, cameraImageHeight/targetImageViewHeight); bmOptions.inSampleSize = scaleFactor; bmOptions.inJustDecodeBounds = false; Bitmap photoReducedSizeBitmp = BitmapFactory.decodeFile(mImageFileLocation, bmOptions); mPhotoCapturedImageView.setImageBitmap(photoReducedSizeBitmp); } private File[] sortFilesToLatest(File fileImagesDir) { File[] files = fileImagesDir.listFiles(); Arrays.sort(files, new Comparator<File>() { @Override public int compare(File lhs, File rhs) { return Long.valueOf(rhs.lastModified()).compareTo(lhs.lastModified()); } }); return files; } public static Bitmap getBitmapFromMemoryCache(String key) { return mMemoryCache.get(key); } public static void setBitmapToMemoryCache(String key, Bitmap bitmap) { if(getBitmapFromMemoryCache(key) == null) { mMemoryCache.put(key, bitmap); } } private void setupCamera(int width, int height) { CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); try { for(String cameraId : cameraManager.getCameraIdList()) { CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId); 
// setupCamera(): selects the first non-front camera that advertises RAW capability,
// then sizes the JPEG and RAW ImageReaders to the largest available output sizes.
if(!contains(cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) { continue; } if(cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT){ continue; } StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); Size largestImageSize = Collections.max( Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizeByArea()); Size largestRawImageSize = Collections.max( Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizeByArea()); mImageReader = ImageReader.newInstance(largestImageSize.getWidth(), largestImageSize.getHeight(), ImageFormat.JPEG, 1); mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler); mRawImageReader = ImageReader.newInstance(largestRawImageSize.getWidth(), largestRawImageSize.getHeight(), ImageFormat.RAW_SENSOR, 1); mRawImageReader.setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler); mPreviewSize = getPreferredPreviewSize(map.getOutputSizes(SurfaceTexture.class), width, height); mCameraId = cameraId; mCameraCharacteristics = cameraCharacteristics; return; } } catch (CameraAccessException e) { e.printStackTrace(); } } private Size getPreferredPreviewSize(Size[] mapSizes, int width, int height) { List<Size> collectorSizes = new ArrayList<>(); for(Size option : mapSizes) { if(width > height) { if(option.getWidth() > width && option.getHeight() > height) { collectorSizes.add(option); } } else { if(option.getWidth() > height && option.getHeight() > width) { collectorSizes.add(option); } } } if(collectorSizes.size() > 0) { return Collections.min(collectorSizes, new Comparator<Size>() { @Override public int compare(Size lhs, Size rhs) { return Long.signum(lhs.getWidth() * lhs.getHeight() - rhs.getWidth() * rhs.getHeight()); } }); } return mapSizes[0]; } private void openCamera() { 
CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); try { cameraManager.openCamera(mCameraId, mCameraDeviceStateCallback, mBackgroundHandler); } catch (CameraAccessException e) { e.printStackTrace(); } } private void closeCamera() { if(mCameraCaptureSession != null) { mCameraCaptureSession.close(); mCameraCaptureSession = null; } if(mCameraDevice != null) { mCameraDevice.close(); mCameraDevice = null; } if(mImageReader != null) { mImageReader.close(); mImageReader = null; } } private void createCameraPreviewSession() { try { SurfaceTexture surfaceTexture = mTextureView.getSurfaceTexture(); surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); Surface previewSurface = new Surface(surfaceTexture); mPreviewCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); mPreviewCaptureRequestBuilder.addTarget(previewSurface); mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface(), mRawImageReader.getSurface()), new CameraCaptureSession.StateCallback() { @Override public void onConfigured(CameraCaptureSession session) { if(mCameraDevice == null) { return; } try { mPreviewCaptureRequest = mPreviewCaptureRequestBuilder.build(); mCameraCaptureSession = session; mCameraCaptureSession.setRepeatingRequest( mPreviewCaptureRequest, mSessionCaptureCallback, mBackgroundHandler ); } catch (CameraAccessException e) { e.printStackTrace(); } } @Override public void onConfigureFailed(CameraCaptureSession session) { Toast.makeText( getApplicationContext(), "create camera session failed!", Toast.LENGTH_SHORT ).show(); } }, null); } catch (CameraAccessException e) { e.printStackTrace(); } } private void openBackgroundThread() { mBackgroundThread = new HandlerThread("Camera2 background thread"); mBackgroundThread.start(); mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); } private void closeBackgoundThread() { mBackgroundThread.quitSafely(); 
// Background thread teardown: quitSafely() then join() so pending camera callbacks
// drain before the handler references are dropped.
try { mBackgroundThread.join(); mBackgroundThread = null; mBackgroundHandler = null; } catch (InterruptedException e) { e.printStackTrace(); } } private void lockFocus() { try { mState = STATE__WAIT_LOCK; mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START); mCameraCaptureSession.capture(mPreviewCaptureRequestBuilder.build(), mSessionCaptureCallback, mBackgroundHandler); } catch (CameraAccessException e) { e.printStackTrace(); } } private void unLockFocus() { try { mState = STATE_PREVIEW; mPreviewCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL); mCameraCaptureSession.setRepeatingRequest(mPreviewCaptureRequestBuilder.build(), mSessionCaptureCallback, mBackgroundHandler); } catch (CameraAccessException e) { e.printStackTrace(); } } private void swapImageAdapter() { RecyclerView.Adapter newImageAdapter = new ImageAdapter(sortFilesToLatest(mGalleryFolder), this); mRecyclerView.swapAdapter(newImageAdapter, false); } private void captureStillImage() { try { CaptureRequest.Builder captureStillBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); captureStillBuilder.addTarget(mImageReader.getSurface()); captureStillBuilder.addTarget(mRawImageReader.getSurface()); int rotation = getWindowManager().getDefaultDisplay().getRotation(); captureStillBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation)); CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() { @Override public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) { super.onCaptureStarted(session, request, timestamp, frameNumber); try { if(mRequestingAppUri != null) { mImageFile = new File(mRequestingAppUri.getPath()); } else { mImageFile = createImageFile(); mRawImageFile = createRawImageFile(); } } catch (IOException e) { e.printStackTrace(); } } @Override 
// onCaptureCompleted stores the TotalCaptureResult (required later by DngCreator for
// the RAW file) and calls unLockFocus() to resume the repeating preview.
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) { super.onCaptureCompleted(session, request, result); /* Toast.makeText(getApplicationContext(), "Image Captured!", Toast.LENGTH_SHORT).show(); */ mCaptureResult = result; unLockFocus(); } }; mCameraCaptureSession.stopRepeating(); mCameraCaptureSession.capture( captureStillBuilder.build(), captureCallback, null ); } catch (CameraAccessException e) { e.printStackTrace(); } //popUpCall(); } private void transformImage(int width, int height) { if(mPreviewSize == null || mTextureView == null) { return; } Matrix matrix = new Matrix(); int rotation = getWindowManager().getDefaultDisplay().getRotation(); RectF textureRectF = new RectF(0, 0, width, height); RectF previewRectF = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth()); float centerX = textureRectF.centerX(); float centerY = textureRectF.centerY(); if(rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) { previewRectF.offset(centerX - previewRectF.centerX(), centerY - previewRectF.centerY()); matrix.setRectToRect(textureRectF, previewRectF, Matrix.ScaleToFit.FILL); float scale = Math.max((float)width / mPreviewSize.getWidth(), (float)height / mPreviewSize.getHeight()); matrix.postScale(scale, scale, centerX, centerY); matrix.postRotate(90 * (rotation - 2), centerX, centerY); } mTextureView.setTransform(matrix); } private static class CompareSizeByArea implements Comparator<Size> { @Override public int compare(Size lhs, Size rhs) { return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); } } private static Boolean contains(int[] modes, int mode) { if(modes == null) { return false; } for(int i : modes) { if(i == mode) { return true; } } return false; } /* public void popUpCall(){ setContentView(R.layout.activity_camara_intent); RelativeLayout parentView = (RelativeLayout) findViewById(R.id.popupParent); LayoutInflater 
layoutInflater = (LayoutInflater) getApplicationContext().getSystemService(LAYOUT_INFLATER_SERVICE); ViewGroup container = (ViewGroup) layoutInflater.inflate(R.layout.popup,null); PopupWindow popupWindow = new PopupWindow(container, 400,400,true); popupWindow.showAtLocation(parentView, Gravity.BOTTOM, 100,100); }*/ }
import unittest

from game import Game
from game_spec_validator import GameSpecValidator

# Minimal 4x4 board specification shared by the tests below.
test_spec = {
    "shape": [4, 4],
    "start": [0, 0],
    "goal": [3, 3],
    "inaccessible": [[2, 1], [1, 2]],
    "pirate_routes": {
        "0": [[1, 1], [1, 2], [2, 2], [2, 1]]
    }
}


class Test_Game_init_from_spec(unittest.TestCase):
    """Tests around building a Game from a spec dictionary."""

    def test_new_instance(self):
        # Game.__new__ bypasses __init__; this only checks bare allocation.
        game = Game.__new__(Game)
        self.assertIsInstance(game, Game)

    def test_validate_spec(self):
        # A well-formed spec must yield an empty list of validation errors.
        # (Removed an unused Game instance that was created here for no reason.)
        self.assertEqual([], GameSpecValidator().validate_spec(test_spec))


if __name__ == "__main__":
    unittest.main()
import Home from "~/components/home"; //import { useForm } from "react-hook-form"; import path from "path"; import { promises as fsPromises } from "fs"; import { muiMdComponents } from "~/components/layout/muiMdComponents"; import { PageLayout } from "~/components/layout"; import { Box } from "@mui/material"; import { MDXRemote, MDXRemoteSerializeResult } from "next-mdx-remote"; import { serialize } from "next-mdx-remote/serialize"; // inject both the custom components + default components like h1, p, etc. const components = { ...muiMdComponents }; const HomePage = ({ source }: { source: MDXRemoteSerializeResult }) => { const readMeContent = <MDXRemote {...source} components={components} />; return ( <PageLayout> <main> <Home /> <Box sx={{ borderLeft: "1px solid", paddingLeft: "24px", borderImageSource: "linear-gradient(10deg, #e1009855, #3f77fa55)", borderImageSlice: 1, borderColor: "#3f77fa", }} > {readMeContent} </Box> </main> </PageLayout> ); }; export async function getStaticProps() { const filePath = path.resolve("./README.md"); const source = await fsPromises.readFile(filePath, { encoding: "utf8" }); // MDX text - can be from a local file, database, anywhere // Does a server-render of the source and relevant React wrappers + allow to inject React components const mdxSource = await serialize(source); return { props: { source: mdxSource } }; } export default HomePage;
<filename>config/file.go<gh_stars>1-10 package config import ( "io/ioutil" "os" "path/filepath" "strings" ) // File represents a configuration file on disk. type File struct { path string info os.FileInfo } // String provides custom formatting for the File struct. func (f *File) String() string { return f.path } // Key builds a key for a configuration file. func (f *File) Key() string { ext := filepath.Ext(f.path) return strings.TrimSuffix(f.path, ext) } // Read reads the contents of the file. func (f *File) Read() (string, error) { body, err := ioutil.ReadFile(f.path) if err != nil { return "", err } return string(body), nil } // FileSet represents a set of configuration files guaranteed to be unique. type FileSet struct { files map[string]*File } // Union creates a new FileSet from a slice of FileSets. func Union(sets []*FileSet) *FileSet { totalLen := 0 for _, set := range sets { totalLen += len(set.files) } union := &FileSet{} for _, set := range sets { for _, file := range set.files { union.Add(file) } } return union } // Add adds a new File to the set. func (s *FileSet) Add(file *File) { if s.files == nil { s.files = make(map[string]*File) } k := file.path s.files[k] = file } // ToSlice converts a FileSet into a slice. func (s *FileSet) ToSlice() []*File { i := 0 slice := make([]*File, len(s.files)) for _, file := range s.files { slice[i] = file i++ } return slice }
-- Books published before 2000 that have a rating of at least 3.
SELECT *
FROM books
WHERE year < 2000
  AND rating >= 3
// NOTE(review): protoc-generated enum — regenerate from edge.proto instead of hand-editing.
// Maps wire values 0/1 to DEVICE/ASSET; UNRECOGNIZED covers unknown wire values.
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: edge.proto package com.ciat.bim.server.edge.gen; /** * Protobuf enum {@code edge.EdgeEntityType} */ public enum EdgeEntityType implements com.google.protobuf.ProtocolMessageEnum { /** * <code>DEVICE = 0;</code> */ DEVICE(0), /** * <code>ASSET = 1;</code> */ ASSET(1), UNRECOGNIZED(-1), ; /** * <code>DEVICE = 0;</code> */ public static final int DEVICE_VALUE = 0; /** * <code>ASSET = 1;</code> */ public static final int ASSET_VALUE = 1; public final int getNumber() { if (this == UNRECOGNIZED) { throw new IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @Deprecated public static EdgeEntityType valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
forNumber returns null (not UNRECOGNIZED) for unknown wire values, per protobuf convention.
*/ public static EdgeEntityType forNumber(int value) { switch (value) { case 0: return DEVICE; case 1: return ASSET; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<EdgeEntityType> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< EdgeEntityType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<EdgeEntityType>() { public EdgeEntityType findValueByNumber(int number) { return EdgeEntityType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return EdgeProtos.getDescriptor().getEnumTypes().get(3); } private static final EdgeEntityType[] VALUES = values(); public static EdgeEntityType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private EdgeEntityType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:edge.EdgeEntityType) }
def binary_search(arr, target):
    """Return the index of ``target`` in sorted ``arr``, or -1 if absent.

    Iterative binary search over the inclusive window [lo, hi].
    """
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        probe = arr[mid]
        if probe == target:
            return mid
        if probe < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1
function getMaxNumberFromArray(arr) { return Math.max(...arr); } function getMaxValueFromObjects(arr) { let maxValue = arr[0].value; for (const obj of arr) { if (obj.value > maxValue) { maxValue = obj.value; } } return maxValue; } const maxNumber = getMaxNumberFromArray([6, 2, 8]); const maxValue = getMaxValueFromObjects([ {name: "John", value: 3}, {name: "Lisa", value: 8}, {name: "Lily", value: 7} ]); console.log(maxNumber); console.log(maxValue);
#!/bin/sh IP=$(ifconfig en0 | grep inet | awk '$1=="inet" {print $2}') if [ -z "$DISPLAY" ] then echo "Can't detect X11 display. If you don't know how to resolve this, please check ./doc/xhost_trouble_shooting.md" exit 1 else # xterm xterm -e "$(xhost $IP)" fi socat TCP-LISTEN:6000,reuseaddr,fork UNIX-CLIENT:\"$DISPLAY\" & docker run --privileged --rm -it -v "$(pwd):/root/quisp" -u "$(id -u):$(id -g)" --name quisp -v /tmp/.X11-unix:/tmp/.X11-unix -e DISPLAY="$IP:0" quisp trap "lsof -i:6000 -t|xargs kill" 0
// Zendesk app: links "escalation" child tickets to their parent ticket via a
// custom ancestry field (values "child_of:<id>" / "parent_of:<id>"), keeps the
// two tickets' statuses in sync on save, and creates child tickets from a
// configurable list of escalation reasons.
(function() {
  "use strict";

  return {
    // Ancestry-field value patterns; capture group 1 is the related ticket id.
    childRegex: /child_of:(\d*)/,
    parentRegex: /parent_of:(\d*)/,
    customFieldRegex: /custom_field_(\d+)/,
    // Cached copy of the related (parent or child) ticket, set by displayRelation.
    relation: {},
    // Currently selected escalation sub-category from the dropdown.
    sub_category: "",
    defaultTicketAttributes: { "custom_fields": [] },

    // Zendesk proxied API request definitions.
    requests: {
      'createTicket': function(attributes){
        return {
          url: '/api/v2/tickets.json',
          type: 'POST',
          dataType: 'json',
          data: JSON.stringify(attributes),
          contentType: 'application/json',
          proxy_v2: true,
          processData: false
        };
      },
      'fetchTicket': function(id) {
        return {
          url: '/api/v2/tickets/'+ id +'.json?include=groups,users',
          type: 'GET',
          dataType: 'json',
          proxy_v2: true
        };
      },
      'updateTicket': function(id, attributes){
        return {
          url: '/api/v2/tickets/' + id + '.json',
          type: 'PUT',
          dataType: 'json',
          data: JSON.stringify(attributes),
          contentType: 'application/json',
          proxy_v2: true,
          processData: false
        };
      }
    },

    events: {
      'app.created' : 'deferredInitialize',
      'ticket.save' : 'saveHook',
      'fetchTicket.done' : 'displayRelation',
      'change .sub_category' : 'showBody',
      'click .btn-confirm' : 'fireAction'
    },

    // Defer init so the framework finishes wiring the app before we touch fields.
    deferredInitialize: function(){
      var self = this;
      _.defer(function(){ self.initialize(); });
    },

    // Hide the raw ancestry field; if this ticket is already linked, load the
    // related ticket, otherwise show the escalation-category picker.
    initialize: function() {
      this.ticketFields('custom_field_' + this.setting('ancestry')).hide();
      if (this.isChild() || this.isParent()) return this.ajax('fetchTicket', this.childID() || this.parentID());
      this.switchTo('categories');
    },

    // ticket.save hook: blocks solving a parent before its child is solved,
    // mirrors internal comments from child to parent, and propagates the
    // resolution flag. Returning a string vetoes the save with that message.
    saveHook: function(){
      if(!(this.isChild() || this.isParent())) return true;
      if (this.isParent() && this.ticket().status() == 'solved' && this.relation.status !== 'solved') return this.I18n.t('messages.solve_child_first');
      var attributes = {};
      if (this.isChild() && this.comment().type().match("internal")){
        attributes.comment = {
          "body": "%@:\n%@".fmt(this.I18n.t("messages.comment_from_child"), this.comment().text()),
          "public": false
        };
      }
      if (this.ticket().status() == 'solved' && this.setting('resolution_field')) {
        this.ticket().customField('custom_field_' + this.setting('resolution_field'), true);
        attributes.custom_fields = [
          { id: Number(this.setting('resolution_field')), value: true }
        ];
      }
      // NOTE(review): `attributes` is an object and therefore always truthy, so
      // this branch always runs (possibly with an empty payload) and the final
      // `return true` below is unreachable — confirm whether an emptiness check
      // (e.g. _.isEmpty) was intended.
      if (attributes) return this.promise(function(done, fail){
        this.ajax('updateTicket', this.childID() || this.parentID(), { ticket: attributes })
          .fail(function(data){
            fail(this.I18n.t("messages.relation_update_failed", { error: data.responseText}));
          })
          .then(done.bind(done));
      });
      return true;
    },

    // Show/hide the comment form and prefill the templated body for the
    // selected sub-category ("-1" is the placeholder option).
    showBody: function() {
      this.sub_category = this.$('.sub_category').val();
      if(this.sub_category == "-1") {
        this.$('.comment-form').hide();
      } else {
        var body = this.I18n.t('escalation.subcategories.' + this.sub_category + '.body');
        this.$('.additional-comment').val(body);
        this.$('.comment-form').show();
      }
    },

    // Confirm button: build the child-ticket payload from config + form input
    // and create it.
    fireAction: function(){
      var config = this.findConfigByEscalationReason(this.sub_category),
          attributes = _.defaults(config.attributes, this.defaultTicketAttributes);
      attributes = this.appendAdditionalComment(attributes);
      attributes = this.interpolateWithContext(attributes);
      this.createChildTicket(attributes);
    },

    // Returns a clone of the config entry for the given escalation reason.
    findConfigByEscalationReason: function(sub_category){
      return _.clone(_.find(this.config(), function(i) {
        return i.escalation_reason === sub_category;
      }));
    },

    // Escape backslashes and double quotes in string fields so the
    // JSON.stringify/_.template round-trip in interpolateWithContext stays valid.
    preventMalformedJson: function(ticket) {
      for(var field in ticket) {
        if(typeof ticket[field] == "string") {
          ticket[field] = ticket[field].replace(/\\/g, '\\\\');
          ticket[field] = ticket[field].replace(/"/g, '\\"');
        }
      }
    },

    // Expand {{...}} placeholders in the config payload against the current
    // app/user context. On template/JSON errors, notify and reset the app.
    interpolateWithContext: function(obj){
      var context = _.extend(
        _.clone(this.containerContext()),
        this.currentUserContext()
      );
      context.ticket.id = this.ticket().id();
      try {
        this.preventMalformedJson(context.ticket);
        var temp_template = _.template(JSON.stringify(obj), { interpolate : /\{\{(.+?)\}\}/g });
        return JSON.parse(temp_template(context));
      } catch(error){
        services.notify('%@: %@'.fmt(this.I18n.t('messages.parse_error'), error.message), 'error');
        this.initialize();
      }
    },

    // Append the agent's free-text comment (if any) to the payload's comment body.
    appendAdditionalComment: function(attributes){
      var comment = this.$('textarea.additional-comment').val();
      if (!_.isEmpty(comment)){
        attributes.comment = attributes.comment || {};
        attributes.comment.body = attributes.comment.body || '';
        attributes.comment.body += '\n' + comment;
      }
      return attributes;
    },

    // Create the child ticket: drop empty/placeholder custom fields, tag it
    // with "child_of:<this ticket>", then link it back via setChildTicket.
    createChildTicket: function(attributes){
      this.switchTo('spinner');
      attributes.custom_fields = _.filter(attributes.custom_fields, function(field) {
        return !_.isEmpty(field.value) && !_.contains(["undefined", "null", "-"], field.value);
      });
      attributes.custom_fields.push({ id: this.setting('ancestry'), value: "child_of:" + this.ticket().id() });
      this.ajax('createTicket', { ticket: attributes })
        .done(function(data) {
          this.setChildTicket(data.ticket);
        });
    },

    // Record the new child on the current (parent) ticket, both in the live
    // ticket object and via the API, then render the relation view.
    setChildTicket: function(ticket){
      var new_ancestry_value = 'parent_of:' + ticket.id;
      this.ticket().customField('custom_field_' + this.setting('ancestry'), new_ancestry_value);
      var new_child_value = '' + ticket.id;
      this.ticket().customField('custom_field_' + this.setting('child_field'), new_child_value);
      var new_escalation_value = 'parent_ticket';
      this.ticket().customField('custom_field_' + this.setting('escalation_field'), new_escalation_value);
      var new_escalation_reason = this.sub_category;
      this.ticket().customField('custom_field_' + this.setting('escalation_reason'), new_escalation_reason);
      this.ajax('updateTicket', this.ticket().id(), { ticket: { custom_fields: [
        { id: this.setting('ancestry'), value: new_ancestry_value },
        { id: this.setting('child_field'), value: new_child_value },
        { id: this.setting('escalation_field'), value: new_escalation_value },
        { id: this.setting('escalation_reason'), value: new_escalation_reason }
      ] }});
      this.displayRelation({ ticket: ticket });
    },

    // Cache the related ticket and render the relation template.
    displayRelation: function(data){
      this.relation = data.ticket;
      this.switchTo('relation', {
        ticket: data.ticket,
        is_child: !!this.isChild()
      });
    },

    // Template context for the current agent (empty object when unavailable).
    currentUserContext: function(){
      var context = { current_user: {} };
      if (this.currentUser()){
        var names = this.splitUsername(this.currentUser().name());
        context.current_user = {
          id: this.currentUser().id(),
          email: this.currentUser().email(),
          name: this.currentUser().name(),
          firstname: names.firstname,
          lastname: names.lastname,
          externalId: this.currentUser().externalId()
        };
      }
      return context;
    },

    // Split "First Rest Of Name" into {firstname, lastname}.
    splitUsername: function(username){
      var names = username.split(' ');
      var obj = { firstname: '', lastname: '' };
      if (!_.isEmpty(names)){
        obj.firstname = names.shift();
        if (!_.isEmpty(names)){
          obj.lastname = names.join(' ');
        }
      }
      return obj;
    },

    // Escalation-reason config, stored as a JSON string in app settings.
    config: function(){
      return JSON.parse(this.setting('config'));
    },

    // Raw value of the ancestry custom field on the current ticket.
    ancestryValue: function(){
      return this.ticket().customField("custom_field_" + this.setting('ancestry'));
    },

    isParent: function(){
      return this.parentRegex.test(this.ancestryValue());
    },

    isChild: function(){
      return this.childRegex.test(this.ancestryValue());
    },

    // Id of the child ticket (only meaningful when this ticket is a parent).
    childID: function(){
      if (this.isParent()) return this.parentRegex.exec(this.ancestryValue())[1];
    },

    // Id of the parent ticket (only meaningful when this ticket is a child).
    parentID: function(){
      if (this.isChild()) return this.childRegex.exec(this.ancestryValue())[1];
    }
  };
}());
# Install node curl -sL https://deb.nodesource.com/setup_13.x | sudo bash - sudo apt-get install -y nodejs sudo apt-get install -y build-essential
#!/bin/bash -l ## ## Copyright (c) 2019 Opticks Team. All Rights Reserved. ## ## This file is part of Opticks ## (see https://bitbucket.org/simoncblyth/opticks). ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. ## opticks- opticks-id sdir=$(pwd) name=$(basename $sdir) bdir=/tmp/$USER/$name/build rm -rf $bdir && mkdir -p $bdir && cd $bdir && pwd om- om-cmake $sdir #cmake $sdir \ # -DCMAKE_BUILD_TYPE=Debug \ # -DCMAKE_INSTALL_PREFIX=$(opticks-prefix) \ # -DCMAKE_MODULE_PATH=$(opticks-home)/cmake/Modules \ # -DOPTICKS_PREFIX=$(opticks-prefix) make make install exe=$(opticks-prefix)/lib/$name if [ "$(uname)" == "Linux" ]; then ldd $exe ls -l $exe elif [ "$(uname)" == "Darwin" ]; then otool -L $exe $exe fi
<filename>src/services/config/redux/sagas/index.ts import { takeLatest, put, call, select, all, take, fork } from 'redux-saga/effects'; import getErrorMsg from 'shared/helpers/getErrorMsg'; import { ICurrencyPair, ICountry, IUserConfig, IPreset, IPresetLayouts, IAssetsInfoMap, IMConfig, IHoldingTheme, } from 'shared/types/models'; import { IDependencies } from 'shared/types/app'; import { convertToPresetLayouts } from 'shared/helpers/converters'; import { arePresetsLayoutsChanged, changePresetsLayouts } from 'shared/helpers/presets'; import { actions as notificationActions } from 'services/notification'; import * as actions from '../actions'; import * as NS from '../../namespace'; import * as selectors from '../data/selectors'; import { getAssetIdFromAssetName } from '../helpers'; import { UITheme } from 'shared/types/ui'; export default function getSaga(deps: IDependencies) { const loadSecuritySettingsType: NS.ILoadSecuritySettings['type'] = 'CONFIG:LOAD_SECURITY_SETTINGS'; const loadCurrencyPairsType: NS.ILoadCurrencyPairs['type'] = 'CONFIG:LOAD_CURRENCY_PAIRS'; const loadCountriesType: NS.ILoadCountries['type'] = 'CONFIG:LOAD_COUNTRIES'; const loadAssetsInfoType: NS.ILoadAssetsInfo['type'] = 'CONFIG:LOAD_ASSETS_INFO'; const loadUserConfigType: NS.ILoadUserConfig['type'] = 'CONFIG:LOAD_USER_CONFIG'; const setCurrentPresetsLayoutsType: NS.ISetCurrentPresetsLayouts['type'] = 'CONFIG:SET_CURRENT_PRESETS_LAYOUTS'; const saveCurrentPresetsLayoutsType: NS.ISaveCurrentPresetsLayouts['type'] = 'CONFIG:SAVE_CURRENT_PRESETS_LAYOUTS'; const saveAssetInfoType: NS.ISaveAssetInfo['type'] = 'CONFIG:SAVE_ASSET_INFO'; const saveUserConfigType: NS.ISaveUserConfig['type'] = 'CONFIG:SAVE_USER_CONFIG'; const saveThemeType: NS.ISaveTheme['type'] = 'CONFIG:SAVE_THEME'; const mLoadConfigType: NS.IMLoadConfig['type'] = 'CONFIG:M:LOAD_CONFIG'; const mSetCurrentCurrencyPairIDType: NS.IMSetCurrentCurrencyPairID['type'] = 'CONFIG:M:SET_CURRENT_CURRENCY_PAIR_ID'; function* saga() { yield 
all([ takeLatest(loadSecuritySettingsType, loadSecuritySettingsSaga, deps), takeLatest(loadCurrencyPairsType, executeLoadCurrencyPairs, deps), takeLatest(loadCountriesType, executeLoadCountries, deps), takeLatest(loadAssetsInfoType, executeLoadAssetsInfo, deps), takeLatest(loadUserConfigType, executeLoadUserConfig, deps), takeLatest(setCurrentPresetsLayoutsType, executeSetCurrentPresetsLayoutsSaga), takeLatest(saveCurrentPresetsLayoutsType, executeSaveCurrentPresetsLayoutsTypeSaga), takeLatest(saveAssetInfoType, executeSaveAssetInfoSaga, deps), takeLatest(saveUserConfigType, executeSaveUserConfig, deps), takeLatest(saveThemeType, executeSaveTheme, deps), takeLatest(mLoadConfigType, executeMLoadConfigSaga, deps), takeLatest(mSetCurrentCurrencyPairIDType, executeMSetCurrentCurrencyPairIDSaga, deps), yield fork(executeMInitializeCurrectCurrencyPairSaga, deps), ]); } return saga; } export function* loadSecuritySettingsSaga(deps: IDependencies) { try { const response: NS.ISecuritySettings = { loginTriesBeforeCaptcha: 5, loginTriesBeforeLock: 10, loginRetryPeriod: 30000, restorePassTriesBeforeLock: 5, restorePassRetryPeriod: 30000, }; yield put(actions.loadSecuritySettingSuccess(response)); } catch (error) { const message = getErrorMsg(error); yield put(actions.loadSecuritySettingFail(message)); } } export function* executeLoadCurrencyPairs(deps: IDependencies) { try { const currencyPairs: ICurrencyPair[] = yield call(deps.api.config.loadCurrencyPairs); yield put(actions.loadCurrencyPairsSuccess(currencyPairs)); } catch (error) { const message = getErrorMsg(error); yield put(actions.loadCurrencyPairsFail(message)); } } export function* executeLoadCountries(deps: IDependencies, action: NS.ILoadCountries) { try { const countries: ICountry[] = yield call(deps.api.config.loadCountries, action.payload); yield put(actions.loadCountriesSuccess(countries)); } catch (error) { const message = getErrorMsg(error); yield put(actions.loadCountriesFail(message)); } } export function* 
executeLoadAssetsInfo(deps: IDependencies) { try { const assetsInfo = yield call(deps.api.config.loadAssetsInfo); yield put(actions.loadAssetsInfoSuccess(assetsInfo)); } catch (error) { const message = getErrorMsg(error); yield put(actions.loadAssetsInfoFail(message)); } } export function* executeLoadUserConfig({ api }: IDependencies) { try { const { theme, ...userConfig }: IUserConfig & IHoldingTheme = yield call(api.config.loadUserConfig); yield put(actions.setCurrentPresetsLayouts(userConfig.presets.map(convertToPresetLayouts))); yield put(actions.setTheme(theme)); yield put(actions.loadUserConfigSuccess(userConfig)); } catch (error) { const message = getErrorMsg(error); yield put(actions.loadUserConfigFail(message)); } } export function* executeSaveCurrentPresetsLayoutsTypeSaga() { const presets: IPreset[] = yield select(selectors.selectPresets); const currentPresetsLayouts: IPresetLayouts[] = yield select(selectors.selectCurrentPresetsLayouts); const newPresets = changePresetsLayouts(presets, currentPresetsLayouts); yield put(actions.saveUserConfig({ presets: newPresets, })); yield put(actions.setPresetsHaveUnsavedChanges(false)); } export function* executeSetCurrentPresetsLayoutsSaga() { const presets: IPreset[] = yield select(selectors.selectPresets); const currentPresetsLayouts: IPresetLayouts[] = yield select(selectors.selectCurrentPresetsLayouts); if (arePresetsLayoutsChanged(currentPresetsLayouts, presets)) { yield put(actions.setPresetsHaveUnsavedChanges(true)); } else { yield put(actions.setPresetsHaveUnsavedChanges(false)); } } export function* executeSaveAssetInfoSaga({ api }: IDependencies, { payload }: NS.ISaveAssetInfo) { try { const assetsInfo: IAssetsInfoMap = yield select(selectors.selectAssetsInfo); const assetId: string | undefined = getAssetIdFromAssetName(payload.assetName, assetsInfo); if (assetId) { yield call(api.config.updateAsset, assetId, payload); yield put(actions.saveAssetInfoSuccess(payload)); } } catch (error) { const message = 
getErrorMsg(error); yield put(notificationActions.setNotification({ kind: 'error', text: message })); yield put(actions.saveAssetInfoFail(message)); } } export function* executeSaveUserConfig({ api }: IDependencies, { payload }: NS.ISaveUserConfig) { try { const storedConfig: IUserConfig | null = yield select(selectors.selectUserConfig); const theme: UITheme = yield select(selectors.selectUITheme); if (storedConfig) { const userConfig: IUserConfig & IHoldingTheme = { ...storedConfig, ...payload, theme, }; yield call(api.config.setUserConfig, userConfig); yield put(actions.saveUserConfigSuccess()); } else { console.warn('Trying to save userConfig, but it not initialized yet', payload); } } catch (error) { const message = getErrorMsg(error); yield put(notificationActions.setNotification({ kind: 'error', text: message })); yield put(actions.saveUserConfigFail(message)); } } export function* executeSaveTheme({ api }: IDependencies, { payload }: NS.ISaveTheme) { try { const storedConfig: IUserConfig | null = yield select(selectors.selectUserConfig); if (storedConfig) { const userConfig: IUserConfig & IHoldingTheme = { ...storedConfig, theme: payload, }; yield call(api.config.setUserConfig, userConfig); yield put(actions.saveThemeSuccess()); } else { console.warn('Trying to save theme, but config is not initialized yet', payload); } } catch (error) { const message = getErrorMsg(error); yield put(notificationActions.setNotification({ kind: 'error', text: message })); yield put(actions.saveThemeFail(message)); } } export function* executeMSetCurrentCurrencyPairIDSaga({ api }: IDependencies) { // TODO refactor with subscribing to state change const config: IMConfig = yield select(selectors.mSelectConfig); try { yield call(api.config.mSaveConfig, config); } catch (error) { const message = getErrorMsg(error); console.error('could not save config', message); } } export function* executeMLoadConfigSaga({ api }: IDependencies) { try { const mConfig: IMConfig = yield 
call(api.config.mLoadConfig); yield put(actions.mLoadConfigCompleted(mConfig)); } catch (error) { const message = getErrorMsg(error); yield put(actions.mLoadConfigFail(message)); } } export function* executeMInitializeCurrectCurrencyPairSaga() { const mLoadConfigCompletedType: NS.IMLoadConfigCompleted['type'] = 'CONFIG:M:LOAD_CONFIG_COMPLETED'; const loadCurrencyPairsCompletedType: NS.ILoadCurrencyPairsSuccess['type'] = 'CONFIG:LOAD_CURRENCY_PAIRS_SUCCESS'; const [ { payload: { selectedCurrecyPairID } }, { payload: pairs }, ]: [NS.IMLoadConfigCompleted, NS.ILoadCurrencyPairsSuccess] = yield all([ take(mLoadConfigCompletedType), take(loadCurrencyPairsCompletedType), ]); if (selectedCurrecyPairID === null) { const notHiddenPairs = pairs.filter(x => !x.hidden); if (notHiddenPairs.length > 0) { yield put(actions.mSetCurrentCurrencyPairID(notHiddenPairs[0].id)); } else { console.error('no currency pairs on current currency pair initialization'); } } else { yield put(actions.mSetCurrentCurrencyPairID(selectedCurrecyPairID)); } }
#!/usr/bin/env bash
# Provisioning script: installs Nginx Proxy Manager v2.9.15 (Openresty,
# certbot, Node.js/Yarn) inside an LXC container and wires it up as a
# systemd service.

set -o errexit
set -o errtrace
set -o nounset
set -o pipefail
shopt -s expand_aliases
# `die` captures the failing exit code and line before calling error_exit.
alias die='EXIT=$? LINE=$LINENO error_exit'
CHECKMARK='\033[0;32m\xE2\x9C\x94\033[0m'
trap die ERR
trap 'die "Script interrupted."' INT

# Prints a tagged error message (code@line) and exits with the original code.
function error_exit() {
  trap - ERR
  local DEFAULT='Unknown failure occured.'
  local REASON="\e[97m${1:-$DEFAULT}\e[39m"
  local FLAG="\e[91m[ERROR:LXC] \e[93m$EXIT@$LINE"
  msg "$FLAG $REASON"
  exit $EXIT
}
function msg() {
  local TEXT="$1"
  echo -e "$TEXT"
}

echo -e "${CHECKMARK} \e[1;92m Setting up Container OS... \e[0m"
# Uncomment the current $LANG locale and generate it.
sed -i "/$LANG/ s/\(^# \)//" /etc/locale.gen
locale-gen >/dev/null

echo -e "${CHECKMARK} \e[1;92m Updating Container OS... \e[0m"
apt update &>/dev/null
apt-get -qqy upgrade &>/dev/null

echo -e "${CHECKMARK} \e[1;92m Installing Dependencies... \e[0m"
# Raise the file-descriptor limit for the proxy workloads.
echo "fs.file-max = 65535" > /etc/sysctl.conf
apt-get update &>/dev/null
apt-get -y install --no-install-recommends sudo curl wget gnupg openssl ca-certificates apache2-utils logrotate build-essential python3-dev git lsb-release &>/dev/null

echo -e "${CHECKMARK} \e[1;92m Installing Python... \e[0m"
apt-get install -y -q --no-install-recommends python3 python3-pip python3-venv &>/dev/null
pip3 install --upgrade setuptools &>/dev/null
pip3 install --upgrade pip &>/dev/null
# certbot lives in its own virtualenv under /opt/certbot.
python3 -m venv /opt/certbot/ &>/dev/null
# 32-bit platforms need the last cryptography release with prebuilt support.
if [ "$(getconf LONG_BIT)" = "32" ]; then
  python3 -m pip install --no-cache-dir -U cryptography==3.3.2 &>/dev/null
fi
python3 -m pip install --no-cache-dir cffi certbot &>/dev/null

echo -e "${CHECKMARK} \e[1;92m Installing Openresty... \e[0m"
# NOTE(review): apt-key is deprecated on current Debian — consider a keyring file.
wget -q -O - https://openresty.org/package/pubkey.gpg | apt-key add - &>/dev/null
codename=`grep -Po 'VERSION="[0-9]+ \(\K[^)]+' /etc/os-release` &>/dev/null
echo "deb http://openresty.org/package/debian $codename openresty" | tee /etc/apt/sources.list.d/openresty.list &>/dev/null
apt-get -y update &>/dev/null
apt-get -y install --no-install-recommends openresty &>/dev/null

echo -e "${CHECKMARK} \e[1;92m Setting up Node.js Repository... \e[0m"
sudo curl -sL https://deb.nodesource.com/setup_16.x | sudo -E bash - &>/dev/null

echo -e "${CHECKMARK} \e[1;92m Installing Node.js... \e[0m"
sudo apt-get install -y nodejs git make g++ gcc &>/dev/null

echo -e "${CHECKMARK} \e[1;92m Installing Yarn... \e[0m"
npm install --global yarn &>/dev/null

echo -e "${CHECKMARK} \e[1;92m Downloading NPM v2.9.15... \e[0m"
wget -q https://codeload.github.com/NginxProxyManager/nginx-proxy-manager/tar.gz/v2.9.15 -O - | tar -xz &>/dev/null
cd ./nginx-proxy-manager-2.9.15

echo -e "${CHECKMARK} \e[1;92m Setting up Enviroment... \e[0m"
ln -sf /usr/bin/python3 /usr/bin/python
# NOTE(review): link direction looks reversed — upstream links the venv binary
# (/opt/certbot/bin/certbot) INTO /usr/bin; confirm the intended target.
ln -sf /usr/bin/certbot /opt/certbot/bin/certbot
ln -sf /usr/local/openresty/nginx/sbin/nginx /usr/sbin/nginx
ln -sf /usr/local/openresty/nginx/ /etc/nginx
# Stamp the release version into the package manifests.
sed -i "s+0.0.0+#v2.9.15+g" backend/package.json
sed -i "s+0.0.0+#v2.9.15+g" frontend/package.json
# systemd manages the process, so nginx must not daemonize itself.
sed -i 's+^daemon+#daemon+g' docker/rootfs/etc/nginx/nginx.conf
# Rewrite relative conf.d includes to absolute paths for the non-docker layout.
NGINX_CONFS=$(find "$(pwd)" -type f -name "*.conf")
for NGINX_CONF in $NGINX_CONFS; do
  sed -i 's+include conf.d+include /etc/nginx/conf.d+g' "$NGINX_CONF"
done
mkdir -p /var/www/html /etc/nginx/logs
cp -r docker/rootfs/var/www/html/* /var/www/html/
cp -r docker/rootfs/etc/nginx/* /etc/nginx/
cp docker/rootfs/etc/letsencrypt.ini /etc/letsencrypt.ini
cp docker/rootfs/etc/logrotate.d/nginx-proxy-manager /etc/logrotate.d/nginx-proxy-manager
ln -sf /etc/nginx/nginx.conf /etc/nginx/conf/nginx.conf
rm -f /etc/nginx/conf.d/dev.conf
# Runtime/data directory tree expected by NPM and nginx.
mkdir -p /tmp/nginx/body \
  /run/nginx \
  /data/nginx \
  /data/custom_ssl \
  /data/logs \
  /data/access \
  /data/nginx/default_host \
  /data/nginx/default_www \
  /data/nginx/proxy_host \
  /data/nginx/redirection_host \
  /data/nginx/stream \
  /data/nginx/dead_host \
  /data/nginx/temp \
  /var/lib/nginx/cache/public \
  /var/lib/nginx/cache/private \
  /var/cache/nginx/proxy_temp
chmod -R 777 /var/cache/nginx
chown root /tmp/nginx
# Build an nginx resolver directive from the container's DNS servers
# (IPv6 addresses are bracketed).
echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" {print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf);" > /etc/nginx/conf.d/include/resolvers.conf
# Self-signed placeholder certificate until real certs are issued.
if [ ! -f /data/nginx/dummycert.pem ] || [ ! -f /data/nginx/dummykey.pem ]; then
  echo -e "${CHECKMARK} \e[1;92m Generating dummy SSL Certificate... \e[0m"
  openssl req -new -newkey rsa:2048 -days 3650 -nodes -x509 -subj "/O=Nginx Proxy Manager/OU=Dummy Certificate/CN=localhost" -keyout /data/nginx/dummykey.pem -out /data/nginx/dummycert.pem &>/dev/null
fi
mkdir -p /app/global /app/frontend/images
cp -r backend/* /app
cp -r global/* /app/global

echo -e "${CHECKMARK} \e[1;92m Building Frontend... \e[0m"
cd ./frontend
# NOTE(review): NODE_ENV=development here so yarn installs the devDependencies
# required by the build — confirm this matches upstream intent.
export NODE_ENV=development
yarn install --network-timeout=30000 &>/dev/null
yarn build &>/dev/null
cp -r dist/* /app/frontend
cp -r app-images/* /app/frontend/images

echo -e "${CHECKMARK} \e[1;92m Initializing Backend... \e[0m"
rm -rf /app/config/default.json &>/dev/null
# Default SQLite-backed config unless one already exists.
if [ ! -f /app/config/production.json ]; then
cat << 'EOF' > /app/config/production.json
{
  "database": {
    "engine": "knex-native",
    "knex": {
      "client": "sqlite3",
      "connection": {
        "filename": "/data/database.sqlite"
      }
    }
  }
}
EOF
fi
cd /app
export NODE_ENV=development
yarn install --network-timeout=30000 &>/dev/null

echo -e "${CHECKMARK} \e[1;92m Creating NPM Service... \e[0m"
cat << 'EOF' > /lib/systemd/system/npm.service
[Unit]
Description=Nginx Proxy Manager
After=network.target
Wants=openresty.service

[Service]
Type=simple
Environment=NODE_ENV=production
ExecStartPre=-mkdir -p /tmp/nginx/body /data/letsencrypt-acme-challenge
ExecStart=/usr/bin/node index.js --abort_on_uncaught_exception --max_old_space_size=250
WorkingDirectory=/app
Restart=on-failure

[Install]
WantedBy=multi-user.target
EOF

echo -e "${CHECKMARK} \e[1;92m Customizing Container... \e[0m"
rm /etc/motd
rm /etc/update-motd.d/10-uname
touch ~/.hushlogin
# Auto-login root on the container console.
GETTY_OVERRIDE="/etc/systemd/system/container-getty@1.service.d/override.conf"
mkdir -p $(dirname $GETTY_OVERRIDE)
cat << EOF > $GETTY_OVERRIDE
[Service]
ExecStart=
ExecStart=-/sbin/agetty --autologin root --noclear --keep-baud tty%I 115200,38400,9600 \$TERM
EOF
systemctl daemon-reload
systemctl restart $(basename $(dirname $GETTY_OVERRIDE) | sed 's/\.d//')

echo -e "${CHECKMARK} \e[1;92m Starting Services... \e[0m"
systemctl enable npm &>/dev/null
systemctl start openresty
systemctl start npm

echo -e "${CHECKMARK} \e[1;92m Cleanup... \e[0m"
rm -rf /npm_setup.sh /var/{cache,log}/* /var/lib/apt/lists/*
<filename>app/src/main/java/com/jinjunhang/contract/service/AuditApprovalResponse.java package com.jinjunhang.contract.service; /** * Created by lzn on 16/4/7. */ public class AuditApprovalResponse extends ServerResponse { private boolean mResult; private String mMessage; public boolean isResult() { return mResult; } public String getMessage() { return mMessage; } public void setResult(boolean result) { mResult = result; } public void setMessage(String message) { mMessage = message; } }
#! /bin/bash
# test_runner.sh — run the Python test suite, optionally with coverage,
# inside the project virtualenv. See the help box below for flags.

a_help="help"
a_coverage="coverage"
a_open="open"

out="tests_out.log"
coverage_out=""

main_command="run"
second_command=""

# Parse flags; the last matching main flag wins.
for i in "$@"; do
  case ${i} in
    -h|--help)
      main_command=${a_help}
      ;;
    -c|--coverage)
      main_command=${a_coverage}
      ;;
    -o|--open)
      second_command=${a_open}
      ;;
  esac
done

# Returns 0 (shell-true) when the current interpreter is inside a virtualenv,
# as reported by the helper script is_venv.py.
function is_in_venv() {
  is_venv=$(python is_venv.py)
  if [ "${is_venv}" = "True" ]; then
    return 0
  fi
  return 1
}

function activate_venv() {
  # FIX: was `if [ !is_in_venv ]` — that tests the literal string "!is_in_venv"
  # (always true) instead of invoking the function.
  if ! is_in_venv; then
    . venv/Scripts/activate &> ${out}
  fi
}

function deactivate_venv() {
  # FIX: was `if [ is_in_venv ]` — same literal-string bug as above.
  if is_in_venv; then
    deactivate
  fi
}

# Runs the suite under coverage and produces console + HTML reports.
function run_test_with_coverage() {
  activate_venv
  echo "# Running tests with coverage..."
  coverage run -m unittest discover src/test/ &> ${out}
  echo "# Tests finished!"
  echo ""
  echo "# Showing report..."
  coverage report -m
  echo "# End of report"
  echo ""
  echo "# Generating html report..."
  coverage html
  echo "# Html report generated!"
  echo ""
  deactivate_venv
}

# Runs the suite without coverage; output is captured in ${out}.
function run_test_without_coverage() {
  activate_venv
  echo "# Running tests..."
  python -m unittest discover src/test/ &> ${out}
  echo "# Tests finished!"
  echo ""
  deactivate_venv
}

function open_test_coverage_report() {
  echo ""
  echo "# Opening report..."
  start ./.coverage_report/index.html
  echo "# Report opened!"
}

function show_results() {
  echo "# Showing results"
  cat ./tests_out.log
}

function main() {
  if [[ ${main_command} = ${a_help} ]]; then
    echo "#############################################################"
    echo "## test_runner.sh [-h|--help] [-c|--coverage] [-o|--open]  ##"
    echo "## Script made to facilitate execution of tests            ##"
    echo "## Available parameters:                                   ##"
    echo "## [-h|--help]: display this help box                      ##"
    echo "## [-c|--coverage]: run tests with coverage, generate      ##"
    echo "##                  a report and automatically             ##"
    echo "## [-o|--open]: open report in browser                     ##"
    echo "#############################################################"
  else
    if [[ ${main_command} = ${a_coverage} ]]; then
      run_test_with_coverage & wait $!
    else
      run_test_without_coverage & wait $!
    fi
    show_results
    echo "# Done"
  fi
  if [[ ${second_command} = ${a_open} ]]; then
    open_test_coverage_report
  fi
}

main
""" MTN Nigeria USSD transport. """ from vumi.transports.mtn_nigeria.mtn_nigeria_ussd import ( MtnNigeriaUssdTransport) __all__ = ['MtnNigeriaUssdTransport', 'XmlOverTcpClient']
package ch.raiffeisen.openbank.branch.persistency.model;

/**
 * Service/Facilities offered at a branch.
 *
 * @author <NAME>
 */
public enum ServiceAndFacility {

  /**
   * Branch provides assisted service counters which are machines providing a wide range of banking
   * facilities similar to those provided by counter services.
   */
  AssistedServiceCounter,

  /** The branch provides an external ATM. */
  ExternalATM,

  /**
   * Branch provides AVS verifying, for instance, Account Holders (Name / Company No), Account
   * (Code, Branch No, Status, Type, Length open and if it accepts debits/credits).
   */
  AccountVerificationService,

  /** The branch has a business counter. */
  BusinessCounter,

  /** The branch provides Bureau de Change services. */
  BureauDeChange,

  /** The branch provides automated terminals for taking business deposits. */
  BusinessDepositTerminal,

  /** The branch provides business IT consultancy services. */
  BusinessITSupport,

  /**
   * The branch provides a card issuance facility. Note this is usually an emergency for issuing
   * standard debit cards with personalised magnetic stripe and chip data only.
   */
  CardIssuanceFacility,

  /** The branch has Click and Collect Lockers. */
  CollectionLockers,

  /** The branch provides counter teller services. */
  CounterServices,

  /**
   * The branch provides one or more external quick service points, which are machines providing
   * banking services.
   */
  ExternalQuickServicePoint,

  /**
   * The branch provides internal quick service points, which are machines providing banking
   * services.
   */
  InternalQuickServicePoint,

  /** The branch provides an internal ATM. */
  InternalAtm,

  /** The branch provides lodgement devices which are ATMs which can accept cash and cheque deposits. */
  LodgementDevice,

  /** The branch provides mortgage advisor services. */
  MortgageAdvisor,

  /** The branch has meeting rooms for customer interaction. */
  MeetingRooms,

  /** The branch has a night safe. */
  NightSafe,

  /** The branch provides facilities for on-line banking. */
  OnlineBankingPoint,

  /** The branch can provide foreign currency on demand. */
  OnDemandCurrency,

  /**
   * Used to indicate that the Branch Self Service code does not exist in this standard code list.
   * Use OtherSelfService to supply code, name &amp; description.
   */
  Other,

  /** The branch has parking facilities. */
  Parking,

  /** The branch has a counter for the bank's premier account customers. */
  PremierCounter,

  /** The branch provides quick deposit devices for automatic deposition of cash and cheques. */
  QuickDeposit,

  /** The branch provides a Saturday counter service. */
  SaturdayCounterService,

  /** The branch has a statement printer. */
  StatementPrinter,

  /** The branch offers a digital service for account opening. */
  SelfServiceAccountOpening,

  /** The branch has a video banking terminal for remote banking services. */
  VideoBanking,

  /** The branch has Wi-Fi facilities for use by its customers. */
  WiFi;
}
// Command go-release-cycle reads `git tag` output for the Go repository from
// stdin and emits a CSV of how long each beta/rc/GA release lasted.
package main

import (
	"bytes"
	"flag"
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"regexp"
	"strconv"
	"strings"
	"time"
)

func main() {
	showGA := flag.Bool("show-ga", false, "Show GA Releases")
	showBeta := flag.Bool("show-beta", false, "Show Beta Releases")
	showRC := flag.Bool("show-rc", false, "Show RC Releases")
	flag.Parse()

	// Tag listing is piped in on stdin (see MakeReleases for the format).
	gittag, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		log.Fatal(err)
	}

	releases, err := MakeReleases(gittag)
	if err != nil {
		log.Fatal(err)
	}
	releases.SetDurations()

	fmt.Print(releases.CSV(*showGA, *showBeta, *showRC))
}

type (
	// Version is the major and minor version number, such as "1.8".
	Version string
	// ReleaseType type is the type of release, such as "rc", "beta" or "."
	ReleaseType string // rc, beta or .
)

const (
	// BetaRelease is a beta.
	BetaRelease ReleaseType = "beta"
	// RCRelease is a release candidate.
	RCRelease = "rc"
	// GARelease is a general availability release.
	GARelease = "."
)

// A Release is the time when a release occurred.
type Release struct {
	date     time.Time
	duration time.Duration
}

// Releases holds all the releases for all versions for all release types.
type Releases map[Version]map[ReleaseType][]Release

// MakeReleases reads out and returns all releases or an error.
//
// out format is expected to be in chronological order, containing the release
// tag and date separated with a comma (note: not a tab — see the git command
// below, which uses a comma in --format).
//
//	refs/tags/go1.7beta1	Thu Jun 2 10:00:23 2016 +1000
//	refs/tags/go1.7beta2	Thu Jun 16 15:41:33 2016 -0400
//	refs/tags/go1.7rc1	Thu Jul 7 16:41:29 2016 -0700
//	refs/tags/go1.7rc2	Mon Jul 18 08:19:17 2016 -0700
//	refs/tags/go1.7	Mon Aug 15 14:09:32 2016 -0700
//	refs/tags/go1.7.1	Wed Sep 7 12:11:12 2016 -0700
//	refs/tags/go1.7.2	Mon Oct 17 13:43:23 2016 -0700
//	refs/tags/go1.7.3	Tue Oct 18 17:02:28 2016 -0700
//
// This output can be obtained with: git tag --format '%(refname),%(authordate)' --sort=authordate
func MakeReleases(out []byte) (Releases, error) {
	// sample: go1.7rc1 Thu Jul 7 16:41:29 2016 -0700
	// go versions: go1.8 or go1.8beta1 or go1.9rc1 or go1.8.1
	tags := regexp.MustCompile(`go([0-9]+\.[0-9]+)(\.|rc|beta|)([0-9]+|),(.*)`+"\n").FindAllStringSubmatch(string(out), -1)

	releases := make(Releases)
	for _, tag := range tags {
		var (
			version = Version(tag[1])
			revType = ReleaseType(tag[2])
		)

		// tag[3] is the beta/rc/patch number; empty for a bare "go1.8" tag.
		var num int64
		if tag[3] != "" {
			var err error
			num, err = strconv.ParseInt(tag[3], 10, 64)
			if err != nil {
				return nil, fmt.Errorf("could not parse release number in: %v: %v", tag[0], err)
			}
		}

		date, err := time.Parse("Mon Jan _2 15:04:05 2006 -0700", tag[4])
		if err != nil {
			return nil, fmt.Errorf("could not parse date in: %v: %v", tag[0], err)
		}

		// A bare "go1.8"-style tag has no type suffix; treat it as GA.
		if revType == "" {
			revType = GARelease
		}

		releases.Add(version, revType, int(num), date)
	}
	return releases, nil
}

// Add adds a version, type, number that occurred on date to the releases.
func (r Releases) Add(version Version, typ ReleaseType, num int, date time.Time) {
	if _, ok := r[version]; !ok {
		r[version] = make(map[ReleaseType][]Release)
	}

	if _, ok := r[nextVersion(version)][GARelease]; ok && typ == GARelease {
		// Ignore old GA releases when a newer GA is available, eg, if 1.6
		// has come out and 1.5.4 is also released, ignore the 1.5.4. It's
		// usually just small security patches, and this makes time simple
		// to follow (1.6 marks latest 1.5.x release as the last).
		return
	}

	r[version][typ] = append(r[version][typ], Release{date: date})
}

// SetDurations sets the durations on each release based on when the next
// occurred.
func (r Releases) SetDurations() {
	for version, revs := range r {
		for typ, releases := range revs {
			for i, release := range releases {
				// Set the duration of the last release based on this release's date.
				switch {
				case typ == BetaRelease && i == 0:
					// beta1 is the first release of a new version, don't touch last release.
				case typ == RCRelease && i == 0:
					// rc1 should set the duration of the last beta.
					r.SetLastDuration(version, BetaRelease, release.date)
				case typ == GARelease && i == 0:
					// .0 release should set the duration of the last rc.
					r.SetLastDuration(version, RCRelease, release.date)
					r.SetLastDuration(prevVersion(version), GARelease, release.date)
				default:
					// Could be beta2, rc2, .2 etc
					r.SetDuration(version, typ, release.date, i-1)
				}
			}
		}
	}

	// Set last release that doesn't have a duration to end today. This allows a
	// user to see where the current release is in comparion to previous releases.
	// This should only affect the latest/current beta or rc and ga.
	for version, revs := range r {
		for typ, releases := range revs {
			idx := len(releases) - 1 // last release in chain
			release := &releases[idx]
			if release.duration == 0 {
				r.SetDuration(version, typ, time.Now(), idx)
			}
		}
	}
}

// SetLastDuration sets the duration of last/current release based on date.
func (r Releases) SetLastDuration(version Version, typ ReleaseType, date time.Time) {
	idx := len(r[version][typ]) - 1
	if idx < 0 {
		return
	}
	r.SetDuration(version, typ, date, idx)
}

// SetDuration sets the duration of the version's revType to be the difference
// between its date and the provided date.
func (r Releases) SetDuration(version Version, typ ReleaseType, date time.Time, idx int) {
	d := date.Sub(r[version][typ][idx].date)
	r[version][typ][idx].duration = d
}

// CSV returns a CSV of the releases.
func (r Releases) CSV(showGA, showBeta, showRC bool) string { var ( buf bytes.Buffer header = []string{""} ) for version, revs := range r { for typ, releases := range revs { switch { case typ == GARelease && !showGA: continue case typ == BetaRelease && !showBeta: continue case typ == RCRelease && !showRC: continue } fmt.Fprintf(&buf, "%v%v,", version, typ) for i, release := range releases { if i > len(header)-2 { header = append(header, fmt.Sprintf("%d", i)) } fmt.Fprintf(&buf, "%d,", release.duration/(86400*time.Second)) } fmt.Fprintln(&buf) } } return fmt.Sprintf("%s\n%s", strings.Join(header, ","), buf.String()) } func nextVersion(current Version) (next Version) { major, minor := parseVersion(current) return Version(fmt.Sprintf("%d.%d", major, minor+1)) } func prevVersion(current Version) (previous Version) { major, minor := parseVersion(current) return Version(fmt.Sprintf("%d.%d", major, minor-1)) } func parseVersion(version Version) (major int, minor int) { v := strings.SplitN(string(version), ".", 2) maj, err := strconv.ParseInt(v[0], 10, 64) if err != nil { panic(err) // passed invalid version string } min, err := strconv.ParseInt(v[1], 10, 64) if err != nil { panic(err) // passed invalid version string } return int(maj), int(min) }
"""Build script for fast_ska.

Compiles the ``ska_kmers`` extension from the Cython source when Cython is
available, otherwise from the pre-generated C file shipped with the package.
"""

from setuptools import setup
from setuptools.extension import Extension

try:
    from Cython.Distutils import build_ext
    from Cython.Build import cythonize
except ImportError:
    use_cython = False
else:
    use_cython = True

if use_cython:
    # Cython present: build straight from the .pyx source and let Cython's
    # build_ext drive the translation to C.
    ext_modules = [Extension("ska_kmers", ["ska_kmers.pyx"])]
    cmdclass = {'build_ext': build_ext}
else:
    # No Cython: fall back to the pre-generated C translation.
    ext_modules = [Extension("ska_kmers", ["ska_kmers.c"])]
    cmdclass = {}

setup(
    name="fast_ska",
    version="0.9.3",
    description='A fast Cython implementation of the "Streaming K-mer Assignment" algorithm initially described in Lambert et al. 2014 (PMID: 24837674)',
    url='https://github.com/marvin-jens/fast_ska',
    author='<NAME>',
    author_email='<EMAIL>',
    license='MIT',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
    ],
    keywords='rna rbns k-mer kmer statistics biology bioinformatics',
    install_requires=['cython', 'numpy'],
    scripts=['ska'],
    cmdclass=cmdclass,
    ext_modules=ext_modules,
)
#!/bin/bash
# Submenu de gestion de usuarios: dispatches to the helper functions loaded
# from funciones_usuarios.sh (create/delete) or to the edit script.

. UsuariosGrupos/usuarios/funciones_usuarios.sh

opcion2="99"
until [[ "$opcion2" == "0" ]]
do
	cat <<'MENU'
----------------------------------------------------------------
| Menu de Gestion de Usuarios |
| |
|1- Crear un usuario |
|2- Editar un usuario |
|3- Eliminar un usuario |
|0- Volver |
----------------------------------------------------------------
MENU
	read -p ">" opcion2
	case "$opcion2" in
		"1")
			ingresar_usuario
			;;
		"2")
			# Editar un usuario da la opcion de agregar un usuario a grupos.
			clear
			./UsuariosGrupos/usuarios/editar_usuarios.sh
			;;
		"3")
			eliminar_usuario
			;;
		"0")
			clear
			opcion2="0"
			;;
		*)
			clear
			echo "Opcion invalida"
			;;
	esac
done
<reponame>Theodus/go-transpiler package main import ( "fmt" "os" "github.com/codegangsta/cli" ) func main() { app := cli.NewApp() app.Name = "go-transpiler" app.Author = "<NAME>" app.Usage = "compile Go to Java, C++, or JS using tardisgo or gopherjs" app.Version = "0.3.0" app.Commands = []cli.Command{ { Name: "java", Usage: "Compile Go source to Java target", Action: func(ctx *cli.Context) { if len(ctx.Args()) < 1 { tardis("java", "") return } if len(ctx.Args()) > 1 { fmt.Println("Too many arguments!") return } tardis("java", ctx.Args()[0]) }, }, { Name: "cpp", Usage: "Compile Go source to C++ target", Action: func(ctx *cli.Context) { if len(ctx.Args()) < 1 { tardis("cpp", "") return } if len(ctx.Args()) > 1 { fmt.Println("Too many arguments!") return } tardis("cpp", ctx.Args()[0]) }, }, { Name: "js", Usage: "Compile Go source to JS target", Action: func(ctx *cli.Context) { if len(ctx.Args()) < 1 { gopherjs("") return } if len(ctx.Args()) > 1 { fmt.Println("Too many arguments!") return } gopherjs(ctx.Args()[0]) }, }, } app.Run(os.Args) }
#!/bin/sh
# Derive KBE_ROOT / KBE_RES_PATH / KBE_BIN_PATH from the current directory
# and generate the KBEngine client SDK plugins for Unity and UE4.
# All variable expansions are quoted so paths containing spaces or glob
# characters do not word-split (the original left them unquoted).

currPath=$(pwd)
keyStr="/kbengine/"

# Non-zero when the current path already lives inside a kbengine checkout.
bcontain=$(echo "$currPath" | grep "$keyStr" | wc -l)

if [ "$bcontain" = 0 ]
then
	# Not inside a checkout: assume the parent directory is the root.
	export KBE_ROOT="$(cd ../; pwd)"
else
	# Inside a checkout: everything up to (and including) /kbengine.
	export KBE_ROOT="$(pwd | awk -F "/kbengine/" '{print $1}')/kbengine"
fi

export KBE_RES_PATH="$KBE_ROOT/kbe/res/:$(pwd):$(pwd)/res:$(pwd)/scripts/"
export KBE_BIN_PATH="$KBE_ROOT/kbe/bin/server/"

echo KBE_ROOT = \"${KBE_ROOT}\"
echo KBE_RES_PATH = \"${KBE_RES_PATH}\"
echo KBE_BIN_PATH = \"${KBE_BIN_PATH}\"

"$KBE_BIN_PATH/kbcmd" --clientsdk=unity --outpath="$currPath/kbengine_unity3d_plugins"

"$KBE_BIN_PATH/kbcmd" --clientsdk=ue4 --outpath="$currPath/kbengine_ue4_plugins"
package com.bean;

import org.springframework.stereotype.Component;

/**
 * Simple Spring-managed bean holding a name and an age.
 *
 * @program: spring
 * @ClassName Lj
 * @description:$
 * @author: 李杰
 * @create: 2020-05-24 15:09
 * @Version 1.0
 **/
@Component
public class Lj {

	// Person's name.
	private String name;
	// Age stored as a String; no numeric validation is performed here.
	private String age;

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getAge() {
		return age;
	}

	public void setAge(String age) {
		this.age = age;
	}
}
import pandas as pd

# Load the sales records.
# Assumes columns: Product, Date, Revenue, Units_Sold — TODO confirm against the CSV.
sales_data = pd.read_csv("sales_data.csv")

# Total revenue generated from all sales.
total_revenue = sales_data["Revenue"].sum()

# Average number of units sold per sale.
average_units_sold = sales_data["Units_Sold"].mean()

# Sale (row) that generated the highest revenue, plus its product and date.
top_product = sales_data.loc[sales_data["Revenue"].idxmax()]
top_product_name = top_product["Product"]
top_product_date = top_product["Date"]

# Product with the highest number of units sold (in a single sale).
# Bug fix: the original reused the highest-*revenue* product here, which
# contradicts the stated intent ("highest number of units sold") and the
# "top-selling product" label printed below.
top_selling_name = sales_data.loc[sales_data["Units_Sold"].idxmax(), "Product"]

# All sales rows for the top-selling product.
top_selling_product_data = sales_data[sales_data["Product"] == top_selling_name]

# Total revenue generated from the top-selling product.
top_product_revenue = top_selling_product_data["Revenue"].sum()

# Output the results
print("Total revenue generated from all sales:", total_revenue)
print("Average number of units sold per sale:", average_units_sold)
print("Product that generated the highest revenue:", top_product_name)
print("Date of the highest revenue sale:", top_product_date)
print("Total revenue generated from the top-selling product:", top_product_revenue)
/**
 * Returns a draw-phase helper that applies the shape's rotation and scale,
 * combined with the per-draw offset, to the canvas context. The transform is
 * applied around the shape's translation center and skipped entirely when it
 * would be the identity.
 *
 * @param shape object with scaleX/scaleY/rotation and getTranslationCenter()
 * @returns {(ctx: CanvasRenderingContext2D, offset) => boolean} always true
 */
export const rotateAndScale = shape => (ctx, offset) => {
  const scaleX = shape.scaleX * offset.scaleX;
  const scaleY = shape.scaleY * offset.scaleY;
  const rotation = shape.rotation + offset.rotation;
  if (scaleX !== 1 || scaleY !== 1 || rotation !== 0) {
    // Translate to the pivot, transform, translate back so the shape
    // rotates/scales in place instead of around the canvas origin.
    const translate = shape.getTranslationCenter(offset);
    ctx.translate(translate.x, translate.y);
    ctx.rotate(rotation);
    ctx.scale(scaleX, scaleY);
    ctx.translate(-translate.x, -translate.y);
  }
  return true;
}

/**
 * Returns a draw-phase helper that applies the shape's opacity (multiplied by
 * the offset's opacity) and its shadow settings to the canvas context.
 * Each context property is only touched when it differs from the default.
 *
 * @returns {(ctx: CanvasRenderingContext2D, offset) => boolean} always true
 */
export const shade = shape => (ctx, offset) => {
  const globalAlpha = shape.opacity * offset.opacity;
  if (globalAlpha !== 1) {
    ctx.globalAlpha = globalAlpha;
  }
  if (shape.shadowColor) {
    ctx.shadowColor = shape.shadowColor
  }
  if (shape.shadowBlur !== 0) {
    ctx.shadowBlur = shape.shadowBlur
  }
  if (shape.shadowOffsetX !== 0) {
    ctx.shadowOffsetX = shape.shadowOffsetX
  }
  if (shape.shadowOffsetY !== 0) {
    ctx.shadowOffsetY = shape.shadowOffsetY;
  }
  return true;
}

/**
 * Returns a draw-phase helper that fills and strokes the current path using
 * the shape's background/border settings. Stroking only happens when both a
 * border color and a non-zero border width are set.
 *
 * @returns {(ctx: CanvasRenderingContext2D, offset) => boolean} always true
 */
export const fillAndStroke = shape => (ctx, offset) => {
  if (shape.backgroundColor) {
    ctx.fillStyle = shape.backgroundColor;
    ctx.fill();
  }
  if (shape.borderDash?.length) {
    ctx.setLineDash(shape.borderDash)
  }
  if (shape.borderColor && shape.borderWidth) {
    ctx.strokeStyle = shape.borderColor;
    ctx.lineWidth = shape.borderWidth;
    ctx.stroke();
  }
  return true;
}
import datetime


def alter_now_relative(current_time, time_delta):
    """Return ``current_time`` shifted by ``time_delta`` seconds.

    A negative ``time_delta`` shifts the time backwards.
    """
    offset = datetime.timedelta(seconds=time_delta)
    return current_time + offset
/**
 * Prefix every namespace directory with the project base directory.
 *
 * @param string $baseDir              Absolute project root (no trailing slash).
 * @param array  $namespaceDirectories Map of namespace prefix => list of relative dirs.
 * @return array Map of namespace prefix => list of absolute dirs, same order.
 */
function resolveNamespaces(string $baseDir, array $namespaceDirectories): array
{
    $resolvedPaths = array();
    foreach ($namespaceDirectories as $namespace => $directories) {
        $absolute = array();
        foreach ($directories as $directory) {
            $absolute[] = $baseDir . $directory;
        }
        $resolvedPaths[$namespace] = $absolute;
    }
    return $resolvedPaths;
}

// Test the function with the provided example
$baseDir = '/var/www/project';
$namespaceDirectories = array(
    'Symfony\\Polyfill\\Php80\\' => array('/vendor/symfony/polyfill-php80'),
    'Symfony\\Polyfill\\Mbstring\\' => array('/vendor/symfony/polyfill-mbstring'),
    'Symfony\\Component\\Finder\\' => array('/vendor/symfony/finder'),
    'Symfony\\Component\\EventDispatcher\\' => array('/vendor/symfony/event-dispatcher'),
    'Psr\\Log\\' => array('/vendor/psr/log/Psr/Log'),
);

$result = resolveNamespaces($baseDir, $namespaceDirectories);
var_dump($result);
package com.google.developers.event.http;

import com.google.developers.api.SpreadsheetManager;
import com.google.developers.event.ActiveEvent;
import com.google.developers.event.campaign.CampaignServlet;
import com.google.developers.event.qrcode.RegistrationServlet;
import com.google.developers.event.qrcode.TicketServlet;
import com.google.gdata.util.ServiceException;
import com.google.inject.servlet.ServletModule;

import javax.servlet.http.HttpServletRequest;
import java.io.IOException;

/**
 * Guice servlet module wiring URL patterns to servlet classes for the
 * default GAE module, plus a shared helper for resolving the active event
 * from a request's Referer header.
 */
public class DefaultServletModule extends ServletModule implements Path {

	@Override
	protected void configureServlets() {

		/*
		 * /api/401/;jsessionid=37fycpy88nx7
		 */
		serve("/api/401/*").with(UnauthorizedServlet.class);

		/*
		 * A servlet is a singleton, and is allowed to be registered only once.
		 */
//		serve("/api/check-in").with(CheckInServlet.class);
		serve("/api/label").with(LabelServlet.class);
		serve("/api/logo").with(LogoServlet.class);
		serve("/api/chapters").with(ChaptersServlet.class);
		serve("/api/events").with(EventsServlet.class);
		serve("/api/activities").with(ActivitiesServlet.class);
		serve("/api/participants").with(RegistrationServlet.class);
		serve("/api/user").with(UserServlet.class);

		// OAuth2 endpoints (constants from the Path interface).
		serve(OAUTH2ENTRY).with(OAuth2EntryServlet.class);
		serve(OAUTH2CALLBACK).with(OAuth2CallbackServlet.class);
		serve(OAUTH2REVOKE).with(OAuth2RevokeServlet.class);

		serve(EVENTS_URL + "*").with(EventsServlet.class);

		// Each of these servlets answers both a fixed /api path and a
		// token-suffixed URL (lower-case alphanumeric id).
		serveRegex("/api/check-in|" + CHECK_IN_URL + "[0-9a-z]+").with(CheckInServlet.class);
		serveRegex("/api/ticket|" + TICKET_URL + "[0-9a-z]+").with(TicketServlet.class);
		serveRegex("/api/campaign|" + CAMPAIGN_URL + "[0-9a-z]+").with(CampaignServlet.class);
	}

	/**
	 * Resolves the active event for a request by reconstructing the Google+
	 * event URL from the request's Referer header.
	 *
	 * @param req                the incoming request; its Referer header must
	 *                           start with this server's URL followed by
	 *                           {@code path} and the event id suffix
	 * @param spreadsheetManager used to look the event up
	 * @param path               the path prefix the referer is expected under
	 * @return the active event, or {@code null} when the referer is missing
	 *         or does not carry an event id
	 */
	public static ActiveEvent getActiveEvent(
			HttpServletRequest req, SpreadsheetManager spreadsheetManager, String path)
			throws IOException, ServiceException {

		/*
		 * TODO https://github.com/google/guice/wiki/AssistedInject
		 */

		/*
		 * retrieve event id from http header, referer
		 * e.g.
		 * https://plus.google.com/events/c2vl1u3p3pbglde0gqhs7snv098
		 * https://developers.google.com/events/6031536915218432/
		 * https://hub.gdgx.io/events/6031536915218432
		 */
		String referer = req.getHeader("Referer");
		if (referer == null) {
			return null;
		}

//		Pattern gplusEventPattern = Pattern.compile("https://plus.google.com/events/" +
//				"[^/]+");
//		Pattern devsiteEventPattern = Pattern.compile("https://developers.google.com/events/" +
//				"[^/]+/");
//		Pattern gdgxHubEventPattern = Pattern.compile("https://hub.gdgx.io/events/" +
//				"([^/]+)");

		// urlBase = scheme://host[:port] + path; the referer must extend it
		// with a non-empty suffix (the event id).
		String requestURL = req.getRequestURL().toString();
		String urlBase = requestURL.substring(0, requestURL.indexOf(req.getRequestURI())) + path;
		if (!referer.startsWith(urlBase) || referer.equals(urlBase)) {
			return null;
		}
		String gplusEventUrl = "https://plus.google.com/events/" + referer.substring(urlBase.length());

		return ActiveEvent.get(gplusEventUrl, spreadsheetManager);
	}
}
#!/bin/bash
# Deploy: update the working tree and restart the hyperf_blog service.
# Abort on any failure so we never restart the service on a broken/partial
# pull (the original continued to the restart even when git pull failed).
set -e

git pull origin master
supervisorctl restart hyperf_blog
CREATE TABLE Product (
  id INT AUTO_INCREMENT PRIMARY KEY,
  name VARCHAR(255) NOT NULL,
  price DECIMAL(5,2) NOT NULL,
  quantity INT NOT NULL
);

CREATE TABLE Customer (
  id INT AUTO_INCREMENT PRIMARY KEY,
  name VARCHAR(255) NOT NULL,
  address VARCHAR(255) NOT NULL,
  email VARCHAR(255) NOT NULL,
  phone VARCHAR(20) NOT NULL
);

-- ORDER is a reserved keyword in MySQL: the unquoted "CREATE TABLE Order"
-- fails with a syntax error, so the identifier must be backtick-quoted.
CREATE TABLE `Order` (
  id INT AUTO_INCREMENT PRIMARY KEY,
  customer_id INT,
  product_id INT,
  quantity INT NOT NULL,
  FOREIGN KEY (customer_id) REFERENCES Customer(id),
  FOREIGN KEY (product_id) REFERENCES Product(id)
);
// Navigation data for the "bazaar" section: each group pairs an element-UI
// icon class with the routes shown under it.
const ztNavList = [
  {
    icon: 'el-icon-orange',
    kinds: [
      {
        text: '轮播',
        path: '/bazaar/swiper',
      },
      {
        text: '图表',
        path: '/bazaar/echarts',
      },
      {
        text: '富文本',
        path: '/bazaar/editor'
      }
    ]
  },
  {
    icon: 'el-icon-share',
    kinds: [
      {
        text: '过渡',
        path: '/bazaar/transition'
      }
    ]
  }
]

// Sidebar entries: `index` is the menu key, `childrens` lists the child
// route names rendered under each entry.
const bars = [
  {
    index: '1',
    icon: 'el-icon-orange',
    text: 'home',
    childrens: ['welcome']
  },
  {
    index: '2',
    icon: 'el-icon-s-custom',
    text: 'user',
    childrens: ['message']
  },
  {
    index: '3',
    icon: 'el-icon-menu',
    text: 'components',
    childrens: ['vuedraggable']
  },
  {
    index: '4',
    icon: 'el-icon-warning',
    text: 'error',
    childrens: ['401','404']
  },
  {
    index: '5',
    icon: 'el-icon-s-operation',
    text: 'jsx',
    childrens: []
  },
  {
    index: '6',
    icon: 'el-icon-s-order',
    text: 'excel',
    childrens: []
  }
]

export {
  bars,
  ztNavList
}
<gh_stars>0 /* $Id: pixel.c,v 1.37 2002/10/24 23:57:21 brianp Exp $ */ /* * Mesa 3-D graphics library * Version: 4.1 * * Copyright (C) 1999-2002 <NAME> All Rights Reserved. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * <NAME> BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #include "glheader.h" #include "imports.h" #include "colormac.h" #include "context.h" #include "macros.h" #include "pixel.h" #include "mtypes.h" /**********************************************************************/ /***** glPixelZoom *****/ /**********************************************************************/ void _mesa_PixelZoom( GLfloat xfactor, GLfloat yfactor ) { GET_CURRENT_CONTEXT(ctx); if (ctx->Pixel.ZoomX == xfactor && ctx->Pixel.ZoomY == yfactor) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.ZoomX = xfactor; ctx->Pixel.ZoomY = yfactor; } /**********************************************************************/ /***** glPixelStore *****/ /**********************************************************************/ void _mesa_PixelStorei( GLenum pname, GLint param ) { /* NOTE: this call can't be compiled into the display list */ GET_CURRENT_CONTEXT(ctx); ASSERT_OUTSIDE_BEGIN_END(ctx); switch (pname) { case GL_PACK_SWAP_BYTES: if (param == (GLint)ctx->Pack.SwapBytes) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.SwapBytes = param ? GL_TRUE : GL_FALSE; break; case GL_PACK_LSB_FIRST: if (param == (GLint)ctx->Pack.LsbFirst) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.LsbFirst = param ? 
GL_TRUE : GL_FALSE; break; case GL_PACK_ROW_LENGTH: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Pack.RowLength == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.RowLength = param; break; case GL_PACK_IMAGE_HEIGHT: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Pack.ImageHeight == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.ImageHeight = param; break; case GL_PACK_SKIP_PIXELS: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Pack.SkipPixels == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.SkipPixels = param; break; case GL_PACK_SKIP_ROWS: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Pack.SkipRows == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.SkipRows = param; break; case GL_PACK_SKIP_IMAGES: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Pack.SkipImages == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.SkipImages = param; break; case GL_PACK_ALIGNMENT: if (param!=1 && param!=2 && param!=4 && param!=8) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Pack.Alignment == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.Alignment = param; break; case GL_PACK_INVERT_MESA: if (!ctx->Extensions.MESA_pack_invert) { _mesa_error( ctx, GL_INVALID_ENUM, "glPixelstore(pname)" ); return; } if (ctx->Pack.Invert == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Pack.Invert = param; break; case GL_UNPACK_SWAP_BYTES: if (param == (GLint)ctx->Unpack.SwapBytes) return; if ((GLint)ctx->Unpack.SwapBytes == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.SwapBytes = param ? 
GL_TRUE : GL_FALSE; break; case GL_UNPACK_LSB_FIRST: if (param == (GLint)ctx->Unpack.LsbFirst) return; if ((GLint)ctx->Unpack.LsbFirst == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.LsbFirst = param ? GL_TRUE : GL_FALSE; break; case GL_UNPACK_ROW_LENGTH: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Unpack.RowLength == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.RowLength = param; break; case GL_UNPACK_IMAGE_HEIGHT: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Unpack.ImageHeight == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.ImageHeight = param; break; case GL_UNPACK_SKIP_PIXELS: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Unpack.SkipPixels == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.SkipPixels = param; break; case GL_UNPACK_SKIP_ROWS: if (param<0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Unpack.SkipRows == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.SkipRows = param; break; case GL_UNPACK_SKIP_IMAGES: if (param < 0) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore(param)" ); return; } if (ctx->Unpack.SkipImages == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.SkipImages = param; break; case GL_UNPACK_ALIGNMENT: if (param!=1 && param!=2 && param!=4 && param!=8) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelStore" ); return; } if (ctx->Unpack.Alignment == param) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.Alignment = param; break; case GL_UNPACK_CLIENT_STORAGE_APPLE: if (param == (GLint)ctx->Unpack.ClientStorage) return; FLUSH_VERTICES(ctx, _NEW_PACKUNPACK); ctx->Unpack.ClientStorage = param ? 
GL_TRUE : GL_FALSE; break; default: _mesa_error( ctx, GL_INVALID_ENUM, "glPixelStore" ); return; } } void _mesa_PixelStoref( GLenum pname, GLfloat param ) { _mesa_PixelStorei( pname, (GLint) param ); } /**********************************************************************/ /***** glPixelMap *****/ /**********************************************************************/ void _mesa_PixelMapfv( GLenum map, GLint mapsize, const GLfloat *values ) { GLint i; GET_CURRENT_CONTEXT(ctx); ASSERT_OUTSIDE_BEGIN_END(ctx); if (mapsize < 1 || mapsize > MAX_PIXEL_MAP_TABLE) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelMapfv(mapsize)" ); return; } if (map >= GL_PIXEL_MAP_S_TO_S && map <= GL_PIXEL_MAP_I_TO_A) { /* test that mapsize is a power of two */ if (_mesa_bitcount((GLuint) mapsize) != 1) { _mesa_error( ctx, GL_INVALID_VALUE, "glPixelMapfv(mapsize)" ); return; } } FLUSH_VERTICES(ctx, _NEW_PIXEL); switch (map) { case GL_PIXEL_MAP_S_TO_S: ctx->Pixel.MapStoSsize = mapsize; for (i=0;i<mapsize;i++) { ctx->Pixel.MapStoS[i] = (GLint) values[i]; } break; case GL_PIXEL_MAP_I_TO_I: ctx->Pixel.MapItoIsize = mapsize; for (i=0;i<mapsize;i++) { ctx->Pixel.MapItoI[i] = (GLint) values[i]; } break; case GL_PIXEL_MAP_I_TO_R: ctx->Pixel.MapItoRsize = mapsize; for (i=0;i<mapsize;i++) { GLfloat val = CLAMP( values[i], 0.0F, 1.0F ); ctx->Pixel.MapItoR[i] = val; ctx->Pixel.MapItoR8[i] = (GLint) (val * 255.0F); } break; case GL_PIXEL_MAP_I_TO_G: ctx->Pixel.MapItoGsize = mapsize; for (i=0;i<mapsize;i++) { GLfloat val = CLAMP( values[i], 0.0F, 1.0F ); ctx->Pixel.MapItoG[i] = val; ctx->Pixel.MapItoG8[i] = (GLint) (val * 255.0F); } break; case GL_PIXEL_MAP_I_TO_B: ctx->Pixel.MapItoBsize = mapsize; for (i=0;i<mapsize;i++) { GLfloat val = CLAMP( values[i], 0.0F, 1.0F ); ctx->Pixel.MapItoB[i] = val; ctx->Pixel.MapItoB8[i] = (GLint) (val * 255.0F); } break; case GL_PIXEL_MAP_I_TO_A: ctx->Pixel.MapItoAsize = mapsize; for (i=0;i<mapsize;i++) { GLfloat val = CLAMP( values[i], 0.0F, 1.0F ); 
ctx->Pixel.MapItoA[i] = val; ctx->Pixel.MapItoA8[i] = (GLint) (val * 255.0F); } break; case GL_PIXEL_MAP_R_TO_R: ctx->Pixel.MapRtoRsize = mapsize; for (i=0;i<mapsize;i++) { ctx->Pixel.MapRtoR[i] = CLAMP( values[i], 0.0F, 1.0F ); } break; case GL_PIXEL_MAP_G_TO_G: ctx->Pixel.MapGtoGsize = mapsize; for (i=0;i<mapsize;i++) { ctx->Pixel.MapGtoG[i] = CLAMP( values[i], 0.0F, 1.0F ); } break; case GL_PIXEL_MAP_B_TO_B: ctx->Pixel.MapBtoBsize = mapsize; for (i=0;i<mapsize;i++) { ctx->Pixel.MapBtoB[i] = CLAMP( values[i], 0.0F, 1.0F ); } break; case GL_PIXEL_MAP_A_TO_A: ctx->Pixel.MapAtoAsize = mapsize; for (i=0;i<mapsize;i++) { ctx->Pixel.MapAtoA[i] = CLAMP( values[i], 0.0F, 1.0F ); } break; default: _mesa_error( ctx, GL_INVALID_ENUM, "glPixelMapfv(map)" ); } } void _mesa_PixelMapuiv(GLenum map, GLint mapsize, const GLuint *values ) { const GLint n = MIN2(mapsize, MAX_PIXEL_MAP_TABLE); GLfloat fvalues[MAX_PIXEL_MAP_TABLE]; GLint i; if (map==GL_PIXEL_MAP_I_TO_I || map==GL_PIXEL_MAP_S_TO_S) { for (i=0;i<n;i++) { fvalues[i] = (GLfloat) values[i]; } } else { for (i=0;i<n;i++) { fvalues[i] = UINT_TO_FLOAT( values[i] ); } } _mesa_PixelMapfv(map, mapsize, fvalues); } void _mesa_PixelMapusv(GLenum map, GLint mapsize, const GLushort *values ) { const GLint n = MIN2(mapsize, MAX_PIXEL_MAP_TABLE); GLfloat fvalues[MAX_PIXEL_MAP_TABLE]; GLint i; if (map==GL_PIXEL_MAP_I_TO_I || map==GL_PIXEL_MAP_S_TO_S) { for (i=0;i<n;i++) { fvalues[i] = (GLfloat) values[i]; } } else { for (i=0;i<n;i++) { fvalues[i] = USHORT_TO_FLOAT( values[i] ); } } _mesa_PixelMapfv(map, mapsize, fvalues); } void _mesa_GetPixelMapfv( GLenum map, GLfloat *values ) { GET_CURRENT_CONTEXT(ctx); GLint i; ASSERT_OUTSIDE_BEGIN_END(ctx); switch (map) { case GL_PIXEL_MAP_I_TO_I: for (i=0;i<ctx->Pixel.MapItoIsize;i++) { values[i] = (GLfloat) ctx->Pixel.MapItoI[i]; } break; case GL_PIXEL_MAP_S_TO_S: for (i=0;i<ctx->Pixel.MapStoSsize;i++) { values[i] = (GLfloat) ctx->Pixel.MapStoS[i]; } break; case GL_PIXEL_MAP_I_TO_R: 
MEMCPY(values,ctx->Pixel.MapItoR,ctx->Pixel.MapItoRsize*sizeof(GLfloat)); break; case GL_PIXEL_MAP_I_TO_G: MEMCPY(values,ctx->Pixel.MapItoG,ctx->Pixel.MapItoGsize*sizeof(GLfloat)); break; case GL_PIXEL_MAP_I_TO_B: MEMCPY(values,ctx->Pixel.MapItoB,ctx->Pixel.MapItoBsize*sizeof(GLfloat)); break; case GL_PIXEL_MAP_I_TO_A: MEMCPY(values,ctx->Pixel.MapItoA,ctx->Pixel.MapItoAsize*sizeof(GLfloat)); break; case GL_PIXEL_MAP_R_TO_R: MEMCPY(values,ctx->Pixel.MapRtoR,ctx->Pixel.MapRtoRsize*sizeof(GLfloat)); break; case GL_PIXEL_MAP_G_TO_G: MEMCPY(values,ctx->Pixel.MapGtoG,ctx->Pixel.MapGtoGsize*sizeof(GLfloat)); break; case GL_PIXEL_MAP_B_TO_B: MEMCPY(values,ctx->Pixel.MapBtoB,ctx->Pixel.MapBtoBsize*sizeof(GLfloat)); break; case GL_PIXEL_MAP_A_TO_A: MEMCPY(values,ctx->Pixel.MapAtoA,ctx->Pixel.MapAtoAsize*sizeof(GLfloat)); break; default: _mesa_error( ctx, GL_INVALID_ENUM, "glGetPixelMapfv" ); } } void _mesa_GetPixelMapuiv( GLenum map, GLuint *values ) { GET_CURRENT_CONTEXT(ctx); GLint i; ASSERT_OUTSIDE_BEGIN_END(ctx); switch (map) { case GL_PIXEL_MAP_I_TO_I: MEMCPY(values, ctx->Pixel.MapItoI, ctx->Pixel.MapItoIsize*sizeof(GLint)); break; case GL_PIXEL_MAP_S_TO_S: MEMCPY(values, ctx->Pixel.MapStoS, ctx->Pixel.MapStoSsize*sizeof(GLint)); break; case GL_PIXEL_MAP_I_TO_R: for (i=0;i<ctx->Pixel.MapItoRsize;i++) { values[i] = FLOAT_TO_UINT( ctx->Pixel.MapItoR[i] ); } break; case GL_PIXEL_MAP_I_TO_G: for (i=0;i<ctx->Pixel.MapItoGsize;i++) { values[i] = FLOAT_TO_UINT( ctx->Pixel.MapItoG[i] ); } break; case GL_PIXEL_MAP_I_TO_B: for (i=0;i<ctx->Pixel.MapItoBsize;i++) { values[i] = FLOAT_TO_UINT( ctx->Pixel.MapItoB[i] ); } break; case GL_PIXEL_MAP_I_TO_A: for (i=0;i<ctx->Pixel.MapItoAsize;i++) { values[i] = FLOAT_TO_UINT( ctx->Pixel.MapItoA[i] ); } break; case GL_PIXEL_MAP_R_TO_R: for (i=0;i<ctx->Pixel.MapRtoRsize;i++) { values[i] = FLOAT_TO_UINT( ctx->Pixel.MapRtoR[i] ); } break; case GL_PIXEL_MAP_G_TO_G: for (i=0;i<ctx->Pixel.MapGtoGsize;i++) { values[i] = FLOAT_TO_UINT( 
ctx->Pixel.MapGtoG[i] ); } break; case GL_PIXEL_MAP_B_TO_B: for (i=0;i<ctx->Pixel.MapBtoBsize;i++) { values[i] = FLOAT_TO_UINT( ctx->Pixel.MapBtoB[i] ); } break; case GL_PIXEL_MAP_A_TO_A: for (i=0;i<ctx->Pixel.MapAtoAsize;i++) { values[i] = FLOAT_TO_UINT( ctx->Pixel.MapAtoA[i] ); } break; default: _mesa_error( ctx, GL_INVALID_ENUM, "glGetPixelMapfv" ); } } void _mesa_GetPixelMapusv( GLenum map, GLushort *values ) { GET_CURRENT_CONTEXT(ctx); GLint i; ASSERT_OUTSIDE_BEGIN_END(ctx); switch (map) { case GL_PIXEL_MAP_I_TO_I: for (i=0;i<ctx->Pixel.MapItoIsize;i++) { values[i] = (GLushort) ctx->Pixel.MapItoI[i]; } break; case GL_PIXEL_MAP_S_TO_S: for (i=0;i<ctx->Pixel.MapStoSsize;i++) { values[i] = (GLushort) ctx->Pixel.MapStoS[i]; } break; case GL_PIXEL_MAP_I_TO_R: for (i=0;i<ctx->Pixel.MapItoRsize;i++) { values[i] = FLOAT_TO_USHORT( ctx->Pixel.MapItoR[i] ); } break; case GL_PIXEL_MAP_I_TO_G: for (i=0;i<ctx->Pixel.MapItoGsize;i++) { values[i] = FLOAT_TO_USHORT( ctx->Pixel.MapItoG[i] ); } break; case GL_PIXEL_MAP_I_TO_B: for (i=0;i<ctx->Pixel.MapItoBsize;i++) { values[i] = FLOAT_TO_USHORT( ctx->Pixel.MapItoB[i] ); } break; case GL_PIXEL_MAP_I_TO_A: for (i=0;i<ctx->Pixel.MapItoAsize;i++) { values[i] = FLOAT_TO_USHORT( ctx->Pixel.MapItoA[i] ); } break; case GL_PIXEL_MAP_R_TO_R: for (i=0;i<ctx->Pixel.MapRtoRsize;i++) { values[i] = FLOAT_TO_USHORT( ctx->Pixel.MapRtoR[i] ); } break; case GL_PIXEL_MAP_G_TO_G: for (i=0;i<ctx->Pixel.MapGtoGsize;i++) { values[i] = FLOAT_TO_USHORT( ctx->Pixel.MapGtoG[i] ); } break; case GL_PIXEL_MAP_B_TO_B: for (i=0;i<ctx->Pixel.MapBtoBsize;i++) { values[i] = FLOAT_TO_USHORT( ctx->Pixel.MapBtoB[i] ); } break; case GL_PIXEL_MAP_A_TO_A: for (i=0;i<ctx->Pixel.MapAtoAsize;i++) { values[i] = FLOAT_TO_USHORT( ctx->Pixel.MapAtoA[i] ); } break; default: _mesa_error( ctx, GL_INVALID_ENUM, "glGetPixelMapfv" ); } } /**********************************************************************/ /***** glPixelTransfer *****/ 
/**********************************************************************/ /* * Implements glPixelTransfer[fi] whether called immediately or from a * display list. */ void _mesa_PixelTransferf( GLenum pname, GLfloat param ) { GET_CURRENT_CONTEXT(ctx); ASSERT_OUTSIDE_BEGIN_END(ctx); switch (pname) { case GL_MAP_COLOR: if (ctx->Pixel.MapColorFlag == (param ? GL_TRUE : GL_FALSE)) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.MapColorFlag = param ? GL_TRUE : GL_FALSE; break; case GL_MAP_STENCIL: if (ctx->Pixel.MapStencilFlag == (param ? GL_TRUE : GL_FALSE)) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.MapStencilFlag = param ? GL_TRUE : GL_FALSE; break; case GL_INDEX_SHIFT: if (ctx->Pixel.IndexShift == (GLint) param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.IndexShift = (GLint) param; break; case GL_INDEX_OFFSET: if (ctx->Pixel.IndexOffset == (GLint) param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.IndexOffset = (GLint) param; break; case GL_RED_SCALE: if (ctx->Pixel.RedScale == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.RedScale = param; break; case GL_RED_BIAS: if (ctx->Pixel.RedBias == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.RedBias = param; break; case GL_GREEN_SCALE: if (ctx->Pixel.GreenScale == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.GreenScale = param; break; case GL_GREEN_BIAS: if (ctx->Pixel.GreenBias == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.GreenBias = param; break; case GL_BLUE_SCALE: if (ctx->Pixel.BlueScale == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.BlueScale = param; break; case GL_BLUE_BIAS: if (ctx->Pixel.BlueBias == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.BlueBias = param; break; case GL_ALPHA_SCALE: if (ctx->Pixel.AlphaScale == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.AlphaScale = param; break; case GL_ALPHA_BIAS: if (ctx->Pixel.AlphaBias == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); 
ctx->Pixel.AlphaBias = param; break; case GL_DEPTH_SCALE: if (ctx->Pixel.DepthScale == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.DepthScale = param; break; case GL_DEPTH_BIAS: if (ctx->Pixel.DepthBias == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.DepthBias = param; break; case GL_POST_COLOR_MATRIX_RED_SCALE: if (ctx->Pixel.PostColorMatrixScale[0] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostColorMatrixScale[0] = param; break; case GL_POST_COLOR_MATRIX_RED_BIAS: if (ctx->Pixel.PostColorMatrixBias[0] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostColorMatrixBias[0] = param; break; case GL_POST_COLOR_MATRIX_GREEN_SCALE: if (ctx->Pixel.PostColorMatrixScale[1] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostColorMatrixScale[1] = param; break; case GL_POST_COLOR_MATRIX_GREEN_BIAS: if (ctx->Pixel.PostColorMatrixBias[1] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostColorMatrixBias[1] = param; break; case GL_POST_COLOR_MATRIX_BLUE_SCALE: if (ctx->Pixel.PostColorMatrixScale[2] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostColorMatrixScale[2] = param; break; case GL_POST_COLOR_MATRIX_BLUE_BIAS: if (ctx->Pixel.PostColorMatrixBias[2] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostColorMatrixBias[2] = param; break; case GL_POST_COLOR_MATRIX_ALPHA_SCALE: if (ctx->Pixel.PostColorMatrixScale[3] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostColorMatrixScale[3] = param; break; case GL_POST_COLOR_MATRIX_ALPHA_BIAS: if (ctx->Pixel.PostColorMatrixBias[3] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostColorMatrixBias[3] = param; break; case GL_POST_CONVOLUTION_RED_SCALE: if (ctx->Pixel.PostConvolutionScale[0] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostConvolutionScale[0] = param; break; case GL_POST_CONVOLUTION_RED_BIAS: if (ctx->Pixel.PostConvolutionBias[0] == param) return; 
FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostConvolutionBias[0] = param; break; case GL_POST_CONVOLUTION_GREEN_SCALE: if (ctx->Pixel.PostConvolutionScale[1] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostConvolutionScale[1] = param; break; case GL_POST_CONVOLUTION_GREEN_BIAS: if (ctx->Pixel.PostConvolutionBias[1] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostConvolutionBias[1] = param; break; case GL_POST_CONVOLUTION_BLUE_SCALE: if (ctx->Pixel.PostConvolutionScale[2] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostConvolutionScale[2] = param; break; case GL_POST_CONVOLUTION_BLUE_BIAS: if (ctx->Pixel.PostConvolutionBias[2] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostConvolutionBias[2] = param; break; case GL_POST_CONVOLUTION_ALPHA_SCALE: if (ctx->Pixel.PostConvolutionScale[2] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostConvolutionScale[2] = param; break; case GL_POST_CONVOLUTION_ALPHA_BIAS: if (ctx->Pixel.PostConvolutionBias[2] == param) return; FLUSH_VERTICES(ctx, _NEW_PIXEL); ctx->Pixel.PostConvolutionBias[2] = param; break; default: _mesa_error( ctx, GL_INVALID_ENUM, "glPixelTransfer(pname)" ); return; } } void _mesa_PixelTransferi( GLenum pname, GLint param ) { _mesa_PixelTransferf( pname, (GLfloat) param ); } /**********************************************************************/ /***** Pixel processing functions ******/ /**********************************************************************/ /* * Apply scale and bias factors to an array of RGBA pixels. 
*/ void _mesa_scale_and_bias_rgba(const GLcontext *ctx, GLuint n, GLfloat rgba[][4], GLfloat rScale, GLfloat gScale, GLfloat bScale, GLfloat aScale, GLfloat rBias, GLfloat gBias, GLfloat bBias, GLfloat aBias) { if (rScale != 1.0 || rBias != 0.0) { GLuint i; for (i = 0; i < n; i++) { rgba[i][RCOMP] = rgba[i][RCOMP] * rScale + rBias; } } if (gScale != 1.0 || gBias != 0.0) { GLuint i; for (i = 0; i < n; i++) { rgba[i][GCOMP] = rgba[i][GCOMP] * gScale + gBias; } } if (bScale != 1.0 || bBias != 0.0) { GLuint i; for (i = 0; i < n; i++) { rgba[i][BCOMP] = rgba[i][BCOMP] * bScale + bBias; } } if (aScale != 1.0 || aBias != 0.0) { GLuint i; for (i = 0; i < n; i++) { rgba[i][ACOMP] = rgba[i][ACOMP] * aScale + aBias; } } } /* * Apply pixel mapping to an array of floating point RGBA pixels. */ void _mesa_map_rgba( const GLcontext *ctx, GLuint n, GLfloat rgba[][4] ) { const GLfloat rscale = (GLfloat) (ctx->Pixel.MapRtoRsize - 1); const GLfloat gscale = (GLfloat) (ctx->Pixel.MapGtoGsize - 1); const GLfloat bscale = (GLfloat) (ctx->Pixel.MapBtoBsize - 1); const GLfloat ascale = (GLfloat) (ctx->Pixel.MapAtoAsize - 1); const GLfloat *rMap = ctx->Pixel.MapRtoR; const GLfloat *gMap = ctx->Pixel.MapGtoG; const GLfloat *bMap = ctx->Pixel.MapBtoB; const GLfloat *aMap = ctx->Pixel.MapAtoA; GLuint i; for (i=0;i<n;i++) { GLfloat r = CLAMP(rgba[i][RCOMP], 0.0F, 1.0F); GLfloat g = CLAMP(rgba[i][GCOMP], 0.0F, 1.0F); GLfloat b = CLAMP(rgba[i][BCOMP], 0.0F, 1.0F); GLfloat a = CLAMP(rgba[i][ACOMP], 0.0F, 1.0F); rgba[i][RCOMP] = rMap[IROUND(r * rscale)]; rgba[i][GCOMP] = gMap[IROUND(g * gscale)]; rgba[i][BCOMP] = bMap[IROUND(b * bscale)]; rgba[i][ACOMP] = aMap[IROUND(a * ascale)]; } } /* * Apply the color matrix and post color matrix scaling and biasing. 
*/ void _mesa_transform_rgba(const GLcontext *ctx, GLuint n, GLfloat rgba[][4]) { const GLfloat rs = ctx->Pixel.PostColorMatrixScale[0]; const GLfloat rb = ctx->Pixel.PostColorMatrixBias[0]; const GLfloat gs = ctx->Pixel.PostColorMatrixScale[1]; const GLfloat gb = ctx->Pixel.PostColorMatrixBias[1]; const GLfloat bs = ctx->Pixel.PostColorMatrixScale[2]; const GLfloat bb = ctx->Pixel.PostColorMatrixBias[2]; const GLfloat as = ctx->Pixel.PostColorMatrixScale[3]; const GLfloat ab = ctx->Pixel.PostColorMatrixBias[3]; const GLfloat *m = ctx->ColorMatrixStack.Top->m; GLuint i; for (i = 0; i < n; i++) { const GLfloat r = rgba[i][RCOMP]; const GLfloat g = rgba[i][GCOMP]; const GLfloat b = rgba[i][BCOMP]; const GLfloat a = rgba[i][ACOMP]; rgba[i][RCOMP] = (m[0] * r + m[4] * g + m[ 8] * b + m[12] * a) * rs + rb; rgba[i][GCOMP] = (m[1] * r + m[5] * g + m[ 9] * b + m[13] * a) * gs + gb; rgba[i][BCOMP] = (m[2] * r + m[6] * g + m[10] * b + m[14] * a) * bs + bb; rgba[i][ACOMP] = (m[3] * r + m[7] * g + m[11] * b + m[15] * a) * as + ab; } } /* * Apply a color table lookup to an array of colors. 
*/ void _mesa_lookup_rgba(const struct gl_color_table *table, GLuint n, GLfloat rgba[][4]) { ASSERT(table->FloatTable); if (!table->Table || table->Size == 0) return; switch (table->Format) { case GL_INTENSITY: /* replace RGBA with I */ if (!table->FloatTable) { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLchan *lut = (const GLchan *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint j = IROUND(rgba[i][RCOMP] * scale); GLfloat c = CHAN_TO_FLOAT(lut[CLAMP(j, 0, 1)]); rgba[i][RCOMP] = rgba[i][GCOMP] = rgba[i][BCOMP] = rgba[i][ACOMP] = c; } } else { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLfloat *lut = (const GLfloat *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint j = IROUND(rgba[i][RCOMP] * scale); GLfloat c = lut[CLAMP(j, 0, max)]; rgba[i][RCOMP] = rgba[i][GCOMP] = rgba[i][BCOMP] = rgba[i][ACOMP] = c; } } break; case GL_LUMINANCE: /* replace RGB with L */ if (!table->FloatTable) { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLchan *lut = (const GLchan *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint j = IROUND(rgba[i][RCOMP] * scale); GLfloat c = CHAN_TO_FLOAT(lut[CLAMP(j, 0, max)]); rgba[i][RCOMP] = rgba[i][GCOMP] = rgba[i][BCOMP] = c; } } else { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLfloat *lut = (const GLfloat *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint j = IROUND(rgba[i][RCOMP] * scale); GLfloat c = lut[CLAMP(j, 0, max)]; rgba[i][RCOMP] = rgba[i][GCOMP] = rgba[i][BCOMP] = c; } } break; case GL_ALPHA: /* replace A with A */ if (!table->FloatTable) { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLchan *lut = (const GLchan *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint j = IROUND(rgba[i][ACOMP] * scale); rgba[i][ACOMP] = CHAN_TO_FLOAT(lut[CLAMP(j, 0, max)]); } } else { const GLint max = table->Size - 1; const GLfloat scale = 
(GLfloat) max; const GLfloat *lut = (const GLfloat *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint j = IROUND(rgba[i][ACOMP] * scale); rgba[i][ACOMP] = lut[CLAMP(j, 0, max)]; } } break; case GL_LUMINANCE_ALPHA: /* replace RGBA with LLLA */ if (!table->FloatTable) { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLchan *lut = (const GLchan *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint jL = IROUND(rgba[i][RCOMP] * scale); GLint jA = IROUND(rgba[i][ACOMP] * scale); GLfloat luminance, alpha; jL = CLAMP(jL, 0, max); jA = CLAMP(jA, 0, max); luminance = CHAN_TO_FLOAT(lut[jL * 2 + 0]); alpha = CHAN_TO_FLOAT(lut[jA * 2 + 1]); rgba[i][RCOMP] = rgba[i][GCOMP] = rgba[i][BCOMP] = luminance; rgba[i][ACOMP] = alpha; } } else { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLfloat *lut = (const GLfloat *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint jL = IROUND(rgba[i][RCOMP] * scale); GLint jA = IROUND(rgba[i][ACOMP] * scale); GLfloat luminance, alpha; jL = CLAMP(jL, 0, max); jA = CLAMP(jA, 0, max); luminance = lut[jL * 2 + 0]; alpha = lut[jA * 2 + 1]; rgba[i][RCOMP] = rgba[i][GCOMP] = rgba[i][BCOMP] = luminance; rgba[i][ACOMP] = alpha; } } break; case GL_RGB: /* replace RGB with RGB */ if (!table->FloatTable) { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLchan *lut = (const GLchan *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint jR = IROUND(rgba[i][RCOMP] * scale); GLint jG = IROUND(rgba[i][GCOMP] * scale); GLint jB = IROUND(rgba[i][BCOMP] * scale); jR = CLAMP(jR, 0, max); jG = CLAMP(jG, 0, max); jB = CLAMP(jB, 0, max); rgba[i][RCOMP] = CHAN_TO_FLOAT(lut[jR * 3 + 0]); rgba[i][GCOMP] = CHAN_TO_FLOAT(lut[jG * 3 + 1]); rgba[i][BCOMP] = CHAN_TO_FLOAT(lut[jB * 3 + 2]); } } else { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLfloat *lut = (const GLfloat *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint 
jR = IROUND(rgba[i][RCOMP] * scale); GLint jG = IROUND(rgba[i][GCOMP] * scale); GLint jB = IROUND(rgba[i][BCOMP] * scale); jR = CLAMP(jR, 0, max); jG = CLAMP(jG, 0, max); jB = CLAMP(jB, 0, max); rgba[i][RCOMP] = lut[jR * 3 + 0]; rgba[i][GCOMP] = lut[jG * 3 + 1]; rgba[i][BCOMP] = lut[jB * 3 + 2]; } } break; case GL_RGBA: /* replace RGBA with RGBA */ if (!table->FloatTable) { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLchan *lut = (const GLchan *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint jR = IROUND(rgba[i][RCOMP] * scale); GLint jG = IROUND(rgba[i][GCOMP] * scale); GLint jB = IROUND(rgba[i][BCOMP] * scale); GLint jA = IROUND(rgba[i][ACOMP] * scale); jR = CLAMP(jR, 0, max); jG = CLAMP(jG, 0, max); jB = CLAMP(jB, 0, max); jA = CLAMP(jA, 0, max); rgba[i][RCOMP] = CHAN_TO_FLOAT(lut[jR * 4 + 0]); rgba[i][GCOMP] = CHAN_TO_FLOAT(lut[jG * 4 + 1]); rgba[i][BCOMP] = CHAN_TO_FLOAT(lut[jB * 4 + 2]); rgba[i][ACOMP] = CHAN_TO_FLOAT(lut[jA * 4 + 3]); } } else { const GLint max = table->Size - 1; const GLfloat scale = (GLfloat) max; const GLfloat *lut = (const GLfloat *) table->Table; GLuint i; for (i = 0; i < n; i++) { GLint jR = IROUND(rgba[i][RCOMP] * scale); GLint jG = IROUND(rgba[i][GCOMP] * scale); GLint jB = IROUND(rgba[i][BCOMP] * scale); GLint jA = IROUND(rgba[i][ACOMP] * scale); jR = CLAMP(jR, 0, max); jG = CLAMP(jG, 0, max); jB = CLAMP(jB, 0, max); jA = CLAMP(jA, 0, max); rgba[i][RCOMP] = lut[jR * 4 + 0]; rgba[i][GCOMP] = lut[jG * 4 + 1]; rgba[i][BCOMP] = lut[jB * 4 + 2]; rgba[i][ACOMP] = lut[jA * 4 + 3]; } } break; default: _mesa_problem(NULL, "Bad format in _mesa_lookup_rgba"); return; } } /* * Apply color index shift and offset to an array of pixels. 
*/ void _mesa_shift_and_offset_ci( const GLcontext *ctx, GLuint n, GLuint indexes[] ) { GLint shift = ctx->Pixel.IndexShift; GLint offset = ctx->Pixel.IndexOffset; GLuint i; if (shift > 0) { for (i=0;i<n;i++) { indexes[i] = (indexes[i] << shift) + offset; } } else if (shift < 0) { shift = -shift; for (i=0;i<n;i++) { indexes[i] = (indexes[i] >> shift) + offset; } } else { for (i=0;i<n;i++) { indexes[i] = indexes[i] + offset; } } } /* * Apply color index mapping to color indexes. */ void _mesa_map_ci( const GLcontext *ctx, GLuint n, GLuint index[] ) { GLuint mask = ctx->Pixel.MapItoIsize - 1; GLuint i; for (i=0;i<n;i++) { index[i] = ctx->Pixel.MapItoI[ index[i] & mask ]; } } /* * Map color indexes to rgba values. */ void _mesa_map_ci_to_rgba_chan( const GLcontext *ctx, GLuint n, const GLuint index[], GLchan rgba[][4] ) { #if CHAN_BITS == 8 GLuint rmask = ctx->Pixel.MapItoRsize - 1; GLuint gmask = ctx->Pixel.MapItoGsize - 1; GLuint bmask = ctx->Pixel.MapItoBsize - 1; GLuint amask = ctx->Pixel.MapItoAsize - 1; const GLubyte *rMap = ctx->Pixel.MapItoR8; const GLubyte *gMap = ctx->Pixel.MapItoG8; const GLubyte *bMap = ctx->Pixel.MapItoB8; const GLubyte *aMap = ctx->Pixel.MapItoA8; GLuint i; for (i=0;i<n;i++) { rgba[i][RCOMP] = rMap[index[i] & rmask]; rgba[i][GCOMP] = gMap[index[i] & gmask]; rgba[i][BCOMP] = bMap[index[i] & bmask]; rgba[i][ACOMP] = aMap[index[i] & amask]; } #else GLuint rmask = ctx->Pixel.MapItoRsize - 1; GLuint gmask = ctx->Pixel.MapItoGsize - 1; GLuint bmask = ctx->Pixel.MapItoBsize - 1; GLuint amask = ctx->Pixel.MapItoAsize - 1; const GLfloat *rMap = ctx->Pixel.MapItoR; const GLfloat *gMap = ctx->Pixel.MapItoG; const GLfloat *bMap = ctx->Pixel.MapItoB; const GLfloat *aMap = ctx->Pixel.MapItoA; GLuint i; for (i=0;i<n;i++) { CLAMPED_FLOAT_TO_CHAN(rgba[i][RCOMP], rMap[index[i] & rmask]); CLAMPED_FLOAT_TO_CHAN(rgba[i][GCOMP], gMap[index[i] & gmask]); CLAMPED_FLOAT_TO_CHAN(rgba[i][BCOMP], bMap[index[i] & bmask]); CLAMPED_FLOAT_TO_CHAN(rgba[i][ACOMP], 
aMap[index[i] & amask]); } #endif } /* * Map color indexes to float rgba values. */ void _mesa_map_ci_to_rgba( const GLcontext *ctx, GLuint n, const GLuint index[], GLfloat rgba[][4] ) { GLuint rmask = ctx->Pixel.MapItoRsize - 1; GLuint gmask = ctx->Pixel.MapItoGsize - 1; GLuint bmask = ctx->Pixel.MapItoBsize - 1; GLuint amask = ctx->Pixel.MapItoAsize - 1; const GLfloat *rMap = ctx->Pixel.MapItoR; const GLfloat *gMap = ctx->Pixel.MapItoG; const GLfloat *bMap = ctx->Pixel.MapItoB; const GLfloat *aMap = ctx->Pixel.MapItoA; GLuint i; for (i=0;i<n;i++) { rgba[i][RCOMP] = rMap[index[i] & rmask]; rgba[i][GCOMP] = gMap[index[i] & gmask]; rgba[i][BCOMP] = bMap[index[i] & bmask]; rgba[i][ACOMP] = aMap[index[i] & amask]; } } /* * Map 8-bit color indexes to rgb values. */ void _mesa_map_ci8_to_rgba( const GLcontext *ctx, GLuint n, const GLubyte index[], GLchan rgba[][4] ) { #if CHAN_BITS == 8 GLuint rmask = ctx->Pixel.MapItoRsize - 1; GLuint gmask = ctx->Pixel.MapItoGsize - 1; GLuint bmask = ctx->Pixel.MapItoBsize - 1; GLuint amask = ctx->Pixel.MapItoAsize - 1; const GLubyte *rMap = ctx->Pixel.MapItoR8; const GLubyte *gMap = ctx->Pixel.MapItoG8; const GLubyte *bMap = ctx->Pixel.MapItoB8; const GLubyte *aMap = ctx->Pixel.MapItoA8; GLuint i; for (i=0;i<n;i++) { rgba[i][RCOMP] = rMap[index[i] & rmask]; rgba[i][GCOMP] = gMap[index[i] & gmask]; rgba[i][BCOMP] = bMap[index[i] & bmask]; rgba[i][ACOMP] = aMap[index[i] & amask]; } #else GLuint rmask = ctx->Pixel.MapItoRsize - 1; GLuint gmask = ctx->Pixel.MapItoGsize - 1; GLuint bmask = ctx->Pixel.MapItoBsize - 1; GLuint amask = ctx->Pixel.MapItoAsize - 1; const GLfloat *rMap = ctx->Pixel.MapItoR; const GLfloat *gMap = ctx->Pixel.MapItoG; const GLfloat *bMap = ctx->Pixel.MapItoB; const GLfloat *aMap = ctx->Pixel.MapItoA; GLuint i; for (i=0;i<n;i++) { CLAMPED_FLOAT_TO_CHAN(rgba[i][RCOMP], rMap[index[i] & rmask]); CLAMPED_FLOAT_TO_CHAN(rgba[i][GCOMP], gMap[index[i] & gmask]); CLAMPED_FLOAT_TO_CHAN(rgba[i][BCOMP], bMap[index[i] & bmask]); 
CLAMPED_FLOAT_TO_CHAN(rgba[i][ACOMP], aMap[index[i] & amask]); } #endif } void _mesa_shift_and_offset_stencil( const GLcontext *ctx, GLuint n, GLstencil stencil[] ) { GLuint i; GLint shift = ctx->Pixel.IndexShift; GLint offset = ctx->Pixel.IndexOffset; if (shift > 0) { for (i=0;i<n;i++) { stencil[i] = (stencil[i] << shift) + offset; } } else if (shift < 0) { shift = -shift; for (i=0;i<n;i++) { stencil[i] = (stencil[i] >> shift) + offset; } } else { for (i=0;i<n;i++) { stencil[i] = stencil[i] + offset; } } } void _mesa_map_stencil( const GLcontext *ctx, GLuint n, GLstencil stencil[] ) { GLuint mask = ctx->Pixel.MapStoSsize - 1; GLuint i; for (i=0;i<n;i++) { stencil[i] = ctx->Pixel.MapStoS[ stencil[i] & mask ]; } } /* * This function converts an array of GLchan colors to GLfloat colors. * Most importantly, it undoes the non-uniform quantization of pixel * values introduced when we convert shallow (< 8 bit) pixel values * to GLubytes in the ctx->Driver.ReadRGBASpan() functions. * This fixes a number of OpenGL conformance failures when running on * 16bpp displays, for example. 
*/ void _mesa_chan_to_float_span(const GLcontext *ctx, GLuint n, CONST GLchan rgba[][4], GLfloat rgbaf[][4]) { #if CHAN_TYPE == GL_FLOAT MEMCPY(rgbaf, rgba, n * 4 * sizeof(GLfloat)); #else const GLuint rShift = CHAN_BITS - ctx->Visual.redBits; const GLuint gShift = CHAN_BITS - ctx->Visual.greenBits; const GLuint bShift = CHAN_BITS - ctx->Visual.blueBits; GLuint aShift; const GLfloat rScale = 1.0F / (GLfloat) ((1 << ctx->Visual.redBits ) - 1); const GLfloat gScale = 1.0F / (GLfloat) ((1 << ctx->Visual.greenBits) - 1); const GLfloat bScale = 1.0F / (GLfloat) ((1 << ctx->Visual.blueBits ) - 1); GLfloat aScale; GLuint i; if (ctx->Visual.alphaBits > 0) { aShift = CHAN_BITS - ctx->Visual.alphaBits; aScale = 1.0F / (GLfloat) ((1 << ctx->Visual.alphaBits) - 1); } else { aShift = 0; aScale = 1.0F / CHAN_MAXF; } for (i = 0; i < n; i++) { const GLint r = rgba[i][RCOMP] >> rShift; const GLint g = rgba[i][GCOMP] >> gShift; const GLint b = rgba[i][BCOMP] >> bShift; const GLint a = rgba[i][ACOMP] >> aShift; rgbaf[i][RCOMP] = (GLfloat) r * rScale; rgbaf[i][GCOMP] = (GLfloat) g * gScale; rgbaf[i][BCOMP] = (GLfloat) b * bScale; rgbaf[i][ACOMP] = (GLfloat) a * aScale; } #endif }
# Train models on different pretrained weights.
#BART_PATH=/pretrained/bart.large.cnn/model.pt
BART_PATH=/home/ubuntu/project/semsim/fairseq-semsim/checkpoints/semsim.pt

# Optimisation schedule and batching (MAX_TOKENS sized for a 16 GB GPU;
# UPDATE_FREQ accumulates gradients to emulate a larger batch).
TOTAL_NUM_UPDATES=50000
WARMUP_UPDATES=500
LR=3e-05
MAX_TOKENS=1024 # for gpu 16gb
UPDATE_FREQ=32

python train.py cnn_dm-bin_medium \
    --no-epoch-checkpoints \
    --restore-file $BART_PATH \
    --max-tokens $MAX_TOKENS \
    --task translation \
    --source-lang source --target-lang target \
    --layernorm-embedding \
    --share-all-embeddings \
    --share-decoder-input-output-embed \
    --reset-optimizer --reset-dataloader --reset-meters \
    --required-batch-size-multiple 1 \
    --arch bart_large \
    --criterion semantic_similarity_loss \
    --label-smoothing 0.1 \
    --dropout 0.1 --attention-dropout 0.1 \
    --weight-decay 0.01 --optimizer adam --adam-betas "(0.9, 0.999 )" --adam-eps 1e-08 \
    --clip-norm 0.1 \
    --lr-scheduler polynomial_decay --lr $LR --total-num-update $TOTAL_NUM_UPDATES --warmup-updates $WARMUP_UPDATES \
    --update-freq $UPDATE_FREQ \
    --skip-invalid-size-inputs-valid-test \
    --save-dir checkpoints/new_rewarder_model \
    --find-unused-parameters;
""" Principal Component Pursuit (PCP) Based on: https://github.com/dfm/pcp, <NAME>, 2015, MIT license. https://github.com/dfm/pcp/blob/main/pcp.py """ import numpy as np from apg import rpca_apg from ialm import rpca_ialm from md_utils import mat2gray, sliding_window def pcp_func( o_image, im_shape, max_iter=500, tol=1e-2, method='ialm', sw_step_size=10, sw_ptch_sz=50): ''' Principal Component Pursuit ''' m, n = im_shape wndw_sz = sw_ptch_sz step_sz = sw_step_size orig_img = sliding_window( o_image, wndw_sz, step_sz, m, n) orig_img = mat2gray(orig_img) lam = 1.0 / np.sqrt(np.max((m, n))) if method == 'apg': s_o = rpca_apg(orig_img, lam, max_iter, tol) elif method == 'ialm': s_o = rpca_ialm(orig_img, lam, max_iter, tol) trgt_patch = np.zeros((m, n, 100)) s_ret = np.zeros((m, n)) y = np.zeros((m, n)) temp1 = np.zeros((wndw_sz, wndw_sz)) idx = 0 # build target patch for i in range(0, m - wndw_sz+1, step_sz): for j in range(0, n - wndw_sz+1, step_sz): idx += 1 temp1 = temp1.ravel(order='F') temp1 = s_o[:, [idx-1]] y[i:i + wndw_sz-1, j:j + wndw_sz - 1] = y[i:i + wndw_sz-1, j:j + wndw_sz-1]+1 temp1 = np.reshape(temp1, (wndw_sz, wndw_sz), order='F') for u in range(i, i + wndw_sz-1): for v in range(j, j + wndw_sz-1): trgt_patch[u, v, int(y[u, v])] = temp1[u - i+1, v - j+1] # median from IPI paper for i in range(0, m): for j in range(0, n): if int(y[i, j]) > 0: s_ret[i, j] = np.median(trgt_patch[i, j, 0:int(y[i, j])]) # s_ret[i, j] = np.percentile(x, 10) # 10th percentile: alternative to median return s_ret
'use strict'; var extend = require('lodash').assign; var mysql = require('mysql'); var config = require('../../../config/' + process.env.NODE_ENV); module.exports = function(config) { function getConnection() { return mysql.createConnection(extend({ database: config.mysql.dbname }, config.mysql.auth)); } // [START list] function list(model, limit, offset, cb) { limit = limit ? parseInt(limit) : 0; offset = offset ? parseInt(offset, limit) : 0; var connection = getConnection(); connection.query( 'SELECT * FROM ?? LIMIT ? OFFSET ?', [model, limit, offset], function(err, results) { if (err) return cb(err); cb(null, results, results.length === limit ? token + results.length : false); } ); connection.end(); } // [END list] // [START create] function create(model, data, cb) { var connection = getConnection(); connection.query('INSERT INTO ?? SET ?', [model, data], function(err, res) { if (err) return cb(err); read(model, res.insertId, cb); }); connection.end(); } // [END create] function read(model, id, cb) { var connection = getConnection(); connection.query('SELECT * FROM ?? WHERE `id` = ?', [model, id], function(err, results) { if (err) return cb(err); if (!results.length) return cb({ code: 404, message: 'Not found' }); cb(null, results[0]); }); connection.end(); } // [START update] function update(model, id, data, cb) { var connection = getConnection(); connection.query('UPDATE ?? SET ? WHERE `id` = ?', [model, data, id], function(err) { if (err) return cb(err); read(model, id, cb); }); connection.end(); } // [END update] function _delete(model, id, cb) { var connection = getConnection(); connection.query('DELETE FROM ?? 
WHERE `id` = ?', [model, id], cb); connection.end(); } return { createSchema: createSchema, list: list, create: create, read: read, update: update, delete: _delete }; }; if (!module.parent) { var prompt = require('prompt'); prompt.start(); console.log( 'Running this script directly will allow you to initialize your mysql database.\n' + 'This script will not modify any existing tables.\n'); createSchema(); } function createSchema() { var connection = mysql.createConnection(extend({ multipleStatements: true }, config.mysql.auth)); connection.query( 'CREATE DATABASE IF NOT EXISTS `'+config.mysql.dbname+'` DEFAULT CHARACTER SET = \'utf8\' DEFAULT COLLATE \'utf8_general_ci\'; ' + 'USE `'+config.mysql.dbname+'`; ' + 'CREATE TABLE IF NOT EXISTS `'+config.mysql.dbname+'`.`experiments` ( ' + '`id` INT UNSIGNED NOT NULL AUTO_INCREMENT, ' + '`account_id` INT NOT NULL, ' + '`project_id` INT NULL, ' + '`title` VARCHAR(255) NULL, ' + '`created_date` VARCHAR(255) NULL, ' + '`updated_date` VARCHAR(255) NULL, ' + 'PRIMARY KEY (`id`));', function(err, rows) { if (err) throw err; console.log('Successfully created schema'); connection.end(); } ); }
#!/usr/bin/env sh

# Package the Sony dual-SIM patcher into a versioned flashable zip.

VERSION=5

mkdir -p zips

# Idiom fix: `rm -f` replaces the previous `if [ -f ... ]; then rm ...; fi`
# dance, and the expansions are quoted to survive unusual paths.
rm -f "zips/sony-dualsim-patcher-v${VERSION}.zip"

zip -r "zips/sony-dualsim-patcher-v${VERSION}.zip" META-INF tmp
<reponame>matt-slater/delivery-client import { Component, OnInit, OnDestroy } from '@angular/core'; import { Router } from '@angular/router'; import { Delivery } from './delivery'; import { DeliveryService} from './delivery.service'; declare var SockJS: any; declare var Stomp: any; @Component({ selector: 'deliverytable', templateUrl: 'app/deliveries/deliverytable.html' }) export class DeliveryTable implements OnInit, OnDestroy { public stompClient: Stomp; public endpoint = "http://localhost:8080/delivery-ws"; public channelPath = "/topic/openDeliveries"; public deliveries = []; public connected: boolean; constructor(private router: Router, private _deliveryService: DeliveryService) { this.stompClient = Stomp.over(new SockJS(this.endpoint)); let stompClient = this.stompClient; let channelPath = this.channelPath; let response = this.response; this.stompClient.connect({}, (frame) => { console.log('Connected: ' + frame); this.connected = true; stompClient.subscribe(channelPath, (deliveryList) => { //this.deliveries = deliveryList.body; this.deliveries = JSON.parse(deliveryList.body); }); }); } public setResponse(x: []) { console.log('in response function'); this.deliveries = x; } ngOnDestroy() { if (this.connected) { this.stompClient.disconnect(); this.connected = false; } } ngOnInit() { this._deliveryService.getAllOpenDeliveries().subscribe( data => {this.deliveries = data}, err => {console.log(err)} ); } onSelect(delivery: Delivery) { this.router.navigate(['/delivery', delivery.id]); } }
<reponame>RockSolidKnowledge/ActionIt import { Component, OnInit } from '@angular/core'; import { IToDo } from '../../shared'; import { ToDoService } from './todoService'; import * as moment from 'moment'; import * as _ from 'underscore'; import { ActivatedRoute } from '@angular/router'; @Component({ templateUrl: 'todos.component.html', styles: [require('./todos.component.scss').toString()] }) export class ToDosComponent implements OnInit { public routeData: IExtraRouteData; private todos: IToDo[]; constructor(private _todoService: ToDoService, private _route: ActivatedRoute) { } public ngOnInit(): void { this._route.data.subscribe(data => { this.routeData = data as IExtraRouteData; }); this._todoService.getCurrentToDos().subscribe( (todos) => this.todos = todos, (error) => console.error(error) ); } public overdue(): IToDo[] { let filtered = _.chain(this.todos) .filter((todo) => { return todo.dueDate.isBefore(moment().add(this.routeData.days), 'day'); }) .sortBy((todo) => todo.dueDate); return filtered.value(); } public today(): IToDo[] { let filtered = _.filter(this.todos, (todo) => { return todo.dueDate.isSame(moment(), 'day'); }); return filtered; } public inDateRange(): IToDo[] { let filtered = _.chain(this.todos).filter((todo) => { if (this.routeData.days === 0) { return todo.dueDate.isAfter(moment(), 'day'); } return todo.dueDate.isSameOrBefore(moment().add(this.routeData.days, 'day'), 'day') && todo.dueDate.isAfter(moment(), 'day'); }).sortBy((todo) => todo.dueDate); return filtered.value(); } } interface IExtraRouteData { days: number; title: string; }
/*
 * @Description: Number panel (dashboard statistic card)
 * @Author: Pony
 * @Date: 2021-08-14 22:45:16
 * @LastEditors: Pony
 * @LastEditTime: 2021-08-14 23:36:35
 */
import React, { FC } from 'react';
import { Card } from 'antd';
import iconMap from '@/utils/iconMap'
import styles from './NumberCard.less';

// Props for the NumberCard dashboard tile.
interface NumberCardType {
    icon: string;    // key into iconMap selecting the rendered icon
    color: string;   // CSS color applied to the icon wrapper
    title: string;   // label shown above the number
    number: number;  // metric value, formatted with locale grouping
}

const NumberCard: FC<NumberCardType> = ({ icon, color, title, number }) => {
    return (
        <Card
            className={styles.numberCard}
            bordered={false}
            bodyStyle={{ padding: 10 }}
        >
            {/* Icon tinted via inline style so each card can use its own color. */}
            <span className={styles.iconWarp} style={{ color }}>
                {iconMap[icon as keyof typeof iconMap]}
            </span>
            <div className={styles.content}>
                <p className={styles.title}>{title || 'No Title'}</p>
                {/* NOTE(review): `number && ...` renders the raw numeral 0
                    (unformatted) when the value is zero, since React renders
                    the number 0 even though it is falsy — confirm intent. */}
                <p className={styles.number}>
                    {number && number.toLocaleString()}
                </p>
            </div>
        </Card>
    );
};

export default NumberCard;
#!/bin/bash
#
# Test integration with dkimpy.

# Abort on the first failing command; lib.sh supplies init/skip/fail/etc.
set -e
. $(dirname ${0})/../util/lib.sh

init
check_hostaliases

# Check if dkimpy tools are installed in /usr/bin, and driusan/dkim is
# installed somewhere else in $PATH.
#
# Unfortunately we need both because dkimpy's dkimverify lacks the features
# needed to use it in integration testing.
#
# We need to run them and check the help because there are other binaries with
# the same name.
# This is really hacky but the most practical way to handle it, since they
# both have the same binary names.
if ! /usr/bin/dkimsign --help 2>&1 | grep -q -- --identity; then
	skip "/usr/bin/dkimsign is not dkimpy's"
fi

if ! dkimverify --help 2>&1 < /dev/null | grep -q -- "-txt string"; then
	skip "dkimverify is not driusan/dkim's"
fi

# Set up TLS certs and a fresh DKIM keypair for the test server.
generate_certs_for testserver
( mkdir -p .dkimcerts; cd .dkimcerts; dknewkey private > log 2>&1 )

add_user user@testserver secretpassword
add_user someone@testserver secretpassword

# Launch the server under test and wait for its SMTP port.
mkdir -p .logs
chasquid -v=2 --logfile=.logs/chasquid.log --config_dir=config &
wait_until_ready 1025

# Authenticated: user@testserver -> someone@testserver
# Should be signed.
run_msmtp someone@testserver < content
wait_for_file .mail/someone@testserver
mail_diff content .mail/someone@testserver
grep -q "DKIM-Signature:" .mail/someone@testserver

# Verify the signature manually, just in case.
# NOTE: This is using driusan/dkim instead of dkimpy, because dkimpy can't be
# overriden to get the DNS information from anywhere else (text file or custom
# DNS server).
dkimverify -txt .dkimcerts/private.dns < .mail/someone@testserver

# Save the signed mail so we can verify it later.
# Drop the first line ("From blah") so it can be used as email contents.
tail -n +2 .mail/someone@testserver > .signed_content

# Not authenticated: someone@testserver -> someone@testserver
smtpc.py --server=localhost:1025 < .signed_content

# Check that the signature fails on modified content.
echo "Added content, invalid and not signed" >> .signed_content
if smtpc.py --server=localhost:1025 < .signed_content 2> /dev/null; then
	fail "DKIM verification succeeded on modified content"
fi

success
import React, { Component } from 'react'; import Scatter from './scatter.js'; import Stack from './stack.js'; const RumbleCharts = () => (<section className="rumble page"> <Scatter /> <Stack /> </section>); export default RumbleCharts;
from django.contrib import admin

from pinax.apps.waitinglist.models import WaitingListEntry


class WaitingListEntryAdmin(admin.ModelAdmin):
    """Changelist configuration for waiting-list signups."""

    # Surface the signup address and creation timestamp in the list view.
    list_display = ["email", "created"]


admin.site.register(WaitingListEntry, WaitingListEntryAdmin)
import Axis from './axis/Axis';
import AxisLeft from './axis/AxisLeft';
import AxisRight from './axis/AxisRight';
import AxisTop from './axis/AxisTop';
import AxisBottom from './axis/AxisBottom';

// Aggregate the axis components behind a single default export.
const axes = {
    Axis,
    AxisLeft,
    AxisRight,
    AxisTop,
    AxisBottom,
};

export default axes;
package heap;

/*
 * What is a heap?
 * -> A priority queue: elements are dequeued by priority, stored as a
 *    special (complete) binary tree kept implicitly in an array.
 *
 * 1. Max-heap: the largest value is always at the root.
 *    push: append at the next free slot (left-to-right, complete tree),
 *          then sift up by swapping with the parent while larger; O(log n).
 *    poll: remove the root (the maximum), move the last element to the root,
 *          then sift down, swapping with the LARGER of the two children
 *          until the heap property holds again; O(log n).
 *
 * 2. Min-heap: same structure with the smallest value at the root; O(log n).
 */
public class HeapAccess {

    public static void main(String[] args) throws Exception {
    }

    /**
     * Array-backed max-heap. Index 0 is unused so that for element i the
     * parent is i/2 and the children are 2i and 2i+1.
     */
    private static class MaxHeap {

        /** Backing array length; usable capacity is MAX_HEAP_SIZE - 1 (slot 0 unused). */
        private static final int MAX_HEAP_SIZE = 1000;

        // heap array (1-based storage)
        private int[] heap;

        // number of elements currently stored
        private int size;

        public MaxHeap() {
            heap = new int[MAX_HEAP_SIZE];
        }

        /**
         * Swaps the elements at the two given indices.
         * (Previously an empty no-op, which silently broke offer() and poll().)
         */
        public void swapHeap(int idx1, int idx2) {
            int tmp = heap[idx1];
            heap[idx1] = heap[idx2];
            heap[idx2] = tmp;
        }

        /**
         * Inserts a value, then sifts it up while it is larger than its parent.
         *
         * @throws IllegalStateException if the fixed-capacity array is full
         */
        public void offer(int data) {
            if (size >= MAX_HEAP_SIZE - 1) {
                throw new IllegalStateException("heap is full");
            }
            int offerIdx = ++size;
            heap[offerIdx] = data;

            while (offerIdx > 1 && heap[offerIdx / 2] < heap[offerIdx]) {
                swapHeap(offerIdx / 2, offerIdx);
                offerIdx /= 2;
            }
        }

        /**
         * Removes and returns the maximum (the root). The last element is moved
         * to the root and sifted down, always swapping with the larger child.
         * Child slots are only inspected while they are within {@code size},
         * so stale values past the end of the heap are never compared.
         *
         * @return the largest value currently in the heap
         * @throws IllegalStateException if the heap is empty
         */
        public int poll() {
            if (size == 0) {
                throw new IllegalStateException("heap is empty");
            }
            int pollData = heap[1];
            heap[1] = heap[size];
            heap[size--] = Integer.MIN_VALUE;

            int rootIdx = 1;
            while (true) {
                int leftIdx = rootIdx * 2;
                int rightIdx = leftIdx + 1;
                int largestIdx = rootIdx;

                if (leftIdx <= size && heap[leftIdx] > heap[largestIdx]) {
                    largestIdx = leftIdx;
                }
                if (rightIdx <= size && heap[rightIdx] > heap[largestIdx]) {
                    largestIdx = rightIdx;
                }
                if (largestIdx == rootIdx) {
                    break; // heap property restored
                }
                swapHeap(rootIdx, largestIdx);
                rootIdx = largestIdx;
            }
            return pollData;
        }
    }
}
/**
 * The helper functions.
 *
 */

/*exported isIntersect, showTextLetterByLetter */

/**
 * Helper function to see if two rectangles are intersecting.
 *
 * @param  {number} ax - The x-coordinate for the first object.
 * @param  {number} ay - The y-coordinate for the first object.
 * @param  {number} aw - The width of the first object.
 * @param  {number} ah - The height of the first object.
 * @param  {number} bx - The x-coordinate for the second object.
 * @param  {number} by - The y-coordinate for the second object.
 * @param  {number} bw - The width of the second object.
 * @param  {number} bh - The height of the second object.
 *
 * @return {boolean} True if the objects intersects, false otherwise.
 */
function isIntersect(ax, ay, aw, ah, bx, by, bw, bh) {
    return ax < bx + bw && bx < ax + aw && ay < by + bh && by < ay + ah;
}

/**
 * Helper function to show a text letter by letter with a delay.
 *
 * @param  {Object} ct - the canvas context.
 * @param  {number} timer - the timer controlling the time to show the text.
 * @param  {number} index - the position in the text array.
 * @param  {string} text - the text stored in an array.
 * @param  {number} offsetX - the position in x led for the character.
 * @param  {number} offsetY - the position in y led for the character.
 * @param  {number} textDistance - the distance between the characters.
 *
 * @return {number} the updated index, so the caller can pass it back in on
 *                  the next frame (previously documented as void by mistake).
 */
function showTextLetterByLetter(ct, timer, index, text, offsetX, offsetY, textDistance) {
    // Reveal one more letter every 8th tick. Use logical '&&' rather than
    // the bitwise '&' the original had; with booleans they coincide, but
    // '&&' states the intent and short-circuits.
    if (timer % 8 === 0 && index < text.length) {
        index++;
    }

    for (var i = 0; i < index; i++) {
        ct.fillText(text[i], offsetX, offsetY);
        offsetX += textDistance;
    }
    return index;
}
/** * @file Manage unit tests for the month class. */ // ━━ IMPORT MODULES ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ const Month = require('../src'); const datebook = require('./fixture/dummy.datebook'); // ━━ CONSTANTS ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ const MONTH_HOLIDAYS = [ { name: { describe: 'Month: property holidays', test: 'month.holidays match array contents', }, options: { current: new Date(2021, 0, 15), weekend: '0000001', datebook, }, expected: [4, 25, 8, 15], }, { name: { describe: 'Month: property holidays', test: 'month.holidays match array contents', }, options: { current: new Date(2021, 0, 15), weekend: '0000011', datebook, }, expected: [4, 25, 8, 15], }, { name: { describe: 'Month: property holidays', test: 'month.holidays match array contents', }, options: { current: new Date(2021, 1, 10), weekend: '0000001', datebook, }, expected: [1, 22, 5, 12], }, { name: { describe: 'Month: property holidays', test: 'month.holidays match array contents', }, options: { current: new Date(2021, 2, 15), weekend: '0000001', datebook, }, expected: [3, 24, 25, 31], }, ]; // ━━ MODULE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ describe.each(MONTH_HOLIDAYS)('$name.describe', ({ name, options, expected }) => { const month = new Month(options); test(name.test, () => { expect(month.holidays).toEqual(expected); }); });
import base64
from io import BytesIO

from PIL import Image

# Default payload: a small base64-encoded GIF icon (kept verbatim so the
# zero-argument call behaves exactly as before).
_DEFAULT_IMAGE_B64 = "R0lGODlhEAAQAPQAAP///wAAAMDAwICAgP8AAP///wAAAAAAAAAAACH5BAEAAAMALAAAAAAQABAAAAe4gHOCg4SFhoU7VVVWTIeEOlFRU0aOg1ROTk9HlYJSSEhKL4MbGhuFUERERTGDGRQZhU1CQkMwgxcTF4VLPz9BNoMYEhhwYF9gV0k9PUA4gxYRFm9kY2RYNzw8LCKDFQwVcWJhYlkyKCg+I4MQCxBybGtsWjMlJSskgw8KDwJqaGpbaJgwoeLDIAcHHAxIYyYNlxonTqQAMagBggYEzpQ50yVHixYuQgwykMBAgTZu2njp4KElhzmBAAA7"


def decode_image(b64_data=None):
    """Decode a base64-encoded image into a PIL Image.

    Args:
        b64_data: Optional base64 string to decode. When omitted, the
            built-in icon above is used, preserving the original
            zero-argument behavior.

    Returns:
        PIL.Image.Image: the decoded image.

    Raises:
        binascii.Error: if ``b64_data`` is not valid base64.
        PIL.UnidentifiedImageError: if the decoded bytes are not an image.
    """
    if b64_data is None:
        b64_data = _DEFAULT_IMAGE_B64
    img_bytes = base64.b64decode(b64_data)
    return Image.open(BytesIO(img_bytes))
package cyclops.function.combiner;

import cyclops.container.immutable.tuple.Tuple;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.container.immutable.tuple.Tuple3;
import cyclops.container.immutable.tuple.Tuple4;
import cyclops.container.transformable.Transformable;
import cyclops.function.enhanced.Function3;
import cyclops.function.enhanced.Function4;
import java.util.function.BiFunction;
import org.reactivestreams.Publisher;

/**
 * A Data Type that can be comined with another data type
 *
 * @param <T> Data type of element(s) of this Zippable
 * @author johnmcclean
 */
public interface Zippable<T> extends Iterable<T>, Publisher<T>, Transformable<T> {

    /**
     * Zip (combine) this Zippable with the supplied Iterable using the supplied combining function
     *
     * @param iterable to zip with
     * @param fn       Zip function
     * @return Combined zippable
     */
    <T2, R> Zippable<R> zip(final Iterable<? extends T2> iterable,
                            final BiFunction<? super T, ? super T2, ? extends R> fn);

    /**
     * Zip (combine) this Zippable with the supplied Publisher, using the supplied combining function
     *
     * @param fn        Zip / combining function
     * @param publisher to combine with
     * @return Combined zippable
     */
    <T2, R> Zippable<R> zip(final BiFunction<? super T, ? super T2, ? extends R> fn,
                            final Publisher<? extends T2> publisher);

    /**
     * Zip this Zippable with a Publisher, pairing corresponding elements into Tuple2s.
     *
     * @param other Publisher to combine with
     * @return Zippable of (this element, other element) pairs
     */
    default <U> Zippable<Tuple2<T, U>> zipWithPublisher(final Publisher<? extends U> other) {
        return zip((a, b) -> Tuple.tuple(a,
                                         b),
                   other);
    }

    /**
     * Zip this Zippable with an Iterable, pairing corresponding elements into Tuple2s.
     *
     * @param other Iterable to combine with
     * @return Zippable of (this element, other element) pairs
     */
    default <U> Zippable<Tuple2<T, U>> zip(final Iterable<? extends U> other) {
        return zip(other,
                   Tuple::tuple);
    }

    /**
     * Three-way zip into Tuple3s, implemented as two pairwise zips: the
     * intermediate Tuple2 from the first zip is flattened into the final Tuple3.
     *
     * @param second second source of elements
     * @param third  third source of elements
     * @return Zippable of (this, second, third) triples
     */
    default <S, U> Zippable<Tuple3<T, S, U>> zip3(final Iterable<? extends S> second,
                                                  final Iterable<? extends U> third) {
        return zip(second,
                   Tuple::tuple).zip(third,
                                     (a, b) -> Tuple.tuple(a._1(),
                                                           a._2(),
                                                           b));
    }

    /**
     * Three-way zip combined through the supplied 3-arg function.
     * The cast is safe: mapping the Tuple3 stream yields R elements.
     *
     * @param second second source of elements
     * @param third  third source of elements
     * @param fn3    combining function applied to each triple
     * @return Zippable of combined values
     */
    default <S, U, R> Zippable<R> zip3(final Iterable<? extends S> second,
                                       final Iterable<? extends U> third,
                                       final Function3<? super T, ? super S, ? super U, ? extends R> fn3) {
        return (Zippable<R>) zip3(second,
                                  third).map(t -> fn3.apply(t._1(),
                                                            t._2(),
                                                            t._3()));
    }

    /**
     * Four-way zip into Tuple4s: builds Tuple3s first, then appends the fourth
     * element (the Tuple4 cast fixes the generic result of Tuple.tuple).
     *
     * @param second second source of elements
     * @param third  third source of elements
     * @param fourth fourth source of elements
     * @return Zippable of (this, second, third, fourth) quadruples
     */
    default <T2, T3, T4> Zippable<Tuple4<T, T2, T3, T4>> zip4(final Iterable<? extends T2> second,
                                                              final Iterable<? extends T3> third,
                                                              final Iterable<? extends T4> fourth) {
        return zip(second,
                   Tuple::tuple).zip(third,
                                     (a, b) -> Tuple.tuple(a._1(),
                                                           a._2(),
                                                           b))
                                .zip(fourth,
                                     (a, b) -> (Tuple4<T, T2, T3, T4>) Tuple.tuple(a._1(),
                                                                                   a._2(),
                                                                                   a._3(),
                                                                                   b));
    }

    /**
     * Four-way zip combined through the supplied 4-arg function.
     *
     * @param second second source of elements
     * @param third  third source of elements
     * @param fourth fourth source of elements
     * @param fn     combining function applied to each quadruple
     * @return Zippable of combined values
     */
    default <T2, T3, T4, R> Zippable<R> zip4(final Iterable<? extends T2> second,
                                             final Iterable<? extends T3> third,
                                             final Iterable<? extends T4> fourth,
                                             final Function4<? super T, ? super T2, ? super T3, ? super T4, ? extends R> fn) {
        return (Zippable<R>) zip4(second,
                                  third,
                                  fourth).map(t -> fn.apply(t._1(),
                                                            t._2(),
                                                            t._3(),
                                                            t._4()));
    }

}
package cyclops.async.reactive.futurestream.react.completablefuture;

import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

import cyclops.async.reactive.futurestream.LazyReact;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Stream;
import org.junit.Test;

/**
 * Ports a non-concurrent RxJava pipeline to LazyReact future streams.
 * Original example: http://blog.danlew.net/2014/09/22/grokking-rxjava-part-2/
 * (query -> flatMap urls -> getTitle -> filter non-null -> take/skip ->
 * save -> print).
 */
public class RxJavaConversionTest {

    /** Number of titles passed to saveTitle(); asserted by both tests. */
    volatile int savedCalled = 0;

    @Test
    public void rxConversion() throws InterruptedException, ExecutionException {
        // Conversion of the RxJava example above: take the first 5 titles.
        List<String> titles = new LazyReact().fromStreamFutures(Stream.of(query("Hello, world!")))
                                             .flatMap(Collection::stream)
                                             .peek(System.out::println).<String>then(url -> getTitle(url))
                                             .filter(Objects::nonNull)
                                             .limit(5)
                                             .peek(title -> saveTitle(title))
                                             .peek(System.out::println)
                                             .block();

        assertThat(titles.size(), is(5));
        assertThat(savedCalled, is(5));
    }

    @Test
    public void rxConversionTestSkip() throws InterruptedException, ExecutionException {
        // Same pipeline, but skipping the first 5 of the 9 fixture URLs.
        List<String> titles = new LazyReact().from(query("Hello, world!").get()).<String>then(url -> getTitle(url))
                                             .filter(Objects::nonNull)
                                             .skip(5)
                                             .peek(title -> saveTitle(title))
                                             .peek(System.out::println)
                                             .block();

        assertThat(titles.size(), is(4));
        assertThat(savedCalled, is(4));
    }

    /** Records that a title was "saved"; synchronized because the stream may call it concurrently. */
    private synchronized void saveTitle(String title) {
        savedCalled++;
    }

    /** Extracts the last path segment of the URL as the "title". */
    private String getTitle(String url) {
        return url.substring(url.lastIndexOf('/'));
    }

    /**
     * Returns an already-completed future with 9 fixture URLs.
     * (Was a raw CompletableFuture; now properly parameterized.)
     */
    private CompletableFuture<List<String>> query(String string) {
        CompletableFuture<List<String>> future = new CompletableFuture<>();
        future.complete(Arrays.asList("http://blog.danlew.net/2014/09/22/grokking-rxjava-part-2",
                                      "http://blog.danlew.net/2014/09/30/grokking-rxjava-part-3",
                                      "http://blog.danlew.net/2014/09/30/grokking-rxjava-part-3",
                                      "http://blog.danlew.net/2014/09/30/grokking-rxjava-part-3",
                                      "http://blog.danlew.net/2014/09/30/grokking-rxjava-part-3",
                                      "http://blog.danlew.net/2014/09/30/grokking-rxjava-part-3",
                                      "http://blog.danlew.net/2014/09/30/grokking-rxjava-part-3",
                                      "http://blog.danlew.net/2014/09/30/grokking-rxjava-part-3",
                                      "http://blog.danlew.net/2014/09/30/grokking-rxjava-part-3"));
        return future;
    }
}
import { classes } from './a.st.css';

/**
 * Renders CompA as an HTML string, combining the stylable root class with
 * any extra className passed by the caller.
 */
export function CompA({ className, children }) {
    const rootClass = `${classes.root} ${className}`;
    return `<div class="${rootClass}">CompA ${children}</div>`;
}
#!/usr/bin/env sh

# Container entrypoint: abort on any failure, start PHP-FPM daemonized
# (-D), then run nginx in the foreground so it stays PID-visible as the
# container's main process.
set -e

/usr/sbin/php-fpm8 -D
/usr/sbin/nginx -g 'daemon off; pid /run/nginx.pid;'
#!/usr/bin/env bash
#
# Out-of-tree CMake build driver: configures the project into ./build with
# the arm-none-eabi-gcc toolchain file and installs into ./install.

# Abort on the first failing command — previously a failed mkdir/cd would
# let cmake run against the wrong directory.
set -e

# Print the absolute path of the given file or directory.
get_full_path() {
    if [[ -d "$1" ]]; then
        echo "$(cd "$1" && pwd)"
    else
        echo "$(cd "$(dirname "$1")" && pwd)/$(basename "$1")"
    fi
}

main() {
    SRC_DIR="."
    BUILD_DIR="./build"
    INSTALL_DIR="./install"
    TOOLCHAIN="./cmake/arm-none-eabi-gcc_toolchain.cmake"

    # -p replaces the explicit existence checks and is race-free.
    mkdir -p "${BUILD_DIR}" "${INSTALL_DIR}"

    SRC_PREFIX=$(get_full_path "${SRC_DIR}")
    BUILD_PREFIX=$(get_full_path "${BUILD_DIR}")
    INSTALL_PREFIX=$(get_full_path "${INSTALL_DIR}")
    TOOLCHAIN_PATH=$(get_full_path "${TOOLCHAIN}")

    echo "======================================"
    echo "source :${SRC_PREFIX}"
    echo "build :${BUILD_PREFIX}"
    echo "install :${INSTALL_PREFIX}"
    echo "toolchain :${TOOLCHAIN_PATH}"
    echo "======================================"

    # All expansions quoted so paths with spaces survive word splitting.
    cd "${BUILD_PREFIX}"
    cmake "${SRC_PREFIX}" \
        -DCMAKE_TOOLCHAIN_FILE="${TOOLCHAIN_PATH}" \
        -DCMAKE_BINARY_DIR="${BUILD_PREFIX}" \
        -DCMAKE_INSTALL_PREFIX="${INSTALL_PREFIX}"
    make install
}

main "$@"
#!@PERL@
# Copyright (C) 2000 MySQL AB & MySQL Finland AB & TCX DataKonsult AB
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free
# Software Foundation, Inc., 59 Temple Place - Suite 330, Boston,
# MA 02111-1307, USA
#
# Test of extreme tables.
#

##################### Standard benchmark inits ##############################

use DBI;
use Benchmark;

$opt_loop_count=1000;	# Change this to make test harder/easier
$opt_field_count=1000;

chomp($pwd = `pwd`); $pwd = "." if ($pwd eq '');
require "$pwd/bench-init.pl" || die "Can't read Configuration file: $!\n";

# Clamp the field count to what the server supports (max columns, and the
# maximum query size divided by the per-column footprint).
$opt_field_count=min($opt_field_count,$limits->{'max_columns'},
		     ($limits->{'query_size'}-30)/14);
$opt_loop_count*=10 if ($opt_field_count<100);	# mSQL has so few fields...

if ($opt_small_test)
{
  $opt_loop_count/=10;
  $opt_field_count/=10;
}

print "Testing of some unusual tables\n";
print "All tests are done $opt_loop_count times with $opt_field_count fields\n\n";

####
#### Testing many fields
####

$dbh = $server->connect();
print "Testing table with $opt_field_count fields\n";

# Drop any leftover table from a previous run (ignore failure).
$sth = $dbh->do("drop table bench1" . $server->{'drop_attr'});

# Build column definitions i1..iN and the matching "1,1,...,1" VALUES list.
my @fields=();
my @index=();
my $fields="i1";
push(@fields,"$fields int");
$values= "1," x ($opt_field_count-1) . "1";
for ($i=2 ; $i <= $opt_field_count ; $i++)
{
  push(@fields,"i${i} int");
  $fields.=",i${i}";
}
$start_time=new Benchmark;
do_many($dbh,$server->create("bench1",\@fields,\@index));
$sth = $dbh->do("insert into bench1 values ($values)") or die $DBI::errstr;

if ($opt_fast && defined($server->{vacuum}))
{
  $server->vacuum(0,\$dbh);
}

test_query("Testing select * from table with 1 record",
	   "Time to select_many_fields",
	   "select * from bench1",
	   $dbh,$opt_loop_count);

if ($limits->{'working_all_fields'})
{
  test_query("Testing select all_fields from table with 1 record",
	     "Time to select_many_fields",
	     "select $fields from bench1",
	     $dbh,$opt_loop_count);
}

test_query("Testing insert VALUES()",
	   "Time to insert_many_fields",
	   "insert into bench1 values($values)",
	   $dbh,$opt_loop_count);

if ($opt_fast && defined($server->{vacuum}))
{
  $server->vacuum(0,\$dbh);
}

test_command("Testing insert (all_fields) VALUES()",
	     "Time to insert_many_fields",
	     "insert into bench1 ($fields) values($values)",
	     $dbh,$opt_loop_count);

$sth = $dbh->do("drop table bench1" . $server->{'drop_attr'}) or die $DBI::errstr;

if ($opt_fast && defined($server->{vacuum}))
{
  $server->vacuum(0,\$dbh);
}

################################ END ###################################
####
#### End of the test...Finally print time used to execute the
#### whole test.

$dbh->disconnect;
end_benchmark($start_time);

############################ HELP FUNCTIONS ##############################

# Run a SELECT $count times, fetching all rows each iteration, and print
# the elapsed Benchmark timing.
sub test_query
{
  my($test_text,$result_text,$query,$dbh,$count)=@_;
  my($i,$loop_time,$end_time);

  print $test_text . "\n";
  $loop_time=new Benchmark;
  for ($i=0 ; $i < $count ; $i++)
  {
    defined(fetch_all_rows($dbh,$query)) or die $DBI::errstr;
  }
  $end_time=new Benchmark;
  print $result_text . "($count): " .
    timestr(timediff($end_time, $loop_time),"all") . "\n\n";
}

# Run a non-SELECT statement $count times and print the elapsed timing.
sub test_command
{
  my($test_text,$result_text,$query,$dbh,$count)=@_;
  my($i,$loop_time,$end_time);
  print $test_text . "\n";
  $loop_time=new Benchmark;
  for ($i=0 ; $i < $count ; $i++)
  {
    $dbh->do($query) or die $DBI::errstr;
  }
  $end_time=new Benchmark;
  print $result_text . "($count): " .
    timestr(timediff($end_time, $loop_time),"all") . "\n\n";
}
# # Copyright (c) 2018 ISP RAS (http://www.ispras.ru) # Ivannikov Institute for System Programming of the Russian Academy of Sciences # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import logging import logging.config import argparse import os import json import shutil import subprocess import queue import threading import time import signal import zipfile import re import glob import multiprocessing import sys import consulate from xml.etree import ElementTree # This should prevent rumbling of urllib3 logging.getLogger("urllib3").setLevel(logging.WARNING) logging.getLogger("consulate").setLevel(logging.WARNING) class StreamQueue: """ Implements queue to work with output stream to catch stderr or stdout. """ def __init__(self, stream, stream_name, collect_all_output=False): self.stream = stream self.stream_name = stream_name self.collect_all_output = collect_all_output self.queue = queue.Queue() self.finished = False self.traceback = None self.thread = threading.Thread(target=self.__put_lines_from_stream_to_queue) self.output = [] def get(self): try: return self.queue.get_nowait() except queue.Empty: return None def join(self): self.thread.join() def start(self): self.thread.start() def __put_lines_from_stream_to_queue(self): try: # This will put lines from stream to queue until stream will be closed. For instance it will happen when # execution of command will be completed. 
for line in self.stream: line = line.decode('utf8').rstrip() self.queue.put(line) if self.collect_all_output: self.output.append(line) # Nothing will be put to queue from now. self.finished = True except Exception: import traceback self.traceback = traceback.format_exc().rstrip() def common_initialization(tool, conf=None): """ Start execution of the corresponding cloud tool. :param tool: Tool name string. :param conf: Configuration dictionary. :return: Configuration dictionary. """ if not conf: # Parse configuration parser = argparse.ArgumentParser(description='Start cloud {} according to the provided configuration.'. format(tool)) parser.add_argument('config', metavar="CONF", help='Path to the cloud configuration file.') args = parser.parse_args() # Read configuration from file. with open(args.config, encoding="utf8") as fp: conf = json.load(fp) if "Klever Bridge" not in conf: raise KeyError("Provide configuration property 'Klever Bridge' as an JSON-object") if tool != "Client controller": if "scheduler" not in conf: raise KeyError("Provide configuration property 'scheduler' as an JSON-object") if "Klever jobs and tasks queue" not in conf: raise KeyError("Provide configuration property 'Klever jobs and tasks queue' as an JSON-object") # Check common configuration if "common" not in conf: raise KeyError("Provide configuration property 'common' as an JSON-object") # Prepare working directory if "working directory" not in conf["common"]: raise KeyError("Provide configuration property 'common''working directory'") else: conf["common"]['working directory'] = os.path.abspath(conf["common"]['working directory']) clean_dir = False if os.path.isdir(conf["common"]['working directory']) and not conf["common"].get("keep working directory", False): clean_dir = True shutil.rmtree(conf["common"]['working directory'], True) os.makedirs(conf["common"]['working directory'].encode("utf8"), exist_ok=True) os.chdir(conf["common"]['working directory']) # Configure logging if "logging" 
not in conf["common"]: raise KeyError("Provide configuration property 'common''logging' according to Python logging specs") logging.config.dictConfig(conf["common"]['logging']) logger = logging.getLogger() # Report about the dir if clean_dir: # Go to the working directory to avoid creating files elsewhere logger.debug("Clean working dir: {0}".format(conf["common"]['working directory'])) logger.debug("Create working dir: {0}".format(conf["common"]['working directory'])) else: logger.info("Keep working directory from the previous run") def handle_exception(exc_type, exc_value, exc_traceback): logger.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)) sys.excepthook = handle_exception return conf, logger def split_archive_name(path): """ Split archive name into file name and extension. The difference with is.path.splitext is that this function can properly parse double zipped archive names like myname.tar.gz providing "myname" and ".tar.gz". Would not work properly with names which contain dots. :param path: File path or file name. :return: tuple with file name at the first position and extension within the second one. """ name = path extension = "" while "." in name: split = os.path.splitext(name) name = split[0] extension = split[1] + extension return name, extension def get_output(command): """ Return STDOUT of the command. :param command: a command to be executed to get an entity value. """ val = subprocess.getoutput(command) if not val: raise ValueError('Cannot get anything executing {}'.format(command)) return val def extract_system_information(): """ Extract information about the system and return it as a dictionary. 
:return: dictionary with system info, """ system_conf = dict() system_conf["node name"] = get_output('uname -n') system_conf["CPU model"] = get_output('cat /proc/cpuinfo | grep -m1 "model name" | sed -r "s/^.*: //"') system_conf["CPU number"] = len(extract_cpu_cores_info().keys()) system_conf["RAM memory"] = \ int(get_output('cat /proc/meminfo | grep "MemTotal" | sed -r "s/^.*: *([0-9]+).*/1024 * \\1/" | bc')) system_conf["disk memory"] = 1024 * int(get_output('df ./ | grep / | awk \'{ print $4 }\'')) system_conf["Linux kernel version"] = get_output('uname -r') system_conf["arch"] = get_output('uname -m') return system_conf def sort_priority(priority): """ Use the function to sort tasks by their priorities. For higher priority return higher integer. :param priority: String. :return: 3, 2, 1, 0 """ if priority == "IDLE": return 0 elif priority == "LOW": return 1 elif priority == "HIGH": return 2 elif priority == "URGENT": return 3 else: raise ValueError("Unknown priority: {}".format(priority)) def higher_priority(one, two, strictly=False): """ Compare that one priority is higher than second priority. If the third argument is True (False by default) than comparison is strict. :param one: 'IDLE', 'LOW', 'HIGH' or 'URGENT' :param two: 'IDLE', 'LOW', 'HIGH' or 'URGENT' :param strictly: False or True :return: one > two or one >= two (default) """ one_priority = sort_priority(one) two_priority = sort_priority(two) if strictly: return one_priority > two_priority else: return one_priority >= two_priority def dir_size(dir): """ Measure size of the given directory. :param dir: Path string. :return: integer size in Bytes. """ if not os.path.isdir(dir): raise ValueError('Expect existing directory but it is not: {}'.format(dir)) output = get_output('du -bs {} | cut -f1'.format(dir)) try: res = int(output) except ValueError as e: # One of the files inside the dir has been removed. We should delete the warning message. 
splts = output.split('\n') if len(splts) < 2: # Can not delete the warning message raise e else: res = int(splts[-1]) return res def execute(args, env=None, cwd=None, timeout=0.5, logger=None, stderr=sys.stderr, stdout=sys.stdout, disk_limitation=None, disk_checking_period=30): """ Execute given command in a separate process catching its stderr if necessary. :param args: Command erguments. :param env: Environment variables. :param cwd: Current working directory to run the command. :param timeout: Timeout for the command. :param logger: Logger object. :param stderr: Pipe or file descriptor to redirect output. Use it if logger is not provided. :param stderr: Pipe or file descriptor to redirect output. Use it if logger is not provided. :param disk_limitation: Allowed integer size of disk memory in Bytes of current working directory. :param disk_checking_period: Integer number of seconds for the disk space measuring interval. :return: subprocess.Popen.returncode. """ original_sigint_handler = signal.getsignal(signal.SIGINT) original_sigtrm_handler = signal.getsignal(signal.SIGTERM) def restore_handlers(): signal.signal(signal.SIGTERM, original_sigtrm_handler) signal.signal(signal.SIGINT, original_sigint_handler) def process_alive(pid): try: os.kill(pid, 0) except OSError: return False else: return True def handler(arg1, arg2): def terminate(): print("{}: Cancellation of {} is successfull, exiting".format(os.getpid(), pid)) os._exit(-1) # Repeate until it dies if p and p.pid: pid = p.pid print("{}: Cancelling process {}".format(os.getpid(), pid)) # Sent initial signals try: os.kill(pid, signal.SIGINT) except ProcessLookupError: terminate() restore_handlers() try: # Try to wait - it helps if a process is waiting for something, we need to check its status p.wait(timeout=10) except subprocess.TimeoutExpired: print('{}: Process {} is still alive ...'.format(os.getpid(), pid)) # Lets try it again time.sleep(10) terminate() def set_handlers(): signal.signal(signal.SIGTERM, 
handler) signal.signal(signal.SIGINT, handler) def disk_controller(pid, limitation, period): while process_alive(pid): s = dir_size("./") if s > limitation: # Kill the process print("Reached disk memory limit of {}B, killing process {}".format(limitation, pid)) os.kill(pid, signal.SIGINT) time.sleep(period) os._exit(0) def activate_disk_limitation(pid, limitation): if limitation: checker = multiprocessing.Process(target=disk_controller, args=(pid, limitation, disk_checking_period)) checker.start() return checker else: return None set_handlers() cmd = args[0] if logger: logger.debug('Execute:\n{0}{1}{2}'.format(cmd, '' if len(args) == 1 else ' ', ' '.join('"{0}"'.format(arg) for arg in args[1:]))) p = subprocess.Popen(args, env=env, stderr=subprocess.PIPE, cwd=cwd, preexec_fn=os.setsid) disk_checker = activate_disk_limitation(p.pid, disk_limitation) err_q = StreamQueue(p.stderr, 'STDERR', True) err_q.start() # Print to logs everything that is printed to STDOUT and STDERR each timeout seconds. Last try is required to # print last messages queued before command finishes. 
last_try = True while not err_q.finished or last_try: if err_q.traceback: raise RuntimeError( 'STDERR reader thread failed with the following traceback:\n{0}'.format(err_q.traceback)) last_try = not err_q.finished time.sleep(timeout) output = [] while True: line = err_q.get() if line is None: break output.append(line) if output: m = '"{0}" outputted to {1}:\n{2}'.format(cmd, err_q.stream_name, '\n'.join(output)) logger.warning(m) err_q.join() else: p = subprocess.Popen(args, env=env, cwd=cwd, preexec_fn=os.setsid, stderr=stderr, stdout=stdout) disk_checker = activate_disk_limitation(p.pid, disk_limitation) p.wait() if disk_checker: disk_checker.terminate() disk_checker.join() restore_handlers() # Check dir size after a stop if disk_limitation: size = dir_size("./") if size >= disk_limitation: raise RuntimeError("Disk space limitation of {}B is exceeded".format(disk_limitation)) return p.returncode def process_task_results(logger): """ Expect working directory after BenchExec finished its work. Then parse its generated files and read spent resources. :param logger: Logger object. :return: """ logger.debug("Translate benchexec output into our results format") decision_results = { "resources": {} } # Actually there is the only output file, but benchexec is quite clever to add current date to its name. 
solutions = glob.glob(os.path.join("output", "benchmark*results.xml")) if len(solutions) == 0: raise FileNotFoundError("Cannot find any solution generated by BenchExec") for benexec_output in solutions: with open(benexec_output, encoding="utf8") as fp: result = ElementTree.parse(fp).getroot() decision_results["desc"] = '{0}\n{1} {2}'.format(result.attrib.get('generator'), result.attrib.get('tool'), result.attrib.get('version')) run = result.findall("run")[0] for column in run.iter("column"): name, value = [column.attrib.get(name) for name in ("title", "value")] if name == "cputime": match = re.search(r"^(\d+\.\d+)s$", value) if match: decision_results["resources"]["CPU time"] = int(float(match.groups()[0]) * 1000) elif name == "walltime": match = re.search(r"^(\d+\.\d+)s$", value) if match: decision_results["resources"]["wall time"] = int(float(match.groups()[0]) * 1000) elif name == "memUsage": decision_results["resources"]["memory size"] = int(value) elif name == "exitcode": decision_results["exit code"] = int(value) elif name == "status": decision_results["status"] = str(value) return decision_results def submit_task_results(logger, server, scheduler_type, identifier, decision_results, solution_path, speculative=False): """ Pack output directory prepared by BenchExec and prepare report archive with decision results and upload it to the server. :param logger: Logger object. :param server: server.AbstractServer object. :param scheduler_type: Scheduler type. :param identifier: Task identifier. :param decision_results: Dictionary with decision results and measured resources. :param solution_path: Path to the directory with solution files. :param speculative: Do not upload solution to Bridge. 
    :return: None
    """
    # Persist the raw decision results next to the solution directory.
    results_file = os.path.join(solution_path, "decision results.json")
    logger.debug("Save decision results to the disk: {}".format(os.path.abspath(results_file)))
    with open(results_file, "w", encoding="utf8") as fp:
        json.dump(decision_results, fp, ensure_ascii=False, sort_keys=True, indent=4)

    # Bundle the results JSON plus everything under "output/" into one archive.
    results_archive = os.path.join(solution_path, 'decision result files.zip')
    logger.debug("Save decision results and files to the archive: {}".format(os.path.abspath(results_archive)))
    with open(results_archive, mode='w+b', buffering=0) as fp:
        with zipfile.ZipFile(fp, mode='w', compression=zipfile.ZIP_DEFLATED) as zfp:
            zfp.write(os.path.join(solution_path, "decision results.json"), "decision results.json")
            for dirpath, dirnames, filenames in os.walk(os.path.join(solution_path, "output")):
                for filename in filenames:
                    # Store each output file under its path relative to the solution dir.
                    zfp.write(os.path.join(dirpath, filename),
                              os.path.join(os.path.relpath(dirpath, solution_path), filename))
            # Flush the archive to disk before handing the path to the server.
            os.fsync(zfp.fp)

    if not speculative:
        ret = server.submit_solution(identifier, decision_results, results_archive)
    else:
        # Speculative solutions are recorded locally but never uploaded.
        ret = True
        logger.info("Do not upload speculative solution")
    kv_upload_solution(logger, identifier, scheduler_type, decision_results)
    return ret


def extract_cpu_cores_info():
    """
    Read /proc/cpuinfo to get information about cores and virtual cores.

    :return: {int(core id) -> [int(virtual core id), ...]}
    """
    data = {}
    with open('/proc/cpuinfo', encoding='utf8') as fp:
        # "processor" lines precede the "core id" line of the same logical CPU,
        # so remember the last processor number seen.
        current_vc = None
        for line in fp.readlines():
            vc = re.match(r'processor\s*:\s*(\d+)', line)
            pc = re.match(r'core\sid\s*:\s*(\d+)', line)
            if vc:
                current_vc = int(vc.group(1))
            if pc:
                pc = int(pc.group(1))
                if pc in data:
                    data[pc].append(current_vc)
                else:
                    data[pc] = [current_vc]
    return data


def __converter(value, table, kind, outunit):
    """
    Convert a value between units using the given translation table.

    :param value: Given value as an integer, float or a string with units or without them.
    :param table: Table mapping unit names to their size in the base unit.
    :param kind: Kind of units (e.g. 'memory', 'time'), used in error messages.
    :param outunit: Desired output unit, '' - means base.
    :return: Return the obtained value and the string of the value with units.
    """
    if isinstance(value, str):
        regex = re.compile("([0-9.]+)([a-zA-Z]*)$")
        if not regex.search(value):
            raise ValueError("Cannot parse string to extract the value and units: {!r}".format(value))
        else:
            value, inunit = regex.search(value).groups()
    else:
        # Plain numbers are assumed to already be in the base unit.
        inunit = ''

    # Check that both units are known.
    for v in (inunit, outunit):
        if v not in table:
            raise ValueError("Get unknown {} unit {!r}".format(kind, v))

    # Convert the input into the base unit first.
    value_in_base = float(value) * table[inunit]
    # Then convert the base-unit value into the desired output unit.
    value_in_out = value_in_base / table[outunit]

    # Round for readability: prefer an int when close enough, else two decimals.
    if outunit != '':
        fvalue = round(float(value_in_out), 2)
        ivalue = int(round(float(value_in_out), 0))
        if abs(fvalue - ivalue) < 0.1:
            value_in_out = ivalue
        else:
            value_in_out = fvalue
    else:
        value_in_out = int(value_in_out)

    return value_in_out, "{}{}".format(value_in_out, outunit)


def memory_units_converter(num, outunit=''):
    """
    Translate memory units.

    :param num: Given value as an integer, float or a string with units or without them.
    :param outunit: Desired output unit, '' - means Bytes.
    :return: Return the obtained value and the string of the value with units.
    """
    units_in_bytes = {
        '': 1,
        "B": 1,
        "KB": 10 ** 3,
        "MB": 10 ** 6,
        "GB": 10 ** 9,
        "TB": 10 ** 12,
        "KiB": 2 ** 10,
        "MiB": 2 ** 20,
        "GiB": 2 ** 30,
        "TiB": 2 ** 40,
    }

    return __converter(num, units_in_bytes, 'memory', outunit)


def time_units_converter(num, outunit=''):
    """
    Translate time units.

    :param num: Given value as an integer, float or a string with units or without them.
    :param outunit: Desired output unit, '' - means seconds.
    :return: Return the obtained value and the string of the value with units.
    """
    units_in_seconds = {
        '': 1,
        "s": 1,
        "min": 60,
        "h": 60 ** 2
    }

    return __converter(num, units_in_seconds, 'time', outunit)


def kv_upload_solution(logger, identifier, scheduler_type, dataset):
    """
    Upload solution data to the controller key-value storage (Consul).

    :param logger: Logger object.
    :param identifier: Task identifier.
    :param scheduler_type: Scheduler type.
    :param dataset: Data to save about the solution. This should be dictionary.
    :return: None
    """
    key = 'solutions/{}/{}'.format(scheduler_type, identifier)
    session = consulate.Session()
    try:
        session.kv[key] = json.dumps(dataset)
        return
    except (AttributeError, KeyError):
        # Best-effort: a missing/unreachable KV store must not fail the caller.
        logger.warning("Cannot save key {!r} to key-value storage".format(key))


def kv_get_solution(logger, scheduler_type, identifier):
    """
    Retrieve solution data from the controller key-value storage (Consul).

    :param logger: Logger object.
    :param scheduler_type: Type of the scheduler to avoid races.
    :param identifier: Task identifier.
    :return: The stored solution dict, or None if the key is missing or the store is unreachable.
    """
    key = 'solutions/{}/{}'.format(scheduler_type, identifier)
    session = consulate.Session()
    try:
        return json.loads(session.kv[key])
    except (AttributeError, KeyError) as err:
        logger.warning("Cannot obtain key {!r} from key-value storage: {!r}".format(key, err))


def kv_clear_solutions(logger, scheduler_type, identifier=None):
    """
    Remove solution data from the controller key-value storage (Consul).

    :param logger: Logger object.
    :param scheduler_type: Type of the scheduler to avoid races.
    :param identifier: Task identifier; if None, drop all solutions of this scheduler type.
    :return: None
    """
    try:
        session = consulate.Session()
        if isinstance(identifier, str):
            session.kv.delete('solutions/{}/{}'.format(scheduler_type, identifier), recurse=True)
        else:
            session.kv.delete('solutions/{}'.format(scheduler_type), recurse=True)
    except (AttributeError, KeyError):
        logger.warning("Key-value storage is inaccessible")
# platform = multi_platform_rhel,multi_platform_fedora,multi_platform_ol

# Force SHA512 password hashing in /etc/login.defs: rewrite any existing
# ENCRYPT_METHOD line in place, or append one (preceded by a blank line)
# when the file does not define it yet.
LOGIN_DEFS=/etc/login.defs
if grep -q '^ENCRYPT_METHOD' "$LOGIN_DEFS"; then
    sed -i 's/^ENCRYPT_METHOD.*/ENCRYPT_METHOD SHA512/g' "$LOGIN_DEFS"
else
    printf '\nENCRYPT_METHOD SHA512\n' >> "$LOGIN_DEFS"
fi
#!/bin/sh
# Execute the project test suite inside the pipenv-managed environment.
#   -x : stop at the first failing or erroring test
#   -s : disable output capture so all diagnostic output is shown
PYTEST_FLAGS="-x -s"
pipenv run pytest $PYTEST_FLAGS
#!/bin/sh gunicorn --chdir app app:app -w 16 --threads 4 -b ui:443
#!/bin/bash
# Provisioning script for a Vagrant base box: passwordless sudo, the public
# Vagrant SSH key, faster SSH, Chef, quiet boot, and disk zeroing so the
# exported image compresses well.

# passwordless sudo
echo "%sudo ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers

# public ssh key for vagrant user (the well-known insecure Vagrant keypair)
mkdir /home/vagrant/.ssh
wget -O /home/vagrant/.ssh/authorized_keys "https://raw.githubusercontent.com/mitchellh/vagrant/master/keys/vagrant.pub"
chmod 755 /home/vagrant/.ssh
chmod 644 /home/vagrant/.ssh/authorized_keys
chown -R vagrant:vagrant /home/vagrant/.ssh

# speed up ssh (skip reverse DNS lookups on connect)
echo "UseDNS no" >> /etc/ssh/sshd_config

# Install chef from omnibus
curl -L https://www.getchef.com/chef/install.sh | bash

# display grub timeout and login promt after boot
sed -i \
    -e "s/quiet splash//" \
    -e "s/GRUB_TIMEOUT=[0-9]/GRUB_TIMEOUT=0/" \
    /etc/default/grub
update-grub

# clean up
apt-get clean

# Zero free space to aid VM compression (dd exits non-zero when the disk
# fills; the zero file is then removed)
dd if=/dev/zero of=/EMPTY bs=1M
rm -f /EMPTY
package com.me.jdbc.app; import java.sql.Connection; import java.sql.PreparedStatement; import java.util.Scanner; import com.me.jdbc.utils.JDBCUtils; import org.junit.Test; //课后练习1 public class Exer1Test { public static void main(String[] args) { Scanner scanner = new Scanner(System.in); System.out.print("请输入用户名:"); String name = scanner.next(); System.out.print("请输入邮箱:"); String email = scanner.next(); System.out.print("请输入生日:"); String birthday = scanner.next();//'1992-09-08' String sql = "insert into customers(name,email,birth)values(?,?,?)"; int insertCount = update(sql,name,email,birthday); if(insertCount > 0){ System.out.println("添加成功"); }else{ System.out.println("添加失败"); } } // 通用的增删改操作 public static int update(String sql, Object... args) {// sql中占位符的个数与可变形参的长度相同! Connection conn = null; PreparedStatement ps = null; try { // 1.获取数据库的连接 conn = JDBCUtils.getConnection(); // 2.预编译sql语句,返回PreparedStatement的实例 ps = conn.prepareStatement(sql); // 3.填充占位符 for (int i = 0; i < args.length; i++) { ps.setObject(i + 1, args[i]);// 小心参数声明错误!! } // 4.执行 /* * ps.execute(): * 如果执行的是查询操作,有返回结果,则此方法返回true; * 如果执行的是增、删、改操作,没有返回结果,则此方法返回false. */ //方式一: // return ps.execute(); //方式二: return ps.executeUpdate(); } catch (Exception e) { e.printStackTrace(); } finally { // 5.资源的关闭 JDBCUtils.closeResource(conn, ps); } return 0; } }
#!/usr/bin/env bash
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Generates objc docs for Flutter iOS libraries.

# Locate a built umbrella header; requires at least one ios_* build under ../out.
FLUTTER_UMBRELLA_HEADER=$(find ../out -maxdepth 4 -type f -name Flutter.h | grep 'ios_' | head -n 1)

if [[ ! -f "$FLUTTER_UMBRELLA_HEADER" ]]
then
    echo "Error: This script must be run at the root of the Flutter source tree with at least one built Flutter.framework in ../out/ios*/Flutter.framework."
    exit 1
fi

# The single positional argument is the docs output directory.
if [[ $# -eq 0 ]]
then
    echo "Error: Argument specifying output directory required."
    exit 1
fi

# If GEM_HOME is set, prefer using its copy of jazzy.
# LUCI will put jazzy here instead of on the path.
if [[ -n "${GEM_HOME}" ]]
then
    PATH="${GEM_HOME}/bin:$PATH"
fi

# Use iPhoneSimulator SDK
# See: https://github.com/realm/jazzy/issues/791
jazzy \
    --objc\
    --sdk iphonesimulator\
    --clean\
    --author Flutter Team\
    --author_url 'https://flutter.io'\
    --github_url 'https://github.com/flutter'\
    --github-file-prefix 'http://github.com/flutter/engine/blob/master'\
    --module-version 1.0.0\
    --xcodebuild-arguments --objc,"$FLUTTER_UMBRELLA_HEADER",--,-x,objective-c,-isysroot,"$(xcrun --show-sdk-path --sdk iphonesimulator)",-I,"$(pwd)"\
    --module Flutter\
    --root-url https://docs.flutter.io/objc/\
    --output "$1"

# Sanity-check the generated class pages against the expected public surface.
EXPECTED_CLASSES="FlutterAppDelegate.html
FlutterBasicMessageChannel.html
FlutterCallbackCache.html
FlutterCallbackInformation.html
FlutterDartProject.html
FlutterEngine.html
FlutterError.html
FlutterEventChannel.html
FlutterHeadlessDartRunner.html
FlutterMethodCall.html
FlutterMethodChannel.html
FlutterPluginAppLifeCycleDelegate.html
FlutterStandardMessageCodec.html
FlutterStandardMethodCodec.html
FlutterStandardReader.html
FlutterStandardReaderWriter.html
FlutterStandardTypedData.html
FlutterStandardWriter.html
FlutterViewController.html"

ACTUAL_CLASSES=$(ls "$1/Classes" | sort)

if [[ $EXPECTED_CLASSES != $ACTUAL_CLASSES ]]; then
    echo "Expected classes did not match actual classes"
    echo
    diff <(echo "$EXPECTED_CLASSES") <(echo "$ACTUAL_CLASSES")
    exit -1
fi
// Demo: JSON.parse with a reviver that maps the numbers 0/1 back to booleans.
var jsonobj = '{"test" : "valeu1", "test2" : 3.44, "test3" : 0}';
var obj = JSON.parse(jsonobj, function (key, value) {
    if (typeof value == 'number') {
        if (value == 0)
            value = false;
        else if (value == 1) {
            value = true;
        }
    }
    return value;
});
alert(obj.test3); // prints false

// Replacer for JSON.stringify: serialize booleans as the numbers 0/1.
function convertBoolToNums(key, value) {
    if (typeof value == 'boolean') {
        if (value)
            value = 1;
        else
            value = 0;
    }
    return value;
}

window.onload = function () {
    // BUG FIX: the original passed a JSON *string* to JSON.stringify, so the
    // replacer only ever saw a single string value and test3 was never
    // converted. Stringify an actual object so the replacer visits each
    // property.
    var obj = { "test": "valeu1", "test2": 3.44, "test3": false };
    var jsonobj = JSON.stringify(obj, convertBoolToNums, 3);
    alert(jsonobj); // test3 is now serialized as 0
};
#!/bin/bash
## Resolve this script's absolute directory, publish it as PRJ_DIR,
## then load the Zephyr environment configuration relative to it.
script_file=$(realpath $0)
export PRJ_DIR=${script_file%/*}
source $PRJ_DIR/../3rd-party/zephyr/zephyr-env.sh
<reponame>smagill/opensphere-desktop
package io.opensphere.geopackage.importer;

import java.io.File;
import java.util.Set;

import org.apache.log4j.Logger;

import io.opensphere.core.data.DataRegistry;
import io.opensphere.core.data.util.DataModelCategory;
import io.opensphere.core.data.util.DefaultQuery;
import io.opensphere.core.importer.FileOrURLImporter;
import io.opensphere.core.util.collections.New;

/**
 * Ensures that we have all data within the data registry for the geopackage
 * files that we have imported. If we do not, we remove what we do have from the
 * data registry and re-import the file.
 */
public class GeoPackageDataEnsurer
{
    /**
     * Used to log messages.
     */
    private static final Logger LOGGER = Logger.getLogger(GeoPackageDataEnsurer.class);

    /**
     * The geopackage importer used to re-import corrupt geopackages.
     */
    private final FileOrURLImporter myImporter;

    /**
     * Contains imported geopackage data.
     */
    private final DataRegistry myRegistry;

    /**
     * Constructs a new data ensurer.
     *
     * @param importer The geopackage importer used to re-import corrupt
     *            geopackages.
     * @param registry Contains imported geopackage data.
     */
    public GeoPackageDataEnsurer(FileOrURLImporter importer, DataRegistry registry)
    {
        myRegistry = registry;
        myImporter = importer;
    }

    /**
     * Ensures that we have all data within the data registry for the geopackage
     * files that we have imported. If we do not, we remove what we do have from
     * the data registry and re-import the file.
     *
     * @param imports The set of file paths we have imported; paths whose file
     *            no longer exists on disk are skipped.
     */
    public void ensureData(Set<String> imports)
    {
        for (String importFile : imports)
        {
            LOGGER.info("Verifying geopackage file is cached for " + importFile);
            File theFile = new File(importFile);
            if (theFile.exists())
            {
                // The file path is the registry source; a local query with no
                // further constraints tells us whether any models exist for it.
                DataModelCategory category = new DataModelCategory(importFile, null, null);
                DefaultQuery layerQuery = new DefaultQuery(category, New.collection());
                long[] ids = myRegistry.performLocalQuery(layerQuery);
                if (ids == null || ids.length <= 0)
                {
                    // Nothing cached: clear any partial state and re-import.
                    LOGGER.info("Could not find geopackage file in cache reimporting " + importFile);
                    myRegistry.removeModels(category, false);
                    myImporter.importFile(theFile, null);
                }
                else
                {
                    LOGGER.info("Geopackage file is cached for " + importFile);
                }
            }
        }
    }
}
#!/bin/sh
# Builds the iOS vcx.framework wrapper around a prebuilt combined libvcx
# static library, one architecture at a time, lipo-merging each build into a
# universal binary, then packages the framework (headers included, but NOT the
# static lib itself) into a timestamped zip under ~/IOSBuilds.

set -e
source ./shared.functions.sh

START_DIR=$PWD
WORK_DIR=$START_DIR/../../../../../.macosbuild
mkdir -p $WORK_DIR
WORK_DIR=$(abspath "$WORK_DIR")
VCX_SDK=$START_DIR/../../../../..
VCX_SDK=$(abspath "$VCX_SDK")

# $1 = name of the combined static library to wrap; $2 (optional) =
# comma-separated list of architectures, defaulting to all five.
COMBINED_LIB=$1
DATETIME=$(date +"%Y%m%d.%H%M")

IOS_ARCHS="arm64,armv7,armv7s,i386,x86_64"
if [ ! -z "$2" ]; then
    IOS_ARCHS=$2
fi
bkpIFS="$IFS"
IFS=',()][' read -r -a archs <<<"${IOS_ARCHS}"
echo "Building vcx.${COMBINED_LIB} wrapper for architectures: ${archs[@]}"    ##Or printf "%s\n" ${array[@]}
IFS="$bkpIFS"

cd $VCX_SDK/vcx/wrappers/ios/vcx
# The xcode project links lib/libvcx.a, so drop the combined lib in its place.
#mv lib/libvcx.a lib/libvcx.a.original
cp -v lib/${COMBINED_LIB}.a lib/libvcx.a

xcodebuild -project vcx.xcodeproj -scheme vcx -configuration Debug CONFIGURATION_BUILD_DIR=. clean

rm -rf vcx.framework.previousbuild

# Build per-arch, choosing the simulator or device SDK as appropriate, and
# lipo each fresh slice together with the previous build's accumulated binary.
IPHONE_SDK=iphoneos
for arch in ${archs[*]}
do
    rm -rf vcx.framework
    if [ "${arch}" = "i386" ] || [ "${arch}" = "x86_64" ]; then
        # This sdk supports i386 and x86_64
        IPHONE_SDK=iphonesimulator
    elif [ "${arch}" = "armv7" ] || [ "${arch}" = "armv7s" ] || [ "${arch}" = "arm64" ]; then
        # This sdk supports armv7, armv7s, and arm64
        IPHONE_SDK=iphoneos
    fi
    xcodebuild -project vcx.xcodeproj -scheme vcx -configuration Debug -arch ${arch} -sdk ${IPHONE_SDK} CONFIGURATION_BUILD_DIR=. build

    if [ -d "./vcx.framework.previousbuild" ]; then
        lipo -create -output combined.ios.vcx vcx.framework/vcx vcx.framework.previousbuild/vcx
        mv combined.ios.vcx vcx.framework/vcx
        rm -rf vcx.framework.previousbuild
    fi
    cp -rp vcx.framework vcx.framework.previousbuild
done

#mv lib/libvcx.a.original lib/libvcx.a
rm lib/libvcx.a
rm -rf vcx.framework.previousbuild

mkdir -p vcx.framework/lib
# IMPORTANT: DO NOT PUT THE libvcx.a FILE INSIDE THE cocoapod AT ALL!!!!!
#cp -v lib/${COMBINED_LIB}.a vcx.framework/lib/libvcx.a

# Ship the public headers with the framework.
mkdir -p vcx.framework/Headers
cp -v ConnectMeVcx.h vcx.framework/Headers
cp -v include/libvcx.h vcx.framework/Headers
cp -v vcx/vcx.h vcx.framework/Headers

# Stage the framework plus build-provenance logs and zip it up.
if [ -d $VCX_SDK/vcx/wrappers/ios/vcx/tmp ]; then
    rm -rf $VCX_SDK/vcx/wrappers/ios/vcx/tmp
fi
mkdir -p $VCX_SDK/vcx/wrappers/ios/vcx/tmp/vcx/
cp -rvp vcx.framework $VCX_SDK/vcx/wrappers/ios/vcx/tmp/vcx/
cd $VCX_SDK/vcx/wrappers/ios/vcx/tmp
cp $WORK_DIR/evernym.vcx-sdk.git.commit.log $VCX_SDK/vcx/wrappers/ios/vcx/tmp/vcx/ || true
cp $WORK_DIR/hyperledger.indy-sdk.git.commit.log $VCX_SDK/vcx/wrappers/ios/vcx/tmp/vcx/ || true
zip -r vcx.${COMBINED_LIB}_${DATETIME}_universal.zip vcx
mkdir -p ~/IOSBuilds/${COMBINED_LIB}
cp $VCX_SDK/vcx/wrappers/ios/vcx/tmp/vcx.${COMBINED_LIB}_${DATETIME}_universal.zip ~/IOSBuilds/${COMBINED_LIB}
echo vcx.${COMBINED_LIB}_${DATETIME}_universal.zip

#curl --insecure -u normjarvis -X POST -F file=@./vcx.${COMBINED_LIB}_${DATETIME}_universal.zip https://kraken.corp.evernym.com/repo/ios/upload
# Download the file at https://repo.corp.evernym.com/filely/ios/vcx.${COMBINED_LIB}_${DATETIME}_universal.zip
#hyperledger.indy-sdk.git.commit.logsudo cp ./vcx.${COMBINED_LIB}_${DATETIME}_universal.zip /usr/local/var/www/download/ios
# Print the MAC address of every network interface reported by ifconfig.
ifconfig | awk '/ether /{ print $2 }'
# Build the library jar (skipping checkstyle, tests and javadoc for speed),
# then compile and run each example program against the built artifacts.
cd .. && mvn -Dcheckstyle.skip=true -Dmaven.test.skip=true -Dmaven.javadoc.skip=true package && cd examples

echo "Basic"
javac -cp "../target/*" Basic.java && java -cp ../target/*:. Basic
echo
echo "Bitmap64"
javac -cp "../target/*" Bitmap64.java && java -cp ../target/*:. Bitmap64
echo
echo "Running CompressionResults"
javac -cp "../target/*" CompressionResults.java && java -cp ../target/*:. CompressionResults
echo
echo "Running SerializeToByteBufferExample"
javac -cp "../target/*" SerializeToByteBufferExample.java && java -cp ../target/*:. SerializeToByteBufferExample
echo
echo "Running ImmutableRoaringBitmapExample"
javac -cp "../target/*" ImmutableRoaringBitmapExample.java && java -cp ../target/*:. ImmutableRoaringBitmapExample
echo
echo "Running MemoryMappingExample"
javac -cp "../target/*" MemoryMappingExample.java && java -cp ../target/*:. MemoryMappingExample
echo
echo "Serializing to byte array"
javac -cp "../target/*":. SerializeToByteArrayExample.java && java -cp ../target/*:. SerializeToByteArrayExample
echo
echo "ForEach example"
javac -cp "../target/*":. ForEachExample.java && java -cp ../target/*:. ForEachExample
echo
echo "Very large example"
javac -cp "../target/*":. VeryLargeBitmap.java && java -cp ../target/*:. VeryLargeBitmap
echo
echo
echo "paging"
javac -cp "../target/*":. PagedIterator.java && java -cp ../target/*:. PagedIterator
echo
echo "Serializing to file "
javac -cp "../target/*" SerializeToDiskExample.java && java -cp ../target/*:. SerializeToDiskExample

# Clean up artifacts produced by the example runs and compilation.
rm bitmapwithoutruns.bin bitmapwithruns.bin
rm *.class
#!/usr/bin/env python
# NAOqi service (Python 2) that shows the app-launcher web view on the
# Pepper tablet and, when enabled, updates installed apps via ALStore.
import qi
import sys
import json
import os
import time


class PepperAppLauncher:
    # Promise resolved once all required NAOqi services are reachable.
    services_connected = None

    def __init__(self, application):
        # Getting a session that will be reused everywhere
        self.application = application
        self.session = application.session
        self.service_name = self.__class__.__name__
        # Getting a logger. Logs will be in /var/log/naoqi/servicemanager/{application id}.{service name}
        self.logger = qi.Logger(self.service_name)
        # Do some initializations before the service is registered to NAOqi
        self.logger.info("Initializing...")
        self.connect_services()
        self.logger.info("Initialized!")

    def mycallback(self, value):
        # Debug helper: print any value delivered by a subscription callback.
        print "val:", value

    @qi.nobind
    def start_service(self):
        """Load config, show the launcher page, and run the auto-update check."""
        self.logger.info("Starting service...")
        self.loadAppNames()
        self.loadSettingAutoUpdate()
        self.show_local('index.html')
        self.logger.info("Started!")
        print("started service")

        self.memory = self.session.service('ALMemory')
        self.tabletService = self.session.service("ALTabletService")
        self.ALStore = self.session.service('ALStore')

        # Publish update-progress flags for the web UI to poll.
        self.memory.insertData("WelboAPPL/apps_updated", True)
        self.memory.insertData("WelboAPPL/checked_store", False)
        autoUpdate = self.memory.getData("WelboAPPL/auto_update")
        wifiStatus = self.tabletService.getWifiStatus()
        appsStatus = self.ALStore.status()
        self.memory.insertData("WelboAPPL/checked_store", True)
        # The next two lines are handy for debugging
        # for app in appsStatus:
        #     print (str(app['status']) + " " + str(app['uuid']))
        self.logger.info(autoUpdate + " " + wifiStatus)

        # Update only when some app is out of date (status != 1), auto-update
        # is enabled (stored as the string "True"), and wifi is connected.
        if (any(app.get('status', None) != 1 for app in appsStatus) and autoUpdate == "True" and wifiStatus == "CONNECTED"):
            print "Found an update, will update the apps!"
            self.memory.insertData("WelboAPPL/apps_updated", False)
            self.ALStore.update()
            self.memory.insertData("WelboAPPL/apps_updated", True)
            print("Evertyhing should now be up to date")

    @qi.nobind
    def stop_service(self):
        # probably useless, unless one method needs to stop the service from inside.
        # external naoqi scripts should use ALServiceManager.stopService if they need to stop it.
        self.logger.info("Stopping service...")
        self.application.stop()
        self.logger.info("Stopped!")

    @qi.nobind
    def connect_services(self):
        # connect all services required by your module
        # done in async way over 30s,
        # so it works even if other services are not yet ready when you start your module
        # this is required when the service is autorun as it may start before other modules...
        self.logger.info('Connecting services...')
        self.services_connected = qi.Promise()
        services_connected_fut = self.services_connected.future()

        def get_services():
            try:
                self.memory = self.session.service('ALMemory')
                self.ts = self.session.service("ALTabletService")
                self.logger.info('All services are now connected')
                self.services_connected.setValue(True)
            except RuntimeError as e:
                self.logger.warning('Still missing some service:\n {}'.format(e))

        get_services_task = qi.PeriodicTask()
        get_services_task.setCallback(get_services)
        get_services_task.setUsPeriod(int(2*1000000))  # check every 2s
        get_services_task.start(True)
        try:
            services_connected_fut.value(30*1000)  # timeout = 30s
            get_services_task.stop()
        except RuntimeError:
            get_services_task.stop()
            self.logger.error('Failed to reach all services after 30 seconds')
            raise RuntimeError

    ### Utility functions ###
    def loadAppNames(self):
        """Publish the app list from config to ALMemory, falling back to error.json."""
        # open file
        path = "config/app_config.json"
        try:
            with open(path) as data_file:
                data = json.load(data_file)
                self.memory.insertData("WelboAPPL/app_data", json.dumps(data))
        except:
            # Any failure (missing/invalid config) falls back to the error page data.
            path = "config/error.json"
            with open(path) as data_file:
                data = json.load(data_file)
                self.memory.insertData("WelboAPPL/app_data", json.dumps(data))

    def loadSettingAutoUpdate(self):
        """Copy the autoUpdate preference into ALMemory (stored as a string)."""
        preferenceManager = self.session.service("ALPreferenceManager")
        autoUpdate = str(preferenceManager.getValue("com.welbo.config", "autoUpdate"))
        self.memory.insertData("WelboAPPL/auto_update", autoUpdate)

    def show_local(self, page):
        # 198.18.0.1 is the robot-local address the tablet uses to reach NAOqi apps.
        self.show('http://198.18.0.1/apps/pepper-app-launcher-service/'+page)

    def show(self, page):
        print("Showing a new page")
        self.ts.showWebview(page)
    ### ################# ###

    def cleanup(self):
        # called when your module is stopped
        self.logger.info("Cleaning...")
        # do something
        self.logger.info("End!")


if __name__ == "__main__":
    # with this you can run the script for tests on remote robots
    # run : python my_super_service.py --qi-url 192.168.3.11
    app = qi.Application(sys.argv)
    app.start()
    service_instance = PepperAppLauncher(app)
    service_id = app.session.registerService(service_instance.service_name, service_instance)
    service_instance.start_service()
    app.run()
    service_instance.cleanup()
    app.session.unregisterService(service_id)
    #service_instance.show_local('index.html')
import logging
from sqlalchemy import create_engine, MetaData, Table, Column, String, Date, LargeBinary, and_
import random

from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor


class UserDb:
    """
    UserDb provides a set of helper functions over SQLAlchemy
    to handle db operations for userservice
    """

    def __init__(self, uri, logger=logging):
        self.engine = create_engine(uri)
        self.logger = logger
        self.users_table = Table(
            'users',
            MetaData(self.engine),
            Column('userid', String, primary_key=True),
            Column('email', String, unique=True, nullable=False),
            Column('passhash', LargeBinary, nullable=False),
            Column('timezone', String, nullable=False),
            Column('registereddate', Date, nullable=True),
        )

        # Set up tracing autoinstrumentation for sqlalchemy
        SQLAlchemyInstrumentor().instrument(
            engine=self.engine,
            service='users',
        )

    def add_user(self, user):
        """Add a user to the database.

        Params: user - a key/value dict of attributes describing a new user
                       {'email': email, 'password': password, ...}
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.users_table.insert().values(user)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)

    def generate_userid(self):
        """Generates a globally unique numeric userid."""
        self.logger.debug('Generating an account ID')
        userid = None
        with self.engine.connect() as conn:
            while userid is None:
                # BUG FIX: randint() needs integer bounds; the original float
                # bounds (1e9, 1e10 - 1) raise TypeError on Python 3.12+.
                userid = str(random.randint(10 ** 9, 10 ** 10 - 1))
                statement = self.users_table.select().where(
                    self.users_table.c.userid == userid
                )
                self.logger.debug('QUERY: %s', str(statement))
                result = conn.execute(statement).first()
                # If there already exists an account, try again.
                if result is not None:
                    userid = None
                    self.logger.debug(
                        'RESULT: account ID already exists. Trying again')
        self.logger.debug('RESULT: account ID generated.')
        return userid

    def get_user(self, email):
        """Get user data for the specified email.

        Params: email - the email of the user
        Return: a key/value dict of user attributes,
                {'email': email, 'userid': userid, ...}
                or None if that user does not exist
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.users_table.select().where(
            self.users_table.c.email == email)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            result = conn.execute(statement).first()
        self.logger.debug('RESULT: fetched user data for %s', email)
        return dict(result) if result is not None else None


class BudgetDb:
    """
    BudgetDb provides a set of helper functions over SQLAlchemy
    to handle db operations for userservice
    """

    def __init__(self, uri, logger=logging):
        self.engine = create_engine(uri)
        self.logger = logger
        self.budgets_table = Table(
            'budgets',
            MetaData(self.engine),
            Column('budgetid', String, primary_key=True),
            Column('displayname', String, nullable=False),
            Column('budgetnotes', String, nullable=True),
            Column('accessdate', Date, nullable=False),
            Column('userid', String, nullable=False),
        )

        # Set up tracing autoinstrumentation for sqlalchemy
        SQLAlchemyInstrumentor().instrument(
            engine=self.engine,
            service='budgets',
        )

    def add_budget(self, budget):
        """Add a budget to the database.

        Params: budget - a key/value dict of attributes describing a new budget
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.budgets_table.insert().values(budget)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)

    def generate_budgetid(self):
        """Generates a globally unique numeric budgetid."""
        self.logger.debug('Generating an account ID')
        budgetid = None
        with self.engine.connect() as conn:
            while budgetid is None:
                # BUG FIX: integer bounds for randint (see UserDb.generate_userid).
                budgetid = str(random.randint(10 ** 9, 10 ** 10 - 1))
                statement = self.budgets_table.select().where(
                    self.budgets_table.c.budgetid == budgetid
                )
                self.logger.debug('QUERY: %s', str(statement))
                result = conn.execute(statement).first()
                # If there already exists an account, try again.
                if result is not None:
                    budgetid = None
                    self.logger.debug(
                        'RESULT: account ID already exists. Trying again')
        self.logger.debug('RESULT: account ID generated.')
        return budgetid

    def get_budget(self, budgetid):
        """Get budget data for the specified budgetid.

        Params: budgetid - id of the budget to fetch
        Return: a key/value dict of budget attributes, or None if not found
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.budgets_table.select().where(
            self.budgets_table.c.budgetid == budgetid)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            result = conn.execute(statement).first()
        self.logger.debug('RESULT: fetched budget data for %s', budgetid)
        return dict(result) if result is not None else None

    def get_budgets(self, userid):
        """Return a list of all budget rows belonging to the given user."""
        statement = self.budgets_table.select().where(
            self.budgets_table.c.userid == userid)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            results = conn.execute(statement)
            self.logger.debug('RESULT: fetched budgets data for %s', userid)
            return [dict(result) for result in results] if results is not None else None

    def delete_budget(self, budgetid):
        """Delete the budget row with the given id.

        Params: budgetid - id of the budget row to remove
        Return: an empty dict (kept for caller compatibility)
        """
        statement = self.budgets_table.delete().where(
            self.budgets_table.c.budgetid == budgetid)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)
        return {}

    def update_budget(self, budget: dict):
        """Update the budget row identified by budget["budgetid"]."""
        statement = self.budgets_table.update().values(budget).where(
            self.budgets_table.c.budgetid == budget["budgetid"])
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)


class CategoryDb:
    """
    CategoryDb provides a set of helper functions over SQLAlchemy
    to handle db operations for userservice
    """

    def __init__(self, uri, logger=logging):
        self.engine = create_engine(uri)
        self.logger = logger
        self.categories_table = Table(
            'categories',
            MetaData(self.engine),
            Column('categoryid', String, primary_key=True),
            Column('displayname', String, nullable=False),
            Column('parentid', String, nullable=True),
            Column('budgetid', String, nullable=False),
        )

        # Set up tracing autoinstrumentation for sqlalchemy
        SQLAlchemyInstrumentor().instrument(
            engine=self.engine,
            service='categories',
        )

    def add_category(self, category):
        """Add a category to the database.

        Params: category - a key/value dict of attributes describing a new category
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.categories_table.insert().values(category)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)

    def generate_categoryid(self):
        """Generates a globally unique numeric categoryid."""
        self.logger.debug('Generating an account ID')
        categoryid = None
        with self.engine.connect() as conn:
            while categoryid is None:
                # BUG FIX: integer bounds for randint (see UserDb.generate_userid).
                categoryid = str(random.randint(10 ** 9, 10 ** 10 - 1))
                statement = self.categories_table.select().where(
                    self.categories_table.c.categoryid == categoryid
                )
                self.logger.debug('QUERY: %s', str(statement))
                result = conn.execute(statement).first()
                # If there already exists an account, try again.
                if result is not None:
                    categoryid = None
                    self.logger.debug(
                        'RESULT: account ID already exists. Trying again')
        self.logger.debug('RESULT: account ID generated.')
        return categoryid

    def get_category(self, categoryid):
        """Get category data for the specified categoryid.

        Params: categoryid - id of the category to fetch
        Return: a key/value dict of category attributes, or None if not found
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.categories_table.select().where(
            self.categories_table.c.categoryid == categoryid)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            result = conn.execute(statement).first()
        self.logger.debug('RESULT: fetched budget data for %s', categoryid)
        return dict(result) if result is not None else None

    def get_categories(self, budgetid):
        """Return all top-level categories (no parent) for the given budget."""
        statement = self.categories_table.select().where(
            and_(self.categories_table.c.budgetid == budgetid,
                 self.categories_table.c.parentid == None))
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            results = conn.execute(statement)
            self.logger.debug('RESULT: fetched categories for budgetid %s', budgetid)
            return [dict(result) for result in results] if results is not None else None

    def delete_category(self, categoryid):
        """Delete the category row with the given id.

        Params: categoryid - id of the category row to remove
        Return: an empty dict (kept for caller compatibility)
        """
        # BUG FIX: original referenced self.budgets_table, which does not exist
        # on CategoryDb and raised AttributeError.
        statement = self.categories_table.delete().where(
            self.categories_table.c.categoryid == categoryid)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)
        return {}

    def update_category(self, category: dict):
        """Update the category row identified by category["categoryid"]."""
        # BUG FIX: original used the nonexistent self.budgets_table (crash) and
        # matched on budgetid, which would have updated every category of the
        # budget. Match on the primary key instead.
        statement = self.categories_table.update().values(category).where(
            self.categories_table.c.categoryid == category["categoryid"])
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)


class MonthDb:
    """
    MonthDb provides a set of helper functions over SQLAlchemy
    to handle db operations for userservice
    """

    def __init__(self, uri, logger=logging):
        self.engine = create_engine(uri)
        self.logger = logger
        self.months_table = Table(
            'months',
            MetaData(self.engine),
            Column('monthid', String, primary_key=True),
            Column('budgetid', String, nullable=False),
            Column('month', String, nullable=False),
            Column('note', String, nullable=True),
            Column('income', String, nullable=True),
            Column('budgeted', String, nullable=True),
            Column('activity', String, nullable=True),
            Column('to_be_budgeted', String, nullable=True),
            Column('age_of_money', String, nullable=True),
        )

        # Set up tracing autoinstrumentation for sqlalchemy
        SQLAlchemyInstrumentor().instrument(
            engine=self.engine,
            service='months',
        )

    def add_month(self, month):
        """Add a month to the database.

        Params: month - a key/value dict of attributes describing a new month
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.months_table.insert().values(month)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)

    def generate_monthid(self):
        """Generates a globally unique numeric monthid."""
        self.logger.debug('Generating an account ID')
        monthid = None
        with self.engine.connect() as conn:
            while monthid is None:
                # BUG FIX: integer bounds for randint (see UserDb.generate_userid).
                monthid = str(random.randint(10 ** 9, 10 ** 10 - 1))
                statement = self.months_table.select().where(
                    self.months_table.c.monthid == monthid
                )
                self.logger.debug('QUERY: %s', str(statement))
                result = conn.execute(statement).first()
                # If there already exists an account, try again.
                if result is not None:
                    monthid = None
                    self.logger.debug(
                        'RESULT: account ID already exists. Trying again')
        self.logger.debug('RESULT: account ID generated.')
        return monthid

    def get_month(self, monthid):
        """Get month data for the specified monthid.

        Params: monthid - id of the month to fetch
        Return: a key/value dict of month attributes, or None if not found
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.months_table.select().where(
            self.months_table.c.monthid == monthid)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            result = conn.execute(statement).first()
        self.logger.debug('RESULT: fetched budget data for %s', monthid)
        return dict(result) if result is not None else None

    def get_months(self, budgetid):
        """Return all month rows for the given budget."""
        # BUG FIX: the months table has no 'parentid' column, so the original
        # and_(...) filter raised AttributeError; filter by budgetid alone.
        statement = self.months_table.select().where(
            self.months_table.c.budgetid == budgetid)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            results = conn.execute(statement)
            self.logger.debug('RESULT: fetched budgets data for %s', budgetid)
            return [dict(result) for result in results] if results is not None else None

    def delete_month(self, monthid):
        """Delete the month row with the given id.

        Params: monthid - id of the month row to remove
        Return: an empty dict (kept for caller compatibility)
        """
        # BUG FIX: original referenced self.budgets_table, which does not exist
        # on MonthDb and raised AttributeError.
        statement = self.months_table.delete().where(
            self.months_table.c.monthid == monthid)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)
        return {}

    def update_month(self, month: dict):
        """Update the month row identified by month["monthid"]."""
        # BUG FIX: original used the nonexistent self.budgets_table and matched
        # on budgetid; update via months_table keyed on the primary key.
        statement = self.months_table.update().values(month).where(
            self.months_table.c.monthid == month["monthid"])
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)
from flask import abort


def handle_validation_errors(errors):
    """Abort with HTTP 409 listing every validation error; no-op when empty.

    ``errors`` maps a field name to a list of error message strings.
    """
    if not errors:
        return
    pieces = []
    for field in errors:
        pieces.append(field + ' ')
        for message in errors[field]:
            pieces.append(message + ', ')
        pieces.append('\n')
    abort(409, ''.join(pieces))
public class AverageCalculator {

    /**
     * Computes the arithmetic mean of the given values.
     *
     * @param numbers the values to average; must be non-null and non-empty
     * @return the mean as a double
     * @throws IllegalArgumentException if {@code numbers} is null or empty
     *         (the original silently returned NaN in that case)
     */
    public static double calculateAverage(int[] numbers) {
        if (numbers == null || numbers.length == 0) {
            throw new IllegalArgumentException("numbers must contain at least one value");
        }
        // Accumulate in a long so summing many large ints cannot overflow.
        long sum = 0;
        for (int value : numbers) {
            sum += value;
        }
        return (double) sum / numbers.length;
    }

    public static void main(String[] args) {
        int[] numbers = {1, 2, 3, 4, 5};
        System.out.println("The average of the numbers is: " + calculateAverage(numbers));
    }
}
#!/usr/bin/env bash
## RSS module: installs or updates RSSHub (and, eventually, tt-rss) under
## /usr/share/nginx, wiring RSSHub to redis and a systemd unit.
install_rss(){
    # Continue past individual failures; this installer is best-effort.
    set +e
    cd /usr/share/nginx/
    # PHP database drivers needed by tt-rss.
    apt-get install php7.4-mysql -y
    apt-get install php7.4-pgsql -y
    apt-get install php7.4-sqlite -y
    if [[ -d /usr/share/nginx/RSSHub ]]; then
        # Existing checkout: pull and refresh production dependencies.
        TERM=ansi whiptail --title "安装中" --infobox "更新rsshub中..." 7 68
        cd /usr/share/nginx/RSSHub
        git pull
        npm update
        npm install --production
        npm prune
    else
        # Fresh install: clone, install deps, write config and systemd unit.
        git clone https://github.com/DIYgod/RSSHub.git
        cd /usr/share/nginx/RSSHub
        npm update
        npm install --production
        npm prune
        touch .env
        # RSSHub config: cache in redis via unix socket, local-only listener.
        cat > '.env' << EOF
CACHE_TYPE=redis
#REDIS_URL=redis://127.0.0.1:6379/
REDIS_URL=/var/run/redis/redis.sock
CACHE_EXPIRE=600
LISTEN_INADDR_ANY=0
EOF
        # systemd unit so RSSHub starts at boot and restarts on failure.
        cat > '/etc/systemd/system/rsshub.service' << EOF
[Unit]
Description=Rsshub
Documentation=https://docs.rsshub.app/
After=network.target
Wants=network.target
[Service]
Type=simple
WorkingDirectory=/usr/share/nginx/RSSHub
ExecStart=/bin/bash -c 'npm start'
Restart=on-failure
LimitNOFILE=65536
User=root
Group=root
[Install]
WantedBy=multi-user.target
EOF
        systemctl enable rsshub
        systemctl restart rsshub
    fi
    cd /usr/share/nginx/
    # tt-rss support is not implemented yet; both branches are placeholders.
    if [[ -d /usr/share/nginx/tt-rss/ ]]; then
        TERM=ansi whiptail --title "安装中" --infobox "更新tt-rss中..." 7 68
        echo "dev ing"
    else
        echo "dev ing"
    fi
    cd
}
<filename>lib/edtf-humanize.rb
# Gem entry point shim: requiring "edtf-humanize" loads the real library.
require 'edtf/humanize'
// Number of milliseconds in one day.
var MS_PER_DAY = 1000 * 3600 * 24;

/**
 * Returns the number of days elapsed between two dates.
 *
 * @param {Date} earlier - the start date
 * @param {Date} later - the end date
 * @returns {number} days between the two dates; may be fractional when the
 *   difference is not a whole number of days, and negative when
 *   `later` precedes `earlier`.
 */
function daysBetween(earlier, later) {
    return (later.getTime() - earlier.getTime()) / MS_PER_DAY;
}

var date1 = new Date('1971-11-09');
var date2 = new Date();

// To calculate the no. of days between the fixed date and now
var Difference_In_Days = daysBetween(date1, date2);

// To display the final no. of days (result)
console.log(Difference_In_Days);
// Per-room Screeps factory production targets.
// (Removed stray "<gh_stars>" extraction artifact that made the file invalid.)
//
// Each room maps to a list of [resourceType, amount] pairs; amount may be
// the string "greedy", meaning "keep producing/stockpiling without a cap".
// RESOURCE_* constants are Screeps game globals.
const factoryConfiguration = {
    // Energy the factory keeps in reserve before committing to production.
    reservedEnergy:10000,
    // [type,amount] amount can be greedy
    "W22N25":[[RESOURCE_REDUCTANT,4000],[RESOURCE_OXIDANT,2500],[RESOURCE_UTRIUM_BAR,2500],[RESOURCE_ZYNTHIUM_BAR,2500],[RESOURCE_COMPOSITE,1000],[RESOURCE_WIRE,1000],[RESOURCE_SWITCH,100]],
    "W23N25":[[RESOURCE_KEANIUM_BAR,4000],[RESOURCE_ENERGY,"greedy"],[RESOURCE_CRYSTAL,1000]],
    "W21N24":[[RESOURCE_REDUCTANT,4000],[RESOURCE_ENERGY,"greedy"]],
    "W19N22":[[RESOURCE_PURIFIER,4000],[RESOURCE_ENERGY,"greedy"]],
    "W18N22":[[RESOURCE_LEMERGIUM_BAR,4000]],
    // Simulation room: no production targets.
    "sim":[]
}

module.exports = factoryConfiguration
require 'fileutils'
require 'pathname'

module Teaspoon
  # Exports a Teaspoon test suite to a static, self-contained directory by
  # mirroring the suite's URL with wget and post-processing the download so
  # it can be opened from disk (index.html, fixed relative paths, no query
  # strings in filenames or links).
  class Export
    attr_accessor :output_path

    # options:
    #   :suite       - suite name (required); becomes the output subdirectory
    #   :url         - suite URL to mirror (required)
    #   :output_path - export root; defaults to ./teaspoon-export
    def initialize(options)
      @suite = options.fetch(:suite).to_s
      @url = options.fetch(:url).to_s
      @output_path = File.expand_path(options[:output_path] || 'teaspoon-export')
    end

    # Runs the whole export pipeline inside the suite's output directory.
    # Raises ExportFailure when wget is missing or the download produced
    # no HTML document.
    def execute
      ensure_wget_installed
      create_suite_output_directory
      Dir.chdir(suite_output_path) do
        download_suite
        rename_html_as_index
        update_relative_paths
        remove_hostname_dir
        truncate_query_from_filenames
        truncate_query_from_links
      end
    end

    ExportFailure = Class.new(RuntimeError)

    # Runs a system command with stdout/stderr redirected to /dev/null,
    # restoring both streams afterwards. Returns the command's success flag.
    def self.run_silently(*args)
      out, err = $stdout.clone, $stderr.clone
      [$stdout, $stderr].each { |file| file.reopen('/dev/null') }
      success = system(*args)
      $stdout.reopen out
      $stderr.reopen err
      success
    end

    private

    def ensure_wget_installed
      success = self.class.run_silently 'wget', '--help'
      raise ExportFailure, "wget must be installed to export" unless success
    end

    def suite_output_path
      File.join(@output_path, @suite)
    end

    def create_suite_output_directory
      FileUtils.mkdir_p(suite_output_path)
    end

    # -k: convert links for local viewing, -E: add .html extensions,
    # -p: fetch page requisites, -H: span hosts for assets.
    def download_suite
      self.class.run_silently('wget', '-kEpH', @url)
    end

    # wget names the page after the suite; a static export needs index.html.
    def rename_html_as_index
      suite_html_output = Dir.glob(File.join('**', "#{@suite}.html")).first
      raise ExportFailure, 'wget did not download any html document' if suite_html_output.nil?
      FileUtils.move(suite_html_output, 'index.html')
    end

    def update_relative_paths
      # Assumption: the html file moved up several directories, and no asset was in those directories
      html = File.read('index.html')
      html.gsub!('../', '')
      File.write('index.html', html)
    end

    # Flattens the "hostname/..." directory wget creates into the suite root.
    def remove_hostname_dir
      FileUtils.mv Dir.glob(File.join(suite_output_path, '*/*')), suite_output_path
    end

    # Strips "?query" suffixes wget leaves on downloaded asset filenames.
    def truncate_query_from_filenames
      files_with_query = Dir.glob('**/*\?*')
      files_with_query.each do |file|
        File.rename file, file.sub(/\?.*/, '')
      end
    end

    # Strips percent-encoded query strings (%3F...) from links in index.html
    # so they match the renamed asset files.
    def truncate_query_from_links
      html = File.read('index.html')
      html.gsub!(/%3F[^'"]*/, '')
      File.write('index.html', html)
    end
  end
end
require 'spec_helper'
require 'pry'

# Specs for the EverythingBecomesF core extensions on Integer, String and
# Time. Tables are driven by rspec-parameterized (where / with_them).
# (Removed stray "<reponame>" extraction artifact that made the file invalid.)
describe "EverythingBecomesF" do
  it 'has a version number' do
    expect(EverythingBecomesF::VERSION).not_to be nil
  end

  describe "#everything_became_f?" do
    describe Integer do
      where(:integer, :result) do
        [
          [0, false],
          [1, false],
          [10, false],
          [15, true],
          [16, false],
          [254, false],
          [255, true],
          [256, false],
          [65534, false],
          [65535, true],
          [65536, false]
        ]
      end

      with_them do
        subject { integer.everything_became_f? }
        it { is_expected.to eq result }
      end
    end

    describe String do
      where(:string, :result) do
        [
          ['a', false],
          ['af', false],
          ['fa', false],
          ['A', false],
          ['AF', false],
          ['FA', false],
          ['ffffffffffffffffffffffffffffffffffffffffffffffffffe', false],
          ['effffffffffffffffffffffffffffffffffffffffffffffffff', false],
          ['f', true],
          ['fF', true],
          ['FF', true],
          ['ffffffffffffffffffffffffffffffffffffffffffffffffff', true],
          ['FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', true]
        ]
      end

      with_them do
        subject { string.everything_became_f? }
        it { is_expected.to eq result }
      end
    end

    describe Time do
      ENV['TZ'] = 'UTC'
      where(:time, :result) do
        [
          [Time.new(1970, 1, 1, 0, 0, 15), true],
          [Time.new(1970, 1, 1, 0, 4, 15), true],
          [Time.new(1970, 1, 1, 1, 8, 15), true],
          [Time.new(1970, 1, 1, 18, 12, 15), true],
          [Time.new(1970, 1, 13, 3, 16, 15), true],
          [Time.new(1970, 7, 14, 4, 20, 15), true],
          [Time.new(1978, 7, 4, 21, 24, 15), true],
          [Time.new(2106, 2, 7, 6, 28, 15), true],
          [Time.new(4147, 8, 20, 7, 32, 15), true],
          [Time.new(36812, 2, 20, 0, 36, 15), true],
          [Time.new(559444, 3, 8, 9, 40, 15), true],
          [Time.new(1970, 1, 1, 0, 0, 0), false],
          [Time.new(2015, 12, 24, 0, 0, 0), false]
        ]
      end

      with_them do
        subject { time.everything_became_f? }
        it { is_expected.to eq result }
      end
    end
  end

  describe "#everything_becomes_f" do
    describe Integer do
      where(:integer, :result) do
        [
          [0, 15],
          [1, 15],
          [10, 15],
          [15, 15],
          [16, 255],
          [254, 255],
          [255, 255],
          [256, 4095],
          [65534, 65535],
          [65535, 65535]
        ]
      end

      with_them do
        subject { integer.everything_becomes_f }
        it { is_expected.to eq result }
        it { expect(subject.everything_became_f?).to eq true }
      end
    end

    describe String do
      where(:string, :result) do
        [
          ['a', 'f'],
          ['af', 'ff'],
          ['fa', 'ff'],
          ['ffffffffffffffffffffffffffffffffffffffffffffffffffe', 'fffffffffffffffffffffffffffffffffffffffffffffffffff'],
          ['あいうえお', 'fffff']
        ]
      end

      with_them do
        subject { string.everything_becomes_f }
        it { is_expected.to eq result }
        it { expect(subject.everything_became_f?).to eq true }
      end
    end

    describe Time do
      ENV['TZ'] = 'UTC'
      where(:time, :result) do
        [
          [Time.new(1970, 1, 1, 0, 0, 14), Time.new(1970, 1, 1, 0, 0, 15)],
          [Time.new(1970, 1, 1, 0, 0, 15), Time.new(1970, 1, 1, 0, 0, 15)],
          [Time.new(1970, 1, 1, 0, 0, 16), Time.new(1970, 1, 1, 0, 4, 15)],
          [Time.new(1978, 7, 4, 21, 24, 14), Time.new(1978, 7, 4, 21, 24, 15)],
          [Time.new(1978, 7, 4, 21, 24, 15), Time.new(1978, 7, 4, 21, 24, 15)],
          [Time.new(1978, 7, 4, 21, 24, 16), Time.new(2106, 2, 7, 6, 28, 15)]
        ]
      end

      with_them do
        subject { time.everything_becomes_f }
        it { is_expected.to eq result }
        it { expect(subject.everything_became_f?).to eq true }
      end
    end
  end
end
/// A LIFO stack of integers that can report its current maximum in O(1).
class MaxStack {
    /// Each element stores the pushed value together with the maximum of
    /// the stack up to and including that element, so the overall maximum
    /// is always available at the top.
    private var entries: [(value: Int, max: Int)] = []

    /// Pushes `value`, recording the running maximum alongside it.
    func push(_ value: Int) {
        if let top = entries.last {
            entries.append((value, Swift.max(top.max, value)))
        } else {
            entries.append((value, value))
        }
    }

    /// Removes and returns the most recently pushed value, or nil when empty.
    func pop() -> Int? {
        guard let top = entries.popLast() else { return nil }
        return top.value
    }

    /// Returns the largest value currently on the stack, or nil when empty.
    func max() -> Int? {
        return entries.last?.max
    }
}
import pandas as pd # Load data data = pd.read_csv('disease-data.csv') # Pre-process data X = data.iloc[:, :-1] y = data.iloc[:, -1] # Train and test split from sklearn.model_selection import train_test_split X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 0) # Train model from sklearn.ensemble import RandomForestClassifier model = RandomForestClassifier() model.fit(X_train, y_train) # Predictions predictions = model.predict(X_test) # Testing accuracy from sklearn.metrics import accuracy_score score = accuracy_score(y_test, predictions) print('The accuracy score is:', score) # Get inputs from user symptoms = input('Enter symptoms separated by comma: ').split(',') test_symptoms = [symptoms] # Make prediction prediction = model.predict(test_symptoms) print('The predicted disease is:', prediction)
<reponame>wxmgcs/learnZookeeper<gh_stars>10-100 package org.apache.bookkeeper.client; /* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ import java.net.InetSocketAddress; import java.security.GeneralSecurityException; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Enumeration; import java.util.Queue; import org.apache.bookkeeper.client.BKException; import org.apache.bookkeeper.client.AsyncCallback.AddCallback; import org.apache.bookkeeper.client.AsyncCallback.CloseCallback; import org.apache.bookkeeper.client.AsyncCallback.ReadCallback; import org.apache.bookkeeper.client.BKException.BKNotEnoughBookiesException; import org.apache.bookkeeper.client.BookKeeper.DigestType; import org.apache.bookkeeper.client.LedgerMetadata; import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GenericCallback; import org.apache.bookkeeper.util.SafeRunnable; import org.apache.bookkeeper.util.StringUtils; import org.apache.log4j.Logger; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.AsyncCallback.StatCallback; import org.apache.zookeeper.data.Stat; import org.jboss.netty.buffer.ChannelBuffer; /** * Ledger handle contains ledger metadata and is used to access the read and * write operations to a 
ledger. */
public class LedgerHandle implements ReadCallback, AddCallback, CloseCallback {
    final static Logger LOG = Logger.getLogger(LedgerHandle.class);

    // Key derived from the ledger password; used to authorize operations.
    final byte[] ledgerKey;
    final LedgerMetadata metadata;
    final BookKeeper bk;
    final long ledgerId;
    // Id of the last entry handed to a PendingAddOp (may not be persisted yet).
    long lastAddPushed;
    // Id of the last entry whose add has been fully acknowledged.
    long lastAddConfirmed;
    // Running total of the ledger payload, in bytes.
    long length;
    final DigestManager macManager;
    final DistributionSchedule distributionSchedule;

    // Adds awaiting acknowledgement, in submission order; their callbacks
    // are fired strictly in order by sendAddSuccessCallbacks().
    final Queue<PendingAddOp> pendingAddOps = new ArrayDeque<PendingAddOp>();

    LedgerHandle(BookKeeper bk, long ledgerId, LedgerMetadata metadata,
            DigestType digestType, byte[] password)
            throws GeneralSecurityException {
        this.bk = bk;
        this.metadata = metadata;
        // A closed ledger starts from its recorded last entry and length;
        // an open one starts empty (-1 means "no entry yet").
        if (metadata.isClosed()) {
            lastAddConfirmed = lastAddPushed = metadata.close;
            length = metadata.length;
        } else {
            lastAddConfirmed = lastAddPushed = -1;
            length = 0;
        }
        this.ledgerId = ledgerId;
        macManager = DigestManager.instantiate(ledgerId, password, digestType);
        this.ledgerKey = MacDigestManager.genDigest("ledger", password);
        distributionSchedule = new RoundRobinDistributionSchedule(
                metadata.quorumSize, metadata.ensembleSize);
    }

    /**
     * Get the id of the current ledger
     *
     * @return the ledger id
     */
    public long getId() {
        return ledgerId;
    }

    /**
     * Get the last confirmed entry id on this ledger
     *
     * @return the last confirmed entry id
     */
    public long getLastAddConfirmed() {
        return lastAddConfirmed;
    }

    /**
     * Get the entry id of the last entry that has been enqueued for addition (but
     * may not have possibly been persisted to the ledger)
     *
     * @return the last pushed entry id
     */
    public long getLastAddPushed() {
        return lastAddPushed;
    }

    /**
     * Get the Ledger's key/password.
     *
     * @return byte array for the ledger's key/password.
     */
    public byte[] getLedgerKey() {
        return ledgerKey;
    }

    /**
     * Get the LedgerMetadata
     *
     * @return LedgerMetadata for the LedgerHandle
     */
    public LedgerMetadata getLedgerMetadata() {
        return metadata;
    }

    /**
     * Get the DigestManager
     *
     * @return DigestManager for the LedgerHandle
     */
    public DigestManager getDigestManager() {
        return macManager;
    }

    /**
     * Add to the length of the ledger in bytes.
     *
     * @param delta number of bytes to add
     * @return the updated total length
     */
    long addToLength(long delta){
        this.length += delta;
        return this.length;
    }

    /**
     * Returns the length of the ledger in bytes.
     *
     * @return the ledger length in bytes
     */
    public long getLength(){
        return this.length;
    }

    /**
     * Get the Distribution Schedule
     *
     * @return DistributionSchedule for the LedgerHandle
     */
    public DistributionSchedule getDistributionSchedule() {
        return distributionSchedule;
    }

    // Persists the serialized ledger metadata to its ZooKeeper node.
    public void writeLedgerConfig(StatCallback callback, Object ctx) {
        bk.getZkHandle().setData(StringUtils.getLedgerNodePath(ledgerId),
                metadata.serialize(), -1, callback, ctx);
    }

    /**
     * Close this ledger synchronously.
     *
     */
    public void close() throws InterruptedException {
        SyncCounter counter = new SyncCounter();
        counter.inc();
        asyncClose(this, counter);
        counter.block(0);
    }

    /**
     * Asynchronous close, any adds in flight will return errors
     *
     * @param cb
     *          callback implementation
     * @param ctx
     *          control object
     * @throws InterruptedException
     */
    public void asyncClose(CloseCallback cb, Object ctx) {
        asyncClose(cb, ctx, BKException.Code.LedgerClosedException);
    }

    /**
     * Same as public version of asyncClose except that this one takes an
     * additional parameter which is the return code to hand to all the pending
     * add ops
     *
     * @param cb
     * @param ctx
     * @param rc
     */
    private void asyncClose(final CloseCallback cb, final Object ctx, final int rc) {
        // Runs on the ordered worker pool so it is serialized with adds on
        // the same ledger.
        bk.mainWorkerPool.submitOrdered(ledgerId, new SafeRunnable() {
            @Override
            public void safeRun() {
                metadata.length = length;
                // Close operation is idempotent, so no need to check if we are
                // already closed
                metadata.close(lastAddConfirmed);
                errorOutPendingAdds(rc);
                lastAddPushed = lastAddConfirmed;

                if (LOG.isDebugEnabled()) {
                    LOG.debug("Closing ledger: " + ledgerId + " at entryId: "
                            + metadata.close + " with this many bytes: " + metadata.length);
                }

                writeLedgerConfig(new StatCallback() {
                    @Override
                    public void processResult(int rc, String path, Object subctx, Stat stat) {
                        if (rc != KeeperException.Code.OK.intValue()) {
                            cb.closeComplete(BKException.Code.ZKException,
                                    LedgerHandle.this, ctx);
                        } else {
                            cb.closeComplete(BKException.Code.OK,
                                    LedgerHandle.this, ctx);
                        }
                    }
                }, null);
            }
        });
    }

    /**
     * Read a sequence of entries synchronously.
     *
     * @param firstEntry
     *          id of first entry of sequence (included)
     * @param lastEntry
     *          id of last entry of sequence (included)
     *
     */
    public Enumeration<LedgerEntry> readEntries(long firstEntry, long lastEntry)
            throws InterruptedException, BKException {
        SyncCounter counter = new SyncCounter();
        counter.inc();
        asyncReadEntries(firstEntry, lastEntry, this, counter);
        counter.block(0);
        if (counter.getrc() != BKException.Code.OK) {
            throw BKException.create(counter.getrc());
        }
        return counter.getSequence();
    }

    /**
     * Read a sequence of entries asynchronously.
     *
     * @param firstEntry
     *          id of first entry of sequence
     * @param lastEntry
     *          id of last entry of sequence
     * @param cb
     *          object implementing read callback interface
     * @param ctx
     *          control object
     */
    public void asyncReadEntries(long firstEntry, long lastEntry,
            ReadCallback cb, Object ctx) {
        // Little sanity check
        if (firstEntry < 0 || lastEntry > lastAddConfirmed
                || firstEntry > lastEntry) {
            cb.readComplete(BKException.Code.ReadException, this, null, ctx);
            return;
        }
        new PendingReadOp(this, firstEntry, lastEntry, cb, ctx).initiate();
    }

    /**
     * Add entry synchronously to an open ledger.
     *
     * @param data
     *         array of bytes to be written to the ledger
     */
    public long addEntry(byte[] data) throws InterruptedException, BKException {
        LOG.debug("Adding entry " + data);
        SyncCounter counter = new SyncCounter();
        counter.inc();
        asyncAddEntry(data, this, counter);
        counter.block(0);
        return counter.getrc();
    }

    /**
     * Add entry asynchronously to an open ledger.
     *
     * @param data
     *          array of bytes to be written
     * @param cb
     *          object implementing callbackinterface
     * @param ctx
     *          some control object
     */
    public void asyncAddEntry(final byte[] data, final AddCallback cb,
            final Object ctx) {
        // Serialized per-ledger on the ordered worker pool, so entry ids are
        // assigned in submission order.
        bk.mainWorkerPool.submitOrdered(ledgerId, new SafeRunnable() {
            @Override
            public void safeRun() {
                if (metadata.isClosed()) {
                    LOG.warn("Attempt to add to closed ledger: " + ledgerId);
                    cb.addComplete(BKException.Code.LedgerClosedException,
                            LedgerHandle.this, -1, ctx);
                    return;
                }

                long entryId = ++lastAddPushed;
                long currentLength = addToLength(data.length);
                PendingAddOp op = new PendingAddOp(LedgerHandle.this, cb, ctx, entryId);
                pendingAddOps.add(op);
                ChannelBuffer toSend = macManager.computeDigestAndPackageForSending(
                        entryId, lastAddConfirmed, currentLength, data);
                op.initiate(toSend);
            }
        });
    }

    // close the ledger and send fails to all the adds in the pipeline
    void handleUnrecoverableErrorDuringAdd(int rc) {
        asyncClose(NoopCloseCallback.instance, null, rc);
    }

    void errorOutPendingAdds(int rc) {
        PendingAddOp pendingAddOp;
        while ((pendingAddOp = pendingAddOps.poll()) != null) {
            pendingAddOp.submitCallback(rc);
        }
    }

    void sendAddSuccessCallbacks() {
        // Start from the head of the queue and proceed while there are
        // entries that have had all their responses come back
        PendingAddOp pendingAddOp;
        while ((pendingAddOp = pendingAddOps.peek()) != null) {
            if (pendingAddOp.numResponsesPending != 0) {
                return;
            }
            pendingAddOps.remove();
            lastAddConfirmed = pendingAddOp.entryId;
            pendingAddOp.submitCallback(BKException.Code.OK);
        }
    }

    // Replaces a failed bookie in the current ensemble with a fresh one,
    // persists the new ensemble, then resends outstanding writes for the
    // replaced slot. Closes the ledger if no replacement is available.
    void handleBookieFailure(InetSocketAddress addr, final int bookieIndex) {
        InetSocketAddress newBookie;

        if (LOG.isDebugEnabled()) {
            LOG.debug("Handling failure of bookie: " + addr + " index: "
                    + bookieIndex);
        }

        try {
            newBookie = bk.bookieWatcher
                    .getAdditionalBookie(metadata.currentEnsemble);
        } catch (BKNotEnoughBookiesException e) {
            LOG
            .error("Could not get additional bookie to remake ensemble, closing ledger: "
                    + ledgerId);
            handleUnrecoverableErrorDuringAdd(e.getCode());
            return;
        }

        final ArrayList<InetSocketAddress> newEnsemble = new ArrayList<InetSocketAddress>(
                metadata.currentEnsemble);
        newEnsemble.set(bookieIndex, newBookie);

        if (LOG.isDebugEnabled()) {
            LOG.debug("Changing ensemble from: " + metadata.currentEnsemble
                    + " to: " + newEnsemble + " for ledger: " + ledgerId
                    + " starting at entry: " + (lastAddConfirmed + 1));
        }

        metadata.addEnsemble(lastAddConfirmed + 1, newEnsemble);

        writeLedgerConfig(new StatCallback() {
            @Override
            public void processResult(final int rc, String path, Object ctx, Stat stat) {
                bk.mainWorkerPool.submitOrdered(ledgerId, new SafeRunnable() {
                    @Override
                    public void safeRun() {
                        if (rc != KeeperException.Code.OK.intValue()) {
                            LOG
                            .error("Could not persist ledger metadata while changing ensemble to: "
                                    + newEnsemble + " , closing ledger");
                            handleUnrecoverableErrorDuringAdd(BKException.Code.ZKException);
                            return;
                        }

                        for (PendingAddOp pendingAddOp : pendingAddOps) {
                            pendingAddOp.unsetSuccessAndSendWriteRequest(bookieIndex);
                        }
                    }
                });
            }
        }, null);
    }

    // Runs ledger recovery unless the ledger is already closed.
    void recover(GenericCallback<Void> cb) {
        if (metadata.isClosed()) {
            // We are already closed, nothing to do
            cb.operationComplete(BKException.Code.OK, null);
            return;
        }
        new LedgerRecoveryOp(this, cb).initiate();
    }

    static class NoopCloseCallback implements CloseCallback {
        static NoopCloseCallback instance = new NoopCloseCallback();

        @Override
        public void closeComplete(int rc, LedgerHandle lh, Object ctx) {
            // noop
        }
    }

    /**
     * Implementation of callback interface for synchronous read method.
     *
     * @param rc
     *          return code
     * @param lh
     *          ledger handle
     * @param seq
     *          sequence of entries
     * @param ctx
     *          control object
     */
    public void readComplete(int rc, LedgerHandle lh,
            Enumeration<LedgerEntry> seq, Object ctx) {
        SyncCounter counter = (SyncCounter) ctx;
        synchronized (counter) {
            counter.setSequence(seq);
            counter.setrc(rc);
            counter.dec();
            counter.notify();
        }
    }

    /**
     * Implementation of callback interface for synchronous add method.
     *
     * @param rc
     *          return code
     * @param lh
     *          ledger handle
     * @param entry
     *          entry identifier
     * @param ctx
     *          control object
     */
    public void addComplete(int rc, LedgerHandle lh, long entry, Object ctx) {
        SyncCounter counter = (SyncCounter) ctx;
        counter.setrc(rc);
        counter.dec();
    }

    /**
     * Close callback method
     *
     * @param rc
     * @param lh
     * @param ctx
     */
    public void closeComplete(int rc, LedgerHandle lh, Object ctx) {
        SyncCounter counter = (SyncCounter) ctx;
        counter.setrc(rc);
        synchronized (counter) {
            counter.dec();
            counter.notify();
        }
    }
}
<reponame>ohtomi/react-handsontable-hoc // @flow import Handsontable from 'handsontable' import {BasePlugin} from './BasePlugin' // see. https://github.com/handsontable/handsontable/blob/6.1.1/src/plugins/manualColumnResize/manualColumnResize.js#L459 const MinimumColumnWidthByManual = 20 const HiddenColumnWidth = 1e-20 class ManualColumnsHidePlugin extends BasePlugin { constructor(hot: Handsontable) { super(hot) this.afterUpdateSettings = this.afterUpdateSettings.bind(this) } isEnabled() { this.debug('isEnabled') const hasManualColumnsHide = !!this.hot.getSettings().manualColumnsHide const hasManualColumnResize = !!this.hot.getSettings().manualColumnResize return hasManualColumnsHide && hasManualColumnResize } enablePlugin() { this.debug('enablePlugin', this.enabled) if (this.enabled) { return } this.hot.addHook('afterUpdateSettings', this.afterUpdateSettings) const that = this this.hot._registerTimeout( setTimeout(() => { if (Array.isArray(that.hot.getSettings().manualColumnsHide)) { that.hideColumns(that.hot.getSettings().manualColumnsHide) } }, 0)) super.enablePlugin() } disablePlugin() { this.debug('disablePlugin') super.disablePlugin() } updatePlugin() { this.debug('updatePlugin') super.updatePlugin() } destroy() { this.debug('destroy') this.hot.removeHook('afterUpdateSettings', this.afterUpdateSettings) super.destroy() } afterUpdateSettings(newSettings: any) { this.debug('afterUpdateSettings', newSettings) if (newSettings.manualColumnsHide) { if (Array.isArray(newSettings.manualColumnsHide)) { this.hideColumns(newSettings.manualColumnsHide) } } } hideColumns(hiddenColumns: Array<number>) { this.debug('hideColumns', hiddenColumns) const columnWidths = preserveColumnWidth(this.hot) const manualColumnResize = resizeColumnWidth(this.hot, columnWidths, hiddenColumns) this.hot.updateSettings({manualColumnResize}) const plugin = this.hot.getPlugin('ManualColumnResize') plugin.saveManualColumnWidths() } } const preserveColumnWidth = (hotInstance: Handsontable) => { 
const numOfColumns = hotInstance.countCols() const widths = Array.from({length: numOfColumns}).map(c => null) const manualColumnResize = hotInstance.getSettings().manualColumnResize if (Array.isArray(manualColumnResize)) { manualColumnResize.forEach((w, i) => { if (w && w >= MinimumColumnWidthByManual) { widths[i] = w } }) } return widths } const resizeColumnWidth = (hotInstance: Handsontable, currentWidths: Array<?number>, hiddenColumns: Array<number>) => { // hiddenColumns is logical index return currentWidths.map((w, i) => { return hiddenColumns.some(c => c === i) ? HiddenColumnWidth : w }) } export const isHiddenColumn = (column: number) => (width: ?number, index: number) => { return width && width < MinimumColumnWidthByManual && index === column } const pluginName = 'ManualColumnsHidePlugin' export const getManualColumnsHidePlugin = (hotInstance: Handsontable) => { return hotInstance.getPlugin(pluginName) } export const registerManualColumnsHidePlugin = () => { Handsontable.plugins.registerPlugin(pluginName, ManualColumnsHidePlugin) }
package com.kvn.poi.exception;

import java.text.MessageFormat;

/**
 * Error codes for the poi-el module; code range: [1001-1999].
 * Messages are MessageFormat patterns filled in by the {@code exp(...)}
 * factory methods.
 * (Removed stray "<reponame>" extraction artifact that made the file invalid.)
 *
 * @author wzy
 * @date 2017-06-20 11:59:32
 */
public enum PoiElErrorCode implements IErrors {

    TAG_NOT_FOUND(1001, "[{0}]中找不到tag:[{1}]"),
    TEMPLATE_FILE_NOT_FOUND(1002, "找不到模板[{0}]"),
    EXCEL_FILE_NOT_FOUND(1003, "找不到被导入的excel[{0}]"),
    NOT_FOUND_IN_ROOTOBJECT(1004, "没有在RootObject中找到{0}对应的值"),

    /*-----------------COMMON ERROR--------------------*/
    SYSTEM_ERROR(1998, "系统错误"),
    ILLEGAL_PARAM(1999, "参数异常:{0}");

    /** Numeric error code within [1001-1999]. */
    private int code;
    /** MessageFormat pattern for the error message. */
    private String msg;

    private PoiElErrorCode(int code, String msg) {
        this.code = code;
        this.msg = msg;
    }

    public int getCode() {
        return code;
    }

    public void setCode(int code) {
        this.code = code;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    /** Creates an exception carrying this code and its raw message pattern. */
    @Override
    public PoiElException exp() {
        return new PoiElException(code, msg);
    }

    /** Creates an exception with the message pattern filled from {@code args}. */
    @Override
    public PoiElException exp(Object... args) {
        return new PoiElException(code, MessageFormat.format(msg, args));
    }

    /** Same as {@link #exp(Object...)} but preserves the underlying cause. */
    @Override
    public PoiElException exp(Throwable cause, Object... args) {
        return new PoiElException(code, MessageFormat.format(msg, args), cause);
    }

    /** Creates an exception with a caller-supplied message pattern. */
    @Override
    public PoiElException expMsg(String message, Object... args) {
        return new PoiElException(code, MessageFormat.format(message, args));
    }

    /** Same as {@link #expMsg(String, Object...)} but preserves the cause. */
    @Override
    public PoiElException expMsg(String message, Throwable cause, Object... args) {
        return new PoiElException(code, MessageFormat.format(message, args), cause);
    }
}
"""
Reference tracker for lltype data structures.
"""
# (Removed stray "<reponame>" extraction artifact that made the file invalid.)

import autopath, sys, os
import gc
from pypy.rpython.lltypesystem import lltype, llmemory
from pypy.rpython.memory.gcheader import header2obj
from pypy.translator.tool.reftracker import BaseRefTrackerPage, MARKER
from pypy.tool.uid import uid
from pypy.tool.identity_dict import identity_dict


class LLRefTrackerPage(BaseRefTrackerPage):
    """Reference-tracker page specialized for lltype objects."""

    def compute(self, objectlist, size_gc_header):
        self.size_gc_header = size_gc_header
        return BaseRefTrackerPage.compute(self, objectlist)

    def formatobject(self, o):
        # Render each (name, value) pair; non-string values shown by uid.
        lines = []
        for name, value in self.enum_content(o):
            if not isinstance(value, str):
                value = '0x%x' % uid(value)
            lines.append('%s = %s' % (name, value))
        s = '\n'.join(lines)
        t = shorttypename(lltype.typeOf(o))
        return t, s, ''

    def get_referrers(self, o):
        return []     # not implemented

    def get_referents(self, o):
        for name, value in self.enum_content(o):
            if not isinstance(value, str):
                yield value

    def edgelabel(self, o1, o2):
        slst = []
        for name, value in self.enum_content(o1):
            if value is o2:
                slst.append(name)
        return '/'.join(slst)

    def newpage(self, objectlist):
        return self.__class__(objectlist, self.size_gc_header)

    def normalize(self, o):
        # Map a GC header back to its object when header tracking is active.
        if self.size_gc_header is not None:
            try:
                return header2obj[o]._obj
            except (KeyError, TypeError):
                pass
        return o

    def enum_content(self, o, name='', with_header=True):
        # XXX clean up
        T = lltype.typeOf(o)
        if (self.size_gc_header is not None and with_header
                and isinstance(T, lltype.ContainerType)
                and T._gckind == 'gc'):
            # Step back over the GC header to enumerate it along with the object.
            adr = llmemory.cast_ptr_to_adr(o._as_ptr())
            adr -= self.size_gc_header
            o = adr.get()._obj
            T = lltype.typeOf(o)
        if isinstance(T, lltype.Struct):
            try:
                gcobjptr = header2obj[o]
                fmt = '(%s)'
            except KeyError:
                gcobjptr = None
                fmt = '%s'
            for name in T._names:
                for name, value in self.enum_content(getattr(o, name), name,
                                                     with_header=False):
                    yield fmt % (name,), value
            if gcobjptr:
                GCT = lltype.typeOf(gcobjptr)
                if self.size_gc_header is not None:
                    for sub in self.enum_content(gcobjptr._obj,
                                                 with_header=False):
                        yield sub
                else:
                    # display as a link to avoid the same data showing up
                    # twice in the graph
                    yield 'header of', gcobjptr._obj
        elif isinstance(T, lltype.Array):
            for index, o1 in enumerate(o.items):
                for sub in self.enum_content(o1, str(index)):
                    yield sub
        elif isinstance(T, lltype.Ptr):
            if not o:
                yield name, 'null'
            else:
                yield name, self.normalize(lltype.normalizeptr(o)._obj)
        elif isinstance(T, lltype.OpaqueType) and hasattr(o, 'container'):
            T = lltype.typeOf(o.container)
            yield 'container', '<%s>' % (shorttypename(T),)
            for sub in self.enum_content(o.container, name, with_header=False):
                yield sub
        elif T == llmemory.Address:
            if not o:
                yield name, 'NULL'
            else:
                addrof = o.ref()
                T1 = lltype.typeOf(addrof)
                if (isinstance(T1, lltype.Ptr)
                        and isinstance(T1.TO, lltype.Struct)
                        and addrof._obj in header2obj):
                    yield name + ' @hdr', self.normalize(addrof._obj)
                else:
                    yield name + ' @', self.normalize(o.ptr._obj)
##                    if o.offset:
##                        yield '... offset', str(o.offset)
        else:
            yield name, str(o)


def shorttypename(T):
    """Short human-readable name for an lltype type object."""
    return '%s %s' % (T.__class__.__name__, getattr(T, '__name__', ''))


def track(*ll_objects):
    """Invoke a dot+pygame object reference tracker."""
    lst = [MARKER]
    size_gc_header = None
    seen = identity_dict()
    for ll_object in ll_objects:
        # A GCHeaderOffset argument enables GC-header tracking instead of
        # being tracked itself.
        if isinstance(ll_object, llmemory.GCHeaderOffset):
            size_gc_header = ll_object
            continue
        #if isinstance(lltype.typeOf(ll_object), lltype.Ptr):
        #    ptr = lltype.normalizeptr(ll_object)
        #    if ptr is not None:
        #        ll_object = ptr._obj
        #    else:
        #        ll_object = None
        if ll_object is not None and ll_object not in seen:
            lst.append(ll_object)
            seen[ll_object] = ll_object
    page = LLRefTrackerPage(lst, size_gc_header)
    # auto-expand one level, for now
    auto_expand = 1
    for i in range(auto_expand):
        page = page.content()
        for ll_object in lst[1:]:
            for name, value in page.enum_content(ll_object):
                if not isinstance(value, str) and value not in seen:
                    lst.append(value)
                    seen[value] = value
        page = page.newpage(lst)
    page.display()


if __name__ == '__main__':
    try:
        sys.path.remove(os.getcwd())
    except ValueError:
        pass
    # Small demo structure: a linked struct holding a GC array.
    T = lltype.GcArray(lltype.Signed)
    S = lltype.GcForwardReference()
    S.become(lltype.GcStruct('S', ('t', lltype.Ptr(T)),
                             ('next', lltype.Ptr(S))))
    s = lltype.malloc(S)
    s.next = lltype.malloc(S)
    s.next.t = lltype.malloc(T, 5)
    s.next.t[1] = 123
    track(s)
// Redux state slice for ingredient options.
// (Removed stray "<reponame>" extraction artifact that made the file invalid.)
import _ from "lodash";
import {
  GET_INGREDIENTS,
  getIngredients,
} from "../actions/ingredients.actions";

// Thunk: fetches the ingredient options from the API, dispatches them into
// the store, and returns the raw response for callers that inspect status.
export const ingredients = () => async (dispatch) => {
  const res = await fetch("/api/ingredients/options");
  const ingredientData = await res.json();
  dispatch(getIngredients(ingredientData));
  return res;
};

// Reducer: replaces the ingredients list on GET_INGREDIENTS; the deep clone
// keeps the previous state object untouched for reference-equality checks.
const ingredientsReducer = (state = { ingredients: [] }, action) => {
  let newState;
  switch (action.type) {
    case GET_INGREDIENTS:
      newState = _.cloneDeep(state);
      newState.ingredients = action.payload;
      return newState;
    default:
      return state;
  }
};

export default ingredientsReducer;
/*
 * Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.config;

import com.hazelcast.cache.ICache;
import com.hazelcast.cache.impl.merge.policy.CacheMergePolicyProvider;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.InvalidConfigurationException;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.ReplicatedMapConfig;
import com.hazelcast.logging.ILogger;
import com.hazelcast.map.merge.MergePolicyProvider;
import com.hazelcast.spi.merge.MergingExpirationTime;
import com.hazelcast.spi.merge.MergingLastStoredTime;
import com.hazelcast.spi.merge.MergingValue;
import com.hazelcast.spi.merge.SplitBrainMergePolicy;
import com.hazelcast.spi.merge.SplitBrainMergePolicyProvider;
import com.hazelcast.spi.merge.SplitBrainMergeTypeProvider;

import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.util.ArrayList;
import java.util.List;

import static com.hazelcast.config.InMemoryFormat.NATIVE;
import static com.hazelcast.internal.cluster.Versions.V3_10;
import static java.lang.String.format;

/**
 * Validates a merge policy instance.
 * <p>
 * Static utility: resolves a configured merge-policy class name via the
 * matching provider, then uses reflection over the policy's generics to
 * verify that the data structure provides every merge type the policy
 * requires.
 */
public final class MergePolicyValidator {

    // Utility class; not instantiable.
    private MergePolicyValidator() {
    }

    /**
     * Checks if the given {@link InMemoryFormat} can be merged by the given
     * {@code mergePolicy} instance.
     * <p>
     * When a wrong policy is detected, it does one of two things:
     * if {@code failFast} is {@code true} and the cluster version is 3.10 or later,
     * it throws an {@link InvalidConfigurationException}, otherwise it logs a warning.
     *
     * @return {@code true} if the given {@code inMemoryFormat} can be merged by
     * the supplied {@code mergePolicy}, {@code false} otherwise
     */
    public static boolean checkMergePolicySupportsInMemoryFormat(String name, Object mergePolicy,
                                                                 InMemoryFormat inMemoryFormat,
                                                                 boolean failFast, ILogger logger) {
        // Only the NATIVE in-memory format is restricted; everything else merges fine.
        if (inMemoryFormat != NATIVE) {
            return true;
        }
        // NATIVE data requires the newer SplitBrainMergePolicy API.
        if (mergePolicy instanceof SplitBrainMergePolicy) {
            return true;
        }
        if (failFast) {
            throw new InvalidConfigurationException(createSplitRecoveryWarningMsg(name, mergePolicy.getClass().getName()));
        }
        logger.warning(createSplitRecoveryWarningMsg(name, mergePolicy.getClass().getName()));
        return false;
    }

    // Builds the warning/exception message for a NATIVE structure configured
    // with a legacy (non-SplitBrainMergePolicy) merge policy.
    private static String createSplitRecoveryWarningMsg(String name, String mergePolicy) {
        String messageTemplate = "Split brain recovery is not supported for '%s',"
                + " because it's using merge policy `%s` to merge `%s` data."
                + " To fix this, use an implementation of `%s` with a cluster version `%s` or later";
        return format(messageTemplate, name, mergePolicy, NATIVE,
                SplitBrainMergePolicy.class.getName(), V3_10);
    }

    /**
     * Checks the merge policy configuration in the context of an {@link ICache}.
     *
     * @param mergePolicyClassname the configured merge policy of the cache
     * @param mergeTypeProvider    the {@link SplitBrainMergeTypeProvider} of the cache
     * @param mergePolicyProvider  the {@link CacheMergePolicyProvider} to resolve merge policy classes
     */
    static void checkCacheMergePolicy(String mergePolicyClassname, SplitBrainMergeTypeProvider mergeTypeProvider,
                                      CacheMergePolicyProvider mergePolicyProvider) {
        // No provider means there is nothing to resolve the class name against; skip validation.
        if (mergePolicyProvider == null) {
            return;
        }
        Object mergePolicyInstance = getMergePolicyInstance(mergePolicyProvider, mergePolicyClassname);
        checkMergePolicy(mergeTypeProvider, mergePolicyInstance);
    }

    // Resolves a cache merge policy, rewrapping resolution failures with a
    // cache-specific message (original cause is preserved).
    private static Object getMergePolicyInstance(CacheMergePolicyProvider mergePolicyProvider,
                                                 String mergePolicyClassname) {
        try {
            return mergePolicyProvider.getMergePolicy(mergePolicyClassname);
        } catch (InvalidConfigurationException e) {
            throw new InvalidConfigurationException("Merge policy must be an instance of SplitBrainMergePolicy"
                    + " or CacheMergePolicy, but was " + mergePolicyClassname, e.getCause());
        }
    }

    /**
     * Checks the merge policy configuration of the given {@link ReplicatedMapConfig}.
     *
     * @param replicatedMapConfig the {@link ReplicatedMapConfig} to check
     * @param mergePolicyProvider the {@link com.hazelcast.replicatedmap.merge.MergePolicyProvider}
     *                            to resolve merge policy classes
     */
    static void checkReplicatedMapMergePolicy(ReplicatedMapConfig replicatedMapConfig,
                                              com.hazelcast.replicatedmap.merge.MergePolicyProvider mergePolicyProvider) {
        String mergePolicyClassName = replicatedMapConfig.getMergePolicyConfig().getPolicy();
        Object mergePolicyInstance = getMergePolicyInstance(mergePolicyProvider, mergePolicyClassName);
        // ReplicatedMapConfig itself acts as the SplitBrainMergeTypeProvider here.
        checkMergePolicy(replicatedMapConfig, mergePolicyInstance);
    }

    // Resolves a replicated-map merge policy, rewrapping resolution failures
    // with a replicated-map-specific message (original cause is preserved).
    private static Object getMergePolicyInstance(com.hazelcast.replicatedmap.merge.MergePolicyProvider mergePolicyProvider,
                                                 String mergePolicyClassName) {
        try {
            return mergePolicyProvider.getMergePolicy(mergePolicyClassName);
        } catch (InvalidConfigurationException e) {
            throw new InvalidConfigurationException("Merge policy must be an instance of SplitBrainMergePolicy"
                    + " or ReplicatedMapMergePolicy, but was " + mergePolicyClassName, e.getCause());
        }
    }

    /**
     * Checks the merge policy configuration of the given {@link MapConfig}.
     *
     * @param mapConfig           the {@link MapConfig}
     * @param mergePolicyProvider the {@link MergePolicyProvider} to resolve merge policy classes
     */
    static void checkMapMergePolicy(MapConfig mapConfig, MergePolicyProvider mergePolicyProvider) {
        String mergePolicyClassName = mapConfig.getMergePolicyConfig().getPolicy();
        Object mergePolicyInstance = getMergePolicyInstance(mergePolicyProvider, mergePolicyClassName);
        List<Class> requiredMergeTypes = checkMergePolicy(mapConfig, mergePolicyInstance);
        // Some merge types (last-stored-time, expiration-time) are only populated
        // when per-entry statistics are enabled; reject configurations that need
        // them while statistics are off.
        if (!mapConfig.isStatisticsEnabled() && requiredMergeTypes != null) {
            checkMapMergePolicyWhenStatisticsAreDisabled(mergePolicyClassName, requiredMergeTypes);
        }
    }

    // Resolves a map merge policy, rewrapping resolution failures with a
    // map-specific message (original cause is preserved).
    private static Object getMergePolicyInstance(MergePolicyProvider mergePolicyProvider,
                                                 String mergePolicyClassName) {
        try {
            return mergePolicyProvider.getMergePolicy(mergePolicyClassName);
        } catch (InvalidConfigurationException e) {
            throw new InvalidConfigurationException("Merge policy must be an instance of SplitBrainMergePolicy"
                    + " or MapMergePolicy, but was " + mergePolicyClassName, e.getCause());
        }
    }

    /**
     * Checks if the configured merge policy requires merge types, which are just available if map statistics are enabled.
     *
     * @param mergePolicyClass   the name of the configured merge policy class
     * @param requiredMergeTypes the required merge types of the configured merge policy
     */
    private static void checkMapMergePolicyWhenStatisticsAreDisabled(String mergePolicyClass,
                                                                     List<Class> requiredMergeTypes) {
        for (Class<?> requiredMergeType : requiredMergeTypes) {
            if (MergingLastStoredTime.class.isAssignableFrom(requiredMergeType)
                    || MergingExpirationTime.class.isAssignableFrom(requiredMergeType)) {
                throw new InvalidConfigurationException("The merge policy " + mergePolicyClass
                        + " requires the merge type " + requiredMergeType.getName()
                        + ", which is just provided if the map statistics are enabled.");
            }
        }
    }

    /**
     * Checks if a {@link SplitBrainMergeTypeProvider} provides all required types of a given merge policy.
     *
     * @param mergeTypeProvider    the {@link SplitBrainMergeTypeProvider} to retrieve the provided merge types
     * @param mergePolicyProvider  the {@link SplitBrainMergePolicyProvider} to resolve merge policy classes
     * @param mergePolicyClassName the merge policy class name
     * @throws InvalidConfigurationException if the given merge policy is no {@link SplitBrainMergePolicy}
     */
    static void checkMergePolicy(SplitBrainMergeTypeProvider mergeTypeProvider,
                                 SplitBrainMergePolicyProvider mergePolicyProvider, String mergePolicyClassName) {
        SplitBrainMergePolicy mergePolicy = getMergePolicyInstance(mergePolicyProvider, mergePolicyClassName);
        checkSplitBrainMergePolicy(mergeTypeProvider, mergePolicy);
    }

    // Resolves a SplitBrainMergePolicy, rewrapping resolution failures
    // (original cause is preserved).
    private static SplitBrainMergePolicy getMergePolicyInstance(SplitBrainMergePolicyProvider mergePolicyProvider,
                                                                String mergePolicyClassName) {
        try {
            return mergePolicyProvider.getMergePolicy(mergePolicyClassName);
        } catch (InvalidConfigurationException e) {
            throw new InvalidConfigurationException("Merge policy must be an instance of SplitBrainMergePolicy,"
                    + " but was " + mergePolicyClassName, e.getCause());
        }
    }

    /**
     * Checks if a {@link SplitBrainMergeTypeProvider} provides all required types of a given merge policy instance.
     *
     * @param mergeTypeProvider   the {@link SplitBrainMergeTypeProvider} to retrieve the provided merge types
     * @param mergePolicyInstance the merge policy instance
     * @return a list of the required merge types if the merge policy is a {@link SplitBrainMergePolicy}, {@code null} otherwise
     */
    private static List<Class> checkMergePolicy(SplitBrainMergeTypeProvider mergeTypeProvider, Object mergePolicyInstance) {
        // Legacy merge policies carry no generic merge-type requirements; nothing to validate.
        if (mergePolicyInstance instanceof SplitBrainMergePolicy) {
            return checkSplitBrainMergePolicy(mergeTypeProvider, (SplitBrainMergePolicy) mergePolicyInstance);
        }
        return null;
    }

    /**
     * Checks if a {@link SplitBrainMergeTypeProvider} provides all required types of a given {@link SplitBrainMergePolicy}.
     *
     * @param mergeTypeProvider   the {@link SplitBrainMergeTypeProvider} to retrieve the provided merge types
     * @param mergePolicyInstance the {@link SplitBrainMergePolicy} instance
     * @return a list of the required merge types
     */
    private static List<Class> checkSplitBrainMergePolicy(SplitBrainMergeTypeProvider mergeTypeProvider,
                                                          SplitBrainMergePolicy mergePolicyInstance) {
        List<Class> requiredMergeTypes = new ArrayList<Class>();
        Class providedMergeTypes = mergeTypeProvider.getProvidedMergeTypes();
        Class<?> mergePolicyClass = mergePolicyInstance.getClass();
        String mergePolicyClassName = mergePolicyClass.getName();
        // iterate over the complete class hierarchy of a merge policy, to check all its generics
        do {
            checkSplitBrainMergePolicyGenerics(requiredMergeTypes, providedMergeTypes,
                    mergePolicyClassName, mergePolicyClass);
            mergePolicyClass = mergePolicyClass.getSuperclass();
        } while (mergePolicyClass != null);
        return requiredMergeTypes;
    }

    // Inspects one class level of the merge policy hierarchy: its own type
    // parameters, its generic interfaces and its generic superclass.
    // Each required merge type found is validated and accumulated into
    // requiredMergeTypes.
    private static void checkSplitBrainMergePolicyGenerics(List<Class> requiredMergeTypes, Class providedMergeTypes,
                                                           String mergePolicyClassName, Class<?> mergePolicyClass) {
        for (TypeVariable<? extends Class<?>> classTypeVariable : mergePolicyClass.getTypeParameters()) {
            // checks merge policies like
            // CustomMergePolicy<V, T extends MergingValue<V>> implements SplitBrainMergePolicy<V, T>
            for (Type requireMergeType : classTypeVariable.getBounds()) {
                checkRequiredMergeType(requiredMergeTypes, providedMergeTypes, mergePolicyClassName, requireMergeType);
            }
        }
        for (Type type : mergePolicyClass.getGenericInterfaces()) {
            // checks merge policies like
            // CustomMergePolicy implements SplitBrainMergePolicy<Object, SplitBrainMergeTypes$...MergeTypes>
            checkRequiredGenericType(requiredMergeTypes, providedMergeTypes, mergePolicyClassName, type);
        }
        // checks merge policies like
        // CustomMergePolicy extends AbstractSplitBrainMergePolicy<Object, SplitBrainMergeTypes$...MergeTypes>
        Type type = mergePolicyClass.getGenericSuperclass();
        checkRequiredGenericType(requiredMergeTypes, providedMergeTypes, mergePolicyClassName, type);
    }

    // Unwraps a parameterized supertype (interface or superclass) and checks
    // each of its actual type arguments as a potential required merge type.
    private static void checkRequiredGenericType(List<Class> requiredMergeTypes, Class providedMergeTypes,
                                                 String mergePolicyClassName, Type requiredMergeType) {
        if (requiredMergeType instanceof ParameterizedType) {
            Type[] actualTypeArguments = ((ParameterizedType) requiredMergeType).getActualTypeArguments();
            for (Type requireMergeType : actualTypeArguments) {
                checkRequiredMergeType(requiredMergeTypes, providedMergeTypes, mergePolicyClassName, requireMergeType);
            }
        }
    }

    // Normalizes a single type argument to its raw Class (if possible) and
    // delegates to the class-level check. Non-class types (e.g. plain type
    // variables) are ignored.
    private static void checkRequiredMergeType(List<Class> requiredMergeTypes, Class providedMergeTypes,
                                               String mergePolicyClassName, Type requireMergeType) {
        if (requireMergeType instanceof ParameterizedType) {
            // checks types like Merging...<V> extends MergingValue<V>
            Class<?> requiredMergeType = (Class<?>) ((ParameterizedType) requireMergeType).getRawType();
            checkRequiredMergeTypeClass(requiredMergeTypes, providedMergeTypes, mergePolicyClassName, requiredMergeType);
        } else if (requireMergeType instanceof Class) {
            // checks types like SplitBrainMergeTypes$...MergeTypes
            Class<?> requiredMergeType = (Class) requireMergeType;
            checkRequiredMergeTypeClass(requiredMergeTypes, providedMergeTypes, mergePolicyClassName, requiredMergeType);
        }
    }

    // Validates that the data structure's provided merge types satisfy one
    // required merge type, and records it. Types not inheriting from
    // MergingValue are skipped on purpose.
    private static void checkRequiredMergeTypeClass(List<Class> requiredMergeTypes, Class providedMergeTypes,
                                                    String mergePolicyClassName, Class<?> requiredMergeTypeClass) {
        if (!MergingValue.class.isAssignableFrom(requiredMergeTypeClass)) {
            // just check types, which inherit from MergingValue
            return;
        }
        if (!requiredMergeTypeClass.isAssignableFrom(providedMergeTypes)) {
            throw new InvalidConfigurationException("The merge policy " + mergePolicyClassName
                    + " can just be configured on data structures which provide the merging type "
                    + requiredMergeTypeClass.getName()
                    + ". See SplitBrainMergeTypes for supported merging types.");
        }
        requiredMergeTypes.add(requiredMergeTypeClass);
    }
}
/* jshint node: true */ module.exports = function (grunt) { 'use strict'; // Livereload and connect variables var LIVERELOAD_PORT = 35729; var lrSnippet = require('connect-livereload')({ port: LIVERELOAD_PORT }); var mountFolder = function( connect, dir ) { return connect.static(require('path').resolve(dir)); }; grunt.initConfig({ pkg: grunt.file.readJSON('package.json'), jshint: { all: [ 'Gruntfile.js', 'src/main.js' ], options: { jshintrc: '.jshintrc' } }, connect: { dev: { options: { port: 8999, hostname: 'localhost', // keepalive: true, // livereload: true, // open: true, // middleware: function( connect ) { // return [lrSnippet, mountFolder(connect, '.')]; // } } } }, open: { dev: { path: 'http://localhost:<%= connect.dev.options.port %>/index.html' } }, watch: { dev: { files: [ 'src/**/*.js', ], task: ['jshint'], options: { Livereload: true } } } }); grunt.loadNpmTasks('grunt-contrib-jshint'); grunt.loadNpmTasks('grunt-contrib-watch'); grunt.loadNpmTasks('grunt-contrib-connect'); grunt.loadNpmTasks('grunt-contrib-copy'); grunt.loadNpmTasks('grunt-open'); grunt.registerTask('default', [ 'jshint', 'connect:dev:livereload', 'open:dev', 'watch:dev' ]); };
package libs.trustconnector.scdp.smartcard.application.globalplatform;

import libs.trustconnector.scdp.*;
import libs.trustconnector.scdp.SCDPException;

/**
 * Unchecked-style exception for Secure Channel Protocol (SCP) failures in the
 * GlobalPlatform application layer.
 * <p>
 * The constructor is private: instances are created and thrown exclusively
 * through the static {@link #throwIt(String)} factory, mirroring the
 * JavaCard-style {@code throwIt} convention.
 */
public class SCPException extends SCDPException {

    // Keep stable across versions; this class is serializable via its parent.
    private static final long serialVersionUID = -8405785182237793959L;

    // Private on purpose; see throwIt(String).
    private SCPException(final String message) {
        super(message);
    }

    /**
     * Creates and immediately throws an {@code SCPException}.
     *
     * @param message description of the SCP failure
     */
    public static void throwIt(final String message) {
        throw new SCPException(message);
    }
}
The script should accept a search query as input, use it to query a geospatial database, and present the results visually — for example as a map, a heatmap, or a chart.
#include <layer.h>

namespace Importer::Dxf
{

// A named DXF layer that accumulates the polylines parsed from it.
Layer::Layer(const std::string& name)
	:m_name(name)
{
}

// Appends one parsed polyline to this layer (copied into internal storage).
void Layer::addPolyline(const Geometry::Polyline& polyline)
{
	m_polylines.push_back(polyline);
}

// Destructive accessor: moves the accumulated polylines out of the layer.
// After calling this, m_polylines is left in a moved-from state — the caller
// is expected to take ownership exactly once.
// NOTE(review): presumably intended as a one-shot transfer at the end of the
// import; confirm no caller reads polylines() twice.
Geometry::Polyline::List &&Layer::polylines()
{
	return std::move(m_polylines);
}

// Returns the DXF layer name this object was constructed with.
const std::string& Layer::name() const
{
	return m_name;
}

}
package com.pearson.docussandra.domain.objects;

/**
 * Generic single-method callback invoked with a value of type {@code T}.
 * <p>
 * Marked {@link FunctionalInterface} so it can be implemented with a lambda
 * or method reference, and so the compiler rejects any accidental addition
 * of a second abstract method.
 *
 * @param <T> type of the value passed to the callback
 * @author https://github.com/tfredrich
 * @since Jan 24, 2015
 */
@FunctionalInterface
public interface Callback<T>
{
    /**
     * Processes one value.
     *
     * @param value the value to handle
     */
    void process(T value);
}
package android.example.com.split.data.model;

/**
 * Contract for one participant's payment state within a split expense:
 * how much they paid, how much of the total they owe, and whether the
 * debt has been settled.
 * <p>
 * NOTE(review): "Setteled" is a misspelling of "settled", but the method is
 * part of the public API and cannot be renamed without breaking callers.
 */
public interface Payment {

  // Marks this payment as settled (debt cleared) or outstanding.
  void setSetteled(boolean setteled);

  // Records the amount this participant has actually paid.
  void setPaidAmount(double paidAmount);

  // Returns the amount this participant has actually paid.
  double getPaidAmount();

  // Returns this participant's share of the total expense.
  double getTotalShare();

  // Sets this participant's share of the total expense.
  void setTotalShare(double shareAmount);
}
# Bats tests verifying a version-specific devpi-server installation under
# /opt/devpi/devpi-server-2.0.0. Helper assertions (verify_virtualenv,
# verify_devpi_server, ensure_root_can_group_sudo) come from test_helpers.
load test_helpers

@test "version-specific virtualenv created" {
  verify_virtualenv /opt/devpi/devpi-server-2.0.0
}

@test "version-specific server is correctly installed" {
  verify_devpi_server /opt/devpi/devpi-server-2.0.0
}

@test "version-specific data directory is created" {
  ensure_root_can_group_sudo
  # The devpi user must be able to write its data dir directly...
  sudo -u devpi test -w /opt/devpi/devpi-server-2.0.0/data
  # ...and any other user gains write access via the devpi group.
  sudo -u nobody -g devpi test -w /opt/devpi/devpi-server-2.0.0/data
}
/**
 * This class was created by <ArekkuusuJerii>. It's distributed as
 * part of Stratoprism. Get the Source Code in github:
 * https://github.com/ArekkuusuJerii/Stratoprism
 *
 * Stratoprism is Open Source and distributed under the
 * MIT Licence: https://github.com/ArekkuusuJerii/Stratoprism/blob/master/LICENSE
 */
package arekkuusu.stratoprism.api.recipe;

import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import net.minecraftforge.items.IItemHandler;

/**
 * API contract for a Prism recipe: given the items currently in an inventory
 * and the world context, decides whether the recipe applies and what it yields.
 */
public interface IRecipePrism {

	// Returns true when the items in the handler (in the given world) match
	// this recipe's ingredients.
	boolean validateRecipe(IItemHandler usedItems, World world);

	// The stack produced when this recipe is crafted.
	ItemStack getResult();
}
// Problem: read an integer n (1 <= n <= 1000) and print the sum of the
// first n primes. Example: the first 5 primes are 2, 3, 5, 7, 11, whose
// sum is 28. Hint from the original statement: the 1000th prime is 7919,
// so the answer comfortably fits in a 32-bit int; long long is used for
// headroom anyway.
//
// (Original exercise asked to fill in the trial-division loop; this version
// also factors primality testing into a helper and only checks divisors up
// to sqrt(v) instead of all values below v.)
#include <iostream>
using namespace std;

// Returns true when v is prime. Divisors are only tested up to sqrt(v):
// any composite v has a factor no larger than its square root.
static bool isPrime(int v)
{
    if (v < 2) {
        return false;
    }
    for (int j = 2; j * j <= v; ++j) {
        if (v % j == 0) {
            return false;
        }
    }
    return true;
}

int main()
{
    int n;
    cin >> n;

    long long sum = 0;   // running sum of primes found so far
    int count = 0;       // how many primes have been accumulated
    for (int candidate = 2; count < n; ++candidate) {
        if (isPrime(candidate)) {
            sum += candidate;
            ++count;
        }
    }
    cout << sum;
    return 0;
}
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# This was lovingly copied from Apache HBase
#
# Downloads an Apache release artifact (preferring a mirror, falling back to
# archive.apache.org), optionally verifies its GPG signature against the
# project KEYS file, and moves it atomically into place.

set -e

function usage {
  echo "Usage: ${0} [options] /path/to/download/file.tar.gz download/fragment/eg/project/subdir/some-artifact-version.tar.gz"
  echo ""
  echo "    --force                       for a redownload even if /path/to/download/file.tar.gz exists."
  echo "    --working-dir /path/to/use    Path for writing tempfiles. must exist."
  echo "                                  defaults to making a directory via mktemp that we clean."
  echo "    --keys url://to/project/KEYS  where to get KEYS. needed to check signature on download."
  echo ""
  exit 1
}
# if no args specified, show usage
if [ $# -lt 2 ]; then
  usage
fi

# Get arguments
declare done_if_cached="true"
declare working_dir
declare cleanup="true"
declare keys
while [ $# -gt 0 ]
do
  case "$1" in
    --force) shift; done_if_cached="false";;
    # A caller-supplied working dir is never deleted on exit (cleanup="false").
    --working-dir) shift; working_dir=$1; cleanup="false"; shift;;
    --keys) shift; keys=$1; shift;;
    --) shift; break;;
    -*) usage ;;
    *) break;;  # terminate while loop
  esac
done

# should still have required args
if [ $# -lt 2 ]; then
  usage
fi
target="$1"
artifact="$2"

# Cached download is good enough unless --force was given.
if [ -f "${target}" ] && [ "true" = "${done_if_cached}" ]; then
  echo "Reusing existing download of '${artifact}'."
  exit 0
fi

if [ -z "${working_dir}" ]; then
  if ! working_dir="$(mktemp -d -t hbase-download-apache-artifact)" ; then
    echo "Failed to create temporary working directory. Please specify via --working-dir" >&2
    exit 1
  fi
else
  # absolutes please
  working_dir="$(cd "$(dirname "${working_dir}")"; pwd)/$(basename "${working_dir}")"
  if [ ! -d "${working_dir}" ]; then
    echo "passed working directory '${working_dir}' must already exist." >&2
    exit 1
  fi
fi

# Exit handler: stop the gpg agent we spawned (if any) and remove the
# temp dir unless the caller provided it.
function cleanup {
  if [ -n "${keys}" ]; then
    echo "Stopping gpg agent daemon"
    gpgconf --homedir "${working_dir}/.gpg" --kill gpg-agent
    echo "Stopped gpg agent daemon"
  fi
  if [ "true" = "${cleanup}" ]; then
    echo "cleaning up temp space."
    rm -rf "${working_dir}"
  fi
}
trap cleanup EXIT SIGQUIT

echo "New download of '${artifact}'"

# N.B. this comes first so that if gpg falls over we skip the expensive download.
if [ -n "${keys}" ]; then
  if [ ! -d "${working_dir}/.gpg" ]; then
    rm -rf "${working_dir}/.gpg"
    mkdir -p "${working_dir}/.gpg"
    chmod -R 700 "${working_dir}/.gpg"
  fi

  echo "installing project KEYS"
  curl -L --fail -o "${working_dir}/KEYS" "${keys}"
  if ! gpg --homedir "${working_dir}/.gpg" --import "${working_dir}/KEYS" ; then
    echo "ERROR importing the keys via gpg failed. If the output above mentions this error:" >&2
    echo "    gpg: can't connect to the agent: File name too long" >&2
    # we mean to give them the command to run, not to run it.
    #shellcheck disable=SC2016
    echo 'then you prolly need to create /var/run/user/$(id -u)' >&2
    echo "see this thread on gnupg-users: https://s.apache.org/uI7x" >&2
    exit 2
  fi

  echo "downloading signature"
  curl -L --fail -o "${working_dir}/artifact.asc" "https://archive.apache.org/dist/${artifact}.asc"
fi

echo "downloading artifact"
# Try the mirror system first; archive.apache.org is the fallback of record.
if ! curl --dump-header "${working_dir}/artifact_download_headers.txt" -L --fail -o "${working_dir}/artifact" "https://www.apache.org/dyn/closer.lua/${artifact}?action=download" ; then
  echo "Artifact wasn't in mirror system. falling back to archive.a.o."
  curl --dump-header "${working_dir}/artifact_fallback_headers.txt" -L --fail -o "${working_dir}/artifact" "http://archive.apache.org/dist/${artifact}"
fi

if [ -n "${keys}" ]; then
  echo "verifying artifact signature"
  gpg --homedir "${working_dir}/.gpg" --verify "${working_dir}/artifact.asc"
  echo "signature good."
fi

echo "moving artifact into place at '${target}'"
# ensure we're on the same filesystem
mv "${working_dir}/artifact" "${target}.copying"
# attempt atomic move
mv "${target}.copying" "${target}"
echo "all done!"
#!/bin/bash
# Uploads a local ISO image to a vSphere datastore URL using an existing
# VSP API session (VSPSESSION/VSPHOST are provided by drv.vsp.client).
#
# Usage: $0 <upload-url>

# Derive the directory this script lives in so drv.vsp.client can be sourced
# from the same place regardless of the caller's working directory.
if [[ $0 =~ ^(.*)/[^/]+$ ]]; then
    WORKDIR=${BASH_REMATCH[1]}
fi
source "${WORKDIR}/drv.vsp.client"

ISO_FILE="centos.iso"
URL=$1
SESSION="$(cat "$VSPSESSION")"
# FIX: FILE and ISO_FILE were defined but unused; the curl form field had the
# file name hard-coded. Use the variables consistently so changing ISO_FILE
# in one place is enough.
FILE="${PWD}/${ISO_FILE}"

echo "$VSPHOST"
echo "$SESSION"
echo "$URL"

# Multipart PUT of the ISO; the form field name must match the file name.
curl -vvv -k -X PUT \
    -F "${ISO_FILE}=@${FILE}" \
    -H "vmware-api-session-id: ${SESSION}" \
    "$URL"
# Bootstraps an RVM-based Ruby 2.4.1 environment with an 'aws-demo' gemset
# and installs the project's bundle.

# Import the RVM release-signing keys so the installer can be verified.
gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB
# Leading backslash bypasses any shell alias for curl.
\curl -sSL https://get.rvm.io | bash -s stable
source ~/.rvm/scripts/rvm
sudo yum install -y nodejs npm --enablerepo=epel
rvm install 2.4.1
rvm gemset create aws-demo
rvm gemset use aws-demo
gem install bundler
# NOTE(review): leaving and re-entering the directory presumably re-triggers
# RVM's cd hook (picking up .ruby-version/.rvmrc) — confirm this is intended.
cd ..
cd -
bundle
import SwiftUI

/// Simple observable model exposing a single published string.
class MyData: ObservableObject {
    @Published var value: String = "Initial Value"
}

/// Demonstrates observing an ObservableObject from a view.
struct ManualObservableObjectExample: View {
    // FIX: use @StateObject, not @ObservedObject, for a model the view
    // creates itself. With @ObservedObject the `MyData()` initializer runs
    // again every time the parent re-renders, silently resetting `value`;
    // @StateObject ties the object's lifetime to this view's identity.
    @StateObject var data = MyData()

    var body: some View {
        VStack {
            Text("Value: \(data.value)")
            Button("Change Value") {
                self.data.value = "New Value"
            }
        }
    }
}

/// Root navigation list linking to the example screens.
struct RootView: View {
    var body: some View {
        NavigationView {
            List {
                NavigationLink(destination: ManualObservableObjectExample()) {
                    Text("Navigate to ObservableObject Example")
                }
                // Add more NavigationLink items here
            }
        }
    }
}