text
stringlengths 27
775k
|
|---|
import "./style.css";
// Fixed-size backing canvas for the whole game.
const canvas = document.getElementById("canvas");
const ctx = canvas.getContext("2d");
canvas.width = 1024;
canvas.height = 512;
// Pixels of forgiveness applied to collision and out-of-bounds checks.
let leniency = 5;
// Hidden <img> elements used as sprite sources for clouds and the bird.
const cloudImage = document.querySelector("#cloud");
const birdImage = document.querySelector("#bird");
// HTML snippets shown in the instruction banner below the canvas.
const instructions = {
  start: "Press <span class='key'>space</span> to start",
  pause: "Press <span class='key'>p</span> to pause",
  resume: "Press <span class='key'>space</span> to resume"
};
// Downward acceleration added to the player's vertical velocity each frame.
const gravity = 0.25;
/** The bird sprite controlled by the player; affected by gravity each frame. */
class Player {
  constructor(promises) {
    // Fixed-size sprite, spawned dead-centre on the canvas.
    this.width = 50;
    this.height = 50;
    this.vx = 0;
    this.vy = 0;
    this.x = (canvas.width - this.width) / 2;
    this.y = (canvas.height - this.height) / 2;
    this.image = new Image();
    this.image.src = birdImage.src;
    // Expose a promise that settles once the sprite is drawable.
    const loaded = new Promise((resolve) => {
      this.image.onload = () => {
        this.ready = true;
        resolve(true);
      };
    });
    if (promises) promises.push(loaded);
  }

  /** Paint the bird; a no-op until the image has finished loading. */
  draw() {
    if (!this.ready) return;
    ctx.drawImage(this.image, this.x, this.y, this.width, this.height);
  }

  /** Draw, then apply one step of gravity-driven vertical motion. */
  update() {
    this.draw();
    this.vy += gravity;
    this.y += this.vy;
  }
}
/** Full-canvas sky backdrop painted with a vertical gradient. */
class Background {
  constructor() {
    // Always covers the entire canvas.
    this.width = canvas.width;
    this.height = canvas.height;
  }

  /** Fill the canvas: light blue -> near-white -> light blue, top to bottom. */
  draw() {
    const sky = ctx.createLinearGradient(0, 0, 0, canvas.height);
    const stops = [[0, '#9ce6fa'], [0.75, '#e3f9ff'], [1, '#9ce6fa']];
    for (const [offset, color] of stops) {
      sky.addColorStop(offset, color);
    }
    ctx.fillStyle = sky;
    ctx.fillRect(0, 0, this.width, this.height);
  }
}
/** A decorative cloud that drifts right-to-left at a random speed. */
class Cloud {
  constructor(promises) {
    // Horizontal speed is uniform in [-5, -1) px per frame (leftwards).
    this.vx = (Math.random() * -4) - 1;
    this.vy = 0;
    this.image = new Image();
    this.image.src = cloudImage.src;
    // Size and position depend on the image's natural dimensions, so they
    // are only computed once the sprite has loaded.
    const loaded = new Promise((resolve) => {
      this.image.onload = () => {
        this.ready = true;
        // Shrink the sprite by a random factor in [5, 13].
        const reduction = Math.round((Math.random() * 8)) + 5;
        this.width = this.image.width / reduction;
        this.height = this.image.height / reduction;
        // Enter from the right edge at a random height that keeps the
        // whole cloud on screen vertically.
        this.x = canvas.width;
        this.y = Math.random() * (canvas.height - this.height);
        resolve(true);
      };
    });
    if (promises) promises.push(loaded);
  }

  /** Paint the cloud; a no-op until the image has finished loading. */
  draw() {
    if (!this.ready) return;
    ctx.drawImage(this.image, this.x, this.y, this.width, this.height);
  }

  /** Draw, then drift one step to the left. */
  update() {
    this.draw();
    this.x += this.vx;
  }
}
/** A half-canvas-tall pillar scrolling leftwards at a constant speed. */
class Obstacle {
  // Alternates between 'top' and 'bottom' so consecutive obstacles attach
  // to opposite edges of the canvas (consumed by getObstacleY).
  static pos = 'top';

  constructor() {
    this.width = 100;
    this.height = canvas.height / 2;
    // Spawn just past the right edge with a random horizontal stagger of
    // up to one obstacle width.
    this.x = canvas.width + this.width + Math.round(Math.random() * this.width);
    this.y = getObstacleY();
  }

  draw() {
    // Silver horizontal gradient for the pillar body.
    const fill = ctx.createLinearGradient(this.x, 0, this.x + this.width, 0);
    fill.addColorStop(0, '#cdcdcd');
    fill.addColorStop(0.5, '#ffffff');
    fill.addColorStop(1, '#cdcdcd');
    ctx.fillStyle = fill;
    ctx.fillRect(this.x, this.y, this.width, this.height);
    // Slightly wider 10px cap strip on the pillar's inner end: at the top
    // of a bottom pillar (y > 0), otherwise at the bottom of a top pillar.
    const capY = this.y > 0 ? this.y : this.y + this.height - 10;
    ctx.fillRect(this.x - 2, capY, this.width + 4, 10);
  }

  update() {
    this.draw();
    this.x -= 2; // constant scroll speed
  }
}
/**
 * Compute the vertical position for the next obstacle, alternating between
 * a pillar hanging from the top edge and one rising from below the middle.
 * Flips Obstacle.pos on every call.
 *
 * @returns {number} the obstacle's y coordinate (negative for top pillars)
 */
function getObstacleY() {
  // Candidate vertical offsets (px) used to randomise obstacle placement.
  const pos = [25, 50, 75, 100, 125, 150];
  // Uniform index in [0, pos.length - 1]. The previous
  // `Math.ceil(Math.random() * pos.length) - 1` form produced -1 whenever
  // Math.random() returned exactly 0, reading `undefined` and yielding NaN.
  const offset = pos[Math.floor(Math.random() * pos.length)];
  let y;
  if (Obstacle.pos === "top") {
    // Top pillar: shifted up so its cap sits inside the playfield.
    y = -25 - offset;
    Obstacle.pos = "bottom";
  } else {
    // Bottom pillar: starts below the vertical midpoint.
    y = (canvas.height / 2) + offset + 25;
    Obstacle.pos = "top";
  }
  return y;
}
// Shared game state and cached DOM references.
const bg = new Background();
let clouds = [], obstacles = [];
// NOTE(review): despite its name, FPS counts elapsed seconds and doubles as
// the score — confirm before renaming.
let player, FPS, then = Date.now(), reqAnimFrame = undefined, game = {};
let promises = []; // image-load promises awaited before the first draw
const instruction = document.querySelector(".subtitle");
const score = document.querySelector(".score");
const gameOverEl = document.querySelector("#gameOver");
const restartEl = document.querySelector("#restart");
// Hide the in-game HUD and show the "game over" panel centred over the
// canvas, with the final score (FPS holds the seconds-survived count).
function gameOver() {
  score.innerHTML = "";
  instruction.innerHTML = "";
  gameOverEl.style.display = "block";
  // Centre the panel using its rendered size (requires display:block first).
  gameOverEl.style.top = canvas.height / 2 - gameOverEl.offsetHeight / 2 + "px";
  gameOverEl.style.left = canvas.width / 2 - gameOverEl.offsetWidth / 2 + "px";
  const scoreEl = document.querySelector("#score");
  scoreEl.innerHTML = `${FPS}`;
}
// Reset the UI and start a fresh run; also used for the very first boot
// (see the call at the bottom of the file).
function restart() {
  instruction.innerHTML = instructions.start;
  gameOverEl.style.display = "none";
  init();
  animate();
}
restartEl.addEventListener("click", restart);
/**
 * Reset all per-run state (flags, sprites, score, timers) so a restarted
 * game starts clean, then draw the player once its image has loaded.
 */
function init() {
  game = { ready: false, over: false, pause: false };
  bg.draw();
  // Clear image-load promises left over from previous runs; otherwise the
  // array (and the Promise.all below) grows with every restart.
  promises.length = 0;
  clouds = [new Cloud(promises)];
  obstacles = [new Obstacle(), new Obstacle()];
  player = new Player(promises);
  player.draw();
  FPS = 0;
  then = Date.now();
  reqAnimFrame = undefined;
  Promise.all(promises).then(() => {
    player.draw();
  });
}
/**
 * Main game loop, driven by requestAnimationFrame. Skips simulation until
 * the game starts / while paused, stops itself on game over, otherwise
 * advances clouds, obstacles, the player, collisions, and the score.
 */
function animate() {
  reqAnimFrame = requestAnimationFrame(animate);
  // Keep the rAF loop alive but idle until started / while paused.
  if (!game.ready || game.pause) return;
  if (game.over) {
    cancelAnimationFrame(reqAnimFrame);
    reqAnimFrame = undefined;
    gameOver();
    return;
  }
  bg.draw();

  // Advance sprites, then drop the ones that left the screen. (The old
  // code spliced inside forEach, which skipped the element following each
  // removal.)
  clouds.forEach((cloud) => cloud.update());
  clouds = clouds.filter((cloud) => cloud.x + cloud.width > 0);

  obstacles.forEach((obstacle) => {
    obstacle.update();
    // Axis-aligned bounding-box collision with `leniency` px of slack on
    // every edge.
    if (obstacle.x + leniency <= player.x + player.width &&
        obstacle.x + obstacle.width - leniency >= player.x &&
        obstacle.y + leniency <= player.y + player.height &&
        obstacle.y + obstacle.height - leniency >= player.y) {
      game.over = true;
    }
  });
  obstacles = obstacles.filter((obstacle) => obstacle.x + obstacle.width > 0);

  player.update();
  // Leaving the canvas vertically (beyond the leniency margin) ends the run.
  if (player.y < -leniency || player.y + player.height > canvas.height + leniency) {
    game.over = true;
  }
  if (clouds.length < 5) clouds.push(new Cloud());

  // Once per second: bump the score (FPS doubles as seconds survived) and
  // spawn a new obstacle pair every third second.
  const now = Date.now();
  if (now - then >= 1000) {
    FPS++;
    // Fixed: the closing tag was previously written as "<span>".
    score.innerHTML = `SCORE: <span class="highlight key">${FPS}</span>`;
    if (FPS % 3 === 0) {
      obstacles.push(new Obstacle(), new Obstacle());
    }
    then = now;
  }
}
// Keyboard controls: space starts/resumes/flaps (or restarts after game
// over); 'p' toggles pause once the game has started.
addEventListener("keydown", function ({ key }) {
  if (key === " ") {
    game.ready = true;
    game.pause = false;
    player.vy = -5; // flap: instant upward impulse
    if (!game.over) instruction.innerHTML = instructions.pause;
    else restart();
  }
  if ((key === 'p' || key === 'P') && game.ready) {
    // Toggle pause. The original always set pause to true, which made the
    // ternary below dead code (it could only ever show "resume").
    game.pause = !game.pause;
    instruction.innerHTML = game.pause ? instructions.resume : instructions.pause;
  }
});
restart();
|
/**
* Class to model your SIOP request
*/
import ITestModel from './ITestModel';
import { ClaimToken } from '../../lib';
export default class RequestAttestationsOneSelfAssertedResponseOk implements ITestModel {
  public clientId = 'https://requestor.example.com';

  /**
   * Define the model for the request
   */
  public request: any = {
    clientId: this.clientId,
    clientName: 'My relying party',
    clientPurpose: 'Need your VC to provide access',
    redirectUri: this.clientId,
    tosUri: 'https://tosUri.example.com',
    logoUri: 'https://logoUri.example.com',
    attestations: {
      selfIssued: {
        mapping: {
          alias: {
            claim: 'name',
            type: 'string',
            required: false,
            indexed: false
          }
        }
      }
    }
  }

  /**
   * Define the model for the response
   */
  public response: any = {
    iss: 'https://self-issued.me',
    aud: this.clientId,
    nonce: '',
    state: '',
    did: '',
    jti: 'fa8fdc8f-d95b-4237-9c90-9696112f4e19',
    attestations: {
      selfIssued: {
        name: 'Jules Winnfield',
        // NOTE(review): month "21" is not a valid calendar month — confirm
        // this string is intentionally malformed test data.
        birthDate: '1948-21-21T00:00:00'
      }
    }
  }

  // No per-status overrides are needed for this scenario.
  public responseStatus = {};

  /**
   * Return a specific VC
   * @returns always undefined — this model presents no verifiable credentials
   */
  public getVcFromResponse(): ClaimToken | undefined {
    return undefined;
  }

  /**
   * Return all presentations from model
   */
  public getPresentationsFromModel(): { [key: string]: any } {
    return {};
  }

  /**
   * Return all non presented VCs
   */
  public getNonPresentedVcFromModel(): { [key: string]: any } {
    return {};
  }

  /**
   * Return all id tokens from model
   */
  public getIdTokensFromModel(): { [key: string]: any } {
    // The response model defines no id tokens; fall back to an empty object
    // to honour the declared return type, matching the sibling accessors.
    // (The original had a stray, unreachable `{}` statement after the
    // return and would have yielded `undefined`.)
    return this.response.attestations.idTokens ?? {};
  }
}
|
use mysql;
select host, user from user;
-- MySQL here is version 5.7, so create the user with the following command:
create user IF NOT EXISTS huser identified by 'NcUE4mtaJRQr96sR';
-- Grant all privileges on the hmpay database to the new huser account,
-- password NcUE4mtaJRQr96sR:
grant all on hmpay.* to huser@'%' identified by 'NcUE4mtaJRQr96sR' with grant option;
-- This command is required for the new grants to take effect:
flush privileges;
|
# epitech-emacs
Official Emacs configuration for Epitech students.
## Installation
- For local installation, run `./INSTALL.sh local`.
- For system-wide installation, run `sudo ./INSTALL.sh system`.
|
nodemcu-devkit
==============
This is history.
New board is NodeMCU-DEVKIT-V1.0, see also https://github.com/nodemcu/nodemcu-devkit-v1.0
A development kit for NodeMCU firmware.
It makes NodeMCU easier to use. With a micro USB cable, you can connect the NodeMCU devkit to your laptop and flash it without any trouble, just like an Arduino.

It is an open hardware, with ESP-12 core (4MBytes flash version).
Pin map with ESP-12.

It is designed with Altium Designer and is fully open-source. Now everyone can make their own NodeMCU.

|
package fr.xgouchet.musichelper.ui.view;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.View;
import fr.xgouchet.musicgeneration.model.Chord;
import fr.xgouchet.musicgeneration.model.Note;
import fr.xgouchet.musichelper.R;
import fr.xgouchet.musichelper.common.MusicUtils;
import fr.xgouchet.musichelper.model.Key;
/**
* A Staff view draws a Staff with the given Chord as wholes
*
* @author Xavier Gouchet
*/
public class StaffView extends View {

    /** Number of horizontal lines in a staff. */
    public static final int LINES = 5;

    /**
     * Simple constructor to use when creating a view from code.
     *
     * @param context
     *            The Context the view is running in, through which it can
     *            access the current theme, resources, etc.
     */
    public StaffView(final Context context) {
        super(context);
        initStaffView();
    }

    /**
     * Constructor that is called when inflating a view from XML. This is called
     * when a view is being constructed from an XML file, supplying attributes
     * that were specified in the XML file. This version uses a default style of
     * 0, so the only attribute values applied are those in the Context's Theme
     * and the given AttributeSet.
     *
     * The method onFinishInflate() will be called after all children have been
     * added.
     *
     * @param context
     *            The Context the view is running in, through which it can
     *            access the current theme, resources, etc.
     * @param attrs
     *            The attributes of the XML tag that is inflating the view.
     * @see #StaffView(Context, AttributeSet, int)
     */
    public StaffView(final Context context, final AttributeSet attrs) {
        super(context, attrs);
        initStaffView();
        readStaffViewAttributes(attrs);
    }

    /**
     * Perform inflation from XML and apply a class-specific base style. This
     * constructor of View allows subclasses to use their own base style when
     * they are inflating. For example, a Button class's constructor would call
     * this version of the super class constructor and supply R.attr.buttonStyle
     * for defStyle; this allows the theme's button style to modify all of the
     * base view attributes (in particular its background) as well as the Button
     * class's attributes.
     *
     * @param context
     *            The Context the view is running in, through which it can
     *            access the current theme, resources, etc.
     * @param attrs
     *            The attributes of the XML tag that is inflating the view.
     * @param defStyle
     *            The default style to apply to this view. If 0, no style will
     *            be applied (beyond what is included in the theme). This may
     *            either be an attribute resource, whose value will be retrieved
     *            from the current theme, or an explicit style resource.
     * @see #StaffView(Context, AttributeSet)
     */
    public StaffView(final Context context, final AttributeSet attrs,
            final int defStyle) {
        super(context, attrs, defStyle);
        initStaffView();
        readStaffViewAttributes(attrs);
    }

    /**
     * @param chord
     *            the chord to display
     */
    public void setChord(final Chord chord) {
        mChord = chord;
        updateContent();
    }

    /**
     * @param key
     *            the {@link Key} to use to display the chord
     */
    public void setKey(final Key key) {
        mKey = key;
        updateContent();
    }

    /**
     * @see android.view.View#onMeasure(int, int)
     */
    @Override
    protected void onMeasure(final int widthMeasureSpec,
            final int heightMeasureSpec) {
        final int widthSpecMode = MeasureSpec.getMode(widthMeasureSpec);
        final int heightSpecMode = MeasureSpec.getMode(heightMeasureSpec);
        final int widthSpecSize = MeasureSpec.getSize(widthMeasureSpec);
        final int heightSpecSize = MeasureSpec.getSize(heightMeasureSpec);
        // Compute needed width
        float neededWidth, neededHeight;
        // staff height + spaces above and below
        int spaces = mSpacesBeforeStaff + 4 + mSpacesAfterStaff;
        neededHeight = (int) ((spaces * mLineSpacing) + 0.5f);
        // key width
        neededWidth = (mLineSpacing * 4);
        // chord and alterations
        if (mChord != null) {
            if (mChord.hasAlteration()) {
                neededWidth += (mLineSpacing * 2);
            }
            neededWidth += (mLineSpacing * 4);
        }
        neededWidth += getPaddingLeft() + getPaddingRight();
        neededHeight += getPaddingTop() + getPaddingBottom();
        mTopOffsetToBottomLine = getPaddingTop()
                + ((mSpacesBeforeStaff + 4f) * mLineSpacing);
        // Adapt width to constraints
        switch (widthSpecMode) {
        case MeasureSpec.EXACTLY:
            neededWidth = widthSpecSize;
            break;
        case MeasureSpec.AT_MOST:
            neededWidth = Math.min(widthSpecSize, neededWidth);
            break;
        case MeasureSpec.UNSPECIFIED:
        default:
            break;
        }
        // Adapt height to constraints
        switch (heightSpecMode) {
        case MeasureSpec.EXACTLY:
            neededHeight = heightSpecSize;
            break;
        case MeasureSpec.AT_MOST:
            neededHeight = Math.min(heightSpecSize, neededHeight);
            break;
        case MeasureSpec.UNSPECIFIED:
        default:
            break;
        }
        setMeasuredDimension((int) neededWidth, (int) neededHeight);
    }

    /**
     * @see android.view.View#onDraw(android.graphics.Canvas)
     */
    @Override
    protected void onDraw(final Canvas canvas) {
        super.onDraw(canvas);
        // draw the staff
        drawLines(canvas);
        drawKey(canvas);
        // draw the chords
        if (mChord != null) {
            drawChords(canvas);
        }
    }

    /**
     * Draws the staff lines
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     */
    private void drawLines(final Canvas canvas) {
        float offsetY = getPaddingTop();
        // treble key
        offsetY += mSpacesBeforeStaff * mLineSpacing;
        float x1, x2, y;
        x1 = getPaddingLeft();
        x2 = getWidth() - getPaddingRight();
        // Draw Lines
        for (int i = 0; i < LINES; ++i) {
            y = offsetY + (i * mLineSpacing);
            canvas.drawLine(x1, y, x2, y, mLinePaint);
        }
    }

    /**
     * Draws the staff key
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     */
    private void drawKey(final Canvas canvas) {
        int topOffset = getPaddingTop()
                + (int) ((mSpacesBeforeStaff - 2) * mLineSpacing);
        Drawable drawable;
        switch (mKey) {
        case treble:
            drawable = mTrebble;
            break;
        case alto:
            drawable = mAlto;
            break;
        case bass:
            drawable = mBass;
            break;
        default:
            drawable = null;
        }
        if (drawable != null) {
            int height = (int) (mLineSpacing * 8);
            int width = (int) (mLineSpacing * 4);
            drawable.setBounds(getPaddingLeft(), topOffset, getPaddingLeft()
                    + width, topOffset + height);
            drawable.draw(canvas);
        }
    }

    /**
     * Draws the chords on the staff
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     */
    private void drawChords(final Canvas canvas) {
        float offsetX;
        offsetX = getPaddingLeft() + (5f * mLineSpacing);
        if (mChord.hasAlteration()) {
            offsetX += 2 * mLineSpacing;
        }
        drawChord(canvas, mChord, offsetX);
    }

    /**
     * Draws the given chord on the staff at the given offset
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     * @param chord
     *            the chord to draw
     * @param offsetX
     *            the horizontal offset
     */
    private void drawChord(final Canvas canvas, final Chord chord,
            final float offsetX) {
        int offset, prevOffset;
        boolean overlap, overlapAlt, previousAlt, previousOverlap;
        previousAlt = previousOverlap = false;
        // sentinel well below any real staff offset, so the first note
        // never registers as overlapping
        prevOffset = -256;
        for (Note note : chord.getNotes()) {
            offset = MusicUtils.getOffsetFromC4(note) + mKey.c4Offset();
            // adjacent staff positions overlap visually
            overlap = ((offset - prevOffset) <= 1);
            prevOffset = offset;
            // was `&`; operands are side-effect-free booleans so the
            // short-circuiting form is equivalent and idiomatic
            overlapAlt = note.isAltered() && previousAlt && (!previousOverlap);
            drawNote(canvas, note, offsetX, offset, overlap, overlapAlt);
            previousAlt = note.isAltered();
            previousOverlap = overlapAlt;
        }
    }

    /**
     * Draws a single note (head, extra lines, accidentals) on the staff.
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     * @param note
     *            the note to draw
     * @param x
     *            the horizontal position of the chord column
     * @param offset
     *            the note's line offset from the bottom staff line
     * @param overlap
     *            true to shift the note head right of an adjacent note
     * @param overlapAlt
     *            true to shift the accidental left of an adjacent one
     */
    private void drawNote(final Canvas canvas, final Note note, final float x,
            final int offset, final boolean overlap, final boolean overlapAlt) {
        float y = mTopOffsetToBottomLine - (offset * mHalfSpacing);
        float overlapOffset = (overlap ? mLineSpacing : 0);
        drawExtraLines(canvas, x + overlapOffset, offset);
        drawWhole(canvas, x + overlapOffset, y);
        overlapOffset = (overlapAlt ? mLineSpacing : 0);
        if (note.isAltered()) {
            switch (note.getAccidental()) {
            case doubleSharp:
                drawSharp(canvas, x - (2 * mLineSpacing) - overlapOffset, y);
                // fall through: a double sharp draws two sharp glyphs
            case sharp:
                drawSharp(canvas, x - mLineSpacing - overlapOffset, y);
                break;
            case doubleFlat:
                drawFlat(canvas, x - (2 * mLineSpacing) - overlapOffset, y);
                // fall through: a double flat draws two flat glyphs
            case flat:
                drawFlat(canvas, x - mLineSpacing - overlapOffset, y);
                break;
            case natural:
            default:
                break;
            }
        }
    }

    /**
     * Draws extra lines, needed for a note outside of the staff
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     * @param offsetX
     *            the horizontal offset
     * @param offset
     *            the line offset
     */
    private void drawExtraLines(final Canvas canvas, final float offsetX,
            final int offset) {
        float y;
        if (offset < 0) {
            // ledger lines below the staff
            for (int i = 0; i >= (offset - 1); i -= 2) {
                y = mTopOffsetToBottomLine - (i * mHalfSpacing);
                canvas.drawLine(offsetX - mLineSpacing, y, offsetX
                        + mLineSpacing + mHalfSpacing, y, mLinePaint);
            }
        } else if (offset > 8) {
            // ledger lines above the staff
            for (int i = 8; i <= (offset + 1); i += 2) {
                y = mTopOffsetToBottomLine - (i * mHalfSpacing);
                canvas.drawLine(offsetX - mLineSpacing, y, offsetX
                        + mLineSpacing + mHalfSpacing, y, mLinePaint);
            }
        }
    }

    /**
     * Draws a whole note at the given position
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     * @param x
     * @param y
     */
    private void drawWhole(final Canvas canvas, final float x, final float y) {
        mWhole.setBounds((int) (x - mLineSpacing), (int) (y - mLineSpacing),
                (int) (x + mLineSpacing), (int) (y + mLineSpacing));
        mWhole.draw(canvas);
    }

    /**
     * Draws a Sharp at the given position
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     * @param x
     * @param y
     */
    private void drawSharp(final Canvas canvas, final float x, final float y) {
        mSharp.setBounds((int) (x - mLineSpacing), (int) (y - mLineSpacing),
                (int) (x + mLineSpacing), (int) (y + mLineSpacing));
        mSharp.draw(canvas);
    }

    /**
     * Draws a Flat at the given position
     *
     * @param canvas
     *            the canvas on which the view will be drawn
     * @param x
     * @param y
     */
    private void drawFlat(final Canvas canvas, final float x, final float y) {
        mFlat.setBounds((int) (x - mLineSpacing), (int) (y - mLineSpacing),
                (int) (x + mLineSpacing), (int) (y + mLineSpacing));
        mFlat.draw(canvas);
    }

    /**
     * Initialise the Staff view
     */
    private void initStaffView() {
        Resources res = getContext().getResources();
        mDipToPixel = res.getDisplayMetrics().density;
        mLinePaint = new Paint();
        mLinePaint.setColor(Color.BLACK);
        mLinePaint.setStyle(Style.STROKE);
        // Default space above and below staff (for treble key mainly)
        mSpacesAfterStaff = 2;
        mSpacesBeforeStaff = 2;
        mKey = Key.treble;
        if (!isInEditMode()) {
            // keys
            mTrebble = res.getDrawable(R.drawable.treble);
            mAlto = res.getDrawable(R.drawable.alto);
            mBass = res.getDrawable(R.drawable.bass);
            // notes
            mWhole = res.getDrawable(R.drawable.whole);
            // alterations
            mSharp = res.getDrawable(R.drawable.sharp);
            mFlat = res.getDrawable(R.drawable.flat);
        }
    }

    /**
     * Read the attributes taken from XML
     *
     * @param attrs
     *            The attributes of the XML tag that is inflating the view.
     */
    private void readStaffViewAttributes(final AttributeSet attrs) {
        if (isInEditMode()) {
            // NOTE(review): the edit-mode line width (2dp) differs from the
            // runtime default below (1dp), and the stroke width is never
            // applied to mLinePaint here — confirm this is intentional.
            mLineSpacing = mDipToPixel * 16;
            mHalfSpacing = mLineSpacing / 2.0f;
            mLineWidth = mDipToPixel * 2;
            return;
        }
        TypedArray a = getContext().obtainStyledAttributes(attrs,
                R.styleable.StaffView);
        mLineSpacing = a.getDimension(R.styleable.StaffView_lineSpacing, 0);
        if (mLineSpacing == 0) {
            mLineSpacing = mDipToPixel * 16;
        }
        mHalfSpacing = mLineSpacing / 2.0f;
        mLineWidth = a.getDimension(R.styleable.StaffView_lineWidth, 0);
        if (mLineWidth == 0) {
            mLineWidth = mDipToPixel * 1;
        }
        mLinePaint.setStrokeWidth(mLineWidth);
        a.recycle();
    }

    /**
     * Update the calculation and invalidate the view (called when the chord,
     * key or another element changes the view)
     */
    private void updateContent() {
        int highestOffset, lowestOffset, offset;
        lowestOffset = 0;
        highestOffset = 8;
        for (Note note : mChord.getNotes()) {
            offset = MusicUtils.getOffsetFromC4(note) + mKey.c4Offset();
            if (offset < lowestOffset) {
                lowestOffset = offset;
            } else if (offset > highestOffset) {
                highestOffset = offset;
            }
        }
        mSpacesAfterStaff = 1 + Math.max(1, (1 - lowestOffset) / 2);
        mSpacesBeforeStaff = 1 + Math.max(1, (highestOffset - 7) / 2);
        invalidate();
    }

    /** Utility to convert Dip values to Pixel */
    private float mDipToPixel;

    // Drawables
    private Drawable mTrebble, mAlto, mBass;
    private Drawable mWhole;
    private Drawable mSharp, mFlat;

    private Paint mLinePaint;
    private float mLineSpacing, mLineWidth, mHalfSpacing;
    private int mSpacesBeforeStaff, mSpacesAfterStaff;
    private float mTopOffsetToBottomLine;

    // Data
    private Chord mChord;
    private Key mKey;
}
|
package bsttraversal
// BST is a binary search tree node; a nil *BST is a valid empty tree.
type BST struct {
	Value int
	Left  *BST
	Right *BST
}

// InOrderTraverse appends the tree's values to array in sorted order
// (left subtree, node, right subtree) and returns the extended slice.
// All three traversals rely on the nil-receiver guard, so child calls
// need no explicit nil checks.
func (tree *BST) InOrderTraverse(array []int) []int {
	if tree == nil {
		return array
	}
	array = tree.Left.InOrderTraverse(array)
	array = append(array, tree.Value)
	return tree.Right.InOrderTraverse(array)
}

// PreOrderTraverse appends the tree's values in node, left, right order
// and returns the extended slice.
func (tree *BST) PreOrderTraverse(array []int) []int {
	if tree == nil {
		return array
	}
	array = append(array, tree.Value)
	array = tree.Left.PreOrderTraverse(array)
	return tree.Right.PreOrderTraverse(array)
}

// PostOrderTraverse appends the tree's values in left, right, node order
// and returns the extended slice.
func (tree *BST) PostOrderTraverse(array []int) []int {
	if tree == nil {
		return array
	}
	array = tree.Left.PostOrderTraverse(array)
	array = tree.Right.PostOrderTraverse(array)
	return append(array, tree.Value)
}
|
package tech.hostlematedevelopers.hostelmate.adapter
import android.annotation.SuppressLint
import android.content.Context
import android.content.Intent
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.ImageView
import android.widget.LinearLayout
import android.widget.TextView
import androidx.recyclerview.widget.RecyclerView
import com.squareup.picasso.Picasso
import tech.hostlematedevelopers.hostelmate.activity.HostelDetailsActivity
import tech.hostlematedevelopers.hostelmate.R
import tech.hostlematedevelopers.hostelmate.model.Hostel
/**
 * RecyclerView adapter that renders hostel rows and opens the details
 * screen when a row is tapped. The backing list can be swapped via
 * [filterList] to support search/filtering.
 */
class HostelRecyclerAdapter(
    private val context: Context,
    private var hostelList: ArrayList<Hostel>
) :
    RecyclerView.Adapter<HostelRecyclerAdapter.HostelViewHolder>() {

    /** Caches the row widgets so they are looked up only once per row. */
    class HostelViewHolder(view: View) : RecyclerView.ViewHolder(view) {
        val rlRecyclerHostelRow: LinearLayout = view.findViewById(R.id.rlRecyclerHostelRow)
        val tvHostelName: TextView = view.findViewById(R.id.tvHostelName)
        val imgHostelType: ImageView = view.findViewById(R.id.imgHostelType)
        val tvFeePerMonth: TextView = view.findViewById(R.id.tvFeePerMonth)
        val tvRating: TextView = view.findViewById(R.id.tvRating)
        val imgHostelImage: ImageView = view.findViewById(R.id.imgHostelImage)
    }

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): HostelViewHolder {
        val rowView = LayoutInflater.from(parent.context)
            .inflate(R.layout.recycler_hostel_row, parent, false)
        return HostelViewHolder(rowView)
    }

    @SuppressLint("SetTextI18n")
    override fun onBindViewHolder(holder: HostelViewHolder, position: Int) {
        val hostel = hostelList[position]

        // Tapping the row opens the details screen with every hostel field.
        holder.rlRecyclerHostelRow.setOnClickListener {
            val intent = Intent(context, HostelDetailsActivity::class.java).apply {
                putExtra("hostel_name", hostel.Name)
                putExtra("hostel_type", hostel.Type)
                putExtra("hostel_mobile", hostel.Mobile)
                putExtra("hostel_address", hostel.Address)
                putExtra("hostel_fee", hostel.Fee)
                putExtra("hostel_inTime", hostel.Time)
                putExtra("hostel_rating", hostel.Rating)
            }
            context.startActivity(intent)
        }

        // Hostel image, with a fallback icon on load failure.
        Picasso.get()
            .load(hostel.Image)
            .error(R.drawable.hostelmateicon)
            .into(holder.imgHostelImage)

        // Text fields and the gender-type icon.
        holder.tvHostelName.text = hostel.Name
        val typeIcon = if (hostel.Type == "Girls") R.drawable.girl32 else R.drawable.boy32
        holder.imgHostelType.setImageResource(typeIcon)
        holder.tvFeePerMonth.text = "Rs. ${hostel.Fee}/Month"
        holder.tvRating.text = "${hostel.Rating}/5"
    }

    override fun getItemCount(): Int = hostelList.size

    /** Replace the backing list (e.g. after filtering) and redraw all rows. */
    @SuppressLint("NotifyDataSetChanged")
    fun filterList(filteredList: ArrayList<Hostel>) {
        hostelList = filteredList
        notifyDataSetChanged()
    }
}
|
// Node stdlib + vendored deps used to fetch and unpack MaxMind archives.
const fs = require('fs');
const https = require('https');
const zlib = require('zlib');
const tar = require('tar');
const path = require('path');
const { getLicense, getSelectedDbs } = require('../utils');
let licenseKey;
try {
  licenseKey = getLicense();
} catch (e) {
  // Report the lookup failure here; the hard exit happens in the
  // !licenseKey guard below.
  console.error('geolite2: Error retrieving Maxmind License Key');
  console.error(e.message);
}
// Abort with setup instructions when no license key could be resolved.
if (!licenseKey) {
  // Fixed typo in the user-facing message: "don not" -> "do not".
  console.error(`Error: License key is not configured.\n
You need to signup for a _free_ Maxmind account to get a license key.
Go to https://www.maxmind.com/en/geolite2/signup, obtain your key and
put it in the MAXMIND_LICENSE_KEY environment variable.
If you do not have access to env vars, put this config in your package.json
file (at the root level) like this:
"geolite2": {
  // specify the key
  "license-key": "<your license key>",
  // ... or specify the file where key is located:
  "license-file": "maxmind-licence.key"
}
`);
  process.exit(1);
}
// Build the authenticated download URL for one GeoLite2 edition.
const link = (edition) =>
  `https://download.maxmind.com/app/geoip_download?edition_id=${edition}&license_key=${licenseKey}&suffix=tar.gz`;
// Only fetch the databases the user selected in their config.
const selected = getSelectedDbs();
const links = ['City', 'Country', 'ASN']
  .filter((e) => selected.includes(e))
  .map((e) => link(`GeoLite2-${e}`));
// Destination directory for the extracted .mmdb files.
const downloadPath = path.join(__dirname, '..', 'dbs');
if (!fs.existsSync(downloadPath)) fs.mkdirSync(downloadPath);
/**
 * Start an HTTPS download and resolve with a gunzip stream of the body.
 * Rejects on connection errors or non-200 responses (the original piped
 * error pages into gunzip and had no 'error' handler, so any network
 * failure crashed the process or hung silently).
 */
const download = (url) =>
  new Promise((resolve, reject) => {
    https
      .get(url, function (response) {
        if (response.statusCode !== 200) {
          // Drain the response so the socket is released.
          response.resume();
          reject(new Error(`Download failed with HTTP ${response.statusCode}`));
          return;
        }
        resolve(response.pipe(zlib.createGunzip({})));
      })
      .on('error', reject);
  });
console.log('Downloading maxmind databases...');
// For each archive, stream through tar and write only the .mmdb entries.
links.forEach((url) =>
  download(url)
    .then((result) =>
      result.pipe(tar.t()).on('entry', (entry) => {
        if (entry.path.endsWith('.mmdb')) {
          const dstFilename = path.join(downloadPath, path.basename(entry.path));
          entry.pipe(fs.createWriteStream(dstFilename));
        }
      })
    )
    .catch((err) => {
      // Surface the failure and mark the process as failed without
      // aborting downloads that are already in flight.
      console.error(`geolite2: ${err.message}`);
      process.exitCode = 1;
    })
);
|
package ibb
import (
"context"
"encoding/json"
"github.com/ycd/ibb/pkg/resources"
)
// DamOccupancy contains information on the daily and annual
// changes of the occupancy rates of dams in Istanbul.
// https://data.ibb.gov.tr/en/dataset/istanbul-baraj-doluluk-oranlari-verisi
type DamOccupancy struct {
	damOccupancyRecords `json:"result"`
}

// damOccupancyRecords mirrors the "result" envelope of the API payload; it
// is embedded (unexported) so callers reach Records on DamOccupancy directly.
type damOccupancyRecords struct {
	Records []*struct {
		Date          string  `json:"DATE"`
		Rate          float32 `json:"GENERAL_DAM_OCCUPANCY_RATE"`
		ReservedWater int     `json:"GENERAL_DAM_RESERVED_WATER"`
	} `json:"records"`
}
// DamOccupancyRates contains information on the daily and annual
// changes of the occupancy rates of dams in Istanbul.
// https://data.ibb.gov.tr/en/dataset/istanbul-baraj-doluluk-oranlari-verisi
func (c *Client) DamOccupancyRates(ctx context.Context) (*DamOccupancy, error) {
	var do DamOccupancy
	url := resources.BaseURL + resources.DamOccupancyRates + resources.Limit100000
	resp, err := c.get(ctx, url)
	if err != nil {
		return &DamOccupancy{}, err
	}
	// Surface decode failures instead of silently returning a zero-valued
	// struct (the original discarded json.Unmarshal's error).
	if err := json.Unmarshal(resp, &do); err != nil {
		return &DamOccupancy{}, err
	}
	return &do, nil
}
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.partials
import base.LanguageStubs
import org.joda.time.DateTime
import org.scalatestplus.mockito.MockitoSugar.mock
import org.scalatestplus.play.PlaySpec
import play.api.mvc.RequestHeader
import views.html.Layout
import views.html.partials.messageContent
import views.viewmodels.MessageView
// Renders the messageContent partial with a mocked Layout and asserts on the
// raw HTML it produces, with and without a first-read timestamp, in both
// English and Welsh.
class messageContentSpec extends PlaySpec with LanguageStubs {

  "messsageContent template" must {
    "have all information including first read" in new TestClass {
      val messageContent = new messageContent(layout)(
        MessageView(
          Some("Mike"),
          DateTime.parse("2021-02-19T10:29:47.275Z"),
          Some(DateTime.parse("2021-02-19T10:29:47.275Z")),
          "message body"))(messagesEn, requestHeader).toString
      messageContent must include("Mike sent this on 19 February 2021 at")
      messageContent must include("First read on 19 February 2021 at")
      messageContent must include("First viewed on 19 February 2021 at")
      messageContent must include("message body")
    }

    "be handled without first read information" in new TestClass {
      val messageContent =
        new messageContent(layout)(
          MessageView(Some("Mike"), DateTime.parse("2021-02-19T10:29:47.275Z"), None, "message body"))(
          messagesEn,
          requestHeader).toString
      messageContent must include("Mike sent this on 19 February 2021 at")
      messageContent mustNot include("First read")
      messageContent mustNot include("First viewed")
      messageContent must include("message body")
    }

    "have all information including first read in Welsh" in new TestClass {
      val messageContent = new messageContent(layout)(
        MessageView(
          Some("Mike"),
          DateTime.parse("2021-02-19T10:29:47.275Z"),
          Some(DateTime.parse("2021-02-19T10:29:47.275Z")),
          "message body"))(messagesCy, requestHeader).toString
      messageContent must include("Mike wnaeth anfon y neges hon ar 19 Chwefror 2021 am")
      messageContent must include("Darllenwyd am y tro cyntaf ar 19 Chwefror 2021 am")
      messageContent must include("Gwelwyd am y tro cyntaf ar 19 Chwefror 2021 am")
      messageContent must include("message body")
    }

    "be handled without first read information in Welsh" in new TestClass {
      val messageContent =
        new messageContent(layout)(
          MessageView(Some("Mike"), DateTime.parse("2021-02-19T10:29:47.275Z"), None, "message body"))(
          messagesCy,
          requestHeader).toString
      messageContent must include("Mike wnaeth anfon y neges hon ar 19 Chwefror 2021 am")
      messageContent mustNot include("Darllenwyd am y tro cyntaf ar")
      messageContent mustNot include("Gwelwyd am y tro cyntaf ar")
      messageContent must include("message body")
    }
  }

  // Shared fixtures: a mocked request header and layout template.
  class TestClass {
    implicit val requestHeader: RequestHeader = mock[RequestHeader]
    val layout = mock[Layout]
  }
}
|
#!/usr/bin/env python3
import sys
import re
def case_print(f, c, s):
    """Write one C switch case to *f* mapping constant *c* to string *s*."""
    f.write('\tcase {}:\n\t\treturn "{}";\n'.format(c, s))
# Regexes that pull format / modifier identifiers out of drm_fourcc.h.
info = {
    'fmt': r'^#define (\w+)\s*(?:\\$\s*)?fourcc_code',
    'basic_pre': r'^#define (I915_FORMAT_MOD_\w+)\b',
    'basic_post': r'^#define (DRM_FORMAT_MOD_(?:INVALID|LINEAR|SAMSUNG|QCOM|VIVANTE|NVIDIA|BROADCOM|ALLWINNER)\w*)\s',
}
# sys.argv[1]: path to drm_fourcc.h. Read it once, then replace each regex
# with the list of identifiers it matched.
with open(sys.argv[1], 'r') as f:
    data = f.read()
    for k, v in info.items():
        info[k] = re.findall(v, data, flags=re.M)
# sys.argv[2]: generated C source mapping enum values to their names.
with open(sys.argv[2], 'w') as f:
    f.write('''\
#include <stdint.h>
#include <drm_fourcc.h>
#include "tables.h"
const char *format_str(uint32_t format)
{
switch (format) {
case DRM_FORMAT_INVALID:
return "INVALID";
''')
    # One case per DRM_FORMAT_* constant, label stripped of its prefix.
    for ident in info['fmt']:
        case_print(f, ident, ident[len('DRM_FORMAT_'):])
    f.write('''\
default:
return "Unknown";
}
}
const char *basic_modifier_str(uint64_t modifier)
{
switch (modifier) {
''')
    # Modifier names are emitted verbatim (constant and string identical).
    for ident in info['basic_pre'] + info['basic_post']:
        case_print(f, ident, ident)
    f.write('''\
default:
return "Unknown";
}
}
''')
|
# DCC605 File System Shell (DCC-FSSHELL).
1. Pode ser feito em dupla
1. Domingo Depois da Terceira Prova
Neste trabalho você vai implementar um shell para o sistema ext2. Para
realizar seu TP recomendo um bom entendimento do
[Fast File System](http://pages.cs.wisc.edu/~remzi/OSTEP/file-ffs.pdf). O mesmo
é a base do ext2. Além disto, a seção 42.2 do OSTEP deve ser útil, leia a mesma
[aqui](http://pages.cs.wisc.edu/~remzi/OSTEP/file-journaling.pdf).
## Sobre o ext2
O Extended File System 2 (ext2) é um sistema de arquivos criado para sistemas
Linux em meados de 1993. Visando corrigir alguns problemas da primeira versão
do Extended File System (simplesmente ext), o sistema de arquivos ext2 tem um
esquema de implementação mais próximo ao Berkeley Fast File System (FFS).
Embora já tenha caído um pouco em desuso, o ext2 é um sistema de arquivos com
bastante influência. O desenvolvimento do ext2 teve como um dos principais
objetivos a extensibilidade do sistema, sendo assim o mesmo serviu como base
para o ext3 e ext4 que são mais populares hoje em dia.
**Layout dos inodes, grupos e blocos**
Seguindo o modelo do FFS, um disco formatado com um sistema de arquivos ext2
terá um layout de blocos similar ao da figura abaixo (note que existem
problemas de escala na mesma, é apenas um esquema):
```
Layout geral:
* Bloco de Boot --> Utilizado para iniciar o sistema, sempre ocupa
uma posição fixa no início do disco.
* Grupo de Blocos i --> Cada grupo de blocos é utilizado para guardar
arquivos. Fazemos uso de mais de um grupo pois
discos têm vários cilindros. Então guardar
artigos relacionados em um mesmo bloco ajuda.
1-bloco
+-------+-------+-------+-------+-------+-------+-------+-------+-------+-----
| bloco | | |
| de | grupo de blocos 0 | grupo de blocos 1 | ...
| boot | | |
+-------+-------+-------+-------+-------+-------+-------+-------+-------+-----
/ \
/ \
/ \
/ \
/ \
/ \
/ \
/ grupo de blocos i \
+-------+-------+-------+-------+-------+-------+-------+-------+-------+------
| super | descritores | | | tabela |
| bloco | do |d-bmap |i-bmap | de inode | blocos de dados...
| const | grupo | | | inodes |
+-------+-------+-------+-------+-------+-------+-------+-------+-------+------
1-bloco n-blocos 1-bloco 1-bloco n-blocos n-blocos
Layout de um grupo:
* Super Bloco --> Contém meta-dados do sistema de arquivos. Uma
cópia em cada grupo.
* Descritores do Grupo --> Meta-dados do grupo.
* Data Bitmap (d-bmap) --> Mapa de bits de dados livres
* Inode Bitmap (i-bmap) --> Mapa de bits de inodes livres
* Tabela de Inodes --> Contém os inodes (metadados) do sistema de
arquivos. Cada arquivo tem apenas 1 inode.
Através de links, um mesmo inode pode
mapear para 2 caminhos.
* Bloco de Dados --> Os dados dos arquivos e diretórios em si.
```
## Structs úteis
Para entender melhor o ext2, vamos dar uma olhada no cabeçalho do Linux
que descreve o sistema de arquivos. O mesmo pode ser encontrado
[aqui](http://github.com/torvalds/linux/blob/master/fs/ext2/ext2.h).
Antes de iniciar, temos que entender os tipos `__le32` e `__le16`. Como o Linux
é cross-platform, tipos genéricos para qualquer arquitetura são necessários.
Esses dois em particular são *unsigned ints* de 32 e 16 bits. Os mesmos sempre
vão ser representados em *little endian*.
Como o PC que vai corrigir o TP é little endian (o seu também deve ser),
pode usar um atalho como:
```c
typedef int __le32;
```
**Super bloco**
```c
struct ext2_super_block {
__le32 s_inodes_count; /* Inodes count */
__le32 s_blocks_count; /* Blocks count */
__le32 s_r_blocks_count; /* Reserved blocks count */
__le32 s_free_blocks_count; /* Free blocks count */
__le32 s_free_inodes_count; /* Free inodes count */
__le32 s_first_data_block; /* First Data Block */
__le32 s_log_block_size; /* Block size */
// . . .
__le32 s_blocks_per_group; /* # Blocks per group */
// . . .
__le32 s_inodes_per_group; /* # Inodes per group */
// . . .
__le16 s_magic; /* Magic signature */
__le32 s_first_ino; /* First non-reserved inode */
__le16 s_inode_size; /* size of inode structure */
// . . .
}
```
**Descritores de Grupo**
```c
struct ext2_group_desc
{
__le32 bg_block_bitmap; /* Blocks bitmap block */
__le32 bg_inode_bitmap; /* Inodes bitmap block */
__le32 bg_inode_table; /* Inodes table block */
__le16 bg_free_blocks_count; /* Free blocks count */
__le16 bg_free_inodes_count; /* Free inodes count */
__le16 bg_used_dirs_count; /* Directories count */
__le16 bg_pad;
__le32 bg_reserved[3];
};
```
Para saber o número de grupos no ext2 usamos a seguinte abordagem. A mesma faz
uso dos campos do superbloco.
```c
/* calculate number of block groups on the disk */
unsigned int group_count = 1 + (super.s_blocks_count-1) / super.s_blocks_per_group;
/* calculate size of the group descriptor list in bytes */
unsigned int descr_list_size = group_count * sizeof(struct ext2_group_descr);
```
Para ler os descritores do grupo, primeiramente você deve calcular o offset do
inicio do disco. Como o disco tem 1024 bytes reservados no inicio e o primeiro
bloco é um superbloco, o código para ler o descritor é tal como:
```c
struct ext2_group_descr group_descr;
/* position head above the group descriptor block */
/* sd --> storage device, no nosso caso um arquivo */
lseek(sd, 1024 + block_size, SEEK_SET);
read(sd, &group_descr, sizeof(group_descr));
```
O descritor do grupo vai conter meta-dados para identificar o data e inode
bitmap daquele grupo. Uma macro boa de se ter indica qual o local do disco de
um dado bloco:
```c
/* location of the super-block in the first group */
#define BASE_OFFSET 1024
#define BLOCK_OFFSET(block) (BASE_OFFSET + (block-1)*block_size)
```
**INodes**
```c
/*
* Structure of an inode on the disk
*/
struct ext2_inode {
__le16 i_mode; /* File mode */
__le16 i_uid; /* Low 16 bits of Owner Uid */
__le32 i_size; /* Size in bytes */
__le32 i_atime; /* Access time */
__le32 i_ctime; /* Creation time */
__le32 i_mtime; /* Modification time */
__le32 i_dtime; /* Deletion Time */
__le16 i_gid; /* Low 16 bits of Group Id */
__le16 i_links_count; /* Links count */
__le32 i_blocks; /* Blocks count */
__le32 i_flags; /* File flags */
__le32 i_block[EXT2_N_BLOCKS]; /* Pointers to blocks */
// . . .
};
```
**Diretórios**
```c
struct ext2_dir_entry {
__le32 inode; /* Inode number */
__le16 rec_len; /* Directory entry length */
__le16 name_len; /* Name length */
char name[]; /* File name, up to EXT2_NAME_LEN */
};
```
## Criando imagens
O script de teste já cria as imagens que você deve trabalhar em cima. O mesmo
faz uso dos comandos `dd` e `mkfs` discutidos em sala. Segue alguns exemplos:
**Comando dd - criando imagem zerada**
```
$ filename=fs-0x00dcc605-ext2-10240.img
$ dd if=/dev/zero of=$filename bs=1024 count=10240
```
```
10240+0 records in
10240+0 records out
10485760 bytes (10 MB, 10 MiB) copied, 0.0242941 s, 432 MB/s
```
**Comando mkfs.ext2 - criando um disco de 1mb sem superblock backup**
```
$ mkfs.ext2 fs-0x00dcc605-ext2-10240.img
```
```
mke2fs 1.42.13 (17-May-2015)
Creating filesystem with 10240 1k blocks and 2560 inodes
Filesystem UUID: 24c464b5-2e6c-4b6f-8309-d3454d683858
Superblock backups stored on blocks:
8193
Allocating group tables: done
Writing inode tables: done
Writing superblocks and filesystem accounting information: done
```
## Comandos que você deve realizar
Todos os comandos abaixo devem ser re-implementados utilizando os inodes.
Isto é, não utilize os programas do Unix.
1. **cd** Caminhar para um diretório. Inicie o programa na raiz.
1. **ls** Seu shell deve executar um ls e listar todos os arquivos do
diretório atual.
1. **stat** Pega os metadados de um arquivo/diretório.
1. **find** Imprime toda a árvore de pastas/arquivos iniciando do diretório
atual.
1. **sb** Lê os dados do super-bloco.
## Entrega
Um .c e um .h (caso precise) que roda o shell corrigindo. Chame seu programa
de `dcc_fs_shell`.
A entrega será pelo moodle. Desta vez como é um único arquivo faz menos sentido
um repositório no git. Porém, caso deseje utilizar, pode fazer a entrega pelo
git.
## Rodando seu programa
```sh
$ ./dcc_fs_shell uma_imagem.img
```
|
-- Inclusive integer range, expressed point-free via the Prelude's
-- enumeration primitive ([s..e] desugars to enumFromTo s e).
range :: Int -> Int -> [Int]
range = enumFromTo
-- Hand-rolled recursive equivalent of 'range': cons the lower bound and
-- recurse until it passes the upper bound.
range' :: Int -> Int -> [Int]
range' s e =
  if s > e
    then []
    else s : range' (s + 1) e
|
#!/bin/bash
# Submit the dogs-vs-cats preprocessing job to a local 4-core Spark.
#
# Bug fix: the original argument list ended at --seed=1 with no trailing
# backslash, so the final "$@" line executed the script's first extra CLI
# argument as a *separate command* instead of forwarding the extra arguments
# to spark-submit.
"$SPARK_HOME/bin/spark-submit" \
  --class "preprocessingUtils.main" \
  --master local[4] \
  target/scala-2.10/preprocess-assembly-0.3.jar \
  --outdir="../data/dogs_vs_cats/" \
  --saveToHDFS=false \
  --nPartitions=4 \
  --dataFormat=text \
  --sparse=false \
  --textDataFormat=spaces \
  --separateTrainTestFiles=false \
  --proportionTest=0.2 \
  --dataFile="../data/dogs_vs_cats_n5000.txt" \
  --centerFeatures=true \
  --scaleFeatures=true \
  --centerResponse=false \
  --scaleResponse=false \
  --outputTrainFileName="dogs_vs_cats_small_train" \
  --outputTestFileName="dogs_vs_cats_small_test" \
  --twoOutputClasses=false \
  --seed=1 \
  "$@"
|
/*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package typed_test
import (
"testing"
"sigs.k8s.io/structured-merge-diff/typed"
)
// unionParser parses the test schema used below: a single "union" map type
// with two union groups — one *deduced* group keyed by "discriminator"
// (fields one/two/three, deduceInvalidDiscriminator: true) and one
// *non-deduced* group keyed by "letter" (fields a/b).
var unionParser = func() typed.ParseableType {
	parser, err := typed.NewParser(`types:
- name: union
  map:
    fields:
    - name: discriminator
      type:
        scalar: string
    - name: one
      type:
        scalar: numeric
    - name: two
      type:
        scalar: numeric
    - name: three
      type:
        scalar: numeric
    - name: letter
      type:
        scalar: string
    - name: a
      type:
        scalar: numeric
    - name: b
      type:
        scalar: numeric
    unions:
    - discriminator: discriminator
      deduceInvalidDiscriminator: true
      fields:
      - fieldName: one
        discriminatorValue: One
      - fieldName: two
        discriminatorValue: TWO
      - fieldName: three
        discriminatorValue: three
    - discriminator: letter
      fields:
      - fieldName: a
        discriminatorValue: A
      - fieldName: b
        discriminatorValue: b`)
	if err != nil {
		// Schema is a compile-time constant; failing to parse it is programmer error.
		panic(err)
	}
	return parser.Type("union")
}()
// TestNormalizeUnions feeds (old, new) object pairs through NormalizeUnions
// and checks the result against an expected object. For the deduced union the
// discriminator is filled in / updated automatically; for the non-deduced
// ("letter") union objects pass through unchanged.
func TestNormalizeUnions(t *testing.T) {
	tests := []struct {
		name string
		old  typed.YAMLObject
		new  typed.YAMLObject
		out  typed.YAMLObject
	}{
		{
			name: "nothing changed, add discriminator",
			old:  `{"one": 1}`,
			new:  `{"one": 1}`,
			out:  `{"one": 1, "discriminator": "One"}`,
		},
		{
			name: "nothing changed, non-deduced",
			old:  `{"a": 1}`,
			new:  `{"a": 1}`,
			out:  `{"a": 1}`,
		},
		{
			name: "proper union update, setting discriminator",
			old:  `{"one": 1}`,
			new:  `{"two": 1}`,
			out:  `{"two": 1, "discriminator": "TWO"}`,
		},
		{
			name: "proper union update, non-deduced",
			old:  `{"a": 1}`,
			new:  `{"b": 1}`,
			out:  `{"b": 1}`,
		},
		{
			name: "proper union update from not-set, setting discriminator",
			old:  `{}`,
			new:  `{"two": 1}`,
			out:  `{"two": 1, "discriminator": "TWO"}`,
		},
		{
			name: "proper union update from not-set, non-deduced",
			old:  `{}`,
			new:  `{"b": 1}`,
			out:  `{"b": 1}`,
		},
		{
			name: "remove union, with discriminator",
			old:  `{"one": 1}`,
			new:  `{}`,
			out:  `{}`,
		},
		{
			name: "remove union and discriminator",
			old:  `{"one": 1, "discriminator": "One"}`,
			new:  `{}`,
			out:  `{}`,
		},
		{
			name: "remove union, not discriminator",
			old:  `{"one": 1, "discriminator": "One"}`,
			new:  `{"discriminator": "One"}`,
			out:  `{"discriminator": "One"}`,
		},
		{
			name: "remove union, not discriminator, non-deduced",
			old:  `{"a": 1, "letter": "A"}`,
			new:  `{"letter": "A"}`,
			out:  `{"letter": "A"}`,
		},
		{
			name: "change discriminator, nothing else",
			old:  `{"discriminator": "One"}`,
			new:  `{"discriminator": "random"}`,
			out:  `{"discriminator": "random"}`,
		},
		{
			name: "change discriminator, nothing else, non-deduced",
			old:  `{"letter": "A"}`,
			new:  `{"letter": "b"}`,
			out:  `{"letter": "b"}`,
		},
		{
			name: "change discriminator, nothing else, it drops other field",
			old:  `{"discriminator": "One", "one": 1}`,
			new:  `{"discriminator": "random", "one": 1}`,
			out:  `{"discriminator": "random"}`,
		},
		{
			name: "change discriminator, nothing else, it drops other field, non-deduced",
			old:  `{"letter": "A", "a": 1}`,
			new:  `{"letter": "b", "a": 1}`,
			out:  `{"letter": "b"}`,
		},
		{
			name: "remove discriminator, nothing else",
			old:  `{"discriminator": "One", "one": 1}`,
			new:  `{"one": 1}`,
			out:  `{"one": 1, "discriminator": "One"}`,
		},
		{
			name: "remove discriminator, nothing else, non-deduced",
			old:  `{"letter": "A", "a": 1}`,
			new:  `{"a": 1}`,
			out:  `{"a": 1}`,
		},
		{
			name: "remove discriminator, add new field",
			old:  `{"discriminator": "One", "one": 1}`,
			new:  `{"two": 1}`,
			out:  `{"two": 1, "discriminator": "TWO"}`,
		},
		{
			name: "remove discriminator, add new field, non-deduced",
			old:  `{"letter": "A", "a": 1}`,
			new:  `{"b": 1}`,
			out:  `{"b": 1}`,
		},
		{
			name: "both fields removed",
			old:  `{"one": 1, "two": 1}`,
			new:  `{}`,
			out:  `{}`,
		},
		{
			name: "one field removed",
			old:  `{"one": 1, "two": 1}`,
			new:  `{"one": 1}`,
			out:  `{"one": 1, "discriminator": "One"}`,
		},
		{
			name: "one field removed, non-deduced",
			old:  `{"a": 1, "b": 1}`,
			new:  `{"a": 1}`,
			out:  `{"a": 1}`,
		},
		// These use-cases shouldn't happen:
		{
			name: "one field removed, discriminator unchanged",
			old:  `{"one": 1, "two": 1, "discriminator": "TWO"}`,
			new:  `{"one": 1, "discriminator": "TWO"}`,
			out:  `{"one": 1, "discriminator": "One"}`,
		},
		{
			name: "one field removed, discriminator unchanged, non-deduced",
			old:  `{"a": 1, "b": 1, "letter": "b"}`,
			new:  `{"a": 1, "letter": "b"}`,
			out:  `{"a": 1, "letter": "b"}`,
		},
		{
			name: "one field removed, discriminator added",
			old:  `{"two": 2, "one": 1}`,
			new:  `{"one": 1, "discriminator": "TWO"}`,
			out:  `{"discriminator": "TWO"}`,
		},
		{
			name: "one field removed, discriminator added, non-deduced",
			old:  `{"b": 2, "a": 1}`,
			new:  `{"a": 1, "letter": "b"}`,
			out:  `{"letter": "b"}`,
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			// Parse all three YAML fixtures against the union schema.
			old, err := unionParser.FromYAML(test.old)
			if err != nil {
				t.Fatalf("Failed to parse old object: %v", err)
			}
			new, err := unionParser.FromYAML(test.new)
			if err != nil {
				t.Fatalf("failed to parse new object: %v", err)
			}
			out, err := unionParser.FromYAML(test.out)
			if err != nil {
				t.Fatalf("failed to parse out object: %v", err)
			}
			got, err := old.NormalizeUnions(new)
			if err != nil {
				t.Fatalf("failed to normalize unions: %v", err)
			}
			// Compare normalized result with the expected object.
			comparison, err := out.Compare(got)
			if err != nil {
				t.Fatalf("failed to compare result and expected: %v", err)
			}
			if !comparison.IsSame() {
				t.Errorf("Result is different from expected:\n%v", comparison)
			}
		})
	}
}
// TestNormalizeUnionError lists (old, new) pairs that must FAIL union
// normalization — e.g. multiple members of the same union set, or a
// discriminator that contradicts the fields sent by the client.
func TestNormalizeUnionError(t *testing.T) {
	tests := []struct {
		name string
		old  typed.YAMLObject
		new  typed.YAMLObject
	}{
		{
			name: "dumb client update, no discriminator",
			old:  `{"one": 1}`,
			new:  `{"one": 2, "two": 1}`,
		},
		{
			name: "new object has three of same union set",
			old:  `{"one": 1}`,
			new:  `{"one": 2, "two": 1, "three": 3}`,
		},
		{
			name: "dumb client doesn't update discriminator",
			old:  `{"one": 1, "discriminator": "One"}`,
			new:  `{"one": 2, "two": 1, "discriminator": "One"}`,
		},
		{
			name: "client sends new field that and discriminator change",
			old:  `{}`,
			new:  `{"one": 1, "discriminator": "Two"}`,
		},
		{
			name: "client sends new fields that don't match discriminator change",
			old:  `{}`,
			new:  `{"one": 1, "two": 1, "discriminator": "One"}`,
		},
		{
			name: "old object has two of same union set",
			old:  `{"one": 1, "two": 2}`,
			new:  `{"one": 2, "two": 1}`,
		},
		{
			name: "old object has two of same union, but we add third",
			old:  `{"discriminator": "One", "one": 1, "two": 1}`,
			new:  `{"discriminator": "One", "one": 1, "two": 1, "three": 1}`,
		},
		{
			name: "one field removed, 2 left, discriminator unchanged",
			old:  `{"one": 1, "two": 1, "three": 1, "discriminator": "TWO"}`,
			new:  `{"one": 1, "two": 1, "discriminator": "TWO"}`,
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			old, err := unionParser.FromYAML(test.old)
			if err != nil {
				t.Fatalf("Failed to parse old object: %v", err)
			}
			new, err := unionParser.FromYAML(test.new)
			if err != nil {
				t.Fatalf("failed to parse new object: %v", err)
			}
			// Normalization must reject every pair in this table.
			_, err = old.NormalizeUnions(new)
			if err == nil {
				t.Fatal("Normalization should have failed, but hasn't.")
			}
		})
	}
}
|
from __future__ import absolute_import
from todo.commands.toggle import ToggleCommand
class UncheckCommand(ToggleCommand):
    """Toggle command that marks a todo item as *not* done."""

    def check_by_item(self, item):
        # Work on a shallow copy so the caller's item is never mutated.
        result = item.copy()
        result['done'] = False
        return result


Uncheck = UncheckCommand()
|
<?php
/**
* Created by PhpStorm.
* User: Jozef Môstka
* Date: 29.5.2016
* Time: 9:54
*/
namespace testMagicMethods{
/**
 * Exercises PHP magic methods (__get, __set, __call).
 *
 * @property string $test  virtual property backed by $_test
 * @method mixed testCall($test)  echoes back its first argument
 */
class Foo
{
    /** @var string backing storage for the virtual "test" property */
    public $_test;

    public function __construct($test)
    {
        $this->_test = $test;
    }

    /** Plain (non-magic) method used as a control in the assertions below. */
    public function tetsFunc()
    {
        return 5;
    }

    public function __get($name)
    {
        switch ($name) {
            case "test":
                return $this->_test;
        }
        // Bug fix: the original returned the undeclared constant `undefined`,
        // which emits a warning on PHP 7 and throws a fatal Error on PHP 8.
        // Unknown virtual properties now read as null.
        return null;
    }

    public function __set($name, $value)
    {
        switch ($name) {
            case "test":
                $this->_test = $value;
        }
    }

    public function __call($name, $arguments)
    {
        switch ($name) {
            case "testCall":
                return $arguments[0];
        }
        return null;
    }
}
// Exercise the class above; assert_() is supplied by the enclosing harness.
$foo = new Foo("12345");
assert_($foo->_test, "12345", "12345");       // direct field access
$foo->_test = "6789";
assert_($foo->_test, "6789", "6789");
assert_($foo->tetsFunc(), 5, "testfunc");     // plain method call
assert_($foo->test, "6789", "foo->test 1");   // __get
$foo->test = "98765";                         // __set
assert_($foo->test, "98765", "98765");
assert_($foo->testCall(5), 5, "testCall(5)"); // __call
}
|
# Reopens Module to snapshot core reflection methods under `__double_underscore__`
# aliases, so this library keeps working even if user code or another library
# later redefines the public names.
class Module
  unless method_defined?(:__name__)
    alias_method :__name__, :name
  end

  if method_defined?(:singleton_class?)
    alias_method :__singleton_class__?, :singleton_class?
  else
    # Fallback for Rubies without Module#singleton_class?.
    # NOTE(review): heuristic — assumes a singleton class never appears as the
    # first entry of its own ancestors; confirm on the targeted Ruby versions.
    def __singleton_class__?
      self != Class && ancestors.first != self
    end
  end

  unless method_defined?(:__singleton_class__)
    alias_method :__singleton_class__, :singleton_class
  end

  unless method_defined?(:__include__?)
    alias_method :__include__?, :include?
  end

  unless method_defined?(:__instance_methods__)
    alias_method :__instance_methods__, :instance_methods
  end

  unless method_defined?(:__public_instance_methods__)
    alias_method :__public_instance_methods__, :public_instance_methods
  end

  unless method_defined?(:__protected_instance_methods__)
    alias_method :__protected_instance_methods__, :protected_instance_methods
  end

  unless method_defined?(:__private_instance_methods__)
    alias_method :__private_instance_methods__, :private_instance_methods
  end
end
|
// Run this script in strict mode to catch accidental globals and other
// silently-ignored errors early.
'use strict';

// Register the 'chat' AngularJS module with no extra dependencies;
// controllers/services are attached to it elsewhere.
angular.module('chat', []);
|
import type { Vtt } from '../types'
// Returns the pageNumber-th page (1-based) of `array`, with at most
// pageSize items per page. Adapted from: https://stackoverflow.com/a/42761393
export function paginator(array: Vtt[], pageNumber: number, pageSize: number) {
  const start = (pageNumber - 1) * pageSize
  return array.slice(start, start + pageSize)
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.fusion.fusionbackend.service;
import com.google.common.collect.Sets;
import io.fusion.fusionbackend.exception.ResourceNotFoundException;
import io.fusion.fusionbackend.model.QuantityType;
import io.fusion.fusionbackend.model.Unit;
import io.fusion.fusionbackend.repository.QuantityTypeRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Set;
/**
 * CRUD and base-unit association management for {@link QuantityType} entities.
 * Every public method runs inside a transaction (class-level @Transactional).
 */
@Service
@Transactional
public class QuantityTypeService {
    private final QuantityTypeRepository quantityTypeRepository;
    private final UnitService unitService;

    /**
     * @param quantityTypeRepository repository for quantity types
     * @param unitService            unit lookups; injected @Lazy to break the
     *                               circular QuantityTypeService/UnitService dependency
     */
    @Autowired
    public QuantityTypeService(QuantityTypeRepository quantityTypeRepository, @Lazy UnitService unitService) {
        this.quantityTypeRepository = quantityTypeRepository;
        this.unitService = unitService;
    }

    /** Returns all quantity types in the repository's default sort order. */
    public Set<QuantityType> getAllQuantityTypes() {
        return Sets.newLinkedHashSet(quantityTypeRepository.findAll(QuantityTypeRepository.DEFAULT_SORT));
    }

    /**
     * Looks up a quantity type by id.
     *
     * @throws ResourceNotFoundException when no quantity type with that id exists
     */
    public QuantityType getQuantityType(final Long quantityTypeId) {
        return quantityTypeRepository.findById(quantityTypeId).orElseThrow(ResourceNotFoundException::new);
    }

    /**
     * Persists a new quantity type and, when a positive baseUnitId is given,
     * immediately links it to that base unit.
     */
    public QuantityType createQuantityType(final QuantityType newQuantityType, final Long baseUnitId) {
        final QuantityType persistedQuantityType = quantityTypeRepository.save(newQuantityType);
        if (baseUnitId != null && baseUnitId > 0) {
            return setQuantityTypeBaseUnit(persistedQuantityType.getId(), baseUnitId);
        } else {
            return persistedQuantityType;
        }
    }

    /**
     * Copies the source's fields onto the persisted entity.
     * NOTE(review): no explicit save — presumably relies on JPA dirty checking
     * within the surrounding transaction; confirm copyFrom covers all fields.
     */
    public QuantityType updateQuantityType(final Long unitId, final QuantityType sourceQuantityType) {
        final QuantityType targetQuantityType = getQuantityType(unitId);
        targetQuantityType.copyFrom(sourceQuantityType);
        return targetQuantityType;
    }

    /** Deletes the quantity type; throws ResourceNotFoundException if absent. */
    public void deleteQuantityType(final Long id) {
        quantityTypeRepository.delete(getQuantityType(id));
    }

    /** Sets the base unit of a quantity type; both entities must already exist. */
    public QuantityType setQuantityTypeBaseUnit(final Long quantityTypeId, final Long baseUnitId) {
        final QuantityType quantityType = getQuantityType(quantityTypeId);
        final Unit baseUnit = unitService.getUnit(baseUnitId);
        quantityType.setBaseUnit(baseUnit);
        return quantityType;
    }
}
|
package testutils
import (
"fmt"
"net"
"sync"
"time"
"github.com/eclipse/paho.mqtt.golang/packets"
"github.com/superscale/spire/mqtt"
)
// Pipe returns two MQTT sessions connected through an in-memory net.Pipe,
// one per end, both configured with a 1-second timeout. Useful for driving
// broker/client code in tests without real sockets.
func Pipe() (*mqtt.Session, *mqtt.Session) {
	a, b := net.Pipe()
	t := time.Second * 1
	return mqtt.NewSession(a, t), mqtt.NewSession(b, t)
}
// PubSubRecorder records every topic/payload pair delivered to it so tests
// can assert on published messages. Safe for concurrent use.
type PubSubRecorder struct {
	Topics   []string
	Messages []interface{}

	l sync.RWMutex // guards Topics and Messages
}
// NewPubSubRecorder returns an empty recorder ready for use.
func NewPubSubRecorder() *PubSubRecorder {
	return &PubSubRecorder{
		Topics:   []string{},
		Messages: []interface{}{},
	}
}
// HandleMessage implements mqtt.Subscriber by appending the topic and payload
// under the write lock. It never fails.
func (r *PubSubRecorder) HandleMessage(topic string, payload interface{}) error {
	r.l.Lock()
	defer r.l.Unlock()

	r.Topics = append(r.Topics, topic)
	r.Messages = append(r.Messages, payload)
	return nil
}
// Count returns how many messages have been recorded so far.
func (r *PubSubRecorder) Count() int {
	r.l.RLock()
	defer r.l.RUnlock()

	return len(r.Topics)
}
// Get returns the i-th recorded topic/message pair, or ("", nil) when i is
// out of range.
func (r *PubSubRecorder) Get(i int) (string, interface{}) {
	r.l.RLock()
	defer r.l.RUnlock()

	if i < len(r.Topics) && i < len(r.Messages) {
		return r.Topics[i], r.Messages[i]
	}
	return "", nil
}
// First returns the earliest recorded topic/message pair, or ("", nil) when
// nothing has been recorded.
func (r *PubSubRecorder) First() (string, interface{}) {
	return r.Get(0)
}
// Last returns the most recently recorded topic/message pair, or ("", nil)
// when nothing has been recorded.
func (r *PubSubRecorder) Last() (string, interface{}) {
	return r.Get(r.Count() - 1)
}
// WriteConnectPacket sends an MQTT CONNECT packet on the session. The device
// name becomes the client identifier, and the username carries the formation
// id and ip address as a JSON document (the format the spire broker expects).
func WriteConnectPacket(formationID, deviceName, ipAddress string, session *mqtt.Session) error {
	pkg := packets.NewControlPacket(packets.Connect).(*packets.ConnectPacket)
	pkg.ClientIdentifier = deviceName
	pkg.UsernameFlag = true
	pkg.Username = fmt.Sprintf(`{"formation_id": "%s", "ip_address": "%s"}`, formationID, ipAddress)
	return session.Write(pkg)
}
|
namespace Extractor.WpfClient.Exporters
{
using System;
using System.Collections.Generic;
using Extractor.WpfClient.Contracts;
using System.IO;
/// <summary>
/// Exports a collection of file-info records to a plain-text file, one
/// record per line (via each item's ToString()).
/// </summary>
public class TxtExporter : IExporter
{
    /// <param name="fileInformations">Records to export.</param>
    /// <param name="folderToExportTo">Destination folder.</param>
    /// <param name="fileName">File name without extension; ".txt" is appended.</param>
    public void Export<T>(
        ICollection<T> fileInformations,
        string folderToExportTo,
        string fileName) where T : IFileInfo
    {
        // Collapse accidental double backslashes from naive path concatenation.
        string fullExportFilePath = (folderToExportTo + "\\" + fileName + ".txt").Replace("\\\\", "\\");

        // Removed dead commented-out try/catch that re-wrapped every exception
        // as `new Exception(ex.Message)` (which would have discarded the stack
        // trace). Let I/O exceptions propagate unchanged; the using block
        // guarantees the writer is disposed either way.
        using (var writer = new StreamWriter(fullExportFilePath))
        {
            foreach (var fileInfo in fileInformations)
            {
                writer.WriteLine(fileInfo.ToString());
            }
        }
    }
}
}
|
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System.Collections.Generic;
using Microsoft.Azure.Functions.Worker.Sdk;
using Xunit;
namespace Microsoft.Azure.Functions.SdkTests
{
public class ExtensionsCsProjGeneratorTests
{
    // Verifies that the generator produces the exact csproj content for a
    // known set of WebJobs extension package references.
    [Fact]
    public void GetCsProjContent_Succeeds()
    {
        IDictionary<string, string> extensions = new Dictionary<string, string>
        {
            { "Microsoft.Azure.WebJobs.Extensions.Storage", "4.0.3" },
            { "Microsoft.Azure.WebJobs.Extensions.Http", "3.0.0" },
            { "Microsoft.Azure.WebJobs.Extensions", "2.0.0" },
        };

        var generator = new ExtensionsCsprojGenerator(extensions, "", "netcoreapp3.1");

        string actualCsproj = generator.GetCsProjContent();

        Assert.Equal(ExpectedCsProj(), actualCsproj);
    }

    // Expected csproj text; compared byte-for-byte with the generator output,
    // so its whitespace must exactly mirror what the generator emits.
    private static string ExpectedCsProj()
    {
        return @"
<Project Sdk=""Microsoft.NET.Sdk"">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<LangVersion>preview</LangVersion>
<Configuration>Release</Configuration>
<AssemblyName>Microsoft.Azure.Functions.Worker.Extensions</AssemblyName>
<RootNamespace>Microsoft.Azure.Functions.Worker.Extensions</RootNamespace>
<MajorMinorProductVersion>1.0</MajorMinorProductVersion>
<Version>$(MajorMinorProductVersion).0</Version>
<AssemblyVersion>$(MajorMinorProductVersion).0.0</AssemblyVersion>
<FileVersion>$(Version)</FileVersion>
<CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>
</PropertyGroup>
<ItemGroup>
<PackageReference Include=""Microsoft.NETCore.Targets"" Version=""3.0.0"" PrivateAssets=""all"" />
<PackageReference Include=""Microsoft.NET.Sdk.Functions"" Version=""3.0.11"" />
<PackageReference Include=""Microsoft.Azure.WebJobs.Extensions.Storage"" Version=""4.0.3"" />
<PackageReference Include=""Microsoft.Azure.WebJobs.Extensions.Http"" Version=""3.0.0"" />
<PackageReference Include=""Microsoft.Azure.WebJobs.Extensions"" Version=""2.0.0"" />
</ItemGroup>
</Project>
";
    }
}
}
|
require 'package'
class V2ray < Package
  description 'A platform for building proxies to bypass network restrictions.'
  homepage 'https://www.v2ray.com/'
  version '{{VERSION}}'

  # Prebuilt release archive per target architecture; the {{...}} placeholders
  # are substituted by the packaging template before this file is consumed.
  case ARCH
  when 'aarch64', 'armv7l'
    source_url 'https://github.com/v2ray/v2ray-core/releases/download/{{VERSION}}/v2ray-linux-arm.zip'
    source_sha256 '{{SHA256_ARMV7L}}'
  when 'i686'
    source_url 'https://github.com/v2ray/v2ray-core/releases/download/{{VERSION}}/v2ray-linux-32.zip'
    source_sha256 '{{SHA256_I686}}'
  when 'x86_64'
    source_url 'https://github.com/v2ray/v2ray-core/releases/download/{{VERSION}}/v2ray-linux-64.zip'
    source_sha256 '{{SHA256_X86_64}}'
  end

  def self.install
    # Ship the whole unpacked release under share/v2ray and expose the two
    # executables via symlinks in bin/.
    FileUtils.mkdir_p(CREW_DEST_PREFIX + '/share/v2ray')
    FileUtils.cp_r('.', CREW_DEST_PREFIX + '/share/v2ray')
    FileUtils.mkdir_p(CREW_DEST_PREFIX + '/bin')
    FileUtils.cd(CREW_DEST_PREFIX + '/bin') do
      FileUtils.ln_s(CREW_PREFIX + '/share/v2ray/v2ray', 'v2ray')
      FileUtils.ln_s(CREW_PREFIX + '/share/v2ray/v2ctl', 'v2ctl')
    end
  end

  def self.postinstall
    # NOTE(review): 'u=x,go=x' sets ONLY the execute bit (no read permission),
    # which is unusual for binaries — confirm the intended mode.
    FileUtils.chmod('u=x,go=x', CREW_PREFIX + '/share/v2ray/v2ray')
    FileUtils.chmod('u=x,go=x', CREW_PREFIX + '/share/v2ray/v2ctl')
    puts
    puts 'To start using v2ray, type `v2ray`.'.lightblue
    puts
    # NOTE(review): "customer config" in the message below is likely a typo
    # for "custom config" (runtime string — left untouched here).
    puts 'You can use customer config. about how to use v2ray command, see https://www.v2ray.com/'.lightblue
    puts 'If you want to remove v2ray'.lightblue
    puts
    puts 'crew remove v2ray'.lightblue
    puts
  end
end
|
use crate::components::{BlocksTile, CombatStats, Monster, Name, Position, Renderable, Viewshed};
use crate::config;
use rltk::{RandomNumberGenerator, RGB};
use specs::prelude::*;
/// Spawns a random monster at `start`: a 1-in-2 dice roll picks an orc,
/// otherwise a goblin.
pub fn random(ecs: &mut World, start: Position, cfg: &config::Monsters) {
    // Scope the RNG resource borrow so `ecs` is mutably free again for `spawn`.
    let roll = {
        let mut rng = ecs.write_resource::<RandomNumberGenerator>();
        rng.roll_dice(1, 2)
    };
    if roll == 1 {
        spawn(ecs, start, cfg, &cfg.orc)
    } else {
        spawn(ecs, start, cfg, &cfg.goblin)
    }
}
/// Creates a monster entity at `start` with the visual, vision, naming,
/// collision and combat components described by the monster config `m`
/// (colors and view range come from the shared `cfg`).
pub fn spawn(ecs: &mut World, start: Position, cfg: &config::Monsters, m: &config::Monster) {
    log::debug!("Creating monster at {:?} ...", start);
    ecs.create_entity()
        .with(start)
        .with(Renderable {
            glyph: rltk::to_cp437(m.chr),
            fg: RGB::named(cfg.fg_color),
            bg: RGB::named(cfg.bg_color),
            // Drawn above the map but below the player (render_order 0).
            render_order: 1,
        })
        .with(Viewshed {
            visible_tiles: Vec::new(),
            range: cfg.view_range.tile_count.clone(),
            // Marked dirty so the visibility system computes it on first tick.
            dirty: true,
        })
        .with(Monster {})
        .with(Name {
            name: m.name.clone(),
        })
        .with(BlocksTile {})
        .with(CombatStats {
            max_hp: m.stats.max_hp,
            hp: m.stats.starting_hp,
            defense: m.stats.defense,
            power: m.stats.power,
        })
        .build();
}
|
package spatial.codegen.resourcegen
import scala.collection.mutable
import argon._
import argon.codegen.FileDependencies
import spatial.codegen.naming._
import argon.node._
import spatial.lang._
import spatial.node._
import spatial.metadata.access._
import spatial.metadata.control._
import spatial.metadata.memory._
import spatial.metadata.retiming._
import spatial.metadata.types._
import spatial.util.spatialConfig
import spatial.traversal.AccelTraversal
/** Codegen pass that walks the accelerator IR and writes a JSON report of
  * hardware resources: memories grouped by kind ("bram"/"reg") and a running
  * count of scalar fixed-point operations ("fixed_ops").
  */
case class ResourceReporter(IR: State) extends NamedCodegen with FileDependencies with AccelTraversal {
  override val lang: String = "reports"
  override val ext: String = "json"

  override protected def emitEntry(block: Block[_]): Unit = {
    gen(block)
  }

  // Bit width of a type, or -1 when the type carries no Bits evidence.
  private def bitWidth(tp: Type[_]): Int = tp match {
    case Bits(bT) => bT.nbits
    case _ => -1
  }

  // Running count of fixed-point (and related scalar) ops seen inside Accel.
  var fixOp: Int = 0

  override def emitHeader(): Unit = {
    super.emitHeader()
    emit("{")
  }

  // memory kind -> (symbol name -> "bits, dims, padding, depth" payload)
  val dataMap = mutable.Map[String, mutable.Map[String, String]]()

  override def emitFooter(): Unit = {
    super.emitFooter()
    // Emit one JSON object per memory kind; the per-kind object always ends
    // with a trailing comma, which is valid because "fixed_ops" follows last.
    dataMap.foreach {
      entry => {
        emit(s"""\t"${entry._1}": {""")
        val last_index = entry._2.size - 1;
        entry._2.toList.zipWithIndex.foreach {
          tup_ind => {
            // Comma after every entry except the last, to keep the JSON valid.
            val comma = if (tup_ind._2 != last_index) "," else ""
            emit(s"""\t\t"${tup_ind._1._1}": [${tup_ind._1._2}]${comma}""")
          }
        }
        emit("\t},")
      }
    }
    emit(s"""\t"fixed_ops": $fixOp""")
    emit("}")
  }

  // Record one memory instance under `tp` with its element width, logical
  // dimensions, banking padding and buffer depth.
  def emitMem(lhs: Sym[_], tp: String, dims: Seq[Int], padding: Seq[Int], depth: Int) = {
    dataMap.getOrElseUpdate(tp, mutable.Map[String, String]()) += (
      src"${lhs}" -> src"""${bitWidth(lhs.tp.typeArgs.head)}, ${dims}, ${padding}, ${depth}""")
  }

  // Render Seq arguments as JSON arrays instead of Scala's default toString.
  override protected def quoteOrRemap(arg: Any): String = arg match {
    case p: Seq[_] =>
      s"[${p.map(quoteOrRemap).mkString(",")}]"
    case _ => super.quoteOrRemap(arg)
  }

  override protected def gen(lhs: Sym[_], rhs: Op[_]): Unit = {
    rhs match {
      case AccelScope(func) => inAccel {
        spatialConfig.enGen = true
        gen(func)
      }
      case _ =>
        // Only count nodes that sit inside the hardware (Accel) scope.
        if (inHw) {
          countResource(lhs, rhs)
        }
        rhs.blocks.foreach { blk => gen(blk) }
    }
  }

  // Classify a node: memories are recorded via emitMem, scalar fixed-point
  // ops bump the fixOp counter, everything else is ignored.
  def countResource(lhs: Sym[_], rhs: Op[_]): Unit = rhs match {
    case op: SRAMNew[_, _] =>
      emitMem(lhs, "bram", lhs.constDims, lhs.padding, lhs.instance.depth)
    case op: FIFONew[_] =>
      emitMem(lhs, "bram", lhs.constDims, lhs.padding, lhs.instance.depth)
    case op: LIFONew[_] =>
      emitMem(lhs, "bram", lhs.constDims, lhs.padding, lhs.instance.depth)
    case op: LineBufferNew[_] =>
      emitMem(lhs, "bram", lhs.constDims, lhs.padding, lhs.instance.depth)
    case op: RegFileNew[_, _] =>
      emitMem(lhs, "bram", lhs.constDims, lhs.padding, lhs.instance.depth)
    case op: RegNew[_] =>
      emitMem(lhs, "reg", Seq(1), Seq(0), lhs.instance.depth)
    case op: FIFORegNew[_] =>
      emitMem(lhs, "reg", Seq(1), Seq(0), lhs.instance.depth)
    case op: LUTNew[_, _] =>
      emitMem(lhs, "reg", lhs.constDims, Seq(0), 1)
    case op: MergeBufferNew[_] =>
      emitMem(lhs, "reg", lhs.constDims, Seq(0), 1)
    case FixInv(x) => fixOp += 1
    case FixAdd(x, y) => fixOp += 1
    case FixSub(x, y) => fixOp += 1
    case FixMul(x, y) => fixOp += 1
    case FixDiv(x, y) => fixOp += 1
    case FixRecip(x) => fixOp += 1
    case FixMod(x, y) => fixOp += 1
    case FixAnd(x, y) => fixOp += 1
    case FixOr(x, y) => fixOp += 1
    case FixLst(x, y) => fixOp += 1
    case FixLeq(x, y) => fixOp += 1
    case FixXor(x, y) => fixOp += 1
    case FixSLA(x, y) => fixOp += 1
    case FixSRA(x, y) => fixOp += 1
    case FixSRU(x, y) => fixOp += 1
    case SatAdd(x, y) => fixOp += 1
    case SatSub(x, y) => fixOp += 1
    case SatMul(x, y) => fixOp += 1
    case SatDiv(x, y) => fixOp += 1
    case UnbMul(x, y) => fixOp += 1
    case UnbDiv(x, y) => fixOp += 1
    case UnbSatMul(x, y) => fixOp += 1
    case UnbSatDiv(x, y) => fixOp += 1
    case FixNeq(x, y) => fixOp += 1
    case FixEql(x, y) => fixOp += 1
    case FixMax(x, y) => fixOp += 1
    case FixMin(x, y) => fixOp += 1
    case FixToFlt(x, fmt) => fixOp += 1
    case FixToText(x) => fixOp += 1
    case TextToFix(x, _) => fixOp += 1
    case FixRandom(Some(max)) => fixOp += 1
    case FixRandom(None) => fixOp += 1
    case FixAbs(x) => fixOp += 1
    case FixFloor(x) => fixOp += 1
    case FixCeil(x) => fixOp += 1
    case FixLn(x) => fixOp += 1
    case FixExp(x) => fixOp += 1
    case FixSqrt(x) => fixOp += 1
    case FixSin(x) => fixOp += 1
    case FixCos(x) => fixOp += 1
    case FixTan(x) => fixOp += 1
    case FixSinh(x) => fixOp += 1
    case FixCosh(x) => fixOp += 1
    case FixTanh(x) => fixOp += 1
    case FixAsin(x) => fixOp += 1
    case FixAcos(x) => fixOp += 1
    case FixAtan(x) => fixOp += 1
    case FixPow(x, exp) => fixOp += 1
    case FixFMA(m1, m2, add) => fixOp += 1
    case FixRecipSqrt(x) => fixOp += 1
    case FixSigmoid(x) => fixOp += 1
    case _ =>
  }
}
|
package net.wuillemin.jds.common.exception
/**
 * An exception that should be thrown when an authentication (would it be an initial authentication, a refresh or
 * whatever related to authentication) is rejected.
 *
 * @param code The code of the exception, used to look up the message for the client.
 * @param args The arguments of the exception, interpolated into the message for [code].
 */
class AuthenticationRejectedException(
    val code: ExceptionCode,
    vararg val args: Any?
) : RuntimeException() {

    companion object {
        // Explicit serial version id: RuntimeException is Serializable, and pinning
        // the id keeps serialized instances compatible across releases.
        private const val serialVersionUID = 1L
    }
}
|
package com.platform.dao;
import com.platform.entity.FootprintVo;
import java.util.List;
import java.util.Map;
/**
* @author lipengjun
* @email 939961241@qq.com
* @date 2017-08-11 09:14:26
*/
public interface ApiFootprintMapper extends BaseDao<FootprintVo> {

    /**
     * Deletes footprint rows matching the given parameter map.
     * The accepted keys are defined by the corresponding mapper XML — verify there.
     *
     * @param map query parameters
     * @return number of rows deleted
     */
    int deleteByParam(Map<String, Object> map);

    /**
     * Queries footprints for sharing; filter semantics are defined in the mapper XML.
     *
     * @param map query parameters
     * @return matching footprints
     */
    List<FootprintVo> shareList(Map<String, Object> map);

    /**
     * Queries the footprint list of a single user.
     *
     * @param userid id of the user whose footprints are requested
     * @return the user's footprints
     */
    List<FootprintVo> queryListFootprint(String userid);
}
|
# osgQt
A simple wrapper around OpenSceneGraph for Qt5
Warning
=======
At the time of writing, this code does not work with osg 3.6.4-rc3.
Note
--------
The provided Qt project file has include and library paths specific to my local setup.
This should be adjusted for your location of the osg library.
Tested with 32-bit osg 3.6.3 and Qt 5.12.4.
Tested with 64-bit osg 3.6.5-2 and Qt 5.14.1.
|
#!/usr/bin/env bash

# Bail out unless the current directory is inside a git worktree.
if ! git rev-parse --show-toplevel > /dev/null; then
  echo "$PWD does not seem to be in a git repo."
  exit 1
fi

# Refuse to run while there are uncommitted or untracked changes.
git_status=$(git status --porcelain=v1)
if [ -n "$git_status" ]; then
  echo "Worktree is dirty, please commit or stash any uncommitted or untracked files:"
  echo "$git_status"
  exit 1
fi

# Force-push the current branch to origin.
# TODO some user-friendly error handling if we seem to be on the main branch?
# TODO check if pre-commit is used on the pre-push hook and mention linting instead of pushing?
echo "Pushing code ..."
curr_branch=$(git rev-parse --abbrev-ref HEAD)
git push -u origin "$curr_branch" --force
retVal=$?
if [ "$retVal" -ne 0 ]; then
  echo "Failed to push branch. Git returned with exit code $retVal"
  exit 1
fi

# Show the merge request for this branch if one exists, otherwise create one.
# The piped echo answers a possible "select one of several MRs" prompt with the
# default (most recent). Possibly using `lab` instead of `glab` could make this
# easier if required.
if ! echo | glab mr view; then
  glab mr create
fi
|
package generators.css;
import java.util.ArrayList;
public class ColorBlender {

    /**
     * Produces a linear gradient from {@code startColor} to {@code endColor}.
     * <p>
     * The returned list contains {@code midPoints + 2} colors: the start color,
     * {@code midPoints} intermediate colors, and (up to integer truncation) the
     * end color.
     *
     * @param startColor first color of the gradient
     * @param endColor   last color of the gradient
     * @param midPoints  number of intermediate colors to generate between the two
     * @return the blended colors, in order from start to end
     */
    public static ArrayList<Color> blendColor(Color startColor, Color endColor, int midPoints) {
        // midPoints intermediate colors means midPoints + 1 equal intervals.
        // Kept in a local so the parameter itself is not mutated.
        final int steps = midPoints + 1;
        final double redStep = (startColor.getRed() - endColor.getRed()) / (double) steps;
        final double greenStep = (startColor.getGreen() - endColor.getGreen()) / (double) steps;
        final double blueStep = (startColor.getBlue() - endColor.getBlue()) / (double) steps;

        ArrayList<Color> colorBlended = new ArrayList<Color>();
        for (int i = 0; i <= steps; i++) {
            // Truncating (int) casts preserve the original arithmetic exactly;
            // the final iteration (i == steps) lands on endColor up to truncation.
            colorBlended.add(new Color(
                    (int) (startColor.getRed() - redStep * (double) i),
                    (int) (startColor.getGreen() - greenStep * (double) i),
                    (int) (startColor.getBlue() - blueStep * (double) i)));
        }
        return colorBlended;
    }
}
|
require "spec_helper"
describe ProposalMailer do
  describe "comment_notification" do
    # A proposal plus a comment left by an (initially) unrelated person; the
    # mail under test notifies the proposal's speakers about the comment.
    let(:proposal) { create(:proposal) }
    let(:person) { create(:person) }
    let(:comment) { create(:comment, person: person, proposal: proposal) }
    let(:mail) { ProposalMailer.comment_notification(proposal, comment) }

    it "bccs to all speakers" do
      proposal.speakers = build_list(:speaker, 3)
      proposal.save!
      expect(mail.bcc.count).to eq(3)
      expect(mail.bcc).to match_array(proposal.speakers.map(&:email))
    end

    it "doesn't bcc the speaker if they are also the commenter" do
      proposal.speakers = build_list(:speaker, 3)
      proposal.save!
      # The fourth speaker shares `person` with the comment's author, so the
      # mailer is expected to drop them and bcc only the other three.
      proposal.speakers << build(:speaker, person: person)
      expect(proposal.speakers.count).to eq(4)
      expect(mail.bcc.count).to eq(3)
      expect(mail.bcc).to match_array(proposal.speakers.first(3).map(&:email))
    end
  end
end
|
# Puppet parser function :quick_include — includes every class named in the
# array passed as the single argument (cf. hiera_include, but without hiera).
module Puppet::Parser::Functions
  newfunction(:quick_include, :arity => 1, :doc => "Like hiera_include
function. Using an array as first and only parameter instead
") do |args|
    # args[0] is expected to be a non-empty array of class names.
    answer = args[0]
    if answer && !answer.empty?
      # Resolve the built-in include function and invoke it with the array.
      method = Puppet::Parser::Functions.function(:include)
      send(method, [answer])
    else
      raise Puppet::ParseError, "Could not find data item #{answer}"
    end
  end
end
|
package com.me.chapter07
object Scala26_Collection_Method3 {
  def main(args: Array[String]): Unit = {
    // TODO Scala - collections - common methods
    val list = List(1,2,3,4)

    // TODO Functional operations on collection data
    // val newList = for ( i <- list ) yield {
    //   i * 2
    // }
    // println(newList)

    // Transform every element of the collection to obtain a new collection.
    // map => mapping => A -> B
    def transform(i:Int): Int = {
      i * 2
    }
    // Equivalent, progressively shorter ways of writing the same mapping:
    //val newList: List[Int] = list.map(transform)
    //val newList: List[Int] = list.map((i:Int)=>{i*2})
    //val newList: List[Int] = list.map((i:Int)=>i*2)
    //val newList: List[Int] = list.map((i)=>i*2)
    //val newList: List[Int] = list.map(i=>i*2)
    //val newList: List[Int] = list.map(_*2)
    val newList: List[String] = list.map(_.toString)
    println(newList)
  }
}
|
package me
// GENERATED SDK for me API
// SOTPValidate describes SOTP validation status.
type SOTPValidate struct {
	// RemainingCodes is presumably the count of one-time codes still
	// usable — confirm against the API spec. Omitted from JSON when zero.
	RemainingCodes int64 `json:"remainingCodes,omitempty"`
}
|
use std::fmt::Debug;
use thiserror::Error;
/// Top-level error type for kafcat. Wraps I/O, JSON (de)serialization and
/// rdkafka client errors via `#[from]` conversions, so `?` works on all of
/// them; `anyhow::Error` is the catch-all.
#[derive(Error, Debug)]
pub enum KafcatError {
    /// An operation did not complete within its time limit.
    #[error("Timeout error")]
    Timeout,
    /// Underlying I/O failure, forwarded transparently.
    #[error(transparent)]
    IoError(#[from] std::io::Error),
    /// JSON (de)serialization failure, forwarded transparently.
    #[error(transparent)]
    SerdeJsonError(#[from] serde_json::Error),
    /// Kafka client failure, forwarded transparently.
    #[error(transparent)]
    RdkafkaError(#[from] rdkafka::error::KafkaError),
    /// Any other error, forwarded transparently.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
|
# Older ActiveSupport versions lack Notifications.subscribed; backfill a
# minimally compatible shim so the specs below can rely on it.
unless ActiveSupport::Notifications.respond_to?(:subscribed)
  module SubscribedBehavior
    # Subscribes `callback` for the duration of the block, unsubscribing even
    # if the block raises.
    def subscribed(callback, *args)
      subscriber = subscribe(*args, &callback)
      yield
    ensure
      unsubscribe(subscriber)
    end
  end
  ActiveSupport::Notifications.extend SubscribedBehavior
end
describe "using ActiveSupport::Instrumentation to track factory interaction" do
  let(:slow_user_factory) { FactoryBot.factory_by_name("slow_user") }
  let(:user_factory) { FactoryBot.factory_by_name("user") }

  before do
    define_model("User", email: :string)

    FactoryBot.define do
      factory :user do
        email { "john@example.com" }

        # :slow_user sleeps after build so the timing assertion below has a
        # measurable lower bound.
        factory :slow_user do
          after(:build) { Kernel.sleep(0.1) }
        end
      end
    end
  end

  it "tracks proper time of creating the record" do
    time_to_execute = 0
    # Duration comes from the notification's start/finish timestamps.
    callback = ->(_name, start, finish, _id, _payload) { time_to_execute = finish - start }
    ActiveSupport::Notifications.subscribed(callback, "factory_bot.run_factory") do
      FactoryBot.build(:slow_user)
    end

    expect(time_to_execute).to be >= 0.1
  end

  it "builds the correct payload" do
    # Accumulates factory name => { strategy => count, :factory => Factory }.
    tracked_invocations = {}

    callback = ->(_name, _start, _finish, _id, payload) do
      factory_name = payload[:name]
      strategy_name = payload[:strategy]
      factory = payload[:factory]
      tracked_invocations[factory_name] ||= {}
      tracked_invocations[factory_name][strategy_name] ||= 0
      tracked_invocations[factory_name][strategy_name] += 1
      tracked_invocations[factory_name][:factory] = factory
    end

    ActiveSupport::Notifications.subscribed(callback, "factory_bot.run_factory") do
      FactoryBot.build_list(:slow_user, 2)
      FactoryBot.build_list(:user, 5)
      FactoryBot.create_list(:user, 2)
      FactoryBot.attributes_for(:slow_user)
    end

    expect(tracked_invocations[:slow_user][:build]).to eq(2)
    expect(tracked_invocations[:slow_user][:attributes_for]).to eq(1)
    expect(tracked_invocations[:slow_user][:factory]).to eq(slow_user_factory)
    expect(tracked_invocations[:user][:build]).to eq(5)
    expect(tracked_invocations[:user][:factory]).to eq(user_factory)
  end
end
|
SECTION code_ctype

PUBLIC asm_iscntrl

asm_iscntrl:

   ; determine if char is 127 or <32, ie non-printable ascii
   ;
   ; enter : a = char
   ; exit  : carry if a control char
   ; uses  : f

   cp 127              ; Z set iff a == 127 (DEL); carry is clear in that case
   ccf                 ; flip carry so the a == 127 path returns with carry set
   ret z               ; DEL: return carry set (control char)
   cp 32               ; otherwise: carry set iff a < 32 (C0 control range)
   ret
|
package com.landside.support.mvp
import android.content.Context
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.annotation.LayoutRes
import androidx.fragment.app.Fragment
import com.landside.shadowstate.ShadowState
import com.landside.support.extensions.toast
import dagger.android.AndroidInjector
import dagger.android.DispatchingAndroidInjector
import dagger.android.support.AndroidSupportInjection
import dagger.android.support.HasSupportFragmentInjector
import javax.inject.Inject
/**
 * Base fragment for the MVP layer. Wires together:
 *  - Dagger injection of the presenter [T] bound to view interface [V],
 *  - ShadowState binding and an AutoDispose provider,
 *  - visibility-driven lazy loading ([lazyLoad]/[stopLoad]).
 *
 * Subclasses supply [layoutId], [initViews] and [lazyLoad].
 */
abstract class MVPBaseFragment<V : BaseView, T : BasePresenterImpl<V>> : Fragment(),
    HasSupportFragmentInjector, BaseView {

    // Presenter injected by Dagger; attached in onCreate, detached in onDestroy.
    @Inject
    lateinit var presenter: T

    @Inject
    lateinit var childFragmentInjector: DispatchingAndroidInjector<Fragment>

    // Layout resource inflated in onCreateView.
    @get:LayoutRes
    abstract val layoutId: Int

    val autoDisposeProvider = AutoDisposeProvider()

    private var mRootView: View? = null

    // True once onCreate has completed; lazy loading is suppressed before that.
    private var isInit = false

    // True after the first successful lazyLoad; read-only to subclasses.
    var isLoad = false
        private set

    override fun onCreate(savedInstanceState: Bundle?) {
        ShadowState.bind(this)
        // NOTE(review): unchecked cast — the concrete fragment must itself
        // implement V for attachView to succeed.
        presenter.attachView(this as V)
        super.onCreate(savedInstanceState)
        autoDisposeProvider.init(this)
        isInit = true
    }

    override fun onActivityCreated(savedInstanceState: Bundle?) {
        super.onActivityCreated(savedInstanceState)
        initViews()
        tryLazyLoad()
    }

    override fun setUserVisibleHint(isVisibleToUser: Boolean) {
        super.setUserVisibleHint(isVisibleToUser)
        tryLazyLoad()
    }

    override fun onHiddenChanged(hidden: Boolean) {
        super.onHiddenChanged(hidden)
        if (hidden) {
            stopLoad()
        } else {
            tryLazyLoad()
        }
    }

    /**
     * Runs [lazyLoad] the first time the fragment is both initialized and
     * visible; on later visibility changes calls [stopLoad] instead.
     */
    protected open fun tryLazyLoad() {
        if (!isInit) {
            return
        }
        if (!isLoad && userVisibleHint) {
            lazyLoad()
            isLoad = true
        } else {
            if (isLoad) {
                stopLoad()
            }
        }
    }

    // One-time data load, invoked when the fragment first becomes visible.
    abstract fun lazyLoad()

    // Hook invoked when the fragment is hidden after having loaded; no-op by default.
    protected open fun stopLoad() {}

    // View setup, invoked from onActivityCreated before the first lazy load.
    abstract fun initViews()

    override fun onCreateView(
        inflater: LayoutInflater,
        container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View? {
        mRootView = inflater.inflate(layoutId, container, false)
        return mRootView
    }

    override fun onAttach(context: Context) {
        // Dagger injection must happen before super.onAttach.
        AndroidSupportInjection.inject(this)
        super.onAttach(context)
    }

    override fun onDestroyView() {
        super.onDestroyView()
        // Drop the view reference to avoid leaking it past the view lifecycle.
        mRootView = null
    }

    override fun onDestroy() {
        super.onDestroy()
        presenter.detachView()
    }

    override fun supportFragmentInjector(): AndroidInjector<Fragment> {
        return childFragmentInjector
    }

    override fun showToast(msg: String) {
        toast { msg }
    }

    override fun showToast(resId: Int) {
        toast(resId)
    }

    // Default no-op implementations of the optional BaseView hooks.
    override fun showRefreshing() {}

    override fun hideRefreshing() {}

    override fun showLoadMore() {}

    override fun hideLoadMore() {}

    override fun back() {}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
Copyright (c) 1989-1993
by Todd R. Hill
All Rights Reserved
*/
/*unpubMan********************************************************************
NAME
keyword.h - VAST Command Language keyword definitions
DESCRIPTION
Contains definitions for keywords used within the VAST Command
Language (VCL).
If VCL_DECL or __cplusplus are defined the global symbols are declared.
FILES
sys.h
SEE ALSO
NOTES
BUGS
********************************************************************unpubMan*/
#ifndef KEYWORD_H /* avoid multiple inclusion */
#define KEYWORD_H

#ifndef VCLDEF_H
#include "vcldef.h"
#endif

#ifndef SYS_H
#include "sys.h"
#endif

/* built-in library function lookup table */
extern SYMBOLTABLE LibraryFunctions[];
/* NOTE(review): sizeof on an extern array of unknown bound is only valid in
   the translation unit that defines the array — confirm where these MAX*
   macros are used. */
#define MAXLIBFUNCTIONS (sizeof(LibraryFunctions)/sizeof(SYMBOLTABLE))

/* keyword lookup table */
EXTERN SYMBOLTABLE Keywords[];
#define MAXKEYWORDS (sizeof(Keywords)/sizeof(SYMBOLTABLE))

/* multi-character operator lookup tbl */
EXTERN SYMBOLTABLE Operators[] ;
#define MAXOPERATORS (sizeof(Operators)/sizeof(SYMBOLTABLE))

/* preprocessor directive lookup table */
EXTERN SYMBOLTABLE PreProcessors[];
#define MAXPREPROCESSORS (sizeof(PreProcessors)/sizeof(SYMBOLTABLE))

/* predefined symbol lookup table */
EXTERN SYMBOLTABLE PreDefined[];
#define MAXPREDEFINED (sizeof(PreDefined)/sizeof(SYMBOLTABLE))

#endif /* avoid multiple inclusion */
|
# Sidekiq background job that refreshes Help Scout articles via
# Helpscout::Article.fetch!.
class FetchHelpscoutArticlesJob
  include Sidekiq::Worker

  # Entry point invoked by Sidekiq; delegates the actual fetch.
  def perform
    Helpscout::Article.fetch!
  end
end
|
use rand::Rng;
/// Picks a random private key `a` in the half-open range `[2, p)`.
/// NOTE(review): this is the pre-0.8 `rand` two-argument `gen_range(low, high)`
/// API — confirm the pinned `rand` version before upgrading the crate.
pub fn private_key(p: u64) -> u64 {
    rand::thread_rng().gen_range(2, p)
}
/// Computes the public key `g^a mod p` for private key `a`.
pub fn public_key(p: u64, g: u64, a: u64) -> u64 {
    mod_exp(g, a, p)
}
/// Computes the shared secret `b_pub^a mod p` from the peer's public key.
pub fn secret(p: u64, b_pub: u64, a: u64) -> u64 {
    mod_exp(b_pub, a, p)
}
/// Right-to-left binary modular exponentiation: returns `base^exp % modulus`.
///
/// Intermediate products are widened to `u128` so the result is correct for
/// any `u64` modulus; the previous all-`u64` arithmetic overflowed (panicking
/// in debug builds, wrapping in release) once `modulus` exceeded 2^32.
fn mod_exp(base: u64, mut exp: u64, modulus: u64) -> u64 {
    // Everything is congruent to 0 modulo 1.
    if modulus == 1 {
        return 0;
    }
    let m = modulus as u128;
    let mut result: u128 = 1;
    let mut b = base as u128 % m;
    while exp > 0 {
        if exp % 2 == 1 {
            result = result * b % m;
        }
        exp >>= 1;
        b = b * b % m;
    }
    result as u64
}
|
/* Support for printing Modula 2 types for GDB, the GNU debugger.
Copyright 1986, 1988, 1989, 1991 Free Software Foundation, Inc.
This file is part of GDB.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#include "defs.h"
#include "obstack.h"
#include "bfd.h" /* Binary File Description */
#include "symtab.h"
#include "gdbtypes.h"
#include "expression.h"
#include "value.h"
#include "gdbcore.h"
#include "target.h"
#include "command.h"
#include "gdbcmd.h"
#include "language.h"
#include "demangle.h"
#include "m2-lang.h"
#include <string.h>
#include <errno.h>
/* Print a representation of TYPE, with variable name VARSTRING, to STREAM.
   SHOW controls how deeply nested type structure is expanded and LEVEL is
   the current indentation depth, as with the other language printers.
   Modula-2 specific printing is not implemented yet; we fall back to the C
   type printer, as the FIXME below notes.  */
void
m2_print_type (type, varstring, stream, show, level)
     struct type *type;
     char *varstring;
     FILE *stream;
     int show;
     int level;
{
   extern void c_print_type PARAMS ((struct type *, char *, FILE *, int, int));

   c_print_type (type, varstring, stream, show, level);	/* FIXME */
}
package com.elpassion.android.commons.rxjavatest
import org.junit.Test
import rx.Observable
// Tests for the `Observable.test()` extension: both the plain TestSubscriber
// accessor and the assertion-block overload.
class ObservableExtensionTest {

    @Test
    fun shouldTestSubscriberAssertValue() {
        Observable.just(2).test().assertValue(2)
    }

    // A failing subscriber assertion must surface as an AssertionError.
    @Test(expected = AssertionError::class)
    fun shouldTestSubscriberThrowAssertionError() {
        Observable.just(2).test().assertValue(-1)
    }

    @Test
    fun shouldTestSubscriberAssertValueOnBlock() {
        Observable.just(2).test { assertValue(2) }
    }

    @Test(expected = AssertionError::class)
    fun shouldTestSubscriberThrowAssertionErrorOnBlock() {
        Observable.just(2).test { assertValue(3) }
    }

    // Errors emitted by the stream must also fail assertNoErrors inside the block.
    @Test(expected = AssertionError::class)
    fun shouldFailAssertionErrorChain() {
        Observable.error<Unit>(RuntimeException()).test {
            assertNoErrors()
        }
    }

    // Asserting the exact emitted error must pass.
    @Test
    fun shouldNotFailAssertionErrorChain() {
        val error = RuntimeException()
        Observable.error<Unit>(error).test {
            assertError(error)
        }
    }
}
|
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Syndll2.Tests.BasicTests
{
    /// <summary>
    /// Round-trip tests for SynelByteFormat's byte[] &lt;-&gt; string conversions,
    /// covering null, empty and representative data inputs.
    /// </summary>
    [TestClass]
    public class ByteFormatTests
    {
        // Null input must pass through as null in both directions.
        [TestMethod]
        public void Convert_To_SynelByte_Null()
        {
            var s = SynelByteFormat.Convert((byte[])null);
            Assert.IsNull(s);
        }

        [TestMethod]
        public void Convert_From_SynelByte_Null()
        {
            var ba = SynelByteFormat.Convert((string)null);
            Assert.IsNull(ba);
        }

        // Empty input must produce an empty result, not null.
        [TestMethod]
        public void Convert_To_SynelByte_Empty()
        {
            var s = SynelByteFormat.Convert(new byte[0]);
            Assert.AreEqual(0, s.Length);
        }

        [TestMethod]
        public void Convert_From_SynelByte_Empty()
        {
            var ba = SynelByteFormat.Convert(string.Empty);
            Assert.AreEqual(0, ba.Length);
        }

        // Sample bytes spanning the full range (0..255) and their expected
        // Synel two-character encoding.
        [TestMethod]
        public void Convert_To_SynelByte_Data()
        {
            var s = SynelByteFormat.Convert(new byte[] {0, 1, 2, 3, 100, 101, 102, 103, 200, 201, 202, 203, 255});
            Assert.AreEqual("`0`1`2`3f4f5f6f7l8l9l:l;o?", s);
        }

        // Decoding the same string must yield the original bytes.
        [TestMethod]
        public void Convert_From_SynelByte_Data()
        {
            var ba = SynelByteFormat.Convert("`0`1`2`3f4f5f6f7l8l9l:l;o?");
            CollectionAssert.AreEqual(new byte[] {0, 1, 2, 3, 100, 101, 102, 103, 200, 201, 202, 203, 255}, ba);
        }
    }
}
|
2020年08月19日01时数据
Status: 200
1.钱枫又胖了
微博热度:1507983
2.警方初步判断老人被狗绳绊倒身亡为意外
微博热度:882078
3.这功夫有感脚
微博热度:869934
4.孟佳 我真的上台了
微博热度:865667
5.以家人之名
微博热度:784447
6.湖南卫视818晚会
微博热度:527641
7.乌童是个抖M吧
微博热度:440058
8.中国驻以使馆通报某中国工人聚居区疫情
微博热度:378836
9.豆瓣崩了
微博热度:357052
10.张萌张月同台
微博热度:355331
11.琉璃
微博热度:349944
12.黄子韬徐艺洋好甜
微博热度:335135
13.一旦树木内部发生燃烧
微博热度:313196
14.老人被狗绳绊倒摔地后身亡
微博热度:285865
15.钟南山说夏天新冠传染性差的想法太天真
微博热度:285835
16.奥巴马夫人炮轰特朗普
微博热度:284756
17.李子柒柳州建螺蛳粉厂
微博热度:283670
18.京东回应禁止卖家用申通发货
微博热度:282938
19.华为称会继续提供系统更新
微博热度:282184
20.特朗普宣布将赦免美国女权运动领袖
微博热度:281249
21.Wendy回来了
微博热度:279993
22.中国好声音导师开场秀
微博热度:279044
23.章绍伟
微博热度:278372
24.深圳盒马确诊3人身份
微博热度:277857
25.梦话录音能有多好笑
微博热度:270143
26.王岳伦
微博热度:266335
27.王一博龙拳太炸了
微博热度:264453
28.别人发群里的搞笑图片
微博热度:262103
29.美国拟在日本部署陆基中程导弹
微博热度:233322
30.脏脏芒果酥
微博热度:221080
31.迪丽热巴广州
微博热度:219659
32.考普通话时的经历
微博热度:198390
33.青青子衿
微博热度:193698
34.任豪随便拍拍
微博热度:178660
35.且听凤鸣
微博热度:153505
36.白冰辛巴直播
微博热度:152942
37.影视剧难以接受的结局
微博热度:141772
38.权志龙的画
微博热度:138651
39.画眼线到底有多难
微博热度:120764
40.宋紫薇
微博热度:119914
41.成都雪山
微博热度:107914
42.司凤受刑
微博热度:107861
43.女生都喜欢小巧的东西吗
微博热度:105895
44.死亡谷气温创美国107年来最高纪录
微博热度:100425
45.新西兰总理回击特朗普
微博热度:99055
46.综艺里鸡同鸭讲的空耳现场
微博热度:98129
47.乌童改造玲珑
微博热度:88848
48.微软明年停止支持IE浏览器
微博热度:63654
49.咸蛋黄鱼皮干锅螺蛳粉
微博热度:61766
50.青海生态环境厅回应木里矿区非法开采
微博热度:61154
|
# Activate test environment on older Julia versions
if VERSION < v"1.2"
    using Pkg: Pkg
    Pkg.activate(@__DIR__)
    # Develop the package under test from the parent directory.
    Pkg.develop(Pkg.PackageSpec(; path=dirname(@__DIR__)))
    Pkg.instantiate()
end

using AbstractPPL
using Documenter
using Test

@testset "AbstractPPL.jl" begin
    include("deprecations.jl")

    # Run all doctests embedded in the package docstrings.
    @testset "doctests" begin
        # Make `using AbstractPPL` implicit in every doctest block.
        DocMeta.setdocmeta!(
            AbstractPPL,
            :DocTestSetup,
            :(using AbstractPPL);
            recursive=true,
        )
        doctest(AbstractPPL; manual=false)
    end
end
|
// flag api doc:
// http://developer.factual.com/display/docs/Core+API+-+Flag
// Example: flag a Factual row as a duplicate of another row.
var auth = require('./auth');
var Factual = require('../factual-api');
var factual = new Factual(auth.key, auth.secret);
factual.startDebug();

factual.post('/t/global/21EC2020-3AEA-1069-A2DD-08002B30309D/flag', {
  problem: "duplicate",
  user: "a_user_id",
  comment: "I think this is identical to 9d676355-6c74-4cf6-8c4a-03fdaaa2d66a"
}, function (error, res) {
  // Report failures instead of silently swallowing them.
  if (error) {
    console.error("flag request failed:", error);
  } else {
    console.log("success");
  }
});
|
# Uploadcare PHP
This is a set of libraries to work with [Uploadcare][1].
## Install
**Note**: php-curl must be installed.
Just clone source code anywhere you like inside your project:
git clone git://github.com/uploadcare/uploadcare-php.git
If you like, define some constants with Public and Secret keys within your project:
define('UC_PUBLIC_KEY', 'demopublickey');
define('UC_SECRET_KEY', 'demoprivatekey');
If you are using PHP 5.3+ or 5.4+ it will be much better to use library with namespaces.
Just include one file to start using Uploadcare inside your PHP project and use namespace "\Uploadcare":
require_once '../uploadcare/lib/5.3-5.4/Uploadcare.php';
use \Uploadcare;
If you are using PHP 5.2+, then you should include Uploadcare PHP libraries like this:
require_once '../uploadcare/lib/5.2/Uploadcare.php';
Now, we are ready. Create an object of Uploadcare\Api class:
$api = new Uploadcare\Api(UC_PUBLIC_KEY, UC_SECRET_KEY);
For PHP 5.2 it will be:
$api = new Uploadcare_Api(UC_PUBLIC_KEY, UC_SECRET_KEY);
This is the main object you should work with. It has everything you need.
## Widgets and simple example
Let's start with widgets.
If you want to get Javascript's url for widget, just call:
print $api->widget->getScriptSrc()
You can easily get all contents and <script> sections to include in your HTML:
<head>
<?php print $api->widget->getScriptTag(); ?>
</head>
Create some form to use with widget:
<form method="POST" action="upload.php">
<?php echo $api->widget->getInputTag('qs-file'); ?>
<input type="submit" value="Save!" />
</form>
You will see an Uploadcare widget. After selecting file the "file_id" parameter will be set as value of hidden field.
The last thing left is to store file:
$file_id = $_POST['qs-file'];
$api = new Uploadcare\Api(UC_PUBLIC_KEY, UC_SECRET_KEY);
$file = $api->getFile($file_id);
$file->store();
Now you have an Uploadcare\File object to work with. You can show an image like this:
<img src="<?php echo $file->getUrl(); ?>" />
Or just:
<img src="<?php echo $file; ?>" />
Or you can even call a getImgTag method. This will return a prepared <img> tag:
echo $file->getImgTag('image.jpg', array('alt' => 'Image'));
## API and requests
You can do any simple request if you like by calling:
$api->request($method, $path, $data = array(), $headers = array());
Don't forget that each API url has its own allowed methods.
If a method is not allowed, an exception will be thrown.
Ok, lets do some requests. This is request to index (http://api.uploadcare.com).
This will return a stdClass with information about urls you can request.
This is not really valuable data.
$data = $api->request('GET', '/');
Lets request account info.
This will return just some essential data inside stdClass such as: username, pub_key and email
$account_data = $api->request('GET', '/account/');
Now lets get file list.
This request will return stdClass with all files uploaded and some information about files.
Each files has:
- size
- upload_date
- last_keep_claim
- on_s3
- made_public
- url
- is_image
- file_id
- original_filename
- removed
- mime_type
- original_file_url
$files_raw = $api->request('GET', '/files/');
Previous request is just some raw request and it will return raw data from json.
There's a better way to handle all the files by using method below.
It will return an array of \Uploadcare\File objects to work with.
This objects provide ways to display the file and to use methods such as resize, crop, etc
$files = $api->getFileList();
getFileList called without any params will return just an array of first 20 files objects (first page).
But you can supply a page you want to see:
$page = 2;
$files = $api->getFileList($page);
You can get some information about pagination.
You will get an array with params:
- page: current page
- next: uri to request next page
- per_page: number of files per page
- pages: number of pages
- previous: uri to request previous page
Use "per_page" and "pages" information to create pagination inside your own project
$pagination_info = $api->getFilePaginationInfo();
If you have a file_id (for example, it's saved in your database) you can create object for file easily.
Just use request below:
$file_id = '5255b9dd-f790-425e-9fa9-8b49d4e64643';
$file = $api->getFile($file_id);
You can access raw data like this:
$file->data['size'];
Trying to access "data" parameter will fire GET request to get all that data once.
It will be a cached array if you will try to access "data" parameter again.
## File operations
Using object of \Uploadcare\File class we can get url for the file
echo $file->getUrl();
Now let's do some crop.
$width = 400;
$height = 400;
$is_center = true;
$fill_color = 'ff0000';
echo $file->crop($width, $height, $is_center, $fill_color)->getUrl();
And here's some resize with width and height
echo $file->resize($width, $height)->getUrl();
Width only
echo $file->resize($width)->getUrl();
Height only
echo $file->resize(false, $height)->getUrl();
We can also use scale crop
echo $file->scaleCrop($width, $height, $is_center)->getUrl();
And we can apply some effects.
echo $file->effect('flip')->getUrl();
echo $file->effect('grayscale')->getUrl();
echo $file->effect('invert')->getUrl();
echo $file->effect('mirror')->getUrl();
We can apply more than one effect!
echo $file->effect('flip')->effect('invert')->getUrl();
We can combine operations, not just effects.
Just chain methods and finish by calling "getUrl()".
echo $file->resize(false, $height)->crop(100, 100)->effect('flip')->effect('invert')->getUrl();
getUrl() returns a string with the resulting URL.
However, it's optional – the object itself becomes a string when treated as such.
An example below will print an url too:
echo $file->resize(false, $height)->crop(100, 100)->effect('flip')->effect('invert');
The way you provide operations matters.
We can see the same operations below, but result will be a little bit different because of order:
echo $file->crop(100, 100)->resize(false, $height)->effect('flip')->effect('invert')->getUrl();
You can run any custom operations like this:
echo $file->op('effect/flip');
echo $file->op('resize/400x400')->op('effect/flip');
You can call getUrl with postfix parameter. This is will add some readable postfix.
echo $file->getUrl('image.jpg');
The result will be like this one:
http://ucarecdn.com/85b5644f-e692-4855-9db0-8c5a83096e25/-/crop/970x500/center/he.jpg
[More information on file operations can be found here][2]
## Uploading files
Let's have some fun with uploading files.
First of all, we can upload file from url. Just use construction below.
This will return Uploadcare\File instance.
$file = $api->uploader->fromUrl('http://www.baysflowers.co.nz/Images/tangerine-delight.jpg');
$file->store();
By using default params of "fromUrl" method you tell Uploader to check file to be uploaded.
By default, Uploader will make 5 checks max with 1 second wait. You can change these params:
$file = $api->uploader->fromUrl('http://www.baysflowers.co.nz/Images/tangerine-delight.jpg', true, $timeout, $max_attempts);
If file is not uploaded an Exception will be thrown.
You can just get token and check status manually later any time:
$token = $api->uploader->fromUrl('http://www.baysflowers.co.nz/Images/tangerine-delight.jpg', false);
$data = $api->uploader->status($token);
if ($data->status == 'success') {
$file_id = $data->file_id
// do smth with a file
}
You can do any operations with this file now.
echo $file->effect('flip')->getUrl();
You can upload file from path.
$file = $api->uploader->fromPath(dirname(__FILE__).'/test.jpg');
$file->store();
echo $file->effect('flip')->getUrl();
Or even just use a file pointer.
$fp = fopen(dirname(__FILE__).'/test.jpg', 'r');
$file = $api->uploader->fromResource($fp);
$file->store();
echo $file->effect('flip')->getUrl();
The last thing you can do is upload a file just from it's contents. But you will have to provide mime-type.
$content = "This is some text I want to upload";
$file = $api->uploader->fromContent($content, 'text/plain');
$file->store();
echo $file->getUrl();
If you want to delete file, just call delete() method on Uploadcare\File object.
$file->delete();
## Tests
Inside "tests" directory you can find test for PHP 5.2 and PHP 5.3.
These tests are based on PHPUnit, so you must have PHPUnit installed on your system to use them.
To execute tests just run this for PHP 5.3:
cd tests/5.3/
phpunit ApiTest.php
or for PHP 5.2:
cd tests/5.2/
phpunit ApiTest.php
ApiTest is divided into sections/methods.
Here's descriptions of methods:
### testConstantValid
Just some basic unit test to test, that constants are not misspelled.
### testChildObjectsValid
Test that Api object has proper child objects.
### testPublicKeyValid
Test for public key is correct.
### testFileList
Test that the getFilesList method returns an array and each item of the array is an object of the Uploadcare\File class
### testRequestsRaw
Test different request types to url https://api.uploadcare.com/.
Some requests must throw an exception, some must not.
Checks for some result returned.
### testRequestsAccount
The same as "testRequestsRaw" but with https://api.uploadcare.com/account/ url.
### testRequestsFiles
Makes raw request to get an array of files.
Check's if each file has essentials parameters.
### testFile
Tests Uploadcare\File object to work correctly.
Test runs different operations and checks url is returned correctly for each of them.
### testUploadAndDelete
Tests all four types of uploading.
None of them should throw an exception while uploading and storing.
Checks text file is uploaded correctly.
Checks for file deletions. No exceptions must be thrown.
[1]: https://uploadcare.com/
[2]: https://uploadcare.com/documentation/reference/basic/cdn.html
|
#ifndef _RENDER_MANAGER_H_
#define _RENDER_MANAGER_H_
#include "common/utils/singleton.h"
#include "cmake_val.h"
namespace BriskEngine {

// Graphics backend selected at build time (see cmake_val.h for the
// GRAPHIC_API_TYPE_* definitions).
enum class ENUM_GRAPIC_API_TYPE :unsigned {
    DX12,
    NA
};

// Singleton exposing which graphics API the engine was compiled against.
// NOTE(review): `implements` is presumably a macro from common/utils/singleton.h
// (likely expanding to `public`) — confirm there.
class RendererManager : implements Singleton<RendererManager>
{
public:
    // Returns the compile-time selected graphics backend.
    inline ENUM_GRAPIC_API_TYPE getGraphicType() { return m_eGraphicType; }

private:
#ifdef GRAPHIC_API_TYPE_DX12
    ENUM_GRAPIC_API_TYPE m_eGraphicType = ENUM_GRAPIC_API_TYPE::DX12;
#elif defined GRAPHIC_API_TYPE_NA
    ENUM_GRAPIC_API_TYPE m_eGraphicType = ENUM_GRAPIC_API_TYPE::NA;
#endif
};

}
#endif
|
% Trying SWI-Prolog delimited continuations
% trial/3 takes the names of three "reset points", one of "level_1", "level_2", "level_3"
% The innermost/3 predicate will shift/1 to the given reset points in turn. The code
% following the reset points performs another reset/3 on the received continuation, unless
% it is zero. This way, we can actually jump around in the call stack.
%
% For example:
%
% ?- trial(level_1,level_2,level_3).
% Shifting level_1
% Exiting reset at level_1, with continuation a compound term
% Back at innerest, shifting level_2
% Exiting reset at level_2, with continuation a compound term
% Back at innerest, shifting level_3
% Exiting reset at level_3, with continuation a compound term
% Exiting reset at level_3, with continuation == 0
% Exiting reset at level_2, with continuation == 0
% Exiting reset at level_1, with continuation == 0
% trial/3: outermost reset point. Catches the shift/1 aimed at level X,
% then (unless the continuation is 0) re-runs the captured continuation
% under the same reset point so later shifts can unwind to it again.
trial(X,Y,Z) :-
    reset(inner(X,Y,Z),level_1,Cont),
    print(level_1,Cont),
    ((Cont == 0)
    -> true
    ; (reset(Cont,level_1,Cont2),print(level_1,Cont2))),
    !.

% inner/3: middle reset point (level_2), same catch-and-resume pattern.
inner(X,Y,Z) :-
    reset(innerer(X,Y,Z),level_2,Cont),
    print(level_2,Cont),
    ((Cont == 0)
    -> true
    ; (reset(Cont,level_2,Cont2),print(level_2,Cont2))).

% innerer/3: innermost reset point (level_3) around innermost/3.
innerer(X,Y,Z) :-
    reset(innermost(X,Y,Z),level_3,Cont),
    print(level_3,Cont),
    ((Cont == 0)
    -> true
    ; (reset(Cont,level_3,Cont2),print(level_3,Cont2))).

% innermost/3: shifts to each of the three reset points in turn.
innermost(X,Y,Z) :-
    format("Shifting ~q\n",[X]),
    shift(X),
    format("Back at innerest, shifting ~q\n",[Y]),
    shift(Y),
    format("Back at innerest, shifting ~q\n",[Z]),
    shift(Z). % Although this is the last instruction, the continuation on reset-exit will NOT be 0

% print/2: report whether the reset exited with a zero continuation
% (goal completed) or with a resumable continuation term.
print(Level,0) :-
    format("Exiting reset at ~q, with continuation == 0\n",[Level]).

print(Level,Cont) :-
    Cont \== 0,
    format("Exiting reset at ~q, with continuation a compound term\n",[Level]).
|
package i_introduction._0_Hello_World
import org.junit.Assert.assertEquals
import org.junit.Test
class N00StartKtTest {
    // task0() is the exercise under test; it must return the literal "OK".
    @Test fun testOk() {
        assertEquals("OK", task0())
    }
}
|
//! Typed AST module to access nodes in the tree.
//!
//! The nodes described here are those also described in the [GraphQL grammar],
//! with a few exceptions. For example, for ease of querying the AST we do not
//! separate `Definition` into `ExecutableDefinition` and
//! `TypeSystemDefinitionOrExtension`. Instead, all possible definitions and
//! extensions can be accessed with `Definition`.
//!
//! Each struct in this module has getter methods to access information that's
//! part of its node. For example, as per spec a `UnionTypeDefinition` is defined as follows:
//!
//! ```ungram
//! UnionTypeDefinition =
//! Description? 'union' Name Directives? UnionMemberTypes?
//! ```
//!
//! It will then have getters for `Description`, union token, `Name`,
//! `Directives` and `UnionMemberTypes`. Check out the documentation for the struct
//! you're working with to find out its exact API.
//!
//! ## Example
//! This example parses a subgraph schema and looks at the various Definition Names.
//!
//! ```rust
//! use apollo_parser::{ast, Parser};
//!
//! let schema = r#"
//! directive @tag(name: String!) repeatable on FIELD_DEFINITION
//!
//! type ProductVariation {
//! id: ID!
//! }
//! scalar UUID @specifiedBy(url: "https://tools.ietf.org/html/rfc4122")
//!
//! union SearchResult = Photo | Person
//!
//! extend type Query {
//! allProducts: [Product]
//! product(id: ID!): Product
//! }
//! "#;
//! let parser = Parser::new(schema);
//! let ast = parser.parse();
//!
//! assert_eq!(0, ast.errors().len());
//! let document = ast.document();
//! for definition in document.definitions() {
//! match definition {
//! ast::Definition::DirectiveDefinition(directive) => {
//! assert_eq!(
//! directive
//! .name()
//! .expect("Cannot get directive name.")
//! .text()
//! .as_ref(),
//! "tag"
//! )
//! }
//! ast::Definition::ObjectTypeDefinition(object_type) => {
//! assert_eq!(
//! object_type
//! .name()
//! .expect("Cannot get object type definition name.")
//! .text()
//! .as_ref(),
//! "ProductVariation"
//! )
//! }
//! ast::Definition::UnionTypeDefinition(union_type) => {
//! assert_eq!(
//! union_type
//! .name()
//! .expect("Cannot get union type definition name.")
//! .text()
//! .as_ref(),
//! "SearchResult"
//! )
//! }
//! ast::Definition::ScalarTypeDefinition(scalar_type) => {
//! assert_eq!(
//! scalar_type
//! .name()
//! .expect("Cannot get scalar type definition name.")
//! .text()
//! .as_ref(),
//! "UUID"
//! )
//! }
//! ast::Definition::ObjectTypeExtension(object_type) => {
//! assert_eq!(
//! object_type
//! .name()
//! .expect("Cannot get object type extension name.")
//! .text()
//! .as_ref(),
//! "Query"
//! )
//! }
//! _ => unimplemented!(),
//! }
//! }
//! ```
//!
//! [GraphQL grammar]: https://spec.graphql.org/October2021/#sec-Document-Syntax
mod generated;
mod node_ext;
use std::marker::PhantomData;
use crate::{SyntaxKind, SyntaxNode, SyntaxNodeChildren, SyntaxToken};
pub use generated::nodes::*;
/// The main trait to go from untyped `SyntaxNode` to a typed ast. The
/// conversion itself has zero runtime cost: ast and syntax nodes have exactly
/// the same representation: a pointer to the tree root and a pointer to the
/// node itself.
pub trait AstNode {
/// Returns `true` when a node of `kind` could be cast to this AST type.
fn can_cast(kind: SyntaxKind) -> bool
where
Self: Sized;
/// Casts an untyped node into this type; `None` if the kind does not match.
fn cast(syntax: SyntaxNode) -> Option<Self>
where
Self: Sized;
/// The untyped `SyntaxNode` backing this typed wrapper.
fn syntax(&self) -> &SyntaxNode;
/// Clones the whole tree as a mutable copy and re-casts this node into it.
fn clone_for_update(&self) -> Self
where
Self: Sized,
{
Self::cast(self.syntax().clone_for_update()).unwrap()
}
/// Clones only the subtree rooted at this node.
fn clone_subtree(&self) -> Self
where
Self: Sized,
{
Self::cast(self.syntax().clone_subtree()).unwrap()
}
}
/// Like `AstNode`, but wraps tokens rather than interior nodes.
pub trait AstToken {
/// Returns `true` when a token of `kind` could be cast to this type.
fn can_cast(token: SyntaxKind) -> bool
where
Self: Sized;
/// Casts an untyped token into this type; `None` if the kind does not match.
fn cast(syntax: SyntaxToken) -> Option<Self>
where
Self: Sized;
/// The untyped `SyntaxToken` backing this typed wrapper.
fn syntax(&self) -> &SyntaxToken;
/// The token's source text.
fn text(&self) -> &str {
self.syntax().text()
}
}
/// An iterator over `SyntaxNode` children of a particular AST type.
#[derive(Debug, Clone)]
pub struct AstChildren<N> {
// Raw child iterator of the parent node; `next` filters it by type.
inner: SyntaxNodeChildren,
// Marks the AST type `N` this iterator yields (no runtime data).
ph: PhantomData<N>,
}
impl<N> AstChildren<N> {
/// Creates an iterator over `parent`'s children, lazily filtered to `N`.
fn new(parent: &SyntaxNode) -> Self {
AstChildren {
inner: parent.children(),
ph: PhantomData,
}
}
}
impl<N: AstNode> Iterator for AstChildren<N> {
type Item = N;
// Skips children that do not cast to `N`, yielding the next that does.
fn next(&mut self) -> Option<N> {
self.inner.find_map(N::cast)
}
}
mod support {
use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};
/// First child of `parent` castable to `N`, if any.
pub(super) fn child<N: AstNode>(parent: &SyntaxNode) -> Option<N> {
parent.children().find_map(N::cast)
}
/// All children of `parent` castable to `N`, as a lazy iterator.
pub(super) fn children<N: AstNode>(parent: &SyntaxNode) -> AstChildren<N> {
AstChildren::new(parent)
}
/// First direct child token of `parent` with the given `kind`.
pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxToken> {
parent
.children_with_tokens()
.filter_map(|it| it.into_token())
.find(|it| it.kind() == kind)
}
}
|
#!/usr/bin/ruby -Wall
# ================================================================
# Please see LICENSE.txt in the same directory as this file.
# John Kerl
# kerl.john.r@gmail.com
# Copyright (c) 2004
# Ported to Ruby 2011-02-10
# ================================================================
require 'Bit_arith.rb'
# Arbitrary-length bit vector backed by a single Ruby Integer
# (Ruby Integers grow on demand, so no explicit word array is needed).
# Bit position 0 is the least-significant (rightmost) bit.
class Bit_vector
# ------------------------------------------------------------
# init_num_elements: number of bits in the vector; must be positive.
def initialize(init_num_elements)
if (init_num_elements <= 0)
raise "Bit_vector: size must be > 0; got #{init_num_elements}. Exiting."
end
@num_bits = init_num_elements
@bits = 0 # Let Ruby do the dynamic sizing of the integer.
end
attr_reader :num_bits
attr_accessor :bits
# ------------------------------------------------------------
# xxx note bit position zero appears at the right.
# xxx esp. the identity mx appears as
# 01
# 10
# xxx re-think the ordering for the factorizer ... *after* the port
# is complete. :)
# Class-wide output mode shared by all instances: hex vs. binary to_s.
@@write_hex = false
def Bit_vector.set_hex_output
@@write_hex = true
end
def Bit_vector.set_binary_output
@@write_hex = false
end
# Renders the vector zero-padded: hex uses ceil(num_bits/4) digits,
# binary uses num_bits digits.
def to_s()
if @@write_hex == true
"%0*x" % [(@num_bits+3) >> 2, @bits]
else
"%0*b" % [@num_bits, @bits]
end
end
# ----------------------------------------------------------------
# Reads bit j (returns 0 or 1); j is range-checked.
def [](j)
if (j < 0) || (j >= @num_bits)
raise "Index #{j} out of bounds 0..#{@num_bits-1}"
end
(@bits >> j) & 1
end
# Writes the low bit of v into position j; j is range-checked.
def []=(j, v)
if (j < 0) || (j >= @num_bits)
raise "Index #{j} out of bounds 0..#{@num_bits-1}"
end
if (v & 1) == 1
@bits |= 1 << j
else
@bits &= ~(1 << j)
end
end
# Flips bit j; j is range-checked.
def toggle_element(j)
if (j < 0) || (j >= @num_bits)
raise "Index #{j} out of bounds 0..#{@num_bits-1}"
end
@bits ^= 1 << j
end
# ----------------------------------------------------------------
# For use by the row-reduction algorithm in Bit_matrix.
# Returns the position of the least-significant set bit (delegated to
# Bit_arith; presumably -1 when no bit is set -- see the slow loop below).
def find_leader_pos
Bit_arith.lsb_pos(@bits)
# Much slower:
# mask = 1
# count = 0
# while count < @num_bits
# if (@bits & mask) != 0
# return count
# end
# mask <<= 1
# count += 1
# end
# return -1
end
# ----------------------------------------------------------------
# The remainder of this file is the original C++ implementation kept as
# commented-out reference from the port; it is not executed.
## ----------------------------------------------------------------
## This is componentwise multiplication (u * v), useful for implementing
## direct products of rings.
##
## Use dot() (e.g. u.dot(v)) for inner product, or tmatrix's outer() (e.g.
## tmatrix::outer(u, v)) for outer product.
#
#bit_vector_t bit_vector_t::operator*(
# bit_vector_t that)
#{
# this->check_equal_lengths(that)
# bit_vector_t rv(this->num_bits)
# for (int i = 0; i < this->num_words; i++)
# rv.words[i] = this->words[i] & that.words[i]
# return rv
#}
#
## ----------------------------------------------------------------
#bit_t bit_vector_t::dot(
# bit_vector_t that)
#{
# if (this->num_bits != that.num_bits)
# this->check_equal_lengths(that)
# unsigned accum = 0
# for (int i = 0; i < this->num_words; i++)
# accum ^= this->words[i] & that.words[i]
# int num_ones = count_one_bits((unsigned char *)&accum, sizeof(accum))
# bit_t rv(num_ones & 1)
# return rv
#}
#
## ----------------------------------------------------------------
#void bit_vector_t::check_equal_lengths(bit_vector_t & that)
#{
# if (this->num_bits != that.num_bits) {
# raise
# << "bit_vector_t operator+(): Incompatibly sized "
# << "arguments (" << this->num_bits << ", "
# << that.num_bits << ")." << std::endl
# }
#}
#
## ----------------------------------------------------------------
#void bit_vector_t::bounds_check(int index)
#{
# if ((index < 0) || (index >= this->num_bits)) {
# raise
# << "bit_vector_t array operator: index "
# << index
# << " out of bounds "
# << 0
# << ":"
# << (this->num_bits - 1)
# << std::endl
# }
#}
end
|
<?php
namespace Aalberts\Models\Presenters\Cms;
use Aalberts\Models\Presenters\AbstractPresenter;
/**
 * Presenter for CMS related-product entities: image lookup and filter link.
 */
class RelatedproductPresenter extends AbstractPresenter
{
/**
 * Whether the related product has at least one image attached.
 *
 * @return bool
 */
public function hasImage()
{
return ( $this->entity->relatedproductImages
&& count($this->entity->relatedproductImages)
);
}
/**
 * The file of the first related-product image, or null when none exist.
 *
 * @return mixed|null
 */
public function image()
{
if ( ! $this->hasImage()) return null;
return $this->entity->relatedproductImages->first()->file;
}
/**
 * Absolute URL for the entity's filter link, or null when it is not set.
 *
 * @return null|string
 */
public function link()
{
if ( ! $this->entity->filterlink) return null;
return url($this->entity->filterlink);
}
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using WaveEngine.Components.Cameras;
using WaveEngine.Framework;
using WaveEngine.Framework.Graphics;
namespace IsisTempleProject.Components
{
/// <summary>
/// Behavior that copies the camera's position onto the owning entity's
/// transform every frame (e.g. to keep geometry anchored to the camera).
/// </summary>
public class FollowCameraBehavior : Behavior
{
#region Variables
// Transform of the entity this behavior is attached to (injected by the engine).
[RequiredComponent]
public Transform3D transform3D;
// Camera whose position is mirrored each update.
private Camera3D camera;
#endregion
#region Initialize
// cameraEntity: entity expected to carry a Camera3D component.
public FollowCameraBehavior(Entity cameraEntity)
: base("FollowCamera")
{
this.camera = cameraEntity.FindComponent<Camera3D>();
}
#endregion
#region Public Methods
// Runs once per frame; snaps this entity to the camera's position.
protected override void Update(TimeSpan gameTime)
{
transform3D.Position = camera.Position;
}
#endregion
}
}
|
package com.nickskelton.wifidelity.view.adapter
import com.nickskelton.wifidelity.R
/**
 * Base item shown in the block list: an icon, a title, a subtitle, and a
 * callback invoked with the item itself when the user selects it.
 */
sealed class BlockListItem(
val drawableIconRes: Int,
val titleText: String,
val subtitleText: String,
val onSelected: (BlockListItem) -> Unit
)
/** List item representing a detected Wi-Fi network. */
class NetworkBlockListItem(
networkName: String,
onSelected: (BlockListItem) -> Unit
) : BlockListItem(
R.drawable.ic_network_wifi,
networkName,
"Network Found",
onSelected
)
/**
 * List item for OCR-recognized text; [strength] is the match confidence
 * in percent, shown in the subtitle.
 */
class TextBlockListItem(
foundText: String,
val strength: Int,
onSelected: (BlockListItem) -> Unit
) : BlockListItem(
R.drawable.ic_text_format,
foundText,
"$strength% Match",
onSelected
)
|
package certrotation
import (
"bytes"
"testing"
"time"
"github.com/openshift/library-go/pkg/crypto"
corev1 "k8s.io/api/core/v1"
)
// TestNeedNewSigningCertKeyPair covers the reasons a signer secret may need
// regeneration: missing cert, unparsable cert, expired cert, and the happy
// path where a valid unexpired cert needs nothing (empty reason).
func TestNeedNewSigningCertKeyPair(t *testing.T) {
// Already-expired signer (negative validity).
certData1, keyData1, err := newSigningCertKeyPair("signer1", time.Hour*-1)
if err != nil {
t.Fatalf("Expected no error, but got: %v", err)
}
// Valid signer, expiring one hour from now.
certData2, keyData2, err := newSigningCertKeyPair("signer2", time.Hour*1)
if err != nil {
t.Fatalf("Expected no error, but got: %v", err)
}
cases := []struct {
name string
secret *corev1.Secret
validateReason validateReasonFunc
}{
{
name: "missing tls.crt",
secret: &corev1.Secret{},
validateReason: expectReason("missing tls.crt"),
},
{
name: "bad certificate",
secret: &corev1.Secret{
Data: map[string][]byte{
"tls.crt": []byte("invalid data"),
},
},
validateReason: expectReason("bad certificate"),
},
{
name: "expired",
secret: &corev1.Secret{
Data: map[string][]byte{
"tls.crt": certData1,
"tls.key": keyData1,
},
},
validateReason: expectReason("already expired"),
},
{
// Empty reason means no regeneration is needed.
name: "no new cert needed",
secret: &corev1.Secret{
Data: map[string][]byte{
"tls.crt": certData2,
"tls.key": keyData2,
},
},
validateReason: expectReason(""),
},
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
actual := needNewSigningCertKeyPair(c.secret)
c.validateReason(t, actual)
})
}
}
// newSigningCertKeyPair creates a self-signed CA valid for the given
// duration and returns its PEM-encoded certificate and private key.
// A negative validity yields an already-expired certificate (used by tests).
func newSigningCertKeyPair(signerName string, validity time.Duration) (certData, keyData []byte, err error) {
ca, err := crypto.MakeSelfSignedCAConfigForDuration(signerName, validity)
if err != nil {
return nil, nil, err
}
certBytes := &bytes.Buffer{}
keyBytes := &bytes.Buffer{}
if err := ca.WriteCertConfig(certBytes, keyBytes); err != nil {
return nil, nil, err
}
return certBytes.Bytes(), keyBytes.Bytes(), nil
}
|
package controllers
import (
"fmt"
"github.com/bytesfield/golang-gin-auth-service/src/app/models"
userRepository "github.com/bytesfield/golang-gin-auth-service/src/app/repositories"
"github.com/bytesfield/golang-gin-auth-service/src/app/responses"
"github.com/bytesfield/golang-gin-auth-service/src/app/services"
gin "github.com/gin-gonic/gin"
)
// RefreshToken issues a fresh JWT for the caller identified by the token on
// the request: validate the current token -> load the user -> mint a new
// token, returning 401/404/400 respectively on each failure.
func (server *Server) RefreshToken(ctx *gin.Context) {
id, err := services.GetTokenID(ctx)
fmt.Println(err) // FIXME(review): leftover debug print (logs nil on success); replace with proper logging
if err != nil {
responses.Unauthorized(ctx, "Unauthorized")
return
}
user := models.User{}
userRepo := userRepository.New(&user)
userProfile, err := userRepo.FindUserByID(server.DB, uint32(id))
if err != nil {
responses.NotFound(ctx, "User not found", err.Error())
return
}
token, err := services.RefreshToken(ctx)
if err != nil {
responses.BadRequest(ctx, err.Error())
return
}
// Return both the new token and the profile so clients need no extra fetch.
userData := map[string]interface{}{"token": token, "user": userProfile}
responses.Ok(ctx, "Token refreshed successfully", userData)
}
|
/*===================================================================
* Copyright (c) 2022 Oleg Naraevskiy Date: 02.2022
* Version IDE: MS VS 2019
* Designed by: Oleg Naraevskiy / noa.oleg96@gmail.com [02.2022]
*===================================================================*/
using ModemRebooter.ConnectChecker.Classes;
using System.Net.NetworkInformation;
namespace ModemRebooter.ConnectChecker
{
public class Check
{
    /// <summary>
    /// Pings the given host once and reports whether it replied.
    /// </summary>
    /// <param name="nameOrAddress">Host name or IP address to ping.</param>
    /// <returns>
    /// true when the host answered with <see cref="IPStatus.Success"/>;
    /// false on any other status or on a ping error (unresolvable host,
    /// network failure, ...).
    /// </returns>
    public static bool PingHost(string nameOrAddress)
    {
        // Fix: removed an unused local (PingResult) and replaced the
        // manual try/finally Dispose with a 'using' block, which
        // guarantees disposal even if Send throws.
        bool pingable = false;
        try
        {
            using (Ping pinger = new Ping())
            {
                PingReply reply = pinger.Send(nameOrAddress);
                pingable = reply.Status == IPStatus.Success;
            }
        }
        catch (PingException)
        {
            // Treat any ping failure as "not reachable".
            pingable = false;
        }
        return pingable;
    }
    // For reference (translated from the original notes):
    // pingReply.RoundtripTime  -- response time
    // pingReply.Status         -- status
    // pingReply.Address        -- IP address
}
}
|
using System;
namespace Tenant_Configuration_Server_Dotnet.Models {
public class TenantParameter<T> where T : ITenantParameter, new () {
    /// <summary>Creates and returns a fresh instance of <typeparamref name="T"/>.</summary>
    public T GetInstance () => new T ();
}
}
|
# controlled_context_demo_react
A demo for the tutorial at https://dev.to/charlesdlandau/react-usecontext-and-pre-initialized-providers-2gn5
|
package nl.jongensvantechniek.movierecommendations.service.social
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
/** Mixin for loading a raw social-graph data file via Spark. */
trait GraphLoader {
/**
 * Loads the data source as an RDD of raw text lines.
 *
 * @param sc the active SparkContext used to read the file
 * @param dataSourcePath path (local or distributed FS) of the text file
 * @return an RDD with one element per line of the input file
 */
protected def loadGraph(sc: SparkContext, dataSourcePath: String): RDD[String] = {
sc.textFile(dataSourcePath)
}
}
|
package org.jetbrains.plugins.scala
package codeInsight
package intention
package types
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction
import com.intellij.openapi.command.undo.UndoUtil
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import com.intellij.psi.impl.source.codeStyle.CodeEditUtil
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.codeInsight.intention.types.ConvertImplicitBoundsToImplicitParameter._
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScParameterOwner}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScConstructorOwner, ScTrait}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScTypeBoundsOwner, ScTypeParametersOwner}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createImplicitClauseFromTextWithContext
import org.jetbrains.plugins.scala.lang.refactoring.ScalaNamesValidator
import org.jetbrains.plugins.scala.lang.refactoring.util.InplaceRenameHelper
import scala.collection.mutable
import scala.jdk.CollectionConverters._
/** Intention action that rewrites view/context bounds on type parameters
  * into an explicit implicit parameter clause, then offers in-place
  * renaming of the newly added parameters. */
class ConvertImplicitBoundsToImplicitParameter extends PsiElementBaseIntentionAction {
override def getFamilyName: String = ScalaBundle.message("family.name.convert.implicit.bounds")
override def getText: String = ScalaBundle.message("convert.view.and.context.bounds.to.implicit.parameters")
// Available only where the caret sits in a type-bounds owner with
// implicit bounds (see canBeConverted).
override def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = {
canBeConverted(element)
}
override def invoke(project: Project, editor: Editor, element: PsiElement): Unit = {
val addedParams = doConversion(element)
// The rename template drives an interactive editor session, so it is
// skipped under unit tests.
if (!isUnitTestMode)
runRenamingTemplate(addedParams)
}
}
object ConvertImplicitBoundsToImplicitParameter {
// Convertible when the element is inside a type-bounds owner that has
// implicit (view/context) bounds and the enclosing owner is not a trait
// (traits cannot take constructor parameters).
def canBeConverted(element: PsiElement): Boolean =
element.parentOfType(classOf[ScTypeBoundsOwner], strict = false)
.filter(_.hasImplicitBounds)
.flatMap(_.parentOfType(classOf[ScTypeParametersOwner]))
.exists {
case _: ScTrait => false
case _ => true
}
// Performs the rewrite and returns the implicit parameters that were
// added (used afterwards for the rename template).
def doConversion(element: PsiElement): Seq[ScParameter] = {
val parameterOwner = Option(element)
.filter(_.isValid)
.flatMap(_.parentOfType(classOf[ScParameterOwner], strict = false))
.filterByType[ScTypeParametersOwner]
.getOrElse(return Seq.empty)
// Either a plain function or a class primary constructor.
val (function, isClass) = parameterOwner match {
case function: ScFunction => (function, false)
case ScConstructorOwner.constructor(constr) => (constr, true)
case _ => return Seq.empty
}
// An existing trailing implicit/using clause is merged into the new one
// (Scala 3 files keep their clause untouched here).
val existingClause = parameterOwner.allClauses.lastOption.filter(pc => (pc.isImplicit || pc.isUsing) && !element.isInScala3File)
val existingParams = existingClause.iterator.flatMap(_.parameters).toSeq
// One candidate parameter per context bound: (short name, fallback name,
// type text such as "Ordering[T]").
val candidates = for {
tp <- parameterOwner.typeParameters
cb <- tp.contextBoundTypeElement
cbText = cb.getText
cbName = cbText.lowercased
typeText = cbText.parenthesize(!ScalaNamesValidator.isIdentifier(cbText))
} yield (cbName.escapeNonIdentifiers, (cbName + tp.name.capitalize).escapeNonIdentifiers, s"$typeText[${tp.name}]")
// Short names are used only when unambiguous; clashes fall back to the
// longer "boundTypeParam" form, with numeric suffixes as a last resort.
val isUniqueName = candidates.groupBy(_._1).filter(_._2.sizeIs == 1).keySet
val nextNumber = mutable.Map.empty[String, Int]
val newParamsTexts = for {
(primaryName, altName, typeText) <- candidates
name = if (isUniqueName(primaryName)) primaryName else altName
suffix = nextNumber.updateWith(name)(old => Some(old.getOrElse(-1) + 1)).filter(_ >= 1).fold("")(_.toString)
} yield s"$name$suffix: $typeText"
// remove old clause
existingClause.foreach(_.delete())
// add clause
val clause = createImplicitClauseFromTextWithContext(existingParams.map(_.getText) ++ newParamsTexts, parameterOwner, isClass)
CodeEditUtil.setNodeGenerated(clause.getNode, true)
function.parameterList.addClause(clause)
// remove bounds
parameterOwner.typeParameters.foreach(_.removeImplicitBounds())
UndoUtil.markPsiFileForUndo(function.getContainingFile)
// Only the freshly added parameters (not the merged pre-existing ones).
clause.parameters.takeRight(newParamsTexts.size)
}
// Starts an in-place rename session over the added parameters.
def runRenamingTemplate(params: Seq[ScParameter]): Unit = {
if (params.isEmpty) return
val parent = PsiTreeUtil.findCommonParent(params.asJava)
val helper = new InplaceRenameHelper(parent)
params.foreach(p => helper.addGroup(p, Seq.empty, Seq.empty))
helper.startRenaming()
}
}
|
class MonkeyBaseException(Exception):
    """Base class for monkey-test related errors."""
    pass
class MonkeyTypeErrorException(MonkeyBaseException):
    """Monkey-test error for a wrongly-typed value (per its name)."""
    pass
class FileDownloadErrorException(MonkeyBaseException):
    """Monkey-test error raised when a file download fails."""
    pass
class CaseBaseException(Exception):
    """Base class for test-case related errors."""
    pass
class CaseTypeErrorException(CaseBaseException):
    """Test-case error for a wrongly-typed value (per its name)."""
    pass
class DeviceNotConnectedException(Exception):
    """Raised when the target device is not connected."""
    pass
class LocalPackageNotFoundException(Exception):
    """Raised when an expected local package file is missing."""
    pass
class SetUpErrorException(Exception):
    """Raised when test set-up fails."""
    pass
class InstallAppException(Exception):
    """Raised when installing the app under test fails."""
    pass
class CheckScreenLockedFailed(Exception):
    """Raised when the screen-lock check cannot be performed."""
    pass
|
using System;
using MvvmCross.Binding;
using MvvmCross.Binding.Bindings.Target;
using MvvmCross.Platform.Platform;
using MvvmCross.Platform.UI;
using UIKit;
using AppRopio.Base.iOS.UIExtentions;
namespace AppRopio.Base.iOS.Binding
{
/// <summary>
/// One-way MvvmCross target binding that maps an MvxVisibility value onto a
/// UIView's hidden state using the animated SetHiddenAnimated extension.
/// </summary>
public class AnimatedVisibilityBinding : MvxConvertingTargetBinding
{
// Strongly-typed access to the bound UIView.
protected UIView View => (UIView)Target;
public AnimatedVisibilityBinding(UIView target)
: base(target)
{
}
// View-model -> view only; the view never writes visibility back.
public override MvxBindingMode DefaultMode => MvxBindingMode.OneWay;
public override Type TargetType => typeof(MvxVisibility);
// Applies the converted MvxVisibility to the view with animation.
protected override void SetValueImpl(object target, object value)
{
var view = (UIView)target;
var visibility = (MvxVisibility)value;
switch (visibility)
{
case MvxVisibility.Visible:
view.SetHiddenAnimated(false);
break;
case MvxVisibility.Collapsed:
view.SetHiddenAnimated(true);
break;
default:
// Unknown enum value: log rather than throw inside a binding.
MvxBindingTrace.Trace(MvxTraceLevel.Warning, "Visibility out of range {0}", value);
break;
}
}
}
}
|
/*
* Copyright 2020 Safeboda
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.safeboda.ui.viewmodel
import com.jraska.livedata.test
import com.safeboda.BaseViewModelTest
import com.safeboda.core.data.remote.UserOrganizationRepository
import com.safeboda.data.local.sample.username
import com.safeboda.data.repository.UserRepository
import io.mockk.coEvery
import io.mockk.coVerify
import io.mockk.mockk
import kotlinx.coroutines.FlowPreview
import kotlinx.coroutines.flow.flowOf
import org.junit.Before
import org.junit.Test
import org.robolectric.annotation.Config
@Config(manifest = Config.NONE)
class UserOrganizationViewModelTest : BaseViewModelTest() {
// Repositories are mocked; only call interactions are verified.
private val userRepository = mockk<UserRepository>()
private val userOrganizationRepository = mockk<UserOrganizationRepository>()
private lateinit var userOrganizationViewModel: UserOrganizationViewModel
@Before
fun setUp() {
userOrganizationViewModel = UserOrganizationViewModel(
userOrganizationRepository,
userRepository
)
}
@FlowPreview
@Test
fun `test get user or organisation details are fetched successfully`() {
// Stub the repository to emit an empty flow for the sample username.
coEvery {
userOrganizationRepository.fetchUserOrOrganization(
username,
any(),
any()
)
} returns flowOf()
userOrganizationViewModel.fetchUserOrOrganization(username)
// Verify the repository was asked for that username...
coVerify { userOrganizationRepository.fetchUserOrOrganization(username, any(), any()) }
// ...and that any published profile model carries non-empty data.
userOrganizationViewModel.profileModel.test().assertValue { it.data?.isNotEmpty() }
}
}
|
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using Mirror;
using SS3D.Content.Systems.Interactions;
using SS3D.Engine.Interactions;
using SS3D.Engine.Inventory;
namespace SS3D.Content.Items.Functional.Tools
{
// Simple flashlight
/// <summary>
/// Toggleable flashlight item: flips its Light component on/off, swaps the
/// bulb material to match, and mirrors the state to clients via RPC.
/// </summary>
public class Flashlight : Item, IToggleable
{
[SerializeField]
public new Light light = null; // 'new' hides an inherited member named 'light' -- TODO confirm intent
public Sprite toggleIcon;
public Material bulbMaterialOn;
public Material bulbMaterialOff;
public GameObject bulbObject;
private MeshRenderer meshRenderer;
// Caches the bulb renderer and syncs the material with the initial light state.
public void Start()
{
meshRenderer = bulbObject.GetComponent<MeshRenderer>();
meshRenderer.material = (light.enabled ? bulbMaterialOn : bulbMaterialOff);
}
// Flips the light locally, then broadcasts the new state to clients.
public void Toggle()
{
light.enabled = !light.enabled;
meshRenderer.material = (light.enabled ? bulbMaterialOn : bulbMaterialOff);
RpcToggle(light.enabled);
}
// Current on/off state of the flashlight.
public bool GetState()
{
return light.enabled;
}
// Applies a toggle originating on the server to this client.
[ClientRpc]
private void RpcToggle(bool lightEnabled)
{
light.enabled = lightEnabled;
meshRenderer.material = lightEnabled ? bulbMaterialOn : bulbMaterialOff;
}
// Extends the base item's interactions with the on/off toggle.
public override IInteraction[] GenerateInteractionsFromTarget(InteractionEvent interactionEvent)
{
List<IInteraction> interactions = base.GenerateInteractionsFromTarget(interactionEvent).ToList();
interactions.Add(new ToggleInteraction{ IconOn = toggleIcon, IconOff = toggleIcon });
return interactions.ToArray();
}
}
}
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace TPS
{
// How the particle system should (re)compute its buffers.
public enum ComputeType
{
None,
Setup,
SetupImmediately,
DisableAll,
}
// Which update kernel drives particle motion.
public enum UpdateMethodType
{
None,
Target,
Explode,
Gravity,
}
// A single mesh-backed target that particles can form.
public interface IParticleTarget
{
Mesh Mesh { get; }
int VertexCount { get; }
Vector3[] Vertices { get; }
Vector2[] UV { get; }
Texture2D Texture { get; }
Matrix4x4 WorldMatrix { get; }
float MinScale { get; }
float MaxScale { get; }
uint[] SubGroupIndices { get; }
void Initialize();
// startIdx: offset of this target's first particle in the shared buffer.
void SetStartIndex(int startIdx);
}
// A collection of particle targets sharing one texture array and buffers.
public interface IParticleTargetGroup
{
Texture2DArray TextureArray { get; }
Matrix4x4[] MatrixData { get; }
InitData[] AllInitData { get; }
uint[] Indices { get; }
void Initialize(TransformParticleSystem system);
void UpdateMatrices();
}
// Per-particle runtime state (mirrors the compute-shader struct layout;
// field order must stay in sync with the GPU side -- do not reorder).
public struct TransformParticle
{
public int isActive;
public int targetId;
public Vector2 uv;
public Vector3 targetPosition;
public float speed;
public Vector3 position;
public int useTexture;
public float scale;
public Vector4 velocity;
public Vector3 horizontal;
}
// Initial per-particle state uploaded once at setup (same caveat on layout).
public struct InitData
{
public int isActive;
public Vector3 targetPosition;
public int targetId;
public float scale;
public Vector4 velocity;
public Vector2 uv;
public Vector3 horizontal;
}
}
|
module Robots
  # A robot on the grid: tracks its position, facing, command string and
  # whether it has been lost over the edge of the board.
  class Robot
    attr_accessor :orientation, :position
    attr_reader :commands, :status

    # status defaults to :operating; pass :lost to restore a lost robot.
    def initialize(position:, orientation:, commands:, status: :operating)
      @position    = position
      @orientation = orientation
      @commands    = commands
      @status      = status
    end

    # Marks the robot as fallen off the board.
    def lose!
      @status = :lost
    end

    # True once lose! has been called (or status was given as :lost).
    def lost?
      @status == :lost
    end

    # Replaces the current position.
    def set_position(new_position)
      @position = new_position
    end

    # Replaces the current facing.
    def set_orientation(new_orientation)
      @orientation = new_orientation
    end
  end
end
|
require 'rails_helper'
RSpec.describe "API::V1::Users", type: :request do
# Shared fixtures: a regular user, an admin, JWTs for both, and three
# header sets (user token, admin token, no token).
before(:each) do
@user = create(:user)
@admin = create(:user, username: "Admin User", admin: true)
@token = Auth.create_token(@user.id)
@admin_token = Auth.create_token(@admin.id)
@token_headers = {
'Accept': 'application/json',
'Content-Type': 'application/json',
'Authorization': "Bearer: #{@token}"
}
@admin_headers = {
'Accept': 'application/json',
'Content-Type': 'application/json',
'Authorization': "Bearer: #{@admin_token}"
}
@tokenless_headers = {
'Content-Type': 'application/json',
}
end
# User creation: admin-only endpoint returning the new user plus a JWT.
describe "POST /users" do
describe "on success" do
before(:each) do
params = {
user: {
username: "testuser",
password: "testtest",
name: "testname"
}
}
post "/api/v1/users",
params: params.to_json,
headers: @admin_headers
@response = response
end
it "creates a user from the params" do
expect(User.last.username).to eq("testuser")
end
it "returns the new user and a JWT token" do
body = JSON.parse(@response.body)
expect(@response.status).to eq(200)
expect(body['token']).not_to eq(nil)
expect(body['user']['id']).not_to eq(nil)
expect(body['user']['username']).to eq("testuser")
# Name is title-cased by the model -- "testname" becomes "Testname".
expect(body['user']['name']).to eq("Testname")
# The password digest must never be serialized.
expect(body['user']['password_digest']).to eq(nil)
end
end
describe "on authentication error" do
it "only an authenticated user can add a user" do
params = {
user: {
username: "testuser",
password: "testtest",
name: "testname"
}
}
post "/api/v1/users",
params: params.to_json,
headers: @tokenless_headers
body = JSON.parse(response.body)
expect(response.status).to eq(403)
expect(body["errors"]).to eq([{"message" => "You must include a JWT token"}])
end
it "only an admin user can add a user" do
params = {
user: {
username: "testuser",
password: "testtest",
name: "testname"
}
}
post "/api/v1/users",
params: params.to_json,
headers: @token_headers
body = JSON.parse(response.body)
expect(response.status).to eq(403)
expect(body["errors"]).to eq([{"message" => "You must be an admin to perform this task"}])
end
end
describe "on validation error" do
it "required a valid email and password" do
params = {
user: {
username: "",
password: ""
}
}
post "/api/v1/users",
params: params.to_json,
headers: @admin_headers
body = JSON.parse(response.body)
expect(response.status).to eq(500)
expect(body["errors"]).to eq([
"Password can't be blank",
'Password is too short (minimum is 8 characters)',
"Username can't be blank"])
end
end
end
# Single-user lookup by id.
describe "GET /users/:id" do
  describe "on success" do
    # Fixed example description: this endpoint returns one user, not a list.
    it "returns the requested user" do
      get "/api/v1/users/#{@user.id}",
        headers: @token_headers
      body = JSON.parse(response.body)
      expect(response.status).to eq(200)
      expect(body['user']['id']).to eq(@user.id)
      expect(body['user']['username']).to eq(@user.username)
      expect(body['user']['name']).to eq(@user.name)
      # The password digest must never be serialized.
      expect(body['user']['password_digest']).to eq(nil)
    end
  end
  describe "on failure" do
    it "returns a status of 404 with an error message" do
      get "/api/v1/users/5",
        headers: @token_headers
      body = JSON.parse(response.body)
      expect(response.status).to eq(404)
      expect(body["errors"]).to eq([{"message"=> "Page not found"}])
    end
  end
end
end
|
package lint
import (
"regexp"
yaml "gopkg.in/yaml.v3"
)
// regexpStr embeds a compiled *regexp.Regexp so that a YAML scalar can be
// unmarshalled directly into a regular expression.
type regexpStr struct {
*regexp.Regexp
}
// UnmarshalYAML decodes a YAML scalar into a compiled regular expression.
// Decode and compile errors are now surfaced to the YAML decoder: the
// original ignored the Decode error and used MustCompile, which panicked
// on an invalid user-supplied pattern.
func (r *regexpStr) UnmarshalYAML(n *yaml.Node) error {
	var str string
	if err := n.Decode(&str); err != nil {
		return err
	}
	re, err := regexp.Compile(str)
	if err != nil {
		return err
	}
	r.Regexp = re
	return nil
}
// stringRequirement wraps a plain string template so it can be
// unmarshalled from a bare YAML scalar.
type stringRequirement struct {
Template string
}
// UnmarshalYAML decodes a YAML scalar into the Template field.
// The Decode error is now propagated instead of silently ignored.
func (s *stringRequirement) UnmarshalYAML(n *yaml.Node) error {
	var str string
	if err := n.Decode(&str); err != nil {
		return err
	}
	s.Template = str
	return nil
}
|
package u32
import (
"encoding/hex"
"strconv"
"strings"
)
// TCPFields flags which TCP header fields participate in u32 match
// generation; unset fields are masked out entirely (mask byte "00").
type TCPFields struct {
SourcePort bool
DestinationPort bool
SequenceNumber bool
ACKNumber bool
DataOffset bool
Flags bool
WindowSize bool
Checksum bool
UrgentPointer bool
}
// TCPHeader holds concrete TCP header field values, the running offset of
// this header within the packet (Offset), and the subset of fields to
// match on (Set).
type TCPHeader struct {
Offset *Offset
SourcePort uint16
DestinationPort uint16
SequenceNumber uint32
ACKNumber uint32
DataOffset uint8
Flags uint16
WindowSize uint16
Checksum uint16
UrgentPointer uint16
Set *TCPFields
}
// NextHeader returns the u32 sub-expression that advances past this TCP
// header: it reads the 32-bit word at byte offset+12, and ">>26&0x3C"
// extracts the 4-bit data offset scaled to bytes; '@' rebases the offset.
func (tcp *TCPHeader) NextHeader() string {
return strconv.Itoa(tcp.Offset.Offset+12) + ">>26&0x3C@"
}
// GetOffset returns a copy of the header's current offset.
func (tcp *TCPHeader) GetOffset() Offset {
return *tcp.Offset
}
// SetOffset stores a defensive copy of the given offset so later mutations
// of the caller's value do not affect this header.
func (tcp *TCPHeader) SetOffset(offset *Offset) {
tcp.Offset = &Offset{Offset: offset.Offset, U32Offset: offset.U32Offset}
}
// MoveOffset resets the byte offset and appends this header's skip
// expression so that subsequent headers are matched after the TCP header.
func (tcp *TCPHeader) MoveOffset(offset *Offset) {
offset.Offset = 0
offset.U32Offset += tcp.NextHeader()
}
// Marshall serializes the header fields in wire order (20 bytes) followed
// by four zero padding bytes, so BuildMatches can always read aligned
// 4-byte windows without bounds checks.
func (tcp *TCPHeader) Marshall() []byte {
var bytes []byte
bytes = append(bytes, Uint16ToUint8(tcp.SourcePort)...)
bytes = append(bytes, Uint16ToUint8(tcp.DestinationPort)...)
bytes = append(bytes, Uint32ToUint8(tcp.SequenceNumber)...)
bytes = append(bytes, Uint32ToUint8(tcp.ACKNumber)...)
var tmp = Uint16ToUint8(tcp.Flags)
// Byte 12 packs the 4-bit data offset with the high byte of the flags.
bytes = append(bytes, (tcp.DataOffset<<4)+tmp[0])
bytes = append(bytes, tmp[1])
bytes = append(bytes, Uint16ToUint8(tcp.WindowSize)...)
bytes = append(bytes, Uint16ToUint8(tcp.Checksum)...)
bytes = append(bytes, Uint16ToUint8(tcp.UrgentPointer)...)
bytes = append(bytes, []byte{0x0, 0x0, 0x0, 0x0}...)
return bytes
}
// BuildMatches walks the marshalled header in 4-byte windows and emits one
// "offset&mask=0xVALUE" u32 clause per window, skipping windows whose mask
// is all zero; clauses are joined with " && ".
func (tcp *TCPHeader) BuildMatches() string {
packet := tcp.Marshall()
matches := []string{}
var i int = 0
for i < len(packet) {
match := ""
mask := "0x"
// Build the 4-byte mask and masked value for this window.
for index := 0; index < 4; index++ {
msk, mtch := tcp.GetMask(i+index, packet[i+index])
mask += msk
match += strings.ToUpper(hex.EncodeToString([]byte{mtch}))
}
if mask != "0x00000000" {
match = tcp.Offset.U32Offset + strconv.Itoa(tcp.Offset.Offset+i) + "&" + mask + "=0x" + match
matches = append(matches, match)
}
i += 4
}
return strings.Join(matches, " && ")
}
// GetMask maps a byte offset within the marshalled TCP header to its u32
// mask (two uppercase hex characters) and the masked value. Bytes of
// fields not flagged in tcp.Set — and any offset past the header — yield
// the zero mask ("00", 0).
func (tcp *TCPHeader) GetMask(offset int, value byte) (string, byte) {
	switch {
	case offset >= 0 && offset <= 1: // source port
		if tcp.Set.SourcePort {
			return "FF", 0xFF & value
		}
	case offset >= 2 && offset <= 3: // destination port
		if tcp.Set.DestinationPort {
			return "FF", 0xFF & value
		}
	case offset >= 4 && offset <= 7: // sequence number
		if tcp.Set.SequenceNumber {
			return "FF", 0xFF & value
		}
	case offset >= 8 && offset <= 11: // acknowledgement number
		if tcp.Set.ACKNumber {
			return "FF", 0xFF & value
		}
	case offset == 12: // data offset nibble shares this byte with flag bits
		switch {
		case tcp.Set.DataOffset && tcp.Set.Flags:
			return "F1", 0xF1 & value
		case tcp.Set.DataOffset:
			return "F0", 0xF0 & value
		case tcp.Set.Flags:
			return "0F", 0x0F & value
		}
	case offset == 13: // low byte of the flags
		if tcp.Set.Flags {
			return "FF", 0xFF & value
		}
	case offset >= 14 && offset <= 15: // window size
		if tcp.Set.WindowSize {
			return "FF", 0xFF & value
		}
	case offset >= 16 && offset <= 17: // checksum
		if tcp.Set.Checksum {
			return "FF", 0xFF & value
		}
	case offset >= 18 && offset <= 19: // urgent pointer
		if tcp.Set.UrgentPointer {
			return "FF", 0xFF & value
		}
	}
	return "00", 0x00
}
|
#include "include.h"
#define MAXN 100000
#define FLAT_CONST 298.256
#define ERAD 6378.139
#define RPERD 0.017453292
#define FONE (float)(1.0)
#define FTWO (float)(2.0)
void *check_malloc(size_t);
void *check_realloc(void *ptr,size_t len);
FILE *fopfile(char*, char*);
#include "function.h"
/* Per-station record: name, geographic coordinates, projected
 * coordinates, and grid indices. */
struct statinfo
{
char name[8];
float lat;   /* latitude in degrees */
float lon;   /* longitude in degrees */
float xp;    /* projected x coordinate */
float yp;    /* projected y coordinate */
float zp;    /* vertical coordinate read from the input file */
int ix;      /* grid index along x */
int iy;      /* grid index along y */
int iz;      /* grid index along z */
};
main(int ac,char **av)
{
FILE *fopfile(), *fp;
struct statinfo *si;
float mlat, mlon;
float kperd_n, kperd_e, xlen, ylen, xs, ys;
int i, mn, ns, nx, ny, test;
float cosR, sinR, xr, yr;
char infile[512], outfile[512], str[512];
float rotate = 0.0;
float h = 1.0;
float rperd = RPERD;
float erad = ERAD;
float fc = FLAT_CONST;
float g2, radc, latavg;
double g0, b0;
double amat[9], ainv[9];
int xy2ll = 0;
int ll2xy = 1;
double xr0, yr0, dlon, dlat, dxr, dyr;
int geo2utm = 0;
int utm2geo = 1;
int utm_zone = 11;
/*
geoproj=0: RWG spherical projection with local kmplat, kmplon
=1: RWG great circle projection
=2: UTM coordinate projection
*/
int geoproj = 0; /* default is OLD way */
int center_origin = 0;
float xshift = -1.0e+15;
float yshift = -1.0e+15;
float zscale = 1.0;
float zupper = 1.0e+15;
float zlower = -1.0e+15;
setpar(ac, av);
mstpar("mlat","f",&mlat);
mstpar("mlon","f",&mlon);
mstpar("infile","s",infile);
mstpar("outfile","s",outfile);
mstpar("rotate","f",&rotate);
mstpar("xlen","f",&xlen);
mstpar("ylen","f",&ylen);
getpar("zscale","f",&zscale);
getpar("zupper","f",&zupper);
getpar("zlower","f",&zlower);
getpar("geoproj","d",&geoproj);
getpar("center_origin","d",¢er_origin);
getpar("xshift","f",&xshift);
getpar("yshift","f",&yshift);
endpar();
cosR = cos(rotate*rperd);
sinR = sin(rotate*rperd);
if(center_origin != 0)
{
xshift = -0.5*xlen;
yshift = -0.5*ylen;
}
else
{
if(xshift < -1.0e+14)
xshift = 0.0;
if(yshift < -1.0e+14)
yshift = 0.0;
}
if(geoproj == 0)
{
radc = ERAD*RPERD;
set_g2(&g2,&fc);
latavg = mlat;
if(center_origin == 0) /* backward compatible */
latavg = mlat - 0.5*(xlen*sinR + ylen*cosR)/111.20;
geocen(&latavg,(double)(latavg*rperd));
latlon2km(&latavg,&kperd_n,&kperd_e,&radc,&g2);
fprintf(stderr,"ke=%12.4f kn=%12.4f latavg=%10.4f\n",kperd_e,kperd_n,latavg/rperd);
}
else if(geoproj == 1)
{
gen_matrices(amat,ainv,&rotate,&mlon,&mlat);
g0 = (double)(0.5*ylen)/(double)(erad);
b0 = (double)(0.5*xlen)/(double)(erad);
}
else if(geoproj == 2)
{
dlon = mlon;
dlat = mlat;
geoutm_(&dlon,&dlat,&xr0,&yr0,&utm_zone,&geo2utm);
fprintf(stderr,"UTM Zone= %d\n",utm_zone);
}
fprintf(stderr,"ke=%12.4f kn=%12.4f latavg=%10.4f\n",kperd_e,kperd_n,latavg/rperd);
si = (struct statinfo *)check_malloc(MAXN*sizeof(struct statinfo));
fp = fopfile(infile,"r");
ns = 0;
mn = 1;
while(fgets(str,512,fp) != NULL)
{
fprintf(stderr,"ns= %d\n",ns);
if(ns == mn*MAXN)
{
mn++;
si = (struct statinfo *)check_realloc(si,mn*MAXN*sizeof(struct statinfo));
}
sscanf(str,"%f %f %f",&si[ns].lon,&si[ns].lat,&si[ns].zp);
if(geoproj == 0)
{
xs = (si[ns].lon - mlon)*kperd_e;
ys = (mlat - si[ns].lat)*kperd_n;
xr = xs*cosR + ys*sinR - xshift;
yr = -xs*sinR + ys*cosR - yshift;
}
else if(geoproj == 1)
{
gcproj(&xr,&yr,&si[ns].lon,&si[ns].lat,&erad,&g0,&b0,amat,ainv,ll2xy);
}
else if(geoproj == 2)
{
dlon = si[ns].lon;
dlat = si[ns].lat;
geoutm_(&dlon,&dlat,&dxr,&dyr,&utm_zone,&geo2utm);
xs = 0.001*(dxr - xr0);
ys = 0.001*(yr0 - dyr);
xr = xs*cosR + ys*sinR - xshift;
yr = -xs*sinR + ys*cosR - yshift;
}
si[ns].xp = xr;
si[ns].yp = yr;
si[ns].zp = zscale*si[ns].zp;
if(si[ns].zp > zupper)
si[ns].zp = zupper;
if(si[ns].zp < zlower)
si[ns].zp = zlower;
ns++;
}
fclose(fp);
fp = fopfile(outfile,"w");
for(i=0;i<ns;i++)
fprintf(fp,"%10.5f %10.5f %10.5f\n",si[i].xp,si[i].yp,si[i].zp);
fclose(fp);
}
/* Resizes 'ptr' to 'len' bytes via realloc(); on failure prints a
 * diagnostic and terminates the program, so callers never see NULL. */
void *check_realloc(void *ptr,size_t len)
{
void *newptr;

newptr = realloc(ptr,len);
if(newptr == NULL)
   {
   fprintf(stderr,"***** memory reallocation error\n");
   exit(-1);
   }
return(newptr);
}
/* fopen() wrapper: opens 'name' with 'mode', aborting with a diagnostic
 * if the file cannot be opened.  Never returns NULL. */
FILE *fopfile(char *name,char *mode)
{
FILE *fp = fopen(name,mode);

if(fp == NULL)
   {
   fprintf(stderr,"CAN'T FOPEN FILE = %s, MODE = %s\n", name, mode);
   exit(-1);
   }
return(fp);
}
/* malloc() wrapper: allocates 'len' bytes or aborts the program with a
 * diagnostic on failure, so callers never see NULL. */
void *check_malloc(size_t len)
{
void *ptr = malloc(len);

if(ptr == NULL)
   {
   fprintf(stderr,"***** memory allocation error\n");
   exit(-1);
   }
return(ptr);
}
|
import { Model } from 'mongoose';
import { Injectable, Logger } from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { UserDocument } from 'src/schemas/user.schema';
import { OnEvent } from '@nestjs/event-emitter';
@Injectable()
export class AuthService {
  private readonly logger = new Logger(AuthService.name);

  /**
   * In-memory cache of users keyed by external id.  Entries are added on
   * first lookup and evicted via the 'auth.cache.clear' event.
   */
  private userCache: { [key: string]: UserDocument } = {};

  constructor(@InjectModel('User') private userModel: Model<UserDocument>) {}

  /**
   * Resolves a user by external id, creating the document on first login.
   *
   * NOTE(review): two concurrent calls for the same unseen user can race
   * past the cache check and both create a document — confirm a unique
   * index exists on `externalId`.
   *
   * @param userId   external identity-provider id
   * @param userName display name stored when creating a new user
   * @returns the cached or newly created user document
   */
  async syncUser(userId: string, userName: string) {
    if (!this.userCache[userId]) {
      const user = await this.userModel.findOne({ externalId: userId });
      if (user) {
        this.userCache[userId] = user;
      } else {
        this.logger.log('First login: ' + userName);
        // Model.create with an array returns an array of documents.
        this.userCache[userId] = (
          await this.userModel.create([
            { externalId: userId, userName: userName },
          ])
        )[0];
      }
    }
    return this.userCache[userId];
  }

  /**
   * Clears the whole cache or a single user's entry.
   *
   * Fix: previously a single-user clear assigned `undefined`, which left
   * the key present in the map (violating the declared value type and
   * breaking `key in cache` style checks); `delete` removes it properly.
   * Payload access is optional-chained so a missing payload is a no-op.
   */
  @OnEvent('auth.cache.clear')
  clearCache(payload: any) {
    if (payload?.full) {
      this.userCache = {};
    } else if (payload?.user) {
      delete this.userCache[payload.user];
    }
  }
}
|
global using CodeAnalysis.TestTools;
global using FluentAssertions;
global using NUnit.Framework;
global using Qowaiv.CodeAnalysis;
global using Qowaiv.CodeAnalysis.Diagnostics;
global using System.Linq;
|
package structure.wallbender
import structure.WallStructure
import structure.helperClasses.SpookyWall
/**
 * Returns the given walls sorted by start time, after applying this
 * structure's color to them.
 */
fun WallStructure.color(l: List<SpookyWall>): List<SpookyWall> {
    val sortedWalls = l.sortedBy(SpookyWall::startTime)
    this.color.colorWalls(sortedWalls)
    return sortedWalls
}
|
//! # The manifest (`*.txt`) files
mod lines;
use std::collections::BTreeMap;
use std::{fmt, io};
use futures_util::{TryStream, TryStreamExt};
use nom_supreme::final_parser::Location;
use thiserror::Error;
use self::lines::{file_line, version_line};
pub use self::lines::{FileLine, VersionLine};
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
/// A section of the manifest
pub enum Section {
    /// The `[version]` section
    Version,
    /// The `[files]` section
    Files,
}

impl Section {
    /// The literal header line introducing this section in a manifest file.
    fn as_header(&self) -> &'static str {
        match self {
            Self::Version => "[version]",
            Self::Files => "[files]",
        }
    }
}
#[derive(Debug, Error)]
/// An error from parsing a manifest
///
/// `IO` and `Nom` wrap their sources via `#[from]`, so `?` converts them
/// automatically; `Display` is implemented manually below instead of via
/// `#[error(...)]` attributes.
pub enum Error {
    /// Unexpected EOF, expected a header
    MissingHeader(&'static str),
    /// Unexpected EOF, expected version line
    MissingVersionLine,
    /// Expected a header but found something else (header, offending line)
    ExpectedHeader(&'static str, String),
    /// An IO error
    IO(#[from] std::io::Error),
    /// Failed to parse a line
    Nom(#[from] nom::error::Error<Location>),
}
impl fmt::Display for Error {
    /// Short human-readable description; wrapped sources are reachable
    /// through `Error::source`, so they are not repeated here.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::MissingHeader(h) => write!(f, "Missing '{}' header", h),
            Self::MissingVersionLine => write!(f, "Missing version line"),
            Self::ExpectedHeader(h, line) => {
                write!(f, "Expected '{}' header, got {:?}", h, line)
            }
            Self::IO(_) => write!(f, "I/O error"),
            Self::Nom(_) => write!(f, "Parser error"),
        }
    }
}
/// The result type for this module
pub type Result<T> = std::result::Result<T, Error>;
async fn expect_header<L>(lines: &mut L, section: Section) -> Result<()>
where
L: TryStream<Ok = String, Error = io::Error> + Unpin,
{
let header = section.as_header();
let res = lines.try_next().await?;
let line = res.ok_or(Error::MissingHeader(header))?;
if line != header {
return Err(Error::ExpectedHeader(header, line));
}
Ok(())
}
async fn read_index_version<L>(lines: &mut L) -> Result<VersionLine>
where
L: TryStream<Ok = String, Error = io::Error> + Unpin,
{
expect_header(lines, Section::Version).await?;
let line = lines.try_next().await?.ok_or(Error::MissingVersionLine)?;
let version = version_line(&line)?;
Ok(version)
}
/// A manifest file in-memory
///
/// Produced by [`load_manifest`]; the map is keyed by filename, so
/// iteration yields files in sorted order.
pub struct Manifest {
    /// The parsed version line
    pub version: VersionLine,
    /// The parsed, sorted list of files
    pub files: BTreeMap<String, FileLine>,
}
/// Load the manifest from a stream of lines
///
/// Expects the `[version]` section (header + version line) followed by
/// the `[files]` header and one parseable file line per remaining line.
pub async fn load_manifest<L>(lines: &mut L) -> Result<Manifest>
where
    L: TryStream<Ok = String, Error = io::Error> + Unpin,
{
    let version = read_index_version(lines).await?;
    expect_header(lines, Section::Files).await?;

    let mut files = BTreeMap::new();
    while let Some(line) = lines.try_next().await? {
        let (filename, data) = file_line(&line)?;
        files.insert(filename.to_owned(), data);
    }
    Ok(Manifest { version, files })
}
|
import 'package:flutter/material.dart';
/// Root widget of the fuel-entry form screen; all state lives in
/// [HomePageState].
class HomePage extends StatefulWidget {
  const HomePage({Key key}) : super(key: key);

  @override
  HomePageState createState() => new HomePageState();
}
/// Form state for the fuel-entry screen: four numeric fields and a
/// "Calculate" button that validates and saves them.
class HomePageState extends State<HomePage> {
  // NOTE(review): these field names are leftovers from an employee form;
  // they actually hold the fuel readings labelled below — rename when the
  // surrounding code can absorb the change.
  String firstname; // holds "Starting Reading"
  String lastname; // holds "Ending Reading"
  String emailId; // holds "Price"
  String mobileno; // holds "Litres"

  final scaffoldKey = new GlobalKey<ScaffoldState>();
  final formKey = new GlobalKey<FormState>();

  /// Validates the form; on success saves the field values and logs them.
  /// (Fix: previously returned `null` from this void method on failure.)
  void _submit() {
    final form = formKey.currentState;
    if (!form.validate()) {
      return;
    }
    form.save();
    print(firstname);
    print(lastname);
    print(mobileno);
    print(emailId);
    // TODO: persist the entry once the DB layer is wired up.
    // var employee = Employee(firstname, lastname, mobileno, emailId);
    // var dbHelper = DBHelper();
    // dbHelper.saveEmployee(employee);
    // _showSnackBar("Data saved successfully");
  }

  @override
  Widget build(BuildContext context) {
    return new Card(
      elevation: 2.5,
      color: Colors.white,
      child: new Padding(
        padding: const EdgeInsets.all(16.0),
        child: new Form(
          key: formKey,
          child: new Column(
            children: [
              new TextFormField(
                keyboardType: TextInputType.number,
                decoration: new InputDecoration(labelText: 'Starting Reading'),
                validator: (val) =>
                    val.length == 0 ? "Enter Starting Reading" : null,
                onSaved: (val) => this.firstname = val,
              ),
              new TextFormField(
                keyboardType: TextInputType.number,
                decoration: new InputDecoration(labelText: 'Ending Reading'),
                validator: (val) =>
                    val.length == 0 ? 'Enter Ending Reading' : null,
                onSaved: (val) => this.lastname = val,
              ),
              new TextFormField(
                keyboardType: TextInputType.number,
                decoration: new InputDecoration(labelText: 'Litres'),
                validator: (val) => val.length == 0 ? 'Enter Litres' : null,
                onSaved: (val) => this.mobileno = val,
              ),
              new TextFormField(
                keyboardType: TextInputType.number,
                decoration: new InputDecoration(labelText: 'Price'),
                validator: (val) => val.length == 0 ? 'Price' : null,
                onSaved: (val) => this.emailId = val,
              ),
              // Gradient "Calculate" button.
              new Container(
                width: 300.0,
                height: 50.0,
                decoration: new BoxDecoration(
                  borderRadius: new BorderRadius.circular(30.0),
                  gradient: new LinearGradient(
                      colors: [
                        const Color(0xFF3366FF),
                        const Color(0xFF00CCFF)
                      ],
                      begin: const FractionalOffset(0.0, 0.0),
                      end: const FractionalOffset(1.0, 0.0),
                      stops: [0.0, 1.0],
                      tileMode: TileMode.clamp),
                ),
                margin: const EdgeInsets.only(top: 10.0),
                child: new FlatButton(
                  textColor: Colors.white,
                  shape: new RoundedRectangleBorder(
                      borderRadius: new BorderRadius.circular(30.0)),
                  onPressed: _submit,
                  child: new Text('Calculate'),
                ),
              )
            ],
          ),
        ),
      ),
    );
  }
}
|
/*
// Licensed to DynamoBI Corporation (DynamoBI) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. DynamoBI licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
*/
#include "fennel/common/CommonPreamble.h"
#include "fennel/flatfile/FlatFileParser.h"
FENNEL_BEGIN_CPPFILE("$Id$");
const char SPACE_CHAR = ' ';
/**
 * Records the outcome of a column scan: the delimiter type that ended the
 * column, the column size, and the position at which parsing resumes.
 * A field or row delimiter character is consumed, so `next` skips it.
 */
void FlatFileColumnParseResult::setResult(
    FlatFileColumnParseResult::DelimiterType type, char *buffer, uint size)
{
    this->type = type;
    this->size = size;

    next = buffer + size;
    if (type == FlatFileColumnParseResult::FIELD_DELIM
        || type == FlatFileColumnParseResult::ROW_DELIM)
    {
        // step over the delimiter character itself
        next++;
    } else if (type != NO_DELIM && type != MAX_LENGTH) {
        permAssert(false);
    }
}
// A row descriptor starts out "bounded": the number of columns is fixed
// by the descriptor contents.
FlatFileRowDescriptor::FlatFileRowDescriptor()
    : std::vector<FlatFileColumnDescriptor>()
{
    bounded = true;
}

// Switches the descriptor to unbounded mode (arbitrary column count).
void FlatFileRowDescriptor::setUnbounded()
{
    bounded = false;
}

// True when the column count is fixed by the descriptor.
bool FlatFileRowDescriptor::isBounded() const
{
    return bounded;
}
// A fresh result is equivalent to a reset one.
FlatFileRowParseResult::FlatFileRowParseResult()
{
    reset();
}

// Clears per-row state so the object can be reused for the next scan.
void FlatFileRowParseResult::reset()
{
    status = NO_STATUS;
    current = next = NULL;
    nRowDelimsRead = 0;
}
/**
 * Constructs a parser for the given delimiter set.  A zero fieldDelim
 * selects fixed-width mode, which cannot be combined with quoting or
 * escaping.
 */
FlatFileParser::FlatFileParser(
    char fieldDelim, char rowDelim, char quote, char escape, bool doTrim)
{
    this->fieldDelim = fieldDelim;
    this->rowDelim = rowDelim;
    this->quote = quote;
    this->escape = escape;
    this->doTrim = doTrim;
    // zero field delimiter means fixed-width fields
    fixed = (fieldDelim == 0);
    if (fixed) {
        assert(quote == 0);
        assert(escape == 0);
    }
}
/**
 * Scans one row of at most `size` bytes from `buffer`, splitting it into
 * columns according to `columns` and recording column offsets/sizes and a
 * status code in `result`.  Strict mode (bounded and not lenient) flags
 * rows with too few or too many columns; `result.next` is advanced past
 * the row's trailing delimiter(s) via scanRowEnd().
 */
void FlatFileParser::scanRow(
    const char *buffer,
    int size,
    const FlatFileRowDescriptor &columns,
    FlatFileRowParseResult &result)
{
    assert(size >= 0);
    const char *row = buffer;
    uint offset = 0;
    FlatFileColumnParseResult columnResult;
    result.status = FlatFileRowParseResult::NO_STATUS;
    bool bounded = columns.isBounded();
    bool lenient = columns.isLenient();
    bool mapped = columns.isMapped();
    bool strict = (bounded && (!lenient));
    uint maxColumns = columns.getMaxColumns();
    uint resultColumns = columns.size();
    // bounded rows pre-allocate one (null) entry per expected column;
    // unbounded rows accumulate columns as they are found
    if (bounded) {
        result.resize(resultColumns);
        for (uint i = 0; i < resultColumns; i++) {
            result.setNull(i);
        }
    } else {
        result.clear();
    }
    // Scan any initial row delimiters, helps for the case when a row
    // delimiter is multiple characters like \r\n and the delimiter
    // characters are split between two buffers. (The previous row could
    // be complete due to \r, and parsing could begin at \n.
    const char *nonDelim = scanRowDelim(row, size, false);
    offset = nonDelim - row;
    bool done = false;
    bool rowDelim = false;
    for (uint i = 0; i < maxColumns; i++) {
        uint maxLength = columns.getMaxLength(i);
        scanColumn(
            row + offset,
            size - offset,
            maxLength,
            columnResult);
        // classify how the column ended and update the row status
        switch (columnResult.type) {
        case FlatFileColumnParseResult::NO_DELIM:
            // ran off the end of the buffer mid-column
            result.status = FlatFileRowParseResult::INCOMPLETE_COLUMN;
            done = true;
            break;
        case FlatFileColumnParseResult::ROW_DELIM:
            if (strict && (i + 1 != columns.size())) {
                if (i == 0) {
                    result.status = FlatFileRowParseResult::NO_COLUMN_DELIM;
                } else {
                    result.status = FlatFileRowParseResult::TOO_FEW_COLUMNS;
                }
            }
            done = true;
            rowDelim = true;
            break;
        case FlatFileColumnParseResult::MAX_LENGTH:
        case FlatFileColumnParseResult::FIELD_DELIM:
            // a field delimiter after the last expected column means extras
            if (strict && (i + 1 == columns.size())) {
                result.status = FlatFileRowParseResult::TOO_MANY_COLUMNS;
                done = true;
            }
            break;
        default:
            permAssert(false);
        }
        // store the column; mapped descriptors may reorder or drop columns
        if (bounded) {
            int target = mapped ? columns.getMap(i) : i;
            if (target >= 0) {
                assert(target < resultColumns);
                result.setColumn(target, offset, columnResult.size);
            }
        } else {
            result.addColumn(offset, columnResult.size);
        }
        offset = columnResult.next - row;
        if (done) {
            break;
        }
    }
    result.current = const_cast<char *>(row);
    result.next = const_cast<char *>(
        scanRowEnd(
            columnResult.next,
            buffer + size - columnResult.next,
            rowDelim,
            result));
}
/**
 * Advances past the end of the current row: skips to (and over) the row
 * delimiter, counting it in `result.nRowDelimsRead`, and returns the
 * first character of the next row.  Rows that already consumed the whole
 * buffer (incomplete/too-large) return the buffer end unchanged.
 */
const char *FlatFileParser::scanRowEnd(
    const char *buffer,
    int size,
    bool rowDelim,
    FlatFileRowParseResult &result)
{
    const char *read = buffer;
    const char *end = buffer + size;
    switch (result.status) {
    case FlatFileRowParseResult::INCOMPLETE_COLUMN:
    case FlatFileRowParseResult::ROW_TOO_LARGE:
        // these statuses imply the scan already exhausted the buffer
        assert(read == end);
        return read;
    default:
        break;
    }
    // if a row delimiter was not encountered while scanning the row,
    // search for the next row delimiter character
    if (!rowDelim) {
        read = scanRowDelim(read, end - read, true);
        if (read == end) {
            return read;
        }
    }
    result.nRowDelimsRead++;
    // search for the first non- row delimiter character
    read = scanRowDelim(read, end - read, false);
    return read;
}
/**
 * Scans forward through `buffer` until isRowDelim(c) == search — i.e.
 * finds the next row-delimiter character (search == true) or the next
 * non-delimiter character (search == false).  Returns the buffer end if
 * no such character exists.
 */
const char *FlatFileParser::scanRowDelim(
    const char *buffer,
    int size,
    bool search)
{
    const char *pos = buffer;
    const char *end = buffer + size;
    while (pos < end && isRowDelim(*pos) != search) {
        pos++;
    }
    return pos;
}
/**
 * True when `c` terminates a row.  A '\n' delimiter also accepts '\r'
 * so DOS-style "\r\n" line endings are handled; '\r' itself may never
 * be configured as the row delimiter.
 */
bool FlatFileParser::isRowDelim(char c)
{
    assert(rowDelim != '\r');
    if (rowDelim == '\n') {
        return c == '\r' || c == '\n';
    }
    return c == rowDelim;
}
/**
 * Scans a single delimited column starting at `buffer`, honoring quoting
 * and escaping, and records in `result` the column size and the type of
 * delimiter that ended it (or NO_DELIM when the buffer ran out).
 * Delegates to scanFixedColumn() in fixed-width mode.
 * NOTE(review): `maxLength` is unused on this (delimited) path — length
 * limiting appears to apply only to fixed-width columns; confirm.
 */
void FlatFileParser::scanColumn(
    const char *buffer,
    uint size,
    uint maxLength,
    FlatFileColumnParseResult &result)
{
    if (fixed) {
        return scanFixedColumn(buffer, size, maxLength, result);
    }
    assert(buffer != NULL);
    const char *read = buffer;
    const char *end = buffer + size;
    // read past leading spaces before checking for quotes
    if (doTrim) {
        while (read < end && SPACE_CHAR == *read) {
            read++;
        }
    }
    bool quoted = (read < end && *read == quote);
    // when quote and escape are the same character, doubling it inside a
    // quoted value escapes it
    bool quoteEscape = (quoted && quote == escape);
    FlatFileColumnParseResult::DelimiterType type =
        FlatFileColumnParseResult::NO_DELIM;
    if (quoted) {
        read++;
    }
    while (read < end) {
        if (*read == quote) {
            read++;
            if (quoteEscape) {
                // read next character to determine whether purpose of
                // this character is an escape character or an end quote
                if (read == end) {
                    break;
                }
                if (*read == quote) {
                    // two consecutive quote/escape characters is an
                    // escaped quote
                    read++;
                    continue;
                }
            }
            if (quoted) {
                // otherwise a quote may be a close quote
                quoteEscape = quoted = false;
            }
        } else if (*read == escape) {
            read++;
            // an escape escapes the next character
            if (read == end) {
                break;
            }
            read++;
        } else if (quoted) {
            // delimiters inside quotes are ordinary characters
            read++;
        } else if (*read == fieldDelim) {
            type = FlatFileColumnParseResult::FIELD_DELIM;
            break;
        } else if (isRowDelim(*read)) {
            type = FlatFileColumnParseResult::ROW_DELIM;
            break;
        } else {
            read++;
        }
    }
    uint resultSize = read - buffer;
    result.setResult(type, const_cast<char *>(buffer), resultSize);
}
/**
 * Scans a fixed-width column of up to `maxLength` characters, stopping
 * early at a row delimiter.  Records the column size and the ending
 * delimiter type (ROW_DELIM, MAX_LENGTH, or NO_DELIM when the buffer
 * ran out first) in `result`.
 */
void FlatFileParser::scanFixedColumn(
    const char *buffer,
    uint size,
    uint maxLength,
    FlatFileColumnParseResult &result)
{
    assert(buffer != NULL);
    const char *read = buffer;
    const char *end = buffer + size;
    uint remaining = maxLength;
    FlatFileColumnParseResult::DelimiterType type =
        FlatFileColumnParseResult::NO_DELIM;
    while (read < end && remaining > 0) {
        if (isRowDelim(*read)) {
            type = FlatFileColumnParseResult::ROW_DELIM;
            break;
        }
        read++;
        remaining--;
    }
    // Resolve delimiter type if another character can be read. This allows
    // us to catch the case where a row delimiter follows a max length field.
    if (type == FlatFileColumnParseResult::NO_DELIM && read < end) {
        if (isRowDelim(*read)) {
            type = FlatFileColumnParseResult::ROW_DELIM;
        } else if (remaining == 0) {
            type = FlatFileColumnParseResult::MAX_LENGTH;
        }
    }
    uint resultSize = read - buffer;
    result.setResult(type, const_cast<char *>(buffer), resultSize);
}
/**
 * Strips quoting (and optionally leading/trailing blanks) from every
 * column of a parsed row in place, recording the post-strip sizes in
 * `rowResult.strippedSizes`.  Null columns get size 0.
 * NOTE(review): `nFields` is a signed int compared against unsigned
 * loop/size values below — benign if getReadCount() is non-negative,
 * which it presumably is; confirm.
 */
void FlatFileParser::stripQuoting(
    FlatFileRowParseResult &rowResult,
    bool trim)
{
    int nFields = rowResult.getReadCount();
    if (rowResult.strippedSizes.size() < nFields) {
        rowResult.strippedSizes.resize(nFields);
    }
    for (uint i = 0; i < nFields; i++) {
        char *value = rowResult.getColumn(i);
        uint newSize = 0;
        if (value != NULL) {
            uint oldSize = rowResult.getRawColumnSize(i);
            // strips in place; the buffer only ever shrinks
            newSize = stripQuoting(value, oldSize, trim);
        }
        rowResult.strippedSizes[i] = newSize;
    }
}
/**
 * Removes quoting and escape characters from `buffer` in place (optionally
 * trimming surrounding blanks first when `untrimmed` is set) and returns
 * the resulting length.  Doubled quote/escape characters inside a quoted
 * value collapse to a single quote; content after a close quote is dropped.
 */
uint FlatFileParser::stripQuoting(
    char *buffer, uint sizeIn, bool untrimmed)
{
    assert(buffer != NULL);
    if (sizeIn == 0) {
        return 0;
    }
    int size = untrimmed ? trim(buffer, sizeIn) : sizeIn;
    bool quoted = false;
    char *read = buffer;
    char *end = buffer + size;
    // write chases read; the stripped value is always <= the original
    char *write = buffer;
    if (*buffer == quote) {
        quoted = true;
        read++;
    }
    bool quoteEscape = (quoted && quote == escape);
    while (read < end) {
        if (quoteEscape && *read == quote) {
            read++;
            if ((read < end) && (*read == quote)) {
                // two consecutive quote/escape characters is an escaped quote
                *write++ = *read++;
            } else {
                // single quote/escape is end quote
                break;
            }
        } else if (quoted && *read == quote) {
            // close quote terminates the value
            break;
        } else if (*read == escape) {
            read++;
            // copy the escaped character literally
            if (read < end) {
                *write++ = *read++;
            }
        } else {
            *write++ = *read++;
        }
    }
    return write - buffer;
}
/**
 * Removes leading and trailing blanks from `buffer` in place, shifting
 * the surviving characters to the front, and returns the trimmed length.
 */
uint FlatFileParser::trim(char *buffer, uint size)
{
    assert(buffer != NULL);
    if (size == 0) {
        return 0;
    }
    char *first = buffer;
    char *stop = buffer + size;
    // skip leading blanks
    while (first < stop && *first == ' ') {
        first++;
    }
    // drop trailing blanks
    while (stop > first && *(stop - 1) == ' ') {
        stop--;
    }
    // compact the remaining characters to the start of the buffer
    char *out = buffer;
    while (first < stop) {
        *out++ = *first++;
    }
    return out - buffer;
}
FENNEL_END_CPPFILE("$Id$");
// End FlatFileParser.cpp
|
#ifndef READXML_HEADER
#define READXML_HEADER
#include <optixu/optixpp_namespace.h>
#include <optixu/optixu_aabb_namespace.h>
#include <optixu/optixu_math_stream_namespace.h>
#include <vector>
#include <string>
#include <iostream>
#include <set>
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include <cmath>
#include <algorithm>
#include "sutil.h"
#include "tinyxml/tinyxml.h"
#include "tinyobjloader/objLoader.h"
#include "tinyplyloader/plyLoader.h"
#include "lightStructs.h"
#include "structs/cameraInput.h"
#include "inout/relativePath.h"
#include "structs/constant.h"
// Parses a delimited string of numbers into a float vector.
std::vector<float> parseFloatStr(const std::string& str);
// One named geometric transform with up to four parameters.
struct objTransform{
	std::string name;
	float value[4];
};
// Applies the given transforms to a loaded shape in place.
bool doObjTransform(shape_t& shape, std::vector<objTransform>& TArr);
// Loaders for the individual scene-XML modules; each returns success.
bool loadSensorFromXML(CameraInput& Camera, TiXmlNode* module );
bool loadBsdfFromXML(std::vector<material_t>& materials, TiXmlNode* module, std::string fileName, int offset = 0);
bool loadLightFromXML(std::vector<Envmap>& envmaps, std::vector<Point>& points,
        TiXmlNode* module, std::string fileName );
bool loadShapeFromXML(std::vector<shape_t>& shapes, std::vector<material_t>& materials,
        TiXmlNode* module, std::string fileName );
// Top-level entry point: parses the scene XML into shapes, materials,
// camera settings and light sources.
bool readXML(std::string fileName,
        std::vector<shape_t>& shapes,
        std::vector<material_t>& materials,
        CameraInput& Camera,
        std::vector<Envmap>& envmaps,
        std::vector<Point>& points);
#endif
|
using Test, OffsetArrays
using VanFoFy.Ellipticals: Theta, theta
@testset "θ-functions" begin
    # Half-periods of the lattice and tolerances: tol_compute bounds the
    # series truncation inside Theta, tol_check bounds the comparison
    # against the reference values below.
    ω1 = complex(1.0)
    ω3 = exp(1im)
    tol_check = 1e-15
    tol_compute = 1e-20
    θ = Theta(ω1, ω3, tol_compute)
    z = 0.63256929128615824176 + 0.47122375149242207160im
    # th[k, n] = n-th derivative of θ_k at z, k = 1..4, n = 0..4
    th = OffsetArray{ComplexF64}(undef, 1:4, 0:4)
    for n in 0:4
        for k in 1:4
            th[k, n] = theta(θ, th_k=k, d_n=n, z=z)
        end
    end
    ##### reference values for θ_1..θ_4(z)
    # (presumably computed with an independent CAS — TODO confirm source)
    th_ma = [ 0.46214180304899833678 + 0.65513824162092381551im,
        0.96467023166692638429 + 0.11945667130003175272im,
        1.13835201868809736642 + 0.08123099419582718546im,
        0.86149287144921489500 - 0.08153295492698743884im]
    @test th[:, 0] ≈ th_ma atol=tol_check
    ##### first derivatives
    d1_th_ma = [ 0.97727706225681589769 + 0.08049915546539055744im,
        -0.42045304182205492196 - 0.63824580596766239552im,
        0.14241824316243509356 - 0.38564309515712238239im,
        -0.14363258751912619862 + 0.38620176200689986572im]
    @test th[:, 1] ≈ d1_th_ma atol=tol_check
    ##### second derivatives
    d2_th_ma = [-0.54549882845031761232 - 0.68893198268462155795im,
        -0.93946558497506382639 - 0.19739257822094024267im,
        -0.55247742739635853599 - 0.32311219018650595886im,
        0.55495918519936412129 + 0.32794356188507020189im]
    @test th[:, 2] ≈ d2_th_ma atol=tol_check
    ##### third derivatives
    d3_th_ma = [-1.05294508933080125267 + 0.15318331373277377347im,
        0.17025898335751795341 + 0.53691780124847588798im,
        -0.5623867730174600491 + 1.5392204494203911042im,
        0.5818162827245192532 - 1.5481591190168449923im]
    @test th[:, 3] ≈ d3_th_ma atol=tol_check
    ##### fourth derivatives
    d4_th_ma = [1.2954660922120757234 + 0.9931820896285708616im,
        0.71273193831970297909 + 0.89906624330941167631im,
        2.1950195882908139481 + 1.2634597310003502748im,
        -2.2347277131390164682 - 1.3407616781773902616im]
    @test th[:, 4] ≈ d4_th_ma atol=tol_check
    ##### the 4-tuple convenience overload must agree with the kwargs form
    th1, th2, dth1, dth2 = theta(θ, z)
    @test th1 ≈ theta(θ, th_k=1, d_n=0, z=z) atol=tol_check
    @test th2 ≈ theta(θ, th_k=2, d_n=0, z=z) atol=tol_check
    @test dth1 ≈ theta(θ, th_k=1, d_n=1, z=z) atol=tol_check
    @test dth2 ≈ theta(θ, th_k=2, d_n=1, z=z) atol=tol_check
end
|
<?php
declare(strict_types=1);
namespace Rector\Reporting\EventSibscriber;
use Rector\ChangesReporting\Output\ConsoleOutputFormatter;
use Rector\Core\Configuration\Configuration;
use Rector\Core\EventDispatcher\Event\AfterReportEvent;
use Rector\Reporting\DataCollector\ReportCollector;
use Symfony\Component\Console\Style\SymfonyStyle;
use Symfony\Component\EventDispatcher\EventSubscriberInterface;
final class PrintReportCollectorEventSubscriber implements EventSubscriberInterface
{
    /**
     * @var ReportCollector
     */
    private $reportCollector;

    /**
     * @var Configuration
     */
    private $configuration;

    /**
     * @var SymfonyStyle
     */
    private $symfonyStyle;

    public function __construct(
        ReportCollector $reportCollector,
        Configuration $configuration,
        SymfonyStyle $symfonyStyle
    ) {
        $this->reportCollector = $reportCollector;
        $this->configuration = $configuration;
        $this->symfonyStyle = $symfonyStyle;
    }

    /**
     * @return string[]
     */
    public static function getSubscribedEvents(): array
    {
        return [AfterReportEvent::class => 'printReportCollector'];
    }

    /**
     * Renders every collected report to the console after the run finishes.
     */
    public function printReportCollector(): void
    {
        if ($this->shouldSkip()) {
            return;
        }

        $this->symfonyStyle->title('Collected reports');

        foreach ($this->reportCollector->getReports() as $report) {
            $location = $report->getRelativeFilePath() . ':' . $report->getLine();

            $this->symfonyStyle->writeln($location);
            $this->symfonyStyle->writeln('* ' . $report->getReport());
            $this->symfonyStyle->writeln('* ' . $report->getRectorClass());
            $this->symfonyStyle->newLine(2);
        }
    }

    /**
     * Reports are printed only for console output and only when present.
     */
    private function shouldSkip(): bool
    {
        // print only to console, not json etc.
        if ($this->configuration->getOutputFormat() !== ConsoleOutputFormatter::NAME) {
            return true;
        }

        return $this->reportCollector->getReports() === [];
    }
}
|
import React from 'react';
import { TwitterEmbedElement } from '@artibox/slate-common/embed/strategies/twitter';
import { RenderElementProps } from '../../../../core';
import { useLoadTwitterEmbedApi } from '../hooks/useLoadTwitterEmbedApi';
import { useLoadTwitterEmbedHtml } from '../hooks/useLoadTwitterEmbedHtml';
/** Props for the Twitter embed renderer. */
export interface TwitterProps {
  /** Slate render attributes; absent when rendered outside the editor. */
  attributes?: RenderElementProps['attributes'];
  children?: any;
  /** The tweet URL to embed. */
  data: string;
  element: TwitterEmbedElement;
}
function Twitter({ attributes, children, data: url }: TwitterProps) {
const html = useLoadTwitterEmbedHtml(url);
useLoadTwitterEmbedApi(html);
return (
<div {...attributes} contentEditable={false}>
{html && (
<div
style={{
display: 'flex',
marginTop: -10,
marginBottom: -10
}}
dangerouslySetInnerHTML={{
__html: html
}}
/>
)}
{attributes ? children : undefined}
</div>
);
}
export default Twitter;
|
package com.hxbreak.animalcrossingtools.ui.flutter
import android.os.Bundle
import android.view.View
import androidx.activity.OnBackPressedCallback
import androidx.fragment.app.viewModels
import androidx.navigation.fragment.findNavController
import androidx.navigation.fragment.navArgs
import com.google.android.material.transition.MaterialSharedAxis
import dagger.hilt.android.AndroidEntryPoint
import io.flutter.embedding.android.FlutterFragment
import timber.log.Timber
/**
* Flutter Module Enter Point
*/
@AndroidEntryPoint
@AndroidEntryPoint
class ACNHFlutterFragment : FlutterFragment(){
    val args by navArgs<ACNHFlutterFragmentArgs>()
    val viewModel by viewModels<ACNHFlutterViewModel>()

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Touching the lazy delegate forces ViewModel creation, which
        // pushes the initial Flutter route.
        viewModel //initViewModel Push Flutter Route
        // Shared-axis transitions for entering/leaving this destination.
        val forward = MaterialSharedAxis(MaterialSharedAxis.X, true)
        enterTransition = forward
        val backward = MaterialSharedAxis(MaterialSharedAxis.X, false)
        returnTransition = backward
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        // Route system back presses into the Flutter engine instead of
        // popping the native back stack directly.
        requireActivity().onBackPressedDispatcher.addCallback(viewLifecycleOwner, object : OnBackPressedCallback(true){
            override fun handleOnBackPressed() {
                this@ACNHFlutterFragment.onBackPressed()
            }
        })
        // Pop this fragment when Flutter reports it has navigated back to
        // either the entry destination or its root route ("/").
        flutterEngine?.navigationChannel?.setMethodCallHandler { call, result ->
            when(call.method){
                "routeUpdated" -> {
                    val previousRouteName = call.argument<Any?>("previousRouteName")
                    val routeName = call.argument<Any?>("routeName")
                    if (previousRouteName is String && previousRouteName == args.destination){
                        findNavController().navigateUp()
                    }else if (routeName is String && routeName == "/"){
                        findNavController().navigateUp()
                    }
                }
            }
        }
//        UrlLauncherPlugin.registerWith()
    }

    override fun onDestroyView() {
        super.onDestroyView()
        // Detach the handler so a destroyed view never receives callbacks.
        flutterEngine?.navigationChannel?.setMethodCallHandler(null)
    }
}
|
#pragma once
#include "drape_frontend/map_shape.hpp"
#include "drape_frontend/shape_view_params.hpp"
#include "drape/constants.hpp"
namespace df
{
// Map overlay shape that draws a colored symbol (marker) at a fixed
// mercator point, batched per tile.
class ColoredSymbolShape : public MapShape
{
public:
  // Overlay-managed variant; needOverlay controls overlay registration.
  ColoredSymbolShape(m2::PointD const & mercatorPt, ColoredSymbolViewParams const & params,
                     TileKey const & tileKey, uint32_t textIndex, bool needOverlay = true);

  // Variant with explicit per-zoom overlay sizes.
  ColoredSymbolShape(m2::PointD const & mercatorPt, ColoredSymbolViewParams const & params,
                     TileKey const & tileKey, uint32_t textIndex, std::vector<m2::PointF> const & overlaySizes);

  void Draw(ref_ptr<dp::Batcher> batcher, ref_ptr<dp::TextureManager> textures) const override;

  MapShapeType GetType() const override { return MapShapeType::OverlayType; }

private:
  // Priority used to resolve overlay collisions.
  uint64_t GetOverlayPriority() const;

  m2::PointD const m_point;
  ColoredSymbolViewParams m_params;
  m2::PointI const m_tileCoords;
  uint32_t const m_textIndex;
  bool const m_needOverlay;
  std::vector<m2::PointF> m_overlaySizes;
};
} // namespace df
|
package shared
import (
"fmt"
"github.com/pkg/errors"
"io/ioutil"
"strconv"
"strings"
)
// intToDigitArr takes a number and returns an array of digits (e.g. 12345 => [1 2 3 4 5])
// IntToDigitArr splits a non-negative number into its decimal digits,
// most significant first (e.g. 12345 => [1 2 3 4 5]).
func IntToDigitArr(num int) []int {
	if num < 10 {
		return []int{num}
	}
	return append(IntToDigitArr(num/10), num%10)
}
// StrToDigitArr converts a numeric string into an array of its digits,
// one entry per character, keeping leading zeros
// (e.g. "0012345" => [0 0 1 2 3 4 5]); it errors on non-digit input.
func StrToDigitArr(num string) ([]int, error) {
	split := strings.Split(num, "")
	var nums []int
	for _, num := range split {
		n, err := strconv.Atoi(num)
		if err != nil {
			return []int{}, err
		}
		nums = append(nums, n)
	}
	return nums, nil
}
// StrToMem parses a comma-separated list of integers (an intcode program
// string) into a memory image, wrapping the error of the first token
// that fails to convert.
func StrToMem(in string) ([]int, error) {
	var memory []int
	for _, token := range strings.Split(in, ",") {
		n, err := strconv.Atoi(token)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to convert %s", token)
		}
		memory = append(memory, n)
	}
	return memory, nil
}
// CloneSlice returns a copy of slice that shares no backing storage with
// the input; nil and empty inputs keep their nil-ness (re-sliced to zero
// capacity), matching append(slice[:0:0], slice...).
func CloneSlice(slice []int) []int {
	if len(slice) == 0 {
		return slice[:0:0]
	}
	out := make([]int, len(slice))
	copy(out, slice)
	return out
}
// Abs returns the absolute value of x.
func Abs(x int) int {
	if x >= 0 {
		return x
	}
	return -x
}
// Max returns the larger of x and y.
func Max(x, y int) int {
	if y > x {
		return y
	}
	return x
}
// Min returns the smaller of x and y.
func Min(x, y int) int {
	if y < x {
		return y
	}
	return x
}
// PrintSolution prints a formatted puzzle answer to stdout, tagged with
// the day and task it belongs to.
func PrintSolution(day int, task int, format string, args ...interface{}) {
	answer := fmt.Sprintf(format, args...)
	fmt.Printf("Solution Day%d-Task%d: %s\n", day, task, answer)
}
func ReadFile(file string) (string, error) {
f, err := ioutil.ReadFile(file)
if err != nil {
return "", err
}
return string(f), nil
}
|
import pytest
from django.contrib.auth import get_user_model
from django.test import Client
def test_user_guest():
    """Anonymous requests to the user-protected endpoint are rejected."""
    client = Client()
    response = client.get("/require-user")
    assert response.status_code == 403
    assert response.json() == {"message": "You have to log in"}


def test_async_user_guest():
    """Async variant: anonymous requests are rejected."""
    client = Client()
    response = client.get("/async/require-user")
    assert response.status_code == 403
    assert response.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_user_inactive():
    """A logged-in but deactivated user is treated as unauthenticated."""
    client = Client()
    user = get_user_model().objects.get_or_create(
        username="inactive_user", email="inactive@user.com"
    )[0]
    # Log in first, then deactivate: the session exists but the account
    # is no longer active.
    client.force_login(user)
    user.is_active = False
    user.save()
    response = client.get("/require-user")
    assert response.status_code == 403
    assert response.json() == {"message": "You have to log in"}


@pytest.mark.django_db
def test_async_user_inactive():
    """Async variant: deactivated users are rejected."""
    client = Client()
    user = get_user_model().objects.get_or_create(
        username="inactive_user", email="inactive@user.com"
    )[0]
    client.force_login(user)
    user.is_active = False
    user.save()
    response = client.get("/async/require-user")
    assert response.status_code == 403
    assert response.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_user_success():
    """An active, logged-in user reaches the protected endpoint."""
    user = get_user_model().objects.get_or_create(
        username="user", email="some@user.com"
    )[0]
    client = Client()
    client.force_login(user)
    response = client.get("/require-user")
    assert response.status_code == 200
    assert response.json() == {"user": "user"}


@pytest.mark.django_db(transaction=True)
def test_async_user_success():
    """Async variant: an active, logged-in user reaches the endpoint."""
    user = get_user_model().objects.get_or_create(
        username="user", email="some@user.com"
    )[0]
    client = Client()
    client.force_login(user)
    response = client.get("/async/require-user")
    assert response.status_code == 200
    assert response.json() == {"user": "user"}
def test_staff_guest():
    """Anonymous requests to the staff-only endpoint are rejected."""
    client = Client()
    response = client.get("/require-staff")
    assert response.status_code == 403
    assert response.json() == {"message": "You have to log in"}


def test_async_staff_guest():
    """Async variant: anonymous requests to staff endpoint are rejected."""
    client = Client()
    response = client.get("/async/require-staff")
    assert response.status_code == 403
    assert response.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_staff_inactive():
    """A deactivated staff user is treated as unauthenticated."""
    user = get_user_model().objects.get_or_create(
        username="inactive_staff", email="inactive@staff.com", is_staff=True
    )[0]
    client = Client()
    # Log in first, then deactivate the account.
    client.force_login(user)
    user.is_active = False
    user.save()
    response = client.get("/require-staff")
    assert response.status_code == 403
    assert response.json() == {"message": "You have to log in"}


@pytest.mark.django_db
def test_async_staff_inactive():
    """Async variant: deactivated staff users are rejected."""
    user = get_user_model().objects.get_or_create(
        username="inactive_staff", email="inactive@staff.com", is_staff=True
    )[0]
    client = Client()
    client.force_login(user)
    user.is_active = False
    user.save()
    response = client.get("/async/require-staff")
    assert response.status_code == 403
    assert response.json() == {"message": "You have to log in"}
@pytest.mark.django_db
def test_staff_success():
    """An active staff user gets a 200 with their username echoed back."""
    user, _ = get_user_model().objects.get_or_create(
        username="staff", email="some@staff.com", is_staff=True
    )
    client = Client()
    client.force_login(user)
    response = client.get("/require-staff")
    assert response.status_code == 200
    assert response.json() == {"user": "staff"}
@pytest.mark.django_db(transaction=True)
def test_async_staff_success():
    """Async variant: an active staff user gets a 200 from the async view.

    transaction=True so the async view sees the committed user row.
    """
    user, _ = get_user_model().objects.get_or_create(
        username="staff", email="some@staff.com", is_staff=True
    )
    client = Client()
    client.force_login(user)
    response = client.get("/async/require-staff")
    assert response.status_code == 200
    assert response.json() == {"user": "staff"}
|
##
## Copyright [2013-2016] [Megam Systems]
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
module SessionsHelper
  # Returns true when a user is logged in, false otherwise — an explicit
  # boolean coercion of current_user.
  def signed_in?
    current_user ? true : false
  end

  # Looks up the account for the credentials stored in the session
  # (:email and :api_key) and memoizes it for this request. Both keys must
  # be present before the auth backend is consulted.
  def current_user
    @_current_user ||= session[:email] && session[:api_key] &&
                       Nilavu::Auth::Configuration.load(session[:email])
  end

  # Drop every credential-related key from the session and forget the
  # memoized account.
  def cleanup_session
    %i[email api_key org_id ceph_access_key ceph_secret_key return_to].each do |key|
      session.delete(key)
    end
    @_current_user = session[:email] = nil
  end

  # Remember the URI of the current request so a later
  # redirect_back_or_default call can return the user here.
  def store_location
    session[:return_to] = request.url
  end

  # Persist the account's email and api_key into the session.
  def store_credentials(acct)
    session[:email] = acct.email
    session[:api_key] = acct.api_key
  end

  # Persist the cephgw storage access/secret keys into the session.
  def store_ceph_credentials(ceph)
    session[:ceph_access_key] = ceph['access_key']
    session[:ceph_secret_key] = ceph['secret_key']
  end

  # Redirect to the URI remembered by store_location, or to +default+ when
  # none was stored; the stored location is cleared either way.
  def redirect_back_or_default(default)
    destination = session[:return_to] || default
    session[:return_to] = nil
    redirect_to destination
  end

  # Truthy once an organisation has been selected for this session.
  def loaded_environments?
    session[:org_id]
  end
end
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#nullable disable
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.PooledObjects;
using Microsoft.CodeAnalysis.Remote;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.FindSymbols
{
// All the logic for finding all declarations in a given solution/project with some name
// is in this file.
    internal static partial class DeclarationFinder
    {
        // Finds all symbols declared in <paramref name="project"/> (and its
        // references) that match the query. Prefers the out-of-process remote
        // host when one is available; otherwise searches in-process.
        public static async Task<ImmutableArray<ISymbol>> FindAllDeclarationsWithNormalQueryAsync(
            Project project,
            SearchQuery query,
            SymbolFilter criteria,
            CancellationToken cancellationToken
        )
        {
            // All entrypoints to this function are Find functions that are only searching
            // for specific strings (i.e. they never do a custom search).
            Contract.ThrowIfTrue(
                query.Kind == SearchKind.Custom,
                "Custom queries are not supported in this API"
            );
            if (project == null)
            {
                throw new ArgumentNullException(nameof(project));
            }
            // A non-null but whitespace-only name can never match a declared
            // symbol, so short-circuit with an empty result.
            if (query.Name != null && string.IsNullOrWhiteSpace(query.Name))
            {
                return ImmutableArray<ISymbol>.Empty;
            }
            var client = await RemoteHostClient
                .TryGetClientAsync(project, cancellationToken)
                .ConfigureAwait(false);
            if (client != null)
            {
                var solution = project.Solution;
                // Run the search remotely; results come back as serializable
                // symbol ids that must be rehydrated into ISymbols locally.
                var result = await client
                    .TryInvokeAsync<
                        IRemoteSymbolFinderService,
                        ImmutableArray<SerializableSymbolAndProjectId>
                    >(
                        solution,
                        (service, solutionInfo, cancellationToken) =>
                            service.FindAllDeclarationsWithNormalQueryAsync(
                                solutionInfo,
                                project.Id,
                                query.Name,
                                query.Kind,
                                criteria,
                                cancellationToken
                            ),
                        cancellationToken
                    )
                    .ConfigureAwait(false);
                // Remote invocation failed: return empty rather than falling
                // through to a duplicate in-process search.
                if (!result.HasValue)
                {
                    return ImmutableArray<ISymbol>.Empty;
                }
                return await RehydrateAsync(solution, result.Value, cancellationToken)
                    .ConfigureAwait(false);
            }
            // No remote host available — do the whole search in this process.
            return await FindAllDeclarationsWithNormalQueryInCurrentProcessAsync(
                project,
                query,
                criteria,
                cancellationToken
            )
                .ConfigureAwait(false);
        }
        // In-process implementation: searches the project's own compilation,
        // then every referenced assembly (source projects and plain metadata
        // references), and normalizes namespace symbols at the end.
        internal static async Task<
            ImmutableArray<ISymbol>
        > FindAllDeclarationsWithNormalQueryInCurrentProcessAsync(
            Project project,
            SearchQuery query,
            SymbolFilter criteria,
            CancellationToken cancellationToken
        )
        {
            var list = ArrayBuilder<ISymbol>.GetInstance();
            // Projects that don't support compilation (e.g. non-language
            // projects) contribute nothing; an empty array is returned below.
            if (project.SupportsCompilation)
            {
                var compilation = await project
                    .GetCompilationAsync(cancellationToken)
                    .ConfigureAwait(false);
                // get declarations from the compilation's assembly
                await AddCompilationDeclarationsWithNormalQueryAsync(
                    project,
                    query,
                    criteria,
                    list,
                    cancellationToken
                )
                    .ConfigureAwait(false);
                // get declarations from directly referenced projects and metadata
                foreach (var assembly in compilation.GetReferencedAssemblySymbols())
                {
                    var assemblyProject = project.Solution.GetProject(assembly, cancellationToken);
                    if (assemblyProject != null)
                    {
                        // Reference is another project in this solution —
                        // search its compilation.
                        await AddCompilationDeclarationsWithNormalQueryAsync(
                            assemblyProject,
                            query,
                            criteria,
                            list,
                            compilation,
                            assembly,
                            cancellationToken
                        )
                            .ConfigureAwait(false);
                    }
                    else
                    {
                        // Reference is a plain metadata (PE) reference.
                        await AddMetadataDeclarationsWithNormalQueryAsync(
                            project,
                            assembly,
                            compilation.GetMetadataReference(assembly)
                                as PortableExecutableReference,
                            query,
                            criteria,
                            list,
                            cancellationToken
                        )
                            .ConfigureAwait(false);
                    }
                }
                // Make certain all namespace symbols returned by API are from the compilation
                // for the passed in project.
                for (var i = 0; i < list.Count; i++)
                {
                    var symbol = list[i];
                    if (symbol is INamespaceSymbol ns)
                    {
                        list[i] = compilation.GetCompilationNamespace(ns);
                    }
                }
            }
            return list.ToImmutableAndFree();
        }
        // Converts the serialized symbol ids returned by the remote host back
        // into ISymbol instances; ids that fail to resolve are dropped.
        private static async Task<ImmutableArray<ISymbol>> RehydrateAsync(
            Solution solution,
            IList<SerializableSymbolAndProjectId> array,
            CancellationToken cancellationToken
        )
        {
            var result = ArrayBuilder<ISymbol>.GetInstance(array.Count);
            foreach (var dehydrated in array)
            {
                cancellationToken.ThrowIfCancellationRequested();
                var rehydrated = await dehydrated
                    .TryRehydrateAsync(solution, cancellationToken)
                    .ConfigureAwait(false);
                if (rehydrated != null)
                {
                    result.Add(rehydrated);
                }
            }
            return result.ToImmutableAndFree();
        }
    }
}
|
package com.satendranegi.javalearnings;
public class TernaryOperator {
    /**
     * Demonstrates that the ternary operator is shorthand for an if/else
     * assignment. Both forms print the same value (2) because i > 3 is false.
     */
    public static void main(String[] args) {
        int i = 2;
        int j = 3;

        // Classic if/else selection.
        if (i > 3) {
            j = 1;
        } else {
            j = 2;
        }
        System.out.println(j);

        // Equivalent ternary form.
        j = (i > 3) ? 1 : 2;
        System.out.println(j);
    }
}
|
package hr.foi.daspicko.iotmas.repositories;
import hr.foi.daspicko.iotmas.models.Agent;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
/**
 * Spring Data repository for {@link Agent} entities keyed by a {@code Long}
 * id. No custom query methods are declared; all CRUD behaviour is inherited
 * from {@link CrudRepository}.
 */
public interface AgentRepository extends CrudRepository<Agent, Long> {
}
|
from PIL import Image
import numpy as np
from robopilot.utils import img_to_binary, binary_to_img, arr_to_img, \
img_to_arr, normalize_image
class ImgArrToJpg():
    """Part that encodes a numpy image array into JPEG bytes."""

    def run(self, img_arr):
        """Return the JPEG-encoded bytes for ``img_arr``, or None.

        None is returned when the input is None or when encoding fails;
        callers treat None as "no frame" (best-effort behaviour preserved).
        """
        if img_arr is None:
            return None
        try:
            image = arr_to_img(img_arr)
            jpg = img_to_binary(image)
            return jpg
        except Exception:
            # BUG FIX: the original bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit; limit the best-effort guard
            # to ordinary errors.
            return None
class JpgToImgArr():
    """Part that decodes JPEG bytes back into a numpy image array."""

    def run(self, jpg):
        """Decode ``jpg`` bytes to an array; a None input passes through."""
        if jpg is None:
            return None
        return img_to_arr(binary_to_img(jpg))
class StereoPair:
    '''
    Take two images and put them together in a single image.
    '''

    @staticmethod
    def _rgb2gray(rgb):
        '''Collapse an RGB array to single-channel greyscale (ITU-R 601 luma),
        matching the weights used by ImgStack.rgb2gray in this module.'''
        return np.dot(rgb[..., :3], [0.299, 0.587, 0.114])

    def run(self, image_a, image_b):
        '''
        Combine the two images into a single image:
        one in the red channel, the other in green, and their difference in blue.
        Returns an empty array when either input is missing.
        '''
        if image_a is not None and image_b is not None:
            # shape is (rows, cols, channels); original variable names kept
            # so the reshape calls below stay byte-compatible.
            width, height, _ = image_a.shape
            # BUG FIX: the original called dk.utils.rgb2gray, but `dk` is
            # never imported in this module (NameError at runtime); use a
            # local implementation with the same luma weights instead.
            grey_a = self._rgb2gray(image_a)
            grey_b = self._rgb2gray(image_b)
            grey_c = grey_a - grey_b
            stereo_image = np.zeros([width, height, 3], dtype=np.dtype('B'))
            stereo_image[..., 0] = np.reshape(grey_a, (width, height))
            stereo_image[..., 1] = np.reshape(grey_b, (width, height))
            stereo_image[..., 2] = np.reshape(grey_c, (width, height))
        else:
            stereo_image = []
        return np.array(stereo_image)
class ImgCrop:
    """
    Crop an image to an area of interest.
    """

    def __init__(self, top=0, bottom=0, left=0, right=0):
        # Number of pixels to trim from each edge.
        self.top = top
        self.bottom = bottom
        self.left = left
        self.right = right

    def run(self, img_arr):
        """Return ``img_arr`` with the configured margins removed.

        A None input passes through unchanged.
        """
        if img_arr is None:
            return None
        # BUG FIX: numpy image shape is (rows, cols, channels), i.e.
        # (height, width, channels). The original unpacked it as
        # (width, height, _) and then sliced rows with the column count and
        # columns with the row count, mis-cropping any non-square image.
        height, width = img_arr.shape[:2]
        img_arr = img_arr[self.top:height - self.bottom,
                          self.left:width - self.right]
        return img_arr

    def shutdown(self):
        # Nothing to release.
        pass
class ImgStack:
    """
    Stack N previous images into a single N channel image, after converting
    each to grayscale. The most recent image is the last channel, and pushes
    previous images towards the front.
    """

    def __init__(self, num_channels=3):
        # Rolling buffer holding the last `num_channels` greyscale frames.
        self.img_arr = None
        self.num_channels = num_channels

    def rgb2gray(self, rgb):
        '''
        take a numpy rgb image return a new single channel image converted to
        greyscale
        '''
        return np.dot(rgb[..., :3], [0.299, 0.587, 0.114])

    def run(self, img_arr):
        rows, cols, _ = img_arr.shape
        grey = self.rgb2gray(img_arr)
        # Lazily allocate the channel stack on the first frame.
        if self.img_arr is None:
            self.img_arr = np.zeros([rows, cols, self.num_channels],
                                    dtype=np.dtype('B'))
        # Shift every channel one slot towards the front...
        for ch in range(self.num_channels - 1):
            self.img_arr[..., ch] = self.img_arr[..., ch + 1]
        # ...then store the newest greyscale frame in the last channel.
        self.img_arr[..., self.num_channels - 1:] = np.reshape(grey, (rows, cols, 1))
        return self.img_arr

    def shutdown(self):
        pass
|
Since I've had a reasonable amount of contact with Ruby, I decided to do some
unrelated exercises here. They are poorly implemented.
|
package com.megatest.myapplication.framework.presentation.list
import android.view.View
import androidx.fragment.app.viewModels
import androidx.navigation.findNavController
import androidx.navigation.fragment.findNavController
import androidx.recyclerview.widget.ItemTouchHelper
import androidx.recyclerview.widget.LinearLayoutManager
import com.megatest.myapplication.R
import com.megatest.myapplication.business.domain.model.TransactionModel
import com.megatest.myapplication.business.domain.state.StateMessageCallback
import com.megatest.myapplication.databinding.FragmentTransactionListBinding
import com.megatest.myapplication.framework.presentation.adapter.TransactionListAdapter
import com.megatest.myapplication.framework.presentation.base.BaseFragment
import com.megatest.myapplication.framework.presentation.common.TopSpacingItemDecoration
import com.megatest.myapplication.framework.presentation.util.gone
import com.megatest.myapplication.framework.presentation.util.visible
import com.megatest.myapplication.util.cLog
import com.megatest.myapplication.util.cLogD
import dagger.hilt.android.AndroidEntryPoint
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.FlowPreview
@ExperimentalCoroutinesApi
@FlowPreview
@AndroidEntryPoint
// Screen that lists cached transactions; rows and the FAB both navigate to
// the transaction-detail graph.
class TransactionListFragment :
    BaseFragment<TransactionListViewModel, FragmentTransactionListBinding>(R.layout.fragment_transaction_list),
    TransactionListAdapter.Interaction {
    override val viewModel: TransactionListViewModel by viewModels()
    // Nulled out in onDestroyView so the adapter does not outlive the view.
    private var transactionListAdapter: TransactionListAdapter? = null
    override fun setupChannel() {
        viewModel.setupChannel()
    }
    override fun subscribeObservers() {
        // Render the list, or an empty-state message when there is nothing
        // to show.
        viewModel.viewState.observe(viewLifecycleOwner, { viewState ->
            viewState?.apply {
                transactionList?.let { transactionList->
                    if (transactionList.isEmpty()){
                        binding.rvTransitionList.gone()
                        binding.tvNoList.visible()
                    }else{
                        transactionListAdapter?.submitList(transactionList)
                        // NOTE(review): notifyDataSetChanged() right after
                        // submitList() defeats ListAdapter's DiffUtil
                        // animations — confirm whether it is still needed.
                        transactionListAdapter?.notifyDataSetChanged()
                        binding.rvTransitionList.visible()
                        binding.tvNoList.gone()
                        transactionList[0].toString().cLogD()
                    }
                }
            }
        })
        viewModel.shouldDisplayProgressBar.observe(viewLifecycleOwner, {
            uiController.displayProgressBar(it)
        })
        // Surface one-shot state messages and clear each once the UI
        // controller reports it has been consumed.
        viewModel.stateMessage.observe(viewLifecycleOwner, { stateMessage ->
            stateMessage?.let {
                uiController.onResponseReceived(
                    response = stateMessage.response,
                    stateMessageCallback = object : StateMessageCallback {
                        override fun removeMessageFromStack() {
                            viewModel.clearStateMessage()
                        }
                    }
                )
            }
        })
    }
    override fun initBinding(view: View): FragmentTransactionListBinding =
        FragmentTransactionListBinding.bind(view)
    override fun init() {
        setupUI()
    }
    override fun onResume() {
        super.onResume()
        // Refresh from cache every time the screen becomes visible again.
        viewModel.retrieveTransactionListInCache()
    }
    private fun setupUI() {
        setupRecyclerView()
        binding.fabAdd.setOnClickListener {
            navigateToDetailFragment()
        }
    }
    private fun setupRecyclerView() {
        binding.rvTransitionList.apply {
            layoutManager = LinearLayoutManager(activity)
            val topSpacingDecorator = TopSpacingItemDecoration(20)
            addItemDecoration(topSpacingDecorator)
            transactionListAdapter = TransactionListAdapter(
                this@TransactionListFragment,
            )
            adapter = transactionListAdapter
        }
    }
    private fun navigateToDetailFragment() {
        findNavController().navigate(R.id.action_transactionListFragment_to_transactionDetailGraph)
    }
    override fun onItemSelected(position: Int, item: TransactionModel) {
        // Pass the tapped transaction's id to the detail graph via safe-args.
        val transactionId = item.id
        val direction = TransactionListFragmentDirections.actionTransactionListFragmentToTransactionDetailGraph(
            transactionId
        )
        findNavController().navigate(direction)
    }
    override fun onDestroyView() {
        super.onDestroyView()
        transactionListAdapter = null
    }
}
|
using CSharpFunctionalExtensions.Internal;
using System;
using System.Runtime.Serialization;
namespace CSharpFunctionalExtensions
{
    [Serializable]
    public partial struct Result<T, E> : IResult, IValue<T>, ISerializable
    {
        // Shared success/failure bookkeeping (flag plus typed error).
        private readonly ResultCommonLogic<E> _logic;
        public bool IsFailure => _logic.IsFailure;
        public bool IsSuccess => _logic.IsSuccess;
        public E Error => _logic.Error;
        private readonly T _value;
        /// <summary>
        /// The success value. Accessing this on a failed result throws
        /// <see cref="ResultFailureException{E}"/> carrying the error.
        /// </summary>
        public T Value => IsSuccess ? _value : throw new ResultFailureException<E>(Error);
        internal Result(bool isFailure, E error, T value)
        {
            _logic = new ResultCommonLogic<E>(isFailure, error);
            _value = value;
        }
        // Deserialization constructor; the value is only read back for
        // successful results (failures serialize no "Value" entry).
        private Result(SerializationInfo info, StreamingContext context)
        {
            _logic = ResultCommonLogic<E>.Deserialize(info);
            _value = _logic.IsFailure ? default : (T)info.GetValue("Value", typeof(T));
        }
        void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context)
            => _logic.GetObjectData(info, this);
        // Widening conversion to the non-generic Result; the typed error is
        // flattened to its string representation.
        public static implicit operator Result(Result<T, E> result)
        {
            if (result.IsSuccess)
                return Result.Success();
            else
                return Result.Failure(result.Error.ToString());
        }
        // Conversion to Result<T>, keeping the value but stringifying the error.
        public static implicit operator Result<T>(Result<T, E> result)
        {
            if (result.IsSuccess)
                return Result.Success(result.Value);
            else
                return Result.Failure<T>(result.Error.ToString());
        }
    }
}
|
## Currency Converter
```bash
git clone git@github.com:lis-space/currency-converter.git
cd currency-converter
echo "OER_APP_ID = 'YOURAPPID'" >> app/app/settings_local.py
./bin/up.sh
```
## API
Currencies list:
> http://0.0.0.0:8000/converter/currencies/
Rates list:
> http://0.0.0.0:8000/converter/rates/
Convert:
> http://0.0.0.0:8000/converter/CZK/PLN/100/
## Frontend
Converter:
> http://0.0.0.0:3000/
## Tests
```bash
./bin/tests-app.sh
```
## License
MIT.
|
-- Seed/reset script for the test database.
-- The reset and inserts run inside one transaction so a failure rolls back
-- cleanly; the trailing `table` statements run after the commit.
begin work;
-- Empty all tables, reset their identity sequences, and cascade so
-- foreign-key references do not block the truncation.
truncate employees restart identity cascade;
truncate departments restart identity cascade;
truncate companies restart identity cascade;
truncate truckplans restart identity cascade;
truncate trucks restart identity cascade;
-- The explicit sequence resets below are superseded by RESTART IDENTITY.
-- alter sequence employees_employeeid_seq restart;
-- alter sequence departments_departmentid_seq restart;
-- alter sequence truckplans_truckplanid_seq restart;
-- alter sequence trucks_truckid_seq restart;
-- Minimal fixture rows; the hard-coded ids (1) are valid because the
-- identity sequences were just reset.
insert into departments (name,description,email)
values('sales','loud mouths','info@example.com');
insert into employees (lastname,firstname,email,departmentid,dob)
values ('Puk','Piet','p.puk@vanderheiden.nl',1,'1993-03-17');
insert into trucks (plate) values( 'Vroooom');
-- The plan column is a timestamp range: half-open interval [start, end).
insert into truckplans(truckid,plan) values(1,'[2019-05-05T19:30,2019-05-06T8:30)');
commit;
-- `table x;` is shorthand for `select * from x;` — dump each table so the
-- fixture contents can be eyeballed.
table employees;
table departments;
table companies;
table trucks;
table truckplans;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.