text stringlengths 1 1.05M |
|---|
<reponame>Arpitgoyalgg/ng-simple-state
import { ModuleWithProviders, NgModule } from '@angular/core';
import { NgSimpleStateConfig, NG_SIMPLE_STORE_CONFIG } from './ng-simple-state-models';
@NgModule({
declarations: [],
imports: [],
exports: [],
})
export class NgSimpleStateModule {
static forRoot(
ngHttpCachingConfig?: NgSimpleStateConfig
): ModuleWithProviders<NgSimpleStateModule> {
return {
ngModule: NgSimpleStateModule,
providers: [
{
provide: NG_SIMPLE_STORE_CONFIG,
useValue: ngHttpCachingConfig,
},
],
};
}
}
|
<filename>v2/src/app/pages/doclist/docadd/modal-m-doc/modal-m-doc.component.ts<gh_stars>0
import { Component, Input } from '@angular/core';
import { NbDialogRef } from '@nebular/theme';
import { Router } from '@angular/router';
import { Location } from '@angular/common';
import pouchdb from 'pouchdb';
import { Igijuser, Ogijuser, nano_time, MyDataBaseNames } from '../../../../interface';
@Component({
  selector: 'ngx-modal',
  templateUrl: './modal-m-doc.component.html',
  styleUrls: ['./modal-m-doc.component.scss']
})
// Modal dialog for creating, updating or deleting a user document stored in
// a local PouchDB database that live-syncs with a remote CouchDB instance.
export class ModalMComponent {
  // Local PouchDB handle; opened in ngOnInit.
  private db: PouchDB.Database<{}>;
  dbname: string;
  // NOTE(review): credentials are hard-coded in the URL — move to
  // configuration or a secret store before shipping.
  remoteCouch = 'http://admin:admin@localhost:5984/';
  user: Igijuser;
  //usercom : UserComponent;
  @Input() _id: string;        // document id to load (empty => create mode)
  @Input() _rev: string;       // revision; presence selects update/delete path
  @Input() isdelete: boolean;  // true => dialog deletes instead of updating
  _selectedUser: Igijuser;     // the document currently being edited
  userList: Igijuser[];
  constructor(protected ref: NbDialogRef<ModalMComponent>, public _Location: Location, public router: Router) {
    this.user = new Ogijuser();
    this.user._rev = '';
    // assumes nano_time.now() yields a time-based unique id — TODO confirm
    this.user._id = nano_time.now();
    this.userList = new Array<Ogijuser>();
    this._selectedUser = new Ogijuser();
  }
  // NOTE(review): class defines ngOnInit but does not declare
  // `implements OnInit`; Angular still calls it, but the interface documents intent.
  ngOnInit() {
    this.remoteCouch += MyDataBaseNames.dbuser;
    this.db = new pouchdb(MyDataBaseNames.dbuser);
    this.sync();
    this.loadUserList();
    if (this._id) {
      this.getuser(this._id);
    } else {
      this._selectedUser = new Ogijuser();
    }
  }
  // Dispatch on dialog mode: delete when isdelete, update when a revision is
  // known, otherwise insert a new document. Always closes the dialog.
  updateUser() {
    console.log(this._selectedUser);
    console.log(this._selectedUser._id);
    console.log(this._selectedUser._rev);
    if (this._rev) {
      if (this.isdelete) {
        console.log('delete');
        this.deleteUser();
      } else {
        console.log('update');
        // NOTE(review): { force: true } writes a conflicting revision instead
        // of failing on a rev mismatch — confirm this is intended.
        this.db.put(this._selectedUser, { force: true }).then(res => {
          console.log(res);
        }).catch(err => {
          console.log((err));
        });
      }
    } else {
      try {
        // NOTE(review): Math.random() is not collision-safe for document ids.
        this.user._id = (Math.random() * 1000000) + '';
        console.log('add new');
        this.insert();
      }
      catch (e) {
      }
    }
    this.close();
  }
  // Persist this.user as a new document (callback-style db.put).
  insert() {
    this.db.put(this.user, { force: true }, (err, res) => {
      if (err) {
        console.log('err after put'
        );
        console.log(err);
      } else {
        console.log('after put');
        console.log(res);
      }
    });
  }
  // Remove the selected document. NOTE(review): both success and error are
  // silently ignored here.
  deleteUser() {
    this.db.remove(this._selectedUser).then(res => {
    }).catch(err => {
    })
  }
  // Bulk write; new_edits:false keeps the revisions supplied in arr
  // (replication-style write).
  updateManyUsers(arr: []) {
    // for many at once
    this.db.bulkDocs(arr, { new_edits: false }, (err, res) => {
      if (err) {
        console.log(err);
      }
      else {
        console.log(res);
      }
    });
  }
  // Load one document by id into _selectedUser.
  getuser(id: string) {
    this.db.get(id).then(res => {
      console.log(res);
      this._selectedUser = res as Ogijuser;
    }).catch(err => {
      console.log('getuser error');
      //console.log('id: '+id);
      console.log(err);
    });
  }
  // Close the dialog, signalling the caller to refresh.
  close() {
    this.ref.close({ command: 'update' });
  }
  // Fetch the first page (10 docs, newest first) into userList.
  loadUserList() {
    const pageSize = 10;
    const offSet = 0;
    const parent = this;
    this.userList.length = 0;
    this.userList = new Array<Igijuser>();
    this.db.allDocs({ limit: pageSize, skip: offSet, descending: true, include_docs: true }).then(res => {
      //console.log(res);
      for (let index = 0; index < res.rows.length; index++) {
        parent.userList.push(<Igijuser><unknown>res.rows[index].doc);
      }
    }).catch(err => {
      console.log(err);
    });
  }
  // Start continuous two-way replication with the remote CouchDB and refresh
  // the visible list whenever remote changes are pulled in.
  sync() {
    //syncDom.setAttribute('data-sync-state', 'syncing');
    let parent = this;
    this.db.sync(this.remoteCouch, {
      live: true,
      //retry: true
    }).on('change', async (info) => {
      console.log('sync res');
      console.log(info);
      if (info.direction == "pull") {
        this.loadUserList();
      }
    }).on('paused', function (err) {
      // replication paused (e.g. replication up to date, user went offline)
      console.log('paused');
    }).on('active', function () {
      // replicate resumed (e.g. new changes replicating, user went back online)
      console.log('active');
    }).on('denied', function (err) {
      // a document failed to replicate (e.g. due to permissions)
      console.log('denied');
    }).on('complete', function (info) {
      // handle complete
    }).on('error', function (err) {
      console.log('sync err');
      console.log(err);
    });
  }
}
|
// Grab-bag of WebGL helpers. All methods use the context cached on `this.gl`
// by init(). Fixes in this revision: loadCubeTexture previously referenced a
// bare global `gl` instead of `this.gl`, and clearContext passed 0 to
// bindBuffer where WebGL requires null.
var glUtils={
    canvas :null,
    gl :null,
    isGL2 :null,  // true when a WebGL2 context was obtained
    // Look up the canvas by element id and create a WebGL2 context,
    // falling back to WebGL1.
    init(name){
        this.canvas = document.querySelector('#'+name);
        this.gl = this.canvas.getContext('webgl2');
        if(! this.gl){
            this.gl = this.canvas.getContext('webgl');
            this.isGL2 = false;
            console.log("Notice that webgl2 is not supported!");
        }else{
            this.isGL2 = true;
        }
        if (!this.gl) {
            alert('Unable to initialize WebGL. Your browser or machine may not support it.');
            return ;
        }
        // this.setGLViewSize( window.innerWidth, window.innerHeight);
    },
    // Wrap a Shadertoy-style mainImage() fragment source with the uniform
    // declarations and main() needed to run it on a fullscreen quad.
    packQuadShader(fragSource){
        var fragHead =
`
precision highp float;
precision highp int;
#define PI 3.1415926
uniform vec2 iResolution;
uniform float iTime;
\n`;
        var fragTail = `
void main() {
mainImage(gl_FragColor,gl_FragCoord.xy);
}`;
        return fragHead+fragSource+fragTail;
    },
    // Resize the canvas backing store and the GL viewport together.
    setGLViewSize(width, height){
        this.canvas.width = width;
        this.canvas.height =height;
        this.gl.viewport(0, 0, this.canvas.width, this.canvas.height);
    },
    getResolution(){
        return [this.canvas.width, this.canvas.height];
    },
    // Read back the full framebuffer as RGBA bytes.
    getScreenPixels(){
        var pixels = new Uint8Array(this.canvas.width * this.canvas.height * 4);
        this.gl.readPixels(0, 0, this.canvas.width, this.canvas.height, this.gl.RGBA, this.gl.UNSIGNED_BYTE, pixels);
        return pixels;
    },
    // Create a static vertex buffer initialised from `data`.
    createVertexBuffer(data) {
        const positionBuffer = this.gl.createBuffer();
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, positionBuffer);
        this.gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array(data), this.gl.STATIC_DRAW);
        return positionBuffer;
    },
    // Allocate an uninitialised dynamic vertex buffer of `size` bytes.
    createVertexBufferBySize(size) {
        const matrixBuffer = this.gl.createBuffer();
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, matrixBuffer);
        this.gl.bufferData(this.gl.ARRAY_BUFFER, size, this.gl.DYNAMIC_DRAW);
        return matrixBuffer;
    },
    // (Name typo "wirte" kept: callers elsewhere depend on it.)
    wirteDataBuffer(buffer, data) {
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, buffer);
        this.gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array(data), this.gl.STATIC_DRAW);
    },
    // Upload 16-bit indices. (Name typo kept for compatibility.)
    wirteIndexBuffer(buffer, data) {
        this.gl.bindBuffer(this.gl.ELEMENT_ARRAY_BUFFER, buffer);
        this.gl.bufferData(this.gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(data), this.gl.STATIC_DRAW);
    },
    // Fullscreen quad as a triangle strip: x, y, z, u, v per vertex.
    createQuadVertexBuffer() {
        const positionBuffer = this.gl.createBuffer();
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, positionBuffer);
        const positions = [
            1.0, 1.0, 0, 1, 1,
            -1.0, 1.0, 0, 0, 1,
            1.0, -1.0, 0, 1, 0,
            -1.0, -1.0, 0, 0, 0
        ];
        this.gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array(positions), this.gl.STATIC_DRAW);
        return positionBuffer;
    },
    // Compile + link a program; on any failure `cb` receives the error text
    // and null is returned.
    createShaderProgram(vsSource, fsSource, cb) {
        const vertexShader = this.loadShader(this.gl.VERTEX_SHADER, vsSource, cb);
        const fragmentShader = this.loadShader(this.gl.FRAGMENT_SHADER, fsSource, cb);
        // Create the shader program
        if(!fragmentShader||!vertexShader) return null;
        const shaderProgram = this.gl.createProgram();
        this.gl.attachShader(shaderProgram, vertexShader);
        this.gl.attachShader(shaderProgram, fragmentShader);
        this.gl.linkProgram(shaderProgram);
        // If creating the shader program failed, alert
        if (!this.gl.getProgramParameter(shaderProgram, this.gl.LINK_STATUS)) {
            console.log('Error in shader program: ' + this.gl.getProgramInfoLog(shaderProgram));
            cb('Error in shader program: ' + this.gl.getProgramInfoLog(shaderProgram));
            return null;
        }
        return shaderProgram;
    },
    // Compile a single shader stage; reports errors through `cb`.
    loadShader(type, source, cb) {
        const shader = this.gl.createShader(type);
        this.gl.shaderSource(shader, source);
        this.gl.compileShader(shader);
        if (!this.gl.getShaderParameter(shader, this.gl.COMPILE_STATUS)) {
            console.log('An error occurred compiling the shaders: ' + this.gl.getShaderInfoLog(shader)+' type is '+type);
            var err = 'An error occurred compiling the shaders: ' + this.gl.getShaderInfoLog(shader);
            cb(err);
            this.gl.deleteShader(shader);
            return null;
        }
        return shader;
    },
    // Fill `texture` with a 1x1 blue placeholder, then replace it with the
    // image at `url` once it has loaded.
    loadTexture(texture, url) {
        this.gl.bindTexture(this.gl.TEXTURE_2D, texture);
        const level = 0;
        const internalFormat = this.gl.RGBA;
        const width = 1;
        const height = 1;
        const border = 0;
        const srcFormat = this.gl.RGBA;
        const srcType =this.gl.UNSIGNED_BYTE;
        const pixel = new Uint8Array([0, 0, 255, 255]); // opaque blue
        this.gl.texImage2D(this.gl.TEXTURE_2D, level, internalFormat,
            width, height, border, srcFormat, srcType,
            pixel);
        const image = new Image();
        var that = this;
        // cross-origin data: request CORS when the image comes from another origin
        if ((new URL(url, window.location.href)).origin !== window.location.origin) {
            image.crossOrigin = "";
        }
        image.onload = function() {
            that.gl.bindTexture(that.gl.TEXTURE_2D, texture);
            that.gl.texImage2D(that.gl.TEXTURE_2D, level, internalFormat,
                srcFormat, srcType, image);
            // WebGL1 has different requirements for power of 2 images
            // vs non power of 2 images so check if the image is a
            // power of 2 in both dimensions.
            if (that.isPowerOf2(image.width) && that.isPowerOf2(image.height)) {
                // Yes, it's a power of 2. Generate mips.
                that.gl.generateMipmap(that.gl.TEXTURE_2D);
            } else {
                // No, it's not a power of 2. Turn off mips and set
                // wrapping to clamp to edge
                that.gl.texParameteri(that.gl.TEXTURE_2D, that.gl.TEXTURE_WRAP_S, that.gl.CLAMP_TO_EDGE);
                that.gl.texParameteri(that.gl.TEXTURE_2D, that.gl.TEXTURE_WRAP_T, that.gl.CLAMP_TO_EDGE);
                that.gl.texParameteri(that.gl.TEXTURE_2D, that.gl.TEXTURE_MIN_FILTER, that.gl.LINEAR);
            }
            console.log('successfull image ////////////////////////////////');
        };
        image.src = url;
    },
    // Upload an in-memory image/canvas/video into `texture`.
    loadTextureByData(texture, data) {
        this.gl.bindTexture(this.gl.TEXTURE_2D, texture);
        const level = 0;
        const internalFormat = this.gl.RGBA;
        const srcFormat = this.gl.RGBA;
        const srcType =this.gl.UNSIGNED_BYTE;
        this.gl.bindTexture(this.gl.TEXTURE_2D, texture);
        this.gl.texImage2D(this.gl.TEXTURE_2D, level, internalFormat,
            srcFormat, srcType, data);
        if (this.isPowerOf2(data.width) && this.isPowerOf2(data.height)) {
            // Yes, it's a power of 2. Generate mips.
            this.gl.generateMipmap(this.gl.TEXTURE_2D);
        } else {
            // No, it's not a power of 2. Turn off mips and set
            // wrapping to clamp to edge
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
        }
    },
    // Build a cube map from six urls (+X, -X, +Y, -Y, +Z, -Z). Each face is
    // first allocated as an empty 512x512 level and replaced as its image loads.
    // BUG FIX: this method previously used a bare global `gl`, which throws a
    // ReferenceError unless some global happened to exist; all GL calls now go
    // through `this.gl`.
    loadCubeTexture(fileArray){
        var texture = this.gl.createTexture();
        this.gl.bindTexture(this.gl.TEXTURE_CUBE_MAP, texture);
        const faceInfos = [
            {
                target: this.gl.TEXTURE_CUBE_MAP_POSITIVE_X,
                url : fileArray[0]
            },
            {
                target: this.gl.TEXTURE_CUBE_MAP_NEGATIVE_X,
                url : fileArray[1]
            },
            {
                target: this.gl.TEXTURE_CUBE_MAP_POSITIVE_Y,
                url : fileArray[2]
            },
            {
                target: this.gl.TEXTURE_CUBE_MAP_NEGATIVE_Y,
                url : fileArray[3]
            },
            {
                target: this.gl.TEXTURE_CUBE_MAP_POSITIVE_Z,
                url : fileArray[4]
            },
            {
                target: this.gl.TEXTURE_CUBE_MAP_NEGATIVE_Z,
                url : fileArray[5]
            },
        ];
        faceInfos.forEach(element => {
            const {target, url} = element;
            const level = 0;
            const interalFormat = this.gl.RGBA;
            const width = 512;
            const height = 512;
            const format = this.gl.RGBA;
            const type = this.gl.UNSIGNED_BYTE;
            this.gl.texImage2D(target, level, interalFormat, width, height, 0, format, type, null);
            const image = new Image();
            image.src = url;
            image.addEventListener('load', () => {
                this.gl.bindTexture(this.gl.TEXTURE_CUBE_MAP, texture);
                this.gl.texImage2D(target, level, interalFormat, format, type, image);
                this.gl.generateMipmap(this.gl.TEXTURE_CUBE_MAP);
            });
        });
        this.gl.generateMipmap(this.gl.TEXTURE_CUBE_MAP);
        this.gl.texParameteri(this.gl.TEXTURE_CUBE_MAP, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR_MIPMAP_LINEAR);
    },
    deleteTexture(tex){
        this.gl.deleteTexture(tex);
    },
    // True when v is a power of two (also true for 0 — callers only pass sizes).
    isPowerOf2(v){
        return (v&(v-1))==0;
    },
    // Upload a byte array as a 1-row ALPHA texture (lookup-table style).
    loadTexture1DFromData(data) {
        const texture = this.gl.createTexture();
        this.gl.bindTexture(this.gl.TEXTURE_2D, texture);
        this.gl.texImage2D(this.gl.TEXTURE_2D, 0, this.gl.ALPHA, data.length, 1, 0, this.gl.ALPHA, this.gl.UNSIGNED_BYTE, data);
        // Non-power-of-two safe: no mips, clamp to edge
        this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
        this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);
        this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
        return texture;
    },
    // Window-sized RGBA texture for use as a framebuffer color attachment.
    createRenderTargetTexture() {
        const targetTextureWidth = window.innerWidth;
        const targetTextureHeight = window.innerHeight;
        const targetTexture = this.gl.createTexture();
        this.gl.bindTexture(this.gl.TEXTURE_2D, targetTexture);
        {
            // define size and format of level 0
            const level = 0;
            const internalFormat = this.gl.RGBA;
            const border = 0;
            const format = this.gl.RGBA;
            const type = this.gl.UNSIGNED_BYTE;
            const data = null;
            this.gl.texImage2D(this.gl.TEXTURE_2D, level, internalFormat,
                targetTextureWidth, targetTextureHeight, border,
                format, type, data);
            // set the filtering so we don't need mips
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);
        }
        return targetTexture;
    },
    // Window-sized depth texture (DEPTH_COMPONENT32F is WebGL2-only).
    createDepthTexture() {
        const depthTextureWidth = window.innerWidth;
        const depthTextureHeight = window.innerHeight;
        const depthTexture = this.gl.createTexture();
        this.gl.bindTexture(this.gl.TEXTURE_2D, depthTexture);
        {
            // define size and format of level 0
            const level = 0;
            const internalFormat = this.gl.DEPTH_COMPONENT32F;
            const border = 0;
            const format = this.gl.DEPTH_COMPONENT;
            const type = this.gl.FLOAT;
            const data = null;
            this.gl.texImage2D(this.gl.TEXTURE_2D, level, internalFormat,
                depthTextureWidth, depthTextureHeight, border, format, type, data);
            // set the filtering so we don't need mips
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.NEAREST);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.NEAREST);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);
        }
        return depthTexture;
    },
    // Framebuffer with `rtex` as color attachment 0 and `dtex` as depth.
    createFrameBuffer(rtex, dtex){
        const fb = this.gl.createFramebuffer();
        this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, fb);
        // attach the texture as the first color attachment
        this.gl.framebufferTexture2D(this.gl.FRAMEBUFFER, this.gl.COLOR_ATTACHMENT0, this.gl.TEXTURE_2D, rtex, 0);
        this.gl.framebufferTexture2D(this.gl.FRAMEBUFFER, this.gl.DEPTH_ATTACHMENT, this.gl.TEXTURE_2D, dtex, 0);
        this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, null);
        return fb;
    },
    // Re-upload a byte array into an existing 1-row ALPHA texture.
    writeDataToTexture(texture, data){
        if(data.length<2) return;
        this.gl.bindTexture(this.gl.TEXTURE_2D, texture);
        this.gl.texImage2D(this.gl.TEXTURE_2D, 0, this.gl.ALPHA, data.length, 1, 0, this.gl.ALPHA, this.gl.UNSIGNED_BYTE, data);
        // Non-power-of-two safe: no mips, clamp to edge
        this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
        this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);
        this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.NEAREST);
    },
    //sometimes we need clear gl context
    clearContext(){
        // BUG FIX: unbinding a buffer requires null; 0 is not a WebGLBuffer.
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, null);
    },
};
export {glUtils}
from typing import List
def remove_whitespace(str_list: List[str]) -> List[str]:
    """Return a new list with leading/trailing whitespace stripped from each string."""
    return list(map(str.strip, str_list))
def is_plaindrome(num):
    """Return True if num reads the same forwards and backwards.

    (Function name kept as in the original file; "palindrome" is the
    standard spelling.) Negative numbers are never palindromes here,
    matching the original digit-reversal arithmetic, and 0 is one.
    """
    digits = str(num)
    return num >= 0 and digits == digits[::-1]


num = 1454
if is_plaindrome(num):
    print(num, "is a plaindrome")
else:
    print(num, "is not a plaindrome")
<filename>src/command/CommandUnit.java
package command;
import java.util.ResourceBundle;
/**
 * Packages commands from the interpreter and translates them into the
 * appropriate turtle/pen method names from the resources file so that the
 * executor can invoke them using reflection.
 *
 * @author Natalie
 */
public class CommandUnit implements Command {
    // NOTE(review): mixes '.' and '/' separators; verify ResourceBundle
    // resolves this path as intended against the resources layout.
    public static final String DEFAULT_RESOURCE_PACKAGE = "resources.executor/";
    public static final String REFLECTION_RESOURCES = "Commandable";
    // Commands whose values must be adjusted before dispatch.
    // NOTE(review): "Home" appears at both index 0 and index 4; in manipVal()
    // the index-0 branch is immediately overridden by the index-4 branch, so
    // Home always ends up with {0.0, 0.0} and the first assignment is dead.
    public static final String[] CHANGE_VAL = {"Home", "ID", "Backward", "Right", "Home"};
    public static final String[] CHANGE_VAL_VIS = {"ShowTurtle", "HideTurtle", "PenDown", "PenUp"};
    private ResourceBundle refResources = ResourceBundle.getBundle(DEFAULT_RESOURCE_PACKAGE + REFLECTION_RESOURCES);
    private String command;  // raw command name, replaced by its reflection name in manip()
    private Double[] value;  // argument values for the reflected method call
    /**
     * @param comm the raw command name from the interpreter
     * @param val the command's values: one number for a double, two for a point
     */
    public CommandUnit(String comm, Double... val) {
        command = comm;
        value = val;
        manip();
    }
    /**
     * Turns the user command into a turtle method string and adjusts values
     * for select commands.
     */
    private void manip() {
        manipVal();
        command = refResources.getString(command);
    }
    /**
     * Normalises {@code value} for commands that need it: clears it, negates
     * it, resets it to the origin, or maps visibility commands to 1.0/0.0.
     */
    private void manipVal() {
        if (command.equals(CHANGE_VAL[0]) || command.equals(CHANGE_VAL[1]))
            value = new Double[]{};
        if (command.equals(CHANGE_VAL[2]) || command.equals(CHANGE_VAL[3]))
            for (int i = 0; i < value.length; i++)
                value[i] *= -1;
        if (command.equals(CHANGE_VAL[4]))
            value = new Double[] {0.0,0.0};
        if (command.equals(CHANGE_VAL_VIS[0]) || command.equals(CHANGE_VAL_VIS[2]))
            value = new Double[]{1.0};
        if (command.equals(CHANGE_VAL_VIS[1]) || command.equals(CHANGE_VAL_VIS[3]))
            value = new Double[]{0.0};
    }
    @Override
    public String getCommand() {
        return command;
    }
    @Override
    public Object[] getValue() {
        return value;
    }
    @Override
    public Class[] getValType() {
        Class[] className = new Class[value.length];
        for (int i = 0; i < value.length; i++)
            className[i] = Double.class;
        return className;
    }
    // NOTE(review): by the time this is called, `command` has already been
    // translated once in manip(); confirm a second getString lookup is intended.
    @Override
    public String getCommandableType(){
        return refResources.getString(command);
    }
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2015:1189
#
# Security announcement date: 2015-06-25 13:49:20 UTC
# Script generation date: 2017-01-01 21:16:22 UTC
#
# Operating System: Red Hat 5
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - kmod-kvm.x86_64:83-273.el5_11
# - kmod-kvm-debug.x86_64:83-273.el5_11
# - kvm.x86_64:83-273.el5_11
# - kvm-debuginfo.x86_64:83-273.el5_11
# - kvm-qemu-img.x86_64:83-273.el5_11
# - kvm-tools.x86_64:83-273.el5_11
#
# Last versions recommanded by security team:
# - kmod-kvm.x86_64:83-276.el5_11
# - kmod-kvm-debug.x86_64:83-276.el5_11
# - kvm.x86_64:83-276.el5_11
# - kvm-debuginfo.x86_64:83-276.el5_11
# - kvm-qemu-img.x86_64:83-276.el5_11
# - kvm-tools.x86_64:83-276.el5_11
#
# CVE List:
# - CVE-2015-3209
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# BUG FIX: the install commands were truncated at "-83" and did not pin the
# fixed release; install the full fix versions listed in the header above.
sudo yum install kmod-kvm.x86_64-83-273.el5_11 -y
sudo yum install kmod-kvm-debug.x86_64-83-273.el5_11 -y
sudo yum install kvm.x86_64-83-273.el5_11 -y
sudo yum install kvm-debuginfo.x86_64-83-273.el5_11 -y
sudo yum install kvm-qemu-img.x86_64-83-273.el5_11 -y
sudo yum install kvm-tools.x86_64-83-273.el5_11 -y
|
<reponame>Nebulis/blog
import { getPathForEnglish, getPathForFrench } from "../utils"
// Tests for getPathForFrench: mapping a URL to its French (default-locale)
// equivalent by stripping the leading "/en" segment and preserving the query.
describe("getPathForFrench", () => {
  it("should work when url start with /en", () => {
    expect(getPathForFrench({ pathname: "/en/bla" })).toBe("/bla")
  })
  it("should work when url start is /en", () => {
    expect(getPathForFrench({ pathname: "/en" })).toBe("")
  })
  it("should work when url start is /", () => {
    expect(getPathForFrench({ pathname: "/" })).toBe("/")
  })
  it("should work when url does not start by /en", () => {
    expect(getPathForFrench({ pathname: "/bar" })).toBe("/bar")
  })
  it("should keep search", () => {
    expect(getPathForFrench({ pathname: "/bar", search: "?country=australia" })).toBe("/bar?country=australia")
  })
})
// Tests for getPathForEnglish: prefixing paths with "/en" when not already
// present, and preserving the query string.
// BUG FIX: the suite label contained a stray "(" ("getPathForEnglish(").
describe("getPathForEnglish", () => {
  it("should work when url start with /en", () => {
    expect(getPathForEnglish({ pathname: "/en/bla" })).toBe("/en/bla")
  })
  it("should work when url start is /en", () => {
    expect(getPathForEnglish({ pathname: "/en" })).toBe("/en")
  })
  it("should work when url start is /", () => {
    expect(getPathForEnglish({ pathname: "/" })).toBe("/en/")
  })
  it("should work when url does not start by /en", () => {
    expect(getPathForEnglish({ pathname: "/bar" })).toBe("/en/bar")
  })
  it("should keep search", () => {
    expect(getPathForEnglish({ pathname: "/bar", search: "?country=australia" })).toBe("/en/bar?country=australia")
  })
})
|
from typing import List, Tuple
from functools import wraps
def count_signature_args(func):
    """Decorator: print how many positional/keyword arguments each call receives.

    functools.wraps preserves the wrapped function's metadata. Note that the
    wrapper intentionally discards func's return value (it returns None),
    matching the original behavior.
    """
    @wraps(func)
    def signature_args_counter(*args, **kwargs):
        positional, keyword = len(args), len(kwargs)
        print(f'{positional} positional arguments, {keyword} keyword arguments')
        func(*args, **kwargs)
    return signature_args_counter
def signature_arg_values(func):
    """Decorator: print the positional and keyword argument values of each call.

    Metadata is preserved via functools.wraps; func's return value is
    discarded (the wrapper returns None), matching the original behavior.
    """
    @wraps(func)
    def signature_values_lister(*args, **kwargs):
        positionals = ', '.join(str(arg) for arg in args)
        keywords = ', '.join(f'{key}: {value}' for key, value in kwargs.items())
        print('positional arguments:', positionals)
        print('keyword arguments:', keywords)
        func(*args, **kwargs)
    return signature_values_lister
class Point:
    '''
    A Point class, with a constructor which is decorated by wrapping decorators
    '''
    # Decorators apply bottom-up: signature_arg_values wraps __init__ first,
    # then count_signature_args wraps that, so the count line prints first.
    # Note self counts as one positional argument in the printed totals.
    @count_signature_args
    @signature_arg_values
    def __init__(self, x: float, y: float):
        self.x = x
        self.y = y


# Demonstration: constructing a Point prints the argument count and values.
Point(2.5, y=3.2)
def signature_improper_decorator(func):
    """Decorator that deliberately omits functools.wraps.

    It prints the total number of arguments per call, but the decorated
    function loses its original __name__/__doc__ — this is the point of
    the example. The wrapper discards func's return value.
    """
    def not_wrapping_decorator(*args, **kwargs):
        total = len(args) + len(kwargs)
        print(f'{total} arguments')
        func(*args, **kwargs)
    return not_wrapping_decorator
class PointDecoratedWithoutWrapping:
    '''
    A Point class whose constructor is decorated WITHOUT functools.wraps,
    so __init__ loses its original metadata (name, docstring, annotations).
    '''
    @signature_improper_decorator
    def __init__(self, x: float, y: float):
        self.x = x
        self.y = y


# Demonstration: prints only the total argument count ("3 arguments",
# since self is included).
PointDecoratedWithoutWrapping(2.5, y=3.2)
import json

# A JSON document as a string.
json_string = "{\"name\":\"John Doe\",\"age\":30}"

# Parse the JSON string into a Python dict.
parsed_string = json.loads(json_string)

# Pretty-print with 4-space indentation and alphabetically sorted keys.
pretty_string = json.dumps(parsed_string, indent=4, sort_keys=True)

# Show output
print(pretty_string)

# Output — keys are sorted because sort_keys=True (the original sample showed
# "name" first and, worse, left the sample as a live dict expression):
# {
#     "age": 30,
#     "name": "John Doe"
# }
use bevy::asset::{AssetServer, Handle, HandleId, LoadState};
use bevy::render::shader::Shader;
use bevy::render::color::ColorMaterial;
fn fragment_shader(asset_server: &AssetServer) -> Option<Handle<Shader>> {
let color_material = ColorMaterial::fragment_shader(asset_server);
match color_material {
Some(handle) => Some(handle),
None => None,
}
} |
package week1.model;
import org.junit.*;
import static org.junit.Assert.*;
public class BillTest {
    private Product product1, product2;
    private Bill bill1;
    // Fresh fixtures before every test method.
    @Before
    public void initData() {
        product1 = new Product(1,"Grecha", 40.00);
        product2 = new Product(2,"juse", 20.00);
        bill1 = new Bill();
    }
    // NOTE(review): comparing a getter's result with itself always passes;
    // this only proves getSalesman() does not throw. Assert an expected value.
    @Test
    public void getSalesman() {
        assertEquals(bill1.getSalesman(),bill1.getSalesman());
    }
    // NOTE(review): same self-comparison issue as above.
    @Test
    public void getAmountPrice() {
        assertEquals(bill1.getAmountPrice(),bill1.getAmountPrice(),1);
    }
    // NOTE(review): same self-comparison issue as above.
    @Test
    public void getDataTime() {
        assertEquals(bill1.getDataTime(),bill1.getDataTime());
    }
    // Adding a product to an open bill should succeed.
    @Test
    public void addProduct1(){
        assertTrue(bill1.addProduct(product1));
    }
    // 40.00 + 20.00 must total 60.0 (delta 1 allows rounding).
    @Test
    public void calculateAmountPrice() {
        bill1.addProduct(product1);
        bill1.addProduct(product2);
        double sum = bill1.calculateAmountPrice();
        assertEquals(60.0,sum,1);
    }
    // NOTE(review): despite the name, this only verifies setCloseBill();
    // no printing behavior is exercised.
    @Test
    public void printBill() {
        assertTrue(bill1.setCloseBill());
    }
    // A product compared with itself must yield 0.
    @Test
    public void compareTo() {
        assertEquals(product1.compareTo(product1),0);
    }
    @Test
    public void setCloseBill() {
        assertTrue(bill1.setCloseBill());
    }
}
class Dice {
private:
    int numDice;  // how many six-sided dice are rolled per call

public:
    // Construct a roller for the given number of six-sided dice.
    Dice(int numDice) {
        this->numDice = numDice;
    }

    // Roll every die once and return the total. Each die contributes a value
    // in [1, 6] via rand(); seed with srand() externally if varying
    // sequences are required.
    int roll() {
        int total = 0;
        int remaining = numDice;
        while (remaining-- > 0) {
            total += rand() % 6 + 1;
        }
        return total;
    }
};
#!/bin/bash
#$ -cwd
# error = Merged with joblog
#$ -o joblog.$JOB_ID
#$ -j y
#$ -pe shared 8
#$ -l arch=intel-E5-2670,exclusive,h_rt=24:00:00,h_data=3.5G
# Email address to notify
## $ -M $USER@mal
# Notify when:
#$ -m bea

#save job info on joblog:
echo "Job $JOB_ID started on: " `hostname -s`
echo "Job $JOB_ID started on: " `date `

# load the job environment:
. /u/local/Modules/default/init/modules.sh
module load julia/1.0.1
module li
which julia

# run julia code
echo "julia normal_cv_nodebias.jl, debias=false"
pwd; julia /u/home/b/biona001/cross_validation/normal_cv_nodebias.jl

#echo job info on joblog:
echo "Job $JOB_ID ended on: " `hostname -s`
# BUG FIX: was "#JOB_ID" (literal text); use $JOB_ID so the id expands.
echo "Job $JOB_ID ended on: " `date `
#echo " "
#!/bin/bash
# Copyright 2021 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Abort on any command failure, including failures inside pipelines.
set -eo pipefail

# Default to the Kokoro-provisioned docuploader service account if the caller
# did not supply credentials.
if [[ -z "${CREDENTIALS}" ]]; then
  CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account
fi

if [[ -z "${STAGING_BUCKET_V2}" ]]; then
  echo "Need to set STAGING_BUCKET_V2 environment variable"
  exit 1
fi

# work from the git root directory
pushd $(dirname "$0")/../../

# install docuploader package
python3 -m pip install gcp-docuploader

# compile all packages
mvn clean install -B -q -DskipTests=true

export NAME=google-cloud-apigee-connect
# Extract this artifact's version from versions.txt (format name:released:current).
export VERSION=$(grep ${NAME}: versions.txt | cut -d: -f3)

# V3 generates docfx yml from javadoc
# generate yml
mvn clean site -B -q -P docFX

# copy README to docfx-yml dir and rename index.md
cp README.md target/docfx-yml/index.md
# copy CHANGELOG to docfx-yml dir and rename history.md
cp CHANGELOG.md target/docfx-yml/history.md

pushd target/docfx-yml

# create metadata
python3 -m docuploader create-metadata \
 --name ${NAME} \
 --version ${VERSION} \
 --xrefs devsite://java/gax \
 --xrefs devsite://java/google-cloud-core \
 --xrefs devsite://java/api-common \
 --xrefs devsite://java/proto-google-common-protos \
 --xrefs devsite://java/google-api-client \
 --xrefs devsite://java/google-http-client \
 --xrefs devsite://java/protobuf \
 --language java

# upload yml to production bucket
python3 -m docuploader upload . \
 --credentials ${CREDENTIALS} \
 --staging-bucket ${STAGING_BUCKET_V2} \
 --destination-prefix docfx
|
//
// PickerView.h
// MeiYiQuan
//
// Created by 任强宾 on 16/9/28.
// Copyright © 2016年 任强宾. All rights reserved.
//
#import <UIKit/UIKit.h>

// Picker style: plain, or with confirm/cancel buttons.
typedef NS_ENUM(NSInteger, PickerViewType)
{
    PickerViewTypeNormal = 0,
    PickerViewTypeHaveBtn
};

// Callback delivering the currently selected row values, one per component.
typedef void(^PickerViewBlock)(NSArray *selectedArray);

@interface PickerView : UIView <UIPickerViewDelegate, UIPickerViewDataSource>

/**
 *  Custom initializer.
 *
 *  @param style     picker style (with or without buttons)
 *  @param dataArray data passed in as an array of arrays (one per component)
 *  @param block     internal selection events are delivered through this block
 */
- (instancetype)initWithStyle:(PickerViewType)style dataArray:(NSArray *)dataArray pickerViewHeight:(CGFloat)pickerViewHeight inView:(UIView *)view block:(PickerViewBlock)block;
// Show the picker.
- (void)show;
// Hide the picker.
- (void)dismiss;
// Bind default selected values from the given array.
- (void)configDataFromArray:(NSArray *)array;
@end
|
// Person class: simple record of a person's contact details.
class Person {
    // instance variables
    public string firstName;
    public string lastName;
    public int age;
    public string address;
    public string phone;

    // Constructor.
    // BUG FIX: was implicitly private (no access modifier), so the class
    // could not be instantiated from outside; made public.
    public Person(string fname, string lname, int age, string add, string phone)
    {
        firstName = fname;
        lastName = lname;
        this.age = age;
        address = add;
        this.phone = phone;
    }

    // Write every field to the console, one labelled line each.
    public void display()
    {
        Console.WriteLine("First Name: {0} ", firstName);
        Console.WriteLine("Last Name: {0} ", lastName);
        Console.WriteLine("Age: {0} ", age);
        Console.WriteLine("Address: {0} ", address);
        Console.WriteLine("Phone: {0} ", phone);
    }
}
//
// Registry of per-node language-switch callbacks, invoked by setLanguage().
const setLanguages = [];

/**
 * Create a DOM text node whose content can later be switched between
 * English and French. When no French text is supplied (or it is falsy),
 * the English text is reused, exactly as before.
 */
function TextNode( engText, frText ) {
    const french = frText ? frText : engText;
    const node = document.createTextNode( engText );
    const applyLanguage = ( language ) => {
        node.nodeValue = language === 'english' ? engText : french;
    };
    setLanguages.push( applyLanguage );
    return node;
}
//
/**
 * Switch every registered TextNode to the given language
 * ('english' selects the English text, anything else the French).
 */
function setLanguage( language ) {
    for ( const applyLanguage of setLanguages ) {
        applyLanguage( language );
    }
}
export { setLanguage }
export default {
// MENU
menu: {
consulting: TextNode( "Consulting", "Conseil" ),
prototypes: TextNode( "Prototypes & Viewers", "Prototypes & Rendus" ),
casting: TextNode( "Casting Parts", "Pièces de Fonte" ),
doc: TextNode( "Documentation" ),
webdev: TextNode( "Web Development", "Développement Web" ),
samples: TextNode( "Demos", "Démonstrations" ),
contact: TextNode( 'Contact' /* 'Contact & Rate', 'Contact & Tarifs' */ )
},
// HOMEPAGE
homepage: {
// INTRO SCENE
intro: {
intro: TextNode(
'High Jewellery Design',
'Design de Haute Joaillerie'
)
},
// EXPERTISE SCENE
expertise: {
title: TextNode( "Consulting", "Conseil" ),
text: TextNode(
`Backed by my experience working with the most demanding
high jewelry companies, I am able to advise you
in your projects.`,
`Fort d'une expérience de travail avec les maisons de
joaillerie les plus exigeantes, je suis à même de vous
conseiller dans vos projets.`
),
more: TextNode( "learn more ", "voir plus " )
},
// PROTOTYPES SCENE
prototypes: {
title: TextNode(
"Prototypes & viewers",
"Prototypes & rendus"
),
text: TextNode(
`Early in development you
will review resin
prototypes and photorealistic
viewers in order to pinpoint
issues fast and respect
your schedule.`,
`Tôt pendant le développement,
vous examinerez des rendus et des
prototypes en résine pour
identifier les problèmes rapidement et
respecter vos échéances.`
),
more: TextNode( "learn more ", "voir plus " )
},
// CASTING PARTS SCENE
casting: {
title: TextNode( "Casting parts", "Pièces de fonte" ),
text: TextNode(
`Once your jewel is designed,
I can supply production-ready files
for 3D printing or CNC milling`,
`Une fois votre bijou conçu,
Je peux fournir des fichiers prêts pour
la production via impression 3D ou usinage.`
),
more: TextNode( "learn more ", "voir plus " )
},
// DOCUMENTATION SCENE
doc: {
title: TextNode( "Documentation" ),
text: TextNode(
`I can provide the documentation that
suit your need. Assembly instructions,
stone setting blueprints, painting layouts...`,
`Je peux fournir la documentation
dont vous avez besoin. Plans de montage,
nomenclatures, plans quatre vues...`
),
more: TextNode( "learn more ", "voir plus " )
},
// CONTACT SCENE
contact: {
title: TextNode( "More information :", "Plus d'information :" ),
webdevLink: TextNode( 'Web development ', 'Développement Web ' ),
samplesLink: TextNode( 'Demos ', 'Démonstrations ' ),
contactLink: TextNode( 'Contact' /* 'Contact & Rate', 'Contact & Tarifs' */ )
}
},
// INFO MODULES
modules: {
// EXPERTISE
expertise: {
title: TextNode( "Consulting", "Conseil" ),
presentation: {
title: TextNode( "Who am I ?", "Qui suis-je ?" ),
speech1: TextNode(
`I worked for more than two years as a
technical designer for <NAME>
in Paris, which is a workshop famous in
Place Vendôme high jewelry community for
its quality work. Thereafter I lived in
Hong Kong during three years,
working for Michelle Ong who makes jewels
for the most picky amateurs of this city-state.`,
`J'ai travaillé pendant plus de deux ans chez
<NAME> en tant
que CAO ( Concepteur Assisté par Ordinateur ),
cet atelier est connu dans la communauté de
la place Vendôme pour son travail de grande qualité.
J'ai ensuite vécu trois ans à Hong Kong où
j'ai travaillé pour <NAME>,
qui fabrique des bijoux pour les plus fins
connaisseurs de cette cité-état.`
),
speech2: TextNode(
`On the basis of my design and project management
skills acquired during these years,
I can give you precious advices to help you making
your projects happen.`,
`Sur la base de ces compétences en design et en gestion de projet,
je peux vous offrir un conseil précieux pour
vous aider à réaliser vos objectifs.`
)
},
service: {
title: TextNode( "My consulting services", "Mon service de conseil" ),
speech: TextNode(
`You can hire me to manage your design projects,
or as an advisor who can help you making the right
decision when you are in occasional need.`,
`Vous pouvez m'engager pour gérer vos projets
de design, ou en tant que consultant pour
vous aider à prendre la bonne décision
quand vous avez une question ponctuelle.`
)
}
},
// PROTOTYPES
prototypes: {
title: TextNode( 'Prototypes & 3D viewers', "Prototypes & rendus" ),
speech1: TextNode(
`Designing is a progressive process,
scattered with unexpected issues.
We are going to work jointly to
reveal all the unforeseen details and
oversights of the original project,
in order to arrive at a product
esthetically and technically flawless
at the delivery date you will request.`,
`Le design est un processus évolutif,
parsemé d'imprévus. Nous allons
travailler conjointement pour révéler les
problèmes conceptuels du design originel,
pour pouvoir arriver à un produit esthétiquement
et techniquement parfait à la date de
livraison que vous aurez définie.`
),
speech2: TextNode(
`To arrive at this perfect design and
to save us from discovering flaws
late in product development,
we will regularly organize design reviews
supported by resin prototypes and/or 3D viewers.`,
`Pour obtenir ce design parfait et nous
épargner la découverte de défauts tard
dans la conception du produit, nous
organiserons régulièrement des révisions
du design, supportées par des prototypes
en résine et/ou des rendus 3D photoréalistes.`
),
speech3: TextNode(
`If you don't happen to own a 3D printer to print
the prototypes, don't worry : I can handle it and ship them to you.`,
`Si vous ne possédez pas d'imprimante 3D pour
l'impression des prototypes, pas d'inquiétude :
je m'en occupe pour vous, et vous les fait
livrer.`
)
},
// CASTING
casting: {
title: TextNode( 'Casting Parts', 'Pièces de fonte' ),
speech1: TextNode(
`Your project is designed. Now you can
opt for lost-wax casting via 3D printing to
speed up your process, and ensure that the
finished jewel you will get is as close as possible
from what we designed together.
On the basis of my jewelry handcrafting know-how,
particularly in the stone setting field,
I can adapt your design to the technical requirements
of 3D printing, casting, and all the subsequent stages
of jewelry handcrafting.`,
`Votre projet est conçu et dessiné. Vous pouvez
maintenant opter pour la fonte à cire perdue via
l'impression 3D pour accélerer la réalisation,
et garantir que le bijou fini sera au plus
proche de ce que nous avons conçu ensemble.
Fort de mes connaissances pratiques en fabrication
de bijou, particulièrement dans le domaine
du sertissage, je peux adapter votre design
aux contraintes techniques de l'impression 3D,
de la fonte à cire perdue, et de toutes les
étapes ultérieures de fabrication.`
),
speech2: TextNode(
`If you are not lucky enough to own a 3D printer,
I can lead you to suppliers who can handle your files
from 3D printing to casting.`,
`Si vous ne possédez pas votre propre imprimante 3D
adaptée à la fonte à cire perdue, je me propose de
vous diriger vers des fournisseurs qui s'occuperont
de vos fichier de l'impression 3D à la fonte.`
)
},
// DOCUMENTATION
doc: {
title: TextNode( 'Documentation' ),
speech1: TextNode(
`Once your project is designed,
you are not left on your own.
I send you all the documentation you need to
assemble the parts, set the stones,
and even communicate and advertise about your jewel.`,
`Une fois votre projet conçu, vous n'êtes pas
laissé à vous-même. Je vous envoie toute la
documentation dont vous avez besoin pour
assembler les éléments, sertir les pierres,
et même communiquer sur votre bijou.`
),
speech2: TextNode(
`I adapt the documentation to every project,
and listen to your special demands :
parts spreadsheets for your inventory,
documents for public presentations
or customer use, 4-views layouts for painting...
Name your own needs.`,
`J'adapte la documentation à chaque projet,
et répond à vos demandes spéciales :
tableaux pour vos inventaires, documents pour
vos présentations publiques ou privées, plans
quatre vues à gouacher... Vous n'avez qu'à
demander.`
)
},
// WEB DEVELOPMENT
webdev: {
title: TextNode( 'Web Development', 'Développement web' ),
speech1: TextNode(
`Web development has been my passion
for some years, and I offer my
services in this field. My skills in jewelry,
3D modeling and web development are a rare
and precious combination if you are a
jewelry company with a project of high-end online service.`,
`Le développement web a été ma passion
ces dernières années, et j'offre mes services
dans ce domaine. Mes compétences en joaillerie,
modélisation 3D et développement web sont une
combinaison rare et précieuse si vous êtes une
entreprise de joaillerie avec un projet de service
en ligne.`
),
speech2: TextNode(
`This website you are browsing was entirely designed and
coded by myself with the latest web technologies
( Node.js, Three.js, Webpack, etc... ).
The 3D models displayed in the various scenes
of the front page are mine, and required
a specific treatment for realtime rendering.`,
`Ce site que vous visitez a été entièrement designé et
programmé par moi-même avec les dernières
technologies web ( Node.js, Three.js,
Webpack, etc... ). Les modèles 3D affichés
dans chaque scène de la page d'accueil sont les miens,
et ont nécessité un traitement spécial pour le
rendu en temps réel.`
)
},
// SAMPLES
samples: {
title: TextNode( "Demos", "Démonstrations" ),
renders: TextNode( 'Photorealistic Rendering', 'Rendus Photoréalistes' ),
cad: TextNode( 'CAD 3D Models', 'Models 3D CAO' )
},
// CONTACT
contact: {
title: TextNode( 'Contact' /* 'Contact & Rate', 'Contact & Tarifs' */ ),
speech: TextNode(
`I am eager to answer any information
demand and make quotations tailored to
your special needs. If you want to discuss verbally,
we can arrange a Skype or Zoom meeting on request.`,
`Je suis ravi de répondre à toute demande
d'information, et de faire un devis ajusté
à vos besoins. Si vous voulez discuter de
vive voix, nous pouvons organiser une entrevue
via Skype ou Zoom.`
),
hourlyRate: TextNode(
'Hourly Rate : 95 USD',
'Tarif Horaire : 80 €'
)
}
}
} |
<filename>src/main/java/org/olat/core/commons/services/doceditor/office365/manager/DateHelper.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.core.commons.services.doceditor.office365.manager;
import java.util.Date;
/**
*
* Initial date: 2 May 2019<br>
* @author uhensler, <EMAIL>, http://www.frentix.com
*
*/
public class DateHelper {

	// .NET/Windows-style ticks are 100ns units counted from 0001-01-01T00:00:00Z.
	// This is the tick count at the Unix epoch (1970-01-01T00:00:00Z).
	// Source https://stackoverflow.com/a/44028583
	private static final long TICKS_AT_EPOCH = 621355968000000000L;
	// Number of 100ns ticks in one millisecond.
	private static final long TICKS_PER_MILLISECOND = 10000;

	/**
	 * Converts a .NET-style UTC tick count to a {@link Date}.
	 *
	 * @param utcTicks ticks (100ns units) since 0001-01-01T00:00:00Z
	 * @return the equivalent Date; sub-millisecond precision is truncated
	 */
	public static Date ticksToDate(long utcTicks) {
		long millisSinceUnixEpoch = (utcTicks - TICKS_AT_EPOCH) / TICKS_PER_MILLISECOND;
		return new Date(millisSinceUnixEpoch);
	}
}
|
#!/usr/bin/bash

# Configure the OpenStack Ironic services inside this container, then prepare
# the shared directories used by the HTTP server and the Prometheus exporter.

. /bin/ironic-common.sh

# Port of the deploy HTTP server referenced in [deploy] http_url below.
HTTP_PORT=${HTTP_PORT:-"80"}
MARIADB_PASSWORD=${MARIADB_PASSWORD:-"change_me"}

# Size the API worker pool from the CPU count, capped at 12.
# (grep -c replaces the original cat|grep|wc -l pipeline: same count, fewer processes.)
NUMPROC=$(grep -c "^processor" /proc/cpuinfo)
NUMWORKERS=$(( NUMPROC < 12 ? NUMPROC : 12 ))

# Whether to enable fast_track provisioning or not
IRONIC_FAST_TRACK=${IRONIC_FAST_TRACK:-true}

# Provided by ironic-common.sh; blocks until IRONIC_IP / IRONIC_URL_HOST are resolvable.
wait_for_interface_or_ip

# Keep a pristine copy of the stock config before merging our overrides into it.
cp /etc/ironic/ironic.conf /etc/ironic/ironic.conf_orig

crudini --merge /etc/ironic/ironic.conf <<EOF
[DEFAULT]
my_ip = $IRONIC_IP
[api]
host_ip = ::
api_workers = $NUMWORKERS
[conductor]
api_url = http://${IRONIC_URL_HOST}:6385
[database]
connection = mysql+pymysql://ironic:${MARIADB_PASSWORD}@localhost/ironic?charset=utf8
[deploy]
http_url = http://${IRONIC_URL_HOST}:${HTTP_PORT}
fast_track = ${IRONIC_FAST_TRACK}
[inspector]
endpoint_override = http://${IRONIC_URL_HOST}:5050
EOF

mkdir -p /shared/html
mkdir -p /shared/ironic_prometheus_exporter
|
#!/bin/bash
#=================================================
# Description: DIY script — customizes an OpenWrt source tree before build
# License: MIT
# Author: eSirPlayground
# Youtube Channel: https://goo.gl/fvkdwm
#=================================================

# 1. Modify default IP: swap the stock LAN address 192.168.1.1 for 10.0.0.1
#    in the base-files network config generator.
sed -i 's/192.168.1.1/10.0.0.1/g' openwrt/package/base-files/files/bin/config_generate

# 2. Clear the login password: strip the default root password hash from
#    Lean's default-settings package (root login becomes password-less).
sed -i 's/$1$V4UetPzk$CYXluq4wUazHjmCDBCqXF.//g' openwrt/package/lean/default-settings/files/zzz-default-settings

# 3. Replace with JerryKuKu’s Argon theme — currently disabled (command commented out).
#rm openwrt/package/lean/luci-theme-argon -rf
|
<filename>server/node/src/main/java/org/kaaproject/kaa/server/admin/client/mvp/activity/GetUserConfigActivity.java
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.admin.client.mvp.activity;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.user.client.rpc.AsyncCallback;
import java.util.Collections;
import java.util.List;
import org.kaaproject.avro.ui.gwt.client.util.BusyAsyncCallback;
import org.kaaproject.kaa.common.dto.EndpointUserConfigurationDto;
import org.kaaproject.kaa.server.admin.client.KaaAdmin;
import org.kaaproject.kaa.server.admin.client.mvp.ClientFactory;
import org.kaaproject.kaa.server.admin.client.mvp.place.GetUserConfigPlace;
import org.kaaproject.kaa.server.admin.client.mvp.view.GetUserConfigView;
import org.kaaproject.kaa.server.admin.client.servlet.ServletHelper;
import org.kaaproject.kaa.server.admin.client.util.Utils;
import org.kaaproject.kaa.server.admin.shared.schema.SchemaInfoDto;
/**
 * Activity that lets an admin pick a configuration schema of an application,
 * enter an external user id, and download that user's configuration.
 */
public class GetUserConfigActivity extends AbstractDetailsActivity<EndpointUserConfigurationDto, GetUserConfigView, GetUserConfigPlace> {

    /** Id of the application whose user configurations are queried (taken from the place). */
    private String applicationId;

    public GetUserConfigActivity(GetUserConfigPlace place, ClientFactory clientFactory) {
        super(place, clientFactory);
        this.applicationId = place.getApplicationId();
    }

    @Override
    protected String getEntityId(GetUserConfigPlace place) {
        // No persisted entity backs this screen; a fresh DTO is always created.
        return null;
    }

    @Override
    protected GetUserConfigView getView(boolean create) {
        return clientFactory.getUserConfigView();
    }

    @Override
    protected EndpointUserConfigurationDto newEntity() {
        return new EndpointUserConfigurationDto();
    }

    @Override
    protected void onEntityRetrieved() {
        // Load the application's configuration schemas and pre-select the newest
        // one in the view's schema picker.
        KaaAdmin.getDataSource().getUserConfigurationSchemaInfosByApplicationId(applicationId,
            new BusyAsyncCallback<List<SchemaInfoDto>>() {
                @Override
                public void onSuccessImpl(List<SchemaInfoDto> result) {
                    // Fix: guard against an application without configuration schemas —
                    // result.get(result.size() - 1) previously threw IndexOutOfBoundsException.
                    if (result == null || result.isEmpty()) {
                        return;
                    }
                    Collections.sort(result);
                    // After sorting, the last element is the newest schema.
                    SchemaInfoDto schemaInfo = result.get(result.size() - 1);
                    detailsView.getConfigurationSchemaInfo().setValue(schemaInfo);
                    detailsView.getConfigurationSchemaInfo().setAcceptableValues(result);
                }

                @Override
                public void onFailureImpl(Throwable caught) {
                    Utils.handleException(caught, detailsView);
                }
            });

        // Download button: look up the configuration for the entered external user id
        // and selected schema version, then stream it to the browser via the servlet.
        registrations.add(detailsView.getDownloadUserCongigurationButton().addClickHandler(new ClickHandler() {
            @Override
            public void onClick(ClickEvent clickEvent) {
                KaaAdmin.getDataSource().findUserConfigurationByExternalUIdAndAppIdAndSchemaVersion(
                    detailsView.getExternalUserId().getValue(),
                    applicationId,
                    detailsView.getConfigurationSchemaInfo().getValue().getVersion(),
                    new BusyAsyncCallback<EndpointUserConfigurationDto>() {
                        @Override
                        public void onSuccessImpl(EndpointUserConfigurationDto result) {
                            ServletHelper.downloadUserConfiguration(
                                detailsView.getExternalUserId().getValue(),
                                String.valueOf(detailsView.getConfigurationSchemaInfo().getValue().getVersion()),
                                applicationId
                            );
                        }

                        @Override
                        public void onFailureImpl(Throwable caught) {
                            Utils.handleException(caught, detailsView);
                        }
                    });
            }
        }));
    }

    @Override
    protected void onSave() {
        // Nothing to persist: this activity only reads and downloads configurations.
    }

    @Override
    protected void getEntity(String id, AsyncCallback<EndpointUserConfigurationDto> callback) {
        // Intentionally empty: the entity is always created locally via newEntity().
    }

    @Override
    protected void editEntity(EndpointUserConfigurationDto entity, AsyncCallback<EndpointUserConfigurationDto> callback) {
        // Intentionally empty: see onSave().
    }

    @Override
    protected void bind(EventBus eventBus) {
        super.bind(eventBus);
    }
}
|
<reponame>SuadeLabs/boolean.py<filename>setup.py
#!/usr/bin/env python
# Packaging script for the boolean.py library (setuptools).

# Kept for Python 2.7 compatibility (see the classifiers below).
from __future__ import absolute_import

from setuptools import find_packages
from setuptools import setup

# Long description shown on the PyPI project page.
long_desc = '''
This library helps you deal with boolean expressions and algebra with variables
and the boolean functions AND, OR, NOT.
You can parse expressions from strings and simplify and compare expressions.
You can also easily create your custom algreba and mini DSL and create custom
tokenizers to handle custom expressions.
For extensive documentation look either into the docs directory or view it online, at
https://booleanpy.readthedocs.org/en/latest/
https://github.com/bastikr/boolean.py
Copyright (c) 2009-2020 <NAME>, <EMAIL> and others
SPDX-License-Identifier: BSD-2-Clause
'''

setup(
    name='boolean.py',
    version='3.9',
    license='BSD-2-Clause',
    description='Define boolean algebras, create and parse boolean '
        'expressions and create custom boolean DSL.',
    long_description=long_desc,
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/bastikr/boolean.py',
    # Auto-discover all packages in the repository.
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    # Legacy `python setup.py test` hooks (deprecated in modern setuptools).
    test_loader='unittest:TestLoader',
    test_suite='boolean.test_boolean',
    keywords='boolean expression, boolean algebra, logic, expression parser',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Topic :: Scientific/Engineering :: Mathematics',
        'Topic :: Software Development :: Compilers',
        'Topic :: Software Development :: Libraries',
        'Topic :: Utilities',
    ],
)
|
<reponame>dutinmeow/library
#pragma region str_pi
#ifndef STR_PI_HPP
#define STR_PI_HPP
namespace str {

// Knuth–Morris–Pratt prefix function.
// Returns a vector p where p[i] is the length of the longest proper prefix
// of s[0..i] that is also a suffix of s[0..i] (p[0] is always 0).
vector<int> pi(const string &s) {
	const int n = (int)s.length();
	vector<int> p(n);
	for (int i = 1; i < n; i++) {
		// Start from the border of the previous position and fall back
		// through shorter borders until one can be extended by s[i].
		int k = p[i - 1];
		while (k > 0 && s[i] != s[k])
			k = p[k - 1];
		if (s[i] == s[k])
			k++;
		p[i] = k;
	}
	return p;
}

}
#endif
#pragma endregion str_pi |
package com.bhm.sdk.rxlibrary.rxbus;
/**
 * Threading options for delivering events in this rxbus package: the thread a
 * subscriber's callback runs on.
 *
 * Created by bhm on 2018/5/15.
 */
public enum ThreadMode {
	/**
	 * Deliver on the thread the event was posted from (no thread switch).
	 */
	CURRENT_THREAD,
	/**
	 * Deliver on the Android main (UI) thread.
	 */
	MAIN,
	/**
	 * Deliver on a newly spawned background thread.
	 */
	NEW_THREAD
}
|
package entity
// Student is a student account record; the struct tags map each field to both
// its JSON representation and its database column.
type Student struct {
	ID      string `json:"id" db:"id"`
	Email   string `json:"email" db:"email"`
	Name    string `json:"name" db:"name"`
	Picture string `json:"picture" db:"picture"` // profile picture (presumably a URL/path — confirm with callers)
}
// EnrollWebinarParam carries the identifiers needed to enroll in a webinar.
type EnrollWebinarParam struct {
	WebinarID int64  `json:"webinar_id" db:"webinar_id"`
	TeacherID string `json:"teacher_id" db:"teacher_id"`
}

// CancelEnrollmentWebinarParam carries the identifiers needed to cancel an
// existing webinar enrollment; it mirrors EnrollWebinarParam field-for-field.
type CancelEnrollmentWebinarParam struct {
	WebinarID int64  `json:"webinar_id" db:"webinar_id"`
	TeacherID string `json:"teacher_id" db:"teacher_id"`
}
// Validate checks the enrollment parameters.
// Currently a stub: it accepts every value and always returns nil.
func (e EnrollWebinarParam) Validate() error {
	return nil
}

// Validate checks the cancellation parameters.
// Currently a stub: it accepts every value and always returns nil.
func (c CancelEnrollmentWebinarParam) Validate() error {
	return nil
}
|
# Train a PPO agent (tonic's PyTorch implementation) on BipedalWalker-v3 for
# 100k environment steps, saving a checkpoint at the end (save_steps == steps).
python -m tonic.train \
--header "import tonic.torch" \
--agent "tonic.torch.agents.PPO()" \
--environment "tonic.environments.Gym('BipedalWalker-v3')" \
--trainer "tonic.Trainer(steps=100000, save_steps=100000)" \
--seed 0

# Plot the training curves for this environment and overlay PPO baselines.
python -m tonic.plot --path BipedalWalker-v3 --show True --baselines PPO

# Replay the trained policy from the seed-0 run.
python -m tonic.play --path BipedalWalker-v3/PPO/0
|
-- phpMyAdmin SQL Dump
-- version 4.8.2
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Jan 17, 2019 at 11:28 AM
-- Server version: 10.1.34-MariaDB
-- PHP Version: 7.2.7
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `smartshop`
--
-- --------------------------------------------------------
--
-- Table structure for table `admins`
--
-- Back-office administrator accounts.
-- NOTE(review): no PRIMARY KEY / AUTO_INCREMENT is declared here; in phpMyAdmin
-- dumps those are usually added by later ALTER TABLE statements — confirm.
CREATE TABLE `admins` (
  `id` int(11) NOT NULL,
  `password` varchar(255) NOT NULL,                    -- sample rows mix plaintext and bcrypt hashes
  `email` varchar(255) NOT NULL,
  `phone` varchar(255) NOT NULL,
  `fullname` varchar(255) NOT NULL,
  `address` varchar(255) DEFAULT NULL,
  `img` varchar(255) DEFAULT 'public/images/user.png', -- avatar path, with bundled default image
  `role` tinyint(4) NOT NULL DEFAULT '0',              -- role flag; sample data uses 1 — semantics not in this dump
  `remember_token` varchar(100) DEFAULT NULL,          -- presumably a "remember me" session token — verify in app code
  `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
  `status` tinyint(4) NOT NULL DEFAULT '0'             -- active flag; sample data uses 1
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `admins`
--
INSERT INTO `admins` (`id`, `password`, `email`, `phone`, `fullname`, `address`, `img`, `role`, `remember_token`, `created_at`, `updated_at`, `status`) VALUES
(1, 'admin123', '<EMAIL>', '0914249694', 'Nguyến Văn Admin', 'Nam định', 'man.png', 1, NULL, '2018-05-20 17:00:00', '2018-05-20 17:00:00', 1),
(6, '$2y$10$wq48TKxpPv7r7B6FOc/xmOLarrI2UrOpRdYTREpGG6gJ0ubKJSffa', '<EMAIL>', '0165757215', 'Huy Cảnh', 'Nam Định', 'X69t_05-1600x902.jpg', 1, 'bx2nVZ0lpu1TJU5YyVnNTCiJ9pSsfUz89pSMQMJAdfpQDiH7xuW2WUJpbxfT', '2018-11-12 01:59:12', '2018-11-12 09:28:19', 1);
-- --------------------------------------------------------
--
-- Table structure for table `banners`
--
-- Site banners (the sample rows are image slides).
CREATE TABLE `banners` (
  `id` int(11) NOT NULL,
  `name` varchar(255) NOT NULL,
  `img` varchar(255) DEFAULT NULL,      -- banner image file name
  `link` varchar(255) DEFAULT NULL,     -- optional click-through URL (NULL in all sample rows)
  `description` text,
  `position` varchar(255) DEFAULT NULL, -- placement slot; NULL in all sample rows
  `created_at` date NOT NULL,
  `update_at` date DEFAULT NULL,        -- NOTE(review): named `update_at`, unlike `updated_at` elsewhere — confirm intended
  `status` tinyint(4) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `banners`
--
INSERT INTO `banners` (`id`, `name`, `img`, `link`, `description`, `position`, `created_at`, `update_at`, `status`) VALUES
(1, 'điện thoại 1', 'bg-1-min5whqk-1921053czv-1920duj7o-1920.png', NULL, NULL, NULL, '2018-11-07', NULL, 0),
(2, 'điện thoại 2', 'bg-5-minXJUYC-1920.png', NULL, NULL, NULL, '2018-11-07', NULL, 0),
(3, 'điện thoại 3', 'phone-old-year-built-1955-bakelite-163007.jpeg', NULL, NULL, NULL, '2018-11-07', NULL, 0),
(4, 'điện thoại 4', 'photo-hero.jpg', NULL, NULL, NULL, '2018-11-07', NULL, 0),
(5, 'điện thoại 5', 'pexels-photo-1434819.jpeg', NULL, NULL, NULL, '2018-11-07', NULL, 0);
-- --------------------------------------------------------
--
-- Table structure for table `brands`
--
-- Product brands / manufacturers.
CREATE TABLE `brands` (
  `id` int(11) NOT NULL,
  `name` varchar(255) NOT NULL,
  `slug` varchar(255) NOT NULL,      -- URL-friendly identifier
  `image` varchar(255) DEFAULT NULL, -- brand logo path (NULL in all sample rows)
  `description` text,
  `created_at` date NOT NULL,
  `updated_at` date DEFAULT NULL,
  `status` tinyint(4) NOT NULL DEFAULT '0' -- sample data uses 1 for visible brands
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `brands`
--
INSERT INTO `brands` (`id`, `name`, `slug`, `image`, `description`, `created_at`, `updated_at`, `status`) VALUES
(1, 'Apple', 'Apple', NULL, NULL, '2018-05-23', NULL, 1),
(2, 'Sony', 'Sony', NULL, NULL, '2018-05-23', NULL, 1),
(3, 'Asus', 'asus', NULL, NULL, '2018-05-23', NULL, 1),
(4, 'Xiaomi', 'xiaomi', NULL, NULL, '2018-05-23', NULL, 1),
(5, 'Samsung', 'samsung', NULL, NULL, '2018-05-23', NULL, 1),
(6, 'Kingston', 'Kingston', NULL, 'Hãng sản xuất thẻ nhớ ', '2018-05-23', NULL, 1),
(7, 'Unik', 'unik', NULL, 'Hãng sản xuất tai nghe, ốp lưng ..', '2018-05-23', NULL, 1),
(8, 'iCORE', 'iCORE', NULL, 'Hãng sản xuất sạc cáp điện thoại', '2018-05-23', NULL, 1);
-- --------------------------------------------------------
--
-- Table structure for table `customers`
--
-- Shop customer accounts.
CREATE TABLE `customers` (
  `id` int(11) NOT NULL,
  `fullname` varchar(255) NOT NULL,
  `email` varchar(255) NOT NULL,
  `phone` varchar(255) NOT NULL,
  `address` varchar(255) DEFAULT NULL,
  `province_id` int(11) DEFAULT NULL,       -- references province data (no FK constraint declared in this dump)
  `district_id` int(11) DEFAULT NULL,       -- references `districts` (no FK constraint declared in this dump)
  `gender` tinyint(4) DEFAULT NULL,         -- sample rows use 0/1 or NULL — mapping not defined in this dump
  `password` varchar(100) NOT NULL,         -- sample rows mix plaintext and bcrypt hashes
  `customer_group_id` int(11) DEFAULT NULL, -- NULL in all sample rows
  `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `status` tinyint(4) NOT NULL DEFAULT '1'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `customers`
--
INSERT INTO `customers` (`id`, `fullname`, `email`, `phone`, `address`, `province_id`, `district_id`, `gender`, `password`, `customer_group_id`, `created_at`, `updated_at`, `status`) VALUES
(1, 'Kotono', '<EMAIL>', '01234731395', '337,Cầu giấy', 59, 358, 1, '123456abcde', NULL, '2018-05-23 17:00:00', '2018-05-23 17:00:00', 1),
(2, 'customer_test', '<EMAIL>', '0124753951', '338.Cầu gò', 50, 537, 1, '123456abcde', NULL, '2018-05-23 17:00:00', '2018-05-23 17:00:00', 1),
(3, 'tranthutrang', '<EMAIL>', '12345666666', '330 Nguyễn Tư Giản', 22, 241, 0, '1232211abc', NULL, '2018-05-23 17:00:00', '2018-05-23 17:00:00', 1),
(4, 'nguyendonghung', '<EMAIL>', '0167881545845', '20 Cầu Giấy', 22, 242, 1, '1245abc', NULL, '2018-05-26 17:00:00', '2018-05-26 17:00:00', 1),
(5, 'danghuycanh', '<EMAIL>', '0231456789', '10 Khương Hạ', 24, 230, 1, '123abcd', NULL, '2018-05-24 17:00:00', '2018-05-24 17:00:00', 1),
(6, 'nguyenthianh', '<EMAIL>', '012457954', '200 Định Công', 19, 145, 0, '45ghj', NULL, '2018-04-30 17:00:00', '2018-04-30 17:00:00', 1),
(7, 'nguyenbagiap', '<EMAIL>', '012457896554', '180 Nguyễn Trãi', 24, 158, 1, '154acb', NULL, '2018-05-25 17:00:00', '2018-05-26 17:00:00', 1),
(8, 'dovanthang', '<EMAIL>', '1245875224', '150 Hồ Tùng Mậu', 21, 402, NULL, '158fgh', NULL, '2018-05-02 17:00:00', '2018-05-02 17:00:00', 1),
(9, 'nguyentiendung', '<EMAIL>', '019823222', '90 Nam Đàn', 20, 278, 1, '123abcdf', NULL, '2018-05-10 17:00:00', '2018-05-10 17:00:00', 1),
(11, 'nguyenlananh', '<EMAIL>', '0193847554', '80 <NAME>', 15, 665, 0, '456rgf', NULL, '2018-05-06 17:00:00', '2018-05-06 17:00:00', 1),
(12, 'aaaaaaasd', '<EMAIL>', '123456789', '', 20, 145, 1, '0123456789', NULL, '2018-05-08 17:00:00', '2018-07-02 17:00:00', 1),
(13, '<NAME>', '<EMAIL>', '09876453627', '347 Cổ Nhuế', 1, 15, 1, '$2y$10$7NHH3J05LHkoXmXWD1/ogOGuJV3YxvrSwH5bROdnjs/DvXD30/.7a', NULL, '2018-11-14 00:46:25', '2018-11-14 00:46:25', 1);
-- --------------------------------------------------------
--
-- Table structure for table `districts`
--
-- Administrative districts; each row belongs to a province via `province_id`.
CREATE TABLE `districts` (
  `id` int(11) NOT NULL,
  `name` varchar(255) NOT NULL,
  `province_id` int(11) NOT NULL -- parent province id (no FK constraint declared in this dump)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `districts`
--
INSERT INTO `districts` (`id`, `name`, `province_id`) VALUES
(1, 'Quận Ba Đình', 1),
(2, 'Quận Hoàn Kiếm', 1),
(3, 'Quận <NAME>', 1),
(4, '<NAME>', 1),
(5, '<NAME>', 1),
(6, '<NAME>', 1),
(7, '<NAME>', 1),
(8, '<NAME>', 1),
(9, '<NAME>', 1),
(10, '<NAME>', 1),
(11, '<NAME>', 1),
(12, '<NAME>', 1),
(13, '<NAME> Liêm', 1),
(14, '<NAME>', 1),
(15, '<NAME> Liêm', 1),
(16, '<NAME>', 1),
(17, '<NAME>', 1),
(18, '<NAME>', 1),
(19, '<NAME>', 1),
(20, '<NAME>', 1),
(21, '<NAME>', 1),
(22, '<NAME>', 1),
(23, '<NAME>', 1),
(24, '<NAME>', 1),
(25, '<NAME>', 1),
(26, '<NAME>', 1),
(27, '<NAME>', 1),
(28, '<NAME>', 1),
(29, '<NAME>', 1),
(30, '<NAME>', 1),
(31, '<NAME> <NAME>', 2),
(32, '<NAME>', 2),
(33, '<NAME>', 2),
(34, '<NAME>', 2),
(35, '<NAME>', 2),
(36, '<NAME>', 2),
(37, '<NAME>', 2),
(38, '<NAME>', 2),
(39, '<NAME>', 2),
(40, '<NAME>', 2),
(41, '<NAME>', 2),
(42, 'Thành phố <NAME>', 3),
(43, '<NAME>', 3),
(44, '<NAME>', 3),
(45, '<NAME>', 3),
(46, '<NAME>', 3),
(47, '<NAME>', 3),
(48, '<NAME>', 3),
(49, '<NAME>', 3),
(50, '<NAME>', 3),
(51, '<NAME>', 3),
(52, '<NAME>', 3),
(53, '<NAME>', 3),
(54, '<NAME>', 3),
(55, 'Thành Phố <NAME>', 4),
(56, '<NAME>', 4),
(57, '<NAME>', 4),
(58, '<NAME>', 4),
(59, '<NAME>', 4),
(60, '<NAME>', 4),
(61, '<NAME>', 4),
(62, '<NAME>', 4),
(63, 'Thành phố <NAME>', 5),
(64, '<NAME>', 5),
(65, '<NAME>', 5),
(66, '<NAME>', 5),
(67, '<NAME>', 5),
(68, '<NAME>', 5),
(69, '<NAME>', 5),
(70, 'Thành phố L<NAME>', 6),
(71, '<NAME>', 6),
(72, '<NAME>', 6),
(73, '<NAME>', 6),
(74, '<NAME>', 6),
(75, '<NAME>', 6),
(76, '<NAME>', 6),
(77, '<NAME>', 6),
(78, '<NAME>', 6),
(79, 'Thành phố <NAME>', 7),
(80, '<NAME>', 7),
(81, '<NAME>', 7),
(82, '<NAME>', 7),
(83, '<NAME>', 7),
(84, '<NAME>', 7),
(85, '<NAME>', 7),
(86, '<NAME>', 7),
(87, '<NAME>', 7),
(88, '<NAME>', 7),
(89, 'Thành phố Lai Châu', 8),
(90, '<NAME>', 8),
(91, '<NAME>', 8),
(92, '<NAME>', 8),
(93, '<NAME>', 8),
(94, '<NAME>', 8),
(95, '<NAME>', 8),
(96, '<NAME>', 8),
(97, 'Thành phố Sơn La', 9),
(98, '<NAME>', 9),
(99, '<NAME>', 9),
(100, '<NAME>', 9),
(101, '<NAME>', 9),
(102, '<NAME>', 9),
(103, '<NAME>', 9),
(104, '<NAME>', 9),
(105, '<NAME>', 9),
(106, '<NAME>', 9),
(107, '<NAME>', 9),
(108, '<NAME>', 9),
(109, 'Thành phố Yên Bái', 10),
(110, '<NAME>', 10),
(111, '<NAME>', 10),
(112, '<NAME>', 10),
(113, '<NAME>', 10),
(114, '<NAME>', 10),
(115, '<NAME>', 10),
(116, 'Huyện <NAME>', 10),
(117, '<NAME>', 10),
(118, 'Thành phố Hòa Bình', 11),
(119, '<NAME>', 11),
(120, '<NAME>', 11),
(121, '<NAME>', 11),
(122, '<NAME>', 11),
(123, '<NAME>', 11),
(124, '<NAME>', 11),
(125, '<NAME>', 11),
(126, '<NAME>', 11),
(127, '<NAME>', 11),
(128, '<NAME>', 11),
(129, 'Thành phố Th<NAME>', 12),
(130, 'Thành phố Sông Công', 12),
(131, '<NAME>', 12),
(132, '<NAME>', 12),
(133, '<NAME>', 12),
(134, '<NAME>', 12),
(135, '<NAME>', 12),
(136, '<NAME>', 12),
(137, '<NAME>', 12),
(138, 'Thành phố Lạng Sơn', 13),
(139, '<NAME>', 13),
(140, '<NAME>', 13),
(141, '<NAME>', 13),
(142, '<NAME>', 13),
(143, '<NAME>', 13),
(144, '<NAME>', 13),
(145, '<NAME>', 13),
(146, '<NAME>', 13),
(147, '<NAME>', 13),
(148, '<NAME>', 13),
(149, 'Thành phố <NAME>', 14),
(150, 'Thành phố Móng Cái', 14),
(151, 'Thành phố Cẩm Phả', 14),
(152, 'Thành phố U<NAME>', 14),
(153, '<NAME>', 14),
(154, '<NAME>', 14),
(155, '<NAME>', 14),
(156, '<NAME>', 14),
(157, '<NAME>', 14),
(158, '<NAME>', 14),
(159, '<NAME>', 14),
(160, '<NAME>', 14),
(161, '<NAME>', 14),
(162, '<NAME>', 14),
(163, 'Thành phố B<NAME>', 15),
(164, '<NAME>', 15),
(165, '<NAME>', 15),
(166, '<NAME>', 15),
(167, '<NAME>', 15),
(168, '<NAME>', 15),
(169, '<NAME>', 15),
(170, '<NAME>', 15),
(171, '<NAME>', 15),
(172, '<NAME>', 15),
(173, 'Thành phố <NAME>', 16),
(174, '<NAME>', 16),
(175, '<NAME>', 16),
(176, '<NAME>', 16),
(177, '<NAME>', 16),
(178, '<NAME>', 16),
(179, '<NAME>', 16),
(180, '<NAME>', 16),
(181, '<NAME>', 16),
(182, '<NAME>', 16),
(183, '<NAME>', 16),
(184, '<NAME>', 16),
(185, '<NAME>', 16),
(186, 'Thành phố Vĩ<NAME>', 17),
(187, '<NAME>', 17),
(188, '<NAME>', 17),
(189, '<NAME>', 17),
(190, '<NAME>', 17),
(191, '<NAME>', 17),
(192, '<NAME>', 17),
(193, '<NAME>', 17),
(194, '<NAME>', 17),
(195, 'Thành phố <NAME>', 18),
(196, '<NAME>', 18),
(197, '<NAME>', 18),
(198, '<NAME>', 18),
(199, '<NAME>', 18),
(200, '<NAME>', 18),
(201, '<NAME>', 18),
(202, '<NAME>', 18),
(203, 'Thành phố <NAME>', 19),
(204, '<NAME>', 19),
(205, '<NAME>', 19),
(206, '<NAME>', 19),
(207, '<NAME>', 19),
(208, '<NAME>', 19),
(209, '<NAME>', 19),
(210, '<NAME>', 19),
(211, '<NAME>', 19),
(212, '<NAME>', 19),
(213, '<NAME>', 19),
(214, '<NAME>', 19),
(215, '<NAME>', 20),
(216, '<NAME>', 20),
(217, '<NAME>', 20),
(218, '<NAME>', 20),
(219, '<NAME>', 20),
(220, '<NAME>', 20),
(221, '<NAME>', 20),
(222, '<NAME>', 20),
(223, '<NAME>', 20),
(224, '<NAME>', 20),
(225, '<NAME>', 20),
(226, '<NAME>', 20),
(227, '<NAME>', 20),
(228, '<NAME>', 20),
(229, '<NAME>', 20),
(230, 'Thành phố <NAME>', 21),
(231, '<NAME>', 21),
(232, '<NAME>', 21),
(233, '<NAME>', 21),
(234, '<NAME>', 21),
(235, '<NAME>', 21),
(236, '<NAME>', 21),
(237, '<NAME>', 21),
(238, '<NAME>', 21),
(239, '<NAME>', 21),
(240, 'Thành phố <NAME>', 22),
(241, '<NAME>', 22),
(242, '<NAME>', 22),
(243, '<NAME>', 22),
(244, '<NAME>', 22),
(245, '<NAME>', 22),
(246, '<NAME>', 22),
(247, '<NAME>', 22),
(248, 'Thành phố <NAME>', 23),
(249, '<NAME>', 23),
(250, '<NAME>', 23),
(251, '<NAME>', 23),
(252, '<NAME>', 23),
(253, '<NAME>', 23),
(254, 'Thành phố Nam Định', 24),
(255, '<NAME>', 24),
(256, '<NAME>', 24),
(257, '<NAME>', 24),
(258, '<NAME>', 24),
(259, '<NAME>', 24),
(260, '<NAME>', 24),
(261, '<NAME>', 24),
(262, '<NAME>', 24),
(263, '<NAME>', 24),
(264, 'Thành phố <NAME>', 25),
(265, 'Thành phố <NAME>', 25),
(266, '<NAME>', 25),
(267, '<NAME>', 25),
(268, '<NAME>', 25),
(269, '<NAME>', 25),
(270, '<NAME>', 25),
(271, '<NAME>', 25),
(272, 'Thành phố <NAME>', 26),
(273, '<NAME>', 26),
(274, '<NAME>', 26),
(275, '<NAME>', 26),
(276, '<NAME>', 26),
(277, '<NAME>', 26),
(278, '<NAME>', 26),
(279, '<NAME>', 26),
(280, '<NAME>', 26),
(281, '<NAME>', 26),
(282, '<NAME>', 26),
(283, '<NAME>', 26),
(284, '<NAME>', 26),
(285, '<NAME>', 26),
(286, '<NAME>', 26),
(287, '<NAME>', 26),
(288, '<NAME>', 26),
(289, '<NAME>', 26),
(290, '<NAME>', 26),
(291, '<NAME>', 26),
(292, '<NAME>', 26),
(293, '<NAME>', 26),
(294, '<NAME>', 26),
(295, '<NAME>', 26),
(296, '<NAME>', 26),
(297, '<NAME>', 26),
(298, '<NAME>', 26),
(299, '<NAME>', 27),
(300, '<NAME>', 27),
(301, '<NAME>', 27),
(302, '<NAME>', 27),
(303, '<NAME>', 27),
(304, '<NAME>', 27),
(305, '<NAME>', 27),
(306, '<NAME>', 27),
(307, '<NAME>', 27),
(308, '<NAME>', 27),
(309, '<NAME>', 27),
(310, '<NAME>', 27),
(311, '<NAME>', 27),
(312, '<NAME>', 27),
(313, '<NAME>', 27),
(314, '<NAME>', 27),
(315, '<NAME>', 27),
(316, '<NAME>', 27),
(317, '<NAME>', 27),
(318, '<NAME>', 27),
(319, '<NAME>', 27),
(320, '<NAME>', 28),
(321, '<NAME>', 28),
(322, '<NAME>', 28),
(323, '<NAME>', 28),
(324, '<NAME>', 28),
(325, '<NAME>', 28),
(326, '<NAME>', 28),
(327, '<NAME>', 28),
(328, '<NAME>', 28),
(329, '<NAME>', 28),
(330, '<NAME>', 28),
(331, '<NAME>', 28),
(332, '<NAME>', 28),
(333, '<NAME>', 29),
(334, '<NAME>', 29),
(335, '<NAME>', 29),
(336, '<NAME>', 29),
(337, '<NAME>', 29),
(338, '<NAME>', 29),
(339, '<NAME>', 29),
(340, '<NAME>', 29),
(341, '<NAME>', 30),
(342, '<NAME>', 30),
(343, '<NAME>', 30),
(344, '<NAME>', 30),
(345, '<NAME>', 30),
(346, '<NAME>', 30),
(347, '<NAME>', 30),
(348, '<NAME>', 30),
(349, '<NAME>', 30),
(350, '<NAME>', 30),
(351, '<NAME>', 31),
(352, '<NAME>', 31),
(353, '<NAME>', 31),
(354, '<NAME>', 31),
(355, '<NAME>', 31),
(356, '<NAME>', 31),
(357, '<NAME>', 31),
(358, '<NAME>', 31),
(359, '<NAME>', 31),
(360, '<NAME>', 32),
(361, '<NAME>', 32),
(362, '<NAME>', 32),
(363, '<NAME>', 32),
(364, '<NAME>', 32),
(365, '<NAME>', 32),
(366, '<NAME>', 32),
(367, '<NAME>', 32),
(368, 'Thành phố <NAME>', 33),
(369, 'Thành phố <NAME>', 33),
(370, '<NAME>', 33),
(371, '<NAME>', 33),
(372, '<NAME>', 33),
(373, '<NAME>', 33),
(374, '<NAME>', 33),
(375, '<NAME>', 33),
(376, '<NAME>', 33),
(377, '<NAME>', 33),
(378, '<NAME>', 33),
(379, '<NAME>', 33),
(380, '<NAME>', 33),
(381, '<NAME>', 33),
(382, '<NAME>', 33),
(383, '<NAME>', 33),
(384, '<NAME>', 33),
(385, '<NAME>', 33),
(386, '<NAME>', 34),
(387, '<NAME>', 34),
(388, '<NAME>', 34),
(389, '<NAME>', 34),
(390, '<NAME>', 34),
(391, '<NAME>', 34),
(392, '<NAME>', 34),
(393, '<NAME>', 34),
(394, '<NAME>', 34),
(395, '<NAME>', 34),
(396, '<NAME>', 34),
(397, '<NAME>', 34),
(398, '<NAME>', 34),
(399, '<NAME>', 34),
(400, 'Thành phố Qu<NAME>', 35),
(401, '<NAME>', 35),
(402, '<NAME>', 35),
(403, '<NAME>', 35),
(404, '<NAME>', 35),
(405, '<NAME>', 35),
(406, '<NAME>', 35),
(407, '<NAME>', 35),
(408, '<NAME>', 35),
(409, '<NAME>', 35),
(410, '<NAME>', 35),
(411, 'Thành phố <NAME>', 36),
(412, '<NAME>', 36),
(413, '<NAME>', 36),
(414, '<NAME>', 36),
(415, '<NAME>', 36),
(416, '<NAME>', 36),
(417, '<NAME>', 36),
(418, '<NAME>', 36),
(419, '<NAME>', 36),
(420, 'Thành phố <NAME>', 37),
(421, 'Thành phố <NAME>', 37),
(422, '<NAME>', 37),
(423, '<NAME>', 37),
(424, '<NAME>', 37),
(425, '<NAME>', 37),
(426, '<NAME>', 37),
(427, '<NAME>', 37),
(428, '<NAME>', 37),
(429, 'Thành phố Phan Rang-Tháp Chàm', 38),
(430, '<NAME>', 38),
(431, '<NAME>', 38),
(432, '<NAME>', 38),
(433, '<NAME>', 38),
(434, '<NAME>', 38),
(435, '<NAME>', 38),
(436, 'Thành phố <NAME>', 39),
(437, '<NAME>', 39),
(438, '<NAME>', 39),
(439, '<NAME>', 39),
(440, '<NAME>', 39),
(441, '<NAME>', 39),
(442, '<NAME>', 39),
(443, '<NAME>', 39),
(444, '<NAME>', 39),
(445, '<NAME>', 39),
(446, 'Thành phố <NAME>', 40),
(447, '<NAME>', 40),
(448, '<NAME>', 40),
(449, '<NAME>', 40),
(450, '<NAME>', 40),
(451, '<NAME>', 40),
(452, '<NAME>', 40),
(453, '<NAME>', 40),
(454, '<NAME>', 40),
(455, '<NAME>', 40),
(456, 'Thành phố Pleiku', 41),
(457, '<NAME>', 41),
(458, '<NAME>', 41),
(459, '<NAME>', 41),
(460, '<NAME>', 41),
(461, '<NAME>', 41),
(462, '<NAME>', 41),
(463, '<NAME>', 41),
(464, '<NAME>', 41),
(465, '<NAME>', 41),
(466, '<NAME>', 41),
(467, '<NAME>', 41),
(468, '<NAME>', 41),
(469, '<NAME>', 41),
(470, '<NAME>', 41),
(471, '<NAME>', 41),
(472, '<NAME>', 41),
(473, 'Thành phố <NAME>', 42),
(474, '<NAME>', 42),
(475, '<NAME>', 42),
(476, '<NAME>', 42),
(477, '<NAME>', 42),
(478, '<NAME>', 42),
(479, '<NAME>', 42),
(480, '<NAME>', 42),
(481, '<NAME>', 42),
(482, '<NAME>', 42),
(483, '<NAME>', 42),
(484, '<NAME>', 42),
(485, '<NAME>', 42),
(486, '<NAME>', 42),
(487, '<NAME>', 42),
(488, '<NAME>', 43),
(489, '<NAME>', 43),
(490, '<NAME>', 43),
(491, '<NAME>', 43),
(492, '<NAME>', 43),
(493, '<NAME>', 43),
(494, '<NAME>', 43),
(495, '<NAME>', 43),
(496, '<NAME> <NAME>', 44),
(497, '<NAME>', 44),
(498, '<NAME>', 44),
(499, '<NAME>', 44),
(500, '<NAME>', 44),
(501, '<NAME>', 44),
(502, '<NAME>', 44),
(503, '<NAME>', 44),
(504, '<NAME>', 44),
(505, '<NAME>', 44),
(506, '<NAME>', 44),
(507, '<NAME>', 44),
(508, '<NAME>', 45),
(509, '<NAME>', 45),
(510, '<NAME>', 45),
(511, '<NAME>', 45),
(512, '<NAME>', 45),
(513, '<NAME>', 45),
(514, '<NAME>', 45),
(515, '<NAME>', 45),
(516, '<NAME>', 45),
(517, '<NAME>', 45),
(518, '<NAME>', 45),
(519, 'Thành phố <NAME>', 46),
(520, '<NAME>', 46),
(521, '<NAME>', 46),
(522, '<NAME>', 46),
(523, '<NAME>', 46),
(524, '<NAME>', 46),
(525, '<NAME>', 46),
(526, '<NAME>', 46),
(527, '<NAME>', 46),
(528, 'Thành phố Thủ Dầu Một', 47),
(529, '<NAME>', 47),
(530, '<NAME>', 47),
(531, '<NAME>', 47),
(532, '<NAME>', 47),
(533, '<NAME>', 47),
(534, '<NAME>', 47),
(535, '<NAME>', 47),
(536, '<NAME>', 47),
(537, 'Thành phố <NAME>', 48),
(538, '<NAME>', 48),
(539, '<NAME>', 48),
(540, '<NAME>', 48),
(541, '<NAME>', 48),
(542, '<NAME>', 48),
(543, '<NAME>', 48),
(544, '<NAME>', 48),
(545, '<NAME>', 48),
(546, '<NAME>', 48),
(547, '<NAME>', 48),
(548, 'Thành phố V<NAME>', 49),
(549, 'Thành phố B<NAME>', 49),
(550, '<NAME>', 49),
(551, '<NAME>', 49),
(552, '<NAME>', 49),
(553, '<NAME>', 49),
(554, '<NAME>', 49),
(555, '<NAME>', 49),
(556, 'Quận 1', 50),
(557, 'Quận 12', 50),
(558, '<NAME>', 50),
(559, 'Quận 9', 50),
(560, '<NAME>', 50),
(561, '<NAME>', 50),
(562, '<NAME>', 50),
(563, '<NAME>', 50),
(564, '<NAME>', 50),
(565, 'Quận 2', 50),
(566, 'Quận 3', 50),
(567, 'Quận 10', 50),
(568, 'Quận 11', 50),
(569, 'Quận 4', 50),
(570, 'Quận 5', 50),
(571, 'Quận 6', 50),
(572, 'Quận 8', 50),
(573, '<NAME>', 50),
(574, 'Quận 7', 50),
(575, '<NAME>', 50),
(576, '<NAME>', 50),
(577, '<NAME>', 50),
(578, '<NAME>', 50),
(579, '<NAME>', 50),
(580, 'Th<NAME>', 51),
(581, '<NAME>', 51),
(582, '<NAME>', 51),
(583, '<NAME>', 51),
(584, '<NAME>', 51),
(585, '<NAME>', 51),
(586, '<NAME>', 51),
(587, '<NAME>', 51),
(588, '<NAME>', 51),
(589, '<NAME>', 51),
(590, '<NAME>', 51),
(591, '<NAME>', 51),
(592, '<NAME>', 51),
(593, '<NAME>', 51),
(594, '<NAME>', 51),
(595, 'Th<NAME> <NAME>', 52),
(596, '<NAME>', 52),
(597, '<NAME>', 52),
(598, '<NAME>', 52),
(599, '<NAME>', 52),
(600, '<NAME>', 52),
(601, '<NAME>', 52),
(602, '<NAME>', 52),
(603, '<NAME>', 52),
(604, '<NAME>', 52),
(605, '<NAME>', 52),
(606, 'Thành ph<NAME>', 53),
(607, '<NAME>', 53),
(608, '<NAME>', 53),
(609, '<NAME>', 53),
(610, '<NAME>', 53),
(611, '<NAME>', 53),
(612, '<NAME>', 53),
(613, '<NAME>', 53),
(614, '<NAME>', 53),
(615, 'Th<NAME>', 54),
(616, '<NAME>', 54),
(617, '<NAME>', 54),
(618, '<NAME>', 54),
(619, '<NAME>', 54),
(620, '<NAME>', 54),
(621, '<NAME>', 54),
(622, '<NAME>', 54),
(623, '<NAME>', 54),
(624, 'Thành phố <NAME>', 55),
(625, '<NAME>', 55),
(626, '<NAME>', 55),
(627, '<NAME>', 55),
(628, '<NAME>', 55),
(629, '<NAME>', 55),
(630, '<NAME>', 55),
(631, '<NAME>', 55),
(632, '<NAME>', 56),
(633, 'Thành phố Sa Đéc', 56),
(634, '<NAME>', 56),
(635, '<NAME>', 56),
(636, '<NAME>', 56),
(637, '<NAME>', 56),
(638, '<NAME>', 56),
(639, '<NAME>', 56),
(640, '<NAME>', 56),
(641, '<NAME>', 56),
(642, '<NAME>', 56),
(643, '<NAME>', 56),
(644, '<NAME> <NAME>', 57),
(645, 'Thành ph<NAME>', 57),
(646, '<NAME>', 57),
(647, '<NAME>', 57),
(648, '<NAME>', 57),
(649, '<NAME>', 57),
(650, '<NAME>', 57),
(651, '<NAME>', 57),
(652, '<NAME>', 57),
(653, '<NAME>', 57),
(654, '<NAME>', 57),
(655, '<NAME> <NAME>', 58),
(656, '<NAME>', 58),
(657, '<NAME>', 58),
(658, '<NAME>', 58),
(659, '<NAME>', 58),
(660, '<NAME>', 58),
(661, '<NAME>', 58),
(662, '<NAME>', 58),
(663, '<NAME>', 58),
(664, '<NAME>', 58),
(665, '<NAME>', 58),
(666, '<NAME>', 58),
(667, '<NAME>', 58),
(668, '<NAME>', 58),
(669, '<NAME>', 58),
(670, '<NAME>', 59),
(671, '<NAME>', 59),
(672, '<NAME>', 59),
(673, '<NAME>', 59),
(674, '<NAME>', 59),
(675, '<NAME>', 59),
(676, '<NAME>', 59),
(677, '<NAME>', 59),
(678, '<NAME>', 59),
(679, 'Thành phố <NAME>', 60),
(680, '<NAME>', 60),
(681, '<NAME>', 60),
(682, '<NAME>', 60),
(683, '<NAME>', 60),
(684, '<NAME>', 60),
(685, '<NAME>', 60),
(686, '<NAME>', 60),
(687, 'Thành phố S<NAME>', 61),
(688, '<NAME>', 61),
(689, '<NAME>', 61),
(690, '<NAME>', 61),
(691, '<NAME>', 61),
(692, '<NAME>', 61),
(693, '<NAME>', 61),
(694, '<NAME>', 61),
(695, '<NAME>', 61),
(696, '<NAME> <NAME>', 61),
(697, '<NAME>', 61),
(698, 'Thành phố B<NAME>', 62),
(699, '<NAME>', 62),
(700, '<NAME>', 62),
(701, '<NAME>', 62),
(702, '<NAME>', 62),
(703, '<NAME>', 62),
(704, '<NAME>', 62),
(705, '<NAME>', 63),
(706, '<NAME>', 63),
(707, '<NAME>', 63),
(708, '<NAME>', 63),
(709, '<NAME>', 63),
(710, '<NAME>', 63),
(711, '<NAME>', 63),
(712, '<NAME>', 63),
(713, '<NAME>', 63);
-- --------------------------------------------------------
--
-- Table structure for table `orders`
--
-- Customer orders: one row per checkout. Line-item detail lives in the
-- separate `order_items` table (keyed by `order_id`). The monetary `amount`
-- is stored as text, so it must be cast before any arithmetic.
CREATE TABLE `orders` (
`id` int(11) NOT NULL,
`user_id` int(11) DEFAULT NULL, -- NULL in several dump rows; presumably guest checkout -- TODO confirm
`fullname` varchar(255) NOT NULL,
`email` varchar(255) NOT NULL,
`phone` varchar(255) NOT NULL,
`address` varchar(255) NOT NULL,
`province_id` int(11) DEFAULT NULL,
`district_id` int(11) DEFAULT NULL,
`amount` varchar(255) DEFAULT NULL, -- order total as a string; some seed rows carry a trailing space
`payment` varchar(11) NOT NULL DEFAULT 'COD', -- seed rows use 'COD' or 'ATM'
`note` text,
`created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, -- NOTE(review): no ON UPDATE clause, so this never auto-refreshes -- confirm intent
`status` tinyint(4) NOT NULL DEFAULT '1'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `orders`
--
-- Seed data for `orders`. `<NAME>`/`<EMAIL>` are anonymization placeholders
-- left by the dump's scrubbing pass, not literal values to keep.
INSERT INTO `orders` (`id`, `user_id`, `fullname`, `email`, `phone`, `address`, `province_id`, `district_id`, `amount`, `payment`, `note`, `created_at`, `updated_at`, `status`) VALUES
(2, NULL, 'tathiphuonganh', '<EMAIL>', '0154875545', '180 Trần Khát Chân', 22, 358, '1000000 ', 'COD', '', '2018-04-30 17:00:00', '2018-05-02 00:00:00', 0),
(20, 12, 'sdcasdasd ', '<EMAIL>', '0123456789', 'abc', 20, 145, '162900000', 'COD', 'xcvxcvxvds', '2018-07-11 17:00:00', '2018-07-12 00:00:00', 1),
(31, 12, '<NAME>', '<EMAIL>', '165757215', 'Xóm 2, xã Mỹ Hưng', 18, 196, '29990000 ', 'COD', 'Chưa có lưu ý', '2018-07-11 17:00:00', '2018-07-12 00:00:00', 1),
(33, 12, '<NAME>', '<EMAIL>', '165757215', 'Xóm 2, xã Mỹ Hưng', 18, 196, '29990000 ', 'COD', 'Chưa có lưu ý', '2018-07-11 17:00:00', '2018-07-12 00:00:00', 1),
(34, NULL, '<NAME>', '<EMAIL>', '165757215', 'Xóm 2, xã Mỹ Hưng', 18, 196, '335860000 ', 'COD', 'Chưa có lưu ý', '2018-07-12 17:00:00', '2018-07-13 00:00:00', 1),
(35, NULL, '<NAME>', '<EMAIL>', '165757215', 'Xóm 2, xã M<NAME>', 18, 196, '335860000 ', 'COD', 'Chưa có lưu ý', '2018-07-12 17:00:00', '2018-07-13 00:00:00', 1),
(36, NULL, 'H<NAME>', '<EMAIL>', '165757215', 'Xóm 2, xã M<NAME>', 17, 187, '335860000 ', 'COD', 'Chưa có lưu ý', '2018-07-12 17:00:00', '2018-07-13 00:00:00', 1),
(37, NULL, '<NAME>', '<EMAIL>', '165757215', 'Xóm 2, xã M<NAME>', 1, 2, '335860000 ', 'ATM', 'Chưa có lưu ý', '2018-07-12 17:00:00', '2018-07-13 00:00:00', 1),
(38, NULL, 'Huy Cảnh', '<EMAIL>', '165757215', 'Xóm 2, xã Mỹ Hưng', 3, 43, '335860000 ', 'ATM', 'Chưa có lưu ý', '2018-07-12 17:00:00', '2018-07-13 00:00:00', 1),
(49, NULL, 'H<NAME>', '<EMAIL>', '165757215', 'Xóm 2, xã Mỹ Hưng', 1, 2, '489090000', 'ATM', 'Chưa có lưu ý', '2018-07-12 17:00:00', '2018-11-11 12:34:46', 1),
(50, NULL, '<NAME>nh', '<EMAIL>', '165757215', 'Xóm 2, xã Mỹ Hưng', 4, 56, '489090000 ', 'COD', 'Chưa có lưu ý', '2018-07-12 17:00:00', '2018-07-13 00:00:00', 1),
(52, NULL, '<NAME>', '<EMAIL>', '165757215', 'Xóm 2, xã Mỹ Hưng', 5, 64, '489090000', 'COD', 'Chưa có lưu ý', '2018-07-12 17:00:00', '2018-07-13 00:00:00', 1),
(53, 13, '<NAME>', '<EMAIL>', '09876453627', '347 Cổ Nhuế', 1, 15, '33480000', 'COD', '', '2018-11-14 10:37:17', '2018-11-14 17:37:17', 1),
(54, 13, '<NAME>', '<EMAIL>', '09876453627', '347 Cổ Nhuế', 1, 15, '3490000', 'COD', '', '2018-11-14 10:44:55', '2018-11-14 17:44:55', 1);
-- --------------------------------------------------------
--
-- Table structure for table `order_items`
--
-- Order line items: each row ties one product, with its unit price and
-- quantity at purchase time, to a row in `orders`.
CREATE TABLE `order_items` (
`id` int(11) NOT NULL,
`order_id` int(11) NOT NULL, -- parent order; matches `orders`.`id` in the seed data
`product_id` int(11) NOT NULL, -- matches `products`.`id` in the seed data
`price` varchar(255) NOT NULL, -- unit-price snapshot, stored as text like `orders`.`amount`
`qty` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `order_items`
--
-- Seed data for `order_items`. Note rows 2 and 3 both attach product 1 to
-- order 2 (two separate lines rather than one merged quantity).
INSERT INTO `order_items` (`id`, `order_id`, `product_id`, `price`, `qty`) VALUES
(2, 2, 1, '100000', 3),
(3, 2, 1, '100000', 7),
(11, 20, 1, '54300000', 1),
(12, 20, 51, '54300000', 1),
(13, 20, 6, '54300000', 1),
(17, 31, 5, '29990000', 1),
(19, 33, 5, '29990000', 1),
(20, 34, 3, '23990000', 14),
(21, 35, 3, '23990000', 14),
(22, 36, 3, '23990000', 14),
(23, 37, 3, '23990000', 14),
(24, 38, 3, '23990000', 14),
(25, 49, 3, '23990000', 14),
(26, 49, 5, '29990000', 3),
(28, 49, 16, '7990000', 1),
(29, 49, 15, '6990000', 1),
(30, 49, 19, '13490000', 1),
(31, 49, 4, '34790000', 1),
(32, 50, 3, '23990000', 14),
(33, 50, 5, '29990000', 3),
(35, 50, 16, '7990000', 1),
(36, 50, 15, '6990000', 1),
(37, 50, 19, '13490000', 1),
(38, 50, 4, '34790000', 1),
(39, 52, 3, '23990000', 14),
(40, 52, 5, '29990000', 3),
(42, 52, 16, '7990000', 1),
(43, 52, 15, '6990000', 1),
(44, 52, 19, '13490000', 1),
(45, 52, 4, '34790000', 1),
(46, 53, 5, '29990000', 1),
(47, 53, 21, '3490000', 1),
(48, 54, 21, '3490000', 1);
-- --------------------------------------------------------
--
-- Table structure for table `products`
--
-- Product catalog. Images live in `product_images` (keyed by `product_id`);
-- `product_category_id` points into `product_categories`.
CREATE TABLE `products` (
`id` int(11) NOT NULL,
`name` varchar(255) NOT NULL,
`slug` varchar(255) NOT NULL, -- URL-friendly identifier (seed rows are mostly kebab-case)
`price` varchar(255) DEFAULT NULL, -- price as text, consistent with `orders`.`amount` / `order_items`.`price`
`colors` varchar(255) DEFAULT NULL, -- comma-separated color list in the seed data
`qty` float NOT NULL DEFAULT '0', -- NOTE(review): float for a stock count -- confirm fractional qty is intended
`brand_id` int(11) NOT NULL,
`product_category_id` int(11) NOT NULL,
`description` text,
`content` text, -- long-form HTML body (see seed row id 5)
`views` int(11) DEFAULT NULL,
`is_new` tinyint(4) NOT NULL DEFAULT '0',
`is_sale` int(11) NOT NULL DEFAULT '0',
`is_featured` int(11) NOT NULL DEFAULT '1',
`created_at` date NOT NULL,
`updated_at` date DEFAULT NULL,
`status` tinyint(4) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `products`
--
-- Seed data for `products`.
-- NOTE(review): row 26 has is_new = 127 where every other row uses 0/1, and
-- rows 34/35 carry prices ('549000000', '567000000') roughly 100x their
-- sibling Xiaomi rows -- verify these values before relying on this data.
INSERT INTO `products` (`id`, `name`, `slug`, `price`, `colors`, `qty`, `brand_id`, `product_category_id`, `description`, `content`, `views`, `is_new`, `is_sale`, `is_featured`, `created_at`, `updated_at`, `status`) VALUES
(1, 'iPhone 8 Plus RED', 'iphone-8-plus-256G-product-red', '28790000', 'đỏ, trắng, đen, xám', 100, 1, 1, NULL, NULL, 1250, 1, 0, 1, '2018-02-06', '2018-04-18', 1),
(2, 'iPhone 8 Plus 64GB', 'iphone-8-plus-64gb', '23990000', 'đỏ, trắng, đen, xám', 100, 1, 1, '', '', 1425, 1, 0, 1, '2018-02-06', '2018-04-18', 1),
(3, 'iPhone 8 Plus 64GB PRODUCT RED', 'iphone-8-plus-64gb-product-red', '23990000', 'đỏ, trắng, đen, xám', 100, 1, 1, NULL, NULL, 45870, 1, 0, 1, '2018-02-06', '2018-04-18', 1),
(4, 'iPhone X 256GB', 'iphone-x-256gb', '34790000', 'đỏ, trắng, đen, xám', 100, 1, 1, NULL, NULL, 14500, 1, 0, 1, '2018-02-06', '2018-04-18', 1),
(5, 'iPhone X 64GB ', 'iphone-x', '29990000', 'đỏ, trắng, đen, xám', 100, 1, 1, '<h2 align=\"center\"> Đánh giá chi tiết iPhone X 64GB </h2>\r\n<p> Đã lâu lắm rồi Apple mới ra mắt một sản phẩm với thiết kế đột phá và liều lĩnh. Dù vấp phải khá nhiều ý kiến trái chiều nhưng cũng không thể phủ nhận độ hấp dẫn của chiếc iPhone thế hệ thứ 10 này. Công nghệ bảo mật mới, loại bỏ nút home truyền thống, camera với những tính năng độc quyền, tất cả đã khiến người dùng đứng ngồi không yên cho đến khi được trên tay. </p>\r\n<br>\r\n<b>iPhone X 64GB có thiết kế lột xác hoàn toàn </b> <br>\r\n<p>iPhone X 64GB đã lột xác hoàn toàn với việc loại bỏ nút Home truyền thống, màn hình tràn viền và camera kép ở phía sau đã được đặt lại vị trí theo chiều dọc. Khung viền từ thép sáng bóng bền bỉ và mặt lưng kính với các góc bo tròn dễ dàng cầm nắm. Có thể nói đây là một thiết kế khá đột phá mà lâu lắm rồi Apple mới thể hiện lại. Người dùng cần phải trên tay thì mới cảm nhận được hết nét tinh tế và sang trọng của sản phẩm.</p><br>\r\n<b>Màn hình của iPhone X 64GB hiển thị đẹp hơn</b> <br>\r\n<p>iPhone X 64GB là chiếc smartphone đầu tiên được Apple ưu ái cho tấm nền màn hình OLED, kích thước 5.8 inch và độ phân giải đạt chuẩn Super Retina HD, Điều này giúp cho màn hình có màu sắc sống động, góc nhìn rộng hơn, cải thiện độ sáng và tốn ít điện năng hơn. Bên cạnh đó, công nghệ True Tone còn giúp màu sắc trở nên cực kì trung thực.\r\n\r\n </p>\r\n', '<h2>Cấu hình sản phẩm: </h2><br>\r\n\r\nMàn Hình: 5.8 inchs OLED <br>\r\nCamera: 7.0 MP/ Dual 12.0 MP <br>\r\nPin: 2716 mAh, Li-Ion battery <br>\r\nRam: 3 GB <br> \r\nCPU: Apple A11 Bionic <br> \r\nHĐH: iOS 11 <br>', 24780, 1, 0, 1, '2018-02-06', '2018-04-18', 1),
(6, 'iPhone 6 32GB (2017)', 'iphone-6-32gb', '7499000', 'đỏ, trắng, đen, xám', 100, 1, 1, NULL, NULL, 45600, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(7, 'iPhone 6s Plus 32GB', 'iphone-6s-plus-32gb', '13999000', 'đỏ, trắng, đen, xám', 100, 1, 1, NULL, NULL, 124, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(8, 'iPhone 8 256GB', 'iphone-8-256gb', '25790000', 'đỏ, trắng, đen, xám', 100, 1, 1, NULL, NULL, 47840, 1, 0, 1, '2018-02-06', '2018-04-18', 1),
(9, 'iPhone 7 Plus 32GB', 'iphone-7-plus', '19999000', 'đỏ, trắng, đen, xám', 100, 1, 1, NULL, '', 1245, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(10, 'iPhone 7 32GB', 'iphone-7', '15999000', 'đỏ, trắng, đen, xám', 100, 1, 1, NULL, NULL, 3214, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(11, 'Sony Xperia L1 Dual', 'sony-xperia-l1-dual', '3590000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 48712, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(12, 'Sony Xperia L2', 'sony-xperia-l2', '4990000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 4245, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(13, 'Sony Xperia XA1 Plus', 'sony-xperia-xa1-plus', '5990000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 45214, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(14, 'Sony Xperia XA1 Ultra', 'sony-xperia-xa1-ultra', '6990000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 1212, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(15, 'Sony Xperia XA Ultra', 'sony-xperia-xa-ultra', '6990000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 475400, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(16, 'Sony Xperia X', 'sony-xperia-x', '7990000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 121400, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(17, 'Sony Xperia XZs', 'sony-xperia-xzs', '9990000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 1235, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(18, 'Sony Xperia XZ1', 'sony-xperia-xz1', '11990000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 1247, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(19, 'Sony Xperia XZ Premium Pink Gold', 'sony-xperia-xz-premium-pink-gold', '13490000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 1244000, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(20, 'Sony Xperia XZ Premium', 'sony-xperia-xz-premium', '14990000', 'đỏ, trắng, đen, xám', 100, 2, 1, NULL, NULL, 50000, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(21, 'Asus Zenfone 4 Max', 'asus-zenfone-4-max', '3490000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, NULL, 1, 1, 1, '2018-02-06', '2018-04-18', 1),
(22, 'Asus Zenfone 4 Max Pro', 'asus-zenfone-4-max-pro', '4690000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(23, 'Asus Zenfone Max Plus M1', 'asus-zenfone-max-plus-m1', '4990000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(24, 'ASUS Zenfone Max Plus M1 - ZB570TL', 'asus-zenfone-max-plus-m1-zb570tl', '4990000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(25, 'Asus Zenfone 4 Max Pro ZC554KL', 'asus-zenfone-4-max-pro-zc554kl', '4690000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, NULL, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(26, 'Asus Zenfone 4 Max ZC520KL', 'asus-zenfone-4-max-zc520kl', '3490000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, 1241, 127, 0, 1, '2018-02-06', '2018-04-18', 1),
(27, 'Asus Zenfone Live ZB501KL', 'asus-zenfone-live-zb501kl', '2990000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, 1245, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(28, 'Asus Zenfone 5', 'asus-zenfone-5', '7990000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, 1242, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(29, 'Asus Zenfone 3 Max 5.5\" - ZC553KL', 'asus-zenfone-3-max-5-5-zc553kl', '4379000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, 1245, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(30, 'Asus Zenfone 3 - ZE552KL', 'asus-zenfone-3-ze552kl', '8179000', 'đỏ, trắng, đen, xám', 100, 3, 1, NULL, NULL, 4541, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(31, 'Xiaomi Redmi Note 4 32GB', 'xiaomi-redmi-note-4-32gb', '4290000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 1478, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(32, 'Xiaomi Redmi Note 5 32GB', 'xiaomi-redmi-note-5', '4799000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 1236, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(33, 'Xiaomi Mi A1 32GB', 'xiaomi-mi-a1-32gb', '4990000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 1478, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(34, 'Xiaomi Mi A1', 'xiaomi-mi-a1', '549000000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 0, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(35, 'Xiaomi Redmi S2 32GB', 'xiaomi-redmi-s2', '567000000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 14587, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(36, 'Xiaomi Redmi Note 5 a Prime', 'xiaomi-redmi-note-5a-prime', '3690000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 1121, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(37, 'Xiaomi Redmi 4X', 'xiaomi-redmi-4x', '3690000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 2134, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(38, 'Xiaomi Redmi 5 Plus', 'xiaomi-redmi-5-plus', '3999000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 1241, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(39, 'Xiaomi Redmi Note 5A', 'xiaomi-redmi-note-5a', '2990000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 2144, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(40, 'Xiaomi Redmi 5A 16GB Ram 2GB ', 'xiaomi-redmi-5a-16gb-ram-2gb', '1990000', 'đỏ, trắng, đen, xám', 100, 4, 1, NULL, NULL, 1256, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(41, 'Samsung Galaxy S8 Plus Orchid Gray', 'samsung-galaxy-s8-plus-orchid-gray', '17990000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(42, 'Samsung Galaxy S8 Plus', 'samsung-galaxy-s8-plus', '17990000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(43, 'Samsung Galaxy Note 8 Orchid Grey ', 'samsung-galaxy-note-8-orchid-grey', '22490000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(44, 'Samsung Galaxy S9+ 128GB', 'samsung-galaxy-s9-plus-128gb', '24990000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(45, 'Samsung Galaxy S9+ Lilac Purple 128GB ', 'samsung-galaxy-s9-plus-lilac-purple', '24990000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(46, 'Samsung Galaxy J7+', 'samsung-galaxy-j7-plus', '7290000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(47, 'Samsung Galaxy J3 Pro', 'samsung-galaxy-j3-pro', '3990000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(48, 'Samsung Galaxy J7 Pro', 'samsung-galaxy-j7-pro', '6090000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(49, 'Samsung Galaxy A8 (2018)', 'samsung-galaxy-a8-2018', '10990000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(50, 'Samsung Galaxy A8+ (2018)', 'samsung-galaxy-a8-plus-2018', '13490000', 'đỏ, trắng, đen, xám', 100, 5, 1, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(51, 'Usb 3.0 32GB Kingston Data Traveler 100G3 Black', 'Usb 3.0 32GB Kingston Data Traveler 100G3 Black', '390000', 'đỏ, trắng, đen, xám', 100, 6, 3, NULL, NULL, 1245, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(52, 'USB 3.0 16Gb Kingston 101G3', 'usb-30-16gb-kingston-101g3', '250000', 'đỏ, trắng, đen, xám', 100, 6, 3, NULL, NULL, 1245, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(53, 'Thẻ nhớ MicroSD 64GB Kingston C10', 'the-nho-microsd-64gb-kingston-c10', '790000', 'đỏ, trắng, đen, xám', 100, 6, 3, NULL, NULL, 124, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(54, 'Thẻ nhớ MicroSD 8GB Kingston Class 4', 'the-nho-microsd-8gb-kingston-class-4', '170000', 'đỏ, trắng, đen, xám', 100, 6, 3, NULL, NULL, 1465, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(55, 'Thẻ nhớ MicroSD 16GB Kingston', 'the-nho-microsd-16gb-kingston-sdchc-class-4', '270000', 'đỏ, trắng, đen, xám', 100, 6, 3, NULL, NULL, 1435, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(56, 'Tai nghe choàng đầu Unik S416', 'tai-nghe-choang-dau-unik-s416', '249000', 'đỏ, trắng, đen, xám', 100, 7, 4, NULL, NULL, 1247, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(57, 'Tai nghe choàng đầu Unik S448', 'tai-nghe-choang-dau-unik-s448', '249000', 'đỏ, trắng, đen, xám', 100, 7, 4, NULL, NULL, 1248, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(58, 'Tai nghe có mic Unik S810', 'tai-nghe-co-mic-unik-s810', '150000', 'đỏ, trắng, đen, xám', 100, 7, 4, NULL, NULL, 523, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(59, 'Tai nghe có mic Unik S704', 'tai-nghe-co-mic-unik-s704', '150000', 'đỏ, trắng, đen, xám', 100, 7, 4, NULL, NULL, 154, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(60, 'Tai nghe choàng đầu có MIC Bluetooth Unik BT05', 'tai-nghe-choang-dau-co-mic-bluetooth-unik-bt05', '599000', 'đỏ, trắng, đen, xám', 100, 7, 4, NULL, NULL, 1254, 0, 1, 1, '2018-02-06', '2018-04-18', 1),
(61, 'Sạc điện thoại liền cáp 1m Icore 1A', 'sac-dien-thoai-lien-cap-1m-icore-1a', '100000', 'đỏ, trắng, đen, xám', 100, 8, 6, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(62, 'Sạc ĐT USB Icore 1 cổng 1A cho ĐT', 'sac-dt-usb-icore-1-cong-1a-cho-dt', '100000', 'đỏ, trắng, đen, xám', 100, 8, 6, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(63, 'Sạc ĐT liền cáp micro usb Icore', 'sac-dt-lien-cap-micro-usb-icore', '80000', 'đỏ, trắng, đen, xám', 100, 8, 6, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(64, 'Cáp Lightning MFI Icore 1m', 'Cap-Lightning-MFI-Icore-1m', '180000', 'đỏ, trắng, đen, xám', 100, 8, 6, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(65, 'Cáp Micro Usb Icore 1m', 'cap-micro-usb-icore-1m', '80000', 'đỏ, trắng, đen, xám', 100, 8, 6, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(66, 'Sạc dự phòng Icore 10000mAh (polymer)', 'sac-du-phong-icore-10000mah-polymer', '450000', 'đỏ, trắng, đen, xám', 100, 8, 7, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(67, 'Sạc dự phòng Icore 5000mAh', 'Sac-du-phong-Icore-5000mAh', '190000', 'đỏ, trắng, đen, xám', 100, 8, 7, NULL, NULL, NULL, 0, 0, 1, '2018-02-06', '2018-04-18', 1),
(71, 'Nước hoa Pháp 3', 'nuoc-hoa-phap-3', '23456', 'xanh', 4, 4, 6, NULL, NULL, NULL, 1, 1, 1, '2018-11-09', '2018-11-09', 1);
-- --------------------------------------------------------
--
-- Table structure for table `product_categories`
--
-- Product categories (phones, memory cards, headphones, ...), referenced by
-- `products`.`product_category_id`.
--
-- Fix: the auto-refresh clause `ON UPDATE CURRENT_TIMESTAMP` was attached to
-- `created_at`, so the creation time was silently rewritten on every UPDATE
-- while `updated_at` stayed frozen at its insert value. The clause belongs
-- on `updated_at`; both columns keep DEFAULT CURRENT_TIMESTAMP on insert.
CREATE TABLE `product_categories` (
`id` int(11) NOT NULL,
`name` varchar(255) NOT NULL,
`slug` varchar(255) NOT NULL, -- URL-friendly key (see seed rows below)
`description` text,
`status` tinyint(4) NOT NULL DEFAULT '0',
`created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `product_categories`
--
-- Seed data for `product_categories`.
-- NOTE(review): every row inserts `updated_at` = '0000-00-00 00:00:00'; such
-- zero dates are rejected when sql_mode includes NO_ZERO_DATE (MySQL strict
-- defaults) -- confirm the target server's sql_mode before importing.
INSERT INTO `product_categories` (`id`, `name`, `slug`, `description`, `status`, `created_at`, `updated_at`) VALUES
(1, 'Điện thoại', 'dien-thoai', 'Điện thoại', 1, '2018-11-09 02:04:45', '0000-00-00 00:00:00'),
(3, 'Thẻ nhớ', 'the-nho', 'Thẻ nhớ điện thoại', 1, '2018-11-09 02:04:45', '0000-00-00 00:00:00'),
(4, 'Tai nghe', 'tai-nghe', 'Tai nghe điện thoại', 1, '2018-11-09 02:04:45', '0000-00-00 00:00:00'),
(5, 'Bao da ốp lưng', 'bao-da-op-lung', 'Bao da ốp lưng điện thoại', 1, '2018-11-09 02:04:45', '0000-00-00 00:00:00'),
(6, 'Sạc cáp', 'sac-cap', 'Sạc cáp điện thoại', 1, '2018-11-09 02:04:45', '0000-00-00 00:00:00'),
(7, 'Sạc dự phòng', 'sac-du-phong', 'Sạc dự phòng cực khỏe', 1, '2018-11-09 02:04:45', '0000-00-00 00:00:00');
-- --------------------------------------------------------
--
-- Table structure for table `product_images`
--
-- Product image gallery: one row per image file, keyed to `products`.`id`.
CREATE TABLE `product_images` (
`id` int(11) NOT NULL,
`product_id` int(11) NOT NULL, -- matches `products`.`id` in the seed data
`img` varchar(255) NOT NULL, -- path relative to the web root (e.g. img/products/...)
`is_featured` tinyint(4) DEFAULT '1',
`created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP -- NOTE(review): no ON UPDATE clause, so this never auto-refreshes -- confirm intent
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `product_images`
--
-- Seed data for `product_images`: one featured image per product, rows 1-67
-- mapping 1:1 onto product ids 1-67 (product 71 has no image row here).
INSERT INTO `product_images` (`id`, `product_id`, `img`, `is_featured`, `created_at`, `updated_at`) VALUES
(1, 1, 'img/products/636614727176851624_iphone--8-plus-red-1.png', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(2, 2, 'img/products/636459040422660236_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(3, 3, 'img/products/636614727176851624_iphone--8-plus-red-1 (1).png', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(4, 4, 'img/products/636483223586180190_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(5, 5, 'img/products/636483223586180190_1 (1).jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(6, 6, 'img/products/636506509528306435_iphone6-32GB-2.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(7, 7, 'img/products/636172339622394948_apple-Iphone-6s-gold-1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(8, 8, 'img/products/636459060591822074_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(9, 9, 'img/products/636159432323817451_ip7p-gold-1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(10, 10, 'img/products/636159398645952790_ip7-black-1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(11, 11, 'img/products/636347803366796448_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(12, 12, 'img/products/636534255386871307_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(13, 13, 'img/products/636449525827101531_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(14, 14, 'img/products/636313235962471668_800-1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(15, 15, 'img/products/636161892755323252_xperia-xa-ultra-1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(16, 16, 'img/products/636160168215487121_xperia-x-silver-1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(17, 17, 'img/products/636259601365302936_800trang.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(18, 18, 'img/products/636449520592598133_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(19, 19, 'img/products/sony-xperia-xz-premium-pink-gold-400x460.png', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(20, 20, 'img/products/sony-xperia-xz-premium-1-400x460.png', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(21, 21, 'img/products/636473926771819711_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(22, 22, 'img/products/636403898586199374_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(23, 23, 'img/products/636524079493403495_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(24, 24, 'img/products/asus-zenfone-max-plus-m1-zb570tl-den-1-3-org.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(25, 25, 'img/products/asus-zenfone-4-max-pro-zc554kl-den-1-org.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(26, 26, 'img/products/asus-zenfone-4-max-zc520kl-m-den-1-org.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(27, 27, 'img/products/asus-zenfone-live-zb501kl-400-1-400x460.png', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(28, 28, 'img/products/asus-zenfone-5-didongviet.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(29, 29, 'img/products/asus-zenfone-3-max-zc553kl-vang-dong-didongviet_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(30, 30, 'img/products/asus-zenfone-3-ze552kl-den-didongviet.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(31, 31, 'img/products/636453055114651902_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(32, 32, 'img/products/636619088468711666_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(33, 33, 'img/products/636415156301744244_1o.png', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(34, 34, 'img/products/636415162482543484_2.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(35, 35, 'img/products/636623236912499129_xiaomi-s2-3-xam.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(36, 36, 'img/products/636463347832890173_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(37, 37, 'img/products/636453072940146775_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(38, 38, 'img/products/636549777491044706_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(39, 39, 'img/products/636427922203447399_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(40, 40, 'img/products/xiaomi-redmi-5a-16gb-ram-2gb.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(41, 41, 'img/products/636344641451328424_636344634241195520_800-1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(42, 42, 'img/products/636396217066191623_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(43, 43, 'img/products/636506554439585001_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(44, 44, 'img/products/636552331208636703_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(45, 45, 'img/products/636552333148760332_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(46, 46, 'img/products/636447213995680282_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(47, 47, 'img/products/636383938496757496_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(48, 48, 'img/products/636529900670656200_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(49, 49, 'img/products/636523986341921012_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(50, 50, 'img/products/636523998806629206_1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(51, 51, 'img/products/636075724321439795_HAPK-USB-30-32GB-KINGSTON-DATA-TRAVELER-100G3-07.JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(52, 52, 'img/products/636074847972740142_USB-30-16GB-KINGSTON-101G3-00006137-1.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(53, 53, 'img/products/636075714037196583_HAPK-THE-NHO-MICROSD-64GB-KINGSTON-C10-05.JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(54, 54, 'img/products/the-nho-microsd-8gb-kingston-class-4-id26826.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(55, 55, 'img/products/636251948217518300_HMPK-THE-NHO-MICROSD-16GB-KINGSTON-SDCHC-CLASS-4-01.jpg', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(56, 56, 'img/products/636455617292459552_HASP-TAI-NGHE-CHOANG-DAU-UNIK-S416-00391764.JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(57, 57, 'img/products/636459935421057396_HASP-TAI-NGHE-CHOANG-DAU-UNIK-S448-00391765.JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(58, 58, 'img/products/636455624103887552_HASP-TAI-NGHE-CO-MIC-UNIK-S810-00391762 (7).JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(59, 59, 'img/products/636455619571619552_HASP-TAI-NGHE-CO-MIC-UNIK-S704-00391763 (7).JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(60, 60, 'img/products/636455608714643552_HASP-TAI-NGHE-CO-MIC-BLUETOOTH-UNIK-BT05-00391766 (7).JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(61, 61, 'img/products/636282152873274000_HAPK-SAC-DT-LIEN-CAP-1M-ICORE-1A-002476861 (1).JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(62, 62, 'img/products/636281936660201622_HAPK-SAC-DT-USB-ICORE-1-CONG-1A-CHO-DT- 000041651 (1).JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(63, 63, 'img/products/636070537766014404_HAPK-SAC-DT-LIEN-CAP-MICRO-USB-ICORE-01.JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(64, 64, 'img/products/636269252051143690_HAPK-CAP-LIGHTNING-MFI-ICORE-1M-00007147 (1).JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(65, 65, 'img/products/636079082096344865_HAPK-CAP-MICRO-USB-ICORE-1M-00007146-4.JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(66, 66, 'img/products/636530151291840952_HASP-PIN-DU-PHONG-ICORE-15.JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10'),
(67, 67, 'img/products/636230100679237505_HAPK-SAC-DU-PHONG-ICORE-5000MAH-00262041 (1).JPG', 1, '2018-11-09 14:53:49', '2018-11-09 14:54:10');
-- --------------------------------------------------------
--
-- Table structure for table `product_relates`
-- Self-referencing link table: each row marks `product_relate_id` as a
-- "related product" suggestion shown alongside `product_id`.
--
CREATE TABLE `product_relates` (
  `id` int(11) NOT NULL,
  `product_id` int(11) NOT NULL,
  `product_relate_id` int(11) NOT NULL,
  `status` tinyint(4) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `product_relates`
-- NOTE(review): only products 1 and 5 have relations seeded here.
--
INSERT INTO `product_relates` (`id`, `product_id`, `product_relate_id`, `status`) VALUES
(1, 1, 9, 1),
(2, 1, 3, 1),
(3, 1, 16, 1),
(4, 1, 17, 1),
(5, 5, 9, 1),
(6, 5, 10, 1),
(7, 5, 11, 1),
(8, 5, 12, 1);
-- --------------------------------------------------------
--
-- Table structure for table `product_reviews`
-- Customer reviews: `user_id` references `customers`.`id` (see the
-- constraints section at the end of this dump); `rate` is a 1-digit score.
--
CREATE TABLE `product_reviews` (
  `id` int(11) NOT NULL,
  `product_id` int(11) NOT NULL,
  `user_id` int(11) NOT NULL,
  `content` text NOT NULL,
  `rate` int(1) NOT NULL,
  `created_at` date NOT NULL,
  `updated_at` date DEFAULT NULL,
  `status` tinyint(4) NOT NULL DEFAULT '1'
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `product_reviews`
--
INSERT INTO `product_reviews` (`id`, `product_id`, `user_id`, `content`, `rate`, `created_at`, `updated_at`, `status`) VALUES
(1, 5, 1, 'Cái này được đó', 5, '2018-05-24', '2018-11-10', 1),
(2, 5, 2, 'Quá đắt nhưng mà cũng được.', 5, '2018-05-24', '2018-05-24', 1),
(3, 8, 13, 'Sản phẩm rất tốt', 5, '2018-11-16', '2018-11-16', 1),
(4, 8, 13, 'Sản phẩm tốt', 5, '2018-11-16', '2018-11-16', 1);
-- --------------------------------------------------------
--
-- Table structure for table `provinces`
-- Vietnamese top-level administrative units; `code` is stored as a string
-- (official province code), `zipcode` is the postal-code prefix.
--
CREATE TABLE `provinces` (
  `id` int(11) NOT NULL,
  `name` varchar(255) NOT NULL,
  `code` varchar(255) NOT NULL,
  `zipcode` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `provinces`
--
-- Seed data: the 63 Vietnamese provinces / centrally-run cities.
-- FIX(review): many `name` values were corrupted to a '<NAME>' placeholder;
-- they have been restored from the official GSO province code list (the
-- `code` column uniquely identifies each unit). `zipcode` values are kept
-- exactly as originally dumped, even where they look inconsistent.
INSERT INTO `provinces` (`id`, `name`, `code`, `zipcode`) VALUES
(1, 'Thành phố Hà Nội', '1', '100000'),
(2, 'Tỉnh Hà Giang', '2', '310000'),
(3, 'Tỉnh Cao Bằng', '4', '270000'),
(4, 'Tỉnh Bắc Kạn', '6', '960000'),
(5, 'Tỉnh Tuyên Quang', '8', '300000'),
(6, 'Tỉnh Lào Cai', '10', '330000'),
(7, 'Tỉnh Điện Biên', '11', '380000'),
(8, 'Tỉnh Lai Châu', '12', '390000'),
(9, 'Tỉnh Sơn La', '14', '360000'),
(10, 'Tỉnh Yên Bái', '15', '320000'),
(11, 'Tỉnh Hoà Bình', '17', '350000'),
(12, 'Tỉnh Thái Nguyên', '19', '250000'),
(13, 'Tỉnh Lạng Sơn', '20', '240000'),
(14, 'Tỉnh Quảng Ninh', '22', '200000'),
(15, 'Tỉnh Bắc Giang', '24', '220000'),
(16, 'Tỉnh Phú Thọ', '25', '290000'),
(17, 'Tỉnh Vĩnh Phúc', '26', '280000'),
(18, 'Tỉnh Bắc Ninh', '27', '790000'),
(19, 'Tỉnh Hải Dương', '30', '170000'),
(20, 'Thành phố Hải Phòng', '31', '180000'),
(21, 'Tỉnh Hưng Yên', '33', '160000'),
(22, 'Tỉnh Thái Bình', '34', '410000'),
(23, 'Tỉnh Hà Nam', '35', '400000'),
(24, 'Tỉnh Nam Định', '36', '420000'),
(25, 'Tỉnh Ninh Bình', '37', '430000'),
(26, 'Tỉnh Thanh Hoá', '38', '440000'),
(27, 'Tỉnh Nghệ An', '40', '460000'),
(28, 'Tỉnh Hà Tĩnh', '42', '480000'),
(29, 'Tỉnh Quảng Bình', '44', '510000'),
(30, 'Tỉnh Quảng Trị', '45', '520000'),
(31, 'Tỉnh Thừa Thiên Huế', '46', '530000'),
(32, 'Thành phố Đà Nẵng', '48', '550000'),
(33, 'Tỉnh Quảng Nam', '49', '560000'),
(34, 'Tỉnh Quảng Ngãi', '51', '570000'),
(35, 'Tỉnh Bình Định', '52', '820000'),
(36, 'Tỉnh Phú Yên', '54', '620000'),
(37, 'Tỉnh Khánh Hoà', '56', '650000'),
(38, 'Tỉnh Ninh Thuận', '58', '660000'),
(39, 'Tỉnh Bình Thuận', '60', '800000'),
(40, 'Tỉnh Kon Tum', '62', '580000'),
(41, 'Tỉnh Gia Lai', '64', '600000'),
(42, 'Tỉnh Đắk Lắk', '66', '630000'),
(43, 'Tỉnh Đắk Nông', '67', '640000'),
(44, 'Tỉnh Lâm Đồng', '68', '670000'),
(45, 'Tỉnh Bình Phước', '70', '830000'),
(46, 'Tỉnh Tây Ninh', '72', '840000'),
(47, 'Tỉnh Bình Dương', '74', '590000'),
(48, 'Tỉnh Đồng Nai', '75', '810000'),
(49, 'Tỉnh Bà Rịa - Vũng Tàu', '77', '790000'),
(50, 'Thành phố Hồ Chí Minh', '79', '700000'),
(51, 'Tỉnh Long An', '80', '850000'),
(52, 'Tỉnh Tiền Giang', '82', '860000'),
(53, 'Tỉnh Bến Tre', '83', '930000'),
(54, 'Tỉnh Trà Vinh', '84', '940000'),
(55, 'Tỉnh Vĩnh Long', '86', '890000'),
(56, 'Tỉnh Đồng Tháp', '87', '870000'),
(57, 'Tỉnh An Giang', '89', '880000'),
(58, 'Tỉnh Kiên Giang', '91', '920000'),
(59, 'Thành phố Cần Thơ', '92', '900000'),
(60, 'Tỉnh Hậu Giang', '93', '910000'),
(61, 'Tỉnh Sóc Trăng', '94', '950000'),
(62, 'Tỉnh Bạc Liêu', '95', '260000'),
(63, 'Tỉnh Cà Mau', '96', '970000');
--
-- Indexes for dumped tables
-- NOTE(review): the index names `provicence_id` on `customers` and `orders`
-- are misspelled ('provicence' vs 'province'); renaming would require
-- updating anything that references the index by name, so left as dumped.
--
--
-- Indexes for table `admins`
--
ALTER TABLE `admins`
  ADD PRIMARY KEY (`id`);
--
-- Indexes for table `brands`
--
ALTER TABLE `brands`
  ADD PRIMARY KEY (`id`),
  ADD UNIQUE KEY `slug` (`slug`);
--
-- Indexes for table `customers`
--
ALTER TABLE `customers`
  ADD PRIMARY KEY (`id`),
  ADD UNIQUE KEY `email` (`email`),
  ADD KEY `provicence_id` (`province_id`),
  ADD KEY `districts_id` (`district_id`),
  ADD KEY `customer_group_id` (`customer_group_id`);
--
-- Indexes for table `districts`
--
ALTER TABLE `districts`
  ADD PRIMARY KEY (`id`),
  ADD KEY `province_id` (`province_id`);
--
-- Indexes for table `orders`
--
ALTER TABLE `orders`
  ADD PRIMARY KEY (`id`),
  ADD KEY `user_id` (`user_id`),
  ADD KEY `provicence_id` (`province_id`),
  ADD KEY `district_id` (`district_id`);
--
-- Indexes for table `order_items`
--
ALTER TABLE `order_items`
  ADD PRIMARY KEY (`id`),
  ADD KEY `order_id` (`order_id`),
  ADD KEY `product_id` (`product_id`);
--
-- Indexes for table `products`
--
ALTER TABLE `products`
  ADD PRIMARY KEY (`id`),
  ADD UNIQUE KEY `slug` (`slug`),
  ADD KEY `brand_id` (`brand_id`),
  ADD KEY `product_category_id` (`product_category_id`);
--
-- Indexes for table `product_categories`
--
ALTER TABLE `product_categories`
  ADD PRIMARY KEY (`id`),
  ADD UNIQUE KEY `slug` (`slug`);
--
-- Indexes for table `product_images`
--
ALTER TABLE `product_images`
  ADD PRIMARY KEY (`id`),
  ADD KEY `product_id` (`product_id`);
--
-- Indexes for table `product_relates`
--
ALTER TABLE `product_relates`
  ADD PRIMARY KEY (`id`),
  ADD KEY `product_id` (`product_id`);
--
-- Indexes for table `product_reviews`
--
ALTER TABLE `product_reviews`
  ADD PRIMARY KEY (`id`),
  ADD KEY `product_id` (`product_id`),
  ADD KEY `user_id` (`user_id`);
--
-- Indexes for table `provinces`
--
ALTER TABLE `provinces`
  ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
-- (next id = highest dumped id + 1 for each table)
--
--
-- AUTO_INCREMENT for table `admins`
--
ALTER TABLE `admins`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `brands`
--
ALTER TABLE `brands`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=9;
--
-- AUTO_INCREMENT for table `customers`
--
ALTER TABLE `customers`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=14;
--
-- AUTO_INCREMENT for table `districts`
--
ALTER TABLE `districts`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=714;
--
-- AUTO_INCREMENT for table `orders`
--
ALTER TABLE `orders`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=55;
--
-- AUTO_INCREMENT for table `order_items`
--
ALTER TABLE `order_items`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=49;
--
-- AUTO_INCREMENT for table `products`
--
ALTER TABLE `products`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=72;
--
-- AUTO_INCREMENT for table `product_categories`
--
ALTER TABLE `product_categories`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
--
-- AUTO_INCREMENT for table `product_images`
--
ALTER TABLE `product_images`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=68;
--
-- AUTO_INCREMENT for table `product_relates`
--
ALTER TABLE `product_relates`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=9;
--
-- AUTO_INCREMENT for table `product_reviews`
--
ALTER TABLE `product_reviews`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `provinces`
--
ALTER TABLE `provinces`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=64;
--
-- Constraints for dumped tables
-- NOTE(review): `product_relates`.`product_relate_id` has no foreign key,
-- unlike `product_id`; presumably an oversight — confirm before adding one.
--
--
-- Constraints for table `customers`
--
ALTER TABLE `customers`
  ADD CONSTRAINT `customers_ibfk_2` FOREIGN KEY (`province_id`) REFERENCES `provinces` (`id`) ON DELETE CASCADE ON UPDATE CASCADE,
  ADD CONSTRAINT `customers_ibfk_3` FOREIGN KEY (`district_id`) REFERENCES `districts` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `districts`
--
ALTER TABLE `districts`
  ADD CONSTRAINT `districts_ibfk_1` FOREIGN KEY (`province_id`) REFERENCES `provinces` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `orders`
--
ALTER TABLE `orders`
  ADD CONSTRAINT `orders_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `customers` (`id`) ON DELETE CASCADE ON UPDATE CASCADE,
  ADD CONSTRAINT `orders_ibfk_2` FOREIGN KEY (`province_id`) REFERENCES `provinces` (`id`) ON DELETE CASCADE ON UPDATE CASCADE,
  ADD CONSTRAINT `orders_ibfk_3` FOREIGN KEY (`district_id`) REFERENCES `districts` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `order_items`
--
ALTER TABLE `order_items`
  ADD CONSTRAINT `order_items_ibfk_1` FOREIGN KEY (`order_id`) REFERENCES `orders` (`id`) ON DELETE CASCADE ON UPDATE CASCADE,
  ADD CONSTRAINT `order_items_ibfk_2` FOREIGN KEY (`product_id`) REFERENCES `products` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `products`
--
ALTER TABLE `products`
  ADD CONSTRAINT `products_ibfk_1` FOREIGN KEY (`product_category_id`) REFERENCES `product_categories` (`id`) ON DELETE CASCADE ON UPDATE CASCADE,
  ADD CONSTRAINT `products_ibfk_2` FOREIGN KEY (`brand_id`) REFERENCES `brands` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `product_images`
--
ALTER TABLE `product_images`
  ADD CONSTRAINT `product_images_ibfk_1` FOREIGN KEY (`product_id`) REFERENCES `products` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `product_relates`
--
ALTER TABLE `product_relates`
  ADD CONSTRAINT `product_relates_ibfk_1` FOREIGN KEY (`product_id`) REFERENCES `products` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `product_reviews`
--
ALTER TABLE `product_reviews`
  ADD CONSTRAINT `product_reviews_ibfk_1` FOREIGN KEY (`product_id`) REFERENCES `products` (`id`) ON DELETE CASCADE ON UPDATE CASCADE,
  ADD CONSTRAINT `product_reviews_ibfk_2` FOREIGN KEY (`user_id`) REFERENCES `customers` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
import { action, makeObservable, observable } from 'mobx'
class NotificationStore {
constructor (rootStore) {
this.rootStore = rootStore
makeObservable(this)
}
@observable snackOpen = false
@observable snackMessage = ''
@observable snackOptions = {}
@observable dialogOpen = false
@observable dialogMessage = ''
@observable dialogActions = []
@action setSnackOpen = (open) => {
this.snackOpen = open
}
@action setDialogOpen = (open) => {
this.dialogOpen = open
}
@action snack = (message, options = {}) => {
this.snackMessage = message
this.snackOptions = options
this.setSnackOpen(true)
}
@action alert = (message) => {
return new Promise((resolve => {
this.dialogMessage = message
this.setDialogOpen(true)
this.dialogActions = [
{
content: 'OK',
variant: 'contained',
color: 'primary',
onClick: () => resolve(true),
},
]
}))
}
@action confirm = (message) => {
return new Promise((resolve => {
this.dialogMessage = message
this.setDialogOpen(true)
this.dialogActions = [
{
content: 'Cancel',
variant: 'outlined',
color: 'primary',
onClick: () => resolve(false),
},
{
content: 'OK',
variant: 'contained',
color: 'primary',
onClick: () => resolve(true),
},
]
}))
}
}
export default NotificationStore
|
<filename>distribute/src/main/java/org/glamey/training/designmodel/strategy/Strategy.java<gh_stars>0
package org.glamey.training.designmodel.strategy;
/**
 * Strategy pattern: common contract for interchangeable algorithms.
 * Concrete implementations encapsulate one algorithm each; the context
 * selects an implementation at runtime.
 *
 * @author zhouyang.zhou. 2017.05.09.15.
 */
public interface Strategy {
/**
 * Execute this strategy's algorithm against the supplied context.
 *
 * @param context carrier of the data/state the algorithm operates on
 */
void algorithm(StrategyContext context);
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. py:currentmodule:: create_map
:synopsis: Create map from the mcxray simulation.
.. moduleauthor:: <NAME> <<EMAIL>>
Create map from the mcxray simulation.
"""
###############################################################################
# Copyright 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Standard library modules.
import logging
import math
import os.path
# Third party modules.
import h5py
import matplotlib.pyplot as plt
from scipy.constants import e
from numpy.random import normal, poisson
import numpy as np
# Local modules.
from pymcxray.mcxray import HDF5_PARAMETERS
# Project modules.
from xrayspectrummodeling.map.simulation_data import SimulationData
from xrayspectrummodeling import get_current_module_path
# Globals and constants variables.
# Names of the HDF5 attributes/datasets attached to each simulated map
# (used as dictionary/attribute keys by the map read/write code below).
MAP_WIDTH = "width"
MAP_HEIGHT = "height"
MAP_DEPTH = "depth"
MAP_DATA_TYPE = "data type"
MAP_PIXEL_TIME_s = "pixel time (s)"
MAP_CURRENT_nA = "current nA"
MAP_NOMINAL_NUMBER_ELECTRONS = "nominal number electrons"
MAP_SOLID_ANGLE_rad = "solid angle (rad)"
MAP_DETECTOR_NOISE_eV = "detector noise (eV)"
MAP_DETECTOR_RESOLUTION_AT_MN_eV = "detector resolution at Mn Ka (eV)"
MAP_COMMENTS = "comments"
# Axis-scale datasets stored alongside the map data cube.
MAP_DATA_WIDTH_nm = "widths (nm)"
MAP_DATA_HEIGHT_nm = "heights (nm)"
MAP_DATA_DEPTH_keV = "energies (keV)"
class DetectorFunction(object):
    """Gaussian energy-response model of an energy-dispersive X-ray detector.

    FWHM(E) = sqrt(noise^2 + k^2 * w * F * E), where ``noise`` is the
    electronic noise (eV), ``w`` = 3.8 eV per electron-hole pair, ``F`` is
    the Fano factor, and ``k`` = 2*sqrt(2*ln 2) converts a Gaussian sigma
    to its full width at half maximum.
    """

    def __init__(self, electronic_noise_eV, fano_factor=0.125):
        self._electronic_noise_eV = electronic_noise_eV
        self._fano_factor = fano_factor
        # Mean energy (eV) to create one electron-hole pair.
        self._electron_hole_pair_eV = 3.8
        # FWHM = numeric_factor * sigma for a Gaussian peak.
        self._numeric_factor = 2.0 * math.sqrt(2.0 * math.log(2.0))

    def getFwhm_eV(self, xrayEnergy_eV):
        """Detector FWHM (eV) at a single X-ray energy given in eV."""
        noise_sq = self._electronic_noise_eV ** 2
        dispersion = (self._numeric_factor ** 2 * self._electron_hole_pair_eV
                      * self._fano_factor * xrayEnergy_eV)
        return math.sqrt(noise_sq + dispersion)

    def get_fwhms_eV(self, xray_energies_eV):
        """Vectorized FWHM (eV) for an array of X-ray energies in eV."""
        noise_sq = self._electronic_noise_eV ** 2
        dispersion = (self._numeric_factor ** 2 * self._electron_hole_pair_eV
                      * self._fano_factor * xray_energies_eV)
        return np.sqrt(noise_sq + dispersion)

    def getSigma_keV(self, xrayEnergy_keV):
        """Gaussian sigma (keV) at a single X-ray energy given in keV."""
        fwhm_keV = self.getFwhm_eV(xrayEnergy_keV * 1.0e3) / 1.0e3
        return fwhm_keV / self._numeric_factor

    def get_sigmas_keV(self, xray_energies_keV):
        """Vectorized Gaussian sigma (keV) for energies given in keV."""
        fwhms_keV = self.get_fwhms_eV(xray_energies_keV * 1.0e3) / 1.0e3
        return fwhms_keV / self._numeric_factor

    def getElectronicNoise_eV(self):
        """Electronic noise (eV) supplied at construction time."""
        return self._electronic_noise_eV
def get_efficiency():
    """Load the MCXRay detector-efficiency curve shipped with the package.

    Returns the CSV contents as a float ndarray; presumably column 0 is
    energy and column 1 the efficiency (see its use with np.interp below) —
    confirm against the data file.
    """
    csv_path = get_current_module_path(__file__, r"../../data/mcxray_XrayDetectorEfficiency.csv")
    return np.loadtxt(csv_path, float, delimiter=',')
# Build a small (3x3 pixel, 1024-channel) synthetic EDS spectrum map from the
# MM2017 3x3 simulation file, writing one map per dwell time into
# test_maps.hdf5.
# NOTE(review): indentation was lost in this copy; `simulations_group` must
# still be inside the first `with h5py.File(...)` block when used below,
# otherwise the handle is read from a closed file — verify nesting.
# NOTE(review): this passes 15 positional args to `_create_map`, but the
# `_create_map` defined later in this module takes 10 — confirm which
# signature is current.
def create_test_map(data_path, figure=True):
# Nine Fe/Co/Ni test compositions, keyed by region id.
compositions = {1: "Fe-1wt%Co", 2: "Fe-2wt%Co", 3: "Fe-5wt%Co",
4: "Co-1wt%Ni", 5: "Co-2wt%Ni", 6: "Co-5wt%Ni",
7: "Fe-1wt%Co-49.5Ni", 8: "Fe-2wt%Co-49.0Ni", 9: "Fe-5wt%Co-47.5Ni"}
width = 3
height = 3
depth = 1024
data_type = np.int32
current_nA = 1.0
solid_angle_rad = 0.00140035
detector_noise_eV = 50
efficiency = get_efficiency()
# Beam positions span +/- 5 um across the map width.
xs_nm = np.linspace(-5.0e3, 5.0e3, width)
hdf5_file_path = os.path.join(data_path, r"SimulationMapsMM2017_3x3.hdf5")
print(hdf5_file_path)
with h5py.File(hdf5_file_path, 'r', driver='core') as hdf5_file:
simulations_group = hdf5_file["simulations"]
print(simulations_group.name)
# One output map per pixel dwell time (s).
times_s = [0.05, 0.1, 0.5, 1.0, 5.0, 10.0]
hdf5_file_out_path = os.path.join(data_path, r"test_maps.hdf5")
with h5py.File(hdf5_file_out_path, 'w', driver='core') as hdf5_file:
maps_group = hdf5_file.require_group("maps")
for time_s in times_s:
_create_map(compositions, current_nA, data_type, depth, detector_noise_eV, efficiency, figure,
hdf5_file_out_path, height, maps_group, simulations_group, solid_angle_rad, time_s, width,
xs_nm)
# Full-size (128x128) variant of create_test_map for the MM2017 abstract;
# identical flow, different input/output file names and map dimensions.
# NOTE(review): same caveats as create_test_map — lost indentation around the
# h5py `with` blocks, and a 15-vs-10 argument mismatch with the
# `_create_map` defined later in this module.
def create_map_mm2017_abstract(data_path, figure=False):
compositions = {1: "Fe-1wt%Co", 2: "Fe-2wt%Co", 3: "Fe-5wt%Co",
4: "Co-1wt%Ni", 5: "Co-2wt%Ni", 6: "Co-5wt%Ni",
7: "Fe-1wt%Co-49.5Ni", 8: "Fe-2wt%Co-49.0Ni", 9: "Fe-5wt%Co-47.5Ni"}
width = 128
height = 128
depth = 1024
data_type = np.int32
current_nA = 1.0
solid_angle_rad = 0.00140035
detector_noise_eV = 50
efficiency = get_efficiency()
xs_nm = np.linspace(-5.0e3, 5.0e3, width)
hdf5_file_path = os.path.join(data_path, r"SimulationMapsMM2017.hdf5")
with h5py.File(hdf5_file_path, 'r', driver='core') as hdf5_file:
simulations_group = hdf5_file["simulations"]
times_s = [0.05, 0.1, 0.5, 1.0, 5.0, 10.0]
hdf5_file_out_path = os.path.join(data_path, r"map_mm2017_abstract.hdf5")
with h5py.File(hdf5_file_out_path, 'w', driver='core') as hdf5_file:
maps_group = hdf5_file.require_group("maps")
for time_s in times_s:
_create_map(compositions, current_nA, data_type, depth, detector_noise_eV, efficiency, figure,
hdf5_file_out_path, height, maps_group, simulations_group, solid_angle_rad, time_s, width,
xs_nm)
# Export every "/maps/map*" dataset of test_maps.hdf5 to a Bruker RPL/RAW
# pair (parameters sidecar + little-endian int32 data cube via np.memmap).
# NOTE(review): near-duplicate of export_raw_map_mm2017_abstract below —
# only the input path differs; consider a shared helper.
def export_raw_test_map(data_path):
from pySpectrumFileFormat.Bruker.MapRaw.ParametersFile import ParametersFile, BYTE_ORDER_LITTLE_ENDIAN, RECORED_BY_VECTOR, DATA_TYPE_SIGNED
# NOTE(review): Windows-style backslash in "analyzes\test_maps.hdf5" —
# not portable; confirm intended platform.
hdf5_file_out_path = os.path.join(data_path, r"analyzes\test_maps.hdf5")
with h5py.File(hdf5_file_out_path, 'r', driver='core') as hdf5_file:
maps_group = hdf5_file["maps"]
for name, group in maps_group.items():
if str(group.name).startswith("/maps/map"):
map_data_set = group
logging.info(group.name)
logging.info(name)
# Describe the cube geometry/encoding in the .rpl sidecar.
parameters_file = ParametersFile()
parameters_file.width = map_data_set.attrs[MAP_WIDTH]
parameters_file.height = map_data_set.attrs[MAP_HEIGHT]
parameters_file.depth = map_data_set.attrs[MAP_DEPTH]
parameters_file.offset = 0
parameters_file.dataLength_B = 4
parameters_file.dataType = DATA_TYPE_SIGNED
parameters_file.byteOrder = BYTE_ORDER_LITTLE_ENDIAN
parameters_file.recordBy = RECORED_BY_VECTOR
parameters_file.energy_keV = 30.0
parameters_file.pixel_size_nm = 0.0
base_file_out_path = hdf5_file_out_path[:-5] + "_" + name.replace(' ', '_')
parameters_file.write(base_file_out_path + ".rpl")
# Stream the HDF5 dataset straight into a memory-mapped .raw file.
shape = (parameters_file.height, parameters_file.width, parameters_file.depth)
fp = np.memmap(base_file_out_path + ".raw", dtype=np.int32, mode='w+', shape=shape)
fp[:] = map_data_set[:]
del fp
# Sanity-check viewer for an exported test map: plots one pixel spectrum,
# the sum spectrum, the total-intensity image and one ROI image.
# NOTE(review): figures are created but never shown/saved here — presumably
# the caller invokes plt.show(); confirm.
def read_raw_test_map(data_path):
from pySpectrumFileFormat.Bruker.MapRaw.MapRawFormat import MapRawFormat
file_path = os.path.join(data_path, r"test_maps_map_1000000_us.raw")
map_raw = MapRawFormat(file_path)
channels, datacube = map_raw.getDataCube()
plt.figure()
plt.plot(channels, datacube[1,1,:])
x_data, y_data = map_raw.getSpectrum(1, 1)
plt.figure()
plt.plot(x_data, y_data)
x_data, y_data = map_raw.getSumSpectrum()
plt.figure()
plt.plot(x_data, y_data)
image = map_raw.getTotalIntensityImage()
plt.figure()
plt.imshow(image, cmap="gray")
# ROI given as a (start, stop) channel window.
roi = (210, 225)
image = map_raw.getRoiIntensityImage(roi)
plt.figure()
plt.imshow(image, cmap="gray")
# Export every "/maps/map*" dataset of map_mm2017_abstract.hdf5 to a Bruker
# RPL/RAW pair. NOTE(review): duplicate of export_raw_test_map except for
# the input file name; consider factoring out a shared helper.
def export_raw_map_mm2017_abstract(data_path):
from pySpectrumFileFormat.Bruker.MapRaw.ParametersFile import ParametersFile, BYTE_ORDER_LITTLE_ENDIAN, RECORED_BY_VECTOR, DATA_TYPE_SIGNED
hdf5_file_out_path = os.path.join(data_path, r"map_mm2017_abstract.hdf5")
with h5py.File(hdf5_file_out_path, 'r', driver='core') as hdf5_file:
maps_group = hdf5_file["maps"]
for name, group in maps_group.items():
if str(group.name).startswith("/maps/map"):
map_data_set = group
logging.info(group.name)
logging.info(name)
parameters_file = ParametersFile()
parameters_file.width = map_data_set.attrs[MAP_WIDTH]
parameters_file.height = map_data_set.attrs[MAP_HEIGHT]
parameters_file.depth = map_data_set.attrs[MAP_DEPTH]
parameters_file.offset = 0
parameters_file.dataLength_B = 4
parameters_file.dataType = DATA_TYPE_SIGNED
parameters_file.byteOrder = BYTE_ORDER_LITTLE_ENDIAN
parameters_file.recordBy = RECORED_BY_VECTOR
parameters_file.energy_keV = 30.0
parameters_file.pixel_size_nm = 0.0
base_file_out_path = hdf5_file_out_path[:-5] + "_" + name.replace(' ', '_')
parameters_file.write(base_file_out_path + ".rpl")
shape = (parameters_file.height, parameters_file.width, parameters_file.depth)
fp = np.memmap(base_file_out_path + ".raw", dtype=np.int32, mode='w+', shape=shape)
fp[:] = map_data_set[:]
del fp
# Inspect an exported MM2017 map: per-region sum spectra (the 128x128 map is
# tiled in 32-pixel bands), single-pixel and sum spectra, and intensity
# images. Most figures are closed immediately after creation.
def read_raw_map_mm2017_abstract(data_path):
from pySpectrumFileFormat.Bruker.MapRaw.MapRawFormat import MapRawFormat
file_path = os.path.join(data_path, r"map_mm2017_abstract_map_10000000_us.raw")
map_raw = MapRawFormat(file_path)
channels, datacube = map_raw.getDataCube()
print(datacube.shape)
plt.figure()
plt.title("All regions")
plt.semilogy(channels, datacube.sum(axis=(0,1)))
plt.figure()
plt.title("Region 1")
plt.semilogy(channels, datacube[0:32, 0:32, :].sum(axis=(0,1)))
plt.figure()
plt.title("Region 2")
plt.semilogy(channels, datacube[32:32*3, 0:32, :].sum(axis=(0,1)))
plt.figure()
plt.title("Region 3")
plt.semilogy(channels, datacube[32*3:, 0:32, :].sum(axis=(0,1)))
plt.figure()
plt.plot(channels, datacube[1,1,:])
plt.close()
x_data, y_data = map_raw.getSpectrum(1, 1)
plt.figure()
plt.plot(x_data, y_data)
plt.close()
x_data, y_data = map_raw.getSumSpectrum()
plt.figure()
plt.plot(x_data, y_data)
plt.close()
image = map_raw.getTotalIntensityImage()
plt.figure()
plt.imshow(image, cmap="gray")
plt.close()
# ROI given as a (start, stop) channel window.
roi = (225, 235)
image = map_raw.getRoiIntensityImage(roi)
plt.figure()
plt.imshow(image, cmap="gray")
plt.close()
plt.figure()
plt.plot(x_data, np.linspace(0.0, 30.0, len(x_data)))
plt.close()
def bse_image_mm2017(data_path):
    """Render the backscattered-electron (BSE) image of the MM2017 map.

    Reads the "Backscattering coefficient" of every simulated beam position
    from ``SimulationMapsMM2017.hdf5`` and saves a 128x128 grayscale PNG
    next to the input file.

    Fix: ``np.float`` (an alias removed in NumPy 1.20+) replaced by the
    builtin ``float``.

    :param data_path: directory containing ``SimulationMapsMM2017.hdf5``.
    """
    hdf5_file_path = os.path.join(data_path, r"SimulationMapsMM2017.hdf5")
    with h5py.File(hdf5_file_path, 'r', driver='core') as hdf5_file:
        simulations_group = hdf5_file["simulations"]
        width = 128
        height = width
        data_type = float  # was np.float, removed in modern NumPy
        # Beam positions span +/- 5 um on both axes of the square map.
        xs_nm = np.linspace(-5.0e3, 5.0e3, width)
        shape = (height, width)
        data = np.zeros(shape, dtype=data_type)
        for group in simulations_group.values():
            try:
                # Map the simulated beam position back to pixel indices.
                index_x = np.where(xs_nm == group.attrs["beamPosition"][0])[0][0]
                index_y = np.where(xs_nm == group.attrs["beamPosition"][1])[0][0]
                bse = group["ElectronResults"].attrs["Backscattering coefficient"]
                data[index_y, index_x] = bse
            except IndexError as message:
                # Beam position not on the expected grid; skip that pixel.
                logging.error(message)
                logging.info(group.name)
    plt.figure()
    plt.imshow(data, cmap='gray')
    plt.xticks([])
    plt.yticks([])
    figure_file_path = os.path.join(data_path, "bse_image.png")
    plt.savefig(figure_file_path)
    # plt.close()
# Render electron-signal images (BSE, transmitted, skirted, and corrected
# transmitted = TE + skirted) from a simulation file into data_path/figures.
# NOTE(review): some figures are deliberately left open (commented
# plt.close()) — presumably for interactive inspection; confirm.
def _create_electron_maps(data_path, hdf5_file_path, positions):
symbols = ['Fe', 'Co', 'Ni']
simulation_data = SimulationData(hdf5_file_path, positions, symbols)
# BSE map
bse_map = simulation_data.get_bse_map()
plt.figure()
plt.imshow(bse_map, cmap='gray')
plt.xticks([])
plt.yticks([])
figure_file_path = os.path.join(data_path, "figures", "bse_image.png")
plt.savefig(figure_file_path)
# plt.close()
# TE map
te_map = simulation_data.get_te_map()
plt.figure()
plt.imshow(te_map, cmap='gray')
plt.xticks([])
plt.yticks([])
figure_file_path = os.path.join(data_path, "figures", "te_image.png")
plt.savefig(figure_file_path)
plt.close()
# skirted electron map
se_map = simulation_data.get_skirted_electron_map()
plt.figure()
plt.imshow(se_map, cmap='gray')
plt.xticks([])
plt.yticks([])
figure_file_path = os.path.join(data_path, "figures", "se_image.png")
plt.savefig(figure_file_path)
plt.close()
# TE corrected map
te_map = simulation_data.get_te_map()
plt.figure()
plt.imshow(te_map+se_map, cmap='gray')
plt.xticks([])
plt.yticks([])
figure_file_path = os.path.join(data_path, "figures", "transmitted_electron_image.png")
plt.savefig(figure_file_path)
# plt.close()
# Render X-ray intensity images for Fe and Co: Ka generated, Ka emitted,
# and the per-element f-ratio (element emitted / total emitted).
# NOTE(review): the indexing [:, :, :, 0:1, 1] presumably selects the Ka
# line (index 0) and the "generated"/"emitted" column (1 vs 3) of the
# intensity array — confirm against SimulationData.get_intensity_data.
def _create_intensity_maps(data_path, hdf5_file_path, positions):
symbols = ['Fe', 'Co']
simulation_data = SimulationData(hdf5_file_path, positions, symbols)
intensity_data = {}
for symbol in symbols:
intensity_data[symbol] = simulation_data.get_intensity_data(symbol)
# Ka map
intensity_map = np.sum(intensity_data[symbol][:, :, :, 0:1, 1], axis=(2,3))
logging.debug(intensity_data[symbol].shape)
logging.debug(intensity_map.shape)
try:
plt.figure()
plt.title("{} Ka generated".format(symbol))
plt.imshow(intensity_map, cmap='gray')
plt.colorbar()
plt.xticks([])
plt.yticks([])
figure_file_path = os.path.join(data_path, "figures", "intensity_{}_ka_generated_image.png".format(symbol))
plt.savefig(figure_file_path)
plt.close()
except ValueError as message:
logging.error(message)
logging.info(symbol)
intensity_map = np.sum(intensity_data[symbol][:, :, :, 0:1, 3], axis=(2, 3))
logging.info(intensity_data[symbol].shape)
logging.info(intensity_map.shape)
try:
plt.figure()
plt.title("{} Ka emitted".format(symbol))
plt.imshow(intensity_map, cmap='gray')
plt.colorbar()
plt.xticks([])
plt.yticks([])
figure_file_path = os.path.join(data_path, "figures", "intensity_{}_ka_emitted_image.png".format(symbol))
plt.savefig(figure_file_path)
plt.close()
except ValueError as message:
logging.error(message)
logging.info(symbol)
# Re-fetch intensities, then build f-ratio maps per element.
intensity_data = {}
for symbol in symbols:
intensity_data[symbol] = simulation_data.get_intensity_data(symbol)
for symbol in symbols:
# Ka f-ratio map
intensity_element_map = np.sum(intensity_data[symbol][:, :, :, 0:1, 3], axis=(2, 3))
intensity_total_map = np.zeros_like(intensity_element_map)
for symbol_total in symbols:
intensity_total_map += np.sum(intensity_data[symbol_total][:, :, :, 0:1, 3], axis=(2, 3))
fratio_element_map = intensity_element_map / intensity_total_map
try:
plt.figure()
plt.title("{} Ka emitted".format(symbol))
# plt.imshow(fratio_element_map, cmap='gray', norm=colors.LogNorm(vmin=0.001, vmax=1.0))
plt.imshow(fratio_element_map, cmap='gray', vmin=0.0, vmax=1.0)
plt.colorbar()
plt.xticks([])
plt.yticks([])
figure_file_path = os.path.join(data_path, "figures", "fratio_{}_ka_emitted_image.png".format(symbol))
plt.savefig(figure_file_path)
# plt.close()
except ValueError as message:
logging.error(message)
logging.info(symbol)
# For the first beam position, plot the full detection chain: emitted
# spectrum -> detector efficiency -> photon counts for a dwell time ->
# rebinned counts -> detector-broadened ("detected") spectrum, saving each
# stage as a PNG in data_path/figures.
# NOTE(review): the code after the for-loop reuses the loop variable
# `position` (loop runs over [0:1], i.e. one position) — fragile; confirm
# the intent is "first position only".
# NOTE(review): `_compute_number_electrons` and `_compute_xrays` are not
# visible in this excerpt; their contracts are assumed from usage.
def _create_spectra(data_path, hdf5_file_path, positions):
symbols = ['Fe', 'Co', 'Ni']
simulation_data = SimulationData(hdf5_file_path, positions, symbols)
for position in positions.get_list()[0:1]:
energies_keV, spectrum = simulation_data.get_emitted_spectrum(position)
plt.figure()
title = "{0} ({1}, {2})".format("Emitted", position[0], position[1])
plt.title(title)
plt.semilogy(energies_keV, spectrum)
plt.xlabel("Energy (keV)")
plt.ylabel("Intensity (1/keV/e-/sr)")
file_name = "{0}_{1}_{2}.png".format("Spectrum_Emitted", position[0], position[1])
figure_file_path = os.path.join(data_path, "figures", file_name)
plt.savefig(figure_file_path)
plt.close()
energies_keV, spectrum = simulation_data.get_detected_spectrum(position)
plt.figure()
title = "{0} ({1}, {2})".format("Detected", position[0], position[1])
plt.title(title)
plt.semilogy(energies_keV, spectrum)
plt.xlabel("Energy (keV)")
plt.ylabel("Intensity (photons)")
plt.ylim(ymin=1)
file_name = "{0}_{1}_{2}.png".format("Spectrum_Detected", position[0], position[1])
figure_file_path = os.path.join(data_path, "figures", file_name)
plt.savefig(figure_file_path)
plt.close()
# Calculated detected
# Fixed acquisition parameters for the "calculated detected" stages.
current_nA = 1.0
solid_angle_rad = 0.00140035
detector_noise_eV = 50
efficiency = get_efficiency()
time_s = 100.0
depth = 128
nominal_number_electrons, number_electrons = _compute_number_electrons(current_nA, time_s)
logging.debug("{} {}".format(nominal_number_electrons, number_electrons))
energy_data, intensity_data_1_ekeVsr = simulation_data.get_emitted_spectrum(position)
# Apply the detector efficiency curve by interpolation.
intensity_efficiency_data_1_ekeVsr = intensity_data_1_ekeVsr * np.interp(energy_data, efficiency[:, 0],
efficiency[:, 1])
plt.figure()
title = "{} ({}, {})".format("Emitted * efficiency", position[0], position[1])
plt.title(title)
plt.semilogy(energy_data, intensity_data_1_ekeVsr, '.')
plt.semilogy(energy_data, intensity_efficiency_data_1_ekeVsr, '.')
plt.xlabel("Energy (keV)")
plt.ylabel("Intensity (1/keV/e-/sr)")
file_name = "{0}_{1}_{2}.png".format("Spectrum_Emitted_Efficiency", position[0], position[1])
figure_file_path = os.path.join(data_path, "figures", file_name)
plt.savefig(figure_file_path)
plt.close()
# Convert 1/keV/e-/sr to photon counts for this dose and solid angle.
delta_energy_keV = energy_data[1] - energy_data[0]
intensity_data = intensity_efficiency_data_1_ekeVsr * number_electrons * solid_angle_rad * delta_energy_keV
plt.figure()
title = "{} ({}, {}), t = {} s".format("Emitted counts", position[0], position[1], time_s)
plt.title(title)
plt.semilogy(energy_data, intensity_data, '.')
plt.xlabel("Energy (keV)")
plt.ylabel("Intensity (photons)")
plt.ylim(ymin=1)
file_name = "{}_{}_{}_t{}s.png".format("Spectrum_Emitted_Counts", position[0], position[1], time_s)
figure_file_path = os.path.join(data_path, "figures", file_name)
plt.savefig(figure_file_path)
plt.close()
# Rebin onto a `depth`-channel 0-30 keV scale.
energy_edges_keV = np.linspace(0.0, 30.0, depth + 1)
energies_keV = np.linspace(0.0, 30.0, depth)
counts_data = change_energy_scale2(energy_data, intensity_data, energy_edges_keV)
plt.figure()
title = "{} ({}, {}), t = {} s".format("Emitted counts", position[0], position[1], time_s)
plt.title(title)
plt.semilogy(energy_data, intensity_data, '-')
plt.semilogy(energies_keV, counts_data, '.')
plt.xlabel("Energy (keV)")
plt.ylabel("Intensity (photons)")
plt.ylim(ymin=1)
file_name = "{}_{}_{}_t{}s.png".format("Spectrum_Emitted_Counts", position[0], position[1], time_s)
figure_file_path = os.path.join(data_path, "figures", file_name)
plt.savefig(figure_file_path)
# plt.close()
# Visualize the detector broadening model (sigma and FWHM vs energy).
detector = DetectorFunction(detector_noise_eV)
sigmas_keV = detector.get_sigmas_keV(energies_keV)
fwhms_eV = detector.get_fwhms_eV(energies_keV*1.0e3)
plt.figure()
plt.title("Detector")
plt.plot(energies_keV, sigmas_keV)
plt.plot(energies_keV, fwhms_eV/1.0e3)
plt.xlabel("Energy (keV)")
plt.ylabel("Sigma (keV)")
# plt.ylim(ymin=1)
file_name = "{}_{}_{}_t{}s.png".format("Detector", position[0], position[1], time_s)
figure_file_path = os.path.join(data_path, "figures", file_name)
plt.savefig(figure_file_path)
plt.close()
plt.figure()
title = "{} ({}, {}), t = {} s".format("Detected", position[0], position[1], time_s)
plt.title(title)
# Average several stochastic detector realizations of the spectrum.
mean_intensity = np.zeros_like(energies_keV)
number_repetitions = 50
for repetition in range(number_repetitions):
xrays = _compute_xrays(detector_noise_eV, energies_keV, counts_data)
counts, _bin_edges = np.histogram(xrays, bins=energy_edges_keV)
mean_intensity += counts
# plt.semilogy(energies_keV, counts, label=repetition)
logging.debug("{:d} {:d} {:d}".format(int(np.sum(counts_data)), len(xrays), int(np.sum(counts_data)-len(xrays))))
logging.debug("{:d} {:d} {:d}".format(len(xrays), int(np.sum(counts)) , len(xrays) - int(np.sum(counts))))
mean_intensity /= number_repetitions
plt.semilogy(energies_keV, counts_data)
plt.semilogy(energies_keV, mean_intensity)
plt.xlabel("Energy (keV)")
plt.ylabel("Intensity (photons)")
plt.ylim(ymin=1)
# plt.legend()
file_name = "{}_{}_{}_t{}s.png".format("Spectrum_Detected", position[0], position[1], time_s)
figure_file_path = os.path.join(data_path, "figures", file_name)
plt.savefig(figure_file_path)
# plt.close()
def compute_histogram(energy_data, intensity_data, energy_edges_keV):
    """Expand a spectrum into discrete x-ray events and histogram them.

    Each (energy, intensity) pair contributes round(intensity) events at
    that energy; the events are then binned on energy_edges_keV.
    Logs total input intensity, event count, and their rounding difference.
    """
    xrays = []
    for energy_keV, intensity in zip(energy_data, intensity_data):
        xrays.extend([energy_keV] * int(round(intensity)))
    counts_data, _ = np.histogram(xrays, bins=energy_edges_keV)
    logging.info("{:d} {:d} {:d}".format(int(np.sum(intensity_data)), len(xrays), int(np.sum(intensity_data) - len(xrays))))
    return counts_data
def change_energy_scale(energy_data, intensity_data, energy_edges_keV):
    """Rebin a spectrum: accumulate each input point's intensity into the
    [edge_i, edge_{i+1}) bin that contains its energy.

    Points outside the edge range are silently dropped.
    """
    # np.float was removed in NumPy 1.24; the builtin float is the documented
    # replacement and is what the alias always meant.
    counts_data = np.zeros((len(energy_edges_keV) - 1), dtype=float)
    for energy_keV, intensity in zip(energy_data, intensity_data):
        for i in range(len(energy_edges_keV) - 1):
            if energy_edges_keV[i] <= energy_keV < energy_edges_keV[i + 1]:
                counts_data[i] += intensity
                break  # bins are disjoint: a point falls in at most one
    return counts_data
def change_energy_scale2(energy_data, intensity_data, energy_edges_keV):
    """Fast rebinning via binary search (same result as change_energy_scale).

    The last input point is skipped, mirroring the original implementation.
    Out-of-range points are now dropped; previously an energy below the
    first edge wrapped to the last bin (index -1) and an energy at/above the
    last edge raised IndexError.
    """
    # np.float was removed in NumPy 1.24; builtin float is equivalent.
    counts_data = np.zeros((len(energy_edges_keV) - 1), dtype=float)
    for energy_keV, intensity in zip(energy_data[:-1], intensity_data[:-1]):
        i = np.searchsorted(energy_edges_keV, energy_keV, side="right") - 1
        if 0 <= i < len(counts_data):
            counts_data[i] += intensity
    return counts_data
def _create_spectra_maps(data_path, hdf5_file_path, hdf5_file_out_path, positions):
    """Generate simulated spectrum maps for each dwell time and write them
    into the 'maps' group of the output HDF5 file."""
    logging.info("_create_spectra_maps")
    depth = 1024                  # number of energy channels per spectrum
    data_type = np.int32
    current_nA = 1.0              # beam current
    solid_angle_rad = 0.00140035  # detector solid angle
    detector_noise_eV = 50
    efficiency = get_efficiency()
    with h5py.File(hdf5_file_path, 'r', driver='core') as hdf5_file:
        simulations_group = hdf5_file["simulations"]
        times_s = [0.005, 0.01, 0.05, 0.1, 0.5, 1.0, 5.0, 10.0, 50.0, 100.0, 500.0, 1000.0]
        # NOTE(review): the full list above is immediately overridden; only
        # t = 0.1 s is processed. Remove the next line to run all dwell times.
        times_s = [0.1]
        # NOTE(review): this inner `with` rebinds `hdf5_file`, shadowing the
        # input file handle; simulations_group stays valid because the outer
        # file remains open.
        with h5py.File(hdf5_file_out_path, 'a', driver='core') as hdf5_file:
            maps_group = hdf5_file.require_group("maps")
            for time_s in times_s:
                _create_map(current_nA, data_type, depth, detector_noise_eV, efficiency, maps_group,
                            simulations_group, solid_angle_rad, time_s, positions)
def _create_map(current_nA, data_type, depth, detector_noise_eV, efficiency, maps_group, simulations_group,
                solid_angle_rad, time_s, positions):
    """Build one (y, x, energy) spectrum map for the given dwell time and
    write it via _write_map. Does nothing if the map already exists."""
    logging.info("_create_map {}".format(time_s))
    time_us = time_s * 1.0e6
    map_name = "map %i us" % (time_us)
    if map_name in maps_group:
        logging.info("Map already exist skip it: {}".format(map_name))
        return
    shape = (positions.y_pixels, positions.x_pixels, depth)
    data = np.zeros(shape, dtype=data_type)
    for group in simulations_group.values():
        if not group.name.endswith(HDF5_PARAMETERS):
            try:
                # Map the simulated beam position onto pixel indices; raises
                # IndexError if the position is not on the requested grid.
                index_x = np.where(positions.xs_nm == group.attrs["beamPosition"][0])[0][0]
                index_y = np.where(positions.ys_nm == group.attrs["beamPosition"][1])[0][0]
                nominal_number_electrons, number_electrons = _compute_number_electrons(current_nA, time_s)
                delta_energy_keV, energy_data, intensity_data = _compute_intensity(efficiency, group, number_electrons,
                                                                                   solid_angle_rad, depth)
                xrays = _compute_xrays(detector_noise_eV, energy_data, intensity_data)
                counts, energies_keV = _compute_counts(data, depth, index_x, index_y, xrays)
            except IndexError:
                # Off-grid simulation: skip it silently.
                pass
    # NOTE(review): if no simulation matched, nominal_number_electrons and
    # energies_keV were never assigned and the call below raises
    # UnboundLocalError — confirm every input file has at least one on-grid
    # simulation group.
    _write_map(current_nA, data, data_type, depth, detector_noise_eV, energies_keV, maps_group,
               nominal_number_electrons, shape, solid_angle_rad, time_s, positions)
def _write_map(current_nA, data, data_type, depth, detector_noise_eV, energies_keV, maps_group,
               nominal_number_electrons, shape, solid_angle_rad, time_s, positions):
    """Write one spectrum map, its axis scale datasets, and the acquisition
    metadata attributes into `maps_group`."""
    logging.info("_write_map {}".format(time_s))
    detector = DetectorFunction(detector_noise_eV)
    time_us = time_s * 1.0e6
    map_name = "map {} us".format(time_us)
    map_data_set = maps_group.require_dataset(map_name, shape, dtype=data_type)
    map_data_set[...] = data
    # Acquisition metadata.
    map_data_set.attrs[MAP_WIDTH] = positions.x_pixels
    map_data_set.attrs[MAP_HEIGHT] = positions.y_pixels
    map_data_set.attrs[MAP_DEPTH] = depth
    map_data_set.attrs[MAP_DATA_TYPE] = str(data_type)
    map_data_set.attrs[MAP_PIXEL_TIME_s] = time_s
    map_data_set.attrs[MAP_CURRENT_nA] = current_nA
    map_data_set.attrs[MAP_NOMINAL_NUMBER_ELECTRONS] = nominal_number_electrons
    map_data_set.attrs[MAP_SOLID_ANGLE_rad] = solid_angle_rad
    map_data_set.attrs[MAP_DETECTOR_NOISE_eV] = detector_noise_eV
    # NOTE(review): other call sites use detector.get_fwhms_eV(); confirm
    # DetectorFunction really exposes getFwhm_eV as well.
    map_data_set.attrs[MAP_DETECTOR_RESOLUTION_AT_MN_eV] = detector.getFwhm_eV(5898.0)
    # NOTE(review): the data cube is actually filled as data[Y, X, D] in
    # _compute_counts — verify this comment string against consumers.
    map_data_set.attrs[MAP_COMMENTS] = "data[X, Y, D]"
    # Axis scale datasets. np.float was removed in NumPy 1.24; the builtin
    # float is the documented replacement.
    width_data_set = maps_group.require_dataset(MAP_DATA_WIDTH_nm, (positions.x_pixels,), dtype=float)
    width_data_set[...] = positions.xs_nm
    height_data_set = maps_group.require_dataset(MAP_DATA_HEIGHT_nm, (positions.y_pixels,), dtype=float)
    height_data_set[...] = positions.ys_nm
    depth_data_set = maps_group.require_dataset(MAP_DATA_DEPTH_keV, (depth,), dtype=float)
    depth_data_set[...] = energies_keV
    # NOTE(review): Dataset.dims.create_scale() is deprecated in h5py >= 2.10
    # in favor of Dataset.make_scale(); migrate when the h5py baseline allows.
    map_data_set.dims.create_scale(width_data_set, "X (nm)")
    map_data_set.dims.create_scale(height_data_set, "Y (nm)")
    map_data_set.dims.create_scale(depth_data_set, "Energies (keV)")
    map_data_set.dims[0].attach_scale(width_data_set)
    map_data_set.dims[1].attach_scale(height_data_set)
    map_data_set.dims[2].attach_scale(depth_data_set)
def _compute_counts(data, depth, index_x, index_y, xrays):
energy_edges_keV = np.linspace(0.0, 30.0, depth + 1)
energies_keV = np.linspace(0.0, 30.0, depth)
counts, _bin_edges = np.histogram(xrays, bins=energy_edges_keV)
data[index_y, index_x, :] = counts
return counts, energies_keV
def _compute_xrays(detector_noise_eV, energy_data, intensity_data):
    """Simulate detected x-ray events for a spectrum.

    Per channel: draw a Poisson count from the expected intensity, then
    smear each event's energy with the detector's Gaussian response.
    Returns a flat list of event energies (keV).
    """
    detector = DetectorFunction(detector_noise_eV)
    sigmas_keV = detector.get_sigmas_keV(energy_data)
    events = []
    for mean_energy, expected_counts, sigma in zip(energy_data, intensity_data, sigmas_keV):
        drawn = int(round(poisson(expected_counts)))
        smeared = normal(mean_energy, sigma, size=drawn)
        events.extend(smeared.tolist())
    return events
def _compute_intensity(efficiency, group, number_electrons, solid_angle_rad, depth):
    """Convert a simulated emitted spectrum (per electron/keV/sr) into
    expected detected counts on a fixed 0-30 keV, `depth`-channel scale.

    Returns (delta_energy_keV, energies_keV, counts_data).
    """
    energy_data = group["XraySpectraRegionsEmitted/energies_keV"][:]
    intensity_data_1_ekeVsr = group["XraySpectraRegionsEmitted/total_1_ekeVsr"][:]
    # Apply detector efficiency, interpolated onto the simulated energies.
    intensity_data_1_ekeVsr *= np.interp(energy_data, efficiency[:, 0], efficiency[:, 1])
    # assumes a uniform simulated energy grid — TODO confirm
    delta_energy_keV = energy_data[1] - energy_data[0]
    # photons = (photons / e / keV / sr) * electrons * sr * keV
    intensity_data = intensity_data_1_ekeVsr * number_electrons * solid_angle_rad * delta_energy_keV
    energy_edges_keV = np.linspace(0.0, 30.0, depth + 1)
    energies_keV = np.linspace(0.0, 30.0, depth)
    counts_data = change_energy_scale2(energy_data, intensity_data, energy_edges_keV)
    return delta_energy_keV, energies_keV, counts_data
def _compute_number_electrons(current_nA, time_s):
    """Return (nominal, sampled) electron counts for a dwell.

    nominal = I * t / e; the sampled value is Poisson-distributed, falling
    back to a Gaussian approximation when the Poisson draw rejects the
    (very large) mean with ValueError.
    """
    expected = current_nA * 1.0e-9 * time_s / e
    try:
        sampled = poisson(expected)
    except ValueError:
        sampled = normal(expected, np.sqrt(expected))
    return expected, sampled
def _export_raw_map(hdf5_file_path):
    """Export every '/maps/map*' dataset of the HDF5 file as a Bruker
    RAW/RPL pair next to the HDF5 file."""
    from pySpectrumFileFormat.Bruker.MapRaw.ParametersFile import ParametersFile, BYTE_ORDER_LITTLE_ENDIAN, \
        RECORED_BY_VECTOR, DATA_TYPE_SIGNED
    logging.info("_export_raw_map")
    with h5py.File(hdf5_file_path, 'r', driver='core') as hdf5_file:
        maps_group = hdf5_file["maps"]
        for name, group in maps_group.items():
            if str(group.name).startswith("/maps/map"):
                map_data_set = group
                logging.info(group.name)
                logging.info(name)
                # Describe the raw cube in the .rpl parameters file.
                parameters_file = ParametersFile()
                parameters_file.width = map_data_set.attrs[MAP_WIDTH]
                parameters_file.height = map_data_set.attrs[MAP_HEIGHT]
                parameters_file.depth = map_data_set.attrs[MAP_DEPTH]
                parameters_file.offset = 0
                parameters_file.dataLength_B = 4  # int32 samples
                parameters_file.dataType = DATA_TYPE_SIGNED
                parameters_file.byteOrder = BYTE_ORDER_LITTLE_ENDIAN
                parameters_file.recordBy = RECORED_BY_VECTOR
                parameters_file.energy_keV = 30.0
                parameters_file.pixel_size_nm = 0.0
                # '<file>_map_<t>_us' base name; strips the '.hdf5' extension.
                base_file_out_path = hdf5_file_path[:-5] + "_" + name.replace(' ', '_')
                parameters_file.write(base_file_out_path + ".rpl")
                # Stream the cube out through a memory-mapped .raw file.
                shape = (parameters_file.height, parameters_file.width, parameters_file.depth)
                fp = np.memmap(base_file_out_path + ".raw", dtype=np.int32, mode='w+', shape=shape)
                fp[:] = map_data_set[:]
                del fp
if __name__ == '__main__':
    import sys
    logging.getLogger().setLevel(logging.INFO)
    # Data path may be given as the first CLI argument; otherwise use the
    # author's default workspace.
    if len(sys.argv) > 1:
        data_path = sys.argv[1]
    else:
        data_path = r"D:\work\Dropbox\hdemers\professional\results\simulations\mcxray\SimulationMapsMM2017\analyzes"
    logging.debug(sys.argv)
    logging.info(data_path)
    # Analysis entry points; uncomment as needed.
    # create_test_map(data_path, figure=True)
    # export_raw_test_map(data_path)
    # read_raw_test_map(data_path)
    # create_map_mm2017_abstract(data_path)
    # export_raw_map_mm2017_abstract(data_path)
    # read_raw_map_mm2017_abstract(data_path)
    # bse_image_mm2017(data_path)
    logging.info("Done")
    # Show any figures produced by the enabled analyses.
    plt.show()
|
<reponame>apb2006/vue-poc
// Manage the array of text sources used for the editor tabs.
// item{
// name: file name
// contentType: "text/xml",
// mode: "xml",
// text:
// id: ids have the form "Tn"
// uri: path to save to
// requires: Settings,HTTP
//
const GEditTabs={
data(){
return {
items:[],
length: 0,
nextId: 1,
currentId: null,
restored: null
}
},
methods: {
// add tab return index
addItem(tab,pos){
//console.log("new: ",tab," ,pos:",pos);
var def={name: "AA"+this.nextId,
contentType: "text/xml",
mode: "xml",
text: "<foo>" +this.nextId +"</foo>",
uri: null
};
var etab = Object.assign(def,tab);
etab.id= "T" + this.nextId
if(pos){
this.items.splice(pos, 0, etab)
}else{
this.items.push (etab);
}
this.length++
this.nextId++;
var ind=this.items.indexOf(etab)
return ind;
},
closeItem(item){
//https://github.com/vuejs/vue/issues/5855
this.items=this.items.filter(t => t.id !== item.id)
this.length--;
},
// fetch content from server and create tab
loadItem(url){
HTTP.get("get",{params: {url:url}})
.then(r=>{
console.log(r)
var tab={
text: ""+ r.data.data,
url: url,
name: url.split(/.*[\/|\\]/)[1]
};
this.addItem(tab);
})
.catch(error=> {
console.log(error);
alert("Get query error:\n"+url)
});
},
save(){
Settings.setItem('edit/items',this.items);
},
restore(){
var that=this
this.restored=Settings.getItem('edit/items')
.then(function (v){
//console.log("items ",v)
v.forEach(v =>that.addItem(v))
})
.catch(error=> {
console.log(error);
alert("load error")
});
},
sorted(q){ /* return sorted and filtered array of tab indices */
var len=this.items.length
var indices = new Array(len);
for (var i = 0; i < len; ++i) indices[i] = i;
var list=this.items;
indices=indices.filter(a=>(!q) || list[a].name.toLowerCase().includes(q.toLowerCase()))
indices.sort((a,b) =>list[a].name.localeCompare(list[b].name))
return indices
}
},
created(){
console.log("EditTabs created")
}
};
|
package test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/pip-services/pip-services-runtime-go"
)
func TestMicroserviceError(t *testing.T) {
error := runtime.NewError("Test error").ForComponent("TestComponent").WithCode("TestError")
assert.Equal(t, "TestComponent", error.Component)
assert.Equal(t, "TestError", error.Code)
assert.Equal(t, "Test error", error.Message)
error = runtime.NewError("").ForComponent("TestComponent")
assert.Equal(t, "InternalError", error.Code)
assert.Equal(t, "Internal error", error.Message)
} |
// Math.round rounds to the nearest integer (halves go up).
console.log(Math.round(1.6)); // 2
console.log(Math.round(1.4)); // 1

// Add two numbers; `first`/`second` are the parameters, the values passed
// at the call site are the arguments.
const sum = (first, second) => first + second;

console.log(sum(2, 4)); // 6
"""
Generate a code to convert binary to decimal
"""
def bin_to_dec(binary_number):
    """Convert a binary digit string (e.g. "101") to its decimal value.

    An empty string yields 0 (the accumulator's initial value).
    """
    value = 0
    for bit in binary_number:
        value = value * 2 + int(bit)
    return value
if __name__ == '__main__':
    # Demo: 0b101 == 5
    binary_number = "101"
    print(bin_to_dec(binary_number))
<reponame>jamiemd/RecipeApp_Vue.js
const Recipe = require("./RecipeModel");
// reference: https://medium.com/@yugagrawal95/mongoose-mongodb-functions-for-crud-application-1f54d74f1b34
// HTTP status codes used by the route handlers below.
const STATUS_USER_ERROR = 422;
const STATUS_SERVER_ERROR = 500;
const STATUS_OKAY = 200;
const STATUS_NOT_FOUND = 404;
module.exports = app => {
app.get("/api/recipes", (req, res) => {
// console.log("req.body in getrecipes", req.body);
Recipe.find({})
.then(result => {
res.status(200).json(result);
})
.catch(() => {
res
.status(500)
.json({ message: "The information could not be retrieved" });
});
});
app.post("/api/add-recipe", function(req, res) {
console.log("req.body in addrecipes", req.body);
const newRecipe = new Recipe(req.body);
newRecipe.save(function(err, result) {
if (err) {
res.status(STATUS_USER_ERROR).json({ error: "Error while adding" });
} else {
res.status(STATUS_OKAY).json({ recipe: result });
}
});
});
app.put("/api/update-recipe", function(req, res) {
console.log("req.body in update", req.body);
Recipe.findOneAndUpdate(
{ _id: req.body._id },
{
$set: {
name: req.body.name,
ingredients: req.body.ingredients,
instructions: req.body.instructions
}
},
{ new: true }
)
.then(result => {
res.status(STATUS_OKAY).json({ message: "Recipe Deleted" });
})
.catch(error => {
res.status(STATUS_USER_ERROR).json({ message: "Updated Failed" });
});
});
app.delete("/api/delete-recipe", function(req, res) {
console.log("req.body in delete", req.body);
Recipe.findOneAndRemove(req.body._id)
.then(result => {
res.status(STATUS_OKAY).json({ message: "Recipe Deleted" });
})
.catch(error => {
res.status(STATUS_USER_ERROR).json({ message: "Delete Failed" });
});
});
};
|
#!/usr/bin/env bash
#
# MetaCall Configuration Environment Bash Script by Parra Studios
# Configure and install MetaCall environment script utility.
#
# Copyright (C) 2016 - 2019 Vicente Eduardo Ferrer Garcia <vic798@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Default configuration; sub_options() flips the flags from CLI arguments.
ROOT_DIR=$(pwd)
RUN_AS_ROOT=0
SUDO_CMD=sudo
# Component install toggles (1 = install).
INSTALL_APT=1
INSTALL_PYTHON=0
INSTALL_RUBY=0
INSTALL_NETCORE=0
INSTALL_V8=0
INSTALL_NODEJS=0
INSTALL_FILE=0
INSTALL_PORTS=0
INSTALL_CLEAN=0
SHOW_HELP=0
# Script name for usage messages.
PROGNAME=$(basename $0)
# Install packages and mark them held so apt autoremove keeps them.
sub_apt_install_hold(){
	# "$@" preserves each package argument as its own word (robust if a
	# caller ever passes an argument containing whitespace).
	$SUDO_CMD apt-get -y install --no-install-recommends "$@"
	$SUDO_CMD apt-mark hold "$@"
}
# Base packages
sub_apt(){
	echo "configure apt"
	cd $ROOT_DIR
	# BUG FIX: the second apt-get also needs $SUDO_CMD; previously it ran
	# unprivileged (and failed) whenever the script was not run as root.
	$SUDO_CMD apt-get update && $SUDO_CMD apt-get -y install --no-install-recommends wget
}
# Python runtime
sub_python(){
	echo "configure python"
	cd $ROOT_DIR
	sub_apt_install_hold python3 libpython3.5
}

# Ruby runtime
sub_ruby(){
	echo "configure ruby"
	cd $ROOT_DIR
	sub_apt_install_hold ruby2.3 libruby2.3 \
		libgdbm3 libncurses5 libssl1.0.2 libyaml-0-2 rake ruby ruby-did-you-mean \
		ruby-minitest ruby-net-telnet ruby-power-assert ruby-test-unit \
		rubygems-integration ca-certificates
}

# NetCore runtime
sub_netcore(){
	echo "configure netcore"
	cd $ROOT_DIR
	# Native dependencies required by the .NET Core runtime.
	sub_apt_install_hold libc6 libcurl3 libgcc1 libgssapi-krb5-2 libicu57 \
		liblttng-ust0 libssl1.0.2 libstdc++6 libunwind8 libuuid1 zlib1g ca-certificates
	# Install .NET Core from the official runtime tarball.
	DOTNET_VERSION=1.1.10
	DOTNET_DOWNLOAD_URL=https://dotnetcli.blob.core.windows.net/dotnet/Runtime/$DOTNET_VERSION/dotnet-debian.9-x64.$DOTNET_VERSION.tar.gz
	wget $DOTNET_DOWNLOAD_URL -O dotnet.tar.gz
	mkdir -p /usr/share/dotnet
	tar -zxf dotnet.tar.gz -C /usr/share/dotnet
	rm dotnet.tar.gz
	ln -s /usr/share/dotnet/dotnet /usr/bin/dotnet
}

# V8 runtime (not implemented yet)
sub_v8(){
	echo "configure v8"
	# TODO
}

# NodeJS runtime
sub_nodejs(){
	echo "configure node"
	# Nothing needed, node_modules are local to the path,
	# runtime is located in /usr/local/lib, and node builtins
	# are already compiled in the runtime
}

# File loader
sub_file(){
	echo "configure file"
	# Nothing needed
}

# Ports
sub_ports(){
	echo "configure ports"
	# Nothing needed, there are no dependencies for ports by now
}
# Install: run each selected component installer in order.
sub_install(){
	# Running as root: drop the sudo prefix.
	if [ $RUN_AS_ROOT = 1 ]; then
		SUDO_CMD=""
	fi
	if [ $INSTALL_APT = 1 ]; then
		sub_apt
	fi
	if [ $INSTALL_PYTHON = 1 ]; then
		sub_python
	fi
	if [ $INSTALL_RUBY = 1 ]; then
		sub_ruby
	fi
	if [ $INSTALL_NETCORE = 1 ]; then
		sub_netcore
	fi
	if [ $INSTALL_V8 = 1 ]; then
		sub_v8
	fi
	if [ $INSTALL_NODEJS = 1 ]; then
		sub_nodejs
	fi
	if [ $INSTALL_FILE = 1 ]; then
		sub_file
	fi
	if [ $INSTALL_PORTS = 1 ]; then
		sub_ports
	fi
	# Clean runs last so earlier steps can still use wget.
	if [ $INSTALL_CLEAN = 1 ]; then
		sub_clean
	fi
	echo "install finished in workspace $ROOT_DIR"
}
# Clean dependencies: remove the bootstrap tooling and apt caches.
sub_clean(){
	echo "clean dependencies"
	$SUDO_CMD apt-get -y remove wget
	$SUDO_CMD apt-get -y autoclean
	$SUDO_CMD apt-get -y autoremove
}
# Configuration: parse component names from the command line and set the
# corresponding RUN_AS_ROOT / INSTALL_* flags. Unknown words are ignored.
sub_options(){
	for var in "$@"
	do
		case "$var" in
			root)
				echo "running as root"
				RUN_AS_ROOT=1
				;;
			base)
				echo "apt selected"
				INSTALL_APT=1
				;;
			python)
				echo "python selected"
				INSTALL_PYTHON=1
				;;
			ruby)
				echo "ruby selected"
				INSTALL_RUBY=1
				;;
			netcore)
				echo "netcore selected"
				INSTALL_NETCORE=1
				;;
			v8)
				echo "v8 selected"
				INSTALL_V8=1
				;;
			nodejs)
				echo "nodejs selected"
				INSTALL_NODEJS=1
				;;
			file)
				echo "file selected"
				INSTALL_FILE=1
				;;
			ports)
				echo "ports selected"
				INSTALL_PORTS=1
				;;
			clean)
				echo "clean selected"
				INSTALL_CLEAN=1
				;;
		esac
	done
}
# Help: print usage and the list of supported components.
sub_help() {
	# Consistency: use the PROGNAME computed at startup instead of
	# re-running basename here.
	echo "Usage: $PROGNAME list of component"
	echo "Components:"
	echo "	root"
	echo "	base"
	echo "	python"
	echo "	ruby"
	echo "	netcore"
	echo "	v8"
	echo "	nodejs"
	echo "	file"
	echo "	ports"
	echo "	clean"
	echo ""
}
# Entry point: no arguments shows the help; otherwise parse the options
# and run the selected installers.
case "$#" in
	0)
		sub_help
		;;
	*)
		sub_options $@
		sub_install
		;;
esac
|
#!/bin/bash
# Compare the installed RPM's EVR against a given EVR.
# Exit 0 when the RPM is newer than the EVR, 1 otherwise, 2 on usage error.
USAGE="Usage: pkg-cmp-gt.sh <RPM> <EVR>"
RPM=$1
EVR=$2

if [[ $# -ne 2 ]]; then
	echo "Expected 2 args, got $#" >&2
	echo "$USAGE" >&2
	exit 2
fi

# EVR of the installed RPM; a missing epoch prints "(none)", normalize to 0.
RPM_EVR=$(rpm -q --queryformat '%{EPOCH}:%{VERSION}-%{RELEASE}\n' "$RPM")
RPM_EVR=${RPM_EVR/(none)/0}

# rpmdev-vercmp exits 11 when its first argument is the newer EVR.
rpmdev-vercmp "$RPM_EVR" "$EVR"
rc=$?
if [[ $rc == 11 ]]; then
	exit 0
else
	exit 1
fi
|
<reponame>NiteshOswal/play-scraper
import re
from codecs import open
from setuptools import setup

# Read __version__ from the package source without importing it (importing
# would require the package's runtime dependencies at build time).
with open("play_scraper/__init__.py", "r") as f:
    version = re.search(
        r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', f.read(), re.MULTILINE
    ).group(1)

# README becomes the PyPI long description.
with open("README.md", "r", "utf-8") as f:
    readme = f.read()

setup(
    name="play_scraper",
    version=version,
    description="Google Play Store application scraper",
    long_description=readme,
    long_description_content_type="text/markdown",
    url="https://github.com/NiteshOswal/play-scraper",
    author="<NAME>",
    author_email="<EMAIL>",
    packages=["play_scraper"],
    license="MIT License",
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
    ],
    install_requires=[
        "beautifulsoup4>=4.9.3",
        "lxml>=4.6.2",
        "requests-futures>=1.0.0",
        "requests[security]>=2.25.1",
    ],
)
|
// App.js
import React from 'react';
import { Provider } from 'react-redux';
import { store } from './store';
import MoviesList from './components/MoviesList';
import MovieDetails from './components/MovieDetails';
import PostReview from './components/PostReview';
// Root component: provides the Redux store to the movie list, detail view
// and review form.
function App() {
  return (
    <Provider store={ store }>
      <div >
        <MoviesList />
        <MovieDetails />
        <PostReview />
      </div>
    </Provider>
  );
}
<filename>app/app-desktop.go
// Copyright 2016 The G3N Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !wasm
package app
import (
"fmt"
"github.com/g3n/engine/audio/al"
"github.com/g3n/engine/audio/vorbis"
"github.com/g3n/engine/core"
"github.com/g3n/engine/gui"
"github.com/g3n/engine/renderer"
"github.com/g3n/engine/window"
"time"
)
// Desktop application defaults (window title and initial size in pixels).
const (
	title  = "G3N Application"
	width  = 800
	height = 600
)

// Application bundles the window, input state trackers, renderer and the
// default audio device for a desktop G3N app.
type Application struct {
	window.IWindow                    // Embedded GlfwWindow
	keyState       *window.KeyState   // Keep track of keyboard state
	mouseState     *window.MouseState // Keep track of mouse state
	renderer       *renderer.Renderer // Renderer object
	audioDev       *al.Device         // Default audio device
	startTime      time.Time          // Application start time
	frameStart     time.Time          // Frame start time
	frameDelta     time.Duration      // Duration of last frame
}
// App returns the Application singleton, creating it the first time.
// Initialization failures (window, shaders) panic: the app cannot run
// without them.
func App() *Application {
	// Return singleton if already created
	if a != nil {
		return a
	}
	a = new(Application)
	// Initialize window
	err := window.Init(width, height, title)
	if err != nil {
		panic(err)
	}
	a.IWindow = window.Get()
	a.openDefaultAudioDevice()         // Set up audio
	a.keyState = window.NewKeyState(a) // Create KeyState
	a.mouseState = window.NewMouseState(a)  // Create MouseState
	// Create renderer and add default shaders
	a.renderer = renderer.NewRenderer(a.Gls())
	err = a.renderer.AddDefaultShaders()
	if err != nil {
		panic(fmt.Errorf("AddDefaultShaders:%v", err))
	}
	return a
}
// Run starts the update loop.
// It calls the user-provided update function every frame, dispatching
// gui.OnBeforeRender/gui.OnAfterRender around it, until the window is
// closed, then tears down the audio device and the window.
func (a *Application) Run(update func(rend *renderer.Renderer, deltaTime time.Duration)) {

	// Initialize start and frame time
	a.startTime = time.Now()
	a.frameStart = time.Now()

	// Set up recurring calls to user's update function
	for true {
		// If Exit() was called or there was an attempt to close the window dispatch OnExit event for subscribers.
		// If no subscriber cancelled the event, terminate the application.
		if a.IWindow.(*window.GlfwWindow).ShouldClose() {
			a.Dispatch(OnExit, nil)
			// TODO allow for cancelling exit e.g. showing dialog asking the user if he/she wants to save changes
			// if exit was cancelled {
			//     a.IWindow.(*window.GlfwWindow).SetShouldClose(false)
			// } else {
			break
			// }
		}
		// Update frame start and frame delta
		now := time.Now()
		a.frameDelta = now.Sub(a.frameStart)
		a.frameStart = now
		// Dispatch before render event
		a.Dispatch(gui.OnBeforeRender, nil)
		//dispatchRecursive(gui.OnBeforeRender, nil, a.scene.Children())
		//dispatchRecursive(gui.OnBeforeRender, nil, a.guiroot.Children())
		// Call user's update function
		update(a.renderer, a.frameDelta)
		// Dispatch after render event
		a.Dispatch(gui.OnAfterRender, nil)
		//dispatchRecursive(gui.OnAfterRender, nil, a.scene.Children())
		//dispatchRecursive(gui.OnAfterRender, nil, a.guiroot.Children())
		// Swap buffers and poll events
		a.IWindow.(*window.GlfwWindow).SwapBuffers()
		a.IWindow.(*window.GlfwWindow).PollEvents()
	}

	// Close default audio device
	if a.audioDev != nil {
		al.CloseDevice(a.audioDev)
	}
	// Destroy window
	a.Destroy()
}
// dispatchRecursive dispatches the event to every node in the given forest,
// depth-first, stopping as soon as any node (or descendant) consumes it.
func dispatchRecursive(evname string, ev interface{}, nodes []core.INode) bool {
	for _, node := range nodes {
		if node == nil {
			continue
		}
		if node.Dispatch(evname, ev) != 0 {
			return true
		}
		if dispatchRecursive(evname, ev, node.Children()) {
			return true
		}
	}
	return false
}
// Exit requests to terminate the application.
// Run() will dispatch OnExit events to registered subscribers which
// can cancel the process by calling CancelDispatch().
func (a *Application) Exit() {
	a.IWindow.(*window.GlfwWindow).SetShouldClose(true)
}

// Renderer returns the application's renderer.
func (a *Application) Renderer() *renderer.Renderer {
	return a.renderer
}

// KeyState returns the application's KeyState.
func (a *Application) KeyState() *window.KeyState {
	return a.keyState
}

// MouseState returns the application's MouseState.
func (a *Application) MouseState() *window.MouseState {
	return a.mouseState
}
// RunTime returns the elapsed duration since the call to Run().
func (a *Application) RunTime() time.Duration {
	// time.Since is the idiomatic shorthand for time.Now().Sub(t).
	return time.Since(a.startTime)
}
// openDefaultAudioDevice opens the default audio device, creates an OpenAL
// context on it (with EFX auxiliary sends when the extension is available)
// and makes that context current. Returns an error on any OpenAL failure.
func (a *Application) openDefaultAudioDevice() error {

	// Opens default audio device
	var err error
	a.audioDev, err = al.OpenDevice("")
	if err != nil {
		return fmt.Errorf("opening OpenAL default device: %s", err)
	}
	// Check for OpenAL effects extension support
	var attribs []int
	if al.IsExtensionPresent("ALC_EXT_EFX") {
		attribs = []int{al.MAX_AUXILIARY_SENDS, 4}
	}
	// Create audio context
	acx, err := al.CreateContext(a.audioDev, attribs)
	if err != nil {
		return fmt.Errorf("creating OpenAL context: %s", err)
	}
	// Makes the context the current one
	err = al.MakeContextCurrent(acx)
	if err != nil {
		return fmt.Errorf("setting OpenAL context current: %s", err)
	}
	// Logs audio library versions
	log.Info("%s version: %s", al.GetString(al.Vendor), al.GetString(al.Version))
	log.Info("%s", vorbis.VersionString())
	return nil
}
|
// Drain the message queue to the peer. Follows the canonical ACE pattern:
// partially-sent or unsendable blocks are requeued with ungetq() instead of
// being dropped, and the return value tells the reactor whether WRITE
// interest should remain registered (non-zero while the queue is non-empty).
int Client::handle_output(ACE_HANDLE)
{
  ACE_Message_Block *mb;
  ACE_Time_Value nowait(ACE_OS::gettimeofday());
  while (-1 != this->getq(mb, &nowait))
  {
    ssize_t send_cnt = this->peer().send(mb->rd_ptr(), mb->length());
    if (send_cnt == -1)
    {
      ACE_ERROR((LM_ERROR, ACE_TEXT("(%P|%t) %p\n"),
                 "Error occurred while sending message to the server"));
    }
    else
    {
      // Consume the bytes that were actually sent; a short send leaves
      // the unsent remainder in the block.
      mb->rd_ptr(static_cast<size_t>(send_cnt));
    }
    if (mb->length() > 0)
    {
      // BUG FIX: the original released the block even when nothing (or only
      // part of it) was sent, silently losing data. Requeue the remainder
      // and retry on the next writable event.
      this->ungetq(mb);
      break;
    }
    mb->release();
  }
  // Non-zero keeps the reactor's WRITE interest while data remains queued.
  return (this->msg_queue()->is_empty()) ? 0 : 1;
}
import flask

app = flask.Flask(__name__)


@app.route('/cities', methods=['GET'])
def cities():
    """Return cities within `radius` of (`latitude`, `longitude`).

    BUG FIX: Flask route rules cannot match query strings, so the original
    route never matched any request; the parameters are read from
    request.args instead.
    """
    latitude = flask.request.args.get('latitude', type=float)
    longitude = flask.request.args.get('longitude', type=float)
    radius = flask.request.args.get('radius', type=float)
    ...
    # BUG FIX: the SQL text was unquoted (a Python syntax error). Keep the
    # query parameterized to avoid SQL injection.
    cursor.execute(
        "SELECT * from city WHERE distance(latitude, longitude, %s, %s) < %s",
        (latitude, longitude, radius),
    )
    cities = cursor.fetchall()
    return flask.jsonify(cities)


if __name__ == '__main__':
    app.run(debug=True)
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio;
/**
 * Interface representing an Alluxio process.
 */
public interface Process {
  /**
   * Starts the Alluxio process. This call blocks until the process is stopped via
   * {@link #stop()}. The {@link #waitForReady()} method can be used to make sure that the
   * process is ready to serve requests.
   *
   * @throws Exception if the process fails to start or is interrupted while running
   */
  void start() throws Exception;

  /**
   * Stops the Alluxio process, blocking until the action is completed.
   *
   * @throws Exception if the process fails to stop cleanly
   */
  void stop() throws Exception;

  /**
   * Waits until the process is ready to serve requests.
   */
  // TODO(jiri): Replace with isServing.
  void waitForReady();
}
|
#!/bin/bash
# Clean up the Kubernetes resources created by this tutorial.
# BUG FIX: the shebang was "#/bin/bash" (missing '!'), so the interpreter
# line was just a comment and the script ran under the caller's shell.
set -e

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
EXAMPLEDIR="$(dirname "$DIR")"
K8SDIR="$(dirname "$EXAMPLEDIR")"

# tput can fail without a terminal; fall back to no formatting (|| true
# keeps set -e from aborting).
bb=$(tput bold) || true
nn=$(tput sgr0) || true
green=$(tput setaf 2) || true

echo "${bb}Deleting tutorial resources...${nn}"
kubectl delete -k "${EXAMPLEDIR}"/k8s/. --ignore-not-found

echo "${bb}Deleting resources from X.509 Tutorial...${nn}"
bash "${K8SDIR}"/envoy-x509/scripts/clean-env.sh > /dev/null

echo "${green}Cleaning completed.${nn}"
|
<gh_stars>1-10
import { combineReducers } from 'redux';
import args from './args';
import recent_entries from './recent_entries';
import show_count from './show_count';
// Root reducer combining every slice reducer used by the store.
const rootReducer = combineReducers({
  args,
  recent_entries,
  show_count,
});

export default rootReducer;
|
# coding: utf-8
import cv2
import os,sys
import time
import os.path
import math
sys.path.insert(0, '../facealign')
sys.path.insert(0, '../util')
from fileutil import *
from MtcnnPycaffe import MtcnnDetector, draw_and_show
from alignment import *
from logfile import *
import json
import argparse
def IoU(bbox1, bbox2):
    """Intersection-over-union of two boxes given as [x, y, width, height].

    Returns 0.0 for disjoint boxes. BUG FIX: the original did not clamp the
    intersection extents, so two non-overlapping boxes could yield a
    spurious positive IoU (negative width times negative height).
    """
    x0 = max(bbox1[0], bbox2[0])
    y0 = max(bbox1[1], bbox2[1])
    x1 = min(bbox1[0] + bbox1[2], bbox2[0] + bbox2[2])
    y1 = min(bbox1[1] + bbox1[3], bbox2[1] + bbox2[3])
    # Clamp to zero: an empty intersection has no area.
    overlap = max(0, x1 - x0) * max(0, y1 - y0)
    union = bbox1[2] * bbox1[3] + bbox2[2] * bbox2[3] - overlap
    return overlap / union
def load_bbox_file(path, dict):
    """Parse a FaceScrub bbox TSV into `dict` and return it.

    Each data row (the header line is skipped) maps '<name>_<face_id>' to
    a [x, y, width, height] box parsed from the 'x0,y0,x1,y1' bbox column.
    """
    lines = read_lines(path)
    for line in lines[1:]:  # skip the header row
        segs = line.split('\t')
        name = segs[0]
        face_id = segs[2]
        x0, y0, x1, y1 = (int(v) for v in segs[4].split(','))
        dict[name + '_' + face_id] = [x0, y0, x1 - x0, y1 - y0]
    return dict
class FacescrubAlignVisitor(object):
    """
    FaceScrub alignment visitor: detects faces with MTCNN, picks the
    detection that best overlaps the ground-truth bbox, and writes a
    96x112 aligned crop mirroring the source tree under dst_prefix.
    """
    def __init__(self,
                 src_prefix,
                 dst_prefix,
                 detector,
                 bbox,
                 skip_exist = False,
                 transform = 'sililarity',
                 pading = 0):
        # NOTE(review): 'sililarity' and 'pading' spellings are kept as-is;
        # they must match what align_to_96x112 expects — confirm before fixing.
        self.src_prefix = src_prefix  # root directory of input images
        self.dst_prefix = dst_prefix  # root directory for aligned output
        self.skip_exist = skip_exist  # skip files whose output already exists
        self.detector = detector      # MTCNN detector instance
        self.bbox = bbox              # '<name>_<face_id>' -> [x, y, w, h]
        self.transform = transform
        self.pading = pading
        # statistic
        self.done_count = 0
        self.fail_count = 0

    def process(self, path):
        """Align one image. Returns True on success or skip, False on
        failure; failures are appended to the currently open log file."""
        if not is_image_file(path):
            return True
        dst_path = translate_path(self.src_prefix, self.dst_prefix, path)
        if self.skip_exist and os.path.exists(dst_path):
            # print('skip:%s' % path)
            return True
        #print('%s -> %s' % (path, dst_path))
        img = cv2_imread(path)
        if img is None:
            print('load error:%s'%(path))
            log_write(path)
            self.fail_count += 1
            return False
        #print('run:%s/%s'%(subdir,filename))
        try:
            boxes, points = self.detector.detect_face(img)
        except:
            print('detect error:%s'%(path))
            log_write(path)
            self.fail_count += 1
            return False
        if points is None or len(points) == 0:
            log_write(path)
            self.fail_count += 1
            return False
        # find the detection with the largest IoU against the ground truth;
        # the dict key is the image file name without extension.
        dir, fname = os.path.split(path)
        key, _ = os.path.splitext(fname)
        target_box = self.bbox[key]
        max_idx = 0
        max_iou = 0
        for i, box in enumerate(boxes):
            # detector boxes are [x0, y0, x1, y1]; convert to [x, y, w, h]
            box = [box[0], box[1], box[2] - box[0], box[3] - box[1]]
            iou = IoU(box, target_box)
            if iou > max_iou:
                max_iou = iou
                max_idx = i
        # check iou
        if max_iou < 0.3:
            #cv2.rectangle(img, (target_box[0],target_box[1]),
            #    (target_box[0] + target_box[2], target_box[1] + target_box[3]), (0,255,0), 2)
            #draw_and_show(img, boxes, points )
            #ch = cv2.waitKey(0)
            # NOTE(review): leftover from an interactive review loop; with
            # ch fixed to 0 the 'ch == 27' (ESC) branch is dead, so low-IoU
            # images are still aligned using the best detection.
            ch = 0
            if ch == 27:
                log_write(path)
                self.fail_count += 1
                return False
        max_chip = align_to_96x112(img, points[max_idx], self.pading, trans_type = self.transform)
        #draw_and_show(img,boxes, points )
        #cv2.imshow('chip', max_chip)
        #cv2.waitKey(0)
        makedirs(dst_path)
        ret = cv2_imwrite(dst_path, max_chip)
        if ret == False:
            print('imwrite error:%s'%(path))
            log_write(path)
            self.fail_count += 1
            return False
        # report progress every 100 processed images
        if self.done_count % 100 == 0:
            print('done:%05d, fail:%05d img:%s'%(self.done_count, self.fail_count, path))
        self.done_count += 1
        return True
def align_facescrub_uncropped(src_dir, dst_dir, templatelists_path, dict, gpu_id = 0):
    """Align every image listed in the MegaFace templatelists JSON.

    `dict` maps image keys to ground-truth boxes (see load_bbox_file).
    Failed paths are logged to '<templatelists_path>.detect-fail.txt'.
    """
    # load the list of relative image paths from the JSON
    with open(templatelists_path, 'r') as f:
        data = json.load(f)
    rel_list = data['path']
    # to fullpath
    path_list = [ os.path.join(src_dir,p) for p in rel_list ]
    # init detector
    detector = MtcnnDetector( minsize=36, gpu_id = gpu_id )
    # align by detection
    visitor = FacescrubAlignVisitor(src_dir,dst_dir,detector, dict)
    detect_fail_list = templatelists_path + '.detect-fail.txt'
    log_open(detect_fail_list)
    total_size = len(path_list)
    for i in range(total_size):
        path = path_list[i]
        #print('%d/%d %s' % (i,total_size,path))
        visitor.process(path)
    log_close()
def align_facescrub_fail(src_dir, dst_dir, templatelists_path, dict, gpu_id = 0):
    """Retry alignment on the images that failed in a previous run
    (read from '<templatelists_path>.detect-fail.txt'); still-failing
    paths are logged to '<templatelists_path>.final-fail.txt'."""
    # init detector
    detector = MtcnnDetector( minsize=36, gpu_id = gpu_id )
    # align by detection
    visitor = FacescrubAlignVisitor(src_dir,dst_dir,detector, dict)
    detect_fail_list = templatelists_path + '.detect-fail.txt'
    log_open(templatelists_path + '.final-fail.txt')
    list_walker(detect_fail_list,visitor)
    log_close()
def align_facescrub_fail_json(src_dir, dst_dir, templatelists_path, dict, json_path):
    """Align final-fail images using manually labeled landmarks from json_path.

    The labeled points are stored interleaved (x0,y0,x1,y1,...) and are
    reordered to (x0..x4,y0..y4) before alignment.
    """
    with open(json_path, 'r') as fp:
        labeled = json.load(fp)
    print(labeled)
    fail_paths = read_lines(templatelists_path + '.final-fail.txt')
    for path in fail_paths:
        dst_path = translate_path(src_dir, dst_dir, path)
        _, fname = os.path.split(path)
        key, _ = os.path.splitext(fname)
        print(key)
        target_box = dict[key]  # labeled box; only used by the debug code below
        img = cv2_imread(path)
        point = labeled[key]
        # de-interleave: xs first, then ys
        xxyy = [point[2 * i] for i in range(5)] + [point[2 * i + 1] for i in range(5)]
        print(xxyy)
        chip = align_to_96x112(img, xxyy)
        makedirs(dst_path)
        cv2_imwrite(dst_path, chip)
        #draw_and_show(img, [target_box], [xxyy] )
        #ch = cv2.waitKey(0)
def detect_facescrub_landmarks(src_dir, templatelists_path, bbox, detector):
    """Run the face detector on every listed image and keep the 5-point
    landmarks of the detection that best overlaps the labeled bounding box.

    Args:
        src_dir: root directory of the facescrub images.
        templatelists_path: json file whose 'path' entry lists relative paths.
        bbox: mapping of image key (file stem) -> labeled face box (x, y, w, h).
        detector: object exposing detect_face(img) -> (boxes, points).

    Returns:
        dict mapping image key -> landmark list of the best-IoU detection.
    """
    with open(templatelists_path, 'r') as f:
        data = json.load(f)
    rel_list = data['path']
    landmarks = {}
    for rel_path in rel_list:
        # to fullpath
        path = os.path.join(src_dir, rel_path)
        img = cv2_imread(path)
        try:
            boxes, points = detector.detect_face(img)
        except Exception:
            print('detect error:%s'%(path))
            # BUG FIX: without this 'continue' the loop fell through and used
            # 'boxes'/'points' left over from the previous iteration (or raised
            # NameError on the very first image).
            continue
        if points is None or len(points) == 0:
            continue
        # find the one largest IoU with the labeled box
        dir, fname = os.path.split(path)
        key, _ = os.path.splitext(fname)
        target_box = bbox[key]
        max_idx = 0
        max_iou = 0
        for i, box in enumerate(boxes):
            # convert detector's (x1, y1, x2, y2) to (x, y, w, h) for IoU
            box = [box[0], box[1], box[2] - box[0], box[3] - box[1]]
            iou = IoU(box, target_box)
            if iou > max_iou:
                max_iou = iou
                max_idx = i
        landmarks[key] = points[max_idx].tolist()
    return landmarks
def correct_facescrub_json(src_dir, dst_dir, dict, json_path):
    """Interactively review manually corrected landmarks: align each labeled
    image, draw its labeled box and landmarks, and show the aligned chip.

    Blocks on cv2.waitKey(0) after every image; writing of chips is disabled.
    """
    with open(json_path, 'r') as fp:
        labeled = json.load(fp)
    print(labeled)
    for key, point in labeled.items():
        # keys look like '<name>_<image_id>'
        name, image_id = key.split('_')
        path = os.path.join(src_dir, name + '/' + key + '.jpg')
        dst_path = translate_path(src_dir, dst_dir, path)
        target_box = dict[key]
        img = cv2_imread(path)
        # de-interleave (x0,y0,...) into (x0..x4,y0..y4)
        xxyy = [point[2 * i] for i in range(5)] + [point[2 * i + 1] for i in range(5)]
        print(xxyy)
        print(key)
        chip = align_to_96x112(img, xxyy)
        makedirs(dst_path)
        #cv2_imwrite(dst_path, chip)
        draw_and_show(img, [target_box], [xxyy] )
        cv2.imshow('chip', chip)
        ch = cv2.waitKey(0)
def merge_landmarks(labeled_json, detect_json, dst_json):
    """Merge manually labeled landmarks into the detected ones.

    Labeled points are stored interleaved (x0,y0,x1,y1,...); they are reordered
    to (x0..x4,y0..y4) and override any detected entry with the same key.
    The merged mapping is written to dst_json and its size printed.
    """
    with open(labeled_json, 'r') as fp:
        labeled = json.load(fp)
    with open(detect_json, 'r') as fp:
        merged = json.load(fp)
    # manual labels take precedence over detections
    for key, point in labeled.items():
        xs = [point[2 * i] for i in range(5)]
        ys = [point[2 * i + 1] for i in range(5)]
        merged[key] = xs + ys
    with open(dst_json, 'w') as fp:
        fp.write(json.dumps(merged))
    print(len(merged))
def align_facescrub_by_landmark(src_dir, dst_dir, templatelists_path, landmarks_path):
    """Align all listed images using precomputed 5-point landmarks.

    Args:
        src_dir: root directory of the input images.
        dst_dir: root directory for the aligned 96x112 chips.
        templatelists_path: json file whose 'path' entry lists relative paths.
        landmarks_path: json mapping image key -> landmark list.
    """
    with open(templatelists_path, 'r') as fp:
        rel_list = json.load(fp)['path']
    with open(landmarks_path, 'r') as fp:
        landmarks = json.load(fp)
    for rel_path in rel_list:
        path = os.path.join(src_dir, rel_path)
        img = cv2_imread(path)
        dst_path = translate_path(src_dir, dst_dir, path)
        _, fname = os.path.split(path)
        key, _ = os.path.splitext(fname)
        chip = align_to_96x112(img, landmarks[key])
        makedirs(dst_path)
        cv2_imwrite(dst_path, chip)
        #cv2.imshow('face', chip)
        #ch = cv2.waitKey(1)
'''
wrong label:Richard Madden_48806
'''
if __name__=='__main__':
    # Usage: <script> facescrub_image_dir aligned_dir features_list_json_path
    # BUG FIX: sys.argv[3] is read below, so four entries (script name plus
    # three arguments) are required; the old check 'len(sys.argv) < 3' let
    # sys.argv[3] raise IndexError when only two arguments were given.
    if len(sys.argv) < 4:
        print('facescrub_image_dir aligned_dir features_list_json_path')
        exit()
    #
    src_dir = sys.argv[1]
    dst_dir = sys.argv[2]
    templatelists_path = sys.argv[3]
    # landmarks merged from manual labels and detector output
    merged_json = './facescrub_80_landmark5.json'
    align_facescrub_by_landmark(src_dir, dst_dir, templatelists_path, merged_json)
|
'''
The cars rented and returned at Jack's follow a Poisson distribution.
Moreover, all rentals and returns are independent of each other. Thus,
the total probability given by p(s',a|s,a) for rentals and returns at
both locations is given by their product.
To avoid looping over extremely small probabilities we use lower and upper
bounds for the distributions.
@author: <NAME>
'''
from scipy.stats import poisson
class Poisson(object):
    '''
    Implements the Poisson distribution for Jack's Car Rental.
    Uses a lower and upper bound to avoid unnecessary looping over
    negligible probabilities.
    '''

    # Probability mass that must be covered by [lower, upper].
    # (Name kept as-is for backward compatibility, despite the spelling.)
    CONFIDENCE_INTERVALL = 0.999

    def __init__(self, lam):
        '''
        Set up lambda for the Poisson distribution and calculate the
        lower and upper bounds of the confidence interval.
        '''
        self.lam = lam
        # Lower and upper bounds for the given confidence interval
        self.lower, self.upper = poisson.interval(self.CONFIDENCE_INTERVALL, self.lam)
        self.lower = int(self.lower)
        self.upper = int(self.upper)
        # Probabilities within the bounds.
        # BUG FIX: poisson.interval returns *inclusive* endpoints, but
        # range() excludes its stop value, so pmf(self.upper) was dropped
        # and the probabilities did not cover the confidence interval.
        self.probs = [poisson.pmf(k, self.lam) for k in range(self.lower, self.upper + 1)]
|
#!/bin/sh
# Install the UI dependencies and start the dev server.
# BUG FIX: without 'set -e', a failed 'cd ui' (or failed install) would still
# run the following yarn commands in the wrong directory / broken state.
set -e
cd ui
yarn install
yarn serve
|
/**
* Constants for skeletos-web-router package that other packages or your application can use.
*/
export class SkeletosWebRouterConstants {
/**
* The name of the variable in the global window scope that contains the serialized (dehydrated) Skeletos state
* when a page is first loaded. Deserializing this (hydrating your Skeletos state) allows you to build isomorphic
* applications.
*
* @type {string}
*/
public static SKELETOS_DEHYDRATED_STATE_GLOBAL_ID: string = "___SKELETOS_STATE";
} |
#!/bin/bash
# Bootstrap paket, restore dependencies and run the FAKE build script.
# BUG FIX: without 'set -e' a failed bootstrap or 'paket install' still ran
# the FAKE build against a broken package set.
set -e

# NOTE(review): running the bootstrapper only when paket.exe already exists
# looks inverted — the bootstrapper is normally what downloads paket.exe.
# Confirm the intended behavior before changing the condition.
if [ -f .paket/paket.exe ]; then
  mono .paket/paket.bootstrapper.exe
fi

# First-time setup: run the one-shot boot script, then remove it.
if [ -f boot.fsx ]; then
  fsharpi boot.fsx
  rm boot.fsx
  mono .paket/paket.exe install
fi

# Ensure a lock file exists before building.
if [ ! -f paket.lock ]; then
  mono .paket/paket.exe install
fi

mono packages/build/FAKE/tools/FAKE.exe "build.fsx" Dummy --fsiargs build.fsx $@
|
#!/bin/sh
# Apply a detached code signature to an unsigned macOS application bundle:
# unpack the unsigned app tarball and the signature tarball, splice each
# '*.sign' blob into its target binary at the correct offset, and emit the
# signed bundle under ${OUTDIR}.
set -e

UNSIGNED=$1
SIGNATURE=$2
ARCH=x86_64
ROOTDIR=dist
BUNDLE=${ROOTDIR}/VENcoin-Qt.app
TEMPDIR=signed.temp
OUTDIR=signed-app

# Both positional arguments are mandatory.
if [ -z "$UNSIGNED" ]; then
  echo "usage: $0 <unsigned app> <signature>"
  exit 1
fi

if [ -z "$SIGNATURE" ]; then
  echo "usage: $0 <unsigned app> <signature>"
  exit 1
fi

rm -rf ${TEMPDIR} && mkdir -p ${TEMPDIR}
tar -C ${TEMPDIR} -xf ${UNSIGNED}
tar -C ${TEMPDIR} -xf ${SIGNATURE}

# The helper tools may be shipped inside the signature tarball; the
# environment variables allow overriding them.
if [ -z "${PAGESTUFF}" ]; then
  PAGESTUFF=${TEMPDIR}/pagestuff
fi

if [ -z "${CODESIGN_ALLOCATE}" ]; then
  CODESIGN_ALLOCATE=${TEMPDIR}/codesign_allocate
fi

for i in `find ${TEMPDIR} -name "*.sign"`; do
  # NOTE(review): 'stat -c %s' is GNU coreutils syntax; this assumes the
  # script runs on Linux (BSD/macOS stat would need '-f %z') — confirm.
  SIZE=`stat -c %s ${i}`
  TARGET_FILE=`echo ${i} | sed 's/\.sign$//'`
  echo "Allocating space for the signature of size ${SIZE} in ${TARGET_FILE}"
  ${CODESIGN_ALLOCATE} -i ${TARGET_FILE} -a ${ARCH} ${SIZE} -o ${i}.tmp
  # Locate where the signature blob lives inside the allocated binary.
  OFFSET=`${PAGESTUFF} ${i}.tmp -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
  if [ -z ${QUIET} ]; then
    echo "Attaching signature at offset ${OFFSET}"
  fi
  # Overwrite the allocated region with the detached signature bytes.
  dd if=$i of=${i}.tmp bs=1 seek=${OFFSET} count=${SIZE} 2>/dev/null
  mv ${i}.tmp ${TARGET_FILE}
  rm ${i}
  echo "Success."
done

mv ${TEMPDIR}/${ROOTDIR} ${OUTDIR}
rm -rf ${TEMPDIR}
echo "Signed: ${OUTDIR}"
|
#!/bin/bash
# Launch (or resume) a TALI training run inside a preconfigured conda
# environment. Expects CODE_DIR to point at the repository checkout.
export HOME=/root/
source $HOME/.bashrc
source $HOME/conda/bin/activate
conda activate tali

# Sync the code and its Python dependencies.
cd $CODE_DIR
git pull
pip install -r $CODE_DIR/requirements.txt

# Prepare experiment storage and Weights & Biases credentials.
source $CODE_DIR/setup_scripts/setup_base_experiment_disk.sh
source $CODE_DIR/setup_scripts/setup_wandb_credentials.sh

cd $CODE_DIR
# Kill any process still holding the GPU devices, then start training with
# image+text modalities only (audio/video disabled).
fuser -k /dev/nvidia*; \
python $CODE_DIR/run.py \
hydra.verbose=True \
trainer=default \
resume=True \
batch_size=2 \
trainer.gpus=1 \
trainer.auto_scale_batch_size=True \
datamodule.dataset_config.rescan_paths=True \
datamodule.prefetch_factor=3 \
datamodule.num_workers=12 \
model=deci_modus_prime_vi-transformer16 \
datamodule.dataset_config.dataset_size_identifier=base \
datamodule.dataset_config.modality_config.image=True \
datamodule.dataset_config.modality_config.text=True \
datamodule.dataset_config.modality_config.audio=False \
datamodule.dataset_config.modality_config.video=False
|
<reponame>fehwalker/omf
package org.om.core.impl.mapping.extractor;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertThat;
import java.util.List;
import org.junit.Test;
import org.om.core.api.annotation.MapKeyStrategy;
import org.om.core.api.exception.MappingException;
import org.om.core.api.mapping.CollectionMapping;
import org.om.core.api.mapping.MappedField;
import org.om.core.api.mapping.extractor.FieldMappingExtractor;
import org.om.core.api.mapping.field.Mapping;
import org.om.core.impl.test.EntityImplementingInterface;
import org.om.core.impl.test.EntityWithCollections;
import org.om.core.impl.test.EntityWithPrimitiveProperties;
import org.om.core.impl.test.MappedFieldBuilder;
import org.om.core.impl.test.MyInterface;
/**
 * Unit tests for {@code FieldMappingExtractorImpl} covering extraction of
 * collection and map mappings from annotated fields of the test entities.
 */
public class FieldMappingExtractorCollectionTest {

    private FieldMappingExtractor extractor = new FieldMappingExtractorImpl();

    /** A collection of Strings maps declared and implementation type to String, collection type to List. */
    @Test
    public void testStringCollection() throws Exception {
        MappedField mappedField = extractor.extract(EntityWithCollections.class.getDeclaredField("collectionWithStrings"));
        assertThat(mappedField, notNullValue());
        CollectionMapping mapping = (CollectionMapping) mappedField.getMapping();
        assertThat(mapping, notNullValue());
        assertThat(mapping, instanceOf(CollectionMapping.class));
        assertEquals(String.class, mapping.getDeclaredType());
        assertEquals(String.class, mapping.getImplementationType());
        assertEquals(List.class, mapping.getCollectionType());
    }

    /** A collection of Integers keeps Integer as the implementation type. */
    @Test
    public void testIntegerCollection() throws Exception {
        MappedField mappedField = extractor.extract(EntityWithCollections.class.getDeclaredField("collectionWithIntegers"));
        assertThat(mappedField, notNullValue());
        CollectionMapping mapping = (CollectionMapping) mappedField.getMapping();
        assertThat(mapping, notNullValue());
        assertThat(mapping, instanceOf(CollectionMapping.class));
        assertEquals(Integer.class, mapping.getImplementationType());
    }

    /** A collection of entity references exposes the entity implementation type and the field name as location. */
    @Test
    public void testReferenceCollection() throws Exception {
        MappedField mappedField = extractor.extract(EntityWithCollections.class.getDeclaredField("collectionWithReferenceTypes"));
        assertThat(mappedField, notNullValue());
        CollectionMapping mapping = (CollectionMapping) mappedField.getMapping();
        assertThat(mapping, notNullValue());
        assertThat(mapping, instanceOf(CollectionMapping.class));
        assertEquals(EntityWithPrimitiveProperties.class, mapping.getImplementationType());
        assertThat(mapping.getLocation(), is("collectionWithReferenceTypes"));
    }

    /** A map field carries its implementation type, location and the Name key strategy. */
    @Test
    public void testMap() throws Exception {
        MappedField mappedField = extractor.extract(EntityWithCollections.class.getDeclaredField("map"));
        assertThat(mappedField, notNullValue());
        CollectionMapping mapping = (CollectionMapping) mappedField.getMapping();
        assertThat(mapping, notNullValue());
        assertEquals(EntityWithPrimitiveProperties.class, mapping.getImplementationType());
        assertThat(mapping.getLocation(), is("map"));
        assertThat(mapping.getMapKeyStrategy(), is(MapKeyStrategy.Name));
    }

    /** Without an explicit implementation type, it defaults to the declared type. */
    @Test
    public void testExtractMappingForCollectionWithDefaultImplType() throws Exception {
        MappedField field = extractor.extract(EntityWithCollections.class.getDeclaredField("collectionWithStrings"));
        Mapping mapping = field.getMapping();
        assertEquals(mapping.getDeclaredType(), mapping.getImplementationType());
    }

    /** Declared and implementation types may differ (interface vs concrete class). */
    @Test
    public void testExtractMappingForCollectionWithDifferingTargetAndImplType() throws Exception {
        MappedField field = extractor.extract(EntityWithCollections.class.getDeclaredField("collectionWithDifferentTargetAndImplType"));
        Mapping mapping = field.getMapping();
        assertNotSame(mapping.getDeclaredType(), mapping.getImplementationType());
        assertEquals(MyInterface.class, mapping.getDeclaredType());
        assertEquals(EntityImplementingInterface.class, mapping.getImplementationType());
    }

    /** Incompatible target/implementation type combinations must be rejected. */
    @Test(expected = MappingException.class)
    public void testExtractingMappingWithIncompatibleTargetAndImplementationTypes() {
        new MappedFieldBuilder().withName("foobar").withType(List.class).withCollectionMapping(List.class, String.class, EntityWithCollections.class, "foobar")
                .create();
    }
}
|
#!/bin/bash
#
# Install Homebrew and applications
# Check out https://brew.sh for more details
# Comment (with #) what should not be installed and add the applications you want to install.

source ./scripts/utils.sh

echo_info "Installing apps..."

# Install Rosetta for Apple Silicon hardware
if [[ `uname -p` == 'arm' ]]; then
  softwareupdate --install-rosetta --agree-to-license
fi

# Install Homebrew
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
brew update
brew upgrade

# Install Homebrew taps
TAPS=(
  homebrew/cask-drivers
  homebrew/cask-fonts
  homebrew/cask-versions
  homebrew/command-not-found
)

# BUG FIX: array expansions and the loop variables are now quoted
# ("${TAPS[@]}", "$tap", ...) so entries can never be word-split or
# glob-expanded by the shell.
for tap in "${TAPS[@]}"
do
  brew tap "$tap"
done

# Install Homebrew formulas
FORMULAS=(
  curl
  git
  vim
  node
  postgresql
  python
  ruby
  tree
  unar
  yarn
  zsh
)

for formula in "${FORMULAS[@]}"
do
  brew install "$formula"
done

# Install Homebrew casks
CASKS=(
  docker
  font-jetbrains-mono
  google-chrome
  iina
  openinterminal
  pdf-expert
  tableplus
  visual-studio-code
  notion
  zoom
  firefox
  fig
  coconutbattery
  figma
  dbeaver-community
  spark
  runjs
  slack
  reactotron
  rectangle
  iterm2
  spotify
)

for app in "${CASKS[@]}"
do
  brew install --cask "$app"
done

# Install Homebrew Cask Upgrade
# Check out https://github.com/buo/homebrew-cask-upgrade for more details
brew tap buo/cask-upgrade
brew update
brew cu

# Finish
echo_success "Finished applications installation."
|
package es.redmic.api.common.controller;
/*-
* #%L
* API
* %%
* Copyright (C) 2019 REDMIC Project / Server
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.HandlerMapping;
import es.redmic.es.common.service.SelectionService;
import es.redmic.exception.databinding.DTONotValidException;
import es.redmic.models.es.common.dto.ElasticSearchDTO;
import es.redmic.models.es.common.dto.JSONCollectionDTO;
import es.redmic.models.es.common.dto.SelectionDTO;
import es.redmic.models.es.common.dto.SuperDTO;
import es.redmic.models.es.common.model.Selection;
import es.redmic.models.es.common.query.dto.DataQueryDTO;
import es.redmic.models.es.common.query.dto.SimpleQueryDTO;
/**
 * REST controller exposing CRUD, search and suggestion endpoints for user
 * selections. The part of the request path that follows the selections base
 * URI identifies the service a selection belongs to.
 */
@RestController
@RequestMapping(value = "**${controller.mapping.SELECTIONS}")
public class SelectionController extends RBaseController<Selection, SelectionDTO, SimpleQueryDTO> implements ISettingsController {

    // TODO: restrict which incoming routes are accepted instead of matching all of them
    private SelectionService service;

    @Value("${controller.mapping.SELECTIONS}")
    String selectionBaseURI;

    @Autowired
    public SelectionController(SelectionService service) {
        super(service);
        this.service = service;
    }

    /**
     * Creates a selection. The owning service name is derived from the request
     * path by stripping the selections base URI prefix.
     *
     * @throws DTONotValidException if bean validation of the DTO failed
     */
    @PostMapping(value = "/")
    @ResponseBody
    public SuperDTO saveSettings(@Valid @RequestBody SelectionDTO dto, BindingResult bindingResult, HttpServletRequest request) {
        if (bindingResult.hasErrors())
            throw new DTONotValidException(bindingResult);
        String basePath = (String) request.getAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE);
        dto.setService(basePath.replace(selectionBaseURI + "/", ""));
        return new ElasticSearchDTO( service.save(dto), 1);
    }

    /**
     * Updates the selection with the given id; the service name is derived
     * from the request path the same way as in {@code saveSettings}.
     *
     * @throws DTONotValidException if bean validation of the DTO failed
     */
    @PutMapping(value = "/{id}")
    @ResponseBody
    public SuperDTO updateSettings(@PathVariable("id") String id, @Valid @RequestBody SelectionDTO dto, BindingResult bindingResult, HttpServletRequest request) {
        if (bindingResult.hasErrors())
            throw new DTONotValidException(bindingResult);
        String basePath = (String) request.getAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE);
        dto.setService(basePath.replace(selectionBaseURI + "/" + id, ""));
        return new ElasticSearchDTO(service.save(dto), 1);
    }

    /** Searches selections scoped to the service encoded in the request path. */
    @PostMapping(value = "/_search")
    @ResponseBody
    public SuperDTO findAllSettings(@Valid @RequestBody DataQueryDTO queryDTO, BindingResult bindingResult, HttpServletRequest request) {
        processQuery(queryDTO, bindingResult);
        String basePath = (String) request.getAttribute(HandlerMapping.PATH_WITHIN_HANDLER_MAPPING_ATTRIBUTE);
        JSONCollectionDTO result = service.findAll(queryDTO, basePath.replace(selectionBaseURI + "/_search", ""));
        return new ElasticSearchDTO(result, result.getTotal());
    }

    /** Returns the selection with the given id (count 0 when it does not exist). */
    @GetMapping(value = "/{id}")
    @ResponseBody
    public SuperDTO getSettings(@PathVariable("id") String id) {
        SelectionDTO response = service.get(id.toString());
        return new ElasticSearchDTO(response, response == null ? 0 : 1);
    }

    /** Suggests completions for {@code text} over the given fields. */
    @GetMapping(value = "/_suggest")
    @ResponseBody
    public SuperDTO suggestSettings(@RequestParam("fields") String[] fields, @RequestParam("text") String text,
            @RequestParam(required = false, value = "size") Integer size) {
        SimpleQueryDTO queryDTO = service.createSimpleQueryDTOFromSuggestQueryParams(fields, text, size);
        processQuery(queryDTO);
        List<String> response = service.suggest(convertToDataQuery(queryDTO));
        return new ElasticSearchDTO(response, response.size());
    }

    /** Deletes the selection with the given id. */
    @DeleteMapping(value = "/{id}")
    @ResponseBody
    public SuperDTO deleteSettings(@PathVariable("id") String id) {
        service.delete(id);
        return new SuperDTO(true);
    }
}
|
/** Performs Subsocial type registration as a side effect (takes no arguments, returns nothing). */
export declare const registerSubsocialTypes: () => void;
/** The default export is the same registration function. */
export default registerSubsocialTypes;
|
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build phase script: copies the
# vendored/built frameworks into the app bundle, strips architectures that are
# not being built, installs dSYMs/bcsymbolmaps and re-signs as needed.
# NOTE(review): despite the '#!/bin/sh' shebang this script relies on bash
# features ('function', 'set -o pipefail', arrays, '[[') — it works where
# /bin/sh is bash; confirm on platforms with a stricter sh.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}

trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  # Resolve the source: built products dir (full path or basename) or literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the framework's main binary (may itself be a symlink).
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"

    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Embed the pod frameworks for the current configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/XYEmptyDataView/XYEmptyDataView.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/XYEmptyDataView/XYEmptyDataView.framework"
fi
# Wait for any backgrounded (parallel) codesign invocations to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
# https://github.com/wg/wrk
# Run a 60 s load test against the local server and save stdout+stderr to a
# timestamped result file.
wrk -d 60 -c 256 -t 40 --latency -s script.lua http://localhost:8080/ &> "testResult$(date +%s).txt"
# -d 60: duration of 60 seconds, -c 256: concurrent connections, -t 40: threads
<reponame>tholenst/tink<filename>python/cc/cc_streaming_aead_wrappers.h
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///////////////////////////////////////////////////////////////////////////////
#ifndef TINK_PYTHON_CC_CC_STREAMING_AEAD_WRAPPERS_H_
#define TINK_PYTHON_CC_CC_STREAMING_AEAD_WRAPPERS_H_
#include <memory>
#include "absl/strings/string_view.h"
#include "tink/streaming_aead.h"
#include "tink/util/statusor.h"
#include "tink/python/cc/input_stream_adapter.h"
#include "tink/python/cc/output_stream_adapter.h"
#include "tink/python/cc/python_file_object_adapter.h"
#include "tink/python/cc/python_input_stream.h"
#include "tink/python/cc/python_output_stream.h"
namespace crypto {
namespace tink {
// Wrapper function for StreamingAead.NewEncryptingStream
//
// It uses 'streaming_aead' to create an EncryptingStream that writes the
// ciphertext to 'ciphertext_destination' through a PythonOutputStream, and
// returns an OutputStreamAdapter that wraps this EncryptingStream.
// 'aad' is forwarded to the underlying StreamingAead primitive.
// Taking a raw pointer signals to CLIF that the object is borrowed - ownership
// is not taken, and the value is not copied.
util::StatusOr<std::unique_ptr<OutputStreamAdapter>> NewCcEncryptingStream(
    StreamingAead* streaming_aead, const absl::string_view aad,
    std::unique_ptr<PythonFileObjectAdapter> ciphertext_destination);
// Wrapper function for StreamingAead.NewDecryptingStream
//
// It uses 'streaming_aead' to create a DecryptingStream that reads the
// ciphertext from 'ciphertext_source' through a PythonInputStream, and
// returns an InputStreamAdapter that wraps this DecryptingStream.
// 'aad' is forwarded to the underlying StreamingAead primitive.
// Taking a raw pointer signals to CLIF that the object is borrowed - ownership
// is not taken, and the value is not copied.
util::StatusOr<std::unique_ptr<InputStreamAdapter>> NewCcDecryptingStream(
    StreamingAead* streaming_aead, const absl::string_view aad,
    std::unique_ptr<PythonFileObjectAdapter> ciphertext_source);
}  // namespace tink
}  // namespace crypto
#endif  // TINK_PYTHON_CC_CC_STREAMING_AEAD_WRAPPERS_H_
|
<gh_stars>0
# Copyright 2014 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Manages and provides selected standard pages such as "page not found" and HTTP redirect operations.
"""
from mportal_tools import http_response, mportal_log
import template_mgr, mportal_urls
def notfound_page():
    """Build the 'page not found' response.

    Returns:
        http_response.HTTPResponse with the rendered 'not_found' template.
    """
    resp = http_response.HTTPResponse()
    context = {'css_url': mportal_urls.get_url('css')}
    resp.html = template_mgr.render_template('not_found', context)
    return resp
def redirect_console():
    """Redirect the user to the user console via an HTTP 303 response.

    Returns:
        http_response.HTTPResponse with status '303 See Other', a Location
        header pointing at the console URL, and the 'redirect' fallback page.
    """
    # FIX: removed a stray no-op string literal ("""The HTML output""") that
    # sat between the docstring and the first statement — it was dead code.
    response = http_response.HTTPResponse()
    response.status = '303 See Other'
    response.headers.append(('Location', mportal_urls.get_url('console')))
    response.html = template_mgr.render_template('redirect', {})
    return response
def redirect_login():
    """Redirect the user to the login/home page via an HTTP 303 response.

    Returns:
        http_response.HTTPResponse with the redirect status and headers set.
    """
    resp = http_response.HTTPResponse()
    resp.status = '303 See Other'
    target = mportal_urls.get_url('home')
    resp.headers.append(('Location', target))
    resp.html = template_mgr.render_template('redirect', {})
    return resp
|
#include <stdio.h>

/*
 * Computes the average of a fixed set of integers and prints it with two
 * decimal places.
 */
int main()
{
    /* The sample values to average. */
    int values[] = {2, 4, 6, 8};
    /* Number of elements, derived from the array size. */
    int count = sizeof(values) / sizeof(values[0]);

    /* Accumulate the total. */
    int total = 0;
    for (int i = 0; i < count; i++) {
        total += values[i];
    }

    /* Cast to float so the division keeps its fractional part. */
    float avg = (float)total / count;
    printf("Average of given numbers: %.2f", avg);
    return 0;
}
""" Tests that check if JIT-compiled numpy operations produce reasonably
good assembler
"""
import py
from pypy.jit.metainterp import pyjitpl
from pypy.jit.metainterp.test.support import LLJitMixin
from pypy.jit.metainterp.warmspot import reset_stats
from pypy.module.micronumpy import interp_boxes
from pypy.module.micronumpy.compile import (FakeSpace,
IntObject, Parser, InterpreterState)
from pypy.module.micronumpy.interp_numarray import (W_NDimArray,
BaseArray, W_FlatIterator)
from pypy.rlib.nonconst import NonConstant
class TestNumpyJIt(LLJitMixin):
    # Cached meta-interpreter and traced graph, built lazily on the first
    # run() call and shared by all subsequent tests of the class.
    graph = None
    interp = None
    def setup_class(cls):
        """Collect the mini-language programs declared by define_* methods.

        Builds cls.codes (parsed programs; index 0 is the default program
        below) and cls.code_mapping (program name -> index into cls.codes).
        """
        # NOTE(review): the 'sum(c) -> fd00:...' line below looks corrupted —
        # an IPv6-like token where a result index is expected. Confirm the
        # original program text against upstream before relying on it.
        default = """
        a = [1,2,3,4]
        c = a + b
        sum(c) -> fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b
        a -> 3:1:2
        """
        d = {}
        p = Parser()
        allcodes = [p.parse(default)]
        # define_* entries in cls.__dict__ are plain functions (no self), so
        # meth() can be called directly; iteritems() marks this as Python 2.
        for name, meth in cls.__dict__.iteritems():
            if name.startswith("define_"):
                code = meth()
                d[name[len("define_"):]] = len(allcodes)
                allcodes.append(p.parse(code))
        cls.code_mapping = d
        cls.codes = allcodes
def run(self, name):
space = FakeSpace()
i = self.code_mapping[name]
codes = self.codes
def f(i):
interp = InterpreterState(codes[i])
interp.run(space)
if not len(interp.results):
raise Exception("need results")
w_res = interp.results[-1]
if isinstance(w_res, BaseArray):
concr = w_res.get_concrete_or_scalar()
sig = concr.find_sig()
frame = sig.create_frame(concr)
w_res = sig.eval(frame, concr)
if isinstance(w_res, interp_boxes.W_Float64Box):
return w_res.value
if isinstance(w_res, interp_boxes.W_Int64Box):
return float(w_res.value)
elif isinstance(w_res, interp_boxes.W_BoolBox):
return float(w_res.value)
raise TypeError(w_res)
if self.graph is None:
interp, graph = self.meta_interp(f, [i],
listops=True,
backendopt=True,
graph_and_interp_only=True)
self.__class__.interp = interp
self.__class__.graph = graph
reset_stats()
pyjitpl._warmrunnerdesc.memory_manager.alive_loops.clear()
return self.interp.eval_graph(self.graph, [i])
def define_add():
return """
a = |30|
b = a + a
b -> 3
"""
def test_add(self):
result = self.run("add")
self.check_simple_loop({'getinteriorfield_raw': 2, 'float_add': 1,
'setinteriorfield_raw': 1, 'int_add': 1,
'int_ge': 1, 'guard_false': 1, 'jump': 1,
'arraylen_gc': 1})
assert result == 3 + 3
def define_float_add():
return """
a = |30| + 3
a -> 3
"""
def test_floatadd(self):
result = self.run("float_add")
assert result == 3 + 3
self.check_simple_loop({"getinteriorfield_raw": 1, "float_add": 1,
"setinteriorfield_raw": 1, "int_add": 1,
"int_ge": 1, "guard_false": 1, "jump": 1,
'arraylen_gc': 1})
def define_sum():
return """
a = |30|
b = a + a
sum(b)
"""
def test_sum(self):
result = self.run("sum")
assert result == 2 * sum(range(30))
self.check_simple_loop({"getinteriorfield_raw": 2, "float_add": 2,
"int_add": 1, "int_ge": 1, "guard_false": 1,
"jump": 1, 'arraylen_gc': 1})
def define_axissum():
return """
a = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
b = sum(a,0)
b -> 1
"""
def test_axissum(self):
result = self.run("axissum")
assert result == 30
# XXX note - the bridge here is fairly crucial and yet it's pretty
# bogus. We need to improve the situation somehow.
self.check_simple_loop({'getinteriorfield_raw': 2,
'setinteriorfield_raw': 1,
'arraylen_gc': 2,
'guard_true': 1,
'int_lt': 1,
'jump': 1,
'float_add': 1,
'int_add': 3,
})
def define_prod():
return """
a = |30|
b = a + a
prod(b)
"""
def test_prod(self):
result = self.run("prod")
expected = 1
for i in range(30):
expected *= i * 2
assert result == expected
self.check_simple_loop({"getinteriorfield_raw": 2, "float_add": 1,
"float_mul": 1, "int_add": 1,
"int_ge": 1, "guard_false": 1, "jump": 1,
'arraylen_gc': 1})
def define_max():
return """
a = |30|
a[13] = 128
b = a + a
max(b)
"""
def test_max(self):
result = self.run("max")
assert result == 256
py.test.skip("not there yet, getting though")
self.check_simple_loop({"getinteriorfield_raw": 2, "float_add": 1,
"float_mul": 1, "int_add": 1,
"int_lt": 1, "guard_true": 1, "jump": 1})
def test_min(self):
py.test.skip("broken, investigate")
result = self.run("""
a = |30|
a[15] = -12
b = a + a
min(b)
""")
assert result == -24
self.check_simple_loop({"getinteriorfield_raw": 2, "float_add": 1,
"float_mul": 1, "int_add": 1,
"int_lt": 1, "guard_true": 1, "jump": 1})
def define_any():
return """
a = [0,0,0,0,0,0,0,0,0,0,0]
a[8] = -12
b = a + a
any(b)
"""
def test_any(self):
result = self.run("any")
assert result == 1
self.check_simple_loop({"getinteriorfield_raw": 2, "float_add": 1,
"int_and": 1, "int_add": 1,
'cast_float_to_int': 1,
"int_ge": 1, "jump": 1,
"guard_false": 2, 'arraylen_gc': 1})
def define_already_forced():
return """
a = |30|
b = a + 4.5
b -> 5 # forces
c = b * 8
c -> 5
"""
def test_already_forced(self):
result = self.run("already_forced")
assert result == (5 + 4.5) * 8
# This is the sum of the ops for both loops, however if you remove the
# optimization then you end up with 2 float_adds, so we can still be
# sure it was optimized correctly.
py.test.skip("too fragile")
self.check_resops({'setinteriorfield_raw': 4, 'getfield_gc': 22,
'getarrayitem_gc': 4, 'getarrayitem_gc_pure': 2,
'getfield_gc_pure': 8,
'guard_class': 8, 'int_add': 8, 'float_mul': 2,
'jump': 2, 'int_ge': 4,
'getinteriorfield_raw': 4, 'float_add': 2,
'guard_false': 4, 'arraylen_gc': 2, 'same_as': 2})
def define_ufunc():
return """
a = |30|
b = a + a
c = unegative(b)
c -> 3
"""
def test_ufunc(self):
result = self.run("ufunc")
assert result == -6
self.check_simple_loop({"getinteriorfield_raw": 2, "float_add": 1,
"float_neg": 1,
"setinteriorfield_raw": 1, "int_add": 1,
"int_ge": 1, "guard_false": 1, "jump": 1,
'arraylen_gc': 1})
def define_specialization():
return """
a = |30|
b = a + a
c = unegative(b)
c -> 3
d = a * a
unegative(d)
d -> 3
d = a * a
unegative(d)
d -> 3
d = a * a
unegative(d)
d -> 3
d = a * a
unegative(d)
d -> 3
"""
def test_specialization(self):
self.run("specialization")
# This is 3, not 2 because there is a bridge for the exit.
self.check_trace_count(3)
def define_slice():
return """
a = |30|
b = a -> ::3
c = b + b
c -> 3
"""
def test_slice(self):
result = self.run("slice")
assert result == 18
self.check_simple_loop({'getinteriorfield_raw': 2,
'float_add': 1,
'setinteriorfield_raw': 1,
'int_add': 3,
'int_ge': 1, 'guard_false': 1,
'jump': 1,
'arraylen_gc': 1})
def define_take():
return """
a = |10|
b = take(a, [1, 1, 3, 2])
b -> 2
"""
def test_take(self):
result = self.run("take")
assert result == 3
self.check_simple_loop({'getinteriorfield_raw': 2,
'cast_float_to_int': 1,
'int_lt': 1,
'int_ge': 2,
'guard_false': 3,
'setinteriorfield_raw': 1,
'int_mul': 1,
'int_add': 3,
'jump': 1,
'arraylen_gc': 2})
def define_multidim():
return """
a = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
b = a + a
b -> 1 -> 1
"""
def test_multidim(self):
result = self.run('multidim')
assert result == 8
# int_add might be 1 here if we try slightly harder with
# reusing indexes or some optimization
self.check_simple_loop({'float_add': 1, 'getinteriorfield_raw': 2,
'guard_false': 1, 'int_add': 1, 'int_ge': 1,
'jump': 1, 'setinteriorfield_raw': 1,
'arraylen_gc': 1})
def define_multidim_slice():
return """
a = [[1, 2, 3, 4], [3, 4, 5, 6], [5, 6, 7, 8], [7, 8, 9, 10], [9, 10, 11, 12], [11, 12, 13, 14], [13, 14, 15, 16], [16, 17, 18, 19]]
b = a -> fc00:e968:6179::de52:7100
c = b + b
c -> 1 -> 1
"""
def test_multidim_slice(self):
result = self.run('multidim_slice')
assert result == 12
py.test.skip("improve")
# XXX the bridge here is scary. Hopefully jit-targets will fix that,
# otherwise it looks kind of good
self.check_simple_loop({})
def define_broadcast():
return """
a = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]
b = [1, 2, 3, 4]
c = a + b
c -> 1 -> 2
"""
def test_broadcast(self):
result = self.run("broadcast")
assert result == 10
py.test.skip("improve")
self.check_simple_loop({})
def define_setslice():
return """
a = |30|
b = |10|
b[1] = 5.5
c = b + b
a[0:30:3] = c
a -> 3
"""
def test_setslice(self):
result = self.run("setslice")
assert result == 11.0
self.check_trace_count(1)
self.check_simple_loop({'getinteriorfield_raw': 2, 'float_add': 1,
'setinteriorfield_raw': 1, 'int_add': 2,
'int_eq': 1, 'guard_false': 1, 'jump': 1,
'arraylen_gc': 1})
def define_virtual_slice():
return """
a = |30|
c = a + a
d = c -> 1:20
d -> 1
"""
def test_virtual_slice(self):
result = self.run("virtual_slice")
assert result == 4
self.check_trace_count(1)
self.check_simple_loop({'getinteriorfield_raw': 2, 'float_add': 1,
'setinteriorfield_raw': 1, 'int_add': 1,
'int_ge': 1, 'guard_false': 1, 'jump': 1,
'arraylen_gc': 1})
def define_flat_iter():
return '''
a = |30|
b = flat(a)
c = b + a
c -> 3
'''
def test_flat_iter(self):
result = self.run("flat_iter")
assert result == 6
self.check_trace_count(1)
self.check_simple_loop({'getinteriorfield_raw': 2, 'float_add': 1,
'setinteriorfield_raw': 1, 'int_add': 2,
'int_ge': 1, 'guard_false': 1,
'arraylen_gc': 1, 'jump': 1})
def define_flat_getitem():
return '''
a = |30|
b = flat(a)
b -> 4: -> 6
'''
def test_flat_getitem(self):
result = self.run("flat_getitem")
assert result == 10.0
self.check_trace_count(1)
self.check_simple_loop({'getinteriorfield_raw': 1,
'setinteriorfield_raw': 1,
'int_lt': 1,
'int_ge': 1,
'int_add': 3,
'guard_true': 1,
'guard_false': 1,
'arraylen_gc': 2,
'jump': 1})
def define_flat_setitem():
return '''
a = |30|
b = flat(a)
b[4:] = a->:26
a -> 5
'''
def test_flat_setitem(self):
result = self.run("flat_setitem")
assert result == 1.0
self.check_trace_count(1)
# XXX not ideal, but hey, let's ignore it for now
self.check_simple_loop({'getinteriorfield_raw': 1,
'setinteriorfield_raw': 1,
'int_lt': 1,
'int_gt': 1,
'int_add': 4,
'guard_true': 2,
'arraylen_gc': 2,
'jump': 1,
'int_sub': 1,
# XXX bad part
'int_and': 1,
'int_mod': 1,
'int_rshift': 1,
})
def define_dot():
return """
a = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]
b=[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]]
c = dot(a, b)
c -> 1 -> 2
"""
def test_dot(self):
result = self.run("dot")
assert result == 184
self.check_simple_loop({'arraylen_gc': 9,
'float_add': 1,
'float_mul': 1,
'getinteriorfield_raw': 3,
'guard_false': 3,
'guard_true': 3,
'int_add': 6,
'int_lt': 6,
'int_sub': 3,
'jump': 1,
'setinteriorfield_raw': 1})
|
import React from 'react';
function Jetway () {
return (
<div className="card col-11 aboutMeCard" style={{ display:"inline-flex"}}>
<div className="card-body">
<h5 className="card-title" ><NAME></h5>
<h6 className="card-subtitle mb-2 text-muted">An iterative renovation of a 297 sq ft apartment in the Emerald City.</h6>
<a href="/design" className="card-link" >Coming Soon.</a>
</div>
</div>
);
};
export default Jetway; |
#!/usr/bin/env bash
# Launch a sweep of PytorchSAC + FourierMLP runs in the background, pinning
# each process to its own GPU (CUDA_VISIBLE_DEVICES) and CPU cores (taskset).
# Sweep axes: environment (walker.run / quadruped.run / quadruped.walk),
# Fourier sigma (0.0001 / 0.0003), and seed (10 / 20 / 30).
# NOTE(review): GPUs 2 and 3 are reused across rows — confirm the machine has
# enough memory for two concurrent runs per device.

# dm.walker.run, sigma 0.0003, seeds 10/20/30
CUDA_VISIBLE_DEVICES=2 taskset -c 0-2 python main.py --policy PytorchSAC --env dm.walker.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 10 --max_timesteps 1000000 &
CUDA_VISIBLE_DEVICES=3 taskset -c 3-5 python main.py --policy PytorchSAC --env dm.walker.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 20 --max_timesteps 1000000 &
CUDA_VISIBLE_DEVICES=4 taskset -c 6-8 python main.py --policy PytorchSAC --env dm.walker.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 30 --max_timesteps 1000000 &
# dm.quadruped.run, sigma 0.0001, seeds 10/20/30
CUDA_VISIBLE_DEVICES=5 taskset -c 9-11 python main.py --policy PytorchSAC --env dm.quadruped.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0001 --fourier_dim 1024 --seed 10 --max_timesteps 2000000 &
CUDA_VISIBLE_DEVICES=6 taskset -c 12-14 python main.py --policy PytorchSAC --env dm.quadruped.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0001 --fourier_dim 1024 --seed 20 --max_timesteps 2000000 &
CUDA_VISIBLE_DEVICES=7 taskset -c 15-17 python main.py --policy PytorchSAC --env dm.quadruped.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0001 --fourier_dim 1024 --seed 30 --max_timesteps 2000000 &
# dm.quadruped.run, sigma 0.0003, seeds 10/20/30
CUDA_VISIBLE_DEVICES=8 taskset -c 18-20 python main.py --policy PytorchSAC --env dm.quadruped.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 10 --max_timesteps 2000000 &
CUDA_VISIBLE_DEVICES=2 taskset -c 21-23 python main.py --policy PytorchSAC --env dm.quadruped.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 20 --max_timesteps 2000000 &
CUDA_VISIBLE_DEVICES=3 taskset -c 24-26 python main.py --policy PytorchSAC --env dm.quadruped.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 30 --max_timesteps 2000000 &
# dm.quadruped.walk, sigma 0.0001, seeds 10/20/30
CUDA_VISIBLE_DEVICES=4 taskset -c 27-29 python main.py --policy PytorchSAC --env dm.quadruped.walk --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0001 --fourier_dim 1024 --seed 10 --max_timesteps 2000000 &
CUDA_VISIBLE_DEVICES=5 taskset -c 30-32 python main.py --policy PytorchSAC --env dm.quadruped.walk --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0001 --fourier_dim 1024 --seed 20 --max_timesteps 2000000 &
CUDA_VISIBLE_DEVICES=6 taskset -c 33-35 python main.py --policy PytorchSAC --env dm.quadruped.walk --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0001 --fourier_dim 1024 --seed 30 --max_timesteps 2000000 &
# dm.quadruped.walk, sigma 0.0003, seeds 10/20/30
CUDA_VISIBLE_DEVICES=7 taskset -c 36-38 python main.py --policy PytorchSAC --env dm.quadruped.walk --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 10 --max_timesteps 2000000 &
CUDA_VISIBLE_DEVICES=8 taskset -c 39-41 python main.py --policy PytorchSAC --env dm.quadruped.walk --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 20 --max_timesteps 2000000 &
CUDA_VISIBLE_DEVICES=2 taskset -c 42-44 python main.py --policy PytorchSAC --env dm.quadruped.walk --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.0003 --fourier_dim 1024 --seed 30 --max_timesteps 2000000 &
|
/**
 * Template extension exposing grid helpers: sorting, filtering, and
 * per-cell formatting.  Sorting and filtering are declared as inline
 * closures; cell formatting delegates to a method on this class.
 */
class GridExtension extends AbstractExtension
{
    /**
     * {@inheritdoc}
     *
     * @return array[] list of function descriptors, each with a 'name'
     *                 and a 'callable' entry.
     */
    public function getFunctions()
    {
        return [
            [
                'name' => 'sortGrid',
                'callable' => function (array $gridData, $column, $order = 'asc') {
                    // Implement sorting logic for grid data based on the specified column and order
                    // Return the sorted grid data
                }
            ],
            [
                'name' => 'filterGrid',
                'callable' => function (array $gridData, $filters) {
                    // Implement filtering logic for grid data based on the specified filters
                    // Return the filtered grid data
                }
            ],
            [
                'name' => 'formatGridCell',
                // Bound to the method below rather than an inline closure.
                'callable' => [$this, 'formatGridCellMethod']
            ]
        ];
    }

    /**
     * Method to be used as a callable for 'formatGridCell' function.
     *
     * @param mixed  $cellValue raw cell value to render
     * @param string $format    target format identifier
     */
    public function formatGridCellMethod($cellValue, $format) {
        // Implement formatting logic for a single grid cell value based on the specified format
        // Return the formatted cell value
    }
}
#include <functional>
#include <chrono>
/**
 * Smooths incoming feedback values into a sending-rate estimate using an
 * exponential moving average, invoking `sender` each time the rate is
 * refreshed.  The clock is injected to allow a caller-supplied time source.
 */
class RembThrottler {
public:
  RembThrottler(std::function<void()> sender, std::chrono::steady_clock* clock)
      : sender_(sender), clock_(clock), alpha_(0.2), last_feedback_(0), last_send_time_(clock_->now()) {}

  /** Records the most recent feedback value; consumed by adjustSendingRate(). */
  void receiveFeedback(double feedback) {
    last_feedback_ = feedback;
  }

  /**
   * Recomputes the smoothed sending rate from the latest feedback and the
   * elapsed time since the last update, then notifies the sender.
   * No-op when no time has elapsed, which also avoids dividing by a
   * zero-length interval (previously the division happened before the
   * guard, producing an unused inf/NaN intermediate).
   */
  void adjustSendingRate() {
    auto now = clock_->now();
    if (last_send_time_ != now) {
      std::chrono::duration<double> elapsed_time = now - last_send_time_;
      double instant_rate = last_feedback_ / elapsed_time.count();
      // EMA: weight alpha_ on the instantaneous rate, rest on history.
      sending_rate_ = alpha_ * instant_rate + (1 - alpha_) * sending_rate_;
      last_send_time_ = now;
      sender_();
    }
  }

private:
  std::function<void()> sender_;
  std::chrono::steady_clock* clock_;
  double alpha_; // Smoothing factor for exponential moving average
  double last_feedback_;
  std::chrono::time_point<std::chrono::steady_clock> last_send_time_;
  double sending_rate_ = 0;
};
#!/bin/bash
# Sum the requested runtime (in minutes, stored on the first line of every
# file in ../newjobs) of all pending jobs and report the date until which
# the server is expected to run.
cd /home/jonas/Schreibtisch/jobs_manager/scripts
cd ../newjobs
allminuts=0
for f in *; do
    # When the directory is empty the unmatched glob stays literal "*": skip it.
    if [ "$f" != "*" ]; then
        # First line of the job file holds its duration in minutes.
        # Quote "$f" so filenames with spaces are handled correctly.
        param=$(sed -n '1p' "$f")
        allminuts=$(( allminuts + param ))
    fi
done
NEW_expration_DATE=$(date -d "+$allminuts minutes")
# echo "sum of the minutes of the new jobs " $allminuts "; all job can run until "$NEW_expration_DATE
echo " " $allminuts " minutes for new jobs; server is expected to run until " $NEW_expration_DATE
|
/**
 * Bishop chess piece: moves any number of squares diagonally, provided the
 * path is clear and the destination is not occupied by a same-color piece.
 */
public class Bishop extends Piece {
    /**
     * Creates a bishop at the given board coordinates.
     *
     * @param color  owning side
     * @param ID     piece identifier
     * @param startX initial file (0-7)
     * @param startY initial rank (0-7)
     */
    public Bishop(Color color, String ID, int startX, int startY) {
        super(color, ID, startX, startY);
    }

    /** Returns the Unicode glyph for this bishop (white or black). */
    @Override
    public String toString() {
        if (this.getColor() == Color.WHITE) {
            return "♗";
        }
        return "♝";
    }

    /**
     * Checks whether moving to (x, y) is legal for a bishop: the target is
     * not a same-color piece, the move is strictly diagonal, and no piece
     * blocks the path.
     */
    @Override
    public boolean possibleMove(int x, int y) {
        // cannot conflict with same color piece
        if (this.sameColor(Board.getPiece(x, y)) == true) {
            return false;
        }
        // cannot move side to side: a diagonal move changes file and rank
        // by the same absolute amount
        if (Math.abs(getX() - x) != Math.abs(getY() - y)) {
            return false;
        }
        if (Board.isPathClear(getX(), getY(), x, y)) {
            return true;
        }
        return false;
    }

    /**
     * Returns true if this bishop has at least one legal move, scanning all
     * four diagonal directions outward from its current square.
     */
    @Override
    public boolean canMove() {
        int originX = this.getX();
        int originY = this.getY();
        int x = originX;
        int y = originY;
        /*
        Check all cases of illegal moves:
        going out of bounds on either side, top or bottom
        or conflicts
        */
        // Down-left diagonal.
        while ((--x) >= 0 && (--y) >= 0) {
            if (this.testMove(x, y)) {
                return true;
            }
        }
        // Down-right diagonal.
        x = originX;
        y = originY;
        while ((++x) <= 7 && (--y) >= 0) {
            if (this.testMove(x, y)) {
                return true;
            }
        }
        // Up-left diagonal.
        x = originX;
        y = originY;
        while ((--x) >= 0 && (++y) <= 7) {
            if (this.testMove(x, y)) {
                return true;
            }
        }
        /**
         * Up-right diagonal — the last direction; if it also yields no
         * legal square, the bishop cannot move.
         */
        x = originX;
        y = originY;
        while ((++x) <= 7 && (++y) <= 7) {
            if (this.testMove(x, y)) {
                return true;
            }
        }
        return false;
    }
}
|
<filename>tests/acceptance/ember-init-test.js
import startApp from '../helpers/start-app';
import configuration from '../../config/environment';
import lookup from '../helpers/lookup';
import QUnit from 'qunit';
const { module, test } = QUnit;
// Resolves a factory (not an instance) from the app's private container.
// NOTE(review): relies on the private __container__ API — verify against the
// Ember version in use.
function lookupFactory(app, key) {
  return app.__container__.lookupFactory(key);
}

let toriiConfiguration = configuration.torii;

var app, originalSessionServiceName;

// Acceptance tests for torii's app initializer: the session service should
// only be injected when `sessionServiceName` is present in the config, under
// exactly that name.  `Ember` is assumed to be available as a global here.
module('Ember Initialization - Acceptance', {
  setup: function(){
    // Remove the configured service name so each test controls it explicitly.
    originalSessionServiceName = toriiConfiguration.sessionServiceName;
    delete toriiConfiguration.sessionServiceName;
  },
  teardown: function(){
    Ember.run(app, 'destroy');
    // Restore the original configuration for subsequent test modules.
    toriiConfiguration.sessionServiceName = originalSessionServiceName;
  }
});

test('session is not injected by default', function(assert){
  app = startApp();
  assert.ok(!lookup(app, 'service:session'));
  app.register('controller:application', Ember.Controller.extend());
  var controller = lookup(app, 'controller:application');
  assert.ok(!controller.get('session'), 'controller has no session');
});

test('session is injected with the name in the configuration', function(assert){
  toriiConfiguration.sessionServiceName = 'wackySessionName';
  app = startApp({loadInitializers: true});
  assert.ok(lookup(app, 'service:wackySessionName'), 'service:wackySessionName is injected');
  app.register('controller:application', Ember.Controller.extend());
  var controller = lookup(app, 'controller:application');
  assert.ok(controller.get('wackySessionName'),
            'Controller has session with accurate name');
  assert.ok(!controller.get('session'),
            'Controller does not have "session" property name');
});

test('session is injectable using inject.service', function(assert){
  toriiConfiguration.sessionServiceName = 'session';
  app = startApp({loadInitializers: true});
  assert.ok(lookup(app, 'service:session'), 'service:session is injected');
  app.register('component:testComponent', Ember.Component.extend({
    session: Ember.inject.service('session'),
    torii: Ember.inject.service('torii')
  }));
  // Stub renderer so the component can be created outside a rendered route.
  var DummyRenderer = { componentInitAttrs() {} };
  var component = lookupFactory(app, 'component:testComponent').create({renderer: DummyRenderer});
  assert.ok(component.get('session'), 'Component has access to injected session service');
  assert.ok(component.get('torii'), 'Component has access to injected torii service');
});
|
package com.siyuan.enjoyreading.ui.activity.login;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import com.androidapp.activity.BaseActivity;
import com.siyuan.enjoyreading.R;
/**
 * Password-based login screen.  The title bar's right button switches to
 * secret-code login; a text link below opens account creation.
 */
public class PassWordLoginActivity extends BaseActivity {
    private TextView mCreateAccountTextView;

    // Shared listener that dispatches on the clicked view identity.
    private View.OnClickListener mOnClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (v == mTitleBar.getRightTextView()) {
                doStartActivity(SecretLoginActivity.class, null);
            } else if (v == mCreateAccountTextView) {
                doStartActivity(CreateAccountActivity.class, null);
            }
        }
    };

    @Override
    protected void initContentView(Bundle bundle) {
        setContentView(R.layout.act_account_login_pwd);
    }

    @Override
    protected void initView() {
        // mTitleBar may be absent depending on the base layout; guard it.
        if (mTitleBar != null) {
            mTitleBar.getRightTextView().setOnClickListener(mOnClickListener);
        }
        mCreateAccountTextView = findViewById(R.id.tv_create_account);
        mCreateAccountTextView.setOnClickListener(mOnClickListener);
    }

    @Override
    protected void initData() {
    }
}
|
<gh_stars>0
package es.shyri.longtaskservice;
import android.app.IntentService;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
import android.support.annotation.Nullable;
import android.support.v7.app.NotificationCompat;
/**
* Created by Shyri on 01/02/2016.
*/
public class LongTaskService extends IntentService implements LongTaskRunnable.LontTaskInterface {
public static final int NOTIFICATION_ID_PROGRESS = 1234;
public static final int NOTIFICATION_ID_ENDED = 1235;
public static final String CANCEL_TASK_ACTION = "es.shyri.longtaskservice.ACTION_CANCEL";
private NotificationManager nm;
private LongTaskRunnable longTaskRunnable;
private PendingIntent cancelPendingIntent;
private final IBinder mBinder = new LocalBinder();
Handler longTaskMessageHandler;
/**
* Creates an IntentService. Invoked by your subclass's constructor.
*
*/
public LongTaskService() {
super("LongTaskService");
}
public void onCreate() {
super.onCreate();
nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
longTaskRunnable = new LongTaskRunnable(this);
Intent cancelIntent = new Intent(this, LongTaskService.class);
cancelIntent.setAction(CANCEL_TASK_ACTION);
cancelPendingIntent = PendingIntent.getService(this, 0, cancelIntent, 0);
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
@Override
protected void onHandleIntent(Intent intent) {
if(intent.getAction() != null && intent.getAction().equals(CANCEL_TASK_ACTION)) {
cancelLongTask();
}
}
public void performLongTask() {
new Thread(longTaskRunnable).start();
}
public void cancelLongTask() {
longTaskRunnable.cancel();
}
public LongTaskRunnable.STATUS getCurrentStatus() {
return longTaskRunnable.getCurrentStatus();
}
public void setMessageHandler(Handler longTaskMessageHandler) {
this.longTaskMessageHandler = longTaskMessageHandler;
}
@Override
public void onStatusUpdate(LongTaskRunnable.STATUS currentStatus) {
switch(currentStatus) {
case STARTING: {
NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(this);
notificationBuilder.setContentTitle(getString(R.string.notification_title))
.setContentText(getString(R.string.notification_message_starting))
.setSmallIcon(R.mipmap.ic_launcher)
.addAction(new android.support.v4.app.NotificationCompat.Action(R.mipmap.ic_launcher, CANCEL_TASK_ACTION, cancelPendingIntent))
.setOngoing(true);
startForeground(NOTIFICATION_ID_PROGRESS, notificationBuilder.build());
break;
}
case RUNNING: {
NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(this);
notificationBuilder.setContentTitle(getString(R.string.notification_title))
.setContentText(getString(R.string.notification_message_running))
.setSmallIcon(R.mipmap.ic_launcher)
.setOngoing(true)
.addAction(new android.support.v4.app.NotificationCompat.Action(R.mipmap.ic_launcher, CANCEL_TASK_ACTION, cancelPendingIntent))
.setContentInfo("0%");
startForeground(NOTIFICATION_ID_PROGRESS, notificationBuilder.build());
break;
}
case END_SUCCESSFULLY: {
NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(this);
notificationBuilder.setContentTitle(getString(R.string.notification_title))
.setContentText(getString(R.string.notification_message_end_successfully))
.setOngoing(false)
.setSmallIcon(R.mipmap.ic_launcher);
nm.notify(NOTIFICATION_ID_ENDED, notificationBuilder.build());
stopForeground(true);
break;
}
case CANCELLING: {
NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(this);
notificationBuilder.setContentTitle(getString(R.string.notification_title))
.setContentText(getString(R.string.notification_message_cancelling))
.setOngoing(false)
.setSmallIcon(R.mipmap.ic_launcher);
startForeground(NOTIFICATION_ID_PROGRESS, notificationBuilder.build());
break;
}
case END_CANCELLED: {
NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(this);
notificationBuilder.setContentTitle(getString(R.string.notification_title))
.setContentText(getString(R.string.notification_message_end_cancelled))
.setOngoing(false)
.setSmallIcon(R.mipmap.ic_launcher);
nm.notify(NOTIFICATION_ID_ENDED, notificationBuilder.build());
stopForeground(true);
break;
}
default:
break;
}
}
@Override
public void onProgressUpdate(int percentage) {
NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(this);
notificationBuilder.setContentTitle(getString(R.string.notification_title))
.setContentText(getString(R.string.notification_message_running))
.setSmallIcon(R.mipmap.ic_launcher)
.setOngoing(true)
.setProgress(100, percentage, false)
.addAction(new android.support.v4.app.NotificationCompat.Action(R.mipmap.ic_launcher, CANCEL_TASK_ACTION, cancelPendingIntent))
.setContentInfo(percentage + "%");
startForeground(NOTIFICATION_ID_PROGRESS, notificationBuilder.build());
}
/**
* Class used for the client Binder. Because we know this service always
* runs in the same process as its clients, we don't need to deal with IPC.
*/
public class LocalBinder extends Binder {
public LongTaskService getService() {
// Return this instance of LocalService so clients can call public methods
return LongTaskService.this;
}
}
}
|
// Invoke the "examples.getStateName" procedure on the public demo XML-RPC
// endpoint and print the decoded result (or the error message) to stdout.
const xmlrpc = require("davexmlrpc");

const endpoint = "http://betty.userland.com/rpc2";
const procedure = "examples.getStateName";
const args = [5]; // a single argument: the number 5
const wireFormat = "xml"; // "json" is also supported

xmlrpc.client(endpoint, procedure, args, wireFormat, (err, data) => {
    if (err) {
        console.log("err.message == " + err.message);
        return;
    }
    console.log(JSON.stringify(data));
});
|
package io.opensphere.auxiliary.video;
import java.awt.image.BufferedImage;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import com.xuggle.ferry.IBuffer;
import com.xuggle.xuggler.ICodec;
import com.xuggle.xuggler.IContainer;
import com.xuggle.xuggler.IContainerFormat;
import com.xuggle.xuggler.IError;
import com.xuggle.xuggler.IPacket;
import com.xuggle.xuggler.IPixelFormat;
import com.xuggle.xuggler.IRational;
import com.xuggle.xuggler.IStream;
import com.xuggle.xuggler.IStreamCoder;
import com.xuggle.xuggler.IVideoPicture;
import com.xuggle.xuggler.io.XugglerIO;
import com.xuggle.xuggler.video.ConverterFactory;
import com.xuggle.xuggler.video.IConverter;
import io.opensphere.core.video.KLVVideoEncoder;
import io.opensphere.core.video.VideoEncoderException;
/**
* Encodes a video and metadata into a klv video. The klv video will be put into
* the given {@link OutputStream}.
*/
public class KLVEncoder implements KLVVideoEncoder
{
/**
* The first presentation time for a video chunk.
*/
private long myFirstPts;
/**
* The klv coder.
*/
private IStreamCoder myKlvCoder;
/**
* The output encoder.
*/
private IStreamCoder myOutCoder;
/**
* The output container.
*/
private IContainer myOutContainer;
/**
* The outputstream to write out klv video to.
*/
private OutputStream myOutputStream;
/**
* Completes the encoding process.
*/
@Override
public void close()
{
if (myOutContainer != null)
{
myOutContainer.writeTrailer();
myOutCoder.close();
myOutCoder.delete();
myKlvCoder.close();
myKlvCoder.delete();
myOutContainer.flushPackets();
myOutContainer.close();
myOutContainer.delete();
myOutContainer = null;
}
}
/**
* Encodes the metadata into the klv stream.
*
* @param metadata The metadata in MISB KLV format to add to the klv stream.
* @param ptsMS The time fo the metadata.
*/
@Override
@SuppressWarnings("PMD.AvoidArrayLoops")
public void encodeMetadata(ByteBuffer metadata, long ptsMS)
{
byte[] metadataBytes = metadata.array();
byte[] packetBytes = new byte[metadataBytes.length * 2];
for (int i = 0, j = 1; i < metadataBytes.length; i++, j += 2)
{
packetBytes[j] = metadataBytes[i];
}
IBuffer buffer = IBuffer.make(null, packetBytes, 0, packetBytes.length);
IPacket packet = IPacket.make(buffer);
try
{
packet.setTimeBase(myKlvCoder.getTimeBase());
// TODO Since xuggler doesn't handle klv appropriately we must put
// all metadata at the front of the video file so set the pts to
// zero. Once our xuggler version handles klv appropriately
// uncomment the below code out to pass in the correct time.
// double secondsPerTimeBase = packet.getTimeBase().getDouble();
// long timeBasePts = (long)((ptsMS - myFirstPts) / secondsPerTimeBase / Constants.MILLI_PER_UNIT);
packet.setPts(0);
packet.setStreamIndex(myKlvCoder.getStream().getIndex());
packet.setComplete(true, packet.getSize());
myOutContainer.writePacket(packet);
}
finally
{
buffer.delete();
packet.delete();
}
}
/**
* Encodes the frame into the klv stream.
*
* @param image The image to encode.
* @param ptsMS The presentation time of the image in milliseconds.
* @throws VideoEncoderException Thrown if there were issues writing encoded
* video to the stream.
*/
@Override
public void encodeVideo(BufferedImage image, long ptsMS) throws VideoEncoderException
{
if (myOutContainer == null)
{
setupOutputContainer(image.getWidth(), image.getHeight());
myFirstPts = ptsMS;
}
IPacket packet = IPacket.make();
try
{
IConverter converter = ConverterFactory.createConverter(image, myOutCoder.getPixelType());
IVideoPicture picture = converter.toPicture(image, (ptsMS - myFirstPts) * 1000);
picture.setQuality(1);
if (myOutCoder.encodeVideo(packet, picture, 0) < 0)
{
throw new VideoEncoderException("Could not encode video", null);
}
if (packet.isComplete())
{
int status = myOutContainer.writePacket(packet);
if (status < 0)
{
throw new VideoEncoderException("Could not write packet to container", null);
}
}
}
finally
{
packet.delete();
}
}
/**
* Sets up the output container and encoder.
*
* @param width The width of the video.
* @param height The height of the video.
* @throws VideoEncoderException If the output container could not be setup
* properly.
*/
private void setupOutputContainer(int width, int height) throws VideoEncoderException
{
    IContainerFormat format = IContainerFormat.make();
    IRational timeBase = IRational.make(1, 60);
    try
    {
        // MPEG-TS mux so the video and KLV metadata streams can be interleaved.
        format.setOutputFormat("mpegts", null, null);
        myOutContainer = IContainer.make(format);
        // Bridge the Java OutputStream into a pseudo-URL Xuggler can open.
        String outputStreamUrl = XugglerIO.map(myOutputStream);
        if (myOutContainer.open(outputStreamUrl, IContainer.Type.WRITE, format, true, false) < 0)
        {
            throw new VideoEncoderException("Could not open output container for klv video.", null);
        }
        IStream videoStream = myOutContainer.addNewStream(ICodec.findEncodingCodec(ICodec.ID.CODEC_ID_MPEG2VIDEO));
        IStream klvStream = myOutContainer.addNewStream(ICodec.findEncodingCodec(ICodec.ID.CODEC_ID_FFMETADATA));
        myOutCoder = videoStream.getStreamCoder();
        myKlvCoder = klvStream.getStreamCoder();
        myOutCoder.setWidth(width);
        myOutCoder.setHeight(height);
        // 1/60 time base with YUV420P pixels and a fixed high bit rate.
        myOutCoder.setTimeBase(timeBase);
        myOutCoder.setPixelType(IPixelFormat.Type.YUV420P);
        myOutCoder.setBitRate(12000000);
        myOutCoder.setGlobalQuality(1);
        int error = myOutCoder.open(null, null);
        if (error < 0)
        {
            IError theError = IError.make(error);
            try
            {
                throw new VideoEncoderException("Could not open encoder " + theError, null);
            }
            finally
            {
                // Free the native error object before the exception propagates.
                theError.delete();
            }
        }
        myOutContainer.writeHeader();
    }
    finally
    {
        // Native Xuggler objects must be freed explicitly.
        format.delete();
        timeBase.delete();
    }
}
// Stores the destination stream; the actual container is created lazily on
// the first call to encodeVideo.
@Override
public void init(OutputStream stream)
{
    myOutputStream = stream;
}
}
|
/**
 * Integration tests for the isTypedArray function.
 * (Original header: "isTypedArray-funktion integraatiotestit".)
 *
 * @group integration
 */
import isTypedArray from "../../src/isTypedArray.js";
// Mocks the internal Node type helpers to report false, presumably so the
// pure-JS fallback path is exercised instead of util.types — TODO confirm.
jest.mock("../../src/.internal/nodeTypes.js",
() => ({isTypedArray: false}));
describe("integration/isTypedArray", () => {
  // `new Uint8Array` with no arguments constructs an empty typed array.
  it("new Uint8Array -> true (dokumentaatioesimerkki)", () => {
    expect(isTypedArray(new Uint8Array))
      .toBe(true);
  });
  // Plain arrays are not typed arrays.
  it("[1, 2, 3] -> false", () => {
    expect(isTypedArray([1, 2, 3]))
      .toBe(false);
  });
  it("null -> false", () => {
    expect(isTypedArray(null))
      .toBe(false);
  });
  it("undefined -> false", () => {
    expect(isTypedArray(undefined))
      .toBe(false);
  });
  it("NaN -> false", () => {
    expect(isTypedArray(NaN))
      .toBe(false);
  });
});
|
#!/bin/bash
# Sync the Appointment plugin sources and its e2e test assets from the plugin
# repository checkout into the frontend repository working tree.
cd ~
pwd

# Repository roots (relative to $HOME); both trees contain no spaces.
SRC=Documents/workspace/microting/eform-angular-appointment-plugin
DST=Documents/workspace/microting/eform-angular-frontend

# Plugin application code: replace the frontend copies wholesale.
rm -fR $DST/eform-client/src/app/plugins/modules/appointment-pn
cp -a $SRC/eform-client/src/app/plugins/modules/appointment-pn $DST/eform-client/src/app/plugins/modules/appointment-pn
rm -fR $DST/eFormAPI/Plugins/Appointment.Pn
cp -a $SRC/eFormAPI/Plugins/Appointment.Pn $DST/eFormAPI/Plugins/Appointment.Pn

# Test files rm
rm -fR $DST/eform-client/e2e/Tests/appointment-settings
rm -fR $DST/eform-client/e2e/Tests/appointment-general
rm -fR $DST/eform-client/e2e/Page\ objects/Appointment
rm -fR $DST/eform-client/wdio-plugin-step2.conf.js

# Test files cp (note: the headless wdio config is installed under the
# non-headless name, matching the original script)
cp -a $SRC/eform-client/e2e/Tests/appointment-settings $DST/eform-client/e2e/Tests/appointment-settings
cp -a $SRC/eform-client/e2e/Tests/appointment-general $DST/eform-client/e2e/Tests/appointment-general
cp -a $SRC/eform-client/e2e/Page\ objects/Appointment $DST/eform-client/e2e/Page\ objects/Appointment
cp -a $SRC/eform-client/wdio-headless-plugin-step2.conf.js $DST/eform-client/wdio-plugin-step2.conf.js
|
#!/bin/sh
# kFreeBSD do not accept scripts as interpreters, using #!/bin/sh and sourcing.
if [ true != "$INIT_D_SCRIPT_SOURCED" ] ; then
    set "$0" "$@"; INIT_D_SCRIPT_SOURCED=true . /lib/init/init-d-script
fi
### BEGIN INIT INFO
# Provides:          sysup.sh
# Required-Start:    $remote_fs $syslog
# Required-Stop:     $remote_fs $syslog
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: A daemon script which uploads uptime information to a remote host.
# Description:       Requires sysupload.php on public server or ssh-keygen and ssh-copy-id to remote host. Must be added to
#                    crontab @reboot or added as service. Should chmod 755.
### END INIT INFO
# Author: Bastian Tenbergen (bastian.tenbergen@oswego.edu)
# Version: 2020/02/25

# Collect host information once a minute and (once one of the upload modes
# below is uncommented) push it to a remote host.
while (true)
do
    date=$(date "+%Y/%m/%d-%H:%M:%S")
    # First non-loopback IPv4 address reported by ifconfig.
    addr=$(ifconfig | sed -En 's/127.0.0.1//;s/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p')
    ext_addr=$(curl -s http://ipecho.net/plain; echo;)
    hostname=$(hostname)
    uptime=$(uptime -p)
    message="$hostname @ $addr via $ext_addr, last seen: $date, $uptime"
    {
        # BUGFIX: a brace group containing only comments is a shell syntax
        # error; ':' is a no-op placeholder that keeps the group valid until
        # one of the upload commands below is uncommented.
        :
        #uncomment the following line for scp-mode. Requires ssh-keygen and ssh-copy-id to remote host.
        #ssh user@example.com "echo $message > ~/public_html/$hostname.txt"
        #uncomment the following lines for www-mode. Requires sysupload.php script to exist at remote dir and dir possibly to be chmod 777.
        #urlmessage=$(echo $message | sed -r 's/ /+/g')
        #curl -m 2 "http://example.com/~user/sysupload.php?host=$hostname&data=$urlmessage"
    } || {
        echo "Host unreachable at $date." >> ./sysup.log
    }
    sleep 60
done
|
<gh_stars>1-10
package segment_tree;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 2820번: 자동차 공장
*
* @see https://www.acmicpc.net/problem/2820
*
*/
public class Boj2820 {
    // Child lists of the employee hierarchy (employee 0 is the root).
    private static ArrayList<Integer>[] link;
    // Segment tree over the preorder numbering, with lazy range-add values.
    private static long[] tree;
    private static long[] lazy;
    // start[i]..end[i] is the preorder interval covering employee i's subtree.
    private static int[] start, end;
    // N employees; S is grown to the smallest power of two greater than N.
    private static int N, S = 1;
    // Preorder counter used by dfs (pre-incremented, so first label is 0).
    private static int count = -1;
    private static final char PAY = 'p';
    private static final String NEW_LINE = "\n";
    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        N = Integer.parseInt(st.nextToken());
        int M = Integer.parseInt(st.nextToken());
        init();
        long[] cost = new long[N];
        cost[0] = Long.parseLong(br.readLine());
        // Employees 1..N-1: salary and direct boss (1-based in the input).
        for(int i = 1; i < N; i++){
            st = new StringTokenizer(br.readLine());
            long pay = Long.parseLong(st.nextToken());
            int node = Integer.parseInt(st.nextToken()) - 1;
            link[node].add(i);
            cost[i] = pay;
        }
        // Flatten the hierarchy so subtree updates become interval updates.
        dfs(0);
        for(int i = 0; i < N; i++){ // cost positioning in tree
            update(start[i], start[i], cost[i], 1, 0, N - 1);
        }
        StringBuilder sb = new StringBuilder();
        while(M-- > 0){
            st = new StringTokenizer(br.readLine());
            char cmd = st.nextToken().charAt(0);
            int a = Integer.parseInt(st.nextToken()) - 1;
            if(cmd == PAY){
                long x = Long.parseLong(st.nextToken());
                // Raise every subordinate of a (excluding a itself) by x.
                update(start[a] + 1, end[a], x, 1, 0, N - 1);
            }
            else{
                // Query a single employee's current salary.
                sb.append(sum(start[a], start[a], 1, 0, N - 1)).append(NEW_LINE);
            }
        }
        System.out.println(sb.toString());
    }
    // Allocates tree/lazy/start/end/link once N is known.
    private static void init(){
        while(S <= N) S <<= 1;
        tree = new long[S * 2];
        lazy = new long[S * 2];
        start = new int[N];
        end = new int[N];
        link = new ArrayList[N];
        for(int i = 0; i < N; i++){
            link[i] = new ArrayList<>();
        }
    }
    // Preorder numbering: start[current] is current's own label and
    // end[current] is the last label inside its subtree.
    // NOTE(review): recursion depth equals tree height; a degenerate chain
    // of ~100k employees could overflow the default stack — verify limits.
    private static void dfs(int current){
        start[current] = ++count;
        for(int next: link[current]){
            dfs(next);
        }
        end[current] = count;
    }
    // Returns the two child indices {2*node, 2*node+1} of a tree node.
    private static int[] makeSon(int node){
        int son = node * 2;
        return new int[]{son, ++son};
    }
    // Applies node's pending lazy value to its own sum and, for inner nodes,
    // pushes it down to both children.
    private static void propagation(int node, int s, int e){
        if(lazy[node] == 0) return;
        if(s != e){
            int[] son = makeSon(node);
            lazy[son[0]] += lazy[node]; // push lazy
            lazy[son[1]] += lazy[node];
        }
        tree[node] += lazy[node] * (e - s + 1);
        lazy[node] = 0;
    }
    // Range add: adds val to every position in [left, right].
    private static void update(int left, int right, long val, int node, int start, int end){
        propagation(node, start, end);
        if(right < start || end < left) return;
        if(left <= start && end <= right) { // in range
            lazy[node] += val;
            propagation(node, start, end);
            return;
        }
        int[] son = makeSon(node);
        int mid = (start + end) / 2;
        update(left, right, val, son[0], start, mid);
        update(left, right, val, son[1], mid + 1, end);
        tree[node] = tree[son[0]] + tree[son[1]]; // total update
    }
    // Range sum over [left, right], applying pending lazy values on the way down.
    private static long sum(int left, int right, int node, int start, int end){
        propagation(node, start, end);
        if(right < start || end < left) return 0;
        if(left <= start && end <= right) return tree[node]; // sum
        int[] son = makeSon(node);
        int mid = (start + end) / 2;
        return sum(left, right, son[0], start, mid) + sum(left, right, son[1], mid + 1, end);
    }
}
|
#!/bin/bash
#
# run_gem5_alpha_spec06_benchmark.sh
# Author: Mark Gottscho Email: mgottscho@ucla.edu
# Copyright (C) 2014 Mark Gottscho
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
############ DIRECTORY VARIABLES: MODIFY ACCORDINGLY #############
GEM5_DIR=/home/qizeng/gem5 # Install location of gem5
SPEC_DIR=/home/qizeng/cpu2006 # Install location of your SPEC2006 benchmarks
##################################################################
ARGC=$# # Get number of arguments excluding arg0 (the script itself). Check for help message condition.
# BUGFIX: use arithmetic comparison (-lt/-gt). Inside [[ ]], '<' and '>'
# compare strings lexicographically, so e.g. "10" < "2" would be true.
if [[ "$ARGC" -lt 2 ]]; then # Bad number of arguments.
	echo "run_gem5_alpha_spec06_benchmark.sh Copyright (C) 2014 Mark Gottscho"
	echo "This program comes with ABSOLUTELY NO WARRANTY; for details see <http://www.gnu.org/licenses/>."
	echo "This is free software, and you are welcome to redistribute it under certain conditions; see <http://www.gnu.org/licenses/> for details."
	echo ""
	echo "Author: Mark Gottscho"
	echo "mgottscho@ucla.edu"
	echo ""
	echo "This script runs a single gem5 simulation of a single SPEC CPU2006 benchmark for Alpha ISA."
	echo ""
	echo "USAGE: run_gem5_alpha_spec06_benchmark.sh <BENCHMARK> <OUTPUT_DIR>"
	echo "EXAMPLE: ./run_gem5_alpha_spec06_benchmark.sh bzip2 /FULL/PATH/TO/output_dir"
	echo ""
	echo "A single --help help or -h argument will bring this message back."
	exit
fi
# Get command line input. We will need to check these.
BENCHMARK=$1 # Benchmark name, e.g. bzip2
OUTPUT_DIR=$2 # Directory to place run output. Make sure this exists!
WARM_LEN=1000000000 # Default warmup instruction count (overridden by arg 3)
RUN_LEN=100000000 # Default measured instruction count (overridden by arg 4)
if [[ "$ARGC" -gt 2 ]]; then
	WARM_LEN=$3
fi
if [[ "$ARGC" -gt 3 ]]; then
	RUN_LEN=$4
fi
# BUGFIX: was $BENCHMARk — shell variables are case-sensitive, so the
# original expanded to an empty string.
echo "cool aO IS RUNNING $BENCHMARK"
######################## BENCHMARK CODENAMES ####################
PERLBENCH_CODE=400.perlbench
BZIP2_CODE=401.bzip2
GCC_CODE=403.gcc
BWAVES_CODE=410.bwaves
GAMESS_CODE=416.gamess
MCF_CODE=429.mcf
MILC_CODE=433.milc
ZEUSMP_CODE=434.zeusmp
GROMACS_CODE=435.gromacs
CACTUSADM_CODE=436.cactusADM
LESLIE3D_CODE=437.leslie3d
NAMD_CODE=444.namd
GOBMK_CODE=445.gobmk
DEALII_CODE=447.dealII
SOPLEX_CODE=450.soplex
POVRAY_CODE=453.povray
CALCULIX_CODE=454.calculix
HMMER_CODE=456.hmmer
SJENG_CODE=458.sjeng
GEMSFDTD_CODE=459.GemsFDTD
LIBQUANTUM_CODE=462.libquantum
H264REF_CODE=464.h264ref
TONTO_CODE=465.tonto
LBM_CODE=470.lbm
OMNETPP_CODE=471.omnetpp
ASTAR_CODE=473.astar
WRF_CODE=481.wrf
SPHINX3_CODE=482.sphinx3
XALANCBMK_CODE=483.xalancbmk
SPECRAND_INT_CODE=998.specrand
SPECRAND_FLOAT_CODE=999.specrand
##################################################################
# Check BENCHMARK input
#################### BENCHMARK CODE MAPPING ######################
# Map the short benchmark name to its SPEC CPU2006 codename. The case
# statement replaces the original chain of 31 if-blocks; behavior is
# identical (BENCHMARK_CODE stays "none" for unrecognized input).
BENCHMARK_CODE="none"
case "$BENCHMARK" in
	perlbench)   BENCHMARK_CODE=$PERLBENCH_CODE ;;
	bzip2)       BENCHMARK_CODE=$BZIP2_CODE ;;
	gcc)         BENCHMARK_CODE=$GCC_CODE ;;
	bwaves)      BENCHMARK_CODE=$BWAVES_CODE ;;
	gamess)      BENCHMARK_CODE=$GAMESS_CODE ;;
	mcf)         BENCHMARK_CODE=$MCF_CODE ;;
	milc)        BENCHMARK_CODE=$MILC_CODE ;;
	zeusmp)      BENCHMARK_CODE=$ZEUSMP_CODE ;;
	gromacs)     BENCHMARK_CODE=$GROMACS_CODE ;;
	cactusADM)   BENCHMARK_CODE=$CACTUSADM_CODE ;;
	leslie3d)    BENCHMARK_CODE=$LESLIE3D_CODE ;;
	namd)        BENCHMARK_CODE=$NAMD_CODE ;;
	gobmk)       BENCHMARK_CODE=$GOBMK_CODE ;;
	dealII)      BENCHMARK_CODE=$DEALII_CODE ;; # DOES NOT WORK
	soplex)      BENCHMARK_CODE=$SOPLEX_CODE ;;
	povray)      BENCHMARK_CODE=$POVRAY_CODE ;;
	calculix)    BENCHMARK_CODE=$CALCULIX_CODE ;;
	hmmer)       BENCHMARK_CODE=$HMMER_CODE ;;
	sjeng)       BENCHMARK_CODE=$SJENG_CODE ;;
	GemsFDTD)    BENCHMARK_CODE=$GEMSFDTD_CODE ;;
	libquantum)  BENCHMARK_CODE=$LIBQUANTUM_CODE ;;
	h264ref)     BENCHMARK_CODE=$H264REF_CODE ;;
	tonto)       BENCHMARK_CODE=$TONTO_CODE ;;
	lbm)         BENCHMARK_CODE=$LBM_CODE ;;
	omnetpp)     BENCHMARK_CODE=$OMNETPP_CODE ;;
	astar)       BENCHMARK_CODE=$ASTAR_CODE ;;
	wrf)         BENCHMARK_CODE=$WRF_CODE ;;
	sphinx3)     BENCHMARK_CODE=$SPHINX3_CODE ;;
	xalancbmk)   BENCHMARK_CODE=$XALANCBMK_CODE ;; # DOES NOT WORK
	specrand_i)  BENCHMARK_CODE=$SPECRAND_INT_CODE ;;
	specrand_f)  BENCHMARK_CODE=$SPECRAND_FLOAT_CODE ;;
esac
# Sanity check
if [[ "$BENCHMARK_CODE" == "none" ]]; then
	echo "Input benchmark selection $BENCHMARK did not match any known SPEC CPU2006 benchmarks! Exiting."
	exit 1
fi
##################################################################
# Check OUTPUT_DIR existence (standard negation; the original's "!(-d ...)"
# spelling relied on unusual tokenization inside [[ ]]).
if [[ ! -d "$OUTPUT_DIR" ]]; then
	echo "Output directory $OUTPUT_DIR does not exist! Exiting."
	exit 1
fi
echo "cool aO IS RUNNING $BENCHMARK_CODE"
RUN_DIR=$SPEC_DIR/benchspec/CPU2006/$BENCHMARK_CODE/run/run_base_ref\_my-alpha.0000 # Run directory for the selected SPEC benchmark
SCRIPT_OUT=$OUTPUT_DIR/runscript.log # File log for this script's stdout henceforth
################## REPORT SCRIPT CONFIGURATION ###################
echo "Command line:" | tee $SCRIPT_OUT
echo "$0 $*" | tee -a $SCRIPT_OUT
echo "================= Hardcoded directories ==================" | tee -a $SCRIPT_OUT
echo "GEM5_DIR: $GEM5_DIR" | tee -a $SCRIPT_OUT
echo "SPEC_DIR: $SPEC_DIR" | tee -a $SCRIPT_OUT
echo "==================== Script inputs =======================" | tee -a $SCRIPT_OUT
echo "BENCHMARK: $BENCHMARK" | tee -a $SCRIPT_OUT
echo "OUTPUT_DIR: $OUTPUT_DIR" | tee -a $SCRIPT_OUT
echo "==========================================================" | tee -a $SCRIPT_OUT
##################################################################
#################### LAUNCH GEM5 SIMULATION ######################
echo ""
echo "Changing to SPEC benchmark runtime directory: $RUN_DIR" | tee -a $SCRIPT_OUT
cd "$RUN_DIR"
echo "" | tee -a $SCRIPT_OUT
echo "" | tee -a $SCRIPT_OUT
echo "--------- Here goes nothing! Starting gem5! ------------" | tee -a $SCRIPT_OUT
echo "" | tee -a $SCRIPT_OUT
echo "" | tee -a $SCRIPT_OUT
# Actually launch gem5!
#$GEM5_DIR/build/ALPHA/gem5.opt --outdir=$OUTPUT_DIR $GEM5_DIR/configs/example/spec06_config_single.py --benchmark=$BENCHMARK --maxinsts=2000000000 --benchmark_stdout=$OUTPUT_DIR/$BENCHMARK.out --benchmark_stderr=$OUTPUT_DIR/$BENCHMARK.err --caches --l1i_size=32kB --l1d_size=32kB --l2cache --l2_size=4MB --l2_assoc=32| tee -a $SCRIPT_OUT
# BUGFIX: mkdir -p creates intermediate directories too — the original plain
# mkdir failed whenever $OUTPUT_DIR/$BENCHMARK did not already exist.
mkdir -p "$OUTPUT_DIR/$BENCHMARK/lru8_f1"
$GEM5_DIR/build/ALPHA/gem5.opt --outdir=$OUTPUT_DIR/$BENCHMARK/lru8_f1 $GEM5_DIR/configs/example/spec06_config_single.py --benchmark=$BENCHMARK --restore-with-cpu=detailed -s 1000000000 --cpu-clock=3GHz --mem-size=4GB --mem-channels=2 --mem-type=DDR3_1600_8x8 --caches --l1i_size=32kB --l1i_assoc=8 --l1d_size=32kB --l1d_assoc=8 --l2cache --l2_size=256kB --l2_assoc=8 --l3cache --l3_size=4MB --l3_assoc=32 --l3_tags=5 --checkpoint-dir=/home/qizeng/cpu2006/benchspec/CPU2006/$BENCHMARK_CODE/run/run_base_ref_my-alpha.0000/m5out/ --checkpoint-restore=$WARM_LEN --at-instruction --maxinsts=$RUN_LEN --warmup-insts=1000000000
|
# Run the bayesian_markov_chain_text experiments. Each group of six runs
# (orders 0-5) appends into its own output file out1..out5.
# BUGFIX: the last line of groups 2-5 redirected to a literal file named
# "out" instead of $fname (copy-paste error; group 1 shows the intended
# pattern of all six runs going to $fname).
T=100000;
fname=out1
rm -f $fname
./bin/bayesian_markov_chain_text data/alice.txt 0 $T 1 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 1 $T 0.5 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 2 $T 0.292893 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 3 $T 0.206299 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 4 $T 0.159104 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 5 $T 0.129449 >>$fname
fname=out2
rm -f $fname
./bin/bayesian_markov_chain_text data/alice.txt 0 $T 0.5 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 1 $T 0.292893 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 2 $T 0.206299 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 3 $T 0.159104 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 4 $T 0.129449 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 5 $T 0.109101 >>$fname
fname=out3
rm -f $fname
./bin/bayesian_markov_chain_text data/alice.txt 0 $T 0.5 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 1 $T 0.5 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 2 $T 0.5 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 3 $T 0.5 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 4 $T 0.5 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 5 $T 0.5 >>$fname
fname=out4
rm -f $fname
./bin/bayesian_markov_chain_text data/alice.txt 0 $T 0.027 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 1 $T 0.027 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 2 $T 0.027 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 3 $T 0.027 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 4 $T 0.027 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 5 $T 0.027 >>$fname
fname=out5
rm -f $fname
./bin/bayesian_markov_chain_text data/alice.txt 0 $T 1 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 1 $T 0.027 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 2 $T 0.0013594 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 3 $T 0.0090822 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 4 $T 0.0068194 >>$fname
./bin/bayesian_markov_chain_text data/alice.txt 5 $T 0.0054593 >>$fname
## 0.500000 0.292893 0.206299 0.159104 0.129449 0.109101 0.094276
|
<filename>src/scripts/src/shaderbox/canvas.js
import {CanvasShader} from '../../shader/shader_canvas.js';
import {DefaultShader} from '../../shader/default.js';
import {glUtils} from '../framework/glUtils.js';
import {global} from '../framework/global.js';
// Shadertoy-style fullscreen-quad renderer: compiles a user fragment shader,
// binds up to four texture/audio channels, and redraws every animation frame.
const shaderCanvas= {
  // Starts the render loop; callers are expected to call reloadShader and
  // setCallFunc before useful output appears.
  init() {
    this.isPause = false;
    this.onInitChannelInfo();
    this.animate();
  },
  // Stops the render loop and any playing channel audio.
  clear(){
    cancelAnimationFrame(this.animID) ;
    this.stopAllMusic();
  },
  // Compiles and links vs/ps; reports status through compileCallFunc.
  // Returns { program, attribLocations, uniformLocations } or null on error.
  initShader(vs, ps){
    let that = this;
    that.compileCallFunc("compile successfull!",'blue');
    const shaderProgram = glUtils.createShaderProgram(vs, ps, function(str){
      console.log("err shader is "+str);
      that.compileCallFunc(str,'red');
      return null;
    });
    if(!shaderProgram)return null;
    const programInfo = {
      program: shaderProgram,
      attribLocations:
      {
        vertexPosition:glUtils.gl.getAttribLocation(shaderProgram, 'aPos'),
        texPosition:glUtils.gl.getAttribLocation(shaderProgram, 'aTex')
      },
      uniformLocations:{
        iTime: glUtils.gl.getUniformLocation(shaderProgram, "iTime"),
        // NOTE(review): fLimit's location is fetched but never written in
        // drawScene — confirm whether it is still used.
        fLimit: glUtils.gl.getUniformLocation(shaderProgram, 'fLimit'),
        iResolution: glUtils.gl.getUniformLocation(shaderProgram, 'iResolution'),
        channel0: glUtils.gl.getUniformLocation(shaderProgram, 'channel0'),
        channel1: glUtils.gl.getUniformLocation(shaderProgram, 'channel1'),
        channel2: glUtils.gl.getUniformLocation(shaderProgram, 'channel2'),
        channel3: glUtils.gl.getUniformLocation(shaderProgram, 'channel3'),
      },
    };
    return programInfo;
  },
  // Rebuilds the quad buffer and the program from a new fragment-shader body
  // (ps is wrapped between fragTitle/fragTail), then resets the channels.
  reloadShader(ps){
    this.vbuffer = glUtils.createQuadVertexBuffer();
    this.shader = this.initShader(CanvasShader.vertexShader,DefaultShader.fragTitle+ps+DefaultShader.fragTail);
    if(this.texchannels!=undefined&&this.texchannels!=null){
      // Free GPU textures from the previous program before re-creating them.
      this.texchannels.forEach(element => {
        glUtils.deleteTexture(element.tex);
      });
    }
    this.onInitChannelInfo();
    this.onReloadChannels();
  },
  // Registers the FPS-update and compile-status callbacks.
  setCallFunc(updateCb,compileCb){
    this.updateCallFunc = updateCb;
    this.compileCallFunc = compileCb;
  },
  // Creates four empty channel textures and, on first use, the channel
  // configuration list.
  onInitChannelInfo(){
    this.texchannels = [
      {tex:glUtils.gl.createTexture(), type:""},
      {tex:glUtils.gl.createTexture(), type:""},
      {tex:glUtils.gl.createTexture(), type:""},
      {tex:glUtils.gl.createTexture(), type:""},
    ];
    if(this.channelInfo==undefined||this.channelInfo==null){
      this.channelInfo = [];
    }
  },
  // Re-applies channelInfo to the GPU channels: loads image textures and
  // records type/audio source for each active (non-paused) channel.
  onReloadChannels(){
    this.channelInfo.forEach((e,index)=>{
      if(!e.isPause){
        if(e.info.type=='texture'){
          glUtils.loadTexture(this.texchannels[index].tex,e.info.rurl);
        }
        this.texchannels[index].type = e.info.type;
        this.texchannels[index].audio = e.audioData;
      }
    });
  },
  // Resumes (true) or pauses (false) audio on every active audio channel.
  setAllMusicState(bool){
    this.channelInfo.forEach((e,index)=>{
      if(!e.isPause&&this.texchannels[index].audio){
        if(bool){
          this.texchannels[index].audio.resume();
        }else{
          this.texchannels[index].audio.pause();
        }
      }
    });
  },
  // Stops audio on every active audio channel.
  stopAllMusic(){
    this.channelInfo.forEach((e,index)=>{
      if(!e.isPause&&this.texchannels[index].audio){
        this.texchannels[index].audio.stop();
      }
    });
  },
  // Replaces the channel configuration and pushes it to the GPU channels.
  onChangeChannel(v){
    this.channelInfo = v;
    this.onReloadChannels();
  },
  // rAF loop: tracks FPS (reported once a second via updateCallFunc) and
  // redraws unless paused. Re-schedules itself unconditionally.
  animate( timestamp ) {
    if(timestamp!=undefined&&this.shader!=null){
      if(!this.isPause){
        global.interval= new Date().getTime()-global.lastStamp;
        global.fpscount++;
        if(global.interval>=1000){
          global.lastStamp = new Date().getTime();
          this.updateCallFunc(global.fpscount);
          global.fpscount = 0;
        }
        this.drawScene(timestamp);
      }
    };
    this.animID = requestAnimationFrame( this.animate.bind(this) );
  },
  // Forces one draw and returns the framebuffer pixels (for screenshots).
  getScreenData(){
    this.drawScene(1);
    return glUtils.getScreenPixels();
  },
  // Draws the fullscreen quad: binds the interleaved position/uv buffer
  // (stride 20 bytes = 3 floats pos + 2 floats uv), the four channel
  // textures (audio channels get fresh FFT data each frame), and the
  // iTime/iResolution uniforms.
  drawScene(timestamp) {
    glUtils.gl.enable(glUtils.gl.DEPTH_TEST);
    glUtils.gl.depthFunc(glUtils.gl.LEQUAL); // Near things obscure far things
    glUtils.gl.clearColor(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque
    glUtils.gl.clearDepth(1.0);
    glUtils.gl.enable(glUtils.gl.BLEND);
    glUtils.gl.blendFunc(glUtils.gl.SRC_ALPHA, glUtils.gl.ONE_MINUS_SRC_ALPHA);
    glUtils.gl.clear(glUtils.gl.COLOR_BUFFER_BIT | glUtils.gl.DEPTH_BUFFER_BIT);
    // Tell WebGL how to pull out the positions from the position
    // buffer into the vertexPosition attribute
    glUtils.gl.bindBuffer(glUtils.gl.ARRAY_BUFFER, this.vbuffer);
    glUtils.gl.vertexAttribPointer(this.shader.attribLocations.vertexPosition, 3, glUtils.gl.FLOAT, false, 20, 0);
    glUtils.gl.enableVertexAttribArray(this.shader.attribLocations.vertexPosition);
    glUtils.gl.vertexAttribPointer(this.shader.attribLocations.texPosition, 2, glUtils.gl.FLOAT, false, 20, 12);
    glUtils.gl.enableVertexAttribArray(this.shader.attribLocations.texPosition);
    glUtils.gl.useProgram(this.shader.program);
    //////////////////////////////////////////////////////////////////////
    glUtils.gl.activeTexture(glUtils.gl.TEXTURE0);
    glUtils.gl.bindTexture(glUtils.gl.TEXTURE_2D,this.texchannels[0].tex);
    if(this.texchannels[0].type=='audio'){
      glUtils.writeDataToTexture(this.texchannels[0].tex,this.texchannels[0].audio.getFreqData());
    }
    glUtils.gl.uniform1i(this.shader.uniformLocations.channel0,0);
    //////////////////////////////////////////////////////////////////////1
    glUtils.gl.activeTexture(glUtils.gl.TEXTURE1);
    glUtils.gl.bindTexture(glUtils.gl.TEXTURE_2D,this.texchannels[1].tex);
    if(this.texchannels[1].type=='audio'){
      glUtils.writeDataToTexture(this.texchannels[1].tex,this.texchannels[1].audio.getFreqData());
    }
    glUtils.gl.uniform1i(this.shader.uniformLocations.channel1,1);
    //////////////////////////////////////////////////////////////////////2
    glUtils.gl.activeTexture(glUtils.gl.TEXTURE2);
    glUtils.gl.bindTexture(glUtils.gl.TEXTURE_2D,this.texchannels[2].tex);
    if(this.texchannels[2].type=='audio'){
      glUtils.writeDataToTexture(this.texchannels[2].tex,this.texchannels[2].audio.getFreqData());
    }
    glUtils.gl.uniform1i(this.shader.uniformLocations.channel2,2);
    //////////////////////////////////////////////////////////////////////2
    glUtils.gl.activeTexture(glUtils.gl.TEXTURE3);
    glUtils.gl.bindTexture(glUtils.gl.TEXTURE_2D,this.texchannels[3].tex);
    if(this.texchannels[3].type=='audio'){
      glUtils.writeDataToTexture(this.texchannels[3].tex,this.texchannels[3].audio.getFreqData());
    }
    glUtils.gl.uniform1i(this.shader.uniformLocations.channel3,3);
    glUtils.gl.uniform1f(this.shader.uniformLocations.iTime, timestamp / 1000);
    glUtils.gl.uniform2f(this.shader.uniformLocations.iResolution, glUtils.getResolution()[0], glUtils.getResolution()[1]);
    glUtils.gl.drawArrays(glUtils.gl.TRIANGLE_STRIP, 0, 4);
  }
}
export {shaderCanvas} |
<gh_stars>1-10
import { apiManager } from "../api-manager/apiManager";
import { iApiResponse } from "../api-manager/apiManagerInterfaces";
/**
 * Calls the mock endpoint that always responds successfully.
 * @returns the parsed API response payload
 */
export const successService = async (): Promise<iApiResponse> => {
  const url = `https://3d048452-563b-44d1-8005-758190e56bb1.mock.pstmn.io/success`;
  const response = await apiManager.request(url, {}, "GET");
  return response.data;
};
/**
 * Calls the mock endpoint that always responds with a failure payload.
 * @returns the parsed API response payload
 */
export const failureService = async (): Promise<iApiResponse> => {
  const url = `https://3d048452-563b-44d1-8005-758190e56bb1.mock.pstmn.io/failed`;
  const response = await apiManager.request(url, {}, "GET");
  return response.data;
};
|
import random
# Generating random RGURA policy
# Inclusion probabilities (in percent) used throughout the generator:
# chance that a given single/multi attribute or rule conjunct is emitted,
# and ("not") the chance that a precondition literal is negated.
density = {
"sattrs": 70,
"mattrs": 70,
"assign": 70,
"add": 70,
"not": 30
}
def getChoiceByPercentage(percentage):
    """Return True with probability percentage/100, by uniformly picking
    from a 100-element pool of booleans."""
    pool = [True] * percentage + [False] * (100 - percentage)
    return random.choice(pool)
def TrueOrFalse():
    """Fair coin flip: uniformly pick one of the two booleans."""
    outcomes = (True, False)
    return random.choice(outcomes)
def generateTest(inputfile,
                 nusers,
                 nsingleattrs,
                 nmultiattrs,
                 max_sattr_values,
                 max_mattr_values,
                 nadminroles,
                 nassign_rules,
                 nadd_rules,
                 ndelete_rules
                 ):
    '''Write one random rGURA policy file to `inputfile`.

    @inputfile: path of the policy file to create (overwritten)
    @nusers: number of users
    @nsingleattrs: number of single-value attributes
    @nmultiattrs: number of multi-value attributes
    @max_sattr_values: max |domain| size of each single-value attribute
    @max_mattr_values: max |domain| size of each multi-value attribute
    @nadminroles: number of admin roles
    @nassign_rules: number of assign rules (single-value attributes)
    @nadd_rules: number of add rules (multi-value attributes)
    @ndelete_rules: number of delete rules

    Sections are emitted in the fixed order USERS, ATTRIBUTE_SINGLE,
    ATTRIBUTE_MULTIPLE, SCOPE, UATTR_S, UATTR_M, ADMINROLES, RULES, SPEC,
    each terminated by ";". Inclusion of optional pieces is governed by the
    module-level `density` percentages. Python 2 only (uses xrange).
    '''
    # attr name -> {value name: True}; used later to size random.randint/sample.
    s_attr_val_dict = {}
    m_attr_val_dict = {}
    with open(inputfile, "w") as out:
        # USERS
        out.write("USERS\n")
        for i in xrange(nusers):
            out.write("user%s\n" % i)
        out.write(";\n")
        # Single attributes
        out.write("ATTRIBUTE_SINGLE\n")
        for i in xrange(nsingleattrs):
            out.write("s_attr%s\n" % i)
            s_attr_val_dict["s_attr%s" % i] = {}
        out.write(";\n")
        # Multiple attributes
        out.write("ATTRIBUTE_MULTIPLE\n")
        for i in xrange(nmultiattrs):
            out.write("m_attr%s\n" % i)
            m_attr_val_dict["m_attr%s" % i] = {}
        out.write(";\n")
        # Scope: random domain (value set) for every attribute.
        out.write("SCOPE\n")
        # single values
        for i in xrange(nsingleattrs):
            domain_size = random.randint(1, max_sattr_values)
            out.write("<s_attr%s," % i)
            for j in xrange(domain_size):
                out.write(" sa%i_value%s" % (i, j))
                s_attr_val_dict["s_attr%s" % i]["sa%i_value%s" % (i, j)] = True
            out.write(">\n")
        # multiple values
        for i in xrange(nmultiattrs):
            domain_size = random.randint(1, max_mattr_values)
            out.write("<m_attr%s," % i)
            for j in xrange(domain_size):
                out.write(" ma%i_value%s" % (i, j))
                m_attr_val_dict["m_attr%s" % i]["ma%i_value%s" % (i, j)] = True
            out.write(">\n")
        out.write(";\n")
        # UATTR_S: initial single-value assignments (one random value each).
        out.write("UATTR_S\n")
        for u in xrange(nusers):  # for each user
            s = "user%s" % u
            a = ""
            for i in xrange(nsingleattrs):  # for each single attribute
                if getChoiceByPercentage(density["sattrs"]):
                    a += " <s_attr%s, " % i
                    j = random.randint(0, len(s_attr_val_dict['s_attr%s' % i]) - 1)
                    a += "sa%s_value%s>" % (i, j)
            if a != "":
                out.write(s + a + "\n")
        out.write(";\n")
        # UATTR_M: initial multi-value assignments (non-empty random subset).
        out.write("UATTR_M\n")
        for u in xrange(nusers):  # for each user
            s = "user%s" % u
            a = ""
            for i in xrange(nmultiattrs):  # for each multi attribute
                if getChoiceByPercentage(density["mattrs"]):
                    a += " <m_attr%s" % i
                    jlist = random.sample(
                        xrange(len(m_attr_val_dict['m_attr%s' % i])),
                        random.randint(1, len(m_attr_val_dict['m_attr%s' % i])))
                    for j in jlist:
                        a += ", ma%s_value%s" % (i, j)
                    a += ">"
            if a != "":
                out.write(s + a + "\n")
        out.write(";\n")
        # adminrole
        out.write("ADMINROLES\n")
        for a in xrange(nadminroles):
            out.write("admin%s\n" % (a))
        out.write(";\n")
        # rules
        out.write("RULES\n")
        # assign rules: <admin, precondition, single-attr, value>.
        # With 1% probability the precondition is simply TRUE; otherwise it is
        # a random conjunction of (possibly negated) attribute literals.
        for repeat in xrange(nassign_rules):
            adminid = random.randint(0, nadminroles - 1)
            attrid = random.randint(0, nsingleattrs - 1)
            targetid = random.randint(0, len(s_attr_val_dict["s_attr%s" % attrid]) - 1)
            # precondition
            precond = ""
            if getChoiceByPercentage(1):
                precond = "TRUE"
            else:
                # Skip the attribute being assigned (s != attrid).
                for s in xrange(nsingleattrs):
                    if getChoiceByPercentage(density["assign"]) and s != attrid:
                        v = random.randint(0, len(s_attr_val_dict["s_attr%s" % s]) - 1)
                        if getChoiceByPercentage(density['not']):
                            precond += "not "
                        precond += "s_attr%s=sa%s_value%s & " % (s, s, v)
                for m in xrange(nmultiattrs):
                    if getChoiceByPercentage(density["assign"]):
                        vlist = random.sample(
                            xrange(len(m_attr_val_dict["m_attr%s" % m])),
                            random.randint(1, len(m_attr_val_dict["m_attr%s" % m]))
                        )
                        for v in vlist:
                            if getChoiceByPercentage(density['not']):
                                precond += "not "
                            precond += "m_attr%s=ma%s_value%s & " % (m, m, v)
                if precond.endswith("& "):
                    precond = precond[:-2]  # remove last &
                if precond == "":
                    precond = "TRUE"
            out.write("<admin%s, %s, s_attr%s, sa%s_value%s>\n" % (adminid, precond, attrid, attrid, targetid))
        # add rules: same shape, but target a multi-value attribute.
        for repeat in xrange(nadd_rules):
            adminid = random.randint(0, nadminroles - 1)
            attrid = random.randint(0, nmultiattrs - 1)
            targetid = random.randint(0, len(m_attr_val_dict["m_attr%s" % attrid]) - 1)
            # precondition
            precond = ""
            if getChoiceByPercentage(1):
                precond = "TRUE"
            else:
                for s in xrange(nsingleattrs):
                    if getChoiceByPercentage(density["add"]):
                        v = random.randint(0, len(s_attr_val_dict["s_attr%s" % s]) - 1)
                        if getChoiceByPercentage(density['not']):
                            precond += "not "
                        precond += "s_attr%s=sa%s_value%s & " % (s, s, v)
                for m in xrange(nmultiattrs):
                    if getChoiceByPercentage(density["add"]):
                        vlist = random.sample(
                            xrange(len(m_attr_val_dict["m_attr%s" % m])),
                            random.randint(1, len(m_attr_val_dict["m_attr%s" % m]))
                        )
                        for v in vlist:
                            # NOTE(review): unlike the assign section, the
                            # "v != targetid" guard only gates the negation,
                            # and compares a value index of attribute m
                            # against targetid of `attrid` — confirm intent.
                            if getChoiceByPercentage(density['not']) and v != targetid:
                                precond += "not "
                            precond += "m_attr%s=ma%s_value%s & " % (m, m, v)
                if precond.endswith("& "):
                    precond = precond[:-2]  # remove last &
                if precond == "":
                    precond = "TRUE"
            out.write("<admin%s, %s, m_attr%s, ma%s_value%s>\n" % (adminid, precond, attrid, attrid, targetid))
        # delete rules: unconditioned, on a random single or multi attribute.
        for repeat in xrange(ndelete_rules):
            adminid = random.randint(0, nadminroles - 1)
            if TrueOrFalse():  # single var
                attrid = random.randint(0, nsingleattrs - 1)
                targetid = random.randint(0, len(s_attr_val_dict["s_attr%s" % attrid]) - 1)
                out.write("<admin%s, s_attr%s, sa%s_value%s>\n" % (
                    adminid, attrid, attrid, targetid
                ))
            else:
                attrid = random.randint(0, nmultiattrs - 1)
                targetid = random.randint(0, len(m_attr_val_dict["m_attr%s" % attrid]) - 1)
                out.write("<admin%s, m_attr%s, ma%s_value%s>\n" % (
                    adminid, attrid, attrid, targetid
                ))
        out.write(";\n")
        # SPEC: one random reachability goal (attribute, value).
        out.write("SPEC\n")
        if TrueOrFalse():
            attrid = random.randint(0, nsingleattrs - 1)
            targetid = random.randint(0, len(s_attr_val_dict["s_attr%s" % attrid]) - 1)
            out.write("s_attr%s sa%s_value%s\n" % (attrid, attrid, targetid))
        else:
            attrid = random.randint(0, nmultiattrs - 1)
            targetid = random.randint(0, len(m_attr_val_dict["m_attr%s" % attrid]) - 1)
            out.write("m_attr%s ma%s_value%s\n" % (attrid, attrid, targetid))
        out.write(";\n")
def help():
    """Print the command-line usage string."""
    # The parenthesized form prints identically under Python 2 (parentheses
    # around a single expression are a no-op for the print statement) and is
    # valid Python 3, unlike the original bare `print "..."` statement.
    print("rGURA_generator.py PATH N_TESTCASES")
if __name__ == '__main__':
    import time
    import sys
    import os
    import os.path
    # Usage: rGURA_generator.py PATH N_TESTCASES
    try:
        path = sys.argv[1]
    except Exception:
        help()
        sys.exit(1)
    try:
        testcases = int(sys.argv[2])
    except Exception:
        help()
        sys.exit(1)
    # Create the output directory; tolerate it already existing.
    try:
        os.makedirs(path)
    except OSError:
        if not os.path.isdir(path):
            raise
    random.seed(time.time())
    # One set of random dimensions is drawn and shared by all test cases;
    # only the per-file random content differs between cases.
    nusers = random.randint(10, 20)
    nsingleattrs = random.randint(10, 15)
    nmultiattrs = random.randint(5, 10)
    max_sattr_values = random.randint(10, 20)
    max_mattr_values = random.randint(20, 30)
    nadminroles = random.randint(1, 5)
    nassign_rules = random.randint(50, 100)
    nadd_rules = random.randint(50, 100)
    ndelete_rules = random.randint(40, 50)
    for i in xrange(testcases):
        generateTest(path + "/rGURA_random_policy_%s.txt" % str(i).zfill(3),
                     nusers,
                     nsingleattrs,
                     nmultiattrs,
                     max_sattr_values,
                     max_mattr_values,
                     nadminroles,
                     nassign_rules,
                     nadd_rules,
                     ndelete_rules)
|
#!/bin/bash
# Copyright (c) 2018-2022, NVIDIA CORPORATION.
##############################################
# cuDF GPU build and test script for CI #
##############################################
set -e
NUMARGS=$#
ARGS=$*
# Arg parsing function: succeeds (exit 0) when "$1" appears as a whole
# space-delimited word in the script's argument list (NUMARGS and ARGS are
# set at the top of the script).
function hasArg {
    (( ${NUMARGS} != 0 )) && (echo " ${ARGS} " | grep -q " $1 ")
}
# Set path and build parallel level
export PATH=/opt/conda/bin:/usr/local/cuda/bin:$PATH
export PARALLEL_LEVEL=${PARALLEL_LEVEL:-4}
# Set home to the job's workspace
export HOME="$WORKSPACE"
# Switch to project root; also root of repo checkout
cd "$WORKSPACE"
# Determine CUDA release version
export CUDA_REL=${CUDA_VERSION%.*}
export CONDA_ARTIFACT_PATH="$WORKSPACE/ci/artifacts/cudf/cpu/.conda-bld/"
# Parse git describe
export GIT_DESCRIBE_TAG=`git describe --tags`
export MINOR_VERSION=`echo $GIT_DESCRIBE_TAG | grep -o -E '([0-9]+\.[0-9]+)'`
# ucx-py version
export UCX_PY_VERSION='0.26.*'
################################################################################
# TRAP - Setup trap for removing jitify cache
################################################################################
# Set `LIBCUDF_KERNEL_CACHE_PATH` environment variable to $HOME/.jitify-cache
# because it's local to the container's virtual file system, and not shared with
# other CI jobs like `/tmp` is
export LIBCUDF_KERNEL_CACHE_PATH="$HOME/.jitify-cache"
# Runs on EXIT via the trap below: removes the jitify kernel cache while
# preserving the script's original exit status ($? captured on entry).
function remove_libcudf_kernel_cache_dir {
    EXITCODE=$?
    gpuci_logger "TRAP: Removing kernel cache dir: $LIBCUDF_KERNEL_CACHE_PATH"
    rm -rf "$LIBCUDF_KERNEL_CACHE_PATH" \
        || gpuci_logger "[ERROR] TRAP: Could not rm -rf $LIBCUDF_KERNEL_CACHE_PATH"
    exit $EXITCODE
}
# Set trap to run on exit
gpuci_logger "TRAP: Set trap to remove jitify cache on exit"
trap remove_libcudf_kernel_cache_dir EXIT
mkdir -p "$LIBCUDF_KERNEL_CACHE_PATH" \
    || gpuci_logger "[ERROR] TRAP: Could not mkdir -p $LIBCUDF_KERNEL_CACHE_PATH"
################################################################################
# SETUP - Check environment
################################################################################
gpuci_logger "Check environment variables"
env
gpuci_logger "Check GPU usage"
nvidia-smi
gpuci_logger "Activate conda env"
. /opt/conda/etc/profile.d/conda.sh
conda activate rapids
gpuci_logger "Check conda environment"
conda info
conda config --show-sources
conda list --show-channel-urls
# Pull the RAPIDS build/runtime dependencies matching this checkout's version,
# plus JDK 8 + maven for the JNI tests below.
gpuci_logger "Install dependencies"
gpuci_mamba_retry install -y \
    "cudatoolkit=$CUDA_REL" \
    "rapids-build-env=$MINOR_VERSION.*" \
    "rapids-notebook-env=$MINOR_VERSION.*" \
    "dask-cuda=${MINOR_VERSION}" \
    "rmm=$MINOR_VERSION.*" \
    "ucx-py=${UCX_PY_VERSION}" \
    "openjdk=8.*" \
    "maven"
# "mamba install openjdk" adds an activation script to set JAVA_HOME but this is
# not triggered on installation. Re-activating the conda environment will set
# this environment variable so that CMake can find JNI.
conda activate rapids
# https://docs.rapids.ai/maintainers/depmgmt/
# gpuci_conda_retry remove --force rapids-build-env rapids-notebook-env
# gpuci_mamba_retry install -y "your-pkg=1.0.0"
gpuci_logger "Check compiler versions"
python --version
$CC --version
$CXX --version
gpuci_logger "Check conda environment"
conda info
conda config --show-sources
conda list --show-channel-urls
# Install development (main-branch) builds of dask, distributed and streamz on
# top of the conda env; --no-deps keeps the conda-managed dependencies intact.
function install_dask {
    # Install the main version of dask, distributed, and streamz
    gpuci_logger "Install the main version of dask, distributed, and streamz"
    set -x
    pip install "git+https://github.com/dask/distributed.git@main" --upgrade --no-deps
    pip install "git+https://github.com/dask/dask.git@main" --upgrade --no-deps
    # Need to uninstall streamz that is already in the env.
    pip uninstall -y streamz
    pip install "git+https://github.com/python-streamz/streamz.git@master" --upgrade --no-deps
    set +x
}
################################################################################
# INSTALL - Install libcudf artifacts
################################################################################
# libcudf/libcudf_kafka come from the CPU build stage's conda artifact channel.
gpuci_logger "Installing libcudf & libcudf_kafka"
gpuci_mamba_retry install -c ${CONDA_ARTIFACT_PATH} libcudf libcudf_kafka
install_dask
################################################################################
# TEST - Run java tests
################################################################################
gpuci_logger "Check GPU usage"
nvidia-smi
gpuci_logger "Running Java Tests"
cd ${WORKSPACE}/java
mvn test -B -DCUDF_JNI_ARROW_STATIC=OFF
# Fixed: "return" is only valid inside a function or a sourced script, and
# EXITCODE is never set in this (top-level) scope.  Because the script runs
# under "set -e", any mvn failure already aborts it, so reaching this point
# means success.
exit 0
|
<gh_stars>0
// Siesta UI test suite: verifies component-query (CQ) helpers and widget
// clicking against a small Ext JS tree list.
StartTest(function(t) {
    t.testExtJS(function (t) {
        t.it('should be able to do a CQ', function (t) {
            // Create a throwaway component carrying a custom property, then
            // assert the matching query succeeds and a bogus one does not.
            Ext.create('Ext.Component', {
                foo : 'bar'
            });
            t.cqExists('[foo=bar]')
            t.cqNotExists('[foo=bar2]')
        })
        t.it('should be able to click on widgets', function (t) {
            // Fixed fixture tree: root -> [1, 2 -> [3, 4], 5].
            var treeStore = new Ext.data.TreeStore({
                fields : [ 'id', 'text' ],
                root : {
                    text : 'root',
                    expanded : true,
                    children : [
                        { id : 1, text : "1", leaf : true },
                        {
                            id : 2, text : "2", expanded : true, children : [
                                { id : 3, text : "3", leaf : true },
                                { id : 4, text : "4", leaf : true }
                            ]
                        },
                        { id : 5, text : "5", leaf : true }
                    ]
                }
            })
            var treeList = new Ext.list.Tree({
                renderTo : Ext.getBody(),
                store : treeStore,
                width : 400,
                height : 300
            });
            // Clicking leaf "1" must fire exactly one 'click' on its element.
            t.firesOnce(t.cq1('>>treelistitem[text=1]').el, 'click')
            t.click('>>treelistitem[text=1]', function () {})
        })
    });
});
|
#!/bin/bash
#
# This is InterProScan v5. Welcome.
# Edit this script to suit your installation needs.
#
cd $(dirname "$0")
JAVA=$(type -p java)
if [[ "$JAVA" == "" ]]; then
    printf 'Java not found. Please install Java\n'
    printf 'and place it on your path,\n'
    # Fixed: a stray "." after the closing quote used to be concatenated onto
    # the printf format string and printed verbatim.
    printf 'or edit the interproscan.sh script to refer to your java installation.\n'
    exit 1
fi
# Extract the quoted version number from the first line of `java -version`.
VERSION=$("$JAVA" -version 2>&1 | { read X; printf '%s' "${X#*\"}"; } )
MAJOR_VERSION=${VERSION%%.*}
MINOR_VERSION=${VERSION#*.}
MINOR_VERSION=${MINOR_VERSION%%.*}
# Pre-Java-9 versions report as 1.x; require at least 1.8.
if [[ "${MAJOR_VERSION}" == "1" && "${MINOR_VERSION}" -lt "8" ]];
then
    printf 'Java version 1.8 or above required\n'
    printf 'Detected version %s.%s\n' "${MAJOR_VERSION}" "${MINOR_VERSION}"
    printf 'Please install the correct version\n'
    exit 1
fi
#"$JAVA" -Xmx2048M -jar berkeley-db-builder.jar "$@"
"$JAVA" -XX:+UseParallelGC -XX:ParallelGCThreads=4 -XX:+UseCompressedOops -Xms4048M -Xmx22048M -jar berkeley-db-builder.jar "$@"
#end |
package pl.tajchert.businesscardwear;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.support.wearable.view.WatchViewStub;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.ImageView;
import android.widget.TextView;
import java.io.File;
/**
 * Wear activity that displays a QR-code business card read from external
 * storage, with a short "refresh" spinner animation before the image loads.
 */
public class MainActivity extends Activity {
    private ImageView imageViewQRCode;
    private ImageView refreshCircle;
    private Animation refreshAnim;
    // Absolute path of the QR image pushed to the watch's external storage.
    private String imagePath = Environment.getExternalStorageDirectory() + ValuesCons.WEAR_IMAGE_PATH;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        refreshCircle = (ImageView) findViewById(R.id.refreshCircle);
        refreshAnim = AnimationUtils.loadAnimation(this, R.anim.refresh_animation);
        File qrFile = new File(imagePath);
        // Hide the placeholder text when a QR image file already exists.
        if(qrFile.exists()){
            TextView txImageNull = (TextView) findViewById(R.id.text);
            txImageNull.setVisibility(View.GONE);
        }
        // WatchViewStub inflates asynchronously (round vs. square layouts);
        // the QR ImageView only becomes available after inflation completes.
        final WatchViewStub stub = (WatchViewStub) findViewById(R.id.watch_view_stub);
        stub.setOnLayoutInflatedListener(new WatchViewStub.OnLayoutInflatedListener() {
            @Override
            public void onLayoutInflated(WatchViewStub stub) {
                imageViewQRCode = (ImageView) stub.findViewById(R.id.image_qr_code);
                if(refreshAnim != null){
                    refreshAnimation(refreshAnim);
                }
            }
        });
    }
    // Shows the spinner, then loads the QR bitmap shortly after the
    // animation's duration (+300 ms margin) has elapsed.
    private void refreshAnimation(final Animation animation){
        refreshCircle.setVisibility(View.VISIBLE);
        animation.setAnimationListener(new Animation.AnimationListener() {
            @Override
            public void onAnimationStart(Animation arg0) {
                Handler handler = new Handler();
                handler.postDelayed(new Runnable() {
                    public void run() {
                        if (imageViewQRCode != null) {
                            setImageQRCode();
                        }
                    }
                }, (animation.getDuration() + 300));
            }
            @Override
            public void onAnimationRepeat(Animation arg0) {
            }
            @Override
            public void onAnimationEnd(Animation arg0) {
                refreshCircle.setVisibility(View.GONE);
            }
        });
        refreshCircle.startAnimation(animation);
    }
    // Loads the QR bitmap from disk and displays it when both are available.
    private void setImageQRCode() {
        Bitmap qrCode = readBitmapFromFile();
        if (qrCode != null && imageViewQRCode != null) {
            imageViewQRCode.setImageBitmap(qrCode);
        }
    }
    // Decodes the QR image from external storage; returns null if absent or
    // undecodable (BitmapFactory.decodeFile contract).
    private Bitmap readBitmapFromFile(){
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inPreferredConfig = Bitmap.Config.ARGB_8888;
        return BitmapFactory.decodeFile(imagePath, options);
    }
}
|
# urls.py
from django.urls import path
from . import views
# Note list/detail and the tag list use class-based views; the tag detail
# route is handled by the notes_by_tags function view.
urlpatterns = [
    path('note/', views.NoteListView.as_view(), name='note'),
    path('note/<int:pk>/', views.NoteDetailView.as_view(), name='note-detail'),
    path('tags/', views.TagListView.as_view(), name='tags'),
    path('tags/<int:pk>/', views.notes_by_tags, name='tag-detail'),
]
# views.py
from django.shortcuts import render, get_object_or_404
from django.views.generic import ListView, DetailView
from .models import Note, Tag
class NoteListView(ListView):
    # Lists every Note via note_list.html; exposed to the template as "notes".
    model = Note
    template_name = 'note_list.html'
    context_object_name = 'notes'
class NoteDetailView(DetailView):
    # Renders a single Note (looked up by pk) via note_detail.html as "note".
    model = Note
    template_name = 'note_detail.html'
    context_object_name = 'note'
class TagListView(ListView):
    # Lists every Tag via tag_list.html; exposed to the template as "tags".
    model = Tag
    template_name = 'tag_list.html'
    context_object_name = 'tags'
def notes_by_tags(request, pk):
    """Render all notes associated with the tag identified by ``pk``.

    Fixed: a bare ``Tag.objects.get`` raised an unhandled ``Tag.DoesNotExist``
    (HTTP 500) for an unknown pk; ``get_object_or_404`` turns that into 404.
    """
    tag = get_object_or_404(Tag, pk=pk)
    notes = tag.note_set.all()
    return render(request, 'notes_by_tag.html', {'tag': tag, 'notes': notes})
<gh_stars>0
//Written by <NAME>
// Sketch geometry and Monte Carlo counters.
// NOTE(review): setup() calls createCanvas(height, width) with these names
// swapped relative to p5's (w, h) parameter order; the drawing still fits,
// but confirm intent before renaming.
let height = 730;
let width = 700;
let radius = 310;
let centerX = width/2;
let centerY = radius + 50;
let total = 0;      // total points sampled so far
let circCount = 0;  // points that landed inside the circle
// p5 setup: builds the Reset button, the offscreen dot buffer, and the
// main canvas, and configures shared drawing state.
function setup() {
    div0 = createDiv()
    textFont('Georgia')
    button = createButton("Reset");
    button.size(width, 40)
    button.mousePressed(reset);
    div0.child(button)
    // Offscreen buffer accumulating sampled points across frames.
    dots = createGraphics(2 * radius, 2 * radius)
    dots.strokeWeight(2);
    createCanvas(height, width);
    angleMode(DEGREES);
    noFill();
    textSize(20);
    strokeWeight(2);
}
// Restart the simulation: zero both counters and replace the dot buffer
// with a fresh (blank) graphics layer of the same size.
function reset() {
    circCount = 0;
    total = 0;
    dots = createGraphics(2 * radius, 2 * radius);
    dots.strokeWeight(2);
}
// p5 draw loop: each frame samples one uniform point in the bounding square,
// classifies it (inside/outside the circle), plots it into the dot buffer,
// then redraws the scene and the running estimate of pi.
function draw() {
    translate(centerX, centerY);
    // One new sample per frame, uniform over the bounding square.
    x = random(-radius, radius);
    y = random(-radius, radius);
    total ++;
    if ( x ** 2 + y ** 2 < radius ** 2){
        circCount ++;
        dots.stroke('red')
    } else {
        dots.stroke('blue')
    }
    dots.point(radius + x, radius + y);
    fill(30);
    noStroke();
    // Repaint background, circle, square, then the accumulated dot buffer.
    background(200);
    circle(0, 0, 2*radius);
    noFill();
    stroke(100)
    rect(-radius, -radius, 2 * radius, 2 * radius);
    image(dots, -radius, -radius);
    fill('black');
    strokeWeight(1)
    // ratio inside/total approximates (circle area)/(square area) = pi/4.
    text(`Number of points in Circle : Total= ${circCount} : ${total} = ${round(circCount / total, 4)}; π ≈ ${round(4 * circCount / total, 4)}`, 50 - centerX, 30 - centerY)
}
#!/usr/bin/env bash
# This wrapper script activates the virtual environment before calling the
# screepsstats.py program. It also makes sure the application is not run as
# the root user.
USER='screepsstats'
# Get real directory in case of symlink
if [[ -L "${BASH_SOURCE[0]}" ]]
then
    DIR="$( cd "$( dirname $( readlink "${BASH_SOURCE[0]}" ) )" && pwd )"
else
    DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
fi
# Quote paths so directories containing whitespace do not break the script.
cd "$DIR"
ENV="$DIR/../env/bin/activate"
if [ ! -f "$ENV" ]; then
    echo 'Virtual Environment Not Installed'
    # "exit -1" is not portable; any non-zero status signals failure.
    exit 1
fi
SCRIPT="$DIR/../screeps_etl/screepsstats.py"
if (( "$EUID" == 0 )); then
    # Running as root: drop privileges to the dedicated service user.
    su - $USER -s /bin/bash -c "source $ENV; $SCRIPT $@"
else
    source "$ENV"
    $SCRIPT "$@"
fi
#!/bin/bash
set -e
set -x
STAGE=$1
LABEL=$2
PRIVATE_ECR=$3
REGION=$4
docker container prune --force
docker image prune --force
# Fixed: the comparison is quoted (and uses portable "=") so an empty or
# unset third argument no longer makes the test malformed under "set -e".
if [ "$PRIVATE_ECR" = "true" ]
then
    aws ecr get-login-password --region $REGION | docker login -u AWS --password-stdin $ECR_URL
else
    aws ecr-public get-login-password --region us-east-1 | docker login -u AWS --password-stdin $ECR_URL
fi
# Stage only package.json files (preserving directory layout) so the Docker
# build can cache dependency installation independently of source changes.
mkdir -p ./project-package-jsons/projects/default-project
cp packages/projects/default-project/package.json ./project-package-jsons/projects/default-project
find packages/projects/projects/ -name package.json -exec bash -c 'mkdir -p ./project-package-jsons/$(dirname $1) && cp $1 ./project-package-jsons/$(dirname $1)' - '{}' \;
DOCKER_BUILDKIT=1 docker build --cache-from $ECR_URL/$REPO_NAME:latest_$STAGE --build-arg BUILDKIT_INLINE_CACHE=1 --tag $LABEL -f Dockerfile-build-deploy .
#include <iostream>
#include <thread>
#include <chrono>
// Print a greeting once per second, five times in total.
int main()
{
    int count = 0;
    while (count < 5)
    {
        std::cout << "Hello World!" << std::endl;
        std::this_thread::sleep_for(std::chrono::seconds(1));
        ++count;
    }
}
<gh_stars>0
package mainpackage;
/**
 * Demo driver exercising the Users hierarchy (Students, Professors,
 * Secretaries — presumably subclasses of Users, defined elsewhere in the
 * project) together with Courses and Grades.
 */
public class Main {
    public static void main(String[] args) {
        // A student user with basic profile data.
        Users u1=new Students();
        u1.setUsername("psychosocial7");
        u1.setName("Giwrgos");
        u1.setSurname("Diwtis");
        u1.setDepartment("Departmentofinformatics");
        u1.setAge(12);
        System.out.println("Age"+u1.getAge());
        System.out.println("\n");
        // One course and a grade for the student above.
        Courses u1c1=new Courses();
        Grades u1c1g=new Grades();
        u1c1.setCourse("Analusi1");
        System.out.println("To ma8hma pou parakoloy8ei o foithths einai h :"+u1c1.getCourse());
        u1c1g.setGrade(9);
        System.out.println("\n");
        System.out.println("O ba8mos pou phre einai"+u1c1g.getGrade());
        System.out.println("\n");
        // A professor user.
        Users u2=new Professors();
        u2.setUsername("pappoulis13");
        u2.setName("Kwnstantinos");
        u2.setSurname("Kousounnis");
        u2.setDepartment("Departmentofinformatics");
        System.out.println(u1.getUsername());
        System.out.println(u1.getName());
        System.out.println(u1.getSurname());
        System.out.println(u1.getDepartment());
        System.out.println("Registernumber"+u1.getregistretionNumber()+"\n");
        System.out.println(u2.getUsername());
        System.out.println(u2.getName());
        System.out.println(u2.getSurname());
        System.out.println(u2.getDepartment());
        // getCounter() reports how many Users instances were constructed.
        System.out.println("the costructor was used "+ u2.getCounter()+" times");
        System.out.println("Registernumber"+u2.getregistretionNumber());
        u2.setPhone(783675);
        System.out.println("Phone6986"+u2.getPhone());
        // A secretary user with a contact address.
        Users u3=new Secretaries();
        u3.setUsername("grammateia");
        u3.setName("Elenh");
        u3.setSurname("kontola");
        u3.setDepartment("Departmentofinformatics");
        u3.setAddress("<EMAIL>");
        System.out.println(u3.getUsername());
        System.out.println(u3.getName());
        System.out.println(u3.getSurname());
        System.out.println(u3.getDepartment());
        System.out.println(u3.getAddress());
    }
}
|
#!/bin/bash
# Run k times an experiment
# Launches k training runs of the siamese network for every combination of
# layer count (2, 3) and edge representation (adj, feat) on dataset "$1".
k=5
epochs=300
batch_size=128
lr=1e-4
pipeline=siamese_net
dataset=$1
for layers in 2 3; do
    for edges in adj feat; do
        mkdir -p ./run/$pipeline/$dataset/${layers}_layer/$edges/
        echo ./run/$pipeline/$dataset/${layers}_layer/$edges/
        # Repeat each configuration k times; each run's stdout is captured
        # into its own text file under ./run/.
        for ((run=0; run<$k; run++))
        do
            python train_siamese_net.py /media/priba/PPAP/NeuralMessagePassing/data/IAM/Letter/$dataset/ letters -s ./checkpoint/$pipeline/$dataset/${layers}_layer/$edges/$run/ --log ./log/$pipeline/$dataset/${layers}_layer/$edges/$run/ -lr $lr --nlayers $layers -e $epochs -b $batch_size --representation $edges --schedule $epochs+1 > ./run/$pipeline/$dataset/${layers}_layer/$edges/$run.txt
        done
    done
done
|
/**
* Demo Title
*/
import React from 'react';
const Title = () => (
<div className="d-title">
<h1>Title</h1>
</div>
);
export default Title;
|
#!/usr/bin/env bash
# Tear down and recreate (without starting) the docker-compose stack, using
# the shared environment variables from docker-env.sh.
source ./docker-env.sh
# Remove existing containers
docker-compose stop
docker-compose rm -f
# Recreate containers
docker-compose create
|
<reponame>tony-aq/optic
import React, { FC, useEffect, useRef } from 'react';
import { IShapeRenderer } from '<src>/types';
import { ShapeRenderer } from './ShapeRenderer';
import { Panel } from './Panel';
/** Props for {@link HttpBodyPanel}. */
type HttpBodyPanelProps = {
  /** Root shapes of the HTTP body to render. */
  shapes: IShapeRenderer[];
  /** Label shown in the panel header. */
  location: string;
  /** Currently selected field id, if any. */
  selectedFieldId?: string | null;
  /** Whether fields can be clicked to select them. */
  fieldsAreSelectable?: boolean;
  /** Invoked with the clicked field's id when a field is selected. */
  setSelectedField?: (fieldId: string) => void;
};
/**
 * Panel that renders an HTTP body's shapes and, when the selected field
 * changes, scrolls that field into the vertical middle of the panel.
 */
export const HttpBodyPanel: FC<HttpBodyPanelProps> = ({
  shapes,
  location,
  selectedFieldId,
  fieldsAreSelectable,
  setSelectedField,
}) => {
  const contentRef = useRef<HTMLDivElement>(null);
  useEffect(() => {
    if (selectedFieldId && contentRef.current) {
      // Only adjust scroll if the panel actually overflows.
      const isContainerScrollable =
        contentRef.current.scrollHeight > contentRef.current.clientHeight;
      // Field nodes are tagged with data-fieldid by the shape renderer.
      const fieldNode = contentRef.current.querySelector(
        `[data-fieldid='${selectedFieldId}']`
      );
      if (isContainerScrollable && fieldNode) {
        // Offset of the field from the top of the visible panel area.
        const scrollTopDiff =
          fieldNode.getBoundingClientRect().top -
          contentRef.current.getBoundingClientRect().top;
        // set scroll position to the selected field being in the middle
        contentRef.current.scrollTop +=
          scrollTopDiff - contentRef.current.clientHeight / 2;
      }
    }
  }, [selectedFieldId]);
  return (
    <Panel header={location} contentRef={contentRef}>
      <ShapeRenderer
        showExamples={false}
        shapes={shapes}
        selectedFieldId={selectedFieldId}
        fieldsAreSelectable={fieldsAreSelectable}
        setSelectedField={setSelectedField}
      />
    </Panel>
  );
};
|
# Fine-tune VOLO-D5 (512px) on the ICCV food dataset with token labeling,
# AugMix augmentation and apex AMP, resuming from a prior checkpoint.
python main.py /media/HDD_4TB/datasets/iccv/ --model volo_d5 --img-size 512 -b 2 --lr 8.0e-6 --min-lr 4.0e-6 --drop-path 0.5 --epochs 50 --apex-amp --ground-truth --token-label --token-label-size 32 --finetune /media/HDD_4TB/javi/FoodChallenge/ICCV/volo/output/train/20210721-202002-volo_d5-512/checkpoint-0.pth.tar --aa augmix-m5-w4-d2
# Evaluation-only alternative (kept for reference):
# python get_test_preds.py --cfg configs/swin_large_patch4_window7_224.yaml --data-path /datasets/iccv --local_rank 0 --batch-size 128 --resume /pretrained/iccv/swin/swin_large_224_sam.pth --eval
|
const stack = require('./stack');
// "Stack of plates": a list of bounded stacks; a new inner stack is started
// once the current one reaches `capacity` (default 10).
const stack_of_stacks = class {
    constructor(capacity){
        this.stackList = Array();
        this.stackList.push(new stack());
        this.stackListSize = 1;
        this.capacity = capacity || 10;
    }
    // Returns the most recently started stack, or null if none exist.
    get_last_stack(){
        if (this.stackListSize !== 0){
            return this.stackList[this.stackListSize-1]
        }
        return null;
    }
    // Push onto the last stack, starting a fresh one when it is full.
    // NOTE(review): assumes ./stack exposes `size` as a numeric property,
    // matching its existing use here — confirm against the stack module.
    push(value){
        const lastStack = this.get_last_stack();
        if (lastStack.size == this.capacity){
            let newStack = new stack();
            newStack.push(value);
            this.stackList.push(newStack);
            this.stackListSize += 1;
        }
        else {
            lastStack.push(value);
        }
    }
    // Pop from the last stack and return the popped value.
    // Fixed: the original unconditionally decremented stackListSize without
    // removing anything from stackList (corrupting the count after the first
    // pop) and silently discarded the popped value.
    pop(){
        const lastStack = this.get_last_stack();
        if (lastStack === null){
            return undefined;
        }
        const value = lastStack.pop();
        // Discard the inner stack once emptied, but always keep one allocated
        // so push() can rely on get_last_stack() being non-null.
        if (lastStack.size === 0 && this.stackListSize > 1){
            this.stackList.pop();
            this.stackListSize -= 1;
        }
        return value;
    }
}
// Manual smoke test: 14 pushes with capacity 10 should span two inner
// stacks; subsequent pops should shrink the structure. Results are logged.
const s1 = new stack_of_stacks(10);
for (let i = 0; i < 14; i++){
    s1.push(i);
}
console.log(s1.stackListSize);
s1.pop();
console.log(s1.stackListSize);
s1.pop();
console.log(s1.stackListSize);
|
<gh_stars>10-100
package io.opensphere.core.launch;
import java.awt.SplashScreen;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.JOptionPane;
import javax.swing.UnsupportedLookAndFeelException;
import org.apache.log4j.Logger;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import io.opensphere.core.SplashScreenManager;
import io.opensphere.core.appl.Kernel;
import io.opensphere.core.appl.LookAndFeelInit;
import io.opensphere.core.appl.SplashScreenManagerImpl;
import io.opensphere.core.preferences.Preferences;
import io.opensphere.core.preferences.PreferencesRegistry;
import io.opensphere.core.preferences.PreferencesRegistryImpl;
import io.opensphere.core.security.SecurityManagerImpl;
import io.opensphere.core.util.Constants;
import io.opensphere.core.util.SystemPropertyLoader;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.filesystem.FileUtilities;
import io.opensphere.core.util.lang.StringUtilities;
/**
* Launcher for the OpenSphere Tool Suite. This determines the proper Java
* arguments and forks a new Java process.
*/
@SuppressWarnings("PMD.GodClass")
public final class Launch
{
    /** Logger reference. */
    private static final Logger LOGGER;
    /** Flag indicating if the app should be restarted. */
    private static volatile boolean ourRestart;
    /** How many milliseconds between re-launches. */
    private static final long RELAUNCH_EXCLUSION_WINDOW_MILLIS = 10000L;
    /**
     * Flag indicating if the application is configured to use a master
     * password.
     */
    private final boolean myMasterPasswordInUse;
    /** The preferences. */
    private final Preferences myPreferences;
    /** Flag indicating if a splash screen is active. */
    private volatile boolean mySplashScreenActive = SplashScreen.getSplashScreen() != null;
    /** The splash screen manager. */
    private final SplashScreenManager mySplashScreenManager = new SplashScreenManagerImpl();
    /** The directory in which the application will launch. */
    private File myCurrentWorkingDirectory;
    // NOTE(review): system properties are loaded before LOGGER is created —
    // presumably so the logging configuration can pick them up; confirm
    // before reordering.
    static
    {
        try
        {
            new LookAndFeelInit().setLookAndFeel();
        }
        catch (final UnsupportedLookAndFeelException e)
        {
            Logger.getLogger(Launch.class).error(e, e);
        }
        SystemPropertyLoader.validateUserHome();
        SystemPropertyLoader.loadSystemProperties();
        LOGGER = Logger.getLogger(Launch.class);
    }
    /**
     * Main method. Launches the child application in a loop, relaunching on
     * unexpected exits and giving up if two failures occur within
     * {@link #RELAUNCH_EXCLUSION_WINDOW_MILLIS} of each other.
     *
     * @param args The program arguments.
     */
    @SuppressFBWarnings("DM_EXIT")
    public static void main(String[] args)
    {
        final SplashScreen splash = SplashScreen.getSplashScreen();
        if (splash != null)
        {
            splash.createGraphics();
            splash.update();
        }
        int code;
        long lastLaunch = 0L;
        do
        {
            code = new Launch().run();
            // Two rapid exits in a row indicate a crash loop: stop retrying.
            if (System.currentTimeMillis() - lastLaunch < RELAUNCH_EXCLUSION_WINDOW_MILLIS)
            {
                LOGGER.info("Encountered a subsequent failure. Giving up.");
                break;
            }
            else if (code == 2)
            {
                // Exit code 2 from the child is treated as a restart request.
                LOGGER.info("User requested restart.");
            }
            else
            {
                if (code != 0)
                {
                    logErrorFile();
                }
                if (code != 0 && ourRestart)
                {
                    LOGGER.info("Unexpected exit. Restarting...");
                }
                else
                {
                    LOGGER.info("Exiting with status " + code);
                    break;
                }
            }
            lastLaunch = System.currentTimeMillis();
        }
        while (true);
        System.exit(code);
    }
/**
* Read an input stream until its end.
*
* @param is The input stream.
* @throws IOException If there is an IO error.
*/
private static void evacuateInputStream(InputStream is) throws IOException
{
boolean done;
do
{
done = is.read() == -1;
}
while (!done);
}
    /**
     * Get any JVM arguments set in the environment. The value is read from
     * the OPENSPHERE_JVM_ARGS environment variable, falling back to the
     * "opensphere.jvm.args" system property, and is split on whitespace
     * while respecting single- or double-quoted sections (quote characters
     * themselves are stripped from the resulting arguments).
     *
     * @return The arguments.
     */
    private static List<String> getJvmArgsFromEnv()
    {
        List<String> args;
        String jvmArgs = System.getenv("OPENSPHERE_JVM_ARGS");
        if (jvmArgs == null)
        {
            // Check for a system property if no environment variable is
            // supplied.
            jvmArgs = System.getProperty("opensphere.jvm.args");
        }
        if (jvmArgs == null)
        {
            args = Collections.<String>emptyList();
        }
        else
        {
            // Split the string on whitespace, ignoring whitespace inside single
            // or double quotes.
            args = new ArrayList<>();
            // Despite the name, inParen tracks whether we are inside a
            // quoted section (toggled by ' or " below).
            boolean inParen = false;
            // Start index of the argument currently being scanned (-1: none).
            int leadingEdge = -1;
            for (int index = 0; index < jvmArgs.length(); ++index)
            {
                final char ch = jvmArgs.charAt(index);
                if (Character.isWhitespace(ch))
                {
                    if (leadingEdge >= 0 && !inParen)
                    {
                        // Unquoted whitespace terminates the current argument.
                        final String arg = jvmArgs.substring(leadingEdge, index).replace("\"", "");
                        if (!arg.isEmpty())
                        {
                            args.add(StringUtilities.expandProperties(arg, System.getProperties()));
                        }
                        leadingEdge = -1;
                    }
                }
                else
                {
                    if (leadingEdge == -1)
                    {
                        leadingEdge = index;
                    }
                    if (ch == '"' || ch == '\'')
                    {
                        inParen = !inParen;
                    }
                }
            }
            // Flush the trailing argument, if any.
            if (leadingEdge >= 0)
            {
                final String arg = jvmArgs.substring(leadingEdge).replace("\"", "");
                if (!arg.isEmpty())
                {
                    args.add(StringUtilities.expandProperties(arg, System.getProperties()));
                }
            }
        }
        return args;
    }
    /**
     * Determine if an error file was written from the process and copy it to
     * the log file. The error file location is derived from an
     * {@code -XX:ErrorFile=...} JVM argument in the environment; when several
     * matching files exist, the most recently modified one is used.
     */
    private static void logErrorFile()
    {
        File errorFile = null;
        final List<String> jvmArgsFromEnv = getJvmArgsFromEnv();
        for (final String arg : jvmArgsFromEnv)
        {
            // Match -XX:ErrorFile=<prefix>%p<suffix>; group(1) is the prefix
            // actual error files start with (%p is replaced by the pid).
            final Matcher matcher = Pattern.compile("-XX:ErrorFile=(.+)(?:%p.*)").matcher(arg);
            if (matcher.matches())
            {
                final File file = new File(matcher.group(1));
                final File[] files = file.getParentFile().listFiles(pathname -> pathname.getName().startsWith(file.getName()));
                if (files != null)
                {
                    // Pick the newest matching error file.
                    for (final File file2 : files)
                    {
                        if (errorFile == null || file2.lastModified() > errorFile.lastModified())
                        {
                            errorFile = file2;
                        }
                    }
                }
                else
                {
                    LOGGER.error("Error accessing files in directory " + file.getParentFile());
                }
                break;
            }
        }
        if (errorFile != null && errorFile.canRead())
        {
            for (final String line : FileUtilities.readLines(errorFile))
            {
                LOGGER.error(line);
            }
        }
    }
/**
* Attempt to create a JVM.
*
* @param input The command.
* @return {@code true} if the JVM was successfully created.
*/
private static boolean testJava(Collection<? extends String> input)
{
do
{
final List<String> command = New.list(input.size() + 1);
command.addAll(input);
command.add("-version");
try
{
final Process proc1 = new ProcessBuilder(command).start();
evacuateInputStream(proc1.getErrorStream());
evacuateInputStream(proc1.getInputStream());
return proc1.waitFor() == 0;
}
catch (final InterruptedException e)
{
LOGGER.warn("Interrupted while testing command [" + command + "]: " + e, e);
}
catch (final IOException e)
{
LOGGER.warn("Exception while testing command [" + command + "]: " + e, e);
}
}
while (true);
}
/**
* Attempt to create a JVM with a certain max memory setting.
*
* @param java The path to the java executable.
* @param mem The memory setting, in megabytes.
* @return {@code true} if the JVM was successfully created.
*/
private static boolean testMemory(String java, int mem)
{
final String arg = "-Xmx" + mem + "m";
return testJava(Arrays.asList(java, arg));
}
    /** Constructor. Initializes preferences, the master-password flag, and
     * the launch working directory (the current "user.dir"). */
    private Launch()
    {
        // A single shared executor drives both sides of the preferences
        // registry.
        final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        final PreferencesRegistry reg = new PreferencesRegistryImpl(executor, executor);
        myPreferences = reg.getPreferences(getClass());
        myMasterPasswordInUse = SecurityManagerImpl.isMasterPasswordInUse(reg);
        myCurrentWorkingDirectory = new File(System.getProperty("user.dir"));
    }
    /**
     * Determine the maxMemory to use for the child application. Starts from
     * the preferred (persisted) value or the computed upper limit and probes
     * downward in 128 MB steps until a JVM can be created, bottoming out at
     * 512 MB. The working value is persisted back to preferences.
     *
     * @param java The java executable.
     * @return The maxMemory in MB.
     */
    private int determineMaxMemory(String java)
    {
        final int prefMaxMemory = myPreferences.getInt("opensphere.launch.maxMemory", -1);
        int maxMemory = prefMaxMemory == -1 ? getUpperLimitForMemoryTesting() : prefMaxMemory;
        final int maxMemoryFloorMB = 512;
        while (maxMemory > maxMemoryFloorMB)
        {
            if (testMemory(java, maxMemory))
            {
                LOGGER.info("Memory test at " + maxMemory + " was successful.");
                break;
            }
            LOGGER.info("Memory test at " + maxMemory + " failed.");
            maxMemory -= 128;
        }
        if (maxMemory < maxMemoryFloorMB)
        {
            maxMemory = maxMemoryFloorMB;
        }
        if (maxMemory != prefMaxMemory)
        {
            if (prefMaxMemory != -1)
            {
                LOGGER.warn("Launching with preferred maxMemory " + prefMaxMemory + " MB failed.");
            }
            myPreferences.putInt("opensphere.launch.maxMemory", maxMemory, this);
        }
        LOGGER.info("Setting maxMemory to " + maxMemory + "MB");
        return maxMemory;
    }
    /**
     * Get the upper limit for memory testing. Preference order: the explicit
     * "opensphere.launch.maxMemTestBytes" property, then a ratio of
     * "opensphere.launch.totalPhysicalMemoryBytes", then a user prompt.
     *
     * @return The upper limit in MB.
     */
    @SuppressFBWarnings("DM_EXIT")
    private int getUpperLimitForMemoryTesting()
    {
        int maxMemoryMB;
        final long maxMemTestBytes = Long.getLong("opensphere.launch.maxMemTestBytes", -1L).longValue();
        final long totalPhsicalMemoryBytes = Long.getLong("opensphere.launch.totalPhysicalMemoryBytes", -1L).longValue();
        if (maxMemTestBytes != -1L)
        {
            LOGGER.info("Max test memory is " + maxMemTestBytes + " bytes.");
            maxMemoryMB = (int)((float)maxMemTestBytes / Constants.BYTES_PER_MEGABYTE);
        }
        else if (totalPhsicalMemoryBytes != -1L)
        {
            LOGGER.info("Total Physical memory is " + totalPhsicalMemoryBytes + " bytes.");
            // Take a configurable fraction (default 50%) of physical memory,
            // rounded down to a multiple of 128 MB.
            maxMemoryMB = (int)((float)totalPhsicalMemoryBytes / Constants.BYTES_PER_MEGABYTE
                    * Utilities.parseSystemProperty("opensphere.launch.maxMemTestRatio", .5f)) / 128 * 128;
        }
        else
        {
            // No hints available: ask the user to pick a setting.
            try
            {
                new LookAndFeelInit().setLookAndFeel();
            }
            catch (final UnsupportedLookAndFeelException e)
            {
                Logger.getLogger(Launch.class).error(e, e);
            }
            final String[] options = new String[] { "256 MB", "512 MB", "768 MB", "1024 MB", "1536 MB", "2048 MB", "4096 MB" };
            final int choice = JOptionPane.showOptionDialog(null,
                    "Maximum memory setting could not be automatically determined. Please select from the following options:",
                    "Select Maximum Memory", JOptionPane.DEFAULT_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[3]);
            if (choice == -1)
            {
                // Dialog dismissed without a choice: abort the launch.
                JOptionPane.showMessageDialog(null, "No memory setting made. Application will exit.");
                System.exit(1);
                maxMemoryMB = 0;
            }
            else
            {
                maxMemoryMB = Integer.parseInt(options[choice].split(" ")[0]);
                myPreferences.putInt("opensphere.launch.maxMemory", maxMemoryMB, this);
                myPreferences.waitForPersist();
            }
        }
        return maxMemoryMB;
    }
/**
* Get if running in a 64-bit VM.
*
* @return {@code true} if the VM is 64-bit.
*/
private boolean is64bit()
{
return System.getProperty("os.arch").contains("64");
}
/**
* Get if running on Linux.
*
* @return {@code true} if the operating system is Linux.
*/
private boolean isLinux()
{
return "Linux".equals(System.getProperty("os.name"));
}
    /**
     * Run the child application: resolve the preferred install version from
     * config.properties (if present), assemble the JVM command line, spawn
     * the child process, pipe its output, and wait for it to exit.
     *
     * @return The exit code of the child application.
     */
    private int run()
    {
        String java = System.getProperty("java.home") + File.separatorChar + "bin" + File.separatorChar + "java";
        // config.properties (one directory up) may redirect the launch to a
        // sibling version directory via its "preferred.version" key.
        File baseDirectory = myCurrentWorkingDirectory.getParentFile();
        File configPropertiesFile = new File(baseDirectory, "config.properties");
        if (configPropertiesFile.canRead())
        {
            Properties configProperties = new Properties();
            try (InputStream in = new FileInputStream(configPropertiesFile))
            {
                configProperties.load(in);
                String preferredVersion = configProperties.getProperty("preferred.version");
                if (!myCurrentWorkingDirectory.getName().equals(preferredVersion))
                {
                    LOGGER.info("Current version (" + myCurrentWorkingDirectory.getName() + ") does not match preferred version ("
                            + preferredVersion + ")");
                    File preferredVersionDirectory = new File(baseDirectory, preferredVersion);
                    if (preferredVersionDirectory.isDirectory())
                    {
                        // Retarget both the java path and the working dir.
                        java = java.replace(myCurrentWorkingDirectory.getName(), preferredVersion);
                        myCurrentWorkingDirectory = preferredVersionDirectory;
                    }
                    else
                    {
                        LOGGER.error("Attempted to use '" + preferredVersionDirectory.getAbsolutePath()
                                + "' for launch location, but it doesn't exist as a directory.");
                    }
                }
            }
            catch (IOException e)
            {
                LOGGER.error("Unable to read configPropertiesFile from '" + configPropertiesFile.getAbsolutePath() + "'", e);
            }
        }
        final List<String> command = new ArrayList<>();
        command.add(java);
        final String os = isLinux() ? "linux" : "win32";
        final String arch = is64bit() ? "x86_64" : "x86";
        command.add("-Djava.library.path=lib" + File.separatorChar + os + File.separatorChar + arch);
        command.add("-Djava.security.policy=java.policy");
        command.add("-Dopensphere.enableRestart=true");
        command.add("-Duser.home=" + System.getProperty("user.home"));
        command.add("-Xmx" + determineMaxMemory(java) + "m");
        // Arguments added past this index are optional and may be dropped
        // one-by-one below if the JVM refuses to start with them.
        final int requiredCommandSize = command.size();
        // On Windows, the master password prompt doesn't get focus if popToBack
        // is used.
        if (isLinux() || !myMasterPasswordInUse)
        {
            command.add("-Dopensphere.enablePopToBack=true");
        }
        command.addAll(getJvmArgsFromEnv());
        while (command.size() > requiredCommandSize && !testJava(command))
        {
            LOGGER.warn("Failed to run with arguments: " + command);
            command.remove(command.size() - 1);
            LOGGER.warn("Attempting to run with arguments: " + command);
        }
        command.add("-cp");
        command.add(constructClasspath());
        command.add("io.opensphere.core.appl.OpenSphere");
        LOGGER.info("Starting Desktop Application: '" + StringUtilities.join("', '", command) + "'");
        final ProcessBuilder pb = new ProcessBuilder(command);
        pb.directory(myCurrentWorkingDirectory);
        final Process proc;
        try
        {
            proc = pb.start();
        }
        catch (final IOException e)
        {
            LOGGER.fatal("Failed to start Desktop Application: " + e, e);
            return -1;
        }
        // Kill the child if the launcher itself is terminated.
        Runtime.getRuntime().addShutdownHook(new Thread()
        {
            @Override
            public void run()
            {
                proc.destroy();
            }
        });
        startSysoutPipe(proc);
        startSyserrPipe(proc);
        // Wait for the child, retrying if the wait is interrupted.
        while (true)
        {
            try
            {
                return proc.waitFor();
            }
            catch (final InterruptedException e)
            {
                LOGGER.warn("Interrupted waiting for child process.", e);
            }
        }
    }
/**
* Examine the current execution directory, looking for JAR files, and
* plugins. Construct a full classpath argument from the found items.
*
* @return A classpath generated from the set of found items.
*/
/**
 * Examine the current execution directory, looking for JAR files, and
 * plugins. Construct a full classpath argument from the found items.
 *
 * <p>The suite and config JAR names may each be pinned via the system
 * properties {@code opensphere.launch.version} and
 * {@code opensphere.launch.config}; when a property is absent the
 * directory is searched instead.</p>
 *
 * @return A classpath generated from the set of found items.
 */
private String constructClasspath()
{
    final String applicationVersion = System.getProperty("opensphere.launch.version", null);
    final String configJar = System.getProperty("opensphere.launch.config", null);

    // Pinned version wins; otherwise search the launch directory.
    final String suiteJarName = applicationVersion == null
            ? findSuiteJar()
            : "suite-" + applicationVersion + ".jar";

    // Pinned config JAR wins; otherwise search the launch directory.
    final String configJarName = configJar == null ? findConfigurationJar() : configJar;

    return configJarName + File.pathSeparatorChar + suiteJarName + File.pathSeparatorChar + "plugins/*";
}
/**
* Finds the path of the configuration JAR.
*
* @return the path of the configuration jar.
*/
/**
 * Finds the path of the configuration JAR by scanning the current working
 * directory for files named {@code config*.jar} or {@code *-config-*.jar}.
 * If more than one match is found, the most recently modified file is
 * used.
 *
 * @throws IllegalStateException if no configuration JAR can be found in
 *             the launch directory.
 * @return the path of the configuration jar.
 */
private String findConfigurationJar()
{
    String returnValue = null;
    final File file = myCurrentWorkingDirectory;
    if (file.isDirectory())
    {
        final File[] files = file.listFiles((FilenameFilter)(dir, name) ->
                name.startsWith("config") && name.endsWith(".jar")
                        || name.contains("-config-") && name.endsWith(".jar"));
        if (files != null)
        {
            if (files.length == 1)
            {
                returnValue = files[0].getName();
            }
            else if (files.length > 1)
            {
                LOGGER.warn("More than one config JAR found in installation directory, and no property set for "
                        + "'opensphere.launch.config'. Defaulting to newest file.");
                // Long.compare avoids the int-truncation overflow that a
                // subtraction-based comparator would suffer on long
                // timestamps; newest file sorts first.
                Arrays.sort(files, (o1, o2) -> Long.compare(o2.lastModified(), o1.lastModified()));
                returnValue = files[0].getName();
            }
        }
    }
    if (returnValue == null)
    {
        // Fixed copy-paste error: this method concerns the config JAR and
        // the 'opensphere.launch.config' property, not the suite JAR.
        LOGGER.fatal("No property was set for opensphere.launch.config, and no config jars could be found in '"
                + file.getAbsolutePath() + "', exiting.");
        throw new IllegalStateException(
                "No property was set for opensphere.launch.config, and no config jars could be found in '"
                        + file.getAbsolutePath() + "'");
    }
    return returnValue;
}
/**
* Searches for the suite jar in the launch directory. If only one is
* present, it will be used. If more than one are present, then the most
* recently created file will be used.
*
* @throws IllegalStateException if no suite jars can be found in the launch
* directory.
* @return the name of the suite jar with which to invoke the application.
*/
/**
 * Searches for the suite jar in the launch directory. If only one is
 * present, it will be used. If more than one are present, then the most
 * recently created file will be used.
 *
 * @throws IllegalStateException if no suite jars can be found in the launch
 *             directory.
 * @return the name of the suite jar with which to invoke the application.
 */
private String findSuiteJar()
{
    String returnValue = null;
    final File file = myCurrentWorkingDirectory;
    if (file.isDirectory())
    {
        final File[] files = file.listFiles((FilenameFilter)(dir, name) ->
                name.startsWith("suite") && name.endsWith(".jar")
                        || name.contains("-suite-") && name.endsWith(".jar"));
        if (files != null)
        {
            if (files.length == 1)
            {
                returnValue = files[0].getName();
            }
            else if (files.length > 1)
            {
                LOGGER.warn("More than one suite JAR found in installation directory, and no property set for "
                        + "'opensphere.launch.version'. Defaulting to newest file.");
                // Long.compare avoids the int-truncation overflow that a
                // subtraction-based comparator would suffer on long
                // timestamps; newest file sorts first.
                Arrays.sort(files, (o1, o2) -> Long.compare(o2.lastModified(), o1.lastModified()));
                returnValue = files[0].getName();
            }
        }
    }
    if (returnValue == null)
    {
        LOGGER.fatal("No property was set for opensphere.launch.version, and no suite jars could be found in '"
                + file.getAbsolutePath() + "', exiting.");
        throw new IllegalStateException(
                "No property was set for opensphere.launch.version, and no suite jars could be found in '"
                        + file.getAbsolutePath() + "'");
    }
    return returnValue;
}
/**
* Start a pipe to send the error output from the child process to my error
* output.
*
* @param proc The child process.
*/
/**
 * Start a daemon thread that copies the error output of the child process
 * to this process's error output, line by line, until the child's stream
 * is exhausted. Read failures are logged and the copy is retried.
 *
 * @param proc The child process.
 */
@SuppressWarnings("PMD.SystemPrintln")
private void startSyserrPipe(final Process proc)
{
    final BufferedReader reader = new BufferedReader(
            new InputStreamReader(proc.getErrorStream(), StringUtilities.DEFAULT_CHARSET));
    final Thread pipeThread = new Thread()
    {
        @Override
        public void run()
        {
            boolean finished = false;
            while (!finished)
            {
                try
                {
                    String line = reader.readLine();
                    while (line != null)
                    {
                        System.err.println(line);
                        line = reader.readLine();
                    }
                    // End of stream: the child closed stderr.
                    finished = true;
                }
                catch (final IOException e)
                {
                    LOGGER.warn("IOException piping stderr: " + e, e);
                }
            }
        }
    };
    pipeThread.setDaemon(true);
    pipeThread.start();
}
/**
* Start a pipe to send the standard output from the child process to my
* standard output.
*
* @param proc The child process.
*/
/**
 * Start a pipe to send the standard output from the child process to my
 * standard output.
 *
 * <p>While the splash screen is active, each line is inspected: lines
 * carrying the splash-screen init-message prefix update the splash screen
 * instead of being printed; a line ending with the child's
 * "displaying main frame" message closes the splash screen and ends the
 * inspection phase. After that, lines are echoed verbatim, and a line
 * ending with the child's "shutting down" message clears the restart
 * flag.</p>
 *
 * @param proc The child process.
 */
@SuppressWarnings("PMD.SystemPrintln")
private void startSysoutPipe(final Process proc)
{
final BufferedReader out = new BufferedReader(
new InputStreamReader(proc.getInputStream(), StringUtilities.DEFAULT_CHARSET));
final Thread outThread = new Thread()
{
@Override
public void run()
{
// Outer loop retries after an IOException; normal completion
// (end of stream) exits via the break at the bottom.
while (true)
{
try
{
if (mySplashScreenActive)
{
// Phase 1: splash screen is up — intercept init
// messages and watch for the main-frame marker.
for (String line; (line = out.readLine()) != null;)
{
if (line.endsWith(Kernel.DISPLAYING_MAIN_FRAME_MSG))
{
// Main frame is visible; tear down the splash
// screen (if the JVM still has one) and stop
// inspecting lines.
final SplashScreen ss = SplashScreen.getSplashScreen();
if (ss != null)
{
ss.close();
}
mySplashScreenActive = false;
break;
}
final int ix = line.indexOf(SplashScreenManagerImpl.INIT_MESSAGE_PREFIX);
if (ix >= 0)
{
// Route init progress text to the splash screen
// rather than stdout.
mySplashScreenManager.setInitMessage(
line.substring(ix + SplashScreenManagerImpl.INIT_MESSAGE_PREFIX.length()));
}
else
{
System.out.println(line);
}
}
}
// Phase 2: plain echo until end of stream; a shutdown
// marker cancels any pending restart.
for (String line; (line = out.readLine()) != null;)
{
if (line.endsWith(Kernel.SHUTTING_DOWN_MSG))
{
ourRestart = false;
}
System.out.println(line);
}
break;
}
catch (final IOException e)
{
LOGGER.warn("IOException piping stdout: " + e, e);
}
}
}
};
outThread.setDaemon(true);
outThread.start();
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.