text
stringlengths 27
775k
|
|---|
- deploy to home laptop
- deploy to office laptop
- copy Scripts to C:\Tools folder
- modify path inside scripts e.g. 7z.exe path
- modify FreeCommander settings
- put the latest FreeCommander.fav.ini file into delivery
- github create release
|
package au.com.codeka.warworlds.server.cron;
import java.util.Map;
import java.util.TreeMap;
import au.com.codeka.common.Log;
/** A registry of the cron jobs we have created. */
public class CronJobRegistry {
    private static final Log log = new Log("CronJobRegistry");

    // Maps a job's registered name to its implementation class. TreeMap keeps
    // the registry sorted by name, which is convenient when listing jobs.
    private static final Map<String, Class<? extends CronJob>> sCronJobs;
    static {
        sCronJobs = new TreeMap<>();
        sCronJobs.put("update-ranks", UpdateRanksCronJob.class);
        sCronJobs.put("purge-combat-reports", PurgeCombatReportsCronJob.class);
        sCronJobs.put("purge-sessions", PurgeSessionsCronJob.class);
        sCronJobs.put("find-abandoned-empires", FindAbandonedEmpiresCronJob.class);
        sCronJobs.put("find-alts", FindAltAccountsCronJob.class);
        sCronJobs.put("update-dashboard", UpdateDashboardCronJob.class);
    }

    /**
     * Creates a new instance of the cron job registered under the given name.
     *
     * @param name The registered name of the job (e.g. "update-ranks").
     * @return A fresh {@link CronJob} instance, or {@code null} if no job is
     *     registered under that name or the job could not be instantiated.
     */
    public static CronJob getJob(String name) {
        Class<? extends CronJob> clazz = sCronJobs.get(name);
        if (clazz == null) {
            return null;
        }
        try {
            // Class.newInstance() is deprecated because it propagates any checked
            // exception thrown by the constructor. Going through the declared
            // no-arg constructor wraps those in InvocationTargetException, which
            // (like the other reflective failures) is caught below.
            return clazz.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            log.error("Exception caught creating instance of cron task '" + name + "'", e);
            return null;
        }
    }
}
|
package snd.unit.model
import snd.BaseTest
import snd.fixtures.CanvasFixtures
import snd.model.CanvasState
class CanvasStateSpec extends BaseTest with CanvasFixtures {

  describe("In CanvasState model") {

    it("apply should create a canvas of the desired size") {
      // Build a fresh canvas and check both the reported dimensions and the
      // actual backing surface dimensions agree with the requested size.
      val canvas = CanvasState(15, 10)

      canvas.xSize shouldBe 15
      canvas.ySize shouldBe 10
      canvas.surface should have length 15
      canvas.surface.head should have length 10
    }

    it("update should set a desired color on a given point") {
      // Paint a single point, then read the same coordinates back.
      val painted = baseCanvas.update(pointInBaseCanvas, 'c')
      painted.surface(pointInBaseCanvas.xPos)(pointInBaseCanvas.yPos) shouldBe 'c'
    }
  }
}
|
<?php
/**
 *------------------------------------------------------------------------------
 * Configuration
 *------------------------------------------------------------------------------
 */
// Do not forget to update your composer.json accordingly
$appNamespace = 'MyApp';

// The function to resolve the controller class.
// Maps the "controller" request param (default "index") to a fully qualified
// class name, e.g. "MyApp\Controllers\IndexController".
$resolveControllerClass = function ($params) use ($appNamespace) {
    $controller = $params->fetch('controller', 'index');
    return implode('\\', array(
        $appNamespace,
        'Controllers',
        ucfirst($controller) . 'Controller'
    ));
};

// The function to resolve the action to be called on the controller.
// "action" request param (default "index") plus the "Action" suffix,
// e.g. "indexAction".
$resolveControllerAction = function ($params) {
    return $params->fetch('action', 'index') . 'Action';
};

/**
 *------------------------------------------------------------------------------
 * Bootstrap
 *------------------------------------------------------------------------------
 */
// The session must be started before $_SESSION is wrapped below.
session_start();
$autoloader = require '../vendor/autoload.php';

// set request: wrap the PHP superglobals in the framework's HTTP objects.
$env = new \Koine\Http\Environment($_SERVER);
$cookies = new \Koine\Http\Cookies($_COOKIE);
$session = new \Koine\Http\Session($_SESSION);
$params = new \Koine\Http\Params($_REQUEST);
$request = new \Koine\Http\Request(array(
    'environment' => $env,
    'cookies' => $cookies,
    'session' => $session,
    'params' => $params,
));

// set view
$view = new \Koine\Mvc\View();
$view->getConfig()
    ->addPath(__DIR__ . '/app/views');

// Dependency definitions must be loaded before the container is fetched.
require_once __DIR__ . '/../configs/dependencies.php';
$dependencyContainer = \Nurse\Di::getInstance()->getContainer();

// set front controller: wire request, routing and view together, then dispatch.
$frontController = new \Koine\Mvc\FrontController();
$frontController->setRequest($request)
    ->setControllerClass($resolveControllerClass($params))
    ->setAction($resolveControllerAction($params))
    ->setDependencyContainer($dependencyContainer)
    ->setView($view);
$response = $frontController->execute();
$response->send();
exit();
|
# Factory for TransportSurvey records: associated with a school, dated today.
FactoryBot.define do
  factory :transport_survey do
    # Association to the school factory.
    school
    # NOTE(review): Date.today uses the system time zone; in a Rails app the
    # time-zone-aware Date.current is usually preferred — confirm which the
    # application expects here.
    run_on { Date.today }
  end
end
|
-- Drop in dependency order (children first) so FK references never dangle.
DROP TABLE IF EXISTS employee;
DROP TABLE IF EXISTS role;
DROP TABLE IF EXISTS department;

-- A company department, e.g. "Engineering".
-- NOTE(review): name is nullable; consider NOT NULL for parity with role/employee.
CREATE TABLE department(
    id INTEGER PRIMARY KEY AUTO_INCREMENT,
    name VARCHAR(30)
);

-- A job role, priced by salary and owned by a department.
CREATE TABLE role(
    id INTEGER PRIMARY KEY AUTO_INCREMENT,
    title VARCHAR(30) NOT NULL,
    salary DECIMAL(10,2) NOT NULL,
    department_id INTEGER,
    CONSTRAINT fk_department_id FOREIGN KEY (department_id) REFERENCES department(id)
);

-- An employee with a role and an optional manager (self-reference).
CREATE TABLE employee(
    id INTEGER PRIMARY KEY AUTO_INCREMENT,
    first_name VARCHAR(30) NOT NULL,
    last_name VARCHAR(30) NOT NULL,
    role_id INTEGER NOT NULL,
    manager_id INTEGER,
    CONSTRAINT fk_role_id FOREIGN KEY (role_id) REFERENCES role(id),
    -- MySQL silently ignores inline "REFERENCES" column clauses, so the
    -- self-referencing manager FK must be declared as a table constraint
    -- to actually be enforced.
    CONSTRAINT fk_manager_id FOREIGN KEY (manager_id) REFERENCES employee(id)
);
|
using Android.App;
using Android.Content;
using Android.OS;
using Android.Runtime;
using Android.Views;
using Android.Widget;
using Sinergija21.Basic.Droid.Infrastructure;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Google.AR.Core;
using Android.Graphics;
using System.IO;
using Google.AR.Sceneform;
using System.Threading.Tasks;
using Google.AR.Sceneform.Math;
namespace Sinergija21.Basic.Droid.Models
{
/// <summary>
/// Singleton that registers a set of marker images with the ARCore session and,
/// the first time a marker is tracked, anchors a 3D model on top of it.
/// </summary>
internal class AugmentedImageServiceManager
{
    public static AugmentedImageServiceManager Instance { get; } = new AugmentedImageServiceManager();

    private CustomArFragment fragment;

    // Marker asset file name -> whether a model has already been placed for it.
    private Dictionary<string, bool> images { get; }

    private AugmentedImageServiceManager()
    {
        images = new Dictionary<string, bool>
        {
            {"marker.jpg", false }
        };
    }

    /// <summary>
    /// Hooks into the scene. Session setup is deferred to the first scene
    /// update because the AR session is not available before then.
    /// </summary>
    public void Initialize(CustomArFragment fragment)
    {
        this.fragment = fragment;
        fragment.ArSceneView.Scene.Update += Scene_FirstUpdate;
    }

    private void Scene_FirstUpdate(object sender, Scene.UpdateEventArgs e)
    {
        // One-shot handler: unsubscribe, configure the session, then install
        // the regular per-frame handler.
        fragment.ArSceneView.Scene.Update -= Scene_FirstUpdate;
        FirstRun();
        fragment.ArSceneView.Scene.Update += Scene_Update;
    }

    private void FirstRun()
    {
        // Setup image database - AR tries to recognize images added to here.
        var session = fragment.ArSceneView.Session;
        var db = new AugmentedImageDatabase(session);
        var config = new Config(session);
        foreach (var img in images)
        {
            Bitmap bmp = LoadImageFromAssets(img.Key);
            float width = 0.5f; // [Optional] physical marker width hint.
            db.AddImage(img.Key, bmp, width);
        }
        //config.SetFocusMode(Config.FocusMode.Auto);
        config.SetAugmentedImageDatabase(db);
        config.SetUpdateMode(Config.UpdateMode.LatestCameraImage);
        session.Configure(config);
    }

    private void Scene_Update(object sender, Google.AR.Sceneform.Scene.UpdateEventArgs e)
    {
        // Check images for each frame.
        var frame = fragment.ArSceneView.ArFrame;
        if (frame == null)
        {
            return; // No camera frame available yet.
        }
        IEnumerable<AugmentedImage> aImages = frame
            .GetUpdatedTrackables(Java.Lang.Class.FromType(typeof(AugmentedImage)))
            .Cast<AugmentedImage>()
            .ToList();
        foreach (var aImg in aImages)
        {
            // TryGetValue avoids a KeyNotFoundException should the session ever
            // report an image we did not register; 'handled' is true once the
            // model has been placed for this marker.
            if (!images.TryGetValue(aImg.Name, out bool handled) || handled)
                continue;
            if (aImg.TrackingState == TrackingState.Tracking)
            {
                images[aImg.Name] = true;
                // Create anchor to which model is attached to.
                var anchor = aImg.CreateAnchor(aImg.CenterPose);
                var anchorNode = new AnchorNode(anchor);
                anchorNode.SetParent(fragment.ArSceneView.Scene);
                // Load 3d model.
                var n = ((AndroidDisplayManager)DI.Display).LoadModelInternal("a7logo.glb", new Vector3());
                n.SetParent(anchorNode);
                float scale = aImg.ExtentX; // kept for the commented scaling code below
                //n.LocalScale = new Google.AR.Sceneform.Math.Vector3(scale, scale, scale);
                // Set 3d model "up".
                //n.LocalRotation = Quaternion.AxisAngle(new Vector3(1, 0, 0), -90);
            }
        }
    }

    /// <summary>Decodes the named asset file into a Bitmap.</summary>
    private static Bitmap LoadImageFromAssets(string img)
    {
        using (Stream s = Application.Context.Assets.Open(img))
        {
            return BitmapFactory.DecodeStream(s);
        }
    }
}
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using AoC.Helpers.Days;
namespace AoC.Y2016.Days
{
/// <summary>
/// AoC 2016 day 6: recover a message by taking, for each column of the input,
/// the most common character (part one) or the least common one (part two).
/// </summary>
public class Day06 : BaseDay
{
    public Day06() : base(2016, 6)
    {
    }

    public Day06(IEnumerable<string> inputLines) : base(2016, 6, inputLines)
    {
    }

    protected override IConvertible PartOne() => GetMessage(inputLines.ToList(), true);

    protected override IConvertible PartTwo() => GetMessage(inputLines.ToList(), false);

    /// <summary>
    /// Decodes the message one column at a time.
    /// </summary>
    /// <param name="messages">Input lines, all assumed to have the same length.</param>
    /// <param name="getMostCommon">True picks each column's most frequent character; false its least frequent.</param>
    /// <returns>The decoded message; empty string for empty input.</returns>
    private static string GetMessage(List<string> messages, bool getMostCommon)
    {
        // Guard: the original indexed messages[0] unconditionally and threw on
        // an empty input list.
        if (messages.Count == 0)
        {
            return string.Empty;
        }

        // StringBuilder avoids the quadratic cost of repeated string +=.
        var message = new StringBuilder();
        for (int i = 0; i < messages[0].Length; i++)
        {
            // Count character frequencies for column i. Insertion order is kept
            // by Dictionary in practice, and OrderBy/OrderByDescending are
            // stable, so ties resolve to the first-seen character, matching the
            // original implementation.
            var characters = new Dictionary<char, int>();
            foreach (var line in messages)
            {
                var character = line[i];
                if (!characters.ContainsKey(character)) { characters.Add(character, 0); }
                characters[character]++;
            }

            message.Append(getMostCommon
                ? characters.OrderByDescending(c => c.Value).First().Key
                : characters.OrderBy(c => c.Value).First().Key);
        }

        return message.ToString();
    }
}
}
|
# frozen_string_literal: true
require 'test_helper'
require 'sapi_client'
module SapiClient
  # Tests for SapiClient::Application: loading an application specification,
  # reporting its configuration and endpoints, and building callable API
  # instances. HTTP interactions are replayed through VCR cassettes.
  class ApplicationTest < Minitest::Test
    describe 'Application' do
      # Fixture application spec and a base URL on the test API port.
      let(:spec) { 'test/fixtures/unified-view/application.yaml' }
      let(:base_url) { "http://localhost:#{sapi_api_port}" }

      describe '#initialize' do
        it 'should load the application specification given' do
          app = SapiClient::Application.new(base_url, spec)
          _(app.specification).must_be_kind_of Hash
        end

        it 'should raise an error if the spec file does not exist' do
          _(
            -> { SapiClient::Application.new(base_url, 'wimbledon/wombles.yaml') }
          ).must_raise(SapiError)
        end

        it 'should store the base URL' do
          app = SapiClient::Application.new(base_url, spec)
          _(app.base_url).must_equal base_url
        end
      end

      describe '#configuration' do
        it 'should report the application configuration' do
          app = SapiClient::Application.new(base_url, spec)
          _(app.configuration).must_be_kind_of Hash
          assert app.configuration.key?('loadSpecPath')
        end
      end

      describe '#endpoint_group_files' do
        it 'should return the names of all of the endpoint specification files' do
          app = SapiClient::Application.new(base_url, spec)
          file_names = app.endpoint_group_files
          _(file_names.length).must_be :>, 5
          _(file_names).must_include('test/fixtures/unified-view/endpointSpecs/establishment.yaml')
        end
      end

      describe '#endpoints' do
        it 'should return a list of all of the endpoint specification objects' do
          app = SapiClient::Application.new(base_url, spec)
          eps = app.endpoints
          _(eps).must_be_kind_of Array
          _(eps.length).must_be :>, 5
          _(eps.map(&:raw_path)).must_include '/business/id/establishment'
        end
      end

      describe '#instance' do
        it 'should create an instance with methods corresponding to endpoints' do
          app = SapiClient::Application.new(base_url, spec)
          inst = app.instance
          methods = inst.public_methods
          _(methods).must_include(:establishment_list)
          _(methods).must_include(:establishment_list_spec)
        end

        it 'should wrap a list of instances' do
          # Stub model class: records that the wrapper invoked its constructor
          # with the endpoint's JSON payload.
          class ::Establishment # rubocop:disable Lint/ConstantDefinitionInBlock
            def initialize(_json)
              @invoked = true
            end

            attr_reader :invoked
          end

          app = SapiClient::Application.new(base_url, spec)
          inst = app.instance
          # Replay the recorded HTTP interaction rather than hitting the API.
          VCR.use_cassette('application.test_instance_wrapping') do
            establishments = inst.establishment_list(_limit: 1)
            _(establishments.first).must_be_kind_of(Establishment)
            assert establishments.first.invoked
          end
        end

        it 'should retrieve a hierarchy' do
          VCR.use_cassette('application.test_hierarchy') do
            app = SapiClient::Application.new(
              'http://fsa-rp-test.epimorphics.net',
              'test/fixtures/regulated-products/application.yaml'
            )
            hierarchy = app.instance.feed_category_hierarchy_hierarchy({}, :skos)
            _(hierarchy.roots.size).must_equal(5)
          end
        end
      end
    end
  end
end
|
---
title: Advanced objects in JavaScript
author: azu
layout: post
itemUrl: 'http://bjorn.tipling.com/advanced-objects-in-javascript'
editJSONPath: 'https://github.com/jser/jser.info/edit/gh-pages/data/2014/08/index.json'
date: '2014-08-25T12:26:31Z'
tags:
- JavaScript
- ECMAScript
- tutorial
relatedLinks:
- title: >-
Web Reflection: What Books Didn't Tell You About ES5 Descriptors -
Part 1
url: >-
http://webreflection.blogspot.jp/2014/03/what-books-wont-tell-you-about-es5.html
- title: 'ES5, Property Descriptor解説 - 枕を欹てて聴く'
url: 'http://constellation.hatenablog.com/entry/20101205/1291564928'
- title: Revisiting JavaScript Objects | LakTEK (Lakshan Perera)
url: 'http://www.laktek.com/2012/12/29/revisiting-javascript-objects/'
---
JSのオブジェクトについて幅広く解説されてる。
getter/setter、definePropertyとPropertyDescriptor、Object.create、valueOfとtoString、ES6のProxyとSymbolについて
|
#!/usr/bin/env python
# coding=utf-8
"""
tests for csvutils
"""
from __future__ import print_function, unicode_literals
from .csvutils import rows_to_csv
def test_rows_to_csv():
    """rows_to_csv joins cells with commas and terminates each row with CRLF."""
    rows = [
        ("abc", u"呵呵", 123),
        ("def", u"哈哈", 456),
    ]
    expected = u"abc,呵呵,123\r\ndef,哈哈,456\r\n"
    assert rows_to_csv(rows) == expected
|
from __future__ import print_function
from cloudmesh.shell.command import command
from cloudmesh.shell.command import PluginCommand
from cloudmesh.common.debug import VERBOSE
class Shell3Command(PluginCommand):
    """Cloudmesh plugin command that echoes back which shell3 option was used."""

    # noinspection PyUnusedLocal
    @command
    def do_shell3(self, args, arguments):
        """
        ::

          Usage:
                shell3 --text=TEXT
                shell3 --number=NUMBER
                shell3 list

          This command informs the option entered along with values.

          Arguments:
              TEXT    a text string
              NUMBER  a whole number
        """
        # docopt exposes options under their flag names; mirror them onto
        # friendlier attribute names (None when the option was not given).
        arguments.TEXTSTR = arguments['--text'] or None
        arguments.NUMBER = arguments['--number'] or None
        VERBOSE(arguments)
        if arguments.TEXTSTR:
            print("You have entered Text Option, Value: " + arguments.TEXTSTR)
        elif arguments.NUMBER:
            # docopt delivers --number as a string already, so no str() cast is
            # needed; message spacing fixed to match the text-option message.
            print("You have entered Number Option, Value: " + arguments.NUMBER)
        elif arguments.list:
            print("You have entered list Option")
        return ""
|
package cn.haohao.dbbook.data
import cn.haohao.dbbook.data.entity.http.BookListResponse
import cn.haohao.dbbook.data.entity.http.BookReviewsListResponse
import cn.haohao.dbbook.data.entity.http.BookSeriesListResponse
import retrofit2.Response
import retrofit2.http.GET
import retrofit2.http.Path
import retrofit2.http.Query
import rx.Observable
/**
 * Created by HaohaoChang on 2017/6/9.
 *
 * Retrofit service definition for the Douban book API (v2). All calls are
 * paged via [start]/[count] and return reactive [Observable] responses.
 */
interface BookService {
    companion object {
        // Root endpoint for all Douban v2 API calls.
        val BASE_URL = "https://api.douban.com/v2/"
    }

    /** Searches books by free-text query [q] and/or [tag]. */
    @GET("book/search")
    fun getBookList(@Query("q") q: String?, @Query("tag") tag: String?, @Query("start") start: Int, @Query("count") count: Int, @Query("fields") fields: String): Observable<Response<BookListResponse>>

    // NOTE(review): despite the name, this endpoint fetches a book's reviews,
    // not its details — consider renaming once callers can be updated.
    @GET("book/{bookId}/reviews")
    fun getBookDetail(@Path("bookId") bookId: String, @Query("start") start: Int, @Query("count") count: Int, @Query("fields") fields: String): Observable<Response<BookReviewsListResponse>>

    /** Lists the books belonging to the series [seriesId]. */
    @GET("book/series/{seriesId}/books")
    fun getBookSeries(@Path("seriesId") seriesId: String, @Query("start") start: Int, @Query("count") count: Int, @Query("fields") fields: String): Observable<Response<BookSeriesListResponse>>
}
|
import { PlatformAddress } from "../classes";
import { Transaction } from "../Transaction";
import { NetworkId } from "../types";
/** JSON shape produced by {@link SetShardUsers.actionToJSON}. */
export interface SetShardUsersActionJSON {
    shardId: number;
    /** User platform addresses in string form. */
    users: string[];
}
/**
 * Transaction that replaces the set of users associated with a shard.
 */
export class SetShardUsers extends Transaction {
    private readonly shardId: number;
    private readonly users: PlatformAddress[];

    public constructor(
        params: { shardId: number; users: PlatformAddress[] },
        networkId: NetworkId
    ) {
        super(networkId);
        const { shardId, users } = params;
        this.shardId = shardId;
        this.users = users;
    }

    public type(): string {
        return "setShardUsers";
    }

    protected actionToEncodeObject(): any[] {
        // The leading 6 is this action's tag in the encoded form.
        const encodedUsers = this.users.map(u =>
            u.getAccountId().toEncodeObject()
        );
        return [6, this.shardId, encodedUsers];
    }

    protected actionToJSON(): SetShardUsersActionJSON {
        return {
            shardId: this.shardId,
            users: this.users.map(u => u.toString())
        };
    }
}
|
#ifndef SUDOKU_SAVER_H
#define SUDOKU_SAVER_H

#include <SDL/SDL.h>
#include <SDL/SDL_image.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>

#include "Imagery/Utils/image.h"

/* Directory containing the digit tile images used to compose grid pictures. */
#define IMAGE_DIRECTORY "Sudoku_Saver/Images"
/* Side length, in pixels, of a single digit tile image. */
#define IMAGE_SIZE 28

/**
 * @brief Copies a square sudoku grid into another pre-allocated grid.
 *
 * @param grid source grid (dimension x dimension)
 * @param destination target grid; must already be allocated to the same size
 * @param dimension number of rows/columns in both grids
 */
void copyArray(unsigned int **grid, unsigned int **destination,
               unsigned int dimension);

/**
 * @brief Prints the grid to stdout in a plain, unformatted way.
 *
 * @param grid grid to print (dimension x dimension)
 * @param dimension number of rows/columns
 */
void basicPrint(unsigned int **grid, unsigned int dimension);

/**
 * @brief Reads a sudoku grid from the file at inputPath into grid.
 *
 * @param grid pre-allocated grid to fill (dimension x dimension)
 * @param inputPath path of the file to read
 * @param verbose nonzero to log progress while reading
 * @param dimension number of rows/columns
 */
void readGrid(unsigned int **grid, char inputPath[], int verbose,
              unsigned int dimension);

/**
 * @brief Writes the sudoku grid to the file at outputPath.
 *
 * @param grid grid to save (dimension x dimension)
 * @param outputPath path of the file to write
 * @param verbose nonzero to log progress while writing
 * @param dimension number of rows/columns
 */
void saveGrid(unsigned int **grid, char outputPath[], int verbose,
              unsigned int dimension);

/**
 * @brief Create a Sudoku Image object rendering the solved grid.
 *
 * @param grid solved grid to render
 * @param copy presumably the original (pre-solve) grid, used to distinguish
 *             given clues from solved-in digits — confirm in implementation
 * @param folder_path directory containing the digit tile images
 * @param dimension number of rows/columns
 * @return Image
 */
Image createSudokuImage(unsigned int **grid, unsigned int **copy,
                        char *folder_path, unsigned int dimension);

/**
 * @brief Create a Hexa (16x16) Sudoku Image object.
 *
 * @param grid solved grid to render
 * @param copy presumably the original (pre-solve) grid — confirm in implementation
 * @param folder_path directory containing the digit tile images
 * @return Image
 */
Image createHexaSudokuImage(unsigned int **grid, unsigned int **copy,
                            char *folder_path);

/**
 * @brief Get the Image object (tile) for a single cell value.
 *
 * @param val cell value whose tile image should be loaded
 * @param directory directory containing the tile images
 * @param green nonzero presumably selects a green/highlighted tile variant —
 *              confirm in implementation
 * @return Image
 */
Image getImage(unsigned int val, char *directory, unsigned int green);

#endif
|
package provider
// Provider represents a PSP (Payment Service Provider)
type Provider struct {
	// Name is the human-readable name of the payment service provider.
	Name string
}
|
# Association Rule Learning Recommender
[Access to dataset](https://www.kaggle.com/mathchi/online-retail-ii-data-set-from-ml-repository)
***Business Problem:***
- Suggesting products to users at the basket stage.
***Dataset Story:***
- The data set named Online Retail II includes the sales of a UK-based online store between 01/12/2009 and 09/12/2011.
- The product catalog of this company includes souvenirs. They can also be considered promotional items.
- It is also known that most of their customers are wholesalers.
***Variables***:
- InvoiceNo: Invoice Number
-> If this code starts with C, it means that the operation has been canceled.
- StockCode: Product Code
-> Unique number for each product
- Description: Product name
- Quantity: Number of products
-> Indicates how many of the products on the invoices were sold.
- InvoiceDate
- UnitPrice
- CustomerID: Unique customer number
- Country
|
#!/usr/bin/env bash
# Install a root cron entry that retries installing the Amazon SSM agent every
# minute until the install succeeds, at which point the entry removes itself.
cat > /etc/cron.d/ssmstart <<'CRON'
* * * * * root yum install -y https://s3.amazonaws.com/ec2-downloads-windows/SSMAgent/latest/linux_amd64/amazon-ssm-agent.rpm && rm -rf /etc/cron.d/ssmstart
CRON
|
; RUN: llc < %s -mtriple=thumbv6-apple-darwin | FileCheck %s

; Aggregate returned by the multiply-with-overflow intrinsic:
; {32-bit product, overflow flag}.
%umul.ty = type { i32, i1 }

; Multiply by a constant; the test expects a __muldi3 libcall to appear in the
; generated Thumb1 assembly.
define i32 @test1(i32 %a) nounwind {
; CHECK: test1:
; CHECK: muldi3
  %tmp0 = tail call %umul.ty @llvm.umul.with.overflow.i32(i32 %a, i32 37)
  %tmp1 = extractvalue %umul.ty %tmp0, 0
  %tmp2 = select i1 undef, i32 -1, i32 %tmp1
  ret i32 %tmp2
}

declare %umul.ty @llvm.umul.with.overflow.i32(i32, i32) nounwind readnone

; Overflow-checked allocation size (operator new[] style lowering): the
; overflow flag selects -1 as the requested size.
define i32 @test2(i32 %argc, i8** %argv) ssp {
; CHECK: test2:
; CHECK: str r0
; CHECK: movs r2
; CHECK: mov r1
; CHECK: mov r3
; CHECK: muldi3
  %1 = alloca i32, align 4
  %2 = alloca i32, align 4
  %3 = alloca i8**, align 4
  %m_degree = alloca i32, align 4
  store i32 0, i32* %1
  store i32 %argc, i32* %2, align 4
  store i8** %argv, i8*** %3, align 4
  store i32 10, i32* %m_degree, align 4
  %4 = load i32, i32* %m_degree, align 4
  %5 = call %umul.ty @llvm.umul.with.overflow.i32(i32 %4, i32 8)
  %6 = extractvalue %umul.ty %5, 1
  %7 = extractvalue %umul.ty %5, 0
  %8 = select i1 %6, i32 -1, i32 %7
  %9 = call noalias i8* @_Znam(i32 %8)
  %10 = bitcast i8* %9 to double*
  ret i32 0
}

declare noalias i8* @_Znam(i32)
|
# Distance thresholds (units unspecified — presumably pixels or centimetres;
# TODO confirm against the code that consumes these constants).
DISTANCE_TOO_CLOSE = 30
DISTANCE_CLOSE = 20
# Distance within which the goal is considered "close".
DISTANCE_GOAL_CLOSE = 150
# NOTE(review): MEDIUM equals TOO_CLOSE (30) and FAR (10) is smaller than
# CLOSE (20) — these may be tolerances/speeds rather than ordered absolute
# distances; verify before relying on their ordering.
DISTANCE_MEDIUM = 30
DISTANCE_FAR = 10
EDGE_DISTANCE = 60
|
#!/usr/bin/env bash
####
# Github pages release script
####
# Base settings
GH_ORG="cogment"       # GitHub organisation hosting the docs repository
GH_REPO="cogment-doc"  # Repository that serves the GitHub Pages site
GH_BRANCH="gh-pages"   # Branch GitHub Pages publishes from
PUBLISH_AUTHOR_NAME="Artificial Intelligence Redefined"
PUBLISH_AUTHOR_EMAIL="dev+cogment@ai-r.com"
PUB_DIR="publish"      # Working clone of the gh-pages branch
OUT_DIR="public"       # Locally generated site to be published
# Utility functions

## join_by
## Joins all arguments after the first using the first argument as the
## delimiter (multi-character delimiters are supported).
## Examples:
## $ join_by "-delimiter-" "a" "b" "c"
## "a-delimiter-b-delimiter-c"
function join_by() {
    local sep=$1
    shift
    local joined=$1
    shift
    local part
    for part in "$@"; do
        joined+="${sep}${part}"
    done
    printf '%s' "${joined}"
}
# Prints the command-line help text to stdout.
function usage() {
    cat <<EOF
Publish a version of cogment-doc

The package will be deployed to https://github.com/${GH_ORG}/${GH_REPO}/tree/${GH_BRANCH}

Requirements:
  A running ssh agent having a read/write key for this repo.

Options:
  --dry-run: Skip the actual publishing.
  -h, --help: Show this screen.

EOF
}
# Uncomment to trace every command that are being run
#set -x

# Any subsequent(*) commands which fail will cause the shell script to exit immediately
set -o errexit

# Repository root: one level up from this script's directory.
root_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)

# Parse command-line options.
dry_run=0
while [ "$1" != "" ]; do
    case $1 in
        --dry-run)
            dry_run=1
            ;;
        --help | -h)
            usage
            exit 0
            ;;
    esac
    shift
done

# Start from a fresh clone of the publishing branch, emptied of its previous
# content so that deleted pages do not linger.
pub_dir_full_path="${root_dir}/${PUB_DIR}"
rm -rf "${pub_dir_full_path}"
git clone -q -b "${GH_BRANCH}" "git@github.com:${GH_ORG}/${GH_REPO}.git" "${pub_dir_full_path}"
rm -rf "${pub_dir_full_path:?}"/*
printf "* \"%s\" cloned to \"%s\"\n" "${GH_ORG}/${GH_REPO}@${GH_BRANCH}" "${pub_dir_full_path}"

# Copy the freshly generated site into the clone.
out_dir_full_path="${root_dir}/${OUT_DIR}"
cp -r "${out_dir_full_path:?}"/* "${pub_dir_full_path}"
printf "* \"%s\" synced to \"%s\"\n" "${out_dir_full_path}" "${pub_dir_full_path}"

# CNAME sets the custom domain; .nojekyll disables GitHub's Jekyll processing.
echo "docs.cogment.ai" >"${pub_dir_full_path}/CNAME"
echo ".nojekyll" >"${pub_dir_full_path}/.nojekyll"
printf "* required files 'CNAME' and '.nojekyll' updated in \"%s\"\n" "${pub_dir_full_path}"

git -C "${pub_dir_full_path}" config user.name "${PUBLISH_AUTHOR_NAME}"
git -C "${pub_dir_full_path}" config user.email "${PUBLISH_AUTHOR_EMAIL}"
git -C "${pub_dir_full_path}" add -A

publish_url="https://${GH_ORG}.github.io/${GH_REPO}"

# `git commit` exits non-zero when there is nothing to commit; temporarily
# lift errexit so that case can be reported gracefully instead of aborting.
set +o errexit
if ! git -C "${pub_dir_full_path}" commit -q -m"Publish documentation" >/dev/null; then
    printf "* Nothing new to publish.\n"
    if [ ${dry_run} == 1 ]; then
        printf "** DRY RUN SUCCESSFUL - Nothing published\n"
    else
        printf "** Existing documentation can be browsed at %s\n" "${publish_url}"
    fi
    exit 0
fi
set -o errexit

if [[ "${dry_run}" == 1 ]]; then
    printf "** DRY RUN SUCCESSFUL - Nothing pushed\n"
else
    git -C "${pub_dir_full_path}" push -q origin "${GH_BRANCH}"
    printf "** New version pushed, it can be browsed at %s\n" "${publish_url}"
fi
|
# git-together

Following in the footsteps of [git-pair][gp] and [git-duet][gd], but without
needing to change your existing git habits.
[gp]: https://github.com/pivotal/git_scripts
[gd]: https://github.com/git-duet/git-duet
## Installation
```bash
brew install pivotal/tap/git-together
```
## Configuration
Here's one way to configure `git-together`, but since it uses `git config` to
store information, there are many other ways to do it. This particular example
assumes a desire to store authors at the repo-level in a `.git-together` file.
```bash
# `git-together` is meant to be aliased as `git`
alias git=git-together
# Use .git-together per project for author configuration
git config --add include.path ../.git-together
# Or use one .git-together for all projects
git config --global --add include.path ~/.git-together
# Setting the default domain
git config --file .git-together --add git-together.domain rocinante.com
# Adding a couple authors
git config --file .git-together --add git-together.authors.jh 'James Holden; jholden'
git config --file .git-together --add git-together.authors.nn 'Naomi Nagata; nnagata'
# Adding an author with a different domain
git config --file .git-together --add git-together.authors.ca 'Chrisjen Avasarala; avasarala@un.gov'
```
## Usage
```bash
# Pairing
git with jh nn
# ...
git commit
# Soloing
git with nn
# ...
git commit
# Mobbing
git with jh nn ca
# ...
git commit
```
Soloing and mobbing are automatically set by the number of authors passed to
`git with`. `git-together` rotates authors by default after making a commit so
that the author/committer roles are fairly spread across the pair/mob over
time.
Aliases are supported as well. You can make git-together do its thing when you
use an alias for a committing command by configuring a comma-separated list of
aliases:
```bash
git config git-together.aliases ci,rv,m
# ...
git ci
```
By default, `git-together` sets and rotates pairs for a single local
repository. If you are working across multiple repos with a pair on a regular
basis, this can be difficult to set across all of them. The `--global` flag can
be passed along to set a global pair. `git-together` will still default to a
local repository, so if you'd like to reset from local to global, you can use
the `--clear` flag.
```bash
# Set for all repos
git with --global jh nn
# Override in single repo
git with nn
# Clear local and move back to global
git with --clear
```
## Technical Details
Because repo-level authors are common and there's no good way of configuring
`git config` on cloning a repo, `git-together` will automatically include
`.git-together` to `git config` if it exists. (See `GitConfig::auto_include`
for details.) This allows `git-together` to work immediately on cloning a repo
without manual configuration.
Under the hood, `git-together` sets `GIT_AUTHOR_NAME`, `GIT_AUTHOR_EMAIL`,
`GIT_COMMITTER_NAME`, and `GIT_COMMITTER_EMAIL` for the `commit`, `merge`, and
`revert` subcommands so that git commits have the correct attribution.
`git-together` also adds the `--signoff` argument to the `commit` and `revert`
subcommands so that the commit message includes the `Signed-off-by: ` line.
## Known Issues
`git-together` works by aliasing `git` itself, so there are going to be issues
with git's in-built aliases as well as other utilities (such as [Hub][hub])
that work in the same manner.
[hub]: https://hub.github.com/
## Development
### Rust version
Install rust using the [rustup][rustup] tool. Installing from homebrew won't work
because some nightly features of rust are needed to build.
Then, switch to the nightly with
```bash
rustup default nightly
```
### Bats
[Bats][bats] is a bash testing framework, used here for integration tests. This
can be installed with homebrew.
```bash
brew install bats
```
[rustup]: https://www.rustup.rs/
[bats]: https://github.com/sstephenson/bats
### Testing
```bash
cargo test
./bats/integration.bats
```
|
#!/bin/bash
# Install Go 1.9.2 into /usr/local/go and hand ownership to the circleci user.
#
# Fail fast on any error: without this, a failed download would still wipe the
# existing /usr/local/go and leave the machine with no Go toolchain at all.
set -euo pipefail

cd /tmp
wget https://redirector.gvt1.com/edgedl/go/go1.9.2.linux-amd64.tar.gz
sudo rm -rf /usr/local/go
sudo tar -C /usr/local -xzf go1.9.2.linux-amd64.tar.gz
sudo chown -R circleci /usr/local/go
|
import { Exception } from "@enterprize/exceptions";
/**
 * Exception indicating that an Array has a different number of dimensions than expected.
 *
 * @sinceVersion 1.0.0
 * @author Giancarlo Dalle Mole
 * @since 06/03/2020
 */
export class ArrayDimensionsOutOfRangeException extends Exception<ArrayDimensionsOutOfRangeExceptionDetails> {

    constructor(expectedDimensions: number, value: Array<any>) {
        // Build the message and the structured details before delegating to
        // the base Exception constructor.
        const message: string = `The expected number of dimensions for the Array is "${expectedDimensions}"`;
        const details: ArrayDimensionsOutOfRangeExceptionDetails = {
            expectedDimensions: expectedDimensions,
            value: value
        };
        super(message, details);
    }
}
/**
 * Details of {@link ArrayDimensionsOutOfRangeException}.
 *
 * @sinceVersion 1.0.0
 * @author Giancarlo Dalle Mole
 * @since 06/03/2020
 */
export type ArrayDimensionsOutOfRangeExceptionDetails = {
    /**
     * Number of expected dimensions.
     */
    expectedDimensions: number;
    /**
     * The Array whose actual number of dimensions differed from the expected value.
     */
    value: Array<any>
}
|
require 'fog/core'

# This can be removed once fog-google fixes
# https://github.com/fog/fog-google/issues/421, which will allow us to upgrade
# fog-core to 2.2.4.
#
# Temporarily silence fog deprecation warnings while loading providers that
# still use deprecated fog-core APIs; the logger is restored at the bottom.
original = Fog::Logger[:deprecation]
Fog::Logger[:deprecation] = nil

require 'fog/aliyun'
# Hack until https://github.com/fog/fog-aliyun/pull/155 is merged and released
Fog::Aliyun::Compute = Fog::Compute::Aliyun

require 'fog/aws'
require 'fog/local'
require 'fog/google'
require 'fog/azurerm'
require 'fog/openstack'

Fog::Logger[:deprecation] = original
|
# Adds a NOT NULL system_name column to services and plans, backfilling the
# value for every existing record (including soft-deleted ones).
class AddingSystemNames < ActiveRecord::Migration
  def self.up
    add_column :services, :system_name, :string
    add_column :plans, :system_name, :string

    # Pick up the freshly added column on the cached model classes.
    Plan.reset_column_information
    Service.reset_column_information

    [ Plan, Service ].each do |clazz|
      clazz.find_each(:with_deleted => true) do |model|
        begin
          puts "#{clazz.to_s} '#{model.name}...'"
          model.generate_system_name
          model.save!
        rescue
          # Saving can fail — presumably on a uniqueness validation of
          # system_name (confirm); retry with a random suffix to disambiguate.
          new = "#{model.system_name}_#{SecureRandom.hex(4)}"
          puts "Changing #{model.to_s} #{model.system_name} to #{new}"
          model.system_name = new
          model.name = model.system_name if model.name.blank? # the connect db is weird
          model.save!
        end
      end
    end

    # Only after every row has a value can the columns be made NOT NULL.
    change_column :services, :system_name, :string, :null => false
    change_column :plans, :system_name, :string, :null => false
  end

  def self.down
    remove_column :services, :system_name
    remove_column :plans, :system_name
  end
end
|
package com.jcl.exam.emapta.data.prefs
import android.content.Context
import android.content.SharedPreferences
import com.jcl.exam.emapta.encryption.EncryptionUtil
import javax.inject.Inject
/**
* Created by jaylumba on 05/16/2018.
*/
/**
 * Thin wrapper around [SharedPreferences] that transparently encrypts values
 * on write and decrypts them on read via [EncryptionUtil].
 */
class SecuredPrefs @Inject
constructor(context: Context) {
    private val SHARED_PREF_NAME = "TempPrefsName"

    // Non-null: getSharedPreferences always returns an instance, so the
    // previous nullable field and the `!!` operators were unnecessary.
    private val mPref: SharedPreferences =
        context.getSharedPreferences(SHARED_PREF_NAME, Context.MODE_PRIVATE)

    /** Removes every stored entry (asynchronously, via apply). */
    fun clear() {
        mPref.edit().clear().apply()
    }

    /**
     * Encrypts [value] and stores it under [key].
     * @return true if the synchronous commit succeeded.
     */
    fun save(key: String, value: String): Boolean {
        // A local Editor replaces the old mutable mEditor field, which could
        // leak state between calls.
        return mPref.edit().putString(key, EncryptionUtil.encrypt(value)).commit()
    }

    /**
     * Returns the decrypted value stored under [key], or "" when absent.
     */
    operator fun get(key: String): String {
        val value = mPref.getString(key, "") ?: ""
        return if (value == "") "" else EncryptionUtil.decrypt(value)
    }
}
|
<?php
declare(strict_types=1);
namespace AppBundle\Controller;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Method;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Route;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Template;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
class TagController extends AbstractBaseController
{
    /**
     * Returns the current user's matching tag names as a JSON array.
     *
     * @Route("/tags",name="tag_search")
     * @Template()
     * @Method("GET")
     */
    public function indexAction(Request $request)
    {
        $query = $this
            ->getTagRepository()
            ->search($request->get('query'), $this->getUser())
            ->select('t.name')
            ->getQuery()
        ;

        // Flatten the scalar rows (each ['name' => ...]) into a plain list.
        $names = array_map('current', $query->getScalarResult());

        $response = new Response(json_encode($names));
        $response->headers->set('Content-Type', 'application/json');

        return $response;
    }
}
|
package com.jfixby.r3.physics.body;
import com.jfixby.r3.api.physics.BODY_SHAPE_TYPE;
import com.jfixby.r3.api.physics.BoxBody;
import com.jfixby.r3.physics.ShapeData;
/** Physics body with a box-shaped collision shape. */
public class BoxBodyImpl extends BodyImpl implements BoxBody {
    public BoxBodyImpl () {
        // All box bodies share the BOX shape type; dimensions live in ShapeData.
        super(new ShapeData(BODY_SHAPE_TYPE.BOX));
    }
}
|
---
order: 5
title:
zh-CN: 新手教学
en-US: Onboarding
---
## zh-CN
`MWindow` 使创建自定义组件(如不同样式的幻灯片)变得很轻松。
## en-US
`MWindow` makes it easy to create custom components, such as a differently styled stepper.
|
import { define } from '../common';
/**
 * <x-format> custom element: on connection, replaces its text content with a
 * formatted number, currency amount, or date according to its attributes.
 *
 * Attributes: type ("number" | "currency" | "date"), locale (BCP 47 tag),
 * max-decimal, currency (ISO 4217 code), format (date pattern).
 */
export class XFormat extends HTMLElement {
  connectedCallback() {
    this.type = this.getAttribute('type') || 'number'; // number, currency, or date
    this.locale = this.getAttribute('locale') || 'en-US'; // ko-KR, en-US, etc.
    // NOTE(review): attribute values arrive as strings, so maxDecimal may be
    // e.g. "3" rather than 3; Intl.NumberFormat appears to coerce it — confirm.
    this.maxDecimal = this.getAttribute('max-decimal') || 2; // max decimal places
    this.currency = this.getAttribute('currency') || 'USD'; // currency code
    this.format = this.getAttribute('format') || getLocaleDateFormat(this.locale) ; // date format
    // Keep the original text so formatting reads the element's initial content.
    this.orgText = this.innerText
    try {
      // The element itself doubles as the options object for format().
      this.innerText = format(this.orgText, this.type, this);
    } catch(e) {
      this.innerText = e; // show error in case of error
    }
  }
}
// Register the element under the tag name <x-format>.
XFormat.define = define('x-format', XFormat);
/**
 * Returns a formatted value.
 * @param {string} text the value to format
 * @param {string} type 'number', 'currency', or 'date'
 * @param {object} options {locale, maxDecimal, currency, format}
 * @returns {string} the formatted value, or an error string for unknown types
 */
export function format(text, type, options) {
  const { locale='en-US', maxDecimal=2, currency='USD', format='yyyy-MM-dd'} = options;
  switch(type) {
    case 'number':
      return new Intl.NumberFormat(locale, { maximumFractionDigits: maxDecimal }).format(+text);
    case 'currency':
      return new Intl.NumberFormat(locale, { currency: currency, style: 'currency' }).format(+text);
    case 'date': {
      // Braces give the `const` its own block scope: a lexical declaration
      // directly inside a `case` leaks into the whole switch and is a common
      // lint error (no-case-declarations).
      const date = text ? new Date(text) : new Date();
      return formatDate(date, locale, format);
    }
    default:
      return '[error] invalid type ' + type;
  }
}
/**
 * Returns the conventional date format pattern for a locale, falling back to
 * ISO 'yyyy-MM-dd' for unrecognized countries.
 * Fix: the Finland pattern previously carried a trailing space
 * ('dd.MM.yyyy ') that leaked into every formatted date.
 * @param {string} locale e.g. 'en-US', 'fi-FI'
 * @returns {string} date format pattern
 */
function getLocaleDateFormat(locale) {
  const [language, country] = locale.toLowerCase().split(/[-_]/);
  return ['fi'].includes(country) ? 'dd.MM.yyyy' : // finland
    ['fr', 'th'].includes(country) ? 'dd/MM/yyyy' :
    ['it', 'no'].includes(country) ? 'dd.MM.yy' :
    ['es'].includes(country) ? 'dd-MM-yy' :
    ['us'].includes(country) ? 'MM-dd-yy' :
    ['gb'].includes(country) ? 'dd/MM/yy' : 'yyyy-MM-dd';
}
/**
 * Returns a date string in the format specified by the locale and format.
 * Fix: the token regex previously only matched lowercase 'h+', so the
 * documented 'HH:mm:ss' example was never substituted; 'H+' (24-hour) is now
 * supported alongside the original tokens.
 * @param {Date} date
 * @param {string} locale, e.g. 'en-US'
 * @param {string} format, e.g. 'yyyy-MM-dd', 'yyyy-MM-dd HH:mm:ss', 'weekday month dd, yyyy'
 * @returns {string} formatted date
 */
function formatDate(date, locale, format='yyyy-MM-dd') {
  // Locale-specific month / weekday names, long and short forms.
  // NOTE(review): the destructuring assumes Intl renders the month token
  // before the weekday token in this combined format — confirm for
  // non-English locales.
  const [month, weekday] = new Intl.DateTimeFormat(locale, {weekday: 'long', month: 'long'})
    .format(date).split(' ');
  const [mon, week] = new Intl.DateTimeFormat(locale, {weekday: 'short' ,month: 'short'})
    .format(date).split(' ');
  // Numeric parts keyed by the last letter of their format token.
  var z = {
    M: date.getMonth() + 1,
    d: date.getDate(),
    h: date.getHours(),
    H: date.getHours(), // 24-hour token, as advertised in the doc comment
    m: date.getMinutes(),
    s: date.getSeconds()
  };
  // Word tokens are first swapped for placeholder glyphs (⑫ month, ⑦ weekday)
  // so the numeric M/d/h/m/s replacements cannot corrupt their letters.
  const formatted = format
    .replace(/month/ig, _ => '⑫⑫').replace(/mon/ig, _ => '⑫')
    .replace(/weekday/ig, _ => '⑦⑦').replace(/week/ig, _ => '⑦')
    .replace(/(y{2,})/g, v => date.getFullYear().toString().slice(-v.length))
    .replace(/(M+|d+|h+|H+|m+|s+)/g, v => ((v.length > 1 ? '0' : '') + z[v.slice(-1)]).slice(-2))
    .replace(/⑫⑫/g, _ => month).replace(/⑫/g, _ => mon)
    .replace(/⑦⑦/ig, _ => weekday).replace(/⑦/ig, _ => week);
  return formatted;
}
|
/*
* Author's name and email: Michael
* Program description: Includes my.h and defines the functions
* print_foo and print that were declared in my.h.
* Latest version: 9:57 PM, 1/5/2020.
* Older versions:
*/
#include <iostream>
#include "my.h"
// Prints the global foo variable that was declared in my.h
// and defined in use.cpp.
void print_foo()
{
std::cout << "Value of foo is " << foo << ".\n";
}
// Prints its parameter to standard output.
void print(int i)
{
std::cout << "Value of i is " << i << ".\n";
}
|
using System;
using System.Xml;
using System.Text;
namespace AIMLbot.AIMLTagHandlers
{
/// <summary>
/// An element called bot, which may be considered a restricted version of get, is used to
/// tell the AIML interpreter that it should substitute the contents of a "bot predicate". The
/// value of a bot predicate is set at load-time, and cannot be changed at run-time. The AIML
/// interpreter may decide how to set the values of bot predicate at load-time. If the bot
/// predicate has no value defined, the AIML interpreter should substitute an empty string.
///
/// The bot element has a required name attribute that identifies the bot predicate.
///
/// The bot element does not have any content.
/// </summary>
public class bot : AIMLbot.Utils.AIMLTagHandler
{
/// <summary>
/// Ctor
/// </summary>
/// <param name="bot">The bot involved in this request</param>
/// <param name="user">The user making the request</param>
/// <param name="query">The query that originated this node</param>
/// <param name="request">The request inputted into the system</param>
/// <param name="result">The result to be passed to the user</param>
/// <param name="templateNode">The node to be processed</param>
public bot(AIMLbot.Bot bot,
AIMLbot.User user,
AIMLbot.Utils.SubQuery query,
AIMLbot.Request request,
AIMLbot.Result result,
XmlNode templateNode)
: base(bot, user, query, request, result, templateNode)
{
}
protected override string ProcessChange()
{
if (this.templateNode.Name.ToLower() == "bot")
{
if (this.templateNode.Attributes.Count == 1)
{
if (this.templateNode.Attributes[0].Name.ToLower() == "name")
{
string key = this.templateNode.Attributes["name"].Value;
return (string)this.bot.GlobalSettings.grabSetting(key);
}
}
}
return string.Empty;
}
}
}
|
package trypp.support.math
/**
* Convert an input value (between 0.0 and 1.0) to an output value (also between 0.0 and
* 1.0) but not necessarily at the same rate.
*/
interface Lerp {
    companion object {
        /** Identity interpolation: the output equals the input. */
        val LINEAR = object : Lerp {
            override fun apply(value: Float) = value
        }

        /** Runs from 0 to 1, decelerating toward the end (sine ease-out). */
        val EASE_OUT = object : Lerp {
            override fun apply(value: Float) =
                Math.sin((Angle.HALF_PI * value).toDouble()).toFloat()
        }

        /** Runs from 0 to 1, accelerating toward the end (cosine ease-in). */
        val EASE_IN = object : Lerp {
            override fun apply(value: Float) =
                1f - Math.cos((Angle.HALF_PI * value).toDouble()).toFloat()
        }

        /** Returns a lerp that plays the given lerp backwards. */
        fun reverse(lerp: Lerp): Lerp = object : Lerp {
            override fun apply(value: Float) = lerp.apply(1.0f - value)
        }

        /**
         * Returns a lerp that runs the given lerp forward over the first half
         * of the input range and backward over the second half.
         */
        fun bounce(lerp: Lerp): Lerp = object : Lerp {
            override fun apply(value: Float) =
                if (value <= 0.5f) lerp.apply(value * 2f)
                else lerp.apply((1.0f - value) * 2f)
        }
    }

    /**
     * Convert an input value (between 0.0 and 1.0) to an output value (also
     * between 0.0 and 1.0).
     */
    fun apply(value: Float): Float
}
|
/*
* Copyright (c) 2021 HopeBayTech.
*
* This file is part of Tera.
* See https://github.com/HopeBayMobile for further info.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "gtest/gtest.h"
#include "cstdlib"
#include <semaphore.h>
#include <string.h>
extern "C" {
#include "pkg_cache.h"
#include "fuseop.h"
#include <errno.h>
}
/*
 * djb2-style hash of a package name, reduced to a pkg_cache bucket index so
 * tests can predict which bucket a name lands in. Note the deliberate
 * precedence: ((h << 5) + h) is computed first, then XORed with the byte.
 */
int32_t hash_pkg(const char *input)
{
	int32_t h = 5381;

	for (int32_t i = 0; input[i] != '\0'; i++)
		h = ((h << 5) + h ^ input[i]);

	/* PKG_HASH_SIZE is a power of two, so masking yields a valid bucket. */
	return h & (PKG_HASH_SIZE - 1);
}
/*
* Unittest of init_pkg_cache()
*/
/* Fixture for init_pkg_cache(); no per-test setup or teardown is needed. */
class init_pkg_cacheTest : public ::testing::Test {
protected:
void SetUp()
{
}
void TearDown()
{
}
};
/* init_pkg_cache() must leave an empty cache with an unlocked semaphore. */
TEST_F(init_pkg_cacheTest, InitSuccess)
{
int32_t value;
init_pkg_cache();
/* A semaphore value of 1 means the cache lock is initialized and free. */
sem_getvalue(&pkg_cache.pkg_cache_lock, &value);
EXPECT_EQ(1, value);
/* Check structure */
EXPECT_EQ(0, pkg_cache.num_cache_pkgs);
for (int32_t i = 0; i < PKG_HASH_SIZE; i++) {
EXPECT_EQ(0, pkg_cache.pkg_hash[i].num_pkgs);
EXPECT_EQ(0, pkg_cache.pkg_hash[i].first_pkg_entry);
}
}
/*
* End of unittest of init_pkg_cache()
*/
/*
* Unittest of insert_cache_pkg()
*/
/* Fixture for insert_cache_pkg(): fresh cache per test, destroyed after. */
class insert_cache_pkgTest : public ::testing::Test {
protected:
void SetUp()
{
init_pkg_cache();
}
void TearDown()
{
destroy_pkg_cache();
}
};
/* Inserting distinct packages places each at the head of its hash bucket. */
TEST_F(insert_cache_pkgTest, InsertSomething)
{
uid_t uids[] = {2, 4, 6, 8};
std::string pkgname[] = {"a", "b", "c", "d"};
int32_t hash[4];
for (int32_t i = 0; i < 4; i++) {
insert_cache_pkg(pkgname[i].c_str(), uids[i]);
hash[i] = hash_pkg(pkgname[i].c_str());
}
/* Each name sits alone in its bucket with the right uid and no chain. */
for (int32_t i = 0; i < 4; i++) {
EXPECT_EQ(1, pkg_cache.pkg_hash[hash[i]].num_pkgs);
EXPECT_STREQ(pkgname[i].c_str(),
pkg_cache.pkg_hash[hash[i]].first_pkg_entry->pkgname);
EXPECT_EQ(uids[i],
pkg_cache.pkg_hash[hash[i]].first_pkg_entry->pkguid);
EXPECT_EQ(0,
pkg_cache.pkg_hash[hash[i]].first_pkg_entry->next);
}
EXPECT_EQ(4, pkg_cache.num_cache_pkgs);
}
/*
 * A bucket caps at MAX_PKG_ENTRIES: after inserting two more than the limit
 * into one bucket, the test expects only the newest MAX_PKG_ENTRIES entries
 * to survive (indices MAX+1 down to 2; the two oldest are gone).
 */
TEST_F(insert_cache_pkgTest, ElementLimitExceeds)
{
PKG_CACHE_ENTRY *now;
uid_t uids[MAX_PKG_ENTRIES + 2];
std::string pkgname[MAX_PKG_ENTRIES + 2];
char pkg_name[300];
int32_t idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES + 2; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
k++;
}
}
/* Insert all of them */
for (int32_t i = 0; i < k; i++)
insert_cache_pkg(pkgname[i].c_str(), uids[i]);
/* Verify */
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
idx = MAX_PKG_ENTRIES + 1; /* last one */
/* Walk newest-first down the chain. */
while (now) {
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx--;
}
EXPECT_EQ(1, idx);
}
/*
 * Re-inserting an existing package must not duplicate it; the test expects
 * the entry to move to the head of its bucket instead.
 */
TEST_F(insert_cache_pkgTest, InsertExistEntry)
{
PKG_CACHE_ENTRY *now;
uid_t uids[MAX_PKG_ENTRIES];
std::string pkgname[MAX_PKG_ENTRIES];
char pkg_name[300];
int32_t idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
k++;
}
}
/* Insert all of them */
for (int32_t i = 0; i < k; i++)
insert_cache_pkg(pkgname[i].c_str(), uids[i]);
/* head->7,6,5,4,3,2,1,0 */
insert_cache_pkg(pkgname[MAX_PKG_ENTRIES - 1].c_str(), uids[MAX_PKG_ENTRIES - 1]);
/* head->7,6,5,4,3,2,1,0 */
insert_cache_pkg(pkgname[MAX_PKG_ENTRIES / 2].c_str(), uids[MAX_PKG_ENTRIES / 2]);
/* head->4,7,6,5,3,2,1,0 */
insert_cache_pkg(pkgname[0].c_str(), uids[0]);
/* head->0,4,7,6,5,3,2,1 */
/* Verify */
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
EXPECT_STREQ(pkgname[0].c_str(), now->pkgname);
EXPECT_EQ(uids[0], now->pkguid);
now = now->next;
EXPECT_STREQ(pkgname[MAX_PKG_ENTRIES / 2].c_str(), now->pkgname);
EXPECT_EQ(uids[MAX_PKG_ENTRIES / 2], now->pkguid);
now = now->next;
idx = MAX_PKG_ENTRIES - 1;
/* The promoted indices are skipped WITHOUT advancing `now`: the current
 * node must be matched against the next lower idx instead. */
while (now) {
if (idx == (MAX_PKG_ENTRIES / 2) || idx == 0) {
idx--;
continue;
}
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx--;
}
EXPECT_EQ(0, idx);
}
/*
* End of unittest of insert_cache_pkg()
*/
/*
* Unittest of lookup_cache_pkg()
*/
/* Fixture for lookup_cache_pkg(): fresh cache per test, destroyed after. */
class lookup_cache_pkgTest : public ::testing::Test {
protected:
void SetUp()
{
init_pkg_cache();
}
void TearDown()
{
destroy_pkg_cache();
}
};
/* Every lookup on an empty cache must miss and leave the cache untouched. */
TEST_F(lookup_cache_pkgTest, LookupEmptyCache)
{
char pkg_name[300];
uid_t uid;
int32_t ret;
for (int32_t i = 0; i < 10000 ; i++) {
sprintf(pkg_name, "%d", i);
ret = lookup_cache_pkg(pkg_name, &uid);
ASSERT_EQ(-ENOENT, ret);
}
/* Check structure */
EXPECT_EQ(0, pkg_cache.num_cache_pkgs);
for (int32_t i = 0; i < PKG_HASH_SIZE; i++) {
EXPECT_EQ(0, pkg_cache.pkg_hash[i].num_pkgs);
EXPECT_EQ(0, pkg_cache.pkg_hash[i].first_pkg_entry);
}
}
/* Misses must not disturb the order or contents of a populated bucket. */
TEST_F(lookup_cache_pkgTest, LookupHitNothing)
{
PKG_CACHE_ENTRY *now;
char pkg_name[300];
uid_t uid;
uid_t uids[MAX_PKG_ENTRIES];
std::string pkgname[MAX_PKG_ENTRIES];
int32_t ret, idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
insert_cache_pkg(pkgname[k].c_str(), uids[k]);
k++;
}
}
/* Hit nothing */
for (int32_t i = 10000; i < 20000 ; i++) {
sprintf(pkg_name, "%d", i);
ret = lookup_cache_pkg(pkg_name, &uid);
ASSERT_EQ(-ENOENT, ret);
}
/* Check structure: insertion order is preserved, newest at the head. */
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
idx = MAX_PKG_ENTRIES - 1; /* last one */
while (now) {
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx--;
}
EXPECT_EQ(-1, idx);
}
/*
 * Repeated reverse-order hits: the test expects each hit to promote the
 * entry, so the final chain order is ascending (idx 0 at the head).
 */
TEST_F(lookup_cache_pkgTest, LookupHitManyTimes)
{
PKG_CACHE_ENTRY *now;
char pkg_name[300];
uid_t uid;
uid_t uids[MAX_PKG_ENTRIES];
std::string pkgname[MAX_PKG_ENTRIES];
int32_t ret, idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
insert_cache_pkg(pkgname[k].c_str(), uids[k]);
k++;
}
}
/* Hit many times with reverse order */
for (int32_t times = 0; times < 100000; times++) {
for (int32_t i = k-1; i >= 0 ; i--) {
ret = lookup_cache_pkg(pkgname[i].c_str(), &uid);
ASSERT_EQ(0, ret);
ASSERT_EQ(uids[i], uid);
}
}
/* Check structure */
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
idx = 0; /* first one */
while (now) {
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx++;
}
EXPECT_EQ(MAX_PKG_ENTRIES, idx);
}
/*
 * Same expectation as above, but each entry is hammered before moving to the
 * next one; the final order must still be ascending.
 */
TEST_F(lookup_cache_pkgTest, LookupHitManyTimes2)
{
PKG_CACHE_ENTRY *now;
char pkg_name[300];
uid_t uid;
uid_t uids[MAX_PKG_ENTRIES];
std::string pkgname[MAX_PKG_ENTRIES];
int32_t ret, idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
insert_cache_pkg(pkgname[k].c_str(), uids[k]);
k++;
}
}
/* Hit many times with reverse order */
for (int32_t i = k-1; i >= 0 ; i--) {
for (int32_t times = 0; times < 100000; times++) {
ret = lookup_cache_pkg(pkgname[i].c_str(), &uid);
ASSERT_EQ(0, ret);
ASSERT_EQ(uids[i], uid);
}
}
/* Check structure */
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
idx = 0; /* first one */
while (now) {
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx++;
}
EXPECT_EQ(MAX_PKG_ENTRIES, idx);
}
/*
* End of unittest of lookup_cache_pkg()
*/
/*
* Unittest of remove_cache_pkg()
*/
/* Fixture for remove_cache_pkg(): fresh cache per test, destroyed after. */
class remove_cache_pkgTest : public ::testing::Test {
protected:
void SetUp()
{
init_pkg_cache();
}
void TearDown()
{
destroy_pkg_cache();
}
};
/* Removing a middle entry keeps the rest of the chain intact and ordered. */
TEST_F(remove_cache_pkgTest, RemoveMediumSuccess)
{
PKG_CACHE_ENTRY *now;
char pkg_name[300];
uid_t uids[MAX_PKG_ENTRIES];
std::string pkgname[MAX_PKG_ENTRIES];
int32_t ret, idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
insert_cache_pkg(pkgname[k].c_str(), uids[k]);
k++;
}
}
/* head->7,6,5,4,3,2,1,0 */
ret = remove_cache_pkg(pkgname[MAX_PKG_ENTRIES / 2].c_str());
ASSERT_EQ(0, ret);
/* head->7,6,5,3,2,1,0 */
/* Check structure */
EXPECT_EQ(MAX_PKG_ENTRIES - 1, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES - 1, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
idx = MAX_PKG_ENTRIES - 1; /* first one */
/* The removed index is skipped WITHOUT advancing `now`. */
while (now) {
if (idx == MAX_PKG_ENTRIES / 2) {
idx--;
continue;
}
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx--;
}
EXPECT_EQ(-1, idx);
}
/* Removing the head entry promotes the next entry to the bucket head. */
TEST_F(remove_cache_pkgTest, RemoveHeadSuccess)
{
PKG_CACHE_ENTRY *now;
char pkg_name[300];
uid_t uids[MAX_PKG_ENTRIES];
std::string pkgname[MAX_PKG_ENTRIES];
int32_t ret, idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
insert_cache_pkg(pkgname[k].c_str(), uids[k]);
k++;
}
}
/* head->7,6,5,4,3,2,1,0 */
ret = remove_cache_pkg(pkgname[MAX_PKG_ENTRIES - 1].c_str());
ASSERT_EQ(0, ret);
/* head->6,5,4,3,2,1,0 */
/* Check structure */
EXPECT_EQ(MAX_PKG_ENTRIES - 1, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES - 1, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
idx = MAX_PKG_ENTRIES - 2; /* first one */
while (now) {
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx--;
}
EXPECT_EQ(-1, idx);
}
/* Removing the tail entry leaves the rest of the chain untouched. */
TEST_F(remove_cache_pkgTest, RemoveTailSuccess)
{
PKG_CACHE_ENTRY *now;
char pkg_name[300];
uid_t uids[MAX_PKG_ENTRIES];
std::string pkgname[MAX_PKG_ENTRIES];
int32_t ret, idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
insert_cache_pkg(pkgname[k].c_str(), uids[k]);
k++;
}
}
/* head->7,6,5,4,3,2,1,0 */
ret = remove_cache_pkg(pkgname[0].c_str());
ASSERT_EQ(0, ret);
/* head->7,6,5,4,3,2,1 (the tail, index 0, was removed) */
/* Check structure */
EXPECT_EQ(MAX_PKG_ENTRIES - 1, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES - 1, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
idx = MAX_PKG_ENTRIES - 1; /* first one */
while (now) {
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx--;
}
EXPECT_EQ(0, idx);
}
/* Removing names that were never inserted must fail and change nothing. */
TEST_F(remove_cache_pkgTest, RemoveHitNothing)
{
PKG_CACHE_ENTRY *now;
char pkg_name[300];
uid_t uids[MAX_PKG_ENTRIES];
std::string pkgname[MAX_PKG_ENTRIES];
int32_t ret, idx, k = 0;
/* Generate mock pkgname and uid in the same hash bucket */
for (int32_t i = 0; k < MAX_PKG_ENTRIES; i++) {
sprintf(pkg_name, "%d", i);
if (hash_pkg(pkg_name) == 0) {
pkgname[k] = std::string(pkg_name);
uids[k] = k;
insert_cache_pkg(pkgname[k].c_str(), uids[k]);
k++;
}
}
/* Hit nothing */
for (int32_t i = 10000; i < 20000 ; i++) {
sprintf(pkg_name, "%d", i);
ret = remove_cache_pkg(pkg_name);
ASSERT_EQ(-ENOENT, ret);
}
/* Check structure */
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.pkg_hash[0].num_pkgs);
EXPECT_EQ(MAX_PKG_ENTRIES, pkg_cache.num_cache_pkgs);
now = pkg_cache.pkg_hash[0].first_pkg_entry;
idx = MAX_PKG_ENTRIES - 1; /* first one */
while (now) {
EXPECT_STREQ(pkgname[idx].c_str(), now->pkgname);
EXPECT_EQ(uids[idx], now->pkguid);
now = now->next;
idx--;
}
EXPECT_EQ(-1, idx);
}
/*
* End of unittest of remove_cache_pkg()
*/
/*
* Unittest of destroy_pkg_cache()
*/
/* Fixture for destroy_pkg_cache(): fresh cache per test, destroyed after. */
class destroy_pkg_cacheTest : public ::testing::Test {
protected:
void SetUp()
{
init_pkg_cache();
}
void TearDown()
{
destroy_pkg_cache();
}
};
/* Destroying an empty cache is a no-op that leaves the structure zeroed. */
TEST_F(destroy_pkg_cacheTest, DestroyEmptyCache)
{
destroy_pkg_cache();
/* Check structure */
for (int32_t i = 0; i < PKG_HASH_SIZE; i++) {
EXPECT_EQ(0, pkg_cache.pkg_hash[i].num_pkgs);
EXPECT_EQ(0, pkg_cache.pkg_hash[i].first_pkg_entry);
}
EXPECT_EQ(0, pkg_cache.num_cache_pkgs);
}
/* Destroying a fully loaded cache must zero every bucket and the counter. */
TEST_F(destroy_pkg_cacheTest, DestroySuccess)
{
/* 10000 inserts are expected to fill every bucket to its entry limit. */
for (int32_t i = 0; i < 10000; i++) {
char pkg_name[300];
sprintf(pkg_name, "%d", i);
insert_cache_pkg(pkg_name, i);
}
for (int32_t i = 0; i < PKG_HASH_SIZE; i++)
ASSERT_EQ(MAX_PKG_ENTRIES, pkg_cache.pkg_hash[i].num_pkgs);
ASSERT_EQ(MAX_PKG_ENTRIES * PKG_HASH_SIZE, pkg_cache.num_cache_pkgs);
destroy_pkg_cache();
/* Check structure */
for (int32_t i = 0; i < PKG_HASH_SIZE; i++) {
ASSERT_EQ(0, pkg_cache.pkg_hash[i].num_pkgs);
ASSERT_EQ(0, pkg_cache.pkg_hash[i].first_pkg_entry);
}
ASSERT_EQ(0, pkg_cache.num_cache_pkgs);
}
|
package main
import (
"crypto/rand"
"github.com/ismdeep/args"
"math/big"
)
// RandStr returns a string of the given length whose characters are drawn
// uniformly at random (crypto/rand) from the runes of base.
// Errors from the random source are deliberately ignored, matching the
// original behavior.
func RandStr(base string, length int64) string {
	runes := []rune(base)
	limit := new(big.Int).SetInt64(int64(len(runes)))
	out := make([]rune, length)
	for pos := range out {
		idx, _ := rand.Int(rand.Reader, limit)
		out[pos] = runes[idx.Int64()]
	}
	return string(out)
}
// RandDigital Generate Random Digital
// Returns a random string of the given length drawn from BaseDigital.
func RandDigital(length int64) string {
return RandStr(BaseDigital, length)
}
// RandHex Generate Random Hex String
// Returns a random string of the given length drawn from BaseHex.
func RandHex(length int64) string {
return RandStr(BaseHex, length)
}
// RandDigitalAndAlphabet Generate Random String with digital and alphabet
// Returns a random string of the given length drawn from BaseNormal.
func RandDigitalAndAlphabet(length int64) string {
return RandStr(BaseNormal, length)
}
// GenPass Generate Password
// Builds a character set from the enabled classes (digits, lower case,
// upper case); falls back to BaseNormal when no class is selected.
//
// NOTE(review): the fuzzy parameter is never consulted — the
// "--without-fuzzy" command-line flag is read directly via args.Exists and
// overrides the assembled base entirely. Confirm whether the parameter or
// the flag is meant to be authoritative.
func GenPass(digital bool, lowerCase bool, upperCase bool, fuzzy bool, length int64) string {
base := ""
if digital {
base += BaseDigital
}
if lowerCase {
base += BaseLowerCaseAlphabet
}
if upperCase {
base += BaseUpperCaseAlphabet
}
if base == "" {
base = BaseNormal
}
if args.Exists("--without-fuzzy") {
base = BaseWithoutFuzzy
}
return RandStr(base, length)
}
|
{-# LANGUAGE TypeOperators, OverloadedStrings, DeriveGeneric #-}
module DB (
Category(..)
, Recipe(..)
, getCategoriesIO
, getRecipesIO
) where
import GHC.Generics (Generic)
import Database.Selda
import Database.Selda.Generic
import Database.Selda.SQLite
import Paths_ex5
-- | Absolute path of the bundled SQLite database, resolved via Cabal data files.
getDBPath :: IO FilePath
getDBPath = getDataFileName "data/test.db"
-- Selda table definitions mirroring the SQLite schema.
cats :: Table (Int :*: Text :*: Int)
cats = table "categories" $ primary "id" :*: required "name" :*: required "parent_id"
catsId :*: catsName :*: _ = selectors cats
-- Join table linking recipes to categories (many-to-many).
catList :: Table (Int :*: Int)
catList = table "category_list" $ required "recipe_id" :*: required "category_id"
catListRecId :*: catListCatId = selectors catList
recipes :: Table (Int :*: Maybe Text)
recipes = table "recipes" $ required "id" :*: optional "title"
recipesId :*: recipesTitle = selectors recipes
-- | Row of the categories table; Generic enables Selda's fromRels conversion.
data Category = Category
  { catId :: Int
  , catName :: Text
  , catParent :: Int
  } deriving (Show, Generic)
-- | Row of the recipes table; the title column is nullable.
data Recipe = Recipe
  { recipeId :: Int
  , recipeTitle :: Maybe Text
  } deriving (Show, Generic)
-- | Fetch every row of the categories table as a Category record.
getCategories :: MonadSelda m => m [Category]
getCategories = fromRels <$> (query $ select cats)
-- | Fetch the recipes that belong to at least one of the given categories,
-- via a subquery over the category_list join table.
getRecipes :: MonadSelda m => [Category] -> m [Recipe]
getRecipes cs = do
  let cis = fmap (int . catId) cs
  rs <- query $ do
    r <- select recipes
    restrict $ (r ! recipesId) `isIn` do
      cl_r_id :*: cl_cat_id <- select catList
      restrict $ cl_cat_id `isIn` cis
      return cl_r_id
    return r
  return $ fromRels rs
-- | Run a Selda action against the bundled SQLite database.
withDB :: SeldaM a -> IO a
withDB run = do
  dbPath <- getDBPath
  withSQLite dbPath run
-- IO convenience wrappers around the queries above.
getCategoriesIO :: IO [Category]
getCategoriesIO = withDB getCategories
getRecipesIO :: [Category] -> IO [Recipe]
getRecipesIO cs = withDB (getRecipes cs)
|
part of xlsio;
/// <summary>
/// Class used for YearToken.
/// </summary>
/// <summary>
/// Class used for YearToken.
/// </summary>
class _YearToken extends _FormatTokenBase {
/// <summary>
/// Regular expression for the year part of the format ('y'/'Y' runs).
/// </summary>
final _yearRegex = RegExp('[yY]+');
/// <summary>
/// Tries to parse format string.
/// </summary>
@override
int _tryParse(String strFormat, int iIndex) {
return _tryParseRegex(_yearRegex, strFormat, iIndex);
}
/// <summary>
/// Applies format to the value: 'yyy'+ renders the full year, 'y'/'yy'
/// renders the two-digit year (year modulo 100).
/// </summary>
@override
String _applyFormat(double value, bool bShowHiddenSymbols,
CultureInfo culture, _FormatSection section) {
// The OLE Automation date serial is converted to a DateTime first.
final DateTime date = Range._fromOADate(value);
final int iYear = date.year;
if (_strFormat.length > 2) {
return iYear.toString();
} else {
return (iYear % 100).toString();
}
}
/// <summary>
/// Applies format to the value. Year tokens carry no text form, so this
/// always yields an empty string.
/// </summary>
@override
// ignore: unused_element
String _applyFormatString(String value, bool bShowHiddenSymbols) {
return '';
}
/// <summary>
/// Gets type of the token. Read-only.
/// </summary>
@override
_TokenType get _tokenType {
return _TokenType.year;
}
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tech.cuda.woden.common.service.mysql.function
import me.liuwj.ktorm.expression.ArgumentExpression
import me.liuwj.ktorm.expression.FunctionExpression
import me.liuwj.ktorm.schema.*
import java.time.Instant
import java.time.LocalDate
import java.time.LocalDateTime
/**
* @author Jensen Qi <jinxiu.qi@alu.hit.edu.cn>
* @since 0.1.0
*/
/**
 * Builds a MySQL `json_contains(column, item, '$')` expression that tests
 * whether the JSON array column contains the given item.
 */
fun ColumnDeclaring<Set<Int>>.contains(item: Int): FunctionExpression<Boolean> =
    FunctionExpression(
        functionName = "json_contains",
        arguments = listOf(
            asExpression(),
            ArgumentExpression(item.toString(), VarcharSqlType),
            ArgumentExpression("$", VarcharSqlType)
        ),
        sqlType = BooleanSqlType
    )
/** Builds a MySQL `date(column)` expression truncating a datetime to a date. */
fun ColumnDeclaring<LocalDateTime>.toDate(): FunctionExpression<LocalDate> =
    FunctionExpression(
        functionName = "date",
        arguments = listOf(asExpression()),
        sqlType = LocalDateSqlType
    )
/** Builds a MySQL `unix_timestamp()` expression (current epoch seconds). */
fun unixTimestamp(): FunctionExpression<Long> =
    FunctionExpression(functionName = "unix_timestamp", arguments = emptyList(), sqlType = LongSqlType)
|
#!/bin/bash
# Reset the firewall: flush all rules and delete user-defined chains.
/sbin/iptables -F
/sbin/iptables -X
# Allow loopback traffic.
# NOTE(review): these two rules overlap — the second (-s 127.0.0.1) already
# covers the first; confirm whether both are intentional.
/sbin/iptables -A INPUT -s 127.0.0.1 -d 127.0.0.1 -j ACCEPT
/sbin/iptables -A INPUT -s 127.0.0.1 -j ACCEPT
# Accept all traffic from 114.114.114.114 (also matched by the SSH rule below).
/sbin/iptables -A INPUT -s 114.114.114.114 -j ACCEPT
# Allow replies to connections we initiated.
/sbin/iptables -A INPUT -m state --state ESTABLISHED,RELATED -j ACCEPT
/sbin/iptables -A OUTPUT -j ACCEPT
# Allow new SSH connections only from 114.114.114.114.
/sbin/iptables -A INPUT -s 114.114.114.114 -p tcp -m state --state NEW -m tcp --dport 22 -j ACCEPT
# Default-deny everything else inbound and forwarded.
/sbin/iptables -A INPUT -j REJECT
/sbin/iptables -A FORWARD -j REJECT
# NOTE(review): without a redirect this only prints the ruleset; the commented
# lines below show how to persist/restore it.
iptables-save
##iptables-save >/etc/iptables.up.rules
##iptables-restore </etc/iptables.up.rules
echo ok
|
import 'package:flutter/material.dart';
import 'package:flutter_board/flutter_board.dart';
import 'content_drawer.dart';
import 'content_markdown.dart';
/// Implements the basic material design visual layout structure of content
/// pages in an flutter board simple theme application.
/// Implements the basic material design visual layout structure of content
/// pages in an flutter board simple theme application.
class ContentPage extends StatelessWidget {
/// Whether it is the sub page on the main page or not.
final bool subPage;
/// Whether to use Liquid template or not.
final bool liquid;
/// Creates a visual scaffold for material design widgets of content pages.
ContentPage({key, this.subPage = false, this.liquid = false})
: super(key: key);
/// Describes the part of the user interface represented by this widget.
///
/// When the route arguments already carry a [BoardContext], the markdown
/// content is rendered immediately; otherwise the context is loaded
/// asynchronously behind a splash image.
@override
Widget build(BuildContext context) {
final arguments =
ModalRoute.of(context).settings.arguments as MenuPageArguments;
return Scaffold(
appBar: AppBar(
title: Text(arguments.title),
),
// Only top-level pages get the navigation drawer.
drawer: !subPage ? ContentDrawer() : null,
body: arguments.boardContext != null
? getContentWidget(context)
// NOTE(review): fixed 2-second delay before loading BoardContext —
// presumably splash-screen pacing; confirm it is intentional.
: FutureBuilder<BoardContext>(
future: Future.delayed(Duration(seconds: 2), () async {
return BoardContext.get();
}),
builder: (BuildContext context,
AsyncSnapshot<BoardContext> snapshot) {
if (snapshot.connectionState == ConnectionState.done) {
// Propagate the loaded context to every registered route
// before rendering the content.
arguments.routeGenerator.builderSettingsMap.forEach((k, v) {
v.arguments.boardContext = snapshot.data;
});
return getContentWidget(context);
} else {
return getSplashWidget(context);
}
}));
}
/// Gets a content markdown widget.
Widget getContentWidget(BuildContext context) =>
ContentMarkdown(liquid: liquid);
/// Gets a splash widget.
Widget getSplashWidget(BuildContext context) => Center(
child: Image.asset(
'packages/flutter_board_theme_simple/content/images/logo.png',
width: 320,
fit: BoxFit.fitWidth,
),
);
}
|
// sbt build definition for the money-transfer Play (Java) application.
name := """vgrilo-moneytransfer"""
version := "0.1"
lazy val root = (project in file(".")).enablePlugins(PlayJava)
scalaVersion := "2.11.11"
// Set JS Engine to use
//JsEngineKeys.engineType := JsEngineKeys.EngineType.Node
//libraryDependencies += javaJdbc
//libraryDependencies += cache
libraryDependencies += javaWs
// https://mvnrepository.com/artifact/org.json/json
libraryDependencies += "org.json" % "json" % "20170516"
// https://mvnrepository.com/artifact/org.hibernate.validator/hibernate-validator
libraryDependencies += "org.hibernate.validator" % "hibernate-validator" % "6.0.2.Final"
// sbt-assembly packaging: build a runnable fat jar that boots Play's
// production server.
mainClass in assembly := Some("play.core.server.ProdServerStart")
assemblyJarName := "vgrilo-moneytransfer.jar"
// Regex matching META-INF entries, which are discarded during the merge.
val meta = """META.INF(.)*""".r
// Resolve duplicate files when merging dependency jars into the fat jar.
assemblyMergeStrategy in assembly := {
case PathList("javax", "servlet", xs @ _*) => MergeStrategy.first
case PathList(ps @ _*) if ps.last endsWith ".html" => MergeStrategy.first
case n if n.startsWith("reference.conf") => MergeStrategy.concat
case n if n.endsWith(".conf") => MergeStrategy.concat
case meta(_) => MergeStrategy.discard
case x => MergeStrategy.first
}
|
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for the `grades` table (employee grade with its leave and
 * attendance policy fields).
 */
class Grade extends Model
{
    /** @var string[] Attributes that may be mass assigned. */
    protected $fillable = [
        'grade',
        'designation_id',
        'leave_deduction',
        'office_time',
        'late_attendance_fee',
    ];
    /** @var string Backing database table. */
    protected $table = 'grades';
}
|
# Changelog
All notable changes to my ESLint config will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
<!-- ## [Unreleased] -->
## [6.0.0] - 2021-12-18
### BREAKING CHANGES
- Changed rule 'camelCase' to '@typescript-eslint/naming-convention'
### Added
- Support for eslint@8
- Support for eslint-plugin-sonarjs@0.11
## [5.3.0] - 2021-11-23
### Added
- Support for eslint-plugin-unicorn@39
## [5.2.0] - 2021-11-08
### Added
- Support for eslint-plugin-unicorn@38
## [5.1.0] - 2021-11-01
### Added
- Support for @typescript-eslint/eslint-plugin@5
- Support for eslint-plugin-jest@25
- Support for eslint-plugin-unicorn@37
## [5.0.1] - 2021-09-30
### No changes
- Version bump.
## [5.0.0] - 2021-09-29
### BREAKING CHANGES
- Updated eslint-plugin-jest peerDependency to 24.5.0.
### Added
- Support for eslint-plugin-sonarjs@^0.10.0
- Support for eslint-plugin-unicorn@^36.0.0
## [4.0.0] - 2021-07-27
### BREAKING CHANGES
- Added eslint-plugin-jest to peerDependencies.
- Added eslint-plugin-unicorn to peerDependencies.
- Changed rule 'tslint/object-literal-sort-keys' to 'sort-keys'
- Dropped support for Node.js 10.
### Added
- Linting and special exceptions for Jest testing files.
- Support for eslint-plugin-sonarjs@^0.8.0
- Support for eslint-plugin-sonarjs@^0.9.0
### Removed
- @typescript-eslint/eslint-plugin-tslint
- tslint
## [3.1.0] - 2021-05-05
### Added
- Support for eslint-plugin-sonarjs@^0.6.0
- Support for eslint-plugin-sonarjs@^0.7.0
## [3.0.0] - 2021-01-10
### BREAKING CHANGES
- Upgraded to @typescript-eslint/eslint-plugin 4.12.0.
- Upgraded to @typescript-eslint/eslint-plugin-tslint 4.12.0.
- Upgraded to eslint 7.17.0.
### Fixed
- No-shadow rule errors when using enums.
## [2.0.0] - 2020-09-07
### BREAKING CHANGES
- Changed 'optionalDependencies' to 'peerDependencies'.
- Upgraded to @typescript-eslint/eslint-plugin 4.1.1.
- Upgraded to @typescript-eslint/eslint-plugin-tslint 4.1.1.
- Upgraded to eslint 7.9.0.
### Added
- Testing
## [1.0.4] - 2020-06-19
### Fixed
- Security vulnerability in dependencies.
## [1.0.3] - 2020-03-22
### Fixed
- Security vulnerability in dependencies.
## [1.0.2] - 2020-03-22
### Fixed
- Security vulnerability in dependencies.
## [1.0.1] - 2020-01-30
### Fixed
- Wrong Switch-case default being set.
## [1.0.0] - 2020-01-24
### Added
- Initial version.
[Unreleased]: https://github.com/Ionaru/eslint-config/compare/6.0.0...HEAD
[6.0.0]: https://github.com/Ionaru/eslint-config/compare/5.3.0...6.0.0
[5.3.0]: https://github.com/Ionaru/eslint-config/compare/5.2.0...5.3.0
[5.2.0]: https://github.com/Ionaru/eslint-config/compare/5.1.0...5.2.0
[5.1.0]: https://github.com/Ionaru/eslint-config/compare/5.0.1...5.1.0
[5.0.1]: https://github.com/Ionaru/eslint-config/compare/5.0.0...5.0.1
[5.0.0]: https://github.com/Ionaru/eslint-config/compare/4.0.0...5.0.0
[4.0.0]: https://github.com/Ionaru/eslint-config/compare/3.1.0...4.0.0
[3.1.0]: https://github.com/Ionaru/eslint-config/compare/3.0.0...3.1.0
[3.0.0]: https://github.com/Ionaru/eslint-config/compare/2.0.0...3.0.0
[2.0.0]: https://github.com/Ionaru/eslint-config/compare/1.0.4...2.0.0
[1.0.4]: https://github.com/Ionaru/eslint-config/compare/1.0.3...1.0.4
[1.0.3]: https://github.com/Ionaru/eslint-config/compare/1.0.2...1.0.3
[1.0.2]: https://github.com/Ionaru/eslint-config/compare/1.0.1...1.0.2
[1.0.1]: https://github.com/Ionaru/eslint-config/compare/1.0.0...1.0.1
[1.0.0]: https://github.com/Ionaru/eslint-config/compare/2c91352...1.0.0
|
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated from a template.
//
//     Manual changes to this file may cause unexpected behavior in your application.
//     Manual changes to this file will be overwritten if the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace Farming.WpfClient.Models
{
    using System;
    using System.Data.Entity;
    using System.Data.Entity.Infrastructure;

    /// <summary>
    /// Entity Framework (Database First) context for the Farming database.
    /// Connects via the "FarmingEntities" connection string; this file is
    /// generated from the model, so manual edits will be overwritten.
    /// </summary>
    public partial class FarmingEntities : DbContext
    {
        public FarmingEntities()
            : base("name=FarmingEntities")
        {
        }

        /// <summary>
        /// Database First contexts never build a Code First model, so any
        /// attempt to do so is surfaced as an error.
        /// </summary>
        protected override void OnModelCreating(DbModelBuilder modelBuilder)
        {
            throw new UnintentionalCodeFirstException();
        }

        // One DbSet per mapped table in the EDMX model.
        public virtual DbSet<Bull> Bulls { get; set; }
        public virtual DbSet<BloodType> BloodTypes { get; set; }
        public virtual DbSet<Category> Categories { get; set; }
        public virtual DbSet<Line> Lines { get; set; }
        public virtual DbSet<Log> Logs { get; set; }
        public virtual DbSet<Breed> Breeds { get; set; }
        public virtual DbSet<Priplod> Priplods { get; set; }
        public virtual DbSet<Productivity> Productivities { get; set; }
        public virtual DbSet<Family> Families { get; set; }
        public virtual DbSet<Retirement> Retirements { get; set; }
        public virtual DbSet<MethodSluchki> MethodsSluchki { get; set; }
        public virtual DbSet<Gender> Genders { get; set; }
        public virtual DbSet<User> Users { get; set; }
        public virtual DbSet<Cow> Cows { get; set; }
        public virtual DbSet<Reproduction> Reproductions { get; set; }
        public virtual DbSet<UserType> UsersTypes { get; set; }
    }
}
|
class ActivationsController < ApplicationController
  before_action :load_repo
  before_action :ensure_repo_allowed

  # Activates @repo for the current user and re-renders the repo row,
  # or redirects back to the repo list with the service's error message.
  def create
    run_repo_service(ActivateRepo, "Activation error")
  end

  # Deactivates @repo, mirroring #create.
  def destroy
    run_repo_service(DeactivateRepo, "Deactivation error")
  end

  private

  # Shared invocation/response handling for the activate/deactivate
  # services. service_class must respond to .new(repo, github_token),
  # #call (truthy on success) and #error_messages_formatted.
  def run_repo_service(service_class, fallback_error)
    service = service_class.new(@repo, current_user.github_token)

    if service.call
      render :reload_repo
    else
      flash[:error] =
        service.error_messages_formatted.presence || fallback_error
      redirect_to :repos
    end
  end

  # Loads @repo scoped to the current user; redirects with an error when
  # the id is unknown (also guards against acting on другого user's repo).
  def load_repo
    @repo = current_user.repos.with_membership_status.
      find_by(id: params[:repo_id])

    unless @repo
      flash[:error] = "Repo not found"
      redirect_to :repos
    end
  end

  # Private repos are not supported; bounce back to the repo list.
  def ensure_repo_allowed
    if @repo.private?
      flash[:error] = "Sorry, private repos are not supported yet."
      redirect_to :repos
    end
  end
end
|
module PGNParser.PGNParser where
import Prelude (String, read)
import Protolude hiding (try, (<|>), many, option)
import Text.ParserCombinators.Parsec
import Text.Parsec.Prim hiding (try)
import PGNParser.Data.Metadata
import PGNParser.Data.Move
import qualified JonnyH.Ply as Ply (Ply(..))
import JonnyH.Square
import JonnyH.Piece.Common
import qualified JonnyH.Color as Color
-- | Parser over 'String' input with user state @u@.
type PGNParser u = ParsecT String u Identity

-- | A parsed PGN game: the tag-pair metadata plus the flat move list.
data Game = Game Metadata [Move]
-- | Parse a single SAN ply of the shape piece-capture-square, e.g. "Nxe5".
--
-- NOTE(review): the parsed piece, capture marker and rank are bound but
-- never used, and a hard-coded white-knight move to @(file, 1)@ is
-- returned instead.  This looks like an unfinished stub -- confirm the
-- intended behavior before relying on this parser.
parsePly :: PGNParser u Ply.Ply
parsePly = do
    piece <- oneOf "NBRQK"      -- piece letter (pawn moves not handled here)
    capture <- char 'x'         -- mandatory capture marker
    file <- oneOf "abcdefgh"
    rank <- oneOf "12345678"
    return $ Ply.Move (Piece Color.White Knight) (Square file 1)
-- | Parse one half-move (ply) in SAN notation, e.g. "Nf3", "exd5",
-- "O-O" or "e8=Q+".  Accepts any non-empty run of SAN characters and
-- performs no structural validation.
--
-- Fix: added the explicit type signature every other top-level binding
-- in this module already has (the inferred type is identical).
parseSingleMove :: PGNParser u String
parseSingleMove =
    many1 (oneOf "abcdefgh12345678NBRQKxOO+-=")
-- | Parse one numbered full move: move number, a dot, white's ply and an
-- optional black ply (black's ply may be absent at the end of a game).
--
-- Fix: removed a stale commented-out return expression.
parseMove :: PGNParser u Move
parseMove = do
    n <- many1 digit
    _ <- char '.'
    w <- parseSingleMove
    _ <- space
    b <- option "" parseSingleMove
    return $ Move (read n :: Int) w b
-- | Parse a game result tag: "1-0", "0-1" or "1/2-1/2".
parseResult :: PGNParser u Move
parseResult = do
    let score = try (string "1/2") <|> string "1" <|> string "0"
    w <- score
    _ <- char '-'
    b <- score
    return $ GameResult w b
-- | Parse the '*' marker denoting an unfinished game.
parseUnfinished :: PGNParser u Move
parseUnfinished = char '*' >> return Unfinished
-- | Parse one line of space-separated moves; a line may end with a
-- result ("1-0", "0-1", "1/2-1/2") or the unfinished marker '*'.
parseMoveLine :: PGNParser u [Move]
parseMoveLine = sepBy
    (try parseMove <|> try parseResult <|> parseUnfinished)
    (many $ char ' ')
-- | Parse one newline-terminated tag-pair line, e.g. @[Event "Match"]@.
-- Only labels listed in 'metaLabels' are accepted; the quoted value may
-- be any text without a double quote.
parseMetaLine :: PGNParser () Tag
parseMetaLine = do
    _ <- char '['
    e <- choice $ map (try . string) metaLabels
    spaces
    _ <- char '"'
    s <- many $ noneOf "\""
    _ <- string "\"]\n"
    return $ Tag (stringToTagKey e) s
-- | Parse one complete game: a block of tag-pair lines, a blank line,
-- then newline-terminated move lines (flattened into one move list).
parseGame :: PGNParser () Game
parseGame = do
    meta <- many1 parseMetaLine
    _ <- newline
    moves <- endBy parseMoveLine newline
    return $ Game (tl2meta meta) (concat moves)
-- | Parse consecutive games until the input is exhausted.
parseGames :: PGNParser () [Game]
parseGames = many parseGame

-- | Parse every game contained in the PGN file at the given path.
parseFile :: String -> IO (Either ParseError [Game])
parseFile = parseFromFile parseGames
|
package tech.cryptonomic.conseil.api.routes.platform.data.tezos
import akka.http.scaladsl.server.Route
import endpoints.akkahttp.server.Endpoints
import endpoints.algebra.Documentation
import tech.cryptonomic.conseil.api.routes.validation.Validation.QueryValidating
import tech.cryptonomic.conseil.api.routes.platform.data.ApiServerJsonSchema
import tech.cryptonomic.conseil.api.routes.platform.data.ApiValidation.defaultValidated
import tech.cryptonomic.conseil.common.tezos.Tables
/** Helper class wiring validation and JSON encoding for the Tezos data routes. */
private[tezos] class TezosDataHelpers extends Endpoints with TezosDataEndpoints with ApiServerJsonSchema {

  /** Method for validating query request; delegates to the shared default validator. */
  override def validated[A](
      response: A => Route,
      invalidDocs: Documentation
  ): QueryValidating[A] => Route =
    defaultValidated(response, invalidDocs)

  /** Represents the function, that is going to encode the blockchain specific data types
    * (Tezos table rows) using their generated schemas.
    */
  override protected def customAnyEncoder = {
    case x: Tables.BlocksRow => blocksRowSchema.encoder.encode(x)
    case x: Tables.AccountsRow => accountsRowSchema.encoder.encode(x)
    case x: Tables.OperationGroupsRow => operationGroupsRowSchema.encoder.encode(x)
    case x: Tables.OperationsRow => operationsRowSchema.encoder.encode(x)
  }
}
|
package main
import (
"fmt"
"os"
"github.com/aws/aws-cdk-go/awscdk/v2"
"github.com/aws/aws-cdk-go/awscdk/v2/awsapigateway"
"github.com/aws/constructs-go/constructs/v10"
"github.com/aws/jsii-runtime-go"
"github.com/fogfish/scud"
)
// vsn returns the value of the "vsn" CDK context key for the app, falling
// back to "latest" when the key is absent or is not a string.
func vsn(app awscdk.App) string {
	if val, ok := app.Node().TryGetContext(jsii.String("vsn")).(string); ok {
		return val
	}
	return "latest"
}
func main() {
	//
	// Global config
	//
	app := awscdk.NewApp(nil)

	// Deploy to the account/region of the current CLI credentials.
	config := &awscdk.StackProps{
		Env: &awscdk.Environment{
			Account: jsii.String(os.Getenv("CDK_DEFAULT_ACCOUNT")),
			Region:  jsii.String(os.Getenv("CDK_DEFAULT_REGION")),
		},
	}

	//
	// Stack
	//
	// The stack name carries the version so several versions can coexist.
	stackID := fmt.Sprintf("blueprint-golang-%s", vsn(app))
	stack := awscdk.NewStack(app, jsii.String(stackID), config)

	NewBlueprint(stack)

	app.Synth(nil)
}
//
// NewBlueprint create example REST api: an API Gateway named "scud" with a
// Go Lambda handler mounted under the /scud resource.
func NewBlueprint(scope constructs.Construct) {
	gateway := scud.NewGateway(scope, jsii.String("Gateway"),
		&awsapigateway.RestApiProps{
			RestApiName: jsii.String("scud"),
		},
	)

	// Lambda built from the named package/command in this repository.
	myfun := scud.NewFunctionGo(scope, jsii.String("MyFun"),
		&scud.FunctionGoProps{
			SourceCodePackage: "github.com/fogfish/blueprint-serverless-golang",
			SourceCodeLambda:  "cmd/lambda/scud",
		},
	)

	gateway.AddResource("scud", myfun)
}
|
import * as es6 from "../../assets/vita/es6.png";
import * as react from "../../assets/vita/react.png";
import * as redux from "../../assets/vita/redux.png";
import * as styled from "../../assets/vita/styled-components.png";
import * as css3 from "../../assets/vita/css3.png";
import * as webpack from "../../assets/vita/webpack.png";
import * as npm from "../../assets/vita/npm.png";
import * as jenkins from "../../assets/vita/jenkins.png";
import * as gitlabci from "../../assets/vita/gitlab-ci.png";
import * as shell from "../../assets/vita/shell.png";
import * as ubuntu from "../../assets/vita/ubuntu.png";
import * as docker from "../../assets/vita/docker.png";
import * as python from "../../assets/vita/python.png";
import * as ts from "../../assets/vita/ts.png";
import * as git from "../../assets/vita/git.png";
import * as mysql from "../../assets/vita/mysql.png";
import * as aws from "../../assets/vita/aws.png";
// Personal profile and job-seeking intention shown in the vita header.
// Age and years-of-experience are derived from the current year.
export const info = {
  name: `王志明`,
  position: `H5前端研发工程师`,
  sub: {
    英文名: `Fox`,
    性别: `男`,
    祖籍: `上海`,
    年龄: `${new Date().getFullYear() - 1981}岁`,
    工作地点: `上海浦东`,
    工龄: `${new Date().getFullYear() - 2006}年`,
    政治面貌: `群众`,
    兴趣: `阅读,旅游`,
    运动: `马拉松`,
  },
  intention: {
    工作类型: `全职`,
    期望月薪: `15000+RMB/月`,
    期望职业: `H5前端研发`,
    期望行业: `任意`,
  },
};
// Employment history, most recent first.
export const work = [
  {
    time: `2010年08月 - 2019年08月`,
    name: `上海钦文信息科技有限公司 (爱乐奇)`,
    position: `H5前端研发工程师`,
    introduction: `本人于2010年08月入职担任软件工程师一职。主要负责公司的一系列少儿英语培训教学软件项目前端的研发工作,同时也公司内部工具项目开发,项目自动化集成,自动化运维,数据分析等工作。`,
  },
  {
    time: `2006年11月 - 2009年04月`,
    name: `上海立派信息科技有限公司`,
    position: `对日软件开发工程师`,
    introduction: `主要负责是对日外包项目的开发和测试,项目的业务主要是物流方面的,用的主要技术是java和oracle。通过业余时间的学习,获得了《Oracle Certified Associate》证书`,
  },
];
// Project history, most recent first. Multi-line descriptions are built
// from template-literal concatenation with explicit "\n" separators.
export const project = [
  {
    time: `2017年05月 - 2019年08月`,
    name: `爱乐奇视频外教-线上智慧课堂(pc端mac端)`,
    introduction:
      `职位名称:HTML5前端工程师\n` +
      `项目规模:前端5人,后端,QA,运维若干。\n` +
      `项目简介:视频外教是爱乐奇公司为培训学校提供的一套教学解决方案。通过在线视频,让外教老师授课,提升学员英语听说和应用能力。(https://www.alo7.com/services/izj.html)\n` +
      `主要技术栈:\n` +
      `1. 底层:js(ES6), TS。\n` +
      ` 用TS的目的是TS数据要求带有明确的类型,便于在项目逐渐变大以后方便团队协作。\n` +
      `2. 封装:electron 解决了跨平台,自动更新,安装应用\n` +
      `3. UI:css3,styled-components\n` +
      ` styled-components可以解决css中不能带逻辑,可以像用组件的方式的来复用css的样式。\n` +
      `4. 渲染层: react提升UI渲染性能,提高用户体验。 UI组件模块化,提高可复用性。 \n` +
      `5.视频服务由第三方提供`,
    duty:
      `主要负责视频外教项目的pc端和mac端的开发,视频流服务商对接。\n` +
      `具体内容包括:\n` +
      `1.根据产品设计师提供的PRD和交互设计师提供的UI,开发并实现学生端,教师端相关页面开发,动画特效。 主要技术栈:electron,react,styled-components\n` +
      `2.根据运维要求,每两周一次功能迭代,自动化集成发布上线。主要技术栈:webpack, glup, gitlab-ci, jenkins, docker, shell。\n` +
      `3.采集用户相关的数据和错误栈,管理和监控项目健康状况。 主要技术栈:kibana,elasticsearch, bi。`,
  },
  {
    time: `2015年04月 - 2017年01月`,
    name: `老师教学中心-互动课件(pc端mac端)`,
    introduction:
      `职位名称:HTML5前端工程师\n` +
      `主要技术栈:electron + JS + react + flash/as3\n` +
      `项目规模:前端平台端4人,前端内容端、后端、QA若干。\n` +
      `项目简介:项目主要是用于电子互动白板上运行的课件。`,
    duty:
      `主要负责平台端和内容端相关开发\n` +
      `具体内容:\n` +
      `1. 前端部分UI功能。 主要技术栈:react css3\n` +
      `2. 内容端部分题型实现。主要技术栈:react css3\n` +
      `3. 内容端的PPT播放器功能实现。 主要技术栈: keynote,js`,
  },
  {
    time: `2013年10月 - 2015年01月`,
    name: `爱作业`,
    introduction:
      `职位名称:AS3前端工程师\n` +
      `主要技术栈:flash/as3/stage3D starling\n` +
      `项目简介:爱作业是爱乐奇英语配套的课后作业平台。项目使用跨平台技术,可以一套代码在ios端,安卓端,pc端运行。`,
    duty: `负责相关题型开发`,
  },
  {
    time: `2010年08月 - 2013年12月`,
    name: `爱乐奇虚拟世界(2020年5月31日下线)`,
    introduction:
      `职位名称:flash/as3工程师\n` +
      `主要技栈:flex actionscript3\n` +
      `项目规模:前端10人,后端等若干。\n` +
      `项目简介:少儿学习英语的平台,分为平台端和内容端。平台端的内容主要是展现整个虚拟世界的地图场景,运营活动,个人信息等等。内容端有练习(learning app),益智游戏,商店,宠物养成等等。`,
    duty:
      `主要负责内容有:平台端场景,个人信息、班级、会员管理已经。内容端的部分题型,商店。\n` +
      `具体内容包括:\n` +
      `1. 平台端:场景,个人信息、班级、会员管理,平台和app对接。主要技术:GC,寻路。\n` +
      `2. 内容端:部分游戏,题型。主要技术:骨骼动画(DragonBones)。\n` +
      `3. 其他:性能优化,项目自动化集成,代码混淆,线上错误收集和排查。主要技术:jenkins,shell,python,ruby,mysql`,
  },
];
// Education history, oldest first.
export const education = [
  {
    time: `1997年09月 - 2000年06月`,
    name: `广西柳州市铁路第一中学`,
  },
  {
    time: `2000年09月 - 2004年06月`,
    name: `中南大学 计算机科学与技术专业(全日制,学历可查) 本科 学士学位`,
  },
];
// Primary skills rendered as bars: icon, label and proficiency (0-100).
export const tech = [
  {
    bg: es6,
    name: `es6`,
    width: 90,
  },
  {
    bg: react,
    name: `react`,
    width: 90,
  },
  {
    bg: redux,
    name: `redux`,
    width: 75,
  },
  {
    bg: css3,
    name: `css3`,
    width: 75,
  },
  {
    bg: webpack,
    name: `webpack`,
    width: 80,
  },
  {
    bg: styled,
    name: `styled-components`,
    width: 85,
  },
];
// Secondary skills grouped by category; each entry is icon + label only.
export const subTech = [
  {
    category: `自动化集成`,
    tech: [
      {
        bg: npm,
        name: `npm`,
      },
      {
        bg: gitlabci,
        name: `gitlab-ci`,
      },
      {
        bg: jenkins,
        name: `jenkins `,
      },
    ],
  },
  {
    category: `服务器`,
    tech: [
      {
        bg: ubuntu,
        name: `ubuntu`,
      },
      {
        bg: shell,
        name: `shell`,
      },
      {
        bg: docker,
        name: `docker`,
      },
      {
        bg: aws,
        name: `云服务器`,
      },
    ],
  },
  {
    category: `其他语言`,
    tech: [
      {
        bg: python,
        name: `python`,
      },
      {
        bg: ts,
        name: `TypeScript`,
      },
    ],
  },
  {
    category: `其他技术`,
    tech: [
      {
        bg: git,
        name: `git`,
      },
      {
        bg: mysql,
        name: `mysql`,
      },
    ],
  },
];
|
module Data.Streaming.FileReadSpec (spec) where
import Test.Hspec
import qualified Data.ByteString as S
import qualified Data.Streaming.FileRead as F
import Control.Exception (bracket)
-- | Reads the LICENSE file both via the chunked FileRead API and via
-- strict ByteString IO and checks the two contents agree.
spec :: Spec
spec = describe "Data.Streaming.FileRead" $ do
    it "works" $ do
        let fp = "LICENSE"
        expected <- S.readFile fp
        actual <- bracket (F.openFile fp) F.closeFile $ \fh -> do
            -- Accumulate chunks as a difference list so appends stay O(1);
            -- an empty chunk signals end-of-file.
            let loop front = do
                    bs <- F.readChunk fh
                    if S.null bs
                        then return $ S.concat $ front []
                        else loop (front . (bs:))
            loop id
        actual `shouldBe` expected
|
<?php
/**
* @filesource index/views/ieedit.php
* @link http://www.kotchasan.com/
* @copyright 2016 Goragod.com
* @license http://www.kotchasan.com/license/
*/
namespace Index\Ieedit;
use \Kotchasan\Http\Request;
use \Kotchasan\Html;
use \Kotchasan\Language;
/**
* module=ieedit
*
* @author Goragod Wiriya <admin@goragod.com>
*
* @since 1.0
*/
class View extends \Gcms\View
{

    /**
     * Income/expense entry form (add or edit one transaction record).
     *
     * @param Request $request
     * @param object $index current record: status, wallet, amounts, date, ...
     * @return string rendered form HTML
     */
    public function render(Request $request, $index)
    {
        // Fieldset title per transaction type.
        $status = array(
            'IN' => '{LNG_Income}',
            'OUT' => '{LNG_Expense}',
            'TRANSFER' => '{LNG_Transfer between accounts}',
            'INIT' => '{LNG_Summit}',
        );
        // form
        $form = Html::create('form', array(
            'id' => 'product',
            'class' => 'setup_frm',
            'autocomplete' => 'off',
            'action' => 'index.php/index/model/ierecord/submit',
            'onsubmit' => 'doFormSubmit',
            'token' => true,
            'ajax' => true
        ));
        $fieldset = $form->add('fieldset', array(
            'title' => $status[$index->status]
        ));
        if ($index->status == 'IN' || $index->status == 'OUT') {
            // category_id: selectable only for plain income/expense records
            $fieldset->add('select', array(
                'id' => 'write_category',
                'itemClass' => 'item',
                'labelClass' => 'g-input icon-category',
                'label' => '{LNG_Category}',
                'options' => \Index\Select\Model::ieCategories($index->owner_id, $index->status),
                'value' => $index->category_id
            ));
        } else {
            // category_id (transfers and opening balances carry no category)
            $fieldset->add('hidden', array(
                'id' => 'write_category',
                'value' => 0
            ));
        }
        if ($index->status == 'TRANSFER') {
            // Transfers lock wallet/amount/date; label shows the direction.
            $label = $index->income > 0 ? '{LNG_To}' : '{LNG_From}';
            $disabled = true;
        } else {
            $label = '{LNG_Wallet}';
            $disabled = false;
        }
        if ($index->status == 'INIT') {
            // wallet: read-only wallet name for opening-balance records
            $wallet = \Index\Select\Model::wallets($index->owner_id);
            $fieldset->add('text', array(
                'id' => 'write_wallet_name',
                'itemClass' => 'item',
                'labelClass' => 'g-input icon-wallet',
                'label' => '{LNG_Wallet}',
                'readonly' => true,
                'value' => $wallet[$index->wallet]
            ));
        } else {
            // wallet
            // NOTE(review): in this branch status is never 'INIT', so the
            // "|| $index->status == 'INIT'" below is always false — confirm
            // whether it is leftover from a refactor.
            $fieldset->add('select', array(
                'id' => 'write_wallet',
                'itemClass' => 'item',
                'labelClass' => 'g-input icon-wallet',
                'label' => $label,
                'disabled' => $disabled || $index->status == 'INIT',
                'options' => \Index\Select\Model::wallets($index->owner_id),
                'value' => $index->wallet
            ));
        }
        // Currency unit labels (from the language pack).
        $currency_units = Language::get('CURRENCY_UNITS');
        // amount: either income or expense, whichever is set
        $fieldset->add('currency', array(
            'id' => 'write_amount',
            'itemClass' => 'item',
            'labelClass' => 'g-input icon-money',
            'label' => '{LNG_Amount} ('.$currency_units[self::$cfg->currency_unit].')',
            'disabled' => $disabled,
            'value' => $index->income > 0 ? $index->income : $index->expense
        ));
        // create_date
        $fieldset->add('date', array(
            'id' => 'write_create_date',
            'itemClass' => 'item',
            'labelClass' => 'g-input icon-calendar',
            'label' => '{LNG_date}',
            'disabled' => $disabled,
            'value' => $index->create_date
        ));
        // comment
        $fieldset->add('text', array(
            'id' => 'write_comment',
            'itemClass' => 'item',
            'labelClass' => 'g-input icon-edit',
            'label' => '{LNG_Annotation}',
            'maxlength' => 255,
            'comment' => '{LNG_Notes or Additional Notes}',
            'value' => $index->comment
        ));
        $fieldset = $form->add('fieldset', array(
            'class' => 'submit'
        ));
        // submit
        $fieldset->add('submit', array(
            'class' => 'button save large',
            'value' => '{LNG_Save}'
        ));
        // status
        $fieldset->add('hidden', array(
            'id' => 'write_status',
            'value' => $index->status
        ));
        // id
        $fieldset->add('hidden', array(
            'id' => 'write_id',
            'value' => $index->id
        ));
        // owner_id
        $fieldset->add('hidden', array(
            'id' => 'write_owner_id',
            'value' => $index->owner_id
        ));
        // Return the rendered form markup.
        return $form->render();
    }
}
|
using IndirectTrajOpt
using DifferentialEquations
using StaticArrays
using LinearAlgebra
# Initialize BVP function
ps = initCR3BPIndirectParams("Low Thrust 10 CR3BP")

# Time of flight: 8.6404 days, nondimensionalized by the CR3BP time unit TU.
tspan = (0.0, 8.6404*24*3600/ps.crp.TU)

# Error producing initial conditions
# (14 components; presumably 6 states + mass + 7 costates given that
# indices 11:13 are treated as the velocity costate below — confirm
# against cr3bpEomIndirect's expected layout.)
y0 = @SVector [-0.0194885115,
                -0.0160334798,
                0.0,
                8.9188819237,
                -4.081936888,
                0.0,
                1.0,
                -18.20557373360214,
                -16.683730836929307,
                40.0,
                0.04174822756326783,
                -0.005163201648349852,
                -0.03929039276763477,
                0.12683528760241147]

# Set thrust type
# cSc: exhaust-velocity scaling; λv: norm of the velocity costate.
cSc = ps.sp.isp*9.81*ps.crp.TU / (ps.crp.LU*1000.0)
λv = norm(view(y0, 11:13))
S = IndirectTrajOpt.computeS(y0, λv, cSc)
# Pick the control type from the switching function S against the
# smoothing parameter ϵ (utype 0 / 2 / 1).
if S > ps.ϵ; ps.utype = 0
elseif S < -ps.ϵ; ps.utype = 2
else; ps.utype = 1; end

# Track switching-surface crossings during integration (4 root functions).
cb = VectorContinuousCallback(
    IndirectTrajOpt.cr3bpEomsCondition,
    IndirectTrajOpt.cr3bpEomsAffect!,
    IndirectTrajOpt.cr3bpEomsAffect!, 4;
    idxs = nothing,
    rootfind = DiffEqBase.LeftRootFind,
    interp_points = 10,
    abstol = 1e-14,
    reltol = 0.0,
    save_positions = (true, true))

# Integrate the indirect EOMs with a high-order explicit RK method (Vern9)
# at tight tolerances.
ff = ODEFunction{false}(IndirectTrajOpt.cr3bpEomIndirect)
prob = ODEProblem(ff, y0, tspan, ps; callback=cb)
sol = solve(prob, Vern9(),reltol=1e-14,abstol=1e-14)
|
module Todo.Web.View (view) where
import Control.Lens ((<&>))
import Data.Bool
import Data.Map
import Miso (View, checked_, div_, h1_, img_, input_, p_, src_, style_, text, type_)
import Miso.String hiding (length)
-- | Static demo page: two weekly todo lists — one built with native
-- checkboxes, one with custom-rendered check marks driven by 'entries'.
view :: View a
view = div_ [style_ containerStyle]
    [ h1_ [style_ titleStyle] ["Weekly Todo List (Simple Check)"]
    , div_ [style_ todosStyle]
        [ div_ [style_ todoStyle]
            [ input_ [checked_ True, type_ "checkbox"]
            , p_ [style_ $ todoDescriptionStyle True]
                ["Interview for an open position"]
            ]
        , div_ [style_ todoStyle]
            [ input_ [checked_ True, type_ "checkbox"]
            , p_ [style_ $ todoDescriptionStyle True]
                ["Join Polimorphic!"]
            ]
        , div_ [style_ todoStyle]
            [ input_ [type_ "checkbox"]
            , p_ [style_ $ todoDescriptionStyle False]
                ["Build the virtual townhall!"]
            ]
        , div_ [style_ todoStyle]
            [ input_ [type_ "checkbox"]
            , p_ [style_ $ todoDescriptionStyle False]
                ["Give local government it's biggest software update"]
            ]
        , div_ [style_ todoStyle]
            [ input_ [checked_ True, type_ "checkbox"]
            , p_ [style_ $ todoDescriptionStyle True]
                ["Start learning the codebase"]
            ]
        ]
    , h1_ [style_ titleStyle] ["Weekly Todo List (Custom Check)"]
    , p_ [] [text $ (ms . show $ length entries) <> " entries"]
      -- Data-driven variant: render one row per (active, description) pair.
    , div_ [style_ todosStyle] $ (entries) <&> \(active, description) -> div_
        [style_ todoStyle]
        [ div_ [style_ $ checkStyle active] . pure $
            img_ [src_ checkUrl, style_ checkWhiteStyle]
        , p_ [style_ $ todoDescriptionStyle active] [text description]
        ]
    ]
  where
    checkUrl = "/static/check.svg"
    entries =
        [ (False, "Give local government it\'s biggest software update")
        , (True, "Start learning the codebase")
        , (True, "Interview for an open position")
        , (True, "Join Polimorphic!")
        , (False, "Build the virtual townhall!")
        ]
-- | Custom check-box style; background and border switch on the checked flag.
checkStyle :: Bool -> Map MisoString MisoString
checkStyle checked =
    [ ("background-color", bool "white" "#27AAE1" checked)
    , ("border", "1px solid")
    , ("border-color", bool "#DDDDDD" "#27AAE1" checked)
    , ("border-radius", "3px")
    , ("cursor", "pointer")
    , ("height", "16px")
    , ("justify-content", "center")
    , ("margin-right", "10px")
    , ("width", "16px")
    ] <> flexVAlignStyle
-- | White check-mark icon inside the custom check box (filter inverts the
-- source SVG to white).
checkWhiteStyle :: Map MisoString MisoString
checkWhiteStyle =
    [ ("filter", "brightness(0) invert(1)")
    , ("height", "10px")
    , ("width", "10px")
    ]

-- | Page container: centered column capped at 600px wide.
containerStyle :: Map MisoString MisoString
containerStyle =
    [ ("font-family", "\'Work Sans\', sans-serif")
    , ("padding", "25px 15px")
    , ("margin", "0 auto")
    , ("max-width", "100%")
    , ("width", "600px")
    ]
-- | Shared flexbox vertical-centering fragment mixed into row styles.
flexVAlignStyle :: Map MisoString MisoString
flexVAlignStyle =
    [ ("align-items", "center")
    , ("display", "flex")
    ]

-- | Section heading size.
titleStyle :: Map MisoString MisoString
titleStyle = [("font-size", "32px")]

-- | One todo row: flex-aligned with spacing below.
todoStyle :: Map MisoString MisoString
todoStyle = [("margin-bottom", "10px")] <> flexVAlignStyle

-- | Todo label; completed items are greyed and struck through.
todoDescriptionStyle :: Bool -> Map MisoString MisoString
todoDescriptionStyle completed =
    [ ("color", bool "black" "#888888" completed)
    , ("margin-left", "10px")
    ] <> bool [] [("text-decoration", "line-through")] completed

-- | Wrapper around the list of todo rows.
todosStyle :: Map MisoString MisoString
todosStyle = [("margin", "20px auto")]
|
import { Component, OnInit, OnDestroy, ChangeDetectorRef } from '@angular/core';
import { Vision } from '../models/vision';
import { FeedbackMessage } from '../models/feedbackMessage';
import { VisionService } from '../services/vision.service';
import { Title } from '@angular/platform-browser';
import { Subscription } from 'rxjs/Subscription';
@Component({
    moduleId: module.id,
    selector: 'vision',
    templateUrl: 'vision.component.html',
    styles: [`
        .vision {
            font-size: 20px;
            border: 0;
            font-family: "Avenir", Arial, sans-serif;
        }
    `]
})
export class VisionComponent implements OnInit, OnDestroy {
    /** Vision being displayed/edited; replaced by the latest persisted one on load. */
    currentVision: Vision;
    /** Subscription opened in ngOnInit; disposed in ngOnDestroy. */
    getVisionsSubscription: Subscription;
    feedbackMessage: FeedbackMessage = null;
    showSubmit: boolean = false;

    constructor(private visionService: VisionService, private cdr: ChangeDetectorRef, private titleService: Title) {
        this.titleService.setTitle('Vision');
        this.currentVision = new Vision();
    } // with this Angular will know to supply an instance of the PowderService when it creates a new AppComponent

    /** Load all visions and show the most recent one (if any exist). */
    ngOnInit(): void {
        this.getVisionsSubscription = this.visionService.getVisions().subscribe((visions: Vision[]) => {
            const length = visions.length;
            if (length > 0) {
                this.currentVision = visions[length - 1];
            }
        });
    }

    /** Fix: unsubscribe so the visions subscription does not leak. */
    ngOnDestroy(): void {
        if (this.getVisionsSubscription) {
            this.getVisionsSubscription.unsubscribe();
        }
    }

    /** Persist the current vision (stamped with the current date) as a new entry. */
    updateVision(): void {
        this.currentVision.date = new Date();
        this.visionService.createVision(this.currentVision).subscribe(
            (createdVision: Vision) => { this.feedbackMessage = new FeedbackMessage("","Vision gespeichert","success")},
            (err)=> {this.feedbackMessage = new FeedbackMessage("","Vision konnte nicht gespeichert werden","error")});
    }
}
|
namespace CoCo
{
    /// <summary>
    /// Well-known GUID constants for the CoCo extension.
    /// </summary>
    internal static class Guids
    {
        // GUID identifying the extension package.
        public const string Package = "7ce8e307-18de-4056-91d0-c757b94fbbb9";
    }
}
|
import React from 'react';
import { Global } from '@emotion/react';
import { AppPropsRoot } from 'starter/core/model/common.model';
import App from 'starter/web/app';
import Layout from 'components/layout';
// Global css-in-js styles applied once at the app root via @emotion's
// <Global />. Fix: dropped the `any` annotation so the object literal is
// properly type-checked instead of opting out of the type system.
const globalStyles = {
  body: {},
};
// Root application shell: injects the global styles once, then renders the
// starter App inside the shared Layout, forwarding all root props to both.
const MyApp: React.FC<AppPropsRoot> = props => {
  return (
    <>
      <Global styles={globalStyles} />
      <Layout {...props}>
        <App {...props} />
      </Layout>
    </>
  );
};

export default MyApp;
|
import numpy as np
from math import sqrt
def get_primes_below(n):
    # http://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188
    """Input n>=6, Returns a array of primes, 2 <= p < n.

    2/3-wheel Sieve of Eratosthenes: the sieve only tracks candidates
    congruent to 1 or 5 (mod 6); 2 and 3 are prepended at the end.

    Fixes for Python 3 / modern NumPy: floor division (``//``) instead of
    true division in index arithmetic, ``range`` instead of ``xrange``,
    and the builtin ``bool`` dtype instead of the removed ``np.bool``
    alias.
    """
    sieve = np.ones(n // 3 + (n % 6 == 2), dtype=bool)
    sieve[0] = False  # index 0 maps to 1, which is not prime
    for i in range(int(n ** 0.5) // 3 + 1):
        if sieve[i]:
            # k is the candidate this sieve index represents (6i±1 form).
            k = 3 * i + 1 | 1
            # Strike out k's multiples within both wheel residue classes.
            sieve[((k * k) // 3)::2 * k] = False
            sieve[(k * k + 4 * k - 2 * k * (i & 1)) // 3::2 * k] = False
    return np.r_[2, 3, ((3 * np.nonzero(sieve)[0] + 1) | 1)]
def is_prime(n):
    """Return True if n is prime, via odd trial division up to sqrt(n)."""
    if n <= 1:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False
    # Only odd divisors up to the integer square root need checking.
    limit = int(sqrt(n))
    for divisor in range(3, limit + 1, 2):
        if n % divisor == 0:
            return False
    return True
|
# (C) Datadog, Inc. 2019-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pytest
from datadog_checks.amazon_msk import AmazonMskCheck
from datadog_checks.amazon_msk.metrics import JMX_METRICS_MAP, NODE_METRICS_MAP
@pytest.mark.usefixtures('mock_data')
def test_node_check(aggregator, instance, mock_client):
    """Full-check run: verify boto client wiring, service checks, and that
    every node/JMX metric is emitted with cluster, region and broker tags."""
    c = AmazonMskCheck('amazon_msk', {}, [instance])
    assert not c.run()

    # The check must create a Kafka client for the region embedded in the ARN
    # and list the nodes of exactly that cluster.
    caller, client = mock_client
    cluster_arn = instance['cluster_arn']
    region_name = cluster_arn.split(':')[3]
    caller.assert_called_once_with('kafka', region_name=region_name)
    client.list_nodes.assert_called_once_with(ClusterArn=cluster_arn)

    global_tags = ['cluster_arn:{}'.format(cluster_arn), 'region_name:{}'.format(region_name)]
    global_tags.extend(instance['tags'])
    aggregator.assert_service_check(c.SERVICE_CHECK_CONNECT, c.OK, tags=global_tags)

    # Per broker: metrics carry the broker_id tag, and each Prometheus
    # endpoint (node exporter 11001, JMX exporter 11002) reports healthy.
    for node_info in client.list_nodes()['NodeInfoList']:
        broker_info = node_info['BrokerNodeInfo']
        broker_tags = ['broker_id:{}'.format(broker_info['BrokerId'])]
        broker_tags.extend(global_tags)

        assert_node_metrics(aggregator, broker_tags)
        assert_jmx_metrics(aggregator, broker_tags)

        for endpoint in broker_info['Endpoints']:
            for port in (11001, 11002):
                service_check_tags = ['endpoint:http://{}:{}/metrics'.format(endpoint, port)]
                service_check_tags.extend(global_tags)
                aggregator.assert_service_check('aws.msk.prometheus.health', c.OK, tags=service_check_tags)

    aggregator.assert_all_metrics_covered()
aggregator.assert_all_metrics_covered()
def assert_node_metrics(aggregator, tags):
    """Assert every node-exporter metric carries every expected tag."""
    expected = set(NODE_METRICS_MAP.values())

    # Prometheus summaries are flattened into three derived series.
    for summary_metric in ('go.gc.duration.seconds',):
        expected.remove(summary_metric)
        expected.update(
            {
                '{}.count'.format(summary_metric),
                '{}.quantile'.format(summary_metric),
                '{}.sum'.format(summary_metric),
            }
        )

    for metric_name in sorted(expected):
        namespaced = 'aws.msk.{}'.format(metric_name)
        for tag in tags:
            aggregator.assert_metric_has_tag(namespaced, tag)
aggregator.assert_metric_has_tag(metric, tag)
def assert_jmx_metrics(aggregator, tags):
    """Assert every JMX-exporter metric carries every expected tag."""
    for metric_name in sorted(JMX_METRICS_MAP.values()):
        namespaced = 'aws.msk.{}'.format(metric_name)
        for tag in tags:
            aggregator.assert_metric_has_tag(namespaced, tag)
aggregator.assert_metric_has_tag(metric, tag)
@pytest.mark.usefixtures('mock_data')
def test_custom_metric_path(aggregator, instance, mock_client):
    """Same scenario as test_node_check, but with a custom
    prometheus_metrics_path — endpoint service checks must use that path."""
    instance['prometheus_metrics_path'] = '/'
    c = AmazonMskCheck('amazon_msk', {}, [instance])
    assert not c.run()

    caller, client = mock_client
    cluster_arn = instance['cluster_arn']
    region_name = cluster_arn.split(':')[3]
    caller.assert_called_once_with('kafka', region_name=region_name)
    client.list_nodes.assert_called_once_with(ClusterArn=cluster_arn)

    global_tags = ['cluster_arn:{}'.format(cluster_arn), 'region_name:{}'.format(region_name)]
    global_tags.extend(instance['tags'])
    aggregator.assert_service_check(c.SERVICE_CHECK_CONNECT, c.OK, tags=global_tags)

    for node_info in client.list_nodes()['NodeInfoList']:
        broker_info = node_info['BrokerNodeInfo']
        broker_tags = ['broker_id:{}'.format(broker_info['BrokerId'])]
        broker_tags.extend(global_tags)

        assert_node_metrics(aggregator, broker_tags)
        assert_jmx_metrics(aggregator, broker_tags)

        for endpoint in broker_info['Endpoints']:
            for port in (11001, 11002):
                # Endpoint tag reflects the overridden path ('/'), not '/metrics'.
                service_check_tags = ['endpoint:http://{}:{}/'.format(endpoint, port)]
                service_check_tags.extend(global_tags)
                aggregator.assert_service_check('aws.msk.prometheus.health', c.OK, tags=service_check_tags)

    aggregator.assert_all_metrics_covered()
|
<?php
namespace AdolphYu\FBMessenger\Facades;
use AdolphYu\FBMessenger\Models\Messaging\Messaging;
use Illuminate\Http\Request;
USE Illuminate\Support\Facades\Facade;
/**
* @method static array send(Messaging $messaging)
* @method static null receive(Request $request)
*
* @see \AdolphYu\FBMessenger\FBMSG
*/
class FBMSG extends Facade
{
    /**
     * Get the registered name of the component.
     *
     * This is the container binding key the facade resolves against.
     *
     * @return string
     */
    protected static function getFacadeAccessor()
    {
        return 'fbmsg';
    }
}
|
/*
* Copyright (c) 2020, Twilio Inc.
*/
package com.twilio.security.crypto
import com.twilio.security.crypto.key.signer.ECSigner
import com.twilio.security.crypto.key.template.ECP256SignerTemplate
import com.twilio.security.crypto.key.template.SignerTemplate
import java.security.KeyPair
import java.security.KeyPairGenerator
import java.security.KeyStore
import java.security.Signature
import org.junit.After
import org.junit.Assert.assertFalse
import org.junit.Assert.assertNotNull
import org.junit.Assert.assertTrue
import org.junit.Before
import org.junit.Test
/**
 * Tests for [ECSigner] instances produced by the key manager from an
 * [ECP256SignerTemplate], backed by the platform key store.
 *
 * Each test uses a unique, timestamp-based alias that is created on demand
 * and removed again in [tearDown].
 */
class ECP256SignerTests {

  private val keyStore = KeyStore.getInstance(providerName)
      .apply { load(null) }
  private val androidKeyManager = keyManager()
  private lateinit var alias: String

  @Before
  fun setup() {
    // Fresh alias per test; clean up any stale entry with the same name.
    alias = System.currentTimeMillis()
        .toString()
    if (keyStore.containsAlias(alias)) {
      keyStore.deleteEntry(alias)
    }
  }

  @After
  fun tearDown() {
    if (this::alias.isInitialized) {
      keyStore.deleteEntry(alias)
    }
  }

  // Requesting a signer for an unknown alias must create a new key pair.
  @Test
  fun testSigner_withNonExistingKeyPair_shouldReturnSignerForNewKeyPair() {
    val template = ECP256SignerTemplate(alias)
    val signer = androidKeyManager.signer(template)
    assertTrue(signer is ECSigner)
    assertTrue(keyStore.containsAlias(alias))
    assertNotNull((signer as? ECSigner)?.keyPair)
    assertTrue(
        keyStore.getCertificate(alias)?.publicKey?.encoded?.contentEquals(
            (signer as ECSigner).keyPair.public.encoded
        ) == true
    )
  }

  // Requesting a signer for an existing alias must reuse that key pair.
  @Test
  fun testSigner_withExistingKeyPair_shouldReturnSignerForKeyPair() {
    val template = ECP256SignerTemplate(alias)
    val keyPair = createKeyPair(template)
    val signer = androidKeyManager.signer(template)
    assertTrue(signer is ECSigner)
    assertTrue(keyStore.containsAlias(alias))
    assertNotNull((signer as? ECSigner)?.keyPair)
    assertTrue(
        keyPair.public.encoded?.contentEquals(
            (signer as ECSigner).keyPair.public.encoded
        ) == true
    )
  }

  // A signature from the signer must verify against the stored public key.
  @Test
  fun testSign_withSigner_shouldReturnSignature() {
    val data = "message".toByteArray()
    val template = ECP256SignerTemplate(alias)
    val keyPair = createKeyPair(template)
    val signer = androidKeyManager.signer(template)
    val signature = signer.sign(data)
    val valid = Signature.getInstance(template.signatureAlgorithm)
        .run {
          initVerify(keyPair.public)
          update(data)
          verify(signature)
        }
    assertTrue(valid)
  }

  @Test
  fun testGetPublicKey_shouldReturnKeyStorePublicKey() {
    val template = ECP256SignerTemplate(alias)
    val signer = androidKeyManager.signer(template)
    assertTrue(keyStore.containsAlias(alias))
    val expectedPublicKey = keyStore.getCertificate(alias)
        .publicKey.encoded
    assertTrue(signer.getPublic().contentEquals(expectedPublicKey))
  }

  // The signer must accept a signature produced externally with the same key.
  @Test
  fun testVerify_withSigner_shouldReturnTrue() {
    val data = "message".toByteArray()
    val template = ECP256SignerTemplate(alias)
    val keyPair = createKeyPair(template)
    val signature = Signature.getInstance(template.signatureAlgorithm)
        .run {
          initSign(keyPair.private)
          update(data)
          sign()
        }
    val signer = androidKeyManager.signer(template)
    val valid = signer.verify(data, signature)
    assertTrue(valid)
  }

  @Test
  fun testDelete_withExistingKeyPair_shouldDeleteAlias() {
    val template = ECP256SignerTemplate(alias)
    createKeyPair(template)
    assertTrue(keyStore.containsAlias(alias))
    androidKeyManager.delete(alias)
    assertFalse(keyStore.containsAlias(alias))
  }

  // Creates a key pair in the key store directly (bypassing the key manager).
  private fun createKeyPair(template: SignerTemplate): KeyPair {
    val keyPairGenerator = KeyPairGenerator.getInstance(
        template.algorithm, providerName
    )
    keyPairGenerator.initialize(template.keyGenParameterSpec)
    return keyPairGenerator.generateKeyPair()
  }
}
|
/* KenticoClearObjectVersionHistory.v11.sql */
/* Goal: Clean up data from objects */
/* Description: Truncates all version history */
/* that can bloat a database. Be very careful */
/* with this one, there is no coming back */
/* Intended Kentico Version: 11.x */
/* Author: Brian McKeiver (mcbeev@gmail.com) */
/* Revision: 1.0 */
/* Take a backup first! Don't be THAT guy! */
/* Clear the checked-out references first, so the DELETE below is not
   blocked by rows still pointing at version history records
   (FK relationship -- confirm against your schema version). */
UPDATE CMS_ObjectSettings
SET ObjectCheckedOutVersionHistoryID = NULL
WHERE ObjectCheckedOutVersionHistoryID IS NOT NULL
GO

/* Remove every stored object version. This is irreversible. */
DELETE FROM CMS_ObjectVersionHistory
|
package phpserialize
import (
"bytes"
"fmt"
"reflect"
"sort"
"strconv"
"strings"
)
// MarshalOptions must be provided when invoking Marshal(). Use
// DefaultMarshalOptions() for sensible defaults.
type MarshalOptions struct {
	// If this is true, then all struct names will be stripped from objects
	// and "stdClass" will be used instead. The default value is false.
	OnlyStdClass bool
}
// DefaultMarshalOptions returns a new MarshalOptions populated with
// sensible defaults. See MarshalOptions for a description of each option.
func DefaultMarshalOptions() *MarshalOptions {
	return &MarshalOptions{OnlyStdClass: false}
}
// MarshalBool returns the bytes of a PHP-serialized bool, equivalent to
// PHP's serialize(false) => "b:0;" / serialize(true) => "b:1;".
// Marshal(true) produces the same result.
func MarshalBool(value bool) []byte {
	encoded := "b:0;"
	if value {
		encoded = "b:1;"
	}
	return []byte(encoded)
}
// MarshalInt returns the bytes of a PHP-serialized integer, equivalent to
// PHP's serialize(123) => "i:123;". Marshal(123) produces the same result.
func MarshalInt(value int64) []byte {
	return []byte(fmt.Sprintf("i:%d;", value))
}
// MarshalUint is the unsigned counterpart of MarshalInt, provided for
// compatibility with Go's unsigned integer types.
func MarshalUint(value uint64) []byte {
	return []byte(fmt.Sprintf("i:%d;", value))
}
// MarshalFloat returns the bytes of a PHP-serialized float, equivalent to
// PHP's serialize(1.23) => "d:1.23;".
//
// bitSize is the precision of the original value (32 or 64) and is passed
// through to strconv.FormatFloat so the shortest exact representation is
// produced, e.g.:
//
//	MarshalFloat(1.23, 64)               // literal float64
//	MarshalFloat(float64(f32), 32)       // value cast up from a float32
//
// Marshal(1.23) produces the same result.
func MarshalFloat(value float64, bitSize int) []byte {
	formatted := strconv.FormatFloat(value, 'f', -1, bitSize)
	return []byte("d:" + formatted + ";")
}
// MarshalString returns the bytes to represent a PHP serialized string value,
// the equivalent of PHP's:
//
//	echo serialize('Hello world');
//	// s:11:"Hello world";
//
// The length prefix is the byte length, matching PHP's behaviour. Note that
// PHP stores binary data in strings; see MarshalBytes for that case.
func MarshalString(value string) []byte {
	return []byte("s:" + strconv.Itoa(len(value)) + ":\"" + value + "\";")
}
// MarshalBytes returns the bytes to represent a PHP serialized string value
// that contains binary data (PHP has no distinct binary type). Each byte is
// rendered as an escaped "\xNN" sequence, while the length prefix counts the
// original (unescaped) bytes.
//
// This can cause confusion when decoding, since the value looks like a
// string; Unmarshal() accepts either a string or []byte for PHP strings.
func MarshalBytes(value []byte) []byte {
	var escaped bytes.Buffer
	for _, b := range value {
		fmt.Fprintf(&escaped, "\\x%02x", b)
	}

	return []byte(fmt.Sprintf("s:%d:\"%s\";", len(value), escaped.String()))
}
// MarshalNil returns the bytes to represent a PHP serialized null value, the
// equivalent of PHP's:
//
//	echo serialize(null);
//	// N;
//
// It takes no argument because the output is a constant token.
func MarshalNil() []byte {
	const nilToken = "N;"
	return []byte(nilToken)
}
// MarshalStruct returns the bytes that represent a PHP encoded class from a
// struct or pointer to a struct.
//
// Fields that are not exported (starting with a lowercase letter) will not be
// present in the output. All fields that appear in the output will have their
// first letter converted to lowercase. Any other uppercase letters in the field
// name are maintained. At the moment there is no way to change this behaviour,
// unlike other marshallers that use a tag on the field.
//
// NOTE(review): this function calls value.NumField() directly, so passing a
// *pointer* here without dereferencing would panic; Marshal() dereferences
// pointers before delegating — confirm before calling this directly.
func MarshalStruct(input interface{}, options *MarshalOptions) ([]byte, error) {
	value := reflect.ValueOf(input)
	typeOfValue := value.Type()

	// Some of the fields in the struct may not be visible (unexported). We
	// need to make sure we count all the visible ones for the final result.
	visibleFieldCount := 0

	var buffer bytes.Buffer

	for i := 0; i < value.NumField(); i++ {
		f := value.Field(i)
		if !f.CanInterface() {
			// This is an unexported field, we cannot read it.
			continue
		}

		visibleFieldCount++

		// Note: since we can only export fields that are public (start
		// with an uppercase letter) we must change it to lower case. If
		// you really do want it to be upper case you will have to wait
		// for when tags are supported on individual fields.
		fieldName := lowerCaseFirstLetter(typeOfValue.Field(i).Name)
		buffer.Write(MarshalString(fieldName))

		// Field values are serialized recursively, so nested structs,
		// slices and maps are supported via Marshal().
		m, err := Marshal(f.Interface(), options)
		if err != nil {
			return nil, err
		}

		buffer.Write(m)
	}

	// The emitted class name is either the Go struct name or "stdClass"
	// when OnlyStdClass is requested.
	className := reflect.ValueOf(input).Type().Name()
	if options.OnlyStdClass {
		className = "stdClass"
	}

	return []byte(fmt.Sprintf("O:%d:\"%s\":%d:{%s}", len(className),
		className, visibleFieldCount, buffer.String())), nil
}
// Marshal is the canonical way to perform the equivalent of serialize() in PHP.
// It can handle encoding scalar types, slices, maps, structs and pointers.
//
// If options is nil, DefaultMarshalOptions() is used. An error is returned
// for types that have no PHP representation (channels, funcs, etc.).
func Marshal(input interface{}, options *MarshalOptions) ([]byte, error) {
	if options == nil {
		options = DefaultMarshalOptions()
	}

	// []byte is a special case because all strings (binary and otherwise)
	// are handled as strings in PHP.
	if bytesToEncode, ok := input.([]byte); ok {
		return MarshalBytes(bytesToEncode), nil
	}

	// Nil is another special case because it is typeless and must be
	// handled before trying to determine the type.
	if input == nil {
		return MarshalNil(), nil
	}

	// Otherwise we need to decide if it is a scalar value, map or slice.
	value := reflect.ValueOf(input)

	switch value.Kind() {
	case reflect.Bool:
		return MarshalBool(value.Bool()), nil

	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
		reflect.Int64:
		return MarshalInt(value.Int()), nil

	// BUG FIX: reflect.Uint was previously missing from this case, so a
	// plain `uint` value fell through to the "can not encode" error.
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
		reflect.Uint64:
		return MarshalUint(value.Uint()), nil

	case reflect.Float32:
		return MarshalFloat(value.Float(), 32), nil

	case reflect.Float64:
		return MarshalFloat(value.Float(), 64), nil

	case reflect.String:
		return MarshalString(value.String()), nil

	case reflect.Slice:
		return marshalSlice(value.Interface(), options)

	case reflect.Map:
		return marshalMap(value.Interface(), options)

	case reflect.Struct:
		return MarshalStruct(input, options)

	case reflect.Ptr:
		// A typed nil pointer is not equal to the untyped nil handled
		// above; guard it so Elem().Interface() does not panic.
		if value.IsNil() {
			return MarshalNil(), nil
		}
		return Marshal(value.Elem().Interface(), options)

	default:
		return nil, fmt.Errorf("can not encode: %T", input)
	}
}
// marshalSlice encodes a Go slice as a PHP array whose keys are the
// zero-based indexes of the slice elements.
func marshalSlice(input interface{}, options *MarshalOptions) ([]byte, error) {
	slice := reflect.ValueOf(input)

	var body bytes.Buffer
	for index := 0; index < slice.Len(); index++ {
		encodedKey, err := Marshal(index, options)
		if err != nil {
			return nil, err
		}
		body.Write(encodedKey)

		encodedValue, err := Marshal(slice.Index(index).Interface(), options)
		if err != nil {
			return nil, err
		}
		body.Write(encodedValue)
	}

	return []byte(fmt.Sprintf("a:%d:{%s}", slice.Len(), body.String())), nil
}
// marshalMap encodes a Go map as a PHP array of key/value pairs. Keys are
// sorted first so the output is deterministic despite Go's randomised map
// iteration order.
func marshalMap(input interface{}, options *MarshalOptions) ([]byte, error) {
	s := reflect.ValueOf(input)

	// Go randomises maps. To be able to test this we need to make sure the
	// map keys always come out in the same order. So we sort them first.
	mapKeys := s.MapKeys()
	sort.Slice(mapKeys, func(i, j int) bool {
		// lessValue is defined elsewhere in this package; presumably it
		// defines an ordering over reflect.Values of the key type —
		// confirm its behaviour for mixed-kind keys there.
		return lessValue(mapKeys[i], mapKeys[j])
	})

	var buffer bytes.Buffer

	for _, mapKey := range mapKeys {
		// Encode the key, then its value, back to back as PHP expects.
		m, err := Marshal(mapKey.Interface(), options)
		if err != nil {
			return nil, err
		}

		buffer.Write(m)

		m, err = Marshal(s.MapIndex(mapKey).Interface(), options)
		if err != nil {
			return nil, err
		}

		buffer.Write(m)
	}

	return []byte(fmt.Sprintf("a:%d:{%s}", s.Len(), buffer.String())), nil
}
// lowerCaseFirstLetter converts only the first letter of s to lower case,
// leaving the rest of the string untouched. An empty string is returned
// unchanged (the previous implementation panicked on s[0:1]).
func lowerCaseFirstLetter(s string) string {
	if s == "" {
		return s
	}

	return strings.ToLower(s[:1]) + s[1:]
}
|
use ate::prelude::*;
use chrono::DateTime;
use chrono::Utc;
use serde::*;
use super::*;
/// Contracts are agreements between a consumer and a provider for
/// particular services. Only brokers may perform actions on
/// active contracts.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Contract {
    /// Reference number assigned to this contract
    pub reference_number: String,
    /// The country that you pay GST tax in for these services
    pub gst_country: Country,
    /// The wallet that will be debited
    pub debit_wallet: PrimaryKey,
    /// The rate card that will be used for this contract
    pub rate_card: RateCard,
    /// The advertised service being consumed by the provider
    pub service: AdvertisedService,
    /// Status of the contract
    pub status: ContractStatus,
    /// Limited duration contracts will expire after a
    /// certain period of time without incurring further
    /// charges
    pub expires: Option<DateTime<Utc>>,
    /// Key used by the broker to gain access to the wallet
    /// (only after the provider supplies their key)
    pub broker_unlock_key: EncryptKey,
    /// Broker key encrypted with the provider's public key
    pub broker_key: PublicEncryptedSecureData<EncryptKey>,
    /// Metrics for different instances of this service, each with a
    /// unique reference number (field=related_to)
    pub metrics: DaoVec<ContractMetrics>,
}
|
// Nanoseconds-per-millisecond divisor for process.hrtime.bigint().
const BIGINT_ROUNDER = 1000000;
// Decimal places kept for the Node.js timing fallback.
const TIMING_PRECISION = 3;

// Returns a monotonic timestamp in milliseconds: performance.now() in the
// browser, process.hrtime.bigint() converted to ms in Node.js.
export const getCurrentTime = () => {
  // BUG FIX: `typeof` always yields a *string*, so the old comparison
  // `typeof window !== undefined` was always true; compare against the
  // string 'undefined' to actually detect the browser environment.
  return typeof window !== 'undefined'
    ? performance.now()
    : Number((Number(process.hrtime.bigint()) / BIGINT_ROUNDER).toFixed(TIMING_PRECISION));
};
|
module Hasura.SessionSpec (spec) where
import Hasura.Generator ()
import Hasura.Prelude
import Hasura.Server.Utils
import Hasura.Session
import Test.Hspec
import Test.Hspec.QuickCheck
-- | Property-based check: every 'SessionVariable' generated by the
-- Arbitrary instance must render (via 'sessionVariableToText') to text
-- that 'isSessionVariable' accepts.
spec :: Spec
spec = describe "SessionVariable" $ do
  prop "Arbitrary instance generates valid session variables" $ \v ->
    sessionVariableToText v `shouldSatisfy` isSessionVariable
|
// Implicit enum members continue from the last explicit value:
// Red = 0, Green = 2, Blue = 3.
enum Color { Red, Green=2, Blue };

class test {
    static Color y = Color.Green;

    // Explicit (empty) static constructor; its presence affects when the
    // static field initializer runs (removes beforefieldinit).
    static test() {}

    // Prints the enum name and its underlying integer value: "Green=2".
    public static void Main() {
        Color x = y;
        System.Console.WriteLine("{0}={1}", x, (int)x);
    }
}
|
use crate::backend::renderer::Renderer;
use crate::backend::targets::texture::TextureTarget;
use crate::backend::targets::window::WindowTarget;
use crate::entities::background::BackgroundColor;
use crate::entities::render_target::internal::{PrepareRenderingAction, RenderAction};
use crate::internal::PrepareCaptureAction;
use crate::storages::core::{CameraProperties, CoreStorage};
use crate::{
Camera2D, Color, FrameRate, FrameRateLimit, GraphicsModule, ShapeColor, SurfaceSize,
WindowSettings,
};
use modor::{Built, Entity, EntityBuilder, Query, Single, With, World};
use modor_physics::{Position, Shape, Size};
use winit::dpi::PhysicalSize;
use winit::event_loop::EventLoop;
use winit::window::{Window as WinitWindow, WindowBuilder};
// Clear color used when no `BackgroundColor` singleton exists.
const DEFAULT_BACKGROUND_COLOR: Color = Color::BLACK;
// Camera used when no `Camera2D` entity exists: origin position, unit size.
const DEFAULT_CAMERA: CameraProperties = CameraProperties {
    position: Position::ZERO,
    size: Size::ONE,
};
// Entity owning the rendering state (renderer, instances, vsync, size)
// shared by both the window and the off-screen capture targets.
pub(crate) struct RenderTarget {
    pub(crate) core: CoreStorage,
}

#[entity]
impl RenderTarget {
    pub(crate) fn build(renderer: Renderer) -> impl Built<Self> {
        EntityBuilder::new(Self {
            core: CoreStorage::new(renderer),
        })
    }

    // Gathers all shapes plus the active camera and uploads the resulting
    // instances into core storage before rendering.
    #[run_as(PrepareRenderingAction)]
    fn prepare_rendering(
        &mut self,
        shapes: Query<'_, (&ShapeColor, &Position, &Size, Option<&Shape>)>,
        cameras: Query<'_, (&Position, &Size), With<Camera2D>>,
    ) {
        let camera = Self::extract_camera(cameras);
        self.core.update_instances(shapes, camera);
    }

    // Renders one frame; falls back to a black background when no
    // BackgroundColor exists and enables vsync only for FrameRate::VSync.
    #[run_as(RenderAction)]
    fn render(
        &mut self,
        background_color: Option<Single<'_, BackgroundColor>>,
        frame_rate_limit: Option<Single<'_, FrameRateLimit>>,
    ) {
        let background_color = background_color.map_or(DEFAULT_BACKGROUND_COLOR, |c| **c);
        let enable_vsync = matches!(frame_rate_limit.map(|l| l.get()), Some(FrameRate::VSync));
        self.core.toggle_vsync(enable_vsync);
        self.core.render(background_color);
    }

    // Empty system used purely as an ordering anchor for UpdateGraphicsAction.
    #[run_as(UpdateGraphicsAction)]
    fn finish_update() {}

    // Uses the first Camera2D found (extras are ignored); defaults to the
    // origin/unit-size camera when none exists.
    fn extract_camera(cameras: Query<'_, (&Position, &Size), With<Camera2D>>) -> CameraProperties {
        cameras
            .iter()
            .next()
            .map_or(DEFAULT_CAMERA, |(p, s)| CameraProperties {
                position: *p,
                size: *s,
            })
    }
}
// coverage: off (window cannot be tested)
/// The open window in which rendering occurs.
///
/// # Modor
///
/// - **Type**: singleton entity
/// - **Lifetime**: same as [`GraphicsModule`](crate::GraphicsModule)
///
/// # Examples
///
/// See [`GraphicsModule`](crate::GraphicsModule).
pub struct Window {
    size: SurfaceSize,
    // Renderer recreated via `update_renderer` (e.g. after surface loss);
    // consumed by `update_size` on the next update.
    refreshed_renderer: Option<Renderer>,
}

#[singleton]
impl Window {
    /// Returns the size of the rendering area.
    pub fn size(&self) -> SurfaceSize {
        self.size
    }

    pub(crate) fn build(renderer: Renderer) -> impl Built<Self> {
        // The initial size is taken from the renderer's own target size.
        let (width, height) = renderer.target_size();
        EntityBuilder::new(Self {
            size: SurfaceSize { width, height },
            refreshed_renderer: None,
        })
        .inherit_from(RenderTarget::build(renderer))
    }

    pub(crate) fn set_size(&mut self, size: SurfaceSize) {
        self.size = size;
    }

    pub(crate) fn update_renderer(&mut self, window: &WinitWindow) {
        self.refreshed_renderer = Some(Renderer::new(WindowTarget::new(window)));
    }

    // Applies a pending renderer refresh if one exists; otherwise just
    // propagates the current window size to the render target.
    #[run]
    fn update_size(&mut self, surface: &mut RenderTarget) {
        if let Some(renderer) = self.refreshed_renderer.take() {
            surface.core = CoreStorage::new(renderer);
        } else {
            surface.core.set_size(self.size());
        }
    }
}
// Temporary singleton holding window settings until the real `Window`
// entity can be created; deletes itself once consumed.
pub(crate) struct WindowInit {
    settings: WindowSettings,
    // Renderer created eagerly once the winit window exists (not on
    // Android, where the surface only becomes available later).
    renderer: Option<Renderer>,
}

#[singleton]
impl WindowInit {
    pub(crate) fn build(settings: WindowSettings) -> impl Built<Self> {
        EntityBuilder::new(Self {
            settings,
            renderer: None,
        })
    }

    pub(crate) fn create_renderer(&mut self, window: &WinitWindow) {
        self.renderer = Some(Renderer::new(WindowTarget::new(window)));
    }

    // Builds the winit window from the stored settings; on wasm it also
    // attaches the canvas to the document body, and on non-Android targets
    // it eagerly creates the renderer.
    #[allow(clippy::let_and_return)]
    pub(crate) fn create_window(&mut self, event_loop: &EventLoop<()>) -> WinitWindow {
        let window = WindowBuilder::new()
            .with_title(self.settings.title.clone())
            .with_inner_size(PhysicalSize::new(
                self.settings.size.width,
                self.settings.size.height,
            ))
            .build(event_loop)
            .expect("failed to create window");
        window.set_cursor_visible(self.settings.has_visible_cursor);
        #[cfg(target_arch = "wasm32")]
        {
            use winit::platform::web::WindowExtWebSys;
            let canvas = window.canvas();
            canvas.set_id("modor");
            if !self.settings.has_visible_cursor {
                canvas
                    .style()
                    .set_property("cursor", "none")
                    .expect("cannot setup canvas");
            }
            web_sys::window()
                .and_then(|win| win.document())
                .and_then(|doc| doc.body())
                .and_then(|body| body.append_child(&web_sys::Element::from(canvas)).ok())
                .expect("cannot append canvas to document body");
        }
        #[cfg(not(target_os = "android"))]
        {
            self.renderer = Some(Renderer::new(WindowTarget::new(&window)));
        }
        window
    }

    // One-shot system: replaces this init entity with the real `Window`,
    // falling back to an off-screen texture target when no window renderer
    // was created (e.g. headless runs).
    #[run]
    fn consume(
        &mut self,
        entity: Entity<'_>,
        graphics: Single<'_, GraphicsModule>,
        mut world: World<'_>,
    ) {
        let renderer = if let Some(renderer) = self.renderer.take() {
            renderer
        } else {
            Renderer::new(TextureTarget::new(
                self.settings.size.width,
                self.settings.size.height,
            ))
        };
        world.create_child_entity(graphics.entity().id(), Window::build(renderer));
        world.delete_entity(entity.id());
    }
}
// coverage: on
/// A handler for capturing rendering.
///
/// # Modor
///
/// - **Type**: singleton entity
/// - **Lifetime**: same as [`GraphicsModule`](crate::GraphicsModule)
/// - **Updated during**: [`UpdateCaptureBufferAction`](crate::UpdateCaptureBufferAction)
///
/// # Examples
///
/// See [`GraphicsModule`](crate::GraphicsModule).
// coverage: off (window cannot be tested)
pub struct Capture {
    buffer: Vec<u8>,
    buffer_size: SurfaceSize,
    // Pending resize request; applied to the render target on the next
    // PrepareCaptureAction run.
    updated_size: Option<SurfaceSize>,
}

#[singleton]
impl Capture {
    /// Returns the capture size.
    pub fn size(&self) -> SurfaceSize {
        self.buffer_size
    }

    /// Sets the capture size.
    pub fn set_size(&mut self, size: SurfaceSize) {
        self.updated_size = Some(size);
    }

    /// Returns the capture as a 8-bit RGBA image buffer, or `None` when no
    /// frame has been captured yet.
    pub fn buffer(&self) -> Option<&[u8]> {
        if self.buffer.is_empty() {
            None
        } else {
            Some(&self.buffer)
        }
    }

    pub(crate) fn build(size: SurfaceSize) -> impl Built<Self> {
        EntityBuilder::new(Self {
            buffer_size: size,
            buffer: vec![],
            updated_size: Some(size),
        })
        .inherit_from(RenderTarget::build(Renderer::new(TextureTarget::new(
            size.width,
            size.height,
        ))))
    }

    // Applies any pending resize before the frame is rendered.
    #[run_as(PrepareCaptureAction)]
    fn update_config(&mut self, surface: &mut RenderTarget) {
        if let Some(size) = self.updated_size.take() {
            surface.core.set_size(size);
        }
    }

    // Copies the rendered frame out of the renderer into `buffer` and
    // records the actual target size.
    #[run_as(UpdateCaptureBufferAction)]
    fn update_buffer(&mut self, surface: &mut RenderTarget) {
        let (width, height) = surface.core.renderer().target_size();
        self.buffer_size = SurfaceSize::new(width, height);
        self.buffer = surface.core.renderer().retrieve_buffer();
    }
}
/// An action done when the graphics module has retrieved all data necessary for the rendering.
#[action(PrepareRenderingAction)]
pub struct UpdateGraphicsAction;

/// An action done when the rendering has been captured by the [`Capture`](crate::Capture) entity.
#[action(RenderAction)]
pub struct UpdateCaptureBufferAction;

pub(crate) mod internal {
    use crate::UpdateCamera2DAction;
    use modor_input::UpdateInputAction;
    use modor_physics::UpdatePhysicsAction;

    // Runs first so `Capture` can apply pending resizes before preparation.
    #[action]
    pub struct PrepareCaptureAction;

    // Rendering preparation requires physics, input, camera updates and
    // capture configuration to have completed.
    #[action(
        UpdatePhysicsAction,
        UpdateInputAction,
        UpdateCamera2DAction,
        PrepareCaptureAction
    )]
    pub struct PrepareRenderingAction;

    // The actual render happens only after preparation is done.
    #[action(PrepareRenderingAction)]
    pub struct RenderAction;
}
|
---
templateKey: blog-post
title: Urali - Ghostology
order: 12
image: /img/urali.jpg
style: bow
---
- - -
**Urali - Ghostology**
Illustration and design for the album _Ghostology_ by Urali.




|
/*
Copyright Ⓒ 2013 Brett Smith
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import org.scalatest._
// Tests for the list problems of the "Ninety-Nine Scala Problems" set,
// exercising each function over a range of list sizes where practical.
class S99Spec extends FlatSpec {
  import S99._

  "Function last" should "return the last element of a list" in {
    1 to 10 foreach(max => assert(last((1 to max).toList) === Some(max)))
  }

  it should "return None for an empty list" in {
    assert(last(List()) === None)
  }

  "Function penultimate" should "return the second-last element of a list" in {
    2 to 10 foreach(
      max => assert(penultimate((1 to max).toList) === Some(max - 1))
    )
  }

  it should "return None for a too-short list" in {
    assert(penultimate(List(1)) === None)
  }

  "Function nth" should "return the corresponding index" in {
    // Squares make wrong-index results easy to detect.
    val l = (0 to 5).map(n => n * n).toList
    (0 to 5).foreach(n => assert(nth(n, l) === Some(n * n)))
  }

  it should "return None past the end of the list" in {
    assert(nth(1, List()) === None)
  }

  "Function length" should "return the length of the list" in {
    (0 to 5).foreach(end => assert(length((1 to end).toList) === end))
  }

  "Function reverse" should "reverse a list" in {
    (0 to 5).foreach(
      end => assert(reverse((1 to end).toList) === end.to(1, -1).toList)
    )
  }

  "Function isPalindrome" should "find palindromes" in {
    // Both even- and odd-length palindromes, plus near misses.
    assert(isPalindrome(List()))
    assert(isPalindrome(List(2)))
    assert(isPalindrome(List(2, 5, 2)))
    assert(isPalindrome(List(2, 5, 5, 2)))
    assert(isPalindrome(List(2, 5, 9, 5, 2)))
    assert(!isPalindrome(List(2, 5, 9, 4, 2)))
    assert(!isPalindrome(List(2, 5, 4, 2)))
    assert(!isPalindrome(List(2, 3)))
  }

  "Function flatten" should "flatten nested lists" in {
    assert(flatten(List()) === List())
    val result = List(1, 2, 3)
    assert(flatten(result) === result)
    assert(flatten(List(List(1, 2), 3)) === result)
    assert(flatten(List(List(1, List(2)), 3)) === result)
  }

  "Function compress" should "remove duplicates run together" in {
    assert(compress(List()) === List())
    val result = List(1, 2, 3)
    assert(compress(result) === result)
    assert(compress(List(1, 1, 2, 3)) === result)
    assert(compress(List(1, 2, 2, 3)) === result)
    assert(compress(List(1, 2, 3, 3)) === result)
    assert(compress(List(1, 1, 2, 2, 3, 3)) === result)
    // Non-adjacent duplicates are preserved.
    assert(compress(List(1, 1, 2, 2, 1, 3, 3)) === List(1, 2, 1, 3))
  }

  "Function pack" should "wrap non-dupe elements in Lists" in {
    List(List(), List(1), List(2, 3, 4), List(2, 3, 2)).foreach(in_list =>
      assert(pack(in_list) === in_list.map(n => List(n)).toList)
    )
  }

  it should "list duplicates together" in {
    assert(pack(List(1, 1, 1)) === List(List(1, 1, 1)))
    assert(pack(List(1, 1, 2)) === List(List(1, 1), List(2)))
    assert(pack(List(1, 1, 2, 2)) === List(List(1, 1), List(2, 2)))
    assert(pack(List(1, 2, 2, 3)) === List(List(1), List(2, 2), List(3)))
    assert(pack(List(1, 2, 2, 2)) === List(List(1), List(2, 2, 2)))
    assert(pack(List('a, 'a, 'a, 'a, 'b, 'c, 'c, 'a, 'a, 'd, 'e, 'e, 'e, 'e))
      === List(List('a, 'a, 'a, 'a), List('b), List('c, 'c), List('a, 'a),
        List('d), List('e, 'e, 'e, 'e)))
  }

  "Function slice" should "return sublists" in {
    val list = 1.to(5).toList
    assert(slice(0, 3, list) === List(1, 2, 3))
    assert(slice(1, 4, list) === List(2, 3, 4))
    assert(slice(2, 5, list) === List(3, 4, 5))
  }

  it should "support negative indices" in {
    // Negative indices count from the end of the list.
    val list = 1.to(5).toList
    assert(slice(-2, 5, list) === List(4, 5))
    assert(slice(-3, 5, list) === List(3, 4, 5))
    assert(slice(1, -1, list) === List(2, 3, 4))
  }

  it should "return Nil out of bounds" in {
    assert(slice(0, 1, Nil) === Nil)
    assert(slice(1, 2, List(1)) === Nil)
    assert(slice(0, -2, List(1)) === Nil)
  }

  "Function rotate" should "rotate the given list" in {
    val list = (1 to 5).toList
    assert(rotate(0, list) === list)
    assert(rotate(1, list) === List(2, 3, 4, 5, 1))
    assert(rotate(2, list) === List(3, 4, 5, 1, 2))
    assert(rotate(3, list) === List(4, 5, 1, 2, 3))
    assert(rotate(4, list) === List(5, 1, 2, 3, 4))
    assert(rotate(5, list) === List(1, 2, 3, 4, 5))
  }

  "Function removeAt" should "remove the nth element of the list" in {
    val list = (1 to 5).toList
    assert(removeAt(0, list) === List(2, 3, 4, 5))
    assert(removeAt(1, list) === List(1, 3, 4, 5))
    assert(removeAt(2, list) === List(1, 2, 4, 5))
    assert(removeAt(3, list) === List(1, 2, 3, 5))
    assert(removeAt(4, list) === List(1, 2, 3, 4))
    // Out-of-range index leaves the list unchanged.
    assert(removeAt(5, list) === list)
  }
}
|
/*
Copyright (c) 2019-2021 Integrative Software LLC
Created: 5/2019
Author: Pablo Carbonell
*/
using System;
using System.ComponentModel;
namespace Integrative.Lara
{
    /// <summary>
    /// Meta element. Retained only for backward compatibility;
    /// use <see cref="HtmlMetaElement"/> instead.
    /// </summary>
    [Obsolete("Use HtmlMetaElement instead")]
    [EditorBrowsable(EditorBrowsableState.Never)]
    public class Meta : HtmlMetaElement
    {
    }
/// <summary>
/// The 'meta' HTML5 element.
/// </summary>
/// <seealso cref="Element" />
public class HtmlMetaElement : Element
{
/// <summary>
/// Initializes a new instance of the <see cref="HtmlMetaElement"/> class.
/// </summary>
public HtmlMetaElement() : base("meta")
{
}
/// <summary>
/// Gets or sets the 'content' HTML5 attribute.
/// </summary>
public string? Content
{
get => GetAttributeLower("content");
set => SetAttributeLower("content", value);
}
/// <summary>
/// Gets or sets the 'httpequiv' HTML5 attribute.
/// </summary>
public string? HttpEquiv
{
get => GetAttributeLower("http-equiv");
set => SetAttributeLower("http-equiv", value);
}
/// <summary>
/// Gets or sets the 'name' HTML5 attribute.
/// </summary>
public string? Name
{
get => GetAttributeLower("name");
set => SetAttributeLower("name", value);
}
}
}
|
<?php
namespace plainframe\Controllers;
// Minimal concrete Controller used by the test suite; the private property
// and empty index() exist only so tests can exercise base Controller
// behaviour through a concrete subclass.
class ControllerMock extends Controller {
    private $var = "isset";

    public function index($params = array()) {
    }
}
?>
|
using System;
using System.IO;
using System.Net.Http;
namespace WebAnchor
{
public class StreamContentPart : ContentPartBase
{
public StreamContentPart(Stream content)
: this(content, null, null)
{
}
public StreamContentPart(Stream content, string name)
: this(content, name, null)
{
}
public StreamContentPart(Stream content, string name, string fileName)
: base(name, fileName)
{
this.Content = content ?? throw new ArgumentNullException(nameof(content));
}
public Stream Content { get; }
protected override HttpContent CreateSpecificContent()
{
return new StreamContent(this.Content);
}
}
}
|
package com.benasher44.uuid
// Platform-specific source of random bytes used to construct a UUID.
internal expect fun getRandomUuidBytes(): ByteArray

// Freezes the receiver for safe cross-thread sharing; presumably a no-op on
// platforms without Kotlin/Native's freezing model — confirm in the actuals.
internal expect fun <T> T.freeze(): T
|
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AzureGuidance.Domain
{
    /// <summary>
    /// A customer order together with the products it contains.
    /// </summary>
    public class Order
    {
        public Guid OrderId { get; set; }

        /// <summary>Total amount charged for the order.</summary>
        public decimal TotalDue { get; set; }

        [Required]
        public string CustomerName { get; set; }

        public string EmailId { get; set; }

        /// <summary>Product name/quantity pairs that make up the order.</summary>
        public List<ProductDetails> ProductOrderDetailsList{ get; set; }

        public DateTime OrderDate { get; set; }

        // NOTE(review): camelCase property name kept as-is for compatibility
        // with existing callers/serialized payloads; PascalCase would be
        // idiomatic.
        [DefaultValue("Processed")]
        public string orderStatus { get; set; }
    }
    /// <summary>
    /// A product in the catalog.
    /// </summary>
    public class Product
    {
        public string ProductId { get; set; }
        public string ProductName { get; set; }
        public decimal UnitPrice { get; set; }
    }
    /// <summary>
    /// A selectable product row on the "add to order" screen, carrying the
    /// chosen quantity and the selection checkbox state.
    /// </summary>
    public class AddProduct
    {
        public string ProductId { get; set; }
        public string ProductName { get; set; }
        public decimal UnitPrice { get; set; }

        /// <summary>Quantity requested for this product.</summary>
        public int ProductQuantity { get; set; }

        /// <summary>Whether this row is selected in the UI.</summary>
        [DefaultValue(false)]
        public bool SelectProduct { get; set; }
    }
    /// <summary>
    /// Product name/quantity pair stored on an order.
    /// </summary>
    public class ProductDetails
    {
        public string ProductName { get; set; }
        public int ProductQuantity { get; set; }
    }
    /// <summary>
    /// View model combining an order with the full selectable product list.
    /// </summary>
    public class ProductOrder
    {
        public Order order { get; set; }
        public List<AddProduct> lstProducts { get; set; }
    }
//public class ProductOrderQuantity
//{
// public int ProductQuantity { get; set; }//---
//}
}
|
<?php
// CodeIgniter (1.x-era) controller that records site errors and reports
// them as tables, calendars and tweets.
class Errors extends Controller {

    // PHP4-style constructor, matching the framework version in use.
    function errors()
    {
        parent::Controller();
    }

    // Demo action: inserts a hard-coded "Database unavailable" error for
    // site 16 with the current timestamp.
    function index()
    {
        $this->load->model('errors_model');

        $site_id = "16";
        $error = "Database unavailable";
        $now = date("Y-m-d H:i:s");

        $data = array(
            'site_id' => $site_id ,
            'error' => $error ,
            'date' => $now
        );

        $this->errors_model->insert_error($data);
    }

    // Renders all errors for a site as an HTML table. Output is cached via
    // $this->output->cache(5) — NOTE(review): presumably 5 minutes; confirm
    // against the framework's cache() unit.
    function show_errors($site_id = ''){
        $this->output->cache(5);
        $this->load->model('errors_model');
        $this->load->library('table');

        $errors = $this->errors_model->get_errors($site_id);

        if ($errors->num_rows() > 0){
            $data['table'] = $this->table->generate($errors);
            $this->load->view('error_table', $data);
        }
    }

    // Shows a month calendar where each day with errors links to show_day.
    function show_calendar($site_id = '', $year = '', $month = ''){
        $this->load->model('errors_model');
        $this->load->library('calendar');

        $errors = $this->errors_model->get_errors_by_date($site_id, $year, $month);

        if ($errors->num_rows() > 0){
            $data = array();
            foreach ($errors->result() as $row){
                // NOTE(review): hard-coded localhost base URL; replace with
                // site_url() before deploying.
                $data[$row->day] = "http://localhost/codeigniter/errors/show_day/{$site_id}/{$year}/{$month}/{$row->day}";
            }
            echo $this->calendar->generate($year, $month, $data);
        }
    }

    // Prints every error recorded on the given day, each with a headline
    // truncated to four words.
    function show_day($site_id = '', $year = '', $month = '', $day = ''){
        $this->load->model('errors_model');
        $this->load->helper('text');

        $errors = $this->errors_model->get_errors_by_date($site_id, $year, $month, $day);

        if ($errors->num_rows() > 0){
            $data = array();
            foreach ($errors->result() as $row){
                echo "<h1>".$row->date." : ".word_limiter($row->error, 4)."</h1>";
                echo "<p>".$row->error."</p><br/><br/>";
            }
        }
    }

    // Tweets a localized error notice; only 'en' and 'es' load a language
    // file, any other value falls through without loading one.
    function report_twitter($user_lang = 'en'){
        switch($user_lang){
            case 'en':
                $this->lang->load('info', 'english');
                break;
            case 'es':
                $this->lang->load('info', 'spanish');
                break;
        }
        $this->load->library('twitter');
        // NOTE(review): credentials are hard-coded placeholders; move to
        // configuration before use.
        $this->twitter->auth('account@account.com','password');
        $this->twitter->update($this->lang->line('error'));
    }
}
|
<?php
namespace App\Http\Controllers\Admin;
use App\Models\Hotel;
use App\Http\Controllers\Controller;
use App\Http\Requests\MassDestroyRoomRequest;
use App\Http\Requests\StoreRoomRequest;
use App\Http\Requests\UpdateRoomRequest;
use App\Models\Room;
use App\Models\Booking;
use App\Models\Coupon;
use App\Models\RoomType;
use App\Models\BookingRoom;
use DB;
use Gate;
use Carbon\Carbon;
use Illuminate\Http\Request;
use Symfony\Component\HttpFoundation\Response;
use Yajra\DataTables\Facades\DataTables;
// Admin CRUD controller for hotel rooms, plus an availability search used
// by the booking flow.
class RoomsController extends Controller
{
    // Lists all rooms with their hotel, type and bookings eager-loaded.
    public function index(Request $request)
    {
        $rooms = Room::with('hotel','roomType','bookingRooms')->get();
        // dd($rooms->toArray());
        // foreach($rooms as $room){
        //     if($room->bookingRooms != null){
        //         dd($room->bookingRooms->toArray());
        //     }
        // }
        return view('admin.rooms.index',compact('rooms'));
    }

    // Shows the create form; requires the 'room_create' gate.
    public function create()
    {
        abort_if(Gate::denies('room_create'), Response::HTTP_FORBIDDEN, '403 Forbidden');

        $hotels = Hotel::all()->pluck('name', 'id')->prepend(trans('global.pleaseSelect'), '');
        $room_types = RoomType::all()->pluck('type', 'id')->prepend(trans('global.pleaseSelect'), '');

        return view('admin.rooms.create', compact('hotels', 'room_types'));
    }

    // Persists a new room; validation happens in StoreRoomRequest.
    // NOTE(review): mass-assigns $request->all(); relies on the Room
    // model's $fillable list — confirm it is restrictive enough.
    public function store(StoreRoomRequest $request)
    {
        $room = Room::create($request->all());

        return redirect()->route('admin.rooms.index')->with(['success'=>'create room success']);
    }

    // Shows the edit form; requires the 'room_edit' gate.
    public function edit(Room $room)
    {
        abort_if(Gate::denies('room_edit'), Response::HTTP_FORBIDDEN, '403 Forbidden');

        $hotels = Hotel::all()->pluck('name', 'id')->prepend(trans('global.pleaseSelect'), '');
        $room_types = RoomType::all()->pluck('type', 'id')->prepend(trans('global.pleaseSelect'), '');

        $room->load('hotel', 'roomType');

        return view('admin.rooms.edit', compact('hotels', 'room_types', 'room'));
    }

    // Updates an explicit whitelist of columns for the given room id.
    public function update(UpdateRoomRequest $request,$id)
    {
        $room = new Room;
        $arr['price'] = $request->price;
        $arr['room_number'] = $request->room_number;
        $arr['description'] = $request->description;
        $arr['hotel_id'] = $request->hotel_id;
        $arr['roomtype_id'] = $request->roomtype_id;
        $arr['qty'] = $request->qty;
        $arr['discount'] = $request->discount;
        $arr['area'] = $request->area;
        $arr['view'] = $request->view;
        $room::where('id',$id)->update($arr);
        return redirect()->route('admin.rooms.index')->with(['success'=>'update room success']);
    }

    // Displays one room with its relations; requires the 'room_show' gate.
    public function show($id)
    {
        abort_if(Gate::denies('room_show'), Response::HTTP_FORBIDDEN, '403 Forbidden');

        // $room->load('hotel', 'roomType', 'bookingRooms','images');
        $room = Room::with('hotel', 'roomType', 'bookingRooms','images')->findOrFail($id);
        return view('admin.rooms.show', compact('room'));
    }

    // Deletes one room; requires the 'room_delete' gate.
    public function destroy(Room $room)
    {
        abort_if(Gate::denies('room_delete'), Response::HTTP_FORBIDDEN, '403 Forbidden');

        $room->delete();

        return back()->with(['success'=>'delete booking success']);
    }

    // Bulk-deletes the rooms whose ids are posted; returns 204.
    public function massDestroy(MassDestroyRoomRequest $request)
    {
        Room::whereIn('id', request('ids'))->delete();

        return response(null, Response::HTTP_NO_CONTENT);
    }

    // AJAX endpoint toggling a room's status flag.
    public function changeStatus(Request $request){
        $room = Room::find($request->id);
        $room->status = $request->status;
        $room->save();
        return response()->json(['success' => 'Status Changed Successfully']);
    }

    // Availability search: a room is available for [startDate, endDate) if
    // it has no overlapping bookings, or if its overlapping bookings sum to
    // fewer units than its quantity. Optionally filters by room type.
    public function searchRoom(Request $request)
    {
        $coupons = Coupon::all()->pluck('reduction', 'id');
        $roomTypes = RoomType::all()->pluck('type', 'id');
        $startDate = $request->input('startDate');
        $endDate = $request->input('endDate');
        $query = $request->input('room_type');
        $dtnow = Carbon::now('Asia/Ho_Chi_Minh')->toDateString();
        if ($request->isMethod('POST')) {
            // Rooms with overlapping bookings but spare quantity remaining.
            $availableRoom = BookingRoom::join('rooms','rooms.id','=','booking_rooms.room_id')
                ->select("room_id", "qty", DB::raw('(sum(qty_total)) as total_qty'))
                ->where(function ($q) use ($startDate, $endDate) {
                    $q->where('startDate', '<',$endDate)
                        ->Where('endDate', '>', $startDate);})
                ->groupBy(DB::raw('room_id'), 'qty')
                ->havingRaw('(sum(qty_total)) < qty')
                ->pluck('room_id');
            if(isset($query)){
                $rooms = Room::with('roomType','bookingRooms', 'images')
                    ->whereDoesntHave('bookingRooms', function ($q) use ($startDate, $endDate) {
                        $q->where('startDate', '<',$endDate)
                            ->Where('endDate', '>', $startDate);})
                    ->orWhereIn('id', $availableRoom)->get();
                // Type filter applied in-memory on the result collection.
                $rooms = $rooms->where('roomtype_id','=',$query);
            }else{
                $rooms = Room::with('roomType','bookingRooms', 'images')
                    ->whereDoesntHave('bookingRooms', function ($q) use ($startDate, $endDate) {
                        $q->where('startDate', '<',$endDate)
                            ->Where('endDate', '>', $startDate);})
                    ->orWhereIn('id', $availableRoom)->get();
            }
        } else {
            $rooms = null;
        }
        return view('admin.searchrooms.index', compact('rooms','roomTypes','coupons','dtnow'));
    }
}
|
#!/bin/sh
# Generate CCP4 electron-density maps from an MTZ file.
#
# Arguments:
#   $1 - input MTZ file (HKLIN)
#   $2 - output file name prefix
#   $3 - pipeline: 'dimple' produces bordered 2fofc + fofc maps, anything
#        else produces a single experimental-phasing map
#   $4 - PDB model used by mapmask to border the maps (dimple branch only)
cd /tmp || exit 1

export CCP4_MASTER=/dls_sw/apps/ccp4/x86_64/6.4.0/11oct2013/
export CINCL=$CCP4_MASTER/ccp4-6.4.0/include
export CLIBD=$CCP4_MASTER/ccp4-6.4.0/lib/data
export CCP4_SCR=/tmp
export root=$CCP4_MASTER/ccp4-6.4.0/bin

# BUG FIX: '[ x == y ]' is a bashism that fails under POSIX sh (this
# script's interpreter); POSIX only guarantees '='. Positional parameters
# are quoted so paths containing spaces do not word-split.
if [ "$3" = 'dimple' ]; then
$root/fft HKLIN "$1" MAPOUT "/tmp/$2_dimple_2fofc.map.tmp" << eof
title $2 2fofc
xyzlim asu
scale F1 1.0
labin -
F1=F SIG1=SIGF PHI=PH2FOFCWT W=FOM
end
eof
$root/mapmask MAPIN "/tmp/$2_dimple_2fofc.map.tmp" MAPOUT "/tmp/$2_dimple_2fofc.map" XYZIN "$4" << eof
BORDER 5
eof
$root/fft HKLIN "$1" MAPOUT "/tmp/$2_dimple_fofc.map.tmp" << eof
title $2 fofc
xyzlim asu
scale F1 1.0
labin -
F1=F SIG1=SIGF PHI=PHFOFCWT W=FOM
end
eof
$root/mapmask MAPIN "/tmp/$2_dimple_fofc.map.tmp" MAPOUT "/tmp/$2_dimple_fofc.map" XYZIN "$4" << eof
BORDER 5
eof
gzip "/tmp/$2_dimple_2fofc.map"
gzip "/tmp/$2_dimple_fofc.map"
else
$root/fft HKLIN "$1" MAPOUT "/tmp/$2_ep.map" << eof
title $2 fofc
xyzlim asu
scale F1 1.0
labin -
F1=F SIG1=SIGF PHI=PHI W=FOM
end
eof
#$mm MAPIN "/tmp/$2_ep.map.tmp" MAPOUT "/tmp/$2_ep.map" XYZIN "$4" << eof
#BORDER 5
#eof
gzip "/tmp/$2_ep.map"
fi
|
/// <reference path="typings/index.d.ts" />
import Http = require('http');
// Connection settings and credentials for the Chat API token endpoint.
class AuthorizationConfig {
    host: string = 'lrcmain.brainglass.com'; // API URL
    port: number = 80;
    path: string = '/oauth/chat-token'; // path for the getToken request
    // key and secret are credentials for access to Chat API.
    // These credentials are for testing purposes only.
    // Request real credentials if you want to use Dictionary API.
    key: string = '5752f6ebf9f1aba26deb56b9';
    secret: string = 'yW6mY0AWVUqYz7D7';
    payload: string = 'grant_type=client_credentials';
    // FIX: Buffer.from() replaces the deprecated (and unsafe) `new Buffer()`
    // constructor; output is the same base64 Basic-auth credential string.
    oauthCode: string = Buffer.from(this.key + ':' + this.secret).toString('base64');
}
// Get token authorizing work with Chat API.
// It is on the server side and not on the client for security reasons,
// because it contains credentials authorizing access to Chat API.
export class Authorization {
    private authorizationConfig = new AuthorizationConfig();

    // Requests an OAuth access token from the Chat API using the
    // client-credentials grant. On success cb receives (null, accessToken);
    // on any failure (transport error, non-200 status, malformed JSON)
    // cb receives (err, null).
    getToken(cb: (err: any, result: string) => void): void {
        var opts = {
            hostname: this.authorizationConfig.host,
            port: this.authorizationConfig.port,
            path: this.authorizationConfig.path,
            method: 'POST',
            headers: {
                'Authorization': 'Basic ' + this.authorizationConfig.oauthCode,
                'Content-Type': 'application/x-www-form-urlencoded',
                // byteLength, not .length: the payload is measured in bytes
                'Content-Length': Buffer.byteLength(this.authorizationConfig.payload)
            }
        };
        var req = Http.request(opts, (res) => {
            res.setEncoding('utf8');
            // accumulate the chunked response body before parsing
            var result: string = '';
            res.on('data', (data) => {
                result += data;
            });
            res.on('end', () => {
                // non-200: pass the raw body back as the error value
                if (res.statusCode != 200)
                    return cb && cb(result, null);
                try {
                    cb && cb(null, JSON.parse(result)['access_token']);
                } catch (err) {
                    // body was not valid JSON
                    cb && cb(err, null);
                }
            });
        });
        req.on('error', (err) => {
            // transport-level failure (DNS, connect, reset, ...)
            cb && cb(err, null);
        });
        // send the urlencoded grant payload and finish the request
        req.end(this.authorizationConfig.payload);
    }
}
|
# Mixin that builds a signed JWT identifying the including record.
module Tokenable
  extend ActiveSupport::Concern

  # Returns a JWT whose payload names this record by id and class.
  # When the resource has an active "remember me" flag the token carries
  # a `remember` claim instead of an expiry; otherwise it expires at +exp+.
  def jwt_token(exp: 5.hour.from_now)
    claims = { resource_id: id, resource_type: self.class.to_s }
    if respond_to?(:remember_me) && remember_me
      claims[:remember] = true
    else
      claims[:exp] = exp.to_i
    end
    JWT.encode(claims, auth_token)
  end
end
|
library ng_specs;
import 'dart:html' hide Animation;
import 'dart:js' as js;
import 'package:angular/angular.dart';
import 'package:angular/mock/module.dart';
import 'package:unittest/unittest.dart' as unit;
import 'package:guinness/guinness_html.dart' as gns;
export 'dart:html' hide Animation;
export 'package:unittest/unittest.dart' hide expect;
export 'package:guinness/guinness_html.dart';
export 'package:mock/mock.dart';
export 'package:di/di.dart';
export 'package:angular/angular.dart';
export 'package:angular/application.dart';
export 'package:angular/introspection.dart';
export 'package:angular/cache/module.dart';
export 'package:angular/cache/js_cache_register.dart';
export 'package:angular/core/annotation.dart';
export 'package:angular/core/registry.dart';
export 'package:angular/core/module_internal.dart';
export 'package:angular/core_dom/module_internal.dart';
export 'package:angular/core_dom/type_to_uri_mapper.dart';
export 'package:angular/core/parser/parser.dart';
export 'package:angular/core/parser/lexer.dart';
export 'package:angular/directive/module.dart';
export 'package:angular/formatter/module.dart';
export 'package:angular/routing/module.dart';
export 'package:angular/animate/module.dart';
export 'package:angular/touch/module.dart';
export 'package:angular/mock/module.dart';
export 'package:perf_api/perf_api.dart';
/// Parses [html] without sanitization and returns the resulting top-level
/// nodes, detached from their temporary container element.
es(String html) {
  final host = new DivElement();
  host.setInnerHtml(html, treeSanitizer: new NullTreeSanitizer());
  return new List.from(host.nodes);
}

/// Parses [html] and returns only the first resulting node.
e(String html) => es(html).first;
// All our tests files are served under this prefix when run under Karma. (i.e.
// this file, _specs.dart, is at path /base/test/_specs.dart. However, if
// you're using a different test server or reconfigured the base prefix, then
// you can set this to something different.
String TEST_SERVER_BASE_PREFIX = "/base/";

/// Entry point for expectations; when [matcher] is supplied the check is
/// applied immediately, otherwise the caller chains off the returned object.
Expect expect(actual, [matcher]) {
  final expectation = new Expect(actual);
  if (matcher != null) {
    expectation.to(matcher);
  }
  return expectation;
}
/// Guinness [gns.Expect] extended with angular form-state helpers.
class Expect extends gns.Expect {
  Expect(actual) : super(actual);

  NotExpect get not => new NotExpect(actual);

  void toBeValid() {
    _expect(actual.valid && !actual.invalid, true, reason: 'Form is not valid');
  }

  void toBePristine() {
    _expect(actual.pristine && !actual.dirty, true, reason: 'Form is dirty');
  }

  void toHaveText(String text) {
    _expect(actual, new _TextMatcher(text));
  }

  Function get _expect => gns.guinness.matchers.expect;
}
/// Negated counterpart of [Expect] for the form-state helpers.
class NotExpect extends gns.NotExpect {
  NotExpect(actual) : super(actual);

  void toBeValid() {
    _expect(actual.valid && !actual.invalid, false, reason: 'Form is valid');
  }

  void toBePristine() {
    _expect(actual.pristine && !actual.dirty, false, reason: 'Form is pristine');
  }

  Function get _expect => gns.guinness.matchers.expect;
}
/// Unittest matcher comparing the recursive text content of a node (or node
/// list), as computed by [_elementText], against an expected string.
class _TextMatcher extends unit.Matcher {
  // The exact text the matched node's content must equal.
  final String expected;

  _TextMatcher(this.expected);

  unit.Description describe(unit.Description description) =>
      description..replace("element matching: ${expected}");

  // On mismatch, show the actual extracted text.
  unit.Description describeMismatch(actual, unit.Description mismatchDescription,
                                    Map matchState, bool verbose) =>
      mismatchDescription..add(_elementText(actual));

  bool matches(actual, Map matchState) =>
      _elementText(actual) == expected;
}
/// Recursively extracts the text of [n]: iterables are concatenated, comments
/// contribute nothing, <content> elements are resolved to their distributed
/// nodes, and a shadow root is traversed instead of the host's light DOM.
String _elementText(n) {
  hasShadowRoot(n) => n is Element && n.shadowRoot != null;
  if (n is Iterable) return n.map((nn) => _elementText(nn)).join("");
  if (n is Comment) return '';
  if (n is ContentElement) return _elementText(n.getDistributedNodes());
  if (hasShadowRoot(n)) return _elementText(n.shadowRoot.nodes);
  // Leaf node: fall back to its own text.
  if (n.nodes == null || n.nodes.isEmpty) return n.text;
  return _elementText(n.nodes);
}
/// Wraps a test body so that its arguments are resolved from the DI
/// container when the test runs.
Function _injectify(Function fn) {
  // The function does two things:
  // First: if the it() passed a function, we wrap it in
  //        the "sync" FunctionComposition.
  // Second: when we are calling the FunctionComposition,
  //         we inject "inject" into the middle of the
  //         composition.
  if (fn is! FunctionComposition) fn = sync(fn);
  var fc = fn as FunctionComposition;
  return fc.outer(inject(fc.inner));
}
// Replace guinness syntax elements to inject dependencies.

/// Registers [fn] as a module configurator; priority 1 runs it before the
/// injector is created.
void beforeEachModule(Function fn) {
  gns.beforeEach(module(fn), priority:1);
}

/// Guinness beforeEach with DI-aware argument injection.
void beforeEach(Function fn) {
  gns.beforeEach(_injectify(fn));
}

/// Guinness afterEach with DI-aware argument injection.
void afterEach(Function fn) {
  gns.afterEach(_injectify(fn));
}
// For sharding across multiple instances of karma.
// _numKarmaShards values:
//  1: (default) Use one shard.  (i.e. there's no sharding.)
//  0: No shards!  So no tests are run.  However, the preprocessors are still
//     executed and the browsers are launched.  This can be used to validate
//     the configuration and browsers without running any tests.
//     scripts/travis/build.sh uses this to run the preprocessors once to
//     generate the dart2js output.  It then runs the tests with multiple
//     shards knowing that these shards will all use the dart2js output
//     generated from the dummy run.
// > 1: Specifies that there are this many number of total karma shards.  If
//     there are N karma shards and T tests, then each shard runs about T/N
//     tests.  In this case, the _shardId - which must be [0, N) - indicates
//     the current karma shard so we can select the appropriate subset of
//     tests to run.
int _numShards = 1;      // total number of karma shards (see comment above)
int _shardId = 0;        // this instance's shard index, in [0, _numShards)
int _itCount = 0;        // running it() count, used for modulo sharding
bool _failOnIit = false; // true on CI: focused tests must not be committed

/// Safely resolves a dotted name (e.g. "__karma__.config") from the JS
/// context, returning null if any link in the chain is missing.
_safeJsGet(dottedName) => dottedName.split(".").fold(
    js.context, (a, b) => (a == null ? a : a[b]));
/// Reads sharding configuration from Karma's client args (set on Travis);
/// falls back to a single shard when the settings are absent.
_initSharding() {
  // Presence of the travis block alone forbids iit/ddescribe.
  _failOnIit = (_safeJsGet("__karma__.config.clientArgs.travis") != null);
  _numShards = _safeJsGet("__karma__.config.clientArgs.travis.numKarmaShards");
  _shardId = _safeJsGet("__karma__.config.clientArgs.travis.karmaShardId");
  if (_numShards == null || _shardId == null) {
    _numShards = 1;
    _shardId = 0;
  }
}
/// Handles the very first it() call: initializes sharding, then rebinds the
/// top-level `it` either to the real implementation or to a no-op.
void _itFirstTime(String name, Function fn) {
  _initSharding();
  if (_numShards > 0) {
    _it(name, fn);
    it = _it;
  } else {
    // This is a test run whose purpose is to prime the dart2js cache. Do not
    // actually run any tests.
    gns.it('should print the dart2js cache', () {});
    it = (String name, Function fn) {};
  }
}
/// Registers the test only when its ordinal falls on this instance's shard.
void _it(String name, Function fn) {
  _itCount += 1;
  if (_itCount % _numShards == _shardId) {
    gns.it(name, _injectify(fn));
  }
}

// `it` starts as _itFirstTime so sharding is configured lazily on first use.
var it = _itFirstTime;
/// Focused variant of it(); forbidden under CI so committed suites stay
/// complete. Focused tests bypass sharding and always run.
void iit(String name, Function fn) {
  if (_failOnIit) throw "iit is not allowed when running under a CI server";
  gns.iit(name, _injectify(fn));
}
/// Focused variant of describe(); forbidden under CI so committed suites
/// stay complete.
void ddescribe(String name, Function fn) {
  if (_failOnIit) throw "ddescribe is not allowed when running under a CI server";
  gns.ddescribe(name, fn);
}
/// Returns a deep clone of [node] with every 'ng-binding' marker class
/// stripped (on the element itself and all descendants); non-elements are
/// returned unchanged.
_removeNgBinding(node) {
  if (node is! Element) return node;
  final cloned = node.clone(true) as Element;
  cloned.classes.remove('ng-binding');
  for (final Element descendant in cloned.querySelectorAll(".ng-binding")) {
    descendant.classes.remove('ng-binding');
  }
  return cloned;
}
/// Shared harness setup: injector lifecycle around every test, plus HTML
/// matchers configured to ignore angular's ng-binding marker classes.
main() {
  // Priority 3 runs the injector setup before module/inject beforeEach hooks.
  gns.beforeEach(setUpInjector, priority:3);
  gns.afterEach(tearDownInjector);
  gns.guinnessEnableHtmlMatchers();
  gns.guinness.matchers.config.preprocessHtml = _removeNgBinding;
}
|
#pragma once
#include <type_traits>
#include <random>
#include <pttk/macroutils.h>
// Singleton wrapper around a Mersenne Twister engine seeded once from a
// std::random_device; exposes uniform integer generation via a static call.
class NumberGenerator
{
public:
    /*template<typename T>
    static T generateNumberBetween(T min, T max)
    {
        static_assert(std::is_fundamental_v<T>, "Type must be a fundamental type");
    }*/
    // Returns a uniformly distributed int in the inclusive range [min, max].
    // A fresh distribution is built per call; the engine state is shared
    // through the singleton. NOTE(review): no synchronization is visible
    // here — confirm single-threaded use before calling concurrently.
    static int generateBetween(int min, int max)
    {
        return std::uniform_int_distribution(min, max)(get().m_numberGenerator);
    }
private:
    // Lazily constructed singleton accessor (defined out of line).
    static NumberGenerator& get();
    explicit NumberGenerator();
    DELETE_COPY_CONSTR_ASSIGN(NumberGenerator)
    std::random_device m_randomDevice; // Will be used to obtain a seed for the random number engine
    std::mt19937 m_numberGenerator; // Standard mersenne_twister_engine seeded with rd()
};
|
<?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use App\Model\Day;
use Session;
class DayController extends Controller
{
    /**
     * List all days together with their total count.
     */
    public function view()
    {
        $countDay = Day::count();
        $days = Day::all();

        // Pass everything through one data array instead of abusing view()'s
        // third ($mergeData) argument.
        return view('admin.day.view-day', ['days' => $days, 'countDay' => $countDay]);
    }

    /**
     * Show the form for creating a new day.
     */
    public function add()
    {
        $countDay = Day::count();

        return view('admin.day.add-day', ['countDay' => $countDay]);
    }

    /**
     * Persist a new day and redirect back to the listing.
     */
    public function store(Request $request)
    {
        $day = new Day();
        $day->dayOne = $request->dayOne;
        $day->save();

        Session::flash('message','Day Save Successfully!');

        return redirect()->route('days-view');
    }

    /**
     * Show the edit form for an existing day.
     */
    public function edit($id)
    {
        // findOrFail() turns an unknown id into a 404 instead of passing
        // null into the view.
        $editday = Day::findOrFail($id);

        return view('admin.day.edit-day', ['editday' => $editday]);
    }

    /**
     * Update an existing day and redirect back to the listing.
     */
    public function update(Request $request)
    {
        $day = Day::findOrFail($request->id);
        $day->dayOne = $request->dayOne;
        $day->save();

        Session::flash('message','Day Update Successfully!');

        return redirect()->route('days-view');
    }

    /**
     * Delete a day and redirect back to the listing.
     */
    public function delete($id)
    {
        // findOrFail() prevents calling delete() on null for unknown ids.
        Day::findOrFail($id)->delete();

        Session::flash('message','Day Delete Successfully!');

        return redirect()->route('days-view');
    }
}
|
# rubyConstructs
## Prerequisites
1. Ensure that ruby is installed.
```
brew install ruby
```
2. Clone this repository
```
git clone https://github.com/bhagvank/rubyConstructs.git
```
3. Run hello world
```
cd code
ruby class.rb
```
## blog — a Ruby on Rails blog application
## myapp — a basic Ruby on Rails application
## todo — a Ruby on Rails API (web services)
|
<?php

// Russian translations for the admin panel: maintenance actions, variable
// management, and the settings tabs/fields.
return [
    // Maintenance actions
    'clear_cache' => 'Очистить кэш',
    'compile_template' => 'Компиляция шаблонов',
    'check_email_services' => 'Проверка почтовой службы',
    // Variable management screen
    'variables_list' => 'Список переменных',
    'key' => 'Ключ',
    'value' => 'Значение',
    'remove_variable' => 'Удалить переменную',
    'create_variable' => 'Создать переменную',
    'save' => 'Сохранить',
    // Settings tab titles
    'settings_tab_main' => 'Основные настройки',
    'settings_tab_mail' => 'Настройки почты',
    'settings_tab_database' => 'База данных',
    'settings_tab_information' => 'Основная информация',
    // Main settings fields
    'main_settings_name' => 'Название сайта',
    'main_settings_url' => 'URL сайта',
    'main_settings_template' => 'Шаблон сайта',
    'main_settings_language' => 'Язык сайта по умолчанию',
    'main_settings_closed' => 'Закрыть публичную версию сайта?',
    'main_settings_closed_closed' => 'Да',
    'main_settings_closed_enabled' => 'Нет',
    // Mail settings fields
    'mail_from_name' => 'Подпись email',
    'mail_from_email' => 'Отправить email от',
    'mail_driver' => 'Email драйвер',
    'mail_host' => 'Email хост',
    'mail_port' => 'Email порт',
    'mail_encryption' => 'Email шифрование',
    'mail_username' => 'Имя пользователя для авторизации',
    'mail_password' => 'Пароль пользователя для авторизации',
    // Information tab fields
    'information_framework_version' => 'Версия Laravel фреймворка',
    'information_cms_version' => 'Версия CMS',
    'information_cms_developers' => 'Разработчики CMS',
];
|
import { mount, mock, collectCoverage } from 'griffin'
// End-to-end checks for the PROFILE_INFO screen under griffin.
describe('Profile Info', () => {
  it('renders authenticated', async () => {
    await mount('PROFILE_INFO')
    await expect(element(by.text('Authenticated!'))).toBeVisible()
  })
  it('renders not authenticated', async () => {
    // NOTE(review): the useUser mock is installed *after* mount(); if griffin
    // resolves hooks at mount time this mock arrives too late — confirm the
    // intended mount/mock ordering against griffin's docs.
    await mount('PROFILE_INFO')
    await mock('./hooks/useUser.js', 'useUser')
    await expect(element(by.text('Not Authenticated!'))).toBeVisible()
  })
})
|
@extends('layouts.app')
@section('content')
{{-- Edit form for an existing project: the shared partial renders the
     fields and this wrapper submits a PATCH to the project's own path. --}}
<main class="container w-75">
    <h4 class="heading m-auto">Edit Your Project</h4>
    <form method="POST" action="{{ $project->path() }}" class="form-group mt-3">
        @csrf
        @method('PATCH')
        @include('projects.partials.form', [
            'buttonText' => 'Update Project'
        ])
    </form>
</main>
@endsection
|
%% -------------------------------------------------------------------
%%
%% xqerl - XQuery processor
%%
%% Copyright (c) 2017-2020 Zachary N. Dean All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(xqerl_test).
%-include_lib("common_test/include/ct.hrl").
-export([
assert/2,
assert_empty/1,
assert_type/2,
assert_xml/2,
assert_eq/2,
assert_deep_eq/2,
assert_false/1,
assert_true/1,
assert_count/2,
assert_permutation/2,
assert_error/2,
assert_string_value/2,
assert_norm_string_value/2,
assert_serialization_match/3,
assert_serialization_error/2
]).
-export([
run/1,
run_suite/2
]).
-export([
handle_environment/1,
combined_error/2
]).
-export([load_qt3_xml/0]).
-include("xqerl.hrl").
-define(LB(L), unicode:characters_to_binary(L)).
%% assert functions return either true or {false, Result}
%% Map a test-result term to the sequence-type annotation used when binding
%% it as the external $result variable in a follow-up query.
result_type_string(Result) when is_list(Result) ->
    " as item()*";
%% maps carrying the nk key are xqerl node maps
result_type_string(#{nk := _}) ->
    " as node()";
result_type_string(Result) when is_map(Result) ->
    " as map(*)";
result_type_string(Result) when element(1, Result) == array ->
    " as array(*)";
result_type_string(_) ->
    " as item()*".
%% assert (: run test query with result as variable == true :)
%% Bind Result as the external variable $result (typed via
%% result_type_string/1), run QueryString, and succeed when the query's
%% effective boolean value is true. Engine crashes and #xqError{} results
%% are reported as {false, ...} tuples.
assert(Result, QueryString) ->
    Type = result_type_string(Result),
    NewQueryString =
        "declare variable $result" ++
            Type ++
            " external; " ++ QueryString,
    case catch xqerl:run(NewQueryString, #{<<"result">> => Result}) of
        {'EXIT', Res} ->
            %% crash inside the query engine
            ?dbg("false", {false, {Res, Result, QueryString}}),
            {false, {Res, Result, QueryString}};
        #xqError{} = Res ->
            %% the query itself raised an XQuery error
            ?dbg("false", {false, {Res, Result, QueryString}}),
            {false, {Res, Result, QueryString}};
        Res1 ->
            case xqerl_operators:eff_bool_val(Res1) of
                true ->
                    true;
                false ->
                    ?dbg("false", {false, {assert, Res1, Result, QueryString}}),
                    {false, {assert, Res1, QueryString}}
            end
    end.
%% assert_empty (: string value of result == [] :)
%% Succeeds when the result's string value is empty (either the empty list
%% or the empty binary); otherwise reports the offending string value.
assert_empty(Result) ->
    StrVal = string_value(Result),
    if
        StrVal == []; StrVal == <<>> -> true;
        true -> {false, {assert_empty, StrVal}}
    end.
%% assert_type (: result instance of type :)
%% Evaluate `($result) instance of TypeString` inside the engine, with the
%% test result bound as $result; engine crashes report {false, ...}.
assert_type(Result, TypeString) ->
    NewQueryString =
        "declare variable $result as item()* external; "
        "($result) instance of " ++ TypeString,
    case catch xqerl:run(NewQueryString, #{<<"result">> => Result}) of
        {'EXIT', Res} ->
            {false, {assert_type, Res}};
        Res1 ->
            %% the engine returns a boolean item; compare its string value
            case string_value(Res1) of
                <<"true">> -> true;
                _ -> {false, {assert_type, Res1, TypeString}}
            end
    end.
%% assert_xml (: fn:deep-equal(result, run test query) :)
%% Expected value read from a file: normalize its line endings first.
assert_xml(Result, {file, FileLoc}) ->
    {ok, FileBin} = file:read_file(FileLoc),
    assert_xml(Result, normalize_lines(FileBin, <<>>));
%% Expected value is a document file: parse and re-serialize it so the
%% comparison sees xqerl's own canonical XML form.
assert_xml(Result, {doc_file, FileLoc}) ->
    {ok, FileBin} = file:read_file(FileLoc),
    Norm = normalize_lines(FileBin, <<>>),
    Doc = xqerl_mod_fn:'parse-xml'(#{'base-uri' => <<>>}, Norm),
    Norm1 = xqerl_node:to_xml(Doc),
    assert_xml(Result, Norm1);
%% Expected value given as a charlist: convert to binary and retry.
assert_xml(Result, QueryString) when is_list(QueryString) ->
    assert_xml(Result, ?LB(QueryString));
%% An error result can never match an expected XML value.
assert_xml(#xqError{} = Err, QueryString) ->
    {false, {assert_xml, Err, QueryString}};
assert_xml(Result, QueryString0) when is_binary(Result) ->
    QueryString = xqerl_lib:trim(QueryString0),
    % fragments sometimes only work this way
    case Result == QueryString of
        true -> true;
        false -> assert_xml_1(Result, QueryString)
    end;
%% Otherwise serialize the result to XML text and retry.
assert_xml(Result, QueryString0) ->
    assert_xml(xqerl_node:to_xml(Result), QueryString0).
%% Parse both sides as XML fragments and compare node trees with codepoint
%% collation; falls back to wrapping the expected side in document{} when
%% fragment parsing rejects it. Any parse/compare crash counts as a mismatch.
assert_xml_1(Result, QueryString) ->
    try
        ResXml2 = xqerl_mod_fn:'parse-xml-fragment'(
            #{'base-uri' => <<>>},
            Result
        ),
        QueryString2 =
            case
                catch xqerl_mod_fn:'parse-xml-fragment'(
                    #{'base-uri' => <<>>},
                    QueryString
                )
            of
                {'EXIT', #xqError{}} ->
                    %% not a well-formed fragment; evaluate it as a query
                    %% building a document node instead
                    xqerl:run(
                        unicode:characters_to_list(
                            <<"document{ ", QueryString/binary, " }">>
                        )
                    );
                Other ->
                    Other
            end,
        case xqerl_node:nodes_equal(ResXml2, QueryString2, codepoint) of
            true -> true;
            false -> {false, {assert_xml, Result, QueryString}}
        end
    catch
        _:_:Stack ->
            ?dbg("Stack", Stack),
            {false, {assert_xml, Result, QueryString}}
    end.
%% Evaluate `$result = TypeString` (XQuery general comparison) with the test
%% result bound as $result.
assert_eq(Result, TypeString) ->
    NewQueryString =
        "declare variable $result as item()* external; "
        "$result = " ++ TypeString,
    case catch xqerl:run(NewQueryString, #{<<"result">> => Result}) of
        {'EXIT', Res} ->
            ?dbg("Res", Res),
            {false, Res};
        Res1 ->
            %% compare the boolean item's string value
            case string_value(Res1) of
                <<"true">> -> true;
                _ -> {false, {assert_eq, Result, TypeString}}
            end
    end.
%% assert_deep_eq (: fn:deep-equal(result, run test query) :)
%% Evaluate fn:deep-equal($result, (QueryString)) with the test result bound
%% as $result.
assert_deep_eq(Result, QueryString) ->
    NewQueryString =
        "declare variable $result as item()* external; "
        "fn:deep-equal($result,(" ++ QueryString ++ "))",
    case catch xqerl:run(NewQueryString, #{<<"result">> => Result}) of
        {'EXIT', Res} ->
            {false, Res};
        Res1 ->
            case string_value(Res1) of
                <<"true">> -> true;
                _ -> {false, {assert_deep_eq, Result, QueryString}}
            end
    end.
%% assert_false (: string value of result == 'true' :)
%% Succeeds only when the result is exactly the atom 'false'.
assert_false(Result) ->
    case Result of
        false -> true;
        _ -> {false, {assert_false, Result}}
    end.
%% assert_true (: string value of result == 'false' :)
%% Succeeds only when the result is exactly the atom 'true'.
assert_true(Result) ->
    case Result of
        true -> true;
        _ -> {false, {assert_true, Result}}
    end.
%% assert_permutation (: take_while member(result, run test query) == [] :)
%% the result should be a list of atomic values, the permute list also
%% Evaluate PermuteString as a sequence and check that Result is a
%% permutation of it (same items in any order).
assert_permutation(Result, PermuteString) ->
    QueryString = "(" ++ PermuteString ++ ")",
    case catch xqerl:run(QueryString, #{}) of
        {'EXIT', Res} ->
            {false, Res};
        Res1 ->
            case assert_permutation_1(ensure_list(Result), Res1, []) of
                true -> true;
                false -> {false, {assert_permutation, Res1, PermuteString, Result}}
            end
    end.
%% Permutation check: for the head of the first list, scan the second list
%% for an equal item; on a match drop both (restoring the skipped items from
%% Acc2), otherwise stash the mismatching candidate in Acc2 and keep looking.
%% True only when both lists are exhausted together.
assert_permutation_1([], [], []) ->
    true;
assert_permutation_1(_, [], _) ->
    false;
assert_permutation_1([R1 | T1] = List1, [R2 | T2], Acc2) ->
    case permute_equal(R1, R2) of
        true -> assert_permutation_1(T1, T2 ++ Acc2, []);
        false -> assert_permutation_1(List1, T2, [R2 | Acc2])
    end.
%% Two items permute-match when the engine's equality holds, or both are NaN
%% (NaN never equals NaN under XQuery comparison, so it is special-cased).
permute_equal(R1, R2) ->
    case catch xqerl_operators:equal(R1, R2) of
        true -> true;
        _ -> xqerl_types:value(R1) == nan andalso xqerl_types:value(R2) == nan
    end.
%% assert_count (: fn:count(result) == cnt :)
%% True when Result holds exactly the expected number of items: a list must
%% have length Cnt, and a single non-list item counts as one.
%% Fixed: the original final case clause matched the atom 'true' against the
%% parsed integer, so any failing count crashed with case_clause, and the
%% bare `1 ->` clause accepted wrong-length lists whenever Cnt was 1.
assert_count(Result, TypeString) ->
    Cnt = list_to_integer(TypeString),
    if
        is_list(Result), length(Result) == Cnt -> true;
        not is_list(Result), Cnt == 1 -> true;
        true -> {false, {assert_count, Result, TypeString}}
    end.
%% assert_string_value (: string value of result == Str :)
%% Normalize a charlist expectation to binary, then compare against the
%% result's string value exactly.
assert_string_value(Result, String) when is_list(String) ->
    assert_string_value(Result, ?LB(String));
assert_string_value(Result, String) ->
    case string_value(Result) of
        String -> true;
        StrVal -> {false, {assert_string_value, StrVal, String}}
    end.
%% Like assert_string_value/2, but whitespace- and string-normalizes the
%% result's string value before comparing.
assert_norm_string_value(Result, String) when is_list(String) ->
    assert_norm_string_value(Result, ?LB(String));
assert_norm_string_value(Result, String) ->
    StrVal = xqerl_lib:normalize_spaces(
        xqerl_lib:normalize_string(
            string_value(Result)
        )
    ),
    case StrVal of
        String -> true;
        _ -> {false, {assert_norm_string_value, StrVal, String}}
    end.
%% An error result can never match a serialization regex.
assert_serialization_match(#xqError{} = Err, _, _) ->
    {false, Err};
%% Binary result: XSD-normalize both the value and the regex, compile the
%% regex with the given flags, and succeed on any match.
assert_serialization_match(Result, SchemaRegex, Flags) when is_binary(Result) ->
    {ok, NormResult} = xs_regex:normalize(string_value(Result)),
    {ok, Norm} = xs_regex:normalize(SchemaRegex),
    {_, RE} = xs_regex:compile(Norm, Flags),
    case catch re:run(NormResult, RE) of
        {match, _} ->
            true;
        _ ->
            ?dbg("Result,SchemaRegex", {NormResult, Norm}),
            {false, {assert_serialization_match, NormResult, SchemaRegex}}
    end;
%% Non-binary result: serialize it first, then retry as a binary.
assert_serialization_match(Item, SchemaRegex, Flags) ->
    Ser = xqerl_serialize:serialize(Item, #{}),
    assert_serialization_match(Ser, SchemaRegex, Flags).
%% assert_error
%% Succeeds when the raised error's name matches ErrorCode; "*" matches any
%% error, and Clark notation (Q{ns}local) matches namespace-qualified codes.
assert_error(Result, ErrorCode) when is_list(ErrorCode) ->
    assert_error(Result, list_to_binary(ErrorCode));
assert_error(
    #xqError{name = #xqAtomicValue{value = #qname{local_name = Err}}},
    ErrorCode
) when Err == ErrorCode; ErrorCode == <<"*">> ->
    true;
assert_error(
    #xqError{name = #xqAtomicValue{value = #qname{namespace = ErrNs, local_name = Err}}},
    ErrorCode
) ->
    case error_name_match(ErrorCode, ErrNs, Err) of
        true ->
            true;
        _ ->
            %% NOTE(review): a mismatching qualified code still returns
            %% {true, Err} here, unlike assert_serialization_error/2 which
            %% returns {false, ...} — confirm this leniency is intentional.
            {true, Err}
    end;
%% Not an error at all: the expected error was never raised.
assert_error(Result, ErrorCode) ->
    {false, {assert_error, Result, ErrorCode}}.
%% Succeeds when serializing the result raises an error whose name matches
%% ErrorCode ("*" matches any; Clark notation matches qualified codes).
assert_serialization_error(Result, ErrorCode) when is_list(ErrorCode) ->
    assert_serialization_error(Result, list_to_binary(ErrorCode));
assert_serialization_error(
    #xqError{name = #xqAtomicValue{value = #qname{local_name = Err}}},
    ErrorCode
) when Err == ErrorCode; ErrorCode == <<"*">> ->
    true;
assert_serialization_error(
    #xqError{name = #xqAtomicValue{value = #qname{namespace = ErrNs, local_name = Err}}},
    ErrorCode
) ->
    case error_name_match(ErrorCode, ErrNs, Err) of
        true ->
            true;
        _ ->
            {false, {Err, ErrorCode}}
    end;
%% Not already an error: attempt the serialization and re-dispatch on the
%% raised error (or fail when serialization succeeds).
assert_serialization_error(Result, ErrorCode) ->
    case catch xqerl_serialize:serialize(Result, #{}) of
        #xqError{} = Err ->
            assert_serialization_error(Err, ErrorCode);
        Result1 ->
            {false, {assert_serialization_error, Result1, ErrorCode}}
    end.
%% Compare an expected error code written in Clark notation,
%% Q{Namespace}LocalName, against the raised error's namespace/local pair.
%% An empty namespace yields the Q{}Local form.
error_name_match(ErrorCode, ErrNs, Err) ->
    <<"Q{", ErrNs/binary, "}", Err/binary>> == ErrorCode.
% normalize end-of-line characters
%% Normalize line endings: CRLF and lone CR both become a single LF.
%% The CRLF clause must precede the bare-CR clause so pairs collapse to
%% one newline.
normalize_lines(<<"\r\n", Rest/binary>>, Out) ->
    normalize_lines(Rest, <<Out/binary, "\n">>);
normalize_lines(<<"\r", Rest/binary>>, Out) ->
    normalize_lines(Rest, <<Out/binary, "\n">>);
normalize_lines(<<C, Rest/binary>>, Out) ->
    normalize_lines(Rest, <<Out/binary, C>>);
normalize_lines(<<>>, Out) ->
    Out.
%% String value of a result term: flatten list results into a sequence
%% first, pass binaries through unchanged, and delegate everything else to
%% the engine's string_value.
string_value(List) when is_list(List) ->
    Seq = xqerl_seq3:flatten(List),
    xqerl_types:string_value(Seq);
string_value(Seq) when is_binary(Seq) ->
    Seq;
string_value(Seq) ->
    xqerl_types:string_value(Seq).
%% Run a common_test suite (or list of suites) located under the xqerl
%% application's test/SubDir directory, logging into test/logs.
run_suite(Suite, SubDir) ->
    LibDir = code:lib_dir(xqerl),
    TestDir = filename:absname_join(LibDir, "test"),
    TestSubDir = filename:join(TestDir, SubDir),
    LogDir = filename:join(TestDir, "logs"),
    %_ = delete_all_docs(),
    ct:run_test([
        {suite, Suite},
        {dir, TestSubDir},
        {logdir, LogDir},
        {logopts, [no_src]}
    ]).
%% Convenience wrapper used by run/1: directory first, suites second.
run_suites(SubDir, Suites) ->
    run_suite(Suites, SubDir).
%% Wrap a non-list term in a single-element list; lists pass through.
ensure_list(Term) when is_list(Term) -> Term;
ensure_list(Term) -> [Term].
run(all) ->
run(prod),
run(app),
run(misc),
run(ser),
run(xs),
run(fn),
run(map),
run(op),
run(array),
run(math),
run(expath),
run(restxq),
run(xquts);
run(app) ->
run_suites("app", [
app_CatalogCheck_SUITE,
app_Demos_SUITE,
app_FunctxFn_SUITE,
app_FunctxFunctx_SUITE,
app_UseCaseCompoundValues_SUITE,
app_UseCaseJSON_SUITE,
app_UseCaseNLP_SUITE,
app_UseCaseNS_SUITE,
app_UseCasePARTS_SUITE,
app_UseCaseR_SUITE,
app_UseCaseR31_SUITE,
app_UseCaseSEQ_SUITE,
app_UseCaseSGML_SUITE,
app_UseCaseSTRING_SUITE,
app_UseCaseTREE_SUITE,
app_UseCaseXMP_SUITE,
app_Walmsley_SUITE,
app_XMark_SUITE,
app_spec_examples_SUITE
]);
run(math) ->
run_suites("math", [
math_acos_SUITE,
math_asin_SUITE,
math_atan_SUITE,
math_atan2_SUITE,
math_cos_SUITE,
math_exp_SUITE,
math_exp10_SUITE,
math_log_SUITE,
math_log10_SUITE,
math_pi_SUITE,
math_pow_SUITE,
math_sin_SUITE,
math_sqrt_SUITE,
math_tan_SUITE
]);
run(misc) ->
run_suites("misc", [
misc_CombinedErrorCodes_SUITE,
misc_AnnexE_SUITE,
misc_AppendixA4_SUITE,
misc_ErrorsAndOptimization_SUITE,
misc_HigherOrderFunctions_SUITE,
misc_JsonTestSuite_SUITE,
misc_StaticContext_SUITE,
misc_Surrogates_SUITE,
misc_UCACollation_SUITE,
misc_XMLEdition_SUITE
]);
run(ser) ->
run_suites("ser", [
method_adaptive_SUITE,
method_html_SUITE,
method_json_SUITE,
method_text_SUITE,
method_xhtml_SUITE,
method_xml_SUITE
]);
run(xs) ->
run_suites("xs", [
xs_anyURI_SUITE,
xs_base64Binary_SUITE,
xs_dateTimeStamp_SUITE,
xs_double_SUITE,
xs_error_SUITE,
xs_float_SUITE,
xs_hexBinary_SUITE,
xs_normalizedString_SUITE,
xs_numeric_SUITE,
xs_token_SUITE
]);
run(prod) ->
run_suites("prod", [
prod_AllowingEmpty_SUITE,
prod_Annotation_SUITE,
prod_ArrayTest_SUITE,
prod_ArrowPostfix_SUITE,
prod_AxisStep_SUITE,
prod_AxisStep_abbr_SUITE,
prod_AxisStep_ancestor_SUITE,
prod_AxisStep_ancestor_or_self_SUITE,
prod_AxisStep_following_SUITE,
prod_AxisStep_following_sibling_SUITE,
prod_AxisStep_preceding_SUITE,
prod_AxisStep_preceding_sibling_SUITE,
prod_AxisStep_static_typing_SUITE,
prod_AxisStep_unabbr_SUITE,
prod_BaseURIDecl_SUITE,
prod_BoundarySpaceDecl_SUITE,
prod_CastableExpr_SUITE,
prod_CastExpr_SUITE,
prod_CastExpr_derived_SUITE,
prod_CastExpr_schema_SUITE,
prod_Comment_SUITE,
prod_CompAttrConstructor_SUITE,
prod_CompDocConstructor_SUITE,
prod_CompCommentConstructor_SUITE,
prod_CompElemConstructor_SUITE,
prod_CompNamespaceConstructor_SUITE,
prod_CompPIConstructor_SUITE,
prod_CompTextConstructor_SUITE,
prod_ConstructionDecl_SUITE,
prod_ConstructionDecl_schema_SUITE,
prod_ContextItemDecl_SUITE,
prod_ContextItemExpr_SUITE,
prod_CopyNamespacesDecl_SUITE,
prod_CountClause_SUITE,
prod_CurlyArrayConstructor_SUITE,
prod_DecimalFormatDecl_SUITE,
prod_DefaultCollationDecl_SUITE,
prod_DefaultNamespaceDecl_SUITE,
prod_DirAttributeList_SUITE,
prod_DirectConstructor_SUITE,
prod_DirElemConstructor_SUITE,
prod_DirElemContent_SUITE,
prod_DirElemContent_namespace_SUITE,
prod_DirElemContent_whitespace_SUITE,
prod_EmptyOrderDecl_SUITE,
prod_EQName_SUITE,
prod_ExtensionExpr_SUITE,
prod_FLWORExpr_SUITE,
prod_FLWORExpr_static_typing_SUITE,
prod_ForClause_SUITE,
prod_FunctionCall_SUITE,
prod_FunctionDecl_SUITE,
prod_GeneralComp_eq_SUITE,
prod_GeneralComp_ge_SUITE,
prod_GeneralComp_gt_SUITE,
prod_GeneralComp_le_SUITE,
prod_GeneralComp_lt_SUITE,
prod_GeneralComp_ne_SUITE,
prod_GroupByClause_SUITE,
prod_IfExpr_SUITE,
prod_InlineFunctionExpr_SUITE,
prod_InstanceofExpr_SUITE,
prod_LetClause_SUITE,
prod_Literal_SUITE,
prod_Lookup_SUITE,
prod_MapConstructor_SUITE,
prod_MapTest_SUITE,
prod_ModuleImport_SUITE,
prod_NamedFunctionRef_SUITE,
prod_NamespaceDecl_SUITE,
prod_NameTest_SUITE,
prod_NodeTest_SUITE,
prod_OptionDecl_SUITE,
prod_OptionDecl_serialization_SUITE,
prod_OrExpr_SUITE,
prod_OrderByClause_SUITE,
prod_OrderingModeDecl_SUITE,
prod_PathExpr_SUITE,
prod_ParenthesizedExpr_SUITE,
prod_PositionalVar_SUITE,
prod_Predicate_SUITE,
prod_QuantifiedExpr_SUITE,
prod_ReturnClause_SUITE,
prod_SchemaImport_SUITE,
prod_SequenceType_SUITE,
prod_SquareArrayConstructor_SUITE,
prod_StepExpr_SUITE,
prod_StringConstructor_SUITE,
prod_SwitchExpr_SUITE,
prod_TreatExpr_SUITE,
prod_TryCatchExpr_SUITE,
prod_TypeswitchExpr_SUITE,
prod_UnorderedExpr_SUITE,
prod_UnaryLookup_SUITE,
prod_ValidateExpr_SUITE,
prod_ValueComp_SUITE,
prod_VarDecl_SUITE,
prod_VarDecl_external_SUITE,
prod_VarDefaultValue_SUITE,
prod_VersionDecl_SUITE,
prod_WhereClause_SUITE,
prod_WindowClause_SUITE
]);
run(fn) ->
run_suites("fn", [
fn_abs_SUITE,
fn_adjust_date_to_timezone_SUITE,
fn_adjust_dateTime_to_timezone_SUITE,
fn_adjust_time_to_timezone_SUITE,
fn_analyze_string_SUITE,
fn_apply_SUITE,
fn_available_environment_variables_SUITE,
fn_avg_SUITE,
fn_base_uri_SUITE,
fn_boolean_SUITE,
fn_ceiling_SUITE,
fn_codepoint_equal_SUITE,
fn_codepoints_to_string_SUITE,
fn_collation_key_SUITE,
fn_collection_SUITE,
fn_compare_SUITE,
fn_concat_SUITE,
fn_contains_SUITE,
fn_contains_token_SUITE,
fn_count_SUITE,
fn_current_date_SUITE,
fn_current_dateTime_SUITE,
fn_current_time_SUITE,
fn_data_SUITE,
fn_dateTime_SUITE,
fn_day_from_date_SUITE,
fn_day_from_dateTime_SUITE,
fn_days_from_duration_SUITE,
fn_deep_equal_SUITE,
fn_default_collation_SUITE,
fn_default_language_SUITE,
fn_distinct_values_SUITE,
fn_doc_SUITE,
fn_doc_available_SUITE,
fn_document_uri_SUITE,
fn_element_with_id_SUITE,
fn_empty_SUITE,
fn_encode_for_uri_SUITE,
fn_ends_with_SUITE,
fn_environment_variable_SUITE,
fn_error_SUITE,
fn_escape_html_uri_SUITE,
fn_exactly_one_SUITE,
fn_exists_SUITE,
fn_false_SUITE,
fn_filter_SUITE,
fn_floor_SUITE,
fn_fold_left_SUITE,
fn_fold_right_SUITE,
fn_for_each_SUITE,
fn_for_each_pair_SUITE,
fn_format_date_SUITE,
fn_format_dateTime_SUITE,
fn_format_integer_SUITE,
fn_format_number_SUITE,
fn_format_time_SUITE,
fn_function_lookup_SUITE,
fn_function_arity_SUITE,
fn_function_name_SUITE,
fn_generate_id_SUITE,
fn_has_children_SUITE,
fn_head_SUITE,
fn_hours_from_dateTime_SUITE,
fn_hours_from_duration_SUITE,
fn_hours_from_time_SUITE,
fn_id_SUITE,
fn_idref_SUITE,
fn_implicit_timezone_SUITE,
fn_innermost_SUITE,
fn_index_of_SUITE,
fn_insert_before_SUITE,
fn_in_scope_prefixes_SUITE,
fn_iri_to_uri_SUITE,
fn_json_doc_SUITE,
fn_json_to_xml_SUITE,
fn_lang_SUITE,
fn_last_SUITE,
fn_load_xquery_module_SUITE,
fn_local_name_SUITE,
fn_local_name_from_QName_SUITE,
fn_lower_case_SUITE,
fn_max_SUITE,
fn_matches_SUITE,
fn_matches_re_SUITE,
fn_min_SUITE,
fn_minutes_from_dateTime_SUITE,
fn_minutes_from_duration_SUITE,
fn_minutes_from_time_SUITE,
fn_month_from_date_SUITE,
fn_months_from_duration_SUITE,
fn_month_from_dateTime_SUITE,
fn_name_SUITE,
fn_namespace_uri_SUITE,
fn_namespace_uri_for_prefix_SUITE,
fn_namespace_uri_from_QName_SUITE,
fn_nilled_SUITE,
fn_node_name_SUITE,
fn_normalize_space_SUITE,
fn_normalize_unicode_SUITE,
fn_not_SUITE,
fn_number_SUITE,
fn_one_or_more_SUITE,
fn_outermost_SUITE,
fn_parse_ietf_date_SUITE,
fn_parse_json_SUITE,
fn_parse_xml_SUITE,
fn_parse_xml_fragment_SUITE,
fn_path_SUITE,
fn_position_SUITE,
fn_prefix_from_QName_SUITE,
fn_QName_SUITE,
fn_random_number_generator_SUITE,
fn_remove_SUITE,
fn_replace_SUITE,
fn_resolve_QName_SUITE,
fn_resolve_uri_SUITE,
fn_reverse_SUITE,
fn_root_SUITE,
fn_round_SUITE,
fn_round_half_to_even_SUITE,
fn_seconds_from_dateTime_SUITE,
fn_seconds_from_duration_SUITE,
fn_seconds_from_time_SUITE,
fn_serialize_SUITE,
fn_sort_SUITE,
fn_starts_with_SUITE,
fn_static_base_uri_SUITE,
fn_string_SUITE,
fn_string_join_SUITE,
fn_string_length_SUITE,
fn_string_to_codepoints_SUITE,
fn_subsequence_SUITE,
fn_substring_SUITE,
fn_substring_after_SUITE,
fn_substring_before_SUITE,
fn_sum_SUITE,
fn_tail_SUITE,
fn_timezone_from_date_SUITE,
fn_timezone_from_dateTime_SUITE,
fn_timezone_from_time_SUITE,
fn_tokenize_SUITE,
fn_trace_SUITE,
fn_transform_SUITE,
fn_translate_SUITE,
fn_true_SUITE,
fn_unordered_SUITE,
fn_unparsed_text_SUITE,
fn_unparsed_text_available_SUITE,
fn_unparsed_text_lines_SUITE,
fn_upper_case_SUITE,
fn_uri_collection_SUITE,
fn_xml_to_json_SUITE,
fn_year_from_date_SUITE,
fn_years_from_duration_SUITE,
fn_year_from_dateTime_SUITE,
fn_zero_or_one_SUITE
]);
run(map) ->
run_suites("map", [
map_merge_SUITE,
map_contains_SUITE,
map_find_SUITE,
map_get_SUITE,
map_entry_SUITE,
map_size_SUITE,
map_keys_SUITE,
map_put_SUITE,
map_remove_SUITE,
map_for_each_SUITE
]);
run(array) ->
run_suites("array", [
array_append_SUITE,
array_filter_SUITE,
array_flatten_SUITE,
array_fold_left_SUITE,
array_fold_right_SUITE,
array_for_each_SUITE,
array_for_each_pair_SUITE,
array_get_SUITE,
array_head_SUITE,
array_insert_before_SUITE,
array_join_SUITE,
array_put_SUITE,
array_remove_SUITE,
array_reverse_SUITE,
array_size_SUITE,
array_sort_SUITE,
array_subarray_SUITE,
array_tail_SUITE
]);
run(op) ->
run_suites("op", [
op_add_dayTimeDurations_SUITE,
op_add_dayTimeDuration_to_date_SUITE,
op_add_dayTimeDuration_to_dateTime_SUITE,
op_add_dayTimeDuration_to_time_SUITE,
op_add_yearMonthDurations_SUITE,
op_add_yearMonthDuration_to_date_SUITE,
op_add_yearMonthDuration_to_dateTime_SUITE,
op_anyURI_equal_SUITE,
op_anyURI_greater_than_SUITE,
op_anyURI_less_than_SUITE,
op_bang_SUITE,
op_base64Binary_equal_SUITE,
op_base64Binary_less_than_SUITE,
op_base64Binary_greater_than_SUITE,
op_boolean_equal_SUITE,
op_boolean_greater_than_SUITE,
op_boolean_less_than_SUITE,
op_concat_SUITE,
op_concatenate_SUITE,
op_date_equal_SUITE,
op_date_greater_than_SUITE,
op_date_less_than_SUITE,
op_dateTime_equal_SUITE,
op_dateTime_greater_than_SUITE,
op_dateTime_less_than_SUITE,
op_dayTimeDuration_greater_than_SUITE,
op_dayTimeDuration_less_than_SUITE,
op_divide_dayTimeDuration_SUITE,
op_divide_dayTimeDuration_by_dayTimeDuration_SUITE,
op_divide_yearMonthDuration_SUITE,
op_divide_yearMonthDuration_by_yearMonthDuration_SUITE,
op_duration_equal_SUITE,
op_except_SUITE,
op_gDay_equal_SUITE,
op_gMonth_equal_SUITE,
op_gMonthDay_equal_SUITE,
op_gYear_equal_SUITE,
op_gYearMonth_equal_SUITE,
op_hexBinary_equal_SUITE,
op_hexBinary_greater_than_SUITE,
op_hexBinary_less_than_SUITE,
op_intersect_SUITE,
op_is_same_node_SUITE,
op_multiply_dayTimeDuration_SUITE,
op_multiply_yearMonthDuration_SUITE,
op_node_after_SUITE,
op_node_before_SUITE,
op_NOTATION_equal_SUITE,
op_numeric_add_SUITE,
op_numeric_equal_SUITE,
op_numeric_divide_SUITE,
op_numeric_greater_than_SUITE,
op_numeric_integer_divide_SUITE,
op_numeric_less_than_SUITE,
op_numeric_mod_SUITE,
op_numeric_multiply_SUITE,
op_numeric_subtract_SUITE,
op_numeric_unary_minus_SUITE,
op_numeric_unary_plus_SUITE,
op_QName_equal_SUITE,
op_string_equal_SUITE,
op_string_greater_than_SUITE,
op_string_less_than_SUITE,
op_subtract_dates_SUITE,
op_subtract_dateTimes_SUITE,
op_subtract_dayTimeDuration_from_date_SUITE,
op_subtract_dayTimeDuration_from_dateTime_SUITE,
op_subtract_dayTimeDuration_from_time_SUITE,
op_subtract_dayTimeDurations_SUITE,
op_subtract_times_SUITE,
op_subtract_yearMonthDuration_from_date_SUITE,
op_subtract_yearMonthDuration_from_dateTime_SUITE,
op_subtract_yearMonthDurations_SUITE,
op_time_equal_SUITE,
op_time_greater_than_SUITE,
op_time_less_than_SUITE,
op_to_SUITE,
op_union_SUITE,
op_yearMonthDuration_greater_than_SUITE,
op_yearMonthDuration_less_than_SUITE,
op_same_key_SUITE
]);
run(restxq) ->
run_suites("restxq", [
restxq_SUITE
]);
run(expath) ->
run_suites("expath", [
expath_binary2_SUITE,
expath_binary_SUITE,
expath_file_SUITE
]);
run(xquts) ->
run_suites("xquts", [
xquts_SUITE
]);
run(Str) ->
io:format("~p~n", [Str]),
xqerl:run(Str).
%% Thin wrappers over proplists:get_value/2,3 used by handle_environment/1.
get_value(Key, Props) ->
    proplists:get_value(Key, Props).

get_value(Key, Props, Default) ->
    proplists:get_value(Key, Props, Default).
-define(ERR_NAME(A, B), #xqError{
name = #xqAtomicValue{value = #qname{namespace = A, local_name = B}}
}).
% -define(ERR_LOC(E), ((E#xqError.name)#xqAtomicValue.value)#qname.local_name).
%% ensure that errors from imported libraries are reported in the tests.
%% Prefer an error reported by an imported library over the top-level Err,
%% so library failures surface in test output.
combined_error(Err, LibReturns) ->
    %% NOTE(review): lists:usort/2 expects an ordering ("=<") function, but
    %% Sort is an equality test — for unequal errors it returns false both
    %% ways, so the resulting order/dedup is not well defined. Confirm.
    Sort = fun(?ERR_NAME(AN, AL), ?ERR_NAME(BN, BL)) ->
        {AN, AL} == {BN, BL}
    end,
    LibErrors = lists:usort(Sort, [
        E
     || %,
        #xqError{} = E <- LibReturns
    %?ERR_LOC(E) =/= <<"XQST0059">> % mod not found
    ]),
    ?dbg("LibErrors", LibErrors),
    case LibErrors of
        [H | _] ->
            %when ?ERR_LOC(Err) == <<"XQST0059">> ->
            %% report the first library error instead of the top-level one
            H;
        _ ->
            Err
    end.
%% Translate a parsed test-environment specification (a proplist) into:
%%   * a string of XQuery prolog declarations to prepend to each query
%%     (variable/namespace/decimal-format declarations), and
%%   * an options map handed to xqerl:run/2 (context item, parameters,
%%     base-uri, default collation, ...).
%% As a side effect, loads all referenced sources, resources,
%% collections and library modules into the database / code server.
handle_environment([]) ->
{"", #{}};
handle_environment(List) ->
TestDir = application:get_env(
xqerl,
test_dir,
filename:join(code:lib_dir(xqerl), "test")
),
_ = file:set_cwd([TestDir]),
Sources = get_value(sources, List),
Schemas = get_value(schemas, List, []),
Collections = get_value(collections, List),
BaseUri = get_value('static-base-uri', List),
Params = get_value(params, List),
Vars = get_value(vars, List, []),
ContextItem = get_value('context-item', List, []),
Namespaces = get_value(namespaces, List),
Resources = get_value(resources, List),
Modules = get_value(modules, List),
DecFormats = get_value('decimal-formats', List, []),
DeCollation = get_value('default-collation', List, undefined),
%% Unique default-collection URI per call so parallel tests do not
%% observe one another's collections.
Uniq = integer_to_binary(erlang:unique_integer()),
DefaultCollection = <<"http://example.org/default", Uniq/binary>>,
Map00 = #{default_collection => DefaultCollection},
Map1 =
case DeCollation of
undefined -> Map00;
_ -> Map00#{'default-collation' => ?LB(DeCollation)}
end,
_ = lists:foreach(fun environment_resource/1, Resources),
_ = lists:foreach(fun(I) -> environment_collections(I, DefaultCollection) end, Collections),
{Sources1, EMap} = lists:mapfoldl(fun environment_docs/2, Map1, Sources),
Schemas1 = lists:flatmap(fun environment_schema/1, Schemas),
%% Drop previously loaded library modules before compiling this
%% environment's modules, so stale code cannot leak between tests.
_ =
case Modules of
[] -> ok;
_ -> xqerl_code_server:unload(all)
end,
ModulesP = [
{File, ?LB(Uri)}
|| {File, Uri} <- Modules
],
_ = lists:foreach(
fun({File, _Uri}) ->
catch xqerl_code_server:compile(File, [], ModulesP)
end,
Modules
),
DecFormats1 = lists:map(fun environment_dec_format/1, DecFormats),
% these can be complex queries, so compile/run instead of just exec
Params1 = lists:foldl(fun environment_params/2, EMap, Params),
Namespaces1 = lists:foldl(fun environment_namespace/2, Params1, Namespaces),
ContextItem1 = lists:foldl(fun environment_context_item/2, Namespaces1, ContextItem),
BaseUri1 = environment_base_uri(BaseUri, ContextItem1),
Namespaces2 = lists:map(fun environment_namespace_1/1, Namespaces),
Vars1 = lists:map(fun environment_variable/1, Vars),
{Sources1 ++ Schemas1 ++ DecFormats1 ++ Namespaces2 ++ Vars1, BaseUri1}.
%% Normalize the two resource tuple shapes ({MediaType, File, Uri} or
%% {File, Uri}) and delegate to environment_resource_1/2.
environment_resource({_MediaType, File, Uri0}) -> environment_resource_1(File, Uri0);
environment_resource({File, Uri0}) -> environment_resource_1(File, Uri0).
%% Insert the contents of File into the database under Uri0, unless a
%% resource with that URI already exists (idempotent load).
environment_resource_1(File, Uri0) ->
Uri = ?LB(Uri0),
case xqldb_dml:exists_resource(Uri) of
true ->
ok;
false ->
{ok, Bin} = file:read_file(File),
xqldb_dml:insert_resource(Uri, Bin)
end.
%% Populate one collection described by the environment.  An empty URI
%% means "the default collection" (per-test unique URI supplied by the
%% caller).  CList is either a single generator query whose results are
%% stored as one item, or a list of {src, FileName} documents inserted
%% individually under the collection URI.
environment_collections({Uri0, CList}, DefaultCollection) ->
CollectionUri =
case ?LB(Uri0) of
<<>> ->
DefaultCollection;
Other ->
Other
end,
% collection test cannot be done in parallel
_ = xqldb_dml:delete_collection(CollectionUri),
case CList of
[{query, Base, Q}] ->
%% Give the generator query a base-uri inside Base so relative
%% references resolve there.
Opts = #{
'base-uri' => #xqAtomicValue{
type = 'xs:anyURI',
value = ?LB(xqldb_lib:filename_to_uri(Base ++ "/dummy.xq"))
}
},
Items =
case xqerl:run(Q, Opts) of
L when is_list(L) -> L;
L -> [L]
end,
ItemUri = xqldb_uri:join(CollectionUri, "stuff"),
xqldb_dml:insert_item(ItemUri, Items),
ok;
_ ->
_ = [
begin
F = xqldb_lib:filename_to_uri(?LB(FileName0)),
{_, BaseName} = xqldb_uri:split_uri(F),
DocUri = xqldb_uri:join(CollectionUri, BaseName),
catch xqldb_dml:insert_doc(DocUri, FileName0)
end
|| {src, FileName0} <- CList
],
ok
end.
%% Schema imports are currently not supported: always contribute an
%% empty prolog string.  The commented clause below shows the intended
%% future behavior.
environment_schema(_) -> "".
% environment_schema({File,Uri}) -> "import schema default element namespace '" ++ Uri ++ "' at '" ++ File ++ "';\n".
%% mapfoldl callback: load one source document into the database and
%% describe how the query should see it.
%% Role "."  -> the document becomes the context item (stored in Map);
%% Role ""   -> loaded only, nothing added to the query;
%% otherwise -> Role is a variable name and a "declare variable ... :=
%%              fn:doc(...)" prolog line is emitted.
environment_docs({File0, Role, Uri0}, Map) ->
FileUri = ?LB(xqldb_uri:filename_to_uri(File0)),
%% An empty URI, or a URI equal to the file name, means "address the
%% document by its file URI".
Uri2 =
case Uri0 of
[] -> FileUri;
File0 -> FileUri;
_ -> ?LB(Uri0)
end,
_ = (catch xqldb_dml:insert_doc(Uri2, File0)),
case Role of
"." ->
Doc = xqldb_dml:select_doc(Uri2),
{"", Map#{'context-item' => {Doc, 1}}};
"" ->
{"", Map};
_ ->
{
"declare variable " ++
Role ++
" := Q{http://www.w3.org/2005/xpath-functions}doc('" ++
unicode:characters_to_list(Uri2) ++ "');\n",
Map
}
end.
%% Render one decimal-format environment entry as a prolog declaration.
%% An empty name declares the default decimal format; otherwise a named
%% one.  Values is a list of {Property, Value} pairs.
environment_dec_format({"", Values}) ->
"declare default decimal-format \n" ++
lists:flatmap(fun environment_dec_format_vals/1, Values) ++
";";
environment_dec_format({Name, Values}) ->
"declare decimal-format " ++
Name ++
" \n" ++
lists:flatmap(fun environment_dec_format_vals/1, Values) ++
";".
%% Render a single decimal-format property as " key='value' \n".
environment_dec_format_vals({K, V}) ->
" " ++ atom_to_list(K) ++ "='" ++ V ++ "' \n".
%% foldl callback: evaluate an external-parameter expression with
%% xqerl:run/1 (parameters may be arbitrary queries) and store the
%% resulting value in the options map under the parameter name.  A
%% non-empty As adds an explicit "cast as" to the requested type.
environment_params({Name, "", Value}, Map) ->
Val = xqerl:run(Value),
Map#{?LB(Name) => Val};
environment_params({Name, As, Value}, Map) ->
Val = xqerl:run(Value ++ " cast as " ++ As),
Map#{?LB(Name) => Val}.
%% foldl callback: record a statically-known {Prefix, Uri} binding in
%% the options map.  lists:keystore/4 replaces any earlier binding for
%% the same prefix, so later entries win.
environment_namespace({Uri, Prefix}, Map) ->
Ns = maps:get(namespaces, Map, []),
NewNs = lists:keystore(
?LB(Prefix),
1,
Ns,
{?LB(Prefix), ?LB(Uri)}
),
Map#{namespaces => NewNs}.
%% foldl callback: evaluate a context-item expression and store the
%% result (at position 1) in the options map; an empty expression
%% leaves the map untouched.
environment_context_item("", Map) ->
Map;
environment_context_item(C, Map) ->
R = xqerl:run(C),
Map#{'context-item' => {R, 1}}.
%% Fold the environment's static-base-uri declaration into the options
%% map.  [{[]}] means "explicitly undefined" (marked with the sentinel
%% <<"#UNDEFINED">>), [{Buv}] sets the given URI, and [] leaves the map
%% unchanged.
environment_base_uri([{[]}], ContextItem) ->
% undefined
ContextItem#{
'base-uri' => #xqAtomicValue{type = 'xs:anyURI', value = <<"#UNDEFINED">>}
};
environment_base_uri([{Buv}], ContextItem) ->
ContextItem#{'base-uri' => #xqAtomicValue{type = 'xs:anyURI', value = ?LB(Buv)}};
environment_base_uri([], ContextItem) ->
ContextItem.
%% Render one namespace binding as a prolog declaration string.  An
%% empty prefix declares the default element namespace.  The math/
%% array/map prefixes are already statically known to the processor, so
%% re-declaring them is suppressed.
environment_namespace_1({Uri, ""}) ->
"declare default element namespace '" ++ Uri ++ "';\n";
% block statically known
environment_namespace_1({"http://www.w3.org/2005/xpath-functions/math", "math"}) ->
"";
environment_namespace_1({"http://www.w3.org/2005/xpath-functions/array", "array"}) ->
"";
environment_namespace_1({"http://www.w3.org/2005/xpath-functions/map", "map"}) ->
"";
environment_namespace_1({Uri, Prefix}) ->
"declare namespace " ++ Prefix ++ " = '" ++ Uri ++ "';\n".
%% Render one environment variable as a "declare variable" prolog line,
%% with an "as Type" clause when a sequence type was given.
environment_variable({Name, "", Value}) ->
"declare variable $" ++ Name ++ " := " ++ Value ++ ";\n";
environment_variable({Name, As, Value}) ->
"declare variable $" ++ Name ++ " as " ++ As ++ " := " ++ Value ++ ";\n".
%% Load the QT3 test-suite catalog and every per-function test file
%% listed by qt3_files/0 into the database.  The qt3tests checkout is
%% expected to be a sibling of the xqerl lib directory's grandparent
%% (i.e. next to the project root) — TODO confirm this layout matches
%% the CI checkout.
load_qt3_xml() ->
TestDir = filename:join(
filename:dirname(
filename:dirname(
filename:dirname(code:lib_dir(xqerl))
)
),
"qt3tests"
),
Catalog = filename:join(TestDir, "catalog.xml"),
Tests = [filename:join(TestDir, F) || F <- qt3_files()],
_ = [maybe_insert_file(N) || N <- [Catalog | Tests]],
ok.
%% Best-effort insert of one XML file under its file URI; the catch
%% swallows "already exists" (and any other) insertion errors so
%% re-loading is harmless.
maybe_insert_file(Filename) ->
Uri = xqldb_uri:filename_to_uri(?LB(Filename)),
catch xqldb_dml:insert_doc(Uri, Filename).
qt3_files() ->
[
"fn/abs.xml",
"fn/adjust-date-to-timezone.xml",
"fn/adjust-dateTime-to-timezone.xml",
"fn/adjust-time-to-timezone.xml",
"fn/analyze-string.xml",
"fn/apply.xml",
"fn/available-environment-variables.xml",
"fn/avg.xml",
"fn/base-uri.xml",
"fn/boolean.xml",
"fn/ceiling.xml",
"fn/codepoint-equal.xml",
"fn/codepoints-to-string.xml",
"fn/collation-key.xml",
"fn/collection.xml",
"fn/compare.xml",
"fn/concat.xml",
"fn/contains.xml",
"fn/contains-token.xml",
"fn/count.xml",
"fn/current-date.xml",
"fn/current-dateTime.xml",
"fn/current-time.xml",
"fn/data.xml",
"fn/dateTime.xml",
"fn/day-from-date.xml",
"fn/day-from-dateTime.xml",
"fn/days-from-duration.xml",
"fn/deep-equal.xml",
"fn/default-collation.xml",
"fn/default-language.xml",
"fn/distinct-values.xml",
"fn/doc.xml",
"fn/doc-available.xml",
"fn/document-uri.xml",
"fn/element-with-id.xml",
"fn/empty.xml",
"fn/encode-for-uri.xml",
"fn/ends-with.xml",
"fn/environment-variable.xml",
"fn/error.xml",
"fn/escape-html-uri.xml",
"fn/exactly-one.xml",
"fn/exists.xml",
"fn/false.xml",
"fn/filter.xml",
"fn/floor.xml",
"fn/fold-left.xml",
"fn/fold-right.xml",
"fn/for-each.xml",
"fn/for-each-pair.xml",
"fn/format-date.xml",
"fn/format-dateTime.xml",
"fn/format-integer.xml",
"fn/format-number.xml",
"fn/format-time.xml",
"fn/function-lookup.xml",
"fn/function-arity.xml",
"fn/function-name.xml",
"fn/generate-id.xml",
"fn/has-children.xml",
"fn/head.xml",
"fn/hours-from-dateTime.xml",
"fn/hours-from-duration.xml",
"fn/hours-from-time.xml",
"fn/id.xml",
"fn/idref.xml",
"fn/implicit-timezone.xml",
"fn/innermost.xml",
"fn/index-of.xml",
"fn/insert-before.xml",
"fn/in-scope-prefixes.xml",
"fn/iri-to-uri.xml",
"fn/json-doc.xml",
"fn/json-to-xml.xml",
"fn/lang.xml",
"fn/last.xml",
"fn/load-xquery-module.xml",
"fn/local-name.xml",
"fn/local-name-from-QName.xml",
"fn/lower-case.xml",
"fn/max.xml",
"fn/matches.xml",
"fn/matches.re.xml",
"fn/min.xml",
"fn/minutes-from-dateTime.xml",
"fn/minutes-from-duration.xml",
"fn/minutes-from-time.xml",
"fn/month-from-date.xml",
"fn/months-from-duration.xml",
"fn/month-from-dateTime.xml",
"fn/name.xml",
"fn/namespace-uri.xml",
"fn/namespace-uri-for-prefix.xml",
"fn/namespace-uri-from-QName.xml",
"fn/nilled.xml",
"fn/node-name.xml",
"fn/normalize-space.xml",
"fn/normalize-unicode.xml",
"fn/not.xml",
"fn/number.xml",
"fn/one-or-more.xml",
"fn/outermost.xml",
"fn/parse-ietf-date.xml",
"fn/parse-json.xml",
"fn/parse-xml.xml",
"fn/parse-xml-fragment.xml",
"fn/path.xml",
"fn/position.xml",
"fn/prefix-from-QName.xml",
"fn/QName.xml",
"fn/random-number-generator.xml",
"fn/remove.xml",
"fn/replace.xml",
"fn/resolve-QName.xml",
"fn/resolve-uri.xml",
"fn/reverse.xml",
"fn/root.xml",
"fn/round.xml",
"fn/round-half-to-even.xml",
"fn/seconds-from-dateTime.xml",
"fn/seconds-from-duration.xml",
"fn/seconds-from-time.xml",
"fn/serialize.xml",
"fn/sort.xml",
"fn/starts-with.xml",
"fn/static-base-uri.xml",
"fn/string.xml",
"fn/string-join.xml",
"fn/string-length.xml",
"fn/string-to-codepoints.xml",
"fn/subsequence.xml",
"fn/substring.xml",
"fn/substring-after.xml",
"fn/substring-before.xml",
"fn/sum.xml",
"fn/tail.xml",
"fn/timezone-from-date.xml",
"fn/timezone-from-dateTime.xml",
"fn/timezone-from-time.xml",
"fn/tokenize.xml",
"fn/trace.xml",
"fn/transform.xml",
"fn/translate.xml",
"fn/true.xml",
"fn/unordered.xml",
"fn/unparsed-text.xml",
"fn/unparsed-text-available.xml",
"fn/unparsed-text-lines.xml",
"fn/upper-case.xml",
"fn/uri-collection.xml",
"fn/xml-to-json.xml",
"fn/year-from-date.xml",
"fn/years-from-duration.xml",
"fn/year-from-dateTime.xml",
"fn/zero-or-one.xml",
"math/math-acos.xml",
"math/math-asin.xml",
"math/math-atan.xml",
"math/math-atan2.xml",
"math/math-cos.xml",
"math/math-exp.xml",
"math/math-exp10.xml",
"math/math-log.xml",
"math/math-log10.xml",
"math/math-pi.xml",
"math/math-pow.xml",
"math/math-sin.xml",
"math/math-sqrt.xml",
"math/math-tan.xml",
"map/merge.xml",
"map/contains.xml",
"map/find.xml",
"map/get.xml",
"map/entry.xml",
"map/size.xml",
"map/keys.xml",
"map/put.xml",
"map/remove.xml",
"map/for-each.xml",
"array/append.xml",
"array/filter.xml",
"array/flatten.xml",
"array/fold-left.xml",
"array/fold-right.xml",
"array/for-each.xml",
"array/for-each-pair.xml",
"array/get.xml",
"array/head.xml",
"array/insert-before.xml",
"array/join.xml",
"array/put.xml",
"array/remove.xml",
"array/reverse.xml",
"array/size.xml",
"array/sort.xml",
"array/subarray.xml",
"array/tail.xml",
"xs/anyURI.xml",
"xs/base64Binary.xml",
"xs/dateTimeStamp.xml",
"xs/double.xml",
"xs/error.xml",
"xs/float.xml",
"xs/hexBinary.xml",
"xs/normalizedString.xml",
"xs/numeric.xml",
"xs/token.xml",
"op/add-dayTimeDurations.xml",
"op/add-dayTimeDuration-to-date.xml",
"op/add-dayTimeDuration-to-dateTime.xml",
"op/add-dayTimeDuration-to-time.xml",
"op/add-yearMonthDurations.xml",
"op/add-yearMonthDuration-to-date.xml",
"op/add-yearMonthDuration-to-dateTime.xml",
"op/anyURI-equal.xml",
"op/anyURI-greater-than.xml",
"op/anyURI-less-than.xml",
"op/bang.xml",
"op/base64Binary-equal.xml",
"op/base64Binary-less-than.xml",
"op/base64Binary-greater-than.xml",
"op/boolean-equal.xml",
"op/boolean-greater-than.xml",
"op/boolean-less-than.xml",
"op/concat.xml",
"op/concatenate.xml",
"op/date-equal.xml",
"op/date-greater-than.xml",
"op/date-less-than.xml",
"op/dateTime-equal.xml",
"op/dateTime-greater-than.xml",
"op/dateTime-less-than.xml",
"op/dayTimeDuration-greater-than.xml",
"op/dayTimeDuration-less-than.xml",
"op/divide-dayTimeDuration.xml",
"op/divide-dayTimeDuration-by-dayTimeDuration.xml",
"op/divide-yearMonthDuration.xml",
"op/divide-yearMonthDuration-by-yearMonthDuration.xml",
"op/duration-equal.xml",
"op/except.xml",
"op/gDay-equal.xml",
"op/gMonth-equal.xml",
"op/gMonthDay-equal.xml",
"op/gYear-equal.xml",
"op/gYearMonth-equal.xml",
"op/hexBinary-equal.xml",
"op/hexBinary-greater-than.xml",
"op/hexBinary-less-than.xml",
"op/intersect.xml",
"op/is-same-node.xml",
"op/multiply-dayTimeDuration.xml",
"op/multiply-yearMonthDuration.xml",
"op/node-after.xml",
"op/node-before.xml",
"op/NOTATION-equal.xml",
"op/numeric-add.xml",
"op/numeric-equal.xml",
"op/numeric-divide.xml",
"op/numeric-greater-than.xml",
"op/numeric-integer-divide.xml",
"op/numeric-less-than.xml",
"op/numeric-mod.xml",
"op/numeric-multiply.xml",
"op/numeric-subtract.xml",
"op/numeric-unary-minus.xml",
"op/numeric-unary-plus.xml",
"op/QName-equal.xml",
"op/string-equal.xml",
"op/string-greater-than.xml",
"op/string-less-than.xml",
"op/subtract-dates.xml",
"op/subtract-dateTimes.xml",
"op/subtract-dayTimeDuration-from-date.xml",
"op/subtract-dayTimeDuration-from-dateTime.xml",
"op/subtract-dayTimeDuration-from-time.xml",
"op/subtract-dayTimeDurations.xml",
"op/subtract-times.xml",
"op/subtract-yearMonthDuration-from-date.xml",
"op/subtract-yearMonthDuration-from-dateTime.xml",
"op/subtract-yearMonthDurations.xml",
"op/time-equal.xml",
"op/time-greater-than.xml",
"op/time-less-than.xml",
"op/to.xml",
"op/union.xml",
"op/yearMonthDuration-greater-than.xml",
"op/yearMonthDuration-less-than.xml",
"op/same-key.xml",
"prod/AllowingEmpty.xml",
"prod/Annotation.xml",
"prod/ArrayTest.xml",
"prod/ArrowPostfix.xml",
"prod/AxisStep.xml",
"prod/AxisStep.abbr.xml",
"prod/AxisStep.ancestor.xml",
"prod/AxisStep.ancestor-or-self.xml",
"prod/AxisStep.following.xml",
"prod/AxisStep.following-sibling.xml",
"prod/AxisStep.preceding.xml",
"prod/AxisStep.preceding-sibling.xml",
"prod/AxisStep.static-typing.xml",
"prod/AxisStep.unabbr.xml",
"prod/BaseURIDecl.xml",
"prod/BoundarySpaceDecl.xml",
"prod/CastableExpr.xml",
"prod/CastExpr.xml",
"prod/CastExpr.derived.xml",
"prod/CastExpr.schema.xml",
"prod/Comment.xml",
"prod/CompAttrConstructor.xml",
"prod/CompDocConstructor.xml",
"prod/CompCommentConstructor.xml",
"prod/CompElemConstructor.xml",
"prod/CompNamespaceConstructor.xml",
"prod/CompPIConstructor.xml",
"prod/CompTextConstructor.xml",
"prod/ConstructionDecl.xml",
"prod/ConstructionDecl.schema.xml",
"prod/ContextItemDecl.xml",
"prod/ContextItemExpr.xml",
"prod/CopyNamespacesDecl.xml",
"prod/CountClause.xml",
"prod/CurlyArrayConstructor.xml",
"prod/DecimalFormatDecl.xml",
"prod/DefaultCollationDecl.xml",
"prod/DefaultNamespaceDecl.xml",
"prod/DirAttributeList.xml",
"prod/DirectConstructor.xml",
"prod/DirElemConstructor.xml",
"prod/DirElemContent.xml",
"prod/DirElemContent.namespace.xml",
"prod/DirElemContent.whitespace.xml",
"prod/EmptyOrderDecl.xml",
"prod/EQName.xml",
"prod/ExtensionExpr.xml",
"prod/FLWORExpr.xml",
"prod/FLWORExpr.static-typing.xml",
"prod/ForClause.xml",
"prod/FunctionCall.xml",
"prod/FunctionDecl.xml",
"prod/GeneralComp.eq.xml",
"prod/GeneralComp.ge.xml",
"prod/GeneralComp.gt.xml",
"prod/GeneralComp.le.xml",
"prod/GeneralComp.lt.xml",
"prod/GeneralComp.ne.xml",
"prod/GroupByClause.xml",
"prod/IfExpr.xml",
"prod/InlineFunctionExpr.xml",
"prod/InstanceofExpr.xml",
"prod/LetClause.xml",
"prod/Literal.xml",
"prod/Lookup.xml",
"prod/MapConstructor.xml",
"prod/MapTest.xml",
"prod/ModuleImport.xml",
"prod/NamedFunctionRef.xml",
"prod/NamespaceDecl.xml",
"prod/NameTest.xml",
"prod/NodeTest.xml",
"prod/OptionDecl.xml",
"prod/OptionDecl.serialization.xml",
"prod/OrExpr.xml",
"prod/OrderByClause.xml",
"prod/OrderingModeDecl.xml",
"prod/PathExpr.xml",
"prod/ParenthesizedExpr.xml",
"prod/PositionalVar.xml",
"prod/Predicate.xml",
"prod/QuantifiedExpr.xml",
"prod/ReturnClause.xml",
"prod/SchemaImport.xml",
"prod/SequenceType.xml",
"prod/SquareArrayConstructor.xml",
"prod/StepExpr.xml",
"prod/StringConstructor.xml",
"prod/SwitchExpr.xml",
"prod/TreatExpr.xml",
"prod/TryCatchExpr.xml",
"prod/TypeswitchExpr.xml",
"prod/UnorderedExpr.xml",
"prod/UnaryLookup.xml",
"prod/ValidateExpr.xml",
"prod/ValueComp.xml",
"prod/VarDecl.xml",
"prod/VarDecl.external.xml",
"prod/VarDefaultValue.xml",
"prod/VersionDecl.xml",
"prod/WhereClause.xml",
"prod/WindowClause.xml",
"misc/CombinedErrorCodes.xml",
"misc/AnnexE.xml",
"misc/AppendixA4.xml",
"misc/ErrorsAndOptimization.xml",
"misc/HigherOrderFunctions.xml",
"misc/JsonTestSuite.xml",
"misc/StaticContext.xml",
"misc/Surrogates.xml",
"misc/UCACollation.xml",
"misc/XMLEdition.xml",
"ser/method-adaptive.xml",
"ser/method-html.xml",
"ser/method-json.xml",
"ser/method-text.xml",
"ser/method-xhtml.xml",
"ser/method-xml.xml",
"app/CatalogCheck.xml",
"app/Demos.xml",
"app/FunctxFn.xml",
"app/FunctxFunctx.xml",
"app/UseCaseCompoundValues.xml",
"app/UseCaseJSON.xml",
"app/UseCaseNLP.xml",
"app/UseCaseNS.xml",
"app/UseCasePARTS.xml",
"app/UseCaseR.xml",
"app/UseCaseR31.xml",
"app/UseCaseSEQ.xml",
"app/UseCaseSGML.xml",
"app/UseCaseSTRING.xml",
"app/UseCaseTREE.xml",
"app/UseCaseXMP.xml",
"app/Walmsley.xml",
"app/XMark.xml",
"app/fo-spec-examples.xml"
].
|
/**
* PANDA 3D SOFTWARE
* Copyright (c) Carnegie Mellon University. All rights reserved.
*
* All use of this software is subject to the terms of the revised BSD
* license. You should have received a copy of this license along
* with this source code in a file named "LICENSE."
*
* @file showBase.cxx
* @author shochet
* @date 2000-02-02
*/
#ifdef __APPLE__
// We have to include this before we include any Panda libraries, because one
// of the things we pick up in Panda defines a macro for TCP_NODELAY and
// friends, causing heartaches for the header files picked up here.
#include <Carbon/Carbon.h>
extern "C" { void CPSEnableForegroundOperation(ProcessSerialNumber* psn); }
#endif
#include "showBase.h"
#include "throw_event.h"
#include "graphicsWindow.h"
#include "renderBuffer.h"
#include "camera.h"
#include "graphicsPipeSelection.h"
#ifdef WIN32
#include <windows.h> // For SystemParametersInfo()
STICKYKEYS g_StartupStickyKeys = {sizeof(STICKYKEYS), 0};
TOGGLEKEYS g_StartupToggleKeys = {sizeof(TOGGLEKEYS), 0};
FILTERKEYS g_StartupFilterKeys = {sizeof(FILTERKEYS), 0};
#endif
using std::max;
using std::min;
#if !defined(CPPPARSER) && !defined(LINK_ALL_STATIC) && !defined(BUILDING_DIRECT_SHOWBASE)
#error Buildsystem error: BUILDING_DIRECT_SHOWBASE not defined
#endif
// Standard Panda3D config-module boilerplate: declares and defines the
// config_showbase configure block.  No initialization is needed here.
ConfigureDef(config_showbase);
ConfigureFn(config_showbase) {
}
// Search path for particle (.ptf) files, settable via the "particle-path"
// config variable.
ConfigVariableSearchPath particle_path
("particle-path",
PRC_DESC("The directories to search for particle files to be loaded."));
// Accessor returning a mutable reference to the global particle path.
ConfigVariableSearchPath &
get_particle_path() {
return particle_path;
}
// Throw the "NewFrame" event in the C++ world. Some of the lerp code depends
// on receiving this.
// Fires the "NewFrame" event into Panda's C++ event queue once per frame.
void
throw_new_frame() {
throw_event("NewFrame");
}
// Initialize the application for making a Gui-based app, such as wx. At the
// moment, this is a no-op except on Mac.
// Prepares the process for hosting a GUI toolkit (e.g. wx).  Currently a
// no-op everywhere except macOS, where the application is brought to the
// foreground so its windows become visible.
void
init_app_for_gui() {
#ifdef IS_OSX
// Rudely bring the application to the foreground. This is particularly
// important when running wx via the plugin, since the plugin app is seen as
// separate from the browser app, even though the user sees them as the same
// thing. We need to bring the plugin app to the foreground to make its wx
// windows visible.
activate_osx_application();
#endif
// We don't appear to need to do the following, however, if we launch the
// plugin correctly from its own bundle.
/*
static bool initted_for_gui = false;
if (!initted_for_gui) {
initted_for_gui = true;
#ifdef IS_OSX
ProcessSerialNumber psn;
GetCurrentProcess(&psn);
CPSEnableForegroundOperation(&psn);
SetFrontProcess(&psn);
#endif // IS_OSX
}
*/
}
// klunky interface since we cant pass array from python->C++ to use
// verify_window_sizes directly
// Flat table of candidate fullscreen sizes, stored as interleaved
// (x, y) pairs: entry i occupies indices [2*i] and [2*i + 1].
static int num_fullscreen_testsizes = 0;
#define MAX_FULLSCREEN_TESTS 10
static int fullscreen_testsizes[MAX_FULLSCREEN_TESTS * 2];
// Record a candidate fullscreen size for later verification by
// runtest_fullscreen_sizes().  Passing (0, 0) resets the table.  Once
// MAX_FULLSCREEN_TESTS entries have been recorded, further requests are
// silently dropped (matching the documented interface).
void
add_fullscreen_testsize(int xsize, int ysize) {
  if (xsize == 0 && ysize == 0) {
    // Sentinel request: clear all previously recorded sizes.
    num_fullscreen_testsizes = 0;
    return;
  }
  if (num_fullscreen_testsizes >= MAX_FULLSCREEN_TESTS) {
    // Table full: silently ignore, as documented.
    return;
  }
  int *slot = &fullscreen_testsizes[num_fullscreen_testsizes * 2];
  slot[0] = xsize;
  slot[1] = ysize;
  ++num_fullscreen_testsizes;
}
// Asks the window to verify every size recorded via
// add_fullscreen_testsize(); results are queried afterwards with
// query_fullscreen_testresult().
void
runtest_fullscreen_sizes(GraphicsWindow *win) {
win->verify_window_sizes(num_fullscreen_testsizes, fullscreen_testsizes);
}
// Returns true if the given (xsize, ysize) pair is present in the table
// populated by runtest_fullscreen_sizes(), false otherwise.  A linear
// scan is fine: the table holds at most MAX_FULLSCREEN_TESTS entries.
bool
query_fullscreen_testresult(int xsize, int ysize) {
  for (int i = 0; i < num_fullscreen_testsizes; ++i) {
    const int *slot = &fullscreen_testsizes[i * 2];
    if (slot[0] == xsize && slot[1] == ysize) {
      return true;
    }
  }
  return false;
}
// Snapshot the user's current Windows accessibility shortcut settings
// (StickyKeys / ToggleKeys / FilterKeys) into the g_Startup* globals so
// allow_accessibility_shortcut_keys(true) can restore them later.
// No-op on non-Windows platforms.
void
store_accessibility_shortcut_keys() {
#ifdef WIN32
SystemParametersInfo(SPI_GETSTICKYKEYS, sizeof(STICKYKEYS), &g_StartupStickyKeys, 0);
SystemParametersInfo(SPI_GETTOGGLEKEYS, sizeof(TOGGLEKEYS), &g_StartupToggleKeys, 0);
SystemParametersInfo(SPI_GETFILTERKEYS, sizeof(FILTERKEYS), &g_StartupFilterKeys, 0);
#endif
}
// Enable or disable the Windows accessibility shortcut hotkeys (the
// "press Shift five times" style popups) while a fullscreen game runs.
// With allowKeys == true the settings captured by
// store_accessibility_shortcut_keys() are restored verbatim.  With
// false, the hotkey and its confirmation dialog are disabled — but only
// when the corresponding feature is currently OFF, so a user who relies
// on the accessibility feature keeps it.  No-op off Windows.
void
allow_accessibility_shortcut_keys(bool allowKeys) {
#ifdef WIN32
if( allowKeys )
{
// Restore StickyKeysetc to original state and enable Windows key
SystemParametersInfo(SPI_SETSTICKYKEYS, sizeof(STICKYKEYS), &g_StartupStickyKeys, 0);
SystemParametersInfo(SPI_SETTOGGLEKEYS, sizeof(TOGGLEKEYS), &g_StartupToggleKeys, 0);
SystemParametersInfo(SPI_SETFILTERKEYS, sizeof(FILTERKEYS), &g_StartupFilterKeys, 0);
} else {
// Disable StickyKeysetc shortcuts but if the accessibility feature is on,
// then leave the settings alone as its probably being usefully used
STICKYKEYS skOff = g_StartupStickyKeys;
if( (skOff.dwFlags & SKF_STICKYKEYSON) == 0 )
{
// Disable the hotkey and the confirmation
skOff.dwFlags &= ~SKF_HOTKEYACTIVE;
skOff.dwFlags &= ~SKF_CONFIRMHOTKEY;
SystemParametersInfo(SPI_SETSTICKYKEYS, sizeof(STICKYKEYS), &skOff, 0);
}
TOGGLEKEYS tkOff = g_StartupToggleKeys;
if( (tkOff.dwFlags & TKF_TOGGLEKEYSON) == 0 )
{
// Disable the hotkey and the confirmation
tkOff.dwFlags &= ~TKF_HOTKEYACTIVE;
tkOff.dwFlags &= ~TKF_CONFIRMHOTKEY;
SystemParametersInfo(SPI_SETTOGGLEKEYS, sizeof(TOGGLEKEYS), &tkOff, 0);
}
FILTERKEYS fkOff = g_StartupFilterKeys;
if( (fkOff.dwFlags & FKF_FILTERKEYSON) == 0 )
{
// Disable the hotkey and the confirmation
fkOff.dwFlags &= ~FKF_HOTKEYACTIVE;
fkOff.dwFlags &= ~FKF_CONFIRMHOTKEY;
SystemParametersInfo(SPI_SETFILTERKEYS, sizeof(FILTERKEYS), &fkOff, 0);
}
}
#endif
}
#if 0
int TempGridZoneManager::
add_grid_zone(unsigned int x,
unsigned int y,
unsigned int width,
unsigned int height,
unsigned int zoneBase,
unsigned int xZoneResolution,
unsigned int yZoneResolution) {
// zoneBase is the first zone in the grid (e.g. the upper left)
// zoneResolution is the number of cells on each axsis. returns the next
// available zoneBase (i.e. zoneBase+xZoneResolution*yZoneResolution)
std::cerr<<"adding grid zone with a zoneBase of "<<zoneBase<<" and a zoneResolution of "<<zoneResolution;
_grids.append(TempGridZoneManager::GridZone(x, y, width, height, zoneBase, xZoneResolution, yZoneResolution));
return zoneBase+xZoneResolution*yZoneResolution;
}
void TempGridZoneManager::GridZone
GridZone(unsigned int x,
unsigned int y,
unsigned int width,
unsigned int height,
unsigned int zoneBase,
unsigned int xZoneResolution,
unsigned int yZoneResolution) {
_x=x;
_y=y;
_width=width;
_height=heigth;
_zoneBase=zoneBase;
_xZoneResolution=xZoneResolution;
_yZoneResolution=yZoneResolution;
// The cellVis is the number of cells radius that can be seen, including the
// center cell. So, for a 5 x 5 visible area, the cellVis is 3.
const float cellVis=3.0;
unsigned int xMargine=(unsigned int)((float)width/(float)xZoneResolution*cellVis+0.5);
unsigned int yMargine=(unsigned int)((float)height/(float)yZoneResolution*cellVis+0.5);
_xMinVis=x-xMargine;
_yMinVis=y-yMargine;
_xMaxVis=x+width+xMargine;
_yMaxVis=y+height+yMargine;
}
void TempGridZoneManager::
get_grids(int x, int y) {
TempGridZoneManager::ZoneSet canSee;
TempGridZoneManager::GridSet::const_iterator i=_grids.begin();
for (; i!=_grids.end(); ++i) {
if (x >= i._xMinVis && x < i._xMaxVis && y >= i._yMinVis && y < i._yMaxVis) {
add_to_zone_list(i, x, y, canSee);
}
}
}
void TempGridZoneManager::
add_to_zone_list(const TempGridZoneManager::GridZone &gridZone,
unsigned int x,
unsigned int y,
TempGridZoneManager::ZoneSet &zoneSet) {
unsigned int xRes=gridZone._xZoneResolution;
unsigned int yRes=gridZone._yZoneResolution;
float xP=((float)(x-gridZone._x))/gridZone._width;
float yP=((float)(y-gridZone._y))/gridZone._height;
int xCell=(int)(xP*xRes);
int yCell=(int)(yP*yRes);
// range is how many cells can be seen in each direction:
const int range=2;
int yBegin=max(0, yCell-range);
int yEnd=min(yRes, yCell+range);
int xBegin=max(0, xCell-range);
int xEnd=min(xRes, xCell+range);
unsigned int zone=gridZone._zoneBase+yBegin*xRes+xBegin;
for (yCell=yBegin; yCell < yEnd; ++yCell) {
for (xCell=xBegin; xCell < xEnd; ++xCell) {
zoneSet.append(zone+xCell);
}
zone+=xRes;
}
}
int TempGridZoneManager::
get_zone_list(int x, int y, int resolution) {
// x is a float in the range 0.0 to 1.0 y is a float in the range 0.0 to 1.0
// resolution is the number of cells on each axsis. returns a list of zone
// ids. Create a box of cell numbers, while clipping to the edges of the
// set of cells.
if (x < 0.0 || x > 1.0 || y < 0.0 || y > 1.0) {
return 0;
}
std::cerr<<"resolution="<<resolution;
xCell=min(int(x*resolution), resolution-1)
yCell=min(int(y*resolution), resolution-1)
cell=yCell*resolution+xCell
print "cell", cell,
zone=zoneBase+cell
print "zone", zone
zone=zone-2*resolution
endZone=zone+5*resolution
yCell=yCell-2
while zone < endZone:
if yCell >= 0 and yCell < resolution:
if xCell > 1:
zoneList.append(zone-2)
zoneList.append(zone-1)
elif xCell > 0:
zoneList.append(zone-1)
r.append(zone)
if xCell < resolution-2:
zoneList.append(zone+1)
zoneList.append(zone+2)
elif xCell < resolution-1:
zoneList.append(zone+1)
yCell+=1
zone+=resolution
return zoneList
return 5;
}
#endif
|
package net.gini.android.vision.review.multipage.thumbnails;
/**
* Created by Alpar Szotyori on 08.05.2018.
*
* Copyright (c) 2018 Gini GmbH.
*/
/**
* Internal use only.
*
* @suppress
*/
public interface ThumbnailsAdapterListener {
void onThumbnailMoved();
void onThumbnailSelected(final int position);
void onPlusButtonClicked();
}
|
package uk.co.idv.method.entities.otp.simswap.eligibility;
import uk.co.idv.method.entities.eligibility.Ineligible;
/**
 * Ineligibility reason raised when a SIM swap check returns a status
 * that is not on the allowed list.
 */
public class SimSwapStatusNotAllowed extends Ineligible {

    public SimSwapStatusNotAllowed(String status) {
        // Same message as String.format("sim swap status %s is not allowed", status).
        super("sim swap status " + status + " is not allowed");
    }

}
|
package com.fernandocejas.cognitive.chatbot.framework
import android.os.Bundle
import android.support.v4.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import com.fernandocejas.cognitive.chatbot.AndroidApplication
import com.fernandocejas.cognitive.chatbot.di.ApplicationComponent
/**
 * Base class for the app's fragments: retains the instance across
 * configuration changes, exposes the application-scoped DI component,
 * and inflates the layout provided by [layoutId].
 */
abstract class BaseFragment : Fragment() {
init {
// Keep this fragment instance (and any state it holds) across
// configuration changes such as rotation.
retainInstance = true
}
/** Layout resource id inflated as this fragment's view. */
abstract fun layoutId(): Int
/** Application-scoped Dagger component, resolved lazily on first access. */
val appComponent: ApplicationComponent by lazy(mode = LazyThreadSafetyMode.NONE) {
(activity?.application as AndroidApplication).appComponent
}
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View =
inflater.inflate(layoutId(), container, false)
/** Hook for subclasses that need to react to back presses; no-op by default. */
open fun onBackPressed() {}
/** True when the fragment is being created for the first time (no saved state). */
internal fun firstTimeCreated(savedInstanceState: Bundle?) = savedInstanceState == null
}
|
use amethyst::{
core::SystemDesc,
ecs::{Entities, Join, Read, ReadExpect, System, World, WorldExt, Write, WriteStorage},
shred::{ResourceId, SystemData},
ui::{UiText, UiTransform},
window::ScreenDimensions,
};
use camera_model::play::CameraZoomDimensions;
use derivative::Derivative;
use derive_new::new;
use ui_model::play::UiFovScaleTransform;
/// Builds a `UiTransformForFovSystem`.
///
/// Seeds the system with the current `ScreenDimensions` so the first
/// `run` only does work once the window size actually changes.
#[derive(Debug, Default)]
pub struct UiTransformForFovSystemDesc;
impl<'a, 'b> SystemDesc<'a, 'b, UiTransformForFovSystem> for UiTransformForFovSystemDesc {
fn build(self, world: &mut World) -> UiTransformForFovSystem {
<UiTransformForFovSystem as System<'_>>::SystemData::setup(world);
// Clone out of the resource borrow before constructing the system.
let screen_dimensions = &*world.read_resource::<ScreenDimensions>();
let screen_dimensions = screen_dimensions.clone();
UiTransformForFovSystem::new(screen_dimensions)
}
}
/// Rescales `UiTransform`s (and `UiText` font sizes) when the screen
/// dimensions change, so UI elements keep the proportions of the
/// original `CameraZoomDimensions` field of view.
///
/// (The previous doc comment describing `WidgetStatus` /
/// `ControlInputEvent`s was copy-pasted from another system.)
#[derive(Debug, new)]
pub struct UiTransformForFovSystem {
/// `ScreenDimensions` seen on the previous run; work is skipped while
/// the dimensions are unchanged.
pub screen_dimensions_last: ScreenDimensions,
}
/// `UiTransformForFovSystemData`: resources and component storages
/// accessed by `UiTransformForFovSystem::run`.
#[derive(Derivative, SystemData)]
#[derivative(Debug)]
pub struct UiTransformForFovSystemData<'s> {
/// `CameraZoomDimensions` resource — the reference ("design") FOV.
#[derivative(Debug = "ignore")]
pub camera_zoom_dimensions: Read<'s, CameraZoomDimensions>,
/// `ScreenDimensions` resource — the current window size.
#[derivative(Debug = "ignore")]
pub screen_dimensions: ReadExpect<'s, ScreenDimensions>,
/// `UiFovScaleTransform` resource — the currently applied scale/offset.
#[derivative(Debug = "ignore")]
pub ui_fov_scale_transform: Write<'s, UiFovScaleTransform>,
/// `Entities`.
#[derivative(Debug = "ignore")]
pub entities: Entities<'s>,
/// `UiTransform` components.
#[derivative(Debug = "ignore")]
pub ui_transforms: WriteStorage<'s, UiTransform>,
/// `UiText` components.
#[derivative(Debug = "ignore")]
pub ui_texts: WriteStorage<'s, UiText>,
}
impl<'s> System<'s> for UiTransformForFovSystem {
type SystemData = UiTransformForFovSystemData<'s>;
fn run(
&mut self,
UiTransformForFovSystemData {
camera_zoom_dimensions,
screen_dimensions,
mut ui_fov_scale_transform,
entities,
mut ui_transforms,
mut ui_texts,
}: Self::SystemData,
) {
// Only recompute when the window size has actually changed.
if self.screen_dimensions_last != *screen_dimensions {
let aspect_ratio_diff =
screen_dimensions.aspect_ratio() - camera_zoom_dimensions.aspect_ratio();
// Fit the design FOV inside the window: scale by the constraining
// axis (letterbox/pillarbox style), or 1:1 when ratios match.
let scale = if aspect_ratio_diff > 0. {
// Wider than original dimensions, so we use screen height / original height for the
// scale.
screen_dimensions.height() / camera_zoom_dimensions.height
} else if aspect_ratio_diff < 0. {
// Narrower than original dimensions, so we use screen width / original width for
// the scale.
screen_dimensions.width() / camera_zoom_dimensions.width
} else {
1.
};
// Center the scaled FOV within the window.
let x_offset = (screen_dimensions.width() - camera_zoom_dimensions.width * scale) / 2.;
let y_offset =
(screen_dimensions.height() - camera_zoom_dimensions.height * scale) / 2.;
let ui_fov_scale_transform_next = UiFovScaleTransform::new(scale, x_offset, y_offset);
(&entities, &mut ui_transforms)
.join()
.for_each(|(entity, ui_transform)| {
// Undo the previous scale/offset, then apply the new one, so
// repeated resizes do not compound.
ui_fov_scale_transform.unapply(ui_transform);
ui_fov_scale_transform_next.apply(ui_transform);
if let Some(ui_text) = ui_texts.get_mut(entity) {
// Same undo/redo for font sizes: divide out the old scale,
// multiply in the new.
ui_text.font_size /= ui_fov_scale_transform.scale;
ui_text.font_size *= scale;
}
});
*ui_fov_scale_transform = ui_fov_scale_transform_next;
self.screen_dimensions_last = screen_dimensions.clone();
}
}
}
|
package cn.edu.scu.dao;
import cn.edu.scu.entity.Keyword;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * MyBatis mapper interface for keyword records.
 */
public interface KeywordDao {

    /**
     * Inserts a keyword with the given name.
     *
     * @return presumably the number of affected rows (MyBatis insert
     *         convention) — confirm against the mapper XML
     */
    int insertOne(@Param("keywordName") String keywordName);

    /** Looks up a keyword by its name; null semantics depend on the mapper. */
    Keyword queryByKeywordName(@Param("keywordName") String keywordName);

    /** Looks up a keyword by its numeric id. */
    Keyword queryByKeywordId(@Param("keywordId") int keywordId);

    /** Returns all keywords. */
    List<Keyword> queryAll();
}
|
$ echo '(jbuild_version 1)' > dune
$ dune build
File "dune", line 1, characters 0-18:
1 | (jbuild_version 1)
^^^^^^^^^^^^^^^^^^
Error: 'jbuild_version' was deleted in version 1.0 of the dune language
[1]
$ rm -f dune
$ echo '(jbuild_version 1)' > jbuild
$ dune build
File "jbuild", line 1, characters 0-0:
Warning: jbuild files are deprecated, please convert this file to a dune file instead.
Note: You can use "dune upgrade" to convert your project to dune.
$ rm -f jbuild
$ echo '(executable (name x) (link_executables false))' > dune
$ dune build
File "dune", line 1, characters 21-45:
1 | (executable (name x) (link_executables false))
^^^^^^^^^^^^^^^^^^^^^^^^
Error: 'link_executables' was deleted in version 1.0 of the dune language
[1]
$ rm -f dune
$ echo '(alias (name x) (deps x) (action (run %{<})))' > dune
$ dune build
File "dune", line 1, characters 40-42:
1 | (alias (name x) (deps x) (action (run %{<})))
^^
Error: %{<} was deleted in version 1.0 of the dune language.
Use a named dependency instead:
(deps (:x <dep>) ...)
... %{x} ...
[1]
$ rm -f dune
|
import Vector2 from '@equinor/videx-vector2';
import { VectorLike } from '@equinor/videx-linear-algebra';
/**
 * Project a point onto a line segment and return the nearest position
 * lying on that segment (clamped to the endpoints).
 * @param point Reference point
 * @param lineStart Start of line segment
 * @param lineEnd End of line segment
 * @returns Closest point on line
 */
export function closestPointOnLine(point: Vector2, lineStart: Vector2, lineEnd: Vector2): Vector2 {
  const segment: Vector2 = Vector2.sub(lineEnd, lineStart);
  // Orientation of the segment measured against the x-axis.
  const segmentAngle: number = Vector2.angleRight(segment);
  const segmentLength: number = segment.magnitude;
  // Express the point in the segment's local frame (segment along +x).
  const local: Vector2 = Vector2.sub(point, lineStart)
    .mutable
    .rotate(-segmentAngle);
  // Projection falls before the start of the segment.
  if (local[0] < 0) {
    return lineStart;
  }
  // Projection falls past the end of the segment.
  if (local[0] > segmentLength) {
    return lineEnd;
  }
  // Projection is interior: walk local[0] units along the segment direction.
  return segment
    .mutable
    .rescale(local[0])
    .add(lineStart[0], lineStart[1])
    .immutable;
}
/**
 * Compute the shortest distance from a reference point to a line segment.
 * @param point Reference point
 * @param lineStart Start of line segment
 * @param lineEnd End of line segment
 * @returns Distance to line
 */
export function distanceToLine(point: Vector2, lineStart: Vector2, lineEnd: Vector2): number {
  const segment: Vector2 = Vector2.sub(lineEnd, lineStart);
  // Orientation of the segment measured against the x-axis.
  const segmentAngle: number = Vector2.angleRight(segment);
  const segmentLength: number = segment.magnitude;
  // Express the point in the segment's local frame (segment along +x).
  const local: Vector2 = Vector2.sub(point, lineStart)
    .mutable
    .rotate(-segmentAngle);
  // Before the start: distance to lineStart (rotation preserves magnitude).
  if (local[0] < 0) {
    return local.magnitude;
  }
  // Past the end: distance to lineEnd.
  if (local[0] > segmentLength) {
    return Vector2.distance(point, lineEnd);
  }
  // Interior: shortest distance is the perpendicular offset.
  return Math.abs(local.y);
}
/**
 * Get the relative displacement of the point in respect to lineStart.
 * X-component contains displacement along line.
 * Y component contains displacement perpendicular to line.
 * Both components are returned as absolute values.
 * @param point Reference point
 * @param lineStart Start of line segment
 * @param lineEnd End of line segment
 */
export function displacementToLineOrigin(point: VectorLike, lineStart: VectorLike, lineEnd: VectorLike): Vector2 {
  const segment: Vector2 = Vector2.sub(lineEnd, lineStart).mutable;
  // Orientation of the segment measured against the x-axis.
  const segmentAngle: number = Vector2.angleRight(segment);
  // Re-use the mutable segment vector to build the local-frame displacement.
  return segment.set(point)
    .sub(lineStart)
    .rotate(-segmentAngle)
    .modify(Math.abs)
    .immutable;
}
|
using System;
namespace VBessonov.GZip.Core.Compression
{
    /// <summary>
    /// Factory abstraction for creating <see cref="IProcessor"/> instances,
    /// allowing callers to obtain processors without depending on a concrete type.
    /// </summary>
    public interface IProcessorFactory
    {
        /// <summary>
        /// Creates a processor instance.
        /// </summary>
        /// <returns>A new <see cref="IProcessor"/>.</returns>
        IProcessor Create();
    }
}
|
---
title: BARNECOTT
is_name: true
---
BARNECOTT
|
# ENV -- metadata for the Splint package under test.
SPLINT_VERSION="0.0.8"
VENDOR="francis94c"
PACKAGE="ci-gmail"

# Tracks whether the destination splint directory has been created yet.
created=false

# Move every entry in the current directory (except the test config and this
# script) into the splint package layout expected by travis-splint.
for entry in ./*
do
  echo "${entry}"
  # Create the destination directory exactly once, on the first iteration.
  if [ "$created" = false ]; then
    mkdir -p "travis-splint-${SPLINT_VERSION}/application/splints/${VENDOR}/${PACKAGE}"
    # BUG FIX: was '$created = true', which expands to running the command
    # 'false' with arguments '= true' -- the flag was never actually set,
    # so the mkdir branch re-ran on every iteration.
    created=true
  fi
  if [ "x$entry" != "x./phpunit.xml" ] && [ "x$entry" != "x./travis.sh" ]; then
    cp -r "$entry" "travis-splint-${SPLINT_VERSION}/application/splints/${VENDOR}/${PACKAGE}/"
    rm -rf "$entry"
  fi
done

# Fetch and unpack the travis-splint harness alongside the relocated package.
wget "https://github.com/splintci/travis-splint/archive/v${SPLINT_VERSION}.tar.gz" -O - | tar xz
|
## Schema -- bootstrap script for the burgers_db MySQL database.
-- Drop any existing copy of the database so this script can be re-run safely.
DROP DATABASE IF EXISTS burgers_db;
-- Create a fresh database for the app.
CREATE DATABASE burgers_db;
-- Make burgers_db the default schema for the statements below.
USE burgers_db;
-- Burger table: auto-incrementing surrogate key, burger name, and a
-- devoured flag marking whether the burger has been eaten.
CREATE TABLE burgers(
    id INT NOT NULL AUTO_INCREMENT,
    burger_name VARCHAR(255),
    devoured BOOLEAN,
    PRIMARY KEY (id)
);
-- Sanity check: list all rows (empty on first run).
SELECT * FROM burgers;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.