text stringlengths 1 1.05M |
|---|
#pragma once
#include "typed-geometry/types/vec.hh"
#include "typed-geometry/types/pos.hh"
#include "typed-geometry/types/color.hh"
namespace gamedev
{
// CPU-side simulation state for one particle in the pool.
struct Particle
{
    tg::pos3 position;
    tg::vec3 color;        // NOTE(review): vec3 here but tg::color3 in ParticleProperties -- confirm intended
    tg::vec3 velocity;
    float rotation_y;      // rotation about the Y axis (units not shown here -- confirm radians vs degrees)
    bool active = false;   // false => this slot is free for reuse by the emitter
    float lifeTime;        // total lifetime assigned at spawn
    float lifeRemaining;   // counts down toward 0; particle is retired when it reaches 0
    float size_t0;         // size at spawn (t = 0)
    float size_tn;         // size at end of life (t = lifeTime)
};
// Per-particle attributes in a GPU-upload-friendly layout.
// NOTE(review): dummy1/dummy2 look like explicit padding for a 16-byte-aligned
// buffer layout (std140/std430-style) -- confirm against the shader-side struct
// before reordering or removing fields.
struct ParticleAttributes
{
    tg::pos3 pos;
    float dummy1 = 0.0f; // padding (fix: float literal instead of double 0.0, consistent with the rest of the file)
    alignas(16) tg::vec3 color;
    float dummy2 = 0.0f; // padding (fix: float literal instead of double 0.0)
    float rotation;
    float scale;
    float blend;
};
// Emitter configuration: base values plus per-particle random variation
// ("vary*" fields are presumably half-ranges around the base -- confirm
// against the emitter code).
struct ParticleProperties
{
    float particlesPerSecond;          // emission rate
    tg::pos3 basePosition;
    tg::vec3 baseVelocity;
    tg::color3 baseColor;
    tg::vec3 varyPosition = tg::vec3::zero;
    tg::vec3 varyVelocity = tg::vec3::zero;
    tg::color3 varyColor = tg::color3::black;
    float baseSize;
    float varySize = 0.0f;
    float baseLife = 1.0f;             // seconds (presumably -- confirm)
    float varyLife = 0.4f;
    float emitNew = 0.0f;              // accumulator for fractional particles per frame, presumably -- confirm
};
}
|
<reponame>Jliciaga/recipe-app
import { ApolloServer } from "apollo-server-express";
import * as cors from "cors";
import * as express from "express";
import resolvers from "#root/graphql/resolvers";
import typeDefs from "#root/graphql/typeDefs";
import accessEnv from "#root/helpers/accessEnv";
// Port to bind; falls back to 7000 when the PORT env var is not set.
const PORT = accessEnv("PORT", 7000);
const apolloServer = new ApolloServer({ resolvers, typeDefs });
const app = express();
// NOTE(review): this CORS config reflects EVERY origin back while also
// allowing credentials -- that effectively disables cross-origin protection
// for authenticated requests. Confirm this is intentional (e.g. internal-only
// service) before shipping.
app.use(cors({
  // Reflect whatever origin the browser sent (allow-all).
  origin: (origin, cb) => cb(null, true),
  credentials: true,
  preflightContinue: true,
  exposedHeaders: [
    "Access-Control-Allow-Headers",
    "Access-Control-Allow-Origin, Origin, X-Requested-With, Content-Type, accept",
    "X-Password-Expired"
  ],
  // Some legacy browsers choke on 204 for OPTIONS responses.
  optionsSuccessStatus: 200
}));
// Mount GraphQL at /graphql on the express app.
apolloServer.applyMiddleware({ app, path:"/graphql"});
app.listen(PORT, "0.0.0.0", () => {
  console.info(`GSD service listening on port:${PORT}`);
});
|
#!/bin/bash
# Run each setup script in sequence. A failing script does not abort the
# rest (matches original behavior -- each script is independent).
for cmd in base.sh py.sh vim.sh git.sh tmux.sh rust.sh fish.sh
do
    bash "$cmd"   # fix: quote the expansion to be safe against word splitting/globbing
done
echo "Enjoy :-)"
|
import uvicore
from app1.models.post import Post
from app1.models.comment import Comment
from app1.models.tag import Tag
from app1.models.hashtag import Hashtag
from app1.models.image import Image
from uvicore.support.dumper import dump, dd
from uvicore import log
@uvicore.seeder()
async def seed():
    """Seed the posts table plus related comments, tags, images, attributes and hashtags.

    Doubles as a living example / scratchpad of the ORM relation APIs:
    insert_with_relations(), save(), link()/unlink(), create()/add() and insert().
    NOTE(review): assumes the Tag and Hashtag seeders have already run --
    confirm seeder ordering.
    """
    log.item('Seeding table posts')

    # Get all tags keyed by name
    tags = await Tag.query().key_by('name').get()

    # Get all hashtags keyed by name
    hashtags = await Hashtag.query().key_by('name').get()

    #post = PostModel(slug='test-post1', title='Test Post1', other='other stuff1', creator_id=1)
    #await post.save()

    # Now I want to do inline, though has to be Dict
    # where I create the post with comments=[{dict}]
    # WORKS!!!
    await Post.insert_with_relations([
        {
            'slug': 'test-post1',
            'title': 'Test Post1',
            'body': 'This is the body for test post1. I like the color red and green.',
            'other': 'other stuff1',
            'creator_id': 1,
            'owner_id': 2,
            'comments': [
                {
                    'title': 'Post1 Comment1',
                    'body': 'Body for post1 comment1',
                    #'post_id': 1,  # No id needed, thats what post.create() does
                    'creator_id': 1,
                }
            ],
            # Many-To-Many tags works with existing Model, new Model or new Dict
            'tags': [
                # Existing Tag
                tags['linux'],
                tags['mac'],
                tags['bsd'],
                tags['bsd'],  # Yes its a duplicate, testing that it doesn't fail
                # New Tag as Model (tag created and linked)
                Tag(name='test1', creator_id=4),
                # New Tag as Dict (tag created and linked)
                {'name': 'test2', 'creator_id': 4},
            ],
            # Polymorphic One-To-One
            'image': {
                'filename': 'post1-image.png',
                'size': 1234932,
            },
            # Polymorphic One-To-Many Attributes
            'attributes': [
                {'key': 'post1-test1', 'value': 'value for post1-test1'},
                {'key': 'post1-test2', 'value': 'value for post1-test2'},
                {'key': 'badge', 'value': 'IT'},
            ],
            # Polymorphic Many-To-Many Hashtags
            'hashtags': [
                hashtags['important'],
                hashtags['outdated'],
                hashtags['outdated'],  # Yes its a duplicate, testing that it doesn't fail
                # New hashtag by model
                Hashtag(name='test1'),
                # New hashtag by dict
                {'name': 'test2'},
            ],
        },
    ])

    # Example of adding attributes later
    post = await Post.query().find(1)

    # ISSUE: How can we update an attribute that is a dict?
    # If it weren't a dict we could get it (post.attributes['badge']) then change an attribute then call post.attributes['badge'].save() probably
    # But if a dict, how can we update a value?  Doing a .create/.add like so
    # await post.add('attributes', [
    #     {'key': 'post1-test2', 'value': 'xxxx'},
    # ])
    # Gives us an Integrity error due to models.py insert() around line 92.  It assumes a bulk insert and cannot upsert
    # If we don't pass a list it does a single insert which will also fail with IntegrityError.  I would have to add code
    # to know how to SELECT to see if exists based on PK or in the case of polymorphism, all 3 or more poly columns.

    # Example of adding attributes whose value is also a Dict - DOES NOT WORK YET, need auto-serialization, though I could serialize manually to str
    # await post.add('attributes', [
    #     {'key': 'post1-test3', 'value': {
    #         'this': 'value',
    #         'is': 'a dict itself!'
    #     }},
    #     {'key': 'post1-test4', 'value': ['one', 'two', 'three']}
    # ])

    # # Blow out all attributes and set this complete List
    # await post.set('attributes', [
    #     {'key': 'post1-test3', 'value': 'value for post1-test3'},
    #     {'key': 'post1-test4', 'value': 'value for post1-test4'},
    # ])

    # Example of setting all a Polymorphic Many-To-Many Hashtags - WORKS
    # await post.set('hashtags', [
    #     {'name': 'test1'},
    #     {'name': 'test2'},
    # ])

    # Example of setting all Many-To-Many Tags - WORKS
    # await post.set('tags', [
    #     tags['linux'],
    # ])

    # Example of deleting all Polymorphic Many-To-Many Hashtags - DELETE DOES NOT WORK FOR POLY MTM (as currently designed)
    #await post.delete('hashtags')

    # Example of deleting all One-To-Many comments - DELETE DOES NOT WORK FOR OTM (as currently designed)
    #await post.delete('comments')

    # Example of deleting a HasOne child - WORKS
    #post = await Post.query().find(1)
    #await post.delete('image')

    # Example of linking Polymorphic Many-To-Many Hashtags - WORKS
    # await post.link('hashtags', [
    #     hashtags.get('obsolete')
    # ])

    # Example of linking tags (does not create, only links EXISTING tags) - WORKS
    # await post.link('tags', [
    #     # Linking can be EXISTING Dict
    #     # {
    #     #     'id': 1,
    #     #     'name': 'linux',
    #     #     'creator_id': 1,
    #     # }
    #     # Or existing Model
    #     tags.get('linux'),
    #     tags.get('mac'),
    #     tags.get('bsd'),
    # ])

    # Test unlink
    # await post.unlink('tags', tags.get('linux'))  # As not list
    # await post.unlink('tags', [tags.get('mac')])  # As list
    # await post.unlink('tags')  # All

    # Create (if not exists) AND link tags
    # await post.create('tags', [
    #     tags['linux'],  # Already exists, won't re-create
    #     Tag(id=1, name='linux', creator_id=1),  # Already exists, should just link
    #     Tag(name='test1', creator_id=4),  # Does not exist, should create and link
    #     {
    #         'name': 'test2',
    #         'creator_id': 4,
    #     }
    # ])

    #post.create()

    # Show Attributes
    #post.attributes

    # Create and link attributes
    #post.create('attributes', [{'key': 'asdf', 'value': 'asdf'}])

    # Delete and unlink attributes
    # post.delete('attributes')  # all
    # post.delete('attributes', [attribute1, attribute2])  # by model
    # post.delete('attributes', 'key1', 'key2')  # not by pk, but secondary pk the "key" column somehow

    # contacts table for a One-To-One Poly
    # combined PK of table_name + table_pk for unique (so could get rid of ID column technically)
    # id | table_name | table_pk | name    | email | phone
    # ------------------------------------------------------
    # 1  | users      | 1        | Matthew | @asdf | 555
    # 2  | employee   | 4        | Bob     | @asdf | 444

    # attributes table for a One-To-Many Poly
    # Only unique has to be ID column, or I suppose a combo of table_name+table_pk+key would do it, would also be the composit index
    # Then could get rid of ID column
    # id | table_name | table_pk | key  | value
    # -------------------------------------------
    # 1  | users      | 1        | name | matthew
    # 2  | users      | 1        | age  | 37

    # poly_tags pivot table for a Many-To-Many Poly
    # entity_tags
    # poly_tags
    # tag_relations
    # tag_linkage
    # post_id | tag_id |
    # table_name | table_pk | tag_id
    # ------------------------------
    # posts      | 1        | 5
    # posts      | 1        | 6
    # comments   | 23       | 5
    # comments   | 23       | 7

    # NO, add does not exist.  Use create to make/link or link to just link
    # .add() = create record and linkage
    #post.query('attributes').add({'key': 'value'})

    # this works NOW - it creates and links
    #post.create('comments', ['asdf'])
    # So this should create a tag and link it
    #post.create('tags', ['tag1...])
    # Easier than .tags()

    # Link and unlink should be ONLY for ManyToMany
    # Because all other relations the ID is a foreign key on one of the tables
    # So to unlink it, you have to DELETE the record, there is no "link"
    # post.link('tags', tags)
    # post.unlink('tags', tag[0])  # unlink one tag
    # post.unlink('tags')  # unlink all tags

    # You can insert one so you can insert relations right after
    post = await Post(slug='test-post2', title='Test Post2',
        body='This is the body for test post2. My favorite frameworks are Laravel and Uvicore!',
        other=None, creator_id=1, owner_id=2
    ).save()

    # Create AND Link if not exist Many-To-Many tags
    await post.link('tags', [
        tags['linux'],
        tags['bsd'],
    ])

    # Create Polymorphic One-To-One
    await post.create('image', {
        #'imageable_type': 'posts',  # NO, inferred
        #'imageable_id': 2,  # NO, inferred
        'filename': 'post2-image.png',
        'size': 2483282
    })

    # Create Polymorphic One-To-Many
    # NOTE: .add is simply an alias for .create()
    await post.add('attributes', [
        {'key': 'post2-test1', 'value': 'value for post2-test1'},
        {'key': 'post2-test2', 'value': 'value for post2-test2'},
        {'key': 'badge', 'value': 'IT'},
    ])

    # Create Polymorphic Many-To-Many
    await post.add('hashtags', [
        hashtags['obsolete'],
        hashtags['outdated'],
        hashtags['outdated'],  # Yes its a duplicate, testing that it doesn't fail
    ])

    # You can NOT insert relations right away, these tags will be IGNORED
    # Use Dict with insert_with_relations if you want this
    post = await Post(
        slug='test-post3',
        title='Test Post3',
        body='This is the body for test post1. I like the programming in PHP, Python and anything Typescript.',
        other='other stuff2-bad',  # We'll update this away below
        creator_id=2,
        owner_id=1,
        tags=[  # TAGS IGNORED
            tags['linux'],
            tags['bsd'],
        ]
    ).save()

    # Test an update
    post.other = 'other stuff3'
    await post.save()

    await post.add('attributes', [
        {'key': 'badge', 'value': 'DEV'},
    ])
    await post.add('hashtags', [
        hashtags['important'],
    ])

    # You can use .insert() as a List of model instances
    # But obviously you cant then add in tags
    # This WILL NOT insert relations at all
    await Post.insert([
        # 2 posts for admin
        #Post(slug='test-post1', title='Test Post1', other='other stuff1', creator_id=1),
        #Post(slug='test-post2', title='Test Post2', other=None, creator_id=1, owner_id=2),
        # 3 posts for manager1
        #Post(slug='test-post3', title='Test Post3', other='other stuff2', creator_id=2, owner_id=1),
        Post(slug='test-post4', title='Test Post4',
            body='This is the body for test post1. My favorite morotcycles are super fast crotch rockets!',
            other=None, creator_id=2, owner_id=1),
        Post(slug='test-post5', title='Test Post5',
            body='This is the body for test post1. Everyone loves and cynic.',
            other=None, creator_id=2, owner_id=2),
        # 2 posts for user2
        #Post(slug='test-post6', title='Test Post6', other='other stuff3', creator_id=5),
        #Post(slug='test-post7', title='Test Post7', other=None, creator_id=5),
    ])

    # You can also use .insert() as a list of Dict
    # This one inserts BelongsTo children FIRST (user, then contact, then post)
    # This is a multi nesting deep insert (NOT bulk, in a loop because of relations)
    # Creates User first, then Contact second, then finally Post with new creator_id
    await Post.insert_with_relations([
        {
            'slug': 'test-post6',
            'title': 'Test Post6',
            'body': 'This is the body for test post1. Everyone wants to fly.',
            'other': 'other stuff6',
            #NO - 'creator_id': 5,
            'creator': {
                'username': '<EMAIL>',
                'email': '<EMAIL>',
                'first_name': 'User',
                'last_name': 'Two',
                'creator_id': 2,
                'password': '<PASSWORD>',
                'contact': {
                    'name': '<NAME>',
                    'title': 'User2',
                    'address': '444 User Dr.',
                    'phone': '444-444-4444'
                    # NO user_id=5
                },
                'info': {
                    'extra1': 'user5 extra',
                },
            },
            'owner_id': 3,
            # Polymorphic One-To-One
            'image': {
                'filename': 'post6-image.png',
                'size': 3345432,
            },
            # Polymorphic One-To-Many
            'attributes': [
                {'key': 'post6-test1', 'value': 'value for post6-test1'},
                {'key': 'post6-test2', 'value': 'value for post6-test2'},
                {'key': 'post6-test3', 'value': 'value for post6-test3'},
                {'key': 'badge', 'value': 'IT'},
                #{'key': 'test', 'value': 'Hi there, my name is <NAME>, what is your name?  Again, my name is <NAME>, what is your name?  Again, my name is <NAME>, what is your name?'},
            ],
            # Polymorphic Many-To-Many
            'hashtags': [
                hashtags['outdated']
            ],
        }
    ])

    # This does NOT work yet, but would be nice.  Especially if it can UPDATE an existing child
    #post = await Post.query().find(6)
    # await post.add('creator', {
    #     'email': '<EMAIL>',
    #     'contact': {
    #         'name': '<NAME>',
    #         'title': 'User2',
    #         'address': '444 User Dr.',
    #         'phone': '444-444-4444'
    #         # NO user_id=5
    #     },
    # })

    # You can insert a single model with .save()
    post = Post(slug='test-post7', title='Test Post7',
        body='This is the body for test post1. I like the to code alone.',
        other=None, creator_id=5, owner_id=4)
    await post.save()
    await post.create('tags', [
        tags.get('linux'),
        tags.get('bsd'),
        tags.get('laravel'),
    ])
|
<reponame>jakerobinson/pail
require_relative '../../spec_helper'
describe 'Pail::List' do
  # Fixture directory shipped alongside the specs (contains cat.jpg and
  # a moar_cats/ subfolder -- see expectations below).
  let(:path) { File.join(File.dirname(__FILE__), '../../assets/') }
  let(:bad_path) { '/foo/bar/baz/' }

  context 'invalid path' do
    it 'raises error on bad path' do
      expect { Pail::List.new(bad_path) }.to raise_error(RuntimeError, 'The path: /foo/bar/baz/ is not a valid directory')
    end
  end

  context 'valid path' do
    subject(:pail_list) { Pail::List.new path }

    describe '#path' do
      it 'returns the path' do
        expect(pail_list.path).to include('/assets/')
      end
    end

    describe '#files' do
      it 'returns a hash' do
        expect(pail_list.files).to be_a Hash
      end
      it 'contains a list of files' do
        expect(pail_list.files.keys).to include('cat.jpg')
      end
    end

    describe '#folders' do
      # NOTE(review): description says "returns an array" but the
      # expectation (and implementation, presumably) is a Hash.
      it 'returns an array' do
        expect(pail_list.folders).to be_a Hash
      end
      it 'contains a list of folders' do
        expect(pail_list.folders.keys).to contain_exactly('moar_cats')
      end
    end

    describe '#to_hash' do
      it 'returns a hash' do
        expect(pail_list.to_hash).to be_a Hash
      end
      it 'contains a list of files and folders' do
        expect(pail_list.to_hash.keys).to contain_exactly(:files, :folders)
      end
    end
  end
end
/**
 * Converts a US-dollar amount to euros.
 *
 * Generalized: the exchange rate is now a parameter so callers can supply a
 * current rate; it defaults to the original hard-coded 0.88, so existing
 * callers are unaffected.
 *
 * @param {number} usDollars - amount in USD
 * @param {number} [conversionRate=0.88] - euros per dollar
 * @returns {number} amount in EUR
 */
function convertToEuros(usDollars, conversionRate = 0.88) {
  return usDollars * conversionRate;
}
let euros = convertToEuros(100);
console.log(euros);
// prints 88
"""
https://adventofcode.com/2020/day/4
"""
# Read Input File
with open(r'../input_files/day04_input_mb.txt', 'r') as fh:
# Generate list of strings from read() and split by empty line (skip last empty line)
passports = fh.read()[0:-1].split('\n\n')
# Replace newlines with spaces
passports = [line.replace('\n', ' ') for line in passports]
# Split lines by space to get lists of lists
passports = [item.split(' ') for item in passports]
passports = [[item.split(':') for item in sublist] for sublist in passports]
# Convert each passport to a dict with the required fields as keys
passports = [dict(x) for x in passports]
requirements = {'byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid', }
def has_required_fields(passport, required_fields):
    """Return True if `passport` contains every key in `required_fields`.

    Fixes two defects in the original:
    - the `required_fields` parameter was ignored (the module-level
      `requirements` global was used instead);
    - a failed check fell off the end of the function and returned None
      instead of False.
    """
    return set(passport.keys()).issuperset(required_fields)
def is_valid_byr(birthyear):
    """Birth Year: digits only, between 1920 and 2002 inclusive."""
    if not birthyear.isdigit():
        return False
    return 1920 <= int(birthyear) <= 2002
def is_valid_iyr(issue_year):
    """Issue Year: digits only, between 2010 and 2020 inclusive."""
    return issue_year.isdigit() and 2010 <= int(issue_year) <= 2020
def is_valid_eyr(experiation_year):
    """Expiration Year: digits only, between 2020 and 2030 inclusive.

    (Parameter name keeps the original's "experiation" misspelling so that
    any keyword-argument callers keep working.)
    """
    return experiation_year.isdigit() and 2020 <= int(experiation_year) <= 2030
def is_valid_hgt(height):
    """Height: 150-193 cm or 59-76 in; anything else is invalid.

    Robustness fix: the original called int() on whatever preceded the unit,
    so malformed values like 'abccm' or 'cm180' raised ValueError instead of
    returning False. The numeric part is now validated first.
    """
    if height.endswith('cm'):
        value = height[:-2]
        return value.isdigit() and 150 <= int(value) <= 193
    if height.endswith('in'):
        value = height[:-2]
        return value.isdigit() and 59 <= int(value) <= 76
    # No recognized unit suffix.
    return False
def is_valid_hcl(hair_color):
    """Hair Color: '#' followed by exactly six lowercase hex characters."""
    if len(hair_color) != 7 or not hair_color.startswith('#'):
        return False
    return all(ch in '0123456789abcdef' for ch in hair_color[1:])
def is_valid_ecl(eye_color):
    """Eye Color: exactly one of the seven allowed codes."""
    return eye_color in {'amb', 'blu', 'brn', 'gry', 'grn', 'hzl', 'oth'}
def is_valid_pid(passport_id):
    """Passport ID: exactly nine digits (leading zeroes allowed)."""
    return len(passport_id) == 9 and passport_id.isdigit()
# Part One: count passports that have every required field.
# (Idiom fix: sum() over a generator instead of a manual counter.)
valid_passports_1 = sum(1 for passport in passports if has_required_fields(passport, requirements))
print(f'Part One - Number of {valid_passports_1 = }.')

# Part Two: of those, count passports whose every field also validates.
# (Idiom fix: dropped the '== True' comparison -- truthiness is identical
# whether has_required_fields returns True/None or a plain bool.)
passports_all_fields = [p for p in passports if has_required_fields(p, requirements)]
valid_passports_2 = sum(
    1
    for passport in passports_all_fields
    if all((
        is_valid_byr(passport['byr']),
        is_valid_iyr(passport['iyr']),
        is_valid_eyr(passport['eyr']),
        is_valid_hgt(passport['hgt']),
        is_valid_hcl(passport['hcl']),
        is_valid_ecl(passport['ecl']),
        is_valid_pid(passport['pid']),
    ))
)
print(f'Part Two - Number of {valid_passports_2 = }.')
|
def find_best_models(model_results: AllModelResultsDict, model_types: ModelDict, datasets: DfDict) -> ModelDict:
    """For each dataset, pick the highest-scoring model (ties broken by
    model name, ascending) and record its type and the dataset's features."""
    best_models = {}
    for dataset_name, scores in model_results.items():
        # min over (-score, name): highest score wins, alphabetical tie-break.
        winner = min(scores, key=lambda name: (-scores[name], name))
        best_models[dataset_name] = {
            winner: {
                'type': model_types[winner],
                'features': datasets[dataset_name],
            }
        }
    return best_models
#!/bin/bash
# Builds, signs and broadcasts a "mapping verify" transaction against a local
# REST endpoint. Relies on $CLI and $CHAINID (and presumably $KEY/$PASSWD for
# the commented-out key-creation section) being exported by env.sh -- confirm.
source ./env.sh

# Local LCD/REST endpoint of the running node.
rest_laddr="http://127.0.0.1:1317"
# ERC20 contract address under test (hex, no 0x prefix).
test_oin_erc20_addr="a4d595F42f3b9CF98d1afe2EFa027c06280662c3"
# Name of the local key used for the cross-chain account.
test_cross_chain_key="t2"

#query_exist=$($CLI keys show ${test_cross_chain_key} 2>/dev/null | grep "name")
#new_test_cross_chain_key=0
#if [ "${query_exist}" == "" ];
#then
#    echo "add key $test_cross_chain_key"
#    new_test_cross_chain_key=1
#    $CLI keys add ${test_cross_chain_key} <<EOM
#$PASSWD
#EOM
#fi

# Resolve the key name to its bech32 address.
test_cross_chain_address=`$CLI keys show ${test_cross_chain_key} -a`

# send feecoin to new account.
#$CLI tx send $KEY $test_cross_chain_address 10000000000feecoin
#if [ "$new_test_cross_chain_key" == "1" ];
#then
#    echo "wait tx send processed"
#    sleep 10
#fi

from=$test_cross_chain_address
unsignedTx="unsigned_map_verify.json"
signedTx="signed_map_verify.json"

# Step 1: ask the REST endpoint to generate an unsigned tx (generate_only).
curl -s -X POST --data-binary "{\"base_req\":{\"from\":\"$from\",\"chain_id\":\"$CHAINID\",\"fees\":[{\"denom\":\"feecoin\",\"amount\":\"100\"}],\"generate_only\":true},\"erc_addr\":\"$test_oin_erc20_addr\",\"cc_addr\":\"${test_cross_chain_address}\"}" ${rest_laddr}/mapping/verify/ > $unsignedTx

# Step 2: look up account number + sequence needed for offline signing.
accountInfo=`$CLI query account $($CLI keys show $test_cross_chain_key -a)`
account_number=`echo $accountInfo | jq -r .value.account_number`
sequence=`echo $accountInfo | jq -r .value.sequence`

# Step 3: sign offline, then broadcast the signed tx.
$CLI tx sign $unsignedTx --chain-id=$CHAINID --from=$from --account-number=${account_number} --sequence=${sequence} --offline > $signedTx
$CLI tx broadcast $signedTx
echo "finished"
|
<filename>src/renderer/value-transformer/SwiftValueTransformer.ts<gh_stars>10-100
import {
BasicTypeValue,
DictionaryKeyType,
isArraryType,
isBasicType,
isInterfaceType,
isDictionaryType,
isEnumType,
isOptionalType,
isPredefinedType,
ValueType,
Value,
} from '../../types';
import { ValueTransformer } from './ValueTransformer';
/**
 * Renders intermediate `ValueType`/`Value` descriptions as Swift source text.
 *
 * `typeNameMap` lets callers remap generated type names (e.g. a predefined or
 * interface type) to project-specific Swift names.
 */
export class SwiftValueTransformer implements ValueTransformer {
  constructor(private readonly typeNameMap: Record<string, string>) {}

  /** Converts a ValueType into Swift type syntax (e.g. `[String: Double]?`). */
  convertValueType(valueType: ValueType): string {
    if (isBasicType(valueType)) {
      switch (valueType.value) {
        case BasicTypeValue.string:
          return 'String';
        case BasicTypeValue.number:
          return 'Double';
        case BasicTypeValue.boolean:
          return 'Bool';
        default:
          throw Error('Type not exists');
      }
    }

    if (isInterfaceType(valueType)) {
      return this.convertTypeNameFromCustomMap(valueType.name);
    }

    if (isEnumType(valueType)) {
      return this.convertTypeNameFromCustomMap(valueType.name);
    }

    if (isArraryType(valueType)) {
      return `[${this.convertValueType(valueType.elementType)}]`;
    }

    if (isDictionaryType(valueType)) {
      let keyType: string;
      switch (valueType.keyType) {
        case DictionaryKeyType.string:
          keyType = 'String';
          break;
        case DictionaryKeyType.number:
          keyType = 'Int';
          break;
        default:
          throw Error('Type not exists');
      }
      return `[${keyType}: ${this.convertValueType(valueType.valueType)}]`;
    }

    if (isOptionalType(valueType)) {
      return `${this.convertValueType(valueType.wrappedType)}?`;
    }

    if (isPredefinedType(valueType)) {
      return this.typeNameMap[valueType.name] ?? valueType.name;
    }

    throw Error('Type not handled');
  }

  /** Like convertValueType, but with one level of optionality stripped. */
  convertNonOptionalValueType(valueType: ValueType): string {
    if (isOptionalType(valueType)) {
      return this.convertValueType(valueType.wrappedType);
    }
    return this.convertValueType(valueType);
  }

  /** Renders a literal Value as a Swift expression of the given type. */
  convertValue(value: Value, type: ValueType): string {
    if (isBasicType(type)) {
      switch (type.value) {
        case BasicTypeValue.boolean:
          // BUG FIX: Swift boolean literals are lowercase `true`/`false`;
          // the original emitted Python-style 'True'/'False', which does not
          // compile in Swift.
          return (value as boolean) ? 'true' : 'false';
        default:
          // Strings and numbers: JSON literal syntax is valid Swift.
          return JSON.stringify(value);
      }
    }

    if (isInterfaceType(type)) {
      throw Error('Custom type static value is not supported');
    }

    if (isEnumType(type)) {
      // Swift dot-shorthand for an enum case.
      return `.${this.convertEnumKey(value as string)}`;
    }

    if (isArraryType(type)) {
      return `[${(value as Value[]).map((element) => this.convertValue(element, type.elementType)).join(', ')}]`;
    }

    if (isDictionaryType(type)) {
      return `[${Object.entries(value as Record<string, Value>)
        .map(([key, element]) => `${JSON.stringify(key)}: ${this.convertValue(element, type.valueType)}`)
        .join(', ')}]`;
    }

    if (isOptionalType(type)) {
      if (value === null) {
        return 'nil';
      }
      return this.convertValue(value, type.wrappedType);
    }

    if (isPredefinedType(type)) {
      throw Error('Predefined type static value is not supported');
    }

    throw Error('Value not handled');
  }

  /**
   * Lower-camel-cases an enum case name, keeping leading acronyms intact
   * (e.g. 'URLSession' -> 'urlSession', 'Value' -> 'value').
   */
  convertEnumKey(text: string): string {
    if (text.length === 0) {
      return '';
    }

    let index = 0;
    // Get the index of the first lowercased letter
    while (index < text.length) {
      if (text[index].toLowerCase() === text[index]) {
        break;
      }
      index += 1;
    }
    // Get the index before the first lowercased letter (keeps the last
    // uppercase letter of an acronym attached to the following word).
    if (index > 1 && index < text.length && text[index].toLowerCase() === text[index]) {
      index -= 1;
    }

    return text.slice(0, index).toLowerCase() + text.slice(index);
  }

  /** Applies the caller-supplied rename map, falling back to the raw name. */
  convertTypeNameFromCustomMap(name: string): string {
    return this.typeNameMap[name] ?? name;
  }
}
|
/**
 * Minimal holder for a single string name with classic accessor methods.
 */
class ClassName {
  constructor() {
    // Empty until the first setName() call.
    this.name = '';
  }

  /** Stores the given name, replacing any previous value. */
  setName(name) {
    this.name = name;
  }

  /** Returns the stored name ('' if never set). */
  getName() {
    return this.name;
  }
}
from sklearn.cluster import KMeans
def cluster(data, n_clusters=3, random_state=None):
    """Cluster `data` with k-means and pair each sample with its label.

    Generalized from the original, which hard-coded 3 clusters and gave no
    way to seed the run; defaults preserve the original behavior.

    Args:
        data: indexable array-like of shape (n_samples, n_features).
        n_clusters: number of clusters to fit (default 3, the original value).
        random_state: optional seed forwarded to KMeans for reproducibility.

    Returns:
        list of (sample, label) tuples, in input order.
    """
    kmeans = KMeans(n_clusters=n_clusters, random_state=random_state).fit(data)
    # zip pairs each sample with its assigned label (same as the original
    # enumerate loop, without the manual index bookkeeping).
    return list(zip(data, kmeans.labels_))
<reponame>Slathian/react-portfolio
import React from 'react';
import AltLogin from '../auth/RHFLogin';
import loginImg from '../../../static/assets/images/auth/login.jpg';
// Login page: full-bleed image panel on the left, RHF-based login form on
// the right. Expects `handleSuccessfulLogin`, `handleUnsuccessfulLogin` and
// router `history` in props.
export default function(props) {
  // Called by AltLogin when the server accepts the credentials:
  // lift the auth state up to the parent, then return to the home route.
  const handleSuccessfulAuth = () => {
    props.handleSuccessfulLogin();
    props.history.push('/');
  }

  // Called by AltLogin on a failed attempt; parent decides how to surface it.
  const handleUnsuccessfulAuth = () => {
    props.handleUnsuccessfulLogin();
  }

  return (
    <div>
      <div className="auth-page-wrapper">
        {/* Decorative image column */}
        <div className="left-column"
          style={{
            backgroundImage: `url(${loginImg})`
          }}
        />
        <div className="right-column">
          {/* <Login /> */}
          <AltLogin
            handleSuccessfulAuth={handleSuccessfulAuth}
            handleUnsuccessfulAuth={handleUnsuccessfulAuth}
          />
        </div>
      </div>
    </div>
  )
};
<filename>internal/cephfs/volume.go
/*
Copyright 2018 The Ceph-CSI Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cephfs
import (
"context"
"path"
"strconv"
"strings"
"github.com/ceph/ceph-csi/internal/util"
klog "k8s.io/klog/v2"
)
var (
	// clusterAdditionalInfo contains information regarding if resize is
	// supported in the particular cluster and subvolumegroup is
	// created or not.
	// Subvolumegroup creation and volume resize decisions are
	// taken through this additional cluster information.
	clusterAdditionalInfo = make(map[string]*localClusterState)
)

const (
	// cephEntityClientPrefix is prepended to the credential ID to form the
	// Ceph entity name passed via "-n" (e.g. "client.admin").
	cephEntityClientPrefix = "client."
)

// Subvolume holds subvolume information as unmarshaled from the JSON output
// of "ceph fs subvolume info" (tags match the command's field names).
type Subvolume struct {
	BytesQuota    int      `json:"bytes_quota"`
	DataPool      string   `json:"data_pool"`
	GID           int      `json:"gid"`
	Mode          int      `json:"mode"`
	MonAddrs      []string `json:"mon_addrs"`
	Path          string   `json:"path"`
	PoolNamespace string   `json:"pool_namespace"`
	Type          string   `json:"type"`
	UID           int      `json:"uid"`
}
// getVolumeRootPathCephDeprecated returns the root path used by the legacy
// (pre-subvolume) volume layout: /csi-volumes/<volID>.
func getVolumeRootPathCephDeprecated(volID volumeID) string {
	return path.Join("/", "csi-volumes", string(volID))
}
// getVolumeRootPathCeph asks Ceph for the root path of a subvolume via
// "ceph fs subvolume getpath". A missing volume is reported as
// ErrVolumeNotFound (joined with the underlying error); the trailing newline
// from the CLI output is stripped.
func getVolumeRootPathCeph(ctx context.Context, volOptions *volumeOptions, cr *util.Credentials, volID volumeID) (string, error) {
	stdout, stderr, err := util.ExecCommand(
		ctx,
		"ceph",
		"fs",
		"subvolume",
		"getpath",
		volOptions.FsName,
		string(volID),
		"--group_name",
		volOptions.SubvolumeGroup,
		"-m", volOptions.Monitors,
		"-c", util.CephConfigPath,
		"-n", cephEntityClientPrefix+cr.ID,
		"--keyfile="+cr.KeyFile)
	if err != nil {
		klog.Errorf(util.Log(ctx, "failed to get the rootpath for the vol %s(%s) stdError %s"), string(volID), err, stderr)
		// Not-found is surfaced via stderr text; map it to the sentinel error.
		if strings.Contains(stderr, ErrVolumeNotFound.Error()) {
			return "", util.JoinErrors(ErrVolumeNotFound, err)
		}

		return "", err
	}
	return strings.TrimSuffix(stdout, "\n"), nil
}
// getSubVolumeInfo fetches metadata for a CephFS subvolume via
// "ceph fs subvolume info" and unmarshals it into a Subvolume.
//
// Error mapping:
//   - volume missing             -> ErrVolumeNotFound
//   - "info" command unsupported -> ErrInvalidCommand (older Ceph releases)
//   - anything else              -> the raw error
func getSubVolumeInfo(ctx context.Context, volOptions *volumeOptions, cr *util.Credentials, volID volumeID) (Subvolume, error) {
	info := Subvolume{}
	err := execCommandJSON(
		ctx,
		&info,
		"ceph",
		"fs",
		"subvolume",
		"info",
		volOptions.FsName,
		string(volID),
		"--group_name",
		volOptions.SubvolumeGroup,
		"-m", volOptions.Monitors,
		"-c", util.CephConfigPath,
		"-n", cephEntityClientPrefix+cr.ID,
		"--keyfile="+cr.KeyFile)
	if err != nil {
		klog.Errorf(util.Log(ctx, "failed to get subvolume info for the vol %s(%s)"), string(volID), err)
		if strings.HasPrefix(err.Error(), ErrVolumeNotFound.Error()) {
			return info, ErrVolumeNotFound
		}
		// BUG FIX: the original condition was inverted -- it returned
		// ErrInvalidCommand for every error EXCEPT an invalid-command error,
		// contradicting its own comment and resizeVolume's handling. Map an
		// invalid-command error to the sentinel so callers can fall back, and
		// propagate everything else as-is.
		if strings.Contains(err.Error(), ErrInvalidCommand.Error()) {
			return info, ErrInvalidCommand
		}

		return info, err
	}
	return info, nil
}
// localClusterState caches per-cluster facts (keyed by ClusterID in
// clusterAdditionalInfo) so repeated create/resize calls can skip work that
// is already known to be done.
type localClusterState struct {
	// set true if cluster supports resize functionality.
	resizeSupported bool
	// set true once a subvolumegroup is created
	// for corresponding cluster.
	subVolumeGroupCreated bool
}
// createVolume creates a CephFS subvolume of the given quota, first ensuring
// (once per process, per cluster) that the subvolume group exists. The group
// creation result is cached in clusterAdditionalInfo so subsequent volumes on
// the same cluster skip the extra command.
func createVolume(ctx context.Context, volOptions *volumeOptions, cr *util.Credentials, volID volumeID, bytesQuota int64) error {
	// verify if corresponding ClusterID key is present in the map,
	// and if not, initialize with default values(false).
	if _, keyPresent := clusterAdditionalInfo[volOptions.ClusterID]; !keyPresent {
		clusterAdditionalInfo[volOptions.ClusterID] = &localClusterState{}
	}

	// create subvolumegroup if not already created for the cluster.
	if !clusterAdditionalInfo[volOptions.ClusterID].subVolumeGroupCreated {
		err := execCommandErr(
			ctx,
			"ceph",
			"fs",
			"subvolumegroup",
			"create",
			volOptions.FsName,
			volOptions.SubvolumeGroup,
			"-m", volOptions.Monitors,
			"-c", util.CephConfigPath,
			"-n", cephEntityClientPrefix+cr.ID,
			"--keyfile="+cr.KeyFile)
		if err != nil {
			klog.Errorf(util.Log(ctx, "failed to create subvolume group %s, for the vol %s(%s)"), volOptions.SubvolumeGroup, string(volID), err)
			return err
		}
		util.DebugLog(ctx, "cephfs: created subvolume group %s", volOptions.SubvolumeGroup)
		clusterAdditionalInfo[volOptions.ClusterID].subVolumeGroupCreated = true
	}

	args := []string{
		"fs",
		"subvolume",
		"create",
		volOptions.FsName,
		string(volID),
		strconv.FormatInt(bytesQuota, 10),
		"--group_name",
		volOptions.SubvolumeGroup,
		"--mode", "777",
		"-m", volOptions.Monitors,
		"-c", util.CephConfigPath,
		"-n", cephEntityClientPrefix + cr.ID,
		"--keyfile=" + cr.KeyFile,
	}

	// Optional explicit data-pool placement.
	if volOptions.Pool != "" {
		args = append(args, "--pool_layout", volOptions.Pool)
	}

	err := execCommandErr(
		ctx,
		"ceph",
		args[:]...)
	if err != nil {
		klog.Errorf(util.Log(ctx, "failed to create subvolume %s(%s) in fs %s"), string(volID), err, volOptions.FsName)
		return err
	}

	return nil
}
// resizeVolume will try to use ceph fs subvolume resize command to resize the
// subvolume. If the command is not available as a fallback it will use
// CreateVolume to resize the subvolume.
func resizeVolume(ctx context.Context, volOptions *volumeOptions, cr *util.Credentials, volID volumeID, bytesQuota int64) error {
	// keyPresent checks whether corresponding clusterID key is present in clusterAdditionalInfo
	var keyPresent bool
	// verify if corresponding ClusterID key is present in the map,
	// and if not, initialize with default values(false).
	if _, keyPresent = clusterAdditionalInfo[volOptions.ClusterID]; !keyPresent {
		clusterAdditionalInfo[volOptions.ClusterID] = &localClusterState{}
	}
	// resize subvolume when either it's supported, or when corresponding
	// clusterID key was not present (first attempt on this cluster, so
	// probe whether "resize" works).
	if clusterAdditionalInfo[volOptions.ClusterID].resizeSupported || !keyPresent {
		args := []string{
			"fs",
			"subvolume",
			"resize",
			volOptions.FsName,
			string(volID),
			strconv.FormatInt(bytesQuota, 10),
			"--group_name",
			volOptions.SubvolumeGroup,
			"-m", volOptions.Monitors,
			"-c", util.CephConfigPath,
			"-n", cephEntityClientPrefix + cr.ID,
			"--keyfile=" + cr.KeyFile,
		}

		err := execCommandErr(
			ctx,
			"ceph",
			args[:]...)

		if err == nil {
			// Remember that resize works so future calls skip the probe.
			clusterAdditionalInfo[volOptions.ClusterID].resizeSupported = true
			return nil
		}
		// Incase the error is other than invalid command return error to the caller.
		if !strings.Contains(err.Error(), ErrInvalidCommand.Error()) {
			klog.Errorf(util.Log(ctx, "failed to resize subvolume %s(%s) in fs %s"), string(volID), err, volOptions.FsName)
			return err
		}
	}
	// "resize" is unavailable on this cluster: remember that and fall back to
	// re-issuing "subvolume create" with the new quota, which resizes on
	// older Ceph releases.
	clusterAdditionalInfo[volOptions.ClusterID].resizeSupported = false
	return createVolume(ctx, volOptions, cr, volID, bytesQuota)
}
// purgeVolume removes a CephFS subvolume via "ceph fs subvolume rm",
// optionally passing --force. A missing volume is reported as
// ErrVolumeNotFound joined with the underlying error.
func purgeVolume(ctx context.Context, volID volumeID, cr *util.Credentials, volOptions *volumeOptions, force bool) error {
	arg := []string{
		"fs",
		"subvolume",
		"rm",
		volOptions.FsName,
		string(volID),
		"--group_name",
		volOptions.SubvolumeGroup,
		"-m", volOptions.Monitors,
		"-c", util.CephConfigPath,
		"-n", cephEntityClientPrefix + cr.ID,
		"--keyfile=" + cr.KeyFile,
	}
	if force {
		arg = append(arg, "--force")
	}

	err := execCommandErr(ctx, "ceph", arg...)
	if err != nil {
		klog.Errorf(util.Log(ctx, "failed to purge subvolume %s(%s) in fs %s"), string(volID), err, volOptions.FsName)
		if strings.HasPrefix(err.Error(), ErrVolumeNotFound.Error()) {
			return util.JoinErrors(ErrVolumeNotFound, err)
		}
		return err
	}

	return nil
}
|
#!/bin/bash
# Prepare Android SDK/NDK locations (presumably for CI -- $ANDROID_NDK_ROOT
# and $ANDROID_HOME are expected to be set by the environment).

# Create the directory specified by $ANDROID_NDK_ROOT if it doesn't exist
mkdir -p "$ANDROID_NDK_ROOT"

# Robustness fix: ensure the SDK home directory exists before touching the
# config file -- a bare `touch` fails when $ANDROID_HOME's directory is missing.
mkdir -p "$ANDROID_HOME"
# Create the file repositories.cfg in the directory specified by $ANDROID_HOME
touch "$ANDROID_HOME/repositories.cfg"

# Ensure that the script can be sourced without executing its content:
# `exit` when run directly, `return` when sourced.
if [[ "$0" = "${BASH_SOURCE[0]}" ]]; then
  exit 0
else
  return 0
fi
import {
Body,
getMetadataArgsStorage,
JsonController,
Post,
QueryParam
} from 'routing-controllers'
import {
getPathParams,
getQueryParams,
getRequestBody,
IRoute,
parseRoutes
} from '../src'
// Verifies how routing-controllers parameter options (required, explicit
// `type`) are translated into OpenAPI parameter / requestBody schemas.
describe('options', () => {
  let routes: IRoute[]

  beforeEach(() => {
    // Rebuild routing metadata from scratch so the controller declared below
    // is the only registered route set.
    getMetadataArgsStorage().reset()

    class CreateUserBody { }
    class ParamType { }

    @JsonController('/users')
    // @ts-ignore: not referenced
    class UsersController {
      // routes[0]: required+optional query params, typed array body.
      @Post('/:userId')
      createUser(
        @QueryParam('from') _from: number,
        @QueryParam('to', { required: false }) _to: number,
        @Body({ type: CreateUserBody }) _body: CreateUserBody[]
      ) {
        return
      }

      // routes[1]: query param with explicit `type`, untyped array body.
      @Post('/:userId')
      createManyUsers(
        @QueryParam('param', { type: ParamType }) _param: string,
        @Body() _body: CreateUserBody[],
      ) {
        return
      }
    }

    routes = parseRoutes(getMetadataArgsStorage())
  })

  it('sets path parameter always as required regardless of options', () => {
    const route = routes[0]
    expect(getPathParams(route)[0].required).toEqual(true)

    // Even a global "required: false" default must not affect path params.
    route.options.defaults = { paramOptions: { required: false } }
    expect(getPathParams(route)[0].required).toEqual(true)
  })

  it('sets query parameter optional by default', () => {
    const route = routes[0]
    expect(getQueryParams(route, {})[0].required).toEqual(false)
  })

  it('sets query parameter required as per global options', () => {
    const route = routes[0]
    route.options.defaults = { paramOptions: { required: true } }
    expect(getQueryParams(route, {})[0].required).toEqual(true)
  })

  it('uses local required option over the global one', () => {
    const route = routes[0]
    route.options.defaults = { paramOptions: { required: true } }
    // _to was declared with { required: false } locally.
    expect(getQueryParams(route, {})[1].required).toEqual(false)
  })

  it('uses the explicit `type` parameter to override request query type', () => {
    const route = routes[1]
    expect(getQueryParams(route, {})[0]).toEqual({
      in: "query",
      name: "param",
      required: false,
      schema: {
        $ref: '#/components/schemas/ParamType'
      }
    })
  })

  it('uses the explicit `type` parameter to override array request body item type', () => {
    const route = routes[0]
    expect(getRequestBody(route)).toEqual({
      content: {
        'application/json': {
          schema: {
            items: {
              $ref: '#/components/schemas/CreateUserBody'
            },
            type: 'array'
          }
        }
      },
      description: 'CreateUserBody',
      required: false
    })
  })

  it('set inner schema as {} if array request body item type is not explicitly defined', () => {
    const route = routes[1]
    expect(getRequestBody(route)).toEqual({
      content: {
        'application/json': {
          schema: {
            items: {
              type: 'object'
            },
            type: 'array'
          }
        }
      },
      description: '',
      required: false
    })
  })
})
|
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: <NAME>
*/
package com.tzavellas.coeus.core
package config
import java.util.Locale
import java.lang.reflect.Method
import javax.servlet.ServletConfig
import com.tzavellas.coeus.Stage
import com.tzavellas.coeus.bind.ConverterRegistry
import com.tzavellas.coeus.http.multipart.{ MultipartRequestParser, NullMultipartRequestParser }
import com.tzavellas.coeus.i18n.locale.{ LocaleResolver, AcceptHeaderLocaleResolver }
import com.tzavellas.coeus.i18n.msg.{ MessageBundle, ServletMessageBundle }
import com.tzavellas.coeus.mvc.view.ViewResolver
import com.tzavellas.coeus.mvc.view.scalate.ScalateViewResolver
import error.ExceptionHandler
/**
 * Holds the configuration for <code>DispatcherServlet</code>.
 *
 * <p>All members are mutable <code>var</code>s with sensible defaults; they are
 * intended to be overridden during servlet initialization, before request
 * processing starts.</p>
 *
 * @see {@link com.tzavellas.coeus.core.DispatcherServlet DispatcherServlet}
 */
trait DispatcherConfig {

  /** The {@code ServletConfig} of the {@code DispatcherServlet}. */
  val servletConfig: ServletConfig

  /** The name of the {@code DispatcherServlet}. */
  def servletName = servletConfig.getServletName

  /** The {@code ServletContext} of the web application. */
  def servletContext = servletConfig.getServletContext

  /**
   * Get the deployment stage of the web application ("production" or "development").
   *
   * @see Stage#of(ServletContext)
   */
  def stage = Stage.of(servletContext)

  /**
   * Translates the controller's class into a base path.
   *
   * <p>By default the first character of the cotroller's simple class name is transformed
   * into lower-case and the "Controller" suffix (if present) is removed. For example a
   * controller class with the name <code>UserRegistrationController</code> gets translated
   * to "/userRegistration".</p>
   *
   * <p>The base path that is generated by convention from the class name can be overridden
   * by annotating the controller class with the {@literal @Path} annotation.</p>
   *
   * @see ControllerConventions
   * @see {@link com.tzavellas.coeus.annotation.Path Path}
   */
  var classNameTranslator: Class[_] => String = ControllerConventions.useClassName()

  /**
   * Translates the controller's annotated methods into paths.
   *
   * <p>By default the handler method is used without any transformation. For example a handler
   * method with the name <code>submitForm</code> gets translated to "/submitForm".</p>
   *
   * <p>The paths that are generated by convention from the method names of a controller
   * can be overridden using the HTTP method annotations of those methods.</p>
   *
   * @see ControllerConventions
   */
  var methodNameTranslator: Method => String = ControllerConventions.useMethodName

  /**
   * Finds the appropriate handler for a given request.
   */
  var requestResolver: RequestResolver = new TreeBasedRequestResolver

  /**
   * Called when an exception occurs during the request processing.
   *
   * <p>By default the exception gets propagated to the Servlet container.</p>
   */
  var exceptionHandler: ExceptionHandler = ExceptionHandler.defaultHandler(servletName)

  /**
   * Tells the <code>DispatcherServlet</code> to set the encoding of the Servlet
   * requests to the specified value.
   *
   * <p>If {@code requestEncoding} is {@code null} then the {@code DispatcherServlet}
   * does not set the encoding of the request.</p>
   *
   * <p>The default value is {@code "UTF-8"}.</p>
   */
  var requestEncoding: String = "UTF-8"

  /**
   * Tells the <code>DispatcherServlet</code> to set the HTTP method to the value
   * of the <em>_method</em> request parameter.
   *
   * <p>This is useful for implementing RESTful applications.</p>
   *
   * <p>The default value is {@code false}.</p>
   *
   */
  var overrideHttpMethod: Boolean = false

  /**
   * Respond to HTTP HEAD for URLs that support HTTP GET.
   *
   * <p>The default value is {@code false}.</p>
   */
  var allowHttpHead: Boolean = false

  /**
   * Respond to HTTP OPTIONS for all URLs.
   *
   * <p>The default value is {@code false}.</p>
   */
  var allowHttpOptions: Boolean = false

  /**
   * Tells the {@code DispatcherServlet} to send <em>404</em> (Not Found) instead
   * of <em>405</em> (Method Not Allowed) when a resource exist but does not support
   * the requested method.
   *
   * <p>This is useful when (for security reasons) we would like to hide the
   * existence of a resource.</p>
   *
   * <p>The default value is {@code false}.</p>
   */
  var hideResources: Boolean = false

  /**
   * Resolves the user's locale for a given request.
   *
   * <p>By default the user's locale is the locale specified in the <em>accept-language</em>
   * header of the HTTP request.</p>
   *
   * @see LocaleResolver
   * @see AcceptHeaderLocaleResolver
   * @see Locale
   */
  var localeResolver: LocaleResolver = new AcceptHeaderLocaleResolver

  /**
   * Loads the i18n messages.
   *
   * <p>By default the messages are loaded from property files that are located under the
   * "WEB-INF" directory and are cached for one second.</p>
   *
   * <p>The message files follow similar naming patterns with the files used in
   * {@link java.util.PropertyResourceBundle PropertyResourceBundle} using the base-name
   * "messages". For example if the user's locale is en_US then the <code>MessageBundle</code>
   * will use the files "WEB-INF/messages_en_US.properties" and "WEB-INF/messages_en.properties"
   * to load the messages.</p>
   *
   * @see ServletMessageBundle
   * @see {@link com.tzavellas.coeus.i18n.msg.ClasspathMessageBundle ClasspathMessageBundle}
   * @see {@link java.util.ResourceBundle ResourceBundle}
   */
  var messageBundle: MessageBundle = new ServletMessageBundle(servletContext, 1000) // 1000 ms cache

  /**
   * A collection with pre-configured converters to be used by default when binding
   * and formatting values.
   *
   * @see ConverterRegistry
   * @see ConverterRegistry#defaultConverters
   * @see {@link com.tzavellas.coeus.WebRequest WebRequest}
   */
  var converters: ConverterRegistry = ConverterRegistry.defaultConverters

  /**
   * Parses multipart requests (used in file uploads).
   *
   * <p>The configured parser does not parse requests and throws {@code UnsupportedOperationException}.
   * This is done in order to avoid having a dependency to an external library by default. If your application
   * handles multipart requests you must use another parser (such as {@code CommonsMultipartRequestParser}).</p>
   */
  var multipartParser: MultipartRequestParser = new NullMultipartRequestParser

  /**
   * Maps view names to view instances.
   *
   * <p>The default resolver is a <code>ScalateViewResolver</code> configured using the
   * default values. The <code>ScalateViewResolver</code> also makes available the
   * {@link #viewHelpers} object as a attribute to all {@code View} instances using the
   * name "c".</p>
   *
   * @see ScalateViewResolver
   * @see ScalateConfig
   */
  var viewResolver: ViewResolver = new ScalateViewResolver(servletContext)
}
|
#!/bin/bash
# Build the "cave" task: compile grader.cpp together with cave.cpp into an
# executable named after $NAME, defining EVAL (contest-grader convention).
NAME=cave
# Quote the expansions so the command stays correct even if NAME is ever
# changed to a value containing spaces or glob characters.
/usr/bin/g++ -DEVAL -o "$NAME" grader.cpp "$NAME.cpp"
|
<reponame>junifar/job1_sample
import React, { Component } from 'react';
import axios from 'axios';
import moment from 'moment';
import { Row, Col } from 'reactstrap';
import { MyButton } from '../../../_Main';
// Dashboard panel listing the current user's flight bookings.
// NOTE(review): this component calls `axios` but the file's active imports do
// not include it — confirm `import axios from 'axios'` is present (the
// commented-out legacy version below did import it).
export default class UserBooking extends Component{
  constructor(props){
    super(props);
    this.state = {
      booking: null,                              // null = nothing loaded yet; array once the API responds
      noauth: false,
      token: localStorage.getItem("token")        // auth token persisted by the login flow
    }
  }
  // NOTE(review): componentWillMount is deprecated in React 16.3+ — consider
  // componentDidMount for the initial fetch.
  componentWillMount(){
    this.requestMyBooking();
  }
  // Fetch the user's bookings; only populates state when the API reports
  // success AND returns at least one booking.
  requestMyBooking = () => {
    const url = '/v1/flight/my/booking';
    let axiosConfig = {
      headers: {
        'Content-Type': 'application/json',
        'WLPS_TOKEN': this.state.token
      }};
    axios.post(url,"",axiosConfig).then((res) => {
      if (res.data.status && res.data.data.length > 0) {
        this.setState({
          booking: res.data.data
        });
      }
    }).catch((error) => {
      // NOTE(review): `+error` coerces the rejection value to a number; for a
      // normal axios Error object this is NaN, so the 401 case can never match
      // unless something rejects with a numeric status — verify. Consider
      // error.response.status instead.
      switch (+error) {
        case 401: // Unauthorized
          this.props.openModal("Your session is expired, please do relogin.");
          this.props.history.push('/');
          break;
        default:
          this.props.openModal("Maaf terdapat kesalahan pada server.");
          this.props.history.push('/');
          window.scrollTo(0,0);
      }
    });
  }
  // Navigate to the booking-detail page, passing the clicked itinerary via
  // router location state.
  forwardPage= (h) => {
    this.props.history.push({
      pathname: '/User/bookingdetail',
      state: {
        itinerary: h
      }
    });
    window.scroll(0,0);
  }
  render() {
    if (this.state.booking === null) {
      // Nothing fetched (or fetch failed silently) — show the empty state.
      return (
        <div className="my-userdashboard-emptybody">
          No Booking Available
        </div>
      );
    } else {
      return (
        <div>
          <div className="my-userdashboard-header">
            My Booking
          </div>
          <div>
            <div className="my-userdashboard-body">
              <div className="my-userdashboard-table">
                <div className="dropdown-divider"/>
                { this.state.booking.map((q, index) => {
                  return (
                    <div key={q.id}>
                      <Row onClick={this.forwardPage.bind(this,q)} style={{padding: "10px"}}>
                        <Col className="col-md-11">
                          <Col className="col-md-3">
                            <span className="font-lower-grey">FROM</span>
                            <br/>
                            <span className="font-normal-bold">{q.dep.destination.city}
                            ({q.dep.destination.iata})</span>
                          </Col>
                          <Col className="col-md-3">
                            <span className="font-lower-grey">TO</span>
                            <br/>
                            <span className="font-normal-bold">{q.dep.origin.city} ({q.dep.origin.iata})</span>
                          </Col>
                          <Col className="col-md-3">
                            <span className="font-lower-grey">DATE</span>
                            <br/>
                            <span>{moment(q.dep.departureTime).format("ddd, D MMM YYYY")}</span>
                          </Col>
                          <Col className="col-md-4">
                            <span className="font-lower-grey">TIME</span> <span className="wlps-form-icon"></span>
                            <br/>
                            <span>{moment(q.dep.departureTime).format("HH:mm")}</span>
                          </Col>
                          <Col className="col-md-3">
                            <span className="font-lower-grey">AIRLINE</span>
                            <br/>
                            <span>{q.dep.airline.name}</span>
                          </Col>
                        </Col>
                        <Col className="col-md-1">
                          <i className="material-icons">more_vert</i>
                        </Col>
                      </Row>
                      {/* Extra banner for bookings awaiting payment. */}
                      { q.status.code == 'PAY' &&
                        <Row className="queue-pay col-md-12">
                          <Col colSpan="6">
                            PAY
                          </Col>
                          <Col style={{textAlign: "right"}}>
                            {moment(q.timeLimit , 'YYYY-MM-DD HH:mm:ss').fromNow()} remaining - Rp. { `${Number(q.payment).toLocaleString()}` }
                          </Col>
                        </Row>
                      }
                    </div>
                  );
                })
                }
                <div className="row col-md-12">
                  <div className="dropdown-divider"/>
                  <div style={{marginTop: "10px"}}>
                    {/* NOTE(review): this.onClick is not defined on this class —
                        clicking throws; presumably this should navigate to a
                        booking-history route. Confirm the intended handler. */}
                    <MyButton outline accent queue onClick={() => this.onClick("")}>
                      <div className="my-searchresult-button-title" style={{fontSize: "1.5rem"}}>SEE BOOKING HISTORY</div>
                    </MyButton>
                  </div>
                </div>
              </div>
            </div>
          </div>
        </div>
      );
    }
  }
}
/*import React, { Component } from 'react';
import { bindActionCreators } from 'redux';
import { browserHistory } from 'react-router';
import * as userActionCreators from '../../../../actions/user';
import { connect } from 'react-redux';
import axios from 'axios';
import moment from 'moment';
import FlightCard from './FlightCard';
import Api from '../../../../scripts/Api';
class UserBooking extends Component{
constructor(props){
super(props);
this.state = {
bookings: null,
noauth: false
}
}
componentWillMount(){
this.requestBooking();
}
// *** Actions ***
requestTicket = (book) => {
console.log("requesting");
const config = {
method: 'get',
headers: this.props.user,
url: `/api/v1/bookings/${book.id}/ticket`
}
this.props.openModal("Tunggu sebentar, mencetak tiket..");
axios(config)
.then((res) => {
console.log(res.data[0].pdf_url);
this.props.openModal("Download PDF", res.data[0].pdf_url);
var user = Api.parseHeader(res, this.props.user);
const loginUser = bindActionCreators(userActionCreators.login, this.props.dispatch);
loginUser(user); // save header as redux state
});
}
requestBooking = () => {
const loginUser = bindActionCreators(userActionCreators.login, this.props.dispatch);
const config = {
url: '/v1/flight/my/booking',
headers: this.props.user,
method: 'post'
}
axios(config).then((res) => {
var user = Api.parseHeader(res, this.props.user);
loginUser(user);
this.setState({
bookings: res.data
});
}).catch((error) => {
if (error.response.status === 401){
this.setState({
noauth: true
});
}
console.log(error.response.status);
});
}
flightAction = (book) => {
this.props.history.push({
pathname: '/FlightDetail',
state: {
bookingid: book.id
}
});
window.scroll(0,0);
}
// *** Render ***
render(){
if (this.state.bookings){
if (this.state.bookings.length === 0){
return(
<div className="my-userdashboard-emptybody">Belum ada pesanan</div>
);
}
return(
<div className="my-userdashboard-body">
<div className="my-userdashboard-body-title">
Booking Status
</div>
<div className="my-userdashboard-body-content">
{ (this.state.bookings) &&
this.state.bookings.slice(0).reverse().map((book, index)=>{
return(
<FlightCard
key={`flight${index}`}
origin_name={book.origin_name}
origin_code={book.origin_iata}
destination_name={book.destination_name}
destination_code={book.destination_iata}
departure_date={moment(book.departs_at)}
status={book.latest_status}
time_limit={moment(book.time_limit)}
onClick={() => this.requestTicket(book)}
/>
);
})
}
</div>
</div>
);
}
if (this.state.noauth){
return(
<div className="my-userdashboard-emptybody">Login terlebih dulu untuk melihat pesanan anda</div>
);
}
return(
<div className="my-userdashboard-emptybody">Loading...</div>
);
}
}
const mapStateToProps = state => (
{
user: state.user
}
);
export default connect(mapStateToProps)(UserBooking);
*/ |
<reponame>NATHAPONDEV2/gatsby-tailwind-markdown-dyna-content-netlify<gh_stars>0
/*[
WORLFLOW in this page:
Query All Markdown file in filesystem
and display its frontmatter/metadata for user to click and navigate
to specific page.
AND! After each Blog post link is being clicked, it grabs link "path" data e.g. /post-one
and navigates to that page
BUT! That detail blog posts page are dynamic it doesn't exit in filesystem
so that's why it need to be created in 'gatsby-node.js' file based on a single template.
]*/
import React from 'react'
import { graphql, Link } from 'gatsby'
import Layout from '../components/layout'
import SEO from "../components/seo"
// import PostsTemplate from '../posts/posts-template'
/* [ The data from the graphql query will be in props ]*/
// Blog index page: lists every markdown post's frontmatter with a link to the
// dynamically generated post page (created in gatsby-node.js).
const blog = ({data}) => (
  <Layout>
    <SEO title="blog" />
    <h1 className="text-3xl text-center">Welcome to Blog</h1>
    <p className="text-1xl text-center mt-2">Read the latest blogs here</p>
    {data.allMarkdownRemark.edges.map(post => (
      <div key={ post.node.id } className="mt-4">
        {/* frontmatter.path is the slug gatsby-node.js used to create the page */}
        <Link to={ post.node.frontmatter.path } className="text-teal-500 font-bold">{ post.node.frontmatter.title }</Link>
        <br/>
        <small>Posted by <b>{ post.node.frontmatter.author }</b> on { post.node.frontmatter.date }</small>
        <br/>
        <br/>
        <hr/>
      </div>
    ))}
  </Layout>
)

/*[ map Markdown data to Props ]*/
// NOTE(review): the `graphql` tag is used here but is not imported from
// 'gatsby' in this file's imports — required in Gatsby v2+; confirm.
export const pageQuery = graphql`
  query BlogIndexQuery {
    allMarkdownRemark {
      edges {
        node {
          id
          frontmatter {
            path
            title
            date
            author
          }
          excerpt
        }
      }
    }
  }
`

export default blog;
|
<filename>src/main/scala/git/PullRequest.scala
package git
import git.PullRequestType.PullRequestType
import org.joda.time.{DateTime, DateTimeZone, Minutes}
/**
 * An object that holds information about the pull request.
 * @param number The number of the pull request.
 * @param author The author name.
 * @param sha The SHA of the source tip.
 * @param shaTarget The SHA of the target tip.
 * @param source The source branch name.
 * @param target The target branch name.
 */
case class PullRequest( number: Int,
                        author: String,
                        sha: String,
                        shaTarget: String,
                        source: String,
                        target: String,
                        var title: Option[String] = None,
                        var intraBranch: Option[Boolean] = None,
                        var createdAt: Option[DateTime] = None,
                        var updatedAt: Option[DateTime] = None,
                        var linesAdded: Option[Long] = None,
                        var linesDeleted: Option[Long] = None,
                        var filesChanged: Option[Long] = None,
                        var commits: Option[Long] = None,
                        var avatar: Option[String] = None,
                        var coreMember: Option[Boolean] = None,
                        var comments: Option[Long] = None,
                        var reviewComments: Option[Long] = None,
                        var lastCommentMention: Option[Boolean] = None,
                        var labels: Option[List[String]] = None,
                        var milestone: Option[Long] = None,
                        var `type`: Option[PullRequestType] = None,
                        var isMergeable: Option[Boolean] = None,
                        var conflictsWith: Option[List[PullRequest]] = None,
                        var contributedCommits: Option[Int] = None,
                        var acceptedPullRequests: Option[Int] = None,
                        var totalPullRequests: Option[Int] = None,
                        var hasTestCode: Option[Boolean] = None,
                        var important: Option[Double] = None
                      ) {

  var commitProvider: Option[CommitProvider] = None

  /**
   * @return The total number of added/edited/deleted lines.
   */
  def linesTotal: Long = linesAdded.getOrElse(0L) + linesDeleted.getOrElse(0L)

  /** Age of the pull request in minutes; 0 when the creation time is unknown. */
  def age: Int = createdAt.map(date => Minutes.minutesBetween(date, DateTime.now).getMinutes).getOrElse(0) // minutes

  def createdAtUtc: Option[DateTime] = createdAt.map(date => date.toDateTime(DateTimeZone.UTC))
  def updatedAtUtc: Option[DateTime] = updatedAt.map(date => date.toDateTime(DateTimeZone.UTC))

  /** Numbers of the pull requests this one conflicts with (when known). */
  def conflictsWithNumbers: Option[List[Int]] = conflictsWith.map(list => list.map(pr => pr.number))
  def hasReviewComments: Option[Boolean] = reviewComments.map(n => n > 0)

  /** Share of the repository's commits contributed by this author. */
  def contributedCommitRatio: Option[Double] = for {
    commits <- contributedCommits
    allCommits <- commitProvider
  } yield commits.toDouble / allCommits.commits.toDouble

  /**
   * Fraction of this author's pull requests that were accepted.
   *
   * Defined only when BOTH counters are known. Previously this used
   * `totalPullRequests.get` inside a map over `acceptedPullRequests`, which
   * threw NoSuchElementException when only the accepted count was present;
   * the for-comprehension makes that case yield None instead.
   */
  def pullRequestAcceptRatio: Option[Double] = for {
    accepted <- acceptedPullRequests
    total    <- totalPullRequests
  } yield accepted.toDouble / total.toDouble

  /** Whether the title mentions a fix (case-insensitive); None when unknown. */
  def containsFix: Option[Boolean] = title.map(t => t.toLowerCase.contains("fix"))

  override def toString: String =
    s"#$number: '$source' into '$target'"
}
|
<filename>app/resources/library_entry_resource.rb
class LibraryEntryResource < BaseResource
TITLE_SORT = /\A([^\.]+)\.titles\.([^\.]+)\z/
# Wraps a plain whitelist of sortable fields and additionally accepts dynamic
# "<media>.titles.<locale>" sort keys such as "anime.titles.en_jp".
class TitleSortableFields
  def initialize(whitelist)
    @whitelist = whitelist
  end

  # A key is sortable when it is explicitly whitelisted, or when it matches
  # TITLE_SORT with a known media type and either the special 'canonical'
  # title or a locale code ("en", "en_jp", ...).
  def include?(key)
    return true if @whitelist.include?(key)
    # Magic match-handling code
    match = TITLE_SORT.match(key.to_s)
    return false unless match
    media, title = match[1..-1]
    return false unless %w[anime manga drama].include?(media.downcase)
    # BUG FIX: String#casecmp returns -1/0/1 — always truthy — so the old
    # `return true if title.casecmp('canonical')` accepted *every* title and
    # made the locale-code check below dead code. Compare for equality.
    return true if title.casecmp('canonical').zero?
    return false unless /[a-z]{2}(_[a-z]{2})?/ =~ title
    true
  end
end
caching

# Serialized scalar attributes of a library entry.
attributes :status, :progress, :volumes_owned, :reconsuming, :reconsume_count,
  :notes, :private, :reaction_skipped, :progressed_at, :started_at, :finished_at

filters :user_id, :media_id, :media_type, :status, :anime_id, :manga_id,
  :drama_id

# Accepts either enum names or raw integer values for status; falls back to
# the raw values when none of them map to a known enum entry.
filter :status, apply: ->(records, values, _options) {
  statuses = LibraryEntry.statuses.values_at(*values).compact
  statuses = values if statuses.empty?
  records.where(status: statuses)
}
filter :kind, apply: ->(records, values, _options) {
  records.by_kind(*values)
}
# Entries updated at or after the given timestamp.
filter :since, apply: ->(records, values, _options) {
  time = values.join.to_time
  records.where('library_entries.updated_at >= ?', time)
}
# Entries belonging to users the given user follows.
filter :following, apply: ->(records, values, _options) {
  records.following(values.join(','))
}

has_one :user
has_one :anime
has_one :manga
has_one :drama
has_one :review, eager_load_on_include: false
has_one :media_reaction
has_one :media, polymorphic: true
has_one :unit, polymorphic: true, eager_load_on_include: false
has_one :next_unit, polymorphic: true, eager_load_on_include: false

paginator :library
search_with LibrarySearchService
query :title

# DEPRECATED: These methods are for until all clients have switched to
# rating_twenty
attributes :rating, :rating_twenty

# Converts the stored 20-point rating to the legacy 0.5-step 5-point scale,
# returned as a string (e.g. 17 -> "4.0").
def rating
  ((_model.rating.to_f / 2).floor.to_f / 2).to_s
end

# Accepts the legacy 5-point rating and stores it on the 20-point scale.
def rating=(value)
  return unless value
  _model.rating = value.to_f * 4
end

def rating_twenty
  _model.rating
end

def rating_twenty=(value)
  _model.rating = value
end
# END DEPRECATED
# Counts the matching entries grouped by status, with keys translated from
# integer enum values back to status names.
# NOTE(review): `return if should_query?(filters)` yields nil when
# should_query? is true — should_query?'s semantics are not visible in this
# file; confirm the guard is not inverted.
def self.status_counts(filters, opts = {})
  return if should_query?(filters)
  statuses = LibraryEntry.statuses.invert
  find_records(filters, opts).group(:status).count.transform_keys do |status|
    statuses[status]
  end
end

# Static sortable fields plus the dynamic "<media>.titles.<locale>" keys
# handled by TitleSortableFields#include?.
def self.sortable_fields(context)
  fields = super + %i[anime.subtype manga.subtype drama.subtype
                      anime.episode_count manga.chapter_count
                      anime.user_count manga.user_count
                      anime.average_rating manga.average_rating]
  TitleSortableFields.new(fields)
end

# Routes each requested sort either to the custom title-sort logic or to the
# default JR sorting, preserving the overall ordering of the sort keys.
def self.apply_sort(records, order_options, context = {})
  # For each requested sort option, decide whether to use the title sort logic
  order_options = order_options.map do |field, dir|
    [(TITLE_SORT =~ field ? :title : :other), field, dir]
  end
  # Combine consecutive sort options of the same type into lists
  order_options = order_options.each_with_object([]) do |curr, acc|
    type, field, dir = curr
    acc << [type, {}] unless acc.last&.first == type
    acc.last[1][field] = dir
  end
  # Send each list to either apply_title_sort or super
  order_options.each do |(type, sorts)|
    records = if type == :title
                apply_title_sort(records, sorts, context)
              else
                super(records, sorts, context)
              end
  end
  records
end

# Builds the JOIN + ORDER BY SQL for title-based sorts.
# Interpolation safety relies on TITLE_SORT restricting `media` and `title`
# to a small validated vocabulary; keep that invariant if editing.
def self.apply_title_sort(records, order_options, _context = {})
  order_options.each_pair do |field, direction|
    media, title = TITLE_SORT.match(field.to_s)[1..-1]
    direction = direction.upcase
    records = records.joins(<<-EOF.squish)
      LEFT JOIN #{media} AS #{media}_sort
      ON #{media}_sort.id = library_entries.#{media}_id
    EOF
    if title == 'canonical'
      records = records.order(<<~EOF)
        #{media}_sort.titles->#{media}_sort.canonical_title #{direction}
      EOF
    elsif /[a-z]{2}(_[a-z]{2})?/i =~ title
      # Fall back through canonical and en_jp when the requested locale
      # has no title for a given row.
      records = records.order(<<~EOF.squish)
        COALESCE(
          NULLIF(#{media}_sort.titles->'#{title}', ''),
          NULLIF(#{media}_sort.titles->#{media}_sort.canonical_title, ''),
          NULLIF(#{media}_sort.titles->'en_jp', '')
        ) #{direction}
      EOF
    end
  end
  records
end
end
|
#!/bin/bash -eux
set -o pipefail

# Run the linter from inside the checked-out "subject" repository.
pushd subject
ci/lint/setup || true   # best-effort: setup failures must not abort the lint
ci/lint/lint
popd
|
#!/bin/bash
# Configure a Blender build with CMake: clang toolchain, Python 3.7,
# OpenCollada and CUDA-compiled Cycles kernels. Expects env.sh (next to this
# script) to define INSTALL_DIR, BUILD_DIR, ROOT_DIR, PY_LIB, PY_INC, MAUCPY.
DIR="$( cd -P "$( dirname "$0" )" && pwd )"
source $DIR/env.sh

CMAKE_ARGS="-DPYTHON_VERSION=3.7"
CMAKE_ARGS+=" -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR}"
# Use the LLVM toolchain (compiler + matching ar/ranlib).
CMAKE_ARGS+=" -DCMAKE_CXX_COMPILER=clang++"
CMAKE_ARGS+=" -DCMAKE_CXX_COMPILER_AR=llvm-ar"
CMAKE_ARGS+=" -DCMAKE_CXX_COMPILER_RANLIB=llvm-ranlib"
CMAKE_ARGS+=" -DCMAKE_CXX_FLAGS='-std=gnu++11'"
CMAKE_ARGS+=" -DCMAKE_EXE_LINKER_FLAGS=-lGLU"
# Point CMake at the custom Python installation.
CMAKE_ARGS+=" -DPYTHON_LIBRARY=${PY_LIB}"
CMAKE_ARGS+=" -DPYTHON_LIBPATH=${MAUCPY}/lib"
CMAKE_ARGS+=" -DPYTHON_INCLUDE_DIR=${PY_INC}"
CMAKE_ARGS+=" -DWITH_OPENCOLLADA=ON"
#CMAKE_ARGS+=" -DWITH_OPENCOLORIO=ON"
# Build CUDA kernels for Cycles against the toolkit in /opt/cuda.
CMAKE_ARGS+=" -DWITH_CYCLES_CUDA_BINARIES=ON"
CMAKE_ARGS+=" -DCUDA_TOOLKIT_ROOT_DIR=/opt/cuda"
CMAKE_ARGS+=" -DCUDA_NVCC_EXECUTABLE=/opt/cuda/bin/nvcc"
CMAKE_ARGS+=" -DCUDA_INCLUDE_DIRS=/opt/cuda/include"
#CMAKE_ARGS+=" -DWITH_CYCLES_STANDALONE=ON"
#CMAKE_ARGS+=" -DWITH_CYCLES_STANDALONE_GUI=ON"
#CMAKE_ARGS+=" -DWITH_MOD_OCEANSIM=ON"
CMAKE_ARGS+=" -DWITH_FFTW3=ON"

[[ ! -d ${BUILD_DIR} ]] && mkdir ${BUILD_DIR}

# Configure only; the actual compile step is left commented out below.
pushd ${BUILD_DIR}
cmake ${CMAKE_ARGS} ${ROOT_DIR}
# cmake --build . -j ${NUM_BUILD_PROCS}
popd
|
/** Registers a task by delegating storage to the private task manager. */
public addTask(task: Task): void {
  this.#manager.add(task); // #manager's `add` is the single point of task registration
}
#!/bin/sh
set -e
# usage:
#
#   scripts/update_contracts.sh $DEPLOYMENT_NAME $network
#
# For example,
#
#   scripts/update_contracts.sh sifchain-testnet-042-ibc ropsten
#
# must run this from the smart-contracts directory, and must update .env with
# the appropriate values for:
#   MAINNET_URL=https://eth-mainnet.alchemyapi.io/v2/...
#   ROPSTEN_URL=https://eth-ropsten.alchemyapi.io/v2/...
#   ROPSTEN_PROXY_ADMIN_PRIVATE_KEY=aaaa...
#   DEPLOYMENT_NAME="sifchain-testnet-042-ibc"

deploymentDir=deployments/$1/

# Repoint .openzeppelin at the chosen deployment's upgrade manifest so the
# hardhat upgrade script records proxies against the right deployment.
rm -f .openzeppelin
ln -s $deploymentDir/.openzeppelin .openzeppelin

npx hardhat run scripts/upgrade_contracts.ts --network $2

# Persist the updated deployment manifest (commits to the current branch).
git commit -m "update deployment" $deploymentDir
|
/**
 * A TupleStream decorator that forwards only the tuples of an underlying
 * stream for which a boolean evaluator returns true (SQL HAVING semantics).
 *
 * NOTE(review): {@code stream}, {@code evaluator} and {@code streamContext}
 * are never assigned in this excerpt — presumably initialized by a
 * constructor/setter omitted here; {@code currentTuple} is unused. Verify
 * against the full class.
 */
public class HavingStream extends TupleStream implements Expressible {

  private static final long serialVersionUID = 1;

  private TupleStream stream;
  private BooleanEvaluator evaluator;
  private StreamContext streamContext;
  private Tuple currentTuple;

  /** Opens the wrapped stream and hands the evaluator its stream context. */
  public void open() throws IOException {
    stream.open();
    // Initialize the evaluator
    evaluator.setStreamContext(streamContext);
  }

  /**
   * Reads tuples from the wrapped stream until one satisfies the evaluator,
   * returning it; returns null at end of stream.
   */
  public Tuple read() throws IOException {
    while (true) {
      Tuple tuple = stream.read();
      if (tuple == null) {
        return null; // End of stream
      }
      if (evaluator.evaluate(tuple)) {
        return tuple; // Return the tuple if evaluation is true
      }
      // Otherwise drop the tuple and keep scanning.
    }
  }

  public void close() throws IOException {
    stream.close();
  }
}
<filename>fiscoflex-rest/src/main/java/mx/fiscoflex/contabilidad/cuentacontable/CuentaContableDTO.java
package mx.fiscoflex.contabilidad.cuentacontable;
import java.util.List;
public class CuentaContableDTO {
private Integer idCuentaContable;
private String nombreCuenta;
private Integer cuentaPadre;
private String naturaleza;
private String estadoFinanciero;
private String origen;
private Integer profundidad;
private List<CuentaContableDTO> cuentas;
public Integer getIdCuentaContable() {
return idCuentaContable;
}
public void setIdCuentaContable(Integer idCuentaContable) {
this.idCuentaContable = idCuentaContable;
}
public String getNombreCuenta() {
return nombreCuenta;
}
public void setNombreCuenta(String nombreCuenta) {
this.nombreCuenta = nombreCuenta;
}
public Integer getCuentaPadre() {
return cuentaPadre;
}
public void setCuentaPadre(Integer cuentaPadre) {
this.cuentaPadre = cuentaPadre;
}
public String getNaturaleza() {
return naturaleza;
}
public void setNaturaleza(String naturaleza) {
this.naturaleza = naturaleza;
}
public String getEstadoFinanciero() {
return estadoFinanciero;
}
public void setEstadoFinanciero(String estadoFinanciero) {
this.estadoFinanciero = estadoFinanciero;
}
public String getOrigen() {
return origen;
}
public void setOrigen(String origen) {
this.origen = origen;
}
public Integer getProfundidad() {
return profundidad;
}
public void setProfundidad(Integer profundidad) {
this.profundidad = profundidad;
}
public List<CuentaContableDTO> getCuentas() {
return cuentas;
}
public void setCuentas(List<CuentaContableDTO> cuentas) {
this.cuentas = cuentas;
}
} |
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
tests.apiserver
~~~~~~~~~~~~
Tests cobra.api
:author: 40huo <<EMAIL>>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
import requests
from cobra.api import start
import json
import multiprocessing
import os

# test apiserver after creating config file.
# kill -9 $(ps aux|grep test_apiserver.py|awk '{print $2}')

# NOTE(review): the API server child process is spawned at import time, so
# merely importing this module starts it; it is terminated at the bottom of
# the file (also at import time), which makes the commented-out request tests
# below unrunnable as written — they would need the server alive per-test.
p = multiprocessing.Process(target=start, args=("127.0.0.1", 5000, True))
p.start()


def test_add_job():
    # Disabled: would POST a scan job to /api/add and check the response.
    # url = "http://127.0.0.1:5000/api/add"
    # post_data = {
    #     "key": "your_secret_key",
    #     "target": ["https://github.com/wufeifei/grw.git", "https://github.com/shadowsocks/shadowsocks.git"],
    # }
    # headers = {
    #     "Content-Type": "application/json",
    # }
    # re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
    # assert "1001" in re.content
    # assert "Add scan job successfully" in re.content
    # assert "sid" in re.content
    pass


def test_job_status():
    # Disabled: would POST to /api/status and check the job-status payload.
    # url = "http://127.0.0.1:5000/api/status"
    # post_data = {
    #     "key": "your_secret_key",
    #     "sid": 24,
    # }
    # headers = {
    #     "Content-Type": "application/json",
    # }
    # re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
    # assert "1001" in re.content
    # assert "msg" in re.content
    # assert "sid" in re.content
    # assert "status" in re.content
    # assert "report" in re.content
    pass


p.terminate()
// Barrel file: re-exports this package's public type/enum modules so callers
// can import everything from the package root.
export * from './approval-types';
export * from './create-meeting-types';
export * from './days';
export * from './get-meeting-types';
// NOTE(review): 'recordinp' looks like a typo for 'recording', but the path
// must match the actual filename — verify before renaming either one.
export * from './recordinp-types';
export * from './recurrence-types';
export * from './weeks-of-month';
export * from './registrant-status';
export * from './updatable-registrant-statuses';
export * from './user-type';
export * from './user-verification-type';
export * from './create-user-action';
export * from './login-types';
export * from './delete-user-actions';
//go:generate varhandler -func CreateUser,GetUser,UpdateUser,DeleteUser -output user_handlers_generated.go
package main
import (
"encoding/json"
"errors"
"log"
"net/http"
)
// Register the generated handlers (produced by the go:generate varhandler
// directive at the top of this file) on the default mux.
func init() {
	http.HandleFunc("/user/create", CreateUserHandler)
	http.HandleFunc("/user/get", GetUserHandler)
	http.HandleFunc("/user/update", UpdateUserHandler)
	http.HandleFunc("/user/delete", DeleteUserHandler)
}

///////
/// Types
///////

// User

// User is the request/response payload for the user endpoints.
type User struct {
	Id   UserID
	Name string
}

// HTTPUser decodes a User from the JSON request body; a User with an empty
// Name is rejected. (varhandler convention: HTTP<Type> builds <Type> from a
// request.)
func HTTPUser(r *http.Request) (u User, err error) {
	// if request encoding is json :
	err = json.NewDecoder(r.Body).Decode(&u)
	if u.Name == "" {
		return u, errors.New("EmptyName")
	}
	return
}

// ServeHTTP writes the User back to the client as JSON.
func (u User) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	// if response encoding has to be json :
	json.NewEncoder(w).Encode(u)
}

//UserID

// UserID identifies a user; extracted from the "user_id" query parameter.
type UserID string

// HTTPUserID reads the user id from the query string, rejecting requests
// without one.
func HTTPUserID(r *http.Request) (uid UserID, err error) {
	uid = UserID(r.URL.Query().Get("user_id"))
	if uid == "" {
		return uid, errors.New("Please provide a user id")
	}
	log.Printf("uid: %s", uid)
	return
}

///////
/// Handlers
///////

//create

// CreateUser persists a new user (persistence elided in this example).
func CreateUser(user User) (status int, err error) {
	//save user into database
	return http.StatusCreated, err
	//if the error is not nil
	//a status internal server
	//error will be returned by default
	//otherwise, everything is fine.
}

//get

// GetUser fetches a user by id; the sentinel id "404" demonstrates the
// not-found path.
func GetUser(id UserID) (resp http.Handler, status int, err error) {
	if id == "404" { // check case
		return nil, http.StatusNotFound, nil
	}
	user := User{
		Id: id,
	}
	//err := db.GetUser(user)
	return user, http.StatusOK, err
}

//update

// UpdateUser applies user fields to the record with the given id.
// NOTE(review): the `if err != nil` check below guards the named return
// `err`, which is never assigned while the db call stays commented out, so
// the branch is currently dead scaffolding — intentional for the example,
// but it becomes live once db.GetUser is restored.
func UpdateUser(id UserID, user User) (status int, err error) {
	//user might have to be
	//a UserUpdateRequest type
	//that only takes into account modifiable fields
	if id == "404" { // check case
		return http.StatusNotFound, nil
	}
	user.Id = id
	//err := db.GetUser(user)
	if err != nil {
		return
	}
	//do stuff
	return http.StatusOK, nil
}

//delete

// DeleteUser removes the user with the given id (db call elided; see the
// note on UpdateUser about the currently-dead err check).
func DeleteUser(id UserID) (status int, err error) {
	if id == "404" { // check case
		return http.StatusNotFound, nil
	}
	//db.DeleteUser(id)
	if err != nil {
		return
	}
	return http.StatusNoContent, nil
}
|
#!/usr/bin/env bash
# node-build hook: after a Node version is installed, globally install Yarn
# into that version.
set -e
[ -n "$NODENV_DEBUG" ] && set -x

# Register our callback with node-build's after_install hook mechanism.
# node-build invokes registered hooks only after this script has been fully
# sourced, so defining run_after_install below its registration is fine.
if declare -Ff after_install > /dev/null; then
  after_install run_after_install
else
  echo "nodenv: nodenv-yarn-install plugin requires node-build" >&2
fi

run_after_install() {
  local node_version
  local yarn_status
  local yarn_version

  # Only if successfully installed Node.
  [ "$STATUS" = "0" ] || return 0

  echo "Installing Yarn..."

  # Yarn requires Node >= 4; skip 0.x-3.x installs.
  node_version=$(NODENV_VERSION="$DEFINITION" nodenv-exec node -v)
  if [[ "$node_version" =~ ^v[0-3]\. ]]; then
    echo "Node version ${node_version/v/} is not supported, please use Node.js 4.0 or higher." >&2
    return 0
  fi

  # Install Yarn into the freshly installed Node version; capture the exit
  # status instead of aborting (set -e) so a failure is non-fatal.
  yarn_status=0
  NODENV_VERSION="$DEFINITION" nodenv-exec npm install yarn -g --silent || yarn_status="$?"
  if [ "$yarn_status" == "0" ]; then
    yarn_version=$(NODENV_VERSION="$DEFINITION" nodenv-exec yarn --version)
    echo "Installed Yarn $yarn_version"
  fi
}
// PhantomJS end-to-end tests for the "discard file" flow.
var helpers = require('./helpers');
var testsuite = require('./testsuite');
var Environment = require('./environment');
var webpage = require('webpage');
// How long the server mutes the discard warning in the second suite (ms).
var muteGraceTimeDuration = 2000;
var page = webpage.create();
var suite = testsuite.newSuite('discard', page);
var environment;
var testRepoPath;
// Create a file in the test repo, click "discard" on it, optionally answer
// the confirmation dialog, then wait for the staging entry to disappear
// (or to stay visible when the dialog was answered 'no').
// dialogButtonToClick: 'yes' | 'no' | 'mute' | undefined (no dialog expected).
var createAndDiscard = function(callback, dialogButtonToClick) {
  environment.createTestFile(testRepoPath + '/testfile2.txt', function(err) {
    if (err) return callback(err);
    helpers.waitForElementVisible(page, '[data-ta-container="staging-file"]', function() {
      helpers.click(page, '[data-ta-clickable="discard-file"]');
      if (dialogButtonToClick) {
        helpers.click(page, '[data-ta-clickable="' + dialogButtonToClick + '"]');
      } else {
        // With the warning disabled, no confirmation dialog may appear.
        if (helpers.elementVisible(page, '[data-ta-clickable="yes"]'))
          return callback(new Error('Should not see yes button'))
      }
      if (dialogButtonToClick !== 'no') {
        helpers.waitForElementNotVisible(page, '[data-ta-container="staging-file"]', function() {
          callback();
        });
      } else {
        helpers.waitForElementVisible(page, '[data-ta-container="staging-file"]', function() {
          callback();
        });
      }
    });
  });
}
// First configuration: server started with the discard warning disabled.
suite.test('Init', function(done) {
  environment = new Environment(page, { port: 8453, serverStartupOptions: ['--disableDiscardWarning'] });
  environment.init(function(err) {
    if (err) return done(err);
    testRepoPath = environment.path + '/testrepo';
    environment.createRepos([
      { bare: false, path: testRepoPath }
    ], done);
  });
});
suite.test('Open repo screen', function(done) {
  page.open(environment.url + '/#/repository?path=' + encodeURIComponent(testRepoPath), function () {
    helpers.waitForElementVisible(page, '.graph', function() {
      setTimeout(done, 1000); // Let it finish loading
    });
  });
});
suite.test('Should be possible to discard a created file without warning message', function(done) {
  createAndDiscard(done);
});
// Restart the server for the second configuration (warning enabled + mute).
suite.test('Shutdown', function(done) {
  environment.shutdown(function() {
    page = webpage.create();
    done();
  }, true);
});
suite.test('Init', function(done) {
  environment = new Environment(page, { port: 8454, serverStartupOptions: ['--no-disableDiscardWarning', '--disableDiscardMuteTime=' + muteGraceTimeDuration] });
  environment.init(function(err) {
    if (err) return done(err);
    testRepoPath = environment.path + '/testrepo';
    environment.createRepos([
      { bare: false, path: testRepoPath }
    ], done);
  });
});
suite.test('Open repo screen', function(done) {
  page.open(environment.url + '/#/repository?path=' + encodeURIComponent(testRepoPath), function () {
    helpers.waitForElementVisible(page, '.graph', function() {
      setTimeout(done, 1000); // Let it finish loading
    });
  });
});
suite.test('Should be possible to select no from discard', function(done) {
  createAndDiscard(done, 'no');
});
suite.test('Should be possible to discard a created file', function(done) {
  createAndDiscard(done, 'yes');
});
suite.test('Should be possible to discard a created file and disable warn for awhile', function(done) {
  // Temporarily disabled to get the tests working
  /*createAndDiscard(function(err) {
    if (err) done(err);
    createAndDiscard(function(err) {
      if (err) done(err);
      setTimeout(function(err) {
        if (err) done(err);
        createAndDiscard(done, 'yes');
      }, muteGraceTimeDuration + 500);
    });
  }, 'mute');*/
  done();
});
suite.test('Shutdown', function(done) {
  environment.shutdown(done);
});
testsuite.runAllSuits();
|
// Copyright (C) 2019. Huawei Technologies Co., Ltd. All rights reserved.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#include <jni.h>
#include <string>
#include "libbolt/headers/kit_flags.h"
#include "libbolt/headers/flow.h"
#include <android/log.h>
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, "========= Info ========= ", __VA_ARGS__)
// Model precision and the process-wide flow instance shared by the JNI entry points.
DataType inferencePrecision = DT_F32;
Flow flowExample;
// Input image dimensions; the pipeline emits a (width*2) x (height*2) image
// (see postProcess/runFlow below).
const int width = 32;
const int height = 32;
// Flow graph pre-process node: widen the raw U8 pixels from the "input"
// tensor to F32 in the model's "input.1" tensor. Values are copied verbatim
// (no scaling or normalization).
EE pixelProcess(std::map<std::string, std::shared_ptr<Tensor>> &inputs,
    std::shared_ptr<Tensor> &tmp,
    std::map<std::string, std::shared_ptr<Tensor>> &outputs,
    std::vector<std::string> parameter = std::vector<std::string>())
{
    // Already in BGR
    uint8_t *myBuffer = (uint8_t *)((CpuMemory *)inputs["input"]->get_memory())->get_ptr();
    F32 *oneArr = (F32 *)((CpuMemory *)outputs["input.1"]->get_memory())->get_ptr();
    for (int i = 0; i < width * height * 3; i++) {
        oneArr[i] = myBuffer[i];
    }
    return SUCCESS;
}
// Flow graph post-process node: convert the model's planar F32 output
// (tensor "1811"; planes in B, R, G order, each (width*2) x (height*2))
// into a packed RGB byte image in the flow node's "output" tensor.
// Values are clamped to [0, 255]; values <= 1 are forced to 0, matching the
// original behavior.
EE postProcess(std::map<std::string, std::shared_ptr<Tensor>> &inputs,
    std::shared_ptr<Tensor> &tmp,
    std::map<std::string, std::shared_ptr<Tensor>> &outputs,
    std::vector<std::string> parameter = std::vector<std::string>())
{
    std::string flowInferenceNodeOutputName = "output";
    std::string boltModelOutputName = "1811";
    uint8_t *flowInferenceNodeOutput =
        (uint8_t *)((CpuMemory *)outputs[flowInferenceNodeOutputName]->get_memory())->get_ptr();
    F32 *result = (F32 *)((CpuMemory *)inputs[boltModelOutputName]->get_memory())->get_ptr();
    const int plane = (width * 2) * (height * 2);
    // BUGFIX: the scratch buffers hold F32 values, so size them with
    // sizeof(F32); the original used sizeof(int *) (pointer size).
    F32 *rArr = (F32 *)malloc(sizeof(F32) * plane);
    F32 *gArr = (F32 *)malloc(sizeof(F32) * plane);
    F32 *bArr = (F32 *)malloc(sizeof(F32) * plane);
    for (int i = 0; i < plane * 3; i++) {
        // Clamp into the displayable byte range.
        if (result[i] <= 1) {
            result[i] = 0;
        } else if (result[i] > 255) {
            result[i] = 255;
        }
        // De-interleave the planar layout: plane 0 = B, plane 1 = R, plane 2 = G.
        if (i < plane) {
            bArr[i] = result[i];
        } else if (i < plane * 2) {
            rArr[i - plane] = result[i];
        } else {
            gArr[i - 2 * plane] = result[i];
        }
    }
    // Re-interleave as packed RGB bytes (truncating float -> int as before).
    for (int i = 0; i < plane; i++) {
        flowInferenceNodeOutput[i * 3] = (uint8_t)(int)rArr[i];
        flowInferenceNodeOutput[i * 3 + 1] = (uint8_t)(int)gArr[i];
        flowInferenceNodeOutput[i * 3 + 2] = (uint8_t)(int)bArr[i];
    }
    free(rArr);
    free(gArr);
    free(bArr);
    return SUCCESS;
}
// JNI entry point: register the pre/post-processing callbacks and initialize
// the process-wide flow graph from the model file at `path`.
extern "C" void Java_com_example_cameraenlarge_MainActivity_initFlow(
    JNIEnv *env, jobject, jstring path)
{
    flowRegisterFunction("pixelProcess", pixelProcess);
    flowRegisterFunction("postProcess", postProcess);
    const char *cur_str_path = env->GetStringUTFChars(path, nullptr);
    std::string imageClassificationGraphPath = cur_str_path;
    // BUGFIX: release the UTF chars after copying; the original leaked them.
    env->ReleaseStringUTFChars(path, cur_str_path);
    std::vector<std::string> graphPath = {imageClassificationGraphPath};
    int threads = 1;
    flowExample.init(graphPath, inferencePrecision, AFFINITY_CPU_HIGH_PERFORMANCE, threads, false);
}
// Build the tensor map for one inference task: copy the raw byte image
// (width x height x 3, already in the layout pixelProcess expects) into the
// "input" tensor and allocate the "output" tensor that postProcess fills
// with (width*2) x (height*2) x 3 packed bytes.
std::map<std::string, std::shared_ptr<Tensor>> inputOutput(const unsigned char *myBuffer)
{
    std::map<std::string, std::shared_ptr<Tensor>> tensors;
    // 1 x 3 x height x width, unsigned bytes, NCHW layout.
    TensorDesc inputDesc = tensor4df(DT_U8, DF_NCHW, 1, 3, height, width);
    tensors["input"] = std::shared_ptr<Tensor>(new Tensor());
    tensors["input"]->resize(inputDesc);
    tensors["input"]->alloc();
    void *ptr = (void *)((CpuMemory *)tensors["input"]->get_memory())->get_ptr();
    memcpy(ptr, myBuffer, tensorNumBytes(inputDesc));
    tensors["output"] = std::shared_ptr<Tensor>(new Tensor());
    tensors["output"]->resize(tensor2df(DT_I32, DF_NCHW, 1, (width * 2) * (height * 2) * 3));
    tensors["output"]->alloc();
    return tensors;
}
// JNI entry point: run one inference on the incoming BGR byte image and
// return the upscaled result as an RGBA byte array.
extern "C" jbyteArray Java_com_example_cameraenlarge_MainActivity_runFlow(
    JNIEnv *env, jobject, jbyteArray bgrData, jstring path)
{
    int len = env->GetArrayLength(bgrData);
    unsigned char *theValue = new unsigned char[len];
    env->GetByteArrayRegion(bgrData, 0, len, reinterpret_cast<jbyte *>(theValue));
    int num = 1;
    const char *cur_str_path = env->GetStringUTFChars(path, nullptr);
    std::string imageClassificationGraphPath = cur_str_path;
    // BUGFIX: release the JNI UTF chars; the original leaked them per call.
    env->ReleaseStringUTFChars(path, cur_str_path);
    for (int i = 0; i < num; i++) {
        // inputOutput copies theValue into the tensor, so the local buffer is
        // no longer needed once the tasks are enqueued.
        std::map<std::string, std::shared_ptr<Tensor>> data = inputOutput(theValue);
        Task task(imageClassificationGraphPath, data);
        flowExample.enqueue(task);
    }
    // BUGFIX: the input copy was leaked on every frame.
    delete[] theValue;
    std::vector<Task> results;
    UNI_PROFILE(results = flowExample.dequeue(true), std::string("image_classification"),
        std::string("image_classification"));
    uint8_t *result = (uint8_t *)((CpuMemory *)results[0].data["output"]->get_memory())->get_ptr();
    const int outBytes = (width * 2) * (height * 2) * 4;
    // BUGFIX: elements are uint8_t, not uint8_t* — size with sizeof(uint8_t)
    // (the original over-allocated by pointer size).
    uint8_t *endResult = (uint8_t *)malloc(sizeof(uint8_t) * outBytes);
    for (int i = 0; i < outBytes; i++) { // RGBA: add alpha channel
        if (i % 4 != 3) {
            // NOTE(review): this index reads result[-1] when i == 0, and maps
            // channel c to the source channel c-1; the intended mapping is
            // probably `result[i - (i / 4)]` (= 3*pixel + channel). Preserved
            // as-is pending confirmation of the expected channel order.
            endResult[i] = result[i - (i / 4) - 1];
        } else {
            endResult[i] = (unsigned char)255; // opaque alpha
        }
    }
    jbyteArray intArr = env->NewByteArray(outBytes);
    env->SetByteArrayRegion(
        intArr, 0, outBytes, reinterpret_cast<jbyte *>(endResult));
    free(endResult);
    return intArr;
}
|
#!/usr/bin/ruby
# -*- mode: ruby; coding: UTF-8 -*-
# Desktop-notification plugin: announce newly delivered mail via notify-send.
class NotifySend
  # Configuration arguments exposed by the StandardNotify framework.
  ARGS = StandardNotify::ARG
  def initialize
    # :total => show only the overall message count; nil => per-key breakdown.
    @mode = case ARGS["notify-send"]
            when "total"
              :total
            else
              nil
            end
    # Which key to break counts down by: :from or :address.
    @use = if ARGS["ns-use-from"]
             :from
           else
             :address
           end
  end
  # Compose and show the notification for a delivery batch.
  # headers: message headers (unused here, kept for the plugin interface);
  # db: stats hash with :number plus count hashes under :from / :address.
  def fire(headers, db)
    str = nil
    case
    when @mode == :total
      str = "Total " + db[:number].to_s + " mails."
    else
      str = db[@use].map{|k, v| "%s: %d" % [k, v] }.join("\n")
    end
    system "notify-send", "-a", "MAIL DELIVER", "You got mail(s)", str
  end
end
# Register the plugin with the host notifier.
StandardNotify::PLUGINS << NotifySend.new
#!/bin/bash
# Bump the <Version> element in both MagicGradients project files.
# Usage: ./<script> <version>
GREEN=`tput setaf 2`
Version=$1
MagicGradients="../MagicGradients/MagicGradients.csproj"
MagicGradientsToolkit="../MagicGradients.Toolkit/MagicGradients.Toolkit.csproj"
echo "${GREEN}Set new $Version version for $MagicGradients"
sed -i .tmp "s|<Version>.*|<Version>$Version<\/Version>|" $MagicGradients;
# BUGFIX: was "$magicGradients.tmp" (wrong variable case), which expanded to
# ".tmp" and left the real sed backup file behind.
rm -f "$MagicGradients.tmp"
echo "Set new $Version version for $MagicGradientsToolkit"
sed -i .tmp "s|<Version>.*|<Version>$Version<\/Version>|" $MagicGradientsToolkit;
rm -f "$MagicGradientsToolkit.tmp"
|
<gh_stars>0
import React from 'react';
import ReactDOM from 'react-dom';
import App from './pages/App';
import reportWebVitals from './reportWebVitals';
import { Provider } from 'react-redux'
import store ,{history} from './redux/store'
import { RootProps } from './types/common'
// Mount the root component, wiring up the Redux store and router history.
const render = (Component: React.FC<RootProps>) => {
  ReactDOM.render(
    <Provider store={store}>
      <Component history={history} />
    </Provider>,
    document.getElementById('root')
  );
}
render(App)
// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals();
|
<gh_stars>0
package automaticscan
import (
"testing"
"github.com/stretchr/testify/require"
)
// TestNormalizeAppName checks that normalizeAppName lowercases the app name
// and drops a trailing ":<version>" qualifier.
func TestNormalizeAppName(t *testing.T) {
	appName := normalizeAppName("JBoss")
	require.Equal(t, "jboss", appName, "could not get normalized name")
	appName = normalizeAppName("JBoss:2.3.5")
	require.Equal(t, "jboss", appName, "could not get normalized name")
}
|
import re
def extract_metadata_info(code_snippet):
    """Extract dunder metadata assignments (e.g. __version__ = "1.0")
    from a source-code string.

    Returns a dict mapping the bare name (without the surrounding
    underscores) to its double-quoted string value. Later assignments of
    the same name overwrite earlier ones.
    """
    pattern = r'__(\w+)__\s*=\s*"([^"]+)"'
    return {name: value for name, value in re.findall(pattern, code_snippet)}
#include"stdafx.h"
#include <iostream>
#include<cstdio>
#include<algorithm>
using namespace std;
// Board dimensions n x m (m kept <= 15 so a row fits in a bitmask) and the
// number of enumerated transitions w.
int n, m, w;
const int maxn = 15;
// d[row][mask]: tiling counts; path[k]: the k-th (prev-row, cur-row) mask pair.
long long d[maxn][1 << 15];
int path[5000000][2];
// Recursively enumerate legal row-to-row profile transitions, column by
// column. c: columns decided so far; pre/now: bitmasks of the previous and
// current row being built. Every complete (pre, now) pair is appended to
// path[] for use by the DP in main().
void get(int c, int pre, int now)
{
	if (c > m)
		return;
	else if (c == m)
	{
		path[w][0] = pre;
		path[w++][1] = now;
		return;
	}
	// NOTE(review): the three recursions appear to cover (a) a vertical
	// domino filling the previous-row cell, (b) one filling the current-row
	// cell, and (c) a horizontal domino spanning two columns in both rows —
	// confirm against the DP recurrence below before modifying.
	get(c + 1, (pre << 1) |1,now << 1);
	get(c + 1, (pre << 1), (now << 1) | 1);
	get(c + 2, (pre << 2) | 3, (now << 2) | 3);
}
int main()
{
	// Read board sizes until "0 0"; count the domino tilings of each board.
	while (scanf_s("%d%d", &n, &m), n&&m)
	{
		w = 0;
		// Keep m (the profile width) as the smaller dimension: fewer masks.
		if (m > n)
			swap(n, m);
		// A board with an odd number of cells has no perfect tiling.
		if (m % 2 && n % 2)
		{
			printf("%d\n", 0);
			continue;
		}
		// Enumerate all legal row-to-row mask transitions for this width.
		get(0, 0, 0);
		memset(d, 0, sizeof(d));
		// d[i][mask]: ways to fill the first i rows leaving `mask` as the
		// profile of row i. Row 0 starts fully "filled" (virtual top edge).
		d[0][(1 << m) - 1] = 1;
		for (int i = 0; i < n; ++i)
		{
			for (int j = 0; j < w; ++j)
			{
				d[i + 1][path[j][1]] += d[i][path[j][0]];
			}
		}
		// NOTE: %I64d and scanf_s are MSVC-specific; use %lld / scanf elsewhere.
		printf("%I64d\n",d[n][(1<<m)-1]);
	}
}
|
#!/bin/bash
# Generated loss-calculation pipeline: computes insured-loss (il) outputs for
# two partitions (P1, P2) and one summary set (S1), wired together with
# named FIFOs. Consumers are started before producers so no pipe blocks.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")
# --- Script Init ---
set -e
set -o pipefail
mkdir -p log
rm -R -f log/*
# --- Setup run dirs ---
find output/* ! -name '*summary-info*' -type f -exec rm -f {} +
rm -R -f fifo/*
rm -R -f work/*
mkdir work/kat/
mkdir work/il_S1_summaryleccalc
mkdir work/il_S1_summaryaalcalc
mkfifo fifo/il_P1
mkfifo fifo/il_P2
mkfifo fifo/il_S1_summary_P1
mkfifo fifo/il_S1_summaryeltcalc_P1
mkfifo fifo/il_S1_eltcalc_P1
mkfifo fifo/il_S1_summarysummarycalc_P1
mkfifo fifo/il_S1_summarycalc_P1
mkfifo fifo/il_S1_summarypltcalc_P1
mkfifo fifo/il_S1_pltcalc_P1
mkfifo fifo/il_S1_summary_P2
mkfifo fifo/il_S1_summaryeltcalc_P2
mkfifo fifo/il_S1_eltcalc_P2
mkfifo fifo/il_S1_summarysummarycalc_P2
mkfifo fifo/il_S1_summarycalc_P2
mkfifo fifo/il_S1_summarypltcalc_P2
mkfifo fifo/il_S1_pltcalc_P2
# --- Do insured loss computes ---
# One consumer per output type and partition, each reading its FIFO.
eltcalc < fifo/il_S1_summaryeltcalc_P1 > work/kat/il_S1_eltcalc_P1 & pid1=$!
summarycalctocsv < fifo/il_S1_summarysummarycalc_P1 > work/kat/il_S1_summarycalc_P1 & pid2=$!
pltcalc < fifo/il_S1_summarypltcalc_P1 > work/kat/il_S1_pltcalc_P1 & pid3=$!
eltcalc -s < fifo/il_S1_summaryeltcalc_P2 > work/kat/il_S1_eltcalc_P2 & pid4=$!
summarycalctocsv -s < fifo/il_S1_summarysummarycalc_P2 > work/kat/il_S1_summarycalc_P2 & pid5=$!
pltcalc -s < fifo/il_S1_summarypltcalc_P2 > work/kat/il_S1_pltcalc_P2 & pid6=$!
# Fan each partition's summary stream out to the per-type FIFOs and binaries.
tee < fifo/il_S1_summary_P1 fifo/il_S1_summaryeltcalc_P1 fifo/il_S1_summarypltcalc_P1 fifo/il_S1_summarysummarycalc_P1 work/il_S1_summaryaalcalc/P1.bin work/il_S1_summaryleccalc/P1.bin > /dev/null & pid7=$!
tee < fifo/il_S1_summary_P2 fifo/il_S1_summaryeltcalc_P2 fifo/il_S1_summarypltcalc_P2 fifo/il_S1_summarysummarycalc_P2 work/il_S1_summaryaalcalc/P2.bin work/il_S1_summaryleccalc/P2.bin > /dev/null & pid8=$!
summarycalc -f -1 fifo/il_S1_summary_P1 < fifo/il_P1 &
summarycalc -f -1 fifo/il_S1_summary_P2 < fifo/il_P2 &
# Producers: event stream -> model -> gulcalc -> fmcalc, per partition.
eve 1 2 | getmodel | gulcalc -S0 -L0 -r -i - | fmcalc -a2 > fifo/il_P1 &
eve 2 2 | getmodel | gulcalc -S0 -L0 -r -i - | fmcalc -a2 > fifo/il_P2 &
wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8
# --- Do insured loss kats ---
# Concatenate per-partition results into the final CSVs.
kat work/kat/il_S1_eltcalc_P1 work/kat/il_S1_eltcalc_P2 > output/il_S1_eltcalc.csv & kpid1=$!
kat work/kat/il_S1_pltcalc_P1 work/kat/il_S1_pltcalc_P2 > output/il_S1_pltcalc.csv & kpid2=$!
kat work/kat/il_S1_summarycalc_P1 work/kat/il_S1_summarycalc_P2 > output/il_S1_summarycalc.csv & kpid3=$!
wait $kpid1 $kpid2 $kpid3
aalcalc -Kil_S1_summaryaalcalc > output/il_S1_aalcalc.csv & lpid1=$!
leccalc -r -Kil_S1_summaryleccalc -F output/il_S1_leccalc_full_uncertainty_aep.csv -f output/il_S1_leccalc_full_uncertainty_oep.csv -S output/il_S1_leccalc_sample_mean_aep.csv -s output/il_S1_leccalc_sample_mean_oep.csv -W output/il_S1_leccalc_wheatsheaf_aep.csv -M output/il_S1_leccalc_wheatsheaf_mean_aep.csv -m output/il_S1_leccalc_wheatsheaf_mean_oep.csv -w output/il_S1_leccalc_wheatsheaf_oep.csv & lpid2=$!
wait $lpid1 $lpid2
rm -R -f work/*
rm -R -f fifo/*
|
/*
* Copyright 2013 Stanford University.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* - Neither the name of the copyright holders nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* This file tests whether the PhysBAM translator correctly restores and
* saves a face array consisting of two physical instances that perfectly
* fit the region.
*
* Author: <NAME> <<EMAIL>>
*/
#include "data/physbam/translator_physbam.h"
#include "data/physbam/physbam_data.h"
#include "data/physbam/physbam_include.h"
#define R1_VALUE 3.1400
#define R2_VALUE 32.0
// Debug helper: print a logical data object's id, name, and region extents.
void printLdo(nimbus::LogicalDataObject* obj) {
  printf("**Object - ID: %lu, Name: %s", obj->id(), obj->variable().c_str());
  printf(" region: [%lu+%lu, %lu+%lu, %lu+%lu]\n", obj->region()->x(), obj->region()->dx(), obj->region()->y(), obj->region()->dy(), obj->region()->z(), obj->region()->dz()); // NOLINT
}
// Test region geometry: a DX x DY x DZ box with lower corner at (X, Y, Z).
const int_dimension_t X = 1;
const int_dimension_t Y = 1;
const int_dimension_t Z = 1;
const int_dimension_t DX = 2;
const int_dimension_t DY = 2;
const int_dimension_t DZ = 2;
const int_dimension_t SIZE = DX * DY * DZ;
// Particle buffer budget: 5 floats per particle (x, y, z, radius,
// collision range — see the comment in main()).
const int AVG_PARTICLES = 10;
const int TOTAL_PARTICLES = SIZE * AVG_PARTICLES * 5;
const int NUM_GHOST_CELLS = 2;
const int GRID_SCALE = 16;
// Random x-coordinate uniformly distributed in [X, X + DX).
float getX() {
  double val = drand48();
  val *= static_cast<double>(DX);
  val += static_cast<double>(X);
  return static_cast<float>(val);
}
// Random y-coordinate uniformly distributed in [Y, Y + DY).
float getY() {
  double val = drand48();
  val *= static_cast<double>(DY);
  val += static_cast<double>(Y);
  return static_cast<float>(val);
}
// Random z-coordinate uniformly distributed in [Z, Z + DZ).
float getZ() {
  double val = drand48();
  val *= static_cast<double>(DZ);
  val += static_cast<double>(Z);
  return static_cast<float>(val);
}
typedef PhysBAM::VECTOR<float, 3> VECTOR_TYPE;
typedef PhysBAM::VECTOR<int, 3> TV_INT;
typedef PhysBAM::GRID<VECTOR_TYPE> Grid;
typedef typename PhysBAM::PARTICLE_LEVELSET<Grid> ParticleLevelset;
typedef typename PhysBAM::PARTICLE_LEVELSET_EVOLUTION_UNIFORM<Grid> ParticleLevelsetEvolution;
typedef typename PhysBAM::GEOMETRY_BOUNDARY_POLICY<Grid>::BOUNDARY_PHI_WATER BoundaryPhiWater;
typedef typename TranslatorPhysBAM<VECTOR_TYPE>::FaceArray FaceArray;
// No-op levelset callbacks: an instance is required by
// Set_Levelset_Callbacks in main(), but this test exercises none of the hooks.
template <class TV>
class CALLBACKS:public PhysBAM::LEVELSET_CALLBACKS<PhysBAM::GRID<TV> > {
  typedef typename TV::SCALAR T;
  // Intentionally empty: particles are not adjusted at domain boundaries.
  void Adjust_Particle_For_Domain_Boundaries(PhysBAM::PARTICLE_LEVELSET_PARTICLES<TV>& particles, // NOLINT
      const int index,
      TV& V,
      const PhysBAM::PARTICLE_LEVELSET_PARTICLE_TYPE particle_type, // NOLINT
      const T dt,
      const T time) {} // NOLINT
  // Intentionally empty: no levelset velocity is supplied.
  void Get_Levelset_Velocity(const PhysBAM::GRID<TV>& grid,
      PhysBAM::LEVELSET_MULTIPLE_UNIFORM<PhysBAM::GRID<TV> >& levelset_multiple, // NOLINT
      PhysBAM::ARRAY<T, PhysBAM::FACE_INDEX<TV::dimension> >& V_levelset,
      const T time) const PHYSBAM_OVERRIDE {} // NOLINT
};
typedef CALLBACKS<VECTOR_TYPE> Callbacks;
// Test driver: set up a PhysBAM particle levelset, read particles into it
// from a Nimbus physical-data instance through the translator, write them
// back out, and verify the buffer round-trips unchanged.
int main(int argc, char *argv[]) {
  dbg_init();
  // Standard PhysBAM argument/logging boilerplate.
  PhysBAM::PARSE_ARGS parse_args;
  parse_args.Add_Integer_Argument("-restart", 0, "restart frame");
  parse_args.Add_Integer_Argument("-scale", 128, "fine scale grid resolution");
  parse_args.Add_Integer_Argument("-substep", -1, "output-substep level");
  parse_args.Add_Integer_Argument("-e", 100, "last frame");
  parse_args.Add_Integer_Argument("-refine", 1, "refine levels");
  parse_args.Add_Integer_Argument("-threads", 1, "number of threads");
  parse_args.Add_Double_Argument("-cfl", 1, "cfl number");
  PhysBAM::LOG::Initialize_Logging(false, false, 1<<30, true, parse_args.Get_Integer_Value("-threads")); // NOLINT
  Callbacks* callbacks = new Callbacks();
  PhysBAM::Initialize_Particles();
  PhysBAM::Initialize_Read_Write_General_Structures();
  PhysBAM::RANGE<PhysBAM::VECTOR<int, 3> > range(0, GRID_SCALE,
      0, GRID_SCALE,
      0, GRID_SCALE);
  // 5 because a particle is a 5-tuple: x, y, z, radius, collision range
  int_dimension_t dimensions[] = {X, Y, Z, DX, DY, DZ};
  nimbus::GeometricRegion* region = new nimbus::GeometricRegion(dimensions);
  CPdiVector* vec = new CPdiVector();
  TranslatorPhysBAM<PhysBAM::VECTOR<float, 3> > translator;
  nimbus::GeometricRegion* r = new nimbus::GeometricRegion(dimensions);
  nimbus::LogicalDataObject* ldo = new LogicalDataObject(1, "velocity", r);
  // Fill the source buffer with random particles; keep a copy to diff later.
  float* floats = new float[TOTAL_PARTICLES];
  float* floatSource = new float[TOTAL_PARTICLES];
  for (int i = 0; i < TOTAL_PARTICLES; i+=5) {
    floatSource[i + 0] = floats[i + 0] = getX();
    floatSource[i + 1] = floats[i + 1] = getY();
    floatSource[i + 2] = floats[i + 2] = getZ();
    floatSource[i + 3] = floats[i + 3] = 1.0;
    floatSource[i + 4] = floats[i + 4] = 1.0;
  }
  PhysBAMData* pd = new PhysBAMData();
  pd->set_buffer((char*)floats, TOTAL_PARTICLES * sizeof(float)); // NOLINT
  PhysicalDataInstance* instance = new PhysicalDataInstance(1, ldo, pd, 0);
  vec->push_back(instance);
  // Build the grid and particle-levelset evolution the translator targets.
  Grid* grid = new Grid(TV_INT(),
      PhysBAM::RANGE<VECTOR_TYPE>::Unit_Box(),
      true);
  grid->Initialize(TV_INT::All_Ones_Vector() * GRID_SCALE,
      PhysBAM::RANGE<VECTOR_TYPE>(VECTOR_TYPE(),
          VECTOR_TYPE::All_Ones_Vector()),
      true);
  ParticleLevelsetEvolution* evolution = new
      ParticleLevelsetEvolution(*grid, NUM_GHOST_CELLS);
  printf("grid: %p\n", grid);
  printf("evolution: %p\n", evolution);
  printf("evolution.particle_levelset: %p\n", &evolution->particle_levelset);
  printf("evolution.particle_levelset.levelset: %p\n", &evolution->particle_levelset.levelset);
  printf("evolution.particle_levelset.levelset.grid: %p\n", &evolution->particle_levelset.levelset.grid); // NOLINT
  evolution->Initialize_Domain(*grid);
  evolution->particle_levelset.Set_Band_Width(6);
  evolution->Set_Time(0);
  evolution->Set_CFL_Number(0.9);
  evolution->Set_Number_Particles_Per_Cell(2 * AVG_PARTICLES);
  evolution->Use_Particle_Levelset(true);
  evolution->Set_Levelset_Callbacks(*callbacks);
  FaceArray* faceVelocities = new FaceArray();
  faceVelocities->Resize(range);
  BoundaryPhiWater* phiWaterBoundary = new BoundaryPhiWater();
  phiWaterBoundary->Set_Velocity_Pointer(*faceVelocities);
  // phiWaterBoundary.Set_Constant_Extrapolation(domainOpenBoundaries);
  evolution->particle_levelset.levelset.Set_Custom_Boundary(*phiWaterBoundary);
  // evolution->particle_levelset.Use_Removed_Negative_Particles();
  // evolution->particle_levelset.Store_Unique_Particle_Id();
  evolution->Use_Particle_Levelset(true);
  // evolution.particle_levelset.levelset.Set_Collision_Body_List(example.collision_bodies_affecting_fluid); // NOLINT
  // evolution.particle_levelset.levelset.Set_Face_Velocities_Valid_Mask(&example.incompressible.valid_mask); // NOLINT
  // evolution->particle_levelset.Set_Collision_Distance_Factors(.1,1);
  // evolution->particle_levelset.Use_Removed_Positive_Particles();
  // Mark the whole test region as inside the fluid (negative phi).
  for (int z = Z; z <= Z + DZ; ++z) {
    for (int y = Y; y <= Y + DY; ++y) {
      for (int x = X; x <= X + DX; ++x) {
        evolution->phi(TV_INT(x, y, z)) = -1;
      }
    }
  }
  evolution->Set_Seed(2606);
  evolution->Seed_Particles(0);
  bool pass = true;
  pass = translator.ReadParticles(region, vec, evolution->particle_levelset, true);
  if (!pass) {
    printf("Failed to read particles.\n");
    goto error;
  }
  // result = translator.ReadParticles(region, &vec, evolution.particle_levelset, false);
  // Scribble over the buffer so WriteParticles provably rewrites it.
  for (int i = 0; i < TOTAL_PARTICLES; i++) {
    floats[i] = 1.0;
  }
  pass = translator.WriteParticles(region, vec, evolution->particle_levelset, true);
  if (!pass) {
    printf("Failed to write particles.\n");
    goto error;
  }
  // The written buffer must match the original random source exactly.
  for (int i = 0; i < TOTAL_PARTICLES; i++) {
    if (floats[i] != floatSource[i]) {
      dbg(DBG_ERROR, "Value in physical instance 1 should be %f, it's %f.\n", floatSource[i], floats[i]); // NOLINT
      pass = false;
    }
  }
error:
  if (pass) {
    printf("Passed all tests successfully.\n");
  } else {
    printf("Tests failed. Use dbg=error to observe errors.\n");
  }
}
|
echo Start
# Work from the Dropbox root so relative exclude paths resolve.
cd $HOME/Dropbox
# Retry until '_large' shows up in the exclude list — the daemon may not
# accept the exclusion until it has finished starting up.
until [[ $(dropbox exclude list | grep '_large') ]]; do
  echo 'excluding'
  dropbox exclude add '_large' 'Camera Uploads'
  sleep 1
done
echo done
|
#!/usr/bin/env sh
# Abort the script on any error it encounters.
set -e
# Build the static site.
npm run docs:build
# Enter the generated output folder.
cd docs/.vuepress/dist
# If deploying to a custom domain:
# echo 'www.example.com' > CNAME
git init
git add -A
git commit -m 'deploy'
# If deploying to https://<USERNAME>.github.io
# git push -f git@github.com:<USERNAME>/<USERNAME>.github.io.git master
# If deploying to https://<USERNAME>.github.io/<REPO>
git push -f git@github.com:yafanisonya/wheel.git master:gh-pages
cd -
#!/usr/bin/env bash
#
# Example CLI bootstrap: demonstrates flag definition/parsing with cli2.sh.
# jaagr <c@rlberg.se>
#
source bootstrap.sh
include utils/log.sh
include utils/cli2.sh
bootstrap::finish
cli::define_flag -h --help "Print this help text"
cli::define_flag -f --force "..."
# Entry point: parse argv and print usage when --help was given.
function main {
  cli::parse "$@"
  if cli::flag --help; then
    cli::usage "[opts...] arg"; exit
  fi
}
main "$@"
|
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
public class Engine {
private static final String KEY_ENGINE = "engine";
private static Engine engineInstance;
private Engine() {
// Private constructor to prevent instantiation from outside the class
}
public static Engine instance() {
if (engineInstance == null) {
engineInstance = new Engine();
}
return engineInstance;
}
public void storeEngineInServletContext(ServletContext servletContext) {
servletContext.setAttribute(KEY_ENGINE, this);
}
public void run(HttpServletRequest req, HttpServletResponse resp) throws Exception {
// Handle HTTP request processing here
// Example:
// resp.getWriter().write("Hello, World!");
}
} |
/**
 * Convert text to "leetspeak": look-alike digits replace letters.
 * Case-insensitive for every letter except g/G, which map to different
 * digits (g -> 9, G -> 6). All other characters pass through unchanged.
 */
function l33t (text) {
  var table = { l: '1', i: '1', z: '2', e: '3', a: '4', s: '5', G: '6', t: '7', b: '8', g: '9', o: '0' }
  return text.replace(/[lizeasgtbo]/gi, function (ch) {
    return table[ch] !== undefined ? table[ch] : table[ch.toLowerCase()]
  })
}
module.exports = l33t
|
class FileProcessor:
    """Utility for simple line-based scans of a text file."""

    def __init__(self, fil):
        # Path of the file to process.
        self.fil = fil

    def count_lines_with_keyword(self, keyword):
        """Return how many lines of the file contain `keyword` as a substring."""
        with open(self.fil, 'r') as handle:
            return sum(1 for line in handle if keyword in line)
package com.leetcode;
import org.testng.annotations.Test;
// Smoke test for Solution_457.circularArrayLoop (LeetCode 457).
// NOTE(review): the result is printed rather than asserted — consider
// asserting the expected boolean so the test can actually fail.
public class Solution_457Test {
    @Test
    public void testCircularArrayLoop() {
        Solution_457 solution_457 = new Solution_457();
        System.out.println(solution_457.circularArrayLoop(new int[]{-2,-3,-9}));
    }
}
#!/usr/bin/env sh
# BUGFIX: the shebang was "!/usr/bin/env sh" (missing the leading '#'), so
# the first line was executed as a command instead of selecting sh.
# Abort on any error (the original comment promised this but never set it).
set -e
# Package the extension.
# vsce package
yes | vsce package --yarn # works around the npm-list failure documented below
# Marketplace publisher page: https://marketplace.visualstudio.com/manage/publishers/obkoro1
# Packaging error without --yarn:
# Error: Command failed: npm list --production --parseable --depth=99999
# npm ERR! missing: hoek@6.0.4, required by korofileheader@3.4.0
# Workaround: vsce package --yarn
# Publishing to VSCodium's registry:
# npx ovsx publish file -p token
# Tagging releases:
# https://git-scm.com/book/zh/v2/Git-%E5%9F%BA%E7%A1%80-%E6%89%93%E6%A0%87%E7%AD%BE
# git tag -a 'v4.8.20' -m 'tag message'
# git push origin --tags
# git tag
# git tag -l | xargs git tag -d  # delete all local tags
# git fetch origin --prune       # re-fetch everything from the remote
|
<gh_stars>10-100
// Uncomment for fake i18n
// fakei18n = function (obj) {
// if (typeof obj === 'string') {
// return Array(obj.length + 1).join('_');
// } else {
// for (i in obj) {
// obj[i] = fakei18n(obj[i]);
// }
//
// return obj;
// }
// }
//
// for (i in FormRenderer.t) {
// FormRenderer.t[i] = fakei18n(FormRenderer.t[i])
// }
// Demo page wiring for FormRenderer: persists UI selections, loads the
// chosen CSS library, and boots a form from Screendoor or a local fixture.
// NOTE(review): FormRenderer, store, and Fixtures are assumed to be globals
// provided by earlier script tags — confirm in the host page.
FormRenderer.addPlugin('BookmarkDraft');
// Restore each persisted control's value from storage, falling back to its
// first <option>.
$('.js_stored_val').each(function(){
  var stored = store.get($(this).attr('id'));
  if (stored) {
    $(this).val(stored);
  } else {
    $(this).val($(this).find('option').first().val());
  }
});
// Persist changes and reload so the new library/fixture takes effect.
$('.js_stored_val').change(function(){
  store.set($(this).attr('id'), $(this).val());
  location.reload();
});
// Load libraries
$('head').
  append($('<link rel="stylesheet" type="text/css" />').attr('href', $('#lib').val()));
// Just append every possible class, it's easiest for now
if ($('#lib').val().match('cardinal')) {
  FormRenderer.BUTTON_CLASS = 'button button-primary'
} else if ($('#lib').val().match('bootstrap')) {
  FormRenderer.BUTTON_CLASS = 'btn btn-primary'
} else if ($('#lib').val().match('foundation')) {
  FormRenderer.BUTTON_CLASS = 'button button-primary'
}
// Initialize form
if ($('#screendoor_project_id').val()) {
  var fr = new FormRenderer({
    screendoorBase: 'https://screendoor.dobt.dev',
    project_id: $('#screendoor_project_id').val()
  });
} else {
  var fr = new FormRenderer($.extend(
    Fixtures.FormRendererOptions[$('#fixture').val()](),
    {
      screendoorBase: 'https://screendoor.dobt.dev',
      onReady: function(){
        console.log('Form is ready!');
      }
    }
  ));
  // Stub out save so fixture-backed demos don't hit a server.
  fr.save = function(){
    this.state.set({
      hasChanges: false
    });
  };
}
// Prevent '#' placeholder links from jumping to the top of the page.
$(document).on('click', '[href="#"]', function(e){
  e.preventDefault()
})
|
<filename>src/utils.cpp
/*******************************************************************************
*
* Copyright (c) 2017, <NAME>
* All rights reserved.
*
* BSD 3-Clause License
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
******************************************************************************/
#include <connlib/utils.h>
namespace connlib
{
// Strict weak ordering: sort edges by descending length.
bool EdgeLengthGreater::operator()(const Edge& e1, const Edge& e2)
{
    return e1.length > e2.length;
}
// eps: absolute tolerance used for every coordinate/length comparison.
EdgeEqual::EdgeEqual(double eps)
    : m_eps(eps)
{
}
// Approximate equality: both endpoints and the length must each match
// within m_eps. Endpoints are compared positionally, so edges that are
// equal only up to an endpoint swap do NOT compare equal.
bool EdgeEqual::operator()(const Edge& e1, const Edge& e2)
{
    return (std::abs(e1.node1->x - e2.node1->x) < m_eps &&
        std::abs(e1.node1->y - e2.node1->y) < m_eps &&
        std::abs(e1.node2->x - e2.node2->x) < m_eps &&
        std::abs(e1.node2->y - e2.node2->y) < m_eps &&
        std::abs(e1.length - e2.length) < m_eps);
}
// Compare two edge lists for equality within tolerance `eps`.
// Both lists are sorted (via Edge::greater) before the element-wise
// comparison, so ordering differences between the lists do not matter.
// NOTE: sorting mutates both input lists.
bool compareEdgeList(EdgeList& list1, EdgeList& list2, double eps)
{
    if (list1.size() != list2.size()) return false;
    list1.sort(Edge::greater);
    list2.sort(Edge::greater);
    // BUGFIX: honor the caller-supplied tolerance; previously a hard-coded
    // 1e-3f was passed and the `eps` parameter was silently ignored.
    return std::equal(list1.begin(), list1.end(), list2.begin(),
        EdgeEqual(eps));
}
// Append a node at (x, y) to the list (stored by value).
void addNode(NodeList& list, const float x, const float y)
{
    list.push_back(Node(x, y));
}
// Construct an edge between two freshly allocated shared nodes and append
// it to the list. The nodes are owned by the new edge's shared_ptrs.
void addEdge(EdgeList& list, const float n1X, const float n1Y,
    const float n2X, const float n2Y, const float length)
{
    NodePtr node1(std::make_shared<connlib::Node>(n1X, n1Y));
    NodePtr node2(std::make_shared<connlib::Node>(n2X, n2Y));
    Edge edge(node1, node2, length);
    list.push_back(edge);
}
} // namespace connlib
|
#!/bin/sh
# Minify the GHCJS build outputs with closure-compiler and, when a sibling
# qua-server checkout exists, copy the artifacts into its static folders.
BUILD_FOLDER=$(stack path --dist-dir)/build/qua-view/qua-view.jsexe
BUILD_FOLDER_GLOADER=$(stack path --dist-dir)/build/qua-worker-loadgeometry/qua-worker-loadgeometry.jsexe
# minify the code of qua-view
# (the bundle is wrapped in runQuaView(), invoked once the page has loaded)
cat << EOF > web/qua-view.js
var global = this;
function runQuaView(){
"use strict"
EOF
closure-compiler --warning_level=QUIET \
                 --language_in=ECMASCRIPT5 \
                 --compilation_level=ADVANCED_OPTIMIZATIONS \
                 --externs=$BUILD_FOLDER/all.js.externs \
                 $BUILD_FOLDER/all.js >> web/qua-view.js
cat << EOF >> web/qua-view.js
}
if (document.readyState === 'complete') {
runQuaView.bind(global)();
} else {
window.onload = runQuaView.bind(global);
}
EOF
# minify the code of qua-worker-loadgeometry
closure-compiler --warning_level=QUIET \
                 --language_in=ECMASCRIPT5 \
                 --compilation_level=ADVANCED_OPTIMIZATIONS \
                 --externs=$BUILD_FOLDER_GLOADER/all.js.externs \
                 $BUILD_FOLDER_GLOADER/all.js > web/qua-worker-loadgeometry.js
# copy qua-view.js to qua-server if possible
if [ -d "../qua-server/static/js" ] ; then
  cp web/qua-view.js ../qua-server/static/js/qua-view.js
fi
# copy qua-view.css to qua-server if possible
if [ -d "../qua-server/static/css" ] ; then
  cp web/qua-view.css ../qua-server/static/css/qua-view.css
fi
# copy qua-worker-loadgeometry.js to qua-server if possible
if [ -d "../qua-server/static/js" ] ; then
  cp web/qua-worker-loadgeometry.js ../qua-server/static/js/qua-worker-loadgeometry.js
fi
|
#!/usr/bin/env bash
# CI entry point for the realm-elasticsearch gem: install deps, wait for the
# Elasticsearch service, then run specs and (only if they pass) the linter.
cd realm-elasticsearch
bundle install
# Block until Elasticsearch answers on port 9200 (compose service hostname).
../.buildkite/wait-for-it.sh elasticsearch:9200
bundle exec rspec \
&& bundle exec rubocop -c ../.rubocop.yml
|
<filename>intro/part02-03_typecasting/src/typecasting.py
# Write your solution here
# Read a number from the user and print its integer and fractional parts.
number = float(input("Please type in a number: "))
# int() truncates toward zero, so for negative inputs the "decimal part"
# printed below is negative (e.g. -1.5 -> -0.5).
print(f"Integer part: {int(number)}")
print(f"Decimal part: {number-int(number)}")
// Sample unsorted input used by the demo at the bottom of the file.
const arr = [5, 10, -2, 0, 11];
// Sorts the given numeric array in ascending order *in place* (exchange
// sort: repeatedly swaps out-of-order pairs) and returns the same array.
function sortAsc(arr) {
  for (let left = 0; left < arr.length; left++) {
    for (let right = left + 1; right < arr.length; right++) {
      if (arr[left] > arr[right]) {
        [arr[left], arr[right]] = [arr[right], arr[left]];
      }
    }
  }
  return arr;
}
// Demo: print the sample array sorted ascending.
console.log(sortAsc(arr)); // [-2, 0, 5, 10, 11]
#!/bin/sh
# Print a startup banner with the installed Redis version, then launch
# redis-server daemonized (in the background).
echo
echo "=================================================="
echo "                     REDIS                        "
echo "  REDIS_VERSION $(redis-server -v)                "
echo "=================================================="
redis-server --daemonize yes
package com.avalon.caverns.world;
import com.avalon.caverns.core.init.BlockInit;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.tags.BlockTags;
import net.minecraft.world.gen.feature.OreFeatureConfig;
import net.minecraft.world.gen.feature.template.BlockMatchRuleTest;
import net.minecraft.world.gen.feature.template.RuleTest;
import net.minecraft.world.gen.feature.template.TagMatchRuleTest;
/**
 * Ore-style feature configuration for cavern generation. Delegates entirely
 * to the vanilla {@link OreFeatureConfig}; its only addition is a set of
 * custom filler-block rule tests.
 */
public class CavernFeatureConfig extends OreFeatureConfig{
    /** Pass-through constructor; parameter names follow the mapped vanilla signature. */
    public CavernFeatureConfig(RuleTest p_i241989_1_, BlockState state, int size) {
        super(p_i241989_1_, state, size);
    }

    /** Rule tests selecting which base blocks may be replaced during generation. */
    public static final class FillerBlockType {
        public static final RuleTest ENDSTONE = new BlockMatchRuleTest(Blocks.END_STONE);
        public static final RuleTest BLACKSTONEGEN = new BlockMatchRuleTest(Blocks.BLACKSTONE);
        public static final RuleTest NETHERGEN = new BlockMatchRuleTest(Blocks.NETHERRACK);
    }
}
|
<reponame>deLibertate/Insurances
package service;
import data.InsuredPerson;
import org.junit.Before;
import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.FileSystemXmlApplicationContext;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import static org.junit.Assert.*;
/**
 * Integration tests for {@code PersonService}'s DAO operations.
 * NOTE(review): these tests hit a live database and are not independent —
 * e.g. {@code outPerson} assumes a row with id 1 exists. JUnit does not
 * guarantee method execution order; confirm this is intentional.
 */
public class PersonServiceTest {
    private InsuredPerson person;
    private PersonService ps;

    /** Fresh service and a fixed sample person before every test. */
    @Before
    public void initTest() {
        ps=new PersonService();
        person = new InsuredPerson(1,"Vasilyi", "Vasilevich", "Vasilyev", LocalDate.of(2018, 9, 1), 223.5);
    }

    /** Inserts the sample person (no assertion — smoke test only). */
    @Test
    public void addPerson(){
        ps.getDao().create(person,8);// cs.getDao().create(first);
    }

    /** Reads person with id 1 and prints it (no assertion). */
    @Test
    public void outPerson(){
        System.out.println(ps.getDao().read(1).toString());
    }

    /** Renames the sample person and persists the update. */
    @Test
    public void updatePerson(){
        person.setName("Oscar");
        ps.getDao().update(person);
    }

    /** Deletes the sample person by id. */
    @Test
    public void deletePerson(){
        ps.getDao().delete(person.getId());
    }

    /** Loads an InsuredPerson bean from the Spring XML context and prints it. */
    @Test
    public void springPerson(){
        ApplicationContext context =
                new FileSystemXmlApplicationContext("./src/main/resources/spring-context.xml");
        InsuredPerson p = (InsuredPerson)context.getBean("person-bean");
        System.out.println(p.toString());
    }
}
#!/bin/bash
# Resolve the repository root (the parent of this script's directory) and run
# the PDF generation step inside the trivadis/mktools container, with the
# repo mounted at /data.
DATA_DIR="$(cd "$(dirname "${0}")/.." && pwd)"
docker run -v ${DATA_DIR}:/data --rm -it trivadis/mktools bash -c "cd /data; ./tools/run-in-container/genpdf.sh"
|
<filename>example/tools/helpers.js
const formatd = require('date-fns/format');
const util = {};

// Decodes Postillion's two-byte big-endian length prefix:
// first byte is the high-order part, second byte the low-order part.
util.getPostillionLength = (buf) => {
  const high = buf[0];
  const low = buf[1];
  return (256 * high) + low;
};
// Strips the two-byte length prefix from a Postillion message and returns
// just the payload as a new Buffer.
util.extractPostillionData = (sent) => {
  // the data is two bytes shorter than what is sent
  // Bug fix: Buffer.from(number) throws a TypeError in modern Node; a
  // zero-filled buffer of the payload size must be created with Buffer.alloc.
  const buf = Buffer.alloc(sent.length - 2);
  // the first two bytes represent the length, the rest is data
  for (let i = 2; i < sent.length; i += 1) {
    buf[i - 2] = sent[i];
  }
  return buf;
};
// Prefixes `_data` (Buffer or string) with Postillion's two-byte big-endian
// length header and returns the combined Buffer.
util.attachPostillionLength = (_data) => {
  let data;
  // make sure _data is a buffer; if it's a string, convert it
  if (Buffer.isBuffer(_data)) {
    data = _data;
  } else if (typeof (_data) === 'string') {
    data = Buffer.from(_data);
  } else {
    // Previously any other input left `data` null and crashed below with an
    // opaque "Cannot read property 'length' of null"; fail loudly instead.
    throw new TypeError('attachPostillionLength expects a Buffer or a string');
  }
  // the length is represented by two bytes (big-endian)
  const length = Buffer.alloc(2);
  length[0] = data.length / 256;
  length[1] = data.length % 256;
  return Buffer.concat([length, data]);
};
// Fills in any missing ISO 8583-style timestamp fields on `obj`:
// field 7 = transmission date/time, 12 = local transaction time,
// 13 = local transaction date. Existing values are left untouched.
// NOTE(review): with date-fns, 'hh' is the 12-hour clock; these fields are
// conventionally 24-hour ('HH'). Also token meanings differ between date-fns
// v1 and v2 ('DD'). Confirm the installed version and intended format.
util.attachDiTimeStamps = (obj) => {
  const time = new Date();
  if (!obj['7']) obj['7'] = formatd(time, 'MMDDhhmmss');
  if (!obj['12']) obj['12'] = formatd(time, 'hhmmss');
  if (!obj['13']) obj['13'] = formatd(time, 'MMDD');
  return obj;
};
module.exports = util;
|
package io.virtualan.apifirst.config;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.ser.key.Jsr310NullKeySerializer;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.zalando.problem.ProblemModule;
import org.zalando.problem.violations.ConstraintViolationProblemModule;
/**
 * Registers extra Jackson modules (Afterburner, Zalando Problem) and tunes
 * the shared {@link ObjectMapper} used by the application.
 */
@Configuration
public class JacksonConfig {
    // additional modules for jackson object mapper
    /*
     * Jackson Afterburner module to speed up serialization/deserialization.
     */
    @Bean
    public AfterburnerModule afterburnerModule() {
        return new AfterburnerModule();
    }

    /*
     * Zalando Problem Module for serialization/deserialization of RFC7807 Problem.
     */
    @Bean
    public ProblemModule problemModule() {
        return new ProblemModule().withStackTraces(false);
    }

    /*
     * Module for serialization/deserialization of ConstraintViolationProblem.
     */
    @Bean
    public ConstraintViolationProblemModule constraintViolationProblemModule() {
        return new ConstraintViolationProblemModule();
    }

    /*
     * enriches jackson object mapper with a null key serializer (by default null keys in maps lead to
     * json serialization error). We need this since when using tomcat, the tomcat cache metrics map
     * exposed by jhipster framework has null keys and lead to a failing jhi-metrics endpoint.
     */
    // Also drops empty values from output (NON_EMPTY) and registers the
    // Problem module on the shared mapper.
    @Autowired
    public void configureJacksonObjectMapper(ObjectMapper mapper) {
        mapper.setSerializationInclusion(JsonInclude.Include.NON_EMPTY);
        mapper.getSerializerProvider().setNullKeySerializer(new Jsr310NullKeySerializer());
        mapper.registerModule(problemModule());
    }
}
|
/// <summary>
/// Prints the index pair (i, j), i &lt; j, of every pair of elements in
/// <paramref name="array"/> whose values add up to <paramref name="sum"/>.
/// </summary>
public static void FindPairsWithSum(int[] array, int sum)
{
    // Scan every unordered pair exactly once.
    for (int left = 0; left < array.Length - 1; left++)
    {
        for (int right = left + 1; right < array.Length; right++)
        {
            if (array[left] + array[right] != sum)
            {
                continue;
            }

            Console.WriteLine("Pair found at index " +
                              left + " and " + right);
        }
    }
}
class MaintenancePredictor:
    """Counts recorded incidents matching a region/problem-type/time window."""

    def __init__(self, avarii, coords):
        # avarii: incident table with `oblast`, `probtype`, `date` columns.
        # coords: location data (stored but not used by got_prediction).
        self.avarii = avarii
        self.coords = coords

    def got_prediction(self, date, length, where, probtype):
        """Return the number of incidents of `probtype` in region `where`
        whose date falls within [date, date + length] (inclusive)."""
        records = self.avarii
        mask = (
            (records.oblast == where)
            & (records.probtype == probtype)
            & (records.date >= date)
            & (records.date <= date + length)
        )
        return mask.sum()
import { LOAD_ACCOUNTS } from './constants';
export function loadAccounts() {
return {
type: LOAD_ACCOUNTS,
};
} |
#!/bin/sh
# set -e
# Container entry point for a go-spacemesh node. When "! $BOOTSTRAP" the
# bootstrap branch runs (generates config, later starts the PoET); otherwise
# a miner node fetches its config from the bootstrap node.
# NOTE(review): "! \$BOOTSTRAP" selecting the bootstrap branch reads inverted,
# but it matches the later `export BOOTSTRAP=true` re-export; confirm the
# environment contract before changing it.
apk -q add --update wget curl jq iptables gettext
export EXT_PORT=7513
export COINBASE="0x1234"
if ! $BOOTSTRAP ; then
    echo "- BOOTSTRAP NODE -"
    echo "- GENESIS_ACTIVE_SIZE=$GENESIS_ACTIVE_SIZE -"
    # Bug fix: the original read `)!= $EXT_IP` with no space before `!=`,
    # which made `[ ... ]` a malformed test, so this sanity check could
    # never trigger. Quote both sides and separate the operator.
    if [ "$(getent hosts ${BS_NODE_URL} | awk '{ print $1 }')" != "$EXT_IP" ]; then
        echo "BS_NODE_URL doesn't resolve to ${EXT_IP}"
        exit 1
    fi
    export POET_IP=$(getent hosts ${POET_URL} | awk '{ print $1 }')
    # Wait until the PoET service answers.
    wget -qO- --tries=0 --retry-connrefused ${POET_IP}:8080/v1/info
    export GENESIS_TIME=$(date -d "@$(($(date +%s) + $GENESIS_SEC_DELAY))" --utc +%Y-%m-%dT%H:%M:%S+00:00)
    echo "- GENESIS_TIME: $GENESIS_TIME -"
    envsubst < /root/config/config.toml.tmpl > ./config.toml
else
    echo "- MINER NODE -"
    # Discover the host port Docker mapped to 7513 and redirect incoming
    # traffic on 7513 to it.
    export EXT_PORT=$(curl --unix-socket /var/run/docker.sock http://localhost/containers/${HOSTNAME}/json | jq -r '.NetworkSettings.Ports."7513/tcp"[0].HostPort')
    iptables -t nat -A PREROUTING -i eth0 -p tcp --dport 7513 -j REDIRECT --to-port ${EXT_PORT}
    iptables -t nat -A PREROUTING -i eth0 -p udp --dport 7513 -j REDIRECT --to-port ${EXT_PORT}
    iptables-save
    # Fetch the node config from the bootstrap node.
    # NOTE(review): `-qO-` and `-O ./config.toml` both set the output target;
    # the trailing -O wins. Confirm the duplicate flag is intentional.
    wget -qO- --tries=0 --retry-connrefused ${BS_NODE_URL} -O ./config.toml
fi
set -o xtrace
/bin/go-spacemesh \
--test-mode \
--grpc-server \
--json-server \
--metrics-port 2020 \
--metrics \
--start-mining \
--coinbase $COINBASE \
--tcp-port $EXT_PORT &
set +o xtrace
bg_pid=$!
# Wait for the node to create its p2p identity, then publish it.
until [ -d /root/spacemesh/p2p/nodes ]; do sleep 1; done
export P2P="\"spacemesh://`ls /root/spacemesh/p2p/nodes`@${EXT_IP}:${EXT_PORT}\""
echo "P2P: $P2P"
if ! $BOOTSTRAP ; then
    export BOOTSTRAP=true
    envsubst < /root/config/config.toml.tmpl > /root/config/config.toml
    # Register this node as a PoET gateway and start the PoET service.
    wget -qO- --tries=0 --retry-connrefused --post-data '{ "gatewayAddresses": ["'${EXT_IP}':9091"] }' ${POET_IP}:8080/v1/start
    echo "- POET STARTED -"
fi
wait $bg_pid
exec "$@"
# Generated by Django 3.2.6 on 2021-08-13 16:59
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alters the default on Person.birthday."""

    dependencies = [
        ('dash', '0002_person_birthday'),
    ]

    operations = [
        migrations.AlterField(
            model_name='person',
            name='birthday',
            # NOTE(review): this default is a *fixed* timestamp captured when
            # makemigrations ran (datetime.now() was evaluated eagerly). If a
            # dynamic default was intended, the model should pass a callable
            # (e.g. django.utils.timezone.now) instead. Do not edit this
            # generated file except in lockstep with the model.
            field=models.DateField(default=datetime.datetime(2021, 8, 13, 13, 59, 48, 268167)),
        ),
    ]
|
import Client from '../src';
import MockTransport from '../__mocks__/MockTransport';
import expect from 'expect';
// Shared fixtures: a mock transport and a client pointed at a fake endpoint.
const transport = new MockTransport();
const endpoint = 'http://wordpress.test/wp-json';
const client = new Client({ transport, endpoint });

// Unit tests for the Client constructor: option merging, defaults, and the
// HTTP / resource helper methods it exposes.
describe('Client', () => {
  beforeEach(() => {
    transport.resetMocks();
  });

  it('sets transport property', () => {
    expect(client.transport).toBe(transport);
  });

  // NOTE(review): if the constructor does NOT throw, this test passes with
  // zero assertions executed. Consider asserting that the catch block ran.
  it('throws error when missing transport', () => {
    try {
      new Client({ transport: undefined });
    } catch (error) {
      expect(error instanceof TypeError).toBe(true);
      expect(error.message).toBe('Transport is required option, none was set.');
    }
  });

  it('has default options', () => {
    const ylletClient = new Client({ transport });
    expect(ylletClient.options).toEqual({
      endpoint: '',
      namespace: 'wp/v2',
      nonce: '',
      config: {
        headers: {
          'Content-Type': 'application/json'
        }
      },
      resource: '',
      restore: true
    });
  });

  // Custom headers and config entries should be merged into the defaults,
  // not replace them wholesale.
  it('merges options', () => {
    const ylletClient = new Client({
      transport,
      headers: {
        'X-Test': 'Test'
      },
      endpoint: 'https://wordpress.test/wp-json',
      config: {
        foo: 'bar'
      }
    });
    expect(ylletClient.options).toEqual({
      endpoint: 'https://wordpress.test/wp-json',
      namespace: 'wp/v2',
      nonce: '',
      config: {
        foo: 'bar',
        headers: {
          'Content-Type': 'application/json',
          'X-Test': 'Test'
        }
      },
      resource: '',
      restore: true
    });
  });

  it('has HTTP methods', () => {
    expect(typeof client.get).toBe('function');
    expect(typeof client.create).toBe('function');
    expect(typeof client.update).toBe('function');
    expect(typeof client.delete).toBe('function');
  });

  // Each resource helper should set options.resource to its own name.
  it('has API Resource methods', () => {
    [
      'categories',
      'comments',
      'media',
      'statuses',
      'pages',
      'posts',
      'settings',
      'tags',
      'taxonomies',
      'types',
      'users',
      'search'
    ].forEach(method => {
      client[method]();
      expect(client.options.resource).toBe(method);
    });
  });
});
|
set -e

# Change the monit timeout for uaa, to allow for (very) slow database migration.
echo Patching uaa monit for longer timeout, allowing for very slow database migrations

# While the final monit spec will be found in /var/vcap/monit/ at the
# time this script runs the directory will not be filled yet. That is
# done by configgin, comes after us. Thus, we patch the input file to
# configgin instead.
# In-place sed: bump monit's start/stop timeout from 60s to 600s.
sed -e 's/with timeout 60 seconds/with timeout 600 seconds/' \
-i /var/vcap/jobs-src/uaa/monit

echo OK
exit 0
|
#!/usr/bin/env bash
# Copyright (c) 2021, ARM Limited and Contributors. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of ARM nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
TOP_DIR=`pwd`

# Symlink the shared build scripts (from ../../common and bbr-acs) into this
# tree's build-scripts directory so the generic framework can find them.
# NOTE(review): `ln -s` without -f fails if the links already exist, so this
# script is not idempotent — confirm it is only run on a clean tree.
create_scripts_link()
{
    ln -s $TOP_DIR/../../common/scripts/build-all.sh $TOP_DIR/build-scripts/build-all.sh
    ln -s $TOP_DIR/../../common/scripts/build-uefi.sh $TOP_DIR/build-scripts/build-uefi.sh
    ln -s $TOP_DIR/../../common/scripts/build-bsaefi.sh $TOP_DIR/build-scripts/build-bsaefi.sh
    ln -s $TOP_DIR/../../common/scripts/build-linux.sh $TOP_DIR/build-scripts/build-linux.sh
    ln -s $TOP_DIR/../../common/scripts/build-linux-bsa.sh $TOP_DIR/build-scripts/build-linux-bsa.sh
    ln -s $TOP_DIR/../../common/scripts/build-grub.sh $TOP_DIR/build-scripts/build-grub.sh
    ln -s $TOP_DIR/../../common/scripts/build-busybox.sh $TOP_DIR/build-scripts/build-busybox.sh
    ln -s $TOP_DIR/../../common/scripts/framework.sh $TOP_DIR/build-scripts/framework.sh
    ln -s $TOP_DIR/../../common/scripts/parse_params.sh $TOP_DIR/build-scripts/parse_params.sh
    ln -s $TOP_DIR/../../common/scripts/make_image.sh $TOP_DIR/build-scripts/make_image.sh
    ln -s $TOP_DIR/bbr-acs/common/scripts/build-sct.sh $TOP_DIR/build-scripts/build-sct.sh
    ln -s $TOP_DIR/bbr-acs/common/scripts/build-fwts.sh $TOP_DIR/build-scripts/build-fwts.sh
    ln -s $TOP_DIR/bbr-acs/common/scripts/build-uefi-apps.sh $TOP_DIR/build-scripts/build-uefi-apps.sh
}
# Refresh the local ramdisk and config directories from the shared common
# tree (removes any stale copies first).
init_dir()
{
    rm -rf $TOP_DIR/ramdisk
    rm -rf $TOP_DIR/build-scripts/config
    cp -r $TOP_DIR/../../common/ramdisk $TOP_DIR
    cp -r $TOP_DIR/../../common/config $TOP_DIR/build-scripts
}

# Set up links/dirs, then build everything and assemble the IR image.
create_scripts_link
init_dir
source ./build-scripts/build-all.sh IR F
source ./build-scripts/make_image.sh IR
|
# Experiment 1: on the WebVid dataset, predict 4 frames from the input text: cogview_webvid-2021-08-13-12:11:20
# WebVid dataset, original preprocessing distribution
deepspeed --master_port 29502 --num_nodes 1 --num_gpus 4 pretrain_gpt2.py \
--batch-size 48 \
--experiment-name cogview_webvid \
--img-tokenizer-num-tokens 8192 \
--dataset-type WebvidTokensDataset \
--train-data /raid/datasets/video_datasets/webvid/ \
--model-parallel-size 1 \
--num-layers 12 \
--hidden-size 512 \
--num-attention-heads 16 \
--save /home/user/mzsun/codes/CogView/experiments/ \
--train-iters 40000 \
--distributed-backend nccl \
--lr-decay-style cosine \
--warmup .1 \
--checkpoint-activations \
--deepspeed-activation-checkpointing \
--max-position-embeddings 1089 \
--max-memory-length 0 \
--txt-loss-scale 5 \
--fp16 \
--deepspeed --deepspeed_config /home/user/mzsun/codes/CogView/scripts/ds_config.json
# Continuation of experiment 1: on the WebVid dataset, predict 4 frames from the input text
# WebVid dataset, original preprocessing distribution (resumes from the 08-13 checkpoint)
deepspeed --master_port 29502 --num_nodes 1 --num_gpus 4 pretrain_gpt2.py \
--load /raid/users/mzsun/codes/CogView/experiments/cogview_webvid-2021-08-13-12:11:20/ --fast-load \
--batch-size 48 \
--experiment-name cogview_webvid \
--img-tokenizer-num-tokens 8192 \
--dataset-type WebvidTokensDataset \
--train-data /raid/datasets/video_datasets/webvid/ \
--model-parallel-size 1 \
--num-layers 12 \
--hidden-size 512 \
--num-attention-heads 16 \
--save /home/user/mzsun/codes/CogView/experiments/ \
--train-iters 100000 \
--distributed-backend nccl \
--lr-decay-style cosine \
--warmup .1 \
--checkpoint-activations \
--deepspeed-activation-checkpointing \
--max-position-embeddings 1089 \
--max-memory-length 0 \
--txt-loss-scale 5 \
--fp16 \
--deepspeed --deepspeed_config /home/user/mzsun/codes/CogView/scripts/ds_config.json
# Experiment 2: on the WebVid dataset, predict 4 frames from the input text: cogview_webvid-2021-08-15-13:48:38
# Lower the text-prediction loss weight and raise the image-prediction loss weight
deepspeed --master_port 29502 --num_nodes 1 --num_gpus 4 pretrain_gpt2.py \
--load /raid/users/mzsun/codes/CogView/experiments/cogview_webvid-2021-08-15-13:48:38/ --fast-load \
--batch-size 48 \
--experiment-name cogview_webvid \
--img-tokenizer-num-tokens 8192 \
--dataset-type WebvidTokensDataset \
--train-data /raid/datasets/video_datasets/webvid/ \
--model-parallel-size 1 \
--num-layers 12 \
--hidden-size 512 \
--num-attention-heads 16 \
--save /home/user/mzsun/codes/CogView/experiments/ \
--train-iters 40000 \
--distributed-backend nccl \
--lr-decay-style cosine \
--warmup .1 \
--checkpoint-activations \
--deepspeed-activation-checkpointing \
--max-position-embeddings 1089 \
--max-memory-length 0 \
--img-loss-scale 5 \
--txt-loss-scale 1 \
--fp16 \
--deepspeed --deepspeed_config /home/user/mzsun/codes/CogView/scripts/ds_config.json
# Experiment 3: on the WebVid dataset, predict 4 frames from the input text
# Uses 64x64 input images (raw frames dataset + OpenAI image tokenizer)
deepspeed --include localhost:1 --master_port 29503 pretrain_gpt2.py \
--batch-size 48 \
--image-size 64 \
--experiment-name cogview_webvid_img64_ds3 \
--img-tokenizer-num-tokens 8192 \
--dataset-type WebvidFramesDataset \
--model-parallel-size 1 \
--num-layers 12 \
--hidden-size 512 \
--num-attention-heads 16 \
--train-iters 40000 \
--distributed-backend nccl \
--lr-decay-style cosine \
--warmup .1 --fp16 \
--checkpoint-activations \
--deepspeed-activation-checkpointing \
--max-position-embeddings 1089 \
--max-memory-length 0 \
--img-loss-scale 5 \
--txt-loss-scale 1 \
--train-data /raid/datasets/video_datasets/webvid/ \
--save /home/user/mzsun/codes/CogView/experiments/ \
--img-tokenizer-path /home/user/mzsun/codes/Video_VQVAE/pretrained/OPENAI/ \
--deepspeed --deepspeed_config /home/user/mzsun/codes/CogView/scripts/ds_config.json
# Sample-generation demo on the WebVid dataset (predict 4 frames from text,
# using the experiment-1 checkpoint)
python generate_samples.py \
--deepspeed \
--model-parallel-size 1 \
--num-layers 12 \
--hidden-size 512 \
--load /raid/users/mzsun/codes/CogView/experiments/cogview_webvid-2021-08-13-12:11:20/ \
--num-attention-heads 16 \
--max-position-embeddings 1089 \
--fp16 \
--temperature 1. \
--top_k 200 \
--top_p 0 \
--img-tokenizer-path /home/user/mzsun/codes/Video_VQVAE/pretrained/OPENAI/ \
--query-window 64 \
--key-window-times 4 \
--num-pivot 256 \
--is-sparse 0 \
--max-position-embeddings-finetune 1089 \
--generation-task predict4frames \
--input-source /home/user/mzsun/codes/CogView/generate_inputs.txt \
--output-path /raid/users/mzsun/codes/CogView/experiments/cogview_webvid-2021-08-13-12:11:20/samples_predict4frames \
--batch-size 4 \
--max-inference-batch-size 4 \
--device 0
<gh_stars>0
var express = require('express');
var router = express.Router();
var passport = require('passport');
var path = require('path');
var pool = require('../modules/pool')
var constants = require('../modules/constants')
// GET / — projects the logged-in user created, each with an aggregated
// array of collaborator usernames. Responds 500 on any DB failure.
router.get('/', function (req, res) {
  pool.connect(function (errorConnectingToDatabase, client, done) {
    if (errorConnectingToDatabase) {
      console.log('Error connecting to database', errorConnectingToDatabase);
      res.sendStatus(500);
    } else {
      client.query(`SELECT projects.id, projects.project_name, array_agg(collaborators.username)
        FROM projects JOIN users
        ON users.id = projects.creator
        FULL OUTER JOIN projects_users_junction
        ON projects.id = projects_users_junction.project_id
        FULL OUTER JOIN users AS collaborators
        ON collaborators.id=projects_users_junction.user_id
        WHERE creator = $1
        GROUP BY projects.id;`,
        [req.user.id], function (errorMakingQuery, result) {
          done();
          if (errorMakingQuery) {
            console.log('Error making query', errorMakingQuery);
            res.sendStatus(500);
          } else {
            res.send(result.rows);
          }
        });
    }
  });
});
// GET /collaborator — projects the logged-in user collaborates on, with the
// creator's username and the junction-row id (used for leaving a project).
router.get('/collaborator', function (req, res) {
  pool.connect(function (errorConnectingToDatabase, client, done) {
    if (errorConnectingToDatabase) {
      console.log('Error connecting to database', errorConnectingToDatabase);
      res.sendStatus(500);
    } else {
      client.query(`SELECT users.username, projects.project_name, projects_users_junction.id
        FROM projects JOIN projects_users_junction
        ON projects.id = projects_users_junction.project_id
        JOIN users ON users.id = projects.creator
        WHERE projects_users_junction.user_id = $1;`,
        [req.user.id], function (errorMakingQuery, result) {
          done();
          if (errorMakingQuery) {
            console.log('Error making query', errorMakingQuery);
            res.sendStatus(500);
          } else {
            res.send(result.rows);
          }
        });
    }
  });
});
// DELETE /collaborator — removes one collaborator row (by junction id, passed
// as ?track=) but only if it belongs to the requesting user.
router.delete('/collaborator', function (req, res) {
  pool.connect(function (errorConnectingToDatabase, client, done) {
    if (errorConnectingToDatabase) {
      console.log('Error connecting to database', errorConnectingToDatabase);
      res.sendStatus(500);
    } else {
      client.query(`DELETE FROM projects_users_junction WHERE projects_users_junction.id = $1 AND projects_users_junction.user_id = $2;`,
        [req.query.track, req.user.id],
        function (errorMakingQuery, result) {
          done();
          if (errorMakingQuery) {
            console.log('Error making query', errorMakingQuery);
            res.sendStatus(500);
          } else {
            res.sendStatus(200);
          }
        });
    }
  })
})
// PUT /component — updates a component's synth properties (oscillators and
// volume) by component id from the request body.
router.put('/component', function (req, res) {
  pool.connect(function (errorConnectingToDatabase, client, done) {
    if (errorConnectingToDatabase) {
      console.log('Error connecting to database', errorConnectingToDatabase);
      res.sendStatus(500);
    } else {
      // Fix: removed the stray double semicolon (";;") that terminated the
      // statement in the original query text.
      client.query(`UPDATE component SET osc = $1, osc2 = $2, volume = $3 WHERE component.id = $4;`,
        [req.body.componentSettings.osc, req.body.componentSettings.osc2, req.body.componentSettings.volume, req.body.componentID],
        function (err, result) {
          done()
          if (err) {
            // Log the failure for parity with the other handlers in this router.
            console.log('Error making query', err);
            res.sendStatus(500)
          } else {
            res.sendStatus(200)
          }
        })
    }
  })
})
// POST / — creates a new project (named via ?name=) plus its three default
// components (bass/synth/drum) in a single CTE-based insert. The score
// defaults come from the shared constants module.
router.post('/', function (req, res) {
  pool.connect(function (errorConnectingToDatabase, client, done) {
    if (errorConnectingToDatabase) {
      console.log('Error connecting to database', errorConnectingToDatabase);
      res.sendStatus(500);
    } else {
      client.query(`WITH new_track AS (INSERT INTO projects ("creator", "project_name")
        VALUES ($1, $2) RETURNING id)
        INSERT INTO component ("component_name", "type", "score", "project_id")
        VALUES ( 'bass', 'synth', $3, (SELECT id FROM new_track)),
        ('synth', 'synth', $4, (SELECT id FROM new_track)),
        ('drum', 'drum', $5, (SELECT id FROM new_track));`,
        [req.user.id, req.query.name, constants.stringOf1792zeros, constants.stringOf1792zeros, constants.stringOf128zeros],
        function (errorMakingQuery, result) {
          done();
          if (errorMakingQuery) {
            console.log('Error making query', errorMakingQuery);
            res.sendStatus(500);
          } else {
            res.sendStatus(200);
          }
        });
    }
  })
})
// DELETE / — deletes a project by name (?track=), restricted to its creator.
router.delete('/', function (req, res) {
  pool.connect(function (errorConnectingToDatabase, client, done) {
    if (errorConnectingToDatabase) {
      console.log('Error connecting to database', errorConnectingToDatabase);
      res.sendStatus(500);
    } else {
      client.query(`DELETE FROM projects WHERE projects.project_name = $1 AND projects.creator = $2`,
        [req.query.track, req.user.id],
        function (errorMakingQuery, result) {
          done();
          if (errorMakingQuery) {
            console.log('Error making query', errorMakingQuery);
            res.sendStatus(500);
          } else {
            res.sendStatus(200);
          }
        });
    }
  })
})
// GET /tracks/:name — all component rows (with settings) for a project,
// looked up by project name, ordered by component id.
router.get('/tracks/:name', function (req, res) {
  var name = req.params.name
  pool.connect(function (errorConnectingToDatabase, client, done) {
    if (errorConnectingToDatabase) {
      console.log('Error connecting to database', errorConnectingToDatabase);
      res.sendStatus(500);
    } else {
      client.query(`SELECT component.component_name, component.id, component.score, component.type, component.osc, component.osc2, component.volume, projects.project_name, projects.creator
        FROM component JOIN projects ON component.project_id=projects.id
        WHERE projects.project_name = $1 ORDER BY component.id;`,
        [name],
        function (errorMakingQuery, result) {
          done();
          if (errorMakingQuery) {
            console.log('Error making query', errorMakingQuery);
            res.sendStatus(500);
          } else {
            res.send(result.rows);
          }
        });
    }
  });
})
// PUT /tracks — updates the stored score string for one component, addressed
// by project name + component name (all via query params).
router.put('/tracks', function (req, res) {
  pool.connect(function (err, client, done) {
    if (err) {
      console.log('Error connecting to database', err);
      res.sendStatus(500);
    } else {
      client.query(`UPDATE component as t1 SET score = $1
        FROM projects as t2 WHERE t1.project_id=t2.id
        AND t2.project_name = $2 AND t1.component_name = $3;`,
        [req.query.string, req.query.projectName, req.query.componentName],
        function (err, result) {
          done()
          if (err) {
            // Consistency fix: every other handler in this router logs query
            // failures; this one silently returned 500.
            console.log('Error making query', err);
            res.sendStatus(500)
          } else {
            res.sendStatus(200)
          }
        })
    }
  })
})
// GET /user — resolves a username (?user=) to its user id (array of rows;
// empty when no such user exists).
router.get('/user', function (req, res) {
  pool.connect(function (errorConnectingToDatabase, client, done) {
    if (errorConnectingToDatabase) {
      console.log('Error connecting to database', errorConnectingToDatabase);
      res.sendStatus(500);
    } else {
      client.query(`SELECT users.id FROM users
        WHERE users.username = $1;`,
        [req.query.user],
        function (errorMakingQuery, result) {
          done();
          if (errorMakingQuery) {
            console.log('Error making query', errorMakingQuery);
            res.sendStatus(500);
          } else {
            res.send(result.rows);
          }
        });
    }
  });
})
// POST /user — adds a collaborator (body.user) to a project (body.track);
// the subselect ensures only the project's creator can add collaborators.
router.post('/user', function (req, res) {
  if (req.isAuthenticated()) {
    pool.connect(function (errorConnectingToDatabase, client, done) {
      if (errorConnectingToDatabase) {
        console.log('Error connecting to database', errorConnectingToDatabase);
        res.sendStatus(500);
      } else {
        client.query(`INSERT INTO "projects_users_junction" ("user_id","project_id")
          VALUES ($1, (SELECT id FROM projects WHERE id=$2 AND creator=$3));`,
          [req.body.user, req.body.track, req.user.id],
          function (errorMakingQuery, result) {
            done();
            if (errorMakingQuery) {
              console.log('Error making query', errorMakingQuery);
              res.sendStatus(500);
            } else {
              res.sendStatus(200);
            }
          });
      }
    })
  } else {
    // Fix: an unauthenticated request previously received no response at
    // all, leaving the client hanging until timeout.
    res.sendStatus(403);
  }
})
module.exports = router;
<gh_stars>1-10
import * as assert from 'assert';
import { createCollection, dropCollection } from '../utils/collection.js';
import { insertOne, insertMany } from '../utils/insert.js';
import { MongoDetails } from '../index';
import { ObjectId } from 'mongodb';
// Shared connection details for a local MongoDB; `pipeline` doubles as the
// default (single) document payload in these tests.
const default_args: MongoDetails = {
  uri: 'mongodb://localhost:27017',
  collection: 'insert_collection',
  database: 'insert_database',
  pipeline: { id: 1 },
};

// Integration tests for insertOne/insertMany against a running MongoDB.
describe('Insert tests', () => {
  // Start from a clean, freshly created collection; a missing collection on
  // the first run is expected and tolerated.
  before(async () => {
    await dropCollection(default_args).catch((err) => {
      if (err.toString().includes('MongoServerError: ns not found')) {
        // ok, collection doesn't exist
      } else {
        throw err;
      }
    });
    await createCollection(default_args);
  });

  it('Should insert one document', async () => {
    const insert_document = { _id: 'randomId1253', test: 'Some text' };
    const args = {
      uri: default_args.uri,
      database: default_args.database,
      collection: default_args.collection,
      pipeline: insert_document,
    };
    // insertOne resolves with the inserted _id.
    await insertOne(args)
      .then((res) => {
        assert.equal(res.toString(), insert_document._id);
      })
      .catch((err) => {
        throw err;
      });
  });

  // insertOne must reject an array payload.
  it('Should fail inserting more documents', async () => {
    const args = {
      uri: default_args.uri,
      database: default_args.database,
      collection: default_args.collection,
      pipeline: [{ id: 1 }, { id: 2 }],
    };
    await insertOne(args)
      .then(() => {
        throw new Error('Should fail inserting more documents');
      })
      .catch((err) => {
        assert.match(err.toString(), /Error: Pipeline must be an object/);
      });
  });

  it('Should insert many documents', async () => {
    const insert_documents = [
      { _id: 'randomNewId1253', test: 'Some text' },
      { _id: 'rand1253#$%', test: 'Some other text' },
      { _id: 123334, test: 'Some new text' },
    ];
    const args = {
      uri: default_args.uri,
      database: default_args.database,
      collection: default_args.collection,
      pipeline: insert_documents,
    };
    // insertMany resolves with an index -> insertedId map.
    const expected_result = {
      '0': insert_documents[0]._id,
      '1': insert_documents[1]._id,
      '2': insert_documents[2]._id,
    };
    await insertMany(args)
      .then((res) => {
        assert.deepEqual(res, expected_result);
      })
      .catch((err) => {
        throw err;
      });
  });

  // insertMany must reject a single-object payload.
  it('Should fail inserting single document', async () => {
    await insertMany(default_args)
      .then(() => {
        throw new Error('Should fail inserting single document');
      })
      .catch((err) => {
        assert.match(err.toString(), /Error: Pipeline must be an array/);
      });
  });
});
|
import React, { useMemo } from 'react';
import { useFocusIdx } from '@/hooks/useFocusIdx';
import { SelectField } from '@/components/core/Form';
// Selectable CSS `text-decoration` values for the editor dropdown.
// NOTE(review): 'blink' is obsolete and ignored by modern browsers/email
// clients — confirm it should still be offered.
const options = [
  {
    value: '',
    label: 'None',
  },
  {
    value: 'underline',
    label: 'Underline',
  },
  {
    value: 'overline',
    label: 'Overline',
  },
  {
    value: 'line-through',
    label: 'Line through',
  },
  {
    value: 'blink',
    label: 'Blink',
  },
  {
    value: 'inherit',
    label: 'Inherit',
  },
];
/**
 * Attribute editor field: lets the user pick the `text-decoration` value for
 * the currently focused block (bound to `<idx>.attributes.text-decoration`).
 */
export function TextDecoration() {
  const { focusIdx } = useFocusIdx();

  // Memoized on focusIdx so the field only re-renders when focus moves.
  return useMemo(() => {
    return (
      <SelectField
        label='Text decoration'
        name={`${focusIdx}.attributes.text-decoration`}
        options={options}
        inline
      />
    );
  }, [focusIdx]);
}
|
#!/usr/bin/env bash
# Wrapper around swagger-codegen-cli: resolves its own real location through
# symlinks, builds the CLI jar if missing, then generates the pistache-server
# petstore sample.

# Follow symlinks to find the script's true path.
SCRIPT="$0"
while [ -h "$SCRIPT" ] ; do
  ls=`ls -ld "$SCRIPT"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=`dirname "$SCRIPT"`/"$link"
  fi
done

if [ ! -d "${APP_DIR}" ]; then
  APP_DIR=`dirname "$SCRIPT"`/..
  APP_DIR=`cd "${APP_DIR}"; pwd`
fi

executable="./modules/swagger-codegen-cli/target/swagger-codegen-cli.jar"

# Build the CLI once if the jar is not present.
if [ ! -f "$executable" ]
then
  mvn clean package
fi

# if you've executed sbt assembly previously it will use that instead.
# NOTE(review): -XX:MaxPermSize was removed in Java 8; harmless but obsolete
# on modern JVMs — confirm the targeted Java version.
export JAVA_OPTS="${JAVA_OPTS} -XX:MaxPermSize=256M -Xmx1024M -DloggerPath=conf/log4j.properties"
ags="$@ generate -l pistache-server -i modules/swagger-codegen/src/test/resources/2_0/petstore.yaml -o samples/server/petstore/pistache-server"

java $JAVA_OPTS -jar $executable $ags
|
# Create the Udagram infrastructure CloudFormation stack in us-east-1 from the
# template and parameter files in this directory.
aws cloudformation create-stack \
--stack-name UdagramInfra \
--template-body file://infra.yml \
--parameters file://infra.json \
--region=us-east-1
# Wipe and recreate the Flask instance folder and SQLite database, then run
# the full Flask-Migrate cycle from scratch.
rm -rf instance/
mkdir instance
touch instance/app.db
# NOTE(review): creating tables via `manage.py db create_all` *and* running
# Flask-Migrate (init/migrate/upgrade) is redundant and can make the first
# migration a no-op — confirm both steps are intended.
python manage.py db create_all
flask db init
flask db migrate -m "initial migration"
flask db upgrade
The Tower of Hanoi problem can be solved by following the below recursive algorithm:
1. Move the top n-1 discs from the source tower (A) to the buffer tower (B), using the destination tower (C) as an intermediate.
2. Move the nth (largest) disc from the source tower (A) to the destination tower (C).
3. Move the n-1 discs from the buffer tower (B) to the destination tower (C), using the source tower (A) as an intermediate.
<reponame>kastelo-labs/Nuitka
# Copyright 2020, <NAME>, mailto:<EMAIL>
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Reformulation of dictionary creations.
Dictionary creations might be directly translated to constants, or they might
become nodes that build dictionaries.
For Python3.5, unpacking can happen while creating dictionaries, these are
being re-formulated to an internal function.
Consult the developer manual for information. TODO: Add ability to sync
source code comments with developer manual sections.
"""
from nuitka.nodes.AssignNodes import (
StatementAssignmentVariable,
StatementDelVariable,
StatementReleaseVariable,
)
from nuitka.nodes.AttributeNodes import ExpressionAttributeLookup
from nuitka.nodes.BuiltinIteratorNodes import ExpressionBuiltinIter1
from nuitka.nodes.BuiltinNextNodes import ExpressionBuiltinNext1
from nuitka.nodes.ConstantRefNodes import makeConstantRefNode
from nuitka.nodes.ContainerMakingNodes import ExpressionMakeTuple
from nuitka.nodes.DictionaryNodes import (
ExpressionKeyValuePair,
ExpressionMakeDict,
StatementDictOperationUpdate,
)
from nuitka.nodes.ExceptionNodes import (
ExpressionBuiltinMakeException,
StatementRaiseException,
)
from nuitka.nodes.FunctionNodes import (
ExpressionFunctionCall,
ExpressionFunctionCreation,
ExpressionFunctionRef,
)
from nuitka.nodes.LoopNodes import StatementLoop, StatementLoopBreak
from nuitka.nodes.OperatorNodes import makeBinaryOperationNode
from nuitka.nodes.ReturnNodes import StatementReturn
from nuitka.nodes.TypeNodes import ExpressionBuiltinType1
from nuitka.nodes.VariableRefNodes import (
ExpressionTempVariableRef,
ExpressionVariableRef,
)
from nuitka.PythonVersions import python_version
from nuitka.specs.ParameterSpecs import ParameterSpec
from .InternalModule import (
internal_source_ref,
makeInternalHelperFunctionBody,
once_decorator,
)
from .ReformulationTryExceptStatements import makeTryExceptSingleHandlerNode
from .ReformulationTryFinallyStatements import makeTryFinallyStatement
from .TreeHelpers import (
buildNode,
buildNodeList,
makeDictCreationOrConstant,
makeStatementsSequenceFromStatement,
makeStatementsSequenceFromStatements,
)
def buildDictionaryNode(provider, node, source_ref):
    """Create the node tree for a dictionary literal.

    Plain literals become a constant or a dictionary-making node.  From
    Python3.5 on, a literal containing "**" entries (represented by a
    None key in the AST) is re-formulated via the unpacking helper.
    """
    has_unpacking = python_version >= 350 and any(
        key is None for key in node.keys
    )

    if has_unpacking:
        return buildDictionaryUnpacking(
            provider=provider, node=node, source_ref=source_ref
        )

    return makeDictCreationOrConstant(
        keys=buildNodeList(provider, node.keys, source_ref),
        values=buildNodeList(provider, node.values, source_ref),
        source_ref=source_ref,
    )
@once_decorator
def getDictUnpackingHelper():
    """Build the internal helper used by "{**a, **b}" reformulation.

    Creates (once, the result is cached by @once_decorator) the function
    body of "_unpack_dict(*args)", which iterates its arguments, updating
    a fresh dictionary with each of them, and raises the TypeError that
    CPython gives for non-mapping arguments.
    """
    helper_name = "_unpack_dict"
    result = makeInternalHelperFunctionBody(
        name=helper_name,
        parameters=ParameterSpec(
            ps_name=helper_name,
            ps_normal_args=(),
            ps_list_star_arg="args",
            ps_dict_star_arg=None,
            ps_default_count=0,
            ps_kw_only_args=(),
            ps_pos_only_args=(),
        ),
    )
    temp_scope = None
    # Temporaries: the dict being built, the iterator over "args", and the
    # current item taken from that iterator.
    tmp_result_variable = result.allocateTempVariable(temp_scope, "dict")
    tmp_iter_variable = result.allocateTempVariable(temp_scope, "iter")
    tmp_item_variable = result.allocateTempVariable(temp_scope, "keys")
    # Loop body: fetch the next argument (break on StopIteration), then do
    # "tmp_dict.update(tmp_item)"; an AttributeError from that update is
    # re-raised as the matching CPython TypeError for non-mappings.
    loop_body = makeStatementsSequenceFromStatements(
        makeTryExceptSingleHandlerNode(
            tried=StatementAssignmentVariable(
                variable=tmp_item_variable,
                source=ExpressionBuiltinNext1(
                    value=ExpressionTempVariableRef(
                        variable=tmp_iter_variable, source_ref=internal_source_ref
                    ),
                    source_ref=internal_source_ref,
                ),
                source_ref=internal_source_ref,
            ),
            exception_name="StopIteration",
            handler_body=StatementLoopBreak(source_ref=internal_source_ref),
            source_ref=internal_source_ref,
        ),
        makeTryExceptSingleHandlerNode(
            tried=StatementDictOperationUpdate(
                dict_arg=ExpressionTempVariableRef(
                    variable=tmp_result_variable, source_ref=internal_source_ref
                ),
                value=ExpressionTempVariableRef(
                    variable=tmp_item_variable, source_ref=internal_source_ref
                ),
                source_ref=internal_source_ref,
            ),
            exception_name="AttributeError",
            handler_body=StatementRaiseException(
                exception_type=ExpressionBuiltinMakeException(
                    exception_name="TypeError",
                    args=(
                        makeBinaryOperationNode(
                            operator="Mod",
                            left=makeConstantRefNode(
                                constant="""\
'%s' object is not a mapping""",
                                source_ref=internal_source_ref,
                                user_provided=True,
                            ),
                            right=ExpressionMakeTuple(
                                elements=(
                                    ExpressionAttributeLookup(
                                        expression=ExpressionBuiltinType1(
                                            value=ExpressionTempVariableRef(
                                                variable=tmp_item_variable,
                                                source_ref=internal_source_ref,
                                            ),
                                            source_ref=internal_source_ref,
                                        ),
                                        attribute_name="__name__",
                                        source_ref=internal_source_ref,
                                    ),
                                ),
                                source_ref=internal_source_ref,
                            ),
                            source_ref=internal_source_ref,
                        ),
                    ),
                    source_ref=internal_source_ref,
                ),
                exception_value=None,
                exception_trace=None,
                exception_cause=None,
                source_ref=internal_source_ref,
            ),
            source_ref=internal_source_ref,
        ),
    )
    args_variable = result.getVariableForAssignment(variable_name="args")
    # Cleanup, also run on exception exit: release all temporaries and drop
    # the "args" reference, which this helper owns.
    final = (
        StatementReleaseVariable(
            variable=tmp_result_variable, source_ref=internal_source_ref
        ),
        StatementReleaseVariable(
            variable=tmp_iter_variable, source_ref=internal_source_ref
        ),
        StatementReleaseVariable(
            variable=tmp_item_variable, source_ref=internal_source_ref
        ),
        # We get handed our args responsibility.
        StatementDelVariable(
            variable=args_variable, tolerant=False, source_ref=internal_source_ref
        ),
    )
    # Main sequence: iter(args); result = {}; the update loop; return result.
    tried = makeStatementsSequenceFromStatements(
        StatementAssignmentVariable(
            variable=tmp_iter_variable,
            source=ExpressionBuiltinIter1(
                value=ExpressionVariableRef(
                    variable=args_variable, source_ref=internal_source_ref
                ),
                source_ref=internal_source_ref,
            ),
            source_ref=internal_source_ref,
        ),
        StatementAssignmentVariable(
            variable=tmp_result_variable,
            source=makeConstantRefNode(constant={}, source_ref=internal_source_ref),
            source_ref=internal_source_ref,
        ),
        StatementLoop(body=loop_body, source_ref=internal_source_ref),
        StatementReturn(
            expression=ExpressionTempVariableRef(
                variable=tmp_result_variable, source_ref=internal_source_ref
            ),
            source_ref=internal_source_ref,
        ),
    )
    result.setBody(
        makeStatementsSequenceFromStatement(
            makeTryFinallyStatement(
                provider=result,
                tried=tried,
                final=final,
                source_ref=internal_source_ref,
            )
        )
    )
    return result
def buildDictionaryUnpackingArgs(provider, keys, values, source_ref):
    """Build the argument nodes for the dict unpacking helper.

    Each "**" entry (key is None) becomes the mapping expression itself;
    every ordinary key/value pair is wrapped into a one-element dict node
    so the helper can merge them uniformly.
    """

    def _pair_dict(key_node, value_node):
        # One-entry dictionary node holding a single key/value pair.
        return ExpressionMakeDict(
            pairs=(
                ExpressionKeyValuePair(
                    key=key_node, value=value_node, source_ref=source_ref
                ),
            ),
            source_ref=source_ref,
        )

    result = []

    for key, value in zip(keys, values):
        # TODO: We could be a lot cleverer about the dictionaries for
        # non-starred arguments, but lets get this to work first.
        if key is None:
            result.append(buildNode(provider, value, source_ref))
        elif type(key) is str:
            result.append(
                _pair_dict(
                    makeConstantRefNode(constant=key, source_ref=source_ref),
                    buildNode(provider, value, source_ref),
                )
            )
        else:
            result.append(
                _pair_dict(
                    buildNode(provider, key, source_ref),
                    buildNode(provider, value, source_ref),
                )
            )

    return result
def buildDictionaryUnpacking(provider, node, source_ref):
    """Re-formulate a dict literal with "**" entries into a helper call.

    The prepared argument nodes are passed as one tuple to the internal
    "_unpack_dict" helper, which merges them at run time.
    """
    helper_args = buildDictionaryUnpackingArgs(
        provider, node.keys, node.values, source_ref
    )

    helper_creation = ExpressionFunctionCreation(
        function_ref=ExpressionFunctionRef(
            function_body=getDictUnpackingHelper(), source_ref=source_ref
        ),
        defaults=(),
        kw_defaults=None,
        annotations=None,
        source_ref=source_ref,
    )

    result = ExpressionFunctionCall(
        function=helper_creation,
        values=(ExpressionMakeTuple(helper_args, source_ref),),
        source_ref=source_ref,
    )

    # Keep source compatibility with the last argument, as CPython does.
    result.setCompatibleSourceReference(helper_args[-1].getCompatibleSourceReference())

    return result
|
#!/bin/bash
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Install the joy detection demo as a systemd service (requires root).
readonly SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Symlink the unit file into systemd's search path, then register and
# enable it so the demo starts on boot.
ln -fs ${SCRIPT_DIR}/joy_detection_demo.service /lib/systemd/system/joy_detection_demo.service
systemctl daemon-reload
systemctl enable joy_detection_demo.service
|
# frozen_string_literal: true
# Renders the acting order of round entities (players/corporations) as a
# horizontally scrolling strip, highlighting whichever entity may act and
# auto-scrolling the current entity into view.
class EntityOrder < Snabberb::Component
  needs :round

  def render
    divs = @round.entities.map.with_index do |entity, index|
      entity_props = {
        key: "entity_#{index}",
        style: {
          display: 'inline-block',
          height: '1.5rem',
          'vertical-align': 'top',
          'margin-right': '1rem',
          'white-space': 'nowrap'
        },
      }

      if @round.current_entity == entity
        # Scroll the overflowing container so the current entity is visible;
        # hooked into both vnode insertion and updates.
        scroll_to = lambda do |vnode|
          elm = Native(vnode)['elm']
          elm['parentElement']['parentElement'].scrollLeft = elm['offsetLeft'] - 10
        end
        entity_props[:hook] = {
          insert: scroll_to,
          update: ->(_, vnode) { scroll_to.call(vnode) }
        }
      end

      style = entity_props[:style]
      # Entities that can act are emphasised.
      if @round.can_act?(entity)
        style['text-decoration'] = 'underline'
        style['font-weight'] = 'bold'
      end

      # Every entity after the first gets a divider.
      if index.positive?
        style['border-left'] = 'black solid thin'
        style['padding-left'] = '1rem'
      end

      children = []
      if entity.corporation?
        # Corporations show their logo before the name.
        logo_props = {
          attrs: {
            src: entity.logo,
          },
          style: {
            'max-height': '1.2rem',
            padding: '0 .4rem 0 0',
          },
        }
        logo_container_props = {
          style: {
            height: '100%',
            display: 'inline-block',
            'vertical-align': 'top',
          },
        }
        children << h(:span, logo_container_props, [h(:img, logo_props)])
      end

      # Non-player entities with an owner show the owner in parentheses.
      owner = " (#{entity.owner.name})" if !entity.player? && entity.owner
      children << h(:span, "#{entity.name}#{owner}")

      h(:div, entity_props, children)
    end

    props = {
      key: 'entity_order',
      style: {
        margin: '1rem 0 1rem 0',
        'font-size': '1.1rem',
        overflow: 'auto',
      },
    }

    container_props = {
      style: {
        width: 'max-content',
        'margin-bottom': '0.5rem',
      },
      key: 'entity_order_container',
    }

    h(:div, props, [
      h(:div, container_props, divs),
    ])
  end
end
|
<reponame>frontendara/amandes
/*
* Copyright 2016 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { suite, test, assert } from 'vitest';
import sinon from 'sinon';
import wait from '../../test/wait';
import defer from './defer';
// Tests for defer(): the callback must run asynchronously and receive
// exactly the arguments supplied (or none at all).
suite('defer', function () {
  test('without arguments', function (done) {
    var spy = sinon.spy();
    defer(spy);
    // The call is asynchronous, so wait until the spy has been invoked.
    wait.untilSpyCalled(spy, function () {
      assert.isTrue(spy.calledWithExactly());
      done();
    });
  });
  test('with arguments', function (done) {
    var spy = sinon.spy();
    defer(spy, [1, 2, 3]);
    wait.untilSpyCalled(spy, function () {
      // The args array is applied as individual call arguments.
      assert.isTrue(spy.calledWithExactly(1, 2, 3));
      done();
    });
  });
});
|
#! /usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Location of the continuous-ingest test configuration; defaults to the
# copy shipped inside $ACCUMULO_HOME.
CONTINUOUS_CONF_DIR=${CONTINUOUS_CONF_DIR:-$ACCUMULO_HOME/test/system/continuous/}
# Pull in environment settings ($CONTINUOUS_LOG_DIR, $USER, $PASS).
. $CONTINUOUS_CONF_DIR/continuous-env.sh

./analyze-missing.pl $ACCUMULO_HOME $CONTINUOUS_LOG_DIR $USER $PASS
|
<gh_stars>0
#!/usr/bin/python
import sys, imaplib, smtplib


def send_email(to, name, msgError):
    """E-mail an error notification about the named data set.

    Args:
        to: recipient address.
        name: label identifying the data the error relates to.
        msgError: error text used as the message body.
    """
    FROM = '<EMAIL>'
    TO = [to]
    SUBJECT = 'An error has ocurred with your data (' + name + ')'
    TEXT = msgError
    # Build an RFC 822 style message.  The original literal began with a
    # stray "\F" which corrupted the "From:" header.
    message = "From: %s\nTo: %s\nSubject: %s\n\n%s" % (
        FROM, ", ".join(TO), SUBJECT, TEXT)
    try:
        server = smtplib.SMTP("smtp.gmail.com", 587)
        server.ehlo()
        server.starttls()
        # TODO: put the real password instead of "???" (the original note
        # was left as bare text after the call, a SyntaxError); better yet,
        # read it from an environment variable instead of hard-coding it.
        server.login("<EMAIL>", "???")
        server.sendmail(FROM, TO, message)
        server.close()
    except Exception as exc:
        # Was a bare "except:" with a Python 2 print statement; report the
        # failure reason without crashing the caller.
        print("ERROR: failed to send mail: %s" % exc)


send_email(sys.argv[1], sys.argv[2], sys.argv[3])
|
<filename>FeVER/dataset.py
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
#
# Author: <NAME> - <EMAIL>
# Blog: zhouyichu.com
#
# Python release: 3.6.0
#
# Date: 2018-11-14 13:16:12
# Last modified: 2019-04-05 09:53:40
"""
Loading the training data.
"""
import multiprocessing as mp
import math
import os
import logging
import random
import torch
import numpy as np
import Logger
from FeVER.utils import _parse_line
import FeVER.utils as utils
# Short aliases for the torch tensor constructors used throughout this module.
LongTensor = torch.LongTensor
FloatTensor = torch.FloatTensor
Tensor = torch.Tensor
# Done = mp.Event()
def _processor(raw_queue, data_queue,
               label_feat, sampleTable, pid, args):
    """Worker-process loop: turn raw text batches into collated batches.

    Consumes line batches from ``raw_queue``, parses/preprocesses them,
    and pushes collated batches of exactly ``args.batch_size`` records
    onto ``data_queue``.  A ``None`` on ``raw_queue`` signals shutdown;
    it is put back for the sibling workers and forwarded to the consumer.

    NOTE(review): leftover records smaller than a full batch are dropped
    on shutdown — confirm this is intended.
    """
    packs = []
    while True:
        batch = raw_queue.get()
        if batch is None:
            # Re-queue the sentinel so every worker sees it, then tell the
            # consumer that this worker is done.
            raw_queue.put(None)
            data_queue.put(None)
            # Done.wait()
            break
        else:
            for item in batch:
                # item = (training line, counts line, weights line)
                line = item[0]
                y, x = _parse_line(line)
                s = item[1].strip().split(' ')
                counts = [int(t) for t in s]
                s = item[2].strip().split()
                weights = [float(t) for t in s]
                packs += _preprocess(x, y, counts, weights, args)
            # Emit one full batch once enough records have accumulated;
            # the remainder is kept for the next iteration.
            if len(packs) >= args.batch_size:
                value = packs[:args.batch_size]
                value = _collate_fn(value, label_feat, sampleTable, args)
                data_queue.put(value)
                packs = packs[args.batch_size:]
def _ng_sampler(ng_size, batch_size, sp_y,
                label_feat, sampleTable, args):
    """ Negative sampling.

    Args:
        ng_size: int, the size of each negative block.
        batch_size: int
        sp_y: list - The index of golden label(s) per example.
        label_feat: list - A list of feature index.
        sampleTable: sampling table.

    Returns:
        reval: flat list of label feature lists, batch_size * ng_size long.
        sample_index: per-example position(s) of the golden label(s)
            within its block (0 in local mode, a list in global mode).
    """
    reval = []
    sample_index = []
    assert batch_size == len(sp_y)
    for i in range(batch_size):
        tmp = []
        if args.modeling in utils.LOCAL:
            # Put the golden label feature first, then fill the rest of the
            # block with negatives, so the golden id is always position 0.
            tmp.append(label_feat[sp_y[i]])
            # Gather negative samples.
            samples = sampleTable.sample(ng_size-1)
            for j in samples:
                tmp.append(label_feat[j])
            sample_index.append(0)
        elif args.modeling in utils.GLOBAL:
            # Keep at most ng_size of the example's golden labels, chosen
            # uniformly at random, then pad the block with negatives.
            n = min(ng_size, len(sp_y[i]))
            # Put golden label features
            sample_index.append(random.sample(range(len(sp_y[i])),
                                              n))
            for j in sample_index[-1]:
                tmp.append(label_feat[sp_y[i][j]])
            # Gather negative samples.
            if len(tmp) < ng_size:
                samples = sampleTable.sample(ng_size-n)
                for j in samples:
                    tmp.append(label_feat[j])
        else:
            raise Exception('Wrong mode !')
        reval += tmp
    assert len(reval) == batch_size * ng_size
    return reval, sample_index
def _raw_reader(trainFile, countFile, weightFile, raw_queue, batch_size):
"""
"""
batch = []
for item, count, weight in zip(trainFile, countFile, weightFile):
batch.append((item, count, weight))
if len(batch) == batch_size:
raw_queue.put(batch)
batch = []
if len(batch) != 0:
raw_queue.put(batch)
batch = []
raw_queue.put(None)
def _preprocess(sp_x, sp_y, counts, weights, args):
    """
    Reorganize the dataset according to normalization.
    If it is global, it remains the same.
    If it is local, the data will be flattened into one record per label.
    """
    if args.modeling not in utils.LOCAL:
        # Global mode: keep the example as a single record.
        return [(sp_x, sp_y, counts, weights)]

    # Local mode: one (features, label, count, weight) record per label.
    assert len(sp_y) == len(counts)
    assert len(sp_y) == len(weights)
    return [(sp_x, y, c, w) for y, c, w in zip(sp_y, counts, weights)]
def _collate_fn(data, label_feat, sampleTable, args):
    """Collate preprocessed records into one training batch.

    Applies feature dropout to the inputs, draws negative label samples,
    and bags both sides into (tensor, offsets) pairs.

    Returns:
        ((sx, xoffsets), y, counts, weights, (sy, yoffsets)) where the
        shape/type of y, counts and weights depends on args.modeling.
    """
    prob = args.dropout_prob
    x = [t[0] for t in data]
    # x = data[0]
    sx, xoffsets = _tensor(x, prob)
    y = [t[1] for t in data]
    # Perform negative sampling
    sample, sample_index = _ng_sampler(args.ng_size, len(data), y,
                                       label_feat, sampleTable, args)
    # Bagging (no dropout on the label side: keep probability 1.0).
    sy, yoffsets = _tensor(sample, 1.0)
    counts = [t[2] for t in data]
    weights = [t[3] for t in data]
    # y = data[1]
    # counts = data[2]
    # weights = data[3]
    if args.modeling in utils.LOCAL:
        # Because of negative sampling, golden id
        # is always 0.
        y = LongTensor([0]*len(y))
        counts = FloatTensor(counts)
        weights = FloatTensor(weights)
    if args.modeling in utils.GLOBAL:
        # Because of negative sampling, golden id
        # is always range(len(w)).  Counts and weights are re-indexed to
        # match the golden labels actually kept by the sampler.
        y = [list(range(min(len(w), args.ng_size))) for w in y]
        tcounts = []
        tweights = []
        for i, index in enumerate(sample_index):
            tcounts.append([counts[i][j] for j in index])
            tweights.append([weights[i][j] for j in index])
        counts = tcounts
        weights = tweights
    return ((sx, xoffsets), y, counts, weights, (sy, yoffsets))
def _tensor(data: list, prob: float):
    """This function applies following things:
       1. Apply dropout (keep each feature with probability ``prob``)
       2. bagging (flatten the lists and record per-example offsets)
       3. Transfer to tensor

    Returns:
        (s, offsets): flattened LongTensor of feature ids and a LongTensor
        of start offsets, as consumed by EmbeddingBag-style lookups.
    """
    x = utils.dropout(data, prob)
    s, offsets = utils.bag(x)
    s = torch.from_numpy(s).long()
    offsets = torch.from_numpy(offsets).long()
    return s, offsets
class _DataSet:
    def __init__(self, args, label_feat, sampleTable):
        """Iterator of reading large file.

        Spawns one reader process feeding ``raw_queue`` and
        ``args.num_workers`` processor workers feeding ``data_queue``.
        Iteration ends once every worker has sent its None sentinel.

        NOTE(review): the file handles are opened here and passed to the
        forked reader process; the parent's handles close when __init__
        returns — this relies on fork-style process start. Verify on
        platforms using spawn.
        """
        # Done.clear()
        self.data_queue = mp.Manager().Queue(20)
        # self.data_queue = mp.Queue(20)
        self.raw_queue = mp.Queue(20)
        self.args = args
        self.label_feat = label_feat
        self.sampleTable = sampleTable
        with open(args.train_file, encoding='utf8') as trainFile:
            with open(args.counts_file, encoding='utf8') as countFile:
                with open(args.weight_file, encoding='utf8') as weightFile:
                    # The first line of the training file holds meta
                    # numbers; nums[0] is the number of instances.
                    nums = next(trainFile)
                    nums = nums.split()
                    self.length = math.ceil(int(nums[0]) / args.batch_size)
                    self.counter = 0
                    self.workers = [mp.Process(
                        target=_processor,
                        args=(self.raw_queue, self.data_queue,
                              label_feat, sampleTable, i, args))
                        for i in range(args.num_workers)]
                    self.reader = mp.Process(
                        target=_raw_reader,
                        args=(trainFile, countFile, weightFile,
                              self.raw_queue, args.batch_size))
                    self.reader.daemon = True
                    self.reader.start()
                    for w in self.workers:
                        w.daemon = True
                        w.start()

    def __next__(self):
        # Keep yielding batches until all workers reported their sentinel.
        while self.counter != self.args.num_workers:
            value = self.data_queue.get()
            if value is not None:
                # value = _collate_fn(value, self.label_feat,
                #                     self.sampleTable, self.args)
                return value
            else:
                self.counter += 1
        # Done.set()
        # All data consumed: reap the child processes before stopping.
        self.reader.join()
        for worker in self.workers:
            worker.join()
        raise StopIteration

    def __len__(self):
        # Number of batches in one pass over the training file.
        return self.length
class XMLDataset:
    # Top-level training dataset: loads label features, builds the negative
    # sampling table and per-label weights; iterating it yields batches via
    # a fresh _DataSet (reader + worker processes) each time.
    def __init__(self, args):
        self._args = args
        logger = logging.getLogger(Logger.project_name)
        train_path = args.train_file
        counts_path = args.counts_file
        label_path = args.label_file
        # Reading meta info
        s = 'Reading meta information...'
        logger.info(s)
        nums = utils.read_meta(train_path)
        self._input_feat_num = nums.feat_num
        # Reading label features
        s = 'Reading label features...'
        logger.info(s)
        nums, label_feat, _ = utils.read_binary(label_path, args)
        self._label_feat_num = nums.feat_num
        self._label_num = nums.ins_num
        self._label_feat = label_feat
        self._local = utils.LOCAL
        self._glob = utils.GLOBAL
        s = 'Reading labels ...'
        logger.info(s)
        y = utils.read_labels(train_path, args)
        # Reading counts
        s = 'Reading counts ...'
        logger.info(s)
        counts = utils.read_counts(counts_path)
        s = 'Gather the frequency of labels'
        logger.info(s)
        # Gather the frequency of labels
        table = self._gather_frequency(y, counts)
        s = 'Construct unigram table...'
        self.sampleTable = UnigramTable(table)
        # Compute and cache per-label weights only when no cache exists.
        # NOTE(review): self.total_label_count is only set on this path,
        # but _weight_func('cbow') reads it unconditionally — verify the
        # 'cbow' weighting is never used with a pre-existing weight file.
        if not os.path.exists(args.weight_file):
            total_label_count = [count for count in table.values()]
            self.total_label_count = sum(total_label_count)
            s = 'Calculating the weights...'
            logger.info(s)
            # Calculate the weights
            weights = self._cal_weights(y, table)
            s = 'Writing the weights...'
            logger.info(s)
            utils.write_weights(args.weight_file, weights)
            # Free the large intermediates.
            del table
            del y
            del weights
        # s = 'Bagging the label features...'
        # logger.info(s)
        # self._label_feat = self._bagging(self._label_feat)

    def __iter__(self):
        # A new _DataSet (with fresh child processes) per epoch.
        return _DataSet(self._args, self._label_feat, self.sampleTable)

    def _bagging(self, features: list):
        """Bagging the features.

        Args:
            features: A list of np.int32
        """
        label_feat, offsets = utils.bag(features)
        label_feat = torch.from_numpy(label_feat).long()
        offsets = torch.from_numpy(offsets).long()
        if self._args.cuda is True:
            offsets = offsets.cuda(utils.CUDA0)
            label_feat = label_feat.cuda(utils.CUDA0)
        return (label_feat, offsets)

    @property
    def input_feat_num(self):
        # Number of distinct input-side features.
        return self._input_feat_num

    @property
    def label_feat_num(self):
        # Number of distinct label-side features.
        return self._label_feat_num

    @property
    def label_num(self):
        # Number of labels (label instances).
        return self._label_num

    @property
    def label_feat(self):
        return self._label_feat

    def _cal_weights(self, sp_y: list, table: dict)->list:
        """ Calculate the weights for each label of each example.
        """
        weights = []
        for Y in sp_y:
            tmp = [self._weight_func(table[i]) for i in Y]
            weights.append(tmp)
        return weights

    def _weight_func(self, freq: float)->float:
        """Calculate the weight based on given frequency.
        """
        args = self._args
        if args.weight == '1':
            # Uniform weighting.
            return 1
        elif args.weight == 'frac':
            # Power-law weighting with exponent alpha.
            return freq ** (self._args.alpha)
        elif args.weight == 'cbow':
            # word2vec-style subsampling weight, capped at 1.
            t = 0.001 * self.total_label_count
            t = t/freq
            x = (t)**(0.5) + t
            return min(x, 1)
        else:
            raise Exception('Undefined weight function !')

    def _gather_frequency(self, y: list, counts: list)-> dict:
        """Gather a frequency table for given dataset.

        y: list(list(int))
        counts: list(list(int))
        """
        table = dict()
        assert len(y) == len(counts)
        for Y, C in zip(y, counts):
            assert len(Y) == len(C)
            for i, c in zip(Y, C):
                table[i] = table.get(i, 0) + c
        return table
class UnigramTable:
    """
    A list of indices of tokens in the vocab following a power law distribution
    used to draw negative samples.
    """
    def __init__(self, vocab: dict):
        # vocab maps label id -> frequency count.
        logger = logging.getLogger(Logger.project_name)
        power = 0.75
        # Normalizing constant
        norm = sum([math.pow(t, power) for t in vocab.values()])
        # Length of the unigram table (1e8 uint32 entries, ~400MB).
        table_size = int(1e8)
        table = np.zeros(table_size, dtype=np.uint32)
        s = 'Sorting the vocabulary...'
        logger.info(s)
        tmp = [(key, value) for key, value in vocab.items()]
        tmp.sort(key=lambda x: x[1], reverse=True)
        s = 'Filling unigram table'
        logger.info(s)
        # Cumulative probability: each label id fills a slice of the table
        # proportional to freq^0.75, so uniform draws follow the
        # smoothed unigram distribution.
        p = 0
        i = 0
        for key, count in tmp:
            p += float(math.pow(count, power))/norm
            while i < table_size and float(i) / table_size < p:
                table[i] = key
                i += 1
        self.table = table

    def sample(self, ng_size):
        # Draw ng_size label ids by sampling table slots uniformly.
        indices = np.random.randint(low=0, high=len(self.table), size=ng_size)
        return [self.table[i] for i in indices]
|
-- Top 5 products by average quantity per sale over the last month.
SELECT p.name, AVG(s.quantity) as avg_sales
FROM products p
JOIN sales s
ON s.product_id = p.id
-- Restrict to sales dated within the last month.
WHERE s.date >= DATE_SUB(CURRENT_DATE(), INTERVAL 1 MONTH)
GROUP BY p.name
ORDER BY avg_sales DESC
LIMIT 5
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.2-147
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.26 at 02:04:22 PM MST
//
package net.opengis.gml._311;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* Its optional co-boundary is a set of connected directedEdges. The orientation of one of these dirEdges is "+" if the Node is the "to" node of the Edge, and "-" if it is the "from" node.
*
* <p>Java class for NodeType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="NodeType">
* <complexContent>
* <extension base="{http://www.opengis.net/gml}AbstractTopoPrimitiveType">
* <sequence>
* <element ref="{http://www.opengis.net/gml}directedEdge" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://www.opengis.net/gml}pointProperty" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "NodeType", propOrder = {
    "directedEdge",
    "pointProperty"
})
public class NodeType
    extends AbstractTopoPrimitiveType
{

    // JAXB-mapped content model: optional co-boundary edges and an
    // optional geometric realization of the node as a point.
    // (Generated code — regenerate from the schema rather than editing.)
    protected List<DirectedEdgePropertyType> directedEdge;
    protected PointPropertyType pointProperty;

    /**
     * Gets the value of the directedEdge property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the directedEdge property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getDirectedEdge().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link DirectedEdgePropertyType }
     */
    public List<DirectedEdgePropertyType> getDirectedEdge() {
        if (directedEdge == null) {
            directedEdge = new ArrayList<DirectedEdgePropertyType>();
        }
        return this.directedEdge;
    }

    /**
     * Gets the value of the pointProperty property.
     *
     * @return
     *     possible object is
     *     {@link PointPropertyType }
     */
    public PointPropertyType getPointProperty() {
        return pointProperty;
    }

    /**
     * Sets the value of the pointProperty property.
     *
     * @param value
     *     allowed object is
     *     {@link PointPropertyType }
     */
    public void setPointProperty(PointPropertyType value) {
        this.pointProperty = value;
    }

}
|
<filename>EIDSS v6.1/Android/app/src/main/java/com/bv/eidss/model/FFTypesEnum.java<gh_stars>1-10
package com.bv.eidss.model;
/**
* Created by Eugene on 18.05.2015.
*/
/**
 * Flexible-form (FF) type identifiers used throughout EIDSS.
 * Values are fixed database reference ids; do not renumber.
 */
public class FFTypesEnum {
    public static final long None = 0;
    public static final long HumanLabTestsDetails = 10034003;
    public static final long Reports = 10034004;
    public static final long AggregateCase = 10034005;
    public static final long AvianFarmEPI = 10034007;
    public static final long AvianSpeciesCS = 10034008;
    public static final long FarmDetails = 10034009;
    public static final long HumanClinicalSigns = 10034010;
    public static final long HumanEpiInvestigations = 10034011;
    public static final long HumanAggregateCase = 10034012;
    public static final long LivestockAnimalCS = 10034013;
    public static final long LivestockControlMeasures = 10034014;
    public static final long LivestockFarmEPI = 10034015;
    public static final long LivestockSpeciesCS = 10034016;
    public static final long ServiceType = 10034017;
    public static final long TestDetails = 10034018;
    public static final long TestRun = 10034019;
    public static final long VetLabTestDetails = 10034020;
    public static final long VetAggregateCase = 10034021;
    /** Sanitary action. */
    public static final long VetEpizooticAction = 10034022;
    /** Diagnostic action. */
    public static final long VetEpizooticActionDiagnosisInv = 10034023;
    /** Prophylactic action. */
    public static final long VetEpizooticActionTreatment = 10034024;
    public static final long WildVetEpiInvestigations = 10034027;
    public static final long VectorTypeSpecificData = 10034025;
}
|
#!/bin/bash
# Set up an iOS/Flutter build environment on macOS (Homebrew required).

# iOS device communication and deployment tools.
brew update
brew install libimobiledevice
brew install ideviceinstaller
brew install ios-deploy

# CocoaPods for iOS dependency management.
pod repo update
gem update cocoapods

# Install Flutter from source, put it on PATH, then verify and fetch
# the project's packages.
git clone https://github.com/flutter/flutter.git $HOME/flutter
export PATH=$HOME/flutter/bin:$HOME/flutter/bin/cache/dart-sdk/bin:$PATH
flutter doctor
flutter packages get
|
import { ColorsOfMagic } from '../helpers/constants';
/**
 * A block of secondary content grouped by theme and room.
 */
export interface SecondaryContentBlock {
  /** Heading displayed for the block. */
  title: string;
  /** Optional longer description. */
  description?: string;
  /** Magic color themes associated with this block. */
  themes: ColorsOfMagic[];
  /** Groups of room identifiers; each inner array is one group — TODO confirm grouping semantics against consumers. */
  rooms: string[][];
}
|
<filename>grammar/grammar-analysis-tool/index.js
// Compute FIRST/FOLLOW/PREDICT sets for the grammar in grammar.txt and
// dump them to text files for inspection.
const firstFollow = require('first-follow')
const fs = require('fs');

const grammar = fs.readFileSync('grammar.txt').toString();
// Convert one parsed "<lhs> ::= <rhs>" pair into a rule object; the
// token "epsilon" is represented as null on the right-hand side.
const toRule = (ruleLine) => {
  const [left, right] = ruleLine;
  const symbols = right
    .split(' ')
    .map((token) => (token === 'epsilon' ? null : token));
  return { left, right: symbols };
};
// Parse non-empty "<lhs> ::= <rhs>" lines into rule objects.
const rules = grammar.split('\n')
    .filter(x => x.length)
    .map(x =>
        x.split("::=").map(y => y.trim())
    )
    .map(toRule);

const { firstSets, followSets, predictSets } = firstFollow(rules);

// Write every intermediate artifact as pretty-printed JSON.
fs.writeFileSync('rules.txt', JSON.stringify(rules, null, 4))
fs.writeFileSync('first.txt', JSON.stringify(firstSets, null, 4))
fs.writeFileSync('follow.txt', JSON.stringify(followSets, null, 4))
fs.writeFileSync('predict.txt', JSON.stringify(predictSets, null, 4))
|
<filename>Code/DistGeom/DistViolationContrib.h
//
// Copyright (C) 2004-2006 Rational Discovery LLC
//
// @@ All Rights Reserved @@
// This file is part of the RDKit.
// The contents are covered by the terms of the BSD license
// which is included in the file license.txt, found at the root
// of the RDKit source tree.
//
#include <RDGeneral/export.h>
#ifndef __RD_DISTVIOLATIONCONTRIB_H__
#define __RD_DISTVIOLATIONCONTRIB_H__
#include <ForceField/Contrib.h>
namespace DistGeom {
//! A term to capture the violation of the upper and lower bounds by
//! distance between two points
class RDKIT_DISTGEOMETRY_EXPORT DistViolationContrib : public ForceFields::ForceFieldContrib {
 public:
  //! Default constructor: effectively unconstrained bounds
  //! (lb=0, ub=1000) with unit weight.
  DistViolationContrib()
      : d_end1Idx(0), d_end2Idx(0), d_ub(1000.0), d_lb(0.0), d_weight(1.0){};

  //! Constructor
  /*!
    \param owner  pointer to the owning ForceField
    \param idx1   index of end1 in the ForceField's positions
    \param idx2   index of end2 in the ForceField's positions
    \param ub     Upper bound on the distance
    \param lb     Lower bound on the distance
    \param weight optional weight for this contribution
  */
  DistViolationContrib(ForceFields::ForceField *owner, unsigned int idx1,
                       unsigned int idx2, double ub, double lb,
                       double weight = 1.0);

  //! Energy of this contribution for the given positions
  //! (ForceFieldContrib interface).
  double getEnergy(double *pos) const;
  //! Accumulate this contribution's gradient into grad
  //! (ForceFieldContrib interface).
  void getGrad(double *pos, double *grad) const;
  virtual DistViolationContrib *copy() const {
    return new DistViolationContrib(*this);
  };

 private:
  unsigned int d_end1Idx, d_end2Idx;  //!< indices of end points
  double d_ub;  //!< upper bound on the distance between d_end1Idx,d_end2Idx
  double d_lb;  //!< lower bound on the distance between d_end1Idx,d_end2Idx
  double d_weight;  //!< used to adjust relative contribution weights
};
}
#endif
|
# Install mjpg-streamer from SVN and set it up system-wide.
# updating & upgrading
sudo apt-get update
sudo apt-get upgrade
# dependencies (the videodev.h symlink is needed because newer kernels
# only ship videodev2.h)
sudo apt-get install libjpeg8-dev imagemagick libv4l-dev
sudo ln -s /usr/include/linux/videodev2.h /usr/include/linux/videodev.h
# mjpg-streamer: check out, build the input/output plugins, and install
# the binary, plugins, and web assets.
sudo apt-get install subversion
cd ~
svn co https://svn.code.sf.net/p/mjpg-streamer/code/mjpg-streamer/ mjpg-streamer
cd mjpg-streamer
make mjpg_streamer input_file.so input_uvc.so output_http.so
sudo cp mjpg_streamer /usr/local/bin
sudo cp output_http.so input_file.so input_uvc.so /usr/local/lib/
sudo cp -R www /usr/local/www
# create log dir
sudo mkdir /var/log/xm
# run using
# echo "export LD_LIBRARY_PATH=/usr/local/lib/"
# mjpg_streamer -i "/usr/local/lib/input_uvc.so -d /dev/video0 -n -y -r 320x240 -f 5" -o "/usr/local/lib/output_http.so -p 8090 -w /usr/local/www"
|
# Matches a word containing two consecutive vowels followed by exactly one
# more letter at a word boundary (the redundant "{1}" quantifier removed).
regex = r'\b[a-zA-Z]*[aeiouAEIOU]{2}[a-zA-Z]\b'
<filename>packages/amplication-data-service-generator/src/server/resource/controller/create-select.spec.ts<gh_stars>1-10
import { namedTypes, builders } from "ast-types";
import { print } from "recast";
import { Entity, EntityField, EnumDataType } from "../../../types";
import { NamedClassDeclaration } from "../../../util/ast";
import { createEntityDTO } from "../dto/create-entity-dto";
import { EXAMPLE_ID_FIELD } from "../util/test-data";
import {
createObjectSelectProperty,
createSelect,
createSelectProperty,
ID_ID,
SELECT_ID,
TRUE_BOOLEAN_LITERAL,
} from "./create-select";
// Minimal entity fixture with a single scalar (id) field.
const EXAMPLE_ENTITY: Entity = {
  id: "EXAMPLE_ENTITY_ID",
  name: "ExampleEntityName",
  displayName: "Example Entity",
  pluralName: "ExampleEntities",
  pluralDisplayName: "Example Entities",
  fields: [EXAMPLE_ID_FIELD],
  permissions: [],
};

// Lookup (relation) field pointing back at EXAMPLE_ENTITY.
const EXAMPLE_LOOKUP_FIELD: EntityField = {
  id: "EXAMPLE_LOOKUP_FIELD_ID",
  permanentId: "EXAMPLE_LOOKUP_PERMANENT_FIELD_ID",
  dataType: EnumDataType.Lookup,
  required: true,
  unique: false,
  searchable: false,
  name: "exampleLookupFieldName",
  displayName: "Example Lookup Field",
  properties: {
    relatedEntityId: EXAMPLE_ENTITY.id,
    relatedEntity: EXAMPLE_ENTITY,
  },
};

// Entity whose only field is the lookup field above.
const EXAMPLE_LOOKUP_ENTITY: Entity = {
  id: "EXAMPLE_LOOKUP_ENTITY_ID",
  displayName: "Example Lookup Entity",
  pluralName: "ExampleEntities",
  pluralDisplayName: "Example Lookup Entities",
  name: "ExampleLookupEntityName",
  fields: [EXAMPLE_LOOKUP_FIELD],
  permissions: [],
};
describe("createSelect", () => {
const cases: Array<[
string,
NamedClassDeclaration,
Entity,
namedTypes.ObjectExpression
]> = [
[
"adds true property for scalar field",
createEntityDTO(EXAMPLE_ENTITY),
EXAMPLE_ENTITY,
builders.objectExpression([
createSelectProperty(builders.identifier(EXAMPLE_ID_FIELD.name)),
]),
],
[
"adds true property for lookup field",
createEntityDTO(EXAMPLE_LOOKUP_ENTITY),
EXAMPLE_LOOKUP_ENTITY,
builders.objectExpression([
createObjectSelectProperty(
builders.identifier(EXAMPLE_LOOKUP_FIELD.name),
[createSelectProperty(ID_ID)]
),
]),
],
];
test.each(cases)("%s", (name, entityDTO, entity, expected) => {
expect(print(createSelect(entityDTO, entity)).code).toEqual(
print(expected).code
);
});
});
test("createSelectProperty", () => {
  // A select property maps the given key to the literal `true`.
  const exampleKey = builders.identifier("exampleKey");
  const result = createSelectProperty(exampleKey);
  expect(result).toEqual(
    builders.objectProperty(exampleKey, TRUE_BOOLEAN_LITERAL)
  );
});
test("createObjectSelectProperty", () => {
  // An object select property nests its children under a `select` key.
  const exampleKey = builders.identifier("exampleKey");
  const childProperties = [
    createSelectProperty(builders.identifier("exampleProperty")),
  ];
  const result = createObjectSelectProperty(exampleKey, childProperties);
  const expectedValue = builders.objectExpression([
    builders.objectProperty(
      SELECT_ID,
      builders.objectExpression(childProperties)
    ),
  ]);
  expect(result).toEqual(builders.objectProperty(exampleKey, expectedValue));
});
|
#!/bin/bash
# Print how much memory a process is using.
# By default, Pss (proportional set size), Rss adjusted for sharing.
# Could also do Rss (resident).

# Print the PID of the first process whose command name matches $1.
# `-o pid=` emits only the PID column with no header, so the old
# awk/grep-v-PID pipeline is unnecessary; awk also trims padding.
get_pid() {
    ps -C "$1" -o pid= | awk 'NR==1 {print $1}'
}
# Process to watch (override with first argument) and smaps field to sum.
proc=firefox
measure=Pss
[ $# -gt 0 ] && proc=$1 && shift

while true; do
    ps -F -C "$proc"
    pid=$(get_pid "$proc")
    # Guard against an empty pid (process not running): an unquoted empty
    # expansion would otherwise word-split the test and error out.
    # Sum the kB values of all lines matching $measure (same match semantics
    # as the old `grep "$measure"`, which also caught e.g. SwapPss).
    [ -n "$pid" ] && [ -f "/proc/$pid/smaps" ] \
        && usage=$(awk -v m="$measure" '$0 ~ m {sum += $2} END {print sum + 0}' "/proc/$pid/smaps") \
        && echo "===$usage ($(echo "$usage/1024" | bc )M)"
    sleep 0.5s
done | uniq
|
#!/bin/bash
#
# Jumpbox setup
#
# This will setup the jumpbox and also configure each hadoop node
#

# Keep the original stdout/stderr on FDs 3/4 and restore them on exit,
# then send everything to the extension log file.
exec 3>&1 4>&2
trap 'exec 2>&4 1>&3' EXIT SIGHUP SIGINT SIGQUIT
exec 1>> /mnt/hadoop_extension.log 2>&1
# Everything below will go to the file 'hadoop_extension.log':

# Output commands and disable history expansion
export PS4='+(${BASH_SOURCE}:${LINENO}): ${FUNCNAME[0]:+${FUNCNAME[0]}(): }'
set -x +H

# Reverse DNS fix: make this node's own name resolvable locally.
IP=`hostname -I`
HOST=`hostname`
# BUG FIX: use a trailing newline ('echo', not 'echo -n'). With -n the next
# append to /etc/hosts would be glued onto this entry on the same line,
# corrupting both records.
echo "$IP $HOST" >> /etc/hosts

# Helper: timestamped log line (millisecond precision).
function Log() {
    echo -e "$(date '+%d/%m/%Y %H:%M:%S:%3N'): $1"
}
############################################################
#
# Constants
#
#

#
# System constants
#

# Mount location (not really needed)
MOUNT=/hadoop
# Name of the machine
HOSTNAME=$(hostname)
# Admin user account — the only pre-existing home directory
ADMIN_USER=$(ls /home/)
# Name of the cluster: this hostname with the trailing "Jumpbox" removed
CLUSTER_NAME=$(hostname | sed 's/Jumpbox$//g')

#
# Hadoop Constants
#

# Hadoop Home Location
HADOOP_HOME=/usr/local/hadoop
# Default hadoop user
HADOOP_USER="hadoop"
# Local hadoop archive
HADOOP_FILE_NAME="hadoop.tar.gz"
# System accounts that run the Hadoop daemons on each node
USERS=("hdfs" "mapred" "yarn")
############################################################
#
# Variables from input
#
#

# How many worker nodes
NUMBER_NODES="$1"
# Password for the admin account on every node
ADMIN_PASSWORD="$2"
# HDFS replication factor
REPLICATION="$3"

# Check to see if ADMIN_USER has been passed in
# NOTE(review): this expects 5 arguments but only reads $4 — confirm the
# caller's contract (what is the fifth argument?).
if [ $# -eq 5 ]; then
    ADMIN_USER="$4"
fi

############################################################
#
# Create the list of master and worker nodes in the
# cluster
#
MASTER_NODES=("${CLUSTER_NAME}NameNode" "${CLUSTER_NAME}ResourceManager" "${CLUSTER_NAME}JobHistory")
WORKER_NODES=()
for i in $(seq 0 $((NUMBER_NODES - 1)));
do
    # Append directly; the array is only ever expanded with
    # "${WORKER_NODES[@]}", so contiguous indices behave identically to the
    # old sparse assignment starting at index 4.
    WORKER_NODES+=("${CLUSTER_NAME}Worker$i")
done
############################################################
#
# Install prerequisite packages (Java runtime, htop,
# sshpass) and configure JAVA_HOME
#
preinstall () {
    # Refresh the package index, then install the JRE plus helper tools
    # non-interactively.
    apt-get update > /dev/null
    DEBIAN_FRONTEND=noninteractive apt-get install --yes --quiet default-jre htop sshpass > /dev/null

    # Derive JAVA_HOME from the resolved java binary and expose it to all
    # future login shells.
    JAVA_HOME=$(readlink -f /usr/bin/java | sed 's:/bin/java::')
    echo -e "export JAVA_HOME=$JAVA_HOME" >> /etc/profile.d/java.sh
}
add_hadoop_user () {
    Log "Creating user $HADOOP_USER"
    addgroup "hadoop"
    # Create user with 'hadoop' as primary group and a bash login shell
    useradd -m -g hadoop -s /bin/bash $HADOOP_USER
    # Location of SSH files
    local SSH_DIR=/home/$HADOOP_USER/.ssh
    # Create directory
    mkdir -p $SSH_DIR
    # Key name
    local KEY_NAME=$SSH_DIR/id_rsa
    # Generate key with empty passphrase
    ssh-keygen -t rsa -N "" -f $KEY_NAME
    # Add to my own authorized keys for loopback
    cat $SSH_DIR/id_rsa.pub >> $SSH_DIR/authorized_keys
    # sshd's StrictModes refuses keys when ~/.ssh or authorized_keys are
    # group/world accessible — tighten permissions explicitly.
    chmod 700 $SSH_DIR
    chmod 600 $SSH_DIR/authorized_keys
    # Disable key checking
    echo -e "Host *" >> /home/$HADOOP_USER/.ssh/config
    echo -e " StrictHostKeyChecking no" >> /home/$HADOOP_USER/.ssh/config
    # They own their own home directory and everything under it
    # (one -R is enough; the old trailing duplicate flag was redundant)
    chown -R $HADOOP_USER:$HADOOP_USER /home/$HADOOP_USER
}
############################################################
#
# Copy public keys from all nodes to all other nodes.
#
# Concretely: gather each Hadoop system user's public key from every
# WORKER node, concatenate them locally, then append the bundle to the
# matching user's authorized_keys on every MASTER node (via sudo tee,
# authenticating with the admin password).
#
copy_users () {
    # Scratch file that accumulates all collected public keys.
    local TMP_FILE='local_authorized_keys'
    # Create empty file
    > $TMP_FILE
    # Create local authorized_keys
    for FROM in ${WORKER_NODES[@]}; do
        for U in ${USERS[@]}; do
            Log "Copy public key from $FROM"
            # scp drops the key as ./id_rsa.pub; StrictHostKeyChecking=no
            # because the hosts are freshly provisioned and unknown.
            sshpass -p $ADMIN_PASSWORD scp -o StrictHostKeyChecking=no $ADMIN_USER@$FROM:/home/$U/.ssh/id_rsa.pub .
            Log "Append to $TMP_FILE"
            cat id_rsa.pub >> $TMP_FILE
            Log "Remove copied public key"
            rm -f id_rsa.pub
        done
    done
    # Copy to remote nodes
    for TO in ${MASTER_NODES[@]}; do
        for U in ${USERS[@]}; do
            Log "Add to remote authorized_keys on host $TO for user $U"
            # sudo tee -a is used because $ADMIN_USER cannot write another
            # user's authorized_keys directly over ssh.
            cat $TMP_FILE | sshpass -p $ADMIN_PASSWORD ssh -o StrictHostKeyChecking=no $ADMIN_USER@$TO "sudo tee -a /home/$U/.ssh/authorized_keys" > /dev/null
        done
    done
    Log "Remove $TMP_FILE file"
    rm $TMP_FILE
}
############################################################
#
# Restart each node in the Hadoop cluster. This will
# cause Hadoop to start on each node.
#
restart_nodes () {
    # Schedule the reboot through 'at' so the ssh session can return
    # cleanly before the node actually goes down.
    local REBOOT_CMD='echo "sleep 5 && sudo reboot" | at now'
    # Masters and workers get the identical treatment, so iterate over both
    # arrays in one loop instead of duplicating the body.
    local N
    for N in "${MASTER_NODES[@]}" "${WORKER_NODES[@]}"; do
        Log "Restarting node $N"
        sshpass -p $ADMIN_PASSWORD ssh -o StrictHostKeyChecking=no -o ServerAliveInterval=10 $ADMIN_USER@$N $REBOOT_CMD > /dev/null
    done
}
############################################################
#
# Downloads and extracts hadoop into the correct folder
#
#
install_hadoop () {
    # Download Hadoop, retrying with a freshly chosen mirror from
    # sources.txt until a download succeeds within the timeout.
    local RET_ERR=1
    while [[ $RET_ERR -ne 0 ]];
    do
        local HADOOP_URI=`shuf -n 1 sources.txt`
        Log "Downloading from $HADOOP_URI"
        timeout 120 wget --timeout 30 "$HADOOP_URI" -O "$HADOOP_FILE_NAME" > /dev/null
        RET_ERR=$?
    done

    # Extract
    tar -xvzf $HADOOP_FILE_NAME > /dev/null
    rm $HADOOP_FILE_NAME

    # Move files to /usr/local
    mkdir -p ${HADOOP_HOME}
    # Glob instead of hard-coding 'hadoop-2.9.0' so whichever release
    # version the mirror served is installed correctly.
    mv hadoop-*/* ${HADOOP_HOME}

    # Create log directory with group-writable permissions
    mkdir ${HADOOP_HOME}/logs
    chmod 774 ${HADOOP_HOME}/logs

    # Copy configuration files
    cp *.xml ${HADOOP_HOME}/etc/hadoop/ -f

    # Substitute the deployment-specific placeholders in the config files
    sed -i -e "s+CLUSTER_NAME+$CLUSTER_NAME+g" $HADOOP_HOME/etc/hadoop/core-site.xml
    sed -i -e "s+MOUNT_LOCATION+$MOUNT+g" $HADOOP_HOME/etc/hadoop/core-site.xml
    sed -i -e "s+CLUSTER_NAME+$CLUSTER_NAME+g" $HADOOP_HOME/etc/hadoop/hdfs-site.xml
    sed -i -e "s+REPLICATION+$REPLICATION+g" $HADOOP_HOME/etc/hadoop/hdfs-site.xml
    sed -i -e "s+CLUSTER_NAME+$CLUSTER_NAME+g" $HADOOP_HOME/etc/hadoop/yarn-site.xml
    sed -i -e "s+\${JAVA_HOME}+'$JAVA_HOME'+g" $HADOOP_HOME/etc/hadoop/hadoop-env.sh
    sed -i -e "s+REPLICATION+$REPLICATION+g" $HADOOP_HOME/etc/hadoop/mapred-site.xml

    #
    # Global profile environment variables
    #
    echo -e "export HADOOP_HOME=$HADOOP_HOME" >> /etc/profile.d/hadoop.sh
    echo -e 'export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin' >> /etc/profile.d/hadoop.sh

    # Hadoop group owns hadoop installation
    chown $ADMIN_USER:hadoop -R $HADOOP_HOME
    # Hadoop group can do anything owner can do
    chmod 664 $HADOOP_HOME/etc/hadoop/*
    chmod -R g=u $HADOOP_HOME
}
# Pre-install all required programs (JRE, htop, sshpass) and set JAVA_HOME
preinstall
# Add the hadoop user so we can submit jobs from here
add_hadoop_user
# Copy worker public keys into the master nodes' authorized_keys
copy_users
# Restart all Hadoop nodes (kicks off delayed reboots via 'at')
restart_nodes
# install hadoop.
# NOTE(review): this runs after restart_nodes has scheduled reboots — the
# download proceeds while the nodes restart; confirm this ordering is
# intentional.
install_hadoop
Log "Success"
exit 0
|
/// <summary>
/// Builds the <c>dotnet test</c> command line for the configured solution,
/// writing TRX results into <paramref name="testResultPath"/> without
/// rebuilding the solution first.
/// </summary>
// NOTE(review): solutionDirectory is currently unused — confirm whether the
// command was meant to run relative to it.
public string ConstructTestCommand(TestOptions options, string solutionDirectory, string testResultPath)
{
    return $"dotnet test \"{options.SolutionFile}\" --no-build --logger trx --results-directory \"{testResultPath}\"";
}
#!/usr/bin/env bash
# Run the rollout_brick task through a login shell (-l) so the Ruby
# version manager / bundler environment is loaded before exec'ing the brick.
/bin/bash -l -c "bundle exec ./bin/run_brick.rb rollout_brick"
import URLS from './emoji.json';
import { join, basename } from 'path';
export { URLS };
/** Union of all emoji names, derived from the keys of the bundled JSON map. */
export type EmojiName = keyof typeof URLS;
/** Everything known about a single emoji. */
export interface Emoji {
/** Canonical name (a key of the URL map). */
name: EmojiName;
/** Remote image URL from the bundled JSON. */
url: string;
/** Image file name extracted from the URL. */
file: string;
/** Absolute path of the local image copy under images/. */
path: string;
/** The rendered Unicode string, or null when the file name is not a code-point sequence. */
string: string | null;
}
/** Map from emoji name to its full record. */
export type Emojis = Map<string, Emoji>;
// Lazily-built caches: populated on first use by all() / isEmoji() / nameOf().
let cache: Emojis | null = null;
let stringToName: Map<string, EmojiName> | null = null;
// Directory holding the local emoji PNGs, next to this module.
const IMAGES_BASE = join(__dirname, 'images');
// File base names that encode a Unicode sequence are dash-separated
// lowercase hex code points (e.g. "1f468-200d-1f469").
const RE_HEX = /^[0-9a-f-]+$/;

/**
 * Extract the file name from an emoji image URL, dropping any query string.
 *
 * Fix: the previous version assumed a '?' was always present; without one,
 * `lastIndexOf('?')` returned -1 and `slice(..., -1)` silently cut off the
 * last character of the file name.
 */
function fileOf(url: string): string {
  const query = url.lastIndexOf('?');
  return url.slice(url.lastIndexOf('/') + 1, query === -1 ? url.length : query);
}

/**
 * Decode a file name like "1f468-200d-1f469.png" into the emoji string it
 * names, or return null when the base name is not a hex code-point sequence.
 */
function strOf(file: string): string | null {
  const base = basename(file, '.png');
  if (!RE_HEX.test(base)) {
    return null;
  }
  if (!base.includes('-')) {
    return String.fromCodePoint(parseInt(base, 16));
  }
  return String.fromCodePoint(...base.split('-').map(s => parseInt(s, 16)));
}
/** Build the full Emoji record for a known name from its URL entry. */
function emojiOf(name: EmojiName): Emoji {
  const url = URLS[name];
  const file = fileOf(url);
  const path = join(IMAGES_BASE, file);
  const string = strOf(file);
  return { name, url, file, path, string };
}
/**
 * Whether the given string renders one of the known emoji.
 *
 * Builds the string→name lookup table on first call (shared with nameOf);
 * afterwards this is a single Map lookup. Replaces the previous version's
 * duplicated build-and-flag-tracking scan — the `found` flag was always
 * equivalent to `stringToName.has(emoji)` after the map was built.
 */
export function isEmoji(emoji: string): boolean {
  if (stringToName === null) {
    stringToName = new Map();
    for (const [name, info] of all().entries()) {
      if (info.string !== null) {
        stringToName.set(info.string, name as EmojiName);
      }
    }
  }
  return stringToName.has(emoji);
}
/**
 * The name of the emoji rendered by the given string, or null if unknown.
 *
 * Builds the string→name lookup table on first call (shared with isEmoji);
 * afterwards this is a single Map lookup. Replaces the previous version's
 * duplicated scan — `ret` always ended up equal to the map's entry for
 * `emoji` (the last name set wins in both formulations).
 */
export function nameOf(emoji: string): EmojiName | null {
  if (stringToName === null) {
    stringToName = new Map();
    for (const [name, info] of all().entries()) {
      if (info.string !== null) {
        stringToName.set(info.string, name as EmojiName);
      }
    }
  }
  return stringToName.get(emoji) ?? null;
}
/** Type guard: whether the string is one of the known emoji names. */
export function isName(name: string): name is EmojiName {
return name in URLS;
}
/**
 * The rendered Unicode string for a known emoji name, or null when its file
 * name is not a code-point sequence. Uses the cache when all() has run.
 * @throws Error when the name is unknown.
 */
export function stringOf(name: string): string | null {
  if (!(name in URLS)) {
    throw new Error(`Emoji named '${name}' not found`);
  }
  if (cache === null) {
    return strOf(fileOf(URLS[name as EmojiName]));
  }
  return cache.get(name)!.string;
}
/**
 * The full Emoji record for a known name. Uses the cache when all() has run,
 * otherwise builds the record on the fly.
 * @throws Error when the name is unknown.
 */
export function of(name: string): Emoji {
  if (!(name in URLS)) {
    throw new Error(`Emoji named '${name}' not found`);
  }
  return cache === null ? emojiOf(name as EmojiName) : cache.get(name)!;
}
/** All known emoji, keyed by name. Built once and cached for later calls. */
export function all(): Emojis {
  if (cache === null) {
    const built: Emojis = new Map();
    for (const key of Object.keys(URLS) as EmojiName[]) {
      built.set(key, emojiOf(key));
    }
    cache = built;
  }
  return cache;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.