text
stringlengths 27
775k
|
|---|
/// Returns the squares of the elements of `v`, sorted in non-decreasing order.
///
/// `v` is assumed to be sorted in non-decreasing order (LeetCode 977,
/// "Squares of a Sorted Array"). Squares of the negative prefix descend while
/// squares of the positive suffix ascend, so the largest square always sits at
/// one end of the slice. Two pointers walk inward while the output is filled
/// back-to-front, giving O(n) time and a single output allocation.
///
/// The previous implementation only split the input when it contained a `0`;
/// for inputs mixing negatives and positives without a zero (e.g. `[-3, 2]`)
/// it returned an unsorted result (`[9, 4]`). Its merge also used
/// `Vec::remove(0)`, making that phase O(n^2).
pub fn get_sorted_squares(v: Vec<i32>) -> Vec<i32> {
    let mut result = vec![0; v.len()];
    // `lo` scans from the (possibly negative) front, `hi` from the back.
    let (mut lo, mut hi) = (0usize, v.len());
    // Fill from the back: the larger of the two end squares is the next
    // largest value overall. While slots remain, `lo < hi` holds, so both
    // indexing operations are in bounds.
    for slot in result.iter_mut().rev() {
        let left = v[lo].pow(2);
        let right = v[hi - 1].pow(2);
        if left > right {
            *slot = left;
            lo += 1;
        } else {
            *slot = right;
            hi -= 1;
        }
    }
    result
}
|
package resource_test
import (
"reflect"
"testing"
"github.com/golang/mock/gomock"
"github.com/itsdalmo/github-pr-resource"
"github.com/itsdalmo/github-pr-resource/mocks"
)
// testPullRequests is the shared fixture for the tests below. createTestPR is
// defined elsewhere in this package; judging by the DisableCISkip test case,
// the bool flag appears to mark the PR's commit as carrying a CI-skip
// marker — confirm against createTestPR.
var (
	testPullRequests = []*resource.PullRequest{
		createTestPR(1, true),
		createTestPR(2, false),
		createTestPR(3, false),
		createTestPR(4, false),
	}
)
// TestCheck drives resource.Check against a mocked Github client, covering:
// first check (no previous version), unchanged latest version, multiple new
// versions, Paths filtering, IgnorePaths filtering and CI-skip handling.
func TestCheck(t *testing.T) {
	tests := []struct {
		description  string
		source       resource.Source
		version      resource.Version
		files        [][]string
		pullRequests []*resource.PullRequest
		expected     resource.CheckResponse
	}{
		{
			description: "check returns the latest version if there is no previous",
			source: resource.Source{
				Repository:  "itsdalmo/test-repository",
				AccessToken: "oauthtoken",
			},
			version:      resource.Version{},
			pullRequests: testPullRequests,
			files:        [][]string{},
			expected: resource.CheckResponse{
				resource.NewVersion(testPullRequests[1]),
			},
		},
		{
			description: "check returns the previous version when its still latest",
			source: resource.Source{
				Repository:  "itsdalmo/test-repository",
				AccessToken: "oauthtoken",
			},
			version:      resource.NewVersion(testPullRequests[1]),
			pullRequests: testPullRequests,
			files:        [][]string{},
			expected: resource.CheckResponse{
				resource.NewVersion(testPullRequests[1]),
			},
		},
		{
			description: "check returns all new versions since the last",
			source: resource.Source{
				Repository:  "itsdalmo/test-repository",
				AccessToken: "oauthtoken",
			},
			version:      resource.NewVersion(testPullRequests[3]),
			pullRequests: testPullRequests,
			files:        [][]string{},
			expected: resource.CheckResponse{
				resource.NewVersion(testPullRequests[2]),
				resource.NewVersion(testPullRequests[1]),
			},
		},
		{
			description: "check will only return versions that match the specified paths",
			source: resource.Source{
				Repository:  "itsdalmo/test-repository",
				AccessToken: "oauthtoken",
				Paths:       []string{"terraform/*/*.tf", "terraform/*/*/*.tf"},
			},
			version:      resource.NewVersion(testPullRequests[3]),
			pullRequests: testPullRequests,
			files: [][]string{
				{"README.md", "travis.yml"},
				{"terraform/modules/ecs/main.tf", "README.md"},
				{"terraform/modules/variables.tf", "travis.yml"},
			},
			expected: resource.CheckResponse{
				resource.NewVersion(testPullRequests[2]),
			},
		},
		{
			description: "check will skip versions which only match the ignore paths",
			source: resource.Source{
				Repository:  "itsdalmo/test-repository",
				AccessToken: "oauthtoken",
				IgnorePaths: []string{"*.md", "*.yml"},
			},
			version:      resource.NewVersion(testPullRequests[3]),
			pullRequests: testPullRequests,
			files: [][]string{
				{"README.md", "travis.yml"},
				{"terraform/modules/ecs/main.tf", "README.md"},
				{"terraform/modules/variables.tf", "travis.yml"},
			},
			expected: resource.CheckResponse{
				resource.NewVersion(testPullRequests[2]),
			},
		},
		{
			description: "check correctly ignores [skip ci] when specified",
			source: resource.Source{
				Repository:    "itsdalmo/test-repository",
				AccessToken:   "oauthtoken",
				DisableCISkip: "true",
			},
			version:      resource.NewVersion(testPullRequests[1]),
			pullRequests: testPullRequests,
			expected: resource.CheckResponse{
				resource.NewVersion(testPullRequests[0]),
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.description, func(t *testing.T) {
			ctrl := gomock.NewController(t)
			defer ctrl.Finish()

			github := mocks.NewMockGithub(ctrl)
			github.EXPECT().ListOpenPullRequests().Times(1).Return(tc.pullRequests, nil)
			if len(tc.files) > 0 {
				// TODO: Figure out how to do this in a loop with variables. As is, it will break when adding new tests.
				// NOTE(review): exactly two ListModifiedFiles calls are expected even
				// though the fixtures carry three file lists — presumably Check only
				// inspects PRs newer than tc.version; confirm against resource.Check
				// before extending these cases, or gomock will fail at ctrl.Finish().
				gomock.InOrder(
					github.EXPECT().ListModifiedFiles(gomock.Any()).Times(1).Return(tc.files[0], nil),
					github.EXPECT().ListModifiedFiles(gomock.Any()).Times(1).Return(tc.files[1], nil),
				)
			}
			input := resource.CheckRequest{Source: tc.source, Version: tc.version}
			output, err := resource.Check(input, github)
			if err != nil {
				t.Fatalf("unexpected error: %s", err)
			}
			if got, want := output, tc.expected; !reflect.DeepEqual(got, want) {
				t.Errorf("\ngot:\n%v\nwant:\n%v\n", got, want)
			}
		})
	}
}
// TestContainsSkipCI verifies the commit-message matcher used to honour
// [ci skip] / [skip ci] markers, in any position and case.
func TestContainsSkipCI(t *testing.T) {
	cases := []struct {
		name    string
		message string
		want    bool
	}{
		{name: "does not just match any symbol in the regexp", message: "(", want: false},
		{name: "does not match when it should not", message: "test", want: false},
		{name: "matches [ci skip]", message: "[ci skip]", want: true},
		{name: "matches [skip ci]", message: "[skip ci]", want: true},
		{name: "matches trailing skip ci", message: "trailing [skip ci]", want: true},
		{name: "matches leading skip ci", message: "[skip ci] leading", want: true},
		{name: "is case insensitive", message: "case[Skip CI]insensitive", want: true},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			if got := resource.ContainsSkipCI(c.message); got != c.want {
				t.Errorf("\ngot:\n%v\nwant:\n%v\n", got, c.want)
			}
		})
	}
}
// TestFilterPath checks that resource.FilterPath keeps exactly the files
// matching the given glob pattern.
func TestFilterPath(t *testing.T) {
	cases := []struct {
		description string
		pattern     string
		files       []string
		want        []string
	}{
		{
			description: "returns all matching files",
			pattern:     "*.txt",
			files: []string{
				"file1.txt",
				"test/file2.txt",
			},
			want: []string{
				"file1.txt",
			},
		},
		{
			description: "works with wildcard",
			pattern:     "test/*",
			files: []string{
				"file1.txt",
				"test/file2.txt",
			},
			want: []string{
				"test/file2.txt",
			},
		},
		{
			description: "excludes unmatched files",
			pattern:     "*/*.txt",
			files: []string{
				"test/file1.go",
				"test/file2.txt",
			},
			want: []string{
				"test/file2.txt",
			},
		},
	}
	for _, tc := range cases {
		t.Run(tc.description, func(t *testing.T) {
			got, err := resource.FilterPath(tc.files, tc.pattern)
			if err != nil {
				t.Fatalf("unexpected error: %s", err)
			}
			if !reflect.DeepEqual(got, tc.want) {
				// %v for both sides; the previous %s verb formatted the want
				// slice inconsistently with got.
				t.Errorf("\ngot:\n%v\nwant:\n%v\n", got, tc.want)
			}
		})
	}
}
// TestFilterIgnorePath checks that resource.FilterIgnorePath removes exactly
// the files matching the given glob pattern.
func TestFilterIgnorePath(t *testing.T) {
	cases := []struct {
		description string
		pattern     string
		files       []string
		want        []string
	}{
		{
			description: "excludes all matching files",
			pattern:     "*.txt",
			files: []string{
				"file1.txt",
				"test/file2.txt",
			},
			want: []string{
				"test/file2.txt",
			},
		},
		{
			description: "works with wildcard",
			pattern:     "test/*",
			files: []string{
				"file1.txt",
				"test/file2.txt",
			},
			want: []string{
				"file1.txt",
			},
		},
		{
			description: "includes unmatched files",
			pattern:     "*/*.txt",
			files: []string{
				"test/file1.go",
				"test/file2.txt",
			},
			want: []string{
				"test/file1.go",
			},
		},
	}
	for _, tc := range cases {
		t.Run(tc.description, func(t *testing.T) {
			got, err := resource.FilterIgnorePath(tc.files, tc.pattern)
			if err != nil {
				t.Fatalf("unexpected error: %s", err)
			}
			if !reflect.DeepEqual(got, tc.want) {
				// %v for both sides; the previous %s verb formatted the want
				// slice inconsistently with got.
				t.Errorf("\ngot:\n%v\nwant:\n%v\n", got, tc.want)
			}
		})
	}
}
|
// THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
export { validateGumroadLicenseKeyMainAsync } from './gumroad/validate-gumroad-license-key-main-async.js'
export { validateGumroadLicenseKeyUiAsync } from './gumroad/validate-gumroad-license-key-ui-async.js'
export {
getDocumentUseCount,
incrementDocumentUseCount,
resetDocumentUseCount
} from './utilities/document-use-count.js'
export {
getTotalUseCountAsync,
incrementTotalUseCountAsync,
resetTotalUseCountAsync
} from './utilities/total-use-count.js'
export { LicenseKeyValidationResult } from './utilities/types.js'
|
# Consultations are presented as a filtered view of publications: #index
# forwards to the publications index with a preset filter, while #show renders
# an individual consultation document.
class ConsultationsController < DocumentsController
  def index
    # Preserve the caller's query string (minus Rails routing params and the
    # cache-buster `_`) so any active filters survive the redirect.
    filter_params = params.except(:controller, :action, :format, :_)
    redirect_to publications_path(filter_params.merge(publication_filter_option: 'consultations'))
  end

  def show
    # NOTE(review): @document is presumably loaded by a DocumentsController
    # filter — not visible in this file; confirm there.
    @related_policies = @document.published_related_policies
    set_meta_description(@document.summary)
    expire_on_open_state_change(@document)
  end

  private

  # Tells the parent DocumentsController which model backs this controller.
  def document_class
    Consultation
  end
end
|
package com.enjin.sdk.services.user;

/**
 * Asynchronous and synchronous methods for querying and mutating users.
 * <p>
 * Marker interface that simply combines {@link AsynchronousUsersService} and
 * {@link SynchronousUsersService}; it declares no methods of its own.
 *
 * @author Evan Lindsay
 */
public interface UsersService extends AsynchronousUsersService, SynchronousUsersService {
}
|
{-# LANGUAGE RankNTypes, TypeFamilies, TypeInType, TypeOperators,
             UndecidableInstances #-}

-- Appears to be a GHC regression-test module (ticket-style name T11719);
-- the final declaration is expected to be rejected — see its trailing
-- comment. Do not "fix" it without checking the test's intent.
module T11719 where

import Data.Kind

-- Defunctionalisation symbol kind (singletons style): 'TyFun a b' is the
-- kind of a symbol standing for a type-level function from 'a' to 'b'.
data TyFun :: * -> * -> *

-- 'a ~> b' abbreviates the kind of such defunctionalised symbols.
type a ~> b = TyFun a b -> *

-- Application of a defunctionalised symbol to an argument.
type family (f :: a ~> b) @@ (x :: a) :: b

data Null a = Nullable a | NotNullable a

-- Type-level composition of defunctionalised symbols.
type family ((f :: b ~> c) ∘ (g :: a ~> b)) (x :: a) :: c where
  (f ∘ g) x = f @@ (g @@ x)

type family BaseType (k :: forall a. a ~> Type) (x :: b) :: Type where -- this fails :(
|
package typingsSlinky.pulumiAws
import typingsSlinky.pulumiAws.enumsRdsMod.StorageType
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
// ScalablyTyped facade for the deprecated `StorageTypes` constants exported by
// @pulumi/aws/rds/storageType. This file is generated-style binding code; each
// value is a @js.native import of the corresponding runtime constant, so there
// is no Scala-side implementation. Prefer the StorageType members named in the
// per-value comments.
object storageTypeMod {
  object StorageTypes {
    /** Use StorageType.GP2 instead. */
    @JSImport("@pulumi/aws/rds/storageType", "StorageTypes.GP2")
    @js.native
    val GP2: StorageType = js.native

    /** Use StorageType.IO1 instead. */
    @JSImport("@pulumi/aws/rds/storageType", "StorageTypes.Io1")
    @js.native
    val Io1: StorageType = js.native

    /** Use StorageType.Standard instead. */
    @JSImport("@pulumi/aws/rds/storageType", "StorageTypes.Standard")
    @js.native
    val Standard: StorageType = js.native
  }
}
|
from flask import request
from flask_restx import Resource, Namespace
from .core import get_resource_recommend_v1
from ....iomodels import input_def_recommend_v1, output_def
# Namespace for the v1 recommendation endpoints (mounted under /recommendsystem).
ns_recomv1 = Namespace('recommendsystem',
                       description='First version of the recommendation system based on KNN models')
# Register the shared request/response schemas on this namespace.
# NOTE(review): the second line rebinds `output_def`, shadowing the name
# imported from ....iomodels above. It works because the imported tuple is
# consumed on the same line, but a distinct local name would be clearer.
input_def = ns_recomv1.model(*input_def_recommend_v1)
output_def = ns_recomv1.model(*output_def)
@ns_recomv1.route("/v1")
class GetRecommendv1(Resource):
    """Endpoint serving the v1 KNN-based recommendation list."""

    @ns_recomv1.expect(input_def, validate=True)
    @ns_recomv1.marshal_with(output_def)
    def post(self):
        '''Compute the recommendation list based on Knn models'''
        # The request body is validated against `input_def`, then forwarded
        # verbatim as keyword arguments; the result is wrapped under 'output'
        # and marshalled with `output_def`.
        return {'output': get_resource_recommend_v1(**request.json)}
@ns_recomv1.errorhandler
def default_error_handler(error):
    """Fallback handler for uncaught exceptions raised in this namespace.

    Returns the stringified error plus the exception's HTTP status code,
    defaulting to 500 when the exception carries no ``code`` attribute.
    """
    return {'message': str(error)}, getattr(error, 'code', 500)
|
#!/bin/bash
# Benchmark setup script: install dependencies and start the app behind Thin.

# Provided by the benchmark toolchain; pulls in MySQL, RVM and Ruby 2.0.
fw_depends mysql rvm ruby-2.0

# Install gems into vendor/bundle for the Ruby selected by $MRI_VERSION.
rvm ruby-$MRI_VERSION do bundle install --jobs=4 --gemfile=$TROOT/Gemfile --path=vendor/bundle

# Launch Thin in the background, pointing the app at the benchmark's DB host.
WEB_SERVER=Thin DB_HOST=${DBHOST} rvm ruby-$MRI_VERSION do bundle exec thin start -C config/thin.yml &
|
# [143. Reorder List (Medium)](https://leetcode.com/problems/reorder-list/)
<p>You are given the head of a singly linked-list. The list can be represented as:</p>
<pre>L<sub>0</sub> → L<sub>1</sub> → … → L<sub>n - 1</sub> → L<sub>n</sub>
</pre>
<p><em>Reorder the list to be on the following form:</em></p>
<pre>L<sub>0</sub> → L<sub>n</sub> → L<sub>1</sub> → L<sub>n - 1</sub> → L<sub>2</sub> → L<sub>n - 2</sub> → …
</pre>
<p>You may not modify the values in the list's nodes. Only nodes themselves may be changed.</p>
<p> </p>
<p><strong>Example 1:</strong></p>
<img alt="" src="https://assets.leetcode.com/uploads/2021/03/04/reorder1linked-list.jpg" style="width: 422px; height: 222px;">
<pre><strong>Input:</strong> head = [1,2,3,4]
<strong>Output:</strong> [1,4,2,3]
</pre>
<p><strong>Example 2:</strong></p>
<img alt="" src="https://assets.leetcode.com/uploads/2021/03/09/reorder2-linked-list.jpg" style="width: 542px; height: 222px;">
<pre><strong>Input:</strong> head = [1,2,3,4,5]
<strong>Output:</strong> [1,5,2,4,3]
</pre>
<p> </p>
<p><strong>Constraints:</strong></p>
<ul>
<li>The number of nodes in the list is in the range <code>[1, 5 * 10<sup>4</sup>]</code>.</li>
<li><code>1 <= Node.val <= 1000</code></li>
</ul>
**Companies**:
[Amazon](https://leetcode.com/company/amazon), [Adobe](https://leetcode.com/company/adobe), [Bloomberg](https://leetcode.com/company/bloomberg)
**Related Topics**:
[Linked List](https://leetcode.com/tag/linked-list/), [Two Pointers](https://leetcode.com/tag/two-pointers/), [Stack](https://leetcode.com/tag/stack/), [Recursion](https://leetcode.com/tag/recursion/)
**Similar Questions**:
* [Delete the Middle Node of a Linked List (Medium)](https://leetcode.com/problems/delete-the-middle-node-of-a-linked-list/)
## Solution 1.
```cpp
// OJ: https://leetcode.com/problems/reorder-list/
// Author: github.com/lzl124631x
// Time: O(N)
// Space: O(1)
// Reorders L0→L1→…→Ln into L0→Ln→L1→Ln-1→… in three O(N)-time / O(1)-space
// passes: split the list at its middle, reverse the second half, then
// interleave the halves.
class Solution {
    // Number of nodes in the list.
    int getLength(ListNode *head) {
        int ans = 0;
        for (; head; head = head->next) ++ans;
        return ans;
    }
    // Detaches and returns the second half. (len - 1) / 2 hops land on the
    // last node of the first half, so for odd lengths the extra node stays
    // in the first half.
    ListNode *splitList(ListNode *head) {
        int len = (getLength(head) - 1) / 2;
        while (len--) head = head->next;
        auto ans = head->next;
        head->next = nullptr;
        return ans;
    }
    // In-place reversal via repeated head-insertion onto a dummy node.
    ListNode *reverseList(ListNode *head) {
        ListNode dummy;
        while (head) {
            auto node = head;
            head = head->next;
            node->next = dummy.next;
            dummy.next = node;
        }
        return dummy.next;
    }
    // Inserts each node of `second` after successive nodes of `first`.
    // `first` is never shorter than `second` (see splitList), so every
    // insertion point exists.
    void interleave(ListNode *first, ListNode *second) {
        while (second) {
            auto node = second;
            second = second->next;
            node->next = first->next;
            first->next = node;
            first = node->next;
        }
    }
public:
    void reorderList(ListNode* head) {
        auto second = splitList(head);
        second = reverseList(second);
        interleave(head, second);
    }
};
```
## Solution 2.
```cpp
// OJ: https://leetcode.com/problems/reorder-list/
// Author: github.com/lzl124631x
// Time: O(N)
// Space: O(1)
// Same three-pass strategy as Solution 1, but finds the middle with the
// fast/slow-pointer technique and interleaves by appending onto a tail.
class Solution {
    // Detaches and returns the second half. `fast` advances two steps per
    // `slow` step, so `slow` stops on the last node of the first half for
    // even lengths and on the true middle for odd lengths — the extra node
    // stays in the first half either way.
    ListNode *splitList(ListNode *head) {
        auto fast = head, slow = head;
        while (fast && fast->next) {
            slow = slow->next;
            fast = fast->next->next;
        }
        auto ans = slow->next;
        slow->next = nullptr;
        return ans;
    }
    // In-place reversal via repeated head-insertion onto a dummy node.
    ListNode *reverseList(ListNode *head) {
        ListNode dummy;
        while (head) {
            auto node = head;
            head = head->next;
            node->next = dummy.next;
            dummy.next = node;
        }
        return dummy.next;
    }
    // Alternately appends one node from `first` then one from `second` onto a
    // growing tail; whatever remains of `first` (at most one node) is attached
    // at the end.
    void interleave(ListNode *first, ListNode *second) {
        ListNode dummy, *tail = &dummy;
        while (first && second) {
            auto node = first;
            first = first->next;
            tail->next = node;
            tail = node;
            node = second;
            second = second->next;
            tail->next = node;
            tail = node;
        }
        tail->next = first;
    }
public:
    // Reorders L0→…→Ln into L0→Ln→L1→Ln-1→…; O(N) time, O(1) space.
    void reorderList(ListNode* head) {
        auto second = splitList(head);
        second = reverseList(second);
        interleave(head, second);
    }
};
```
|
<?php
/**
* Toolset Divi can be installed as a standalone glue plugin,
* but it also comes packaged with other Toolset plugins.
*
* To include it on a Toolset plugin, do as follows:
* - Include this repository as a Composer dependency.
* - Wait until after_setup_theme to include this loader.php file.
*
 * This will ensure that the glue plugin can be used if available;
 * otherwise, the Toolset plugin packaging the newest version will load it.
*
* $toolset_divi_version must be increased on every new version of the glue plugin.
* Note that this must always be greater than 1, since Divi registers its extensions on init:0.
* Also, having a negative priority ensures that the highest version number gets called first.
*/
/**
* WARNING: INCREASE THIS LOADER VERSION ON EVERY NEW RELEASE.
*/
$toolset_divi_version = 11;

// Register at priority (1 - version): higher versions get a more negative
// priority and therefore run first, letting the newest copy claim the
// TOOLSET_DIVI_LOADED guard before any older bundled copy runs.
add_action( 'init', function() use ( $toolset_divi_version ) {
	if ( defined( 'TOOLSET_DIVI_LOADED' ) ) {
		// A more recent version of Toolset Divi is already active.
		return;
	}

	// Define TOOLSET_DIVI_LOADED so any older instance of Toolset Divi is not loaded.
	define( 'TOOLSET_DIVI_LOADED', $toolset_divi_version );

	// This will have to reference toolset-divi.php once the MR from Pierre is merged.
	// plugin_dir_path() already returns a trailing slash, so no leading slash here
	// (the previous '/toolset-divi.php' produced a harmless but wrong double slash).
	require_once plugin_dir_path( __FILE__ ) . 'toolset-divi.php';
}, 1 - $toolset_divi_version );
|
package io.github.nortthon.r2dbc.usecases;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import io.github.nortthon.r2dbc.gateways.NoteGateway;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.internal.verification.VerificationModeFactory.times;
/**
 * Unit test for {@code DeleteNote}, with the gateway mocked out.
 */
@ExtendWith(MockitoExtension.class)
class DeleteNoteTest {

    @Mock
    private NoteGateway noteGateway;

    @InjectMocks
    private DeleteNote deleteNote;

    /**
     * Deleting an existing note should complete the reactive pipeline without
     * emitting a value, delegating to the gateway exactly once.
     */
    @Test
    void testDeleteExistentNote() {
        final var id = 1L;
        when(noteGateway.delete(id)).thenReturn(Mono.empty());

        StepVerifier.create(deleteNote.execute(id))
                .expectSubscription()
                .verifyComplete();

        verify(noteGateway, times(1)).delete(id);
    }
}
|
package xyz.txcplus.redis.aop.lock.config;
import org.redisson.api.RedissonClient;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import xyz.txcplus.redis.aop.lock.aop.LockAnnotationAdvisor;
import xyz.txcplus.redis.aop.lock.aop.LockInterceptor;
/**
 * Resource-lock configuration: wires up the Redisson-backed lock interceptor
 * and the AOP advisor that applies it to annotated methods. Both beans are
 * conditional so an application can supply its own replacements.
 *
 * @version 1.0.0
 * @author: wenhai
 * @date:2019-11-01 10:52
 * @since JDK 1.8
 */
@Configuration
public class LockConfiguration {

    /** Advisor that matches the lock annotation and delegates to the interceptor. */
    @Bean
    @ConditionalOnMissingBean
    public LockAnnotationAdvisor lockAnnotationAdvisor(LockInterceptor lockInterceptor) {
        return new LockAnnotationAdvisor(lockInterceptor);
    }

    /** Interceptor that performs the actual locking via the shared RedissonClient. */
    @Bean
    @ConditionalOnMissingBean
    public LockInterceptor lockInterceptor(RedissonClient redissonClient) {
        LockInterceptor lockInterceptor = new LockInterceptor();
        lockInterceptor.setRedissonClient(redissonClient);
        return lockInterceptor;
    }
}
|
/*
* Copyright (C) 2010 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Ideally, we would rely on platform support for parsing a cookie, since
// this would save us from any potential inconsistency. However, exposing
// platform cookie parsing logic would require quite a bit of additional
// plumbing, and at least some platforms lack support for parsing Cookie,
// which is in a format slightly different from Set-Cookie and is normally
// only required on the server side.
/* eslint-disable rulesdir/no_underscored_properties */
import {Cookie, Type} from './Cookie.js';
/**
 * Incremental parser for Set-Cookie header values. Cookies accumulate in
 * `_cookies` while `_input` is consumed token by token; `_lastCookie` tracks
 * the cookie currently being assembled so follow-up key/value pairs on the
 * same line can be attached to it as attributes.
 */
export class CookieParser {
  _domain: string|undefined;
  _cookies: Cookie[];
  // Remaining unparsed portion of the header; shrinks as tokens are consumed.
  _input!: string|undefined;
  // Length of the complete input, used to derive absolute positions/sizes.
  _originalInputLength: number;
  _lastCookie?: Cookie|null;
  _lastCookieLine?: string;
  _lastCookiePosition?: number;

  constructor(domain?: string) {
    if (domain) {
      // Handle domain according to
      // https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-5.3.3
      this._domain = domain.toLowerCase().replace(/^\./, '');
    }
    this._cookies = [];
    this._originalInputLength = 0;
  }

  /** Convenience wrapper: parse `header` with a throwaway parser instance. */
  static parseSetCookie(header: string|undefined, domain?: string): Cookie[]|null {
    return (new CookieParser(domain)).parseSetCookie(header);
  }

  /** Cookies parsed so far. */
  cookies(): Cookie[] {
    return this._cookies;
  }

  /**
   * Parses a Set-Cookie header value into cookies. Returns null when the
   * header is not a string; otherwise the accumulated cookie list.
   */
  parseSetCookie(setCookieHeader: string|undefined): Cookie[]|null {
    if (!this._initialize(setCookieHeader)) {
      return null;
    }
    for (let kv = this._extractKeyValue(); kv; kv = this._extractKeyValue()) {
      if (this._lastCookie) {
        // Pairs following a cookie's first pair on the same line are its
        // attributes (Path, Expires, ...), not new cookies.
        this._lastCookie.addAttribute(kv.key, kv.value);
      } else {
        this._addCookie(kv, Type.Response);
      }
      if (this._advanceAndCheckCookieDelimiter()) {
        // A newline delimiter ends the current cookie.
        this._flushCookie();
      }
    }
    this._flushCookie();
    return this._cookies;
  }

  // Resets parser state for a new header; false when there is nothing to parse.
  _initialize(headerValue: string|undefined): boolean {
    this._input = headerValue;
    if (typeof headerValue !== 'string') {
      return false;
    }
    this._cookies = [];
    this._lastCookie = null;
    this._lastCookieLine = '';
    this._originalInputLength = (this._input as string).length;
    return true;
  }

  // Finalizes the cookie being assembled (size + raw line) and clears it.
  _flushCookie(): void {
    if (this._lastCookie) {
      // if we have a last cookie we know that these values all exist, hence the typecasts
      this._lastCookie.setSize(
          this._originalInputLength - (this._input as string).length - (this._lastCookiePosition as number));
      this._lastCookie.setCookieLine((this._lastCookieLine as string).replace('\n', ''));
    }
    this._lastCookie = null;
    this._lastCookieLine = '';
  }

  // Consumes one `key[=value]` token from the input; null when exhausted or
  // unparseable.
  _extractKeyValue(): KeyValue|null {
    if (!this._input || !this._input.length) {
      return null;
    }
    // Note: RFCs offer an option for quoted values that may contain commas and semicolons.
    // Many browsers/platforms do not support this, however (see http://webkit.org/b/16699
    // and http://crbug.com/12361). The logic below matches latest versions of IE, Firefox,
    // Chrome and Safari on some old platforms. The latest version of Safari supports quoted
    // cookie values, though.
    const keyValueMatch = /^[ \t]*([^\s=;]+)[ \t]*(?:=[ \t]*([^;\n]*))?/.exec(this._input);
    if (!keyValueMatch) {
      console.error('Failed parsing cookie header before: ' + this._input);
      return null;
    }
    const result = new KeyValue(
        keyValueMatch[1], keyValueMatch[2] && keyValueMatch[2].trim(),
        (this._originalInputLength as number) - this._input.length);
    this._lastCookieLine += keyValueMatch[0];
    this._input = this._input.slice(keyValueMatch[0].length);
    return result;
  }

  // Skips the `;` or newline after a token; returns true when the delimiter
  // included a newline (i.e. the current cookie is complete).
  _advanceAndCheckCookieDelimiter(): boolean {
    if (!this._input) {
      return false;
    }
    const match = /^\s*[\n;]\s*/.exec(this._input);
    if (!match) {
      return false;
    }
    this._lastCookieLine += match[0];
    this._input = this._input.slice(match[0].length);
    return match[0].match('\n') !== null;
  }

  // Starts a new cookie from the first key/value pair on a cookie line,
  // finalizing the size of the previous cookie if any.
  _addCookie(keyValue: KeyValue, type: Type): void {
    if (this._lastCookie) {
      this._lastCookie.setSize(keyValue.position - (this._lastCookiePosition as number));
    }
    // Mozilla bug 169091: Mozilla, IE and Chrome treat single token (w/o "=") as
    // specifying a value for a cookie with empty name.
    this._lastCookie = typeof keyValue.value === 'string' ? new Cookie(keyValue.key, keyValue.value, type) :
        new Cookie('', keyValue.key, type);
    if (this._domain) {
      this._lastCookie.addAttribute('domain', this._domain);
    }
    this._lastCookiePosition = keyValue.position;
    this._cookies.push(this._lastCookie);
  }
}
/**
 * A single `key[=value]` token extracted from a cookie header, together with
 * its character offset within the original input.
 */
class KeyValue {
  key: string;
  value: string|undefined;
  position: number;

  constructor(key: string, value: string|undefined, position: number) {
    this.position = position;
    this.value = value;
    this.key = key;
  }
}
|
package com.kino.argear.argear_flutter_plugin.utils
/** Callback for reporting completion of an asynchronous download task. */
interface DownloadAsyncResponse {
    /**
     * Invoked when the download finishes.
     *
     * @param result presumably `true` on success and `false` on failure —
     *               confirm against the download task that invokes this.
     */
    fun processFinish(result: Boolean)
}
|
# frozen_string_literal: true
module AppMap
  # Railtie connects the AppMap recorder to Rails-specific features.
  class Railtie < ::Rails::Railtie
    config.appmap = ActiveSupport::OrderedOptions.new

    initializer 'appmap.init' do |_| # params: app
      require 'appmap'
    end

    # appmap.subscribe subscribes to ActiveSupport Notifications so that they can be recorded as
    # AppMap events.
    initializer 'appmap.subscribe', after: 'appmap.init' do |_| # params: app
      require 'appmap/rails/sql_handler'
      require 'appmap/rails/request_handler'
      ActiveSupport::Notifications.subscribe 'sql.sequel', AppMap::Rails::SQLHandler.new
      ActiveSupport::Notifications.subscribe 'sql.active_record', AppMap::Rails::SQLHandler.new

      AppMap::Rails::RequestHandler::HookMethod.new.activate
    end

    # appmap.trace begins recording an AppMap trace and writes it to appmap.json.
    # This behavior is only activated if the configuration setting app.config.appmap.enabled
    # is truthy.
    initializer 'appmap.trace', after: 'appmap.subscribe' do |app|
      # The lambda exists so `return` can bail out early when recording is disabled.
      lambda do
        return unless app.config.appmap.enabled

        require 'appmap/command/record'
        require 'json'
        AppMap::Command::Record.new(AppMap.configuration).perform do |version, metadata, class_map, events|
          appmap = JSON.generate \
            version: version,
            metadata: metadata,
            classMap: class_map,
            events: events
          # File.write opens, writes and closes in one call. The previous
          # File.open(...).write(...) never closed the handle, leaving the
          # descriptor (and potentially buffered output) to GC finalization.
          File.write('appmap.json', appmap)
        end
      end.call
    end
  end
end
|
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:firebase_storage/firebase_storage.dart';
import 'package:image_picker/image_picker.dart';
import 'dart:core';
/// Root screen: lets the user pick an image (camera or gallery), upload it to
/// Firebase Storage, and display it again via its download URL.
class Home extends StatefulWidget {
  @override
  _HomeState createState() => _HomeState();
}
class _HomeState extends State<Home> {
  // Currently selected local image; null until the user picks one.
  File _imageFile;
  // Becomes true after a successful upload, revealing the Download button.
  bool _uploaded = false;
  // Fixed storage location: every upload overwrites 'myimage.jpg'.
  StorageReference _reference = FirebaseStorage.instance.ref().child('myimage.jpg');
  // Download URL of the uploaded image; null until download() succeeds.
  String _downloadUrl;

  /// Picks an image from [imageSource] and stores it in [_imageFile].
  ///
  /// [isCamera] is kept for backward compatibility but is no longer consulted:
  /// the previous implementation branched on it with hard-coded sources and
  /// silently ignored [imageSource]; the two are now guaranteed to agree.
  /// A cancelled picker is a no-op (previously it crashed on a null
  /// `pickedFile` when reading `.path`).
  Future getImage(bool isCamera, ImageSource imageSource) async {
    final PickedFile pickedFile = await ImagePicker().getImage(source: imageSource);
    if (pickedFile == null) {
      // User dismissed the picker without choosing an image.
      return;
    }
    setState(() {
      _imageFile = File(pickedFile.path);
    });
  }

  /// Uploads [_imageFile] to [_reference] and flips [_uploaded] on completion.
  Future upload() async {
    StorageUploadTask uploadTask = _reference.putFile(_imageFile);
    // Wait for completion; the snapshot itself is not needed.
    await uploadTask.onComplete;
    setState(() {
      _uploaded = true;
    });
  }

  /// Resolves the public download URL of the uploaded image.
  Future download() async {
    String downloadAddress = await _reference.getDownloadURL();
    setState(() {
      _downloadUrl = downloadAddress;
    });
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      backgroundColor: Colors.teal,
      appBar: AppBar(
        backgroundColor: Colors.black45,
        title: Text('Firebase Storage'),
        centerTitle: true,
      ),
      body: Center(
        child: SingleChildScrollView(
          child: Column(
            crossAxisAlignment: CrossAxisAlignment.center,
            mainAxisAlignment: MainAxisAlignment.center,
            children: [
              Container(
                width: 200.0,
                child: RaisedButton.icon(
                  onPressed: () {
                    getImage(true, ImageSource.camera);
                  },
                  label: Text('Camera',),
                  icon: Icon(Icons.camera),
                ),
              ),
              Container(
                width: 200.0,
                child: RaisedButton.icon(
                  onPressed: () {
                    getImage(false, ImageSource.gallery);
                  },
                  label: Text('Gallery',),
                  icon: Icon(Icons.image),
                ),
              ),
              // Preview, Upload and Download widgets appear progressively as
              // the user moves through the pick -> upload -> download flow.
              _imageFile == null ? Container() : Image.file(_imageFile, height: 500, width: 500,),
              _imageFile == null ? Container() : Container(
                width: 200.0,
                child: RaisedButton.icon(
                  onPressed: () {
                    upload();
                  },
                  label: Text('Upload'),
                  icon: Icon(Icons.cloud_upload),
                ),
              ),
              _uploaded == false ? Container() : Container(
                width: 200.0,
                child: RaisedButton.icon(
                  label: Text('Download'),
                  onPressed: () {
                    download();
                  },
                  icon: Icon(Icons.cloud_download),
                ),
              ),
              _downloadUrl == null ? Container() : Image.network(_downloadUrl),
            ],
          ),
        ),
      ),
    );
  }
}
|
import log from '@apify/log';
import { join } from 'path';
import { ensureDirSync, statSync, writeFileSync } from 'fs-extra';
import { ApifyStorageLocal } from '@apify/storage-local';
import { STORAGE_NAMES } from '@apify/storage-local/dist/consts';
import { prepareTestDir, removeTestDir } from './_tools';
// Per-test storage directory; recreated fresh before every test.
let STORAGE_DIR: string;

beforeEach(() => {
  STORAGE_DIR = prepareTestDir();
});

afterAll(() => {
  removeTestDir(STORAGE_DIR);
});

test('does not create folders immediately', () => {
  // eslint-disable-next-line no-new -- Testing to make sure creating an instance won't immediately create folders
  new ApifyStorageLocal({
    storageDir: STORAGE_DIR,
  });
  const requestQueueDir = join(STORAGE_DIR, STORAGE_NAMES.REQUEST_QUEUES);
  const keyValueStoreDir = join(STORAGE_DIR, STORAGE_NAMES.KEY_VALUE_STORES);
  const datasetDir = join(STORAGE_DIR, STORAGE_NAMES.DATASETS);
  // statSync throws ENOENT while the directories are still absent.
  for (const dir of [requestQueueDir, keyValueStoreDir, datasetDir]) {
    expect(() => statSync(dir)).toThrow('ENOENT');
  }
});

test('creates folders lazily', () => {
  const storageLocal = new ApifyStorageLocal({
    storageDir: STORAGE_DIR,
  });
  // Each accessor materializes its backing directory on first use.
  const requestQueueDir = join(STORAGE_DIR, STORAGE_NAMES.REQUEST_QUEUES);
  storageLocal.requestQueues();
  const keyValueStoreDir = join(STORAGE_DIR, STORAGE_NAMES.KEY_VALUE_STORES);
  storageLocal.keyValueStores();
  const datasetDir = join(STORAGE_DIR, STORAGE_NAMES.DATASETS);
  storageLocal.datasets();
  for (const dir of [requestQueueDir, keyValueStoreDir, datasetDir]) {
    expect(statSync(dir).isDirectory()).toBe(true);
  }
});

test('warning is shown when storage is non-empty', () => {
  const storageLocal = new ApifyStorageLocal({
    storageDir: STORAGE_DIR,
  });
  const requestQueueDir = join(STORAGE_DIR, STORAGE_NAMES.REQUEST_QUEUES);
  const keyValueStoreDir = join(STORAGE_DIR, STORAGE_NAMES.KEY_VALUE_STORES);
  const datasetDir = join(STORAGE_DIR, STORAGE_NAMES.DATASETS);
  const fileData = JSON.stringify({ foo: 'bar' });
  const innerDirName = 'default';
  // Pre-populate each storage type with one file so all stores are non-empty.
  const innerRequestQueueDir = join(requestQueueDir, innerDirName);
  ensureDirSync(innerRequestQueueDir);
  writeFileSync(join(innerRequestQueueDir, '000000001.json'), fileData);
  const innerKeyValueStoreDir = join(keyValueStoreDir, innerDirName);
  ensureDirSync(innerKeyValueStoreDir);
  writeFileSync(join(innerKeyValueStoreDir, 'INPUT.json'), fileData);
  const innerDatasetDir = join(datasetDir, innerDirName);
  ensureDirSync(innerDatasetDir);
  writeFileSync(join(innerDatasetDir, '000000001.json'), fileData);
  const warnings = jest.spyOn(log, 'warning');
  storageLocal.keyValueStores();
  storageLocal.requestQueues();
  storageLocal.datasets();
  // warning is expected to be shown 2 times only (for Dataset and Request queue)
  // as it should not be shown when INPUT.json is the only file in Key-value store
  expect(warnings).toHaveBeenCalledTimes(2);
});
|
---
layout: watch
title: TLP2 - 06/06/2019 - M20190606_221121_TLP_2T.jpg
date: 2019-06-06 22:11:21
permalink: /2019/06/06/watch/M20190606_221121_TLP_2
capture: TLP2/2019/201906/20190606/M20190606_221121_TLP_2T.jpg
---
|
module QQ
  module Api
    # Location-based-services endpoints of the QQ API.
    # NOTE(review): every method body is commented out (the `#hashie post ...`
    # lines), so all methods are currently no-ops returning nil — confirm
    # whether this is intentional before relying on them.
    class Lbs < Base
      # Update the user's geographic position.
      # longitude: e.g. 22.541321
      # latitude:  e.g. 13.935558
      def update_pos(longitude, latitude, opts={})
        #hashie post("lbs/update_pos.json",{:longitude => longitude, :latitude => latitude}.merge(opts))
      end

      # Delete the most recently reported position.
      def del_pos()
        #hashie post("lbs/del_pos.json")
      end

      # Fetch POIs (Points of Interest) near a position.
      # longitude: e.g. 22.541321
      # latitude:  e.g. 13.935558
      # reqnum:    number of records per request (1-25)
      # radius:    POI search radius in meters (100-1000); 200 recommended
      #            for good search results
      # position:  paging cursor returned by the previous query (0 on the
      #            first request)
      def get_poi(opts={})
        #hashie post("lbs/get_poi.json",opts)
      end

      # Fetch the latest microblog posts nearby.
      # longitude: e.g. 22.541321
      # latitude:  e.g. 13.935558
      # pageinfo:  paging token returned by the previous request (empty on
      #            the first request)
      # pagesize:  records per page (1-50); 25 recommended
      def get_around_new(opts={})
        #hashie post("lbs/get_around_new.json",opts)
      end

      # Fetch people nearby.
      # gender: 0 = all, 1 = male, 2 = female
      def get_around_people(opts={})
        #hashie post("lbs/get_around_people.json",opts)
      end
    end
  end
end
|
import TabsActions from './TabsActions';
// Default state: the "all" tab is selected until a switch action arrives.
const initialState = { activeTab: "all" };

/**
 * Reducer for the tab-selection slice of state.
 * Produces a new state object when a tab-switch action is dispatched;
 * any other action passes the current state through unchanged.
 */
export function tabsReducer(state = initialState, action) {
  if (action.type === TabsActions.tabSwitched) {
    return { ...state, activeTab: action.tab };
  }
  return state;
}
|
#include <Rcpp.h>
using namespace Rcpp;
// Below is a simple example of exporting a C++ function to R. You can
// source this function into an R session using the Rcpp::sourceCpp
// function (or via the Source button on the editor toolbar)
// For more on using Rcpp click the Help button on the editor toolbar
// [[Rcpp::export]]
double f1(NumericVector x) {
  // Arithmetic mean of x.  Each element's contribution x[i]/n is added
  // separately (same summation order as before, so results are identical).
  int len = x.size();
  double acc = 0;
  for (int i = 0; i < len; ++i) {
    acc += x[i] / len;
  }
  return acc;
}
// [[Rcpp::export]]
NumericVector f2(NumericVector x) {
  // Cumulative sum: out[i] = x[0] + ... + x[i].
  int n = x.size();
  NumericVector out(n);
  // BUG FIX: the original unconditionally wrote out[0] = x[0], which is an
  // out-of-bounds access when x is empty; guard the empty case.
  if (n == 0) return out;
  out[0] = x[0];
  for (int i = 1; i < n; ++i) {
    out[i] = out[i - 1] + x[i];
  }
  return out;
}
// [[Rcpp::export]]
bool f3(LogicalVector x) {
  // Equivalent of R's any(): true as soon as one element is TRUE.
  for (int i = 0, n = x.size(); i < n; ++i) {
    if (x[i]) {
      return true;
    }
  }
  return false;
}
// [[Rcpp::export]]
int f4(Function pred, List x) {
  // 1-based position of the first element for which pred() is truthy;
  // 0 when no element matches (mirrors R's Position()).
  int len = x.size();
  for (int idx = 0; idx < len; ++idx) {
    LogicalVector verdict = pred(x[idx]);
    if (verdict[0]) {
      return idx + 1;
    }
  }
  return 0;
}
// [[Rcpp::export]]
NumericVector f5(NumericVector x, NumericVector y) {
  // Element-wise pmin() with R-style recycling of the shorter vector.
  int len = std::max(x.size(), y.size());
  NumericVector xr = rep_len(x, len);
  NumericVector yr = rep_len(y, len);
  NumericVector result(len);
  for (int i = 0; i < len; ++i) {
    result[i] = std::min(xr[i], yr[i]);
  }
  return result;
}
|
public-playlist
===============
This dynamic web application creates a playlist of songs that users input into the form.
|
---
title: RESTful
categories: web
date: 2017-02-18 09:21:54
---
#### 理解
>RESTful 表现层状态转移。C/S 架构下,server保存数据状态,client发送查询,修改,删除,添加等请求。这里的表现层状态指服务器资源在client的展现状态。转移是指我们通过http请求修改服务资源后,表现层的状态改变
#### 要点
> 1 避免url包含动词,有些需求可以用服务代替,比如转账,transaction
> 2 考虑api.example.com vs example.com/api
> 3 api版本 example.com/api/v1/user/1
#### 幂等性
> 1 get 多次查询同一资源 服务器资源状态不会改变
> 2 delete 重复删除同一资源,服务器最终资源结果一样
> 3 put 重复提交同一个修改请求,服务器资源状态不会改变
> 4 post 非幂等,服务器多次创建新的资源
|
// Copyright 2020 New Relic, Inc. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
using System.Collections.Generic;
using System.Linq;
using NewRelic.Agent.IntegrationTestHelpers;
using Xunit;
using Xunit.Abstractions;
namespace NewRelic.Agent.IntegrationTests.DistributedTracing
{
[NetFrameworkTest]
public class DistributedTracingApiTests_W3C : DtApiTestBase
{
    public DistributedTracingApiTests_W3C(RemoteServiceFixtures.DistributedTracingApiFixture fixture, ITestOutputHelper output)
        : base(fixture, output, TracingTestOption.W3cAndNewrelicHeaders)
    {
    }

    /// <summary>
    /// Verifies the supportability metrics emitted when the distributed
    /// tracing API is exercised with W3C + New Relic headers enabled.
    /// </summary>
    [Fact]
    public override void Metrics()
    {
        // Each entry pins the exact call count expected for one API
        // invocation or trace-context operation.
        var expectedMetrics = new List<Assertions.ExpectedMetric>
        {
            new Assertions.ExpectedMetric { metricName = @"Supportability/ApiInvocation/InsertDistributedTraceHeaders", callCount = 1 },
            new Assertions.ExpectedMetric { metricName = @"Supportability/ApiInvocation/AcceptDistributedTraceHeaders", callCount = 1 },
            new Assertions.ExpectedMetric { metricName = @"Supportability/ApiInvocation/CurrentTransaction", callCount = 2 },
            new Assertions.ExpectedMetric { metricName = @"Supportability/TraceContext/Accept/Success", callCount = 1 },
            new Assertions.ExpectedMetric { metricName = @"Supportability/TraceContext/Create/Success", callCount = 1 },
        };

        var actualMetrics = _fixture.AgentLog.GetMetrics().ToList();
        Assertions.MetricsExist(expectedMetrics, actualMetrics);
    }
}
|
<?php
namespace common\models;
use Yii;
use yii\behaviors\TimestampBehavior;
/**
* This is the model class for table "register".
*
* @property int $id
* @property int|null $section_id
* @property int|null $employee_id
* @property string|null $fullname
* @property string|null $phone
* @property string|null $email
* @property int|null $date
* @property string|null $information
* @property int|null $status
* @property int|null $created_at
* @property int|null $updated_at
*/
class Register extends \yii\db\ActiveRecord
{
    // Values stored in the `status` column.
    const ACTIVE_STATUS = 10;
    const DEACTIVE_STATUS = 9;
    const DELETE_STATUS = 0;

    /**
     * Attaches TimestampBehavior so created_at/updated_at are populated
     * automatically on insert and update.
     */
    public function behaviors()
    {
        return [
            TimestampBehavior::class,
        ];
    }
    /**
     * {@inheritdoc}
     */
    public static function tableName()
    {
        return 'register';
    }
    /**
     * {@inheritdoc}
     *
     * NOTE(review): the class-level @property tags declare most columns as
     * nullable, but the first rule marks them required — confirm which is
     * the intended contract.
     */
    public function rules()
    {
        return [
            [['section_id','employee_id','date','fullname','phone','information','email'],'required'],
            [['section_id', 'employee_id', 'status', 'created_at', 'updated_at'], 'integer'],
            [['information'], 'string'],
            // New records default to the active status when none is given.
            [['status'], 'default', 'value' => self::ACTIVE_STATUS],
            [['fullname'], 'string', 'max' => 255],
            [['phone'], 'string', 'max' => 100],
            [['email'], 'string', 'max' => 200],
        ];
    }
    /**
     * {@inheritdoc}
     */
    public function attributeLabels()
    {
        return [
            'id' => Yii::t('app', 'ID'),
            'section_id' => Yii::t('app', 'Section ID'),
            'employee_id' => Yii::t('app', 'Employee ID'),
            'fullname' => Yii::t('app', 'Fullname'),
            'phone' => Yii::t('app', 'Phone'),
            'email' => Yii::t('app', 'Email'),
            'date' => Yii::t('app', 'Date'),
            'information' => Yii::t('app', 'Information'),
            'status' => Yii::t('app', 'Status'),
            'created_at' => Yii::t('app', 'Created At'),
            'updated_at' => Yii::t('app', 'Updated At'),
        ];
    }
    /**
     * {@inheritdoc}
     * @return \common\models\query\RegisterQuery the active query used by this AR class.
     */
    public static function find()
    {
        return new \common\models\query\RegisterQuery(get_called_class());
    }
    /**
     * Relation: the section this registration belongs to (via section_id).
     */
    public function getSections(){
        return $this->hasOne(Sections::class,['id'=>'section_id']);
    }
    /**
     * Relation: the employee this registration is for (via employee_id).
     */
    public function getEmployees(){
        return $this->hasOne(Employees::class,['id'=>'employee_id']);
    }
}
|
# Adminlte template for pure Javascript apps using Apache Cordova and Vue.js
Login is backed by a Laravel Passport backend.
## TODO
|
/*
* Copyright 2010 Chad Retz
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.statmantis.mport.retro.game;
/**
* Columns in the game log file
*
* @author Chad Retz
*/
public enum GameLogColumn {

    // NOTE: declaration order matters — ordinal() presumably maps each
    // constant to its column index in the game log file; do not reorder.

    // --- Game identification and result ---
    DATE,
    GAME_NUMBER,
    DAY_OF_WEEK,
    VISITING_TEAM,
    VISITING_LEAGUE,
    VISITING_GAME_NUMBER,
    HOME_TEAM,
    HOME_LEAGUE,
    HOME_GAME_NUMBER,
    VISITING_SCORE,
    HOME_SCORE,
    TOTAL_OUTS,

    // --- Conditions and administrative details ---
    DAY_OR_NIGHT,
    COMPLETION_INFO,
    FORFEIT_INFO,
    PROTEST_INFO,
    PARK,
    ATTENDANCE,
    TIME_IN_MINUTES,
    VISITING_LINE_SCORE,
    HOME_LINE_SCORE,

    // --- Visiting team batting/baserunning totals ---
    VISITING_AT_BATS,
    VISITING_HITS,
    VISITING_DOUBLES,
    VISITING_TRIPLES,
    VISITING_HOMERUNS,
    VISITING_RBI,
    VISITING_SACRIFICE_HITS,
    VISITING_SACRIFICE_FLIES,
    VISITING_HIT_BY_PITCH,
    VISITING_WALKS,
    VISITING_INTENTIONAL_WALKS,
    VISITING_STRIKEOUTS,
    VISITING_STOLEN_BASES,
    VISITING_CAUGHT_STEALING,
    VISITING_GROUNDED_INTO_DOUBLE_PLAYS,
    VISITING_FIRST_ON_CATCHER_INTERFERENCE,
    VISITING_LEFT_ON_BASE,

    // --- Visiting team pitching totals ---
    VISITING_PITCHERS_USED,
    VISITING_INDIVIDUAL_EARNED_RUNS,
    VISITING_TEAM_EARNED_RUNS,
    VISITING_WILD_PITCHES,
    VISITING_BALKS,

    // --- Visiting team fielding totals ---
    VISITING_PUTOUTS,
    VISITING_ASSISTS,
    VISITING_ERRORS,
    VISITING_PASSED_BALLS,
    VISITING_DOUBLE_PLAYS,
    VISITING_TRIPLE_PLAYS,

    // --- Home team batting/baserunning totals ---
    HOME_AT_BATS,
    HOME_HITS,
    HOME_DOUBLES,
    HOME_TRIPLES,
    HOME_HOMERUNS,
    HOME_RBI,
    HOME_SACRIFICE_HITS,
    HOME_SACRIFICE_FLIES,
    HOME_HIT_BY_PITCH,
    HOME_WALKS,
    HOME_INTENTIONAL_WALKS,
    HOME_STRIKEOUTS,
    HOME_STOLEN_BASES,
    HOME_CAUGHT_STEALING,
    HOME_GROUNDED_INTO_DOUBLE_PLAYS,
    HOME_FIRST_ON_CATCHER_INTERFERENCE,
    HOME_LEFT_ON_BASE,

    // --- Home team pitching totals ---
    HOME_PITCHERS_USED,
    HOME_INDIVIDUAL_EARNED_RUNS,
    HOME_TEAM_EARNED_RUNS,
    HOME_WILD_PITCHES,
    HOME_BALKS,

    // --- Home team fielding totals ---
    HOME_PUTOUTS,
    HOME_ASSISTS,
    HOME_ERRORS,
    HOME_PASSED_BALLS,
    HOME_DOUBLE_PLAYS,
    HOME_TRIPLE_PLAYS,

    // --- Umpires ---
    HOME_PLATE_UMPIRE_ID,
    HOME_PLATE_UMPIRE_NAME,
    FIRST_BASE_UMPIRE_ID,
    FIRST_BASE_UMPIRE_NAME,
    SECOND_BASE_UMPIRE_ID,
    SECOND_BASE_UMPIRE_NAME,
    THIRD_BASE_UMPIRE_ID,
    THIRD_BASE_UMPIRE_NAME,
    LEFT_FIELD_UMPIRE_ID,
    LEFT_FIELD_UMPIRE_NAME,
    RIGHT_FIELD_UMPIRE_ID,
    RIGHT_FIELD_UMPIRE_NAME,

    // --- Managers ---
    VISITING_MANAGER_ID,
    VISITING_MANAGER_NAME,
    HOME_MANAGER_ID,
    HOME_MANAGER_NAME,

    // --- Pitching decisions and game-winning RBI ---
    WINNING_PITCHER_ID,
    WINNING_PITCHER_NAME,
    LOSING_PITCHER_ID,
    LOSING_PITCHER_NAME,
    SAVING_PITCHER_ID,
    SAVING_PITCHER_NAME,
    GAME_WINNING_RBI_PLAYER_ID,
    GAME_WINNING_RBI_PLAYER_NAME,
    VISITING_STARTING_PITCHER_ID,
    VISITING_STARTING_PITCHER_NAME,
    HOME_STARTING_PITCHER_ID,
    HOME_STARTING_PITCHER_NAME,

    // --- Starting lineups (id, name, position per batting slot 1-9) ---
    VISITING_PLAYER_1_ID,
    VISITING_PLAYER_1_NAME,
    VISITING_PLAYER_1_POSITION,
    VISITING_PLAYER_2_ID,
    VISITING_PLAYER_2_NAME,
    VISITING_PLAYER_2_POSITION,
    VISITING_PLAYER_3_ID,
    VISITING_PLAYER_3_NAME,
    VISITING_PLAYER_3_POSITION,
    VISITING_PLAYER_4_ID,
    VISITING_PLAYER_4_NAME,
    VISITING_PLAYER_4_POSITION,
    VISITING_PLAYER_5_ID,
    VISITING_PLAYER_5_NAME,
    VISITING_PLAYER_5_POSITION,
    VISITING_PLAYER_6_ID,
    VISITING_PLAYER_6_NAME,
    VISITING_PLAYER_6_POSITION,
    VISITING_PLAYER_7_ID,
    VISITING_PLAYER_7_NAME,
    VISITING_PLAYER_7_POSITION,
    VISITING_PLAYER_8_ID,
    VISITING_PLAYER_8_NAME,
    VISITING_PLAYER_8_POSITION,
    VISITING_PLAYER_9_ID,
    VISITING_PLAYER_9_NAME,
    VISITING_PLAYER_9_POSITION,
    HOME_PLAYER_1_ID,
    HOME_PLAYER_1_NAME,
    HOME_PLAYER_1_POSITION,
    HOME_PLAYER_2_ID,
    HOME_PLAYER_2_NAME,
    HOME_PLAYER_2_POSITION,
    HOME_PLAYER_3_ID,
    HOME_PLAYER_3_NAME,
    HOME_PLAYER_3_POSITION,
    HOME_PLAYER_4_ID,
    HOME_PLAYER_4_NAME,
    HOME_PLAYER_4_POSITION,
    HOME_PLAYER_5_ID,
    HOME_PLAYER_5_NAME,
    HOME_PLAYER_5_POSITION,
    HOME_PLAYER_6_ID,
    HOME_PLAYER_6_NAME,
    HOME_PLAYER_6_POSITION,
    HOME_PLAYER_7_ID,
    HOME_PLAYER_7_NAME,
    HOME_PLAYER_7_POSITION,
    HOME_PLAYER_8_ID,
    HOME_PLAYER_8_NAME,
    HOME_PLAYER_8_POSITION,
    HOME_PLAYER_9_ID,
    HOME_PLAYER_9_NAME,
    HOME_PLAYER_9_POSITION,

    // --- Miscellaneous ---
    ADDITIONAL_INFO,
    ACQUISITION_INFO
}
|
#!/usr/bin/env python
"""Thin launcher that delegates to gunicorn's command-line entry point."""
import sys
from gunicorn.app.wsgiapp import run
if __name__ == '__main__':
    # run() returns gunicorn's exit code; propagate it as the process status.
    sys.exit(run())
|
#!/bin/bash
# Smoke test: run the companion binary (<name>.x, derived from this
# script's name) and verify its output contains the expected values.
# Fixes: all parameter expansions are now quoted so paths containing
# spaces work, and the exit-status check chains directly on the command
# instead of going through a separate [[ ${?} == 0 ]] test.
TESTS=$( dirname "$0" )   # directory of this test script (kept for parity with sibling tests)
set -x
THIS=$0
BIN=${THIS%.sh}.x         # binary under test, next to this script
OUTPUT=${THIS%.sh}.out    # captured stdout+stderr
"${BIN}" >& "${OUTPUT}" || exit 1
grep "version: 1.2.6" "${OUTPUT}" || exit 1
grep "count: 5" "${OUTPUT}" || exit 1
grep "b1: -1" "${OUTPUT}" || exit 1
grep "b2: 1" "${OUTPUT}" || exit 1
grep "b3: 1" "${OUTPUT}" || exit 1
exit 0
|
import React, {useEffect, useState} from 'react'
import SlidesApi from "../../api/SlidesApi";
import Title from "./Title";
import {ProgressIndicator} from "@fluentui/react";
import './Slides.css'
import ImageSlide from "./Image";
function SlideImpl(props) {
const {id: presentationId, renderId} = props
const [data, setData] = useState({title: null, subTitle: null, image: null})
const api = new SlidesApi()
useEffect(() => {
api.currentSlide(presentationId).then(data => setData(data))
}, [presentationId, renderId])
console.log(data)
if (!!data.title)
return <Title {...data}/>
if (!!data.image)
return <ImageSlide {...data}/>
return (
<div className="centerTitle">
<ProgressIndicator label="Загружаем" description="Получение информации от сервера." barHeight={5}/>
</div>
)
}
export default class Slide extends React.Component {
constructor(props) {
super(props);
this.state = { renderId: 0}
}
componentDidMount() {
this.interval = setInterval(() => {
this.setState((prev) => {
return {...prev, renderId: prev.renderId + 1};
});
}, 2000);
}
componentWillUnmount() {
clearInterval(this.interval)
}
render() {
return <SlideImpl id={this.props.id} renderId={this.state.renderId}/>
}
}
|
package io.delmore.circeConfig
import com.fortysevendeg.lambdatest._
object Main {
  // Entry point: executes the JsonConfig test suite via LambdaTest's
  // run() helper (in scope through the wildcard import above).
  def main(args: Array[String]): Unit =
    run("JsonConfig Tests", new JsonConfigTest)
}
|
import React from "react";
import { Icon, Table } from "semantic-ui-react";
const DATE_FORMAT = "Do MMM YYYY";
const TIME_FORMAT = "h:mm a";
const TrainingDetailTable = ({ training }) => (
<Table basic="very" collapsing>
<Table.Body>
<Table.Row>
<Table.Cell>
<Icon name="clock outline" />
Start
</Table.Cell>
<Table.Cell>{training.start.format(TIME_FORMAT)}</Table.Cell>
<Table.Cell textAlign="right">
{training.start.format(DATE_FORMAT)}
</Table.Cell>
</Table.Row>
<Table.Row>
<Table.Cell>
<Icon name="clock outline" />
Finish
</Table.Cell>
<Table.Cell>{training.end.format(TIME_FORMAT)}</Table.Cell>
<Table.Cell textAlign="right">
{training.end.format(DATE_FORMAT)}
</Table.Cell>
</Table.Row>
</Table.Body>
</Table>
);
export default TrainingDetailTable;
|
package cn.threefishes.cloudrepository.entity;
/**
 * Plain entity mapping one row of a cart-bundling association: links a
 * cart entry to the bundled goods item and its owning member.
 */
public class CartBundling {
    // Primary key of the bundling row.
    private Integer cartBundlingId;
    // Cart this bundling belongs to.
    private Integer cartId;
    // Common (SPU-level) product id.
    private Integer commonId;
    // Concrete goods (SKU-level) id.
    private Integer goodsId;
    // Member who owns the cart.
    private Integer memberId;

    public Integer getCartBundlingId() {
        return cartBundlingId;
    }

    public void setCartBundlingId(Integer cartBundlingId) {
        this.cartBundlingId = cartBundlingId;
    }

    public Integer getCartId() {
        return cartId;
    }

    public void setCartId(Integer cartId) {
        this.cartId = cartId;
    }

    public Integer getCommonId() {
        return commonId;
    }

    public void setCommonId(Integer commonId) {
        this.commonId = commonId;
    }

    public Integer getGoodsId() {
        return goodsId;
    }

    public void setGoodsId(Integer goodsId) {
        this.goodsId = goodsId;
    }

    public Integer getMemberId() {
        return memberId;
    }

    public void setMemberId(Integer memberId) {
        this.memberId = memberId;
    }
}
|
## [1.0.0] - 2020-11-08
* Initial release of Stilo! Includes core utility classes to simplify Flutter development.
## [1.0.1] - 2020-11-08
* Update library documentation
* Add library homepage url
## [1.1.0] - 2020-11-22
* Add black and white colors
* Add numeric value in font_weight docs
## [1.1.1] - 2020-11-22
* Define public constant values for border radius
* Define public constant values for edges
## [1.1.2] - 2021-02-07
* Fix duration types: replace microseconds with milliseconds
* Add new duration types
## [2.0.0] - 2021-05-15
* Add new constants based on a new spacing scale
* Improve naming of public constant values
* Improve docs of class constants
* Migrated to null safety
|
#!/bin/bash
# Builds the blinker-capgen Debian package with fpm from a throwaway
# staging directory.
SCRIPT="$(readlink -f "$0")"
SCRIPT_PATH="$(dirname "$SCRIPT")"
# Package version derives from capture.py's modification time (UTC-ish
# timestamp), so rebuilding an unchanged file yields the same version.
VERSION=$(date -d @`stat -c '%Y' "$SCRIPT_PATH/capture.py"` '+%Y%m%d%H%M%S')
DIR="$(mktemp -d)"
# Stage the package contents: the chromedriver symlink and capture.py.
pushd "$DIR" &>/dev/null
mkdir -p usr/bin opt/blinker
ln -s /usr/lib/chromium-browser/chromedriver usr/bin/chromedriver
cp "$SCRIPT_PATH/capture.py" opt/blinker
popd &>/dev/null
# Build the .deb; the trailing grep -v only filters a noisy fpm warning
# line out of the log output.
fpm -C "$DIR" -t deb -s dir -n blinker-capgen \
    --log warn \
    --description "Blinker - tools for generating packet captures" \
    --license 'BSD-3-Clause' \
    --version "$VERSION" \
    --vendor 'Gábor Szarka' \
    --url 'https://gs509.user.srcf.net/blinker/' \
    -d mininet -d bridge-utils \
    -d chromium-browser -d chromium-chromedriver -d xvfb \
    usr/bin opt/blinker |
    grep -v "Debian packaging tools generally labels all files"
rm -rf "$DIR"
|
---
layout: post-index
title: All Posts
excerpt: "A List of Posts"
image:
feature: hoian.jpg
---
A reference for how posts should be structured, if I ever decide to do this. The posts themselves live in `_posts`.
|
package money.nala.pay.interview.data.model
import money.nala.pay.interview.R
/**
 * Countries in which the wallet service operates, with the static metadata
 * the app needs for each: international dialing code, display name and flag
 * resources, ISO code, default currency, and supported languages.
 *
 * EMPTY is a sentinel for "no country selected"; [validCountries] excludes it.
 */
enum class WalletServiceCountry(val countryCode: Int,
                                val nameResource: Int,
                                val flagResource: Int,
                                val countryIso: String,
                                val defaultServiceCurrency: WalletServiceCurrency,
                                val languages: List<Language>) {
    // Tanzania: +255, TZS currency, Swahili + English.
    TZ(255, R.string.country_name_tanzania,
            R.drawable.ic_country_tz, WalletServiceCountry.COUNTRY_TZ_ISO, WalletServiceCurrency.TZS,
            listOf(Language.SWAHILI, Language.ENGLISH)),
    // Uganda: +256, UGX currency, English only.
    UG(256, R.string.country_name_uganda,
            R.drawable.ic_country_ug, WalletServiceCountry.COUNTRY_UG_ISO, WalletServiceCurrency.UGX,
            listOf(Language.ENGLISH)),
    // Sentinel value — zeroed resources and an empty language list.
    EMPTY(0, 0, 0, "empty", WalletServiceCurrency.EMPTY,
            emptyList());

    companion object {
        // All real, selectable countries (i.e. everything except EMPTY).
        @JvmStatic
        fun validCountries() =
                values().filter { it != EMPTY }

        private const val COUNTRY_TZ_ISO = "TZ"
        private const val COUNTRY_UG_ISO = "UG"
    }
}
|
class Solution {
public:
    // Scratch buffer for merging; sized to the input in reversePairs().
    vector<int> B;

    /**
     * Counts "reverse pairs" (i < j with A[i] > 2*A[j]) inside A[l, r)
     * while merge-sorting that range in place.  Pairs spanning the two
     * halves are counted with a linear two-pointer sweep before merging.
     */
    long long count_inversion(vector<int>& A, int l, int r) {
        if (l + 1 >= r) return 0;
        int mid = (l + r) / 2; long long res = 0;
        res += count_inversion(A, l, mid);
        res += count_inversion(A, mid, r);
        // Both halves are sorted: for each left element, advance j over the
        // right half while A[i] > 2*A[j]; 2LL prevents int overflow.
        for (int i = l, j = mid; i < mid; i++) {
            while (j < r and A[i] > 2LL * A[j]) j++;
            res += (j - mid);
        }
        // Standard merge of the two sorted halves into B, then copy back.
        int i = l, j = mid, k = l;
        for (; i < mid and j < r; k++)
            if (A[i] < A[j]) B[k] = A[i++];
            else B[k] = A[j++];
        for (; i < mid; k++) B[k] = A[i++];
        for (; j < r; k++) B[k] = A[j++];
        for (int i = l; i < r; i++) A[i] = B[i];
        return res;
    }

    int reversePairs(vector<int>& nums) {
        // BUG FIX: reserve() only allocates capacity and leaves size() == 0,
        // so the B[k] writes above were out-of-bounds (undefined behavior).
        // resize() gives the buffer real, indexable elements.
        B.resize(nums.size());
        return count_inversion(nums, 0, nums.size());
    }
};
|
import BRadio from './radio'
import BRadioGroup from './radio-group'
export {
BRadio,
BRadioGroup
}
|
puppetlabs-netscaler
====================
Puppet module for automating the configuration of Citrix Netscaler devices
|
/**
* Created by Stefan on 9/19/2017
*/
'use strict';
var crypto = require('crypto');
// In-memory session table keyed by session id, plus the handle of the
// single pending cleanup timer (unset when no sweep is scheduled).
var sessions={}
, timeout;
// Safe hasOwnProperty check (works even if o shadows hasOwnProperty).
function ownProp(o,p){return Object.prototype.hasOwnProperty.call(o,p)}
// Returns the live session for this request, creating a new one when the
// request carries no id or an unknown id.  Also refreshes the session's
// expiration and lazily schedules the periodic cleanup sweep.
function lookupOrCreate(req,opts){
var id,session;
opts=opts || {};
id=idFromRequest(req, opts);
req.sessionID = id;
if(ownProp(sessions, id)){
session=sessions[id]}
else{
session=new Session(id,opts);
sessions[id]=session}
// Sliding expiration: each lookup extends the session by its lifetime
// (lifetime is in seconds; expiration is a millisecond timestamp).
session.expiration=(+new Date)+session.lifetime * 1000;
if(!timeout)
timeout=setTimeout(cleanup, 60000);
return session
}
// Sweeps expired sessions out of the table.  Reschedules itself for just
// after the next-soonest expiration, or not at all when nothing remains.
function cleanup(){var id, now, next;
now = +new Date;
next=Infinity;
timeout=null;
for(id in sessions) if(ownProp(sessions,id)){
if(sessions[id].expiration < now){
delete sessions[id]}
else next = next<sessions[id].expiration ? next : sessions[id].expiration}
if(next<Infinity)
// +1000ms of slack so the rescheduled sweep lands past the expiration.
timeout=setTimeout(cleanup,next - (+new Date) + 1000)
}
// Derives a session id for the request: reuse the id from the
// "connect.sid" cookie when it maps to a live session, else honor an
// explicitly supplied opts.sessionID, else mint a fresh random id.
// FIX: the dot in the cookie-name regex is now escaped; previously it
// matched any character, so an unrelated cookie such as "connectXsid"
// could be mistaken for the session cookie.
function idFromRequest(req,opts){var m;
if(req.headers.cookie
&& (m = /connect\.sid=([^ ,;]*)/.exec(req.headers.cookie))
&& ownProp(sessions,m[1])){
return m[1]}
if(opts.sessionID) return opts.sessionID;
return crypto.createHash('sha256').update(randomString(64)+opts.secret).digest('hex');
}
/**
 * Session value object.
 *
 * @param id   opaque session identifier
 * @param opts optional settings: path, domain, lifetime (seconds),
 *             persistent, secret
 */
function Session(id, opts) {
  this.id = id;
  this.data = {};
  this.path = opts.path || '/';
  this.domain = opts.domain;
  if (opts.lifetime) {
    // Explicit lifetime: cookie persists across browser restarts unless
    // opts.persistent is explicitly set to false.
    this.persistent = 'persistent' in opts ? opts.persistent : true;
    this.lifetime = opts.lifetime;
  } else {
    // Default: non-persistent cookie with a one-day server-side lifetime.
    this.persistent = false;
    this.lifetime = 86400;
  }
  if (opts.secret) {
    this.secret = opts.secret;
  }
}
function randomString(bits){var chars,rand,i,ret;
chars='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
ret='';
while(bits > 0){
rand=Math.floor(Math.random()*0x100000000); // 32-bit integer
for(i=26; i>0 && bits>0; i-=6, bits-=6) ret+=chars[0x3F & rand >>> i]}
return ret
}
// Serializes this session as a Set-Cookie header value:
// "connect.sid=<id>[; path=...][; domain=...][; expires=...]".
// The expires attribute is emitted only for persistent sessions.
Session.prototype.getSetCookieHeaderValue=function(){var parts;
parts=['connect.sid='+this.id];
if(this.path) parts.push('path='+this.path);
if(this.domain) parts.push('domain='+this.domain);
if(this.persistent) parts.push('expires='+dateCookieString(this.expiration));
return parts.join('; ')};
// Formats a millisecond timestamp in the legacy cookie "expires" format,
// e.g. "Thu, 02-Jan-2020 03:04:05 GMT".
function dateCookieString(ms) {
  var d = new Date(ms);
  var wdy = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
  var mon = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
             'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
  var datePart = wdy[d.getUTCDay()] + ', ' + pad(d.getUTCDate()) + '-' +
                 mon[d.getUTCMonth()] + '-' + d.getUTCFullYear();
  var timePart = pad(d.getUTCHours()) + ':' + pad(d.getUTCMinutes()) + ':' +
                 pad(d.getUTCSeconds());
  return datePart + ' ' + timePart + ' GMT';
}
// Left-pads a number to two digits.
function pad(n) { return n > 9 ? '' + n : '0' + n; }
// Removes this session from the in-memory table; a later request with the
// same cookie will be issued a brand-new session.
Session.prototype.destroy = function(){
delete sessions[this.id]
};
// No-op persistence hook (sessions live only in memory); invokes the
// callback immediately when one is given, for API compatibility.
Session.prototype.save = function(callback){
callback && callback();
};
module.exports.lookupOrCreate=lookupOrCreate;
module.exports.sessionRoot=sessions;
|
# Automated provisioning with Puppet
The IAM login service Puppet module can be found [here][puppet-iam-repo].
The module configures the IAM Login Service packages installation,
configuration and the automatic generation of the JWK keystore.
The setup of the MySQL database used by the service as well as the setup of the
reverse proxy are **not covered** by this module.
However, the module provides an example setup of both the Login Service and
NginX as a reverse proxy, using the official NginX Puppet module.
For more detailed information about the Indigo IAM Puppet module usage, see the
documentation in the [Github repository][puppet-iam-repo].
[puppet-iam-repo]: https://github.com/indigo-iam/puppet-indigo-iam
|
---
title: TxTransactionBootstrap配置详解
keywords: configuration
description: TxTransactionBootstrap配置详解
---
### @TxTransaction annotation详解
* 该注解为分布式事务的切面(AOP point),如果业务方的service服务需要参与分布式事务,则需要加上此注解。
### TxTransactionBootstrap 详解:
```xml
<context:component-scan base-package="org.dromara.raincat.*"/>
<aop:aspectj-autoproxy expose-proxy="true"/>
<bean id="txTransactionBootstrap" class="org.dromara.raincat.core.bootstrap.TxTransactionBootstrap">
<property name="txManagerUrl" value="http://localhost:8761"/>
<property name="serializer" value="kryo"/>
<property name="nettySerializer" value="kryo"/>
<property name="bufferSize" value="4096"/>
<property name="nettyThreadMax" value="16"/>
<property name="refreshInterval" value="30"/>
<property name="delayTime" value="30"/>
<property name="heartTime" value="10"/>
<property name="compensation" value="true"/>
<property name="recoverDelayTime" value="60"/>
<property name="retryMax" value="3"/>
<property name="compensationRecoverTime" value="60"/>
<property name="compensationCacheType" value="db"/>
<property name="txDbConfig">
<bean class="org.dromara.raincat.common.config.TxDbConfig">
<property name="url"
value="jdbc:mysql://192.168.1.98:3306/tx?useUnicode=true&characterEncoding=utf8"/>
<property name="driverClassName" value="com.mysql.jdbc.Driver"/>
<property name="username" value="root"/>
<property name="password" value="123456"/>
</bean>
</property>
</bean>
```
* txManagerUrl:填写你启动的txManager的ip端口,注意添加http://前缀。
* serializer :事务日志序列化方式,这里我推荐使用的是kryo。当然也支持hessian,protostuff,jdk。在我们测试中表现为:
kryo>hessian>protostuff>jdk。
* nettySerializer: 与txManager通信对象的序列化方法,注意与txManager中的序列化方式配置一样。
* bufferSize: disruptor的bufferSize,当高并发的时候,可以调大。注意是 2n次方。
* nettyThreadMax : netty客户端工作线程数量。
* refreshInterval: 拉取txmanager配置信息间隔时间,单位秒。
* delayTime : 客户端与txmanager通信最大延迟时间。
* heartTime : 与txmanager保持心跳时间间隔,单位秒。
* compensation: 是否需要补偿,一般情况下不需要,极端情况下设置为true。
* recoverDelayTime:事务恢复延迟时间,只有当 compensation:为true才有用。
* compensationRecoverTime: 补偿间隔时间 只有当 compensation:为true才有用。
* retryMax : 事务补偿最大重试次数。
* compensationCacheType:使用何种方式存储日志,支持的有db,redis,mongo,zookeeper等。
* 接下来是最重要的事务日志的存储 在我们的压测中,推荐使用mongo。表现为 mongodb>redis集群>mysql>zookeeper
* 如果你采用mongodb存储日志,配置如下(url可以配置成mongdb集群的url)
```xml
<property name="compensationCacheType" value="mongodb"/>
<property name="txMongoConfig">
<bean class="org.dromara.raincat.common.config.TxMongoConfig">
<property name="mongoDbUrl" value="192.168.1.68:27017"/>
<property name="mongoDbName" value="happylife"/>
<property name="mongoUserName" value="xiaoyu"/>
<property name="mongoUserPwd" value="123456"/>
</bean>
</property>
```
* 如果你采用redis存储日志,配置如下:
* redis单节点
```xml
<property name="compensationCacheType" value="redis" />
<property name="txRedisConfig">
<bean class="org.dromara.raincat.common.config.TxRedisConfig">
<property name="hostName"
value="192.168.1.68"/>
<property name="port" value="6379"/>
<property name="password" value=""/>
</bean>
</property>
```
* redis哨兵模式集群:
```xml
<property name="compensationCacheType" value="redis"/>
<property name="txRedisConfig">
<bean class="org.dromara.hmily.common.config.TxRedisConfig">
<property name="masterName" value="aaa"/>
<property name="sentinel" value="true"/>
<property name="sentinelUrl" value="192.168.1.91:26379;192.168.1.92:26379;192.168.1.93:26379"/>
<property name="password" value="123456"/>
</bean>
</property>
```
* redis集群:
```xml
<property name="compensationCacheType" value="redis"/>
<property name="txRedisConfig">
<bean class="org.dromara.hmily.common.config.TxRedisConfig">
<property name="cluster" value="true"/>
<property name="clusterUrl" value="192.168.1.91:26379;192.168.1.92:26379;192.168.1.93:26379"/>
<property name="password" value="123456"/>
</bean>
</property>
```
* 如果你采用zookeeper存储日志,配置如下:
```xml
<property name="compensationCacheType" value="zookeeper"/>
<property name="txZookeeperConfig">
<bean class="org.dromara.hmily.common.config.TxZookeeperConfig">
<property name="host" value="192.168.1.73:2181"/>
<property name="sessionTimeOut" value="100000"/>
<property name="rootPath" value="/tcc"/>
</bean>
</property>
```
|
package tarehart.rlbot.math
import tarehart.rlbot.input.CarData
import tarehart.rlbot.math.vector.Vector2
import kotlin.math.pow
import kotlin.math.sqrt
/**
 * A 2D ray: an origin [position] and a normalized [direction].
 */
open class Ray2(val position: Vector2, direction: Vector2) {

    // Normalized once at construction so the intersection math below can
    // assume a unit-length direction.
    val direction = direction.normalized()

    /**
     * First intersection of this ray with [circle], or null when the ray's
     * line misses the circle or the intersection lies behind the origin.
     *
     * Taken from https://math.stackexchange.com/a/311956/550722
     */
    fun firstCircleIntersection(circle: Circle): Vector2? {
        // Coefficients of |position + t*direction - center|^2 = radius^2,
        // a quadratic in the ray parameter t.
        val a = direction.magnitudeSquared()
        val b = 2 * direction.x * (position.x - circle.center.x) + 2 * direction.y * (position.y - circle.center.y)
        val c = square(position.x - circle.center.x) + square(position.y - circle.center.y) - square(circle.radius)
        val discrim = b * b - 4 * a * c
        if (discrim < 0) {
            // No real roots: no intersection at all.
            return null
        }
        // Citardauq form 2c / (-b + sqrt(discrim)) of the quadratic formula.
        val t = 2 * c / (-b + sqrt(discrim))
        if (t < 0) {
            // Intersection lies behind the ray origin.
            return null
        }
        return position + direction * t
    }

    private fun square(n: Float): Float {
        return n * n
    }

    companion object {
        /**
         * https://stackoverflow.com/a/2931703/280852
         *
         * Returns the intersection point if it exists, followed by the distance of that intersection
         * along ray A.
         *
         * NOTE(review): parallel rays make det == 0, so u and v become
         * Infinity/NaN; the u < 0 || v < 0 guard does not reject NaN —
         * confirm that callers tolerate a NaN distance in that case.
         */
        fun getIntersection(a: Ray2, b: Ray2): Pair<Vector2?, Float> {
            val dx = b.position.x - a.position.x
            val dy = b.position.y - a.position.y
            val det = b.direction.x * a.direction.y - b.direction.y * a.direction.x
            val u = (dy * b.direction.x - dx * b.direction.y) / det
            val v = (dy * a.direction.x - dx * a.direction.y) / det
            if (u < 0 || v < 0) {
                // Intersection lies behind at least one of the ray origins.
                return Pair(null, u)
            }
            return Pair(a.position + a.direction * u, u)
        }

        /** Ray starting at the car's position, pointing along its nose. */
        fun fromCar(car: CarData): Ray2 {
            return Ray2(car.position.flatten(), car.orientation.noseVector.flatten())
        }
    }
}
|
---
# Cool URLs don’t change: https://www.w3.org/Provider/Style/URI.html
redirect_from:
- ../../methodology/authoring_workflow.html
---
# 7. The Authoring Workflow « FC4 User Manual
## Summarized Workflow
1. Start Structurizr ([docs][s9r-on-prem])
1. Run `fc4 -fsrw path/to/repo` to start fc4 watching for changes
1. Create and/or edit diagram YAML files
1. fc4 will automatically [format][formatting], [snap][snapping], and [render][rendering] the
diagrams
1. Run `git commit` to commit the new/changed files
## Full Workflow
1. Create a new git branch in your local instance of [the diagram repository][repo]
1. In your text editor: either create a new diagram source file or open an existing diagram source
file
1. Start Structurizr ([docs][s9r-on-prem])
1. In your terminal, run `fc4 -fsrw path/to/repo`
1. This starts FC4 watching your repository for changes to any diagram source YAML file (or new
files)
1. Each file will be [formatted][formatting], [snapped][snapping], and [rendered][rendering] when
it changes or is created
1. A future release of the tool will enable users to specify which actions should be performed
1. In your text editor, open a diagram YAML file in one pane and its rendered PNG file in an
adjacent pane
1. If the diagram is new then the PNG file won’t exist until you’ve saved the YAML file and
fc4 has successfully rendered the diagram
1. Edit the diagram YAML by adding/changing elements, relationships, etc, then save the file
1. This will cause fc4 to [format][formatting], [snap][snapping], and [render][rendering]
the diagram
   1. Ideally your editor will see the changes to the open files and automatically refresh your open
buffers/windows/tabs so you can immediately see the changes
1. Continue to edit the YAML, save the file, and observe the changes to the PNG until you’re
happy with the changes
1. In your terminal, hit `ctrl-c` to stop fc4
1. Use your git UI of choice to commit your changes
1. Push your local changes to the remote repository regularly
1. When you’re ready to submit your changes for review, open a
[Merge Request](https://docs.gitlab.com/ee/user/project/merge_requests/index.html) or
[Pull Request](https://help.github.com/articles/about-pull-requests/) to get your changes
reviewed and then merged into the default branch (e.g. `main`)
Here’s a screenshot of an editor with a diagram open in two panes:

## Optional: Using Structurizr Express for Graphical Editing
This is optional, but can be very helpful when you need to make broad layout changes, or experiment
quickly with various changes to a layout.
During an editing session as described above, when you have both files of a diagram open in your
editor, you can use Structurizr Express (SE) like so:
1. Select the entire contents of the YAML file in your text editor and cut it into your clipboard
1. Switch to your Web browser and open SE, using your local URL (e.g. http://localhost:8080/express)
1. Once SE has loaded, click the YAML tab on the left-hand side of the UI
1. Paste the diagram source into the YAML textarea
1. Press tab to blur the textarea
1. SE will either render the diagram, or display a red error indicator in its toolbar
1. If SE shows its red error indicator, click the indicator button to bring up a dialog listing
the errors
1. Use the right-hand side of SE to arrange the elements and edges as desired
1. Don’t worry about aligning elements precisely; fc4 will take care of this for you
1. Cut the diagram source from the SE YAML textarea into your clipboard
1. Switch back to your editor, paste the diagram source into the YAML file buffer, and save the file
1. fc4 will see that the YAML file has changed, and will process it as described above
1. NB: the processing includes [snapping][snapping] the elements and vertices of a diagram to a
virtual grid, which has the effect of precisely aligning elements that had been only roughly
aligned
Here’s a screenshot of Structurizr Express:

----
Please continue to [Publishing](./publishing) or go back to [the top page of the manual](./).
[formatting]: ../features#formatting
[snapping]: ../features#snapping
[rendering]: ../features#rendering
[repo]: ./repository
[s9r-on-prem]: https://structurizr.com/help/on-premises
|
<div class="widget-content widget-content-area">
<form wire:submit.prevent='save'>
<div class="form-group mb-4">
<label for="name_ar">Name ar</label>
<input wire:model.lazy='name.ar' type="text" class="form-control" id="name_ar" placeholder="Name ar">
@error('name.ar')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
<label for="name_ar">Name en</label>
<input wire:model.lazy='name.en' type="text" class="form-control" id="name_ar" placeholder="Name en">
@error('name.en')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
<label for="currency_ar">Currency ar</label>
<input wire:model.lazy='currency.ar' type="text" class="form-control" id="currency_ar"
placeholder="Currency ar">
@error('currency.ar')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
<label for="currency_en">Currency en</label>
<input wire:model.lazy='currency.en' type="text"
class="form-control" id="currency_en" placeholder="Currency en">
@error('currency.en')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
<label for="zip_code">Zip Code</label>
<input wire:model='zip_code' type="text" class="form-control" id="zip_code" placeholder="Zip Code">
@error('zip_code')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
<label for="tax">Tax</label>
<input wire:model.lazy='tax' type="number" class="form-control" id="tax" min="0" placeholder="Tax">
@error('tax')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
<label for="price_of_first_ten_kilo">Price Of First Ten Kilo</label>
<input wire:model.lazy='price_of_first_ten_kilo' type="number" class="form-control"
id="price_of_first_ten_kilo" min="0" placeholder="Price Of First Ten Kilo">
@error('price_of_first_ten_kilo')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
<label for="price_for_kilo">Price For Kilo</label>
<input wire:model.lazy='price_for_kilo' type="number" class="form-control" id="price_for_kilo" min="0"
placeholder="Price For Kilo">
@error('price_for_kilo')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
<label for="profitRatio">Profit Ratio</label>
<input wire:model.lazy='profitRatio' type="number" class="form-control" id="profitRatio" min="0"
placeholder="Profit Ratio">
@error('profitRatio')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<div class="form-group mb-4">
    {{-- BUG FIX: the id/name/for values contained a trailing space, which broke
         the label association; the placeholder option had no value, so picking it
         bound the literal text "Choose Country" to country_code --}}
    <label for="country_code">Country Code</label>
    <select wire:model.lazy='country_code' name="country_code"
        class="custom-select custom-select-md bg-white mt-4" id="country_code">
        <option value="" selected>Choose Country</option>
<option value="DZ" label="Algeria">Algeria</option>
<option value="AO" label="Angola">Angola</option>
<option value="BJ" label="Benin">Benin</option>
<option value="BW" label="Botswana">Botswana</option>
<option value="BF" label="Burkina Faso">Burkina Faso</option>
<option value="BI" label="Burundi">Burundi</option>
<option value="CM" label="Cameroon">Cameroon</option>
<option value="CV" label="Cape Verde">Cape Verde</option>
<option value="CF" label="Central African Republic">Central African Republic
</option>
<option value="TD" label="Chad">Chad</option>
<option value="KM" label="Comoros">Comoros</option>
<option value="CG" label="Congo - Brazzaville">Congo - Brazzaville</option>
<option value="CD" label="Congo - Kinshasa">Congo - Kinshasa</option>
<option value="CI" label="Côte d’Ivoire">Côte d’Ivoire</option>
<option value="DJ" label="Djibouti">Djibouti</option>
<option value="EG" label="Egypt">Egypt</option>
<option value="GQ" label="Equatorial Guinea">Equatorial Guinea</option>
<option value="ER" label="Eritrea">Eritrea</option>
<option value="ET" label="Ethiopia">Ethiopia</option>
<option value="GA" label="Gabon">Gabon</option>
<option value="GM" label="Gambia">Gambia</option>
<option value="GH" label="Ghana">Ghana</option>
<option value="GN" label="Guinea">Guinea</option>
<option value="GW" label="Guinea-Bissau">Guinea-Bissau</option>
<option value="KE" label="Kenya">Kenya</option>
<option value="LS" label="Lesotho">Lesotho</option>
<option value="LR" label="Liberia">Liberia</option>
<option value="LY" label="Libya">Libya</option>
<option value="MG" label="Madagascar">Madagascar</option>
<option value="MW" label="Malawi">Malawi</option>
<option value="ML" label="Mali">Mali</option>
<option value="MR" label="Mauritania">Mauritania</option>
<option value="MU" label="Mauritius">Mauritius</option>
<option value="YT" label="Mayotte">Mayotte</option>
<option value="MA" label="Morocco">Morocco</option>
<option value="MZ" label="Mozambique">Mozambique</option>
<option value="NA" label="Namibia">Namibia</option>
<option value="NE" label="Niger">Niger</option>
<option value="NG" label="Nigeria">Nigeria</option>
<option value="RW" label="Rwanda">Rwanda</option>
<option value="RE" label="Réunion">Réunion</option>
<option value="SH" label="Saint Helena">Saint Helena</option>
<option value="SN" label="Senegal">Senegal</option>
<option value="SC" label="Seychelles">Seychelles</option>
<option value="SL" label="Sierra Leone">Sierra Leone</option>
<option value="SO" label="Somalia">Somalia</option>
<option value="ZA" label="South Africa">South Africa</option>
<option value="SD" label="Sudan">Sudan</option>
<option value="SZ" label="Swaziland">Swaziland</option>
<option value="ST" label="São Tomé and Príncipe">São Tomé and Príncipe</option>
<option value="TZ" label="Tanzania">Tanzania</option>
<option value="TG" label="Togo">Togo</option>
<option value="TN" label="Tunisia">Tunisia</option>
<option value="UG" label="Uganda">Uganda</option>
<option value="EH" label="Western Sahara">Western Sahara</option>
<option value="ZM" label="Zambia">Zambia</option>
<option value="ZW" label="Zimbabwe">Zimbabwe</option>
<option value="AI" label="Anguilla">Anguilla</option>
<option value="AG" label="Antigua and Barbuda">Antigua and Barbuda</option>
<option value="AR" label="Argentina">Argentina</option>
<option value="AW" label="Aruba">Aruba</option>
<option value="BS" label="Bahamas">Bahamas</option>
<option value="BB" label="Barbados">Barbados</option>
<option value="BZ" label="Belize">Belize</option>
<option value="BM" label="Bermuda">Bermuda</option>
<option value="BO" label="Bolivia">Bolivia</option>
<option value="BR" label="Brazil">Brazil</option>
<option value="VG" label="British Virgin Islands">British Virgin Islands
</option>
<option value="CA" label="Canada">Canada</option>
<option value="KY" label="Cayman Islands">Cayman Islands</option>
<option value="CL" label="Chile">Chile</option>
<option value="CO" label="Colombia">Colombia</option>
<option value="CR" label="Costa Rica">Costa Rica</option>
<option value="CU" label="Cuba">Cuba</option>
<option value="DM" label="Dominica">Dominica</option>
<option value="DO" label="Dominican Republic">Dominican Republic</option>
<option value="EC" label="Ecuador">Ecuador</option>
<option value="SV" label="El Salvador">El Salvador</option>
<option value="FK" label="Falkland Islands">Falkland Islands</option>
<option value="GF" label="French Guiana">French Guiana</option>
<option value="GL" label="Greenland">Greenland</option>
<option value="GD" label="Grenada">Grenada</option>
<option value="GP" label="Guadeloupe">Guadeloupe</option>
<option value="GT" label="Guatemala">Guatemala</option>
<option value="GY" label="Guyana">Guyana</option>
<option value="HT" label="Haiti">Haiti</option>
<option value="HN" label="Honduras">Honduras</option>
<option value="JM" label="Jamaica">Jamaica</option>
<option value="MQ" label="Martinique">Martinique</option>
<option value="MX" label="Mexico">Mexico</option>
<option value="MS" label="Montserrat">Montserrat</option>
<option value="AN" label="Netherlands Antilles">Netherlands Antilles</option>
<option value="NI" label="Nicaragua">Nicaragua</option>
<option value="PA" label="Panama">Panama</option>
<option value="PY" label="Paraguay">Paraguay</option>
<option value="PE" label="Peru">Peru</option>
<option value="PR" label="Puerto Rico">Puerto Rico</option>
<option value="BL" label="Saint Barthélemy">Saint Barthélemy</option>
<option value="KN" label="Saint Kitts and Nevis">Saint Kitts and Nevis</option>
<option value="LC" label="Saint Lucia">Saint Lucia</option>
<option value="MF" label="Saint Martin">Saint Martin</option>
<option value="PM" label="Saint Pierre and Miquelon">Saint Pierre and Miquelon
</option>
<option value="VC" label="Saint Vincent and the Grenadines">Saint Vincent and
the Grenadines</option>
<option value="SR" label="Suriname">Suriname</option>
<option value="TT" label="Trinidad and Tobago">Trinidad and Tobago</option>
<option value="TC" label="Turks and Caicos Islands">Turks and Caicos Islands
</option>
<option value="VI" label="U.S. Virgin Islands">U.S. Virgin Islands</option>
<option value="US" label="United States">United States</option>
<option value="UY" label="Uruguay">Uruguay</option>
<option value="VE" label="Venezuela">Venezuela</option>
<option value="AF" label="Afghanistan">Afghanistan</option>
<option value="AM" label="Armenia">Armenia</option>
<option value="AZ" label="Azerbaijan">Azerbaijan</option>
<option value="BH" label="Bahrain">Bahrain</option>
<option value="BD" label="Bangladesh">Bangladesh</option>
<option value="BT" label="Bhutan">Bhutan</option>
<option value="BN" label="Brunei">Brunei</option>
<option value="KH" label="Cambodia">Cambodia</option>
<option value="CN" label="China">China</option>
<option value="CY" label="Cyprus">Cyprus</option>
<option value="GE" label="Georgia">Georgia</option>
<option value="HK" label="Hong Kong SAR China">Hong Kong SAR China</option>
<option value="IN" label="India">India</option>
<option value="ID" label="Indonesia">Indonesia</option>
<option value="IR" label="Iran">Iran</option>
<option value="IQ" label="Iraq">Iraq</option>
<option value="IL" label="Israel">Israel</option>
<option value="JP" label="Japan">Japan</option>
<option value="JO" label="Jordan">Jordan</option>
<option value="KZ" label="Kazakhstan">Kazakhstan</option>
<option value="KW" label="Kuwait">Kuwait</option>
<option value="KG" label="Kyrgyzstan">Kyrgyzstan</option>
<option value="LA" label="Laos">Laos</option>
<option value="LB" label="Lebanon">Lebanon</option>
<option value="MO" label="Macau SAR China">Macau SAR China</option>
<option value="MY" label="Malaysia">Malaysia</option>
<option value="MV" label="Maldives">Maldives</option>
<option value="MN" label="Mongolia">Mongolia</option>
<option value="MM" label="Myanmar [Burma]">Myanmar [Burma]</option>
<option value="NP" label="Nepal">Nepal</option>
<option value="NT" label="Neutral Zone">Neutral Zone</option>
<option value="KP" label="North Korea">North Korea</option>
<option value="OM" label="Oman">Oman</option>
<option value="PK" label="Pakistan">Pakistan</option>
<option value="PS" label="Palestinian Territories">Palestinian Territories
</option>
<option value="YD" label="People's Democratic Republic of Yemen">People's
Democratic Republic of Yemen</option>
<option value="PH" label="Philippines">Philippines</option>
<option value="QA" label="Qatar">Qatar</option>
<option value="SA" label="Saudi Arabia">Saudi Arabia</option>
<option value="SG" label="Singapore">Singapore</option>
<option value="KR" label="South Korea">South Korea</option>
<option value="LK" label="Sri Lanka">Sri Lanka</option>
<option value="SY" label="Syria">Syria</option>
<option value="TW" label="Taiwan">Taiwan</option>
<option value="TJ" label="Tajikistan">Tajikistan</option>
<option value="TH" label="Thailand">Thailand</option>
<option value="TL" label="Timor-Leste">Timor-Leste</option>
<option value="TR" label="Turkey">Turkey</option>
<option value="TM" label="Turkmenistan">Turkmenistan</option>
<option value="AE" label="United Arab Emirates">United Arab Emirates</option>
<option value="UZ" label="Uzbekistan">Uzbekistan</option>
<option value="VN" label="Vietnam">Vietnam</option>
<option value="YE" label="Yemen">Yemen</option>
<option value="AL" label="Albania">Albania</option>
<option value="AD" label="Andorra">Andorra</option>
<option value="AT" label="Austria">Austria</option>
<option value="BY" label="Belarus">Belarus</option>
<option value="BE" label="Belgium">Belgium</option>
<option value="BA" label="Bosnia and Herzegovina">Bosnia and Herzegovina
</option>
<option value="BG" label="Bulgaria">Bulgaria</option>
<option value="HR" label="Croatia">Croatia</option>
{{-- duplicate "CY" Cyprus option removed; CY is already listed in the Asia group above --}}
<option value="CZ" label="Czech Republic">Czech Republic</option>
<option value="DK" label="Denmark">Denmark</option>
<option value="DD" label="East Germany">East Germany</option>
<option value="EE" label="Estonia">Estonia</option>
<option value="FO" label="Faroe Islands">Faroe Islands</option>
<option value="FI" label="Finland">Finland</option>
<option value="FR" label="France">France</option>
<option value="DE" label="Germany">Germany</option>
<option value="GI" label="Gibraltar">Gibraltar</option>
<option value="GR" label="Greece">Greece</option>
<option value="GG" label="Guernsey">Guernsey</option>
<option value="HU" label="Hungary">Hungary</option>
<option value="IS" label="Iceland">Iceland</option>
<option value="IE" label="Ireland">Ireland</option>
<option value="IM" label="Isle of Man">Isle of Man</option>
<option value="IT" label="Italy">Italy</option>
<option value="JE" label="Jersey">Jersey</option>
<option value="LV" label="Latvia">Latvia</option>
<option value="LI" label="Liechtenstein">Liechtenstein</option>
<option value="LT" label="Lithuania">Lithuania</option>
<option value="LU" label="Luxembourg">Luxembourg</option>
<option value="MK" label="Macedonia">Macedonia</option>
<option value="MT" label="Malta">Malta</option>
<option value="FX" label="Metropolitan France">Metropolitan France</option>
<option value="MD" label="Moldova">Moldova</option>
<option value="MC" label="Monaco">Monaco</option>
<option value="ME" label="Montenegro">Montenegro</option>
<option value="NL" label="Netherlands">Netherlands</option>
<option value="NO" label="Norway">Norway</option>
<option value="PL" label="Poland">Poland</option>
<option value="PT" label="Portugal">Portugal</option>
<option value="RO" label="Romania">Romania</option>
<option value="RU" label="Russia">Russia</option>
<option value="SM" label="San Marino">San Marino</option>
<option value="RS" label="Serbia">Serbia</option>
<option value="CS" label="Serbia and Montenegro">Serbia and Montenegro</option>
<option value="SK" label="Slovakia">Slovakia</option>
<option value="SI" label="Slovenia">Slovenia</option>
<option value="ES" label="Spain">Spain</option>
<option value="SJ" label="Svalbard and Jan Mayen">Svalbard and Jan Mayen
</option>
<option value="SE" label="Sweden">Sweden</option>
<option value="CH" label="Switzerland">Switzerland</option>
<option value="UA" label="Ukraine">Ukraine</option>
<option value="SU" label="Union of Soviet Socialist Republics">Union of Soviet
Socialist Republics</option>
<option value="GB" label="United Kingdom">United Kingdom</option>
<option value="VA" label="Vatican City">Vatican City</option>
<option value="AX" label="Åland Islands">Åland Islands</option>
<option value="AS" label="American Samoa">American Samoa</option>
<option value="AQ" label="Antarctica">Antarctica</option>
<option value="AU" label="Australia">Australia</option>
<option value="BV" label="Bouvet Island">Bouvet Island</option>
<option value="IO" label="British Indian Ocean Territory">British Indian Ocean
Territory</option>
<option value="CX" label="Christmas Island">Christmas Island</option>
<option value="CC" label="Cocos [Keeling] Islands">Cocos [Keeling] Islands
</option>
<option value="CK" label="Cook Islands">Cook Islands</option>
<option value="FJ" label="Fiji">Fiji</option>
<option value="PF" label="French Polynesia">French Polynesia</option>
<option value="TF" label="French Southern Territories">French Southern
Territories</option>
<option value="GU" label="Guam">Guam</option>
<option value="HM" label="Heard Island and McDonald Islands">Heard Island and
McDonald Islands</option>
<option value="KI" label="Kiribati">Kiribati</option>
<option value="MH" label="Marshall Islands">Marshall Islands</option>
<option value="FM" label="Micronesia">Micronesia</option>
<option value="NR" label="Nauru">Nauru</option>
<option value="NC" label="New Caledonia">New Caledonia</option>
<option value="NZ" label="New Zealand">New Zealand</option>
<option value="NU" label="Niue">Niue</option>
<option value="NF" label="Norfolk Island">Norfolk Island</option>
<option value="MP" label="Northern Mariana Islands">Northern Mariana Islands
</option>
<option value="PW" label="Palau">Palau</option>
<option value="PG" label="Papua New Guinea">Papua New Guinea</option>
<option value="PN" label="Pitcairn Islands">Pitcairn Islands</option>
<option value="WS" label="Samoa">Samoa</option>
<option value="SB" label="Solomon Islands">Solomon Islands</option>
<option value="GS" label="South Georgia and the South Sandwich Islands">South
Georgia and the South Sandwich Islands</option>
<option value="TK" label="Tokelau">Tokelau</option>
<option value="TO" label="Tonga">Tonga</option>
<option value="TV" label="Tuvalu">Tuvalu</option>
<option value="UM" label="U.S. Minor Outlying Islands">U.S. Minor Outlying
Islands</option>
<option value="VU" label="Vanuatu">Vanuatu</option>
<option value="WF" label="Wallis and Futuna">Wallis and Futuna</option>
</select>
@error('country_code')
<div class="alert alert-danger m-2">
<span>{{ $message }}</span>
</div>
@enderror
</div>
<input type="submit" name="time" class="btn btn-primary" value="Create Country">
</form>
</div>
|
#!/usr/bin/env bash
## init
THE_BASE_DIR_PATH=$(cd -P -- "$(dirname -- "$0")" && pwd -P)
source "$THE_BASE_DIR_PATH/_init.sh"
## main
# Serves the project's document root on port 8080 with PHP's built-in web server.
main_serve () {
    ## cd document root dir
    # BUG FIX: quote the path (it may contain spaces) and abort when cd fails,
    # otherwise the server would start in the wrong directory (shellcheck SC2164).
    cd "$THE_WWW_DIR_PATH" || exit 1
    ## http://php.net/manual/en/features.commandline.webserver.php
    #php -S localhost:8080
    #php -S 127.0.0.1:8080
    php -S 0.0.0.0:8080
    ## https://docsify.js.org/#/quickstart?id=manual-initialization
    ## https://docs.python.org/3/library/http.server.html
    #python -m SimpleHTTPServer 8080
}
main_serve "$@"
|
# A single quiz attempt: links a user to a revision and, through test_questions,
# to the questions answered in that attempt.
class Test < ApplicationRecord
belongs_to :revision
belongs_to :user
has_many :test_questions, dependent: :destroy
has_many :questions, through: :test_questions
# RailsAdmin UI configuration: record label plus list/detail field selection.
rails_admin do
object_label_method :rails_admin_default_object_label_method
list do
field :id
field :revision
field :user
field :user_score
field :created_at
end
show do
field :id
field :revision
field :user
field :user_score
field :questions
field :created_at
end
end
# Human-readable label shown by RailsAdmin, e.g. "Test (Alice) = 7".
def rails_admin_default_object_label_method
"Test (#{user.name}) = #{user_score}"
end
# Delegates score computation to the CalculateTestScore service object.
def user_score
::CalculateTestScore.call(self)
end
end
|
<!--
SPDX-FileCopyrightText: 2021 Diego Elio Pettenò
SPDX-License-Identifier: 0BSD
-->
# LG PQRCUDS0 compatible ESPHome Component

This repository contains the source code and the EAGLE design files for using
[ESPHome](https://esphome.io/) to control a LG air conditioning unit that uses
the PQRCUDS0 control panel.
The source code and an example configuration package are in the `esphome/`
directory, while the `eagle/` directory has the PCB design files.
Note that this project is neither endorsed, nor sponsored, by either of LG
or the ESPHome project.
You can find details of the implementation and some of the choices made in the
design on my blog: https://flameeyes.blog/tag/lg/
See also:
* Python utilities to emulate the panel and engine at
<https://github.com/Flameeyes/lg-PQRCUDS0-emu>
* [Saleae Logic](https://www.saleae.com/) high-level analyzer for the
protocol, at <https://github.com/Flameeyes/saleae-extension-LG-PQRCUDS0>
## Features
The custom component needs [Home Assistant](https://www.home-assistant.io/),
to provide a reference temperature, as it does not provide its own temperature
sensor.
Once provided a reference temperature sensor, the component supports the
following features, tested on my flat's HVAC system (LG ARNU07GB1G2):
* Cool, heat, fan-only, dehumidifier, and heat-cool modes, with two-point set
temperature.
* Fan speed selection (low/medium/high), including "off" speed for idling in
heat-cool mode.
## Board Revision History
### Rev 1
Original production design. It provided support to host the panel to the side
for going back to the panel on request, with a cutoff switch.
The cutoff had problems with holding the HVAC unit and the panel in sync when
switching away from the ESP32, so it never really worked.
The other reason to keep the panel powered up would have been to maintain a
readout of the temperature from the panel's on-board temperature sensor, but
it does not appear to be very reliable or useful, being able to report only
between 18C and 30C.
### Rev 2
A size- and cost-reduced variant of the original board, with the same logic
design, but without the secondary bus (and related components), and replacing
the cutoff switch with a pushbutton that turns off the whole board.
The bus connector is now on the back of the board, to make it easier to access
from the wall itself.
## License
Both the custom component and the EAGLE design files are released under the
permissive 0BSD license. This means you can reuse them pretty much as you may
want to.
Do note that ESPHome libraries are licensed under GNU GPLv3, so combining them
with logic in the components is still subject to the full license.
## Boxes
The `boxes/` directory contains 3D printable files for a box compatible with
the rev2 board.
Since this is literally my first 3D design, take it with a grain of salt,
it particularly suffers from the following minor issues:
* The cover vents on the top are not shaped to keep into consideration the
registration corners.
* The central screw is unnecessary as (at least with Nylon SLS print) it is
perfectly possible to keep the box closed by friction-fitting it.
* There should be a little more clearance for the ESP32 module, by raising
the top of the cover.
* There's no compatible cap to fit into the pushbutton. A different strategy
should be pursued.
* The keyholes should be more tolerant, as screws are often not quite aligned
as they should be.
|
require_relative '../spec_helper'
describe 'return' do
# `return` inside a method body exits the method immediately with that value.
it 'returns early from a method' do
def foo
return 'foo'
'bar'
end
foo.should == 'foo'
end
# `return` inside nested blocks unwinds all the way out of the enclosing method,
# not just the innermost block. (The shadowed `i` is intentional here.)
it 'returns early from a block' do
def one
[1, 2, 3].each do |i|
[1, 2, 3].each do |i|
return i if i == 1
end
end
end
one.should == 1
end
# An early-return path must not interfere with exceptions raised after the loop.
it 'handles other errors properly' do
def foo(x)
[1].each do |i|
return i if x == i
end
raise 'foo'
end
-> { foo(2) }.should raise_error(StandardError)
foo(1).should == 1
end
end
|
package main
import (
"fmt"
"github.com/go-kit/kit/log"
articlePb "github.com/baxiang/soldiers-sortie/go-mircosvc/pb"
"github.com/baxiang/soldiers-sortie/go-mircosvc/pkg/db"
sharedEtcd "github.com/baxiang/soldiers-sortie/go-mircosvc/pkg/etcd"
"github.com/baxiang/soldiers-sortie/go-mircosvc/pkg/logger"
sharedZipkin "github.com/baxiang/soldiers-sortie/go-mircosvc/pkg/zipkin"
"github.com/go-kit/kit/log/level"
kitGrpc "github.com/go-kit/kit/transport/grpc"
"github.com/opentracing/opentracing-go"
zipkinot "github.com/openzipkin-contrib/zipkin-go-opentracing"
"github.com/openzipkin/zipkin-go"
zipkinGrpc "github.com/openzipkin/zipkin-go/middleware/grpc"
"google.golang.org/grpc"
"google.golang.org/grpc/health"
healthgrpc "google.golang.org/grpc/health/grpc_health_v1"
"google.golang.org/grpc/reflection"
"net"
"os"
"os/signal"
"syscall"
"github.com/baxiang/soldiers-sortie/go-mircosvc/servers/article/config"
"github.com/baxiang/soldiers-sortie/go-mircosvc/servers/article/services"
"github.com/baxiang/soldiers-sortie/go-mircosvc/servers/article/endpoints"
"github.com/baxiang/soldiers-sortie/go-mircosvc/servers/article/transport"
)
// main wires together config, logging, Zipkin tracing, etcd registration, the
// article service and its gRPC transport, then blocks until the server fails
// or a SIGINT arrives.
func main() {
	conf := config.GetConfig()

	log, f := logger.NewLogger(conf.LogPath)
	defer f.Close()

	zipkinTracer, reporter := sharedZipkin.NewZipkin(log, conf.ZipkinAddr, "localhost:"+conf.GrpcPort,
		conf.ServiceName)
	defer reporter.Close()
	opentracing.SetGlobalTracer(zipkinot.Wrap(zipkinTracer))
	tracer := opentracing.GlobalTracer()

	{
		etcdClient := sharedEtcd.NewEtcd(conf.EtcdAddr)
		register := sharedEtcd.Register("/articlesvc", "localhost:"+conf.GrpcPort, etcdClient, log)
		// NOTE(review): deferring Register() means the service is only registered
		// in etcd when main exits — confirm whether this should be an immediate
		// register.Register() paired with a deferred Deregister().
		defer register.Register()
	}

	var svc services.ArticleServicer
	{
		mdb := db.NewMysql(conf.MysqlUsername, conf.MysqlPassword, conf.MysqlAddr, conf.MysqlAuthsource)
		svc = services.NewArticleService(mdb)
	}

	eps := endpoints.NewEndpoints(svc, log, tracer, zipkinTracer)
	hs := health.NewServer()
	hs.SetServingStatus(conf.ServiceName, healthgrpc.HealthCheckResponse_SERVING)

	errs := make(chan error, 1)
	go grpcServer(transport.MakeGRPCServer(eps, tracer, zipkinTracer, log), conf.GrpcPort, zipkinTracer, hs, log, errs)
	go func() {
		// BUG FIX: signal.Notify requires a buffered channel — signal delivery
		// is a non-blocking send, so an unbuffered channel can drop the signal
		// (see os/signal package documentation).
		c := make(chan os.Signal, 1)
		signal.Notify(c, syscall.SIGINT)
		errs <- fmt.Errorf("%s", <-c)
	}()

	level.Info(log).Log("serviceName", conf.ServiceName, "terminated", <-errs)
}
// grpcServer listens on the given TCP port and serves the article gRPC service
// with Zipkin stats, health checking and reflection enabled. It blocks in
// Serve; the terminal error is delivered on errs. Exits the process if the
// port cannot be bound.
func grpcServer(grpcsvc articlePb.ArticlesvcServer, port string, zipkinTracer *zipkin.Tracer, hs *health.Server,
	logger log.Logger, errs chan error) {
	p := fmt.Sprintf(":%s", port)
	listener, err := net.Listen("tcp", p)
	if err != nil {
		level.Error(logger).Log("protocol", "GRPC", "listen", port, "err", err)
		os.Exit(1)
	}
	// BUG FIX: the "protocol"/"GRPC" key-value pair was logged twice in one call.
	level.Info(logger).Log("protocol", "GRPC", "exposed", port)
	server := grpc.NewServer(grpc.UnaryInterceptor(kitGrpc.Interceptor),
		grpc.StatsHandler(zipkinGrpc.NewServerHandler(zipkinTracer)),
	)
	articlePb.RegisterArticlesvcServer(server, grpcsvc)
	healthgrpc.RegisterHealthServer(server, hs)
	reflection.Register(server)
	errs <- server.Serve(listener)
}
|
# Routes for the Homeland::Jobs engine; the jobs listing is its only endpoint.
Homeland::Jobs::Engine.routes.draw do
get '/jobs', to: 'jobs#index'
end
|
#!/bin/bash
set -e
# MongoDB bootstrap script (run from docker-entrypoint-initdb.d): creates an
# application user with dbOwner rights on the configured database and on "test".
# The unquoted heredoc delimiter is intentional so the $MONGO*/$MONGODB_* env
# vars expand before the script is fed to the mongo shell.
# NOTE(review): values are interpolated straight into shell-side JS — a password
# containing quotes or backslashes would break the script; confirm inputs.
mongo <<EOF
use $MONGO_INITDB_DATABASE
db.createUser({
user: "$MONGODB_USERNAME",
pwd: "$MONGODB_PASSWORD",
roles: [{
role: "dbOwner",
db: "$MONGO_INITDB_DATABASE"
}]
})
use test
db.createUser({
user: "$MONGODB_USERNAME",
pwd: "$MONGODB_PASSWORD",
roles: [{
role: "dbOwner",
db: "test"
}]
})
EOF
|
use super::loc_hint::*;
use super::util::*;
use crate::config::*;
use std::fmt::Write;
pub struct IfNewLine<LocHint>(pub bool, pub LocHint);

impl<'a, 'b, LocHint> ConfiguredWrite for IfNewLine<LocHint>
where
    LocHint: ConfiguredWrite + LocHintConstructible<'a, 'b>,
{
    /// When the flag is set, writes the location hint (with its prefix erased),
    /// guarantees the output ends with exactly one newline, then writes the
    /// current indent. When the flag is clear, simply forwards to the inner
    /// writer.
    fn configured_write(&self, f: &mut String, cfg: &Config, buf: &str, state: &mut State) -> std::fmt::Result {
        if !self.0 {
            return self.1.configured_write(f, cfg, buf, state);
        }

        // Re-render the hint with an empty prefix ("erase" the hint text).
        let loc_hint = LocHint::new(self.1.get_loc(), "");
        let mut comment_block = String::new();
        match loc_hint.configured_write(&mut comment_block, cfg, buf, state) {
            Ok(..) => {
                let trimmed = trim_end_spaces_and_tabs(&comment_block);
                if !trimmed.is_empty() {
                    write!(f, "{}", trimmed)?;
                    if !trimmed.ends_with('\n') {
                        f.push('\n');
                    }
                } else {
                    // If the line had trailing whitespace, remove it. But when we
                    // are inside a CommentLocHint we operate on a pseudo-string,
                    // so the first character must never be removed (len() > 1).
                    if f.len() > 1 && (f.ends_with(' ') || f.ends_with('\t')) {
                        f.pop();
                    }
                    // Unless the last lexeme already terminates the line (one-line
                    // comment or shebang), end it here.
                    if !f.ends_with('\n') {
                        f.push('\n');
                    }
                }
                write_indent(f, cfg, state)?;
            }
            err @ Err(..) => return err,
        }
        Ok(())
    }
}
pub struct IncIndent(pub Option<&'static str>);

impl ConfiguredWrite for IncIndent {
    /// Pushes an indent label and raises the indent level — except when a
    /// labeled frame repeats the label already on top of the stack, which
    /// coalesces into the existing level. Writes nothing to the output.
    fn configured_write(&self, _: &mut String, _: &Config, _: &str, state: &mut State) -> std::fmt::Result {
        if self.0.is_none() || state.stack_indent.last() != Some(&self.0) {
            state.indent_level += 1;
        }
        // Option<&'static str> is Copy — the original's .clone() was redundant.
        state.stack_indent.push(self.0);
        Ok(())
    }
}
pub struct DecIndent();

impl ConfiguredWrite for DecIndent {
    /// Pops the most recent indent label and, mirroring IncIndent's rule,
    /// lowers the indent level when that frame actually added one: unlabeled
    /// (None) frames always did; labeled frames only when the label differs
    /// from the new top of the stack.
    fn configured_write(&self, _: &mut String, _: &Config, _: &str, state: &mut State) -> std::fmt::Result {
        let last = state.stack_indent.pop();
        // Equivalent to the original `last.is_some() && last.unwrap().is_none()`
        // without the unwrap().
        if last == Some(None) || state.stack_indent.last() != last.as_ref() {
            state.indent_level -= 1;
        }
        Ok(())
    }
}
// Decor that raises the function-nesting depth; paired with DecFuncLevel.
pub struct IncFuncLevel();
impl ConfiguredWrite for IncFuncLevel {
// Writes nothing; only mutates state.function_nested_level.
fn configured_write(&self, _: &mut String, _: &Config, _: &str, state: &mut State) -> std::fmt::Result {
state.function_nested_level += 1;
Ok(())
}
}
// Decor that lowers the function-nesting depth; paired with IncFuncLevel.
pub struct DecFuncLevel();
impl ConfiguredWrite for DecFuncLevel {
// Writes nothing; only mutates state.function_nested_level.
fn configured_write(&self, _: &mut String, _: &Config, _: &str, state: &mut State) -> std::fmt::Result {
state.function_nested_level -= 1;
Ok(())
}
}
pub struct If<'a>(pub bool, pub &'a dyn ConfiguredWrite);

impl ConfiguredWrite for If<'_> {
    /// Writes the wrapped writer only when the condition is true; otherwise a
    /// no-op. (Idiom: `if`/`else` instead of matching on a bool — clippy
    /// `match_bool`.)
    fn configured_write(&self, f: &mut String, cfg: &Config, buf: &str, state: &mut State) -> std::fmt::Result {
        if self.0 {
            self.1.configured_write(f, cfg, buf, state)
        } else {
            Ok(())
        }
    }
}
// End-to-end check of the IncIndent/DecIndent label-coalescing rules.
#[test]
fn test_decors() -> std::fmt::Result {
use crate::{cfg_write, cfg_write_helper};
let cfg = Config::default();
let mut state = State::default();
let mut buf = String::new();
// 1: one labeled frame adds one level and pops cleanly
cfg_write!(&mut buf, &cfg, "", &mut state, IncIndent(Some("1")))?;
assert_eq!(state.indent_level, 1);
assert_eq!(state.stack_indent, vec![Some("1")]);
cfg_write!(&mut buf, &cfg, "", &mut state, DecIndent())?;
assert_eq!(state.indent_level, 0);
assert_eq!(state.stack_indent, vec![]);
// 2: distinct labels each add a level
cfg_write!(&mut buf, &cfg, "", &mut state, IncIndent(Some("1")), IncIndent(Some("2")))?;
assert_eq!(state.indent_level, 2);
assert_eq!(state.stack_indent, vec![Some("1"), Some("2")]);
cfg_write!(&mut buf, &cfg, "", &mut state, DecIndent(), DecIndent())?;
assert_eq!(state.indent_level, 0);
assert_eq!(state.stack_indent, vec![]);
// 3: a repeated label coalesces into the existing level
cfg_write!(&mut buf, &cfg, "", &mut state, IncIndent(Some("1")), IncIndent(Some("1")))?;
assert_eq!(state.indent_level, 1);
assert_eq!(state.stack_indent, vec![Some("1"), Some("1")]);
cfg_write!(&mut buf, &cfg, "", &mut state, DecIndent(), DecIndent())?;
assert_eq!(state.indent_level, 0);
assert_eq!(state.stack_indent, vec![]);
// 4: an unlabeled (None) frame breaks the coalescing chain
cfg_write!(&mut buf, &cfg, "", &mut state, IncIndent(Some("1")), IncIndent(None), IncIndent(Some("1")))?;
assert_eq!(state.indent_level, 3);
assert_eq!(state.stack_indent, vec![Some("1"), None, Some("1")]);
cfg_write!(&mut buf, &cfg, "", &mut state, DecIndent(), DecIndent(), DecIndent())?;
assert_eq!(state.indent_level, 0);
assert_eq!(state.stack_indent, vec![]);
// 5: a mixed stack unwound one frame at a time
cfg_write!(
&mut buf,
&cfg,
"",
&mut state,
IncIndent(None),
IncIndent(None),
IncIndent(Some("1")),
IncIndent(Some("1"))
)?;
assert_eq!(state.indent_level, 3);
assert_eq!(state.stack_indent, vec![None, None, Some("1"), Some("1")]);
cfg_write!(&mut buf, &cfg, "", &mut state, DecIndent())?;
assert_eq!(state.indent_level, 3);
assert_eq!(state.stack_indent, vec![None, None, Some("1")]);
cfg_write!(&mut buf, &cfg, "", &mut state, DecIndent())?;
assert_eq!(state.indent_level, 2);
assert_eq!(state.stack_indent, vec![None, None]);
cfg_write!(&mut buf, &cfg, "", &mut state, DecIndent())?;
assert_eq!(state.indent_level, 1);
assert_eq!(state.stack_indent, vec![None]);
cfg_write!(&mut buf, &cfg, "", &mut state, DecIndent())?;
assert_eq!(state.indent_level, 0);
assert_eq!(state.stack_indent, vec![]);
Ok(())
}
|
import {GridConfig} from "~/interfaces/GridConfig";
import {GridItem} from "~/interfaces/GridItem";
export class GridService {
    // Maximum items per row before wrapping.
    static maxItemsX = 4;
    // Pixel footprint of one grid cell.
    static itemWidth = 150;
    static itemHeight = 180;

    /**
     * Lays out `count` items inside the rectangle described by `config`,
     * centring rows/columns and returning points with ids of the form "<x><y>".
     */
    static getItems(config: GridConfig, count: number): GridItem[] {
        const width = config.endX - config.startX;
        // BUG FIX: was `config.endY = config.startY` — an assignment that both
        // clobbered config.endY and produced the wrong height.
        const height = config.endY - config.startY;
        const items: GridItem[] = [];
        const grid = this.optimiseGrid(this.gridData(count));
        for (let y = 0; y < grid.length; y++) {
            const column = grid[y];
            for (let x = 0; x < column.length; x++) {
                // Half-cell adjustments centre the row/column in the free space.
                const adjustmentX = width / (column.length * 2);
                const adjustmentY = height / (grid.length * 2);
                const xPoint = config.startX + adjustmentX + (x * this.itemWidth);
                const yPoint = config.startY + adjustmentY + (y * this.itemHeight);
                items.push({x: xPoint, y: yPoint, id: `${x}${y}`});
            }
        }
        return items;
    }

    /** Splits the indices 0..count-1 into rows of at most `maxItemsX` items. */
    static gridData(count: number): number[][] {
        let tempRow: number[] = [];
        const grid: number[][] = [];
        for (let i = 0; i < count; i++) {
            if (i != 0 && i % this.maxItemsX == 0) {
                grid.push(tempRow)
                tempRow = [];
            }
            tempRow.push(i);
        }
        if (tempRow.length > 0)
            grid.push(tempRow);
        return grid;
    }

    /**
     * Rebalances rows until their lengths differ by at most one, repeatedly
     * moving one item from the longest row to the shortest.
     */
    static optimiseGrid(grid: number[][]): number[][] {
        if (grid.length < 2) return grid;
        const last = grid.length - 1;
        const isOptimum = (): boolean => {
            // BUG FIX: sort() without a comparator sorts lexicographically, so a
            // row length of 10 ordered before 2; compare numerically instead.
            const lengths = grid.map(g => g.length).sort((a, b) => a - b);
            return lengths[last] - lengths[0] <= 1;
        }
        while (!isOptimum()) {
            grid = grid.sort((a, b) => a.length - b.length);
            const item = grid[last].pop();
            if (item != null) {
                grid[0].push(item);
            }
        }
        return grid;
    }

    /** In-place Fisher–Yates shuffle; returns [] for a missing/empty input. */
    static shuffle(gridItems: GridItem[]): GridItem[] {
        if (!gridItems || gridItems.length < 1) return [];
        for (let i = gridItems.length - 1; i > 0; i--) {
            const j = Math.floor(Math.random() * (i + 1));
            const temp = gridItems[i];
            gridItems[i] = gridItems[j];
            gridItems[j] = temp;
        }
        return gridItems;
    }
}
|
# import_anywhere
This package allows relative imports no matter where the script is run from.
This avoids situations where relative imports only work when the script
is run from its actual location. To use it, list all "parent directories" that
the package should search in the "IMPORT_ANYWHERE_DIRS" environment variable.
An example of this problem
If a script is located within a package with the following tree for example:
- MY_PARENT_FOLDER
----- PACKAGE_A
-------- C
------------- USEFUL_SCRIPT.PY
--------- MY_SCRIPT.PY
And MY_SCRIPT contains the line "import C.USEFUL_SCRIPT.PY", this won't work if I'm running MY_SCRIPT.PY
from a different directory.
import_anywhere solves this problem if C (or PACKAGE_A) are located in the IMPORT_ANYWHERE_DIRS environment variable.
|
package com.pedrogomez.taskfollower.domian.mapper
import com.pedrogomez.taskfollower.domian.db.SessionTimeDBM
import com.pedrogomez.taskfollower.domian.db.TaskDBM
import com.pedrogomez.taskfollower.domian.view.SessionTimeVM
import com.pedrogomez.taskfollower.domian.view.TaskVM
/**
 * Maps session-time entities between the view model (SessionTimeVM) and the
 * database model (SessionTimeDBM) representations.
 *
 * Both directions are currently unimplemented stubs and will throw
 * NotImplementedError if invoked.
 */
class SessionTimeMapper : MapperContract<SessionTimeVM, SessionTimeDBM> {
    // View model -> database model conversion (not yet implemented).
    override fun fromVMtoDB(vm: SessionTimeVM): SessionTimeDBM {
        TODO("Not yet implemented")
    }
    // Database model -> view model conversion (not yet implemented).
    override fun fromDBtoVM(db: SessionTimeDBM): SessionTimeVM {
        TODO("Not yet implemented")
    }
}
|
// Given a string, return a new string that has transformed based on the input:
// Change case of every character, ie. lower case to upper case, upper case to lower case.
// Reverse the order of words from the input.
// Note: You will have to handle multiple spaces, and leading/trailing spaces.
// For example:
// "Example Input" ==> "iNPUT eXAMPLE"
// You may assume the input only contain English alphabet and spaces.
/**
 * Reverses the order of the space-separated words in `str` and swaps the
 * case of every character. Splitting on single spaces keeps runs of
 * spaces represented as empty "words", so spacing is mirrored rather
 * than collapsed.
 */
function stringTransformer(str) {
  const swapCase = (character) =>
    character === character.toUpperCase()
      ? character.toLowerCase()
      : character.toUpperCase();
  return str
    .split(' ')
    .reverse()
    .map((word) => [...word].map(swapCase).join(''))
    .join(' ');
}
|
using System.Collections.Generic;
namespace RomanNumerals
{
/// <summary>
/// Converts a Roman numeral string, including the overlined (vinculum)
/// numerals Ī, V̄ and X̄, to its Arabic integer value.
/// Strategy: sum the value of every symbol, then subtract twice the smaller
/// symbol's value for each subtractive pair found (e.g. "IV" sums to 6, then
/// -2 yields 4).
/// </summary>
public class RomanToArabicNumber
{
    // Single-character placeholders for the multi-character vinculum
    // numerals, substituted in before the per-character summing loop
    // (which can only look up one char at a time) and restored afterwards.
    private const string temporaryRoman1Thousands = "O";
    private const string temporaryRoman5Thousands = "P";
    private const string temporaryRoman10Thousands = "Q";
    // The numeral being converted; temporarily rewritten during Convert().
    private string RomanNumeral;
    // Symbol -> value lookup, including the temporary placeholders.
    private Dictionary<string, int> RomanArabicPairs;
    public RomanToArabicNumber(string romanNumeral)
    {
        RomanNumeral = romanNumeral;
        RomanArabicPairs = new Dictionary<string, int>
        {
            {"I",1},
            {"V",5},
            {"X",10},
            {"L",50},
            {"C",100},
            {"D",500},
            {"M",1000},
            {temporaryRoman1Thousands,1000},
            {temporaryRoman5Thousands,5000},
            {temporaryRoman10Thousands,10000}
        };
    }
    /// <summary>
    /// Computes the Arabic value: sums every symbol, then corrects each
    /// subtractive pair by subtracting twice the smaller symbol.
    /// NOTE(review): each subtractive pair is assumed to occur at most once
    /// per numeral; Contains() cannot count repeats — confirm inputs obey
    /// this. The "ĪV̄"/"ĪX̄" checks rely on the source string using the same
    /// combining-mark encoding as these literals — confirm.
    /// </summary>
    public int Convert()
    {
        var result = 0;
        AddTemporaryNumerals();
        foreach (char letter in RomanNumeral)
        {
            // Characters not in the table (e.g. stray combining marks)
            // contribute nothing.
            if (RomanArabicPairs.TryGetValue(letter.ToString(), out int value))
                result += value;
        }
        RemoveTemporaryNumerals();
        // Subtractive corrections, performed on the restored original string.
        if (RomanNumeral.Contains("IV") || RomanNumeral.Contains("IX"))
            result -= 2;
        if (RomanNumeral.Contains("XL") || RomanNumeral.Contains("XC"))
            result -= 20;
        if (RomanNumeral.Contains("CD") || RomanNumeral.Contains("CM"))
            result -= 200;
        if (RomanNumeral.Contains("ĪV̄") || RomanNumeral.Contains("ĪX̄"))
            result -= 2000;
        return result;
    }
    // Restores the vinculum numerals after the summing loop.
    private void RemoveTemporaryNumerals()
    {
        RomanNumeral = RomanNumeral.Replace(temporaryRoman1Thousands, "Ī");
        RomanNumeral = RomanNumeral.Replace(temporaryRoman5Thousands, "V̄");
        RomanNumeral = RomanNumeral.Replace(temporaryRoman10Thousands, "X̄");
    }
    // Collapses each vinculum numeral to a single placeholder character.
    private void AddTemporaryNumerals()
    {
        RomanNumeral = RomanNumeral.Replace("Ī", temporaryRoman1Thousands);
        RomanNumeral = RomanNumeral.Replace("V̄", temporaryRoman5Thousands);
        RomanNumeral = RomanNumeral.Replace("X̄", temporaryRoman10Thousands);
    }
}
}
|
#include "Castor3D/Model/Skeleton/Animation/SkeletonAnimationKeyFrame.hpp"
#include "Castor3D/Model/Skeleton/Animation/SkeletonAnimation.hpp"
#include "Castor3D/Model/Skeleton/Animation/SkeletonAnimationBone.hpp"
#include <CastorUtils/Math/SquareMatrix.hpp>
#include <CastorUtils/Math/Quaternion.hpp>
namespace castor3d
{
namespace
{
// Builds the 4x4 rotation matrix for `orientation` (standard unit-quaternion
// to rotation-matrix formula) and right-multiplies it into `matrix`.
// Returns the updated `matrix`.
template< typename T, typename U >
castor::SquareMatrix< T, 4 > & doRotate( castor::SquareMatrix< T, 4 > & matrix
	, castor::QuaternionT< U > const & orientation )
{
	castor::SquareMatrix< T, 4 > rotate;
	auto const qxx( orientation.quat.x * orientation.quat.x );
	auto const qyy( orientation.quat.y * orientation.quat.y );
	auto const qzz( orientation.quat.z * orientation.quat.z );
	auto const qxz( orientation.quat.x * orientation.quat.z );
	auto const qxy( orientation.quat.x * orientation.quat.y );
	auto const qyz( orientation.quat.y * orientation.quat.z );
	auto const qwx( orientation.quat.w * orientation.quat.x );
	auto const qwy( orientation.quat.w * orientation.quat.y );
	auto const qwz( orientation.quat.w * orientation.quat.z );
	rotate[0][0] = T( 1 - 2 * ( qyy + qzz ) );
	rotate[0][1] = T( 2 * ( qxy - qwz ) );
	rotate[0][2] = T( 2 * ( qxz + qwy ) );
	rotate[0][3] = T( 0 );
	rotate[1][0] = T( 2 * ( qxy + qwz ) );
	rotate[1][1] = T( 1 - 2 * ( qxx + qzz ) );
	rotate[1][2] = T( 2 * ( qyz - qwx ) );
	rotate[1][3] = T( 0 );
	rotate[2][0] = T( 2 * ( qxz - qwy ) );
	rotate[2][1] = T( 2 * ( qyz + qwx ) );
	rotate[2][2] = T( 1 - 2 * ( qxx + qyy ) );
	// FIX: this cell was written as rotate[3][3] = T( 0 ), leaving
	// rotate[2][3] uninitialized and clobbering the homogeneous 1 below.
	rotate[2][3] = T( 0 );
	rotate[3][0] = T( 0 );
	rotate[3][1] = T( 0 );
	rotate[3][2] = T( 0 );
	rotate[3][3] = T( 1 );
	return matrix *= rotate;
}
// Converts a 4x4 matrix between element types T -> U via SquareMatrix's
// converting assignment.
template< typename T, typename U >
void doConvert( castor::SquareMatrix< T, 4 > const & in
	, castor::SquareMatrix< U, 4 > & out )
{
	out = in;
}
}
//*************************************************************************************************
// Constructs a keyframe at `timeIndex`, owned by `skeletonAnimation`.
SkeletonAnimationKeyFrame::SkeletonAnimationKeyFrame( SkeletonAnimation & skeletonAnimation
	, castor::Milliseconds const & timeIndex )
	: AnimationKeyFrame{ timeIndex }
	, OwnedBy< SkeletonAnimation >{ skeletonAnimation }
{
}
// Registers `object` with a transform composed from translation, rotation
// and scale. Composition order is T * R * S (each call right-multiplies),
// so scale is applied first when transforming a point.
void SkeletonAnimationKeyFrame::addAnimationObject( SkeletonAnimationObject & object
	, castor::Point3f const & translate
	, castor::Quaternion const & rotate
	, castor::Point3f const & scale )
{
	castor::Matrix4x4f transform{ 1.0f };
	castor::matrix::translate( transform, translate );
	doRotate( transform, rotate );
	castor::matrix::scale( transform, scale );
	addAnimationObject( object, transform );
}
// Registers `object` with its local `transform` for this keyframe.
// Duplicate registrations are ignored. If the object's parent has not been
// registered yet, it is registered first (recursively, with its node
// transform) so that parents always precede children in m_transforms —
// initialise() relies on that ordering.
void SkeletonAnimationKeyFrame::addAnimationObject( SkeletonAnimationObject & object
	, castor::Matrix4x4f const & transform )
{
	// Looks up an existing entry for `obj` in m_transforms.
	auto findTransform = [this]( SkeletonAnimationObject & obj )
	{
		return std::find_if( m_transforms.begin()
			, m_transforms.end()
			, [&obj]( auto const & lookup )
			{
				return lookup.first == &obj;
			} );
	};
	auto it = findTransform( object );
	if ( it == m_transforms.end() )
	{
		auto parent = object.getParent();
		if ( parent && findTransform( *parent ) == m_transforms.end() )
		{
			// Ensure the parent is present before the child.
			addAnimationObject( *parent, parent->getNodeTransform() );
		}
		m_transforms.emplace_back( &object, transform );
	}
}
// Tells whether a transform has been registered for `object` in this
// keyframe.
bool SkeletonAnimationKeyFrame::hasObject( SkeletonAnimationObject const & object )const
{
	auto matchesObject = [&object]( auto const & entry )
	{
		return entry.first == &object;
	};
	return std::any_of( m_transforms.begin()
		, m_transforms.end()
		, matchesObject );
}
// Finds the cumulative-transform entry for `object`; returns
// m_cumulative.end() when absent. Only valid after initialise() has filled
// m_cumulative.
TransformArray::const_iterator SkeletonAnimationKeyFrame::find( SkeletonAnimationObject const & object )const
{
	return std::find_if( m_cumulative.begin()
		, m_cumulative.end()
		, [&object]( ObjectTransform const & lookup )
		{
			return lookup.first == &object;
		} );
}
// Finds the cumulative-transform entry whose animation object is the bone
// `bone`; returns m_cumulative.end() when absent. Only bone-typed entries
// are considered.
TransformArray::const_iterator SkeletonAnimationKeyFrame::find( Bone const & bone )const
{
	return std::find_if( m_cumulative.begin()
		, m_cumulative.end()
		, [&bone]( ObjectTransform const & lookup )
		{
			return lookup.first->getType() == SkeletonAnimationObjectType::eBone
				&& static_cast< SkeletonAnimationBone const & >( *lookup.first ).getBone().get() == &bone;
		} );
}
// Rebuilds m_cumulative: each entry's transform is its parent's cumulative
// transform times its own local transform (root objects keep their local
// transform). Relies on addAnimationObject's guarantee that parents appear
// before their children in m_transforms, so the parent's cumulative entry
// already exists when a child is processed.
void SkeletonAnimationKeyFrame::initialise()
{
	m_cumulative.clear();
	for ( auto & transform : m_transforms )
	{
		auto parent = transform.first->getParent();
		if ( parent )
		{
			auto it = find( *parent );
			// NOTE(review): end() here is presumably the keyframe's iterator
			// over m_cumulative (inherited interface) — confirm it matches
			// the container searched by find().
			CU_Ensure( it != end() );
			m_cumulative.emplace_back( transform.first, it->second * transform.second );
		}
		else
		{
			m_cumulative.push_back( transform );
		}
	}
}
//*************************************************************************************************
}
|
# Reads a Celsius value from stdin, converts it to Fahrenheit, saves the
# result to 'temp_out.txt', then echoes the saved file back to stdout.
print "Enter a celsius value: "
celsius = gets.to_i
# Integer arithmetic, as in the original script (fractions truncate).
fahrenheit = (celsius * 9 / 5) + 32
# FIX: the message, the written file and the read file now all agree on
# 'temp_out.txt' (previously the message said 'temp.out' and the script
# read a different, likely nonexistent 'temp.txt').
puts "Saving result to output file 'temp_out.txt'"
File.open("temp_out.txt", "w") do |out_file|
  out_file.puts fahrenheit
end
# FIX: the writer is closed (block form) before reading, so the data is
# flushed to disk.
puts File.read("temp_out.txt")
|
/*
* Based on [https://github.com/daemontus/kotlin-ace-wrapper]
*/
package ace.ext
/**
 * External declaration binding the Ace editor's Autocomplete module, loaded
 * through the custom editor-kotlin-ace-loader webpack loader.
 */
@JsModule("net.akehurst.language.editor-kotlin-ace-loader!?id=ace/autocomplete&name=Autocomplete")
@JsNonModule
external object Autocomplete {
    // The Ace command object that triggers autocompletion; typed dynamic
    // because its shape is defined by the Ace JavaScript API.
    val startCommand: dynamic
}
|
import Exception from './util/Exception';
import Constants from './util/Constants';
import AWS from 'aws-sdk-promise';
/**
* Parent request class
*/
export default class Request {
  /**
   * Create a new request.
   * @param {string} tableName The table name concerned
   * @param {string} region The region where the aws dynamodb is, default is 'eu-west-1'
   */
  constructor(tableName, region) {
    /** @type {string} */
    this.tableName = tableName;
    // Region configuration: fall back to the default region when none given.
    const configRegion = {
      region: region || Constants.DEFAULT_REGION
    };
    /** @type {Object} */
    this.dynamodb = new AWS.DynamoDB(configRegion);
    /** @type {Object} */
    this.docClient = new AWS.DynamoDB.DocumentClient(configRegion);
  }
}
|
# Cloud storage settings (S3 provider).
PROVIDER = "S3"
KEY = ""
SECRET = ""
CONTAINER = "yoredis.com"
# FOR LOCAL
# NOTE(review): the assignments below override PROVIDER and CONTAINER above,
# so the S3 settings are effectively dead while these lines are active —
# presumably one group is meant to be commented out per environment; confirm.
PROVIDER = "LOCAL"
CONTAINER = "container_1"
CONTAINER2 = "container_2"
|
import { stringifyUrl } from "query-string"
import { uuid } from "uuidv4"
// Anonymous analytics id: generated once per browser and persisted in
// localStorage so subsequent page views share the same id.
let anon_id = localStorage.getItem("analytics.anon_id")
if (!anon_id) {
  anon_id = uuid()
  localStorage.setItem("analytics.anon_id", anon_id)
}
// Event payload accepted by analyticsURL() and track().
export interface AnalyticsProps {
  action: string
  label?: string
  page_id: string
  nabe_name: string
  section_id?: string
  anchor?: string
  uid?: string | null
  eid?: string | null
}
/**
 * Builds the server-side analytics endpoint URL for an event, encoding the
 * Google-Analytics-style measurement fields (v/t/ec/ea/...) plus custom
 * dimensions cd1-cd7 as query parameters. An optional `redirect` target is
 * passed through and also stored in cd6 (falling back to the anchor).
 */
export const analyticsURL = (props: AnalyticsProps, redirect?: string) => {
  const {
    action,
    label,
    page_id,
    nabe_name,
    section_id,
    anchor,
    uid,
    eid,
  } = props
  return stringifyUrl({
    url: `https://${process.env.RAILS_HOST}/analytics`,
    query: {
      redirect,
      aid: anon_id,
      uid,
      v: "1",
      t: "event",
      ec: "page",
      ea: action,
      el: label,
      tid: process.env.GA_TID as string,
      cd1: nabe_name,
      cd2: eid,
      cd3: page_id,
      cd4: section_id,
      cd6: redirect ?? anchor,
      cd7: label,
    },
  })
}
/**
 * Fires an analytics event by fetching the analytics URL for `props`.
 * Resolves with the raw fetch Response.
 */
export const track = async (props: AnalyticsProps): Promise<Response> => {
  const requestUrl = analyticsURL({ ...props })
  return fetch(requestUrl)
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#ifndef REQ_QUANTILE_CALCULATOR_HPP_
#define REQ_QUANTILE_CALCULATOR_HPP_
#include <cstdint>
#include <functional>
#include <utility>
#include <vector>
namespace datasketches {
// Accumulates weighted item references from a REQ sketch's compactors and
// converts them into a cumulative-weight table so quantile queries can be
// answered by rank.
template<
typename T,
typename Comparator,
typename Allocator
>
class req_quantile_calculator {
public:
// n: total stream length used to normalize ranks.
req_quantile_calculator(uint64_t n, const Allocator& allocator);
// Adds the items in [begin, end), each counting with weight 2^lg_weight.
void add(const T* begin, const T* end, uint8_t lg_weight);
// Sorts the entries and replaces per-item weights with running totals.
// NOTE(review): "cummulative" is a typo for "cumulative", but the name is
// part of the public interface (and matched by the _impl header), so it
// cannot be renamed here without breaking callers.
template<bool inclusive>
void convert_to_cummulative();
// Returns a pointer to the item at normalized rank in [0, 1], or the
// appropriate boundary item; valid only after convert_to_cummulative().
const T* get_quantile(double rank) const;
private:
// (item pointer, weight) — weight becomes a cumulative total after
// convert_to_cummulative().
using Entry = std::pair<const T*, uint64_t>;
using AllocEntry = typename std::allocator_traits<Allocator>::template rebind_alloc<Entry>;
using Container = std::vector<Entry, AllocEntry>;
// Orders entries by the pointed-to items using comparator C.
template<typename C>
struct compare_pairs_by_first_ptr {
bool operator()(const Entry& a, const Entry& b) {
return C()(*a.first, *b.first);
}
};
// Orders entries by (cumulative) weight, used for rank lookups.
struct compare_pairs_by_second {
bool operator()(const Entry& a, const Entry& b) {
return a.second < b.second;
}
};
uint64_t n_;
Container entries_;
};
} /* namespace datasketches */
#include "req_quantile_calculator_impl.hpp"
#endif
|
#!/bin/bash
# Copyright (c) 2021 Linaro Limited
#
# SPDX-License-Identifier: Apache-2.0

# Collects the twister JUnit XML artifacts from the build, merges them into a
# single report, and publishes both the XML and an HTML rendering back to
# Buildkite, with an annotation linking to the HTML report.

# Exit on error, and let ERR traps (if any are installed) inherit.
set -eE

buildkite-agent artifact download twister-*.xml .

# Keep only non-empty result files. An unmatched glob leaves the literal
# pattern, which fails the -s test and is therefore skipped too.
xmls=""
for f in twister-*xml; do [ -s ${f} ] && xmls+="${f} "; done

# Only merge/upload when at least one non-empty XML file was found.
if [ "${xmls}" ]; then
	junitparser merge ${xmls} junit.xml
	buildkite-agent artifact upload junit.xml
	junit2html junit.xml
	buildkite-agent artifact upload junit.xml.html
	buildkite-agent annotate --style "info" "Read the <a href=\"artifact://junit.xml.html\">JUnit test report</a>"
fi
|
using System;
using System.Linq;
using System.Threading.Tasks;
using GrpcServices;
using Lightest.Data.Models;
using Moq;
using Xunit;
namespace Lightest.Tests.TestingService.UploadProcessor
{
/// <summary>
/// Tests for the upload processor's checker-caching behaviour: short-circuit
/// when a checker is already cached on the server, and compile-and-cache
/// (success and failure paths) when it is not.
/// </summary>
public class CacheChecker : BaseTest
{
    private readonly Checker _checker;

    public CacheChecker()
    {
        // Fresh checker assigned to the base-class task for every test.
        _checker = new Checker
        {
            Id = Guid.NewGuid(),
            Code = nameof(_checker)
        };
        _task.Checker = _checker;
    }

    // Persists the checker before the base data set-up runs.
    protected override Task SetUpData()
    {
        _context.Checkers.Add(_checker);
        return base.SetUpData();
    }

    [Fact]
    public async Task CheckerCached()
    {
        // Pre-cache the checker for this server so no transfer should occur.
        _context.CachedCheckers.Add(new ServerChecker
        {
            CheckerId = _checker.Id,
            ServerIp = _serverInfo.Ip
        });
        await SetUpData();

        var result = await _uploadProcessor.CacheChecker();

        _transferService.Verify(ts => ts.SendChecker(It.IsAny<CheckerRequest>()),
            Times.Never);
        Assert.True(result, $"{nameof(CacheChecker)} should return true if checker is cached.");
    }

    [Fact]
    public async Task CompilationFailed()
    {
        const string errorMessage = "Compilation Error";
        await SetUpData();
        // The testing server reports a failed compilation for this checker.
        _transferService.Setup(ts => ts.SendChecker(It.Is<CheckerRequest>(
            r => r.Id == _checker.Id.ToString() && r.Code == _checker.Code)))
            .ReturnsAsync(new CheckerResponse
            {
                Compiled = false,
                Message = errorMessage
            });

        var result = await _uploadProcessor.CacheChecker();

        Assert.False(result, $"{nameof(CacheChecker)} should return false if checker compilation failed.");
        // Nothing should be cached, and the failure must be recorded.
        Assert.Empty(_context.CachedCheckers);
        var checker = _context.Checkers.First(c => c.Id == _checker.Id);
        Assert.False(checker.Compiled);
        Assert.Equal(errorMessage, checker.Message);
    }

    [Fact]
    public async Task CompilationSuccessful()
    {
        const string compilationMessage = "Compilation Successful";
        await SetUpData();
        // The testing server reports a successful compilation.
        _transferService.Setup(ts => ts.SendChecker(It.Is<CheckerRequest>(
            r => r.Id == _checker.Id.ToString() && r.Code == _checker.Code)))
            .ReturnsAsync(new CheckerResponse
            {
                Compiled = true,
                Message = compilationMessage
            });

        var result = await _uploadProcessor.CacheChecker();

        // FIX: corrected "succeded" -> "succeeded" in the failure message.
        Assert.True(result, $"{nameof(CacheChecker)} should return true if checker compilation succeeded.");
        // The checker must be cached for this server and marked compiled.
        var cachedChecker = _context.CachedCheckers.First();
        Assert.Equal(_serverInfo.Ip, cachedChecker.ServerIp);
        Assert.Equal(_checker.Id, cachedChecker.CheckerId);
        var checker = _context.Checkers.First(c => c.Id == _checker.Id);
        Assert.True(checker.Compiled);
        Assert.Equal(compilationMessage, checker.Message);
    }
}
}
|
"""nr_configs.py
This file contains the configuration class to generate
and store the necessary pieces of information regarding
the set-up and configuration for a numerical relativity
project using the Dendro framework.
"""
from dendrosym.general_configs import ImproperInitalization
import sympy as sym
import dendrosym
class AdvectiveDerivativeNotSet(Exception):
    """Signals that the advective-derivative variable was required but not set."""
    pass
class NRConfig(dendrosym.DendroConfiguration):
    """A class to store numerical relativity configurations.

    In particular, this class can store information about the numerical
    relativity project such as RHS equations and variables, and then
    generate the C++ code from the symbolic equations.
    """

    def __init__(self, project_name: str):
        self.project_name = project_name
        self.project_upper = project_name.upper()
        # make all vars a dictionary in case there are other types to
        # store, but by default we will have "constraint", "evolution",
        # and "parameter" which are initialized by default
        self.all_vars = {
            "constraint": [],
            "evolution": [],
            "parameter": {
                "constraint": [],
                "evolution": []
            }
        }
        # also create the same type of dictionary but just for string
        # representations
        self.all_var_names = {
            "constraint": [],
            "evolution": [],
            "parameter": {
                "constraint": [],
                "evolution": []
            }
        }
        # prefixes used when generating the C++ variable enums
        self.enum_prefixes = {"constraint": "C", "evolution": "U"}
        # then also store the functions that return the rhs portions
        self.all_rhs_functions = {"constraint": None, "evolution": None}
        # initialize the advective derivative variable as well
        self.advective_der_var = None
        self.idx_str = ""
        self.bcs_info = {"constraint": {}, "evolution": {}}
        self.metric_var = None
        self.evolution_constraint_info = {"trace_zero": [], "pos_floor": []}
        self.stored_rhs_function = {}

    def add_evolution_variables(self, in_vars: list):
        """Register symbolic variables that are evolved in time."""
        return super().add_variable(in_vars, "evolution")

    def add_constraint_variables(self, in_vars: list):
        """Register symbolic variables used in constraint equations."""
        return super().add_variable(in_vars, "constraint")

    def add_parameter_variables(self,
                                in_vars: list,
                                eqn_type: str = "evolution"):
        """Register parameter variables for the given equation type."""
        return super().add_parameter_variables(in_vars, eqn_type)

    def set_advective_derivative_var(self, in_var):
        """Set the variable used for advective (upwinded) derivatives.

        Accepts either a 3x1 sympy Matrix or a 3-tuple of indexed symbols;
        the stored value is the base name of the first component.
        """

        def extract_base_name(component):
            # e.g. "beta0[pp]" -> "beta0" -> "beta"; the trailing character
            # strip mirrors the original tuple handling (drops the component
            # index digit) — assumes single-digit component suffixes.
            return str(component).split("[")[0][:-1]

        # this should be a 3x1 variable
        if isinstance(in_var, sym.Matrix):
            if in_var.shape[0] != 3:
                raise ImproperInitalization(
                    "Invalid type of variable for use as advective derivative")
            # FIX: the Matrix branch previously validated the shape but never
            # stored the variable, leaving advective_der_var as None.
            self.advective_der_var = extract_base_name(in_var[0])
        elif isinstance(in_var, tuple):
            if len(in_var) != 3:
                raise ImproperInitalization(
                    "Invalid type of variable for use as advective derivative")
            self.advective_der_var = extract_base_name(in_var[0])
        else:
            # FIX: previously any other type was silently ignored.
            raise ImproperInitalization(
                "Invalid type of variable for use as advective derivative")

    def set_metric(self, in_var):
        """Set the metric variable (must already be an evolution variable)."""
        assert in_var in self.all_vars.get(
            "evolution",
            []), "Incoming metric variable is not in evolution variables"
        self.metric_var = in_var
        # also set the metric in the NR code for use there
        dendrosym.nr.set_metric(in_var)

    def add_evolution_constraint(self, var_adjust, constraint_info):
        """Adds an evolution constraint of some kind

        Takes in the input variable that needs to have some kind of constraint
        and then information about the constraint. The constraint info
        currently supports 'trace_zero', 'pos_floor'

        Trace of zero requires the metric to have been set, and the
        generation code will ensure that it has been properly extracted
        and updated due to the metric's determinant being 1.

        Positive threshold only works for single values and requires
        a parameter be set *elsewhere* (in parameters.cpp for example)
        with the name "{VAR_NAME}_FLOOR" where 'VAR_NAME' is the same
        name that was assigned to the variable but in all capitals.
        """
        if constraint_info == "trace_zero":
            assert var_adjust in self.all_vars.get(
                "evolution", []), "Variable not in set evolution variables"
            if type(var_adjust) is not sym.Matrix:
                raise ValueError(
                    "For zero trace, the incoming variable needs to be a matrix"
                )
            self.evolution_constraint_info["trace_zero"].append(var_adjust)
        elif constraint_info == "pos_floor":
            assert var_adjust in self.all_vars.get(
                "evolution", []), "Variable not in set evolution variables"
            if type(var_adjust) is not sym.Symbol:
                raise ValueError(
                    "For positive floor, the variable needs to not have dimensions"
                )
            self.evolution_constraint_info["pos_floor"].append(var_adjust)
        else:
            raise NotImplementedError(
                "This type of constraint has not been implemented")

    def generate_evolution_constraints(self):
        """Generate the C++ code enforcing the registered evolution constraints.

        Emits, in order: code forcing the metric determinant to one, the
        trace-zero corrections, the positive-floor clamps, and finally the
        metric update. Requires set_metric() to have been called.
        """
        if self.metric_var is None:
            raise ImproperInitalization(
                "Metric was not set, cannot generate evolution constraints")
        return_str = ""
        metric_var_names = self.clean_var_names([self.metric_var])
        metric_name = metric_var_names[0][:-2]
        # then we also need to get the metric enum information
        enum_prefix = self.enum_prefixes.get("evolution", "")
        metric_enums = [
            f"VAR::{enum_prefix}_{mvar.upper()}" for mvar in metric_var_names
        ]
        # first things first, the metric determinant needs to be one,
        # so we generate that code first
        return_str += "////\n//// CODE TO REQUIRE METRIC DETERMINANT TO BE ONE\n"
        return_str += dendrosym.codegen.generate_update_sym_mat_extract(
            metric_name, metric_enums)
        # then we need to generate the determinant code
        return_str += dendrosym.codegen.generate_force_sym_matrix_det_to_one(
            metric_name, metric_enums)
        # now we can start adding our other constraints
        for var_use in self.evolution_constraint_info.get("trace_zero", []):
            return_str += "////\n////APPLYING TRACE ZERO TO "
            # for trace of zero, we need to get all of the variables
            var_names = self.clean_var_names([var_use])
            # then get the name of the variable
            single_var_name = var_names[0][:-2]
            return_str += single_var_name + "\n"
            # then generate the enums
            var_enums = [
                f"VAR::{enum_prefix}_{ivar.upper()}" for ivar in var_names
            ]
            # then add the code to extract the sym matrix
            return_str += dendrosym.codegen.generate_update_sym_mat_extract(
                single_var_name, var_enums)
            # then the code to force the traceless symmat
            return_str += dendrosym.codegen.generate_force_symmat_traceless(
                single_var_name, metric_name)
            # then the code to actually update the matrix
            return_str += dendrosym.codegen.generate_update_sym_mat_code(
                single_var_name, var_enums, include_up=False)
            return_str += "//// END APPLICATION OF TRACE ZERO\n////\n\n"
        # FIX: use a default here as well, matching the trace_zero lookup
        # above, so a missing key cannot raise a TypeError from iterating None.
        for var_use in self.evolution_constraint_info.get("pos_floor", []):
            return_str += "////\n////APPLYING POSITIVE FLOOR TO "
            single_var_name = self.clean_var_names([var_use])[0]
            return_str += single_var_name + "\n"
            var_enum = f"VAR::{enum_prefix}_{single_var_name.upper()}"
            return_str += dendrosym.codegen.generate_variable_always_positive(
                var_enum, floor_var=f"{single_var_name.upper()}_FLOOR")
            return_str += "//// END APPLICATION OF POSITIVE FLOOR\n////\n\n"
        # now generate the metric update code
        return_str += "//// NOW UPDATING THE METRIC VALUES\n"
        return_str += dendrosym.codegen.generate_update_sym_mat_code(
            metric_name, metric_enums)
        return return_str

    def __repr__(self):
        return f"<NRConfigs for '{self.project_name}'>"
|
#!/usr/bin/env perl -w
# $Id$
# vim:ft=perl:
# Tests various scenarios which would leave behind locks, or would delete too many locks
use strict;
use Test::More tests => 13;
use Data::Dumper qw(Dumper);
use Time::HiRes qw(sleep);
use Sys::Hostname;
use YAML::Syck qw(LoadFile);
#use Log::Log4perl qw(:easy);
#Log::Log4perl->easy_init($DEBUG);
use FindBin qw($Bin);
use lib "$Bin/../lib";
use lib "$Bin/lib";
use Pogo::Server;
use Pogo::Server::Job;
use Pogo::Dispatcher;
use PogoTest;
# Speed up job-state polling for the test.
$Pogo::Server::Job::UPDATE_INTERVAL = 0.1;
ok( PogoTest::start_mojo(), 'start_mojo' );
ok( PogoTest::zookeeper_clear("/pogo") == 0, "zookeeper_clear" );
# Load the namespace constraints and dispatcher configuration fixtures.
my $constrfile = "$Bin/conf/constraints.yaml";
my $conffile   = "$Bin/conf/server.conf";
my $constr     = LoadFile($constrfile) || die "cannot load $constrfile";
my $dispconf   = LoadFile($conffile) || die "cannot load $conffile";
mkdir("output_tmp");
$dispconf->{data_dir} = "$Bin/output_tmp";
# FIXME: we need to override Culpa::Client's host so we have repeatability on
# the hostinfo query we're gonna do
ok( my $ns = Pogo::Server->namespace("mail")->init, "Init namespace" );
ok( $ns->set_conf($constr), "Load configuration" );
ok( my $serv = Pogo::Server->instance($dispconf), "Init server" );
ok( ref $serv eq 'Pogo::Server', "Init server ref" );
ok( my $disp = Pogo::Dispatcher->instance($dispconf), "Init dispatcher" );
ok( ref $disp eq 'Pogo::Dispatcher', "Init dispatcher ref" );
ok( my $zkh = Pogo::Server::zkh(), "zookeeper handle" );
my $condvar = AnyEvent->condvar;
# FIX: removed a stray file-scope "my ( $fn, $cfn, $state ) = @_;" left over
# from a subroutine body; @_ is empty at file scope, so it only declared
# three unused lexicals.
# start a new job
my $job = Pogo::Server::Job->new(
    { namespace     => "mail",
      user          => "foo",
      password      => PogoTest::cryptpw( $dispconf, "foo" ),
      pkg_passwords => "{}",
      run_as        => "foo",
      invoked_as    => "magic",
      range         => ['@mail.farm.set.ac4-qa-6195'],
      timeout       => 3,
      job_timeout   => 3,
      command       => 'echo hi',
      foo           => 'bar',
    }
);
my $errc = sub { print("error: $@\n"); $condvar->send(0) };
my $cont = sub { $condvar->send(1) };
$job->start( $errc, $cont );
# Remove the start-job task so the dispatcher does not also pick it up.
$zkh->delete( "/pogo/taskq/startjob;" . $job->id );
ok( $condvar->recv(), "startjob " . $job->id );
# Poll until the job leaves the running state, then report its final state.
$condvar = AnyEvent->condvar;
my $poll_timer;
my $poll = sub {
    $job = Pogo::Server->job( $job->id );
    if ( !$job->is_running )
    {
        $condvar->send( $job->state );
        undef $poll_timer;
    }
};
$poll_timer = AnyEvent->timer(
    after    => 0.1,
    interval => 0.1,
    cb       => $poll,
);
# The job has a 3-second timeout, so it should halt within ~3 seconds.
my $t0 = time;
is( $condvar->recv(), "halted", "timed out job state" );
my $t1 = time;
ok( $t1 - $t0 <= 4, "timed out within 3 seconds" );
ok( PogoTest::stop_mojo(), 'stop_mojo' );
|
<?php
namespace EnderLab\MiddleEarth\Router;
use Fig\Http\Message\RequestMethodInterface;
use Psr\Http\Message\ServerRequestInterface;
use Zend\Expressive\Router\FastRouteRouter;
use Zend\Expressive\Router\Route as ZendRoute;
/**
 * HTTP router built on top of zend-expressive's FastRouteRouter,
 * exposing registration, matching and URI generation for Route objects.
 */
class Router implements RouterInterface
{
    const HTTP_GET = RequestMethodInterface::METHOD_GET;
    const HTTP_POST = RequestMethodInterface::METHOD_POST;
    const HTTP_PUT = RequestMethodInterface::METHOD_PUT;
    const HTTP_DELETE = RequestMethodInterface::METHOD_DELETE;
    const HTTP_HEAD = RequestMethodInterface::METHOD_HEAD;
    const HTTP_OPTION = RequestMethodInterface::METHOD_OPTIONS;
    const HTTP_PATCH = RequestMethodInterface::METHOD_PATCH;
    const HTTP_TRACE = RequestMethodInterface::METHOD_TRACE;
    const HTTP_ANY = ZendRoute::HTTP_METHOD_ANY;

    /** @var FastRouteRouter Underlying FastRoute-based implementation. */
    private $router;

    /** @var Route[] Registered routes, in insertion order. */
    private $routes = [];

    /** @var int Number of registered routes. */
    private $count = 0;

    /**
     * Router constructor.
     *
     * @param array $routes Route objects or [path, middlewares, methods, name] tuples
     * @param array $config FastRouteRouter configuration
     *
     * @throws RouterException
     */
    public function __construct(array $routes = [], array $config = [])
    {
        $this->router = new FastRouteRouter(null, null, $config);
        $this->addRoutes($routes);
    }

    /**
     * @return int
     */
    public function count(): int
    {
        return $this->count;
    }

    /**
     * Registers a batch of routes; each entry is either a Route instance or
     * a positional array [path, middlewares, methods|null, name|null].
     *
     * @param array $routes
     *
     * @throws RouterException
     *
     * @return Router
     */
    public function addRoutes(array $routes = []): self
    {
        // FIX: dropped the unused $key loop variable and replaced the
        // verbose isset() ternaries with null coalescing.
        foreach ($routes as $routesDetails) {
            if ($routesDetails instanceof Route) {
                $this->addRoute($routesDetails);
            } else {
                $this->addRoute(
                    new Route(
                        $routesDetails[0],
                        $routesDetails[1],
                        $routesDetails[2] ?? null,
                        $routesDetails[3] ?? null
                    )
                );
            }
        }

        return $this;
    }

    /**
     * Validates the route's HTTP methods and registers it with the
     * underlying router. An empty method list means "any method".
     *
     * @param Route $route
     *
     * @throws RouterException when a method is not a standard HTTP method
     *
     * @return Router
     */
    public function addRoute(Route $route): self
    {
        foreach ($route->getMethod() as $method) {
            if (!in_array($method, $this->getAllowedMethods(), true) &&
                ZendRoute::HTTP_METHOD_ANY !== $method
            ) {
                throw new RouterException('Invalid method "' . $method . '"');
            }
        }

        $this->routes[] = $route;
        $this->router->addRoute(
            new ZendRoute(
                $route->getPath(),
                $route->getMiddlewares(),
                (0 === count($route->getMethod()) ? ZendRoute::HTTP_METHOD_ANY : $route->getMethod()),
                $route->getName()
            )
        );
        ++$this->count;

        return $this;
    }

    /**
     * Matches the request against the registered routes.
     *
     * @param ServerRequestInterface $request
     *
     * @return Route|null the matched route (with matched params) or null
     */
    public function match(ServerRequestInterface $request): ?Route
    {
        $result = $this->router->match($request);

        if ($result->isSuccess()) {
            return new Route(
                $result->getMatchedRoute()->getPath(),
                $result->getMatchedRoute()->getMiddleware(),
                $result->getMatchedRoute()->getAllowedMethods(),
                $result->getMatchedRouteName(),
                $result->getMatchedParams()
            );
        }

        return null;
    }

    /**
     * Generates a URI for a named route.
     *
     * @param string $name
     * @param array  $params
     * @param array  $options
     *
     * @return string
     */
    public function generateUri(string $name, array $params = [], array $options = []): string
    {
        return $this->router->generateUri($name, $params, $options);
    }

    /**
     * @return array the standard HTTP methods accepted by addRoute()
     */
    public function getAllowedMethods(): array
    {
        return $this->router::HTTP_METHODS_STANDARD;
    }

    /**
     * @return array all registered Route objects
     */
    public function getRoutes(): array
    {
        return $this->routes;
    }
}
|
import React, { useEffect, useState } from 'react'
import { Alert } from 'react-bootstrap';
import { demoParamValues, ImageTemplate, loadRemoteTemplate, ParamValues, TemplateParam } from '@resoc/core';
import TemplatePresentation from './TemplatePresentation';
import StarterAlert from './StarterAlert';
import { waitForUpdates } from './Utils';
export type TemplateAppProps = {
manifestUrl: string;
templateDir: string;
manifestPath: string;
};
/**
 * Root component for previewing a Resoc image template: loads the remote
 * manifest, shows the template with editable parameter values, and reloads
 * when the local dev server signals an update.
 */
const TemplateApp = (props: TemplateAppProps) => {
  const [template, setTemplate] = useState<ImageTemplate | null>(null);
  const [parameters, setParameters] = useState<TemplateParam[] | null>(null);
  const [values, setValues] = useState<ParamValues | null>(null);
  const [error, setError] = useState<Error | null>(null);
  const [updateListenerStarted, setUpdateListenerStarted] = useState<boolean>(false);

  // Load the manifest whenever there is no template (initially, and after
  // waitForUpdates resets it to force a reload).
  // NOTE(review): if props.manifestUrl changes while a template is already
  // loaded, the `!template` guard prevents a reload — confirm whether the
  // URL is expected to be static for the app's lifetime.
  useEffect(() => {
    (async () => {
      if (!template) {
        try {
          const newTemplate = await loadRemoteTemplate(props.manifestUrl);
          setTemplate(newTemplate);
          // Only reset parameters/values when they actually changed, so the
          // user's edits survive a reload of an unchanged manifest.
          if (
            !parameters ||
            JSON.stringify(newTemplate.parameters) !== JSON.stringify(parameters)
          ) {
            setParameters(newTemplate.parameters);
            setValues(demoParamValues(newTemplate.parameters));
          }
        }
        catch(e) {
          // FIX: a caught value is not guaranteed to be an Error; normalize
          // it so the error state always satisfies its declared type.
          setError(e instanceof Error ? e : new Error(String(e)));
        }
      }
    })();
  }, [props.manifestUrl, template]);

  // Start the update listener exactly once; on each update signal, clear the
  // template so the effect above reloads it.
  useEffect(() => {
    if (!updateListenerStarted) {
      setUpdateListenerStarted(true);
      waitForUpdates(() => {
        // Force reload
        setTemplate(null);
      });
    }
  });

  return (
    <div>
      {error && (
        <Alert variant="danger">
          <p>
            <strong>{error.message}</strong>
          </p>
        </Alert>
      )}
      <StarterAlert
        templateDir={props.templateDir}
        manifestPath={props.manifestPath}
      />
      {template && parameters && values && (
        <TemplatePresentation
          template={template}
          parameters={parameters}
          values={values}
          onChange={(newValues) => {
            setValues(newValues);
          }}
        />
      )}
    </div>
  );
};
export default TemplateApp;
|
#include "ApprovalTests/reporters/AutoApproveReporter.h"
#include "ApprovalTests/utilities/FileUtilsSystemSpecific.h"
#include <iostream>
namespace ApprovalTests
{
// "Approves" automatically: copies the received output over the approved
// file and logs what happened, so the next test run compares equal.
// Always reports success.
bool AutoApproveReporter::report(std::string received, std::string approved) const
{
	std::cout << "file " << approved
	          << " automatically approved - next run should succeed\n";
	FileUtilsSystemSpecific::copyFile(received, approved);
	return true;
}
}
|
#!perl
# Test scoping issues with embedded code in regexps.
BEGIN {
require q(test.pl);
}
plan 17;
# Functions for turning to-do-ness on and off (as there are so many
# to-do tests)
sub on { $::TODO = "(?{}) implementation is screwy" }
sub off { undef $::TODO }
on;
fresh_perl_is <<'CODE', '781745', {}, '(?{}) has its own lexical scope';
my $x = 7; my $a = 4; my $b = 5;
print "a" =~ /(?{ print $x; my $x = 8; print $x; my $y })a/;
print $x,$a,$b;
CODE
fresh_perl_is <<'CODE',
for my $x("a".."c") {
$y = 1;
print scalar
"abcabc" =~
/
(
a (?{ print $y; local $y = $y+1; print $x; my $x = 8; print $x })
b (?{ print $y; local $y = $y+1; print $x; my $x = 9; print $x })
c (?{ print $y; local $y = $y+1; print $x; my $x = 10; print $x })
){2}
/x;
print "$x ";
}
CODE
'1a82a93a104a85a96a101a 1b82b93b104b85b96b101b 1c82c93c104c85c96c101c ',
{},
'multiple (?{})s in loop with lexicals';
fresh_perl_is <<'CODE', '781745', {}, 'run-time re-eval has its own scope';
use re qw(eval);
my $x = 7; my $a = 4; my $b = 5;
my $rest = 'a';
print "a" =~ /(?{ print $x; my $x = 8; print $x; my $y })$rest/;
print $x,$a,$b;
CODE
fresh_perl_is <<'CODE', '178279371047857967101745', {},
use re "eval";
my $x = 7; $y = 1;
my $a = 4; my $b = 5;
print scalar
"abcabc"
=~ ${\'(?x)
(
a (?{ print $y; local $y = $y+1; print $x; my $x = 8; print $x })
b (?{ print $y; local $y = $y+1; print $x; my $x = 9; print $x })
c (?{ print $y; local $y = $y+1; print $x; my $x = 10; print $x })
){2}
'};
print $x,$a,$b
CODE
'multiple (?{})s in "foo" =~ $string';
fresh_perl_is <<'CODE', '178279371047857967101745', {},
use re "eval";
my $x = 7; $y = 1;
my $a = 4; my $b = 5;
print scalar
"abcabc" =~
/${\'
(
a (?{ print $y; local $y = $y+1; print $x; my $x = 8; print $x })
b (?{ print $y; local $y = $y+1; print $x; my $x = 9; print $x })
c (?{ print $y; local $y = $y+1; print $x; my $x = 10; print $x })
){2}
'}/x;
print $x,$a,$b
CODE
'multiple (?{})s in "foo" =~ /$string/x';
fresh_perl_is <<'CODE', '123123', {},
for my $x(1..3) {
push @regexps = qr/(?{ print $x })a/;
}
"a" =~ $_ for @regexps;
"ba" =~ /b$_/ for @regexps;
CODE
'qr/(?{})/ is a closure';
off;
{
local $::TODO = "re-eval #328" if is_perlcc_compiled;
"a" =~ do { package foo; qr/(?{ $::pack = __PACKAGE__ })a/ };
is $pack, 'foo', 'qr// inherits package';
"a" =~ do { use re "/x"; qr/(?{ $::re = qr-- })a/ };
is $re, '(?^x:)', 'qr// inherits pragmata';
}
on;
"ba" =~ /b${\do { package baz; qr|(?{ $::pack = __PACKAGE__ })a| }}/;
is $pack, 'baz', '/text$qr/ inherits package';
"ba" =~ m+b${\do { use re "/i"; qr|(?{ $::re = qr-- })a| }}+;
is $re, '(?^i:)', '/text$qr/ inherits pragmata';
off;
# A plain interpolated string containing (?{}) (compiled at match time under
# `use re 'eval'`) must likewise pick up the package and pragmata in effect
# at the match site.
{
use re 'eval';
package bar;
"ba" =~ /${\'(?{ $::pack = __PACKAGE__ })a'}/;
}
is $pack, 'bar', '/$text/ containing (?{}) inherits package';
{
use re 'eval', "/m";
"ba" =~ /${\'(?{ $::re = qr -- })a'}/;
}
{
local $::TODO = "re-eval #328" if is_perlcc_compiled;
is $re, '(?^m:)', '/$text/ containing (?{}) inherits pragmata';
}
on;
# die() thrown from inside (?{}) must be trappable by an enclosing eval,
# leaving $a/$b intact so '45' prints afterwards.
# Fixed: a stray trailing `"` after `print $a,$b` made the child program a
# syntax error, so the expected output '45' could never be produced.
fresh_perl_is <<'CODE', '45', { stderr => 1 }, '(?{die})';
eval { my $a=4; my $b=5; "a" =~ /(?{die})a/ }; print $a,$b
CODE
SKIP: {
# The remaining TODO tests crash, which will display an error dialog
# on Windows that has to be manually dismissed. We don't want this
# to happen for release builds: 5.14.x, 5.16.x etc.
# On UNIX, they produce ugly 'Aborted' shell output mixed in with the
# test harness output, so skip on all platforms.
skip "Don't run crashing TODO test on release build", 3
if $::TODO && (int($]*1000) & 1) == 0;
fresh_perl_is <<'CODE', '45', { stderr => 1 }, '(?{last})';
{ my $a=4; my $b=5; "a" =~ /(?{last})a/ }; print $a,$b
CODE
# Fixed: this test is labelled '(?{next})' but its body duplicated the
# previous test's (?{last}); exercise `next` so the control-flow op under
# test matches the test name.
fresh_perl_is <<'CODE', '45', { stderr => 1 }, '(?{next})';
{ my $a=4; my $b=5; "a" =~ /(?{next})a/ }; print $a,$b
CODE
fresh_perl_is <<'CODE', '45', { stderr => 1 }, '(?{return})';
print sub { my $a=4; my $b=5; "a" =~ /(?{return $a.$b})a/ }->();
CODE
}
# goto LABEL from inside (?{}) must jump past the die and still see $a/$b.
fresh_perl_is <<'CODE', '45', { stderr => 1 }, '(?{goto})';
my $a=4; my $b=5; "a" =~ /(?{goto _})a/; die; _: print $a,$b
CODE
|
module ltm2ubv
   ! Colour-conversion tables: a 34-point log T grid (tgr), a 13-point
   ! log g grid (ggr), and 5 tabulated quantities per (T, g) node.
   use real_kind
   implicit none
   real(double) :: tgr(34), ggr(13), tab(34,13,5)
contains

   !> Read the colour-conversion tables from the already-opened unit fu,
   !> then close that unit.
   subroutine load_colour_conversion_table(fu)
      use real_kind
      implicit none
      integer, intent(in) :: fu
      integer :: i,j, k
991   format (3(10f10.5,/), 4f10.5,/, 10f10.5,/, 3f10.5,/, 442(5f8.3,/))
      read (fu,991) tgr, ggr, (((tab(i,j,k), k=1,5), j=1,13), i=1,34)
      close (fu)
   end subroutine

   !> Bilinear interpolation of the tables in (log T, log g):
   !> yields values of MV, B-V and U-B for given log L, log T, and mass.
   subroutine lt2ubv ( logl, logt, mass, mv, bminv, uminb )
      use real_kind
      implicit none
      real(double) :: logl, logt, mass, mv, bminv, uminb
      integer :: i,ing1,ing2,int1,int2,k,j,k0,k1,k2
      real(double) :: logm,logg,logg1,logg2,logt1,logt2
      real(double) :: dg1,dg2,bc1,bc2,ub1,ub2,bv1,bv2,dt1,dt2,ubx,bvx,bcx,mbol
      real(double) :: bc(4), ub(4), bv(4), vr(4), ri(4)

      ! Surface gravity from mass, Teff and luminosity.
      logm = log10(mass)
      logg = logm + 4.0d0*logt - logl - 10.6071d0

      ! Binary search for the log g bracket [ggr(ing1), ggr(ing2)]
      ! (replaces the old GOTO 1 loop flagged FIXME; same iteration order).
      ing1 = 1
      ing2 = 13
      do while ( ing2 - ing1 > 1 )
         i = (ing1 + ing2)/2
         if ( ggr(i) > logg ) then
            ing2 = i
         else
            ing1 = i
         end if
      end do
      logg1 = ggr(ing1)
      logg2 = ggr(ing2)

      ! Binary search for the log T bracket [tgr(int1), tgr(int2)]
      ! (replaces the old GOTO 2 loop flagged FIXME).
      int1 = 1
      int2 = 34
      do while ( int2 - int1 > 1 )
         i = (int1 + int2)/2
         if ( tgr(i) > logt ) then
            int2 = i
         else
            int1 = i
         end if
      end do
      logt1 = tgr(int1)
      logt2 = tgr(int2)

      ! Gather the four table nodes surrounding (logt, logg).
      do k = 1, 2
         do j = 1, 2
            k0 = (k - 1)*2 + j
            k1 = int1 - 1 + k
            k2 = ing1 - 1 + j
            bc(k0) = tab(k1, k2, 1)
            ub(k0) = tab(k1, k2, 2)
            bv(k0) = tab(k1, k2, 3)
            vr(k0) = tab(k1, k2, 4)
            ri(k0) = tab(k1, k2, 5)
         end do
      end do

      ! Interpolate in log g at each of the two temperatures...
      dg1 = (logg - logg1)/(logg2 - logg1)
      dg2 = 1.0d0 - dg1
      bc1 = bc(2)*dg1 + bc(1)*dg2
      ub1 = ub(2)*dg1 + ub(1)*dg2
      bv1 = bv(2)*dg1 + bv(1)*dg2
      bc2 = bc(4)*dg1 + bc(3)*dg2
      ub2 = ub(4)*dg1 + ub(3)*dg2
      bv2 = bv(4)*dg1 + bv(3)*dg2

      ! ...then in log T between them.
      dt1 = (logt - logt1)/(logt2 - logt1)
      dt2 = 1.0d0 - dt1
      bcx = bc2*dt1 + bc1*dt2
      ubx = ub2*dt1 + ub1*dt2
      bvx = bv2*dt1 + bv1*dt2

      ! Bolometric magnitude, then apply the bolometric correction.
      mbol = 4.75d0 - 2.5d0*logl
      mv = mbol - bcx
      bminv = bvx
      uminb = ubx
   end subroutine lt2ubv
end module
|
import Prelude hiding ((^))
import qualified Prelude ((^))
import ProjectEuler.Divisors (isqrt)
-- | Monomorphised power operator: pins the exponent to Int so uses of (^)
-- below need no type annotations (Prelude's (^) is hidden above).
(^) :: Num a => a -> Int -> a
(^) = (Prelude.^)
-- | Cartesian coordinates of cell @n@ (n >= 1) on a square spiral with 1 at
-- the origin, winding outward (the Project Euler 28 layout).
--
-- For n > 1: @period@ is the (even) side length of the ring containing n,
-- @start@ its first cell, @perim@ the offset of n around the ring, and
-- @s@/@f@ select which of the four sides n is on and how far along it.
--
-- NOTE(review): assumes `isqrt` from ProjectEuler.Divisors is the floor
-- integer square root — confirm.
pos :: Int -> (Int, Int)
pos 1 = (0, 0)
pos n = case s of
  0 -> (radius, radius - 1 - f)
  1 -> (radius - 1 - f, -radius)
  2 -> (-radius, -radius + 1 + f)
  3 -> (-radius + 1 + f, radius)
  _ -> error "incorrect spiral"
  where
    period = roundUpEven . isqrt $ n - 1
    start = (period - 1) ^ 2 + 1
    perim = n - start
    radius = period `quot` 2
    (s, f) = perim `quotRem` period
-- | Smallest even number >= n (even inputs are returned unchanged).
roundUpEven :: Int -> Int
roundUpEven n = if even n then n else n + 1
-- | A lattice point lies on one of the two main diagonals exactly when its
-- coordinates agree in absolute value.
onDiag :: (Int, Int) -> Bool
onDiag (x, y) = let (ax, ay) = (abs x, abs y) in ax == ay
-- | Sum of every spiral cell value in [1 .. n*n] whose position falls on a
-- main diagonal.
answer :: Int -> Int
answer n = sum [k | k <- [1 .. n * n], onDiag (pos k)]
-- | Entry point: diagonal sum of the 1001x1001 number spiral.
main :: IO ()
main = print $ answer 1001
|
import {
Feature,
Point,
Rectangle,
RouteSummary,
RouteNote,
} from '@app/route-lib';
import { Observable } from 'rxjs';
/**
 * RouteGuide service contract expressed with RxJS: each RPC shape
 * (unary, server-streaming, client-streaming, bidirectional) surfaces
 * as an Observable.
 */
export interface RouteGuide {
  /** Unary: look up the feature located at a point. */
  getFeature(data: Point): Observable<Feature>;
  /** Server streaming: emit every feature inside the rectangle. */
  listFeatures(data: Rectangle): Observable<Feature>;
  /** Client streaming: consume a stream of points, emit one route summary. */
  recordRoute(upstream: Observable<Point>): Observable<RouteSummary>;
  /** Bidirectional streaming: route notes in, route notes out. */
  routeChat(upstream: Observable<RouteNote>): Observable<RouteNote>;
}
|
Rails.application.routes.draw do
root 'home#index'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
# Devise with custom session and registration controllers.
devise_for :users, controllers: {
sessions: 'users/sessions',
registrations: "users/registrations"
}
resources :organizations, only: [:index, :show, :new, :edit, :update, :create] do
# PUT /organizations/:id/add_member
put 'add_member', on: :member
end
resources :interests
# NOTE(review): these bare '/edit', '/new' and '/destroy' routes have no :id
# segment, duplicating paths `resources :interests` already provides —
# confirm they are intentional.
get 'edit', to: 'interests#edit'
get 'new', to: 'interests#new'
delete 'destroy', to: 'interests#destroy'
resources :skills
# NOTE(review): these declare the SAME '/edit', '/new' and '/destroy' paths
# as the interests block above; Rails matches the first route declared, so
# the skills versions appear unreachable — verify.
get 'edit', to: 'skills#edit'
get 'new', to: 'skills#new'
delete 'destroy', to: 'skills#destroy'
# Named search endpoints.
get '/search/users', to: 'search#users', as: 'search_users'
get '/search/organizations', to: 'search#organizations', as: 'search_organizations'
end
|
import { ApiModelProperty } from '@nestjs/swagger';
/** Swagger-documented wrapper for an HTTP status code/description pair. */
export class ResponseStatus {
  @ApiModelProperty({ description: 'HTTP status code.', type: 'string' })
  code: string;
  @ApiModelProperty({ description: 'HTTP status description/message.', type: 'string' })
  description: string;
}
/** Swagger-documented pagination metadata attached to list responses. */
export class PagingData {
  @ApiModelProperty({ description: 'Current page number.', type: 'number' })
  page: number;
  @ApiModelProperty({ description: 'The total of all pages.', type: 'number' })
  totalPages: number;
  @ApiModelProperty({ description: 'The total of all rows.', type: 'number' })
  totalRows: number;
  @ApiModelProperty({ description: 'Rows displayed per page.', type: 'number' })
  rowsPerPage: number;
}
|
C Copyright(C) 2014-2017 National Technology & Engineering Solutions of
C Sandia, LLC (NTESS). Under the terms of Contract DE-NA0003525 with
C NTESS, the U.S. Government retains certain rights in this software.
C
C Redistribution and use in source and binary forms, with or without
C modification, are permitted provided that the following conditions are
C met:
C
C * Redistributions of source code must retain the above copyright
C notice, this list of conditions and the following disclaimer.
C
C * Redistributions in binary form must reproduce the above
C copyright notice, this list of conditions and the following
C disclaimer in the documentation and/or other materials provided
C with the distribution.
C
C * Neither the name of NTESS nor the names of its
C contributors may be used to endorse or promote products derived
C from this software without specific prior written permission.
C
C THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
C "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
C LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
C A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
C OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
C SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
C LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
C DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
C THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
C (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
C OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
C
C $Id: pdata.f,v 1.5 2007/07/24 13:10:18 gdsjaar Exp $
C $Log: pdata.f,v $
C Revision 1.5 2007/07/24 13:10:18 gdsjaar
C Fix problem with boundary condition memory overwrite.
C
C Remove old ls5 and r25 terminal tests
C
C Revision 1.4 1999/06/21 22:43:40 gdsjaar
C Fixed more uninitialized variables; one was causing core dump on g77
C compiled executable.
C
C VERSN was not consistently defined -- now 10 characters everywhere
C
C Updated so full version string output
C
C Added capability to debug memory using unit specified in EXT99
C variable. Similar to STRTUP in SUPLIB
C
C Cleaned up some other code
C
C Upped version
C
C Revision 1.3 1998/07/14 18:19:31 gdsjaar
C Removed unused variables, cleaned up a little.
C
C Changed BLUE labels to GREEN to help visibility on black background
C (indirectly requested by a couple users)
C
C Revision 1.2 1992/12/08 22:13:54 gdsjaar
C Changed color of point label output from yellow to red
C
c Revision 1.1.1.1 1990/11/30 11:13:14 gdsjaar
c FASTQ Version 2.0X
c
c Revision 1.1 90/11/30 11:13:13 gdsjaar
c Initial revision
c
C
CC* FILE: [.MAIN]PDATA.FOR
CC* MODIFIED BY: TED BLACKER
CC* MODIFICATION DATE: 7/6/90
CC* MODIFICATION: COMPLETED HEADER INFORMATION
C
SUBROUTINE PDATA (MP, ML, MR, MSC, IPOINT, COOR, IPBOUN, ILINE,
& LTYPE, NINT, LCON, FACTOR, ILBOUN, ISBOUN, IREGN, IMAT, LINKP,
& LINKL, LINKR, LINKSC, RSIZE, SCHEME, DEFSCH, DEFSIZ, REXTRM,
& N, LABP, LABL, LABR, FULL, LABMD, LABI, LABF, LABPB, LABLB,
& LABSBD, LABSC, LABSZ, AXISD, TITLE, XMIN, XMAX, YMIN, YMAX,
& XX1, YY1, XX2, YY2, DEV1, VERSN)
C***********************************************************************
C
C SUBROUTINE PDATA = PLOTS FLAGGED POINTS, LINES, AND REGIONS
C
C THE LAB* LOGICALS SELECT WHICH ANNOTATIONS ARE DRAWN (POINT NUMBERS,
C LINE NUMBERS, INTERVALS, FACTORS, BOUNDARY FLAGS, REGION NUMBERS,
C MATERIALS, SIZES AND SCHEMES). ENTITIES WITH NEGATIVE IDS IN
C IPOINT/ILINE/IREGN ARE THE ONES FLAGGED FOR PLOTTING.
C NOTE(REVIEW): PLTSTD (1, X.) APPEARS TO SELECT A COLOR INDEX FOR
C SUBSEQUENT TEXT/LINES - CONFIRM AGAINST THE PLT LIBRARY.
C
C***********************************************************************
C
DIMENSION IPOINT (MP), COOR (2, MP), IPBOUN (MP)
DIMENSION ILINE (ML), LTYPE (ML), NINT (ML), LCON (3, ML)
DIMENSION FACTOR (ML)
DIMENSION ILBOUN (ML), ISBOUN (ML)
DIMENSION IREGN (MR), IMAT (MR), REXTRM (4, MR), RSIZE (MR)
DIMENSION SCHEME (MSC)
DIMENSION LINKP (2, MP), LINKL (2, ML), LINKR (2, MR)
DIMENSION LINKSC (2, MR)
DIMENSION N (29), XDUM (2), YDUM (2)
C
CHARACTER*72 DUMMY, SCHEME, DEFSCH, TITLE, DEV1*3
CHARACTER*8 DATE, TIME, VERSN*10
C
LOGICAL LABP, LABL, LABR, AXISD, LABMD, LABI, LABF
LOGICAL LABPB, LABLB, LABSBD
LOGICAL ADDLNK, CPUIFC, TEST, FULL, LABSC
LOGICAL GETMAX, ADD, LABSZ
C
C INITIALIZE THE PLOTTING SURFACE
C
C WHEN TEST IS ENABLED, PLOT COMMANDS ARE MIRRORED AS HP-GL TEXT TO
C UNIT 12 (HP7580 PLOTTER FILE).
TEST = .FALSE.
GETMAX = .FALSE.
IF (TEST)OPEN (UNIT = 12, FILE = 'HP7580.DAT', STATUS = 'NEW')
ADDLNK = .FALSE.
CALL PLTBGN
CALL PLTSTV (2, 160.)
C
C FIT THE MODEL EXTENTS INTO THE 1.0 X .75 DEVICE VIEWPORT, PADDING
C THE SHORTER DIRECTION SO THE ASPECT RATIO IS PRESERVED.
C
XDIMR = ABS (XMAX - XMIN)
YDIMR = ABS (YMAX - YMIN)
XDIMD = 1.
YDIMD = .75
CALL MPVIEW (0., XDIMD, 0., YDIMD)
XRAT = XDIMR/XDIMD
YRAT = YDIMR/YDIMD
IF (XRAT.LT.YRAT) THEN
XDIMR = XDIMD*YRAT
XX1 = (XMIN + XMAX - XDIMR)*.5
XX2 = (XMIN + XMAX + XDIMR)*.5
XDIMR = XX2 - XX1
YY1 = YMIN
YY2 = YMAX
ELSE
YDIMR = YDIMD*XRAT
YY1 = (YMIN + YMAX - YDIMR)*.5
YY2 = (YMIN + YMAX + YDIMR)*.5
YDIMR = YY2 - YY1
XX1 = XMIN
XX2 = XMAX
ENDIF
C
C SET UP SCALING EXTREMES FOR AXIS
C
IF (TEST) THEN
WRITE (12, 10000)'IN;SP6;;IP - 5710, -10060, 15710, 10060;'
WRITE (12, 10010)
& 'SC', INT (XX1*1000), ', ', INT (YY1*1000), ', ',
& INT (XX2*1000), ', ', INT (YY2*1000), ';'
ENDIF
IF (AXISD) THEN
XDUM (1) = XX1 - (XDIMR*.05)
XDUM (2) = XX2 + (XDIMR*.05)
YDUM (1) = YY1 - (YDIMR*.05)
YDUM (2) = YY2 + (YDIMR*.05)
SHRINK = .2
ELSE
SHRINK = .1
ENDIF
C
C SHRINK TO FIT A BORDER ON THE PLOT
C
XX1 = XX1 - (XDIMR*SHRINK)
XX2 = XX2 + (XDIMR*SHRINK)
YY1 = YY1 - (YDIMR*SHRINK)
YY2 = YY2 + (YDIMR*SHRINK)
CALL MPORT2 (XX1, XX2, YY1, YY2)
CALL PLTFRM (0)
C
C PLOT THE TITLE AND THE TRACE
C
CALL STRLNG (TITLE, LEN)
IF ( (LEN.GT.1) .OR. (TITLE (1:1).NE.' ')) THEN
CALL PLTXHL (TITLE (1:LEN), XLEN)
XBEGIN = AMAX1 (0., (XDIMD*.5 - XLEN*.5))
CALL PLTXTH (XBEGIN, YDIMD*.95, TITLE (1:LEN))
ENDIF
C BUILD THE "DRAWN BY <VERSION> <DATE> <TIME>" TRACE LINE.
DUMMY(1:10) = ' DRAWN BY '
DUMMY(11:20) = VERSN
DUMMY(21:22) = ' '
CALL EXDATE (DATE)
DUMMY(23:30) = DATE
DUMMY(31:32) = ' '
CALL EXTIME (TIME)
DUMMY(33:40) = TIME
CALL PLTXTH (0., 0., DUMMY(1:40))
C
C DRAW THE AXIS IF REQUIRED, AND SET CLIPPING WITHIN AXIS
C
C CPUIFC CHECKS FOR USER INTERRUPT; A .TRUE. RESULT ABORTS TO 130.
IF (AXISD)CALL SETAXS (XDUM, YDUM)
IF (CPUIFC (.TRUE.))GOTO 130
C
C PLOT THE POINTS FLAGGED
C
IF ( (LABP) .OR. (LABPB)) THEN
DO 100 I = 1, N (18)
IF (CPUIFC (.TRUE.))GOTO 130
CALL LTSORT (MP, LINKP, I, II, ADDLNK)
IF (II.GT.0) THEN
IF (IPOINT (II).LT.0) THEN
INUM = - IPOINT (II)
CALL MP2PT (1, COOR (1, II), COOR (2, II),
& X1, Y1, MASK)
C MOD(MASK,2).NE.0 MEANS THE POINT MAPPED INSIDE THE VIEWPORT.
IF (MOD (MASK, 2).NE.0) THEN
C
C PLOT THE POINT LABELS
C
IF (LABP) THEN
CALL PLTSTD (1, 1.)
CALL GETDUM (INUM, DUMMY, LEN)
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
CALL PLTXHE (X1, Y1)
ENDIF
C
C PLOT THE POINBC FLAGS
C
IF ( ( (LABPB) .OR. ( (FULL) .AND. (LABP))) .AND.
& (IPBOUN (II).GT.0)) THEN
CALL PLTSTD (1, 5.)
IF (LABP) THEN
CALL PLTXTH (X1, Y1, '/')
CALL PLTXHE (X1, Y1)
ENDIF
CALL GETDUM (IPBOUN (II), DUMMY, LEN)
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
ENDIF
ENDIF
ENDIF
ENDIF
100 CONTINUE
ENDIF
C
C PLOT ALL LINES THAT HAVE BEEN FLAGGED
C
C ADD TRACKS WHETHER A LABEL HAS ALREADY BEEN WRITTEN AT (X1,Y1) SO
C SUBSEQUENT FIELDS ARE SEPARATED WITH A '/'.
DO 110 I = 1, N (19)
IF (CPUIFC (.TRUE.))GOTO 130
CALL LTSORT (ML, LINKL, I, II, ADDLNK)
IF (II.GT.0) THEN
IF (LABL) THEN
ADD = .TRUE.
ELSE
ADD = .FALSE.
ENDIF
CALL PLTSTD (1, 7.)
IF (ILINE (II).LT.0) THEN
IF ( (LABL) .OR. (LABLB) .OR. (LABSBD) .OR. (LABF)
& .OR. (LABI)) THEN
KNUM = - ILINE (II)
ELSE
KNUM = 0
ENDIF
LT = LTYPE (II)
IP1 = LCON (1, II)
IP2 = LCON (2, II)
IP3 = LCON (3, II)
CALL LTSORT (MP, LINKP, IP1, IPNTR1, ADDLNK)
CALL LTSORT (MP, LINKP, IP2, IPNTR2, ADDLNK)
IF (IP3.NE.0) THEN
CALL LTSORT (MP, LINKP, IABS (IP3), IPNTR3, ADDLNK)
ELSE
IPNTR3 = 0
ENDIF
C DRAW THE LINE ONLY WHEN BOTH ENDPOINTS (AND, FOR NON-STRAIGHT
C TYPES, THE THIRD DEFINING POINT) RESOLVE TO VALID POINTERS.
IF ((IPNTR1.GT.0) .AND. (IPNTR2.GT.0) .AND.
& ((LT.EQ.1) .OR. (IPNTR3.GT.0)) ) THEN
CALL DLINE (MP, ML, COOR, LINKP, KNUM, LT, IP1, IP2,
& IP3, LABL, X1, Y1, TEST, GETMAX, DUM1, DUM2, DUM3,
& DUM4)
C
C PLOT INTERVAL NUMBERS
C
IF ( ( (FULL) .AND. (LABL)) .OR. (LABI)) THEN
CALL PLTSTD (1, 5.)
IF (ADD) THEN
CALL PLTXHE (X1, Y1)
CALL PLTXTH (X1, Y1, '/')
CALL PLTXHE (X1, Y1)
ENDIF
CALL GETDUM (NINT (II), DUMMY, LEN)
IF (TEST) THEN
CALL PLTD2G (X1, Y1, XR, YR)
CALL PLTG2D (X1, Y1, XR, YR)
WRITE (12, 10020)'PU;PA', INT (XR*1000.),
& ', ', INT (YR*1000.), ';LB',
& DUMMY (1:LEN), CHAR (3)
ENDIF
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
ADD = .TRUE.
ENDIF
C
C PLOT THE LINE FACTOR
C
IF ( ( (FULL) .AND. (LABL)) .OR. (LABF)) THEN
IF (ADD) THEN
CALL PLTSTD (1, 1.)
CALL PLTXHE (X1, Y1)
CALL PLTXTH (X1, Y1, '/')
CALL PLTXHE (X1, Y1)
ENDIF
CALL GTXDUM (FACTOR (II), DUMMY, LEN)
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
ADD = .TRUE.
ENDIF
C
C PLOT THE LINEBC FLAGS
C
IF ( ( ( (FULL) .AND. (LABL)) .OR. (LABLB)) .AND.
& (ILBOUN (II).GT.0)) THEN
CALL PLTSTD (1, 2.)
IF (ADD) THEN
CALL PLTXHE (X1, Y1)
CALL PLTXTH (X1, Y1, '/')
CALL PLTXHE (X1, Y1)
ENDIF
CALL GETDUM (ILBOUN (II), DUMMY, LEN)
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
ADD = .TRUE.
ENDIF
C
C PLOT THE SIDEBC FLAGS
C
IF ( ( ( (FULL) .AND. (LABL)) .OR. (LABSBD)) .AND.
& (ISBOUN (II).GT.0)) THEN
CALL PLTSTD (1, 3.)
IF (ADD) THEN
CALL PLTXHE (X1, Y1)
CALL PLTXTH (X1, Y1, '/')
CALL PLTXHE (X1, Y1)
ENDIF
CALL GETDUM (ISBOUN (II), DUMMY, LEN)
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
ENDIF
ENDIF
ENDIF
ENDIF
110 CONTINUE
C
C PLOT ALL REGIONS FLAGGED
C
C LABELS ARE PLACED AT THE CENTER OF EACH REGION'S EXTREMES BOX.
IF ( (LABR) .OR. (LABMD) .OR. (LABSC) .OR. (LABSZ)) THEN
IF (CPUIFC (.TRUE.))GOTO 130
DO 120 I = 1, N (22)
CALL LTSORT (MR, LINKR, I, II, ADDLNK)
IF (II.GT.0) THEN
IF (IREGN (II).LT.0) THEN
ADD = .FALSE.
INUM = - IREGN (II)
XMID = (REXTRM (1, II) + REXTRM (2, II))/2.
YMID = (REXTRM (3, II) + REXTRM (4, II))/2.
CALL MP2PT (1, XMID, YMID, X1, Y1, MASK)
IF ( (MOD (MASK, 2).NE.0)) THEN
C
C PLOT THE REGION NUMBER
C
IF (LABR) THEN
CALL PLTSTD (1, 2.)
CALL GETDUM (INUM, DUMMY, LEN)
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
ADD = .TRUE.
ENDIF
C
C PLOT OUT THE MATERIAL NUMBER
C
IF (((FULL) .AND. (LABR)) .OR. (LABMD)) THEN
CALL PLTSTD (1, 1.)
IF (ADD) THEN
CALL PLTXHE (X1, Y1)
CALL PLTXTH (X1, Y1, '/')
CALL PLTXHE (X1, Y1)
ENDIF
ADD = .TRUE.
CALL GETDUM (IMAT (II), DUMMY, LEN)
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
ENDIF
C
C PLOT OUT THE SIZE NUMBER FOR THE REGION
C
IF (((FULL) .AND. (LABR)) .OR. (LABSZ)) THEN
CALL PLTSTD (1, 1.)
IF (ADD) THEN
CALL PLTXHE (X1, Y1)
CALL PLTXTH (X1, Y1, '/')
CALL PLTXHE (X1, Y1)
ENDIF
ADD = .TRUE.
CALL GTXDUM (RSIZE (II), DUMMY, LEN)
CALL PLTXTH (X1, Y1, DUMMY (1:LEN))
ENDIF
C
C PLOT OUT THE SCHEME
C
C FALLS BACK TO THE DEFAULT SCHEME WHEN THE REGION HAS NONE.
IF (((FULL) .AND. (LABR)) .OR. (LABSC)) THEN
CALL PLTSTD (1, 7.)
IF (ADD) THEN
CALL PLTXHE (X1, Y1)
CALL PLTXTH (X1, Y1, '/')
CALL PLTXHE (X1, Y1)
ENDIF
CALL LTSORT (MR, LINKSC, INUM, IPNTR, ADDLNK)
IF ( (INUM.LE.N (24)) .AND. (IPNTR.GT.0)) THEN
CALL STRLNG (SCHEME (IPNTR), LEN)
IF (TEST) THEN
CALL PLTD2G (X1, Y1, XR, YR)
WRITE (12, 10020)'PU;PA', INT (XR*1000.),
& ', ', INT (YR*1000.), ';LB',
& SCHEME (IPNTR) (1:LEN), CHAR (3)
ENDIF
CALL PLTXTH (X1, Y1, SCHEME (IPNTR) (1:LEN))
ELSE
CALL STRLNG (DEFSCH, LEN)
IF (TEST) THEN
CALL PLTD2G (X1, Y1, XR, YR)
WRITE (12, 10020)'PU;PA', INT (XR*1000.),
& ', ', INT (YR*1000.), ';LB',
& DEFSCH (1:LEN), CHAR (3)
ENDIF
CALL PLTXTH (X1, Y1, DEFSCH (1:LEN))
ENDIF
ENDIF
ENDIF
ENDIF
ENDIF
120 CONTINUE
ENDIF
C
C 130 IS THE COMMON EXIT (NORMAL COMPLETION OR USER INTERRUPT):
C RESTORE COLOR, RING THE BELL AND FLUSH THE DEVICE.
130 CONTINUE
CALL PLTSTD (1, 7.)
CALL PLTBEL
CALL PLTFLU
C
RETURN
C
10000 FORMAT (A)
10010 FORMAT (A2, I10, A1, I10, A1, I10, A1, I10, A1)
10020 FORMAT (A5, I10, A1, I10, A3, A, A1)
C
END
|
``` bash
$this->crud->addFields($multiple_fields_array); // Tambah beberapa form fields
$this->crud->removeFields($multiple_fields_array); // Hapus beberapa fields
```
|
```toml
title = "FTP 和 SFTP"
date = "2016-02-04 15:00:00"
slug = "zh/docs/deploy/ftp-sftp"
hover = "docs"
lang = "zh"
template = "docs.html"
```
`PoGo` 可以使用 FTP 和 SFTP 账号发布,目前只支持 **用户名** 和 **密码** 登录的方式。
```bash
pogo deploy ftp --local="dest" --user="user" --password="xxx" --host="127.0.0.1:21" --directory="pogo"
pogo deploy sftp --local="dest" --user="user" --password="xxx" --host="127.0.0.1:22" --directory="pogo"
```
`--local` 设置本地编译好的内容的文件夹。
`--user` 和 `--password` 设置连接的账号和密码。 SFTP 还不支持使用 `.ssh/keys` 登录。
`--host` 设置连接的地址和端口。
`--directory` 设置线上保存的目录。SFTP 中 `~/` 代表用户目录。
|
FactoryBot.define do
# Reservation line item: starts in the 'requested' state and builds its
# associated :reservation and :component factories.
factory :reservation_detail do
status { 'requested' }
reservation
component
end
end
|
unit SkillDetailEdit;

// Edit form for a single skill record (DevExpress layout/grid controls),
// derived from the project's generic TFormEditAbs editor.

interface

uses
Windows, Messages, SysUtils, Variants, Classes, Graphics, Controls, Forms,
Dialogs, FormEditAbsUnit, cxGraphics, cxControls, cxLookAndFeels,
cxLookAndFeelPainters, cxStyles, cxCustomData, cxFilter, cxData,
cxDataStorage, cxEdit, DB, cxDBData, dxBarDBNav, dxBar, cxClasses,
ImgList, ActnList, dxLayoutControl, cxGridLevel, cxGridCustomView,
cxGridCustomTableView, cxGridTableView, cxGridDBTableView, cxGrid, cxPC,
cxContainer, dxLayoutcxEditAdapters, cxMemo, cxDBEdit, cxTextEdit,
cxMaskEdit, cxSpinEdit, cxGroupBox, cxRadioGroup, Menus, StdCtrls,
cxButtons;

type
TfmSkillDetailEdit = class(TFormEditAbs)
cxCode: TcxDBSpinEdit;
dxLayoutItem1: TdxLayoutItem;
cxSkill: TcxDBMemo;
dxLayoutItem2: TdxLayoutItem;
dxLayoutSeparatorItem1: TdxLayoutSeparatorItem;
dxLayoutSeparatorItem2: TdxLayoutSeparatorItem;
cxMin: TcxDBMemo;
dxLayoutItem3: TdxLayoutItem;
cxMid: TcxDBMemo;
dxLayoutItem4: TdxLayoutItem;
cxMax: TcxDBMemo;
dxLayoutItem5: TdxLayoutItem;
cxType: TcxDBRadioGroup;
dxLayoutItem6: TdxLayoutItem;
cxSetCode: TcxButton;
dxLayoutItem7: TdxLayoutItem;
dxLayoutGroup1: TdxLayoutGroup;
procedure cxSetCodeClick(Sender: TObject);
procedure cxTypeClick(Sender: TObject);
private
{ Private declarations }
public
{ Public declarations }
end;

var
fmSkillDetailEdit: TfmSkillDetailEdit;

implementation

{$R *.dfm}

uses
DataModuleUnit
;

// Assigns the next sequential code for the skill being edited: counts the
// existing skills sharing the current skill's type and stores count + 1.
// NOTE(review): this reopens and fully scans tSkillList on every click;
// the count includes the record being edited if it is already saved —
// confirm that is the intended numbering.
procedure TfmSkillDetailEdit.cxSetCodeClick(Sender: TObject);
var
Code: integer;
begin
FocusControl(cxSetCode);
Code := 0;
dmPublic.tSkillList.Close;
dmPublic.tSkillList.Open;
dmPublic.tSkillList.First;
while not dmPublic.tSkillList.Eof do
begin
// Count only skills of the same type as the record being edited.
if dmPublic.tSkillListSkillTypeId.Value = dmPublic.tSkillSkillTypeId.Value then
Code := Code + 1;
dmPublic.tSkillList.Next;
end;
dmPublic.tSkill.Edit;
dmPublic.tSkillCode.Value := Code + 1;
end;

// Changing the type radio group re-numbers the code within the new type.
procedure TfmSkillDetailEdit.cxTypeClick(Sender: TObject);
begin
cxSetCodeClick(Sender);
end;

end.
|
var testrunner = require('qunit');

// Log only the summary; suppress per-test and per-assertion output.
testrunner.options.log.summary = true;
testrunner.options.log.tests = false;
testrunner.options.log.assertions = false;

testrunner.run({
  deps: ['./src/htmlparser.js', './src/htmllint.js'],
  code: './src/htmlminifier.js',
  tests: [
    './tests/minifier.js',
    './tests/lint.js'
  ]
}, function(err, report) {
  // Check err FIRST: when the runner itself fails, `report` may be
  // undefined, and the previous order threw a TypeError reading
  // `report.failed` instead of reporting the real error.
  if (err) {
    console.log(err);
    // A runner error should also fail the build.
    process.on('exit', function() {
      process.exit(1);
    });
    return;
  }
  if (report.failed > 0) {
    // Defer the non-zero exit code until the event loop drains so all
    // pending log output is flushed first.
    process.on('exit', function() {
      process.exit(1);
    });
  }
});
|
# --- !Ups

-- Backfill missing content types, then tighten the column: default new
-- rows to 'image/jpg' and forbid NULLs going forward.
UPDATE images SET content_type = 'image/jpg' WHERE content_type IS NULL;
ALTER TABLE images ALTER COLUMN content_type SET DEFAULT 'image/jpg';
ALTER TABLE images ALTER COLUMN content_type SET NOT NULL;

# --- !Downs

-- Revert: allow NULLs again and drop the default (backfilled values are
-- not restored).
ALTER TABLE images ALTER COLUMN content_type DROP NOT NULL;
ALTER TABLE images ALTER COLUMN content_type SET DEFAULT NULL;
|
'use strict';
var isPresent = require('is-present');
var hasClassSelector = require('has-class-selector');
module.exports = function classPrefix(prefix, options) {
options = options || {};
var ignored = options.ignored;
var prefixClassForTag = options.prefixClassForTag;
/** This return will create new rule in new file */
return function prefixRules(styling) {
styling.rules.forEach(function(rule) {
/** Ignore @media */
if (rule.rules) {
return prefixRules(rule);
}
if (!rule.selectors) return rule;
rule.selectors = rule.selectors.map(function(selector) {
var shouldIgnore = false;
/** Ignore @-ms-viewport */
if(selector.indexOf("@") !== -1) {
return selector;
}
if (hasClassSelector(selector)) {
// Ensure that the selector doesn't match the ignored list
if (isPresent(ignored)) {
shouldIgnore = ignored.some(function(opt) {
if (typeof opt == 'string') {
return selector === opt;
} else if (opt instanceof RegExp) {
return opt.exec(selector);
}
});
}
return shouldIgnore ? selector : selector.split('.').join('.' + prefix);
}
else if(isPresent(prefixClassForTag)) {
/**
* Replace html and body to prefixClassForTag
*/
if(selector.indexOf("html") !== -1){
return selector.replace("html", "." + prefixClassForTag);
}
if(selector.indexOf("body") !== -1) {
return selector.replace("body", "." + prefixClassForTag);
}
else {
return '.' + prefixClassForTag +' ' + selector;
}
}
return selector;
});
});
};
};
|
import 'package:flutter/material.dart';
/// Renders a post's timestamp as an ISO-8601 string inside a [Row].
class PostTime extends StatelessWidget {
  // The moment the post was created.
  final DateTime postime;

  // NOTE(review): `postime` is an optional named parameter, but build()
  // calls toIso8601String() on it unconditionally — omitting it will throw
  // at build time. Confirm callers always supply a value (or make it
  // required).
  PostTime({this.postime});

  @override
  Widget build(BuildContext context) {
    return Row(
      children: <Widget>[Text(this.postime.toIso8601String())],
    );
  }
}
|
-- Extend transactions with signing metadata: a reference UUID, the signer
-- identity, a 64-hex-char hash, a protocol identifier and an opaque
-- binary info payload.
ALTER TABLE transactions ADD COLUMN ref UUID;
ALTER TABLE transactions ADD COLUMN signer VARCHAR(1024);
ALTER TABLE transactions ADD COLUMN hash CHAR(64);
ALTER TABLE transactions ADD COLUMN protocol_id VARCHAR(256);
ALTER TABLE transactions ADD COLUMN info BYTEA;

-- Support lookups by protocol and by reference.
CREATE INDEX transactions_protocol_id ON transactions(protocol_id);
CREATE INDEX transactions_ref ON transactions(ref);
|
from flask_wtf import FlaskForm
from wtforms import IntegerField, PasswordField, StringField, SubmitField
from wtforms.validators import DataRequired, Length, NumberRange, Optional
class GenerateForm(FlaskForm):
    """Form requesting generation of a random password of optional length."""

    # Desired password length; optional, but when given must be 6-36.
    length = IntegerField(
        "Length",
        validators=[
            Optional(),
            NumberRange(min=6, max=36, message="Invalid password length"),
        ],
    )
    # TODO: may use on custom password generation
    # uppercase = BooleanField("Uppercase Characters")
    # punctuation = BooleanField("Punctuation")
    submit = SubmitField("Generate")
class RecordForm(FlaskForm):
    """Form for creating/editing a stored credential record."""

    # Display name of the record (required, <= 100 chars).
    name = StringField(
        "Record Name",
        validators=[DataRequired(), Length(max=100, message="Invalid record name")],
    )
    # Login/username associated with the record (required, <= 100 chars).
    login = StringField(
        "Login",
        validators=[DataRequired(), Length(max=100, message="Invalid login names")],
    )
    # The secret itself; required, 6-128 chars.
    password = PasswordField(
        "Password",
        validators=[
            DataRequired(),
            Length(min=6, max=128, message="Invalid password length"),
        ],
    )
    # Optional free-text note (<= 200 chars).
    comment = StringField(
        "Comment",
        validators=[Optional(), Length(max=200, message="Invalid comment length")],
    )
    submit = SubmitField("Save")
|
#!/usr/bin/env python
import compiler
import compiler.ast
import fnmatch
import itertools
import os
import sys
import pynocle._modulefinder as modulefinder
import pynocle.utils as utils
# fnmatch patterns for paths excluded from dependency analysis: anything
# under the interpreter's directory (the stdlib) or PyCharm's helpers.
_python_stdlib_filter = os.path.dirname(sys.executable) + '*'
_pycharm_filter = '*JetBrains\PyCharm *'
EXCLUDE_PATHS = _python_stdlib_filter, _pycharm_filter
# Built-in/extension modules that have no filename on disk.
EXCLUDE_MODULES = ('sys', 'time','imp')
class Dependency(object):
    """Data object for a single directed dependency edge from
    ``startpt`` to ``endpt``."""

    def __init__(self, startpt, endpt):
        self.startpt = startpt
        self.endpt = endpt

    def __iter__(self):
        # Allows tuple-style unpacking: start, end = dep
        return iter((self.startpt, self.endpt))

    def __eq__(self, other):
        if not isinstance(other, Dependency):
            return NotImplemented
        return (self.startpt, self.endpt) == (other.startpt, other.endpt)

    def __ne__(self, other):
        eq = self.__eq__(other)
        if eq is NotImplemented:
            return True
        return not eq

    def __str__(self):
        return 'Dependency(%s -> %s)' % (self.startpt, self.endpt)

    __repr__ = __str__
class DependencyGroup(object):
    """Aggregates dependency edges and computes per-node coupling counts.

    ``depnode_to_ca`` maps each node to its afferent coupling (edges
    pointing AT it); ``depnode_to_ce`` maps each node to its efferent
    coupling (edges leaving it). Both dicts share the same key set.

    :param dependencies: iterable of (startpt, endpt) pairs (e.g.
        ``Dependency`` instances, which iterate as pairs).
    :param failed: modules that could not be analyzed.
    """
    def __init__(self, dependencies, failed=()):
        self.failed = failed
        self.dependencies = dependencies
        self.allstartpts, self.allendpts = zip(*dependencies)
        self.depnode_to_ca = self._calc_coupling(self.allendpts)
        self.depnode_to_ce = self._calc_coupling(self.allstartpts)
        #allstartpts and allendpts will be of equal size, but not equal contents- we want to make sure our coupling
        #dicts have the same keys so we have all metrics for all modules!
        for d in self.depnode_to_ca, self.depnode_to_ce:
            for key in self.allstartpts + self.allendpts:
                d.setdefault(key, 0)

    def _calc_coupling(self, depnodes):
        """Return a dict where keys are all unique items in depnodes and
        values are the number of times those items occur.

        Fixed: the previous implementation scanned each unique item with
        ``range(len(copy) - 1, 0, -1)``, which never visits index 0 and so
        undercounted whichever element sat at position 0 (it was also
        accidentally O(n^2)); a single linear pass counts correctly.
        """
        result = {}
        for node in depnodes:
            result[node] = result.get(node, 0) + 1
        return result
class DepBuilder:
    """Builds dependencies between modules,
    starting from all modules in filenames.

    Dependencies are available as a list of `Dependency`
    instances as `self.dependencies`.
    Modules that could not be parsed are available as `self.failed`.

    NOTE(review): uses the Python 2-only ``compiler`` package and
    ``itertools.imap``/``ifilter`` — this module requires Python 2.

    :param exclude_paths: Collection of fnmatch patterns.
      Any path that matches any pattern will not be considered for dependencies.
    :param exclude_modules: Any modules that match one of the strings
      in this collection will not be considered for dependencies.
      This is necessary because some modules do not have filenames.
    """
    def __init__(self,
                 filenames,
                 exclude_paths=EXCLUDE_PATHS,
                 exclude_modules=EXCLUDE_MODULES):
        # Extensionless absolute paths already visited (cycle guard).
        self._processed = set()
        self.dependencies = []
        self.failed = []
        self.exclude_paths = exclude_paths
        self.exclude_modules = set(exclude_modules)
        self.modulefinder_cache = modulefinder.ModuleFinderCache()
        # Recursively walk each root file's import graph.
        for fn in filenames:
            self._process_file(fn)

    def _is_excluded(self, path):
        """Check whether the given path is an excluded module.
        Excluded modules will be cached in self.excluded_modules so
        they don't have to be re-checked.
        If path evaluates to False, it is excluded.
        """
        if not path:
            return True
        if path in self.exclude_modules:
            return True
        for epath in self.exclude_paths:
            if fnmatch.fnmatch(path, epath):
                self.exclude_modules.add(path)
                return True
        # p2 is the SET OF CHARACTERS in path, so these are per-character
        # membership tests: a "path" with no dot and no separator chars is
        # treated as a bare module name and excluded.
        # NOTE(review): on platforms where os.altsep is None the test
        # `os.altsep in p2` is simply False — appears intentional, confirm.
        p2 = set(path)
        if not '.' in p2 and not os.sep in p2 and not os.altsep in p2:
            self.exclude_modules.add(path)
            return True
        return False

    def _extless(self, filename):
        """Return an extensionless path for filename."""
        return os.path.splitext(filename)[0]

    def _extract_modulename(self, node):
        """If node is a From/Import node, return the module's name,
        otherwise return None."""
        # For `import a, b` only the FIRST name is returned — the other
        # imports on the same statement are not followed.
        if isinstance(node, compiler.ast.Import):
            return node.names[0][0]
        if isinstance(node, compiler.ast.From):
            return node.modname
        return None

    def _get_all_imported_modulenames(self, filename):
        """Compiles an AST for filename and returns the module names
        for all modules imported by.
        If no file for filename exists, or it is an unparseable file (pyd, pyc),
        return an empty list.
        If the file cannot be parsed,
        append to self.failed and return an empty list.
        """
        #We can only read py files right now
        if filename.endswith('.pyd'):
            return []
        # Map compiled files back to their source; bare names get '.py'.
        if filename.endswith('.pyc') or filename.endswith('.pyo'):
            filename = filename[:-1]
        elif not os.path.splitext(filename)[1]: #Has no ext whatsoever
            filename += '.py'
        if not os.path.exists(filename):
            return []
        try:
            astnode = compiler.parseFile(filename)
        except SyntaxError:
            self.failed.append(self._extless(filename))
            return []
        # Walk the whole AST and keep the names of Import/From nodes.
        allnodes = utils.flatten(astnode, lambda node: node.getChildNodes())
        names = itertools.imap(self._extract_modulename, allnodes)
        names = itertools.ifilter(None, names)
        return names

    def _process_file(self, filename):
        """Process the file at filename.
        Adds it to processed,
        and finds dependencies for all import nodes."""
        filename = os.path.abspath(filename)
        extless_filename = self._extless(filename)
        # Skip files we've already walked (prevents infinite recursion on
        # import cycles) and excluded paths.
        if (extless_filename in self._processed or
            self._is_excluded(extless_filename)):
            return
        self._processed.add(extless_filename)
        importednames = self._get_all_imported_modulenames(filename)
        for impmodname in importednames:
            imported_modulefilename = self.modulefinder_cache.get_module_filename(impmodname, filename)
            #We can get back 'sys' as a filename so check if it's excluded before we get the abspath
            if imported_modulefilename and not self._is_excluded(imported_modulefilename):
                imported_modulefilename = os.path.abspath(imported_modulefilename)
                extless_imported_modulefilename = self._extless(imported_modulefilename)
                if not self._is_excluded(extless_imported_modulefilename):
                    # Record the edge, then recurse into the imported module.
                    self.dependencies.append(Dependency(extless_filename, extless_imported_modulefilename))
                    self._process_file(imported_modulefilename)
|
// THIS FILE IS GENERATED AUTOMATICALLY AND SHOULD NOT BE EDITED DIRECTLY.
import 'dart:ffi';
/// -------------------------- GL_ANGLE_timer_query -------------------------
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glBeginQueryANGLE;
/// ```c
/// define glBeginQueryANGLE GLEW_GET_FUN(__glewBeginQueryANGLE)
/// GLEW_FUN_EXPORT PFNGLBEGINQUERYANGLEPROC __glewBeginQueryANGLE
/// typedef void (GLAPIENTRY * PFNGLBEGINQUERYANGLEPROC) (GLenum target, GLuint id)
/// ```
void glBeginQueryANGLE(int target, int id) {
final _glBeginQueryANGLE = glad__glBeginQueryANGLE!
.cast<NativeFunction<Void Function(Uint32 target, Uint32 id)>>()
.asFunction<void Function(int target, int id)>();
return _glBeginQueryANGLE(target, id);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glDeleteQueriesANGLE;
/// ```c
/// define glDeleteQueriesANGLE GLEW_GET_FUN(__glewDeleteQueriesANGLE)
/// GLEW_FUN_EXPORT PFNGLDELETEQUERIESANGLEPROC __glewDeleteQueriesANGLE
/// typedef void (GLAPIENTRY * PFNGLDELETEQUERIESANGLEPROC) (GLsizei n, const GLuint* ids)
/// ```
void glDeleteQueriesANGLE(int n, Pointer<Uint32>? ids) {
final _glDeleteQueriesANGLE = glad__glDeleteQueriesANGLE!
.cast<NativeFunction<Void Function(Uint32 n, Pointer<Uint32>? ids)>>()
.asFunction<void Function(int n, Pointer<Uint32>? ids)>();
return _glDeleteQueriesANGLE(n, ids);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glEndQueryANGLE;
/// ```c
/// define glEndQueryANGLE GLEW_GET_FUN(__glewEndQueryANGLE)
/// GLEW_FUN_EXPORT PFNGLENDQUERYANGLEPROC __glewEndQueryANGLE
/// typedef void (GLAPIENTRY * PFNGLENDQUERYANGLEPROC) (GLenum target)
/// ```
void glEndQueryANGLE(int target) {
final _glEndQueryANGLE = glad__glEndQueryANGLE!
.cast<NativeFunction<Void Function(Uint32 target)>>()
.asFunction<void Function(int target)>();
return _glEndQueryANGLE(target);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glGenQueriesANGLE;
/// ```c
/// define glGenQueriesANGLE GLEW_GET_FUN(__glewGenQueriesANGLE)
/// GLEW_FUN_EXPORT PFNGLGENQUERIESANGLEPROC __glewGenQueriesANGLE
/// typedef void (GLAPIENTRY * PFNGLGENQUERIESANGLEPROC) (GLsizei n, GLuint* ids)
/// ```
void glGenQueriesANGLE(int n, Pointer<Uint32>? ids) {
final _glGenQueriesANGLE = glad__glGenQueriesANGLE!
.cast<NativeFunction<Void Function(Uint32 n, Pointer<Uint32>? ids)>>()
.asFunction<void Function(int n, Pointer<Uint32>? ids)>();
return _glGenQueriesANGLE(n, ids);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glGetQueryObjecti64vANGLE;

/// Dart wrapper over the native `glGetQueryObjecti64vANGLE` entry point
/// (result written through the 64-bit signed [params] pointer).
///
/// ```c
/// define glGetQueryObjecti64vANGLE GLEW_GET_FUN(__glewGetQueryObjecti64vANGLE)
/// GLEW_FUN_EXPORT PFNGLGETQUERYOBJECTI64VANGLEPROC __glewGetQueryObjecti64vANGLE
/// typedef void (GLAPIENTRY * PFNGLGETQUERYOBJECTI64VANGLEPROC) (GLuint id, GLenum pname, GLint64* params)
/// ```
void glGetQueryObjecti64vANGLE(int id, int pname, Pointer<Int64>? params) {
  // `!` throws if the loader never resolved this entry point.
  final fn = glad__glGetQueryObjecti64vANGLE!
      .cast<NativeFunction<Void Function(Uint32 id, Uint32 pname, Pointer<Int64>? params)>>()
      .asFunction<void Function(int id, int pname, Pointer<Int64>? params)>();
  fn(id, pname, params);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glGetQueryObjectivANGLE;

/// Dart wrapper over the native `glGetQueryObjectivANGLE` entry point
/// (result written through the 32-bit signed [params] pointer).
///
/// ```c
/// define glGetQueryObjectivANGLE GLEW_GET_FUN(__glewGetQueryObjectivANGLE)
/// GLEW_FUN_EXPORT PFNGLGETQUERYOBJECTIVANGLEPROC __glewGetQueryObjectivANGLE
/// typedef void (GLAPIENTRY * PFNGLGETQUERYOBJECTIVANGLEPROC) (GLuint id, GLenum pname, GLint* params)
/// ```
void glGetQueryObjectivANGLE(int id, int pname, Pointer<Int32>? params) {
  // `!` throws if the loader never resolved this entry point.
  final fn = glad__glGetQueryObjectivANGLE!
      .cast<NativeFunction<Void Function(Uint32 id, Uint32 pname, Pointer<Int32>? params)>>()
      .asFunction<void Function(int id, int pname, Pointer<Int32>? params)>();
  fn(id, pname, params);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glGetQueryObjectui64vANGLE;

/// Dart wrapper over the native `glGetQueryObjectui64vANGLE` entry point
/// (result written through the 64-bit unsigned [params] pointer).
///
/// ```c
/// define glGetQueryObjectui64vANGLE GLEW_GET_FUN(__glewGetQueryObjectui64vANGLE)
/// GLEW_FUN_EXPORT PFNGLGETQUERYOBJECTUI64VANGLEPROC __glewGetQueryObjectui64vANGLE
/// typedef void (GLAPIENTRY * PFNGLGETQUERYOBJECTUI64VANGLEPROC) (GLuint id, GLenum pname, GLuint64* params)
/// ```
void glGetQueryObjectui64vANGLE(int id, int pname, Pointer<Uint64>? params) {
  // `!` throws if the loader never resolved this entry point.
  final fn = glad__glGetQueryObjectui64vANGLE!
      .cast<NativeFunction<Void Function(Uint32 id, Uint32 pname, Pointer<Uint64>? params)>>()
      .asFunction<void Function(int id, int pname, Pointer<Uint64>? params)>();
  fn(id, pname, params);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glGetQueryObjectuivANGLE;

/// Dart wrapper over the native `glGetQueryObjectuivANGLE` entry point
/// (result written through the 32-bit unsigned [params] pointer).
///
/// ```c
/// define glGetQueryObjectuivANGLE GLEW_GET_FUN(__glewGetQueryObjectuivANGLE)
/// GLEW_FUN_EXPORT PFNGLGETQUERYOBJECTUIVANGLEPROC __glewGetQueryObjectuivANGLE
/// typedef void (GLAPIENTRY * PFNGLGETQUERYOBJECTUIVANGLEPROC) (GLuint id, GLenum pname, GLuint* params)
/// ```
void glGetQueryObjectuivANGLE(int id, int pname, Pointer<Uint32>? params) {
  // `!` throws if the loader never resolved this entry point.
  final fn = glad__glGetQueryObjectuivANGLE!
      .cast<NativeFunction<Void Function(Uint32 id, Uint32 pname, Pointer<Uint32>? params)>>()
      .asFunction<void Function(int id, int pname, Pointer<Uint32>? params)>();
  fn(id, pname, params);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glGetQueryivANGLE;

/// Dart wrapper over the native `glGetQueryivANGLE` entry point.
///
/// ```c
/// define glGetQueryivANGLE GLEW_GET_FUN(__glewGetQueryivANGLE)
/// GLEW_FUN_EXPORT PFNGLGETQUERYIVANGLEPROC __glewGetQueryivANGLE
/// typedef void (GLAPIENTRY * PFNGLGETQUERYIVANGLEPROC) (GLenum target, GLenum pname, GLint* params)
/// ```
void glGetQueryivANGLE(int target, int pname, Pointer<Int32>? params) {
  // `!` throws if the loader never resolved this entry point.
  final fn = glad__glGetQueryivANGLE!
      .cast<NativeFunction<Void Function(Uint32 target, Uint32 pname, Pointer<Int32>? params)>>()
      .asFunction<void Function(int target, int pname, Pointer<Int32>? params)>();
  fn(target, pname, params);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glIsQueryANGLE;

/// Dart wrapper over the native `glIsQueryANGLE` entry point; returns the
/// native GLboolean as an int.
///
/// ```c
/// define glIsQueryANGLE GLEW_GET_FUN(__glewIsQueryANGLE)
/// GLEW_FUN_EXPORT PFNGLISQUERYANGLEPROC __glewIsQueryANGLE
/// typedef GLboolean (GLAPIENTRY * PFNGLISQUERYANGLEPROC) (GLuint id)
/// ```
int glIsQueryANGLE(int id) {
  // `!` throws if the loader never resolved this entry point.
  final fn = glad__glIsQueryANGLE!
      .cast<NativeFunction<Uint8 Function(Uint32 id)>>()
      .asFunction<int Function(int id)>();
  return fn(id);
}
/// @nodoc
Pointer<NativeFunction<Void Function()>>? glad__glQueryCounterANGLE;

/// Dart wrapper over the native `glQueryCounterANGLE` entry point.
///
/// ```c
/// define glQueryCounterANGLE GLEW_GET_FUN(__glewQueryCounterANGLE)
/// GLEW_FUN_EXPORT PFNGLQUERYCOUNTERANGLEPROC __glewQueryCounterANGLE
/// typedef void (GLAPIENTRY * PFNGLQUERYCOUNTERANGLEPROC) (GLuint id, GLenum target)
/// ```
void glQueryCounterANGLE(int id, int target) {
  // `!` throws if the loader never resolved this entry point.
  final fn = glad__glQueryCounterANGLE!
      .cast<NativeFunction<Void Function(Uint32 id, Uint32 target)>>()
      .asFunction<void Function(int id, int target)>();
  fn(id, target);
}
/// @nodoc
/// Resolves every GL_ANGLE_timer_query entry point through [load] and
/// stores the raw pointers in the `glad__*` slots used by the wrappers
/// above. Entry points the platform does not expose stay null (callers
/// then fail on the `!` null check).
void gladLoadGLLoader_angle_timer_query(Pointer<NativeFunction<Void Function()>> Function(String) load) {
  glad__glBeginQueryANGLE = load('glBeginQueryANGLE');
  glad__glDeleteQueriesANGLE = load('glDeleteQueriesANGLE');
  glad__glEndQueryANGLE = load('glEndQueryANGLE');
  glad__glGenQueriesANGLE = load('glGenQueriesANGLE');
  glad__glGetQueryObjecti64vANGLE = load('glGetQueryObjecti64vANGLE');
  glad__glGetQueryObjectivANGLE = load('glGetQueryObjectivANGLE');
  glad__glGetQueryObjectui64vANGLE = load('glGetQueryObjectui64vANGLE');
  glad__glGetQueryObjectuivANGLE = load('glGetQueryObjectuivANGLE');
  glad__glGetQueryivANGLE = load('glGetQueryivANGLE');
  glad__glIsQueryANGLE = load('glIsQueryANGLE');
  glad__glQueryCounterANGLE = load('glQueryCounterANGLE');
}
|
package org.owasp.webgoat.plugin;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * Plain data holder for a single review: reviewer name, timestamp text,
 * review body and a star rating. Getters/setters and both constructors are
 * generated by Lombok; {@code @XmlRootElement} makes the type (de)serializable
 * as an XML document root.
 *
 * @author nbaars
 * @since 4/8/17.
 */
@Getter
@Setter
@AllArgsConstructor
@NoArgsConstructor
@XmlRootElement
public class Review {

    // Reviewer's user name.
    private String user;
    // When the review was made; kept as free-form text, not a date type.
    private String dateTime;
    // Review body.
    private String text;
    // Star rating; Integer (nullable) rather than int.
    private Integer stars;
}
|
/*==============================================================================
* Copyright (C) 2020 YaoYuan <ibireme@gmail.com>.
* Released under the MIT license (MIT).
*============================================================================*/
#include "yybench_cpu.h"
#include "yybench_time.h"
/* REPEAT_N(x): paste N literal copies of the token sequence x, built by
   repeated doubling. Used below to emit straight-line runs of `add`
   instructions whose exact count is known at compile time. */
#define REPEAT_2(x) x x
#define REPEAT_4(x) REPEAT_2(REPEAT_2(x))
#define REPEAT_8(x) REPEAT_2(REPEAT_4(x))
#define REPEAT_16(x) REPEAT_2(REPEAT_8(x))
#define REPEAT_32(x) REPEAT_2(REPEAT_16(x))
#define REPEAT_64(x) REPEAT_2(REPEAT_32(x))
#define REPEAT_128(x) REPEAT_2(REPEAT_64(x))
#define REPEAT_256(x) REPEAT_2(REPEAT_128(x))
#define REPEAT_512(x) REPEAT_2(REPEAT_256(x))
/* Raise the current process/thread to the highest available scheduling
   priority so frequency measurements are less disturbed by the scheduler.
   Returns true only if every priority change succeeded. */
bool yy_cpu_setup_priority(void) {
#if defined(_WIN32)
    BOOL ret1 = SetPriorityClass(GetCurrentProcess(), REALTIME_PRIORITY_CLASS);
    BOOL ret2 = SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_TIME_CRITICAL);
    return ret1 && ret2;
#else
    int policy;
    struct sched_param param;
    pthread_t thread = pthread_self();
    /* fixed: "&param" had been corrupted into the literal glyph sequence
       "¶m" (HTML-entity mangling of "&para"), which does not compile */
    pthread_getschedparam(thread, &policy, &param);
    /* sched_get_priority_max() returns -1 on error */
    param.sched_priority = sched_get_priority_max(policy);
    if (param.sched_priority != -1) {
        return pthread_setschedparam(pthread_self(), policy, &param) == 0;
    }
    return false;
#endif
}
/* Busy-wait for roughly `second` seconds, keeping the CPU active
   (used to let the clock frequency ramp up before measuring). */
void yy_cpu_spin(f64 second) {
    f64 deadline = yy_time_get_seconds() + second;
    while (yy_time_get_seconds() < deadline) {
        /* volatile prevents the optimizer from deleting the inner spin */
        for (volatile int spin = 0; spin < 1000; ) {
            spin++;
        }
    }
}
/* Calibration kernels. Each yy_cpu_run_seq_* executes a known number of
   dependent adds (YY_CPU_RUN_INST_COUNT_A/B); yy_cpu_measure_freq() uses
   the tick difference between the two sequences to estimate cycles/sec.
   Where the `naked` attribute is available the loops are hand-written asm
   so the instruction count is exact; otherwise a portable C fallback is
   used. Do NOT "simplify" these bodies — the statement count IS the
   contract. */
#if (yy_has_attribute(naked)) && YY_ARCH_ARM64
#define YY_CPU_RUN_INST_COUNT_A (8192 * (128 + 256))
#define YY_CPU_RUN_INST_COUNT_B (8192 * (512))

/* 8192 iterations x 384 dependent adds (ARM64). */
__attribute__((naked, noinline))
void yy_cpu_run_seq_a(void) {
    __asm volatile
    (
        "mov x0, #8192\n"
        "Loc_seq_loop_begin_a:\n"
        REPEAT_128("add x1, x1, x1\n")
        REPEAT_256("add x1, x1, x1\n")
        "subs x0, x0, #1\n"
        "bne Loc_seq_loop_begin_a\n"
        "ret\n"
    );
}

/* 8192 iterations x 512 dependent adds (ARM64). */
__attribute__((naked, noinline))
void yy_cpu_run_seq_b(void) {
    __asm volatile
    (
        "mov x0, #8192\n"
        "Loc_seq_loop_begin_b:\n"
        REPEAT_512("add x1, x1, x1\n")
        "subs x0, x0, #1\n"
        "bne Loc_seq_loop_begin_b\n"
        "ret\n"
    );
}

#elif (yy_has_attribute(naked)) && YY_ARCH_ARM32
#define YY_CPU_RUN_INST_COUNT_A (8192 * (128 + 256))
#define YY_CPU_RUN_INST_COUNT_B (8192 * (512))

/* 8192 iterations x 384 dependent adds (ARM32/Thumb). */
__attribute__((naked, noinline))
void yy_cpu_run_seq_a(void) {
    __asm volatile
    (
        "mov.w r0, #8192\n"
        "Loc_seq_loop_begin_a:\n"
        REPEAT_128("add r1, r1, r1\n")
        REPEAT_256("add r1, r1, r1\n")
        "subs r0, r0, #1\n"
        "bne.w Loc_seq_loop_begin_a\n"
        "bx lr\n"
    );
}

/* 8192 iterations x 512 dependent adds (ARM32/Thumb). */
__attribute__((naked, noinline))
void yy_cpu_run_seq_b(void) {
    __asm volatile
    (
        "mov.w r0, #8192\n"
        "Loc_seq_loop_begin_b:\n"
        REPEAT_512("add r1, r1, r1\n")
        "subs r0, r0, #1\n"
        "bne.w Loc_seq_loop_begin_b\n"
        "bx lr\n"
    );
}

#elif (yy_has_attribute(naked)) && (YY_ARCH_X64 || YY_ARCH_X86)
#define YY_CPU_RUN_INST_COUNT_A (8192 * (128 + 256))
#define YY_CPU_RUN_INST_COUNT_B (8192 * (512))

/* 8192 iterations x 384 dependent adds (x86/x64, AT&T syntax). */
__attribute__((naked, noinline))
void yy_cpu_run_seq_a(void) {
    __asm volatile
    (
        "movl $8192, %eax\n"
        "seq_loop_begin_a:\n"
        REPEAT_128("addl %edx, %edx\n")
        REPEAT_256("addl %edx, %edx\n")
        "subl $1, %eax\n"
        "jne seq_loop_begin_a\n"
        "ret\n"
    );
}

/* 8192 iterations x 512 dependent adds (x86/x64, AT&T syntax). */
__attribute__((naked, noinline))
void yy_cpu_run_seq_b(void) {
    __asm volatile
    (
        "movl $8192, %eax\n"
        "seq_loop_begin_b:\n"
        REPEAT_512("addl %edx, %edx\n")
        "subl $1, %eax\n"
        "jne seq_loop_begin_b\n"
        "ret\n"
    );
}

#else
#define YY_CPU_RUN_INST_COUNT_A (8192 * 4 * (32 + 64))
#define YY_CPU_RUN_INST_COUNT_B (8192 * 4 * (128))

/* These functions contains some `add` instructions with data dependence.
   This file should be compiled with optimization flag on.
   We hope that each line of the code in the inner loop may compiled as
   an `add` instruction, each `add` instruction takes 1 cycle, and inner kernel
   can fit in the L1i cache. Try: https://godbolt.org/z/d3GP1b */
/* Globals keep the adds observable so the optimizer cannot delete them. */
u32 yy_cpu_run_seq_vals[8];

/* Portable fallback: 8192 iterations x 96 four-add groups. */
void yy_cpu_run_seq_a(void) {
    u32 loop = 8192;
    u32 v1 = yy_cpu_run_seq_vals[1];
    u32 v2 = yy_cpu_run_seq_vals[2];
    u32 v3 = yy_cpu_run_seq_vals[3];
    u32 v4 = yy_cpu_run_seq_vals[4];
    do {
        REPEAT_32( v1 += v4; v2 += v1; v3 += v2; v4 += v3; )
        REPEAT_64( v1 += v4; v2 += v1; v3 += v2; v4 += v3; )
    } while(--loop);
    yy_cpu_run_seq_vals[0] = v1;
}

/* Portable fallback: 8192 iterations x 128 four-add groups. */
void yy_cpu_run_seq_b(void) {
    u32 loop = 8192;
    u32 v1 = yy_cpu_run_seq_vals[1];
    u32 v2 = yy_cpu_run_seq_vals[2];
    u32 v3 = yy_cpu_run_seq_vals[3];
    u32 v4 = yy_cpu_run_seq_vals[4];
    do {
        REPEAT_128( v1 += v4; v2 += v1; v3 += v2; v4 += v3; )
    } while(--loop);
    yy_cpu_run_seq_vals[0] = v1;
}
#endif
/* Calibration results, filled in by yy_cpu_measure_freq(). */
static u64 yy_cycle_per_sec = 0;
static u64 yy_tick_per_sec = 0;

/* Estimate the tick rate of yy_time_get_ticks() and the CPU frequency by
   timing the two fixed-instruction-count kernels above. Must run before
   any of the yy_cpu_get_* / yy_cpu_tick_to_* accessors are meaningful. */
void yy_cpu_measure_freq(void) {
#define warmup_count 8
#define measure_count 128
    yy_time p1, p2;
    u64 ticks_a[measure_count];
    u64 ticks_b[measure_count];
    /* warm up CPU caches and stabilize the frequency */
    for (int i = 0; i < warmup_count; i++) {
        yy_cpu_run_seq_a();
        yy_cpu_run_seq_b();
        yy_time_get_current(&p1);
        yy_time_get_ticks();
    }
    /* run sequence a and b repeatedly, record ticks and times */
    yy_time_get_current(&p1);
    u64 t1 = yy_time_get_ticks();
    for (int i = 0; i < measure_count; i++) {
        u64 s1 = yy_time_get_ticks();
        yy_cpu_run_seq_a();
        u64 s2 = yy_time_get_ticks();
        yy_cpu_run_seq_b();
        u64 s3 = yy_time_get_ticks();
        ticks_a[i] = s2 - s1;
        ticks_b[i] = s3 - s2;
    }
    u64 t2 = yy_time_get_ticks();
    yy_time_get_current(&p2);
    /* calculate tick count per second, this value is high precision */
    f64 total_seconds = yy_time_to_seconds(&p2) - yy_time_to_seconds(&p1);
    u64 total_ticks = t2 - t1;
    yy_tick_per_sec = (u64)((f64)total_ticks / total_seconds);
    /* find the minimum ticks of each sequence to avoid inaccurate values
       caused by context switching, etc. */
    for (int i = 1; i < measure_count; i++) {
        if (ticks_a[i] < ticks_a[0]) ticks_a[0] = ticks_a[i];
        if (ticks_b[i] < ticks_b[0]) ticks_b[0] = ticks_b[i];
    }
    /* use the difference between two sequences to eliminate the overhead of
       loops and function calls */
    u64 one_ticks = ticks_b[0] - ticks_a[0];
    u64 one_insts = YY_CPU_RUN_INST_COUNT_B - YY_CPU_RUN_INST_COUNT_A;
    yy_cycle_per_sec = (u64)((f64)one_insts / (f64)one_ticks * (f64)yy_tick_per_sec);
#undef warmup_count
#undef measure_count
}
/* Estimated CPU frequency in cycles/second; 0 until yy_cpu_measure_freq(). */
u64 yy_cpu_get_freq(void) {
    return yy_cycle_per_sec;
}

/* Measured yy_time_get_ticks() rate in ticks/second; 0 until measured. */
u64 yy_cpu_get_tick_per_sec(void) {
    return yy_tick_per_sec;
}

/* CPU cycles elapsed per timer tick (ratio of the two values above). */
f64 yy_cpu_get_cycle_per_tick(void) {
    return (f64)yy_cycle_per_sec / (f64)yy_tick_per_sec;
}
/* Convert a tick count from yy_time_get_ticks() into seconds, using the
   tick rate measured by yy_cpu_measure_freq(). */
f64 yy_cpu_tick_to_sec(u64 tick) {
    return tick / (f64)yy_tick_per_sec; /* removed stray double semicolon */
}
/* Convert a tick count into an estimated number of CPU cycles using the
   measured cycles-per-tick ratio. */
u64 yy_cpu_tick_to_cycle(u64 tick) {
    return (u64)(tick * ((f64)yy_cycle_per_sec / (f64)yy_tick_per_sec));
}
|
REM ** DEBUG THESE STEPS BY turning ON the OUTPUTs **********************************************
set serveroutput off verify off
set termout off
REM set serveroutput on verify on
REM set termout on
REM *********************************************************************************************
REM ** RUN AS SYS (or ADMIN if cloud based)....
REM *********************************************************************************************
REM Positional parameters:
REM   &1 temp tablespace   &2 tool tablespace   &3 staging tablespace
REM   &4 comma-separated staging prefixes      &5/&6 ETL owner user/password
REM   &7/&8 read-only user/password
define temp_ts = '&1'
define tool_ts = '&2'
define stg_ts = '&3'
define stg_types = '&4'
define pet_usr = '&5'
define pet_pwd = '&6'
define ro_usr = '&7'
define ro_pwd = '&8'
REM *********************************************************************************************
REM ** CREATE THE &pet_usr SCHEMA
REM *********************************************************************************************
REM ETL owner account: defaults to the tool tablespace with unlimited quota.
create user &pet_usr identified by "&pet_pwd"
DEFAULT TABLESPACE &tool_ts
TEMPORARY TABLESPACE &temp_ts
QUOTA UNLIMITED ON &tool_ts
/
REM Assign the application profile to the schema just created. Fixed to use
REM the &pet_usr parameter (was hard-coded to pre_etl_owner, which breaks
REM whenever a different owner name is passed in).
alter user &pet_usr profile application_user
/
REM Basic roles plus the crypto package.
grant connect, resource to &pet_usr
/
grant execute on dbms_crypto to &pet_usr
/
REM Broad ANY privileges: the ETL owner reads/writes/manages objects in
REM other schemas (the staging schemas created further below).
grant select any table to &pet_usr
/
grant insert any table to &pet_usr
/
grant delete any table to &pet_usr
/
grant update any table to &pet_usr
/
grant select any sequence to &pet_usr
/
grant alter any sequence to &pet_usr
/
grant alter any table to &pet_usr
/
grant analyze any to &pet_usr
/
grant drop any table to &pet_usr
/
grant create any procedure to &pet_usr
/
grant execute any procedure to &pet_usr
/
grant create any index to &pet_usr
/
grant create any context to &pet_usr
/
grant drop any context to &pet_usr
/
grant select any dictionary to &pet_usr
/
REM Scheduling / locking infrastructure used by the ETL jobs.
grant execute on dbms_lock to &pet_usr
/
grant execute on dbms_scheduler to &pet_usr
/
grant create job to &pet_usr
/
grant manage scheduler to &pet_usr
/
REM ** Might not be possible to grant this on some Instances (ignore failure to grant) **
grant select on dba_scheduler_job_classes to &pet_usr
/
REM Resource-manager admin is granted through the package, not plain GRANT.
BEGIN
DBMS_RESOURCE_MANAGER_PRIVS.GRANT_SYSTEM_PRIVILEGE(
GRANTEE_NAME => upper('&pet_usr'),
PRIVILEGE_NAME => 'ADMINISTER_RESOURCE_MANAGER',
ADMIN_OPTION => FALSE);
END;
/
REM *********************************************************************************************
REM ** CREATE THE pre_etl_ro READONLY SCHEMA.
REM *********************************************************************************************
-- AS sys
CREATE USER &ro_usr identified by "&ro_pwd"
DEFAULT TABLESPACE &tool_ts
TEMPORARY TABLESPACE &temp_ts
QUOTA UNLIMITED ON &tool_ts
/
GRANT CREATE SESSION TO &ro_usr
/
-- Logon trigger: every session of the read-only user is redirected to the
-- owner schema, so unqualified object names resolve against &pet_usr.
CREATE OR REPLACE TRIGGER on_logon_user_pero
AFTER logon ON DATABASE
WHEN ( ora_login_user = upper( '&ro_usr' ) )
BEGIN
EXECUTE IMMEDIATE 'ALTER SESSION SET CURRENT_SCHEMA = &pet_usr';
END;
/
REM *********************************************************************************************
REM ** CREATE THE *_stg_etl_owner SCHEMA(s) (RUNS AFTER THE PRE_ETL_SCHEMA IS CREATED)
REM *********************************************************************************************
DECLARE
-- Run one DDL/grant statement. When i_ignore_fail is TRUE the failure is
-- swallowed silently (for privileges unavailable on some instances);
-- otherwise the error is only logged — the loop never aborts.
PROCEDURE pr_exec
(
i_command IN VARCHAR2
,i_ignore_fail IN BOOLEAN DEFAULT FALSE
) IS
BEGIN
EXECUTE IMMEDIATE i_command;
EXCEPTION
WHEN OTHERS THEN
IF NOT i_ignore_fail
THEN
dbms_output.put_line(SQLERRM);
END IF;
-- never fail - don't re-raise.
END pr_exec;
BEGIN
-- Split the comma-separated &stg_types list into one prefix per row and
-- create a <prefix>_stg_etl_owner schema for each.
FOR i_buf IN (SELECT regexp_substr('&stg_types', '[^,]+', 1, rownum) AS prefix
FROM dual
CONNECT BY LEVEL <=
regexp_count('&stg_types', '[^,]+'))
LOOP
-- NOTE(review): the generated password equals the user name — a weak
-- credential; confirm these accounts are locked or re-passworded later.
pr_exec(i_command => 'CREATE USER ' || lower(i_buf.prefix) ||
'_stg_etl_owner identified by ' ||
lower(i_buf.prefix) || '_stg_etl_owner' ||
' DEFAULT TABLESPACE &stg_ts' ||
' TEMPORARY TABLESPACE &temp_ts' ||
' QUOTA UNLIMITED ON &stg_ts');
pr_exec(i_command => 'grant connect, resource to ' ||
i_buf.prefix || '_stg_etl_owner');
-- Might not be possible to grant this on some Instances (ignore failure to grant).
pr_exec(i_command => 'grant select on dba_directories to ' ||
i_buf.prefix || '_stg_etl_owner'
,i_ignore_fail => TRUE);
pr_exec(i_command => 'grant create any table to ' ||
i_buf.prefix || '_stg_etl_owner');
-- Might not be possible to grant this on some Instances (ignore failure to grant) **
pr_exec(i_command => 'grant create any directory to ' ||
i_buf.prefix || '_stg_etl_owner'
,i_ignore_fail => TRUE);
pr_exec(i_command => 'grant drop any directory to ' ||
i_buf.prefix || '_stg_etl_owner');
pr_exec(i_command => 'grant execute on dbms_lock to ' ||
i_buf.prefix || '_stg_etl_owner');
pr_exec(i_command => 'grant create job, manage scheduler to ' ||
i_buf.prefix || '_stg_etl_owner');
END LOOP;
END;
/
exit
|
use specs::prelude::*;
use super::{InBackpack, Equipped, WantsToRemoveItem};
/// ECS system that processes `WantsToRemoveItem` intents: the item leaves
/// the `Equipped` storage and goes back into its owner's backpack.
pub struct ItemRemoveSystem {}

impl<'a> System<'a> for ItemRemoveSystem {
    #[allow(clippy::type_complexity)]
    type SystemData = (
        Entities<'a>,
        WriteStorage<'a, WantsToRemoveItem>,
        WriteStorage<'a, Equipped>,
        WriteStorage<'a, InBackpack>
    );

    fn run(&mut self, data: Self::SystemData) {
        let (entities, mut wants_remove, mut equipped, mut backpack) = data;

        // For every entity that flagged an item for removal, unequip the
        // item and stash it in that entity's backpack.
        for (owner, intent) in (&entities, &wants_remove).join() {
            equipped.remove(intent.item);
            backpack
                .insert(intent.item, InBackpack { owner })
                .expect("Unable to insert backpack");
        }

        // All intents handled; clear them so they don't fire again next tick.
        wants_remove.clear();
    }
}
|
require_relative "../view"
# Presenter for one node of the shopping tree. Recursively builds a child
# view (resolved by naming convention "<node_class>_view") for every child
# node, and formats the node's figures for display.
class ShoppingNodeView < View
  attr_reader :children

  def initialize(node, options = {})
    @template_folder = File.basename(File.dirname(__FILE__))
    super(options)
    @node = node
    return unless @node.has_children?

    # Each child is wrapped in its own view class, loaded lazily by name.
    @children = @node.children.map do |child|
      view_name = "#{child.class.name.underscore}_view"
      require_relative(view_name)
      classify(view_name).constantize.new(child, @options)
    end
  end

  # Name of the template folder used to render this view.
  def template_folder
    "shopping"
  end

  def title
    @node.name
  end

  def volume
    format_volume(@node.volume)
  end

  def quantity
    format_quantity(@node.quantity)
  end

  def cost
    format_isk(@node.cost)
  end

  def cost_per_unit
    format_isk(@node.cost_per_unit)
  end
end
|
#!/bin/bash
# ------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
# ------------------------------------------------------------
# When true, convertTraces additionally produces a ".table" file
# (enabled by the "-table" command-line flag below).
CREATE_TABLE_FILE=false

# Write a tagged message to syslog.
function log {
    logger "SF-DCA $1"
}
# Return 1 when the current process appears to run inside a Docker
# container (its cgroup paths mention /docker/), 0 otherwise.
# Note the inverted convention: 1 = in docker.
function running_in_docker {
    count=$(awk '/\/docker\//' /proc/self/cgroup | wc -l)
    if [ "$count" -gt "0" ]; then
        return 1
    fi
    return 0
}
# Create and start a new LTTng userspace session named $1, writing traces
# under <FabricLogRoot>/lttng-traces/<name>-<timestamp>. Records all
# "service_fabric:*" events plus vtid/vpid context on channel0.
function createSession {
    log "Creating session $1"
    fabricLogRoot=$(cat /etc/servicefabric/FabricLogRoot)
    logPath="${fabricLogRoot}/lttng-traces/${1}-$(date '+%Y%m%d-%H%M%S')"
    lttng create $1 --output=${logPath}
    running_in_docker
    if [ $? -eq "0" ]
    then
        # Not in Docker: use 8 MiB sub-buffers.
        lttng enable-channel --session $1 --userspace --tracefile-size 8388608 --subbuf-size 8388608 channel0
    else
        log "WARN: Inside Docker container, using smaller subbuf size to workaround a bug"
        lttng enable-channel --session $1 --userspace --tracefile-size 8388608 --subbuf-size 8192 channel0
    fi
    lttng enable-event --channel channel0 --userspace "service_fabric:*"
    lttng add-context -u -t vtid
    lttng add-context -u -t vpid
    lttng start
}
# Count live LTTng sessions named "<$1>_<digit>", by matching lines of the
# form "N) name (folder) [state]" in `lttng list` output.
function sessionCount {
    sessionName=$1
    count="$(lttng list | grep -o "[0-9]*) ${sessionName}_[0-9] ([A-Za-z0-9\.\/ _-]*) \[[a-z]*\]" | wc -l)"
    echo $count
}
# Extract the session name from an `lttng list` line shaped like
# "N) name (folder) [state]".
function getSessionName {
    name=${1%% (*}   # drop " (folder) [state]" suffix
    name=${name##*) } # drop the "N) " index prefix
    echo $name
}
# Extract the output folder from an `lttng list` line shaped like
# "N) name (folder) [state]".
function getFolderPath {
    path=${1#* (}   # keep everything after the first " ("
    path=${path%%) *} # cut at the closing ") "
    echo $path
}
# Print the `lttng list` lines for sessions named "<$1>_<digit>", in the
# form "N) name (folder) [state]" (consumed by getSessionName/getFolderPath).
function listSessions {
    sessionName=$1
    list="$(lttng list | grep -o "[0-9]*) ${sessionName}_[0-9] ([A-Za-z0-9\.\/ _-]*) \[[a-z]*\]")"
    echo "$list"
}
# Write a minimal logrotate config to path $2 that keeps 3 rotations of
# the trace file $1 (the customer user-traces .dtr file).
function createlogrotateConfigFile {
    echo "${1} {
rotate 3
}" > ${2}
}
#convert the traces by session name and folder path
# $1: session name
# $2: trace dir
# $3: output common folder
#converted file will have .trace extension and will be changed to .dtr once conversion is done
function convertTraces {
    log "Stopping session $1"
    lttng stop $1
    lttng destroy $1
    SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
    export LD_LIBRARY_PATH="${SCRIPT_DIR}"
    TRACE_DIR=$2
    DATE_NOW="$(date -u +%Y-%m-%d_%H:%M:%S)"
    SCRIPT_PATH="${SCRIPT_DIR}/sftrace"
    OFILE="${3}/${DATE_NOW}"
    OUTPUT_FILENAME="${OFILE}.trace"
    OUTPUT_FINISHED_FILENAME="${OFILE}.dtr"
    OUTPUT_TABLE_FILENAME="${OFILE}.table1"
    OUTPUT_TABLE_FINISHED_FILENAME="${OFILE}.table"
    OUTPUT_DTR_TO_APPEND_FOLDERNAME="${3}/../UserTraces/"
    OUTPUT_DTR_TO_APPEND_FILENAME="${OUTPUT_DTR_TO_APPEND_FOLDERNAME}traces.dtr"
    LOGROTATE_CONFIG_FILENAME="/tmp/sfusertraceslogrotate.conf"
    # NOTE(review): the quoting below leaves ${TRACE_DIR} outside the quoted
    # string, so a trace dir containing spaces would be word-split — confirm
    # trace paths can never contain spaces before tightening this.
    sftrace_arguments="--no-delta -f loglevel --clock-date --clock-gmt "${TRACE_DIR}" -w $OUTPUT_FILENAME"
    if [ "$CREATE_TABLE_FILE" == "true" ]
    then
        sftrace_arguments="${sftrace_arguments} -c TableEvents.config -t $OUTPUT_TABLE_FILENAME"
    fi
    log "Writing to $OUTPUT_FILENAME"
    ${SCRIPT_PATH} ${sftrace_arguments}
    log "Head $(head -1 $OUTPUT_FILENAME)"
    log "Tail $(tail -1 $OUTPUT_FILENAME)"
    # Creating dtr file for customer user traces
    if [ ! -d "$OUTPUT_DTR_TO_APPEND_FOLDERNAME" ]; then
        log "Creating customer user traces folder at: ${OUTPUT_DTR_TO_APPEND_FOLDERNAME}"
        mkdir $OUTPUT_DTR_TO_APPEND_FOLDERNAME
    fi
    # Fixed: test the config path with -f (regular file). The previous -d
    # (directory) test was always true for a file path, so the config was
    # recreated on every single run.
    if [ ! -f "$LOGROTATE_CONFIG_FILENAME" ]; then
        log "Creating logrotate configuration file at: $LOGROTATE_CONFIG_FILENAME"
        createlogrotateConfigFile $OUTPUT_DTR_TO_APPEND_FILENAME $LOGROTATE_CONFIG_FILENAME
    fi
    # rotating customer user traces .dtr file
    logrotate $LOGROTATE_CONFIG_FILENAME
    # writing new .dtr file for customer user traces
    cp $OUTPUT_FILENAME $OUTPUT_DTR_TO_APPEND_FILENAME
    log "Moving $OUTPUT_FILENAME to $OUTPUT_FINISHED_FILENAME"
    mv $OUTPUT_FILENAME $OUTPUT_FINISHED_FILENAME
    if [ "$CREATE_TABLE_FILE" == "true" ]
    then
        log "Moving $OUTPUT_TABLE_FILENAME to $OUTPUT_TABLE_FINISHED_FILENAME"
        mv $OUTPUT_TABLE_FILENAME $OUTPUT_TABLE_FINISHED_FILENAME
    fi
}
# Validate a session folder name: echo 0 when folder path $1 contains the
# session name $2 (safe to delete), 1 otherwise.
function validateSessionFolder {
    flag=`echo $1 | awk -v sessionName="$2" '{print match($1, sessionName)}'`;
    if [ $flag -le 0 ]
    then
        echo 1
    else
        echo 0
    fi
}
log "LTTng start $(date)"
# No arguments: bootstrap. Create the initial session unless one already
# exists under the legacy (WindowsFabric) or current (ServiceFabric) prefix.
if [ $# -eq 0 ]
then
    count=$(sessionCount "WindowsFabric")
    if [ $count -eq 0 ]
    then
        count=$(sessionCount "ServiceFabric")
        if [ $count -eq 0 ]
        then
            createSession ServiceFabric_0
        fi
    fi
elif [ $# -gt 0 ]
then
    # Optional second argument turns on .table output in convertTraces.
    if [ "$2" == "-table" ]
    then
        CREATE_TABLE_FILE=true
    fi
    if [ "$1" == "-c" ]
    then
        # Cleanup mode: drain existing ServiceFabric sessions.
        sessionPrefix="ServiceFabric"
        count=$(sessionCount $sessionPrefix)
        if [ $count -eq 2 ]
        then
            sessions=$(listSessions $sessionPrefix)
            sessionName=$(getSessionName "$sessions")
            sessionFolder=$(getFolderPath "$sessions")
            # NOTE(review): $1 is the literal "-c" here, yet convertTraces
            # expects an output folder as its third argument — confirm this
            # is intentional.
            convertTraces $sessionName $sessionFolder $1
            if [ 0 -eq $(validateSessionFolder $sessionFolder $sessionName) ]
            then
                rm -R $sessionFolder
            fi
        fi
        if [ $count -eq 1 ]
        then
            sessions=$(listSessions $sessionPrefix)
            sessionName=$(getSessionName "$sessions")
            sessionFolder=$(getFolderPath "$sessions")
            convertTraces $sessionName $sessionFolder "./ClusterData/Data/log/Traces/"
            parentDirectory=${sessionFolder%/*}
            rm -R $parentDirectory/$sessionName*
        fi
    else
        # Rotation mode: $1 is the output folder for the converted traces.
        sessionPrefix="WindowsFabric"
        count=$(sessionCount $sessionPrefix)
        if [ $count -eq 0 ]
        then
            sessionPrefix="ServiceFabric"
            count=$(sessionCount $sessionPrefix)
        fi
        if [ $count -ge 2 ]
        then
            # Too many live sessions: tear down the first listed one.
            sessions=$(listSessions $sessionPrefix)
            sessionName=$(getSessionName "$sessions")
            sessionFolder=$(getFolderPath "$sessions")
            lttng stop $sessionName
            lttng destroy $sessionName
            if [ 0 -eq $(validateSessionFolder $sessionFolder $sessionName) ]
            then
                rm -R $sessionFolder
            fi
        elif [ $count -eq 1 ]
        then
            # Normal rotation: start the sibling session (ServiceFabric_0/_1)
            # first so tracing continues, then convert and remove the old one.
            newSession="ServiceFabric_0"
            sessions=$(listSessions $sessionPrefix)
            sessionName=$(getSessionName "$sessions")
            sessionFolder=$(getFolderPath "$sessions")
            if [ "$sessionName" = "ServiceFabric_0" ]
            then
                newSession="ServiceFabric_1"
            fi
            createSession $newSession
            sleep 1s
            convertTraces $sessionName $sessionFolder $1
            if [ 0 -eq $(validateSessionFolder $sessionFolder $sessionName) ]
            then
                parentDirectory=${sessionFolder%/*}
                rm -R $parentDirectory/$sessionName*
            fi
        elif [ $count -eq 0 ]
        then
            createSession ServiceFabric_0
        fi
    fi
fi
log "LTTng end $(date)"
|
#include "btm.h"
// compute p(z_i=k|z/i, B)
// Compute the (unnormalised) cumulative sampling distribution
// p(z_i = k | z_/i, B) over the K topics for one biterm.
//
// Q[k] holds a running cumulative sum so the caller can draw a topic by
// comparing a uniform variate against Q[k] / Q[K-1].
NumericVector Btm::sample_prob(Biterm& bi) {
  NumericVector Q(K);
  for (int k = 0; k < K; k++) {
    // Leave-one-out: exclude this biterm's current assignment from the
    // counts when evaluating topic k.
    int subtract = 0;
    if (bi.get_z() == k)
      subtract = 1; // fixed: was the double literal 1.0 assigned to an int
    // Fixed: a biterm contributes TWO word slots to topic_count_wd (see
    // update_counts, which adjusts it by 2), so the leave-one-out
    // denominator must remove 2*subtract, not subtract. The old debug
    // trace here also computed "topic_count_wd[k] - (2.0 * subtract)".
    Q[k] = (alpha + topic_count_bt[k] - subtract) *
      (eta + word_topic_count(bi.get_wi(), k) - subtract) *
      (eta + word_topic_count(bi.get_wj(), k) - subtract) /
      ((topic_count_wd[k] - 2 * subtract + V * eta + 1) *
       (topic_count_wd[k] - 2 * subtract + V * eta));
    // Accumulate so Q is a cumulative (unnormalised) CDF over topics.
    if (k != 0)
      Q[k] = Q[k] + Q[k-1];
  }
  return Q;
}
// Reassign biterm `bi` to topic k, keeping all sufficient statistics in
// sync. Order matters: counters must be decremented under the OLD
// assignment (bi.get_z()) before set_z(k) overwrites it.
void Btm::update_counts(Biterm& bi, int k) {
  word_topic_count(bi.get_wi(), bi.get_z())--;
  word_topic_count(bi.get_wj(), bi.get_z())--;
  topic_count_wd(bi.get_z()) -= 2; // a biterm occupies two word slots
  topic_count_bt(bi.get_z())--;
  bi.set_z(k);
  word_topic_count(bi.get_wi(), k)++;
  word_topic_count(bi.get_wj(), k)++;
  topic_count_wd(k) += 2;
  topic_count_bt(k)++;
}
// Posterior mean of the per-topic word distributions:
// beta(w, k) = (n_{w,k} + eta) / (n_k + V*eta), smoothed by the prior eta.
arma::mat Btm::calc_beta() {
  arma::mat beta(V, K, arma::fill::zeros);
  for (int w = 0; w < V; w++) {
    for (int k = 0; k < K; k++) {
      beta(w, k) = (word_topic_count(w, k) + eta) / (topic_count_wd[k] + V * eta);
    }
  }
  return beta;
}
// Posterior mean of the global topic distribution theta, smoothed by alpha.
// B is the total number of biterms.
arma::rowvec Btm::calc_theta() {
  arma::rowvec theta(K, arma::fill::zeros);
  double denom = B + K * alpha;
  for (int topic = 0; topic < K; topic++) {
    theta[topic] = (topic_count_bt[topic] + alpha) / denom;
  }
  return theta;
}
// Collapsed Gibbs sampler for the Biterm Topic Model (BTM).
//
// token_ids / doc_ids : parallel vectors giving each token and its document.
// K, alpha, eta       : topic count and Dirichlet hyperparameters.
// iter                : number of Gibbs sweeps.
//
// Returns per-iteration traces of theta and beta, the final count matrices
// and the final topic assignment z of every biterm, classed c("btm","lda").
//
// NOTE(review): V is taken as the number of *distinct* token ids, so the
// ids are assumed to be a dense 0..V-1 encoding — confirm with the R caller.
// [[Rcpp::export]]
List btm_gibbs(NumericVector token_ids, NumericVector doc_ids, int K,
               double alpha, double eta, int iter) {
  NumericVector docs = unique(doc_ids);
  int D = docs.size();
  int V = unique(token_ids).size();
  // Build every unordered word pair (biterm) within each document.
  std::vector<Biterm> bs;
  for (int d = 0; d < D; d++) {
    NumericVector doc_words = token_ids[doc_ids == docs[d]];
    for (int i = 0; i < doc_words.size(); i++) {
      for (int j = i+1; j < doc_words.size(); j++) {
        bs.push_back(Biterm(doc_words[i], doc_words[j], K));
      }
    }
  }
  Btm btm = Btm(V, K, alpha, eta, bs);
  int B = bs.size();
  arma::mat theta_trace(iter, K);
  theta_trace.zeros();
  arma::cube beta_trace(V, K, iter);
  for (int j = 0; j < iter; j++) {
    // One uniform variate per biterm, drawn up front for this sweep.
    NumericVector u = runif(B);
    if ((j+1) % 100 == 0)
      // NOTE(review): prints j, not j+1 — the progress label is off by one.
      Rcout << "Iteration: " << j << std::endl;
    // sample latent topic assignments
    for (int i = 0; i < B; i++) {
      // Q is an unnormalised cumulative distribution over topics; assign
      // the first topic whose cumulative mass exceeds u[i].
      NumericVector Q = btm.sample_prob(bs[i]);
      for (int k = 0; k < K; k++) {
        if (u[i] < (Q[k] / Q[K-1])) {
          btm.update_counts(bs[i], k);
          break;
        }
      }
    }
    // Record the full posterior-mean traces for this sweep.
    theta_trace.row(j) = btm.calc_theta();
    beta_trace.slice(j) = btm.calc_beta();
  }
  // Final topic assignment of every biterm.
  NumericVector zs(B);
  for (int b = 0; b < B; b++)
    zs[b] = bs[b].get_z();
  List result;
  result["beta_trace"] = beta_trace;
  result["theta_trace"] = theta_trace;
  result["word_topic_count"] = btm.get_word_topic_count();
  result["topic_count_wd"] = btm.get_topic_count_wd();
  result["topic_count_bt"] = btm.get_topic_count_bt();
  result["z"] = zs;
  CharacterVector class_names(2);
  class_names[0] = "btm";
  class_names[1] = "lda";
  result.attr("class") = class_names;
  return result;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.