text
stringlengths 27
775k
|
|---|
-- Application user accounts. "user" is a reserved word in PostgreSQL, hence
-- the quoted identifier. Email is the unique login key; password_hash is
-- nullable (accounts may be created before activation sets a password).
CREATE TABLE "user" (
id serial NOT NULL,
first_name text NOT NULL,
last_name text NOT NULL,
email text NOT NULL,
-- nullable: unset until the user completes registration/activation
password_hash text,
-- one-time token emailed for account activation
activation_key text,
-- one-time token + timestamp for the password-reset flow
reset_key text,
reset_time timestamptz,
created_at timestamptz DEFAULT now(),
-- NOTE(review): updated_at defaults to now() but nothing here keeps it
-- current on UPDATE — presumably a trigger or app code does; confirm.
updated_at timestamptz DEFAULT now(),
CONSTRAINT user_pkey PRIMARY KEY (id),
CONSTRAINT email_unique UNIQUE (email)
);
|
import {
render,
screen,
fireEvent,
cleanup,
userEvent,
waitFor,
clearMocks,
} from 'test/testUtils'
import axios from 'axios'
import SymptomsRegistry from 'pages/registro-sintomas'
jest.mock('axios')
// Router push mock: resolves so the page's `await router.push(...)` settles.
const mockPush = jest.fn().mockResolvedValue(null)
// Shared, mutable query object: individual tests overwrite 'nivel-dolor'
// to simulate different pain levels arriving via the URL.
const mockQuery: { 'nivel-dolor': string | undefined } = {
'nivel-dolor': '0',
}
jest.mock('next/router', () => ({
useRouter: () => ({
query: mockQuery,
push: mockPush,
}),
}))
// Session mock: authenticated as a 'tutor', not loading.
jest.mock('next-auth/client', () => ({
useSession: jest.fn().mockReturnValue([{ role: 'tutor' }, false]),
}))
describe('<SymptomsRegistry />', () => {
  beforeEach(() => {
    clearMocks()
    // Fix: several tests mutate the shared router query ('bla', undefined,
    // '4', '2'); without this reset, later tests silently depended on
    // execution order. Restore the default pain level before every test.
    mockQuery['nivel-dolor'] = '0'
  })
  afterEach(cleanup)

  test('Should render pain box info', () => {
    render(<SymptomsRegistry />)
    const painBoxDescription = screen.getByText(/^No duele$/)
    expect(painBoxDescription).toBeInTheDocument()
  })

  test('should show title message', () => {
    render(<SymptomsRegistry />)
    const introMessage = screen.getByText(
      /Cuéntale a sayu cómo te sientes hoy/i
    )
    expect(introMessage).toBeInTheDocument()
  })

  test('should show symptoms message', () => {
    render(<SymptomsRegistry />)
    const moreSymptomsMessage = screen.getByText(/^¿Tienes otros síntomas\?$/)
    const registerSymptomsMessage = screen.getByText(
      /^Regístralos considerando que 0 es ausencia del síntoma y 10 es la mayor intensidad de este\.$/
    )
    expect(moreSymptomsMessage).toBeInTheDocument()
    expect(registerSymptomsMessage).toBeInTheDocument()
  })

  test('should show Cansancio symptom', () => {
    render(<SymptomsRegistry />)
    const cansancioText = screen.getByText(/^Cansancio$/)
    const minCansancioText = screen.getByText(/^Sin cansancio$/)
    const maxCansancioText = screen.getByText(/^Máximo cansancio$/)
    expect(cansancioText).toBeInTheDocument()
    expect(minCansancioText).toBeInTheDocument()
    expect(maxCansancioText).toBeInTheDocument()
  })

  test('Should change symptom level value when it moves to right side', async () => {
    render(<SymptomsRegistry />)
    const [sliderButton] = screen.getAllByRole('slider')
    // Three ArrowRight presses move the first slider from 0 to 3.
    fireEvent.keyDown(sliderButton, { key: 'ArrowRight', code: 'ArrowRight' })
    fireEvent.keyDown(sliderButton, { key: 'ArrowRight', code: 'ArrowRight' })
    fireEvent.keyDown(sliderButton, { key: 'ArrowRight', code: 'ArrowRight' })
    expect(await screen.findByText('3')).toBeVisible()
  })

  test('Should show the right symptom text', () => {
    render(<SymptomsRegistry />)
    const nauseaText = screen.getByText(/^Náusea$/)
    const minNauseaText = screen.getByText(/^Sin náusea$/)
    const maxNauseaText = screen.getByText(/^Máxima náusea$/)
    expect(nauseaText).toBeInTheDocument()
    expect(minNauseaText).toBeInTheDocument()
    expect(maxNauseaText).toBeInTheDocument()
  })

  test('should redirect to home when pressing cancel', () => {
    render(<SymptomsRegistry />)
    const cancelButton = screen.getByText(/^Cancelar$/)
    expect(cancelButton).toHaveAttribute('href', '/')
  })

  test('should redirect to succesful symptoms registry when pressing register', async () => {
    render(<SymptomsRegistry />)
    const registerButton = screen.getByText(/^Registrar$/)
    userEvent.click(registerButton)
    await waitFor(() => expect(axios.post).toHaveBeenCalled())
    expect(mockPush).toHaveBeenCalledWith(
      '/_success?key=SuccessfulSymptomRegistry'
    )
  })

  test('should redirect to failed symptoms register when there is an error', async () => {
    render(<SymptomsRegistry />)
    jest.spyOn(axios, 'post').mockRejectedValue(null)
    const registerButton = screen.getByText(/^Registrar$/)
    userEvent.click(registerButton)
    await waitFor(() => expect(axios.post).toHaveBeenCalled())
    expect(mockPush).toHaveBeenCalledWith(
      '/_error?error=FailedSymptomsRegistry'
    )
  })

  test('should press fever radio button', () => {
    render(<SymptomsRegistry />)
    // The "Sí" label wraps the actual <input type="radio">.
    const radioOption = screen
      .getAllByText(/^Sí$/)[0]
      .closest('label')
      ?.querySelector('input') as HTMLElement
    expect(radioOption).not.toBeChecked()
    userEvent.click(radioOption)
    expect(radioOption).toBeChecked()
  })

  test('should not render painbox when painLevel is invalid', () => {
    mockQuery['nivel-dolor'] = 'bla'
    render(<SymptomsRegistry />)
    // Fix: the matcher must be *called*. The original referenced
    // `.not.toBeInTheDocument` without parentheses, which evaluates the
    // property and asserts nothing — all six checks were no-ops.
    expect(screen.queryByText(/^No duele$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele un poco$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele un poco más$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele mucho$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele mucho más$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele al máximo$/)).not.toBeInTheDocument()
  })

  test('should not render painbox when painLevel is undefined', () => {
    mockQuery['nivel-dolor'] = undefined
    render(<SymptomsRegistry />)
    // Fix: same missing-invocation bug as above.
    expect(screen.queryByText(/^No duele$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele un poco$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele un poco más$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele mucho$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele mucho más$/)).not.toBeInTheDocument()
    expect(screen.queryByText(/^Duele al máximo$/)).not.toBeInTheDocument()
  })

  test('should show rescue radio button', () => {
    render(<SymptomsRegistry />)
    // Third "sí" option on the page belongs to the rescue question.
    const rescueRadioButton = screen
      .getAllByText(/^sí$/i)[2]
      .closest('label')
      ?.querySelector('input') as HTMLElement
    expect(rescueRadioButton['id']).toBe('Rescate-1')
    expect(rescueRadioButton).not.toBeChecked()
    userEvent.click(rescueRadioButton)
    expect(rescueRadioButton).toBeChecked()
  })

  test('should hint rescue message when pain is higher than two', () => {
    mockQuery['nivel-dolor'] = '4'
    render(<SymptomsRegistry />)
    expect(
      screen.queryByText(/^Se recomienda administrar rescate de analgesia$/)
    ).toBeInTheDocument()
  })

  test('should not show hint rescue message when pain is less or equals than two', () => {
    mockQuery['nivel-dolor'] = '2'
    render(<SymptomsRegistry />)
    expect(
      screen.queryByText(/^Se recomienda administrar rescate de analgesia$/)
    ).not.toBeInTheDocument()
  })
})
|
using System;
using System.Collections;
/// <summary>
/// "蓄能" (Build Up) card: gains one charge layer at the start of each turn;
/// once 4 layers are accumulated they are spent to grant a temporary attack
/// bonus equal to 100% of the card's ATK for the current turn.
/// </summary>
public class BuildUpLogic : CardLogic
{
    public override void Init()
    {
        base.Init();
        this.displayName = "蓄能";
        this.Desc = "回合开始时获得1层充能。消耗4层充能使本回合+" + this.CardData.ATK.ToString() + "[100%攻击力]点攻击力。";
    }

    public override IEnumerator OnTurnStart()
    {
        base.ShowMe();
        base.Layers++;
        if (base.Layers >= 4)
        {
            // Fix: the description says the buff *consumes* 4 charge layers
            // ("消耗4层充能"), but the original never decremented Layers, so
            // after the 4th turn the bonus re-triggered every single turn.
            // Spend the layers when the buff fires. (TODO confirm Layers is
            // not reset elsewhere by the card framework.)
            base.Layers -= 4;
            this.CardData.wATK += this.CardData.ATK;
        }
        yield break;
    }
}
|
<?php
use Illuminate\Support\Facades\Route;
use Illuminate\Support\Facades\Auth;
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
// Registers the default login/logout/register/password-reset routes.
Auth::routes();
Route::get('/', 'HomeController@index')->name('home');
// JSON endpoints consumed by the front-end SPA.
Route::get('/api/home',['uses'=>'HomeController@home']);
Route::get('/api/gallery',['uses'=>'HomeController@gallery']);
Route::get('/api/chat',['uses'=>'HomeController@chat']);
Route::get('/test',['uses'=>'HomeController@test']);
// Chat/conversation endpoints.
Route::get('/api/chat/conversation',['uses'=>'HomeController@conversation']);
// NOTE(review): trailing slash is inconsistent with the sibling routes — confirm intended.
Route::get('/api/chat/notify/',['uses'=>'HomeController@notify']);
Route::get('/api/chat/conversation/users',['uses'=>'HomeController@conversationuser']);
Route::get('/api/chat/conversation/receiver/{token}',['uses'=>'HomeController@getSenderinfo']);
Route::post('/api/chat/conversation/create',['uses'=>'HomeController@createconversation']);
Route::post('/api/chat/conversation/updateseen',['uses'=>'HomeController@updateseen']);
Route::post('/api/chat/conversation/chatfunction',['uses'=>'HomeController@chatfunction']);
// Profile upload/creation (named routes used by blade templates).
Route::post('/uploadss', ['uses' => 'ProfileController@uploadapi','as' => 'upload.profile',]);
Route::post('/createprofile',['uses' => 'ProfileController@createprofile','as'=>'create.profile']);
|
package org.backuity.ansi
/** ANSI SGR (Select Graphic Rendition) escape sequences for terminal text
  * styling. Each *_OFF constant resets only its own attribute (codes 22-25),
  * not the whole style, so attributes can be nested independently.
  */
object AnsiCodes {
val BOLD = "\u001b[1m"
val BOLD_OFF = "\u001b[22m"
val ITALIC = "\u001b[3m"
val ITALIC_OFF = "\u001b[23m"
val UNDERLINE = "\u001b[4m"
val UNDERLINE_OFF = "\u001b[24m"
val BLINK = "\u001b[5m"
val BLINK_OFF = "\u001b[25m"
// Restores the terminal's default foreground color (code 39).
val COLOR_DEFAULT = "\u001b[39m"
}
|
package trembita
import cats.Applicative
import cats.effect.Sync
import trembita.logging.commons.LoggingF
import org.slf4j.Logger
import scala.language.higherKinds
package object slf4j {
/** Builds a referentially transparent [[LoggingF]] backed by an SLF4J logger:
  * every call is suspended in `F.delay`, so nothing is logged until the
  * effect is run.
  */
def mkLogging[F[_]](logger: Logger)(implicit F: Sync[F]): LoggingF[F] = new LoggingF[F] {
override def info(msg: String): F[Unit] = F.delay(logger.info(msg))
override def warn(msg: String): F[Unit] = F.delay(logger.warn(msg))
override def trace(msg: String): F[Unit] = F.delay(logger.trace(msg))
override def debug(msg: String): F[Unit] = F.delay(logger.debug(msg))
override def error(msg: String, e: Throwable): F[Unit] = F.delay(logger.error(msg, e))
}
/** Impure variant: `F.pure(...)` evaluates its argument eagerly, so each log
  * statement fires at construction of the effect, not when it is run. Use
  * only where `F` has no `Sync` instance and eager logging is acceptable.
  */
def mkLoggingImpure[F[_]](logger: Logger)(implicit F: Applicative[F]): LoggingF[F] = new LoggingF[F] {
override def info(msg: String): F[Unit] = F.pure(logger.info(msg))
override def warn(msg: String): F[Unit] = F.pure(logger.warn(msg))
override def trace(msg: String): F[Unit] = F.pure(logger.trace(msg))
override def debug(msg: String): F[Unit] = F.pure(logger.debug(msg))
override def error(msg: String, e: Throwable): F[Unit] = F.pure(logger.error(msg, e))
}
}
|
package modbus
import (
"context"
"encoding/binary"
"fmt"
"io"
"math/rand"
"time"
"github.com/goburrow/modbus"
"github.com/pkg/errors"
"github.com/rancher/octopus-simulator/pkg/critical"
"github.com/rancher/octopus-simulator/pkg/log"
)
// mockThermometer wires a simulated thermometer to the given Modbus handler.
// The internal context is derived from the stop channel so cancellation can
// come either from the caller closing `stop` or from Close().
func mockThermometer(handler modbus.ClientHandler, stop <-chan struct{}) *thermometer {
var ctx, ctxCancel = context.WithCancel(critical.Context(stop))
return &thermometer{
handler: handler,
ctx: ctx,
ctxCancel: ctxCancel,
}
}
// thermometer simulates a Modbus thermometer device: Mock() periodically
// writes random temperature/humidity readings through the handler.
type thermometer struct {
handler modbus.ClientHandler
ctx context.Context
ctxCancel context.CancelFunc
}
// Close stops the Mock loop and releases the underlying handler.
// It cancels the context first and unconditionally: the original returned
// early from inside the io.Closer branch, so whenever the handler was a
// Closer (the common case) ctxCancel was never invoked and the derived
// context leaked, leaving Mock's loop to stop only via the stop channel.
func (in *thermometer) Close() error {
	if in.ctxCancel != nil {
		in.ctxCancel()
	}
	if in.handler != nil {
		if closer, ok := in.handler.(io.Closer); ok {
			return closer.Close()
		}
	}
	return nil
}
// Mock runs the simulation loop until the context is cancelled: every
// `interval` it writes a random absolute temperature (holding registers 0-1)
// and relative humidity (holding register 2... register 1), reads back the
// configured temperature limit (registers 5-6) and raises/clears the alarm
// coil 0 accordingly. Returns nil on cancellation, or the first write/read
// error.
func (in *thermometer) Mock(interval time.Duration) error {
	var cli = modbus.NewClient(in.handler)

	// default temperature limitation is 303.15K (27315 centikelvin + 3000)
	_, _ = cli.WriteMultipleRegisters(5, 2, parseInt64ToBytes(27315+3000, 2))

	var ticker = time.NewTicker(interval)
	defer ticker.Stop()

	for {
		// Fast-path exit so a cancelled context is noticed even before the
		// first tick.
		select {
		case <-in.ctx.Done():
			return nil
		default:
		}

		// mocks absolute temperature; base unit is kelvin (stored as
		// centikelvin), at least 278.15K
		var holdingRegister0 = uint64(rand.Intn(100000)) + 27315 + 500
		_, err := cli.WriteMultipleRegisters(0, 2, parseInt64ToBytes(holdingRegister0, 2))
		if err != nil {
			return errors.Wrapf(err, "failed to write holding register 0, %s:%v", "value", holdingRegister0)
		}
		log.Info(fmt.Sprintf("Mocked absolute temperature as %vK", float64(holdingRegister0)/100))

		// mocks relative humidity; unit is hundredths of a percent, at least 10%
		var holdingRegister1 = uint64(rand.Intn(10000)) + 1000
		_, err = cli.WriteMultipleRegisters(1, 1, parseInt64ToBytes(holdingRegister1, 1))
		if err != nil {
			return errors.Wrapf(err, "failed to write holding register 1, %s:%v", "value", holdingRegister1)
		}
		log.Info(fmt.Sprintf("Mocked relative humidity as %v%%", float64(holdingRegister1)/100))

		// gets temperature limitation
		holdingRegister5Bytes, err := cli.ReadHoldingRegisters(5, 2)
		if err != nil {
			return errors.Wrap(err, "failed to read holding registers 5")
		}
		var holdingRegister5 = parseBytesToInt64(holdingRegister5Bytes)
		// Fix: log message typo "limiation" -> "limitation".
		log.Info(fmt.Sprintf("Mocked temperature limitation is %vK", float64(holdingRegister5)/100))

		// reports alarm when the mocked temperature exceeds the limit
		var coilsRegister0 = []byte{0}
		if holdingRegister5 < holdingRegister0 {
			log.Info("Reported high temperature alarm")
			coilsRegister0 = []byte{1}
		}
		_, err = cli.WriteMultipleCoils(0, 1, coilsRegister0)
		if err != nil {
			return errors.Wrapf(err, "failed to write coils register 0, %s:%v", "value", coilsRegister0)
		}

		select {
		case <-in.ctx.Done():
			return nil
		case <-ticker.C:
		}
	}
}
// parseInt64ToBytes encodes i as big-endian and returns only the trailing
// quantity*2 bytes — i.e. the value packed into `quantity` 16-bit Modbus
// registers.
func parseInt64ToBytes(i uint64, quantity int) []byte {
	encoded := make([]byte, 8)
	binary.BigEndian.PutUint64(encoded, i)
	return encoded[8-quantity*2:]
}
// parseBytesToInt64 decodes a big-endian register payload into a uint64.
// Inputs longer than 8 bytes keep only the trailing 8; shorter inputs are
// left-padded with zeros.
func parseBytesToInt64(bs []byte) uint64 {
	var buf [8]byte
	if n := len(bs); n >= 8 {
		copy(buf[:], bs[n-8:])
	} else {
		copy(buf[8-n:], bs)
	}
	return binary.BigEndian.Uint64(buf[:])
}
|
/*
Copyright (C) 2012-2014 Xiongfa Li, <damao1222@live.com>
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include "pthread.h"
#include "../private/dmthread_p.h"
#include "thread/dmthread.h"
#include "dmlogger.h"
#include "dmmath.h"
#include <unistd.h>
DM_BEGIN_NAMESPACE
// Marks the thread as finished and wakes any waiters blocked in
// Thread::wait(). Called from the thread routine on exit and from
// terminate(); `needlock` is false when the caller already holds the mutex.
void ThreadPrivate::finish(void *arg, dbool needlock /*= true*/)
{
Thread *thr = reinterpret_cast<Thread *>(arg);
ThreadPrivate *d = thr->d_func();
SingleLock lock(needlock ? &d->mutex : NULL);
d->threadId = 0;
d->isFinished = true;
d->wait.set();
}
// One-time global initialization hook; the pthread backend needs none.
dbool Thread::globalInit()
{
return true;
}
// Constructs an idle thread: no id, finished, default stack size, and
// scheduling inherited from the creating thread.
Thread::Thread():
C_D(Thread)
{
pdm->threadId = 0;
pdm->isFinished = true;
pdm->stackSize = 0;
pdm->priority = PTHREAD_INHERIT_SCHED;
}
// D_D releases the private data; threads are created detached (see start()),
// so no join is required here.
Thread::~Thread()
{
D_D(Thread);
}
// Launches the thread as a *detached* pthread running
// ThreadPrivate::staticThread(this). Returns 1 on success, -1 on failure.
// `stacksize` raises (never lowers) the configured stack size.
dint Thread::start(duint stacksize /*= 0*/)
{
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    if (pdm->stackSize < stacksize)
        pdm->stackSize = stacksize;
#if !defined(DM_OS_ANDROID) // http://code.google.com/p/android/issues/detail?id=7808
    // NOTE(review): the guard tests the *argument* but applies pdm->stackSize;
    // a previously configured stack size is ignored when stacksize == 0.
    if (stacksize > PTHREAD_STACK_MIN)
        pthread_attr_setstacksize(&attr, pdm->stackSize);
#endif
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
    pdm->isFinished = false;
    if (pthread_create(&pdm->threadId, &attr, (void*(*)(void*))ThreadPrivate::staticThread, this) != 0)
    {
        pdm->isFinished = true;
        // Fix: the original leaked `attr` on the failure path.
        pthread_attr_destroy(&attr);
        DM_FATAL("creating thread");
        return -1;
    }
    pthread_attr_destroy(&attr);
    return 1;
}
// True while the thread routine has not yet finished (mutex-protected).
dbool Thread::isRunning() const
{
SingleLock l(&pdm->mutex);
return !pdm->isFinished;
}
// True when the caller *is* this Thread object's thread.
dbool Thread::isCurrentThread() const
{
return isCurrentThread(pdm->threadId);
}
// Returns this object's thread id (0 when not started / already finished).
ThreadIdentifier Thread::currentId() const
{
return pdm->threadId;
}
// Returns the id of the *calling* thread, unrelated to this object.
ThreadIdentifier Thread::currentThreadId()
{
return pthread_self();
}
// Compares the calling thread against `id` using pthread_equal (ids are
// opaque and must not be compared with ==).
dbool Thread::isCurrentThread(const ThreadIdentifier id)
{
return pthread_equal(pthread_self(), id);
}
// Sleeps the calling thread for `ms` milliseconds.
void Thread::sleep(dint ms)
{
::usleep(1000*ms);
}
// Yields the remainder of the calling thread's time slice.
void Thread::yieldCurrentThread()
{
sched_yield();
}
// Forced termination — currently disabled: the pthread_cancel call is
// commented out and DM_ASSERT(false) trips in debug builds, so this is
// effectively a stub that (in release) just marks the thread finished.
void Thread::terminate()
{
//FIXME: pthread terminate is invalid
DM_ASSERT(false);
SingleLock locker(&pdm->mutex);
// NOTE(review): `|| pdm->threadId` looks inverted — it early-returns
// whenever a thread id exists, i.e. exactly when there is something to
// terminate. Presumably `!pdm->threadId` was intended; harmless only
// because the cancel below is disabled. Confirm before re-enabling.
if (pdm->isFinished || pdm->threadId)
return;
//dint code = pthread_cancel(pdm->threadId);
dint code = 0;
if (code) {
DM_LOGW("Thread termination error: %d", code);
} else {
// Mutex already held, so tell finish() not to re-lock.
ThreadPrivate::finish(this, false);
}
}
// Blocks the calling thread until this thread finishes or `msec`
// milliseconds elapse. Returns true if the thread is (or becomes) finished,
// false on timeout or when a thread attempts to wait on itself.
dbool Thread::wait(duint msec /*= UINT_MAX*/)
{
    // Fix: dropped the unused local `bReturn` from the original.
    SingleLock lock(&pdm->mutex);
    if (pdm->threadId == pthread_self()) {
        DM_LOGW("Thread::wait: Thread tried to wait on itself");
        return false;
    }
    if (pdm->isFinished)
        return true;
    // The wait event is set by ThreadPrivate::finish().
    return pdm->wait.waitMSec(msec);
}
// Cross-platform priority levels mapped onto POSIX scheduler priorities.
enum Priority {
IdlePriority,
LowestPriority,
LowPriority,
NormalPriority,
HighPriority,
HighestPriority,
TimeCriticalPriority,
InheritPriority
};
// Translates a Priority enum value into a native priority for the given
// scheduling policy. IdlePriority maps directly to SCHED_IDLE when the
// platform has it; everything else is linearly scaled into the policy's
// [prio_min, prio_max] range. Returns false if the range cannot be queried.
static dbool CalculateUnixPriority(int priority, int *sched_policy, int *sched_priority)
{
#ifdef SCHED_IDLE
if (priority == IdlePriority) {
*sched_policy = SCHED_IDLE;
*sched_priority = 0;
return true;
}
const int lowestPriority = LowestPriority;
#else
const int lowestPriority = IdlePriority;
#endif
const int highestPriority = TimeCriticalPriority;
int prio_min = sched_get_priority_min(*sched_policy);
int prio_max = sched_get_priority_max(*sched_policy);
if (prio_min == -1 || prio_max == -1)
return false;
int prio;
// crudely scale our priority enum values to the prio_min/prio_max
prio = ((priority - lowestPriority) * (prio_max - prio_min) / highestPriority) + prio_min;
// Clamp in case the scaling over/undershoots the native range.
prio = Max(prio_min, Min(prio_max, prio));
*sched_priority = prio;
return true;
}
// Applies `priority` (a Priority enum value) to the running thread via
// pthread_setschedparam, preserving the current scheduling policy.
// Returns false when the thread is not running or the scheduler refuses.
dbool Thread::setPriority(dint priority)
{
    SingleLock locker(&pdm->mutex);
    if (pdm->isFinished) {
        DM_LOGW("Thread::setPriority: Cannot set priority, thread is not running");
        return false;
    }
    pdm->priority = priority;
    // copied from start() with a few modifications:
    int sched_policy;
    sched_param param;
    if (pthread_getschedparam(pdm->threadId, &sched_policy, &param) != 0) {
        // failed to get the scheduling policy, don't bother setting
        // the priority
        DM_LOGW("Thread::setPriority: Cannot get scheduler parameters");
        return false;
    }
    int prio;
    if (!CalculateUnixPriority(priority, &sched_policy, &prio)) {
        // failed to get the scheduling parameters, don't
        // bother setting the priority
        DM_LOGW("Thread::setPriority: Cannot determine scheduler priority range");
        return false;
    }
    param.sched_priority = prio;
    int status = pthread_setschedparam(pdm->threadId, sched_policy, &param);
# ifdef SCHED_IDLE
    // were we trying to set to idle priority and failed?
    if (status == -1 && sched_policy == SCHED_IDLE && errno == EINVAL) {
        // reset to lowest priority possible
        // Fix: the original referenced the undefined name `d->thread_id`
        // here (copied from Qt); this file's private data is `pdm->threadId`.
        pthread_getschedparam(pdm->threadId, &sched_policy, &param);
        param.sched_priority = sched_get_priority_min(sched_policy);
        pthread_setschedparam(pdm->threadId, sched_policy, &param);
    }
# else
    DM_UNUSED(status);
# endif // SCHED_IDLE
    return true;
}
// Returns the last priority requested via setPriority() (mutex-protected);
// this is the cached enum value, not the live scheduler priority.
dint Thread::priority() const
{
SingleLock locker(&pdm->mutex);
return pdm->priority;
}
// Static thread entry point: runs the virtual run() and then marks the
// thread finished so waiters in Thread::wait() wake up.
THREADFUNC ThreadPrivate::staticThread(void *data)
{
Thread* pThread = (Thread*)(data);
pThread->run();
finish(pThread);
return 0;
}
DM_END_NAMESPACE
|
package main
import (
"fmt"
"github.com/jojomi/interview"
)
// Demo for the interview package: interactively asks the user to pick a
// fruit with fuzzy, case-insensitive matching and "apple" preselected.
func main() {
// NOTE(review): unkeyed composite literals — presumably the two fields are
// (value, description); confirm against interview.Choice's declaration.
appleChoice := interview.Choice{
"apple",
"",
}
question := &interview.ChoiceQuestion{
Choices: []interview.Choice{
appleChoice,
interview.Choice{
"raspberry",
"",
},
},
DefaultChoice: appleChoice,
PromptChoices: true,
PromptHighlightDefault: true,
MatchIgnoreCase: true,
MatchType: interview.MatchFuzzy,
AllowFreeText: false,
}
question.SetPrompt("Please pick a fruit")
// Blocks on terminal input until a valid choice is made.
choice, err := interview.AskForChoice(question)
if err != nil {
panic(err)
}
fmt.Println("Your choice: " + choice.String())
}
|
# Node Auth Server
Built with Express.js / MongoDB
> This project was built as a simple demonstration of session management and JWT authorization.
> [Live Preview(Client) ->](https://react-msn-messenger.netlify.app/)
> [Live Preview(Server) ->](https://node-auth-server-2.herokuapp.com/auth/login)
- Routes
- Auth
- login <POST>
- logout <GET>
- register <POST>
- check <GET> Session Check
- browser <GET> Browser Auth
- Jwt
- login <POST>
- logout <GET>
- register <POST>
- check <GET> JWT Check
- browser <GET> Browser Auth
## How to install
To start the server, you need the following environment variables:
```
PORT=<9000>
DB_URL=<mongo_db_url>
JWT_TOKEN_SECRET=<you can use crypto>
JWT_REFRESH_SECRET=<you can use crypto>
```
After successful installation:
```
npm install
npm start
```
or with `pnpm`
```
pnpm install
pnpm start
```
> [movwf](https://github.com/movwf) - 2021
|
namespace Rug.Domain.Championship
{
    /// <summary>
    /// Marker type for the team-shuffle concept; intentionally empty —
    /// presumably a placeholder or a tag used elsewhere in the domain
    /// model (TODO confirm intended use).
    /// </summary>
    public class TeamShuffle { }
}
|
package backoff
import (
"context"
"fmt"
"time"
"github.com/giantswarm/micrologger"
)
// NewNotifier builds a backoff notification callback that logs, at warning
// level, each retry together with the delay before it and a verbose dump of
// the triggering error.
func NewNotifier(l micrologger.Logger, ctx context.Context) func(error, time.Duration) {
	return func(err error, d time.Duration) {
		message := fmt.Sprintf("retrying backoff in '%s' due to error", d.String())
		stack := fmt.Sprintf("%#v", err)
		l.LogCtx(ctx, "level", "warning", "message", message, "stack", stack)
	}
}
|
using System;
using System.Collections.Generic;
using Wanderer.Actors;
namespace Wanderer.Systems
{
    /// <summary>
    /// A system for applying a range of possibly cumulative effects on an
    /// <see cref="IActor"/> e.g. injuries
    /// </summary>
    public interface ISystem
    {
        /// <summary>
        /// Human readable name for the system
        /// </summary>
        string Name { get; set; }

        /// <summary>
        /// The unique identifier for this system so that it can be referenced from scripts
        /// etc. This should be a constant (Don't use NewGuid!). When sub-classing it is
        /// permissible to use the parents guid if you are semantically the same (e.g. subclass
        /// methods are alternate ways to load the system)
        /// </summary>
        Guid Identifier { get; set; }

        /// <summary>
        /// Apply the system to the recipient
        /// </summary>
        /// <param name="args">Context for the application, including the recipient</param>
        void Apply(SystemArgs args);

        /// <summary>
        /// Apply the system once to each of the <paramref name="recipients"/>
        /// </summary>
        /// <param name="recipients">Targets to apply the system to, one at a time</param>
        /// <param name="args">Shared context reused for every recipient</param>
        void ApplyToAll(IEnumerable<IHasStats> recipients, SystemArgs args);
    }
}
|
# frozen_string_literal: true
# encoding: UTF-8
require 'test_helper'
# View tests for SimpleForm's priority inputs (:country and :time_zone).
# Option markup differs between Rails 4 and 5, hence the version branches.
class PriorityInputTest < ActionView::TestCase
test 'input generates a country select field' do
with_input_for @user, :country, :country
assert_select 'select#user_country'
# NOTE(review): lexicographic string comparison of the version — fine for
# 4.x/5.x but would misorder a hypothetical '10.x'; Gem::Version would be
# the robust form.
if ActionPack::VERSION::STRING >= '5'
assert_select 'select option[value=BR]', 'Brazil'
elsif ActionPack::VERSION::STRING < '5'
assert_select 'select option[value=Brazil]', 'Brazil'
end
assert_no_select 'select option[value=""][disabled=disabled]'
end
test 'input generates a country select with SimpleForm default' do
swap SimpleForm, country_priority: [ 'Brazil' ] do
with_input_for @user, :country, :country
# The priority separator row is rendered differently per Rails version.
if ActionPack::VERSION::STRING >= '5'
assert_select 'select option[value="---------------"][disabled=disabled]'
elsif ActionPack::VERSION::STRING < '5'
assert_select 'select option[value=""][disabled=disabled]'
end
end
end
test 'input generates a time zone select field' do
with_input_for @user, :time_zone, :time_zone
assert_select 'select#user_time_zone'
assert_select 'select option[value=Brasilia]', '(GMT-03:00) Brasilia'
assert_no_select 'select option[value=""][disabled=disabled]'
end
test 'input generates a time zone select field with default' do
with_input_for @user, :time_zone, :time_zone, default: 'Brasilia'
assert_select 'select option[value=Brasilia][selected=selected]'
assert_no_select 'select option[value=""]'
end
test 'input generates a time zone select using options priority' do
with_input_for @user, :time_zone, :time_zone, priority: /Brasilia/
# A priority regex introduces the disabled separator row.
assert_select 'select option[value=""][disabled=disabled]'
assert_no_select 'select option[value=""]', /^$/
end
test 'priority input does not generate invalid required html attribute' do
with_input_for @user, :country, :country
assert_select 'select.required'
assert_no_select 'select[required]'
end
test 'priority input does not generate invalid aria-required html attribute' do
with_input_for @user, :country, :country
assert_select 'select.required'
assert_no_select 'select[aria-required]'
end
end
|
<?php
// Locale data: English display names for IANA time zone identifiers
// (presumably generated from CLDR — do not hand-maintain; confirm the
// generator before editing). 'Meta' is intentionally empty here.
return [
'Names' => [
'America/Miquelon' => 'Saint-Pierre-et-Miquelon Time',
'America/St_Barthelemy' => 'Atlantic Time (Saint-Barthélemy)',
'America/St_Johns' => 'Newfoundland Time (Saint John’s)',
'America/St_Kitts' => 'Atlantic Time (Saint Kitts)',
'America/St_Lucia' => 'Atlantic Time (Saint Lucia)',
'America/St_Thomas' => 'Atlantic Time (Saint Thomas)',
'America/St_Vincent' => 'Atlantic Time (Saint Vincent)',
'Asia/Aqtau' => 'West Kazakhstan Time (Aktau)',
'Asia/Rangoon' => 'Myanmar Time (Rangoon)',
'Atlantic/St_Helena' => 'Greenwich Mean Time (Saint Helena)',
'Indian/Kerguelen' => 'French Southern and Antarctic Time (Kerguelen)',
'Pacific/Wallis' => 'Wallis and Futuna Time',
],
'Meta' => [
],
];
|
/*
* Copyright 2020 Peter Kenji Yamanaka
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pyamsoft.pydroid.ui.internal.billing
import com.pyamsoft.pydroid.arch.UiControllerEvent
import com.pyamsoft.pydroid.arch.UiViewEvent
import com.pyamsoft.pydroid.billing.BillingSku
import com.pyamsoft.pydroid.billing.BillingState
import com.pyamsoft.pydroid.ui.internal.app.AppState
/**
 * UI state for the billing dialog: connection status, purchasable SKUs and
 * any error to surface, plus the app icon/name inherited from [AppState].
 */
internal data class BillingViewState
internal constructor(
    override val icon: Int,
    override val name: CharSequence,
    val connected: BillingState,
    val skuList: List<BillingSku>,
    val error: Throwable?
) : AppState

/** User-initiated events emitted by the billing view. */
internal sealed class BillingViewEvent : UiViewEvent {
  object Close : BillingViewEvent()
  object ClearError : BillingViewEvent()
  /** Purchase request for the SKU at [index] in [BillingViewState.skuList]. */
  data class Purchase internal constructor(val index: Int) : BillingViewEvent()
}

/** Controller-level events: a resolved SKU ready for the purchase flow. */
internal sealed class BillingControllerEvent : UiControllerEvent {
  data class Purchase internal constructor(val sku: BillingSku) : BillingControllerEvent()
}
|
# sla-vault
Retain high resolution perf data from appdynamics
Configure: create InfluxDB database and configure one or more data sources in config.json
Get: *one minute* Business Transaction(BT) *scorecard* data from one or several data sources
Save: one minute *scorecard* into InfluxDB with possibility to calculate *SLA* -> calcSLA()
(Graph:) can easily graph *SLA* from saved *scorecard* with your favorite tool Grafana/Graphite etc.
Example config:
{
"database": {
"db_host": "http://localhost:8086",
"db_name": "SLA",
"db_user": "xxx",
"db_pwd": "xxx"
},
"data_sources": [
{
"unique_name": "perf",
"host": "http://AppDController1:8090/controller/rest/applications/myApplication1",
"metric_path": "Business Transaction Performance|Business Transaction Groups|SLA|*",
"rest_user": "rest@customer1",
"rest_pwd": "xxx"
},
{
"unique_name": "BT_test_env",
"host": "http://myAppDController2:8090/controller/rest/applications/myApplication2",
"metric_path": "Business Transaction Performance|Business Transactions|*|*|*",
"rest_user": "rest@customer1",
"rest_pwd": "xxx"
}
]
}
|
package de.rki.coronawarnapp.ui.settings.notifications
import android.os.Bundle
import android.view.View
import android.view.accessibility.AccessibilityEvent
import androidx.fragment.app.Fragment
import de.rki.coronawarnapp.R
import de.rki.coronawarnapp.databinding.FragmentSettingsNotificationsBinding
import de.rki.coronawarnapp.ui.main.MainActivity
import de.rki.coronawarnapp.util.ExternalActionHelper
import de.rki.coronawarnapp.util.di.AutoInject
import de.rki.coronawarnapp.util.ui.observe2
import de.rki.coronawarnapp.util.ui.viewBindingLazy
import de.rki.coronawarnapp.util.viewmodel.CWAViewModelFactoryProvider
import de.rki.coronawarnapp.util.viewmodel.cwaViewModels
import javax.inject.Inject
/**
* This is the setting notification page. Here the user sees his os notifications settings status.
* If os notifications are disabled he can navigate to them with one click. And if the os is enabled
* the user can decide which notifications he wants to get: risk updates and/or test results.
*/
class NotificationSettingsFragment :
    Fragment(R.layout.fragment_settings_notifications),
    AutoInject {

    @Inject lateinit var viewModelFactory: CWAViewModelFactoryProvider.Factory
    private val vm: NotificationSettingsFragmentViewModel by cwaViewModels { viewModelFactory }

    private val binding: FragmentSettingsNotificationsBinding by viewBindingLazy()

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        // Re-bind the layout whenever the notification settings state changes.
        vm.notificationSettingsState.observe2(this) {
            binding.state = it
        }
        setButtonOnClickListener()
    }

    override fun onResume() {
        super.onResume()
        // Announce the screen to accessibility services on every resume.
        binding.settingsNotificationsContainer.sendAccessibilityEvent(AccessibilityEvent.TYPE_ANNOUNCEMENT)
    }

    // Wires all click/toggle targets on this screen to the view model.
    private fun setButtonOnClickListener() {
        // Notifications about risk status
        val updateRiskNotificationSwitch =
            binding.settingsSwitchRowNotificationsRisk.settingsSwitchRowSwitch
        // Additional click target to toggle switch
        val updateRiskNotificationRow =
            binding.settingsSwitchRowNotificationsRisk.settingsSwitchRow
        // Notifications about test status
        val updateTestNotificationSwitch =
            binding.settingsSwitchRowNotificationsTest.settingsSwitchRowSwitch
        // Additional click target to toggle switch
        val updateTestNotificationRow =
            binding.settingsSwitchRowNotificationsTest.settingsSwitchRow
        // Settings
        val settingsRow = binding.settingsNotificationsCard.tracingStatusCardButton
        val goBack =
            binding.settingsNotificationsHeader.headerButtonBack.buttonIcon
        // Update Risk
        updateRiskNotificationSwitch.setOnCheckedChangeListener { view, _ ->
            // Make sure that listener is called by user interaction
            // (isPressed is false when the state is set programmatically,
            // e.g. during data binding — prevents feedback loops).
            if (!view.isPressed) return@setOnCheckedChangeListener
            vm.toggleNotificationsRiskEnabled()
        }
        // Additional click target to toggle switch
        updateRiskNotificationRow.setOnClickListener {
            if (updateRiskNotificationRow.isEnabled) vm.toggleNotificationsRiskEnabled()
        }
        // Update Test
        updateTestNotificationSwitch.setOnCheckedChangeListener { view, _ ->
            // Make sure that listener is called by user interaction
            if (!view.isPressed) return@setOnCheckedChangeListener
            vm.toggleNotificationsTestEnabled()
        }
        // Additional click target to toggle switch
        updateTestNotificationRow.setOnClickListener {
            if (updateTestNotificationRow.isEnabled) vm.toggleNotificationsTestEnabled()
        }
        goBack.setOnClickListener {
            (activity as MainActivity).goBack()
        }
        // System Settings: deep-link to the OS notification settings.
        settingsRow.setOnClickListener {
            ExternalActionHelper.toNotifications(requireContext())
        }
    }
}
|
import { gql } from 'mercurius-codegen';
// GraphQL SDL for a color palette extracted from an image; the six swatch
// fields are nullable (a swatch may be absent), timestamps are required.
export const Palette = gql`
  type Palette {
    darkMuted: String
    darkVibrant: String
    lightMuted: String
    lightVibrant: String
    muted: String
    vibrant: String
    createdAt: BigInt!
    updatedAt: BigInt!
  }
`;
|
// DO NOT EDIT. Autogenerated by Perfetto cppgen_plugin
#ifndef PERFETTO_PROTOS_PROTOS_PERFETTO_TRACE_CHROME_CHROME_BENCHMARK_METADATA_PROTO_CPP_H_
#define PERFETTO_PROTOS_PROTOS_PERFETTO_TRACE_CHROME_CHROME_BENCHMARK_METADATA_PROTO_CPP_H_
#include <stdint.h>
#include <bitset>
#include <vector>
#include <string>
#include <type_traits>
#include "perfetto/protozero/cpp_message_obj.h"
#include "perfetto/protozero/copyable_ptr.h"
#include "perfetto/base/export.h"
namespace perfetto {
namespace protos {
namespace gen {
class ChromeBenchmarkMetadata;
// Fixed namespace-closing comments: they were labeled in reverse order.
} // namespace gen
} // namespace protos
} // namespace perfetto
namespace protozero {
class Message;
} // namespace protozero
namespace perfetto {
namespace protos {
namespace gen {
// C++ in-memory mirror of the ChromeBenchmarkMetadata proto message.
// NOTE(review): this header appears to be generated protozero code; prefer
// regenerating from the .proto file over editing it by hand.
class PERFETTO_EXPORT ChromeBenchmarkMetadata : public ::protozero::CppMessageObj {
public:
// Proto field numbers; also the bit positions used in _has_field_ below.
enum FieldNumbers {
kBenchmarkStartTimeUsFieldNumber = 1,
kStoryRunTimeUsFieldNumber = 2,
kBenchmarkNameFieldNumber = 3,
kBenchmarkDescriptionFieldNumber = 4,
kLabelFieldNumber = 5,
kStoryNameFieldNumber = 6,
kStoryTagsFieldNumber = 7,
kStoryRunIndexFieldNumber = 8,
kHadFailuresFieldNumber = 9,
};
ChromeBenchmarkMetadata();
~ChromeBenchmarkMetadata() override;
ChromeBenchmarkMetadata(ChromeBenchmarkMetadata&&) noexcept;
ChromeBenchmarkMetadata& operator=(ChromeBenchmarkMetadata&&);
ChromeBenchmarkMetadata(const ChromeBenchmarkMetadata&);
ChromeBenchmarkMetadata& operator=(const ChromeBenchmarkMetadata&);
bool operator==(const ChromeBenchmarkMetadata&) const;
bool operator!=(const ChromeBenchmarkMetadata& other) const { return !(*this == other); }
// Serialization entry points (contract inherited from CppMessageObj).
bool ParseFromArray(const void*, size_t) override;
std::string SerializeAsString() const override;
std::vector<uint8_t> SerializeAsArray() const override;
void Serialize(::protozero::Message*) const;
// Per-field accessors: has_x() reports presence, x() reads the cached value,
// set_x() writes it and marks the field present in _has_field_.
bool has_benchmark_start_time_us() const { return _has_field_[1]; }
int64_t benchmark_start_time_us() const { return benchmark_start_time_us_; }
void set_benchmark_start_time_us(int64_t value) { benchmark_start_time_us_ = value; _has_field_.set(1); }
bool has_story_run_time_us() const { return _has_field_[2]; }
int64_t story_run_time_us() const { return story_run_time_us_; }
void set_story_run_time_us(int64_t value) { story_run_time_us_ = value; _has_field_.set(2); }
bool has_benchmark_name() const { return _has_field_[3]; }
const std::string& benchmark_name() const { return benchmark_name_; }
void set_benchmark_name(const std::string& value) { benchmark_name_ = value; _has_field_.set(3); }
bool has_benchmark_description() const { return _has_field_[4]; }
const std::string& benchmark_description() const { return benchmark_description_; }
void set_benchmark_description(const std::string& value) { benchmark_description_ = value; _has_field_.set(4); }
bool has_label() const { return _has_field_[5]; }
const std::string& label() const { return label_; }
void set_label(const std::string& value) { label_ = value; _has_field_.set(5); }
bool has_story_name() const { return _has_field_[6]; }
const std::string& story_name() const { return story_name_; }
void set_story_name(const std::string& value) { story_name_ = value; _has_field_.set(6); }
// Repeated field: no has_ flag; presence is implied by a non-empty vector.
const std::vector<std::string>& story_tags() const { return story_tags_; }
std::vector<std::string>* mutable_story_tags() { return &story_tags_; }
int story_tags_size() const { return static_cast<int>(story_tags_.size()); }
void clear_story_tags() { story_tags_.clear(); }
void add_story_tags(std::string value) { story_tags_.emplace_back(value); }
std::string* add_story_tags() { story_tags_.emplace_back(); return &story_tags_.back(); }
bool has_story_run_index() const { return _has_field_[8]; }
int32_t story_run_index() const { return story_run_index_; }
void set_story_run_index(int32_t value) { story_run_index_ = value; _has_field_.set(8); }
bool has_had_failures() const { return _has_field_[9]; }
bool had_failures() const { return had_failures_; }
void set_had_failures(bool value) { had_failures_ = value; _has_field_.set(9); }
private:
int64_t benchmark_start_time_us_{};
int64_t story_run_time_us_{};
std::string benchmark_name_{};
std::string benchmark_description_{};
std::string label_{};
std::string story_name_{};
std::vector<std::string> story_tags_;
int32_t story_run_index_{};
bool had_failures_{};
// Allows to preserve unknown protobuf fields for compatibility
// with future versions of .proto files.
std::string unknown_fields_;
// Bit i is set iff the field with number i is present (bit 0 is unused).
std::bitset<10> _has_field_{};
};
} // namespace gen
} // namespace protos
} // namespace perfetto
#endif // PERFETTO_PROTOS_PROTOS_PERFETTO_TRACE_CHROME_CHROME_BENCHMARK_METADATA_PROTO_CPP_H_
|
@using BDTest.NetCore.Razor.ReportMiddleware.Models.ViewModels
@model BDTest.Maps.BDTestOutputModel
@* Page-level settings: browser title and the shared layout shell. *@
@{
ViewBag.Title = "Summary";
Layout = "_Layout";
}
@* Hero banner shown at the top of the report summary page. *@
@await Html.PartialAsync("_TestPageHeaderHero", new TestPageHeaderHeroViewModel
{
Title = "Welcome to your report by BDTest!",
Description = "Here is a summary of your tests"
})
@* Aggregate result graphics for every scenario in this run. *@
<section class="section">
<div class="container">
@await Html.PartialAsync("_SummaryGraphics", new SummaryViewModel
{
Scenarios = Model.Scenarios,
TotalReportData = Model
})
</div>
</section>
|
import { useNavigation } from '@react-navigation/native';
import React from 'react';
import { Platform, Pressable, StyleSheet, View } from 'react-native';
import Icon from 'react-native-vector-icons/MaterialIcons';
// Static styles for the floating "write" action button.
const styles = StyleSheet.create({
// Pinned to the bottom-right corner. Carries the iOS shadow and the Android
// elevation; overflow 'hidden' (Android only) clips the ripple to the circle.
wrapper: {
position: 'absolute',
bottom: 16,
right: 16,
width: 56,
height: 56,
borderRadius: 28,
shadowColor: '#4d4d4d',
shadowOffset: { width: 0, height: 4 },
shadowOpacity: 0.3,
shadowRadius: 4,
elevation: 5,
// Platform.select returns undefined on iOS, so no clipping there
// (clipping would cut off the shadow).
overflow: Platform.select({ android: 'hidden' }),
},
// The circular teal button itself, centering its icon child.
button: {
width: 56,
height: 56,
borderRadius: 28,
backgroundColor: '#009688',
justifyContent: 'center',
alignItems: 'center',
},
icon: {
color: 'white',
},
});
/**
 * Floating action button that opens the 'Write' screen.
 * Android gets a native ripple; iOS fades the button while pressed.
 */
function FloatingWriteButton() {
  const navigation = useNavigation();

  // Navigate to the write screen when the FAB is tapped.
  const handlePress = () => {
    navigation.navigate('Write');
  };

  // Pressable style callback: on iOS dim the button while pressed,
  // elsewhere keep the base style unchanged.
  const buttonStyle = ({ pressed }: { pressed: boolean }): object => [
    styles.button,
    Platform.OS === 'ios' && {
      opacity: pressed ? 0.6 : 1,
    },
  ];

  return (
    <View style={styles.wrapper}>
      <Pressable
        style={buttonStyle}
        android_ripple={{ color: 'white' }}
        onPress={handlePress}>
        <Icon name="add" size={24} style={styles.icon} />
      </Pressable>
    </View>
  );
}

export default FloatingWriteButton;
|
<?php
/**
* The template part for displaying an Author biography
*
* @since Simplent 1.0
*/
?>
<div class="entry-author-info clearfix">
<div class="author-avatar">
<?php
/**
* Filter the Simplent author bio avatar size.
*
* @since Simplent 1.0
*
* @param int $size The avatar height and width size in pixels.
*/
$author_bio_avatar_size = apply_filters( 'simplent_author_bio_avatar_size', 72 );
echo get_avatar( get_the_author_meta( 'user_email' ), $author_bio_avatar_size );
?>
</div><!-- .author-avatar -->
<div class="author-description">
<p class="author-title">
<?php /* Linked display name pointing at the author's post archive. */ ?>
<?php echo get_the_author_posts_link(); ?>
</p>
<p class="author-bio">
<?php /* Biography text from the user profile; the_author_meta() echoes it. */ ?>
<?php the_author_meta( 'description' ); ?>
</p><!-- .author-bio -->
</div><!-- .author-description -->
</div><!-- .entry-author-info -->
|
package me.invkrh.raft.core
import scala.concurrent.duration._
import scala.language.postfixOps
import akka.actor.PoisonPill
import akka.testkit.TestProbe
import me.invkrh.raft.exception._
import me.invkrh.raft.kit.{Tell, _}
import me.invkrh.raft.message.AdminMessage._
import me.invkrh.raft.message.ClientMessage._
import me.invkrh.raft.message.RPCMessage._
import me.invkrh.raft.storage.MemoryStore
class ServerTest extends RaftTestHarness("SeverSpec") { self =>
// Builds an AppendEntries RPC carrying no entries, i.e. a leader heartbeat.
// The zero defaults correspond to a leader with an empty, uncommitted log.
def heartbeat(
term: Int,
leaderId: Int,
prevLogIndex: Int = 0,
prevLogTerm: Int = 0,
leaderCommit: Int = 0): AppendEntries =
AppendEntries(term, leaderId, prevLogIndex, prevLogTerm, Nil, leaderCommit)
//////////////////////////////////////////////////////////////////////////////////////////////////
// Log Replication
//////////////////////////////////////////////////////////////////////////////////////////////////
"Log replication on leader side" should {
"initialize nextIndex and matchIndex" in {
new LeaderEndPointChecker()
.setActions(
Reply(AppendEntriesResult(1, success = true)),
Tell(GetStatus),
Expect(
Status(
42,
1,
ServerState.Leader,
Some(42),
Map(1 -> 1, 42 -> 1),
Map(1 -> 0, 42 -> 0),
0,
0)))
.run()
}
"process message exchanges" in {
new LeaderEndPointChecker()
.setActions(
Reply(AppendEntriesResult(1, success = true)),
Rep(5, Tell(SET("x", 1))), // leader receives 5 commands
FishForMsg { case req: AppendEntries if req.entries.size == 5 => true },
Reply(AppendEntriesResult(1, success = true)),
Expect(heartbeat(1, 42, 5, 1)),
Tell(GetStatus),
FishForMsg {
case st: Status
if st.nextIndex == Map(42 -> 1, 1 -> 6) && st.matchIndex == Map(42 -> 0, 1 -> 5) =>
true
},
Expect(heartbeat(1, 42, 5, 1)),
Reply(AppendEntriesResult(1, success = false)),
FishForMsg {
case req: AppendEntries if req.entries.size == 1 && req.prevLogIndex == 4 => true
})
.run()
}
}
"Log replication on follower side" should {
def followerLogReplicationCheck(
term: Int,
prevIndex: Int,
prevTerm: Int,
leaderCommit: Int,
isAccept: Boolean): MemoryStore = {
val checker = new FollowerEndPointChecker()
val leaderId = 1
checker
.setActions(
Tell(heartbeat(term, leaderId)),
Expect(AppendEntriesResult(term, success = true)),
Tell(
AppendEntries(
term,
leaderId,
prevIndex,
prevTerm,
List(dummyEntry(1, SET("x", 1))),
leaderCommit)),
Expect(AppendEntriesResult(term, success = isAccept)))
.run()
checker.memoryStore
}
"accept AppendEntry request and apply command" in {
val store = followerLogReplicationCheck(1, 0, 0, 1, isAccept = true)
assertResult(Some(1)) {
store.cache.get("x")
}
}
"accept AppendEntry request but not apply command if leader commit is not bigger than " +
"local commit index" in {
val store = followerLogReplicationCheck(1, 0, 0, 0, isAccept = true)
assertResult(None) {
store.cache.get("x")
}
}
"reject AppendEntry request if local logs are outdated" in {
followerLogReplicationCheck(1, 1, 1, 0, isAccept = false)
}
"reject AppendEntry request if previous log term is not matched" in {
followerLogReplicationCheck(1, 0, 2, 0, isAccept = false)
}
}
"Server.findNewCommitIndex" should {
"find right new commit index" in {
assertResult(Some(31)) {
Server.findNewCommitIndex(
20,
List(20, 30, 31, 32, 33, 34),
genDummyLogsUntilNewTerm(4, 42),
3)
}
assertResult(Some(31)) {
Server.findNewCommitIndex(21, List(20, 23, 31, 32, 33), genDummyLogsUntilNewTerm(4, 42), 3)
}
}
"return None if all matchIndex is smaller than commitIndex" in {
assertResult(None) {
Server.findNewCommitIndex(21, List(10, 13, 11, 12, 15), genDummyLogsUntilNewTerm(4, 42), 3)
}
}
"return None if eligible value has a different term with current term" in {
assertResult(None) {
Server.findNewCommitIndex(21, List(20, 23, 31, 32, 33), genDummyLogsUntilNewTerm(3, 42), 4)
}
}
}
"Server.syncLogsByRequest" should {
val probe = TestProbe()
def req(prevIndex: Int, entries: LogEntry*): AppendEntries =
AppendEntries(0, 0, prevIndex, 0, entries.toList, 0)
val logs = List(
dummyEntry(0, Init),
dummyEntry(0, SET("x", 1)),
dummyEntry(0, SET("x", 2)),
dummyEntry(0, SET("x", 3)),
dummyEntry(0, SET("x", 4)),
dummyEntry(0, SET("x", 5)))
"merge request logs and local logs" in {
val request = req(3, dummyEntry(0, SET("x", 4)), dummyEntry(0, SET("x", 5)))
assertResult((logs, 5)) {
Server.syncLogsFromLeader(request, logs)
}
}
"throw exception if log matching property is broken" in {
val request = req(3, dummyEntry(0, SET("x", 1)), dummyEntry(0, SET("x", 2)))
intercept[LogMatchingPropertyException] {
Server.syncLogsFromLeader(request, logs)
}
}
"return the index of the last new entry if local logs is longer than logs in the request" in {
val request = req(2, dummyEntry(0, SET("x", 3)))
assertResult((logs, 3)) {
Server.syncLogsFromLeader(request, logs)
}
}
"add new entries if prevIndex points to the last log" in {
val request = req(5, dummyEntry(1, SET("x", 1)), dummyEntry(1, SET("x", 2)))
val mergedLogs = logs ::: List(dummyEntry(1, SET("x", 1)), dummyEntry(1, SET("x", 2)))
assertResult((mergedLogs, mergedLogs.size - 1)) {
Server.syncLogsFromLeader(request, logs)
}
}
"replace logs after inconsistent entry" in {
val request = req(3, dummyEntry(1, SET("x", 1)), dummyEntry(1, SET("x", 2)))
val mergedLogs = logs.take(4) ::: List(dummyEntry(1, SET("x", 1)), dummyEntry(1, SET("x", 2)))
assertResult((mergedLogs, 5)) {
Server.syncLogsFromLeader(request, logs)
}
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// Leader Election
//////////////////////////////////////////////////////////////////////////////////////////////////
"Server" should {
"throw exception when election time is shorter than heartbeat interval" in {
intercept[HeartbeatIntervalException] {
val server =
system.actorOf(
Server.props(0, 100 millis, 100 millis, 150 millis, 0, MemoryStore()),
"svr")
server ! PoisonPill
}
}
"start if none of the bootstrap members are resolved" in {
val server =
system.actorOf(Server.props(0, 150 millis, 150 millis, 100 millis, 0, MemoryStore()))
expectNoMsg()
server ! PoisonPill
}
}
"Follower" should {
"resend command to leader if leader is elected" in {
new FollowerEndPointChecker()
.setActions(
Tell(heartbeat(2, 1)), // 1 is the id of leader
Expect(AppendEntriesResult(2, success = true)), // leader is set
Tell(SET("x", 1)), // reuse leader ref as client ref
Tell(SET("y", 2)),
FishForMsg { case _: Command => true },
FishForMsg { case _: Command => true })
.run()
}
"accept AppendEntries when the term of the message is equal to his own" in {
new FollowerEndPointChecker()
.setActions(Tell(heartbeat(0, 1)), Expect(AppendEntriesResult(0, success = true)))
.run()
}
"reject AppendEntries when the term of the message is smaller than his own" in {
new FollowerEndPointChecker()
.setActions(Tell(heartbeat(-1, 0)), Expect(AppendEntriesResult(0, success = false)))
.run()
}
"reply AppendEntries with larger term which is received with the message" in {
new FollowerEndPointChecker()
.setActions(Tell(heartbeat(2, 0)), Expect(AppendEntriesResult(2, success = true)))
.run()
}
"accept the first RequestVote and reject the second one, " +
"if two different candidates have the same term, retried messages " +
" should also be accepted" in {
new FollowerEndPointChecker()
.setActions(
Tell(RequestVote(1, 1, 0, 0)),
Expect(RequestVoteResult(1, success = true)),
Tell(RequestVote(1, 2, 0, 0)),
Expect(RequestVoteResult(1, success = false)),
Tell(RequestVote(1, 1, 0, 0)),
Expect(RequestVoteResult(1, success = true)))
.run()
}
"reject RequestVote when the term of the message is smaller than his own" in {
new FollowerEndPointChecker()
.setActions(Tell(RequestVote(-1, 0, 0, 0)), Expect(RequestVoteResult(0, success = false)))
.run()
}
"accept RequestVote when the term of the message is (at least) larger than his own" in {
new FollowerEndPointChecker()
.setActions(Tell(RequestVote(10, 0, 0, 0)), Expect(RequestVoteResult(10, success = true)))
.run()
}
"reject RequestVote when it has already voted" in {
new FollowerEndPointChecker()
.setActions(
Tell(RequestVote(1, 0, 0, 0)),
Expect(RequestVoteResult(1, success = true)),
Reply(RequestVote(1, 1, 0, 0)),
Expect(RequestVoteResult(1, success = false)))
.run()
}
"accept RequestVote after leader is elected " +
"and a RequestVote is received with a higher term" in {
new FollowerEndPointChecker()
.setActions(
Tell(RequestVote(1, 0, 0, 0)),
Expect(RequestVoteResult(1, success = true)),
Tell(RequestVote(10, 1, 0, 0)),
Expect(RequestVoteResult(10, success = true)))
.run()
}
"launch election after election timeout elapsed" in {
val checker = new FollowerEndPointChecker()
checker
.setActions(Expect(RequestVote(1, checker.getId, 0, 0)))
.run()
}
"reset election timeout if AppendEntries msg is received" in {
val electionTime = 1000.millis
val tickTime = 200.millis
val heartbeatNum = 8
val checker = new FollowerEndPointChecker()
checker
.setElectionTime(electionTime)
.setTickTime(tickTime)
.setActions(Within(
tickTime * heartbeatNum + electionTime,
tickTime * heartbeatNum + electionTime * 2,
Rep(
heartbeatNum,
Delay(tickTime, Tell(heartbeat(0, 1))),
Expect(AppendEntriesResult(0, success = true))),
Expect(RequestVote(1, checker.getId, 0, 0))))
.run()
}
"return server status after receiving GetStatus request" in {
val checker = new FollowerEndPointChecker()
val term = 10
val leaderId = 1
checker
.setActions(
Tell(heartbeat(term, leaderId)),
Expect(AppendEntriesResult(term, success = true)),
Tell(GetStatus),
Expect(
Status(checker.getId, term, ServerState.Follower, Some(leaderId), Map(), Map(), 0, 0)))
.run()
}
"never receive heartbeat from another leader" in {
val term = 10
val leaderId = 1
new FollowerEndPointChecker()
.setActions(
Tell(heartbeat(term, leaderId)),
Expect(AppendEntriesResult(term, success = true)),
Tell(heartbeat(term, leaderId + 1)),
FishForMsg { case _: MultiLeaderException => true })
.run()
}
}
"Candidate" should {
"memorize leaderID after becoming follower and receiving heartbeat" in {
val leaderId = 1
val higherTerm = 10
val checker = new CandidateEndPointChecker()
checker
.setActions(
Tell(RequestVote(higherTerm, leaderId, 0, 0)),
Expect(RequestVoteResult(higherTerm, success = true)),
Tell(GetStatus),
Expect(Status(checker.getId, higherTerm, ServerState.Follower, None, Map(), Map(), 0, 0)),
Tell(heartbeat(higherTerm, leaderId)),
Expect(AppendEntriesResult(higherTerm, success = true)),
Tell(GetStatus),
Expect(
Status(
checker.getId,
higherTerm,
ServerState.Follower,
Some(leaderId),
Map(),
Map(),
0,
0)))
.run()
}
"relaunch RequestVote every election time" in {
val electionTimeout = 1.second
val checker = new CandidateEndPointChecker()
.setElectionTime(electionTimeout)
checker
.setActions(
Within(electionTimeout, electionTimeout, Expect(RequestVote(2, checker.getId, 0, 0))),
Within(electionTimeout, electionTimeout, Expect(RequestVote(3, checker.getId, 0, 0))),
Within(electionTimeout, electionTimeout, Expect(RequestVote(4, checker.getId, 0, 0))))
.run()
}
"start a new term if no one wins the election" in { // 1 server vs 1 probe
val checker = new CandidateEndPointChecker()
checker
.setActions(
Reply(RequestVoteResult(1, success = false)),
Expect(RequestVote(2, checker.getId, 0, 0)))
.run()
}
"accept VoteRequest with a higher termA after stepping down to follower" in {
val checker = new CandidateEndPointChecker()
checker
.setActions(
Reply(RequestVoteResult(2, success = false)), // step down
Tell(RequestVote(10, 100, 0, 0)),
Expect(RequestVoteResult(10, success = true)))
.run()
}
"become leader when received messages of majority" in {
val checker = new CandidateEndPointChecker()
checker
.setProbeNum(5)
.setActions(
MajorReply(RequestVoteResult(1, success = true)),
Expect(heartbeat(1, checker.getId)))
.run()
}
"launch election of the next term when only minority granted" in {
val checker = new CandidateEndPointChecker()
checker
.setProbeNum(5)
.setActions(
MinorReply(
RequestVoteResult(1, success = true),
Some(RequestVoteResult(1, success = false))),
Expect(RequestVote(2, checker.getId, 0, 0)))
.run()
}
"become follower when one of the received term in RequestVoteResult is larger than " +
"current term" in {
val checker = new CandidateEndPointChecker()
checker
.setProbeNum(5)
.setActions(
Reply(RequestVoteResult(2, success = false)),
Expect(RequestVote(3, checker.getId, 0, 0)))
.run()
}
"become follower if it receives a RequestVote with term larger than its current term" in {
new CandidateEndPointChecker()
.setActions(
Tell(RequestVote(2, 1, 0, 0)),
Expect(RequestVoteResult(2, success = true)),
Tell(heartbeat(2, 1)),
Expect(AppendEntriesResult(2, success = true)))
.run()
}
"become follower if it receives a AppendEntries with term larger than " +
"its current term" in {
new CandidateEndPointChecker()
.setActions(
Tell(heartbeat(2, 1)),
Expect(AppendEntriesResult(2, success = true)),
Tell(heartbeat(2, 1)),
Expect(AppendEntriesResult(2, success = true)))
.run()
}
"become follower when received term in AppendEntriesResult equal to his own" in {
val checker = new CandidateEndPointChecker()
checker
.setActions(
Tell(heartbeat(1, 0)),
Expect(AppendEntriesResult(1, success = true)),
Expect(RequestVote(2, checker.getId, 0, 0)))
.run()
}
"reject AppendEntries if its term is smaller than current term" in {
new CandidateEndPointChecker()
.setProbeNum(5)
.setActions(Tell(heartbeat(0, 0)), Expect(AppendEntriesResult(1, success = false)))
.run()
}
"return server status after receive GetStatus request" in {
val checker = new CandidateEndPointChecker()
val term = 10
val leaderId = 1
checker
.setActions(
Tell(heartbeat(term, leaderId)),
Expect(AppendEntriesResult(term, success = true)),
Tell(GetStatus),
Expect(
Status(checker.getId, term, ServerState.Follower, Some(leaderId), Map(), Map(), 0, 0)))
.run()
}
}
"leader" should {
"never receive an AppendEntries RPC with the same term" in {
new LeaderEndPointChecker()
.setActions(
Reply(AppendEntriesResult(1, success = true)),
Tell(heartbeat(1, 2)),
FishForMsg { case _: MultiLeaderException => true })
.run()
}
"send heartbeat to every follower every heartbeat interval" in {
val tickTime = 200.millis
val checker = new LeaderEndPointChecker()
checker
.setTickTime(tickTime)
.setElectionTime(tickTime * 2)
.setActions(Rep(
3,
Within(
tickTime,
tickTime * 2,
Reply(AppendEntriesResult(1, success = true)),
Expect(heartbeat(1, checker.getId)),
Reply(AppendEntriesResult(1, success = true)),
Expect(heartbeat(1, checker.getId)))))
.run()
}
"become follower if it receives a RequestVote with term larger than " +
"its current term" in {
val term = 10
new LeaderEndPointChecker()
.setActions(
Tell(RequestVote(term, 1, 0, 0)),
Expect(RequestVoteResult(term, success = true)),
Tell(heartbeat(term, 1)),
Expect(AppendEntriesResult(term, success = true)))
.run()
}
"become follower if the received term of AppendEntriesResult is larger than " +
"current term" in {
val checker = new LeaderEndPointChecker()
checker
.setProbeNum(5)
.setActions(
Reply(AppendEntriesResult(2, success = false)),
Expect(RequestVote(3, checker.getId, 0, 0)))
.run()
}
"continue to distribute heartbeat when AppendEntry requests are rejected" in {
val checker = new LeaderEndPointChecker()
checker
.setProbeNum(5)
.setActions(
MajorReply(
AppendEntriesResult(1, success = false),
Some(AppendEntriesResult(1, success = true))),
Expect(heartbeat(1, checker.getId)),
MajorReply(
AppendEntriesResult(1, success = false),
Some(AppendEntriesResult(1, success = true))),
Expect(heartbeat(1, checker.getId)),
Reply(AppendEntriesResult(1, success = true)))
.run()
}
"continue to distribute heartbeat when some heartbeat acks are not received" in {
val checker = new LeaderEndPointChecker()
checker
.setProbeNum(5)
.setActions(
Expect(heartbeat(1, checker.getId)),
MinorReply(AppendEntriesResult(1, success = false)),
Expect(heartbeat(1, checker.getId)),
MajorReply(AppendEntriesResult(1, success = true)),
Expect(heartbeat(1, checker.getId)),
Reply(AppendEntriesResult(1, success = true)))
.run()
}
"return initialized leader status after receive GetStatus request" in {
val checker = new LeaderEndPointChecker()
checker
.setActions(
Tell(GetStatus),
Expect(
Status(
checker.getId,
1,
ServerState.Leader,
Some(checker.getId),
Map(1 -> 1, checker.getId -> 1),
Map(1 -> 0, checker.getId -> 0),
0,
0)))
.run()
}
}
}
|
package su.svn.hiload.socialnetwork.services.security;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.Authentication;
import org.springframework.security.web.server.DefaultServerRedirectStrategy;
import org.springframework.security.web.server.ServerRedirectStrategy;
import org.springframework.security.web.server.WebFilterExchange;
import org.springframework.security.web.server.authentication.logout.ServerLogoutSuccessHandler;
import org.springframework.security.web.server.savedrequest.ServerRequestCache;
import org.springframework.security.web.server.savedrequest.WebSessionServerRequestCache;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Mono;
import java.net.URI;
/**
 * Redirects the user after a successful logout: back to the URI saved in the
 * session's request cache if one exists, otherwise to the site root.
 */
public class LogoutSuccessHandler implements ServerLogoutSuccessHandler {

    private static final Logger LOG = LoggerFactory.getLogger(LogoutSuccessHandler.class);

    // Fallback redirect target when no saved request is cached.
    // NOTE(review): the name suggests a username page but the value is "/" — confirm intent.
    static final URI locationUsername = URI.create("/");

    // Both collaborators are stateless defaults; final since never reassigned.
    private final ServerRedirectStrategy redirectStrategy = new DefaultServerRedirectStrategy();

    private final ServerRequestCache requestCache = new WebSessionServerRequestCache();

    /**
     * @param webFilterExchange carries the current {@link ServerWebExchange}
     * @param authentication   the authentication being logged out; may be null
     *                         for sessions that never authenticated
     * @return completion of the redirect
     */
    @Override
    public Mono<Void> onLogoutSuccess(WebFilterExchange webFilterExchange, Authentication authentication) {
        ServerWebExchange exchange = webFilterExchange.getExchange();
        // Fix: guard against a null Authentication — some logout flows reach this
        // handler without a principal and the unconditional getName() threw NPE.
        String name = authentication != null ? authentication.getName() : "<anonymous>";
        LOG.debug("authentication.getName(): {} logout", name);
        return this.requestCache.getRedirectUri(exchange)
                .defaultIfEmpty(locationUsername)
                .flatMap(location -> this.redirectStrategy.sendRedirect(exchange, location));
    }
}
|
/**
* Tidory Configuration
* https://tidory.com/docs/configuration/
*/
module.exports = {
// Tistory session token used by the tooling (null until configured).
// NOTE(review): exact semantics come from the Tidory docs linked above — confirm.
ts_session: null,
// Target blog URL (null until configured).
url: null,
/**
* Preview
*/
preview: {
/**
* homeType
*
* NONE
* COVER
*/
homeType: 'NONE',
/**
* Preview Mode
*
* index
* entry
* category
* tag,
* location
* media,
* guestbook
*/
mode: 'index'
},
/**
* Template aliases
*/
alias: {
'@': 'assets',
'~views': 'views'
},
/**
* Webpack Configuration
*
* Mutates the given config in place: prepends an eslint-loader pre-step for
* project .js files and adds a 'vendor' entry point ahead of existing ones.
*
* @param {object} webpackConfig
*/
extends (webpackConfig) {
webpackConfig.module.rules = [
{
enforce: 'pre',
test: /\.js$/,
exclude: /node_modules/,
loader: 'eslint-loader'
},
...webpackConfig.module.rules
]
webpackConfig.entry = {
vendor: './assets/vendor.js',
...webpackConfig.entry
}
}
}
|
//! This examples plots what FixedPartition does with a fixed sample.
//!
//! # Remarks
//!
//! Needs `gnuplot` installed.
use approx_entropy::{
count_dup, DirectEstimator, Estimator, FixedPartition, NaiveEstimator, SamplingMethod,
};
use preexplorer::errors::PreexplorerError;
use preexplorer::prelude::*;
use rand::distributions::{Distribution, Uniform};
const SUPPORT: usize = 1 << 12;
const SAMPLE_SIZE: usize = 1 << 6;
/// Draws a uniform sample, hands it to `FixedPartition`, prints several
/// entropy estimates for comparison and plots the naive per-subsample
/// estimates against 1/n. Needs `gnuplot` installed.
fn main() -> Result<(), PreexplorerError> {
    // Uniform sample over [0, SUPPORT).
    let samples: Vec<usize> = Uniform::from(0..SUPPORT)
        .sample_iter(rand::thread_rng())
        .take(SAMPLE_SIZE)
        .collect();

    // Subsample schedule: sizes, repetitions per size, and polynomial degree
    // for the extrapolation.
    let size_subsamples = [SAMPLE_SIZE / 2, SAMPLE_SIZE / 4, SAMPLE_SIZE / 8];
    let samples_rep = [1, 1, 2];
    let degree = 2;
    let mut partition =
        FixedPartition::new(&samples, &size_subsamples, &samples_rep, degree).unwrap();

    // Naive estimates per subsample — exactly the points being extrapolated.
    let (sizes, values): (Vec<_>, Vec<_>) = partition.naive_entropies().into_iter().unzip();

    println!(
        "Final estimation: {:?}",
        Estimator::from(partition.clone()).entropy()
    );
    println!(
        "Final direct estimation: {:?}",
        DirectEstimator::from(partition).entropy()
    );
    println!(
        "Naive entropy: {:?}",
        NaiveEstimator::new(&count_dup(&samples)).unwrap().entropy()
    );
    // Ground truth for a uniform distribution: ln(support size).
    println!("Real: {}", (SUPPORT as f64).ln());

    // Plot estimate vs 1/n as a point cloud.
    let inverse_sizes = sizes.iter().map(|&s| (s as f64).recip());
    (inverse_sizes, values)
        .preexplore()
        .set_title("Naive entropy subsamples used by FixedPartition")
        .set_xlabel("1/n")
        .set_ylabel("entropy estimation")
        .set_style("points")
        .plot("fixed_partition")?;
    Ok(())
}
|
<?php include "includes/web_header.php" ?>
<div id="wrapper">
<!-- Sidebar -->
<div id="content-wrapper">
<div class="container-fluid">
<form method="post">
<input type="submit" value="subbbb" name="go" placeholder="go">
</form>
<?php
// Interpreter and helper script for the on-demand job below.
$python = '/usr/bin/python3';
$pyscript1 = 'test.py';
if (isset($_POST['go'])) {
    // Fix: use the previously unused $pyscript1 instead of a duplicated
    // hard-coded name, and run through shell_exec() with escapeshellcmd()
    // so shell metacharacters cannot alter the command.
    $cmd = shell_exec(escapeshellcmd($python . ' ' . $pyscript1));
    echo $cmd;
}
/*
 * Dead booking-count code kept for reference. Fix: it previously sat inside
 * an HTML comment and was therefore shipped verbatim to every client; a PHP
 * comment is never emitted. NOTE(review): it interpolates request values
 * straight into SQL — rewrite with prepared statements before reviving.
 *
 * $totalTurf;
 * $highest = 0;
 * $display_query = "SELECT * from BookingDetails
 * where (turfName='$turfBookName' && turfArea='$turfArea' && date = '$bookingdate')";
 * $result = mysqli_query($connection, $display_query);
 * $rowcount = mysqli_num_rows($result);
 * $clash = "SELECT * FROM TurfDetails where turfName='$turfBookName' && turfArea='$turfArea'";
 * $clash_result = mysqli_query($connection, $clash);
 * while ($clash_row = mysqli_fetch_assoc($clash_result)) {
 *     echo $totalTurf = $clash_row['totalGrounds'];
 * }
 * while ($row = mysqli_fetch_assoc($result)) {
 *     echo $count = $row['count'];
 *     if ($highest <= $count) {
 *         $highest = $count;
 *     }
 * }
 * echo $inc = $highest + 1;
 * $update_count = "UPDATE BookingDetails SET count = '$inc' ";
 * if ($highest > $totalTurf) {
 *     echo "<script>
 *     alert('Booking already exists choose another time slot ')
 *     </script> ";
 * }
 */
?><!-- /.container-fluid -->
<!-- Sticky Footer -->
<?php include "includes/web_footer.php" ?>
|
import { DefaultTheme } from "styled-components"
// Global color palette consumed through styled-components' ThemeProvider.
// NOTE(review): DefaultTheme is presumably augmented in a styled.d.ts module
// declaration elsewhere in the project — keep this object's shape in sync.
const theme: DefaultTheme = {
colors: {
primary: "#0EDEFF",
// The primary color again as an "r, g, b" triple for rgba(...) usage.
rgbPrimary: "14, 222, 255",
background: "#00080A",
// The background color as an "r, g, b" triple for rgba(...) usage.
rgbBackground: "0, 8, 10",
text: "#F3F3F3",
darkText: "#EEE",
cardBackground: "#091619",
},
}
export default theme
|
package com.barisatalay.yorkiewallet.ui.base
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.databinding.DataBindingUtil
import androidx.databinding.ViewDataBinding
import androidx.fragment.app.Fragment
import com.barisatalay.yorkiewallet.BR
import com.barisatalay.yorkiewallet.util.extension.observeNotNull
import com.barisatalay.yorkiewallet.view.LoadingDialog
/**
 * Base class for data-binding fragments: inflates [getLayoutId], binds the
 * view model to the layout's `viewModel` variable and shows/hides a loading
 * dialog driven by the view model's `isDataLoading`.
 */
abstract class BaseFragment<DB : ViewDataBinding> : Fragment() {
    /** Layout resource id inflated in [onCreateView]. */
    abstract val getLayoutId: Int

    /** View model bound to the layout and observed for loading state. */
    abstract fun getViewModelForBase(): BaseViewModel

    private val loadingDialog: LoadingDialog by lazy { LoadingDialog(requireActivity()) }

    // Nullable backing field so the binding can be released with the view;
    // the non-null accessor below is only valid between onCreateView and
    // onDestroyView.
    private var _binding: DB? = null
    protected val binding: DB
        get() = _binding!!

    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View? {
        _binding = DataBindingUtil.inflate(inflater, getLayoutId, container, false)
        binding.setVariable(BR.viewModel, getViewModelForBase())
        binding.lifecycleOwner = viewLifecycleOwner
        return binding.root
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)
        subscribeLoading()
    }

    override fun onDestroyView() {
        // Fix: the binding was never released, leaking the view hierarchy for
        // fragments kept on the back stack; the _binding!!/nullable-backing
        // pattern requires clearing it here.
        _binding = null
        super.onDestroyView()
    }

    private fun subscribeLoading() {
        // NOTE(review): observing with `this` (fragment lifecycle) rather than
        // viewLifecycleOwner — confirm observeNotNull deduplicates observers
        // across onCreateView calls.
        getViewModelForBase().isDataLoading.observeNotNull(this) {
            when (it) {
                true -> if (isAdded) loadingDialog.showLoading()
                false -> loadingDialog.hideLoading()
            }
        }
    }
}
|
#!/bin/bash
# Wrapper around the Datadog agent: records its own PID on start so a later
# 'stop' can signal both this wrapper and supervisord.
case "$1" in
start)
# Record this wrapper's PID for the stop branch.
echo $$ > /home/deploy/datadog/.datadog-agent/run/datadog_wrapper.pid
/bin/bash /home/deploy/datadog/.datadog-agent/bin/agent start
;;
stop)
# $(...) replaces legacy backticks; quoting keeps kill's argument intact
# even if the pid file is empty or contains whitespace.
kill "$(cat /home/deploy/datadog/.datadog-agent/run/datadog_wrapper.pid)"
kill "$(cat /home/deploy/datadog/.datadog-agent/run/supervisord.pid)"
;;
*)
echo "usage: datadog_wrapper {start|stop}" ;;
esac
exit 0
|
<?php /* Portfolio grid item: thumbnail linking to a project page. Expects $url, $image and $name in scope. */ ?>
<div class="col-md-4 col-sm-6">
<a href="<?php echo e($url); ?>">
<img class="img-responsive img-portfolio img-hover" src="<?php echo e($image); ?>" alt="<?php echo e($name); ?>">
</a>
</div>
|
package repolizer.adapter.network.retrofit.api
import okhttp3.RequestBody
import repolizer.repository.util.QueryHashMap
import retrofit2.Call
import retrofit2.http.*
/**
 * Generic Retrofit endpoints used by the network adapter. Every call takes
 * the full relative URL through an encoded `@Path` placeholder, per-request
 * headers via `@HeaderMap`, query parameters via `@QueryMap`, and returns the
 * raw response body as a String.
 */
interface NetworkInterface {
@GET("{url}")
fun get(
@HeaderMap headerMap: Map<String, String>,
@Path(value = "url", encoded = true) url: String,
@QueryMap map: QueryHashMap
): Call<String>
@POST("{url}")
fun post(
@HeaderMap headerMap: Map<String, String>,
@Path(value = "url", encoded = true) url: String,
@QueryMap map: QueryHashMap,
@Body raw: RequestBody?
): Call<String>
@PUT("{url}")
fun put(
@HeaderMap headerMap: Map<String, String>,
@Path(value = "url", encoded = true) url: String,
@QueryMap map: QueryHashMap,
@Body raw: RequestBody?
): Call<String>
@PATCH("{url}")
fun patch(
@HeaderMap headerMap: Map<String, String>,
@Path(value = "url", encoded = true) url: String,
@QueryMap map: QueryHashMap,
@Body raw: RequestBody?
): Call<String>
// Plain DELETE: Retrofit's @DELETE does not allow a request body.
@DELETE("{url}")
fun delete(
@HeaderMap headerMap: Map<String, String>,
@Path(value = "url", encoded = true) url: String,
@QueryMap map: QueryHashMap
): Call<String>
// DELETE variant with a body, which requires the explicit @HTTP form.
@HTTP(method = "DELETE", path = "{url}", hasBody = true)
fun delete(
@HeaderMap headerMap: Map<String, String>,
@Path(value = "url", encoded = true) url: String,
@QueryMap map: QueryHashMap,
@Body raw: RequestBody?
): Call<String>
}
|
using UnityEngine;
using UnityEngine.AI;
[System.Obsolete]
public class Bot : MonoBehaviour
{
    // Reused hit buffer so clicks allocate nothing per frame.
    private RaycastHit _hitInfo;

    private NavMeshAgent _navMeshAgent;

    private Camera _mainCam;

    private void Awake()
    {
        _navMeshAgent = GetComponent<NavMeshAgent>();
        // Cached once here; Camera.main performs a scene lookup.
        _mainCam = Camera.main;
    }

    /// Click-to-move: raycast from the camera through the cursor and send the
    /// NavMeshAgent to whatever point was hit.
    private void Update()
    {
        if (!Input.GetMouseButtonDown(0))
        {
            return;
        }

        Ray ray = _mainCam.ScreenPointToRay(Input.mousePosition);
        if (Physics.Raycast(ray, out _hitInfo))
        {
            _navMeshAgent.SetDestination(_hitInfo.point);
        }
    }
}
|
// Observable Library
// Copyright (c) 2016 David Capello
//
// This file is released under the terms of the MIT license.
// Read LICENSE.txt for more information.
#include "obs/signal.h"
#include "test.h"
// Shared signal plus a scoped connection that the slot itself reassigns.
obs::signal<void()> sig;
obs::scoped_connection conn;
void func() {
// Reconnecting from inside the slot replaces 'conn' (disconnecting the
// node currently being iterated) while 'sig' is still emitting.
conn = sig.connect(func);
// This second connection produced an infinite loop in an old
// implementation of iterators where we skipped removed nodes in the
// same operator*(). So then the iterator::operator!=() was always
// true.
conn = sig.connect(func);
}
// Regression entry point: emitting the signal must terminate.
int main() {
conn = sig.connect(func);
sig();
}
|
import unittest
import unittest.mock
from nssift.grind.netstats.bundler import Bundler
from nssift.grind.netstats import gauge
class TestBundler(unittest.TestCase):
"""Validate the bunlded updates of the statics counters."""
def _makemock(self, value):
m = unittest.mock.Mock()
m.normalize = unittest.mock.Mock(return_value=value)
return m
def test_normalize(self):
gauges = [self._makemock(1.5),
self._makemock(3.2),
self._makemock(2.8)]
bundler = Bundler(gauges)
normalized = list(bundler.normalize())
# Ensure the normalized returned values.
self.assertEqual(normalized, [1.5, 3.2, 2.8])
for gauge in gauges:
gauge.normalize.assert_called_once_with()
|
import 'package:shared_preferences/shared_preferences.dart';
class PrefUtils {
static Future<bool> setBool(String key,
{bool value, SharedPreferences prefs}) async {
return (prefs ?? await SharedPreferences.getInstance()).setBool(key, value);
}
static Future<bool> getBool(
String key, {
SharedPreferences prefs,
bool def = false,
}) async {
try {
return (prefs ?? await SharedPreferences.getInstance()).getBool(key) ??
def;
} on Exception catch (_) {
return def;
}
}
}
|
<?php
// Bitrix language strings (Russian) for the eBay sale helper UI.
// Fix: the full '<?php' open tag replaces the short '<?' tag, which only
// works when the short_open_tag ini setting is enabled.
$MESS["SALE_EBAY_HLP_FLAG_CANCELED"] = "Флаг отмены";
$MESS["SALE_EBAY_HLP_FLAG_DELIVERY"] = "Флаг доставки";
$MESS["SALE_EBAY_HLP_FLAG_PAYED"] = "Флаг оплаты";
$MESS["SALE_EBAY_HLP_FLAG_DEDUCTED"] = "Флаг отгрузки";
$MESS["SALE_EBAY_HLP_STATUS"] = "Статус";
$MESS["SALE_EBAY_HLP_CATEGORY_PROPS"] = "Свойства категории";
$MESS["SALE_EBAY_HLP_OFFERS_PROPS"] = "Свойства торговых предложений";
$MESS["SALE_EBAY_HLP_NOT_USE"] = "Не использовать";
# frozen_string_literal: true
# PieceHead Label: a label attached to a piece head, scoped to a company.
class PhLabel < ActiveRecord::Base
  belongs_to :company
  belongs_to :piece_head
end
|
#if !DISABLE_DHT
using System;
using System.Collections.Generic;
using System.Text;
using Xunit;
using System.Net.BitTorrent.Dht.Listeners;
using System.Net.BitTorrent.Dht.Messages;
using System.Net;
namespace System.Net.BitTorrent.Dht
{
    /// <summary>
    /// In-memory <see cref="DhtListener"/> test double: it never touches the
    /// network and lets tests inject incoming messages directly.
    /// </summary>
    internal class TestListener : DhtListener
    {
        private bool started;
        public TestListener()
            : base(new IPEndPoint(IPAddress.Loopback, 0))
        {
        }
        /// <summary>True after <see cref="Start"/> and before <see cref="Stop"/>.</summary>
        public bool Started
        {
            get { return started; }
        }
        /// <summary>Outgoing sends are swallowed; nothing goes on the wire.</summary>
        public override void Send(byte[] buffer, IPEndPoint endpoint)
        {
            // Do nothing
        }
        /// <summary>
        /// Simulates a datagram arriving from <paramref name="endpoint"/> by
        /// queueing the encoded message on the DHT engine's main loop.
        /// </summary>
        public void RaiseMessageReceived(Message message, IPEndPoint endpoint)
        {
            DhtEngine.MainLoop.Queue(delegate
            {
                OnMessageReceived(message.Encode(), endpoint);
            });
        }
        public override void Start()
        {
            started = true;
        }
        public override void Stop()
        {
            started = false;
        }
    }
}
#endif
|
package com.zihler.wiki.domain.values;
import java.util.regex.Pattern;
public enum Patterns {
NON_CHARACTER_TOKEN_REGEX("[^a-zA-Z0-9#]"),
REFERENCE_TAG_MATCHING_REGEX("(.*(?<!.)#[A-Z0-9]+[a-z0-9][^ ]*)+"),
UPPER_CASE_LETTERS_REGEX("[A-Z]"),
NUMBERS("[0-9]");
private String regexPattern;
Patterns(String regexPattern) {
this.regexPattern = regexPattern;
}
@Override
public String toString() {
return regexPattern;
}
public Pattern toPattern() {
return Pattern.compile(regexPattern);
}
}
|
import 'package:cloud_firestore/cloud_firestore.dart';
/// Firestore data-access gateway: wraps raw `cloud_firestore` calls and
/// flattens documents (plus selected sub-collections) into plain maps.
///
/// NOTE(review): class name is spelled "Getaway" (presumably "Gateway");
/// renaming would break callers, so it is left as-is.
class FirestoreServiceGetaway {
  FirebaseFirestore _firebaseFirestore = FirebaseFirestore.instance;
  /// Merges a document with its `followers`/`followings` sub-collections
  /// into one map; returns null when the document does not exist.
  Future<Map<String, dynamic>> _mergeData(
      DocumentSnapshot documentSnapshot) async {
    if (!documentSnapshot.exists) {
      return null;
    }
    QuerySnapshot querySnapshotFollowers =
        await documentSnapshot.reference.collection('followers').get();
    QuerySnapshot querySnapshotFollowings =
        await documentSnapshot.reference.collection('followings').get();
    return {
      ...{
        'uid': documentSnapshot.id,
        'reference': documentSnapshot.reference,
        'followersList':
            querySnapshotFollowers.docs.map((e) => e.data()).toList(),
        'followingsList':
            querySnapshotFollowings.docs.map((e) => e.data()).toList(),
      },
      // Document fields are spread last, so they win on key collisions.
      ...documentSnapshot.data(),
    };
  }
  /// Resolves a `{'ref': DocumentReference}` map and merges its data.
  Future<Map<String, dynamic>> getReference(dynamic reference) async {
    DocumentSnapshot documentSnapshot = await reference['ref'].get();
    return await _mergeData(documentSnapshot);
  }
  /// Updates `collection/document` with [data].
  Future<void> update(String collection, String document, dynamic data) async {
    await _firebaseFirestore.collection(collection).doc(document).update(data);
  }
  /// Live stream of a document (metadata changes included), merged with its
  /// followers/followings sub-collections.
  Stream<Map<String, dynamic>> getSnapshotById(
      String collection, String document) {
    Stream<DocumentSnapshot> documentSnapshot = _firebaseFirestore
        .collection(collection)
        .doc(document)
        .snapshots(includeMetadataChanges: true);
    return documentSnapshot.asyncMap((e) async => await _mergeData(e));
  }
  /// One-shot fetch of a document, merged with its sub-collections.
  Future<Map<String, dynamic>> getById(
      String collection, String document) async {
    DocumentSnapshot documentSnapshot =
        await _firebaseFirestore.collection(collection).doc(document).get();
    return await _mergeData(documentSnapshot);
  }
  /// Lists documents of [collection]; when [listIds] is non-empty, only
  /// those document ids are fetched.
  /// NOTE(review): Firestore `whereIn` accepts at most 10 values — callers
  /// must keep [listIds] within that limit; confirm at call sites.
  Future<List<Map<String, dynamic>>> getList(String collection,
      {List<String> listIds = const []}) async {
    QuerySnapshot querySnapshot;
    CollectionReference collectionReference =
        _firebaseFirestore.collection(collection);
    if (listIds.length > 0) {
      querySnapshot = await collectionReference
          .where(FieldPath.documentId, whereIn: listIds)
          .get();
    } else {
      querySnapshot = await collectionReference.get();
    }
    return querySnapshot.docs
        .map((e) => {
              ...{'uid': e.id},
              ...e.data()
            })
        .toList();
  }
  /// Same as [update]; kept for API compatibility.
  Future<void> updateByDocument(
      String collection, String document, dynamic data) async {
    await _firebaseFirestore.collection(collection).doc(document).update(data);
  }
  /// Stores `{'ref': data['reference']}` under
  /// `collection/document/subCollection/data['uid']`.
  Future<void> addDocumentInCollection(String collection, String subCollection,
      String document, Map<String, dynamic> data) async {
    await _firebaseFirestore
        .collection(collection)
        .doc(document)
        .collection(subCollection)
        .doc(data['uid'])
        .set({'ref': data['reference']});
  }
  /// Deletes `collection/document/subCollection/docId`.
  Future<void> removeDocumentInCollection(String collection,
      String subCollection, String document, String docId) async {
    await _firebaseFirestore
        .collection(collection)
        .doc(document)
        .collection(subCollection)
        .doc(docId)
        .delete();
  }
  /// Updates an arbitrary document reference with [data].
  Future<void> updateReference(dynamic ref, dynamic data) async {
    await ref.update(data);
  }
  /// True when `collection/document/subCollection/docId` exists.
  Future<bool> documentInCollectionExists(String collection,
      String subCollection, String document, String docId) async {
    DocumentSnapshot documentSnapshot = await _firebaseFirestore
        .collection(collection)
        .doc(document)
        .collection(subCollection)
        .doc(docId)
        .get();
    return documentSnapshot.exists;
  }
  // TODO: Rename getListFormDocAndSubDoc => getListFromDocAndSubDoc
  /// Live, `sendAt`-ordered stream of the documents in
  /// `collection/document/subDocument`.
  Stream<List<Map<String, dynamic>>> getListFormDocAndSubDoc(
      String collection, String document, String subDocument) {
    Stream<QuerySnapshot> querySnapshot = _firebaseFirestore
        .collection(collection)
        .doc(document)
        .collection(subDocument)
        // TODO: Make this more generic (the sort field is hard-coded).
        .orderBy('sendAt')
        .snapshots();
    return querySnapshot.asyncMap(
      (event) => event.docs
          .map((e) => {
                ...{
                  'uid': e.id,
                },
                ...e.data()
              })
          .toList(),
    );
  }
  /// Builds a "chat list" view: for each chat referenced from
  /// `collectionJoin/document/chatRefs`, fetches the latest message plus the
  /// other participant, then sorts newest-first. Chats without messages are
  /// dropped.
  Future<List<Map<String, dynamic>>> getListJoinCollections(
      String collection, String collectionJoin, String document) async {
    DocumentSnapshot user =
        await _firebaseFirestore.collection(collectionJoin).doc(document).get();
    QuerySnapshot chatRefs = await user.reference.collection('chatRefs').get();
    List<Map<String, dynamic>> list = await Future.wait(
      chatRefs.docs.map((e) async {
        QuerySnapshot messages = await _firebaseFirestore
            .collection(e['ref'])
            .orderBy('sendAt', descending: true)
            .limit(1)
            .get();
        // NOTE(review): assumes the chat path's last segment is the other
        // user's id — confirm against how chatRefs are written.
        List<String> s = e['ref'].split('/');
        DocumentSnapshot userTo = await _firebaseFirestore
            .collection(collectionJoin)
            .doc(s.last)
            .get();
        if (messages.docs.length == 0) {
          return null;
        }
        return {
          ...{
            'uid': messages.docs.last.id,
            'sendAt': messages.docs.last.get('sendAt'),
            'user': {
              ...{'uid': userTo.id},
              ...userTo.data(),
            }
          },
          ...messages.docs.last.data()
        };
      }),
    );
    List<Map<String, dynamic>> newList = list.where((element) {
      return element != null;
    }).toList()
      ..sort((a, b) => b['sendAt'].compareTo(a['sendAt']));
    return newList;
  }
  /// Adds [data] under `collection/document/subDocument` and returns that
  /// collection path.
  Future<String> saveFromDocAndSubDoc(String collection, String document,
      String subDocument, dynamic data) async {
    String path = '$collection/$document/$subDocument';
    await _firebaseFirestore.collection(path).add(data);
    return path;
  }
}
|
/// ReflectorDispatcher. It does the Dispatcher thing using reflector
library reflector_dispatcher;
export 'src/reflector_dispatcher_base.dart';
|
package com.yashlan.aplikasigithubuser.util
import android.app.Activity
import android.content.Context
import android.content.Intent
import android.os.Handler
import android.os.Looper
import android.widget.Toast
import com.yashlan.aplikasigithubuser.R
import com.yashlan.aplikasigithubuser.ui.activity.ListUserActivity
import java.util.*
/**
 * Helpers for switching and restoring the app locale at runtime.
 *
 * Deliberately uses the deprecated `Configuration.locale` /
 * `updateConfiguration` APIs (see @Suppress) for older API levels.
 */
class LocalizationUtil {
    @Suppress("DEPRECATION")
    companion object {
        private const val LANGUAGE_PREFS = "LANGUAGE_PREFS"
        private const val DEFAULT_LANGUAGE = "en"

        /**
         * Applies [languageCode] to the app configuration, persists it, and
         * relaunches [ListUserActivity] after a short delay so the new
         * resources take effect everywhere.
         */
        fun setLanguage(languageCode: String, activity: Activity) {
            try {
                val locale = Locale(languageCode)
                Locale.setDefault(locale)
                val config = activity.baseContext.resources.configuration
                config.locale = locale
                activity.baseContext.resources.updateConfiguration(
                    config,
                    activity.baseContext.resources.displayMetrics
                )
                val sharedPref = activity.getSharedPreferences(LANGUAGE_PREFS, Context.MODE_PRIVATE)
                // androidx.core `edit {}` applies asynchronously, same as the
                // previous with(edit()) { putString(...).apply() } chain.
                sharedPref.edit {
                    putString(LANGUAGE_PREFS, languageCode)
                }
                Toast.makeText(
                    activity.applicationContext,
                    activity.resources.getString(R.string.language_change),
                    Toast.LENGTH_LONG
                ).show()
                Handler(Looper.getMainLooper()).postDelayed({
                    val intent = Intent(activity, ListUserActivity::class.java)
                    activity.startActivity(intent)
                }, 1000)
            } catch (e: Exception) {
                // Was `e.message` — a no-op expression that silently dropped
                // the error. Stay best-effort but record what went wrong.
                e.printStackTrace()
            }
        }

        /** Restores the previously saved language (default "en") on startup. */
        fun loadLanguage(activity: Activity) {
            try {
                val sharedPref = activity.getSharedPreferences(LANGUAGE_PREFS, Context.MODE_PRIVATE)
                val language = sharedPref.getString(LANGUAGE_PREFS, DEFAULT_LANGUAGE).toString()
                val locale = Locale(language)
                Locale.setDefault(locale)
                val config = activity.baseContext.resources.configuration
                config.locale = locale
                activity.baseContext.resources.updateConfiguration(
                    config,
                    activity.baseContext.resources.displayMetrics
                )
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
    }
}
|
import * as fs from 'fs-extra';
import * as tmp from 'tmp';
import chai from 'chai';
import chaiAsPromised from 'chai-as-promised';
import { EditorView, InputBox, VSBrowser, WebDriver, WebView, By } from 'vscode-extension-tester';
import { dismissNotifications, CommonUICreator, notificationExists } from '../../util/common';
import { getCommonAlloyProjectDirectory } from '../../../common/utils';
import { basename, join } from 'path';
describe('Keystore creation', function () {
	this.timeout(30000);
	chai.use(chaiAsPromised);
	const { expect } = chai;
	const projectDirectory = getCommonAlloyProjectDirectory();
	let browser: VSBrowser;
	let driver: WebDriver;
	let tempDirectory: tmp.DirResult;
	let creator: CommonUICreator;
	let webview: WebView|undefined;
	before(async function () {
		this.timeout(180000);
		browser = VSBrowser.instance;
		driver = browser.driver;
		const editorView = new EditorView();
		await editorView.closeAllEditors();
		await browser.waitForWorkbench();
		tempDirectory = tmp.dirSync();
		creator = new CommonUICreator(browser);
		await dismissNotifications();
		await fs.copy(projectDirectory, tempDirectory.name);
		await browser.openResources(tempDirectory.name);
		await creator.waitForGetStarted();
	});
	afterEach(async () => {
		if (webview) {
			await webview.switchBack();
			webview = undefined;
		}
	});
	it('should create a keystore', async () => {
		await creator.workbench.executeCommand('Titanium: Create keystore');
		const input = await InputBox.create();
		await input.setText(basename(tempDirectory.name));
		await input.confirm();
		await creator.waitForEditorTab('Create Keystore');
		webview = new WebView();
		await webview.switchToFrame();
		// Every WebElement interaction returns a promise — await each one so
		// failures surface here instead of as unhandled rejections racing the
		// rest of the test.
		await (await webview.findWebElement(By.id('password'))).sendKeys('apassword');
		const validation = await webview.findWebElement(By.id('confirmPasswordValidation'));
		// `eventually` yields a promise: it must be awaited or the assertion
		// result is silently dropped.
		await expect(validation.getText()).to.eventually.include('Keystore password and confirmation do not match');
		await (await webview.findWebElement(By.id('confirmPassword'))).sendKeys('apassword');
		await (await webview.findWebElement(By.id('alias'))).sendKeys('tester');
		await (await webview.findWebElement(By.id('name'))).sendKeys('Tester Test');
		await (await webview.findWebElement(By.id('orgUnit'))).sendKeys('Tester & Test Co.');
		await (await webview.findWebElement(By.id('org'))).sendKeys('Testing');
		await (await webview.findWebElement(By.id('city'))).sendKeys('Testing');
		await (await webview.findWebElement(By.id('state'))).sendKeys('Testing');
		await (await webview.findWebElement(By.id('country'))).sendKeys('TE');
		await driver.sleep(2500);
		await (await webview.findWebElement(By.id('buttonFinish'))).click();
		await driver.wait(() => notificationExists('Keystore created successfully'), 5000);
		expect(fs.pathExistsSync(join(tempDirectory.name, 'keystore'))).to.equal(true, 'Keystore did not get created');
	});
});
|
import Vue from 'vue' // Vue core
import Axios from 'axios' // AJAX communication module
import VueAxios from 'vue-axios' // vue wrapper for axios
import VueRouter from 'vue-router' // routing module
import layout from './layout.vue' // main page frame (layout)
// Load check (debug)
// console.log("Vue :",Vue)
// console.log("Axios :",Axios)
// console.log("VueRouter :",VueRouter)
// console.log("layout :",layout)
// window.rbl=rbl // for debugging
// console.log("rbl.toggle : ",rbl.toggleClass)
// console.log("this : ",this)
// Register the routing library with Vue
Vue.use(VueRouter)
// Load the pages used for routing
import Contents from './components/contents.vue'
import Join from './components/join.vue'
import Write from './components/write.vue'
const routes = [
    { path: '/', component: Contents },
    { path: '/join', component: Join},
    { path: '/write', component: Write},
    // Catch-all fallback route for unknown paths
    { path: '*', component: {template: '<div style="text-align: center">page not found</div>'}},
]
// Create a new router from the configured routes
const router = new VueRouter({
    routes
})
// Register the axios library with Vue
Vue.use(VueAxios, Axios)
// axios test
// console.log("initiating axios")
// var api = parrot.server_dir+'/post/'
// Vue.axios.get(api).then((response) => {
//     console.log(response.data)
// })
// Global event bus for cross-component communication
window.eventbus=new Vue()
// Start Vue
new Vue({
    el: '#app',
    template: '<layout/>',
    components: { layout },
    props: ['axios', 'parrot'],
    router // the registered router
})
|
#!/bin/sh
#
# Pack a project directory into a Stack .hsfiles template: text files are
# inlined with the project name replaced by {{name}}, everything else is
# base64-encoded. Build artefacts and secrets are excluded.
set -e
input_dir="$1"
if [ -z "$input_dir" ]; then
	echo "Usage: $0 INPUT_DIR"
	exit 1
fi
# Template name = last path component of INPUT_DIR (trailing slash stripped).
# All expansions are quoted so paths with spaces survive word splitting.
name=$(echo "$input_dir" | sed -e 's|/$||' -e 's|.*/\(.*\)|\1|')
template_file=$name.hsfiles
test -f "$template_file" && rm "$template_file"
# IFS= and -r keep leading whitespace and backslashes in filenames intact.
find "$input_dir" -type f | sed -e '/\/stack.yaml/d' \
                                -e '/\.cabal/d' \
                                -e '/\/\.stack-work\//d' \
                                -e '/\/yesod-devel\//d' \
                                -e '/\/client_session_key.aes/d' | LC_ALL=C sort | while IFS= read -r input_file; do
    filename="${input_file#*$name/}" # with everything, up to and including first $name stripped out
    if file "$input_file" | grep -q \
        -e 'empty$' \
        -e 'ASCII text' \
        -e 'Unicode text' \
        -e 'Scalable Vector Graphics image$';
    then
        echo "{-# START_FILE $filename #-}" >> "$template_file"
        sed "s/$name/{{name}}/g" "$input_file" >> "$template_file"
    else
        echo "{-# START_FILE BASE64 $filename #-}" >> "$template_file"
        base64 "$input_file" >> "$template_file"
    fi
    echo >> "$template_file"
done
|
# Class session 2022-03-15
# Lists
rm(list = ls())
A <- matrix(1:6, 3, 2)
m1 <- rowMeans(A)
m2 <- colMeans(A)
minA <- min(A)
maxA <- max(A)
nomb <- c("col1", "col2", "col3")
# Logical matrix: entries more than one sd above the overall mean
cond <- A > mean(A) + sd(A)
cond
A
# Unnamed list of heterogeneous results
lista <- list(A, m1, m2, minA, maxA, nomb, cond)
lista
# Same list, but with named components for readable access
lista <- list(
  matriz = A, med_fil = m1, med_col = m2, minimo = minA, maximo = maxA,
  nombres = nomb, cond = cond
)
lista[["matriz"]]
lista$matriz
class(lista)
# lapply returns a list; sapply simplifies to a vector when possible
lapply(lista, class)
lapply(lista, mean)
sapply(lista, class)
sapply(lista, mean)
unlist(lista)
# Single brackets keep a (sub)list; double brackets extract the element
class(lista[1])
class(lista[[1]])
x <- rnorm(100)
# hist() invisibly returns a list describing the histogram
res <- hist(x)
res
# Densities integrate to 1 over the bins
sum(diff(res$breaks) * res$density)
#
# --- DATA FRAMES ----------------------------------------------------------------
#
rm(list = ls())
dni <- c("22456715A", "22456716B", "22456717C", "22456718D", "22456719E")
edad <- c(45, 35, 52, 60, 25)
sexo <- factor(c("Hombre", "Mujer", "Hombre", "Mujer", "Hombre"))
estudios <- factor(c("superior", "superior", "profesional", "medio", "profesional"))
salario <- c(2500, 1500, 2000, 1200, 1800)
datos <- data.frame(dni, edad, sexo, estudios, salario)
datos
class(datos)
names(datos)
str(datos)
summary(datos)
datos[3] # Treat as a list
datos[3, ] # Treat as a matrix
datos[, 3]
datos[1:2, c("edad", "salario")]
datos[, c(-1, -3, -4)]
# Simple linear regression: salary on age
model <- lm(datos$salario ~ datos$edad)
model
plot(model)
sample(datos, size = 2) # same as
datos[sample(seq_len(length(datos)), 2)]
mtcars
head(mtcars)
tail(mtcars)
subset(mtcars, vs == 0 & hp > 90)
subset(mtcars, vs == 0 & hp > 90, select = c(-vs))
# Convert the mass from pounds to kg:
transform(mtcars, wt = wt / 2.2046)
transform(mtcars, wtkg = wt / 2.2046)
# --------------------------------------------------------------------------------
# within: evaluate the block in the data frame and return the modified copy
mtcars2 <- within(mtcars, {
  vs <- factor(vs, labels = c("V", "S"))
  am <- factor(am, labels = c("automatic", "manual"))
  cyl <- ordered(cyl)
  gear <- ordered(gear)
  carb <- ordered(carb)
})
mtcars2
with(mtcars2, boxplot(mpg ~ vs))
with(subset(mtcars2, vs == "V"), hist(mpg, main = "vs=V"))
with(subset(mtcars2, vs == "S"), hist(mpg, main = "vs=S"))
# --------------------------------------------------------------------------------
# Type checking and casting
x <- c(1, 2, 2, 1, 2, 1, 1, 1)
is.numeric(x)
is.factor(x)
x <- as.factor(x)
x
class(x)
as.matrix(1:2)
as.complex(1:2)
A <- matrix(1:4, 2, 2)
is.numeric(A)
as.numeric(A)
D <- as.data.frame(A)
# Single-column selections: data frame vs vector depending on indexing
is.data.frame(D[1])
is.data.frame(D[1, ])
is.data.frame(D[, 1])
is.vector(D[, 1])
# Factors cast to the underlying level codes, not the labels
as.numeric(factor(c("H", "M")))
hh <- hist(runif(100), plot = FALSE)
is.list(hh)
|
namespace CSharpWin32GenEx
{
    // Entry-point stub for the Win32 code-generation example; Main is
    // intentionally empty.
    class CSharpWin32GenEx
    {
        public static void Main(string[] args)
        {
        }
    }
}
|
To install, place ONLY the gmgram-addon folder inside your GMod server! DO NOT UPLOAD THE FILTERS FILE!
After that, please follow the README inside the addon folder!
|
// Type definitions for ds18b20 0.1
// Project: https://github.com/chamerling/ds18b20
// Definitions by: Peter Müller <https://github.com/crycode-de>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.1
/// <reference types="node" />
/**
 * Options for a `temperature` call
 */
export interface TemperatureOptions {
    // NOTE(review): selects how the raw sensor output is interpreted;
    // exact semantics of 'decimal'/'hex' come from the upstream ds18b20
    // library — confirm there before relying on them.
    parser: 'decimal' | 'default' | 'hex';
}
/**
 * Callback for a `sensors` call
 * @param err An error or null
 * @param ids An array of sensor IDs
 */
export type SensorsCallback = (err: Error | null, ids: string[]) => void;
/**
 * Callback for a `temperature` call
 * @param err An error or null
 * @param value The temperature
 */
export type TemperatureCallback = (err: Error | null, value: number) => void;
/**
 * Get all connected sensor IDs as array
 * @param callback callback(err, array)
 */
export function sensors(callback: SensorsCallback): void;
/**
 * Get the temperature of a given sensor
 * @param sensorId The sensor ID
 * @param callback callback(err, value)
 */
export function temperature(sensorId: string, callback: TemperatureCallback): void;
/**
 * Get the temperature of a given sensor
 * @param sensorId The sensor ID
 * @param options The options
 * @param callback callback(err, value)
 */
export function temperature(sensorId: string, options: TemperatureOptions, callback: TemperatureCallback): void;
/**
 * Get the temperature of a given sensor sync
 * @param sensorId The sensor ID
 * @param options The options
 * @return The temperature
 */
export function temperatureSync(sensorId: string, options?: TemperatureOptions): number;
|
# Demo model exercising the paperclip-ffmpeg processor with a variety of
# attachment style configurations (valid, invalid, and thumbnail variants).
class Video < ActiveRecord::Base
  # Builds a paperclip style hash for an FLV transcode at the given
  # geometry and audio sample rate.
  def self.flv_style(geometry, audio_rate)
    { :geometry => geometry, :format => 'flv',
      :convert_options => { :output => { :ar => audio_rate } } }
  end

  # Builds a 100x100 cropped thumbnail style captured at +time+ seconds.
  def self.thumb_style(time, format = 'jpg')
    { :geometry => "100x100#", :format => format, :time => time }
  end

  ALLOWED_VIDEO_TYPES = ['video/mp4', 'video/mpeg']

  has_attached_file :clip, :styles => {
    :medium => flv_style("640x480", 44100),
    :large => flv_style("1024x576", 44100)
  }, :processors => [:ffmpeg]
  validates_attachment_content_type :clip, content_type: ALLOWED_VIDEO_TYPES

  # Intentionally invalid audio rate (3000 Hz) to exercise failure handling.
  has_attached_file :wrongClip, :styles => {
    :large => flv_style("1024x576", 3000)
  }, :processors => [:ffmpeg]
  validates_attachment_content_type :wrongClip, content_type: ALLOWED_VIDEO_TYPES

  # Thumbnail time offset beyond the clip's duration.
  has_attached_file :clip_thumb_exceed, :styles => {
    :large => flv_style("1024x576", 44100),
    :thumb => thumb_style(10)
  }, :processors => [:ffmpeg]
  validates_attachment_content_type :clip_thumb_exceed, content_type: ALLOWED_VIDEO_TYPES

  # Thumbnail at the very start of the clip.
  has_attached_file :clip_thumb_normal, :styles => {
    :large => flv_style("1024x576", 44100),
    :thumb => thumb_style(0)
  }, :processors => [:ffmpeg]
  validates_attachment_content_type :clip_thumb_normal, content_type: ALLOWED_VIDEO_TYPES

  # Negative thumbnail time offset.
  has_attached_file :clip_thumb_negative, :styles => {
    :large => flv_style("1024x576", 44100),
    :thumb => thumb_style(-10)
  }, :processors => [:ffmpeg]
  validates_attachment_content_type :clip_thumb_negative, content_type: ALLOWED_VIDEO_TYPES

  # Unknown thumbnail output format ('foo').
  has_attached_file :clip_thumb_bad_extension, :styles => {
    :large => flv_style("1024x576", 44100),
    :thumb => thumb_style(0, 'foo')
  }, :processors => [:ffmpeg]
  validates_attachment_content_type :clip_thumb_bad_extension, content_type: ALLOWED_VIDEO_TYPES
end
|
# typed: true
module Datadog
  module Tracing
    module Contrib
      module SuckerPunch
        # SuckerPunch integration constants
        # @public_api Changing resource names, tag names, or environment variables creates breaking changes.
        module Ext
          # Environment variables controlling the integration.
          ENV_ENABLED = 'DD_TRACE_SUCKER_PUNCH_ENABLED'.freeze
          ENV_ANALYTICS_ENABLED = 'DD_TRACE_SUCKER_PUNCH_ANALYTICS_ENABLED'.freeze
          ENV_ANALYTICS_SAMPLE_RATE = 'DD_TRACE_SUCKER_PUNCH_ANALYTICS_SAMPLE_RATE'.freeze
          SERVICE_NAME = 'sucker_punch'.freeze
          # Span names for the traced SuckerPunch operations.
          SPAN_PERFORM = 'sucker_punch.perform'.freeze
          SPAN_PERFORM_ASYNC = 'sucker_punch.perform_async'.freeze
          SPAN_PERFORM_IN = 'sucker_punch.perform_in'.freeze
          # Tag keys attached to those spans.
          TAG_PERFORM_IN = 'sucker_punch.perform_in'.freeze
          TAG_QUEUE = 'sucker_punch.queue'.freeze
          TAG_COMPONENT = 'sucker_punch'.freeze
          TAG_OPERATION_PERFORM = 'perform'.freeze
          TAG_OPERATION_PERFORM_ASYNC = 'perform_async'.freeze
          TAG_OPERATION_PERFORM_IN = 'perform_in'.freeze
        end
      end
    end
  end
end
|
# Test factory for Flyer records with minimal default attributes.
FactoryGirl.define do
  factory :flyer do
    company_id 1
    language_id 1
    flyer "MyString"
  end
end
|
package com.mathewsachin.fategrandautomata.ui
import android.content.SharedPreferences
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.view.Menu
import android.view.MenuItem
import androidx.appcompat.app.AlertDialog
import androidx.core.content.edit
import androidx.preference.PreferenceManager
import com.mathewsachin.fategrandautomata.R
import com.mathewsachin.fategrandautomata.ui.prefs.AutoSkillItemSettingsFragment
import kotlinx.android.synthetic.main.settings.*
import java.lang.IllegalArgumentException
/**
 * Settings screen for a single AutoSkill configuration. The configuration's
 * preference key must be supplied in the launching intent under the name
 * "autoSkillItemKey".
 */
class AutoSkillItemActivity : AppCompatActivity() {
    var autoSkillItemKey = ""
        private set

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.settings)
        setSupportActionBar(toolbar_settings)

        autoSkillItemKey = intent.getStringExtra(::autoSkillItemKey.name)
            ?: throw IllegalArgumentException("Missing AutoSkill item key in intent")

        // Add the fragment only on first launch
        if (savedInstanceState == null) {
            val fragment = AutoSkillItemSettingsFragment()
            val args = Bundle()
            args.putString(::autoSkillItemKey.name, autoSkillItemKey)
            fragment.arguments = args

            supportFragmentManager
                .beginTransaction()
                .replace(R.id.settings_container, fragment)
                .commit()
        }
    }

    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        menuInflater.inflate(R.menu.autoskill_item_menu, menu)
        return true
    }

    override fun onOptionsItemSelected(item: MenuItem): Boolean {
        return when (item.itemId) {
            R.id.action_auto_skill_delete -> {
                // TODO(review): extract these hard-coded strings to resources
                // for localization, like the rest of the UI text.
                AlertDialog.Builder(this)
                    .setMessage("Are you sure you want to delete this configuration?")
                    .setTitle("Confirm Deletion")
                    .setPositiveButton("Delete") { _, _ -> deleteItem(autoSkillItemKey) }
                    .setNegativeButton("Cancel", null)
                    .show()

                true
            }
            else -> super.onOptionsItemSelected(item)
        }
    }

    /**
     * Deletes the AutoSkill configuration stored under [key]: removes its
     * backing preference file, drops it from the configuration list, and
     * unselects it if it was the active configuration.
     * (Parameters renamed from PascalCase to follow Kotlin conventions.)
     */
    private fun deleteItem(key: String) {
        deleteSharedPreferences(key)

        val prefs = PreferenceManager.getDefaultSharedPreferences(this)
        val autoSkillItemsKey = getString(R.string.pref_autoskill_list)
        val autoSkillItems = prefs.getStringSet(autoSkillItemsKey, mutableSetOf())!!
            .toSortedSet()
        autoSkillItems.remove(key)

        prefs.edit(commit = true) {
            putStringSet(autoSkillItemsKey, autoSkillItems)
        }

        unselectItem(key, prefs)

        // We opened a separate activity for AutoSkill item
        finish()
    }

    /** Clears the "selected" preference if it points at the deleted [key]. */
    private fun unselectItem(key: String, prefs: SharedPreferences) {
        val selectedAutoSkillKey = getString(R.string.pref_autoskill_selected)
        val selectedAutoSkill = prefs.getString(selectedAutoSkillKey, "")

        if (selectedAutoSkill == key) {
            prefs.edit(commit = true) { remove(selectedAutoSkillKey) }
        }
    }
}
|
import React, {CSSProperties, PropsWithChildren} from 'react';
import {variationName, classNames} from '@shopify/css-utilities';
import {TilesProps} from '@shopify/post-purchase-ui-extensions';
import {View} from '../View';
import {rem} from '../../utilities/units';
import styles from './Tiles.css';
interface ExtendedCSSProperties extends CSSProperties {
  '--tile-width'?: string;
  '--tiles-breakpoint'?: string;
}

/**
 * Lays children out as equally-sized tiles, optionally wrapping after
 * `maxPerLine` items and stacking below the `breakAt` viewport width.
 */
export function Tiles({
  children,
  maxPerLine,
  breakAt,
  alignment,
  spacing,
}: PropsWithChildren<TilesProps>) {
  const spacingClass = spacing ? styles[variationName('spacing', spacing)] : undefined;
  const alignmentClass = alignment ? styles[variationName('alignment', alignment)] : undefined;

  const className = classNames(
    styles.Tiles,
    spacingClass,
    alignmentClass,
    maxPerLine ? undefined : styles.doesNotWrap,
    breakAt ? undefined : styles.doesNotBreak,
  );

  const style: ExtendedCSSProperties = {
    '--tile-width': maxPerLine ? `${100 / maxPerLine}%` : undefined /* stylelint-disable-line value-keyword-case */,
    '--tiles-breakpoint': breakAt ? rem(breakAt) : undefined /* stylelint-disable-line value-keyword-case */,
  };

  return (
    /* View avoids problems with negative margins when nested inside other layouts */
    <View>
      <div className={className} style={style}>
        {children}
      </div>
    </View>
  );
}
|
/**
* Copyright 2019 The Pennsylvania State University
* @license Apache-2.0, see License.md for full text.
*/
import { LitElement, html, css } from "lit-element/lit-element.js";
import { store } from "@lrnwebcomponents/haxcms-elements/lib/core/haxcms-site-store.js";
import { autorun, toJS } from "mobx";
/**
* `site-title`
* `Title of the site`
*
* @demo demo/index.html
*/
class SiteTitle extends LitElement {
  /**
   * LitElement constructable styles enhancement
   */
  static get styles() {
    return [
      css`
        :host {
          display: block;
          text-rendering: optimizelegibility;
          position: relative;
          color: inherit;
        }
        a {
          color: inherit;
          display: var(--site-title-link-display, block);
          text-decoration: var(--site-title-link-text-decoration);
        }
        a h1 {
          color: inherit;
          text-rendering: optimizelegibility;
          font-family: var(--site-title-heading-font-family);
          font-size: var(--site-title-heading-font-size);
          margin: var(--site-title-heading-margin);
          padding: var(--site-title-heading-padding);
          text-align: var(--site-title-heading-text-align);
          text-rendering: var(--site-title-heading-text-rendering);
          font-weight: var(--site-title-heading-font-weight);
        }
      `,
    ];
  }
  /**
   * Store the tag name to make it easier to obtain directly.
   */
  static get tag() {
    return "site-title";
  }
  constructor() {
    super();
    this.__disposer = [];
    this.label = "Home";
    this.notitle = false;
    // Lazy-load the icon elements used in render().
    import("@lrnwebcomponents/simple-icon/lib/simple-icon-lite.js");
    import("@lrnwebcomponents/simple-icon/lib/simple-icons.js");
    // Mirror mobx store state into local properties; each autorun's
    // reaction is kept so disconnectedCallback can dispose it.
    autorun((reaction) => {
      this.siteTitle = toJS(store.siteTitle);
      this.__disposer.push(reaction);
    });
    autorun((reaction) => {
      this.homeLink = toJS(store.homeLink);
      this.__disposer.push(reaction);
    });
  }
  /**
   * LitElement
   */
  render() {
    return html`
      <a
        href="${this.homeLink}"
        title="${this.label}"
        ?disabled="${this.disabled}"
      >
        <simple-icon-lite
          ?hidden="${this.icon ? false : true}"
          icon="${this.icon}"
        ></simple-icon-lite>
        ${this.notitle ? html`` : html` <h1>${this.siteTitle}</h1> `}
      </a>
    `;
  }
  /**
   * Props
   */
  static get properties() {
    return {
      disabled: {
        type: Boolean,
        reflect: true,
      },
      /**
       * Site title
       */
      siteTitle: {
        type: String,
        attribute: "site-title",
      },
      /**
       * HREF to the home page
       */
      homeLink: {
        type: String,
        attribute: "home-link",
      },
      /**
       * Label
       */
      label: {
        type: String,
      },
      /**
       * optional icon
       */
      icon: {
        type: String,
      },
      /**
       * If the title should be displayed or not
       */
      notitle: {
        type: Boolean,
        reflect: true,
      },
    };
  }
  // Dispose all mobx reactions registered in the constructor to avoid
  // leaking observers after the element is removed from the DOM.
  disconnectedCallback() {
    for (var i in this.__disposer) {
      this.__disposer[i].dispose();
    }
    super.disconnectedCallback();
  }
}
window.customElements.define(SiteTitle.tag, SiteTitle);
export { SiteTitle };
|
-- Absence requests submitted by employees.
CREATE TABLE ABSENCEDATA (
    AD_ID INTEGER NOT NULL,
    SUBMITTER_NAME VARCHAR(40),
    SUBMITTER_EMAIL VARCHAR(40),
    STATE INTEGER,
    REASON VARCHAR(40),
    FROM_DATE DATE,
    TO_DATE DATE,
    -- Fixed: the primary key previously referenced PCP_ID, a column that
    -- does not exist in this table; the key column is AD_ID.
    PRIMARY KEY (AD_ID)
);
|
<?php
// application/views/pics/search.php
// Search form for images by title and/or tag.
$this->load->view($this->config->item('theme') . 'header');
?>
<h2><?php echo $title; ?></h2>
<?=validation_errors(); ?>
<?=form_open('pics/search'); ?>
<!-- Fixed: "input" is not a valid value for the type attribute; browsers
     silently fell back to a text field. Use the explicit type="text". -->
<input type="text" name="title" placeholder="Search Title"/><br />
<input type="text" name="tag" placeholder="Tag (no spaces)"/><br />
<input type="submit" name="submit" value="View Images" />
</form>
<?php
$this->load->view($this->config->item('theme') . 'footer');
?>
|
package com.spothero.employmentTest.service
import com.spothero.employmentTest.domain.error_handling.CustomException
import com.spothero.employmentTest.domain.error_handling.CustomExceptionHandler
import com.spothero.employmentTest.entity.RateEntity
import com.spothero.employmentTest.repository.RatesRepository
import com.spothero.employmentTest.view.PriceResponse
import org.joda.time.LocalDateTime
import org.joda.time.LocalTime
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
class PriceService(
    @Autowired private val ratesRepository: RatesRepository
) {
    /**
     * Returns the price of the single parking rate covering the stay from
     * [startTime] to [endTime].
     *
     * @throws CustomExceptionHandler when the stay spans more than one rate
     *   or no rate matches the stay's day/time window.
     */
    @Throws(
        CustomExceptionHandler::class
    )
    fun getPrice(
        startTime: LocalDateTime,
        endTime: LocalDateTime
    ): PriceResponse {
        // Rates are matched against short day names ("Mon", "Tue", ...).
        val dayOfWeek = startTime.dayOfWeek().asShortText
        //val endDayOfWeek = endTime.dayOfWeek().asShortText
        val startLocalDateTime: LocalTime = startTime.toLocalTime()
        val endLocalDateTime: LocalTime = endTime.toLocalTime()
        // NOTE(review): the times are interpolated into strings and compared
        // by the repository; presumably the stored values share LocalTime's
        // string format so lexicographic order matches time order — confirm
        // against the rates table schema.
        val rateEntityList: List<RateEntity>? =
            ratesRepository.findByLocalEndTimeGreaterThanEqualAndLocalStartTimeLessThanEqual(
                "$endLocalDateTime",
                "$startLocalDateTime"
            )
        // Keep only the rates valid on the stay's starting day of week.
        val rateEntityListParsedByDay: List<RateEntity>? = rateEntityList?.filter {
            it.days.contains(dayOfWeek, true)
        }
        if (rateEntityListParsedByDay != null) {
            when {
                rateEntityListParsedByDay.size > 1 -> {
                    throw CustomExceptionHandler(
                        CustomException.PRICE_UNAVAILABLE,
                        RuntimeException("Parking stay spans more than one (1) rate.")
                    )
                }
                rateEntityListParsedByDay.isEmpty() -> {
                    throw CustomExceptionHandler(CustomException.PRICE_UNAVAILABLE, RuntimeException("No valid rate."))
                }
            }
        }
        // Exactly one rate remains here; price falls back to 0 only when the
        // repository query itself returned null.
        return PriceResponse(price = rateEntityListParsedByDay?.get(0)?.rate ?: 0)
    }
}
|
using System;
using SpotifyAPI.Web;
namespace SpotifyProject.SpotifyPlaybackModifier
{
	/// <summary>
	/// Bundles the authenticated Spotify client with the market (country
	/// code) used for playback queries; exposes itself as its own
	/// configuration container.
	/// </summary>
	public class SpotifyConfiguration : ISpotifyConfigurationContainer
	{
		public SpotifyClient Spotify { get; set; }
		public string Market { get; set; }

		SpotifyConfiguration ISpotifyConfigurationContainer.SpotifyConfiguration => this;
	}
}
|
#!/bin/zsh
# Build the web binary from all Go sources under cmd/web, then run it.
go build -o app cmd/web/*.go && ./app
|
package com.guilla.lab.mvp.interactors
/**
 * Created by dino on 20/03/15.
 */
/** Base contract for MVP interactors: work can be cancelled and reset. */
interface BaseInteractor {
    fun cancel()
    fun reset()
}
|
import 'package:etherwallet/service/configuration_service.dart';
// ignore_for_file: flutter_style_todos
/// Hand-written stand-in for [IConfigurationService]; every member throws
/// [UnimplementedError] until stubbed out by the test that needs it.
class ConfigServiceMock implements IConfigurationService {
  @override
  bool didSetupWallet() {
    // TODO: implement didSetupWallet
    throw UnimplementedError();
  }
  @override
  String? getMnemonic() {
    // TODO: implement getMnemonic
    throw UnimplementedError();
  }
  @override
  String? getPrivateKey() {
    // TODO: implement getPrivateKey
    throw UnimplementedError();
  }
  @override
  Future<void> setMnemonic(String? value) {
    // TODO: implement setMnemonic
    throw UnimplementedError();
  }
  @override
  Future<void> setPrivateKey(String? value) {
    // TODO: implement setPrivateKey
    throw UnimplementedError();
  }
  @override
  Future<void> setupDone(bool value) {
    // TODO: implement setupDone
    throw UnimplementedError();
  }
}
|
// scalac: -unchecked -Xfatal-warnings
class A {
  def f: Boolean = {
    // Compiler-diagnostics test: `return` inside the lambda performs a
    // non-local return out of f (presumably warned about under
    // -unchecked/-Xfatal-warnings — see the scalac flags above).
    val xs = Nil map (_ => return false)
    true
  }
}
|
import 'package:fluent_ui/fluent_ui.dart';
import 'package:win32_gamepad/win32_gamepad.dart';
/// Read-only diamond of A/B/X/Y indicators reflecting the current
/// [Gamepad] button state; the switches are display-only (onChanged is a
/// no-op).
class ABXYButtons extends StatelessWidget {
  const ABXYButtons({
    Key? key,
    required this.gamepad,
  }) : super(key: key);
  // Gamepad whose polled state drives the four indicators.
  final Gamepad gamepad;
  @override
  Widget build(BuildContext context) {
    return Padding(
      padding: const EdgeInsets.all(8.0),
      child: FittedBox(
        child: Column(
          children: [
            // Top of the diamond: Y
            Row(mainAxisAlignment: MainAxisAlignment.center, children: [
              ToggleSwitch(
                checked: gamepad.state.buttonY,
                content: const Text('Y button'),
                onChanged: (_) {},
              ),
            ]),
            // Middle row: X on the left, B on the right
            Row(mainAxisAlignment: MainAxisAlignment.center, children: [
              ToggleSwitch(
                checked: gamepad.state.buttonX,
                content: const Text('X button'),
                onChanged: (_) {},
              ),
              const SizedBox(width: 20),
              ToggleSwitch(
                checked: gamepad.state.buttonB,
                content: const Text('B button'),
                onChanged: (_) {},
              ),
            ]),
            // Bottom of the diamond: A
            Row(mainAxisAlignment: MainAxisAlignment.center, children: [
              ToggleSwitch(
                checked: gamepad.state.buttonA,
                content: const Text('A button'),
                onChanged: (_) {},
              ),
            ]),
          ],
        ),
      ),
    );
  }
}
|
using Microsoft.ML.Transforms;
using System;
namespace Unearth.Demo.MLCustomTransform.CustomTransforms
{
// The input columns we want to be passed to our transformer
    public class FlightCodeCMInput
    {
        // Raw flight code string taken from the input row.
        public string FlightCode { get; set; }
    }
// The output columns we want our transformer to add to the pipeline
// If the name is the same as an existing column then that column will be replaced
    public class FlightCodeCMOutput
    {
        // Engineered feature produced by the mapping: 1.0 or 0.0 (see FlightCodeMapping.Transform).
        public float SpecialFeature { get; set; }
    }
[CustomMappingFactoryAttribute("FlightCodeMapping")]
public class FlightCodeMapping : CustomMappingFactory<FlightCodeCMInput, FlightCodeCMOutput>
{
static FlightCodeMapping()
{
// Init any static data needed
}
public static void Transform(FlightCodeCMInput input, FlightCodeCMOutput output)
{
// Boeing 737-800s are special (for no very good reason)
if (input.FlightCode.Contains("B738") || input.FlightCode.Contains("73H"))
output.SpecialFeature = 1f;
else
output.SpecialFeature = 0f;
}
public override Action<FlightCodeCMInput, FlightCodeCMOutput> GetMapping()
{
return Transform;
}
}
}
|
import 'package:dio/dio.dart';
import 'package:maby_dio/codable.dart';
import 'package:maby_dio/response_data.dart';
/// Converts a raw Dio [Response] into a [RawResponseData], optionally
/// decoding the payload into [model] when one is supplied.
abstract class ResponseSerializer<T extends Codable> {
  RawResponseData serialize(Response response, T? model);
}
/// Default serializer that unwraps the conventional `{"data": ...}` envelope.
/// When [model] is provided and the inner payload is a JSON map, the model is
/// populated via [Codable.fromJson]; otherwise the raw payload is returned.
class DefaultResponseSerializer<T extends Codable> extends ResponseSerializer<T> {
  @override
  RawResponseData serialize(Response response, T? model) {
    final dynamic body = response.data;
    // Not an envelope at all — hand the payload back untouched.
    if (body is! Map) {
      return ResponseRawData(body);
    }
    final dynamic inner = body['data'];
    if (model != null && inner is Map<String, dynamic>) {
      model.fromJson(inner);
      return ResponseModelData(model);
    }
    return ResponseRawData(inner);
  }
}
|
/*
* Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.execution
import java.io.File
import java.nio.file.{Files, Paths}
import java.util.Map.Entry
import java.util.function.Consumer
import scala.collection.mutable.ArrayBuffer
import com.gemstone.gemfire.SystemFailure
import com.pivotal.gemfirexd.internal.engine.Misc
import com.pivotal.gemfirexd.internal.engine.store.GemFireStore
import com.pivotal.gemfirexd.internal.iapi.reference.{Property => GemXDProperty}
import com.pivotal.gemfirexd.internal.impl.jdbc.Util
import com.pivotal.gemfirexd.internal.shared.common.reference.SQLState
import io.snappydata.util.ServiceUtils
import io.snappydata.{Constant, Property}
import org.apache.spark.SparkContext
import org.apache.spark.deploy.SparkSubmitUtils
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogTable, CatalogTableType}
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Cast, Expression, GenericInternalRow, SortDirection}
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.collection.ToolsCallbackInit
import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
import org.apache.spark.sql.execution.command.{DescribeTableCommand, DropTableCommand, RunnableCommand, SetCommand, ShowTablesCommand}
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.internal.{BypassRowLevelSecurity, ContextJarUtils, StaticSQLConf}
import org.apache.spark.sql.sources.DestroyRelation
import org.apache.spark.sql.types._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Duration, SnappyStreamingContext}
/**
* Allow execution of adhoc scala code on the Lead node.
* Creates a new Scala interpreter for a Snappy Session. But, cached for the life of the
* session. Subsequent invocations of the 'interpret' command will resuse the cached
* interpreter. Allowing any variables (e.g. dataframe) to be preserved across invocations.
* State will not be preserved during Lead node failover.
* <p> Application is injected (1) The SnappySession in variable called 'session' and
* (2) The Options in a variable called 'intp_options'.
* <p> To return values set a variable called 'intp_return' - a Seq[Row].
*/
case class InterpretCodeCommand(
    code: String,
    snappySession: SnappySession,
    options: Map[String, String] = Map.empty) extends RunnableCommand {
  // Result of interpreting `code` on the lead node; evaluated at most once.
  // NOTE(review): when toolsCallback is null (smart connector mode) df stays
  // null, so run()/output below would NPE — presumably those paths are never
  // reached in that mode (see comment below); confirm.
  lazy val df: Dataset[Row] = {
    val tcb = ToolsCallbackInit.toolsCallback
    if (tcb != null) {
      // supported in embedded mode only
      tcb.getScalaCodeDF(code, snappySession, options)
    } else {
      null
    }
  }
  // This is handled directly by Remote Interpreter code
  override def run(sparkSession: SparkSession): Seq[Row] = df.collect()
  // Output schema is whatever the interpreted code produced.
  override def output: Seq[Attribute] = df.schema.fields.map(
    x => AttributeReference(x.name, x.dataType, x.nullable)())
}
/**
 * Grants or revokes the interpreter (INTP) privilege for the given users.
 * Only supported in embedded mode where a tools callback is available.
 */
case class GrantRevokeIntpCommand(
    isGrant: Boolean, users: String) extends RunnableCommand {

  // This is handled directly by Remote Interpreter code
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val callback = ToolsCallbackInit.toolsCallback
    if (callback eq null) {
      throw new AnalysisException(
        "Granting/Revoking of INTP not supported from smart connector mode")
    }
    val snappySession = sparkSession.asInstanceOf[SnappySession]
    val currentUser = snappySession.conf.get(com.pivotal.gemfirexd.Attribute.USERNAME_ATTR)
    callback.updateIntpGrantRevoke(currentUser, isGrant, users)
    Nil
  }
}
/**
 * Grants or revokes access on an external table for the given users.
 * Only supported in embedded mode where a tools callback is available.
 */
case class GrantRevokeOnExternalTable(
    isGrant: Boolean, table: TableIdentifier, users: String) extends RunnableCommand {

  // This is handled directly by Remote Interpreter code
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val callback = ToolsCallbackInit.toolsCallback
    if (callback eq null) {
      throw new AnalysisException(
        "Granting/Revoking on external table not supported from smart connector mode")
    }
    val snappySession = sparkSession.asInstanceOf[SnappySession]
    val catalogTable = snappySession.sessionCatalog.getTableMetadata(table)
    val currentUser = snappySession.conf.get(com.pivotal.gemfirexd.Attribute.USERNAME_ATTR)
    callback.updateGrantRevokeOnExternalTable(currentUser, isGrant, table, users, catalogTable)
    Nil
  }
}
object GrantRevokeOnExternalTable {

  /** Prefix under which external-table grant/revoke entries are stored. */
  val META_REGION_KEY_PREFIX = "##_EXTERNAL__GRANT__REVOKE_##"

  /** Meta-region key for the given fully-qualified table name. */
  def getMetaRegionKey(fqtn: String): String = s"$META_REGION_KEY_PREFIX####$fqtn"
}
/**
 * Creates a table through SnappySession's internal API, folding any
 * base-table option into the supplied options map first.
 */
case class CreateTableUsingCommand(
    tableIdent: TableIdentifier,
    baseTable: Option[String],
    userSpecifiedSchema: Option[StructType],
    schemaDDL: Option[String],
    provider: String,
    mode: SaveMode,
    options: Map[String, String],
    partitionColumns: Array[String],
    bucketSpec: Option[BucketSpec],
    query: Option[LogicalPlan],
    isBuiltIn: Boolean) extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val snappySession = sparkSession.asInstanceOf[SnappySession]
    snappySession.createTableInternal(tableIdent, provider, userSpecifiedSchema,
      schemaDDL, mode, snappySession.addBaseTableOption(baseTable, options),
      isBuiltIn, partitionColumns, bucketSpec, query)
    Nil
  }
}
/**
* Like Spark's DropTableCommand but checks for non-existent table case upfront to avoid
* unnecessary warning logs from Spark's DropTableCommand.
*/
case class DropTableOrViewCommand(
    tableIdent: TableIdentifier,
    ifExists: Boolean,
    isView: Boolean,
    purge: Boolean) extends RunnableCommand {
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.asInstanceOf[SnappySession].sessionCatalog
    // Check for the non-existent case upfront so we can return/throw cleanly
    // instead of letting Spark's DropTableCommand log warnings.
    if (!catalog.isTemporaryTable(tableIdent) && !catalog.tableExists(tableIdent)) {
      val resolved = catalog.resolveTableIdentifier(tableIdent)
      if (ifExists) return Nil
      else throw new TableNotFoundException(resolved.database.get, resolved.table)
    }
    // Table/view exists (or is temporary): delegate to Spark's implementation.
    DropTableCommand(tableIdent, ifExists, isView, purge).run(sparkSession)
  }
}
/** Creates a schema after normalizing its name through the session catalog. */
case class CreateSchemaCommand(ifNotExists: Boolean, schemaName: String,
    authId: Option[(String, Boolean)]) extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val sessionCatalog = sparkSession.asInstanceOf[SnappySession].sessionCatalog
    val normalized = sessionCatalog.formatDatabaseName(schemaName)
    sessionCatalog.createSchema(normalized, ifNotExists, authId)
    Nil
  }
}
/** Drops a schema, optionally cascading to the objects it contains. */
case class DropSchemaCommand(schemaName: String, ignoreIfNotExists: Boolean,
    cascade: Boolean) extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    sparkSession.asInstanceOf[SnappySession]
        .sessionCatalog.dropSchema(schemaName, ignoreIfNotExists, cascade)
    Nil
  }
}
/** Drops a row-level-security policy, tolerating absence when ifExists is set. */
case class DropPolicyCommand(ifExists: Boolean,
    policyIdentifer: TableIdentifier) extends RunnableCommand {

  override def run(session: SparkSession): Seq[Row] = {
    session.asInstanceOf[SnappySession].dropPolicy(policyIdentifer, ifExists)
    Nil
  }
}
case class TruncateManagedTableCommand(ifExists: Boolean,
    table: TableIdentifier) extends RunnableCommand {
  // Truncates a managed (DestroyRelation-backed) table and drops any cached
  // query results for it. Non-DestroyRelation tables are rejected.
  override def run(session: SparkSession): Seq[Row] = {
    val catalog = session.asInstanceOf[SnappySession].sessionCatalog
    // skip if "ifExists" is true and table does not exist
    if (!(ifExists && !catalog.tableExists(table))) {
      catalog.resolveRelation(table) match {
        case lr: LogicalRelation if lr.relation.isInstanceOf[DestroyRelation] =>
          lr.relation.asInstanceOf[DestroyRelation].truncate()
        case plan => throw new AnalysisException(
          s"Table '$table' must be a DestroyRelation for truncate. Found plan: $plan")
      }
      // Invalidate any cached plans/data derived from the now-empty table.
      session.sharedState.cacheManager.uncacheQuery(session.table(table))
    }
    Nil
  }
}
/** Adds a column to the table, forwarding any DDL extension string. */
case class AlterTableAddColumnCommand(tableIdent: TableIdentifier,
    addColumn: StructField, extensions: String) extends RunnableCommand {

  override def run(session: SparkSession): Seq[Row] = {
    session.asInstanceOf[SnappySession]
        .alterTable(tableIdent, isAddColumn = true, addColumn, extensions)
    Nil
  }
}
/** Enables or disables row-level security on the given table. */
case class AlterTableToggleRowLevelSecurityCommand(tableIdent: TableIdentifier,
    enableRls: Boolean) extends RunnableCommand {

  override def run(session: SparkSession): Seq[Row] = {
    session.asInstanceOf[SnappySession].alterTableToggleRLS(tableIdent, enableRls)
    Nil
  }
}
/** Drops the named column from the table. */
case class AlterTableDropColumnCommand(tableIdent: TableIdentifier, column: String,
    extensions: String) extends RunnableCommand {

  override def run(session: SparkSession): Seq[Row] = {
    // Only the column name matters for a drop, so a placeholder type is used.
    val placeholder = StructField(column, NullType)
    session.asInstanceOf[SnappySession]
        .alterTable(tableIdent, isAddColumn = false, placeholder, extensions)
    Nil
  }
}
/** Forwards miscellaneous ALTER TABLE SQL to SnappySession for execution. */
case class AlterTableMiscCommand(tableIdent: TableIdentifier, sql: String)
    extends RunnableCommand {

  override def run(session: SparkSession): Seq[Row] = {
    session.asInstanceOf[SnappySession].alterTableMisc(tableIdent, sql)
    Nil
  }
}
/** Creates an index on the base table over the given (column, direction) pairs. */
case class CreateIndexCommand(indexName: TableIdentifier,
    baseTable: TableIdentifier,
    indexColumns: Seq[(String, Option[SortDirection])],
    options: Map[String, String]) extends RunnableCommand {

  override def run(session: SparkSession): Seq[Row] = {
    session.asInstanceOf[SnappySession]
        .createIndex(indexName, baseTable, indexColumns, options)
    Nil
  }
}
case class CreatePolicyCommand(policyIdent: TableIdentifier,
    tableIdent: TableIdentifier,
    policyFor: String, applyTo: Seq[String], expandedPolicyApplyTo: Seq[String],
    currentUser: String, filterStr: String,
    filter: BypassRowLevelSecurity) extends RunnableCommand {
  // Creates a row-level-security policy. Both cluster security and the RLS
  // feature flag must be enabled, otherwise a SQL security exception is raised.
  override def run(session: SparkSession): Seq[Row] = {
    if (!Misc.isSecurityEnabled && !GemFireStore.ALLOW_RLS_WITHOUT_SECURITY) {
      throw Util.generateCsSQLException(SQLState.SECURITY_EXCEPTION_ENCOUNTERED,
        null, new IllegalStateException("CREATE POLICY failed: Security (" +
        com.pivotal.gemfirexd.Attribute.AUTH_PROVIDER + ") not enabled in the system"))
    }
    if (!Misc.getMemStoreBooting.isRLSEnabled) {
      throw Util.generateCsSQLException(SQLState.SECURITY_EXCEPTION_ENCOUNTERED,
        null, new IllegalStateException("CREATE POLICY failed: Row level security (" +
        GemXDProperty.SNAPPY_ENABLE_RLS + ") not enabled in the system"))
    }
    val snappySession = session.asInstanceOf[SnappySession]
    // Policy creation resolves plans internally, so make this the active session.
    SparkSession.setActiveSession(snappySession)
    snappySession.createPolicy(policyIdent, tableIdent, policyFor, applyTo, expandedPolicyApplyTo,
      currentUser, filterStr, filter)
    Nil
  }
}
/** Drops an index, tolerating absence when ifExists is set. */
case class DropIndexCommand(ifExists: Boolean,
    indexName: TableIdentifier) extends RunnableCommand {

  override def run(session: SparkSession): Seq[Row] = {
    session.asInstanceOf[SnappySession].dropIndex(indexName, ifExists)
    Nil
  }
}
/** Switches the session's current schema. */
case class SetSchemaCommand(schemaName: String) extends RunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    val snappySession = sparkSession.asInstanceOf[SnappySession]
    snappySession.setCurrentSchema(schemaName)
    Nil
  }
}
case class SnappyStreamingActionsCommand(action: Int,
    batchInterval: Option[Duration]) extends RunnableCommand {
  // Dispatches streaming-context lifecycle actions:
  //   0 = create (if none active), 1 = start, 2 = stop gracefully.
  override def run(session: SparkSession): Seq[Row] = {
    def creatingFunc(): SnappyStreamingContext = {
      // batchInterval will always be defined when action == 0
      new SnappyStreamingContext(session.sparkContext, batchInterval.get)
    }
    action match {
      case 0 =>
        val ssc = SnappyStreamingContext.getInstance()
        ssc match {
          case Some(_) => // TODO .We should create a named Streaming
          // Context and check if the configurations match
          case None => SnappyStreamingContext.getActiveOrCreate(creatingFunc)
        }
      case 1 =>
        val ssc = SnappyStreamingContext.getInstance()
        ssc match {
          case Some(x) => x.start()
          case None => throw new AnalysisException(
            "Streaming Context has not been initialized")
        }
      case 2 =>
        val ssc = SnappyStreamingContext.getActive
        ssc match {
          // Stop only the streaming side; the shared SparkContext stays alive.
          case Some(strCtx) => strCtx.stop(stopSparkContext = false,
            stopGracefully = true)
          case None => // throw new AnalysisException(
          // "There is no running Streaming Context to be stopped")
        }
    }
    Nil
  }
}
/**
* Alternative to Spark's CacheTableCommand that shows the plan being cached
* in the GUI rather than count() plan for InMemoryRelation.
*/
case class SnappyCacheTableCommand(tableIdent: TableIdentifier, queryString: String,
    plan: Option[LogicalPlan], isLazy: Boolean) extends RunnableCommand {
  require(plan.isEmpty || tableIdent.database.isEmpty,
    "Schema name is not allowed in CACHE TABLE AS SELECT")
  // Single LongType column: number of cached batches materialized (eager mode).
  override def output: Seq[Attribute] = AttributeReference(
    "batchCount", LongType)() :: Nil
  // Show the plan being cached in the GUI instead of a count() plan.
  override protected def innerChildren: Seq[QueryPlan[_]] = plan match {
    case None => Nil
    case Some(p) => p :: Nil
  }
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val session = sparkSession.asInstanceOf[SnappySession]
    // Either cache an existing table, or register the SELECT as a temp view first.
    val df = plan match {
      case None => session.table(tableIdent)
      case Some(lp) =>
        val df = Dataset.ofRows(session, lp)
        df.createTempView(tableIdent.quotedString)
        df
    }
    val isOffHeap = ServiceUtils.isOffHeapStorageAvailable(session)
    if (isLazy) {
      // Lazy: just mark persisted; materialization happens on first use.
      if (isOffHeap) df.persist(StorageLevel.OFF_HEAP) else df.persist()
      Nil
    } else {
      // Eager: materialize now under a job description naming this query.
      val queryShortString = CachedDataFrame.queryStringShortForm(queryString)
      val localProperties = session.sparkContext.getLocalProperties
      val previousJobDescription = localProperties.getProperty(SparkContext.SPARK_JOB_DESCRIPTION)
      localProperties.setProperty(SparkContext.SPARK_JOB_DESCRIPTION, queryShortString)
      try {
        session.sessionState.enableExecutionCache = true
        // Get the actual QueryExecution used by InMemoryRelation so that
        // "withNewExecutionId" runs on the same and shows proper metrics in GUI.
        val cachedExecution = try {
          if (isOffHeap) df.persist(StorageLevel.OFF_HEAP) else df.persist()
          session.sessionState.getExecution(df.logicalPlan)
        } finally {
          session.sessionState.enableExecutionCache = false
          session.sessionState.clearExecutionCache()
        }
        val memoryPlan = df.queryExecution.executedPlan.collectFirst {
          case plan: InMemoryTableScanExec => plan.relation
        }.get
        // cachedExecution can be null if query was already cached
        val (cachedPlan, cachedPlanStr) =
          if (cachedExecution ne null) cachedExecution.executedPlan -> cachedExecution.toString
          else {
            val executedPlan = session.sharedState.cacheManager().lookupCachedData(df.logicalPlan)
                .getOrElse(throw new AnalysisException(s"Cached data for [ $queryString ] after " +
                    "successful persist() call no longer present.")).cachedRepresentation.child
            executedPlan ->
                s"""== Physical Plan previously cached ==
                   |${executedPlan.treeString(verbose = false)}
                """.stripMargin.trim
          }
        val planInfo = PartitionedPhysicalScan.getSparkPlanInfo(cachedPlan)
        Row(CachedDataFrame.withCallback(session, df = null, cachedExecution, "cache")(_ =>
          CachedDataFrame.withNewExecutionId(session, queryShortString, queryString,
            cachedPlanStr, planInfo, if (cachedExecution ne null) cachedPlan else null,
            removeBroadcastsFromDriver = true)({
            val start = System.nanoTime()
            // Dummy op to materialize the cache. This does the minimal job of count on
            // the actual cached data (RDD[CachedBatch]) to force materialization of cache
            // while avoiding creation of any new SparkPlan.
            val count = memoryPlan.cachedColumnBuffers.count()
            (count, System.nanoTime() - start)
          }))._1) :: Nil
      } finally {
        // Restore the previous job description (or clear it if there was none).
        if (previousJobDescription ne null) {
          localProperties.setProperty(SparkContext.SPARK_JOB_DESCRIPTION, previousJobDescription)
        } else {
          localProperties.remove(SparkContext.SPARK_JOB_DESCRIPTION)
        }
      }
    }
  }
}
/**
* Changes the name of "database" column to "schemaName" over Spark's ShowTablesCommand.
* Also when hive compatibility is turned on, then this does not include the schema name
* or "isTemporary" to return hive compatible result.
*/
class ShowSnappyTablesCommand(session: SnappySession, schemaOpt: Option[String],
    tablePattern: Option[String]) extends ShowTablesCommand(schemaOpt, tablePattern) {
  // "full" hive compatibility trims the output to a single hive-style column.
  private val hiveCompatible = Property.HiveCompatibility.get(
    session.sessionState.conf).equalsIgnoreCase("full")
  override val output: Seq[Attribute] = {
    if (hiveCompatible) AttributeReference("name", StringType, nullable = false)() :: Nil
    else {
      AttributeReference("schemaName", StringType, nullable = false)() ::
          AttributeReference("tableName", StringType, nullable = false)() ::
          AttributeReference("isTemporary", BooleanType, nullable = false)() :: Nil
    }
  }
  override def run(sparkSession: SparkSession): Seq[Row] = {
    // Non-hive mode: Spark's ShowTablesCommand output (column renamed above).
    if (!hiveCompatible) {
      return super.run(sparkSession)
    }
    // Hive-compatible mode: list only table names in the resolved schema.
    val catalog = sparkSession.sessionState.catalog
    val schemaName = schemaOpt match {
      case None => catalog.getCurrentDatabase
      case Some(s) => s
    }
    val tables = tableIdentifierPattern match {
      case None => catalog.listTables(schemaName)
      case Some(p) => catalog.listTables(schemaName, p)
    }
    tables.map(tableIdent => Row(tableIdent.table))
  }
}
case class ShowViewsCommand(session: SnappySession, schemaOpt: Option[String],
    viewPattern: Option[String]) extends RunnableCommand {
  // "full" hive compatibility trims the output to a single hive-style column.
  private val hiveCompatible = Property.HiveCompatibility.get(
    session.sessionState.conf).equalsIgnoreCase("full")
  // The result of SHOW VIEWS has four columns: schemaName, tableName, isTemporary and isGlobal.
  override val output: Seq[Attribute] = {
    if (hiveCompatible) AttributeReference("viewName", StringType, nullable = false)() :: Nil
    else {
      AttributeReference("schemaName", StringType, nullable = false)() ::
          AttributeReference("viewName", StringType, nullable = false)() ::
          AttributeReference("isTemporary", BooleanType, nullable = false)() ::
          AttributeReference("isGlobal", BooleanType, nullable = false)() :: Nil
    }
  }
  // Classifies an identifier: Some((isTemporary, isGlobalTemp)) for views,
  // None for anything that is not a view (filtered out below).
  private def getViewType(table: TableIdentifier,
      session: SnappySession): Option[(Boolean, Boolean)] = {
    val catalog = session.sessionCatalog
    if (catalog.isTemporaryTable(table)) Some(true -> !catalog.isLocalTemporaryView(table))
    else if (catalog.getTableMetadata(table).tableType != CatalogTableType.VIEW) None
    else Some(false -> false)
  }
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val session = sparkSession.asInstanceOf[SnappySession]
    val catalog = session.sessionCatalog
    val schemaName = schemaOpt match {
      case None => catalog.getCurrentDatabase
      case Some(s) => s
    }
    val tables = viewPattern match {
      case None => catalog.listTables(schemaName)
      case Some(p) => catalog.listTables(schemaName, p)
    }
    // Keep only views; shape of each row depends on hive compatibility.
    tables.map(tableIdent => tableIdent -> getViewType(tableIdent, session)).collect {
      case (viewIdent, Some((isTemp, isGlobalTemp))) =>
        if (hiveCompatible) Row(viewIdent.table)
        else {
          val viewSchema = viewIdent.database match {
            case None => ""
            case Some(s) => s
          }
          Row(viewSchema, viewIdent.table, isTemp, isGlobalTemp)
        }
    }
  }
}
/**
* This extends Spark's describe to add support for CHAR and VARCHAR types.
*/
class DescribeSnappyTableCommand(table: TableIdentifier,
    partitionSpec: TablePartitionSpec, isExtended: Boolean, isFormatted: Boolean)
    extends DescribeTableCommand(table, partitionSpec, isExtended, isFormatted) {
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val catalog = sparkSession.asInstanceOf[SnappySession].sessionCatalog
    // Synchronize on the catalog: the flag below is catalog-wide mutable state,
    // so concurrent DESCRIBEs must not observe each other's toggling.
    catalog.synchronized {
      // set the flag to return CharType/VarcharType if present
      catalog.convertCharTypesInMetadata = true
      try {
        super.run(sparkSession)
      } finally {
        catalog.convertCharTypesInMetadata = false
      }
    }
  }
}
/**
 * Like Spark's SET command, but additionally allows switching the catalog
 * implementation key at runtime (SnappySession supports attaching an
 * external hive catalog on the fly).
 */
class SetSnappyCommand(kv: Option[(String, Option[String])]) extends SetCommand(kv) {
  override def run(sparkSession: SparkSession): Seq[Row] = kv match {
    // SnappySession allows attaching external hive catalog at runtime
    case Some((key, Some(value)))
        if key.equalsIgnoreCase(StaticSQLConf.CATALOG_IMPLEMENTATION.key) =>
      sparkSession.sessionState.conf.setConfString(key, value)
      Row(key, value) :: Nil
    case _ => super.run(sparkSession)
  }
}
case class DeployCommand(
    coordinates: String,
    alias: String,
    repos: Option[String],
    jarCache: Option[String],
    restart: Boolean) extends RunnableCommand {
  // Resolves maven coordinates, distributes the resulting jars to all members'
  // classloaders, and records the deployment under `alias` for replay on restart.
  override def run(sparkSession: SparkSession): Seq[Row] = {
    try {
      val jarsstr = SparkSubmitUtils.resolveMavenCoordinates(coordinates, repos, jarCache)
      if (jarsstr.nonEmpty) {
        val jars = jarsstr.split(",")
        val sc = sparkSession.sparkContext
        // Serve each jar through the driver's RPC file server.
        val uris = jars.map(j => sc.env.rpcEnv.fileServer.addFile(new File(j)))
        SnappySession.addJarURIs(uris)
        RefreshMetadata.executeOnAll(sc, RefreshMetadata.ADD_URIS_TO_CLASSLOADER, uris)
        // Persist "coordinates|repos|jarCache" so the deploy survives restarts.
        val deployCmd = s"$coordinates|${repos.getOrElse("")}|${jarCache.getOrElse("")}"
        ToolsCallbackInit.toolsCallback.addURIs(alias, jars, deployCmd)
      }
      Nil
    } catch {
      case ex: Throwable =>
        ex match {
          case err: Error =>
            if (SystemFailure.isJVMFailureError(err)) {
              SystemFailure.initiateFailure(err)
              // If this ever returns, rethrow the error. We're poisoned
              // now, so don't let this thread continue.
              throw err
            }
          case _ =>
        }
        Misc.checkIfCacheClosing(ex)
        if (restart) {
          logWarning(s"Following mvn coordinate" +
              s" could not be resolved during restart: $coordinates", ex)
        }
        throw ex
    }
  }
}
case class DeployJarCommand(
    alias: String,
    paths: String,
    restart: Boolean) extends RunnableCommand {
  // Deploys local jar files (comma-separated `paths`) to all members'
  // classloaders and records them under `alias`.
  override def run(sparkSession: SparkSession): Seq[Row] = {
    if (paths.nonEmpty) {
      val jars = paths.split(",")
      val (availableUris, unavailableUris) = jars.partition(f => Files.isReadable(Paths.get(f)))
      if (unavailableUris.nonEmpty) {
        // Warn first; then fail hard — missing jars are fatal in both modes,
        // only the exception type differs (restart replay vs. user input).
        logWarning(s"Following jars are unavailable" +
            s" for deployment during restart: ${unavailableUris.deep.mkString(",")}")
        if (restart) {
          throw new IllegalStateException(
            s"Could not find deployed jars: ${unavailableUris.mkString(",")}")
        }
        throw new IllegalArgumentException(s"jars not readable: ${unavailableUris.mkString(",")}")
      }
      val sc = sparkSession.sparkContext
      // Serve each readable jar through the driver's RPC file server.
      val uris = availableUris.map(j => sc.env.rpcEnv.fileServer.addFile(new File(j)))
      SnappySession.addJarURIs(uris)
      RefreshMetadata.executeOnAll(sc, RefreshMetadata.ADD_URIS_TO_CLASSLOADER, uris)
      ToolsCallbackInit.toolsCallback.addURIs(alias, jars, paths, isPackage = false)
    }
    Nil
  }
}
case class ListPackageJarsCommand(isJar: Boolean) extends RunnableCommand {
  override val output: Seq[Attribute] = {
    AttributeReference("alias", StringType, nullable = false)() ::
        AttributeReference("coordinate", StringType, nullable = false)() ::
        AttributeReference("isPackage", BooleanType, nullable = false)() :: Nil
  }
  // Lists deployed jars/packages from the global commands set. Entries with a
  // '|' in the value are maven packages ("coords|repos|cache"); others are
  // comma-separated jar paths (reduced to their file names for display).
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val commands = ToolsCallbackInit.toolsCallback.getGlobalCmndsSet
    val rows = new ArrayBuffer[Row]
    commands.forEach(new Consumer[Entry[String, Object]] {
      override def accept(t: Entry[String, Object]): Unit = {
        // Skip bookkeeping entries (cluster id, member ids).
        if (!(t.getKey.equals(Constant.CLUSTER_ID) ||
            t.getKey.startsWith(Constant.MEMBER_ID_PREFIX))) {
          var alias = t.getKey
          // Skip dropped functions entry
          if (alias.contains(ContextJarUtils.droppedFunctionsKey)) return
          // Explicitly mark functions as UDF while listing jars/packages.
          alias = alias.replace(ContextJarUtils.functionKeyPrefix, "[UDF]")
          if (t.getValue.isInstanceOf[String]) {
            val value = t.getValue.toString
            val indexOf = value.indexOf('|')
            if (indexOf > 0) {
              // It is a package
              val pkg = value.substring(0, indexOf)
              rows += Row(alias, pkg, true)
            }
            else {
              // It is a jar
              val jars = value.split(',')
              val jarfiles = jars.map(f => {
                val lastIndexOf = f.lastIndexOf('/')
                val length = f.length
                if (lastIndexOf > 0) f.substring(lastIndexOf + 1, length)
                else {
                  f
                }
              })
              rows += Row(alias, jarfiles.mkString(","), false)
            }
          }
        }
      }
    })
    rows
  }
}
case class UnDeployCommand(alias: String) extends RunnableCommand {
  // Removes the deployment registered under `alias`: re-resolves a package's
  // jars (or splits a plain jar list) and strips them from all members'
  // classloaders, then deletes the registry entry.
  override def run(sparkSession: SparkSession): Seq[Row] = {
    // NOTE(review): starts as "" but may be assigned null by the match below;
    // the later `value != null` check relies on that — confirm intent.
    var value = ""
    val sc = sparkSession.sparkContext
    if (alias != null) {
      val cmndsSet = ToolsCallbackInit.toolsCallback.getGlobalCmndsSet
      // Linear scan of the global commands set for this alias' value.
      cmndsSet.forEach(new Consumer[Entry[String, Object]] {
        override def accept(t: Entry[String, Object]): Unit = {
          val alias1 = t.getKey
          if (alias == alias1) {
            value = t.getValue match {
              case s: String => s
              case _ => null
            }
          }
        }
      })
      if (value != null) {
        val indexOf = value.indexOf("|")
        val lastIndexOf = value.lastIndexOf("|")
        if (indexOf > 0) {
          // Package entry: "coordinates|repos|jarCache" — resolve to jars again.
          val coordinates = value.substring(0, indexOf)
          val repos = Option(value.substring(indexOf + 1, lastIndexOf))
          val jarCache = Option(value.substring(lastIndexOf + 1, value.length))
          val jarsstr = SparkSubmitUtils.resolveMavenCoordinates(coordinates,
            repos, jarCache)
          if (jarsstr.nonEmpty) {
            val pkgs = jarsstr.split(",")
            RefreshMetadata.executeOnAll(sc, RefreshMetadata.REMOVE_URIS_FROM_CLASSLOADER, pkgs)
            ToolsCallbackInit.toolsCallback.removeURIs(pkgs)
          }
        }
        else {
          // Plain jar entry: comma-separated list of jar paths.
          if (value.nonEmpty) {
            val jars = value.split(',')
            RefreshMetadata.executeOnAll(sc, RefreshMetadata.REMOVE_URIS_FROM_CLASSLOADER, jars)
            ToolsCallbackInit.toolsCallback.removeURIs(jars)
          }
        }
      }
    }
    // Finally drop the registry entry itself.
    ToolsCallbackInit.toolsCallback.removePackage(alias)
    Nil
  }
}
case class PutIntoValuesColumnTable(table: CatalogTable,
    colNames: Option[Seq[String]],
    values: Seq[Seq[Expression]])
    extends RunnableCommand {
  if (values.isEmpty) {
    throw new IllegalArgumentException(s"PUT: empty value list passed")
  }
  // Single LongType column: number of rows put into the table.
  override lazy val output: Seq[Attribute] = AttributeReference("count", LongType)() :: Nil
  // Coerces `from` to the column's type: direct cast when possible, otherwise
  // via an intermediate String cast; fails analysis when neither is legal.
  private def cast(from: Expression, to: StructField): Expression = {
    val fromType = from.dataType
    val toType = to.dataType
    if (fromType == toType) from
    else if (Cast.canCast(fromType, toType)) {
      Cast(from, toType)
    } else if (Cast.canCast(StringType, toType)) {
      Cast(Cast(from, StringType), toType)
    } else {
      throw new AnalysisException(
        s"Cannot cast '$from' to target type '${toType.sql}' for column '${to.name}'")
    }
  }
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val snc = sparkSession.asInstanceOf[SnappySession]
    val tableName = table.identifier.unquotedString
    val schema = table.schema
    import snappy._
    if (colNames.isEmpty) {
      // No explicit column list: each value tuple must match the full schema.
      val rows = values.zipWithIndex.map { case (exprs, i) =>
        if (exprs.length != schema.length) {
          throw new AnalysisException(
            s"PUT: mismatch in the length of value list and table schema at index $i")
        }
        new GenericInternalRow(
          exprs.zip(schema).map(p => cast(p._1, p._2).eval()).toArray)
      }
      Row(snc.internalCreateDataFrame(rows, schema).write.putInto(tableName)) :: Nil
    }
    else {
      // Explicit column list: resolve each name to its schema index first,
      // then scatter values into full-width rows (unlisted columns stay null).
      val columnNames = colNames.get
      val resolver = snc.sessionState.analyzer.resolver
      val indexMapping = columnNames.map { column =>
        val index = schema.fields.indexWhere(f => resolver(f.name, column))
        if (index == -1) {
          throw new AnalysisException(s"Field '$column' does not exist in " +
              s"${table.identifier.quotedString} with schema=${schema.map(_.name)}")
        }
        index
      }
      val rows = values.zipWithIndex.map { case (exprs, i) =>
        if (exprs.length != columnNames.length) {
          throw new AnalysisException(
            s"PUT: mismatch in the length of value list and column names at index $i")
        }
        val rowData = new Array[Any](schema.length)
        exprs.zip(indexMapping).foreach(p =>
          rowData(p._2) = cast(p._1, schema(p._2)).eval())
        new GenericInternalRow(rowData)
      }
      Row(snc.internalCreateDataFrame(rows, schema).write.putInto(tableName)) :: Nil
    }
  }
}
|
# frozen_string_literal: true
module Identity
  # This class represents the Openstack Group
  class Group < Core::ServiceLayer::Model
    # Require a human-readable name before the group can be saved.
    validates :name, presence: {
      message: 'Please provide a name for this group.'
    }
  end
end
|
// Copyright 2019 WHTCORPS INC Project Authors. Licensed under Apache-2.0.
use crate::errors::Result;
use crate::properties::DecodeProperties;
use crate::cone::Cone;
use crate::CausetHandleExt;
use std::ops::Deref;
/// Extension trait exposing per-block (SST-like) property collections for a
/// causet handle, over one or more key cones.
pub trait BlockPropertiesExt: CausetHandleExt {
    type BlockPropertiesCollection: BlockPropertiesCollection<
        Self::BlockPropertiesCollectionIter,
        Self::BlockPropertiesKey,
        Self::BlockProperties,
        Self::UserCollectedProperties,
    >;
    type BlockPropertiesCollectionIter: BlockPropertiesCollectionIter<
        Self::BlockPropertiesKey,
        Self::BlockProperties,
        Self::UserCollectedProperties,
    >;
    type BlockPropertiesKey: BlockPropertiesKey;
    type BlockProperties: BlockProperties<Self::UserCollectedProperties>;
    type UserCollectedProperties: UserCollectedProperties;
    /// Collects block properties for all blocks overlapping the given cones.
    fn get_properties_of_Blocks_in_cone(
        &self,
        causet: &Self::CausetHandle,
        cones: &[Cone],
    ) -> Result<Self::BlockPropertiesCollection>;
    /// Convenience wrapper: resolves `causetname` to a handle and fetches the
    /// block properties for the single cone `[spacelike_key, lightlike_key)`.
    fn get_cone_properties_causet(
        &self,
        causetname: &str,
        spacelike_key: &[u8],
        lightlike_key: &[u8],
    ) -> Result<Self::BlockPropertiesCollection> {
        let causet = self.causet_handle(causetname)?;
        let cone = Cone::new(spacelike_key, lightlike_key);
        // The inner call already yields the desired Result; no need for
        // the redundant `Ok(expr?)` re-wrapping (clippy: needless_question_mark).
        self.get_properties_of_Blocks_in_cone(causet, &[cone])
    }
}
/// A collection of per-block properties keyed by `PKey`, iterable via `I`.
pub trait BlockPropertiesCollection<I, PKey, P, UCP>
where
    I: BlockPropertiesCollectionIter<PKey, P, UCP>,
    PKey: BlockPropertiesKey,
    P: BlockProperties<UCP>,
    UCP: UserCollectedProperties,
{
    /// Returns an iterator over the (key, properties) pairs in this collection.
    fn iter(&self) -> I;
    /// Number of property entries in the collection.
    fn len(&self) -> usize;
    /// True when the collection holds no entries.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
/// Iterator over the (key, properties) pairs of a block-properties collection.
/// NOTE(review): `Iteron` appears to be this codebase's renamed `Iterator`
/// trait — confirm against its definition.
pub trait BlockPropertiesCollectionIter<PKey, P, UCP>: Iteron<Item = (PKey, P)>
where
    PKey: BlockPropertiesKey,
    P: BlockProperties<UCP>,
    UCP: UserCollectedProperties,
{
}
/// Key identifying one block's properties; dereferences to its string form.
pub trait BlockPropertiesKey: Deref<Target = str> {}
/// Properties recorded for a single block.
pub trait BlockProperties<UCP>
where
    UCP: UserCollectedProperties,
{
    /// Number of entries stored in the block.
    fn num_entries(&self) -> u64;
    /// User-defined properties collected while the block was written.
    fn user_collected_properties(&self) -> UCP;
}
/// Byte-keyed map of user-defined properties attached to a block.
pub trait UserCollectedProperties: DecodeProperties {
    /// Looks up the raw value stored under `index`, if any.
    fn get(&self, index: &[u8]) -> Option<&[u8]>;
    /// Number of user-collected entries.
    fn len(&self) -> usize;
    /// True when no user properties were collected.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
|
package amisync.memory
import amisync._
import com.amazonaws.services.s3.AbstractAmazonS3
import com.amazonaws.services.s3.model.{ListObjectsRequest, ObjectListing, S3ObjectSummary}
/**
 * Minimal in-memory S3 stub backed by a mutable set of keys.
 * Only supports prefix-based listObjects on a single bucket;
 * delimiter-based grouping is not implemented.
 */
case class MemoryS3(
    bucket: Bucket,
    var keys: Set[Key] = Set.empty
) extends AbstractAmazonS3 {
  override def listObjects(req: ListObjectsRequest): ObjectListing = {
    require(req.getBucketName == bucket.name)
    require(req.getDelimiter eq null)
    // ListObjectsRequest allows a null prefix, which S3 treats as "match all";
    // guard here so startsWith(null) cannot throw an NPE.
    val prefix = Option(req.getPrefix).getOrElse("")
    val listing = new ObjectListing
    keys.iterator.filter(_.name.startsWith(prefix)).foreach { key =>
      val summary = new S3ObjectSummary
      summary.setKey(key.name)
      listing.getObjectSummaries.add(summary)
    }
    listing
  }
}
|
import 'package:flutter/material.dart';
import 'package:shared/shared.dart';
/// Implicitly animated version of [Transform.rotate]: whenever [angle]
/// changes, the rotation animates from the previous value to the new one
/// over [duration].
class AnimatedRotation extends StatelessWidget {
// Target rotation in radians.
final double angle;
// Widget being rotated.
final Widget child;
// Alignment of the rotation origin within the child.
final AlignmentGeometry alignment;
// Optional translation applied before rotating.
final Offset? origin;
// Length of the implicit animation.
final Duration duration;
// NOTE(review): `curve` is accepted but never passed to
// ImplicitAnimationBuilder in build() below — the animation is presumably
// always linear. Confirm whether the builder supports a curve parameter.
final Curve? curve;
const AnimatedRotation({
Key? key,
required this.angle,
required this.child,
this.alignment = Alignment.center,
this.origin,
required this.duration,
this.curve,
}) : super(key: key);
@override
Widget build(BuildContext context) {
// Interpolates the raw angle; Transform.rotate does the actual rotation
// each frame with the interpolated value.
return ImplicitAnimationBuilder<double>(
lerp: (a, b, t) => lerpDouble(a, b, t)!,
value: angle,
duration: duration,
builder: (context, value, _) {
return Transform.rotate(
angle: value,
child: child,
origin: origin,
alignment: alignment,
);
},
);
}
}
|
#!/bin/bash
# some code i wrote to add to revamp that turned out to be unsuitable.
# but it corrects a problem in cakelampvm v002 release that i find annoying,
# so here it is as its own file.
# clean out some old files that were not checked in in mapsdemo.
# NOTE(review): `exit_on_error` and `rpuffer` are not defined here — this
# script assumes they are provided by the surrounding shell environment.
# Confirm it is always sourced/run from that environment.
echo Doing some git repository maintenance in fred account.
#
# change over to fred folder
pushd /home/fred
exit_on_error "changing dir to fred's home; what have you done with fred?"
pushd apps/mapsdemo/avenger5
exit_on_error "changing dir to mapsdemo app"
# First attempt, silenced; only if it fails do we take corrective action.
rpuffer . &>/dev/null
if [ $? -ne 0 ]; then
# it seems our old files are still conflicting this.
# Remove the stale, never-committed config files that block the update.
if [ -f config/config_google.php ]; then
\rm -f config/config_google.php
exit_on_error "removing old config for google"
fi
if [ -f config/app.php ]; then
\rm -f config/app.php
exit_on_error "removing old config for app"
fi
# Discard any remaining local changes so the repo matches HEAD.
git reset --hard HEAD
exit_on_error "resetting git's hard head"
# Retry now that the conflicting files are gone.
rpuffer .
#hmmm: use output saver thing when that exists.
exit_on_error "puffing out mapsdemo app after inadequate corrective action was taken"
fi
popd
popd
#...coolness, if we got to here.
|
#pragma once
/* Copyright © 2018-2019 N. Van Bossuyt. */
/* This code is licensed under the MIT License. */
/* See: LICENSE.md */
#include <libdevice/keys.h>
/* Device node and IPC channel/message names for the keyboard driver. */
#define KEYBOARD_DEVICE "/dev/kbd"
#define KEYBOARD_CHANNEL "#dev:keyboard"
#define KEYBOARD_KEYPRESSED "dev:keyboard.keypressed"
#define KEYBOARD_KEYRELEASED "dev:keyboard.keyreleased"
#define KEYBOARD_KEYTYPED "dev:keyboard.keytyped"
/* NOTE(review): the FRAMEBUFFER_CALL_* names below select *keyboard* keymap
operations; the prefix looks copy-pasted from the framebuffer header —
confirm the intended naming before renaming (callers depend on them). */
#define FRAMEBUFFER_CALL_SET_KEYMAP 0
/* Argument block for the set-keymap device call. */
typedef struct
{
uint size; /* size of the keymap buffer, in bytes */
void* keymap; /* pointer to the keymap data */
} keyboard_set_keymap_args_t;
#define FRAMEBUFFER_CALL_GET_KEYMAP 1
/* Keyboard event payload: raw key code plus the decoded codepoint. */
typedef struct
{
key_t key;
int codepoint;
} keyboard_event_t;
|
import type { Option } from './option';
import { some, none } from './option';
/**
 * Returns the first element of `as` wrapped in `some`, or `none` when the
 * array is empty.
 *
 * Generalized to accept any `ReadonlyArray<A>` — backward compatible, since
 * every `Array<A>` is assignable to `ReadonlyArray<A>` and the function
 * never mutates its input.
 */
export const head = <A>(as: ReadonlyArray<A>): Option<A> => {
  return as.length === 0 ? none : some(as[0]);
};
|
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
package ${package};
import org.apache.camel.builder.RouteBuilder;
// Camel route that answers messages from "direct:welcome-route" with a fixed
// greeting after a random artificial delay (1-2 s), logging the correlation
// id so the call shows up in tracing. (This file is a Maven archetype
// template; the ${symbol_dollar} placeholders are expanded at generation
// time and must not be altered.)
public class WelcomeRoute extends RouteBuilder {
public static final String ROUTE_ID = "processGreeting";
public static final String FROM = "direct:welcome-route";
@Override
public void configure() throws Exception {
from(FROM)
.routeId(ROUTE_ID)
.delay(simple("${symbol_dollar}{random(1000, 2000)}"))
.log("Welcome ${symbol_dollar}{header.x-correlation-id}") //show up in opentracing logs
.setBody(constant("Hello World - greetings!"));
}
}
|
'use strict'
const webpack = require('./helpers/compiler')
const { loader } = require('./helpers/compilation')
const { copyFile, deleteFile } = require('./helpers/fs');
// Integration tests for the loader: compiles fixtures through webpack and
// inspects the emitted module source.
// NOTE(review): the 'Watching' describe block is nested inside 'Loader' and
// its closing bracket sits at the very end — confirm the nesting is
// intentional rather than a misplaced brace.
describe('Loader', () => {
// Baseline: no plugins, CSS passes through unchanged.
test('Default', () => {
const config = {
loader: {
options: {
plugins: []
}
}
}
return webpack('css/index.js', config).then((stats) => {
const src = loader(stats).src
expect(src).toEqual("module.exports = \"a { color: black }\\n\"")
expect(src).toMatchSnapshot()
})
})
describe('Watching', () => {
describe('Dependencies', () => {
// Fixture paths: `changed` is rewritten between rebuilds to simulate edits.
const files = {
css: 'watch/index.css',
error: 'watch/error.css',
changed: 'watch/import.css'
}
beforeEach(() => copyFile(files.css, files.changed))
afterEach(() => deleteFile(files.changed))
// Drives three successive watch rebuilds: clean -> broken import -> fixed.
test('Error', () => {
const config = {
loader: {
options: {
plugins: [
require('postcss-import')
],
}
}
}
const steps = [
// 1) initial build succeeds with no errors
(stats) => {
const { err, src } = loader(stats)
expect(src).toMatchSnapshot()
expect(err.length).toEqual(0)
return copyFile(files.error, files.changed)
},
// 2) broken import swapped in: exactly one error expected
(stats) => {
const { err, src } = loader(stats)
expect(src).toMatchSnapshot()
expect(err.length).toEqual(1)
return copyFile(files.css, files.changed)
},
// 3) original restored: output is clean again, then stop watching
(stats, close) => {
const { err, src } = loader(stats)
expect(src).toMatchSnapshot()
expect(src).toEqual("module.exports = \"a { color: black }\\n\"")
expect(err.length).toEqual(0)
return close()
}
]
let step = 0
// Each watch callback advances to the next step in the scenario above.
const options = {
watch (err, stats, close) {
steps[step](stats, close)
step++
}
}
return webpack('watch/index.js', config, options)
})
})
})
})
|
# Adds part-tracking attributes (part number, revision, quantities, comments,
# stock level) to the parts table.
class AddMorePartsAttrib < ActiveRecord::Migration[6.1]
  def change
    add_column :parts, :part_num, :string
    # Fixed: ':int' is not a recognized ActiveRecord column type — the
    # supported symbol is ':integer'. ':int' only works by raw SQL
    # passthrough on some adapters.
    add_column :parts, :revision, :integer
    add_column :parts, :qty_per, :integer
    add_column :parts, :order_qty, :integer
    add_column :parts, :design_eng_comments, :string
    add_column :parts, :stock, :integer
  end
end
|
package io.opentargets.etl.backend.target
import io.opentargets.etl.backend.EtlSparkUnitTest
import io.opentargets.etl.backend.target.GeneWithLocationTest.getHpaDataframe
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.scalatest.matchers.should.Matchers.convertToAnyShouldWrapper
/** Fixture loader: reads the two HPA TSV resources used by the suite.
 * (Spark unescapes the "\\t" separator option into a tab character.)
 */
object GeneWithLocationTest {
def getHpaDataframe(implicit sparkSession: SparkSession): (DataFrame, DataFrame) = {
val pathHpa = this.getClass.getResource("/target/hpa_20.tsv").getPath
val pathHpaSl = this.getClass.getResource("/target/hpa_sl.tsv").getPath
val reader: String => DataFrame = sparkSession.read.option("sep", "\\t").option("header", value = true).csv(_)
(reader(pathHpa), reader(pathHpaSl))
}
}
/** Verifies that subcellular locations are extracted from raw HPA data. */
class GeneWithLocationTest extends EtlSparkUnitTest {
"Subcellular locations" should "be extracted from HPA raw data" in {
import sparkSession.implicits._
// given
val (hpa, sl) = getHpaDataframe
// when
val results: Dataset[GeneWithLocation] = GeneWithLocation(hpa, sl)
// then
// Spot-check one gene's location count, then the overall row count.
results.filter(_.id equals "ENSG00000001629").map(_.locations).head() should have size 2
results.count() should be(19) // size of input file -1 because of headers.
}
}
|
package ru.luckycactus.steamroulette.presentation.features.game_details.model
import ru.luckycactus.steamroulette.domain.games.entity.GameHeader
import ru.luckycactus.steamroulette.domain.games.entity.MetacriticInfo
import ru.luckycactus.steamroulette.domain.games.entity.PlatformsAvailability
import ru.luckycactus.steamroulette.domain.games.entity.Screenshot
import ru.luckycactus.steamroulette.presentation.ui.widget.ContentState
/**
 * UI models for the sections of the game-details screen; each subtype maps
 * to one list item / section rendered by the adapter.
 */
sealed class GameDetailsUiModel {
// Top section: cover/header info plus optional publishing metadata.
data class Header(
val gameHeader: GameHeader,
val developer: String? = null,
val publisher: String? = null,
val releaseDate: String? = null
) : GameDetailsUiModel()
// Summary section; isEmpty() lets the adapter skip a fully blank summary.
data class ShortDescription(
val value: String?,
val categories: List<String>?,
val genres: List<String>?,
val requiredAge: Int?,
val metacriticInfo: MetacriticInfo?,
val detailedDescriptionAvailable: Boolean
) : GameDetailsUiModel() {
// True when every displayable field is absent or blank.
fun isEmpty() =
value.isNullOrEmpty() && categories.isNullOrEmpty() && genres.isNullOrEmpty()
&& requiredAge == null && metacriticInfo == null
}
// Static external-links section (no payload needed).
object Links : GameDetailsUiModel()
// Supported-languages section.
data class Languages(
val languages: String
) : GameDetailsUiModel()
// Platform availability, with a flag for showing system requirements.
data class Platforms(
val platforms: PlatformsAvailability,
val systemRequirementsAvailable: Boolean
) : GameDetailsUiModel()
// Screenshot gallery section.
data class Screenshots(
val screenshots: List<Screenshot>
) : GameDetailsUiModel()
// Loading/error placeholder driven by ContentState.
data class Placeholder(
val contentState: ContentState
) : GameDetailsUiModel()
}
|
using System.Collections.Generic;
namespace Uskr.IR
{
/// <summary>
/// Root of the intermediate representation for one assembly: its methods,
/// members and embedded resources. Lists are initialized empty so callers
/// can append without null checks.
/// </summary>
public class IRAssembly : IntermediateRepresentation
{
public List<IRMethod> Methods { get; set; } = new List<IRMethod>();
public List<IRMember> Members { get; set; } = new List<IRMember>();
public List<IREmbedded> EmbeddedResources { get; set; } = new List<IREmbedded>();
}
}
|
package evbadger
import (
"encoding/binary"
"sort"
"sync"
"time"
"github.com/alxarch/evdb/blob"
"github.com/dgraph-io/badger/v2"
)
// Compaction merges event snapshot compacting data to hourly batches
// Runs one compaction scan per registered event concurrently and returns
// the first error, if any.
func (db *DB) Compaction(now time.Time) error {
var (
wg sync.WaitGroup
errc = make(chan error, len(db.events))
// NOTE(review): gc is declared but never assigned, so the
// RunValueLogGC call below is unreachable dead code — presumably it
// was meant to be set to db.badger. Confirm intent before removing.
gc *badger.DB
)
for event := range db.events {
// Capture the per-event value before launching the goroutine.
b := db.events[event]
wg.Add(1)
go func() {
defer wg.Done()
errc <- compactionScan(db.badger, b.id, now)
}()
}
// Wait for all scans; errc is buffered so none of them block.
wg.Wait()
close(errc)
for err := range errc {
if err != nil {
return err
}
}
if gc == nil {
return nil
}
return gc.RunValueLogGC(0.5)
}
// compactionEntry is one (field id, counter) pair read from a stored value.
type compactionEntry struct {
id uint64
n int64
}
// compactionBuffer accumulates entries from many snapshots before merging.
type compactionBuffer []compactionEntry
// Len implements sort.Interface.
func (cc compactionBuffer) Len() int {
return len(cc)
}
// Swap implements sort.Interface.
func (cc compactionBuffer) Swap(i, j int) {
cc[i], cc[j] = cc[j], cc[i]
}
// Less orders entries by id so duplicates become adjacent for Compact.
func (cc compactionBuffer) Less(i, j int) bool {
return cc[i].id < cc[j].id
}
// Read appends entries decoded from value, which is a sequence of 16-byte
// records: big-endian uint64 id followed by big-endian uint64 counter.
// Trailing bytes shorter than one record are ignored.
func (cc compactionBuffer) Read(value []byte) compactionBuffer {
for tail := value; len(tail) >= 16; tail = tail[16:] {
id := binary.BigEndian.Uint64(tail)
n := int64(binary.BigEndian.Uint64(tail[8:]))
cc = append(cc, compactionEntry{id, n})
}
return cc
}
// Pool of reusable compaction buffers to avoid per-task allocations.
var compactionBuffers sync.Pool
// getCompactionBuffer returns a pooled buffer, or a fresh one with room
// for 64 entries.
func getCompactionBuffer() compactionBuffer {
if x := compactionBuffers.Get(); x != nil {
return x.(compactionBuffer)
}
return make([]compactionEntry, 0, 64)
}
// putCompactionBuffer returns a buffer to the pool, emptied but keeping
// its capacity.
func putCompactionBuffer(cc compactionBuffer) {
compactionBuffers.Put(cc[:0])
}
// Compact sorts the buffer by id and merges adjacent entries with equal
// ids by summing their counters, compacting in place. The returned slice
// is a prefix of cc holding only the merged entries.
func (cc compactionBuffer) Compact() compactionBuffer {
sort.Sort(cc)
// last points at the most recently kept entry; duplicates fold into it.
var last *compactionEntry
j := 0
for i := range cc {
c := &cc[i]
if last != nil && last.id == c.id {
last.n += c.n
continue
}
last = c
cc[j] = *c
j++
}
return cc[:j]
}
// Reset empties the buffer while preserving capacity.
func (cc compactionBuffer) Reset() compactionBuffer {
return cc[:0]
}
// AppendBlob serializes the buffer onto s as 16-byte records (big-endian
// id, big-endian counter), matching the format Read decodes. Never fails;
// the error return satisfies the blob appender signature.
func (cc compactionBuffer) AppendBlob(s []byte) ([]byte, error) {
for i := range cc {
c := &cc[i]
s = blob.WriteU64BE(s, c.id)
s = blob.WriteU64BE(s, uint64(c.n))
}
return s, nil
}
// compactionScan walks all snapshots of event id that are older than the
// previous full hour and merges each hour-long window into a single batch
// via compactionTask.
func compactionScan(db *badger.DB, id eventID, now time.Time) error {
	txn := db.NewTransaction(false)
	defer txn.Discard()
	iter := txn.NewIterator(badger.IteratorOptions{})
	defer iter.Close()
	seekEvent(iter, id, time.Time{})
	if !iter.Valid() {
		return nil
	}
	key := iter.Item().Key()
	ts, ok := parseEventKey(id, key)
	const step = int64(time.Hour)
	// Truncate timestamp to step.
	// BUG FIX: the original `ts -= -ts % step` does not truncate — for a
	// positive ts it lands on a value that is not a multiple of step
	// (e.g. ts=5, step=3 gave 7). Plain modulo performs the intended
	// floor-to-step truncation for non-negative timestamps.
	ts -= ts % step
	// Only compact windows that end strictly before the previous full hour.
	max := now.Truncate(time.Hour).Add(-1 * time.Hour).Unix()
	// BUG FIX: the original advanced the window with
	// `start, end = end, start+step`, where `start` is the *old* start —
	// making start == end on every iteration after the first, so no key
	// could ever fall inside the window. Advance both edges by step.
	for start, end, n := ts, ts+step, 0; ok && start < max; start, end, n = end, end+step, 0 {
		for ; iter.Valid(); iter.Next() {
			key = iter.Item().Key()
			ts, ok = parseEventKey(id, key)
			if ok && start < ts && ts < end {
				n++ // snapshot strictly inside the window: mergeable
			} else if start == ts {
				// Key already sits on the window boundary (previously
				// compacted batch); skip it.
				continue
			} else {
				break
			}
		}
		if n > 0 {
			if err := compactionTask(db, id, start, end); err != nil {
				return err
			}
		}
	}
	return nil
}
// compactionTask merges every snapshot of event id with start <= ts < end
// into a single record stored at the window's start key: values are decoded
// into a compaction buffer, duplicate field ids are summed, the merged blob
// is written at `start`, and the now-redundant inner keys are deleted —
// all in one read/write transaction.
func compactionTask(db *badger.DB, id eventID, start, end int64) error {
txn := db.NewTransaction(true)
defer txn.Discard()
opt := badger.DefaultIteratorOptions
iter := txn.NewIterator(opt)
defer iter.Close()
seek := eventKey(id, start)
cc := getCompactionBuffer()
defer putCompactionBuffer(cc)
for iter.Seek(seek[:]); iter.Valid(); iter.Next() {
item := iter.Item()
key := item.Key()
ts, ok := parseEventKey(id, key)
// Stop at the first key past the window (or a foreign key).
if !ok || ts >= end {
break
}
// Accumulate this snapshot's (id, counter) pairs.
err := item.Value(func(v []byte) error {
cc = cc.Read(v)
return nil
})
if err != nil {
return err
}
// Keys strictly inside the window are superseded by the merged record
// at `start`, so delete them.
if ts > start {
if err := txn.Delete(key); err != nil {
return err
}
}
// The seek must never land before the window start.
if ts < start {
panic("Invalid seek")
}
}
cc = cc.Compact()
if len(cc) > 0 {
value := getBuffer()
value, _ = cc.AppendBlob(value[:0])
defer putBuffer(value)
if err := txn.Set(seek[:], value); err != nil {
return err
}
return txn.Commit()
}
return nil
}
|
require 'test_helper'
# Functional tests for ConferencesController, exercising the nested
# conference -> speaker -> presentations form object (conference_form)
# through create/update/destroy, including dynamically added and removed
# nested presentations.
class ConferencesControllerTest < ActionController::TestCase
fixtures :conferences, :speakers, :presentations
setup do
@conference = conferences(:ruby)
end
# Index renders and assigns the collection.
test "should get index" do
get :index
assert_response :success
assert_not_nil assigns(:conferences)
end
test "should get new" do
get :new
assert_response :success
end
# Happy path: nested speaker and two presentations persist in one request.
test "should create conference" do
assert_difference('Conference.count') do
post :create, conference: {
name: "Euruco",
city: "Athens",
speaker_attributes: {
name: "Petros Markou",
occupation: "Developer",
presentations_attributes: {
"0" => { topic: "Ruby OOP", duration: "1h" },
"1" => { topic: "Ruby Closures", duration: "1h" },
}
}
}
end
conference_form = assigns(:conference_form)
assert conference_form.valid?
assert_redirected_to conference_path(conference_form)
assert_equal "Euruco", conference_form.name
assert_equal "Athens", conference_form.city
assert_equal "Petros Markou", conference_form.speaker.name
assert_equal "Developer", conference_form.speaker.occupation
assert_equal "Ruby OOP", conference_form.speaker.presentations[0].topic
assert_equal "1h", conference_form.speaker.presentations[0].duration
assert_equal "Ruby Closures", conference_form.speaker.presentations[1].topic
assert_equal "1h", conference_form.speaker.presentations[1].duration
assert conference_form.speaker.persisted?
# NOTE(review): persisted? return value is discarded — this loop asserts
# nothing. Presumably `assert presentation.persisted?` was intended.
conference_form.speaker.presentations.each do |presentation|
presentation.persisted?
end
assert_equal "Conference: #{conference_form.name} was successfully created.", flash[:notice]
end
# A presentation added client-side gets an arbitrary index key ("12312")
# and must still be accepted and persisted.
test "should create dynamically added presentation to speaker" do
assert_difference('Conference.count') do
post :create, conference: {
name: "Euruco",
city: "Athens",
speaker_attributes: {
name: "Petros Markou",
occupation: "Developer",
presentations_attributes: {
"0" => { topic: "Ruby OOP", duration: "1h" },
"1" => { topic: "Ruby Closures", duration: "1h" },
"12312" => { topic: "Ruby Metaprogramming", duration: "2h" }
}
}
}
end
conference_form = assigns(:conference_form)
assert conference_form.valid?
assert_redirected_to conference_path(conference_form)
assert_equal "Euruco", conference_form.name
assert_equal "Athens", conference_form.city
assert_equal "Petros Markou", conference_form.speaker.name
assert_equal "Developer", conference_form.speaker.occupation
assert_equal 3, conference_form.speaker.presentations.size
assert_equal "Ruby OOP", conference_form.speaker.presentations[0].topic
assert_equal "1h", conference_form.speaker.presentations[0].duration
assert_equal "Ruby Closures", conference_form.speaker.presentations[1].topic
assert_equal "1h", conference_form.speaker.presentations[1].duration
assert_equal "Ruby Metaprogramming", conference_form.speaker.presentations[2].topic
assert_equal "2h", conference_form.speaker.presentations[2].duration
assert conference_form.speaker.persisted?
# NOTE(review): same missing-assert pattern as above.
conference_form.speaker.presentations.each do |presentation|
presentation.persisted?
end
assert_equal "Conference: #{conference_form.name} was successfully created.", flash[:notice]
end
# Validation errors must propagate from every nesting level and nothing
# may be saved.
test "should not create conference with invalid params" do
conference = conferences(:ruby)
assert_difference(['Conference.count', 'Speaker.count'], 0) do
post :create, conference: {
name: conference.name,
city: nil,
speaker_attributes: {
name: conference.speaker.name,
occupation: "Developer",
presentations_attributes: {
"0" => { topic: nil, duration: "1h" },
"1" => { topic: "Ruby Closures", duration: nil },
}
}
}
end
conference_form = assigns(:conference_form)
assert_not conference_form.valid?
assert_includes conference_form.errors.messages[:name], "has already been taken"
assert_includes conference_form.errors.messages[:city], "can't be blank"
assert_includes conference_form.speaker.errors.messages[:name], "has already been taken"
assert_includes conference_form.speaker.presentations[0].errors.messages[:topic], "can't be blank"
assert_includes conference_form.speaker.presentations[1].errors.messages[:duration], "can't be blank"
end
test "should show conference" do
get :show, id: @conference
assert_response :success
end
test "should get edit" do
get :edit, id: @conference
assert_response :success
end
# Updates all levels of the nested form, matching presentations by id.
test "should update conference" do
assert_difference('Conference.count', 0) do
patch :update, id: @conference, conference: {
name: "GoGaruco",
city: "Golden State",
speaker_attributes: {
name: "John Doe",
occupation: "Developer",
presentations_attributes: {
"0" => { topic: "Rails OOP", duration: "1h", id: presentations(:ruby_oop).id },
"1" => { topic: "Rails Patterns", duration: "1h", id: presentations(:ruby_closures).id },
}
}
}
end
conference_form = assigns(:conference_form)
assert_redirected_to conference_path(conference_form)
assert_equal "GoGaruco", conference_form.name
assert_equal "Golden State", conference_form.city
assert_equal "John Doe", conference_form.speaker.name
assert_equal "Developer", conference_form.speaker.occupation
assert_equal "Rails Patterns", conference_form.speaker.presentations[0].topic
assert_equal "1h", conference_form.speaker.presentations[0].duration
assert_equal "Rails OOP", conference_form.speaker.presentations[1].topic
assert_equal "1h", conference_form.speaker.presentations[1].duration
assert_equal "Conference: #{conference_form.name} was successfully updated.", flash[:notice]
end
# A presentation flagged with _destroy: "1" must be removed on update.
test "should destroy dynamically removed presentation from speaker" do
assert_difference('Conference.count', 0) do
patch :update, id: @conference, conference: {
name: "GoGaruco",
city: "Golden State",
speaker_attributes: {
name: "John Doe",
occupation: "Developer",
presentations_attributes: {
"0" => { topic: "Rails OOP", duration: "1h", id: presentations(:ruby_oop).id },
"1" => { topic: "Rails Patterns", duration: "1h", id: presentations(:ruby_closures).id, _destroy: "1" },
}
}
}
end
conference_form = assigns(:conference_form)
assert_redirected_to conference_path(conference_form)
assert_equal "GoGaruco", conference_form.name
assert_equal "Golden State", conference_form.city
assert_equal "John Doe", conference_form.speaker.name
assert_equal "Developer", conference_form.speaker.occupation
assert_equal "Rails OOP", conference_form.speaker.presentations[0].topic
assert_equal "1h", conference_form.speaker.presentations[0].duration
assert_equal 1, conference_form.speaker.presentations.size
assert_equal "Conference: #{conference_form.name} was successfully updated.", flash[:notice]
end
test "should destroy conference" do
assert_difference('Conference.count', -1) do
delete :destroy, id: @conference
end
assert_redirected_to conferences_path
end
end
|
# retro-snaker
一个简单的贪吃蛇程序,Win32. 基于EasyWindow库
A simple retro Snake ("retro-snaker") game for Win32, based on the EasyWindow library
|
using System;
using System.Windows.Forms;
namespace OpenSkieScheduler3.Controls.Buttons
{
public class SaveSchedule: Button
{
public SaveSchedule()
{
InitializeComponent();
}
void InitializeComponent()
{
Click += new EventHandler( SaveSchedule_Click );
}
[STAThread]
void SaveSchedule_Click( object sender, EventArgs e )
{
SaveFileDialog openFileDialog1 = new SaveFileDialog();
openFileDialog1.InitialDirectory = Environment.CurrentDirectory;
openFileDialog1.Filter = "Schedule files(*.xml)|*.xml|All files (*.*)|*.*";
openFileDialog1.FilterIndex = 1;
openFileDialog1.RestoreDirectory = true;
//openFileDialog1.RestoreDirectory = true;
if( openFileDialog1.ShowDialog() == DialogResult.OK )
{
try
{
ControlList.schedule.WriteXML( openFileDialog1.FileName );
}
catch( Exception e2 )
{
Console.WriteLine( e2.Message );
}
}
}
}
public class LoadSchedule: Button
{
public LoadSchedule()
{
InitializeComponent();
}
void InitializeComponent()
{
Click += new EventHandler( SaveSchedule_Click );
}
void SaveSchedule_Click( object sender, EventArgs e )
{
OpenFileDialog openFileDialog1 = new OpenFileDialog();
openFileDialog1.InitialDirectory = Environment.CurrentDirectory;
openFileDialog1.Filter = "Schedule files(*.xml)|*.xml|All files (*.*)|*.*";
openFileDialog1.FilterIndex = 1;
openFileDialog1.RestoreDirectory = true;
//openFileDialog1.RestoreDirectory = true;
if( openFileDialog1.ShowDialog() == DialogResult.OK )
{
try
{
ControlList.schedule.ReadXML( openFileDialog1.FileName );
}
catch( Exception e2 )
{
Console.WriteLine( e2.Message );
}
}
}
}
}
|
import React from "react";
import { renderWithRedux } from "../../services/TestHelper";
import NearbyScreen from "../NearbyScreen";
import "../../common/String.extension";
// Smoke test: NearbyScreen mounts without throwing when rendered with the
// Redux store supplied by renderWithRedux.
test(`NearbyScreen`, async () => {
renderWithRedux(<NearbyScreen />);
});
|
/*
* Copyright 2018 Kaidan Gustave
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@file:Suppress("MemberVisibilityCanBePrivate")
package me.kgustave.dkt.core.handle
import kotlinx.atomicfu.atomic
import kotlinx.coroutines.*
import me.kgustave.dkt.core.internal.websocket.WebSocketConnection
import me.kgustave.dkt.util.createLogger
import me.kgustave.dkt.util.currentTimeMs
import java.lang.IllegalStateException
import java.util.concurrent.CancellationException
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.RejectedExecutionException
/**
 * Default [SessionHandler]: serializes websocket connections through a
 * single-threaded queue, enforcing a minimum delay between consecutive
 * connection attempts.
 */
open class SessionHandlerAdapter: SessionHandler {
    private companion object {
        // Minimum spacing between two connection attempts (ms).
        private const val ConnectionDelay = 5 * 1000L
        private val Log = createLogger(SessionHandler::class)
    }

    // Global rate-limit timestamp shared by all sessions.
    protected val global = atomic(Long.MIN_VALUE)

    override var globalRateLimit: Long
        get() = global.value
        set(value) { global.value = value }

    protected val connectionQueue = ConcurrentLinkedQueue<WebSocketConnection>()
    protected val lock = Any()

    // Currently running queue-drain job, guarded by [lock].
    private var job: Job? = null
    private var lastConnectTime = 0L
    private val dispatcher = newSingleThreadContext("SessionHandler Queue Dispatcher")

    override fun queueConnection(connection: WebSocketConnection) {
        connectionQueue += connection
        startQueueJob()
    }

    override fun dequeueConnection(connection: WebSocketConnection) {
        connectionQueue.remove(connection)
    }

    // Starts the drain job if none is running; [lock] prevents duplicates.
    private fun startQueueJob() {
        synchronized(lock) {
            if(job != null) return
            job = createJob()
            job!!.start()
        }
    }

    private fun createJob(): Job = GlobalScope.launch(dispatcher, start = CoroutineStart.LAZY) {
        // BUG FIX: the original computed the elapsed time since the last
        // connect and then waited `elapsed` ms — i.e. the time already
        // spent — instead of the *remaining* time needed to honor the
        // rate limit. Wait for ConnectionDelay - elapsed instead.
        val elapsed = currentTimeMs - lastConnectTime
        if(elapsed < ConnectionDelay) delay(ConnectionDelay - elapsed)
        var multiple = connectionQueue.size > 1
        while(connectionQueue.isNotEmpty()) {
            val connection = connectionQueue.poll() ?: break
            try {
                connection.run(multiple && connectionQueue.isEmpty())
                multiple = true
                lastConnectTime = currentTimeMs
                if(connectionQueue.isEmpty()) break
                delay(ConnectionDelay)
            } catch(e: IllegalStateException) {
                // Connection could not run; requeue it for another attempt.
                Log.error("Failed to run connection!", e)
                queueConnection(connection)
            } catch(e: CancellationException) {
                // TODO Logging
                // Job was cancelled mid-run; requeue and stop draining.
                queueConnection(connection)
                break
            } catch(e: RejectedExecutionException) {
                // Dispatcher is shutting down; stop draining.
                break
            }
        }
        synchronized(lock) {
            job = null
            // Connections queued while we were finishing need a new job.
            if(connectionQueue.isNotEmpty()) {
                startQueueJob()
            }
        }
    }

    override fun shutdown() {
        job?.cancel()
        dispatcher.close()
    }
}
|
import { Shaders, Node, GLSL } from 'gl-react';
import React from 'react';
import mixArrays from '../utils/mixArrays';
// Fragment shader: multiplies each texel by a 4x4 color matrix ("sepia")
// supplied as a uniform, so the blend factor is baked into the matrix on
// the JS side rather than computed per-fragment.
const shaders = Shaders.create({
sepia: {
frag: GLSL`
precision highp float;
varying vec2 uv;
uniform sampler2D t;
uniform mat4 sepia;
void main () {
gl_FragColor = sepia * texture2D(t, uv);
}
`
}
});
// Default blend factor: 0 leaves the input untouched.
export const DefaultValue = 0;
// Sepia filter node: linearly interpolates between the identity matrix
// (factor 0) and a sepia color matrix (factor 1), then applies it to the
// child texture via the shader above.
export default function Sepia({ factor = DefaultValue, children: t }) {
const sepia = mixArrays([
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
], [
.3, .3, .3, 0,
.6, .6, .6, 0,
.1, .1, .1, 0,
0.2, 0, -0.2, 1
], factor);
return (
<Node
shader={shaders.sepia}
uniforms={{
sepia,
t,
}}
/>
)
}
|
import { EmmetConfiguration, getEmmetCompletionParticipants } from 'vscode-emmet-helper';
import { getLanguageService, HTMLDocument } from 'vscode-html-languageservice';
import {
CompletionList,
Hover,
Position,
SymbolInformation,
CompletionItem,
CompletionItemKind
} from 'vscode-languageserver';
import {
DocumentManager,
Document,
isInTag,
getNodeIfIsInComponentStartTag
} from '../../lib/documents';
import { LSConfigManager, LSHTMLConfig } from '../../ls-config';
import { svelteHtmlDataProvider } from './dataProvider';
import { HoverProvider, CompletionsProvider } from '../interfaces';
/**
 * Language-server plugin providing HTML hover, completions (incl. emmet and
 * lang-attribute variants), tag auto-close and document symbols for Svelte
 * documents, backed by vscode-html-languageservice with Svelte-specific
 * custom data.
 */
export class HTMLPlugin implements HoverProvider, CompletionsProvider {
private configManager: LSConfigManager;
private lang = getLanguageService({ customDataProviders: [svelteHtmlDataProvider] });
// Cache of parsed HTML documents, keyed weakly by their Document.
private documents = new WeakMap<Document, HTMLDocument>();
// Tags that get extra `lang="..."` completion variants.
private styleScriptTemplate = new Set(['template', 'style', 'script']);
constructor(
docManager: DocumentManager,
configManager: LSConfigManager,
private emmetConfig?: EmmetConfiguration
) {
this.configManager = configManager;
// Keep the HTMLDocument cache in sync with document edits.
docManager.on('documentChange', (document) => {
this.documents.set(document, document.html);
});
}
/** Hover info at `position`, or null when disabled/unparsed. */
doHover(document: Document, position: Position): Hover | null {
if (!this.featureEnabled('hover')) {
return null;
}
const html = this.documents.get(document);
if (!html) {
return null;
}
return this.lang.doHover(document, position, html);
}
/**
 * HTML + emmet completions at `position`. Returns null inside moustache
 * tags and script blocks, where HTML completions would be wrong.
 */
getCompletions(document: Document, position: Position): CompletionList | null {
if (!this.featureEnabled('completions')) {
return null;
}
const html = this.documents.get(document);
if (!html) {
return null;
}
if (
this.isInsideMoustacheTag(html, document, position) ||
isInTag(position, document.scriptInfo) ||
isInTag(position, document.moduleScriptInfo)
) {
return null;
}
// Emmet participant fills this list as a side effect of doComplete.
const emmetResults: CompletionList = {
isIncomplete: true,
items: []
};
this.lang.setCompletionParticipants([
getEmmetCompletionParticipants(
document,
position,
'html',
this.emmetConfig || {},
emmetResults
)
]);
const results = this.isInComponentTag(html, document, position)
? // Only allow emmet inside component element tags.
// Other attributes/events would be false positives.
CompletionList.create([])
: this.lang.doComplete(document, position, html);
return CompletionList.create(
[...results.items, ...this.getLangCompletions(results.items), ...emmetResults.items],
// Emmet completions change on every keystroke, so they are never complete
emmetResults.items.length > 0
);
}
// True when the position is inside a component's start tag.
private isInComponentTag(html: HTMLDocument, document: Document, position: Position) {
return !!getNodeIfIsInComponentStartTag(html, document.offsetAt(position));
}
/**
 * For each style/script/template completion present, adds variants that
 * include a `lang="..."` attribute (ts, less/scss, pug).
 */
private getLangCompletions(completions: CompletionItem[]): CompletionItem[] {
const styleScriptTemplateCompletions = completions.filter(
(completion) =>
completion.kind === CompletionItemKind.Property &&
this.styleScriptTemplate.has(completion.label)
);
const langCompletions: CompletionItem[] = [];
addLangCompletion('script', ['ts']);
addLangCompletion('style', ['less', 'scss']);
addLangCompletion('template', ['pug']);
return langCompletions;
function addLangCompletion(tag: string, languages: string[]) {
const existingCompletion = styleScriptTemplateCompletions.find(
(completion) => completion.label === tag
);
if (!existingCompletion) {
return;
}
// Clone the base completion, appending the lang attribute to both the
// insert text and the text edit.
languages.forEach((lang) =>
langCompletions.push({
...existingCompletion,
label: `${tag} (lang="${lang}")`,
insertText:
existingCompletion.insertText &&
`${existingCompletion.insertText} lang="${lang}"`,
textEdit: existingCompletion.textEdit && {
range: existingCompletion.textEdit.range,
newText: `${existingCompletion.textEdit.newText} lang="${lang}"`
}
})
);
}
}
/** Auto-closing-tag text at `position`, or null inside moustache tags. */
doTagComplete(document: Document, position: Position): string | null {
if (!this.featureEnabled('tagComplete')) {
return null;
}
const html = this.documents.get(document);
if (!html) {
return null;
}
if (this.isInsideMoustacheTag(html, document, position)) {
return null;
}
return this.lang.doTagComplete(document, position, html);
}
// Heuristic: inside `{...}` when the last unmatched brace before the
// cursor within the current node is an opening one.
private isInsideMoustacheTag(html: HTMLDocument, document: Document, position: Position) {
const offset = document.offsetAt(position);
const node = html.findNodeAt(offset);
const charactersInNode = document.getText().substring(node.start, offset);
return charactersInNode.lastIndexOf('{') > charactersInNode.lastIndexOf('}');
}
/** Outline symbols for the document, or [] when disabled/unparsed. */
getDocumentSymbols(document: Document): SymbolInformation[] {
if (!this.featureEnabled('documentSymbols')) {
return [];
}
const html = this.documents.get(document);
if (!html) {
return [];
}
return this.lang.findDocumentSymbols(document, html);
}
// A feature is active only when html.enable AND its own flag are set.
private featureEnabled(feature: keyof LSHTMLConfig) {
return (
this.configManager.enabled('html.enable') &&
this.configManager.enabled(`html.${feature}.enable`)
);
}
}
|
PARAMETERS ( PARENT_RECORD_ID VARCHAR )
-- Pregnancy outcomes for one pregnancy, newest first.
-- PARENT_RECORD_ID is the objectid of the parent pregnancy record.
SELECT
po.date,
po.outcome,
po.infantid,
po.rejected,
po.protected,
po.project,
po.remark,
po.performedby
FROM pregnancy_outcomes po
-- Resolve the pregnancy's lsid from its objectid; LIMIT 1 guards against
-- accidental duplicate objectids.
WHERE po.pregnancyid = (SELECT p.lsid
FROM pregnancies p
WHERE p.objectid = PARENT_RECORD_ID
LIMIT 1)
ORDER BY po.date DESC
|
# Aprendendo Python
Resoluções de exercícios básicos, meus primeiros passos com Python
|
package tracing
import (
"context"
"time"
"github.com/asim/nitro/v3/metadata"
)
const (
// UniqueIDKey is the metadata key under which a request's unique id travels.
UniqueIDKey = "unique_id"
)
// NewContextWithOld derives a request context that does not inherit the
// parent service's timeout settings: metadata is copied over, but the new
// context is rooted at context.Background with its own deadline (the
// parent's deadline if present, otherwise now+2s).
// (Translated from the original Chinese comment.)
func NewContextWithOld(ctx context.Context) context.Context {
// nil
if ctx == nil {
return nil
}
md, ok := metadata.FromContext(ctx)
if !ok {
md = make(metadata.Metadata)
}
// Copy so later mutations of the parent's metadata don't leak in.
md = metadata.Copy(md)
var deadline time.Time
if d, ok := ctx.Deadline(); ok {
deadline = d
} else {
deadline = time.Now().Add(time.Second * 2)
}
nctx, cancel := context.WithDeadline(context.Background(), deadline)
// NOTE(review): discarding cancel leaks the deadline timer until it fires
// ("lostcancel") and callers cannot cancel early — confirm this trade-off
// is intentional, since returning cancel would change the signature.
_ = cancel
nctx = metadata.NewContext(nctx, md)
return nctx
}
// GetRequestIDFromContext returns the request's unique id ("unique_id")
// from the context metadata, or the empty string when the context is nil
// or carries no such entry.
func GetRequestIDFromContext(ctx context.Context) string {
	if ctx == nil {
		return ""
	}
	md, ok := metadata.FromContext(ctx)
	if !ok {
		return ""
	}
	// Missing keys yield the zero value "", matching the original behaviour.
	return md[UniqueIDKey]
}
|
<!-- <navbar-left></navbar-left> -->
<!-- Chat header bar: logo linking home on the left; overflow-menu and
compose icons floated to the right. -->
<div class="row heading">
<a href="/">
<div class="col-sm-6 col-xs-3 heading-avatar">
<img src="/image/logo.png" style="width:161px;height:96px;margin-top:-26px;">
</div>
</a>
<!-- Overflow ("more options") menu icon -->
<div class="col-sm-1 col-xs-1 heading-dot pull-right">
<i class="fa fa-ellipsis-v fa-2x pull-right" aria-hidden="true"></i>
</div>
<!-- New-conversation (compose) icon -->
<div class="col-sm-2 col-xs-2 heading-compose pull-right">
<i class="fa fa-comments fa-2x pull-right" aria-hidden="true"></i>
</div>
</div>
|
-- SQL*Plus report: lists every primary-key constraint owned by the
-- currently connected user, with its status.
set echo off
set feedback off
set linesize 512
prompt
prompt Primary Keys for Connected User
prompt
-- constraint_type 'P' selects primary keys only.
select table_name, constraint_name, constraint_type, status
from user_constraints
where constraint_type = 'P'
order by table_name, constraint_name;
|
/*
* Copyright 2019 IIT Software GmbH
*
* IIT Software GmbH licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.swiftmq.jms;
import com.swiftmq.jndi.SwiftMQObjectFactory;
import com.swiftmq.tools.util.LazyUTF8String;
import javax.jms.JMSException;
import javax.jms.Queue;
import javax.naming.NamingException;
import javax.naming.Reference;
import javax.naming.Referenceable;
import javax.naming.StringRefAddr;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
/**
* Implementation of a Queue.
*
* @author IIT GmbH, Bremen/Germany, Copyright (c) 2000-2002, All Rights Reserved
*/
public class QueueImpl implements Queue, Referenceable, Serializable, DestinationImpl {
// Queue name, stored lazily so UTF-8 decoding only happens on first use.
LazyUTF8String queueName = null;
/**
 * Creates a new QueueImpl.
 *
 * @param queueName queue name.
 */
public QueueImpl(String queueName) {
setQueueName(queueName);
}
/**
 * Creates a new QueueImpl with no name (used for deserialization via
 * readContent).
 */
public QueueImpl() {
}
/** JNDI reference so the queue can be reconstructed by SwiftMQObjectFactory. */
public Reference getReference() throws NamingException {
return new Reference(QueueImpl.class.getName(),
new StringRefAddr("queueName", queueName != null ? queueName.getString() : null),
SwiftMQObjectFactory.class.getName(),
null);
}
/** Forces eager decoding of the lazily held name buffer. */
public void unfoldBuffers() {
if (queueName != null)
queueName.getString(true);
}
/** Destination type discriminator used by DestinationFactory. */
public int getType() {
return DestinationFactory.TYPE_QUEUE;
}
// NOTE(review): throws NullPointerException when queueName is null (i.e.
// constructed with the no-arg constructor and never named) — confirm
// callers guarantee a name before serialization.
public void writeContent(DataOutput out) throws IOException {
queueName.writeContent(out);
}
/** Restores the queue name from the wire format written by writeContent. */
public void readContent(DataInput in) throws IOException {
queueName = new LazyUTF8String(in);
}
/**
 * Get the name of this queue.
 * <p/>
 * <P>Clients that depend upon the name, are not portable.
 *
 * @return the queue name
 * @throws JMSException if JMS implementation for Queue fails to
 *                      to return queue name due to some internal
 *                      error.
 */
public String getQueueName() throws JMSException {
return queueName != null ? queueName.getString() : null;
}
/**
 * Set the queue name.
 *
 * @param queueName queue name.
 */
public void setQueueName(String queueName) {
this.queueName = queueName != null ? new LazyUTF8String(queueName) : null;
}
/**
 * Return a pretty printed version of the queue name
 *
 * @return the provider specific identity values for this queue.
 */
public String toString() {
return queueName != null ? queueName.getString() : null;
}
}
|
module.exports = async function (context, message) {
// If JSON parsing fails when a message is sent to the fews-eventcode queue the message
// will remain on the dead letter queue. It seems that some core forecasting engine messages
// can contain invalid JSON, causing this scenario. To workaround this temporarily, try and parse
// the message as JSON before replay is attempted. If parsing fails, stringify the message before replaying it.
let messageToReplay = message
if (message.constructor.name === 'String') {
try {
JSON.parse(message)
} catch (err) {
messageToReplay = JSON.stringify(message)
}
}
if (message.constructor.name === 'Object') {
context.log(`Replaying ${JSON.stringify(messageToReplay)}`)
} else {
context.log(`Replaying ${messageToReplay}`)
}
context.bindings.processFewsEventCode = messageToReplay
}
|
library streamy.generator.template_loader;
import 'dart:async';
import 'dart:io' as io;
import 'package:mustache/mustache.dart' as mustache;
/// The location of templates bundled with Streamy, relative to the project
/// root (so Streamy must be run from the root of the project for this to
/// resolve). Used by [DefaultTemplateLoader.defaultInstance] when no specific
/// directory is provided.
const String DEFAULT_TEMPLATE_DIR = 'lib/templates';
/// Loads template source from files named `{templateName}.mustache` inside a
/// fixed template directory.
class DefaultTemplateLoader implements TemplateLoader {
  final String templateDir;

  DefaultTemplateLoader(this.templateDir);

  /// Convenience factory using the bundled [DEFAULT_TEMPLATE_DIR].
  factory DefaultTemplateLoader.defaultInstance() =>
      new DefaultTemplateLoader(DEFAULT_TEMPLATE_DIR);

  @override
  Future<mustache.Template> load(String templateName) {
    final file = new io.File('$templateDir/$templateName.mustache');
    return file
        .readAsString()
        .then((source) =>
            new mustache.Template(source, htmlEscapeValues: false));
  }
}
/// Supplies parsed mustache templates to the generator, keyed by name.
abstract class TemplateLoader {
  /// Creates a loader that resolves template files relative to [path].
  factory TemplateLoader.fromDirectory(String path) {
    return new FileTemplateLoader(path);
  }
  /// Loads and parses the template called [name].
  Future<mustache.Template> load(String name);
}
/// Loads `{name}.mustache` templates from an arbitrary directory on disk.
class FileTemplateLoader implements TemplateLoader {
  // Absolute directory against which template file names are resolved.
  final io.Directory path;
  FileTemplateLoader(String path) : path = new io.Directory(path).absolute;
  Future<mustache.Template> load(String name) {
    var f = new io.File("${path.path}/$name.mustache");
    if (!f.existsSync()) {
      // NOTE(review): returns a bare null instead of a Future, unlike
      // DefaultTemplateLoader.load — callers must null-check the returned
      // future itself before awaiting. Confirm this asymmetry is intended.
      return null;
    }
    return f.readAsString()
        .then((source) => new mustache.Template(source, htmlEscapeValues: false));
  }
}
|
import 'package:aves/model/source/section_keys.dart';
import 'package:aves/utils/time_utils.dart';
import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/common/grid/header.dart';
import 'package:flutter/material.dart';
import 'package:intl/intl.dart';
/// Section header showing a day-level date label.
class DaySectionHeader<T> extends StatelessWidget {
  final DateTime? date;

  const DaySectionHeader({
    Key? key,
    required this.date,
  }) : super(key: key);

  // Skeleton choices (en_US examples):
  // `MMMMd` -> `April 15`, `yMMMMd` -> `April 15, 2020`,
  // `MMMEd` -> `Wed, Apr 15`, `yMMMEd` -> `Wed, Apr 15, 2020`,
  // `MMMMEEEEd` -> `Wednesday, April 15`, `yMMMMEEEEd` -> `Wednesday, April 15, 2020`,
  // `MEd` -> `Wed, 4/15`, `yMEd` -> `Wed, 4/15/2020`
  // (ko examples):
  // `MMMMd` -> `1월 26일`, `yMMMMd` -> `2021년 1월 26일`,
  // `MMMEd` -> `1월 26일 (화)`, `yMMMEd` -> `2021년 1월 26일 (화)`,
  // `MMMMEEEEd` -> `1월 26일 화요일`, `yMMMMEEEEd` -> `2021년 1월 26일 화요일`,
  // `MEd` -> `1. 26. (화)`, `yMEd` -> `2021. 1. 26. (화)`
  static String _formatDate(BuildContext context, DateTime? date) {
    final l10n = context.l10n;
    if (date == null) {
      return l10n.sectionUnknown;
    } else if (date.isToday) {
      return l10n.dateToday;
    } else if (date.isYesterday) {
      return l10n.dateYesterday;
    }
    final locale = l10n.localeName;
    // Drop the year from the label when the date falls in the current year.
    final weekday = DateFormat.E(locale).format(date);
    final day = date.isThisYear
        ? DateFormat.MMMMd(locale).format(date)
        : DateFormat.yMMMMd(locale).format(date);
    return '$day ($weekday)';
  }

  @override
  Widget build(BuildContext context) {
    return SectionHeader<T>(
      sectionKey: EntryDateSectionKey(date),
      title: _formatDate(context, date),
    );
  }
}
/// Section header showing a month-level date label.
class MonthSectionHeader<T> extends StatelessWidget {
  final DateTime? date;

  const MonthSectionHeader({
    Key? key,
    required this.date,
  }) : super(key: key);

  // Returns a localized month label (year included only when it differs from
  // the current year) with its first letter uppercased for locales whose
  // month names are lowercase.
  static String _formatDate(BuildContext context, DateTime? date) {
    final l10n = context.l10n;
    if (date == null) return l10n.sectionUnknown;
    if (date.isThisMonth) return l10n.dateThisMonth;
    final locale = l10n.localeName;
    String label;
    if (date.isThisYear) {
      label = DateFormat.MMMM(locale).format(date);
    } else {
      label = DateFormat.yMMMM(locale).format(date);
    }
    return label.substring(0, 1).toUpperCase() + label.substring(1);
  }

  @override
  Widget build(BuildContext context) {
    return SectionHeader<T>(
      sectionKey: EntryDateSectionKey(date),
      title: _formatDate(context, date),
    );
  }
}
|
// roomList — action type constants for the room list store/reducer.
// NOTE(review): the string values are inconsistently prefixed — some carry
// 'ROOM_' (e.g. 'ROOM_INIT_LIST') and some do not (e.g. 'REMOVE_ROOM_ITEM',
// 'ADD_UNREAD_COUNT'). These strings are presumably matched at dispatch
// sites, so do not normalize them without auditing every consumer.
export const INIT_ROOM_LIST = 'ROOM_INIT_LIST';
export const ADD_ROOM_ITEM = 'ROOM_ADD_ITEM';
export const REMOVE_ROOM_ITEM = 'REMOVE_ROOM_ITEM';
export const PUSH_HISTORY = 'ROOM_PUSH_HISTORY';
export const UNSHIFT_HISTORY = 'ROOM_UNSHIFT_HISTORY';
export const MERGE_ROOM_INFO = 'MERGE_ROOM_INFO';
export const ADD_UNREAD_COUNT = 'ADD_UNREAD_COUNT';
export const CLEAR_UNREAD_COUNT = 'CLEAR_UNREAD_COUNT';
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.