=begin
@author: aaditkamat
@date: 07/01/2019
=end
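# array memoizes the sequence: array[i - 1] holds the i-th Fibonacci number, and 0 marks a value that has not been computed yet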
def fibonacci_recursive_value(num, array, ctr)
if num < 1
return "Incorrect num #{num} for fibonacci sequence"
end
if num == 1 or num == 2
array[num - 1] = 1
return array[num - 1]
end
if array[num - 2] != 0 and array[num - 3] != 0
return array[num - 2] + array[num - 3]
elsif array[num - 2] != 0
array[num - 3] = fibonacci_recursive_value(num - 2, array, ctr + 1)
elsif array[num - 3] != 0
array[num - 2] = fibonacci_recursive_value(num - 1, array, ctr + 1)
else
array[num - 2] = fibonacci_recursive_value(num - 1, array, ctr + 1)
array[num - 3] = fibonacci_recursive_value(num - 2, array, ctr + 1)
end
value = array[num - 2] + array[num - 3]
value
end
def fibonacci_recursive_sequence(num, arr)
if num < 1
return fibonacci_recursive_value(num, arr, 0)
end
start = 1
print "The sequence is: "
until start > num do
if start < num
print"#{fibonacci_recursive_value(start, Array.new(arr), 0)}, "
else
puts "#{fibonacci_recursive_value(start, Array.new(arr), 0)}"
end
start += 1
end
end
def fibonacci_iterative_sequence(num)
if num < 1
return "Incorrect num #{num} for fibonacci sequence"
end
start = 1
print "The sequence is: "
until start > num do
if start < num
print"#{fibonacci_iterative_value(start)}, "
else
puts "#{fibonacci_iterative_value(start)}"
end
start += 1
end
end
def fibonacci_iterative_value(num)
if num < 1
return "Incorrect num #{num} for fibonacci sequence"
end
if num == 1 || num == 2
return 1
end
first = 1
second = 1
ctr = 2
until ctr >= num do
first += second
ctr += 1
if ctr == num
return first
end
second += first
ctr += 1
end
second
end
def main
print("Enter an integer: ")
num = gets.chomp.to_i
a = []
a.fill(0, 0, num)
puts "The number at position #{num} of fibonacci sequence: #{fibonacci_recursive_value(num, a, 0)}"
fibonacci_recursive_sequence(num, a)
puts "The number at position #{num} of fibonacci sequence: #{fibonacci_iterative_value(num)}"
fibonacci_iterative_sequence(num)
end
main
|
use swc_ecma_ast::*;
use swc_ecma_visit::{noop_fold_type, Fold, FoldWith};
/// Converts destructured parameters with default values to non-shorthand
/// syntax. This fixes the only arguments-related bug in ES Modules-supporting
/// browsers (Edge 16 & 17). Use this plugin instead of
/// @babel/plugin-transform-parameters when targeting ES Modules.
pub fn edge_default_param() -> impl Fold {
EdgeDefaultParam::default()
}
#[derive(Default, Clone, Copy)]
struct EdgeDefaultParam {
in_arrow: bool,
}
impl Fold for EdgeDefaultParam {
noop_fold_type!();
fn fold_arrow_expr(&mut self, n: ArrowExpr) -> ArrowExpr {
self.in_arrow = true;
let params = n.params.fold_with(self);
self.in_arrow = false;
let body = n.body.fold_with(self);
ArrowExpr { params, body, ..n }
}
fn fold_object_pat(&mut self, n: ObjectPat) -> ObjectPat {
let n = n.fold_children_with(self);
if !self.in_arrow {
return n;
}
let props = n
.props
.into_iter()
.map(|prop| match prop {
ObjectPatProp::Assign(assign_pat) => {
if let Some(value) = assign_pat.value {
ObjectPatProp::KeyValue(KeyValuePatProp {
key: PropName::Ident(assign_pat.key.clone()),
value: Box::new(Pat::Assign(AssignPat {
span: assign_pat.span,
left: Box::new(Pat::Ident(BindingIdent::from(
assign_pat.key.clone(),
))),
right: value.clone(),
type_ann: None,
})),
})
} else {
ObjectPatProp::Assign(assign_pat)
}
}
_ => prop,
})
.collect();
ObjectPat { props, ..n }
}
}
#[cfg(test)]
mod tests {
use super::*;
use swc_ecma_transforms_testing::test;
test!(
::swc_ecma_parser::Syntax::default(),
|_| edge_default_param(),
destructured_default_value,
"const f = ({ a = 1 }) => a;",
"const f = ({ a: a = 1 }) => a;"
);
test!(
::swc_ecma_parser::Syntax::default(),
|_| edge_default_param(),
destructured_no_default_value,
"const f = ({ a }) => a;",
"const f = ({ a }) => a;"
);
test!(
::swc_ecma_parser::Syntax::default(),
|_| edge_default_param(),
nested_default_value,
"const f = ({ a: { b = 1 } }) => [a, b];",
"const f = ({ a: { b: b = 1 } }) => [a, b];"
);
test!(
::swc_ecma_parser::Syntax::default(),
|_| edge_default_param(),
non_arguments,
"const f = () => { const { a = 1 } = {}; };",
"const f = () => { const { a = 1 } = {}; };"
);
}
|
# Spicy Chili Cheese Dip
## Ingredients
- 8 oz cream cheese (1 brick)
- 15 oz chili (1 can)
- 4 oz pepperjack cheese, shredded
- tortilla chips
## Directions
Preheat oven to 350° F.
In a 13x9 inch baking pan spread cream cheese in a flat layer across the
bottom. Add chili and spread across the top of the cream cheese. Sprinkle
pepperjack cheese across the top. Bake 15-20 minutes, until cheese is bubbly
and slightly brown around the edges of the pan.
|
#!/usr/bin/env bash
set -eux
pytest
python transitions_example.py
jupyter nbconvert --execute README.ipynb
rm README.html
|
import express from "express";
import { getUserController, loginController, signupController } from "../controller/user.js";
import { auth } from "../auth.js";
const userRouter = express.Router();
userRouter.use((req, res, next) => {
req.endpoint = "/user";
next();
});
userRouter.get("/", auth, getUserController);
userRouter.post("/login", loginController);
userRouter.post("/signup", signupController);
export default userRouter;
|
class Invite < ApplicationRecord
belongs_to :rescue_action
belongs_to :rescuer
enum status: [:unanswered, :accepted, :denied]
end
|
import "./index.scss";
import { render } from "react-dom";
import { BrowserRouter } from "react-router-dom";
import { ApplicationProvider } from "@providers/application-provider";
import { Routes as RouterLoader } from "@providers/application-provider/routes";
import { ModelProvider, useRootStore } from "@providers/model-provider";
import { useCallback } from "react";
import { LoginControl } from "@components/dev-component/login-control";
import { Layout } from "./layout";
import { AppRole, AppUser } from "@root/models"; // assumes AppUser is exported from the same models module as AppRole
import { admin, student, guest } from "./pages";
import { SnackbarProvider } from "notistack";
import { LayoutProvider } from "./layout/layout-provider";
const getFallback = (user: AppUser | null) => {
if (!user) {
return "/login";
}
const { role } = user;
return role === AppRole.ADMIN ? "classroom" : "/";
};
const Loader = () => {
const getRoutes = (user: AppUser | null) => {
if (user) {
return user.role === AppRole.ADMIN ? admin : student;
}
return guest;
};
const root = useRootStore();
const authFunction = useCallback(async (): Promise<AppUser | null> => {
const response = await root.queryAuth().currentPromise();
if (response.auth) {
const { auth } = response;
return {
id: auth.id as string,
name: auth.name as string,
email: auth.email as string,
role: auth.role as string,
};
}
return null;
}, []);
return (
<ApplicationProvider getRoutes={getRoutes} authFunctions={authFunction}>
<LayoutProvider>
<Layout>
<RouterLoader getFallback={getFallback} />
</Layout>
</LayoutProvider>
<LoginControl />
</ApplicationProvider>
);
};
const root = document.getElementById("root");
if (root) {
render(
<BrowserRouter>
<SnackbarProvider
anchorOrigin={{
horizontal: "right",
vertical: "top",
}}
>
<ModelProvider
url={(import.meta.env.VITE_BACKEND as string) + "/graphql"}
>
<Loader />
</ModelProvider>
</SnackbarProvider>
</BrowserRouter>,
root
);
}
|
import _ from 'the-lodash';
import { Context } from '../context';
import { Router } from '@kubevious/helper-backend';
import Joi from 'joi';
import { SearchQuery } from '../types';
export default function (router: Router, context: Context) {
router.url('/api/v1/diagram');
router
.get('/node', function (req, res) {
const dn : string = <string>req.query.dn;
var state = context.registry.getCurrentState();
const node = state.getNodeItem(dn);
if (node) {
return node.config;
}
return null;
})
.querySchema(
Joi.object({
dn: Joi.string().required()
})
);
router
.get('/children', function (req, res) {
const dn: string = <string>req.query.dn;
var state = context.registry.getCurrentState();
return state.getChildren(dn);
})
.querySchema(
Joi.object({
dn: Joi.string().required(),
})
);
router
.get('/props', function (req, res) {
const dn : string = <string>req.query.dn;
var state = context.registry.getCurrentState();
var nodeItem = state.getNodeItem(dn);
if (!nodeItem) {
return [];
}
return _.values(nodeItem?.propertiesMap);
})
.querySchema(
Joi.object({
dn: Joi.string().required(),
})
);
router
.get('/alerts', function (req, res) {
const dn : string = <string>req.query.dn;
var state = context.registry.getCurrentState();
var nodeItem = state.getNodeItem(dn);
if (!nodeItem) {
return [];
}
return nodeItem?.hierarchyAlerts;
})
.querySchema(
Joi.object({
dn: Joi.string().required(),
})
);
/*************************/
router
.post('/search', function (req, res) {
const criteria: SearchQuery = <SearchQuery>req.body;
return context.searchEngine.search(criteria);
})
.bodySchema(
Joi.object({
criteria: Joi.string(),
kind: Joi.string(),
error: Joi.object(),
warn: Joi.object(),
markers: Joi.array().items(Joi.string()),
labels: Joi.array().items(Joi.object()),
annotations: Joi.array().items(Joi.object()),
}),
);
}
|
import {
CornerstoneImage,
CornerstoneSingleImage,
DCMImage,
installWADOImageLoader,
unloadWADOImage,
withInsightViewerStorybookGlobalStyle,
} from '@lunit/insight-viewer';
import { withOPTComponentsStorybookGlobalStyle } from '@lunit/opt-components';
import React, { useMemo } from 'react';
installWADOImageLoader();
export default {
title: 'insight-viewer/Utils',
decorators: [withInsightViewerStorybookGlobalStyle, withOPTComponentsStorybookGlobalStyle],
};
export const DCMImageSample = () => {
const images: CornerstoneImage[] = useMemo(() => {
return [
'wadouri:https://lunit-frontend-fixtures.netlify.com/dcm-files/series/CT000010.dcm',
`wadouri:https://lunit-frontend-fixtures.netlify.com/dcm-files/series/CT000011.dcm`,
`wadouri:https://lunit-frontend-fixtures.netlify.com/dcm-files/series/CT000012.dcm`,
`wadouri:https://lunit-frontend-fixtures.netlify.com/dcm-files/series/CT000013.dcm`,
].map(imageId => new CornerstoneSingleImage(imageId, { unload: unloadWADOImage }));
}, []);
// `<DCMImage>` makes it easier to render a DICOM file.
// However, because `.dcm` files are large and expensive to render,
// it is better to generate and use `.jpg` thumbnail images where possible.
return (
<ul>
{images.map((image, i) => (
<li key={'image' + i}>
<DCMImage cornerstoneImage={image} width={120} height={150} />
</li>
))}
</ul>
);
};
DCMImageSample.story = {
name: '<DCMImage>',
};
|
package main
import (
"context"
"net/http"
"net/http/httptest"
"github.com/stretchr/testify/mock"
"github.com/xmidt-org/wrp-go/v3"
"github.com/xmidt-org/wrp-go/v3/wrphttp"
)
type mockWRPAccessAuthority struct {
mock.Mock
}
func (m *mockWRPAccessAuthority) authorizeWRP(ctx context.Context, message *wrp.Message) (bool, error) {
arguments := m.Called(ctx, message)
return arguments.Bool(0), arguments.Error(1)
}
type testWRPResponseWriter struct {
http.ResponseWriter
}
func (t *testWRPResponseWriter) WriteWRP(i *wrphttp.Entity) (int, error) {
return 0, nil
}
func (t *testWRPResponseWriter) WriteWRPBytes(_ wrp.Format, _ []byte) (int, error) {
return 0, nil
}
func (t *testWRPResponseWriter) WRPFormat() wrp.Format {
return wrp.Msgpack
}
func newTestWRPResponseWriter(w *httptest.ResponseRecorder) *testWRPResponseWriter {
return &testWRPResponseWriter{
ResponseWriter: w,
}
}
|
mirpipe.pl -file ./data/test.fastq -ref ./data/mirbase20_mature.fa
DIFF=$(diff target.output mirpipe_mirna.tsv)
if [ "$DIFF" != "" ]
then
echo "Automatic test failed. Please check the content of mirpipe_mirna.tsv manually."
fi
if [ "$DIFF" == "" ]
then
echo "Automatic testing was successful. You are ready to use MIRPIPE."
fi
|
---
layout: post
microblog: true
date: 2016-11-11 19:38 +1300
guid: http://JacksonOfTrades.micro.blog/2016/11/11/t796965199567720448.html
---
Well, I ran into its owners on the way back. Apparently it does this a lot.
|
package com.vanniktech.maven.publish.tasks
import org.gradle.jvm.tasks.Jar
@Suppress("UnstableApiUsage")
open class EmptySourcesJar : Jar() {
init {
archiveClassifier.set("sources")
}
}
|
#include <assert.h>
#include <tbb/parallel_for.h>
#include <vector> // for test sample only;
class Solution {
public:
bool isPerfectSquare(const int num) {
int lower = 1;
int upper = num;
while (1 < upper - lower) {
const size_t mid = (lower + upper) / 2;
if (mid * mid < num)
lower = mid;
else if (num < mid * mid)
upper = mid;
else
return true;
}
if (lower * lower == num || upper * upper == num)
return true;
else
return false;
}
};
constexpr size_t kTestSampleCount = 10000;
// The solution is OK and beat 100% on LeetCode; the test program below checks every value below kTestSampleCount^2 against the precomputed squares.
int main(void) {
Solution sln;
assert(sln.isPerfectSquare(93025));
assert(sln.isPerfectSquare(16));
assert(!sln.isPerfectSquare(123));
assert(sln.isPerfectSquare(1));
std::vector<int> test_sample(kTestSampleCount, 0);
tbb::parallel_for(size_t(1), kTestSampleCount, [&test_sample](size_t i) { test_sample[i] = (i * i); });
// tbb::parallel_for(1, test_sample.back(), [&test_sample](int i) {
// });
for (int i = 1, ii = 1; i < test_sample.back(); ++i) {  // ii indexes the next expected perfect square (test_sample[1] == 1)
if (i == test_sample[ii]) {
assert(sln.isPerfectSquare(i));
++ii;
} else {
assert(!sln.isPerfectSquare(i));
}
}
return 0;
}
|
"use strict";
const express = require( "express" );
const expressApp = express();
const compression = require( "compression" );
const cookieParser = require( "cookie-parser" );
const bodyParser = require( "body-parser" );
const lager = require( "properjs-lager" );
const csurf = require( "csurf" );
const listeners = {};
const core = {
query: require( "./query" ),
config: require( "../../clutch.config" ),
content: require( "./content" ),
template: require( "./template" )
};
const ContextObject = require( "../class/ContextObject" );
const checkCSRF = csurf({
cookie: true
});
const http = require( "http" );
const fs = require( "fs" );
const stasis = require( `../generators/static` );
let httpServer = null;
let cacheIndex = 0;
/**
*
* Configure Express Middleware.
*
*/
expressApp.use( cookieParser() );
expressApp.use( bodyParser.json() );
expressApp.use( bodyParser.urlencoded({
extended: true
}));
expressApp.use( compression( core.config.compression ) );
expressApp.use( express.static( core.config.template.staticDir, {
maxAge: core.config.static.maxAge
}));
/**
*
* Configure Express Routes.
*
*/
const setRoutes = () => {
// SYSTEM
expressApp.get( "/robots.txt", getRobots );
expressApp.get( "/sitemap.xml", getSitemap );
// SYSTEM => OLD
// Formerly used for the live node app deployments...
// expressApp.get( "/preview", getPreview );
// expressApp.post( "/webhook", postWebhook );
// URI => HTML?format=json
// URI => HTML?nocache=420
expressApp.get( "/", checkCSRF, setReq, getPage );
expressApp.get( "/:type", checkCSRF, setReq, getPage );
expressApp.get( "/:type/:uid", checkCSRF, setReq, getPage );
expressApp.get( "/:type/:uid/index.json", checkCSRF, setReq, getPage );
};
/**
*
* Request handling.
*
*/
const setReq = ( req, res, next ) => {
req.params.type = req.params.type || core.config.homepage;
next();
};
const getKey = ( type ) => {
const key = type;
return key || core.config.homepage;
};
/**
*
* :GET Pages
*
*/
const getPage = ( req, res ) => {
const key = getKey( req.params.type );
const done = () => {
const rJson = /\.json$/;
const isStaticJson = rJson.test( req.path );
const isServerJson = (req.query.format === "json");
if ( isStaticJson || isServerJson ) {
if ( isStaticJson && rJson.test( req.params.uid ) ) {
delete req.params.uid;
}
core.query.getApi( req, res, listeners[ key ] ).then(( result ) => {
if ( isServerJson ) {
res.status( 200 ).json( result );
} else {
res.status( 200 ).send( JSON.stringify( result ) );
}
});
} else {
core.content.getPage( req, res, listeners[ key ] ).then(( callback ) => {
// Handshake callback
callback(( status, html ) => {
res.status( status ).send( html );
});
});
}
};
// Local CACHEBUSTER!!!
if ( req.query.nocache ) {
cacheIndex = Number( req.query.nocache );
core.query.getSite().then(() => {
lager.cache( `[Clutch] Cache query index ${cacheIndex}` );
done();
});
} else {
done();
}
};
/**
*
* :GET Prismic stuff
* :POST Prismic stuff
*
*/
// const getPreview = ( req, res ) => {
// core.query.getPreview( req, res ).then(( url ) => {
// res.redirect( url );
// });
// };
// const postWebhook = ( req, res ) => {
// // Skip if update is in progress, Skip if invalid secret was sent
// if ( !isSiteUpdate && req.body.secret === core.config.api.secret ) {
// isSiteUpdate = true;
//
// // Re-Fetch Site JSON
// core.query.getSite().then(() => {
// isSiteUpdate = false;
// });
// }
//
// // Always resolve with a 200 and some text
// res.status( 200 ).send( "success" );
// };
const getSitemap = ( req, res ) => {
const sitemap = require( `../generators/sitemap` );
sitemap.generate().then(( xml ) => {
res.set( "Content-Type", "text/xml" ).status( 200 ).send( xml );
});
};
const getRobots = ( req, res ) => {
const robots = require( `../generators/robots` );
robots.generate().then(( txt ) => {
res.set( "Content-Type", "text/plain" ).status( 200 ).send( txt );
});
};
/**
*
* Middleware checks
*
*/
const checkOrigin = ( req, res, next ) => {
// No origin means not CORS :-)
if ( !req.headers.origin ) {
next();
} else {
res.status( 200 ).json({
error: "Invalid origin for request"
});
}
};
/**
*
* Router API.
*
*/
module.exports = {
/**
*
* Handle router subscribe.
*
*/
on ( type, handlers ) {
const key = getKey( type );
// One handler per route
if ( !listeners[ key ] ) {
listeners[ key ] = {
type: type,
handlers: handlers
};
}
},
/**
*
* Start the Express {app}.
*
*/
init () {
return new Promise(( resolve, reject ) => {
// Init routes
setRoutes();
// Fetch ./template/pages listing
core.template.getPages().then(() => {
// Fetch Site JSON
core.query.getSite().then(() => {
httpServer = http.createServer( expressApp );
httpServer.listen( core.config.express.port );
stasis.clean( core.config ).then( resolve );
lager.cache( `[Clutch] Server Initialized` );
});
});
});
}
};
|
from typing import List
from pydantic import BaseModel, Field
from aos_sw_api.enums import Dot1xAuthenticatorPortControlEnum, Dot1xControlledDirectionEnum
from aos_sw_api.globel_models import CollectionResult
class Dot1xModel(BaseModel):
is_dot1x_enabled: bool
cached_reauth_delay: int = Field(..., ge=0, le=2147483647)
allow_gvrp_vlans: bool
use_lldp_data: bool
class Dot1xAuthenticatorPort(BaseModel):
port_id: str
is_authenticator_enabled: bool
control: Dot1xAuthenticatorPortControlEnum
unauthorized_vlan_id: int = Field(..., ge=0, le=4094)
client_limit: int = Field(..., ge=0, le=32)
quiet_period: int = Field(..., ge=0, le=65535)
tx_period: int = Field(..., ge=1, le=65535)
supplicant_timeout: int = Field(..., ge=1, le=300)
server_timeout: int = Field(..., ge=1, le=300)
max_requests: int = Field(..., ge=1, le=10)
reauth_period: int = Field(..., ge=0, le=999999999)
authorized_vlan_id: int = Field(..., ge=0, le=4094)
logoff_period: int = Field(..., ge=1, le=999999999)
unauth_period: int = Field(..., ge=0, le=250)
cached_reauth_period: int = Field(..., ge=0, le=2147483647)
class Dot1xPortSecurity(BaseModel):
port_id: str
controlled_direction: Dot1xControlledDirectionEnum
allow_mbv: bool
allowed_mixed_users: bool
is_port_speed_vsa_enabled: bool
class Dot1xAuthenticatorPortList(BaseModel):
collection_result: CollectionResult
dot1x_authenticator_port_element: List[Dot1xAuthenticatorPort]
class Dot1xPortSecurityList(BaseModel):
collection_result: CollectionResult
dot1x_port_security_element: List[Dot1xPortSecurity]
|
{-# LANGUAGE TypeOperators, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses #-}
module SessionCheck.Backend.TCP.Instances where
import SessionCheck.Backend.TCP.Types
import SessionCheck.Classes
instance [Char] :< TCPMessage where
inj = TCPMessage
prj = Just . unTCPMessage
|
use strict;
use warnings;
use Net::EmptyPort qw(check_port empty_port);
use Test::More;
use t::Util;
my $upstream_port = empty_port();
my $upstream = spawn_server(
argv => [
qw(plackup -s Starlet --access-log /dev/null -p), $upstream_port, ASSETS_DIR . "/upstream.psgi",
],
is_ready => sub {
check_port($upstream_port);
},
);
my $h2o = spawn_h2o(<< "EOT");
hosts:
default:
paths:
/:
proxy.reverse.url: http://127.0.0.1:$upstream_port
EOT
subtest 'http1' => sub {
plan skip_all => 'curl not found'
unless prog_exists('curl');
my $doit = sub {
my ($proto, $port) = @_;
my $extra = '';
$extra .= ' --insecure'
if $proto eq 'https';
subtest $proto => sub {
my $resp = `curl --max-time 1 $extra $proto://127.0.0.1:$port/sleep 2>&1`;
like $resp, qr/operation timed out/i, "operation should time out";
sleep 1;
$resp = `curl --silent --dump-header /dev/stderr $extra $proto://127.0.0.1:$port/ 2>&1 > /dev/null`;
like $resp, qr{^HTTP/1\.[0-9]+ 404 }s, "server is still alive";
};
};
$doit->('http', $h2o->{port});
$doit->('https', $h2o->{tls_port});
};
# note: implement test using HTTP/2, nghttp --timeout 2 does not seem to work like above
done_testing;
|
import { StateService } from '../state/state.service';
import { NotifyEnum } from '../enum/notify.enum';
export function notify(tittle: string, msg: string, type: NotifyEnum, time: number) {
StateService.$emit('notify', { tittle, msg, type, time });
}
|
<!--
Thank you for helping to improve pre-commit-terraform!
-->
Put an `x` into each box that applies:
- [ ] This PR introduces a breaking change.
- [ ] This PR fixes a bug.
- [ ] This PR adds new functionality.
- [ ] This PR enhances existing functionality.
### Description of your changes
<!--
Briefly describe what this pull request does. Be sure to direct your reviewers'
attention to anything that needs special consideration.
We love pull requests that resolve an open pre-commit-terraform issue. If yours does, you
can uncomment the below line to indicate which issue your PR fixes, for example
"Fixes #123456":
-->
<!-- Fixes # -->
### How can we test changes
<!--
Before reviewers can be confident in the correctness of this pull request, it
needs to be tested and shown to be correct. Briefly describe the testing that has
already been done or which is planned for this change.
-->
|
package io.testaxis.intellijplugin.models
import com.intellij.icons.AllIcons
import javax.swing.Icon
enum class BuildStatus {
SUCCESS {
override val icon get() = AllIcons.General.InspectionsOK
},
BUILD_FAILED {
override val icon get() = AllIcons.General.Warning
},
TESTS_FAILED {
override val icon get() = AllIcons.General.Error
},
UNKNOWN {
override val icon get() = AllIcons.RunConfigurations.TestUnknown
};
abstract val icon: Icon
}
|
define({
"defaultAreaUnit": "Standard arealenhet",
"defaultLengthUnit": "Standard lengdeenhet",
"acres": "Acre",
"sqMiles": "mile²",
"sqKilometers": "km²",
"hectares": "Hektar",
"sqYards": "yard²",
"sqFeet": "fot²",
"sqFeetUS": "fot² (USA)",
"sqMeters": "m²",
"miles": "Miles",
"kilometers": "Kilometer",
"feet": "Fot",
"feetUS": "fot (USA)",
"meters": "Meter",
"yards": "Yards",
"nauticalMiles": "Nautiske mil",
"showArea": "Område",
"showDistance": "Avstand",
"showLocation": "Lokasjon",
"showtools": "Vis måleverktøy:",
"operationalLayer": "Legg til målegrafikken som et operativt lag i kartet.",
"allHidedTips": "Du må velge minst ett verktøy."
});
|
<div {!! $attributes->merge($attrs) !!}>
@if(!isset($_header['hide']) && (!empty($_header) || isset($header)))
<div class="card-header {{ $_header['class'] ?? '' }}" {!! $_header['id'] ?? '' !!}>
@if(!empty($_header['headline']))
<x-headline :all="$_header['headline']"/>
@endif
{!! $_header['text'] ?? '' !!}
{!! $header ?? '' !!}
</div>
@endif
@if(!empty($_image) && !isset($_image['hide']))
<x-image :all="$_image" :class="'card-image ' . ($_image['class'] ?? 'card-img-top')"/>
@endif
@isset($image)
{!! $image !!}
@endisset
@isset($collapse['id'])
<div id="collapse-{{ $collapse['id'] }}" {!! $collapse['attrs'] !!}>
@endisset
@if(!isset($_body['hide']))
<div class="{{ $_body['class'] }}">
@if(!empty($_body['headline']))
<x-headline :all="$_body['headline']"/>
@endif
@if (!isset($_body['hideText']))
{!! $_body['text'] ?? '' !!}
{!! $body ?? '' !!}
@endif
</div>
@endif
@isset($collapse['id'])
</div>
@endisset
@if(!isset($_footer['hide']) && (!empty($_footer['text']) || isset($footer)))
<div class="{{ $_footer['class'] ?? 'card-footer' }}">
{!! $_footer['text'] ?? '' !!}
{!! $footer ?? '' !!}
</div>
@endif
</div>
|
<?php
/**
* Created by PhpStorm.
* User: alex
* Date: 12.09.18
* Time: 9:52
*/
namespace frontend\assets;
class SidebarAsset extends FrontAsset
{
public $js = [
'js/jquery.sticky-sidebar.js',
];
public $depends = [
'frontend\assets\AppAsset'
];
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.receiver
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
import scala.collection.JavaConversions._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.annotation.DeveloperApi
/**
* :: DeveloperApi ::
* Abstract class of a receiver that can be run on worker nodes to receive external data. A
* custom receiver can be defined by defining the functions onStart() and onStop(). onStart()
* should define the setup steps necessary to start receiving data,
* and onStop() should define the cleanup steps necessary to stop receiving data. A custom
* receiver would look something like this.
*
* @example {{{
* class MyReceiver(storageLevel: StorageLevel) extends Receiver[String](storageLevel) {
* def onStart() {
* // Setup stuff (start threads, open sockets, etc.) to start receiving data.
* // Must start new thread to receive data, as onStart() must be non-blocking.
*
* // Call store(...) in those threads to store received data into Spark's memory.
*
* // Call stop(...), restart() or reportError(...) on any thread based on how
* // different errors should be handled.
*
* // See corresponding method documentation for more details
* }
*
* def onStop() {
* // Cleanup stuff (stop threads, close sockets, etc.) to stop receiving data.
* }
* }
* }}}
*/
@DeveloperApi
abstract class Receiver[T](val storageLevel: StorageLevel) extends Serializable {
/**
* This method is called by the system when the receiver is started. This function
* must initialize all resources (threads, buffers, etc.) necessary for receiving data.
* This function must be non-blocking, so receiving the data must occur on a different
* thread. Received data can be stored with Spark by calling `store(data)`.
*
* If there are errors in threads started here, then following options can be done
* (i) `reportError(...)` can be called to report the error to the driver.
* The receiving of data will continue uninterrupted.
* (ii) `stop(...)` can be called to stop receiving data. This will call `onStop()` to
* clear up all resources allocated (threads, buffers, etc.) during `onStart()`.
* (iii) `restart(...)` can be called to restart the receiver. This will call `onStop()`
* immediately, and then `onStart()` after a delay.
*/
def onStart()
/**
* This method is called by the system when the receiver is stopped. All resources
* (threads, buffers, etc.) setup in `onStart()` must be cleaned up in this method.
*/
def onStop()
/** Override this to specify a preferred location (hostname). */
def preferredLocation : Option[String] = None
/**
* Store a single item of received data to Spark's memory.
* These single items will be aggregated together into data blocks before
* being pushed into Spark's memory.
*/
def store(dataItem: T) {
executor.pushSingle(dataItem)
}
/** Store an ArrayBuffer of received data as a data block into Spark's memory. */
def store(dataBuffer: ArrayBuffer[T]) {
executor.pushArrayBuffer(dataBuffer, None, None)
}
/**
* Store an ArrayBuffer of received data as a data block into Spark's memory.
* The metadata will be associated with this block of data
* for being used in the corresponding InputDStream.
*/
def store(dataBuffer: ArrayBuffer[T], metadata: Any) {
executor.pushArrayBuffer(dataBuffer, Some(metadata), None)
}
/** Store an iterator of received data as a data block into Spark's memory. */
def store(dataIterator: Iterator[T]) {
executor.pushIterator(dataIterator, None, None)
}
/**
* Store an iterator of received data as a data block into Spark's memory.
* The metadata will be associated with this block of data
* for being used in the corresponding InputDStream.
*/
def store(dataIterator: java.util.Iterator[T], metadata: Any) {
executor.pushIterator(dataIterator, Some(metadata), None)
}
/** Store an iterator of received data as a data block into Spark's memory. */
def store(dataIterator: java.util.Iterator[T]) {
executor.pushIterator(dataIterator, None, None)
}
/**
* Store an iterator of received data as a data block into Spark's memory.
* The metadata will be associated with this block of data
* for being used in the corresponding InputDStream.
*/
def store(dataIterator: Iterator[T], metadata: Any) {
executor.pushIterator(dataIterator, Some(metadata), None)
}
/**
* Store the bytes of received data as a data block into Spark's memory. Note
* that the data in the ByteBuffer must be serialized using the same serializer
* that Spark is configured to use.
*/
def store(bytes: ByteBuffer) {
executor.pushBytes(bytes, None, None)
}
/**
* Store the bytes of received data as a data block into Spark's memory.
* The metadata will be associated with this block of data
* for being used in the corresponding InputDStream.
*/
def store(bytes: ByteBuffer, metadata: Any) {
executor.pushBytes(bytes, Some(metadata), None)
}
/** Report exceptions in receiving data. */
def reportError(message: String, throwable: Throwable) {
executor.reportError(message, throwable)
}
/**
* Restart the receiver. This will call `onStop()` immediately and return.
* Asynchronously, after a delay, `onStart()` will be called.
* The `message` will be reported to the driver.
* The delay is defined by the Spark configuration
* `spark.streaming.receiverRestartDelay`.
*/
def restart(message: String) {
executor.restartReceiver(message)
}
/**
* Restart the receiver. This will call `onStop()` immediately and return.
* Asynchronously, after a delay, `onStart()` will be called.
* The `message` and `exception` will be reported to the driver.
* The delay is defined by the Spark configuration
* `spark.streaming.receiverRestartDelay`.
*/
def restart(message: String, error: Throwable) {
executor.restartReceiver(message, Some(error))
}
/**
* Restart the receiver. This will call `onStop()` immediately and return.
* Asynchronously, after the given delay, `onStart()` will be called.
*/
def restart(message: String, error: Throwable, millisecond: Int) {
executor.restartReceiver(message, Some(error), millisecond)
}
/** Stop the receiver completely. */
def stop(message: String) {
executor.stop(message, None)
}
/** Stop the receiver completely due to an exception */
def stop(message: String, error: Throwable) {
executor.stop(message, Some(error))
}
def isStarted(): Boolean = {
executor.isReceiverStarted()
}
/** Check if receiver has been marked for stopping. */
def isStopped(): Boolean = {
executor.isReceiverStopped()
}
/** Get unique identifier of this receiver. */
def streamId = id
/*
* =================
* Private methods
* =================
*/
/** Identifier of the stream this receiver is associated with. */
private var id: Int = -1
/** Handler object that runs the receiver. This is instantiated lazily in the worker. */
private[streaming] var executor_ : ReceiverSupervisor = null
/** Set the ID of the DStream that this receiver is associated with. */
private[streaming] def setReceiverId(id_ : Int) {
id = id_
}
/** Attach Network Receiver executor to this receiver. */
private[streaming] def attachExecutor(exec: ReceiverSupervisor) {
assert(executor_ == null)
executor_ = exec
}
/** Get the attached executor. */
private def executor = {
assert(executor_ != null, "Executor has not been attached to this receiver")
executor_
}
}
|
import { ARCCookie as LegacyARCCookie } from './legacy/models/Cookies.js';
export type CookieSameSiteType = 'unspecified' | 'no_restriction' | 'lax' | 'strict';
export type CookieChangeReason = 'explicit' | 'overwrite' | 'expired' | 'evicted' | 'expired-overwrite';
// eslint-disable-next-line no-control-regex
const fieldContentRegExp = /^[\u0009\u0020-\u007e\u0080-\u00ff]+$/;
export interface IElectronCookie {
name: string;
value: string;
/**
* The domain of the cookie; this will be normalized with a preceding dot so that it's also valid for subdomains.
*/
domain?: string;
/**
* Whether the cookie is a host-only cookie; this will only be true if no domain was passed.
*/
hostOnly?: boolean;
/**
* The path of the cookie.
*/
path?: string;
secure?: boolean;
httpOnly?: boolean;
session?: boolean;
expirationDate?: number;
sameSite?: CookieSameSiteType;
}
export interface IHttpCookieChangeRecord {
/**
* The cookie that was changed.
*/
cookie: IHttpCookie;
/**
* The cause of the change with one of the following values:
* - `explicit` - The cookie was changed directly by a consumer's action.
* - `overwrite` - The cookie was automatically removed due to an insert operation that overwrote it.
* - `expired` - The cookie was automatically removed as it expired.
* - `evicted` - The cookie was automatically evicted during garbage collection.
* - `expired-overwrite` - The cookie was overwritten with an already-expired expiration date.
*/
cause: CookieChangeReason;
/**
* `true` if the cookie was removed, `false` otherwise.
*/
removed: boolean;
}
export interface IHttpCookie {
// https://www.electronjs.org/docs/latest/api/structures/cookie
/**
* The name of the cookie.
*/
name: string;
/**
* The value of the cookie.
*/
value: string;
/**
* The domain of the cookie; this will be normalized with a preceding dot so that
* it's also valid for subdomains.
*/
domain?: string;
/**
* Whether the cookie is a host-only cookie; this will only be `true` if no domain
* was passed.
*/
hostOnly?: boolean;
/**
* The path of the cookie.
*/
path?: string;
/**
* Whether the cookie is marked as secure.
*/
secure?: boolean;
/**
* Whether the cookie is marked as HTTP only.
*/
httpOnly?: boolean;
/**
* Whether the cookie is a session cookie or a persistent cookie with an expiration
* date.
*/
session?: boolean;
/**
* The expiration date of the cookie as the number of seconds since the UNIX epoch.
* Not provided for session cookies.
*/
expirationDate?: number;
/**
* The Same Site policy applied to this cookie. Can be `unspecified`, `no_restriction`, `lax` or `strict`.
* @default unspecified
*/
sameSite?: CookieSameSiteType;
}
export class HttpCookie {
/**
* The name of the cookie.
*/
name = '';
/**
* The value of the cookie.
*/
value = '';
/**
* Whether the cookie is a host-only cookie; this will only be `true` if no domain
* was passed.
*/
hostOnly?: boolean;
/**
* The path of the cookie.
*/
path?: string;
/**
* Whether the cookie is marked as secure.
*/
secure?: boolean;
/**
* Whether the cookie is marked as HTTP only.
*/
httpOnly?: boolean;
/**
* Whether the cookie is a session cookie or a persistent cookie with an expiration
* date.
*/
session?: boolean = true;
/**
* The Same Site policy applied to this cookie. Can be `unspecified`, `no_restriction`, `lax` or `strict`.
* @default unspecified
*/
sameSite: CookieSameSiteType = 'unspecified';
protected _domain?: string;
protected _maxAge?: number;
protected _expirationDate?: number | undefined;
/**
* @param max The max age value
*/
set maxAge(max: number | undefined) {
const typedMax = Number(max);
if (Number.isNaN(typedMax)) {
return;
}
this._maxAge = typedMax;
if (typedMax <= 0) {
// see http://stackoverflow.com/a/11526569/1127848
// and https://tools.ietf.org/html/rfc6265#section-5.2.2
this._expirationDate = -8640000000000000;
} else {
let now = Date.now();
now += typedMax * 1000;
this._expirationDate = now;
}
this.session = false;
}
/**
* @return Returns a value of maxAge property
*/
get maxAge(): number | undefined {
return this._maxAge;
}
/**
* The expiration date of the cookie as the number of seconds since the UNIX epoch.
* Not provided for session cookies.
*/
set expirationDate(expires: number | Date | string | undefined) {
this.setExpirationTime(expires);
}
get expirationDate(): number | undefined {
return this._expirationDate;
}
/**
* The domain of the cookie; this will be normalized with a preceding dot so that
* it's also valid for subdomains.
*/
set domain(domain: string | undefined) {
this._domain = domain;
if (!domain) {
this.hostOnly = false;
} else {
this.hostOnly = true;
}
}
get domain(): string | undefined {
return this._domain;
}
// get samesite(): CookieSameSiteType | undefined {
// return this.sameSite;
// }
set samesite(value: CookieSameSiteType | undefined) {
this.sameSite = value || 'unspecified';
}
// get httponly(): boolean | undefined {
// return this.httpOnly;
// }
set httponly(value: boolean | undefined) {
this.httpOnly = value;
}
// get hostonly(): boolean | undefined {
// return this.hostOnly;
// }
set hostonly(value: boolean | undefined) {
this.hostOnly = value;
}
// get ['max-age'](): number | undefined {
// return this.maxAge;
// }
set ['max-age'](value: number | undefined) {
this.maxAge = value;
}
set expires(value: string | number | Date | undefined) {
this.expirationDate = value;
}
static fromLegacy(old: LegacyARCCookie): HttpCookie {
const init: IHttpCookie = {
name: old.name,
value: old.value || '',
sameSite: 'unspecified',
};
if (old.domain) {
init.domain = old.domain;
}
if (typeof old.expires === 'number') {
init.expirationDate = old.expires;
}
if (typeof old.hostOnly === 'boolean') {
init.hostOnly = old.hostOnly;
}
if (typeof old.httpOnly === 'boolean') {
init.httpOnly = old.httpOnly;
}
if (typeof old.path === 'string') {
init.path = old.path;
}
if (typeof old.secure === 'boolean') {
init.secure = old.secure;
}
if (typeof old.session === 'boolean') {
init.session = old.session;
}
return new HttpCookie(init);
}
static fromValue(name: string, value = ''): HttpCookie {
const init: IHttpCookie = {
name,
value,
sameSite: 'unspecified',
};
return new HttpCookie(init);
}
constructor(input?: string|IHttpCookie) {
let init: IHttpCookie;
if (typeof input === 'string') {
init = JSON.parse(input);
} else if (typeof input === 'object') {
init = input;
} else {
init = {
name: '',
value: '',
sameSite: 'unspecified',
};
}
this.new(init);
}
new(init: IHttpCookie): void {
const {
name = '',
value = '',
sameSite = 'unspecified',
domain, expirationDate, hostOnly, httpOnly, path, secure, session
} = init;
if (name && !fieldContentRegExp.test(name)) {
throw new TypeError('Argument `name` is invalid');
}
if (value && !fieldContentRegExp.test(value)) {
throw new TypeError('Argument `value` is invalid');
}
this.name = name;
this.value = value;
this.sameSite = sameSite;
if (typeof path === 'string') {
if (!fieldContentRegExp.test(path)) {
throw new TypeError('Option `path` is invalid');
}
this.path = path;
} else {
this.path = undefined;
}
if (domain) {
if (!fieldContentRegExp.test(domain)) {
throw new TypeError('Option `domain` is invalid');
}
this.domain = domain;
} else {
this.domain = undefined;
}
if (typeof expirationDate === 'number') {
this.expirationDate = expirationDate;
} else {
this.expirationDate = undefined;
}
if (typeof hostOnly === 'boolean') {
this.hostOnly = hostOnly;
} else {
this.hostOnly = undefined;
}
if (typeof httpOnly === 'boolean') {
this.httpOnly = httpOnly;
} else {
this.httpOnly = undefined;
}
if (typeof secure === 'boolean') {
this.secure = secure;
} else {
this.secure = undefined;
}
if (typeof session === 'boolean') {
this.session = session;
} else {
this.session = !this.expirationDate;
}
}
/**
* Sets value for `expirationDate` property.
* @param expires The value as string, date, or a number
*/
setExpirationTime(expires: Date | string | number | undefined): void {
let value: number | undefined;
const typedNumber = Number(expires);
if (!Number.isNaN(typedNumber)) {
value = typedNumber;
} else if (expires instanceof Date) {
value = expires.getTime();
} else if (typeof expires === 'string') {
const tmp = new Date(expires);
if (tmp.toString() === 'Invalid Date') {
value = 0;
} else {
value = tmp.getTime();
}
} else {
value = undefined;
}
this._expirationDate = value;
this.session = !value;
}
/**
* @return Cookie's `name=value` string.
*/
toString(): string {
const { name, value } = this;
return `${name}=${value}`;
}
toJSON(): IHttpCookie {
const result: IHttpCookie = {
name: this.name,
value: this.value,
sameSite: this.sameSite,
};
if (this.domain) {
result.domain = this.domain;
}
if (typeof this.expirationDate === 'number') {
result.expirationDate = this.expirationDate;
}
if (typeof this.hostOnly === 'boolean') {
result.hostOnly = this.hostOnly;
}
if (typeof this.httpOnly === 'boolean') {
result.httpOnly = this.httpOnly;
}
if (typeof this.secure === 'boolean') {
result.secure = this.secure;
}
if (typeof this.session === 'boolean') {
result.session = this.session;
}
if (this.path) {
result.path = this.path;
}
return result;
}
/**
* Returns a Cookie as a HTTP header string.
* @return Cookie string as a HTTP header value
*/
toHeader(): string {
let header = this.toString();
let expires;
if (this._expirationDate) {
expires = new Date(this._expirationDate);
if (expires.toString() === 'Invalid Date') {
expires = new Date(0);
}
}
if (expires) {
header += `; expires=${expires.toUTCString()}`;
}
const { path, domain, httpOnly, sameSite, secure } = this;
if (path) {
header += `; path=${path}`;
}
if (domain) {
header += `; domain=${domain}`;
}
if (httpOnly) {
header += `; HttpOnly`;
}
switch (sameSite) {
case 'lax': header += `; SameSite=Lax`; break;
case 'no_restriction': header += `; SameSite=None`; break;
case 'strict': header += `; SameSite=Strict`; break;
}
if (secure || sameSite === 'strict') {
header += `; Secure`;
}
return header;
}
}
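// A minimal usage sketch (not part of the original module) showing the HttpCookie API defined
// above; the cookie names and values are made up for illustration only.
//
// const cookie = new HttpCookie({ name: 'sid', value: 'abc123', path: '/', secure: true });
// cookie.maxAge = 3600;  // marks the cookie as persistent and derives `expirationDate`
// cookie.toString();     // "sid=abc123"
// cookie.toHeader();     // "sid=abc123; expires=<UTC date one hour from now>; path=/; Secure"
//
// HttpCookie.fromValue('theme', 'dark').toJSON();
// // -> { name: 'theme', value: 'dark', sameSite: 'unspecified', session: true }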
|
module ActiveData
module Model
module Attributes
module Reflections
class Represents < Attribute
def self.build(target, generated_methods, name, *args, &block)
options = args.extract_options!
reference = target.reflect_on_association(options[:of]) if target.respond_to?(:reflect_on_association)
reference ||= target.reflect_on_attribute(options[:of]) if target.respond_to?(:reflect_on_attribute)
options[:of] = reference.name if reference
validates_nested = target.respond_to?(:validates_nested) && !target.validates_nested?(options[:of])
target.validates_nested(options[:of]) if validates_nested
super(target, generated_methods, name, *args, options, &block)
end
def initialize(name, options)
super
raise ArgumentError, "Undefined reference for `#{name}`" if reference.blank?
end
def type
Object
end
def reference
@reference ||= options[:of].to_s
end
def column
@column ||= options[:column].presence.try(:to_s) || name
end
def reader
@reader ||= options[:reader].presence.try(:to_s) || column
end
def reader_before_type_cast
@reader_before_type_cast ||= "#{reader}_before_type_cast"
end
def writer
@writer ||= "#{options[:writer].presence || column}="
end
def inspect_reflection
"#{name}: (represents)"
end
end
end
end
end
end
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\User;
use App\ClassSchedules;
use App\ClassSchedulesSubjects;
use App\Enrolled;
use App\Payments;
use App\StudentGrades;
use App\GradeAndSubjects;
class EnrollmentController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
return view('Students.Enrollment.index');
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create()
{
return view('Students.Enrollment.create');
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(Request $request)
{
//
if($request->typeOfPayment == "Full Payment"){
$enrolledId = Enrolled::create([
'userId' => $request->userId,
'classSchedId' => $request->section,
'typeOfPayment' => $request->typeOfPayment,
'tuitionFee' => $request->tuitionFee1,
'gradeLevel' => $request->gradeLevel,
'paymentStatus' => 'Fully Paid',
])->id;
Payments::create([
'userId' => $request->userId,
'classSchedId' =>$request->section,
'enrolledId' => $enrolledId,
'sy' => $request->sy,
'paymentAmount' => $request->payment1,
'dateOfPayment' => $request->date1,
'notes' => $request->notes1,
'remarks' => 'Fully Paid',
]);
}else{
$enrolledId = Enrolled::create([
'userId' => $request->userId,
'classSchedId' => $request->section,
'typeOfPayment' => $request->typeOfPayment,
'tuitionFee' => $request->tuitionFee2,
'monthlyPayment' => $request->monthlyPayment2,
'gradeLevel' => $request->gradeLevel,
'paymentStatus' => 'Partially Paid',
])->id;
Payments::create([
'userId' => $request->userId,
'classSchedId' =>$request->section,
'enrolledId' => $enrolledId,
'sy' => $request->sy,
'paymentAmount' => $request->payment2,
'dateOfPayment' => $request->date2,
'notes' => $request->notes2,
'remarks' => 'Downpayment',
]);
}
$subjects = GradeAndSubjects::where('GradeLevel',$request->gradeLevel)->get();
foreach($subjects as $subject){
StudentGrades::create([
'userId' => $request->userId,
'enrolledId' => $enrolledId,
'classSchedId' =>$request->section,
'gradeLevel' => $request->gradeLevel,
'subject' => $subject->Subject,
]);
}
if($request->gradeLevel == 'Grade 1'){
User::find($request->userId)->update([
'grade1' => 'Enrolled',
]);
return redirect()->route('Grade1Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 2'){
User::find($request->userId)->update([
'grade2' => 'Enrolled',
]);
return redirect()->route('Grade2Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 3'){
User::find($request->userId)->update([
'grade3' => 'Enrolled',
]);
return redirect()->route('Grade3Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 4'){
User::find($request->userId)->update([
'grade4' => 'Enrolled',
]);
return redirect()->route('Grade4Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 5'){
User::find($request->userId)->update([
'grade5' => 'Enrolled',
]);
return redirect()->route('Grade5Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 6'){
User::find($request->userId)->update([
'grade6' => 'Enrolled',
]);
return redirect()->route('Grade6Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 7'){
User::find($request->userId)->update([
'grade7' => 'Enrolled',
]);
return redirect()->route('Grade7Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 8'){
User::find($request->userId)->update([
'grade8' => 'Enrolled',
]);
return redirect()->route('Grade8Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 9'){
User::find($request->userId)->update([
'grade9' => 'Enrolled',
]);
return redirect()->route('Grade9Index')->with('success', 'Student Enrolled Successfully');
}else if($request->gradeLevel == 'Grade 10'){
User::find($request->userId)->update([
'grade10' => 'Enrolled',
]);
return redirect()->route('Grade10Index')->with('success', 'Student Enrolled Successfully');
}
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function show($id)
{
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function edit($id)
{
return view('Students.Enrollment.edit');
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
public function update(Request $request, $id)
{
//
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function destroy($id)
{
//
}
public function enrollCreate(Request $request)
{
$student = User::find($request->studentId);
$gradeLevel = $request->gradeLevel;
$section = ClassSchedules::where('gradeLevel',$request->gradeLevel)->get();
return view('Students.Enrollment.create',compact('section','student','gradeLevel'));
}
public function enrollShow(Request $request)
{
$gradeLevel = $request->gradeLevel;
$enrolled = Enrolled::where('userId',$request->studentId)->where('gradeLevel',$gradeLevel)->first();
$payments = Payments::where('enrolledId',$enrolled->id)->get();
return view('Students.Enrollment.show',compact('enrolled','payments','gradeLevel'));
}
public function Grade1Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade1Index',compact('students'));
}
public function Grade2Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade2Index',compact('students'));
}
public function Grade3Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade3Index',compact('students'));
}
public function Grade4Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade4Index',compact('students'));
}
public function Grade5Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade5Index',compact('students'));
}
public function Grade6Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade6Index',compact('students'));
}
public function Grade7Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade7Index',compact('students'));
}
public function Grade8Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade8Index',compact('students'));
}
public function Grade9Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade9Index',compact('students'));
}
public function Grade10Index()
{
$students = User::where('accountType','Student')->where('isActivated',1)->get();
return view('Students.Enrollment.grade10Index',compact('students'));
}
}
|
<?hh
$x = Vector {'a'};
var_dump($x->toKeysArray());
var_dump($x->lazy()->toKeysArray());
var_dump($x->lazy()->map(function($x){return $x;})->toKeysArray());
$x = Map {123 => 'a'};
var_dump($x->toKeysArray());
var_dump($x->lazy()->toKeysArray());
var_dump($x->lazy()->map(function($x){return $x;})->toKeysArray());
$x = Pair {'a', 'b'};
var_dump($x->toKeysArray());
var_dump($x->lazy()->toKeysArray());
var_dump($x->lazy()->map(function($x){return $x;})->toKeysArray());
|
Digital Image Processing
This project performs the following operations on a given image:
Brightness,
Contrast,
Range Filter,
Median Filter,
Binarization,
HSL.
|
# description
`attore` is an actor and IO framework for Scala 3.
The design of the framework is not stable at this stage; please do not use it in production environments.
|
import 'package:animatingpagetransition/theme.dart';
import 'package:animatingpagetransition/ui/beachscreen.dart';
import 'package:animatingpagetransition/utils/fadepageroute.dart';
import 'package:animatingpagetransition/utils/title.dart';
import 'package:animatingpagetransition/widgets/header.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:line_icons/line_icons.dart';
import '../viewstate.dart';
class HomeScreen extends StatefulWidget {
@override
_HomeScreenState createState() => _HomeScreenState();
}
class _HomeScreenState extends State<HomeScreen> {
Map<int, Map<String, String>> beaches = {
0: {'beachName': 'Miami', 'beachImage': 'assets/images/beach1.jpg'},
1: {'beachName': 'Rio', 'beachImage': 'assets/images/beach2.jpg'},
2: {'beachName': 'Ca. USA', 'beachImage': 'assets/images/beach3.jpg'},
3: {'beachName': 'Mexico', 'beachImage': 'assets/images/beach1.jpg'},
4: {'beachName': 'Toronto', 'beachImage': 'assets/images/beach2.jpg'}
};
@override
Widget build(BuildContext context) {
var deviceSize = MediaQuery.of(context).size;
return Scaffold(
appBar: AppBar(
leading: Padding(
padding: const EdgeInsets.only(left: 10),
child: Icon(LineIcons.bars, color: Color(0xFF1A201A)),
),
elevation: 0,
actions: [
Padding(
padding: const EdgeInsets.only(right: 2),
child: IconButton(
icon: Icon(LineIcons.search, color: Color(0xFF1A201A)),
onPressed: () {},
),
),
],
),
backgroundColor: Color(0xFFFCFDFD),
body: ListView(
physics: BouncingScrollPhysics(),
children: [
Row(
children: [
Hero(
tag: 'beaches',
flightShuttleBuilder: (
BuildContext flightContext,
Animation<double> animation,
HeroFlightDirection flightDirection,
BuildContext fromHeroContext,
BuildContext toHeroContext,
) {
return Header(
viewState: flightDirection == HeroFlightDirection.push
? ViewState.shrink
: ViewState.enlarge,
smallFontSize: 20.0,
largeFontSize: 32.0,
smallIconSize: 24.0,
largeIconSize: 0.0,
);
},
child: Header(
viewState: ViewState.enlarged,
),
),
],
),
Padding(
padding: const EdgeInsets.all(3),
),
Row(
mainAxisAlignment: MainAxisAlignment.end,
children: [
Icon(
LineIcons.angle_down,
size: 16,
),
Padding(
padding: const EdgeInsets.all(4),
),
Text(
'Most Visited',
style: sorttext,
),
Padding(
padding: const EdgeInsets.all(8),
)
],
),
Container(
padding: const EdgeInsets.only(top: 5),
width: deviceSize.width,
height: 400,
child: BeachList(beaches: beaches),
)
],
),
);
}
}
class BeachList extends StatelessWidget {
const BeachList({
Key key,
@required this.beaches,
}) : super(key: key);
final Map<int, Map<String, String>> beaches;
@override
Widget build(BuildContext context) {
return ListView.builder(
padding: const EdgeInsets.only(left: 25, right: 16),
itemCount: beaches.keys.length,
physics: BouncingScrollPhysics(),
itemBuilder: (context, index) {
return InkWell(
onTap: () {
var router = FadePageRoute(
fullscreenDialog: true,
builder: (BuildContext context) =>
BeachScreen(beachId: beaches.keys.elementAt(index)));
Navigator.of(context).push(router);
},
child: Padding(
padding: const EdgeInsets.only(top: 15),
child: Row(
children: [
Container(
width: 70,
child: Hero(
tag: 'title${beaches.keys.elementAt(index)}',
flightShuttleBuilder: (
BuildContext flightContext,
Animation<double> animation,
HeroFlightDirection flightDirection,
BuildContext fromHeroContext,
BuildContext toHeroContext,
) {
return DestinationTitle(
title: beaches[index]['beachName'],
isOverflow: true,
viewState: flightDirection == HeroFlightDirection.push
? ViewState.enlarge
: ViewState.shrink,
smallFontSize: 15.0,
largeFontSize: 32.0,
);
},
child: DestinationTitle(
title: beaches[index]['beachName'],
viewState: ViewState.shrunk,
),
),
),
Expanded(
child: Hero(
tag: 'image${beaches.keys.elementAt(index)}',
child: Image.asset(
beaches[index]['beachImage'],
height: 56,
fit: BoxFit.cover,
),
),
),
FloatingActionButton(
heroTag: 'beach${beaches.keys.elementAt(index)}',
backgroundColor: Color(0xFF010101),
elevation: 1,
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.all(Radius.circular(0))),
onPressed: () {},
child: Icon(Icons.add),
),
],
),
),
);
},
);
}
}
|
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE NumericUnderscores #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
{-| Generators for the property tests
-}
module Convex.Options.Generators(
-- * Generators for @convex-options@ types
optionType,
optionState,
option,
-- * Generators for @Ledger@ types
tokenName,
assetClass,
nonNegativeAda,
wallet
) where
import Convex.Options.OnChain.Types (Option (..), OptionState (..), OptionType (..))
import Cooked.MockChain.Wallet (Wallet)
import Cooked.MockChain.Wallet qualified as W
import Cooked.Currencies qualified as C
import Plutus.V1.Ledger.Ada (Ada (..))
import Plutus.V1.Ledger.Value (AssetClass, TokenName)
import Plutus.V1.Ledger.Value qualified as V
import Test.QuickCheck qualified as QC
optionType :: QC.Gen OptionType
optionType = QC.elements [Put, Call]
optionState :: QC.Gen OptionState
optionState = QC.elements [Ready, Exercised]
tokenName :: QC.Gen TokenName
tokenName = QC.elements ["a", "b", "c", "My token", "DOGECOIN", "BUYER", "SELLER"]
assetClass :: QC.Gen AssetClass
assetClass = V.assetClass C.permanentCurrencySymbol <$> tokenName
nonNegativeAda :: QC.Gen Ada
nonNegativeAda = Lovelace <$> QC.arbitrarySizedNatural
wallet :: QC.Gen Wallet
wallet = QC.elements W.knownWallets
option :: QC.Gen Option
option =
Option
<$> assetClass
<*> (fromIntegral @Int <$> QC.chooseBoundedIntegral (0, 10_000_000_000))
<*> nonNegativeAda
<*> (Lovelace . fromIntegral @Int <$> QC.chooseBoundedIntegral (2_000_000, 20_000_000))
<*> optionType
<*> pure 0 -- FIXME
<*> pure 10_000
<*> pure "BUYER"
<*> pure "SELLER"
<*> pure "UTXO"
|
package gofmts
import (
"io"
"io/ioutil"
)
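// ApplyReplacements copies r to w, substituting the replacement text for every
// issue that carries one. Issues are assumed to be ordered by offset; issues
// without a replacement are returned to the caller as unresolved.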
func ApplyReplacements(w io.Writer, r io.Reader, issues []Issue) (unresolvedIssues []Issue, _ error) {
lastOffset := 0
	for _, i := range issues {
		replacement, ok := i.(IssueWithReplacement)
		if !ok {
			// issues without a replacement cannot be applied automatically
			unresolvedIssues = append(unresolvedIssues, i)
			continue
		}
if _, err := io.CopyN(w, r, int64(replacement.Position().Offset-lastOffset)); err != nil {
return nil, err
}
if _, err := w.Write([]byte(replacement.Replacement())); err != nil {
return nil, err
}
if _, err := io.CopyN(ioutil.Discard, r, int64(replacement.Length())); err != nil {
return nil, err
}
lastOffset = replacement.Position().Offset + replacement.Length()
}
// copy the rest
_, err := io.Copy(w, r)
return unresolvedIssues, err
}
|
import { Repository } from "typeorm";
import { ApiToken } from "../models/apiToken.model";
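/**
 * Base class for external API integrations. Concrete providers set the
 * endpoint and token URLs, expose their authentication payload via `Auth`,
 * and persist issued tokens through the injected `tokenRepository`.
 */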
export abstract class ApiProvider {
public ApiUrl: string;
public ApiTokenUrl: string;
abstract get Auth(): any;
constructor(
public ApiKey: string,
public ApiSecret: string,
public tokenRepository: Repository<ApiToken>,
) {}
Validate(onFalseMessage: string) {
if (!(this.ApiUrl && this.ApiTokenUrl && this.ApiKey && this.ApiSecret))
throw new Error(onFalseMessage);
}
  abstract GetToken(): Promise<string>;
}
export enum ApiProviders {
Spotify = 1,
}
|
using System;
namespace EventsExpress.Db.Entities
{
public class AccountRole
{
public Enums.Role RoleId { get; set; }
public Guid AccountId { get; set; }
public virtual Account Account { get; set; }
public virtual Role Role { get; set; }
}
}
|
using System.Reflection;
using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("NuGet importer for Unity")]
[assembly: AssemblyDescription("NuGet importer for Unity.")]
[assembly: AssemblyCompany("kumaS")]
[assembly: AssemblyCopyright("Apache 2.0 Copyright 2021 kumaS")]
[assembly: AssemblyVersion("1.1.0.1")]
[assembly: InternalsVisibleTo("kumaS.NuGetImporter.Editor.Tests")]
|
// Copyright Kani Contributors
// SPDX-License-Identifier: Apache-2.0 OR MIT
//! To run this test, do
//! kani fixme_varadic.rs -- lib.c
use std::os::raw::c_int;
// https://doc.rust-lang.org/reference/items/external-blocks.html
// https://doc.rust-lang.org/nomicon/ffi.html
extern "C" {
fn my_add(num: usize, ...) -> usize;
fn my_add2(num: usize, ...) -> c_int;
}
#[kani::proof]
fn main() {
unsafe {
assert!(my_add(2 as usize, 3 as usize, 4 as usize) == 7); //works
assert!(my_add(3, 3 as usize, 4 as usize, 5 as usize) == 12); //works
assert!(my_add2(2, -1 as c_int, -3 as c_int) == -4); //works
}
}
|
// SPDX-License-Identifier: Apache-2.0
import { RuleTester } from "eslint";
import { InputOptions } from "./options";
import { rule } from "./rule";
const groups: InputOptions["group-ordering"] = [
{ name: "parent directories", match: "^\\.\\.", order: 10 },
{ name: "current directory", match: "^\\.", order: 20 },
{ name: "third-party", match: ".*", order: 5 },
];
const tester = new RuleTester({
parserOptions: {
ecmaVersion: 6,
sourceType: "module",
},
});
tester.run("ordered-imports", rule, {
valid: [
// "declaration-ordering": ["any"]
{
options: [{ "declaration-ordering": ["any"] }],
code: `
import e from "e";
import { b, c, d } from "bcd";
import a from "a";
import _ from "lodash";
`,
},
// "declaration-ordering": ["name", ?]
{
options: [{ "declaration-ordering": ["name", "case-insensitive"] }],
code: `
import _ from "lodash";
import a from "e";
import A from "E";
import { b, c, d } from "bcd";
import e from "a";
import E from "A";
`,
},
{
options: [{ "declaration-ordering": ["name", "lowercase-last"] }],
code: `
import _ from "lodash";
import A from "E";
import E from "A";
import { b, c, d } from "bcd";
import a from "e";
import e from "a";
`,
},
// "symbols-first": false
// "declaration-ordering": ["name", ?]
{
options: [{ "symbols-first": false, "declaration-ordering": ["name", "case-insensitive"] }],
code: `
import a from "e";
import A from "E";
import { b, c, d } from "bcd";
import e from "a";
import E from "A";
import _ from "lodash";
`,
},
{
options: [{ "symbols-first": false, "declaration-ordering": ["name", "lowercase-last"] }],
code: `
import A from "E";
import E from "A";
import { b, c, d } from "bcd";
import _ from "lodash";
import a from "e";
import e from "a";
`,
},
// "declaration-ordering": ["source", ?]
{
options: [{ "declaration-ordering": ["source", "case-insensitive"] }],
code: `
import * as A from "A";
import b from "b";
import "C";
import { d } from "d";
import _ from "lodash";
import a from "./a";
import B from "./B";
`,
},
{
options: [{ "declaration-ordering": ["source", "lowercase-last"] }],
code: `
import * as A from "A";
import "C";
import b from "b";
import { d } from "d";
import _ from "lodash";
import B from "./B";
import a from "./a";
`,
},
// "declaration-ordering": ["type", ?]
{
options: [
{ "declaration-ordering": ["type", { secondaryOrdering: ["name", "case-insensitive"] }] },
],
code: `
import "d";
import "D";
import _ from "lodash";
import a from "A";
import A from "a";
import * as b from "b";
import * as B from "B";
import { c } from "c";
import { C } from "C";
`,
},
{
options: [
{ "declaration-ordering": ["type", { secondaryOrdering: ["name", "lowercase-last"] }] },
],
code: `
import "D";
import "d";
import _ from "lodash";
import A from "a";
import a from "A";
import * as B from "B";
import * as b from "b";
import { c } from "c";
import { C } from "C";
`,
},
{
options: [
{ "declaration-ordering": ["type", { secondaryOrdering: ["source", "case-insensitive"] }] },
],
code: `
import "g";
import "H";
import a from "a";
import B from "B";
import _ from "lodash";
import * as c from "c";
import * as D from "D";
import { e } from "e";
import { E } from "F";
`,
},
{
options: [
{ "declaration-ordering": ["type", { secondaryOrdering: ["source", "lowercase-last"] }] },
],
code: `
import "H";
import "g";
import B from "B";
import a from "a";
import _ from "lodash";
import * as D from "D";
import * as c from "c";
import { E } from "F";
import { e } from "e";
`,
},
{
options: [{ "declaration-ordering": ["type", { secondaryOrdering: ["any"] }] }],
code: `
import "z1";
import "a1";
import z2 from "z2";
import a2 from "a2";
import _ from "lodash";
import * as z3 from "z3";
import * as a3 from "a3";
import { z4 } from "z4";
import { a4 } from "a4";
`,
},
// "specifier-ordering": "any"
{
options: [{ "specifier-ordering": "any" }],
code: `import { _, c, b, a, C, B, A } from "a";`,
},
// "specifier-ordering": "lowercase-last"
{
options: [{ "specifier-ordering": "lowercase-last" }],
code: `import { _, A, B, C, a, b, c } from "a";`,
},
// "specifier-ordering": "case-insensitive"
{
options: [{ "specifier-ordering": "case-insensitive" }],
code: `import { _, A, a, B, b, C, c } from "a";`,
},
// "symbols-first": false
// "specifier-ordering": "lowercase-last"
{
options: [{ "symbols-first": false, "specifier-ordering": "lowercase-last" }],
code: `import { A, B, C, _, a, b, c } from "a";`,
},
// "symbols-first": false
// "specifier-ordering": "case-insensitive"
{
options: [{ "symbols-first": false, "specifier-ordering": "case-insensitive" }],
code: `import { A, a, B, b, C, c, _ } from "a";`,
},
// "group-ordering"
{
options: [{ "group-ordering": groups }],
code: `
import "b1";
import "b2";
import "../a1";
import "../a2";
import "./b1";
import "./b2";
`,
},
],
invalid: [
// "declaration-ordering": ["name", ?]
{
options: [{ "declaration-ordering": ["name", "case-insensitive"] }],
code: `
import D from "z1";
import c from "z2";
import { foo } from "foo";
import B from "z3";
import a from "z4";
import _ from "lodash";
`,
output: `
import _ from "lodash";
import a from "z4";
import { foo } from "foo";
import B from "z3";
import c from "z2";
import D from "z1";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
{
options: [{ "declaration-ordering": ["name", "lowercase-last"] }],
code: `
import D from "z1";
import c from "z2";
import { foo } from "foo";
import B from "z3";
import a from "z4";
import _ from "lodash";
`,
output: `
import _ from "lodash";
import B from "z3";
import { foo } from "foo";
import D from "z1";
import a from "z4";
import c from "z2";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
// "declaration-ordering": ["source", ?]
{
options: [{ "declaration-ordering": ["source", "case-insensitive"] }],
code: `
import _ from "lodash";
import z1 from "D";
import z2 from "c";
import { foo } from "foo";
import z3 from "B";
import z4 from "a";
`,
output: `
import z4 from "a";
import z3 from "B";
import z2 from "c";
import z1 from "D";
import { foo } from "foo";
import _ from "lodash";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
{
options: [{ "declaration-ordering": ["source", "lowercase-last"] }],
code: `
import _ from "lodash";
import z1 from "D";
import z2 from "c";
import { foo } from "foo";
import z3 from "B";
import z4 from "a";
`,
output: `
import z3 from "B";
import z1 from "D";
import z4 from "a";
import z2 from "c";
import { foo } from "foo";
import _ from "lodash";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
// "declaration-ordering": ["type", ?]
{
options: [
{ "declaration-ordering": ["type", { secondaryOrdering: ["name", "case-insensitive"] }] },
],
code: `
import { B } from "z1";
import { a } from "z2";
import * as D from "z3";
import * as c from "z4";
import F from "z5";
import e from "z6";
import _ from "lodash";
import "z7";
import "z8";
`,
output: `
import "z7";
import "z8";
import _ from "lodash";
import e from "z6";
import F from "z5";
import * as c from "z4";
import * as D from "z3";
import { B } from "z1";
import { a } from "z2";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
{
options: [
{ "declaration-ordering": ["type", { secondaryOrdering: ["name", "lowercase-last"] }] },
],
code: `
import { a } from "z1";
import { B } from "z2";
import * as c from "z3";
import * as D from "z4";
import e from "z5";
import F from "z6";
import _ from "lodash";
import "z7";
import "z8";
`,
output: `
import "z7";
import "z8";
import _ from "lodash";
import F from "z6";
import e from "z5";
import * as D from "z4";
import * as c from "z3";
import { a } from "z1";
import { B } from "z2";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
{
options: [
{ "declaration-ordering": ["type", { secondaryOrdering: ["source", "case-insensitive"] }] },
],
code: `
import { z1 } from "B";
import { z2 } from "a";
import * as z3 from "D";
import * as z4 from "c";
import _ from "lodash";
import z5 from "F";
import z6 from "e";
import "H";
import "g";
`,
output: `
import "g";
import "H";
import z6 from "e";
import z5 from "F";
import _ from "lodash";
import * as z4 from "c";
import * as z3 from "D";
import { z2 } from "a";
import { z1 } from "B";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
{
options: [
{ "declaration-ordering": ["type", { secondaryOrdering: ["source", "lowercase-last"] }] },
],
code: `
import { z1 } from "a";
import { z2 } from "B";
import * as z3 from "c";
import * as z4 from "D";
import _ from "lodash";
import z5 from "e";
import z6 from "F";
import "g";
import "H";
`,
output: `
import "H";
import "g";
import z6 from "F";
import z5 from "e";
import _ from "lodash";
import * as z4 from "D";
import * as z3 from "c";
import { z2 } from "B";
import { z1 } from "a";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
{
options: [{ "declaration-ordering": ["type", { secondaryOrdering: ["any"] }] }],
code: `
import { z4 } from "z4";
import { a4 } from "a4";
import * as z3 from "z3";
import * as a3 from "a3";
import z2 from "z2";
import a2 from "a2";
import _ from "lodash";
import "z1";
import "a1";
`,
output: `
import "z1";
import "a1";
import z2 from "z2";
import a2 from "a2";
import _ from "lodash";
import * as z3 from "z3";
import * as a3 from "a3";
import { z4 } from "z4";
import { a4 } from "a4";
`,
errors: [
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
{ message: "unordered import declaration" },
],
},
// "specifier-ordering": "lowercase-last"
{
options: [{ "specifier-ordering": "lowercase-last" }],
code: `import { c, b, a, C, B, A } from "a";`,
output: `import { A, B, C, a, b, c } from "a";`,
errors: [
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
],
},
// "specifier-ordering": "case-insensitive"
{
options: [{ "specifier-ordering": "case-insensitive" }],
code: `import { c, b, a, C, B, A } from "a";`,
output: `import { a, A, b, B, c, C } from "a";`,
errors: [
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
{ message: "unordered import specifier" },
],
},
// "group-ordering"
{
options: [{ "group-ordering": groups }],
code: `
import a30 from "./a";
import b30 from "./b";
import a10 from "a";
import b10 from "b";
import a20 from "../a";
import b20 from "../b";
`,
output: `
import a10 from "a";
import b10 from "b";
import a20 from "../a";
import b20 from "../b";
import a30 from "./a";
import b30 from "./b";
`,
errors: [
{ message: "unordered import group" },
{ message: "unordered import group" },
{ message: "unordered import group" },
],
},
{
options: [{ "group-ordering": groups }],
code: `
import a10 from "a";
import b10 from "b";
import a20 from "../a";
import b20 from "../b";
import a30 from "./a";
import b30 from "./b";
`,
errors: [
{ message: "unmerged import group" },
{ message: "unmerged import group" },
{ message: "unmerged import group" },
],
},
{
options: [{ "declaration-ordering": ["any"], "group-ordering": groups }],
code: `
import a10 from "a";
import b30 from "./b";
import a20 from "../a";
import b10 from "b";
import a30 from "./a";
import b20 from "../b";
`,
errors: [
{ message: "invalid import group member" },
{ message: "invalid import group member" },
{ message: "invalid import group member" },
],
},
],
});
|
# Python GTFS-realtime Language Bindings
[PyPI version](http://badge.fury.io/py/gtfs-realtime-bindings)
Provides Python classes generated from the
[GTFS-realtime](https://github.com/google/transit/tree/master/gtfs-realtime) Protocol
Buffer specification. These classes will allow you to parse a binary Protocol
Buffer GTFS-realtime data feed into Python objects.
## Add the Dependency
To use the `gtfs-realtime-bindings` classes in your own project, you need to
first install the module from the
[PyPI repository](https://pypi.python.org/pypi/gtfs-realtime-bindings).
```
# Using easy_install
easy_install --upgrade gtfs-realtime-bindings
# Using pip
pip install --upgrade gtfs-realtime-bindings
```
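If the installation succeeded, importing the generated bindings should work (a quick, optional sanity check; the import path matches the example in the next section):
```python
from google.transit import gtfs_realtime_pb2

# Constructing an empty FeedMessage confirms the bindings are importable.
feed = gtfs_realtime_pb2.FeedMessage()
print(type(feed))
```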
## Example Code
The following code snippet demonstrates downloading a GTFS-realtime data feed
from a particular URL, parsing it as a FeedMessage (the root type of the
GTFS-realtime schema), and iterating over the results.
```python
from google.transit import gtfs_realtime_pb2
import requests
feed = gtfs_realtime_pb2.FeedMessage()
response = requests.get('URL OF YOUR GTFS-REALTIME SOURCE GOES HERE')
feed.ParseFromString(response.content)
for entity in feed.entity:
if entity.HasField('trip_update'):
    print(entity.trip_update)
```
For more details on the naming conventions for the Python classes generated
from the
[gtfs-realtime.proto](https://github.com/google/transit/blob/master/gtfs-realtime/proto/gtfs-realtime.proto),
check out the
[Python Generated Code](https://developers.google.com/protocol-buffers/docs/reference/python-generated)
section of the Protocol Buffers developer site.
|
package de.webis.webisstud.thesis.reimer.ltr.pipeline
import de.webis.webisstud.thesis.reimer.model.FeatureVector
import de.webis.webisstud.thesis.reimer.model.RunLine
import de.webis.webisstud.thesis.reimer.model.format.RunLineFormat
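/** Re-orders run lines according to their learning-to-rank feature vectors. */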
interface Reranker {
fun rerank(testRuns: Sequence<RunLine>, testVectors: Sequence<FeatureVector>): Sequence<RunLine>
fun rerankFeatureVectors(featureVectors: Sequence<FeatureVector>): Sequence<String> {
val runs = featureVectors
.mapIndexed { index, vector ->
RunLine(vector.documentId, vector.topicId, 0f, index + 1)
}
.toList()
return rerank(runs.asSequence(), featureVectors.asSequence())
.map { RunLineFormat.format(it) }
.asSequence()
}
}
|
import React from 'react';
import Customer from '../features/customer/customer';
export default function CustomerPage() {
return <Customer />;
}
|
package redisearch_test
import (
"crypto/tls"
"crypto/x509"
"fmt"
"io/ioutil"
"log"
"os"
"time"
"github.com/RediSearch/redisearch-go/redisearch"
"github.com/gomodule/redigo/redis"
)
// exemplifies the NewClient function
func ExampleNewClient() {
// Create a client. By default a client is schemaless
// unless a schema is provided when creating the index
c := redisearch.NewClient("localhost:6379", "myIndex")
// Create a schema
sc := redisearch.NewSchema(redisearch.DefaultOptions).
AddField(redisearch.NewTextField("body")).
AddField(redisearch.NewTextFieldOptions("title", redisearch.TextFieldOptions{Weight: 5.0, Sortable: true})).
AddField(redisearch.NewNumericField("date"))
// Drop an existing index. If the index does not exist an error is returned
c.Drop()
// Create the index with the given schema
if err := c.CreateIndex(sc); err != nil {
log.Fatal(err)
}
// Create a document with an id and given score
doc := redisearch.NewDocument("ExampleNewClient:doc1", 1.0)
doc.Set("title", "Hello world").
Set("body", "foo bar").
Set("date", time.Now().Unix())
// Index the document. The API accepts multiple documents at a time
if err := c.Index([]redisearch.Document{doc}...); err != nil {
log.Fatal(err)
}
// Wait for all documents to be indexed
info, _ := c.Info()
for info.IsIndexing {
time.Sleep(time.Second)
info, _ = c.Info()
}
// Searching with limit and sorting
docs, total, err := c.Search(redisearch.NewQuery("hello world").
Limit(0, 2).
SetReturnFields("title"))
fmt.Println(docs[0].Id, docs[0].Properties["title"], total, err)
// Output: ExampleNewClient:doc1 Hello world 1 <nil>
// Drop the existing index
c.Drop()
}
// RediSearch 2.0 marks the re-architecture of the way indices are kept in sync with the data.
// Instead of having to write data through the index (using the FT.ADD command),
// RediSearch will now follow the data written in hashes and automatically index it.
// The following example illustrates how to achieve it with the go client
func ExampleClient_CreateIndexWithIndexDefinition() {
host := "localhost:6379"
password := ""
pool := &redis.Pool{Dial: func() (redis.Conn, error) {
return redis.Dial("tcp", host, redis.DialPassword(password))
}}
c := redisearch.NewClientFromPool(pool, "products-from-hashes")
// Create a schema
schema := redisearch.NewSchema(redisearch.DefaultOptions).
AddField(redisearch.NewTextFieldOptions("name", redisearch.TextFieldOptions{Sortable: true})).
AddField(redisearch.NewTextFieldOptions("description", redisearch.TextFieldOptions{Weight: 5.0, Sortable: true})).
AddField(redisearch.NewNumericField("price"))
// IndexDefinition is available for RediSearch 2.0+
	// Create an index definition for automatic indexing on Hash updates.
	// In this example we will only index keys starting with product:
indexDefinition := redisearch.NewIndexDefinition().AddPrefix("product:")
// Add the Index Definition
c.CreateIndexWithIndexDefinition(schema, indexDefinition)
// Get a vanilla connection and create 100 hashes
vanillaConnection := pool.Get()
for productNumber := 0; productNumber < 100; productNumber++ {
vanillaConnection.Do("HSET", fmt.Sprintf("product:%d", productNumber), "name", fmt.Sprintf("product name %d", productNumber), "description", "product description", "price", 10.99)
}
// Wait for all documents to be indexed
info, _ := c.Info()
for info.IsIndexing {
time.Sleep(time.Second)
info, _ = c.Info()
}
_, total, _ := c.Search(redisearch.NewQuery("description"))
fmt.Printf("Total documents containing \"description\": %d.\n", total)
}
// The following example illustrates an index creation and deletion.
// By default, DropIndex() which is a wrapper for RediSearch FT.DROPINDEX does not delete the document hashes associated with the index.
// Setting the argument deleteDocuments to true deletes the hashes as well.
// Available since RediSearch 2.0
func ExampleClient_DropIndex() {
host := "localhost:6379"
password := ""
pool := &redis.Pool{Dial: func() (redis.Conn, error) {
return redis.Dial("tcp", host, redis.DialPassword(password))
}}
c := redisearch.NewClientFromPool(pool, "products-from-hashes")
// Create a schema
schema := redisearch.NewSchema(redisearch.DefaultOptions).
AddField(redisearch.NewTextFieldOptions("name", redisearch.TextFieldOptions{Sortable: true})).
AddField(redisearch.NewTextFieldOptions("description", redisearch.TextFieldOptions{Weight: 5.0, Sortable: true})).
AddField(redisearch.NewNumericField("price"))
// IndexDefinition is available for RediSearch 2.0+
	// Create an index definition for automatic indexing on Hash updates.
	// In this example we will only index keys starting with product:
indexDefinition := redisearch.NewIndexDefinition().AddPrefix("product:")
// Add the Index Definition
c.CreateIndexWithIndexDefinition(schema, indexDefinition)
// Get a vanilla connection and create 100 hashes
vanillaConnection := pool.Get()
for productNumber := 0; productNumber < 100; productNumber++ {
vanillaConnection.Do("HSET", fmt.Sprintf("product:%d", productNumber), "name", fmt.Sprintf("product name %d", productNumber), "description", "product description", "price", 10.99)
}
// Wait for all documents to be indexed
info, _ := c.Info()
for info.IsIndexing {
time.Sleep(time.Second)
info, _ = c.Info()
}
// Delete Index and Documents
err := c.DropIndex(true)
if err != nil {
log.Fatal(err)
}
}
// exemplifies the NewClientFromPool function
func ExampleNewClientFromPool() {
host := "localhost:6379"
password := ""
pool := &redis.Pool{Dial: func() (redis.Conn, error) {
return redis.Dial("tcp", host, redis.DialPassword(password))
}}
c := redisearch.NewClientFromPool(pool, "search-client-1")
// Create a schema
sc := redisearch.NewSchema(redisearch.DefaultOptions).
AddField(redisearch.NewTextField("body")).
AddField(redisearch.NewTextFieldOptions("title", redisearch.TextFieldOptions{Weight: 5.0, Sortable: true})).
AddField(redisearch.NewNumericField("date"))
// Drop an existing index. If the index does not exist an error is returned
c.Drop()
// Create the index with the given schema
if err := c.CreateIndex(sc); err != nil {
log.Fatal(err)
}
// Create a document with an id and given score
doc := redisearch.NewDocument("ExampleNewClientFromPool:doc2", 1.0)
doc.Set("title", "Hello world").
Set("body", "foo bar").
Set("date", time.Now().Unix())
// Index the document. The API accepts multiple documents at a time
if err := c.Index([]redisearch.Document{doc}...); err != nil {
log.Fatal(err)
}
// Searching with limit and sorting
docs, total, err := c.Search(redisearch.NewQuery("hello world").
Limit(0, 2).
SetReturnFields("title"))
fmt.Println(docs[0].Id, docs[0].Properties["title"], total, err)
// Output: ExampleNewClientFromPool:doc2 Hello world 1 <nil>
// Drop the existing index
c.Drop()
}
// Example of how to establish an SSL connection from your app to the RediSearch server
func ExampleNewClientFromPool_ssl() {
// Consider the following helper methods that provide us with the connection details (host and password)
// and the paths for:
// tls_cert - An X.509 certificate to use for authenticating the server to connected clients, masters or cluster peers. The file should be PEM formatted
// tls_key - An X.509 private key to use for authenticating the server to connected clients, masters or cluster peers. The file should be PEM formatted
// tls_cacert - A PEM encoded CA's certificate file
host, password := getConnectionDetails()
tlsready, tls_cert, tls_key, tls_cacert := getTLSdetails()
	// Skip if we don't have all the files needed to connect properly
	if !tlsready {
return
}
// Load client cert
cert, err := tls.LoadX509KeyPair(tls_cert, tls_key)
if err != nil {
log.Fatal(err)
}
// Load CA cert
caCert, err := ioutil.ReadFile(tls_cacert)
if err != nil {
log.Fatal(err)
}
caCertPool := x509.NewCertPool()
caCertPool.AppendCertsFromPEM(caCert)
clientTLSConfig := &tls.Config{
Certificates: []tls.Certificate{cert},
RootCAs: caCertPool,
}
// InsecureSkipVerify controls whether a client verifies the
// server's certificate chain and host name.
// If InsecureSkipVerify is true, TLS accepts any certificate
// presented by the server and any host name in that certificate.
// In this mode, TLS is susceptible to man-in-the-middle attacks.
// This should be used only for testing.
clientTLSConfig.InsecureSkipVerify = true
pool := &redis.Pool{Dial: func() (redis.Conn, error) {
return redis.Dial("tcp", host,
redis.DialPassword(password),
redis.DialTLSConfig(clientTLSConfig),
redis.DialUseTLS(true),
redis.DialTLSSkipVerify(true),
)
}}
c := redisearch.NewClientFromPool(pool, "search-client-1")
// Create a schema
sc := redisearch.NewSchema(redisearch.DefaultOptions).
AddField(redisearch.NewTextField("body")).
AddField(redisearch.NewTextFieldOptions("title", redisearch.TextFieldOptions{Weight: 5.0, Sortable: true})).
AddField(redisearch.NewNumericField("date"))
// Drop an existing index. If the index does not exist an error is returned
c.Drop()
// Create the index with the given schema
if err := c.CreateIndex(sc); err != nil {
log.Fatal(err)
}
// Create a document with an id and given score
doc := redisearch.NewDocument("ExampleNewClientFromPool_ssl:doc3", 1.0)
doc.Set("title", "Hello world").
Set("body", "foo bar").
Set("date", time.Now().Unix())
// Index the document. The API accepts multiple documents at a time
if err := c.Index([]redisearch.Document{doc}...); err != nil {
log.Fatal(err)
}
// Searching with limit and sorting
docs, total, err := c.Search(redisearch.NewQuery("hello world").
Limit(0, 2).
SetReturnFields("title"))
fmt.Println(docs[0].Id, docs[0].Properties["title"], total, err)
// Drop the existing index
c.Drop()
}
// The following example illustrates geospatial search using RediSearch.
// This example maps to the vanilla Redis example showcased on https://redis.io/commands/georadius#examples.
// We'll start by adding two docs (one for each city) and then do a georadius search based on a starting point
// and 2 distinct radii:
// 1)- First query with 100KM radius centered at long,lat 15,37 that should only output the city named "Catania";
// 2)- Second query with 200KM radius centered at long,lat 15,37 that should output the cities named "Palermo" and "Catania";
func ExampleClient_Search() {
// Create a client. By default a client is schemaless
// unless a schema is provided when creating the index
c := redisearch.NewClient("localhost:6379", "cityIndex")
// Create a schema
sc := redisearch.NewSchema(redisearch.DefaultOptions).
AddField(redisearch.NewTextField("city")).
AddField(redisearch.NewGeoField("location"))
// Drop an existing index. If the index does not exist an error is returned
c.Drop()
// Create the index with the given schema
if err := c.CreateIndex(sc); err != nil {
log.Fatal(err)
}
// Create the city docs
// Note While Specifying location you should specify in following order -> longitude,latitude
// Same look and feel as GEOADD https://redis.io/commands/geoadd
// This example maps to https://redis.io/commands/geoadd#examples
docPalermo := redisearch.NewDocument("doc:Palermo", 1.0)
docPalermo.Set("name", "Palermo").
Set("location", "13.361389,38.115556")
docCatania := redisearch.NewDocument("doc:Catania", 1.0)
docCatania.Set("name", "Catania").
Set("location", "15.087269,37.502669")
// Index the documents. The API accepts multiple documents at a time
if err := c.IndexOptions(redisearch.DefaultIndexingOptions, docPalermo, docCatania); err != nil {
log.Fatal(err)
}
// Searching for 100KM radius should only output Catania
docs, _, _ := c.Search(redisearch.NewQuery("*").AddFilter(
redisearch.Filter{
Field: "location",
Options: redisearch.GeoFilterOptions{
Lon: 15,
Lat: 37,
Radius: 100,
Unit: redisearch.KILOMETERS,
},
},
).Limit(0, 2))
fmt.Println("100KM Radius search from longitude 15 latitude 37")
fmt.Println(docs[0])
// Searching for 200KM radius should output Catania and Palermo
docs, _, _ = c.Search(redisearch.NewQuery("*").AddFilter(
redisearch.Filter{
Field: "location",
Options: redisearch.GeoFilterOptions{
Lon: 15,
Lat: 37,
Radius: 200,
Unit: redisearch.KILOMETERS,
},
},
).Limit(0, 2).SetSortBy("location", true))
fmt.Println("200KM Radius search from longitude 15 latitude 37")
fmt.Println(docs[0])
fmt.Println(docs[1])
// Output: 100KM Radius search from longitude 15 latitude 37
// {doc:Catania 1 [] map[location:15.087269,37.502669 name:Catania]}
// 200KM Radius search from longitude 15 latitude 37
// {doc:Palermo 1 [] map[location:13.361389,38.115556 name:Palermo]}
// {doc:Catania 1 [] map[location:15.087269,37.502669 name:Catania]}
}
func getConnectionDetails() (host string, password string) {
value, exists := os.LookupEnv("REDISEARCH_TEST_HOST")
host = "localhost:6379"
password = ""
valuePassword, existsPassword := os.LookupEnv("REDISEARCH_TEST_PASSWORD")
if exists && value != "" {
host = value
}
if existsPassword && valuePassword != "" {
password = valuePassword
}
return
}
func getTLSdetails() (tlsready bool, tls_cert string, tls_key string, tls_cacert string) {
tlsready = false
value, exists := os.LookupEnv("TLS_CERT")
if exists && value != "" {
info, err := os.Stat(value)
if os.IsNotExist(err) || info.IsDir() {
return
}
tls_cert = value
} else {
return
}
value, exists = os.LookupEnv("TLS_KEY")
if exists && value != "" {
info, err := os.Stat(value)
if os.IsNotExist(err) || info.IsDir() {
return
}
tls_key = value
} else {
return
}
value, exists = os.LookupEnv("TLS_CACERT")
if exists && value != "" {
info, err := os.Stat(value)
if os.IsNotExist(err) || info.IsDir() {
return
}
tls_cacert = value
} else {
return
}
tlsready = true
return
}
|
# FORMS MODULE FOR FUEL CMS
This is a [FUEL CMS](http://www.getfuelcms.com) forms module for easily adding form functionality to your website.
## INSTALLATION
There are a couple ways to install the module. If you are using GIT you can use the following method
to create a submodule:
### USING GIT
1. Open up a Terminal window, "cd" to your FUEL CMS installation, then type in:
``php index.php fuel/installer/add_git_submodule https://github.com/daylightstudio/FUEL-CMS-Forms-Module.git forms``
2. Then to install, type in:
``php index.php fuel/installer/install forms``
### MANUAL
1. Download the zip file from GitHub:
[https://github.com/daylightstudio/FUEL-CMS-Forms-Module](https://github.com/daylightstudio/FUEL-CMS-Forms-Module)
2. Create a "forms" folder in fuel/modules/ and place the contents of the forms module folder in there.
3. Import the forms_install.sql from the forms/install folder into your database
4. Add "forms" to the the `$config['modules_allowed']` in fuel/application/config/MY_fuel.php
## UNINSTALL
To uninstall the module which will remove any permissions and database information:
``php index.php fuel/installer/uninstall forms``
### TROUBLESHOOTING
1. You may need to put in your full path to the "php" interpreter when using the terminal.
2. You must have access to an internet connection to install using GIT.
## DOCUMENTATION
To access the documentation, you can visit it [here](http://docs.getfuelcms.com/modules/forms).
## TEAM
* David McReynolds, Daylight Studio, Main Developer
## BUGS
To file a bug report, go to the [issues](https://github.com/daylightstudio/FUEL-CMS-Forms-Module/issues) page.
## LICENSE
The Forms Module for FUEL CMS is licensed under [APACHE 2](http://www.apache.org/licenses/LICENSE-2.0).
|
import 'package:flutter/material.dart';
import 'package:url_launcher/url_launcher.dart';
/// OpenURLOnTap is a component that opens the URL in a new tab on tap.
/// It's used to render paper list item and repository list item.
class OpenURLOnTap extends StatelessWidget {
final String url;
final Widget? child;
const OpenURLOnTap({Key? key, required this.url, required this.child})
: super(key: key);
@override
Widget build(BuildContext context) {
return GestureDetector(
onTap: () async {
if (await canLaunch(url)) {
await launch(url);
return;
}
ScaffoldMessenger.of(context).removeCurrentSnackBar();
ScaffoldMessenger.of(context).showSnackBar(SnackBar(
backgroundColor: Theme.of(context).colorScheme.error,
content: Text("$url is not a valid URL"),
action: SnackBarAction(
label: "OK",
textColor: Theme.of(context).colorScheme.onError,
onPressed: () {},
),
));
},
child: child);
}
}
|
//go:build linux || darwin
// +build linux darwin
package ui
import (
"bytes"
"context"
"fmt"
"io"
"os"
"sync"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/logger"
syftEvent "github.com/anchore/syft/syft/event"
"github.com/anchore/syft/ui"
"github.com/wagoodman/go-partybus"
"github.com/wagoodman/jotframe/pkg/frame"
)
// ephemeralTerminalUI provides an "ephemeral" terminal user interface to display the application state dynamically.
// The terminal is placed into raw mode and the cursor is manipulated to allow for a dynamic, multi-line
// UI (provided by the jotframe lib), for this reason all other application mechanisms that write to the screen
// must be suppressed before starting (such as logs); since bytes in the device and in application memory combine to make
// a shared state, bytes coming from elsewhere to the screen will disrupt this state.
//
// This UI is primarily driven off of events from the event bus, creating single-line terminal widgets to represent a
// published element on the event bus, typically polling the element for the latest state. This allows for the UI to
// control update frequency to the screen, provide "liveness" indications that are interpolated between bus events,
// and overall loosely couple the bus events from screen interactions.
//
// By convention, all elements published on the bus should be treated as read-only, and publishers on the bus should
// attempt to enforce this when possible by wrapping complex objects with interfaces to prescribe interactions. Also by
// convention, each new event that the UI should respond to should be added either in this package as a handler function,
// or in the shared ui package as a function on the main handler object. All handler functions should be completed
// processing an event before the ETUI exits (coordinated with a sync.WaitGroup)
type ephemeralTerminalUI struct {
unsubscribe func() error
handler *ui.Handler
waitGroup *sync.WaitGroup
frame *frame.Frame
logBuffer *bytes.Buffer
uiOutput *os.File
reportOutput io.Writer
}
// NewEphemeralTerminalUI writes all events to a TUI and writes the final report to the given writer.
func NewEphemeralTerminalUI(reportWriter io.Writer) UI {
return &ephemeralTerminalUI{
handler: ui.NewHandler(),
waitGroup: &sync.WaitGroup{},
uiOutput: os.Stderr,
reportOutput: reportWriter,
}
}
func (h *ephemeralTerminalUI) Setup(unsubscribe func() error) error {
h.unsubscribe = unsubscribe
hideCursor(h.uiOutput)
// prep the logger to not clobber the screen from now on (logrus only)
h.logBuffer = bytes.NewBufferString("")
logWrapper, ok := log.Log.(*logger.LogrusLogger)
if ok {
logWrapper.Logger.SetOutput(h.logBuffer)
}
return h.openScreen()
}
func (h *ephemeralTerminalUI) Handle(event partybus.Event) error {
ctx := context.Background()
switch {
case h.handler.RespondsTo(event):
if err := h.handler.Handle(ctx, h.frame, event, h.waitGroup); err != nil {
log.Errorf("unable to show %s event: %+v", event.Type, err)
}
case event.Type == syftEvent.AppUpdateAvailable:
if err := handleAppUpdateAvailable(ctx, h.frame, event, h.waitGroup); err != nil {
log.Errorf("unable to show %s event: %+v", event.Type, err)
}
case event.Type == syftEvent.PresenterReady:
		// we need to close the screen now since signaling that the presenter is ready means that we
// are about to write bytes to stdout, so we should reset the terminal state first
h.closeScreen(false)
if err := handleCatalogerPresenterReady(event, h.reportOutput); err != nil {
log.Errorf("unable to show %s event: %+v", event.Type, err)
}
// this is the last expected event, stop listening to events
return h.unsubscribe()
}
return nil
}
func (h *ephemeralTerminalUI) openScreen() error {
config := frame.Config{
PositionPolicy: frame.PolicyFloatForward,
// only report output to stderr, reserve report output for stdout
Output: h.uiOutput,
}
fr, err := frame.New(config)
if err != nil {
return fmt.Errorf("failed to create the screen object: %w", err)
}
h.frame = fr
return nil
}
func (h *ephemeralTerminalUI) closeScreen(force bool) {
// we may have other background processes still displaying progress, wait for them to
// finish before discontinuing dynamic content and showing the final report
if !h.frame.IsClosed() {
if !force {
h.waitGroup.Wait()
}
h.frame.Close()
// TODO: there is a race condition within frame.Close() that sometimes leads to an extra blank line being output
frame.Close()
// only flush the log on close
h.flushLog()
}
}
func (h *ephemeralTerminalUI) flushLog() {
// flush any errors to the screen before the report
logWrapper, ok := log.Log.(*logger.LogrusLogger)
if ok {
fmt.Fprint(logWrapper.Output, h.logBuffer.String())
logWrapper.Logger.SetOutput(h.uiOutput)
} else {
fmt.Fprint(h.uiOutput, h.logBuffer.String())
}
}
func (h *ephemeralTerminalUI) Teardown(force bool) error {
h.closeScreen(force)
showCursor(h.uiOutput)
return nil
}
func hideCursor(output io.Writer) {
fmt.Fprint(output, "\x1b[?25l")
}
func showCursor(output io.Writer) {
fmt.Fprint(output, "\x1b[?25h")
}
|
<?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use App\Jobs\NewsParsing;
use App\Source;
use Illuminate\Http\RedirectResponse;
class NewsParserController extends Controller
{
/**
     * Process previously registered sources one at a time
* @return RedirectResponse
*/
public function index()
{
$sources = Source::all();
foreach ($sources as $source) {
NewsParsing::dispatch($source);
}
return redirect()->route('admin.source.index')->with('success',
            'The sources from this list have just been added to the parsing queue.');
}
}
|
//-- Filename:
//-- plot_timeline.js
//--
//-- Author:
//-- Chieh-An Lin
var PT_plot_list = [
[CT_Main, CT_wrap, 1500],
[ET_Main, ET_wrap, 1500],
];
GP_Cascade(PT_plot_list);
|
docker run --restart always --name crawlab \
-e CRAWLAB_REDIS_ADDRESS=192.168.99.1:6379 \
-e CRAWLAB_MONGO_HOST=192.168.99.1 \
-e CRAWLAB_SERVER_MASTER=N \
-v /var/logs/crawlab:/var/logs/crawlab \
tikazyq/crawlab:0.3.0
|
#!/bin/bash
PACKAGE_NAME=com.licenta.grt_benchmark;
ACTIVITY=MainActivity
adb shell am start -n $PACKAGE_NAME/$PACKAGE_NAME.$ACTIVITY
#adb logcat com.licenta.grt_benchmark:* *:S;
adb logcat | grep `adb shell ps | grep com.licenta.grt_benchmark | cut -c10-15`
|
# typed: false
# frozen_string_literal: true
# This is a default, one-size-fits all protocol that shows how you can
# access the inputs and outputs of the operations associated with a job.
# Add specific instructions for this protocol!
needs 'Collection Management/CollectionDisplay'
class Protocol
include CollectionDisplay
def main
collection = Collection.new_collection('96-well qPCR Plate')
tbl = highlight_collection_rc(collection, [[0,0],[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]])
show do
note collection.id.to_s
note tbl[0][0].to_s
table tbl
table highlight_collection_rc(collection, [[0,0],[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]], check: false)
end
m = [
[ "A", "Very", "Nice", { content: "Table", style: {color: "white", "background-color"=>"red"} } ],
[ { content: 1, check: false }, 2, 3, 4 ]
]
show {
title "A Table"
table m
}
end
end
|
<?php
namespace App\Helpers;
use Illuminate\Support\Facades\Request;
class RouteHelper
{
public static function set_active($route)
{
$path = Request::path();
if ($path == "/") {
$path = 'index';
}
return ($path == $route ? "active" : '');
}
}
|
# espoir-cli
---
## Commands
### espoir {create, new}
* If the current working directory is inside a monorepo created by espoir, create a new package.
  When creating a package, you can choose one of the predefined templates built into espoir-cli.
* If the current working directory is not inside a monorepo created by espoir, create a new monorepo.
#### Examples
* `espoir create`
---
### espoir {install, i, ins}
Install newly added dependencies for the specified (or all) packages, or install the already declared dependencies for the specified (or all) packages.
#### Arguments
##### optional module-names
The names of the dependencies to install; multiple names are supported.
A version range for a dependency can be specified with the `module_name@version_range` format.
When omitted, the already declared dependencies are installed.
#### Options
##### {--save, -S} (default: `true`)
Install the target dependencies into the package's `dependencies` field.
When `module-names` is provided, this option defaults to `true`.
_When the root directory is targeted, the `--save` option produces an error. The root directory is considered to affect only the workspace's development tooling, not its build artifacts, so a `dependencies` field is not allowed there. Use the `--save-dev` option or consider excluding the root directory._
##### {--save-dev, -D} (default: `false`)
Install the target dependencies into the package's `devDependencies` field.
When `module-names` is provided, this option defaults to `false`.
##### --production (default: `false`)
When `module-names` is not provided, i.e. when installing the already declared dependencies, enable this option to install in production mode. Dependencies declared in `devDependencies` will not be installed. Defaults to `false`.
##### {--workspace, -w} <workspace...>
Specify the target packages to install dependencies for. Defaults to all packages.
#### Examples
* `espoir install`
  Running this command installs the declared dependencies for all packages (including the root directory).
* `espoir install --production`
  Running this command installs the declared production dependencies for all packages (including the root directory).
* `espoir install -w root foo`
  Running this command installs the declared dependencies for the root directory (`root`) and the package `foo`.
* `espoir i react react-dom -w foo`
  Running this command adds and installs the dependencies `react` and `react-dom` for the package `foo`.
* `espoir i --save-dev @types/react @types/react-dom -w root foo`
  Running this command adds and installs the development dependencies `@types/react` and `@types/react-dom` for the root directory and the package `foo`.
* `espoir install -h`
  Running this command prints the help information.
---
### espoir {run-script, run, r}
Run a predefined script.
For each package, the available scripts include:
* the `scripts` object in `package.json`,
* JS scripts in the `scripts` directory,
* JS scripts in the `tasks` directory.
#### Arguments
##### optional command
The script name.
A command defined in a specific package can be addressed with the `package_name.script_name` format.
When `package_name` is omitted, espoir walks up from the current working directory to find a `package.json` and uses it as the command's context.
##### optional args
The arguments passed to the command; multiple values are supported.
#### Options
##### --list (default: `false`)
When `command` is not provided, enable this option to list the names of all runnable scripts.
#### Examples
* `espoir run-scripts foo.build`
  Running this command executes the `build` script of the package `foo`.
* `espoir run-scripts --list`
  Running this command lists the runnable scripts.
* `(/packages/foo/src/utils/) espoir run dev`
  Running this command executes the `dev` script of the package `foo`.
* `(/packages/) espoir run bootstrap`
  Running this command executes the `bootstrap` script of the root directory.
* `espoir run -h`
  Running this command prints the help information.
---
### espoir {contribute, contr, cont, c, commit}
Commit a code revision.
#### Examples
* `espoir contribute`
  Running this command starts the interactive commit flow.
  A contribution goes through the following stages:
  1. Check the git staging area; if there are unstaged changes, prompt the user to **stage them automatically** / **stage them manually and retry** / **abort the commit** / **ignore the warning and continue**.
  2. Collect information and generate the changelog and the git commit message.
  3. Perform the commit.
  4. Push to the remote branch if required.
* `espoir contribute -h`
  Running this command prints the help information.
---
### espoir {uninstall, uni, u, del, remove}
Remove dependencies from the specified packages.
If a specified dependency does not exist, it is skipped.
During uninstallation, dependencies that are no longer required (by any package or by other modules) are detected and cleaned up.
If a specified dependency is still required elsewhere (by any package or other modules), it is only removed from the specified package's dependency list and not physically deleted.
#### Arguments
##### module-names
The names of the dependencies to remove; multiple names are supported, and at least one must be provided.
#### Options
##### --here (default: `false`)
Run the operation in the package that contains the current directory.
When you are sure the current working directory is correct, use the `--here` option to omit the `--workspace` specification.
##### {--workspace, -w} <...workspace>
Specify the target packages for the uninstall operation. Not required when `--here` is set; otherwise it must be provided.
#### Examples
* `espoir uninstall axios jquery -w foo`
  Running this command uninstalls the dependencies `axios` and `jquery` from the package `foo`.
* `(/packages/foo/src/utils/) espoir del axios --here`
  Running this command uninstalls the dependency `axios` from the package `foo`.
* `espoir uni -h`
  Running this command prints the help information.
---
### espoir {use-static, use, static, export}
Mark the specified packages as exportable so that other packages can use them.
#### Arguments
##### packages
The names of the packages to export; multiple names are supported.
#### Examples
* `espoir use-static foo`
  Running this command marks the package `foo` as exported.
* `espoir use-static -h`
  Running this command prints the help information.
---
### espoir update
Upgrade espoir-cli.
#### Examples
* `espoir update`
  Running this command automatically updates espoir-cli to the latest version.
|
import 'package:equatable/equatable.dart';
import 'package:guardian/slack/model/slack_text_object.dart';
import 'package:guardian/slack/model/validation_result.dart';
/// A class representing Section Block element
/// (https://api.slack.com/reference/block-kit/blocks#section)
class SlackSectionBlock extends Equatable {
/// Maximum allowed number of items in [fields] array.
static const int _maxFieldsPerSection = 10;
/// Maximum allowed length of [SlackTextObject.text] within [fields]'s item.
static const int _maxFieldTextLength = 2000;
/// Maximum allowed length of [text].
static const int _maxSectionTextLength = 3000;
/// The text for the block, in the form of a [SlackTextObject].
///
/// Maximum length for the text in this field is 3000 characters.
final SlackTextObject text;
/// An array of text objects. Any text objects included with fields will be
/// rendered in a compact format that allows for 2 columns of side-by-side
/// text.
///
/// Maximum number of items is 10. Maximum length for the
/// [SlackTextObject.text] in each item is 2000 characters.
final List<SlackTextObject> fields;
/// Creates an instance of Slack section block.
///
  /// Either [text] or [fields] is required; both can be provided
  /// simultaneously.
const SlackSectionBlock({
this.text,
this.fields,
});
/// Creates an instance of Section Block from decoded JSON object.
///
/// Returns `null` if [json] is `null`.
factory SlackSectionBlock.fromJson(Map<String, dynamic> json) {
if (json == null) return null;
return SlackSectionBlock(
text: SlackTextObject.fromJson(json['text'] as Map<String, dynamic>),
fields: SlackTextObject.listFromJson(json['fields'] as List<dynamic>),
);
}
/// Creates a list of Section Blocks from list of decoded JSON objects.
static List<SlackSectionBlock> listFromJson(List<dynamic> list) {
return list
?.map(
(json) => SlackSectionBlock.fromJson(json as Map<String, dynamic>))
?.toList();
}
@override
List<Object> get props => [text, fields];
/// Validates [SlackSectionBlock] to match following rules:
/// 1. Either [text] or [fields] is required.
/// 2. If [fields] is not presented, [text] must be not `null` and valid
/// (not empty and contain up to 3000 characters).
/// 3. If [text] is not presented, [fields] must be not `null` and valid
/// (not empty and contain up to 10 non-null items with maximum 2000
/// characters each).
/// 4. Both [text] and [fields] must be valid if presented simultaneously.
ValidationResult validate() {
final textValidation = text?.validate(_maxSectionTextLength) ??
ValidationResult.invalid('Text is missing');
if (text == null) {
return _validateFields();
} else if (fields == null) {
return textValidation;
} else {
return textValidation.combine(_validateFields());
}
}
/// Validates [fields] to be not `null`, contain up to 10 valid and non-null
/// [SlackTextObject]s limited to 2000 characters.
ValidationResult _validateFields() {
if (fields == null) {
return ValidationResult.invalid('Text is missing');
} else if (fields.length > _maxFieldsPerSection) {
return ValidationResult.invalid(
'Fields is limited to contain up to 10 '
'items but ${fields.length} found',
);
} else {
return fields.fold(ValidationResult.valid(), (validation, field) {
final fieldValidation = field?.validate(_maxFieldTextLength) ??
ValidationResult.invalid('Text is missing');
return validation.combine(fieldValidation);
});
}
}
/// Converts object into the [Map].
///
/// Resulting map will include only non-null fields of an object it
/// represents. Result is valid to be sent to Slack API.
Map<String, dynamic> toJson() {
final map = <String, dynamic>{'type': 'section'};
if (text != null) map['text'] = text.toJson();
if (fields != null && fields.isNotEmpty) {
map['fields'] = fields.map((field) => field.toJson()).toList();
}
return map;
}
@override
String toString() {
return '$runtimeType ${toJson()}';
}
}
|
package com.lykke.matching.engine.order
import com.lykke.matching.engine.order.transaction.ExecutionContext
import com.lykke.matching.engine.outgoing.messages.v2.builders.EventFactory
import com.lykke.matching.engine.outgoing.messages.v2.events.Event
import com.lykke.matching.engine.services.MessageSender
import com.lykke.matching.engine.utils.event.isThereClientEvent
import com.lykke.matching.engine.utils.event.isThereTrustedClientEvent
import org.springframework.stereotype.Component
@Component
class ExecutionEventSender(
private val messageSender: MessageSender
) {
fun generateEvents(
executionContext: ExecutionContext,
sequenceNumbers: SequenceNumbersWrapper
): EventsHolder {
val trustedClientsLimitOrdersWithTrades =
executionContext.getTrustedClientsLimitOrdersWithTrades().toMutableList()
var trustedClientEvent: Event<*>? = null
if (isThereTrustedClientEvent(trustedClientsLimitOrdersWithTrades))
trustedClientEvent = EventFactory.createTrustedClientsExecutionEvent(
sequenceNumbers.trustedClientsSequenceNumber!!,
executionContext.messageId,
executionContext.requestId,
executionContext.date,
executionContext.messageType,
trustedClientsLimitOrdersWithTrades
)
val clientsLimitOrdersWithTrades = executionContext.getClientsLimitOrdersWithTrades().toList()
var clientEvent: Event<*>? = null
if (isThereClientEvent(clientsLimitOrdersWithTrades, executionContext.marketOrderWithTrades))
clientEvent = EventFactory.createExecutionEvent(
sequenceNumbers.clientsSequenceNumber!!,
executionContext.messageId,
executionContext.requestId,
executionContext.date,
executionContext.messageType,
executionContext.walletOperationsProcessor.getClientBalanceUpdates(),
clientsLimitOrdersWithTrades,
executionContext.marketOrderWithTrades
)
return EventsHolder(trustedClientEvent, clientEvent)
}
fun sendEvents(
events: EventsHolder
) {
if (events.trustedClientsEvent != null) {
messageSender.sendTrustedClientsMessage(events.trustedClientsEvent)
}
if (events.clientsEvent != null) {
messageSender.sendMessage(events.clientsEvent)
}
}
}
|
//===-- MainLoopTest.cpp --------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "lldb/Host/MainLoop.h"
#include "TestingSupport/SubsystemRAII.h"
#include "lldb/Host/ConnectionFileDescriptor.h"
#include "lldb/Host/PseudoTerminal.h"
#include "lldb/Host/common/TCPSocket.h"
#include "llvm/Testing/Support/Error.h"
#include "gtest/gtest.h"
#include <future>
using namespace lldb_private;
namespace {
class MainLoopTest : public testing::Test {
public:
SubsystemRAII<Socket> subsystems;
void SetUp() override {
bool child_processes_inherit = false;
Status error;
std::unique_ptr<TCPSocket> listen_socket_up(
new TCPSocket(true, child_processes_inherit));
ASSERT_TRUE(error.Success());
error = listen_socket_up->Listen("localhost:0", 5);
ASSERT_TRUE(error.Success());
Socket *accept_socket;
std::future<Status> accept_error = std::async(std::launch::async, [&] {
return listen_socket_up->Accept(accept_socket);
});
std::unique_ptr<TCPSocket> connect_socket_up(
new TCPSocket(true, child_processes_inherit));
error = connect_socket_up->Connect(
llvm::formatv("localhost:{0}", listen_socket_up->GetLocalPortNumber())
.str());
ASSERT_TRUE(error.Success());
ASSERT_TRUE(accept_error.get().Success());
callback_count = 0;
socketpair[0] = std::move(connect_socket_up);
socketpair[1].reset(accept_socket);
}
void TearDown() override {
socketpair[0].reset();
socketpair[1].reset();
}
protected:
MainLoop::Callback make_callback() {
return [&](MainLoopBase &loop) {
++callback_count;
loop.RequestTermination();
};
}
std::shared_ptr<Socket> socketpair[2];
unsigned callback_count;
};
} // namespace
TEST_F(MainLoopTest, ReadObject) {
char X = 'X';
size_t len = sizeof(X);
ASSERT_TRUE(socketpair[0]->Write(&X, len).Success());
MainLoop loop;
Status error;
auto handle = loop.RegisterReadObject(socketpair[1], make_callback(), error);
ASSERT_TRUE(error.Success());
ASSERT_TRUE(handle);
ASSERT_TRUE(loop.Run().Success());
ASSERT_EQ(1u, callback_count);
}
TEST_F(MainLoopTest, TerminatesImmediately) {
char X = 'X';
size_t len = sizeof(X);
ASSERT_TRUE(socketpair[0]->Write(&X, len).Success());
ASSERT_TRUE(socketpair[1]->Write(&X, len).Success());
MainLoop loop;
Status error;
auto handle0 = loop.RegisterReadObject(socketpair[0], make_callback(), error);
ASSERT_TRUE(error.Success());
auto handle1 = loop.RegisterReadObject(socketpair[1], make_callback(), error);
ASSERT_TRUE(error.Success());
ASSERT_TRUE(loop.Run().Success());
ASSERT_EQ(1u, callback_count);
}
#ifdef LLVM_ON_UNIX
TEST_F(MainLoopTest, DetectsEOF) {
PseudoTerminal term;
ASSERT_TRUE(term.OpenFirstAvailablePrimary(O_RDWR, nullptr, 0));
ASSERT_TRUE(term.OpenSecondary(O_RDWR | O_NOCTTY, nullptr, 0));
auto conn = std::make_unique<ConnectionFileDescriptor>(
term.ReleasePrimaryFileDescriptor(), true);
Status error;
MainLoop loop;
auto handle =
loop.RegisterReadObject(conn->GetReadObject(), make_callback(), error);
ASSERT_TRUE(error.Success());
term.CloseSecondaryFileDescriptor();
ASSERT_TRUE(loop.Run().Success());
ASSERT_EQ(1u, callback_count);
}
TEST_F(MainLoopTest, Signal) {
MainLoop loop;
Status error;
auto handle = loop.RegisterSignal(SIGUSR1, make_callback(), error);
ASSERT_TRUE(error.Success());
kill(getpid(), SIGUSR1);
ASSERT_TRUE(loop.Run().Success());
ASSERT_EQ(1u, callback_count);
}
// Test that a signal which is not monitored by the MainLoop does not
// cause a premature exit.
TEST_F(MainLoopTest, UnmonitoredSignal) {
MainLoop loop;
Status error;
struct sigaction sa;
sa.sa_sigaction = [](int, siginfo_t *, void *) { };
sa.sa_flags = SA_SIGINFO; // important: no SA_RESTART
sigemptyset(&sa.sa_mask);
ASSERT_EQ(0, sigaction(SIGUSR2, &sa, nullptr));
auto handle = loop.RegisterSignal(SIGUSR1, make_callback(), error);
ASSERT_TRUE(error.Success());
std::thread killer([]() {
sleep(1);
kill(getpid(), SIGUSR2);
sleep(1);
kill(getpid(), SIGUSR1);
});
ASSERT_TRUE(loop.Run().Success());
killer.join();
ASSERT_EQ(1u, callback_count);
}
#endif
|
composer install
#yarn install
yarn encore production
php bin/console doctrine:migrations:migrate --no-interaction
php bin/console c:c --env=prod
#php bin/console opti:covers
|
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
@Component({
selector: 'banner',
templateUrl: './banner.component.html',
styleUrls: ['./banner.component.css']
})
export class BannerComponent implements OnInit {
//Instance variables
private title: string;
constructor(private activatedRoute: ActivatedRoute) { }
ngOnInit() {
this.setTitle(this.activatedRoute.data['value']['title']);
}
//MUTATORS
setTitle(title: string) {
this.title = title;
}
//ACCESSORS
getTitle() {
return this.title;
}
}
|
export interface Listener<T> {
(e: T): void
}
export class TypedEmitter<T> {
private listeners: Listener<T>[] = []
on(l: Listener<T>) {
this.listeners.push(l)
}
off(l: Listener<T>) {
const idx = this.listeners.indexOf(l)
if (idx > -1) this.listeners.splice(idx, 1)
}
emit(e: T) {
this.listeners.forEach(l => l(e))
}
}
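// Illustrative usage sketch (not part of the original module); the event
// payload type and handler below are hypothetical examples.
//
//   const clicks = new TypedEmitter<{ x: number; y: number }>()
//   const log: Listener<{ x: number; y: number }> = e => console.log(e.x, e.y)
//   clicks.on(log)
//   clicks.emit({ x: 1, y: 2 }) // each registered listener receives the event
//   clicks.off(log)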
|
"""Add Sutami, Wlingi, Sutami Operasi
Revision ID: 8dd3bf604083
Revises: 9bf8d3c01e1d
Create Date: 2021-03-20 14:42:14.308774
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8dd3bf604083'
down_revision = '9bf8d3c01e1d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('data_waduk_sutami',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('elevation', sa.Float(), nullable=True),
sa.Column('volume', sa.Float(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('data_waduk_sutami_operasi',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('h', sa.Float(), nullable=True),
sa.Column('p', sa.Float(), nullable=True),
sa.Column('q', sa.Float(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index('sutamioperasi_h_p', 'data_waduk_sutami_operasi', ['h', 'p'], unique=True)
op.create_table('data_waduk_wlingi',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('elevation', sa.Float(), nullable=True),
sa.Column('volume', sa.Float(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('data_waduk_wlingi')
op.drop_index('sutamioperasi_h_p', table_name='data_waduk_sutami_operasi')
op.drop_table('data_waduk_sutami_operasi')
op.drop_table('data_waduk_sutami')
# ### end Alembic commands ###
|
#!/bin/sh
echo "[$0]: $1 ..." > /dev/console
case "$1" in
start|stop|restart)
service LAYOUT $1
;;
*)
        echo "[$0]: invalid argument - $1" > /dev/console
;;
esac
|
## Prolific Inherit
Utility to determine which file handles the user wants a child to inherit from
its Prolific supervisor.
|
#
# This file is a part of MolecularGraph.jl
# Licensed under the MIT License http://opensource.org/licenses/MIT
#
@testset "graph.triangle" begin
@testset "triangles" begin
graph1 = pathgraph(5)
@test isempty(triangles(graph1))
graph2 = plaingraph(5, [(1, 2), (2, 3), (3, 1)])
@test issetequal(collect(triangles(graph2))[1], 1:3)
graph3 = plaingraph(8, [
(1, 2), (2, 3), (1, 3), (3, 4), (4, 5),
(5, 6), (4, 6), (5, 7), (7, 8), (8, 6)
])
@test length(triangles(graph3)) == 2
graph4 = plaingraph(10, [
(1, 2), (2, 3), (3, 4), (3, 5), (5, 6),
(6, 7), (7, 5), (5, 8), (8, 9), (9, 5), (5, 10)
])
@test length(triangles(graph4)) == 2
end
end # graph.triangle
|
# pass
### does nothing
```
>>> while True:
... pass # Busy-wait for keyboard interrupt (Ctrl+C)
```
|
using System;
namespace CodelyTv.Mooc.CoursesCounters.Domain
{
public class CoursesCounterNotInitialized : SystemException
{
}
}
|
package jp.gree.techcon.common.util
import com.soywiz.klock.DateFormat
import com.soywiz.klock.DateTime
import com.soywiz.klock.KlockLocale
import com.soywiz.klock.hours
import com.soywiz.klock.locale.japanese
// To resolve format issue on klock
internal object AppDateTime {
fun parseToArticleTime(timeSecond: Long): String {
val dt = DateTime.fromUnix(timeSecond * 1000) + 9.hours
val amOrPm = dt.format(defaultFormat("a")).toUpperCase()
return "${getHourMin(dt)} $amOrPm"
}
fun parseToArticleDate(timeSecond: Long): String {
val dt = DateTime.fromUnix(timeSecond * 1000) + 9.hours
val e = dt.format(defaultFormat("(EEE)"))
val day = dt.format(defaultFormat("dd日")).replaceFirst("^0+(?!$)".toRegex(), "")
val month = dt.format(defaultFormat("MM月")).replaceFirst("^0+(?!$)".toRegex(), "")
return "$month$day$e"
}
fun parseToTimeDuration(startSecond: Long, endSecond: Long): String {
val dt = DateTime.fromUnix(startSecond * 1000) + 9.hours
val dt2 = DateTime.fromUnix(endSecond * 1000) + 9.hours
return "${getHourMin(dt)} - ${getHourMin(dt2)}"
}
private fun getHourMin(dateTime: DateTime): String {
val hh = dateTime.format(defaultFormat("hh")).replaceFirst("^0+(?!$)".toRegex(), "")
val mm = dateTime.format(defaultFormat("mm"))
return "$hh:$mm"
}
private fun defaultFormat(pattern: String) =
DateFormat(pattern).withLocale(KlockLocale.japanese)
}
|
See the [Eureka source repo](https://github.com/Netflix/eureka) for detailed documentation.
Forked from the eureka-1.9.2 release zip source code.
|
#!/bin/sh
set -e
set -x
FILE=$1
CONTENT="foo"
CONTENT2="foo2"
ret=0
adduser -D testuser
adduser -D testuser2
adduser testuser daemon
rm -rf "$FILE"
echo "$CONTENT" > "$FILE"
chmod 0600 "$FILE"
chown root:daemon "$FILE"
echo
TEST="file r/w root user only. Read access as root"
RESULT=$(cat "$FILE")
if [ "$CONTENT" != "$RESULT" ]; then
echo "$TEST: expected $CONTENT got $RESULT"
ret=1
fi
echo
TEST="file r/w root user only. Write access as root"
echo "$CONTENT2" > "$FILE"
RESULT=$(cat "$FILE")
if [ "$CONTENT2" != "$RESULT" ]; then
echo "$TEST: expected $CONTENT2 got $RESULT"
ret=1
fi
echo
TEST="file r/w root user only. Read access as testuser"
set +e
su -c "cat $FILE" testuser
res=$?
set -e
if [ "$res" != "1" ]; then
echo "$TEST: expected it to fail"
ret=1
fi
echo
TEST="file r/w root user only. Write access as testuser"
set +e
su -c "echo $CONTENT > $FILE" testuser
res=$?
set -e
if [ "$res" != "1" ]; then
echo "$TEST: expected it to fail"
ret=1
fi
echo
TEST="file r/w root user only. Read access as testuser2"
set +e
su -c "cat $FILE" testuser2
res=$?
set -e
if [ "$res" != "1" ]; then
echo "$TEST: expected it to fail"
ret=1
fi
echo
TEST="file r/w root user only. Write access as testuser2"
set +e
su -c "echo $CONTENT > $FILE" testuser2
res=$?
set -e
if [ "$res" != "1" ]; then
echo "$TEST: expected it to fail"
ret=1
fi
# change to allow group access
rm -rf "$FILE"
echo "$CONTENT" > "$FILE"
chmod 0660 "$FILE"
chown root:daemon "$FILE"
echo
TEST="file r/w user/group only. Read access as testuser"
RESULT=$(su -c "cat $FILE" testuser)
if [ "$CONTENT" != "$RESULT" ]; then
echo "$TEST: expected $CONTENT got $RESULT"
ret=1
fi
echo
TEST="file r/w user/group only. Write access as testuser"
su -c "echo $CONTENT2 > $FILE" testuser
RESULT=$(cat "$FILE")
if [ "$CONTENT2" != "$RESULT" ]; then
echo "$TEST: expected $CONTENT2 got $RESULT"
ret=1
fi
echo
TEST="file r/w root user only. Read access as testuser2"
set +e
su -c "cat $FILE" testuser2
res=$?
set -e
if [ "$res" != "1" ]; then
echo "$TEST: expected it to fail"
ret=1
fi
echo
TEST="file r/w root user only. Write access as testuser2"
set +e
su -c "echo $CONTENT > $FILE" testuser2
res=$?
set -e
if [ "$res" != "1" ]; then
echo "$TEST: expected it to fail"
ret=1
fi
exit $ret
|
{-# language DataKinds #-}
{-# language GADTSyntax #-}
{-# language KindSignatures #-}
{-# language LinearTypes #-}
module Linear.Stack
( Stack
, empty
, push
, pop
-- * Consume
, foldl
) where
import Prelude hiding (foldl)
import Data.Kind (Type)
import Linear.Slate (Slate)
import Linear.Types (Mode(Static,Dynamic),Token,Object)
import Linear.Class (Unrestricted(..))
import qualified Linear.Slate as S
import qualified Linear.Class as C
-- | A stack backed by a contiguous array. The array doubles in
-- size whenever there is no more space. The array backing the
-- stack never shrinks.
data Stack :: (Mode -> Type) -> Mode -> Type where
-- Contains a slate and the slate's maximum number of
-- elements. We reallocate a new slate whenever we fill up.
Stack :: {-# UNPACK #-} !(Slate f m) ->. {-# UNPACK #-} !Int -> Stack f m
empty :: Object f => Token ->. Stack f 'Dynamic
empty t = Stack (S.allocate 1 t) 1
push :: Object f => Stack f 'Dynamic ->. f 'Dynamic ->. Stack f 'Dynamic
push (Stack slate0 maxSz) val = C.uncurry
(\slate1 (Unrestricted len) -> pushHelp (len < maxSz) slate1 val maxSz)
(S.length slate0)
pushHelp :: Object f => Bool ->. Slate f 'Dynamic ->. f 'Dynamic ->. Int -> Stack f 'Dynamic
pushHelp True slate1 val maxSz = Stack (S.push slate1 val) maxSz
pushHelp False slate1 val maxSz = Stack (S.push (S.resize slate1 (maxSz * 2)) val) (maxSz * 2)
pop :: Object f => Stack f m ->. (Stack f m, f m)
pop (Stack slate0 maxSz) = C.first (\slate1 -> Stack slate1 maxSz) (S.pop slate0)
-- | Left fold over the slate, stack in the accumulator. This deallocates
-- the stack as it walks over it. This is an unusual way to interact with
-- a stack since it handles the oldest element first.
foldl :: Object f
=> (b ->. f 'Dynamic ->. b)
-> b
->. Stack f 'Dynamic
->. (Token,b)
foldl g b (Stack s _) = S.foldl g b s
|
package com.smxy.hencoder.testkotlin
import java.util.*
/**
* @author huangkangqiang
* @name TestA
* @description
* @date 2019/5/1
*/
class TestA {
private fun getA() {
val objects = ArrayList<Any>()
objects.add(Any())
}
}
|
# 🙌 Contributions and Community
```{toctree}
:maxdepth: 2
../n00b-overview
../development
../CHANGELOG
../newDiagram
```
|
module.exports = {
vue: [
{
name: 'Vue + antdUI',
      description: 'Admin dashboard project template built with Vue + antdUI',
value: 'https://gitee.com/misthin/vue-frame-antd.git'
},
{
name: 'Vue + TS + ElementUI',
      description: 'Admin dashboard project template built with Vue + TS + ElementUI',
value: 'https://gitee.com/misthin/vue-ts-element.git'
},
{
name: 'Vue + requirejs',
      description: 'Project template built with Vue + requirejs',
value: 'https://gitee.com/misthin/requirejs-vue.git'
},
{
name: 'vue-element-admin',
      description: 'A back-office frontend solution based on vue and element-ui',
value: 'https://github.com/PanJiaChen/vue-element-admin.git'
},
{
name: 'vue-typescript-admin-template',
      description: 'A back-office frontend solution based on vue and element-ui',
value: 'https://github.com/Armour/vue-typescript-admin-template.git'
}
],
react: [
{
name: 'React + TS + antdUI + Mobx',
      description: 'Project template built with React + TS + antdUI + Mobx',
value: 'https://gitee.com/misthin/react-ts-antd-mobx.git'
}
],
electron: [
{
name: 'electron-vue-admin',
      description: 'A back-office frontend solution based on vue and element-ui',
value: 'https://github.com/PanJiaChen/electron-vue-admin.git'
},
{
name: 'Sugar-Electron',
      description: 'A lightweight development framework based on Electron',
value: 'https://github.com/SugarTurboS/Sugar-Electron.git'
}
]
};
|
# Fill the values and save this file as credentials.rb
# Don't forget to also download spotify_appkey.key,
# available at https://developer.spotify.com/en/libspotify/application-key/
# and place it in the same directory as spotify2rdio.rb
# Get these by signing up or creating a new app
# at http://developer.rdio.com/member/register
RDIO_CONSUMER_KEY = ''
RDIO_CONSUMER_SECRET = ''
|
class String
  # Checks whether a Título de Eleitor (Brazilian voter ID) mask is valid:
  #
  #   "7590.2631.1727".valid_titulo_eleitor_mask? => # true
def valid_titulo_eleitor_mask?
without_mask = !!(self =~ /^[0-9]{12}+$/)
with_mask = !!(self =~ /^[0-9]{4}\.[0-9]{4}\.[0-9]{4}+$/)
with_mask || without_mask
end
  # Checks whether a Título de Eleitor is valid:
  #
  #   "759026311727".valid_titulo_eleitor? => # true
  #   "7590.2631.1727".valid_titulo_eleitor? => # true
def valid_titulo_eleitor?
valid_state = TituloEleitorUtils::TituloEleitor.valid_state?(self)
if valid_titulo_eleitor_mask? && valid_state
      original_titulo_eleitor = self.gsub(/\./, "")
tested_titulo_eleitor = original_titulo_eleitor[0..9]
tested_titulo_eleitor << (
TituloEleitorUtils::TituloEleitor.new(
original_titulo_eleitor[0..9]
).first_digit.to_s
)
tested_titulo_eleitor << (
TituloEleitorUtils::TituloEleitor.new(
tested_titulo_eleitor[0..11]
).second_digit.to_s
)
      tested_titulo_eleitor == original_titulo_eleitor
end
end
  # Formats a valid Título de Eleitor number:
  #
  #   "759026311727".to_titulo_eleitor_format => # "7590.2631.1727"
def to_titulo_eleitor_format
if self.valid_titulo_eleitor?
"#{self[0..3]}.#{self[4..7]}.#{self[8..11]}"
end
end
  # Generates a Título de Eleitor number from a candidate number:
  #
  #   "7590263117".generate_titulo_eleitor => # "759026311727"
def generate_titulo_eleitor
if !!(self =~ /^[0-9]{10}+$/)
final_titulo_eleitor = self
final_titulo_eleitor << (
TituloEleitorUtils::TituloEleitor.new(
final_titulo_eleitor[0..9]
).first_digit.to_s
)
final_titulo_eleitor << (
TituloEleitorUtils::TituloEleitor.new(
final_titulo_eleitor[0..10]
).second_digit.to_s
)
final_titulo_eleitor
end
end
  # Generates a formatted Título de Eleitor number from a candidate number:
  #
  #   "7590263117".generate_titulo_eleitor_formatted => # "7590.2631.1727"
def generate_titulo_eleitor_formatted
generate_titulo_eleitor.to_titulo_eleitor_format
end
  # Returns the federative unit (state) the Título de Eleitor belongs to:
  #
  #   "759026311727".titulo_eleitor_province => # "TO"
def titulo_eleitor_province
TituloEleitorUtils.titulo_eleitor_province(self)
end
  # Alias 'mascara_de_titulo_eleitor_valida?' for valid_titulo_eleitor_mask?
  alias_method :mascara_de_titulo_eleitor_valida?, :valid_titulo_eleitor_mask?
  # Alias 'titulo_eleitor_valido?' for valid_titulo_eleitor?
  alias_method :titulo_eleitor_valido?, :valid_titulo_eleitor?
  # Alias 'para_formato_titulo_eleitor' for to_titulo_eleitor_format
  alias_method :para_formato_titulo_eleitor, :to_titulo_eleitor_format
  # Alias 'gerar_titulo_eleitor' for generate_titulo_eleitor
  alias_method :gerar_titulo_eleitor, :generate_titulo_eleitor
  # Alias 'gerar_titulo_eleitor_formatado' for generate_titulo_eleitor_formatted
  alias_method :gerar_titulo_eleitor_formatado, :generate_titulo_eleitor_formatted
  # Alias 'uf_do_titulo_eleitor' for titulo_eleitor_province
  alias_method :uf_do_titulo_eleitor, :titulo_eleitor_province
end
|
package org.idiosapps
import java.io.PrintWriter
class SummaryPageUtils {
// TODO fun writeTeXGrammarSection
// TODO fun writeTeXQuestionsSection
// TODO fun writeNamesSection
companion object {
const val endLine = "\\\\"
fun writeVocabSection(
outputStoryWriter: PrintWriter,
vocab: MutableList<Vocab>
) {
outputStoryWriter.println("\\clearpage")
outputStoryWriter.println("\\setlength{\\parindent}{0ex}") // remove indenting
outputStoryWriter.println("\\centerline{Vocabulary}") // add page title
            vocab.forEachIndexed { index, vocabItem ->
                val L2Extra = LanguageUtils.getMarkedL2Extra(vocabItem)
val vocabLine = "${index + 1}. ${vocabItem.L2Word} $L2Extra ${vocabItem.L1Word}$endLine"
outputStoryWriter.println(vocabLine)
}
}
}
}
|
#!/usr/bin/env bash
#
# Remove OpenStack configuration from a server.
#
pycassaShell -f drop-cassandra-cfgm-keyspaces
# shutdown all the services
if [ -f /etc/lsb-release ] && (egrep -q 'DISTRIB_RELEASE.*16.04' /etc/lsb-release); then
for svc in api config-nodemgr device-manager schema svc-monitor; do
chkconfig contrail-$svc off > /dev/null 2>&1
service contrail-$svc stop > /dev/null 2>&1
done
else
for svc in supervisor-config; do
chkconfig $svc off > /dev/null 2>&1
service $svc stop > /dev/null 2>&1
done
fi
for svc in api objectstore scheduler cert consoleauth novncproxy conductor; do
svc=openstack-nova-$svc
chkconfig $svc off > /dev/null 2>&1
service $svc stop > /dev/null 2>&1
done
for svc in api registry; do
svc=openstack-glance-$svc
chkconfig $svc off > /dev/null 2>&1
service $svc stop > /dev/null 2>&1
done
for svc in api scheduler; do
svc=openstack-cinder-$svc
chkconfig $svc off > /dev/null 2>&1
service $svc stop > /dev/null 2>&1
done
CONF_DIR=/etc/contrail
if [ -n "$MYSQL_ROOT_PW" ]; then
MYSQL_TOKEN=$MYSQL_ROOT_PW
elif [ -f $CONF_DIR/mysql.token ]; then
MYSQL_TOKEN=$(cat $CONF_DIR/mysql.token)
fi
if [ -z "$MYSQL_TOKEN" ]; then
echo "Please provide MySQL root password"
exit 1
fi
for svc in keystone nova glance cinder; do
openstack-db -y --drop --service $svc --rootpw "$MYSQL_TOKEN"
done
rm -rf /etc/keystone/ssl
rm -f /etc/contrail/service.token
rm -f /etc/contrail/keystonerc
rm -f /etc/contrail/openstackrc
if [ -n "$DISABLE_MYSQL" ]; then
mysqladmin --password="$MYSQL_TOKEN" password ""
rm -f /etc/contrail/mysql.token
chkconfig mysqld off > /dev/null 2>&1
service mysqld stop > /dev/null 2>&1
fi
# TODO: determine what needs to be removed
for subdir in keys buckets images; do
ls /var/lib/nova/$subdir
done
for file in /var/lib/glance/images/*; do
    rm -f "$file"
done
# Remove keystone keys
for svc in nova glance quantum; do
rm -f /var/lib/$svc/keystone-signing/*.pem
done
rm -f /var/lib/cinder/*.pem
|
// License [CC0](http://creativecommons.org/publicdomain/zero/1.0/)
library startstopstats;
import 'dart:html';
// in milliseconds ( like window.performance.now() )
class StartStopStats {
Function displayFct;
double displayLast = 0.0;
double resetLast = 0.0;
double min;
double max;
double total;
int count;
double _pstart;
final _perf = window.performance;
get avg => (count == 0) ? 0.0 : total/count;
StartStopStats() {
reset();
start();
}
start() {
_pstart = _perf.now();
}
stop() {
var now = _perf.now();
store(now - _pstart);
if (displayFct != null) {
displayFct(this, now);
}
}
store(double t) {
if (min > t) min = t;
if (max < t) max = t;
count++;
total += t;
}
reset() {
resetLast = _perf.now();
min = double.MAX_FINITE;
max = double.MIN_POSITIVE;
total = 0.0;
count = 0;
}
}
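// Illustrative usage sketch (not part of the original library); the timed
// work and the printed message below are hypothetical examples.
//
//   var stats = new StartStopStats();
//   stats.start();
//   doSomeWork();
//   stats.stop(); // accumulates min/max/total/count since the last reset()
//   print('avg: ${stats.avg} ms over ${stats.count} runs');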
|
package com.conlect.oatos.dto.client;
import com.conlect.oatos.dto.autobean.IEnterpriseLoginDTO;
/**
 * Enterprise user login DTO
 *
 * @author yang
 *
 */
public class EnterpriseLoginDTO extends LoginDTO implements IEnterpriseLoginDTO {
private static final long serialVersionUID = 1L;
    /**
     * Enterprise name
     */
public String enterpriseName;
    /**
     * Device token used by iOS devices
     */
private String deviceToken;
@Override
public String getDeviceToken() {
return deviceToken;
}
@Override
public void setDeviceToken(String deviceToken) {
this.deviceToken = deviceToken;
}
@Override
public String getEnterpriseName() {
return enterpriseName;
}
@Override
public void setEnterpriseName(String enterpriseName) {
this.enterpriseName = enterpriseName;
}
}
|
# Author: Bichen Wu (bichen@berkeley.edu) 08/25/2016
"""Neural network model base class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
from utils import util
from easydict import EasyDict as edict
import numpy as np
import tensorflow as tf
def _add_loss_summaries(total_loss):
"""Add summaries for losses
Generates loss summaries for visualizing the performance of the network.
Args:
total_loss: Total loss from loss().
"""
losses = tf.get_collection('losses')
# Attach a scalar summary to all individual losses and the total loss; do the
# same for the averaged version of the losses.
for l in losses + [total_loss]:
tf.summary.scalar(l.op.name, l)
def _variable_on_device(name, shape, initializer, trainable=True):
"""Helper to create a Variable.
Args:
name: name of the variable
shape: list of ints
initializer: initializer for Variable
Returns:
Variable Tensor
"""
# TODO(bichen): fix the hard-coded data type below
dtype = tf.float32
if not callable(initializer):
var = tf.get_variable(name, initializer=initializer, trainable=trainable)
else:
var = tf.get_variable(
name, shape, initializer=initializer, dtype=dtype, trainable=trainable)
return var
def _variable_with_weight_decay(name, shape, wd, initializer, trainable=True):
"""Helper to create an initialized Variable with weight decay.
Note that the Variable is initialized with a truncated normal distribution.
A weight decay is added only if one is specified.
Args:
name: name of the variable
shape: list of ints
wd: add L2Loss weight decay multiplied by this float. If None, weight
decay is not added for this Variable.
Returns:
Variable Tensor
"""
var = _variable_on_device(name, shape, initializer, trainable)
if wd is not None and trainable:
weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')
tf.add_to_collection('losses', weight_decay)
return var
class ModelSkeleton:
"""Base class of NN detection models."""
def __init__(self, mc):
self.mc = mc
self.is_training = tf.placeholder(tf.bool, name='is_training')
# image batch input
self.image_input = tf.placeholder(
tf.float32, [mc.BATCH_SIZE, mc.IMAGE_HEIGHT, mc.IMAGE_WIDTH, 3],
name='image_input'
)
# a scalar tensor in range (0, 1]. Usually set to 0.5 in training phase and
# 1.0 in evaluation phase
self.keep_prob = tf.placeholder(tf.float32, name='keep_prob')
# A tensor where an element is 1 if the corresponding box is "responsible"
    # for detecting an object and 0 otherwise.
self.input_mask = tf.placeholder(
tf.float32, [mc.BATCH_SIZE, mc.ANCHORS, 1], name='box_mask')
# Tensor used to represent bounding box deltas.
self.box_delta_input = tf.placeholder(
tf.float32, [mc.BATCH_SIZE, mc.ANCHORS, 4], name='box_delta_input')
# Tensor used to represent bounding box coordinates.
self.box_input = tf.placeholder(
tf.float32, [mc.BATCH_SIZE, mc.ANCHORS, 4], name='box_input')
# Tensor used to represent labels
self.labels = tf.placeholder(
tf.float32, [mc.BATCH_SIZE, mc.ANCHORS, mc.CLASSES], name='labels')
# Tensor representing the IOU between predicted bbox and gt bbox
self.ious = tf.Variable(
initial_value=np.zeros((mc.BATCH_SIZE, mc.ANCHORS)), trainable=False,
name='iou', dtype=tf.float32
)
# model parameters
self.model_params = []
# model size counter
self.model_size_counter = [] # array of tuple of layer name, parameter size
# flop counter
self.flop_counter = [] # array of tuple of layer name, flop number
# activation counter
self.activation_counter = [] # array of tuple of layer name, output activations
self.activation_counter.append(('input', mc.IMAGE_WIDTH*mc.IMAGE_HEIGHT*3))
def _add_forward_graph(self):
"""NN architecture specification."""
raise NotImplementedError
def _trim_bbox(self, bboxes):
"""Trim bbox for one batch"""
valid_xmax = self.mc.IMAGE_WIDTH - 1.0
valid_ymax = self.mc.IMAGE_HEIGHT - 1.0
new_bboxes = tf.stack(
[tf.clip_by_value(bboxes[..., 0], 0.0, valid_xmax),
tf.clip_by_value(bboxes[..., 1], 0.0, valid_ymax),
tf.clip_by_value(bboxes[..., 2], 0.0, valid_xmax),
tf.clip_by_value(bboxes[..., 3], 0.0, valid_ymax),
],
axis=-1,
)
return new_bboxes
def _smooth_softmax(self, logits):
"""Smoothed version softmax"""
new_shape = logits.get_shape().as_list()
new_shape[-1] = 1
acts = tf.nn.softmax(
tf.subtract(
logits,
tf.reshape(
tf.reduce_max(
logits,
reduction_indices=-1
),
new_shape
)
)
)
return acts
def _add_yolo_interpret_graph(self):
"""Interpret yolo output."""
mc = self.mc
with tf.variable_scope('interpret_output') as scope:
# TODO(jeff): add summary
N = mc.BATCH_SIZE
H, W, B = mc.NET_OUT_SHAPE
C = mc.CLASSES
preds = self.preds
preds = tf.reshape(
self.preds,
(N, H, W, B, 5+C)
)
# confidence
self.pred_conf = tf.sigmoid(
tf.reshape(
preds[:, :, :, :, 5],
(N, H, W, B, 1)
),
name='conf'
)
# bbox scale
self.bbox_x = tf.reshape(
tf.add(
tf.sigmoid(
preds[:, :, :, :, 0]
),
tf.reshape(
tf.to_float(
tf.range(0, W, 1)
),
(1, 1, W, 1)
)
),
(N, H, W, B, 1),
name='bbox_x_ratio'
)
self.bbox_y = tf.reshape(
tf.add(
tf.sigmoid(
preds[:, :, :, :, 1]
),
tf.reshape(
tf.to_float(
tf.range(0, H, 1)
),
(1, H, 1, 1)
)
),
(N, H, W, B, 1),
name='bbox_y_ratio'
)
self.bbox_w = tf.reshape(
tf.multiply(
tf.exp(
preds[:, :, :, :, 2]
),
mc.ANCHOR_BOX[:, :, :, 0]
),
(N, H, W, B, 1),
name='bbox_w_ratio'
)
self.bbox_h = tf.reshape(
tf.multiply(
tf.exp(
preds[:, :, :, :, 3]
),
mc.ANCHOR_BOX[:, :, :, 1]
),
(N, H, W, B, 1),
name='bbox_h_ratio'
)
self.bbox = tf.stack(
[self.bbox_x,
self.bbox_y,
self.bbox_w,
self.bbox_h],
axis=4,
name='bbox_ratio'
)
# bbox prediction
w_scale = float(mc.IMAGE_WIDTH) / W
h_scale = float(mc.IMAGE_HEIGHT) / H
self.raw_boxes = tf.reshape(
tf.stack(
[self.bbox_x * w_scale,
self.bbox_y * h_scale,
self.bbox_w * w_scale,
self.bbox_h * h_scale],
axis=4
),
(N, H*W*B, 4),
name='raw_bbox'
)
# trim bbox
self.det_boxes = tf.py_func(
lambda x: util.bbox_transform_inv(x),
[self._trim_bbox(
tf.py_func(
lambda x: util.bbox_transform(x),
[self.raw_boxes],
tf.float32
)
)],
tf.float32,
name='det_boxes'
)
# prob
self.probs = tf.multiply(
self._smooth_softmax(preds[:, :, :, :, 5:]),
self.pred_conf,
name='probs'
)
# class prediction
self.det_probs = tf.reshape(
#tf.reduce_max(self.probs, 4),
self.probs,
(N, H*W*B, C),
name='score'
)
self.det_class = tf.reshape(
tf.argmax(self.probs, 4),
(N, H*W*B),
name='class_idx'
)
def _add_sqt_interpret_graph(self):
"""Interpret NN output."""
mc = self.mc
with tf.variable_scope('interpret_output') as scope:
preds = self.preds
# probability
num_class_probs = mc.ANCHOR_PER_GRID*mc.CLASSES
self.pred_class_probs = tf.reshape(
tf.nn.softmax(
tf.reshape(
preds[:, :, :, :num_class_probs],
[-1, mc.CLASSES]
)
),
[mc.BATCH_SIZE, mc.ANCHORS, mc.CLASSES],
name='pred_class_probs'
)
# confidence
num_confidence_scores = mc.ANCHOR_PER_GRID+num_class_probs
self.pred_conf = tf.sigmoid(
tf.reshape(
preds[:, :, :, num_class_probs:num_confidence_scores],
[mc.BATCH_SIZE, mc.ANCHORS]
),
name='pred_confidence_score'
)
# bbox_delta
self.pred_box_delta = tf.reshape(
preds[:, :, :, num_confidence_scores:],
[mc.BATCH_SIZE, mc.ANCHORS, 4],
name='bbox_delta'
)
# number of object. Used to normalize bbox and classification loss
self.num_objects = tf.reduce_sum(self.input_mask, name='num_objects')
with tf.variable_scope('bbox') as scope:
with tf.variable_scope('stretching'):
delta_x, delta_y, delta_w, delta_h = tf.unstack(
self.pred_box_delta, axis=2)
anchor_x = mc.ANCHOR_BOX[:, 0]
anchor_y = mc.ANCHOR_BOX[:, 1]
anchor_w = mc.ANCHOR_BOX[:, 2]
anchor_h = mc.ANCHOR_BOX[:, 3]
box_center_x = tf.identity(
anchor_x + delta_x * anchor_w, name='bbox_cx')
box_center_y = tf.identity(
anchor_y + delta_y * anchor_h, name='bbox_cy')
box_width = tf.identity(
anchor_w * util.safe_exp(delta_w, mc.EXP_THRESH),
name='bbox_width')
box_height = tf.identity(
anchor_h * util.safe_exp(delta_h, mc.EXP_THRESH),
name='bbox_height')
self._activation_summary(delta_x, 'delta_x')
self._activation_summary(delta_y, 'delta_y')
self._activation_summary(delta_w, 'delta_w')
self._activation_summary(delta_h, 'delta_h')
self._activation_summary(box_center_x, 'bbox_cx')
self._activation_summary(box_center_y, 'bbox_cy')
self._activation_summary(box_width, 'bbox_width')
self._activation_summary(box_height, 'bbox_height')
with tf.variable_scope('trimming'):
xmins, ymins, xmaxs, ymaxs = util.bbox_transform(
[box_center_x, box_center_y, box_width, box_height])
# The max x position is mc.IMAGE_WIDTH - 1 since we use zero-based
# pixels. Same for y.
xmins = tf.minimum(
tf.maximum(0.0, xmins), mc.IMAGE_WIDTH-1.0, name='bbox_xmin')
self._activation_summary(xmins, 'box_xmin')
ymins = tf.minimum(
tf.maximum(0.0, ymins), mc.IMAGE_HEIGHT-1.0, name='bbox_ymin')
self._activation_summary(ymins, 'box_ymin')
xmaxs = tf.maximum(
tf.minimum(mc.IMAGE_WIDTH-1.0, xmaxs), 0.0, name='bbox_xmax')
self._activation_summary(xmaxs, 'box_xmax')
ymaxs = tf.maximum(
tf.minimum(mc.IMAGE_HEIGHT-1.0, ymaxs), 0.0, name='bbox_ymax')
self._activation_summary(ymaxs, 'box_ymax')
self.det_boxes = tf.transpose(
tf.stack(util.bbox_transform_inv([xmins, ymins, xmaxs, ymaxs])),
(1, 2, 0), name='bbox'
)
with tf.variable_scope('IOU'):
def _tensor_iou(box1, box2):
with tf.variable_scope('intersection'):
xmin = tf.maximum(box1[0], box2[0], name='xmin')
ymin = tf.maximum(box1[1], box2[1], name='ymin')
xmax = tf.minimum(box1[2], box2[2], name='xmax')
ymax = tf.minimum(box1[3], box2[3], name='ymax')
w = tf.maximum(0.0, xmax-xmin, name='inter_w')
h = tf.maximum(0.0, ymax-ymin, name='inter_h')
intersection = tf.multiply(w, h, name='intersection')
with tf.variable_scope('union'):
w1 = tf.subtract(box1[2], box1[0], name='w1')
h1 = tf.subtract(box1[3], box1[1], name='h1')
w2 = tf.subtract(box2[2], box2[0], name='w2')
h2 = tf.subtract(box2[3], box2[1], name='h2')
union = w1*h1 + w2*h2 - intersection
return intersection/(union+mc.EPSILON) \
* tf.reshape(self.input_mask, [mc.BATCH_SIZE, mc.ANCHORS])
self.ious = self.ious.assign(
_tensor_iou(
util.bbox_transform(tf.unstack(self.det_boxes, axis=2)),
util.bbox_transform(tf.unstack(self.box_input, axis=2))
)
)
self._activation_summary(self.ious, 'conf_score')
with tf.variable_scope('probability') as scope:
self._activation_summary(self.pred_class_probs, 'class_probs')
probs = tf.multiply(
self.pred_class_probs,
tf.reshape(self.pred_conf, [mc.BATCH_SIZE, mc.ANCHORS, 1]),
name='final_class_prob'
)
self._activation_summary(probs, 'final_class_prob')
self.det_probs = tf.reduce_max(probs, 2, name='score')
self.det_class = tf.argmax(probs, 2, name='class_idx')
def _add_yolo_loss_graph(self):
"""Define the YOLO loss operation."""
# TODO(jeff): add yolo loss graph
pass
def _add_sqt_loss_graph(self):
"""Define the SqueezeDet loss operation."""
mc = self.mc
with tf.variable_scope('class_regression') as scope:
# cross-entropy: q * -log(p) + (1-q) * -log(1-p)
# add a small value into log to prevent blowing up
self.class_loss = tf.truediv(
tf.reduce_sum(
(self.labels*(-tf.log(self.pred_class_probs+mc.EPSILON))
+ (1-self.labels)*(-tf.log(1-self.pred_class_probs+mc.EPSILON)))
* self.input_mask * mc.LOSS_COEF_CLASS),
self.num_objects,
name='class_loss'
)
tf.add_to_collection('losses', self.class_loss)
with tf.variable_scope('confidence_score_regression') as scope:
input_mask = tf.reshape(self.input_mask, [mc.BATCH_SIZE, mc.ANCHORS])
self.conf_loss = tf.reduce_mean(
tf.reduce_sum(
tf.square((self.ious - self.pred_conf))
* (input_mask*mc.LOSS_COEF_CONF_POS/self.num_objects
+(1-input_mask)*mc.LOSS_COEF_CONF_NEG/(mc.ANCHORS-self.num_objects)),
reduction_indices=[1]
),
name='confidence_loss'
)
tf.add_to_collection('losses', self.conf_loss)
tf.summary.scalar('mean iou', tf.reduce_sum(self.ious)/self.num_objects)
with tf.variable_scope('bounding_box_regression') as scope:
self.bbox_loss = tf.truediv(
tf.reduce_sum(
mc.LOSS_COEF_BBOX * tf.square(
self.input_mask*(self.pred_box_delta-self.box_delta_input))),
self.num_objects,
name='bbox_loss'
)
tf.add_to_collection('losses', self.bbox_loss)
# add above losses as well as weight decay losses to form the total loss
self.loss = tf.add_n(tf.get_collection('losses'), name='total_loss')
def _add_train_graph(self):
"""Define the training operation."""
mc = self.mc
self.global_step = tf.Variable(0, name='global_step', trainable=False)
assert mc.LR_POLICY in ['exponential', 'step'], \
'Invalid learning rate policy'
if mc.LR_POLICY == 'exponential':
lr = tf.train.exponential_decay(mc.LEARNING_RATE,
self.global_step,
mc.DECAY_STEPS,
mc.LR_DECAY_FACTOR,
staircase=True)
elif mc.LR_POLICY == 'step':
lr = tf.train.piecewise_constant(self.global_step,
mc.LR_STEP_BOUNDRY,
mc.LR_STEP_VALUE)
tf.summary.scalar('learning_rate', lr)
_add_loss_summaries(self.loss)
opt = tf.train.MomentumOptimizer(learning_rate=lr, momentum=mc.MOMENTUM)
grads_vars = opt.compute_gradients(self.loss, tf.trainable_variables(), aggregation_method=None)
with tf.variable_scope('clip_gradient') as scope:
for i, (grad, var) in enumerate(grads_vars):
grads_vars[i] = (tf.clip_by_norm(grad, mc.MAX_GRAD_NORM), var)
apply_gradient_op = opt.apply_gradients(grads_vars, global_step=self.global_step)
for var in tf.trainable_variables():
tf.summary.histogram(var.op.name, var)
for grad, var in grads_vars:
if grad is not None:
tf.summary.histogram(var.op.name + '/gradients', grad)
with tf.control_dependencies([apply_gradient_op]):
self.train_op = tf.no_op(name='train')
def _add_viz_graph(self):
"""Define the visualization operation."""
mc = self.mc
self.image_to_show = tf.placeholder(
tf.float32, [None, mc.IMAGE_HEIGHT, mc.IMAGE_WIDTH, 3],
name='image_to_show'
)
self.viz_op = tf.summary.image('sample_detection_results',
self.image_to_show, collections='image_summary',
max_outputs=mc.BATCH_SIZE)
def _conv_layer(
self, layer_name, inputs, filters, size, stride, padding='SAME',
freeze=False, xavier=False, bn=False, act='relu', stddev=0.001):
"""Convolutional layer operation constructor.
Args:
layer_name: layer name.
inputs: input tensor
filters: number of output filters.
size: kernel size.
stride: stride
padding: 'SAME' or 'VALID'. See tensorflow doc for detailed description.
freeze: if true, then do not train the parameters in this layer.
xavier: whether to use xavier weight initializer or not.
act: activation type (none / relu / lrelu)
stddev: standard deviation used for random weight initializer.
Returns:
A convolutional layer operation.
"""
mc = self.mc
use_pretrained_param = False
if mc.LOAD_PRETRAINED_MODEL:
cw = self.caffemodel_weight
if layer_name in cw:
# kernel_val = np.transpose(cw[layer_name][0], [2,3,1,0])
kernel_val = cw[layer_name][0]
bias_val = cw[layer_name][1]
if bn and mc.LOAD_BN:
scale_val = cw[layer_name][2]
mean_val = cw[layer_name][3]
var_val = cw[layer_name][4]
# check the shape
if (kernel_val.shape ==
(size, size, inputs.get_shape().as_list()[-1], filters)) \
and (bias_val.shape == (filters,)):
use_pretrained_param = True
else:
print ('Shape of the pretrained parameter of {} does not match, '
'use randomly initialized parameter'.format(layer_name))
else:
print ('Cannot find {} in the pretrained model. Use randomly initialized '
'parameters'.format(layer_name))
if mc.DEBUG_MODE:
print('Input tensor shape to {}: {}'.format(layer_name, inputs.get_shape()))
with tf.variable_scope(layer_name) as scope:
channels = inputs.get_shape()[3]
# re-order the caffe kernel with shape [out, in, h, w] -> tf kernel with
# shape [h, w, in, out]
if use_pretrained_param:
if mc.DEBUG_MODE:
print ('Using pretrained model for {}'.format(layer_name))
kernel_init = tf.constant(kernel_val , dtype=tf.float32)
bias_init = tf.constant(bias_val, dtype=tf.float32)
elif xavier:
kernel_init = tf.contrib.layers.xavier_initializer_conv2d()
bias_init = tf.constant_initializer(0.0)
else:
kernel_init = tf.truncated_normal_initializer(
stddev=stddev, dtype=tf.float32)
bias_init = tf.constant_initializer(0.0)
kernel = _variable_with_weight_decay(
'kernels', shape=[size, size, int(channels), filters],
wd=mc.WEIGHT_DECAY, initializer=kernel_init, trainable=(not freeze))
biases = _variable_on_device('biases', [filters], bias_init,
trainable=(not freeze))
self.model_params += [kernel, biases]
conv = tf.nn.conv2d(
inputs, kernel, [1, stride, stride, 1], padding=padding,
name='convolution')
conv_bias = tf.nn.bias_add(conv, biases, name='bias_add')
if bn:
if mc.LOAD_BN:
scale_init = lambda shape,dtype,partition_info: tf.constant(scale_val, dtype=dtype)
mean_init = lambda shape,dtype,partition_info: tf.constant(mean_val, dtype=dtype)
var_init = lambda shape,dtype,partition_info: tf.constant(var_val, dtype=dtype)
param_init = {'gamma': scale_init, 'moving_mean': mean_init, 'moving_variance': var_init}
else:
param_init = None
conv_bias = self._batch_norm(conv_bias, param_init, scope.name)
bn_vars = tf.get_collection(tf.GraphKeys.VARIABLES, scope=scope.name+"_bn")
self.model_params += bn_vars
assert act in [None, 'relu', 'lrelu'], \
"Invalid type of conv activation"
if act == 'relu':
out = tf.nn.relu(conv_bias, 'relu')
elif act == 'lrelu':
out = self._lrelu(conv_bias, scope.name)
else:
out = conv_bias
self.model_size_counter.append(
(layer_name, (1+size*size*int(channels))*filters)
)
out_shape = out.get_shape().as_list()
num_flops = \
(1+2*int(channels)*size*size)*filters*out_shape[1]*out_shape[2]
if act is not None:
num_flops += 2*filters*out_shape[1]*out_shape[2]
self.flop_counter.append((layer_name, num_flops))
self.activation_counter.append(
(layer_name, out_shape[1]*out_shape[2]*out_shape[3])
)
return out
def _pooling_layer(
self, layer_name, inputs, size, stride, padding='SAME', ptype='max'):
"""Pooling layer operation constructor.
Args:
layer_name: layer name.
inputs: input tensor
size: kernel size.
stride: stride
padding: 'SAME' or 'VALID'. See tensorflow doc for detailed description.
Returns:
A pooling layer operation.
"""
assert ptype in ['max', 'avg', 'global_avg'], \
'Invalid pooling type: {}'.format(ptype)
with tf.variable_scope(layer_name) as scope:
if ptype == 'max':
out = tf.nn.max_pool(inputs,
ksize=[1, size, size, 1],
strides=[1, stride, stride, 1],
padding=padding)
elif ptype == 'avg':
out = tf.nn.avg_pool(inputs,
ksize=[1, size, size, 1],
strides=[1, stride, stride, 1],
padding=padding)
elif ptype == 'global_avg':
i_shape = inputs.get_shape().as_list()
assert i_shape[1] == i_shape[2], \
'width must equal to height for global avg'
out = tf.nn.avg_pool(inputs,
ksize=[1, i_shape[1], i_shape[1], 1],
strides=[1, i_shape[1], i_shape[1], 1],
padding=padding)
activation_size = np.prod(out.get_shape().as_list()[1:])
self.activation_counter.append((layer_name, activation_size))
return out
def _fc_layer(
self, layer_name, inputs, hiddens, flatten=False, relu=True,
xavier=False, stddev=0.001):
"""Fully connected layer operation constructor.
Args:
layer_name: layer name.
inputs: input tensor
hiddens: number of (hidden) neurons in this layer.
flatten: if true, reshape the input 4D tensor of shape
(batch, height, weight, channel) into a 2D tensor with shape
(batch, -1). This is used when the input to the fully connected layer
is output of a convolutional layer.
relu: whether to use relu or not.
xavier: whether to use xavier weight initializer or not.
stddev: standard deviation used for random weight initializer.
Returns:
A fully connected layer operation.
"""
mc = self.mc
use_pretrained_param = False
if mc.LOAD_PRETRAINED_MODEL:
cw = self.caffemodel_weight
if layer_name in cw:
use_pretrained_param = True
kernel_val = cw[layer_name][0]
bias_val = cw[layer_name][1]
if mc.DEBUG_MODE:
print('Input tensor shape to {}: {}'.format(layer_name, inputs.get_shape()))
with tf.variable_scope(layer_name) as scope:
input_shape = inputs.get_shape().as_list()
if flatten:
dim = input_shape[1]*input_shape[2]*input_shape[3]
inputs = tf.reshape(inputs, [-1, dim]) # N x dim
if use_pretrained_param:
try:
assert kernel_val.shape == (dim, hiddens), \
'kernel shape error at {}'.format(layer_name)
except:
# Do not use pretrained parameter if shape doesn't match
use_pretrained_param = False
print ('Shape of the pretrained parameter of {} does not match, '
'use randomly initialized parameter'.format(layer_name))
else:
dim = input_shape[1]
if use_pretrained_param:
try:
assert kernel_val.shape == (dim, hiddens), \
'kernel shape error at {}'.format(layer_name)
except:
use_pretrained_param = False
print ('Shape of the pretrained parameter of {} does not match, '
'use randomly initialized parameter'.format(layer_name))
if use_pretrained_param:
if mc.DEBUG_MODE:
print ('Using pretrained model for {}'.format(layer_name))
kernel_init = tf.constant(kernel_val, dtype=tf.float32)
bias_init = tf.constant(bias_val, dtype=tf.float32)
elif xavier:
kernel_init = tf.contrib.layers.xavier_initializer()
bias_init = tf.constant_initializer(0.0)
else:
kernel_init = tf.truncated_normal_initializer(
stddev=stddev, dtype=tf.float32)
bias_init = tf.constant_initializer(0.0)
weights = _variable_with_weight_decay(
'weights', shape=[dim, hiddens], wd=mc.WEIGHT_DECAY,
initializer=kernel_init)
biases = _variable_on_device('biases', [hiddens], bias_init)
self.model_params += [weights, biases]
outputs = tf.nn.bias_add(tf.matmul(inputs, weights), biases)
if relu:
outputs = tf.nn.relu(outputs, 'relu')
# count layer stats
self.model_size_counter.append((layer_name, (dim+1)*hiddens))
num_flops = 2 * dim * hiddens + hiddens
if relu:
num_flops += 2*hiddens
self.flop_counter.append((layer_name, num_flops))
self.activation_counter.append((layer_name, hiddens))
return outputs
def _concat_layer(self, layer_name, inputs1, inputs2):
"""Concatenation layer
Args:
layer_name: layer name.
inputs1: tensor1 with shape (N, W, B, C1)
inputs2: tensor2 with shape (N, W, B, C2)
Returns:
tensor with shape (N, W, B, C1+C2)
"""
with tf.variable_scope(layer_name) as scope:
shape1 = inputs1.get_shape().as_list()
shape2 = inputs2.get_shape().as_list()
assert shape1[:-1] == shape2[:-1], \
          'Cannot concat unmatched tensor shapes ({}, {}, {}, {}), ({}, {}, {}, {})'.format(*(shape1 + shape2))
return tf.concat([inputs1, inputs2], 3, name='concat')
def _reorg_layer(self, layer_name, inputs, stride):
"""Reorganization layer
Args:
layer_name: layer name.
inputs: input tensor with shape (N, W, H, C)
stride: stride
Returns:
tensor with shape (N, W/stride, H/stride, C*stride*stride)
"""
with tf.variable_scope(layer_name) as scope:
n, w, h, c = inputs.get_shape().as_list()
assert (w % stride == 0) and (h % stride == 0), \
'({}, {}) are not divisible by stride {}'.format(w, h, stride)
new_w = int(w / stride)
new_h = int(h / stride)
#new_c = int(c*stride*stride)
#return tf.reshape(inputs, [n, new_w, new_h, new_c], name='reorg')
return tf.map_fn(lambda x: self._reorg(x, new_w, new_h, stride), inputs, name='reorg')
def _reorg(self, f_map, w, h, stride):
f_maps = []
for i in xrange(w):
rows = []
for j in xrange(h):
start = (i*stride, j*stride)
end = ((i+1)*stride, (j+1)*stride)
vec = tf.strided_slice(f_map, start, end, (1,1))
vec = tf.reshape(vec, [-1])
rows.append(vec)
f_maps.append(tf.stack(rows))
return tf.stack(f_maps)
def _lrelu(self, inputs, scope, alpha=0.1):
return tf.maximum(alpha * inputs, inputs, name='lrelu')
def _batch_norm(self, inputs, param_init, scope):
return tf.cond(self.is_training, \
lambda: tf.contrib.layers.batch_norm(inputs, is_training=True, \
scale=True, epsilon=1e-5, \
center=False, param_initializers=param_init, updates_collections=None, \
scope=scope+"_bn"), \
lambda: tf.contrib.layers.batch_norm(inputs, is_training=False, \
scale=True, epsilon=1e-5, \
center=False, updates_collections=None, param_initializers=param_init, \
scope=scope+"_bn", reuse=True))
def filter_yolo_predict(self, boxes, probs, cls_idx):
"""Filter yolo prediction with Thres and NMS.
Args:
boxes: one batch boxes of shape (H*W*B, 4)
probs: one batch probs of shape (H*W*B, C)
cls_idx: one batch probs of shape (H*W*B,)
Returns:
final_boxes: filtered bbox of shape (K, 4)
final_probs: filtered probs of shape (K,)
final_class: filtered score of shape (K,)
# where K is the remaining box number
"""
mc = self.mc
# Set prob of boxes below threshold to 0
probs *= probs > mc.PROB_THRESH
# NMS
final_boxes = []
final_probs = []
final_class = []
for c in xrange(mc.CLASSES):
sort_idx = probs[(cls_idx == c), c].argsort()[::-1]
sort_probs = probs[sort_idx, c] # (H*W*B,)
sort_boxes = boxes[sort_idx] # (H*W*B, 4)
for i in xrange(len(sort_boxes)):
boxi = sort_boxes[i]
if sort_probs[i] == 0: continue
for j in xrange(i+1, len(sort_boxes)):
boxj = sort_boxes[j]
if util.iou(boxi, boxj) > mc.NMS_THRESH:
sort_probs[j] = 0.
keep_idx = np.where(sort_probs)[0]
final_boxes.append(sort_boxes[keep_idx])
final_probs.append(sort_probs[keep_idx])
final_class.append(np.full(keep_idx.shape, c))
return (np.stack(final_boxes, axis=-1),
np.stack(final_probs, axis=-1),
np.stack(final_class, axis=-1))
def filter_prediction(self, boxes, probs, cls_idx):
"""Filter bounding box predictions with probability threshold and
    non-maximum suppression.
Args:
boxes: array of [cx, cy, w, h].
probs: array of probabilities
cls_idx: array of class indices
Returns:
final_boxes: array of filtered bounding boxes.
final_probs: array of filtered probabilities
final_cls_idx: array of filtered class indices
"""
mc = self.mc
if mc.TOP_N_DETECTION < len(probs) and mc.TOP_N_DETECTION > 0:
order = probs.argsort()[:-mc.TOP_N_DETECTION-1:-1]
probs = probs[order]
boxes = boxes[order]
cls_idx = cls_idx[order]
else:
filtered_idx = np.nonzero(probs>mc.PROB_THRESH)[0]
probs = probs[filtered_idx]
boxes = boxes[filtered_idx]
cls_idx = cls_idx[filtered_idx]
final_boxes = []
final_probs = []
final_cls_idx = []
for c in range(mc.CLASSES):
idx_per_class = [i for i in range(len(probs)) if cls_idx[i] == c]
keep = util.nms(boxes[idx_per_class], probs[idx_per_class], mc.NMS_THRESH)
for i in range(len(keep)):
if keep[i]:
final_boxes.append(boxes[idx_per_class[i]])
final_probs.append(probs[idx_per_class[i]])
final_cls_idx.append(c)
return final_boxes, final_probs, final_cls_idx
def _activation_summary(self, x, layer_name):
"""Helper to create summaries for activations.
Args:
x: layer output tensor
layer_name: name of the layer
Returns:
nothing
"""
with tf.variable_scope('activation_summary') as scope:
tf.summary.histogram(
'activation_summary/'+layer_name, x)
tf.summary.scalar(
'activation_summary/'+layer_name+'/sparsity', tf.nn.zero_fraction(x))
tf.summary.scalar(
'activation_summary/'+layer_name+'/average', tf.reduce_mean(x))
tf.summary.scalar(
'activation_summary/'+layer_name+'/max', tf.reduce_max(x))
tf.summary.scalar(
'activation_summary/'+layer_name+'/min', tf.reduce_min(x))
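  # Illustrative subclass sketch (not part of the original file): a concrete
  # detector is expected to inherit from ModelSkeleton, build the forward graph,
  # and then attach the interpretation/loss/train graphs. The layer names and
  # sizes below are hypothetical.
  #
  #   class ToyDet(ModelSkeleton):
  #     def __init__(self, mc):
  #       ModelSkeleton.__init__(self, mc)
  #       self._add_forward_graph()
  #       self._add_sqt_interpret_graph()
  #       self._add_sqt_loss_graph()
  #       self._add_train_graph()
  #       self._add_viz_graph()
  #
  #     def _add_forward_graph(self):
  #       conv1 = self._conv_layer('conv1', self.image_input, filters=64,
  #                                size=3, stride=2)
  #       pool1 = self._pooling_layer('pool1', conv1, size=3, stride=2)
  #       self.preds = self._conv_layer(
  #           'preds', pool1,
  #           filters=self.mc.ANCHOR_PER_GRID * (self.mc.CLASSES + 1 + 4),
  #           size=3, stride=1, act=None)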
|
require_relative 'test_helper'
SAMPLE_CONFIG = %(
common_fields:
fields:
- name: name
width: 20
starts_at: 1
validate:
- not_blank
test_format:
skip_top_lines: 1
skip_bottom_lines: 1
inherit_from: common_fields
new_line_style: true
fields:
- name: phone
width: 12
starts_at: 21
format: '%05d'
validate:
- start_with? '60'
- unique
- width: 50
validate:
- "^ include? r[:name]"
- width: 3
validate: 'XYZ'
- width: 2
validate:
- width 2
- ['AA', 'BB', 'CC']
).freeze
class FileFormatConfigurationTest < Minitest::Test
def test_can_parse_sample_file
format = FixedWidthFileValidator::FileFormat.for(:test_format, StringIO.new(SAMPLE_CONFIG))
total_fields = 5
assert_equal total_fields, format.fields.size, 'inherit_from did not work'
assert format.field_validations(:phone).include? 'unique'
assert_instance_of Array, format.field_validations(:field_86)
assert_instance_of Array, format.field_validations(:field_83)
assert_equal 'XYZ', format.field_validations(:field_83).first
assert_nil format.field_validations(:non_existent)
assert_equal '%05d', format.record_formatter.instance_variable_get('@field_list').at(1)[:format]
end
end
|
<?php
namespace NilPortugues\Serializer;
use Closure;
use NilPortugues\Serializer\Serializer\InternalClasses\SplFixedArraySerializer;
use NilPortugues\Serializer\Strategy\StrategyInterface;
use ReflectionClass;
use ReflectionException;
use SplObjectStorage;
class Serializer
{
const CLASS_IDENTIFIER_KEY = '@type';
const CLASS_PARENT_KEY = '@parent';
const SCALAR_TYPE = '@scalar';
const SCALAR_VALUE = '@value';
const NULL_VAR = null;
const MAP_TYPE = '@map';
/**
* Storage for object.
*
* Used for recursion
*
* @var SplObjectStorage
*/
protected static $objectStorage;
/**
* Object mapping for recursion.
*
* @var array
*/
protected static $objectMapping = [];
/**
* Object mapping index.
*
* @var int
*/
protected static $objectMappingIndex = 0;
/**
* @var \NilPortugues\Serializer\Strategy\StrategyInterface|\NilPortugues\Serializer\Strategy\JsonStrategy
*/
protected $serializationStrategy;
/**
* @var array
*/
private $dateTimeClassType = ['DateTime', 'DateTimeImmutable', 'DateTimeZone', 'DateInterval', 'DatePeriod'];
/**
* @var array
*/
protected $serializationMap = [
'array' => 'serializeArray',
'integer' => 'serializeScalar',
'double' => 'serializeScalar',
'boolean' => 'serializeScalar',
'string' => 'serializeScalar',
];
/**
* Hack specific serialization classes.
*
* @var array
*/
protected $unserializationMapHHVM = [];
/**
* @param StrategyInterface $strategy
*/
public function __construct(StrategyInterface $strategy)
{
$this->serializationStrategy = $strategy;
}
/**
     * This is handy, especially for adding additional data before the
* serialization process takes place using the transformer public methods, if any.
*
* @return StrategyInterface
*/
public function getTransformer()
{
return $this->serializationStrategy;
}
/**
* Serialize the value in JSON.
*
* @param mixed $value
*
* @return string JSON encoded
*
* @throws SerializerException
*/
public function serialize($value)
{
$this->reset();
return $this->serializationStrategy->serialize($this->serializeData($value));
}
/**
* Reset variables.
*/
protected function reset()
{
self::$objectStorage = new SplObjectStorage();
self::$objectMapping = [];
self::$objectMappingIndex = 0;
}
/**
* Parse the data to be json encoded.
*
* @param mixed $value
*
* @return mixed
*
* @throws SerializerException
*/
protected function serializeData($value)
{
$this->guardForUnsupportedValues($value);
if ($this->isInstanceOf($value, 'SplFixedArray')) {
return SplFixedArraySerializer::serialize($this, $value);
}
if (\is_object($value)) {
return $this->serializeObject($value);
}
$type = (\gettype($value) && $value !== null) ? \gettype($value) : 'string';
$func = $this->serializationMap[$type];
return $this->$func($value);
}
/**
* Check if a class is instance or extends from the expected instance.
*
* @param mixed $value
* @param string $classFQN
*
* @return bool
*/
private function isInstanceOf($value, $classFQN)
{
return is_object($value)
&& (strtolower(get_class($value)) === strtolower($classFQN) || \is_subclass_of($value, $classFQN, true));
}
/**
* @param mixed $value
*
* @throws SerializerException
*/
protected function guardForUnsupportedValues($value)
{
if ($value instanceof Closure) {
throw new SerializerException('Closures are not supported in Serializer');
}
if ($value instanceof \DatePeriod) {
throw new SerializerException(
'DatePeriod is not supported in Serializer. Loop through it and serialize the output.'
);
}
if (\is_resource($value)) {
throw new SerializerException('Resource is not supported in Serializer');
}
}
/**
* Unserialize the value from string.
*
* @param mixed $value
*
* @return mixed
*/
public function unserialize($value)
{
if (\is_array($value) && isset($value[self::SCALAR_TYPE])) {
return $this->unserializeData($value);
}
$this->reset();
return $this->unserializeData($this->serializationStrategy->unserialize($value));
}
/**
* Parse the json decode to convert to objects again.
*
* @param mixed $value
*
* @return mixed
*/
protected function unserializeData($value)
{
if ($value === null || !is_array($value)) {
return $value;
}
if (isset($value[self::MAP_TYPE]) && !isset($value[self::CLASS_IDENTIFIER_KEY])) {
$value = $value[self::SCALAR_VALUE];
return $this->unserializeData($value);
}
if (isset($value[self::SCALAR_TYPE])) {
return $this->getScalarValue($value);
}
if (isset($value[self::CLASS_PARENT_KEY]) && 0 === strcmp($value[self::CLASS_PARENT_KEY], 'SplFixedArray')) {
return SplFixedArraySerializer::unserialize($this, $value[self::CLASS_IDENTIFIER_KEY], $value);
}
if (isset($value[self::CLASS_IDENTIFIER_KEY])) {
return $this->unserializeObject($value);
}
return \array_map([$this, __FUNCTION__], $value);
}
/**
* @param $value
*
* @return float|int|null|bool
*/
protected function getScalarValue($value)
{
switch ($value[self::SCALAR_TYPE]) {
case 'integer':
return \intval($value[self::SCALAR_VALUE]);
case 'float':
return \floatval($value[self::SCALAR_VALUE]);
case 'boolean':
return $value[self::SCALAR_VALUE];
case 'NULL':
return self::NULL_VAR;
}
return $value[self::SCALAR_VALUE];
}
/**
* Convert the serialized array into an object.
*
* @param array $value
*
* @return object
*
* @throws SerializerException
*/
protected function unserializeObject(array $value)
{
$className = $value[self::CLASS_IDENTIFIER_KEY];
unset($value[self::CLASS_IDENTIFIER_KEY]);
if (isset($value[self::MAP_TYPE])) {
unset($value[self::MAP_TYPE]);
unset($value[self::SCALAR_VALUE]);
}
if ($className[0] === '@') {
return self::$objectMapping[substr($className, 1)];
}
if (!class_exists($className)) {
throw new SerializerException('Unable to find class '.$className);
}
return (null === ($obj = $this->unserializeDateTimeFamilyObject($value, $className)))
? $this->unserializeUserDefinedObject($value, $className) : $obj;
}
/**
* @param array $value
* @param string $className
*
* @return mixed
*/
protected function unserializeDateTimeFamilyObject(array $value, $className)
{
$obj = null;
if ($this->isDateTimeFamilyObject($className)) {
$obj = $this->restoreUsingUnserialize($className, $value);
self::$objectMapping[self::$objectMappingIndex++] = $obj;
}
return $obj;
}
/**
* @param string $className
*
* @return bool
*/
protected function isDateTimeFamilyObject($className)
{
$isDateTime = false;
foreach ($this->dateTimeClassType as $class) {
$isDateTime = $isDateTime || \is_subclass_of($className, $class, true) || $class === $className;
}
return $isDateTime;
}
/**
* @param string $className
* @param array $attributes
*
* @return mixed
*/
protected function restoreUsingUnserialize($className, array $attributes)
{
foreach ($attributes as &$attribute) {
$attribute = $this->unserializeData($attribute);
}
$obj = (object) $attributes;
$serialized = \preg_replace(
'|^O:\d+:"\w+":|',
'O:'.strlen($className).':"'.$className.'":',
\serialize($obj)
);
return \unserialize($serialized);
}
/**
* @param array $value
* @param string $className
*
* @return object
*/
protected function unserializeUserDefinedObject(array $value, $className)
{
$ref = new ReflectionClass($className);
$obj = $ref->newInstanceWithoutConstructor();
self::$objectMapping[self::$objectMappingIndex++] = $obj;
$this->setUnserializedObjectProperties($value, $ref, $obj);
if (\method_exists($obj, '__wakeup')) {
$obj->__wakeup();
}
return $obj;
}
/**
* @param array $value
* @param ReflectionClass $ref
* @param mixed $obj
*
* @return mixed
*/
protected function setUnserializedObjectProperties(array $value, ReflectionClass $ref, $obj)
{
foreach ($value as $property => $propertyValue) {
try {
$propRef = $ref->getProperty($property);
$propRef->setAccessible(true);
$propRef->setValue($obj, $this->unserializeData($propertyValue));
} catch (ReflectionException $e) {
$obj->$property = $this->unserializeData($propertyValue);
}
}
return $obj;
}
/**
* @param $value
*
* @return string
*/
protected function serializeScalar($value)
{
$type = \gettype($value);
if ($type === 'double') {
$type = 'float';
}
return [
self::SCALAR_TYPE => $type,
self::SCALAR_VALUE => $value,
];
}
/**
* @param array $value
*
* @return array
*/
protected function serializeArray(array $value)
{
if (\array_key_exists(self::MAP_TYPE, $value)) {
return $value;
}
$toArray = [self::MAP_TYPE => 'array', self::SCALAR_VALUE => []];
foreach ($value as $key => $field) {
$toArray[self::SCALAR_VALUE][$key] = $this->serializeData($field);
}
return $this->serializeData($toArray);
}
/**
* Extract the data from an object.
*
* @param mixed $value
*
* @return array
*/
protected function serializeObject($value)
{
if (self::$objectStorage->contains($value)) {
return [self::CLASS_IDENTIFIER_KEY => '@'.self::$objectStorage[$value]];
}
self::$objectStorage->attach($value, self::$objectMappingIndex++);
$reflection = new ReflectionClass($value);
$className = $reflection->getName();
return $this->serializeInternalClass($value, $className, $reflection);
}
/**
* @param mixed $value
* @param string $className
* @param ReflectionClass $ref
*
* @return array
*/
protected function serializeInternalClass($value, $className, ReflectionClass $ref)
{
$paramsToSerialize = $this->getObjectProperties($ref, $value);
$data = [self::CLASS_IDENTIFIER_KEY => $className];
$data += \array_map([$this, 'serializeData'], $this->extractObjectData($value, $ref, $paramsToSerialize));
return $data;
}
/**
* Return the list of properties to be serialized.
*
* @param ReflectionClass $ref
* @param $value
*
* @return array
*/
protected function getObjectProperties(ReflectionClass $ref, $value)
{
$props = [];
foreach ($ref->getProperties() as $prop) {
$props[] = $prop->getName();
}
return \array_unique(\array_merge($props, \array_keys(\get_object_vars($value))));
}
/**
* Extract the object data.
*
* @param mixed $value
* @param \ReflectionClass $rc
* @param array $properties
*
* @return array
*/
protected function extractObjectData($value, ReflectionClass $rc, array $properties)
{
$data = [];
$this->extractCurrentObjectProperties($value, $rc, $properties, $data);
$this->extractAllInhertitedProperties($value, $rc, $data);
return $data;
}
/**
* @param mixed $value
* @param ReflectionClass $rc
* @param array $properties
* @param array $data
*/
protected function extractCurrentObjectProperties($value, ReflectionClass $rc, array $properties, array &$data)
{
foreach ($properties as $propertyName) {
try {
$propRef = $rc->getProperty($propertyName);
$propRef->setAccessible(true);
$data[$propertyName] = $propRef->getValue($value);
} catch (ReflectionException $e) {
$data[$propertyName] = $value->$propertyName;
}
}
}
/**
* @param mixed $value
* @param ReflectionClass $rc
* @param array $data
*/
protected function extractAllInhertitedProperties($value, ReflectionClass $rc, array &$data)
{
do {
$rp = array();
/* @var $property \ReflectionProperty */
foreach ($rc->getProperties() as $property) {
$property->setAccessible(true);
$rp[$property->getName()] = $property->getValue($value);
}
$data = \array_merge($rp, $data);
} while ($rc = $rc->getParentClass());
}
}
|
<?php
namespace Tests;
use PHPUnit\Framework\TestCase;
use Logme\Soap\Ups\TransactionReference;
class TransactionReferenceTest extends TestCase
{
/**
* @test Sets the customer context attribute value.
*/
public function it_sets_customer_context_attribute_value()
{
$transactionReference = new TransactionReference();
$transactionReference->customerContext = 'Send my identifier number.';
$this->assertEquals('Send my identifier number.', $transactionReference->customerContext);
}
/**
* @test Tries to set customer context with string greater than 512.
* @expectedException Exception
* @expectedExceptionMessage The string length of customer context must be less or equal to 512.
*/
public function it_tries_to_set_customer_context_with_string_greater_than_512()
{
$transactionReference = new TransactionReference();
$str = str_repeat('a', 513);
$transactionReference->customerContext = $str;
}
/**
* @test Tries to set an unrecognized attribute.
* @expectedException Exception
* @expectedExceptionMessage Unrecognized method setBanana.
*/
public function it_tries_to_set_an_unrecognized_method()
{
$transactionReference = new TransactionReference();
$transactionReference->banana = 'banana';
}
/**
* @test Tries to get value from an unrecognized attribute.
* @expectedException Exception
* @expectedExceptionMessage Unrecognized attribute banana.
*/
public function it_tries_to_get_value_from_an_unrecognized_attribute()
{
$transactionReference = new TransactionReference();
$banana = $transactionReference->banana;
}
/**
* @test Gets value with uppercase attribute for first letter.
*/
public function it_gets_value_with_uppercase_attribute_for_first_letter()
{
$transactionReference = new TransactionReference();
$transactionReference->customerContext = 'Thing';
$thing = $transactionReference->CustomerContext;
$this->assertEquals('Thing', $thing);
}
}
|
type Pedigree{T<:Integer}
sire::Vector{T}
dam::Vector{T}
perm::Vector{T}
lappt::Vector{T}
end
function Pedigree{T<:Integer}(sire::Vector{T},dam::Vector{T})
(n = length(sire)) == length(dam) || throw(DimensionMismatch(""))
for i in 1:n
zero(T) ≤ sire[i] ≤ n && zero(T) ≤ dam[i] ≤ n ||
error("row $i: sire and dam must be in 0:$n")
sire[i] == i && error("Malformed pedigree, sire[$i] == $i")
dam[i] == i && error("Malformed pedigree, dam[$i] == $i")
sire[i] < i && dam[i] < i || error("order failed at $i, with sire and dam, $(sire[i]), $(dam[i])")
end
Pedigree(sire,dam,T[],T[])
end
## Create an ordering by longest ancestral path
function laporder{T<:Integer}(sire::Vector{T},dam::Vector{T})
(n = length(sire)) == length(dam) || throw(DimensionMismatch(""))
anc = sizehint(IntSet(),n) # current set of ancestors
for i in 1:n
0 ≤ sire[i] ≤ n && 0 ≤ dam[i] ≤ n || error("row $i: sire and dam must be in 0:$n")
sire[i] == 0 && dam[i] == 0 && push!(anc,i) # first generation
end
ord = sizehint(collect(anc),n) # anc first in ord, also reserve space
pop = setdiff!(IntSet(1:n),anc) # animals who have not yet been sorted
lappt = sizehint([0,length(anc)],20) # pointer to start of each lap level
push!(anc,0) # add zero to the set of ancestors already done
nextgen = sizehint(IntSet(),n)
while length(pop) > 0
empty!(nextgen)
for i in pop
sire[i] ∈ anc && dam[i] ∈ anc && push!(nextgen,i)
end
length(nextgen) > 0 || error("algorithm failure, empty nextgen")
append!(ord,collect(nextgen))
push!(lappt,lappt[end]+length(nextgen))
union!(anc,nextgen)
setdiff!(pop,nextgen)
end
length(ord) == n || error("Logic error in laporder: ord is not length n = $n")
invp = invperm(ord) # checks that ord is indeed a permutation
ss = Array(T,(n,))
dd = Array(T,(n,))
for i in 1:n
j = ord[i]
ss[i] = sire[j] > 0 ? invp[sire[j]] : zero(T)
dd[i] = dam[j] > 0 ? invp[dam[j]] : zero(T)
end
ord,lappt,ss,dd
end
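## A minimal worked example (assumed data, not from the original file): for a
## three-animal pedigree in which animals 1 and 2 are founders and animal 3 has
## sire 1 and dam 2,
##
##   sire = [0, 0, 1]; dam = [0, 0, 2]
##   ord, lappt, ss, dd = laporder(sire, dam)
##
## returns ord == [1, 2, 3] (founders first, then their offspring),
## lappt == [0, 2, 3] (level pointers: animals 1-2 at lap 0, animal 3 at lap 1),
## and ss == [0, 0, 1], dd == [0, 0, 2] (parents re-indexed by the new ordering).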
|
package com.github.donkirkby.vograbulary.client;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.github.donkirkby.vograbulary.WordDisplay;
import com.github.donkirkby.vograbulary.WordDisplay.WordDisplayListener;
import com.github.donkirkby.vograbulary.bacronyms.BacronymsScreen;
import com.github.donkirkby.vograbulary.bacronyms.Controller;
import com.github.donkirkby.vograbulary.bacronyms.Puzzle;
import com.github.donkirkby.vograbulary.ultraghost.WordList;
import com.google.gwt.animation.client.Animation;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.ParagraphElement;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.logical.shared.ResizeEvent;
import com.google.gwt.event.logical.shared.ResizeHandler;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.AbsolutePanel;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.Widget;
public class BacronymsPresenter
extends VograbularyPresenter implements BacronymsScreen {
public static final String HISTORY_TOKEN = "bacronyms";
private class WordAnimation extends Animation {
private WordDisplay word;
private double startRotation;
private double targetRotation;
public WordAnimation(WordDisplay word) {
this.word = word;
}
public WordDisplay getWord() {
return word;
}
@Override
protected void onComplete() {
controller.solve();
}
@Override
protected void onUpdate(double progress) {
word.setRotation(
startRotation + (targetRotation - startRotation) * progress);
}
public void rotateWord(double targetRotation) {
startRotation = word.getRotation();
this.targetRotation = targetRotation;
word.setTop(word.getTop());
run(1000);
}
}
private Controller controller;
private Puzzle puzzle;
private State state;
private List<WordDisplay> wordDisplays;
private List<WordAnimation> animations;
private GwtLetterDisplayFactory displayFactory;
private GwtScheduler scheduler = new GwtScheduler();
private WordDisplayListener wordListener = new WordDisplayListener() {
@Override
public void onClick(WordDisplay wordDisplay) {
for (int i = 0; i < animations.size(); i++) {
WordAnimation wordAnimation = animations.get(i);
if (wordAnimation.getWord() == wordDisplay &&
Math.abs(wordDisplay.getRotation()) < 0.0001) {
wordAnimation.rotateWord(Math.PI);
puzzle.setSelectedIndex(i);
}
else {
wordAnimation.rotateWord(0);
}
}
}
};
interface BacronymsCompositeUiBinder extends
UiBinder<Widget, BacronymsPresenter> {
}
@UiField
AbsolutePanel wordPanel;
@UiField
ParagraphElement stateText;
@UiField
Button nextButton;
private static BacronymsCompositeUiBinder uiBinder =
GWT.create(BacronymsCompositeUiBinder.class);
public BacronymsPresenter() {
initWidget(uiBinder.createAndBindUi(this));
displayFactory = new GwtLetterDisplayFactory(wordPanel);
wordDisplays = new ArrayList<>();
animations = new ArrayList<>();
for (int i = 0; i < 3; i++) {
WordDisplay wordDisplay = new WordDisplay(displayFactory);
wordDisplay.addListener(wordListener);
wordDisplays.add(wordDisplay);
animations.add(new WordAnimation(wordDisplay));
}
Window.addResizeHandler(new ResizeHandler() {
@Override
public void onResize(ResizeEvent event) {
layout(3);
}
});
String puzzleLines = Assets.INSTANCE.bacronyms().getText();
String wordListText = Assets.INSTANCE.wordList().getText();
WordList wordList = new WordList();
wordList.read(Arrays.asList(wordListText.split("\\n")));
controller = new Controller();
controller.setScreen(this);
controller.setWordList(wordList);
controller.loadPuzzles(Arrays.asList(puzzleLines.split("\\n")));
controller.next();
}
private void layout(int retries) {
int left = 0;
for (WordDisplay wordDisplay : wordDisplays) {
wordDisplay.setLeft(left);
wordDisplay.setTop(wordDisplay.getTop());
left += wordDisplay.getWidth() + 20;
}
int layoutWidth = wordPanel.getOffsetWidth();
if (retries > 0 && (left < layoutWidth * .75 || layoutWidth < left)) {
float textSize = 0;
boolean isSizeCalculated = false;
for (WordDisplay wordDisplay : wordDisplays) {
if ( ! isSizeCalculated) {
float oldTextSize = wordDisplay.getTextSize();
float correction = layoutWidth * 0.9f / left;
textSize = oldTextSize * correction;
wordPanel.setHeight(textSize * 3 + "px");
isSizeCalculated = true;
}
wordDisplay.setTextSize(textSize);
wordDisplay.setTop((int)textSize);
}
layout(retries-1);
}
}
@Override
public Puzzle getPuzzle() {
return puzzle;
}
@Override
public void setPuzzle(Puzzle puzzle) {
this.puzzle = puzzle;
displayWords();
}
@Override
public State getState() {
return state;
}
@Override
public void setState(State state) {
this.state = state;
stateText.setInnerText(state.name());
nextButton.setVisible(state == State.SOLVED);
}
private void displayWords() {
for (int i = 0; i < 3; i++) {
wordDisplays.get(i).setWord(puzzle.getWord(i));
}
scheduler.scheduleDeferred(new Runnable() {
@Override
public void run() {
layout(3);
}
});
}
@UiHandler("nextButton")
void next(ClickEvent e) {
controller.next();
}
}
|
import { ComponentFixture, TestBed, async, fakeAsync } from '@angular/core/testing';
import { NO_ERRORS_SCHEMA, ApplicationInitStatus, Component } from '@angular/core';
import { APP_BASE_HREF } from '@angular/common';
import { Router } from '@angular/router';
import { RouterTestingModule } from '@angular/router/testing';
import { Title } from '@angular/platform-browser';
import { AppComponent } from './app.component';
import { ScriptInjectorService } from 'features/utils';
// create dummy search component
@Component({
template: `Test`
})
export class TestNavigationComponent {
}
describe('AppComponent', () => {
let fixture: ComponentFixture<AppComponent>;
let component: AppComponent;
let router: Router;
let scriptInjectorServiceSpy: ScriptInjectorService;
beforeEach(
async(() => {
const titleStub = { getTitle: () => ({}) };
scriptInjectorServiceSpy = jasmine.createSpyObj('ScriptInjectorService', {
load: new Promise((resolve) => { resolve(); })
});
TestBed.configureTestingModule({
schemas: [NO_ERRORS_SCHEMA],
declarations: [AppComponent, TestNavigationComponent],
imports: [
RouterTestingModule.withRoutes([
{ path: 'test', component: TestNavigationComponent }
])
],
providers: [
{ provide: APP_BASE_HREF, useValue: '/' },
{ provide: Title, useValue: titleStub },
{ provide: ScriptInjectorService, useValue: scriptInjectorServiceSpy }
]
}).compileComponents();
})
);
// beforeEach(async () => {
// // until https://github.com/angular/angular/issues/24218 is fixed
// await TestBed.get(ApplicationInitStatus).donePromise;
// });
beforeEach(() => {
fixture = TestBed.createComponent(AppComponent);
component = fixture.debugElement.componentInstance;
router = TestBed.get(Router);
fixture.detectChanges();
});
it('should create a component', () => {
expect(component).toBeTruthy();
});
it('should have current year', () => {
expect(component.curYear).toBe(new Date().getFullYear());
});
it('should load gtag script', fakeAsync(() => {
expect(scriptInjectorServiceSpy.load).toHaveBeenCalledWith('gtag');
}));
describe('ngOnInit', () => {
beforeEach((done) => {
// inject gtag script
const script = document.createElement('script');
script.type = 'text/javascript';
script.async = true;
script.src = '../assets/js/gtag.js';
document.getElementsByTagName('head')[0].appendChild(script);
setTimeout(() => {
done();
}, 1000);
});
it('should call gtag on page navigation', async() => {
const titleStub: Title = fixture.debugElement.injector.get(Title);
spyOn(titleStub, 'getTitle');
component.ngOnInit();
await fixture.ngZone.run(async () => {
await router.navigate(['/test']);
});
expect(titleStub.getTitle).toHaveBeenCalled();
});
});
});
|
package models
type Machine struct {
ID int `json:"id"`
ItemProxy NamedAPIResource `json:"item"`
MoveProxy NamedAPIResource `json:"move"`
VersionGroupProxy NamedAPIResource `json:"version_group"`
Item *Item
Move *Move
VersionGroup *VersionGroup
}
|
const validator = require('validator');
const isEmpty = require('./is-empty');
const isString = require('./is-string');
module.exports = ({ firstName, lastName, phone }) => {
const errors = {};
// Check if first name is a string
if (!isEmpty(firstName) && !isString(firstName)) {
errors.firstName = 'First name must be a string';
}
// Check if first name length is valid
if (!isEmpty(firstName) && isString(firstName) && !validator.isLength(firstName.trim(), { min: 2, max: 15 })) {
errors.firstName = 'First name must be between 2 and 15 characters';
}
// Check if last name is a string
if (!isEmpty(lastName) && !isString(lastName)) {
errors.lastName = 'Last name must be a string';
}
// Check if last name length is valid
if (!isEmpty(lastName) && isString(lastName) && !validator.isLength(lastName.trim(), { min: 2, max: 20 })) {
errors.lastName = 'Last name must be between 2 and 20 characters';
}
// Check if phone is a string
if (!isEmpty(phone) && !isString(phone)) {
errors.phone = 'Phone must be a string';
}
// Check if phone is valid
if (!isEmpty(phone) && isString(phone) && !validator.isMobilePhone(phone, 'pl-PL')) {
errors.phone = 'Phone is invalid';
}
return {
errors,
isValid: isEmpty(errors),
};
};
|
import { test } from 'ember-qunit';
import { moduleFor } from 'dummy/tests/helpers/test-module-for-engine';
moduleFor('service:transfers', 'Unit | Service | transfers', {});
test('it can get and set transfer state', function(assert) {
const service = this.subject();
service.setTransferState({ tickets: [], email: 'test', message: 'Hello!' });
assert.deepEqual(service.getTransferState(), {
tickets: [],
email: 'test',
message: 'Hello!'
});
});
|
def post_tax_income()
puts "How much is earned per year?"
yearly = gets.chomp.to_i
puts "How much is rent per month?"
rent = gets.chomp.to_i
puts "How many miscellaneous bills are there per month?"
bills = gets.chomp.to_i
# This is assuming single taxable income for 2017.
case
when yearly <= 0
puts 'Invalid yearly income entered. Please enter a positive integer.'
return
when yearly <= 9325
post_tax = yearly - (0.10 * yearly)
when yearly <= 37950
post_tax = yearly - (932.50 + 0.15 * (yearly - 9325))
when yearly <= 91900
post_tax = yearly - (5226.25 + 0.25 * (yearly - 37950))
when yearly <= 191650
post_tax = yearly - (18713.75 + 0.28 * (yearly - 91900))
when yearly <= 416700
post_tax = yearly - (46643.75 + 0.33 * (yearly - 191650))
when yearly <= 418400
post_tax = yearly - (120910.25 + 0.35 * (yearly - 416700))
else
post_tax = yearly - (121505.25 + 0.396 * (yearly - 418400))
end
paycheck_pre = (yearly / 26).round(2)
paycheck_post = (post_tax / 26).round(2)
puts "\n$#{paycheck_pre} per paycheck pre-taxes."
puts "$#{paycheck_post} per paycheck post-taxes."
monthly = (post_tax / 12).round(2)
max_rent = (monthly / 3).round(2)
puts "\n$#{monthly} per month after taxes."
puts "$#{max_rent} is the maximum rent cost."
monthly_post_rent = (monthly - rent).round(2)
puts "$#{monthly_post_rent} per month after rent."
monthly_post_bills = (monthly - rent - bills).round(2)
puts "$#{monthly_post_bills} per month after all expenses."
# Assuming 30 days in a month.
daily_post = (monthly_post_bills / 30).round(2)
puts "\n$#{daily_post} can be spent on whatever you want per day."
end
post_tax_income()
|
import {AttributePath} from "./AttributePath";
import {AttributeValue} from "./AttributeValue";
import {ExpressionAttributes} from "./ExpressionAttributes";
import {FunctionExpression} from "./FunctionExpression";
export type ComparisonOperand = AttributePath|AttributeValue|FunctionExpression|any;
export interface BinaryComparisonPredicate {
/**
* The value against which the comparison subject will be compared.
*/
object: ComparisonOperand;
}
/**
* A comparison predicate asserting that the subject and object are equal.
*/
export interface EqualityExpressionPredicate extends BinaryComparisonPredicate {
type: 'Equals';
}
/**
* Create an expression predicate asserting that the subject is equal to the
* predicate.
*/
export function equals(
operand: ComparisonOperand
): EqualityExpressionPredicate {
return {
type: 'Equals',
object: operand,
};
}
/**
* A comparison predicate asserting that the subject and object are not equal.
*/
export interface InequalityExpressionPredicate extends BinaryComparisonPredicate {
type: 'NotEquals';
}
export function notEquals(
operand: ComparisonOperand
): InequalityExpressionPredicate {
return {
type: 'NotEquals',
object: operand,
}
}
/**
* A comparison predicate asserting that the subject is less than the object.
*/
export interface LessThanExpressionPredicate extends BinaryComparisonPredicate {
type: 'LessThan';
}
export function lessThan(
operand: ComparisonOperand
): LessThanExpressionPredicate {
return {
type: 'LessThan',
object: operand,
}
}
/**
* A comparison predicate asserting that the subject is less than or equal to
* the object.
*/
export interface LessThanOrEqualToExpressionPredicate extends BinaryComparisonPredicate {
type: 'LessThanOrEqualTo';
}
export function lessThanOrEqualTo(
operand: ComparisonOperand
): LessThanOrEqualToExpressionPredicate {
return {
type: 'LessThanOrEqualTo',
object: operand,
}
}
/**
* A comparison predicate asserting that the subject is greater than the object.
*/
export interface GreaterThanExpressionPredicate extends BinaryComparisonPredicate {
type: 'GreaterThan';
}
export function greaterThan(
operand: ComparisonOperand
): GreaterThanExpressionPredicate {
return {
type: 'GreaterThan',
object: operand,
}
}
/**
* A comparison predicate asserting that the subject is greater than or equal
* to the object.
*/
export interface GreaterThanOrEqualToExpressionPredicate extends BinaryComparisonPredicate {
type: 'GreaterThanOrEqualTo';
}
export function greaterThanOrEqualTo(
operand: ComparisonOperand
): GreaterThanOrEqualToExpressionPredicate {
return {
type: 'GreaterThanOrEqualTo',
object: operand,
}
}
/**
* A comparison predicate asserting that the subject is between two bounds.
*/
export interface BetweenExpressionPredicate {
type: 'Between';
lowerBound: ComparisonOperand;
upperBound: ComparisonOperand;
}
export function between(
lowerBound: ComparisonOperand,
upperBound: ComparisonOperand
): BetweenExpressionPredicate {
return {
type: 'Between',
lowerBound,
upperBound,
}
}
/**
* A comparison predicate asserting that the subject is equal to any member of
* the provided list of values.
*/
export interface MembershipExpressionPredicate {
type: 'Membership';
values: Array<ComparisonOperand>;
}
export function inList(
...operands: Array<ComparisonOperand>
): MembershipExpressionPredicate {
return {
type: 'Membership',
values: operands,
}
}
/**
* An object structure used as the base of all function expression predicates.
*/
export interface BaseFunctionExpressionPredicate {
type: 'Function';
name: string;
}
/**
* A comparison predicate asserting that the subject is contained in a given
* record.
*/
export interface AttributeExistsPredicate extends
BaseFunctionExpressionPredicate
{
name: 'attribute_exists';
}
export function attributeExists(): AttributeExistsPredicate {
return {
type: 'Function',
name: 'attribute_exists',
};
}
/**
* A comparison predicate asserting that the subject is **not** contained in a
* given record.
*/
export interface AttributeNotExistsPredicate extends
BaseFunctionExpressionPredicate
{
name: 'attribute_not_exists';
}
export function attributeNotExists(): AttributeNotExistsPredicate {
return {
type: 'Function',
name: 'attribute_not_exists',
};
}
export type AttributeType = 'S'|'SS'|'N'|'NS'|'B'|'BS'|'BOOL'|'NULL'|'L'|'M';
/**
* A comparison predicate asserting that the subject is of the specified type.
*/
export interface AttributeTypePredicate extends
BaseFunctionExpressionPredicate
{
name: 'attribute_type';
expected: AttributeType;
}
export function attributeType(expected: AttributeType): AttributeTypePredicate {
return {
type: 'Function',
name: 'attribute_type',
expected,
};
}
/**
* A comparison predicate asserting that the value of the subject in a given
* record begins with the specified string.
*/
export interface BeginsWithPredicate extends
BaseFunctionExpressionPredicate
{
name: 'begins_with';
expected: string;
}
export function beginsWith(expected: string): BeginsWithPredicate {
return {
type: 'Function',
name: 'begins_with',
expected,
};
}
/**
* A comparison predicate asserting that the value of the subject in a given
* record contains the specified string.
*/
export interface ContainsPredicate extends
BaseFunctionExpressionPredicate
{
name: 'contains';
expected: string;
}
export function contains(expected: string): ContainsPredicate {
return {
type: 'Function',
name: 'contains',
expected,
};
}
export type FunctionExpressionPredicate =
AttributeExistsPredicate |
AttributeNotExistsPredicate |
AttributeTypePredicate |
BeginsWithPredicate |
ContainsPredicate;
export type ConditionExpressionPredicate =
EqualityExpressionPredicate |
InequalityExpressionPredicate |
LessThanExpressionPredicate |
LessThanOrEqualToExpressionPredicate |
GreaterThanExpressionPredicate |
GreaterThanOrEqualToExpressionPredicate |
BetweenExpressionPredicate |
MembershipExpressionPredicate |
FunctionExpressionPredicate;
/**
* Evaluate whether the provided value is a condition expression predicate.
*/
export function isConditionExpressionPredicate(
arg: any
): arg is ConditionExpressionPredicate {
if (arg && typeof arg === 'object') {
switch (arg.type) {
case 'Equals':
case 'NotEquals':
case 'LessThan':
case 'LessThanOrEqualTo':
case 'GreaterThan':
case 'GreaterThanOrEqualTo':
return arg.object !== undefined;
case 'Between':
return arg.lowerBound !== undefined
&& arg.upperBound !== undefined;
case 'Membership':
return Array.isArray(arg.values);
case 'Function':
switch (arg.name) {
case 'attribute_exists':
case 'attribute_not_exists':
return true;
case 'attribute_type':
case 'begins_with':
case 'contains':
return typeof arg.expected === 'string';
}
}
}
return false;
}
export interface ConditionExpressionSubject {
/**
* The path to the item attribute containing the subject of the comparison.
*/
subject: AttributePath|string;
}
export function isConditionExpressionSubject(
arg: any
): arg is ConditionExpressionSubject {
return Boolean(arg)
&& typeof arg === 'object'
&& (typeof arg.subject === 'string' || AttributePath.isAttributePath(arg.subject));
}
export type SimpleConditionExpression = ConditionExpressionSubject &
ConditionExpressionPredicate;
export type ConditionExpression =
SimpleConditionExpression |
AndExpression |
OrExpression |
NotExpression |
FunctionExpression;
/**
* A comparison expression asserting that all conditions in the provided list
* are true.
*/
export interface AndExpression {
type: 'And';
conditions: Array<ConditionExpression>;
}
/**
* A comparison expression asserting that one or more conditions in the provided
* list are true.
*/
export interface OrExpression {
type: 'Or';
conditions: Array<ConditionExpression>;
}
/**
* A comparison expression asserting that the provided condition is not true.
*/
export interface NotExpression {
type: 'Not';
condition: ConditionExpression;
}
/**
* Evaluates whether the provided value is a condition expression.
*/
export function isConditionExpression(arg: any): arg is ConditionExpression {
if (FunctionExpression.isFunctionExpression(arg)) {
return true;
}
if (Boolean(arg) && typeof arg === 'object') {
switch (arg.type) {
case 'Not':
return isConditionExpression(arg.condition);
case 'And':
case 'Or':
if (Array.isArray(arg.conditions)) {
for (const condition of arg.conditions) {
if (!isConditionExpression(condition)) {
return false;
}
}
return true;
}
return false;
default:
return isConditionExpressionSubject(arg)
&& isConditionExpressionPredicate(arg);
}
}
return false;
}
/**
* Convert the provided condition expression object to a string, escaping any
* values and attributes to expression-safe placeholders whose expansion value
* will be managed by the provided ExpressionAttributes object.
*/
export function serializeConditionExpression(
condition: ConditionExpression,
attributes: ExpressionAttributes
): string {
if (FunctionExpression.isFunctionExpression(condition)) {
return condition.serialize(attributes);
}
switch (condition.type) {
case 'Equals':
return serializeBinaryComparison(condition, attributes, '=');
case 'NotEquals':
return serializeBinaryComparison(condition, attributes, '<>');
case 'LessThan':
return serializeBinaryComparison(condition, attributes, '<');
case 'LessThanOrEqualTo':
return serializeBinaryComparison(condition, attributes, '<=');
case 'GreaterThan':
return serializeBinaryComparison(condition, attributes, '>');
case 'GreaterThanOrEqualTo':
return serializeBinaryComparison(condition, attributes, '>=');
case 'Between':
return `${
attributes.addName(condition.subject)
} BETWEEN ${
serializeOperand(condition.lowerBound, attributes)
} AND ${
serializeOperand(condition.upperBound, attributes)
}`;
case 'Membership':
return `${
attributes.addName(condition.subject)
} IN (${
condition.values.map(val => serializeOperand(val, attributes))
.join(', ')
})`;
case 'Function':
const subject = AttributePath.isAttributePath(condition.subject)
? condition.subject
: new AttributePath(condition.subject);
switch (condition.name) {
case 'attribute_exists':
case 'attribute_not_exists':
return (new FunctionExpression(condition.name, subject))
.serialize(attributes);
case 'attribute_type':
case 'begins_with':
case 'contains':
return (new FunctionExpression(
condition.name,
subject,
condition.expected
))
.serialize(attributes);
}
case 'Not':
return `NOT (${
serializeConditionExpression(condition.condition, attributes)
})`;
case 'And':
case 'Or':
if (condition.conditions.length === 1) {
return serializeConditionExpression(
condition.conditions[0],
attributes
);
}
return condition.conditions
.map(cond => `(${serializeConditionExpression(cond, attributes)})`)
.join(` ${condition.type.toUpperCase()} `);
}
}
function serializeBinaryComparison(
cond: BinaryComparisonPredicate & ConditionExpressionSubject,
attributes: ExpressionAttributes,
comparator: string
): string {
return `${
attributes.addName(cond.subject)
} ${comparator} ${
serializeOperand(cond.object, attributes)
}`;
}
function serializeOperand(
operand: ComparisonOperand,
attributes: ExpressionAttributes
): string {
if (FunctionExpression.isFunctionExpression(operand)) {
return operand.serialize(attributes);
}
return AttributePath.isAttributePath(operand)
? attributes.addName(operand)
: attributes.addValue(operand);
}
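// A minimal usage sketch, kept as a comment so this module has no side effects.
// It assumes ExpressionAttributes can be constructed with no arguments, which is
// not shown in this file; the exact placeholder names it generates may differ.
//
//   const attributes = new ExpressionAttributes();
//   const expression = serializeConditionExpression(
//       {
//           type: 'And',
//           conditions: [
//               {subject: 'status', ...equals('ACTIVE')},
//               {subject: 'age', ...greaterThanOrEqualTo(18)},
//           ],
//       },
//       attributes
//   );
//   // expression reads roughly "(#attr0 = :val1) AND (#attr2 >= :val3)", and the
//   // placeholder-to-name/value mappings are collected on `attributes`.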
|
<?php
if (! function_exists('phone_format')) {
/**
* Format a string of digits into groups of four separated by dashes.
*
* @param string $str
*
* @return string
*/
function phone_format($str)
{
// chunk_split() appends the separator after every chunk, including the
// final (possibly partial) one, so strip the trailing dash.
return rtrim(chunk_split($str, 4, '-'), '-');
}
}
|
trait Higher[F[_]]
trait Box[A]
object Box {
implicit def HigherBox = new Higher[Box] {}
}
object Foo {
val box = implicitly[Higher[Box]] // compiles fine !!!
type Bar[A] = Box[A]
val bar = implicitly[Higher[Bar]] // <-- this doesn't compile in 2.10.1-RC1, but does in 2.10.0 !!!
}
|
-- Business tables related to settlement
create table SETTLE_STATEMT
(
SETTLE_ID BIGINT primary key not null auto_increment comment 'Settlement statement primary key',
SETTLE_NO VARCHAR(12) not null comment 'Settlement number: last two digits of the store number + date + 2-digit random code',
YEAR char(4) not null comment 'Year',
MONTH char(2) not null comment 'Month',
STORE_NO VARCHAR(18) not null comment 'Store number',
STATUS char(1) not null comment 'Status: 0 - awaiting payment; 1 - payment completed; 9 - voided',
TOTAL_AMT DECIMAL(25) comment 'Total order amount',
MONTHLY_CHARGE DECIMAL(20) comment 'Monthly rental fee',
COMMISSION DECIMAL(20) comment 'Commission',
OTHER_CHARGE DECIMAL(20) comment 'Other deductions',
PENDING_PAYMENT DECIMAL(20) comment 'Amount pending settlement',
ALREADY_PAID DECIMAL(20) comment 'Amount already settled',
MEMO varchar(512) comment 'Memo',
INSERT_TIME TIMESTAMP not null DEFAULT NOW() comment 'Insert time',
UPDATE_TIME TIMESTAMP not null DEFAULT NOW() comment 'Update time'
) comment='Settlement statement table';
create table SETTLE_ITEM_ORDER
(
ITEM_ID BIGINT primary key not null auto_increment comment 'Settlement item primary key',
SETTLE_ID BIGINT not null comment 'Settlement statement primary key',
ORDER_NO VARCHAR(20) not null comment 'Order number',
TOTAL_AMT DECIMAL(25) comment 'Total order amount',
COMMISSION DECIMAL(20) comment 'Commission',
OTHER_CHARGE DECIMAL(20) comment 'Other deductions',
INSERT_TIME TIMESTAMP not null DEFAULT NOW() comment 'Insert time',
UPDATE_TIME TIMESTAMP not null DEFAULT NOW() comment 'Update time'
) comment='Settlement item (order) table';
CREATE UNIQUE INDEX IDX_SETTLE_STATEMT_NO ON SETTLE_STATEMT(SETTLE_NO);
CREATE INDEX IDX_SETTLE_ITEM_ORDER_SETTLE_ID ON SETTLE_ITEM_ORDER(SETTLE_ID);
|
import 'package:flutter/material.dart';
/// This extension is for form validations using the [FormKey] approach
/// Import this file in any form view and call the methods from the [Buildcontext]'s
/// [context] variable
///
/// This extension still needs improvement
extension ValidationExtension on BuildContext {
String? validateEmail(String? email) {
if (email == null || email.isEmpty) return 'Field cannot be empty';
bool isEmailValid = RegExp(
r'^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$')
.hasMatch(email);
return (isEmailValid) ? null : 'Please enter a valid email';
}
String? validatePassword(String? password) {
if (password == null || password.isEmpty) return 'Field cannot be empty';
// The complexity check below is intentionally disabled for now, so any
// non-empty password is accepted.
/*bool isPasswordValid = (password.length >= 8 &&
RegExp(r"^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[!@#\$&*~]).{8,}$")
.hasMatch(password));
return (isPasswordValid) ? null : 'Please enter a valid password';*/
return null;
}
String? validateConfirmPassword(String password, String? confirmPassword) {
if (confirmPassword == null || confirmPassword.isEmpty)
return 'Field cannot be empty';
bool isValid = password == confirmPassword;
return (isValid) ? null : 'The passwords do not match';
}
String? validateName(String? name) {
if (name == null || name.isEmpty) return 'Field cannot be empty';
bool isNameValid = name.length >= 3;
return (isNameValid) ? null : 'Please enter a valid name';
}
String? validatePhoneNumber(String? phone) {
if (phone == null || phone.isEmpty) return 'Field cannot be empty';
bool isPhoneValid = phone.length >= 3;
return (isPhoneValid) ? null : 'Please enter a valid phone number';
}
}
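// A minimal usage sketch (hypothetical form field, not part of this file): the
// extension methods match FormFieldValidator<String>, so inside a widget's
// build method they can be passed straight to a form field.
//
//   TextFormField(
//     decoration: const InputDecoration(labelText: 'Email'),
//     validator: context.validateEmail,
//   )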
|
using System;
using Csla;
namespace ParentLoadROSoftDelete.DataAccess.ERCLevel
{
/// <summary>
/// DTO for F07_Country_Child type
/// </summary>
public partial class F07_Country_ChildDto
{
/// <summary>
/// Gets or sets the parent Country ID.
/// </summary>
/// <value>The Country ID.</value>
public int Parent_Country_ID { get; set; }
/// <summary>
/// Gets or sets the Regions Child Name.
/// </summary>
/// <value>The Country Child Name.</value>
public string Country_Child_Name { get; set; }
}
}
|
#### Flutter
>https://flutter.io/
>https://github.com/flutter/flutter
>Environment variables
```bash
export PATH=/Users/zl/code/flutter/bin:$PATH
export PUB_HOSTED_URL=https://pub.flutter-io.cn
export FLUTTER_STORAGE_BASE_URL=https://storage.flutter-io.cn
```
```bash
vim ~/.bash_profile
source ~/.bash_profile
```
>Create the required folders and grant ownership
> Create the Cellar, opt, and Frameworks folders under /usr/local/
>Grant ownership
```bash
sudo chown -R $(whoami) /usr/local/Cellar
sudo chown -R `whoami`:admin /usr/local/opt
sudo chown -R `whoami`:admin /usr/local/Frameworks
sudo chown -R `whoami`:admin /usr/local/bin
sudo chown -R `whoami`:admin /usr/local/share
```
```bash
xcode-select --install
```
>Download brew at https://brew.sh/
>Check the environment
```bash
flutter doctor
```
>Fetch the project's packages
```bash
flutter packages get
```
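>Quick sanity check once the setup is done (`my_app` below is just a placeholder project name):
```bash
flutter --version
flutter create my_app
cd my_app && flutter run
```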
|
using System.IO;
using System.Linq;
using Data;
using Microsoft.AspNetCore.Mvc;
using Models.Diagnosis;
using Serilog;
using System;
using System.Collections.Generic;
namespace WebAPI.Controllers
{
[Route("[Controller]")]
[ApiController]
public class SurgeryController : ControllerBase
{
//Dependency Injection
private readonly IRepository<Surgery> _repo;
public SurgeryController(IRepository<Surgery> context)
{
_repo = context;
}
// GET: surgery/Get/All
[HttpGet("Get/All")]//Gets list of all surgeries
public IActionResult GetAll()
{
try
{
List<Surgery> s = _repo.GetAll().ToList();
if (s.Count == 0)
throw new FileNotFoundException("No data found");
return Ok(s);
}
catch (Exception e)
{
Log.Error(e.Message);
return BadRequest("Failed to update");//Logs all bad requests into separate file
}
}
// GET: surgery/Get/{id}
[HttpGet("Get/{id}")]//Gets surgery by Id
public IActionResult GetById(int id)
{
try
{
var surgery = _repo.GetById(id);
if (surgery == null)
throw new InvalidDataException("Invalid Id");
return Ok(surgery);
}
catch (Exception e)
{
Log.Error(e.Message);
return BadRequest("Failed to update");
}
}
// DELETE surgery/delete/{Id}
[HttpDelete("Delete/{id}")]//Deletes surgery by Id
public IActionResult Delete([FromBody] Surgery p_surgery)
{
try
{
if (p_surgery == null)
throw new InvalidDataException("Delete failed!");
_repo.Delete(p_surgery);
_repo.Save();
return Ok();
}
catch (Exception e)
{
Log.Error(e.Message);
return BadRequest("Failed to update");
}
}
// PUT surgery/Update/{Id}
[HttpPut("Update/{id}")]//Updates surgery by Id
public IActionResult Update(int id, [FromBody] Surgery p_surgery)
{
try
{
p_surgery.Id = id;
_repo.Update(p_surgery);
_repo.Save();
return Ok();
}
catch (Exception e)
{
Log.Error(e.Message);
return BadRequest("Failed to update");
}
}
// POST surgery/Add
[HttpPost("Add")]//Adds new surgery info
public IActionResult Add([FromBody] Surgery p_surgery)
{
try
{
if (p_surgery == null)
throw new InvalidDataException("Invalid data!");
_repo.Create(p_surgery);
_repo.Save();
return Created("Surgery/Add", p_surgery);
}
catch (Exception e)
{
Log.Error(e.Message);
return BadRequest("Failed to update");
}
}
}
}
|
LDA 50 Push two numbers, 1 and 2, to the stack.
PUSH
LDA 51
PUSH
LDA 52 Clear the accumulator to ensure pop actually works.
POP 0 Then, pop the two numbers in order and output them.
OUT 0 They should be displayed in reverse order, so 2 then 1.
POP
OUT
HALT
50 DATA 1
51 DATA 2
52 DATA 0
|
package com.example.weatherapp.data.network
import com.example.weatherapp.data.model.WeatherModel
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import javax.inject.Inject
class OpenWeatherMapService @Inject constructor(private val api : OpenWeatherMapApiClient) {
suspend fun getWeather(lat: Double, lon: Double) : WeatherModel? {
return withContext(Dispatchers.IO) {
val response = api.getWeather(lat, lon)
response.body()
}
}
}
|
using System;
using System.Collections.Generic;
using CompanyName.MyMeetings.Modules.Meetings.Application.Contracts;
namespace CompanyName.MyMeetings.Modules.Meetings.Application.MeetingComments.GetMeetingCommentLikes
{
public class GetMeetingCommentLikersQuery : IQuery<List<MeetingCommentLikerDto>>
{
public Guid MeetingCommentId { get; }
public GetMeetingCommentLikersQuery(Guid meetingCommentId)
{
MeetingCommentId = meetingCommentId;
}
}
}
|