content stringlengths 4 1.04M | lang stringclasses 358 values | score int64 0 5 | repo_name stringlengths 5 114 | repo_path stringlengths 4 229 | repo_licenses listlengths 1 8 |
|---|---|---|---|---|---|
% Parser for C programs
% Jim Cordy, January 2008
% Using Gnu C grammar
include "c.grm"
% Main function
% Match the whole input as a [program]; no transformation rules are
% applied, so this acts purely as a parse/validation pass over the input.
function main
match [program]
P [program]
end function
| TXL | 3 | grammarware/slps | topics/grammars/c/cordy-malton-dahn/GnuC/c.txl | [
"BSD-3-Clause"
] |
module Svc {
@ Handles FATAL calls
passive component FatalHandler {
@ FATAL event receive port
sync input port FatalReceive: Svc.FatalEvent
}
}
| FORTRAN | 4 | AlperenCetin0/fprime | Svc/FatalHandler/FatalHandler.fpp | [
"Apache-2.0"
] |
#if defined(PERL_EXT_RE_DEBUG) && !defined(DEBUGGING)
# define DEBUGGING
#endif
#define PERL_NO_GET_CONTEXT
#include "EXTERN.h"
#include "perl.h"
#include "XSUB.h"
#include "re_comp.h"
START_EXTERN_C
extern REGEXP* my_re_compile (pTHX_ SV * const pattern, const U32 pm_flags);
extern I32 my_regexec (pTHX_ REGEXP * const prog, char* stringarg, char* strend,
char* strbeg, I32 minend, SV* screamer,
void* data, U32 flags);
extern char* my_re_intuit_start (pTHX_ REGEXP * const prog, SV *sv, char *strpos,
char *strend, const U32 flags,
struct re_scream_pos_data_s *data);
extern SV* my_re_intuit_string (pTHX_ REGEXP * const prog);
extern void my_regfree (pTHX_ REGEXP * const r);
extern void my_reg_numbered_buff_fetch(pTHX_ REGEXP * const rx, const I32 paren,
SV * const usesv);
extern void my_reg_numbered_buff_store(pTHX_ REGEXP * const rx, const I32 paren,
SV const * const value);
extern I32 my_reg_numbered_buff_length(pTHX_ REGEXP * const rx,
const SV * const sv, const I32 paren);
extern SV* my_reg_named_buff(pTHX_ REGEXP * const, SV * const, SV * const,
const U32);
extern SV* my_reg_named_buff_iter(pTHX_ REGEXP * const rx,
const SV * const lastkey, const U32 flags);
extern SV* my_reg_qr_package(pTHX_ REGEXP * const rx);
#if defined(USE_ITHREADS)
extern void* my_regdupe (pTHX_ REGEXP * const r, CLONE_PARAMS *param);
#endif
EXTERN_C const struct regexp_engine my_reg_engine;
END_EXTERN_C
const struct regexp_engine my_reg_engine = {
my_re_compile,
my_regexec,
my_re_intuit_start,
my_re_intuit_string,
my_regfree,
my_reg_numbered_buff_fetch,
my_reg_numbered_buff_store,
my_reg_numbered_buff_length,
my_reg_named_buff,
my_reg_named_buff_iter,
my_reg_qr_package,
#if defined(USE_ITHREADS)
my_regdupe
#endif
};
MODULE = re PACKAGE = re
void
install()
PPCODE:
PL_colorset = 0; /* Allow reinspection of ENV. */
/* PL_debug |= DEBUG_r_FLAG; */
XPUSHs(sv_2mortal(newSViv(PTR2IV(&my_reg_engine))));
void
regmust(sv)
SV * sv
PROTOTYPE: $
PREINIT:
REGEXP *re;
PPCODE:
{
if ((re = SvRX(sv))) /* assign deliberate */
{
SV *an = &PL_sv_no;
SV *fl = &PL_sv_no;
if (RX_ANCHORED_SUBSTR(re)) {
an = newSVsv(RX_ANCHORED_SUBSTR(re));
} else if (RX_ANCHORED_UTF8(re)) {
an = newSVsv(RX_ANCHORED_UTF8(re));
}
if (RX_FLOAT_SUBSTR(re)) {
fl = newSVsv(RX_FLOAT_SUBSTR(re));
} else if (RX_FLOAT_UTF8(re)) {
fl = newSVsv(RX_FLOAT_UTF8(re));
}
XPUSHs(an);
XPUSHs(fl);
XSRETURN(2);
}
XSRETURN_UNDEF;
}
| XS | 3 | vlinhd11/vlinhd11-android-scripting | perl/src/ext/re/re.xs | [
"Apache-2.0"
] |
package com.example.springdemo.springdemo.mybatisInterceptor;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.plugin.InterceptorChain;
import org.apache.ibatis.session.SqlSessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Configuration;
import java.util.List;
/**
 * Registers the custom MyBatis {@code MapInterceptor} with the global
 * MyBatis configuration once the Spring application has started.
 */
@Configuration
public aspect InterceptorConfig implements CommandLineRunner {

    /** Factory whose global MyBatis configuration receives the interceptor. */
    @Autowired
    private SqlSessionFactory sqlSessionFactory;

    /** The interceptor to install into the MyBatis plugin chain. */
    @Autowired
    private MapInterceptor mapInterceptor;

    /**
     * Installs the interceptor via {@code Configuration.addInterceptor()}.
     * The previous code called {@code getInterceptors().add(...)}, but
     * {@code Configuration.getInterceptors()} returns an unmodifiable list
     * (backed by MyBatis' InterceptorChain), so {@code add} throws
     * {@code UnsupportedOperationException} at startup.
     */
    @Override
    public void run(String... args) throws Exception {
        sqlSessionFactory.getConfiguration().addInterceptor(mapInterceptor);
    }
}
| AspectJ | 4 | addstone/Doraemon | springdemo/src/main/java/com/example/springdemo/springdemo/mybatisInterceptor/InterceptorConfig.aj | [
"MIT"
] |
definition module Util.Memory
MCL_CURRENT :== 1
MCL_FUTURE :== 2
mlockall :: !Int !*World -> *(!Bool, !*World)
| Clean | 3 | clean-cloogle/Cloogle | backend/Util/Memory.dcl | [
"MIT"
] |
import type { Request } from "express"
import type { IGatsbyPage } from "../redux/types"
import { match } from "@gatsbyjs/reach-router/lib/utils"
export interface IServerData {
headers?: Record<string, string>
props?: Record<string, unknown>
}
interface IModuleWithServerData {
getServerData?: (args: {
headers: Map<string, unknown>
method: string
url: string
query?: Record<string, unknown>
params?: Record<string, unknown>
}) => Promise<IServerData>
}
/**
 * Runs a page module's optional `getServerData` export, building its
 * argument object from the (possibly absent) Express request.
 * Resolves to an empty object when the module exports no `getServerData`.
 */
export async function getServerData(
  req:
    | Partial<Pick<Request, "query" | "method" | "url" | "headers">>
    | undefined,
  page: IGatsbyPage,
  pagePath: string,
  mod: IModuleWithServerData | undefined
): Promise<IServerData> {
  if (!mod?.getServerData) {
    return {}
  }

  // match() requires an absolute path, so prepend a slash when missing.
  const normalizedPath = pagePath.startsWith(`/`)
    ? pagePath
    : `/${pagePath}`
  const { params } = match(page.matchPath || page.path, normalizedPath)

  return mod.getServerData({
    headers: new Map(Object.entries(req?.headers ?? {})),
    method: req?.method ?? `GET`,
    url: req?.url ?? `"req" most likely wasn't passed in`,
    query: req?.query ?? {},
    params,
  })
}
| TypeScript | 4 | waltercruz/gatsby | packages/gatsby/src/utils/get-server-data.ts | [
"MIT"
] |
namespace DebugStub
const Signature = $19740807
const Tracing_Off = 0
const Tracing_On = 1
// Current status of OS Debug Stub
const Status_Run = 0
const Status_Break = 1
const StepTrigger_None = 0
const StepTrigger_Into = 1
const StepTrigger_Over = 2
const StepTrigger_Out = 3
const Vs2Ds_Noop = 0
const Vs2Ds_TraceOff = 1
const Vs2Ds_TraceOn = 2
const Vs2Ds_Break = 3
const Vs2Ds_Continue = 4
const Vs2Ds_BreakOnAddress = 6
const Vs2Ds_BatchBegin = 7
const Vs2Ds_BatchEnd = 8
const Vs2Ds_StepInto = 5
const Vs2Ds_StepOver = 11
const Vs2Ds_StepOut = 12
const Vs2Ds_SendMethodContext = 9
const Vs2Ds_SendMemory = 10
const Vs2Ds_SendRegisters = 13
const Vs2Ds_SendFrame = 14
const Vs2Ds_SendStack = 15
// Set an assembly level break point
// Only one can be active at a time. BreakOnAddress can have multiple.
// User must call continue after.
const Vs2Ds_SetAsmBreak = 16
const Vs2Ds_Ping = 17
const Vs2Ds_AsmStepInto = 18
const Vs2Ds_SetINT3 = 19
const Vs2Ds_ClearINT3 = 20
const Vs2Ds_Max = 21
const Ds2Vs_Noop = 0
const Ds2Vs_TracePoint = 1
const Ds2Vs_Message = 192
const Ds2Vs_BreakPoint = 3
const Ds2Vs_Error = 4
const Ds2Vs_Pointer = 5
const Ds2Vs_Started = 6
const Ds2Vs_MethodContext = 7
const Ds2Vs_MemoryData = 8
const Ds2Vs_CmdCompleted = 9
const Ds2Vs_Registers = 10
const Ds2Vs_Frame = 11
const Ds2Vs_Stack = 12
const Ds2Vs_Pong = 13
const Ds2Vs_BreakPointAsm = 14
const Ds2Vs_StackCorruptionOccurred = 15
const Ds2Vs_MessageBox = 16
const Ds2Vs_NullReferenceOccurred = 17
const Ds2Vs_SimpleNumber = 18
const Ds2Vs_SimpleLongNumber = 19
const Ds2Vs_ComplexNumber = 20
const Ds2Vs_ComplexLongNumber = 21
const Ds2Vs_StackOverflowOccurred = 22
const Ds2Vs_InterruptOccurred = 23
const Ds2Vs_CoreDump = 24
const Ds2Vs_KernelPanic = 25
| XS | 3 | marcelocaetano/XSharp | playground/Gen1/Consts.xs | [
"BSD-3-Clause"
] |
#!/usr/bin/env bash
# Abort on any command failure, including failures inside pipelines.
set -e
set -o pipefail
# Ensure the snap build cache directory exists for this CI run.
mkdir -p "$CI_BUILD_DIR/snaps-cache"
# Run snapcraft under the lxd group so it can talk to the LXD daemon.
sg lxd -c snapcraft
| Shell | 3 | uga-rosa/neovim | ci/snap/script.sh | [
"Vim"
] |
/* Utility: flexbox container. */
.displayFlex {
display: flex;
}
/* Spacing for text inside the main container. */
.mainContainer span {
margin: 57px;
}
/* Heavily rounded corners on images in the main container. */
.mainContainer img {
border-radius: 139px;
}
/* Semi-transparent image with an inline base64 PNG background. */
.overrideImg {
filter: opacity(0.5);
background-size: 30%;
background-image: url('data:image/png;base64,iVBORw0KGgo=');
background-position: 1px 2px;
}
| CSS | 3 | blomqma/next.js | test/integration/image-component/default/style.module.css | [
"MIT"
] |
unit SHA1;
{
SHA1.pas: SHA-1 hash implementation, based on RFC 3174 and MD5.pas
Author: Jordan Russell, 2010-02-24
License for SHA1.pas: Public domain, no copyright claimed
$jrsoftware: issrc/Projects/SHA1.pas,v 1.1 2010/02/25 04:57:34 jr Exp $
}
interface
{$IFNDEF VER80}
{$IFNDEF VER90}
{$IFNDEF VER93}
{$IFNDEF VER100}
{$IFNDEF VER110}
{$DEFINE SHA1_D4PLUS}
{$ENDIF}
{$ENDIF}
{$ENDIF}
{$ENDIF}
{$ENDIF}
type
TSHA1Word = {$IFDEF SHA1_D4PLUS} LongWord {$ELSE} Cardinal {$ENDIF};
TSHA1Buf = array[0..4] of TSHA1Word;
TSHA1In = array[0..15] of TSHA1Word;
TSHA1WArray = array[0..79] of TSHA1Word;
TSHA1Context = record
buf: TSHA1Buf;
bytes: array[0..1] of TSHA1Word;
in_: TSHA1In;
W: TSHA1WArray;
end;
TSHA1Digest = array[0..19] of Byte;
procedure SHA1Init(var ctx: TSHA1Context);
procedure SHA1Update(var ctx: TSHA1Context; const buffer; len: Cardinal);
function SHA1Final(var ctx: TSHA1Context): TSHA1Digest;
function SHA1Buf(const Buffer; Len: Cardinal): TSHA1Digest;
function SHA1DigestsEqual(const A, B: TSHA1Digest): Boolean;
function SHA1DigestToString(const D: TSHA1Digest): String;
implementation
procedure SHA1Transform(var buf: TSHA1Buf; const in_: TSHA1In; var W: TSHA1WArray); forward;
{ Reverses the byte order of a 32-bit word (big-endian <-> little-endian). }
function ByteSwap(const X: TSHA1Word): TSHA1Word;
var
  B0, B1, B2, B3: TSHA1Word;
begin
  { Extract each byte, then reassemble them in the opposite order. }
  B0 := X and $FF;
  B1 := (X shr 8) and $FF;
  B2 := (X shr 16) and $FF;
  B3 := (X shr 24) and $FF;
  Result := (B0 shl 24) or (B1 shl 16) or (B2 shl 8) or B3;
end;
(*
* Start SHA-1 accumulation. Set byte count to 0 and buffer to mysterious
* initialization constants.
*)
procedure SHA1Init(var ctx: TSHA1Context);
begin
{ FIPS 180-1 / RFC 3174 initial hash values H0..H4 }
ctx.buf[0] := TSHA1Word($67452301);
ctx.buf[1] := TSHA1Word($efcdab89);
ctx.buf[2] := TSHA1Word($98badcfe);
ctx.buf[3] := TSHA1Word($10325476);
ctx.buf[4] := TSHA1Word($c3d2e1f0);
{ Zero the 64-bit processed-byte counter (low word, high word) }
ctx.bytes[0] := 0;
ctx.bytes[1] := 0;
end;
(*
* Update context to reflect the concatenation of another buffer full
* of bytes.
*)
{ Feeds len bytes at buffer into the hash state. Partial 64-byte blocks
  are buffered in ctx.in_; each time a full block accumulates it is run
  through SHA1Transform. May be called repeatedly to hash a stream. }
procedure SHA1Update(var ctx: TSHA1Context; const buffer; len: Cardinal);
var
buf: ^Byte;
t: TSHA1Word;
begin
buf := @buffer;
{ Update byte count }
t := ctx.bytes[0];
Inc(ctx.bytes[0], len);
if Cardinal(ctx.bytes[0]) < Cardinal(t) then
Inc(ctx.bytes[1]); { Carry from low to high }
t := 64 - (t and $3f); { Space available in ctx.in (at least 1) }
{ If the new data fits entirely inside the partial block, just buffer it }
if Cardinal(t) > Cardinal(len) then begin
Move(buf^, Pointer(Cardinal(@ctx.in_) + 64 - t)^, len);
Exit;
end;
{ First chunk is an odd size }
Move(buf^, Pointer(Cardinal(@ctx.in_) + 64 - t)^, t);
SHA1Transform(ctx.buf, ctx.in_, ctx.W);
Inc(buf, t);
Dec(len, t);
{ Process data in 64-byte chunks }
while Cardinal(len) >= Cardinal(64) do begin
Move(buf^, ctx.in_, 64);
SHA1Transform(ctx.buf, ctx.in_, ctx.W);
Inc(buf, 64);
Dec(len, 64);
end;
{ Handle any remaining bytes of data. }
Move(buf^, ctx.in_, len);
end;
(*
* Final wrapup - pad to 64-byte boundary with the bit pattern
* 1 0* (64-bit count of bits processed, MSB-first)
*)
{ Completes the hash: appends the mandatory $80 byte, zero padding, and
  the 64-bit big-endian bit count, runs the final transform(s), and
  returns the 20-byte digest. The context is wiped before returning. }
function SHA1Final(var ctx: TSHA1Context): TSHA1Digest;
var
count, i: Integer;
p: ^Byte;
begin
count := ctx.bytes[0] and $3f; { Number of bytes in ctx.in }
p := @ctx.in_;
Inc(p, count);
{ Set the first char of padding to 0x80. There is always room. }
p^ := $80;
Inc(p);
{ Bytes of padding needed to make 56 bytes (-8..55) }
count := 56 - 1 - count;
if count < 0 then begin { Padding forces an extra block }
FillChar(p^, count + 8, 0);
SHA1Transform(ctx.buf, ctx.in_, ctx.W);
p := @ctx.in_;
count := 56;
end;
FillChar(p^, count, 0);
{ Append length in bits and transform }
ctx.in_[15] := ByteSwap(ctx.bytes[0] shl 3);
ctx.in_[14] := ByteSwap((ctx.bytes[1] shl 3) or (ctx.bytes[0] shr 29));
SHA1Transform(ctx.buf, ctx.in_, ctx.W);
{ Convert the state words to big-endian byte order for the digest }
for i := 0 to High(ctx.buf) do
ctx.buf[i] := ByteSwap(ctx.buf[i]);
Move(ctx.buf, Result, SizeOf(Result));
FillChar(ctx, SizeOf(ctx), 0); { In case it's sensitive }
end;
(*
* The core of the SHA-1 algorithm, this alters an existing SHA-1 hash to
* reflect the addition of 16 longwords of new data. SHA1Update blocks
* the data and converts bytes into longwords for this routine.
*)
{ Core compression function: folds one 64-byte block (in_) into the
  five-word state (buf), per RFC 3174 section 6.1. W is caller-provided
  scratch space for the 80-word message schedule. }
procedure SHA1Transform(var buf: TSHA1Buf; const in_: TSHA1In; var W: TSHA1WArray);
const
K1 = $5A827999;
K2 = $6ED9EBA1;
K3 = $8F1BBCDC;
K4 = $CA62C1D6;
var
t: Integer;
temp, A, B, C, D, E: TSHA1Word;
begin
{ Load the block big-endian into W[0..15] }
for t := 0 to 15 do begin
{ ByteSwap inlined: }
temp := in_[t];
W[t] := (temp shl 24) or
((temp and $FF00) shl 8) or
((temp and $FF0000) shr 8) or
(temp shr 24);
end;
{ Expand the schedule: W[t] = ROTL1(W[t-3] xor W[t-8] xor W[t-14] xor W[t-16]) }
for t := 16 to 79 do begin
temp := W[t-3] xor W[t-8] xor W[t-14] xor W[t-16];
W[t] := (temp shl 1) or (temp shr (32-1));
end;
A := buf[0];
B := buf[1];
C := buf[2];
D := buf[3];
E := buf[4];
{ Rounds 0..19: f = Ch(B,C,D) }
for t := 0 to 19 do begin
temp := ((A shl 5) or (A shr (32-5))) +
(D xor (B and (C xor D))) + E + W[t] + K1;
E := D;
D := C;
C := (B shl 30) or (B shr (32-30));
B := A;
A := temp;
end;
{ Rounds 20..39: f = Parity(B,C,D) }
for t := 20 to 39 do begin
temp := ((A shl 5) or (A shr (32-5))) + (B xor C xor D) + E + W[t] + K2;
E := D;
D := C;
C := (B shl 30) or (B shr (32-30));
B := A;
A := temp;
end;
{ Rounds 40..59: f = Maj(B,C,D) }
for t := 40 to 59 do begin
temp := ((A shl 5) or (A shr (32-5))) +
((B and C) or (B and D) or (C and D)) + E + W[t] + K3;
E := D;
D := C;
C := (B shl 30) or (B shr (32-30));
B := A;
A := temp;
end;
{ Rounds 60..79: f = Parity(B,C,D) }
for t := 60 to 79 do begin
temp := ((A shl 5) or (A shr (32-5))) + (B xor C xor D) + E + W[t] + K4;
E := D;
D := C;
C := (B shl 30) or (B shr (32-30));
B := A;
A := temp;
end;
{ Add this block's result into the running state }
Inc(buf[0], A);
Inc(buf[1], B);
Inc(buf[2], C);
Inc(buf[3], D);
Inc(buf[4], E);
end;
{ New functions by JR: }
{ One-shot convenience: hashes Len bytes at Buffer and returns the digest. }
function SHA1Buf(const Buffer; Len: Cardinal): TSHA1Digest;
var
Context: TSHA1Context;
begin
SHA1Init(Context);
SHA1Update(Context, Buffer, Len);
Result := SHA1Final(Context);
end;
{ Compares two SHA-1 digests byte by byte; True only when all 20 bytes match. }
function SHA1DigestsEqual(const A, B: TSHA1Digest): Boolean;
var
  I: Integer;
begin
  Result := True;
  I := Low(TSHA1Digest);
  { Walk the digests until a mismatch is found or all bytes are compared.
    Result is checked first so we never index past the mismatch. }
  while Result and (I <= High(TSHA1Digest)) do begin
    if A[I] <> B[I] then
      Result := False
    else
      Inc(I);
  end;
end;
{ Renders a digest as a 40-character lowercase hexadecimal string. }
function SHA1DigestToString(const D: TSHA1Digest): String;
const
Digits: array[0..15] of Char = '0123456789abcdef';
var
Buf: array[0..39] of Char;
P: PChar;
I: Integer;
begin
P := @Buf;
for I := 0 to 19 do begin
{ High nibble first, then low nibble of each byte }
P^ := Digits[D[I] shr 4];
Inc(P);
P^ := Digits[D[I] and 15];
Inc(P);
end;
SetString(Result, Buf, 40);
end;
end.
| Pascal | 5 | Patriccollu/issrc | Projects/SHA1.pas | [
"FSFAP"
] |
import "ecere"
BitmapResource knightGfx { ":knight.png" };
BitmapResource dragonGfx { ":dragon.png" };
BitmapResource mageGfx { ":mage.png" };
BitmapResource archerGfx { ":archer.png" };
BitmapResource castleGfx { ":castle.png" };
Array<BitmapResource> gfxResources { [ knightGfx, dragonGfx, mageGfx, archerGfx, castleGfx ] };
class WorldObject
{
BitmapResource res;
void Render(Surface surface)
{
Bitmap bmp = res ? res.bitmap : null;
if(bmp)
surface.Blit(bmp, x, y, 0,0, bmp.width, bmp.height);
}
public:
int x, y;
}
class Knight : WorldObject { res = knightGfx; }
class Dragon : WorldObject { res = dragonGfx; }
class Mage : WorldObject { res = mageGfx; }
class Archer : WorldObject { res = archerGfx; }
class Castle : WorldObject { res = castleGfx; }
Array<WorldObject> objects
{ [
Castle { 180, 150 },
Mage { 50, 50 },
Archer { 150, 250 },
Knight { 380, 290 },
Knight { 120, 150 },
Dragon { 320, 50 }
] };
class MainWindow : Window
{
caption = $"A World of Objects";
background = black;
borderStyle = sizable;
hasMaximize = true;
hasMinimize = true;
hasClose = true;
size = { 576, 432 };
bool OnLoadGraphics()
{
for(r : gfxResources)
AddResource(r);
return true;
}
void OnUnloadGraphics()
{
for(r : gfxResources)
RemoveResource(r);
}
void OnDestroy()
{
objects.Free();
}
void OnRedraw(Surface surface)
{
for(o : objects)
o.Render(surface);
}
}
MainWindow mainWindow {};
| eC | 4 | N-eil/ecere-sdk | samples/guiAndGfx/bitmapsAndKB/Objects/objects.ec | [
"BSD-3-Clause"
] |
const std = @import("../std.zig");
const io = std.io;
const assert = std.debug.assert;
const testing = std.testing;
/// Wraps a reader and caps the total number of bytes readable through it.
/// Once `bytes_left` reaches zero, subsequent reads return 0 (i.e. EOF).
pub fn LimitedReader(comptime ReaderType: type) type {
return struct {
inner_reader: ReaderType,
/// Remaining byte budget; decremented by each successful read.
bytes_left: u64,
pub const Error = ReaderType.Error;
pub const Reader = io.Reader(*Self, Error, read);
const Self = @This();
/// Reads at most min(bytes_left, dest.len) bytes from the inner reader
/// and shrinks the budget by the number of bytes actually read.
pub fn read(self: *Self, dest: []u8) Error!usize {
const max_read = std.math.min(self.bytes_left, dest.len);
const n = try self.inner_reader.read(dest[0..max_read]);
self.bytes_left -= n;
return n;
}
/// Returns an `io.Reader` interface backed by this limited reader.
pub fn reader(self: *Self) Reader {
return .{ .context = self };
}
};
}
/// Returns an initialised `LimitedReader`
/// `bytes_left` is a `u64` to be able to take 64 bit file offsets
pub fn limitedReader(inner_reader: anytype, bytes_left: u64) LimitedReader(@TypeOf(inner_reader)) {
return .{ .inner_reader = inner_reader, .bytes_left = bytes_left };
}
test "basic usage" {
const data = "hello world";
var fbs = std.io.fixedBufferStream(data);
var early_stream = limitedReader(fbs.reader(), 3);
var buf: [5]u8 = undefined;
try testing.expectEqual(@as(usize, 3), try early_stream.reader().read(&buf));
try testing.expectEqualSlices(u8, data[0..3], buf[0..3]);
try testing.expectEqual(@as(usize, 0), try early_stream.reader().read(&buf));
try testing.expectError(error.EndOfStream, early_stream.reader().skipBytes(10, .{}));
}
| Zig | 5 | lukekras/zig | lib/std/io/limited_reader.zig | [
"MIT"
] |
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
#include "go_asm.h"
#include "textflag.h"
TEXT ·Compare(SB), NOSPLIT, $0-56
Get SP
I64Load a_base+0(FP)
I64Load a_len+8(FP)
I64Load b_base+24(FP)
I64Load b_len+32(FP)
Call cmpbody<>(SB)
I64Store ret+48(FP)
RET
TEXT runtime·cmpstring(SB), NOSPLIT, $0-40
Get SP
I64Load a_base+0(FP)
I64Load a_len+8(FP)
I64Load b_base+16(FP)
I64Load b_len+24(FP)
Call cmpbody<>(SB)
I64Store ret+32(FP)
RET
// params: a, alen, b, blen
// ret: -1/0/1
TEXT cmpbody<>(SB), NOSPLIT, $0-0
// len = min(alen, blen)
Get R1
Get R3
Get R1
Get R3
I64LtU
Select
Set R4
Get R0
I32WrapI64
Get R2
I32WrapI64
Get R4
I32WrapI64
Call memcmp<>(SB)
I64ExtendI32S
Tee R5
I64Eqz
If
// check length
Get R1
Get R3
I64Sub
Set R5
End
I64Const $0
I64Const $-1
I64Const $1
Get R5
I64Const $0
I64LtS
Select
Get R5
I64Eqz
Select
Return
// compiled with emscripten
// params: a, b, len
// ret: <0/0/>0
TEXT memcmp<>(SB), NOSPLIT, $0-0
Get R2
If $1
Loop
Get R0
I32Load8S $0
Tee R3
Get R1
I32Load8S $0
Tee R4
I32Eq
If
Get R0
I32Const $1
I32Add
Set R0
Get R1
I32Const $1
I32Add
Set R1
I32Const $0
Get R2
I32Const $-1
I32Add
Tee R2
I32Eqz
BrIf $3
Drop
Br $1
End
End
Get R3
I32Const $255
I32And
Get R4
I32Const $255
I32And
I32Sub
Else
I32Const $0
End
Return
| GAS | 3 | SSSDNSY/go | src/internal/bytealg/compare_wasm.s | [
"BSD-3-Clause"
] |
<%--
Copyright 2012 Netflix, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--%>
<%@ page import="com.netflix.asgard.model.AlarmData" %>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="layout" content="main"/>
<title>${scalingPolicy.policyName} Scaling Policy</title>
</head>
<body>
<div class="body">
<h1>Policy Details</h1>
<g:if test="${flash.message}">
<div class="message">${flash.message}</div>
</g:if>
<div class="buttons">
<g:form class="validate">
<input type="hidden" name="id" value="${scalingPolicy.policyName}"/>
<g:link class="edit keep" action="edit" id="${scalingPolicy.policyName}"
params="[group: scalingPolicy.autoScalingGroupName]">Edit Scaling Policy</g:link>
<g:buttonSubmit class="delete" data-warning="Really delete Scaling Policy '${scalingPolicy.policyName}'?"
action="delete" value="Delete Scaling Policy"/>
<g:link class="create keep" controller="alarm" action="create"
params="[id: scalingPolicy.policyName]">Add Alarm</g:link>
</g:form>
</div>
<div>
<table>
<tbody>
<tr class="prop">
<td class="name">Name:</td>
<td class="value">${scalingPolicy.policyName}</td>
</tr>
<tr class="prop">
<td class="name">ARN:</td>
<td class="value">${scalingPolicy.policyARN}</td>
</tr>
<tr class="prop">
<td class="name">Auto Scaling Group:</td>
<td class="value"><g:linkObject type="autoScaling" name="${scalingPolicy.autoScalingGroupName}"/></td>
</tr>
<tr class="prop">
<td class="name">Adjustment Type:</td>
<td class="value">${scalingPolicy.adjustmentType}</td>
</tr>
<tr class="prop">
<td class="name">Adjustment:</td>
<td class="value">${scalingPolicy.scalingAdjustment}</td>
</tr>
<tr class="prop">
<td class="name">Minimum Adjustment:</td>
<td class="value">${scalingPolicy.minAdjustmentStep}</td>
</tr>
<tr class="prop">
<td class="name">Cooldown:</td>
<td class="value">${scalingPolicy.cooldown} seconds</td>
</tr>
<tr class="prop">
<td class="name">Alarms:</td>
<td class="value">
<g:if test="${alarms}">
<ul class="links">
<g:each var="alarm" in="${alarms}">
<li><g:linkObject type="alarm" name="${alarm.alarmName}">${alarm.toDisplayValue()}</g:linkObject></li>
</g:each>
</ul>
</g:if>
<g:else>None</g:else>
</td>
</tr>
</tbody>
</table>
</div>
</div>
</body>
</html>
| Groovy Server Pages | 3 | Threadless/asgard | grails-app/views/scalingPolicy/show.gsp | [
"Apache-2.0"
] |
#include <cuda.h>
#include <thrust/device_vector.h>
#include <thrust/transform_reduce.h>
#include <thrust/system/cuda/execution_policy.h>
#include "caffe2/operators/summarize_op.h"
#include "caffe2/core/context_gpu.h"
namespace caffe2 {
namespace {
// structure used to accumulate the moments and other statistical properties
// encountered so far.
// Accumulates streaming statistics (count, min, max, mean, and M2 -- the
// running sum of squared deviations from the mean) over the values seen so
// far, using Chan et al.'s parallel variance formulation so that partial
// results can be merged associatively in a reduction.
template <typename T>
struct SummaryStatsData {
  T n;    // number of values accumulated
  T min;  // smallest value seen
  T max;  // largest value seen
  T mean; // running mean
  T M2;   // sum of squared deviations from the mean

  // Initialize to the identity element of the merge operation: zero count,
  // min at the largest representable value and max at the lowest (most
  // negative) representable value, so any real sample replaces them.
  void initialize() {
    n = mean = M2 = 0;
    min = std::numeric_limits<T>::max();
    // NOTE: std::numeric_limits<T>::min() is the smallest *positive*
    // normalized value for floating-point T, which would incorrectly win
    // against all-negative inputs; lowest() is the correct identity.
    max = std::numeric_limits<T>::lowest();
  }

  // Unbiased sample variance; defined as 0 for a single sample.
  T variance() { return (n == 1 ? 0 : M2 / (n - 1)); }
};
// stats_unary_op is a functor that takes in a value x and
// returns a variace_data whose mean value is initialized to x.
template <typename T>
struct summary_stats_unary_op {
__host__ __device__ SummaryStatsData<T> operator()(const T& x) const {
SummaryStatsData<T> result;
result.n = 1;
result.min = x;
result.max = x;
result.mean = x;
result.M2 = 0;
return result;
}
};
// summary_stats_binary_op is a functor that accepts two SummaryStatsData
// structs and returns a new SummaryStatsData which are an
// approximation to the summary_stats for
// all values that have been aggregated so far
template <typename T>
struct summary_stats_binary_op
: public thrust::binary_function<const SummaryStatsData<T>&,
const SummaryStatsData<T>&,
SummaryStatsData<T> > {
__host__ __device__ SummaryStatsData<T> operator()(
const SummaryStatsData<T>& x, const SummaryStatsData <T>& y) const {
SummaryStatsData<T> result;
T n = x.n + y.n;
T delta = y.mean - x.mean;
T delta2 = delta * delta;
result.n = n;
result.min = thrust::min(x.min, y.min);
result.max = thrust::max(x.max, y.max);
result.mean = x.mean + delta * y.n / n;
result.M2 = x.M2 + y.M2;
result.M2 += delta2 * x.n * y.n / n;
return result;
}
};
} // namespace
// Runs the summarization on GPU: computes min, max, mean, and sample
// standard deviation of the float input in a single pass with
// thrust::transform_reduce, then optionally appends the four values to the
// log file and/or writes them to a 4-element float output tensor.
template<>
bool SummarizeOp<float, CUDAContext>::RunOnDevice() {
auto& X = Input(0);
const int N = X.numel();
DCHECK_GT(N, 0);
// TODO(Yangqing): Any better way to avoid having to const cast?
thrust::device_ptr<float> Xdata(const_cast<float*>(X.data<float>()));
summary_stats_unary_op<float> unary_op;
summary_stats_binary_op<float> binary_op;
SummaryStatsData<float> init;
init.initialize();
// compute summary statistics
SummaryStatsData<float> result = thrust::transform_reduce(
#if THRUST_VERSION >= 100800
// Newer Thrust can run on this op's CUDA stream instead of the default.
thrust::cuda::par.on(context_.cuda_stream()),
#endif // THRUST_VERSION >= 100800
Xdata, Xdata + N, unary_op, init, binary_op);
float standard_deviation = std::sqrt(result.variance());
if (to_file_) {
// Append one "min max mean stddev" line to the summary log file.
(*log_file_) << result.min << " " << result.max << " " << result.mean << " "
<< standard_deviation << std::endl;
}
if (OutputSize()) {
// Stage the four statistics on the host, then copy them to the output.
auto* Y = Output(0, {4}, at::dtype<float>());
float output_buffer[NUM_STATS] = {result.min, result.max, result.mean,
standard_deviation};
context_.CopyFromCPU<float>(
NUM_STATS, output_buffer, Y->template mutable_data<float>());
}
return true;
}
REGISTER_CUDA_OPERATOR(Summarize, SummarizeOp<float, CUDAContext>);
} // namespace caffe2
| Cuda | 5 | Hacky-DH/pytorch | caffe2/operators/summarize_op.cu | [
"Intel"
] |
%span.light= _('Storage:')
%strong= storage_counter(storage_size)
- if storage_details
(#{storage_counters_details(storage_details)})
| Haml | 3 | hugorebelo/gitlabhq | app/views/shared/_storage_counter_statistics.html.haml | [
"MIT"
] |
PREFIX : <http://example/>
SELECT ?w (SAMPLE(?v) AS ?S)
{
?s :p ?v .
OPTIONAL { ?s :q ?w }
}
GROUP BY ?w
| SPARQL | 3 | alpano-unibz/ontop | test/sparql-compliance/src/test/resources/testcases-dawg-sparql-1.1/grouping/group03.rq | [
"Apache-2.0"
] |
O003 (DIAMOND SQUARE)
N2 G54 G90 G49 G80
N3 M6 T1 (1.ENDMILL)
N4 M3 S1800
N5 G0 X-.6 Y2.050
N6 G43 H1 Z.1
N7 G1 Z-.3 F50.
N8 G41 D1 Y1.45
N9 G1 X0 F20.
N10 G2 J-1.45
(CUTTER COMP CANCEL)
N11 G1 Z-.2 F50.
N12 Y-.990
N13 G40
N14 G0 X-.6 Y1.590
N15 G0 Z.1
N16 M5 G49 G28 G91 Z0
N17 CALL O9456
N18 #500=0.004
N19 #503=[#500+#501]
N20 VC45=0.0006
VS4=0.0007
N21 G90 G10 L20 P3 X5.Y4. Z6.567
N22 G0 X5000
N23 IF [#1 LT 0.370] GOTO 49
N24 X-0.678 Y+.990
N25 G84.3 X-0.1
N26 #4=#5*COS[45]
N27 #4=#5*SIN[45]
N28 VZOFZ=652.9658
% | G-code | 3 | websharks/ace-builds | demo/kitchen-sink/docs/gcode.gcode | [
"BSD-3-Clause"
] |
grammar Graphql;
import GraphqlSDL, GraphqlOperation, GraphqlCommon;
@header {
package graphql.parser.antlr;
}
@lexer::members {
public boolean isDigit(int c) {
return c >= '0' && c <= '9';
}
public boolean isNameStart(int c) {
return '_' == c ||
(c >= 'A' && c <= 'Z') ||
(c >= 'a' && c <= 'z');
}
public boolean isDot(int c) {
return '.' == c;
}
}
document : definition+;
definition:
operationDefinition |
fragmentDefinition |
typeSystemDefinition |
typeSystemExtension
;
| ANTLR | 4 | Sparow199/graphql-java | src/main/antlr/Graphql.g4 | [
"MIT"
] |
package cgotest
/*
#cgo LDFLAGS: -lm
#include <math.h>
*/
import "C"
import (
"testing"
"cgotest/issue8756"
)
// test8756 is a regression check for Go issue #8756: it calls C's pow both
// through a separately-compiled cgo package (issue8756) and directly from
// this package's own cgo block (both linked with -lm), exercising that the
// two cgo users can coexist in one binary.
func test8756(t *testing.T) {
issue8756.Pow()
C.pow(1, 2)
}
| Go | 3 | SSSDNSY/go | misc/cgo/test/testdata/issue8756.go | [
"BSD-3-Clause"
] |
<%@ Page Language="C#" AutoEventWireup="true" CodeFile="nativecall.aspx.cs" Inherits="nativecall" %>
| ASP | 1 | WangDrama/WeiXinMPSDK | Samples/net45-mvc/Senparc.Weixin.MP.Sample/wx/pay/nativecall.aspx | [
"Apache-2.0"
] |
Converted to AMD and CoffeeScript and sorely hacked based on Chris Ball's example:
[Serverless WebRTC Chat Room](http://cjb.github.io/serverless-webrtc/serverless-webrtc.html)
-- Bill Burdick 2015
define ['./webrtc-adapter'], ->
# See also:
# http://www.html5rocks.com/en/tutorials/webrtc/basics/
# https://code.google.com/p/webrtc-samples/source/browse/trunk/apprtc/index.html
#
# https://webrtc-demos.appspot.com/html/pc1.html
cfg = iceServers: [
{url: "stun:23.21.150.121"},
#{url:'stun:stun01.sipphone.com'},
#{url:'stun:stun.ekiga.net'},
#{url:'stun:stun.fwdnet.net'},
#{url:'stun:stun.ideasip.com'},
#{url:'stun:stun.iptel.org'},
#{url:'stun:stun.rixtelecom.se'},
#{url:'stun:stun.schlund.de'},
#{url:'stun:stun.l.google.com:19302'},
#{url:'stun:stun1.l.google.com:19302'},
#{url:'stun:stun2.l.google.com:19302'},
#{url:'stun:stun3.l.google.com:19302'},
#{url:'stun:stun4.l.google.com:19302'},
#{url:'stun:stunserver.org'},
#{url:'stun:stun.softjoys.com'},
#{url:'stun:stun.voiparound.com'},
#{url:'stun:stun.voipbuster.com'},
#{url:'stun:stun.voipstunt.com'},
#{url:'stun:stun.voxgratia.org'},
#{url:'stun:stun.xten.com'},
{
url: 'turn:numb.viagenie.ca',
credential: 'muazkh',
username: 'webrtc@live.com'
},
#{
# url: 'turn:192.158.29.39:3478?transport=udp',
# credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
# username: '28224511:1379330808'
#},
#{
# url: 'turn:192.158.29.39:3478?transport=tcp',
# credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
# username: '28224511:1379330808'
#}
]
con = optional: [DtlsSrtpKeyAgreement: true]
`PeerConnection` starts by creating an ordered connection and configuring its event
handlers.
class PeerConnection
constructor: ({connected, handleMessage, offerReady})->
if !offerReady || !handleMessage then throw new Error "Missing handlers #{@desc}"
@con = new RTCPeerConnection cfg, con
@con.onsignalingstatechange = (s)=> @log 'signaling state change: ', s
@con.oniceconnectionstatechange = (s)=> @log 'ice connection state change: ', s
@con.onicegatheringstatechange = (s)=> @log 'ice gathering state change: ', s
@con.onicecandidate = (e)=>
#if e.candidate == null || e.candidate.candidate.match /typ srflx/
if e.candidate == null
@offerReady @con.localDescription
else @log "candidate", e
@connected = connected
@handleMessage = handleMessage
@offerReady = offerReady
log: (msg, args...)-> console.log "#{@desc}: #{msg}", args...
useOffer: (offerJson)->
@log "using offer", offerJson
offer = null
try
offer = JSON.parse offerJson
catch err
throw new Error "Could not parse offer: #{offerJson}"
@con.setRemoteDescription new RTCSessionDescription offer
useChannel: (@chan)->
@chan.onmessage = (e)=>
if e.data.charCodeAt(0) == 2
# The first message we get from Firefox (but not Chrome)
# is literal ASCII 2 and I don't understand why -- if we
# leave it in, JSON.parse() will barf.
@log "ignoring message '2'"
return
@log "got message", e.data
@handleMessage e.data
@chan.onopen = (e)=> @connected()
connected: (e)-> @log 'data channel connect'
sendMessage: (msg)-> @chan.send msg
close: -> @con.close()
`MasterConnection` starts by creating a connection and an offer.
The developer needs to make sure @offerReady() sends this offer to the slave connection,
perhaps by using a common server or by presenting it to the user so they can send it to
another user.
**API**
**You must set handleMessage and offerReady before calling start() (see below)**
- `handleMessage(msg)`: **set** this to handle incoming messages
- `offerReady(offer)`: **set** this to handle when the offer is ready
- `connected(event)`: **optionally set** this to be informed of the connection
- `start(errFunc)`: start
- `establishConnection(slaveAnswerJSON)`: establish the connection using the slave's answer
- `sendMessage(msg)`: use this to send a message
- `close()`: close the connection
- `con`: the RTCPeerConnection
<!-- comment so Github can render correctly -->
class MasterConnection extends PeerConnection
desc: 'Master'
start: (errFunc)->
try
@useChannel @con.createDataChannel 'test', reliable:true
@log "created datachannel"
# this will trigger @con.onicecandidate when it is ready
@con.createOffer ((desc)=>
@con.setLocalDescription desc, (->), (->)
), errFunc
this
catch err
err.message = "Could not start connection: #{err.message}"
errFunc err
establishConnection: (slaveAnswerJSON)->
answer = null
try
answer = JSON.parse slaveAnswerJSON
catch err
throw new Error "Could not parse answer: #{slaveAnswerJSON}"
@con.setRemoteDescription new RTCSessionDescription answer
`SlaveConnection` starts with an existing offer from a master connection on another
peer. It then creates a counter offer (answer).
The developer needs to make sure @offerReady() sends this counter offer to back to the
master connection, perhaps by using a common server or by presenting it to the user so
they can send it to the master connection's user.
**API**
**You must set handleMessage and offerReady before calling start() (see below)**
- `handleMessage(msg)`: **set** this to handle incoming messages
- `offerReady(offer)`: **set** this to handle when the offer is ready
- `connected(event)`: **optionally set** this to be informed of the connection
- `start(offerJson, errFunc)`: start the connection using an offer from a master
- `sendMessage(msg)`: use this to send a message
- `close()`: close the connection
- `con`: the RTCPeerConnection
<!-- comment so Github can render correctly -->
class SlaveConnection extends PeerConnection
  # Human-readable role name (used in log output).
  desc: 'Slave'
  # Start from a master's JSON-encoded offer and create the SDP answer.
  # On failure, errFunc is called with the error (message is prefixed).
  start: (offerJson, errFunc)->
    try
      @con.ondatachannel = (e)=>
        # Chrome sends an event object, FF sends the raw channel.
        @useChannel e.channel || e
        @connected e
      @useOffer offerJson
      # this will trigger @con.onicecandidate when it is ready
      @con.createAnswer ((answerDesc)=>
        # Success/failure callbacks of setLocalDescription are intentionally no-ops.
        @con.setLocalDescription answerDesc, (->), (->)
      ), errFunc
      this
    catch err
      err.message = "Could not start connection: #{err.message}"
      errFunc err
{
PeerConnection
MasterConnection
SlaveConnection
}
| Literate CoffeeScript | 5 | zot/Leisure | src/lib/webrtc.litcoffee | [
"Zlib"
] |
-- Chat messages; username references public."user".username (FK added below).
CREATE TABLE public.message (
    id integer NOT NULL,
    username text NOT NULL,
    text text NOT NULL,
    "timestamp" timestamp with time zone DEFAULT now() NOT NULL
);

-- Auto-increment source for message.id (attached as the column default below).
CREATE SEQUENCE public.message_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;

-- Tie the sequence's lifetime to the owning column.
ALTER SEQUENCE public.message_id_seq OWNED BY public.message.id;

-- Registered users; last_typed / last_seen drive the presence views below.
CREATE TABLE public."user" (
    id integer NOT NULL,
    username text NOT NULL,
    last_typed timestamp with time zone,
    last_seen timestamp with time zone
);

COMMENT ON TABLE public."user" IS 'This table stores user data';

-- Auto-increment source for "user".id (attached as the column default below).
CREATE SEQUENCE public.user_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;

ALTER SEQUENCE public.user_id_seq OWNED BY public."user".id;

-- "Online" = seen within the last 10 seconds.
CREATE VIEW public.user_online AS
 SELECT "user".id,
    "user".username,
    "user".last_typed,
    "user".last_seen
   FROM public."user"
  WHERE ("user".last_seen > (now() - '00:00:10'::interval));

-- "Typing" = typed within the last 2 seconds.
CREATE VIEW public.user_typing AS
 SELECT "user".id,
    "user".username,
    "user".last_typed,
    "user".last_seen
   FROM public."user"
  WHERE ("user".last_typed > (now() - '00:00:02'::interval));

-- Wire the sequences up as id defaults.
ALTER TABLE ONLY public.message ALTER COLUMN id SET DEFAULT nextval('public.message_id_seq'::regclass);

ALTER TABLE ONLY public."user" ALTER COLUMN id SET DEFAULT nextval('public.user_id_seq'::regclass);

-- Primary keys.
ALTER TABLE ONLY public.message
    ADD CONSTRAINT message_pkey PRIMARY KEY (id);

ALTER TABLE ONLY public."user"
    ADD CONSTRAINT user_pkey PRIMARY KEY (id);

-- username must be unique so message.username can reference it.
ALTER TABLE ONLY public."user"
    ADD CONSTRAINT user_username_key UNIQUE (username);

ALTER TABLE ONLY public.message
    ADD CONSTRAINT message_username_fkey FOREIGN KEY (username) REFERENCES public."user"(username);
| SQL | 3 | gh-oss-contributor/graphql-engine-1 | community/sample-apps/realtime-chat/hasura/migrations/default/1613650170829_init/up.sql | [
"Apache-2.0",
"MIT"
] |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Net;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Xunit;
namespace Microsoft.AspNetCore.Mvc.FunctionalTests
{
    /// <summary>
    /// Runs the shared <c>ConsumesAttributeTestsBase</c> suite against
    /// <see cref="BasicWebSite.StartupWithoutEndpointRouting"/>.
    /// </summary>
    public class ConsumesAttributeTests : ConsumesAttributeTestsBase<BasicWebSite.StartupWithoutEndpointRouting>
    {
        public ConsumesAttributeTests(MvcTestFixture<BasicWebSite.StartupWithoutEndpointRouting> fixture)
            : base(fixture)
        {
        }

        /// <summary>
        /// GETs /Routing/HasEndpointMatch and asserts the action reports
        /// <c>false</c> under this startup (overrides the base expectation).
        /// </summary>
        [Fact]
        public override async Task HasEndpointMatch()
        {
            // Arrange & Act
            var response = await Client.GetAsync("http://localhost/Routing/HasEndpointMatch");

            // Assert
            Assert.Equal(HttpStatusCode.OK, response.StatusCode);
            var body = await response.Content.ReadAsStringAsync();
            var result = JsonConvert.DeserializeObject<bool>(body);

            Assert.False(result);
        }
    }
} | C# | 3 | tomaswesterlund/aspnetcore | src/Mvc/test/Mvc.FunctionalTests/ConsumesAttributeTests.cs | [
"MIT"
] |
const std = @import("std");
const expect = std.testing.expect;
const builtin = @import("builtin");
const native_arch = builtin.target.cpu.arch;
test "page aligned array on stack" {
    // Large alignment value to make it hard to accidentally pass.
    var array align(0x1000) = [_]u8{ 1, 2, 3, 4, 5, 6, 7, 8 };
    var number1: u8 align(16) = 42;
    var number2: u8 align(16) = 43;

    // The array must start on a 0x1000-byte (page) boundary.
    try expect(@ptrToInt(&array[0]) & 0xFFF == 0);
    try expect(array[3] == 4);
    // Low 4 address bits must be zero, i.e. 16-byte aligned.
    try expect(@truncate(u4, @ptrToInt(&number1)) == 0);
    try expect(@truncate(u4, @ptrToInt(&number2)) == 0);
    // Values survive unchanged despite the over-alignment.
    try expect(number1 == 42);
    try expect(number2 == 43);
}
| Zig | 4 | mogud/zig | test/behavior/align_llvm.zig | [
"MIT"
] |
Note 0
Copyright (C) 2017 Jonathan Hough. All rights reserved.
)
cocurrent 'NN'
NB. FlattenLayer is used after a Conv2DLayer to flatten the
NB. 3-dimensional output into a 2-dimensional tensor. The
NB. leading dimension of the input tensor is expected to be
NB. of length 1.
coclass 'FlattenLayer'
coinsert 'NNLayer'
NB. Creates an instance of FlattenLayer. In the NN pipeline
NB. this should be placed after a Conv2DLayer or PoolLayer.
NB. It should be followed by a Fully connected layer. The purpose
NB. of the Flatten Layer is to reduce the dangling dimension
NB. of the output of the conv2d / pool layer(s).
NB. Parameters:
NB. NO PARAMS.
create=: 3 : 0
type=: 'FlattenLayer'
)
preRun=: 3 : 0
try.
forward y
catch.
smoutput 'Error in pre-run of FlattenLayer.'
smoutput 'Shape of input was ',": $ y
throw.
end.
)
forward=: 3 : 0
shape=: $y
,/"2,/"3 y
)
backward=: 3 : 0
shape $,y
)
destroy=: codestroy
| J | 5 | jonghough/jlearn | adv/flattenlayer.ijs | [
"MIT"
] |
<!DOCTYPE html>
<html>
<head>
<title></title>
</head>
<body>
<style>
#box {
width: 200px;
margin: 10px;
border: 1px solid black;
background-color: #eee;
}
#top {
height: 333px;
background-color: red;
}
#middle {
height: 333px;
background-color: green;
}
#bottom {
height: 334px;
background-color: blue;
}
</style>
<div id="box">
<div id="top">top</div>
<div id="middle">middle</div>
<div id="bottom">bottom</div>
</div>
</body>
</html>
| HTML | 3 | bkucera2/cypress | packages/server/test/support/fixtures/projects/e2e/scrollable.html | [
"MIT"
] |
import { imageDemoTest } from '../../../tests/shared/imageTest';
describe('Tag image', () => {
imageDemoTest('tag', { skip: ['status.md'] });
});
| TypeScript | 4 | chnliquan/ant-design | components/tag/__tests__/image.test.ts | [
"MIT"
] |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for utilities working with arbitrarily nested structures."""
import functools
from absl.testing import parameterized
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.util import random_seed as data_random_seed
from tensorflow.python.framework import combinations
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed
from tensorflow.python.platform import test
# NOTE(vikoth18): Arguments of parameterized tests are lifted into lambdas to make
# sure they are not executed before the (eager- or graph-mode) test environment
# has been set up.
def _test_random_seed_combinations():
  """Builds the parameterized combinations for `RandomSeedTest.testRandomSeed`.

  Each case is a named pair of zero-argument lambdas: `input_fn` yields the
  `(graph_seed, op_seed)` arguments handed to `get_seed`, and `output_fn`
  yields the expected `(graph_seed, op_seed)` result.  The lambdas delay
  evaluation until the (eager- or graph-mode) test environment is set up.

  Returns:
    A list of combinations suitable for `@combinations.generate`.
  """
  cases = [
      # Each test case is a tuple with input to get_seed:
      # (input_graph_seed, input_op_seed)
      # and output from get_seed:
      # (output_graph_seed, output_op_seed)
      (
          "TestCase_0",
          lambda: (None, None),
          lambda: (0, 0),
      ),
      ("TestCase_1", lambda: (None, 1), lambda:
       (random_seed.DEFAULT_GRAPH_SEED, 1)),
      ("TestCase_2", lambda: (1, 1), lambda: (1, 1)),
      (
          # Avoid nondeterministic (0, 0) output
          "TestCase_3",
          lambda: (0, 0),
          lambda: (0, 2**31 - 1)),
      (
          # Don't wrap to (0, 0) either
          "TestCase_4",
          lambda: (2**31 - 1, 0),
          lambda: (0, 2**31 - 1)),
      (
          # Wrapping for the other argument
          "TestCase_5",
          lambda: (0, 2**31 - 1),
          lambda: (0, 2**31 - 1)),
      (
          # Once more, with tensor-valued arguments
          "TestCase_6",
          lambda:
          (None, constant_op.constant(1, dtype=dtypes.int64, name="one")),
          lambda: (random_seed.DEFAULT_GRAPH_SEED, 1)),
      ("TestCase_7", lambda:
       (1, constant_op.constant(1, dtype=dtypes.int64, name="one")), lambda:
       (1, 1)),
      (
          "TestCase_8",
          lambda: (0, constant_op.constant(0, dtype=dtypes.int64, name="zero")),
          lambda: (0, 2**31 - 1)  # Avoid nondeterministic (0, 0) output
      ),
      (
          "TestCase_9",
          lambda:
          (2**31 - 1, constant_op.constant(0, dtype=dtypes.int64, name="zero")),
          lambda: (0, 2**31 - 1)  # Don't wrap to (0, 0) either
      ),
      (
          "TestCase_10",
          lambda:
          (0, constant_op.constant(
              2**31 - 1, dtype=dtypes.int64, name="intmax")),
          lambda: (0, 2**31 - 1)  # Wrapping for the other argument
      )
  ]

  def reduce_fn(x, y):
    # Unpack one case and append its named combination to the accumulator.
    name, input_fn, output_fn = y
    return x + combinations.combine(
        input_fn=combinations.NamedObject("input_fn.{}".format(name), input_fn),
        output_fn=combinations.NamedObject("output_fn.{}".format(name),
                                           output_fn))

  return functools.reduce(reduce_fn, cases, [])
class RandomSeedTest(test_base.DatasetTestBase, parameterized.TestCase):
  """Tests for tf.data's `random_seed.get_seed` seed derivation."""

  def _checkEqual(self, tinput, toutput):
    """Asserts that `get_seed` maps `tinput` to `toutput`.

    Args:
      tinput: Tuple `(graph_seed, op_seed)`; the graph seed is installed
        globally via `set_random_seed` and the op seed is passed to
        `data_random_seed.get_seed`.
      toutput: Expected `(graph_seed, op_seed)` result after evaluation.
    """
    random_seed.set_random_seed(tinput[0])
    g_seed, op_seed = data_random_seed.get_seed(tinput[1])
    g_seed = self.evaluate(g_seed)
    op_seed = self.evaluate(op_seed)
    msg = "test_case = {0}, got {1}, want {2}".format(tinput, (g_seed, op_seed),
                                                      toutput)
    self.assertEqual((g_seed, op_seed), toutput, msg=msg)

  @combinations.generate(
      combinations.times(test_base.default_test_combinations(),
                         _test_random_seed_combinations()))
  def testRandomSeed(self, input_fn, output_fn):
    # input_fn/output_fn are lazily-evaluated lambdas from
    # _test_random_seed_combinations (see NOTE at the top of the file).
    tinput, toutput = input_fn(), output_fn()
    self._checkEqual(tinput=tinput, toutput=toutput)
    # Reset the global graph seed so cases don't leak into each other.
    random_seed.set_random_seed(None)

  @combinations.generate(test_base.graph_only_combinations())
  def testIncrementalRandomSeed(self):
    # Graph mode only: with a fixed graph seed, successive get_seed(None)
    # calls yield incrementing op seeds (0, 1, ..., 9).
    random_seed.set_random_seed(1)
    for i in range(10):
      tinput = (1, None)
      toutput = (1, i)
      self._checkEqual(tinput=tinput, toutput=toutput)
if __name__ == '__main__':
test.main()
| Python | 4 | EricRemmerswaal/tensorflow | tensorflow/python/data/util/random_seed_test.py | [
"Apache-2.0"
] |
package com.baeldung.boot.jackson.config;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import static com.baeldung.boot.jackson.config.CoffeeConstants.LOCAL_DATETIME_SERIALIZER;
/**
 * Jackson configuration that supplies the application's primary
 * {@link ObjectMapper}.
 */
@Configuration
public class CoffeeObjectMapperConfig {

    /**
     * Builds the default {@link ObjectMapper}: registers a
     * {@link JavaTimeModule} carrying the shared
     * {@code LOCAL_DATETIME_SERIALIZER}, and omits {@code null} fields from
     * serialized output ({@code JsonInclude.Include.NON_NULL}).  Marked
     * {@code @Primary} so it takes precedence over other mapper beans.
     */
    @Bean
    @Primary
    public ObjectMapper objectMapper() {
        JavaTimeModule module = new JavaTimeModule();
        module.addSerializer(LOCAL_DATETIME_SERIALIZER);
        return new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL)
            .registerModule(module);
    }
}
| Java | 4 | DBatOWL/tutorials | spring-boot-modules/spring-boot-data-2/src/main/java/com/baeldung/boot/jackson/config/CoffeeObjectMapperConfig.java | [
"MIT"
] |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Jannis Leidel <jannis@leidel.info>, 2011
# Jure Cuhalev <gandalf@owca.info>, 2012
# Primož Verdnik <primoz.verdnik@gmail.com>, 2017
# zejn <zejn@kiberpipa.org>, 2013,2016-2017
# zejn <zejn@kiberpipa.org>, 2011-2013
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2017-09-24 13:46+0200\n"
"PO-Revision-Date: 2017-12-03 15:49+0000\n"
"Last-Translator: zejn <zejn@kiberpipa.org>\n"
"Language-Team: Slovenian (http://www.transifex.com/django/django/language/"
"sl/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: sl\n"
"Plural-Forms: nplurals=4; plural=(n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n"
"%100==4 ? 2 : 3);\n"
msgid "Personal info"
msgstr "Osebni podatki"
msgid "Permissions"
msgstr "Dovoljenja"
msgid "Important dates"
msgstr "Pomembni datumi"
#, python-format
msgid "%(name)s object with primary key %(key)r does not exist."
msgstr "Objekt %(name)s z glavnim ključem %(key)r ne obstaja."
msgid "Password changed successfully."
msgstr "Geslo je uspešno spremenjeno."
#, python-format
msgid "Change password: %s"
msgstr "Spremeni geslo: %s"
msgid "Authentication and Authorization"
msgstr "Avtentikacija in avtorizacija"
msgid "password"
msgstr "geslo"
msgid "last login"
msgstr "zadnja prijava"
msgid "No password set."
msgstr "Geslo ni nastavljeno."
msgid "Invalid password format or unknown hashing algorithm."
msgstr "Neveljavna oblika gesla ali neznan algoritem razpršila."
msgid "The two password fields didn't match."
msgstr "Gesli se ne ujemata."
msgid "Password"
msgstr "Geslo"
msgid "Password confirmation"
msgstr "Potrditev gesla"
msgid "Enter the same password as before, for verification."
msgstr "Vnesite isto geslo kot prej, za preverjanje."
msgid ""
"Raw passwords are not stored, so there is no way to see this user's "
"password, but you can change the password using <a href=\"{}\">this form</a>."
msgstr ""
"Čitljiva gesla se ne shranjujejo, tako da ni možno videti gesla tega "
"uporabnika, lahko pa geslo spremenite z uporabo <a href=\"{}\">tega obrazca</"
"a>."
#, python-format
msgid ""
"Please enter a correct %(username)s and password. Note that both fields may "
"be case-sensitive."
msgstr ""
"Vnesite veljavno %(username)s in geslo. Opomba: obe polji upoštevata "
"velikost črk."
msgid "This account is inactive."
msgstr "Ta uporabniški račun ni dejaven."
msgid "Email"
msgstr "Elektronski naslov"
msgid "New password"
msgstr "Novo geslo"
msgid "New password confirmation"
msgstr "Potrditev novega gesla"
msgid "Your old password was entered incorrectly. Please enter it again."
msgstr "Vaše staro geslo ni vneseno pravilno. Poskusite znova."
msgid "Old password"
msgstr "Staro geslo"
msgid "Password (again)"
msgstr "Geslo (znova)"
msgid "algorithm"
msgstr "algoritem"
msgid "iterations"
msgstr "ponovitev"
msgid "salt"
msgstr "naključna vrednost"
msgid "hash"
msgstr "razpršilo"
msgid "variety"
msgstr "pestrost"
msgid "version"
msgstr "različica"
msgid "memory cost"
msgstr "pomnilniška zahtevnost"
msgid "time cost"
msgstr "časovna zahtevnost"
msgid "parallelism"
msgstr "paralelnost"
msgid "work factor"
msgstr "faktor obremenitve"
msgid "checksum"
msgstr "nadzorna vsota"
msgid "name"
msgstr "ime"
msgid "content type"
msgstr "vrsta vsebine"
msgid "codename"
msgstr "kodno ime"
msgid "permission"
msgstr "dovoljenje"
msgid "permissions"
msgstr "dovoljenja"
msgid "group"
msgstr "skupina"
msgid "groups"
msgstr "skupine"
msgid "superuser status"
msgstr "stanje skrbnika"
msgid ""
"Designates that this user has all permissions without explicitly assigning "
"them."
msgstr ""
"Določi, da bo uporabnik imel vsa dovoljenja, ne da bi mu jih posebej "
"dodelili."
msgid ""
"The groups this user belongs to. A user will get all permissions granted to "
"each of their groups."
msgstr ""
"Skupine, katerih član je ta uporabnik. Uporabnik pridobi vsa dovoljenja, "
"povezana z vsako izmed skupin."
msgid "user permissions"
msgstr "uporabniška dovoljenja"
msgid "Specific permissions for this user."
msgstr "Dovoljenja specifična za tega uporabnika."
msgid "username"
msgstr "uporabniško ime"
msgid "Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only."
msgstr "Zahtevano. 150 znakov ali manj. Le črke, števke in znaki @/./+/-/_."
msgid "A user with that username already exists."
msgstr "Uporabnik s tem uporabniškim imenom že obstaja."
msgid "first name"
msgstr "ime"
msgid "last name"
msgstr "priimek"
msgid "email address"
msgstr "elektronski naslov"
msgid "staff status"
msgstr "stanje osebja"
msgid "Designates whether the user can log into this admin site."
msgstr "Določi, ali se sme uporabnik prijaviti kot skrbnik."
msgid "active"
msgstr "dejaven"
msgid ""
"Designates whether this user should be treated as active. Unselect this "
"instead of deleting accounts."
msgstr ""
"Določi, ali je uporabnik dejaven. Možnost je priročna in preprečuje brisanje "
"računov."
msgid "date joined"
msgstr "vpisan od"
msgid "user"
msgstr "uporabnik"
msgid "users"
msgstr "uporabniki"
#, python-format
msgid ""
"This password is too short. It must contain at least %(min_length)d "
"character."
msgid_plural ""
"This password is too short. It must contain at least %(min_length)d "
"characters."
msgstr[0] "To geslo je prekratko. Imeti mora vsaj %(min_length)d znak."
msgstr[1] "To geslo je prekratko. Imeti mora vsaj %(min_length)d znaka."
msgstr[2] "To geslo je prekratko. Imeti mora vsaj %(min_length)d znake."
msgstr[3] "To geslo je prekratko. Imeti mora vsaj %(min_length)d znakov."
#, python-format
msgid "Your password must contain at least %(min_length)d character."
msgid_plural "Your password must contain at least %(min_length)d characters."
msgstr[0] "Vaše geslo mora imeti vsaj %(min_length)d znak."
msgstr[1] "Vaše geslo mora imeti vsaj %(min_length)d znaka."
msgstr[2] "Vaše geslo mora imeti vsaj %(min_length)d znake."
msgstr[3] "Vaše geslo mora imeti vsaj %(min_length)d znakov."
#, python-format
msgid "The password is too similar to the %(verbose_name)s."
msgstr "Geslo je preveč podobno %(verbose_name)s."
msgid "Your password can't be too similar to your other personal information."
msgstr "Geslo ne sme biti preveč podobno ostalim vašim osebnim podatkom."
msgid "This password is too common."
msgstr "Geslo je eno izmed preveč pogosto uporabljanih."
msgid "Your password can't be a commonly used password."
msgstr "Vaše geslo ne sme biti eno izmed pogosto uporabljanih gesel."
msgid "This password is entirely numeric."
msgstr "Vaše geslo je sestavljeno samo iz števk."
msgid "Your password can't be entirely numeric."
msgstr "Vaše geslo ne sme biti sestavljeno samo iz števk."
#, python-format
msgid "Password reset on %(site_name)s"
msgstr "Geslo na %(site_name)s je ponastavljeno."
msgid ""
"Enter a valid username. This value may contain only English letters, "
"numbers, and @/./+/-/_ characters."
msgstr ""
"Vnesite veljavno uporabniško ime. Ta vrednost sme vsebovati le angleške "
"črke, števke in znake @/./+/-/_."
msgid ""
"Enter a valid username. This value may contain only letters, numbers, and "
"@/./+/-/_ characters."
msgstr ""
"Vnesite veljavno uporabniško ime. Ta vrednost sme vsebovati le črke, števke "
"in znake @/./+/-/_."
msgid "Logged out"
msgstr "Odjavljen"
msgid "Password reset"
msgstr "Ponastavi geslo"
msgid "Password reset sent"
msgstr "Navodila za ponastavitev gesla so bila poslana"
msgid "Enter new password"
msgstr "Vnesite novo geslo"
msgid "Password reset unsuccessful"
msgstr "Ponastavitev gesla ni uspela"
msgid "Password reset complete"
msgstr "Ponastavitev gesla zaključena"
msgid "Password change"
msgstr "Sprememba gesla"
msgid "Password change successful"
msgstr "Sprememba gesla je uspela"
| Gettext Catalog | 3 | jpmallarino/django | django/contrib/auth/locale/sl/LC_MESSAGES/django.po | [
"BSD-3-Clause",
"0BSD"
] |
TestSuite[
{ "TestArithmetic.mt"
, "TestString.mt"
}
]
| Mathematica | 2 | JavascriptID/sourcerer-app | src/test/resources/samples/langs/Mathematica/TestSuite.mt | [
"MIT"
] |
'Run in glfw3 and resize window!
Import mojo2
#GLFW_WINDOW_WIDTH=800
#GLFW_WINDOW_HEIGHT=400
#GLFW_WINDOW_RESIZABLE=True
#GLFW_WINDOW_RENDER_WHILE_RESIZING=True
Const VWIDTH:=320
Const VHEIGHT:=240
Class MyApp Extends App
Field canvas:Canvas
Field splitScreen:Bool
Method OnCreate()
canvas=New Canvas
End
Method OnUpdate()
If KeyHit( KEY_SPACE ) splitScreen=Not splitScreen
end
	'Compute a letterboxed viewport for a vwidth x vheight virtual screen
	'inside devrect, preserving aspect ratio.  Both rects are [x,y,width,height];
	'the result is written into vprect in place.
	Method CalcLetterbox:Void( vwidth:Float,vheight:Float,devrect:Int[],vprect:Int[] )
		Local vaspect:=vwidth/vheight
		Local daspect:=Float(devrect[2])/devrect[3]
		If daspect>vaspect
			'Device is wider: use full height, scale width, centre horizontally.
			vprect[2]=devrect[3]*vaspect
			vprect[3]=devrect[3]
			vprect[0]=(devrect[2]-vprect[2])/2+devrect[0]
			vprect[1]=devrect[1]
		Else
			'Device is taller: use full width, scale height, centre vertically.
			vprect[2]=devrect[2]
			vprect[3]=devrect[2]/vaspect
			vprect[0]=devrect[0]
			vprect[1]=(devrect[3]-vprect[3])/2+devrect[1]
		Endif
	End
Method RenderScene:Void( msg:String,devrect:Int[] )
Local vprect:Int[4]
CalcLetterbox( VWIDTH,VHEIGHT,devrect,vprect )
canvas.SetViewport vprect[0],vprect[1],vprect[2],vprect[3]
canvas.SetProjection2d 0,VWIDTH,0,VHEIGHT
canvas.Clear 0,0,1
canvas.DrawText msg,VWIDTH/2,VHEIGHT/2,.5,.5
End
Method OnRender()
canvas.SetViewport 0,0,DeviceWidth,DeviceHeight
canvas.Clear 0,0,0
If splitScreen
Local h:=DeviceHeight/2
RenderScene( "PLAYER 1 READY",[0,0,DeviceWidth,h] )
RenderScene( "PLAYER 2 READY",[0,h,DeviceWidth,h] )
Else
RenderScene( "SPACE TO TOGGLE SPLITSCREEN",[0,0,DeviceWidth,DeviceHeight] )
Endif
canvas.Flush
End
End
Function Main()
New MyApp
End
| Monkey | 4 | blitz-research/monkey | modules/mojo2/bananas/letterbox/letterbox.monkey | [
"Zlib"
] |
\ ------------------------------------------------------------------------
\ An interface for listing processes and current status of threads
\ ------------------------------------------------------------------------
kernel32 2 dllfun CreateToolhelp32Snapshot CreateToolhelp32Snapshot
kernel32 2 dllfun Process32First Process32First
kernel32 2 dllfun Process32Next Process32Next
kernel32 2 dllfun Thread32First Thread32First
kernel32 2 dllfun Thread32Next Thread32Next
kernel32 3 dllfun OpenProcess OpenProcess
kernel32 3 dllfun OpenThread OpenThread
kernel32 1 dllfun SuspendThread SuspendThread
kernel32 1 dllfun ResumeThread ResumeThread
kernel32 2 dllfun GetThreadContext GetThreadContext
private
: align 64 here 64 /mod drop - allot ;
hex
: PROCESS_ALL_ACCESS 1f0fff ;
: THREAD_ALL_ACCESS 1f03ff ;
: TH32CS_INHERIT 80000000 ;
: TH32CS_SNAPALL 0 ;
: TH32CS_SNAPHEAPLIST 1 ;
: TH32CS_SNAPMODULE 8 ;
: TH32CS_SNAPMODULE32 10 ;
: TH32CS_SNAPPROCESS 2 ;
: TH32CS_SNAPTHREAD 4 ;
dec
create TOOLMEM 304 allot
: set-snapprocess TOOLMEM 304 over ! dup 8 + 296 zero ;
: set-snapthreads TOOLMEM 28 over ! dup 8 + 20 zero ;
variable TOOLXT
variable TOOLHD
variable TOOLset
variable TOOL1st
variable TOOLnxt
\ Inner loop: run the user callback (TOOLXT) on the current TOOLMEM record,
\ then fetch the next record and repeat while one is available.
\ NOTE(review): `tail` looks like this system's tail-call word -- confirm
\ against the EvilVM Forth documentation.
: *tool32
  TOOLMEM TOOLXT @ execute
  TOOLHD @ TOOLset @ execute TOOLnxt @ execute
  if tail then ;

\ Middle layer.  Redefinition: the bare `*tool32` in the body resolves to
\ the PREVIOUS definition above.  Stores the callback xt, fetches the first
\ record, loops over the rest, then closes the snapshot handle; prints an
\ error if the first fetch fails.
: *tool32
  TOOLXT !
  TOOLHD @ TOOLset @ execute TOOL1st @ execute if
    *tool32
    TOOLHD @ CloseHandle drop
  else
    .err
  then ;

\ Entry point: take a toolhelp snapshot, stash the iteration xts in the
\ TOOL* variables, then delegate to the previous definition to walk it.
: *tool32 ( eachxt nextxt firstxt setupxt snap )
  0 CreateToolhelp32Snapshot TOOLHD !
  TOOLset !
  TOOL1st !
  TOOLnxt !
  *tool32 ;

\ Run xt once per thread in the system-wide thread snapshot.
: *threads ( xt )
  ['] Thread32Next
  ['] Thread32First
  ['] set-snapthreads
  TH32CS_SNAPTHREAD
  *tool32 ;

\ Run xt once per process in the process snapshot.
: *processes ( xt )
  ['] Process32Next
  ['] Process32First
  ['] set-snapprocess
  TH32CS_SNAPPROCESS
  *tool32 ;
variable PID
create CONTEXT align here 1232 allot does> drop [ rot litq ] ;
hex
: CONTEXT_CONTROL 1 ;
: CONTEXT_INTEGER 2 ;
: CONTEXT_ALL 10001f ;
dec
: >CONTEXT
CONTEXT dup 1232 zero
CONTEXT_ALL CONTEXT 48 + d! ;
: nq dup 8 + swap @ 18 .r space ;
: .context
hex 112 +
." Rax: " nq ." Rcx: " nq ." Rdx: " nq ." Rbx: " nq cr
." Rsp: " nq ." Rbp: " nq ." Rsi: " nq ." Rdi: " nq cr
." R8: " nq ." R9: " nq ." R10: " nq ." R11: " nq cr
." R12: " nq ." R13: " nq ." R14: " nq ." R15: " nq cr
." Rip: " nq cr
dec drop ;
: @thread ( tid )
THREAD_ALL_ACCESS 0 rot OpenThread dup if
dup SuspendThread 0 >= if
dup >CONTEXT GetThreadContext if
CONTEXT .context
else
.err
then
dup ResumeThread 4000000000 > if
.err ." Resume fail\n"
then
else
.err
then
CloseHandle drop
else
.err
then ;
: .threads*
dup 12 + d@ PID @ = if
cyan ." Thread: "
8 + d@ dup . cr magenta
@thread cr
else drop then ;
: .ps
dup 8 + d@ ." PID: " 8 cyan .r magenta
dup 28 + d@ ." Threads: " 8 cyan .r magenta
dup 28 + d@ ." PPID: " 8 cyan .r magenta
44 + ." Image: " green .cstring magenta cr ;
public{
: .threads .pre PID ! ['] .threads* *threads .post ;
: .ps .pre ['] .ps *processes .post ;
}public
| Forth | 3 | jephthai/EvilVM | samples/threads.fth | [
"MIT"
] |
<?xml version="1.0" encoding="UTF-8"?><rss version="2.0"
xmlns:content="http://purl.org/rss/1.0/modules/content/"
xmlns:wfw="http://wellformedweb.org/CommentAPI/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
>
<channel>
<title>Maxwell Land Surveying</title>
<atom:link href="https://maxwell-land-surveying.com/feed" rel="self" type="application/rss+xml" />
<link>https://maxwell-land-surveying.com</link>
<description>Surveying For Over 50 Years</description>
<lastBuildDate>Wed, 03 Jan 2018 01:58:57 +0000</lastBuildDate>
<language>en-US</language>
<sy:updatePeriod>hourly</sy:updatePeriod>
<sy:updateFrequency>1</sy:updateFrequency>
<generator>https://wordpress.org/?v=4.9.6</generator>
<item>
<title>Using GPS to Track You – Is It Real?</title>
<link>https://maxwell-land-surveying.com/gps-track-real-2</link>
<comments>https://maxwell-land-surveying.com/gps-track-real-2#comments</comments>
<pubDate>Mon, 08 Oct 2012 10:25:49 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<category><![CDATA[blog]]></category>
<category><![CDATA[land surveying]]></category>
<category><![CDATA[can gps find me?]]></category>
<category><![CDATA[gps munford]]></category>
<category><![CDATA[gps tracking]]></category>
<category><![CDATA[how gps tracking can find me]]></category>
<category><![CDATA[how gps tracking works]]></category>
<category><![CDATA[land survey munford]]></category>
<category><![CDATA[munford gps]]></category>
<category><![CDATA[munford land surveying]]></category>
<guid isPermaLink="false">http://maxwell-land-surveying.com/?p=682</guid>
<description><![CDATA[Many have heard of “Big Brother” being able to track our movements. One of my favorite TV shows is “Person of Interest” where a secret machine keeps track of all conversations and people’s “criminal intent” in order to help the … <a href="https://maxwell-land-surveying.com/gps-track-real-2">Continue reading <span class="meta-nav">→</span></a>]]></description>
<wfw:commentRss>https://maxwell-land-surveying.com/gps-track-real-2/feed</wfw:commentRss>
<slash:comments>1</slash:comments>
</item>
<item>
<title>Land Surveying – How Do I Know if I Need A Land Survey?</title>
<link>https://maxwell-land-surveying.com/land-surveying-land-survey</link>
<pubDate>Sat, 15 Sep 2012 06:00:36 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<category><![CDATA[blog]]></category>
<category><![CDATA[land surveying]]></category>
<category><![CDATA[how to know if I need land surveying]]></category>
<category><![CDATA[land surveying munford]]></category>
<category><![CDATA[land surveying munford al]]></category>
<guid isPermaLink="false">http://maxwell-land-surveying.com/?p=670</guid>
<description><![CDATA[Land surveying is needed for different reasons. It’s not inexpensive, but there’s also no other better way to protect your home and property than to hire a professional land surveyor. Typically, the client is handed a copy of the survey … <a href="https://maxwell-land-surveying.com/land-surveying-land-survey">Continue reading <span class="meta-nav">→</span></a>]]></description>
</item>
<item>
<title>The Land Surveying Process</title>
<link>https://maxwell-land-surveying.com/land-surveying-process</link>
<pubDate>Thu, 09 Aug 2012 12:13:02 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<category><![CDATA[blog]]></category>
<category><![CDATA[land surveying]]></category>
<category><![CDATA[land surveying munford]]></category>
<category><![CDATA[land surveying munford al]]></category>
<category><![CDATA[land surveying process]]></category>
<category><![CDATA[land surveying process munford al]]></category>
<guid isPermaLink="false">http://maxwell-land-surveying.com/?p=634</guid>
<description><![CDATA[There can be a dozen reasons why anybody would need land surveying. One of the most popular is to check the boundaries of a property, to determine if a property is prone to flooding (a certificate for this is usually … <a href="https://maxwell-land-surveying.com/land-surveying-process">Continue reading <span class="meta-nav">→</span></a>]]></description>
</item>
<item>
<title>Land Surveying: The “Pincushion” Effect</title>
<link>https://maxwell-land-surveying.com/land-surveying-pincushion-effect</link>
<pubDate>Wed, 16 May 2012 13:06:06 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<category><![CDATA[blog]]></category>
<category><![CDATA[land surveying]]></category>
<category><![CDATA[land surveyor]]></category>
<category><![CDATA[Uncategorized]]></category>
<category><![CDATA[land survey pincushion effect]]></category>
<category><![CDATA[land surveying munford]]></category>
<category><![CDATA[land surveying munford al]]></category>
<category><![CDATA[pincushion effect]]></category>
<guid isPermaLink="false">http://maxwell-land-surveying.com/?p=541</guid>
<description><![CDATA[Boundary monuments are an important part of land surveying. “Monuments” are what we call the objects used to mark the corners during a land survey to know where the property boundary lines are and the property begins and ends. Monuments … <a href="https://maxwell-land-surveying.com/land-surveying-pincushion-effect">Continue reading <span class="meta-nav">→</span></a>]]></description>
</item>
</channel>
</rss>
| DIGITAL Command Language | 3 | hsantos9/Winds | api/test/data/feed/maxwell-land-surveying.com | [
"BSD-3-Clause"
] |
module ReactiveJQueryTest where
import Prelude ((+), (++), (<$>), (<*>), ($), (<<<), flip, return, show)
import Control.Monad
import Control.Monad.Eff
import Control.Monad.JQuery
import Control.Reactive
import Control.Reactive.JQuery
import Data.Array (map, head, length)
import Data.Foldable
import Data.Foreign
import Data.Maybe
import Data.Monoid
import Data.Traversable
import Debug.Trace
import Global (parseInt)
main = do
personDemo
todoListDemo
greet firstName lastName = "Hello, " ++ firstName ++ " " ++ lastName ++ "!"
personDemo = do
-- Create new reactive variables to hold the user's names
firstName <- newRVar "John"
lastName <- newRVar "Smith"
-- Get the document body
b <- body
-- Create a text box for the first name
firstNameDiv <- create "<div>"
firstNameInput <- create "<input>"
"First Name: " `appendText` firstNameDiv
firstNameInput `append` firstNameDiv
firstNameDiv `append` b
-- Create a text box for the last name
lastNameDiv <- create "<div>"
lastNameInput <- create "<input>"
"Last Name: " `appendText` lastNameDiv
lastNameInput `append` lastNameDiv
lastNameDiv `append` b
-- Bind the text box values to the name variables
bindValueTwoWay firstName firstNameInput
bindValueTwoWay lastName lastNameInput
-- Create a paragraph to display a greeting
greeting <- create "<p>"
{ color: "red" } `css` greeting
greeting `append` b
-- Bind the text property of the greeting paragraph to a computed property
let greetingC = greet <$> toComputed firstName <*> toComputed lastName
bindTextOneWay greetingC greeting
todoListDemo = do
-- Get the document body
b <- body
-- Create an array
arr <- newRArray
text1 <- newRVar "Learn PureScript"
comp1 <- newRVar false
insertRArray arr { text: text1, completed: comp1 } 0
ul <- create "<ul>"
-- Bind the ul to the array
bindArray arr ul $ \entry indexR -> do
li <- create "<li>"
completedInput <- create "<input>"
setAttr "type" "checkbox" completedInput
completedInput `append` li
sub1 <- bindCheckedTwoWay entry.completed completedInput
textInput <- create "<input>"
textInput `append` li
sub2 <- bindValueTwoWay entry.text textInput
btn <- create "<button>"
"Remove" `appendText` btn
flip (on "click") btn $ do
index <- readRVar indexR
removeRArray arr index
btn `append` li
return { el: li, subscription: sub1 <> sub2 }
ul `append` b
-- Add button
newEntryDiv <- create "<div>"
btn <- create "<button>"
"Add" `appendText` btn
btn `append` newEntryDiv
newEntryDiv `append` b
flip (on "click") btn $ do
text <- newRVar ""
completed <- newRVar false
arr' <- readRArray arr
insertRArray arr { text: text, completed: completed } (length arr')
-- Create a paragraph to display the next task
nextTaskLabel <- create "<p>"
nextTaskLabel `append` b
let nextTask = do
task <- head <$> toComputedArray arr
case task of
Nothing -> return "Done!"
Just { text = text } -> (++) "Next task: " <$> toComputed text
bindTextOneWay nextTask nextTaskLabel
-- Create a paragraph to display the task counter
counterLabel <- create "<p>"
counterLabel `append` b
let counter = (flip (++) " tasks remaining") <<< show <$> do
rs <- toComputedArray arr
cs <- map (\c -> if c then 0 else 1) <$> traverse (\entry -> toComputed entry.completed) rs
return $ foldl (+) 0 cs
bindTextOneWay counter counterLabel
| PureScript | 4 | JavascriptID/sourcerer-app | src/test/resources/samples/langs/PureScript/ReactiveJQueryTest.purs | [
"MIT"
] |
@import '{my-package}/package-export.styl'
@import '{}/client/app-export.import.styl'
.app-level {
background-color: chocolate
}
| Stylus | 1 | joseconstela/meteor | tools/tests/apps/app-using-stylus/client/app.styl | [
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] |
Package: {{{packageName}}}
Title: R Package Client for {{{appName}}}
Version: {{packageVersion}}
Authors@R: person("Swagger Codegen community", email = "apiteam@swagger.io", role = c("aut", "cre"))
Description: {{{appDescription}}}{{^appDescription}}R Package Client for {{{appName}}}{{/appDescription}}
Depends: R (>= 3.3.3)
Encoding: UTF-8
License: Unlicense
LazyData: true
Suggests: testthat
Imports: jsonlite, httr, R6
RoxygenNote: 6.0.1.9000
| HTML+Django | 3 | derBiggi/swagger-codegen | modules/swagger-codegen/src/main/resources/r/description.mustache | [
"Apache-2.0"
] |
$! To compile mttest on VMS.
$!
$! WARNING: only tested with DEC C so far.
$
$ if (f$getsyi("cpu").lt.128)
$ then
$ arch := VAX
$ else
$ arch = f$edit( f$getsyi( "ARCH_NAME"), "UPCASE")
$ if (arch .eqs. "") then arch = "UNK"
$ endif
$ define/user openssl [--.include.openssl]
$ cc/def=PTHREADS mttest.c
$ link mttest,[--.'arch'.exe.ssl]libssl/lib,[--.'arch'.exe.crypto]libcrypto/lib
| DIGITAL Command Language | 3 | madanagopaltcomcast/pxCore | examples/pxScene2d/external/openssl-1.0.2o/crypto/threads/pthreads-vms.com | [
"Apache-2.0"
] |
/**
* @typedef {{
* id: number,
* fn: !Function,
* context: (!Object|undefined)
* }}
* @private
*/
goog.dom.animationFrame.Task_;
/**
* @typedef {{
* measureTask: goog.dom.animationFrame.Task_,
* mutateTask: goog.dom.animationFrame.Task_,
* state: (!Object|undefined),
* args: (!Array|undefined),
* isScheduled: boolean
* }}
* @private
*/
goog.dom.animationFrame.TaskSet_; | TypeScript | 4 | sbj42/vscode | extensions/vscode-colorize-tests/test/colorize-fixtures/test-jsdoc-multiline-type.ts | [
"MIT"
] |
import io.vertx.ceylon.platform {
Verticle,
Container
}
import io.vertx.ceylon.core {
Vertx
}
import io.vertx.ceylon.core.http {
HttpServerRequest
}
// Vert.x verticle that serves a fixed HTML greeting over HTTPS on port 4443.
shared class ServerExample() extends Verticle() {
    // Entry point invoked by the Vert.x container when the verticle is deployed.
    shared actual void start(Vertx vertx, Container container) {
        // Log each request's URI and headers, then answer with a small HTML page.
        value server = vertx.createHttpServer().requestHandler(void(HttpServerRequest req) {
            print("Got request: ``req.uri``");
            print("Headers are: ");
            for (header in req.headers) {
                print("``header.key``:``header.item``");
            }
            req.response.headers { "Content-Type"->"text/html; charset=UTF-8" };
            // Chunked transfer encoding: no Content-Length needs to be computed.
            req.response.chunked = true;
            req.response.write(["<html><body><h1>Hello from vert.x!</h1></body></html>", "UTF-8"]).end();
        });
        // Enable TLS backed by the bundled Java keystore.
        // NOTE(review): hard-coded keystore password — acceptable for an example only.
        server.ssl = true;
        server.keyStorePath = "server-keystore.jks";
        server.keyStorePassword = "wibble";
        server.listen(4443);
    }
}
| Ceylon | 4 | vietj/vertx-examples | src/raw/ceylon/https/ServerExample.ceylon | [
"Apache-2.0"
] |
syntax = "proto3";
package tensorflow.profiler;
// All possible execution modes of a tf-function.
enum TfFunctionExecutionMode {
// Yet to be set.
INVALID_MODE = 0;
// Eager execution.
EAGER_MODE = 1;
// Graph execution with tracing.
TRACED_MODE = 2;
// Graph execution without tracing.
NOT_TRACED_MODE = 3;
// Concrete function.
CONCRETE_MODE = 4;
}
// All possible compilers that can be used to compile a tf-function in the graph
// mode.
enum TfFunctionCompiler {
// Yet to be set.
INVALID_COMPILER = 0;
// Any other compiler.
OTHER_COMPILER = 1;
// If some instance of the function is compiled with XLA and some is compiled
// with Non-XLA, use "MIXED_COMPILER".
MIXED_COMPILER = 2;
// XLA compiler.
XLA_COMPILER = 3;
// MLIR compiler.
MLIR_COMPILER = 4;
}
// Metrics associated with a particular execution mode of a tf-function.
message TfFunctionMetrics {
// Number of invocations to the function in that execution mode.
uint64 count = 1;
// The sum of "self-execution" time of this function over those invocations.
uint64 self_time_ps = 2;
}
// Statistics for a tf-function.
message TfFunction {
// A map from each execution mode to its corresponding metrics.
map<int32, TfFunctionMetrics> metrics = 1;
// Total tracing count from the program's beginning (i.e. beyond the profiling
// period) of this tf-function.
int64 total_tracing_count = 2;
// Compiler used to compile this function.
TfFunctionCompiler compiler = 3;
// Percentage of time spent in the expensive calls to this function in the
// profiled period.
double expensive_call_percent = 4;
}
// Statistics for all tf-functions.
message TfFunctionDb {
// A map from function name to the statistics of that function.
map<string, TfFunction> tf_functions = 1;
}
| Protocol Buffer | 5 | yage99/tensorflow | tensorflow/core/profiler/protobuf/tf_function.proto | [
"Apache-2.0"
] |
//
// YMMessageManager.h
// WeChatExtension
//
// Created by WeChatExtension on 2018/4/23.
// Copyright © 2018年 WeChatExtension. All rights reserved.
//
#import <Foundation/Foundation.h>
// Singleton wrapper around WeChat's private messaging services: sending text,
// reading chat history, clearing unread state, playing voice messages, and
// revoking sent messages.
@interface YMMessageManager : NSObject

/// Shared singleton instance.
+ (instancetype)shareManager;

/// Sends a text message to the current user's own chat.
- (void)sendTextMessageToSelf:(id)msgContent;

/// Sends a text message to the user identified by toUser after a delay.
/// NOTE(review): delayTime unit (seconds vs. milliseconds) is not visible in
/// this header — confirm at call sites.
- (void)sendTextMessage:(id)msgContent toUsrName:(id)toUser delay:(NSInteger)delayTime;

/// Clears the unread marker for the given session object.
- (void)clearUnRead:(id)arg1;

/// Extracts a human-readable string from a message object.
- (NSString *)getMessageContentWithData:(MessageData *)msgData;

/// Returns up to limitCnt messages of a chat, starting from minMesLocalId.
- (NSArray *)getMsgListWithChatName:(id)arg1 minMesLocalId:(unsigned int)arg2 limitCnt:(NSInteger)arg3;

/// Plays the audio attached to a voice message.
- (void)playVoiceWithMessageData:(MessageData *)msgData;

/// Asynchronously revokes (recalls) a previously sent message.
- (void)asyncRevokeMessage:(MessageData *)revokeMsgData;

@end
| C | 3 | TJRoger/WeChatExtension-ForMac | WeChatExtension/WeChatExtension/Sources/Managers/YMMessageManager.h | [
"MIT"
] |
package com.baeldung.hamcrest.custommatchers;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
/**
 * Hamcrest matcher that matches an {@link Integer} when it is evenly
 * divisible by a fixed divider.
 *
 * <p>Use via the static factory: {@code assertThat(10, divisibleBy(5))}.
 */
public class IsDivisibleBy extends TypeSafeMatcher<Integer> {

    /** Divisor candidates are tested against; never reassigned after construction. */
    private final Integer divider;

    private IsDivisibleBy(Integer divider) {
        this.divider = divider;
    }

    /**
     * Matches when {@code dividend % divider == 0}.
     * A divider of zero never matches (division by zero is undefined).
     */
    @Override
    protected boolean matchesSafely(Integer dividend) {
        if (divider == 0) {
            return false;
        }
        return (dividend % divider) == 0;
    }

    @Override
    public void describeTo(Description description) {
        description.appendText("divisible by " + divider);
    }

    /**
     * Explains why a value failed to match, e.g. {@code "remainder was <2>"},
     * instead of Hamcrest's generic "was <7>" default.
     */
    @Override
    protected void describeMismatchSafely(Integer dividend, Description mismatchDescription) {
        if (divider == 0) {
            mismatchDescription.appendText("divider was zero");
        } else {
            mismatchDescription.appendText("remainder was ").appendValue(dividend % divider);
        }
    }

    /**
     * Creates a matcher that checks divisibility by the given divider.
     *
     * @param divider the divisor; zero yields a matcher that never matches
     * @return a matcher for integer divisibility
     */
    public static Matcher<Integer> divisibleBy(Integer divider) {
        return new IsDivisibleBy(divider);
    }
}
| Java | 5 | DBatOWL/tutorials | testing-modules/hamcrest/src/test/java/com/baeldung/hamcrest/custommatchers/IsDivisibleBy.java | [
"MIT"
] |
//This file is part of "GZE - GroundZero Engine"
//The permisive licence allow to use GZE for free or commercial project (Apache License, Version 2.0).
//For conditions of distribution and use, see copyright notice in Licence.txt, this license must be included with any distribution of the code.
package {
import flash.display.InteractiveObject;
import GZ.Sys.Interface.Window;
import GZ.Gfx.Shape;
import GZ.Gfx.Root;
import GZ.Gfx.Triangle;
import GZ.Gfx.Object;
import GZ.Gfx.File.FilesImg;
import GZ.Base.Math.Math;
public class BaseForm extends Object {
public var aFile : CArray<Int, 1>;
public var oImage : FilesImg;
public var aPoint : CArray<Float, 1, 9>;
public var aPtSource: CArray<Float, 1, 6>;
private var rPtHT : CArray<Float, 1>;
private var rPtHL : CArray<Float, 1>;
private var rPtHR : CArray<Float, 1>;
private var rPtHB : CArray<Float, 1>;
public var oTriangle1 : Triangle;
public function BaseForm( _oParent : Root, _nX: Float, _nY:Float, _sPath:String, _bCenter:Bool = true, _nCenterX:Int = 0, _nCenterY:Int = 0, _bSmoothBorder:Bool = true, _bDrawLine:Bool = false):Void {
Object(_oParent, _nX , _nY);
oImage = new Image();
oImage.fOpen(_sPath);
//oPng.fOpenFromMemory(aFile, 512000);
aPixelArray = oImage.fGetImage();
nWidth = oImage.fGetWidth();
nHeight = oImage.fGetHeight();
sPath = _sPath;
oGpuObj.fSetImg(aPixelArray[0], nWidth, nHeight);
//Embed.fCreateImg(this);
if (_bCenter) {
_nCenterX = nWidth / 2;
_nCenterY = nHeight / 2;
}
var _nBorder : Float = 2;
//////////////////////////
aPoint[0] = (nWidth-1) / 2;
aPoint[1] = (nHeight-1) / 2;
aPoint[2] = 0;
aPoint[3] = nWidth -1;
aPoint[4] = 0;
aPoint[5] = 0;
aPoint[6] = nWidth -1;
aPoint[7] = nHeight -1;
aPoint[8] = 0;
//////////////////////////
aPtSource[0] = (nWidth-1) / 2;
aPtSource[1] = (nHeight-1) / 2;
aPtSource[2] = nWidth - 1;
aPtSource[3] = 0;
aPtSource[4] = nWidth - 1;
aPtSource[5] = nHeight - 1;
//////////////////
aPoint[0] -= _nCenterX;
aPoint[1] -= _nCenterY;
aPoint[3] -= _nCenterX;
aPoint[4] -= _nCenterY;
aPoint[6] -= _nCenterX;
aPoint[7] -= _nCenterY;
aPtSource[0] += _nBorder;
aPtSource[1] += _nBorder;
aPtSource[2] += _nBorder;
aPtSource[3] += _nBorder;
aPtSource[4] += _nBorder;
aPtSource[5] += _nBorder;
//////////////////
oTriangle1 = new Triangle(aPoint[0], aPoint[3], aPoint[6], aPtSource[0], aPtSource[2], aPtSource[4] );
oGpuObj.fIniTriangle(aPtSource[0], aPtSource[1], aPtSource[2], aPtSource[3], aPtSource[4], aPtSource[5]);
fSetRectangleBorderCorner();
}
override public function fFinalUpdate():Void {
oTriangle1.fUpdateSegment();
oGpuObj.fUpdateTriangle(aPoint[0] + nX, aPoint[1] + nY, aPoint[3] + nX, aPoint[4] + nY, aPoint[6] + nX, aPoint[7] + nY);
}
//All HighPos Val must be get
public function fSetRectangleBorderCorner():Void {
/*
nBorderT = rPtHT[1];
nBorderB = rPtHB[1];
nBorderR = rPtHR[0];
nBorderL = rPtHL[0];
*/
nLimT = Math.fITronc(-500.0* Math.nPrec + Math.nPrec/2);
nLimB = Math.fITronc(500.0 * Math.nPrec + Math.nPrec/2);
nLimR = Math.fITronc(500.0 * Math.nPrec + Math.nPrec/2);
nLimL = Math.fITronc(-500.0 * Math.nPrec + Math.nPrec / 2);
/*
nLimT = Math.fITronc(rPtHT[1] * Math.nPrec + Math.nPrec/2);
nLimB = Math.fITronc(rPtHB[1] * Math.nPrec + Math.nPrec/2);
nLimR = Math.fITronc(rPtHR[0] * Math.nPrec + Math.nPrec/2);
nLimL = Math.fITronc(rPtHL[0] * Math.nPrec + Math.nPrec / 2);
*/
}
override public function fDrawObject( _oSource : Object, _nX_Start : Int, _nX_End : Int, _nY_Start : Int, _nY_End : Int):Void {
oGpuObj.fDrawTriangle(_oSource);
oTriangle1.fDraw(nPosX, nPosY, aPixelArray, _oSource , _nX_Start , _nX_End , _nY_Start , _nY_End, nRsAlpha, nRsBrRed, nRsBrGreen , nRsBrBlue, nRsRevRed, nRsRevBlue, nRsRevGreen, nOfRevRed, nOfRevBlue, nOfRevGreen);
}
}
} | Redcode | 3 | VLiance/GZE | src/Lib_GZ/Gfx/ShowTriangle.cw | [
"Apache-2.0"
] |
//
// MetalConvolution1x1.metal
// MNN
//
// Created by MNN on 2019/02/25.
// Copyright © 2018, Alibaba Group Holding Limited
//
#include <metal_stdlib>
#include "MetalConvolutionActivation.metal"
using namespace metal;
using namespace MNN;
#define CONV_UNROLL (4)
#define CONV_UNROLL_L (8)
// Shape and activation parameters shared by every conv1x1 kernel below.
// NOTE(review): buffers use a channel-packed layout (ftype4 = 4 channels per
// element); "slice" appears to count groups of 4 channels — confirm upstream.
struct conv1x1_constants {
    int input_size;                     // elements per input slice (used as the per-slice stride)
    int input_slice;                    // number of input channel slices
    int output_width;                   // output width in pixels (also used as row stride)
    int output_height;                  // output height in pixels
    int output_size;                    // elements per output slice (per-slice stride)
    int output_slice;                   // number of output channel slices
    int batch;                          // batch count
    conv_activation_type activation;    // activation applied to every output value
};
// 1x1 convolution, no unrolling: each thread computes one output pixel for one
// (batch, output-slice) pair. gid = (x, y, combined batch*slice index).
kernel void conv1x1_w1h1(const device ftype4 *in           [[buffer(0)]],
                         device ftype4 *out                [[buffer(1)]],
                         constant conv1x1_constants& cst   [[buffer(2)]],
                         const device ftype4x4 *wt         [[buffer(3)]],
                         const device ftype4 *biasTerms    [[buffer(4)]],
                         uint3 gid                         [[thread_position_in_grid]]) {
    // Guard against out-of-range threads from grid over-dispatch.
    if ((int)gid.x >= cst.output_width || (int)gid.y >= cst.output_height || (int)gid.z >= cst.batch * cst.output_slice) return;

    int idx_w = gid.x;                  // output column
    int idx_h = gid.y;                  // output row
    int idx_c = gid.z / cst.batch;      // output channel slice
    int idx_b = gid.z % cst.batch;      // batch index

    // Weights for this output slice: one 4x4 matrix per input slice.
    auto xy_wt  = wt + idx_c * cst.input_slice;
    // Pixel (idx_w, idx_h) in the first input slice of this batch.
    // NOTE(review): cst.output_width is used as the input row stride — valid
    // for a 1x1 / stride-1 convolution where input and output sizes coincide.
    auto xy_in0 = in + (int)idx_b * cst.input_slice * cst.input_size + idx_h * cst.output_width + idx_w;
    auto xy_out = out + (int)idx_b * cst.output_slice * cst.output_size + idx_c * cst.output_size + idx_h * cst.output_width + idx_w;

    // Start from the bias, then accumulate one 4x4 matrix product per input slice.
    auto biasValue = FLOAT4(biasTerms[idx_c]);
    FLOAT4 result0 = biasValue;
    for (auto z = 0; z < cst.input_slice; z++) {
        auto in40 = xy_in0[0];
        auto w = xy_wt[z];
        result0 += FLOAT4(in40 * w);
        xy_in0 += cst.input_size;       // advance to the same pixel in the next slice
    }
    // Apply the configured activation and store the result.
    /* true */ *xy_out = activate(ftype4(result0), cst.activation);
}
kernel void conv1x1_g1z4(const device ftype4 *in [[buffer(0)]],
device ftype4 *out [[buffer(1)]],
constant conv1x1_constants& cst [[buffer(2)]],
const device ftype4x4 *wt [[buffer(3)]],
const device ftype4 *biasTerms [[buffer(4)]],
uint3 gid [[thread_position_in_grid]]) {
if ((int)gid.x * CONV_UNROLL >= cst.output_size || (int)gid.y >= cst.output_slice || (int)gid.z >= cst.batch) return;
int rx = gid.x * CONV_UNROLL;
int uz = gid.y;
auto xy_wt = wt + uz * cst.input_slice;
auto xy_in0 = in + (int)gid.z * cst.input_slice * cst.input_size + rx + 0;
auto xy_out = out + (int)gid.z * cst.output_slice * cst.output_size + uz * cst.output_size + rx;
auto biasValue = FLOAT4(biasTerms[uz]);
FLOAT4 result0 = biasValue, result1 = biasValue, result2 = biasValue, result3 = biasValue;
int computeSize = min(cst.output_size - rx, CONV_UNROLL);
for (auto z = 0; z < cst.input_slice; z++) {
auto in40 = *xy_in0;
auto in41 = *(xy_in0 + 1);
auto in42 = *(xy_in0 + 2);
auto in43 = *(xy_in0 + 3);
auto w = xy_wt[z];
result0 += FLOAT4(in40 * w);
result1 += FLOAT4(in41 * w);
result2 += FLOAT4(in42 * w);
result3 += FLOAT4(in43 * w);
xy_in0 += cst.input_size;
}
/* true */ *xy_out = activate(ftype4(result0), cst.activation);
if (computeSize > 1) {xy_out[1] = activate(ftype4(result1), cst.activation); }
if (computeSize > 2) {xy_out[2] = activate(ftype4(result2), cst.activation); }
if (computeSize > 3) {xy_out[3] = activate(ftype4(result3), cst.activation); }
}
kernel void conv1x1_g1z8(const device ftype4 *in [[buffer(0)]],
device ftype4 *out [[buffer(1)]],
constant conv1x1_constants& cst [[buffer(2)]],
const device ftype4x4 *wt [[buffer(3)]],
const device ftype4 *biasTerms [[buffer(4)]],
uint3 gid [[thread_position_in_grid]]) {
if ((int)gid.x * CONV_UNROLL_L >= cst.output_size || (int)gid.y >= cst.output_slice || (int)gid.z >= cst.batch) return;
int rx = gid.x * CONV_UNROLL_L;
int uz = gid.y;
auto xy_wt = wt + uz * cst.input_slice;
auto xy_in0 = in + (int)gid.z * cst.input_slice * cst.input_size + rx + 0;
auto xy_out = out + (int)gid.z * cst.output_slice * cst.output_size + uz * cst.output_size + rx;
auto biasValue = FLOAT4(biasTerms[uz]);
FLOAT4 result0 = biasValue, result1 = biasValue, result2 = biasValue, result3 = biasValue;
FLOAT4 result4 = biasValue, result5 = biasValue, result6 = biasValue, result7 = biasValue;
int computeSize = min(cst.output_size - rx, CONV_UNROLL_L);
for (auto z = 0; z < cst.input_slice; z++) {
auto in40 = xy_in0[0];
auto in41 = xy_in0[1];
auto in42 = xy_in0[2];
auto in43 = xy_in0[3];
auto in44 = xy_in0[4];
auto in45 = xy_in0[5];
auto in46 = xy_in0[6];
auto in47 = xy_in0[7];
auto w = xy_wt[z];
result0 += FLOAT4(in40 * w);
result1 += FLOAT4(in41 * w);
result2 += FLOAT4(in42 * w);
result3 += FLOAT4(in43 * w);
result4 += FLOAT4(in44 * w);
result5 += FLOAT4(in45 * w);
result6 += FLOAT4(in46 * w);
result7 += FLOAT4(in47 * w);
xy_in0 += cst.input_size;
}
/* true */ *xy_out = activate(ftype4(result0), cst.activation);
if (computeSize > 1) {xy_out[1] = activate(ftype4(result1), cst.activation); }
if (computeSize > 2) {xy_out[2] = activate(ftype4(result2), cst.activation); }
if (computeSize > 3) {xy_out[3] = activate(ftype4(result3), cst.activation); }
if (computeSize > 4) {xy_out[4] = activate(ftype4(result4), cst.activation); }
if (computeSize > 5) {xy_out[5] = activate(ftype4(result5), cst.activation); }
if (computeSize > 6) {xy_out[6] = activate(ftype4(result6), cst.activation); }
if (computeSize > 7) {xy_out[7] = activate(ftype4(result7), cst.activation); }
}
kernel void conv1x1_w4h2(const device ftype4 *in [[buffer(0)]],
device ftype4 *out [[buffer(1)]],
constant conv1x1_constants& cst [[buffer(2)]],
const device ftype4x4 *wt [[buffer(3)]],
const device ftype4 *biasTerms [[buffer(4)]],
uint3 gid [[thread_position_in_grid]]) {
if ((int)gid.x * 4 >= cst.output_width || (int)gid.y * 2 >= cst.output_height || (int)gid.z >= cst.batch * cst.output_slice) return;
int idx_w = gid.x << 2;
int idx_h = gid.y << 1;
int idx_c = gid.z / cst.batch;
int idx_b = gid.z % cst.batch;
auto xy_wt = wt + idx_c * cst.input_slice;
auto xy_in0 = in + (int)idx_b * cst.input_slice * cst.input_size + idx_h * cst.output_width + idx_w;
auto xy_out = out + (int)idx_b * cst.output_slice * cst.output_size + idx_c * cst.output_size + idx_h * cst.output_width + idx_w;
auto biasValue = FLOAT4(biasTerms[idx_c]);
FLOAT4 result0 = biasValue, result1 = biasValue, result2 = biasValue, result3 = biasValue;
FLOAT4 result4 = biasValue, result5 = biasValue, result6 = biasValue, result7 = biasValue;
for (auto z = 0; z < cst.input_slice; z++) {
auto in40 = xy_in0[0];
auto in41 = xy_in0[1];
auto in42 = xy_in0[2];
auto in43 = xy_in0[3];
auto in44 = xy_in0[cst.output_width+0];
auto in45 = xy_in0[cst.output_width+1];
auto in46 = xy_in0[cst.output_width+2];
auto in47 = xy_in0[cst.output_width+3];
auto w = xy_wt[z];
result0 += FLOAT4(in40 * w);
result1 += FLOAT4(in41 * w);
result2 += FLOAT4(in42 * w);
result3 += FLOAT4(in43 * w);
result4 += FLOAT4(in44 * w);
result5 += FLOAT4(in45 * w);
result6 += FLOAT4(in46 * w);
result7 += FLOAT4(in47 * w);
xy_in0 += cst.input_size;
}
int widthSize = min(cst.output_width - idx_w, 4);
/* true */ *xy_out = activate(ftype4(result0), cst.activation);
if (widthSize > 1) {xy_out[1] = activate(ftype4(result1), cst.activation); }
if (widthSize > 2) {xy_out[2] = activate(ftype4(result2), cst.activation); }
if (widthSize > 3) {xy_out[3] = activate(ftype4(result3), cst.activation); }
int heightSize = min(cst.output_height - idx_h, 2);
if(heightSize > 1) {
/* true */ {xy_out[cst.output_width+0] = activate(ftype4(result4), cst.activation); }
if (widthSize > 1) {xy_out[cst.output_width+1] = activate(ftype4(result5), cst.activation); }
if (widthSize > 2) {xy_out[cst.output_width+2] = activate(ftype4(result6), cst.activation); }
if (widthSize > 3) {xy_out[cst.output_width+3] = activate(ftype4(result7), cst.activation); }
}
}
kernel void conv1x1_w4h4(const device ftype4 *in [[buffer(0)]],
device ftype4 *out [[buffer(1)]],
constant conv1x1_constants& cst [[buffer(2)]],
const device ftype4x4 *wt [[buffer(3)]],
const device ftype4 *biasTerms [[buffer(4)]],
uint3 gid [[thread_position_in_grid]]) {
if ((int)gid.x * 4 >= cst.output_width || (int)gid.y * 4 >= cst.output_height || (int)gid.z >= cst.batch * cst.output_slice) return;
int idx_w = gid.x << 2;
int idx_h = gid.y << 2;
int idx_c = gid.z / cst.batch;
int idx_b = gid.z % cst.batch;
auto xy_wt = wt + idx_c * cst.input_slice;
auto xy_in0 = in + (int)idx_b * cst.input_slice * cst.input_size + idx_h * cst.output_width + idx_w;
auto xy_out = out + (int)idx_b * cst.output_slice * cst.output_size + idx_c * cst.output_size + idx_h * cst.output_width + idx_w;
auto biasValue = FLOAT4(biasTerms[idx_c]);
FLOAT4 result00 = biasValue, result01 = biasValue, result02 = biasValue, result03 = biasValue;
FLOAT4 result10 = biasValue, result11 = biasValue, result12 = biasValue, result13 = biasValue;
FLOAT4 result20 = biasValue, result21 = biasValue, result22 = biasValue, result23 = biasValue;
FLOAT4 result30 = biasValue, result31 = biasValue, result32 = biasValue, result33 = biasValue;
for (auto z = 0; z < cst.input_slice; z++) {
auto in00 = xy_in0[0];
auto in01 = xy_in0[1];
auto in02 = xy_in0[2];
auto in03 = xy_in0[3];
auto in10 = xy_in0[cst.output_width+0];
auto in11 = xy_in0[cst.output_width+1];
auto in12 = xy_in0[cst.output_width+2];
auto in13 = xy_in0[cst.output_width+3];
auto in20 = xy_in0[cst.output_width+cst.output_width+0];
auto in21 = xy_in0[cst.output_width+cst.output_width+1];
auto in22 = xy_in0[cst.output_width+cst.output_width+2];
auto in23 = xy_in0[cst.output_width+cst.output_width+3];
auto in30 = xy_in0[cst.output_width+cst.output_width+cst.output_width+0];
auto in31 = xy_in0[cst.output_width+cst.output_width+cst.output_width+1];
auto in32 = xy_in0[cst.output_width+cst.output_width+cst.output_width+2];
auto in33 = xy_in0[cst.output_width+cst.output_width+cst.output_width+3];
auto w = xy_wt[z];
result00 += FLOAT4(in00 * w);
result01 += FLOAT4(in01 * w);
result02 += FLOAT4(in02 * w);
result03 += FLOAT4(in03 * w);
result10 += FLOAT4(in10 * w);
result11 += FLOAT4(in11 * w);
result12 += FLOAT4(in12 * w);
result13 += FLOAT4(in13 * w);
result20 += FLOAT4(in20 * w);
result21 += FLOAT4(in21 * w);
result22 += FLOAT4(in22 * w);
result23 += FLOAT4(in23 * w);
result30 += FLOAT4(in30 * w);
result31 += FLOAT4(in31 * w);
result32 += FLOAT4(in32 * w);
result33 += FLOAT4(in33 * w);
xy_in0 += cst.input_size;
}
int widthSize = min(cst.output_width - idx_w, 4);
/* true */ *xy_out = activate(ftype4(result00), cst.activation);
if (widthSize > 1) {xy_out[1] = activate(ftype4(result01), cst.activation); }
if (widthSize > 2) {xy_out[2] = activate(ftype4(result02), cst.activation); }
if (widthSize > 3) {xy_out[3] = activate(ftype4(result03), cst.activation); }
int heightSize = min(cst.output_height - idx_h, 4);
if(heightSize > 1) {
/* true */ {xy_out[cst.output_width+0] = activate(ftype4(result10), cst.activation); }
if (widthSize > 1) {xy_out[cst.output_width+1] = activate(ftype4(result11), cst.activation); }
if (widthSize > 2) {xy_out[cst.output_width+2] = activate(ftype4(result12), cst.activation); }
if (widthSize > 3) {xy_out[cst.output_width+3] = activate(ftype4(result13), cst.activation); }
}
if(heightSize > 2) {
/* true */ {xy_out[cst.output_width+cst.output_width+0] = activate(ftype4(result20), cst.activation); }
if (widthSize > 1) {xy_out[cst.output_width+cst.output_width+1] = activate(ftype4(result21), cst.activation); }
if (widthSize > 2) {xy_out[cst.output_width+cst.output_width+2] = activate(ftype4(result22), cst.activation); }
if (widthSize > 3) {xy_out[cst.output_width+cst.output_width+3] = activate(ftype4(result23), cst.activation); }
}
if(heightSize > 3) {
/* true */ {xy_out[cst.output_width+cst.output_width+cst.output_width+0] = activate(ftype4(result30), cst.activation); }
if (widthSize > 1) {xy_out[cst.output_width+cst.output_width+cst.output_width+1] = activate(ftype4(result31), cst.activation); }
if (widthSize > 2) {xy_out[cst.output_width+cst.output_width+cst.output_width+2] = activate(ftype4(result32), cst.activation); }
if (widthSize > 3) {xy_out[cst.output_width+cst.output_width+cst.output_width+3] = activate(ftype4(result33), cst.activation); }
}
}
kernel void conv1x1_w2c2(const device ftype4 *in [[buffer(0)]],
device ftype4 *out [[buffer(1)]],
constant conv1x1_constants& cst [[buffer(2)]],
const device ftype4x4 *wt [[buffer(3)]],
const device ftype4 *biasTerms [[buffer(4)]],
uint3 gid [[thread_position_in_grid]]) {
if ((int)gid.x * 2 >= cst.output_width || (int)gid.y >= cst.output_height || (int)gid.z * 2 >= cst.batch * cst.output_slice) return;
int idx_w = gid.x << 1;
int idx_h = gid.y;
int idx_c = (gid.z / cst.batch) << 1;
int idx_b = gid.z % cst.batch;
auto xy_wt = wt + idx_c * cst.input_slice;
auto xy_in0 = in + (int)idx_b * cst.input_slice * cst.input_size + idx_h * cst.output_width + idx_w;
auto xy_out = out + (int)idx_b * cst.output_slice * cst.output_size + idx_c * cst.output_size + idx_h * cst.output_width + idx_w;
auto biasValue0 = FLOAT4(biasTerms[idx_c]);
auto biasValue1 = FLOAT4(biasTerms[idx_c+1]);
FLOAT4 result0 = biasValue0, result1 = biasValue0;
FLOAT4 result4 = biasValue1, result5 = biasValue1;
for (auto z = 0; z < cst.input_slice; z++) {
auto in40 = xy_in0[0];
auto in41 = xy_in0[1];
auto w0 = xy_wt[z];
auto w1 = xy_wt[cst.input_slice+z];
result0 += FLOAT4(in40 * w0);
result1 += FLOAT4(in41 * w0);
result4 += FLOAT4(in40 * w1);
result5 += FLOAT4(in41 * w1);
xy_in0 += cst.input_size;
}
int widthSize = min(cst.output_width - idx_w, 2);
/* true */ *xy_out = activate(ftype4(result0), cst.activation);
if (widthSize > 1) {xy_out[1] = activate(ftype4(result1), cst.activation); }
int channelSize = min(cst.output_slice - idx_c, 2);
if(channelSize > 1) {
/* true */ {xy_out[cst.output_size+0] = activate(ftype4(result4), cst.activation); }
if (widthSize > 1) {xy_out[cst.output_size+1] = activate(ftype4(result5), cst.activation); }
}
}
kernel void conv1x1_w2h2(const device ftype4 *in [[buffer(0)]],
device ftype4 *out [[buffer(1)]],
constant conv1x1_constants& cst [[buffer(2)]],
const device ftype4x4 *wt [[buffer(3)]],
const device ftype4 *biasTerms [[buffer(4)]],
uint3 gid [[thread_position_in_grid]]) {
if ((int)gid.x * 2 >= cst.output_width || (int)gid.y * 2 >= cst.output_height || (int)gid.z >= cst.batch * cst.output_slice) return;
int idx_w = gid.x << 1;
int idx_h = gid.y << 1;
int idx_c = gid.z / cst.batch;
int idx_b = gid.z % cst.batch;
auto xy_wt = wt + idx_c * cst.input_slice;
auto xy_in0 = in + (int)idx_b * cst.input_slice * cst.input_size + idx_h * cst.output_width + idx_w;
auto xy_out = out + (int)idx_b * cst.output_slice * cst.output_size + idx_c * cst.output_size + idx_h * cst.output_width + idx_w;
auto biasValue = FLOAT4(biasTerms[idx_c]);
FLOAT4 result0 = biasValue, result1 = biasValue;
FLOAT4 result4 = biasValue, result5 = biasValue;
for (auto z = 0; z < cst.input_slice; z++) {
auto in40 = xy_in0[0];
auto in41 = xy_in0[1];
auto in44 = xy_in0[cst.output_width+0];
auto in45 = xy_in0[cst.output_width+1];
auto w = xy_wt[z];
result0 += FLOAT4(in40 * w);
result1 += FLOAT4(in41 * w);
result4 += FLOAT4(in44 * w);
result5 += FLOAT4(in45 * w);
xy_in0 += cst.input_size;
}
int widthSize = min(cst.output_width - idx_w, 2);
/* true */ *xy_out = activate(ftype4(result0), cst.activation);
if (widthSize > 1) {xy_out[1] = activate(ftype4(result1), cst.activation); }
int heightSize = min(cst.output_height - idx_h, 2);
if(heightSize > 1) {
/* true */ {xy_out[cst.output_width+0] = activate(ftype4(result4), cst.activation); }
if (widthSize > 1) {xy_out[cst.output_width+1] = activate(ftype4(result5), cst.activation); }
}
}
// conv1x1_w2h2c2: pointwise (1x1) convolution. Each thread produces a
// 2 (width) x 2 (height) x 2 (output channel slice) tile of ftype4 values —
// eight accumulators — reducing over all input slices.
kernel void conv1x1_w2h2c2(const device ftype4 *in [[buffer(0)]],
                           device ftype4 *out [[buffer(1)]],
                           constant conv1x1_constants& cst [[buffer(2)]],
                           const device ftype4x4 *wt [[buffer(3)]],
                           const device ftype4 *biasTerms [[buffer(4)]],
                           uint3 gid [[thread_position_in_grid]]) {
    // Each thread covers 2 units along every axis, hence the *2 bounds checks.
    if ((int)gid.x * 2 >= cst.output_width || (int)gid.y * 2 >= cst.output_height || (int)gid.z*2 >= cst.batch * cst.output_slice) return;
    // Top-left corner of this thread's 2x2 spatial tile.
    int idx_w = gid.x << 1;
    int idx_h = gid.y << 1;
    // First of the two output slices handled here; gid.z interleaves
    // (slice-pair, batch).
    int idx_c = (gid.z / cst.batch) << 1;
    int idx_b = gid.z % cst.batch;
    // Weights for output slice idx_c; slice idx_c+1 starts cst.input_slice
    // entries further on (see w1 below).
    auto xy_wt = wt + idx_c * cst.input_slice;
    // NOTE: the input pixel is addressed with output_width — for a 1x1 conv
    // input and output share their spatial extent.
    auto xy_in0 = in + (int)idx_b * cst.input_slice * cst.input_size + idx_h * cst.output_width + idx_w;
    auto xy_out = out + (int)idx_b * cst.output_slice * cst.output_size + idx_c * cst.output_size + idx_h * cst.output_width + idx_w;
    auto biasValue0 = FLOAT4(biasTerms[idx_c]);
    auto biasValue1 = FLOAT4(biasTerms[idx_c+1]);
    // result{0,1,4,5} -> slice idx_c, result{2,3,6,7} -> slice idx_c+1;
    // within each slice the order is TL, TR, BL, BR of the 2x2 tile.
    FLOAT4 result0 = biasValue0, result1 = biasValue0;
    FLOAT4 result4 = biasValue0, result5 = biasValue0;
    FLOAT4 result2 = biasValue1, result3 = biasValue1;
    FLOAT4 result6 = biasValue1, result7 = biasValue1;
    // Accumulate over every input slice: 4 input pixels x 2 weight matrices.
    for (auto z = 0; z < cst.input_slice; z++) {
        auto in40 = xy_in0[0];
        auto in41 = xy_in0[1];
        auto in44 = xy_in0[cst.output_width+0];
        auto in45 = xy_in0[cst.output_width+1];
        auto w0 = xy_wt[z];
        auto w1 = xy_wt[cst.input_slice+z];
        result0 += FLOAT4(in40 * w0);
        result1 += FLOAT4(in41 * w0);
        result4 += FLOAT4(in44 * w0);
        result5 += FLOAT4(in45 * w0);
        result2 += FLOAT4(in40 * w1);
        result3 += FLOAT4(in41 * w1);
        result6 += FLOAT4(in44 * w1);
        result7 += FLOAT4(in45 * w1);
        xy_in0 += cst.input_size;
    }
    // Write back, masking off tile elements that fall outside the output
    // bounds; the top-left element of the tile is always valid.
    int widthSize = min(cst.output_width - idx_w, 2);
    /* true */ *xy_out = activate(ftype4(result0), cst.activation);
    if (widthSize > 1) {xy_out[1] = activate(ftype4(result1), cst.activation); }
    int heightSize = min(cst.output_height - idx_h, 2);
    if(heightSize > 1) {
        /* true */ {xy_out[cst.output_width+0] = activate(ftype4(result4), cst.activation); }
        if (widthSize > 1) {xy_out[cst.output_width+1] = activate(ftype4(result5), cst.activation); }
    }
    int channelSize = min(cst.output_slice - idx_c, 2);
    if(channelSize > 1) {
        /* true */ xy_out[cst.output_size] = activate(ftype4(result2), cst.activation);
        if (widthSize > 1) {xy_out[cst.output_size+1] = activate(ftype4(result3), cst.activation); }
        if(heightSize > 1) {
            /* true */ {xy_out[cst.output_size+cst.output_width+0] = activate(ftype4(result6), cst.activation); }
            if (widthSize > 1) {xy_out[cst.output_size+cst.output_width+1] = activate(ftype4(result7), cst.activation); }
        }
    }
}
| Metal | 4 | Napoleon-Jm/MNN | source/backend/metal/MetalConvolution1x1.metal | [
"Apache-2.0"
] |
# Input
# Include-file for the extension framework: exposes this directory on the
# include path and compiles the factory/extension/manager sources into the
# including project.
INCLUDEPATH += $$PWD
HEADERS += $$PWD/default_extensionfactory.h \
           $$PWD/extension.h \
           $$PWD/qextensionmanager.h
SOURCES += $$PWD/default_extensionfactory.cpp \
           $$PWD/extension.cpp \
           $$PWD/qextensionmanager.cpp
| QMake | 2 | liyuzhao/QWidgetDemo | designer/lib/extension/extension.pri | [
"MulanPSL-1.0"
] |
--TEST--
Test FILEINFO_EXTENSION flag
--EXTENSIONS--
fileinfo
--FILE--
<?php
// FILEINFO_EXTENSION makes finfo return the slash-separated list of file
// extensions appropriate for the detected file type (see EXPECT below).
$f = new finfo;
var_dump($f->file(__DIR__ . "/resources/test.jpg", FILEINFO_EXTENSION));
?>
--EXPECT--
string(17) "jpeg/jpg/jpe/jfif"
| PHP | 4 | NathanFreeman/php-src | ext/fileinfo/tests/finfo_extension_flag.phpt | [
"PHP-3.01"
] |
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {},
"colab_type": "code",
"id": "ur8xi4C7S06n"
},
"outputs": [],
"source": [
"# Copyright 2021 Google LLC\n",
"#\n",
"# Licensed under the Apache License, Version 2.0 (the \"License\");\n",
"# you may not use this file except in compliance with the License.\n",
"# You may obtain a copy of the License at\n",
"#\n",
"# https://www.apache.org/licenses/LICENSE-2.0\n",
"#\n",
"# Unless required by applicable law or agreed to in writing, software\n",
"# distributed under the License is distributed on an \"AS IS\" BASIS,\n",
"# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
"# See the License for the specific language governing permissions and\n",
"# limitations under the License."
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "tvgnzT1CKxrO"
},
"source": [
"### Overview\n",
"\n",
"In this notebook, you'll learn how to submit a job to the Vertex AI training service. In the job you'll train your TensorFlow 2 model and export the saved model to Cloud Storage.\n",
"\n",
"### Dataset\n",
"\n",
"[CTA - Ridership - Daily Boarding Totals](https://data.cityofchicago.org/Transportation/CTA-Ridership-Daily-Boarding-Totals/6iiy-9s97): This dataset shows systemwide boardings for both bus and rail services provided by Chicago Transit Authority, dating back to 2001.\n",
"\n",
"### Objective\n",
"\n",
"The goal is to forecast future transit ridership in the City of Chicago, based on previous ridership."
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "i7EUnXsZhAGF"
},
"source": [
"## Install packages and dependencies"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "XoEqT2Y4DJmf"
},
"source": [
"### Import libraries and define constants"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 161
},
"colab_type": "code",
"id": "kRv5imUnKkuP",
"outputId": "f2f8b527-1b8b-4d7b-c69d-dc9426f62cf6"
},
"outputs": [],
"source": [
"import datetime\n",
"import os\n",
"import time\n",
"\n",
"import numpy as np\n",
"import pandas as pd\n",
"import tensorflow as tf\n",
"\n",
"from google.cloud import aiplatform, storage\n",
"from google.cloud.aiplatform import gapic as aip\n",
"from sklearn.preprocessing import StandardScaler"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Check the TensorFlow version installed\n",
"\n",
"tf.__version__"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {},
"colab_type": "code",
"id": "oM1iC_MfAts1"
},
"outputs": [],
"source": [
"# Enter your project, region, and a bucket name. Then run the cell to make sure the\n",
"# Cloud SDK uses the right project for all the commands in this notebook.\n",
"\n",
"PROJECT = 'your-project-name' # REPLACE WITH YOUR PROJECT ID\n",
"BUCKET = 'your-regional-bucket' # REPLACE WITH A UNIQUE REGIONAL BUCKET NAME e.g. your PROJECT NAME\n",
"REGION = 'us-central1' # REPLACE WITH YOUR BUCKET REGION e.g. us-central1\n",
"BUCKET_URI = 'gs://' + BUCKET\n",
"\n",
"#Don't change the following command - this is to check if you have changed the project name above.\n",
    "assert PROJECT != 'your-project-name', \"Don't forget to change the project variables!\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Initialize the Vertex SDK for Python\n",
"\n",
"aiplatform.init(project=PROJECT, location=REGION, staging_bucket=BUCKET)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Dataset parameters\n",
"\n",
"target_col = 'total_rides' # The variable you are predicting\n",
"ts_col = 'service_date' # The name of the column with the date field"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Model parameters\n",
"\n",
"freq = 'D' # Daily frequency\n",
"n_input_steps = 30 # Lookback window\n",
"n_output_steps = 7 # How many steps to predict forward\n",
"n_seasons = 7 # Monthly periodicity\n",
"\n",
"train_split = 0.8 # % Split between train/test data\n",
"epochs = 1000 # How many passes through the data (early-stopping will cause training to stop before this)\n",
"patience = 5 # Terminate training after the validation loss does not decrease after this many epochs\n",
"\n",
"lstm_units = 64\n",
"input_layer_name = 'lstm_input'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Training parameters\n",
"\n",
"MODEL_NAME = 'cta_ridership'"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "zgPO1eR3CYjk"
},
"source": [
"### Create a Cloud Storage bucket\n",
"\n",
"**The following steps are required, regardless of your notebook environment.**\n",
"\n",
"When you submit a training job using the Cloud SDK, you upload a Python package\n",
"containing your training code to a Cloud Storage bucket. AI Platform runs\n",
"the code from this package. In this tutorial, AI Platform also saves the\n",
"trained model that results from your job in the same bucket. You can then\n",
"create an AI Platform model version based on this output in order to serve\n",
"online predictions."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "both",
"colab": {},
"colab_type": "code",
"id": "MzGDU7TWdts_"
},
"outputs": [],
"source": [
"storage_client = storage.Client()\n",
"try:\n",
" bucket = storage_client.get_bucket(BUCKET)\n",
    " print(\"Bucket exists, let's not recreate it.\")\n",
"except:\n",
" bucket = storage_client.create_bucket(BUCKET)\n",
" print('Created bucket: ' + BUCKET)"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "M8_SY9abGxCc"
},
"source": [
"## Load and preview the data\n",
"\n",
"Pre-processing on the original dataset has been done for you and made available on Cloud Storage."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"processed_file = 'cta_ridership.csv' # Which file to save the results to\n",
"\n",
"if os.path.exists(processed_file):\n",
" input_file = processed_file # File created in previous lab\n",
"else:\n",
" input_file = f'data/{processed_file}'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {},
"colab_type": "code",
"id": "JOfBsktiGCOp"
},
"outputs": [],
"source": [
"df = pd.read_csv(input_file, index_col=ts_col, parse_dates=True)\n",
"\n",
"# Plot 30 days of ridership \n",
"_ = df[target_col][:30].plot()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Define some characteristics of the data that will be used later\n",
"n_features = len(df.columns)\n",
"\n",
"# Index of target column. Used later when creating dataframes.\n",
"target_col_num = df.columns.get_loc(target_col)"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "067UQKwlVBUf"
},
"source": [
"### Process data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 227
},
"colab_type": "code",
"id": "59PwFlYDU13-",
"outputId": "b4b59f59-6d39-4235-8f9f-8a73d7e1cd0e"
},
"outputs": [],
"source": [
"# Split data\n",
"\n",
"size = int(len(df) * train_split)\n",
"df_train, df_test = df[0:size].copy(deep=True), df[size:len(df)].copy(deep=True)\n",
"\n",
"df_train.head()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 307
},
"colab_type": "code",
"id": "Yn9RSS3DVEtt",
"outputId": "cd066456-b419-4e0f-8c52-2fd7d82123ba"
},
"outputs": [],
"source": [
"_ = df_train.plot()"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "jlgFdwM9VOnd"
},
"source": [
"### Scale values"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Review original values\n",
"\n",
"df_train.head()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {},
"colab_type": "code",
"id": "xqRM0Wt6VKzm"
},
"outputs": [],
"source": [
"# For neural networks to converge quicker, it is helpful to scale the values.\n",
"# For example, each feature might be transformed to have a mean of 0 and std. dev. of 1.\n",
"#\n",
"# You are working with a mix of features, input timesteps, output horizon, etc.\n",
"# which don't work out-of-the-box with common scaling utilities.\n",
"# So, here are a couple wrappers to handle scaling and inverting the scaling.\n",
"\n",
"feature_scaler = StandardScaler()\n",
"target_scaler = StandardScaler()\n",
"\n",
"def scale(df, \n",
" fit=True, \n",
" target_col=target_col,\n",
" feature_scaler=feature_scaler,\n",
" target_scaler=target_scaler):\n",
" \"\"\"\n",
" Scale the input features, using a separate scaler for the target.\n",
" \n",
" Parameters: \n",
" df (pd.DataFrame): Input dataframe\n",
" fit (bool): Whether to fit the scaler to the data (only apply to training data)\n",
" target_col (pd.Series): The column that is being predicted\n",
" feature_scaler (StandardScaler): Scaler used for features\n",
" target_scaler (StandardScaler): Scaler used for target\n",
" \n",
" Returns: \n",
" df_scaled (pd.DataFrame): Scaled dataframe \n",
" \"\"\" \n",
" \n",
" target = df[target_col].values.reshape(-1, 1)\n",
" if fit:\n",
" target_scaler.fit(target)\n",
" target_scaled = target_scaler.transform(target)\n",
" \n",
" # Select all columns other than target to be features\n",
" features = df.loc[:, df.columns != target_col].values\n",
" \n",
" if features.shape[1]: # If there are any features\n",
" if fit:\n",
" feature_scaler.fit(features)\n",
" features_scaled = feature_scaler.transform(features)\n",
" \n",
" # Combine target and features into one data frame\n",
" df_scaled = pd.DataFrame(features_scaled)\n",
" target_col_num = df.columns.get_loc(target_col)\n",
" df_scaled.insert(target_col_num, target_col, target_scaled)\n",
" df_scaled.columns = df.columns \n",
" \n",
" else: # If only target column (no additional features)\n",
" df_scaled = pd.DataFrame(target_scaled, columns=df.columns)\n",
" \n",
" return df_scaled\n",
"\n",
"def inverse_scale(data, target_scaler=target_scaler):\n",
" \"\"\"\n",
" Transform the scaled values of the target back into their original form.\n",
" The features are left alone, as we're assuming that the output of the model only includes the target.\n",
" \n",
" Parameters: \n",
" data (np.array): Input array\n",
" target_scaler (StandardScaler): Scaler used for target\n",
" \n",
" Returns: \n",
" data_scaled (np.array): Scaled array \n",
" \"\"\" \n",
" \n",
" df = pd.DataFrame()\n",
" data_scaled = np.empty([data.shape[1], data.shape[0]])\n",
" for i in range(data.shape[1]):\n",
" data_scaled[i] = target_scaler.inverse_transform([data[:,i]])\n",
" return data_scaled.transpose()\n",
"\n",
"df_train_scaled=scale(df_train)\n",
"df_test_scaled=scale(df_test, False)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 197
},
"colab_type": "code",
"id": "4w2K9hVHXV6-",
"outputId": "b48986d7-e9b4-4a63-c71c-54b43b1ea7ad"
},
"outputs": [],
"source": [
"# Review scaled values\n",
"\n",
"df_train_scaled.head()"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "CNw7mVozbsA2"
},
"source": [
"### Create sequences of time series data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {},
"colab_type": "code",
"id": "76Wp5gRtbrA9"
},
"outputs": [],
"source": [
"def reframe(data, n_input_steps = n_input_steps, n_output_steps = n_output_steps, target_col = target_col):\n",
"\n",
" target_col_num = data.columns.get_loc(target_col) \n",
" \n",
" # Iterate through data and create sequences of features and outputs\n",
" df = pd.DataFrame(data)\n",
" cols=list()\n",
" for i in range(n_input_steps, 0, -1):\n",
" cols.append(df.shift(i))\n",
" for i in range(0, n_output_steps):\n",
" cols.append(df.shift(-i))\n",
" \n",
" # Concatenate values and remove any missing values\n",
" df = pd.concat(cols, axis=1)\n",
" df.dropna(inplace=True)\n",
" \n",
" # Split the data into feature and target variables\n",
" n_feature_cols = n_input_steps * n_features\n",
" features = df.iloc[:,0:n_feature_cols]\n",
" target_cols = [i for i in range(n_feature_cols + target_col_num, n_feature_cols + n_output_steps * n_features, n_features)]\n",
" targets = df.iloc[:,target_cols]\n",
"\n",
" return (features, targets)\n",
"\n",
"X_train_reframed, y_train_reframed = reframe(df_train_scaled)\n",
"X_test_reframed, y_test_reframed = reframe(df_test_scaled)"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "tqhwqDsxb-E_"
},
"source": [
"## Build a model and submit your training job to AI Platform\n",
"\n",
"The model you're building here trains pretty fast so you could train it in this notebook, but for more computationally expensive models, it's useful to train them in the Cloud. To use AI Platform Training, you'll package up your training code and submit a training job to the AI Platform Prediction service.\n",
"\n",
"In your training script, you'll also export your trained `SavedModel` to a Cloud Storage bucket."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Prepare test data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {},
"colab_type": "code",
"id": "HG1bYOO8b4sN"
},
"outputs": [],
"source": [
"# Reshape test data to match model inputs and outputs\n",
"\n",
"X_train = X_train_reframed.values.reshape(-1, n_input_steps, n_features)\n",
"X_test = X_test_reframed.values.reshape(-1, n_input_steps, n_features)\n",
"y_train = y_train_reframed.values.reshape(-1, n_output_steps)\n",
"y_test = y_test_reframed.values.reshape(-1, n_output_steps)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Specify directories to be used later\n",
"\n",
"TRAINER_DIR = 'trainer'\n",
"EXPORT_DIR = 'tf_export'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create trainer directory if it doesn't already exist\n",
"\n",
"!mkdir $TRAINER_DIR"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Copy numpy arrays to npy files\n",
"\n",
"np.save(TRAINER_DIR + '/x_train.npy', X_train)\n",
"np.save(TRAINER_DIR + '/x_test.npy', X_test)\n",
"np.save(TRAINER_DIR + '/y_train.npy', y_train)\n",
"np.save(TRAINER_DIR + '/y_test.npy', y_test)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Prepare model code"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Write training code out to a file that will be submitted to the training job\n",
"# Note: f-strings are supported in Python 3.6 and above\n",
"\n",
"model_template = f\"\"\"import argparse\n",
"import numpy as np\n",
"import os\n",
"import tempfile\n",
"\n",
"from google.cloud import storage\n",
"from tensorflow import keras\n",
"from tensorflow.keras import Sequential\n",
"from tensorflow.keras.layers import Dense, LSTM\n",
"from tensorflow.keras.callbacks import EarlyStopping\n",
"\n",
"n_features = {n_features} # Two features: y (previous values) and whether the date is a holiday\n",
"n_input_steps = {n_input_steps} # Lookback window\n",
"n_output_steps = {n_output_steps} # How many steps to predict forward\n",
"\n",
"epochs = {epochs} # How many passes through the data (early-stopping will cause training to stop before this)\n",
"patience = {patience} # Terminate training after the validation loss does not decrease after this many epochs\n",
"\n",
"def download_blob(bucket_name, source_blob_name, destination_file_name):\n",
" '''Downloads a blob from the bucket.'''\n",
" # bucket_name = \"your-bucket-name\"\n",
" # source_blob_name = \"storage-object-name\"\n",
" # destination_file_name = \"local/path/to/file\"\n",
"\n",
" storage_client = storage.Client()\n",
"\n",
" bucket = storage_client.bucket(bucket_name)\n",
"\n",
" # Construct a client side representation of a blob.\n",
" # Note `Bucket.blob` differs from `Bucket.get_blob` as it doesn't retrieve\n",
" # any content from Google Cloud Storage. As we don't need additional data,\n",
" # using `Bucket.blob` is preferred here.\n",
" blob = bucket.blob(source_blob_name)\n",
" blob.download_to_filename(destination_file_name)\n",
"\n",
" print(\"Blob \" + source_blob_name + \" downloaded to \" + destination_file_name + \".\")\n",
"\n",
"def extract_bucket_and_prefix_from_gcs_path(gcs_path: str):\n",
" '''Given a complete GCS path, return the bucket name and prefix as a tuple.\n",
"\n",
" Example Usage:\n",
"\n",
" bucket, prefix = extract_bucket_and_prefix_from_gcs_path(\n",
" \"gs://example-bucket/path/to/folder\"\n",
" )\n",
"\n",
" # bucket = \"example-bucket\"\n",
" # prefix = \"path/to/folder\"\n",
"\n",
" Args:\n",
" gcs_path (str):\n",
" Required. A full path to a Google Cloud Storage folder or resource.\n",
" Can optionally include \"gs://\" prefix or end in a trailing slash \"/\".\n",
"\n",
" Returns:\n",
" Tuple[str, Optional[str]]\n",
" A (bucket, prefix) pair from provided GCS path. If a prefix is not\n",
" present, a None will be returned in its place.\n",
" '''\n",
" if gcs_path.startswith(\"gs://\"):\n",
" gcs_path = gcs_path[5:]\n",
" if gcs_path.endswith(\"/\"):\n",
" gcs_path = gcs_path[:-1]\n",
"\n",
" gcs_parts = gcs_path.split(\"/\", 1)\n",
" gcs_bucket = gcs_parts[0]\n",
" gcs_blob_prefix = None if len(gcs_parts) == 1 else gcs_parts[1]\n",
"\n",
" return (gcs_bucket, gcs_blob_prefix)\n",
"\n",
"def get_args():\n",
" parser = argparse.ArgumentParser()\n",
" parser.add_argument(\n",
" '--data-uri',\n",
" default=None,\n",
" help='URL where the training files are located')\n",
" args = parser.parse_args()\n",
" print(args)\n",
" return args\n",
"\n",
"def main():\n",
" args = get_args()\n",
" bucket_name, blob_prefix = extract_bucket_and_prefix_from_gcs_path(args.data_uri)\n",
" \n",
" # Get the training data and convert back to np arrays\n",
" local_data_dir = os.path.join(os.getcwd(), tempfile.gettempdir())\n",
" files = ['x_train.npy', 'y_train.npy', 'x_test.npy', 'y_test.npy']\n",
" \n",
" for file in files:\n",
" download_blob(bucket_name, os.path.join(blob_prefix,file), os.path.join(local_data_dir,file))\n",
"\n",
" X_train = np.load(local_data_dir + '/x_train.npy')\n",
" y_train = np.load(local_data_dir + '/y_train.npy')\n",
" X_test = np.load(local_data_dir + '/x_test.npy')\n",
" y_test = np.load(local_data_dir + '/y_test.npy')\n",
" \n",
" # Build and train the model\n",
" model = Sequential([\n",
" LSTM({lstm_units}, input_shape=[n_input_steps, n_features], recurrent_activation=None),\n",
" Dense(n_output_steps)])\n",
"\n",
" model.compile(optimizer='adam', loss='mae')\n",
"\n",
" early_stopping = EarlyStopping(monitor='val_loss', patience=patience)\n",
" _ = model.fit(x=X_train, y=y_train, validation_data=(X_test, y_test), epochs=epochs, callbacks=[early_stopping])\n",
" \n",
" # Export the model\n",
" model.save(os.environ[\"AIP_MODEL_DIR\"])\n",
" \n",
"if __name__ == '__main__':\n",
" main()\n",
"\"\"\"\n",
"\n",
"with open(os.path.join(TRAINER_DIR, 'task.py'), 'w') as f:\n",
" f.write(model_template.format(**globals()))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Copy the data files to a GCS bucket\n",
"\n",
"!gsutil -m cp -r trainer/*.npy $BUCKET_URI/$TRAINER_DIR"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# List the contents of the bucket to ensure they were copied properly\n",
"\n",
"!gsutil ls $BUCKET_URI/$TRAINER_DIR"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Submit training job"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Set training job parameters\n",
"\n",
"CMDARGS = [\n",
" f\"--data-uri={BUCKET_URI}/{TRAINER_DIR}\"\n",
"]\n",
"TRAIN_VERSION = \"tf-cpu.2-6\"\n",
"DEPLOY_VERSION = \"tf2-cpu.2-6\"\n",
"\n",
"TRAIN_IMAGE = \"us-docker.pkg.dev/vertex-ai/training/{}:latest\".format(TRAIN_VERSION)\n",
"DEPLOY_IMAGE = \"us-docker.pkg.dev/vertex-ai/prediction/{}:latest\".format(DEPLOY_VERSION)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Re-run these additional parameters if you need to create a new training job\n",
"\n",
"TIMESTAMP = str(datetime.datetime.now().time())\n",
"JOB_NAME = 'vertex_ai_training_' + TIMESTAMP\n",
"MODEL_DISPLAY_NAME = MODEL_NAME + TIMESTAMP"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create and run the training job\n",
"\n",
"job = aiplatform.CustomTrainingJob(\n",
" display_name=JOB_NAME,\n",
" script_path=f\"{TRAINER_DIR}/task.py\",\n",
" container_uri=TRAIN_IMAGE,\n",
" model_serving_container_image_uri=DEPLOY_IMAGE,\n",
")\n",
"\n",
"model = job.run(\n",
" model_display_name=MODEL_DISPLAY_NAME,\n",
" args=CMDARGS,\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Deploy the model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"DEPLOYED_NAME = f\"{MODEL_NAME}_deployed-\" + TIMESTAMP\n",
"\n",
"endpoint = model.deploy(\n",
" deployed_model_display_name=DEPLOYED_NAME,\n",
" machine_type=\"n1-standard-4\",\n",
" min_replica_count=1,\n",
" max_replica_count=1,\n",
" traffic_split={\"0\": 100},\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {
"colab_type": "text",
"id": "ZFglcq4kcd4R"
},
"source": [
"## Get predictions on deployed model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Get predictions for the first test instance\n",
"\n",
"raw_predictions = endpoint.predict(instances=X_test.tolist()).predictions[0]\n",
"predicted_values = inverse_scale(np.array([raw_predictions])).round()\n",
"\n",
"actual_values = inverse_scale(np.array([y_test[0]]))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Print prediction and compare to actual value\n",
"\n",
"print('Predicted riders:', predicted_values)\n",
"print('Actual riders: ', actual_values)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Cleanup"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"delete_training_job = True\n",
"delete_model = True\n",
"delete_endpoint = True\n",
"\n",
"# Warning: Setting this to true will delete everything in your bucket\n",
"delete_bucket = False\n",
"\n",
"# Delete the training job\n",
"job.delete()\n",
"\n",
"# Delete the endpoint\n",
"endpoint.delete(force=True)\n",
"\n",
"# Delete the model\n",
"model.delete()\n",
"\n",
"# Warning: uncomment this section only if you want to delete the entire bucket\n",
"# if delete_bucket and \"BUCKET\" in globals():\n",
"# ! gsutil -m rm -r $BUCKET"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Conclusion"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"In this section, you've learned how to:\n",
"* Prepare data and models for training in the cloud\n",
"* Train your model and monitor the progress of the job with AI Platform Training\n",
"* Predict using the model with AI Platform Predictions"
]
}
],
"metadata": {
"colab": {
"collapsed_sections": [],
"name": "liquor-sales-xai.ipynb",
"provenance": []
},
"environment": {
"kernel": "python3",
"name": "tf2-gpu.2-6.m82",
"type": "gcloud",
"uri": "gcr.io/deeplearning-platform-release/tf2-gpu.2-6:m82"
},
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.10"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
| Jupyter Notebook | 5 | memeyankm/training-data-analyst | courses/ai-for-time-series/notebooks/03-cloud-training.ipynb | [
"Apache-2.0"
] |
⍝ dot: inner-product operator.  (⍺⍺ dot ⍵⍵) applied between arrays A and B
⍝ replicates and transposes both arguments so their shared (inner) axis
⍝ lines up, applies ⍵⍵ elementwise, then reduces that axis with ⍺⍺.
⍝ The example below shows + dot × acting as matrix product.
dot ← {
 ⍝ Left argument: expand ⍺ to shape (1↓⍴⍵),⍴⍺ and rotate its axes so the
 ⍝ reduction axis comes last.
 WA ← (1↓⍴⍵),⍴⍺
 KA ← (⊃⍴⍴⍺)-1
 VA ← ⍳ ⊃ ⍴WA
 ZA ← (KA⌽¯1↓VA),¯1↑VA
 TA ← ZA⍉WA⍴⍺ ⍝ Replicate, transpose
 ⍝ Right argument: expand ⍵ to shape (¯1↓⍴⍺),⍴⍵ and align it the same way.
 WB ← (¯1↓⍴⍺),⍴⍵
 KB ← ⊃ ⍴⍴⍺
 VB ← ⍳ ⊃ ⍴WB
 ZB0 ← (-KB) ↓ KB ⌽ ⍳(⊃⍴VB)
 ZB ← (¯1↓(⍳ KB)),ZB0,KB
 TB ← ZB⍉WB⍴⍵ ⍝ Replicate, transpose
 ⍺⍺ / TA ⍵⍵ TB ⍝ Compute the result
}
A ← 3 2 ⍴ ⍳ 5 ⍝ Example input A
B ← ⍉ A ⍝ Example input B
R ← A + dot × B ⍝ Matrix product of A with its transpose
R2 ← ×/ +/ R ⍝ Product of the row sums of R
⍝ 1 3 5
⍝ 2 4 1
⍝
⍝ 1 2 5 11 7 -+-> 23 |
⍝ 3 4 11 25 19 -+-> 55 ×
⍝ 5 1 7 19 26 -+-> 52 |
⍝ 65780 v
| APL | 4 | mbudde/apltail | tests/inner4.apl | [
"MIT"
] |
USING System.Runtime.InteropServices
CLASS DialogWindow INHERIT Window IMPLEMENTS ILastFocus
PROTECT oResourceID AS ResourceID
PROTECT bModal AS LOGIC
PROTECT nResult AS LONG
PROTECT aRadioGroups AS ARRAY
PROTECT oLastFocus AS Control
PROTECT oSurface AS VOPanel
ACCESS __Dialog AS VODialogForm
RETURN (VODialogForm) oWnd
STATIC METHOD GetShell(oWin AS OBJECT) AS ShellWindow
DO WHILE oWin != NULL_OBJECT
IF oWin IS ShellWindow
EXIT
ENDIF
IF IsAccess(oWin, #Owner)
oWin := Ivarget(oWin,#Owner)
ELSE
oWin := NULL_OBJECT
ENDIF
ENDDO
RETURN oWin
METHOD __CreateForm AS VOForm STRICT
LOCAL oDlg AS VODialogForm
oDlg := GuiFactory.Instance:CreateDialogWindow(SELF, SELF:oResourceDialog)
// Set owner window and prevent from showing on the taskbar
IF SELF:Owner IS Window
LOCAL oShell as ShellWindow
oShell := GetShell(SELF:Owner)
IF oShell != NULL_OBJECT
oDlg:Owner := oShell:__Form
ENDIF
oDlg:ShowInTaskbar := FALSE
oDlg:Text := SELF:Caption
ENDIF
SELF:oSurface := oDlg:Surface
SELF:SetExStyle(WS_EX_APPWINDOW, FALSE)
SELF:SetExStyle(WS_EX_DLGMODALFRAME, TRUE)
SELF:SetStyle(WS_CLIPCHILDREN, FALSE)
SELF:SetStyle(4, TRUE)
IF _AND( SELF:dwStyle, WS_THICKFRAME ) = WS_THICKFRAME
oDlg:SetSizable(TRUE)
oDlg:MinimizeBox := TRUE
oDlg:MaximizeBox := TRUE
ELSE
oDlg:SetSizable(FALSE)
oDlg:MinimizeBox := FALSE
oDlg:MaximizeBox := FALSE
ENDIF
IF _AND(SELF:dwStyle, WS_SYSMENU) == WS_SYSMENU
oDlg:ControlBox := TRUE
ELSE
oDlg:ControlBox := FALSE
ENDIF
RETURN oDlg
METHOD __Close(oEvent AS @@Event) AS VOID STRICT
SELF:Close(oEvent)
SELF:Destroy()
SELF:EventReturnValue := 1L
RETURN
METHOD __RestoreLastFocus() AS VOID STRICT
IF oParent != NULL_OBJECT .AND. oParent IS ILastFocus VAR oLastFocus
IF oLastFocus:LastFocus != NULL_OBJECT
oLastFocus:LastFocus:SetFocus()
ENDIF
ENDIF
ACCESS __HasSurface AS LOGIC
RETURN TRUE
ACCESS __Surface AS IVOControlContainer
RETURN oSurface
METHOD __SetupDataControl(oDC AS Control) AS VOID
IF oDC IS RadioButtonGroup
AAdd(aRadioGroups, oDC)
ENDIF
RETURN
METHOD __SetFont() AS VOID STRICT
IF SELF:oFont != NULL_OBJECT .and. oSurface != NULL_OBJECT
oSurface:Font := SELF:oFont
ELSE
SUPER:__SetFont()
ENDIF
RETURN
METHOD Activate(oEvent AS Event)
WC.AppSetDialogWindow(SELF:oSurface)
RETURN SUPER:Activate(oEvent)
METHOD Active() AS LOGIC
RETURN __Dialog:IsShown
METHOD ButtonClick(oControlEvent AS ControlEvent)
LOCAL oButton AS Control
LOCAL dwI, dwCount AS DWORD
LOCAL oRBG AS RadioButtonGroup
LOCAL oCE := oControlEvent AS ControlEvent
SUPER:ButtonClick(oControlEvent)
oButton := oCE:Control
IF oButton IS Button
oButton:Modified := TRUE // assume its modified
IF oButton IS RadioButton
//SE-060526
dwCount := ALen(aRadioGroups)
FOR dwI := 1 UPTO dwCount
oRBG := aRadioGroups[dwI]
IF oRBG:__IsElement(OBJECT(_CAST,oButton))
oRBG:__SetOn(OBJECT(_CAST,oButton))
oButton:__Update()
oButton := oRBG
EXIT
ENDIF
NEXT //Vulcan.NET-Transporter: dwI
ENDIF
oButton:__Update() // Update radio button group
ENDIF
RETURN 0
METHOD ChangeFont(oNew_Font, lRescale)
SELF:Font := oNew_Font
RETURN TRUE
ACCESS ClipperKeys() AS LOGIC
RETURN FALSE
ASSIGN ClipperKeys(lNewValue AS LOGIC)
RETURN
METHOD ControlFocusChange(oControlFocusChangeEvent AS ControlFocusChangeEvent) AS USUAL STRICT
LOCAL oCFCE := oControlFocusChangeEvent AS ControlFocusChangeEvent
IF oCFCE:GotFocus
SELF:LastFocus := oCFCE:Control
WC.AppSetDialogWindow(oSurface)
ENDIF
RETURN NIL
METHOD DeActivate(oEvent AS Event)
RETURN SUPER:DeActivate(oEvent)
METHOD Default(oEvent AS Event)
SELF:EventReturnValue := 0
RETURN SELF
METHOD Destroy() AS USUAL
IF SELF:oSurface != NULL_OBJECT
IF (WC.AppGetDialogWindow() == SELF:oSurface)
WC.AppSetDialogWindow(NULL_OBJECT)
ENDIF
SELF:oSurface:CleanUp()
// Surface is not disposed. We may want to access the controls on the surface
ENDIF
SELF:oLastFocus := NULL_OBJECT
SUPER:Destroy()
RETURN NIL
METHOD EditFocusChange(oEditFocusChangeEvent AS EditFocusChangeEvent)
LOCAL uRetCode AS USUAL
LOCAL oEFCE AS EditFocusChangeEvent
oEFCE := oEditFocusChangeEvent
uRetCode := SUPER:EditFocusChange(oEFCE)
IF !oEFCE:GotFocus
IF oEFCE:Control != NULL_OBJECT
oEFCE:Control:__Update()
ENDIF
ENDIF
RETURN uRetCode
METHOD EndDialog(iResult)
Default(@iResult, 0)
nResult := iResult
IF SELF:__IsValid
// prevent owner invalidation and visual noise
SELF:oWnd:Owner := NULL
SELF:oWnd:Close()
ENDIF
SELF:__RestoreLastFocus()
RETURN iResult
METHOD ExecModal()
oApp:Exec(EXECNORMAL, SELF)
RETURN SELF
//METHOD HelpRequest(oHelpRequestEvent)
// SUPER:HelpRequest(oHelpRequestEvent)
// RETURN SELF
ACCESS HyperLabel AS HyperLabel
RETURN SUPER:HyperLabel
ASSIGN HyperLabel (oHL AS HyperLabel)
SUPER:HyperLabel := oHL
IF oHL != NULL_OBJECT
SELF:__Surface:Text := "Surface:"+oHL:Name
ENDIF
CONSTRUCTOR(oOwner, xResourceID, lModal)
IF IsInstanceOfUsual(oOwner, #App)
oOwner := NIL
ENDIF
IF !IsNil(oOwner) .AND. !IsInstanceOfUsual(oOwner, #Window) .AND. !IsInstanceOfUsual(oOwner, #ToolBar) .AND. !IsPtr(oOwner)
WCError{#Init,#DialogWindow,__WCSTypeError,oOwner,1}:@@Throw()
ENDIF
IF IsNumeric(xResourceID) .OR. IsSymbol(xResourceID) .OR. IsString(xResourceID)
oResourceID := ResourceID{xResourceID}
ELSEIF IsInstanceOfUsual(xResourceID, #ResourceID)
oResourceID := xResourceID
ENDIF
SELF:__ReadResource(oResourceID, oOwner)
DEFAULT(@lModal, TRUE)
bModal := lModal
SUPER(oOwner)
aRadioGroups := {}
IF (SELF:oParent != NULL_OBJECT .AND. IsAccess(SELF:oParent, #HELPDISPLAY ))
SELF:HelpDisplay := IVarGet(oParent,#HelpDisplay)
ENDIF
RETURN
ACCESS IsModal AS LOGIC
RETURN bModal
ACCESS LastFocus AS Control
RETURN oLastFocus
ASSIGN LastFocus (oControl AS Control)
LOCAL nStyle AS LONG
nStyle := SELF:GetStyle()
IF _AND(nStyle, WS_CHILD) = WS_CHILD
IF oParent != NULL_OBJECT .AND. oParent IS ILastFocus VAR oLastFocus
oLastFocus:LastFocus := oControl
ENDIF
ENDIF
oLastFocus := oControl
METHOD ListBoxClick(oControlEvent AS ControlEvent)
LOCAL oListBox := NULL_OBJECT AS ListBox
LOCAL oCE AS ControlEvent
oCE := oControlEvent
oListBox := (OBJECT) oCE:Control
oListBox:Modified := TRUE // assume its modified
oListBox:__Update()
RETURN SELF
METHOD ListBoxSelect(oControlEvent AS ControlEvent)
LOCAL oListBox := NULL_OBJECT AS ListBox
LOCAL oCE AS ControlEvent
oCE := oControlEvent
oListBox := (OBJECT) oCE:Control
oListBox:Modified := TRUE // assume its modified
oListBox:__SetText(oListBox:CurrentItem)
oListBox:__Update()
RETURN SELF
ACCESS Owner AS OBJECT
IF oParent == NULL_OBJECT
RETURN oApp
ENDIF
RETURN oParent
METHOD PostShowDialog()
RETURN NIL
ACCESS Result() AS LONG
RETURN nResult
ACCESS Surface AS VOPanel
RETURN oSurface
ASSIGN Size (oSize AS Dimension)
IF ! SELF:__Dialog:IsShown
SELF:__Dialog:InitialSize := oSize
ENDIF
SUPER:Size := oSize
// Show the dialog. Modal dialogs translate the show-state into a start
// position and enter a modal loop via ShowModal; modeless dialogs fall back
// to the normal window Show.
METHOD Show(kShowState := SHOWCENTERED AS LONG ) AS VOID
    IF bModal
        oWnd:StartPosition := SELF:__GetStartPosFromShowState(kShowState)
        SELF:ShowModal(TRUE)
    ELSE
        SUPER:Show(kShowState)
        //SELF:PostShowDialog() // Is now called from the OnShown method on the Form
    ENDIF
    RETURN
// Enter (lActive == TRUE) or leave (FALSE) the modal state.
// When entering, the dialog is shown modally against the owning shell's form
// (if an owning shell window exists); when the modal loop returns, focus is
// restored to the control that last held it. Always returns TRUE.
METHOD ShowModal(lActive AS LOGIC)
    LOCAL oShell AS ShellWindow
    IF SELF:Owner IS Window
        oShell := GetShell(SELF:Owner)
    ENDIF
    IF lActive
        // Visible is cleared first; ShowDialog performs the actual display.
        oWnd:Visible := FALSE
        IF oShell != NULL_OBJECT
            oWnd:ShowDialog(oShell:__Form)
        ELSE
            oWnd:ShowDialog()
        ENDIF
        // Set Focus to Last Control of Parent that had focus
        SELF:__RestoreLastFocus()
    ELSE
        oWnd:Visible := FALSE
        oWnd:Close()
    ENDIF
    RETURN TRUE
END CLASS
| xBase | 4 | orangesocks/XSharpPublic | Runtime/VOSdkTyped/Source/VOSdk/GUI_Classes_SDK/Windows/DialogWindow.prg | [
"Apache-2.0"
] |
syntax = "proto3";
package tensorflow.profiler;
// Next ID: 15
// Per-kernel statistics aggregated over a profiling session.
// Next ID: 15
message KernelReport {
  // Name of the kernel.
  string name = 1;
  // Registers per thread.
  uint32 registers_per_thread = 2;
  // Static shared memory in bytes.
  uint32 static_shmem_bytes = 3;
  // Dynamic shared memory in bytes.
  uint32 dynamic_shmem_bytes = 4;
  // Block dimensions.
  repeated uint32 block_dim = 5;
  // Grid dimensions.
  repeated uint32 grid_dim = 6;
  // Total duration of this kernel in nanoseconds, summed over occurrences.
  uint64 total_duration_ns = 7;
  // Min duration of kernel in nanoseconds.
  uint64 min_duration_ns = 8;
  // Max duration of kernel in nanoseconds.
  uint64 max_duration_ns = 9;
  // Kernel utilizes TensorCore instructions.
  bool is_kernel_using_tensor_core = 10;
  // Operation is eligible to use TensorCores.
  bool is_op_tensor_core_eligible = 11;
  // TF operation name.
  string op_name = 12;
  // Number of occurrences of this kernel (see total_duration_ns).
  uint32 occurrences = 13;
  // Occupancy percentage.
  float occupancy_pct = 14;
}

// Container for all kernel reports in a profile.
message KernelStatsDb {
  // A list of kernels aggregated by name.
  repeated KernelReport reports = 1;
}
| Protocol Buffer | 5 | EricRemmerswaal/tensorflow | tensorflow/core/profiler/protobuf/kernel_stats.proto | [
"Apache-2.0"
] |
/* Arduino Smart_Eink Library
* Copyright (C) 2016 by NOA Labs
* Author Bruce Guo (NOA Labs)
*
* This file is E-ink demo showing string.
*
* This Library is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This Library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this Library. If not, see
* <http://www.gnu.org/licenses/>.
*/
/*
D/C ~ D5
CS ~ D6
BUSY ~ D7
BS ~ D8
MOSI ~ D11
MISO ~ D12
CLK ~ D13
*/
#include <SmartEink.h>
#include <SPI.h>
E_ink Eink;
unsigned long wait_max;
// Busy-wait until the signal read on A0 goes LOW, then print the elapsed
// time in milliseconds to the serial port.
// NOTE(review): the wiring table at the top of this sketch routes the
// panel's BUSY line to D7, yet this polls A0 -- confirm which pin actually
// carries BUSY on this board.
void wait(void)
{
  unsigned v;
  unsigned long w;
  w = millis();  // timestamp before we start polling
  do
  {
    v = digitalRead(A0);
  } while( v != 0 );  // spin while the line stays HIGH
  w = millis() - w;
  Serial.print("wait: ");
  Serial.print(w);
  Serial.println(" ");
}
// One-time initialization: serial console, control pins, e-ink controller.
void setup()
{
  Serial.begin(9600);
  while (!Serial) {
    ; // wait for serial port to connect. Needed for native USB port only
  }
  // Pins 8-10 drive the e-ink module; per the wiring table above, pin 8 is
  // the BS line (driven HIGH before initialization).
  pinMode(8,OUTPUT);
  pinMode(9,OUTPUT);
  pinMode(10,OUTPUT);
  digitalWrite(8, HIGH);
  delay(1000);
  Eink.InitEink();
  delay(1000);
  // Timing experiment: adjust the controller's gate line width and observe
  // the effect on refresh delays (values measured in the comment below).
  Eink.WriteComm(0x3b);//set gate line width
  Eink.WriteData(0x08);//60 + value*4 us --> this influences delay 4: 694/ms, 9:1062/297, 8(POR):957/266
}
unsigned x = 0;
void loop()
{
Eink.WriteComm(0x44);//set RAM x address start/end, in page 36
Eink.WriteData(0x00);//RAM x address start at 00h;
Eink.WriteData(0x11);//RAM x address end at 11h(17)->72: [because 1F(31)->128 and 12(18)->76]
Eink.WriteComm(0x45);//set RAM y address start/end, in page 37
Eink.WriteData(0x00);//RAM y address start at 00h;
Eink.WriteData(0xab);//RAM y address start at ABh(171)->172: [because B3(179)->180]
Eink.WriteComm(0x11);//data enter mode
Eink.WriteData(0x07);
Eink.WriteComm(0x4E);//set RAM x address count to 0;
Eink.WriteData(0x00);
Eink.WriteComm(0x4F);//set RAM y address count to 0;
Eink.WriteData(0);
Eink.ClearScreen();// clear the screen
Eink.WriteComm(0x11);//data enter mode
Eink.WriteData(0x07);
Eink.WriteComm(0x4E);//set RAM x address count to 0;
Eink.WriteData(0x08);
Eink.WriteComm(0x4F);//set RAM y address count to 0;
Eink.WriteData(x*2);
x ++;
x = x & 0x01f;
//delay(1000);
Eink.WriteComm(0x24);
Eink.WriteData(0x00);
Eink.WriteData(0x03);
Eink.WriteData(0x0f);
Eink.WriteData(0x3f);
Eink.WriteData(0xff);
Eink.WriteData(0xff);
Eink.WriteData(0xff);
Eink.WriteData(0xff);
Eink.WriteData(0xff);
Eink.WriteData(0x03);
Eink.WriteData(0x0f);
Eink.WriteData(0x3f);
Eink.WriteData(0xff);
Eink.WriteData(0xff);
Eink.WriteData(0xff);
Eink.WriteData(0xff);
Eink.WriteData(0xff);
Eink.WriteData(0x00);
Eink.WriteComm(0x22);//display updata sequence option ,in page 33
Eink.WriteData(0xC0);//enable sequence: clk -> CP -> on
Eink.WriteComm(0x20);
Serial.print("Clk+CP ");
wait();
//delay(1000);
Eink.WriteComm(0x22);//display updata sequence option ,in page 33
Eink.WriteData(0x04);//enable sequence: clk -> CP -> LUT -> initial display -> pattern display
Eink.WriteComm(0x20);
//delay(1000);
Serial.print("To Display ");
wait();
Eink.WriteComm(0x22);
Eink.WriteData(0x03);
Eink.WriteComm(0x20);
//delay(1000);
Serial.print("CP/Clock off ");
wait();
//delay(1000);
//Eink.ClearScreen();// clear the screen
//Eink.EinkP8x16Str(14,8,"NOA-Labs.com");
//Eink.EinkP8x16Str(10,8,"smart-prototyping.com");
//Eink.EinkP8x16Str(6,8,"0123456789");
//Eink.EinkP8x16Str(2,8,"ABCDEFG abcdefg");
//Eink.RefreshScreen();
}
// Unused placeholder; not called by the Arduino core (the active loop() is
// above). Kept, presumably, as a scratch area for experiments.
void xloop()
{
}
| Arduino | 4 | Linghhh/u8g2 | tools/extra/ssd1606timing.ino | [
"BSD-2-Clause"
] |
/* Route packets between a local bus and a network bus in both directions,
using a switch that supports NAT (network address translation).
The local bus is assigned a public address 0.0.0.1 in the switch, and that
bus id can be used to reach devices within the local bus.
__________ ________ __________
| LOCAL | Pin 7 | NAT | Pin 12 | NETWORK |
| DEVICE |_______________| SWITCH |_______________| DEVICE |
|__________| Local bus |________| Bus 0.0.0.2 |__________|
(NAT 0.0.0.1)
*/
#include <PJONSoftwareBitBang.h>
// Bus id definition
uint8_t bus_id[] = {0, 0, 0, 2};
uint8_t remote_bus_id[] = {0, 0, 0, 1};
// PJON object for a network bus
PJONSoftwareBitBang bus(bus_id, 45);
// Configure the LED, attach this device to the NAT switch on pin 12, and
// kick off the ping-pong by sending the first 'B' packet to device 44 on
// the remote (local) bus -- see the topology diagram above.
void setup() {
  pinMode(LED_BUILTIN, OUTPUT);
  digitalWrite(LED_BUILTIN, LOW); // Initialize LED to be off
  bus.strategy.set_pin(12);
  bus.set_receiver(receiver_function);
  bus.begin();
  bus.send(44, remote_bus_id, "B", 1);
}
// Packet handler: on each incoming 'B' ("blink") request, acknowledge with a
// 'B' reply and toggle the built-in LED. Any other payload is ignored.
void receiver_function(uint8_t *payload, uint16_t length, const PJON_Packet_Info &packet_info) {
  if((char)payload[0] != 'B') return;     // not a blink request -- ignore
  bus.reply("B", 1);                      // answer so the peer keeps ping-ponging
  static bool led_on = false;             // toggle state, persists across calls
  digitalWrite(LED_BUILTIN, led_on ? HIGH : LOW);
  led_on = !led_on;                       // flip for the next request
}
// Main loop: poll the bus for incoming packets, then flush outgoing ones.
void loop() {
  bus.receive(1000);
  bus.update();
}
| Arduino | 5 | solhuebner/PJON | examples/routing/ARDUINO/Network/LocalBusViaNATSwitch/NetworkDevice/NetworkDevice.ino | [
"Apache-2.0"
] |
size: 1024px 512px;
dpi: 96;
limit-x: 0 1000;
limit-y: 0 400;
bars {
data-x: list(100 300 500 700 900);
data-y: list(200 300 200 300 100);
color: #06c;
}
| CLIPS | 2 | paulasmuth/fnordmetric | test/plot-bars/bars_basic.clp | [
"Apache-2.0"
] |
////////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// Intel License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of Intel Corporation may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//
//
// The code has been contributed by Justin G. Eskesen on 2010 Jan
//
#include "precomp.hpp"
#include "cap_interface.hpp"
#ifdef HAVE_PVAPI
#if !defined _WIN32 && !defined _LINUX
#define _LINUX
#endif
#if defined(_x64) || defined (__x86_64) || defined (_M_X64)
#define _x64 1
#elif defined(_x86) || defined(__i386) || defined (_M_IX86)
#define _x86 1
#endif
#include <PvApi.h>
#ifdef _WIN32
# include <io.h>
#else
# include <time.h>
# include <unistd.h>
#endif
//#include <arpa/inet.h>
#define MAX_CAMERAS 10
/********************* Capturing video from camera via PvAPI *********************/
// CvCapture backend for GigE Vision cameras driven through AVT's PvAPI.
class CvCaptureCAM_PvAPI : public CvCapture
{
public:
    CvCaptureCAM_PvAPI();
    virtual ~CvCaptureCAM_PvAPI()
    {
        close();
    }

    virtual bool open( int index );
    virtual void close();
    virtual double getProperty(int) const CV_OVERRIDE;
    virtual bool setProperty(int, double) CV_OVERRIDE;
    virtual bool grabFrame() CV_OVERRIDE;
    virtual IplImage* retrieveFrame(int) CV_OVERRIDE;
    virtual int getCaptureDomain() CV_OVERRIDE
    {
        return CV_CAP_PVAPI;
    }

protected:
#ifndef _WIN32
    // Millisecond sleep shim so the non-Windows build matches Win32's Sleep().
    virtual void Sleep(unsigned int time);
#endif

    void stopCapture();
    bool startCapture();
    // (Re)allocates `frame` for the given geometry and current pixel format.
    bool resizeCaptureFrame (int frameWidth, int frameHeight);

    // Per-camera state: PvAPI unique id, open handle, and the frame
    // descriptor whose buffer aliases `frame`'s pixel data.
    typedef struct
    {
        unsigned long   UID;
        tPvHandle       Handle;
        tPvFrame        Frame;
    } tCamera;

    IplImage *frame;   // destination image returned by retrieveFrame()
    tCamera   Camera;
    tPvErr    Errcode; // NOTE(review): never written by any method here
};
// Construct an unopened capture: no frame allocated, camera state zeroed.
CvCaptureCAM_PvAPI::CvCaptureCAM_PvAPI()
{
    frame = NULL;
    memset(&this->Camera, 0, sizeof(this->Camera));
}
#ifndef _WIN32
// Sleep for `time` milliseconds. If nanosleep is interrupted by a signal it
// reports the unslept remainder, which we re-submit until the full interval
// has elapsed.
void CvCaptureCAM_PvAPI::Sleep(unsigned int time)
{
    struct timespec request;
    struct timespec remaining;
    request.tv_sec  = time / 1000;
    request.tv_nsec = (time % 1000) * 1000000;
    while (nanosleep(&request, &remaining) == -1)
        request = remaining;  // interrupted: retry with what is left
}
#endif
// Stop acquisition, release the camera handle, and shut the PvAPI down.
// Also invoked from the destructor.
void CvCaptureCAM_PvAPI::close()
{
    // Stop the acquisition & free the camera
    stopCapture();
    PvCameraClose(Camera.Handle);
    PvUnInitialize();
}
// Initialize camera input: bring up PvAPI, enumerate cameras, open the
// `index`-th one with master access, configure jumbo-frame packet size,
// allocate the frame buffer, and start continuous acquisition.
// Returns false when the index is out of range or any step fails.
bool CvCaptureCAM_PvAPI::open( int index )
{
    tPvCameraInfo cameraList[MAX_CAMERAS];

    tPvCameraInfo  camInfo;
    tPvIpSettings ipSettings;

    // NOTE(review): a PvInitialize() failure is deliberately swallowed here
    // (the early return below is commented out); if it did fail, the camera
    // list query will simply come back empty.
    if (PvInitialize()) {
    }
    //return false;

    Sleep(1000);  // presumably gives PvAPI time to discover cameras -- TODO confirm
    //close();
    int numCameras=PvCameraList(cameraList, MAX_CAMERAS, NULL);

    if (numCameras <= 0 || index >= numCameras)
        return false;

    Camera.UID = cameraList[index].UniqueId;

    // Sanity check: both camera info and IP settings must be retrievable.
    if (!PvCameraInfo(Camera.UID,&camInfo) && !PvCameraIpSettingsGet(Camera.UID,&ipSettings))
    {
        /*
        struct in_addr addr;
        addr.s_addr = ipSettings.CurrentIpAddress;
        printf("Current address:\t%s\n",inet_ntoa(addr));
        addr.s_addr = ipSettings.CurrentIpSubnet;
        printf("Current subnet:\t\t%s\n",inet_ntoa(addr));
        addr.s_addr = ipSettings.CurrentIpGateway;
        printf("Current gateway:\t%s\n",inet_ntoa(addr));
        */
    }
    else
    {
        fprintf(stderr,"ERROR: could not retrieve camera IP settings.\n");
        return false;
    }

    if (PvCameraOpen(Camera.UID, ePvAccessMaster, &(Camera.Handle))==ePvErrSuccess)
    {
        tPvUint32 frameWidth, frameHeight;
        unsigned long maxSize;

        PvAttrUint32Get(Camera.Handle, "Width", &frameWidth);
        PvAttrUint32Get(Camera.Handle, "Height", &frameHeight);

        // Determine the maximum packet size supported by the system (ethernet adapter)
        // and then configure the camera to use this value. If the system's NIC only supports
        // an MTU of 1500 or lower, this will automatically configure an MTU of 1500.
        // 8228 is the optimal size described by the API in order to enable jumbo frames
        maxSize = 8228;
        //PvAttrUint32Get(Camera.Handle,"PacketSize",&maxSize);
        if (PvCaptureAdjustPacketSize(Camera.Handle,maxSize)!=ePvErrSuccess)
            return false;

        resizeCaptureFrame(frameWidth, frameHeight);
        return startCapture();
    }
    fprintf(stderr,"Error cannot open camera\n");
    return false;
}
// Queue the (single) frame descriptor for capture; the driver fills the
// buffer asynchronously and retrieveFrame() waits for completion.
bool CvCaptureCAM_PvAPI::grabFrame()
{
    //if(Camera.Frame.Status != ePvErrUnplugged && Camera.Frame.Status != ePvErrCancelled)
    return PvCaptureQueueFrame(Camera.Handle, &(Camera.Frame), NULL) == ePvErrSuccess;
}
// Wait (up to 1 s) for the frame queued by grabFrame() to finish. The pixels
// land directly in `frame`'s buffer (wired up in resizeCaptureFrame), so on
// success we just hand the cached image back; on timeout/error return NULL.
IplImage* CvCaptureCAM_PvAPI::retrieveFrame(int)
{
    if (PvCaptureWaitForFrameDone(Camera.Handle, &(Camera.Frame), 1000) != ePvErrSuccess)
        return NULL;
    return frame;
}
// Read a capture property directly from the camera via PvAPI attribute
// queries. Returns -1.0 for unknown properties or unmappable enum values;
// CV_CAP_PROP_PVAPI_MULTICASTIP also returns -1 when multicast is disabled.
double CvCaptureCAM_PvAPI::getProperty( int property_id ) const
{
    tPvUint32 nTemp;

    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        PvAttrUint32Get(Camera.Handle, "Width", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_FRAME_HEIGHT:
        PvAttrUint32Get(Camera.Handle, "Height", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_EXPOSURE:
        PvAttrUint32Get(Camera.Handle,"ExposureValue",&nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_FPS:
        tPvFloat32 nfTemp;
        PvAttrFloat32Get(Camera.Handle, "StatFrameRate", &nfTemp);
        return (double)nfTemp;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        // BUGFIX: the buffers were declared as mEnable[2] / mIp[11], which is
        // too small -- "Off" needs 4 bytes including the terminator, and a
        // dotted-quad IPv4 string needs up to 16. With a 2-byte buffer the
        // "Off" comparison could never match, so a disabled multicast state
        // was misreported. 16 bytes covers both.
        char mEnable[16];
        char mIp[16];
        PvAttrEnumGet(Camera.Handle,"MulticastEnable",mEnable,sizeof(mEnable),NULL);
        if (strcmp(mEnable, "Off") == 0)
        {
            return -1;
        }
        else
        {
            // Pack the dotted quad a.b.c.d into a single integer.
            long int ip;
            int a,b,c,d;
            PvAttrStringGet(Camera.Handle, "MulticastIPAddress",mIp,sizeof(mIp),NULL);
            sscanf(mIp, "%d.%d.%d.%d", &a, &b, &c, &d); ip = ((a*256 + b)*256 + c)*256 + d;
            return (double)ip;
        }
    case CV_CAP_PROP_GAIN:
        PvAttrUint32Get(Camera.Handle, "GainValue", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE:
        // Map the camera's trigger-mode enum onto the numeric codes
        // setProperty() accepts (0..4).
        char triggerMode[256];
        PvAttrEnumGet(Camera.Handle, "FrameStartTriggerMode", triggerMode, 256, NULL);
        if (strcmp(triggerMode, "Freerun")==0)
            return 0.0;
        else if (strcmp(triggerMode, "SyncIn1")==0)
            return 1.0;
        else if (strcmp(triggerMode, "SyncIn2")==0)
            return 2.0;
        else if (strcmp(triggerMode, "FixedRate")==0)
            return 3.0;
        else if (strcmp(triggerMode, "Software")==0)
            return 4.0;
        else
            return -1.0;
    case CV_CAP_PROP_PVAPI_DECIMATIONHORIZONTAL:
        PvAttrUint32Get(Camera.Handle, "DecimationHorizontal", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_DECIMATIONVERTICAL:
        PvAttrUint32Get(Camera.Handle, "DecimationVertical", &nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_BINNINGX:
        PvAttrUint32Get(Camera.Handle,"BinningX",&nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_BINNINGY:
        PvAttrUint32Get(Camera.Handle,"BinningY",&nTemp);
        return (double)nTemp;
    case CV_CAP_PROP_PVAPI_PIXELFORMAT:
        // Map the pixel-format enum onto the numeric codes used by
        // setProperty() (1..8).
        char pixelFormat[256];
        PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
        if (strcmp(pixelFormat, "Mono8")==0)
            return 1.0;
        else if (strcmp(pixelFormat, "Mono16")==0)
            return 2.0;
        else if (strcmp(pixelFormat, "Bayer8")==0)
            return 3.0;
        else if (strcmp(pixelFormat, "Bayer16")==0)
            return 4.0;
        else if (strcmp(pixelFormat, "Rgb24")==0)
            return 5.0;
        else if (strcmp(pixelFormat, "Bgr24")==0)
            return 6.0;
        else if (strcmp(pixelFormat, "Rgba32")==0)
            return 7.0;
        else if (strcmp(pixelFormat, "Bgra32")==0)
            return 8.0;
    }
    return -1.0;
}
// Write a capture property to the camera. Geometry and pixel-format changes
// stop acquisition, reallocate the frame buffer, and restart; other
// properties map straight onto PvAPI attributes. Returns false on any
// PvAPI error, out-of-range value, or unknown property id.
bool CvCaptureCAM_PvAPI::setProperty( int property_id, double value )
{
    tPvErr error;

    switch ( property_id )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
    {
        tPvUint32 currHeight;

        PvAttrUint32Get(Camera.Handle, "Height", &currHeight);

        stopCapture();
        // Reallocate Frames
        if (!resizeCaptureFrame(value, currHeight))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_FRAME_HEIGHT:
    {
        tPvUint32 currWidth;

        PvAttrUint32Get(Camera.Handle, "Width", &currWidth);

        stopCapture();

        // Reallocate Frames
        if (!resizeCaptureFrame(currWidth, value))
        {
            startCapture();
            return false;
        }

        startCapture();

        break;
    }
    case CV_CAP_PROP_EXPOSURE:
        if ((PvAttrUint32Set(Camera.Handle,"ExposureValue",(tPvUint32)value)==ePvErrSuccess))
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_MULTICASTIP:
        // value == -1 disables multicast; otherwise `value` is the IPv4
        // address packed into an integer (see getProperty for the encoding).
        if (value==-1)
        {
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "Off")==ePvErrSuccess))
                break;
            else
                return false;
        }
        else
        {
            cv::String ip=cv::format("%d.%d.%d.%d", ((unsigned int)value>>24)&255, ((unsigned int)value>>16)&255, ((unsigned int)value>>8)&255, (unsigned int)value&255);
            if ((PvAttrEnumSet(Camera.Handle,"MulticastEnable", "On")==ePvErrSuccess) &&
                (PvAttrStringSet(Camera.Handle, "MulticastIPAddress", ip.c_str())==ePvErrSuccess))
                break;
            else
                return false;
        }
    case CV_CAP_PROP_GAIN:
        if (PvAttrUint32Set(Camera.Handle,"GainValue",(tPvUint32)value)!=ePvErrSuccess)
        {
            return false;
        }
        break;
    case CV_CAP_PROP_PVAPI_FRAMESTARTTRIGGERMODE:
        // Numeric codes 0..4 map to the camera's trigger-mode enum strings
        // (inverse of the mapping in getProperty).
        if (value==0)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun");
        else if (value==1)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "SyncIn1");
        else if (value==2)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "SyncIn2");
        else if (value==3)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "FixedRate");
        else if (value==4)
            error = PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Software");
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_DECIMATIONHORIZONTAL:
        if (value >= 1 && value <= 8)
            error = PvAttrUint32Set(Camera.Handle, "DecimationHorizontal", value);
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_DECIMATIONVERTICAL:
        if (value >= 1 && value <= 8)
            error = PvAttrUint32Set(Camera.Handle, "DecimationVertical", value);
        else
            error = ePvErrOutOfRange;
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_BINNINGX:
        error = PvAttrUint32Set(Camera.Handle, "BinningX", value);
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_BINNINGY:
        error = PvAttrUint32Set(Camera.Handle, "BinningY", value);
        if(error==ePvErrSuccess)
            break;
        else
            return false;
    case CV_CAP_PROP_PVAPI_PIXELFORMAT:
    {
        // Numeric codes 1..8 map to format strings (inverse of getProperty).
        // A successful change requires reallocating the frame buffer because
        // depth/channels may differ.
        cv::String pixelFormat;

        if (value==1)
            pixelFormat = "Mono8";
        else if (value==2)
            pixelFormat = "Mono16";
        else if (value==3)
            pixelFormat = "Bayer8";
        else if (value==4)
            pixelFormat = "Bayer16";
        else if (value==5)
            pixelFormat = "Rgb24";
        else if (value==6)
            pixelFormat = "Bgr24";
        else if (value==7)
            pixelFormat = "Rgba32";
        else if (value==8)
            pixelFormat = "Bgra32";
        else
            return false;

        if ((PvAttrEnumSet(Camera.Handle,"PixelFormat", pixelFormat.c_str())==ePvErrSuccess))
        {
            tPvUint32 currWidth;
            tPvUint32 currHeight;

            PvAttrUint32Get(Camera.Handle, "Width", &currWidth);
            PvAttrUint32Get(Camera.Handle, "Height", &currHeight);

            stopCapture();
            // Reallocate Frames
            if (!resizeCaptureFrame(currWidth, currHeight))
            {
                startCapture();
                return false;
            }

            startCapture();

            return true;
        }
        else
            return false;
    }
    default:
        return false;
    }
    return true;
}
// Halt acquisition on the camera and tear down the capture stream.
// Errors are ignored: this also runs on the close/teardown path.
void CvCaptureCAM_PvAPI::stopCapture()
{
    PvCommandRun(Camera.Handle, "AcquisitionStop");
    PvCaptureEnd(Camera.Handle);
}
// Arm the PvAPI capture stream and begin continuous, free-running
// acquisition. Returns false (with a message on stderr) if any camera
// attribute or command fails.
bool CvCaptureCAM_PvAPI::startCapture()
{
    // Start the camera
    PvCaptureStart(Camera.Handle);

    // Set the camera to capture continuously
    if(PvAttrEnumSet(Camera.Handle, "AcquisitionMode", "Continuous")!= ePvErrSuccess)
    {
        fprintf(stderr,"Could not set PvAPI Acquisition Mode\n");
        return false;
    }

    if(PvCommandRun(Camera.Handle, "AcquisitionStart")!= ePvErrSuccess)
    {
        fprintf(stderr,"Could not start PvAPI acquisition\n");
        return false;
    }

    if(PvAttrEnumSet(Camera.Handle, "FrameStartTriggerMode", "Freerun")!= ePvErrSuccess)
    {
        // BUGFIX: this message was missing its terminating newline, so it ran
        // into the next line of stderr output.
        fprintf(stderr,"Error setting PvAPI trigger to \"Freerun\"\n");
        return false;
    }

    return true;
}
// (Re)allocate the destination IplImage for the requested geometry and the
// camera's current pixel format, and point the PvAPI frame descriptor at its
// pixel buffer. Out-of-range dimensions are clamped to the sensor size.
// Returns false on attribute errors or an unsupported pixel format.
bool CvCaptureCAM_PvAPI::resizeCaptureFrame (int frameWidth, int frameHeight)
{
    char pixelFormat[256];
    tPvUint32 frameSize;
    tPvUint32 sensorHeight;
    tPvUint32 sensorWidth;

    if (frame)
    {
        cvReleaseImage(&frame);
        frame = NULL;
    }

    if (PvAttrUint32Get(Camera.Handle, "SensorWidth", &sensorWidth) != ePvErrSuccess)
    {
        return false;
    }

    if (PvAttrUint32Get(Camera.Handle, "SensorHeight", &sensorHeight) != ePvErrSuccess)
    {
        return false;
    }

    // Cap out of bounds widths to the max supported by the sensor
    if ((frameWidth < 0) || ((tPvUint32)frameWidth > sensorWidth))
    {
        frameWidth = sensorWidth;
    }

    if ((frameHeight < 0) || ((tPvUint32)frameHeight > sensorHeight))
    {
        frameHeight = sensorHeight;
    }

    if (PvAttrUint32Set(Camera.Handle, "Height", frameHeight) != ePvErrSuccess)
    {
        return false;
    }

    if (PvAttrUint32Set(Camera.Handle, "Width", frameWidth) != ePvErrSuccess)
    {
        return false;
    }

    PvAttrEnumGet(Camera.Handle, "PixelFormat", pixelFormat,256,NULL);
    PvAttrUint32Get(Camera.Handle, "TotalBytesPerFrame", &frameSize);

    // Map the pixel format to an IplImage depth / channel pair instead of
    // duplicating the allocation code per format (behavior unchanged).
    int depth;
    int channels;
    if ( (strcmp(pixelFormat, "Mono8")==0) || (strcmp(pixelFormat, "Bayer8")==0) )
    {
        depth = IPL_DEPTH_8U;  channels = 1;
    }
    else if ( (strcmp(pixelFormat, "Mono16")==0) || (strcmp(pixelFormat, "Bayer16")==0) )
    {
        depth = IPL_DEPTH_16U; channels = 1;
    }
    else if ( (strcmp(pixelFormat, "Rgb24")==0) || (strcmp(pixelFormat, "Bgr24")==0) )
    {
        depth = IPL_DEPTH_8U;  channels = 3;
    }
    else if ( (strcmp(pixelFormat, "Rgba32")==0) || (strcmp(pixelFormat, "Bgra32")==0) )
    {
        depth = IPL_DEPTH_8U;  channels = 4;
    }
    else
    {
        return false;
    }

    frame = cvCreateImage(cvSize(frameWidth, frameHeight), depth, channels);
    // PvAPI fills the buffer with tightly packed rows, so force an unpadded
    // widthStep (width * bytes-per-pixel), exactly as the original
    // per-format branches did.
    int bytesPerPixel = (depth == IPL_DEPTH_16U) ? 2 : channels;
    frame->widthStep = frameWidth * bytesPerPixel;
    Camera.Frame.ImageBufferSize = frameSize;
    Camera.Frame.ImageBuffer = frame->imageData;
    return true;
}
// Factory: try to open PvAPI camera `index`; on success wrap the legacy
// capture object for the IVideoCapture interface, otherwise clean up and
// return NULL.
cv::Ptr<cv::IVideoCapture> cv::create_PvAPI_capture( int index )
{
    CvCaptureCAM_PvAPI* capture = new CvCaptureCAM_PvAPI;
    if (!capture->open(index))
    {
        delete capture;
        return NULL;
    }
    return cv::makePtr<cv::LegacyCapture>(capture);
}
#endif
| C++ | 4 | thisisgopalmandal/opencv | modules/videoio/src/cap_pvapi.cpp | [
"BSD-3-Clause"
] |
a ← (-,÷)5  ⍝ 3-train (fork): (-5),(÷5), i.e. the negation catenated with the reciprocal
+/a         ⍝ sum-reduce the resulting pair
"MIT"
] |
// @noemithelpers: true
// @experimentaldecorators: true
// @emitdecoratormetadata: true
// @target: es5
// @module: commonjs
// @filename: db.ts
// Exported class whose name intentionally matches the parameter type used
// in service.ts -- the collision this fixture exercises.
export class db {
    public doSomething() {
    }
}
// @filename: service.ts
import {db} from './db';
// Identity class decorator: returns the target unchanged. Its presence
// (with emitDecoratorMetadata on) forces metadata emission for MyClass.
function someDecorator(target) {
    return target;
}
@someDecorator
class MyClass {
db: db;
constructor(db: db) {
this.db = db;
this.db.doSomething();
}
}
export {MyClass};
| TypeScript | 3 | nilamjadhav/TypeScript | tests/cases/compiler/decoratorMetadataWithImportDeclarationNameCollision.ts | [
"Apache-2.0"
] |
# Copyright (C) 2001-2003, Parrot Foundation.
# $Id$
=head1 NAME
examples/benchmarks/primes.pasm - Calculate prime numbers < 10000
=head1 SYNOPSIS
% ./parrot examples/benchmarks/primes.pasm
=head1 DESCRIPTION
Calculates all the prime numbers up to 10000 (the limit set in I2) and prints
out the number of primes, the last one found, and the time taken.
=cut
# I1 holds the number we're currently checking for primality
    set I1, 1
# I2 holds the highest number we want to check for primality
    set I2, 10000
# I6 counts the primes found; I7 remembers the last one
    set I6, 0
    print "N primes up to "
    print I2
    print " is: "
    time N10
# NOTE(review): the scan starts at I1 = 1; for I1 = 1 the factor loop never
# runs (I4 = 1/2 = 0, so "le I3, I4" fails immediately) and 1 is counted as
# prime, making the reported count one higher than the true prime count.
    # I1 counts up to I2
REDO:   # I3 counts from 2 up to I4 (I1/2)
    set I3, 2
    div I4, I1, 2
LOOP:   # Check if I3 is a factor of I1
    cmod I5, I1, I3
    if I5, OK
    # We've found a factor, so it can't be a prime and
    # we can skip right out of this loop and to the next
    # number
    branch NEXT
OK: inc I3
    le I3, I4, LOOP
    # We haven't found a factor so it must be a prime
    inc I6
    set I7, I1
#	print I1
#	print "\n"	# to get them all
NEXT:   # Move on to the next number
    inc I1
    le I1, I2, REDO
    time N11
    print I6
    print "\nlast is: "
    print I7
    print "\n"
    sub N11, N10
    print "Elapsed time: "
    print N11
    print "\n"
    end
=head1 SEE ALSO
F<examples/benchmarks/primes.c>,
F<examples/benchmarks/primes.pl>,
F<examples/benchmarks/primes2_p.pasm>,
F<examples/benchmarks/primes2.c>,
F<examples/benchmarks/primes2.pir>,
F<examples/benchmarks/primes2.py>.
=cut
# Local Variables:
# mode: pir
# fill-column: 100
# End:
# vim: expandtab shiftwidth=4 ft=pir:
| Parrot Assembly | 4 | allisonrandal/pcc_testing | examples/benchmarks/primes_i.pasm | [
"Artistic-2.0"
] |
{# Target paths for the key and certificate come from pillar data. #}
{% set keyfile = pillar['keyfile'] %}
{% set crtfile = pillar['crtfile'] %}

# Ensure an x509 private key exists at the pillar-provided path.
private_key:
  x509.private_key_managed:
    - name: {{ keyfile }}

# Self-sign a certificate with that key.
# days_remaining: 0 -- presumably disables renewal based on remaining
# validity; verify against the salt x509 state docs.
self_signed_cert:
  x509.certificate_managed:
    - name: {{ crtfile }}
    - signing_private_key: {{ keyfile }}
    - CN: localhost
    - days_valid: 30
    - days_remaining: 0
    - require:
      - x509: private_key
"Apache-2.0"
] |
# ======================================================================================================================
# Government expenses
# ======================================================================================================================
# ======================================================================================================================
# Variable definition
# - Define variables and group them based on endogeneity, inflation or growth adjustment, and how they should be forecast (if exogenous)
# ======================================================================================================================
$IF %stage% == "variables":
$GROUP G_GovExpenses_prices
pGxAfskr[t]$(tx0[t]) "Deflator for offentligt forbrug fratrukket offentlige afskrivninger, Kilde: ADAM[pCov]"
;
$GROUP G_GovExpenses_quantities
qGxAfskr[t] "Offentligt forbrug fratrukket offentlige afskrivninger, Kilde: ADAM[fCov]"
qG[g_,t]$(g[g_]) "Offentligt forbrug, Kilde: ADAM[fCo]"
;
$GROUP G_GovExpenses_values_endo
# Primære udgifter
vGxAfskr[t] "Offentligt forbrug fratrukket offentlige afskrivninger, Kilde: ADAM[Cov]"
vOffPrimUd[t] "Primære offentlige udgifter, Kilde: ADAM[Tf_o_z]-ADAM[Ti_o_z]"
vOffInv[t] "Offentlige investeringer, Kilde: ADAM[Io1]"
vOffSub[t] "Dansk finansieret subsidier ialt, Kilde: ADAM[Spu_o]"
vSubY[t]$(t.val > 2015) "Produktionssubsidier, Kilde: ADAM[Spzu]"
vSubYRes[t]$(tx0[t]) "Produktionssubsidier ekskl. løntilskud, Kilde: ADAM[Spzu]-ADAM[Spzul]"
vOffUdRest[t] "Øvrige offentlige udgifter."
vOffTilHh[t] "Residuale overførsler fra offentlig sektor til husholdningerne, Kilde: ADAM[tK_o_h]+ADAM[Tr_o_h])"
vOvf[ovf_,t]$(t.val > 2015) "Sociale overførsler fra offentlig forvaltning og service til husholdninger, Kilde: ADAM[Ty_o] for underkomponenter jf. o-set."
vHhOvf[a,t]$(a15t100[a] and t.val > 2015) "Indkomstoverførsler til indl. hush. pr. person (i befolkningen) fordelt på alder."
vOvfSkatPl[a_,t]$(t.val > 2015) "Skattepligtige indkomstoverførsler pr. person (i befolkningen) fordelt på alder."
vOvfSats[ovf,t]$(t.val > 2015) "Sociale overførsler fra offentlig forvaltning og service til husholdninger pr. person i basen (mio. kr.)"
vOvfUbeskat[a_,t]$((a0t100[a_] and t.val > 2015) or (sameas['tot',a_] and t.val > 2000)) "Ubeskattede indkomstoverførsler pr. person (i befolkningen) fordelt på alder."
vGLukning[t] "Udgift til beregningsteknisk stigning i offentligt forbrug til lukning af offentlig budgetrestriktion."
;
$GROUP G_GovExpenses_endo
G_GovExpenses_prices
G_GovExpenses_quantities
G_GovExpenses_values_endo
uvGxAfskr[t] "Skala-parameter som bestemmer niveau for offentligt service før korrektion for demografisk træk og løn-niveau."
nOvf[ovf,t] "Basen til sociale overførsler i antal 1000 personer fordelt på ordninger."
fHhOvf[t]$(t.val > 2015) "Korrektionsfaktor som sikre afbalancering af overførsler-indkomster fordelt på henholdsvis alder og overførselstype."
dnOvf2dBesk[ovf,t] "Beskæftigelsesnøgle fordelt på modtagere af overførsels-indkomster."
dnOvf2dPop[ovf,t]$(t.val > 1990) "Befolkningssnøgle fordelt på modtagere af overførsels-indkomster."
dvOvf2dBesk[ovf_,t]$(ovf[ovf_] or sameas['hh',ovf_]) "Beskæftigelsesnøgle fordelt på overførsels-indkomster."
dvOvf2dPop[ovf_,t]$(ovf[ovf_] or sameas['hh',ovf_]) "Befolkningssnøgle fordelt på overførsels-indkomster."
rOffLandKoeb2BNP[t] "Nettoopkob af jord og rettigheder relativt til BNP."
rOffTilUdl2BNP[t] "Offentlige overførsler til udlandet relativt til BNP."
rOffTilVirk2BNP[t] "Offentlige overførsler til selskaber relativt til BNP."
rOffTilHhKap2BNP[t] "Offentlige kapitaloverførsler til husholdninger relativt til BNP."
rOffTilHhOev2BNP[t] "Offentlige øvrige overførsler til husholdninger relativt til BNP."
rSubEU2BNP[t] "Subsidier finansieret af EU relativt til BNP."
;
$GROUP G_GovExpenses_endo G_GovExpenses_endo$(tx0[t]); # Restrict endo group to tx0[t]
$GROUP G_GovExpenses_values_exo
vHhOvfRest[a,t] "Residual som sikrer at samlede overførselsindkomster til en aldersgruppe passer til aldersprofil."
vOffLandKoeb[t] "Den offentlige sektors nettoopkøb af jord og rettigheder, Kilde: ADAM[Izn_o]"
vOffTilUdl[t] "Residuale overførsler fra offentlig sektor til udlandet, Kilde: ADAM[Tr_o_e]+ADAM[tK_o_e]"
vOffTilVirk[t] "Residuale overførsler fra offentlig sektor til indenlandske selskaber, Kilde: ADAM[tK_o_c]"
vOffTilHhKap[t] "Residuale kapitaloverførsler fra offentlig sektor til husholdningerne, Kilde: ADAM[tK_o_h])"
vOffTilHhOev[t] "Residuale øvrige overførsler fra offentlig sektor til husholdningerne, Kilde: ADAM[Tr_o_h])"
vSubEU[t] "Subsider finansieret af EU, Kilde: ADAM[Spueu]"
;
$GROUP G_GovExpenses_values
G_GovExpenses_values_endo
G_GovExpenses_values_exo
;
$GROUP G_GovExpenses_ARIMA_forecast
uG[g_,t] "Skalaparameter i det offentlige forbrugsnest."
rvYsubRest2BVT[t] "Produktionssubsidier ekskl. løntilskud relativt til BVT."
# Endogene i stødforløb:
rOffLandKoeb2BNP
rOffTilUdl2BNP
rOffTilVirk2BNP
rOffTilHhKap2BNP
rOffTilHhOev2BNP
rSubEU2BNP
;
$GROUP G_GovExpenses_other
eG[g_] "Substitutionselasticitet mellem forskellige grupper af offentligt forbrug."
rGLukning[t] "Beregningsteknisk stigning i offentligt forbrug til lukning af offentlig budgetrestriktion."
;
$GROUP G_GovExpenses_exogenous_forecast
fDemoTraek[t] "Demografisk træk."
uHhOvfPop[a,t] "Aldersmæssig fordelingsnøgle knyttet til dvOvf2dPop."
uOvfUbeskat[a,t] "Aldersmæssig fordelingsnøgle knyttet til vOvfUbeskat."
jvOvf[ovf,t] "J-led som fanger at nogle grupper har en overførsel, men ingen modtagere. Fordeles til husholdningerne gennem fHhOvf."
uvOvfSats[ovf,t]$(sameas['groen',ovf]) "Skalaparameter for overførselssatser."
;
$ENDIF
# ======================================================================================================================
# Equations
# ======================================================================================================================
$IF %stage% == "equations":
$BLOCK B_GovExpenses
# NOTE(review): naming convention appears to be v=nominal value, q=quantity,
# p=price, n=count, r=rate/share (cf. E_pGxAfskr: p*q = v) - confirm against
# the model's general documentation.
# ----------------------------------------------------------------------------------------------------------------------
# Public consumption
# ----------------------------------------------------------------------------------------------------------------------
# Aggregate public consumption excluding depreciations follows demographic needs and wage growth
E_vGxAfskr[t]$(tx0[t]).. vGxAfskr[t] =E= uvGxAfskr[t] * (1+rGLukning[t]) * fDemoTraek[t] * vhW[t];
E_qGxAfskr[t]$(tx0[t])..
qGxAfskr[t] * pGxAfskr[t-1]/fp =E= pG[gTot,t-1]/fp * qG[gTot,t] - pOffAfskr[kTot,t-1]/fp * qOffAfskr[kTot,t];
E_pGxAfskr[t]$(tx0[t]).. pGxAfskr[t] * qGxAfskr[t] =E= vGxAfskr[t];
E_qG_tot[t]$(tx0[t]).. qG[gTot,t] * pG[gTot,t] =E= vGxAfskr[t] + vOffAfskr[kTot,t];
# CES demand
# there is currently only one type of government consumption, i.e. qG['gTot'] = qG['g']
E_qG[g_,gNest,t]$(tx0[t] and gNest2g_(gNest,g_))..
qG[g_,t] =E= uG[g_,t] * qG[gNest,t] * (pG[gNest,t] / pG[g_,t])**eG(gNest);
# A technical adjustment to government spending can be used to close the government intertemporal budget constraint
E_vGLukning[t]$(tx0[t]).. vGLukning[t] =E= rGLukning[t] / (1+rGLukning[t]) * vG[gTot,t];
# ----------------------------------------------------------------------------------------------------------------------
# Primary public expenditures
# ----------------------------------------------------------------------------------------------------------------------
E_vOffPrimUd[t]$(tx0[t]).. vOffPrimUd[t] =E= vG[gTot,t]
+ vOvf['tot',t]
+ vOffInv[t]
+ vOffSub[t]
+ vOffUdRest[t];
# Income transfers - rates
E_vOvfSats_satsreg[satsreg,t]$(tx0[t])..
vOvfSats[satsreg,t] =E= vSatsIndeks[t] * uvOvfSats[satsreg,t];
E_vOvfSats_groen[t]$(tx0[t])..
vOvfSats['groen',t] =E= vOvfSats['groen',t-1]/fp + uvOvfSats['groen',t];
# Number of transfer recipients linked to socio-economic groups
E_nOvf[ovf,t]$(tx0[t] and not (ovf_a0t17[ovf] or ovf_a18t100[ovf]))..
nOvf[ovf,t] =E= sum(soc, nOvf2Soc[ovf,soc] * nSoc[soc,t]);
# Transfers distributed across everyone under 18
E_nOvf_a0t17[ovf,t]$(tx0[t] and ovf_a0t17[ovf])..
nOvf[ovf,t] =E= nPop['a0t17',t];
# Transfers distributed across everyone 18 and over
E_nOvf_a18t100[ovf,t]$(tx0[t] and ovf_a18t100[ovf])..
nOvf[ovf,t] =E= nPop['a18t100',t];
# Income transfers - total expenditure split by transfer type
E_vOvf[ovf,t]$(tx0[t]).. vOvf[ovf,t] =E= vOvfSats[ovf,t] * nOvf[ovf,t] + jvOvf[ovf,t];
E_vOvf_tot[t]$(tx0[t] and t.val > 2015).. vOvf['tot',t] =E= sum(ovf, vOvf[ovf,t]);
E_vOvf_hh[t]$(tx0[t] and t.val > 2015).. vOvf['hh',t] =E= sum(ovfhh, vOvf[ovfhh,t]);
E_vOvf_a18t100[t]$(tx0[t] and t.val > 2015)..
vOvf['a18t100',t] =E= sum(ovf$(ovf_a18t100[ovf]), vOvf[ovf,t]) / nPop['a18t100',t];
# Age-distributed income transfers to households
E_vHhOvf[a,t]$(a15t100[a] and tx0[t] and t.val > 2015)..
vHhOvf[a,t] =E= ( dvOvf2dBesk['hh',t] * nLHh[a,t] / nPop[a,t]
+ dvOvf2dPop['hh',t] * uHhOvfPop[a,t]
+ sum(ovf$(ovf_a0t17[ovf]), vOvf[ovf,t]) * rBoern[a,t]
+ vOvf['a18t100',t]$(a18t100[a])
+ sum(ovf, jvOvf[ovf,t]) / nPop['a15t100',t]
+ vHhOvfRest[a,t]
) * fHhOvf[t];
# The number of transfer recipients follows population size and employment
# dnOvf2dBesk and dnOvf2dPop can be made exogenous to save computation
E_dnOvf2dBesk[ovf,t]$(tx0[t])..
dnOvf2dBesk[ovf,t] =E= sum(soc, dSoc2dBesk[soc,t] * nOvf2Soc[ovf,soc]);
E_dnOvf2dPop[ovf,t]$(tx0[t] and t.val > 1990)..
dnOvf2dPop[ovf,t] =E= sum(soc, (dSoc2dPop[soc,t] + jnSoc[soc,t] / nPop['a15t100',t]) * nOvf2Soc[ovf,soc]);
# Income transfers follow the number of recipients
E_dvOvf2dBesk[ovf,t]$(tx0[t]).. dvOvf2dBesk[ovf,t] =E= vOvfSats[ovf,t] * dnOvf2dBesk[ovf,t];
E_dvOvf2dBesk_hh[t]$(tx0[t]).. dvOvf2dBesk['hh',t] =E= sum(ovfhh, dvOvf2dBesk[ovfhh,t]);
E_dvOvf2dPop[ovf,t]$(tx0[t]).. dvOvf2dPop[ovf,t] =E= vOvfSats[ovf,t] * dnOvf2dPop[ovf,t];
E_dvOvf2dPop_hh[t]$(tx0[t]).. dvOvf2dPop['hh',t] =E= sum(ovfhh, dvOvf2dPop[ovfhh,t]);
# Correction factor ensuring that transfers balance whether distributed by age or by transfer type
E_fHhOvf[t]$(tx0[t] and t.val > 2015).. vOvf['hh',t] =E= sum(a, vHhOvf[a,t] * nPop[a,t]);
# Split into untaxed and taxable transfers
E_vOvfUbeskat_tot[t]$(tx0[t] and t.val > 2000).. vOvfUbeskat['tot',t] =E= sum(ovfhh$(ubeskat[ovfhh]), vOvf[ovfhh,t]);
E_vOvfUbeskat[a,t]$(a.val <= 100 and tx0[t] and t.val > 2015)..
# vOvfUbeskat[a,t] =E= rOvfUbeskat[a,t] * (vOvfUbeskat['tot',t] / nPop[a,t]);
vOvfUbeskat[a,t] =E= uOvfUbeskat[a,t] * vOvfUbeskat['tot',t] / sum(aa, uOvfUbeskat[aa,t] * nPop[aa,t]);
E_vOvfSkatPl_tot[t]$(tx0[t] and t.val > 2015).. vOvfSkatPl[aTot,t] =E= sum(a, vOvfSkatPl[a,t] * nPop[a,t]);
E_vOvfSkatPl[a,t]$(tx0[t] and t.val > 2015).. vOvfSkatPl[a,t] =E= vHhOvf[a,t] - vOvfUbeskat[a,t];
# Public investment
E_vOffInv[t]$(tx0[t]).. vOffInv[t] =E= vI_s[iTot,'off',t];
# Subsidies
E_vOffSub[t]$(tx0[t]).. vOffSub[t] =E= vPunktSub[t] + vSubY[t] - vSubEU[t];
E_vSubYRes[t]$(tx0[t]).. vSubYRes[t] =E= rvYsubRest2BVT[t] * vBVT['tot',t];
E_vSubY[t]$(tx0[t]).. vSubY[t] =E= vtSubLoen['tot',t] + vSubYRes[t];
E_vSubEU[t]$(tx0[t]).. vSubEU[t] =E= vBNP[t] * rSubEU2BNP[t];
# Other public expenditures
E_vOffUdRest[t]$(tx0[t])..
vOffUdRest[t] =E= vOffLandKoeb[t] + vOffTilUdl[t] + vOffTilHh[t] + vOffTilVirk[t];
E_vOffTilHh[t]$(tx0[t]).. vOffTilHh[t] =E= vOffTilHhKap[t] + vOffTilHhOev[t];
E_vOffLandKoeb[t]$(tx0[t]).. vOffLandKoeb[t] =E= vBNP[t] * rOffLandKoeb2BNP[t];
E_vOffTilUdl[t]$(tx0[t]).. vOffTilUdl[t] =E= vBNP[t] * rOffTilUdl2BNP[t];
E_vOffTilHhKap[t]$(tx0[t]).. vOffTilHhKap[t] =E= vBNP[t] * rOffTilHhKap2BNP[t];
E_vOffTilHhOev[t]$(tx0[t]).. vOffTilHhOev[t] =E= vBNP[t] * rOffTilHhOev2BNP[t];
E_vOffTilVirk[t]$(tx0[t]).. vOffTilVirk[t] =E= vBNP[t] * rOffTilVirk2BNP[t];
$ENDBLOCK
$ENDIF
"MIT"
] |
// Copyright 2021 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SCANN_DISTANCE_MEASURES_ONE_TO_ONE_JACCARD_DISTANCE_H_
#define SCANN_DISTANCE_MEASURES_ONE_TO_ONE_JACCARD_DISTANCE_H_
#include <cstdint>
#include "scann/distance_measures/distance_measure_base.h"
#include "scann/distance_measures/one_to_one/binary_distance_measure_base.h"
#include "scann/distance_measures/one_to_one/common.h"
#include "scann/utils/reduction.h"
#include "scann/utils/types.h"
namespace research_scann {
// Weighted (generalized) Jaccard distance over real-valued datapoints:
//   distance = 1 - sum_i(min(a_i, b_i)) / sum_i(max(a_i, b_i)).
class GeneralJaccardDistance final : public DistanceMeasure {
 public:
  SCANN_DECLARE_DISTANCE_MEASURE_VIRTUAL_METHODS(NOT_SPECIALLY_OPTIMIZED);

 private:
  // Dense case: both datapoints carry a value for every dimension.
  template <typename T>
  SCANN_INLINE double GetDistanceDenseImpl(const DatapointPtr<T>& a,
                                           const DatapointPtr<T>& b) const;

  // Sparse case: datapoints carry (index, value) pairs.
  template <typename T>
  SCANN_INLINE double GetDistanceSparseImpl(const DatapointPtr<T>& a,
                                            const DatapointPtr<T>& b) const;

  // Mixed dense/sparse operands are unsupported; LOG(FATAL) aborts, so the
  // missing return statement is unreachable.
  template <typename T>
  SCANN_INLINE double GetDistanceHybridImpl(const DatapointPtr<T>& a,
                                            const DatapointPtr<T>& b) const {
    LOG(FATAL) << "Not implemented yet.";
  }
};
// Jaccard distance over binary datapoints (bit-packed in uint8_t words).
class BinaryJaccardDistance final : public BinaryDistanceMeasureBase {
 public:
  string_view name() const final;

  using BinaryDistanceMeasureBase::GetDistanceDense;
  using BinaryDistanceMeasureBase::GetDistanceHybrid;
  using BinaryDistanceMeasureBase::GetDistanceSparse;

  double GetDistanceDense(const DatapointPtr<uint8_t>& a,
                          const DatapointPtr<uint8_t>& b) const final;
  double GetDistanceSparse(const DatapointPtr<uint8_t>& a,
                           const DatapointPtr<uint8_t>& b) const final;
  // Hybrid operand pairs are delegated to the sparse implementation.
  double GetDistanceHybrid(const DatapointPtr<uint8_t>& a,
                           const DatapointPtr<uint8_t>& b) const final {
    return GetDistanceSparse(a, b);
  }
};
// Dense weighted Jaccard distance: 1 - sum(min(a_i,b_i)) / sum(max(a_i,b_i)).
// Both datapoints are assumed to have the same dimensionality; a.dimensionality()
// drives the loop. Returns 0 when the denominator is zero (e.g. all-zero inputs).
template <typename T>
double GeneralJaccardDistance::GetDistanceDenseImpl(
    const DatapointPtr<T>& a, const DatapointPtr<T>& b) const {
  double numerator = 0.0;    // sum of per-dimension minima
  double denominator = 0.0;  // sum of per-dimension maxima
  const size_t dims = a.dimensionality();
  for (size_t i = 0; i < dims; ++i) {
    const T lhs = a.values()[i];
    const T rhs = b.values()[i];
    numerator += std::min(lhs, rhs);
    denominator += std::max(lhs, rhs);
  }
  if (!denominator) {
    return 0;
  }
  return 1.0 - numerator / denominator;
}
template <typename T>
double GeneralJaccardDistance::GetDistanceSparseImpl(
const DatapointPtr<T>& a, const DatapointPtr<T>& b) const {
double intersection = 0.0;
double sum = 0.0;
auto s1 = a.nonzero_entries();
auto s2 = b.nonzero_entries();
std::vector<pair<DimensionIndex, double> > w1;
std::vector<pair<DimensionIndex, double> > w2;
std::vector<pair<DimensionIndex, double> > merged(s1 + s2);
w1.reserve(s1);
for (size_t i = 0; i < s1; ++i) {
w1.push_back(std::make_pair(a.indices()[i], a.values()[i]));
}
w2.reserve(s2);
for (size_t i = 0; i < s2; ++i) {
w2.push_back(std::make_pair(b.indices()[i], b.values()[i]));
}
std::merge(w1.begin(), w1.end(), w2.begin(), w2.end(), merged.begin());
while (true) {
if (merged.empty()) break;
if (merged.size() == 1) {
sum += merged[0].second;
break;
}
auto in1 = merged[merged.size() - 1].first;
auto in2 = merged[merged.size() - 2].first;
if (in1 == in2) {
sum += std::max(merged[merged.size() - 1].second,
merged[merged.size() - 2].second);
intersection += std::min(merged[merged.size() - 1].second,
merged[merged.size() - 2].second);
merged.pop_back();
merged.pop_back();
} else {
sum += merged[merged.size() - 1].second;
merged.pop_back();
}
}
if (!sum) {
return 0;
}
double ratio = intersection / sum;
return 1.0 - ratio;
}
} // namespace research_scann
#endif
| C | 4 | DionysisChristopoulos/google-research | scann/scann/distance_measures/one_to_one/jaccard_distance.h | [
"Apache-2.0"
] |
source("../common.r")
loadLibrary("gridExtra")

# Load the JMH results and derive a readable serializer-settings label.
results = jmhCSV("fieldSerializer.csv")
results$references = sub("true", "refs", results$references)
results$references = sub("false", "no refs", results$references)
results$chunked = sub("true", ", chunked", results$chunked)
results$chunked = sub("false", "", results$chunked)
results$Type = paste(results$references, results$chunked, sep="")

# Keep only the columns the charts need.
wanted = grep("^(Benchmark|Type|Score|Error|objectType)$", colnames(results))
results = results[, wanted]

# One bar chart per benchmark object type.
chartFor = function(objType, title) {
  jmhBarChart(subset(results, objectType == objType), "Type", "Serializer settings", title, "Round trips per second")
}
top = chartFor("sample", "Sample")
bottom = chartFor("media", "Media")

if (!rstudio) {
  top = top + ggtitle("FieldSerializerBenchmark")
  png("fieldSerializer.png", 1024, 890)
}
grid.arrange(top, bottom)
"BSD-3-Clause"
] |
// Plain actor used to check that actor principals are stable and distinct.
actor a {
// returns caller id
public shared(c) func getCaller() : async Principal {
c.caller
};
// returns self id when called (internally or externally): calling
// getCaller() from inside this actor makes the actor itself the caller,
// so the awaited value is this actor's own principal
public shared func getSelf() : async Principal {
await getCaller();
};
};
// Actor class: every instantiation (`await C()`) yields a fresh actor with
// its own principal; mirrors actor `a` above.
actor class C () {
// returns caller id
public shared(c) func getCaller() : async Principal {
c.caller
};
// returns self id when called (internally or externally); see actor `a`
// for why the self-call yields this actor's principal
public shared func getSelf() : async Principal {
await getCaller();
};
};
// One alias of actor `a` plus two separate instances of actor class C.
let alias = a;
let b = await C();
let c = await C();
ignore async {
let id_a = await a.getSelf();
let id_b = await b.getSelf();
let id_c = await c.getSelf();
let id_alias = await alias.getSelf();
// check ids are distinct
assert (id_a != id_b);
assert (id_b != id_c);
assert (id_c != id_a);
// an alias denotes the same actor, hence the same principal
assert (id_alias == id_a);
};
// test caller alternation is correct: after the initial hop, Ping.call is
// only ever invoked by Pong (and vice versa)
actor Ping {
// returns caller id
public shared(c) func getCaller() : async Principal {
c.caller
};
// returns self id when called (internally or externally)
public func getSelf() : async Principal {
await getCaller();
};
// bounce back and forth with Pong n times, asserting every incoming call
// really originates from Pong
public shared(c) func call (n:Nat) : async () {
if (n > 0) {
assert (c.caller == (await Pong.getSelf()));
await Pong.call(n - 1);
};
};
};
// Counterpart of Ping; additionally provides the test entry point.
actor Pong {
// returns caller id
public shared(c) func getCaller() : async Principal {
c.caller
};
// returns self id when called (internally or externally)
public func getSelf() : async Principal {
await getCaller();
};
// bounce back and forth with Ping n times, asserting every incoming call
// really originates from Ping
public shared(c) func call (n:Nat) : async () {
if (n > 0) {
assert c.caller == (await Ping.getSelf());
await Ping.call(n - 1);
};
};
// entry point: kicks off n rounds of Ping/Pong caller alternation
public func test(n:Nat) {
ignore async await Ping.call(n);
};
};
Pong.test(5);
//no support for multiple-toplevel actors and await
//SKIP comp
| Modelica | 5 | olaszakos/motoko | test/run/multi-caller.mo | [
"Apache-2.0"
] |
//SPDX-License-Identifier: MIT
pragma solidity ^0.5.16;
// Declares a non-indexed `Overridden(uint)` event which the child contract
// shadows with an indexed variant; exercises event-overriding decoding.
contract LegacyWireTestParent {
event Overridden(uint);
}
// Abstract base declaring events that the implementing contract re-declares
// (AbstractOverridden is indexed here, non-indexed in the child).
contract LegacyWireTestAbstract {
event AbstractEvent();
event AbstractOverridden(uint indexed);
function interfaceAndOverrideTest() public; //just here to make the contract abstract
}
// Emits both the local and the explicitly-qualified inherited variants of
// each overridden event so decoder tests can verify that every emission is
// matched to the correct definition (indexed vs non-indexed parameters
// change the event's topic layout).
contract LegacyWireTest is LegacyWireTestParent, LegacyWireTestAbstract {
event Overridden(uint indexed);
event AbstractOverridden(uint);
function interfaceAndOverrideTest() public {
emit AbstractEvent();
// local AbstractOverridden(uint), non-indexed
emit AbstractOverridden(107);
// qualified: the base's AbstractOverridden(uint indexed)
emit LegacyWireTestAbstract.AbstractOverridden(683);
// local Overridden(uint indexed)
emit Overridden(107);
// qualified: the parent's non-indexed Overridden(uint)
emit LegacyWireTestParent.Overridden(683);
}
}
| Solidity | 3 | Shuchi305/newrepo | packages/decoder/test/legacy/contracts/WireTest.sol | [
"MIT"
] |
/**
*
*/
import Util;
import OpenApi;
import OpenApiUtil;
import EndpointUtil;
extends OpenApi;
// Initializes the polardbx client: sets the regional endpoint rule and the
// per-region endpoint overrides, validates the config, then resolves the
// final endpoint via getEndpoint below.
init(config: OpenApi.Config){
super(config);
@endpointRule = 'regional';
@endpointMap = {
ap-northeast-1 = 'polardbx.aliyuncs.com',
ap-northeast-2-pop = 'polardbx.aliyuncs.com',
ap-south-1 = 'polardbx.aliyuncs.com',
ap-southeast-2 = 'polardbx.aliyuncs.com',
ap-southeast-3 = 'polardbx.aliyuncs.com',
ap-southeast-5 = 'polardbx.aliyuncs.com',
cn-beijing-finance-1 = 'polardbx.aliyuncs.com',
cn-beijing-finance-pop = 'polardbx.aliyuncs.com',
cn-beijing-gov-1 = 'polardbx.aliyuncs.com',
cn-beijing-nu16-b01 = 'polardbx.aliyuncs.com',
cn-edge-1 = 'polardbx.aliyuncs.com',
cn-fujian = 'polardbx.aliyuncs.com',
cn-haidian-cm12-c01 = 'polardbx.aliyuncs.com',
cn-hangzhou-bj-b01 = 'polardbx.aliyuncs.com',
cn-hangzhou-finance = 'polardbx.aliyuncs.com',
cn-hangzhou-internal-prod-1 = 'polardbx.aliyuncs.com',
cn-hangzhou-internal-test-1 = 'polardbx.aliyuncs.com',
cn-hangzhou-internal-test-2 = 'polardbx.aliyuncs.com',
cn-hangzhou-internal-test-3 = 'polardbx.aliyuncs.com',
cn-hangzhou-test-306 = 'polardbx.aliyuncs.com',
cn-hongkong-finance-pop = 'polardbx.aliyuncs.com',
cn-huhehaote-nebula-1 = 'polardbx.aliyuncs.com',
cn-north-2-gov-1 = 'polardbx.aliyuncs.com',
cn-qingdao-nebula = 'polardbx.aliyuncs.com',
cn-shanghai-et15-b01 = 'polardbx.aliyuncs.com',
cn-shanghai-et2-b01 = 'polardbx.aliyuncs.com',
cn-shanghai-finance-1 = 'polardbx.aliyuncs.com',
cn-shanghai-inner = 'polardbx.aliyuncs.com',
cn-shanghai-internal-test-1 = 'polardbx.aliyuncs.com',
cn-shenzhen-finance-1 = 'polardbx.aliyuncs.com',
cn-shenzhen-inner = 'polardbx.aliyuncs.com',
cn-shenzhen-st4-d01 = 'polardbx.aliyuncs.com',
cn-shenzhen-su18-b01 = 'polardbx.aliyuncs.com',
cn-wuhan = 'polardbx.aliyuncs.com',
cn-wulanchabu = 'polardbx.aliyuncs.com',
cn-yushanfang = 'polardbx.aliyuncs.com',
cn-zhangbei = 'polardbx.aliyuncs.com',
cn-zhangbei-na61-b01 = 'polardbx.aliyuncs.com',
cn-zhangjiakou-na62-a01 = 'polardbx.aliyuncs.com',
cn-zhengzhou-nebula-1 = 'polardbx.aliyuncs.com',
eu-central-1 = 'polardbx.aliyuncs.com',
eu-west-1 = 'polardbx.aliyuncs.com',
eu-west-1-oxs = 'polardbx.aliyuncs.com',
me-east-1 = 'polardbx.aliyuncs.com',
rus-west-1-pop = 'polardbx.aliyuncs.com',
};
checkConfig(config);
@endpoint = getEndpoint('polardbx', @regionId, @endpointRule, @network, @suffix, @endpointMap, @endpoint);
}
// Resolves the service endpoint: an explicitly configured endpoint wins,
// then a per-region entry from endpointMap, otherwise the rule-based
// default computed by EndpointUtil.
function getEndpoint(productId: string, regionId: string, endpointRule: string, network: string, suffix: string, endpointMap: map[string]string, endpoint: string) throws: string{
if (!Util.empty(endpoint)) {
return endpoint;
}
if (!Util.isUnset(endpointMap) && !Util.empty(endpointMap[regionId])) {
return endpointMap[regionId];
}
return EndpointUtil.getEndpointRules(productId, regionId, endpointRule, network, suffix);
}
// Generated API operations. Each operation follows the same pattern: a
// Request model, a ResponseBody model, a Response wrapper (headers + body),
// an <op>WithOptions function that validates the request and issues the RPC
// (POST, API version 2020-02-02, AK auth, JSON body), and a convenience
// <op> function using default runtime options.

// AllocateInstancePublicConnection: attaches a public connection string to an instance.
model AllocateInstancePublicConnectionRequest {
connectionStringPrefix?: string(name='ConnectionStringPrefix'),
DBInstanceName?: string(name='DBInstanceName'),
ownerAccount?: string(name='OwnerAccount'),
ownerId?: long(name='OwnerId'),
port?: string(name='Port'),
regionId?: string(name='RegionId'),
resourceOwnerAccount?: string(name='ResourceOwnerAccount'),
resourceOwnerId?: long(name='ResourceOwnerId'),
}
model AllocateInstancePublicConnectionResponseBody = {
requestId?: string(name='RequestId'),
}
model AllocateInstancePublicConnectionResponse = {
headers: map[string]string(name='headers'),
body: AllocateInstancePublicConnectionResponseBody(name='body'),
}
async function allocateInstancePublicConnectionWithOptions(request: AllocateInstancePublicConnectionRequest, runtime: Util.RuntimeOptions): AllocateInstancePublicConnectionResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('AllocateInstancePublicConnection', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function allocateInstancePublicConnection(request: AllocateInstancePublicConnectionRequest): AllocateInstancePublicConnectionResponse {
var runtime = new Util.RuntimeOptions{};
return allocateInstancePublicConnectionWithOptions(request, runtime);
}
// CancelPolarxOrder: cancels a pending PolarDB-X order.
model CancelPolarxOrderRequest {
DBInstanceName?: string(name='DBInstanceName'),
regionId?: string(name='RegionId'),
scaleOutToken?: string(name='ScaleOutToken'),
}
model CancelPolarxOrderResponseBody = {
requestId?: string(name='RequestId'),
}
model CancelPolarxOrderResponse = {
headers: map[string]string(name='headers'),
body: CancelPolarxOrderResponseBody(name='body'),
}
async function cancelPolarxOrderWithOptions(request: CancelPolarxOrderRequest, runtime: Util.RuntimeOptions): CancelPolarxOrderResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CancelPolarxOrder', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function cancelPolarxOrder(request: CancelPolarxOrderRequest): CancelPolarxOrderResponse {
var runtime = new Util.RuntimeOptions{};
return cancelPolarxOrderWithOptions(request, runtime);
}
// CheckCloudResourceAuthorized: checks whether a RAM role ARN is authorized for the instance.
model CheckCloudResourceAuthorizedRequest {
DBInstanceName?: string(name='DBInstanceName'),
regionId?: string(name='RegionId'),
roleArn?: string(name='RoleArn'),
}
model CheckCloudResourceAuthorizedResponseBody = {
data?: {
authorizationState?: string(name='AuthorizationState'),
roleArn?: string(name='RoleArn'),
}(name='Data'),
requestId?: string(name='RequestId'),
}
model CheckCloudResourceAuthorizedResponse = {
headers: map[string]string(name='headers'),
body: CheckCloudResourceAuthorizedResponseBody(name='body'),
}
async function checkCloudResourceAuthorizedWithOptions(request: CheckCloudResourceAuthorizedRequest, runtime: Util.RuntimeOptions): CheckCloudResourceAuthorizedResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CheckCloudResourceAuthorized', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function checkCloudResourceAuthorized(request: CheckCloudResourceAuthorizedRequest): CheckCloudResourceAuthorizedResponse {
var runtime = new Util.RuntimeOptions{};
return checkCloudResourceAuthorizedWithOptions(request, runtime);
}
// CreateAccount: creates a database account, optionally scoped to one DB/privilege.
model CreateAccountRequest {
accountDescription?: string(name='AccountDescription'),
accountName?: string(name='AccountName'),
accountPassword?: string(name='AccountPassword'),
accountPrivilege?: string(name='AccountPrivilege'),
DBInstanceName?: string(name='DBInstanceName'),
DBName?: string(name='DBName'),
regionId?: string(name='RegionId'),
securityAccountName?: string(name='SecurityAccountName'),
securityAccountPassword?: string(name='SecurityAccountPassword'),
}
model CreateAccountResponseBody = {
message?: string(name='Message'),
requestId?: string(name='RequestId'),
success?: boolean(name='Success'),
}
model CreateAccountResponse = {
headers: map[string]string(name='headers'),
body: CreateAccountResponseBody(name='body'),
}
async function createAccountWithOptions(request: CreateAccountRequest, runtime: Util.RuntimeOptions): CreateAccountResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CreateAccount', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function createAccount(request: CreateAccountRequest): CreateAccountResponse {
var runtime = new Util.RuntimeOptions{};
return createAccountWithOptions(request, runtime);
}
// CreateBackup: starts a manual backup of the instance.
model CreateBackupRequest {
backupType?: string(name='BackupType'),
DBInstanceName?: string(name='DBInstanceName'),
regionId?: string(name='RegionId'),
}
model CreateBackupResponseBody = {
data?: [
{
backupSetId?: long(name='BackupSetId'),
}
](name='Data'),
message?: string(name='Message'),
requestId?: string(name='RequestId'),
success?: boolean(name='Success'),
}
model CreateBackupResponse = {
headers: map[string]string(name='headers'),
body: CreateBackupResponseBody(name='body'),
}
async function createBackupWithOptions(request: CreateBackupRequest, runtime: Util.RuntimeOptions): CreateBackupResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CreateBackup', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function createBackup(request: CreateBackupRequest): CreateBackupResponse {
var runtime = new Util.RuntimeOptions{};
return createBackupWithOptions(request, runtime);
}
// CreateDB: creates a database on the instance (see pattern note above the
// AllocateInstancePublicConnection group).
model CreateDBRequest {
accountName?: string(name='AccountName'),
accountPrivilege?: string(name='AccountPrivilege'),
charset?: string(name='Charset'),
DBInstanceName?: string(name='DBInstanceName'),
dbDescription?: string(name='DbDescription'),
dbName?: string(name='DbName'),
regionId?: string(name='RegionId'),
securityAccountName?: string(name='SecurityAccountName'),
securityAccountPassword?: string(name='SecurityAccountPassword'),
}
model CreateDBResponseBody = {
message?: string(name='Message'),
requestId?: string(name='RequestId'),
success?: boolean(name='Success'),
}
model CreateDBResponse = {
headers: map[string]string(name='headers'),
body: CreateDBResponseBody(name='body'),
}
async function createDBWithOptions(request: CreateDBRequest, runtime: Util.RuntimeOptions): CreateDBResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CreateDB', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function createDB(request: CreateDBRequest): CreateDBResponse {
var runtime = new Util.RuntimeOptions{};
return createDBWithOptions(request, runtime);
}
// CreateDBInstance: provisions a new (or read-only) PolarDB-X instance.
model CreateDBInstanceRequest {
autoRenew?: boolean(name='AutoRenew'),
clientToken?: string(name='ClientToken'),
DBNodeClass?: string(name='DBNodeClass'),
DBNodeCount?: int32(name='DBNodeCount'),
engineVersion?: string(name='EngineVersion'),
isReadDBInstance?: boolean(name='IsReadDBInstance'),
networkType?: string(name='NetworkType'),
payType?: string(name='PayType'),
period?: string(name='Period'),
primaryDBInstanceName?: string(name='PrimaryDBInstanceName'),
regionId?: string(name='RegionId'),
resourceGroupId?: string(name='ResourceGroupId'),
usedTime?: int32(name='UsedTime'),
VPCId?: string(name='VPCId'),
vSwitchId?: string(name='VSwitchId'),
zoneId?: string(name='ZoneId'),
}
model CreateDBInstanceResponseBody = {
DBInstanceName?: string(name='DBInstanceName'),
orderId?: string(name='OrderId'),
requestId?: string(name='RequestId'),
}
model CreateDBInstanceResponse = {
headers: map[string]string(name='headers'),
body: CreateDBInstanceResponseBody(name='body'),
}
async function createDBInstanceWithOptions(request: CreateDBInstanceRequest, runtime: Util.RuntimeOptions): CreateDBInstanceResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CreateDBInstance', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function createDBInstance(request: CreateDBInstanceRequest): CreateDBInstanceResponse {
var runtime = new Util.RuntimeOptions{};
return createDBInstanceWithOptions(request, runtime);
}
// CreatePolarxInstance: places an order for a PolarDB-X instance.
model CreatePolarxInstanceRequest {
clientToken?: string(name='ClientToken'),
description?: string(name='Description'),
duration?: int32(name='Duration'),
instanceSeries?: string(name='InstanceSeries'),
isAutoRenew?: boolean(name='IsAutoRenew'),
masterInstId?: string(name='MasterInstId'),
mySQLVersion?: int32(name='MySQLVersion'),
payType?: string(name='PayType'),
pricingCycle?: string(name='PricingCycle'),
quantity?: int32(name='Quantity'),
regionId?: string(name='RegionId'),
specification?: string(name='Specification'),
type?: string(name='Type'),
vpcId?: string(name='VpcId'),
vswitchId?: string(name='VswitchId'),
zoneId?: string(name='ZoneId'),
isHa?: boolean(name='isHa'),
}
model CreatePolarxInstanceResponseBody = {
data?: {
drdsInstanceIdList?: {
drdsInstanceIdList?: [ string ](name='drdsInstanceIdList')
}(name='DrdsInstanceIdList'),
orderId?: long(name='OrderId'),
}(name='Data'),
requestId?: string(name='RequestId'),
success?: boolean(name='Success'),
}
model CreatePolarxInstanceResponse = {
headers: map[string]string(name='headers'),
body: CreatePolarxInstanceResponseBody(name='body'),
}
async function createPolarxInstanceWithOptions(request: CreatePolarxInstanceRequest, runtime: Util.RuntimeOptions): CreatePolarxInstanceResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CreatePolarxInstance', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function createPolarxInstance(request: CreatePolarxInstanceRequest): CreatePolarxInstanceResponse {
var runtime = new Util.RuntimeOptions{};
return createPolarxInstanceWithOptions(request, runtime);
}
// CreatePolarxOrder: creates a scale order for an existing instance.
model CreatePolarxOrderRequest {
DBInstanceName?: string(name='DBInstanceName'),
nodeCount?: string(name='NodeCount'),
regionId?: string(name='RegionId'),
}
model CreatePolarxOrderResponseBody = {
orderResultList?: [
{
DBInstanceName?: string(name='DBInstanceName'),
orderId?: long(name='OrderId'),
}
](name='OrderResultList'),
requestId?: string(name='RequestId'),
}
model CreatePolarxOrderResponse = {
headers: map[string]string(name='headers'),
body: CreatePolarxOrderResponseBody(name='body'),
}
async function createPolarxOrderWithOptions(request: CreatePolarxOrderRequest, runtime: Util.RuntimeOptions): CreatePolarxOrderResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CreatePolarxOrder', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function createPolarxOrder(request: CreatePolarxOrderRequest): CreatePolarxOrderResponse {
var runtime = new Util.RuntimeOptions{};
return createPolarxOrderWithOptions(request, runtime);
}
// CreateSuperAccount: creates a privileged (super) database account.
model CreateSuperAccountRequest {
accountDescription?: string(name='AccountDescription'),
accountName?: string(name='AccountName'),
accountPassword?: string(name='AccountPassword'),
DBInstanceName?: string(name='DBInstanceName'),
regionId?: string(name='RegionId'),
}
model CreateSuperAccountResponseBody = {
message?: string(name='Message'),
requestId?: string(name='RequestId'),
success?: boolean(name='Success'),
}
model CreateSuperAccountResponse = {
headers: map[string]string(name='headers'),
body: CreateSuperAccountResponseBody(name='body'),
}
async function createSuperAccountWithOptions(request: CreateSuperAccountRequest, runtime: Util.RuntimeOptions): CreateSuperAccountResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('CreateSuperAccount', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function createSuperAccount(request: CreateSuperAccountRequest): CreateSuperAccountResponse {
var runtime = new Util.RuntimeOptions{};
return createSuperAccountWithOptions(request, runtime);
}
model DeleteAccountRequest {
accountName?: string(name='AccountName'),
DBInstanceName?: string(name='DBInstanceName'),
regionId?: string(name='RegionId'),
securityAccountName?: string(name='SecurityAccountName'),
securityAccountPassword?: string(name='SecurityAccountPassword'),
}
model DeleteAccountResponseBody = {
message?: string(name='Message'),
requestId?: string(name='RequestId'),
success?: boolean(name='Success'),
}
model DeleteAccountResponse = {
headers: map[string]string(name='headers'),
body: DeleteAccountResponseBody(name='body'),
}
async function deleteAccountWithOptions(request: DeleteAccountRequest, runtime: Util.RuntimeOptions): DeleteAccountResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('DeleteAccount', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function deleteAccount(request: DeleteAccountRequest): DeleteAccountResponse {
var runtime = new Util.RuntimeOptions{};
return deleteAccountWithOptions(request, runtime);
}
model DeleteDBRequest {
DBInstanceName?: string(name='DBInstanceName'),
dbName?: string(name='DbName'),
regionId?: string(name='RegionId'),
}
model DeleteDBResponseBody = {
message?: string(name='Message'),
requestId?: string(name='RequestId'),
success?: boolean(name='Success'),
}
model DeleteDBResponse = {
headers: map[string]string(name='headers'),
body: DeleteDBResponseBody(name='body'),
}
async function deleteDBWithOptions(request: DeleteDBRequest, runtime: Util.RuntimeOptions): DeleteDBResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('DeleteDB', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function deleteDB(request: DeleteDBRequest): DeleteDBResponse {
var runtime = new Util.RuntimeOptions{};
return deleteDBWithOptions(request, runtime);
}
model DeleteDBInstanceRequest {
DBInstanceName?: string(name='DBInstanceName'),
regionId?: string(name='RegionId'),
}
model DeleteDBInstanceResponseBody = {
requestId?: string(name='RequestId'),
}
model DeleteDBInstanceResponse = {
headers: map[string]string(name='headers'),
body: DeleteDBInstanceResponseBody(name='body'),
}
async function deleteDBInstanceWithOptions(request: DeleteDBInstanceRequest, runtime: Util.RuntimeOptions): DeleteDBInstanceResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('DeleteDBInstance', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function deleteDBInstance(request: DeleteDBInstanceRequest): DeleteDBInstanceResponse {
var runtime = new Util.RuntimeOptions{};
return deleteDBInstanceWithOptions(request, runtime);
}
model DescribeAccountListRequest {
accountName?: string(name='AccountName'),
accountType?: string(name='AccountType'),
DBInstanceName?: string(name='DBInstanceName'),
regionId?: string(name='RegionId'),
}
model DescribeAccountListResponseBody = {
data?: [
{
accountDescription?: string(name='AccountDescription'),
accountName?: string(name='AccountName'),
accountPrivilege?: string(name='AccountPrivilege'),
accountType?: string(name='AccountType'),
DBInstanceName?: string(name='DBInstanceName'),
DBName?: string(name='DBName'),
gmtCreated?: string(name='GmtCreated'),
}
](name='Data'),
message?: string(name='Message'),
requestId?: string(name='RequestId'),
success?: boolean(name='Success'),
}
model DescribeAccountListResponse = {
headers: map[string]string(name='headers'),
body: DescribeAccountListResponseBody(name='body'),
}
async function describeAccountListWithOptions(request: DescribeAccountListRequest, runtime: Util.RuntimeOptions): DescribeAccountListResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('DescribeAccountList', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function describeAccountList(request: DescribeAccountListRequest): DescribeAccountListResponse {
var runtime = new Util.RuntimeOptions{};
return describeAccountListWithOptions(request, runtime);
}
// Request parameters for DescribeBackupPolicy: identifies the target instance and region.
model DescribeBackupPolicyRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeBackupPolicy: per-instance backup schedule/retention settings.
model DescribeBackupPolicyResponseBody = {
  data?: [
    {
      backupPeriod?: string(name='BackupPeriod'),
      backupPlanBegin?: string(name='BackupPlanBegin'),
      backupSetRetention?: int32(name='BackupSetRetention'),
      backupType?: string(name='BackupType'),
      backupWay?: string(name='BackupWay'),
      DBInstanceName?: string(name='DBInstanceName'),
      forceCleanOnHighSpaceUsage?: int32(name='ForceCleanOnHighSpaceUsage'),
      isEnabled?: int32(name='IsEnabled'),
      localLogRetention?: int32(name='LocalLogRetention'),
      logLocalRetentionSpace?: int32(name='LogLocalRetentionSpace'),
      removeLogRetention?: int32(name='RemoveLogRetention'),
    }
  ](name='Data'),
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeBackupPolicy body.
model DescribeBackupPolicyResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeBackupPolicyResponseBody(name='body'),
}
// Calls the DescribeBackupPolicy API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeBackupPolicyWithOptions(request: DescribeBackupPolicyRequest, runtime: Util.RuntimeOptions): DescribeBackupPolicyResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeBackupPolicy', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeBackupPolicyWithOptions using default runtime options.
async function describeBackupPolicy(request: DescribeBackupPolicyRequest): DescribeBackupPolicyResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeBackupPolicyWithOptions(request, runtime);
}
// Request parameters for DescribeBackupSetList: instance, paging, and a start/end time window.
model DescribeBackupSetListRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  endTime?: long(name='EndTime'),
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  regionId?: string(name='RegionId'),
  startTime?: long(name='StartTime'),
}
// Response payload for DescribeBackupSetList: one entry per backup set in the window.
model DescribeBackupSetListResponseBody = {
  data?: [
    {
      backupModel?: int32(name='BackupModel'),
      backupSetId?: long(name='BackupSetId'),
      backupSetSize?: long(name='BackupSetSize'),
      backupType?: int32(name='BackupType'),
      beginTime?: long(name='BeginTime'),
      endTime?: long(name='EndTime'),
      status?: int32(name='Status'),
    }
  ](name='Data'),
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeBackupSetList body.
model DescribeBackupSetListResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeBackupSetListResponseBody(name='body'),
}
// Calls the DescribeBackupSetList API (2020-02-02).
// NOTE: unlike most operations in this file (POST with body), this one serializes the
// request into URL query parameters and uses HTTPS GET.
async function describeBackupSetListWithOptions(request: DescribeBackupSetListRequest, runtime: Util.RuntimeOptions): DescribeBackupSetListResponse {
  Util.validateModel(request);
  var query = OpenApiUtil.query(Util.toMap(request));
  var req = new OpenApi.OpenApiRequest{
    query = query,
  };
  return doRPCRequest('DescribeBackupSetList', '2020-02-02', 'HTTPS', 'GET', 'AK', 'json', req, runtime);
}
// Convenience overload of describeBackupSetListWithOptions using default runtime options.
async function describeBackupSetList(request: DescribeBackupSetListRequest): DescribeBackupSetListResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeBackupSetListWithOptions(request, runtime);
}
// Request parameters for DescribeBinaryLogList: instance, paging, and a time window
// (times are strings here, unlike the long timestamps used by DescribeBackupSetList).
model DescribeBinaryLogListRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  endTime?: string(name='EndTime'),
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  regionId?: string(name='RegionId'),
  startTime?: string(name='StartTime'),
}
// Response payload for DescribeBinaryLogList: paged list of binlog files with download links.
model DescribeBinaryLogListResponseBody = {
  logList?: [
    {
      beginTime?: string(name='BeginTime'),
      createdTime?: string(name='CreatedTime'),
      downloadLink?: string(name='DownloadLink'),
      endTime?: string(name='EndTime'),
      fileName?: string(name='FileName'),
      id?: long(name='Id'),
      logSize?: long(name='LogSize'),
      modifiedTime?: string(name='ModifiedTime'),
      purgeStatus?: int32(name='PurgeStatus'),
      uploadHost?: string(name='UploadHost'),
      uploadStatus?: int32(name='UploadStatus'),
    }
  ](name='LogList'),
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  requestId?: string(name='RequestId'),
  totalNumber?: int32(name='TotalNumber'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeBinaryLogList body.
model DescribeBinaryLogListResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeBinaryLogListResponseBody(name='body'),
}
// Calls the DescribeBinaryLogList API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeBinaryLogListWithOptions(request: DescribeBinaryLogListRequest, runtime: Util.RuntimeOptions): DescribeBinaryLogListResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeBinaryLogList', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeBinaryLogListWithOptions using default runtime options.
async function describeBinaryLogList(request: DescribeBinaryLogListRequest): DescribeBinaryLogListResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeBinaryLogListWithOptions(request, runtime);
}
// Request parameters for DescribeCharacterSet: identifies the target instance and region.
model DescribeCharacterSetRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeCharacterSet: supported character sets for the engine.
model DescribeCharacterSetResponseBody = {
  data?: {
    characterSet?: [ string ](name='CharacterSet'),
    engine?: string(name='Engine'),
  }(name='Data'),
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeCharacterSet body.
model DescribeCharacterSetResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeCharacterSetResponseBody(name='body'),
}
// Calls the DescribeCharacterSet API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeCharacterSetWithOptions(request: DescribeCharacterSetRequest, runtime: Util.RuntimeOptions): DescribeCharacterSetResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeCharacterSet', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeCharacterSetWithOptions using default runtime options.
async function describeCharacterSet(request: DescribeCharacterSetRequest): DescribeCharacterSetResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeCharacterSetWithOptions(request, runtime);
}
// Request parameters for DescribeDBInstanceAttribute: identifies the target instance and region.
model DescribeDBInstanceAttributeRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeDBInstanceAttribute: full detail record for one instance,
// including its connection addresses and the per-node breakdown.
model DescribeDBInstanceAttributeResponseBody = {
  DBInstance?: {
    commodityCode?: string(name='CommodityCode'),
    connAddrs?: [
      {
        connectionString?: string(name='ConnectionString'),
        port?: string(name='Port'),
        type?: string(name='Type'),
        VPCId?: string(name='VPCId'),
        vSwitchId?: string(name='VSwitchId'),
        vpcInstanceId?: string(name='VpcInstanceId'),
      }
    ](name='ConnAddrs'),
    connectionString?: string(name='ConnectionString'),
    createTime?: string(name='CreateTime'),
    DBInstanceType?: string(name='DBInstanceType'),
    DBNodeClass?: string(name='DBNodeClass'),
    DBNodeCount?: int32(name='DBNodeCount'),
    DBNodes?: [
      {
        computeNodeId?: string(name='ComputeNodeId'),
        dataNodeId?: string(name='DataNodeId'),
        id?: string(name='Id'),
        nodeClass?: string(name='NodeClass'),
        regionId?: string(name='RegionId'),
        zoneId?: string(name='ZoneId'),
      }
    ](name='DBNodes'),
    DBType?: string(name='DBType'),
    DBVersion?: string(name='DBVersion'),
    description?: string(name='Description'),
    engine?: string(name='Engine'),
    expireDate?: string(name='ExpireDate'),
    expired?: string(name='Expired'),
    id?: string(name='Id'),
    kindCode?: int32(name='KindCode'),
    latestMinorVersion?: string(name='LatestMinorVersion'),
    lockMode?: string(name='LockMode'),
    maintainEndTime?: string(name='MaintainEndTime'),
    maintainStartTime?: string(name='MaintainStartTime'),
    minorVersion?: string(name='MinorVersion'),
    network?: string(name='Network'),
    payType?: string(name='PayType'),
    port?: string(name='Port'),
    readDBInstances?: [ string ](name='ReadDBInstances'),
    regionId?: string(name='RegionId'),
    rightsSeparationEnabled?: boolean(name='RightsSeparationEnabled'),
    rightsSeparationStatus?: string(name='RightsSeparationStatus'),
    status?: string(name='Status'),
    storageUsed?: long(name='StorageUsed'),
    type?: string(name='Type'),
    VPCId?: string(name='VPCId'),
    vSwitchId?: string(name='VSwitchId'),
    zoneId?: string(name='ZoneId'),
  }(name='DBInstance'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDBInstanceAttribute body.
model DescribeDBInstanceAttributeResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDBInstanceAttributeResponseBody(name='body'),
}
// Calls the DescribeDBInstanceAttribute API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDBInstanceAttributeWithOptions(request: DescribeDBInstanceAttributeRequest, runtime: Util.RuntimeOptions): DescribeDBInstanceAttributeResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDBInstanceAttribute', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDBInstanceAttributeWithOptions using default runtime options.
async function describeDBInstanceAttribute(request: DescribeDBInstanceAttributeRequest): DescribeDBInstanceAttributeResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDBInstanceAttributeWithOptions(request, runtime);
}
// Request parameters for DescribeDBInstanceConfig: one named config on one instance.
model DescribeDBInstanceConfigRequest {
  configName?: string(name='ConfigName'),
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeDBInstanceConfig: the requested config name/value pair.
model DescribeDBInstanceConfigResponseBody = {
  data?: {
    configName?: string(name='ConfigName'),
    configValue?: string(name='ConfigValue'),
    dbInstanceName?: string(name='DbInstanceName'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDBInstanceConfig body.
model DescribeDBInstanceConfigResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDBInstanceConfigResponseBody(name='body'),
}
// Calls the DescribeDBInstanceConfig API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDBInstanceConfigWithOptions(request: DescribeDBInstanceConfigRequest, runtime: Util.RuntimeOptions): DescribeDBInstanceConfigResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDBInstanceConfig', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDBInstanceConfigWithOptions using default runtime options.
async function describeDBInstanceConfig(request: DescribeDBInstanceConfigRequest): DescribeDBInstanceConfigResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDBInstanceConfigWithOptions(request, runtime);
}
// Request parameters for DescribeDBInstanceSSL: identifies the target instance and region.
model DescribeDBInstanceSSLRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeDBInstanceSSL: SSL status and certificate details.
model DescribeDBInstanceSSLResponseBody = {
  data?: {
    certCommonName?: string(name='CertCommonName'),
    SSLEnabled?: boolean(name='SSLEnabled'),
    SSLExpiredTime?: string(name='SSLExpiredTime'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDBInstanceSSL body.
model DescribeDBInstanceSSLResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDBInstanceSSLResponseBody(name='body'),
}
// Calls the DescribeDBInstanceSSL API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDBInstanceSSLWithOptions(request: DescribeDBInstanceSSLRequest, runtime: Util.RuntimeOptions): DescribeDBInstanceSSLResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDBInstanceSSL', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDBInstanceSSLWithOptions using default runtime options.
async function describeDBInstanceSSL(request: DescribeDBInstanceSSLRequest): DescribeDBInstanceSSLResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDBInstanceSSLWithOptions(request, runtime);
}
// Request parameters for DescribeDBInstanceTDE: identifies the target instance and region.
model DescribeDBInstanceTDERequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeDBInstanceTDE: transparent-data-encryption status.
model DescribeDBInstanceTDEResponseBody = {
  data?: {
    TDEStatus?: string(name='TDEStatus'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDBInstanceTDE body.
model DescribeDBInstanceTDEResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDBInstanceTDEResponseBody(name='body'),
}
// Calls the DescribeDBInstanceTDE API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDBInstanceTDEWithOptions(request: DescribeDBInstanceTDERequest, runtime: Util.RuntimeOptions): DescribeDBInstanceTDEResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDBInstanceTDE', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDBInstanceTDEWithOptions using default runtime options.
async function describeDBInstanceTDE(request: DescribeDBInstanceTDERequest): DescribeDBInstanceTDEResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDBInstanceTDEWithOptions(request, runtime);
}
// Request parameters for DescribeDBInstanceTopology: identifies the target instance and region.
model DescribeDBInstanceTopologyRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeDBInstanceTopology: the logical instance plus its child
// items, each with their connection endpoints.
model DescribeDBInstanceTopologyResponseBody = {
  data?: {
    logicInstanceTopology?: {
      DBInstanceConnType?: string(name='DBInstanceConnType'),
      DBInstanceCreateTime?: string(name='DBInstanceCreateTime'),
      DBInstanceDescription?: string(name='DBInstanceDescription'),
      DBInstanceId?: string(name='DBInstanceId'),
      DBInstanceName?: string(name='DBInstanceName'),
      DBInstanceStatus?: int32(name='DBInstanceStatus'),
      DBInstanceStatusDescription?: string(name='DBInstanceStatusDescription'),
      DBInstanceStorage?: int32(name='DBInstanceStorage'),
      engine?: string(name='Engine'),
      engineVersion?: string(name='EngineVersion'),
      items?: [
        {
          characterType?: string(name='CharacterType'),
          connectionIp?: [
            {
              connectionString?: string(name='ConnectionString'),
              DBInstanceNetType?: int32(name='DBInstanceNetType'),
              port?: string(name='Port'),
            }
          ](name='ConnectionIp'),
          // NOTE(review): DBInstanceConnType is int32 here but string on the parent
          // logicInstanceTopology — confirm against the API spec.
          DBInstanceConnType?: int32(name='DBInstanceConnType'),
          DBInstanceCreateTime?: string(name='DBInstanceCreateTime'),
          DBInstanceDescription?: string(name='DBInstanceDescription'),
          DBInstanceId?: string(name='DBInstanceId'),
          DBInstanceName?: string(name='DBInstanceName'),
          DBInstanceStatus?: int32(name='DBInstanceStatus'),
          DBInstanceStatusDescription?: string(name='DBInstanceStatusDescription'),
          diskSize?: long(name='DiskSize'),
          engine?: string(name='Engine'),
          engineVersion?: string(name='EngineVersion'),
          lockMode?: int32(name='LockMode'),
          lockReason?: string(name='LockReason'),
          maintainEndTime?: string(name='MaintainEndTime'),
          maintainStartTime?: string(name='MaintainStartTime'),
          maxConnections?: int32(name='MaxConnections'),
          maxIops?: int32(name='MaxIops'),
        }
      ](name='Items'),
      lockMode?: int32(name='LockMode'),
      lockReason?: string(name='LockReason'),
      maintainEndTime?: string(name='MaintainEndTime'),
      maintainStartTime?: string(name='MaintainStartTime'),
    }(name='LogicInstanceTopology'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDBInstanceTopology body.
model DescribeDBInstanceTopologyResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDBInstanceTopologyResponseBody(name='body'),
}
// Calls the DescribeDBInstanceTopology API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDBInstanceTopologyWithOptions(request: DescribeDBInstanceTopologyRequest, runtime: Util.RuntimeOptions): DescribeDBInstanceTopologyResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDBInstanceTopology', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDBInstanceTopologyWithOptions using default runtime options.
async function describeDBInstanceTopology(request: DescribeDBInstanceTopologyRequest): DescribeDBInstanceTopologyResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDBInstanceTopologyWithOptions(request, runtime);
}
// Request parameters for DescribeDBInstances: region plus paging controls.
model DescribeDBInstancesRequest {
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeDBInstances: paged list of instances with per-node detail.
model DescribeDBInstancesResponseBody = {
  DBInstances?: [
    {
      commodityCode?: string(name='CommodityCode'),
      createTime?: string(name='CreateTime'),
      DBType?: string(name='DBType'),
      DBVersion?: string(name='DBVersion'),
      description?: string(name='Description'),
      engine?: string(name='Engine'),
      expireTime?: string(name='ExpireTime'),
      expired?: boolean(name='Expired'),
      id?: string(name='Id'),
      lockMode?: string(name='LockMode'),
      lockReason?: string(name='LockReason'),
      minorVersion?: string(name='MinorVersion'),
      network?: string(name='Network'),
      nodeClass?: string(name='NodeClass'),
      nodeCount?: int32(name='NodeCount'),
      nodes?: [
        {
          classCode?: string(name='ClassCode'),
          id?: string(name='Id'),
          regionId?: string(name='RegionId'),
          zoneId?: string(name='ZoneId'),
        }
      ](name='Nodes'),
      payType?: string(name='PayType'),
      readDBInstances?: [ string ](name='ReadDBInstances'),
      regionId?: string(name='RegionId'),
      status?: string(name='Status'),
      storageUsed?: long(name='StorageUsed'),
      type?: string(name='Type'),
      VPCId?: string(name='VPCId'),
      zoneId?: string(name='ZoneId'),
    }
  ](name='DBInstances'),
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  requestId?: string(name='RequestId'),
  totalNumber?: int32(name='TotalNumber'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDBInstances body.
model DescribeDBInstancesResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDBInstancesResponseBody(name='body'),
}
// Calls the DescribeDBInstances API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDBInstancesWithOptions(request: DescribeDBInstancesRequest, runtime: Util.RuntimeOptions): DescribeDBInstancesResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDBInstances', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDBInstancesWithOptions using default runtime options.
async function describeDBInstances(request: DescribeDBInstancesRequest): DescribeDBInstancesResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDBInstancesWithOptions(request, runtime);
}
// Request parameters for DescribeDBNodePerformance: target nodes, a metric key, and a
// time window over which to fetch performance points.
model DescribeDBNodePerformanceRequest {
  characterType?: string(name='CharacterType'),
  DBInstanceName?: string(name='DBInstanceName'),
  DBNodeIds?: string(name='DBNodeIds'),
  endTime?: string(name='EndTime'),
  key?: string(name='Key'),
  regionId?: string(name='RegionId'),
  startTime?: string(name='StartTime'),
}
// Response payload for DescribeDBNodePerformance: per-node metric series of
// timestamp/value points.
model DescribeDBNodePerformanceResponseBody = {
  DBInstanceName?: string(name='DBInstanceName'),
  endTime?: string(name='EndTime'),
  performanceKeys?: {
    performanceItem?: [
      {
        DBNodeId?: string(name='DBNodeId'),
        measurement?: string(name='Measurement'),
        metricName?: string(name='MetricName'),
        points?: {
          performanceItemValue?: [
            {
              timestamp?: long(name='Timestamp'),
              value?: string(name='Value'),
            }
          ](name='PerformanceItemValue')
        }(name='Points'),
      }
    ](name='PerformanceItem')
  }(name='PerformanceKeys'),
  requestId?: string(name='RequestId'),
  startTime?: string(name='StartTime'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDBNodePerformance body.
model DescribeDBNodePerformanceResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDBNodePerformanceResponseBody(name='body'),
}
// Calls the DescribeDBNodePerformance API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDBNodePerformanceWithOptions(request: DescribeDBNodePerformanceRequest, runtime: Util.RuntimeOptions): DescribeDBNodePerformanceResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDBNodePerformance', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDBNodePerformanceWithOptions using default runtime options.
async function describeDBNodePerformance(request: DescribeDBNodePerformanceRequest): DescribeDBNodePerformanceResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDBNodePerformanceWithOptions(request, runtime);
}
// Request parameters for DescribeDbList: instance, optional database name filter, region.
model DescribeDbListRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  DBName?: string(name='DBName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeDbList: databases on the instance with their granted accounts.
model DescribeDbListResponseBody = {
  data?: [
    {
      accounts?: [
        {
          accountName?: string(name='AccountName'),
          accountPrivilege?: string(name='AccountPrivilege'),
        }
      ](name='Accounts'),
      characterSetName?: string(name='CharacterSetName'),
      DBDescription?: string(name='DBDescription'),
      DBInstanceName?: string(name='DBInstanceName'),
      DBName?: string(name='DBName'),
    }
  ](name='Data'),
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDbList body.
model DescribeDbListResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDbListResponseBody(name='body'),
}
// Calls the DescribeDbList API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDbListWithOptions(request: DescribeDbListRequest, runtime: Util.RuntimeOptions): DescribeDbListResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDbList', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDbListWithOptions using default runtime options.
async function describeDbList(request: DescribeDbListRequest): DescribeDbListResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDbListWithOptions(request, runtime);
}
// Request parameters for DescribeDistributeTableList: instance, database name, region.
model DescribeDistributeTableListRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  dbName?: string(name='DbName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeDistributeTableList: table partitioning info for a database.
model DescribeDistributeTableListResponseBody = {
  data?: {
    tables?: [
      {
        dbKey?: string(name='DbKey'),
        tableName?: string(name='TableName'),
        tableType?: string(name='TableType'),
        tbKey?: string(name='TbKey'),
      }
    ](name='Tables'),
  }(name='Data'),
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeDistributeTableList body.
model DescribeDistributeTableListResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeDistributeTableListResponseBody(name='body'),
}
// Calls the DescribeDistributeTableList API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeDistributeTableListWithOptions(request: DescribeDistributeTableListRequest, runtime: Util.RuntimeOptions): DescribeDistributeTableListResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeDistributeTableList', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeDistributeTableListWithOptions using default runtime options.
async function describeDistributeTableList(request: DescribeDistributeTableListRequest): DescribeDistributeTableListResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeDistributeTableListWithOptions(request, runtime);
}
// Request parameters for DescribeParameterTemplates: instance, parameter level, region.
model DescribeParameterTemplatesRequest {
  DBInstanceId?: string(name='DBInstanceId'),
  paramLevel?: string(name='ParamLevel'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeParameterTemplates: the parameter template for the engine,
// including per-parameter metadata (whether dynamic/revisable, validation rule, default).
model DescribeParameterTemplatesResponseBody = {
  data?: {
    engine?: string(name='Engine'),
    engineVersion?: string(name='EngineVersion'),
    parameterCount?: int32(name='ParameterCount'),
    parameters?: [
      {
        checkingCode?: string(name='CheckingCode'),
        dynamic?: int32(name='Dynamic'),
        parameterDescription?: string(name='ParameterDescription'),
        parameterName?: string(name='ParameterName'),
        parameterValue?: string(name='ParameterValue'),
        revisable?: int32(name='Revisable'),
      }
    ](name='Parameters'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeParameterTemplates body.
model DescribeParameterTemplatesResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeParameterTemplatesResponseBody(name='body'),
}
// Calls the DescribeParameterTemplates API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeParameterTemplatesWithOptions(request: DescribeParameterTemplatesRequest, runtime: Util.RuntimeOptions): DescribeParameterTemplatesResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeParameterTemplates', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeParameterTemplatesWithOptions using default runtime options.
async function describeParameterTemplates(request: DescribeParameterTemplatesRequest): DescribeParameterTemplatesResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeParameterTemplatesWithOptions(request, runtime);
}
// Request parameters for DescribeParameters: instance, parameter level, region.
model DescribeParametersRequest {
  DBInstanceId?: string(name='DBInstanceId'),
  paramLevel?: string(name='ParamLevel'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeParameters: the instance's configured parameters
// alongside the values currently in effect at runtime.
model DescribeParametersResponseBody = {
  data?: {
    configParameters?: [
      {
        parameterDescription?: string(name='ParameterDescription'),
        parameterName?: string(name='ParameterName'),
        parameterValue?: string(name='ParameterValue'),
      }
    ](name='ConfigParameters'),
    engine?: string(name='Engine'),
    engineVersion?: string(name='EngineVersion'),
    runningParameters?: [
      {
        parameterDescription?: string(name='ParameterDescription'),
        parameterName?: string(name='ParameterName'),
        parameterValue?: string(name='ParameterValue'),
      }
    ](name='RunningParameters'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeParameters body.
model DescribeParametersResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeParametersResponseBody(name='body'),
}
// Calls the DescribeParameters API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeParametersWithOptions(request: DescribeParametersRequest, runtime: Util.RuntimeOptions): DescribeParametersResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeParameters', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeParametersWithOptions using default runtime options.
async function describeParameters(request: DescribeParametersRequest): DescribeParametersResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeParametersWithOptions(request, runtime);
}
// Request parameters for DescribePolarxDataNodes: region plus paging controls.
model DescribePolarxDataNodesRequest {
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribePolarxDataNodes: paged list of data-node instances.
model DescribePolarxDataNodesResponseBody = {
  DBInstanceDataNodes?: [
    {
      DBInstanceDescription?: string(name='DBInstanceDescription'),
      DBInstanceId?: string(name='DBInstanceId'),
      DBInstanceName?: string(name='DBInstanceName'),
    }
  ](name='DBInstanceDataNodes'),
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  requestId?: string(name='RequestId'),
  totalNumber?: int32(name='TotalNumber'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribePolarxDataNodes body.
model DescribePolarxDataNodesResponse = {
  headers: map[string]string(name='headers'),
  body: DescribePolarxDataNodesResponseBody(name='body'),
}
// Calls the DescribePolarxDataNodes API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describePolarxDataNodesWithOptions(request: DescribePolarxDataNodesRequest, runtime: Util.RuntimeOptions): DescribePolarxDataNodesResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribePolarxDataNodes', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describePolarxDataNodesWithOptions using default runtime options.
async function describePolarxDataNodes(request: DescribePolarxDataNodesRequest): DescribePolarxDataNodesResponse {
  var runtime = new Util.RuntimeOptions{};
  return describePolarxDataNodesWithOptions(request, runtime);
}
// Request parameters for DescribePolarxDbInstances: optional db-name / DRDS-instance
// filters plus paging controls.
model DescribePolarxDbInstancesRequest {
  dbName?: string(name='DbName'),
  drdsInstanceId?: string(name='DrdsInstanceId'),
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
}
// Response payload for DescribePolarxDbInstances: paged list of instances.
// NOTE(review): paging fields (PageNumber/PageSize/Total) come back as strings here,
// unlike the int32 paging in sibling list operations — confirm against the API spec.
model DescribePolarxDbInstancesResponseBody = {
  dbInstances?: {
    dbInstance?: [
      {
        createTime?: string(name='CreateTime'),
        DBInstanceId?: string(name='DBInstanceId'),
        DBType?: string(name='DBType'),
        DBVersion?: string(name='DBVersion'),
        description?: string(name='Description'),
        engine?: string(name='Engine'),
        expireTime?: string(name='ExpireTime'),
        lockMode?: string(name='LockMode'),
        network?: string(name='Network'),
        nodeClass?: string(name='NodeClass'),
        nodeCount?: int32(name='NodeCount'),
        payType?: string(name='PayType'),
        regionId?: string(name='RegionId'),
        status?: string(name='Status'),
        statusDesc?: string(name='StatusDesc'),
        storageUsed?: int32(name='StorageUsed'),
        VPCId?: string(name='VPCId'),
        zoneId?: string(name='ZoneId'),
        // NOTE(review): wire name is lowercase 'lockReason', unlike the PascalCase
        // siblings above — confirm this matches the service's actual JSON key.
        lockReason?: string(name='lockReason'),
      }
    ](name='DbInstance')
  }(name='DbInstances'),
  pageNumber?: string(name='PageNumber'),
  pageSize?: string(name='PageSize'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
  total?: string(name='Total'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribePolarxDbInstances body.
model DescribePolarxDbInstancesResponse = {
  headers: map[string]string(name='headers'),
  body: DescribePolarxDbInstancesResponseBody(name='body'),
}
// Calls the DescribePolarxDbInstances API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describePolarxDbInstancesWithOptions(request: DescribePolarxDbInstancesRequest, runtime: Util.RuntimeOptions): DescribePolarxDbInstancesResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribePolarxDbInstances', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describePolarxDbInstancesWithOptions using default runtime options.
async function describePolarxDbInstances(request: DescribePolarxDbInstancesRequest): DescribePolarxDbInstancesResponse {
  var runtime = new Util.RuntimeOptions{};
  return describePolarxDbInstancesWithOptions(request, runtime);
}
// Request parameters for DescribePolarxPgInstances: region, resource group, and paging.
model DescribePolarxPgInstancesRequest {
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  regionId?: string(name='RegionId'),
  resourceGroupId?: string(name='ResourceGroupId'),
}
// Response payload for DescribePolarxPgInstances: paged list of instances.
model DescribePolarxPgInstancesResponseBody = {
  DBInstances?: [
    {
      commodityCode?: string(name='CommodityCode'),
      createTime?: string(name='CreateTime'),
      DBType?: string(name='DBType'),
      DBVersion?: string(name='DBVersion'),
      description?: string(name='Description'),
      engine?: string(name='Engine'),
      expireTime?: string(name='ExpireTime'),
      expired?: boolean(name='Expired'),
      id?: string(name='Id'),
      lockMode?: string(name='LockMode'),
      lockReason?: string(name='LockReason'),
      network?: string(name='Network'),
      nodeClass?: string(name='NodeClass'),
      nodeCount?: int32(name='NodeCount'),
      payType?: string(name='PayType'),
      regionId?: string(name='RegionId'),
      status?: string(name='Status'),
      storageUsed?: int32(name='StorageUsed'),
      VPCId?: string(name='VPCId'),
      zoneId?: string(name='ZoneId'),
    }
  ](name='DBInstances'),
  expireDate?: string(name='ExpireDate'),
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribePolarxPgInstances body.
model DescribePolarxPgInstancesResponse = {
  headers: map[string]string(name='headers'),
  body: DescribePolarxPgInstancesResponseBody(name='body'),
}
// Calls the DescribePolarxPgInstances API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describePolarxPgInstancesWithOptions(request: DescribePolarxPgInstancesRequest, runtime: Util.RuntimeOptions): DescribePolarxPgInstancesResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribePolarxPgInstances', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describePolarxPgInstancesWithOptions using default runtime options.
async function describePolarxPgInstances(request: DescribePolarxPgInstancesRequest): DescribePolarxPgInstancesResponse {
  var runtime = new Util.RuntimeOptions{};
  return describePolarxPgInstancesWithOptions(request, runtime);
}
// Response payload for DescribeRegions: regions and zones where the service is
// available, with per-region PolarX 1.0/2.0 support flags.
model DescribeRegionsResponseBody = {
  code?: int32(name='Code'),
  errorCode?: int32(name='ErrorCode'),
  message?: string(name='Message'),
  regions?: {
    region?: [
      {
        regionId?: string(name='RegionId'),
        supportPolarx10?: boolean(name='SupportPolarx10'),
        supportPolarx20?: boolean(name='SupportPolarx20'),
        zones?: {
          zone?: [
            {
              vpcEnabled?: boolean(name='VpcEnabled'),
              zoneId?: string(name='ZoneId'),
            }
          ](name='Zone')
        }(name='Zones'),
      }
    ](name='Region')
  }(name='Regions'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeRegions body.
model DescribeRegionsResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeRegionsResponseBody(name='body'),
}
// Calls the DescribeRegions API (2020-02-02). This operation takes no request
// parameters, so an empty OpenApiRequest is sent via HTTPS POST.
async function describeRegionsWithOptions(runtime: Util.RuntimeOptions): DescribeRegionsResponse {
  var req = new OpenApi.OpenApiRequest{};
  return doRPCRequest('DescribeRegions', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeRegionsWithOptions using default runtime options.
async function describeRegions(): DescribeRegionsResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeRegionsWithOptions(runtime);
}
// Request parameters for DescribeScaleOutMigrateTaskList: instance plus the
// owner/resource-owner identity fields used by this operation.
model DescribeScaleOutMigrateTaskListRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  ownerAccount?: string(name='OwnerAccount'),
  ownerId?: long(name='OwnerId'),
  resourceOwnerAccount?: string(name='ResourceOwnerAccount'),
  resourceOwnerId?: long(name='ResourceOwnerId'),
}
// Response payload for DescribeScaleOutMigrateTaskList: overall migration progress.
model DescribeScaleOutMigrateTaskListResponseBody = {
  progress?: int32(name='Progress'),
  requestId?: string(name='RequestId'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeScaleOutMigrateTaskList body.
model DescribeScaleOutMigrateTaskListResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeScaleOutMigrateTaskListResponseBody(name='body'),
}
// Calls the DescribeScaleOutMigrateTaskList API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeScaleOutMigrateTaskListWithOptions(request: DescribeScaleOutMigrateTaskListRequest, runtime: Util.RuntimeOptions): DescribeScaleOutMigrateTaskListResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeScaleOutMigrateTaskList', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeScaleOutMigrateTaskListWithOptions using default runtime options.
async function describeScaleOutMigrateTaskList(request: DescribeScaleOutMigrateTaskListRequest): DescribeScaleOutMigrateTaskListResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeScaleOutMigrateTaskListWithOptions(request, runtime);
}
// Request parameters for DescribeSecurityIps: identifies the target instance and region.
model DescribeSecurityIpsRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
// Response payload for DescribeSecurityIps: IP whitelist groups for the instance.
model DescribeSecurityIpsResponseBody = {
  data?: {
    DBInstanceName?: string(name='DBInstanceName'),
    groupItems?: [
      {
        groupName?: string(name='GroupName'),
        securityIPList?: string(name='SecurityIPList'),
      }
    ](name='GroupItems'),
  }(name='Data'),
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeSecurityIps body.
model DescribeSecurityIpsResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeSecurityIpsResponseBody(name='body'),
}
// Calls the DescribeSecurityIps API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeSecurityIpsWithOptions(request: DescribeSecurityIpsRequest, runtime: Util.RuntimeOptions): DescribeSecurityIpsResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeSecurityIps', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeSecurityIpsWithOptions using default runtime options.
async function describeSecurityIps(request: DescribeSecurityIpsRequest): DescribeSecurityIpsResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeSecurityIpsWithOptions(request, runtime);
}
// Request parameters for DescribeTasks: instance, time window, status/action filters,
// paging, and the owner/resource-owner identity fields.
model DescribeTasksRequest {
  DBInstanceId?: string(name='DBInstanceId'),
  endTime?: string(name='EndTime'),
  ownerAccount?: string(name='OwnerAccount'),
  ownerId?: long(name='OwnerId'),
  pageNumber?: int32(name='PageNumber'),
  pageSize?: int32(name='PageSize'),
  regionId?: string(name='RegionId'),
  resourceOwnerAccount?: string(name='ResourceOwnerAccount'),
  resourceOwnerId?: long(name='ResourceOwnerId'),
  startTime?: string(name='StartTime'),
  status?: string(name='Status'),
  taskAction?: string(name='TaskAction'),
}
// Response payload for DescribeTasks: paged list of asynchronous tasks with progress
// and error details.
model DescribeTasksResponseBody = {
  items?: [
    {
      beginTime?: string(name='BeginTime'),
      DBName?: string(name='DBName'),
      finishTime?: string(name='FinishTime'),
      progress?: string(name='Progress'),
      progressInfo?: string(name='ProgressInfo'),
      scaleOutToken?: string(name='ScaleOutToken'),
      status?: string(name='Status'),
      taskAction?: string(name='TaskAction'),
      taskErrorCode?: string(name='TaskErrorCode'),
      taskErrorMessage?: string(name='TaskErrorMessage'),
      taskId?: string(name='TaskId'),
    }
  ](name='Items'),
  pageNumber?: int32(name='PageNumber'),
  pageRecordCount?: int32(name='PageRecordCount'),
  requestId?: string(name='RequestId'),
  totalRecordCount?: int32(name='TotalRecordCount'),
}
// Wrapper pairing the HTTP response headers with the parsed DescribeTasks body.
model DescribeTasksResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeTasksResponseBody(name='body'),
}
// Calls the DescribeTasks API (2020-02-02) via HTTPS POST with the request in the RPC body.
async function describeTasksWithOptions(request: DescribeTasksRequest, runtime: Util.RuntimeOptions): DescribeTasksResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeTasks', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload of describeTasksWithOptions using default runtime options.
async function describeTasks(request: DescribeTasksRequest): DescribeTasksResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeTasksWithOptions(request, runtime);
}
// DescribeUserEncryptionKeyList: lists KMS encryption key IDs for an instance.
model DescribeUserEncryptionKeyListRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
model DescribeUserEncryptionKeyListResponseBody = {
  data?: {
    keyIds?: [ string ](name='KeyIds'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
model DescribeUserEncryptionKeyListResponse = {
  headers: map[string]string(name='headers'),
  body: DescribeUserEncryptionKeyListResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function describeUserEncryptionKeyListWithOptions(request: DescribeUserEncryptionKeyListRequest, runtime: Util.RuntimeOptions): DescribeUserEncryptionKeyListResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('DescribeUserEncryptionKeyList', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function describeUserEncryptionKeyList(request: DescribeUserEncryptionKeyListRequest): DescribeUserEncryptionKeyListResponse {
  var runtime = new Util.RuntimeOptions{};
  return describeUserEncryptionKeyListWithOptions(request, runtime);
}
// GetPolarxCommodity: commodity/billing description of a PolarDB-X instance,
// including its component options and full instance topology.
model GetPolarxCommodityRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  orderType?: string(name='OrderType'),
  regionId?: string(name='RegionId'),
}
model GetPolarxCommodityResponseBody = {
  // Purchasable component options (name/type and allowed values).
  componentList?: [
    {
      name?: string(name='Name'),
      type?: string(name='Type'),
      values?: [ string ](name='Values'),
    }
  ](name='ComponentList'),
  // Snapshot of the instance the commodity order refers to.
  DBInstance?: {
    commodityCode?: string(name='CommodityCode'),
    connAddrs?: [
      {
        connectionString?: string(name='ConnectionString'),
        port?: string(name='Port'),
        type?: string(name='Type'),
        VPCId?: string(name='VPCId'),
        vSwitchId?: string(name='VSwitchId'),
      }
    ](name='ConnAddrs'),
    connectionString?: string(name='ConnectionString'),
    createTime?: string(name='CreateTime'),
    DBInstanceType?: string(name='DBInstanceType'),
    DBNodeClass?: string(name='DBNodeClass'),
    DBNodeCount?: int32(name='DBNodeCount'),
    DBNodes?: [
      {
        id?: string(name='Id'),
        nodeClass?: string(name='NodeClass'),
        regionId?: string(name='RegionId'),
        zoneId?: string(name='ZoneId'),
      }
    ](name='DBNodes'),
    DBType?: string(name='DBType'),
    DBVersion?: string(name='DBVersion'),
    description?: string(name='Description'),
    engine?: string(name='Engine'),
    expireDate?: string(name='ExpireDate'),
    expired?: string(name='Expired'),
    id?: string(name='Id'),
    latestMinorVersion?: string(name='LatestMinorVersion'),
    lockMode?: string(name='LockMode'),
    maintainEndTime?: string(name='MaintainEndTime'),
    maintainStartTime?: string(name='MaintainStartTime'),
    minorVersion?: string(name='MinorVersion'),
    network?: string(name='Network'),
    payType?: string(name='PayType'),
    port?: string(name='Port'),
    readDBInstances?: [ string ](name='ReadDBInstances'),
    regionId?: string(name='RegionId'),
    status?: string(name='Status'),
    storageUsed?: long(name='StorageUsed'),
    type?: string(name='Type'),
    VPCId?: string(name='VPCId'),
    vSwitchId?: string(name='VSwitchId'),
    zoneId?: string(name='ZoneId'),
  }(name='DBInstance'),
  requestId?: string(name='RequestId'),
}
model GetPolarxCommodityResponse = {
  headers: map[string]string(name='headers'),
  body: GetPolarxCommodityResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function getPolarxCommodityWithOptions(request: GetPolarxCommodityRequest, runtime: Util.RuntimeOptions): GetPolarxCommodityResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('GetPolarxCommodity', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function getPolarxCommodity(request: GetPolarxCommodityRequest): GetPolarxCommodityResponse {
  var runtime = new Util.RuntimeOptions{};
  return getPolarxCommodityWithOptions(request, runtime);
}
// ModifyAccountDescription: update the description of a database account.
model ModifyAccountDescriptionRequest {
  accountDescription?: string(name='AccountDescription'),
  accountName?: string(name='AccountName'),
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
model ModifyAccountDescriptionResponseBody = {
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
model ModifyAccountDescriptionResponse = {
  headers: map[string]string(name='headers'),
  body: ModifyAccountDescriptionResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function modifyAccountDescriptionWithOptions(request: ModifyAccountDescriptionRequest, runtime: Util.RuntimeOptions): ModifyAccountDescriptionResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('ModifyAccountDescription', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function modifyAccountDescription(request: ModifyAccountDescriptionRequest): ModifyAccountDescriptionResponse {
  var runtime = new Util.RuntimeOptions{};
  return modifyAccountDescriptionWithOptions(request, runtime);
}
// ModifyDBInstanceClass: change the instance class; returns the billing order ID.
model ModifyDBInstanceClassRequest {
  clientToken?: string(name='ClientToken'),
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
  targetDBInstanceClass?: string(name='TargetDBInstanceClass'),
}
model ModifyDBInstanceClassResponseBody = {
  orderId?: string(name='OrderId'),
  requestId?: string(name='RequestId'),
}
model ModifyDBInstanceClassResponse = {
  headers: map[string]string(name='headers'),
  body: ModifyDBInstanceClassResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function modifyDBInstanceClassWithOptions(request: ModifyDBInstanceClassRequest, runtime: Util.RuntimeOptions): ModifyDBInstanceClassResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('ModifyDBInstanceClass', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function modifyDBInstanceClass(request: ModifyDBInstanceClassRequest): ModifyDBInstanceClassResponse {
  var runtime = new Util.RuntimeOptions{};
  return modifyDBInstanceClassWithOptions(request, runtime);
}
// ModifyDBInstanceConfig: set a single named configuration value on an instance.
model ModifyDBInstanceConfigRequest {
  configName?: string(name='ConfigName'),
  configValue?: string(name='ConfigValue'),
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
model ModifyDBInstanceConfigResponseBody = {
  requestId?: string(name='RequestId'),
}
model ModifyDBInstanceConfigResponse = {
  headers: map[string]string(name='headers'),
  body: ModifyDBInstanceConfigResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function modifyDBInstanceConfigWithOptions(request: ModifyDBInstanceConfigRequest, runtime: Util.RuntimeOptions): ModifyDBInstanceConfigResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('ModifyDBInstanceConfig', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function modifyDBInstanceConfig(request: ModifyDBInstanceConfigRequest): ModifyDBInstanceConfigResponse {
  var runtime = new Util.RuntimeOptions{};
  return modifyDBInstanceConfigWithOptions(request, runtime);
}
// ModifyDBInstanceDescription: update the instance's display description.
model ModifyDBInstanceDescriptionRequest {
  DBInstanceDescription?: string(name='DBInstanceDescription'),
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
model ModifyDBInstanceDescriptionResponseBody = {
  requestId?: string(name='RequestId'),
}
model ModifyDBInstanceDescriptionResponse = {
  headers: map[string]string(name='headers'),
  body: ModifyDBInstanceDescriptionResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function modifyDBInstanceDescriptionWithOptions(request: ModifyDBInstanceDescriptionRequest, runtime: Util.RuntimeOptions): ModifyDBInstanceDescriptionResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('ModifyDBInstanceDescription', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function modifyDBInstanceDescription(request: ModifyDBInstanceDescriptionRequest): ModifyDBInstanceDescriptionResponse {
  var runtime = new Util.RuntimeOptions{};
  return modifyDBInstanceDescriptionWithOptions(request, runtime);
}
// ModifyDatabaseDescription: update the description of one database.
model ModifyDatabaseDescriptionRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  dbDescription?: string(name='DbDescription'),
  dbName?: string(name='DbName'),
  regionId?: string(name='RegionId'),
}
model ModifyDatabaseDescriptionResponseBody = {
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
model ModifyDatabaseDescriptionResponse = {
  headers: map[string]string(name='headers'),
  body: ModifyDatabaseDescriptionResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function modifyDatabaseDescriptionWithOptions(request: ModifyDatabaseDescriptionRequest, runtime: Util.RuntimeOptions): ModifyDatabaseDescriptionResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('ModifyDatabaseDescription', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function modifyDatabaseDescription(request: ModifyDatabaseDescriptionRequest): ModifyDatabaseDescriptionResponse {
  var runtime = new Util.RuntimeOptions{};
  return modifyDatabaseDescriptionWithOptions(request, runtime);
}
// ModifyParameter: apply parameter changes (Parameters is a serialized list).
model ModifyParameterRequest {
  clientToken?: string(name='ClientToken'),
  DBInstanceId?: string(name='DBInstanceId'),
  paramLevel?: string(name='ParamLevel'),
  parameters?: string(name='Parameters'),
  regionId?: string(name='RegionId'),
}
model ModifyParameterResponseBody = {
  requestId?: string(name='RequestId'),
}
model ModifyParameterResponse = {
  headers: map[string]string(name='headers'),
  body: ModifyParameterResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function modifyParameterWithOptions(request: ModifyParameterRequest, runtime: Util.RuntimeOptions): ModifyParameterResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('ModifyParameter', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function modifyParameter(request: ModifyParameterRequest): ModifyParameterResponse {
  var runtime = new Util.RuntimeOptions{};
  return modifyParameterWithOptions(request, runtime);
}
// ModifySecurityIps: edit the security IP whitelist of a named group.
model ModifySecurityIpsRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  groupName?: string(name='GroupName'),
  modifyMode?: string(name='ModifyMode'),
  regionId?: string(name='RegionId'),
  securityIPList?: string(name='SecurityIPList'),
}
model ModifySecurityIpsResponseBody = {
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
model ModifySecurityIpsResponse = {
  headers: map[string]string(name='headers'),
  body: ModifySecurityIpsResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function modifySecurityIpsWithOptions(request: ModifySecurityIpsRequest, runtime: Util.RuntimeOptions): ModifySecurityIpsResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('ModifySecurityIps', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function modifySecurityIps(request: ModifySecurityIpsRequest): ModifySecurityIpsResponse {
  var runtime = new Util.RuntimeOptions{};
  return modifySecurityIpsWithOptions(request, runtime);
}
// ReleaseInstancePublicConnection: drop the instance's public endpoint.
model ReleaseInstancePublicConnectionRequest {
  currentConnectionString?: string(name='CurrentConnectionString'),
  DBInstanceName?: string(name='DBInstanceName'),
  ownerAccount?: string(name='OwnerAccount'),
  ownerId?: long(name='OwnerId'),
  regionId?: string(name='RegionId'),
  resourceOwnerAccount?: string(name='ResourceOwnerAccount'),
  resourceOwnerId?: long(name='ResourceOwnerId'),
}
model ReleaseInstancePublicConnectionResponseBody = {
  requestId?: string(name='RequestId'),
}
model ReleaseInstancePublicConnectionResponse = {
  headers: map[string]string(name='headers'),
  body: ReleaseInstancePublicConnectionResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function releaseInstancePublicConnectionWithOptions(request: ReleaseInstancePublicConnectionRequest, runtime: Util.RuntimeOptions): ReleaseInstancePublicConnectionResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('ReleaseInstancePublicConnection', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function releaseInstancePublicConnection(request: ReleaseInstancePublicConnectionRequest): ReleaseInstancePublicConnectionResponse {
  var runtime = new Util.RuntimeOptions{};
  return releaseInstancePublicConnectionWithOptions(request, runtime);
}
// RestartDBInstance: restart the named instance.
model RestartDBInstanceRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
}
model RestartDBInstanceResponseBody = {
  requestId?: string(name='RequestId'),
}
model RestartDBInstanceResponse = {
  headers: map[string]string(name='headers'),
  body: RestartDBInstanceResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function restartDBInstanceWithOptions(request: RestartDBInstanceRequest, runtime: Util.RuntimeOptions): RestartDBInstanceResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('RestartDBInstance', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function restartDBInstance(request: RestartDBInstanceRequest): RestartDBInstanceResponse {
  var runtime = new Util.RuntimeOptions{};
  return restartDBInstanceWithOptions(request, runtime);
}
// UpdateBackupPolicy: configure backup schedule, retention and log policy;
// the response echoes the effective policy back in Data.
model UpdateBackupPolicyRequest {
  backupPeriod?: string(name='BackupPeriod'),
  backupPlanBegin?: string(name='BackupPlanBegin'),
  backupSetRetention?: int32(name='BackupSetRetention'),
  backupType?: string(name='BackupType'),
  backupWay?: string(name='BackupWay'),
  DBInstanceName?: string(name='DBInstanceName'),
  forceCleanOnHighSpaceUsage?: int32(name='ForceCleanOnHighSpaceUsage'),
  isEnabled?: int32(name='IsEnabled'),
  localLogRetention?: int32(name='LocalLogRetention'),
  logLocalRetentionSpace?: int32(name='LogLocalRetentionSpace'),
  regionId?: string(name='RegionId'),
  removeLogRetention?: int32(name='RemoveLogRetention'),
}
model UpdateBackupPolicyResponseBody = {
  data?: [
    {
      backupPeriod?: string(name='BackupPeriod'),
      backupPlanBegin?: string(name='BackupPlanBegin'),
      backupSetRetention?: int32(name='BackupSetRetention'),
      backupType?: string(name='BackupType'),
      backupWay?: string(name='BackupWay'),
      DBInstanceName?: string(name='DBInstanceName'),
      forceCleanOnHighSpaceUsage?: int32(name='ForceCleanOnHighSpaceUsage'),
      isEnabled?: int32(name='IsEnabled'),
      localLogRetention?: int32(name='LocalLogRetention'),
      logLocalRetentionSpace?: int32(name='LogLocalRetentionSpace'),
      removeLogRetention?: int32(name='RemoveLogRetention'),
    }
  ](name='Data'),
  message?: string(name='Message'),
  requestId?: string(name='RequestId'),
  success?: boolean(name='Success'),
}
model UpdateBackupPolicyResponse = {
  headers: map[string]string(name='headers'),
  body: UpdateBackupPolicyResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function updateBackupPolicyWithOptions(request: UpdateBackupPolicyRequest, runtime: Util.RuntimeOptions): UpdateBackupPolicyResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('UpdateBackupPolicy', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function updateBackupPolicy(request: UpdateBackupPolicyRequest): UpdateBackupPolicyResponse {
  var runtime = new Util.RuntimeOptions{};
  return updateBackupPolicyWithOptions(request, runtime);
}
// UpdateDBInstanceSSL: toggle SSL; returns an async task ID in Data.
// NOTE(review): TaskId is long here but string in UpdateDBInstanceTDE —
// mirrors the upstream API schema, presumably intentional.
model UpdateDBInstanceSSLRequest {
  certCommonName?: string(name='CertCommonName'),
  DBInstanceName?: string(name='DBInstanceName'),
  enableSSL?: boolean(name='EnableSSL'),
  regionId?: string(name='RegionId'),
}
model UpdateDBInstanceSSLResponseBody = {
  data?: {
    taskId?: long(name='TaskId'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
model UpdateDBInstanceSSLResponse = {
  headers: map[string]string(name='headers'),
  body: UpdateDBInstanceSSLResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function updateDBInstanceSSLWithOptions(request: UpdateDBInstanceSSLRequest, runtime: Util.RuntimeOptions): UpdateDBInstanceSSLResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('UpdateDBInstanceSSL', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function updateDBInstanceSSL(request: UpdateDBInstanceSSLRequest): UpdateDBInstanceSSLResponse {
  var runtime = new Util.RuntimeOptions{};
  return updateDBInstanceSSLWithOptions(request, runtime);
}
// UpdateDBInstanceTDE: configure transparent data encryption; returns a task ID.
model UpdateDBInstanceTDERequest {
  DBInstanceName?: string(name='DBInstanceName'),
  encryptionKey?: string(name='EncryptionKey'),
  regionId?: string(name='RegionId'),
  roleArn?: string(name='RoleArn'),
  TDEStatus?: int32(name='TDEStatus'),
}
model UpdateDBInstanceTDEResponseBody = {
  data?: {
    taskId?: string(name='TaskId'),
  }(name='Data'),
  requestId?: string(name='RequestId'),
}
model UpdateDBInstanceTDEResponse = {
  headers: map[string]string(name='headers'),
  body: UpdateDBInstanceTDEResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function updateDBInstanceTDEWithOptions(request: UpdateDBInstanceTDERequest, runtime: Util.RuntimeOptions): UpdateDBInstanceTDEResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('UpdateDBInstanceTDE', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function updateDBInstanceTDE(request: UpdateDBInstanceTDERequest): UpdateDBInstanceTDEResponse {
  var runtime = new Util.RuntimeOptions{};
  return updateDBInstanceTDEWithOptions(request, runtime);
}
// UpdatePolarDBXInstanceNode: change the node count; returns the billing order ID.
model UpdatePolarDBXInstanceNodeRequest {
  clientToken?: string(name='ClientToken'),
  DBInstanceName?: string(name='DBInstanceName'),
  dbInstanceNodeCount?: string(name='DbInstanceNodeCount'),
  regionId?: string(name='RegionId'),
}
model UpdatePolarDBXInstanceNodeResponseBody = {
  orderId?: string(name='OrderId'),
  requestId?: string(name='RequestId'),
}
model UpdatePolarDBXInstanceNodeResponse = {
  headers: map[string]string(name='headers'),
  body: UpdatePolarDBXInstanceNodeResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function updatePolarDBXInstanceNodeWithOptions(request: UpdatePolarDBXInstanceNodeRequest, runtime: Util.RuntimeOptions): UpdatePolarDBXInstanceNodeResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('UpdatePolarDBXInstanceNode', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function updatePolarDBXInstanceNode(request: UpdatePolarDBXInstanceNodeRequest): UpdatePolarDBXInstanceNodeResponse {
  var runtime = new Util.RuntimeOptions{};
  return updatePolarDBXInstanceNodeWithOptions(request, runtime);
}
// UpgradeDBInstanceKernelVersion: schedule a minor-version upgrade.
model UpgradeDBInstanceKernelVersionRequest {
  DBInstanceName?: string(name='DBInstanceName'),
  regionId?: string(name='RegionId'),
  switchTime?: string(name='SwitchTime'),
  upgradeTime?: string(name='UpgradeTime'),
}
model UpgradeDBInstanceKernelVersionResponseBody = {
  DBInstanceName?: string(name='DBInstanceName'),
  requestId?: string(name='RequestId'),
  targetMinorVersion?: string(name='TargetMinorVersion'),
  taskId?: string(name='TaskId'),
}
model UpgradeDBInstanceKernelVersionResponse = {
  headers: map[string]string(name='headers'),
  body: UpgradeDBInstanceKernelVersionResponseBody(name='body'),
}
// Validates the request and issues the RPC with explicit runtime options.
async function upgradeDBInstanceKernelVersionWithOptions(request: UpgradeDBInstanceKernelVersionRequest, runtime: Util.RuntimeOptions): UpgradeDBInstanceKernelVersionResponse {
  Util.validateModel(request);
  var req = new OpenApi.OpenApiRequest{
    body = Util.toMap(request),
  };
  return doRPCRequest('UpgradeDBInstanceKernelVersion', '2020-02-02', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
// Convenience overload using default runtime options.
async function upgradeDBInstanceKernelVersion(request: UpgradeDBInstanceKernelVersionRequest): UpgradeDBInstanceKernelVersionResponse {
  var runtime = new Util.RuntimeOptions{};
  return upgradeDBInstanceKernelVersionWithOptions(request, runtime);
}
| Tea | 4 | aliyun/alibabacloud-sdk | polardbx-20200202/main.tea | [
"Apache-2.0"
] |
<%--
  User: Benjamin CAURE
  Date: 4/14/2020
--%>
<%-- Spring MVC form page for composing a templated email.
     Bound to the "mailObject" model attribute; submitted via POST back
     to the same URL. Each field pairs a form:input with form:errors so
     server-side validation messages render inline. --%>
<%@ page pageEncoding="UTF-8" contentType="text/html;charset=UTF-8" language="java" %>
<%@ taglib prefix="form" uri="http://www.springframework.org/tags/form" %>
<html>
<head>
    <title>Send HTML Email</title>
</head>
<body>
<div>
    <h3>Send Email Using Text Template</h3>
    <form:form method="POST" modelAttribute="mailObject" >
        <fieldset>
            <div style="width: 100%;max-width: 1280px">
                <table>
                    <%-- Recipient address (validated as an email). --%>
                    <tr>
                        <th><label for="input_to">Recipient email</label></th>
                        <td><form:input path="to" id="input_to" type="email"/>
                            <small>Enter email address</small><br/>
                            <form:errors path="to" cssStyle="color:red;font-size:small"/>
                        </td>
                    </tr>
                    <tr>
                        <th><label for="input_recipient_name">Recipient name</label></th>
                        <td><form:input path="recipientName" id="input_recipient_name"/>
                            <small>Enter the recipient name</small><br/>
                            <form:errors path="recipientName" cssStyle="color:red;font-size:small"/>
                        </td>
                    </tr>
                    <tr>
                        <th><label for="input_subject">Subject</label></th>
                        <td><form:input path="subject" id="input_subject"/>
                            <small>Enter the subject</small><br/>
                            <form:errors path="subject" cssStyle="color:red;font-size:small"/>
                        </td>
                    </tr>
                    <%-- Free-form message body fed to the selected template engine. --%>
                    <tr>
                        <th><label for="input_text">Text</label></th>
                        <td><form:textarea path="text"
                                           rows="5" cols="50"
                                           id="input_text"/>
                            <form:errors path="text" cssStyle="color:red;font-size:small"/>
                        </td>
                    </tr>
                    <tr>
                        <th><label for="input_sender_name">Sender name</label></th>
                        <td><form:input path="senderName" id="input_sender_name"/>
                            <small>Enter the sender name</small><br/>
                            <form:errors path="senderName" cssStyle="color:red;font-size:small"/>
                        </td>
                    </tr>
                    <%-- Options are supplied by the controller in ${templateEngines}. --%>
                    <tr>
                        <th><label for="input_template_engine">Template Engine</label></th>
                        <td><form:select path="templateEngine" id="input_template_engine" items="${templateEngines}"/>
                            <small>Select the template engine</small><br/>
                            <form:errors path="templateEngine" cssStyle="color:red;font-size:small"/>
                        </td>
                    </tr>
                    <tr>
                        <th></th>
                        <td>
                            <input type="submit" value="Send">
                        </td>
                    </tr>
                </table>
            </div>
        </fieldset>
    </form:form>
</div>
</body>
</html>
| Java Server Pages | 4 | DBatOWL/tutorials | spring-web-modules/spring-mvc-basics-2/src/main/webapp/WEB-INF/views/mail/sendHtml.jsp | [
"MIT"
] |
// NOTE(review): the source dump lost its leading whitespace; the nesting
// below is reconstructed from selector semantics — confirm against the
// original file before committing.
// Page header: tighter title margin on narrow (<42rem) viewports.
.page-header
  h1
    @media screen and (min-width 42rem)
      margin-bottom 1rem
    @media screen and (max-width 42rem)
      margin-bottom 0.5rem
// Footer inside the main content area.
.main-content
  .site-footer
    text-align center
    @media screen and (max-width 42rem)
      margin-top -1rem
// Flex container for the example cards; wraps on narrow screens.
.example-list
  display flex
  justify-content space-between
  flex-wrap wrap
  @media screen and (min-width 42rem)
    margin 2rem 0 2rem 0
  @media screen and (max-width 42rem)
    margin 1rem 0
// A single example card: ~3 columns wide on desktop, full width on mobile.
.example-item
  background-color $color-white
  padding 0.8rem
  border 1px solid rgba(0, 0, 0, 0.1)
  box-shadow 0 1px 2px 0 rgba(0, 0, 0, 0.1)
  text-align center
  margin-bottom 1rem
  // Invisible filler item used to keep the last flex row left-aligned.
  &.placeholder
    visibility hidden
    height 0
    margin 0
    padding 0
  @media screen and (min-width 42rem)
    flex 0 1 28%
  @media screen and (max-width 42rem)
    flex 0 1 100%
    margin-bottom 1rem
// Full-screen detail view; slides in from the right via the "move" transition.
.view
  position fixed
  top 0
  left 0
  bottom 0
  right 0
  z-index 1
  padding 20px
  background #fff
  transform translate3d(0, 0, 0)
  // Off-screen start/end states of the slide transition.
  &.move-enter, &.move-leave-active
    transform translate3d(100%, 0, 0)
  &.move-enter-active, &.move-leave-active
    transition transform 0.3s
| Stylus | 4 | cym2050/better-scroll | packages/react-examples/src/index.styl | [
"MIT"
] |
( Generated from test_namespace1_in.muv by the MUV compiler. )
( https://github.com/revarbat/pymuv )
( Names are namespace-mangled: the "foo__" prefix is MUV namespace "foo". )
( foo__ltuaa holds namespace variable foo::ltuaa, initialized in __start. )
lvar foo__ltuaa
( foo::abc[a] - returns foo::ltuaa + a )
: foo__abc[ _a -- ret ]
    foo__ltuaa @ _a @ +
;
( foo::def[a] - returns foo::abc[2 * a] )
: foo__def[ _a -- ret ]
    _a @ 2 * foo__abc
;
( Program entry body: evaluates foo::def[3]. )
: _main[ _arg -- ret ]
    3 foo__def
;
( Runtime prologue: binds me/loc/trigger, seeds foo::ltuaa, calls _main. )
: __start
    "me" match me ! me @ location loc ! trig trigger !
    42 foo__ltuaa !
    _main
;
| MUF | 2 | revarbat/pymuv | tests/test_namespace1_cmp.muf | [
"MIT"
] |
#
# SRT - Secure, Reliable, Transport
# Copyright (c) 2018 Haivision Systems Inc.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# API description:
# Expected variables:
# - options: dictionary "option-name" : "description"
# if there's '=' in option name, it expects an argument. Otherwise it's boolean.
# - alias: optional, you can make shortcuts to longer named options. Remember to use = in target name.
#
# Optional procedures:
# - preprocess: run before command-line arguments ($argv) are reviewed
# - postprocess: run after options are reviewed and all data filled in
#
# Available variables in postprocess:
#
# - optval (array): contains all option names with their assigned values
# - cmakeopt (scalar): a list of all options for "cmake" command line
# Options processed here internally, not passed to cmake
# Options processed here internally, not passed to cmake.
# Format: alternating "option-spec" "description"; a spec containing "="
# declares a value-taking option, otherwise it is boolean.
set internal_options {
	with-compiler-prefix=<prefix> "set C/C++ toolchains <prefix>gcc and <prefix>g++"
	with-compiler-type=<name> "compiler type: gcc(default), cc, others simply add ++ for C++"
	with-srt-name=<name> "Override srt library name"
	with-haicrypt-name=<name> "Override haicrypt library name (if compiled separately)"
}

# Options that refer directly to variables used in CMakeLists.txt
# (forwarded to cmake as -D flags).
set cmake_options {
	cygwin-use-posix "Should the POSIX API be used for cygwin. Ignored if the system isn't cygwin. (default: OFF)"
	enable-encryption "Should encryption features be enabled (default: ON)"
	enable-c++11 "Should the c++11 parts (srt-live-transmit) be enabled (default: ON)"
	enable-apps "Should the Support Applications be Built? (default: ON)"
	enable-testing "Should developer testing applications be built (default: OFF)"
	enable-c++-deps "Extra library dependencies in srt.pc for C language (default: OFF)"
	enable-heavy-logging "Should heavy debug logging be enabled (default: OFF)"
	enable-logging "Should logging be enabled (default: ON)"
	enable-debug=<0,1,2> "Enable debug mode (0=disabled, 1=debug, 2=rel-with-debug)"
	enable-haicrypt-logging "Should logging in haicrypt be enabled (default: OFF)"
	enable-inet-pton "Set to OFF to prevent usage of inet_pton when building against modern SDKs (default: ON)"
	enable-code-coverage "Enable code coverage reporting (default: OFF)"
	enable-monotonic-clock "Enforced clock_gettime with monotonic clock on GC CV /temporary fix for #729/ (default: OFF)"
	enable-profile "Should instrument the code for profiling. Ignored for non-GNU compiler. (default: OFF)"
	enable-relative-libpath "Should applications contain relative library paths, like ../lib (default: OFF)"
	enable-shared "Should libsrt be built as a shared library (default: ON)"
	enable-static "Should libsrt be built as a static library (default: ON)"
	enable-suflip "Should suflip tool be built (default: OFF)"
	enable-getnameinfo "In-logs sockaddr-to-string should do rev-dns (default: OFF)"
	enable-unittests "Enable unit tests (default: OFF)"
	enable-thread-check "Enable #include <threadcheck.h> that implements THREAD_* macros"
	openssl-crypto-library=<filepath> "Path to a library."
	openssl-include-dir=<path> "Path to a file."
	openssl-ssl-library=<filepath> "Path to a library."
	pkg-config-executable=<filepath> "pkg-config executable"
	pthread-include-dir=<path> "Path to a file."
	pthread-library=<filepath> "Path to a library."
	use-busy-waiting "Enable more accurate sending times at a cost of potentially higher CPU load (default: OFF)"
	use-gnustl "Get c++ library/headers from the gnustl.pc"
	use-enclib "Encryption library to be used: openssl(default), gnutls, mbedtls"
	use-gnutls "DEPRECATED. Use USE_ENCLIB=openssl|gnutls|mbedtls instead"
	use-openssl-pc "Use pkg-config to find OpenSSL libraries (default: ON)"
	use-static-libstdc++ "Should use static rather than shared libstdc++ (default: OFF)"
}

# The full option set consumed by the command-line parser.
set options $internal_options$cmake_options

# Just example. Available in the system.
# Maps shortcut option names to their long form (note trailing "=").
set alias {
	--prefix --cmake-install-prefix=
}
# Thin wrapper around the external pkg-config tool: forwards all
# arguments and returns its stdout with surrounding whitespace removed.
proc pkg-config args {
    set output [exec pkg-config {*}$args]
    return [string trim $output]
}
# Strip the leading "--" from every option in the given list and trim
# stray whitespace, e.g. {--foo --bar=baz} -> {foo bar=baz}.
proc flagval v {
    set out ""
    foreach o $v {
        # Use the full word "end": index abbreviations such as "en" are
        # tolerated by Tcl 8.x but rejected as a bad index by Tcl 9.
        lappend out [string trim [string range $o 2 end]]
    }
    return $out
}
# Output library-name overrides; filled in by preprocess from the
# --with-haicrypt-name / --with-srt-name options, consumed by postprocess.
set haicrypt_name ""
set srt_name ""
# Runs before command-line options are reviewed. Probes the host platform
# (Cygwin/Linux/Darwin/Windows), sets the ::CYGWIN, ::HAVE_* and ::DRIVE_C
# globals, rewrites deprecated aliases in ::optval, and extracts the
# --with-target-path / --with-*-name options into their own globals.
proc preprocess {} {
    # Prepare windows basic path info
    set ::CYGWIN 0
    set e [catch {exec uname -o} res]
    # We have Cygwin, if uname -o returns "cygwin" and does not fail.
    if { !$e && $res == "Cygwin" } {
        set ::CYGWIN 1
        puts "CYGWIN DETECTED"
    }

    set ::HAVE_LINUX [expr {$::tcl_platform(os) == "Linux"}]
    set ::HAVE_DARWIN [expr {$::tcl_platform(os) == "Darwin"}]

    set ::CYGWIN_USE_POSIX 0
    if { "--cygwin-use-posix" in $::optkeys } {
        set ::CYGWIN_USE_POSIX 1
    }

    set ::HAVE_WINDOWS 0
    if { $::tcl_platform(platform) == "windows" } {
        puts "WINDOWS PLATFORM detected"
        set ::HAVE_WINDOWS 1
    }

    if { $::CYGWIN && !$::CYGWIN_USE_POSIX } {
        puts "CYGWIN - MINGW enforced"
        # Make Cygwin tools see it right, to compile for MinGW
        if { "--with-compiler-prefix" ni $::optkeys } {
            set ::optval(--with-compiler-prefix) /bin/x86_64-w64-mingw32-
        }

        # Extract drive C: information
        # (last mount-table entry is assumed to be the drive prefix mapping)
        set drive_path [exec mount -p | tail -1 | cut {-d } -f 1]
        set ::DRIVE_C $drive_path/c
        set ::HAVE_WINDOWS 1
    } else {
        # Don't check for Windows, non-Windows parts will not use it.
        set ::DRIVE_C C:
    }

    # Alias to old name --with-gnutls, which enforces using gnutls instead of openssl
    if { [info exists ::optval(--with-gnutls)] } {
        unset ::optval(--with-gnutls)
        set ::optval(--use-enclib) gnutls
        puts "WARNING: --with-gnutls is a deprecated alias to --use-enclib=gnutls, please use the latter one"
    }

    # Alias to old name --use-gnutls, which enforces using gnutls instead of openssl
    if { [info exists ::optval(--use-gnutls)] } {
        unset ::optval(--use-gnutls)
        set ::optval(--use-enclib) gnutls
        puts "WARNING: --use-gnutls is a deprecated alias to --use-enclib=gnutls, please use the latter one"
    }

    # Move option values into dedicated globals and drop them from ::optval
    # so they are not forwarded to cmake verbatim.
    if { [info exists ::optval(--with-target-path)] } {
        set ::target_path $::optval(--with-target-path)
        unset ::optval(--with-target-path)
        puts "NOTE: Explicit target path: $::target_path"
    }

    if { "--with-srt-name" in $::optkeys } {
        set ::srt_name $::optval(--with-srt-name)
        unset ::optval(--with-srt-name)
    }

    if { "--with-haicrypt-name" in $::optkeys } {
        set ::haicrypt_name $::optval(--with-haicrypt-name)
        unset ::optval(--with-haicrypt-name)
    }
}
# Resolve the C compiler command implied by the user's options, checked
# in priority order: --with-compiler-prefix (the prefix is glued onto
# "gcc"), then the explicit --cmake-*-compiler options. Returns "" when
# none was given — a cmake toolchain file may still supply the compiler.
proc GetCompilerCommand {} {
    if { [info exists ::optval(--with-compiler-prefix)] } {
        set toolprefix $::optval(--with-compiler-prefix)
        return ${toolprefix}gcc
    }

    # The C and C++ compiler options are interchangeable for this purpose;
    # take the first one present, in this fixed order.
    foreach optname {--cmake-c-compiler --cmake-c++-compiler --cmake-cxx-compiler} {
        if { [info exists ::optval($optname)] } {
            return $::optval($optname)
        }
    }

    puts "NOTE: Cannot obtain compiler, assuming toolchain file will do what's necessary"
    return ""
}
proc postprocess {} {
set iscross 0
# Check if there was any option that changed the toolchain. If so, don't apply any autodetection-based toolchain change.
set all_options [array names ::optval]
set toolchain_changed no
foreach changer {
--with-compiler-prefix
--cmake-c-compiler
--cmake-c++-compiler
--cmake-cxx-compiler
--cmake-toolchain-file
} {
if { $changer in $all_options } {
puts "NOTE: toolchain changed by '$changer' option"
set toolchain_changed yes
break
}
}
set cygwin_posix 0
if { "--cygwin-use-posix" in $all_options } {
# Will enforce OpenSSL autodetection
set cygwin_posix 1
}
if { $toolchain_changed } {
# Check characteristics of the compiler - in particular, whether the target is different
# than the current target.
set compiler_path ""
set cmd [GetCompilerCommand]
if { $cmd != "" } {
set gcc_version [exec $cmd -v 2>@1]
set target ""
set compiler_path [file dirname $cmd]
foreach l [split $gcc_version \n] {
if { [string match Target:* $l] } {
set target [lindex $l 1] ;# [0]Target: [1]x86_64-some-things-further
set target_platform [lindex [split $target -] 0] ;# [0]x86_64 [1]redhat [2]linux
break
}
}
if { $target_platform == "" } {
puts "NOTE: can't obtain target from gcc -v: $l"
} else {
if { $target_platform != $::tcl_platform(machine) } {
puts "NOTE: foreign target type detected ($target)" ;# - setting CROSSCOMPILING flag"
#lappend ::cmakeopt "-DHAVE_CROSSCOMPILER=1"
set iscross 1
}
}
}
}
if { $::srt_name != "" } {
lappend ::cmakeopt "-DTARGET_srt=$::srt_name"
}
if { $::haicrypt_name != "" } {
lappend ::cmakeopt "-DTARGET_haicrypt=$::haicrypt_name"
}
set have_openssl 0
if { [lsearch -glob $::optkeys --openssl*] != -1 } {
set have_openssl 1
}
set have_gnutls 0
if { [lsearch -glob $::optkeys --use-gnutls] != -1 } {
set have_gnutls 1
}
if { $have_openssl && $have_gnutls } {
puts "NOTE: SSL library is exclusively selectable. Thus, --use-gnutls option will be ignored"
set have_gnutls 0
}
if { $have_gnutls } {
lappend ::cmakeopt "-DUSE_GNUTLS=ON"
}
if {$iscross} {
proc check-target-path {path} {
puts "Checking path '$path'"
if { [file isdir $path]
&& [file isdir $path/bin]
&& [file isdir $path/include]
&& ([file isdir $path/lib] || [file isdir $path/lib64]) } {
return yes
}
return no
}
if { ![info exists ::target_path] } {
# Try to autodetect the target path by having the basic 3 directories.
set target_path ""
set compiler_prefix [file dirname $compiler_path] ;# strip 'bin' directory
puts "NOTE: no --with-target-path found, will try to autodetect at $compiler_path"
foreach path [list $compiler_path $compiler_prefix/$target] {
if { [check-target-path $path] } {
set target_path $path
puts "NOTE: target path detected: $target_path"
break
}
}
if { $target_path == "" } {
puts "ERROR: Can't determine compiler's platform files root path (using compiler command path). Specify --with-target-path."
exit 1
}
} else {
set target_path $::target_path
# Still, check if correct.
if { ![check-target-path $target_path] } {
puts "ERROR: path in --with-target-path does not contain typical subdirectories"
exit 1
}
puts "NOTE: Using explicit target path: $target_path"
}
# Add this for cmake, should it need for something
lappend ::cmakeopt "-DCMAKE_PREFIX_PATH=$target_path"
# Add explicitly the path for pkg-config
# which lib
if { [file isdir $target_path/lib64/pkgconfig] } {
set ::env(PKG_CONFIG_PATH) $target_path/lib64/pkgconfig
puts "PKG_CONFIG_PATH: Found pkgconfig in lib64 for '$target_path' - using it"
} elseif { [file isdir $target_path/lib/pkgconfig] } {
set ::env(PKG_CONFIG_PATH) $target_path/lib/pkgconfig
puts "PKG_CONFIG_PATH: Found pkgconfig in lib for '$target_path' - using it"
} else {
puts "PKG_CONFIG_PATH: NOT changed, no pkgconfig in '$target_path'"
}
# Otherwise don't set PKG_CONFIG_PATH and we'll see.
}
if { $::HAVE_DARWIN && !$toolchain_changed} {
if { $have_gnutls } {
# Use gnutls explicitly, as found in brew
set er [catch {exec brew info gnutls} res]
if { $er } {
error "Cannot find gnutls in brew"
}
} else {
# ON Darwin there's a problem with linking against the Mac-provided OpenSSL.
# This must use brew-provided OpenSSL.
#
if { !$have_openssl } {
set er [catch {exec brew info openssl} res]
if { $er } {
error "You must have OpenSSL installed from 'brew' tool. The standard Mac version is inappropriate."
}
lappend ::cmakeopt "-DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl/include"
lappend ::cmakeopt "-DOPENSSL_LIBRARIES=/usr/local/opt/openssl/lib/libcrypto.a"
}
}
}
}
| Tcl | 5 | attenuation/srs | trunk/3rdparty/srt-1-fit/configure-data.tcl | [
"MIT"
] |
mod a {
pub struct Foo {
x: u32,
}
impl Foo {
pub fn new() -> Foo { Foo { x: 0 } }
}
}
fn main() {
let f = a::Foo::new();
f.x; //~ ERROR E0616
}
| Rust | 3 | Eric-Arellano/rust | src/test/ui/error-codes/E0616.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
{% extends "base.ahk" %}
{% block body %}
MouseGetPos,,, MouseWin
FileAppend, %MouseWin%, *
{% endblock body %}
| AutoHotkey | 3 | epth/ahk | ahk/templates/window/from_mouse.ahk | [
"MIT"
] |
$(OBJDIR)/pretty.cmi:
| D | 0 | heechul/crest-z3 | cil/obj/.depend/pretty.di | [
"BSD-3-Clause"
] |
/*All Units in inches */
CylinderFaces = 100;
MMPerInch = 25.4;
/* Negligible thickness added to prevent rendering issues in OpenSCAD*/
ZF = .0002;
BeltBoltDiam = 1/4 + .005;
BeltBoltSpacingX = 1.825;
BeltBoltSpacingY = 1.116;
BeltBoltRiserDiam = .75;
BeltBoltRiserH = 1;
CarriageBaseX = BeltBoltSpacingX * 2 + BeltBoltRiserDiam;
CarriageBaseY = 2;
CarriageBaseZ = 0.25;
ShaftRiserDiam = 1;
ShaftRiserH = 3;
ShaftDiam = (12 + .5) /MMPerInch;
GlideShaftSpacing = 3.5;
/*Taken from SCS12LUU Linear bearing mechanical drawing */
M5ScrewToEdge = 5.75 / MMPerInch;
M5ScrewSpacingY = (42 - (2 * 5.75)) / MMPerInch;
M5ScrewSpacingX = 50 / MMPerInch;
M5ScrewDiam = 5.2 / MMPerInch;
/*Taken from SCS12UU Linear bearing mechanical drawing */
SB_M5ScrewToEdge = 5.75 / MMPerInch;
SB_M5ScrewSpacingY = (42 - (2 * 5.75)) / MMPerInch;
SB_M5ScrewSpacingX = 26 / MMPerInch;
SB_M5ScrewDiam = 5.2 / MMPerInch;
BearingX = 70 / MMPerInch;
BearingY = 42 / MMPerInch;
BearingZ = 28 / MMPerInch;
MotorMountOffsetZ = CarriageBaseZ + .25;
MotorShaftDiam = 1/4;
MotorShaftLen = 20.6/ MMPerInch;
MotorFrameSize = 57 / MMPerInch;
MotorScrewSpacing = 47 / MMPerInch;
MotorScrewSpacingFromEdge = (MotorFrameSize - MotorScrewSpacing)/2;
MotorMountThick = 1/2;
PulleyOuterDiam = 1.63;
PulleyInnerDiam = 1.401;
MotorFaceToPulleyCenter = .57677;
difference(){
union(){
/* Create the base plate */
translate([0,0, CarriageBaseZ/2]) cube(size = [CarriageBaseX, CarriageBaseY, CarriageBaseZ], center = true);
/*Add the shaft risers */
translate([GlideShaftSpacing/2,0,ShaftRiserH/2]) cylinder(d = ShaftRiserDiam, h = ShaftRiserH, $fn = CylinderFaces, center = true);
translate([-GlideShaftSpacing/2,0,ShaftRiserH/2]) cylinder(d = ShaftRiserDiam, h = ShaftRiserH, $fn = CylinderFaces, center = true);
/* Add motor mount */
translate([0, MotorFaceToPulleyCenter - MotorMountThick, ShaftRiserH/2]) cube(size = [GlideShaftSpacing, MotorMountThick, ShaftRiserH], center = true);
/* Add belt plate connection risers */
translate([BeltBoltSpacingX,BeltBoltSpacingY,BeltBoltRiserH/2]) cylinder(d = BeltBoltRiserDiam, h = BeltBoltRiserH, $fn = CylinderFaces, center = true);
translate([-BeltBoltSpacingX,BeltBoltSpacingY,BeltBoltRiserH/2]) cylinder(d = BeltBoltRiserDiam, h = BeltBoltRiserH, $fn = CylinderFaces, center = true);
translate([BeltBoltSpacingX,BeltBoltSpacingY/2, BeltBoltRiserH/2]) cube(size = [BeltBoltRiserDiam, BeltBoltSpacingY, BeltBoltRiserH], center = true);
translate([-BeltBoltSpacingX,BeltBoltSpacingY/2, BeltBoltRiserH/2]) cube(size = [BeltBoltRiserDiam, BeltBoltSpacingY, BeltBoltRiserH], center = true);
// Linear bearing rendering: Not part of the peice. Comment out
//translate([0, 0, -BearingZ/2]) cube(size = [BearingX, BearingY, BearingZ], center = true);
}
/* Drill shaft holes */
translate([GlideShaftSpacing/2,0,ShaftRiserH/2 + CarriageBaseZ]) cylinder(d = ShaftDiam, h = ShaftRiserH, $fn = CylinderFaces, center = true);
translate([-GlideShaftSpacing/2,0,ShaftRiserH/2 + CarriageBaseZ]) cylinder(d = ShaftDiam, h = ShaftRiserH, $fn = CylinderFaces, center = true);
/* Drill bearing mount holes */
translate([M5ScrewSpacingX/2,M5ScrewSpacingY/2,0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([-M5ScrewSpacingX/2,M5ScrewSpacingY/2,0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([M5ScrewSpacingX/2,-M5ScrewSpacingY/2,0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([-M5ScrewSpacingX/2,-M5ScrewSpacingY/2,0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
/* Drill holes for the smaller linear bearing mount */
translate([SB_M5ScrewSpacingX/2, SB_M5ScrewSpacingY/2, 0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([SB_M5ScrewSpacingX/2, -SB_M5ScrewSpacingY/2, 0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([-SB_M5ScrewSpacingX/2, SB_M5ScrewSpacingY/2, 0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([-SB_M5ScrewSpacingX/2, -SB_M5ScrewSpacingY/2, 0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
/* Carve space in the motor mount for the pulley */
translate([0, 0, MotorFrameSize/2 + MotorMountOffsetZ]) rotate([90,0,0]) cylinder(d = PulleyOuterDiam + .25, h = 10, $fn = CylinderFaces, center = true);
/* Carve a slot for the pulley to exit */
translate([0,0, ShaftRiserH + MotorMountOffsetZ]) cube(size = [PulleyInnerDiam + .15, 10, ShaftRiserH + ZF], center = true);
/* Drill holes to mount motor */
translate([MotorScrewSpacing/2, 0, MotorScrewSpacingFromEdge + MotorMountOffsetZ]) rotate([90,0,0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([-MotorScrewSpacing/2, 0, MotorScrewSpacingFromEdge + MotorMountOffsetZ]) rotate([90,0,0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([MotorScrewSpacing/2, 0, MotorScrewSpacingFromEdge + MotorMountOffsetZ + MotorScrewSpacing]) rotate([90,0,0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
translate([-MotorScrewSpacing/2, 0, MotorScrewSpacingFromEdge + MotorMountOffsetZ + MotorScrewSpacing]) rotate([90,0,0]) cylinder(d = M5ScrewDiam, h = 10, $fn = CylinderFaces, center = true);
/*Drill holes for belt plate */
translate([BeltBoltSpacingX,BeltBoltSpacingY,0]) cylinder(d = BeltBoltDiam, h = 10, $fn = CylinderFaces, center = true);
translate([-BeltBoltSpacingX,BeltBoltSpacingY,0]) cylinder(d = BeltBoltDiam, h = 10, $fn = CylinderFaces, center = true);
} | OpenSCAD | 4 | nygrenj/Windows-iotcore-samples | Demos/AirHockeyRobot/CS/Robot Parts/YCarriage_MotorMount.scad | [
"MIT"
] |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.build.bom;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import groovy.namespace.QName;
import groovy.util.Node;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.plugins.JavaPlatformExtension;
import org.gradle.api.plugins.JavaPlatformPlugin;
import org.gradle.api.plugins.PluginContainer;
import org.gradle.api.publish.PublishingExtension;
import org.gradle.api.publish.maven.MavenPom;
import org.gradle.api.publish.maven.MavenPublication;
import org.springframework.boot.build.DeployedPlugin;
import org.springframework.boot.build.MavenRepositoryPlugin;
import org.springframework.boot.build.bom.Library.Group;
import org.springframework.boot.build.bom.Library.Module;
import org.springframework.boot.build.bom.bomr.UpgradeBom;
/**
* {@link Plugin} for defining a bom. Dependencies are added as constraints in the
* {@code api} configuration. Imported boms are added as enforced platforms in the
* {@code api} configuration.
*
* @author Andy Wilkinson
*/
public class BomPlugin implements Plugin<Project> {
static final String API_ENFORCED_CONFIGURATION_NAME = "apiEnforced";
@Override
public void apply(Project project) {
PluginContainer plugins = project.getPlugins();
plugins.apply(DeployedPlugin.class);
plugins.apply(MavenRepositoryPlugin.class);
plugins.apply(JavaPlatformPlugin.class);
JavaPlatformExtension javaPlatform = project.getExtensions().getByType(JavaPlatformExtension.class);
javaPlatform.allowDependencies();
createApiEnforcedConfiguration(project);
BomExtension bom = project.getExtensions().create("bom", BomExtension.class, project.getDependencies(),
project);
project.getTasks().create("bomrCheck", CheckBom.class, bom);
project.getTasks().create("bomrUpgrade", UpgradeBom.class, bom);
new PublishingCustomizer(project, bom).customize();
}
private void createApiEnforcedConfiguration(Project project) {
Configuration apiEnforced = project.getConfigurations().create(API_ENFORCED_CONFIGURATION_NAME,
(configuration) -> {
configuration.setCanBeConsumed(false);
configuration.setCanBeResolved(false);
configuration.setVisible(false);
});
project.getConfigurations().getByName(JavaPlatformPlugin.ENFORCED_API_ELEMENTS_CONFIGURATION_NAME)
.extendsFrom(apiEnforced);
project.getConfigurations().getByName(JavaPlatformPlugin.ENFORCED_RUNTIME_ELEMENTS_CONFIGURATION_NAME)
.extendsFrom(apiEnforced);
}
private static final class PublishingCustomizer {
private final Project project;
private final BomExtension bom;
private PublishingCustomizer(Project project, BomExtension bom) {
this.project = project;
this.bom = bom;
}
private void customize() {
PublishingExtension publishing = this.project.getExtensions().getByType(PublishingExtension.class);
publishing.getPublications().withType(MavenPublication.class).all(this::configurePublication);
}
private void configurePublication(MavenPublication publication) {
publication.pom(this::customizePom);
}
@SuppressWarnings("unchecked")
private void customizePom(MavenPom pom) {
pom.withXml((xml) -> {
Node projectNode = xml.asNode();
Node properties = new Node(null, "properties");
this.bom.getProperties().forEach(properties::appendNode);
Node dependencyManagement = findChild(projectNode, "dependencyManagement");
if (dependencyManagement != null) {
addPropertiesBeforeDependencyManagement(projectNode, properties);
replaceVersionsWithVersionPropertyReferences(dependencyManagement);
addExclusionsToManagedDependencies(dependencyManagement);
addTypesToManagedDependencies(dependencyManagement);
}
else {
projectNode.children().add(properties);
}
addPluginManagement(projectNode);
});
}
@SuppressWarnings("unchecked")
private void addPropertiesBeforeDependencyManagement(Node projectNode, Node properties) {
for (int i = 0; i < projectNode.children().size(); i++) {
if (isNodeWithName(projectNode.children().get(i), "dependencyManagement")) {
projectNode.children().add(i, properties);
break;
}
}
}
private void replaceVersionsWithVersionPropertyReferences(Node dependencyManagement) {
Node dependencies = findChild(dependencyManagement, "dependencies");
if (dependencies != null) {
for (Node dependency : findChildren(dependencies, "dependency")) {
String groupId = findChild(dependency, "groupId").text();
String artifactId = findChild(dependency, "artifactId").text();
String versionProperty = this.bom.getArtifactVersionProperty(groupId, artifactId);
if (versionProperty != null) {
findChild(dependency, "version").setValue("${" + versionProperty + "}");
}
}
}
}
private void addExclusionsToManagedDependencies(Node dependencyManagement) {
Node dependencies = findChild(dependencyManagement, "dependencies");
if (dependencies != null) {
for (Node dependency : findChildren(dependencies, "dependency")) {
String groupId = findChild(dependency, "groupId").text();
String artifactId = findChild(dependency, "artifactId").text();
this.bom.getLibraries().stream().flatMap((library) -> library.getGroups().stream())
.filter((group) -> group.getId().equals(groupId))
.flatMap((group) -> group.getModules().stream())
.filter((module) -> module.getName().equals(artifactId))
.flatMap((module) -> module.getExclusions().stream()).forEach((exclusion) -> {
Node exclusions = findOrCreateNode(dependency, "exclusions");
Node node = new Node(exclusions, "exclusion");
node.appendNode("groupId", exclusion.getGroupId());
node.appendNode("artifactId", exclusion.getArtifactId());
});
}
}
}
private void addTypesToManagedDependencies(Node dependencyManagement) {
Node dependencies = findChild(dependencyManagement, "dependencies");
if (dependencies != null) {
for (Node dependency : findChildren(dependencies, "dependency")) {
String groupId = findChild(dependency, "groupId").text();
String artifactId = findChild(dependency, "artifactId").text();
Set<String> types = this.bom.getLibraries().stream()
.flatMap((library) -> library.getGroups().stream())
.filter((group) -> group.getId().equals(groupId))
.flatMap((group) -> group.getModules().stream())
.filter((module) -> module.getName().equals(artifactId)).map(Module::getType)
.filter(Objects::nonNull).collect(Collectors.toSet());
if (types.size() > 1) {
throw new IllegalStateException(
"Multiple types for " + groupId + ":" + artifactId + ": " + types);
}
if (types.size() == 1) {
String type = types.iterator().next();
dependency.appendNode("type", type);
}
}
}
}
private void addPluginManagement(Node projectNode) {
for (Library library : this.bom.getLibraries()) {
for (Group group : library.getGroups()) {
Node plugins = findOrCreateNode(projectNode, "build", "pluginManagement", "plugins");
for (String pluginName : group.getPlugins()) {
Node plugin = new Node(plugins, "plugin");
plugin.appendNode("groupId", group.getId());
plugin.appendNode("artifactId", pluginName);
String versionProperty = library.getVersionProperty();
String value = (versionProperty != null) ? "${" + versionProperty + "}"
: library.getVersion().getVersion().toString();
plugin.appendNode("version", value);
}
}
}
}
private Node findOrCreateNode(Node parent, String... path) {
Node current = parent;
for (String nodeName : path) {
Node child = findChild(current, nodeName);
if (child == null) {
child = new Node(current, nodeName);
}
current = child;
}
return current;
}
private Node findChild(Node parent, String name) {
for (Object child : parent.children()) {
if (child instanceof Node) {
Node node = (Node) child;
if ((node.name() instanceof QName) && name.equals(((QName) node.name()).getLocalPart())) {
return node;
}
if (name.equals(node.name())) {
return node;
}
}
}
return null;
}
@SuppressWarnings("unchecked")
private List<Node> findChildren(Node parent, String name) {
return (List<Node>) parent.children().stream().filter((child) -> isNodeWithName(child, name))
.collect(Collectors.toList());
}
private boolean isNodeWithName(Object candidate, String name) {
if (candidate instanceof Node) {
Node node = (Node) candidate;
if ((node.name() instanceof QName) && name.equals(((QName) node.name()).getLocalPart())) {
return true;
}
if (name.equals(node.name())) {
return true;
}
}
return false;
}
}
}
| Java | 5 | techAi007/spring-boot | buildSrc/src/main/java/org/springframework/boot/build/bom/BomPlugin.java | [
"Apache-2.0"
] |
BEGIN { show = 0 ; print "/*" }
/^\-/ { trim = 1 ; print "" }
/^Package/ { show = 1 }
!NF { trim = 0 }
trim { sub("^ +", "", $0) }
show { print $0 }
END { print "*/\npackage " package_name }
| Awk | 4 | tcd/gofpdf | doc/go.awk | [
"MIT"
] |
/* @generated */
digraph cfg {
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_1" [label="1: Start call_method_with_default_parameters\nFormals: \nLocals: a_ptr:A* \n " color=yellow style=filled]
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_1" -> "call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_5" ;
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_2" [label="2: Exit call_method_with_default_parameters \n " color=yellow style=filled]
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_3" [label="3: Call _fun_A::fun_default \n n$0=*&a_ptr:A* [line 18, column 3]\n _=*n$0:A [line 18, column 3]\n n$2=_fun_A::fun_default(n$0:A*,1:int,10:int,20:int) [line 18, column 3]\n " shape="box"]
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_3" -> "call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_2" ;
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_4" [label="4: Call _fun_A::fun_default \n n$3=*&a_ptr:A* [line 17, column 3]\n _=*n$3:A [line 17, column 3]\n n$5=_fun_A::fun_default(n$3:A*,1:int,2:int,20:int) [line 17, column 3]\n " shape="box"]
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_4" -> "call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_3" ;
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_5" [label="5: Call _fun_A::fun_default \n n$6=*&a_ptr:A* [line 16, column 3]\n _=*n$6:A [line 16, column 3]\n n$8=_fun_A::fun_default(n$6:A*,1:int,2:int,3:int) [line 16, column 3]\n " shape="box"]
"call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_5" -> "call_method_with_default_parameters#7436997991634263214.eaaed1a0020d12e677ebd0f9049f2e4a_4" ;
"fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_1" [label="1: Start A::fun_default\nFormals: this:A* a:int b:int c:int\nLocals: \n " color=yellow style=filled]
"fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_1" -> "fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_3" ;
"fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_2" [label="2: Exit A::fun_default \n " color=yellow style=filled]
"fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_3" [label="3: Return Stmt \n n$0=*&a:int [line 11, column 59]\n n$1=*&b:int [line 11, column 63]\n n$2=*&c:int [line 11, column 67]\n " shape="box"]
"fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_3" -> "fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_4" ;
"fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_4" [label="4: Return Stmt \n *&return:int=((n$0 + n$1) + n$2) [line 11, column 52]\n " shape="box"]
"fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_4" -> "fun_default#A#(5743605731228394805).32785ab130b2379c4e3e1b8b23953e73_2" ;
}
| Graphviz (DOT) | 4 | JacobBarthelmeh/infer | infer/tests/codetoanalyze/cpp/shared/methods/default_parameters.cpp.dot | [
"MIT"
] |
/* This file was generated by upbc (the upb compiler) from the input
* file:
*
* envoy/config/core/v3/resolver.proto
*
* Do not edit -- your changes will be discarded when the file is
* regenerated. */
#ifndef ENVOY_CONFIG_CORE_V3_RESOLVER_PROTO_UPBDEFS_H_
#define ENVOY_CONFIG_CORE_V3_RESOLVER_PROTO_UPBDEFS_H_
#include "upb/def.h"
#include "upb/port_def.inc"
#ifdef __cplusplus
extern "C" {
#endif
#include "upb/def.h"
#include "upb/port_def.inc"
extern upb_def_init envoy_config_core_v3_resolver_proto_upbdefinit;
UPB_INLINE const upb_msgdef *envoy_config_core_v3_DnsResolverOptions_getmsgdef(upb_symtab *s) {
_upb_symtab_loaddefinit(s, &envoy_config_core_v3_resolver_proto_upbdefinit);
return upb_symtab_lookupmsg(s, "envoy.config.core.v3.DnsResolverOptions");
}
UPB_INLINE const upb_msgdef *envoy_config_core_v3_DnsResolutionConfig_getmsgdef(upb_symtab *s) {
_upb_symtab_loaddefinit(s, &envoy_config_core_v3_resolver_proto_upbdefinit);
return upb_symtab_lookupmsg(s, "envoy.config.core.v3.DnsResolutionConfig");
}
#ifdef __cplusplus
} /* extern "C" */
#endif
#include "upb/port_undef.inc"
#endif /* ENVOY_CONFIG_CORE_V3_RESOLVER_PROTO_UPBDEFS_H_ */
| C | 3 | warlock135/grpc | src/core/ext/upbdefs-generated/envoy/config/core/v3/resolver.upbdefs.h | [
"Apache-2.0"
] |
static const q7_t in_com1[300] = {
0xD0, 0xCE, 0x16, 0x9B, 0x19, 0xE4, 0x10, 0x06,
0x1E, 0x07, 0x12, 0xD5, 0xDA, 0x0D, 0xF4, 0xF4,
0xE5, 0xDE, 0x23, 0xD6, 0xC9, 0x27, 0x22, 0x08,
0x1D, 0x13, 0x80, 0xF4, 0x1A, 0x38, 0x15, 0x22,
0x57, 0x38, 0xEF, 0x26, 0x11, 0xD8, 0x04, 0x0E,
0xF5, 0xE7, 0xF7, 0x18, 0x12, 0x2C, 0x0B, 0xBD,
0xFA, 0x05, 0xC1, 0xED, 0x25, 0xD1, 0xFA, 0x1D,
0xFF, 0x10, 0x1B, 0x46, 0x4E, 0x93, 0xDF, 0x30,
0x05, 0xF0, 0xF7, 0x02, 0xF3, 0x1A, 0x0B, 0x0A,
0xCB, 0x17, 0xE7, 0xD5, 0xF9, 0xC8, 0x0B, 0xF9,
0x31, 0xC2, 0xDC, 0x43, 0xF4, 0xF2, 0x33, 0xF4,
0xAA, 0xC3, 0xFE, 0xCB, 0xF6, 0x2F, 0x0A, 0xEA,
0xF8, 0xCE, 0x0D, 0xE5, 0xDD, 0xD5, 0xEF, 0xF6,
0xFC, 0xCC, 0xE6, 0x06, 0x0B, 0x1B, 0xE8, 0xFB,
0xE0, 0xF8, 0xDD, 0x2E, 0x00, 0x3C, 0x0D, 0x43,
0x12, 0xD0, 0xFF, 0x03, 0xF9, 0x46, 0x12, 0x01,
0x37, 0xC5, 0x06, 0x07, 0x26, 0xD4, 0xF7, 0xDF,
0xF8, 0x19, 0x09, 0xF3, 0xCF, 0x36, 0x13, 0x32,
0xE9, 0xC3, 0xF2, 0x2B, 0x02, 0xB9, 0x30, 0x2C,
0x5C, 0xEC, 0x2A, 0xE4, 0xE2, 0x18, 0x4B, 0xE4,
0x37, 0x1F, 0x5A, 0x24, 0xB9, 0x0A, 0xE0, 0xE3,
0xED, 0xE4, 0xE3, 0xE1, 0xF7, 0x14, 0x14, 0xEA,
0x0C, 0xF6, 0xFB, 0x05, 0x35, 0xD2, 0xFF, 0x25,
0xBD, 0x0A, 0x15, 0x2B, 0xDB, 0x27, 0xE0, 0x1D,
0xE4, 0x3A, 0xFE, 0xF8, 0xFB, 0xFA, 0xD0, 0x00,
0x02, 0x35, 0x0D, 0xD2, 0x04, 0xCF, 0x08, 0xC6,
0x34, 0x07, 0xF6, 0xE8, 0xF3, 0xE6, 0x10, 0x29,
0x38, 0x04, 0x28, 0x1F, 0x12, 0xEC, 0xDE, 0xCF,
0xFA, 0x04, 0xC6, 0xE9, 0xEE, 0xFF, 0xD4, 0x28,
0x11, 0x1E, 0xFE, 0xD4, 0xE6, 0xF5, 0x06, 0x15,
0x20, 0xC6, 0xF7, 0x1B, 0x00, 0x5B, 0x34, 0xFB,
0xE6, 0xEB, 0xC7, 0x23, 0xF6, 0xBD, 0xE3, 0xDD,
0x00, 0x00, 0x1D, 0x56, 0xF7, 0xFB, 0xD4, 0x13,
0xD7, 0x0A, 0x32, 0x46, 0x46, 0x0C, 0x0B, 0xFB,
0xF9, 0x15, 0x2E, 0x01, 0xD4, 0xE1, 0xEA, 0x00,
0xB8, 0xEC, 0xBA, 0x37, 0x04, 0xFF, 0x1B, 0xE3,
0x37, 0x05, 0x20, 0xB6, 0xFA, 0x0A, 0xB4, 0xD7,
0x03, 0x1E, 0x0B, 0x0B
};
static const q7_t in_com2[300] = {
0x30, 0x32, 0x16, 0x65, 0x19, 0x1C, 0x10, 0x06,
0x1E, 0x07, 0x12, 0x2B, 0x26, 0x0D, 0x0C, 0x0C,
0x1B, 0x22, 0x23, 0x2A, 0x37, 0x27, 0x22, 0x08,
0x1D, 0x13, 0x7F, 0x0C, 0x1A, 0x38, 0x15, 0x22,
0x57, 0x38, 0x11, 0x26, 0x11, 0x28, 0x04, 0x0E,
0x0B, 0x19, 0x09, 0x18, 0x12, 0x2C, 0x0B, 0x43,
0x06, 0x05, 0x3F, 0x13, 0x25, 0x2F, 0x06, 0x1D,
0x01, 0x10, 0x1B, 0x46, 0x4E, 0x6D, 0x21, 0x30,
0x05, 0x10, 0x09, 0x02, 0x0D, 0x1A, 0x0B, 0x0A,
0x35, 0x17, 0x19, 0x2B, 0x07, 0x38, 0x0B, 0x07,
0x31, 0x3E, 0x24, 0x43, 0x0C, 0x0E, 0x33, 0x0C,
0x56, 0x3D, 0x02, 0x35, 0x0A, 0x2F, 0x0A, 0x16,
0x08, 0x32, 0x0D, 0x1B, 0x23, 0x2B, 0x11, 0x0A,
0x04, 0x34, 0x1A, 0x06, 0x0B, 0x1B, 0x18, 0x05,
0x20, 0x08, 0x23, 0x2E, 0x00, 0x3C, 0x0D, 0x43,
0x12, 0x30, 0x01, 0x03, 0x07, 0x46, 0x12, 0x01,
0x37, 0x3B, 0x06, 0x07, 0x26, 0x2C, 0x09, 0x21,
0x08, 0x19, 0x09, 0x0D, 0x31, 0x36, 0x13, 0x32,
0x17, 0x3D, 0x0E, 0x2B, 0x02, 0x47, 0x30, 0x2C,
0x5C, 0x14, 0x2A, 0x1C, 0x1E, 0x18, 0x4B, 0x1C,
0x37, 0x1F, 0x5A, 0x24, 0x47, 0x0A, 0x20, 0x1D,
0x13, 0x1C, 0x1D, 0x1F, 0x09, 0x14, 0x14, 0x16,
0x0C, 0x0A, 0x05, 0x05, 0x35, 0x2E, 0x01, 0x25,
0x43, 0x0A, 0x15, 0x2B, 0x25, 0x27, 0x20, 0x1D,
0x1C, 0x3A, 0x02, 0x08, 0x05, 0x06, 0x30, 0x00,
0x02, 0x35, 0x0D, 0x2E, 0x04, 0x31, 0x08, 0x3A,
0x34, 0x07, 0x0A, 0x18, 0x0D, 0x1A, 0x10, 0x29,
0x38, 0x04, 0x28, 0x1F, 0x12, 0x14, 0x22, 0x31,
0x06, 0x04, 0x3A, 0x17, 0x12, 0x01, 0x2C, 0x28,
0x11, 0x1E, 0x02, 0x2C, 0x1A, 0x0B, 0x06, 0x15,
0x20, 0x3A, 0x09, 0x1B, 0x00, 0x5B, 0x34, 0x05,
0x1A, 0x15, 0x39, 0x23, 0x0A, 0x43, 0x1D, 0x23,
0x00, 0x00, 0x1D, 0x56, 0x09, 0x05, 0x2C, 0x13,
0x29, 0x0A, 0x32, 0x46, 0x46, 0x0C, 0x0B, 0x05,
0x07, 0x15, 0x2E, 0x01, 0x2C, 0x1F, 0x16, 0x00,
0x48, 0x14, 0x46, 0x37, 0x04, 0x01, 0x1B, 0x1D,
0x37, 0x05, 0x20, 0x4A, 0x06, 0x0A, 0x4C, 0x29,
0x03, 0x1E, 0x0B, 0x0B
};
static const q7_t in_absminmax[300] = {
0xF0, 0xE7, 0xD7, 0xDE, 0x1A, 0xF4, 0x01, 0x19,
0x0C, 0xDD, 0x0C, 0x09, 0x12, 0x2D, 0xC9, 0xDF,
0x3A, 0xFC, 0xE7, 0xFC, 0x16, 0xEF, 0xF5, 0x43,
0xEC, 0x05, 0xD4, 0xD0, 0x1D, 0xF5, 0xFC, 0xEE,
0xF7, 0xE6, 0xFD, 0x14, 0x26, 0x08, 0xDA, 0xE0,
0x26, 0x12, 0x00, 0x0F, 0xDF, 0xD7, 0xF4, 0xFE,
0xE8, 0x00, 0xDD, 0x15, 0x2F, 0x1F, 0x37, 0xDF,
0xEB, 0x10, 0x13, 0x2A, 0xEA, 0xF9, 0xE2, 0x38,
0xFD, 0xDF, 0xF8, 0x10, 0x06, 0xD0, 0x0C, 0x0E,
0xF6, 0xED, 0xCA, 0x43, 0xF4, 0xEA, 0xEA, 0x17,
0x2B, 0x01, 0xE5, 0x06, 0x3C, 0x12, 0x54, 0x35,
0x36, 0x09, 0xE5, 0xAB, 0xF3, 0xF1, 0x06, 0xE1,
0x39, 0xF6, 0xE7, 0xFC, 0xFA, 0xF9, 0x20, 0x3E,
0xDB, 0x28, 0x10, 0x22, 0x09, 0xF8, 0xE3, 0xCE,
0xFF, 0x15, 0x27, 0xCE, 0xBC, 0xBC, 0x34, 0xEE,
0xF2, 0xD3, 0x2A, 0xE9, 0x08, 0x5D, 0x12, 0x22,
0xC4, 0x14, 0x20, 0xF4, 0xD3, 0x10, 0xF1, 0x9C,
0xE3, 0x29, 0x1E, 0xA6, 0xE3, 0x0E, 0x50, 0xF8,
0x06, 0xFC, 0x53, 0x27, 0xD6, 0x3D, 0x19, 0x23,
0xFD, 0xBF, 0x36, 0x04, 0xF1, 0x21, 0xFD, 0x1E,
0xE9, 0x2C, 0xBD, 0x21, 0xF2, 0x10, 0xF5, 0x9F,
0xC5, 0x65, 0xC0, 0x07, 0x39, 0x00, 0x11, 0xDA,
0xB2, 0x34, 0x11, 0xC3, 0x2D, 0xFD, 0xE8, 0x10,
0x54, 0x24, 0xCD, 0x26, 0xAA, 0x04, 0xF7, 0x12,
0xBE, 0x44, 0xE2, 0x00, 0xE4, 0x1B, 0xBB, 0x0C,
0x1A, 0x2F, 0xDD, 0xF5, 0x12, 0x23, 0xE9, 0xEB,
0xCD, 0xDA, 0x11, 0x20, 0xE2, 0xDB, 0x01, 0x0D,
0xFF, 0x42, 0x09, 0x44, 0xD4, 0x0D, 0xE8, 0xFC,
0xC0, 0x0D, 0xFB, 0xE2, 0xF2, 0xCE, 0xF2, 0x21,
0xD8, 0x63, 0x0F, 0x28, 0x02, 0x0B, 0xCE, 0xB7,
0xD5, 0xE5, 0xFE, 0x9E, 0x1F, 0x17, 0xC9, 0x01,
0xF7, 0x5E, 0x03, 0xE1, 0xF9, 0xFD, 0x09, 0xD1,
0xF6, 0xF7, 0x04, 0x16, 0xB4, 0x02, 0x7F, 0xDB,
0x40, 0xF1, 0x1E, 0xEA, 0x08, 0xDF, 0xEF, 0x06,
0x90, 0xE3, 0xDC, 0x1B, 0xFD, 0xFF, 0xD6, 0xA6,
0x0A, 0xD3, 0x0A, 0x0D, 0xEB, 0xE0, 0x3D, 0xF6,
0x07, 0x16, 0xAD, 0xF2, 0x09, 0xF2, 0xE9, 0xDE,
0xF3, 0xF0, 0xD2, 0x0D
};
static const q7_t in_max_maxidx[280] = {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x66, 0x73
};
/* NOTE(review): generated CMSIS-DSP q7 statistics test patterns.  The hex
 * payloads are reference data - regenerate with the pattern tooling rather
 * than editing by hand.  Comments below only label each table; the meaning
 * of individual values is inferred from the array names (TODO confirm
 * against the generator). */

/* Input pattern (280 q7 samples) used by the min/max-index tests. */
static const q7_t in_min_maxidx[280] = {
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x0D, 0x00
};
/* Input pattern (280 q7 samples) used by the absolute-max-index tests. */
static const q7_t in_absmax_maxidx[280] = {
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x66, 0x73
};
/* Input pattern (280 q7 samples) used by the absolute-min-index tests. */
static const q7_t in_absmin_maxidx[280] = {
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x73,
    0x73, 0x73, 0x73, 0x73, 0x73, 0x73, 0x0D, 0x00
};
/* Expected results.  ref_*_val hold the expected values and ref_*_idx the
 * expected (uint16) positions for the max/min/absmax/absmin kernels at the
 * test's different input lengths; ref_mean and ref_power are the expected
 * mean (q7) and power (q31) outputs. */
static const q7_t ref_max_val[4] = {
    0x1E, 0x38, 0x57, 0x73
};
static const uint16_t ref_max_idx[4] = {
    0x0008, 0x001D, 0x0020, 0x0117
};
static const q7_t ref_min_val[4] = {
    0x9B, 0x80, 0x80, 0x00
};
static const uint16_t ref_min_idx[4] = {
    0x0003, 0x001A, 0x001A, 0x0117
};
static const q7_t ref_absmax_val[4] = {
    0x37, 0x43, 0x43, 0x73
};
static const uint16_t ref_absmax_idx[4] = {
    0x000E, 0x0017, 0x0017, 0x0117
};
static const q7_t ref_absmin_val[4] = {
    0x01, 0x01, 0x00, 0x00
};
static const uint16_t ref_absmin_idx[4] = {
    0x0006, 0x0006, 0x002A, 0x0117
};
static const q7_t ref_mean[4] = {
    0x1E, 0x21, 0x20, 0x20
};
static const q31_t ref_power[3] = {
    0x00005635, 0x0000DAB8, 0x0001224B
};
| Max | 1 | Trifunik/zephyr | tests/lib/cmsis_dsp/statistics/src/q7.pat | [
"Apache-2.0"
] |
import CalendarLocale from 'rc-picker/lib/locale/hr_HR';
import TimePickerLocale from '../../time-picker/locale/hr_HR';
import { PickerLocale } from '../generatePicker';
// Croatian (hr_HR) locale for the DatePicker, built from the shared
// rc-picker calendar strings plus the Croatian TimePicker strings.
// The full set of overridable keys is listed at:
// https://github.com/ant-design/ant-design/blob/master/components/date-picker/locale/example.json
const lang: PickerLocale['lang'] = {
  placeholder: 'Odaberite datum',
  yearPlaceholder: 'Odaberite godinu',
  quarterPlaceholder: 'Odaberite četvrtinu',
  monthPlaceholder: 'Odaberite mjesec',
  weekPlaceholder: 'Odaberite tjedan',
  rangePlaceholder: ['Početni datum', 'Završni datum'],
  rangeYearPlaceholder: ['Početna godina', 'Završna godina'],
  rangeMonthPlaceholder: ['Početni mjesec', 'Završni mjesec'],
  rangeWeekPlaceholder: ['Početni tjedan', 'Završni tjedan'],
  // Spread last so CalendarLocale keys take precedence — same order as before.
  ...CalendarLocale,
};

const locale: PickerLocale = {
  lang,
  timePickerLocale: { ...TimePickerLocale },
};

export default locale;
| TypeScript | 4 | jawmeschege/ant-design | components/date-picker/locale/hr_HR.tsx | [
"MIT"
] |
[lang] {}
[ lang] {}
[lang ] {}
[ lang ] {}
[ lang ] {}
[
lang
] {}
span[lang] {}
span[ lang] {}
span[lang ] {}
span[ lang ] {}
span[ lang ] {}
span[lang='pt'] {}
span[lang ='pt'] {}
span[lang= 'pt'] {}
span[lang = 'pt'] {}
span[lang = 'pt'] {}
span[lang='pt' ] {}
span[lang='pt' ] {}
span[
lang
=
'pt'
] {}
span[ lang ~= 'en-us' ] {}
span[ lang ~= 'en-us' ] {}
span[ lang |='zh' ] {}
span[
lang
~=
'en-us'
] {}
a[ href ^= '#' ] {}
a[ href $= '.cn' ] {}
a[ href *= 'example' ] {}
a[
href
*=
'example'
] {}
input[ type = 'radio' i ] {}
input[ type = 'radio' i ] {}
input[ type ~= 'radio' i ] {}
input[ type ~= 'radio' i ] {}
input[
type
~=
'radio'
i
] {}
img[ alt = 'person' ][ src = 'lorem' ] {}
img[ alt = 'person' ][ src = 'lorem' ] {}
img[ alt ~= 'person' ][ src *= 'lorem' ] {}
img[ alt ~= 'person' ][ src *= 'lorem' ] {}
img[
alt
~=
'person'
][
src
*=
'lorem'
] {}
section:has(:not([type='radio'], [type='checkbox'])) {}
section:has(:not([type='radio' i], [type='checkbox' i])) {}
section:has(:not([ type = 'radio' ], [ type = 'checkbox' ])) {}
section:has(:not([ type = 'radio' i ], [ type = 'checkbox' i ])) {}
section:has(:not([ type = 'radio' ], [ type = 'checkbox' ])) {}
section:has(:not([ type = 'radio' i ], [ type = 'checkbox' i ])) {}
section:has(:not([
type
=
'radio'
], [
type
=
'checkbox'
])) {}
section:has(:not([
type
=
'radio'
i
], [
type
=
'checkbox'
i
])) {}
[foo|att=val] {}
[ foo | att = val ] {}
[ foo | att = val ] {}
[
foo
|
att
=
val
] {}
[*|att] {}
[ * | att ] {}
[ * | att ] {}
[
*
|
att
] {}
[|att] {}
[ | att ] {}
[ | att ] {}
[
|
att
] {}
| CSS | 1 | fuelingtheweb/prettier | tests/css_attribute/spaces.css | [
"MIT"
] |
--TEST--
Bug #68503 (date_diff on two dates with timezone set localised returns wrong results)
--FILE--
<?php
// Regression test: the interval between the same two wall-clock dates must
// not depend on the default timezone in effect.
date_default_timezone_set('Europe/London');
echo date_diff(new DateTime("2015-02-01"), new DateTime("2015-05-01"))->format( '%yY %mM %dD' ), "\n";
// Repeat with UTC; the output must match the line above.
date_default_timezone_set('UTC');
echo date_diff(new DateTime("2015-02-01"), new DateTime("2015-05-01"))->format( '%yY %mM %dD' ), "\n";
?>
--EXPECT--
0Y 3M 0D
0Y 3M 0D
| PHP | 3 | NathanFreeman/php-src | ext/date/tests/bug68503.phpt | [
"PHP-3.01"
] |
/*
    GNU linker script for SAMD21
*/

/* Specify the memory areas */
MEMORY
{
    /* Image starts 8 KiB into the 256 KiB flash, and 8 KiB is subtracted
       from the usable length.  NOTE(review): the reserved regions are
       presumably the bootloader (bottom) and on-chip storage (top) --
       confirm against the port's flash layout docs. */
    FLASH (rx) : ORIGIN = 0x00002000, LENGTH = 256K - 8K
    RAM (xrw) : ORIGIN = 0x20000000, LENGTH = 32K
}

/* Top end of the stack, with room for double-tap variable */
_estack = ORIGIN(RAM) + LENGTH(RAM) - 8;
/* The stack grows down from _estack; 8 KiB is reserved for it. */
_sstack = _estack - 8K;

/* The heap spans from the end of .bss (_ebss is provided by the common
   sections script, not visible here) up to the bottom of the stack. */
_sheap = _ebss;
_eheap = _sstack;
"MIT"
] |
def http(request):
    """Respond to any HTTP request with a fixed greeting.

    Args:
        request (flask.Request): HTTP request object.  Unused: the response
            does not depend on the request.

    Returns:
        The response text or any set of values that can be turned into a
        Response object using
        `make_response <http://flask.pocoo.org/docs/1.0/api/#flask.Flask.make_response>`.
    """
    # Plain literal: the previous f-string had no placeholders (lint F541).
    return 'Hello World!'
| Python | 4 | Arun-kc/serverless | lib/plugins/create/templates/google-python/main.py | [
"MIT"
] |
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(* Proofs about llvm to llair translation that do involve the semantics *)
open HolKernel boolLib bossLib Parse lcsymtacs;
open listTheory arithmeticTheory pred_setTheory finite_mapTheory wordsTheory integer_wordTheory;
open optionTheory rich_listTheory pathTheory alistTheory pairTheory sumTheory;
open settingsTheory miscTheory memory_modelTheory;
open llvmTheory llvm_propTheory llvm_ssaTheory llairTheory llair_propTheory llvm_to_llairTheory;
open llvm_to_llair_propTheory;
new_theory "llvm_to_llair_sem_prop";
set_grammar_ancestry ["llvm", "llair", "llair_prop", "llvm_to_llair", "llvm_ssa", "llvm_to_llair_prop"];
numLib.prefer_num ();
(* Map an LLVM observable event to the corresponding llair event; only
 * writes to global variables need renaming, via the global map. *)
Definition translate_trace_def:
  (translate_trace gmap Tau = Tau) ∧
  (translate_trace gmap Error = Error) ∧
  (translate_trace gmap (Exit i) = (Exit i)) ∧
  (translate_trace gmap (W gv bytes) = W (translate_glob_var gmap gv) bytes)
End
(* Value relation between LLVM and llair runtime values: machine words map
 * to llair integers of the same bit width (pointers to llair's
 * pointer_size), and aggregates are related element-wise. *)
Inductive v_rel:
  (∀w. v_rel (FlatV (PtrV w)) (FlatV (IntV (w2i w) llair$pointer_size))) ∧
  (∀w. v_rel (FlatV (W1V w)) (FlatV (IntV (w2i w) 1))) ∧
  (∀w. v_rel (FlatV (W8V w)) (FlatV (IntV (w2i w) 8))) ∧
  (∀w. v_rel (FlatV (W32V w)) (FlatV (IntV (w2i w) 32))) ∧
  (∀w. v_rel (FlatV (W64V w)) (FlatV (IntV (w2i w) 64))) ∧
  (∀vs1 vs2.
    list_rel v_rel vs1 vs2
    ⇒
    v_rel (AggV vs1) (AggV vs2))
End
(* Prefix of an instruction list up to (and including) the first terminator
 * or call - the unit that translates to a single llair block. *)
Definition take_to_call_def:
  (take_to_call [] = []) ∧
  (take_to_call (i::is) =
    if terminator i ∨ is_call i then [i] else i :: take_to_call is)
End
(* Relate an LLVM instruction pointer to a llair block pointer: either both
 * sit at the start of corresponding (translated) blocks, or the LLVM side
 * is at a phi group and the llair side at the generated move block. *)
Inductive pc_rel:
  (* LLVM side points to a normal instruction *)
  (∀prog emap ip bp d b idx b' prev_i gmap (*rest*).
    (* Both are valid pointers to blocks in the same function *)
    dest_fn ip.f = label_to_fname bp ∧
    alookup prog ip.f = Some d ∧
    alookup d.blocks ip.b = Some b ∧
    ip.i = Offset idx ∧
    idx < length b.body ∧
    get_block (translate_prog prog) bp b' ∧
    (* The LLVM side is at the start of a block, or immediately following a
     * call, which will also start a new block in llair *)
    (idx ≠ 0 ⇒ get_instr prog (ip with i := Offset (idx - 1)) (Inl prev_i) ∧
               is_call prev_i) ∧
    (bp, b')::rest =
      fst (translate_instrs (translate_label (dest_fn ip.f) ip.b (num_calls (take idx b.body)))
             gmap emap (get_regs_to_keep d) (take_to_call (drop idx b.body)))
    ⇒
    pc_rel prog gmap emap ip bp) ∧

  (* If the LLVM side points to phi instructions, the llair side
   * should point to a block generated from them *)
  (∀prog gmap emap ip from_l phis to_l bp.
    bp = Mov_name (dest_fn ip.f) (option_map dest_label from_l) to_l ∧
    get_instr prog ip (Inr (from_l, phis)) ∧
    ip.b = Some (Lab to_l) ∧
    (* We should have just jumped here from block from_l *)
    (∃d b. alookup prog ip.f = Some d ∧
           alookup d.blocks from_l = Some b ∧
           ip.b ∈ set (map Some (instr_to_labs (last b.body)))) (*∧
    get_block (translate_prog prog) bp
      (generate_move_block (dest_fn ip.f) gmap emap phis from_l (Lab to_l)) *)
    ⇒
    pc_rel prog gmap emap ip bp)
End
(* Define when an LLVM state is related to a llair one.
 * Parameterised on a map for locals relating LLVM registers to llair
 * expressions that compute the value in that register. This corresponds to part
 * of the translation's state.
 *)
(* A single register r is tracked: it is bound in the LLVM locals, mapped by
 * emap to a llair expression, and that expression evaluates (in the llair
 * state) to a value related to the LLVM one. *)
Definition emap_invariant_def:
  emap_invariant prog emap s s' r =
    ∃v v' e.
      v_rel v.value v' ∧
      flookup s.locals r = Some v ∧
      flookup emap r = Some e ∧ eval_exp s' e v'
End

Definition local_state_rel_def:
  local_state_rel prog emap s s' ⇔
    (* Live LLVM registers are mapped and have a related value in the emap
     * (after evaluating) *)
    (∀r. r ∈ live prog s.ip ⇒ emap_invariant prog emap s s' r)
End

(* The global maps are in bijection (via translate_glob_var), and each LLVM
 * global's address word equals the llair address and fits in pointer_size. *)
Definition globals_rel_def:
  globals_rel gmap gl gl' ⇔
    BIJ (translate_glob_var gmap) (fdom gl) (fdom gl') ∧
    ∀k. k ∈ fdom gl ⇒
      nfits (w2n (snd (gl ' k))) llair$pointer_size ∧
      w2n (snd (gl ' k)) = gl' ' (translate_glob_var gmap k)
End

(* Everything except the program counters: locals (via local_state_rel),
 * reachability of the LLVM ip, globals, a well-formed heap equal up to
 * tags, and matching statuses. *)
Definition mem_state_rel_def:
  mem_state_rel prog gmap emap (s:llvm$state) (s':llair$state) ⇔
    local_state_rel prog emap s s' ∧
    reachable prog s.ip ∧
    globals_rel gmap s.globals s'.glob_addrs ∧
    heap_ok s.heap ∧
    erase_tags s.heap = s'.heap ∧
    s.status = s'.status
End

(* Define when an LLVM state is related to a llair one
 * Parameterised on a map for locals relating LLVM registers to llair
 * expressions that compute the value in that register. This corresponds to part
 * of the translation's state.
 *)
Definition state_rel_def:
  state_rel prog gmap emap (s:llvm$state) (s':llair$state) ⇔
    (s.status = Partial ⇒ pc_rel prog gmap emap s.ip s'.bp) ∧
    mem_state_rel prog gmap emap s s'
End
(* mem_state_rel does not look at the llair block pointer. *)
Theorem mem_state_ignore_bp[simp]:
  ∀prog gmap emap s s' b.
    mem_state_rel prog gmap emap s (s' with bp := b) ⇔
    mem_state_rel prog gmap emap s s'
Proof
  rw [local_state_rel_def, mem_state_rel_def, emap_invariant_def] >> eq_tac >> rw [] >>
  first_x_assum drule >> rw [] >>
  `eval_exp (s' with bp := b) e v' ⇔ eval_exp s' e v'`
    by (irule eval_exp_ignores >> rw []) >>
  metis_tac []
QED

(* Setting the status leaves locals and global addresses untouched. *)
Triviality lemma:
  ((s:llair$state) with status := Complete code).locals = s.locals ∧
  ((s:llair$state) with status := Complete code).glob_addrs = s.glob_addrs
Proof
  rw []
QED

(* mem_state_rel is preserved when both sides exit with the same code. *)
Theorem mem_state_rel_exited:
  ∀prog gmap emap s s' code.
    mem_state_rel prog gmap emap s s'
    ⇒
    mem_state_rel prog gmap emap (s with status := Complete code) (s' with status := Complete code)
Proof
  rw [mem_state_rel_def, local_state_rel_def, emap_invariant_def] >>
  metis_tac [eval_exp_ignores, lemma]
QED

(* Stepping the LLVM ip past an instruction that assigns nothing keeps
 * mem_state_rel with no change to the llair state. *)
Theorem mem_state_rel_no_update:
  ∀prog gmap emap s1 s1' v res_v r i i'.
    assigns prog s1.ip = {} ∧
    mem_state_rel prog gmap emap s1 s1' ∧
    i ∈ next_ips prog s1.ip
    ⇒
    mem_state_rel prog gmap emap (s1 with ip := i) s1'
Proof
  rw [mem_state_rel_def, local_state_rel_def, emap_invariant_def]
  >- (
    first_x_assum (qspec_then `r` mp_tac) >> simp [Once live_gen_kill, PULL_EXISTS] >>
    metis_tac [next_ips_same_func])
  >- metis_tac [next_ips_reachable]
QED

(* An instruction classified as Exp r t assigns exactly {(r,t)}. *)
Theorem exp_assigns_sing:
  ∀inst prog ip r t.
    get_instr prog ip (Inl inst) ∧ classify_instr inst = Exp r t ⇒ assigns prog ip = {(r,t)}
Proof
  rw [get_instr_cases, EXTENSION, IN_DEF, assigns_cases, PULL_EXISTS] >>
  Cases_on `el idx b.body` >> fs [classify_instr_def, instr_assigns_def] >>
  Cases_on `p` >> fs [classify_instr_def, instr_assigns_def]
QED
(* Updating an LLVM register that is NOT kept as a llair register (its value
 * lives in the emap expression instead) preserves mem_state_rel with no
 * change to the llair state. *)
Theorem mem_state_rel_update:
  ∀prog gmap emap s1 s1' regs_to_keep v res_v r e i inst.
    good_emap s1.ip.f prog regs_to_keep gmap emap ∧
    get_instr prog s1.ip (Inl inst) ∧
    classify_instr inst = Exp r t ∧
    r ∉ regs_to_keep ∧
    mem_state_rel prog gmap emap s1 s1' ∧
    eval_exp s1' (translate_instr_to_exp gmap emap inst) res_v ∧
    v_rel v.value res_v ∧
    i ∈ next_ips prog s1.ip
    ⇒
    mem_state_rel prog gmap emap
      (s1 with <|ip := i; locals := s1.locals |+ (r, v) |>)
      s1'
Proof
  rw [mem_state_rel_def, local_state_rel_def, emap_invariant_def, good_emap_def]
  >- (
    rw [FLOOKUP_UPDATE]
    >- (
      HINT_EXISTS_TAC >> rw [] >>
      first_x_assum (qspec_then `s1.ip` mp_tac) >> simp [] >> rw [] >>
      first_x_assum (qspecl_then [`r`, `t`, `inst`] mp_tac) >> rw []) >>
    `i.f = s1.ip.f` by metis_tac [next_ips_same_func] >> simp [] >>
    first_x_assum irule >>
    simp [Once live_gen_kill, PULL_EXISTS, METIS_PROVE [] ``x ∨ y ⇔ (~y ⇒ x)``] >>
    drule exp_assigns_sing >> rw [] >>
    metis_tac [exp_assigns_sing])
  >- metis_tac [next_ips_reachable]
QED
(* Bulk parallel update of assigned registers, case 1: for a register r that
 * IS among the assigned (r,t) pairs and whose emap entry is its own llair
 * variable, the emap invariant holds after extending both locals maps with
 * related values. *)
Theorem emap_inv_updates_keep_same_ip1:
  ∀prog emap ip s s' vs res_vs rtys r t.
    list_rel v_rel (map (\v. v.value) vs) res_vs ∧
    length rtys = length vs ∧
    (r,t) ∈ set rtys ∧
    all_distinct (map fst rtys) ∧
    flookup emap r = Some (Var (translate_reg r t) F)
    ⇒
    emap_invariant prog emap
      (s with locals := s.locals |++ zip (map fst rtys, vs))
      (s' with locals := s'.locals |++ zip (map (\(r,ty). translate_reg r ty) rtys, res_vs))
      r
Proof
  rw [emap_invariant_def, flookup_fupdate_list] >>
  CASE_TAC >> rw []
  >- (
    fs [ALOOKUP_NONE, MAP_REVERSE] >> rfs [MAP_ZIP] >> fs [MEM_MAP] >>
    metis_tac [FST]) >>
  rename [`alookup (reverse (zip _)) _ = Some v`] >>
  fs [Once MEM_SPLIT_APPEND_last] >>
  fs [alookup_some, MAP_EQ_APPEND, reverse_eq_append] >> rw [] >>
  rfs [zip_eq_append] >> rw [] >> rw [] >>
  fs [] >> rw [] >>
  qpat_x_assum `reverse _ ++ _ = zip _` (mp_tac o GSYM) >> rw [zip_eq_append] >>
  fs [] >> rw [] >>
  rename [`[_] = zip (x,y)`] >>
  Cases_on `x` >> Cases_on `y` >> fs [] >>
  rw [] >> fs [LIST_REL_SPLIT1] >> rw [] >>
  HINT_EXISTS_TAC >> rw [] >>
  rw [Once eval_exp_cases, flookup_fupdate_list] >>
  qmatch_goalsub_abbrev_tac `reverse (zip (a, b))` >>
  `length a = length b`
    by (
      rw [Abbr `a`, Abbr `b`] >>
      metis_tac [LIST_REL_LENGTH, LENGTH_MAP, LENGTH_ZIP, LENGTH_REVERSE, ADD_COMM, ADD_ASSOC]) >>
  CASE_TAC >> rw [] >> fs [alookup_some, reverse_eq_append]
  >- (fs [ALOOKUP_NONE] >> rfs [MAP_REVERSE, MAP_ZIP] >> fs [Abbr `a`]) >>
  rfs [zip_eq_append] >>
  unabbrev_all_tac >>
  rw [] >>
  qpat_x_assum `reverse _ ++ _ = zip _` (mp_tac o GSYM) >> rw [zip_eq_append] >>
  fs [] >> rw [] >>
  rename [`[_] = zip (a,b)`] >>
  Cases_on `a` >> Cases_on `b` >> fs [] >>
  rw [] >> fs [] >> rw [] >>
  fs [ALOOKUP_NONE] >> fs [] >>
  rev_full_simp_tac pure_ss [SWAP_REVERSE_SYM] >>
  rw [] >> fs [MAP_REVERSE] >> rfs [MAP_ZIP] >>
  fs [MIN_DEF] >>
  BasicProvers.EVERY_CASE_TAC >> fs [] >>
  rfs [] >> rw [] >>
  fs [MAP_MAP_o, combinTheory.o_DEF, LAMBDA_PROD] >>
  rename [`map fst l1 ++ [_] ++ map fst l2 = l3 ++ [_] ++ l4`,
          `map _ l1 ++ [translate_reg _ _] ++ _ = l5 ++ _ ++ l6`,
          `l7 ++ [v1:llair$flat_v reg_v] ++ l8 = l9 ++ [v2] ++ l10`] >>
  `map fst l1 = l3 ∧ map fst l2 = l4`
    by (
      irule append_split_last >>
      qexists_tac `h` >> rw [MEM_MAP] >>
      CCONTR_TAC >> fs [] >>
      `all_distinct (map fst l1 ++ [fst y] ++ map fst l2)` by metis_tac [] >>
      fs [ALL_DISTINCT_APPEND, MEM_MAP] >>
      metis_tac [FST, pair_CASES]) >>
  `length l2 = length l6` suffices_by metis_tac [append_split_eq, LIST_REL_LENGTH, LENGTH_MAP] >>
  rename1 `translate_reg r t` >>
  `~mem (translate_reg r t) (map (λ(r,ty). translate_reg r ty) l2)`
    by (
      rw [MEM_MAP] >> pairarg_tac >> fs [] >>
      rename1 `translate_reg r1 t1 = translate_reg r2 t2` >>
      Cases_on `r1` >> Cases_on `r2` >> rw [translate_reg_def] >>
      metis_tac [MEM_MAP, FST]) >>
  metis_tac [append_split_last, LENGTH_MAP]
QED
(* Bulk parallel update, case 2: for a live register r that is NOT among the
 * assigned ones, the existing emap invariant survives the update - SSA
 * guarantees the freshly assigned llair registers cannot occur in r's
 * mapped expression. *)
Theorem emap_inv_updates_keep_same_ip2:
  ∀prog emap s s' vs res_vs rtys r regs_to_keep gmap.
    is_ssa prog ∧
    good_emap s.ip.f prog regs_to_keep gmap emap ∧
    r ∈ live prog s.ip ∧
    assigns prog s.ip = set rtys ∧
    emap_invariant prog emap s s' r ∧
    list_rel v_rel (map (\v. v.value) vs) res_vs ∧
    length rtys = length vs ∧
    reachable prog s.ip ∧
    ¬mem r (map fst rtys)
    ⇒
    emap_invariant prog emap
      (s with locals := s.locals |++ zip (map fst rtys, vs))
      (s' with locals := s'.locals |++ zip (map (\(r,ty). translate_reg r ty) rtys, res_vs))
      r
Proof
  rw [emap_invariant_def, alistTheory.flookup_fupdate_list] >> rw [] >>
  CASE_TAC >> rw []
  >- (
    qexists_tac `v'` >> rw [] >>
    qmatch_goalsub_abbrev_tac `eval_exp s_upd _ _` >>
    `DRESTRICT s_upd.locals (exp_uses e) = DRESTRICT s'.locals (exp_uses e) ∧
     s_upd.glob_addrs = s'.glob_addrs`
      suffices_by metis_tac [eval_exp_ignores_unused] >>
    rw [Abbr `s_upd`] >>
    qmatch_goalsub_abbrev_tac `_ |++ l = _` >>
    `l = []` suffices_by rw [FUPDATE_LIST_THM] >>
    rw [Abbr `l`, FILTER_EQ_NIL, LAMBDA_PROD] >>
    `(λ(p1,p2:llair$flat_v reg_v). p1 ∉ exp_uses e) = (\x. fst x ∉ exp_uses e)`
      by (rw [EXTENSION, IN_DEF] >> pairarg_tac >> rw []) >>
    `length rtys = length res_vs` by metis_tac [LIST_REL_LENGTH, LENGTH_MAP] >>
    rw [every_zip_fst, EVERY_MAP] >> rw [LAMBDA_PROD] >>
    rw [EVERY_EL] >> pairarg_tac >> rw [] >>
    qmatch_goalsub_rename_tac `translate_reg r1 ty1 ∉ exp_uses _` >>
    fs [good_emap_def] >>
    first_x_assum (qspec_then `s.ip` mp_tac) >> rw [] >>
    first_x_assum drule >> simp [] >>
    disch_then (qspec_then `translate_reg r1 ty1` mp_tac) >> rw [] >>
    CCONTR_TAC >> fs [] >>
    `ip_equiv ip2 s.ip`
      by (
        fs [is_ssa_def, EXTENSION, IN_DEF] >>
        Cases_on `r1` >> fs [translate_reg_def, untranslate_reg_def] >>
        rename1 `Reg reg = fst regty` >>
        Cases_on `regty` >> fs [] >> rw [] >>
        `assigns prog s.ip (Reg reg, ty1)`
          suffices_by metis_tac [reachable_dominates_same_func, FST] >>
        metis_tac [EL_MEM, IN_DEF]) >>
    metis_tac [dominates_irrefl, ip_equiv_dominates2]) >>
  drule ALOOKUP_MEM >> rw [MEM_MAP, MEM_ZIP] >>
  metis_tac [EL_MEM, LIST_REL_LENGTH, LENGTH_MAP]
QED
(* local_state_rel survives stepping the ip, provided every register the
 * current instruction assigns already satisfies the emap invariant. *)
Theorem local_state_rel_next_ip:
  ∀prog emap ip2 s s'.
    local_state_rel prog emap s s' ∧
    ip2 ∈ next_ips prog s.ip ∧
    (∀r. r ∈ assigns prog s.ip ⇒ emap_invariant prog emap s s' (fst r))
    ⇒
    local_state_rel prog emap (s with ip := ip2) s'
Proof
  rw [local_state_rel_def, emap_invariant_def] >>
  Cases_on `r ∈ live prog s.ip` >> fs [] >>
  pop_assum mp_tac >> simp [Once live_gen_kill, PULL_EXISTS] >> rw [] >>
  first_x_assum (qspec_then `ip2` mp_tac) >> rw [] >>
  first_x_assum drule >> rw [] >>
  ntac 3 HINT_EXISTS_TAC >> rw [] >>
  first_x_assum irule >> rw [] >>
  metis_tac [next_ips_same_func]
QED

(* local_state_rel is preserved across an instruction that assigns the
 * registers rtys, when both sides' locals are extended in parallel with
 * related values (instructions translated to pure expressions excluded). *)
Theorem local_state_rel_updates_keep:
  ∀rtys prog emap s s' vs res_vs i regs_to_keep gmap.
    is_ssa prog ∧
    good_emap s.ip.f prog regs_to_keep gmap emap ∧
    all_distinct (map fst rtys) ∧
    set rtys = assigns prog s.ip ∧
    (¬∃inst r t. get_instr prog s.ip (Inl inst) ∧ classify_instr inst = Exp r t ∧ r ∉ regs_to_keep) ∧
    local_state_rel prog emap s s' ∧
    length vs = length rtys ∧
    list_rel v_rel (map (\v. v.value) vs) res_vs ∧
    i ∈ next_ips prog s.ip ∧
    reachable prog s.ip
    ⇒
    local_state_rel prog emap
      (s with <| ip := i; locals := s.locals |++ zip (map fst rtys, vs) |>)
      (s' with locals := s'.locals |++ zip (map (\(r,ty). translate_reg r ty) rtys, res_vs))
Proof
  rw [] >> irule local_state_rel_next_ip >>
  fs [local_state_rel_def] >> rw []
  >- (
    irule emap_inv_updates_keep_same_ip1 >> rw [] >>
    fs [good_emap_def] >>
    first_x_assum (qspec_then `s.ip` mp_tac) >> rw [] >>
    qexists_tac `snd r` >> rw [] >>
    first_x_assum irule >> rw [] >>
    fs [assigns_cases, IN_DEF, not_exp_def] >>
    metis_tac []) >>
  Cases_on `mem r (map fst rtys)`
  >- (
    irule emap_inv_updates_keep_same_ip1 >> rw [] >>
    `∃t. (r,t) ∈ set rtys` by (fs [MEM_MAP] >> metis_tac [FST, pair_CASES]) >>
    rfs [good_emap_def] >>
    first_x_assum (qspec_then `s.ip` mp_tac) >> rw [] >>
    `(∃inst. get_instr prog s.ip (Inl inst)) ∨ (∃phis. get_instr prog s.ip (Inr phis))`
      by (fs [IN_DEF, assigns_cases] >> metis_tac []) >>
    metis_tac [FST, PAIR_EQ, SND,not_exp_def])
  >- (
    irule emap_inv_updates_keep_same_ip2 >> rw [] >>
    metis_tac [])
QED
(* Single-register specialisation of local_state_rel_updates_keep. *)
Theorem local_state_rel_update_keep:
  ∀prog emap s s' v res_v r i ty regs_to_keep gmap.
    is_ssa prog ∧
    good_emap s.ip.f prog regs_to_keep gmap emap ∧
    assigns prog s.ip = {(r,ty)} ∧
    (¬∃inst r t. get_instr prog s.ip (Inl inst) ∧ classify_instr inst = Exp r t ∧ r ∉ regs_to_keep) ∧
    local_state_rel prog emap s s' ∧
    v_rel v.value res_v ∧
    reachable prog s.ip ∧
    i ∈ next_ips prog s.ip
    ⇒
    local_state_rel prog emap
      (s with <| ip := i; locals := s.locals |+ (r, v) |>)
      (s' with locals := s'.locals |+ (translate_reg r ty, res_v))
Proof
  rw [] >>
  drule local_state_rel_updates_keep >>
  disch_then (qspecl_then [`[(r,ty)]`, `emap`, `s`, `s'`] mp_tac) >>
  simp [] >> disch_then drule >>
  disch_then (qspecl_then [`[v]`, `[res_v]`] mp_tac) >>
  simp [] >> disch_then drule >>
  rw [FUPDATE_LIST_THM]
QED

(* Lift the previous lemma from local_state_rel to full mem_state_rel. *)
Theorem mem_state_rel_update_keep:
  ∀prog gmap emap s s' v res_v r ty i inst regs_to_keep.
    is_ssa prog ∧
    good_emap s.ip.f prog regs_to_keep gmap emap ∧
    assigns prog s.ip = {(r,ty)} ∧
    (¬∃inst r t. get_instr prog s.ip (Inl inst) ∧ classify_instr inst = Exp r t ∧ r ∉ regs_to_keep) ∧
    mem_state_rel prog gmap emap s s' ∧
    v_rel v.value res_v ∧
    reachable prog s.ip ∧
    i ∈ next_ips prog s.ip
    ⇒
    mem_state_rel prog gmap emap
      (s with <| ip := i; locals := s.locals |+ (r, v) |>)
      (s' with locals := s'.locals |+ (translate_reg r ty, res_v))
Proof
  rw [mem_state_rel_def]
  >- (
    irule local_state_rel_update_keep >> rw [] >>
    metis_tac [get_instr_func, INL_11, instr_class_11, instr_class_distinct,
               classify_instr_def])
  >- metis_tac [next_ips_reachable]
QED
(* Setting the heap leaves locals and global addresses untouched. *)
Triviality lemma:
  ((s:llair$state) with heap := h).locals = s.locals ∧
  ((s:llair$state) with heap := h).glob_addrs = s.glob_addrs
Proof
  rw []
QED

(* mem_state_rel is preserved by replacing both heaps with well-formed
 * heaps that agree up to tags. *)
Theorem mem_state_rel_heap_update:
  ∀prog gmap emap s s' h h'.
    mem_state_rel prog gmap emap s s' ∧
    heap_ok h ∧
    erase_tags h = erase_tags h'
    ⇒
    mem_state_rel prog gmap emap (s with heap := h) (s' with heap := h')
Proof
  rw [mem_state_rel_def, erase_tags_def, local_state_rel_def] >>
  rw [heap_component_equality] >>
  fs [fmap_eq_flookup, FLOOKUP_o_f] >> rw [] >>
  first_x_assum (qspec_then `x` mp_tac) >>
  BasicProvers.EVERY_CASE_TAC >> rw [] >>
  fs [emap_invariant_def]
  >- metis_tac [eval_exp_ignores, lemma]
  >- metis_tac [eval_exp_ignores, lemma] >>
  Cases_on `x'` >> Cases_on `x''` >> fs []
QED

(* Related values serialise to identical byte lists. *)
Theorem v_rel_bytes:
  ∀v v'. v_rel v v' ⇒ llvm_value_to_bytes v = llair_value_to_bytes v'
Proof
  ho_match_mp_tac v_rel_ind >>
  rw [v_rel_cases, llvm_value_to_bytes_def, llair_value_to_bytes_def] >>
  rw [value_to_bytes_def, llvmTheory.unconvert_value_def, w2n_i2n,
      llairTheory.unconvert_value_def, llairTheory.pointer_size_def,
      llvmTheory.pointer_size_def] >>
  pop_assum mp_tac >>
  qid_spec_tac `vs1` >>
  Induct_on `vs2` >> rw [] >> rw []
QED
(* Deserialising the same bytes at an LLVM type and at its translated llair
 * type yields related values; stated for all three of bytes_to_value's
 * mutually recursive entry points (scalar, array, struct). *)
Theorem bytes_v_rel_lem:
  (∀f s bs t.
    f = (λn t w. convert_value t w) ∧
    s = type_to_shape t ∧
    first_class_type t
    ⇒
    (quotient_pair$### v_rel $=)
      (bytes_to_value f s bs)
      (bytes_to_value (λn t w. convert_value t w) (type_to_shape (translate_ty t)) bs)) ∧
  (∀f n s bs t.
    f = (λn t w. convert_value t w) ∧
    s = type_to_shape t ∧
    first_class_type t
    ⇒
    (quotient_pair$### (list_rel v_rel) $=)
      (read_array f n s bs)
      (read_array (λn t w. convert_value t w) n (type_to_shape (translate_ty t)) bs)) ∧
  (∀f ss bs ts.
    f = (λn t w. convert_value t w) ∧
    ss = map type_to_shape ts ∧
    every first_class_type ts
    ⇒
    (quotient_pair$### (list_rel v_rel) $=)
      (read_str f ss bs)
      (read_str (λn t w. convert_value t w) (map (type_to_shape o translate_ty) ts) bs))
Proof
  ho_match_mp_tac bytes_to_value_ind >>
  rw [llvmTheory.type_to_shape_def, translate_ty_def, type_to_shape_def,
      sizeof_def, llvmTheory.sizeof_def, bytes_to_value_def, pointer_size_def,
      convert_value_def, llvmTheory.convert_value_def, quotient_pairTheory.PAIR_REL]
  >- (
    Cases_on `t'` >>
    fs [llvmTheory.type_to_shape_def, llvmTheory.sizeof_def, llvmTheory.first_class_type_def] >>
    TRY (Cases_on `s`) >>
    rw [llvmTheory.sizeof_def, le_read_num_def, translate_size_def,
        convert_value_def, llvmTheory.convert_value_def, translate_ty_def,
        type_to_shape_def, bytes_to_value_def, sizeof_def, llvmTheory.sizeof_def] >>
    simp [v_rel_cases] >> rw [word_0_w2i, w2i_1] >>
    fs [pointer_size_def, llvmTheory.pointer_size_def] >>
    qmatch_goalsub_abbrev_tac `l2n 256 l` >>
    qmatch_goalsub_abbrev_tac `n2i n dim` >>
    `n < 2 ** dim`
      by (
        qspecl_then [`l`, `256`] mp_tac numposrepTheory.l2n_lt >>
        rw [] >>
        `256 ** length l ≤ 2 ** dim` suffices_by decide_tac >>
        `256 = 2 ** 8` by rw [] >>
        full_simp_tac bool_ss [] >>
        REWRITE_TAC [GSYM EXP_EXP_MULT] >>
        rw [EXP_BASE_LE_MONO] >>
        unabbrev_all_tac >> rw []) >>
    metis_tac [w2i_n2w, dimword_def, dimindex_8, dimindex_32, dimindex_64])
  >- (
    Cases_on `t` >>
    fs [llvmTheory.type_to_shape_def, llvmTheory.sizeof_def, llvmTheory.first_class_type_def] >>
    rw [PAIR_MAP] >>
    pairarg_tac >> fs [type_to_shape_def, translate_ty_def, bytes_to_value_def] >>
    first_x_assum (qspec_then `t'` mp_tac) >> simp [] >>
    simp [v_rel_cases] >>
    pairarg_tac >> fs [] >>
    pairarg_tac >> fs [] >> rw [])
  >- (
    Cases_on `t` >>
    fs [llvmTheory.type_to_shape_def, llvmTheory.sizeof_def, llvmTheory.first_class_type_def] >>
    rw [PAIR_MAP] >>
    fs [type_to_shape_def, translate_ty_def, bytes_to_value_def] >>
    pairarg_tac >> fs [PAIR_MAP] >>
    first_x_assum (qspec_then `l` mp_tac) >> simp [] >>
    simp [v_rel_cases] >>
    pairarg_tac >> fs [] >>
    pairarg_tac >> fs [MAP_MAP_o] >> rw [] >> fs [ETA_THM])
  >- (
    rpt (pairarg_tac >> fs []) >>
    first_x_assum (qspec_then `t` mp_tac) >> rw [] >>
    first_x_assum (qspec_then `t` mp_tac) >> rw [])
  >- (
    Cases_on `ts` >> fs [bytes_to_value_def] >>
    rpt (pairarg_tac >> fs []) >>
    first_x_assum (qspec_then `h` mp_tac) >> simp [] >> strip_tac >>
    fs [] >> rfs [] >> fs [] >>
    first_x_assum (qspec_then `t` mp_tac) >> simp [] >> strip_tac >>
    fs [MAP_MAP_o] >> rw [])
QED

(* Corollary of bytes_v_rel_lem at the top-level conversion functions. *)
Theorem bytes_v_rel:
  ∀t bs.
    first_class_type t ⇒
    v_rel (fst (bytes_to_llvm_value t bs))
          (fst (bytes_to_llair_value (translate_ty t) bs))
Proof
  rw [bytes_to_llvm_value_def, bytes_to_llair_value_def] >>
  qspecl_then [`bs`, `t`] mp_tac (CONJUNCT1 (SIMP_RULE (srw_ss()) [] bytes_v_rel_lem)) >>
  rw [quotient_pairTheory.PAIR_REL] >>
  pairarg_tac >> fs [] >>
  pairarg_tac >> fs []
QED
(* For naturals that fit in the llair pointer size, n2i agrees with going
 * through a 64-bit word. *)
Triviality n2i_lem:
  ∀n. nfits n llair$pointer_size ⇒
    n2i n llair$pointer_size = IntV (w2i (n2w n : word64)) llair$pointer_size
Proof
  rw [pointer_size_def] >>
  `2 ** 64 = dimword (:64)` by rw [dimword_64] >>
  full_simp_tac bool_ss [nfits_def] >>
  drule w2i_n2w >> rw []
QED

(* Evaluating a translated constant in the related llair state produces a
 * value related to the LLVM one (mutually with constant lists and typed
 * pairs).  NOTE: the GEP case is still cheated. *)
Theorem translate_constant_correct_lem:
  (∀c s prog gmap emap s' v.
    mem_state_rel prog gmap emap s s' ∧
    eval_const s.globals c v
    ⇒
    ∃v'. eval_exp s' (translate_const gmap c) v' ∧ v_rel v v') ∧
  (∀(cs : (ty # const) list) s prog gmap emap s' vs.
    mem_state_rel prog gmap emap s s' ∧
    list_rel (eval_const s.globals o snd) cs vs
    ⇒
    ∃v'. list_rel (eval_exp s') (map (translate_const gmap o snd) cs) v' ∧ list_rel v_rel vs v') ∧
  (∀(tc : ty # const) s prog gmap emap s' v.
    mem_state_rel prog gmap emap s s' ∧
    eval_const s.globals (snd tc) v
    ⇒
    ∃v'. eval_exp s' (translate_const gmap (snd tc)) v' ∧ v_rel v v')
Proof
  ho_match_mp_tac const_induction >> rw [translate_const_def] >>
  pop_assum mp_tac >> simp [Once eval_const_cases]
  >- (
    rw [Once eval_exp_cases] >>
    simp [Once eval_const_cases, translate_size_def, v_rel_cases] >>
    metis_tac [truncate_2comp_i2w_w2i, dimindex_1, dimindex_8, dimindex_32, dimindex_64])
  >- (
    rw [Once eval_exp_cases] >>
    simp [v_rel_cases, PULL_EXISTS, MAP_MAP_o] >>
    fs [combinTheory.o_DEF, LAMBDA_PROD] >> rw [] >>
    first_x_assum drule >> disch_then irule >>
    fs [LIST_REL_EL_EQN] >> rw [] >> rfs [] >>
    first_x_assum drule >>
    simp [EL_MAP] >>
    Cases_on `el n cs` >> simp [])
  >- (
    rw [Once eval_exp_cases] >>
    simp [v_rel_cases, PULL_EXISTS, MAP_MAP_o] >>
    fs [combinTheory.o_DEF, LAMBDA_PROD] >> rw [] >>
    first_x_assum drule >> disch_then irule >>
    fs [LIST_REL_EL_EQN] >> rw [] >> rfs [] >>
    first_x_assum drule >>
    simp [EL_MAP] >>
    Cases_on `el n cs` >> simp [])
  (* TODO: unimplemented GEP stuff *)
  >- cheat
  >- (
    rw [Once eval_exp_cases] >> simp [v_rel_cases] >>
    fs [mem_state_rel_def, globals_rel_def] >>
    first_x_assum (qspec_then `g` mp_tac) >> rw [] >>
    qexists_tac `w2n w` >> rw [] >> fs [FLOOKUP_DEF]
    >- (
      rfs [] >>
      qpat_x_assum `w2n _ = _` (mp_tac o GSYM) >> rw [] >>
      simp [n2i_lem])
    >- rfs [BIJ_DEF, INJ_DEF, SURJ_DEF])
  >- metis_tac []
QED

(* Top-level corollary for a single constant. *)
Theorem translate_constant_correct:
  ∀c s prog gmap emap s' g v.
    mem_state_rel prog gmap emap s s' ∧
    eval_const s.globals c v
    ⇒
    ∃v'. eval_exp s' (translate_const gmap c) v' ∧ v_rel v v'
Proof
  metis_tac [translate_constant_correct_lem]
QED
(* Translated constants mention no registers (GEP case still cheated). *)
Theorem translate_const_no_reg[simp]:
  ∀gmap c. r ∉ exp_uses (translate_const gmap c)
Proof
  ho_match_mp_tac translate_const_ind >>
  rw [translate_const_def, exp_uses_def, MEM_MAP, METIS_PROVE [] ``x ∨ y ⇔ (~x ⇒ y)``]
  >- (pairarg_tac >> fs [] >> metis_tac [])
  >- (pairarg_tac >> fs [] >> metis_tac [])
  (* TODO: unimplemented GEP stuff *)
  >- cheat
QED

(* Evaluating a translated argument (constant or live register) in the
 * related llair state produces a related value. *)
Theorem translate_arg_correct:
  ∀s a v prog gmap emap s'.
    mem_state_rel prog gmap emap s s' ∧
    eval s a v ∧
    arg_to_regs a ⊆ live prog s.ip
    ⇒
    ∃v'. eval_exp s' (translate_arg gmap emap a) v' ∧ v_rel v.value v'
Proof
  Cases_on `a` >> rw [eval_cases, translate_arg_def] >> rw []
  >- metis_tac [translate_constant_correct] >>
  CASE_TAC >> fs [PULL_EXISTS, mem_state_rel_def, local_state_rel_def, emap_invariant_def, arg_to_regs_def] >>
  res_tac >> rfs [] >> metis_tac [eval_exp_ignores]
QED

(* Related states agree on which intervals are allocated. *)
Theorem is_allocated_mem_state_rel:
  ∀prog gmap emap s1 s1'.
    mem_state_rel prog gmap emap s1 s1'
    ⇒
    (∀i. is_allocated i s1.heap ⇔ is_allocated i s1'.heap)
Proof
  rw [mem_state_rel_def, is_allocated_def, erase_tags_def] >>
  pop_assum mp_tac >> pop_assum (mp_tac o GSYM) >> rw []
QED
(* i2w is injective on integers within the word type's representable
 * range, so equality of the words implies equality of the integers. *)
Theorem restricted_i2w_11:
  ∀i (w:'a word). INT_MIN (:'a) ≤ i ∧ i ≤ INT_MAX (:'a) ⇒ (i2w i : 'a word) = i2w (w2i w) ⇒ i = w2i w
Proof
  rw [i2w_def]
  >- (
    Cases_on `n2w (Num (-i)) = INT_MINw` >>
    rw [w2i_neg, w2i_INT_MINw] >>
    fs [word_L_def] >>
    `∃j. 0 ≤ j ∧ i = -j` by intLib.COOPER_TAC >>
    rw [] >>
    fs [] >>
    `INT_MIN (:'a) < dimword (:'a)` by metis_tac [INT_MIN_LT_DIMWORD] >>
    `Num j MOD dimword (:'a) = Num j`
      by (irule LESS_MOD >> intLib.COOPER_TAC) >>
    fs []
    >- intLib.COOPER_TAC
    >- (
      `Num j < INT_MIN (:'a)` by intLib.COOPER_TAC >>
      fs [w2i_n2w_pos, integerTheory.INT_OF_NUM]))
  >- (
    fs [GSYM INT_MAX, INT_MAX_def] >>
    `Num i < INT_MIN (:'a)` by intLib.COOPER_TAC >>
    rw [w2i_n2w_pos, integerTheory.INT_OF_NUM] >>
    intLib.COOPER_TAC)
QED
(* Correctness of the translation of LLVM's sub: if do_sub succeeds on
 * related operand values, the llair Sub expression evaluates to a value
 * related to the result. The case split at the end covers the four
 * integer widths handled by the translation (1, 8, 32 and 64 bits). *)
Theorem translate_sub_correct:
∀prog gmap emap s1 s1' nsw nuw ty v1 v1' v2 v2' e2' e1' result.
do_sub nuw nsw v1 v2 ty = Some result ∧
eval_exp s1' e1' v1' ∧
v_rel v1.value v1' ∧
eval_exp s1' e2' v2' ∧
v_rel v2.value v2'
⇒
∃v3'.
eval_exp s1' (Sub (translate_ty ty) e1' e2') v3' ∧
v_rel result.value v3'
Proof
rw [] >>
simp [Once eval_exp_cases] >>
fs [do_sub_def] >> rw [] >>
rfs [v_rel_cases] >> rw [] >> fs [] >>
BasicProvers.EVERY_CASE_TAC >> fs [PULL_EXISTS, translate_ty_def, translate_size_def] >>
pairarg_tac >> fs [] >>
fs [PAIR_MAP, wordsTheory.FST_ADD_WITH_CARRY] >>
rw [] >>
qmatch_goalsub_abbrev_tac `w2i (-1w * w1 + w2)` >>
qexists_tac `w2i w2` >> qexists_tac `w2i w1` >> simp [] >>
unabbrev_all_tac >> rw []
>- (
(* 1-bit case *)
irule restricted_i2w_11 >> simp [word_sub_i2w] >>
`dimindex (:1) = 1` by rw [] >>
drule truncate_2comp_i2w_w2i >>
rw [word_sub_i2w] >>
metis_tac [w2i_ge, w2i_le, SIMP_CONV (srw_ss()) [] ``INT_MIN (:1)``,
SIMP_CONV (srw_ss()) [] ``INT_MAX (:1)``])
>- (
(* 8-bit case *)
irule restricted_i2w_11 >> simp [word_sub_i2w] >>
`dimindex (:8) = 8` by rw [] >>
drule truncate_2comp_i2w_w2i >>
rw [word_sub_i2w] >>
metis_tac [w2i_ge, w2i_le, SIMP_CONV (srw_ss()) [] ``INT_MIN (:8)``,
SIMP_CONV (srw_ss()) [] ``INT_MAX (:8)``])
>- (
(* 32-bit case *)
irule restricted_i2w_11 >> simp [word_sub_i2w] >>
`dimindex (:32) = 32` by rw [] >>
drule truncate_2comp_i2w_w2i >>
rw [word_sub_i2w] >>
metis_tac [w2i_ge, w2i_le, SIMP_CONV (srw_ss()) [] ``INT_MIN (:32)``,
SIMP_CONV (srw_ss()) [] ``INT_MAX (:32)``])
>- (
(* 64-bit case *)
irule restricted_i2w_11 >> simp [word_sub_i2w] >>
`dimindex (:64) = 64` by rw [] >>
drule truncate_2comp_i2w_w2i >>
rw [word_sub_i2w] >>
metis_tac [w2i_ge, w2i_le, SIMP_CONV (srw_ss()) [] ``INT_MIN (:64)``,
SIMP_CONV (srw_ss()) [] ``INT_MAX (:64)``])
QED
(* Correctness for extractvalue's index chain: extracting along indices ns
 * from an aggregate v is simulated by folding llair Select expressions over
 * the translated constant indices cs. Proceeds by induction on cs. *)
Theorem translate_extract_correct:
∀prog gmap emap s1 s1' a v v1' e1' cs vs ns result.
mem_state_rel prog gmap emap s1 s1' ∧
list_rel (eval_const s1.globals) cs vs ∧
map signed_v_to_num vs = map Some ns ∧
extract_value v ns = Some result ∧
eval_exp s1' e1' v1' ∧
v_rel v v1'
⇒
∃v2'.
eval_exp s1' (foldl (λe c. Select e (translate_const gmap c)) e1' cs) v2' ∧
v_rel result v2'
Proof
Induct_on `cs` >> rw [] >> rfs [] >> fs [extract_value_def]
>- metis_tac [] >>
first_x_assum irule >>
Cases_on `ns` >> fs [] >>
qmatch_goalsub_rename_tac `translate_const gmap c` >>
qmatch_assum_rename_tac `eval_const _ _ v3` >>
`∃v2'. eval_exp s1' (translate_const gmap c) v2' ∧ v_rel v3 v2'`
by metis_tac [translate_constant_correct] >>
Cases_on `v` >> fs [extract_value_def] >>
qpat_x_assum `v_rel (AggV _) _` mp_tac >>
simp [Once v_rel_cases] >> rw [] >>
simp [Once eval_exp_cases, PULL_EXISTS] >>
fs [LIST_REL_EL_EQN] >>
qmatch_assum_rename_tac `_ = map Some is` >>
Cases_on `v3` >> fs [signed_v_to_num_def, signed_v_to_int_def] >> rw [] >>
`∃i. v2' = FlatV i` by fs [v_rel_cases] >> fs [] >>
qmatch_assum_rename_tac `option_join _ = Some x` >>
`∃size. i = IntV (&x) size` suffices_by metis_tac [] >> rw [] >>
qpat_x_assum `v_rel _ _` mp_tac >>
simp [v_rel_cases] >> rw [] >> fs [signed_v_to_int_def] >> rw [] >>
intLib.COOPER_TAC
QED
(* Correctness for insertvalue's index chain: inserting v2 into aggregate v1
 * along indices ns is simulated by translate_updatevalue applied to the
 * translated constant indices cs. Induction on cs, mirroring
 * translate_extract_correct. *)
Theorem translate_update_correct:
∀prog gmap emap s1 s1' a v1 v1' v2 v2' e2 e2' e1' cs vs ns result.
mem_state_rel prog gmap emap s1 s1' ∧
list_rel (eval_const s1.globals) cs vs ∧
map signed_v_to_num vs = map Some ns ∧
insert_value v1 v2 ns = Some result ∧
eval_exp s1' e1' v1' ∧
v_rel v1 v1' ∧
eval_exp s1' e2' v2' ∧
v_rel v2 v2'
⇒
∃v3'.
eval_exp s1' (translate_updatevalue gmap e1' e2' cs) v3' ∧
v_rel result v3'
Proof
Induct_on `cs` >> rw [] >> rfs [] >> fs [insert_value_def, translate_updatevalue_def]
>- metis_tac [] >>
simp [Once eval_exp_cases, PULL_EXISTS] >>
Cases_on `ns` >> fs [] >>
Cases_on `v1` >> fs [insert_value_def] >>
rename [`insert_value (el x _) _ ns`] >>
Cases_on `insert_value (el x l) v2 ns` >> fs [] >> rw [] >>
qpat_x_assum `v_rel (AggV _) _` mp_tac >> simp [Once v_rel_cases] >> rw [] >>
simp [v_rel_cases] >>
qmatch_goalsub_rename_tac `translate_const gmap c` >>
qexists_tac `vs2` >> simp [] >>
qmatch_assum_rename_tac `eval_const _ _ v3` >>
`∃v4'. eval_exp s1' (translate_const gmap c) v4' ∧ v_rel v3 v4'`
by metis_tac [translate_constant_correct] >>
`∃idx_size. v4' = FlatV (IntV (&x) idx_size)`
by (
pop_assum mp_tac >> simp [Once v_rel_cases] >>
rw [] >> fs [signed_v_to_num_def, signed_v_to_int_def] >>
intLib.COOPER_TAC) >>
first_x_assum drule >>
disch_then drule >>
disch_then drule >>
disch_then drule >>
disch_then (qspecl_then [`el x vs2`, `v2'`, `e2'`, `Select e1' (translate_const gmap c)`] mp_tac) >>
simp [Once eval_exp_cases] >>
metis_tac [EVERY2_LUPDATE_same, LIST_REL_LENGTH, LIST_REL_EL_EQN]
QED
(* The four integer bit-widths handled by the translation, and instances of
 * truncate_2comp_i2w_w2i and (the symmetric form of) w2n_signed2unsigned
 * specialised to each of them, conjoined for use as rewrites below. *)
val sizes = [``:1``, ``:8``, ``:32``, ``:64``];
val trunc_thms =
LIST_CONJ (map (fn x => SIMP_RULE (srw_ss()) [] (INST_TYPE [``:'a`` |-> x] truncate_2comp_i2w_w2i))
sizes);
val signed2unsigned_thms =
LIST_CONJ (map (fn x => SIMP_RULE (srw_ss()) [] (INST_TYPE [``:'a`` |-> x] (GSYM w2n_signed2unsigned)))
sizes);
(* Well-formedness side condition for a cast: the claimed source bit-width
 * matches the operand's actual size, and it relates to the target type's
 * size as the cast direction requires (Trunc shrinks, Zext/Sext grow).
 * Pointer/integer casts carry no constraint here. *)
Definition good_cast_def:
(good_cast Trunc (FlatV (IntV i size)) from_bits to_t ⇔
from_bits = size ∧ llair$sizeof_bits to_t < from_bits) ∧
(good_cast Zext (FlatV (IntV i size)) from_bits to_t ⇔
from_bits = size ∧ from_bits < sizeof_bits to_t) ∧
(good_cast Sext (FlatV (IntV i size)) from_bits to_t ⇔
from_bits = size ∧ from_bits < sizeof_bits to_t) ∧
(good_cast Ptrtoint _ _ _ ⇔ T) ∧
(good_cast Inttoptr _ _ _ ⇔ T)
End
(* Correctness of cast translation: a successful do_cast on a related value
 * is simulated by the llair Signed/Unsigned conversion expression (Unsigned
 * for Zext, Signed otherwise; Trunc converts at the target width, the others
 * at from_bits), under the good_cast side condition. The Ptrtoint/Inttoptr
 * cases are still a cheat (see TODO below). *)
Theorem translate_cast_correct:
∀prog gmap emap s1' cop from_bits to_ty v1 v1' e1' result.
do_cast cop v1.value to_ty = Some result ∧
eval_exp s1' e1' v1' ∧
v_rel v1.value v1' ∧
good_cast cop v1' from_bits (translate_ty to_ty)
⇒
∃v3'.
eval_exp s1' ((if (cop = Zext) then Unsigned else Signed)
(if cop = Trunc then sizeof_bits (translate_ty to_ty) else from_bits)
e1' (translate_ty to_ty)) v3' ∧
v_rel result v3'
Proof
rw [] >> simp [Once eval_exp_cases, PULL_EXISTS, Once v_rel_cases]
>- ( (* Zext *)
fs [do_cast_def, OPTION_JOIN_EQ_SOME, unsigned_v_to_num_some, w64_cast_some,
translate_ty_def, sizeof_bits_def, translate_size_def] >>
rw [] >>
rfs [v_rel_cases] >> rw [] >>
qmatch_assum_abbrev_tac `eval_exp _ _ (FlatV (IntV i s))` >>
qexists_tac `i` >> qexists_tac `s` >> rw [] >>
unabbrev_all_tac >>
fs [good_cast_def, translate_ty_def, sizeof_bits_def, translate_size_def] >>
rw [trunc_thms, signed2unsigned_thms] >>
rw [GSYM w2w_def, w2w_w2w, WORD_ALL_BITS] >>
rw [w2i_w2w_expand])
>- ( (* Trunc *)
fs [do_cast_def] >> rw [] >>
fs [OPTION_JOIN_EQ_SOME, w64_cast_some, unsigned_v_to_num_some,
signed_v_to_int_some, mk_ptr_some] >>
rw [sizeof_bits_def, translate_ty_def, translate_size_def] >>
rfs [] >> fs [v_rel_cases] >>
rw [] >>
qmatch_assum_abbrev_tac `eval_exp _ _ (FlatV (IntV i s))` >>
qexists_tac `s` >> qexists_tac `i` >> rw [] >>
unabbrev_all_tac >>
fs [good_cast_def, translate_ty_def, sizeof_bits_def, translate_size_def] >>
rw [w2w_n2w, GSYM w2w_def, trunc_thms, pointer_size_def] >>
rw [i2w_w2i_extend, WORD_w2w_OVER_MUL] >>
rw [w2w_w2w, WORD_ALL_BITS, word_bits_w2w] >>
rw [word_mul_def]) >>
Cases_on `cop` >> fs [] >> rw []
>- ( (* Sext *)
fs [do_cast_def] >> rw [] >>
fs [OPTION_JOIN_EQ_SOME, w64_cast_some, unsigned_v_to_num_some,
signed_v_to_int_some, mk_ptr_some] >>
rw [sizeof_bits_def, translate_ty_def, translate_size_def] >>
rfs [] >> fs [v_rel_cases] >>
rw [] >>
qmatch_assum_abbrev_tac `eval_exp _ _ (FlatV (IntV i s))` >>
qexists_tac `s` >> qexists_tac `i` >> rw [] >>
unabbrev_all_tac >>
fs [good_cast_def, translate_ty_def, sizeof_bits_def, translate_size_def] >>
rw [trunc_thms, w2w_i2w] >>
irule (GSYM w2i_i2w)
>- (
`w2i w ≤ INT_MAX (:1) ∧ INT_MIN (:1) ≤ w2i w` by metis_tac [w2i_le, w2i_ge] >>
fs [] >> intLib.COOPER_TAC)
>- (
`w2i w ≤ INT_MAX (:1) ∧ INT_MIN (:1) ≤ w2i w` by metis_tac [w2i_le, w2i_ge] >>
fs [] >> intLib.COOPER_TAC)
>- (
`w2i w ≤ INT_MAX (:1) ∧ INT_MIN (:1) ≤ w2i w` by metis_tac [w2i_le, w2i_ge] >>
fs [] >> intLib.COOPER_TAC)
>- (
`w2i w ≤ INT_MAX (:8) ∧ INT_MIN (:8) ≤ w2i w` by metis_tac [w2i_le, w2i_ge] >>
fs [] >> intLib.COOPER_TAC)
>- (
`w2i w ≤ INT_MAX (:8) ∧ INT_MIN (:8) ≤ w2i w` by metis_tac [w2i_le, w2i_ge] >>
fs [] >> intLib.COOPER_TAC)
>- (
`w2i w ≤ INT_MAX (:32) ∧ INT_MIN (:32) ≤ w2i w` by metis_tac [w2i_le, w2i_ge] >>
fs [] >> intLib.COOPER_TAC))
(* TODO: pointer to int and int to pointer casts *)
>> cheat
QED
(* Folding Select over translated constant indices adds no register uses,
 * since translated constants use no registers (translate_const_no_reg).
 * Registered as a [simp] rule. *)
Theorem const_idx_uses[simp]:
∀cs gmap e.
exp_uses (foldl (λe c. Select e (translate_const gmap c)) e cs) = exp_uses e
Proof
Induct_on `cs` >> rw [exp_uses_def] >> rw [EXTENSION]
QED
(* Register uses of a translated updatevalue expression: those of the value
 * expression e2, plus those of the aggregate expression e1 when the index
 * list is non-empty (with no indices, e1 is not consulted). [simp] rule. *)
Theorem exp_uses_trans_upd_val[simp]:
∀cs gmap e1 e2. exp_uses (translate_updatevalue gmap e1 e2 cs) =
(if cs = [] then {} else exp_uses e1) ∪ exp_uses e2
Proof
Induct_on `cs` >> rw [exp_uses_def, translate_updatevalue_def] >>
rw [EXTENSION] >>
metis_tac []
QED
(* TODO: identify some lemmas to cut down on the duplicated proof in the very
* similar cases *)
(* Simulation for instructions classified as expressions (Exp r t): a source
 * step over such an instruction is a Tau step that advances the pc, and
 * - if r is not kept as a register (r ∉ regs_to_keep), the llair state is
 *   unchanged (the expression is inlined via emap);
 * - if r is kept, the llair program takes a Move step assigning the
 *   translated expression to the translated register.
 * Cases: Sub, Extractvalue, Insertvalue, Cast; each follows the same
 * shape (evaluate arguments via translate_arg_correct, apply the
 * per-operation correctness lemma, then re-establish mem_state_rel with
 * mem_state_rel_update / mem_state_rel_update_keep). The Cast case has a
 * cheat for a typing side condition (see TODO inline). *)
Theorem translate_instr_to_exp_correct:
∀gmap emap instr r t s1 s1' s2 prog l regs_to_keep.
prog_ok prog ∧ is_ssa prog ∧
good_emap s1.ip.f prog regs_to_keep gmap emap ∧
classify_instr instr = Exp r t ∧
mem_state_rel prog gmap emap s1 s1' ∧
get_instr prog s1.ip (Inl instr) ∧
step_instr prog s1 instr l s2 ∧
is_implemented instr
⇒
∃pv s2'.
l = Tau ∧
s2.ip = inc_pc s1.ip ∧
mem_state_rel prog gmap emap s2 s2' ∧
(r ∉ regs_to_keep ⇒ s1' = s2') ∧
(r ∈ regs_to_keep ⇒
step_inst s1' (Move [(translate_reg r t, translate_instr_to_exp gmap emap instr)]) Tau s2')
Proof
recInduct translate_instr_to_exp_ind >>
simp [translate_instr_to_exp_def, classify_instr_def] >>
conj_tac
>- ( (* Sub *)
rw [step_instr_cases, get_instr_cases, update_result_def] >>
qpat_x_assum `Sub _ _ _ _ _ _ = el _ _` (assume_tac o GSYM) >>
`bigunion (image arg_to_regs {a1; a2}) ⊆ live prog s1.ip`
by (
simp [Once live_gen_kill, SUBSET_DEF, uses_cases, IN_DEF, get_instr_cases,
instr_uses_def] >>
metis_tac []) >>
fs [] >>
first_x_assum (mp_then.mp_then mp_then.Any mp_tac translate_arg_correct) >>
disch_then drule >> disch_then drule >>
first_x_assum (mp_then.mp_then mp_then.Any mp_tac translate_arg_correct) >>
disch_then drule >> disch_then drule >> rw [] >>
drule translate_sub_correct >>
simp [] >>
disch_then (qspecl_then [`s1'`, `v'`, `v''`] mp_tac) >> simp [] >>
disch_then drule >> disch_then drule >> rw [] >>
rename1 `eval_exp _ (Sub _ _ _) res_v` >>
rename1 `r ∈ _` >>
simp [inc_pc_def, llvmTheory.inc_pc_def] >>
`assigns prog s1.ip = {(r,ty)}`
by rw [assigns_cases, EXTENSION, IN_DEF, get_instr_cases, instr_assigns_def] >>
`reachable prog s1.ip` by fs [mem_state_rel_def] >>
`s1.ip with i := inc_bip (Offset idx) ∈ next_ips prog s1.ip`
by (
drule prog_ok_nonterm >>
simp [get_instr_cases, PULL_EXISTS] >>
ntac 3 (disch_then drule) >>
simp [terminator_def, next_ips_cases, IN_DEF, inc_pc_def]) >>
Cases_on `r ∉ regs_to_keep` >> rw []
>- (
irule mem_state_rel_update >> rw []
>- (
fs [get_instr_cases, classify_instr_def, translate_instr_to_exp_def] >>
metis_tac [])
>- metis_tac [])
>- (
simp [step_inst_cases, PULL_EXISTS] >>
qexists_tac `res_v` >> rw [] >>
rw [update_results_def, GSYM FUPDATE_EQ_FUPDATE_LIST] >>
irule mem_state_rel_update_keep >> rw [] >>
qexists_tac `regs_to_keep` >> rw [] >>
CCONTR_TAC >> fs [] >>
drule exp_assigns_sing >> disch_then drule >> rw [] >>
metis_tac [])) >>
conj_tac
>- ( (* Extractvalue *)
rw [step_instr_cases, get_instr_cases, update_result_def] >>
qpat_x_assum `Extractvalue _ _ _ = el _ _` (assume_tac o GSYM) >>
`arg_to_regs a ⊆ live prog s1.ip`
by (
simp [Once live_gen_kill, SUBSET_DEF, uses_cases, IN_DEF, get_instr_cases,
instr_uses_def]) >>
drule translate_extract_correct >> rpt (disch_then drule) >>
drule translate_arg_correct >> disch_then drule >>
simp [] >> strip_tac >>
disch_then drule >> simp [] >> rw [] >>
rename1 `eval_exp _ (foldl _ _ _) res_v` >>
rw [inc_pc_def, llvmTheory.inc_pc_def] >>
rename1 `r ∈ _` >>
`assigns prog s1.ip = {(r,THE (extract_type t (map cidx_to_num cs)))}`
by rw [assigns_cases, EXTENSION, IN_DEF, get_instr_cases, instr_assigns_def] >>
`reachable prog s1.ip` by fs [mem_state_rel_def] >>
`s1.ip with i := inc_bip (Offset idx) ∈ next_ips prog s1.ip`
by (
drule prog_ok_nonterm >>
simp [get_instr_cases, PULL_EXISTS] >>
ntac 3 (disch_then drule) >>
simp [terminator_def, next_ips_cases, IN_DEF, inc_pc_def]) >>
Cases_on `r ∈ regs_to_keep` >> rw []
>- (
simp [step_inst_cases, PULL_EXISTS] >>
qexists_tac `res_v` >> rw [] >>
rw [update_results_def, GSYM FUPDATE_EQ_FUPDATE_LIST] >>
irule mem_state_rel_update_keep >> rw [] >>
qexists_tac `regs_to_keep` >> rw [] >>
CCONTR_TAC >> fs [] >>
drule exp_assigns_sing >> disch_then drule >> rw [] >>
metis_tac [])
>- (
irule mem_state_rel_update >> rw []
>- (
fs [get_instr_cases, classify_instr_def, translate_instr_to_exp_def] >>
metis_tac [])
>- metis_tac [])) >>
conj_tac
>- ( (* Updatevalue *)
rw [step_instr_cases, get_instr_cases, update_result_def] >>
qpat_x_assum `Insertvalue _ _ _ _ = el _ _` (assume_tac o GSYM) >>
`arg_to_regs a1 ⊆ live prog s1.ip ∧
arg_to_regs a2 ⊆ live prog s1.ip`
by (
ONCE_REWRITE_TAC [live_gen_kill] >>
simp [SUBSET_DEF, uses_cases, IN_DEF, get_instr_cases,
instr_uses_def]) >>
drule translate_update_correct >> rpt (disch_then drule) >>
first_x_assum (mp_then.mp_then mp_then.Any mp_tac translate_arg_correct) >>
disch_then drule >> disch_then drule >>
first_x_assum (mp_then.mp_then mp_then.Any mp_tac translate_arg_correct) >>
disch_then drule >> disch_then drule >>
simp [] >> strip_tac >> strip_tac >>
disch_then (qspecl_then [`v'`, `v''`] mp_tac) >> simp [] >>
disch_then drule >> disch_then drule >>
rw [] >>
rename1 `eval_exp _ (translate_updatevalue _ _ _ _) res_v` >>
rw [inc_pc_def, llvmTheory.inc_pc_def] >>
rename1 `r ∈ _` >>
`assigns prog s1.ip = {(r,t1)}`
by rw [assigns_cases, EXTENSION, IN_DEF, get_instr_cases, instr_assigns_def] >>
`reachable prog s1.ip` by fs [mem_state_rel_def] >>
`s1.ip with i := inc_bip (Offset idx) ∈ next_ips prog s1.ip`
by (
drule prog_ok_nonterm >>
simp [get_instr_cases, PULL_EXISTS] >>
ntac 3 (disch_then drule) >>
simp [terminator_def, next_ips_cases, IN_DEF, inc_pc_def]) >>
Cases_on `r ∈ regs_to_keep` >> rw []
>- (
simp [step_inst_cases, PULL_EXISTS] >>
qexists_tac `res_v` >> rw [] >>
rw [update_results_def, GSYM FUPDATE_EQ_FUPDATE_LIST] >>
irule mem_state_rel_update_keep >> rw [] >>
qexists_tac `regs_to_keep` >> rw [] >>
CCONTR_TAC >> fs [] >>
drule exp_assigns_sing >> disch_then drule >> rw [] >>
metis_tac [])
>- (
irule mem_state_rel_update >> rw []
>- (
fs [get_instr_cases, classify_instr_def, translate_instr_to_exp_def] >>
metis_tac [])
>- metis_tac [])) >>
conj_tac
>- ( (* Cast *)
simp [step_instr_cases, get_instr_cases, update_result_def] >>
rpt strip_tac >>
qpat_x_assum `Cast _ _ _ _ = el _ _` (assume_tac o GSYM) >>
`arg_to_regs a1 ⊆ live prog s1.ip`
by (
simp [Once live_gen_kill, SUBSET_DEF, uses_cases, IN_DEF, get_instr_cases,
instr_uses_def] >>
metis_tac []) >>
fs [] >>
first_x_assum (mp_then.mp_then mp_then.Any mp_tac translate_arg_correct) >>
disch_then drule >> disch_then drule >> strip_tac >>
drule translate_cast_correct >> ntac 2 (disch_then drule) >>
simp [] >>
disch_then (qspec_then `sizeof_bits (translate_ty t1)` mp_tac) >>
impl_tac
(* TODO: prog_ok should enforce that the type is consistent *)
>- cheat >>
strip_tac >>
rename1 `eval_exp _ _ res_v` >>
simp [inc_pc_def, llvmTheory.inc_pc_def] >>
rename1 `r ∈ _` >>
`assigns prog s1.ip = {(r,t)}`
by rw [assigns_cases, EXTENSION, IN_DEF, get_instr_cases, instr_assigns_def] >>
`reachable prog s1.ip` by fs [mem_state_rel_def] >>
`s1.ip with i := inc_bip (Offset idx) ∈ next_ips prog s1.ip`
by (
drule prog_ok_nonterm >>
simp [get_instr_cases, PULL_EXISTS] >>
ntac 3 (disch_then drule) >>
simp [terminator_def, next_ips_cases, IN_DEF, inc_pc_def]) >>
Cases_on `r ∈ regs_to_keep` >> simp []
>- (
simp [step_inst_cases, PULL_EXISTS] >>
qexists_tac `res_v` >> rw [] >>
fs [] >>
rw [update_results_def, GSYM FUPDATE_EQ_FUPDATE_LIST] >>
irule mem_state_rel_update_keep >> rw [] >>
qexists_tac `regs_to_keep` >> rw [] >>
CCONTR_TAC >> fs [] >>
drule exp_assigns_sing >> disch_then drule >> rw [] >>
metis_tac [])
>- (
irule mem_state_rel_update >> rw []
>- (
fs [get_instr_cases, classify_instr_def, translate_instr_to_exp_def] >>
metis_tac [])
>- metis_tac [])) >>
rw [is_implemented_def]
QED
(* Updating the heap field leaves the locals unchanged (record projection). *)
Triviality eval_exp_help:
(s1 with heap := h).locals = s1.locals
Proof
rw []
QED
(* Simulation for instructions classified as Non_exp (the memory
 * instructions: Load and Store). A source step is matched by a step_inst on
 * the translated instruction, emitting the translated trace label, the pc
 * advances, and mem_state_rel is re-established. The Store case also shows
 * the observable write event is translated consistently with globals_rel. *)
Theorem translate_instr_to_inst_correct:
∀gmap emap instr r t s1 s1' s2 prog l regs_to_keep.
classify_instr instr = Non_exp ∧
prog_ok prog ∧ is_ssa prog ∧
good_emap s1.ip.f prog regs_to_keep gmap emap ∧
mem_state_rel prog gmap emap s1 s1' ∧
get_instr prog s1.ip (Inl instr) ∧
step_instr prog s1 instr l s2
⇒
∃pv s2'.
s2.ip = inc_pc s1.ip ∧
mem_state_rel prog gmap emap s2 s2' ∧
step_inst s1' (translate_instr_to_inst gmap emap instr) (translate_trace gmap l) s2'
Proof
rw [step_instr_cases] >>
fs [classify_instr_def, translate_instr_to_inst_def]
>- ( (* Load *)
fs [step_inst_cases, get_instr_cases, PULL_EXISTS] >>
qpat_x_assum `Load _ _ _ = el _ _` (assume_tac o GSYM) >>
`arg_to_regs a1 ⊆ live prog s1.ip`
by (
simp [Once live_gen_kill, SUBSET_DEF, uses_cases, IN_DEF, get_instr_cases,
instr_uses_def] >>
metis_tac []) >>
fs [] >>
first_x_assum (mp_then.mp_then mp_then.Any mp_tac translate_arg_correct) >>
disch_then drule >> disch_then drule >> rw [] >>
qpat_x_assum `v_rel (FlatV _) _` mp_tac >> simp [Once v_rel_cases] >> rw [] >>
`∃n. r = Reg n` by (Cases_on `r` >> metis_tac []) >>
qexists_tac `n` >> qexists_tac `translate_ty t` >>
HINT_EXISTS_TAC >> rw [] >>
qexists_tac `freeable` >> rw [translate_trace_def]
>- rw [inc_pc_def, llvmTheory.inc_pc_def, update_result_def]
>- (
simp [GSYM translate_reg_def, llvmTheory.inc_pc_def, update_result_def,
update_results_def, GSYM FUPDATE_EQ_FUPDATE_LIST,
extend_emap_non_exp_def] >>
irule mem_state_rel_update_keep >>
rw []
>- rw [assigns_cases, IN_DEF, EXTENSION, get_instr_cases, instr_assigns_def]
>- (
`s1.ip with i := inc_bip (Offset idx) = inc_pc s1.ip` by rw [inc_pc_def] >>
simp [] >> irule prog_ok_nonterm >>
simp [get_instr_cases, terminator_def])
>- metis_tac [next_ips_reachable, mem_state_rel_def]
>- (
fs [w2n_i2n, pointer_size_def, mem_state_rel_def] >>
metis_tac [bytes_v_rel, get_bytes_erase_tags])
>- (
qexists_tac `regs_to_keep` >> rw [] >>
CCONTR_TAC >> fs [get_instr_cases] >>
fs [] >> rw [] >> fs [] >> rw [] >>
rfs [classify_instr_def]))
>- rw [translate_reg_def]
>- (
fs [w2n_i2n, pointer_size_def, mem_state_rel_def] >>
metis_tac [is_allocated_erase_tags]))
>- ( (* Store *)
fs [step_inst_cases, get_instr_cases, PULL_EXISTS] >>
qpat_x_assum `Store _ _ = el _ _` (assume_tac o GSYM) >>
`bigunion (image arg_to_regs {a1; a2}) ⊆ live prog s1.ip`
by (
simp [Once live_gen_kill, SUBSET_DEF, uses_cases, IN_DEF, get_instr_cases,
instr_uses_def] >>
metis_tac []) >>
fs [] >>
first_x_assum (mp_then.mp_then mp_then.Any mp_tac translate_arg_correct) >>
disch_then drule >> disch_then drule >>
first_x_assum (mp_then.mp_then mp_then.Any mp_tac translate_arg_correct) >>
disch_then drule >> disch_then drule >> rw [] >>
qpat_x_assum `v_rel (FlatV _) _` mp_tac >> simp [Once v_rel_cases] >> rw [] >>
drule v_rel_bytes >> rw [] >>
fs [w2n_i2n, pointer_size_def] >>
HINT_EXISTS_TAC >> rw [] >>
qexists_tac `freeable` >> rw [] >>
qexists_tac `v'` >> rw []
>- rw [llvmTheory.inc_pc_def, inc_pc_def]
>- (
simp [llvmTheory.inc_pc_def] >>
irule mem_state_rel_no_update >> rw []
>- rw [assigns_cases, EXTENSION, IN_DEF, get_instr_cases, instr_assigns_def]
>- (
`s1.ip with i := inc_bip (Offset idx) = inc_pc s1.ip` by rw [inc_pc_def] >>
simp [] >> irule prog_ok_nonterm >>
simp [get_instr_cases, terminator_def]) >>
irule mem_state_rel_heap_update >>
rw [set_bytes_unchanged, erase_tags_set_bytes] >>
fs [mem_state_rel_def, extend_emap_non_exp_def] >>
metis_tac [set_bytes_heap_ok])
>- (
fs [mem_state_rel_def] >>
fs [is_allocated_def, heap_component_equality, erase_tags_def] >>
metis_tac [])
>- (
fs [get_obs_cases, llvmTheory.get_obs_cases] >> rw [translate_trace_def] >>
fs [mem_state_rel_def, globals_rel_def]
>- (
fs [FLOOKUP_DEF] >>
first_x_assum drule >> rw []
>- metis_tac [BIJ_DEF, SURJ_DEF] >>
pop_assum (mp_tac o GSYM) >> rw [w2n_i2n])
>- (
CCONTR_TAC >> fs [FRANGE_DEF] >>
fs [BIJ_DEF, SURJ_DEF, INJ_DEF] >>
first_x_assum drule >> rw [] >>
CCONTR_TAC >> fs [] >> rw [] >>
first_x_assum drule >> rw [] >>
CCONTR_TAC >> fs [] >> rw [] >>
`i2n (IntV (w2i w) (dimindex (:64))) = w2n w` by metis_tac [w2n_i2n, dimindex_64] >>
fs [] >> rw [] >>
fs [METIS_PROVE [] ``~x ∨ y ⇔ (x ⇒ y)``] >>
first_x_assum drule >> rw [] >>
metis_tac [pair_CASES, SND])))
QED
(* Classification sanity: instructions classified Term are terminators, and
 * instructions classified Call are calls or terminators. *)
Theorem classify_instr_term_call:
∀i. (classify_instr i = Term ⇒ terminator i) ∧
(classify_instr i = Call ⇒ is_call i ∨ terminator i)
Proof
Cases >> rw [classify_instr_def, is_call_def, terminator_def] >>
Cases_on `p` >> rw [classify_instr_def]
QED
(* Left inverse of translate_glob_var: recover the LLVM global name from a
 * translated llair variable (dropping the type annotation). *)
Definition untranslate_glob_var_def:
untranslate_glob_var (Var_name n ty) = Glob_var n
End
(* Left inverse of translate_trace on observable labels: only write events
 * carry a translated global name that needs to be mapped back. *)
Definition untranslate_trace_def:
(untranslate_trace Tau = Tau) ∧
(untranslate_trace Error = Error) ∧
(untranslate_trace (Exit i) = (Exit i)) ∧
(untranslate_trace (W gv bytes) = W (untranslate_glob_var gv) bytes)
End
(* untranslate_glob_var undoes translate_glob_var, for any gmap. *)
Theorem un_translate_glob_inv:
∀x t. untranslate_glob_var (translate_glob_var gmap x) = x
Proof
Cases_on `x` >> rw [translate_glob_var_def] >>
CASE_TAC >> rw [untranslate_glob_var_def]
QED
(* untranslate_trace undoes translate_trace, for any gmap. *)
Theorem un_translate_trace_inv:
∀x. untranslate_trace (translate_trace gmap x) = x
Proof
Cases >> rw [translate_trace_def, untranslate_trace_def] >>
metis_tac [un_translate_glob_inv]
QED
(* Unfolding lemma for take_to_call: if the instruction at idx is neither a
 * terminator nor a call, then taking up to the next call from idx yields
 * that instruction consed onto the take from idx+1. *)
Theorem take_to_call_lem:
∀i idx body.
idx < length body ∧ el idx body = i ∧ ¬terminator i ∧ ¬is_call i ⇒
take_to_call (drop idx body) = i :: take_to_call (drop (idx + 1) body)
Proof
Induct_on `idx` >> rw []
>- (Cases_on `body` >> fs [take_to_call_def] >> rw []) >>
Cases_on `body` >> fs [] >>
simp [ADD1]
QED
(* Incrementing a translated label is the same as translating with the call
 * counter bumped by one. *)
Theorem inc_translate_label:
∀f l x. inc_label (translate_label f l x) = translate_label f l (x + 1)
Proof
rw [] >> Cases_on `l` >> rw [translate_label_def, inc_label_def] >>
Cases_on `x'` >> rw [translate_label_def, inc_label_def]
QED
(* Main one-block simulation: a source multi_step (up to the end of an LLVM
 * block or the next call) is matched by step_block on the first translated
 * llair block, with the same non-Tau trace (up to translate_trace), ending
 * in related states. Proved by induction on multi_step:
 * - last-step cases: Phi (handled elsewhere), terminators (Exit and Br are
 *   done; other terminators and Call are discharged via is_implemented),
 *   and Stuck (currently a cheat, pending the backward simulation);
 * - mid-block cases: instructions compiling to expressions use
 *   translate_instr_to_exp_correct, Non_exp ones use
 *   translate_instr_to_inst_correct, each followed by re-associating the
 *   emap extension using good_emap. *)
Theorem translate_instrs_correct1:
∀prog s1 tr s2.
multi_step prog s1 tr s2 ⇒
∀s1' regs_to_keep b' gmap emap d b idx rest l.
prog_ok prog ∧ is_ssa prog ∧
mem_state_rel prog gmap emap s1 s1' ∧
good_emap s1.ip.f prog regs_to_keep gmap emap ∧
alookup prog s1.ip.f = Some d ∧
alookup d.blocks s1.ip.b = Some b ∧
s1.ip.i = Offset idx ∧
(l,b')::rest =
fst (translate_instrs (translate_label (dest_fn s1.ip.f) s1.ip.b (num_calls (take idx b.body)))
gmap emap regs_to_keep (take_to_call (drop idx b.body))) ∧
every is_implemented b.body
⇒
∃s2' tr'.
step_block (translate_prog prog) s1' b'.cmnd b'.term tr' s2' ∧
filter ($≠ Tau) tr' = filter ($≠ Tau) (map (translate_trace gmap) tr) ∧
state_rel prog gmap emap s2 s2'
Proof
ho_match_mp_tac multi_step_ind >> rw_tac std_ss []
>- (
fs [last_step_cases]
>- ( (* Phi (not handled here) *)
fs [get_instr_cases])
>- ( (* Terminator *)
rename1 `Inl instr` >>
`is_implemented instr`
by (fs [get_instr_cases] >> metis_tac [EVERY_MEM, EL_MEM]) >>
`(∃code. l = Exit code) ∨ l = Tau `
by (
fs [llvmTheory.step_cases] >>
`i' = instr` by metis_tac [get_instr_func, INL_11] >>
fs [step_instr_cases] >> rfs [terminator_def]) >>
fs [get_instr_cases, translate_trace_def] >> rw [] >>
`el idx b.body = el 0 (drop idx b.body)` by rw [EL_DROP] >>
fs [] >>
Cases_on `drop idx b.body` >> fs [DROP_NIL] >> rw []
>- ( (* Exit *)
fs [llvmTheory.step_cases, get_instr_cases, step_instr_cases,
translate_instrs_def, take_to_call_def, classify_instr_def,
translate_instr_to_term_def, translate_instr_to_inst_def,
llvmTheory.get_obs_cases] >>
simp [Once step_block_cases, step_term_cases, PULL_EXISTS, step_inst_cases] >>
drule translate_arg_correct >>
disch_then drule >> impl_tac
>- (
`get_instr prog s1.ip (Inl (Exit a))` by rw [get_instr_cases] >>
drule get_instr_live >>
simp [uses_cases, SUBSET_DEF, IN_DEF, PULL_EXISTS] >>
rw [] >> first_x_assum irule >>
disj1_tac >>
metis_tac [instr_uses_def]) >>
rw [] >>
qexists_tac `s1' with status := Complete code` >>
qexists_tac `[Exit code]` >>
rw []
>- (
rfs [translate_instrs_def, classify_instr_def] >>
rw [translate_instr_to_term_def] >>
fs [v_rel_cases] >> fs [signed_v_to_int_def] >> metis_tac []) >>
rw [state_rel_def] >>
metis_tac [mem_state_rel_exited]) >>
fs [take_to_call_def] >>
rfs [] >>
fs [translate_instrs_def] >>
Cases_on `el idx b.body` >> fs [terminator_def, classify_instr_def, translate_trace_def] >> rw []
>- fs [is_implemented_def]
>- ( (* Br *)
simp [translate_instr_to_term_def, Once step_block_cases] >>
simp [step_term_cases, PULL_EXISTS, RIGHT_AND_OVER_OR, EXISTS_OR_THM] >>
pairarg_tac >> rw [] >>
fs [llvmTheory.step_cases] >>
drule get_instr_live >> disch_tac >>
drule translate_arg_correct >>
fs [step_instr_cases] >> fs [] >>
TRY (fs [get_instr_cases] >> NO_TAC) >>
`a = a'` by fs [get_instr_cases] >>
disch_then drule >>
impl_tac
>- (
fs [SUBSET_DEF, IN_DEF] >> rfs [uses_cases, get_instr_cases, instr_uses_def] >>
fs [IN_DEF]) >>
disch_tac >> fs [] >>
fs [v_rel_cases, GSYM PULL_EXISTS] >>
GEN_EXISTS_TAC "idx'" `w2i tf` >> simp [GSYM PULL_EXISTS] >> conj_tac
>- metis_tac [] >>
rename1 `el _ _ = Br e lab1 lab2` >>
qpat_abbrev_tac `target = if tf = 0w then l2 else l1` >>
`last b.body = Br e l1 l2 ∧
<|f := s1.ip.f; b := Some target; i := Phi_ip s1.ip.b|> ∈ next_ips prog s1.ip`
by (
fs [prog_ok_def, get_instr_cases] >>
last_x_assum drule >> disch_then drule >>
strip_tac >> conj_asm1_tac
>- (
CCONTR_TAC >>
`Br a l1 l2 ∈ set (front (b.body))`
by (
`mem (Br a l1 l2) (front b.body ++ [last b.body])`
by metis_tac [EL_MEM, APPEND_FRONT_LAST] >>
fs [] >> metis_tac []) >>
fs [EVERY_MEM] >> first_x_assum drule >> rw [terminator_def])
>- (
rw [next_ips_cases, IN_DEF, assigns_cases] >>
disj1_tac >>
qexists_tac `Br a l1 l2` >>
rw [instr_next_ips_def, Abbr `target`] >>
fs [get_instr_cases, instr_to_labs_def] >>
metis_tac [blockHeader_nchotomy])) >>
qmatch_goalsub_abbrev_tac `state_rel _ _ _ _ (_ with bp := target')` >>
rw [state_rel_def]
>- (
fs [get_instr_cases] >>
`every (λlab. ∃b phis landing. alookup d.blocks (Some lab) = Some b ∧ b.h = Head phis landing)
(instr_to_labs (last b.body))`
by (fs [prog_ok_def, EVERY_MEM] >> metis_tac []) >>
rfs [instr_to_labs_def] >>
rw [Once pc_rel_cases, get_instr_cases, get_block_cases, PULL_EXISTS] >>
fs [GSYM PULL_EXISTS, Abbr `target`] >>
rw [MEM_MAP, instr_to_labs_def] >>
`s1.ip.b = option_map Lab l' ∧ dest_fn s1.ip.f = f`
by (
Cases_on `s1.ip.b` >>
fs [translate_label_def] >>
Cases_on `x` >>
fs [translate_label_def]) >>
rw [OPTION_MAP_COMPOSE, combinTheory.o_DEF, dest_label_def, Abbr
`target'`, word_0_w2i, METIS_PROVE [w2i_eq_0] ``∀w. 0 = w2i w ⇔ w = 0w``] >>
TRY (Cases_on `l'` >> rw [] >> NO_TAC))
>- (
fs [mem_state_rel_def, local_state_rel_def, emap_invariant_def] >> rw []
>- (
qpat_x_assum `∀r. r ∈ live _ _ ⇒ P r` mp_tac >>
simp [Once live_gen_kill] >> disch_then (qspec_then `r` mp_tac) >>
impl_tac >> rw [] >>
rw [PULL_EXISTS] >>
disj1_tac >>
qexists_tac `<|f := s1.ip.f; b := Some target; i := Phi_ip s1.ip.b|>` >>
rw [] >>
rw [IN_DEF, assigns_cases] >>
CCONTR_TAC >> fs [] >>
imp_res_tac get_instr_func >> fs [] >> rw [] >>
fs [instr_assigns_def])
>- (
fs [reachable_def] >>
qexists_tac `path ++ [<|f := s1.ip.f; b := Some target; i := Phi_ip s1.ip.b|>]` >>
rw_tac std_ss [good_path_append, GSYM APPEND] >> rw []
>- (rw [Once good_path_cases] >> fs [next_ips_cases, IN_DEF] >> metis_tac [])
>- metis_tac [ip_equiv_next_ips, ip_equiv_sym]
>- metis_tac [ip_equiv_refl])))
>- fs [is_implemented_def]
>- fs [is_implemented_def]
>- ( (* Exit *)
fs [llvmTheory.step_cases, get_instr_cases, step_instr_cases])
>- fs [is_implemented_def])
>- ( (* Call *)
rename1 `Inl instr` >>
`is_implemented instr`
by (fs [get_instr_cases] >> metis_tac [EVERY_MEM, EL_MEM]) >>
Cases_on `instr` >>
fs [is_call_def, is_implemented_def])
>- ( (* Stuck *)
rw [translate_trace_def] >>
(* TODO: need to know that stuck LLVM instructions translate to stuck
* llair instructions. This will follow from knowing that when a llair
* instruction takes a step, the LLVM source can take the same step, ie,
* the backward direction of the proof. *)
cheat))
>- ( (* Middle of the block *)
fs [llvmTheory.step_cases] >> TRY (fs [get_instr_cases] >> NO_TAC) >>
`i' = i` by metis_tac [get_instr_func, INL_11] >> fs [] >>
rename [`step_instr _ _ _ _ s2`, `state_rel _ _ _ s3 _`,
`mem_state_rel _ _ _ s1 s1'`] >>
Cases_on `∃r t. classify_instr i = Exp r t` >> fs [] >>
`is_implemented i`
by (fs [get_instr_cases] >> metis_tac [EVERY_MEM, EL_MEM])
>- ( (* instructions that compile to expressions *)
drule translate_instr_to_exp_correct >>
ntac 6 (disch_then drule) >>
rw [] >> fs [translate_trace_def] >>
`reachable prog (inc_pc s1.ip)`
by metis_tac [prog_ok_nonterm, next_ips_reachable, mem_state_rel_def] >>
first_x_assum drule >>
simp [inc_pc_def, inc_bip_def] >>
disch_then drule >>
`take_to_call (drop idx b.body) = i :: take_to_call (drop (idx + 1) b.body)`
by (
irule take_to_call_lem >> simp [] >>
fs [get_instr_cases]) >>
`num_calls (take (idx + 1) b.body) = num_calls (take idx b.body)`
by (fs [get_instr_cases] >> rw [num_calls_def, TAKE_EL_SNOC, FILTER_SNOC]) >>
fs [translate_instrs_def, inc_translate_label] >>
Cases_on `r ∉ regs_to_keep` >> fs [] >> rw []
>- (
`emap = emap |+ (r,translate_instr_to_exp gmap emap i)`
by (
fs [good_emap_def, fmap_eq_flookup, FLOOKUP_UPDATE] >>
rw [] >> metis_tac []) >>
metis_tac []) >>
pairarg_tac >> fs [] >> rw [] >>
`emap |+ (r,Var (translate_reg r t) F) = emap`
by (
fs [good_emap_def, fmap_eq_flookup, FLOOKUP_UPDATE] >>
rw [] >>
`(r,t) ∈ assigns prog s1.ip`
by (
drule exp_assigns_sing >>
disch_then drule >>
rw []) >>
metis_tac [FST, SND, not_exp_def]) >>
fs [] >>
rename1 `translate_instrs _ _ _ _ _ = (bs, emap1)` >>
Cases_on `bs` >> fs [add_to_first_block_def] >>
rename1 `translate_instrs _ _ _ _ _ = (b1::bs, _)` >>
Cases_on `b1` >> fs [add_to_first_block_def] >> rw [] >>
rename1 `state_rel prog gmap emap3 s3 s3'` >>
qexists_tac `s3'` >> rw [] >>
qexists_tac `Tau::tr'` >> rw [] >>
simp [Once step_block_cases] >>
metis_tac [])
>- ( (* Non-expression instructions *)
Cases_on `classify_instr i` >> fs [classify_instr_term_call]
>- (
drule translate_instr_to_inst_correct >>
ntac 6 (disch_then drule) >>
strip_tac >> fs [] >>
first_x_assum drule >> simp [inc_pc_def, inc_bip_def] >>
disch_then (qspecl_then [`regs_to_keep`] mp_tac) >> simp [] >>
strip_tac >>
`take_to_call (drop idx b.body) = i :: take_to_call (drop (idx + 1) b.body)`
by (
irule take_to_call_lem >> simp [] >>
fs [get_instr_cases]) >>
`num_calls (take (idx + 1) b.body) = num_calls (take idx b.body)`
by (fs [get_instr_cases] >> rw [num_calls_def, TAKE_EL_SNOC, FILTER_SNOC]) >>
fs [translate_instrs_def, inc_translate_label] >>
pairarg_tac >> fs [] >>
`extend_emap_non_exp emap i = emap`
by (
Cases_on `∀r t. instr_assigns i ≠ {(r,t)}`
>- metis_tac [extend_emap_non_exp_no_assigns] >>
fs [] >>
drule extend_emap_non_exp_assigns >>
rw [] >> fs [good_emap_def] >>
first_x_assum (qspec_then `s1.ip` mp_tac) >> rw [] >>
last_x_assum (qspec_then `i` mp_tac) >>
simp [not_exp_def] >>
disch_then (qspec_then `(r,t)` mp_tac) >>
impl_tac
>- (
fs [IN_DEF, assigns_cases, EXTENSION] >>
metis_tac []) >>
rw [fmap_eq_flookup, FLOOKUP_UPDATE] >> rw [] >> rw []) >>
fs [] >>
rename1 `translate_instrs _ _ _ _ _ = (bs, emap1)` >>
Cases_on `bs` >> fs [add_to_first_block_def] >>
rename1 `translate_instrs _ _ _ _ _ = (b1::bs, _)` >>
Cases_on `b1` >> fs [add_to_first_block_def] >> fs [] >>
rw [] >>
rename1 `state_rel prog gmap emap3 s3 s3'` >>
qexists_tac `s3'` >> simp [] >>
qexists_tac `translate_trace gmap l::tr'` >> rw [] >>
simp [Once step_block_cases]
>- (disj2_tac >> qexists_tac `s2'` >> rw [])
>- (disj2_tac >> qexists_tac `s2'` >> rw [])
>- metis_tac [])
>- metis_tac [classify_instr_term_call]))
QED
(* For phis executed on entry from block `from_l`, the LLVM-side updates
   produced by do_phi correspond to llair expressions/values: the assigned
   registers line up with phi_assigns, and the translated phi arguments
   evaluate (eval_exp) to v_rel-related values.  Requires the phi uses to be
   live at the current instruction pointer. *)
Theorem do_phi_vals:
  ∀prog gmap emap from_l s s' phis updates.
    mem_state_rel prog gmap emap s s' ∧
    list_rel (do_phi from_l s) phis updates ∧
    BIGUNION (set (map (phi_uses from_l) phis)) ⊆ live prog s.ip
    ⇒
    ∃es vs.
      list_rel v_rel (map (λx. (snd x).value) updates) vs ∧
      list_rel (eval_exp s') es vs ∧
      map fst updates = map fst (map phi_assigns phis) ∧
      map (λx. case x of Phi r t largs =>
        case option_map (λarg. translate_arg gmap emap arg) (alookup largs from_l) of
          None => (translate_reg r t,Nondet)
        | Some e => (translate_reg r t,e))
        phis
      = map2 (\p. λe. case p of Phi r t largs => (translate_reg r t, e)) phis es
Proof
  Induct_on `phis` >> rw [] >> fs [] >>
  first_x_assum drule >> disch_then drule >> rw [PULL_EXISTS] >>
  Cases_on `h` >> fs [do_phi_cases] >>
  drule translate_arg_correct >>
  disch_then drule >>
  impl_tac
  >- (fs [phi_uses_def] >> rfs []) >>
  rw [PULL_EXISTS, phi_assigns_def] >> metis_tac []
QED

(* A case-split on a Phi commutes with any function application. *)
Triviality case_phi_lift:
  ∀f g. f (case x of Phi x y z => g x y z) = case x of Phi x y z => f (g x y z)
Proof
  Cases_on `x` >> rw []
QED

(* Pair eta-expansion is the identity function. *)
Triviality id2:
  (λ(v,r). (v,r)) = I
Proof
  rw [FUN_EQ_THM] >> Cases_on `x` >> rw []
QED

(* Invert the map2 equation produced by do_phi_vals: recover the expression
   list `es` from the per-phi translated arguments. *)
Theorem build_phi_block_correct_helper[local]:
  ∀phis es.
    map (λx. case x of
          Phi r t largs =>
            case option_map (λarg. translate_arg gmap emap arg) (alookup largs from_l) of
              None => (translate_reg r t,Nondet)
            | Some e => (translate_reg r t,e)) phis =
    map2 (λp e. case p of Phi r t largs => (translate_reg r t,e)) phis es ∧
    length phis = length es
    ⇒
    es = map (λx. case x of Phi r t largs =>
           case option_map (λarg. translate_arg gmap emap arg) (alookup largs from_l) of
             None => Nondet
           | Some e => e)
          phis
Proof
  Induct >> rw [] >> Cases_on `es` >> fs [] >>
  CASE_TAC >> fs [] >> CASE_TAC >> fs []
QED
(* Executing the generated move block for the phis of target block `to_l`
   simulates the LLVM phi step: it takes exactly two Tau-labelled llair
   steps, lands on the translated label for `to_l`, and re-establishes
   mem_state_rel for the LLVM state with the phi updates applied and the
   program counter advanced. *)
Theorem build_phi_block_correct:
  ∀prog s1 s1' to_l from_l phis updates f gmap emap entry bloc regs_to_keep.
    prog_ok prog ∧ is_ssa prog ∧
    good_emap s1.ip.f prog regs_to_keep gmap emap ∧
    get_instr prog s1.ip (Inr (from_l,phis)) ∧
    list_rel (do_phi from_l s1) phis updates ∧
    mem_state_rel prog gmap emap s1 s1' ∧
    BIGUNION (set (map (phi_uses from_l) phis)) ⊆ live prog s1.ip ∧
    bloc = generate_move_block f gmap emap phis from_l to_l
    ⇒
    ∃s2'.
      s2'.bp = translate_label f (Some to_l) 0 ∧
      step_block (translate_prog prog) s1' bloc.cmnd bloc.term [Tau; Tau] s2' ∧
      mem_state_rel prog gmap emap
        (inc_pc (s1 with locals := s1.locals |++ updates)) s2'
Proof
  rw [translate_header_def, generate_move_block_def] >>
  rw [Once step_block_cases] >>
  rw [Once step_block_cases] >>
  rw [step_term_cases, PULL_EXISTS] >>
  simp [Once eval_exp_cases, truncate_2comp_def] >>
  drule do_phi_vals >> ntac 2 (disch_then drule) >>
  rw [] >> drule build_phi_block_correct_helper >>
  pop_assum kall_tac >>
  `length phis = length es` by metis_tac [LENGTH_MAP, LIST_REL_LENGTH] >>
  disch_then drule >>
  rw [] >> fs [LIST_REL_MAP1, combinTheory.o_DEF, case_phi_lift] >>
  simp [step_inst_cases, PULL_EXISTS] >>
  qexists_tac `0` >> qexists_tac `vs` >> rw []
  >- (
    simp [LIST_REL_MAP1, combinTheory.o_DEF] >> fs [LIST_REL_EL_EQN] >>
    rw [] >>
    first_x_assum (qspec_then `n` mp_tac) >> simp [] >>
    CASE_TAC >> simp [] >> CASE_TAC >> simp [build_move_for_lab_def] >>
    CASE_TAC >> simp [] >> fs []) >>
  fs [header_to_emap_upd_def] >>
  simp [llvmTheory.inc_pc_def, update_results_def] >>
  `s1.ip with i := inc_bip s1.ip.i ∈ next_ips prog s1.ip`
  by (
    simp [next_ips_cases, IN_DEF, inc_pc_def] >> disj2_tac >>
    qexists_tac `from_l` >> qexists_tac `phis` >>
    fs [get_instr_cases, EXISTS_OR_THM, inc_bip_def, prog_ok_def] >>
    res_tac >> Cases_on `b.body` >> fs []) >>
  fs [mem_state_rel_def] >> rw []
  >- (
    first_assum (mp_then.mp_then mp_then.Any mp_tac local_state_rel_updates_keep) >>
    rpt (disch_then (fn x => first_assum (mp_then.mp_then mp_then.Any mp_tac x))) >>
    disch_then
      (qspecl_then [`map (λ(x:phi). case x of Phi r t _ => (r,t)) phis`,
                    `map snd updates`, `vs`] mp_tac) >>
    simp [] >> impl_tac >> rw [id2]
    >- (
      fs [prog_ok_def] >>
      first_x_assum drule >>
      simp [MAP_MAP_o, combinTheory.o_DEF] >>
      `(λp. case p of Phi r v1 v2 => r) = (λx. fst (case x of Phi r t v2 => (r,t)))`
      by (rw [FUN_EQ_THM] >> CASE_TAC >> rw []) >>
      rw [])
    >- (
      rw [EXTENSION, IN_DEF, assigns_cases] >> eq_tac >> rw [] >> fs [LIST_TO_SET_MAP]
      >- (
        disj2_tac >> qexists_tac `from_l` >> qexists_tac `phis` >> rw [] >>
        HINT_EXISTS_TAC >> CASE_TAC >> rw [phi_assigns_def])
      >- metis_tac [get_instr_func, sum_distinct]
      >- (
        rw [] >> rename1 `mem x1 phis1` >>
        qexists_tac `x1` >> Cases_on `x1` >> rw [phi_assigns_def] >>
        metis_tac [get_instr_func, INR_11, PAIR_EQ]))
    >- (
      rw [assigns_cases, EXTENSION, IN_DEF] >>
      metis_tac [get_instr_func, sum_distinct, INR_11, PAIR_EQ])
    >- metis_tac [LENGTH_MAP]
    >- rw [LIST_REL_MAP1, combinTheory.o_DEF] >>
    `map fst (map (λx. case x of Phi r t v2 => (r,t)) phis) =
     map fst (map phi_assigns phis)`
    by (rw [LIST_EQ_REWRITE, EL_MAP] >> CASE_TAC >> rw [phi_assigns_def]) >>
    fs [MAP_MAP_o, combinTheory.o_DEF, case_phi_lift] >>
    `zip (map (λx. fst (phi_assigns x)) phis, map snd updates) = updates`
    by (
      qpat_x_assum `map fst _ = map (λx. fst (phi_assigns x)) _` mp_tac >>
      simp [LIST_EQ_REWRITE, EL_MAP] >>
      `length phis = length updates` by metis_tac [LIST_REL_LENGTH] >>
      rw [EL_ZIP, LENGTH_MAP, EL_MAP] >>
      rename1 `_ = el n updates` >>
      first_x_assum drule >>
      Cases_on `el n updates` >> rw []) >>
    `(λx. case x of Phi r t v2 => translate_reg r t) = (λx. fst (build_move_for_lab gmap emap from_l x))`
    by (
      rw [FUN_EQ_THM] >>
      CASE_TAC >> rw [build_move_for_lab_def] >> CASE_TAC >> rw []) >>
    fs [])
  >- (irule next_ips_reachable >> qexists_tac `s1.ip` >> rw [])
QED
(* Translating only the prefix of a block body up to the first call
   (take_to_call) yields exactly the first llair block obtained from
   translating the whole body.  Case split follows classify_instr. *)
Theorem translate_instrs_take_to_call:
  ∀l gmap emap regs body.
    body ≠ [] ∧ terminator (last body) ⇒
    fst (translate_instrs l gmap emap regs (take_to_call body)) =
    [HD (fst (translate_instrs l gmap emap regs body))]
Proof
  Induct_on `body` >> rw [translate_instrs_def, take_to_call_def] >>
  rename1 `classify_instr inst` >> Cases_on `classify_instr inst` >> fs [] >>
  fs [] >> rw [] >> fs []
  >- metis_tac [classify_instr_lem, instr_class_distinct]
  >- metis_tac [classify_instr_lem, instr_class_distinct]
  >- metis_tac [classify_instr_lem, instr_class_distinct]
  >- (
    Cases_on `body` >> fs []
    >- rw [translate_instrs_def] >>
    pairarg_tac >> rw [])
  >- metis_tac [classify_instr_lem, instr_class_distinct]
  >- metis_tac [classify_instr_lem, instr_class_distinct]
  >- metis_tac [classify_instr_lem, instr_class_distinct]
  >- (
    Cases_on `body` >> fs []
    >- rw [translate_instrs_def] >>
    pairarg_tac >> rw [])
  >- (
    `body ≠ []` by (Cases_on `body` >> fs []) >>
    fs [LAST_DEF] >>
    pairarg_tac >> fs [] >> pairarg_tac >> fs [] >>
    `bs = [HD bs']` by metis_tac [FST] >>
    Cases_on `bs'` >> fs []
    >- metis_tac [translate_instrs_not_empty] >>
    Cases_on `h` >> fs [add_to_first_block_def])
  >- (
    `body ≠ []` by (Cases_on `body` >> fs []) >>
    fs [LAST_DEF] >>
    pairarg_tac >> fs [])
  >- (
    `body ≠ []` by (Cases_on `body` >> fs []) >>
    fs [LAST_DEF] >>
    pairarg_tac >> fs [] >> pairarg_tac >> fs [] >>
    `bs = [HD bs']` by metis_tac [FST] >>
    Cases_on `bs'` >> fs []
    >- metis_tac [translate_instrs_not_empty] >>
    Cases_on `h` >> fs [add_to_first_block_def])
  >- metis_tac [classify_instr_lem, instr_class_distinct]
QED
(* Forward simulation, one observable step: an LLVM multi_step from a
   Partial state is matched by one llair step_block on some block of the
   translated program, producing a related state and the same non-Tau
   trace (up to translate_trace).  Split on whether the current
   instruction is a phi.  NOTE: the final "eval gets stuck" case is still
   a cheat (see TODO below). *)
Theorem multi_step_to_step_block:
  ∀prog emap s1 tr s2 s1'.
    prog_ok prog ∧ is_ssa prog ∧
    dominator_ordered prog ∧
    good_emap s1.ip.f prog (get_regs_to_keep (THE (alookup prog s1.ip.f))) (get_gmap prog) emap ∧
    multi_step prog s1 tr s2 ∧
    s1.status = Partial ∧
    state_rel prog (get_gmap prog) emap s1 s1' ∧
    every (\(l, d). every (\(l, b). every is_implemented b.body) d.blocks) prog
    ⇒
    ∃s2' b tr'.
      get_block (translate_prog prog) s1'.bp b ∧
      step_block (translate_prog prog) s1' b.cmnd b.term tr' s2' ∧
      filter ($≠ Tau) tr' = filter ($≠ Tau) (map (translate_trace (get_gmap prog)) tr) ∧
      state_rel prog (get_gmap prog) emap s2 s2'
Proof
  rw [] >> ntac 2 (pop_assum mp_tac) >> simp [Once state_rel_def] >> rw [Once pc_rel_cases]
  >- (
    (* Non-phi instruction *)
    drule translate_instrs_correct1 >> simp [] >>
    disch_then drule >>
    disch_then drule >>
    rfs [] >> disch_then drule >>
    impl_tac
    >- (
      fs [EVERY_MEM] >> rw [] >>
      `mem (s1.ip.f, d) prog` by fs [alookup_some] >>
      first_x_assum drule >> rw [] >>
      `mem (s1.ip.b, b) d.blocks` by fs [alookup_some] >>
      first_x_assum drule >> rw []) >>
    rw [] >>
    rename1 `step_block _ s1' b1.cmnd b1.term tr1 s2'` >>
    qexists_tac `s2'` >> qexists_tac `b1` >> qexists_tac `tr1` >> simp []) >>
  (* Phi instruction *)
  reverse (fs [Once multi_step_cases])
  >- metis_tac [get_instr_func, sum_distinct] >>
  qpat_x_assum `last_step _ _ _ _` mp_tac >>
  simp [last_step_cases] >> strip_tac
  >- (
    fs [llvmTheory.step_cases]
    >- metis_tac [get_instr_func, sum_distinct] >>
    fs [translate_trace_def] >> rw [] >>
    `(from_l', phis') = (from_l, phis) ∧ x = (from_l, phis)` by metis_tac [get_instr_func, INR_11] >>
    fs [] >> rw [] >>
    rfs [MEM_MAP] >>
    Cases_on `s1.ip.f` >> fs [dest_fn_def] >>
    drule get_block_translate_prog_mov >> rpt (disch_then drule) >> rw [PULL_EXISTS] >>
    `∃block l. alookup d.blocks (Some (Lab to_l)) = Some block ∧ block.h = Head phis l`
    by (
      fs [prog_ok_def, EVERY_MEM] >>
      last_x_assum drule >> disch_then drule >> rw [] >>
      first_x_assum drule >> rw [] >>
      rw [] >>
      fs [get_instr_cases] >>
      rfs [] >> rw [] >> fs []) >>
    `every (λ(l,b). every is_implemented b.body) d.blocks`
    by (
      `mem (Fn s, d) prog` by fs [alookup_some] >>
      fs [EVERY_MEM] >> rw [] >> pairarg_tac >> fs [] >> rw [] >>
      first_x_assum drule >> simp [] >>
      disch_then drule >> simp []) >>
    first_x_assum drule >> rw [] >>
    qmatch_assum_abbrev_tac `get_block _ _ bloc` >>
    GEN_EXISTS_TAC "b" `bloc` >>
    rw [] >>
    qpat_x_assum `_ = Fn _` (assume_tac o GSYM) >> fs [] >>
    drule build_phi_block_correct >> rpt (disch_then drule) >>
    simp [Abbr `bloc`] >>
    disch_then (qspecl_then [`Lab to_l`, `s`] mp_tac) >>
    simp [] >>
    impl_tac
    >- (
      drule get_instr_live >> rw [SUBSET_DEF, uses_cases, IN_DEF] >>
      first_x_assum irule >> disj2_tac >> metis_tac []) >>
    rw [] >>
    qexists_tac `s2'` >>
    rw [CONJ_ASSOC, LEFT_EXISTS_AND_THM, RIGHT_EXISTS_AND_THM]
    >- (qexists_tac `[Tau; Tau]` >> rw []) >>
    fs [state_rel_def] >> rw [] >>
    fs [llvmTheory.inc_pc_def] >>
    fs [pc_rel_cases, get_instr_cases, PULL_EXISTS, translate_label_def,
        dest_fn_def, inc_bip_def, label_to_fname_def] >>
    fs [] >> rw [] >> fs [get_block_cases, PULL_EXISTS, label_to_fname_def] >>
    rfs [] >> rw [] >>
    qpat_x_assum `Fn _ = _` (assume_tac o GSYM) >> fs [] >>
    drule alookup_translate_prog >> rw [] >>
    rw [GSYM PULL_EXISTS, dest_fn_def]
    >- (fs [prog_ok_def] >> res_tac >> fs [] >> Cases_on `b'.body` >> fs []) >>
    rw [PULL_EXISTS, translate_def_def] >>
    `b'.body ≠ [] ∧ terminator (last b'.body) ∧
     every (λi. ¬terminator i) (front b'.body) ∧
     every (λb. (snd b).h = Entry ⇔ fst b = None) d.blocks ∧
     0 ≤ num_calls b'.body`
    by (
      fs [prog_ok_def] >> res_tac >> fs [] >>
      fs [EVERY_MEM]) >>
    qmatch_goalsub_abbrev_tac `translate_blocks f gmap _ regs edg _` >>
    `translate_blocks f gmap fempty regs edg d.blocks = translate_blocks f gmap emap regs edg d.blocks`
    by (
      irule translate_blocks_emap_restr_live >>
      unabbrev_all_tac >> rw []
      >- metis_tac [prog_ok_terminator_last]
      >- (
        fs [prog_ok_def] >> fs [EVERY_MEM] >> rw [] >>
        pairarg_tac >> rw [] >> metis_tac [FST, SND])
      >- metis_tac [dominator_ordered_linear_live, similar_emap_def, DRESTRICT_IS_FEMPTY]) >>
    rw [] >>
    drule alookup_translate_blocks >> rpt (disch_then drule) >>
    impl_tac
    >- (
      fs [prog_ok_def, EVERY_MEM] >> rw [] >>
      irule ALOOKUP_ALL_DISTINCT_MEM >> rw [] >>
      imp_res_tac ALOOKUP_MEM >>
      res_tac >> fs []) >>
    simp [translate_label_def] >>
    rw [] >> rw [dest_label_def, num_calls_def] >>
    rw [translate_instrs_take_to_call] >>
    qmatch_goalsub_abbrev_tac `_ = HD (fst (translate_instrs a1 b1 c1 d1 e1))` >>
    Cases_on `translate_instrs a1 b1 c1 d1 e1` >> rw [] >>
    rename1 `_ = HD bl` >> Cases_on `bl` >> rw []
    >- metis_tac [translate_instrs_not_empty, classify_instr_lem] >>
    rename1 `(_,_) = bl` >> Cases_on `bl` >> rw [] >>
    metis_tac [translate_instrs_first_lab])
  >- metis_tac [get_instr_func, sum_distinct]
  >- metis_tac [get_instr_func, sum_distinct]
  >- (
    (* TODO: LLVM "eval" gets stuck *)
    cheat)
QED
(* Backward simulation: a llair step_block is matched by an LLVM
   multi_step whose trace untranslates pointwise.  Proof not yet done
   (cheat below). *)
Theorem step_block_to_multi_step:
  ∀prog s1 s1' tr s2' b.
    state_rel prog gmap emap s1 s1' ∧
    get_block (translate_prog prog) s1'.bp b ∧
    step_block (translate_prog prog) s1' b.cmnd b.term tr s2'
    ⇒
    ∃s2.
      multi_step prog s1 (map untranslate_trace tr) s2 ∧
      state_rel prog gmap emap s2 s2'
Proof
  (* TODO, LLVM can simulate llair direction *)
  cheat
QED

(* Trace translation maps non-Tau events to non-Tau events and Tau to Tau,
   so filtering out Tau commutes with translation. *)
Theorem trans_trace_not_tau:
  ∀types. ($≠ Tau) ∘ translate_trace types = ($≠ Tau)
Proof
  rw [FUN_EQ_THM] >> eq_tac >> rw [translate_trace_def] >>
  TRY (Cases_on `y`) >> fs [translate_trace_def]
QED

(* Same property for the reverse (untranslation) direction. *)
Theorem untrans_trace_not_tau:
  ∀types. ($≠ Tau) ∘ untranslate_trace = ($≠ Tau)
Proof
  rw [FUN_EQ_THM] >> eq_tac >> rw [untranslate_trace_def] >>
  TRY (Cases_on `y`) >> fs [untranslate_trace_def]
QED
(* Lift the per-step forward simulation to finite paths: every finite
   multi_step path of the LLVM program has a matching llair path with the
   same non-Tau labels (translated) and related endpoints.  NOTE: relies
   on a cheat that the function component of the ip is constant along the
   path, which breaks once calls are added (see TODO below). *)
Theorem translate_prog_correct_lem1:
  ∀path.
    okpath (multi_step prog) path ∧ finite path
    ⇒
    ∀emap s1'.
      prog_ok prog ∧
      is_ssa prog ∧
      dominator_ordered prog ∧
      good_emap (first path).ip.f prog (get_regs_to_keep (THE (alookup prog (first path).ip.f))) (get_gmap prog) emap ∧
      state_rel prog (get_gmap prog) emap (first path) s1' ∧
      every (\(l, d). every (\(l, b). every is_implemented b.body) d.blocks) prog
      ⇒
      ∃path'.
        finite path' ∧
        okpath (step (translate_prog prog)) path' ∧
        first path' = s1' ∧
        LMAP (filter ($≠ Tau)) (labels path') =
        LMAP (map (translate_trace (get_gmap prog)) o filter ($≠ Tau)) (labels path) ∧
        state_rel prog (get_gmap prog) emap (last path) (last path')
Proof
  ho_match_mp_tac finite_okpath_ind >> rw []
  >- (qexists_tac `stopped_at s1'` >> rw [] >> metis_tac []) >>
  fs [] >>
  rename1 `state_rel _ _ _ s1 s1'` >>
  Cases_on `s1.status ≠ Partial`
  >- fs [Once multi_step_cases, llvmTheory.step_cases, last_step_cases] >>
  fs [] >>
  drule multi_step_to_step_block >> simp [] >>
  rpt (disch_then drule) >> rw [] >>
  (* TODO: this won't be true once calls are added *)
  `s1.ip.f = (first path).ip.f` by cheat >>
  fs [] >>
  first_x_assum drule >> disch_then drule >> rw [] >>
  qexists_tac `pcons s1' tr' path'` >> rw [] >>
  rw [FILTER_MAP, combinTheory.o_DEF, trans_trace_not_tau] >>
  simp [step_cases] >> qexists_tac `b` >> simp [] >>
  qpat_x_assum `state_rel _ _ _ _ s1'` mp_tac >>
  rw [state_rel_def, mem_state_rel_def]
QED

(* Lift the per-step backward simulation to finite llair paths; depends on
   step_block_to_multi_step (currently cheated). *)
Theorem translate_prog_correct_lem2:
  ∀path'.
    okpath (step (translate_prog prog)) path' ∧ finite path'
    ⇒
    ∀s1.
      prog_ok prog ∧
      state_rel prog gmap emap s1 (first path')
      ⇒
      ∃path.
        finite path ∧
        okpath (multi_step prog) path ∧
        first path = s1 ∧
        labels path = LMAP (map untranslate_trace) (labels path') ∧
        state_rel prog gmap emap (last path) (last path')
Proof
  ho_match_mp_tac finite_okpath_ind >> rw []
  >- (qexists_tac `stopped_at s1` >> rw []) >>
  fs [step_cases] >>
  drule step_block_to_multi_step >> ntac 2 (disch_then drule) >> rw [] >>
  first_x_assum drule >> rw [] >>
  qexists_tac `pcons s1 (map untranslate_trace r) path` >> rw []
QED
(* A global variable name appearing in the observable trace of a llair
   execution determines its type (needed for injectivity of trace
   untranslation).  Proof not yet done (cheat below). *)
Theorem translate_global_var_11:
  ∀path.
    okpath (step (translate_prog prog)) path ∧ finite path
    ⇒
    ∀x t1 bytes t2 l.
      labels path = fromList l ∧
      MEM (W (Var_name x t1) bytes) (flat l) ∧
      MEM (W (Var_name x t2) bytes) (flat l)
      ⇒
      t1 = t2
Proof
  (* TODO, LLVM can simulate llair direction *)
  cheat
QED

(* Filtering a prefix gives a prefix: the Tau-free part of `lsub` is the
   corresponding take of the Tau-free part of `l`. *)
Theorem prefix_take_filter_lemma:
  ∀l lsub.
    lsub ≼ l
    ⇒
    filter (λy. Tau ≠ y) lsub =
    take (length (filter (λy. Tau ≠ y) lsub)) (filter (λy. Tau ≠ y) l)
Proof
  Induct_on `lsub` >> rw [] >>
  Cases_on `l` >> fs [] >> rw []
QED

(* A multi_step that leaves the Partial status must end its trace with an
   Exit or Error observation. *)
Theorem multi_step_lab_label:
  ∀prog s1 ls s2.
    multi_step prog s1 ls s2 ⇒ s2.status ≠ Partial
    ⇒
    ∃ls'. (∃i. ls = ls' ++ [Exit i]) ∨ ls = ls' ++ [Error]
Proof
  ho_match_mp_tac multi_step_ind >> rw [] >> fs [] >>
  fs [last_step_cases, llvmTheory.step_cases, step_instr_cases,
      update_result_def, llvmTheory.inc_pc_def] >>
  rw [] >> fs []
QED

(* A prefix of l2 ++ [x] with the same number of P-elements (and P x) must
   be the whole list. *)
Theorem prefix_filter_len_eq:
  ∀l1 l2 x.
    l1 ≼ l2 ++ [x] ∧
    length (filter P l1) = length (filter P (l2 ++ [x])) ∧
    P x
    ⇒
    l1 = l2 ++ [x]
Proof
  Induct_on `l1` >> rw [FILTER_APPEND] >>
  Cases_on `l2` >> fs [] >> rw [] >> rfs [ADD1] >>
  first_x_assum irule >> rw [FILTER_APPEND]
QED
(* Top-level correctness: the observable multi-step semantics of the LLVM
   program equals the semantics of the translated llair program, up to
   pointwise untranslation of traces.  Combines the two path-lifting
   lemmas; the backward inclusion additionally needs injectivity of
   untranslate_trace on the traces that actually occur
   (translate_global_var_11). *)
Theorem translate_prog_correct:
  ∀prog s1 s1' emap.
    prog_ok prog ∧ is_ssa prog ∧ dominator_ordered prog ∧
    good_emap s1.ip.f prog (get_regs_to_keep (THE (alookup prog s1.ip.f))) (get_gmap prog) emap ∧
    state_rel prog (get_gmap prog) emap s1 s1' ∧
    every (\(l, d). every (\(l, b). every is_implemented b.body) d.blocks) prog
    ⇒
    multi_step_sem prog s1 = image (I ## map untranslate_trace) (sem (translate_prog prog) s1')
Proof
  rw [sem_def, multi_step_sem_def, EXTENSION] >> eq_tac >> rw []
  >- (
    drule translate_prog_correct_lem1 >> simp [] >>
    rpt (disch_then drule) >> rw [EXISTS_PROD] >>
    PairCases_on `x` >> rw [] >>
    qexists_tac `map (translate_trace (get_gmap prog)) x1` >> rw []
    >- rw [MAP_MAP_o, combinTheory.o_DEF, un_translate_trace_inv] >>
    qexists_tac `path'` >> rw [] >>
    fs [IN_DEF, observation_prefixes_cases, toList_some] >> rw [] >>
    `∃labs. labels path' = fromList labs`
    by (
      fs [GSYM finite_labels] >>
      imp_res_tac llistTheory.LFINITE_toList >>
      fs [toList_some]) >>
    fs [] >>
    rfs [lmap_fromList, combinTheory.o_DEF, MAP_MAP_o] >>
    simp [FILTER_FLAT, MAP_FLAT, MAP_MAP_o, combinTheory.o_DEF, FILTER_MAP]
    >- fs [state_rel_def, mem_state_rel_def]
    >- fs [state_rel_def, mem_state_rel_def] >>
    rename [`labels path' = fromList l'`, `labels path = fromList l`,
            `state_rel _ _ _ (last path) (last path')`, `lsub ≼ flat l`] >>
    Cases_on `lsub = flat l` >> fs []
    >- (
      qexists_tac `flat l'` >>
      rw [FILTER_FLAT, MAP_FLAT, MAP_MAP_o, combinTheory.o_DEF] >>
      fs [state_rel_def, mem_state_rel_def]) >>
    `filter (λy. Tau ≠ y) (flat l') = map (translate_trace (get_gmap prog)) (filter (λy. Tau ≠ y) (flat l))`
    by rw [FILTER_FLAT, MAP_FLAT, MAP_MAP_o, combinTheory.o_DEF, FILTER_MAP] >>
    qexists_tac `take_prop ($≠ Tau) (length (filter ($≠ Tau) lsub)) (flat l')` >>
    rw [] >> rw [GSYM MAP_TAKE]
    >- metis_tac [prefix_take_filter_lemma] >>
    CCONTR_TAC >> fs [] >>
    `(last path).status = (last path').status` by fs [state_rel_def, mem_state_rel_def] >>
    drule take_prop_eq >> strip_tac >>
    `length (filter (λy. Tau ≠ y) (flat l')) = length (filter (λy. Tau ≠ y) (flat l))`
    by rw [] >>
    fs [] >> drule filter_is_prefix >>
    disch_then (qspec_then `$≠ Tau` assume_tac) >>
    drule IS_PREFIX_LENGTH >> strip_tac >> fs [] >>
    `length (filter (λy. Tau ≠ y) lsub) = length (filter (λy. Tau ≠ y) (flat l))` by rw [] >>
    fs [] >> rw [] >>
    qspec_then `path` assume_tac finite_path_end_cases >> rfs [] >> fs [] >> rw []
    >- (`l = []` by metis_tac [llistTheory.fromList_EQ_LNIL] >> fs [] >> rfs []) >>
    rfs [labels_plink] >>
    rename1 `LAPPEND (labels path) [|last_l'|] = _` >>
    `toList (LAPPEND (labels path) [|last_l'|]) = Some l` by metis_tac [llistTheory.from_toList] >>
    drule llistTheory.toList_LAPPEND_APPEND >> strip_tac >>
    fs [llistTheory.toList_THM] >> rw [] >>
    drule multi_step_lab_label >> strip_tac >> rfs [] >> fs [] >>
    drule prefix_filter_len_eq >> rw [] >>
    qexists_tac `$≠ Tau` >> rw [])
  >- (
    fs [toList_some] >>
    drule translate_prog_correct_lem2 >> simp [] >>
    disch_then drule >> rw [] >>
    qexists_tac `path'` >> rw [] >>
    fs [IN_DEF, observation_prefixes_cases, toList_some] >> rw [] >>
    rfs [lmap_fromList] >>
    simp [GSYM MAP_FLAT, FILTER_MAP, untrans_trace_not_tau]
    >- fs [state_rel_def, mem_state_rel_def]
    >- fs [state_rel_def, mem_state_rel_def] >>
    qexists_tac `map untranslate_trace l2'` >>
    simp [GSYM MAP_FLAT, FILTER_MAP, untrans_trace_not_tau] >>
    `INJ untranslate_trace (set l2' ∪ set (flat l2)) UNIV`
    by (
      drule is_prefix_subset >> rw [SUBSET_DEF] >>
      `set l2' ∪ set (flat l2) = set (flat l2)` by (rw [EXTENSION] >> metis_tac []) >>
      simp [] >>
      simp [INJ_DEF] >> rpt gen_tac >>
      Cases_on `x` >> Cases_on `y` >> simp [untranslate_trace_def] >>
      Cases_on `a` >> Cases_on `a'` >> simp [untranslate_glob_var_def] >>
      metis_tac [translate_global_var_11]) >>
    fs [INJ_MAP_EQ_IFF, inj_map_prefix_iff] >> rw [] >>
    fs [state_rel_def, mem_state_rel_def])
QED

export_theory ();
| Standard ML | 5 | JacobBarthelmeh/infer | sledge/semantics/llvm_to_llair_sem_propScript.sml | [
"MIT"
] |
#!/bin/bash
# Build a CI docker image.
# Usage: ./build.sh IMAGE [additional docker-build flags...]
set -ex

image="$1"
# Validate BEFORE `shift`: under `set -e`, a failing `shift` (no positional
# parameters left) aborts the script, so the original order could exit
# without ever printing the usage message when no argument was given.
if [ -z "${image}" ]; then
  echo "Usage: $0 IMAGE"
  exit 1
fi
shift
function extract_version_from_image_name() {
  # Extract the version number following prefix $1 from the global $image
  # and export it into the variable named by $2 (e.g. "cuda" CUDA_VERSION).
  # Direct `export "$2"=value` replaces the original `eval export ...`,
  # which needlessly re-parsed the value through the shell (an injection
  # hazard if the image name ever contained shell metacharacters).
  export "$2"="$(echo "${image}" | perl -n -e"/$1(\d+(\.\d+)?(\.\d+)?)/ && print \$1")"
  # ${!2} is an indirect read of the variable just exported.
  if [ "x${!2}" = x ]; then
    echo "variable '$2' not correctly parsed from image='$image'"
    exit 1
  fi
}
function extract_all_from_image_name() {
  # Split $image on '-' and, for every "name<version>" field, export the
  # corresponding <NAME>_VERSION variable via extract_version_from_image_name.
  # `IFS='-' read -ra` confines the custom IFS to the read command itself,
  # so the global IFS is never mutated (the original saved/restored it by
  # hand), and unlike an unquoted expansion the fields are not subject to
  # accidental glob expansion.
  local parts part name vername
  IFS='-' read -r -a parts <<< "$image"
  for part in "${parts[@]}"; do
    name=$(echo "${part}" | perl -n -e"/([a-zA-Z]+)\d+(\.\d+)?(\.\d+)?/ && print \$1")
    vername="${name^^}_VERSION"
    # "py" is the odd one out, needs this special case
    if [ "x${name}" = xpy ]; then
      vername=ANACONDA_PYTHON_VERSION
    fi
    # skip non-conforming fields such as "pytorch", "linux" or "xenial" without version string
    if [ -n "${name}" ]; then
      extract_version_from_image_name "${name}" "${vername}"
    fi
  done
}
# Derive the base OS and its version from the image name.  The
# -xenial/-artful/-bionic/-focal suffixes are hardcoded shortcuts; other
# names must carry an explicit ubuntuNN.NN / centosN field.
if [[ "$image" == *-xenial* ]]; then
  UBUNTU_VERSION=16.04
elif [[ "$image" == *-artful* ]]; then
  UBUNTU_VERSION=17.10
elif [[ "$image" == *-bionic* ]]; then
  UBUNTU_VERSION=18.04
elif [[ "$image" == *-focal* ]]; then
  UBUNTU_VERSION=20.04
elif [[ "$image" == *ubuntu* ]]; then
  extract_version_from_image_name ubuntu UBUNTU_VERSION
elif [[ "$image" == *centos* ]]; then
  extract_version_from_image_name centos CENTOS_VERSION
fi
# Exactly one of UBUNTU_VERSION / CENTOS_VERSION is set at this point.
if [ -n "${UBUNTU_VERSION}" ]; then
  OS="ubuntu"
elif [ -n "${CENTOS_VERSION}" ]; then
  OS="centos"
else
  echo "Unable to derive operating system base..."
  exit 1
fi
# CUDA and ROCm images build from dedicated Dockerfiles.
DOCKERFILE="${OS}/Dockerfile"
if [[ "$image" == *cuda* ]]; then
  DOCKERFILE="${OS}-cuda/Dockerfile"
elif [[ "$image" == *rocm* ]]; then
  DOCKERFILE="${OS}-rocm/Dockerfile"
fi
TRAVIS_DL_URL_PREFIX="https://s3.amazonaws.com/travis-python-archives/binaries/ubuntu/14.04/x86_64"
# It's annoying to rename jobs every time you want to rewrite a
# configuration, so we hardcode everything here rather than do it
# from scratch
case "$image" in
pytorch-linux-xenial-py3.8)
ANACONDA_PYTHON_VERSION=3.8
CMAKE_VERSION=3.10.3
GCC_VERSION=7
# Do not install PROTOBUF, DB, and VISION as a test
;;
pytorch-linux-xenial-py3.6-gcc5.4)
ANACONDA_PYTHON_VERSION=3.6
CMAKE_VERSION=3.10.3
GCC_VERSION=5
PROTOBUF=yes
DB=yes
VISION=yes
KATEX=yes
;;
pytorch-linux-xenial-py3.6-gcc7.2)
ANACONDA_PYTHON_VERSION=3.6
CMAKE_VERSION=3.10.3
GCC_VERSION=7
# Do not install PROTOBUF, DB, and VISION as a test
;;
pytorch-linux-xenial-py3.6-gcc7)
ANACONDA_PYTHON_VERSION=3.6
CMAKE_VERSION=3.10.3
GCC_VERSION=7
PROTOBUF=yes
DB=yes
VISION=yes
;;
pytorch-linux-xenial-cuda10.2-cudnn7-py3-gcc7)
CUDA_VERSION=10.2
CUDNN_VERSION=7
ANACONDA_PYTHON_VERSION=3.6
CMAKE_VERSION=3.10.3
GCC_VERSION=7
PROTOBUF=yes
DB=yes
VISION=yes
KATEX=yes
;;
pytorch-linux-xenial-cuda11.1-cudnn8-py3-gcc7)
CUDA_VERSION=11.1
CUDNN_VERSION=8
ANACONDA_PYTHON_VERSION=3.6
CMAKE_VERSION=3.10.3
GCC_VERSION=7
PROTOBUF=yes
DB=yes
VISION=yes
KATEX=yes
;;
pytorch-linux-xenial-cuda11.3-cudnn8-py3-gcc7)
CUDA_VERSION=11.3.0 # Deviating from major.minor to conform to nvidia's Docker image names
CUDNN_VERSION=8
ANACONDA_PYTHON_VERSION=3.6
CMAKE_VERSION=3.10.3
GCC_VERSION=7
PROTOBUF=yes
DB=yes
VISION=yes
KATEX=yes
;;
pytorch-linux-xenial-py3-clang5-asan)
ANACONDA_PYTHON_VERSION=3.6
CLANG_VERSION=5.0
CMAKE_VERSION=3.10.3
PROTOBUF=yes
DB=yes
VISION=yes
;;
pytorch-linux-xenial-py3-clang7-asan)
ANACONDA_PYTHON_VERSION=3.6
CLANG_VERSION=7
CMAKE_VERSION=3.10.3
PROTOBUF=yes
DB=yes
VISION=yes
;;
pytorch-linux-xenial-py3-clang7-onnx)
ANACONDA_PYTHON_VERSION=3.6
CLANG_VERSION=7
CMAKE_VERSION=3.10.3
PROTOBUF=yes
DB=yes
VISION=yes
;;
pytorch-linux-xenial-py3-clang5-android-ndk-r19c)
ANACONDA_PYTHON_VERSION=3.6
CLANG_VERSION=5.0
CMAKE_VERSION=3.10.3
LLVMDEV=yes
PROTOBUF=yes
ANDROID=yes
ANDROID_NDK_VERSION=r19c
GRADLE_VERSION=6.8.3
NINJA_VERSION=1.9.0
;;
pytorch-linux-xenial-py3.6-clang7)
ANACONDA_PYTHON_VERSION=3.6
CMAKE_VERSION=3.10.3
CLANG_VERSION=7
PROTOBUF=yes
DB=yes
VISION=yes
;;
pytorch-linux-bionic-py3.6-clang9)
ANACONDA_PYTHON_VERSION=3.6
CLANG_VERSION=9
PROTOBUF=yes
DB=yes
VISION=yes
VULKAN_SDK_VERSION=1.2.162.1
SWIFTSHADER=yes
;;
pytorch-linux-bionic-py3.8-gcc9)
ANACONDA_PYTHON_VERSION=3.8
GCC_VERSION=9
PROTOBUF=yes
DB=yes
VISION=yes
;;
pytorch-linux-bionic-cuda10.2-cudnn7-py3.6-clang9)
CUDA_VERSION=10.2
CUDNN_VERSION=7
ANACONDA_PYTHON_VERSION=3.6
CLANG_VERSION=9
PROTOBUF=yes
DB=yes
VISION=yes
;;
pytorch-linux-bionic-cuda10.2-cudnn7-py3.9-gcc7)
CUDA_VERSION=10.2
CUDNN_VERSION=7
ANACONDA_PYTHON_VERSION=3.9
GCC_VERSION=7
PROTOBUF=yes
DB=yes
VISION=yes
;;
  pytorch-linux-bionic-cuda11.0-cudnn8-py3.6-gcc9)
    CUDA_VERSION=11.0
    CUDNN_VERSION=8
    ANACONDA_PYTHON_VERSION=3.6
    GCC_VERSION=9
    PROTOBUF=yes
    DB=yes
    VISION=yes
    # NOTE(review): setting ROCM_VERSION in a CUDA image entry looks like a
    # copy/paste leftover -- every other entry in this case statement sets
    # either CUDA_* or ROCM_*, never both.  Confirm whether this line
    # should be removed.
    ROCM_VERSION=3.9
    ;;
pytorch-linux-bionic-rocm4.1-py3.6)
ANACONDA_PYTHON_VERSION=3.6
GCC_VERSION=9
PROTOBUF=yes
DB=yes
VISION=yes
ROCM_VERSION=4.1
;;
pytorch-linux-bionic-rocm4.2-py3.6)
ANACONDA_PYTHON_VERSION=3.6
GCC_VERSION=9
PROTOBUF=yes
DB=yes
VISION=yes
ROCM_VERSION=4.2
;;
pytorch-linux-bionic-rocm4.3.1-py3.6)
ANACONDA_PYTHON_VERSION=3.6
GCC_VERSION=9
PROTOBUF=yes
DB=yes
VISION=yes
ROCM_VERSION=4.3.1
;;
*)
# Catch-all for builds that are not hardcoded.
PROTOBUF=yes
DB=yes
VISION=yes
echo "image '$image' did not match an existing build configuration"
if [[ "$image" == *xenial* ]]; then
CMAKE_VERSION=3.10.3
fi
if [[ "$image" == *py* ]]; then
extract_version_from_image_name py ANACONDA_PYTHON_VERSION
fi
if [[ "$image" == *cuda* ]]; then
extract_version_from_image_name cuda CUDA_VERSION
extract_version_from_image_name cudnn CUDNN_VERSION
fi
if [[ "$image" == *rocm* ]]; then
extract_version_from_image_name rocm ROCM_VERSION
fi
if [[ "$image" == *gcc* ]]; then
extract_version_from_image_name gcc GCC_VERSION
fi
if [[ "$image" == *clang* ]]; then
extract_version_from_image_name clang CLANG_VERSION
fi
if [[ "$image" == *devtoolset* ]]; then
extract_version_from_image_name devtoolset DEVTOOLSET_VERSION
fi
if [[ "$image" == *glibc* ]]; then
extract_version_from_image_name glibc GLIBC_VERSION
fi
if [[ "$image" == *cmake* ]]; then
extract_version_from_image_name cmake CMAKE_VERSION
fi
;;
esac
# Set Jenkins UID and GID if running Jenkins
if [ -n "${JENKINS:-}" ]; then
JENKINS_UID=$(id -u jenkins)
JENKINS_GID=$(id -g jenkins)
fi
tmp_tag=$(basename "$(mktemp -u)" | tr '[:upper:]' '[:lower:]')
# Build image
# TODO: build-arg THRIFT is not turned on for any image, remove it once we confirm
# it's no longer needed.
docker build \
--no-cache \
--progress=plain \
--build-arg "TRAVIS_DL_URL_PREFIX=${TRAVIS_DL_URL_PREFIX}" \
--build-arg "BUILD_ENVIRONMENT=${image}" \
--build-arg "PROTOBUF=${PROTOBUF:-}" \
--build-arg "THRIFT=${THRIFT:-}" \
--build-arg "LLVMDEV=${LLVMDEV:-}" \
--build-arg "DB=${DB:-}" \
--build-arg "VISION=${VISION:-}" \
--build-arg "EC2=${EC2:-}" \
--build-arg "JENKINS=${JENKINS:-}" \
--build-arg "JENKINS_UID=${JENKINS_UID:-}" \
--build-arg "JENKINS_GID=${JENKINS_GID:-}" \
--build-arg "UBUNTU_VERSION=${UBUNTU_VERSION}" \
--build-arg "CENTOS_VERSION=${CENTOS_VERSION}" \
--build-arg "DEVTOOLSET_VERSION=${DEVTOOLSET_VERSION}" \
--build-arg "GLIBC_VERSION=${GLIBC_VERSION}" \
--build-arg "CLANG_VERSION=${CLANG_VERSION}" \
--build-arg "ANACONDA_PYTHON_VERSION=${ANACONDA_PYTHON_VERSION}" \
--build-arg "GCC_VERSION=${GCC_VERSION}" \
--build-arg "CUDA_VERSION=${CUDA_VERSION}" \
--build-arg "CUDNN_VERSION=${CUDNN_VERSION}" \
--build-arg "ANDROID=${ANDROID}" \
--build-arg "ANDROID_NDK=${ANDROID_NDK_VERSION}" \
--build-arg "GRADLE_VERSION=${GRADLE_VERSION}" \
--build-arg "VULKAN_SDK_VERSION=${VULKAN_SDK_VERSION}" \
--build-arg "SWIFTSHADER=${SWIFTSHADER}" \
--build-arg "CMAKE_VERSION=${CMAKE_VERSION:-}" \
--build-arg "NINJA_VERSION=${NINJA_VERSION:-}" \
--build-arg "KATEX=${KATEX:-}" \
--build-arg "ROCM_VERSION=${ROCM_VERSION:-}" \
-f $(dirname ${DOCKERFILE})/Dockerfile \
-t "$tmp_tag" \
"$@" \
.
# NVIDIA dockers for RC releases use tag names like `11.0-cudnn8-devel-ubuntu18.04-rc`,
# for this case we will set UBUNTU_VERSION to `18.04-rc` so that the Dockerfile could
# find the correct image. As a result, here we have to replace the
# "$UBUNTU_VERSION" == "18.04-rc"
# with
# "$UBUNTU_VERSION" == "18.04"
UBUNTU_VERSION=$(echo ${UBUNTU_VERSION} | sed 's/-rc$//')
function drun() {
  # Run a command inside the freshly built image.  "$@" preserves each
  # argument's word boundaries; the original unquoted $* re-split any
  # argument containing whitespace.
  docker run --rm "$tmp_tag" "$@"
}
# Sanity-check the built image: every tool version requested by the
# configuration above must actually be present inside the container,
# otherwise fail the build and print what was found.
if [[ "$OS" == "ubuntu" ]]; then
  if !(drun lsb_release -a 2>&1 | grep -qF Ubuntu); then
    echo "OS=ubuntu, but:"
    drun lsb_release -a
    exit 1
  fi
  if !(drun lsb_release -a 2>&1 | grep -qF "$UBUNTU_VERSION"); then
    echo "UBUNTU_VERSION=$UBUNTU_VERSION, but:"
    drun lsb_release -a
    exit 1
  fi
fi
if [ -n "$ANACONDA_PYTHON_VERSION" ]; then
  if !(drun python --version 2>&1 | grep -qF "Python $ANACONDA_PYTHON_VERSION"); then
    echo "ANACONDA_PYTHON_VERSION=$ANACONDA_PYTHON_VERSION, but:"
    drun python --version
    exit 1
  fi
fi
if [ -n "$GCC_VERSION" ]; then
  if !(drun gcc --version 2>&1 | grep -q " $GCC_VERSION\\W"); then
    echo "GCC_VERSION=$GCC_VERSION, but:"
    drun gcc --version
    exit 1
  fi
fi
if [ -n "$CLANG_VERSION" ]; then
  if !(drun clang --version 2>&1 | grep -qF "clang version $CLANG_VERSION"); then
    echo "CLANG_VERSION=$CLANG_VERSION, but:"
    drun clang --version
    exit 1
  fi
fi
if [ -n "$KATEX" ]; then
  if !(drun katex --version); then
    echo "KATEX=$KATEX, but:"
    drun katex --version
    exit 1
  fi
fi
| Shell | 4 | xiaohanhuang/pytorch | .circleci/docker/build.sh | [
"Intel"
] |
( Generated from test_precedence1_in.muv by the MUV compiler. )
( https://github.com/revarbat/pymuv )
: _main[ _arg -- ret ]
3 var! _a
4 var! _b
5 var! _c
6 var! _d
7 var! _e
_a @ _b @ _c @ _d @ + * + _e @ - var! _f
40
;
: __start
"me" match me ! me @ location loc ! trig trigger !
_main
;
| MUF | 2 | revarbat/pymuv | tests/test_precedence1_cmp.muf | [
"MIT"
] |
create database cped;
| SQL | 1 | imtbkcat/tidb-lightning | tests/checkpoint_error_destroy/good-data/cped-schema-create.sql | [
"Apache-2.0"
] |
unique template site/ceph/server/infernalis;
include 'components/dirperm/config';
# For every configured filesystem whose mountpoint falls under
# CEPH_OSD_MP_BASE, have the dirperm component ensure the mountpoint
# directory exists with owner ceph:ceph and mode 0755.
"/software/components/dirperm/paths" = {
    foreach (idx; mp; value('/system/filesystems')) {
        if (match(mp['mountpoint'], format('^%s', CEPH_OSD_MP_BASE))) {
            append(SELF, dict(
                "path", mp['mountpoint'],
                "owner", "ceph:ceph",
                "perm", "0755",
                "type", "d",
            ));
        };
    };
    SELF;
};
include 'common/sysctl/service';
# Raise the pid limit via the metaconfig-managed /etc/sysctl.conf.
prefix "/software/components/metaconfig/services/{/etc/sysctl.conf}/contents";
'kernel.pid_max' = 4194303;
| Pan | 3 | JavascriptID/sourcerer-app | src/test/resources/samples/langs/Pan/infernalis.pan | [
"MIT"
] |
// run-pass
#![allow(unused_mut)]
#![allow(unused_assignments)]
#![allow(unused_variables)]
// edition:2015
// aux-build:edition-kw-macro-2018.rs
#[macro_use]
extern crate edition_kw_macro_2018;
// `async` is not a keyword in edition 2015, so this crate can use it both
// as a bare identifier and in raw form (`r#async`).  The `consumes_*` /
// `passes_ident` / `produces_*` macros come from the 2018-edition aux
// crate (see the aux-build directive above), exercising how identifier
// tokens match across edition boundaries; the commented-out lines record
// the combinations that are rejected.
pub fn check_async() {
    let mut async = 1; // OK
    let mut r#async = 1; // OK
    r#async = consumes_async!(async); // OK
    // r#async = consumes_async!(r#async); // ERROR, not a match
    // r#async = consumes_async_raw!(async); // ERROR, not a match
    r#async = consumes_async_raw!(r#async); // OK
    if passes_ident!(async) == 1 {} // OK
    if passes_ident!(r#async) == 1 {} // OK
    // one_async::async(); // ERROR, unresolved name
    // one_async::r#async(); // ERROR, unresolved name
    two_async::async(); // OK
    two_async::r#async(); // OK
}
// Only the raw-identifier-producing macro can define the `async` item
// here; the plain form is rejected (see the comment below).
mod one_async {
    // produces_async! {} // ERROR, reserved
}
mod two_async {
    produces_async_raw! {} // OK
}
fn main() {}
| Rust | 4 | Eric-Arellano/rust | src/test/ui/edition-keywords-2015-2018.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="20dp"
android:height="20dp"
android:viewportWidth="20"
android:viewportHeight="20"
android:tint="?attr/colorControlNormal">
<path
android:fillColor="@android:color/white"
android:pathData="M3.75,6L3.75,6C3.34,6 3,6.34 3,6.75v8.75C3,16.33 3.67,17 4.5,17h8.75c0.41,0 0.75,-0.34 0.75,-0.75l0,0c0,-0.41 -0.34,-0.75 -0.75,-0.75H4.5V6.75C4.5,6.34 4.16,6 3.75,6z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M7.5,3L7.5,3C6.67,3 6,3.67 6,4.5v0h1.5V3z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M6,6.17h1.5v1.5h-1.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M7.5,12.5H6v0C6,13.33 6.67,14 7.5,14h0V12.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M15.5,4.5H17v0C17,3.67 16.33,3 15.5,3h0V4.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M6,9.33h1.5v1.5h-1.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M15.5,6.17h1.5v1.5h-1.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M15.5,9.33h1.5v1.5h-1.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M9.17,3h1.5v1.5h-1.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M12.33,3h1.5v1.5h-1.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M9.17,12.5h1.5v1.5h-1.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M12.33,12.5h1.5v1.5h-1.5z"/>
<path
android:fillColor="@android:color/white"
android:pathData="M15.5,14L15.5,14c0.83,0 1.5,-0.67 1.5,-1.5v0h-1.5V14z"/>
</vector>
| XML | 2 | Imudassir77/material-design-icons | android/action/flip_to_back/materialiconsround/black/res/drawable/round_flip_to_back_20.xml | [
"Apache-2.0"
] |
(ns open-api-petstore.specs.order
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
)
(:import (java.io File)))
(def order-data
{
(ds/opt :id) int?
(ds/opt :petId) int?
(ds/opt :quantity) int?
(ds/opt :shipDate) inst?
(ds/opt :status) string?
(ds/opt :complete) boolean?
})
(def order-spec
(ds/spec
{:name ::order
:spec order-data}))
| Clojure | 3 | MalcolmScoffable/openapi-generator | samples/client/petstore/clojure/src/open_api_petstore/specs/order.clj | [
"Apache-2.0"
] |
--TEST--
Bug #72447: Type Confusion in php_bz2_filter_create()
--EXTENSIONS--
bz2
--FILE--
<?php
$input = "AAAAAAAA";
$param = array('blocks' => $input);
$fp = fopen('testfile', 'w');
stream_filter_append($fp, 'bzip2.compress', STREAM_FILTER_WRITE, $param);
fclose($fp);
?>
--CLEAN--
<?php
unlink('testfile');
?>
--EXPECTF--
Warning: stream_filter_append(): Invalid parameter given for number of blocks to allocate. (0) in %s%ebug72447.php on line %d
| PHP | 3 | NathanFreeman/php-src | ext/bz2/tests/bug72447.phpt | [
"PHP-3.01"
] |
{{/* vim: set filetype=mustache: */}}
{{/*
Expand the name of the chart.
*/}}
{{- define "name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.analyzer.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "analyzer"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.catalog.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "catalog"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.api.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "api"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.policy-engine.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "policy"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.simplequeue.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "simplequeue"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.enterprise.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "enterprise"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.enterprise-ui.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "enterprise-ui"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.enterprise-feeds.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "enterprise-feeds"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.enterprise-reports.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "enterprise-reports"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "anchore-engine.enterprise-notifications.fullname" -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- printf "%s-%s-%s" .Release.Name $name "enterprise-notifications"| trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified dependency name for the db.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "postgres.fullname" -}}
{{- printf "%s-%s" .Release.Name "postgresql" | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified dependency name for the feeds db.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "postgres.anchore-feeds-db.fullname" -}}
{{- printf "%s-%s" .Release.Name "anchore-feeds-db" | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified dependency name for the db.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "redis.fullname" -}}
{{- printf "%s-%s" .Release.Name "anchore-ui-redis" | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Return Anchore Engine default admin password
*/}}
{{- define "anchore-engine.defaultAdminPassword" -}}
{{- if .Values.anchoreGlobal.defaultAdminPassword }}
{{- .Values.anchoreGlobal.defaultAdminPassword -}}
{{- else -}}
{{- randAlphaNum 32 -}}
{{- end -}}
{{- end -}}
| Smarty | 4 | kevinpollet/charts | stable/anchore-engine/templates/_helpers.tpl | [
"Apache-2.0"
] |
val doubleIt = (i:Int) => i * 2
new Array[Int](5, doubleIt) | X10 | 3 | RalfGuder/LaTeX-examples | documents/Programmierparadigmen/scripts/x10/array-example.x10 | [
"MIT"
] |
package com.baeldung.client;
import com.baeldung.shared.MessageService;
import com.baeldung.shared.MessageServiceAsync;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.KeyUpEvent;
import com.google.gwt.event.dom.client.KeyUpHandler;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.DialogBox;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.TextBox;
import com.google.gwt.user.client.ui.VerticalPanel;
/**
* Entry point classes define <code>onModuleLoad()</code>.
*/
public class Google_web_toolkit implements EntryPoint {
private final MessageServiceAsync messageServiceAsync = GWT.create(MessageService.class);
public void onModuleLoad() {
Button sendButton = new Button("Submit");
TextBox nameField = new TextBox();
nameField.setText("Hi there");
Label warningLabel = new Label();
sendButton.addStyleName("sendButton");
RootPanel.get("nameFieldContainer").add(nameField);
RootPanel.get("sendButtonContainer").add(sendButton);
RootPanel.get("errorLabelContainer").add(warningLabel);
Button closeButton = new Button("Thanks");
closeButton.getElement().setId("closeButton");
Label textToServerLabel = new Label();
HTML serverResponseLabel = new HTML();
VerticalPanel vPanel = new VerticalPanel();
vPanel.addStyleName("vPanel");
vPanel.add(new HTML("Sending message to the server:"));
vPanel.add(textToServerLabel);
vPanel.add(new HTML("<br><b>Server replies:</b>"));
vPanel.add(serverResponseLabel);
vPanel.setHorizontalAlignment(VerticalPanel.ALIGN_RIGHT);
vPanel.add(closeButton);
vPanel.setVisible(false);
RootPanel.get("serverResponseContainer").add(vPanel);
closeButton.addClickHandler(event -> {
sendButton.setEnabled(true);
sendButton.setFocus(true);
vPanel.setVisible(false);
});
class MyHandler implements ClickHandler, KeyUpHandler {
public void onClick(ClickEvent event) {
sendMessageToServer();
}
public void onKeyUp(KeyUpEvent event) {
if (event.getNativeKeyCode() == KeyCodes.KEY_ENTER) {
sendMessageToServer();
}
}
private void sendMessageToServer() {
warningLabel.setText("");
String textToServer = nameField.getText();
if (textToServer == null || textToServer.isEmpty()) {
warningLabel.setText("Please enter the message");
return;
}
sendButton.setEnabled(false);
textToServerLabel.setText(textToServer);
serverResponseLabel.setText("");
messageServiceAsync.sendMessage(textToServer, new AsyncCallback<String>() {
public void onFailure(Throwable caught) {
serverResponseLabel.addStyleName("serverResponseLabelError");
serverResponseLabel.setHTML("server error occurred");
closeButton.setFocus(true);
}
public void onSuccess(String result) {
serverResponseLabel.removeStyleName("serverResponseLabelError");
serverResponseLabel.setHTML(result);
closeButton.setFocus(true);
vPanel.setVisible(true);
}
});
}
}
// Add a handler to send the name to the server
MyHandler handler = new MyHandler();
sendButton.addClickHandler(handler);
nameField.addKeyUpHandler(handler);
}
}
| Java | 4 | zeesh49/tutorials | google-web-toolkit/src/main/java/com/baeldung/client/Google_web_toolkit.java | [
"MIT"
] |
describe(`About Page`, () => {
it(`successfully loads`, () => {
cy.visit(`/about`)
})
it(`contains the title with an SVG icon and text "Gatsbygram"`, () => {
cy.getTestElement(`site-title`).get(`svg`)
cy.getTestElement(`site-title`).contains(`Gatsbygram`)
})
it(`clicking on site title takes to home page`, () => {
cy.getTestElement(`site-title`).click()
cy.url().should(`eq`, `${Cypress.config(`baseUrl`)}/`)
// go back to about page for further testing
cy.visit(`/about`)
})
it(`contains a link to about page in nav bar and it works`, () => {
cy.getTestElement(`about-link`).contains(`About`)
cy.getTestElement(`about-link`).click()
cy.url().should(`eq`, `${Cypress.config(`baseUrl`)}/about/`)
})
it(`displays title of the page`, () => {
cy.getTestElement(`about-title`).contains(`About Gatsbygram`)
})
})
| JavaScript | 4 | cwlsn/gatsby | examples/gatsbygram/cypress/integration/about-page-spec.js | [
"MIT"
] |
/home/spinalvm/hdl/riscv-compliance/work//I-LHU-01.elf: file format elf32-littleriscv
Disassembly of section .text.init:
80000000 <_start>:
80000000: 00001f97 auipc t6,0x1
80000004: 000f8f93 mv t6,t6
80000008: 00001117 auipc sp,0x1
8000000c: 03810113 addi sp,sp,56 # 80001040 <codasip_signature_start>
80000010: 000fd183 lhu gp,0(t6) # 80001000 <test_A1_data>
80000014: 002fd203 lhu tp,2(t6)
80000018: 00312023 sw gp,0(sp)
8000001c: 00412223 sw tp,4(sp)
80000020: 00001c17 auipc s8,0x1
80000024: fe5c0c13 addi s8,s8,-27 # 80001005 <test_A2_data+0x1>
80000028: 00001297 auipc t0,0x1
8000002c: 02028293 addi t0,t0,32 # 80001048 <test_A2_res>
80000030: fffc5c83 lhu s9,-1(s8)
80000034: 001c5d03 lhu s10,1(s8)
80000038: 0192a023 sw s9,0(t0)
8000003c: 01a2a223 sw s10,4(t0)
80000040: 00001397 auipc t2,0x1
80000044: fc738393 addi t2,t2,-57 # 80001007 <test_A2_data+0x3>
80000048: 00001417 auipc s0,0x1
8000004c: 00840413 addi s0,s0,8 # 80001050 <test_A3_res>
80000050: 0013df03 lhu t5,1(t2)
80000054: 0033df83 lhu t6,3(t2)
80000058: 01e42023 sw t5,0(s0)
8000005c: 01f42223 sw t6,4(s0)
80000060: 00001517 auipc a0,0x1
80000064: 7ac50513 addi a0,a0,1964 # 8000180c <_end+0x77c>
80000068: 00001597 auipc a1,0x1
8000006c: ff058593 addi a1,a1,-16 # 80001058 <test_A4_res>
80000070: 80055603 lhu a2,-2048(a0)
80000074: 80255683 lhu a3,-2046(a0)
80000078: 00c5a023 sw a2,0(a1)
8000007c: 00d5a223 sw a3,4(a1)
80000080: 00000697 auipc a3,0x0
80000084: 79368693 addi a3,a3,1939 # 80000813 <complience_halt_break+0x687>
80000088: 00001717 auipc a4,0x1
8000008c: fd870713 addi a4,a4,-40 # 80001060 <test_A5_res>
80000090: 7fd6d783 lhu a5,2045(a3)
80000094: 7ff6d803 lhu a6,2047(a3)
80000098: 00f72023 sw a5,0(a4)
8000009c: 01072223 sw a6,4(a4)
800000a0: 00001817 auipc a6,0x1
800000a4: f7880813 addi a6,a6,-136 # 80001018 <test_B_data>
800000a8: 00001897 auipc a7,0x1
800000ac: fc088893 addi a7,a7,-64 # 80001068 <test_B_res>
800000b0: ffc85903 lhu s2,-4(a6)
800000b4: ffe85983 lhu s3,-2(a6)
800000b8: 00085a03 lhu s4,0(a6)
800000bc: 00285a83 lhu s5,2(a6)
800000c0: 00485b03 lhu s6,4(a6)
800000c4: 00685b83 lhu s7,6(a6)
800000c8: 0128a023 sw s2,0(a7)
800000cc: 0138a223 sw s3,4(a7)
800000d0: 0148a423 sw s4,8(a7)
800000d4: 0158a623 sw s5,12(a7)
800000d8: 0168a823 sw s6,16(a7)
800000dc: 0178aa23 sw s7,20(a7)
800000e0: 00001a97 auipc s5,0x1
800000e4: f40a8a93 addi s5,s5,-192 # 80001020 <test_C_data>
800000e8: 00001b17 auipc s6,0x1
800000ec: f98b0b13 addi s6,s6,-104 # 80001080 <test_C_res>
800000f0: 000ad003 lhu zero,0(s5)
800000f4: 000b2023 sw zero,0(s6)
800000f8: 00001a97 auipc s5,0x1
800000fc: f2ca8a93 addi s5,s5,-212 # 80001024 <test_D_data>
80000100: 00001b17 auipc s6,0x1
80000104: f84b0b13 addi s6,s6,-124 # 80001084 <test_D_res>
80000108: 000aab83 lw s7,0(s5)
8000010c: 000bdc03 lhu s8,0(s7)
80000110: 000c0c93 mv s9,s8
80000114: 019b2023 sw s9,0(s6)
80000118: 00001c97 auipc s9,0x1
8000011c: f14c8c93 addi s9,s9,-236 # 8000102c <test_E1_data>
80000120: 00001d17 auipc s10,0x1
80000124: f68d0d13 addi s10,s10,-152 # 80001088 <test_E1_res>
80000128: 000cdc83 lhu s9,0(s9)
8000012c: 019d2023 sw s9,0(s10)
80000130: 00001d97 auipc s11,0x1
80000134: f01d8d93 addi s11,s11,-255 # 80001031 <test_E2_data+0x1>
80000138: 00001e17 auipc t3,0x1
8000013c: f54e0e13 addi t3,t3,-172 # 8000108c <test_E2_res>
80000140: fffddd83 lhu s11,-1(s11)
80000144: 01be2023 sw s11,0(t3)
80000148: 00001517 auipc a0,0x1
8000014c: ef850513 addi a0,a0,-264 # 80001040 <codasip_signature_start>
80000150: 00001597 auipc a1,0x1
80000154: f4058593 addi a1,a1,-192 # 80001090 <_end>
80000158: f0100637 lui a2,0xf0100
8000015c: f2c60613 addi a2,a2,-212 # f00fff2c <_end+0x700fee9c>
80000160 <complience_halt_loop>:
80000160: 02b50663 beq a0,a1,8000018c <complience_halt_break>
80000164: 00c52683 lw a3,12(a0)
80000168: 00d62023 sw a3,0(a2)
8000016c: 00852683 lw a3,8(a0)
80000170: 00d62023 sw a3,0(a2)
80000174: 00452683 lw a3,4(a0)
80000178: 00d62023 sw a3,0(a2)
8000017c: 00052683 lw a3,0(a0)
80000180: 00d62023 sw a3,0(a2)
80000184: 01050513 addi a0,a0,16
80000188: fd9ff06f j 80000160 <complience_halt_loop>
8000018c <complience_halt_break>:
8000018c: f0100537 lui a0,0xf0100
80000190: f2050513 addi a0,a0,-224 # f00fff20 <_end+0x700fee90>
80000194: 00052023 sw zero,0(a0)
...
Disassembly of section .data:
80001000 <test_A1_data>:
80001000: f222 fsw fs0,36(sp)
80001002: 11f1 addi gp,gp,-4
80001004 <test_A2_data>:
80001004: 44f4 lw a3,76(s1)
80001006: 0xf666f333
80001008 <test_A3_data>:
80001008: f666 fsw fs9,44(sp)
8000100a: 55f5 li a1,-3
8000100c <test_A4_data>:
8000100c: 88f8 0x88f8
8000100e: 0xaaaf777
80001010 <test_A5_data>:
80001010: 0aaa slli s5,s5,0xa
80001012: 9909 andi a0,a0,-30
80001014: cc0c sw a1,24(s0)
80001016: 0xeee0bbb
80001018 <test_B_data>:
80001018: 0eee slli t4,t4,0x1b
8000101a: dd0d beqz a0,80000f54 <complience_halt_break+0xdc8>
8000101c: 00f0 addi a2,sp,76
8000101e: 0fff 0xfff
80001020 <test_C_data>:
80001020: 5678 lw a4,108(a2)
80001022: 1234 addi a3,sp,296
80001024 <test_D_data>:
80001024: 1028 addi a0,sp,40
80001026: 8000 0x8000
80001028 <test_D_data2_label>:
80001028: def0 sw a2,124(a3)
8000102a: 9abc 0x9abc
8000102c <test_E1_data>:
8000102c: 3210 fld fa2,32(a2)
8000102e: 7654 flw fa3,44(a2)
80001030 <test_E2_data>:
80001030: ba98 fsd fa4,48(a3)
80001032: fedc fsw fa5,60(a3)
...
80001040 <codasip_signature_start>:
80001040: ffff 0xffff
80001042: ffff 0xffff
80001044: ffff 0xffff
80001046: ffff 0xffff
80001048 <test_A2_res>:
80001048: ffff 0xffff
8000104a: ffff 0xffff
8000104c: ffff 0xffff
8000104e: ffff 0xffff
80001050 <test_A3_res>:
80001050: ffff 0xffff
80001052: ffff 0xffff
80001054: ffff 0xffff
80001056: ffff 0xffff
80001058 <test_A4_res>:
80001058: ffff 0xffff
8000105a: ffff 0xffff
8000105c: ffff 0xffff
8000105e: ffff 0xffff
80001060 <test_A5_res>:
80001060: ffff 0xffff
80001062: ffff 0xffff
80001064: ffff 0xffff
80001066: ffff 0xffff
80001068 <test_B_res>:
80001068: ffff 0xffff
8000106a: ffff 0xffff
8000106c: ffff 0xffff
8000106e: ffff 0xffff
80001070: ffff 0xffff
80001072: ffff 0xffff
80001074: ffff 0xffff
80001076: ffff 0xffff
80001078: ffff 0xffff
8000107a: ffff 0xffff
8000107c: ffff 0xffff
8000107e: ffff 0xffff
80001080 <test_C_res>:
80001080: ffff 0xffff
80001082: ffff 0xffff
80001084 <test_D_res>:
80001084: ffff 0xffff
80001086: ffff 0xffff
80001088 <test_E1_res>:
80001088: ffff 0xffff
8000108a: ffff 0xffff
8000108c <test_E2_res>:
8000108c: ffff 0xffff
8000108e: ffff 0xffff
| ObjDump | 3 | cbrune/VexRiscv | src/test/resources/asm/I-LHU-01.elf.objdump | [
"MIT"
] |
domain: "{ S1[i0, i1, i2, i3] : i3 <= 4 - 2i0 - 2i1 and i3 >= i2 and i2 <= 9 - 2i0 and i2 >= 0 and i2 >= 1 - 2i0 and i3 <= 1 + i2 and i2 <= 1 and i3 >= 1 - 2i0 - 2i1 }"
child:
context: "{ [] }"
child:
schedule: "[{ S1[i0, i1, i2, i3] -> [(2i0)] }]"
options: "{ separate[i0] }"
child:
schedule: "[{ S1[i0, i1, i2, i3] -> [(2i0 + 2i1)] }]"
options: "{ separate[i0] }"
child:
schedule: "[{ S1[i0, i1, i2, i3] -> [(2i0 + i2)] }]"
options: "{ separate[i0] }"
child:
schedule: "[{ S1[i0, i1, i2, i3] -> [(2i0 + 2i1 + i3)] }]"
options: "{ separate[i0] }"
| Smalltalk | 1 | chelini/isl-haystack | test_inputs/codegen/cloog/reservoir-tang-xue1.st | [
"MIT"
] |
function! health#full_render#check()
call health#report_start("report 1")
call health#report_ok("life is fine")
call health#report_warn("no what installed", ["pip what", "make what"])
call health#report_start("report 2")
call health#report_info("stuff is stable")
call health#report_error("why no hardcopy", [":h :hardcopy", ":h :TOhtml"])
endfunction
| VimL | 3 | uga-rosa/neovim | test/functional/fixtures/autoload/health/full_render.vim | [
"Vim"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.