text stringlengths 1 1.05M |
|---|
//
// MIMediaTracker.h
// MappIntelligenceSDK
//
// Created by <NAME> on 08/01/2021.
// Copyright © 2021 Mapp Digital US, LLC. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "MIMediaEvent.h"
NS_ASSUME_NONNULL_BEGIN
/// Gatekeeper for media analytics events: decides whether a given
/// MIMediaEvent should be forwarded for tracking.
@interface MIMediaTracker : NSObject
/// Shared singleton instance (declared nullable — callers must handle nil).
+ (nullable instancetype)sharedInstance;
/// Returns YES when the given media event should be tracked.
/// @param event The media event to evaluate.
-(BOOL) shouldTrack: (MIMediaEvent *) event;
@end
NS_ASSUME_NONNULL_END
|
package config
import (
"os"
"gopkg.in/yaml.v2"
"github.com/feeltheajf/ztca/api"
"github.com/feeltheajf/ztca/dto"
"github.com/feeltheajf/ztca/fs"
"github.com/feeltheajf/ztca/log"
"github.com/feeltheajf/ztca/pki"
)
const (
	// App is the application name, also used to derive the config file name.
	App = "ztca"
	// File is the default configuration file name ("ztca.yml").
	File = App + ".yml"
)

// Config is the top-level application configuration, one section per
// subsystem, populated from a YAML file by Load.
type Config struct {
	API *api.Config `yaml:"api"`
	DB *dto.Config `yaml:"db"`
	Log *log.Config `yaml:"log"`
	CA *pki.Config `yaml:"ca"`
}
// Load reads the YAML configuration file at path, expands $VAR / ${VAR}
// environment references in the raw text, and unmarshals it strictly over
// the defaults from defaultConfig. Unknown YAML keys are an error
// (UnmarshalStrict), which catches typos in config files early.
func Load(path string) (*Config, error) {
	raw, err := fs.Read(path)
	if err != nil {
		return nil, err
	}
	cfg := defaultConfig()
	if err := yaml.UnmarshalStrict([]byte(os.ExpandEnv(raw)), cfg); err != nil {
		return nil, err
	}
	return cfg, nil
}
// defaultConfig returns the baseline configuration that Load overlays the
// YAML file onto. All sections currently start out nil.
func defaultConfig() *Config {
	return &Config{
		//
	}
}
|
#!/usr/bin/env bash
. ${BUILDPACK_TEST_RUNNER_HOME}/lib/test_utils.sh
. ${BUILDPACK_HOME}/bin/vars.sh
cd "${BUILDPACK_HOME}"
# End-to-end compile test: runs the buildpack compile step and checks that
# the vmagent binary, launcher script and config template were installed and
# that the binary matches the pinned SHA-256 checksum.
testCompile() {
  compile
  assertCapturedSuccess
  assertCaptured "Installing vmagent ${VMAGENT_VERSION}"
  assertFileExists "${BUILD_DIR}/bin/vmagent"
  assertFileExists "${BUILD_DIR}/bin/start-vmagent"
  assertFileExists "${BUILD_DIR}/config/vmagent-prometheus.yml.erb"
  assertFileSHA256 "${VMAGENT_SHA256}" "${BUILD_DIR}/bin/vmagent"
}
# Asserts that a regular file exists at the given path.
# The path is quoted *inside* the test expression: shunit2's assertTrue
# re-evaluates the string, so an unquoted ${path} would word-split on spaces.
assertFileExists() {
  local path=$1
  assertTrue "${path} should exist" "[ -f \"${path}\" ]"
}
# Asserts that the SHA-256 digest of a file matches the expected value.
# Compares only the digest field: `sha256sum` prints "HASH  FILE" with TWO
# spaces between hash and filename, so the original comparison against
# "HASH FILE" (one space) could never succeed.
assertFileSHA256() {
  local sha=$1
  local path=$2
  assertEquals "${sha}" "$(sha256sum "${path}" | awk '{print $1}')"
}
|
<gh_stars>0
// Per-stage AWS/Stripe settings for the notes-app front-end.
// The active stage is chosen at build time via REACT_APP_STAGE (see below).
const dev = {
  s3: {
    REGION: "us-east-1",
    BUCKET: "aa-notes-app-2-api-dev-attachmentsbucket-1oce82r8q3ow1"
  },
  apiGateway: {
    REGION: "us-east-1",
    URL: "https://16qusl8rwb.execute-api.us-east-1.amazonaws.com/dev"
  },
  cognito: {
    REGION: "us-east-1",
    USER_POOL_ID: "us-east-1_PYJMGtE3G",
    APP_CLIENT_ID: "7jah267rq3oqtuklqupqnct0b",
    IDENTITY_POOL_ID: "us-east-1:e544324d-b8f1-4930-b918-55febb6df399"
  },
  // Publishable (client-side) Stripe key; redacted in this checkout.
  STRIPE_KEY: "<KEY>",
};
// Production-stage resources (same shape as `dev`).
const prod = {
  s3: {
    REGION: "us-east-1",
    BUCKET: "aa-notes-app-2-api-prod-attachmentsbucket-8u9itownwa9n"
  },
  apiGateway: {
    REGION: "us-east-1",
    URL: "https://kwhp3j2l5m.execute-api.us-east-1.amazonaws.com/prod"
  },
  cognito: {
    REGION: "us-east-1",
    USER_POOL_ID: "us-east-1_JDYYh52QW",
    APP_CLIENT_ID: "6scbuhib2agu76at1n21emfb08",
    IDENTITY_POOL_ID: "us-east-1:4e1b96cf-919f-4a2b-8939-534a81cb9be9"
  },
  STRIPE_KEY: "<KEY>",
};
// Default to dev if not set
const config = process.env.REACT_APP_STAGE === 'prod'
  ? prod
  : dev;
export default {
  // Add common config values here
  MAX_ATTACHMENT_SIZE: 5000000,
  ...config
};
-- Average nightly room cost across all hotel rooms located in New York City.
SELECT AVG(cost) AS 'Average Cost' FROM hotel_rooms
WHERE city = 'New York City'
# Rebuilds and restarts the blackpositivityapi container for one environment.
# Usage: ./deploy.sh <environment>   (e.g. ./deploy.sh Staging)
_env=$1
# Fail fast on a missing argument instead of producing the bogus image tag
# "blackpositivityapi-".
if [ -z "$_env" ]; then
  echo "usage: $0 <environment>" >&2
  exit 1
fi
_imagetag="blackpositivityapi-$_env"
echo "$_imagetag"
# Stop and remove any existing container with this name.
docker stop "$_imagetag"
docker rm --force "$_imagetag"
# Remove the old image so the rebuild starts clean.
docker rmi "$_imagetag"
# Build new image without cache so code changes are always picked up.
docker build --no-cache -t "$_imagetag" .
# Run detached, mapping host port 3006 to the container's port 80.
docker run -e ASPNETCORE_ENVIRONMENT="$_env" -d -p 3006:80 --name "$_imagetag" "$_imagetag"
echo "$_imagetag new version up!"
exit 0
<gh_stars>0
package aula06;
public class Exercicio1 {
    /**
     * Demonstrates formatted console output (printf precision specifiers)
     * for product prices and personal data.
     */
    public static void main(String[] args) {
        String produto1 = "computador";
        String produto2 = "mesa";
        int idade = 30;
        int codigo = 3200;
        char sexo = 'f';
        double preco1 = 2100.0;
        double preco2 = 650.50;
        double medida = 53.234567;
        // Leading space before "com" keeps the product name from running
        // into the phrase (was: "computadorcom preço de ...").
        System.out.printf(produto1 + " com preço de " + "%.2f%n", preco1);
        System.out.printf(produto2 + " com preço de " + "%.2f%n", preco2);
        System.out.println
        ("idade: " + idade + ",codigo " + codigo + ",sexo: " + sexo);
        // Same value rendered with 8 and then 3 decimal places.
        System.out.printf("%.8f%n", medida);
        System.out.printf("%.3f%n", medida);
    }
}
|
import { OptionsService } from './Options';
import { WebSocketServerService } from './WebSocketServer';
/**
 * Serves the generated web application over HTTP on the first available
 * port in [minPort, maxPort]. Options and the companion WebSocket server
 * are injected at construction time.
 */
export declare class HttpServerService {
    private optionsService;
    private webSocketServerService;
    /** WebApp root */
    private rootPath;
    /** Start port number */
    private _minPort;
    /** Start port getter */
    get minPort(): number;
    /** Maximum port number */
    private _maxPort;
    /** Maximum port getter */
    get maxPort(): number;
    /** Current port number */
    private _port;
    /** Current port getter */
    get port(): number;
    /** The server instance */
    private server;
    /** Denotes if the server is started */
    private serverStarted;
    constructor(optionsService: OptionsService, webSocketServerService: WebSocketServerService);
    /**
     * Starts the http server
     * Check if running before starting
     */
    serve(): Promise<void>;
    /**
     * Stops the http server
     * Check if running before stop
     */
    stop(): Promise<void>;
    /** Denotes if the HTTP server is running */
    started(): boolean;
    /**
     * Open the browser for the current server
     * Do not open if not started
     */
    open(): Promise<void>;
    /**
     * Get the URL of the current session
     * Returns null if not started
     */
    url(): string | null;
    /** Test ports and returns the first one available */
    private findAvailablePort;
}
|
<gh_stars>1-10
#!/usr/bin/env python3
from PyQt5 import QtWidgets
from pyqtcmd import NeedsSelectionUICommandMixin
from dsrlib.domain import commands, WorkspaceMixin, Configuration
from dsrlib.ui.mixins import MainWindowMixin
from .base import UICommand
class AddConfigurationUICommand(WorkspaceMixin, UICommand):
    """UI command that appends a new, empty Configuration to the workspace."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, icon='add', tip=_('Create a new configuration'), **kwargs)

    def do(self):
        # Run through the undo history so the addition can be reverted.
        cmd = commands.AddConfigurationCommand(workspace=self.workspace(), configuration=Configuration())
        self.history().run(cmd)
class AddConfigurationButton(MainWindowMixin, WorkspaceMixin, QtWidgets.QPushButton):
    """Flat, fixed-width (32px) button bound to AddConfigurationUICommand."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setFlat(True)
        self.setFixedWidth(32)
        # The command binds itself to this button via set_button (icon/click).
        AddConfigurationUICommand(self, workspace=self.workspace(), mainWindow=self.mainWindow()).set_button(self)
class DeleteConfigurationsUICommand(NeedsSelectionUICommandMixin, WorkspaceMixin, UICommand):
    """UI command that deletes the configurations currently selected in the
    container widget. NeedsSelectionUICommandMixin presumably gates enablement
    on a non-empty selection — confirm in that mixin's definition.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, icon='del', tip=_('Delete the selected configuration'), **kwargs)

    def do(self):
        # Run through the undo history so the deletion can be reverted.
        cmd = commands.DeleteConfigurationsCommand(workspace=self.workspace(), configurations=self.container().selection())
        self.history().run(cmd)
class DeleteConfigurationsButton(MainWindowMixin, WorkspaceMixin, QtWidgets.QPushButton):
    """Flat, fixed-width (32px) button bound to DeleteConfigurationsUICommand.

    `container` is required (keyword-only) and supplies the selection the
    delete command operates on.
    """

    def __init__(self, *args, container, **kwargs):
        super().__init__(*args, **kwargs)
        self.setFlat(True)
        self.setFixedWidth(32)
        DeleteConfigurationsUICommand(self, workspace=self.workspace(), container=container, mainWindow=self.mainWindow()).set_button(self)
|
-- Schema for a simple role-based login system:
-- users <-> roles and levels <-> roles are many-to-many.
CREATE DATABASE login_system;
USE login_system;
CREATE TABLE users
(
    user_id INT UNSIGNED NOT NULL AUTO_INCREMENT,
    username VARCHAR(50) NOT NULL UNIQUE,
    -- Must hold a salted password *hash*, never plaintext. VARCHAR(255)
    -- leaves room for modern encodings (bcrypt alone is 60 chars); the
    -- previous VARCHAR(50) would truncate them.
    password VARCHAR(255) NOT NULL,
    PRIMARY KEY (user_id)
);
CREATE TABLE roles
(
    role_id INT UNSIGNED NOT NULL AUTO_INCREMENT,
    role_name VARCHAR(50) NOT NULL UNIQUE,
    PRIMARY KEY (role_id)
);
-- Junction table; the composite primary key prevents assigning the same
-- role to a user twice.
CREATE TABLE user_roles
(
    user_id INT UNSIGNED NOT NULL,
    role_id INT UNSIGNED NOT NULL,
    PRIMARY KEY (user_id, role_id),
    FOREIGN KEY (user_id) REFERENCES users(user_id) ON DELETE CASCADE,
    FOREIGN KEY (role_id) REFERENCES roles(role_id) ON DELETE CASCADE
);
CREATE TABLE levels
(
    level_id INT UNSIGNED NOT NULL AUTO_INCREMENT,
    level_name VARCHAR(50) NOT NULL UNIQUE,
    PRIMARY KEY (level_id)
);
-- Junction table; composite primary key prevents duplicate rows.
CREATE TABLE level_roles
(
    level_id INT UNSIGNED NOT NULL,
    role_id INT UNSIGNED NOT NULL,
    PRIMARY KEY (level_id, role_id),
    FOREIGN KEY (level_id) REFERENCES levels(level_id) ON DELETE CASCADE,
    FOREIGN KEY (role_id) REFERENCES roles(role_id) ON DELETE CASCADE
);
const { ethers, upgrades } = require("hardhat");
// TO DO: Place the address of your proxy here!
const proxyAddress = "0x63E2Ef487EE78e0391548f57102B0681343eC69A";
// const V1_implementation_address = "0x09D42cFF54f36653C82cc42ED38178526f0542C5"
// const V2_implementation_address = "0xaed5a104602bf9673e85b9fafdc5f0c49b538421"

/**
 * Upgrades the proxy at `proxyAddress` to BUSDVYNCSTAKEV2, then sanity-checks
 * that state was retained (dataAddress) and exercises the version getter and
 * setter both via the public variable and the function.
 */
async function main() {
  const BUSDVYNCSTAKEV2 = await ethers.getContractFactory("BUSDVYNCSTAKEV2");
  const upgraded = await upgrades.upgradeProxy(proxyAddress, BUSDVYNCSTAKEV2);

  // Fetch once instead of issuing two identical RPC calls.
  const dataAddress = (await upgraded.dataAddress()).toString();
  console.log(dataAddress);
  // Compare case-insensitively with strict equality: addresses returned by
  // the node are EIP-55 checksummed, so a case-sensitive `==` against a
  // differently-cased literal can report false for the same address.
  console.log(
    dataAddress.toLowerCase() ===
      "0xbA9fFDe1CE983a5eD91Ba7b2298c812F6C633542".toLowerCase()
  );

  console.log(
    (await upgraded.version()).toString(),
    "version by calling variable"
  );
  console.log(
    (await upgraded.getVersion()).toString(),
    "version by calling function"
  );
  await upgraded.setVersion(2);
  console.log(
    (await upgraded.version()).toString(),
    "version by calling variable"
  );
  console.log(
    (await upgraded.getVersion()).toString(),
    "version by calling function"
  );
}

main()
  .then(() => process.exit(0))
  .catch((error) => {
    console.error(error);
    process.exit(1);
  });
|
import { combineReducers } from 'redux';
import { makeCommunicationReducer } from 'shared/helpers/redux';
import { ReducersMap } from 'shared/types/redux';
import * as NS from '../../namespace';
import { initial } from '../data/initial';
// tslint:disable:max-line-length
// Async-request lifecycle ("communication") reducers for the profile feature.
// Each makeCommunicationReducer tracks the execute/success/fail action triple
// of one operation and resets to its slice of the initial state.
export const communicationReducer = combineReducers<NS.IReduxState['communication']>({
  savePersonalInfo: makeCommunicationReducer<NS.ISavePersonalInfo, NS.ISavePersonalInfoSuccess, NS.ISavePersonalInfoFail>(
    'PROFILE:SAVE_PERSONAL_INFO',
    'PROFILE:SAVE_PERSONAL_INFO_SUCCESS',
    'PROFILE:SAVE_PERSONAL_INFO_FAIL',
    initial.communication.savePersonalInfo,
  ),
  uploadDocument: makeCommunicationReducer<NS.IUploadDocument, NS.IUploadDocumentSuccess, NS.IUploadDocumentFail>(
    'PROFILE:UPLOAD_DOCUMENT',
    'PROFILE:UPLOAD_DOCUMENT_SUCCESS',
    'PROFILE:UPLOAD_DOCUMENT_FAIL',
    initial.communication.uploadDocument,
  ),
  removeDocument: makeCommunicationReducer<NS.IRemoveDocument, NS.IRemoveDocumentSuccess, NS.IRemoveDocumentFail>(
    'PROFILE:REMOVE_DOCUMENT',
    'PROFILE:REMOVE_DOCUMENT_SUCCESS',
    'PROFILE:REMOVE_DOCUMENT_FAIL',
    initial.communication.removeDocument,
  ),
  uploadImage: makeCommunicationReducer<NS.IUploadImage, NS.IUploadImageSuccess, NS.IUploadImageFail>(
    'PROFILE:UPLOAD_IMAGE',
    'PROFILE:UPLOAD_IMAGE_SUCCESS',
    'PROFILE:UPLOAD_IMAGE_FAIL',
    initial.communication.uploadImage,
  ),
} as ReducersMap<NS.IReduxState['communication']>);
|
<reponame>mauroccvieira/learning-with-rpg
import Character from "../entities/character.entity";
import { CreateStarterAttributes } from "../gateway/create-starter-attributes.interface";
/**
 * Builds a non-player Character, delegating the starter attribute values to
 * the supplied factory callback.
 */
export function createNPC(
  name: string,
  getAttributes: CreateStarterAttributes
) {
  const starterAttributes = getAttributes();
  return new Character(name, starterAttributes);
}
|
<gh_stars>1-10
package com.ervin.litepal.table;
import org.litepal.annotation.Column;
import org.litepal.crud.DataSupport;
/**
* Created by Ervin on 2015/11/11.
*/
/**
 * LitePal model (one row per profile) holding a user's basic physical data.
 * Persistence is inherited from {@link DataSupport}.
 */
public class Profile extends DataSupport {
    /** Unique name; LitePal applies "unknown" when no value is set. */
    @Column(unique = true, defaultValue = "unknown")
    private String name;
    // Sex encoded as an int; the encoding is defined by callers (not visible here).
    private int sex;
    private float weight;
    private float height;
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public int getSex() {
        return sex;
    }
    public void setSex(int sex) {
        this.sex = sex;
    }
    public float getWeight() {
        return weight;
    }
    public void setWeight(float weight) {
        this.weight = weight;
    }
    public float getHeight() {
        return height;
    }
    public void setHeight(float height) {
        this.height = height;
    }
}
|
<reponame>korlando7/nodebook
import * as parseArgs from 'minimist';
import { resolve as resolvePath } from 'path';
import * as fs from 'fs';
/**
 * Validates command-line arguments and resolves the notebooks directory.
 * Accepts either `--notebooks <path>` or a single positional path; rejects
 * any leftover positionals or unknown flags.
 */
export default async function sanitizeParameters(rawargv: string[]) {
  const argv = parseArgs(rawargv, {
    boolean: 'docker',
    string: ['notebooks'],
  });

  // --docker
  const docker = argv.docker;

  // --notebooks: prefer the explicit flag, fall back to the first positional.
  const flagIsUsable =
    "notebooks" in argv &&
    typeof argv.notebooks === "string" &&
    argv.notebooks.trim() !== '';

  let notebooks;
  if (flagIsUsable) {
    notebooks = argv.notebooks;
  } else if (argv['_'].length > 0) {
    notebooks = argv['_'].shift().trim();
  } else {
    throw new Error("--notebooks path/to/notebooks is required if path not provided as argument.");
  }

  notebooks = resolvePath(notebooks);
  if (!fs.existsSync(notebooks)) {
    throw new Error("Notebooks path does not exist.");
  }
  if (!fs.statSync(notebooks).isDirectory()) {
    throw new Error("Notebooks path is not a directory.");
  }

  // Anything left in argv['_'] is an extra positional, e.g. `node . "abcdef"`.
  if (argv['_'].length > 0) {
    throw new Error("Unknown argument(s): " + argv['_'].join(', '));
  }

  // Any flag other than the two we support is a mistake.
  const known = ['notebooks', 'docker'];
  const unknown = Object.keys(argv).filter(
    (key) => key !== '_' && known.indexOf(key) === -1
  );
  if (unknown.length > 0) {
    throw new Error("Unknown parameter(s): " + unknown.join(', '));
  }

  return { notebooks, docker };
}
|
import random
def encrypt(string):
    """Encrypts a given string using a random 2D matrix"""
    # FIXME(review): this scheme is not invertible as written. The random
    # matrix is neither stored nor returned, and each XOR result is appended
    # as a variable-width decimal number so character boundaries are lost.
    # decrypt() below cannot recover the input; a shared key/seed and a
    # fixed-width (or delimited) encoding are needed.
    # Generate random 2D matrix
    matrix = [[random.randint(0, 9) for x in range(len(string))] for y in range(len(string))]
    encrypted_str = ""
    # Encrypt the string
    # Emits len(string)**2 values: every character is XORed against one
    # matrix entry per (row, col) pair, column by column.
    for col in range(len(string)):
        for row in range(len(string)):
            encrypted_str += str(matrix[row][col] ^ ord(string[row]))
    return encrypted_str
def decrypt(string):
    """Decrypts a given encrypted string"""
    # FIXME(review): does not invert encrypt(). It assumes the input is a
    # square grid of single-digit characters (encrypt emits multi-digit
    # values), and XORs each entry against matrix[row][0] even though
    # encrypt's random matrix is unavailable here. Treat both functions as
    # broken pending a redesign.
    encrypted_str = ""
    matrix_size = int(len(string) ** 0.5)
    encrypted_str = ""
    # Generate a 2D matrix from the encrypted string
    matrix = []
    for row in range(matrix_size):
        matrix.append([])
        for col in range(matrix_size):
            matrix[row].append(int(string[col + (row * matrix_size): col + (row * matrix_size) + 1]))
    # Decrypt the string
    for col in range(matrix_size):
        for row in range(matrix_size):
            encrypted_str += chr(matrix[row][col] ^ matrix[row][0])
    return encrypted_str
<filename>vue.config.js<gh_stars>1-10
ifProd = () => process.env.NODE_ENV === 'production'
module.exports = {
lintOnSave: false,
productionSourceMap: ifProd(),
chainWebpack: (config) => {
// Inline images & svgs into build
config.module
.rule('images')
.use('url-loader')
.options({
limit: 4096 * 3
})
config.module
.rule('svg')
.use('file-loader')
.loader('url-loader')
.options({
limit: 4096 * 3
})
},
css: {
extract: false
}
}
|
/**
* @author ooooo
* @date 2020/12/29 17:36
*/
#ifndef CPP_0330__SOLUTION1_H_
#define CPP_0330__SOLUTION1_H_
#include <vector>
#include <iostream>
using namespace std;
// Greedy solution for "Patching Array" (LeetCode 330).
// Invariant: every integer in [1, reach) is representable as a sum of a
// subset of the numbers taken so far. When the next array element cannot
// extend that range, patch with `reach` itself, doubling the coverage.
class Solution {
 public:
  int minPatches(std::vector<int> &nums, int n) {
    int added = 0;         // patches inserted so far
    std::size_t pos = 0;   // next unused element of nums
    long long reach = 1;   // smallest value not yet known to be representable
    while (reach <= n) {
      if (pos < nums.size() && nums[pos] <= reach) {
        // nums[pos] fits under the frontier: absorb it, extending the range.
        reach += nums[pos];
        ++pos;
      } else {
        // Patch with `reach`; the representable range doubles.
        reach += reach;
        ++added;
      }
    }
    return added;
  }
};
#endif //CPP_0330__SOLUTION1_H_
|
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.transport.message;
/**
* Converts the message data into objects specific to the corresponding transport channel.
*
* @author <NAME>
*
*/
public interface MessageBuilder {
    /**
     * Convert the message into objects specific to the corresponding transport channel.
     *
     * @param messageData
     *            the data to convert
     * @param isEncrypted
     *            whether the message data is encrypted
     * @return the conversion result
     */
    Object[] build(byte[] messageData, boolean isEncrypted);
    /**
     * Convert the message into objects specific to the corresponding transport channel.
     *
     * @param messageData
     *            the data to convert
     * @param messageSignature
     *            the data signature
     * @param isEncrypted
     *            whether the message data is encrypted
     * @return the conversion result
     */
    Object[] build(byte[] messageData, byte[] messageSignature, boolean isEncrypted);
}
|
#!/bin/bash
# Benchmark driver comparing two stippling tools (run_hedcuter, run_voronoi)
# across point counts, contrast/brightness variants and input images, then
# converting every SVG result to PDF.
#
# Option cross-reference for the two tools:
#"input-file,I"
#"output-file,O"
#"-debug" false
#"-n" sample_size = 1000
# "-uniform_radius" hedcut.uniform_disk_size = false;
#"-radius" hedcut.disk_size = 1;
#"-iteration" hedcut.cvt_iteration_limit = 100;
#"-maxD" hedcut.max_site_displacement 1.01
#"-black" hedcut.black_disk = false;
#"-avg" hedcut.average_termination = false;
#"-gpu" hedcut.gpu = false;
#"-subpixel" hedcut.subpixels = 1;
#"input-file,I"
#"output-file,O"
#"stipples,s" 4000 "Number of Stipple Points to use" )
#"colour-output,c", "Produce a coloured stipple drawing" );
#"threshold,t", 0.1f "How long to wait for Voronoi diagram to converge"
#"no-overlap,n", "Ensure that stipple points do not overlap with each other" )
#"fixed-radius,f", "Fixed radius stipple points imply a significant loss of tonal properties" )
#"sizing-factor,z", 1.0f "The final stipple radius is multiplied by this factor" )
#"subpixels,p" 5, "Controls the tile size of centroid computations." )
#"log,l", "Determines output verbosity" );
# Shared parameters (hedcuter flag / voronoi flag):
n=1000 #-n -s
d=1.0 #-maxD -t
t=0.14 #
r=7 #-radius -z
z=0.7
#i=1000 #-iteration n/a
#p=1 #-subpixel -p
echo "run_hedcuter1"
./run_hedcuter -I images/einstein-medium.png -O results/hedcuter/1-1.svg -black -avg -n $n -radius $r -maxD $d
./run_hedcuter -I images/einstein-medium.png -O results/hedcuter/1-2.svg -black -avg -n $n -radius $r -maxD $d
echo "run_voroni1"
./run_voronoi -I images/einstein-medium.png -O results/voronoi/1-1.svg -s $n -z $z -t $t
./run_voronoi -I images/einstein-medium.png -O results/voronoi/1-2.svg -s $n -z $z -t $t
n=500
echo "run_hedcuter2"
./run_hedcuter -I images/einstein-medium.png -O results/hedcuter/2-1.svg -black -avg -n $n -radius $r -maxD $d
n=2000
./run_hedcuter -I images/einstein-medium.png -O results/hedcuter/2-2.svg -black -avg -n $n -radius $r -maxD $d
n=500
echo "run_voroni2"
./run_voronoi -I images/einstein-medium.png -O results/voronoi/2-1.svg -s $n -z $z -t $t
n=2000
./run_voronoi -I images/einstein-medium.png -O results/voronoi/2-2.svg -s $n -z $z -t $t
n=1000
echo "run_hedcuter3"
./run_hedcuter -I images/einstein-medium-high-contrast.png -O results/hedcuter/3-1.svg -black -avg -n $n -radius $r -maxD $d
./run_hedcuter -I images/einstein-medium-low-contrast.png -O results/hedcuter/3-2.svg -black -avg -n $n -radius $r -maxD $d
./run_hedcuter -I images/einstein-medium-high-bright.png -O results/hedcuter/3-3.svg -black -avg -n $n -radius $r -maxD $d
./run_hedcuter -I images/einstein-medium-low-bright.png -O results/hedcuter/3-4.svg -black -avg -n $n -radius $r -maxD $d
./run_hedcuter -I images/einstein.png -O results/hedcuter/3-5.svg -black -avg -n $n -radius $r -maxD $d
./run_hedcuter -I images/einstein-small.png -O results/hedcuter/3-6.svg -black -avg -n $n -radius $r -maxD $d
echo "run_voroni3"
./run_voronoi -I images/einstein-medium-high-contrast.png -O results/voronoi/3-1.svg -s $n -z $z -t $t
./run_voronoi -I images/einstein-medium-low-contrast.png -O results/voronoi/3-2.svg -s $n -z $z -t $t
./run_voronoi -I images/einstein-medium-high-bright.png -O results/voronoi/3-3.svg -s $n -z $z -t $t
./run_voronoi -I images/einstein-medium-low-bright.png -O results/voronoi/3-4.svg -s $n -z $z -t $t
./run_voronoi -I images/einstein.png -O results/voronoi/3-5.svg -s $n -z $z -t $t
./run_voronoi -I images/einstein-small.png -O results/voronoi/3-6.svg -s $n -z $z -t $t
echo "run_hedcuter4"
./run_hedcuter -I images/phoenix.png -O results/hedcuter/4-1.svg -black -avg -n $n -radius $r -maxD $d
./run_hedcuter -I images/gradient.png -O results/hedcuter/4-2.svg -black -avg -n $n -radius $r -maxD $d
./run_hedcuter -I images/erinking.png -O results/hedcuter/4-3.svg -black -avg -n $n -radius $r -maxD $d
echo "run_voroni4"
./run_voronoi -I images/phoenix.png -O results/voronoi/4-1.svg -s $n -z $z -t $t
./run_voronoi -I images/gradient.png -O results/voronoi/4-2.svg -s $n -z $z -t $t
./run_voronoi -I images/erinking.png -O results/voronoi/4-3.svg -s $n -z $z -t $t
# Convert all SVG outputs to PDF. Globbing (instead of parsing `ls` output)
# plus quoting keeps this safe for paths containing spaces.
for pro_file in results/voronoi/*.svg
do
	app_file=$(basename "$pro_file")
	app_file="${app_file%.*}"
	svg2pdf "$pro_file" "results/voronoi/$app_file.pdf"
done
for pro_file in results/hedcuter/*.svg
do
	app_file=$(basename "$pro_file")
	app_file="${app_file%.*}"
	svg2pdf "$pro_file" "results/hedcuter/$app_file.pdf"
done
|
import React, { useState, useContext, useEffect } from "react";
import { Head, InputBox } from "Components";
import UserContext from "Context/UserContext";
import { useSelector } from "react-redux";
import { jobSekkerProfessionalDetailsUpdate } from "../API/EmploymentAPI";
import { freelacerProfessionalDetailsUpdate } from "../API/FreelancerApi";
import Img from "react-cool-img";
import { CustomError, CustomSuccess } from "./Toasts";
import { getLookUpByPrefix } from "API/Api";
import { crossSvg, HourlyRateSvg } from "Assets";
import { PART_TIME } from "Utils/Constants";
export default function SetHourlyRate({
setIsSetHourlyRateOpen,
selectedHourlyRate,
}) {
const [hourlyRate, setHourlyRate] = useState(selectedHourlyRate);
const [joiningAvailbiltyAPI, setJoiningAvailbiltyAPI] = useState([]);
const [joiningAvailbilty, setJoiningAvailbilty] = useState([]);
const [canRelocate, setCanRelocate] = useState(0);
const [workOffice, setWorkOffice] = useState(0);
const [workRemote, setWorkRemote] = useState(true);
const [jobTypeDropDownSelected, setJobTypeDropDownSelected] = useState(null);
const user = useContext(UserContext);
let { jobsekker } = useSelector((state) => state.jobsekker);
let { freelancer } = useSelector((state) => state.freelancer);
if (jobsekker.Id === undefined) {
jobsekker = freelancer;
}
useEffect(() => {
setJoiningAvailbilty(jobsekker.Availbility);
setCanRelocate(jobsekker.canRelocate);
setWorkRemote(jobsekker.workRemote);
setWorkOffice(jobsekker.workOffice);
setJobTypeDropDownSelected(jobsekker.JobTypeLookupDetail);
getLookUpByPrefix("JOINAVAIL")
.then(({ data }) => {
let formattedData = [];
data.result.map((e) => {
formattedData.push({ label: e.Title, value: e.Id });
});
setJoiningAvailbiltyAPI(formattedData);
})
.catch((error) => {
// console.log(error);
});
}, []);
const saveProfessionalDetailData = () => {
let relocate = canRelocate;
let officeWork = workOffice;
if (jobTypeDropDownSelected != null) {
if (
jobTypeDropDownSelected.value == PART_TIME ||
jobTypeDropDownSelected.value == PART_TIME
) {
relocate = false;
officeWork = false;
}
}
if (user.JobSeekerId) {
let data = {
Id: user.JobSeekerId,
ExpectedSalary: hourlyRate,
AvailabilityLookupDetailId:
joiningAvailbilty && joiningAvailbilty.value,
WorkOffice: officeWork ? 1 : 0,
CanRelocate: relocate ? 1 : 0,
WorkRemote: workRemote ? 1 : 0,
JobTypeLookupDetailId:
jobTypeDropDownSelected != null
? jobTypeDropDownSelected.value
: null,
LinkedInProfile: jobsekker.linkedInProfile,
GoogleProfile: jobsekker.googleProfile,
FacebookProfile: jobsekker.facebookProfile,
};
jobSekkerProfessionalDetailsUpdate(data)
.then(({ data }) => {
window.location.reload();
//CustomSuccess("Professional Detail Update Successfully...");
})
.catch((error) => {
CustomError("Failed to Update Professional Detail ");
});
} else {
let data = {
Id: user.FreelancerId,
HourlyRate: hourlyRate,
AvailabilityLookupDetailId:
joiningAvailbilty && joiningAvailbilty.value,
WorkOffice: officeWork ? 1 : 0,
CanRelocate: relocate ? 1 : 0,
WorkRemote: workRemote ? 1 : 0,
JobTypeLookupDetailId:
jobTypeDropDownSelected != null
? jobTypeDropDownSelected.value
: null,
LinkedInProfile: jobsekker.linkedInProfile,
GoogleProfile: jobsekker.googleProfile,
FacebookProfile: jobsekker.facebookProfile,
};
freelacerProfessionalDetailsUpdate(data)
.then(({ data }) => {
window.location.reload();
//CustomSuccess("Professional Detail Update Successfully...");
})
.catch((error) => {
CustomError("Failed to Update Professional Detail ");
});
}
};
useEffect(() => {
document.body.style.overflow = "hidden";
return () => {
document.body.style.overflow = "auto";
};
}, []);
return (
<>
<Head title="AIDApro | Add Skills" description="Add skills" />
<div className="pupup__container">
<form
className="pupup__container__from animate__animated animate__slideInDown"
style={{ maxWidth: "700px" }}
onSubmit={() => {
setIsSetHourlyRateOpen(false);
}}
>
<button
className="pupup__container__from__button"
type="button"
onClick={(e) => {
e.preventDefault();
setIsSetHourlyRateOpen(false);
}}
title="close popup"
>
{crossSvg}
</button>
<div className="pupup__container__from__wrapper">
<div className="pupup__container__from__wrapper__header">
<Img
loading="lazy"
src={HourlyRateSvg}
alt="HourlyRateSvg"
style={{ width: 40, height: 40, marginRight: ".5em" }}
/>
<div className="pupup__container__from__wrapper__header__content">
<div className="pupup__container__from__wrapper__header__content__heading">
Work Details
</div>
<div className="pupup__container__from__wrapper__header__content__info">
Add your hourly rate hare (€) and select when you can start
working
</div>
</div>
</div>
<div className="pupup__container__from__wrapper__form">
<div className="pupup__container__from__wrapper__form__row">
<InputBox
placeholder="Amount"
value={hourlyRate}
style={{
height: "fit-content",
}}
onChange={(event) => {
setHourlyRate(event.currentTarget.value);
}}
/>
</div>
<div className="pupup__container__from__wrapper__form__row">
<InputBox
variant="select"
placeholder="Select"
options={joiningAvailbiltyAPI}
value={joiningAvailbilty}
onChange={(event) => setJoiningAvailbilty(event)}
style={{
height: "fit-content",
minHeight: "45px",
}}
/>
</div>
</div>
<div className="pupup__container__from__wrapper__cta">
<button
type="submit"
className="header__nav__btn btn__secondary"
style={{
height: "50px",
width: "180px",
}}
onClick={() => {
if (joiningAvailbilty.length > 0 || hourlyRate != null)
saveProfessionalDetailData();
else setIsSetHourlyRateOpen(false);
}}
title="set professional data"
>
Set
</button>
</div>
</div>
</form>
</div>
</>
);
}
|
<reponame>AYCHDO/Dominus
import collections
from pycoin import ecdsa
from ..script import der, opcodes, tools
# chr() on Python 2 (where bytes is str); a single-element bytes on Python 3.
bytes_from_int = chr if bytes == str else lambda x: bytes([x])


class ScriptType(object):
    """Abstract base for bitcoin script templates (pay-to-pubkey, p2pkh, ...).

    Subclasses define a TEMPLATE script and the parsing/solving logic; this
    base class cannot be instantiated directly.
    """

    def __init__(self):
        # NotImplementedError, not the NotImplemented singleton: the original
        # `raise NotImplemented()` only "worked" because calling the
        # singleton happens to raise TypeError.
        raise NotImplementedError()

    @classmethod
    def subclasses(cls, skip_self=True):
        """Yield all transitive subclasses, optionally excluding cls itself."""
        for c in cls.__subclasses__():
            for c1 in c.subclasses(skip_self=False):
                yield c1
        if not skip_self:
            yield cls

    @classmethod
    def from_address(cls, text, netcodes=None):
        """Try each subclass in turn; return the first that parses `text`,
        or None when no subclass accepts it."""
        for sc in cls.subclasses():
            try:
                st = sc.from_address(text, netcodes=netcodes)
                return st
            except Exception:
                pass

    @classmethod
    def from_script(cls, script, netcode="BTC"):
        """Try each subclass in turn; return the first that parses `script`,
        or None when no subclass accepts it."""
        for sc in cls.subclasses():
            try:
                st = sc.from_script(script)
                return st
            except Exception:
                pass

    @classmethod
    def match(cls, script):
        """Match `script` against cls.TEMPLATE opcode by opcode.

        Returns a dict collecting PUBKEY_LIST / PUBKEYHASH_LIST /
        NULLDATA_LIST data pulled from the script, or raises ValueError
        when the script does not fit the template.
        """
        template = cls.TEMPLATE
        r = collections.defaultdict(list)
        pc1 = pc2 = 0
        while 1:
            # Both streams exhausted together: complete match.
            if pc1 == len(script) and pc2 == len(template):
                return r
            if pc1 >= len(script) or pc2 >= len(template):
                break
            opcode1, data1, pc1 = tools.get_opcode(script, pc1)
            opcode2, data2, pc2 = tools.get_opcode(template, pc2)
            if opcode2 == opcodes.OP_PUBKEY:
                l1 = len(data1)
                # SEC public keys are 33 (compressed) to 65 bytes; the upper
                # bound leaves headroom for unusual encodings.
                if l1 < 33 or l1 > 120:
                    break
                r["PUBKEY_LIST"].append(data1)
            elif opcode2 == opcodes.OP_PUBKEYHASH:
                # hash160 digests are exactly 20 bytes. Integer division keeps
                # the comparison int/int (160/8 was a float on Python 3).
                if len(data1) != 160 // 8:
                    break
                r["PUBKEYHASH_LIST"].append(data1)
            elif opcode2 == opcodes.OP_NULLDATA:
                if not (0 < len(data1) <= 40):
                    break
                r["NULLDATA_LIST"].append(data1)
            elif (opcode1, data1) != (opcode2, data2):
                break
        raise ValueError("script doesn't match")

    def _create_script_signature(self, secret_exponent, sign_value, signature_type):
        """DER-encode an ECDSA signature over sign_value with the
        signature-type byte appended, normalizing s to the low-S form."""
        order = ecdsa.generator_secp256k1.order()
        r, s = ecdsa.sign(ecdsa.generator_secp256k1, secret_exponent, sign_value)
        # Canonical low-S: replace s by order - s when s > order/2.
        if s + s > order:
            s = order - s
        return der.sigencode_der(r, s) + bytes_from_int(signature_type)

    @staticmethod
    def _dummy_signature(signature_type):
        """Produce a deterministic maximum-size placeholder signature
        (used where a real signature is not yet available)."""
        order = ecdsa.generator_secp256k1.order()
        r, s = order - 1, order // 2
        return der.sigencode_der(r, s) + bytes_from_int(signature_type)

    def address(self, netcode='BTC'):
        return self.info()["address"]

    def solve(self, **kwargs):
        """
        The kwargs required depend upon the script type.
        """
        raise NotImplementedError()
|
#!/bin/bash
# Launch Apache JMeter 3.0 in the background using the IBM JDK 8 JRE.
export JAVA_HOME=/opt/ibm.jdk.1.8/jre
export PATH=$PATH:$JAVA_HOME/bin
/opt/apache-jmeter-3.0/bin/jmeter.sh &
#!/bin/bash
# NOTE: bash is required — the [[ ]] test below is a bashism; the original
# "#!/bin/sh" shebang made it a syntax error on strictly POSIX shells.
_VERSION="0.0.1"
# This script will install and use pm2.
# We want to register the process-engine-runtime as a service
# that is started during system startup.
if [[ $(uname) != "Darwin" ]]; then
  echo "This tool currently works only for macOS. Sorry."
  # POSIX exit codes are 0-255; `exit -1` is out of range / nonstandard.
  exit 1
fi
# Due to a bug in pm2, the macOS version does not support the removal of started processes.
# Ref: https://github.com/Unitech/pm2/issues/1349 and https://github.com/Unitech/pm2/issues/3732
cat << EOF
There is a bug in pm2, that prevents us from cleanly removing the process-engine-runtime from the autostart list of your system.
You can try to delete the pm2 launch agent, but this will likely remove ALL of your services from the autostart list.
Anyway, this could help you:
$ rm ~/Library/LaunchAgents/pm2.*
We're sorry for that, and will fix it as soon as it is fixed in pm2.
The ProccessEngine Team.
EOF
<gh_stars>0
import { ReportForm } from './report-form';
import React, { useState, useCallback } from 'react';
import { ReportFormConfig, ReportFormQuery, ShareButtonConfig } from '../defs';
import { observer } from 'mobx-react-lite';
import { Button } from '../../../common/forms/button';
import { FontAwesomeIcon } from '../../../util/icon';
import { useI18n } from '../../../i18n/react';
import styled from '../../../util/styled';
import { ShareButton } from './share-button';
/** Props for GameFooter. */
interface GameFooterProps {
    /** Current room name, forwarded to the share button. */
    roomName: string;
    reportForm: ReportFormConfig;
    shareButton: ShareButtonConfig;
    /** Invoked with the query when the report form is submitted. */
    onSubmit: (query: ReportFormQuery) => void;
}
/**
 * Game screen footer: a share button plus a toggleable report form.
 */
export const GameFooter: React.FunctionComponent<GameFooterProps> = observer(
    ({ roomName, reportForm, shareButton, onSubmit }) => {
        const t = useI18n('game_client');
        // Whether the report form is currently expanded.
        const [reportFormOpen, setReportFormOpen] = useState(false);
        // Collapse the form before delegating the query to the parent.
        const submitHandler = useCallback(
            (query: ReportFormQuery) => {
                setReportFormOpen(false);
                onSubmit(query);
            },
            [onSubmit],
        );
        return (
            <>
                <ButtonContainer>
                    <ShareButton shareButton={shareButton} roomName={roomName} />
                    <Button onClick={() => setReportFormOpen(state => !state)}>
                        <FontAwesomeIcon icon={['far', 'paper-plane']} />{' '}
                        {t('reportForm.title')}
                    </Button>
                </ButtonContainer>
                <ReportForm
                    open={reportFormOpen}
                    reportForm={reportForm}
                    onSubmit={submitHandler}
                />
            </>
        );
    },
);
/**
 * Wrapper of the footer buttons.
 */
const ButtonContainer = styled.div`
    text-align: right;
`;
|
#!/bin/bash
# The original first line was "# /bin/bash" — a plain comment, not a shebang
# (missing the "!"), so the script ran under whatever shell invoked it.
NAME='tsar'
# conditionally start container
[[ $(docker ps -f "name=$NAME" --format '{{.Names}}') == $NAME ]] ||
(cd ../../ && make run)
# Use $NAME rather than re-hardcoding the container name; ctrl-c detaches
# instead of killing the container.
docker attach "$NAME" --detach-keys="ctrl-c"
# Reset the terminal after detaching.
# see: https://tldp.org/HOWTO/Keyboard-and-Console-HOWTO-4.html
echo -e \\033c
|
/**
 * Returns a truthy value only for plain objects — values built by the
 * `Object` constructor (object literals) — and falsy for null, undefined,
 * arrays, class instances and primitives.
 */
const isObject = value => {
  if (value === null || value === undefined) {
    return false
  }
  return value.constructor && value.constructor.name === 'Object'
}

export default isObject
|
<reponame>liftsecurity/sorrow
"use strict";
// Babel/traceur-generated runtime helper: implements array-pattern
// destructuring over arrays and arbitrary iterables (spec "@@slicedToArray").
var _slicedToArray = function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } };
// Babel/traceur-generated runtime helper: materializes an array/iterable
// into a fresh array for spread syntax (spec "@@toConsumableArray").
var _toConsumableArray = function (arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i]; return arr2; } else { return Array.from(arr); } };
(function (root, factory) {
module.exports = factory();
/*if (typeof define === "function" && define.amd&& 1===5) {
define(factory);
} else if (typeof exports === "object") {
//require("traceur/bin/traceur-runtime");
} else {
const oldWu = root.wu;
root.wu = factory();
root.wu.noConflict = () => {
const wu = root.wu;
root.wu = oldWu;
return wu;
};
}*/
})(undefined, function () {
"use strict";
function wu(iterable) {
if (!isIterable(iterable)) {
throw new Error("wu: `" + iterable + "` is not iterable!");
}
return new Wu(iterable);
}
// Internal wrapper type: grab the iterable's iterator once and forward
// `next` to it (bound, so detached calls still work).
function Wu(iterable) {
  var inner = getIterator(iterable);
  this.next = inner.next.bind(inner);
}
// Share one prototype object so methods attached via wu.* and via
// Wu.prototype stay in sync.
wu.prototype = Wu.prototype;
// This is known as @@iterator in the ES6 spec.
// Resolve, once at load time, the property key this engine uses to fetch
// iterators, and freeze it as the non-writable `wu.iteratorSymbol`.
Object.defineProperty(wu, "iteratorSymbol", {
value: (function () {
// Try and create a Proxy to intercept the actual symbol used to get the
// iterator. We prefer this to Symbol.iterator because some versions of
// SpiderMonkey use the string "@@iteratorSymbol" despite exposing the
// Symbol.iterator symbol!
if (typeof Proxy === "function") {
var _iteratorNormalCompletion;
var _didIteratorError;
var _iteratorError;
var _iterator, _step;
var _ret = (function () {
var symbol = undefined;
try {
// The proxy's `get` trap records whatever key a for-of asks for,
// then throws to abort the iteration immediately.
var proxy = new Proxy({}, {
get: function get(_, name) {
symbol = name;
throw Error();
}
});
_iteratorNormalCompletion = true;
_didIteratorError = false;
_iteratorError = undefined;
try {
for (_iterator = proxy[Symbol.iterator](); !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
var _ = _step.value;
break;
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator["return"]) {
_iterator["return"]();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
} catch (e) {}
// The trap fired before the throw: `symbol` now holds the engine's key.
if (symbol) {
return {
v: symbol
};
}
})();
if (typeof _ret === "object") return _ret.v;
}
// Check if `Symbol.iterator` exists and use that if possible.
if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
return Symbol.iterator;
}
throw new Error("Cannot find iterator symbol.");
})()
});
// Wu instances are themselves iterable: asking one for its iterator
// returns the instance itself (it already exposes `next`).
wu.prototype[wu.iteratorSymbol] = function () {
return this;
};
/*
* Internal utilities
*/
// An internal placeholder/sentinel value, distinguishable from any user
// value (including undefined) by identity comparison.
var MISSING = {};
// Return whether a thing is iterable: truthy and exposing the resolved
// iterator key as a function. Falsy inputs short-circuit and are returned
// as-is (matching the original `&&` chain's falsy result).
var isIterable = function isIterable(thing) {
  if (!thing) {
    return thing;
  }
  return typeof thing[wu.iteratorSymbol] === "function";
};
// Fetch an iterator from `thing`, or throw a TypeError when it cannot
// be iterated.
var getIterator = function getIterator(thing) {
  if (!isIterable(thing)) {
    throw new TypeError("Not iterable: " + thing);
  }
  return thing[wu.iteratorSymbol]();
};
// Define a static method on `wu` and set its prototype to the shared
// `Wu.prototype`, so iterables the method returns stay chainable.
var staticMethod = function staticMethod(name, fn) {
fn.prototype = Wu.prototype;
wu[name] = fn;
};
// Define a function that is attached as both a `Wu.prototype` method and a
// curryable static method on `wu` directly that takes an iterable as its last
// parameter.
var prototypeAndStatic = function prototypeAndStatic(name, fn) {
// Third argument optionally overrides the arity used for currying.
var expectedArgs = arguments[2] === undefined ? fn.length : arguments[2];
return (function () {
fn.prototype = Wu.prototype;
Wu.prototype[name] = fn;
// +1 for the iterable, which is the `this` value of the function so it
// isn't reflected by the length property.
expectedArgs += 1;
// Static form: last positional argument is the iterable; delegate to the
// freshly installed prototype method on a wrapped copy of it.
wu[name] = wu.curryable(function () {
var _wu;
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
var iterable = args.pop();
return (_wu = wu(iterable))[name].apply(_wu, args);
}, expectedArgs);
})();
};
// Decorator: invoke `fn` with the same `this` and arguments, then wrap the
// iterable it returns in `wu` to keep the result chainable.
var rewrap = function rewrap(fn) {
  return function () {
    var args = Array.prototype.slice.call(arguments);
    return wu(fn.apply(this, args));
  };
};
// Variants of staticMethod / prototypeAndStatic whose function has its
// return value re-wrapped in `wu` via the `rewrap` decorator.
var rewrapStaticMethod = function rewrapStaticMethod(name, fn) {
return staticMethod(name, rewrap(fn));
};
var rewrapPrototypeAndStatic = function rewrapPrototypeAndStatic(name, fn, expectedArgs) {
return prototypeAndStatic(name, rewrap(fn), expectedArgs);
};
// Return a wrapped version of `fn` pre-loaded with the initial arguments
// `args` (an array); later calls append their own arguments.
function curry(fn, args) {
  return function () {
    var rest = Array.prototype.slice.call(arguments);
    return fn.apply(this, args.concat(rest));
  };
}
/*
* Public utilities
*/
// wu.curryable(fn[, expected]): make `fn` auto-currying — calling with fewer
// than `expected` arguments returns a partially-applied function instead of
// invoking `fn`. (The original wrapped `f` in a no-op IIFE; dropped here.)
staticMethod("curryable", function (fn) {
  var expected = arguments[1] === undefined ? fn.length : arguments[1];
  return function f() {
    var args = Array.prototype.slice.call(arguments);
    return args.length >= expected ? fn.apply(this, args) : curry(f, args);
  };
});
rewrapStaticMethod("entries", regeneratorRuntime.mark(function callee$1$0(obj) {
var _iteratorNormalCompletion2, _didIteratorError2, _iteratorError2, _iterator2, _step2, k;
return regeneratorRuntime.wrap(function callee$1$0$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion2 = true;
_didIteratorError2 = false;
_iteratorError2 = undefined;
context$2$0.prev = 3;
_iterator2 = Object.keys(obj)[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done) {
context$2$0.next = 12;
break;
}
k = _step2.value;
context$2$0.next = 9;
return [k, obj[k]];
case 9:
_iteratorNormalCompletion2 = true;
context$2$0.next = 5;
break;
case 12:
context$2$0.next = 18;
break;
case 14:
context$2$0.prev = 14;
context$2$0.t0 = context$2$0["catch"](3);
_didIteratorError2 = true;
_iteratorError2 = context$2$0.t0;
case 18:
context$2$0.prev = 18;
context$2$0.prev = 19;
if (!_iteratorNormalCompletion2 && _iterator2["return"]) {
_iterator2["return"]();
}
case 21:
context$2$0.prev = 21;
if (!_didIteratorError2) {
context$2$0.next = 24;
break;
}
throw _iteratorError2;
case 24:
return context$2$0.finish(21);
case 25:
return context$2$0.finish(18);
case 26:
case "end":
return context$2$0.stop();
}
}, callee$1$0, this, [[3, 14, 18, 26], [19,, 21, 25]]);
}));
// wu.keys(obj): yield each own enumerable key of `obj`. Regenerator-compiled
// state machine equivalent to: function* (obj) { yield* Object.keys(obj); }
rewrapStaticMethod("keys", regeneratorRuntime.mark(function callee$1$1(obj) {
return regeneratorRuntime.wrap(function callee$1$1$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
return context$2$0.delegateYield(Object.keys(obj), "t1", 1);
case 1:
case "end":
return context$2$0.stop();
}
}, callee$1$1, this);
}));
rewrapStaticMethod("values", regeneratorRuntime.mark(function callee$1$2(obj) {
var _iteratorNormalCompletion3, _didIteratorError3, _iteratorError3, _iterator3, _step3, k;
return regeneratorRuntime.wrap(function callee$1$2$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion3 = true;
_didIteratorError3 = false;
_iteratorError3 = undefined;
context$2$0.prev = 3;
_iterator3 = Object.keys(obj)[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done) {
context$2$0.next = 12;
break;
}
k = _step3.value;
context$2$0.next = 9;
return obj[k];
case 9:
_iteratorNormalCompletion3 = true;
context$2$0.next = 5;
break;
case 12:
context$2$0.next = 18;
break;
case 14:
context$2$0.prev = 14;
context$2$0.t2 = context$2$0["catch"](3);
_didIteratorError3 = true;
_iteratorError3 = context$2$0.t2;
case 18:
context$2$0.prev = 18;
context$2$0.prev = 19;
if (!_iteratorNormalCompletion3 && _iterator3["return"]) {
_iterator3["return"]();
}
case 21:
context$2$0.prev = 21;
if (!_didIteratorError3) {
context$2$0.next = 24;
break;
}
throw _iteratorError3;
case 24:
return context$2$0.finish(21);
case 25:
return context$2$0.finish(18);
case 26:
case "end":
return context$2$0.stop();
}
}, callee$1$2, this, [[3, 14, 18, 26], [19,, 21, 25]]);
}));
/*
* Infinite iterators
*/
rewrapPrototypeAndStatic("cycle", regeneratorRuntime.mark(function callee$1$3() {
var saved, _iteratorNormalCompletion4, _didIteratorError4, _iteratorError4, _iterator4, _step4, x;
return regeneratorRuntime.wrap(function callee$1$3$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
saved = [];
_iteratorNormalCompletion4 = true;
_didIteratorError4 = false;
_iteratorError4 = undefined;
context$2$0.prev = 4;
_iterator4 = this[Symbol.iterator]();
case 6:
if (_iteratorNormalCompletion4 = (_step4 = _iterator4.next()).done) {
context$2$0.next = 14;
break;
}
x = _step4.value;
context$2$0.next = 10;
return x;
case 10:
saved.push(x);
case 11:
_iteratorNormalCompletion4 = true;
context$2$0.next = 6;
break;
case 14:
context$2$0.next = 20;
break;
case 16:
context$2$0.prev = 16;
context$2$0.t3 = context$2$0["catch"](4);
_didIteratorError4 = true;
_iteratorError4 = context$2$0.t3;
case 20:
context$2$0.prev = 20;
context$2$0.prev = 21;
if (!_iteratorNormalCompletion4 && _iterator4["return"]) {
_iterator4["return"]();
}
case 23:
context$2$0.prev = 23;
if (!_didIteratorError4) {
context$2$0.next = 26;
break;
}
throw _iteratorError4;
case 26:
return context$2$0.finish(23);
case 27:
return context$2$0.finish(20);
case 28:
if (!saved) {
context$2$0.next = 32;
break;
}
return context$2$0.delegateYield(saved, "t4", 30);
case 30:
context$2$0.next = 28;
break;
case 32:
case "end":
return context$2$0.stop();
}
}, callee$1$3, this, [[4, 16, 20, 28], [21,, 23, 27]]);
}));
// wu.count(start = 0, step = 1): infinite generator yielding start,
// start+step, start+2*step, ... Regenerator-compiled equivalent of:
//   function* (start = 0, step = 1) { for (let n = start;; n += step) yield n; }
rewrapStaticMethod("count", regeneratorRuntime.mark(function callee$1$4() {
var start = arguments[0] === undefined ? 0 : arguments[0];
var step = arguments[1] === undefined ? 1 : arguments[1];
var n;
return regeneratorRuntime.wrap(function callee$1$4$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
n = start;
case 1:
if (!true) {
context$2$0.next = 7;
break;
}
context$2$0.next = 4;
return n;
case 4:
n += step;
context$2$0.next = 1;
break;
case 7:
case "end":
return context$2$0.stop();
}
}, callee$1$4, this);
}));
// wu.repeat(thing, times = Infinity): yield `thing` `times` times (forever
// by default). Regenerator-compiled; the Infinity case is a separate
// unconditional loop (cases 1-6), the finite case a counted loop (cases 8-12).
rewrapStaticMethod("repeat", regeneratorRuntime.mark(function callee$1$5(thing) {
var times = arguments[1] === undefined ? Infinity : arguments[1];
var i;
return regeneratorRuntime.wrap(function callee$1$5$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
if (!(times === Infinity)) {
context$2$0.next = 8;
break;
}
case 1:
if (!true) {
context$2$0.next = 6;
break;
}
context$2$0.next = 4;
return thing;
case 4:
context$2$0.next = 1;
break;
case 6:
context$2$0.next = 15;
break;
case 8:
i = 0;
case 9:
if (!(i < times)) {
context$2$0.next = 15;
break;
}
context$2$0.next = 12;
return thing;
case 12:
i++;
context$2$0.next = 9;
break;
case 15:
case "end":
return context$2$0.stop();
}
}, callee$1$5, this);
}));
/*
* Iterators that terminate once the input sequence has been exhausted
*/
rewrapStaticMethod("chain", regeneratorRuntime.mark(function callee$1$6() {
for (var _len5 = arguments.length, iterables = Array(_len5), _key5 = 0; _key5 < _len5; _key5++) {
iterables[_key5] = arguments[_key5];
}
var _iteratorNormalCompletion5, _didIteratorError5, _iteratorError5, _iterator5, _step5, it;
return regeneratorRuntime.wrap(function callee$1$6$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion5 = true;
_didIteratorError5 = false;
_iteratorError5 = undefined;
context$2$0.prev = 3;
_iterator5 = iterables[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion5 = (_step5 = _iterator5.next()).done) {
context$2$0.next = 11;
break;
}
it = _step5.value;
return context$2$0.delegateYield(it, "t5", 8);
case 8:
_iteratorNormalCompletion5 = true;
context$2$0.next = 5;
break;
case 11:
context$2$0.next = 17;
break;
case 13:
context$2$0.prev = 13;
context$2$0.t6 = context$2$0["catch"](3);
_didIteratorError5 = true;
_iteratorError5 = context$2$0.t6;
case 17:
context$2$0.prev = 17;
context$2$0.prev = 18;
if (!_iteratorNormalCompletion5 && _iterator5["return"]) {
_iterator5["return"]();
}
case 20:
context$2$0.prev = 20;
if (!_didIteratorError5) {
context$2$0.next = 23;
break;
}
throw _iteratorError5;
case 23:
return context$2$0.finish(20);
case 24:
return context$2$0.finish(17);
case 25:
case "end":
return context$2$0.stop();
}
}, callee$1$6, this, [[3, 13, 17, 25], [18,, 20, 24]]);
}));
rewrapPrototypeAndStatic("chunk", regeneratorRuntime.mark(function callee$1$7() {
var n = arguments[0] === undefined ? 2 : arguments[0];
var items, index, _iteratorNormalCompletion6, _didIteratorError6, _iteratorError6, _iterator6, _step6, item;
return regeneratorRuntime.wrap(function callee$1$7$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
items = [];
index = 0;
_iteratorNormalCompletion6 = true;
_didIteratorError6 = false;
_iteratorError6 = undefined;
context$2$0.prev = 5;
_iterator6 = this[Symbol.iterator]();
case 7:
if (_iteratorNormalCompletion6 = (_step6 = _iterator6.next()).done) {
context$2$0.next = 18;
break;
}
item = _step6.value;
items[index++] = item;
if (!(index === n)) {
context$2$0.next = 15;
break;
}
context$2$0.next = 13;
return items;
case 13:
items = [];
index = 0;
case 15:
_iteratorNormalCompletion6 = true;
context$2$0.next = 7;
break;
case 18:
context$2$0.next = 24;
break;
case 20:
context$2$0.prev = 20;
context$2$0.t7 = context$2$0["catch"](5);
_didIteratorError6 = true;
_iteratorError6 = context$2$0.t7;
case 24:
context$2$0.prev = 24;
context$2$0.prev = 25;
if (!_iteratorNormalCompletion6 && _iterator6["return"]) {
_iterator6["return"]();
}
case 27:
context$2$0.prev = 27;
if (!_didIteratorError6) {
context$2$0.next = 30;
break;
}
throw _iteratorError6;
case 30:
return context$2$0.finish(27);
case 31:
return context$2$0.finish(24);
case 32:
if (!index) {
context$2$0.next = 35;
break;
}
context$2$0.next = 35;
return items;
case 35:
case "end":
return context$2$0.stop();
}
}, callee$1$7, this, [[5, 20, 24, 32], [25,, 27, 31]]);
}), 1);
rewrapPrototypeAndStatic("concatMap", regeneratorRuntime.mark(function callee$1$8(fn) {
var _iteratorNormalCompletion7, _didIteratorError7, _iteratorError7, _iterator7, _step7, x;
return regeneratorRuntime.wrap(function callee$1$8$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion7 = true;
_didIteratorError7 = false;
_iteratorError7 = undefined;
context$2$0.prev = 3;
_iterator7 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion7 = (_step7 = _iterator7.next()).done) {
context$2$0.next = 11;
break;
}
x = _step7.value;
return context$2$0.delegateYield(fn(x), "t8", 8);
case 8:
_iteratorNormalCompletion7 = true;
context$2$0.next = 5;
break;
case 11:
context$2$0.next = 17;
break;
case 13:
context$2$0.prev = 13;
context$2$0.t9 = context$2$0["catch"](3);
_didIteratorError7 = true;
_iteratorError7 = context$2$0.t9;
case 17:
context$2$0.prev = 17;
context$2$0.prev = 18;
if (!_iteratorNormalCompletion7 && _iterator7["return"]) {
_iterator7["return"]();
}
case 20:
context$2$0.prev = 20;
if (!_didIteratorError7) {
context$2$0.next = 23;
break;
}
throw _iteratorError7;
case 23:
return context$2$0.finish(20);
case 24:
return context$2$0.finish(17);
case 25:
case "end":
return context$2$0.stop();
}
}, callee$1$8, this, [[3, 13, 17, 25], [18,, 20, 24]]);
}));
rewrapPrototypeAndStatic("drop", regeneratorRuntime.mark(function callee$1$9(n) {
var i, _iteratorNormalCompletion8, _didIteratorError8, _iteratorError8, _iterator8, _step8, x;
return regeneratorRuntime.wrap(function callee$1$9$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
i = 0;
_iteratorNormalCompletion8 = true;
_didIteratorError8 = false;
_iteratorError8 = undefined;
context$2$0.prev = 4;
_iterator8 = this[Symbol.iterator]();
case 6:
if (_iteratorNormalCompletion8 = (_step8 = _iterator8.next()).done) {
context$2$0.next = 16;
break;
}
x = _step8.value;
if (!(i++ < n)) {
context$2$0.next = 10;
break;
}
return context$2$0.abrupt("continue", 13);
case 10:
context$2$0.next = 12;
return x;
case 12:
return context$2$0.abrupt("break", 16);
case 13:
_iteratorNormalCompletion8 = true;
context$2$0.next = 6;
break;
case 16:
context$2$0.next = 22;
break;
case 18:
context$2$0.prev = 18;
context$2$0.t10 = context$2$0["catch"](4);
_didIteratorError8 = true;
_iteratorError8 = context$2$0.t10;
case 22:
context$2$0.prev = 22;
context$2$0.prev = 23;
if (!_iteratorNormalCompletion8 && _iterator8["return"]) {
_iterator8["return"]();
}
case 25:
context$2$0.prev = 25;
if (!_didIteratorError8) {
context$2$0.next = 28;
break;
}
throw _iteratorError8;
case 28:
return context$2$0.finish(25);
case 29:
return context$2$0.finish(22);
case 30:
return context$2$0.delegateYield(this, "t11", 31);
case 31:
case "end":
return context$2$0.stop();
}
}, callee$1$9, this, [[4, 18, 22, 30], [23,, 25, 29]]);
}));
rewrapPrototypeAndStatic("dropWhile", regeneratorRuntime.mark(function callee$1$10() {
var fn = arguments[0] === undefined ? Boolean : arguments[0];
var _iteratorNormalCompletion9, _didIteratorError9, _iteratorError9, _iterator9, _step9, x;
return regeneratorRuntime.wrap(function callee$1$10$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion9 = true;
_didIteratorError9 = false;
_iteratorError9 = undefined;
context$2$0.prev = 3;
_iterator9 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion9 = (_step9 = _iterator9.next()).done) {
context$2$0.next = 15;
break;
}
x = _step9.value;
if (!fn(x)) {
context$2$0.next = 9;
break;
}
return context$2$0.abrupt("continue", 12);
case 9:
context$2$0.next = 11;
return x;
case 11:
return context$2$0.abrupt("break", 15);
case 12:
_iteratorNormalCompletion9 = true;
context$2$0.next = 5;
break;
case 15:
context$2$0.next = 21;
break;
case 17:
context$2$0.prev = 17;
context$2$0.t12 = context$2$0["catch"](3);
_didIteratorError9 = true;
_iteratorError9 = context$2$0.t12;
case 21:
context$2$0.prev = 21;
context$2$0.prev = 22;
if (!_iteratorNormalCompletion9 && _iterator9["return"]) {
_iterator9["return"]();
}
case 24:
context$2$0.prev = 24;
if (!_didIteratorError9) {
context$2$0.next = 27;
break;
}
throw _iteratorError9;
case 27:
return context$2$0.finish(24);
case 28:
return context$2$0.finish(21);
case 29:
return context$2$0.delegateYield(this, "t13", 30);
case 30:
case "end":
return context$2$0.stop();
}
}, callee$1$10, this, [[3, 17, 21, 29], [22,, 24, 28]]);
}), 1);
// enumerate(): yield [item, index] pairs by zipping this iterable with
// wu.count(). NOTE(review): relies on a `_zip` helper defined elsewhere in
// the module (not visible in this chunk) — presumably the internal zip.
rewrapPrototypeAndStatic("enumerate", regeneratorRuntime.mark(function callee$1$11() {
return regeneratorRuntime.wrap(function callee$1$11$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
return context$2$0.delegateYield(_zip([this, wu.count()]), "t14", 1);
case 1:
case "end":
return context$2$0.stop();
}
}, callee$1$11, this);
}));
rewrapPrototypeAndStatic("filter", regeneratorRuntime.mark(function callee$1$12() {
var fn = arguments[0] === undefined ? Boolean : arguments[0];
var _iteratorNormalCompletion10, _didIteratorError10, _iteratorError10, _iterator10, _step10, x;
return regeneratorRuntime.wrap(function callee$1$12$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion10 = true;
_didIteratorError10 = false;
_iteratorError10 = undefined;
context$2$0.prev = 3;
_iterator10 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion10 = (_step10 = _iterator10.next()).done) {
context$2$0.next = 13;
break;
}
x = _step10.value;
if (!fn(x)) {
context$2$0.next = 10;
break;
}
context$2$0.next = 10;
return x;
case 10:
_iteratorNormalCompletion10 = true;
context$2$0.next = 5;
break;
case 13:
context$2$0.next = 19;
break;
case 15:
context$2$0.prev = 15;
context$2$0.t15 = context$2$0["catch"](3);
_didIteratorError10 = true;
_iteratorError10 = context$2$0.t15;
case 19:
context$2$0.prev = 19;
context$2$0.prev = 20;
if (!_iteratorNormalCompletion10 && _iterator10["return"]) {
_iterator10["return"]();
}
case 22:
context$2$0.prev = 22;
if (!_didIteratorError10) {
context$2$0.next = 25;
break;
}
throw _iteratorError10;
case 25:
return context$2$0.finish(22);
case 26:
return context$2$0.finish(19);
case 27:
case "end":
return context$2$0.stop();
}
}, callee$1$12, this, [[3, 15, 19, 27], [20,, 22, 26]]);
}), 1);
rewrapPrototypeAndStatic("flatten", regeneratorRuntime.mark(function callee$1$13() {
var shallow = arguments[0] === undefined ? false : arguments[0];
var _iteratorNormalCompletion11, _didIteratorError11, _iteratorError11, _iterator11, _step11, x;
return regeneratorRuntime.wrap(function callee$1$13$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion11 = true;
_didIteratorError11 = false;
_iteratorError11 = undefined;
context$2$0.prev = 3;
_iterator11 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion11 = (_step11 = _iterator11.next()).done) {
context$2$0.next = 16;
break;
}
x = _step11.value;
if (!(typeof x !== "string" && isIterable(x))) {
context$2$0.next = 11;
break;
}
return context$2$0.delegateYield(shallow ? x : wu(x).flatten(), "t16", 9);
case 9:
context$2$0.next = 13;
break;
case 11:
context$2$0.next = 13;
return x;
case 13:
_iteratorNormalCompletion11 = true;
context$2$0.next = 5;
break;
case 16:
context$2$0.next = 22;
break;
case 18:
context$2$0.prev = 18;
context$2$0.t17 = context$2$0["catch"](3);
_didIteratorError11 = true;
_iteratorError11 = context$2$0.t17;
case 22:
context$2$0.prev = 22;
context$2$0.prev = 23;
if (!_iteratorNormalCompletion11 && _iterator11["return"]) {
_iterator11["return"]();
}
case 25:
context$2$0.prev = 25;
if (!_didIteratorError11) {
context$2$0.next = 28;
break;
}
throw _iteratorError11;
case 28:
return context$2$0.finish(25);
case 29:
return context$2$0.finish(22);
case 30:
case "end":
return context$2$0.stop();
}
}, callee$1$13, this, [[3, 18, 22, 30], [23,, 25, 29]]);
}), 1);
rewrapPrototypeAndStatic("invoke", regeneratorRuntime.mark(function callee$1$14(name) {
for (var _len6 = arguments.length, args = Array(_len6 > 1 ? _len6 - 1 : 0), _key6 = 1; _key6 < _len6; _key6++) {
args[_key6 - 1] = arguments[_key6];
}
var _iteratorNormalCompletion12, _didIteratorError12, _iteratorError12, _iterator12, _step12, x;
return regeneratorRuntime.wrap(function callee$1$14$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion12 = true;
_didIteratorError12 = false;
_iteratorError12 = undefined;
context$2$0.prev = 3;
_iterator12 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion12 = (_step12 = _iterator12.next()).done) {
context$2$0.next = 12;
break;
}
x = _step12.value;
context$2$0.next = 9;
return x[name].apply(x, args);
case 9:
_iteratorNormalCompletion12 = true;
context$2$0.next = 5;
break;
case 12:
context$2$0.next = 18;
break;
case 14:
context$2$0.prev = 14;
context$2$0.t18 = context$2$0["catch"](3);
_didIteratorError12 = true;
_iteratorError12 = context$2$0.t18;
case 18:
context$2$0.prev = 18;
context$2$0.prev = 19;
if (!_iteratorNormalCompletion12 && _iterator12["return"]) {
_iterator12["return"]();
}
case 21:
context$2$0.prev = 21;
if (!_didIteratorError12) {
context$2$0.next = 24;
break;
}
throw _iteratorError12;
case 24:
return context$2$0.finish(21);
case 25:
return context$2$0.finish(18);
case 26:
case "end":
return context$2$0.stop();
}
}, callee$1$14, this, [[3, 14, 18, 26], [19,, 21, 25]]);
}));
rewrapPrototypeAndStatic("map", regeneratorRuntime.mark(function callee$1$15(fn) {
var _iteratorNormalCompletion13, _didIteratorError13, _iteratorError13, _iterator13, _step13, x;
return regeneratorRuntime.wrap(function callee$1$15$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion13 = true;
_didIteratorError13 = false;
_iteratorError13 = undefined;
context$2$0.prev = 3;
_iterator13 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion13 = (_step13 = _iterator13.next()).done) {
context$2$0.next = 12;
break;
}
x = _step13.value;
context$2$0.next = 9;
return fn(x);
case 9:
_iteratorNormalCompletion13 = true;
context$2$0.next = 5;
break;
case 12:
context$2$0.next = 18;
break;
case 14:
context$2$0.prev = 14;
context$2$0.t19 = context$2$0["catch"](3);
_didIteratorError13 = true;
_iteratorError13 = context$2$0.t19;
case 18:
context$2$0.prev = 18;
context$2$0.prev = 19;
if (!_iteratorNormalCompletion13 && _iterator13["return"]) {
_iterator13["return"]();
}
case 21:
context$2$0.prev = 21;
if (!_didIteratorError13) {
context$2$0.next = 24;
break;
}
throw _iteratorError13;
case 24:
return context$2$0.finish(21);
case 25:
return context$2$0.finish(18);
case 26:
case "end":
return context$2$0.stop();
}
}, callee$1$15, this, [[3, 14, 18, 26], [19,, 21, 25]]);
}));
rewrapPrototypeAndStatic("pluck", regeneratorRuntime.mark(function callee$1$16(name) {
var _iteratorNormalCompletion14, _didIteratorError14, _iteratorError14, _iterator14, _step14, x;
return regeneratorRuntime.wrap(function callee$1$16$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion14 = true;
_didIteratorError14 = false;
_iteratorError14 = undefined;
context$2$0.prev = 3;
_iterator14 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion14 = (_step14 = _iterator14.next()).done) {
context$2$0.next = 12;
break;
}
x = _step14.value;
context$2$0.next = 9;
return x[name];
case 9:
_iteratorNormalCompletion14 = true;
context$2$0.next = 5;
break;
case 12:
context$2$0.next = 18;
break;
case 14:
context$2$0.prev = 14;
context$2$0.t20 = context$2$0["catch"](3);
_didIteratorError14 = true;
_iteratorError14 = context$2$0.t20;
case 18:
context$2$0.prev = 18;
context$2$0.prev = 19;
if (!_iteratorNormalCompletion14 && _iterator14["return"]) {
_iterator14["return"]();
}
case 21:
context$2$0.prev = 21;
if (!_didIteratorError14) {
context$2$0.next = 24;
break;
}
throw _iteratorError14;
case 24:
return context$2$0.finish(21);
case 25:
return context$2$0.finish(18);
case 26:
case "end":
return context$2$0.stop();
}
}, callee$1$16, this, [[3, 14, 18, 26], [19,, 21, 25]]);
}));
rewrapPrototypeAndStatic("reductions", regeneratorRuntime.mark(function callee$1$17(fn) {
var initial = arguments[1] === undefined ? undefined : arguments[1];
var val, _iteratorNormalCompletion15, _didIteratorError15, _iteratorError15, _iterator15, _step15, x, _iteratorNormalCompletion16, _didIteratorError16, _iteratorError16, _iterator16, _step16;
return regeneratorRuntime.wrap(function callee$1$17$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
val = initial;
if (!(val === undefined)) {
context$2$0.next = 28;
break;
}
_iteratorNormalCompletion15 = true;
_didIteratorError15 = false;
_iteratorError15 = undefined;
context$2$0.prev = 5;
_iterator15 = this[Symbol.iterator]();
case 7:
if (_iteratorNormalCompletion15 = (_step15 = _iterator15.next()).done) {
context$2$0.next = 14;
break;
}
x = _step15.value;
val = x;
return context$2$0.abrupt("break", 14);
case 11:
_iteratorNormalCompletion15 = true;
context$2$0.next = 7;
break;
case 14:
context$2$0.next = 20;
break;
case 16:
context$2$0.prev = 16;
context$2$0.t21 = context$2$0["catch"](5);
_didIteratorError15 = true;
_iteratorError15 = context$2$0.t21;
case 20:
context$2$0.prev = 20;
context$2$0.prev = 21;
if (!_iteratorNormalCompletion15 && _iterator15["return"]) {
_iterator15["return"]();
}
case 23:
context$2$0.prev = 23;
if (!_didIteratorError15) {
context$2$0.next = 26;
break;
}
throw _iteratorError15;
case 26:
return context$2$0.finish(23);
case 27:
return context$2$0.finish(20);
case 28:
context$2$0.next = 30;
return val;
case 30:
_iteratorNormalCompletion16 = true;
_didIteratorError16 = false;
_iteratorError16 = undefined;
context$2$0.prev = 33;
_iterator16 = this[Symbol.iterator]();
case 35:
if (_iteratorNormalCompletion16 = (_step16 = _iterator16.next()).done) {
context$2$0.next = 42;
break;
}
x = _step16.value;
context$2$0.next = 39;
return val = fn(val, x);
case 39:
_iteratorNormalCompletion16 = true;
context$2$0.next = 35;
break;
case 42:
context$2$0.next = 48;
break;
case 44:
context$2$0.prev = 44;
context$2$0.t22 = context$2$0["catch"](33);
_didIteratorError16 = true;
_iteratorError16 = context$2$0.t22;
case 48:
context$2$0.prev = 48;
context$2$0.prev = 49;
if (!_iteratorNormalCompletion16 && _iterator16["return"]) {
_iterator16["return"]();
}
case 51:
context$2$0.prev = 51;
if (!_didIteratorError16) {
context$2$0.next = 54;
break;
}
throw _iteratorError16;
case 54:
return context$2$0.finish(51);
case 55:
return context$2$0.finish(48);
case 56:
return context$2$0.abrupt("return", val);
case 57:
case "end":
return context$2$0.stop();
}
}, callee$1$17, this, [[5, 16, 20, 28], [21,, 23, 27], [33, 44, 48, 56], [49,, 51, 55]]);
}), 2);
rewrapPrototypeAndStatic("reject", regeneratorRuntime.mark(function callee$1$18() {
var fn = arguments[0] === undefined ? Boolean : arguments[0];
var _iteratorNormalCompletion17, _didIteratorError17, _iteratorError17, _iterator17, _step17, x;
return regeneratorRuntime.wrap(function callee$1$18$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion17 = true;
_didIteratorError17 = false;
_iteratorError17 = undefined;
context$2$0.prev = 3;
_iterator17 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion17 = (_step17 = _iterator17.next()).done) {
context$2$0.next = 13;
break;
}
x = _step17.value;
if (fn(x)) {
context$2$0.next = 10;
break;
}
context$2$0.next = 10;
return x;
case 10:
_iteratorNormalCompletion17 = true;
context$2$0.next = 5;
break;
case 13:
context$2$0.next = 19;
break;
case 15:
context$2$0.prev = 15;
context$2$0.t23 = context$2$0["catch"](3);
_didIteratorError17 = true;
_iteratorError17 = context$2$0.t23;
case 19:
context$2$0.prev = 19;
context$2$0.prev = 20;
if (!_iteratorNormalCompletion17 && _iterator17["return"]) {
_iterator17["return"]();
}
case 22:
context$2$0.prev = 22;
if (!_didIteratorError17) {
context$2$0.next = 25;
break;
}
throw _iteratorError17;
case 25:
return context$2$0.finish(22);
case 26:
return context$2$0.finish(19);
case 27:
case "end":
return context$2$0.stop();
}
}, callee$1$18, this, [[3, 15, 19, 27], [20,, 22, 26]]);
}), 1);
rewrapPrototypeAndStatic("slice", regeneratorRuntime.mark(function callee$1$19() {
var start = arguments[0] === undefined ? 0 : arguments[0];
var stop = arguments[1] === undefined ? Infinity : arguments[1];
var _iteratorNormalCompletion18, _didIteratorError18, _iteratorError18, _iterator18, _step18, _step18$value, x, i;
return regeneratorRuntime.wrap(function callee$1$19$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
if (!(stop < start)) {
context$2$0.next = 2;
break;
}
throw new RangeError("parameter `stop` (= " + stop + ") must be >= `start` (= " + start + ")");
case 2:
_iteratorNormalCompletion18 = true;
_didIteratorError18 = false;
_iteratorError18 = undefined;
context$2$0.prev = 5;
_iterator18 = this.enumerate()[Symbol.iterator]();
case 7:
if (_iteratorNormalCompletion18 = (_step18 = _iterator18.next()).done) {
context$2$0.next = 20;
break;
}
_step18$value = _slicedToArray(_step18.value, 2);
x = _step18$value[0];
i = _step18$value[1];
if (!(i < start)) {
context$2$0.next = 13;
break;
}
return context$2$0.abrupt("continue", 17);
case 13:
if (!(i >= stop)) {
context$2$0.next = 15;
break;
}
return context$2$0.abrupt("break", 20);
case 15:
context$2$0.next = 17;
return x;
case 17:
_iteratorNormalCompletion18 = true;
context$2$0.next = 7;
break;
case 20:
context$2$0.next = 26;
break;
case 22:
context$2$0.prev = 22;
context$2$0.t24 = context$2$0["catch"](5);
_didIteratorError18 = true;
_iteratorError18 = context$2$0.t24;
case 26:
context$2$0.prev = 26;
context$2$0.prev = 27;
if (!_iteratorNormalCompletion18 && _iterator18["return"]) {
_iterator18["return"]();
}
case 29:
context$2$0.prev = 29;
if (!_didIteratorError18) {
context$2$0.next = 32;
break;
}
throw _iteratorError18;
case 32:
return context$2$0.finish(29);
case 33:
return context$2$0.finish(26);
case 34:
case "end":
return context$2$0.stop();
}
}, callee$1$19, this, [[5, 22, 26, 34], [27,, 29, 33]]);
}), 2);
// wu.js `spreadMap(fn)` — compiled generator: for each yielded iterable x,
// yields fn(...x) (the item is spread into fn's arguments). Generated code.
rewrapPrototypeAndStatic("spreadMap", regeneratorRuntime.mark(function callee$1$20(fn) {
var _iteratorNormalCompletion19, _didIteratorError19, _iteratorError19, _iterator19, _step19, x;
return regeneratorRuntime.wrap(function callee$1$20$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion19 = true;
_didIteratorError19 = false;
_iteratorError19 = undefined;
context$2$0.prev = 3;
_iterator19 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion19 = (_step19 = _iterator19.next()).done) {
context$2$0.next = 12;
break;
}
x = _step19.value;
context$2$0.next = 9;
return fn.apply(undefined, _toConsumableArray(x));
case 9:
_iteratorNormalCompletion19 = true;
context$2$0.next = 5;
break;
case 12:
context$2$0.next = 18;
break;
case 14:
context$2$0.prev = 14;
context$2$0.t25 = context$2$0["catch"](3);
_didIteratorError19 = true;
_iteratorError19 = context$2$0.t25;
case 18:
context$2$0.prev = 18;
context$2$0.prev = 19;
if (!_iteratorNormalCompletion19 && _iterator19["return"]) {
_iterator19["return"]();
}
case 21:
context$2$0.prev = 21;
if (!_didIteratorError19) {
context$2$0.next = 24;
break;
}
throw _iteratorError19;
case 24:
return context$2$0.finish(21);
case 25:
return context$2$0.finish(18);
case 26:
case "end":
return context$2$0.stop();
}
}, callee$1$20, this, [[3, 14, 18, 26], [19,, 21, 25]]);
}));
// wu.js `take(n)` — compiled generator: yields at most the first n items;
// yields nothing when n < 1. Generated code; do not hand-edit.
rewrapPrototypeAndStatic("take", regeneratorRuntime.mark(function callee$1$21(n) {
var i, _iteratorNormalCompletion20, _didIteratorError20, _iteratorError20, _iterator20, _step20, x;
return regeneratorRuntime.wrap(function callee$1$21$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
if (!(n < 1)) {
context$2$0.next = 2;
break;
}
return context$2$0.abrupt("return");
case 2:
i = 0;
_iteratorNormalCompletion20 = true;
_didIteratorError20 = false;
_iteratorError20 = undefined;
context$2$0.prev = 6;
_iterator20 = this[Symbol.iterator]();
case 8:
if (_iteratorNormalCompletion20 = (_step20 = _iterator20.next()).done) {
context$2$0.next = 17;
break;
}
x = _step20.value;
context$2$0.next = 12;
return x;
case 12:
if (!(++i >= n)) {
context$2$0.next = 14;
break;
}
return context$2$0.abrupt("break", 17);
case 14:
_iteratorNormalCompletion20 = true;
context$2$0.next = 8;
break;
case 17:
context$2$0.next = 23;
break;
case 19:
context$2$0.prev = 19;
context$2$0.t26 = context$2$0["catch"](6);
_didIteratorError20 = true;
_iteratorError20 = context$2$0.t26;
case 23:
context$2$0.prev = 23;
context$2$0.prev = 24;
if (!_iteratorNormalCompletion20 && _iterator20["return"]) {
_iterator20["return"]();
}
case 26:
context$2$0.prev = 26;
if (!_didIteratorError20) {
context$2$0.next = 29;
break;
}
throw _iteratorError20;
case 29:
return context$2$0.finish(26);
case 30:
return context$2$0.finish(23);
case 31:
case "end":
return context$2$0.stop();
}
}, callee$1$21, this, [[6, 19, 23, 31], [24,, 26, 30]]);
}));
// wu.js `takeWhile(fn=Boolean)` — compiled generator: yields items until the
// first one for which fn(x) is falsy, then stops. Generated code.
rewrapPrototypeAndStatic("takeWhile", regeneratorRuntime.mark(function callee$1$22() {
var fn = arguments[0] === undefined ? Boolean : arguments[0];
var _iteratorNormalCompletion21, _didIteratorError21, _iteratorError21, _iterator21, _step21, x;
return regeneratorRuntime.wrap(function callee$1$22$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion21 = true;
_didIteratorError21 = false;
_iteratorError21 = undefined;
context$2$0.prev = 3;
_iterator21 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion21 = (_step21 = _iterator21.next()).done) {
context$2$0.next = 14;
break;
}
x = _step21.value;
if (fn(x)) {
context$2$0.next = 9;
break;
}
return context$2$0.abrupt("break", 14);
case 9:
context$2$0.next = 11;
return x;
case 11:
_iteratorNormalCompletion21 = true;
context$2$0.next = 5;
break;
case 14:
context$2$0.next = 20;
break;
case 16:
context$2$0.prev = 16;
context$2$0.t27 = context$2$0["catch"](3);
_didIteratorError21 = true;
_iteratorError21 = context$2$0.t27;
case 20:
context$2$0.prev = 20;
context$2$0.prev = 21;
if (!_iteratorNormalCompletion21 && _iterator21["return"]) {
_iterator21["return"]();
}
case 23:
context$2$0.prev = 23;
if (!_didIteratorError21) {
context$2$0.next = 26;
break;
}
throw _iteratorError21;
case 26:
return context$2$0.finish(23);
case 27:
return context$2$0.finish(20);
case 28:
case "end":
return context$2$0.stop();
}
}, callee$1$22, this, [[3, 16, 20, 28], [21,, 23, 27]]);
}), 1);
// wu.js `tap(fn=console.log)` — compiled generator: calls fn(x) for each item
// as a side effect, then yields x unchanged. Generated code.
rewrapPrototypeAndStatic("tap", regeneratorRuntime.mark(function callee$1$23() {
var fn = arguments[0] === undefined ? console.log.bind(console) : arguments[0];
var _iteratorNormalCompletion22, _didIteratorError22, _iteratorError22, _iterator22, _step22, x;
return regeneratorRuntime.wrap(function callee$1$23$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
_iteratorNormalCompletion22 = true;
_didIteratorError22 = false;
_iteratorError22 = undefined;
context$2$0.prev = 3;
_iterator22 = this[Symbol.iterator]();
case 5:
if (_iteratorNormalCompletion22 = (_step22 = _iterator22.next()).done) {
context$2$0.next = 13;
break;
}
x = _step22.value;
fn(x);
context$2$0.next = 10;
return x;
case 10:
_iteratorNormalCompletion22 = true;
context$2$0.next = 5;
break;
case 13:
context$2$0.next = 19;
break;
case 15:
context$2$0.prev = 15;
context$2$0.t28 = context$2$0["catch"](3);
_didIteratorError22 = true;
_iteratorError22 = context$2$0.t28;
case 19:
context$2$0.prev = 19;
context$2$0.prev = 20;
if (!_iteratorNormalCompletion22 && _iterator22["return"]) {
_iterator22["return"]();
}
case 22:
context$2$0.prev = 22;
if (!_didIteratorError22) {
context$2$0.next = 25;
break;
}
throw _iteratorError22;
case 25:
return context$2$0.finish(22);
case 26:
return context$2$0.finish(19);
case 27:
case "end":
return context$2$0.stop();
}
}, callee$1$23, this, [[3, 15, 19, 27], [20,, 22, 26]]);
}), 1);
// wu.js `unique()` — compiled generator: yields each distinct item once,
// tracked in a Set. NOTE(review): the trailing `seen.clear()` (case 29) runs
// only on normal completion, not when the consumer abandons the generator —
// confirm upstream whether that is intended.
rewrapPrototypeAndStatic("unique", regeneratorRuntime.mark(function callee$1$24() {
var seen, _iteratorNormalCompletion23, _didIteratorError23, _iteratorError23, _iterator23, _step23, x;
return regeneratorRuntime.wrap(function callee$1$24$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
seen = new Set();
_iteratorNormalCompletion23 = true;
_didIteratorError23 = false;
_iteratorError23 = undefined;
context$2$0.prev = 4;
_iterator23 = this[Symbol.iterator]();
case 6:
if (_iteratorNormalCompletion23 = (_step23 = _iterator23.next()).done) {
context$2$0.next = 15;
break;
}
x = _step23.value;
if (seen.has(x)) {
context$2$0.next = 12;
break;
}
context$2$0.next = 11;
return x;
case 11:
seen.add(x);
case 12:
_iteratorNormalCompletion23 = true;
context$2$0.next = 6;
break;
case 15:
context$2$0.next = 21;
break;
case 17:
context$2$0.prev = 17;
context$2$0.t29 = context$2$0["catch"](4);
_didIteratorError23 = true;
_iteratorError23 = context$2$0.t29;
case 21:
context$2$0.prev = 21;
context$2$0.prev = 22;
if (!_iteratorNormalCompletion23 && _iterator23["return"]) {
_iterator23["return"]();
}
case 24:
context$2$0.prev = 24;
if (!_didIteratorError23) {
context$2$0.next = 27;
break;
}
throw _iteratorError23;
case 27:
return context$2$0.finish(24);
case 28:
return context$2$0.finish(21);
case 29:
seen.clear();
case 30:
case "end":
return context$2$0.stop();
}
}, callee$1$24, this, [[4, 17, 21, 29], [22,, 24, 28]]);
}));
// wu.js private `_zip(iterables, longest=false)` — compiled generator backing
// `zip`, `zipLongest`, and `zipWith`. Yields an array per round; stops at the
// shortest input unless `longest` is true, in which case exhausted inputs
// leave holes in the yielded array. Generated code; do not hand-edit.
var _zip = rewrap(regeneratorRuntime.mark(function callee$1$25(iterables) {
var longest = arguments[1] === undefined ? false : arguments[1];
var iters, numIters, numFinished, finished, zipped, _iteratorNormalCompletion24, _didIteratorError24, _iteratorError24, _iterator24, _step24, it, _it$next, value, done;
return regeneratorRuntime.wrap(function callee$1$25$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
if (iterables.length) {
context$2$0.next = 2;
break;
}
return context$2$0.abrupt("return");
case 2:
iters = iterables.map(getIterator);
numIters = iterables.length;
numFinished = 0;
finished = false;
case 6:
if (finished) {
context$2$0.next = 44;
break;
}
zipped = [];
_iteratorNormalCompletion24 = true;
_didIteratorError24 = false;
_iteratorError24 = undefined;
context$2$0.prev = 11;
_iterator24 = iters[Symbol.iterator]();
case 13:
if (_iteratorNormalCompletion24 = (_step24 = _iterator24.next()).done) {
context$2$0.next = 26;
break;
}
it = _step24.value;
_it$next = it.next();
value = _it$next.value;
done = _it$next.done;
if (!done) {
context$2$0.next = 22;
break;
}
if (longest) {
context$2$0.next = 21;
break;
}
return context$2$0.abrupt("return");
case 21:
if (++numFinished == numIters) {
finished = true;
}
case 22:
if (value === undefined) {
// Leave a hole in the array so that you can distinguish an iterable
// that's done (via `index in array == false`) from an iterable
// yielding `undefined`.
zipped.length++;
} else {
zipped.push(value);
}
case 23:
_iteratorNormalCompletion24 = true;
context$2$0.next = 13;
break;
case 26:
context$2$0.next = 32;
break;
case 28:
context$2$0.prev = 28;
context$2$0.t30 = context$2$0["catch"](11);
_didIteratorError24 = true;
_iteratorError24 = context$2$0.t30;
case 32:
context$2$0.prev = 32;
context$2$0.prev = 33;
if (!_iteratorNormalCompletion24 && _iterator24["return"]) {
_iterator24["return"]();
}
case 35:
context$2$0.prev = 35;
if (!_didIteratorError24) {
context$2$0.next = 38;
break;
}
throw _iteratorError24;
case 38:
return context$2$0.finish(35);
case 39:
return context$2$0.finish(32);
case 40:
context$2$0.next = 42;
return zipped;
case 42:
context$2$0.next = 6;
break;
case 44:
case "end":
return context$2$0.stop();
}
}, callee$1$25, this, [[11, 28, 32, 40], [33,, 35, 39]]);
}));
// wu.zip(...iterables) — delegates to `_zip` (stops at the shortest input).
rewrapStaticMethod("zip", regeneratorRuntime.mark(function callee$1$26() {
for (var _len7 = arguments.length, iterables = Array(_len7), _key7 = 0; _key7 < _len7; _key7++) {
iterables[_key7] = arguments[_key7];
}
return regeneratorRuntime.wrap(function callee$1$26$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
return context$2$0.delegateYield(_zip(iterables), "t31", 1);
case 1:
case "end":
return context$2$0.stop();
}
}, callee$1$26, this);
}));
// wu.zipLongest(...iterables) — delegates to `_zip` with longest=true
// (continues until every input is exhausted).
rewrapStaticMethod("zipLongest", regeneratorRuntime.mark(function callee$1$27() {
for (var _len8 = arguments.length, iterables = Array(_len8), _key8 = 0; _key8 < _len8; _key8++) {
iterables[_key8] = arguments[_key8];
}
return regeneratorRuntime.wrap(function callee$1$27$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
return context$2$0.delegateYield(_zip(iterables, true), "t32", 1);
case 1:
case "end":
return context$2$0.stop();
}
}, callee$1$27, this);
}));
// wu.zipWith(fn, ...iterables) — zips the inputs, then spreads each zipped
// tuple into fn via spreadMap.
rewrapStaticMethod("zipWith", regeneratorRuntime.mark(function callee$1$28(fn) {
for (var _len9 = arguments.length, iterables = Array(_len9 > 1 ? _len9 - 1 : 0), _key9 = 1; _key9 < _len9; _key9++) {
iterables[_key9 - 1] = arguments[_key9];
}
return regeneratorRuntime.wrap(function callee$1$28$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
return context$2$0.delegateYield(_zip(iterables).spreadMap(fn), "t33", 1);
case 1:
case "end":
return context$2$0.stop();
}
}, callee$1$28, this);
}));
/*
* Functions that force iteration to completion and return a value.
*/
// Tunable globals for `asyncEach`'s cooperative scheduling:
// The maximum number of milliseconds we will block the main thread at a time
// while in `asyncEach`.
wu.MAX_BLOCK = 15;
// The number of milliseconds to yield to the main thread between bursts of
// work.
wu.TIMEOUT = 1;
// `asyncEach(fn, maxBlock, timeout)` — applies fn to each item in time-sliced
// bursts of at most `maxBlock` ms, yielding to the event loop for `timeout` ms
// between bursts. Returns a Promise that resolves when iteration completes or
// rejects with the first error thrown by fn.
// NOTE(review): when a burst pauses via setTimeout, the try/finally iterator
// cleanup calls `_iterator25["return"]()` (because the completion flag is
// false mid-loop), which closes generator-backed iterators — the resumed loop
// may then find the iterator already exhausted. Confirm against upstream wu.js.
prototypeAndStatic("asyncEach", function (fn) {
var maxBlock = arguments[1] === undefined ? wu.MAX_BLOCK : arguments[1];
var timeout = arguments[2] === undefined ? wu.TIMEOUT : arguments[2];
var iter = getIterator(this);
return new Promise(function (resolve, reject) {
(function loop() {
var start = Date.now();
var _iteratorNormalCompletion25 = true;
var _didIteratorError25 = false;
var _iteratorError25 = undefined;
try {
for (var _iterator25 = iter[Symbol.iterator](), _step25; !(_iteratorNormalCompletion25 = (_step25 = _iterator25.next()).done); _iteratorNormalCompletion25 = true) {
var x = _step25.value;
try {
fn(x);
} catch (e) {
reject(e);
return;
}
// Burst budget exhausted: reschedule and let the main thread run.
if (Date.now() - start > maxBlock) {
setTimeout(loop, timeout);
return;
}
}
} catch (err) {
_didIteratorError25 = true;
_iteratorError25 = err;
} finally {
try {
if (!_iteratorNormalCompletion25 && _iterator25["return"]) {
_iterator25["return"]();
}
} finally {
if (_didIteratorError25) {
throw _iteratorError25;
}
}
}
resolve();
})();
});
}, 3);
// `every(fn=Boolean)` — true iff fn(x) is truthy for every item.
// Short-circuits (and closes the iterator) on the first falsy result.
prototypeAndStatic("every", function () {
var fn = arguments[0] === undefined ? Boolean : arguments[0];
var _iteratorNormalCompletion26 = true;
var _didIteratorError26 = false;
var _iteratorError26 = undefined;
try {
for (var _iterator26 = this[Symbol.iterator](), _step26; !(_iteratorNormalCompletion26 = (_step26 = _iterator26.next()).done); _iteratorNormalCompletion26 = true) {
var x = _step26.value;
if (!fn(x)) {
return false;
}
}
} catch (err) {
_didIteratorError26 = true;
_iteratorError26 = err;
} finally {
try {
if (!_iteratorNormalCompletion26 && _iterator26["return"]) {
_iterator26["return"]();
}
} finally {
if (_didIteratorError26) {
throw _iteratorError26;
}
}
}
return true;
}, 1);
// `find(fn)` — returns the first item for which fn(x) is truthy, or
// undefined when no item matches.
prototypeAndStatic("find", function (fn) {
var _iteratorNormalCompletion27 = true;
var _didIteratorError27 = false;
var _iteratorError27 = undefined;
try {
for (var _iterator27 = this[Symbol.iterator](), _step27; !(_iteratorNormalCompletion27 = (_step27 = _iterator27.next()).done); _iteratorNormalCompletion27 = true) {
var x = _step27.value;
if (fn(x)) {
return x;
}
}
} catch (err) {
_didIteratorError27 = true;
_iteratorError27 = err;
} finally {
try {
if (!_iteratorNormalCompletion27 && _iterator27["return"]) {
_iterator27["return"]();
}
} finally {
if (_didIteratorError27) {
throw _iteratorError27;
}
}
}
});
// `forEach(fn)` — eagerly applies fn to every item; returns undefined.
prototypeAndStatic("forEach", function (fn) {
var _iteratorNormalCompletion28 = true;
var _didIteratorError28 = false;
var _iteratorError28 = undefined;
try {
for (var _iterator28 = this[Symbol.iterator](), _step28; !(_iteratorNormalCompletion28 = (_step28 = _iterator28.next()).done); _iteratorNormalCompletion28 = true) {
var x = _step28.value;
fn(x);
}
} catch (err) {
_didIteratorError28 = true;
_iteratorError28 = err;
} finally {
try {
if (!_iteratorNormalCompletion28 && _iterator28["return"]) {
_iterator28["return"]();
}
} finally {
if (_didIteratorError28) {
throw _iteratorError28;
}
}
}
});
// `has(thing)` — true iff some item is strictly equal (===) to `thing`.
prototypeAndStatic("has", function (thing) {
return this.some(function (x) {
return x === thing;
});
});
// `reduce(fn, initial)` — left fold. When `initial` is undefined, the first
// item seeds the accumulator.
// NOTE(review): the seeding loop consumes one item and then breaks (closing
// that iterator); the fold loop calls this[Symbol.iterator]() again. Whether
// the first item is skipped, re-folded, or the fold sees nothing depends on
// whether the iterable is restartable — confirm against upstream wu.js.
prototypeAndStatic("reduce", function (fn) {
var initial = arguments[1] === undefined ? undefined : arguments[1];
var val = initial;
if (val === undefined) {
var _iteratorNormalCompletion29 = true;
var _didIteratorError29 = false;
var _iteratorError29 = undefined;
try {
for (var _iterator29 = this[Symbol.iterator](), _step29; !(_iteratorNormalCompletion29 = (_step29 = _iterator29.next()).done); _iteratorNormalCompletion29 = true) {
var x = _step29.value;
val = x;
break;
}
} catch (err) {
_didIteratorError29 = true;
_iteratorError29 = err;
} finally {
try {
if (!_iteratorNormalCompletion29 && _iterator29["return"]) {
_iterator29["return"]();
}
} finally {
if (_didIteratorError29) {
throw _iteratorError29;
}
}
}
}
var _iteratorNormalCompletion30 = true;
var _didIteratorError30 = false;
var _iteratorError30 = undefined;
try {
for (var _iterator30 = this[Symbol.iterator](), _step30; !(_iteratorNormalCompletion30 = (_step30 = _iterator30.next()).done); _iteratorNormalCompletion30 = true) {
var x = _step30.value;
val = fn(val, x);
}
} catch (err) {
_didIteratorError30 = true;
_iteratorError30 = err;
} finally {
try {
if (!_iteratorNormalCompletion30 && _iterator30["return"]) {
_iterator30["return"]();
}
} finally {
if (_didIteratorError30) {
throw _iteratorError30;
}
}
}
return val;
}, 2);
// `some(fn=Boolean)` — true iff fn(x) is truthy for at least one item.
// Short-circuits (and closes the iterator) on the first truthy result.
prototypeAndStatic("some", function () {
var fn = arguments[0] === undefined ? Boolean : arguments[0];
var _iteratorNormalCompletion31 = true;
var _didIteratorError31 = false;
var _iteratorError31 = undefined;
try {
for (var _iterator31 = this[Symbol.iterator](), _step31; !(_iteratorNormalCompletion31 = (_step31 = _iterator31.next()).done); _iteratorNormalCompletion31 = true) {
var x = _step31.value;
if (fn(x)) {
return true;
}
}
} catch (err) {
_didIteratorError31 = true;
_iteratorError31 = err;
} finally {
try {
if (!_iteratorNormalCompletion31 && _iterator31["return"]) {
_iterator31["return"]();
}
} finally {
if (_didIteratorError31) {
throw _iteratorError31;
}
}
}
return false;
}, 1);
// `toArray()` — eagerly drains the iterable into a plain Array.
prototypeAndStatic("toArray", function () {
return [].concat(_toConsumableArray(this));
});
/*
* Methods that return an array of iterables.
*/
// Cap on how many items the shared tee cache may hold before it is compacted.
var MAX_CACHE = 500;
// Private `_tee(iterator, cache)` — compiled generator backing `tee`. Each
// clone replays items from the shared `cache` ({tail, items, returned});
// whichever clone is furthest ahead forces evaluation of the source iterator,
// and the trailing clone (tracked by `cache.tail`) evicts entries it has
// consumed, compacting the backing array once MAX_CACHE is reached.
// Generated code; do not hand-edit the state machine.
var _tee = rewrap(regeneratorRuntime.mark(function callee$1$29(iterator, cache) {
var items, index, _iterator$next, done, value;
return regeneratorRuntime.wrap(function callee$1$29$(context$2$0) {
while (1) switch (context$2$0.prev = context$2$0.next) {
case 0:
items = cache.items;
index = 0;
case 2:
if (!true) {
context$2$0.next = 25;
break;
}
if (!(index === items.length)) {
context$2$0.next = 14;
break;
}
_iterator$next = iterator.next();
done = _iterator$next.done;
value = _iterator$next.value;
if (!done) {
context$2$0.next = 10;
break;
}
if (cache.returned === MISSING) {
cache.returned = value;
}
return context$2$0.abrupt("break", 25);
case 10:
context$2$0.next = 12;
return items[index++] = value;
case 12:
context$2$0.next = 23;
break;
case 14:
if (!(index === cache.tail)) {
context$2$0.next = 21;
break;
}
value = items[index];
if (index === MAX_CACHE) {
items = cache.items = items.slice(index);
index = 0;
cache.tail = 0;
} else {
items[index] = undefined;
cache.tail = ++index;
}
context$2$0.next = 19;
return value;
case 19:
context$2$0.next = 23;
break;
case 21:
context$2$0.next = 23;
return items[index++];
case 23:
context$2$0.next = 2;
break;
case 25:
if (cache.tail === index) {
items.length = 0;
}
return context$2$0.abrupt("return", cache.returned);
case 27:
case "end":
return context$2$0.stop();
}
}, callee$1$29, this);
}));
// Tee clones share the wu prototype so chained wu methods work on them.
_tee.prototype = Wu.prototype;
// `tee(n=2)` — returns n independent clones of this iterable. The clones
// share one cache object so the source is evaluated at most once.
prototypeAndStatic("tee", function () {
var n = arguments[0] === undefined ? 2 : arguments[0];
var iterables = new Array(n);
var cache = { tail: 0, items: [], returned: MISSING };
while (n--) {
iterables[n] = _tee(this, cache);
}
return iterables;
}, 1);
// `unzip(n=2)` — inverse of zip: tees the iterable of n-tuples into n
// iterables, the i-th plucking component i from every tuple.
prototypeAndStatic("unzip", function () {
var n = arguments[0] === undefined ? 2 : arguments[0];
return this.tee(n).map(function (iter, i) {
return iter.pluck(i);
});
}, 1);
/*
* Number of chambers.
*/
// Easter egg (Wu-Tang Clan's 36 chambers); not part of the functional API.
wu.tang = { clan: 36 };
return wu;
});
// We don't have a cached item for this index, we need to force its
// evaluation.
// If we are the last iterator to use a cached value, clean up after
// ourselves.
// We have an item in the cache for this index, so yield it.
|
<filename>src/components/FindAuthData/FindIdForm/FindIdCertification/FindIdCertification.tsx
import { Dispatch, SetStateAction } from "react";
import * as FIF from "../FindIdForm.style";
import * as FIC from "./FindIdCertification.style";
interface IFindIdCertificationProps {
  certificationNumber: string;
  setCertificationNumber: Dispatch<SetStateAction<string>>;
}

/**
 * Certification-number entry step of the "find ID" flow: a titled text input
 * for the emailed verification code plus a "resend verification mail" button.
 * The input value lives in the parent; this component only renders and
 * forwards changes.
 */
const FindIdCertification: React.FC<IFindIdCertificationProps> = (props) => {
  const { certificationNumber, setCertificationNumber } = props;
  // Styling (isWrite) and the button's enabled state both key off whether
  // the user has typed anything yet.
  const hasInput = certificationNumber !== "";

  return (
    <>
      <FIC.FindIdCertificationBox>
        <FIC.FindIdCertificationTitle>
          인증번호 입력
        </FIC.FindIdCertificationTitle>
        <FIC.FindIdCertificationInputWrap>
          <FIF.FindIdFormTextInput
            isWrite={hasInput}
            onChange={(e) => setCertificationNumber(e.target.value)}
            placeholder="인증번호를 입력해주세요."
          />
          <FIF.FindIdFormCertificationButton
            isWrite={hasInput}
            disabled={!hasInput}
          >
            인증메일 재발송
          </FIF.FindIdFormCertificationButton>
        </FIC.FindIdCertificationInputWrap>
      </FIC.FindIdCertificationBox>
    </>
  );
};

export default FindIdCertification;
|
<filename>sedkit/relations.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: <NAME>, <EMAIL>
# !python3
"""
This is the code used to generate the polynomial relations
used in sedkit's calculations
"""
from pkg_resources import resource_filename
import astropy.io.ascii as ii
import astropy.units as q
import astropy.table as at
from astroquery.vizier import Vizier
from bokeh.plotting import figure, show
import numpy as np
from . import utilities as u
from .uncertainties import Unum
V = Vizier(columns=["**"])
class Relation:
    """A base class to store raw data, fit a polynomial, and evaluate quickly."""

    def __init__(self, file, add_columns=None, ref=None, **kwargs):
        """Load the data.

        Parameters
        ----------
        file: str
            The file to load
        add_columns: dict (optional)
            Mapping of column name -> values to append to the loaded table
        ref: str (optional)
            The reference (e.g. bibcode) for the underlying data
        """
        # Load the file into a table
        self.data = ii.read(file, **kwargs)
        self.ref = ref

        # Fill in masked values
        self.data = self.data.filled(np.nan)

        # Dict of relations, keyed by 'yparam(xparam)'
        self.relations = {}

        # Add additional columns
        if isinstance(add_columns, dict):
            for colname, values in add_columns.items():
                self.add_column(colname, values)

    def add_column(self, colname, values):
        """
        Add the values to the data table.

        Parameters
        ----------
        colname: str
            The column name
        values: sequence
            The values for the column

        Raises
        ------
        KeyError
            If the column already exists
        ValueError
            If the number of values does not match the number of rows
        """
        # Check the colname
        if colname in self.parameters:
            raise KeyError("{}: column name already exists!".format(colname))

        # Check the length
        if len(values) != len(self.data):
            raise ValueError("{} != {}: number of values must match number of data rows.".format(len(values), len(self.data)))

        # Add the column
        self.data[colname] = values

    def add_relation(self, rel_name, order, xrange=None, xunit=None, yunit=None, plot=True):
        """
        Create a polynomial of the given *order* for *yparam* as a function of *xparam*
        which can be evaluated at any x value.

        Parameters
        ----------
        rel_name: str
            The relation name, i.e. 'yparam(xparam)'
        order: int
            The order of the polynomial fit
        xrange: sequence
            The range of x-values to consider
        xunit: astropy.units.quantity.Quantity
            The units of the x parameter values
        yunit: astropy.units.quantity.Quantity
            The units of the y parameter values
        plot: bool
            Show a plot of the fitted relation
        """
        # Get params
        xparam, yparam = self._parse_rel_name(rel_name)

        # Make sure params are in the table
        if xparam not in self.parameters or yparam not in self.parameters:
            raise NameError("{}, {}: Make sure both parameters are in the data, {}".format(xparam, yparam, self.data.colnames))

        # Grab data
        rel = {'xparam': xparam, 'yparam': yparam, 'order': order, 'x': np.array(self.data[xparam]), 'y': np.array(self.data[yparam]),
               'coeffs': None, 'C_p': None, 'matrix': None, 'yi': None, 'C_yi': None, 'sig_yi': None, 'xunit': xunit or 1, 'yunit': yunit or 1}

        # Set x range for fit
        if xrange is not None:
            idx = np.where(np.logical_and(rel['x'] > xrange[0], rel['x'] < xrange[1]))
            rel['x'] = rel['x'][idx]
            rel['y'] = rel['y'][idx]

        # Remove masked and NaN values
        rel['x'], rel['y'] = self.validate_data(rel['x'], rel['y'])

        # Determine monotonicity
        rel['monotonic'] = u.monotonic(rel['x'])

        # Set weighting: uniform by default, inverse-uncertainty if available.
        # BUGFIX: the format call must build the column *name*; previously
        # `.format` was called on the column lookup of the literal key
        # '{}_unc', which raised instead of weighting the fit.
        # NOTE(review): the uncertainty column is not filtered alongside
        # rel['x']/rel['y'] (xrange/NaN removal), so lengths may mismatch
        # when rows are dropped — confirm upstream.
        rel['weight'] = np.ones_like(rel['x'])
        if '{}_unc'.format(yparam) in self.data.colnames:
            rel['weight'] = 1. / self.data['{}_unc'.format(yparam)]

        # Try to fit a polynomial
        try:

            # Fit polynomial
            rel['coeffs'], rel['C_p'] = np.polyfit(rel['x'], rel['y'], rel['order'], w=rel['weight'], cov=True)

            # Matrix with rows 1, spt, spt**2, ...
            rel['matrix'] = np.vstack([rel['x']**(order-i) for i in range(order + 1)]).T

            # Matrix multiplication calculates the polynomial values
            rel['yi'] = np.dot(rel['matrix'], rel['coeffs'])

            # C_y = TT*C_z*TT.T
            rel['C_yi'] = np.dot(rel['matrix'], np.dot(rel['C_p'], rel['matrix'].T))

            # Standard deviations are sqrt of diagonal
            rel['sig_yi'] = np.sqrt(np.diag(rel['C_yi']))

        except Exception as exc:
            print(exc)
            print("Could not fit a polynomial to [{}, {}, {}, {}]. Try different values.".format(xparam, yparam, order, xrange))

        # Add relation to dict
        self.relations['{}({})'.format(yparam, xparam)] = rel

        if plot:
            show(self.plot(rel_name))

    def evaluate(self, rel_name, x_val, plot=False):
        """
        Evaluate the given relation at the given xval.

        Parameters
        ----------
        rel_name: str
            The relation name, i.e. 'yparam(xparam)'
        x_val: float, int, or sequence
            The x value to evaluate; a (nominal, unc...) sequence propagates
            uncertainties via Unum

        Returns
        -------
        y_val, y_unc, ref
            The value, uncertainty, and reference (value and ref only when no
            uncertainties were given); None when evaluation is not possible
        """
        # Check to see if the polynomial has been derived
        if rel_name not in self.relations:
            print("Please run 'add_relation' method for {} before trying to evaluate.".format(rel_name))
            return

        if x_val is None:
            return None

        else:
            try:
                # Get the relation
                rel = self.relations[rel_name]

                # Evaluate the polynomial
                if isinstance(x_val, (list, tuple)):
                    # With uncertainties
                    x = Unum(*x_val)
                    y = x.polyval(rel['coeffs'])
                    x_val = x.nominal
                    y_val = y.nominal * rel['yunit']
                    y_upper = y.upper * rel['yunit']
                    y_lower = y.lower * rel['yunit']
                else:
                    # Without uncertainties; strip any astropy unit first
                    x_val = x_val.value if hasattr(x_val, 'unit') else x_val
                    y_val = np.polyval(rel['coeffs'], x_val) * rel['yunit']
                    y_lower = y_upper = None

                if plot:
                    plt = self.plot(rel_name)
                    plt.circle([x_val], [y_val.value], color='red', size=10, legend='{}({})'.format(rel['yparam'], x_val))
                    if y_upper:
                        plt.line([x_val, x_val], [y_val - y_lower, y_val + y_upper], color='red')
                    show(plt)

                if y_upper:
                    return y_val, y_upper, y_lower, self.ref
                else:
                    return y_val, self.ref

            except ValueError as exc:
                print(exc)
                print("Could not evaluate the {} relation at {}".format(rel_name, x_val))
                return None

    @property
    def parameters(self):
        """
        List of parameters in the data table.
        """
        return self.data.colnames

    def _parse_rel_name(self, rel_name):
        """
        Parse the rel_name into xparam and yparam.

        Parameters
        ----------
        rel_name: str
            The relation name, i.e. 'yparam(xparam)'

        Returns
        -------
        list of str
            The [xparam, yparam] of the relation
        """
        return rel_name.replace(')', '').split('(')[::-1]

    def plot(self, rel_name, **kwargs):
        """
        Plot the data (and fit, if derived) for the given relation.
        """
        # Get params
        xparam, yparam = self._parse_rel_name(rel_name)
        if xparam not in self.parameters or yparam not in self.parameters:
            raise ValueError("{}, {}: Both parameters need to be in the relation. Try {}".format(xparam, yparam, self.relations))

        # Make the figure
        fig = figure(x_axis_label=xparam, y_axis_label=yparam)
        x, y = self.validate_data(self.data[xparam], self.data[yparam])
        fig.circle(x, y, legend='Data', **kwargs)

        if rel_name in self.relations:

            # Get the relation
            rel = self.relations[rel_name]

            # Plot polynomial values
            xaxis = np.linspace(rel['x'].min(), rel['x'].max(), 100)
            evals = np.polyval(rel['coeffs'], xaxis)
            fig.line(xaxis, evals, color='black', legend='Fit')

        return fig

    def validate_data(self, X, Y):
        """
        Validate the data for only numbers.

        Parameters
        ----------
        X: sequence
            The x-array
        Y: sequence
            The y-array

        Returns
        -------
        sequence
            The validated (x, y) arrays, keeping only pairwise-numeric entries

        Raises
        ------
        ValueError
            If no valid pairs remain
        """
        valid = np.asarray([(float(x), float(y)) for x, y in zip(X, Y) if u.isnumber(x) and u.isnumber(y)]).T

        if len(valid) == 0:
            raise ValueError("No valid data in the arrays")
        else:
            return valid
class DwarfSequence(Relation):
    """Evaluates the main (dwarf) sequence in arbitrary parameter spaces."""

    def __init__(self, **kwargs):
        """
        Initialize a Relation object with the Dwarf Sequence data.
        """
        # Locate the packaged dwarf-sequence table
        seq_file = resource_filename('sedkit', 'data/dwarf_sequence.txt')

        # The table marks missing entries with runs of dots; map them to NaN
        fill_values = [('...', np.nan), ('....', np.nan), ('.....', np.nan)]

        # Load the table through the base class
        super().__init__(seq_file, fill_values=fill_values, ref='2013ApJS..208....9P', **kwargs)

        # Numeric spectral type derived from the string 'SpT' column
        self.add_column('spt', [u.specType(i)[0] for i in self.data['SpT']])

        # Fit the well-characterized relations (name, polynomial order, y units)
        for rel_name, order, yunit in (
                ('Teff(spt)', 12, q.K),
                ('Teff(Lbol)', 9, q.K),
                ('radius(Lbol)', 9, q.R_sun),
                ('radius(spt)', 11, q.R_sun),
                ('radius(M_J)', 9, q.R_sun),
                ('radius(M_Ks)', 9, q.R_sun),
                ('mass(Lbol)', 9, q.M_sun),
                ('mass(M_Ks)', 9, q.M_sun),
                ('mass(M_J)', 9, q.M_sun)):
            self.add_relation(rel_name, order, yunit=yunit, plot=False)
class SpectralTypeRadius:
    """Polynomial spectral-type -> radius relation for main sequence stars,
    built from empirical AFGK data and model-derived MLTY data."""

    def __init__(self, orders=(5, 3), name='Spectral Type vs. Radius'):
        """Initialize the object.

        Parameters
        ----------
        orders: sequence
            The polynomial orders to fit to the (AFGK, MLTY) spt-radius data
        name: str
            A display name for the relation
        """
        self.name = name
        self.generate(orders)

    def get_radius(self, spt, plot=False):
        """Get the radius for the given spectral type.

        Parameters
        ----------
        spt: str, int
            The alphanumeric (e.g. 'A0') or integer (0-99 => O0-Y9) spectral
            type
        plot: bool
            Generate a plot

        Returns
        -------
        tuple
            The radius and uncertainty in solar radii

        Raises
        ------
        ValueError
            If the spectral type is outside [30, 99] (A0-Y9)
        """
        # Convert to integer
        if isinstance(spt, (str, bytes)):
            spt = u.specType(spt)[0]

        # Test valid ranges
        if not isinstance(spt, (int, float)) or not 30 <= spt <= 99:
            raise ValueError("Please provide a spectral type within [30, 99]")

        # Evaluate the appropriate polynomial (AFGK below ~M0, MLTY above)
        if spt > 64:
            data = self.MLTY
        else:
            data = self.AFGK
        radius = np.polyval(data['coeffs'], spt)*q.R_sun
        # Interpolate the fit's standard deviation onto the requested type
        radius_unc = np.interp(spt, data['spt'], data['sig_yi'])*q.R_sun

        if plot:
            fig = self.plot()
            fig.triangle([spt], [radius.value], color='red', size=15, legend=u.specType(spt))
            show(fig)

        return radius.round(3), radius_unc.round(3)

    def generate(self, orders):
        """
        Generate polynomials that describe the radius as a function of
        spectral type for empirically measured AFGKM main sequence stars
        (Boyajian+ 2012b, 2013) and MLTY model isochrone interpolated stars
        (Filippazzo et al. 2015, 2016).

        Stores the results as the `AFGK` and `MLTY` attributes.

        Parameters
        ----------
        orders: sequence
            The order polynomials to fit to the MLTY and AFGK data
        """
        # ====================================================================
        # Boyajian AFGKM data
        # ====================================================================
        afgk = resource_filename('sedkit', 'data/AFGK_radii.txt')
        afgk_data = ii.read(afgk, format='csv', comment='#')

        # ====================================================================
        # Filippazzo MLTY data
        # ====================================================================
        # Get the data
        cat1 = V.query_constraints('J/ApJ/810/158/table1')[0]
        cat2 = V.query_constraints('J/ApJ/810/158/table9')[0]

        # Join the tables to get the spectral types and radii in one table
        mlty_data = at.join(cat1, cat2, keys='ID', join_type='outer')

        # Only keep field age
        mlty_data = mlty_data[mlty_data['b_Age'] >= 0.5]

        # Rename columns
        mlty_data.rename_column('SpT', 'spectral_type')
        mlty_data.rename_column('Rad', 'radius')
        mlty_data.rename_column('e_Rad', 'radius_unc')

        # Make solar radii units
        mlty_data['radius'] = (mlty_data['radius'].value * q.Rjup).to(q.R_sun)
        mlty_data['radius_unc'] = (mlty_data['radius_unc'].value * q.Rjup).to(q.R_sun)

        # ====================================================================
        # Fit and save the data
        # ====================================================================
        for data, name, order, ref, rng in zip([afgk_data, mlty_data],
                                               ['AFGK', 'MLTY'], orders,
                                               ['Boyajian+ 2012b, 2013', 'Filippazzo+ 2015'],
                                               [(30, 65), (65, 99)]):

            # Container for data
            container = {}

            # Translate string SPT to numbers; skip unparseable entries
            spts = []
            keep = []
            for n, i in enumerate(data['spectral_type']):
                try:
                    spt = u.specType(i)
                    spts.append(spt)
                    keep.append(n)
                # Only swallow genuine parse failures, not e.g. KeyboardInterrupt
                except Exception:
                    pass

            # Filter bad spectral types
            data = data[keep]

            # Add the number to the table
            num, *_, lum = np.array(spts).T
            data['spt'] = num.astype(float)
            data['lum'] = lum

            # Filter out sub-giants
            data = data[(data['spt'] > rng[0]) & (data['spt'] < rng[1])]
            data = data[data['lum'] == 'V']
            data = data[((data['radius'] < 1.8) & (data['spt'] > 37)) | (data['spt'] <= 37)]

            # Filter out nans and non-physical radii
            data = data[data['radius'] < 4]
            data = data[data['radius'] > 0]
            data = data[data['radius_unc'] > 0]
            container['data'] = data[data['spt'] > 0]
            container['rng'] = rng

            # Fit polynomial, weighted by inverse radius uncertainty
            container['coeffs'], container['C_p'] = np.polyfit(data['spt'], data['radius'], order, w=1./data['radius_unc'], cov=True)

            # Do the interpolation for plotting
            container['spt'] = np.arange(np.nanmin(data['spt'])-3, np.nanmax(data['spt'])+1)

            # Matrix with rows 1, spt, spt**2, ...
            container['sptT'] = np.vstack([container['spt']**(order-i) for i in range(order+1)]).T

            # Matrix multiplication calculates the polynomial values
            container['yi'] = np.dot(container['sptT'], container['coeffs'])

            # C_y = TT*C_z*TT.T
            container['C_yi'] = np.dot(container['sptT'], np.dot(container['C_p'], container['sptT'].T))

            # Standard deviations are sqrt of diagonal
            container['sig_yi'] = np.sqrt(np.diag(container['C_yi']))

            # Store the new order
            container['order'] = order

            # Set the reference
            container['ref'] = ref

            # Add the container as an attribute
            setattr(self, name, container)

    def plot(self, draw=False):
        """Plot the relation.

        Parameters
        ----------
        draw: bool
            Draw the figure, else return it

        Returns
        -------
        bokeh.plotting.figure
            The plotted figure (only when draw is False)
        """
        AFGK_color = '#1f77b4'
        MLTY_color = '#2ca02c'

        # Configure plot
        TOOLS = ['pan', 'reset', 'box_zoom', 'wheel_zoom', 'save']
        xlab = 'Spectral Type'
        ylab = 'Solar Radii'
        fig = figure(plot_width=800, plot_height=500, title=self.name,
                     x_axis_label=xlab, y_axis_label=ylab,
                     tools=TOOLS)

        # Plot the fit
        for n, (data, color) in enumerate(zip([self.AFGK, self.MLTY], [AFGK_color, MLTY_color])):

            # Add the data
            if n == 0:
                fig.circle(data['data']['spt'], data['data']['radius'], size=8,
                           color=color, legend=data['ref'])
            else:
                fig.square(data['data']['spt'], data['data']['radius'], size=8,
                           color=color, legend=data['ref'])

            # Add the fit line and uncertainty band (+/- 1 sigma)
            fig.line(data['spt'], data['yi'], color=color,
                     legend='Order {} Fit'.format(data['order']))
            x = np.append(data['spt'], data['spt'][::-1])
            y = np.append(data['yi']-data['sig_yi'], (data['yi']+data['sig_yi'])[::-1])
            fig.patch(x, y, fill_alpha=0.1, line_alpha=0, color=color)

        if draw:
            show(fig)
        else:
            return fig
|
<filename>website/migrations/0014_auto_20201015_2305.py
# Generated by Django 2.2 on 2020-10-15 17:35
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: links each Order to a Product image.

    dependencies = [
        ('website', '0013_order'),
    ]

    operations = [
        # Order.img: optional FK into Product, joined on the product's
        # (unique) image field; deleting the Product cascades to the Order.
        migrations.AddField(
            model_name='order',
            name='img',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='img', to='website.Product', to_field='image'),
        ),
        # Product.image gains unique=True so it can serve as the FK target above.
        migrations.AlterField(
            model_name='product',
            name='image',
            field=models.ImageField(blank=True, default='default.jpg', null=True, unique=True, upload_to='images/'),
        ),
    ]
|
The basic idea behind merging two sorted linked lists is to keep two pointers, one on each list, and advance them based on the values at their nodes. To merge two sorted linked lists without using extra space, we start a pointer at the head of each list and compare the elements they point to. Whichever element is smaller, its node is appended to the merged list and that list's pointer moves one step forward. This continues until one pointer reaches the end of its list, at which point the remaining nodes of the other list are appended to the merged list.
<filename>DeviceCode/pal/OpenSSL/OpenSSL_1_0_0/Include/openssl/ecdh.h
#include "../../crypto/ecdh/ecdh.h"
|
const links = document.querySelectorAll('ul li');
const pages = document.querySelectorAll('.page');
const allBtns = document.querySelectorAll('.close-btn');

let selectedPageBtn = "";

// Clicking a nav link reveals its page (hiding all others) and blurs the header.
links.forEach((link, index) => {
    link.addEventListener('click', (event) => {
        event.preventDefault();

        // Hide every page, then show the one matching the clicked link.
        pages.forEach((page) => {
            page.style.display = "none";
        });
        pages[index].style.display = "block";
        selectedPageBtn = index;

        document.querySelector('header').style.filter = "blur(2px)";
    });
});

// Each close button hides its own page and removes the header blur.
allBtns.forEach((btn) => {
    btn.addEventListener('click', () => {
        btn.parentElement.style.display = "none";
        document.querySelector('header').style.filter = "blur(0px)";
    });
});
|
<gh_stars>0
module.exports = {
host: process.env.SQL_URL || 'localhost',
dialect: process.env.SQL_DIALECT || 'mssql',
passoperatorsAliasesword: false,
pool: {
max: 5,
min: 0,
acquire: 30000,
idle: 10000
},
define: {
timestamps: false
}
}
|
<gh_stars>0
package org.museautomation.ui.valuesource.groups;
import org.museautomation.core.*;
import org.museautomation.core.values.descriptor.*;
import java.util.*;
/**
* Finds/builds the available value source type groups for the project.
*
* @author <NAME> (see LICENSE.txt for license details)
*/
public class ValueSourceTypeGroups
    {
    /**
     * Returns the value source type group for the project, building and
     * caching it on first use.
     *
     * Synchronized: the backing HashMap is shared static mutable state and
     * the original lazy check-then-put was racy under concurrent callers.
     */
    public static synchronized ValueSourceTypeGroup get(MuseProject project)
        {
        ValueSourceTypeGroup group = all_groups.get(project);
        if (group == null)
            {
            // lookup all types and group them by their declared group name
            group = new ValueSourceTypeList("all");
            for (ValueSourceDescriptor descriptor : project.getValueSourceDescriptors().findAll())
                group.add(descriptor, descriptor.getGroupName());
            all_groups.put(project, group);
            }
        return group;
        }

    /** Cache of built groups, keyed by project (guarded by class lock in get()). */
    private static final Map<MuseProject, ValueSourceTypeGroup> all_groups = new HashMap<>();
    }
|
import { SynthUtils, expect as cdkExpect, haveResourceLike, ABSENT, objectLike, arrayWith } from '@aws-cdk/assert';
import * as cdk from '@aws-cdk/core';
import { AutoScalingGroupLT } from '../src/index';
import { Vpc, SubnetType, InstanceClass, InstanceType, InstanceSize, MachineImage } from '@aws-cdk/aws-ec2';
import { BlockDeviceVolume } from '@aws-cdk/aws-autoscaling';
import '@aws-cdk/assert/jest';
// Snapshot regression test: synthesizes one ASG built from a Launch Template
// and one built from a Launch Configuration, then pins the CloudFormation output.
test('Should match snapshot', () => {
  //WHEN
  const app = new cdk.App();
  const stack = new cdk.Stack(app, 'aws-autoscaling');

  // Isolated-subnet VPC shared by both groups
  const vpc = new Vpc(stack, 'vpc', {
    maxAzs: 3,
    subnetConfiguration: [
      {
        name: 'App',
        subnetType: SubnetType.ISOLATED,
        cidrMask: 26,
      },
    ],
  });

  // ASG using Launch Template
  new AutoScalingGroupLT(stack, 'SpotAsgLaunchTemplate', {
    useInstanceTemplate: true,
    machineImage: MachineImage.latestAmazonLinux(),
    vpc,
    vpcSubnets: { subnetGroupName: 'App' },
    minCapacity: 0,
    maxCapacity: 5,
    desiredCapacity: 1,
    instanceType: InstanceType.of(InstanceClass.T3, InstanceSize.MEDIUM),
    spotPrice: '0.03',
  });

  // ASG using Launch Config
  new AutoScalingGroupLT(stack, 'DefaultAsgLaunchConfig', {
    useInstanceTemplate: false,
    machineImage: MachineImage.latestAmazonLinux(),
    vpc,
    vpcSubnets: { subnetGroupName: 'App' },
    minCapacity: 0,
    maxCapacity: 5,
    desiredCapacity: 1,
    instanceType: InstanceType.of(InstanceClass.T3, InstanceSize.MEDIUM),
    // spotPrice: '0.03',
  });

  // THEN
  expect(SynthUtils.toCloudFormation(stack)).toMatchSnapshot();
});
// Verifies the default Launch Template path: the construct should emit a
// MixedInstancesPolicy with spot settings and instance-type overrides, and
// must NOT emit a LaunchConfiguration.
test('When useInstanceTemplate flag set to true, should create ASG with default Launch Template settings', () => {
  //WHEN
  const app = new cdk.App();
  const stack = new cdk.Stack(app, 'aws-autoscaling');
  const vpc = new Vpc(stack, 'vpc', {
    maxAzs: 3,
    subnetConfiguration: [
      {
        name: 'App',
        subnetType: SubnetType.ISOLATED,
        cidrMask: 26,
      },
    ],
  });

  const spotasg = new AutoScalingGroupLT(stack, 'DefaultSpotAsgLaunchTemplate', {
    useInstanceTemplate: true,
    machineImage: MachineImage.latestAmazonLinux(),
    vpc,
    vpcSubnets: { subnetGroupName: 'App' },
    minCapacity: 0,
    maxCapacity: 5,
    desiredCapacity: 1,
    instanceType: InstanceType.of(InstanceClass.T3, InstanceSize.MEDIUM),
    spotPrice: '0.03',
  });
  spotasg.scaleOnCpuUtilization('CPUScale', { targetUtilizationPercent: 70 });

  // Expected CloudFormation fragment for the generated MixedInstancesPolicy
  // (logical IDs match the construct path hash).
  const MixedInstancesPolicy = {
    InstancesDistribution: {
      OnDemandAllocationStrategy: 'prioritized',
      OnDemandBaseCapacity: 0,
      OnDemandPercentageAboveBaseCapacity: 0,
      SpotAllocationStrategy: 'capacity-optimized',
      SpotMaxPrice: '0.03',
    },
    LaunchTemplate: {
      LaunchTemplateSpecification: {
        LaunchTemplateId: {
          Ref: 'DefaultSpotAsgLaunchTemplate6D11B85A',
        },
        Version: {
          'Fn::GetAtt': ['DefaultSpotAsgLaunchTemplate6D11B85A', 'LatestVersionNumber'],
        },
      },
      Overrides: [
        {
          InstanceType: 't3.medium',
        },
        {
          InstanceType: 't3.small',
        },
      ],
    },
  };

  // THEN
  // ASG should have Launch Template and scaling policy
  cdkExpect(stack).to(haveResourceLike('AWS::EC2::LaunchTemplate'));
  cdkExpect(stack).to(haveResourceLike('AWS::AutoScaling::ScalingPolicy'));
  cdkExpect(stack).notTo(haveResourceLike('AWS::AutoScaling::LaunchConfiguration'));
  // ASG should have MixedInstancesPolicy
  cdkExpect(stack).to(
    haveResourceLike('AWS::AutoScaling::AutoScalingGroup', { MixedInstancesPolicy: MixedInstancesPolicy })
  );
});
// Omitting useInstanceTemplate (or setting it false) must fall back to the
// stock @aws-cdk AutoScalingGroup, which emits a LaunchConfiguration and no
// LaunchTemplate. Two separate stacks cover the "omitted" and "false" cases.
test('Switching off or not passing LauchTemplate flag should create standard ASG from base AutoScalingGroup', () => {
  //WHEN
  const app = new cdk.App();
  const stack = new cdk.Stack(app, 'autoscaling-default');
  const vpc = new Vpc(stack, 'vpc', {
    maxAzs: 3,
    subnetConfiguration: [
      {
        name: 'App',
        subnetType: SubnetType.ISOLATED,
        cidrMask: 26,
      },
    ],
  });
  const stack1 = new cdk.Stack(app, 'autoscaling-default1');
  const vpc1 = new Vpc(stack1, 'vpc', {
    maxAzs: 3,
    subnetConfiguration: [
      {
        name: 'App',
        subnetType: SubnetType.ISOLATED,
        cidrMask: 26,
      },
    ],
  });

  // ASG LT with only required props
  const spotasg = new AutoScalingGroupLT(stack, 'StandardASG', {
    machineImage: MachineImage.latestAmazonLinux(),
    vpc,
    vpcSubnets: { subnetGroupName: 'App' },
    minCapacity: 0,
    maxCapacity: 5,
    desiredCapacity: 1,
    instanceType: InstanceType.of(InstanceClass.T3, InstanceSize.MEDIUM),
  });
  spotasg.scaleOnCpuUtilization('CPUScale', { targetUtilizationPercent: 70 });

  new AutoScalingGroupLT(stack1, 'StandardASGWithProp', {
    useInstanceTemplate: false, // Use standard @aws-cdk Base Class AutoScalingGroup
    machineImage: MachineImage.latestAmazonLinux(),
    vpc: vpc1,
    vpcSubnets: { subnetGroupName: 'App' },
    minCapacity: 0,
    maxCapacity: 5,
    desiredCapacity: 1,
    instanceType: InstanceType.of(InstanceClass.T3, InstanceSize.MEDIUM),
  });

  // THEN
  // ASG should have Launch Config and scaling policy
  cdkExpect(stack).to(haveResourceLike('AWS::AutoScaling::LaunchConfiguration'));
  cdkExpect(stack).to(haveResourceLike('AWS::AutoScaling::ScalingPolicy'));
  // ASG should have Launch Config and should not have LaunchTemplate
  cdkExpect(stack1).to(haveResourceLike('AWS::AutoScaling::LaunchConfiguration'));
  cdkExpect(stack1).notTo(haveResourceLike('AWS::EC2::LaunchTemplate'));
});
// Caller-supplied launchTemplateOverrides, instancesDistribution, and
// blockDevices must override the construct's defaults in the synthesized
// template.
test('Should be able to overwrite LauchTemplate related properties', () => {
  //WHEN
  const app = new cdk.App();
  const stack = new cdk.Stack(app, 'aws-autoscaling');
  const vpc = new Vpc(stack, 'vpc', {
    maxAzs: 3,
    subnetConfiguration: [
      {
        name: 'App',
        subnetType: SubnetType.ISOLATED,
        cidrMask: 26,
      },
    ],
  });

  // Overwrite launchTemplateOverrides and instancesDistribution (onDemandBaseCapacity, spotAllocationStrategy)
  // Also overwrite blockDevices to check if new volume mapping will be created or not
  const spotasg = new AutoScalingGroupLT(stack, 'OverwriteLTASG', {
    useInstanceTemplate: true,
    launchTemplateOverrides: [{ instanceType: 'm2.small' }, { instanceType: 'm2.large' }],
    instancesDistribution: {
      onDemandBaseCapacity: 1,
      spotAllocationStrategy: 'lowest-price',
    },
    machineImage: MachineImage.latestAmazonLinux(),
    vpc,
    vpcSubnets: { subnetGroupName: 'App' },
    minCapacity: 0,
    maxCapacity: 5,
    desiredCapacity: 1,
    instanceType: InstanceType.of(InstanceClass.T3, InstanceSize.MEDIUM),
    blockDevices: [{ volume: BlockDeviceVolume.ebs(20), deviceName: 'xvdb' }],
    spotPrice: '0.03',
  });
  spotasg.scaleOnCpuUtilization('CPUScale', { targetUtilizationPercent: 70 });

  // THEN
  // ASG should have Launch Template and scaling policy
  cdkExpect(stack).to(haveResourceLike('AWS::EC2::LaunchTemplate'));
  cdkExpect(stack).to(haveResourceLike('AWS::AutoScaling::ScalingPolicy'));
  cdkExpect(stack).notTo(haveResourceLike('AWS::AutoScaling::LaunchConfiguration'));
  // Launch Template should have new BlockDeviceMappings
  cdkExpect(stack).to(
    haveResourceLike('AWS::EC2::LaunchTemplate', {
      LaunchTemplateData: objectLike({
        BlockDeviceMappings: [
          {
            DeviceName: 'xvdb',
            Ebs: {
              VolumeSize: 20,
            },
          },
        ],
      }),
    })
  );
  // ASG should have overwritten properties
  cdkExpect(stack).to(
    haveResourceLike('AWS::AutoScaling::AutoScalingGroup', {
      LaunchConfigurationName: ABSENT,
      MixedInstancesPolicy: objectLike({
        InstancesDistribution: objectLike({ OnDemandBaseCapacity: 1, SpotAllocationStrategy: 'lowest-price' }),
        LaunchTemplate: objectLike({
          Overrides: arrayWith(
            {
              InstanceType: 'm2.small',
            },
            {
              InstanceType: 'm2.large',
            }
          ),
        }),
      }),
    })
  );
});
|
//
// KRFindPasswordView.h
// KRLogin
//
// Created by LX on 2017/12/15.
// Copyright © 2017年 Ace. All rights reserved.
//
#import <UIKit/UIKit.h>
/// View used in the "find/reset password" flow.
/// Exposes action callbacks so the owning controller can react to taps.
@interface KRFindPasswordView : UIView

/// Invoked when the user requests an SMS verification code.
/// Block types with empty parentheses are deprecated; (void) is explicit.
@property (nonatomic, copy) void (^smsCodeBlock)(void);

/// Invoked when the "next" button is tapped.
@property (nonatomic, copy) void (^nextButtonClickBlock)(void);

@end
|
/*
* Call functions sequentially and with delay
* ------------------------------------------
*
* @param events [array] of objects {
* @prop action [function] with optional callback
* @prop delay [int]
* @prop increment [int]
* }
* @param repeat [int]
*/
export const cascade = ( events, repeat = 1 ) => {

    let eventsLength = events.length,
        increment = 0,
        delay = 0;

    // Run the whole event sequence `repeat` times.
    for( let j = 0; j < repeat; j++ ) {

        // Walk the events recursively so async actions can defer the next step.
        const recursive = ( i ) => {
            if( i < eventsLength ) {
                let event = events[i],
                    eventDelay = event.hasOwnProperty( 'delay' ) ? event.delay : delay;

                if( event.hasOwnProperty( 'increment' ) ) {
                    // First incrementing event may also (re)set the base delay.
                    if( !increment && eventDelay )
                        delay = eventDelay;
                    increment = event.increment;
                } else {
                    delay = eventDelay;
                }

                setTimeout( () => {
                    // With repeats, actions see the repetition index; otherwise the event index.
                    let indexArg = repeat > 1 ? j : i;
                    // check if contains two args ( second arg is done callback )
                    if( event.action.length === 2 ) {
                        // The action controls pacing: recurse only once it calls done.
                        event.action( indexArg, () => {
                            recursive( i + 1 );
                        } );
                    } else {
                        event.action( indexArg );
                        recursive( i + 1 );
                    }
                }, delay );

                // Each subsequent event is scheduled later by the current increment.
                delay += increment;
            }
        };
        recursive( 0 );
    }
};
|
<filename>config/src/main/java/org/moskito/control/config/datarepository/RetrieverInstanceConfig.java
package org.moskito.control.config.datarepository;
import com.google.gson.annotations.SerializedName;
import org.configureme.annotations.ConfigureMe;
import java.util.Arrays;
/**
 * Configuration for a single data-retriever instance: its name, an opaque
 * retriever-specific configuration string, and the variable mappings it feeds.
 *
 * @author lrosenberg
 * @since 09.06.18 23:40
 */
@ConfigureMe
public class RetrieverInstanceConfig {
	/**
	 * Name of the retriever.
	 */
	@SerializedName("name")
	private String name;
	/**
	 * Configuration parameter.
	 */
	@SerializedName("configuration")
	private String configuration;
	/**
	 * Mappings for variables.
	 */
	@SerializedName("@mappings")
	private VariableMapping[] mappings;

	public String getConfiguration() {
		return configuration;
	}

	public void setConfiguration(String configuration) {
		this.configuration = configuration;
	}

	public VariableMapping[] getMappings() {
		return mappings;
	}

	public void setMappings(VariableMapping[] mappings) {
		this.mappings = mappings;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	@Override
	public String toString() {
		return "RetrieverInstanceConfig{" +
				"name='" + name + '\'' +
				", configuration='" + configuration + '\'' +
				", mappings=" + Arrays.toString(mappings) +
				'}';
	}
}
|
package com.devin.client.mysise.model.parse;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.devin.client.mysise.model.base.WebBody;
import com.devin.client.mysise.model.bean.Schedule;
import com.devin.client.mysise.model.url.Url;
public class ParseSchedular {
	// Parsed weekly schedule; rebuilt from scratch on each getSchedule() call.
	private static Schedule schedule = new Schedule();
	private static Document document;

	/**
	 * Fetches and parses the student's schedule page.
	 *
	 * A fresh Schedule is created on every call so repeated calls do not
	 * append duplicate rows to the previously returned result (the old
	 * static instance accumulated rows across calls).
	 */
	public static Schedule getSchedule(){
		schedule = new Schedule();
		init();
		return schedule;
	}

	// Loads the schedule page into `document` and parses all rows.
	private static void init(){
		WebBody.initStudent(Url.studentSchedularURL);
		document = WebBody.getDocument();
		parseAllSchedular();
	}

	// Each schedule row is a <tr bgcolor=#FFFFFF>; parse its cells.
	private static void parseAllSchedular(){
		Elements elements = document.select("tr[bgcolor=#FFFFFF]");
		for(Element e : elements){
			getWeekSchedular(e.select("td"));
		}
	}

	// Column layout: [0] = time slot, [1..5] = weekday columns.
	private static void getWeekSchedular(Elements elements){
		schedule.getTime().add(getTimeSchedular(elements));
		schedule.getOne().add(getOneSchedular(elements));
		schedule.getTwo().add(getTwoSchedular(elements));
		schedule.getThree().add(getThreeSchedular(elements));
		schedule.getFour().add(getFourSchedular(elements));
		schedule.getFive().add(getFiveSchedular(elements));
	}

	private static String getTimeSchedular(Elements elements){
		return elements.get(0).text();
	}
	private static String getOneSchedular(Elements elements){
		return elements.get(1).text();
	}
	private static String getTwoSchedular(Elements elements){
		return elements.get(2).text();
	}
	private static String getThreeSchedular(Elements elements){
		return elements.get(3).text();
	}
	private static String getFourSchedular(Elements elements){
		return elements.get(4).text();
	}
	private static String getFiveSchedular(Elements elements){
		return elements.get(5).text();
	}
}
|
public class Substr {
    /**
     * Prints every length-3 substring of a sample string, one per line.
     */
    public static void main(String[] args) {
        final String text = "Hello";
        final int windowSize = 3;

        // Slide a fixed-size window across the string.
        for (int start = 0; start + windowSize <= text.length(); start++) {
            System.out.println(text.substring(start, start + windowSize));
        }
    }
}
// Output: Hel, ell, llo
import requests
from .exceptions import APIException
from .resource import APIResource
class ApiClient(object):
    """
    A request client for the api.

    Thin wrapper around ``requests`` that raises APIException on any
    transport or HTTP-status failure and deserializes successful
    responses via the configured resource class.
    """
    _auth = None
    _resource = APIResource

    def get(self, url, **kwargs):
        """
        Makes an HTTP GET request to the API. Any keyword arguments will
        be converted to query string parameters.
        """
        return self._request("get", url, **kwargs)

    def post(self, url, **kwargs):
        """
        Makes an HTTP POST request to the API.
        """
        return self._request("post", url, **kwargs)

    def put(self, url, **kwargs):
        """
        Makes an HTTP PUT request to the API.
        """
        return self._request("put", url, **kwargs)

    def patch(self, url, **kwargs):
        """
        Makes an HTTP PATCH request to the API.
        """
        return self._request("patch", url, **kwargs)

    def delete(self, url, **kwargs):
        """
        Makes an HTTP DELETE request to the API.
        """
        return self._request("delete", url, **kwargs)

    def set_resource(self, resource):
        # Swap the resource class used to deserialize responses.
        self._resource = resource

    def _request(self, method, url, **kwargs):
        """
        Performs the HTTP request and deserializes the response.

        Returns the parsed resource, or True for empty bodies and
        204 responses. Raises APIException (chained to the underlying
        error) on connection failures or non-2xx statuses.
        """
        content = None
        status_code = None
        try:
            result = requests.request(method, url, **kwargs)
            content = result.content
            result.raise_for_status()
            status_code = result.status_code
        except Exception as exc:
            # Chain the original exception so the root cause is not lost.
            raise APIException(content) from exc
        return self._resource.from_json(content) if content and status_code != 204 else True
|
/*
* =============================================================================
* Copyright (c) 2021 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v2.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v20.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* =============================================================================
*/
package acme;
import java.io.Serializable;
import java.rmi.Remote;
import java.rmi.RemoteException;
/** Remote service that executes serializable units of work. */
public interface Processor extends Remote {
    /** A serializable task that can be shipped to and run by a remote Processor. */
    @FunctionalInterface
    interface Processable extends Serializable {
        /** Executes the work; may throw anything, which the remote side must handle. */
        void process() throws Throwable;
    }

    /** Runs the given task on the remote processor. */
    void process(Processable p) throws RemoteException;
}
|
<gh_stars>1-10
const { runScript } = require("../scriptWrapper");
const logger = require("../../common/logger");
const { clearAppointments } = require("./utils/clearUtils");
// Entry point: wipes every appointment document, logging progress.
const clearAllAppointments = async () => {
  logger.info("Suppression de tous les appointments ....");
  await clearAppointments();
  logger.info("Appointments supprimés avec succès !");
};

runScript(clearAllAppointments);
|
<gh_stars>1-10
/* BinFileToRead.cpp */
//----------------------------------------------------------------------------------------
//
// Project: CCore 3.60
//
// Tag: Fundamental
//
// License: Boost Software License - Version 1.0 - August 17th, 2003
//
// see http://www.boost.org/LICENSE_1_0.txt or the local copy
//
// Copyright (c) 2019 <NAME>. All rights reserved.
//
//----------------------------------------------------------------------------------------
#include <CCore/inc/BinFileToRead.h>
#include <CCore/inc/Exception.h>
namespace CCore {
namespace Private_BinFileToRead {
// Null-object implementation: stands in when no real file backend is bound.
// Every operation throws, except close(), which only reports (it must be
// safe to call from cleanup paths).
class NullBinFileToRead : public BinFileToReadBase
 {
  public:

   NullBinFileToRead() {}

   ~NullBinFileToRead() {}

   // Always fails: there is no underlying file system behind this object.
   virtual void open(StrLen,FileOpenFlags)
    {
     Printf(Exception,"CCore::NullBinFileToRead::open(...) : no such file");
    }

   // NoException: close() is used on error/teardown paths and must not throw.
   virtual void close()
    {
     Printf(NoException,"CCore::NullBinFileToRead::close() : file is not opened");
    }

   virtual ulen read(FilePosType,uint8 *,ulen)
    {
     Printf(Exception,"CCore::NullBinFileToRead::read(...) : file is not opened");

     return 0;
    }

   virtual void read_all(FilePosType,uint8 *,ulen)
    {
     Printf(Exception,"CCore::NullBinFileToRead::read_all(...) : file is not opened");
    }
 };

// Shared singleton instance, constructed early (init priority 3).
NullBinFileToRead Object CCORE_INITPRI_3 ;
} // namespace Private_BinFileToRead
using namespace Private_BinFileToRead;
/* GetNullBinFileToReadPtr() */

// Returns the process-wide null file object (never nullptr).
BinFileToReadBase * GetNullBinFileToReadPtr() { return &Object; }

/* guard functions */

// Throws: reports that the file exceeds the caller-supplied maximum length.
void GuardBinFileTooLong(StrLen file_name,ulen max_len,FilePosType file_len)
 {
  Printf(Exception,"CCore::BinFileToMem::BinFileToMem(...,#.q;,max_len=#;) : file is too long #;",file_name,max_len,file_len);
 }
/* class BinFileToMem */

// Reads the whole file into an owned memory buffer.
// Throws if the file is longer than max_len; the file is closed on both
// the success and the failure path.
BinFileToMem::BinFileToMem(BinFileToRead file,StrLen file_name,ulen max_len)
 {
  file->open(file_name);

  try
    {
     auto file_len=file->getLen();

     if( file_len>max_len ) GuardBinFileTooLong(file_name,max_len,file_len);

     ulen len=(ulen)file_len;

     // alloc() reserves the buffer owned by this object
     file->read_all(0,alloc(len),len);
    }
  catch(...)
    {
     // ensure the file handle is released before propagating
     file->close();

     throw;
    }

  file->close();
 }
} // namespace CCore
|
#!/usr/bin/env bash
# Integration test: `git lfs install` must honor a repo-local core.hooksPath,
# writing its hooks there instead of the default .git/hooks directory.

. "$(dirname "$0")/testlib.sh"

# These tests rely on behavior found in 2.9.0 to perform themselves,
# specifically:
#   - core.hooksPath support
ensure_git_version_isnt $VERSION_LOWER "2.9.0"

begin_test "install with supported core.hooksPath"
(
  set -e

  repo_name="supported-custom-hooks-path"
  git init "$repo_name"
  cd "$repo_name"

  # Point git at a custom hooks directory before installing LFS.
  hooks_dir="custom_hooks_dir"
  mkdir -p "$hooks_dir"
  git config --local core.hooksPath "$hooks_dir"

  git lfs install 2>&1 | tee install.log
  grep "Updated git hooks" install.log

  # Every LFS hook must land in the custom directory, none in .git.
  [ -e "$hooks_dir/pre-push" ]
  [ ! -e ".git/pre-push" ]

  [ -e "$hooks_dir/post-checkout" ]
  [ ! -e ".git/post-checkout" ]

  [ -e "$hooks_dir/post-commit" ]
  [ ! -e ".git/post-commit" ]

  [ -e "$hooks_dir/post-merge" ]
  [ ! -e ".git/post-merge" ]
)
end_test
/**
* Remove the minimum number of invalid parentheses in order to make the input string valid. Return all possible results.
Note: The input string may contain letters other than the parentheses ( and ).
Example 1:
Input: "()())()"
Output: ["()()()", "(())()"]
Example 2:
Input: "(a)())()"
Output: ["(a)()()", "(a())()"]
Example 3:
Input: ")("
Output: [""]
*/
/** Hint 5
* For every left parenthesis, we should have a corresponding right parenthesis. We can make use of two counters which keep track of misplaced left and right parenthesis and in one iteration we can find out these two values.
0 1 2 3 4 5 6 7
( ) ) ) ( ( ( )
i = 0, left = 1, right = 0
i = 1, left = 0, right = 0
i = 2, left = 0, right = 1
i = 3, left = 0, right = 2
i = 4, left = 1, right = 2
i = 5, left = 2, right = 2
i = 6, left = 3, right = 2
i = 7, left = 2, right = 2
We have 2 misplaced left and 2 misplaced right parentheses.
*/
// Leet #301
//so while we check the validity with the stack, and stacks that have containing elements must be removed.
//but this is also a shifting/sliding window
// Implements LeetCode 301: returns all valid strings obtainable by removing
// the minimum number of invalid parentheses. Generalized from the empty stub
// with a defaulted parameter, so existing no-arg callers still work.
const RemoveMinimumNumberParenthesis = (s = "") => {
  // Validity check: every ')' must have a matching earlier '('.
  const isValid = (str) => {
    let balance = 0;
    for (const ch of str) {
      if (ch === '(') balance++;
      else if (ch === ')' && --balance < 0) return false;
    }
    return balance === 0;
  };

  // One pass computes how many misplaced '(' and ')' must be removed
  // (see the counter walkthrough in the hint above this function).
  let left = 0, right = 0;
  for (const ch of s) {
    if (ch === '(') left++;
    else if (ch === ')') {
      if (left > 0) left--;
      else right++;
    }
  }

  const results = [];
  const seen = new Set();

  // DFS: at each character, either delete it (if we still owe a removal of
  // its kind) or keep it; collect deduplicated valid results of full length.
  const dfs = (index, l, r, current) => {
    if (index === s.length) {
      if (l === 0 && r === 0 && isValid(current) && !seen.has(current)) {
        seen.add(current);
        results.push(current);
      }
      return;
    }
    const ch = s[index];
    if (ch === '(' && l > 0) dfs(index + 1, l - 1, r, current);
    if (ch === ')' && r > 0) dfs(index + 1, l, r - 1, current);
    dfs(index + 1, l, r, current + ch);
  };
  dfs(0, left, right, "");

  return results;
};

export default RemoveMinimumNumberParenthesis;
import random

# Delete one randomly chosen line from file.txt, in place.

# Read all lines
with open('file.txt', 'r') as f:
    lines = f.readlines()

# Randomly select and delete a line.
# Guard against an empty file: the original random.randint(0, -1)
# raised ValueError when there were no lines.
if lines:
    del lines[random.randrange(len(lines))]

# Write updated content back to the file
with open('file.txt', 'w') as f:
    f.writelines(lines)
#!/bin/bash
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# Change the OpenWrt LAN login address: edit the target IP (192.168.2.1) below as desired.
sed -i 's/192.168.1.1/192.168.2.1/g' package/base-files/files/bin/config_generate
# openfros uses 192.168.66.1 as its default IP
sed -i 's/192.168.66.1/192.168.2.1/g' package/base-files/files/bin/config_generate

# Fix the reported CPU model and add a temperature readout on the LuCI status page
sed -i 's|pcdata(boardinfo.system or "?")|luci.sys.exec("uname -m") or "?"|g' feeds/luci/modules/luci-mod-admin-full/luasrc/view/admin_status/index.htm
sed -i 's/or "1"%>/or "1"%> ( <%=luci.sys.exec("expr `cat \/sys\/class\/thermal\/thermal_zone0\/temp` \/ 1000") or "?"%> \℃ ) /g' feeds/luci/modules/luci-mod-admin-full/luasrc/view/admin_status/index.htm

# Set the hostname: replace OPW-ALEX with anything you like (must not be all digits or contain Chinese characters)
sed -i '/uci commit system/i\uci set system.@system[0].hostname='OPW-ALEX'' package/lean/default-settings/files/zzz-default-settings
# openfros handled separately
# sed -i '/uci commit system/i\uci set system.@system[0].hostname='OPW-ALEX'' package/lean/default-settings/files/zzz-default-settings

# Show a custom name in the version string (the "Alex build <date> @" part is appended to the stock string)
sed -i "s/OpenWrt /Alex build $(TZ=UTC-8 date "+%Y.%m.%d") @ OpenWrt /g" package/lean/default-settings/files/zzz-default-settings
# openfros handled separately
# sed -i "s/FROS/Alex build $(TZ=UTC-8 date "+%Y.%m.%d") @ FROS/g" package/lean/default-settings/files/zzz-default-settings
sed -i "s/FROS/Alex build $(TZ=UTC-8 date "+%Y.%m.%d") On (OpenWrt R21.3.27 | openwrt_luci 18.06.8) @ FROS /g" package/lean/default-settings/files/zzz-default-settings

# Make argon the default theme; substitute your favourite (only themes that do not auto-revert to the default take effect)
# sed -i 's/luci-theme-bootstrap/luci-theme-argon/g' feeds/luci/collections/luci/Makefile

# Set an empty root password (log in without a password after flashing, then set your own)
sed -i 's@.*CYXluq4wUazHjmCDBCqXF*@#&@g' package/lean/default-settings/files/zzz-default-settings

# # Add extra wireless drivers
# # svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-18.06-k5.4/package/kernel/rtl8812au-ac
# # svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-18.06-k5.4/package/kernel/rtl8821cu
# # svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-18.06-k5.4/package/kernel/rtl8188eu
# # svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-18.06-k5.4/package/kernel/rtl8192du
# # svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-18.06-k5.4/package/kernel/rtl88x2bu
# svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-21.02/package/kernel/rtl8812au-ac
# svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-21.02/package/kernel/rtl8821cu
# svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-21.02/package/kernel/rtl8188eu
# svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-21.02/package/kernel/rtl8192du
# svn co https://github.com/immortalwrt/immortalwrt/branches/openwrt-21.02/package/kernel/rtl88x2bu

# # Add apk (Apk Packages Manager)
# svn co https://github.com/openwrt/packages/trunk/utils/apk
# # popd
# # Fix mt76 wireless driver
# pushd package/kernel/mt76
# sed -i '/mt7662u_rom_patch.bin/a\\techo mt76-usb disable_usb_sg=1 > $\(1\)\/etc\/modules.d\/mt76-usb' Makefile
# popd

# Build only the K3 firmware image
# sed -i 's|^TARGET_|# TARGET_|g; s|# TARGET_DEVICES += phicomm-k3|TARGET_DEVICES += phicomm-k3|' ./target/linux/bcm53xx/image/Makefile

# Replace https-dns-proxy.init to fix DNS being forwarded to 127.0.0.1#5053/5054 when building LEDE with passwall
#curl -fsSL https://raw.githubusercontent.com/Lienol/openwrt-packages/dev-19.07/net/https-dns-proxy/files/https-dns-proxy.init > feeds/packages/net/https-dns-proxy/files/https-dns-proxy.init

# Change the kernel version for K3
# sed -i 's/KERNEL_PATCHVER:=4.19/KERNEL_PATCHVER:=5.4/g' target/linux/bcm53xx/Makefile
#sed -i 's/KERNEL_TESTING_PATCHVER:=5.4/KERNEL_TESTING_PATCHVER:=4.19/g' target/linux/x86/Makefile

# Rename plugin menu entries (untested whether renaming affects plugin behaviour; verify yourself)
#sed -i 's/"BaiduPCS Web"/"百度网盘"/g' package/lean/luci-app-baidupcs-web/luasrc/controller/baidupcs-web.lua
#sed -i 's/cbi("qbittorrent"),_("qBittorrent")/cbi("qbittorrent"),_("BT下载")/g' package/lean/luci-app-qbittorrent/luasrc/controller/qbittorrent.lua
#sed -i 's/"aMule设置"/"电驴下载"/g' package/lean/luci-app-amule/po/zh-cn/amule.po
#sed -i 's/"网络存储"/"存储"/g' package/lean/luci-app-amule/po/zh-cn/amule.po
#sed -i 's/"网络存储"/"存储"/g' package/lean/luci-app-vsftpd/po/zh-cn/vsftpd.po
#sed -i 's/"Turbo ACC 网络加速"/"网络加速"/g' package/lean/luci-app-flowoffload/po/zh-cn/flowoffload.po
#sed -i 's/"Turbo ACC 网络加速"/"网络加速"/g' package/lean/luci-app-sfe/po/zh-cn/sfe.po
#sed -i 's/"实时流量监测"/"流量"/g' package/lean/luci-app-wrtbwmon/po/zh-cn/wrtbwmon.po
#sed -i 's/"KMS 服务器"/"KMS激活"/g' package/lean/luci-app-vlmcsd/po/zh-cn/vlmcsd.zh-cn.po
#sed -i 's/"TTYD 终端"/"命令窗"/g' package/lean/luci-app-ttyd/po/zh-cn/terminal.po
#sed -i 's/"USB 打印服务器"/"打印服务"/g' package/lean/luci-app-usb-printer/po/zh-cn/usb-printer.po
#sed -i 's/"网络存储"/"存储"/g' package/lean/luci-app-usb-printer/po/zh-cn/usb-printer.po
#sed -i 's/"Web 管理"/"Web"/g' package/lean/luci-app-webadmin/po/zh-cn/webadmin.po
#sed -i 's/"管理权"/"改密码"/g' feeds/luci/modules/luci-base/po/zh-cn/base.po
#sed -i 's/"带宽监控"/"监视"/g' feeds/luci/applications/luci-app-nlbwmon/po/zh-cn/nlbwmon.po
def calculate_completion_percentage(tasks: dict) -> float:
    """Return the percentage of completed tasks, rounded to 2 decimals.

    Parameters
    ----------
    tasks: dict
        Maps task identifiers to a truthy value when the task is complete.

    Returns
    -------
    float
        Completion percentage in [0.0, 100.0]. An empty dict yields 0.0
        (the original raised ZeroDivisionError in that case).
    """
    total_tasks = len(tasks)
    if total_tasks == 0:
        return 0.0
    completed_tasks = sum(1 for task_completed in tasks.values() if task_completed)
    return round((completed_tasks / total_tasks) * 100, 2)
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
// Name :
// Author : Avi
// Revision : $Revision: #93 $
//
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
// Description :
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
// file deepcode ignore CppConstantBinaryExpression: <comment the reason here>
#include <stdexcept>
#include <boost/python.hpp>
#include <boost/python/raw_function.hpp>
#include "Defs.hpp"
#include "Suite.hpp"
#include "PrintStyle.hpp"
#include "File.hpp"
#include "JobCreationCtrl.hpp"
#include "Simulator.hpp"
#include "BoostPythonUtil.hpp"
#include "Edit.hpp"
#include "DefsDoc.hpp"
#include "GlossaryDoc.hpp"
using namespace ecf;
using namespace boost::python;
using namespace std;
namespace bp = boost::python;
// See: http://wiki.python.org/moin/boost.python/HowTo#boost.function_objects
// Serialises the Defs in the requested print style and writes it to `filename`.
// Throws std::runtime_error if the file cannot be created.
void save_as_defs(const Defs& theDefs, const std::string& filename, PrintStyle::Type_t the_style_enum)
{
   PrintStyle style(the_style_enum); // RAII: applies the style to the stream insertion below

   std::stringstream ss; ss << theDefs;

   std::string file_creation_error_msg;
   if (!File::create(filename,ss.str(),file_creation_error_msg)) {
      std::string error = "save_as_defs failed: ";
      error += file_creation_error_msg;
      throw std::runtime_error(error);
   }
}

// Convenience overload: saves using the default DEFS print style.
void save_as_defs_1(const Defs& theDefs, const std::string& filename)
{
   save_as_defs(theDefs,filename,PrintStyle::DEFS);
}
// Loads a Defs from the given definition file.
// Throws std::runtime_error on parse failure; warnings go to stderr.
static defs_ptr create_defs(const std::string& file_name)
{
   defs_ptr defs = Defs::create();

   std::string errorMsg,warningMsg;
   if (!defs->restore(file_name,errorMsg,warningMsg)) {
      throw std::runtime_error(errorMsg);
   }
   if (!warningMsg.empty()) std::cerr << warningMsg;
   return defs;
}
// Checks the defs. Returns the combined error+warning text on failure,
// otherwise just the warnings (empty string when fully clean).
std::string check_defs(defs_ptr defs)
{
   std::string error_msg;
   std::string warning_msg;
   if (defs.get() && !defs->check(error_msg,warning_msg)) {
      // report warnings alongside the errors on failure
      error_msg += "\n";
      error_msg += warning_msg;
      return error_msg;
   }
   return warning_msg;
}
// Restores defs state from a checkpoint file.
void restore_from_checkpt(defs_ptr defs, const std::string& file_name)
{
   defs->restore(file_name);
}

// Runs the simulator over the defs. Returns an empty string on success,
// otherwise the simulator's error message. A null defs is a silent no-op.
std::string simulate(defs_ptr defs)
{
   if (defs.get()) {
      // name output file after name of the first suite
      std::string defs_filename = "pyext.def";
      if (!defs->suiteVec().empty()) {
         defs_filename = (*defs->suiteVec().begin())->name() + ".def";
      }

      Simulator simulator;
      std::string errorMsg;
      if (!simulator.run(*defs, defs_filename, errorMsg)) {
         return errorMsg;
      }
   }
   return string();
}
// Small accessor/helper shims exposed to Python.
SState::State get_server_state(defs_ptr self) { return self->server().get_state(); }

/// Since we don't pass in a child pos, the nodes are added to the end
suite_ptr add_suite(defs_ptr self,suite_ptr s){ self->addSuite(s); return s; }

std::vector<task_ptr> get_all_tasks(defs_ptr self){ std::vector<task_ptr> tasks; self->get_all_tasks(tasks); return tasks; }
std::vector<node_ptr> get_all_nodes(defs_ptr self){ std::vector<node_ptr> nodes; self->get_all_nodes(nodes); return nodes; }

// Context management, Only used to provide indentation
defs_ptr defs_enter(defs_ptr self) { return self;}
bool defs_exit(defs_ptr self,const bp::object& type,const bp::object& value,const bp::object& traceback){return false;}

// Attempts job generation for all tasks. Returns the accumulated error
// message (empty on success); optionally throws instead of returning it.
std::string check_job_creation(defs_ptr defs, bool throw_on_error, bool verbose){
   job_creation_ctrl_ptr jobCtrl = std::make_shared<JobCreationCtrl>();
   if (verbose) jobCtrl->set_verbose(verbose);
   defs->check_job_creation(jobCtrl);
   if (!jobCtrl->get_error_msg().empty() && throw_on_error) {
      throw std::runtime_error(jobCtrl->get_error_msg());
   }
   return jobCtrl->get_error_msg();
}
// Add server user variables
// Each overload returns self so calls can be chained from Python.
defs_ptr add_variable(defs_ptr self,const std::string& name, const std::string& value) {
self->set_server().add_or_update_user_variables(name,value); return self;}
// Integer values are stored as their decimal string representation.
defs_ptr add_variable_int(defs_ptr self,const std::string& name, int value) {
self->set_server().add_or_update_user_variables(name, boost::lexical_cast<std::string>(value)); return self;}
// Overload taking a Variable object; its name/value pair is copied in.
defs_ptr add_variable_var(defs_ptr self,const Variable& var) {
self->set_server().add_or_update_user_variables(var.name(),var.theValue()); return self;}
// Add every key/value pair of a Python dict as a server user variable.
// Returns self so calls can be chained from Python.
defs_ptr add_variable_dict(defs_ptr self,const bp::dict& dict) {
   std::vector<std::pair<std::string,std::string> > pairs;
   BoostPythonUtil::dict_to_str_vec(dict,pairs);
   for(const auto& p : pairs) {
      self->set_server().add_or_update_user_variables(p.first,p.second);
   }
   return self;
}
// Delete a named server user variable; an empty name deletes all of them.
void delete_variable(defs_ptr self,const std::string& name) { self->set_server().delete_user_variable(name);}
// Sort attributes of the given type; this overload does not recurse.
void sort_attributes(defs_ptr self,ecf::Attr::Type attr) { self->sort_attributes(attr);}
// Sort attributes of the given type, optionally recursing into child nodes.
void sort_attributes1(defs_ptr self,ecf::Attr::Type attr, bool recurse) { self->sort_attributes(attr,recurse); }
// Sort attributes of the given type, with a Python list of node names to
// exclude from sorting ('no_sort').
void sort_attributes2(defs_ptr self,ecf::Attr::Type attr, bool recurse,const bp::list& list){
std::vector<std::string> no_sort;
BoostPythonUtil::list_to_str_vec(list,no_sort);
self->sort_attributes(attr,recurse,no_sort);
}
// Sort attributes selected by name (case-insensitive), optionally recursing,
// with a Python list of node names to exclude from sorting ('no_sort').
// Throws std::runtime_error when the attribute name is not recognised.
void sort_attributes3(defs_ptr self,const std::string& attribute_name, bool recursive,const bp::list& list){
   // Lower-case the name so the lookup is case-insensitive.
   // BUG FIX: the lowered copy was previously computed but the original
   // 'attribute_name' was passed to to_attr(), leaving the copy unused.
   std::string attribute = attribute_name; boost::algorithm::to_lower(attribute);
   ecf::Attr::Type attr = Attr::to_attr(attribute);
   if (attr == ecf::Attr::UNKNOWN) {
      std::stringstream ss; ss << "sort_attributes: the attribute " << attribute_name << " is not valid";
      throw std::runtime_error(ss.str());
   }
   std::vector<std::string> no_sort;
   BoostPythonUtil::list_to_str_vec(list,no_sort);
   self->sort_attributes(attr,recursive,no_sort);
}
// Support sized and Container protocol
// __len__ : number of suites directly under the definition.
size_t defs_len(defs_ptr self) { return self->suiteVec().size();}
// __contains__ : true when a suite of the given name exists.
bool defs_container(defs_ptr self, const std::string& name){return (self->findSuite(name)) ? true : false;}
// Dispatch helper for Defs.add()/__iadd__/__add__: inspects the Python
// argument's runtime type and adds it appropriately. Accepts a Suite, a dict
// of user variables, an Edit, a Variable, a (possibly nested) list of any of
// these, or None (ignored). Throws for any other type.
// NOTE: the extract<> checks are order-sensitive; do not reorder.
static object do_add(defs_ptr self, const bp::object& arg) {
//std::cout << "defs::do_add \n";
if (arg.ptr() == object().ptr()) return object(self); // *IGNORE* None
else if (extract<suite_ptr>(arg).check()) self->addSuite(extract<suite_ptr>(arg)) ;
else if (extract<dict>(arg).check()) add_variable_dict(self,extract<dict>(arg));
else if (extract<Edit>(arg).check()) {
// An Edit bundles several variables; add each as a server user variable.
Edit edit = extract<Edit>(arg);
const std::vector<Variable>& vec = edit.variables();
for(const auto & i : vec) self->set_server().add_or_update_user_variables(i.name(),i.theValue());
}
else if (extract<bp::list>(arg).check()){
bp::list the_list = extract<bp::list>(arg);
int the_list_size = len(the_list);
for(int i = 0; i < the_list_size; ++i) (void) do_add(self,the_list[i]); // recursive
}
else if (extract<Variable>(arg).check()) {
Variable var = extract<Variable>(arg);
self->set_server().add_or_update_user_variables(var.name(),var.theValue());
}
else throw std::runtime_error("ExportDefs::add : Unknown type");
return object(self);
}
// Python Defs.add(*args, **kwargs): args[0] is self (the Defs); remaining
// positional args are dispatched through do_add, and keyword arguments are
// added as server user variables.
static object add(bp::tuple args, dict kwargs) {
int the_list_size = len(args);
defs_ptr self = extract<defs_ptr>(args[0]); // self
if (!self) throw std::runtime_error("ExportDefs::add() : first argument is not a Defs");
for (int i = 1; i < the_list_size; ++i) (void)do_add(self,args[i]);
(void)add_variable_dict(self,kwargs);
return object(self); // return defs as python object, relies class_<Defs>... for type registration
}
// __iadd__ with a list argument: dispatch every element through do_add.
static object defs_iadd(defs_ptr self, const bp::list& list) {
//std::cout << "defs_iadd list " << self->name() << "\n";
int the_list_size = len(list);
for(int i = 0; i < the_list_size; ++i) (void)do_add(self,list[i]);
return object(self); // return node_ptr as python object, relies class_<Node>... for type registration
}
// __getattr__ : resolve an unknown attribute name as, in order, a suite of
// that name, then a server variable of that name; otherwise raise.
static object defs_getattr(defs_ptr self, const std::string& attr) {
// cout << " defs_getattr self.name() : " << self->name() << " attr " << attr << "\n";
suite_ptr child = self->findSuite(attr);
if (child) return object(child);
Variable var = self->server().findVariable(attr);
if (!var.empty()) return object(var);
std::stringstream ss; ss << "ExportDefs::defs_getattr : function of name '" << attr << "' does not exist *OR* suite or defs variable";
throw std::runtime_error(ss.str());
return object();   // unreachable; kept to silence missing-return warnings
}
// Raw __init__ for Defs(*args, **kw). A single string argument is a path to
// load a definition from disk; any other positional args are collected into a
// list and forwarded to defs_init. Mixing a string with other args is an error.
object defs_raw_constructor(bp::tuple args, dict kw) {
// cout << "defs_raw_constructor len(args):" << len(args) << endl;
// args[0] is Defs(i.e self)
bp::list the_list;
std::string name;
for (int i = 1; i < len(args) ; ++i) {
if (extract<string>(args[i]).check()) name = extract<string>(args[i]);
else the_list.append(args[i]);
}
if (!name.empty() && len(the_list) > 0)
throw std::runtime_error("defs_raw_constructor: Can't mix string with other arguments. String argument specifies a path(loads a definition from disk)");
return args[0].attr("__init__")(the_list,kw); // calls -> init(list attr, dict kw)
}
// Construct a Defs from a list of children and a dict of user variables
// (the non-path branch of defs_raw_constructor).
defs_ptr defs_init( bp::list the_list, bp::dict kw) {
// cout << " defs_init: the_list: " << len(the_list) << " dict: " << len(kw) << endl;
defs_ptr defs = Defs::create();
(void) add_variable_dict(defs,kw);
(void) defs_iadd(defs,the_list);
return defs;
}
// Register the Defs class and all of its Python bindings with Boost.Python.
// NOTE: overloaded .def() registrations of the same name are tried by
// Boost.Python in reverse registration order; do not reorder them.
void export_Defs()
{
class_<Defs,defs_ptr>( "Defs", DefsDoc::add_definition_doc(),init<>("Create a empty Defs"))
.def("__init__",raw_function(&defs_raw_constructor,0)) // will call -> task_init
.def("__init__",make_constructor(&defs_init))
.def("__init__",make_constructor(&create_defs), DefsDoc::add_definition_doc())
.def(self == self ) // __eq__
.def("__copy__", copyObject<Defs>) // __copy__ uses copy constructor
.def("__str__", &Defs::toString) // __str__
.def("__enter__", &defs_enter) // allow with statement, hence indentation support
.def("__exit__", &defs_exit) // allow with statement, hence indentation support
.def("__len__", &defs_len) // Sized protocol
.def("__contains__", &defs_container) // Container protocol
.def("__iter__", bp::range(&Defs::suite_begin, &Defs::suite_end)) // iterable protocol
.def("__getattr__", &defs_getattr) /* Any attempt to resolve a property, method, or field name that doesn't actually exist on the object itself will be passed to __getattr__*/
.def("__iadd__", &defs_iadd) // defs += [ Suite('s1'), Edit(var='value'), Variable('a','b') [ Suite('t2') ] ]
.def("__iadd__", &do_add) // defs += Suite("s1")
.def("__add__", &do_add)
.def("add", raw_function(add,1),DefsDoc::add())
.def("add_suite", &add_suite, DefsDoc::add_suite_doc())
.def("add_suite", &Defs::add_suite, GlossaryDoc::list() )
.def("add_extern", &Defs::add_extern, DefsDoc::add_extern_doc())
.def("auto_add_externs", &Defs::auto_add_externs, DefsDoc::add_extern_doc())
.def("add_variable", &add_variable, DefsDoc::add_variable_doc())
.def("add_variable", &add_variable_int)
.def("add_variable", &add_variable_var)
.def("add_variable", &add_variable_dict)
.def("sort_attributes", &sort_attributes)
.def("sort_attributes", &sort_attributes1)
.def("sort_attributes", &sort_attributes2)
.def("sort_attributes", &sort_attributes3,( bp::arg("attribute_type"), bp::arg("recursive")=true, bp::arg("no_sort")=bp::list() ))
.def("sort_attributes", &Defs::sort_attributes,(bp::arg("attribute_type"),bp::arg("recursive")=true))
.def("delete_variable", &delete_variable,"An empty string will delete all user variables")
.def("find_suite", &Defs::findSuite,"Given a name, find the corresponding `suite`_")
.def("find_abs_node", &Defs::findAbsNode,"Given a path, find the the `node`_")
.def("find_node_path", &Defs::find_node_path,"Given a type(suite,family,task) and a name, return path of the first match, otherwise return an empty string")
.def("find_node", &Defs::find_node,"Given a type(suite,family,task) and a path to a node, return the node.")
.def("get_all_nodes", &get_all_nodes,"Returns all the `node`_ s in the definition")
.def("get_all_tasks", &get_all_tasks,"Returns all the `task`_ nodes")
.def("has_time_dependencies", &Defs::hasTimeDependencies,"returns True if the `suite definition`_ has any time `dependencies`_")
.def("save_as_checkpt", &Defs::save_as_checkpt, "Save the in memory `suite definition`_ as a `check point`_ file. This includes all node state.")
.def("restore_from_checkpt", &restore_from_checkpt, "Restore the `suite definition`_ from a `check point`_ file stored on disk")
.def("save_as_defs", &save_as_defs, "Save the in memory `suite definition`_ into a file. The file name must be passed as an argument\n\n")
.def("save_as_defs", &save_as_defs_1, "Save the in memory `suite definition`_ into a file. The file name must be passed as an argument\n\n")
.def("check", &check_defs, DefsDoc::check())
.def("simulate", &simulate, DefsDoc::simulate())
.def("check_job_creation", &check_job_creation,(bp::arg("throw_on_error")=false,bp::arg("verbose")=false),DefsDoc::check_job_creation_doc() )
.def("check_job_creation", &Defs::check_job_creation)
.def("generate_scripts", &Defs::generate_scripts, DefsDoc::generate_scripts_doc() )
.def("get_state", &Defs::state )
.def("get_server_state", &get_server_state, DefsDoc::get_server_state() )
.add_property("suites", bp::range( &Defs::suite_begin, &Defs::suite_end),"Returns a list of `suite`_ s")
.add_property("externs", bp::range( &Defs::extern_begin, &Defs::extern_end),"Returns a list of `extern`_ s" )
.add_property("user_variables", bp::range( &Defs::user_variables_begin, &Defs::user_variables_end),"Returns a list of user defined `variable`_ s" )
.add_property("server_variables", bp::range( &Defs::server_variables_begin, &Defs::server_variables_end),"Returns a list of server `variable`_ s" )
;
#if ECF_ENABLE_PYTHON_PTR_REGISTER
bp::register_ptr_to_python<defs_ptr>(); // needed for mac and boost 1.6
#endif
}
|
#!/bin/bash
# Start every Docker container on this host (running or stopped).
echo "Konteynerleri Baslatiyorum"
# Collect all container IDs first; 'docker start' with no arguments errors,
# so skip the call when there are no containers at all.
containers=$(docker ps -aq)
if [ -n "$containers" ]; then
    # Intentionally unquoted: word-splitting passes each ID separately.
    docker start $containers
fi
echo "Baslatildi :)"
clear
package facade.amazonaws.services
import scalajs._
import scalajs.js.annotation.JSImport
import scala.scalajs.js.|
import scala.concurrent.Future
import facade.amazonaws._
/** Type aliases and Future-based extension methods for the AWS
  * Comprehend Medical Scala.js facade. Generated-facade style: each alias
  * names an AWS API shape; ComprehendMedicalOps wraps each callback-based
  * SDK call as a Scala Future via `.promise().toFuture`.
  */
package object comprehendmedical {
type AnyLengthString = String
type AttributeList = js.Array[Attribute]
type BoundedLengthString = String
type ClientRequestTokenString = String
type ComprehendMedicalAsyncJobPropertiesList = js.Array[ComprehendMedicalAsyncJobProperties]
type EntityList = js.Array[Entity]
type ICD10CMAttributeList = js.Array[ICD10CMAttribute]
type ICD10CMConceptList = js.Array[ICD10CMConcept]
type ICD10CMEntityList = js.Array[ICD10CMEntity]
type ICD10CMTraitList = js.Array[ICD10CMTrait]
type IamRoleArn = String
type JobId = String
type JobName = String
type KMSKey = String
type ManifestFilePath = String
type MaxResultsInteger = Int
type ModelVersion = String
type OntologyLinkingBoundedLengthString = String
type RxNormAttributeList = js.Array[RxNormAttribute]
type RxNormConceptList = js.Array[RxNormConcept]
type RxNormEntityList = js.Array[RxNormEntity]
type RxNormTraitList = js.Array[RxNormTrait]
type S3Bucket = String
type S3Key = String
type Timestamp = js.Date
type TraitList = js.Array[Trait]
type UnmappedAttributeList = js.Array[UnmappedAttribute]
/** Extension methods returning [[scala.concurrent.Future]] for every service operation. */
implicit final class ComprehendMedicalOps(private val service: ComprehendMedical) extends AnyVal {
@inline def describeEntitiesDetectionV2JobFuture(params: DescribeEntitiesDetectionV2JobRequest): Future[DescribeEntitiesDetectionV2JobResponse] = service.describeEntitiesDetectionV2Job(params).promise().toFuture
@inline def describeICD10CMInferenceJobFuture(params: DescribeICD10CMInferenceJobRequest): Future[DescribeICD10CMInferenceJobResponse] = service.describeICD10CMInferenceJob(params).promise().toFuture
@inline def describePHIDetectionJobFuture(params: DescribePHIDetectionJobRequest): Future[DescribePHIDetectionJobResponse] = service.describePHIDetectionJob(params).promise().toFuture
@inline def describeRxNormInferenceJobFuture(params: DescribeRxNormInferenceJobRequest): Future[DescribeRxNormInferenceJobResponse] = service.describeRxNormInferenceJob(params).promise().toFuture
@inline def detectEntitiesV2Future(params: DetectEntitiesV2Request): Future[DetectEntitiesV2Response] = service.detectEntitiesV2(params).promise().toFuture
@inline def detectPHIFuture(params: DetectPHIRequest): Future[DetectPHIResponse] = service.detectPHI(params).promise().toFuture
@inline def inferICD10CMFuture(params: InferICD10CMRequest): Future[InferICD10CMResponse] = service.inferICD10CM(params).promise().toFuture
@inline def inferRxNormFuture(params: InferRxNormRequest): Future[InferRxNormResponse] = service.inferRxNorm(params).promise().toFuture
@inline def listEntitiesDetectionV2JobsFuture(params: ListEntitiesDetectionV2JobsRequest): Future[ListEntitiesDetectionV2JobsResponse] = service.listEntitiesDetectionV2Jobs(params).promise().toFuture
@inline def listICD10CMInferenceJobsFuture(params: ListICD10CMInferenceJobsRequest): Future[ListICD10CMInferenceJobsResponse] = service.listICD10CMInferenceJobs(params).promise().toFuture
@inline def listPHIDetectionJobsFuture(params: ListPHIDetectionJobsRequest): Future[ListPHIDetectionJobsResponse] = service.listPHIDetectionJobs(params).promise().toFuture
@inline def listRxNormInferenceJobsFuture(params: ListRxNormInferenceJobsRequest): Future[ListRxNormInferenceJobsResponse] = service.listRxNormInferenceJobs(params).promise().toFuture
@inline def startEntitiesDetectionV2JobFuture(params: StartEntitiesDetectionV2JobRequest): Future[StartEntitiesDetectionV2JobResponse] = service.startEntitiesDetectionV2Job(params).promise().toFuture
@inline def startICD10CMInferenceJobFuture(params: StartICD10CMInferenceJobRequest): Future[StartICD10CMInferenceJobResponse] = service.startICD10CMInferenceJob(params).promise().toFuture
@inline def startPHIDetectionJobFuture(params: StartPHIDetectionJobRequest): Future[StartPHIDetectionJobResponse] = service.startPHIDetectionJob(params).promise().toFuture
@inline def startRxNormInferenceJobFuture(params: StartRxNormInferenceJobRequest): Future[StartRxNormInferenceJobResponse] = service.startRxNormInferenceJob(params).promise().toFuture
@inline def stopEntitiesDetectionV2JobFuture(params: StopEntitiesDetectionV2JobRequest): Future[StopEntitiesDetectionV2JobResponse] = service.stopEntitiesDetectionV2Job(params).promise().toFuture
@inline def stopICD10CMInferenceJobFuture(params: StopICD10CMInferenceJobRequest): Future[StopICD10CMInferenceJobResponse] = service.stopICD10CMInferenceJob(params).promise().toFuture
@inline def stopPHIDetectionJobFuture(params: StopPHIDetectionJobRequest): Future[StopPHIDetectionJobResponse] = service.stopPHIDetectionJob(params).promise().toFuture
@inline def stopRxNormInferenceJobFuture(params: StopRxNormInferenceJobRequest): Future[StopRxNormInferenceJobResponse] = service.stopRxNormInferenceJob(params).promise().toFuture
}
}
package comprehendmedical {
/** Scala.js facade for the AWS SDK `ComprehendMedical` client
  * (imported from "aws-sdk/clients/comprehendmedical").
  * Each method maps one-to-one to a service operation and returns the SDK
  * `Request` wrapper; see ComprehendMedicalOps for Future-based variants.
  */
@js.native
@JSImport("aws-sdk/clients/comprehendmedical", JSImport.Namespace, "AWS.ComprehendMedical")
class ComprehendMedical() extends js.Object {
def this(config: AWSConfig) = this()
@deprecated("This operation is deprecated, use DetectEntitiesV2 instead.", "forever") def detectEntities(params: DetectEntitiesRequest): Request[DetectEntitiesResponse] = js.native
def describeEntitiesDetectionV2Job(params: DescribeEntitiesDetectionV2JobRequest): Request[DescribeEntitiesDetectionV2JobResponse] = js.native
def describeICD10CMInferenceJob(params: DescribeICD10CMInferenceJobRequest): Request[DescribeICD10CMInferenceJobResponse] = js.native
def describePHIDetectionJob(params: DescribePHIDetectionJobRequest): Request[DescribePHIDetectionJobResponse] = js.native
def describeRxNormInferenceJob(params: DescribeRxNormInferenceJobRequest): Request[DescribeRxNormInferenceJobResponse] = js.native
def detectEntitiesV2(params: DetectEntitiesV2Request): Request[DetectEntitiesV2Response] = js.native
def detectPHI(params: DetectPHIRequest): Request[DetectPHIResponse] = js.native
def inferICD10CM(params: InferICD10CMRequest): Request[InferICD10CMResponse] = js.native
def inferRxNorm(params: InferRxNormRequest): Request[InferRxNormResponse] = js.native
def listEntitiesDetectionV2Jobs(params: ListEntitiesDetectionV2JobsRequest): Request[ListEntitiesDetectionV2JobsResponse] = js.native
def listICD10CMInferenceJobs(params: ListICD10CMInferenceJobsRequest): Request[ListICD10CMInferenceJobsResponse] = js.native
def listPHIDetectionJobs(params: ListPHIDetectionJobsRequest): Request[ListPHIDetectionJobsResponse] = js.native
def listRxNormInferenceJobs(params: ListRxNormInferenceJobsRequest): Request[ListRxNormInferenceJobsResponse] = js.native
def startEntitiesDetectionV2Job(params: StartEntitiesDetectionV2JobRequest): Request[StartEntitiesDetectionV2JobResponse] = js.native
def startICD10CMInferenceJob(params: StartICD10CMInferenceJobRequest): Request[StartICD10CMInferenceJobResponse] = js.native
def startPHIDetectionJob(params: StartPHIDetectionJobRequest): Request[StartPHIDetectionJobResponse] = js.native
def startRxNormInferenceJob(params: StartRxNormInferenceJobRequest): Request[StartRxNormInferenceJobResponse] = js.native
def stopEntitiesDetectionV2Job(params: StopEntitiesDetectionV2JobRequest): Request[StopEntitiesDetectionV2JobResponse] = js.native
def stopICD10CMInferenceJob(params: StopICD10CMInferenceJobRequest): Request[StopICD10CMInferenceJobResponse] = js.native
def stopPHIDetectionJob(params: StopPHIDetectionJobRequest): Request[StopPHIDetectionJobResponse] = js.native
def stopRxNormInferenceJob(params: StopRxNormInferenceJobRequest): Request[StopRxNormInferenceJobResponse] = js.native
}
/** An extracted segment of the text that is an attribute of an entity, or otherwise related to an entity, such as the dosage of a medication taken. It contains information about the attribute such as id, begin and end offset within the input text, and the segment of the input text.
*/
@js.native
trait Attribute extends js.Object {
var BeginOffset: js.UndefOr[Int]
var Category: js.UndefOr[EntityType]
var EndOffset: js.UndefOr[Int]
var Id: js.UndefOr[Int]
var RelationshipScore: js.UndefOr[Float]
var RelationshipType: js.UndefOr[RelationshipType]
var Score: js.UndefOr[Float]
var Text: js.UndefOr[String]
var Traits: js.UndefOr[TraitList]
var Type: js.UndefOr[EntitySubType]
}
object Attribute {
/** Builds an Attribute as a JS object literal; undefined fields are omitted. */
@inline
def apply(
BeginOffset: js.UndefOr[Int] = js.undefined,
Category: js.UndefOr[EntityType] = js.undefined,
EndOffset: js.UndefOr[Int] = js.undefined,
Id: js.UndefOr[Int] = js.undefined,
RelationshipScore: js.UndefOr[Float] = js.undefined,
RelationshipType: js.UndefOr[RelationshipType] = js.undefined,
Score: js.UndefOr[Float] = js.undefined,
Text: js.UndefOr[String] = js.undefined,
Traits: js.UndefOr[TraitList] = js.undefined,
Type: js.UndefOr[EntitySubType] = js.undefined
): Attribute = {
val __obj = js.Dynamic.literal()
BeginOffset.foreach(__v => __obj.updateDynamic("BeginOffset")(__v.asInstanceOf[js.Any]))
Category.foreach(__v => __obj.updateDynamic("Category")(__v.asInstanceOf[js.Any]))
EndOffset.foreach(__v => __obj.updateDynamic("EndOffset")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
RelationshipScore.foreach(__v => __obj.updateDynamic("RelationshipScore")(__v.asInstanceOf[js.Any]))
RelationshipType.foreach(__v => __obj.updateDynamic("RelationshipType")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
Text.foreach(__v => __obj.updateDynamic("Text")(__v.asInstanceOf[js.Any]))
Traits.foreach(__v => __obj.updateDynamic("Traits")(__v.asInstanceOf[js.Any]))
Type.foreach(__v => __obj.updateDynamic("Type")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Attribute]
}
}
/** String-backed enum of attribute trait names (JS string values at runtime). */
@js.native
sealed trait AttributeName extends js.Any
object AttributeName {
val SIGN = "SIGN".asInstanceOf[AttributeName]
val SYMPTOM = "SYMPTOM".asInstanceOf[AttributeName]
val DIAGNOSIS = "DIAGNOSIS".asInstanceOf[AttributeName]
val NEGATION = "NEGATION".asInstanceOf[AttributeName]
@inline def values = js.Array(SIGN, SYMPTOM, DIAGNOSIS, NEGATION)
}
/** Provides information for filtering a list of detection jobs.
*/
@js.native
trait ComprehendMedicalAsyncJobFilter extends js.Object {
var JobName: js.UndefOr[JobName]
var JobStatus: js.UndefOr[JobStatus]
var SubmitTimeAfter: js.UndefOr[Timestamp]
var SubmitTimeBefore: js.UndefOr[Timestamp]
}
object ComprehendMedicalAsyncJobFilter {
/** Builds a filter as a JS object literal; undefined fields are omitted. */
@inline
def apply(
JobName: js.UndefOr[JobName] = js.undefined,
JobStatus: js.UndefOr[JobStatus] = js.undefined,
SubmitTimeAfter: js.UndefOr[Timestamp] = js.undefined,
SubmitTimeBefore: js.UndefOr[Timestamp] = js.undefined
): ComprehendMedicalAsyncJobFilter = {
val __obj = js.Dynamic.literal()
JobName.foreach(__v => __obj.updateDynamic("JobName")(__v.asInstanceOf[js.Any]))
JobStatus.foreach(__v => __obj.updateDynamic("JobStatus")(__v.asInstanceOf[js.Any]))
SubmitTimeAfter.foreach(__v => __obj.updateDynamic("SubmitTimeAfter")(__v.asInstanceOf[js.Any]))
SubmitTimeBefore.foreach(__v => __obj.updateDynamic("SubmitTimeBefore")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ComprehendMedicalAsyncJobFilter]
}
}
/** Provides information about a detection job.
*/
@js.native
trait ComprehendMedicalAsyncJobProperties extends js.Object {
var DataAccessRoleArn: js.UndefOr[IamRoleArn]
var EndTime: js.UndefOr[Timestamp]
var ExpirationTime: js.UndefOr[Timestamp]
var InputDataConfig: js.UndefOr[InputDataConfig]
var JobId: js.UndefOr[JobId]
var JobName: js.UndefOr[JobName]
var JobStatus: js.UndefOr[JobStatus]
var KMSKey: js.UndefOr[KMSKey]
var LanguageCode: js.UndefOr[LanguageCode]
var ManifestFilePath: js.UndefOr[ManifestFilePath]
var Message: js.UndefOr[AnyLengthString]
var ModelVersion: js.UndefOr[ModelVersion]
var OutputDataConfig: js.UndefOr[OutputDataConfig]
var SubmitTime: js.UndefOr[Timestamp]
}
object ComprehendMedicalAsyncJobProperties {
/** Builds job properties as a JS object literal; undefined fields are omitted. */
@inline
def apply(
DataAccessRoleArn: js.UndefOr[IamRoleArn] = js.undefined,
EndTime: js.UndefOr[Timestamp] = js.undefined,
ExpirationTime: js.UndefOr[Timestamp] = js.undefined,
InputDataConfig: js.UndefOr[InputDataConfig] = js.undefined,
JobId: js.UndefOr[JobId] = js.undefined,
JobName: js.UndefOr[JobName] = js.undefined,
JobStatus: js.UndefOr[JobStatus] = js.undefined,
KMSKey: js.UndefOr[KMSKey] = js.undefined,
LanguageCode: js.UndefOr[LanguageCode] = js.undefined,
ManifestFilePath: js.UndefOr[ManifestFilePath] = js.undefined,
Message: js.UndefOr[AnyLengthString] = js.undefined,
ModelVersion: js.UndefOr[ModelVersion] = js.undefined,
OutputDataConfig: js.UndefOr[OutputDataConfig] = js.undefined,
SubmitTime: js.UndefOr[Timestamp] = js.undefined
): ComprehendMedicalAsyncJobProperties = {
val __obj = js.Dynamic.literal()
DataAccessRoleArn.foreach(__v => __obj.updateDynamic("DataAccessRoleArn")(__v.asInstanceOf[js.Any]))
EndTime.foreach(__v => __obj.updateDynamic("EndTime")(__v.asInstanceOf[js.Any]))
ExpirationTime.foreach(__v => __obj.updateDynamic("ExpirationTime")(__v.asInstanceOf[js.Any]))
InputDataConfig.foreach(__v => __obj.updateDynamic("InputDataConfig")(__v.asInstanceOf[js.Any]))
JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
JobName.foreach(__v => __obj.updateDynamic("JobName")(__v.asInstanceOf[js.Any]))
JobStatus.foreach(__v => __obj.updateDynamic("JobStatus")(__v.asInstanceOf[js.Any]))
KMSKey.foreach(__v => __obj.updateDynamic("KMSKey")(__v.asInstanceOf[js.Any]))
LanguageCode.foreach(__v => __obj.updateDynamic("LanguageCode")(__v.asInstanceOf[js.Any]))
ManifestFilePath.foreach(__v => __obj.updateDynamic("ManifestFilePath")(__v.asInstanceOf[js.Any]))
Message.foreach(__v => __obj.updateDynamic("Message")(__v.asInstanceOf[js.Any]))
ModelVersion.foreach(__v => __obj.updateDynamic("ModelVersion")(__v.asInstanceOf[js.Any]))
OutputDataConfig.foreach(__v => __obj.updateDynamic("OutputDataConfig")(__v.asInstanceOf[js.Any]))
SubmitTime.foreach(__v => __obj.updateDynamic("SubmitTime")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ComprehendMedicalAsyncJobProperties]
}
}
/** Request/response DTO pairs for the four Describe* job operations.
  * Each request carries only the JobId; each response optionally carries the
  * job's ComprehendMedicalAsyncJobProperties.
  */
@js.native
trait DescribeEntitiesDetectionV2JobRequest extends js.Object {
var JobId: JobId
}
object DescribeEntitiesDetectionV2JobRequest {
@inline
def apply(
JobId: JobId
): DescribeEntitiesDetectionV2JobRequest = {
val __obj = js.Dynamic.literal(
"JobId" -> JobId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DescribeEntitiesDetectionV2JobRequest]
}
}
@js.native
trait DescribeEntitiesDetectionV2JobResponse extends js.Object {
var ComprehendMedicalAsyncJobProperties: js.UndefOr[ComprehendMedicalAsyncJobProperties]
}
object DescribeEntitiesDetectionV2JobResponse {
@inline
def apply(
ComprehendMedicalAsyncJobProperties: js.UndefOr[ComprehendMedicalAsyncJobProperties] = js.undefined
): DescribeEntitiesDetectionV2JobResponse = {
val __obj = js.Dynamic.literal()
ComprehendMedicalAsyncJobProperties.foreach(__v => __obj.updateDynamic("ComprehendMedicalAsyncJobProperties")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[DescribeEntitiesDetectionV2JobResponse]
}
}
@js.native
trait DescribeICD10CMInferenceJobRequest extends js.Object {
var JobId: JobId
}
object DescribeICD10CMInferenceJobRequest {
@inline
def apply(
JobId: JobId
): DescribeICD10CMInferenceJobRequest = {
val __obj = js.Dynamic.literal(
"JobId" -> JobId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DescribeICD10CMInferenceJobRequest]
}
}
@js.native
trait DescribeICD10CMInferenceJobResponse extends js.Object {
var ComprehendMedicalAsyncJobProperties: js.UndefOr[ComprehendMedicalAsyncJobProperties]
}
object DescribeICD10CMInferenceJobResponse {
@inline
def apply(
ComprehendMedicalAsyncJobProperties: js.UndefOr[ComprehendMedicalAsyncJobProperties] = js.undefined
): DescribeICD10CMInferenceJobResponse = {
val __obj = js.Dynamic.literal()
ComprehendMedicalAsyncJobProperties.foreach(__v => __obj.updateDynamic("ComprehendMedicalAsyncJobProperties")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[DescribeICD10CMInferenceJobResponse]
}
}
@js.native
trait DescribePHIDetectionJobRequest extends js.Object {
var JobId: JobId
}
object DescribePHIDetectionJobRequest {
@inline
def apply(
JobId: JobId
): DescribePHIDetectionJobRequest = {
val __obj = js.Dynamic.literal(
"JobId" -> JobId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DescribePHIDetectionJobRequest]
}
}
@js.native
trait DescribePHIDetectionJobResponse extends js.Object {
var ComprehendMedicalAsyncJobProperties: js.UndefOr[ComprehendMedicalAsyncJobProperties]
}
object DescribePHIDetectionJobResponse {
@inline
def apply(
ComprehendMedicalAsyncJobProperties: js.UndefOr[ComprehendMedicalAsyncJobProperties] = js.undefined
): DescribePHIDetectionJobResponse = {
val __obj = js.Dynamic.literal()
ComprehendMedicalAsyncJobProperties.foreach(__v => __obj.updateDynamic("ComprehendMedicalAsyncJobProperties")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[DescribePHIDetectionJobResponse]
}
}
@js.native
trait DescribeRxNormInferenceJobRequest extends js.Object {
var JobId: JobId
}
object DescribeRxNormInferenceJobRequest {
@inline
def apply(
JobId: JobId
): DescribeRxNormInferenceJobRequest = {
val __obj = js.Dynamic.literal(
"JobId" -> JobId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DescribeRxNormInferenceJobRequest]
}
}
@js.native
trait DescribeRxNormInferenceJobResponse extends js.Object {
var ComprehendMedicalAsyncJobProperties: js.UndefOr[ComprehendMedicalAsyncJobProperties]
}
object DescribeRxNormInferenceJobResponse {
@inline
def apply(
ComprehendMedicalAsyncJobProperties: js.UndefOr[ComprehendMedicalAsyncJobProperties] = js.undefined
): DescribeRxNormInferenceJobResponse = {
val __obj = js.Dynamic.literal()
ComprehendMedicalAsyncJobProperties.foreach(__v => __obj.updateDynamic("ComprehendMedicalAsyncJobProperties")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[DescribeRxNormInferenceJobResponse]
}
}
/** Request/response DTOs for the synchronous DetectEntities (deprecated),
  * DetectEntitiesV2 and DetectPHI operations. Each request carries only the
  * input Text; responses carry the detected entities plus model/pagination
  * metadata.
  */
@js.native
trait DetectEntitiesRequest extends js.Object {
var Text: BoundedLengthString
}
object DetectEntitiesRequest {
@inline
def apply(
Text: BoundedLengthString
): DetectEntitiesRequest = {
val __obj = js.Dynamic.literal(
"Text" -> Text.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DetectEntitiesRequest]
}
}
@js.native
trait DetectEntitiesResponse extends js.Object {
var Entities: EntityList
var ModelVersion: String
var PaginationToken: js.UndefOr[String]
var UnmappedAttributes: js.UndefOr[UnmappedAttributeList]
}
object DetectEntitiesResponse {
@inline
def apply(
Entities: EntityList,
ModelVersion: String,
PaginationToken: js.UndefOr[String] = js.undefined,
UnmappedAttributes: js.UndefOr[UnmappedAttributeList] = js.undefined
): DetectEntitiesResponse = {
val __obj = js.Dynamic.literal(
"Entities" -> Entities.asInstanceOf[js.Any],
"ModelVersion" -> ModelVersion.asInstanceOf[js.Any]
)
PaginationToken.foreach(__v => __obj.updateDynamic("PaginationToken")(__v.asInstanceOf[js.Any]))
UnmappedAttributes.foreach(__v => __obj.updateDynamic("UnmappedAttributes")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[DetectEntitiesResponse]
}
}
@js.native
trait DetectEntitiesV2Request extends js.Object {
var Text: BoundedLengthString
}
object DetectEntitiesV2Request {
@inline
def apply(
Text: BoundedLengthString
): DetectEntitiesV2Request = {
val __obj = js.Dynamic.literal(
"Text" -> Text.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DetectEntitiesV2Request]
}
}
@js.native
trait DetectEntitiesV2Response extends js.Object {
var Entities: EntityList
var ModelVersion: String
var PaginationToken: js.UndefOr[String]
var UnmappedAttributes: js.UndefOr[UnmappedAttributeList]
}
object DetectEntitiesV2Response {
@inline
def apply(
Entities: EntityList,
ModelVersion: String,
PaginationToken: js.UndefOr[String] = js.undefined,
UnmappedAttributes: js.UndefOr[UnmappedAttributeList] = js.undefined
): DetectEntitiesV2Response = {
val __obj = js.Dynamic.literal(
"Entities" -> Entities.asInstanceOf[js.Any],
"ModelVersion" -> ModelVersion.asInstanceOf[js.Any]
)
PaginationToken.foreach(__v => __obj.updateDynamic("PaginationToken")(__v.asInstanceOf[js.Any]))
UnmappedAttributes.foreach(__v => __obj.updateDynamic("UnmappedAttributes")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[DetectEntitiesV2Response]
}
}
@js.native
trait DetectPHIRequest extends js.Object {
var Text: BoundedLengthString
}
object DetectPHIRequest {
@inline
def apply(
Text: BoundedLengthString
): DetectPHIRequest = {
val __obj = js.Dynamic.literal(
"Text" -> Text.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DetectPHIRequest]
}
}
@js.native
trait DetectPHIResponse extends js.Object {
var Entities: EntityList
var ModelVersion: String
var PaginationToken: js.UndefOr[String]
}
object DetectPHIResponse {
@inline
def apply(
Entities: EntityList,
ModelVersion: String,
PaginationToken: js.UndefOr[String] = js.undefined
): DetectPHIResponse = {
val __obj = js.Dynamic.literal(
"Entities" -> Entities.asInstanceOf[js.Any],
"ModelVersion" -> ModelVersion.asInstanceOf[js.Any]
)
PaginationToken.foreach(__v => __obj.updateDynamic("PaginationToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[DetectPHIResponse]
}
}
/** Provides information about an extracted medical entity.
*/
@js.native
trait Entity extends js.Object {
var Attributes: js.UndefOr[AttributeList]
var BeginOffset: js.UndefOr[Int]
var Category: js.UndefOr[EntityType]
var EndOffset: js.UndefOr[Int]
var Id: js.UndefOr[Int]
var Score: js.UndefOr[Float]
var Text: js.UndefOr[String]
var Traits: js.UndefOr[TraitList]
var Type: js.UndefOr[EntitySubType]
}
object Entity {
/** Builds an Entity as a JS object literal; undefined fields are omitted. */
@inline
def apply(
Attributes: js.UndefOr[AttributeList] = js.undefined,
BeginOffset: js.UndefOr[Int] = js.undefined,
Category: js.UndefOr[EntityType] = js.undefined,
EndOffset: js.UndefOr[Int] = js.undefined,
Id: js.UndefOr[Int] = js.undefined,
Score: js.UndefOr[Float] = js.undefined,
Text: js.UndefOr[String] = js.undefined,
Traits: js.UndefOr[TraitList] = js.undefined,
Type: js.UndefOr[EntitySubType] = js.undefined
): Entity = {
val __obj = js.Dynamic.literal()
Attributes.foreach(__v => __obj.updateDynamic("Attributes")(__v.asInstanceOf[js.Any]))
BeginOffset.foreach(__v => __obj.updateDynamic("BeginOffset")(__v.asInstanceOf[js.Any]))
Category.foreach(__v => __obj.updateDynamic("Category")(__v.asInstanceOf[js.Any]))
EndOffset.foreach(__v => __obj.updateDynamic("EndOffset")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
Text.foreach(__v => __obj.updateDynamic("Text")(__v.asInstanceOf[js.Any]))
Traits.foreach(__v => __obj.updateDynamic("Traits")(__v.asInstanceOf[js.Any]))
Type.foreach(__v => __obj.updateDynamic("Type")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Entity]
}
}
  /** Fine-grained type of a detected entity (string-backed enum facade). */
  @js.native
  sealed trait EntitySubType extends js.Any
  object EntitySubType {
    val NAME = "NAME".asInstanceOf[EntitySubType]
    val DOSAGE = "DOSAGE".asInstanceOf[EntitySubType]
    val ROUTE_OR_MODE = "ROUTE_OR_MODE".asInstanceOf[EntitySubType]
    val FORM = "FORM".asInstanceOf[EntitySubType]
    val FREQUENCY = "FREQUENCY".asInstanceOf[EntitySubType]
    val DURATION = "DURATION".asInstanceOf[EntitySubType]
    val GENERIC_NAME = "GENERIC_NAME".asInstanceOf[EntitySubType]
    val BRAND_NAME = "BRAND_NAME".asInstanceOf[EntitySubType]
    val STRENGTH = "STRENGTH".asInstanceOf[EntitySubType]
    val RATE = "RATE".asInstanceOf[EntitySubType]
    val ACUITY = "ACUITY".asInstanceOf[EntitySubType]
    val TEST_NAME = "TEST_NAME".asInstanceOf[EntitySubType]
    val TEST_VALUE = "TEST_VALUE".asInstanceOf[EntitySubType]
    val TEST_UNITS = "TEST_UNITS".asInstanceOf[EntitySubType]
    val PROCEDURE_NAME = "PROCEDURE_NAME".asInstanceOf[EntitySubType]
    val TREATMENT_NAME = "TREATMENT_NAME".asInstanceOf[EntitySubType]
    val DATE = "DATE".asInstanceOf[EntitySubType]
    val AGE = "AGE".asInstanceOf[EntitySubType]
    val CONTACT_POINT = "CONTACT_POINT".asInstanceOf[EntitySubType]
    val EMAIL = "EMAIL".asInstanceOf[EntitySubType]
    val IDENTIFIER = "IDENTIFIER".asInstanceOf[EntitySubType]
    val URL = "URL".asInstanceOf[EntitySubType]
    val ADDRESS = "ADDRESS".asInstanceOf[EntitySubType]
    val PROFESSION = "PROFESSION".asInstanceOf[EntitySubType]
    val SYSTEM_ORGAN_SITE = "SYSTEM_ORGAN_SITE".asInstanceOf[EntitySubType]
    val DIRECTION = "DIRECTION".asInstanceOf[EntitySubType]
    val QUALITY = "QUALITY".asInstanceOf[EntitySubType]
    val QUANTITY = "QUANTITY".asInstanceOf[EntitySubType]
    val TIME_EXPRESSION = "TIME_EXPRESSION".asInstanceOf[EntitySubType]
    val TIME_TO_MEDICATION_NAME = "TIME_TO_MEDICATION_NAME".asInstanceOf[EntitySubType]
    val TIME_TO_DX_NAME = "TIME_TO_DX_NAME".asInstanceOf[EntitySubType]
    val TIME_TO_TEST_NAME = "TIME_TO_TEST_NAME".asInstanceOf[EntitySubType]
    val TIME_TO_PROCEDURE_NAME = "TIME_TO_PROCEDURE_NAME".asInstanceOf[EntitySubType]
    val TIME_TO_TREATMENT_NAME = "TIME_TO_TREATMENT_NAME".asInstanceOf[EntitySubType]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(
      NAME,
      DOSAGE,
      ROUTE_OR_MODE,
      FORM,
      FREQUENCY,
      DURATION,
      GENERIC_NAME,
      BRAND_NAME,
      STRENGTH,
      RATE,
      ACUITY,
      TEST_NAME,
      TEST_VALUE,
      TEST_UNITS,
      PROCEDURE_NAME,
      TREATMENT_NAME,
      DATE,
      AGE,
      CONTACT_POINT,
      EMAIL,
      IDENTIFIER,
      URL,
      ADDRESS,
      PROFESSION,
      SYSTEM_ORGAN_SITE,
      DIRECTION,
      QUALITY,
      QUANTITY,
      TIME_EXPRESSION,
      TIME_TO_MEDICATION_NAME,
      TIME_TO_DX_NAME,
      TIME_TO_TEST_NAME,
      TIME_TO_PROCEDURE_NAME,
      TIME_TO_TREATMENT_NAME
    )
  }
  /** Top-level category of a detected entity (string-backed enum facade). */
  @js.native
  sealed trait EntityType extends js.Any
  object EntityType {
    val MEDICATION = "MEDICATION".asInstanceOf[EntityType]
    val MEDICAL_CONDITION = "MEDICAL_CONDITION".asInstanceOf[EntityType]
    val PROTECTED_HEALTH_INFORMATION = "PROTECTED_HEALTH_INFORMATION".asInstanceOf[EntityType]
    val TEST_TREATMENT_PROCEDURE = "TEST_TREATMENT_PROCEDURE".asInstanceOf[EntityType]
    val ANATOMY = "ANATOMY".asInstanceOf[EntityType]
    val TIME_EXPRESSION = "TIME_EXPRESSION".asInstanceOf[EntityType]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(MEDICATION, MEDICAL_CONDITION, PROTECTED_HEALTH_INFORMATION, TEST_TREATMENT_PROCEDURE, ANATOMY, TIME_EXPRESSION)
  }
/** The detected attributes that relate to an entity. This includes an extracted segment of the text that is an attribute of an entity, or otherwise related to an entity. InferICD10CM detects the following attributes: <code>Direction</code>, <code>System, Organ or Site</code>, and <code>Acuity</code>.
*/
@js.native
trait ICD10CMAttribute extends js.Object {
var BeginOffset: js.UndefOr[Int]
var Category: js.UndefOr[ICD10CMEntityType]
var EndOffset: js.UndefOr[Int]
var Id: js.UndefOr[Int]
var RelationshipScore: js.UndefOr[Float]
var RelationshipType: js.UndefOr[ICD10CMRelationshipType]
var Score: js.UndefOr[Float]
var Text: js.UndefOr[String]
var Traits: js.UndefOr[ICD10CMTraitList]
var Type: js.UndefOr[ICD10CMAttributeType]
}
object ICD10CMAttribute {
@inline
def apply(
BeginOffset: js.UndefOr[Int] = js.undefined,
Category: js.UndefOr[ICD10CMEntityType] = js.undefined,
EndOffset: js.UndefOr[Int] = js.undefined,
Id: js.UndefOr[Int] = js.undefined,
RelationshipScore: js.UndefOr[Float] = js.undefined,
RelationshipType: js.UndefOr[ICD10CMRelationshipType] = js.undefined,
Score: js.UndefOr[Float] = js.undefined,
Text: js.UndefOr[String] = js.undefined,
Traits: js.UndefOr[ICD10CMTraitList] = js.undefined,
Type: js.UndefOr[ICD10CMAttributeType] = js.undefined
): ICD10CMAttribute = {
val __obj = js.Dynamic.literal()
BeginOffset.foreach(__v => __obj.updateDynamic("BeginOffset")(__v.asInstanceOf[js.Any]))
Category.foreach(__v => __obj.updateDynamic("Category")(__v.asInstanceOf[js.Any]))
EndOffset.foreach(__v => __obj.updateDynamic("EndOffset")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
RelationshipScore.foreach(__v => __obj.updateDynamic("RelationshipScore")(__v.asInstanceOf[js.Any]))
RelationshipType.foreach(__v => __obj.updateDynamic("RelationshipType")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
Text.foreach(__v => __obj.updateDynamic("Text")(__v.asInstanceOf[js.Any]))
Traits.foreach(__v => __obj.updateDynamic("Traits")(__v.asInstanceOf[js.Any]))
Type.foreach(__v => __obj.updateDynamic("Type")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ICD10CMAttribute]
}
}
  /** Type of an ICD-10-CM attribute (string-backed enum facade). */
  @js.native
  sealed trait ICD10CMAttributeType extends js.Any
  object ICD10CMAttributeType {
    val ACUITY = "ACUITY".asInstanceOf[ICD10CMAttributeType]
    val DIRECTION = "DIRECTION".asInstanceOf[ICD10CMAttributeType]
    val SYSTEM_ORGAN_SITE = "SYSTEM_ORGAN_SITE".asInstanceOf[ICD10CMAttributeType]
    val QUALITY = "QUALITY".asInstanceOf[ICD10CMAttributeType]
    val QUANTITY = "QUANTITY".asInstanceOf[ICD10CMAttributeType]
    val TIME_TO_DX_NAME = "TIME_TO_DX_NAME".asInstanceOf[ICD10CMAttributeType]
    val TIME_EXPRESSION = "TIME_EXPRESSION".asInstanceOf[ICD10CMAttributeType]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(ACUITY, DIRECTION, SYSTEM_ORGAN_SITE, QUALITY, QUANTITY, TIME_TO_DX_NAME, TIME_EXPRESSION)
  }
/** The ICD-10-CM concepts that the entity could refer to, along with a score indicating the likelihood of the match.
*/
@js.native
trait ICD10CMConcept extends js.Object {
var Code: js.UndefOr[String]
var Description: js.UndefOr[String]
var Score: js.UndefOr[Float]
}
object ICD10CMConcept {
@inline
def apply(
Code: js.UndefOr[String] = js.undefined,
Description: js.UndefOr[String] = js.undefined,
Score: js.UndefOr[Float] = js.undefined
): ICD10CMConcept = {
val __obj = js.Dynamic.literal()
Code.foreach(__v => __obj.updateDynamic("Code")(__v.asInstanceOf[js.Any]))
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ICD10CMConcept]
}
}
/** The collection of medical entities extracted from the input text and their associated information. For each entity, the response provides the entity text, the entity category, where the entity text begins and ends, and the level of confidence that Amazon Comprehend Medical has in the detection and analysis. Attributes and traits of the entity are also returned.
*/
@js.native
trait ICD10CMEntity extends js.Object {
var Attributes: js.UndefOr[ICD10CMAttributeList]
var BeginOffset: js.UndefOr[Int]
var Category: js.UndefOr[ICD10CMEntityCategory]
var EndOffset: js.UndefOr[Int]
var ICD10CMConcepts: js.UndefOr[ICD10CMConceptList]
var Id: js.UndefOr[Int]
var Score: js.UndefOr[Float]
var Text: js.UndefOr[OntologyLinkingBoundedLengthString]
var Traits: js.UndefOr[ICD10CMTraitList]
var Type: js.UndefOr[ICD10CMEntityType]
}
object ICD10CMEntity {
@inline
def apply(
Attributes: js.UndefOr[ICD10CMAttributeList] = js.undefined,
BeginOffset: js.UndefOr[Int] = js.undefined,
Category: js.UndefOr[ICD10CMEntityCategory] = js.undefined,
EndOffset: js.UndefOr[Int] = js.undefined,
ICD10CMConcepts: js.UndefOr[ICD10CMConceptList] = js.undefined,
Id: js.UndefOr[Int] = js.undefined,
Score: js.UndefOr[Float] = js.undefined,
Text: js.UndefOr[OntologyLinkingBoundedLengthString] = js.undefined,
Traits: js.UndefOr[ICD10CMTraitList] = js.undefined,
Type: js.UndefOr[ICD10CMEntityType] = js.undefined
): ICD10CMEntity = {
val __obj = js.Dynamic.literal()
Attributes.foreach(__v => __obj.updateDynamic("Attributes")(__v.asInstanceOf[js.Any]))
BeginOffset.foreach(__v => __obj.updateDynamic("BeginOffset")(__v.asInstanceOf[js.Any]))
Category.foreach(__v => __obj.updateDynamic("Category")(__v.asInstanceOf[js.Any]))
EndOffset.foreach(__v => __obj.updateDynamic("EndOffset")(__v.asInstanceOf[js.Any]))
ICD10CMConcepts.foreach(__v => __obj.updateDynamic("ICD10CMConcepts")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
Text.foreach(__v => __obj.updateDynamic("Text")(__v.asInstanceOf[js.Any]))
Traits.foreach(__v => __obj.updateDynamic("Traits")(__v.asInstanceOf[js.Any]))
Type.foreach(__v => __obj.updateDynamic("Type")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ICD10CMEntity]
}
}
  /** Category of an ICD-10-CM entity (string-backed enum facade). */
  @js.native
  sealed trait ICD10CMEntityCategory extends js.Any
  object ICD10CMEntityCategory {
    val MEDICAL_CONDITION = "MEDICAL_CONDITION".asInstanceOf[ICD10CMEntityCategory]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(MEDICAL_CONDITION)
  }
  /** Type of an ICD-10-CM entity (string-backed enum facade). */
  @js.native
  sealed trait ICD10CMEntityType extends js.Any
  object ICD10CMEntityType {
    val DX_NAME = "DX_NAME".asInstanceOf[ICD10CMEntityType]
    val TIME_EXPRESSION = "TIME_EXPRESSION".asInstanceOf[ICD10CMEntityType]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(DX_NAME, TIME_EXPRESSION)
  }
  /** Relationship between an ICD-10-CM entity and an attribute (string-backed enum facade). */
  @js.native
  sealed trait ICD10CMRelationshipType extends js.Any
  object ICD10CMRelationshipType {
    val OVERLAP = "OVERLAP".asInstanceOf[ICD10CMRelationshipType]
    val SYSTEM_ORGAN_SITE = "SYSTEM_ORGAN_SITE".asInstanceOf[ICD10CMRelationshipType]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(OVERLAP, SYSTEM_ORGAN_SITE)
  }
/** Contextual information for the entity. The traits recognized by InferICD10CM are <code>DIAGNOSIS</code>, <code>SIGN</code>, <code>SYMPTOM</code>, and <code>NEGATION</code>.
*/
@js.native
trait ICD10CMTrait extends js.Object {
var Name: js.UndefOr[ICD10CMTraitName]
var Score: js.UndefOr[Float]
}
object ICD10CMTrait {
@inline
def apply(
Name: js.UndefOr[ICD10CMTraitName] = js.undefined,
Score: js.UndefOr[Float] = js.undefined
): ICD10CMTrait = {
val __obj = js.Dynamic.literal()
Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ICD10CMTrait]
}
}
  /** Name of an ICD-10-CM trait (string-backed enum facade). */
  @js.native
  sealed trait ICD10CMTraitName extends js.Any
  object ICD10CMTraitName {
    val NEGATION = "NEGATION".asInstanceOf[ICD10CMTraitName]
    val DIAGNOSIS = "DIAGNOSIS".asInstanceOf[ICD10CMTraitName]
    val SIGN = "SIGN".asInstanceOf[ICD10CMTraitName]
    val SYMPTOM = "SYMPTOM".asInstanceOf[ICD10CMTraitName]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(NEGATION, DIAGNOSIS, SIGN, SYMPTOM)
  }
@js.native
trait InferICD10CMRequest extends js.Object {
var Text: OntologyLinkingBoundedLengthString
}
object InferICD10CMRequest {
@inline
def apply(
Text: OntologyLinkingBoundedLengthString
): InferICD10CMRequest = {
val __obj = js.Dynamic.literal(
"Text" -> Text.asInstanceOf[js.Any]
)
__obj.asInstanceOf[InferICD10CMRequest]
}
}
@js.native
trait InferICD10CMResponse extends js.Object {
var Entities: ICD10CMEntityList
var ModelVersion: js.UndefOr[String]
var PaginationToken: js.UndefOr[String]
}
object InferICD10CMResponse {
@inline
def apply(
Entities: ICD10CMEntityList,
ModelVersion: js.UndefOr[String] = js.undefined,
PaginationToken: js.UndefOr[String] = js.undefined
): InferICD10CMResponse = {
val __obj = js.Dynamic.literal(
"Entities" -> Entities.asInstanceOf[js.Any]
)
ModelVersion.foreach(__v => __obj.updateDynamic("ModelVersion")(__v.asInstanceOf[js.Any]))
PaginationToken.foreach(__v => __obj.updateDynamic("PaginationToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[InferICD10CMResponse]
}
}
@js.native
trait InferRxNormRequest extends js.Object {
var Text: OntologyLinkingBoundedLengthString
}
object InferRxNormRequest {
@inline
def apply(
Text: OntologyLinkingBoundedLengthString
): InferRxNormRequest = {
val __obj = js.Dynamic.literal(
"Text" -> Text.asInstanceOf[js.Any]
)
__obj.asInstanceOf[InferRxNormRequest]
}
}
@js.native
trait InferRxNormResponse extends js.Object {
var Entities: RxNormEntityList
var ModelVersion: js.UndefOr[String]
var PaginationToken: js.UndefOr[String]
}
object InferRxNormResponse {
@inline
def apply(
Entities: RxNormEntityList,
ModelVersion: js.UndefOr[String] = js.undefined,
PaginationToken: js.UndefOr[String] = js.undefined
): InferRxNormResponse = {
val __obj = js.Dynamic.literal(
"Entities" -> Entities.asInstanceOf[js.Any]
)
ModelVersion.foreach(__v => __obj.updateDynamic("ModelVersion")(__v.asInstanceOf[js.Any]))
PaginationToken.foreach(__v => __obj.updateDynamic("PaginationToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[InferRxNormResponse]
}
}
/** The input properties for an entities detection job. This includes the name of the S3 bucket and the path to the files to be analyzed.
*/
@js.native
trait InputDataConfig extends js.Object {
var S3Bucket: S3Bucket
var S3Key: js.UndefOr[S3Key]
}
object InputDataConfig {
@inline
def apply(
S3Bucket: S3Bucket,
S3Key: js.UndefOr[S3Key] = js.undefined
): InputDataConfig = {
val __obj = js.Dynamic.literal(
"S3Bucket" -> S3Bucket.asInstanceOf[js.Any]
)
S3Key.foreach(__v => __obj.updateDynamic("S3Key")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[InputDataConfig]
}
}
  /** Status of an asynchronous Comprehend Medical job (string-backed enum facade). */
  @js.native
  sealed trait JobStatus extends js.Any
  object JobStatus {
    val SUBMITTED = "SUBMITTED".asInstanceOf[JobStatus]
    val IN_PROGRESS = "IN_PROGRESS".asInstanceOf[JobStatus]
    val COMPLETED = "COMPLETED".asInstanceOf[JobStatus]
    val PARTIAL_SUCCESS = "PARTIAL_SUCCESS".asInstanceOf[JobStatus]
    val FAILED = "FAILED".asInstanceOf[JobStatus]
    val STOP_REQUESTED = "STOP_REQUESTED".asInstanceOf[JobStatus]
    val STOPPED = "STOPPED".asInstanceOf[JobStatus]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(SUBMITTED, IN_PROGRESS, COMPLETED, PARTIAL_SUCCESS, FAILED, STOP_REQUESTED, STOPPED)
  }
  /** Supported input language (string-backed enum facade); only English ("en"). */
  @js.native
  sealed trait LanguageCode extends js.Any
  object LanguageCode {
    val en = "en".asInstanceOf[LanguageCode]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(en)
  }
@js.native
trait ListEntitiesDetectionV2JobsRequest extends js.Object {
var Filter: js.UndefOr[ComprehendMedicalAsyncJobFilter]
var MaxResults: js.UndefOr[MaxResultsInteger]
var NextToken: js.UndefOr[String]
}
object ListEntitiesDetectionV2JobsRequest {
@inline
def apply(
Filter: js.UndefOr[ComprehendMedicalAsyncJobFilter] = js.undefined,
MaxResults: js.UndefOr[MaxResultsInteger] = js.undefined,
NextToken: js.UndefOr[String] = js.undefined
): ListEntitiesDetectionV2JobsRequest = {
val __obj = js.Dynamic.literal()
Filter.foreach(__v => __obj.updateDynamic("Filter")(__v.asInstanceOf[js.Any]))
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListEntitiesDetectionV2JobsRequest]
}
}
@js.native
trait ListEntitiesDetectionV2JobsResponse extends js.Object {
var ComprehendMedicalAsyncJobPropertiesList: js.UndefOr[ComprehendMedicalAsyncJobPropertiesList]
var NextToken: js.UndefOr[String]
}
object ListEntitiesDetectionV2JobsResponse {
@inline
def apply(
ComprehendMedicalAsyncJobPropertiesList: js.UndefOr[ComprehendMedicalAsyncJobPropertiesList] = js.undefined,
NextToken: js.UndefOr[String] = js.undefined
): ListEntitiesDetectionV2JobsResponse = {
val __obj = js.Dynamic.literal()
ComprehendMedicalAsyncJobPropertiesList.foreach(__v => __obj.updateDynamic("ComprehendMedicalAsyncJobPropertiesList")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListEntitiesDetectionV2JobsResponse]
}
}
@js.native
trait ListICD10CMInferenceJobsRequest extends js.Object {
var Filter: js.UndefOr[ComprehendMedicalAsyncJobFilter]
var MaxResults: js.UndefOr[MaxResultsInteger]
var NextToken: js.UndefOr[String]
}
object ListICD10CMInferenceJobsRequest {
@inline
def apply(
Filter: js.UndefOr[ComprehendMedicalAsyncJobFilter] = js.undefined,
MaxResults: js.UndefOr[MaxResultsInteger] = js.undefined,
NextToken: js.UndefOr[String] = js.undefined
): ListICD10CMInferenceJobsRequest = {
val __obj = js.Dynamic.literal()
Filter.foreach(__v => __obj.updateDynamic("Filter")(__v.asInstanceOf[js.Any]))
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListICD10CMInferenceJobsRequest]
}
}
@js.native
trait ListICD10CMInferenceJobsResponse extends js.Object {
var ComprehendMedicalAsyncJobPropertiesList: js.UndefOr[ComprehendMedicalAsyncJobPropertiesList]
var NextToken: js.UndefOr[String]
}
object ListICD10CMInferenceJobsResponse {
@inline
def apply(
ComprehendMedicalAsyncJobPropertiesList: js.UndefOr[ComprehendMedicalAsyncJobPropertiesList] = js.undefined,
NextToken: js.UndefOr[String] = js.undefined
): ListICD10CMInferenceJobsResponse = {
val __obj = js.Dynamic.literal()
ComprehendMedicalAsyncJobPropertiesList.foreach(__v => __obj.updateDynamic("ComprehendMedicalAsyncJobPropertiesList")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListICD10CMInferenceJobsResponse]
}
}
@js.native
trait ListPHIDetectionJobsRequest extends js.Object {
var Filter: js.UndefOr[ComprehendMedicalAsyncJobFilter]
var MaxResults: js.UndefOr[MaxResultsInteger]
var NextToken: js.UndefOr[String]
}
object ListPHIDetectionJobsRequest {
@inline
def apply(
Filter: js.UndefOr[ComprehendMedicalAsyncJobFilter] = js.undefined,
MaxResults: js.UndefOr[MaxResultsInteger] = js.undefined,
NextToken: js.UndefOr[String] = js.undefined
): ListPHIDetectionJobsRequest = {
val __obj = js.Dynamic.literal()
Filter.foreach(__v => __obj.updateDynamic("Filter")(__v.asInstanceOf[js.Any]))
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListPHIDetectionJobsRequest]
}
}
@js.native
trait ListPHIDetectionJobsResponse extends js.Object {
var ComprehendMedicalAsyncJobPropertiesList: js.UndefOr[ComprehendMedicalAsyncJobPropertiesList]
var NextToken: js.UndefOr[String]
}
object ListPHIDetectionJobsResponse {
@inline
def apply(
ComprehendMedicalAsyncJobPropertiesList: js.UndefOr[ComprehendMedicalAsyncJobPropertiesList] = js.undefined,
NextToken: js.UndefOr[String] = js.undefined
): ListPHIDetectionJobsResponse = {
val __obj = js.Dynamic.literal()
ComprehendMedicalAsyncJobPropertiesList.foreach(__v => __obj.updateDynamic("ComprehendMedicalAsyncJobPropertiesList")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListPHIDetectionJobsResponse]
}
}
@js.native
trait ListRxNormInferenceJobsRequest extends js.Object {
var Filter: js.UndefOr[ComprehendMedicalAsyncJobFilter]
var MaxResults: js.UndefOr[MaxResultsInteger]
var NextToken: js.UndefOr[String]
}
object ListRxNormInferenceJobsRequest {
@inline
def apply(
Filter: js.UndefOr[ComprehendMedicalAsyncJobFilter] = js.undefined,
MaxResults: js.UndefOr[MaxResultsInteger] = js.undefined,
NextToken: js.UndefOr[String] = js.undefined
): ListRxNormInferenceJobsRequest = {
val __obj = js.Dynamic.literal()
Filter.foreach(__v => __obj.updateDynamic("Filter")(__v.asInstanceOf[js.Any]))
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListRxNormInferenceJobsRequest]
}
}
@js.native
trait ListRxNormInferenceJobsResponse extends js.Object {
var ComprehendMedicalAsyncJobPropertiesList: js.UndefOr[ComprehendMedicalAsyncJobPropertiesList]
var NextToken: js.UndefOr[String]
}
object ListRxNormInferenceJobsResponse {
@inline
def apply(
ComprehendMedicalAsyncJobPropertiesList: js.UndefOr[ComprehendMedicalAsyncJobPropertiesList] = js.undefined,
NextToken: js.UndefOr[String] = js.undefined
): ListRxNormInferenceJobsResponse = {
val __obj = js.Dynamic.literal()
ComprehendMedicalAsyncJobPropertiesList.foreach(__v => __obj.updateDynamic("ComprehendMedicalAsyncJobPropertiesList")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListRxNormInferenceJobsResponse]
}
}
/** The output properties for a detection job.
*/
@js.native
trait OutputDataConfig extends js.Object {
var S3Bucket: S3Bucket
var S3Key: js.UndefOr[S3Key]
}
object OutputDataConfig {
@inline
def apply(
S3Bucket: S3Bucket,
S3Key: js.UndefOr[S3Key] = js.undefined
): OutputDataConfig = {
val __obj = js.Dynamic.literal(
"S3Bucket" -> S3Bucket.asInstanceOf[js.Any]
)
S3Key.foreach(__v => __obj.updateDynamic("S3Key")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[OutputDataConfig]
}
}
  /** Relationship between an entity and an attribute (string-backed enum facade). */
  @js.native
  sealed trait RelationshipType extends js.Any
  object RelationshipType {
    val EVERY = "EVERY".asInstanceOf[RelationshipType]
    val WITH_DOSAGE = "WITH_DOSAGE".asInstanceOf[RelationshipType]
    val ADMINISTERED_VIA = "ADMINISTERED_VIA".asInstanceOf[RelationshipType]
    val FOR = "FOR".asInstanceOf[RelationshipType]
    val NEGATIVE = "NEGATIVE".asInstanceOf[RelationshipType]
    val OVERLAP = "OVERLAP".asInstanceOf[RelationshipType]
    val DOSAGE = "DOSAGE".asInstanceOf[RelationshipType]
    val ROUTE_OR_MODE = "ROUTE_OR_MODE".asInstanceOf[RelationshipType]
    val FORM = "FORM".asInstanceOf[RelationshipType]
    val FREQUENCY = "FREQUENCY".asInstanceOf[RelationshipType]
    val DURATION = "DURATION".asInstanceOf[RelationshipType]
    val STRENGTH = "STRENGTH".asInstanceOf[RelationshipType]
    val RATE = "RATE".asInstanceOf[RelationshipType]
    val ACUITY = "ACUITY".asInstanceOf[RelationshipType]
    val TEST_VALUE = "TEST_VALUE".asInstanceOf[RelationshipType]
    val TEST_UNITS = "TEST_UNITS".asInstanceOf[RelationshipType]
    val DIRECTION = "DIRECTION".asInstanceOf[RelationshipType]
    val SYSTEM_ORGAN_SITE = "SYSTEM_ORGAN_SITE".asInstanceOf[RelationshipType]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(
      EVERY,
      WITH_DOSAGE,
      ADMINISTERED_VIA,
      FOR,
      NEGATIVE,
      OVERLAP,
      DOSAGE,
      ROUTE_OR_MODE,
      FORM,
      FREQUENCY,
      DURATION,
      STRENGTH,
      RATE,
      ACUITY,
      TEST_VALUE,
      TEST_UNITS,
      DIRECTION,
      SYSTEM_ORGAN_SITE
    )
  }
/** The extracted attributes that relate to this entity. The attributes recognized by InferRxNorm are <code>DOSAGE</code>, <code>DURATION</code>, <code>FORM</code>, <code>FREQUENCY</code>, <code>RATE</code>, <code>ROUTE_OR_MODE</code>.
*/
@js.native
trait RxNormAttribute extends js.Object {
var BeginOffset: js.UndefOr[Int]
var EndOffset: js.UndefOr[Int]
var Id: js.UndefOr[Int]
var RelationshipScore: js.UndefOr[Float]
var Score: js.UndefOr[Float]
var Text: js.UndefOr[String]
var Traits: js.UndefOr[RxNormTraitList]
var Type: js.UndefOr[RxNormAttributeType]
}
object RxNormAttribute {
@inline
def apply(
BeginOffset: js.UndefOr[Int] = js.undefined,
EndOffset: js.UndefOr[Int] = js.undefined,
Id: js.UndefOr[Int] = js.undefined,
RelationshipScore: js.UndefOr[Float] = js.undefined,
Score: js.UndefOr[Float] = js.undefined,
Text: js.UndefOr[String] = js.undefined,
Traits: js.UndefOr[RxNormTraitList] = js.undefined,
Type: js.UndefOr[RxNormAttributeType] = js.undefined
): RxNormAttribute = {
val __obj = js.Dynamic.literal()
BeginOffset.foreach(__v => __obj.updateDynamic("BeginOffset")(__v.asInstanceOf[js.Any]))
EndOffset.foreach(__v => __obj.updateDynamic("EndOffset")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
RelationshipScore.foreach(__v => __obj.updateDynamic("RelationshipScore")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
Text.foreach(__v => __obj.updateDynamic("Text")(__v.asInstanceOf[js.Any]))
Traits.foreach(__v => __obj.updateDynamic("Traits")(__v.asInstanceOf[js.Any]))
Type.foreach(__v => __obj.updateDynamic("Type")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[RxNormAttribute]
}
}
  /** Type of an RxNorm attribute (string-backed enum facade). */
  @js.native
  sealed trait RxNormAttributeType extends js.Any
  object RxNormAttributeType {
    val DOSAGE = "DOSAGE".asInstanceOf[RxNormAttributeType]
    val DURATION = "DURATION".asInstanceOf[RxNormAttributeType]
    val FORM = "FORM".asInstanceOf[RxNormAttributeType]
    val FREQUENCY = "FREQUENCY".asInstanceOf[RxNormAttributeType]
    val RATE = "RATE".asInstanceOf[RxNormAttributeType]
    val ROUTE_OR_MODE = "ROUTE_OR_MODE".asInstanceOf[RxNormAttributeType]
    val STRENGTH = "STRENGTH".asInstanceOf[RxNormAttributeType]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(DOSAGE, DURATION, FORM, FREQUENCY, RATE, ROUTE_OR_MODE, STRENGTH)
  }
/** The RxNorm concept that the entity could refer to, along with a score indicating the likelihood of the match.
*/
@js.native
trait RxNormConcept extends js.Object {
var Code: js.UndefOr[String]
var Description: js.UndefOr[String]
var Score: js.UndefOr[Float]
}
object RxNormConcept {
@inline
def apply(
Code: js.UndefOr[String] = js.undefined,
Description: js.UndefOr[String] = js.undefined,
Score: js.UndefOr[Float] = js.undefined
): RxNormConcept = {
val __obj = js.Dynamic.literal()
Code.foreach(__v => __obj.updateDynamic("Code")(__v.asInstanceOf[js.Any]))
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[RxNormConcept]
}
}
/** The collection of medical entities extracted from the input text and their associated information. For each entity, the response provides the entity text, the entity category, where the entity text begins and ends, and the level of confidence that Amazon Comprehend Medical has in the detection and analysis. Attributes and traits of the entity are also returned.
*/
@js.native
trait RxNormEntity extends js.Object {
var Attributes: js.UndefOr[RxNormAttributeList]
var BeginOffset: js.UndefOr[Int]
var Category: js.UndefOr[RxNormEntityCategory]
var EndOffset: js.UndefOr[Int]
var Id: js.UndefOr[Int]
var RxNormConcepts: js.UndefOr[RxNormConceptList]
var Score: js.UndefOr[Float]
var Text: js.UndefOr[OntologyLinkingBoundedLengthString]
var Traits: js.UndefOr[RxNormTraitList]
var Type: js.UndefOr[RxNormEntityType]
}
object RxNormEntity {
@inline
def apply(
Attributes: js.UndefOr[RxNormAttributeList] = js.undefined,
BeginOffset: js.UndefOr[Int] = js.undefined,
Category: js.UndefOr[RxNormEntityCategory] = js.undefined,
EndOffset: js.UndefOr[Int] = js.undefined,
Id: js.UndefOr[Int] = js.undefined,
RxNormConcepts: js.UndefOr[RxNormConceptList] = js.undefined,
Score: js.UndefOr[Float] = js.undefined,
Text: js.UndefOr[OntologyLinkingBoundedLengthString] = js.undefined,
Traits: js.UndefOr[RxNormTraitList] = js.undefined,
Type: js.UndefOr[RxNormEntityType] = js.undefined
): RxNormEntity = {
val __obj = js.Dynamic.literal()
Attributes.foreach(__v => __obj.updateDynamic("Attributes")(__v.asInstanceOf[js.Any]))
BeginOffset.foreach(__v => __obj.updateDynamic("BeginOffset")(__v.asInstanceOf[js.Any]))
Category.foreach(__v => __obj.updateDynamic("Category")(__v.asInstanceOf[js.Any]))
EndOffset.foreach(__v => __obj.updateDynamic("EndOffset")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
RxNormConcepts.foreach(__v => __obj.updateDynamic("RxNormConcepts")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
Text.foreach(__v => __obj.updateDynamic("Text")(__v.asInstanceOf[js.Any]))
Traits.foreach(__v => __obj.updateDynamic("Traits")(__v.asInstanceOf[js.Any]))
Type.foreach(__v => __obj.updateDynamic("Type")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[RxNormEntity]
}
}
  /** Category of an RxNorm entity (string-backed enum facade). */
  @js.native
  sealed trait RxNormEntityCategory extends js.Any
  object RxNormEntityCategory {
    val MEDICATION = "MEDICATION".asInstanceOf[RxNormEntityCategory]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(MEDICATION)
  }
  /** Type of an RxNorm entity (string-backed enum facade). */
  @js.native
  sealed trait RxNormEntityType extends js.Any
  object RxNormEntityType {
    val BRAND_NAME = "BRAND_NAME".asInstanceOf[RxNormEntityType]
    val GENERIC_NAME = "GENERIC_NAME".asInstanceOf[RxNormEntityType]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(BRAND_NAME, GENERIC_NAME)
  }
/** The contextual information for the entity. InferRxNorm recognizes the trait <code>NEGATION</code>, which is any indication that the patient is not taking a medication.
*/
@js.native
trait RxNormTrait extends js.Object {
var Name: js.UndefOr[RxNormTraitName]
var Score: js.UndefOr[Float]
}
object RxNormTrait {
@inline
def apply(
Name: js.UndefOr[RxNormTraitName] = js.undefined,
Score: js.UndefOr[Float] = js.undefined
): RxNormTrait = {
val __obj = js.Dynamic.literal()
Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[RxNormTrait]
}
}
  /** Name of an RxNorm trait (string-backed enum facade). */
  @js.native
  sealed trait RxNormTraitName extends js.Any
  object RxNormTraitName {
    val NEGATION = "NEGATION".asInstanceOf[RxNormTraitName]
    // Every known value, e.g. for iteration or validation.
    @inline def values = js.Array(NEGATION)
  }
@js.native
trait StartEntitiesDetectionV2JobRequest extends js.Object {
var DataAccessRoleArn: IamRoleArn
var InputDataConfig: InputDataConfig
var LanguageCode: LanguageCode
var OutputDataConfig: OutputDataConfig
var ClientRequestToken: js.UndefOr[ClientRequestTokenString]
var JobName: js.UndefOr[JobName]
var KMSKey: js.UndefOr[KMSKey]
}
object StartEntitiesDetectionV2JobRequest {
@inline
def apply(
DataAccessRoleArn: IamRoleArn,
InputDataConfig: InputDataConfig,
LanguageCode: LanguageCode,
OutputDataConfig: OutputDataConfig,
ClientRequestToken: js.UndefOr[ClientRequestTokenString] = js.undefined,
JobName: js.UndefOr[JobName] = js.undefined,
KMSKey: js.UndefOr[KMSKey] = js.undefined
): StartEntitiesDetectionV2JobRequest = {
val __obj = js.Dynamic.literal(
"DataAccessRoleArn" -> DataAccessRoleArn.asInstanceOf[js.Any],
"InputDataConfig" -> InputDataConfig.asInstanceOf[js.Any],
"LanguageCode" -> LanguageCode.asInstanceOf[js.Any],
"OutputDataConfig" -> OutputDataConfig.asInstanceOf[js.Any]
)
ClientRequestToken.foreach(__v => __obj.updateDynamic("ClientRequestToken")(__v.asInstanceOf[js.Any]))
JobName.foreach(__v => __obj.updateDynamic("JobName")(__v.asInstanceOf[js.Any]))
KMSKey.foreach(__v => __obj.updateDynamic("KMSKey")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[StartEntitiesDetectionV2JobRequest]
}
}
  /** Response for `StartEntitiesDetectionV2Job`; `JobId` identifies the started job. */
  @js.native
  trait StartEntitiesDetectionV2JobResponse extends js.Object {
    var JobId: js.UndefOr[JobId]
  }
  /** Generated factory — regenerate from the AWS API model instead of hand-editing. */
  object StartEntitiesDetectionV2JobResponse {
    @inline
    def apply(
        JobId: js.UndefOr[JobId] = js.undefined
    ): StartEntitiesDetectionV2JobResponse = {
      val __obj = js.Dynamic.literal()
      JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StartEntitiesDetectionV2JobResponse]
    }
  }
  /** Request for `StartICD10CMInferenceJob`. */
  @js.native
  trait StartICD10CMInferenceJobRequest extends js.Object {
    var DataAccessRoleArn: IamRoleArn
    var InputDataConfig: InputDataConfig
    var LanguageCode: LanguageCode
    var OutputDataConfig: OutputDataConfig
    var ClientRequestToken: js.UndefOr[ClientRequestTokenString]
    var JobName: js.UndefOr[JobName]
    var KMSKey: js.UndefOr[KMSKey]
  }
  /** Generated factory. Required fields go into the literal; optional fields are
    * attached via `foreach` only when defined, so undefined optionals are omitted
    * from the resulting JS object.
    */
  object StartICD10CMInferenceJobRequest {
    @inline
    def apply(
        DataAccessRoleArn: IamRoleArn,
        InputDataConfig: InputDataConfig,
        LanguageCode: LanguageCode,
        OutputDataConfig: OutputDataConfig,
        ClientRequestToken: js.UndefOr[ClientRequestTokenString] = js.undefined,
        JobName: js.UndefOr[JobName] = js.undefined,
        KMSKey: js.UndefOr[KMSKey] = js.undefined
    ): StartICD10CMInferenceJobRequest = {
      val __obj = js.Dynamic.literal(
        "DataAccessRoleArn" -> DataAccessRoleArn.asInstanceOf[js.Any],
        "InputDataConfig" -> InputDataConfig.asInstanceOf[js.Any],
        "LanguageCode" -> LanguageCode.asInstanceOf[js.Any],
        "OutputDataConfig" -> OutputDataConfig.asInstanceOf[js.Any]
      )
      ClientRequestToken.foreach(__v => __obj.updateDynamic("ClientRequestToken")(__v.asInstanceOf[js.Any]))
      JobName.foreach(__v => __obj.updateDynamic("JobName")(__v.asInstanceOf[js.Any]))
      KMSKey.foreach(__v => __obj.updateDynamic("KMSKey")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StartICD10CMInferenceJobRequest]
    }
  }
  /** Response for `StartICD10CMInferenceJob`; `JobId` identifies the started job. */
  @js.native
  trait StartICD10CMInferenceJobResponse extends js.Object {
    var JobId: js.UndefOr[JobId]
  }
  object StartICD10CMInferenceJobResponse {
    @inline
    def apply(
        JobId: js.UndefOr[JobId] = js.undefined
    ): StartICD10CMInferenceJobResponse = {
      val __obj = js.Dynamic.literal()
      JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StartICD10CMInferenceJobResponse]
    }
  }
  /** Request for `StartPHIDetectionJob`. */
  @js.native
  trait StartPHIDetectionJobRequest extends js.Object {
    var DataAccessRoleArn: IamRoleArn
    var InputDataConfig: InputDataConfig
    var LanguageCode: LanguageCode
    var OutputDataConfig: OutputDataConfig
    var ClientRequestToken: js.UndefOr[ClientRequestTokenString]
    var JobName: js.UndefOr[JobName]
    var KMSKey: js.UndefOr[KMSKey]
  }
  /** Generated factory — optional fields are attached only when defined. */
  object StartPHIDetectionJobRequest {
    @inline
    def apply(
        DataAccessRoleArn: IamRoleArn,
        InputDataConfig: InputDataConfig,
        LanguageCode: LanguageCode,
        OutputDataConfig: OutputDataConfig,
        ClientRequestToken: js.UndefOr[ClientRequestTokenString] = js.undefined,
        JobName: js.UndefOr[JobName] = js.undefined,
        KMSKey: js.UndefOr[KMSKey] = js.undefined
    ): StartPHIDetectionJobRequest = {
      val __obj = js.Dynamic.literal(
        "DataAccessRoleArn" -> DataAccessRoleArn.asInstanceOf[js.Any],
        "InputDataConfig" -> InputDataConfig.asInstanceOf[js.Any],
        "LanguageCode" -> LanguageCode.asInstanceOf[js.Any],
        "OutputDataConfig" -> OutputDataConfig.asInstanceOf[js.Any]
      )
      ClientRequestToken.foreach(__v => __obj.updateDynamic("ClientRequestToken")(__v.asInstanceOf[js.Any]))
      JobName.foreach(__v => __obj.updateDynamic("JobName")(__v.asInstanceOf[js.Any]))
      KMSKey.foreach(__v => __obj.updateDynamic("KMSKey")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StartPHIDetectionJobRequest]
    }
  }
  /** Response for `StartPHIDetectionJob`; `JobId` identifies the started job. */
  @js.native
  trait StartPHIDetectionJobResponse extends js.Object {
    var JobId: js.UndefOr[JobId]
  }
  object StartPHIDetectionJobResponse {
    @inline
    def apply(
        JobId: js.UndefOr[JobId] = js.undefined
    ): StartPHIDetectionJobResponse = {
      val __obj = js.Dynamic.literal()
      JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StartPHIDetectionJobResponse]
    }
  }
  /** Request for `StartRxNormInferenceJob`. */
  @js.native
  trait StartRxNormInferenceJobRequest extends js.Object {
    var DataAccessRoleArn: IamRoleArn
    var InputDataConfig: InputDataConfig
    var LanguageCode: LanguageCode
    var OutputDataConfig: OutputDataConfig
    var ClientRequestToken: js.UndefOr[ClientRequestTokenString]
    var JobName: js.UndefOr[JobName]
    var KMSKey: js.UndefOr[KMSKey]
  }
  /** Generated factory — optional fields are attached only when defined. */
  object StartRxNormInferenceJobRequest {
    @inline
    def apply(
        DataAccessRoleArn: IamRoleArn,
        InputDataConfig: InputDataConfig,
        LanguageCode: LanguageCode,
        OutputDataConfig: OutputDataConfig,
        ClientRequestToken: js.UndefOr[ClientRequestTokenString] = js.undefined,
        JobName: js.UndefOr[JobName] = js.undefined,
        KMSKey: js.UndefOr[KMSKey] = js.undefined
    ): StartRxNormInferenceJobRequest = {
      val __obj = js.Dynamic.literal(
        "DataAccessRoleArn" -> DataAccessRoleArn.asInstanceOf[js.Any],
        "InputDataConfig" -> InputDataConfig.asInstanceOf[js.Any],
        "LanguageCode" -> LanguageCode.asInstanceOf[js.Any],
        "OutputDataConfig" -> OutputDataConfig.asInstanceOf[js.Any]
      )
      ClientRequestToken.foreach(__v => __obj.updateDynamic("ClientRequestToken")(__v.asInstanceOf[js.Any]))
      JobName.foreach(__v => __obj.updateDynamic("JobName")(__v.asInstanceOf[js.Any]))
      KMSKey.foreach(__v => __obj.updateDynamic("KMSKey")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StartRxNormInferenceJobRequest]
    }
  }
  /** Response for `StartRxNormInferenceJob`; `JobId` identifies the started job. */
  @js.native
  trait StartRxNormInferenceJobResponse extends js.Object {
    var JobId: js.UndefOr[JobId]
  }
  object StartRxNormInferenceJobResponse {
    @inline
    def apply(
        JobId: js.UndefOr[JobId] = js.undefined
    ): StartRxNormInferenceJobResponse = {
      val __obj = js.Dynamic.literal()
      JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StartRxNormInferenceJobResponse]
    }
  }
  /** Request for `StopEntitiesDetectionV2Job`; `JobId` is required. */
  @js.native
  trait StopEntitiesDetectionV2JobRequest extends js.Object {
    var JobId: JobId
  }
  object StopEntitiesDetectionV2JobRequest {
    @inline
    def apply(
        JobId: JobId
    ): StopEntitiesDetectionV2JobRequest = {
      val __obj = js.Dynamic.literal(
        "JobId" -> JobId.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[StopEntitiesDetectionV2JobRequest]
    }
  }
  /** Response for `StopEntitiesDetectionV2Job`. */
  @js.native
  trait StopEntitiesDetectionV2JobResponse extends js.Object {
    var JobId: js.UndefOr[JobId]
  }
  object StopEntitiesDetectionV2JobResponse {
    @inline
    def apply(
        JobId: js.UndefOr[JobId] = js.undefined
    ): StopEntitiesDetectionV2JobResponse = {
      val __obj = js.Dynamic.literal()
      JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StopEntitiesDetectionV2JobResponse]
    }
  }
  /** Request for `StopICD10CMInferenceJob`; `JobId` is required. */
  @js.native
  trait StopICD10CMInferenceJobRequest extends js.Object {
    var JobId: JobId
  }
  object StopICD10CMInferenceJobRequest {
    @inline
    def apply(
        JobId: JobId
    ): StopICD10CMInferenceJobRequest = {
      val __obj = js.Dynamic.literal(
        "JobId" -> JobId.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[StopICD10CMInferenceJobRequest]
    }
  }
  /** Response for `StopICD10CMInferenceJob`. */
  @js.native
  trait StopICD10CMInferenceJobResponse extends js.Object {
    var JobId: js.UndefOr[JobId]
  }
  object StopICD10CMInferenceJobResponse {
    @inline
    def apply(
        JobId: js.UndefOr[JobId] = js.undefined
    ): StopICD10CMInferenceJobResponse = {
      val __obj = js.Dynamic.literal()
      JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StopICD10CMInferenceJobResponse]
    }
  }
  /** Request for `StopPHIDetectionJob`; `JobId` is required. */
  @js.native
  trait StopPHIDetectionJobRequest extends js.Object {
    var JobId: JobId
  }
  object StopPHIDetectionJobRequest {
    @inline
    def apply(
        JobId: JobId
    ): StopPHIDetectionJobRequest = {
      val __obj = js.Dynamic.literal(
        "JobId" -> JobId.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[StopPHIDetectionJobRequest]
    }
  }
  /** Response for `StopPHIDetectionJob`. */
  @js.native
  trait StopPHIDetectionJobResponse extends js.Object {
    var JobId: js.UndefOr[JobId]
  }
  object StopPHIDetectionJobResponse {
    @inline
    def apply(
        JobId: js.UndefOr[JobId] = js.undefined
    ): StopPHIDetectionJobResponse = {
      val __obj = js.Dynamic.literal()
      JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StopPHIDetectionJobResponse]
    }
  }
  /** Request for `StopRxNormInferenceJob`; `JobId` is required. */
  @js.native
  trait StopRxNormInferenceJobRequest extends js.Object {
    var JobId: JobId
  }
  object StopRxNormInferenceJobRequest {
    @inline
    def apply(
        JobId: JobId
    ): StopRxNormInferenceJobRequest = {
      val __obj = js.Dynamic.literal(
        "JobId" -> JobId.asInstanceOf[js.Any]
      )
      __obj.asInstanceOf[StopRxNormInferenceJobRequest]
    }
  }
  /** Response for `StopRxNormInferenceJob`. */
  @js.native
  trait StopRxNormInferenceJobResponse extends js.Object {
    var JobId: js.UndefOr[JobId]
  }
  object StopRxNormInferenceJobResponse {
    @inline
    def apply(
        JobId: js.UndefOr[JobId] = js.undefined
    ): StopRxNormInferenceJobResponse = {
      val __obj = js.Dynamic.literal()
      JobId.foreach(__v => __obj.updateDynamic("JobId")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[StopRxNormInferenceJobResponse]
    }
  }
  /** Provides contextual information about the extracted entity.
    */
  @js.native
  trait Trait extends js.Object {
    var Name: js.UndefOr[AttributeName]
    var Score: js.UndefOr[Float]
  }
  /** Generated factory — undefined optionals are omitted from the resulting object. */
  object Trait {
    @inline
    def apply(
        Name: js.UndefOr[AttributeName] = js.undefined,
        Score: js.UndefOr[Float] = js.undefined
    ): Trait = {
      val __obj = js.Dynamic.literal()
      Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
      Score.foreach(__v => __obj.updateDynamic("Score")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[Trait]
    }
  }
  /** An attribute that we extracted, but were unable to relate to an entity.
    */
  @js.native
  trait UnmappedAttribute extends js.Object {
    var Attribute: js.UndefOr[Attribute]
    var Type: js.UndefOr[EntityType]
  }
  /** Generated factory — undefined optionals are omitted from the resulting object. */
  object UnmappedAttribute {
    @inline
    def apply(
        Attribute: js.UndefOr[Attribute] = js.undefined,
        Type: js.UndefOr[EntityType] = js.undefined
    ): UnmappedAttribute = {
      val __obj = js.Dynamic.literal()
      Attribute.foreach(__v => __obj.updateDynamic("Attribute")(__v.asInstanceOf[js.Any]))
      Type.foreach(__v => __obj.updateDynamic("Type")(__v.asInstanceOf[js.Any]))
      __obj.asInstanceOf[UnmappedAttribute]
    }
  }
}
|
<reponame>pyrsia/website
import React from 'react';
import clsx from 'clsx';
import Link from '@docusaurus/Link';
import styles from './styles.module.css';
const FeatureList = [
{
title: 'ACTORS & ENTITIES',
description: (
<>
Attestations proved non-repudiation so developers know exactly who wrote the code,
how it was built and how the artifact was published. Built on standard like Sigstore's
Cosign to NPM Packages Signing allows developers to rapidly add their containers to pyrsia
network along side their favorite container registries. Other notable efforts include Notary V2.
</>
),
},
{
title: 'SOURCE REVISION',
description: (
<>
Open interoperable standards such as Git Commit Signatures help to pin down the exact commit where
the source code originates from. Know the author and commit who wrote the code. Know who built
and published the code from their secured environment.
</>
),
},
{
title: 'ARTIFACT DESCRIPTIONS',
description: (
<>
Open interoperable standards such as the Linux Foundation's SPDX, OWASP CycloneDX, or Build Info as
used to collect facts and communicate facts about packages. Leverage and integrate with the
technologies you are already relying on to secure your supply chain.
</>
),
},
{
title: 'IMMUTABLE LEDGER',
description: (
<>
Every package ever published forever record in an unchanging ledger persists the consistent
reproducible data pointing to a uniquely identifiable artifact that can always be available
in the network.
</>
),
},
{
title: 'DISTRIBUTED NETWORK',
description: (
<>
Each node connects through relays to discover each other. When a node download a package
that's requested by it's peer it's cached and seeding the network when another node looks
for the same package. This provides a robust and highly available package ecosystem not
limited by a central repository.
</>
),
},
{
title: 'REPUTABLE PARTNERS',
description: (
<>
In order to bootstrap trust, only a select few reputable entities will build and publishing
images. These image will be available to everyone. Participates of the OpenSSF will all the have
chance to volunteer their resources to help establish the first distributed network, but we are
most excited for the future!
</>
),
},
];
function Feature({title, description}) {
return (
<div className={clsx('col col--4')}>
<div className="padding-horiz--md padding-bottom--lg padding-top--md">
<h3 className='fs24 fw700'>{title}</h3>
<div className={clsx('padding-bottom--sm', styles.featureDesc)}>{description}</div>
</div>
</div>
);
}
export default function VisionFeatures() {
return (
<section className={styles.features} id="visionfeatures">
<div className={clsx('container', styles.bgTorch)}>
<div className="row">
<div className={clsx('col col--12 text--center padding-bottom--lg')}>
<h2 className={clsx('fs40 fw700', styles.headline)}>
<span className="colorWhite">Gain confidence by having</span> transparency on the source of
the packages <span className="colorWhite">you need</span>
</h2>
</div>
</div>
<div className="row padding-bottom--xl">
{FeatureList.map((props, idx) => (
<Feature key={idx} {...props} />
))}
</div>
</div>
</section>
);
}
|
#!/bin/bash
# Build the native addon that lives next to this script with node-gyp.
# Abort on the first failing command so `build` never runs after a failed `configure`.
set -e
# Quote the command substitution: an unquoted $(dirname "$0") word-splits on
# paths containing spaces.
cd "$(dirname "$0")"
node-gyp configure
node-gyp build
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.tachometer = void 0;
var tachometer = {
"viewBox": "0 0 1792 1792",
"children": [{
"name": "path",
"attribs": {
"d": "M384 1152q0-53-37.5-90.5t-90.5-37.5-90.5 37.5-37.5 90.5 37.5 90.5 90.5 37.5 90.5-37.5 37.5-90.5zM576 704q0-53-37.5-90.5t-90.5-37.5-90.5 37.5-37.5 90.5 37.5 90.5 90.5 37.5 90.5-37.5 37.5-90.5zM1004 1185l101-382q6-26-7.5-48.5t-38.5-29.5-48 6.5-30 39.5l-101 382q-60 5-107 43.5t-63 98.5q-20 77 20 146t117 89 146-20 89-117q16-60-6-117t-72-91zM1664 1152q0-53-37.5-90.5t-90.5-37.5-90.5 37.5-37.5 90.5 37.5 90.5 90.5 37.5 90.5-37.5 37.5-90.5zM1024 512q0-53-37.5-90.5t-90.5-37.5-90.5 37.5-37.5 90.5 37.5 90.5 90.5 37.5 90.5-37.5 37.5-90.5zM1472 704q0-53-37.5-90.5t-90.5-37.5-90.5 37.5-37.5 90.5 37.5 90.5 90.5 37.5 90.5-37.5 37.5-90.5zM1792 1152q0 261-141 483-19 29-54 29h-1402q-35 0-54-29-141-221-141-483 0-182 71-348t191-286 286-191 348-71 348 71 286 191 191 286 71 348z"
}
}]
};
exports.tachometer = tachometer; |
#!/bin/bash
# SLURM job: Double-DDPG training on RoboschoolHumanoid-v1 (continuous actions,
# epsilon-greedy exploration, seed 5, run 2). Flags below are cluster-specific.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M			# memory per node
#SBATCH --time=23:00:00			# time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHumanoid-v1_ddpg_softcopy_epsilon_greedy_seed5_run2_%N-%j.out  # %N for node name, %j for jobID
# Load toolchain matching the pre-built virtualenv, then activate it.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env RoboschoolHumanoid-v1 --random-seed 5 --exploration-strategy epsilon_greedy --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHumanoid-v1/ddpg_softcopy_epsilon_greedy_seed5_run2 --continuous-act-space-flag --double-ddpg-flag
package org.slos.battle.abilities.attribute.summoner;
import org.slos.battle.abilities.Ability;
import org.slos.battle.abilities.AbilityClassification;
import org.slos.battle.abilities.AbilityEffect;
import org.slos.battle.abilities.AbilityType;
import org.slos.battle.abilities.attribute.BuffAbility;
import org.slos.battle.abilities.buff.BattleAttributeBuff;
import org.slos.battle.abilities.buff.Buff;
import org.slos.battle.abilities.rule.target.TargetEnemyRule;
import org.slos.battle.abilities.rule.target.TargetFriendlyRule;
import org.slos.battle.abilities.rule.target.TargetRuleset;
import org.slos.battle.monster.BattleAttributeType;
import java.util.Collections;
/**
 * Summoner buff that adjusts monsters' speed attribute for the battle.
 * A positive quantity targets friendly monsters; a non-positive quantity
 * targets enemies (i.e. acts as a debuff).
 */
public class SummonerSpeedAbility extends Ability implements BuffAbility {

    /** Speed delta applied by the buff; its sign also selects the target side. */
    private final int buffQuantity;

    public SummonerSpeedAbility(int buffQuantity) {
        super(AbilityType.SUMMONER_SPEED, AbilityClassification.BUFF);
        this.buffQuantity = buffQuantity;
    }

    @Override
    public Buff getBuffEffect() {
        return new BattleAttributeBuff(BattleAttributeType.SPEED, buffQuantity);
    }

    /** This ability acts solely through its buff, so it exposes no direct effect. */
    @Override
    public AbilityEffect getEffect() {
        return null;
    }

    @Override
    public TargetRuleset getTargetRuleset() {
        // NOTE(review): buffQuantity == 0 falls into the enemy branch — confirm intended.
        if (buffQuantity > 0) {
            return new TargetRuleset(Collections.singletonList(new TargetFriendlyRule()));
        }
        else {
            return new TargetRuleset(Collections.singletonList(new TargetEnemyRule()));
        }
    }
}
|
<gh_stars>10-100
// Backbone root view for the telephony widget. Expects a #telephony-widget
// element carrying loan/agent data-attributes; delegates the actual UI to
// WidgetView.
Zest.Telephony.Views.ApplicationView = Backbone.View.extend({
  el: '#telephony-widget',

  initialize: function() {
    // Bail out (with a log) when the root element is missing from the page.
    if (!this.el) {
      (this.options.logger || console).log("No root element to create the telephony widget");
      return;
    }
    Zest.Telephony.Push.init();
  },

  disableCallControl: function(opts) {
    this.widgetView.disableCallControl(opts);
  },

  render: function() {
    var $root = $(this.el);
    this.widgetView = new Zest.Telephony.Views.WidgetView({
      loanId: $root.data("loan_id"),
      csrId: $root.data("csr_id"),
      agentNumber: $root.data("agent_phone_number")
    });
    $root.append(this.widgetView.render().el);
    return this;
  }
});
// Singleton controller: validates the agent against the server, then renders
// the widget (done) or shows the server's error text (fail).
Zest.Telephony.Application = (function () {
  var instance;

  return {
    // Boots the widget. `agent` is injectable for tests; defaults to one built
    // from the #telephony-widget data-attributes.
    init: function(agent) {
      agent = agent || this.setupAgent();
      agent.isValid({
        done_callback: $.proxy(this.done, this),
        fail_callback: $.proxy(this.fail, this)
      });
    },

    // Builds an Agent model from the widget element's data-attributes.
    setupAgent: function() {
      var $widgetWrapper = $("#telephony-widget");
      return new Zest.Telephony.Models.Agent({
        csr_id: $widgetWrapper.data('csr_id'),
        csr_type: $widgetWrapper.data('csr_type'),
        csr_generate_caller_id: $widgetWrapper.data('agent_generate_caller_id'),
        csr_name: $widgetWrapper.data('agent_name'),
        csr_phone_number: $widgetWrapper.data('agent_phone_number'),
        csr_phone_ext: $widgetWrapper.data('agent_phone_ext'),
        csr_sip_number: $widgetWrapper.data('agent_sip_number'),
        csr_call_center_name: $widgetWrapper.data('agent_call_center_name'),
        csr_phone_type: $widgetWrapper.data('agent_phone_type') || "",
        csr_transferable_agents: JSON.stringify($widgetWrapper.data('agent_transferable_agents') || [])
      });
    },

    // Lazily creates the single ApplicationView.
    getInstance: function () {
      if ( !instance ) {
        instance = new Zest.Telephony.Views.ApplicationView();
      }
      return instance;
    },

    done: function(data) {
      this.getInstance().render();
      $(document).trigger("telephony:WidgetReady", { conversation_id: data.conversation_id });
    },

    fail: function(xhr, testStatus) {
      // The server may return non-JSON (e.g. an HTML error page); fall back to
      // the raw response instead of throwing inside the failure handler.
      var message;
      try {
        message = JSON.parse(xhr.responseText).errors;
      } catch (e) {
        message = xhr.responseText;
      }
      $("#telephony-widget").text(message);
    }
  };
})();
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2875-1
#
# Security announcement date: 2014-03-12 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:51 UTC
#
# Operating System: Debian 7 (Wheezy)
# Architecture: x86_64
#
# Vulnerable packages fixed in version:
#   - cups-filters:1.0.18-2.1+deb7u1
#
# Latest versions recommended by the security team:
#   - cups-filters:1.0.18-2.1+deb7u2
#
# CVE List:
#   - CVE-2013-6474
#   - CVE-2013-6475
#   - CVE-2013-6476
#
# More details:
#   - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade cups-filters=1.0.18-2.1+deb7u2 -y
|
<filename>public/demo-audio/main.js
// Slices an AudioBuffer into consecutive chunks of at most `length` frames,
// preserving the channel count and sample rate; the final chunk may be shorter.
function splitBuffer(buffer, length) {
  const channelData = [];
  for (let ch = 0; ch < buffer.numberOfChannels; ch++) {
    channelData.push(buffer.getChannelData(ch));
  }

  const chunks = [];
  let offset = 0;
  while (offset < buffer.length) {
    const frames = Math.min(length, buffer.length - offset);
    const chunk = new AudioBuffer({
      length: frames,
      numberOfChannels: buffer.numberOfChannels,
      sampleRate: buffer.sampleRate,
    });
    for (let ch = 0; ch < buffer.numberOfChannels; ch++) {
      chunk.copyToChannel(channelData[ch].slice(offset, offset + frames), ch);
    }
    chunks.push(chunk);
    offset += length;
  }
  return chunks;
}
// Demo: decode a WAV file, re-encode it with WebCodecs AudioEncoder (Opus),
// decode the chunks back, and schedule playback while reporting the total
// encoded byte size. Timing depends on the encode->decode->schedule order —
// keep the statement order intact.
async function main() {
  // 2 channels file
  const raw_music_wav = await fetch(
    'https://cdn.glitch.com/f92b40ba-41b8-4076-a8c7-f66c1ccfd371%2Fnews.wav?v=1631836633526',
  );
  // One channel file
  // let raw_music_wav = await fetch("https://cdn.glitch.com/f92b40ba-41b8-4076-a8c7-f66c1ccfd371%2Fmusic.wav?v=1616487361153");
  const outputCtx = new AudioContext();
  const music_buffer = await outputCtx.decodeAudioData(
    await raw_music_wav.arrayBuffer(),
  );
  const sampleRate = music_buffer.sampleRate;
  let total_encoded_size = 0;
  // Decoder output: copy each decoded frame (planar f32) into an AudioBuffer
  // and schedule it at its timestamp (microseconds -> seconds).
  const decoder = new AudioDecoder({
    error(e) {
      console.log(e);
    },
    async output(audio_data) {
      const source = outputCtx.createBufferSource();
      source.buffer = new AudioBuffer({
        length: audio_data.numberOfFrames,
        numberOfChannels: audio_data.numberOfChannels,
        sampleRate: audio_data.sampleRate,
      });
      for (let i = 0; i < audio_data.numberOfChannels; i++) {
        audio_data.copyTo(source.buffer.getChannelData(i), {
          planeIndex: i,
          frameOffset: 0,
          frameCount: audio_data.numberOfFrames,
          format: 'f32-planar',
        });
      }
      source.connect(outputCtx.destination);
      source.start(audio_data.timestamp / 1000000);
      audio_data.close();
    },
  });
  // Encoder output: tally chunk sizes and feed the chunks straight into the
  // decoder (configuring it from the first chunk's metadata).
  const encoder = new AudioEncoder({
    error(e) {
      console.log(e);
    },
    output(chunk, metadata) {
      total_encoded_size += chunk.byteLength;
      if (metadata.decoderConfig) {
        decoder.configure(metadata.decoderConfig);
      }
      decoder.decode(chunk);
    },
  });
  const config = {
    numberOfChannels: music_buffer.numberOfChannels,
    sampleRate,
    codec: 'opus',
    bitrate: 48000,
  };
  encoder.configure(config);
  // Start playback 300 ms in the future so the first chunks have time to decode.
  let base_time = outputCtx.currentTime + 0.3;
  // Encode in half-second chunks; AudioData expects planar data, so lay the
  // channels out contiguously (channel 0 first, then channel 1, ...).
  const buffers = splitBuffer(music_buffer, sampleRate / 2);
  for (const buffer of buffers) {
    const planar_data = new Float32Array(
      buffer.length * buffer.numberOfChannels,
    );
    for (let i = 0; i < buffer.numberOfChannels; i++) {
      planar_data.set(buffer.getChannelData(i), i * buffer.length);
    }
    const audio_data = new AudioData({
      timestamp: base_time * 1000000,
      data: planar_data,
      numberOfChannels: buffer.numberOfChannels,
      numberOfFrames: buffer.length,
      sampleRate,
      format: 'f32-planar',
    });
    base_time += buffer.duration;
    encoder.encode(audio_data);
  }
  await encoder.flush();
  await decoder.flush();
  document.getElementById('total').innerText = `Total encoded size: ${total_encoded_size}`;
}
|
package net.onpointcoding.betterchristmaschests.mixin;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import net.minecraft.client.render.entity.DonkeyEntityRenderer;
import net.minecraft.entity.EntityType;
import net.minecraft.entity.passive.AbstractDonkeyEntity;
import net.minecraft.util.Identifier;
import net.onpointcoding.betterchristmaschests.BetterChristmasChests;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import java.util.Map;
// Donkeys and mules can have chests attached to them
@Mixin(DonkeyEntityRenderer.class)
public class MixinDonkeyEntityRenderer {
    // A map for Christmas textures for each horse type
    private static final Map<EntityType<?>, Identifier> CHRISTMAS_TEXTURES = Maps.newHashMap(ImmutableMap.of(EntityType.DONKEY, new Identifier("betterchristmaschests:textures/entity/horse/christmas_donkey.png"), EntityType.MULE, new Identifier("betterchristmaschests:textures/entity/horse/christmas_mule.png")));

    /**
     * Injected at the head of {@code DonkeyEntityRenderer#getTexture}: during the
     * Christmas period (and when the feature is enabled) short-circuits the vanilla
     * lookup and returns the festive texture matching the entity's type.
     */
    @Inject(at = @At("HEAD"), method = "getTexture", cancellable = true)
    public void getTexture(AbstractDonkeyEntity abstractDonkeyEntity, CallbackInfoReturnable<Identifier> callbackInfoReturnable) {
        if (BetterChristmasChests.getInstance().isChristmas() && BetterChristmasChests.getInstance().enableChristmasDonkey())
            callbackInfoReturnable.setReturnValue(CHRISTMAS_TEXTURES.get(abstractDonkeyEntity.getType()));
    }
}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-STG/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-STG/1024+0+512-N-IP-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function replace_all_but_nouns_first_two_thirds_full --eval_function last_element_eval |
<filename>Labs/LangSM/main.cpp
/* main.cpp */
//----------------------------------------------------------------------------------------
//
// Project: Labs/LangSM
//
// License: Boost Software License - Version 1.0 - August 17th, 2003
//
// see http://www.boost.org/LICENSE_1_0.txt or the local copy
//
// Copyright (c) 2017 <NAME>. All rights reserved.
//
//----------------------------------------------------------------------------------------
#include <CCore/inc/Print.h>
#include <CCore/inc/Exception.h>
#include <CCore/inc/algon/SortUnique.h>
#include <CCore/inc/lang/LangDataMap.h>
namespace App {
/* using */
using namespace CCore;
/* classes */
struct Trans;
struct State;
class StateMachine;
class TopStateMachine;
class BottomStateMachine;
struct RedFinal1;
struct RedFinal2;
struct RedFinal3;
template <class T> class RefFinalList;
/* struct Trans */
// A single automaton transition: on element `eindex`, go to state `sindex`.
struct Trans
 {
  ulen eindex;
  ulen sindex;
 };

/* struct State */

// One automaton state: its index, an associated value (final-set index in the
// top machine, remapped value in the bottom machine), and its out-transitions.
struct State
 {
  ulen sindex;
  ulen value;

  DynArray<Trans> trans;
 };
/* class StateMachine */
// Base holder for a table of automaton states; derived classes fill `states`.
class StateMachine : NoCopy
 {
  protected:

   DynArray<State> states;

  public:

   // Defaulted special members (the file is C++17; `= default` is the idiomatic
   // spelling of the previous empty bodies).
   explicit StateMachine() = default;

   ~StateMachine() = default;

   // Read-only view over all states.
   PtrLen<const State> getStates() const { return Range(states); }
 };
/* class TopStateMachine */
// State machine built directly from the serialized language description:
// one State per Lang state, with `value` set to the state's final-set index.
class TopStateMachine : public StateMachine
 {
  private:

   PtrLen<const Lang::TypeDef::Final> finals;

   ulen atom_count;
   ulen element_count;

  private:

   // Copies one Lang transition into our compact Trans form.
   static void Set(Trans &obj,const Lang::TypeDef::State::Transition &t)
    {
     obj.eindex=t.element->index;
     obj.sindex=t.state->index;
    }

   // Fills one State from a Lang state: index, final index, transitions.
   static void Set(State &obj,ulen sindex,const Lang::TypeDef::State &s)
    {
     obj.sindex=sindex;
     obj.value=s.final->index;

     auto r=s.transitions.getRange();

     auto ptr=obj.trans.extend_raw(r.len).ptr;

     for(ulen i=0; i<r.len ;i++) Set(ptr[i],r[i]);
    }

  public:

   explicit TopStateMachine(const Lang::TypeDef::Lang &lang)
    {
     auto r=lang.states.getRange();

     auto base=states.extend_default(r.len).ptr;

     for(ulen i=0; i<r.len ;i++) Set(base[i],i,r[i]);

     finals=lang.finals.getRange();

     atom_count=lang.atoms.len;
     element_count=lang.elements.len;
    }

   ~TopStateMachine() {}

   PtrLen<const Lang::TypeDef::Final> getFinals() const { return finals; }

   ulen getAtomCount() const { return atom_count; }

   ulen getElementCount() const { return element_count; }
 };
/* class BottomStateMachine */
// Minimized machine: merges top-machine states by iterative partition
// refinement until the grouping stabilizes, after the state values have been
// remapped through `func`. `map` sends each top state index to its group.
class BottomStateMachine : public StateMachine
 {
   DynArray<ulen> map;

  private:

   // Comparison proxy: orders state indexes first by current group, then by
   // their transition lists with target states viewed through the grouping.
   struct Rec : CmpComparable<Rec>
    {
     const State *states;
     const ulen *groups;
     ulen ind;

     Rec(const State *states_,const ulen *groups_,ulen ind_) : states(states_),groups(groups_),ind(ind_) {}

     // cmp objects

     ulen getGroup() const { return groups[ind]; }

     PtrLen<const Trans> getTrans() const { return Range(states[ind].trans); }

     static CmpResult CmpTrans(const ulen *groups,Trans a,Trans b)
      {
       if( auto ret=Cmp(a.eindex,b.eindex) ) return ret;

       return Cmp(groups[a.sindex],groups[b.sindex]);
      }

     static CmpResult CmpTrans(const ulen *groups,PtrLen<const Trans> a,PtrLen<const Trans> b)
      {
       if( auto ret=Cmp(a.len,b.len) ) return ret;

       for(ulen i=0; i<a.len ;i++) if( auto ret=CmpTrans(groups,a[i],b[i]) ) return ret;

       return CmpEqual;
      }

     CmpResult objCmp(const Rec &obj) const
      {
       if( auto ret=Cmp(getGroup(),obj.getGroup()) ) return ret;

       return CmpTrans(groups,getTrans(),obj.getTrans());
      }
    };

  private:

   // Writes `ind` only if the slot is still unset (keeps the first member seen).
   static void SetIf(ulen &dst,ulen ind)
    {
     if( dst==MaxULen ) dst=ind;
    }

   // Copies transitions, remapping each target state through `maptop`.
   static void Set(DynArray<Trans> &trans,PtrLen<const Trans> top,const ulen maptop[])
    {
     auto tr=trans.extend_copy(top);

     for(auto &t : tr ) t.sindex=maptop[t.sindex];
    }

   // Builds the minimized state table: one state per group, value/transitions
   // taken from the first top state of that group (`section` records it).
   void build(PtrLen<const State> top,PtrLen<const ulen> values,PtrLen<const ulen> groups,ulen group_count)
    {
     // map

     map.extend_copy(groups);

     auto st=states.extend_default(group_count);

     // section

     DynArray<ulen> section(DoFill(group_count),MaxULen);

     auto sect=Range(section);

     for(ulen i=0; i<groups.len ;i++)
       {
        ulen j=groups[i];

        SetIf(sect[j],i);
       }

     // states

     for(ulen i=0; i<st.len ;i++)
       {
        State &s=st[i];
        ulen j=sect[i];

        s.sindex=i;
        s.value=values[j];

        Set(s.trans,Range(top[j].trans),groups.ptr);
       }
    }

  public:

   // `func` remaps each top state's value before refinement; refinement then
   // iterates sort-and-regroup until the group count stops changing.
   BottomStateMachine(const StateMachine &top,FuncType<ulen,ulen> func)
    {
     auto states=top.getStates();

     DynArray<ulen> new_value(states.len);

     for(ulen i=0; i<states.len ;i++) new_value[i]=func(states[i].value);

     // groups

     DynArray<ulen> buf_groups(DoCopy(states.len),new_value.getPtr());
     DynArray<ulen> buf_next(states.len);
     DynArray<ulen> buf_indexes(states.len);

     auto groups=Range(buf_groups);
     auto next_groups=Range(buf_next);
     auto indexes=Range(buf_indexes);

     // compress

     ulen group_count=0;

     for(;;)
       {
        for(ulen i=0; i<indexes.len ;i++) indexes[i]=i;

        auto by = [=] (ulen ind) { return Rec(states.ptr,groups.ptr,ind); } ;

        ulen g=0;

        auto func = [&] (PtrLen<ulen> list)
         {
          for(ulen ind : list ) next_groups[ind]=g;

          g++;
         } ;

        Algon::IncrSortThenApplyUniqueRangeBy(indexes,by,func);

        if( !Change(group_count,g) ) break;

        Printf(Con,"group count = #;\n",group_count);

        Swap(groups,next_groups);
       }

     Printf(Con,"final group count = #;\n\n",group_count);

     // final

     // Normalize so that top state 0 lands in group 0 (swap its group with 0).
     if( ulen g0=next_groups[0] )
       {
        for(ulen &g : next_groups )
          {
           if( g==0 ) g=g0;
           else if( g==g0 ) g=0;
          }
       }

     build(states,Range(new_value),next_groups,group_count);
    }

   ~BottomStateMachine() {}

   // Group (bottom state index) of the given top state.
   ulen mapTop(ulen top_sindex) const { return map[top_sindex]; }
 };
/* struct RedFinal1 */
// Reduction of a Final keeping (atom,rule) pairs, dropping pure-shift actions
// (atom present, no rule); used to group finals with identical action sets.
struct RedFinal1 : CmpComparable<RedFinal1>
 {
  struct RedAction : CmpComparable<RedAction>
   {
    ulen a1index;
    ulen r1index;

    RedAction(ulen a1index_,ulen r1index_) : a1index(a1index_),r1index(r1index_) {}

    // cmp objects

    CmpResult objCmp(const RedAction &obj) const
     {
      return AlphaCmp(a1index,obj.a1index,r1index,obj.r1index);
     }
   };

  DynArray<RedAction> actions;
  ulen index = MaxULen ;

  explicit RedFinal1(const Lang::TypeDef::Final &final)
   {
    for(auto action : final.actions.getRange() )
      {
       if( +action.atom && !action.rule ) continue;

       // Index 0 encodes "no atom" / "no rule" (shifted encoding).
       ulen a1index = +action.atom ? action.atom->index : 0 ;
       ulen r1index = +action.rule ? action.rule->index : 0 ;

       actions.append_fill(a1index,r1index);
      }

    Sort(Range(actions));
   }

  // cmp objects

  CmpResult objCmp(const RedFinal1 &obj) const
   {
    return RangeCmp(Range(actions),Range(obj.actions));
   }
 };

/* struct RedFinal2 */

// Reduction keeping only the distinct rule indexes of a Final.
// NOTE(review): unlike RedFinal1 this does not inherit CmpComparable — confirm
// CCore's Cmp() resolves for it (RefFinalList compares these via Cmp).
struct RedFinal2
 {
  DynArray<ulen> actions;
  ulen index = MaxULen ;

  explicit RedFinal2(const Lang::TypeDef::Final &final)
   {
    DynArray<ulen> temp;

    for(auto action : final.actions.getRange() )
      {
       if( !action.rule ) continue;

       ulen rindex = action.rule->index ;

       temp.append_fill(rindex);
      }

    Algon::SortThenApplyUnique(Range(temp), [&] (ulen rindex) { actions.append_copy(rindex); } );

    if( ulen len=actions.getLen() ; len>1 ) Printf(Con,"Multiple #;\n",len);
   }

  // cmp objects

  CmpResult objCmp(const RedFinal2 &obj) const
   {
    return RangeCmp(Range(actions),Range(obj.actions));
   }
 };

/* struct RedFinal3 */

// Coarsest reduction: only whether the Final has any rule action at all.
struct RedFinal3
 {
  bool has_rule = false ;
  ulen index = MaxULen ;

  explicit RedFinal3(const Lang::TypeDef::Final &final)
   {
    for(auto action : final.actions.getRange() )
      {
       if( !action.rule ) continue;

       has_rule=true;

       break;
      }
   }

  // cmp objects

  CmpResult objCmp(const RedFinal3 &obj) const
   {
    return Cmp(has_rule,obj.has_rule);
   }
 };
/* class RefFinalList<T> */
// Builds the reduction T of every Final, then assigns equal reductions the
// same compact index; map(findex) returns the reduced index of a final.
template <class T>
class RefFinalList : NoCopy
 {
   DynArray<T> list;

   // Pointer proxy so sorting compares the pointed-to reductions.
   struct Rec : CmpComparable<Rec>
    {
     T *ptr;

     // cmp objects

     CmpResult objCmp(const Rec &obj) const
      {
       return Cmp(*ptr,*obj.ptr);
      }
    };

  public:

   explicit RefFinalList(PtrLen<const Lang::TypeDef::Final> finals)
    : list(DoCast(finals.len),finals.ptr)
    {
     auto r=Range(list);

     DynArray<Rec> temp(DoRaw(r.len));

     auto dst=temp.getPtr();

     for(; +r ;++r,++dst) dst->ptr=r.ptr;

     // Equal reductions land in one run and share the next index.
     ulen index=0;

     Algon::SortThenApplyUniqueRange(Range(temp), [&] (PtrLen<Rec> list) { for(auto rec : list ) rec.ptr->index=index; index++; } );

     Printf(Con,"red final count = #;\n\n",index);
    }

   ~RefFinalList() {}

   ulen map(ulen findex) const { return list[findex].index; }
 };
/* CheckShift() */
// Verifies that a final's shift actions (atom, no rule) line up one-to-one
// with the state's atom transitions; prints a diagnostic on any mismatch.
// Relies on both lists being ordered by atom index.
void CheckShift(Lang::TypeDef::Final final,PtrLen<const Trans> trans,ulen atom_count)
 {
  for(auto action : final.actions.getRange() )
    {
     if( !action.atom || +action.rule ) continue;

     ulen aindex=action.atom->index;

     if( !trans )
       {
        Printf(Con,"CheckShift failed #; : #; null trans\n",final.index,aindex);

        return;
       }

     if( trans->eindex!=aindex )
       {
        Printf(Con,"CheckShift failed #; : #; != #;\n",final.index,aindex,trans->eindex);

        return;
       }

     ++trans;
    }

  // Leftover transitions on atoms (eindex < atom_count) mean extra shifts.
  if( +trans && trans->eindex<atom_count )
    {
     Printf(Con,"CheckShift failed #; : extra trans\n",final.index);
    }
 }

// Runs the shift check for every top state against its minimized counterpart.
void CheckShift(const TopStateMachine &top,const BottomStateMachine &bottom)
 {
  auto states=top.getStates();
  auto finals=top.getFinals();
  auto bottom_states=bottom.getStates();

  for(ulen i=0; i<states.len ;i++)
    CheckShift(finals[states[i].value],Range(bottom_states[bottom.mapTop(i)].trans),top.getAtomCount());
 }
/* Main() */
// Loads the serialized language description from `file_name` and builds four
// minimized machines under successively coarser final reductions, checking
// shift consistency for the trivial reduction.
void Main(StrLen file_name)
 {
  Lang::DataMap data(file_name);

  data.sanity();

  auto &lang=data.getLang();

  Printf(Con,"state count = #;\n",lang.states.len);
  Printf(Con,"final count = #;\n\n",lang.finals.len);

  TopStateMachine top(lang);

  // All finals collapsed to one value: pure structure minimization.
  Printf(Con,"--- Bottom ---\n\n");

  BottomStateMachine bottom(top, [] (ulen) { return 0; } );

  CheckShift(top,bottom);

  Printf(Con,"--- Bottom1 ---\n\n");

  RefFinalList<RedFinal1> redlist1(top.getFinals());

  BottomStateMachine bottom1(top, [&,states=top.getStates()] (ulen sindex) { return redlist1.map(states[sindex].value); } );

  Printf(Con,"--- Bottom2 ---\n\n");

  RefFinalList<RedFinal2> redlist2(top.getFinals());

  BottomStateMachine bottom2(top, [&,states=top.getStates()] (ulen sindex) { return redlist2.map(states[sindex].value); } );

  Printf(Con,"--- Bottom3 ---\n\n");

  RefFinalList<RedFinal3> redlist3(top.getFinals());

  BottomStateMachine bottom3(top, [&,states=top.getStates()] (ulen sindex) { return redlist3.map(states[sindex].value); } );
 }
} // namespace App
/* main() */
using namespace App;
// Entry point: expects exactly one argument, the path of the language data
// file; returns 0 on success, 1 on wrong usage or any reported error.
int main(int argc,const char *argv[])
 {
  try
    {
     ReportException report;

     if( argc!=2 )
       {
        return 1;
       }

     Main(argv[1]);

     report.guard();
    }
  catch(CatchType)
    {
     return 1;
    }

  return 0;
 }
|
import { serialHandler } from './serial-handler.js';
/**
* UI specific code
* This code is only meant to handle the elements and interactions in this example.
* For the actual Web Serial API code, check `/src/serial-handler.ts`.
* If you're not familiar with TypeScript code, just ignore the `<TYPE>` and `:TYPE` parts.
*/
class WebSerialDemoApp {
connectButtonElem = <HTMLButtonElement>document.getElementById('connect-to-serial')!;
messageButtons = document.querySelectorAll<HTMLButtonElement>('.message-button')!;
messageInput = <HTMLInputElement>document.getElementById('message-input')!;
submitButton = <HTMLElement>document.getElementById('submit-button')!;
serialMessagesContainer = <HTMLOListElement>document.getElementById('serial-messages-container')!;
constructor() {
this.connectButtonElem.onclick = async () => {
await serialHandler.init();
this.messageButtons.forEach((button: HTMLButtonElement) => {
button.removeAttribute('disabled');
});
};
this.messageButtons.forEach((button: HTMLButtonElement) => {
button.onclick = () => {
serialHandler.write(String(button.dataset.value));
this.getSerialMessage();
}
});
}
async getSerialMessage() {
const now = new Date();
const listElement = document.createElement('li');
listElement.innerText = `Message received at ${now.getHours()}:${now.getMinutes()}.${now.getMilliseconds()}: ${await serialHandler.read()}`;
this.serialMessagesContainer.appendChild(listElement);
console.log(listElement)
}
}
new WebSerialDemoApp(); |
# Download the training/testing datasets from Google Drive into ./data/.
wget --no-check-certificate 'https://drive.google.com/uc?export=download&id=1dPHIl8ZnfDz_fxNd2ZeBYedTat2lfxcO' -O './data/training_label.txt'
# Fixed output path: was '.data/training_nolabel.txt' (missing slash), which
# wrote to a stray '.data' file/directory instead of ./data/.
wget --no-check-certificate 'https://drive.google.com/uc?export=download&id=1x1rJOX_ETqnOZjdMAbEE2pqIjRNa8xcc' -O './data/training_nolabel.txt'
wget --no-check-certificate 'https://drive.google.com/uc?export=download&id=16CtnQwSDCob9xmm6EdHHR7PNFNiOrQ30' -O './data/testing_data.txt'
|
#!/bin/bash
# this may not work if you add it to your $PATH, but that is not expected
SCRIPT_DIR="$(dirname $0)"
# path/name of jmeter binary
JMETER="${JMETER:=jmeter}"
# properties file to load
PROPERTIES_FILE="${PROPERTIES_FILE:=$SCRIPT_DIR/jmeter-docker.properties}"
if [[ $# -lt 1 ]]; then
echo "Usage: $0 <test> [extra arguments]"
echo 'Will run $JMETER [extra arguments] -t <test> -p $PROPERTIES_FILE'
echo "JMETER=$JMETER"
echo "PROPERTIES_FILE=$PROPERTIES_FILE"
exit 1
fi
test="$1"
shift
exec $JMETER $@ -t "$test" -q "$PROPERTIES_FILE"
|
<reponame>ekelly/yabs
import React, { useContext } from "react";
import { View, Text, StyleSheet, FlatList } from "react-native";
import { Context as BillContext, Transaction, selectTransactionParticipants } from "../context/BillContext";
import { ROUNDED_CORNER_RADIUS } from "../Constants";
import SwipeDeleteComponent from "./SwipeDeleteComponent";
interface TransactionRowProps {
transaction: Transaction
}
// Renders one transaction as a swipe-to-delete list of its participants'
// adjustments. Only participants that have an adjustment entry on this
// transaction are rendered.
const TransactionRow = ({ transaction }: TransactionRowProps) => {
  const { state, actions: { deleteTransaction }} = useContext(BillContext);
  const transactionParticipants = selectTransactionParticipants(state, transaction.id);

  return (
    <SwipeDeleteComponent onDelete={() => deleteTransaction(transaction.id)}>
      <FlatList
        data={transactionParticipants}
        renderItem={({ item }) => {
          let adjustment = transaction.adjustments.find(adjustment => adjustment.id === item.id);
          if (adjustment) {
            let amt = adjustment.adjustAmount;
            {/* Prefix positive amounts with "+"; negatives already carry "-". */}
            return (<View style={styles.row}>
              <Text style={styles.rowText}>{item.name}: {amt > 0 ? "+" : ""}{amt}</Text>
            </View>);
          } else {
            // No adjustment for this participant — render nothing.
            return null;
          }
        }}
        keyExtractor={transactionParticipant => transactionParticipant.id}
      />
    </SwipeDeleteComponent>
  );
}
// Styles for the participant adjustment rows.
const styles = StyleSheet.create({
  row: {
    padding: 10,
    margin: 10,
    borderWidth: 1,
    borderRadius: ROUNDED_CORNER_RADIUS
  },
  rowText: {
    fontSize: 20
  }
});
export default TransactionRow; |
import tables as PT
# This describes indexes in the "pt_undistorted" tuple. These are
# used in MainBrain.py, flydra_tracker.py, and kalmanize.py
# Per-point feature columns of the pt_undistorted tuple.
PT_TUPLE_IDX_X = 0
PT_TUPLE_IDX_Y = 1
PT_TUPLE_IDX_AREA = 2
PT_TUPLE_IDX_SLOPE = 3
PT_TUPLE_IDX_ECCENTRICITY = 4
# 3D coordinates of plane formed by camera center and slope line
# centered on object.
PT_TUPLE_IDX_P1 = 5
PT_TUPLE_IDX_P2 = 6
PT_TUPLE_IDX_P3 = 7
PT_TUPLE_IDX_P4 = 8
PT_TUPLE_IDX_LINE_FOUND = 9
PT_TUPLE_IDX_FRAME_PT_IDX = 10
PT_TUPLE_IDX_CUR_VAL_IDX = 11
PT_TUPLE_IDX_MEAN_VAL_IDX = 12
PT_TUPLE_IDX_SUMSQF_VAL_IDX = 13

# Indexes of the same pixel-value fields in the on-the-wire tuple ordering.
# NOTE(review): wire ordering differs from the pt_undistorted ordering above —
# confirm against the sending side before relying on these.
WIRE_ORDER_CUR_VAL_IDX = 6
WIRE_ORDER_MEAN_VAL_IDX = 7
WIRE_ORDER_SUMSQF_VAL_IDX = 8
# 2D data format for PyTables:
class Info2D(PT.IsDescription):
    """PyTables row description for a single 2D feature detection."""

    camn = PT.UInt16Col(pos=0)  # camera number
    frame = PT.Int64Col(pos=1)  # frame number
    timestamp = PT.FloatCol(
        pos=2
    )  # when the image trigger happened (returned by timestamp modeler on MainBrain)
    cam_received_timestamp = PT.FloatCol(
        pos=3
    )  # when the image was acquired by flydra software (on camera computer)
    x = PT.Float32Col(pos=4)  # 2D image coordinates of the detection
    y = PT.Float32Col(pos=5)
    area = PT.Float32Col(pos=6)
    slope = PT.Float32Col(pos=7)
    eccentricity = PT.Float32Col(pos=8)
    frame_pt_idx = PT.UInt8Col(
        pos=9
    )  # index of point if there were > 1 points in frame
    cur_val = PT.UInt8Col(pos=10)
    mean_val = PT.Float32Col(pos=11)
    sumsqf_val = PT.Float32Col(pos=12)  # estimate of <x^2> (running_sumsqf)
class TextLogDescription(PT.IsDescription):
    """PyTables row description for free-form text log messages."""

    mainbrain_timestamp = PT.FloatCol(pos=0)
    cam_id = PT.StringCol(255, pos=1)
    host_timestamp = PT.FloatCol(pos=2)
    message = PT.StringCol(255, pos=3)
class CamSyncInfo(PT.IsDescription):
    """PyTables row mapping a camera id to its camera number and host."""

    cam_id = PT.StringCol(256, pos=0)
    camn = PT.UInt16Col(pos=1)
    hostname = PT.StringCol(2048, pos=2)
class HostClockInfo(PT.IsDescription):
    """PyTables row for one clock-synchronization measurement with a remote host."""

    remote_hostname = PT.StringCol(255, pos=0)
    start_timestamp = PT.FloatCol(pos=1)
    remote_timestamp = PT.FloatCol(pos=2)
    stop_timestamp = PT.FloatCol(pos=3)
class TriggerClockInfo(PT.IsDescription):
    """PyTables row for one trigger-device clock measurement."""

    start_timestamp = PT.FloatCol(pos=0)
    framecount = PT.Int64Col(pos=1)
    tcnt = PT.UInt16Col(pos=2)
    stop_timestamp = PT.FloatCol(pos=3)
class MovieInfo(PT.IsDescription):
    """PyTables row recording a saved movie file and its approximate frame span."""

    cam_id = PT.StringCol(16, pos=0)
    filename = PT.StringCol(255, pos=1)
    approx_start_frame = PT.Int64Col(pos=2)
    approx_stop_frame = PT.Int64Col(pos=3)
class ExperimentInfo(PT.IsDescription):
    """PyTables row holding an experiment's identifying UUID."""

    uuid = PT.StringCol(32, pos=0)
|
<filename>node_modules/react-icons-kit/md/ic_post_add.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_post_add = void 0;
// Icon data for the Material "post_add" glyph, consumed by react-icons-kit.
// Each node mirrors an SVG element; the generator duplicates every leaf node
// one level deep, and that structure is reproduced here unchanged.
var ic_post_add = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "rect",
      "attribs": { "fill": "none", "height": "24", "width": "24" },
      "children": [{
        "name": "rect",
        "attribs": { "fill": "none", "height": "24", "width": "24" },
        "children": []
      }]
    }]
  }, {
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "g",
      "attribs": {},
      "children": [{ "name": "g", "attribs": {}, "children": [] }]
    }, {
      "name": "g",
      "attribs": {},
      "children": [{
        "name": "path",
        "attribs": { "d": "M17,19.22H5V7h7V5H5C3.9,5,3,5.9,3,7v12c0,1.1,0.9,2,2,2h12c1.1,0,2-0.9,2-2v-7h-2V19.22z" },
        "children": [{
          "name": "path",
          "attribs": { "d": "M17,19.22H5V7h7V5H5C3.9,5,3,5.9,3,7v12c0,1.1,0.9,2,2,2h12c1.1,0,2-0.9,2-2v-7h-2V19.22z" },
          "children": []
        }]
      }, {
        "name": "path",
        "attribs": { "d": "M19,2h-2v3h-3c0.01,0.01,0,2,0,2h3v2.99c0.01,0.01,2,0,2,0V7h3V5h-3V2z" },
        "children": [{
          "name": "path",
          "attribs": { "d": "M19,2h-2v3h-3c0.01,0.01,0,2,0,2h3v2.99c0.01,0.01,2,0,2,0V7h3V5h-3V2z" },
          "children": []
        }]
      }, {
        "name": "rect",
        "attribs": { "height": "2", "width": "8", "x": "7", "y": "9" },
        "children": [{
          "name": "rect",
          "attribs": { "height": "2", "width": "8", "x": "7", "y": "9" },
          "children": []
        }]
      }, {
        "name": "polygon",
        "attribs": { "points": "7,12 7,14 15,14 15,12 12,12" },
        "children": [{
          "name": "polygon",
          "attribs": { "points": "7,12 7,14 15,14 15,12 12,12" },
          "children": []
        }]
      }, {
        "name": "rect",
        "attribs": { "height": "2", "width": "8", "x": "7", "y": "15" },
        "children": [{
          "name": "rect",
          "attribs": { "height": "2", "width": "8", "x": "7", "y": "15" },
          "children": []
        }]
      }]
    }]
  }]
};
exports.ic_post_add = ic_post_add;
# plugin.sh - Devstack extras script to install tacker
#
# Devstack calls this hook with ($1, $2) phase pairs such as
# ("stack", "install"), ("unstack", ""), ("clean", "").

# Save trace setting
XTRACE=$(set +o | grep xtrace)
set -o xtrace

echo_summary "tacker's plugin.sh was called with args $1 and $2 ..."

# Pull in the tacker devstack library (install_tacker, start_tacker, etc.).
. $DEST/tacker/devstack/lib/tacker
(set -o posix; set)

# check for service enabled
if is_service_enabled tacker; then
    if [[ "$1" == "stack" && "$2" == "install" ]]; then
        # Perform installation of service source
        echo_summary "Installing Tacker"
        install_tacker
    elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then
        # Configure after the other layer 1 and 2 services have been configured
        echo_summary "Configuring Tacker"
        configure_tacker
        create_tacker_accounts
    elif [[ "$1" == "stack" && "$2" == "extra" ]]; then
        # Initialize and start the tacker service
        echo_summary "Initializing Tacker"
        init_tacker
        echo_summary "Starting Tacker API and conductor"
        start_tacker
        if is_service_enabled horizon; then
            echo_summary "Installing tacker horizon"
            tacker_horizon_install
        fi
        # "all" mode additionally prepares networks, images and the default VIM.
        if [[ "${TACKER_MODE}" == "all" ]]; then
            echo_summary "Modifying Heat policy.json file"
            modify_heat_flavor_policy_rule
            echo_summary "Setup initial tacker network"
            tacker_create_initial_network
            echo_summary "Check and download images for tacker initial"
            tacker_check_and_download_images
            echo_summary "Registering default VIM"
            tacker_register_default_vim
            if is_service_enabled ceilometer; then
                echo_summary "Configure maintenance event types"
                configure_maintenance_event_types
            fi
        fi
    fi

    if [[ "$1" == "unstack" ]]; then
        # Shut down tacker services
        if is_service_enabled horizon; then
            echo_summary "Uninstall tacker horizon"
            tacker_horizon_uninstall
        fi
        stop_tacker
    fi

    if [[ "$1" == "clean" ]]; then
        # Remove state and transient data
        # Remember clean.sh first calls unstack.sh
        cleanup_tacker
    fi
fi

# Restore xtrace
$XTRACE

# Tell emacs to use shell-script-mode
## Local variables:
## mode: shell-script
## End:
#!/bin/bash
# Create a SNOM phone provisioning XML file (<MAC>.xml) with one SIP identity
# per extension given on the command line.
#
# Fixes: quoted $0/"$xmlfile" expansions (word-splitting/glob safety),
# [ -n "$userhost" ] instead of the fragile [ $userhost ], rm -f instead of
# rm -rf on a plain file, $() instead of backticks.

# defaults
progname="$(basename "$0")"
macprefix=""
realm="asterisk"
password=""
addlxml=""
userhost=""
identities=""
numidentity=0
pflag=false

usage="$progname: Create SNOM provisioning XML file.
Usage: $progname [-x <macprefix>] [-r <realm>] [-p <password>] [-i <additional XML text>]
[-g <user_host_ip_or_name>] <macsuffix> <extension> [<extension>...]
-p: SIP password
-x: macprefix (default: <none>)
-r: realm of SIP server (default: asterisk)
-g: user_host (Snom parameter, SIP registration host)
-a: include additional XML text in config file phone settings
-h: print help and exit
Examples: \"$progname -x 000412 -r pbxrealm -p 84aa83 -g 190.187.43.2 3BFA37 301\" generates 0004123BFA37.xml
\"$progname -p pswd -a '<dhcp perm=\"\">on</dhcp>' 0004133EF23A 7004 7010\" generates 00041333EF23A.xml"

while getopts :x:r:p:a:g:h flag
do
    case $flag in
        p) pflag=true; password="$OPTARG";;
        x) macprefix="$OPTARG";;
        r) realm="$OPTARG";;
        g) userhost="$OPTARG";;
        a) addlxml="$OPTARG
";;
        h) echo "$usage"; exit;;
        \?) echo "Invalid option -$OPTARG"; echo "$usage"; exit 1;;
    esac
done
shift $(( OPTIND - 1 ))  # shift past the last flag or argument

if [ $# -lt 2 ]
then
    echo "$progname: Error: insufficient arguments."
    exit 1
fi
if ! $pflag
then
    echo "$progname: Error: password must be specified."
    exit 1
fi

# Output file name is the uppercased full MAC plus ".xml".
mac="$1"
xmlfile=$(echo -n "$macprefix$mac" | tr '[:lower:]' '[:upper:]').xml
echo -n "File $xmlfile. "
shift

# Build one <user_*> identity block per extension; the hash is the SIP digest
# MD5 of "user:realm:password".
while [ $# -gt 0 ]
do
    numidentity=$(( numidentity + 1 ))
    extension="$1"
    hash=$(echo -n "$extension:$realm:$password" | md5sum | awk '{print $1}')
    if [ -n "$userhost" ]; then
        identities="$identities <user_host idx=\"$numidentity\" perm=\"\">$userhost</user_host>
"
    fi
    identities="$identities\
<user_realname idx=\"$numidentity\" perm=\"\">$extension</user_realname>
<user_name idx=\"$numidentity\" perm=\"\">$extension</user_name>
<user_hash idx=\"$numidentity\" perm=\"\">$hash</user_hash>
"
    echo -n "Identity $numidentity extension $extension hash $extension:$realm:$password. "
    shift
done
echo

rm -f "$xmlfile"
cat > "$xmlfile" << _EOF_
<?xml version="1.0" encoding="utf-8"?>
<settings>
<phone-settings e="2">
$identities$addlxml </phone-settings>
</settings>
_EOF_
|
<filename>pkg/v2/tkr/util/testdata/gendata.go
// Copyright 2022 VMware, Inc. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package testdata
import (
"fmt"
"reflect"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/util/rand"
clusterv1 "sigs.k8s.io/cluster-api/api/v1beta1"
"sigs.k8s.io/cluster-api/util/conditions"
runv1 "github.com/vmware-tanzu/tanzu-framework/apis/run/v1alpha3"
"github.com/vmware-tanzu/tanzu-framework/pkg/v2/tkr/resolver/data"
"github.com/vmware-tanzu/tanzu-framework/pkg/v2/tkr/util/version"
)
// Canned OS descriptors drawn from when generating random OSImages.
var (
	osUbuntu = runv1.OSInfo{
		Type:    "linux",
		Name:    "ubuntu",
		Version: "20.04",
		Arch:    "amd64",
	}
	osAmazon = runv1.OSInfo{
		Type:    "linux",
		Name:    "amazon",
		Version: "2",
		Arch:    "amd64",
	}
	osPhoton = runv1.OSInfo{
		Type:    "linux",
		Name:    "photon",
		Version: "3",
		Arch:    "amd64",
	}
)

// Pools used to pick random OS and AWS-style region values.
var osInfos = []runv1.OSInfo{osUbuntu, osAmazon, osPhoton}
var regionPrefixes = []string{"us", "ap", "eu", "sa"}
var regionDirections = []string{"central", "north", "south", "west", "east"}

// maxMDs bounds the number of machine deployments per generated query.
const maxMDs = 5
// ChooseK8sVersionPrefix returns a randomly chosen prefix of the given
// version string, as produced by version.Prefixes.
func ChooseK8sVersionPrefix(v string) string {
	prefixSet := version.Prefixes(v)
	prefixes := make([]string, 0, len(prefixSet))
	for prefix := range prefixSet {
		prefixes = append(prefixes, prefix)
	}
	pick := rand.Intn(len(prefixes))
	return prefixes[pick]
}
// ChooseK8sVersion returns a random key (Kubernetes version) of the given map.
func ChooseK8sVersion(osImagesByK8sVersion map[string]data.OSImages) string {
	versions := make([]string, 0, len(osImagesByK8sVersion))
	for k8sVersion := range osImagesByK8sVersion {
		versions = append(versions, k8sVersion)
	}
	pick := rand.Intn(len(versions))
	return versions[pick]
}
// ChooseK8sVersionFromTKRs returns the Kubernetes version of a randomly
// chosen TKR. Panics if tkrs is empty (rand.Intn(0) panics).
func ChooseK8sVersionFromTKRs(tkrs data.TKRs) string {
	ks := make([]string, 0, len(tkrs))
	for _, tkr := range tkrs {
		ks = append(ks, tkr.Spec.Kubernetes.Version)
	}
	return ks[rand.Intn(len(ks))]
}
// GenOSImages generates numOSImages random OSImages keyed by name, each
// targeting one of the given Kubernetes versions. If GenOSImage happens to
// produce duplicate names, the resulting map may hold fewer entries.
func GenOSImages(k8sVersions []string, numOSImages int) data.OSImages {
	result := make(data.OSImages, numOSImages)
	// idiomatic counted loop (was `for range make([]struct{}, n)`)
	for i := 0; i < numOSImages; i++ {
		osImage := GenOSImage(k8sVersions)
		result[osImage.Name] = osImage
	}
	return result
}
var osImageAPIVersion, osImageKind = runv1.GroupVersion.WithKind(reflect.TypeOf(runv1.OSImage{}).Name()).ToAPIVersionAndKind()
// GenOSImage generates one random OSImage: a random Kubernetes version from
// k8sVersions, a random OS from osInfos, a random AMI reference, and randomly
// generated (possibly empty) False conditions.
func GenOSImage(k8sVersions []string) *runv1.OSImage {
	k8sVersion := k8sVersions[rand.Intn(len(k8sVersions))]
	os := osInfos[rand.Intn(len(osInfos))]
	image := GenAMIInfo()
	return &runv1.OSImage{
		TypeMeta: metav1.TypeMeta{
			Kind:       osImageKind,
			APIVersion: osImageAPIVersion,
		},
		ObjectMeta: metav1.ObjectMeta{Name: GenOSImageName(k8sVersion, os, image)},
		Spec: runv1.OSImageSpec{
			KubernetesVersion: k8sVersion,
			OS:                os,
			Image:             image,
		},
		Status: runv1.OSImageStatus{
			Conditions: GenConditions(),
		},
	}
}
// GenConditions returns a randomly generated condition list: for each of the
// Compatible and Valid condition types, a False condition is included with
// 20% probability (see GenFalseCondition). May return nil.
func GenConditions() []clusterv1.Condition {
	var result []clusterv1.Condition
	for _, condType := range []clusterv1.ConditionType{runv1.ConditionCompatible, runv1.ConditionValid} {
		if cond := GenFalseCondition(condType); cond != nil {
			result = append(result, *cond)
		}
	}
	return result
}
// GenFalseCondition returns, with 20% probability, a False condition of the
// given type with a random reason and message; otherwise nil.
func GenFalseCondition(condType clusterv1.ConditionType) *clusterv1.Condition {
	if rand.Intn(10) < 2 { // 20%
		return conditions.FalseCondition(condType, rand.String(10), clusterv1.ConditionSeverityWarning, rand.String(20))
	}
	return nil
}
// GenAMIInfo generates a random "ami"-type machine image reference with a
// random id, region, and a nested boolean field to exercise deep refs.
func GenAMIInfo() runv1.MachineImageInfo {
	return runv1.MachineImageInfo{
		Type: "ami",
		Ref: map[string]interface{}{
			"id":     rand.String(10),
			"region": GenRegion(),
			"foo": map[string]interface{}{
				"bar": rand.Intn(2) == 1,
			},
		},
	}
}
// GenRegion returns a random AWS-style region name, e.g. "us-west-2"
// (numeric suffix in 0..2).
func GenRegion() string {
	return fmt.Sprintf("%s-%s-%v", regionPrefixes[rand.Intn(len(regionPrefixes))], regionDirections[rand.Intn(len(regionDirections))], rand.Intn(3))
}
// GenOSImageName builds an OSImage name from the version label, image type,
// OS name/version, and a random suffix for uniqueness.
func GenOSImageName(k8sVersion string, os runv1.OSInfo, image runv1.MachineImageInfo) string {
	return fmt.Sprintf("%s-%s-%s-%s-%s", version.Label(k8sVersion), image.Type, os.Name, os.Version, rand.String(5))
}
// SortOSImagesByK8sVersion groups the given OSImages by their Kubernetes
// version, keyed by version string.
func SortOSImagesByK8sVersion(allOSImages data.OSImages) map[string]data.OSImages {
	grouped := make(map[string]data.OSImages, len(allOSImages))
	for _, osImage := range allOSImages {
		k8sVersion := osImage.Spec.KubernetesVersion
		if _, ok := grouped[k8sVersion]; !ok {
			grouped[k8sVersion] = data.OSImages{}
		}
		grouped[k8sVersion][osImage.Name] = osImage
	}
	return grouped
}
// GenTKRs generates numTKRs random TanzuKubernetesReleases keyed by name,
// drawing Kubernetes versions from osImagesByK8sVersion. If GenTKR happens
// to produce duplicate names, the resulting map may hold fewer entries.
func GenTKRs(numTKRs int, osImagesByK8sVersion map[string]data.OSImages) data.TKRs {
	result := make(data.TKRs, numTKRs)
	// idiomatic counted loop (was `for range make([]struct{}, n)`)
	for i := 0; i < numTKRs; i++ {
		tkr := GenTKR(osImagesByK8sVersion)
		result[tkr.Name] = tkr
	}
	return result
}
var tkrAPIVersion, tkrKind = runv1.GroupVersion.WithKind(reflect.TypeOf(runv1.TanzuKubernetesRelease{}).Name()).ToAPIVersionAndKind()
// GenTKR generates a random TanzuKubernetesRelease for one of the Kubernetes
// versions in osImagesByK8sVersion, referencing a random subset of that
// version's OSImages. The TKR version is the k8s version plus "-tkg.1".."-tkg.3".
func GenTKR(osImagesByK8sVersion map[string]data.OSImages) *runv1.TanzuKubernetesRelease {
	k8sVersion := ChooseK8sVersion(osImagesByK8sVersion)
	tkrSuffix := fmt.Sprintf("-tkg.%v", rand.Intn(3)+1)
	v := k8sVersion + tkrSuffix
	return &runv1.TanzuKubernetesRelease{
		TypeMeta: metav1.TypeMeta{
			APIVersion: tkrAPIVersion,
			Kind:       tkrKind,
		},
		ObjectMeta: metav1.ObjectMeta{
			Name: version.Label(v),
		},
		Spec: runv1.TanzuKubernetesReleaseSpec{
			Version:  v,
			OSImages: OsImageRefs(RandSubsetOfOSImages(osImagesByK8sVersion[k8sVersion])),
			Kubernetes: runv1.KubernetesSpec{
				Version: k8sVersion,
			},
		},
	}
}
// OsImageRefs converts OSImages to local object references by name.
// Returns nil (not an empty slice) for an empty input.
func OsImageRefs(osImages data.OSImages) []corev1.LocalObjectReference {
	if len(osImages) == 0 {
		return nil
	}
	result := make([]corev1.LocalObjectReference, 0, len(osImages))
	for _, osImage := range osImages {
		result = append(result, corev1.LocalObjectReference{Name: osImage.Name})
	}
	return result
}
// RandSubsetOfOSImages returns a random subset of osImages: each entry is
// kept independently with 50% probability (so the result may be empty).
func RandSubsetOfOSImages(osImages data.OSImages) data.OSImages {
	result := make(data.OSImages, len(osImages))
	for name, osImage := range osImages {
		if rand.Intn(2) == 1 {
			result[name] = osImage
		}
	}
	return result
}
// RandSubsetOfTKRs returns a random subset of tkrs: each entry is kept
// independently with 50% probability (so the result may be empty).
func RandSubsetOfTKRs(tkrs data.TKRs) data.TKRs {
	result := make(data.TKRs, len(tkrs))
	for name, tkr := range tkrs {
		if rand.Intn(2) == 1 {
			result[name] = tkr
		}
	}
	return result
}
// GenQueryAllForK8sVersion builds a resolver query (control plane plus a
// random set of machine deployments) that matches everything for the given
// Kubernetes version prefix.
func GenQueryAllForK8sVersion(k8sVersionPrefix string) data.Query {
	return data.Query{
		ControlPlane:       GenOSImageQueryAllForK8sVersion(k8sVersionPrefix),
		MachineDeployments: GenMDQueriesAllForK8sVersion(k8sVersionPrefix),
	}
}
// GenMDQueriesAllForK8sVersion generates 1..maxMDs machine-deployment queries
// keyed by random names, each matching everything for the version prefix.
func GenMDQueriesAllForK8sVersion(k8sVersionPrefix string) map[string]data.OSImageQuery {
	numMDs := rand.Intn(maxMDs) + 1
	result := make(map[string]data.OSImageQuery, numMDs)
	for range make([]struct{}, numMDs) {
		result[rand.String(rand.IntnRange(8, 12))] = GenOSImageQueryAllForK8sVersion(k8sVersionPrefix)
	}
	return result
}
// GenOSImageQueryAllForK8sVersion builds an OSImage query for the version
// prefix with match-all TKR and OSImage label selectors.
func GenOSImageQueryAllForK8sVersion(k8sVersionPrefix string) data.OSImageQuery {
	return data.OSImageQuery{
		K8sVersionPrefix: k8sVersionPrefix,
		TKRSelector:      labels.Everything(),
		OSImageSelector:  labels.Everything(),
	}
}
|
<filename>packages/deprecated-prism-ui/components/icon/svg/checkbox-unselected.tsx
import React from 'react';
// Static "unselected checkbox" icon: a 15x15 rounded rectangle with a grey
// border and white fill. Size and viewBox are overridable via props.
const SVG = ({
  height = '100%',
  width = '100%',
  className = '',
  viewBox = '0 0 16 16',
}) => (
  <svg
    className={className}
    focusable="false"
    height={height}
    version="1.1"
    viewBox={viewBox}
    width={width}
    x="0px"
    xmlSpace="preserve"
    xmlns="http://www.w3.org/2000/svg"
    xmlnsXlink="http://www.w3.org/1999/xlink"
    y="0px"
  >
    {/* Generator: Sketch 50 (54983) - http://www.bohemiancoding.com/sketch */}
    <title>Rectangle 29</title>
    <desc>Created with Sketch.</desc>
    <defs />
    <g
      fill="none"
      fillRule="evenodd"
      id="Symbols"
      stroke="none"
      strokeWidth="1"
    >
      <g
        fill="#FFFFFF"
        id="Country-List"
        stroke="#D4D4D4"
        transform="translate(-16.000000, -12.000000)"
      >
        <rect
          height="15"
          id="Rectangle-29"
          rx="3"
          width="15"
          x="16.5"
          y="12.5"
        />
      </g>
    </g>
  </svg>
);

export default SVG;
|
#!/usr/bin/bash
# Bootstrap the image-gallery EC2 instance: fetch the latest production
# launch script from S3 and execute it.
export IMAGE_GALLERY_BOOTSTRAP_VERSION="1.0"
# Abort instead of executing a stale or partial script if the download fails.
if ! aws s3 cp s3://edu.au.cc.kats-image-gallery-config/ec2-prod-latest.sh ./; then
    echo "ERROR: failed to download ec2-prod-latest.sh from S3" >&2
    exit 1
fi
/usr/bin/bash ec2-prod-latest.sh
|
<reponame>singulart/Pxls<gh_stars>0
package space.pxls.server;
import com.google.gson.JsonObject;
import io.undertow.Handlers;
import io.undertow.Undertow;
import io.undertow.server.handlers.AllowedMethodsHandler;
import io.undertow.server.handlers.form.EagerFormParsingHandler;
import io.undertow.server.handlers.resource.ClassPathResourceManager;
import io.undertow.server.handlers.resource.FileResourceManager;
import io.undertow.util.Headers;
import io.undertow.util.Methods;
import io.undertow.websockets.core.AbstractReceiveListener;
import io.undertow.websockets.core.BufferedTextMessage;
import io.undertow.websockets.core.WebSocketChannel;
import io.undertow.websockets.core.WebSockets;
import io.undertow.websockets.spi.WebSocketHttpExchange;
import java.util.concurrent.ConcurrentMap;
import space.pxls.App;
import space.pxls.server.packets.chat.*;
import space.pxls.server.packets.socket.*;
import space.pxls.tasks.UserAuthedTask;
import space.pxls.user.User;
import space.pxls.util.*;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.function.Predicate;
public class UndertowServer {
    /** Port the HTTP/websocket listener binds to. */
    private final int port;
    /** Dispatches parsed websocket packets. */
    private final PacketHandler socketHandler;
    /** Handles the plain HTTP endpoints. */
    private final WebHandler webHandler;
    /** Authenticated users currently online, keyed by user id. */
    private final ConcurrentHashMap<Integer, User> authedUsers = new ConcurrentHashMap<>();
    /** All open websocket connections, authenticated or not. */
    private final Set<PxlsWebSocketConnection> connections;
    private Undertow server;
    /** Runs per-user post-auth tasks off the IO threads. */
    private final ExecutorService userTaskExecutor = Executors.newFixedThreadPool(4);
    /**
     * Creates the server for the given port. Nothing listens until
     * {@link #start()} is called.
     */
    public UndertowServer(int port) {
        this.port = port;
        webHandler = new WebHandler();
        socketHandler = new PacketHandler(this);
        connections = ConcurrentHashMap.newKeySet();
    }
    /**
     * Builds the full HTTP/websocket routing table and starts the Undertow
     * listener on 0.0.0.0:{@code port}. Every route is gated on a permission;
     * rate-limited endpoints read their limits from the application config.
     */
    public void start() {
        // Prefix/exact path routes, each gated on a permission string.
        var pathHandler = new PxlsPathHandler()
                .addPermGatedExactPath("/ws", "board.socket", Handlers.websocket(this::webSocketHandler))
                .addPermGatedPrefixPath("/ws", "board.socket", Handlers.websocket(this::webSocketHandler))
                .addPermGatedPrefixPath("/info", "board.info", webHandler::info)
                .addPermGatedPrefixPath("/boarddata", "board.data", webHandler::data)
                .addPermGatedPrefixPath("/heatmap", "board.data", webHandler::heatmap)
                .addPermGatedPrefixPath("/virginmap", "board.data", webHandler::virginmap)
                .addPermGatedPrefixPath("/placemap", "board.data", webHandler::placemap)
                .addPermGatedPrefixPath("/initialboarddata", "board.data", webHandler::initialdata)
                .addPermGatedPrefixPath("/auth", "user.auth", new RateLimitingHandler(webHandler::auth, "http:auth", (int) App.getConfig().getDuration("server.limits.auth.time", TimeUnit.SECONDS), App.getConfig().getInt("server.limits.auth.count")))
                .addPermGatedPrefixPath("/signin", "user.auth", webHandler::signIn)
                .addPermGatedPrefixPath("/signup", "user.auth", new RateLimitingHandler(webHandler::signUp, "http:signUp", (int) App.getConfig().getDuration("server.limits.signup.time", TimeUnit.SECONDS), App.getConfig().getInt("server.limits.signup.count")))
                .addPermGatedPrefixPath("/logout", "user.auth", webHandler::logout)
                .addPermGatedPrefixPath("/lookup", "board.lookup", new RateLimitingHandler(webHandler::lookup, "http:lookup", (int) App.getConfig().getDuration("server.limits.lookup.time", TimeUnit.SECONDS), App.getConfig().getInt("server.limits.lookup.count")))
                .addPermGatedPrefixPath("/report", "board.report", webHandler::report)
                .addPermGatedPrefixPath("/reportChat", "chat.report", webHandler::chatReport)
                .addPermGatedPrefixPath("/whoami", "user.auth", webHandler::whoami)
                .addPermGatedPrefixPath("/users", "user.online", webHandler::users)
                .addPermGatedPrefixPath("/chat/setColor", "user.chatColorChange", new RateLimitingHandler(webHandler::chatColorChange, "http:chatColorChange", (int) App.getConfig().getDuration("server.limits.chatColorChange.time", TimeUnit.SECONDS), App.getConfig().getInt("server.limits.chatColorChange.count")))
                .addPermGatedPrefixPath("/setDiscordName", "user.discordNameChange", new RateLimitingHandler(webHandler::discordNameChange, "http:discordName", (int) App.getConfig().getDuration("server.limits.discordNameChange.time", TimeUnit.SECONDS), App.getConfig().getInt("server.limits.discordNameChange.count")))
                // Admin-only endpoints.
                .addPermGatedPrefixPath("/admin", "user.admin", Handlers.resource(new ClassPathResourceManager(App.class.getClassLoader(), "public/admin/")).setCacheTime(10))
                .addPermGatedPrefixPath("/admin/ban", "user.ban", webHandler::ban)
                .addPermGatedPrefixPath("/admin/unban", "user.unban", webHandler::unban)
                .addPermGatedPrefixPath("/admin/permaban", "user.permaban", webHandler::permaban)
                .addPermGatedPrefixPath("/admin/shadowban", "user.shadowban", webHandler::shadowban)
                .addPermGatedPrefixPath("/admin/chatban", "chat.ban", webHandler::chatban)
                .addPermGatedPrefixPath("/admin/check", "board.check", webHandler::check)
                .addPermGatedPrefixPath("/admin/delete", "chat.delete", webHandler::deleteChatMessage)
                .addPermGatedPrefixPath("/admin/chatPurge", "chat.purge", webHandler::chatPurge)
                .addPermGatedPrefixPath("/execNameChange", "user.namechange", webHandler::execNameChange)
                .addPermGatedPrefixPath("/admin/flagNameChange", "user.namechange.flag", webHandler::flagNameChange)
                .addPermGatedPrefixPath("/admin/forceNameChange", "user.namechange.force", webHandler::forceNameChange)
                .addPermGatedPrefixPath("/admin/faction/edit", "faction.edit.other", new JsonReader(webHandler::adminEditFaction))
                .addPermGatedPrefixPath("/admin/faction/delete", "faction.delete.other", new JsonReader(webHandler::adminDeleteFaction))
                .addPermGatedPrefixPath("/admin/setFactionBlocked", "faction.setblocked", new AllowedMethodsHandler(webHandler::setFactionBlocked, Methods.POST))
                .addPermGatedPrefixPath("/createNotification", "notification.create", webHandler::createNotification)
                .addPermGatedPrefixPath("/sendNotificationToDiscord", "notification.discord", webHandler::sendNotificationToDiscord)
                .addPermGatedPrefixPath("/setNotificationExpired", "notification.expired", webHandler::setNotificationExpired)
                .addPermGatedPrefixPath("/notifications", "notification.list", webHandler::notificationsList)
                .addExactPath("/", webHandler::index)
                .addExactPath("/index.html", webHandler::index)
                .addExactPath("/factions", new AllowedMethodsHandler(webHandler::getRequestingUserFactions, Methods.GET))
                .addPrefixPath("/", Handlers.resource(new ClassPathResourceManager(App.class.getClassLoader(), "public/")).setCacheTime(10))
                .addPrefixPath("/emoji", Handlers.resource(new FileResourceManager(new File(App.getStorageDir().resolve("emoji").toString()))).setCacheTime(604800));
        // REST-style faction routes share one rate limit; the path handler is
        // the fallback for everything the router does not match.
        int managedFactionsTimeSecondsLimit = (int) App.getConfig().getDuration("server.limits.manageFactions.time", TimeUnit.SECONDS);
        PxlsRoutingHandler routingHandler = PxlsHandlers.routing()
                .getPermGated("/profile", "user.profile", webHandler::profileView)
                .getPermGated("/profile/{who}", "user.profile.other", webHandler::profileView)
                .getPermGated("/factions/{fid}", "faction.data", new JsonReader(new RateLimitingHandler(webHandler::manageFactions, "http:manageFactions",
                        managedFactionsTimeSecondsLimit, App.getConfig().getInt("server.limits.manageFactions.count"), App.getConfig().getBoolean("server.limits.manageFactions.global"))))
                .postPermGated("/factions", "faction.create", new JsonReader(new RateLimitingHandler(webHandler::manageFactions, "http:manageFactions",
                        managedFactionsTimeSecondsLimit, App.getConfig().getInt("server.limits.manageFactions.count"), App.getConfig().getBoolean("server.limits.manageFactions.global"))))
                .putPermGated("/factions/{fid}", "faction.edit", new JsonReader(new RateLimitingHandler(webHandler::manageFactions, "http:manageFactions",
                        managedFactionsTimeSecondsLimit, App.getConfig().getInt("server.limits.manageFactions.count"), App.getConfig().getBoolean("server.limits.manageFactions.global"))))
                .deletePermGated("/factions/{fid}", "faction.delete", new JsonReader(new RateLimitingHandler(webHandler::manageFactions, "http:manageFactions",
                        managedFactionsTimeSecondsLimit, App.getConfig().getInt("server.limits.manageFactions.count"), App.getConfig().getBoolean("server.limits.manageFactions.global"))))
                .setFallbackHandler(pathHandler);
        // Every request passes through IP extraction and auth before routing.
        server = Undertow.builder()
                .addHttpListener(port, "0.0.0.0")
                .setIoThreads(32)
                .setWorkerThreads(128)
                .setHandler(new IPReader(new AuthReader(new EagerFormParsingHandler().setNext(routingHandler)))).build();
        server.start();
    }
private void webSocketHandler(WebSocketHttpExchange exchange, WebSocketChannel channel) {
User user = exchange.getAttachment(AuthReader.USER);
String ip = exchange.getAttachment(IPReader.IP);
// IMPORTANT LOGIC
socketHandler.connect(channel, user);
PxlsWebSocketConnection con = new PxlsWebSocketConnection(channel, user);
connections.add(con);
System.err.println(connections.size());
if (user != null) {
user.getConnections().add(channel);
// aaaaaaand update the useragent
List<String> agentAr = exchange.getRequestHeaders().get(Headers.USER_AGENT.toString());
String agent = "";
if (agentAr != null) {
agent = agentAr.get(0);
}
if (agent == null) {
agent = "";
}
user.setUserAgent(agent);
userTaskExecutor.submit(new UserAuthedTask(channel, user, ip)); //ip at this point should have gone through all the checks to extract an actual IP from behind a reverse proxy
}
channel.getReceiveSetter().set(new AbstractReceiveListener() {
@Override
protected void onFullTextMessage(WebSocketChannel channel, BufferedTextMessage message) throws IOException {
super.onFullTextMessage(channel, message);
String data = message.getData();
JsonObject jsonObj = App.getGson().fromJson(data, JsonObject.class);
String type = jsonObj.get("type").getAsString();
if (type.equals("pixel")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientPlace.class), ip);
if (type.equals("undo")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientUndo.class), ip);
if (type.equals("captcha")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientCaptcha.class), ip);
if (type.equals("admin_placement_overrides")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientAdminPlacementOverrides.class), ip);
if (type.equals("admin_message")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientAdminMessage.class), ip);
if (type.equals("shadowbanme")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientShadowBanMe.class), ip);
if (type.equals("banme")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientBanMe.class), ip);
if (type.equalsIgnoreCase("ChatHistory")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientChatHistory.class), ip);
if (type.equalsIgnoreCase("ChatbanState")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientChatbanState.class), ip);
if (type.equalsIgnoreCase("ChatMessage")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientChatMessage.class), ip);
if (type.equalsIgnoreCase("ChatLookup")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientChatLookup.class), ip);
// old thing, will auto-shadowban
if (type.equals("place")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientPlace.class), ip);
// lol
if (type.equals("placepixel")) socketHandler.accept(channel, user, App.getGson().fromJson(jsonObj, ClientBanMe.class), ip);
}
});
channel.getCloseSetter().set(c -> {
connections.remove(con);
if (user != null) {
user.getConnections().remove(channel);
}
socketHandler.disconnect(user);
});
channel.resumeReceives();
// send to front-end
send(channel, new NearConfig());
}
    /** Returns the live set of all open websocket connections. */
    public Set<PxlsWebSocketConnection> getConnections() {
        return connections;
    }
public void broadcast(Object obj) {
String json = App.getGson().toJson(obj);
if (connections != null) {
for (PxlsWebSocketConnection channel : connections) {
sendRaw(channel, json);
}
}
}
    /** Sends a pre-serialized JSON string to every open connection. */
    public void broadcastRaw(String raw) {
        if (connections != null) {
            connections.forEach(channel -> sendRaw(channel, raw));
        }
    }
    /** Broadcasts obj to all authenticated users except shadow-banned ones. */
    public void broadcastNoShadow(Object obj) {
        broadcastToUserPredicate(obj, user -> !user.isShadowBanned());
    }
    /** Matches users allowed to receive staff-only broadcasts. */
    private final Predicate<User> userCanReceiveStaffBroadcasts = user -> user.hasPermission("user.receivestaffbroadcasts");

    /** Broadcasts obj only to users with the staff-broadcast permission. */
    public void broadcastToStaff(Object obj) {
        broadcastToUserPredicate(obj, userCanReceiveStaffBroadcasts);
    }
    /**
     * Broadcasts obj to every authenticated user matching the predicate,
     * across all of that user's open connections. Unauthenticated
     * connections never receive anything from this method.
     */
    public void broadcastToUserPredicate(Object obj, Predicate<User> predicate) {
        String json = App.getGson().toJson(obj);
        getAuthedUsers()
                .values()
                .stream()
                .filter(predicate)
                .forEach(user -> user.getConnections()
                        .forEach(con -> WebSockets.sendText(json, con, null))
                );
    }
/**
 * Broadcasts obj to every connection matching the predicate.
 * Runs over a parallel stream, so individual sends may happen concurrently.
 */
public void broadcastPredicate(Object obj, Predicate<PxlsWebSocketConnection> predicate) {
    String json = App.getGson().toJson(obj);
    connections.parallelStream()
        .filter(predicate)
        .forEach(con -> WebSockets.sendText(json, con.getChannel(), null));
}
/**
 * Sends staffObj to staff-eligible users and nonStaffObj to everyone else.
 * Passing null for either object means that group receives nothing
 * (broadcastMapped skips null payloads).
 */
public void broadcastSeparateForStaff(Object nonStaffObj, Object staffObj) {
    String nonStaffJSON = nonStaffObj != null ? App.getGson().toJson(nonStaffObj) : null;
    String staffJSON = staffObj != null ? App.getGson().toJson(staffObj) : null;
    broadcastMapped(con -> {
        boolean sendStaffObject = con.getUser().isPresent() && userCanReceiveStaffBroadcasts.test(con.getUser().get());
        return sendStaffObject ? staffJSON : nonStaffJSON;
    });
}
/**
 * Broadcasts a per-connection payload produced by mapper; a null mapping
 * skips that connection. Runs over a parallel stream.
 */
public void broadcastMapped(Function<PxlsWebSocketConnection, String> mapper) {
    connections.parallelStream()
        .forEach(con -> {
            String json = mapper.apply(con);
            if (json != null) {
                WebSockets.sendText(json, con.getChannel(), null);
            }
        });
}
/** Serializes obj to JSON and sends it to a single channel. */
public void send(WebSocketChannel channel, Object obj) {
    sendRaw(channel, App.getGson().toJson(obj));
}

/** Serializes obj to JSON and sends it to every connection of a user. */
public void send(User user, Object obj) {
    sendRaw(user, App.getGson().toJson(obj));
}

/** Sends an already-serialized payload to every connection of a user. */
public void sendRaw(User user, String raw) {
    user.getConnections().forEach(channel -> sendRaw(channel, raw));
}

// Low-level single-target senders; fire-and-forget (no completion callback).
private void sendRaw(PxlsWebSocketConnection channel, String str) {
    WebSockets.sendText(str, channel.getChannel(), null);
}

private void sendRaw(WebSocketChannel channel, String str) {
    WebSockets.sendText(str, channel, null);
}
/** Handler that processes inbound client packets. */
public PacketHandler getPacketHandler() {
    return socketHandler;
}
/**
 * Registers an authenticated user, unless banned or shadow-banned.
 * Uses putIfAbsent so the containsKey/put pair cannot race on the
 * ConcurrentMap when two connections authenticate simultaneously.
 */
public void addAuthedUser(User user) {
    if (user.isBanned() || user.isShadowBanned()) {
        return;
    }
    authedUsers.putIfAbsent(user.getId(), user);
}
/** Drops a user from the authed-users registry (e.g. on disconnect). */
public void removeAuthedUser(User user) {
    authedUsers.remove(user.getId());
}

/** Live map of authed users keyed by user id (not a copy). */
public ConcurrentMap<Integer, User> getAuthedUsers() {
    return this.authedUsers;
}
/**
 * Counts authed users who are not currently idle.
 * Reads this instance's own map directly instead of round-tripping through
 * the App.getServer() singleton accessor (same data, less indirection).
 */
public int getNonIdledUsersCount() {
    int nonIdles = 0;
    for (User value : authedUsers.values()) {
        if (!value.isIdled()) ++nonIdles;
    }
    return nonIdles;
}
/** Underlying Undertow server instance. */
public Undertow getServer() {
    return server;
}

/** Handler serving the plain-HTTP (non-websocket) endpoints. */
public WebHandler getWebHandler() {
    return webHandler;
}
}
|
package game.rps.project;
/**
 * Contract for a playable game: present its rules and run one session.
 *
 * @author <NAME>
 * @version 1.0.2 3/4/21
 */
public interface Game
{
    /** Presents the rules of the game to the player. */
    void rules();

    /** Runs one session of the game. */
    void game();
}
|
from dagster import pipeline
@pipeline
def define_pipeline():
    """Placeholder Dagster pipeline definition; no solids are wired up yet."""
    pass
|
-- Usernames whose name starts with 'M' ('M%' anchors the prefix; case
-- sensitivity depends on the column's collation).
SELECT username
FROM users
WHERE username LIKE 'M%'
import Storage from "../common/storage";
// Thin JSON-RPC passthrough to the YOUChain provider held in Storage.
const YOUChainController = {
    // Forwards an RPC call; resolves with the provider's raw response.
    async send(method, params){
        return Storage.youchain.currentProvider.send(method, params);
    },
};

export default YOUChainController;
#!/bin/bash
source "/vagrant/scripts/common.sh"
# Install Tez from an archive previously placed in /vagrant/resources.
function installLocalTez {
    echo "install Tez from local file"
    FILE=/vagrant/resources/$TEZ_ARCHIVE
    # Quote the path so archive names with spaces/globs do not word-split.
    tar -xzf "$FILE" -C /usr/local
}
# Download the Tez archive from the mirror, then unpack it into /usr/local.
function installRemoteTez {
    echo "install Tez from remote file"
    # -o already names the output file; the original also passed -O, which
    # conflicts (two output options for one URL) and makes curl warn.
    curl ${CURL_OPTS} -o "/vagrant/resources/$TEZ_ARCHIVE" -L "$TEZ_MIRROR_DOWNLOAD"
    tar -xzf "/vagrant/resources/$TEZ_ARCHIVE" -C /usr/local
}
# Copy the Hadoop/Tez configuration files into place.
# (The HDFS upload steps are intentionally commented out.)
function setupTez {
    #echo "copy Tez to HDFS"
    #hdfs dfs -put -f /usr/local/tez /user/.
    echo "Copy Tez configuration"
    cp -f $TEZ_RES_DIR/mapred-site.xml /usr/local/hadoop/etc/hadoop/.
    cp -f $TEZ_RES_DIR/tez-site.xml /usr/local/tez/conf/.
    #hdfs dfs -put -f ${HIVE_EXEC_JAR} /user/tez/.
}
# Install the Tez profile script and source it into the current shell.
function setupEnvVars {
    echo "creating Tez environment variables"
    cp -f $TEZ_RES_DIR/tez.sh /etc/profile.d/tez.sh
    . /etc/profile.d/tez.sh
}
# Prefer a locally cached archive; fall back to downloading from the mirror.
function installTez {
    if resourceExists $TEZ_ARCHIVE; then
        installLocalTez
    else
        installRemoteTez
    fi
    # -sfn makes the symlink idempotent across repeated provisioning runs
    # (plain `ln -s` fails if /usr/local/tez already exists).
    ln -sfn "/usr/local/$TEZ_RELEASE" /usr/local/tez
}
# Main sequence: fetch/unpack Tez, lay down config, export env vars.
echo "setup Tez"
installTez
setupTez
setupEnvVars
echo "Tez setup complete"
|
<filename>drpc-provider/src/main/java/org/destiny/drpc/boot/RpcBootStrap.java
package org.destiny.drpc.boot;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* @author 王康
* <EMAIL>
* ------------------------------------------------------------------
* <p></p>
* ------------------------------------------------------------------
 * Copyright 2018 Netease, Inc. All rights reserved.
* NETEASE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
* @version JDK 1.8.0_101
* @since 2017/8/22 16:38
*/
public class RpcBootStrap {
    /**
     * Boots the Spring context defined in spring-provider.xml, which exports
     * the RPC provider beans. NOTE(review): the JVM presumably stays alive via
     * non-daemon threads started by the provider — confirm.
     */
    public static void main(String[] args) {
        ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("spring-provider.xml");
        context.start();
        System.out.println("RpcBootStrap.main");
    }
}
|
package com.example.radibarq.pokemongotradecenter;
import android.net.Uri;
/**
* Created by radibarq on 10/24/16.
*/
/**
 * Plain value holder for an authenticated user's profile data.
 */
public class User {
    // Human-readable display name.
    String displayName;
    String email;
    // Avatar location supplied by the auth provider.
    Uri photoUrl;
    // Provider-issued unique identifier.
    String id;

    User(String displayName, String email, Uri photoUrl, String id)
    {
        this.displayName = displayName;
        this.email = email;
        this.photoUrl = photoUrl;
        this.id = id;
    }

    // No-arg constructor — NOTE(review): presumably required by a
    // Firebase-style deserializer; confirm against usage.
    User()
    {
    }
}
|
#!/bin/bash
# Copyright (C) 2018-2020 LEIDOS.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# CARMA packages checkout script
# Optional argument to set the root checkout directory with no ending '/' default is '~'
set -ex

# Checkout root defaults to $HOME; first CLI arg overrides it.
dir=~
if [[ -n ${1} ]]; then
    dir=${1}
fi

# Destination paths are quoted so a root directory containing spaces works.
git clone https://github.com/usdot-fhwa-stol/carma-msgs.git "${dir}/src/CARMAMsgs" --branch develop --depth 1
git clone https://github.com/usdot-fhwa-stol/carma-utils.git "${dir}/src/CARMAUtils" --branch develop --depth 1
|
import expect from 'expect.js';
import activateStub from '../src/stub/MatrixProtoStub';
import Config from './configuration.js';
let config = new Config();
describe('Matrix-Stub connect to ' + config.homeserver + ' with messagingnode ' + config.messagingnode, function() {

  // Minimal message-bus double: forwards posted messages to an optional
  // sendCallback, logging them when doLog is true.
  class Bus {
    constructor(owner, doLog) {
      this.owner = owner;
      this.doLog = doLog;
    }

    postMessage(msg) {
      if (this.doLog === true)
        console.log('Bus ' + this.owner + ' got msg: ' + JSON.stringify(msg));
      if ( this.sendCallback )
        this.sendCallback(msg);
    }

    addListener(url, sendCallback) {
      // not required here
    }
  }

  /*
   * The connection of a stub without credentials must be treated as extra domain connect.
   */
  it('stub connection', function(done) {
    let bus = new Bus("generic", false);
    let configuration = {
      messagingnode : config.messagingnode,
      runtimeURL : "runtime://" + config.homeserver + "/6225"
    }

    let stub = activateStub('hyperty-runtime://' + config.homeserver + '/protostub/1', bus, configuration).instance;
    stub.connect().then( () => {
      stub.disconnect();
      done();
    },
    (err) => {
      // Bug fix: the original called expect.fail() here without invoking
      // done, so a failed connect made the test time out instead of
      // reporting the actual error. Propagate the rejection to mocha.
      done(err);
    });
  });
});
|
#!/bin/bash
# Verifies keycloak admin credentials ($1 user, $2 password) by listing realms,
# then confirms the cQube realm exists. Sets fail=1 on either error.
check_keycloak_credentials(){
    $base_dir/cqube/keycloak/bin/kcadm.sh get realms --no-config --server http://localhost:8080/auth --realm master --user $1 --password $2 > /dev/null 2>&1
    if [ ! $? -eq 0 ]; then
        echo "Error - Invalid keycloak user or password"; fail=1
    fi
    $base_dir/cqube/keycloak/bin/kcadm.sh get realms/$realm_name --no-config --server http://localhost:8080/auth --realm master --user $1 --password $2 > /dev/null 2>&1
    if [ ! $? -eq 0 ]; then
        echo "Error - Unable to find cQube realm"; fail=1
    fi
}
# Validates that $2 is the literal string "true" or "false";
# reports an error naming $1 and sets fail=1 otherwise.
check_kc_config_otp(){
    case $2 in
        true|false) ;;
        *) echo "Error - Please enter either true or false for $1"; fail=1 ;;
    esac
}
# Validates a session timeout like "30M" (minutes) or "2D" (days) and converts
# it to seconds in timeout_value. Only rewrites roles/keycloak/vars/main.yml
# when a valid value was parsed.
check_timeout()
{
    # Reset so a stale value from a previous call can never leak into the yml.
    timeout_value=""
    if [[ $2 =~ ^[0-9]+[M|D]$ ]] ; then
        raw_value="$( echo "$2" | sed -e 's/[M|D]$//' )"
        if [[ ! $raw_value == 0 ]]; then
            if [[ $2 =~ M$ ]] ; then
                if [[ $raw_value -ge 30 && $raw_value -le 5256000 ]]; then
                    timeout_value=$(($raw_value*60))
                else
                    echo "Error - Minutes should be between 30 and 5256000"; fail=1
                fi
            fi
            if [[ $2 =~ D$ ]] ; then
                if [[ $raw_value -ge 1 && $raw_value -le 3650 ]]; then
                    timeout_value=$(($raw_value*24*60*60))
                else
                    echo "Error - Days should be between 1 and 3650"; fail=1
                fi
            fi
        else
            echo "Error - Timeout should not be 0"; fail=1
        fi
    else
        echo "Error - please enter proper value as mentioned in comments"; fail=1
    fi
    # Bug fix: previously the yml was rewritten unconditionally, so a failed
    # validation deleted the old entry and appended an empty one.
    if [[ -n $timeout_value ]]; then
        sed -i '/session_timeout_in_seconds:/d' roles/keycloak/vars/main.yml
        echo "session_timeout_in_seconds: $timeout_value" >> roles/keycloak/vars/main.yml
    fi
}
# Ensures the configured state code ($2) matches the code recorded by the
# previous installation in .cqube_config.
check_state()
{
    sc=$(cat $base_dir/cqube/.cqube_config | grep CQUBE_STATE_CODE )
    installed_state_code=$(cut -d "=" -f2 <<< "$sc")
    if [[ ! "$2" == "$installed_state_code" ]]; then
        echo "Error - State code should be same as previous installation. Please refer the state_list file and enter the correct value."; fail=1
    fi
}
# Validates that $2 is exactly "udise" or "state";
# reports an error naming $1 and sets fail=1 otherwise.
check_static_datasource(){
    case $2 in
        udise|state) ;;
        *) echo "Error - Please enter either udise or state for $1"; fail=1 ;;
    esac
}
# Validates the base directory ($2): must be an absolute, existing path that
# matches the CQUBE_BASE_DIR recorded by the previous installation.
# Exits the whole script immediately on failure (unlike the other checks).
check_base_dir(){
    base_dir_status=0
    if [[ ! "$2" = /* ]] || [[ ! -d $2 ]]; then
        echo "Error - Please enter the absolute path or make sure the directory is present."; fail=1
        base_dir_status=1
    else
        if [[ -e "$2/cqube/.cqube_config" ]]; then
            dir=$(cat $2/cqube/.cqube_config | grep CQUBE_BASE_DIR )
            base_dir_path=$(cut -d "=" -f2 <<< "$dir")
            if [[ ! "$2" == "$base_dir_path" ]]; then
                echo "Error - Base directory should be same as previous installation directory"; fail=1
                base_dir_status=1
            fi
        else
            echo "Error - Base directory should be same as previous installation directory"; fail=1
            base_dir_status=1
        fi
    fi
    # Hard stop: every later check depends on a correct base_dir.
    if [[ $base_dir_status == 1 ]]; then
        echo "Please rectify the base_dir error and restart the upgradation"
        exit 1
    fi
}
# Ensures bucket $2 (labelled $1) matches the bucket name recorded under
# key $3 in .cqube_config from the previous installation.
check_s3_bucket(){
    s3_bucket=$(cat $base_dir/cqube/.cqube_config | grep $3 )
    s3_bucket_name=$(cut -d "=" -f2 <<< "$s3_bucket")
    if [[ ! "$2" == "$s3_bucket_name" ]]; then
        echo "Error - $1 must be same as previously used bucket"; fail=1
    fi
}
# Compares the installed cQube version (from .cqube_config) with this
# release's version (from .version). Allows an interactive re-run when the
# versions match; otherwise enforces upgrading only from
# $version_upgradable_from. Exits the script on any inconsistency.
check_version(){
    # getting the installed version
    if [[ ! "$base_dir" = /* ]] || [[ ! -d $base_dir ]]; then
        echo "Error - Please enter the absolute path or make sure the directory is present.";
        exit 1
    else
        if [[ -e "$base_dir/cqube/.cqube_config" ]]; then
            installed_ver=$(cat $base_dir/cqube/.cqube_config | grep CQUBE_VERSION )
            installed_version=$(cut -d "=" -f2 <<< "$installed_ver")
        else
            echo "Error - Invalid base_dir or Unable to find the cQube in given base_dir";
            exit 1
        fi
    fi
    # getting this release version
    if [[ -e ".version" ]]; then
        this_version=$(awk ''/^cqube_version:' /{ if ($2 !~ /#.*/) {print $2}}' .version)
        this_version=$(sed -e 's/^"//' -e 's/"$//' <<<"$this_version")
        # Must look like a dotted semantic version (e.g. 1.13 or 1.13.2).
        if [[ $this_version == "" ]] || [[ ! `echo $this_version | grep -E '^[0-9]{1,2}\.[0-9]{1,2}\.?[0-9]{1,2}?$'` ]]; then
            echo "Error - cQube's constant settings are affected. Re-clone the repository again";
            exit 1
        fi
    else
        echo "Error - cQube's constant settings are affected. Re-clone the repository again";
        exit 1
    fi
    reupgrade=0
    if [[ $installed_version == $this_version ]]; then
        echo "cQube is already upgraded to $this_version version.";
        # Interactive confirmation before re-running an identical upgrade.
        while true; do
            read -p "Do you wish to rerun the upgrade (yes/no)? " yn
            case $yn in
                yes)
                    reupgrade=1
                    break;;
                no) exit;;
                * ) echo "Please answer yes or no.";;
            esac
        done
    fi
    if [[ $reupgrade == 0 ]]; then
        if [[ ! $installed_version == $version_upgradable_from ]]; then
            echo "Version $this_version is only upgradeable from $version_upgradable_from version";
            exit 1
        fi
    fi
}
# Confirms the psql client is runnable; records the result in
# check_postgres_status (0 = available) for the later credential check.
check_postgres(){
    echo "Checking for Postgres ..."
    temp=$(psql -V > /dev/null 2>&1; echo $?)
    check_postgres_status=0
    if [ ! $temp == 0 ]; then
        echo "Error - Unable to check the Postgres."; fail=1
        check_postgres_status=1
    fi
}
# Validates the system user name ($2): it must exist in /etc/passwd or be
# the user owning the first active login session.
check_sys_user(){
    result=`who | head -1 | awk '{print $1}'`
    if [[ `egrep -i ^$2: /etc/passwd ; echo $?` != 0 && $result != $2 ]]; then
        echo "Error - Please check the system_user_name."; fail=1
    fi
}
# Validates $2 as a dotted-quad IPv4 address and confirms it is one of this
# machine's local addresses. Sets fail=1 (reporting $key) on any problem.
check_ip()
{
    local ip=$2
    ip_stat=1
    ip_pass=0
    if [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
        OIFS=$IFS
        IFS='.'
        ip=($ip)
        IFS=$OIFS
        [[ ${ip[0]} -le 255 && ${ip[1]} -le 255 \
            && ${ip[2]} -le 255 && ${ip[3]} -le 255 ]]
        ip_stat=$?
        if [[ ! $ip_stat == 0 ]]; then
            echo "Error - Invalid value for $key"; fail=1
            # Bug fix: mark the failure (was ip_pass=0, which left the flag
            # unchanged and produced a second, misleading local-IP error).
            ip_pass=1
        fi
        is_local_ip=`ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1'` > /dev/null 2>&1
        # Only check locality when the octet validation passed.
        if [[ $ip_pass == 0 && $is_local_ip != *$2* ]]; then
            echo "Error - Invalid value for $key. Please enter the local ip of this system."; fail=1
        fi
    else
        echo "Error - Invalid value for $key"; fail=1
    fi
}
# Validates $2 as a dotted-quad IPv4 address (no locality check, since the
# VPN address need not belong to this machine). Sets fail=1 on any problem.
# NOTE(review): ip_pass is set but never read here; it mirrors check_ip.
check_vpn_ip()
{
    local ip=$2
    ip_stat=1
    ip_pass=0
    if [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
        OIFS=$IFS
        IFS='.'
        ip=($ip)
        IFS=$OIFS
        [[ ${ip[0]} -le 255 && ${ip[1]} -le 255 \
            && ${ip[2]} -le 255 && ${ip[3]} -le 255 ]]
        ip_stat=$?
        if [[ ! $ip_stat == 0 ]]; then
            echo "Error - Invalid value for $key"; fail=1
            ip_pass=0
        fi
    else
        echo "Error - Invalid value for $key"; fail=1
    fi
}
# Verifies the AWS key pair ($1 access key, $2 secret key) by listing buckets.
# Side effect: exports the credentials into the environment for later steps.
check_aws_key(){
    aws_key_status=0
    export AWS_ACCESS_KEY_ID=$1
    export AWS_SECRET_ACCESS_KEY=$2
    aws s3api list-buckets > /dev/null 2>&1
    if [ ! $? -eq 0 ]; then echo "Error - Invalid aws access or secret keys"; fail=1
        aws_key_status=1
    fi
}
# Ensures the database setting $2 (labelled $1) matches the value recorded
# under key $3 in .cqube_config from the previous installation.
check_db_naming(){
    dir=$(cat $base_dir/cqube/.cqube_config | grep $3 )
    temp_db_name=$(cut -d "=" -f2 <<< "$dir")
    if [[ ! $temp_db_name == $2 ]]; then
        echo "Error - $1 should be same as previous installation"; fail=1
    fi
}
# Verifies Postgres credentials by connecting to database $1 as user $2 with
# password $3. Skips (and flags) the check when psql itself is unavailable.
check_db_password(){
    if [[ $check_postgres_status == 0 ]]; then
        export PGPASSWORD=$3
        psql -h localhost -d $1 -U $2 -c "\l" > /dev/null 2>&1
        if [ ! $? -eq 0 ]; then echo "Error - Invalid Postgres credentials"; fail=1
        fi
    else
        # Bug fix: the error string was missing `echo`, so the message itself
        # was executed as a command instead of being printed.
        echo "Error - Since unable to check Postgres, credentials could not verified."; fail=1
    fi
}
# Only for release 1.9
# Returns 0 when the length of $1 is within [3, 63] characters, 1 otherwise
# (the S3 bucket-name length rule).
check_length(){
    len_status=1
    str_length=${#1}
    [[ $str_length -ge 3 && $str_length -le 63 ]] && len_status=0
    return $len_status
}
# Ensures the configured api_endpoint ($2) matches the KEYCLOAK_HOST domain
# already baked into the dashboard's server-side .env.
check_api_endpoint(){
    temp_ep=`grep '^KEYCLOAK_HOST =' $base_dir/cqube/dashboard/server_side/.env | awk '{print $3}' | sed s/\"//g`
    if [[ ! $temp_ep == "https://$2" ]]; then
        echo "Error - Change in domain name. Please verify the api_endpoint "; fail=1
    fi
}
# Validates the AWS region $2 (labelled $1): length must be plausible
# (9-15 chars, e.g. ap-south-1) and the region's S3 endpoint reachable.
check_aws_default_region(){
    region_len=${#2}
    if [[ $region_len -ge 9 ]] && [[ $region_len -le 15 ]]; then
        curl https://s3.$2.amazonaws.com > /dev/null 2>&1
        if [[ ! $? == 0 ]]; then
            echo "Error - There is a problem reaching the aws default region. Please check the $1 value." ; fail=1
        fi
    else
        # Bug fix: an out-of-range length previously passed silently.
        echo "Error - Invalid length for $1. Please check the value." ; fail=1
    fi
}
# Derives Postgres/Java memory settings from total RAM and writes them to
# memory_config.yml. Requires at least 32GB; two tuning tiers: 30-60GB and
# above 60GB (which assigns a smaller java_arg_2 percentage).
check_mem(){
    mem_total_kb=`grep MemTotal /proc/meminfo | awk '{print $2}'`
    mem_total=$(($mem_total_kb/1024))
    if [ $(( $mem_total / 1024 )) -ge 30 ] && [ $(($mem_total / 1024)) -le 60 ] ; then
        min_shared_mem=$(echo $mem_total*13/100 | bc)
        min_work_mem=$(echo $mem_total*2/100 | bc)
        min_java_arg_2=$(echo $mem_total*13/100 | bc)
        min_java_arg_3=$(echo $mem_total*65/100 | bc)
        echo """---
shared_buffers: ${min_shared_mem}MB
work_mem: ${min_work_mem}MB
java_arg_2: -Xms${min_java_arg_2}m
java_arg_3: -Xmx${min_java_arg_3}m""" > memory_config.yml
    elif [ $(( $mem_total / 1024 )) -gt 60 ]; then
        max_shared_mem=$(echo $mem_total*13/100 | bc)
        max_work_mem=$(echo $mem_total*2/100 | bc)
        max_java_arg_2=$(echo $mem_total*7/100 | bc)
        max_java_arg_3=$(echo $mem_total*65/100 | bc)
        echo """---
shared_buffers: ${max_shared_mem}MB
work_mem: ${max_work_mem}MB
java_arg_2: -Xms${max_java_arg_2}m
java_arg_3: -Xmx${max_java_arg_3}m""" > memory_config.yml
    else
        echo "Error - Minimum Memory requirement to install cQube is 32GB. Please increase the RAM size.";
        exit 1
    fi
}
# Reads top-level key $1 from upgradation_config.yml into the vals map,
# skipping values that are commented out.
get_config_values(){
    key=$1
    vals[$key]=$(awk ''/^$key:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)
}
bold=$(tput bold)
normal=$(tput sgr0)
fail=0

# Optional first CLI arg selects the install mode; defaults to "NA".
if [[ ! $# -eq 0 ]]; then
    core_install=$1
else
    core_install="NA"
fi

echo -e "\e[0;33m${bold}Validating the config file...${normal}"

# An array of mandatory values
declare -a arr=("diksha_columns" "state_code" "static_datasource" "management" "system_user_name" "base_dir" "db_user" "db_name" "db_password" \
    "s3_access_key" "s3_secret_key" "s3_input_bucket" "s3_output_bucket" "s3_emission_bucket" \
    "aws_default_region" "local_ipv4_address" "vpn_local_ipv4_address" "api_endpoint" "keycloak_adm_passwd" "keycloak_adm_user" \
    "keycloak_config_otp" "session_timeout")

# Create and empty array which will store the key and value pair from config file
declare -A vals

# Constant variables
realm_name=cQube

# Getting aws keys
aws_access_key=$(awk ''/^s3_access_key:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)
aws_secret_key=$(awk ''/^s3_secret_key:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)

# Getting base_dir
base_dir=$(awk ''/^base_dir:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)

# Getting keycloak_adm_user and keycloak_adm_passwd
keycloak_adm_user=$(awk ''/^keycloak_adm_user:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)
keycloak_adm_passwd=$(awk ''/^keycloak_adm_passwd:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)

# Getting db_user, db_name and db_password
db_user=$(awk ''/^db_user:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)
db_name=$(awk ''/^db_name:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)
db_password=$(awk ''/^db_password:' /{ if ($2 !~ /#.*/) {print $2}}' upgradation_config.yml)

# Hard gates before field-by-field validation: enough RAM, correct version.
check_mem

# Check the version before starting validation
version_upgradable_from=1.13
check_version

# Iterate the array and retrieve values for mandatory fields from config file
for i in ${arr[@]}
do
    get_config_values $i
done
# Validate every mandatory key: each arm flags an empty value, then delegates
# to the matching check_* helper.
for i in ${arr[@]}
do
    key=$i
    value=${vals[$key]}
    case $key in
        diksha_columns)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_kc_config_otp $key $value
            fi
            ;;
        state_code)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_state $key $value
            fi
            ;;
        static_datasource)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_static_datasource $key $value
            fi
            ;;
        management)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            fi
            ;;
        system_user_name)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_sys_user $key $value
            fi
            ;;
        base_dir)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_base_dir $key $value
            fi
            ;;
        s3_access_key)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            fi
            ;;
        s3_secret_key)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_aws_key $aws_access_key $aws_secret_key
            fi
            ;;
        s3_input_bucket)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_s3_bucket $key $value "CQUBE_S3_INPUT"
            fi
            ;;
        s3_output_bucket)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_s3_bucket $key $value "CQUBE_S3_OUTPUT"
            fi
            ;;
        s3_emission_bucket)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_s3_bucket $key $value "CQUBE_S3_EMISSION"
            fi
            ;;
        local_ipv4_address)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_ip $key $value
            fi
            ;;
        vpn_local_ipv4_address)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_vpn_ip $key $value
            fi
            ;;
        db_user)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_postgres
                check_db_naming $key $value CQUBE_DB_USER
            fi
            ;;
        db_name)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_db_naming $key $value CQUBE_DB_NAME
            fi
            ;;
        keycloak_adm_user)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            fi
            ;;
        keycloak_adm_passwd)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_keycloak_credentials $keycloak_adm_user $keycloak_adm_passwd
            fi
            ;;
        keycloak_config_otp)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_kc_config_otp $key $value
            fi
            ;;
        db_password)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_db_password $db_name $db_user $db_password
            fi
            ;;
        api_endpoint)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_api_endpoint $key $value
            fi
            ;;
        aws_default_region)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check. Recommended value is ap-south-1"; fail=1
            else
                # Bug fix: the helper was invoked with no arguments, so the
                # configured region was never actually validated.
                check_aws_default_region $key $value
            fi
            ;;
        session_timeout)
            if [[ $value == "" ]]; then
                echo "Error - in $key. Unable to get the value. Please check."; fail=1
            else
                check_timeout $key $value
            fi
            ;;
        *)
            if [[ $value == "" ]]; then
                echo -e "\e[0;31m${bold}Error - Value for $key cannot be empty. Please fill this value${normal}"; fail=1
            fi
            ;;
    esac
done
# Abort the upgrade when any validation above flagged a problem.
if [[ $fail -eq 1 ]]; then
    echo -e "\e[0;34m${bold}Config file has errors. Please rectify the issues and restart the upgradation${normal}"
    exit 1
else
    echo -e "\e[0;32m${bold}Config file successfully validated${normal}"
fi
|
#!/bin/sh
# -*- tab-width: 4 -*- ;; Emacs
# vi: set tabstop=4 :: Vi/ViM
# If running as xarchInstall, only use binaries in `/stand'
[ "$xarchInstall" ] && export PATH=/stand

############################################################ CONFIGURATION

# Indicate that we want color output
ANSI_COLOR=YES

PERL_VERSION="5.12.4"
MAKE_CONF=/etc/make.conf
MANPATH_CONFIG=/etc/manpath.config
PKG_PREFIX=/usr/local
BANNER=`date +"%F %T"`
POST_INSTALL_SH=/usr/local/etc/rc.d/perl_post_install.sh

############################################################ FUNCTIONS

# Include standard functions
. /dist/druid/etc/sh.subr

############################################################ MAIN SOURCE

task_begin "Configuring perl..."
# The heredoc delimiter below is unquoted, so $VARIABLES in the body are
# expanded by this script before eval_spin evaluates the commands.
eval_spin << END_SPIN
# Introduce a delay if configured
[ "$HUMAN_DELAY" ] && sleep $HUMAN_DELAY
#
# Make symbolic links (if /usr/bin files don't already exist)
#
[ -e /usr/bin/perl ] ||
ln -sf $PKG_PREFIX/bin/perl$PERL_VERSION /usr/bin/perl
[ -e /usr/bin/perl5 ] ||
ln -sf $PKG_PREFIX/bin/perl$PERL_VERSION /usr/bin/perl5
#
# Make some standard directories
#
mkdir -p $PKG_PREFIX/lib/perl5/site_perl/$PERL_VERSION/mach/auto
mkdir -p $PKG_PREFIX/lib/perl5/site_perl/$PERL_VERSION/auto
mkdir -p $PKG_PREFIX/lib/perl5/$PERL_VERSION/man/man3
#
# Make a mini boot-script to call h2ph on first-boot
#
mkdir -p ${POST_INSTALL_SH%/*}
cat >> $POST_INSTALL_SH << EOF
#!/bin/sh
cd /usr/include && $PKG_PREFIX/bin/h2ph *.h machine/*.h sys/*.h >/dev/null
rm -f $POST_INSTALL_SH
EOF
chmod +x $POST_INSTALL_SH
#
# Configure persistent make(1) variables
#
cat >> $MAKE_CONF << EOF
# added by use.perl $BANNER
PERL_VERSION=$PERL_VERSION
EOF
#
# Configure additional search paths for man(1)
#
cat >> $MANPATH_CONFIG << EOF
# added by use.perl
OPTIONAL_MANPATH $PKG_PREFIX/lib/perl5/$PERL_VERSION/man
OPTIONAL_MANPATH $PKG_PREFIX/lib/perl5/$PERL_VERSION/perl/man
EOF
# XXX keep the below work-around which accounts for a bug in sh(1) XXX
cat >> /dev/null << EOF
EOF
END_SPIN
task_end $?
################################################################################
# END
################################################################################
#
# $Header: /cvsroot/druidbsd/druidbsd/druid83/src/freebsd/repos/8.3-RELEASE-amd64/run_once/perl_setup.sh,v 1.1 2012/10/10 23:31:19 devinteske Exp $
#
# $Copyright: 2006-2012 Devin Teske. All rights reserved. $
#
# $Log: perl_setup.sh,v $
# Revision 1.1 2012/10/10 23:31:19 devinteske
# Commit initial public beta release of 8.3 series (beta 57; on-par with
# the 9.0 series released recently, also beta 57).
#
#
################################################################################
|
<gh_stars>1-10
module Clients
  # Fetches member documents over AMQP, keyed by HBX member id.
  # Returns an empty array for blank input, connection failures, timeouts,
  # or when the response carries no documents.
  class MemberDocumentClient
    def self.call(authority_member)
      return [] if authority_member.blank?

      conn = AmqpConnectionProvider.start_connection
      begin
        requestor = Amqp::Requestor.new(conn)
        _delivery_info, _properties, body = requestor.request({
          :routing_key => "member_documents.find_by_hbx_member_id",
          :headers => {
            :hbx_member_id => authority_member.hbx_member_id
          }
        }, "", 2)
        response = Parsers::DocumentListResponse.parse(body)
        return [] if response.document.blank?
        response.document
      ensure
        # Always release the AMQP connection, even when parsing raises.
        conn.close
      end
    rescue Bunny::TCPConnectionFailed, Timeout::Error
      []
    end
  end
end
|
#!/bin/sh
set -ex

# All three parameters are required; bail out early with a clear message.
if [ -z "${EGRESS_SOURCE}" ]; then
    echo "No EGRESS_SOURCE specified"
    exit 1
fi
if [ -z "${EGRESS_DESTINATION}" ]; then
    echo "No EGRESS_DESTINATION specified"
    exit 1
fi
if [ -z "${EGRESS_GATEWAY}" ]; then
    echo "No EGRESS_GATEWAY specified"
    exit 1
fi

# The pod may die and get restarted; only try to add the
# address/route/rules if they are not already there.
if ! ip route get ${EGRESS_DESTINATION} | grep -q macvlan0; then
    ip addr add ${EGRESS_SOURCE}/32 dev macvlan0
    ip link set up dev macvlan0

    ip route add ${EGRESS_GATEWAY}/32 dev macvlan0
    ip route add ${EGRESS_DESTINATION}/32 via ${EGRESS_GATEWAY} dev macvlan0

    # DNAT inbound eth0 traffic to the destination; SNAT replies to the source.
    iptables -t nat -A PREROUTING -i eth0 -j DNAT --to-destination ${EGRESS_DESTINATION}
    iptables -t nat -A POSTROUTING -j SNAT --to-source ${EGRESS_SOURCE}
fi

# Update neighbor ARP caches in case another node previously had the IP. (This is
# the same code ifup uses.)
arping -q -A -c 1 -I macvlan0 ${EGRESS_SOURCE}
( sleep 2;
  arping -q -U -c 1 -I macvlan0 ${EGRESS_SOURCE} || true ) > /dev/null 2>&1 < /dev/null &

# Now we just wait until we are killed...
#
# Kubernetes will use SIGTERM to kill us, but bash ignores SIGTERM by
# default in interactive shells, and it thinks this shell is
# interactive due to the way in which docker invokes it. We can get
# bash to react to SIGTERM if we explicitly set a trap for it, except
# that bash doesn't process signal traps while it is waiting for a
# process to finish, and we have to be waiting for a process to finish
# because there's no way to sleep forever within bash.
#
# Fortunately, signal traps do interrupt the "wait" builtin. So...
# set up a SIGTERM trap, run a command that sleeps forever *in the
# background*, and then wait for either the command to finish or the
# signal to arrive.
trap "exit" TERM
tail -f /dev/null &
wait

# We don't have to do any cleanup because deleting the network
# namespace will clean everything up for us.
|
#ifndef VESA_H
#define VESA_H

#include <types.h>

/* VBE controller information block (VESA BIOS function 0x4F00).
 * The layout must match the BIOS structure byte-for-byte, hence the
 * packed attribute on every member. */
typedef struct VESA_INFO
{
    unsigned char VESASignature[4] __attribute__ ((packed));
    unsigned short VESAVersion __attribute__ ((packed));
    unsigned long OEMStringPtr __attribute__ ((packed));
    unsigned char Capabilities[4] __attribute__ ((packed));
    unsigned long VideoModePtr __attribute__ ((packed));
    unsigned short TotalMemory __attribute__ ((packed));
    unsigned short OemSoftwareRev __attribute__ ((packed));
    unsigned long OemVendorNamePtr __attribute__ ((packed));
    unsigned long OemProductNamePtr __attribute__ ((packed));
    unsigned long OemProductRevPtr __attribute__ ((packed));
    unsigned char Reserved[222] __attribute__ ((packed));
    unsigned char OemData[256] __attribute__ ((packed));
} VESA_INFO;

/* VBE mode information block (VESA BIOS function 0x4F01). */
typedef struct MODE_INFO
{
    unsigned short ModeAttributes __attribute__ ((packed));
    unsigned char WinAAttributes __attribute__ ((packed));
    unsigned char WinBAttributes __attribute__ ((packed));
    unsigned short WinGranularity __attribute__ ((packed));
    unsigned short WinSize __attribute__ ((packed));
    unsigned short WinASegment __attribute__ ((packed));
    unsigned short WinBSegment __attribute__ ((packed));
    unsigned long WinFuncPtr __attribute__ ((packed));
    unsigned short BytesPerScanLine __attribute__ ((packed));
    unsigned short XResolution __attribute__ ((packed));
    unsigned short YResolution __attribute__ ((packed));
    unsigned char XCharSize __attribute__ ((packed));
    unsigned char YCharSize __attribute__ ((packed));
    unsigned char NumberOfPlanes __attribute__ ((packed));
    unsigned char BitsPerPixel __attribute__ ((packed));
    unsigned char NumberOfBanks __attribute__ ((packed));
    unsigned char MemoryModel __attribute__ ((packed));
    unsigned char BankSize __attribute__ ((packed));
    unsigned char NumberOfImagePages __attribute__ ((packed));
    unsigned char Reserved_page __attribute__ ((packed));
    unsigned char RedMaskSize __attribute__ ((packed));
    unsigned char RedMaskPos __attribute__ ((packed));
    unsigned char GreenMaskSize __attribute__ ((packed));
    unsigned char GreenMaskPos __attribute__ ((packed));
    unsigned char BlueMaskSize __attribute__ ((packed));
    unsigned char BlueMaskPos __attribute__ ((packed));
    unsigned char ReservedMaskSize __attribute__ ((packed));
    unsigned char ReservedMaskPos __attribute__ ((packed));
    unsigned char DirectColorModeInfo __attribute__ ((packed));
    unsigned long PhysBasePtr __attribute__ ((packed));
    unsigned long OffScreenMemOffset __attribute__ ((packed));
    unsigned short OffScreenMemSize __attribute__ ((packed));
    unsigned char Reserved[206] __attribute__ ((packed));
} MODE_INFO;

//////////////////////////////////////////////////////////

/* Driver entry points, implemented elsewhere. */
int GoGraphics();
int get_vesa_info();
int get_mode_info(int mode);
//int find_vesa_mode(int w, int h);
//int set_vesa_mode(int w, int h);
int set_vesa_mode(int mode_number);
void set_vesa_bank(int bank_number);
void putpixel_vesa_640x480(int x, int y, int color);
void copy_to_vesa_screen(char *memory_buffer, int screen_size);

#endif
|
package org.jooby.issues;
import org.jooby.Request;
import org.jooby.Response;
import org.jooby.Route;
import org.jooby.mvc.GET;
import org.jooby.mvc.Path;
import org.jooby.test.ServerFeature;
import org.junit.Test;
/**
 * Regression test for issue 924: an MVC route must be able to act as a
 * filter and pass control onward via Route.Chain.
 */
public class Issue924 extends ServerFeature {

    /** MVC controller that simply forwards the request down the chain. */
    @Path("/924")
    public static class Filter {
        @GET
        public void filter(Request req, Response rsp, Route.Chain chain) throws Throwable {
            chain.next(req, rsp);
        }
    }

    {
        // Mount the MVC filter first, then the script route it chains into.
        use(Filter.class);

        get("/924", () -> "next");
    }

    /** The chained script route's body must reach the client with HTTP 200. */
    @Test
    public void chainFromMvc() throws Exception {
        request().get("/924")
            .expect("next")
            .expect(200);
    }
}
|
#!/bin/bash
# Echo commands and fail fast while producing the production bundle.
set -ex

npm run build
|
#!/bin/sh
# Build the Jekyll site via the j.sh wrapper: install gems, clean, then build.
sh j.sh bundle install
sh j.sh jekyll clean -s my_site
# exec replaces this shell so the build's exit status becomes the script's.
exec sh j.sh jekyll build -s my_site
|
class UsersController < ApplicationController

  # GET /signup — show the signup form; logged-in users go to their page.
  get '/signup' do
    if !logged_in?
      erb :"/users/signup"
    else
      # Bug fix: @user was never assigned on this branch, so the redirect
      # raised NoMethodError (nil.id) for already-logged-in visitors.
      @user = current_user
      redirect to "/users/#{@user.id}"
    end
  end

  # POST /signup — reject blank fields or an already-registered email,
  # otherwise create the account and start a session.
  post '/signup' do
    if params[:username] == "" || params[:email] == "" || params[:password] == ""
      redirect to '/signup'
    elsif User.find_by(email: params["email"])
      redirect to '/signup'
    else
      # create persists the record; the original's extra save was redundant.
      @user = User.create(username: params["username"], email: params["email"], password: params["password"])
      session[:user_id] = @user.id
      redirect to "/users/#{@user.id}"
    end
  end

  # GET /login — show the login form; logged-in users go to their page.
  get '/login' do
    @user = current_user
    if !logged_in?
      erb :"/users/login"
    else
      redirect to "/users/#{@user.id}"
    end
  end

  # POST /login — authenticate by email and password.
  post '/login' do
    @user = User.find_by(email: params[:email])
    if @user && @user.authenticate(params[:password])
      session[:user_id] = @user.id
      redirect to "/users/#{@user.id}"
    else
      redirect to "/signup"
    end
  end

  # GET /logout — tear down the session when logged in.
  get '/logout' do
    if logged_in?
      session.destroy
      redirect to '/login'
    else
      redirect to '/'
    end
  end

  # GET: /users
  get "/users" do
    erb :"/users/index.html"
  end

  # GET: /users/5 — NOTE(review): always renders the *current* user and
  # ignores :id; confirm whether a per-id lookup was intended.
  get "/users/:id" do
    @user = current_user
    erb :"/users/show.html"
  end

  # GET: /users/5/edit
  get "/users/:id/edit" do
    erb :"/users/edit.html"
  end
end
|
#!/bin/sh
# Start nginx (daemonizes by default), then run the app under pm2.
/usr/local/nginx/sbin/nginx
npm run start:pm2
|
<reponame>marceloF5/ui-root
import React from 'react';
import { renderToString } from 'react-dom/server';
import App from './layout/app';
/* ==========================================================================
Server side rendering
- Exports function that receives a view model and outputs rendered html as a string
========================================================================== */
function render(initialState = {}) {
console.log('SSR rendered');
const html = renderToString(<App countInitial={initialState.data.count} />);
return html;
}
export default render;
|
package org.intellij.sonar;
import com.intellij.codeInspection.InspectionToolProvider;
import org.intellij.sonar.analysis.NewIssuesGlobalInspectionTool;
import org.intellij.sonar.analysis.OldIssuesGlobalInspectionTool;
/** Registers the Sonar global inspections (new and old issues) with the IDE. */
public class SonarInspectionToolProvider implements InspectionToolProvider {
    @Override
    public Class[] getInspectionClasses() {
        // Raw Class[] return type is dictated by the InspectionToolProvider API.
        return new Class[]{NewIssuesGlobalInspectionTool.class,OldIssuesGlobalInspectionTool.class};
    }
}
|
#! /bin/bash

# Generates regions.go from the latest EVE static data export (sqlite).
# Everything written to stdout from here on lands in the generated file.
exec > regions.go

DB=$HOME/tmp/sqlite-latest.sqlite

cat <<EOF
package mapdata
// this file has been generated from the latest sqlite sde
// do not edit
var regionNameByID = map[uint64]string{
EOF
sqlite3 $DB <<EOF
select regionID || ': "' || regionName || '", ' from mapRegions;
EOF
echo '}'
|
<filename>src/components/Hooks/useSendNotificationDefault/useSendNotificationDefault.js<gh_stars>1-10
import { useConfig } from 'components/Hooks'
// Builds the "Send Email Notification" switch field definition, with its
// initial value taken from the config entry whose Key matches `key`.
// (Like the original filter()[0] form, this throws when no entry matches.)
export const useSendNotificationDefault = (key) => {
  const config = useConfig()

  const matchingItem = config.items.find((item) => item.Key === key)
  const sendNotificationDefault = matchingItem.YesNoValue

  return [
    {
      name: 'sendNotification',
      label: 'Send Email Notification',
      initialValue: sendNotificationDefault,
      control: 'switch',
    },
  ]
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.