text
stringlengths 1
1.05M
|
|---|
<reponame>nebulatgs/cli
package ui
import (
"errors"
"fmt"
"reflect"
"sort"
"strings"
"github.com/manifoldco/promptui"
"github.com/railwayapp/cli/entity"
)
// Prompt is a label shown to the user when asking for input.
type Prompt string

// Selection is a value chosen by the user from a fixed set of options.
type Selection string

const (
	// InitNew starts a project from scratch.
	InitNew Selection = "Empty Project"
	// InitFromTemplate starts a project from a starter template.
	InitFromTemplate Selection = "Starter Template"
)
// PromptInit asks the user to choose a starting point for a new project:
// an empty project or a starter template.
func PromptInit() (Selection, error) {
	choices := []string{string(InitNew), string(InitFromTemplate)}
	_, choice, err := selectString("Starting Point", choices)
	return Selection(choice), err
}
// PromptText prompts the user for a free-form value labeled with text and
// returns whatever they type.
func PromptText(text string) (string, error) {
	p := promptui.Prompt{Label: text}
	return p.Run()
}
// hasTeams reports whether the given projects span more than one distinct
// team; projects without a team are ignored.
func hasTeams(projects []*entity.Project) bool {
	distinct := make(map[string]bool)
	for _, p := range projects {
		if p.Team != nil {
			distinct[*p.Team] = true
		}
	}
	return len(distinct) > 1
}
// promptTeams asks the user to pick a team when the project list spans
// multiple teams. It returns nil (and no error) when no selection is needed.
func promptTeams(projects []*entity.Project) (*string, error) {
	if !hasTeams(projects) {
		return nil, nil
	}
	// Collect each distinct team once, preserving first-seen order.
	seen := make(map[string]bool)
	teams := make([]string, 0)
	for _, p := range projects {
		if p.Team == nil || seen[*p.Team] {
			continue
		}
		seen[*p.Team] = true
		teams = append(teams, *p.Team)
	}
	_, team, err := selectString("Team", teams)
	return &team, err
}
// PromptProjects asks the user to select one of the given projects, first
// narrowing the list by team when the projects span multiple teams.
func PromptProjects(projects []*entity.Project) (*entity.Project, error) {
	// Check if need to prompt teams
	team, err := promptTeams(projects)
	if err != nil {
		return nil, err
	}
	filteredProjects := make([]*entity.Project, 0)
	if team == nil {
		filteredProjects = projects
	} else {
		for _, project := range projects {
			// Guard against projects with no team: the original dereferenced
			// project.Team unconditionally, panicking on a nil pointer.
			if project.Team != nil && *project.Team == *team {
				filteredProjects = append(filteredProjects, project)
			}
		}
	}
	// Most recently updated projects first.
	sort.Slice(filteredProjects, func(i int, j int) bool {
		return filteredProjects[i].UpdatedAt > filteredProjects[j].UpdatedAt
	})
	i, _, err := selectCustom("Project", filteredProjects, func(index int) string {
		return filteredProjects[index].Name
	})
	// Check the error before indexing: on failure (e.g. interrupt) the
	// returned index must not be trusted.
	if err != nil {
		return nil, err
	}
	return filteredProjects[i], nil
}
// PromptStarterTemplates prompts the user to select one of the provided starter templates
func PromptStarterTemplates(starters []*entity.Starter) (*entity.Starter, error) {
	i, _, err := selectCustom("Starter", starters, func(index int) string {
		return starters[index].Title
	})
	// Check the error before indexing (consistent with PromptEnvironments);
	// the index is not meaningful when the prompt fails.
	if err != nil {
		return nil, err
	}
	return starters[i], nil
}
// PromptIsRepoPrivate asks whether the repository should be private and
// returns true for a "Private" selection.
func PromptIsRepoPrivate() (bool, error) {
	_, choice, err := selectString("Visibility", []string{"Public", "Private"})
	return choice == "Private", err
}
// PromptEnvVars collects a value for each starter environment variable,
// prompting one at a time. Required variables are validated as non-empty;
// optional ones may be left blank. It returns an empty map when there are
// no variables to prompt for.
func PromptEnvVars(envVars []*entity.StarterEnvVar) (map[string]string, error) {
	variables := make(map[string]string)
	// Return early so neither the header nor the trailing newline is
	// printed when there is nothing to prompt for (the original printed
	// the trailing newline unconditionally).
	if len(envVars) == 0 {
		return variables, nil
	}
	fmt.Printf("\n%s\n", Bold("Environment Variables"))
	for _, envVar := range envVars {
		prompt := promptui.Prompt{
			Label:   envVar.Name,
			Default: envVar.Default,
		}
		if envVar.Optional {
			fmt.Printf("\n%s %s\n", envVar.Desc, GrayText("(Optional)"))
		} else {
			fmt.Printf("\n%s %s\n", envVar.Desc, GrayText("(Required)"))
			prompt.Validate = validatorRequired("value required")
		}
		v, err := prompt.Run()
		if err != nil {
			return nil, err
		}
		variables[envVar.Name] = v
	}
	// Extra newline to match the ones outputted in the loop
	fmt.Print("\n")
	return variables, nil
}
// PromptProjectName asks the user to name a new project; a non-blank value
// is required.
func PromptProjectName() (string, error) {
	templates := &promptui.PromptTemplates{
		Prompt:  "{{ . }} ",
		Valid:   fmt.Sprintf("%s {{ . | bold }}: ", promptui.IconGood),
		Invalid: fmt.Sprintf("%s {{ . | bold }}: ", promptui.IconBad),
		Success: fmt.Sprintf("%s {{ . | magenta | bold }}: ", promptui.IconGood),
	}
	p := promptui.Prompt{
		Label:     "Enter project name",
		Templates: templates,
		Validate:  validatorRequired("project name required"),
	}
	return p.Run()
}
// PromptConfirmProjectName asks the user to re-enter a project name for
// confirmation; any value (including blank) is accepted.
func PromptConfirmProjectName() (string, error) {
	templates := &promptui.PromptTemplates{
		Prompt:  "{{ . }} ",
		Valid:   fmt.Sprintf("%s {{ . | bold }}: ", promptui.IconGood),
		Invalid: fmt.Sprintf("%s {{ . | bold }}: ", promptui.IconBad),
		Success: fmt.Sprintf("%s {{ . | magenta | bold }}: ", promptui.IconGood),
	}
	p := promptui.Prompt{
		Label:     "Confirm project name",
		Templates: templates,
	}
	return p.Run()
}
// PromptGitHubScopes prompts the user to select one of the provided scopes.
// A single scope is returned directly without prompting.
func PromptGitHubScopes(scopes []string) (string, error) {
	if len(scopes) == 1 {
		return scopes[0], nil
	}
	_, owner, err := selectString("GitHub Owner", scopes)
	return owner, err
}
// PromptEnvironments asks the user to select one of the given environments.
// When only one environment exists it is announced and returned directly.
func PromptEnvironments(environments []*entity.Environment) (*entity.Environment, error) {
	if len(environments) == 1 {
		only := environments[0]
		fmt.Printf("%s Environment: %s\n", promptui.IconGood, BlueText(only.Name))
		return only, nil
	}
	i, _, err := selectCustom("Environment", environments, func(index int) string {
		return environments[index].Name
	})
	if err != nil {
		return nil, err
	}
	return environments[i], nil
}
// PromptPlugins asks the user to select one of the given plugin names.
func PromptPlugins(plugins []string) (string, error) {
	i, _, err := selectString("Plugin", plugins)
	// Check the error before indexing (consistent with PromptEnvironments);
	// the index is not meaningful when the prompt fails.
	if err != nil {
		return "", err
	}
	return plugins[i], nil
}
// PromptYesNo prompts the user to continue an action using the common (y/N) action
func PromptYesNo(msg string) (bool, error) {
	for {
		fmt.Printf("%s (y/N): ", msg)
		var response string
		if _, err := fmt.Scan(&response); err != nil {
			return false, err
		}
		// Accept both single-letter and full-word answers, case-insensitively.
		switch strings.ToLower(response) {
		case "y", "yes":
			return true, nil
		case "n", "no":
			return false, nil
		}
		fmt.Println("Please type yes or no and then press enter:")
	}
}
// validatorRequired builds a promptui-compatible validator that rejects
// blank (empty or whitespace-only) input with the given error message.
func validatorRequired(errorMsg string) func(s string) error {
	return func(s string) error {
		if len(strings.TrimSpace(s)) > 0 {
			return nil
		}
		return errors.New(errorMsg)
	}
}
// selectItemWrapper wraps an arbitrary stringify function + associated index, used by the
// select helpers so they can accept an arbitrary slice. It implements the Stringer
// interface so promptui can render each item with %s.
type selectItemWrapper struct {
	stringify func(index int) string // renders the item at index as a display label
	index     int                    // position of the wrapped item in the source slice
}

// String adheres to the Stringer interface and returns the label produced by
// the stringify function for the wrapped index.
func (w selectItemWrapper) String() string {
	return w.stringify(w.index)
}
// selectString prompts the user to select a string from the provided slice,
// returning the chosen index and value.
func selectString(label string, items []string) (int, string, error) {
	return selectCustom(label, items, func(index int) string {
		// Items are already strings; no fmt.Sprintf("%v", ...) round trip needed.
		return items[index]
	})
}
// selectCustom prompts the user to select an item from the provided slice. A stringify
// function is passed, which is responsible for returning a label for the item at a given
// index. It returns the selected index, its rendered label, and any prompt error.
func selectCustom(label string, items interface{}, stringify func(index int) string) (int, string, error) {
	v := reflect.ValueOf(items)
	if v.Kind() != reflect.Slice {
		// Programmer error: the panic message previously referred to
		// "forEachValue", a copy-paste from another helper.
		panic(fmt.Errorf("selectCustom: expected slice type, found %q", v.Kind().String()))
	}
	// Wrap each index so promptui renders items through the Stringer interface.
	wrappedItems := make([]selectItemWrapper, 0, v.Len())
	for i := 0; i < v.Len(); i++ {
		wrappedItems = append(wrappedItems, selectItemWrapper{
			stringify: stringify,
			index:     i,
		})
	}
	options := &promptui.Select{
		Label: fmt.Sprintf("Select %s", label),
		Items: wrappedItems,
		Size:  10,
		Templates: &promptui.SelectTemplates{
			Active:   fmt.Sprintf(`%s {{ . | underline }}`, promptui.IconSelect),
			Inactive: ` {{ . }}`,
			Selected: fmt.Sprintf("%s %s: {{ . | magenta | bold }} ", promptui.IconGood, label),
		},
		// Case-insensitive substring search over the rendered labels.
		Searcher: func(input string, i int) bool {
			return strings.Contains(
				strings.ToLower(stringify(i)),
				strings.ToLower(input),
			)
		},
	}
	return options.Run()
}
|
// Remove every "!" from the sample string.
// The original looped over indices while mutating the string via
// replace("!", ""); because the string shrinks, consecutive "!" characters
// could be skipped. A global-flag regex removes them all in one pass.
let string = "This is a sample string!";
string = string.replace(/!/g, "");
console.log(string);
|
#!/bin/bash
# Benchmark git-clone, tar, untar and rm timings inside the git pod of a
# namespace, repeated ITERATION times. Per-iteration results are appended
# to result files under output_dir.
#
# Args: 1=namespace 2=iteration count 3=tmp folder 4=output directory
set -e
readonly NAMESPACE=${1}
readonly ITERATION=${2}
readonly TMP_FOLDER=${3}
output_dir=$4
readonly GIT_URL=https://github.com/eclipse/che.git
readonly WORK_GIT_DIR=/data/repo
echo "NAMESPACE: ${NAMESPACE}"
echo "ITERATION: ${ITERATION}"
echo "TMP_FOLDER: ${TMP_FOLDER}"
echo "output_dir: ${output_dir}"
# Locate the (non-deployer) git pod in the namespace. All expansions are
# quoted; the original left them unquoted and subject to word splitting.
readonly GIT_POD=$(oc get pod -n "${NAMESPACE}" | grep -v deploy | grep git | awk '{print $1}')
for i_index in $(seq 1 "${ITERATION}");
do
    ### git clone
    echo "${NAMESPACE} iteration: ${i_index}"
    OUTPUT_RESULT_FILE="${output_dir}/result_${NAMESPACE}_${i_index}.txt"
    MY_TIME=-1
    start_time=$(date +%s)
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- rm -rf "${WORK_GIT_DIR}"
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- mkdir -p "${WORK_GIT_DIR}"
    clone_start_time=$(date +%s)
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- git -C "${WORK_GIT_DIR}" clone "${GIT_URL}"
    clone_time=$(($(date +%s) - clone_start_time))
    echo "${NAMESPACE} iteration: ${i_index}: git-clone is done in ${clone_time} secs" | tee -a "${OUTPUT_RESULT_FILE}"
    echo "git status ..."
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- git -C "${WORK_GIT_DIR}/che" status
    ### tar & untar
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- rm -rf "${WORK_GIT_DIR}/untar"
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- mkdir -p "${WORK_GIT_DIR}/untar"
    tar_start_time=$(date +%s)
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- tar -zcf "${WORK_GIT_DIR}/che.tar.gz" "${WORK_GIT_DIR}/che"
    tar_time=$(($(date +%s) - tar_start_time))
    echo "${NAMESPACE} iteration: ${i_index}: tar is done in ${tar_time} secs" | tee -a "${OUTPUT_RESULT_FILE}"
    echo "ls che.tar.gz ..."
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- ls -al "${WORK_GIT_DIR}/che.tar.gz"
    un_tar_start_time=$(date +%s)
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- tar -zxf "${WORK_GIT_DIR}/che.tar.gz" -C "${WORK_GIT_DIR}/untar"
    un_tar_time=$(($(date +%s) - un_tar_start_time))
    echo "${NAMESPACE} iteration: ${i_index}: un_t(ar) is done in ${un_tar_time} secs" | tee -a "${OUTPUT_RESULT_FILE}"
    echo "du untar folder ..."
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- du -sh "${WORK_GIT_DIR}/untar"
    rm_start_time=$(date +%s)
    oc exec -n "${NAMESPACE}" "${GIT_POD}" -- rm -rf "${WORK_GIT_DIR}/untar"
    rm_time=$(($(date +%s) - rm_start_time))
    echo "${NAMESPACE} iteration: ${i_index}: rm is done in ${rm_time} secs" | tee -a "${OUTPUT_RESULT_FILE}"
    MY_TIME=$(($(date +%s) - start_time))
    echo "${NAMESPACE} iteration: ${i_index}: finished in ${MY_TIME} secs" | tee -a "${OUTPUT_RESULT_FILE}"
done
|
#! /bin/sh
# When running as a PBS batch job (PBS_JOBNAME is set), load the user's
# profile, change to the submission directory, and load the compiler module.
if [ -n "$PBS_JOBNAME" ]
then
source "${PBS_O_HOME}/.bash_profile"
cd "$PBS_O_WORKDIR"
module load gcc/5.3.0
fi
# Run ecoevolity with a fixed seed for reproducibility; all output
# (stdout and stderr) is captured in the run-specific .out file.
prefix=../../data/genomes/msg/ecoevolity-output/run-6-
ecoevolity --seed 732264873 --prefix "$prefix" --relax-missing-sites --relax-constant-sites --relax-triallelic-sites ../../data/genomes/msg/ecoevolity-configs/cyrtodactylus-rate200.yml 1>../../data/genomes/msg/ecoevolity-output/run-6-cyrtodactylus-rate200.out 2>&1
|
exports.up = function(knex) {
return knex.schema.createTable('cards', table => {
table.integer('base_link')
table.increments('code').primary()
table.integer('deck_limit')
table.text('faction_code')
table.integer('faction_cost')
table.text('flavor')
table.text('illustrator')
table.integer('influence_limit')
table.text('keywords')
table.integer('minimum_deck_size')
table.text('pack_code')
table.integer('position')
table.integer('quantity')
table.text('side_code')
table.text('stripped_text')
table.text('text')
table.text('title')
table.text('type_code')
table.boolean('uniqueness')
table.integer('agenda_points')
table.integer('trash_cost')
})
};
// Migration rollback: drop the `cards` table if it exists.
exports.down = knex => knex.schema.dropTableIfExists('cards');
|
#!/bin/bash
# conda-build script for bwa-mem2: build all ISA-specific binaries
# (`make multi`) and install them into the environment prefix.
# Abort on any failure so a broken build does not produce a package.
set -e
LIBS="${LDFLAGS}" make CC="${CC}" CXX="${CXX}" multi
# Quote $PREFIX: build prefixes can contain spaces.
mkdir -p "${PREFIX}/bin"
cp bwa-mem2* "${PREFIX}/bin"
|
def is_divisible_by_5_and_7(num):
    """Return True if num is divisible by both 5 and 7, else False.

    The condition is already a boolean, so return it directly instead of
    the redundant ``if ...: return True / else: return False`` pattern.
    """
    return num % 5 == 0 and num % 7 == 0


print(is_divisible_by_5_and_7(50))
|
<reponame>mtwilliams/vigilant
require File.expand_path "../helpers", __FILE__

# Exercises Vigilant's test generation against the bundled fixtures.
context "Vigilant Test Generation" do
  setup do
    # Point the generator at the fixtures directory next to this file and
    # generate tests once before each assertion.
    @path = File.join(File.dirname(__FILE__), "fixtures")
    @generator = Vigilant.new(@path)
    @generator.generate_tests
  end

  test "can parse tests" do
    # The fixtures contain exactly one test.
    assert_equal 1, @generator.num_tests
  end
end
|
package sri.mobile.examples.uiexplorer.components
import org.scalajs.dom
import sri.core._
import sri.universal.apis.{Layout, LayoutAnimation, LayoutEvent}
import sri.universal.components._
import sri.universal.styles.InlineStyleSheetUniversal
import scala.scalajs.js
import scala.scalajs.js.{JSON, undefined, UndefOr => U}
object LayoutEventsExample extends UIExample {

  // Snapshot of the layout data received so far plus the styles mutated by
  // the example. All fields default to undefined/empty until events arrive.
  case class State(containerStyle: js.UndefOr[js.Any] = undefined,
                   extraText: String = "",
                   imageLayout: js.UndefOr[Layout] = undefined,
                   textLayout: js.UndefOr[Layout] = undefined,
                   viewLayout: js.UndefOr[Layout] = undefined,
                   viewStyle: js.UndefOr[js.Any] = js.undefined)

  class Component extends ComponentS[State] {

    initialState(State(viewStyle = styles.dynamicView(20)))

    // Renders the explanatory text plus a view/image/text trio whose
    // onLayout callbacks feed the layout snapshots displayed below them.
    def render() = {
      UIExplorerPage(
        View(style = styles.containerStyle)(
          TextC(
            "layout events are called on mount and whenever layout is recalculated. Note that the layout event will typically be received",
            Text(style = styles.italicText)("before"),
            "the layout has updated on screen, especially when using layout animations.",
            Text(style = styles.pressText, onPress = () => animateViewLayout)(
              " Press here to change layout.")
          ),
          View(onLayout = onViewLayout _,
               style = styles.view(state.viewStyle.getOrElse(null)))(
            Image(
              onLayout = onImageLayout _,
              style = styles.image,
              source = ImageSource(uri =
                "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851561_767334496626293_1958532586_n.png")
            ),
            TextC(
              s"ViewLayout : ${JSON.stringify(state.viewLayout.getOrElse(""))} \n\n"),
            Text(style = styles.text, onLayout = onTextLayout _)(
              s"A simple piece of text.${state.extraText}"),
            TextC(
              s"""
                 |
                 |Text w/h : ${if (state.textLayout.isDefined)
                s"${state.textLayout.get.width}/${state.textLayout.get.height}"
              else "?/?"}
                 |Image x/y : ${if (state.imageLayout.isDefined)
                s"${state.imageLayout.get.x}/${state.imageLayout.get.y}"
              else "?/?"}
      """.stripMargin
            )
          )
        )
      )
    }

    // The three handlers below store the latest native layout for their
    // element so render() can display it.
    def onViewLayout(e: LayoutEvent) = {
      dom.window.console.log(s"received view layout event \n", e.nativeEvent)
      setState((state: State) => state.copy(viewLayout = e.nativeEvent.layout))
    }

    def onTextLayout(e: LayoutEvent) = {
      dom.window.console.log(s"received text layout event \n", e.nativeEvent)
      setState((state: State) => state.copy(textLayout = e.nativeEvent.layout))
    }

    def onImageLayout(e: LayoutEvent) = {
      dom.window.console.log(s"received image layout event \n", e.nativeEvent)
      setState(
        (state: State) => state.copy(imageLayout = e.nativeEvent.layout))
    }

    // Toggles the dynamic view's margin between 20 and 60 with a spring
    // animation; when the animation completes, extra text is appended to
    // force another re-layout.
    def animateViewLayout() = {
      LayoutAnimation.configureNext(
        LayoutAnimation.Presets.spring,
        () => {
          println(s"layout animation done")
          setState(
            (state: State) =>
              state.copy(extraText =
                           " And a bunch more text to wrap around a few lines",
                         containerStyle = styles.containerStyle))
        }
      )
      setState((state: State) =>
        state.copy(viewStyle = styles.dynamicView(
          if (state.viewStyle.getOrElse("margin", 0).asInstanceOf[Double] > 20)
            20
          else 60)))
    }
  }

  val component = () => CreateElementNoProps[Component]()

  // Inline styles for the example's views, text and image.
  object styles extends InlineStyleSheetUniversal {
    import dsl._

    def view(another: js.Any) =
      js.Array(another,
               styleUR(padding := 12,
                       borderColor := "black",
                       borderWidth := 0.5,
                       backgroundColor := "transparent"))

    val text = style(alignSelf.flexStart,
                     borderColor := "rgba(0, 0, 255, 0.2)",
                     borderWidth := 0.5)

    val image =
      style(width := 50, height := 50, marginBottom := 10, alignSelf.center)

    val pressText = style(fontWeight.bold)
    val italicText = style(fontStyle.italic)
    val containerStyle = style(width := 280)

    def dynamicView(value: Double) = styleUR(margin := value)
  }

  override def title: String = "Layout Events"

  override def description: String =
    "Examples that show how Layout events can be used to measure view size and position"
}
|
#!/bin/bash
set -euo pipefail
: ${TOKEN:?}
: ${COLUMN_ID:?}
: ${BRANCH:?}
: ${DESCRIPTION:?}
curl \
-X POST \
-H "Accept: application/vnd.github.inertia-preview+json" \
-H "Authorization: token $TOKEN" \
https://api.github.com/projects/columns/$COLUMN_ID/cards \
-d @- << EOF 2>/dev/null | jq -r '.url'
{
"note" : "_$(date +%Y-%0m-%0d)_ - stemcell ${DESCRIPTION} [**$BRANCH**]\n\nIt's time to build a new version of stemcells which include the latest upstream vulnerability fixes. The pipeline should have automatically triggered when this story was created to kick off that process. Detailed instructions on what to do to make a patch lives [here](https://github.com/pivotal-cf/bosh-team/blob/master/stemcells/flow-of-stemcell-pipelines.md#cutting-a-new-stemcell-patch)\\n\\nEnsure the OS image and subsequent stemcell builds finished successfully.\\n\\n**Acceptance Criteria**\\n\\n* A new version of the stemcell can be published from the pipeline."
}
EOF
|
import org.testng.ITestResult;
import org.testng.Reporter;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Test;
public class ExtendReportsClass {

    @Test
    public void exampleTest() {
        // TestNG test method implementation
    }

    /**
     * After each test method, enrich the report with the test's name and
     * status, then attach screenshots and logs.
     */
    @AfterMethod
    public void generateCustomReport(ITestResult result) {
        // Capture additional information such as metadata, screenshots, and logs
        final String name = result.getName();
        final String status = getResultStatus(result.getStatus());

        // Add custom information to the test report
        Reporter.log("Test Name: " + name);
        Reporter.log("Test Status: " + status);
        // Add more custom information as needed

        // Capture and attach screenshots and logs to the test report
        captureAndAttachScreenshot();
        captureAndAttachLogs();
    }

    /** Maps a TestNG numeric status code to a human-readable label. */
    private String getResultStatus(int status) {
        if (status == ITestResult.SUCCESS) {
            return "PASS";
        }
        if (status == ITestResult.FAILURE) {
            return "FAIL";
        }
        if (status == ITestResult.SKIP) {
            return "SKIP";
        }
        return "UNKNOWN";
    }

    private void captureAndAttachScreenshot() {
        // Logic to capture and attach screenshots to the test report
    }

    private void captureAndAttachLogs() {
        // Logic to capture and attach logs to the test report
    }
}
|
<gh_stars>1-10
from fbchat import Client
from fbchat.models import *
# Change this to your group id: the single thread the bot watches.
old_thread_id = "1234567890"
# Change these to match your liking: the thread settings the bot enforces
# whenever another participant tries to alter them.
old_color = ThreadColor.MESSENGER_BLUE
old_emoji = "👍"
old_title = "Old group chat name"
# Maps participant user-id -> the nickname to restore on change.
old_nicknames = {
    "12345678901": "User nr. 1's nickname",
    "12345678902": "User nr. 2's nickname",
    "12345678903": "User nr. 3's nickname",
    "12345678904": "User nr. 4's nickname",
}
class KeepBot(Client):
    """fbchat client that reverts any change made to the watched thread.

    Each handler checks that the event concerns ``old_thread_id`` and, when
    the new value differs from the configured one, restores the original
    color/emoji/title/nickname or group membership.
    """

    def onColorChange(self, author_id, new_color, thread_id, thread_type, **kwargs):
        # Restore the configured thread color if someone changed it.
        if old_thread_id == thread_id and old_color != new_color:
            print(
                "{} changed the thread color. It will be changed back".format(author_id)
            )
            self.changeThreadColor(old_color, thread_id=thread_id)

    def onEmojiChange(self, author_id, new_emoji, thread_id, thread_type, **kwargs):
        # Restore the configured thread emoji if someone changed it.
        if old_thread_id == thread_id and new_emoji != old_emoji:
            print(
                "{} changed the thread emoji. It will be changed back".format(author_id)
            )
            self.changeThreadEmoji(old_emoji, thread_id=thread_id)

    def onPeopleAdded(self, added_ids, author_id, thread_id, **kwargs):
        # Remove anyone added by someone other than the bot itself.
        if old_thread_id == thread_id and author_id != self.uid:
            print("{} got added. They will be removed".format(added_ids))
            for added_id in added_ids:
                self.removeUserFromGroup(added_id, thread_id=thread_id)

    def onPersonRemoved(self, removed_id, author_id, thread_id, **kwargs):
        # No point in trying to add ourself
        if (
            old_thread_id == thread_id
            and removed_id != self.uid
            and author_id != self.uid
        ):
            print("{} got removed. They will be re-added".format(removed_id))
            self.addUsersToGroup(removed_id, thread_id=thread_id)

    def onTitleChange(self, author_id, new_title, thread_id, thread_type, **kwargs):
        # Restore the configured thread title if someone changed it.
        if old_thread_id == thread_id and old_title != new_title:
            print(
                "{} changed the thread title. It will be changed back".format(author_id)
            )
            self.changeThreadTitle(
                old_title, thread_id=thread_id, thread_type=thread_type
            )

    def onNicknameChange(
        self, author_id, changed_for, new_nickname, thread_id, thread_type, **kwargs
    ):
        # Restore the configured nickname for known participants only.
        if (
            old_thread_id == thread_id
            and changed_for in old_nicknames
            and old_nicknames[changed_for] != new_nickname
        ):
            print(
                "{} changed {}'s' nickname. It will be changed back".format(
                    author_id, changed_for
                )
            )
            self.changeNickname(
                old_nicknames[changed_for],
                changed_for,
                thread_id=thread_id,
                thread_type=thread_type,
            )
# Replace the placeholders with real Facebook credentials before running;
# listen() blocks and dispatches the handlers above until interrupted.
client = KeepBot("<email>", "<password>")
client.listen()
|
#!/usr/bin/env bash
# Launch the Claymore ETH miner against the Hiveon pool.
# Expects WALLET, PORT and INTENSITY to be set in the environment.
#
# Flag reference (kept from the original author):
# -ethi 8 ; default intensity, lower for OS rendering
# -erate ; submit hashrate
# -r 1 ; restart on failure
# -tt 1 ; only show temp and fan speed; do not manage; not possible on linux with nvidia gpu
# -tstop 89 ; stop mining on this temp
# -fanmax 70 ; fan max speed; does not work on linux nvidia
# -fanmin 30 ; fan min speed; does not work on linux nvidia
# -ttli ; reduce entire mining intensity automatically if GPU temperature is above value ; stop on -tt
POOL="eu-eth.hiveon.net:4444"
exec ./claymore/ethdcrminer64 -epool ${POOL} -ewal ${WALLET}.AsteroidMiner -epsw x -mode 1 -dbg -1 -mport -${PORT} -etha 0 -ethi ${INTENSITY} -erate 1 -r 0 -retrydelay 1 -ftime 55 -tt 1 -tstop 95 -ttli 93 -tstart 55
|
#!/bin/bash
# Container entrypoint: optionally configure salt-minion and remote ADB,
# then assemble and run Appium under Xvfb with flags driven by env toggles
# (CONNECT_TO_GRID, DEFAULT_CAPABILITIES, RELAXED_SECURITY, ...).
NODE_CONFIG_JSON="/root/nodeconfig.json"
DEFAULT_CAPABILITIES_JSON="/root/defaultcapabilities.json"
APPIUM_LOG="/var/log/appium.log"
CMD="xvfb-run appium --log $APPIUM_LOG"
# Prefer [ -n ... ] over the double-negative [ ! -z ... ]; also fixes the
# "it not empty" typo in the log line.
if [ -n "${SALT_MASTER}" ]; then
    echo "[INIT] ENV SALT_MASTER is not empty, salt-minion will be prepared"
    echo "master: ${SALT_MASTER}" >> /etc/salt/minion
    salt-minion &
    echo "[INIT] salt-minion is running..."
fi
if [ "$REMOTE_ADB" = true ]; then
    /root/wireless_connect.sh
fi
if [ "$CONNECT_TO_GRID" = true ]; then
    # Generate the grid node config unless a custom one is mounted in.
    if [ "$CUSTOM_NODE_CONFIG" != true ]; then
        /root/generate_config.sh "$NODE_CONFIG_JSON"
    fi
    CMD+=" --nodeconfig $NODE_CONFIG_JSON"
fi
if [ "$DEFAULT_CAPABILITIES" = true ]; then
    CMD+=" --default-capabilities $DEFAULT_CAPABILITIES_JSON"
fi
if [ "$RELAXED_SECURITY" = true ]; then
    CMD+=" --relaxed-security"
fi
# Clear any stale Xvfb process/display lock before starting.
pkill -x xvfb-run
rm -rf /tmp/.X99-lock
$CMD
|
"""
Create an HTML table containing rows 1 to 10, each cell being a multiple of the row number.
"""
print("<table>")
for i in range(1, 11):
print("<tr>")
for j in range(1, 11):
print("<td>" + str(i * j) + "</td>")
print("</tr>")
print("</table>")
|
<reponame>ownrecipes/ownrecipes-web<gh_stars>0
import IngredientGroups from '../components/IngredientGroups';
import createComponentWithIntlAndRouter from '../../test/createComponentWithIntlAndRouter';
import data from './data';
import { PendingState } from '../../common/store/GenericReducerType';
// Snapshot test: renders IngredientGroups with fixture data in the
// completed (non-pending) state and compares against the stored snapshot.
test('Ingredient Group component test', () => {
  const checkIngredient = jest.fn();
  const component = createComponentWithIntlAndRouter(
    <IngredientGroups groups={data.ingredient_groups} pending={PendingState.COMPLETED} checkIngredient={checkIngredient} />
  );
  const tree = component.toJSON();
  expect(tree).toMatchSnapshot();
});
|
<reponame>mevoly/-
// pages/order/index.js
import{ request } from "../../request/index.js";
import regeneratorRuntime from '../../lib/runtime/runtime';
// Order list page: shows orders filtered by status tabs.
Page({
  data: {
    orders: [],
    // Status tabs; `id` mirrors the position. The last tab's id was
    // duplicated as 2 in the original data — fixed to 3.
    tabs: [
      {
        id: 0,
        value: "全部",
        isActive: true
      },
      {
        id: 1,
        value: "待付款",
        isActive: false
      },
      {
        id: 2,
        value: "待发货",
        isActive: false
      },
      {
        id: 3,
        value: "退款/退货",
        isActive: false
      }
    ],
  },
  onShow(options) {
    // Redirect to the auth page when no token is stored.
    const token = wx.getStorageSync("token");
    if (!token) {
      wx.navigateTo({
        url: '/pages/auth/index',
      });
    }
    // Read the `type` query param of the current page to pick the tab.
    let pages = getCurrentPages();
    let currentPage = pages[pages.length - 1];
    const { type } = currentPage.options;
    // Activate the selected tab title; `type` is 1-based, tabs are 0-based.
    this.changeTitleByIndex(type - 1);
    this.getOrders(type);
  },
  // Fetch the order list for the given status type and localize timestamps.
  async getOrders(type) {
    const res = await request({ url: "/my/order/all", data: { type } });
    this.setData({
      orders: res.orders.map(v => ({ ...v, create_time_cn: (new Date(v.create_time * 1000).toLocaleString()) }))
    })
  },
  // Activate exactly the tab at `index`, deactivating all others.
  changeTitleByIndex(index) {
    let { tabs } = this.data;
    tabs.forEach((v, i) => i === index ? v.isActive = true : v.isActive = false);
    this.setData({
      tabs
    })
  },
  handleTabsItemChange(e) {
    // Index of the tapped tab title; order types are 1-based.
    const { index } = e.detail;
    this.changeTitleByIndex(index);
    this.getOrders(index + 1);
  },
})
|
package com.ulfy.master.application.vm;
import com.ulfy.android.mvvm.IView;
import com.ulfy.android.task.LoadDataUiTask;
import com.ulfy.android.task.LoadListPageUiTask;
import com.ulfy.android.utils.LogUtils;
import com.ulfy.master.application.base.BaseVM;
import com.ulfy.master.ui.view.TaskView;
import java.util.ArrayList;
import java.util.List;
public class TaskVM extends BaseVM {
    /**
     * Data produced by the plain data-loading task.
     */
    public String data;
    /**
     * Backing list and paging task info for the paged data-loading task.
     */
    public List<String> listPageDataList = new ArrayList<>();
    public LoadListPageUiTask.LoadListPageUiTaskInfo<String> loadPageDataTaskInfo = new LoadListPageUiTask.LoadListPageUiTaskInfo<>(listPageDataList);

    /**
     * Returns a task body that reports start and completes immediately
     * without producing any data.
     */
    public LoadDataUiTask.OnExecute loadDataOnExe() {
        return new LoadDataUiTask.OnExecute() {
            @Override public void onExecute(LoadDataUiTask task) {
                try {
                    task.notifyStart("正在加载...");
                    task.notifySuccess("加载完成");
                } catch (Exception e) {
                    LogUtils.log("加载失败", e);
                    task.notifyFail(e);
                }
            }
        };
    }

    /**
     * Returns a task body that simulates a 1-second load and then fills
     * {@link #data}.
     */
    public LoadDataUiTask.OnExecute loadDataTaskOnExe() {
        return new LoadDataUiTask.OnExecute() {
            @Override public void onExecute(LoadDataUiTask task) {
                try {
                    task.notifyStart("正在加载...");
                    Thread.sleep(1000);
                    data = "LoadDataTask数据";
                    task.notifySuccess("加载完成");
                } catch (Exception e) {
                    LogUtils.log("加载失败", e);
                    task.notifyFail(e);
                }
            }
        };
    }

    /**
     * Returns a paged-load body that simulates a 1-second fetch and adds
     * three items per page.
     */
    public LoadListPageUiTask.OnLoadListPage loadListDataTaskPerPageOnExe() {
        return new LoadListPageUiTask.OnLoadSimpleListPage() {
            @Override protected void loadSimplePage(LoadListPageUiTask task, List<Object> modelList, List<Object> tempList, int page, int pageSize) throws Exception {
                Thread.sleep(1000);
                for (int i = 0; i < 3; i++) {
                    tempList.add(String.format("LoadListDataTask数据:页 %d - %d", page, i));
                }
            }
        };
    }

    @Override public Class<? extends IView> getViewClass() {
        return TaskView.class;
    }
}
|
/**
 * Minimal shopping cart.
 *
 * `productList` maps an item key to `{ price, quantity }`, `quantityList`
 * keeps `{ product, quantity }` entries in insertion order, and
 * `totalPrice` is refreshed by `calculateTotal()`.
 */
class ShoppingCart {
  constructor() {
    this.productList = {};
    this.quantityList = [];
    this.totalPrice = 0;
  }

  /**
   * Add one unit of each item in productItems.
   *
   * Fixes the original logic, which stored new products without a quantity
   * and then computed `undefined + 1` (NaN) on the next add, and abused
   * `Object.assign({quantity: ...}, item)` on string keys.
   */
  addItems(productItems) {
    for (const item of productItems) {
      if (this.productList.hasOwnProperty(item)) {
        this.productList[item].quantity += 1;
      } else {
        // New products start at price 0 (set externally) and quantity 1.
        this.productList[item] = { price: 0, quantity: 1 };
      }
      const entryIndex = this.quantityList.findIndex(p => p.product === item);
      if (entryIndex !== -1) {
        this.quantityList[entryIndex].quantity++;
      } else {
        this.quantityList.push({ product: item, quantity: 1 });
      }
    }
  }

  /**
   * Remove one unit of each item in productItems; unknown items are
   * ignored and quantities never drop below zero.
   */
  removeItems(productItems) {
    for (const item of productItems) {
      if (!this.productList.hasOwnProperty(item)) {
        continue;
      }
      const record = this.productList[item];
      record.quantity = Math.max(0, (record.quantity || 0) - 1);
      const entryIndex = this.quantityList.findIndex(p => p.product === item);
      if (entryIndex !== -1 && this.quantityList[entryIndex].quantity > 0) {
        this.quantityList[entryIndex].quantity--;
      }
    }
  }

  /**
   * Recompute `totalPrice` as the sum of quantity * price over all entries.
   */
  calculateTotal() {
    let total = 0;
    this.quantityList.forEach(item => {
      const productPrice = this.productList[item.product].price;
      total += item.quantity * productPrice;
    });
    this.totalPrice = total;
  }
}
|
package org.insightcentre.nlp.saffron.data;
import java.util.HashSet;
import java.util.Set;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* A concept represented by one or more terms
*
* @author <NAME>
*
*/
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public class Concept {

    // JSON property names used by the creator and getter annotations below.
    public final static String JSON_ID = "id";
    public final static String JSON_PREFERRED_TERM = "preferred_term";
    public final static String JSON_SYNONYMS = "synonyms";

    private String id;
    // Serialized through the string getters below rather than directly,
    // hence @JsonIgnore on the Term-typed fields.
    @JsonIgnore
    private Term preferredTerm;
    @JsonIgnore
    private Set<Term> synonyms;

    /**
     * JSON creator: builds the concept from plain strings, converting the
     * preferred term and any synonyms into Term objects.
     */
    @JsonCreator
    public Concept(@JsonProperty(value = JSON_ID, required = true)String id,
            @JsonProperty(value = JSON_PREFERRED_TERM, required = true) String preferredTerm,
            @JsonProperty(value = JSON_SYNONYMS) Set<String> synonyms) {
        this(id, new Term.Builder(preferredTerm).build());
        if (synonyms != null)
            this.setSynonymsFromStrings(synonyms);
    }

    /** Creates a concept with the given id and preferred term and no synonyms. */
    public Concept(String id, Term preferredTerm) {
        this.setId(id);
        this.setPreferredTerm(preferredTerm);
        this.setSynonyms(new HashSet<Term>());
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public Term getPreferredTerm() {
        return preferredTerm;
    }

    /** String form of the preferred term, used for JSON serialization. */
    @JsonGetter(JSON_PREFERRED_TERM)
    public String getPreferredTermString() {
        return preferredTerm.getString();
    }

    public void setPreferredTerm(Term preferredTerm) {
        this.preferredTerm = preferredTerm;
    }

    public Set<Term> getSynonyms() {
        return synonyms;
    }

    /** String forms of the synonyms, used for JSON serialization. */
    @JsonGetter(JSON_SYNONYMS)
    public Set<String> getSynonymsStrings() {
        Set<String> synonyms = new HashSet<String>();
        for(Term term: this.getSynonyms()) {
            synonyms.add(term.getString());
        }
        return synonyms;
    }

    public void addSynonym(Term synonym) {
        this.synonyms.add(synonym);
    }

    public void setSynonyms(Set<Term> synonyms) {
        this.synonyms = synonyms;
    }

    /** Replaces no synonyms; adds one Term per given string. */
    public void setSynonymsFromStrings(Set<String> synonyms) {
        for(String term: synonyms) {
            this.addSynonym(new Term.Builder(term).build());
        }
    }

    @Override
    public String toString() {
        return "Concept [id=" + id + ", preferredTerm=" + preferredTerm + ", synonyms=" + synonyms + "]";
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((id == null) ? 0 : id.hashCode());
        result = prime * result + ((preferredTerm == null) ? 0 : preferredTerm.hashCode());
        result = prime * result + ((synonyms == null) ? 0 : synonyms.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Concept other = (Concept) obj;
        if (id == null) {
            if (other.id != null)
                return false;
        } else if (!id.equals(other.id))
            return false;
        if (preferredTerm == null) {
            if (other.preferredTerm != null)
                return false;
        } else if (!preferredTerm.equals(other.preferredTerm))
            return false;
        if (synonyms == null) {
            if (other.synonyms != null)
                return false;
        } else if (!synonyms.equals(other.synonyms))
            return false;
        return true;
    }

    /** Fluent builder for {@link Concept}. */
    public static class Builder {
        private Concept concept;

        public Builder(String id, String preferredTerm) {
            this.concept = new Concept(id, new Term.Builder(preferredTerm).build());
        }

        public Builder id(String id) {
            this.concept.setId(id);
            return this;
        }

        public Builder preferredTerm(Term preferredTerm) {
            this.concept.setPreferredTerm(preferredTerm);
            return this;
        }

        public Builder preferredTerm(String preferredTerm) {
            this.concept.setPreferredTerm(new Term.Builder(preferredTerm).build());
            return this;
        }

        public Builder synonyms(Set<Term> synonyms) {
            this.concept.setSynonyms(synonyms);
            return this;
        }

        public Builder synonymString(Set<String> synonyms) {
            this.concept.setSynonymsFromStrings(synonyms);
            return this;
        }

        public Builder addSynonym(Term synonym) {
            if (this.concept.getSynonyms() == null)
                this.concept.setSynonyms(new HashSet<Term>());
            this.concept.addSynonym(synonym);
            return this;
        }

        public Builder addSynonym(String synonym) {
            if (this.concept.getSynonyms() == null)
                this.concept.setSynonyms(new HashSet<Term>());
            this.concept.addSynonym(new Term.Builder(synonym).build());
            return this;
        }

        public Concept build() {
            return this.concept;
        }
    }
}
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.services;
import org.apache.tapestry5.MarkupWriter;
import org.apache.tapestry5.internal.structure.PageResetListener;
import org.apache.tapestry5.plastic.MethodDescription;
import org.apache.tapestry5.plastic.PlasticUtils;
import org.apache.tapestry5.runtime.Component;
import org.apache.tapestry5.runtime.ComponentEvent;
import org.apache.tapestry5.runtime.Event;
import org.apache.tapestry5.runtime.PageLifecycleListener;
import org.apache.tapestry5.services.transform.ComponentClassTransformWorker2;
import java.lang.reflect.Modifier;
/**
 * Constants used by implementations of {@link ComponentClassTransformWorker2}.
 *
 * Note: render phase methods on transformed components will not be invoked <em>unless</em>
 * {@linkplain org.apache.tapestry5.model.MutableComponentModel#addRenderPhase(Class) the component model is updated to
 * identify the use of the corresponding render phase}. This represents an optimization introduced in Tapestry 5.1.
 */
public final class TransformConstants
{
    // Shared parameters of a whole bunch of lifecycle methods, representing the different
    // component render states.
    private static final String[] RENDER_PHASE_METHOD_PARAMETERS =
            {MarkupWriter.class.getName(), Event.class.getName()};

    /**
     * Description for
     * {@link org.apache.tapestry5.runtime.Component#dispatchComponentEvent(org.apache.tapestry5.runtime.ComponentEvent)}
     * .
     *
     * @see org.apache.tapestry5.annotations.OnEvent
     * @since 5.3
     */
    public static final MethodDescription DISPATCH_COMPONENT_EVENT_DESCRIPTION = PlasticUtils.getMethodDescription(
            Component.class, "dispatchComponentEvent", ComponentEvent.class);

    /**
     * Description for {@link org.apache.tapestry5.runtime.PageLifecycleListener#containingPageDidLoad()}.
     *
     * @since 5.3
     */
    public static final MethodDescription CONTAINING_PAGE_DID_LOAD_DESCRIPTION = PlasticUtils.getMethodDescription(
            PageLifecycleListener.class, "containingPageDidLoad");

    /**
     * Description for {@link org.apache.tapestry5.internal.structure.PageResetListener#containingPageDidReset()}. Note that the {@link PageResetListener}
     * interface is not automatically implemented by components.
     *
     * @see org.apache.tapestry5.annotations.PageReset
     * @see org.apache.tapestry5.internal.transform.PageResetAnnotationWorker
     * @since 5.3
     */
    public static final MethodDescription CONTAINING_PAGE_DID_RESET_DESCRIPTION = PlasticUtils.getMethodDescription(PageResetListener.class, "containingPageDidReset");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#postRenderCleanup()}.
     *
     * @since 5.3
     */
    public static final MethodDescription POST_RENDER_CLEANUP_DESCRIPTION = PlasticUtils.getMethodDescription(Component.class, "postRenderCleanup");

    /**
     * Description for {@link org.apache.tapestry5.runtime.PageLifecycleListener#containingPageDidDetach()}.
     *
     * @since 5.3
     */
    public static final MethodDescription CONTAINING_PAGE_DID_DETACH_DESCRIPTION = PlasticUtils.getMethodDescription(PageLifecycleListener.class, "containingPageDidDetach");

    /**
     * Description for {@link org.apache.tapestry5.runtime.PageLifecycleListener#containingPageDidAttach()}.
     *
     * @since 5.3
     */
    public static final MethodDescription CONTAINING_PAGE_DID_ATTACH_DESCRIPTION = PlasticUtils.getMethodDescription(PageLifecycleListener.class, "containingPageDidAttach");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#setupRender(MarkupWriter, Event)}.
     *
     * @see org.apache.tapestry5.annotations.SetupRender
     * @since 5.3
     */
    public static final MethodDescription SETUP_RENDER_DESCRIPTION = renderPhaseDescription("setupRender");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#beginRender(MarkupWriter, Event)}.
     *
     * @see org.apache.tapestry5.annotations.BeginRender
     * @since 5.3
     */
    public static final MethodDescription BEGIN_RENDER_DESCRIPTION = renderPhaseDescription("beginRender");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#beforeRenderTemplate(MarkupWriter, Event)}.
     *
     * @see org.apache.tapestry5.annotations.BeforeRenderTemplate
     * @since 5.3
     */
    public static final MethodDescription BEFORE_RENDER_TEMPLATE_DESCRIPTION = renderPhaseDescription("beforeRenderTemplate");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#afterRenderTemplate(MarkupWriter, Event)}.
     *
     * @see org.apache.tapestry5.annotations.AfterRenderTemplate
     * @since 5.3
     */
    public static final MethodDescription AFTER_RENDER_TEMPLATE_DESCRIPTION = renderPhaseDescription("afterRenderTemplate");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#beforeRenderBody(MarkupWriter, Event)}.
     *
     * @see org.apache.tapestry5.annotations.BeforeRenderBody
     * @since 5.3
     */
    public static final MethodDescription BEFORE_RENDER_BODY_DESCRIPTION = renderPhaseDescription("beforeRenderBody");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#afterRenderBody(MarkupWriter, Event)}.
     *
     * @see org.apache.tapestry5.annotations.AfterRenderBody
     * @since 5.3
     */
    public static final MethodDescription AFTER_RENDER_BODY_DESCRIPTION = renderPhaseDescription("afterRenderBody");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#afterRender(MarkupWriter, Event)}
     *
     * @see org.apache.tapestry5.annotations.AfterRender
     * @since 5.3
     */
    public static final MethodDescription AFTER_RENDER_DESCRIPTION = renderPhaseDescription("afterRender");

    /**
     * Description for {@link org.apache.tapestry5.runtime.Component#cleanupRender(MarkupWriter, Event)}.
     *
     * @see org.apache.tapestry5.annotations.CleanupRender
     * @since 5.3
     */
    public static final MethodDescription CLEANUP_RENDER_DESCRIPTION = renderPhaseDescription("cleanupRender");

    // Builds the shared "public void name(MarkupWriter, Event)" description used by
    // all render-phase constants above.
    private static MethodDescription renderPhaseDescription(String name)
    {
        return new MethodDescription(Modifier.PUBLIC, "void", name, RENDER_PHASE_METHOD_PARAMETERS, null, null);
    }
}
|
<reponame>uk-gov-mirror/alphagov.support-api
class ContentItemEnrichmentWorker
  include Sidekiq::Worker

  # Attaches the looked-up content item to the given problem report and saves it.
  def perform(problem_report_id)
    report = ProblemReport.find(problem_report_id)
    normalised_path = URI(report.path).path # normalise the path before looking it up
    report.content_item = fetch_content_item(normalised_path)
    report.save!
  end

  private

  # Returns an existing ContentItem for the path with its organisations
  # refreshed, or builds a new one from the lookup result.
  def fetch_content_item(path)
    looked_up = SupportApi.content_item_lookup.lookup(path)
    existing = ContentItem.find_by(path: path)

    unless existing
      return ContentItem.new(path: looked_up.path, organisations: build_orgs(looked_up.organisations))
    end

    existing.organisations = build_orgs(looked_up.organisations) # refresh the orgs
    existing
  end

  # Finds or creates an Organisation per attribute hash, keyed on content_id.
  def build_orgs(org_hashes)
    org_hashes.map do |attrs|
      Organisation.create_with(attrs).find_or_create_by(content_id: attrs[:content_id])
    end
  end
end
|
#!/bin/bash
create_network() {
    # Render the network template for NETWORK_NAME and create the libvirt
    # network if it does not already exist.
    export NETWORK_NAME="${1}"
    envsubst <"./templates/network.xml" >"./templates/network_tmp.xml"
    if [ -z "$(virsh net-info "${NETWORK_NAME}")" ]; then
        # Network not found, create network NETWORK_NAME
        virsh net-create --file ./templates/network_tmp.xml
        echo Creating network "${NETWORK_NAME}"
    else
        echo Network already exists.
    fi
    # The rendered template is a regular file: 'rm -f' is the appropriate
    # cleanup ('-r' was for directories and would error-prompt pointlessly).
    rm -f ./templates/network_tmp.xml
}
create_pool() {
    # Create the default storage pool unless it is already present.
    if [ -n "$(virsh pool-info default)" ]; then
        echo Pool exists.
    else
        # No default pool yet; create it from the template.
        echo Creating default pool
        virsh pool-create --file ./templates/pool.xml
    fi
}
create_domain() {
    # Create (or recreate) a VM and its backing volume from the vm.xml
    # template. Args: vm name, volume name, cpu count, network name.
    export VM_NAME="${1}"
    export VOLUME_NAME="${2}"
    export CPU_COUNT="${3}"
    export NETWORK_NAME="${4}"
    envsubst <"./templates/vm.xml" >"./templates/vm_tmp.xml"
    # Create volume
    if [ -z "$(virsh vol-info "${VOLUME_NAME}" default)" ]; then
        # Create volume
        echo Creating volume "${VOLUME_NAME}"
        virsh vol-create-as default "${VOLUME_NAME}" 8G
    else
        # Old volume with same name found, delete old one and create new
        echo Deleting old volume
        virsh vol-delete "${VOLUME_NAME}" default
        echo Creating volume
        virsh vol-create-as default "${VOLUME_NAME}" 8G
    fi
    # Create virtual machine
    if [ -z "$(virsh dominfo "${VM_NAME}")" ]; then
        # Create new virtual machine
        echo Creating "${VM_NAME}" virtual machine
        virsh create --file ./templates/vm_tmp.xml
    else
        # Old vm with same name found, delete old one and create new
        echo Deleting old virtual machine
        virsh destroy "${VM_NAME}"
        virsh undefine "${VM_NAME}"
        echo Creating virtual machine
        virsh create --file ./templates/vm_tmp.xml
    fi
    # Rendered template is a regular file; '-f' (not '-r') is correct here.
    rm -f ./templates/vm_tmp.xml
}
delete_dev_env() {
    # Tear down a dev environment: the network plus VM_COUNT virtual machines
    # named "<prefix>_<n>" with volumes "<prefix>_<n>_<postfix>".
    VM_NAME_PREFIX="${1}"
    NETWORK_NAME="${2}"
    VOLUME_POSTFIX="${3}"
    VM_COUNT="${4}"
    if [ "$(virsh net-info "${NETWORK_NAME}")" ]; then
        # Quote every expansion so names containing spaces or globs survive.
        echo Deleting network "${NETWORK_NAME}"
        virsh net-destroy "${NETWORK_NAME}"
        virsh net-undefine "${NETWORK_NAME}"
    fi
    for ((n = 0; n < "${VM_COUNT}"; n++)); do
        VM_NAME="${VM_NAME_PREFIX}_${n}"
        VOLUME_NAME="${VM_NAME}_${VOLUME_POSTFIX}"
        echo Destroying and undefining virtual machine "${VM_NAME}"
        virsh destroy "${VM_NAME}"
        virsh undefine "${VM_NAME}"
        echo Deleting volume "${VOLUME_NAME}"
        virsh vol-delete "${VOLUME_NAME}" default
    done
}
|
#!/usr/bin/env bash
# Runs the libxsmm layer_example_* benchmark over the ResNet-50 layer shapes.
# Args: mb iters numa bin type format pad (defaults used when not all 7 given).
set -eo pipefail

# Resolve tool paths up front so later code can test for their presence.
UNAME=$(command -v uname)
SORT=$(command -v sort)
GREP=$(command -v grep)
CUT=$(command -v cut)
WC=$(command -v wc)
TR=$(command -v tr)

# CHECK mode shrinks the iteration count to a smoke-test run.
if [ "" = "${CHECK}" ] || [ "0" = "${CHECK}" ]; then
  if [ "" = "${CHECK_DNN_MB}" ]; then CHECK_DNN_MB=64; fi
  if [ "" = "${CHECK_DNN_ITERS}" ]; then CHECK_DNN_ITERS=1000; fi
else # check
  if [ "" = "${CHECK_DNN_MB}" ]; then CHECK_DNN_MB=64; fi
  if [ "" = "${CHECK_DNN_ITERS}" ]; then CHECK_DNN_ITERS=1; fi
fi

if [ $# -ne 7 ]
then
  echo "Usage: $(basename "$0") mb iters numa (1-mcdram/0-DDR) TYPE ('A'-ALL/'F'-FP/'B'-BP/'U'-WU) FORMAT ('A'-ALL/'L'-LIBXSMM/'T'-Tensorflow/'M'-Mixed) padding; using default values; using default values: 64 1000 1 f32 A L 0"
  MB=${CHECK_DNN_MB}
  ITERS=${CHECK_DNN_ITERS}
  NUMA=-1
  BIN=f32
  TYPE="A"
  FORMAT="L"
  PAD=0
else
  MB=$1
  ITERS=$2
  NUMA=$3
  BIN=$4
  TYPE=$5
  FORMAT=$6
  PAD=$7
fi

# Topology discovery: sockets (NS), physical cores (NC), hw threads (NT).
if [ "" != "${GREP}" ] && [ "" != "${CUT}" ] && [ "" != "${SORT}" ] && [ "" != "${WC}" ] && [ -e /proc/cpuinfo ]; then
  export NS=$(${GREP} "physical id" /proc/cpuinfo | ${SORT} -u | ${WC} -l | ${TR} -d " ")
  export NC=$((NS*$(${GREP} -m1 "cpu cores" /proc/cpuinfo | ${TR} -d " " | ${CUT} -d: -f2)))
  export NT=$(${GREP} "core id" /proc/cpuinfo | ${WC} -l | ${TR} -d " ")
elif [ "" != "${UNAME}" ] && [ "" != "${CUT}" ] && [ "Darwin" = "$(${UNAME})" ]; then
  # Use the resolved ${TR} here too, consistent with the Linux branch above.
  export NS=$(sysctl hw.packages | ${CUT} -d: -f2 | ${TR} -d " ")
  export NC=$(sysctl hw.physicalcpu | ${CUT} -d: -f2 | ${TR} -d " ")
  export NT=$(sysctl hw.logicalcpu | ${CUT} -d: -f2 | ${TR} -d " ")
fi
if [ "" != "${NC}" ] && [ "" != "${NT}" ]; then
  export HT=$((NT/(NC)))
else
  export NS=1 NC=1 NT=1 HT=1
fi
# NUMA node count (falls back to socket count without numactl).
if [ "" != "${GREP}" ] && [ "" != "${CUT}" ] && [ "" != "$(command -v numactl)" ]; then
  export NN=$(numactl -H | ${GREP} available: | ${CUT} -d' ' -f2)
else
  export NN=${NS}
fi

CPUFLAGS=$(if [ "" != "${GREP}" ] && [ "" != "${CUT}" ] && [ -e /proc/cpuinfo ]; then ${GREP} -m1 flags /proc/cpuinfo | ${CUT} -d: -f2-; fi)
# On avx512er hardware (KNL) prefer a NUMA node (e.g. MCDRAM) via numactl.
if [ "" != "${GREP}" ] && [ "" != "$(echo "${CPUFLAGS}" | ${GREP} -o avx512er)" ]; then
  if [ "0" != "$((0>NUMA))" ] && [ "0" != "$((NS<NN))" ]; then
    NUMACTL="numactl --preferred=${NS} ${TOOL_COMMAND}"
  elif [ "0" != "$((0<=NUMA && NUMA<NN))" ]; then
    NUMACTL="numactl --preferred=${NUMA} ${TOOL_COMMAND}"
  elif [ "1" != "${NS}" ]; then
    #NUMACTL="numactl -i all ${TOOL_COMMAND}"
    NUMACTL="${TOOL_COMMAND}"
  fi
else
  NUMACTL="${TOOL_COMMAND}"
fi

# Default to one OpenMP thread per physical core with compact affinity.
if [ "" = "${OMP_NUM_THREADS}" ] || [ "0" = "${OMP_NUM_THREADS}" ]; then
  if [ "" = "${KMP_AFFINITY}" ]; then
    export KMP_AFFINITY=compact,granularity=fine KMP_HW_SUBSET=1T
  fi
  export OMP_NUM_THREADS=$((NC))
fi

if [ "" = "${LIBXSMM_TARGET_HIDDEN}" ] || [ "0" = "${LIBXSMM_TARGET_HIDDEN}" ]; then
  echo "OMP_NUM_THREADS=${OMP_NUM_THREADS} NUMACTL=\"${NUMACTL}\""
  echo
fi

# ./layer_example_${BIN} iters inpWidth inpHeight nImg nIfm nOfm kw kh padw padh stride type
#
# The 7x7 stride-2 stem layer only exists in the f32 binary.
if [ "${BIN}" != "f32" ]; then
  true
else
  ${NUMACTL} ./layer_example_${BIN} ${ITERS} 224 224 ${MB} 3 64 7 7 3 3 2 ${TYPE} ${FORMAT} ${PAD}
fi
${NUMACTL} ./layer_example_${BIN} ${ITERS} 56 56 ${MB} 64 256 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 56 56 ${MB} 64 64 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 56 56 ${MB} 64 64 3 3 1 1 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 56 56 ${MB} 256 64 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 28 28 ${MB} 256 512 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 28 28 ${MB} 256 128 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 28 28 ${MB} 128 128 3 3 1 1 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 28 28 ${MB} 128 512 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 28 28 ${MB} 512 128 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 14 14 ${MB} 512 1024 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 14 14 ${MB} 512 256 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 14 14 ${MB} 256 256 3 3 1 1 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 14 14 ${MB} 256 1024 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 14 14 ${MB} 1024 256 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 7 7 ${MB} 1024 2048 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 7 7 ${MB} 1024 512 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 7 7 ${MB} 512 512 3 3 1 1 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 7 7 ${MB} 512 2048 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
${NUMACTL} ./layer_example_${BIN} ${ITERS} 7 7 ${MB} 2048 512 1 1 0 0 1 ${TYPE} ${FORMAT} ${PAD}
|
# Extract the base form of every verb.
require_relative '30'

# Flatten sentence-by-sentence morpheme lists into one list of verb base forms.
verb_bases = NEKO_MORPHS_LIST.flat_map do |morphs|
  morphs.select { |morph| morph[:pos] == '動詞' }.map { |morph| morph[:base] }
end

p verb_bases.first(20)
File.open(File.expand_path('../output/32.txt', __dir__), 'w') { |f| f.puts verb_bases }
|
#!/bin/bash
# Drives the four-stage MiniC compiler pipeline:
#   MiniC -> Eeyore [-> optimized Eeyore] -> Tigger -> RISC-V32.
# Usage: ./minic.sh -c <input> -o <output> [-O]

eeyoreFile="./MiniC_to_Eeyore/eeyore"
optimizeFile="./optimization/optimize"
tiggerFile="./Eeyore_to_Tigger/tigger"
riscv32File="./Tigger_to_RISCV32/riscv32"

optimize_flag=0
minic_file=""
output_file=""

# Build each stage binary on first use.
if [ ! -f "$eeyoreFile" ]; then
    cd ./MiniC_to_Eeyore/
    make
    cd ..
fi
if [ ! -f "$optimizeFile" ]; then
    cd ./optimization/
    make
    cd ..
fi
if [ ! -f "$tiggerFile" ]; then
    cd ./Eeyore_to_Tigger/
    make
    cd ..
fi
if [ ! -f "$riscv32File" ]; then
    cd ./Tigger_to_RISCV32/
    make
    cd ..
fi

while getopts "c:Oo:h" arg
do
    case $arg in
        h)
            echo "Using ./minic.sh -c [file] -o [output] [-O](for optimize)"
            exit
            ;;
        c)
            minic_file=$OPTARG
            ;;
        O)
            optimize_flag=1
            ;;
        o)
            output_file=$OPTARG
            ;;
        ?)
            exit
            ;;
    esac
done

# Fail early instead of silently producing empty temp/output files when the
# required -c/-o options were omitted.
if [ -z "$minic_file" ] || [ -z "$output_file" ]; then
    echo "Missing -c input file or -o output file; see -h for usage." >&2
    exit 1
fi

if [ "$optimize_flag" -eq 0 ]; then
    # Unoptimized path: MiniC -> Eeyore -> Tigger -> RISC-V32.
    "$eeyoreFile" < "$minic_file" > ./tmp.e
    "$tiggerFile" < ./tmp.e > ./tmp.t
    "$riscv32File" < ./tmp.t > "$output_file"
    rm ./tmp.e
    rm ./tmp.t
    exit
else
    # Optimized path inserts the Eeyore-level optimizer.
    "$eeyoreFile" < "$minic_file" > ./tmp.e
    "$optimizeFile" < ./tmp.e > ./tmp.oe
    "$tiggerFile" < ./tmp.oe > ./tmp.t
    "$riscv32File" < ./tmp.t > "$output_file"
    rm ./tmp.e
    rm ./tmp.oe
    rm ./tmp.t
    exit
fi
|
#!/bin/bash
# SLURM batch job: single-core training run for the "swish" activation
# experiment of the sequence-tagging argument-mining model.
#SBATCH -J Act_swish_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args appear to be: activation, seed/run-id, optimizer, and further
# hyperparameters (dropout, learning rate, initializer, ...) — TODO confirm
# against PE-my.py's argument parsing.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py swish 299 Adagrad 4 0.7465563430034715 0.01387787799416186 glorot_normal 0.3
|
<filename>Card.java
import java.util.Random;
/**
* @author <NAME>
* TODO:
* 1. Declare and initiliaze list of suits in arrays
* 2. Declare and initiliaze list of ranks in arrays
* 3. Declare and initiliaze list of card values in arrays
* 4. Shuffle deck of cards
* 5. Remove first card in deck
*/
/**
 * A 52-card deck stored as two parallel arrays: human-readable labels
 * ("Ace of Clubs", ...) and per-card point values. Supports shuffling and
 * removing the top card.
 */
public class Card {
    String[] SUITS = { "Clubs", "Diamonds", "Hearts", "Spades" }; // initialising the suits of the deck in a String array named 'SUITS'
    String[] RANKS = { "Ace", "2", "3", "4", "5", "6", "7", "8", "9", "10", "Jack", "Queen", "King" }; // initialising the ranks of the deck in a String array named 'RANKS'
    // Point value per rank, index-aligned with RANKS. 10/Jack/Queen/King score 0 —
    // presumably intentional for the target game, but TODO confirm "10" scoring 0.
    int[] CARD_VALUE = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 0 };

    public String[] deckLabels = new String[SUITS.length * RANKS.length];
    public int[] deckValues = new int[SUITS.length * RANKS.length];

    /** Creates a full deck in rank-major order (all "Ace of ..." first). */
    public Card() {
        initializeDeck();
    }

    // Fills deckLabels/deckValues with every rank/suit combination.
    // (Renamed from the misspelled "initilizeDeck"; private, so no caller breaks.)
    private void initializeDeck() {
        for (int i = 0; i < RANKS.length; i++) {
            for (int j = 0; j < SUITS.length; j++) {
                deckLabels[SUITS.length * i + j] = RANKS[i] + " of " + SUITS[j];
                deckValues[SUITS.length * i + j] = CARD_VALUE[i];
            }
        }
    }

    /** Fisher-Yates shuffle; labels and values are swapped in lock-step. */
    public void shuffleDeck() {
        Random random = new Random();
        for (int i = 0; i < deckLabels.length; i++) {
            // Pick a uniform index in [i, length).
            int r = i + (int) (random.nextDouble() * (deckLabels.length - i));
            String temporaryDeck = deckLabels[r];
            int temporaryDeckValue = deckValues[r];
            // swapping and shuffling
            deckLabels[r] = deckLabels[i];
            deckLabels[i] = temporaryDeck;
            deckValues[r] = deckValues[i];
            deckValues[i] = temporaryDeckValue;
        }
    }

    /**
     * Removes the first card by shifting every element one slot left.
     * Note: the arrays keep their length, so the last slot retains a duplicate
     * of the previous last card; callers must track the logical deck size.
     */
    public void removeFirstCardInDeck() {
        for (int j = 0; j < deckLabels.length - 1; j++) {
            deckLabels[j] = deckLabels[j + 1];
            deckValues[j] = deckValues[j + 1];
        }
    }
}
|
<filename>resources/js/store/getters.js
let getters = {
newEvent: state => {
return state.newEvent;
},
datesArr: state => {
return state.datesArr;
},
eventList: state => {
return state.eventList;
},
calendarDate: state => {
return state.calendarDate;
}
}
export default getters;
|
#!/usr/bin/env bash
help(){
    # Same usage text as before, emitted in one go via a quoted heredoc.
    cat <<'USAGE'
SciELO Usage COUNTER - Batch script Parse Log
Please, inform:
  1. The file MMDB (parameter -m)
  2. The file robots (parameter -r)
  3. The output directory (parameter -o)
  4. The list of files paths (parameter -f)

For example:

  scripts/batch_parse.sh -m data/map.mmdb -r data/counter-robots.txt -o data -f logs_paths.txt

USAGE
}
run(){
    # Parses every log file listed (one path per line) in $4, writing results
    # to the output directory. Args: mmdb file, robots file, output dir, list file.
    FILE_MMDB="$1";
    FILE_ROBOTS="$2";
    OUTPUT_DIR="$3";
    INPUT="$4";
    # 'while read' iterates whole lines; the previous `for i in $(cat ...)`
    # word-split paths containing spaces into multiple bogus entries.
    while IFS= read -r LOGFILE; do
        [ -z "$LOGFILE" ] && continue;
        echo "[Processando] $LOGFILE";
        parse-log -m "$FILE_MMDB" -r "$FILE_ROBOTS" -o "$OUTPUT_DIR" file -f "$LOGFILE";
    done < "$INPUT"
}
while getopts f:m:r:o: opts; do
    case ${opts} in
        # File listing the paths of the log files to process
        f) INPUT=${OPTARG} ;;
        # Map file in mmdb format
        m) FILE_MMDB=${OPTARG} ;;
        # Robots file in txt format
        r) FILE_ROBOTS=${OPTARG} ;;
        # Output (results) directory
        o) OUTPUT_DIR=${OPTARG} ;;
    esac
done

# All four parameters are mandatory; show usage otherwise.
if [[ -z "$INPUT" || -z "$FILE_ROBOTS" || -z "$FILE_MMDB" || -z "$OUTPUT_DIR" ]]
then
    help;
    exit;
else
    # Quote every argument so paths containing spaces reach run() intact.
    run "$FILE_MMDB" "$FILE_ROBOTS" "$OUTPUT_DIR" "$INPUT";
fi
|
# Add .js and .css files to the page
<head>
<meta charset="utf-8">
<title>My Webpage</title>
<!-- Add .js and .css files to the page -->
<link rel="stylesheet" href="styles.css">
<script src="script1.js"></script>
<script src="script2.js"></script>
</head>
# Compress images
<body>
<!-- Compress images -->
<img src="image1.jpg" alt="image1" width="400" height="400" />
<img src="image2.jpg" alt="image2" width="400" height="400" />
<img src="image3.jpg" alt="image3" width="400" height="400" />
</body>
# Use a video streaming service
<body>
<!-- Use a video streaming service -->
<video src="video.mp4" width="400" height="400" controls></video>
</body>
# Add caching
<head>
<!-- Add caching -->
<meta http-equiv="Cache-Control" content="max-age=3600">
</head>
|
#!/usr/bin/env bash
# Launches six SAC training runs in parallel: three seeds (10/20/30) for each
# of dm.walker.run and dm.hopper.hop. Each run is pinned to a GPU via
# CUDA_VISIBLE_DEVICES and to a dedicated 3-core slice via taskset; all runs
# share the same FourierMLP hyperparameters and are backgrounded with '&'.
CUDA_VISIBLE_DEVICES=3 taskset -c 0-2 python main.py --policy PytorchSAC --env dm.walker.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.001 --fourier_dim 1024 --seed 10 --max_timesteps 1000000 &
CUDA_VISIBLE_DEVICES=4 taskset -c 3-5 python main.py --policy PytorchSAC --env dm.walker.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.001 --fourier_dim 1024 --seed 20 --max_timesteps 1000000 &
CUDA_VISIBLE_DEVICES=7 taskset -c 6-8 python main.py --policy PytorchSAC --env dm.walker.run --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.001 --fourier_dim 1024 --seed 30 --max_timesteps 1000000 &
CUDA_VISIBLE_DEVICES=3 taskset -c 9-11 python main.py --policy PytorchSAC --env dm.hopper.hop --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.001 --fourier_dim 1024 --seed 10 --max_timesteps 1000000 &
CUDA_VISIBLE_DEVICES=4 taskset -c 12-14 python main.py --policy PytorchSAC --env dm.hopper.hop --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.001 --fourier_dim 1024 --seed 20 --max_timesteps 1000000 &
CUDA_VISIBLE_DEVICES=7 taskset -c 15-17 python main.py --policy PytorchSAC --env dm.hopper.hop --start_timesteps 5000 --hidden_dim 1024 --batch_size 1024 --n_hidden 2 --lr 1e-4 --network_class FourierMLP --concatenate_fourier --train_B --sigma 0.001 --fourier_dim 1024 --seed 30 --max_timesteps 1000000 &
|
#!/bin/bash
# CI wrapper: prepares a devstack-gate run for the nova-docker virt driver.
# Assumes $BASE is provided by the devstack-gate environment — TODO confirm.
set -xe
SCRIPTDIR=$(readlink -f $(dirname $0))
# TODO : This should be removed once PATH contains sbin
# https://review.openstack.org/#/c/91655/
export PATH=$PATH:/usr/local/sbin:/usr/sbin
# Create the service users; '|| true' keeps re-runs idempotent.
sudo useradd -U -s /bin/bash -d /opt/stack/new -m stack || true
sudo useradd -U -s /bin/bash -m tempest || true
export INSTALLDIR=$BASE/new
bash -xe $SCRIPTDIR/prepare_devstack.sh
# Gate configuration: docker driver, with trove/ceilometer/swift/sahara disabled.
export DEVSTACK_GATE_VIRT_DRIVER=docker
export REQUIREMENTS_MODE=soft
export KEEP_LOCALRC=1
export ENABLED_SERVICES+=-tr-api,-tr-cond,-tr-mgr,-trove,-ceilometer-acentral,-ceilometer-acompute,-ceilometer-alarm-evaluator,-ceilometer-alarm-notifier,-ceilometer-anotification,-ceilometer-api,-ceilometer-collector,-s-account,-s-container,-s-object,-s-proxy,-sahara
# Skip tempest tests exercising features this driver does not support.
export DEVSTACK_GATE_TEMPEST_REGEX='^(?!.*?(volume|resize|suspend|rescue|cinder|migrate)).*'
export DEVSTACK_GATE_TEMPEST=1
export DEVSTACK_GATE_TEMPEST_FULL=0
$INSTALLDIR/devstack-gate/devstack-vm-gate.sh
|
<filename>parquet-hive/parquet-hive-storage-handler/src/test/java/parquet/hive/TestMapredParquetOuputFormat.java
/**
* Copyright 2013 Criteo.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package parquet.hive;
import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.util.Progressable;
import org.junit.Before;
import org.junit.Test;
import parquet.hadoop.ParquetFileReader;
import parquet.hadoop.ParquetInputSplit;
import parquet.hadoop.metadata.BlockMetaData;
import parquet.hadoop.metadata.ParquetMetadata;
import parquet.hive.read.DataWritableReadSupport;
import parquet.schema.MessageType;
/**
*
* TestHiveOuputFormat
*
*
* @author <NAME>our <<EMAIL>>
*
*/
/**
 * Round-trip test for the mapred Parquet output format: writes 1000 synthetic
 * customer rows through MapredParquetOutputFormat, then reads them back via
 * MapredParquetInputFormat and compares against the in-memory expectations.
 * (NOTE(review): "Ouput" in the class name is a typo, but it matches the file
 * name, so renaming would require renaming the file too.)
 */
public class TestMapredParquetOuputFormat {
    // Expected rows keyed by c_custkey (the first column of each row).
    private Map<Integer, ArrayWritable> mapData;
    private Configuration conf;
    private JobConf job;
    private Path dir;
    private File testFile;
    private Reporter reporter;

    /**
     * Builds the expected data set: 1000 rows where each column is nulled out
     * on a different modulus so null handling is exercised per column.
     */
    @Before
    public void setUp() throws Exception {
        conf = new Configuration();
        job = new JobConf(conf);
        dir = new Path("target/tests/from_java/deprecatedoutputformat/");
        testFile = new File(dir.toString(), "customer");
        // Start from a clean slate; a stale file would make the write assert meaningless.
        if (testFile.exists()) {
            if (!testFile.delete()) {
                throw new RuntimeException("can not remove existing file " + testFile.getAbsolutePath());
            }
        }
        reporter = Reporter.NULL;
        // Shared map/list values reused by every row that does not null them out.
        Map<String, String> map = new HashMap<String, String>();
        map.put("testkey", "testvalue");
        map.put("foo", "bar");
        List<Integer> list = new ArrayList<Integer>();
        list.add(0);
        list.add(12);
        list.add(17);
        mapData = new HashMap<Integer, ArrayWritable>();
        mapData.clear();
        for (int i = 0; i < 1000; i++) {
            // Each column uses a distinct prime-ish modulus (11..19) to decide nullness.
            mapData.put(i, UtilitiesTestMethods.createArrayWritable(i, i % 11 == 0 ? null : "name_" + i, i % 12 == 0 ? null : "add_" + i,
                    i % 13 == 0 ? null : i, i % 14 == 0 ? null : "phone_" + i, i % 15 == 0 ? null : 1.2d * i, i % 16 == 0 ? null : "mktsegment_" + i,
                    i % 17 == 0 ? null : "comment_" + i, i % 18 == 0 ? null : map, i % 19 == 0 ? null : list));
        }
    }

    /**
     * Writes all expected rows through the Hive record writer, then delegates
     * verification of the produced file to {@link #checkWrite()}.
     */
    @Test
    public void testParquetHiveOutputFormat() throws Exception {
        final HiveOutputFormat<Void, ArrayWritable> format = new MapredParquetOutputFormat();
        final Properties tableProperties = new Properties();
        // Set the configuration parameters
        tableProperties.setProperty("columns",
                "c_custkey,c_name,c_address,c_nationkey,c_phone,c_acctbal,c_mktsegment,c_comment,c_map,c_list");
        tableProperties.setProperty("columns.types",
                "int:string:string:int:string:double:string:string:map<string,string>:array<int>");
        tableProperties.setProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
        System.out.println("First part, write the data");
        job.set("mapred.task.id", "attempt_201304241759_32973_m_000002_0"); // FAKE ID
        final FakeStatus reporter = new FakeStatus();
        final org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter recordWriter = format.getHiveRecordWriter(
                job,
                new Path(testFile.getAbsolutePath()),
                NullWritable.class,
                false,
                tableProperties,
                reporter);
        // create key/value
        for (final Map.Entry<Integer, ArrayWritable> entry : mapData.entrySet()) {
            recordWriter.write(entry.getValue());
        }
        recordWriter.close(false);
        assertTrue("File not created", testFile.exists());
        System.out.println("Second part, check if everything is ok");
        checkWrite();
    }

    /**
     * Reads the written file back through the input format and asserts every
     * row matches the corresponding expected row (looked up by c_custkey).
     */
    private void checkWrite() throws IOException, InterruptedException {
        final ParquetMetadata readFooter = ParquetFileReader.readFooter(conf, new Path(testFile.getAbsolutePath()));
        final MessageType schema = readFooter.getFileMetaData().getSchema();
        // Total byte size across all row-group blocks, needed for the split below.
        long size = 0;
        final List<BlockMetaData> blocks = readFooter.getBlocks();
        for (final BlockMetaData block : blocks) {
            size += block.getTotalByteSize();
        }
        final FileInputFormat<Void, ArrayWritable> format = new MapredParquetInputFormat();
        final String[] locations = new String[] {"localhost"};
        final String schemaToString = schema.toString();
        // Hive-side schema the read support resolves columns against.
        final String columnsStr = "message customer {\n"
                + "  optional int32 c_custkey;\n"
                + "  optional binary c_name;\n"
                + "  optional binary c_address;\n"
                + "  optional int32 c_nationkey;\n"
                + "  optional binary c_phone;\n"
                + "  optional double c_acctbal;\n"
                + "  optional binary c_mktsegment;\n"
                + "  optional binary c_comment;\n"
                + "  optional group c_map (MAP_KEY_VALUE) {\n"
                + "    repeated group map {\n"
                + "      required binary key;\n"
                + "      optional binary value;\n"
                + "    }\n"
                + "  }\n"
                + "  optional group c_list (LIST) {\n"
                + "    repeated group bag {\n"
                + "      optional int32 array_element;\n"
                + "    }\n"
                + "  }\n"
                + "}";
        final Map<String, String> readSupportMetaData = new HashMap<String, String>();
        readSupportMetaData.put(DataWritableReadSupport.HIVE_SCHEMA_KEY, columnsStr);
        // One split covering the whole file, wrapped for the mapred API.
        final ParquetInputSplit realSplit = new ParquetInputSplit(new Path(testFile.getAbsolutePath()), 0, size, locations, blocks,
                schemaToString, schemaToString, readFooter.getFileMetaData().getKeyValueMetaData(), readSupportMetaData);
        final MapredParquetInputFormat.InputSplitWrapper splitWrapper = new MapredParquetInputFormat.InputSplitWrapper(realSplit);
        // construct the record reader
        final RecordReader<Void, ArrayWritable> reader = format.getRecordReader(splitWrapper, job, reporter);
        // create key/value
        final Void key = reader.createKey();
        final ArrayWritable value = reader.createValue();
        int count = 0;
        while (reader.next(key, value)) {
            assertTrue(count < mapData.size());
            assertTrue(key == null);
            final Writable[] arrValue = value.get();
            final Writable[] writableArr = arrValue;
            // Column 0 (c_custkey) is the lookup key into the expected data.
            final ArrayWritable expected = mapData.get(((IntWritable) writableArr[0]).get());
            final Writable[] arrExpected = expected.get();
            assertEquals(arrValue.length, 10);
            final boolean deepEquals = UtilitiesTestMethods.smartCheckArray(arrValue, arrExpected, new Integer[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
            assertTrue(deepEquals);
            count++;
        }
        reader.close();
        assertEquals("Number of lines found and data written don't match", count, mapData.size());
    }

    // FAKE Class in order to compile: getHiveRecordWriter requires a Progressable,
    // but nothing in this test should ever invoke it.
    private class FakeStatus extends org.apache.hadoop.mapreduce.StatusReporter implements Progressable {
        @Override
        public Counter getCounter(final Enum<?> e) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public Counter getCounter(final String string, final String string1) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public void progress() {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public void setStatus(final String string) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public float getProgress() {
            throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
        }
    }
}
|
/******************************************************************************
* Copyright 2011 Kitware Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "DeleteResourceUI.h"
/** Constructor */
/** Constructor.
 *  @param parent owning widget for the dialog
 *  @param server true when the resource to delete lives on the server
 *                (controls the wording and checkbox shown by Init()). */
DeleteResourceUI::DeleteResourceUI(QWidget* parent, bool server)
  : QDialog(parent), m_Server(server)
{
  this->setupUi(this);
}
/** Destructor; Qt parents own the child widgets, so nothing to release here. */
DeleteResourceUI::~DeleteResourceUI()
{
}
/** Tailors the dialog to the deletion target: adjusts the confirmation text
 *  and shows the "delete files" checkbox only for client-side deletes. */
void DeleteResourceUI::Init()
{
  const bool onServer = this->m_Server;

  m_DeleteLabel->setText(onServer
    ? "<b>Are you sure you want to delete the selected resource on the server?</b>"
    : "<b>Are you sure you want to delete the selected resource on the client?</b>");

  if( onServer )
    {
    m_DeleteFileCheckbox->hide();
    }
  else
    {
    m_DeleteFileCheckbox->show();
    }
}
/** Refreshes the dialog contents via Init() before showing it modally.
 *  @return the QDialog::exec() result code. */
int DeleteResourceUI::exec()
{
  this->Init();
  return QDialog::exec();
}
/** Emits DeleteResource with the checkbox state (whether to also delete
 *  local files) before closing the dialog with the accepted result. */
void DeleteResourceUI::accept()
{
  emit DeleteResource(m_DeleteFileCheckbox->isChecked() );
  QDialog::accept();
}
|
<filename>angular/src/app/service/blog.service.ts<gh_stars>0
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { CommonResponse } from '../util/response/commonResponse';
import { HttpLocation } from '../util/httpLocation';
@Injectable({
  providedIn: 'root'
})
export class BlogService {
  response: CommonResponse;
  location;

  constructor(
    private http: HttpClient,
  ) {
    this.location = new HttpLocation();
  }

  // Builds an absolute API URL from a path fragment, centralising the
  // protocol/host concatenation previously duplicated in every method.
  private apiUrl(path: string): string {
    return 'http://' + this.location.host + path;
  }

  /** Fetches all blogs. */
  getAllBlogs() {
    return this.http.get(this.apiUrl('/api/blog'));
  }

  /** Fetches a single blog by id. */
  getOneBlog(blogId) {
    return this.http.get(this.apiUrl('/api/blog/' + blogId));
  }

  /** Fetches the comments belonging to a blog. */
  getComments(blogId) {
    return this.http.get(this.apiUrl('/api/comment/blog/' + blogId));
  }

  /** Fetches one page of blogs via the `page` query parameter. */
  getBlogsByPage(page) {
    return this.http.get(this.apiUrl('/api/blog/'), {
      params: {
        page,
      }
    });
  }
}
|
<reponame>twipixel/pixi
'use strict';
describe('PIXI.Point', function ()
{
    it('should create a new point', function ()
    {
        // A default-constructed point sits at the origin.
        const origin = new PIXI.Point();

        expect(origin.x).to.equal(0);
        expect(origin.y).to.equal(0);
    });

    it('should clone a new point', function ()
    {
        const source = new PIXI.Point(10, 20);

        expect(source.x).to.equal(10);
        expect(source.y).to.equal(20);

        const copy = source.clone();

        // The clone matches component-wise but is a distinct object.
        expect(copy.x).to.equal(10);
        expect(copy.y).to.equal(20);
        expect(source).to.not.equal(copy);
        expect(source.equals(copy)).to.be.true;
    });

    it('should copy from one point to another', function ()
    {
        const target = new PIXI.Point(10, 20);
        const donor = new PIXI.Point(2, 5);

        // copy() pulls values *from* the argument into the receiver.
        target.copy(donor);

        expect(target.x).to.equal(2);
        expect(target.y).to.equal(5);
    });

    it('should set a new value', function ()
    {
        const point = new PIXI.Point(10, 20);

        // No arguments resets to the origin.
        point.set();
        expect(point.x).to.equal(0);
        expect(point.y).to.equal(0);

        // One argument is applied to both components.
        point.set(1);
        expect(point.x).to.equal(1);
        expect(point.y).to.equal(1);

        // Two arguments set x and y independently.
        point.set(1, 0);
        expect(point.x).to.equal(1);
        expect(point.y).to.equal(0);
    });
});
|
<reponame>MrBattary/ncstore-back<filename>src/main/java/com/netcracker/ncstore/dto/OrderGetDTO.java<gh_stars>0
package com.netcracker.ncstore.dto;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.UUID;
/**
 * DTO identifying an order to fetch: the order id plus the requesting
 * user's email. Lombok generates the all-args constructor and getters.
 */
@AllArgsConstructor
@Getter
public class OrderGetDTO {
    // ID of the order being requested.
    private final UUID orderId;
    // Email of the user making the request.
    private final String email;
}
|
#!/bin/bash
# Fetch the photo resource for the given entity id from the local service.
set -e
# Guard against a missing id: without it the original curled the nonsense
# URL http://localhost:8060//photo.
if [ -z "$1" ]; then
	echo "usage: $0 <id>" >&2
	exit 1
fi
# Quote the expansion so ids containing shell metacharacters don't split.
curl "http://localhost:8060/$1/photo"
|
#!/bin/bash
# Installs the Cygwin build dependencies via an unattended setup.exe run.
CVM_HOME=$(pwd)

# Print a line verbatim. Using '%s' as the format (instead of the message
# itself) stops printf from interpreting '%' or backslash sequences that may
# appear in a message.
log(){
	printf '%s\n' "$1"
}

main(){
	log "Dependency grab started on $(date +"%x %I:%M %p")."
	if [[ ! -f "$CVM_HOME/setup.exe" ]]; then
		log "Please copy the Cygwin setup file to $CVM_HOME/setup.exe before running this script."
	else
		# Unattended install of the library/tool dependencies.
		# setup.exe is quoted so a CVM_HOME containing spaces still works.
		"$CVM_HOME/setup.exe" -X -q -W -P libvncserver-devel,libcairo-devel,libboost-devel,libsqlite3-devel,libsasl2-devel,libturbojpeg-devel,libjpeg-devel,wget,git,make,unzip
		# Pin gcc to the 2016-08-30 snapshot matching the host architecture.
		if [ "$(uname -m)" == "x86_64" ]; then
			"$CVM_HOME/setup.exe" -X -q -W -s http://ctm.crouchingtigerhiddenfruitbat.org/pub/cygwin/circa/64bit/2016/08/30/104235 -P gcc-core,gcc-g++
		else
			"$CVM_HOME/setup.exe" -X -q -W -s http://ctm.crouchingtigerhiddenfruitbat.org/pub/cygwin/circa/2016/08/30/104223 -P gcc-core,gcc-g++
		fi
		log "Dependency grab finished."
	fi
	log "SIDE NOTE: YOU ARE NOT DONE, please install a pthread equivalent that provides libpthread.a" # remove this if there is one
}
main
|
/**
* ngTable: Table + Angular JS
*
* @author <NAME> <<EMAIL>>
* @url https://github.com/esvit/ng-table/
* @license New BSD License <http://creativecommons.org/licenses/BSD/>
*/
import angular from 'angular';
ngTableFilterConfigProvider.$inject = [];
function ngTableFilterConfigProvider(){
var config;
var defaultConfig = {
defaultBaseUrl: 'ng-table/filters/',
defaultExt: '.html',
aliasUrls: {}
};
this.$get = ngTableFilterConfig;
this.resetConfigs = resetConfigs;
this.setConfig = setConfig;
init();
/////////
function init(){
resetConfigs();
}
function resetConfigs(){
config = defaultConfig;
}
function setConfig(customConfig){
var mergeConfig = angular.extend({}, config, customConfig);
mergeConfig.aliasUrls = angular.extend({}, config.aliasUrls, customConfig.aliasUrls);
config = mergeConfig;
}
/////////
ngTableFilterConfig.$inject = [];
function ngTableFilterConfig(){
var publicConfig;
var service = {
config: publicConfig,
getTemplateUrl: getTemplateUrl,
getUrlForAlias: getUrlForAlias
};
Object.defineProperty(service, "config", {
get: function(){
return publicConfig = publicConfig || angular.copy(config);
},
enumerable: true
});
return service;
/////////
function getTemplateUrl(filterDef, filterKey){
if (angular.isObject(filterDef)){
filterDef = filterDef.id;
}
if (filterDef.indexOf('/') !== -1){
return filterDef;
}
return service.getUrlForAlias(filterDef, filterKey);
}
function getUrlForAlias(aliasName/*, filterKey*/){
return config.aliasUrls[aliasName] || config.defaultBaseUrl + aliasName + config.defaultExt;
}
}
}
export { ngTableFilterConfigProvider };
|
/*
* @Description:
* @Author: jumbo
* @Date: 2021-11-03 10:28:15
* @LastEditors: jumbo
* @LastEditTime: 2021-11-03 12:09:25
*/
import { requireEnv } from '@app/core';
import { Injectable } from '@nestjs/common';
// Deployment environment name (e.g. 'development'); gates console logging below.
const SERVE_ENV = requireEnv('SERVE_ENV');
/**
 * Thin console logger gated by the SERVE_ENV environment variable: output is
 * produced only when SERVE_ENV == 'development'.
 * (The prodOnly/devOnly parameters are accepted for API compatibility but are
 * currently unused by the implementation.)
 */
@Injectable()
export class LoggerService {
  /** Logs index and data to the console in development; a no-op otherwise. */
  log(index: string, data: any, prodOnly?: any, devOnly?: any) {
    if (SERVE_ENV == 'development') {
      console.log(index, data);
    }
    return;
  }

  /** Logs an error (stack trace included) under "<index>-error". */
  error(index: string, err: Error, data: any, prodOnly?: any, devOnly?: any) {
    this.log(index + '-error', this.withError(err, data, true), prodOnly, devOnly);
  }

  /** Logs a failure (stack trace suppressed) under "<index>-fail". */
  fail(index: string, err: Error, data: any, prodOnly?: any, devOnly?: any) {
    this.log(index + '-fail', this.withError(err, data, false), prodOnly, devOnly);
  }

  /**
   * Merges data with a JSON-friendly view of err. error() and fail() differed
   * only in whether the stack is kept, so both now delegate here.
   */
  private withError(err: Error, data: any, includeStack: boolean) {
    return {
      ...data,
      error: {
        data: err && JSON.stringify((err as any).data),
        name: err.name,
        message: err.message,
        stack: includeStack ? err.stack : undefined,
      },
    };
  }
}
|
#!/bin/bash
# Sample unlabeled rows, score them, and keep the ones near the decision
# boundary for the next labeling iteration.
# Abort on the first failure so a broken prediction run cannot silently
# overwrite the boundary file with partial output.
set -e

pred_src=data_pred/pred_src
pred_res=data_pred/pred_res

# Draw 1000 random rows from the cleaned unlabeled data as prediction input.
cat labeling/unlabeled/data_cln.tsv | sort -R | head -1000 > "$pred_src"
# Score them and sort by descending predicted probability (column 1).
python 5_Pred.py "$pred_src" | sort -t$'\t' -k1,1gr > "$pred_res"
# Keep rows whose probability falls in (0.4, 0.6) — the uncertain ones.
cat "$pred_res" | awk -F'\t' '{if($1>0.4 && $1<0.6) print $2}' > labeling/unlabeled/iter2_boundary
|
#!/bin/sh -x

# Configures a Kubernetes minion (worker) node for an OpenStack Magnum
# cluster: installs kubelet/kube-proxy, writes kubeconfigs, wires up the
# chosen network driver (calico or flannel), and adjusts docker/systemd.
. /etc/sysconfig/heat-params

echo "configuring kubernetes (minion)"

# Registry prefix for container images; defaults to the upstream magnum images.
_prefix=${CONTAINER_INFRA_PREFIX:-docker.io/openstackmagnum/}

_addtl_mounts=''
if [ "$NETWORK_DRIVER" = "calico" ]; then
    mkdir -p /opt/cni
    _addtl_mounts=',{"type":"bind","source":"/opt/cni","destination":"/opt/cni","options":["bind","rw","slave","mode=777"]}'
    # Stop NetworkManager from managing calico's cali*/tunl* interfaces.
    if [ "`systemctl status NetworkManager.service | grep -o "Active: active"`" = "Active: active" ]; then
        CALICO_NM=/etc/NetworkManager/conf.d/calico.conf
        [ -f ${CALICO_NM} ] || {
        echo "Writing File: $CALICO_NM"
        mkdir -p $(dirname ${CALICO_NM})
        cat << EOF > ${CALICO_NM}
[keyfile]
unmanaged-devices=interface-name:cali*;interface-name:tunl*
EOF
}
        systemctl restart NetworkManager
    fi
fi

if [ "$(echo "${INSTALL_DISABLED}" | tr '[:upper:]' '[:lower:]')" = "false" ]; then
    atomic install --storage ostree --system --system-package=no --set=ADDTL_MOUNTS=${_addtl_mounts} --name=kubelet ${_prefix}kubernetes-kubelet:${KUBE_TAG}
    atomic install --storage ostree --system --system-package=no --name=kube-proxy ${_prefix}kubernetes-proxy:${KUBE_TAG}
else
    # Install disabled: remove master-only components from this node instead.
    atomic uninstall kube-apiserver kube-controller-manager kube-scheduler etcd
    rm -rf /etc/etcd
fi

CERT_DIR=/etc/kubernetes/certs
PROTOCOL=https
ETCD_SERVER_IP=${ETCD_SERVER_IP:-$KUBE_MASTER_IP}
KUBE_PROTOCOL="https"
KUBELET_KUBECONFIG=/etc/kubernetes/kubelet-config.yaml
PROXY_KUBECONFIG=/etc/kubernetes/proxy-config.yaml

if [ "$TLS_DISABLED" = "True" ]; then
    PROTOCOL=http
    KUBE_PROTOCOL="http"
fi

KUBE_MASTER_URI="$KUBE_PROTOCOL://$KUBE_MASTER_IP:$KUBE_API_PORT"

# Fall back to the metadata service when the node IP was not provided.
if [ -z "${KUBE_NODE_IP}" ]; then
    KUBE_NODE_IP=$(curl -s http://169.254.169.254/latest/meta-data/local-ipv4)
fi

HOSTNAME_OVERRIDE=$(hostname --short | sed 's/\.novalocal//')

cat << EOF >> ${KUBELET_KUBECONFIG}
apiVersion: v1
clusters:
- cluster:
    certificate-authority: ${CERT_DIR}/ca.crt
    server: ${KUBE_MASTER_URI}
  name: kubernetes
contexts:
- context:
    cluster: kubernetes
    user: system:node:${HOSTNAME_OVERRIDE}
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: system:node:${HOSTNAME_OVERRIDE}
  user:
    as-user-extra: {}
    client-certificate: ${CERT_DIR}/kubelet.crt
    client-key: ${CERT_DIR}/kubelet.key
EOF

cat << EOF >> ${PROXY_KUBECONFIG}
apiVersion: v1
clusters:
- cluster:
    certificate-authority: ${CERT_DIR}/ca.crt
    server: ${KUBE_MASTER_URI}
  name: kubernetes
contexts:
- context:
    cluster: kubernetes
    user: kube-proxy
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: kube-proxy
  user:
    as-user-extra: {}
    client-certificate: ${CERT_DIR}/proxy.crt
    client-key: ${CERT_DIR}/proxy.key
EOF

# Without TLS the cert/key stanzas would point at files that never get
# created; blank those lines out of the kubelet kubeconfig.
if [ "$TLS_DISABLED" = "True" ]; then
    sed -i 's/^.*user:$//' ${KUBELET_KUBECONFIG}
    sed -i 's/^.*client-certificate.*$//' ${KUBELET_KUBECONFIG}
    sed -i 's/^.*client-key.*$//' ${KUBELET_KUBECONFIG}
    sed -i 's/^.*certificate-authority.*$//' ${KUBELET_KUBECONFIG}
fi

chmod 0644 ${KUBELET_KUBECONFIG}
chmod 0644 ${PROXY_KUBECONFIG}

sed -i '
    /^KUBE_ALLOW_PRIV=/ s/=.*/="--allow-privileged='"$KUBE_ALLOW_PRIV"'"/
    /^KUBE_ETCD_SERVERS=/ s|=.*|="--etcd-servers=http://'"$ETCD_SERVER_IP"':2379"|
    /^KUBE_MASTER=/ s|=.*|="--master='"$KUBE_MASTER_URI"'"|
' /etc/kubernetes/config

# NOTE: Kubernetes plugin for Openstack requires that the node name registered
# in the kube-apiserver be the same as the Nova name of the instance, so that
# the plugin can use the name to query for attributes such as IP, etc.
# The hostname of the node is set to be the Nova name of the instance, and
# the option --hostname-override for kubelet uses the hostname to register the node.
# Using any other name will break the load balancer and cinder volume features.
mkdir -p /etc/kubernetes/manifests
KUBELET_ARGS="--pod-manifest-path=/etc/kubernetes/manifests --cadvisor-port=0 --kubeconfig ${KUBELET_KUBECONFIG} --hostname-override=${HOSTNAME_OVERRIDE}"
KUBELET_ARGS="${KUBELET_ARGS} --address=${KUBE_NODE_IP} --port=10250 --read-only-port=0 --anonymous-auth=false --authorization-mode=Webhook --authentication-token-webhook=true"
KUBELET_ARGS="${KUBELET_ARGS} --cluster_dns=${DNS_SERVICE_IP} --cluster_domain=${DNS_CLUSTER_DOMAIN}"
KUBELET_ARGS="${KUBELET_ARGS} --volume-plugin-dir=/var/lib/kubelet/volumeplugins"
KUBELET_ARGS="${KUBELET_ARGS} ${KUBELET_OPTIONS}"

# Enable the OpenStack cloud provider only when a trust was configured.
if [ -n "$TRUST_ID" ] && [ "$(echo "${CLOUD_PROVIDER_ENABLED}" | tr '[:upper:]' '[:lower:]')" = "true" ]; then
    KUBELET_ARGS="$KUBELET_ARGS --cloud-provider=openstack --cloud-config=/etc/kubernetes/kube_openstack_config"
fi

# Workaround for Cinder support (fixed in k8s >= 1.6)
if [ ! -f /usr/bin/udevadm ]; then
    ln -s /sbin/udevadm /usr/bin/udevadm
fi

# For using default log-driver, other options should be ignored
sed -i 's/\-\-log\-driver\=journald//g' /etc/sysconfig/docker

KUBELET_ARGS="${KUBELET_ARGS} --pod-infra-container-image=${CONTAINER_INFRA_PREFIX:-gcr.io/google_containers/}pause:3.0"

if [ -n "${INSECURE_REGISTRY_URL}" ]; then
    echo "INSECURE_REGISTRY='--insecure-registry ${INSECURE_REGISTRY_URL}'" >> /etc/sysconfig/docker
fi

KUBELET_ARGS="${KUBELET_ARGS} --client-ca-file=${CERT_DIR}/ca.crt --tls-cert-file=${CERT_DIR}/kubelet.crt --tls-private-key-file=${CERT_DIR}/kubelet.key"

# specified cgroup driver
KUBELET_ARGS="${KUBELET_ARGS} --cgroup-driver=${CGROUP_DRIVER}"

# Align docker's cgroup driver with the kubelet's.
systemctl disable docker
if cat /usr/lib/systemd/system/docker.service | grep 'native.cgroupdriver'; then
    cp /usr/lib/systemd/system/docker.service /etc/systemd/system/
    sed -i "s/\(native.cgroupdriver=\)\w\+/\1$CGROUP_DRIVER/" \
        /etc/systemd/system/docker.service
else
    # NOTE(review): the 'ExecStart=---exec-opt' line below looks malformed for
    # a systemd drop-in (usually a blank 'ExecStart=' precedes the override);
    # preserved as-is — confirm against the upstream heat template.
    cat > /etc/systemd/system/docker.service.d/cgroupdriver.conf << EOF
ExecStart=---exec-opt native.cgroupdriver=$CGROUP_DRIVER
EOF
fi

systemctl daemon-reload
systemctl enable docker

# Helper run at kubelet start: emits --require-kubeconfig only for kubelet
# versions older than v1.8.0 (the flag was removed later). The \$ escapes
# keep the expansion for the generated script, not this one.
cat > /etc/kubernetes/get_require_kubeconfig.sh <<EOF
#!/bin/bash

KUBE_VERSION=\$(kubelet --version | awk '{print \$2}')
min_version=v1.8.0
if [[ "\${min_version}" != \$(echo -e "\${min_version}\n\${KUBE_VERSION}" | sort -s -t. -k 1,1 -k 2,2n -k 3,3n | head -n1) && "\${KUBE_VERSION}" != "devel" ]]; then
    echo "--require-kubeconfig"
fi
EOF
chmod +x /etc/kubernetes/get_require_kubeconfig.sh

if [ "$NETWORK_DRIVER" = "calico" ]; then
    KUBELET_ARGS="${KUBELET_ARGS} --network-plugin=cni --cni-conf-dir=/etc/cni/net.d --cni-bin-dir=/opt/cni/bin"
fi

sed -i '
    /^KUBELET_ADDRESS=/ s/=.*/="--address=0.0.0.0"/
    /^KUBELET_HOSTNAME=/ s/=.*/=""/
    s/^KUBELET_API_SERVER=.*$//
    /^KUBELET_ARGS=/ s|=.*|="'"\$(/etc/kubernetes/get_require_kubeconfig.sh) ${KUBELET_ARGS}"'"|
' /etc/kubernetes/kubelet

cat > /etc/kubernetes/proxy << EOF
KUBE_PROXY_ARGS="--kubeconfig=${PROXY_KUBECONFIG} --cluster-cidr=${PODS_NETWORK_CIDR}"
EOF

if [ "$NETWORK_DRIVER" = "flannel" ]; then
    if [ "$(echo "${INSTALL_DISABLED}" | tr '[:upper:]' '[:lower:]')" = "false" ]; then
        atomic install --storage ostree --system --system-package=no \
        --name=flanneld ${_prefix}flannel:${FLANNEL_TAG}
    fi

    # With TLS, flanneld and our curl probe need the proxy client certs.
    if [ "$TLS_DISABLED" = "True" ]; then
        FLANNEL_OPTIONS=""
        ETCD_CURL_OPTIONS=""
    else
        FLANNEL_CERT_DIR=/etc/flanneld/certs
        FLANNEL_OPTIONS="-etcd-cafile $FLANNEL_CERT_DIR/ca.crt"
        FLANNEL_OPTIONS="$FLANNEL_OPTIONS -etcd-certfile $FLANNEL_CERT_DIR/proxy.crt"
        FLANNEL_OPTIONS="$FLANNEL_OPTIONS -etcd-keyfile $FLANNEL_CERT_DIR/proxy.key"
        ETCD_CURL_OPTIONS="--cacert $FLANNEL_CERT_DIR/ca.crt --cert $FLANNEL_CERT_DIR/proxy.crt --key $FLANNEL_CERT_DIR/proxy.key"
    fi

    FLANNELD_CONFIG=/etc/sysconfig/flanneld

    cat >> $FLANNELD_CONFIG <<EOF
FLANNEL_ETCD_ENDPOINTS="$PROTOCOL://${ETCD_SERVER_IP}:2379"
FLANNEL_ETCD_PREFIX="/atomic.io/network"
FLANNEL_OPTIONS="$FLANNEL_OPTIONS"
EOF

    # Make sure etcd has a flannel configuration
    . $FLANNELD_CONFIG
    until curl -sf $ETCD_CURL_OPTIONS \
        "$FLANNEL_ETCD_ENDPOINTS/v2/keys${FLANNEL_ETCD_PREFIX}/config?quorum=false&recursive=false&sorted=false"
    do
        echo "Waiting for flannel configuration in etcd..."
        sleep 5
    done
fi

cat >> /etc/environment <<EOF
KUBERNETES_MASTER=$KUBE_MASTER_URI
EOF

# Strip the Nova .novalocal suffix so the node registers under its Nova name.
hostname `hostname | sed 's/.novalocal//'`

systemctl restart --no-block kube-proxy
|
#!/bin/bash
# Container entry point for the metacat server.
cd /metacat
export METACAT_SERVER_CFG=/config/config.yaml
export PYTHONPATH=/metacat/product/server:/metacat/product/lib:/metacat/wsdbtools
# Start Apache httpd (presumably it daemonizes here — which is why the idle
# loop below is needed to keep PID 1 alive; confirm against the image config).
httpd
while true; do
    sleep 1
done
|
<reponame>tizenorg/platform.core.pim.contacts-service
/*
* Contacts Service
*
* Copyright (c) 2010 - 2012 Samsung Electronics Co., Ltd. All rights reserved.
*
* Contact: <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <fcntl.h>
#include <unistd.h>
#include "cts-internal.h"
#include "cts-sqlite.h"
#include "cts-schema.h"
static const char *CTS_RESTRICTION_CHECK_FILE="/opt/data/contacts-svc/.CONTACTS_SVC_RESTRICTION_CHECK";
static int cts_restriction_permit;
/* Initializes restriction handling: probes the marker file to decide whether
 * this process may access restricted contacts; when not permitted, installs a
 * temp SQL view that hides rows with is_restricted == 1.
 * Returns CTS_SUCCESS on success, a negative cts_error value otherwise. */
int cts_restriction_init(void)
{
	if (!cts_restriction_permit) {
		/* Being able to open the check file grants restriction permission. */
		int fd = open(CTS_RESTRICTION_CHECK_FILE, O_RDONLY);
		if (0 <= fd) {
			close(fd);
			cts_restriction_permit = TRUE;
		} else {
			ERR("Restriction Mode");
		}
	}
	if (!cts_restriction_permit) {
		int ret;
		const char *query;
		/* Unprivileged processes access the data table only through this
		 * filtered view. */
		query = "CREATE TEMP VIEW "CTS_TABLE_RESTRICTED_DATA_VIEW" AS SELECT * FROM "CTS_TABLE_DATA" WHERE is_restricted != 1";
		ret = cts_query_exec(query);
		retvm_if(CTS_SUCCESS != ret, ret, "cts_query_exec() Failed(%d)", ret);
	}
	return CTS_SUCCESS;
}
/* Revokes this process's restriction permission (the temp view, if any,
 * is left in place). */
void cts_restriction_final(void)
{
	cts_restriction_permit = FALSE;
}
/* Returns TRUE when this process may access restricted contacts. */
int cts_restriction_get_permit(void)
{
	return cts_restriction_permit;
}
/**
 * This function makes a restricted contact: it marks the struct so the
 * record is hidden from processes without restriction permission.
 * If the process does not have permission for restriction (see
 * cts_restriction_init()), this function will fail.
 *
 * @param[in] contact The contacts service struct
 * @return #CTS_SUCCESS on success, Negative value(#cts_error) on error
 */
API int contacts_svc_struct_set_restriction(CTSstruct *contact)
{
	contact_t *record = (contact_t *)contact;
	retv_if(NULL == contact, CTS_ERR_ARG_NULL);
	retv_if(FALSE == cts_restriction_permit, CTS_ERR_ENV_INVALID);
	record->is_restricted = TRUE;
	return CTS_SUCCESS;
}
|
<filename>src/main/java/malte0811/controlengineering/blocks/placement/HorizontalStructurePlacement.java
package malte0811.controlengineering.blocks.placement;
import blusunrize.immersiveengineering.api.utils.DirectionUtils;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.mojang.datafixers.util.Pair;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.world.item.context.BlockPlaceContext;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.Rotation;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.level.block.state.properties.Property;
import net.minecraft.world.level.levelgen.structure.templatesystem.StructurePlaceSettings;
import net.minecraft.world.level.levelgen.structure.templatesystem.StructureTemplate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Function;
/**
 * Placement behavior for multiblock structures that face a horizontal
 * direction. Each block stores its facing in {@code facingProperty} and which
 * part of the structure it is in {@code offsetProperty}; the latter maps to a
 * "logical" offset (the position relative to the origin as if the structure
 * faced north) via {@code getBaseOffset}.
 */
public class HorizontalStructurePlacement<T extends Comparable<T>> implements PlacementBehavior<Direction> {
    private final Property<Direction> facingProperty;
    private final Property<T> offsetProperty;
    // Bidirectional map: property value <-> logical (north-facing) offset.
    private final BiMap<T, BlockPos> getBaseOffset;

    public HorizontalStructurePlacement(
            Property<Direction> facingProperty, Property<T> offsetProperty, Function<T, BlockPos> getBaseOffset
    ) {
        this.facingProperty = facingProperty;
        this.offsetProperty = offsetProperty;
        this.getBaseOffset = HashBiMap.create();
        // Materialize the offset function into a BiMap so physical offsets
        // can be mapped back to property values in getStateForOffset.
        for (T t : offsetProperty.getPossibleValues()) {
            this.getBaseOffset.put(t, getBaseOffset.apply(t));
        }
    }

    /** Vertical column where the integer property is the height above the origin. */
    public static HorizontalStructurePlacement<Integer> column(
            Property<Direction> facing, Property<Integer> columnHeight
    ) {
        return new HorizontalStructurePlacement<>(facing, columnHeight, BlockPos.ZERO::above);
    }

    /** Two-block column where the boolean property marks the upper half. */
    public static HorizontalStructurePlacement<Boolean> column2(Property<Direction> facing, Property<Boolean> upper) {
        return new HorizontalStructurePlacement<>(facing, upper, b -> (b ? BlockPos.ZERO.above() : BlockPos.ZERO));
    }

    @Override
    public Direction getPlacementData(BlockPlaceContext ctx) {
        // The structure faces the same horizontal direction as the placer.
        return ctx.getHorizontalDirection();
    }

    @Override
    public Pair<Direction, BlockPos> getPlacementDataAndOffset(BlockState state, BlockEntity be) {
        T offset = state.getValue(this.offsetProperty);
        Direction facing = state.getValue(this.facingProperty);
        return Pair.of(facing, getPhysicalOffset(facing, offset));
    }

    @Override
    public Collection<BlockPos> getPlacementOffsets(Direction data) {
        // All logical offsets, rotated into world space for the given facing.
        List<BlockPos> list = new ArrayList<>();
        for (BlockPos p : this.getBaseOffset.values()) {
            list.add(getPhysicalOffset(data, p));
        }
        return list;
    }

    @Override
    public BlockState getStateForOffset(Block owner, BlockPos physicalOffset, Direction data) {
        return owner.defaultBlockState()
                .setValue(facingProperty, data)
                .setValue(offsetProperty, getBaseOffset.inverse().get(getLogicalOffset(data, physicalOffset)));
    }

    @Override
    public boolean isValidAtOffset(BlockPos physicalOffset, BlockState state, BlockEntity be, Direction data) {
        if (!state.hasProperty(facingProperty)
                || !state.hasProperty(offsetProperty)
                || state.getValue(facingProperty) != data) {
            return false;
        }
        // The stored logical offset must map to exactly this physical position.
        T logical = state.getValue(offsetProperty);
        BlockPos physical = getPhysicalOffset(data, logical);
        return physicalOffset.equals(physical);
    }

    private BlockPos getPhysicalOffset(Direction facing, T logicalOffset) {
        return getPhysicalOffset(facing, this.getBaseOffset.get(logicalOffset));
    }

    // Rotates a logical (north-based) offset into world space for the facing.
    private static BlockPos getPhysicalOffset(Direction facing, BlockPos logicalOffset) {
        Rotation rot = DirectionUtils.getRotationBetweenFacings(Direction.NORTH, facing);
        StructurePlaceSettings placeSet = new StructurePlaceSettings().setRotation(rot);
        return StructureTemplate.calculateRelativePosition(placeSet, logicalOffset);
    }

    // Inverse of getPhysicalOffset: world-space offset back to logical space.
    private static BlockPos getLogicalOffset(Direction facing, BlockPos physicalOffset) {
        Rotation rot = DirectionUtils.getRotationBetweenFacings(facing, Direction.NORTH);
        StructurePlaceSettings placeSet = new StructurePlaceSettings().setRotation(rot);
        return StructureTemplate.calculateRelativePosition(placeSet, physicalOffset);
    }
}
|
# Build the k8s-client-python image under two tags: the implicit :latest and
# an explicit :stable.
echo -e "\n\t **** building k8s-client-python latest image *********** \n\t"
docker build -t k8s-client-python .
# Fixed: this message previously repeated "latest" (and misspelled "building")
# even though it builds the stable tag.
echo -e "\n\t **** building k8s-client-python stable image *********** \n\t"
docker build -t k8s-client-python:stable .
#docker build -t amitbachar/k8s-client-python .
|
#!/bin/sh
# Builds a clustered network model for experiment REPLACEME and packages the
# results. Abort on the first failing command so partial artifacts are not
# zipped or copied around.
set -e

mkdir -p rawnetworkmodels/REPLACEME-networkmodel
seriationct-build-clustered-network.py --experiment REPLACEME --outputdirectory rawnetworkmodels/REPLACEME-networkmodel --numclusters 3 --slices 5 --nodespercluster 10 --interconnectfraction 0.2 --centroidmin 100 --centroidmax 1000 --clusterspread 10 --intercluster_edgeweight 1 --intracluster_edgeweight 100 --debug 1
# Keep the build script alongside the generated model for provenance.
cp bin/build-networkmodel.sh rawnetworkmodels/REPLACEME-networkmodel
cd rawnetworkmodels
# Package the GML graphs alone, and the full model directory separately.
zip -r REPLACEME-network-gml.zip REPLACEME-networkmodel/*.gml
zip -r REPLACEME-full-network.zip REPLACEME-networkmodel
cp REPLACEME-networkmodel/REPLACEMEXY.txt ../xyfiles
mv REPLACEME-network-gml.zip ../networks
|
#include <pwd.h>
#include <sys/types.h>
#include <unistd.h>
// Represents a system account looked up in the passwd database.
class User {
public:
    // Default-constructed User is the current (real) user of this process.
    // (The original code used this constructor in Process but never declared
    // it, so the file did not compile.)
    User() : m_found(true), m_uid(::getuid()), m_gid(::getgid()) {}

    // Looks up the named account; test existence with operator bool.
    User(const char* username) : m_found(false), m_uid(0), m_gid(0) {
        if (username != nullptr) {
            struct passwd* pw = ::getpwnam(username);
            if (pw != nullptr) {
                m_found = true;
                m_uid = pw->pw_uid;
                m_gid = pw->pw_gid;
            }
        }
    }

    // True when the account exists. (Replaces the original invalid
    // `bool operator bool()` declaration.)
    explicit operator bool() const { return m_found; }

    // User ID of the account.
    uid_t uid() const { return m_uid; }

    // Primary group ID of the account.
    gid_t gid() const { return m_gid; }

private:
    bool m_found;
    uid_t m_uid;
    gid_t m_gid;
};

class Process {
public:
    // Permanently drops root privileges to the named account.
    // Returns 0 on success or when no change was needed (target missing or
    // not running as root); otherwise the setgid/setuid error code.
    int drop_privileges_forever(const char* newname) {
        int res = 0;
        User oldUser;            // the current user
        User newUser(newname);   // the target account
        // Only act when the target exists and we are actually root.
        // The group must be dropped before the uid: setgid still requires
        // root, which setuid irrevocably gives up.
        if (newUser && (oldUser.uid() == 0)) {
            res = ::setgid(newUser.gid());
            if (res == 0) {
                res = ::setuid(newUser.uid());
            }
        }
        return res;
    }
};
|
<gh_stars>1-10
package webapp.controller.more;
import org.noear.solon.annotation.Controller;
import org.noear.solon.annotation.Mapping;
import org.noear.solon.extend.cors.annotation.CrossOrigin;
/**
 * Demo controller for solon's @CrossOrigin support: mappings under
 * /demo2/cross answer with permissive CORS headers (origins = "*").
 *
 * @author noear 2021/5/22 created
 */
@Mapping("/demo2/cross")
@CrossOrigin(origins = "*")
@Controller
public class CrossController {
    // Intentionally empty endpoint at /demo2/cross/test; it exists only so
    // the CORS headers can be observed on a request.
    @Mapping("test")
    public void test(){

    }
}
|
#!/bin/bash
# LinuxGSM mods_list.sh module
# Author: Daniel Gibbs
# Contributors: http://linuxgsm.com/contrib
# Website: https://linuxgsm.com
# Description: Lists and defines available mods for LinuxGSM supported servers; works along with mods_core.sh.
# Usage: To add a mod, you need to add an array variable following the guide to set proper values;
# Usage: Then add this array to the mods_global_array.
# Usage: If needed, you can scrape the download URL first.
functionselfname="$(basename "$(readlink -f "${BASH_SOURCE[0]}")")"
# Get a proper URL for mods that don't provide a good one (optional)
fn_script_log_info "Retrieving latest mods URLs"
# Metamod (Half-life 1 Classic Engine)
metamodversion="1.21.1-am"
metamodlatestfile="metamod-${metamodversion}.zip"
metamoddownloadurl="https://www.amxmodx.org/release/${metamodlatestfile}"
metamodurl="${metamoddownloadurl}"
# AMX Mod X: Base
amxxbaseversion="1.8.2"
amxxbasemod="base"
amxxbaselatestfile="amxmodx-${amxxbaseversion}-${amxxbasemod}-linux.tar.gz"
amxxbasedownloadurl="https://www.amxmodx.org/release/${amxxbaselatestfile}"
amxxbaseurl="${amxxbasedownloadurl}"
# AMX Mod X: Counter-Strike
amxxcsversion="1.8.2"
amxxcsmod="cstrike"
amxxcslatestfile="amxmodx-${amxxbaseversion}-${amxxcsmod}-linux.tar.gz"
amxxcsdownloadurl="https://www.amxmodx.org/release/${amxxcslatestfile}"
amxxcsurl="${amxxcsdownloadurl}"
# AMX Mod X: Day of Defeat
amxxdodversion="1.8.2"
amxxdodmod="dod"
amxxdodlatestfile="amxmodx-${amxxdodversion}-${amxxdodmod}-linux.tar.gz"
amxxdoddownloadurl="https://www.amxmodx.org/release/${amxxdodlatestfile}"
amxxdodurl="${amxxdoddownloadurl}"
# AMX Mod X: Team Fortress Classic
amxxtfcversion="1.8.2"
amxxtfcmod="tfc"
amxxtfclatestfile="amxmodx-${amxxtfcversion}-${amxxtfcmod}-linux.tar.gz"
amxxtfcdownloadurl="https://www.amxmodx.org/release/${amxxtfclatestfile}"
amxxtfcurl="${amxxtfcdownloadurl}"
# AMX Mod X: Natural Selection
amxxnsversion="1.8.2"
amxxnsmod="ns"
amxxnslatestfile="amxmodx-${amxxnsversion}-${amxxnsmod}-linux.tar.gz"
amxxnsdownloadurl="https://www.amxmodx.org/release/${amxxnslatestfile}"
amxxnsurl="${amxxnsdownloadurl}"
# AMX Mod X: The Specialists
amxxtsversion="1.8.2"
amxxtsmod="ts"
amxxtslatestfile="amxmodx-${amxxtsversion}-${amxxtsmod}-linux.tar.gz"
amxxtsdownloadurl="https://www.amxmodx.org/release/${amxxtslatestfile}"
amxxtsurl="${amxxtsdownloadurl}"
# Metamod:Source
metamodsourceversion="1.11"
metamodsourcescrapeurl="https://mms.alliedmods.net/mmsdrop/${metamodsourceversion}/mmsource-latest-linux"
metamodsourcelatestfile=$(wget "${metamodsourcescrapeurl}" -q -O -)
metamodsourcedownloadurl="https://www.metamodsource.net/latest.php?os=linux&version=${metamodsourceversion}"
metamodsourceurl="${metamodsourcedownloadurl}"
# Sourcemod
sourcemodversion="1.10"
sourcemodscrapeurl="https://sm.alliedmods.net/smdrop/${sourcemodversion}/sourcemod-latest-linux"
sourcemodlatestfile=$(wget "${sourcemodscrapeurl}" -q -O -)
sourcemoddownloadurl="https://www.sourcemod.net/latest.php?os=linux&version=${sourcemodversion}"
sourcemodurl="${sourcemoddownloadurl}"
# Steamworks
steamworksscrapeurl="https://users.alliedmods.net/~kyles/builds/SteamWorks"
steamworkslatestfile=$(curl --connect-timeout 10 -sL ${steamworksscrapeurl} | grep -m 1 linux | cut -d '"' -f 4)
steamworksdownloadurl="${steamworksscrapeurl}/${steamworkslatestfile}"
steamworksurl="${steamworksdownloadurl}"
# CS:GO Mods
get5lastbuild=$(curl --connect-timeout 10 -sL https://ci.splewis.net/job/get5/lastSuccessfulBuild/api/json | jq -r '.artifacts[]')
get5latestfile=$(echo -e "${get5lastbuild}" | jq -r '.fileName')
get5latestfilepath=$(echo -e "${get5lastbuild}" | jq -r '.relativePath')
get5url="https://ci.splewis.net/job/get5/lastSuccessfulBuild/artifact/${get5latestfilepath}"
csgopracticelatest=$(curl --connect-timeout 10 -sL https://api.github.com/repos/splewis/csgo-practice-mode/releases/latest | jq '.assets[]')
csgopracticelatestfile=$(echo -e "${csgopracticelatest}" | jq -r '.name')
csgopracticelatestlink=$(echo -e "${csgopracticelatest}" | jq -r '.browser_download_url')
csgopuglatest=$(curl --connect-timeout 10 -sL https://api.github.com/repos/splewis/csgo-pug-setup/releases/latest | jq '.assets[]')
csgopuglatestfile=$(echo -e "${csgopuglatest}" | jq -r '.name')
csgopuglatestlink=$(echo -e "${csgopuglatest}" | jq -r '.browser_download_url')
# Oxide
oxiderustlatestlink=$(curl --connect-timeout 10 -sL https://api.github.com/repos/OxideMod/Oxide.Rust/releases/latest | jq -r '.assets[]|select(.browser_download_url | contains("linux")) | .browser_download_url')
oxidehurtworldlatestlink=$(curl --connect-timeout 10 -sL https://api.github.com/repos/OxideMod/Oxide.Hurtworld/releases/latest | jq -r '.assets[].browser_download_url')
oxidesdtdlatestlink=$(curl --connect-timeout 10 -sL https://api.github.com/repos/OxideMod/Oxide.SevenDaysToDie/releases/latest | jq -r '.assets[]|select(.browser_download_url | contains("linux")) | .browser_download_url' )
# Valheim Plus
valeimpluslatestlink=$(curl --connect-timeout 10 -sL https://api.github.com/repos/valheimPlus/ValheimPlus/releases/latest | jq -r '.assets[]|select(.browser_download_url | contains("UnixServer.tar.gz")) | .browser_download_url')
# Define mods information (required)
# Separator name
modseparator="MOD"
# REQUIRED: mod_info_name=( MOD "modcommand" "Pretty Name" "URL" "filename" "modsubdirs" "LowercaseOn/Off" "/files/to/keep;" "/install/path" "ENGINES" "GAMES" "NOTGAMES" "AUTHOR_URL" "Short Description" )
# Example 1) Well made mod: mod_info_name=( MOD "awesomemod" "This is an Awesome Mod" "https://awesomemod.com/latest.zip" "awesomemod.zip" "0" "LowercaseOff" "OVERWRITE" "${systemdir}/addons" "source;unity3d;" "GAMES" "NOTGAMES" "https://awesomemod.com/" "This mod knows that 42 is the answer" )
# Example 2) Poorly made mod: mod_info_name=( MOD "stupidmod" "This is a stupid mod" "${crappymodurl}" "StupidMod.zip" "2" "LowercaseOn" "cfg;data/crappymod;" "${systemdir}" "source;" "GAMES" "Garry's mod;Counter-Strike: Source;" "This mod is dumber than dumb" )
# None of those values can be empty
# index | Usage
# [0] | MOD: separator, all mods must begin with it
# [1] | "modcommand": the LGSM name and command to install the mod (must be unique and lowercase)
# [2] | "Pretty Name": the common name people use to call the mod that will be displayed to the user
# [3] | "URL": link to the mod archive file; can be a variable previously defined while scraping a URL
# [4] | "filename": the output filename
# [5] | "modsubdirs": in how many subdirectories is the mod (none is 0) (not used at release, but could be in the future)
# [6] | "LowercaseOn/Off": LowercaseOff or LowercaseOn: enable/disable converting extracted files and directories to lowercase (some games require it)
# [7] | "modinstalldir": the directory in which to install the mode (use LGSM dir variables such as ${systemdir})
# [8] | "/files/to/keep;", files & directories that should not be overwritten upon update, separated and ended with a semicolon; you can also use "OVERWRITE" value to ignore the value or "NOUPDATE" to disallow updating; for files to keep upon uninstall, see fn_mod_tidy_files_list from mods_core.sh
# [9] | "Supported Engines;": list them according to LGSM ${engine} variables, separated and ended with a semicolon, or use ENGINES to ignore the value
# [10] | "Supported Games;": list them according to LGSM ${gamename} variables, separated and ended with a semicolon, or use GAMES to ignore the value
# [11] | "Unsupported Games;": list them according to LGSM ${gamename} variables, separated and ended with a semicolon, or use NOTGAMES to ignore the value (useful to exclude a game when using Supported Engines)
# [12] | "AUTHOR_URL" is the author's website, displayed to the user when chosing mods to install
# [13] | "Short Description" a description showed to the user upon installation/removal
# Half-life 1 Engine Mods
# Mod manifest: each mod_info_* array describes one installable mod.
# Field layout: MOD marker, short name, pretty name, download URL, local file name,
# flag, lowercase toggle, install dir, files/dirs to keep (or OVERWRITE/NOUPDATE),
# engine/game applicability lists, project URL, one-line description.
mod_info_metamod=( MOD "metamod" "Metamod" "${metamodurl}" "${metamodlatestfile}" "0" "LowercaseOff" "${systemdir}" "addons/metamod/plugins.ini;" "ENGINES" "Counter-Strike 1.6;Day of Defeat;Team Fortress Classic;Natural Selection;The Specialists;Half-Life: Deathmatch;" "NOTGAMES" "https://github.com/alliedmodders/metamod-hl1" "Plugins Framework" )
mod_info_base_amxx=( MOD "amxmodx" "AMX Mod X: Base" "${amxxbaseurl}" "${amxxbaselatestfile}" "0" "LowercaseOff" "${systemdir}" "addons/amxmodx/configs;" "ENGINES" "Counter-Strike 1.6;Day of Defeat;Team Fortress Classic;Natural Selection;The Specialists;Half-Life: Deathmatch;" "NOTGAMES" "https://www.amxmodx.org" "Admin Features (requires Metamod)" )
# CS 1.6 (HL1) Engine Mods
mod_info_cs_amxx=( MOD "amxmodxcs" "AMX Mod X: Counter-Strike" "${amxxcsurl}" "${amxxcslatestfile}" "0" "LowercaseOff" "${systemdir}" "addons/amxmodx/configs;" "ENGINES" "Counter-Strike 1.6;" "NOTGAMES" "https://www.amxmodx.org" "Admin Features (requires Metamod & AMX Mod X: Base)" )
# DOD (HL1) Engine Mods
mod_info_dod_amxx=( MOD "amxmodxdod" "AMX Mod X: Day of Defeat" "${amxxdodurl}" "${amxxdodlatestfile}" "0" "LowercaseOff" "${systemdir}" "addons/amxmodx/configs;" "ENGINES" "Day of Defeat;" "NOTGAMES" "https://www.amxmodx.org" "Admin Features (requires Metamod & AMX Mod X: Base)" )
# TFC (HL1) Engine Mods
mod_info_tfc_amxx=( MOD "amxmodxtfc" "AMX Mod X: Team Fortress Classic" "${amxxtfcurl}" "${amxxtfclatestfile}" "0" "LowercaseOff" "${systemdir}" "addons/amxmodx/configs;" "ENGINES" "Team Fortress Classic;" "NOTGAMES" "https://www.amxmodx.org" "Admin Features (requires Metamod & AMX Mod X: Base)" )
# NS (Natural Selection) (HL1) Engine Mods
mod_info_ns_amxx=( MOD "amxmodxns" "AMX Mod X: Natural Selection" "${amxxnsurl}" "${amxxnslatestfile}" "0" "LowercaseOff" "${systemdir}" "addons/amxmodx/configs;" "ENGINES" "Natural Selection;" "NOTGAMES" "https://www.amxmodx.org" "Admin Features (requires Metamod & AMX Mod X: Base)" )
# TS (The Specialists) (HL1) Engine Mods
mod_info_ts_amxx=( MOD "amxmodxts" "AMX Mod X: The Specialists" "${amxxtsurl}" "${amxxtslatestfile}" "0" "LowercaseOff" "${systemdir}" "addons/amxmodx/configs;" "ENGINES" "The Specialists;" "NOTGAMES" "https://www.amxmodx.org" "Admin Features (requires Metamod & AMX Mod X: Base)" )
# Source mods
mod_info_metamodsource=( MOD "metamodsource" "Metamod: Source" "${metamodsourceurl}" "${metamodsourcelatestfile}" "0" "LowercaseOff" "${systemdir}" "addons/metamod/metaplugins.ini;" "source;" "GAMES" "NOTGAMES" "https://www.sourcemm.net" "Plugins Framework" )
mod_info_sourcemod=( MOD "sourcemod" "SourceMod" "${sourcemodurl}" "${sourcemodlatestfile}" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "source;" "GAMES" "NOTGAMES" "http://www.sourcemod.net" "Admin Features (requires Metamod: Source)" )
mod_info_steamworks=( MOD "steamworks" "SteamWorks" "${steamworksurl}" "${steamworkslatestfile}" "0" "LowercaseOff" "${systemdir}" "OVERWRITE" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://github.com/KyleSanderson/SteamWorks" "Exposing SteamWorks functions to SourcePawn" )
mod_info_stripper=( MOD "stripper" "Stripper Source" "http://www.bailopan.net/stripper/snapshots/1.2/stripper-1.2.2-git129-linux.tar.gz" "stripper-1.2.2-git129-linux.tar.gz" "0" "LowercaseOff" "${systemdir}" "addons/stripper/maps;" "ENGINES" "Counter-Strike: Global Offensive;Counter-Strike: Source;Day of Defeat: Source;Half Life: Deathmatch;Half Life 2: Deathmatch;Insurgency;Left 4 Dead;Left 4 Dead 2;Nuclear Dawn;Team Fortress 2;" "NOTGAMES" "http://www.bailopan.net/stripper/" "Add or remove objects from map (requires MetaMod)")
# CS:GO Mods
mod_info_gokz=( MOD "gokz" "GOKZ" "https://bitbucket.org/kztimerglobalteam/gokz/downloads/GOKZ-latest.zip" "gokz-latest.zip" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://bitbucket.org/kztimerglobalteam/gokz/src/master/" "Implements the KZ game mode (requires SourceMod and MetaMod)" )
mod_info_ttt=( MOD "ttt" "Trouble in Terrorist Town" "https://csgottt.com/downloads/ttt-latest-dev-${sourcemodversion}.zip" "ttt-latest.zip" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://github.com/Bara/TroubleinTerroristTown" "Implements the TTT game mode (requires SourceMod and MetaMod)" )
mod_info_get5=( MOD "get5" "Get 5" "${get5url}" "${get5latestfile}" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://github.com/splewis/get5" "Plugin for competitive matches/scrims (requires SourceMod and MetaMod)" )
mod_info_prac=( MOD "prac" "csgo practice mode" "${csgopracticelatestlink}" "${csgopracticelatestfile}" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://github.com/splewis/csgo-practice-mode" "Practice Mode is a sourcemod plugin for helping players/teams run practices." )
mod_info_pug=( MOD "pug" "PUG" "${csgopuglatestlink}" "${csgopuglatestfile}" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://github.com/splewis/csgo-pug-setup" "plugin for setting up private pug/10man games" )
mod_info_dhook=( MOD "dhook" "dhook" "https://forums.alliedmods.net/attachment.php?attachmentid=190123&d=1625050030" "dhooks-2.2.0d17.zip" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://forums.alliedmods.net/showpost.php?p=2588686&postcount=589" "DHooks 2.2.0 - Required for GOKZ" )
mod_info_movement=( MOD "movementapi" "movementapi" "https://github.com/danzayau/MovementAPI/releases/download/2.1.0/MovementAPI-v2.1.0.zip" "MovementAPI-v2.1.0.zip" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://github.com/danzayau/MovementAPI" "Movement API 2.1.0 - Required for GOKZ" )
mod_info_cleaner=( MOD "cleaner" "cleaner" "https://github.com/e54385991/console-cleaner/archive/refs/heads/master.zip" "console-cleaner.zip" "0" "LowercaseOff" "${systemdir}" "cfg;addons/sourcemod/configs;" "ENGINES" "Counter-Strike: Global Offensive;" "NOTGAMES" "https://github.com/e54385991/console-cleaner" "Console Cleaner - Optional for GOKZ" )
# Garry's Mod Addons
mod_info_ulib=( MOD "ulib" "ULib" "https://codeload.github.com/TeamUlysses/ulib/zip/master" "ulib-master.zip" "0" "LowercaseOff" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "http://ulyssesmod.net" "Complete Framework" )
mod_info_ulx=( MOD "ulx" "ULX" "https://codeload.github.com/TeamUlysses/ulx/zip/master" "ulx-master.zip" "0" "LowercaseOff" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "http://ulyssesmod.net" "Admin Panel (requires ULib)" )
mod_info_utime=( MOD "utime" "UTime" "https://github.com/TeamUlysses/utime/archive/master.zip" "utime-master.zip" "0" "LowercaseOff" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "http://ulyssesmod.net" "Keep track of players play time" )
mod_info_uclip=( MOD "uclip" "UClip" "https://github.com/TeamUlysses/uclip/archive/master.zip" "uclip-master.zip" "0" "LowercaseOff" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "http://ulyssesmod.net" "An alternative to noclip" )
mod_info_acf=( MOD "acf" "Armoured Combat Framework" "https://github.com/nrlulz/ACF/archive/master.zip" "acf-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "acf-master/lua/acf/shared/guns;" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/nrlulz/ACF" "Realistic Wepons & Engines" )
mod_info_acf_missiles=( MOD "acfmissiles" "ACF Missiles" "https://github.com/Bubbus/ACF-Missiles/archive/master.zip" "acf-missiles-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/Bubbus/ACF-Missiles" "More missiles for ACF" )
mod_info_advdupe2=( MOD "advdupe2" "Advanced Duplicator 2" "https://github.com/wiremod/advdupe2/archive/master.zip" "advdupe2-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "http://www.wiremod.com" "Save your constructions. Second version" )
mod_info_pac3=( MOD "pac3" "PAC3" "https://github.com/CapsAdmin/pac3/archive/master.zip" "pac3-master.zip" "0" "LowercaseOff" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/CapsAdmin/pac3" "Advanced player model customization" )
mod_info_wiremod=( MOD "wiremod" "Wiremod" "https://github.com/wiremod/wire/archive/master.zip" "wire-master.zip" "0" "LowercaseOff" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/wiremod/wire" "Base Wiremod Addon")
mod_info_wiremodextras=( MOD "wiremod-extras" "Wiremod Extras" "https://github.com/wiremod/wire-extras/archive/master.zip" "wire-extras-master.zip" "0" "LowercaseOff" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/wiremod/wire-extras/" "Addition to Wiremod, Extra Content")
mod_info_advduplicator=( MOD "advdupe1" "Advanced Duplicator 1" "https://github.com/wiremod/advduplicator/archive/master.zip" "advduplicator-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/wiremod/advduplicator" "Save your constructions. First version" )
mod_info_trackassemblytool=( MOD "trackassemblytool" "Track Assembly Tool" "https://github.com/dvdvideo1234/trackassemblytool/archive/master.zip" "trackassemblytool-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/TrackAssemblyTool" "Assembles segmented track. Supports wire" )
mod_info_physpropertiesadv=( MOD "physpropertiesadv" "Phys Properties Adv" "https://github.com/dvdvideo1234/physpropertiesadv/archive/master.zip" "physpropertiesadv-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/PhysPropertiesAdv" "Advanced configurable properties" )
mod_info_controlsystemse2=( MOD "controlsystemse2" "Control Systems E2" "https://github.com/dvdvideo1234/controlsystemse2/archive/master.zip" "controlsystemse2-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/ControlSystemsE2" "PID controllers and fast traces for E2. Minor included in wire-extas" )
mod_info_e2pistontiming=( MOD "e2pistontiming" "E2 Piston Timing" "https://github.com/dvdvideo1234/e2pistontiming/archive/master.zip" "e2pistontiming-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/E2PistonTiming" "Routine driven piston engine timings for E2" )
mod_info_propcannontool=( MOD "propcannontool" "Prop Cannon Tool" "https://github.com/dvdvideo1234/propcannontool/archive/master.zip" "propcannontool-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/PropCannonTool" "Cannon entity that can fire props. Supports wire" )
mod_info_gearassemblytool=( MOD "gearassemblytool" "Gear Assembly Tool" "https://github.com/dvdvideo1234/gearassemblytool/archive/master.zip" "gearassemblytool-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/GearAssemblyTool" "Assembles segmented gearbox" )
mod_info_spinnertool=( MOD "spinnertool" "Spinner Tool" "https://github.com/dvdvideo1234/spinnertool/archive/master.zip" "spinnertool-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/SpinnerTool" "Torque lever controlled spinner. Supports wire" )
mod_info_surfacefrictiontool=( MOD "surfacefrictiontool" "Surface Friction Tool" "https://github.com/dvdvideo1234/surfacefrictiontool/archive/master.zip" "surfacefrictiontool-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/SurfaceFrictionTool" "Controls the surface friction of a prop" )
mod_info_magneticdipole=( MOD "magneticdipole" "Magnetic Dipole" "https://github.com/dvdvideo1234/magneticdipole/archive/master.zip" "magneticdipole-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/MagneticDipole" "Magnet entity that runs forces on its poles. Supports wire" )
mod_info_environmentorganizer=( MOD "environmentorganizer" "Environment Organizer" "https://github.com/dvdvideo1234/environmentorganizer/archive/master.zip" "environmentorganizer-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/EnvironmentOrganizer" "Installs routines designed for server settings adjustment" )
mod_info_precision_alignment=( MOD "precision-alignment" "Precision Alignment" "https://github.com/Mista-Tea/precision-alignment/archive/master.zip" "precision-alignment-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/Mista-Tea/precision-alignment" "Creates precise constraints and aligments" )
mod_info_improved_stacker=( MOD "improved-stacker" "Improved Stacker" "https://github.com/Mista-Tea/improved-stacker/archive/master.zip" "improved-stacker-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/Mista-Tea/improved-stacker" "Stacks entities in the direction chosen" )
mod_info_improved_weight=( MOD "improved-weight" "Improved Weight" "https://github.com/Mista-Tea/improved-weight/archive/master.zip" "improved-weight-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/Mista-Tea/improved-weight" "Weight tool but with more features" )
mod_info_improved_antinoclip=( MOD "improved-antinoclip" "Improved Antinoclip" "https://github.com/Mista-Tea/improved-antinoclip/archive/master.zip" "improved-antinoclip-master.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/Mista-Tea/improved-antinoclip" "Controls clipping trough an object" )
mod_info_darkrp=( MOD "darkrp" "DarkRP" "https://github.com/FPtje/DarkRP/archive/master.zip" "darkrp-master.zip" "0" "LowercaseOn" "${systemdir}/gamemodes" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "http://darkrp.com" "Most popular gamemode" )
mod_info_darkrpmodification=( MOD "darkrpmodification" "DarkRP Modification" "https://github.com/FPtje/darkrpmodification/archive/master.zip" "darkrpmodification-master.zip" "0" "LowercaseOff" "${systemdir}/addons" "NOUPDATE" "ENGINES" "Garry's Mod;" "NOTGAMES" "http://darkrp.com" "Customize DarkRP settings" )
mod_info_laserstool=( MOD "laserstool" "Laser STool" "https://github.com/dvdvideo1234/laserstool/archive/main.zip" "laserstool-main.zip" "0" "LowercaseOn" "${systemdir}/addons" "OVERWRITE" "ENGINES" "Garry's Mod;" "NOTGAMES" "https://github.com/dvdvideo1234/LaserSTool" "Scripted tool that spawns laser entities, simulates light rays and even kill players" )
# Oxidemod
mod_info_rustoxide=( MOD "rustoxide" "Oxide for Rust" "${oxiderustlatestlink}" "Oxide.Rust-linux.zip" "0" "LowercaseOff" "${systemdir}" "OVERWRITE" "ENGINES" "Rust;" "NOTGAMES" "https://umod.org/games/rust" "Allows for the use of plugins" )
mod_info_hwoxide=( MOD "hwoxide" "Oxide for Hurtworld" "${oxidehurtworldlatestlink}" "Oxide.Hurtworld.zip" "0" "LowercaseOff" "${systemdir}" "OVERWRITE" "ENGINES" "Hurtworld;" "NOTGAMES" "https://umod.org/games/hurtworld" "Allows for the use of plugins" )
mod_info_sdtdoxide=( MOD "sdtdoxide" "Oxide for 7 Days To Die" "${oxidesdtdlatestlink}" "Oxide.SevenDaysToDie.zip" "0" "LowercaseOff" "${systemdir}" "OVERWRITE" "ENGINES" "7 Days To Die;" "NOTGAMES" "https://umod.org/games/7-days-to-die" "Allows for the use of plugins" )
# ValheimPlus
mod_info_valheimplus=( MOD "valheimplus" "Valheim PLUS" "${valeimpluslatestlink}" "ValheimPlus.tar.gz" "0" "LowercaseOff" "${systemdir}" "OVERWRITE" "ENGINES" "Valheim;" "NOTGAMES" "https://github.com/valheimPlus/ValheimPlus" "Mod to improve Valheim gameplay")
# REQUIRED: Set all mods info into the global array.
# FIX(review): mod_info_stripper was defined above but missing from this array,
# making Stripper:Source uninstallable; it is now included after steamworks.
# NOTE(review): mod_info_acf_sweps is referenced below but not defined in this
# section — presumably declared earlier in the file; verify it exists.
mods_global_array=(
	"${mod_info_metamod[@]}" "${mod_info_base_amxx[@]}" "${mod_info_cs_amxx[@]}" "${mod_info_dod_amxx[@]}" "${mod_info_tfc_amxx[@]}" "${mod_info_ns_amxx[@]}" "${mod_info_ts_amxx[@]}"
	"${mod_info_metamodsource[@]}" "${mod_info_sourcemod[@]}" "${mod_info_steamworks[@]}" "${mod_info_stripper[@]}"
	"${mod_info_gokz[@]}" "${mod_info_ttt[@]}" "${mod_info_get5[@]}" "${mod_info_prac[@]}" "${mod_info_pug[@]}" "${mod_info_dhook[@]}" "${mod_info_movement[@]}" "${mod_info_cleaner[@]}"
	"${mod_info_ulib[@]}" "${mod_info_ulx[@]}" "${mod_info_utime[@]}" "${mod_info_uclip[@]}" "${mod_info_acf[@]}" "${mod_info_acf_missiles[@]}" "${mod_info_acf_sweps[@]}" "${mod_info_advdupe2[@]}" "${mod_info_pac3[@]}" "${mod_info_wiremod[@]}" "${mod_info_wiremodextras[@]}" "${mod_info_darkrp[@]}" "${mod_info_darkrpmodification[@]}"
	"${mod_info_rustoxide[@]}" "${mod_info_hwoxide[@]}" "${mod_info_sdtdoxide[@]}"
	"${mod_info_advduplicator[@]}" "${mod_info_trackassemblytool[@]}" "${mod_info_physpropertiesadv[@]}" "${mod_info_controlsystemse2[@]}" "${mod_info_e2pistontiming[@]}" "${mod_info_propcannontool[@]}" "${mod_info_gearassemblytool[@]}" "${mod_info_spinnertool[@]}" "${mod_info_surfacefrictiontool[@]}" "${mod_info_magneticdipole[@]}" "${mod_info_environmentorganizer[@]}" "${mod_info_precision_alignment[@]}" "${mod_info_improved_stacker[@]}" "${mod_info_improved_weight[@]}" "${mod_info_improved_antinoclip[@]}" "${mod_info_laserstool[@]}"
	"${mod_info_valheimplus[@]}"
)
|
python3 run.py --data_dir ../data/ --output_path ./output/ --target "virginica" --criterion "entropy" --max_depth None --min_samples_split 2
|
package com.common.base.dialog;
import android.content.Context;
import android.text.TextUtils;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.common.R;
import com.common.R2;
import androidx.annotation.NonNull;
import androidx.annotation.StringRes;
import butterknife.BindView;
import butterknife.OnClick;
/**
 * Confirmation dialog with a title, optional content text, an optional
 * checkbox row (a "tips"/don't-ask-again style toggle) and cancel/confirm
 * actions. (Original description: 确认弹出框 — "confirmation popup".)
 *
 * @author <NAME>
 * create at 2018/11/2
 */
public class ConfirmDialog extends BaseDialog {
    // Views bound by ButterKnife from R.layout.common_dialog_confirm.
    @BindView(R2.id.tv_title)
    TextView mTvTitle;
    @BindView(R2.id.tv_content)
    TextView mTvContent;
    // Checkbox rendered as an ImageView; its "selected" state is the check state.
    @BindView(R2.id.cb_confirm_check)
    ImageView mCbConfirmCheck;
    @BindView(R2.id.ll_confirm_check)
    LinearLayout mLlConfirmCheck;
    @BindView(R2.id.tv_cancel)
    TextView mTvCancel;
    @BindView(R2.id.tv_confirm)
    TextView mTvConfirm;
    // Dialog title text (always shown).
    private String title;
    // Optional body text; the content view is hidden when this is empty.
    private String content;
    // private String cancel;
    // private String confirm;
    // When true, the checkbox row is visible.
    private boolean isShowTips;
    // Callback invoked on confirm/cancel taps; may be null.
    private OnConfirmListener mOnConfirmListener;
    // When true, show() hides the cancel button and its divider line.
    private boolean hideCancel = false;
    /** Title-only dialog from a string resource; no content text. */
    public ConfirmDialog(@NonNull Context context, @StringRes int titleId) {
        this(context, context.getString(titleId), null);
    }
    /** Title and content from string resources; checkbox row hidden. */
    public ConfirmDialog(@NonNull Context context, @StringRes int titleId,
                         @StringRes int contentId) {
        this(context, context.getString(titleId), context.getString(contentId));
    }
    /** Title and content from plain strings; checkbox row hidden. */
    public ConfirmDialog(@NonNull Context context, String title, String content) {
        this(context, title, content, false
                // , null, null
        );
    }
    /** Resource-id variant of the full constructor. */
    public ConfirmDialog(@NonNull Context context, @StringRes int titleId,
                         @StringRes int contentId, boolean isShowTips
            // ,
            // @StringRes int confirmId,
            // @StringRes int cancelId
    ) {
        this(context, context.getString(titleId), context.getString(contentId), isShowTips
                // , context.getString(confirmId), context.getString(cancelId)
        );
    }
    /**
     * Full constructor: all other constructors delegate here.
     *
     * @param isShowTips whether the checkbox row is visible
     */
    public ConfirmDialog(@NonNull Context context, String title, String content, boolean isShowTips
            // ,String confirm, String cancel
    ) {
        super(context);
        this.title = title;
        this.content = content;
        this.isShowTips = isShowTips;
        // this.confirm = confirm;
        // this.cancel = cancel;
    }
    @Override
    protected int getContentView() {
        return R.layout.common_dialog_confirm;
    }
    @Override
    protected void initView() {
        mTvTitle.setText(title);
        mLlConfirmCheck.setVisibility(isShowTips ? View.VISIBLE : View.GONE);
        mTvContent.setText(content);
        // Hide the content view entirely when there is no body text.
        mTvContent.setVisibility(TextUtils.isEmpty(content) ? View.GONE : View.VISIBLE);
        // if(!TextUtils.isEmpty(confirm)) {
        //     mBtnConfirm.setText(confirm);
        // }
        // if(!TextUtils.isEmpty(cancel)) {
        //     mBtnCancel.setText(cancel);
        // }
    }
    /**
     * Handles taps: cancel/confirm notify the listener (confirm passes the
     * checkbox state) and dismiss; tapping the checkbox toggles its state.
     */
    @OnClick({R2.id.tv_cancel, R2.id.tv_confirm, R2.id.cb_confirm_check})
    public void onViewClicked(View view) {
        int i = view.getId();
        if(i == R.id.tv_cancel) {
            if(mOnConfirmListener != null) {
                mOnConfirmListener.cancel();
            }
            dismiss();
        } else if(i == R.id.tv_confirm) {
            if(mOnConfirmListener != null) {
                mOnConfirmListener.confirm(mCbConfirmCheck.isSelected());
            }
            dismiss();
        } else if(i == R.id.cb_confirm_check) {
            mCbConfirmCheck.setSelected(!mCbConfirmCheck.isSelected());
        }
    }
    /** Marks the cancel button to be hidden on the next show(). */
    public void setHideCancel() {
        hideCancel = true;
    }
    @Override
    public void show() {
        super.show();
        // Applied here (not initView) so setHideCancel() works before each show.
        if(hideCancel) {
            mTvCancel.setVisibility(View.GONE);
            findViewById(R.id.view_line).setVisibility(View.GONE);
        }
    }
    /** Sets the confirm/cancel callback; returns this for chaining. */
    public ConfirmDialog setOnConfirmListener(OnConfirmListener mOnConfirmListener) {
        this.mOnConfirmListener = mOnConfirmListener;
        return this;
    }
    /** Callback for the dialog's two actions; cancel is optional to implement. */
    public interface OnConfirmListener {
        /** @param isSelect state of the checkbox when confirm was tapped */
        void confirm(boolean isSelect);
        default void cancel() {
        }
    }
}
|
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 6 16:42:29 2019
@author: tslin
"""
from pyparsing import Literal,Word, White, alphas, nestedExpr, quotedString, cStyleComment, alphanums, nums, StringStart, StringEnd
class SmilesPattern:
    """Namespace of pyparsing grammar fragments for tokenizing SMILES strings.

    All patterns are built at class-definition time and used as class
    attributes (e.g. ``SmilesPattern._smilesElement``). Alternation order in
    the ``|`` chains is significant: pyparsing's MatchFirst takes the first
    alternative that matches, so longer literals (e.g. 'Cl', '@@') must
    precede their prefixes ('C', '@').
    """
    def __init__(self):
        pass
    def addRawStr(toks):
        # Parse action (called with the token list, not an instance): stores
        # the matched text under the 'rawStr' results name. For a branch the
        # nested-expression result is used directly; otherwise tokens are
        # re-joined into the original substring.
        if 'branch' in toks:
            toks['rawStr']=toks['branch']
        else:
            toks['rawStr']=''.join(toks[:])
        return toks
    #whitespace = " \t\n"
    whitespace = White().leaveWhitespace()
    ### ATOM SECTION ###
    # Organic Subset section
    # Two-letter symbols first so 'Cl'/'Br' are not split into 'C'/'B' + rest.
    _aliphatic_organic = ( Literal('Cl').setResultsName('symbol') \
                           | Literal('Br').setResultsName('symbol') \
                           | Word('BCNOSPFI',exact=1).setResultsName('symbol') ).setResultsName('organic')
    _aromatic_organic = ( Literal('c').setResultsName('symbol') \
                          | Word('bnosp',exact=1).setResultsName('symbol') ).setResultsName('organic')
    #_aliphatic_organic.setResultsName('organic')
    #_aromatic_organic.setResultsName('organic')
    # Bracketed Atoms section
    # Isotope is one or more digits preceding the symbol inside brackets.
    _isotope = Word(nums,min=1)
    # All two-letter element symbols, then the single-letter ones, so the
    # MatchFirst never truncates a two-letter symbol.
    _element_symbols =Literal('He') | Literal('Li') | Literal('Be') | Literal('Ne') | Literal('Na') | Literal('Mg') \
                      | Literal('Al') | Literal('Si') | Literal('Cl') | Literal('Ar') | Literal('Ca') | Literal('Sc') \
                      | Literal('Ti') | Literal('Cr') | Literal('Mn') | Literal('Fe') | Literal('Co') | Literal('Ni') \
                      | Literal('Cu') | Literal('Zn') | Literal('Ga') | Literal('Ge') | Literal('As') | Literal('Se') \
                      | Literal('Br') | Literal('Kr') | Literal('Rb') | Literal('Sr') | Literal('Zr') | Literal('Nb') \
                      | Literal('Mo') | Literal('Tc') | Literal('Ru') | Literal('Rh') | Literal('Pd') | Literal('Ag') \
                      | Literal('Cd') | Literal('In') | Literal('Sn') | Literal('Sb') | Literal('Te') | Literal('Xe') \
                      | Literal('Cs') | Literal('Ba') | Literal('Hf') | Literal('Ta') | Literal('Re') | Literal('Os') \
                      | Literal('Ir') | Literal('Pt') | Literal('Au') | Literal('Hg') | Literal('Tl') | Literal('Pb') \
                      | Literal('Bi') | Literal('Po') | Literal('At') | Literal('Rn') | Literal('Fr') | Literal('Ra') \
                      | Literal('Rf') | Literal('Db') | Literal('Sg') | Literal('Bh') | Literal('Hs') | Literal('Mt') \
                      | Literal('Ds') | Literal('Rg') | Literal('La') | Literal('Ce') | Literal('Pr') | Literal('Nd') \
                      | Literal('Pm') | Literal('Sm') | Literal('Eu') | Literal('Gd') | Literal('Tb') | Literal('Dy') \
                      | Literal('Ho') | Literal('Er') | Literal('Tm') | Literal('Yb') | Literal('Lu') | Literal('Ac') \
                      | Literal('Th') | Literal('Pa') | Literal('Np') | Literal('Pu') | Literal('Am') | Literal('Cm') \
                      | Literal('Bk') | Literal('Cf') | Literal('Es') | Literal('Fm') | Literal('Md') | Literal('No') \
                      | Literal('Lr') \
                      | Literal('H') | Literal('B') | Literal('C') | Literal('N') | Literal('O') | Literal('F') | Literal('P') \
                      | Literal('S') | Literal('K') | Literal('V') | Literal('Y') | Literal('I') | Literal('W') | Literal('U')
    _aromatic_symbols = Literal('se') | Literal('as') | Word('cnops',exact=1)
    _symbol = _element_symbols | _aromatic_symbols | Literal('*')
    # Chirality section
    # '@@' must precede '@'; extended chirality classes are left disabled.
    _chiral = Literal('@@') | Literal('@') #| Literal('@TH1') | Literal('@TH2') \
              #| Literal('@SP1') | Literal('@SP2') | Literal('@SP3') \
              #| Literal('@AL1') | Literal('@AL2') | '@TB'+Word(nums,min=1,max=2) | '@OH'+Word(nums,min=1,max=2)
    _chiral.setParseAction(''.join)
    # Hydrogens section
    # 'H' with optional count digit, e.g. H, H2.
    _hcount = Literal('H') + (Word('123456789',exact=1)*(0,1)).setResultsName('nH')
    #_hcount.setParseAction(''.join)
    # Charge section
    # +/- with optional magnitude; legacy '--'/'++' also accepted.
    _charge = ('-' + Word('123456789',exact=1)*(0,1)) | ('+' + Word('123456789',exact=1)*(0,1)) | Literal('--') | Literal('++')
    #_charge.setParseAction(''.join)
    # Atom Class section
    _class = ':' + Word(nums,min=1)
    # Bracketed Atom definition: [isotope? symbol chiral? hcount? charge? class?]
    _bracket_atom = '[' + _isotope.setResultsName('isotope')*(0,1) \
                    + _symbol.setResultsName('symbol') \
                    + _chiral.setResultsName('chiral')*(0,1) \
                    + _hcount.setResultsName('hcount')*(0,1) \
                    + _charge.setResultsName('charge')*(0,1) \
                    + _class.setResultsName('_class')*(0,1) \
                    + ']'
    #_bracket_atom.setResultsName('bracket_atom')
    # Atom definition
    #_atom = _aliphatic_organic | _aromatic_organic | _bracket_atom | Literal('*').setResultsName('symbol')
    _atom = _aliphatic_organic \
            | _aromatic_organic \
            | _bracket_atom.setResultsName('bracket_atom') \
            | Literal('*').setResultsName('symbol')
    #def addRawStr(toks):
    #    toks['rawStr']=''.join(toks)
    #    return toks
    #_atom.setParseAction(addRawStr)
    _atom.leaveWhitespace()
    #_atom.setParseAction(''.join)
    #_atom.setParseAction(lambda locn,tokens: (locn,''.join(tokens[:])))
    ### BOND SECTION ###
    # Single bond character: - = # : \ or /.
    _bond = Word('-=#:\/',exact=1)
    _bond.leaveWhitespace()
    #_bond.setParseAction(addRawStr)
    #_ringbond = _bond*(0,1) + \
    #            (Word(nums,exact=1).setParseAction(lambda tok:[''.join(tok)] ) | \
    #            (Literal('%')+Word(nums,exact=2).setResultsName('ringid')).setParseAction(lambda tok:[''.join(tok[:])] ) )
    # Ring-closure: optional bond type, then a single digit or %NN two-digit id.
    _ringbond = (_bond*(0,1)).setResultsName('ringbondtype') + \
                (Word(nums,exact=1).setResultsName('ringid') | \
                 Literal('%')+Word(nums,exact=2).setResultsName('ringid') )
    _ringbond.leaveWhitespace()
    #_ringbond.setParseAction(addRawStr)
    # Dot = disconnected-structure separator.
    _dot = Literal('.')
    #_dot.setParseAction(addRawStr)
    # One SMILES token: ringbond tried before bare bond (so '-1' is a ringbond).
    _smilesChar = _ringbond.setResultsName('ringbond') | _bond.setResultsName('bond') \
                  | _atom.setResultsName('atom') | _dot.setResultsName('dot')
    # Parenthesized branch: contents re-joined to text and re-wrapped in parens.
    _branchContent = _smilesChar*(1,None)
    _branchContent.setParseAction(lambda toks: ''.join(toks))
    _branch = nestedExpr('(',')',content=_branchContent)
    _branch.setParseAction(lambda toks: '('+''.join([str(item) for sublist in toks for item in sublist])+')')
    # Top-level element: any token or a branch; rawStr records the matched text.
    _smilesElement = _smilesChar | _branch.setResultsName('branch')
    _smilesElement.setParseAction(addRawStr)
    #_ringbond.setParseAction(lambda tok:tok[0],tok[1])
    #_bonds = (_bond | _ringbond)*(0,1)
    #_bonds.setParseAction(''.join)
if __name__ == "__main__":
    # Smoke test: parse the first SMILES element of a string and report which
    # results-name (dot/branch/bond/ringbond/atom) it matched.
    # FIX(review): the original assigned testStr twice; the first value
    # ('[12C]/[OH](C(CC(C)C)C)[CH2](C)(C)CC') was dead code, immediately
    # overwritten by the line below. Kept here as a comment for reference.
    testStr = 'F/C=C/F'
    pattern = SmilesPattern._smilesElement
    res = pattern.parseString(testStr)
    if 'dot' in res.keys():
        print('dot')
    elif 'branch' in res.keys():
        print('branch')
    elif 'bond' in res.keys():
        print('bond')
    elif 'ringbond' in res.keys():
        print('ringbond')
    else:
        print('atom')
    print(res.rawStr)
|
#!/bin/bash
# Truncate the main system log files (root privileges are needed).
# FIX(review): the original `cd /var/log` was unchecked — if it failed, the
# script would truncate files named "messages"/"wtmp" in whatever directory
# it was launched from. Abort instead (shellcheck SC2164).
cd /var/log || exit 1
# `: >` truncates in place, preserving each file's owner and permissions.
: > messages
: > wtmp
echo "Лог-файлы очищены."
|
def celsius_to_fahrenheit(celsius):
    """Convert a temperature from degrees Celsius to degrees Fahrenheit.

    Uses the standard conversion F = C * 1.8 + 32.
    """
    scaled = celsius * 1.8
    return scaled + 32
|
<filename>SQL/HowToSQL.sql
/* List of best-selling games
* from the Microsoft Store
*/
-- Create
-- NOTE(review): throughout this script string literals use double quotes,
-- which is a SQLite extension; standard SQL reserves "..." for identifiers
-- and uses '...' for strings. Works in SQLite, but not portable.
CREATE TABLE IF NOT EXISTS Games (
    GameID INTEGER PRIMARY KEY AUTOINCREMENT,
    Name TEXT NOT NULL,
    Ratings INTEGER,
    Price REAL DEFAULT 0        -- 0 means free-to-play
);
CREATE TABLE IF NOT EXISTS Reviews (
    ReviewID INTEGER PRIMARY KEY AUTOINCREMENT,
    GameID INTEGER REFERENCES Games(GameID), -- foreign key
    Day DATE,
    Title TEXT,                 -- optional; some reviews have no title
    Stars INTEGER NOT NULL,
    Review TEXT
);
-- Seed games; GameID is assigned by AUTOINCREMENT, reviews look it up by Name.
INSERT INTO Games (Name, Ratings)
VALUES ("Phantasy Star Online 2", 939);
INSERT INTO Games (Name, Ratings, Price)
VALUES ("Minecraft", 57636, 29.99);
INSERT INTO Games (Name, Ratings, Price)
VALUES ("ROBLOX", 1939, 0);
INSERT INTO Games (Name, Ratings, Price)
VALUES ("Microsoft Flight Simulator", 51, 119.99);
INSERT INTO Games (Name, Ratings, Price)
VALUES ("Candy Crush Soda Saga", 1996, 0);
INSERT INTO Games (Name, Ratings, Price)
VALUES ("Caesars Casino", 1837, 0);
-- Seed reviews; the subquery resolves the parent GameID by game name.
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "Phantasy Star Online 2"),
    "2020-07-08",
    "garbage store",
    1,
    "Microsoft store doesn't work for ****"
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "Phantasy Star Online 2"),
    "2020-08-03",
    "Store issue",
    5,
    "The game itself is amazing. It's just the Microsoft Store breaks the game sometimes"
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "Phantasy Star Online 2"),
    "2020-07-24",
    "garbo",
    1,
    "Deserves nothing more than one star if you give it more you are not very smart. That's why games don't get fixed. Worst launch in history lol. if I restart my pc it uninstalls the game Update: Still broken unfortunately, talked to Microsoft in a call and chat, even screen shared to get them to uninstall the game, after they failed (which led to another of many restores) they blamed sega for the issues, don't spend money like I did spend quite a bit and also just got prem because seemed like the game was fine then it broke again of course and now cant seem to play at all. ac is non refundable so ya know."
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "Minecraft"),
    "2020-08-14",
    "our family favorite",
    5,
    "this game is awesome because I can play, and my 7 & 6 year old can play...and we all have fun!! They can play together or by themselves...but the best is that we can all 3 play together. It makes for hours of fun. The cross platform is great. I play on a laptop, 1 kid on a switch, the other kid on a tablet or ps4. It's the best hours of pass time during quarantine for us. Especially because the game doesn't get boring...you can build endlessly, or work for it in survival. This game actually even taught my 7 year old to read and write lol. From wanting to chat so bad, she eventually learned how to read it and type it. It boosts their creativity, and my youngest has gained confidence of being able to learn and teach mommy some things. I have not one bad thing to say!!"
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "Minecraft"),
    "2020-08-12",
    "Minecraft a holy experience.",
    5,
    "This game is the pure embodiment of heavenly light and holy entertainment. It must have be forged in the heart of a dying star. This game is the purest form of gaming to ever exist. I have been blessed to have received this game as a gift. Bless Mojang, Microsoft, Notch and every one who has ever worked on this game. I am honored to have played this game as it has changed my life for good. Thank you, and god bless you all."
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "Minecraft"),
    "2020-08-13",
    "Amazing",
    5,
    "its better then fortnite"
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "ROBLOX"),
    "2020-06-11",
    "VERY GOOD BUT SOME SMALL PROBLEMS",
    5,
    "BEST GAME EVER I PLAY DID 3.5 HOURS EVERYDAY except I HATE the chat area you can't even type NUMBERS??!?!?!?!?!?!!"
);
INSERT INTO Reviews (GameID, Day, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "ROBLOX"),
    "2020-06-10",
    5,
    "THERE IS NEVER BEST GAME AND PRO GAME!!!!!!"
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "ROBLOX"),
    "2020-08-09",
    "AMAZING APP",
    5,
    "I really like this game. It kills time and is really fun."
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "Candy Crush Soda Saga"),
    "2020-08-06",
    "Wonderful",
    5,
    "Love it. Being home bound I play a lot."
);
INSERT INTO Reviews (GameID, Day, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "Candy Crush Soda Saga"),
    "2020-07-30",
    5,
    "fun"
);
-- NOTE(review): the next four inserts look up Name = "<NAME>" — a scrubbed
-- placeholder (likely "Caesars Casino"). As written the subquery returns NULL
-- GameIDs; restore the real name before running.
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "<NAME>"),
    "2020-08-14",
    "Great Fun kinda got harder",
    5,
    "Running out of turns lol"
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "<NAME>"),
    "2019-06-18",
    "GOOD TIMES",
    5,
    "FUN FUN FUN"
);
INSERT INTO Reviews (GameID, Day, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "<NAME>"),
    "2019-06-05",
    5,
    "Great graphics"
);
INSERT INTO Reviews (GameID, Day, Title, Stars, Review)
VALUES (
    (SELECT GameID FROM Games WHERE Name = "<NAME>"),
    "2019-05-19",
    "Can't win if you don't play!",
    5,
    "The different slots or great!"
);
-- Read
SELECT * FROM Games;
SELECT * FROM Reviews
LIMIT 5;
-- Filter + alias + sort example.
SELECT Name, Ratings AS "Reviews"
FROM Games
WHERE Ratings BETWEEN 1000 AND 3000
OR Price = 0
ORDER BY Ratings DESC;
-- Pattern-matching example (LIKE is case-insensitive for ASCII in SQLite).
SELECT GameID, Stars, Review
FROM Reviews
WHERE Review LIKE "%great%";
-- Aggregation examples: count and average per game.
SELECT GameID, COUNT(*) AS "TotalReviews"
FROM Reviews
GROUP BY GameID
HAVING COUNT(*) > 0;
SELECT GameID, AVG(stars) AS "AverageRating"
FROM Reviews
GROUP BY GameID;
-- Join example: only reviews that have a title.
SELECT Games.Name, Games.Price, Reviews.Day, Reviews.Title, Reviews.Stars
FROM Games
JOIN Reviews -- inner join
ON Games.GameID = Reviews.GameID
WHERE Title IS NOT NULL;
-- Update
UPDATE Reviews
SET Stars = 3
WHERE Title = "Great Fun kinda got harder";
-- Delete
-- WARNING(review): the statements below are destructive — they remove all
-- untitled reviews and then drop both tables. Intended as tutorial cleanup.
DELETE FROM Reviews
WHERE Title IS NULL;
DROP TABLE Games;
DROP TABLE Reviews;
|
package org.softuni.residentevil.service;
import org.modelmapper.ModelMapper;
import org.softuni.residentevil.domain.api.Bindable;
import org.softuni.residentevil.domain.api.Identifiable;
import org.softuni.residentevil.domain.api.Viewable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.validation.annotation.Validated;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.lang.reflect.ParameterizedType;
import java.util.List;
import java.util.Optional;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
* Abstract class, implements common functionality for services
*
* @param <E> Entity
* @param <I> ID class used by Entity
* @param <R> Repository for Entity
*/
@Validated
@Transactional
/**
 * Base implementation of {@code Service} that supplies the CRUD plumbing
 * shared by every concrete service: mapping between binding/view models and
 * entities via {@link ModelMapper}, delegation to the Spring Data repository,
 * and transaction demarcation (writes by default, reads marked read-only).
 *
 * @param <E> entity type managed by the service
 * @param <I> identifier type of the entity
 * @param <R> repository type for the entity
 */
@Validated
@Transactional
abstract class BaseService<E extends Identifiable<I>, I, R extends JpaRepository<E, I>> implements Service<E, I> {

    protected final R repository;
    protected final ModelMapper mapper;
    /** Concrete entity class, resolved once from the subclass's generic declaration. */
    private final Class<E> entityClass;

    protected BaseService(R repository,
                          ModelMapper mapper) {
        this.repository = repository;
        this.mapper = mapper;
        this.entityClass = resolveEntityClass();
    }

    /** Logger belonging to the concrete service subclass. */
    protected abstract Logger logger();

    @Override
    public <B extends Bindable<E>> void create(@NotNull @Valid B bindingModel) {
        // Discard the persisted entity; callers of create() only want the side effect.
        persist(bindingModel);
    }

    @Override
    public <B extends Bindable<E>, V extends Viewable<E>>
    V createAndGet(@NotNull @Valid B bindingModel, @NotNull Class<V> viewModelClass) {
        // Persist first, then project the saved entity into the requested view model.
        return mapper.map(persist(bindingModel), viewModelClass);
    }

    @Override
    @Transactional(readOnly = true)
    public <V extends Viewable<E>> Optional<V> findById(@NotNull I id, @NotNull Class<V> viewModelClass) {
        return repository.findById(id)
                .map(found -> mapper.map(found, viewModelClass));
    }

    @Override
    @Transactional(readOnly = true)
    public <V extends Viewable<E>> List<V> findAll(@NotNull Class<V> viewModelClass) {
        return repository.findAll()
                .stream()
                .map(found -> mapper.map(found, viewModelClass))
                .collect(Collectors.toList());
    }

    @Override
    public boolean deleteById(@NotNull I id) {
        // True when a row existed and was removed.
        return removeById(id).isPresent();
    }

    @Override
    public <V extends Viewable<E>> Optional<V> deleteByIdAndGet(@NotNull I id, @NotNull Class<V> viewModelClass) {
        return removeById(id)
                .map(removed -> mapper.map(removed, viewModelClass));
    }

    /** Maps the binding model onto a new entity and saves it. */
    private <B extends Bindable<E>> E persist(B bindingModel) {
        return repository.save(mapper.map(bindingModel, entityClass));
    }

    /** Deletes the entity with the given id, if present, and returns it. */
    private Optional<E> removeById(I id) {
        return repository.findById(id)
                .map(existing -> {
                    repository.delete(existing);
                    return existing;
                });
    }

    /**
     * Reads the first type argument of the direct generic superclass.
     * Only valid when the concrete service extends BaseService directly
     * with explicit type arguments.
     */
    @SuppressWarnings("unchecked")
    private Class<E> resolveEntityClass() {
        return (Class<E>) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0];
    }
}
|
import itertools

# Build a dict by pairing keys with values positionally.
# itertools.izip existed only in Python 2 and was removed in Python 3;
# the built-in zip is the equivalent (lazy in Python 3, which dict() consumes).
d = dict(zip(the_keys, the_values))
|
<filename>radiaTest-server/server/schema/framework.py
from typing import Optional
from pydantic import BaseModel, validator
from pydantic.networks import HttpUrl
from server.schema.base import PermissionBase
from server.schema import PermissionType
class FrameworkBase(PermissionBase):
    """Creation/base schema for a test framework record.

    Frameworks are shared resources, so the inherited ``permission_type``
    is constrained to ``"public"`` by the validator below.
    """

    name: str           # framework display name
    url: HttpUrl        # source/homepage URL of the framework
    logs_path: str      # path where the framework writes its logs
    adaptive: bool = False  # whether the framework supports adaptive mode

    @validator("permission_type")
    def check_permission_type(cls, v):
        # Frameworks must always be publicly scoped; reject any other scope.
        if v != "public":
            raise ValueError("framework's permission_type must be public.")
        return v
class FrameworkQuery(BaseModel):
    """Query/filter schema for listing frameworks; every field is optional
    so any subset can be used as a filter."""

    name: Optional[str]
    url: Optional[HttpUrl]
    logs_path: Optional[str]
    adaptive: Optional[bool]
class GitRepoBase(PermissionBase):
    """Creation/base schema for a git repository tied to a framework.

    Repositories are group-scoped resources, enforced by the validator below.
    """

    name: str             # repository display name
    git_url: HttpUrl      # clone/browse URL of the repository
    sync_rule: bool = True  # whether the repo is kept in sync automatically
    framework_id: int     # id of the framework this repo belongs to

    @validator("permission_type")
    def check_permission_type(cls, v):
        # Git repos must always be group scoped; reject any other scope.
        if v != "group":
            raise ValueError("gitrepo's permission_type must be group.")
        return v
class GitRepoQuery(BaseModel):
    """Query/filter schema for listing git repositories; every field is
    optional so any subset can be used as a filter."""

    name: Optional[str]
    git_url: Optional[HttpUrl]
    sync_rule: Optional[bool]
    framework_id: Optional[int]
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_5k = void 0;
var ic_5k = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M19 3H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zm-8 7.5H8v1h2c.55 0 1 .45 1 1V14c0 .55-.45 1-1 1H6.5v-1.5h3v-1h-3V9H11v1.5zm7 4.5h-1.75l-1.75-2.25V15H13V9h1.5v2.25L16.25 9H18l-2.25 3L18 15z"
},
"children": []
}]
};
exports.ic_5k = ic_5k;
|
#!/bin/bash
# Periodically refresh SpamAssassin rules and signal spamd to reload.
while true; do
  sleep 1m
  # sa-update exits 0 only when new rules were actually installed; only in
  # that case HUP spamd so it picks up the new rule set. $(...) replaces the
  # legacy backtick form and the pid-file read is quoted.
  su debian-spamd -c 'sa-update' && kill -HUP "$(cat /var/run/spamd.pid)"
  sleep 1d
done
|
/**
* Copyright 2021 The IcecaneDB Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package frontend
var (
	// Compile-time assertion that *SelectStatement implements Statement.
	_ Statement = (*SelectStatement)(nil)
)
// SelectStatement is the AST node representing a SQL SELECT query.
type SelectStatement struct {
	Selections []*SelectionItem // projected columns/expressions
	Distinct   bool             // true when SELECT DISTINCT was parsed
	From       FromItem         // the table/join the query reads from
	Where      Expression       // must evaluate to a boolean
	Limit      Expression       // must evaluate to an integer
}
// Accept implements the visitor hook for SELECT statements.
// It is not implemented yet and always panics. The receiver is named ss
// (the original "cts" was a copy-paste from another statement type), and
// the panic now carries a diagnostic message instead of an empty string.
func (ss *SelectStatement) Accept(v Visitor) (node Node, ok bool) {
	panic("frontend: SelectStatement.Accept not implemented")
}
func (cts *SelectStatement) statement() {}
|
# Copyright 2015 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Scratch and install locations for the host (build-machine) binutils pass.
HOST_BUILD_DIR=${WORK_DIR}/build_host
HOST_INSTALL_DIR=${WORK_DIR}/install_host
# getrlimit is unavailable under NaCl; force configure to skip it.
export ac_cv_func_getrlimit=no
# NaCl entry point: rename main so nacl_main can wrap it.
export EXTRA_AM_CPPFLAGS="-Dmain=nacl_main"
# Libraries needed to run the tools as PPAPI/NaCl executables.
export EXTRA_LIBS="${NACL_CLI_MAIN_LIB} -lppapi_simple \
  -lnacl_io -lppapi -l${NACL_CXX_LIB}"
# Build a native (host) avr-targeted binutils first; the cross build below
# needs its tools on PATH. The NaCl-specific EXTRA_* flags are cleared so the
# host pass compiles as an ordinary native build.
BuildHostBinutils() {
  MakeDir ${HOST_BUILD_DIR}
  ChangeDir ${HOST_BUILD_DIR}
  CC="gcc" EXTRA_LIBS="" EXTRA_AM_CPPFLAGS="" \
    LogExecute ${SRC_DIR}/configure --prefix=${HOST_INSTALL_DIR} \
    --target=avr \
    --disable-nls
  EXTRA_LIBS="" EXTRA_AM_CPPFLAGS="" LogExecute make
  EXTRA_LIBS="" EXTRA_AM_CPPFLAGS="" LogExecute make install
}
# Configure the NaCl cross build: first build host binutils, put it on PATH,
# then run the standard naclports configure with avr-specific arguments.
ConfigureStep() {
  ChangeDir ${SRC_DIR}
  BuildHostBinutils
  export PATH="${HOST_INSTALL_DIR}/bin:${PATH}"
  ChangeDir ${BUILD_DIR}
  EXTRA_CONFIGURE_ARGS="\
    --target=avr \
    --disable-nls \
    --disable-werror \
    --enable-deterministic-archives \
    --without-zlib"
  DefaultConfigureStep
}
# Standard build, with CONFIG_SITE exported so sub-configures see the same
# cached configure answers.
BuildStep() {
  export CONFIG_SITE
  DefaultBuildStep
}
# Publish each built tool (binutils, gas, ld) under its canonical name and
# generate a .nmf manifest for each. Non-glob variable expansions are quoted;
# the create_nmf argument is left unquoted on purpose so the *${NACL_EXEEXT}
# glob expands.
PublishStep() {
  MakeDir "${PUBLISH_DIR}"
  for nexe in binutils/*.nexe gas/*.nexe ld/*.nexe; do
    # Strip the "-new" suffix binutils gives freshly built binaries.
    local name=$(basename "${nexe}" .nexe | sed 's/-new//')
    cp "${nexe}" "${PUBLISH_DIR}/${name}_${NACL_ARCH}${NACL_EXEEXT}"
    pushd "${PUBLISH_DIR}"
    LogExecute python "${NACL_SDK_ROOT}/tools/create_nmf.py" \
      ${PUBLISH_DIR}/${name}_*${NACL_EXEEXT} \
      -s . \
      -o "${name}.nmf"
    popd
  done
  DefaultPublishStep
}
|
import React from 'react';
import selectedReducer from '../reducers';
import types from '../../SmartComponents/modules/types';
import fixtures from './reducer.fixtures';
import { ServerIcon } from '@patternfly/react-icons';
import DriftTooltip from '../../SmartComponents/DriftTooltip/DriftTooltip';
describe('compare reducer', () => {
let reducer;
let inventoryActions;
beforeEach(() => {
inventoryActions = { LOAD_ENTITIES_FULFILLED: 'LOAD_ENTITIES_FULFILLED' },
reducer = selectedReducer(inventoryActions);
});
it('should handle LOAD_ENTITIES_FULFILLED', () => {
reducer = selectedReducer(
inventoryActions,
undefined,
false,
[]
);
expect(
reducer({
columns: fixtures.columns,
rows: fixtures.results,
selectedSystemIds: []
}, {
payload: {
results: fixtures.results
},
type: inventoryActions.LOAD_ENTITIES_FULFILLED
})
).toEqual({
rows: fixtures.results,
columns: fixtures.columns,
selectedSystemIds: [],
selectedSystems: []
});
});
it('should handle LOAD_ENTITIES_FULFILLED with historical dropdown', () => {
reducer = selectedReducer(
inventoryActions
);
expect(
reducer({
columns: fixtures.columnsWithHSP,
rows: fixtures.results,
selectedSystemIds: []
}, {
payload: {
results: fixtures.results
},
type: inventoryActions.LOAD_ENTITIES_FULFILLED
})
).toEqual({
rows: fixtures.results,
columns: fixtures.columnsWithHSP,
selectedSystemIds: [],
selectedSystems: []
});
});
it('should handle SELECT_ENTITY', () => {
expect(
reducer({ rows: [], selectedSystemIds: []}, {
payload:
{
id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9', selected: true
},
type: types.SELECT_ENTITY
})
).toEqual({
rows: [],
columns: undefined,
selectedSystemIds: [
'9c79efcc-8f9a-47c7-b0f2-142ff52e89e9'
],
selectedSystems: []
});
});
it('should handle multiple SELECT_ENTITY', () => {
expect(
reducer({ rows: [], selectedSystemIds: [ '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' ]}, {
payload:
{
id: 'f35b1e1d-d231-43f2-8e4f-8f9cb01e3aa2', selected: true
},
type: types.SELECT_ENTITY
})
).toEqual({
rows: [],
columns: undefined,
selectedSystemIds: [
'9c79efcc-8f9a-47c7-b0f2-142ff52e89e9',
'f35b1e1d-d231-43f2-8e4f-8f9cb01e3aa2'
],
selectedSystems: []
});
});
it('should handle false SELECT_ENTITY', () => {
expect(
reducer({ rows: [], selectedSystemIds: [ '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' ]}, {
payload:
{
id: 'f35b1e1d-d231-43f2-8e4f-8f9cb01e3aa2', selected: false
},
type: types.SELECT_ENTITY
})
).toEqual({
rows: [],
columns: undefined,
selectedSystemIds: [
'9c79efcc-8f9a-47c7-b0f2-142ff52e89e9'
],
selectedSystems: []
});
});
it('should handle null SELECT_ENTITY', () => {
expect(
reducer({ rows: [], selectedSystemIds: []}, {
payload:
{
id: null, selected: true
},
type: types.SELECT_ENTITY
})
).toEqual({
rows: [],
columns: undefined,
selectedSystemIds: [],
selectedSystems: []
});
});
it('should handle null SELECT_ENTITY with selected ids', () => {
expect(
reducer({ rows: [], selectedSystemIds: [ '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' ]}, {
payload:
{
id: null, selected: true
},
type: types.SELECT_ENTITY
})
).toEqual({
rows: [],
columns: undefined,
selectedSystemIds: [
'9c79efcc-8f9a-47c7-b0f2-142ff52e89e9'
],
selectedSystems: []
});
});
it('should handle 0 SELECT_ENTITY', () => {
expect(
reducer({ selectedSystemIds: [], rows: []}, {
payload:
{
id: 0, selected: true
},
type: types.SELECT_ENTITY
})
).toEqual({
columns: undefined,
selectedSystemIds: [],
rows: [],
selectedSystems: []
});
});
it('should handle 0 SELECT_ENTITY with selected ids', () => {
expect(
reducer({ selectedSystemIds: [ '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' ], rows: []}, {
payload:
{
id: 0, selected: true
},
type: types.SELECT_ENTITY
})
).toEqual({
columns: undefined,
selectedSystemIds: [
'9c79efcc-8f9a-47c7-b0f2-142ff52e89e9'
],
rows: [],
selectedSystems: []
});
});
it('should handle 0 SELECT_ENTITY false', () => {
expect(
reducer({
selectedSystemIds: [ '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9', '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1' ],
rows: [{ id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' }, { id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1' }]
}, {
payload:
{
id: 0, selected: false
},
type: types.SELECT_ENTITY
})
).toEqual({
columns: undefined,
selectedSystemIds: [],
rows: [{ id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' }, { id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1' }],
selectedSystems: []
});
});
it('should handle 0 SELECT_ENTITY false on without bulk select', () => {
expect(
reducer({
selectedSystemIds: [
'9c79efcc-8f9a-47c7-b0f2-142ff52e89e9',
'9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1',
'2c84bfvc-8t9q-52r9-b4c3-847fg51l09e1'
],
rows: [{ id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' }, { id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1' }]
}, {
payload:
{
id: 0, selected: false
},
type: types.SELECT_ENTITY
})
).toEqual({
columns: undefined,
selectedSystemIds: [ '2c84bfvc-8t9q-52r9-b4c3-847fg51l09e1' ],
rows: [{ id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' }, { id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1' }],
selectedSystems: []
});
});
it('should handle 0 SELECT_ENTITY false on bulk select', () => {
expect(
reducer({
selectedSystemIds: [
'9c79efcc-8f9a-47c7-b0f2-142ff52e89e9',
'9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1',
'2c84bfvc-8t9q-52r9-b4c3-847fg51l09e1'
],
rows: [{ id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' }, { id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1' }]
}, {
payload:
{
id: 0, selected: false, bulk: true
},
type: types.SELECT_ENTITY
})
).toEqual({
columns: undefined,
selectedSystemIds: [],
rows: [{ id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' }, { id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1' }],
selectedSystems: []
});
});
it('should set row to selected if in selectedSystemIds but not selected', () => {
expect(
reducer({
selectedSystemIds: [
'9c79efcc-8f9a-47c7-b0f2-142ff52e89e9'
],
rows: [
{ id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9', selected: undefined },
{ id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1', selected: undefined }
]
}, {
payload:
{
id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1', selected: false
},
type: types.SELECT_ENTITY
})
).toEqual({
columns: undefined,
selectedSystemIds: [ '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9' ],
rows: [
{ id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9', selected: true },
{ id: '9c83bfcc-8t7a-47c7-b4r2-142fg52e89e1', selected: undefined }
],
selectedSystems: [{
id: '9c79efcc-8f9a-47c7-b0f2-142ff52e89e9',
name: undefined,
icon: <DriftTooltip
content='System'
body={ <ServerIcon /> }
/>
}]
});
});
});
|
#!/bin/bash -x
# -e " Exit immediately if a command exits with a non-zero status.
# -x Print commands and their arguments as they are executed.
# Defaults for the CLI end-to-end test run; each can be overridden from the
# environment. CLUSTER_DEFINITION_URL has no default and must be provided.
: ${BASE_URL:=https://127.0.0.1}
: ${USERNAME_CLI:=admin@example.com}
: ${PASSWORD_CLI:=cloudbreak}
: ${CLI_TEST_FILES:=spec/integration/*.rb}
: ${CLUSTER_DEFINITION_URL:? required}
# Name of the disposable container the suite runs in.
readonly TEST_CONTAINER_NAME=cli-test-runner
# Pull the latest test-runner image from Docker Hub.
image-update() {
    declare desc="Refresh the Test Runner Docker image"
    docker pull hortonworks/cloud-cli-e2e
}
# Remove leftover containers and stale images from previous runs.
image-cleanup() {
    declare desc="Removes all exited containers and old images"
    container-remove-stuck
    container-remove-exited
    # Only invoke `docker rmi` when dangling images exist; with an empty
    # argument list docker rmi errors out (the original always called it).
    local dangling=$(docker images -q -f dangling=true)
    if [[ -n "$dangling" ]]; then
        docker rmi $dangling
    fi
    # Drop every locally cached cloud-cli-e2e tag so the next pull is fresh.
    docker images | grep cloud-cli-e2e | tr -s ' ' | cut -d ' ' -f 2 | xargs -I {} docker rmi hortonworks/cloud-cli-e2e:{}
}
# Remove all containers in Exited or Dead state, if any.
container-remove-exited() {
    declare desc="Remove Exited or Dead containers"
    local exited_containers=$(docker ps -a -f status=exited -f status=dead -q)
    if [[ -n "$exited_containers" ]]; then
        echo "Remove Exited or Dead docker containers"
        docker rm $exited_containers;
    else
        echo "There is no Exited or Dead container"
    fi
}
# Force-remove a test-runner container left running by a previous aborted run.
container-remove-stuck() {
    declare desc="Checking $TEST_CONTAINER_NAME container is running"
    # docker inspect prints "true" for a running container; stderr is
    # discarded so a missing container is treated as "not running".
    if [[ "$(docker inspect -f {{.State.Running}} $TEST_CONTAINER_NAME 2> /dev/null)" == "true" ]]; then
        echo "Delete the running " $TEST_CONTAINER_NAME " container"
        docker rm -f $TEST_CONTAINER_NAME
    fi
}
# Determine the Cloudbreak (CBD) version under test. If TARGET_CBD_VERSION is
# not already set, query the deployment's /cb/info endpoint; fall back to
# MOCK when nothing is reachable. The useless `$(echo $VAR)` indirection from
# the original is removed and expansions are quoted.
cbd-version() {
    if [[ -z "$TARGET_CBD_VERSION" ]]; then
        export TARGET_CBD_VERSION=$(curl -sk "$BASE_URL/cb/info" | grep -oP "(?<=\"version\":\")[^\"]*")
        if [[ -z "$TARGET_CBD_VERSION" ]]; then
            export TARGET_CBD_VERSION=MOCK
        fi
    fi
    echo "CBD version: $TARGET_CBD_VERSION"
}
# Run the end-to-end suite inside the cloud-cli-e2e container: host network,
# privileged (for nested docker), with the workspace, fixture dirs, CLI
# binary, and entrypoint bind-mounted, and every credential/endpoint passed
# through as environment variables. The container exit code is captured in
# the global RESULT for main() to propagate.
test-regression() {
    docker run -i \
        --rm \
        --privileged \
        --net=host \
        --name $TEST_CONTAINER_NAME \
        -v $(pwd):/aruba \
        -v $(pwd)/tmp/responses:/responses \
        -v $(pwd)/requests:/requests \
        -v $(pwd)/../build/Linux:/usr/local/bin \
        -v $(pwd)/scripts/aruba-docker.sh:/entrypoint.sh \
        -e "BASE_URL=$BASE_URL" \
        -e "USERNAME_CLI=$USERNAME_CLI" \
        -e "PASSWORD_CLI=$PASSWORD_CLI" \
        -e "OS_V2_ENDPOINT=$OS_V2_ENDPOINT" \
        -e "OS_V2_USERNAME=$OS_V2_USERNAME" \
        -e "OS_V2_PASSWORD=$OS_V2_PASSWORD" \
        -e "OS_V2_TENANT_NAME=$OS_V2_TENANT_NAME" \
        -e "OS_V3_ENDPOINT=$OS_V3_ENDPOINT" \
        -e "OS_V3_USERNAME=$OS_V3_USERNAME" \
        -e "OS_V3_PASSWORD=$OS_V3_PASSWORD" \
        -e "OS_V3_KEYSTONE_SCOPE=$OS_V3_KEYSTONE_SCOPE" \
        -e "OS_V3_USER_DOMAIN=$OS_V3_USER_DOMAIN" \
        -e "OS_V3_PROJECT_NAME=$OS_V3_PROJECT_NAME" \
        -e "OS_V3_PROJECT_DOMAIN=$OS_V3_PROJECT_DOMAIN" \
        -e "OS_APIFACING=$OS_APIFACING" \
        -e "OS_REGION=$OS_REGION" \
        -e "AWS_ROLE_ARN=$AWS_ROLE_ARN" \
        -e "TARGET_CBD_VERSION=$TARGET_CBD_VERSION" \
        -e "INTEGRATIONTEST_RDSCONFIG_RDSUSER=$INTEGRATIONTEST_RDSCONFIG_RDSUSER" \
        -e "INTEGRATIONTEST_RDSCONFIG_RDSPASSWORD=$INTEGRATIONTEST_RDSCONFIG_RDSPASSWORD" \
        -e "INTEGRATIONTEST_RDSCONFIG_RDSCONNECTIONURL=$INTEGRATIONTEST_RDSCONFIG_RDSCONNECTIONURL" \
        -e "INTEGRATIONTEST_LDAPCONFIG_LDAPSERVERHOST=$INTEGRATIONTEST_LDAPCONFIG_LDAPSERVERHOST" \
        -e "INTEGRATIONTEST_LDAPCONFIG_BINDPASSWORD=$INTEGRATIONTEST_LDAPCONFIG_BINDPASSWORD" \
        -e "INTEGRATIONTEST_PROXYCONFIG_PROXYHOST=$INTEGRATIONTEST_PROXYCONFIG_PROXYHOST" \
        -e "INTEGRATIONTEST_PROXYCONFIG_PROXYUSER=$INTEGRATIONTEST_PROXYCONFIG_PROXYUSER" \
        -e "INTEGRATIONTEST_PROXYCONFIG_PROXYPASSWORD=$INTEGRATIONTEST_PROXYCONFIG_PROXYPASSWORD" \
        -e "CLI_TEST_FILES=$CLI_TEST_FILES" \
        -e "CLUSTER_DEFINITION_URL=$CLUSTER_DEFINITION_URL" \
        hortonworks/cloud-cli-e2e
    RESULT=$?
}
# Entry point: clean old artifacts, refresh the image, resolve the target
# version, run the suite, and exit with the suite's exit code (RESULT is set
# by test-regression).
main() {
    image-cleanup
    image-update
    cbd-version
    test-regression
    exit $RESULT
}
main "$@"
|
#!/bin/sh
# ppm's shared libraries are installed under /usr/local/lib.
export LD_LIBRARY_PATH=/usr/local/lib
# Start the DI tool.
# -m: road map CSV  -c: config file (chosen via PPM_CONFIG_FILE)
# -b: broker address host:port (presumably Kafka on 9092 — confirm).
# Expansions are quoted so unset/odd-valued variables cannot split the args.
/cvdi-stream-build/ppm -m /ppm_data/road_file.csv -c "/ppm_data/${PPM_CONFIG_FILE}" -b "${DOCKER_HOST_IP}:9092"
|
import Web3 from "web3";
export default new Web3(Web3.givenProvider || new Web3.providers.HttpProvider('https://rinkeby.infura.io/'));
|
#!/usr/bin/env bash
set -e
set -u
set -o pipefail
# Locate this script, the shared test helpers, and the pwncat binary.
SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
SOURCEPATH="${SCRIPTPATH}/../../.lib/conf.sh"
BINARY="${SCRIPTPATH}/../../../bin/pwncat"
# shellcheck disable=SC1090
source "${SOURCEPATH}"
# -------------------------------------------------------------------------------------------------
# GLOBALS
# -------------------------------------------------------------------------------------------------
# Positional overrides: 1=proxy host, 2=proxy port, 3=server startup wait (s),
# 4=number of rounds, 5=python version suffix (e.g. "3" -> python3).
RHOST="${1:-localhost}"
RPORT="${2:-4444}"
STARTUP_WAIT="${3:-4}"
RUNS="${4:-1}"
PYTHON="python${5:-}"
PYVER="$( "${PYTHON}" -V 2>&1 | head -1 || true )"
# Upstream target the proxy forwards to.
THOST="www.google.com"
TPORT="80"
# -------------------------------------------------------------------------------------------------
# TEST FUNCTIONS
# -------------------------------------------------------------------------------------------------
print_test_case "${PYVER}"
# Run one proxy round: start a pwncat server proxying RHOST:RPORT->THOST:TPORT,
# drive one direct client and two proxied clients through an HTTP HEAD
# request, assert transfer and error-free operation, then tear down.
# Fix: the Client-2 and Client-3 Set-Cookie wait calls were labeled
# "Client-1" (copy-paste), which mislabeled failures in the logs.
run_test() {
	local srv_opts="${1// / }"
	local cli_opts="${2// / }"
	local curr_mutation="${3}"
	local total_mutation="${4}"
	local curr_round="${5}"
	local total_round="${6}"
	local data=

	print_h1 "[ROUND: ${curr_round}/${total_round}] (mutation: ${curr_mutation}/${total_mutation}) Starting Test Round (srv '${srv_opts}' vs cli '${cli_opts}')"
	run "sleep 1"

	###
	### Create data and files
	###
	data="$(tmp_file)"
	printf "HEAD / HTTP/1.1\\n\\n" > "${data}"
	srv_stdout="$(tmp_file)"
	srv_stderr="$(tmp_file)"
	cli1_stdout="$(tmp_file)"
	cli1_stderr="$(tmp_file)"
	cli2_stdout="$(tmp_file)"
	cli2_stderr="$(tmp_file)"
	cli3_stdout="$(tmp_file)"
	cli3_stderr="$(tmp_file)"

	# --------------------------------------------------------------------------------
	# START: SERVER
	# --------------------------------------------------------------------------------
	print_h2 "(1/5) Start: Server"

	# Start Server
	print_info "Start Server"
	# shellcheck disable=SC2086
	if ! srv_pid="$( run_bg "" "${PYTHON}" "${BINARY}" ${srv_opts} "${srv_stdout}" "${srv_stderr}" )"; then
		printf ""
	fi

	# Wait until Server is up
	run "sleep ${STARTUP_WAIT}"

	# [SERVER] Ensure Server is running
	test_case_instance_is_running "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"

	# [SERVER] Ensure Server has no errors
	test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"

	# --------------------------------------------------------------------------------
	# START: CLIENT-1
	# --------------------------------------------------------------------------------
	print_h2 "(2/5) Start: Client-1 (without Proxy)"

	# Start Client
	print_info "Start Client-1"
	# shellcheck disable=SC2086
	if ! cli_pid="$( run_bg "cat ${data}" "${PYTHON}" "${BINARY}" ${cli_opts} "${THOST}" "${TPORT}" "${cli1_stdout}" "${cli1_stderr}" )"; then
		printf ""
	fi
	test_case_instance_is_running "Client-1" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
	wait_for_data_transferred "^Content-Type:" "" "" "Client-1" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}"
	wait_for_data_transferred "^Set-Cookie:" "" "" "Client-1" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}"
	test_case_instance_has_no_errors "Client-1" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
	action_stop_instance "Client-1" "${cli_pid}" "${cli1_stdout}" "${cli1_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"

	# --------------------------------------------------------------------------------
	# START: CLIENT-2
	# --------------------------------------------------------------------------------
	print_h2 "(3/5) Start: Client-2 (with Proxy)"

	# Start Client
	print_info "Start Client-2"
	# shellcheck disable=SC2086
	if ! cli_pid="$( run_bg "cat ${data}" "${PYTHON}" "${BINARY}" ${cli_opts} "${RHOST}" "${RPORT}" "${cli2_stdout}" "${cli2_stderr}" )"; then
		printf ""
	fi
	test_case_instance_is_running "Client-2" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
	wait_for_data_transferred "^Content-Type:" "" "" "Client-2" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}"
	wait_for_data_transferred "^Set-Cookie:" "" "" "Client-2" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}"
	test_case_instance_has_no_errors "Client-2" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
	action_stop_instance "Client-2" "${cli_pid}" "${cli2_stdout}" "${cli2_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"

	# --------------------------------------------------------------------------------
	# START: CLIENT-3
	# --------------------------------------------------------------------------------
	print_h2 "(4/5) Start: Client-3 (with Proxy)"

	# Start Client
	print_info "Start Client-3"
	# shellcheck disable=SC2086
	if ! cli_pid="$( run_bg "cat ${data}" "${PYTHON}" "${BINARY}" ${cli_opts} "${RHOST}" "${RPORT}" "${cli3_stdout}" "${cli3_stderr}" )"; then
		printf ""
	fi
	test_case_instance_is_running "Client-3" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
	wait_for_data_transferred "^Content-Type:" "" "" "Client-3" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
	wait_for_data_transferred "^Set-Cookie:" "" "" "Client-3" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}"
	test_case_instance_has_no_errors "Client-3" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
	action_stop_instance "Client-3" "${cli_pid}" "${cli3_stdout}" "${cli3_stderr}" "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"

	# --------------------------------------------------------------------------------
	# STOP: SERVER
	# --------------------------------------------------------------------------------
	print_h2 "(5/5) Stop: Server"
	action_stop_instance "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
	test_case_instance_has_no_errors "Server" "${srv_pid}" "${srv_stdout}" "${srv_stderr}"
}
# -------------------------------------------------------------------------------------------------
# MAIN ENTRYPOINT
# -------------------------------------------------------------------------------------------------
for curr_round in $(seq "${RUNS}"); do
# server opts client opts
run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vvvv" "-vvvv" "1" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vvv " "-vvvv" "2" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vv " "-vvvv" "3" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -v " "-vvvv" "4" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} " "-vvvv" "5" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vvvv" "-vvv " "6" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vvvv" "-vv " "7" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vvvv" "-v " "8" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vvvv" " " "9" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vvv " "-vvv " "10" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -vv " "-vv " "11" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} -v " "-v " "12" "13" "${curr_round}" "${RUNS}"
#run_test "--local ${RHOST}:${RPORT} ${THOST} ${TPORT} " " " "13" "13" "${curr_round}" "${RUNS}"
done
|
# Infer a gene regulatory network with pySCENIC from the DC expression
# matrix against the curated human TF list, writing TF-target adjacencies.
pyscenic grn \
	./Data/DC_exp.csv \
	./Data/hs_hgnc_curated_tfs.txt \
	--num_workers 8 \
	-o ./Data/exp_matrix.adjacencies.tsv
|
/*
* File: NameSurfer.java
* ---------------------
* When it is finished, this program will implements the viewer for
* the baby-name database described in the assignment handout.
*/
import acm.graphics.GCanvas;
import acm.program.*;
import javafx.scene.control.RadioButton;
import java.awt.Color;
import java.awt.event.*;
import java.util.HashMap;
import javax.swing.*;
public class NameSurferExt extends Program implements NameSurferConstants {

	/* Method: init() */
	/**
	 * This method has the responsibility for reading in the data base and
	 * initializing the interactors at the bottom of the window.
	 */
	private JButton buttonGraph, buttonClear, buttonRemove;
	private JTextField tf;
	private JRadioButton yellow, black, red, blue, graphic, dgram;
	private NameSurferGraphExt graph;
	private NameSurferDiagramExt diagram;
	// Every plotted entry mapped to the colour it was added with.
	private HashMap<NameSurferEntry, Color> entries = new HashMap<>();
	private NameSurferEntry lastEntry;
	private NameSurferDataBase file = new NameSurferDataBase(NAMES_DATA_FILE);

	public void init() {
		diagram = new NameSurferDiagramExt();
		graph = new NameSurferGraphExt();
		addInteractors();
		addActionListeners();
	}

	/** Adds the text field, buttons and radio-button groups to the window. */
	private void addInteractors() {
		add(new JLabel("Name"), SOUTH);
		tf = new JTextField(10);
		add(tf, SOUTH);
		tf.addActionListener(this);
		initButtons();
		initSouthRadioButtons();
		initNorthRadioButtons();
	}

	/** North strip: mutually exclusive Graph/Diagram view selectors. */
	private void initNorthRadioButtons() {
		add(new JLabel("Choose view"), NORTH);
		graphic = new JRadioButton("Graph");
		graphic.addActionListener(this);
		add(graphic, NORTH);
		dgram = new JRadioButton("Diagram");
		dgram.addActionListener(this);
		add(dgram, NORTH);
		ButtonGroup group = new ButtonGroup();
		group.add(graphic);
		group.add(dgram);
	}

	/** South strip: Graph / Remove / Clear action buttons. */
	private void initButtons() {
		buttonGraph = new JButton("Graph");
		add(buttonGraph, SOUTH);
		buttonRemove = new JButton("Remove");
		add(buttonRemove, SOUTH);
		buttonClear = new JButton("Clear");
		add(buttonClear, SOUTH);
	}

	/** South strip: mutually exclusive colour selectors (black preselected). */
	private void initSouthRadioButtons() {
		add(new JLabel(" Choose colour"), SOUTH);
		black = new JRadioButton("black");
		black.setSelected(true);
		add(black, SOUTH);
		red = new JRadioButton("red");
		add(red, SOUTH);
		blue = new JRadioButton("blue");
		add(blue, SOUTH);
		yellow = new JRadioButton("yellow");
		add(yellow, SOUTH);
		ButtonGroup group = new ButtonGroup();
		group.add(black);
		group.add(red);
		group.add(blue);
		group.add(yellow);
	}

	/* Method: actionPerformed(e) */
	/**
	 * This class is responsible for detecting when the buttons are clicked, so you
	 * will have to define a method to respond to button actions.
	 */
	public void actionPerformed(ActionEvent e) {
		checkButtons(e);
		checkRadioButtons(e);
	}

	/**
	 * Handles the Graph/Remove/Clear buttons and Enter in the text field.
	 * The database lookup is performed once per event (the original looked
	 * the same name up as many as four times).
	 */
	private void checkButtons(ActionEvent e) {
		NameSurferEntry entry = file.findEntry(tf.getText());
		if ((e.getSource() == tf || e.getSource() == buttonGraph) && entry != null) {
			lastEntry = entry;
			// Only add a name once; its colour is fixed at add time.
			if (!entries.containsKey(lastEntry)) {
				entries.put(lastEntry, chooseColor());
				graph.update(entries);
				diagram.drawLastEntry(lastEntry);
			}
		}
		if (e.getSource() == buttonRemove && entry != null) {
			entries.remove(entry);
			graph.update(entries);
		}
		if (e.getSource() == buttonClear) {
			entries.clear();
			graph.update(entries);
		}
	}

	/**
	 * Switches between graph and diagram views by rebuilding the canvas.
	 * NOTE(review): removeAll() + new component each switch looks intentional
	 * (fresh canvas), but verify interactors survive the call in acm.Program.
	 */
	private void checkRadioButtons(ActionEvent e) {
		if (e.getSource() == graphic) {
			removeAll();
			graph = new NameSurferGraphExt();
			add(graph);
			graph.update(entries);
			validate();
		}
		if (e.getSource() == dgram) {
			removeAll();
			diagram = new NameSurferDiagramExt();
			add(diagram);
			diagram.drawLastEntry(lastEntry);
			validate();
		}
	}

	/** Returns the colour matching the currently selected radio button. */
	private Color chooseColor() {
		if (black.isSelected())
			return Color.BLACK;
		if (red.isSelected())
			return Color.RED;
		if (blue.isSelected())
			return Color.BLUE;
		return Color.YELLOW;
	}
}
|
/// Immutable model of a media entity attached to a tweet (URLs, display
/// indices, size variants, and the originating status when the media was
/// inherited from another tweet).
class MediaEntity {
    let displayUrl: String          // shortened URL shown in the tweet text
    let expandedUrl: String         // full resolved URL
    let idStr: String               // media id as a string
    let indicies: [Int]             // character range of the URL in the text (sic: "indicies")
    let mediaUrl: String            // direct http media URL
    let mediaUrlHttps: String       // direct https media URL
    let sizes: [String: Any]        // size-variant metadata keyed by variant name
    let sourceStatusId: Int?        // originating tweet id, when inherited
    let sourceStatusIdStr: String?  // originating tweet id as a string

    // NOTE(review): this initializer is declared failable (init?) but never
    // returns nil — presumably validation was planned; confirm before
    // relying on the optional result. Removing the `?` would change the
    // public interface, so it is left as-is.
    init?(displayUrl: String, expandedUrl: String, idStr: String, indicies: [Int], mediaUrl: String, mediaUrlHttps: String, sizes: [String: Any], sourceStatusId: Int?, sourceStatusIdStr: String?) {
        self.displayUrl = displayUrl
        self.expandedUrl = expandedUrl
        self.idStr = idStr
        self.indicies = indicies
        self.mediaUrl = mediaUrl
        self.mediaUrlHttps = mediaUrlHttps
        self.sizes = sizes
        self.sourceStatusId = sourceStatusId
        self.sourceStatusIdStr = sourceStatusIdStr
    }
}
|
#!/bin/bash
set -euxo pipefail
# Start an agent using the testing account API key to send some data.
# The agent needs host-level access (privileged, host net/pid, and the
# /proc, /boot, /dev, module and docker-socket mounts) to instrument the box;
# it reports to the staging collector under PYTHON_SDC_TEST_ACCESS_KEY.
docker run -d --name sysdig-agent --restart always --privileged --net host --pid host -e ACCESS_KEY=$PYTHON_SDC_TEST_ACCESS_KEY -e COLLECTOR=collector-staging.sysdigcloud.com -e SECURE=true -e TAGS= -v /var/run/docker.sock:/host/var/run/docker.sock -v /dev:/host/dev -v /proc:/host/proc:ro -v /boot:/host/boot:ro -v /lib/modules:/host/lib/modules:ro -v /usr:/host/usr:ro --shm-size=512m quay.io/sysdig/agent
# make sure the agent starts sending data and the backend makes it available via API
sleep 60
# Start the falco event generator to generate policy events in Secure
docker run --rm -d -it falcosecurity/event-generator run syscall
|
package com.mounacheikhna.decor;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
/**
* Created by cheikhna on 30/04/15.
*/
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class AttrsDecoratorTest {

	@Mock View view;
	@Mock TextView textView;
	@Mock ViewGroup parent;
	@Mock AttributeSet attributeSet;
	Context context;
	// Object under test; spied so obtainAttributes() can be stubbed.
	TestAttrsDecorator attrsDecorator;

	@Before
	public void setUp() throws Exception {
		initMocks(this);
		context = RuntimeEnvironment.application;
		attrsDecorator = spy(new TestAttrsDecorator()); //spying on this object under test is temporary TODO: don't use Spy
	}

	// The decorator targets TextView; applying it to an ImageView must be a no-op.
	@Test
	public void decorNotAppliedOnWidgetOfAnotherType() throws Exception {
		String name = "android.widget.ImageView";
		ImageView imageView = mock(ImageView.class);
		attrsDecorator.apply(imageView, parent, name, context, attributeSet);
		assertThat(attrsDecorator.values).isNull();
	}

	// Matching widget type but no styled attributes (null TypedArray) -> no-op.
	@Test
	public void decorNotAppliedOnWidgetButWithoutAttr() throws Exception {
		String name = "android.widget.TextView";
		spyWithTypedArray(null);
		attrsDecorator.apply(textView, parent, name, context, attributeSet);
		assertThat(attrsDecorator.values).isNull();
	}

	// TypedArray exists but holds no value for the attr: values are read,
	// the array is recycled, and the decor value stays null.
	@Test
	public void decorNotAppliedOnWidgetWithAttrWithoutValue() throws Exception {
		TypedArray typedArray = mockTypedArray(1, false);// we suppose we dont have custom attr here
		spyWithTypedArray(typedArray);
		String name = "android.widget.TextView";
		attrsDecorator.apply(textView, parent, name, context, attributeSet);
		assertThat(attrsDecorator.values).isNotNull();
		verify(typedArray).recycle();
		assertThat(attrsDecorator.getDecorStrValue()).isNull();
	}

	// Happy path: attr present with a value -> decor value is extracted and
	// the TypedArray is recycled.
	@Test
	public void decorAppliedWithAttrValue() throws Exception {
		TypedArray typedArray = mockTypedArray(1, true);
		when(typedArray.getString(0)).thenReturn("test");
		spyWithTypedArray(typedArray);
		String name = "android.widget.TextView";
		attrsDecorator.apply(textView, parent, name, context, attributeSet);
		assertThat(attrsDecorator.values).isNotNull();
		assertThat(attrsDecorator.values.length()).isGreaterThan(0);
		verify(typedArray).recycle();
		assertThat(attrsDecorator.getDecorStrValue()).isEqualTo("test");
	}

	// Stub the spy so the decorator sees the given TypedArray.
	private void spyWithTypedArray(TypedArray typedArray) {
		doReturn(typedArray).when(attrsDecorator).obtainAttributes(context, attributeSet);
	}

	/*private void mockTheme(TypedArray typedArray) {
		Resources.Theme theme = mock(Resources.Theme.class);
		when(theme.obtainStyledAttributes(attributeSet, attrsDecorator.styleable(), 0, 0)).thenReturn(typedArray);
		when(context.getTheme()).thenReturn(theme);
	}*/

	// Build a TypedArray mock of the given length whose index-0 attr
	// reports hasValue/getValue according to valueToReturn.
	private TypedArray mockTypedArray(int length, boolean valueToReturn) {
		TypedArray typedArray = mock(TypedArray.class);
		when(typedArray.length()).thenReturn(length);
		when(typedArray.hasValue(0)).thenReturn(valueToReturn);
		when(typedArray.getValue(eq(0), any(TypedValue.class))).thenReturn(valueToReturn);
		doNothing().when(typedArray).recycle();
		return typedArray;
	}
}
|
# Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import threading
import warnings
from datetime import datetime
from typing import Union, Sequence, Dict, Optional
from pandas import PeriodIndex, DataFrame
from qf_lib.common.enums.expiration_date_field import ExpirationDateField
from qf_lib.common.enums.price_field import PriceField
from qf_lib.common.tickers.tickers import HaverTicker, Ticker
from qf_lib.common.utils.logging.qf_parent_logger import qf_logger
from qf_lib.containers.dataframe.qf_dataframe import QFDataFrame
from qf_lib.containers.futures.future_tickers.future_ticker import FutureTicker
from qf_lib.containers.series.qf_series import QFSeries
from qf_lib.data_providers.abstract_price_data_provider import AbstractPriceDataProvider
from qf_lib.settings import Settings
# The Haver client library is optional: record its availability so the
# provider can fail lazily at connect time instead of at import time.
try:
    import Haver
    is_haver_installed = True
except ImportError:
    is_haver_installed = False
    warnings.warn("No Haver installed. If you would like to use HaverDataProvider first install the Haver library.")
class HaverDataProvider(AbstractPriceDataProvider):
    """
    Data provider reading time series from a local Haver database.

    Parameters
    ----------
    settings
        Settings object, which should contain path to the directory with the Haver database
    """

    # Class-wide lock: all Haver queries are serialised through it.
    get_lock = threading.Lock()

    def __init__(self, settings: Settings):
        self.db_location = settings.haver_path
        self.connected = False  # connection is established lazily on first request
        self.logger = qf_logger.getChild(self.__class__.__name__)

    def get_history(self, tickers: Union[HaverTicker, Sequence[HaverTicker]], fields=None, start_date: datetime = None,
                    end_date: datetime = None, **kwargs) -> Union[QFSeries, QFDataFrame]:
        """ Gets historical fields for Haver tickers.

        Parameters
        -------------
        tickers: HaverTicker, Sequence[HaverTicker]
            Haver tickers, for which the prices should be returned
        fields
            should be equal to None as each ticker corresponds to one timeseries and there is no such thing as a field
            in the Haver DB
        start_date: datetime
            date representing the beginning of historical period from which data should be retrieved
        end_date: datetime
            date representing the end of historical period from which data should be retrieved;
            if no end_date was provided, by default the current date will be used

        Returns
        ------------
        QFSeries, QFDataFrame
        """
        if fields is not None:
            # Fixed message typo ("nor" -> "not").
            self.logger.warning("Data field is provided but it will not be used")
        self._connect_if_needed()
        # Context manager guarantees the lock is released even if a query raises
        # (the previous acquire()/try/finally/release() pattern, condensed).
        with HaverDataProvider.get_lock:
            if isinstance(tickers, HaverTicker):
                return self._get_single(tickers, start_date, end_date)
            result = QFDataFrame()
            # we should go one by one as making single large query will keep just common dates
            for ticker in tickers:
                result[ticker] = self._get_single(ticker, start_date, end_date)
            return result

    def supported_ticker_types(self):
        # Only Haver tickers can be resolved by this provider.
        return {HaverTicker}

    def price_field_to_str_map(self, ticker: HaverTicker = None) -> Dict[PriceField, Optional[str]]:
        """
        Haver stores only end of day figures. Use PriceField.Close to obtain them
        """
        price_field_dict = {PriceField.Close: None}  # Field representation is none as Field is unused
        return price_field_dict

    @staticmethod
    def _get_single(haver_ticker: HaverTicker, start_date: datetime, end_date) -> QFSeries:
        """Fetch one ticker's series; returns an empty, named series when no data comes back."""
        # The Haver API works on plain dates, not datetimes.
        if start_date is not None:
            start_date = start_date.date()
        if end_date is not None:
            end_date = end_date.date()
        ticker_str = [haver_ticker.database_name + ':' + haver_ticker.ticker]
        raw_series = Haver.data(ticker_str, startdate=start_date, enddate=end_date)
        if isinstance(raw_series, DataFrame):
            if isinstance(raw_series.index, PeriodIndex):
                # Normalise period-based indices to regular timestamps.
                raw_series.index = raw_series.index.to_timestamp()
            result = QFSeries(raw_series.iloc[:, 0])
        else:
            # No data returned - hand back an empty but correctly named series.
            result = QFSeries()
        result.name = haver_ticker.as_string()
        return result

    def connect(self):
        """Point the Haver library at the configured database directory, if reachable."""
        if is_haver_installed:
            if os.path.exists(self.db_location):
                Haver.path(self.db_location)
                self.connected = True
            else:
                self.logger.warning("Couldn't access Haver directory. Probably there is no access to the O: drive.")

    def _connect_if_needed(self):
        """
        Calling this function should be the first line in any method accessing the database
        It was introduced to make the use of the database lazy and connect only when used requests data
        """
        if not self.connected:
            self.connect()
        if not self.connected:
            raise ConnectionError("No Haver connection.")

    def _get_futures_chain_dict(self, tickers: Union[FutureTicker, Sequence[FutureTicker]],
                                expiration_date_fields: Union[str, Sequence[str]]) -> Dict[FutureTicker, QFDataFrame]:
        raise NotImplementedError("Downloading Future Chain Tickers in HaverDataProvider is not supported yet")

    def expiration_date_field_str_map(self, ticker: Ticker = None) -> Dict[ExpirationDateField, str]:
        # Haver tickers carry no expiration-date fields; intentionally a no-op.
        pass
|
package disgo
import (
"github.com/DisgoOrg/disgo/api"
"github.com/DisgoOrg/disgo/internal"
)
// New Initialises a new Disgo client for the given bot token and options.
// It is a thin wrapper delegating construction to the internal package.
func New(token string, options api.Options) (api.Disgo, error) {
	return internal.New(token, options)
}

// NewBuilder creates an api.DisgoBuilder for the client with the given bot
// token; construction is delegated to the internal package.
func NewBuilder(token string) api.DisgoBuilder {
	return internal.NewBuilder(token)
}
|
def gaussianElimination(eqtn):
    """Solve a linear system given as rows of an augmented matrix.

    Pipeline: build a float matrix, row-reduce it while recording the
    transformation, back-substitute, then map the solutions back through the
    transformation matrix. Relies on the helper functions in this module.
    """
    # Create matrix
    matrix = createMatrix(eqtn)
    # Reduce matrix to reduced echelon form, keeping track of the transformation
    transformationMatrix, reducedMatrix = reduceMatrix(matrix)
    # Solve the system of linear equations
    solutions = solveReducedMatrix(reducedMatrix)
    # Use the transformation matrix to get the original solutions
    originalSolutions = transformSolutions(transformationMatrix, solutions)
    return originalSolutions
# Returns the augmented matrix of the equation
def createMatrix(eqtn):
    """Coerce every entry of every row to float, returning a new matrix."""
    return [[float(entry) for entry in row] for row in eqtn]
# Reduce the augmented matrix to reduced echelon form
def reduceMatrix(matrix):
    """Row-reduce ``matrix`` and return (transformationMatrix, reducedMatrix).

    NOTE(review): ``identityMatrix``, ``deepcopy`` and ``rowReduce`` are not
    defined in the visible portion of this file -- confirm they are imported
    or defined elsewhere. The ``vector`` returned by rowReduce is discarded.
    """
    transformationMatrix = identityMatrix(len(matrix))
    reducedMatrix = deepcopy(matrix)
    for i in range(len(matrix)):
        reducedMatrix, vector = rowReduce(reducedMatrix, i, transformationMatrix)
    return transformationMatrix, reducedMatrix
# Solve the reduced matrix
def solveReducedMatrix(reducedMatrix):
    """Back-substitute through an upper-triangular augmented matrix.

    Returns the solution vector, ordered to match the columns. The input
    matrix is mutated in place while eliminating solved variables from the
    rows above (same behavior as the original implementation).
    """
    solutions = []
    for row in reversed(range(len(reducedMatrix))):
        value = reducedMatrix[row][-1] / reducedMatrix[row][row]
        solutions.insert(0, value)
        # Subtract this variable's contribution from every row above it.
        for above in range(row):
            reducedMatrix[above][-1] -= reducedMatrix[above][row] * value
    return solutions
# Transform the solutions to match the original system of equations
def transformSolutions(transformationMatrix, solutions):
    """Multiply the transformation matrix by the solution vector.

    The result has the same length as ``solutions``; each entry i is the dot
    product of row i of the transformation matrix with the solutions.
    """
    transformed = [0 for _ in range(len(solutions))]
    for row_index, row in enumerate(transformationMatrix):
        transformed[row_index] = sum(row[col] * solutions[col] for col in range(len(solutions)))
    return transformed
|
/**
* Created by ferdi on 06.02.17.
*/
package com.yahoo.ycsb.db;
|
# Copyright 2022 The ML Fairness Gym Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# Find every *_test.py under the current tree, convert its path to a dotted
# module name (strip "./" and ".py", replace "/" with "."), drop anything
# under lib/, and run each module with `python -m`.
find . -name '*_test.py' | sed 's:^./::' | sed 's:.py$::' | sed 's:/:.:g' | grep -v "^lib\." | xargs -n1 python -m
|
/* Only the outmost loop can be parallelized
*/
#include "omp.h"
void foo()
{
  int n = 100;
  int m = 100;
  double b[n][m];
  int i;
  int j;
  /* Only the outer loop is parallelized; the inner loop carries a
   * loop-carried dependence through b[i][j - 1] (see the trailing
   * dependence report below).
   * NOTE(review): at j == 0 this reads b[i][-1], which is out of bounds --
   * presumably intentional in this dependence-analysis test case; confirm. */
#pragma omp parallel for private (i,j) firstprivate (n,m)
  for (i = 0; i <= n - 1; i += 1) {
    for (j = 0; j <= m - 1; j += 1) {
      b[i][j] = b[i][j - 1];
    }
  }
}
/*
Unparallelizable loop at line:9 due to the following dependencies:
1*1 TRUE_DEP DATA_DEP; commonlevel = 1 CarryLevel = 0 Is precise SgPntrArrRefExp:(b[i])[j]@10:14->SgPntrArrRefExp:((b[i])[j - 1])@10:19 == -1;||::
*/
|
const IRC = require('irc');
const Commands = require('./commands');
const { API, DB, Log } = require('./utility');
// Polling cadence for checking for a new top post.
const FIVE_MINUTES = 5 * 60 * 1000;
// IRC identity and connection settings.
const BOT_NAME = 'bbqbot';
const IRC_NET = 'irc.libera.chat';
const CHANNEL = '##bbq';
const IRC_OPTS = {
  userName: BOT_NAME,
  realName: 'BBQ Bot for r/BBQ: Low and Slow',
  channels: [CHANNEL],
  autoConnect: true,
  retryCount: 3,
};
// Single shared IRC client for the lifetime of the process.
const Client = new IRC.Client(IRC_NET, BOT_NAME, IRC_OPTS);
// Build the post's fullname identifier, e.g. "t3_abc123" (kind + "_" + id).
function getPostFullName(post) {
  return `${post.kind}_${post.data.id}`;
}
// Announce a post to the channel (side effect only; nothing returned).
async function sendNewPostToChannel(post) {
  Log.info('Sending post to channel', { post });
  Client.say(CHANNEL, `Top BBQ Post Of The Day: ${API.SHORT_URL + post.data.id}`);
}

// True once the stored next_post timestamp lies in the past.
function timePassed() {
  const results = DB.getTimestamp();
  const now = new Date();
  Log.info('Comparing next_post date to now', { now: now.toISOString(), next_post: results.next_post });
  return now > new Date(results.next_post);
}

// Periodic task: once the cooldown has elapsed, announce the highest-ranked
// of the top five posts that has not been sent before, record it in the DB,
// and reset the next-post timer. Stops after the first unseen post.
async function core() {
  if (!timePassed()) {
    Log.info('Time has not passed, so do not make the API request.');
    return;
  }
  let topFivePosts = await API.getTopPosts(5);
  for (const post of topFivePosts) {
    const postFullName = getPostFullName(post);
    if (!DB.isInDatabase(postFullName)) {
      DB.putNewPostInDatabase(postFullName);
      sendNewPostToChannel(post);
      DB.setNextPostTime();
      return;
    }
  }
  Log.info('The top 5 posts have all been sent before.');
}
// The following is to grant moderator priveledges (provided the bot can) to BOT_OWNER
// with BOT_HOST. The host is in there to prevent impersonators if using only the nick
// alone to auto-mod.
// function joinListener(channel, to, message) {
// if (channel === CHANNEL && to === BOT_OWNER && message.host === BOT_HOST) {
// try {
// Client.send('MODE', CHANNEL, '+o', BOT_OWNER);
// } catch (err) {
// Log.warning('Unable to grant moderator privs, bot is probably not channel moderator', { err });
// }
// }
// }
// Parse "<prefix> <subcommand> [args]" channel messages and dispatch to the
// matching command handler; replies (and handler errors) go to the channel.
async function commandHandler(_, message) {
  if (!message.startsWith(Commands.COMMAND_PREFIX)) return;
  Log.info('Received a command.', { command: message });
  const command = message.split(' ');
  // command[0] is the prefix itself; command[1] is the subcommand.
  switch (command[1]) {
    case 'hot':
      Commands.handleHottest(command[2])
        .then((message) => Client.say(CHANNEL, message))
        .catch((err) => Client.say(CHANNEL, err.message));
      break;
    case 'link':
      Client.say(CHANNEL, Commands.handleLink());
      break;
    case 'new':
      Commands.handleNewest(command[2])
        .then((message) => Client.say(CHANNEL, message))
        .catch((err) => Client.say(CHANNEL, err.message));
      break;
    case 'top':
      Commands.handleTop(command[2], command[3])
        .then((message) => Client.say(CHANNEL, message))
        .catch((err) => Client.say(CHANNEL, err.message));
      break;
    default:
      Client.say(CHANNEL, 'How to use: !bb [COMMAND]. Available commands: hot, link, new, top');
  }
}

// Runs once the client has registered with the IRC server; starts polling.
async function main() {
  // Keep this as console Log to ensure connection.
  console.log('The bot has connected to IRC');
  setInterval(core, FIVE_MINUTES);
}

// Wire the handlers: channel messages go to the command dispatcher, and the
// poll loop starts once registration completes.
Client.addListener(`message${CHANNEL}`, commandHandler).addListener('registered', main);
// .addListener('join', joinListener);
|
package kr.co.gardener.admin.service.forest.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import kr.co.gardener.admin.dao.user.PlantLevelDao;
import kr.co.gardener.admin.model.forest.PlantLevel;
import kr.co.gardener.admin.model.forest.list.PlantLevelList;
import kr.co.gardener.admin.service.forest.PlantLevelService;
import kr.co.gardener.main.vo.PlantImage;
import kr.co.gardener.util.Pager;
/**
 * Default {@link PlantLevelService} implementation delegating persistence to
 * {@link PlantLevelDao}, with a lazily built in-memory plant-image cache.
 */
@Service
public class PlantLevelServiceImpl implements PlantLevelService {

    @Autowired
    PlantLevelDao dao;

    // Lazily initialised cache of plant images keyed by plant id.
    Map<Integer, PlantImage> plantImage;

    @Override
    public List<PlantLevel> list() {
        return dao.list();
    }

    /** Load all plant images once and index them by plant id. */
    private void initPlantImage() {
        plantImage = new HashMap<Integer, PlantImage>();
        List<PlantImage> list = dao.plantImage();
        for (PlantImage item : list) {
            plantImage.put(item.getPlantId(), item);
        }
    }

    @Override
    public void add(PlantLevel item) {
        dao.add(item);
    }

    @Override
    public PlantLevel item(int plantLevelId) {
        return dao.item(plantLevelId);
    }

    @Override
    public void update(PlantLevel item) {
        dao.update(item);
    }

    @Override
    public void delete(int plantLevelId) {
        dao.delete(plantLevelId);
    }

    /** Build a paged list together with its pager metadata and combo data. */
    @Override
    public PlantLevelList list_pager(Pager pager) {
        PlantLevelList list = new PlantLevelList();
        list.setList(dao.list_pager(pager));
        list.setPager(pager);
        pager.setTotal(dao.total(pager));
        list.paseComboList(dao.combo());
        return list;
    }

    @Override
    public void insert_list(PlantLevelList list) {
        dao.insert_list(list.getList());
    }

    @Override
    public void delete_list(PlantLevelList list) {
        dao.delete_list(list.getList());
    }

    @Override
    public void update_list(PlantLevelList list) {
        dao.update_list(list.getList());
    }

    /**
     * Returns three mature plants chosen at random.
     * NOTE(review): independent draws, so duplicates are possible -- confirm
     * whether distinct picks are required.
     */
    @Override
    public List<PlantLevel> random() {
        List<PlantLevel> list = dao.maturePlant();
        List<PlantLevel> item = new ArrayList<PlantLevel>();
        Random r = new Random();
        for (int a = 0; a < 3; a++) {
            // Bug fix: Random.nextInt(bound) already excludes the bound, so the
            // previous nextInt(list.size() - 1) could never select the last
            // element and threw IllegalArgumentException for one-element lists.
            item.add(list.get(r.nextInt(list.size())));
        }
        return item;
    }

    /**
     * Returns the image of the given plant at the given (1-based) level,
     * building the image cache on first use.
     */
    @Override
    public String PlantImage(int plantId, int plantLevel) {
        if (plantImage == null)
            initPlantImage();
        // Levels are 1-based while the image list is 0-based.
        return plantImage.get(plantId).getPlantImage().get(plantLevel - 1);
    }
}
|
<reponame>yano3/chitanda-san-v2<gh_stars>1-10
require 'faraday'
require 'faraday_middleware'
module ChitandaSan
  module SyoboiCalendar
    # Thin HTTP layer over Faraday used to talk to the Syoboi Calendar API.
    module Connection
      # Issue a GET request against the API.
      def get(uri, params = nil)
        request(:get, uri, params)
      end

      # Dispatch a request of the given HTTP verb through the shared connection.
      def request(method, uri, params)
        connection.public_send(method, uri, params)
      end

      # Lazily build and memoize the Faraday connection (XML responses over
      # Net::HTTP), rooted at @base_url.
      def connection
        @connection ||= Faraday.new(url: @base_url) do |faraday|
          faraday.response :xml
          faraday.adapter :net_http
        end
      end
    end
  end
end
|
import cn.hutool.core.date.DateUtil;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
/**
* @author zs
*@date 2021/9/26.
*/
public class Test8 {
    /**
     * Demo: parse a timestamp string with hutool's DateUtil and print only
     * its date portion (yyyy-MM-dd).
     */
    public static void main(String[] args) {
        final String dateOnly = DateUtil.parse("2021-10-10 20:01:01").toString("yyyy-MM-dd");
        System.out.println(dateOnly);
    }
}
|
#!/usr/bin/env bash
# Convert every .mol2 ligand in a folder to .pdbqt using UCSF Chimera's
# bundled AutoDockTools prepare_ligand4.py script.

# Install location of UCSF Chimera
CHIMERA_HOME=/opt/UCSF/Chimera64-1.10.2
# Command location of UCSF Chimera
chimera=$CHIMERA_HOME/bin/chimera
# ligand preparation location of Autodock vina
prepare_ligand4=$CHIMERA_HOME/lib/python2.7/site-packages/AutoDockTools/Utilities24/prepare_ligand4.py

if (( $# != 2 )); then
    echo "Usage: ./mol2_to_pdbqt.sh mol2_folder pdbqt_folder"
    exit 1
fi

mol2_folder=$1
pdbqt_folder=$2

# Iterate with a glob instead of parsing `ls` output, which breaks on
# whitespace and unusual file names.
for f in "$mol2_folder"/*.mol2
do
    # Derive the output name: basename up to the first dot, plus .pdbqt.
    ligname=$(basename "$f" | cut -d'.' -f1)
    ligname=$ligname".pdbqt"
    "$chimera" --nogui --nostatus --script "$prepare_ligand4 -l $f -o $pdbqt_folder/$ligname "
    echo $f " converted to " $pdbqt_folder/$ligname
done
|
<gh_stars>0
// Navigation metadata for the armnn AbsQueueDescriptor documentation page:
// [label, target href, children] triplets. Appears machine-generated (Doxygen
// style) -- do not edit by hand.
var structarmnn_1_1_abs_queue_descriptor =
[
    [ "Validate", "structarmnn_1_1_abs_queue_descriptor.xhtml#a041e495449e22774a34d92b0904c10bf", null ]
];
|
#!/usr/bin/env bash
# Functions to interact with a container that includes the client caasp4
# binaries and terraform

# Read the terraform outputs and export the load-balancer, master and worker
# IP lists (tab-separated), plus the combined ALL list.
_set_env_vars() {
    JSON=$(skuba_container terraform output -json)
    LB="$(echo "$JSON" | jq -r '.ip_load_balancer.value|to_entries|map(.value)|@tsv')"
    export LB
    MASTERS="$(echo "$JSON" | jq -r '.ip_masters.value|to_entries|map(.value)|@tsv')"
    export MASTERS
    WORKERS="$(echo "$JSON" | jq -r '.ip_workers.value|to_entries|map(.value)|@tsv')"
    export WORKERS
    ALL="$MASTERS $WORKERS"
    export ALL
}

# Resolve a group keyword (all|masters|workers) -- or a literal host list --
# into the GROUP variable consumed by the other helpers.
_define_node_group() {
    _set_env_vars
    case "$1" in
        "all")
            GROUP="$ALL"
            ;;
        "masters")
            GROUP="$MASTERS"
            ;;
        "workers")
            GROUP="$WORKERS"
            ;;
        *)
            GROUP="$1"
            ;;
    esac
}

# Map the boolean DEBUG_MODE environment toggle onto skuba's numeric -v level.
DEBUG_MODE=${DEBUG_MODE:-false}
if [ $DEBUG_MODE = true ]; then
    DEBUG=1
else
    DEBUG=0
fi
# Run a command inside the skuba tooling container, mounting the working (or
# cluster) directory, the SSH agent socket and the host user database, and
# forwarding the caller's environment variable names.
skuba_container() {
    # Usage:
    # skuba_container <commands to run in a punctured container>
    local app_path="$PWD"
    # When the first argument is the cluster directory name, run from inside it.
    if [[ "$1" == "$CLUSTER_NAME" ]]; then
        local app_path="$PWD/$1"
        shift
    fi
    docker run -i --rm \
        -v "$app_path":/app:rw \
        -v "$(dirname "$SSH_AUTH_SOCK")":"$(dirname "$SSH_AUTH_SOCK")" \
        -v "/etc/passwd:/etc/passwd:ro" \
        --env-file <( env| cut -f1 -d= ) \
        -e SSH_AUTH_SOCK="$SSH_AUTH_SOCK" \
        -u "$(id -u)":"$(id -g)" \
        skuba/$CAASP_VER "$@"
}

# ssh to a node as the "sles" user, ignoring host keys (throwaway clusters).
_ssh2() {
    local host=$1
    shift
    ssh -o UserKnownHostsFile=/dev/null \
        -o StrictHostKeyChecking=no \
        -F /dev/null \
        -o LogLevel=ERROR \
        "sles@$host" "$@"
}
# Poll each node of the group over ssh until it answers or the timeout
# (second argument, seconds) expires; exits the script with status 2 on timeout.
skuba_wait_ssh() {
    # Usage:
    # wait_ssh <target> <timeout_seconds>
    timeout=$2
    local target="${1:-all}"
    _define_node_group "$target"
    for n in $GROUP; do
        secs=0
        # Probing is expected to fail while the node boots; disable errexit.
        set +e
        _ssh2 $n exit
        while test $? -gt 0
        do
            if [ $secs -gt $timeout ] ; then
                echo "Timeout while waiting for $n"
                exit 2
            else
                sleep 5
                secs=$(( secs + 5 ))
                _ssh2 $n exit
            fi
        done
        set -e
    done
}

# Enable or disable kured-driven reboots by setting/removing the manual
# node-lock annotation on the kured daemonset.
skuba_reboots() {
    # usage:
    # reboots disable
    local action="${1:-disable}"
    if [[ "$action" == "disable" ]]; then
        kubectl -n kube-system annotate ds kured weave.works/kured-node-lock='{"nodeid":"manual"}'
    else
        kubectl -n kube-system annotate ds kured weave.works/kured-node-lock-
    fi
}

# Run an arbitrary command on every node of the resolved group.
skuba_run_cmd() {
    # Usage:
    # run_cmd <target> "sudo ..."
    # run_cmd all "sudo ..."
    # run_cmd masters "sudo ..."
    local target="${1:-all}"
    _define_node_group "$target"
    for n in $GROUP; do
        _ssh2 "$n" "$@"
    done
}
# Copy SRC recursively to DEST on every node of the resolved group.
skuba_use_scp() {
    # Usage:
    # use_scp <target> <src_files> <dest_files>
    # use_scp masters <src_files> <dest_files>
    # use_scp workers <src_files> <dest_files>
    local target="${1:-all}"
    _define_node_group "$target"
    SRC="$2"
    DEST="$3"
    # Bug fix: the options were a single string passed as "$options", which
    # hands scp ALL flags as one argument and makes every copy fail. An array
    # expands each flag as its own word.
    local options=(-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -F /dev/null -o LogLevel=ERROR -r)
    for n in $GROUP; do
        scp "${options[@]}" "$SRC" sles@$n:"$DEST"
    done
}
# List every container image currently referenced by pods in the cluster.
skuba_show_images() {
    kubectl get pods --all-namespaces -o jsonpath="{.items[*].spec.containers[*].image}" | tr -s '[[:space:]]' '\n'
}

# Enable/disable the skuba-update systemd timer on every node of the group.
skuba_updates() {
    # Usage:
    # updates <target> <action>
    # updates all disable
    local target="${1:-all}"
    local action="${2:-disable}"
    _define_node_group "$target"
    for n in $GROUP; do
        _ssh2 "$n" "sudo systemctl $action --now skuba-update.timer"
    done
}

# Initialise the cluster definition directory once, pointing at the LB.
_init_control_plane() {
    if ! [[ -d "$CLUSTER_NAME" ]]; then
        skuba_container skuba cluster init --control-plane "$LB" "$CLUSTER_NAME"
    fi
}

# Bootstrap the first master, then join the remaining masters one by one.
# Node names are zero-padded (master000, master001, ...).
_deploy_masters() {
    local i=0
    for n in $1; do
        local j
        j="$(printf "%03g" $i)"
        if [[ $i -eq 0 ]]; then
            skuba_container "$CLUSTER_NAME" skuba node bootstrap --user sles --sudo --target "$n" "master$j" -v "$DEBUG"
            wait
        fi
        if [[ $i -ne 0 ]]; then
            skuba_container "$CLUSTER_NAME" skuba node join --role master --user sles --sudo --target "$n" "master$j" -v "$DEBUG"
            wait
        fi
        ((++i))
    done
}

# Join every worker node sequentially (the subshell is awaited immediately).
_deploy_workers() {
    local i=0
    for n in $1; do
        local j
        j="$(printf "%03g" $i)"
        (skuba_container "$CLUSTER_NAME" skuba node join --role worker --user sles --sudo --target "$n" "worker$j" -v "$DEBUG") &
        wait
        ((++i))
    done
}

# Full cluster roll-out: init, bootstrap masters, join workers, print status.
skuba_deploy() {
    # Usage: deploy
    set -x
    _set_env_vars
    _init_control_plane
    pushd "$(pwd)"/ || exit
    _deploy_masters "$MASTERS"
    _deploy_workers "$WORKERS"
    KUBECONFIG="" skuba_container $CLUSTER_NAME skuba cluster status
}
# Apply a skuba node upgrade to every node of the resolved group.
skuba_node_upgrade() {
    # Usage:
    # skuba_node_upgrade <target>
    # skuba_node_upgrade all
    # skuba_node_upgrade masters
    # skuba_node_upgrade workers
    local target="${1:-all}"
    _define_node_group "$target"
    # (Removed an unused loop counter that was never read.)
    for n in $GROUP; do
        skuba_container "$CLUSTER_NAME" skuba node upgrade \
            apply --user sles --sudo --target "$n" -v "$DEBUG"
    done
}
|
// Sum all entries of an array; an empty array sums to 0.
const adder = (nums) => nums.reduce((total, value) => total + value, 0);

module.exports = adder;
|
#!/bin/bash
# Build the AES transaction benchmarks from LLVM IR, then optionally rsync the
# resulting executables to a remote CSE host.

for i in {0..3}
do
    s=""
    # Benchmarks 0..2 carry a numeric suffix; the last one has none.
    if [ $i -lt 3 ]
    then
        s=$i
    fi
    CMD="llvm-as aes/aes\_ac\_tx$s.ll";
    echo $CMD;
    eval $CMD;
    CMD="clang++ -O3 aes/aes\_ac\_tx$s.bc -o aes/aes\_tx$s.exe";
    echo $CMD;
    eval $CMD;
done

# Skip the rsync step when the host argument is "none" or absent.
# Robustness fix: "${1:-none}" keeps `[` from erroring out (unary operator
# expected) when the script is run without arguments, while preserving the
# original skip-on-missing-argument behavior.
if [ "${1:-none}" != "none" ]
then
    CMD="rsync -avz --exclude-from ~/Dropbox/rsync.ignore /home/ruiz/git/dg/tx_benches/aes/aes\_tx*.exe zhang.5944@$1.cse.ohio-state.edu:/home/zhang.5944/sec_examples/";
    echo $CMD;
    eval $CMD;
fi
|
#!/bin/bash -ex
# Patch the Microsoft JIRA SSO plugin: download the .obr bundle, unpack the
# plugin jar, swap in a local Velocity template, and repack the jar.

# Download the bundle only if it is not already present.
[ -f MicrosoftJIRASSO6x7x.obr ] || wget https://download.microsoft.com/download/3/C/9/3C94C886-0C89-4182-A758-6DB982E1F398/MicrosoftJIRASSO6x7x.obr
# Unpack the bundle once to get at the plugin jar.
if [ ! -f MicrosoftJIRASSO6x7x/MSSsoJiraPlugin-1.0.1.jar ]
then
    mkdir -p MicrosoftJIRASSO6x7x
    cd MicrosoftJIRASSO6x7x
    jar xf ../MicrosoftJIRASSO6x7x.obr
    cd ..
fi
# Explode the plugin jar so the template can be replaced in place.
if [ ! -f MicrosoftJIRASSO6x7x/MSSsoJiraPlugin-1.0.1/templates/jira-configure/jiraSSOConfiguration.vm ] ; then
    mkdir -p MicrosoftJIRASSO6x7x/MSSsoJiraPlugin-1.0.1/
    cd MicrosoftJIRASSO6x7x/MSSsoJiraPlugin-1.0.1/
    jar xf ../MSSsoJiraPlugin-1.0.1.jar
    cd ../..
fi
# Overwrite the stock template with the local, patched copy.
cp -f jiraSSOConfiguration.vm MicrosoftJIRASSO6x7x/MSSsoJiraPlugin-1.0.1/templates/jira-configure/jiraSSOConfiguration.vm
# NOTE(review): this removes ../MSSsoJiraPlugin-1.0.1.jar relative to the
# script's working directory (its parent), not the unpacked copy -- confirm
# the intended target.
rm -f ../MSSsoJiraPlugin-1.0.1.jar
cd MicrosoftJIRASSO6x7x/MSSsoJiraPlugin-1.0.1/
# Repack the patched jar one directory up.
jar cf ../MSSsoJiraPlugin-1.0.1.jar .
|
#!/bin/bash
# Enable or disable the LiteSpeed/OpenLiteSpeed mod_security module together
# with the OWASP core rule set.

LSDIR='/usr/local/lsws'
OWASP_DIR="${LSDIR}/conf/owasp"
RULE_FILE='modsec_includes.conf'
LS_HTTPD_CONF="${LSDIR}/conf/httpd_config.xml"
OLS_HTTPD_CONF="${LSDIR}/conf/httpd_config.conf"
EPACE='        '

# Print a bolded flag followed by its description.
echow(){
    FLAG=${1}
    shift
    echo -e "\033[1m${EPACE}${FLAG}\033[0m${@}"
}

# Print CLI usage and exit.
help_message(){
    echo -e "\033[1mOPTIONS\033[0m"
    echow '-E, --enable'
    echo "${EPACE}${EPACE}Will Enable mod_secure module with latest OWASP version of rules"
    echow '-D, --disable'
    echo "${EPACE}${EPACE}Will Disable mod_secure module with latest OWASP version of rules"
    echow '-H, --help'
    echo "${EPACE}${EPACE}Display help and exit."
    exit 0
}

# Detect which LiteSpeed flavour is installed (enterprise "lsws" vs
# "openlitespeed"), by probing for the respective binary.
check_lsv(){
    if [ -f ${LSDIR}/bin/openlitespeed ]; then
        LSV='openlitespeed'
    elif [ -f ${LSDIR}/bin/litespeed ]; then
        LSV='lsws'
    else
        echo 'Version not exist, abort!'
        exit 1
    fi
}

# Show usage and abort when the script is invoked without any argument.
check_input(){
    if [ -z "${1}" ]; then
        help_message
        exit 1
    fi
}
# Recreate the OWASP rules directory from scratch.
mk_owasp_dir(){
    if [ -d ${OWASP_DIR} ] ; then
        rm -rf ${OWASP_DIR}
    fi
    mkdir -p ${OWASP_DIR}
    if [ ${?} -ne 0 ] ; then
        echo "Unable to create directory: ${OWASP_DIR}, exit!"
        exit 1
    fi
}

# Line number of the first occurrence of pattern $1 in file $2 (FIRST_LINE_NUM).
fst_match_line(){
    FIRST_LINE_NUM=$(grep -n -m 1 "${1}" ${2} | awk -F ':' '{print $1}')
}

# Offset (from line $1) of the first occurrence of pattern $3 in file $2.
fst_match_after(){
    FIRST_NUM_AFTER=$(tail -n +${1} ${2} | grep -n -m 1 ${3} | awk -F ':' '{print $1}')
}

# Absolute line number where the block opened at line $1 is closed by $3.
lst_match_line(){
    fst_match_after ${1} ${2} ${3}
    LAST_LINE_NUM=$((${FIRST_LINE_NUM}+${FIRST_NUM_AFTER}-1))
}

# Inject a mod_security module block into the OpenLiteSpeed config
# (idempotent: a no-op if the block already exists).
enable_ols_modsec(){
    grep 'module mod_security {' ${OLS_HTTPD_CONF} >/dev/null 2>&1
    if [ ${?} -eq 0 ] ; then
        echo "Already configured for modsecurity."
    else
        echo 'Enable modsecurity'
        # Splice the module block in just before the "module cache" entry;
        # continuation lines are unindented on purpose (they are part of the
        # quoted sed replacement text).
        sed -i "s=module cache=module mod_security {\nmodsecurity on\
\nmodsecurity_rules \`\nSecRuleEngine On\n\`\nmodsecurity_rules_file \
${OWASP_DIR}/${RULE_FILE}\n ls_enabled 1\n}\
\n\nmodule cache=" ${OLS_HTTPD_CONF}
    fi
}

# Turn censorship (mod_security) on and register the OWASP rule set in the
# enterprise LSWS XML config (idempotent).
enable_ls_modsec(){
    grep '<enableCensorship>1</enableCensorship>' ${LS_HTTPD_CONF} >/dev/null 2>&1
    if [ ${?} -eq 0 ] ; then
        echo "LSWS already configured for modsecurity"
    else
        echo 'Enable modsecurity'
        sed -i \
        "s=<enableCensorship>0</enableCensorship>=<enableCensorship>1</enableCensorship>=" ${LS_HTTPD_CONF}
        sed -i \
        "s=</censorshipControl>=</censorshipControl>\n\
<censorshipRuleSet>\n\
<name>ModSec</name>\n\
<enabled>1</enabled>\n\
<ruleSet>include ${OWASP_DIR}/modsec_includes.conf</ruleSet>\n\
</censorshipRuleSet>=" ${LS_HTTPD_CONF}
    fi
}

# Dispatch to the right enabler for the detected server flavour.
enable_modsec(){
    if [ "${LSV}" = 'lsws' ]; then
        enable_ls_modsec
    elif [ "${LSV}" = 'openlitespeed' ]; then
        enable_ols_modsec
    fi
}
# Remove the injected mod_security block from the OpenLiteSpeed config.
disable_ols_modesec(){
    grep 'module mod_security {' ${OLS_HTTPD_CONF} >/dev/null 2>&1
    if [ ${?} -eq 0 ] ; then
        echo 'Disable modsecurity'
        # Locate the module block's first and last line, then delete the range.
        fst_match_line 'module mod_security' ${OLS_HTTPD_CONF}
        lst_match_line ${FIRST_LINE_NUM} ${OLS_HTTPD_CONF} '}'
        sed -i "${FIRST_LINE_NUM},${LAST_LINE_NUM}d" ${OLS_HTTPD_CONF}
    else
        echo 'Already disabled for modsecurity'
    fi
}

# Turn censorship back off and drop the OWASP rule set from the LSWS config.
disable_ls_modesec(){
    # NOTE(review): unlike the sibling functions, this grep's output is not
    # redirected to /dev/null, so a match is printed to stdout -- confirm
    # whether that is intentional.
    grep '<enableCensorship>0</enableCensorship>' ${LS_HTTPD_CONF}
    if [ ${?} -eq 0 ] ; then
        echo 'Already disabled for modsecurity'
    else
        echo 'Disable modsecurity'
        sed -i \
        "s=<enableCensorship>1</enableCensorship>=<enableCensorship>0</enableCensorship>=" ${LS_HTTPD_CONF}
        fst_match_line 'censorshipRuleSet' ${LS_HTTPD_CONF}
        lst_match_line ${FIRST_LINE_NUM} ${LS_HTTPD_CONF} '/censorshipRuleSet'
        sed -i "${FIRST_LINE_NUM},${LAST_LINE_NUM}d" ${LS_HTTPD_CONF}
    fi
}

# Dispatch to the right disabler for the detected server flavour.
disable_modsec(){
    check_lsv
    if [ "${LSV}" = 'lsws' ]; then
        disable_ls_modesec
    elif [ "${LSV}" = 'openlitespeed' ]; then
        disable_ols_modesec
    fi
}
# Install git if missing (uses apt-get, so Debian/Ubuntu only).
install_git(){
    if [ ! -f /usr/bin/git ]; then
        echo 'Install git'
        apt-get install git -y >/dev/null 2>&1
    fi
}

# Fetch the OWASP ModSecurity core rule set into the rules directory.
install_owasp(){
    cd ${OWASP_DIR}
    echo 'Download OWASP rules'
    git clone https://github.com/SpiderLabs/owasp-modsecurity-crs.git >/dev/null 2>&1
}

# Write the include manifest and activate the shipped example configs.
configure_owasp(){
    echo 'Config OWASP rules.'
    cd ${OWASP_DIR}
    # The include list is written verbatim; lines are intentionally unindented
    # so no leading whitespace ends up in modsec_includes.conf.
    echo "include modsecurity.conf
include owasp-modsecurity-crs/crs-setup.conf
include owasp-modsecurity-crs/rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf
include owasp-modsecurity-crs/rules/REQUEST-901-INITIALIZATION.conf
include owasp-modsecurity-crs/rules/REQUEST-903.9001-DRUPAL-EXCLUSION-RULES.conf
include owasp-modsecurity-crs/rules/REQUEST-903.9002-WORDPRESS-EXCLUSION-RULES.conf
include owasp-modsecurity-crs/rules/REQUEST-903.9003-NEXTCLOUD-EXCLUSION-RULES.conf
include owasp-modsecurity-crs/rules/REQUEST-903.9004-DOKUWIKI-EXCLUSION-RULES.conf
include owasp-modsecurity-crs/rules/REQUEST-903.9005-CPANEL-EXCLUSION-RULES.conf
include owasp-modsecurity-crs/rules/REQUEST-903.9006-XENFORO-EXCLUSION-RULES.conf
include owasp-modsecurity-crs/rules/REQUEST-905-COMMON-EXCEPTIONS.conf
include owasp-modsecurity-crs/rules/REQUEST-910-IP-REPUTATION.conf
include owasp-modsecurity-crs/rules/REQUEST-911-METHOD-ENFORCEMENT.conf
include owasp-modsecurity-crs/rules/REQUEST-912-DOS-PROTECTION.conf
include owasp-modsecurity-crs/rules/REQUEST-913-SCANNER-DETECTION.conf
include owasp-modsecurity-crs/rules/REQUEST-920-PROTOCOL-ENFORCEMENT.conf
include owasp-modsecurity-crs/rules/REQUEST-921-PROTOCOL-ATTACK.conf
include owasp-modsecurity-crs/rules/REQUEST-930-APPLICATION-ATTACK-LFI.conf
include owasp-modsecurity-crs/rules/REQUEST-931-APPLICATION-ATTACK-RFI.conf
include owasp-modsecurity-crs/rules/REQUEST-932-APPLICATION-ATTACK-RCE.conf
include owasp-modsecurity-crs/rules/REQUEST-933-APPLICATION-ATTACK-PHP.conf
include owasp-modsecurity-crs/rules/REQUEST-934-APPLICATION-ATTACK-NODEJS.conf
include owasp-modsecurity-crs/rules/REQUEST-941-APPLICATION-ATTACK-XSS.conf
include owasp-modsecurity-crs/rules/REQUEST-942-APPLICATION-ATTACK-SQLI.conf
include owasp-modsecurity-crs/rules/REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION.conf
include owasp-modsecurity-crs/rules/REQUEST-944-APPLICATION-ATTACK-JAVA.conf
include owasp-modsecurity-crs/rules/REQUEST-949-BLOCKING-EVALUATION.conf
include owasp-modsecurity-crs/rules/RESPONSE-950-DATA-LEAKAGES.conf
include owasp-modsecurity-crs/rules/RESPONSE-951-DATA-LEAKAGES-SQL.conf
include owasp-modsecurity-crs/rules/RESPONSE-952-DATA-LEAKAGES-JAVA.conf
include owasp-modsecurity-crs/rules/RESPONSE-953-DATA-LEAKAGES-PHP.conf
include owasp-modsecurity-crs/rules/RESPONSE-954-DATA-LEAKAGES-IIS.conf
include owasp-modsecurity-crs/rules/RESPONSE-959-BLOCKING-EVALUATION.conf
include owasp-modsecurity-crs/rules/RESPONSE-980-CORRELATION.conf
include owasp-modsecurity-crs/rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf">modsec_includes.conf
    echo "SecRuleEngine On">modsecurity.conf
    # Activate the example configs shipped with the rule set.
    cd ${OWASP_DIR}/owasp-modsecurity-crs
    if [ -f crs-setup.conf.example ]; then
        mv crs-setup.conf.example crs-setup.conf
    fi
    cd ${OWASP_DIR}/owasp-modsecurity-crs/rules
    if [ -f REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf.example ]; then
        mv REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf.example REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf
    fi
    if [ -f RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf.example ]; then
        mv RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf.example RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf
    fi
}
# Full enable path: recreate rules dir, ensure git, download rules, write
# config, detect server flavour, enable the module.
main_owasp(){
    mk_owasp_dir
    install_git
    install_owasp
    configure_owasp
    check_lsv
    enable_modsec
}

# Require at least one argument, then process flags left to right.
check_input ${1}
while [ ! -z "${1}" ]; do
    case ${1} in
        -[hH] | -help | --help)
            help_message
            ;;
        -[eE] | -enable | --enable)
            main_owasp
            ;;
        -[dD] | -disable | --disable)
            disable_modsec
            ;;
        *)
            help_message
            ;;
    esac
    shift
done
|
#!/bin/bash
# reference: wenet
# To be run from one directory above this script.
. ./path.sh
#
root=corpus/aishell1
text=corpus/aishell1/local/lm/text
lexicon=corpus/aishell1/local/dict/lexicon.txt

# Abort early if a required input file is missing.
# Bug fix: the original tested "$x" (always unset), so the existence check
# could never fire; it must test the loop variable "$f".
for f in "$text" "$lexicon"; do
  [ ! -f "$f" ] && echo "$0: No such file $f" && exit 1;
done
# Check SRILM tools
if ! which ngram-count > /dev/null; then
    echo "srilm tools are not found, please download it and install it from: "
    echo "http://www.speech.sri.com/projects/srilm/download.html"
    echo "Then add the tools to your PATH"
    exit 1;
fi

# This script takes no arguments. It assumes you have already run
# aishell_data_prep.sh.
# It takes as input the files
# data/local/lm/text
# data/local/dict/lexicon.txt
dir=$root/local/lm
mkdir -p $dir
cleantext=$dir/text.no_oov

# Replace every word not present in the lexicon with <SPOKEN_NOISE>.
cat $text | awk -v lex=$lexicon 'BEGIN{while((getline<lex) >0){ seen[$1]=1; } }
{for(n=1; n<=NF;n++) { if (seen[$n]) { printf("%s ", $n); } else {printf("<SPOKEN_NOISE> ");} } printf("\n");}' \
> $cleantext || exit 1;

# Word counts over the cleaned text (field 1 is the utterance id, skipped).
cat $cleantext | awk '{for(n=2;n<=NF;n++) print $n; }' | sort | uniq -c | \
sort -nr > $dir/word.counts || exit 1;

# Get counts from acoustic training transcripts, and add one-count
# for each word in the lexicon (but not silence, we don't want it
# in the LM-- we'll add it optionally later).
cat $cleantext | awk '{for(n=2;n<=NF;n++) print $n; }' | \
cat - <(grep -w -v '!SIL' $lexicon | awk '{print $1}') | \
sort | uniq -c | sort -nr > $dir/unigram.counts || exit 1;

# Final word list, including sentence-boundary symbols.
cat $dir/unigram.counts | awk '{print $2}' | cat - <(echo "<s>"; echo "</s>" ) > $dir/wordlist

heldout_sent=10000 # Don't change this if you want result to be comparable with
# kaldi_lm results
mkdir -p $dir

# Strip utterance ids, then split into held-out and training portions.
cat $cleantext | awk '{for(n=2;n<=NF;n++){ printf $n; if(n<NF) printf " "; else print ""; }}' | \
head -$heldout_sent > $dir/heldout
cat $cleantext | awk '{for(n=2;n<=NF;n++){ printf $n; if(n<NF) printf " "; else print ""; }}' | \
tail -n +$heldout_sent > $dir/train

# Train a trigram LM (Kneser-Ney, interpolated) and score the held-out set.
ngram-count -text $dir/train -order 3 -limit-vocab -vocab $dir/wordlist -unk \
-map-unk "<UNK>" -kndiscount -interpolate -lm $dir/lm.arpa
ngram -lm $dir/lm.arpa -ppl $dir/heldout
|
#!/usr/bin/env bash
# Restore-side helper library for tmux-resurrect; sourced by the restore script.

CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CURRENT_DIR/variables.sh"
source "$CURRENT_DIR/helpers.sh"
source "$CURRENT_DIR/process_restore_helpers.sh"
source "$CURRENT_DIR/spinner_helpers.sh"

# delimiter
d=$'\t'

# Global variable.
# Used during the restore: if a pane already exists from before, it is
# saved in the array in this variable. Later, process running in existing pane
# is also not restored. That makes the restoration process more idempotent.
EXISTING_PANES_VAR=""

RESTORING_FROM_SCRATCH="false"
RESTORE_PANE_CONTENTS="false"
is_line_type() {
local line_type="$1"
local line="$2"
echo "$line" |
\grep -q "^$line_type"
}
check_saved_session_exists() {
local resurrect_file="$(last_resurrect_file)"
if [ ! -f $resurrect_file ]; then
display_message "Tmux resurrect file not found!"
return 1
fi
}
pane_exists() {
local session_name="$1"
local window_number="$2"
local pane_index="$3"
tmux list-panes -t "${session_name}:${window_number}" -F "#{pane_index}" 2>/dev/null |
\grep -q "^$pane_index$"
}
register_existing_pane() {
local session_name="$1"
local window_number="$2"
local pane_index="$3"
local pane_custom_id="${session_name}:${window_number}:${pane_index}"
local delimiter=$'\t'
EXISTING_PANES_VAR="${EXISTING_PANES_VAR}${delimiter}${pane_custom_id}"
}
is_pane_registered_as_existing() {
local session_name="$1"
local window_number="$2"
local pane_index="$3"
local pane_custom_id="${session_name}:${window_number}:${pane_index}"
[[ "$EXISTING_PANES_VAR" =~ "$pane_custom_id" ]]
}
restore_from_scratch_true() {
RESTORING_FROM_SCRATCH="true"
}
is_restoring_from_scratch() {
[ "$RESTORING_FROM_SCRATCH" == "true" ]
}
restore_pane_contents_true() {
RESTORE_PANE_CONTENTS="true"
}
is_restoring_pane_contents() {
[ "$RESTORE_PANE_CONTENTS" == "true" ]
}
window_exists() {
local session_name="$1"
local window_number="$2"
tmux list-windows -t "$session_name" -F "#{window_index}" 2>/dev/null |
\grep -q "^$window_number$"
}
session_exists() {
local session_name="$1"
tmux has-session -t "$session_name" 2>/dev/null
}
first_window_num() {
tmux show -gv base-index
}
tmux_socket() {
echo $TMUX | cut -d',' -f1
}
# Cache the tmux "default-command" option (falling back to "default-shell")
# in an exported global so we don't have to query the tmux server each time.
cache_tmux_default_command() {
	local shell_fallback="$(get_tmux_option "default-shell" "")"
	export TMUX_DEFAULT_COMMAND="$(get_tmux_option "default-command" "$shell_fallback")"
}
# Echo the cached tmux default command (set by cache_tmux_default_command).
tmux_default_command() {
	echo "$TMUX_DEFAULT_COMMAND"
}
# pane_creation_command <session> <window> <pane_index>
# Echo the shell command used when creating a restored pane: dump the saved
# pane contents, then exec the user's default command.
pane_creation_command() {
	local contents_file="$(pane_contents_file "restore" "${1}:${2}.${3}")"
	echo "cat '$contents_file'; exec $(tmux_default_command)"
}
# Create a new detached window in an existing session for the pane being
# restored, optionally seeding it with the saved pane contents.
new_window() {
	local session_name="$1" window_number="$2" window_name="$3" dir="$4" pane_index="$5"
	local target="${session_name}:${window_number}"
	if is_restoring_pane_contents && pane_contents_file_exists "${target}.${pane_index}"; then
		tmux new-window -d -t "$target" -n "$window_name" -c "$dir" \
			"$(pane_creation_command "$session_name" "$window_number" "$pane_index")"
	else
		tmux new-window -d -t "$target" -n "$window_name" -c "$dir"
	fi
}
# Create a brand-new detached session for the pane being restored. TMUX is
# cleared and the socket passed explicitly so this works from inside tmux.
new_session() {
	local session_name="$1" window_number="$2" window_name="$3" dir="$4" pane_index="$5"
	local pane_id="${session_name}:${window_number}.${pane_index}"
	if is_restoring_pane_contents && pane_contents_file_exists "$pane_id"; then
		TMUX="" tmux -S "$(tmux_socket)" new-session -d -s "$session_name" -n "$window_name" -c "$dir" \
			"$(pane_creation_command "$session_name" "$window_number" "$pane_index")"
	else
		TMUX="" tmux -S "$(tmux_socket)" new-session -d -s "$session_name" -n "$window_name" -c "$dir"
	fi
	# tmux numbers the first window according to `base-index`; move it when
	# the saved window number differs.
	local created_window_num="$(first_window_num)"
	if [ "$created_window_num" -ne "$window_number" ]; then
		tmux move-window -s "${session_name}:${created_window_num}" -t "${session_name}:${window_number}"
	fi
}
# Split a new pane into an existing window (window_name is accepted for a
# uniform call signature with new_window/new_session but is unused here).
new_pane() {
	local session_name="$1" window_number="$2" window_name="$3" dir="$4" pane_index="$5"
	local target="${session_name}:${window_number}"
	if is_restoring_pane_contents && pane_contents_file_exists "${target}.${pane_index}"; then
		tmux split-window -h -t "$target" -c "$dir" \
			"$(pane_creation_command "$session_name" "$window_number" "$pane_index")"
	else
		tmux split-window -h -t "$target" -c "$dir"
	fi
	# minimize window so more panes can fit
	tmux resize-pane -t "$target" -L "999"
}
# Restore a single saved "pane" record from the resurrect file.
#
# Depending on what already exists on the tmux server this either:
#   - overwrites the single pre-existing pane (when restoring from scratch),
#   - registers the pane as pre-existing (so its process is skipped later),
#   - splits a new pane into an existing window,
#   - creates a new window in an existing session, or
#   - creates a whole new session.
restore_pane() {
	local pane="$1"
	# $d is the field delimiter used by the save format (defined elsewhere).
	while IFS=$d read line_type session_name window_number window_name window_active window_flags pane_index dir pane_active pane_command pane_full_command; do
		# Saved fields carry a leading character that must be stripped
		# (see remove_first_char; presumably it protects empty fields).
		dir="$(remove_first_char "$dir")"
		window_name="$(remove_first_char "$window_name")"
		pane_full_command="$(remove_first_char "$pane_full_command")"
		if pane_exists "$session_name" "$window_number" "$pane_index"; then
			tmux rename-window -t "$window_number" "$window_name"
			if is_restoring_from_scratch; then
				# overwrite the pane
				# happens only for the first pane if it's the only registered pane for the whole tmux server
				local pane_id="$(tmux display-message -p -F "#{pane_id}" -t "$session_name:$window_number")"
				new_pane "$session_name" "$window_number" "$window_name" "$dir" "$pane_index"
				tmux kill-pane -t "$pane_id"
			else
				# Pane exists, no need to create it!
				# Pane existence is registered. Later, its process also won't be restored.
				register_existing_pane "$session_name" "$window_number" "$pane_index"
			fi
		elif window_exists "$session_name" "$window_number"; then
			tmux rename-window -t "$window_number" "$window_name"
			new_pane "$session_name" "$window_number" "$window_name" "$dir" "$pane_index"
		elif session_exists "$session_name"; then
			new_window "$session_name" "$window_number" "$window_name" "$dir" "$pane_index"
		else
			new_session "$session_name" "$window_number" "$window_name" "$dir" "$pane_index"
		fi
	done < <(echo "$pane")
}
# Restore the client's session selection from a saved "state" record.
restore_state() {
	local state_line="$1"
	while IFS=$d read line_type client_session client_last_session; do
		# Visit the last session first so tmux records it as the
		# alternate session, then switch to the active one.
		tmux switch-client -t "$client_last_session"
		tmux switch-client -t "$client_session"
	done <<< "$state_line"
}
# Recreate a grouped session from a saved "grouped_session" record by
# attaching a new session to the original session's window group.
restore_grouped_session() {
	local record="$1"
	while IFS=$d read line_type grouped_session original_session alternate_window active_window; do
		TMUX="" tmux -S "$(tmux_socket)" new-session -d -s "$grouped_session" -t "$original_session"
	done <<< "$record"
}
# Re-select the alternate and active windows of a restored grouped session.
restore_active_and_alternate_windows_for_grouped_sessions() {
	local record="$1"
	while IFS=$d read line_type grouped_session original_session alternate_window_index active_window_index; do
		alternate_window_index="$(remove_first_char "$alternate_window_index")"
		active_window_index="$(remove_first_char "$active_window_index")"
		# Visit the alternate window first so the active one ends up current.
		if [ -n "$alternate_window_index" ]; then
			tmux switch-client -t "${grouped_session}:${alternate_window_index}"
		fi
		if [ -n "$active_window_index" ]; then
			tmux switch-client -t "${grouped_session}:${active_window_index}"
		fi
	done <<< "$record"
}
# True when the user set the "overwrite" tmux option to any non-empty value,
# meaning existing server state must never be clobbered.
never_ever_overwrite() {
	local value="$(get_tmux_option "$overwrite_option" "")"
	[ -n "$value" ]
}
# Set the restore-from-scratch flag when the whole tmux server contains just
# one pane (unless the user opted out via the overwrite option).
detect_if_restoring_from_scratch() {
	if never_ever_overwrite; then
		return
	fi
	local pane_count="$(tmux list-panes -a | wc -l | sed 's/ //g')"
	if [ "$pane_count" -eq 1 ]; then
		restore_from_scratch_true
	fi
}
# Set the restore-pane-contents flag when the capture option is enabled; the
# tmux default command only needs caching in that case.
detect_if_restoring_pane_contents() {
	capture_pane_contents_option_on || return 0
	cache_tmux_default_command
	restore_pane_contents_true
}
# functions called from main (ordered)

# Restore every saved pane record from the resurrect file, optionally after
# unpacking the archived pane contents (cleaned up again at the end).
restore_all_panes() {
	detect_if_restoring_from_scratch   # sets a global variable
	detect_if_restoring_pane_contents  # sets a global variable
	if is_restoring_pane_contents; then
		pane_content_files_restore_from_archive
	fi
	# FIX: `read -r` keeps backslashes in saved lines intact, and quoting
	# the file name keeps resurrect paths containing spaces working.
	while read -r line; do
		if is_line_type "pane" "$line"; then
			restore_pane "$line"
		fi
	done < "$(last_resurrect_file)"
	if is_restoring_pane_contents; then
		rm "$(pane_contents_dir "restore")"/*
	fi
}
# Re-apply the saved layout string of every window.
restore_pane_layout_for_each_window() {
	# FIX: quote the resurrect file path (spaces) and use `read -r` so
	# backslashes in saved fields are not mangled.
	\grep '^window' "$(last_resurrect_file)" |
		while IFS=$d read -r line_type session_name window_number window_active window_flags window_layout; do
			tmux select-layout -t "${session_name}:${window_number}" "$window_layout"
		done
}
# Re-inject saved shell history into each restored pane.
#
# Reads pane records (fields: $2 session, $3 window, $7 pane index,
# $10 pane command) and, for every pane that was actually (re)created during
# this restore (i.e. not registered as pre-existing), sends the shell a
# command that loads its saved history file.
restore_shell_history() {
	awk 'BEGIN { FS="\t"; OFS="\t" } /^pane/ { print $2, $3, $7, $10; }' $(last_resurrect_file) |
	while IFS=$d read session_name window_number pane_index pane_command; do
		if ! is_pane_registered_as_existing "$session_name" "$window_number" "$pane_index"; then
			local pane_id="$session_name:$window_number.$pane_index"
			local history_file="$(resurrect_history_file "$pane_id" "$pane_command")"
			if [ "$pane_command" = "bash" ]; then
				# bash: `history -r` reads the history file; C-m submits it.
				local read_command="history -r '$history_file'"
				tmux send-keys -t "$pane_id" "$read_command" C-m
			elif [ "$pane_command" = "zsh" ]; then
				# zsh: extract the key sequence bound to accept-line (usually
				# "^M") so the typed command can be submitted, then load
				# history with `fc -R` and clear the screen.
				local accept_line="$(expr "$(zsh -i -c bindkey | grep -m1 '\saccept-line$')" : '^"\(.*\)".*')"
				local read_command="fc -R '$history_file'; clear"
				tmux send-keys -t "$pane_id" "$read_command" "$accept_line"
			fi
		fi
	done
}
# Restore the running process in each pane, if the user enabled that option.
#
# Selects pane records whose saved full command (field $11) is not the bare
# ":" placeholder and re-launches the command in the corresponding pane.
restore_all_pane_processes() {
	if restore_pane_processes_enabled; then
		# NOTE(review): the while loop below runs in a pipeline subshell, so
		# this declaration does not actually scope its variable there —
		# presumably kept for documentation purposes.
		local pane_full_command
		awk 'BEGIN { FS="\t"; OFS="\t" } /^pane/ && $11 !~ "^:$" { print $2, $3, $7, $8, $11; }' $(last_resurrect_file) |
		while IFS=$d read session_name window_number pane_index dir pane_full_command; do
			# Strip the leading character the save format adds to fields.
			dir="$(remove_first_char "$dir")"
			pane_full_command="$(remove_first_char "$pane_full_command")"
			restore_pane_process "$pane_full_command" "$session_name" "$window_number" "$pane_index" "$dir"
		done
	fi
}
# Select the saved active pane ($9 == 1) in each window.
restore_active_pane_for_each_window() {
	# FIX: quote the resurrect file path so paths with spaces work; use
	# `read -r` to keep backslashes intact.
	awk 'BEGIN { FS="\t"; OFS="\t" } /^pane/ && $9 == 1 { print $2, $3, $7; }' "$(last_resurrect_file)" |
		while IFS=$d read -r session_name window_number active_pane; do
			tmux switch-client -t "${session_name}:${window_number}"
			tmux select-pane -t "$active_pane"
		done
}
# Re-zoom every window whose active pane was saved with the Z window flag.
restore_zoomed_windows() {
	# FIX: quote the resurrect file path so paths with spaces work.
	awk 'BEGIN { FS="\t"; OFS="\t" } /^pane/ && $6 ~ /Z/ && $9 == 1 { print $2, $3; }' "$(last_resurrect_file)" |
		while IFS=$d read -r session_name window_number; do
			tmux resize-pane -t "${session_name}:${window_number}" -Z
		done
}
# Recreate every saved grouped session and re-select its windows.
restore_grouped_sessions() {
	# FIX: `read -r` preserves backslashes; quoting the file name keeps
	# paths with spaces working.
	while read -r line; do
		if is_line_type "grouped_session" "$line"; then
			restore_grouped_session "$line"
			restore_active_and_alternate_windows_for_grouped_sessions "$line"
		fi
	done < "$(last_resurrect_file)"
}
# Re-select windows that were saved as active or alternate ($5 contains
# "*" or "-"); sorting puts the active window last so it ends up current.
restore_active_and_alternate_windows() {
	# FIX: quote the resurrect file path so paths with spaces work.
	awk 'BEGIN { FS="\t"; OFS="\t" } /^window/ && $5 ~ /[*-]/ { print $2, $4, $3; }' "$(last_resurrect_file)" |
		sort -u |
		while IFS=$d read -r session_name active_window window_number; do
			tmux switch-client -t "${session_name}:${window_number}"
		done
}
# Replay every saved "state" record to restore the client's active and
# alternate sessions.
restore_active_and_alternate_sessions() {
	# FIX: `read -r` preserves backslashes; quoting the file name keeps
	# paths with spaces working.
	while read -r line; do
		if is_line_type "state" "$line"; then
			restore_state "$line"
		fi
	done < "$(last_resurrect_file)"
}
# Entry point: run the full restore in a fixed order, with user hooks
# executed before/after the main phases.
main() {
	if supported_tmux_version_ok && check_saved_session_exists; then
		start_spinner "Restoring..." "Tmux restore complete!"
		execute_hook "pre-restore-all"
		restore_all_panes
		# Output and errors are discarded here — presumably select-layout
		# can fail harmlessly for resized windows; TODO confirm.
		restore_pane_layout_for_each_window >/dev/null 2>&1
		execute_hook "pre-restore-history"
		if save_shell_history_option_on; then
			restore_shell_history
		fi
		execute_hook "pre-restore-pane-processes"
		restore_all_pane_processes
		# below functions restore exact cursor positions
		restore_active_pane_for_each_window
		restore_zoomed_windows
		restore_grouped_sessions # also restores active and alt windows for grouped sessions
		restore_active_and_alternate_windows
		restore_active_and_alternate_sessions
		execute_hook "post-restore-all"
		stop_spinner
		display_message "Tmux restore complete!"
	fi
}
main
|
/** A point in 2D space with x and y coordinates. */
class Position2D {
  constructor(x, y) {
    this.x = x;
    this.y = y;
  }

  /** Returns the Euclidean distance from this point to `otherPosition`. */
  distanceTo(otherPosition) {
    const dx = otherPosition.x - this.x;
    const dy = otherPosition.y - this.y;
    return Math.sqrt(dx * dx + dy * dy);
  }
}
|
#!/bin/bash
# Regenerate all published outputs in dependency order.
# FIX: the original shebang was `#!bin/bash` (a relative path), which fails
# to execute unless the script is run from the filesystem root.

## Update data
Rscript R/update-data.R

## Update delays
Rscript R/update-delays.R

## Update Rt estimates
Rscript R/update-rt.R

## Update report
Rscript -e "rmarkdown::render('docs/index.Rmd')"
|
<gh_stars>1-10
import React, { Component } from 'react';
class MasterSwitch extends Component {
powerOn() {
fetch("http://" + window.location.hostname +":3000/api/control/" + this.props.deviceInfo._id + "/switch/" + 255);
}
powerOff() {
fetch("http://" + window.location.hostname +":3000/api/control/" + this.props.deviceInfo._id + "/switch/" + 0);
}
removeClick = () => {
this.props.removeFunction();
}
render() {
let LastCommandPower = "off"
if (this.props.deviceInfo.channels[0].basicValue/255 == 0) LastCommandPower = "off";
else LastCommandPower = "on";
return (
<div class="device" id={this.props.deviceInfo._id}>
<section class="deviceHeader dis-flx">
<img alt="Master Switch Icon" class="smallIcon neutralIcon" src="images/deviceIcons/all_switch_on.svg" ></img>
<h3 class="deviceTitle">Master Switch</h3>
</section>
<section class="deviceContent">
<h4 class="deviceStatus">All Power Plugs Power Draw: {this.props.deviceInfo.channels[0].sensorValue/10} Watts</h4>
<h4 class="deviceStatus">Last Power Command: {LastCommandPower}</h4>
<div class="controls">
<button class="powerOn power" onClick={this.powerOn.bind(this)}>On</button>
<div class="vl"></div>
<button class="powerOff power" onClick={this.powerOff.bind(this)}>Off</button>
</div>
</section>
<section class="deviceFooter dis-flx">
<h5>Last Updated: ...</h5>
<img class="smallIcon roundButton dangerIcon" onClick={this.removeClick} src="images/generalIcons/remove.svg"></img>
</section>
</div>
);
}
}
export default MasterSwitch;
|
// Copyright 2017 The TIE Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Directive for the learner "practice question" view.
*/
tie.directive('learnerView', [function() {
return {
restrict: 'E',
scope: {
tieId: '@',
questionId: '@',
showOutput: '=',
showError: '=',
showFeedback: '='
},
template: `
<div class="tie-wrapper protractor-test-tie-wrapper">
<div class="tie-question-ui-outer">
<div class="tie-question-ui-inner">
<div class="tie-question-container">
<h1 class="tie-question-title">{{title}}</h1>
<div class="tie-previous-instructions" ng-if="!pageIsIframed">
<div ng-repeat="previousInstruction in previousInstructions track by $index">
<div ng-repeat="instruction in previousInstruction track by $index">
<p ng-if="instruction.type == 'text'">
{{instruction.content}}
</p>
<pre class="tie-question-code" ng-if="instruction.type == 'code'">{{instruction.content}}</pre>
</div>
<hr>
</div>
</div>
<div class="tie-instructions" ng-if="!pageIsIframed">
<div ng-repeat="instruction in instructions">
<p ng-if="instruction.type == 'text'">
{{instruction.content}}
</p>
<pre class="tie-question-code" ng-if="instruction.type == 'code'">{{instruction.content}}</pre>
</div>
</div>
</div>
<div class="tie-window-container">
<div class="tie-feedback-ui protractor-test-feedback-ui">
<div class="tie-feedback-window" ng-if="showFeedback">
<div class="tie-feedback-container" ng-class="{'pulse-animation-enabled': pulseAnimationEnabled}">
<pre class="tie-feedback-text" ng-if="!feedbackIsDisplayed">{{feedbackWindowMessage}}</pre>
<transcript-paragraphs-container ng-if="feedbackIsDisplayed"></transcript-paragraphs-container>
</div>
</div>
</div>
<div class="tie-coding-ui protractor-test-coding-ui">
<div class="tie-lang-terminal">
<div class="tie-user-terminal" ng-class="{'print-mode': showOutput}">
<div class="tie-coding-terminal">
<div class="tie-codemirror-container"
tabindex="0"
ng-keypress="onKeypressCodemirrorContainer($event)"
ng-focus="onFocusCodemirrorContainer()">
<ui-codemirror ui-codemirror-opts="codeMirrorOptions"
ng-model="editorContents.code"
ng-change="onCodeChange()"
ng-if="!accessibleMode"
class="protractor-test-code-input-element">
</ui-codemirror>
<ui-codemirror ng-model="editorContents.code"
ui-codemirror-opts="accessibleCodeMirrorOptions"
ng-change="onCodeChange()"
ng-if="accessibleMode"
class="protractor-test-code-input-element">
</ui-codemirror>
</div>
</div>
</div>
<div class="tie-code-auto-save"
ng-show="autosaveTextIsDisplayed">
Saving code...
</div>
<button class="tie-submit-button tie-button tie-button-green protractor-test-submit-code-button" ng-if="pageIsIframed" ng-click="submitToParentPage(editorContents.code)" title="Click anytime you want to submit your code">
Submit for Grading
</button>
<button class="tie-run-button tie-button protractor-test-run-code-button" ng-class="{'tie-button-green': !pageIsIframed}" ng-click="submitCode(editorContents.code)" ng-disabled="SessionHistoryService.isNewTranscriptParagraphPending()" title="Click anytime you want feedback on your code">
RUN
</button>
<div class="tie-snapshot-container">
<div class="tie-previous-snapshot-button-container">
<button class="tie-previous-button tie-button protractor-test-previous-button"
ng-click="revertToPreviousSnapshot()"
ng-disabled="previousButtonIsDisabled"
title="Click to go back to the previous snapshot.">
PREVIOUS
</button>
<button class="tie-snapshot-button tie-button protractor-test-snapshot-button"
ng-click="showSnapshotMenu()"
title="Click to view all previous snapshots.">
▼
</button>
</div>
<div class="tie-snapshot-menu"
ng-show="snapshotMenuIsOpen">
<ul class="tie-snapshot-menu-content protractor-test-snapshot-menu">
<li ng-repeat="i in totalSnapshots"
ng-click="revertToSelectedSnapshot(i.number)">
{{i.title}}
</li>
</ul>
</div>
</div>
</div>
</div>
<div class="tie-output-ui protractor-test-output-ui">
<div class="tie-lang-terminal">
<div class="tie-user-terminal" ng-class="{'print-mode': showOutput}">
<div class="tie-print-terminal" ng-if="showOutput && showError">
<div class="tie-stdout">{{(stdout || syntaxError)}}</div>
</div>
<div class="tie-print-terminal" ng-if="showOutput && !showError">
<div class="tie-stdout">{{(stdout)}}</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="tie-options-row">
<ul>
<li class="tie-footer-left-aligned-link" ng-if="TERMS_OF_USE_URL">
<a target="_blank" ng-href="{{TERMS_OF_USE_URL}}">Terms of Use</a>
</li>
<li class="tie-footer-left-aligned-link" ng-click="onPrivacyClick()">
<a href="#" class="protractor-test-privacy-link">Privacy</a>
</li>
<li class="tie-footer-right-aligned-link">
<a target="_blank" class="protractor-test-about-link" ng-href="{{ABOUT_TIE_URL}}">{{ABOUT_TIE_LABEL}}</a>
</li>
</ul>
</div>
</div>
</div>
<div aria-live="assertive">
<div role="alert" ng-if="ariaLiveMessage.text">{{ariaLiveMessage.text}}</div>
</div>
<style>
div.CodeMirror span.CodeMirror-matchingbracket {
color: rgb(75, 206, 75);
}
.tie-arrow-highlighter {
background-color: white;
border-radius: 100px;
box-shadow: 0px 0px 42px 67px white;
height: 50px;
left: calc(50% - 25px);
position: absolute;
top: calc(50% - 25px);
width: 50px;
}
.tie-button {
background-color: #ffffff;
border-radius: 4px;
border-style: none;
color: black;
cursor: pointer;
display: block;
font-family: Roboto, 'Helvetica Neue', 'Lucida Grande', sans-serif;
font-size: 12px;
height: 30px;
margin-right: 10px;
outline: none;
padding: 1px 6px;
width: 110px;
}
.tie-button:hover {
border: 1px solid #e4e4e4;
}
.tie-button:active {
border-color: #a0a0a0;
}
.tie-button-blue {
background-color: #448AFF;
border: none;
color: #ffffff;
outline: none;
}
.tie-button-blue:hover {
background-color: #2979FF;
}
.tie-button-blue:active {
background-color: #2962FF;
}
.tie-button-red {
background-color: #d8d8d8;
border: none;
color: #000000;
outline: none;
}
.tie-button-red:hover {
background-color: #bdbdbd;
}
.tie-button-red:active {
background-color: #b0b0b0;
}
.tie-button-green {
background-color: #a2d6a4;
border: none;
outline: none;
}
.tie-button-green:hover {
background-color: #669e68;
}
.tie-button-green:active {
background-color: #669e68;
}
.tie-button-green[disabled] {
opacity: 0.4;
}
.tie-button-green[disabled]:hover {
border: none;
}
.tie-button-gray {
background-color: #a7a7a7;
border: none;
outline: none;
}
.tie-button-gray:hover {
border-color: #777777;
}
.tie-button-gray:active {
background-color: #777777;
}
.tie-button-gray[disabled] {
opacity: 0.4;
}
.tie-button-gray[disabled]:hover {
border: none;
}
.tie-code-auto-save {
font-family: Roboto, 'Helvetica Neue', 'Lucida Grande', sans-serif;
font-size: 13px;
float: left;
margin-top: 14px;
margin-left: 0;
}
.tie-coding-terminal .CodeMirror {
/* Overwriting codemirror defaults */
height: 100%;
}
.tie-codemirror-container {
width: 100%;
}
.tie-coding-terminal {
background-color: rgb(255, 255, 255);
display: flex;
font-size: 13px;
height: 100%;
position: relative;
width: 100%;
-webkit-font-smoothing: antialiased;
}
.print-mode .tie-coding-terminal {
display: flex;
font-size: 13px;
height: 228px;
position: relative;
width: 100%;
}
.tie-coding-window {
display: flex;
}
.tie-coding-terminal:focus, .tie-run-button:focus,
.tie-select-menu:focus {
outline: 0;
}
.tie-coding-ui, .tie-feedback-ui, .tie-output-ui {
display: inline-block;
margin: 8px;
white-space: normal;
}
.tie-output-ui {
width: 25%;
}
.tie-coding-ui {
width: 50%;
}
.tie-feedback-container {
line-height: 1.2em;
}
.tie-feedback-text {
white-space: pre-line;
font-family: sans-serif;
padding: 5%;
}
.tie-feedback-window {
background-color: #FFFFF7;
font-size: 14px;
height: 228px;
width: 100%;
max-width: 700px;
overflow: auto;
padding: 0;
}
.tie-feedback-error-string {
color: #F44336;
}
.tie-footer-left-aligned-link {
float: left;
}
.tie-footer-right-aligned-link {
float: right;
}
.tie-instructions {
white-space: normal;
}
.tie-lang-select-menu {
float: left;
margin-top: 10px;
}
.tie-lang-terminal {
display: inline;
}
.tie-options-row {
padding-left: 32px;
padding-right: 32px;
}
.tie-options-row a {
color: #696969;
display: block;
line-height: 25px;
padding: 5px;
text-decoration: none;
}
.tie-options-row li {
margin: 5px;
}
.tie-options-row ul {
font-size: 11px;
list-style-type: none;
margin: 0;
padding: 0;
}
.tie-options-row a:hover {
text-decoration: underline;
}
.tie-previous-instructions {
opacity: 0.5;
}
.tie-print-terminal {
background-color: #ffffff;
height: 228px;
overflow: auto;
width: 100%;
}
.tie-question-container {
width: 100%;
margin: 8px;
}
.tie-question-code {
background-color: #ffffff;
border: 1px solid #ccc;
font-family: monospace;
font-size: 13px;
padding: 10px;
white-space: -moz-pre-wrap;
white-space: -o-pre-wrap;
white-space: -pre-wrap;
white-space: pre-wrap;
word-wrap: break-word;
}
.tie-question-title {
color: #212121;
font-size: 18px;
}
.tie-feedback-ui {
width: 25%;
vertical-align: top;
}
.tie-question-ui-inner {
display: flex;
padding-left: 32px;
padding-right: 32px;
white-space: nowrap;
max-width: 1170px;
flex-direction: column;
}
.tie-question-ui-outer {
margin-left: auto;
margin-right: auto;
max-width: 1170px;
min-width: 1058px;
padding-top: 30px;
}
.tie-run-button, .tie-step-button, .tie-snapshot-button, .tie-previous-button {
float: right;
margin-right: 0;
margin-top: 10px;
position: relative;
}
.tie-step-button {
margin-right: 5px;
}
.tie-previous-snapshot-button-container {
display: flex;
flex-direction: row;
}
.tie-previous-button {
border-radius: 4px 0px 0px 4px;
border-right: 1px solid #a9a9a9;
}
.tie-previous-button:hover {
background-color: #ddd;
border-right: 1px solid #a9a9a9;
}
.tie-previous-button:hover + .tie-snapshot-button {
background-color: #ddd;
}
.tie-previous-button:disabled {
background-color: #ddd;
color: #a9a9a9;
}
.tie-snapshot-container {
position: relative;
float: right;
margin-right: 5px;
}
.tie-snapshot-button {
border-radius: 0px 4px 4px 0px;
width: 25px;
}
.tie-snapshot-button:hover {
background-color: #ddd;
}
.tie-snapshot-menu {
background-color: #ffffff;
border-radius: 4px;
width: 110px;
position: absolute;
top: 40px;
right: 0px;
box-shadow: 5px 10px 18px #a9a9a9;
}
.tie-snapshot-menu-content {
list-style-type: none;
font-size: 13px;
padding: 0px;
margin: 0px;
border-bottom: 1px solid #ddd;
width: 100%;
text-align: left;
cursor: pointer;
}
.tie-snapshot-menu-content li:last-child {
font-weight: bold;
border-bottom: none;
}
.tie-snapshot-menu-content li {
padding: 10px;
}
.tie-snapshot-menu-content li:hover {
background-color: #ddd;
}
.tie-stdout {
font-family: monospace;
font-size: 13px;
line-height: 1.2em;
padding: 10% 5% 10% 5%;
white-space: pre-wrap;
}
.tie-submit-button {
float: right;
margin-left: 7px;
margin-right: 0;
margin-top: 10px;
position: relative;
width: 122px;
}
.tie-user-terminal {
height: 228px;
display: flex;
}
.tie-window-container {
display: flex;
}
.CodeMirror-linenumber {
/* Increase the contrast of the line numbers from the background. */
color: #424242;
}
.CodeMirror-line.tie-syntax-error-line {
background: #FBC2C4;
}
.tie-wrapper {
height: 100%;
}
@media screen and (max-width: 1058px) {
.tie-options-row {
padding-left: 0;
padding-right: 0;
}
.tie-question-ui-inner {
display: flex;
flex-direction: column;
padding-left: 0;
padding-right: 0;
white-space: nowrap;
width: 677px;
}
.tie-question-ui-outer {
margin-left: auto;
margin-right: auto;
min-width: 691px;
padding-top: 30px;
width: 662px;
}
.tie-feedback-window {
background-color: #FFFFF7;
font-size: 14px;
overflow: auto;
padding: 0;
width: 662px;
}
.tie-coding-ui, .tie-feedback-ui, .tie-output-ui {
width: 662px;
}
.tie-question-ui-inner {
width: 662px;
}
.tie-window-container {
flex-direction: column;
}
}
</style>
`,
controller: [
'$scope', '$interval', '$timeout', '$location', '$window',
'ConversationManagerService', 'QuestionDataService', 'LANGUAGE_PYTHON',
'FeedbackObjectFactory', 'LearnerViewSubmissionResultObjectFactory',
'EventHandlerService', 'LocalStorageService',
'ServerHandlerService', 'SessionIdService', 'ThemeNameService',
'UnpromptedFeedbackManagerService', 'CurrentQuestionService',
'FeedbackDisplayService', 'PrintTerminalService', 'ParentPageService',
'ALL_SUPPORTED_LANGUAGES', 'SUPPORTED_LANGUAGE_LABELS',
'SessionHistoryService', 'AutosaveService', 'SECONDS_TO_MILLISECONDS',
'CODE_CHANGE_DEBOUNCE_SECONDS', 'DISPLAY_AUTOSAVE_TEXT_SECONDS',
'SERVER_URL', 'DEFAULT_QUESTION_ID', 'FEEDBACK_CATEGORIES',
'DEFAULT_EVENT_BATCH_PERIOD_SECONDS', 'DELAY_STYLE_CHANGES',
'CODE_RESET_CONFIRMATION_MESSAGE', 'PRIVACY_URL', 'ABOUT_TIE_URL',
'ABOUT_TIE_LABEL', 'TERMS_OF_USE_URL',
function(
$scope, $interval, $timeout, $location, $window,
ConversationManagerService, QuestionDataService, LANGUAGE_PYTHON,
FeedbackObjectFactory, LearnerViewSubmissionResultObjectFactory,
EventHandlerService, LocalStorageService,
ServerHandlerService, SessionIdService, ThemeNameService,
UnpromptedFeedbackManagerService, CurrentQuestionService,
FeedbackDisplayService, PrintTerminalService, ParentPageService,
ALL_SUPPORTED_LANGUAGES, SUPPORTED_LANGUAGE_LABELS,
SessionHistoryService, AutosaveService, SECONDS_TO_MILLISECONDS,
CODE_CHANGE_DEBOUNCE_SECONDS, DISPLAY_AUTOSAVE_TEXT_SECONDS,
SERVER_URL, DEFAULT_QUESTION_ID, FEEDBACK_CATEGORIES,
DEFAULT_EVENT_BATCH_PERIOD_SECONDS, DELAY_STYLE_CHANGES,
CODE_RESET_CONFIRMATION_MESSAGE, PRIVACY_URL, ABOUT_TIE_URL,
ABOUT_TIE_LABEL, TERMS_OF_USE_URL) {
$scope.PRIVACY_URL = PRIVACY_URL;
$scope.ABOUT_TIE_URL = ABOUT_TIE_URL;
$scope.ABOUT_TIE_LABEL = ABOUT_TIE_LABEL;
$scope.TERMS_OF_USE_URL = TERMS_OF_USE_URL;
$scope.SessionHistoryService = SessionHistoryService;
var KEY_CODE_ENTER = 13;
var ARIA_LIVE_MESSAGE_RANDOM_ID_RANGE = 10000;
var ARIA_LIVE_MESSAGE_TIMEOUT_MILLISECONDS = 2000;
var ARIA_LIVE_MESSAGE_CODEMIRROR_CONTAINER_FOCUSED = (
'Press Enter to access the code editor.');
var ARIA_LIVE_MESSAGE_CODEMIRROR_ENTERED = (
'Press Escape to exit the code editor.');
/**
* Name of the class for styling highlighted syntax errors.
*
* @type {string}
* @constant
*/
var CSS_CLASS_SYNTAX_ERROR = 'tie-syntax-error-line';
/**
* Sets a local variable language to the value of the constant
* LANGUAGE_PYTHON.
*
* @type {string}
*/
var language = LANGUAGE_PYTHON;
/**
* A dictionary of labels, keyed by their supported language
*/
$scope.supportedLanguageLabels = SUPPORTED_LANGUAGE_LABELS;
$scope.supportedLanguageCount = Object.keys(
SUPPORTED_LANGUAGE_LABELS).length;
/**
* Sets a local variable currentSnapshotIndex to the current
* snapshot index.
*
* @type {number}
*/
$scope.currentSnapshotIndex = 0;
/**
* Defines the total number of snapshots in the editor.
*
* @type {Array}
*/
$scope.totalSnapshots = [];
/**
* Defines whether the snapshot menu is displayed.
*/
$scope.snapshotMenuIsOpen = false;
/**
* Defines whether the previous button is disabled.
*/
$scope.previousButtonIsDisabled = false;
/**
* Defines the feedback message to be displayed in the feedback window.
*/
$scope.feedbackWindowMessage = "As you run your code feedback will " +
"appear here.\n\nYou can also use the STEP THROUGH button to walk " +
"through your code line by line, or the PREVIOUS button to return " +
"to any submissions you've made previously for this exercise.";
/**
* Defines whether the feedback is displayed in the feedback window.
*/
$scope.feedbackIsDisplayed = false;
/**
* Defines the output to be displayed.
*/
$scope.stdout = "Click 'Run' to see the output of your code.";
/**
* Defines whether feedback is supported, and thus whether the feedback
* window should be displayed.
*/
$scope.feedbackIsSupported =
FeedbackDisplayService.isFeedbackSupported();
/**
* Defines whether printing is supported, and thus whether the print
* terminal should be displayed.
*/
$scope.printingIsSupported = PrintTerminalService.isPrintingSupported();
/**
* Defines whether error printing is supported, and thus whether syntax
* errors should be displayed in the output window.
*/
$scope.errorPrintingIsSupported =
PrintTerminalService.isErrorPrintingSupported();
/**
* Defines whether TIE is currently being framed by the expected
* parent origin. If it is, the "Submit Code" button should be
* displayed.
*/
$scope.pageIsIframed = ParentPageService.isIframed();
/**
* The ARIA alert message to show temporarily, as well as a random
* integer generated to uniquely identify when it was first shown. When
* the time comes for the message to be removed, the random identifier
* is used to verify first that the removed message matches the
* originally-added one.
*
* @type {string}
*/
$scope.ariaLiveMessage = {
text: '',
randomIdentifier: -1
};
/**
* We use an object here to prevent the child scope introduced by ng-if
* from shadowing the parent scope.
*
* See http://stackoverflow.com/a/21512751
* .
* @type {{code: string}}
*/
$scope.editorContents = {
code: ''
};
// Whether to show the more accessible version of the CodeMirror
// editor. "Accessible mode" is triggered by the user tabbing to the
// editor.
$scope.accessibleMode = false;
/**
* Stores a promise for the $interval process that automatically
* retriggers the codeChangeEvent, so that that process can be
* cancelled later.
*
* @type {Promise|null}
*/
$scope.codeChangeLoopPromise = null;
/**
* String to store the code being cached within this directive
* controller (this code may not have been submitted yet). This is used
* to detect a local code change so that we can show appropriate
* feedback if the learner is going down a rabbit-hole.
*
* @type {string}
*/
var cachedCode;
/**
* Stores the feedback to be shown when the user completes a question.
*
* @type {Feedback}
*/
var congratulatoryFeedback = FeedbackObjectFactory.create(
FEEDBACK_CATEGORIES.SUCCESSFUL, true);
/**
* Stores the index of the task that the user is currently trying to
* complete.
*
* @type {number}
*/
var currentTaskIndex = null;
/**
* Stores the `div` node from the DOM where the feedback will be
* rendered.
*
* @type {DOM}
*/
var feedbackWindowDiv =
document.getElementsByClassName('tie-feedback-window')[0];
/**
 * Shows an aria-live alert message, then clears it again after
 * ARIA_LIVE_MESSAGE_TIMEOUT_MILLISECONDS unless a newer message has
 * replaced it in the meantime (detected via the random identifier).
 *
 * @param {string} messageText The message to show.
 */
var showAriaLiveMessage = function(messageText) {
  // FIX: Math.random() takes no arguments — the original
  // Math.random(ARIA_LIVE_MESSAGE_RANDOM_ID_RANGE) ignored its argument
  // and produced a float in [0, 1). Scale and floor explicitly to get an
  // integer identifier in [0, RANGE).
  var randomInt = Math.floor(
    Math.random() * ARIA_LIVE_MESSAGE_RANDOM_ID_RANGE);
  $scope.ariaLiveMessage.text = messageText;
  $scope.ariaLiveMessage.randomIdentifier = randomInt;
  $timeout(function() {
    // Only clear the message if it is still the one this call added.
    if ($scope.ariaLiveMessage.randomIdentifier === randomInt) {
      $scope.ariaLiveMessage.text = '';
      $scope.ariaLiveMessage.randomIdentifier = -1;
    }
  }, ARIA_LIVE_MESSAGE_TIMEOUT_MILLISECONDS);
};
$scope.onVisibilityChange = function() {
var question = CurrentQuestionService.getCurrentQuestion();
// If the question is null (such as if there's an error getting it)
// then we 404 on a server version. If that happens, this throws a
// mysterious console error since we call null.getTasks(), which is
// bad. This prevents that error.
if (question) {
var tasks = question.getTasks();
// When a user changes tabs (or comes back), add a SessionPause
// or SessionResumeEvent, respectively.
var hiddenAttributeName = (
$scope.determineHiddenAttributeNameForBrowser());
if (hiddenAttributeName !== null && tasks !== null &&
tasks.length > currentTaskIndex) {
if ($scope.isDocumentHidden(hiddenAttributeName)) {
EventHandlerService.createSessionPauseEvent(
tasks[currentTaskIndex].getId());
} else {
EventHandlerService.createSessionResumeEvent(
tasks[currentTaskIndex].getId());
}
}
}
};
// Move document[hiddenAttributeName] getter into function for testing.
$scope.isDocumentHidden = function(hiddenAttributeName) {
return document[hiddenAttributeName];
};
// Move document.hidden getter into function for testing.
$scope.getHiddenAttribute = function() {
return document.hidden;
};
// Move document.msHidden getter into function for testing.
$scope.getMsHiddenAttribute = function() {
return document.msHidden;
};
// Move document.webkitHidden getter into function for testing.
$scope.getWebkitHiddenAttribute = function() {
return document.webkitHidden;
};
/**
* Different browsers call the "hidden" attribute different things.
* This method determines what the current browser calls its "hidden"
* attribute and returns it.
*/
$scope.determineHiddenAttributeNameForBrowser = function() {
if (typeof $scope.getHiddenAttribute() !== 'undefined') {
// Opera 12.10 and Firefox 18 and later support
return 'hidden';
} else if (typeof $scope.getMsHiddenAttribute() !== 'undefined') {
return 'msHidden';
} else if (typeof $scope.getWebkitHiddenAttribute() !== 'undefined') {
return 'webkitHidden';
}
return null;
};
$scope.determineVisibilityChangeAttributeNameForBrowser = function() {
// Handle page visibility change
if (typeof $scope.getHiddenAttribute() !== 'undefined') {
// Opera 12.10 and Firefox 18 and later support
return 'visibilitychange';
} else if (typeof $scope.getMsHiddenAttribute() !== 'undefined') {
return 'msvisibilitychange';
} else if (
typeof $scope.getWebkitHiddenAttribute() !== 'undefined') {
return 'webkitvisibilitychange';
}
// This should never happen, as hiddenAttributeName relies on the same
// criteria to be non-null.
return null;
};
$scope.setEventListenerForVisibilityChange = function() {
var hiddenAttributeName = (
$scope.determineHiddenAttributeNameForBrowser());
if (typeof document.addEventListener === 'undefined' ||
hiddenAttributeName === null) {
// Browser either doesn't support addEventListener or
// the Page Visibility API.
} else {
var visibilityChange = (
$scope.determineVisibilityChangeAttributeNameForBrowser());
if (visibilityChange !== null) {
document.addEventListener(
visibilityChange, $scope.onVisibilityChange, false);
}
}
};
$scope.setEventListenerForVisibilityChange();
/**
* Takes you to the Privacy page, if specified.
*/
$scope.onPrivacyClick = function() {
if (PRIVACY_URL !== null) {
$window.open(PRIVACY_URL, '_blank');
}
};
/**
* Triggers the SendEventBatch method on an interval defined by
* DEFAULT_EVENT_BATCH_PERIOD_SECONDS.
*/
if (ServerHandlerService.doesServerExist()) {
$interval(function() {
EventHandlerService.sendCurrentEventBatch();
}, DEFAULT_EVENT_BATCH_PERIOD_SECONDS * SECONDS_TO_MILLISECONDS);
}
/**
* Initializes the appropriate values in $scope for the question
* instructions, stored code, starter code and feedback.
*/
var initLearnerViewDirective = function() {
LocalStorageService.init($scope.tieId);
SessionHistoryService.init();
// The pulseAnimationEnabled var is set to false to prevent
// transcript paragraph pulse animation when switching from
// light to dark mode and vise versa. This is set to false
// in resetCode.
$scope.pulseAnimationEnabled = false;
SessionIdService.resetSessionId();
// Load the feedback, tasks, and stored code and initialize the
// event services.
var questionId = CurrentQuestionService.getCurrentQuestionId();
if (questionId === null) {
$window.location.href = '/client/404.html';
return;
}
var question = CurrentQuestionService.getCurrentQuestion();
var tasks = question.getTasks();
currentTaskIndex = 0;
$scope.title = question.getTitle();
$scope.instructions = tasks[currentTaskIndex].getInstructions();
$scope.previousInstructions = [];
$scope.languageLabel = SUPPORTED_LANGUAGE_LABELS[
$scope.codeMirrorOptions.mode];
UnpromptedFeedbackManagerService.reset(tasks);
cachedCode = AutosaveService.getLastSavedCode(language);
$scope.editorContents.code = (
cachedCode || question.getStarterCode(language));
var snapshotIndex = SessionHistoryService.getSnapshotIndex();
if (snapshotIndex === 0) {
// Save starter code if this is the first time seeing the question.
SessionHistoryService.saveStarterCodeSnapshot(
question.getStarterCode(language));
$scope.totalSnapshots.push({number: snapshotIndex,
title: 'Starter Code'});
$scope.previousButtonIsDisabled = true;
} else {
// If the user has previous submissions before refreshing the page,
// on refresh, the previous submissions are still in local storage
// but must be added to the previous snapshot menu.
// The code in the editor window should be the last submission made.
$scope.revertToSelectedSnapshot(snapshotIndex);
// Add snapshots to dropdown if previous snapshots exist.
var snapshotIndexCounter = 0;
while (snapshotIndexCounter < snapshotIndex) {
if (snapshotIndexCounter === 0) {
$scope.totalSnapshots.push({number: snapshotIndexCounter,
title: 'Starter Code'});
} else {
$scope.totalSnapshots.push({number: snapshotIndexCounter,
title: 'Snapshot ' + snapshotIndexCounter.toString()});
}
snapshotIndexCounter++;
}
$scope.totalSnapshots.push({number: snapshotIndex,
title: 'Latest'});
$scope.currentSnapshotIndex = snapshotIndex;
}
EventHandlerService.init(
SessionIdService.getSessionId(), questionId,
CurrentQuestionService.getCurrentQuestionVersion());
EventHandlerService.createQuestionStartEvent();
EventHandlerService.createTaskStartEvent(
tasks[currentTaskIndex].getId());
// Only adds intro message if TIE is iframed, meaning the question
// is not shown, and there is nothing currently in the feedback
// window.
if (ParentPageService.isIframed() &&
SessionHistoryService.getBindableSessionTranscript().length === 0) {
SessionHistoryService.addIntroMessageToTranscript();
}
};
/**
* Highlights the syntax errors in the coding UI
*
* @param {number} lineNumber
*/
var highlightLine = function(lineNumber) {
var actualLineNumber = lineNumber - 1;
var codeLines = document.querySelectorAll('.CodeMirror-line');
// This check is needed in cases where the code is something like
// "def methodName(s):". The syntax error refers to the follow-up
// line (since the function declaration has no body), but that line
// is empty so we can't highlight it.
if (actualLineNumber < codeLines.length) {
codeLines[actualLineNumber].classList.add(CSS_CLASS_SYNTAX_ERROR);
}
};
/**
* Clears all highlight from syntax errors in the coding UI
*/
var clearAllHighlights = function() {
var codeLines = document.querySelectorAll('.' +
CSS_CLASS_SYNTAX_ERROR);
for (var i = 0; i < codeLines.length; i++) {
codeLines[i].classList.remove(CSS_CLASS_SYNTAX_ERROR);
}
};
/**
* Displays congratulations when the question is complete.
* Also sends a QuestionCompleteEvent to the backend.
*/
$scope.completeQuestion = function() {
congratulatoryFeedback.clear();
if ($scope.pageIsIframed) {
congratulatoryFeedback.appendTextParagraph(
"Good work! Your code solves this question.");
congratulatoryFeedback.appendTextParagraph(
"Click the \"Submit for Grading\" button to get credit!");
congratulatoryFeedback.appendImageParagraph('congrats_cake.gif');
} else {
congratulatoryFeedback.appendTextParagraph(
"Good work! You've completed this question.");
congratulatoryFeedback.appendImageParagraph('congrats_cake.gif');
congratulatoryFeedback.appendTextParagraph(
"(You can continue to submit additional answers, " +
"if you wish.)");
}
SessionHistoryService.addFeedbackToTranscript(
congratulatoryFeedback.getParagraphs());
$scope.feedbackIsDisplayed = true;
EventHandlerService.createQuestionCompleteEvent();
};
/**
* Sets the feedback to the appropriate text according to the feedback
* passed into the function.
*
* @param {Feedback} feedback
* @param {string} code
*/
$scope.setFeedback = function(feedback, code) {
var question = CurrentQuestionService.getCurrentQuestion();
var tasks = question.getTasks();
EventHandlerService.createCodeSubmitEvent(
tasks[currentTaskIndex].getId(),
feedback.getParagraphsAsListOfDicts(),
feedback.getFeedbackCategory(), code);
if (feedback.isAnswerCorrect()) {
// If the feedback is correct, create a TaskCompleteEvent first.
EventHandlerService.createTaskCompleteEvent(
tasks[currentTaskIndex].getId());
if (question.isLastTask(currentTaskIndex)) {
$scope.completeQuestion();
} else {
$scope.showNextTask();
}
} else {
var feedbackParagraphs = feedback.getParagraphs();
var errorLineNumber = feedback.getErrorLineNumber();
for (var i = 0; i < feedbackParagraphs.length; i++) {
clearAllHighlights();
if (errorLineNumber !== null) {
highlightLine(errorLineNumber);
break;
}
}
SessionHistoryService.addFeedbackToTranscript(feedbackParagraphs);
$scope.feedbackIsDisplayed = true;
}
// Skulpt processing happens outside an Angular context, so
// $scope.$apply() is needed to force a DOM update.
if (!ServerHandlerService.doesServerExist()) {
$scope.$apply();
}
$scope.scrollToTopOfFeedbackWindow();
};
/**
* Sets the code in the code editor to the snapshot index
* passed in as a parameter.
*
* @param {number} selectedSnapshotIndex The snapshot index
* selected in the previous snapshots dropdown.
*/
$scope.revertToSelectedSnapshot = function(selectedSnapshotIndex) {
$scope.currentSnapshotIndex = selectedSnapshotIndex;
var selectedSnapshot = null;
if (selectedSnapshotIndex === 0) {
selectedSnapshot = SessionHistoryService.getStarterCodeSnapshot();
$scope.previousButtonIsDisabled = true;
} else {
selectedSnapshot = SessionHistoryService.getPreviousSnapshot(
selectedSnapshotIndex);
$scope.previousButtonIsDisabled = false;
}
if (selectedSnapshot === null) {
throw Error('Could not retrieve code for snapshot at index ' +
selectedSnapshotIndex);
} else {
$scope.editorContents.code = selectedSnapshot;
EventHandlerService.createCodeRestoreEvent();
}
$scope.snapshotMenuIsOpen = false;
};
/**
* Sets the code in the code editor to the previous snapshot.
*/
$scope.revertToPreviousSnapshot = function() {
var previousSnapshotIndex = $scope.currentSnapshotIndex - 1;
$scope.revertToSelectedSnapshot(previousSnapshotIndex);
};
/**
* Open and close the snapshot menu.
*/
$scope.showSnapshotMenu = function() {
if ($scope.snapshotMenuIsOpen) {
$scope.snapshotMenuIsOpen = false;
} else {
$scope.snapshotMenuIsOpen = true;
}
};
/**
* Stores the CodeMirror editor instance.
*/
var codemirrorEditorInstance = null;
/**
* Returns a copy of the options that are needed to run codeMirror
* correctly.
*
* @param {boolean} enableAccessibility Whether to return the
* configuration that is optimized for accessible usage.
*/
var getCodemirrorOptions = function(enableAccessibility) {
var basicOptions = {
autofocus: true,
extraKeys: {
Tab: function(cm) {
var spaces = Array(cm.getOption('indentUnit') + 1).join(' ');
cm.replaceSelection(spaces);
// Move the cursor to the end of the selection.
var endSelectionPos = cm.getDoc().getCursor('head');
cm.getDoc().setCursor(endSelectionPos);
},
Esc: function() {
document.getElementsByClassName(
'tie-codemirror-container')[0].focus();
}
},
indentUnit: 4,
lineNumbers: true,
matchBrackets: true,
mode: LANGUAGE_PYTHON,
onLoad: function(editorInstance) {
codemirrorEditorInstance = editorInstance;
},
smartIndent: true,
tabSize: 4,
tabindex: -1,
theme: 'default'
};
if (enableAccessibility) {
// Note that this option cannot be changed while the CodeMirror
// instance is running. This mode has some disadvantages for
// sighted users, e.g. mouse highlighting in the CodeMirror area is
// not visible.
basicOptions.inputStyle = 'contenteditable';
basicOptions.autofocus = false;
}
return angular.copy(basicOptions);
};
$scope.codeMirrorOptions = getCodemirrorOptions(false);
$scope.accessibleCodeMirrorOptions = getCodemirrorOptions(true);
$scope.onKeypressCodemirrorContainer = function(evt) {
if (evt.keyCode === KEY_CODE_ENTER) {
// Enter key is pressed.
evt.preventDefault();
evt.stopPropagation();
if (codemirrorEditorInstance) {
codemirrorEditorInstance.focus();
showAriaLiveMessage(ARIA_LIVE_MESSAGE_CODEMIRROR_ENTERED);
}
}
};
$scope.onFocusCodemirrorContainer = function() {
$scope.accessibleMode = true;
showAriaLiveMessage(ARIA_LIVE_MESSAGE_CODEMIRROR_CONTAINER_FOCUSED);
};
/**
* Sets the question window to scroll to the top.
*/
$scope.scrollToTopOfFeedbackWindow = function() {
if ($scope.isFeedbackSupported) {
feedbackWindowDiv.scrollTop = 0;
}
};
/**
* Changes the UI to show the next task and its instructions for the
* given question.
*/
$scope.showNextTask = function() {
var question = CurrentQuestionService.getCurrentQuestion();
var tasks = question.getTasks();
currentTaskIndex++;
$scope.previousInstructions.push($scope.instructions);
$scope.instructions = tasks[currentTaskIndex].getInstructions();
SessionHistoryService.reset();
EventHandlerService.createTaskStartEvent(
tasks[currentTaskIndex].getId());
};
/**
* Calls the processes necessary to start the code submission process.
*
* @param {string} code
*/
$scope.submitCode = function(code) {
SessionHistoryService.addCodeToTranscript(code);
// Find the index of the snapshot with the title "Latest" in the menu.
var latestSnapshotIndex = -1;
for (var index = 0; index < $scope.totalSnapshots.length; index++) {
if ($scope.totalSnapshots[index].title === 'Latest') {
latestSnapshotIndex = index;
}
}
// Updates latest snapshot in menu, if found.
if (latestSnapshotIndex >= 0) {
var latestSnapshot = $scope.totalSnapshots[latestSnapshotIndex];
$scope.totalSnapshots.push({number: latestSnapshot.number,
title: 'Snapshot ' + latestSnapshot.number.toString()});
$scope.totalSnapshots.splice(latestSnapshotIndex, 1);
}
// Adds new snapshot as latest in menu.
var snapshotIndex = SessionHistoryService.getSnapshotIndex();
$scope.totalSnapshots.push({number: snapshotIndex,
title: 'Latest'});
$scope.currentSnapshotIndex = snapshotIndex;
$scope.previousButtonIsDisabled = false;
// Gather all tasks from the first one up to the current one.
var question = CurrentQuestionService.getCurrentQuestion();
var tasks = question.getTasks();
var orderedTasks = tasks.slice(0, currentTaskIndex + 1);
ConversationManagerService.processSolutionAsync(
orderedTasks, question.getStarterCode(language),
code, question.getAuxiliaryCode(language), language
).then(function(learnerViewSubmissionResult) {
var feedback = learnerViewSubmissionResult.getFeedback();
$scope.setFeedback(feedback, code);
$scope.stdout = learnerViewSubmissionResult.getStdout();
$scope.syntaxError = learnerViewSubmissionResult.getSyntaxError();
});
$scope.autosaveCode();
};
/**
* Sends the user code to the parent page if the parent page
* origin matches the expected framing origin.
*
* @param {string} rawCode
*/
$scope.submitToParentPage = function(rawCode) {
ParentPageService.sendRawCode(rawCode);
SessionHistoryService.addCodeToTranscript(rawCode);
SessionHistoryService.addSubmissionConfirmationToTranscript();
};
/**
* Clears the cached code and the code stored in local storage.
*/
$scope.resetCode = function() {
if (currentTaskIndex > 0 &&
!window.confirm(CODE_RESET_CONFIRMATION_MESSAGE)) {
return;
}
var question = CurrentQuestionService.getCurrentQuestion();
$scope.editorContents.code = question.getStarterCode(language);
EventHandlerService.createCodeResetEvent();
$scope.autosaveCode();
// Clear the code and feedback from localStorage, so that it is not
// retrieved in the subsequent initialization.
SessionHistoryService.reset();
// Start a brand-new question session.
initLearnerViewDirective();
};
/**
* Displays a notification for the given number of seconds to let the
* user know their code has been autosaved.
*
* @param {number} displaySeconds
*/
var triggerAutosaveNotification = function(displaySeconds) {
$scope.autosaveTextIsDisplayed = true;
$timeout(function() {
$scope.autosaveTextIsDisplayed = false;
}, displaySeconds * SECONDS_TO_MILLISECONDS);
};
/**
* Called when a user code change is detected, with a minimum time of
* CODE_CHANGE_DEBOUNCE_SECONDS between intervals.
*/
$scope.onCodeChange = function() {
var question = CurrentQuestionService.getCurrentQuestion();
var tasks = question.getTasks();
if ($scope.codeChangeLoopPromise === null) {
$scope.codeChangeLoopPromise = $interval(function() {
if (angular.equals(cachedCode, $scope.editorContents.code)) {
// No code change, stop the onCodeChange loop.
$interval.cancel($scope.codeChangeLoopPromise);
$scope.codeChangeLoopPromise = null;
return;
}
// Code change detected. Actually do the operations that should
// be triggered by a code change, such as autosaving.
$scope.autosaveCode();
// Check for unprompted feedback to add to the feedback log.
var potentialFeedbackParagraphs = (
UnpromptedFeedbackManagerService.runTipsCheck(
language, $scope.editorContents.code,
tasks[currentTaskIndex].getId()));
if (potentialFeedbackParagraphs !== null) {
SessionHistoryService.addFeedbackToTranscript(
potentialFeedbackParagraphs);
$scope.feedbackIsDisplayed = true;
}
}, CODE_CHANGE_DEBOUNCE_SECONDS * SECONDS_TO_MILLISECONDS);
}
};
/**
* Saves the user's code to the browser's local storage.
*/
$scope.autosaveCode = function() {
cachedCode = $scope.editorContents.code;
if (LocalStorageService.isAvailable()) {
AutosaveService.saveCode(language, $scope.editorContents.code);
triggerAutosaveNotification(DISPLAY_AUTOSAVE_TEXT_SECONDS);
}
};
$scope.autosaveTextIsDisplayed = false;
CurrentQuestionService.init(
initLearnerViewDirective, $scope.questionId);
/**
* Refreshes UI if window loads at 0 height or width (Firefox IFrame
Case)
*/
$timeout(function() {
if ($window.innerWidth === 0 || $window.innerHeight === 0) {
codemirrorEditorInstance.refresh();
}
});
}
]
};
}]);
|
# this must be SOURCE'd, not EXECUTED
# BS_HOME should be set globally before this file is sourced, e.g.:
#export BS_HOME=/somewhere
# Tell Lua 5.2 where to find pure-Lua modules under $BS_HOME (the
# trailing ';;' appends Lua's built-in default search paths).
export LUA_PATH="$BS_HOME/lua/share/lua/5.2/?.lua;$BS_HOME/lua/share/lua/5.2/?/init.lua;;"
# Same for compiled C modules (.so).
export LUA_CPATH="$BS_HOME/lua/lib/lua/5.2/?.so;;"
|
#!/usr/bin/env bash
# Copyright 2020 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#          http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o pipefail
# =================================================
# Capture the command-line arguments so that has_flag can inspect them
# later. When no arguments are given, store a single empty string so
# that expanding "${ARGS[@]}" stays well-defined under 'set -u' below.
if [ -z "$1" ]; then
  ARGS=("")
else
  ARGS=("$@")
fi
set -eu
# Run build
#
# Dispatches to the requested build steps based on the flags captured in
# ARGS. NOTE(review): in --watch mode, watch() loops until interrupted
# with CTRL-C but there is no 'exit' after it — if watch() ever returned
# normally, control would fall through to the fast-mode check; confirm
# whether that is intended.
run() {
  # Switch on modules unconditionally
  export GO111MODULE=on
  # Jump into project directory
  pushd $(basedir) >/dev/null 2>&1
  # Print help if requested
  if $(has_flag --help -h); then
    display_help
    exit 0
  fi
  if $(has_flag --watch -w); then
    # Build and test first
    go_build
    if $(has_flag --test -t); then
      go_test
    fi
    # Go in endless loop, to be stopped with CTRL-C
    watch
  fi
  # Fast mode: Only compile and maybe run test
  if $(has_flag --fast -f); then
    go_build
    if $(has_flag --test -t); then
      go_test
    fi
    exit 0
  fi
  # Run only tests
  if $(has_flag --test -t); then
    go_test
    exit 0
  fi
  # Run only codegen
  if $(has_flag --codegen -c); then
    codegen
    exit 0
  fi
  # Cross compile only
  if $(has_flag --all -x); then
    cross_build || (echo "✋ Cross platform build failed" && exit 1)
    exit 0
  fi
  # Default flow
  codegen
  go_build
  go_test
  echo "────────────────────────────────────────────"
  ./$PLUGIN version
}
# Run all code-generation and verification steps: dependency update,
# source formatting, linting and license-header checks.
codegen() {
  # Update dependencies
  update_deps
  # Format source code and cleanup imports
  source_format
  # Lint source code (skipped when running under Prow CI)
  (( ! IS_PROW )) && source_lint
  # Check for license headers
  check_license
}
# Format every Go file under SOURCE_DIRS in place with gofmt -s.
go_fmt() {
  echo "🧹 ${S}Format"
  # SOURCE_DIRS is intentionally word-split into multiple find arguments.
  find $SOURCE_DIRS -name "*.go" -print0 | xargs -0 gofmt -s -w
}
# Clean up imports (goimports) and format all Go sources (gofmt -s).
# Tool failures are deliberately ignored (set +e around the calls).
source_format() {
  set +e
  run_go_tool golang.org/x/tools/cmd/goimports goimports -w $(echo $SOURCE_DIRS)
  find $(echo $SOURCE_DIRS) -name "*.go" -print0 | xargs -0 gofmt -s -w
  set -e
}
# Run golangci-lint over the project; returns non-zero (with a FAIL
# message) when the linter reports errors.
source_lint() {
  echo "🔍 Lint"
  run_go_tool github.com/golangci/golangci-lint/cmd/golangci-lint golangci-lint run || \
    { echo "--- FAIL: golangci-lint failed please fix the reported errors"; return 1; }
}
# Compile the plugin binary into ./$PLUGIN using the project's ldflags.
go_build() {
  echo "🚧 Compile"
  go build -ldflags "$(build_flags $(basedir))" -o $PLUGIN "./$MAIN_SOURCE_DIR/..."
}
# Run the unit tests, capturing their output and showing it (with FAIL
# lines highlighted in red) only when a test fails. Exits the script
# with the test command's status on failure.
go_test() {
  local test_output
  # Declare separately from the assignment so a mktemp failure is not
  # masked by 'local' (which would otherwise reset $? to 0 under set -e).
  test_output=$(mktemp "/tmp/${PLUGIN}-output.XXXXXX")
  local red=""
  local reset=""
  # Use color only when stdout is a terminal.
  if [ -t 1 ]; then
    red="[31m"
    reset="[39m"
  fi
  echo "🧪 ${X}Test"
  set +e
  go test -v ./internal/... ./pkg/... >"$test_output" 2>&1
  local err=$?
  # Bug fix: restore errexit — the original left 'set +e' in effect for
  # the remainder of the script after a successful test run.
  set -e
  if [ $err -ne 0 ]; then
    echo "🔥 ${red}Failure${reset}"
    # Highlight FAIL lines; sed reads the file directly (no cat pipe).
    sed -e "s/^.*\(FAIL.*\)$/$red\1$reset/" "$test_output"
    rm "$test_output"
    exit $err
  fi
  rm "$test_output"
}
# Verify that every tracked source file (sh/go/yaml/yml/json, excluding
# vendor/, hidden and third_party/ directories) contains the expected
# license keywords; exits non-zero listing the offending files otherwise.
check_license() {
  echo "⚖️ ${S}License"
  local required_keywords=("Authors" "Apache License" "LICENSE-2.0")
  local extensions_to_check=("sh" "go" "yaml" "yml" "json")
  local check_output=$(mktemp /tmp/${PLUGIN}-licence-check.XXXXXX)
  for ext in "${extensions_to_check[@]}"; do
    # The while loop runs in a pipeline subshell, which is fine here:
    # results are appended to the temp file, not to shell variables.
    find . -name "*.$ext" -a \! -path "./vendor/*" -a \! -path "./.*" -a \! -path "./third_party/*" -print0 |
    while IFS= read -r -d '' path; do
      for rword in "${required_keywords[@]}"; do
        if ! grep -q "$rword" "$path"; then
          echo " $path" >> $check_output
        fi
      done
    done
  done
  if [ -s $check_output ]; then
    echo "🔥 No license header found in:"
    cat $check_output | sort | uniq
    echo "🔥 Please fix and retry."
    rm $check_output
    exit 1
  fi
  rm $check_output
}
# Delegate dependency updating to the project's update-deps.sh script.
update_deps() {
  echo "🚒 Update"
  $(basedir)/hack/update-deps.sh
}
# Watch the source directories with fswatch and re-run a fast build
# (plus tests when --test is given) after every change. Loops until
# interrupted with CTRL-C; exits early if fswatch is not installed.
watch() {
  local command="./hack/build.sh --fast"
  # fswatch_opts is intentionally kept as a plain string and word-split
  # when passed to fswatch below.
  local fswatch_opts='-e "^\..*$" -o $SOURCE_DIRS'
  if $(has_flag --test -t); then
    command="$command --test"
  fi
  if $(has_flag --verbose); then
    fswatch_opts="$fswatch_opts -v"
  fi
  set +e
  which fswatch >/dev/null 2>&1
  if [ $? -ne 0 ]; then
    local green="[32m"
    local reset="[39m"
    echo "🤷 Watch: Cannot find ${green}fswatch${reset}"
    echo "🌏 Please see ${green}http://emcrisostomo.github.io/fswatch/${reset} for installation instructions"
    exit 1
  fi
  set -e
  echo "🔁 Watch"
  fswatch $fswatch_opts | xargs -n1 -I{} sh -c "$command && echo 👌 OK"
}
# Dir where this script is located: echoes the absolute path of the
# directory one level above the script, resolving symlinks first.
basedir() {
  # Default is current directory
  local script=${BASH_SOURCE[0]}
  # Resolve symbolic links. Bug fix: quote $script everywhere so paths
  # containing spaces do not break the tests and commands below.
  if [ -L "$script" ]; then
    if readlink -f "$script" >/dev/null 2>&1; then
      script=$(readlink -f "$script")
    elif readlink "$script" >/dev/null 2>&1; then
      script=$(readlink "$script")
    elif realpath "$script" >/dev/null 2>&1; then
      script=$(realpath "$script")
    else
      echo "ERROR: Cannot resolve symbolic link $script"
      exit 1
    fi
  fi
  local dir
  dir=$(dirname "$script")
  local full_dir
  full_dir=$(cd "${dir}/.." && pwd)
  echo "${full_dir}"
}
# Checks if a flag is present in the arguments.
# Echoes the word 'true' when any of the given flag spellings occurs in
# the global ARGS array, 'false' otherwise. Callers execute the echoed
# word via $(has_flag ...) as a condition.
has_flag() {
  filters="$@"
  for candidate in "${ARGS[@]}"; do
    for filter in $filters; do
      if [ "$candidate" = "$filter" ]; then
        echo 'true'
        return
      fi
    done
  done
  echo 'false'
}
# Cross-compile the plugin for every supported OS/architecture pair.
# Every target is attempted even if an earlier one fails; returns
# non-zero when at least one build failed.
cross_build() {
  local basedir=$(basedir)
  local ld_flags="$(build_flags $basedir)"
  local failed=0
  echo "⚔️ ${S}Compile"
  export CGO_ENABLED=0
  echo " 🐧 ${PLUGIN}-linux-amd64"
  GOOS=linux GOARCH=amd64 go build -ldflags "${ld_flags}" -o ./${PLUGIN}-linux-amd64 "./$MAIN_SOURCE_DIR/..." || failed=1
  echo " 💪 ${PLUGIN}-linux-arm64"
  # NOTE(review): some targets build ./cmd/... with -mod=vendor while
  # others build "./$MAIN_SOURCE_DIR/..." without it — this looks like
  # copy-paste drift; confirm intent before unifying.
  GOOS=linux GOARCH=arm64 go build -mod=vendor -ldflags "${ld_flags}" -o ./${PLUGIN}-linux-arm64 ./cmd/... || failed=1
  echo " 🍏 ${PLUGIN}-darwin-amd64"
  GOOS=darwin GOARCH=amd64 go build -ldflags "${ld_flags}" -o ./${PLUGIN}-darwin-amd64 "./$MAIN_SOURCE_DIR/..." || failed=1
  # Bug fix: the label said "${PLUGIN}-arm64" but the file actually
  # produced is ${PLUGIN}-darwin-arm64.
  echo " 🍎 ${PLUGIN}-darwin-arm64"
  GOOS=darwin GOARCH=arm64 go build -mod=vendor -ldflags "${ld_flags}" -o ./${PLUGIN}-darwin-arm64 ./cmd/... || failed=1
  echo " 🎠 ${PLUGIN}-windows-amd64.exe"
  GOOS=windows GOARCH=amd64 go build -ldflags "${ld_flags}" -o ./${PLUGIN}-windows-amd64.exe "./$MAIN_SOURCE_DIR/..." || failed=1
  echo " Z ${PLUGIN}-linux-s390x"
  GOOS=linux GOARCH=s390x go build -mod=vendor -ldflags "${ld_flags}" -o ./${PLUGIN}-linux-s390x ./cmd/... || failed=1
  echo " P ${PLUGIN}-linux-ppc64le"
  GOOS=linux GOARCH=ppc64le go build -mod=vendor -ldflags "${ld_flags}" -o ./${PLUGIN}-linux-ppc64le ./cmd/... || failed=1
  return ${failed}
}
# Spaced fillers needed for certain emojis in certain terminals
S=""
X=""
# Calculate space fixing variables S and X
apply_emoji_fixes() {
  # Temporary fix for iTerm issue https://gitlab.com/gnachman/iterm2/issues/7901
  if [ -n "${ITERM_PROFILE:-}" ]; then
    S=" "
    # This issue has been fixed with iTerm2 3.3.7, so let's check for this
    # We can remove this code altogether if iTerm2 3.3.7 is in common usage everywhere
    # Bug fix: use a ':-' default so 'set -u' (enabled at the top of the
    # script) does not abort when TERM_PROGRAM_VERSION is unset.
    if [ -n "${TERM_PROGRAM_VERSION:-}" ]; then
      # Extract "major minor patch" from e.g. "3.3.6beta1".
      args=$(echo $TERM_PROGRAM_VERSION | sed -e 's#[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)#\1 \2 \3#')
      expanded=$(printf '%03d%03d%03d' $args)
      if [ $expanded -lt "003003007" ]; then
        X=" "
      fi
    fi
  fi
}
# Display a help message.
# The heredoc below is unquoted, so $PLUGIN / $(basename ...) /
# $(basedir) are expanded at print time; its content is runtime output
# and is left untouched.
display_help() {
  cat <<EOT
Build script for Kn plugin $PLUGIN
Usage: $(basename $BASH_SOURCE) [... options ...]
with the following options:
-f  --fast                    Only compile (without dep update, formatting, testing, doc gen)
-t  --test                    Run tests when used with --fast or --watch
-c  --codegen                 Runs formatting, doc gen and update without compiling/testing
-w  --watch                   Watch for source changes and recompile in fast mode
-x  --all                     Only build cross platform binaries without code-generation/testing
-h  --help                    Display this help message
    --verbose                 More output
    --debug                   Debug information for this script (set -x)
You can add a symbolic link to this build script into your PATH so that it can be
called from everywhere. E.g.:
ln -s $(basedir)/hack/build.sh /usr/local/bin/$PLUGIN-build.sh
Examples:
* Update deps, format, license check,
  gen docs, compile, test: ........... build.sh
* Compile only: ...................... build.sh --fast
* Run only tests: .................... build.sh --test
* Compile with tests: ................ build.sh -f -t
* Automatic recompilation: ........... build.sh --watch
* Build cross platform binaries: ..... build.sh --all
EOT
}
# Enable bash tracing when --debug is given (checked before the files
# below are sourced, so that their execution is traced too).
if $(has_flag --debug); then
  export PS4='+($(basename ${BASH_SOURCE[0]}):${LINENO}): ${FUNCNAME[0]:+${FUNCNAME[0]}(): }'
  set -x
fi
# Global variables
source $(basedir)/hack/global_vars.sh
# Shared funcs from hack repo
source $(basedir)/vendor/knative.dev/hack/library.sh
# Shared funcs with CI
source $(basedir)/hack/build-flags.sh
# Fix emoji labels for certain terminals
apply_emoji_fixes
# Bug fix: "$@" (unlike the unquoted $* used before) preserves argument
# boundaries and empty arguments when forwarding to run().
run "$@"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.