text stringlengths 1 1.05M |
|---|
import random, string
def randomPassword(length):
    """Return a random alphanumeric password of the given length.

    Improvement over the original: uses the cryptographically secure
    ``secrets`` module instead of ``random``, which is not suitable for
    generating passwords or other security-sensitive values.

    Args:
        length: number of characters to generate (0 yields an empty string).

    Returns:
        A string of ``length`` characters drawn from ASCII letters and digits.
    """
    import secrets  # local import keeps the module's top-level imports unchanged
    alphabet = string.ascii_letters + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))
random_password = randomPassword(8)
print(random_password) |
#!/bin/sh
# autogen script for the exiv2-sharp project: regenerates the autotools
# build system and (unless NOCONFIGURE is set) runs configure.
srcdir=`dirname $0`
# fall back to the current directory if dirname produced nothing
test -z "$srcdir" && srcdir=.
PROJECT=exiv2-sharp
# Regenerate configure/Makefile.in, pulling extra macros from config/ and m4/
autoreconf -v --force --install -I config -I m4
# NOCONFIGURE unset/empty -> run configure, forwarding any script arguments
if test x$NOCONFIGURE = x; then
echo Running $srcdir/configure $conf_flags "$@" ...
$srcdir/configure $conf_flags "$@" \
&& echo Now type \`make\' to compile $PROJECT || exit 1
else
echo Skipping configure process.
fi
|
package com.school.infrastructure.messaging.interceptors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.integration.config.GlobalChannelInterceptor;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.support.ChannelInterceptorAdapter;
import org.springframework.stereotype.Component;
import static com.school.infrastructure.messaging.Sink.STUDENT_DELETE_INPUT;
/**
 * Global channel interceptor bound to the student-delete input channel
 * (see {@code STUDENT_DELETE_INPUT}). Logs each message before it reaches
 * the Sink, then delegates to the default adapter behavior.
 */
@Slf4j
@Component
@GlobalChannelInterceptor(patterns = {STUDENT_DELETE_INPUT})
public class MessagingInterceptor extends ChannelInterceptorAdapter {
/**
 * Invoked before a message is sent to the channel. Logs and delegates to
 * the superclass implementation, which passes the message through unchanged.
 *
 * @param message the message about to be sent
 * @param channel the channel the message is being sent to
 * @return the (unmodified) message to continue sending
 */
@Override
public Message<?> preSend(Message<?> message, MessageChannel channel) {
log.info("Things we can do before the event reaches the Sink");
return super.preSend(message, channel);
}
}
|
/*
 * Scan downward from `nearest` looking for the first MEM_FREE region
 * within 2 GB, using VirtualQuery. Returns the base address of the free
 * region, or NULL if none is found (or VirtualQuery fails).
 *
 * Fixes two unsigned-arithmetic bugs in the original:
 *  - `(SIZE_T)nearest - 0x80000000` underflowed (wrapped to a huge value)
 *    whenever `nearest` was below 2 GB, breaking the loop bound;
 *  - `(SIZE_T)mbi.BaseAddress - 1` could wrap past address 0.
 */
LPVOID FindNearestFreeMemoryBlock(LPVOID nearest) {
    MEMORY_BASIC_INFORMATION mbi = { 0 };
    LPVOID nearestFreeBlock = NULL;
    SIZE_T start = (SIZE_T)nearest;
    /* clamp the lower bound to 0 to avoid unsigned underflow */
    SIZE_T lowerBound = (start > 0x80000000) ? start - 0x80000000 : 0;
    SIZE_T addr = start;
    while (addr > lowerBound) {
        if (!VirtualQuery((LPCVOID)addr, &mbi, sizeof(mbi)))
            break;
        if (mbi.State == MEM_FREE) {
            nearestFreeBlock = mbi.BaseAddress;
            break;
        }
        /* stop instead of wrapping below address 0 */
        if ((SIZE_T)mbi.BaseAddress == 0)
            break;
        /* step just below the region VirtualQuery reported */
        addr = (SIZE_T)mbi.BaseAddress - 1;
    }
    return nearestFreeBlock;
}
<gh_stars>1-10
#ifndef __POWERUPBLUE_H__
#define __POWERUPBLUE_H__
#include "Weapon.h"
#include "Path.h"
// Blue power-up: a Weapon subclass whose on-screen position follows a
// scripted Path, with an Animation advanced alongside it.
class PowerupBlue : public Weapon
{
public:
// Constructor (x y coordinates in the world)
// Creates animation and movement data and the collider
PowerupBlue(int x, int y);
// Per-frame update (overrides Weapon::Update)
void Update() override;
// Collision callback (overrides Weapon::OnCollision)
void OnCollision(Collider* collider) override;
private:
// A set of steps that define the position in the screen
// And an animation for each step
Path pathBlue;
Animation animPowerBlue;
};
#endif // __POWERUPBLUE_H__
#!/bin/bash
# Build and upload the package to PyPI.
# Fail fast on any error, unset variable, or pipeline failure — in
# particular this aborts if the `cd` below fails, which the original
# script silently ignored.
set -euo pipefail
cd "$(dirname "$0")/.."
# python3 setup.py block_on_version |
python3 setup.py block_on_version clean sdist bdist_wheel || exit
python3 -m twine upload dist/*
|
# books.py
# books.py
# Flask route: GET /search?query=... — runs a book search and renders the
# results template with both the query and the matches.
@app.route('/search', methods=['GET'])
def search():
# NOTE(review): request.args.get('query') returns None when the parameter
# is absent — confirm search_books handles a None query.
query = request.args.get('query')
results = search_books(query)
return render_template('results.html', query=query, results=results)
# main.py
# search books function
# Stub: look up books matching `query` and return the matches.
# NOTE(review): `books` is not defined anywhere in this snippet — as
# written this raises NameError; implement the lookup before use.
def search_books(query):
# your search code goes here...
return books
<gh_stars>0
import { BasePublisher, ItemUpdatedEvent, Subjects } from '../../common';
/**
 * Publisher for item-updated events: binds the shared BasePublisher
 * infrastructure to the Subjects.ItemUpdated subject so emitted payloads
 * are typed as ItemUpdatedEvent.
 */
export class ItemUpdatedPublisher extends BasePublisher<ItemUpdatedEvent> {
readonly subject = Subjects.ItemUpdated;
}
|
#!/usr/bin/env bash
# die on error
set -e
# https://gist.github.com/cjus/1047794
# Fetch the site's deploy list from the Netlify API, crudely parse the JSON
# with sed/awk to extract the first "id" field, then POST a restore request
# for that deploy. NOTE(review): this hand-rolled JSON parsing is fragile —
# it grabs the first field grep-matching 'id'; a jq-based parse would be
# safer if jq is available in the CI image.
echo 'Retrieving latest deploy...'
# NOTE(review): despite the name, `url` holds the raw JSON response body,
# not a URL.
url=`curl -H "Authorization: Bearer $NETLIFY_PUBLISH_KEY" https://api.netlify.com/api/v1/sites/dsgndhat.me/deploys`
# temp ends up as "id|<deploy-id>"; ${temp##*|} below strips everything up
# to and including the last '|'.
temp=`echo $url | sed 's/\\\\\//\//g' | sed 's/[{}]//g' | awk -v k="text" '{n=split($0,a,","); for (i=1; i<=n; i++) print a[i]}' | sed 's/\"\:\"/\|/g' | sed 's/[\,]/ /g' | sed 's/\"//g' | grep -w -m 1 'id'`
# https://www.netlify.com/docs/api/#deploys
echo "Publishing build ${temp##*|}..."
curl -X POST -H "Authorization: Bearer $NETLIFY_PUBLISH_KEY" -d "{}" "https://api.netlify.com/api/v1/sites/dsgndhat.me/deploys/${temp##*|}/restore"
|
<reponame>KLisabeth/beauty_salon
import React, { useEffect } from "react";
import { useDispatch, useSelector } from "react-redux";
import { listServiceCategories } from "../../actions/serviceActions";
import { Navbar, Nav, NavDropdown, Badge } from "react-bootstrap";
import { signout } from "../../actions/userActions";
import {IoIosBasket} from "react-icons/io";
// Site-wide navigation bar for the beauty-salon app: brand link, static page
// links, auth-aware dropdown menus (admin/office), and a cart icon showing
// the current item count.
function Header() {
const dispatch = useDispatch();
// signed-in user (if any) from the redux store
const userSignin = useSelector((state) => state.userSignin);
const { userInfo } = userSignin;
// cart contents drive the badge next to the basket icon
const cart = useSelector((state) => state.cart);
const { cartItems } = cart;
useEffect(() => {
// load service categories once on mount (re-runs only if dispatch changes)
dispatch(listServiceCategories());
return () => {
//
};
}, [dispatch]);
const handleSignout = () => {
// dispatch the signout action, then hard-redirect to the home page
dispatch(signout());
document.location.href = "/";
};
return (
<div>
<Navbar collapseOnSelect expand="lg" id="navbar">
<Navbar.Brand id="title" href="/">
Pretty Woman
</Navbar.Brand>
<Navbar.Toggle aria-controls="responsive-navbar-nav" />
<Navbar.Collapse id="responsive-navbar-nav" className="">
<Nav className="mr-auto">
<Nav.Link href="/about">About us</Nav.Link>
<Nav.Link href="/gallery">Gallery</Nav.Link>
<Nav.Link href="/appointment">All our Services</Nav.Link>
</Nav>
<Nav className="mr-3">
{userInfo ? (
<NavDropdown title={userInfo.name} id="basic-nav-dropdown">
<NavDropdown.Item href="/office">My Scheduler</NavDropdown.Item>
<NavDropdown.Item onClick={handleSignout}>
Sign Out
</NavDropdown.Item>
</NavDropdown>
):(
<Nav.Link href="/signin">Admin</Nav.Link>
)}
{userInfo && userInfo.isAdmin && (
<NavDropdown title={userInfo.name} id="basic-nav-dropdown">
<NavDropdown.Item href="/administration">Administration</NavDropdown.Item>
<NavDropdown.Item onClick={handleSignout}>
Sign Out
</NavDropdown.Item>
</NavDropdown>
)}
<Nav.Link href="/cart/">
<IoIosBasket className="basket" />{" "}
{cartItems.length > 0 && (
<Badge pill variant="light">
{cartItems.length}
</Badge>
)}
</Nav.Link>
</Nav>
</Navbar.Collapse>
</Navbar>
</div>
);
}
export default Header;
|
require "test_helper"
# Tests for the required?/searchable? predicates on Madmin resource fields.
class Madmin::FieldTest < ActiveSupport::TestCase
test "required?" do
# title is a required attribute; the auto-generated id is not
assert PostResource.attributes[:title].field.required?
refute PostResource.attributes[:id].field.required?
end
test "searchable?" do
# first_name participates in search; the created_at timestamp does not
assert UserResource.attributes[:first_name].field.searchable?
refute UserResource.attributes[:created_at].field.searchable?
end
end
|
#!/bin/sh
# SUMMARY: Test the wireguard example
# LABELS:
# Builds the wireguard example image with moby; any command failure fails
# the test (set -e), and build artifacts are removed on exit via the trap.
set -e
# Source libraries. Uncomment if needed/defined
#. "${RT_LIB}"
. "${RT_PROJECT_ROOT}/_lib/lib.sh"
NAME=wireguard
# remove any build artifacts matching ${NAME}*
clean_up() {
rm -f ${NAME}*
}
trap clean_up EXIT
# Test code goes here
moby build "${LINUXKIT_EXAMPLES_DIR}/${NAME}.yml"
exit 0
|
package com.mycompany.smartparkingmanagement.helper;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
public class Helper {
public static boolean deleteFile(String path){
boolean f = false;
try{
File file = new File(path);
f = file.delete();
}catch(Exception e){
e.printStackTrace();
}
return f;
}
public static boolean saveFile(InputStream is,String path){
boolean f = false;
try{
byte b[] = new byte[is.available()];
is.read(b);
FileOutputStream fo = new FileOutputStream(path);
fo.write(b);
fo.flush();
fo.close();
f= true;
}catch (Exception e){
e.printStackTrace();
}
return f;
}
}
|
#!/bin/bash
#
###################
# flash RW_LEGACY #
###################
# Install/update the RW_LEGACY (SeaBIOS/altfw) firmware payload for the
# detected device: picks the correct payload file from the platform flags,
# optionally sets USB boot priority and a headless VGA BIOS, then flashes
# only the RW_LEGACY region with flashrom. "$1" non-empty = non-interactive.
function flash_rwlegacy()
{
#set working dir
cd /tmp
# set dev mode legacy boot flag
if [ "${isChromeOS}" = true ]; then
crossystem dev_boot_legacy=1 > /dev/null 2>&1
crossystem dev_boot_altfw=1 > /dev/null 2>&1
fi
echo_green "\nInstall/Update RW_LEGACY Firmware (Legacy BIOS)"
#determine proper file
# map the detected device / platform flags to the correct payload filename
if [ "$device" = "link" ]; then
rwlegacy_file=$seabios_link
elif [[ "$isHswBox" = true || "$isBdwBox" = true ]]; then
rwlegacy_file=$seabios_hswbdw_box
elif [[ "$isHswBook" = true || "$isBdwBook" = true ]]; then
rwlegacy_file=$seabios_hswbdw_book
elif [ "$isByt" = true ]; then
rwlegacy_file=$seabios_baytrail
elif [ "$isBsw" = true ]; then
rwlegacy_file=$seabios_braswell
elif [ "$isSkl" = true ]; then
rwlegacy_file=$seabios_skylake
elif [ "$isApl" = true ]; then
rwlegacy_file=$seabios_apl
elif [ "$kbl_use_rwl18" = true ]; then
rwlegacy_file=$seabios_kbl_18
elif [ "$isStr" = true ]; then
rwlegacy_file=$rwl_altfw_stoney
elif [ "$isKbl" = true ]; then
rwlegacy_file=$seabios_kbl
elif [ "$isWhl" = true ]; then
rwlegacy_file=$rwl_altfw_whl
elif [ "$isCmlBox" = true ]; then
rwlegacy_file=$rwl_altfw_cml
elif [ "$isJsl" = true ]; then
rwlegacy_file=$rwl_altfw_jsl
elif [ "$isZen2" = true ]; then
rwlegacy_file=$rwl_altfw_zen2
elif [ "$isTgl" = true ]; then
rwlegacy_file=$rwl_altfw_tgl
else
echo_red "Unknown or unsupported device (${device}); cannot update RW_LEGACY firmware."
read -ep "Press enter to return to the main menu"
return 1
fi
preferUSB=false
useHeadless=false
# interactive options only apply to SeaBIOS (non-altfw) payloads
if [[ -z "$1" && "$rwlegacy_file" != *"altfw"* ]]; then
echo -e ""
#USB boot priority
echo_yellow "Default to booting from USB?"
read -ep "If N, always boot from internal storage unless selected from boot menu. [y/N] "
[[ "$REPLY" = "y" || "$REPLY" = "Y" ]] && preferUSB=true
echo -e ""
#headless?
if [[ "$rwlegacy_file" = "$seabios_hswbdw_box" && "$device" != "monroe" ]]; then
echo_yellow "Install \"headless\" firmware?"
read -ep "This is only needed for servers running without a connected display. [y/N] "
[[ "$REPLY" = "y" || "$REPLY" = "Y" ]] && useHeadless=true
echo -e ""
fi
fi
#download SeaBIOS update
echo_yellow "\nDownloading RW_LEGACY firmware update\n(${rwlegacy_file})"
$CURL -sLO ${rwlegacy_source}${rwlegacy_file}.md5
$CURL -sLO ${rwlegacy_source}${rwlegacy_file}
#verify checksum on downloaded file
md5sum -c ${rwlegacy_file}.md5 --quiet 2> /dev/null
[[ $? -ne 0 ]] && { exit_red "RW_LEGACY download checksum fail; download corrupted, cannot flash"; return 1; }
#preferUSB?
if [ "$preferUSB" = true ]; then
#swanky special case
if [[ "$device" = "swanky" ]]; then
$CURL -sLo bootorder "${cbfs_source}bootorder.usb2"
else
$CURL -sLo bootorder "${cbfs_source}bootorder.usb"
fi
if [ $? -ne 0 ]; then
echo_red "Unable to download bootorder file; boot order cannot be changed."
else
# swap the bootorder file inside the payload's CBFS
${cbfstoolcmd} ${rwlegacy_file} remove -n bootorder > /dev/null 2>&1
${cbfstoolcmd} ${rwlegacy_file} add -n bootorder -f /tmp/bootorder -t raw > /dev/null 2>&1
fi
fi
#useHeadless?
if [ "$useHeadless" = true ]; then
$CURL -sLO "${cbfs_source}${hswbdw_headless_vbios}"
if [ $? -ne 0 ]; then
echo_red "Unable to download headless VGA BIOS; headless firmware cannot be installed."
else
# replace the VGA option ROM in the payload with the headless one
${cbfstoolcmd} ${rwlegacy_file} remove -n pci8086,0406.rom > /dev/null 2>&1
rc0=$?
${cbfstoolcmd} ${rwlegacy_file} add -f ${hswbdw_headless_vbios} -n pci8086,0406.rom -t optionrom > /dev/null 2>&1
rc1=$?
if [[ "$rc0" -ne 0 || "$rc1" -ne 0 ]]; then
echo_red "Warning: error installing headless VGA BIOS"
else
echo_yellow "Headless VGA BIOS installed"
fi
fi
fi
#handle NINJA VGABIOS
if [[ "$device" = "ninja" ]]; then
#extract vbios from stock BOOT_STUB, inject into RWL
${cbfstoolcmd} bios.bin extract -r BOOT_STUB -n pci8086,0f31.rom -f vgabios.bin > /dev/null 2>&1
rc0=$?
${cbfstoolcmd} ${rwlegacy_file} remove -n pci8086,0f31.rom > /dev/null 2>&1
rc1=$?
${cbfstoolcmd} ${rwlegacy_file} add -f vgabios.bin -n pci8086,0f31.rom -t optionrom > /dev/null 2>&1
rc2=$?
if [[ "$rc0" -ne 0 || "$rc1" -ne 0 || "$rc2" -ne 0 ]]; then
echo_red "Warning: error installing VGA BIOS"
else
echo_yellow "VGA BIOS installed"
fi
fi
#flash updated legacy BIOS
echo_yellow "Installing RW_LEGACY firmware"
# write only the RW_LEGACY region; the stock RO firmware is left untouched
${flashromcmd} -w -i RW_LEGACY:${rwlegacy_file} -o /tmp/flashrom.log > /dev/null 2>&1
if [ $? -ne 0 ]; then
cat /tmp/flashrom.log
echo_red "An error occurred flashing the RW_LEGACY firmware."
else
echo_green "RW_LEGACY firmware successfully installed/updated."
# update firmware type
firmwareType="Stock ChromeOS w/RW_LEGACY"
#Prevent from trying to boot stock ChromeOS install
rm -rf /tmp/boot/efi > /dev/null 2>&1
rm -rf /tmp/boot/syslinux > /dev/null 2>&1
fi
if [ -z "$1" ]; then
read -ep "Press [Enter] to return to the main menu."
fi
}
#############################
# Install coreboot Firmware #
#############################
# Install/update full-ROM (coreboot) firmware. Walks the user through a
# series of confirmations (risk disclaimers, UEFI vs Legacy choice, Windows
# disclaimer), backs up stock firmware, carries device data (serial, VPD,
# MRC cache, SMMSTORE) from the old image into the new one, customizes the
# image (boot order / headless VBIOS / PXE for Legacy), then flashes the
# whole chip with flashrom. "$1" non-empty forces UEFI non-interactively.
function flash_coreboot()
{
fwTypeStr=""
if [[ "$hasLegacyOption" = true && "$unlockMenu" = true ]]; then
fwTypeStr="Legacy/UEFI"
else
fwTypeStr="UEFI"
fi
echo_green "\nInstall/Update ${fwTypeStr} Full ROM Firmware"
echo_yellow "IMPORTANT: flashing the firmware has the potential to brick your device,
requiring relatively inexpensive hardware and some technical knowledge to
recover.Not all boards can be tested prior to release, and even then slight
differences in hardware can lead to unforseen failures.
If you don't have the ability to recover from a bad flash, you're taking a risk.
You have been warned."
[[ "$isChromeOS" = true ]] && echo_yellow "Also, flashing Full ROM firmware will remove your ability to run ChromeOS."
read -ep "Do you wish to continue? [y/N] "
[[ "$REPLY" = "y" || "$REPLY" = "Y" ]] || return
#spacing
echo -e ""
# ensure hardware write protect disabled
[[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot flash Full ROM firmware."; return 1; }
#special warning for CR50 devices
if [[ "$isStock" = true && "$hasCR50" = true ]]; then
echo_yellow "NOTICE: flashing your Chromebook is serious business.
To ensure recovery in case something goes wrong when flashing,
be sure to set the ccd capability 'FlashAP Always' using your
USB-C debug cable, otherwise recovery will involve disassembling
your device (which is very difficult in some cases)."
echo_yellow "If you wish to continue, type: 'I ACCEPT' and press enter."
read -e
[[ "$REPLY" = "I ACCEPT" ]] || return
fi
#UEFI or legacy firmware
# UEFI is forced when called non-interactively, when already on UEFI with a
# locked menu, or when no Legacy build exists for this device.
if [[ ! -z "$1" || ( "$isUEFI" = true && "$unlockMenu" = false ) || "$hasLegacyOption" = false ]]; then
useUEFI=true
else
useUEFI=false
if [[ "$hasUEFIoption" = true ]]; then
echo -e ""
echo_yellow "Install UEFI-compatible firmware?"
echo -e "UEFI firmware is the preferred option for all OSes.
Legacy SeaBIOS firmware is deprecated but available for Chromeboxes to enable
PXE (network boot) capability and compatibility with Legacy OS installations.\n"
REPLY=""
while [[ "$REPLY" != "U" && "$REPLY" != "u" && "$REPLY" != "L" && "$REPLY" != "l" ]]
do
read -ep "Enter 'U' for UEFI, 'L' for Legacy: "
if [[ "$REPLY" = "U" || "$REPLY" = "u" ]]; then
useUEFI=true
fi
done
fi
fi
# Windows support disclaimer
if [[ "$isStock" = true && "$useUEFI" = true && "$runsWindows" = false ]]; then
clear
echo_red "VERY IMPORTANT:"
echo -e "Although UEFI firmware is available for your device,
running Windows on it is $RED_TEXT**NOT SUPPORTED**$NORMAL, no matter what
some Youtube video claims. If you post on reddit asking for
help, your post will likely be locked or deleted. Additionally,
your device may not be fully functional under Linux either.
Do your homework and be sure you understand what you are getting into."
echo_yellow "\nIf you still wish to continue, type: 'I UNDERSTAND' and press enter
(or just press enter to return to the main menu)"
read -e
[[ "$REPLY" = "I UNDERSTAND" ]] || return
fi
#UEFI notice if flashing from ChromeOS or Legacy
# /sys/firmware/efi only exists when the running OS booted via UEFI
if [[ "$useUEFI" = true && ! -d /sys/firmware/efi ]]; then
[[ "$isChromeOS" = true ]] && currOS="ChromeOS" || currOS="Your Legacy-installed OS"
echo_yellow "
NOTE: After flashing UEFI firmware, you will need to install a UEFI-compatible
OS; ${currOS} will no longer be bootable. See https://mrchromebox.tech/#faq"
REPLY=""
read -ep "Press Y to continue or any other key to abort. "
[[ "$REPLY" = "y" || "$REPLY" = "Y" ]] || return
fi
#determine correct file / URL
firmware_source=${fullrom_source}
# indirect lookup: resolve coreboot_uefi_<device> / coreboot_<device> vars
if [[ "$hasUEFIoption" = true || "$hasLegacyOption" = true ]]; then
if [ "$useUEFI" = true ]; then
eval coreboot_file=$`echo "coreboot_uefi_${device}"`
else
eval coreboot_file=$`echo "coreboot_${device}"`
fi
else
exit_red "Unknown or unsupported device (${device^^}); cannot continue."; return 1
fi
#peppy special case
# peppy shipped with different trackpads needing different firmware builds
if [ "$device" = "peppy" ]; then
hasElan=$(cat /proc/bus/input/devices | grep "Elan")
hasCypress=$(cat /proc/bus/input/devices | grep "Cypress")
if [[ $hasElan = "" && $hasCypress = "" ]]; then
echo -e ""
read -ep "Unable to automatically determine trackpad type. Does your Peppy have an Elan pad? [y/N] "
if [[ "$REPLY" = "y" || "$REPLY" = "Y" ]]; then
if [ "$useUEFI" = true ]; then
coreboot_file=${coreboot_uefi_peppy_elan}
else
coreboot_file=${coreboot_peppy_elan}
fi
fi
elif [[ $hasElan != "" ]]; then
if [ "$useUEFI" = true ]; then
coreboot_file=${coreboot_uefi_peppy_elan}
else
coreboot_file=${coreboot_peppy_elan}
fi
fi
fi
#auron special case (upgrade from coolstar legacy rom)
if [ "$device" = "auron" ]; then
echo -e ""
echo_yellow "Unable to determine Chromebook model"
echo -e "Because of your current firmware, I'm unable to
determine the exact mode of your Chromebook. Are you using
an Acer C740 (Auron_Paine) or Acer C910/CB5-571 (Auron_Yuna)?
"
REPLY=""
while [[ "$REPLY" != "P" && "$REPLY" != "p" && "$REPLY" != "Y" && "$REPLY" != "y" ]]
do
read -ep "Enter 'P' for Auron_Paine, 'Y' for Auron_Yuna: "
if [[ "$REPLY" = "Y" || "$REPLY" = "y" ]]; then
if [ "$useUEFI" = true ]; then
coreboot_file=${coreboot_uefi_auron_yuna}
else
coreboot_file=${coreboot_auron_yuna}
fi
else
if [ "$useUEFI" = true ]; then
coreboot_file=${coreboot_uefi_auron_paine}
else
coreboot_file=${coreboot_auron_paine}
fi
fi
done
fi
#extract device serial if present in cbfs
${cbfstoolcmd} /tmp/bios.bin extract -n serial_number -f /tmp/serial.txt >/dev/null 2>&1
# create backup if existing firmware is stock
if [[ "$isStock" == "true" ]]; then
# no shellball available to re-download later -> backup is mandatory
if [[ "$hasShellball" = "false" ]]; then
REPLY=y
else
echo_yellow "\nCreate a backup copy of your stock firmware?"
read -ep "This is highly recommended in case you wish to return your device to stock
configuration/run ChromeOS, or in the (unlikely) event that things go south
and you need to recover using an external EEPROM programmer. [Y/n] "
fi
[[ "$REPLY" = "n" || "$REPLY" = "N" ]] && true || backup_firmware
#check that backup succeeded
[ $? -ne 0 ] && return 1
fi
#headless?
useHeadless=false
if [[ $useUEFI = false && ( "$isHswBox" = true || "$isBdwBox" = true ) ]]; then
echo -e ""
echo_yellow "Install \"headless\" firmware?"
read -ep "This is only needed for servers running without a connected display. [y/N] "
if [[ "$REPLY" = "Y" || "$REPLY" = "y" ]]; then
useHeadless=true
fi
fi
#USB boot priority
preferUSB=false
if [[ $useUEFI = false ]]; then
echo -e ""
echo_yellow "Default to booting from USB?"
echo -e "If you default to USB, then any bootable USB device
will have boot priority over the internal SSD.
If you default to SSD, you will need to manually select
the USB Device from the Boot Menu in order to boot it.
"
REPLY=""
while [[ "$REPLY" != "U" && "$REPLY" != "u" && "$REPLY" != "S" && "$REPLY" != "s" ]]
do
read -ep "Enter 'U' for USB, 'S' for SSD: "
if [[ "$REPLY" = "U" || "$REPLY" = "u" ]]; then
preferUSB=true
fi
done
fi
#add PXE?
addPXE=false
if [[ $useUEFI = false && "$hasLAN" = true ]]; then
echo -e ""
echo_yellow "Add PXE network booting capability?"
read -ep "(This is not needed for by most users) [y/N] "
if [[ "$REPLY" = "Y" || "$REPLY" = "y" ]]; then
addPXE=true
echo -e ""
echo_yellow "Boot PXE by default?"
read -ep "(will fall back to SSD/USB) [y/N] "
if [[ "$REPLY" = "Y" || "$REPLY" = "y" ]]; then
pxeDefault=true
fi
fi
fi
#download firmware file
cd /tmp
echo_yellow "\nDownloading Full ROM firmware\n(${coreboot_file})"
$CURL -sLO "${firmware_source}${coreboot_file}"
$CURL -sLO "${firmware_source}${coreboot_file}.sha1"
#verify checksum on downloaded file
sha1sum -c ${coreboot_file}.sha1 --quiet > /dev/null 2>&1
[[ $? -ne 0 ]] && { exit_red "Firmware download checksum fail; download corrupted, cannot flash."; return 1; }
#preferUSB?
if [[ "$preferUSB" = true && $useUEFI = false ]]; then
$CURL -sLo bootorder "${cbfs_source}bootorder.usb"
if [ $? -ne 0 ]; then
echo_red "Unable to download bootorder file; boot order cannot be changed."
else
${cbfstoolcmd} ${coreboot_file} remove -n bootorder > /dev/null 2>&1
${cbfstoolcmd} ${coreboot_file} add -n bootorder -f /tmp/bootorder -t raw > /dev/null 2>&1
fi
fi
#persist serial number?
# serial.txt only exists if the extract from the old image above succeeded
if [ -f /tmp/serial.txt ]; then
echo_yellow "Persisting device serial number"
${cbfstoolcmd} ${coreboot_file} add -n serial_number -f /tmp/serial.txt -t raw > /dev/null 2>&1
fi
#useHeadless?
if [ "$useHeadless" = true ]; then
$CURL -sLO "${cbfs_source}${hswbdw_headless_vbios}"
if [ $? -ne 0 ]; then
echo_red "Unable to download headless VGA BIOS; headless firmware cannot be installed."
else
${cbfstoolcmd} ${coreboot_file} remove -n pci8086,0406.rom > /dev/null 2>&1
${cbfstoolcmd} ${coreboot_file} add -f ${hswbdw_headless_vbios} -n pci8086,0406.rom -t optionrom > /dev/null 2>&1
fi
fi
#addPXE?
if [ "$addPXE" = true ]; then
$CURL -sLO "${cbfs_source}${pxe_optionrom}"
if [ $? -ne 0 ]; then
echo_red "Unable to download PXE option ROM; PXE capability cannot be added."
else
${cbfstoolcmd} ${coreboot_file} add -f ${pxe_optionrom} -n pci10ec,8168.rom -t optionrom > /dev/null 2>&1
#PXE default?
# prepend the NIC's device path to bootorder so PXE is tried first
if [ "$pxeDefault" = true ]; then
${cbfstoolcmd} ${coreboot_file} extract -n bootorder -f /tmp/bootorder > /dev/null 2>&1
${cbfstoolcmd} ${coreboot_file} remove -n bootorder > /dev/null 2>&1
sed -i '1s/^/\/pci@i0cf8\/pci-bridge@1c\/*@0\n/' /tmp/bootorder
${cbfstoolcmd} ${coreboot_file} add -n bootorder -f /tmp/bootorder -t raw > /dev/null 2>&1
fi
fi
fi
#Persist RW_MRC_CACHE for BSW Full ROM firmware
${cbfstoolcmd} /tmp/bios.bin read -r RW_MRC_CACHE -f /tmp/mrc.cache > /dev/null 2>&1
if [[ $isBsw = "true" && $isFullRom = "true" && $? -eq 0 ]]; then
${cbfstoolcmd} ${coreboot_file} write -r RW_MRC_CACHE -f /tmp/mrc.cache > /dev/null 2>&1
fi
#Persist SMMSTORE if exists
${cbfstoolcmd} /tmp/bios.bin read -r SMMSTORE -f /tmp/smmstore > /dev/null 2>&1
if [[ $useUEFI = "true" && $? -eq 0 ]]; then
${cbfstoolcmd} ${coreboot_file} write -r SMMSTORE -f /tmp/smmstore > /dev/null 2>&1
fi
# persist VPD if possible
if extract_vpd /tmp/bios.bin ; then
# try writing to RO_VPD FMAP region
if ! ${cbfstoolcmd} ${coreboot_file} write -r RO_VPD -f /tmp/vpd.bin > /dev/null 2>&1 ; then
# fall back to vpd.bin in CBFS
${cbfstoolcmd} ${coreboot_file} add -n vpd.bin -f /tmp/vpd.bin -t raw > /dev/null 2>&1
fi
fi
#disable software write-protect
echo_yellow "Disabling software write-protect and clearing the WP range"
${flashromcmd} --wp-disable > /dev/null 2>&1
if [ $? -ne 0 ]; then
exit_red "Error disabling software write-protect; unable to flash firmware."; return 1
fi
#clear SW WP range
${flashromcmd} --wp-range 0 0 > /dev/null 2>&1
if [ $? -ne 0 ]; then
# use new command format as of commit 99b9550
${flashromcmd} --wp-range 0,0 > /dev/null 2>&1
if [ $? -ne 0 ]; then
exit_red "Error clearing software write-protect range; unable to flash firmware."; return 1
fi
fi
#flash Full ROM firmware
#flash without verify, to avoid IFD mismatch upon verification
echo_yellow "Installing Full ROM firmware (may take up to 90s)"
${flashromcmd} -n -w "${coreboot_file}" -o /tmp/flashrom.log > /dev/null 2>&1
if [ $? -ne 0 ]; then
cat /tmp/flashrom.log
exit_red "An error occurred flashing the Full ROM firmware. DO NOT REBOOT!"; return 1
else
echo_green "Full ROM firmware successfully installed/updated."
#Prevent from trying to boot stock ChromeOS install
if [[ "$isStock" = true && "$isChromeOS" = true ]]; then
rm -rf /tmp/boot/efi > /dev/null 2>&1
rm -rf /tmp/boot/syslinux > /dev/null 2>&1
fi
#Warn about long RAM training time
echo_yellow "IMPORTANT:\nThe first boot after flashing may take substantially
longer than subsequent boots -- up to 30s or more.
Be patient and eventually your device will boot :)"
# Add note on touchpad firmware for EVE
if [[ "${device^^}" = "EVE" && "$isStock" = true ]]; then
echo_yellow "IMPORTANT:\n
If you're going to run Windows on your Pixelbook, you must downgrade
the touchpad firmware now (before rebooting) otherwise it will not work.
Select the D option from the main main in order to do so."
fi
#set vars to indicate new firmware type
isStock=false
isFullRom=true
# Add NVRAM reset note for 4.12 release
if [[ "$isUEFI" = true && "$useUEFI" = true ]]; then
echo_yellow "IMPORTANT:\n
This update uses a new format to store UEFI NVRAM data, and
will reset your BootOrder and boot entries. You may need to
manually Boot From File and reinstall your bootloader if
booting from the internal storage device fails."
fi
if [[ "$useUEFI" = "true" ]]; then
firmwareType="Full ROM / UEFI (pending reboot)"
isUEFI=true
else
firmwareType="Full ROM / Legacy (pending reboot)"
fi
fi
read -ep "Press [Enter] to return to the main menu."
}
#########################
# Downgrade Touchpad FW #
#########################
# Downgrade the touchpad firmware on EVE (Pixelbook) devices, which is
# required for the touchpad to work under Windows. Downloads the firmware,
# verifies its sha1, and writes the EC_RW region via the touchpad EC
# programmer. No-op on non-EVE devices.
function downgrade_touchpad_fw()
{
# offer to downgrade touchpad firmware on EVE
if [[ "${device^^}" = "EVE" ]]; then
echo_green "\nDowngrade Touchpad Firmware"
echo_yellow "If you plan to run Windows on your Pixelbook, it is necessary to downgrade
the touchpad firmware, otherwise the touchpad will not work."
echo_yellow "You should do this after flashing the UEFI firmware, but before rebooting."
read -ep "Do you wish to downgrade the touchpad firmware now? [y/N] "
if [[ "$REPLY" = "y" || "$REPLY" = "Y" ]] ; then
# ensure firmware write protect disabled
[[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot downgrade touchpad firmware."; return 1; }
# download TP firmware
echo_yellow "\nDownloading touchpad firmware\n(${touchpad_eve_fw})"
$CURL -s -LO "${other_source}${touchpad_eve_fw}"
$CURL -s -LO "${other_source}${touchpad_eve_fw}.sha1"
#verify checksum on downloaded file
sha1sum -c ${touchpad_eve_fw}.sha1 --quiet > /dev/null 2>&1
if [[ $? -eq 0 ]]; then
# flash TP firmware
# -p ec:type=tp targets the touchpad's EC rather than the system flash
echo_green "Flashing touchpad firmware -- do not touch the touchpad while updating!"
${flashromcmd} -p ec:type=tp -i EC_RW -w ${touchpad_eve_fw} -o /tmp/flashrom.log >/dev/null 2>&1
if [ $? -eq 0 ]; then
echo_green "Touchpad firmware successfully downgraded."
echo_yellow "Please reboot your Pixelbook now."
else
echo_red "Error flashing touchpad firmware:"
cat /tmp/flashrom.log
echo_yellow "\nThis function sometimes doesn't work under Linux, in which case it is\nrecommended to try under ChromiumOS."
fi
else
echo_red "Touchpad firmware download checksum fail; download corrupted, cannot flash."
fi
read -ep "Press [Enter] to return to the main menu."
fi
fi
}
##########################
# Restore Stock Firmware #
##########################
# Restore stock (ChromeOS) firmware, either from a user-provided backup on
# USB/SD or from a downloaded "shellball" image for the detected board.
# Merges the device's VPD back into a shellball image when possible, clears
# software write-protect, and flashes the full chip without verification.
function restore_stock_firmware()
{
echo_green "\nRestore Stock Firmware"
echo_yellow "Standard disclaimer: flashing the firmware has the potential to
brick your device, requiring relatively inexpensive hardware and some
technical knowledge to recover. You have been warned."
read -ep "Do you wish to continue? [y/N] "
[[ "$REPLY" = "Y" || "$REPLY" = "y" ]] || return
#spacing
echo -e ""
# ensure hardware write protect disabled
[[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot restore stock firmware."; return 1; }
firmware_file=""
read -ep "Do you have a firmware backup file on USB? [y/N] "
if [[ "$REPLY" = "y" || "$REPLY" = "Y" ]]; then
read -ep "
Connect the USB/SD device which contains the backed-up stock firmware and press [Enter] to continue. "
list_usb_devices
[ $? -eq 0 ] || { exit_red "No USB devices available to read firmware backup."; return 1; }
read -ep "Enter the number for the device which contains the stock firmware backup: " usb_dev_index
[ $usb_dev_index -gt 0 ] && [ $usb_dev_index -le $num_usb_devs ] || { exit_red "Error: Invalid option selected."; return 1; }
usb_device="${usb_devs[${usb_dev_index}-1]}"
mkdir /tmp/usb > /dev/null 2>&1
# mount the whole device first, then fall back to its first partition
mount "${usb_device}" /tmp/usb > /dev/null 2>&1
if [ $? -ne 0 ]; then
mount "${usb_device}1" /tmp/usb
fi
if [ $? -ne 0 ]; then
echo_red "USB device failed to mount; cannot proceed."
read -ep "Press [Enter] to return to the main menu."
umount /tmp/usb > /dev/null 2>&1
return
fi
#select file from USB device
echo_yellow "\n(Potential) Firmware Files on USB:"
ls /tmp/usb/*.{rom,ROM,bin,BIN} 2>/dev/null | xargs -n 1 basename 2>/dev/null
if [ $? -ne 0 ]; then
echo_red "No firmware files found on USB device."
read -ep "Press [Enter] to return to the main menu."
umount /tmp/usb > /dev/null 2>&1
return
fi
echo -e ""
read -ep "Enter the firmware filename: " firmware_file
firmware_file=/tmp/usb/${firmware_file}
if [ ! -f ${firmware_file} ]; then
echo_red "Invalid filename entered; unable to restore stock firmware."
read -ep "Press [Enter] to return to the main menu."
umount /tmp/usb > /dev/null 2>&1
return
fi
#text spacing
echo -e ""
else
if [[ "$hasShellball" = false ]]; then
exit_red "\nUnfortunately I don't have a stock firmware available to download for '${boardName^^}' at this time."
return 1
fi
#download firmware extracted from recovery image
echo_yellow "\nThat's ok, I'll download a shellball firmware for you."
# PANTHER boards share firmware reporting; ask which variant the user has
if [ "${boardName^^}" = "PANTHER" ]; then
echo -e "Which device do you have?\n"
echo "1) Asus CN60 [PANTHER]"
echo "2) HP CB1 [ZAKO]"
echo "3) Dell 3010 [TRICKY]"
echo "4) Acer CXI [MCCLOUD]"
echo "5) LG Chromebase [MONROE]"
echo ""
read -ep "? " fw_num
if [[ $fw_num -lt 1 || $fw_num -gt 5 ]]; then
exit_red "Invalid input - cancelling"
return 1
fi
#confirm menu selection
echo -e ""
read -ep "Confirm selection number ${fw_num} [y/N] "
[[ "$REPLY" = "y" || "$REPLY" = "Y" ]] || { exit_red "User cancelled restoring stock firmware"; return; }
#download firmware file
echo -e ""
echo_yellow "Downloading recovery image firmware file"
case "$fw_num" in
1) _device="panther";
;;
2) _device="zako";
;;
3) _device="tricky";
;;
4) _device="mccloud";
;;
5) _device="monroe";
;;
esac
else
#confirm device detection
echo_yellow "Confirm system details:"
echo -e "Device: ${deviceDesc}"
echo -e "Board Name: ${boardName^^}"
echo -e ""
read -ep "? [y/N] "
if [[ "$REPLY" != "y" && "$REPLY" != "Y" ]]; then
exit_red "Device detection failed; unable to restoring stock firmware"
return 1
fi
echo -e ""
_device=${boardName,,}
fi
#download shellball ROM
echo_yellow "Downloading shellball.${_device}.bin"
$CURL -sLo /tmp/stock-firmware.rom ${shellball_source}shellball.${_device}.bin;
[[ $? -ne 0 ]] && { exit_red "Error downloading; unable to restore stock firmware."; return 1; }
#extract VPD from current firmware if present
if extract_vpd /tmp/bios.bin ; then
#merge with recovery image firmware
if [ -f /tmp/vpd.bin ]; then
echo_yellow "Merging VPD into recovery image firmware"
${cbfstoolcmd} /tmp/stock-firmware.rom write -r RO_VPD -f /tmp/vpd.bin > /dev/null 2>&1
fi
fi
firmware_file=/tmp/stock-firmware.rom
fi
#disable software write-protect
${flashromcmd} --wp-disable > /dev/null 2>&1
if [ $? -ne 0 ]; then
#if [[ $? -ne 0 && ( "$isBsw" = false || "$isFullRom" = false ) ]]; then
exit_red "Error disabling software write-protect; unable to restore stock firmware."; return 1
fi
#clear SW WP range
${flashromcmd} --wp-range 0 0 > /dev/null 2>&1
if [ $? -ne 0 ]; then
# use new command format as of commit 99b9550
${flashromcmd} --wp-range 0,0 > /dev/null 2>&1
if [ $? -ne 0 ]; then
exit_red "Error clearing software write-protect range; unable to restore stock firmware."; return 1
fi
fi
#flash stock firmware
echo_yellow "Restoring stock firmware"
# we won't verify here, since we need to flash the entire BIOS region
# but don't want to get a mismatch from the IFD or ME
${flashromcmd} -n -w "${firmware_file}" -o /tmp/flashrom.log > /dev/null 2>&1
if [ $? -ne 0 ]; then
cat /tmp/flashrom.log
exit_red "An error occurred restoring the stock firmware. DO NOT REBOOT!"; return 1
fi
#all good
echo_green "Stock firmware successfully restored."
echo_green "After rebooting, you will need to restore ChromeOS using the ChromeOS recovery media,
then re-run this script to reset the Firmware Boot Flags (GBB Flags) to factory default."
read -ep "Press [Enter] to return to the main menu."
#set vars to indicate new firmware type
isStock=true
isFullRom=false
isUEFI=false
firmwareType="Stock ChromeOS (pending reboot)"
}
########################
# Extract firmware VPD #
########################
# Extract the VPD (vital product data) from the given firmware image into
# /tmp/vpd.bin. Tries the RO_VPD FMAP region first, then falls back to a
# vpd.bin CBFS entry. Returns 0 on success, 1 if neither source exists.
function extract_vpd()
{
# a firmware image path is required
if [[ -z "$1" ]]; then
exit_red "Error: extract_vpd(): missing function parameter"
return 1
fi
firmware_file="$1"
# FMAP region first, CBFS entry second; give up if both fail
${cbfstoolcmd} ${firmware_file} read -r RO_VPD -f /tmp/vpd.bin >/dev/null 2>&1 \
|| ${cbfstoolcmd} ${firmware_file} extract -n vpd.bin -f /tmp/vpd.bin >/dev/null 2>&1 \
|| return 1
echo_yellow "VPD extracted from current firmware"
return 0
}
#########################
# Backup stock firmware #
#########################
function backup_firmware()
{
    # Back up the stock firmware image (/tmp/bios.bin, read earlier in the
    # script) to a user-selected USB/SD device as
    # stock-firmware-<board>-<date>.rom.
    # On any error, backup_fail() unmounts/cleans the temp mountpoint and
    # reports the message; this function then returns 1.
    echo -e ""
    read -ep "Connect the USB/SD device to store the firmware backup and press [Enter]
to continue. This is non-destructive, but it is best to ensure no other
USB/SD devices are connected. "
    list_usb_devices
    if [ $? -ne 0 ]; then
        backup_fail "No USB devices available to store firmware backup."
        return 1
    fi

    read -ep "Enter the number for the device to be used for firmware backup: " usb_dev_index
    # validate input is a number within range (a bare [ -le ] errors on
    # non-numeric input)
    if ! [[ "$usb_dev_index" =~ ^[0-9]+$ ]] || [ $usb_dev_index -le 0 ] || [ $usb_dev_index -gt $num_usb_devs ]; then
        backup_fail "Error: Invalid option selected."
        return 1
    fi

    usb_device="${usb_devs[${usb_dev_index}-1]}"
    mkdir /tmp/usb > /dev/null 2>&1
    # try mounting the whole device first, then fall back to its first partition
    mount "${usb_device}" /tmp/usb > /dev/null 2>&1
    if [ $? != 0 ]; then
        mount "${usb_device}1" /tmp/usb
    fi
    if [ $? -ne 0 ]; then
        backup_fail "USB backup device failed to mount; cannot proceed."
        return 1
    fi

    backupname="stock-firmware-${boardName}-$(date +%Y%m%d).rom"
    echo_yellow "\nSaving firmware backup as ${backupname}"
    cp /tmp/bios.bin "/tmp/usb/${backupname}"
    if [ $? -ne 0 ]; then
        # cp failing here is a copy/write failure to the USB device, not a
        # read of the stock firmware (which was already read into /tmp)
        backup_fail "Failure copying stock firmware to USB device; cannot proceed."
        return 1
    fi

    sync
    umount /tmp/usb > /dev/null 2>&1
    rmdir /tmp/usb
    echo_green "Firmware backup complete. Remove the USB stick and press [Enter] to continue."
    read -ep ""
}
function backup_fail()
{
    # Abort a firmware backup: best-effort unmount and removal of the
    # temporary mountpoint, then display the supplied error message.
    umount /tmp/usb >/dev/null 2>&1
    rmdir /tmp/usb >/dev/null 2>&1
    exit_red "\n$@"
}
####################
# Set Boot Options #
####################
function set_boot_options()
{
    # Set the firmware boot options (boot delay / default boot target) by
    # rewriting the GBB (Google Binary Block) flags in the firmware.
    # $1 (optional): display label for the legacy boot option.
    # Requires hardware write-protect to be disabled; returns 1 on any error.

    # set boot options via firmware boot flags

    # ensure hardware write protect disabled
    [[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot set Boot Options / GBB Flags."; return 1; }

    [[ -z "$1" ]] && legacy_text="Legacy Boot" || legacy_text="$1"

    echo_green "\nSet Firmware Boot Options (GBB Flags)"
    echo_yellow "Select your preferred boot delay and default boot option.
You can always override the default using [CTRL+D] or
[CTRL+L] on the Developer Mode boot screen"

    echo -e "1) Short boot delay (1s) + ${legacy_text} default
2) Long boot delay (30s) + ${legacy_text} default
3) Short boot delay (1s) + ChromeOS default
4) Long boot delay (30s) + ChromeOS default
5) Reset to factory default
6) Cancel/exit
"
    # GBB flag bitmask per menu choice; presumably these encode the boot-delay
    # and default-boot bits -- TODO confirm against vboot GBB flag definitions
    local _flags=0x0
    while :
    do
        read -ep "? " n
        case $n in
            1) _flags=0x4A9; break;;
            2) _flags=0x4A8; break;;
            3) _flags=0xA9; break;;
            4) _flags=0xA8; break;;
            5) _flags=0x0; break;;
            6) read -ep "Press [Enter] to return to the main menu."; break;;
            *) echo -e "invalid option";;
        esac
    done
    # user chose to cancel
    [[ $n -eq 6 ]] && return

    echo_yellow "\nSetting boot options..."
    #disable software write-protect
    ${flashromcmd} --wp-disable > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        exit_red "Error disabling software write-protect; unable to set GBB flags."; return 1
    fi
    # read current GBB region, update the flags in the copy, write it back
    ${flashromcmd} -r -i GBB:/tmp/gbb.temp > /dev/null 2>&1
    [[ $? -ne 0 ]] && { exit_red "\nError reading firmware (non-stock?); unable to set boot options."; return 1; }
    ${gbbutilitycmd} --set --flags="${_flags}" /tmp/gbb.temp > /dev/null
    [[ $? -ne 0 ]] && { exit_red "\nError setting boot options."; return 1; }
    ${flashromcmd} -w -i GBB:/tmp/gbb.temp > /dev/null 2>&1
    [[ $? -ne 0 ]] && { exit_red "\nError writing back firmware; unable to set boot options."; return 1; }
    echo_green "\nFirmware Boot options successfully set."
    read -ep "Press [Enter] to return to the main menu."
}
###################
# Set Hardware ID #
###################
function set_hwid()
{
    # Set the Hardware ID (HWID) in the firmware's GBB region via gbb_utility.
    # Requires hardware write-protect to be disabled; returns 1 on any error.

    # set HWID using gbb_utility

    # ensure hardware write protect disabled
    [[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot set HWID."; return 1; }

    echo_green "Set Hardware ID (HWID) using gbb_utility"

    #get current HWID
    # NOTE(review): the redirection here applies to the (empty) assignment
    # command, not to crossystem itself -- likely unintended, but harmless
    _hwid="$(crossystem hwid)" >/dev/null 2>&1
    if [ $? -eq 0 ]; then
        echo_yellow "Current HWID is $_hwid"
    fi

    read -ep "Enter a new HWID (use all caps): " hwid
    echo -e ""
    read -ep "Confirm changing HWID to $hwid [y/N] " confirm
    if [[ "$confirm" = "Y" || "$confirm" = "y" ]]; then
        echo_yellow "\nSetting hardware ID..."
        #disable software write-protect
        ${flashromcmd} --wp-disable > /dev/null 2>&1
        if [ $? -ne 0 ]; then
            exit_red "Error disabling software write-protect; unable to set HWID."; return 1
        fi
        # read GBB region, set the new HWID in the copy, flash it back
        ${flashromcmd} -r -i GBB:/tmp/gbb.temp > /dev/null 2>&1
        [[ $? -ne 0 ]] && { exit_red "\nError reading firmware (non-stock?); unable to set HWID."; return 1; }
        ${gbbutilitycmd} --set --hwid="${hwid}" /tmp/gbb.temp > /dev/null
        [[ $? -ne 0 ]] && { exit_red "\nError setting HWID."; return 1; }
        ${flashromcmd} -w -i GBB:/tmp/gbb.temp > /dev/null 2>&1
        [[ $? -ne 0 ]] && { exit_red "\nError writing back firmware; unable to set HWID."; return 1; }
        echo_green "Hardware ID successfully set."
    fi
    read -ep "Press [Enter] to return to the main menu."
}
##################
# Remove Bitmaps #
##################
function remove_bitmaps()
{
    # Remove the ChromeOS boot-screen bitmaps from the firmware's GBB region
    # by replacing them with an empty bitmap file via gbb_utility.
    # Requires hardware write-protect to be disabled; returns 1 on any error.

    # remove bitmaps from GBB using gbb_utility

    # ensure hardware write protect disabled
    [[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot remove bitmaps."; return 1; }

    echo_green "\nRemove ChromeOS Boot Screen Bitmaps"
    read -ep "Confirm removing ChromeOS bitmaps? [y/N] " confirm
    if [[ "$confirm" = "Y" || "$confirm" = "y" ]]; then
        echo_yellow "\nRemoving bitmaps..."
        #disable software write-protect
        ${flashromcmd} --wp-disable > /dev/null 2>&1
        if [ $? -ne 0 ]; then
            exit_red "Error disabling software write-protect; unable to remove bitmaps."; return 1
        fi
        # read GBB region, swap the bitmaps for an empty file, flash it back
        ${flashromcmd} -r -i GBB:/tmp/gbb.temp > /dev/null 2>&1
        [[ $? -ne 0 ]] && { exit_red "\nError reading firmware (non-stock?); unable to remove bitmaps."; return 1; }
        touch /tmp/null-images > /dev/null 2>&1
        ${gbbutilitycmd} --set --bmpfv=/tmp/null-images /tmp/gbb.temp > /dev/null
        [[ $? -ne 0 ]] && { exit_red "\nError removing bitmaps."; return 1; }
        ${flashromcmd} -w -i GBB:/tmp/gbb.temp > /dev/null 2>&1
        [[ $? -ne 0 ]] && { exit_red "\nError writing back firmware; unable to remove bitmaps."; return 1; }
        echo_green "ChromeOS bitmaps successfully removed."
    fi
    read -ep "Press [Enter] to return to the main menu."
}
##################
# Restore Bitmaps #
##################
function restore_bitmaps()
{
    # Restore the stock ChromeOS boot-screen bitmaps into the firmware's GBB
    # region, sourcing them from a freshly downloaded shellball ROM for this
    # device. Requires hardware write-protect disabled; returns 1 on error.

    # ensure hardware write protect disabled
    [[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot restore bitmaps."; return 1; }

    echo_green "\nRestore ChromeOS Boot Screen Bitmaps"
    read -ep "Confirm restoring ChromeOS bitmaps? [y/N] " confirm
    if [[ "$confirm" = "Y" || "$confirm" = "y" ]]; then
        echo_yellow "\nRestoring bitmaps..."
        #disable software write-protect
        ${flashromcmd} --wp-disable > /dev/null 2>&1
        if [ $? -ne 0 ]; then
            exit_red "Error disabling software write-protect; unable to restore bitmaps."; return 1
        fi
        #download shellball
        $CURL -sLo /tmp/shellball.rom ${shellball_source}shellball.${device}.bin;
        [[ $? -ne 0 ]] && { exit_red "Error downloading shellball; unable to restore bitmaps."; return 1; }
        #extract GBB region, bitmaps
        # FIX: write the extracted GBB to /tmp/gbb.new -- it was previously
        # written to ./gbb.new but read back from /tmp/gbb.new below, which
        # broke the restore whenever the CWD was not /tmp
        ${cbfstoolcmd} /tmp/shellball.rom read -r GBB -f /tmp/gbb.new >/dev/null 2>&1
        [[ $? -ne 0 ]] && { exit_red "Error extracting GBB region from shellball; unable to restore bitmaps."; return 1; }
        ${flashromcmd} -r -i GBB:/tmp/gbb.temp > /dev/null 2>&1
        [[ $? -ne 0 ]] && { exit_red "\nError reading firmware (non-stock?); unable to restore bitmaps."; return 1; }
        # pull stock bitmaps out of the shellball GBB, insert into current GBB;
        # the --get step's exit status was previously unchecked
        ${gbbutilitycmd} --get --bmpfv=/tmp/bmpfv /tmp/gbb.new > /dev/null
        [[ $? -ne 0 ]] && { exit_red "\nError extracting bitmaps from shellball GBB."; return 1; }
        ${gbbutilitycmd} --set --bmpfv=/tmp/bmpfv /tmp/gbb.temp > /dev/null
        [[ $? -ne 0 ]] && { exit_red "\nError restoring bitmaps."; return 1; }
        ${flashromcmd} -w -i GBB:/tmp/gbb.temp > /dev/null 2>&1
        [[ $? -ne 0 ]] && { exit_red "\nError writing back firmware; unable to restore bitmaps."; return 1; }
        echo_green "ChromeOS bitmaps successfully restored."
    fi
    read -ep "Press [Enter] to return to the main menu."
}
####################
# Modify BOOT_STUB #
####################
function modify_boot_stub()
{
    # Replace a Baytrail device's BOOT_STUB payload with SeaBIOS for legacy
    # booting: backs up the stock BOOT_STUB into RW_LEGACY first, then strips
    # the vboot payload/config from CBFS and adds the SeaBIOS payload, boot
    # order and SD-card config entries. Removes the ability to run ChromeOS.
    # Returns 1 on error; restores the stock BOOT_STUB if the final flash fails.

    # backup BOOT_STUB into RW_LEGACY
    # modify BOOT_STUB for legacy booting
    # flash back modified slots

    #check baytrail
    [[ "$isByt" = false ]] && { exit_red "\nThis functionality is only available for Baytrail ChromeOS devices currently"; return 1; }

    echo_green "\nInstall/Update BOOT_STUB Firmware (Legacy BIOS)"
    echo_yellow "Standard disclaimer: flashing the firmware has the potential to
brick your device, requiring relatively inexpensive hardware and some
technical knowledge to recover. You have been warned."

    echo_yellow "Also, flashing the BOOT_STUB will remove the ability to run ChromeOS,
so only proceed if you're going to run Linux exclusively."

    read -ep "Do you wish to continue? [y/N] "
    [[ "$REPLY" = "Y"|| "$REPLY" = "y" ]] || return

    # ensure hardware write protect disabled
    [[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot flash/modify BOOT_STUB firmware."; return 1; }

    # cd to working dir
    cd /tmp

    #download SeaBIOS payload
    $CURL -sLO ${bootstub_source}/${bootstub_payload_baytrail}
    $CURL -sLO ${bootstub_source}/${bootstub_payload_baytrail}.md5

    #verify checksum on downloaded file
    md5sum -c ${bootstub_payload_baytrail}.md5 --quiet > /dev/null 2>&1
    [[ $? -ne 0 ]] && { exit_red "SeaBIOS payload download checksum fail; download corrupted, cannot flash."; return 1; }

    #read BOOT_STUB and RW_LEGACY slots
    echo_yellow "\nReading current firmware"
    ${flashromcmd} -r -i BOOT_STUB:boot_stub.bin > /dev/null 2>&1
    rc0=$?
    ${flashromcmd} -r -i RW_LEGACY:rw_legacy.bin > /dev/null 2>&1
    rc1=$?
    [[ $rc0 -ne 0 || $rc1 -ne 0 ]] && { exit_red "Error reading current firmware, unable to flash."; return 1; }

    # A stock BOOT_STUB still contains the fallback/vboot CBFS entry; only
    # back it up when stock, so a prior backup is not clobbered with an
    # already-modified BOOT_STUB
    #if BOOT_STUB is stock
    ${cbfstoolcmd} boot_stub.bin extract -n fallback/vboot -f whocares -m x86 > /dev/null 2>&1
    if [[ "$isChromeOS" = true || $? -eq 0 ]]; then
        #copy BOOT_STUB into top 1MB of RW_LEGACY
        echo_yellow "Backing up stock BOOT_STUB"
        # dd without 'seek' writes at offset 0 of rw_legacy.bin; the restore
        # path below reads the backup back with count=1 from the start
        dd if=boot_stub.bin of=rw_legacy.bin bs=1M conv=notrunc > /dev/null 2>&1
        #flash back
        ${flashromcmd} -w -i RW_LEGACY:rw_legacy.bin > /dev/null 2>&1
    else
        echo_yellow "Non-stock BOOT_STUB, skipping backup"
    fi

    #USB boot priority
    read -ep "Default to booting from USB? If N, always boot from internal storage unless selected from boot menu. [y/N] "
    if [[ "$REPLY" = "y" || "$REPLY" = "Y" ]]; then
        $CURL -sLo bootorder ${cbfs_source}/bootorder.usb
    else
        $CURL -sLo bootorder ${cbfs_source}/bootorder.emmc
    fi

    #modify BOOT_STUB for legacy booting
    echo_yellow "\nModifying BOOT_STUB for legacy boot"
    # strip the vboot payload and any previous legacy-boot config from CBFS
    ${cbfstoolcmd} boot_stub.bin remove -n fallback/payload > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n fallback/vboot > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n bootorder > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n etc/boot-menu-wait > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n etc/sdcard0 > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n etc/sdcard1 > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n etc/sdcard2 > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n etc/sdcard3 > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n etc/sdcard4 > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n etc/sdcard5 > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin remove -n etc/sdcard6 > /dev/null 2>&1
    ${cbfstoolcmd} boot_stub.bin add-payload -n fallback/payload -f ${bootstub_payload_baytrail} -c lzma > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        exit_red "There was an error modifying the BOOT_STUB payload, nothing has been flashed."; return 1
    else
        # add boot order, boot-menu wait time, and etc/sdcardN entries
        # (the integer values are presumably Baytrail SD-card controller
        # addresses -- TODO confirm)
        ${cbfstoolcmd} boot_stub.bin add -n bootorder -f bootorder -t raw > /dev/null 2>&1
        ${cbfstoolcmd} boot_stub.bin add-int -i 3000 -n etc/boot-menu-wait > /dev/null 2>&1
        ${cbfstoolcmd} boot_stub.bin add-int -i 0xd071f000 -n etc/sdcard0 > /dev/null 2>&1
        ${cbfstoolcmd} boot_stub.bin add-int -i 0xd071d000 -n etc/sdcard1 > /dev/null 2>&1
        ${cbfstoolcmd} boot_stub.bin add-int -i 0xd071c000 -n etc/sdcard2 > /dev/null 2>&1
        ${cbfstoolcmd} boot_stub.bin add-int -i 0xd081f000 -n etc/sdcard3 > /dev/null 2>&1
        ${cbfstoolcmd} boot_stub.bin add-int -i 0xd081c000 -n etc/sdcard4 > /dev/null 2>&1
        ${cbfstoolcmd} boot_stub.bin add-int -i 0xd091f000 -n etc/sdcard5 > /dev/null 2>&1
        ${cbfstoolcmd} boot_stub.bin add-int -i 0xd091c000 -n etc/sdcard6 > /dev/null 2>&1

        #flash modified BOOT_STUB back
        echo_yellow "Flashing modified BOOT_STUB firmware"
        ${flashromcmd} -w -i BOOT_STUB:boot_stub.bin > /dev/null 2>&1
        if [ $? -ne 0 ]; then
            # on failure, restore the stock BOOT_STUB from the RW_LEGACY backup
            #flash back stock BOOT_STUB
            dd if=rw_legacy.bin of=boot_stub.bin bs=1M count=1 > /dev/null 2>&1
            ${flashromcmd} -w -i BOOT_STUB:boot_stub.bin > /dev/null 2>&1
            echo_red "There was an error flashing the modified BOOT_STUB, but the stock one has been restored."
        else
            echo_green "BOOT_STUB firmware successfully flashed"
        fi
    fi
    read -ep "Press [Enter] to return to the main menu."
}
#####################
# Restore BOOT_STUB #
#####################
function restore_boot_stub()
{
    # Restore the stock BOOT_STUB firmware region: prefer the backup saved in
    # the first 1MB of RW_LEGACY, fall back to a downloaded shellball ROM.
    # Afterwards resets GBB flags and runs an RW_LEGACY (legacy BIOS) update.
    # Not available under ChromeOS; returns 1 on any error.

    # read backed-up BOOT_STUB from RW_LEGACY
    # verify valid for device
    # flash back to BOOT_STUB
    # set GBB flags to ensure dev mode, legacy boot
    # offer RW_LEGACY update

    #check OS
    [[ "$isChromeOS" = true ]] && { exit_red "\nThis functionality is not available under ChromeOS."; return 1; }

    echo_green "\nRestore stock BOOT_STUB firmware"
    echo_yellow "Standard disclaimer: flashing the firmware has the potential to
brick your device, requiring relatively inexpensive hardware and some
technical knowledge to recover. You have been warned."

    read -ep "Do you wish to continue? [y/N] "
    [[ "$REPLY" = "Y" || "$REPLY" = "y" ]] || return

    # ensure hardware write protect disabled
    [[ "$wpEnabled" = true ]] && { exit_red "\nHardware write-protect enabled, cannot restore BOOT_STUB firmware."; return 1; }

    # cd to working dir
    cd /tmp

    #read backed-up BOOT_STUB from RW_LEGACY slot
    echo_yellow "\nReading current firmware"
    ${flashromcmd} -r -i BOOT_STUB:boot_stub.bin > /dev/null 2>&1
    rc0=$?
    ${flashromcmd} -r -i RW_LEGACY:rw_legacy.bin > /dev/null 2>&1
    rc1=$?
    ${flashromcmd} -r -i GBB:gbb.bin > /dev/null 2>&1
    rc2=$?
    if [[ $rc0 -ne 0 || $rc1 -ne 0 || $rc2 -ne 0 ]]; then
        exit_red "Error reading current firmware, unable to flash."; return 1
    fi

    # the BOOT_STUB backup (if any) occupies the first 1MB of RW_LEGACY
    #truncate to 1MB
    dd if=rw_legacy.bin of=boot_stub.stock bs=1M count=1 > /dev/null 2>&1

    # a valid backup contains a CBFS 'config' entry; otherwise fall back to
    # downloading a shellball ROM and extracting its BOOT_STUB region
    #verify valid BOOT_STUB
    ${cbfstoolcmd} boot_stub.stock extract -n config -f config.${device} > /dev/null 2>&1
    if [[ $? -ne 0 ]]; then
        echo_yellow "No valid BOOT_STUB backup found; attempting to download/extract from a shellball ROM"
        #download and extract from shellball ROM
        $CURL -sLo /tmp/shellball.rom ${shellball_source}shellball.${device}.bin
        if [[ $? -ne 0 ]]; then
            exit_red "No valid BOOT_STUB backup found; error downloading shellball ROM; unable to restore stock BOOT_STUB."
            return 1
        fi
        ${cbfstoolcmd} shellball.rom read -r BOOT_STUB -f boot_stub.stock >/dev/null 2>&1
        if [ $? -ne 0 ]; then
            exit_red "No valid BOOT_STUB backup found; error reading shellball ROM; unable to restore stock BOOT_STUB."
            return 1
        fi
        ${cbfstoolcmd} boot_stub.stock extract -n config -f config.${device} > /dev/null 2>&1
        if [[ $? -ne 0 ]]; then
            exit_red "No BOOT_STUB backup available; unable to restore stock BOOT_STUB"
            return 1
        fi
    fi

    # sanity check: the extracted coreboot config must mention this device
    #verify valid for this device
    cat config.${device} | grep ${device} > /dev/null 2>&1
    [[ $? -ne 0 ]] && { exit_red "No valid BOOT_STUB backup found; unable to restore stock BOOT_STUB"; return 1; }

    #restore stock BOOT_STUB
    echo_yellow "Restoring stock BOOT_STUB"
    ${flashromcmd} -w -i BOOT_STUB:boot_stub.stock > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        #flash back non-stock BOOT_STUB
        ${flashromcmd} -w -i BOOT_STUB:boot_stub.bin > /dev/null 2>&1
        exit_red "There was an error restoring the stock BOOT_STUB, but the modified one has been left in place."; return 1
    fi

    # NOTE(review): the '|| return 1' below only fires if echo_red itself
    # fails, which is almost certainly not the intended control flow -- confirm
    #ensure GBB flags are sane
    ${gbbutilitycmd} --set --flags=0x88 gbb.bin > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo_red "Warning: there was an error setting the GBB flags." || return 1
    fi
    ${flashromcmd} -w -i GBB:gbb.bin > /dev/null 2>&1
    if [ $? -ne 0 ]; then
        echo_red "Warning: there was an error flashing the GBB region; GBB flags in unknown state" || return 1
    fi

    #update legacy BIOS
    flash_rwlegacy skip_prompt > /dev/null

    echo_green "Stock BOOT_STUB firmware successfully restored"

    #all done
    read -ep "Press [Enter] to return to the main menu."
}
function clear_nvram() {
    # Erase the SMMSTORE firmware region, which wipes all UEFI NVRAM (EFI
    # variables) and resets the boot order to its default.
    # Returns 1 if the erase fails.
    echo_green "\nClear UEFI NVRAM"
    echo_yellow "Clearing the NVRAM will remove all EFI variables\nand reset the boot order to the default."
    read -ep "Would you like to continue? [y/N] "
    [[ "$REPLY" = "y" || "$REPLY" = "Y" ]] || return

    echo_yellow "\nClearing NVRAM..."
    if ! ${flashromcmd} -E -i SMMSTORE > /dev/null 2>&1 ; then
        echo_red "\nFailed to erase SMMSTORE firmware region; NVRAM not cleared."
        return 1
    fi

    #all done
    echo_green "NVRAM has been cleared."
    read -ep "Press Enter to continue"
}
########################
# Firmware Update Menu #
########################
function menu_fwupdate() {
    # Dispatch to the appropriate firmware menu for the current firmware type:
    # full-ROM (UEFI) devices get the UEFI menu, everything else the stock menu.
    case "$isFullRom" in
        true) uefi_menu ;;
        *)    stock_menu ;;
    esac
}
function show_header() {
    # Clear the screen and print the script header: device/platform info,
    # firmware type/version, update availability (UEFI only), and firmware
    # write-protect state. Side effect: sets WP_TEXT to the color code the
    # menus use for write-protected options.
    printf "\ec"
    echo -e "${NORMAL}\n ChromeOS Device Firmware Utility Script ${script_date} ${NORMAL}"
    echo -e "${NORMAL} (c) Mr Chromebox <mrchromebox@gmail.com> ${NORMAL}"
    echo -e "${MENU}*********************************************************${NORMAL}"
    echo -e "${MENU}**${NUMBER} Device: ${NORMAL}${deviceDesc} (${boardName^^})"
    echo -e "${MENU}**${NUMBER} Platform: ${NORMAL}$deviceCpuType"
    echo -e "${MENU}**${NUMBER} Fw Type: ${NORMAL}$firmwareType"
    echo -e "${MENU}**${NUMBER} Fw Ver: ${NORMAL}$fwVer ($fwDate)"
    if [[ $isUEFI == true && $hasUEFIoption = true ]]; then
        # check if update available
        # split current firmware date (fwDate appears to be MM/DD/YYYY)
        curr_yy=`echo $fwDate | cut -f 3 -d '/'`
        curr_mm=`echo $fwDate | cut -f 1 -d '/'`
        curr_dd=`echo $fwDate | cut -f 2 -d '/'`
        # latest build date, parsed from the coreboot_uefi_<device> filename
        # (expects a 'mrchromebox_YYYYMMDD.*' suffix)
        eval coreboot_file=$`echo "coreboot_uefi_${device}"`
        date=`echo $coreboot_file | grep -o "mrchromebox.*" | cut -f 2 -d '_' | cut -f 1 -d '.'`
        uefi_yy=`echo $date | cut -c1-4`
        uefi_mm=`echo $date | cut -c5-6`
        uefi_dd=`echo $date | cut -c7-8`
        # NOTE(review): '>' inside [[ ]] compares strings lexicographically;
        # this is only correct while all date fields are zero-padded -- confirm
        if [[ ("$firmwareType" != *"pending"*) && (($uefi_yy > $curr_yy) || \
            ($uefi_yy == $curr_yy && $uefi_mm > $curr_mm) || \
            ($uefi_yy == $curr_yy && $uefi_mm == $curr_mm && $uefi_dd > $curr_dd)) ]]; then
            echo -e "${MENU}**${NORMAL} ${GREEN_TEXT}Update Available ($uefi_mm/$uefi_dd/$uefi_yy)${NORMAL}"
        fi
    fi
    if [ "$wpEnabled" = true ]; then
        echo -e "${MENU}**${NUMBER} Fw WP: ${RED_TEXT}Enabled${NORMAL}"
        WP_TEXT=${RED_TEXT}
    else
        echo -e "${MENU}**${NUMBER} Fw WP: ${NORMAL}Disabled"
        WP_TEXT=${GREEN_TEXT}
    fi
    echo -e "${MENU}*********************************************************${NORMAL}"
}
function stock_menu() {
    # Main menu for devices on stock (non-full-ROM) firmware. Each option is
    # drawn enabled (colored) or disabled (gray) from device/firmware state,
    # and the same conditions are re-checked before dispatch since input is
    # free-form. Hidden option 'U' unlocks all gated entries (unlockMenu=true).
    show_header

    if [[ "$unlockMenu" = true || ( "$isFullRom" = false && "$isBootStub" = false && "$isUnsupported" = false ) ]]; then
        echo -e "${MENU}**${WP_TEXT} ${NUMBER} 1)${MENU} Install/Update RW_LEGACY Firmware ${NORMAL}"
    else
        echo -e "${GRAY_TEXT}** ${GRAY_TEXT} 1)${GRAY_TEXT} Install/Update RW_LEGACY Firmware ${NORMAL}"
    fi
    if [[ "$unlockMenu" = true || "$hasUEFIoption" = true || "$hasLegacyOption" = true ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 2)${MENU} Install/Update UEFI (Full ROM) Firmware ${NORMAL}"
    else
        echo -e "${GRAY_TEXT}** ${GRAY_TEXT} 2)${GRAY_TEXT} Install/Update UEFI (Full ROM) Firmware${NORMAL}"
    fi
    # touchpad downgrade is offered only for the EVE device
    if [[ "${device^^}" = "EVE" ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} D)${MENU} Downgrade Touchpad Firmware ${NORMAL}"
    fi
    if [[ "$unlockMenu" = true || ( "$isFullRom" = false && "$isBootStub" = false ) ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 3)${MENU} Set Boot Options (GBB flags) ${NORMAL}"
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 4)${MENU} Set Hardware ID (HWID) ${NORMAL}"
    else
        echo -e "${GRAY_TEXT}** ${GRAY_TEXT} 3)${GRAY_TEXT} Set Boot Options (GBB flags)${NORMAL}"
        echo -e "${GRAY_TEXT}** ${GRAY_TEXT} 4)${GRAY_TEXT} Set Hardware ID (HWID) ${NORMAL}"
    fi
    # bitmap options are gated on the isHsw/isBdw/isByt/isBsw platform flags
    if [[ "$unlockMenu" = true || ( "$isFullRom" = false && "$isBootStub" = false && \
        ("$isHsw" = true || "$isBdw" = true || "$isByt" = true || "$isBsw" = true )) ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 5)${MENU} Remove ChromeOS Bitmaps ${NORMAL}"
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 6)${MENU} Restore ChromeOS Bitmaps ${NORMAL}"
    fi
    if [[ "$unlockMenu" = true || ( "$isChromeOS" = false && "$isFullRom" = true ) ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 7)${MENU} Restore Stock Firmware (full) ${NORMAL}"
    fi
    if [[ "$unlockMenu" = true || ( "$isByt" = true && "$isBootStub" = true && "$isChromeOS" = false ) ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 8)${MENU} Restore Stock BOOT_STUB ${NORMAL}"
    fi
    if [[ "$unlockMenu" = true || "$isUEFI" = true ]]; then
        echo -e "${MENU}**${WP_TEXT} ${NUMBER} C)${MENU} Clear UEFI NVRAM ${NORMAL}"
    fi
    echo -e "${MENU}*********************************************************${NORMAL}"
    echo -e "${ENTER_LINE}Select a menu option or${NORMAL}"
    echo -e "${nvram}${RED_TEXT}R${NORMAL} to reboot ${NORMAL} ${RED_TEXT}P${NORMAL} to poweroff ${NORMAL} ${RED_TEXT}Q${NORMAL} to quit ${NORMAL}"

    read -e opt
    case $opt in
        1) if [[ "$unlockMenu" = true || "$isChromeOS" = true || "$isFullRom" = false \
                && "$isBootStub" = false && "$isUnsupported" = false ]]; then
                flash_rwlegacy
            fi
            menu_fwupdate
            ;;
        2) if [[ "$unlockMenu" = true || "$hasUEFIoption" = true || "$hasLegacyOption" = true ]]; then
                flash_coreboot
            fi
            menu_fwupdate
            ;;
        [dD]) if [[ "${device^^}" = "EVE" ]]; then
                downgrade_touchpad_fw
            fi
            menu_fwupdate
            ;;
        3) if [[ "$unlockMenu" = true || "$isChromeOS" = true || "$isUnsupported" = false \
                && "$isFullRom" = false && "$isBootStub" = false ]]; then
                set_boot_options
            fi
            menu_fwupdate
            ;;
        4) if [[ "$unlockMenu" = true || "$isChromeOS" = true || "$isUnsupported" = false \
                && "$isFullRom" = false && "$isBootStub" = false ]]; then
                set_hwid
            fi
            menu_fwupdate
            ;;
        5) if [[ "$unlockMenu" = true || ( "$isFullRom" = false && "$isBootStub" = false && \
                ( "$isHsw" = true || "$isBdw" = true || "$isByt" = true || "$isBsw" = true ) ) ]]; then
                remove_bitmaps
            fi
            menu_fwupdate
            ;;
        6) if [[ "$unlockMenu" = true || ( "$isFullRom" = false && "$isBootStub" = false && \
                ( "$isHsw" = true || "$isBdw" = true || "$isByt" = true || "$isBsw" = true ) ) ]]; then
                restore_bitmaps
            fi
            menu_fwupdate
            ;;
        7) if [[ "$unlockMenu" = true || "$isChromeOS" = false && "$isUnsupported" = false \
                && "$isFullRom" = true ]]; then
                restore_stock_firmware
            fi
            menu_fwupdate
            ;;
        8) if [[ "$unlockMenu" = true || "$isBootStub" = true ]]; then
                restore_boot_stub
            fi
            menu_fwupdate
            ;;
        [rR]) echo -e "\nRebooting...\n";
            cleanup
            reboot
            exit
            ;;
        [pP]) echo -e "\nPowering off...\n";
            cleanup
            poweroff
            exit
            ;;
        [qQ]) cleanup;
            exit;
            ;;
        # hidden option: unlock all gated menu entries
        [U]) if [ "$unlockMenu" = false ]; then
                echo_yellow "\nAre you sure you wish to unlock all menu functions?"
                read -ep "Only do this if you really know what you are doing... [y/N]? "
                [[ "$REPLY" = "y" || "$REPLY" = "Y" ]] && unlockMenu=true
            fi
            menu_fwupdate
            ;;
        [cC]) if [[ "$unlockMenu" = true || "$isUEFI" = true ]]; then
                clear_nvram
            fi
            menu_fwupdate
            ;;
        *) clear
            menu_fwupdate;
            ;;
    esac
}
function uefi_menu() {
    # Main menu for devices already running UEFI (Full ROM) firmware. Options
    # are drawn enabled (colored) or disabled (gray) based on firmware/OS
    # state, and the same conditions are re-checked before dispatch.
    show_header

    if [[ "$hasUEFIoption" = true ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 1)${MENU} Install/Update UEFI (Full ROM) Firmware ${NORMAL}"
    else
        echo -e "${GRAY_TEXT}** ${GRAY_TEXT} 1)${GRAY_TEXT} Install/Update UEFI (Full ROM) Firmware${NORMAL}"
    fi
    if [[ "$isChromeOS" = false && "$isFullRom" = true ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} 2)${MENU} Restore Stock Firmware ${NORMAL}"
    else
        echo -e "${GRAY_TEXT}** ${GRAY_TEXT} 2)${GRAY_TEXT} Restore Stock ChromeOS Firmware ${NORMAL}"
    fi
    # touchpad downgrade is offered only for the EVE device
    if [[ "${device^^}" = "EVE" ]]; then
        echo -e "${MENU}**${WP_TEXT} [WP]${NUMBER} D)${MENU} Downgrade Touchpad Firmware ${NORMAL}"
    fi
    if [[ "$unlockMenu" = true || "$isUEFI" = true ]]; then
        echo -e "${MENU}**${WP_TEXT} ${NUMBER} C)${MENU} Clear UEFI NVRAM ${NORMAL}"
    fi
    echo -e "${MENU}*********************************************************${NORMAL}"
    echo -e "${ENTER_LINE}Select a menu option or${NORMAL}"
    echo -e "${nvram}${RED_TEXT}R${NORMAL} to reboot ${NORMAL} ${RED_TEXT}P${NORMAL} to poweroff ${NORMAL} ${RED_TEXT}Q${NORMAL} to quit ${NORMAL}"

    read -e opt
    case $opt in
        1) if [[ "$hasUEFIoption" = true ]]; then
                flash_coreboot
            fi
            uefi_menu
            ;;
        2) if [[ "$isChromeOS" = false && "$isUnsupported" = false \
                && "$isFullRom" = true ]]; then
                restore_stock_firmware
                # back on stock firmware now, so re-dispatch via the main menu
                menu_fwupdate
            else
                uefi_menu
            fi
            ;;
        [dD]) if [[ "${device^^}" = "EVE" ]]; then
                downgrade_touchpad_fw
            fi
            uefi_menu
            ;;
        [rR]) echo -e "\nRebooting...\n";
            cleanup
            reboot
            exit
            ;;
        [pP]) echo -e "\nPowering off...\n";
            cleanup
            poweroff
            exit
            ;;
        [qQ]) cleanup;
            exit;
            ;;
        [cC]) if [[ "$isUEFI" = true ]]; then
                clear_nvram
            fi
            uefi_menu
            ;;
        *) clear
            uefi_menu;
            ;;
    esac
}
|
import {
HttpClient,
HttpEvent,
HttpHandler,
HttpHeaders,
HttpInterceptor,
HttpRequest,
HttpResponse,
HTTP_INTERCEPTORS
} from '@angular/common/http';
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { Component, NgModule, NgModuleFactoryLoader, Type } from '@angular/core';
import { fakeAsync, TestBed, tick } from '@angular/core/testing';
import { Router, RouterModule } from '@angular/router';
import { RouterTestingModule, SpyNgModuleFactoryLoader } from '@angular/router/testing';
import { Observable } from 'rxjs';
import { mapTo } from 'rxjs/operators';
import * as Mock from 'mockjs';
import { AlainMockConfig, ALAIN_CONFIG } from '@delon/util/config';
import { MockRequest } from './interface';
import { DelonMockModule } from './mock.module';
import { MockStatusError } from './status.error';
// Canned payload for 'GET /users'; also used by tests to assert list length
const USER_LIST = { users: [1, 2], a: 0 };
// Mock rule table: keys are '[METHOD ]url' patterns; values may be static
// data, Mock.js templates, callbacks receiving the MockRequest, an
// HttpResponse factory, or functions that throw to simulate errors
const DATA = {
  USERS: {
    'GET /users': USER_LIST,
    '/users/1': Mock.mock({ id: 1, 'rank|3': '★★★' }),
    '/users/:id': (req: MockRequest) => req.params,
    '/array': [1, 2],
    '/fn/queryString': (req: MockRequest) => req.queryString,
    '/fn/header': (req: MockRequest) => req.headers,
    '/HttpResponse': () => new HttpResponse({ body: 'Body', headers: new HttpHeaders({ token: '1' }) }),
    'POST /fn/body': (req: MockRequest) => req.body,
    'POST /users/1': { uid: 1, action: 'add' },
    '/404': () => {
      throw new MockStatusError(404);
    },
    '/500': () => {
      throw new Error('500');
    }
  }
};
// Response emitted by OtherInterceptor; reassigned inside individual tests
let otherRes = new HttpResponse();
// Auxiliary interceptor used to verify the `executeOtherInterceptors` option:
// it forwards the request but replaces every emitted event with `otherRes`.
class OtherInterceptor implements HttpInterceptor {
  intercept(req: HttpRequest<any>, next: HttpHandler): Observable<HttpEvent<any>> {
    const forwarded = req.clone();
    return next.handle(forwarded).pipe(mapTo(otherRes));
  }
}
// Test suite for the @delon/mock HTTP interceptor: rule matching, callback
// rules, error simulation, lazy modules, and other-interceptor chaining.
describe('mock: interceptor', () => {
  let http: HttpClient;
  let httpMock: HttpTestingController;

  // Build the testing module with the given mock rule data and mock config.
  // `spyConsole` silences the interceptor's console logging; additional
  // providers (e.g. extra HTTP_INTERCEPTORS) can be appended via `providers`.
  function genModule(
    data: any,
    options: AlainMockConfig,
    imports: any[] = [],
    spyConsole: boolean = true,
    providers?: any[]
  ): void {
    TestBed.configureTestingModule({
      declarations: [RootComponent],
      imports: [
        HttpClientTestingModule,
        RouterTestingModule.withRoutes([
          {
            path: 'lazy',
            loadChildren: 'expected'
          }
        ]),
        DelonMockModule.forRoot({ data })
      ].concat(imports),
      providers: ([{ provide: ALAIN_CONFIG, useValue: { mock: options } }] as any[]).concat(providers || [])
    });
    http = TestBed.inject<HttpClient>(HttpClient);
    httpMock = TestBed.inject(HttpTestingController as Type<HttpTestingController>);
    if (spyConsole) {
      spyOn(console, 'log');
      spyOn(console, 'warn');
    }
  }

  describe('[default]', () => {
    beforeEach(() => genModule(DATA, { executeOtherInterceptors: false, delay: 1 }));
    it('should be init', (done: () => void) => {
      http.get('/users').subscribe((res: any) => {
        expect(res).not.toBeNull();
        expect(res.users).not.toBeNull();
        expect(res.users.length).toBe(DATA.USERS['GET /users'].users.length);
        done();
      });
    });
    it('should response array', (done: () => void) => {
      http.get('/array').subscribe((res: any) => {
        expect(res).not.toBeNull();
        expect(Array.isArray(res)).toBe(true);
        done();
      });
    });
    it('should response via callback', (done: () => void) => {
      const key = '/fn/queryString';
      http.get(key, { params: { pi: '1' } }).subscribe((res: any) => {
        expect(res).not.toBeNull();
        expect(res.pi).toBe('1');
        done();
      });
    });
    it('should be get the default querystring', (done: () => void) => {
      const key = '/fn/queryString?a=1';
      http.get(key).subscribe((res: any) => {
        expect(res.a).toBe('1');
        done();
      });
    });
    it('should return route params', (done: () => void) => {
      const key = '/users/2';
      http.get(key).subscribe((res: any) => {
        expect(res).not.toBeNull();
        expect(res.id).toBe('2');
        done();
      });
    });
    it('should return body', (done: () => void) => {
      const key = '/fn/body';
      http.post(key, { token: 'asdf' }).subscribe((res: any) => {
        expect(res).not.toBeNull();
        expect(res.token).toBe('asdf');
        done();
      });
    });
    it('should return header', (done: () => void) => {
      const key = '/fn/header';
      http.get(key, { headers: { token: 'asdf' } }).subscribe((res: any) => {
        expect(res).not.toBeNull();
        expect(res.token).toBe('asdf');
        done();
      });
    });
    it('should return HttpResponse', (done: () => void) => {
      const key = '/HttpResponse';
      http.get(key, { observe: 'response' }).subscribe((res: HttpResponse<any>) => {
        expect(res).not.toBeNull();
        expect(res.body).toBe('Body');
        expect(res.headers.get('token')).toBe('1');
        done();
      });
    });
    it('should response HttpStatus: 404', (done: () => void) => {
      http.get('/404').subscribe(
        () => {
          expect(false).toBe(true);
          done();
        },
        () => {
          expect(true).toBe(true);
          done();
        }
      );
    });
    // typo fix: was 'muse be use MockStatusError to throw status error'
    it('must use MockStatusError to throw status error', (done: () => void) => {
      http.get('/500').subscribe(
        () => {
          expect(false).toBe(true);
          done();
        },
        () => {
          expect(true).toBe(true);
          done();
        }
      );
    });
    it('should request POST', (done: () => void) => {
      http.post('/users/1', { data: true }, { observe: 'response' }).subscribe((res: HttpResponse<any>) => {
        expect(res.body).not.toBeNull();
        expect(res.body.uid).toBe(1);
        expect(res.body.action).toBe('add');
        done();
      });
    });
    it('should normal request if non-mock url', (done: () => void) => {
      http.get('/non-mock', { responseType: 'text' }).subscribe(value => {
        expect(value).toBe('ok!');
        done();
      });
      httpMock.expectOne('/non-mock').flush('ok!');
    });
    it('should be array of queryString', (done: () => void) => {
      const key = '/fn/queryString?a=1&b=1&b=2&b=3';
      http.get(key).subscribe((res: any) => {
        expect(Array.isArray(res.b)).toBe(true);
        expect(+res.b[0]).toBe(1);
        expect(+res.b[1]).toBe(2);
        done();
      });
    });
  });

  describe('[disabled log]', () => {
    it('with request', (done: () => void) => {
      genModule(DATA, { delay: 1, log: false });
      http.get('/users').subscribe(() => {
        expect(console.log).not.toHaveBeenCalled();
        done();
      });
    });
    it('with error request', (done: () => void) => {
      genModule(DATA, { delay: 1, log: false });
      http.get('/404').subscribe(
        () => {
          expect(false).toBe(true);
          done();
        },
        () => {
          expect(console.log).not.toHaveBeenCalled();
          expect(true).toBe(true);
          done();
        }
      );
    });
  });

  describe('[lazy module]', () => {
    beforeEach(() => genModule(DATA, { delay: 1 }));
    it('should work', fakeAsync(() => {
      const loader = TestBed.inject(NgModuleFactoryLoader) as SpyNgModuleFactoryLoader;
      const router = TestBed.inject<Router>(Router);
      @Component({
        selector: 'lazy',
        template: '<router-outlet></router-outlet>'
      })
      class LayoutComponent {}
      @Component({
        selector: 'child',
        template: 'length-{{res.users.length}}'
      })
      class ChildComponent {
        res: any = {};
        constructor(HTTP: HttpClient) {
          HTTP.get('/users').subscribe(res => (this.res = res));
        }
      }
      @NgModule({
        declarations: [LayoutComponent, ChildComponent],
        imports: [DelonMockModule.forChild(), RouterModule.forChild([{ path: 'child', component: ChildComponent }])]
      })
      class LazyModule {}
      loader.stubbedModules = { expected: LazyModule };
      const fixture = TestBed.createComponent(RootComponent);
      fixture.detectChanges();
      router.navigateByUrl(`/lazy/child`);
      tick(500);
      fixture.detectChanges();
      const text = (fixture.nativeElement as HTMLElement).textContent;
      expect(text).toContain('length-2');
    }));
  });

  describe('[executeOtherInterceptors]', () => {
    beforeEach(() => {
      genModule(DATA, { delay: 1, executeOtherInterceptors: true }, [], true, [
        { provide: HTTP_INTERCEPTORS, useClass: OtherInterceptor, multi: true }
      ]);
    });
    // typo fix: was 'shoul working'
    it('should work', done => {
      otherRes = new HttpResponse({ body: { a: 1 } });
      http.get('/users').subscribe((res: any) => {
        expect(res).not.toBeNull();
        expect(res.a).toBe(1);
        done();
      });
    });
  });
});
// Minimal host component providing a router outlet for the lazy-route spec.
@Component({
  selector: 'root-cmp',
  template: ` <router-outlet></router-outlet> `
})
class RootComponent {}
|
#!/bin/bash
# Launch the ez-segway "central" demo inside a tmux session:
#   window 100: the mininet topology (we type `h2 ping h3` into it)
#   window   1: the Ryu-based central controller
# Usage: $0 <topology-name>
EZ_PATH=/home/ubuntu/ez-segway/src
N="ez-segway"

if [ $# -eq 0 ]; then
    echo "No arguments supplied"
    exit 1
fi

tmux new-session -d -s "$N"
tmux new-window -t "$N:100" -n 'mininet' "sudo $EZ_PATH/topo.py --method central --topo $1"
# Give mininet a moment to come up before typing into its window.
sleep 1
tmux send-keys -t "$N:100" 'h2 ping h3' Enter

# Plain assignments; the original's $((6733)) arithmetic expansion was a no-op.
OFP_PORT=6733    # OpenFlow listen port for the controller
WSAPI_PORT=8733  # NOTE(review): assigned but never used in this script --
                 # confirm whether anything reads it before deleting.
tmux new-window -t "$N:1" -n "ctrl" "TOPO_INPUT=$1 ryu-manager --ofp-tcp-listen-port $OFP_PORT --use-stderr --verbose $EZ_PATH/central_ctrl.py"
tmux select-window -t "$N:100"
tmux attach-session -t "$N"
|
// Return up to five (character, count) pairs with the highest occurrence
// counts in inputStr. On ties the earlier-inserted entry is kept (std::map
// iterates keys in sorted order, and only a strictly larger count evicts).
std::vector<std::pair<char, int>> getTop5Chars(const std::string &inputStr)
{
    // Count occurrences. operator[] value-initialises a missing key to 0,
    // which replaces the original find()/insert-or-increment dance.
    std::map<char, int> charCount;
    for (const char c : inputStr)
    {
        ++charCount[c];
    }

    // Maintain a running top-5: fill first, then evict the current minimum
    // whenever a strictly larger count appears. erase + push_back (rather
    // than in-place replacement) preserves the original tie-break order.
    std::vector<std::pair<char, int>> top5;
    for (const auto &entry : charCount)
    {
        if (top5.size() < 5)
        {
            top5.push_back(entry);
            continue;
        }
        auto minIt = std::min_element(top5.begin(), top5.end(),
                                      [](const std::pair<char, int> &a,
                                         const std::pair<char, int> &b)
                                      { return a.second < b.second; });
        // min_element on a non-empty vector never returns end(), so the
        // original `minIt != top5.end()` guard was redundant.
        if (minIt->second < entry.second)
        {
            top5.erase(minIt);
            top5.push_back(entry);
        }
    }
    return top5;
}
#!/usr/bin/env bats
# This file is used to test the installation and removal
# of a Debian package.
# WARNING: This testing file must be executed as root and can
# dramatically change your system. It removes the 'elasticsearch'
# user/group and also many directories. Do not execute this file
# unless you know exactly what you are doing.
# The test case can be executed with the Bash Automated
# Testing System tool available at https://github.com/sstephenson/bats
# Thanks to Sam Stephenson!
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Load test utilities
load packaging_test_utils
load os_package
load plugins
# Cleans everything for the 1st execution
# Runs before every @test: skip on non-dpkg systems and export paths.
setup() {
    skip_not_dpkg
    export_elasticsearch_paths
}
##################################
# Install DEB package
##################################
@test "[DEB] dpkg command is available" {
    clean_before_test
    dpkg --version
}
@test "[DEB] package is available" {
    # Exactly one versioned .deb must exist in the working directory.
    count=$(ls elasticsearch-$(cat version).deb | wc -l)
    [ "$count" -eq 1 ]
}
@test "[DEB] deb is signed by our dummy key" {
    # dpkg-sig verifies the signature against some user's .gnupg directory
    # and its simpler to have that directory be the vagrant user's directory
    # rather than root's.
    sudo -u vagrant dpkg-sig --verify elasticsearch*.deb | grep 'GOODSIG'
}
@test "[DEB] package is not installed" {
    # dpkg -s exits non-zero for an unknown/uninstalled package.
    run dpkg -s 'elasticsearch'
    [ "$status" -eq 1 ]
}
@test "[DEB] install package" {
    dpkg -i elasticsearch-$(cat version).deb
}
@test "[DEB] package is installed" {
    dpkg -s 'elasticsearch'
}
@test "[DEB] verify package installation" {
    verify_package_installation
}
@test "[DEB] elasticsearch isn't started by package install" {
    # Wait a second to give Elasticsearch a change to start if it is going to.
    # This isn't perfect by any means but its something.
    sleep 1
    ! ps aux | grep elasticsearch | grep java
    # You might be tempted to use jps instead of the above but that'd have to
    # look like:
    # ! sudo -u elasticsearch jps | grep -i elasticsearch
    # which isn't really easier to read than the above.
}
@test "[DEB] test elasticsearch" {
    # Install scripts used to test script filters and search templates before
    # starting Elasticsearch so we don't have to wait for elasticsearch to scan for
    # them.
    install_elasticsearch_test_scripts
    start_elasticsearch_service
    run_elasticsearch_tests
}
##################################
# Uninstall DEB package
##################################
@test "[DEB] remove package" {
    dpkg -r 'elasticsearch'
}
@test "[DEB] package has been removed" {
    # After remove (not purge) dpkg still knows the package, so -s exits 0
    # and reports "deinstall ok" status while config files remain.
    run dpkg -s 'elasticsearch'
    [ "$status" -eq 0 ]
    echo "$output" | grep -i "status" | grep -i "deinstall ok"
}
@test "[DEB] verify package removal" {
# The removal must stop the service
count=$(ps | grep Elasticsearch | wc -l)
[ "$count" -eq 0 ]
# The removal must disable the service
# see prerm file
if is_systemd; then
# Debian systemd distros usually returns exit code 3
run systemctl status elasticsearch.service
[ "$status" -eq 3 ]
run systemctl is-enabled elasticsearch.service
[ "$status" -eq 1 ]
fi
# Those directories are deleted when removing the package
# see postrm file
assert_file_not_exist "/var/log/elasticsearch"
assert_file_not_exist "/usr/share/elasticsearch/plugins"
assert_file_not_exist "/var/run/elasticsearch"
# The configuration files are still here
assert_file_exist "/etc/elasticsearch"
assert_file_exist "/etc/elasticsearch/elasticsearch.yml"
assert_file_exist "/etc/elasticsearch/logging.yml"
# The env file is still here
assert_file_exist "/etc/default/elasticsearch"
# The service files are still here
assert_file_exist "/etc/init.d/elasticsearch"
assert_file_exist "/usr/lib/systemd/system/elasticsearch.service"
}
@test "[DEB] purge package" {
# User installed scripts aren't removed so we'll just get them ourselves
rm -rf $ESSCRIPTS
dpkg --purge 'elasticsearch'
}
@test "[DEB] verify package purge" {
# all remaining files are deleted by the purge
assert_file_not_exist "/etc/elasticsearch"
assert_file_not_exist "/etc/elasticsearch/elasticsearch.yml"
assert_file_not_exist "/etc/elasticsearch/logging.yml"
assert_file_not_exist "/etc/default/elasticsearch"
assert_file_not_exist "/etc/init.d/elasticsearch"
assert_file_not_exist "/usr/lib/systemd/system/elasticsearch.service"
assert_file_not_exist "/usr/share/elasticsearch"
assert_file_not_exist "/usr/share/doc/elasticsearch"
assert_file_not_exist "/usr/share/doc/elasticsearch/copyright"
}
@test "[DEB] package has been completly removed" {
run dpkg -s 'elasticsearch'
[ "$status" -eq 1 ]
}
@test "[DEB] reinstall package" {
dpkg -i elasticsearch-$(cat version).deb
}
@test "[DEB] package is installed by reinstall" {
dpkg -s 'elasticsearch'
}
@test "[DEB] verify package reinstallation" {
verify_package_installation
}
@test "[DEB] repurge package" {
dpkg --purge 'elasticsearch'
}
@test "[DEB] package has been completly removed again" {
run dpkg -s 'elasticsearch'
[ "$status" -eq 1 ]
}
|
/**
 * Return the n-th Fibonacci number (fib(0) = 0, fib(1) = 1).
 * Inputs <= 1 (including negatives) are returned unchanged, matching the
 * original recursive definition. Rewritten iteratively: the naive double
 * recursion was O(2^n); this runs in O(n) time and O(1) space.
 */
function fibonacci(n) {
  if (n <= 1) {
    return n;
  }
  let prev = 0;
  let curr = 1;
  for (let i = 2; i <= n; i++) {
    [prev, curr] = [curr, prev + curr];
  }
  return curr;
}
console.log(fibonacci(10));
# Run the pre-generated Lustre benchmark job list, at most 6 jobs in parallel.
parallel --jobs 6 < ./results/exp_lustre/run-4/lustre_1n_6t_6d_1000f_617m_5i/jobs/jobs_n0.txt
|
from oeqa.oetest import oeRuntimeTest
class OpenCVDNN1Test(oeRuntimeTest):
    """Runtime test: classify a sample image on the target with opencv-dnn."""

    def test_opencv_dnn_1(self):
        # Classify an image using opencv-dnn
        command = ('dnn-test.py /usr/share/Caffe/data/deploy.prototxt'
                   ' /usr/share/Caffe/models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'
                   ' /usr/share/Caffe/data/dog.jpg')
        status, output = self.target.run(command)
        self.assertEqual(status, 0, msg="Error messages: %s" % output)
        # The dog must be identified as Saluki, gazelle hound (class index 176),
        # which dnn-test.py prints as its final output line.
        last_line = output.split("\n")[-1]
        self.assertEqual(last_line, "176", msg="Misclassified image(%s): %s " % (last_line, output))
|
#!/bin/bash
# Run dieharder statistical test #9 (-d) against built-in generator #28 (-g)
# with a fixed seed (-S) so the run is reproducible.
dieharder -d 9 -g 28 -S 1991483480
|
// No5App.cpp: implementation of the CNo5App class.
//
//////////////////////////////////////////////////////////////////////
#include "stdafx.h"
#include "no5app.h"
#include "path.h"
#include "imainframe.h"
#include "no5options.h"
#include "ignorelist.h"
#include "resource.h"
#include "recentrooms.h"
#include "no5obj.h"
#include "ddforms.tlh"
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
// Construct the application object: allocate the path helper, the smiley
// map and the ignore-options holder; everything else is created lazily in
// Init() or on first use and is therefore nulled out here.
CNo5App::CNo5App()
{
    m_pAppPath = new CPath();
    NO5YAHOO::SmileyMapCreate(&m_psm);
    m_pFrame = NULL;
    m_pgo = NULL;
    m_pvo = NULL;
    m_pio = new CIgnoreOptions();
    m_pRecentRooms = NULL;
    m_pNo5 = NULL;
    m_scripts = 0;
}
// Free the objects allocated in the constructor. Term() must already have
// released the options/COM objects before the destructor runs.
CNo5App::~CNo5App()
{
    delete m_pAppPath;
    NO5YAHOO::SmileyMapDestroy(&m_psm);
    delete m_pio;
    // `delete` on a null pointer is a no-op, so the original
    // `if(m_pRecentRooms)` guard was redundant.
    delete m_pRecentRooms;
}
// One-time application initialisation: resolve the app folder, load the
// smiley map, create/load the option objects, ignore list and recent rooms,
// register the type library and create the script COM object, then probe
// for (and if necessary register) the forms-editor DLLs.
// Returns TRUE when the application folder exists.
BOOL CNo5App::Init(void)
{
#ifdef _DEBUG
    m_pAppPath->SetPath(PATH_CURDIR);
#else
    CPath path(PATH_MODULE);
    m_pAppPath->SetPath(path.GetLocation());
#endif
    if(m_pAppPath->ExistLocation()){
        if(!m_psm->IsLoaded()){
            BOOL res = m_psm->LoadMap(GetSmileysFile());
            ATLASSERT(res);
        }
        g_app.LoadName(IDR_MAINFRAME);
        // create IGeneralOptions instance and load from file
        ATLASSERT(!GetGeneralOptions());
        SetGeneralOptions(IGeneralOptions::CreateMe());
        GetGeneralOptions()->SetFileName(GetOptionsFile());
        GetGeneralOptions()->GetInMemoryOptions();
        // create IVisualOptions instance and load from file
        ATLASSERT(!GetVisualOptions());
        SetVisualOptions(IVisualOptions::CreateMe());
        GetVisualOptions()->SetFile(GetVisualOptionsFile());
        // load ignore options
        LoadIgnoreOptions();
        LoadIgnoreList();
        LoadRecentRooms();
        // register type lib
        _Module.RegisterTypeLib();
        // create script object
        HRESULT hr;
        hr = CComObject<CNo5Obj>::CreateInstance(&m_pNo5);
        if(SUCCEEDED(hr)){
            CComPtr<INo5Obj> sp;
            hr = m_pNo5->QueryInterface(&sp);
            if(SUCCEEDED(hr)){
                hr = m_pNo5->PrivateInit();
                // NOTE(review): Detach() deliberately keeps this reference
                // alive; Term() calls m_pNo5->Release() -- confirm pairing.
                sp.Detach();
            }
        }
        if(SUCCEEDED(hr)){
            // Probe for the forms editor; if CoCreateInstance fails, try to
            // self-register the form DLLs via regsvr32 and fall back to a
            // message box when registration also fails.
            CComPtr<DDFORMSLib::IFormEditor2> sp;
            hr = sp.CoCreateInstance(DDFORMSLib::CLSID_FormEditor);
            if(FAILED(hr)){
                CString path = GetPath();
                path += "ddforms.dll";
                path += " /s";
                // ShellExecute returns a value greater than 32 on success.
                UINT u = (UINT)::ShellExecute(GetDesktopWindow(),"open","regsvr32",path,NULL,SW_HIDE);
                if(u > 32){
                    path = GetPath();
                    path += "ddcontrolpack.dll";
                    path += " /s";
                    u = (UINT)::ShellExecute(GetDesktopWindow(),"open","regsvr32",path,NULL,SW_HIDE);
                    path = GetPath();
                    path += "ddproppageall.dll";
                    path += " /s";
                    u = (UINT)::ShellExecute(GetDesktopWindow(),"open","regsvr32",path,NULL,SW_HIDE);
                }
                // NOTE(review): ShellExecute error codes are <= 32, so this
                // `u < 32` check misses the u == 32 boundary -- confirm.
                if(u < 32)
                    MessageBox(GetDesktopWindow(),"form dlls not registered",GetName(),MB_ICONINFORMATION);
                else
                    hr = S_OK;
            }
        }
        ATLASSERT(SUCCEEDED(hr));
    }
    return m_pAppPath->ExistLocation();
}
// Shutdown counterpart of Init(): persist the option data, destroy the
// option objects, flush/unload the ignore list and recent rooms, release
// the script COM object and unregister the type library.
void CNo5App::Term(void)
{
    WriteIgnoreOptions();
    GetGeneralOptions()->WriteInMemoryOptions();
    IGeneralOptions *pgo = GetGeneralOptions();
    IGeneralOptions::DestroyMe(&pgo);
    IVisualOptions *pvo = GetVisualOptions();
    IVisualOptions::DestroyMe(&pvo);
    UnloadIgnoreList();
    UnloadRecentRooms();
    m_pNo5->Release();
    // unregister type lib
    _Module.UnRegisterTypeLib();
}
// ---------------------------------------------------------------------------
// Well-known application locations. Everything lives relative to the folder
// the executable runs from (GetPath()).
// ---------------------------------------------------------------------------

CString CNo5App::GetPath(void)
{
    return m_pAppPath->GetLocation();
}

// Full long path of the running module (exe).
CString CNo5App::GetModuleFileName(void)
{
    CPath path(PATH_MODULE);
    return path.GetLongPath();
}

CString CNo5App::GetOptionsFile(void)
{
    return GetPath() + _T("no5.ini");
}

// Visual options share the same ini file as the general options.
CString CNo5App::GetVisualOptionsFile(void)
{
    return GetOptionsFile();
}

CString CNo5App::GetScriptsFolder(void)
{
    return GetPath() + _T("scripts\\");
}

CString CNo5App::GetFormsFolder(void)
{
    return GetPath() + _T("forms\\");
}

CString CNo5App::GetSmileysFolder(void)
{
    return GetPath() + _T("smileys\\");
}

CString CNo5App::GetSmileysFile(void)
{
    return GetPath() + _T("smileys.txt");
}

CString CNo5App::GetColorsFile(void)
{
    return GetPath() + _T("colors.ini");
}

CString CNo5App::GetIgnoreFile(void)
{
    return GetPath() + _T("ignore.txt");
}

CString CNo5App::GetRecentRoomsFile(void)
{
    return GetPath() + _T("rooms.txt");
}

CString CNo5App::GetSnapshotFolder(void)
{
    return GetPath() + _T("snapshots\\");
}

CString CNo5App::GetVideoFolder(void)
{
    return GetPath() + _T("videos\\");
}

CString CNo5App::GetBotsFolder(void)
{
    return GetPath() + _T("bots\\");
}

// Load the application display name from a string resource.
void CNo5App::LoadName(UINT uResourceID)
{
    BOOL res = m_name.LoadString(uResourceID);
    ATLASSERT(res);
}
// Accessor for the in-memory ignore options.
CIgnoreOptions & CNo5App::GetIgnoreOptions(void)
{
    return *m_pio;
}

// Pull the ignore options out of the general options store; requires that
// the general options object (m_pgo) was created in Init() first.
void CNo5App::LoadIgnoreOptions(void)
{
    ATLASSERT(m_pgo);
    if(m_pgo){
        BOOL res = m_pgo->GetIgnoreOptions(*m_pio);
        ATLASSERT(res);
    }
}

// Persist the ignore options back into the general options store.
void CNo5App::WriteIgnoreOptions(void)
{
    ATLASSERT(m_pgo);
    if(m_pgo){
        BOOL res = m_pgo->SetIgnoreOptions(*m_pio);
        ATLASSERT(res);
    }
}
// Create the ignore list and populate it from ignore.txt.
void CNo5App::LoadIgnoreList(void)
{
    bool res;
    ATLASSERT(!m_pIgnoreList);
    IIgnoreList::CreateMe(&m_pIgnoreList);
    res = m_pIgnoreList->read(GetIgnoreFile());
    ATLASSERT(res);
}

// Flush the ignore list to disk (only when modified) and destroy it.
void CNo5App::UnloadIgnoreList(void)
{
    if(m_pIgnoreList){
        if(m_pIgnoreList->is_dirty()){
            bool res = m_pIgnoreList->write(g_app.GetIgnoreFile());
            ATLASSERT(res);
        }
        IIgnoreList::DestroyMe(&m_pIgnoreList);
    }
}

// Lazily-loading accessor for the recent rooms list.
CRecentRooms * CNo5App::GetRecentRooms(void)
{
    if(!m_pRecentRooms){
        LoadRecentRooms();
    }
    return m_pRecentRooms;
}

// Create (if needed) and populate the recent rooms list from rooms.txt.
void CNo5App::LoadRecentRooms(void)
{
    BOOL res;
    if(!m_pRecentRooms){
        m_pRecentRooms = new CRecentRooms();
    }
    res = m_pRecentRooms->Read(GetRecentRoomsFile());
    ATLASSERT(res);
}

// Flush the recent rooms list to disk (only when modified) and delete it.
void CNo5App::UnloadRecentRooms(void)
{
    if(m_pRecentRooms){
        if(m_pRecentRooms->IsDirty()){
            BOOL res = m_pRecentRooms->Write(GetRecentRoomsFile());
            ATLASSERT(res);
        }
        delete m_pRecentRooms;
        m_pRecentRooms = NULL;
    }
}
|
const InputGroup = require('./inputGroup');
module.exports = class DashBoard {
constructor(name, nbmrInputs, path, description) {
this.name;
this.inputGroup;
this.blocks = [];
this.blocksLog = [];
this.path;
this.description;
this.saved;
if (arguments.length === 1) {
this.constructFromJson(name);
} else {
this.newConstructor(name, nbmrInputs, path, description);
}
}
newConstructor(name, nbmrInputs, path, description) {
this.name = name;
this.blocks = [];
this.blocksLog = [];
this.path = path;
this.description = description;
this.inputGroup = new InputGroup(nbmrInputs);
this.saved = true;
}
constructFromJson(dashBoardJson) {
this.name = dashBoardJson.name;
this.blocks = dashBoardJson.blocks;
this.blocksLog = dashBoardJson.blocksLog;
this.path = dashBoardJson.path;
this.description = dashBoardJson.description;
this.inputGroup = new InputGroup(dashBoardJson.inputGroup, true);
this.saved = dashBoardJson.saved;
}
}; |
package IPRepository.SAXIBridgeComponent
import chisel3._
// Counts accepted write-data beats (wvalid && wready) in an 8-bit register
// and clears the count when the write-response handshake completes
// (bvalid && bready).
class WriteCounterAct extends Module {
  val io = IO(new Bundle {
    val wvalid = Input(Bool())
    val wready = Input(Bool())
    val bvalid = Input(Bool())
    val bready = Input(Bool())
    val counter = Output(UInt(8.W))
  })
  val counter = RegInit(0.U(8.W))
  // when/elsewhen priority: if both handshakes fire in the same cycle the
  // increment wins and the clear is skipped until a later cycle.
  when(io.wvalid & io.wready) {
    counter := counter + 1.U
  }.elsewhen(io.bvalid & io.bready) {
    counter := 0.U
  }
  io.counter := counter
}
object WriteCounterAct {
  /**
   * Convenience factory: instantiate a WriteCounterAct, wire up the four
   * handshake signals and return its counter output.
   * (Doc fixed: the original comment mislabelled wready/bvalid as "reg";
   * all four parameters are simply wired to the module's inputs.)
   *
   * @param wvalid W-channel valid signal
   * @param wready W-channel ready signal
   * @param bvalid B-channel valid signal
   * @param bready B-channel ready signal
   * @return the module's 8-bit beat-counter output
   */
  def apply(wvalid: Bool,
            wready: Bool,
            bvalid: Bool,
            bready: Bool):
  UInt = {
    val module = Module(new WriteCounterAct)
    module.io.wvalid := wvalid
    module.io.wready := wready
    module.io.bvalid := bvalid
    module.io.bready := bready
    module.io.counter
  }
}
|
<reponame>Urtzik/frontend
import { ActionDetail } from "@material/mwc-list";
import "@material/mwc-list/mwc-list-item";
import { mdiDotsVertical } from "@mdi/js";
import { css, CSSResultGroup, html, LitElement, TemplateResult } from "lit";
import { customElement, property, query, state } from "lit/decorators";
import { fireEvent } from "../../../../src/common/dom/fire_event";
import { slugify } from "../../../../src/common/string/slugify";
import "../../../../src/components/buttons/ha-progress-button";
import "../../../../src/components/ha-button-menu";
import { createCloseHeading } from "../../../../src/components/ha-dialog";
import "../../../../src/components/ha-svg-icon";
import { getSignedPath } from "../../../../src/data/auth";
import { extractApiErrorMessage } from "../../../../src/data/hassio/common";
import {
fetchHassioSnapshotInfo,
HassioSnapshotDetail,
} from "../../../../src/data/hassio/snapshot";
import {
showAlertDialog,
showConfirmationDialog,
} from "../../../../src/dialogs/generic/show-dialog-box";
import { HassDialog } from "../../../../src/dialogs/make-dialog-manager";
import { haStyle, haStyleDialog } from "../../../../src/resources/styles";
import { HomeAssistant } from "../../../../src/types";
import "../../components/supervisor-snapshot-content";
import type { SupervisorSnapshotContent } from "../../components/supervisor-snapshot-content";
import { HassioSnapshotDialogParams } from "./show-dialog-hassio-snapshot";
@customElement("dialog-hassio-snapshot")
class HassioSnapshotDialog
extends LitElement
implements HassDialog<HassioSnapshotDialogParams> {
@property({ attribute: false }) public hass!: HomeAssistant;
@state() private _error?: string;
@state() private _snapshot?: HassioSnapshotDetail;
@state() private _dialogParams?: HassioSnapshotDialogParams;
@state() private _restoringSnapshot = false;
@query("supervisor-snapshot-content")
private _snapshotContent!: SupervisorSnapshotContent;
public async showDialog(params: HassioSnapshotDialogParams) {
this._snapshot = await fetchHassioSnapshotInfo(this.hass, params.slug);
this._dialogParams = params;
this._restoringSnapshot = false;
}
public closeDialog() {
this._snapshot = undefined;
this._dialogParams = undefined;
this._restoringSnapshot = false;
this._error = undefined;
fireEvent(this, "dialog-closed", { dialog: this.localName });
}
protected render(): TemplateResult {
if (!this._dialogParams || !this._snapshot) {
return html``;
}
return html`
<ha-dialog
open
scrimClickAction
@closed=${this.closeDialog}
.heading=${createCloseHeading(this.hass, this._computeName)}
>
${this._restoringSnapshot
? html` <ha-circular-progress active></ha-circular-progress>`
: html`<supervisor-snapshot-content
.hass=${this.hass}
.supervisor=${this._dialogParams.supervisor}
.snapshot=${this._snapshot}
>
</supervisor-snapshot-content>`}
${this._error ? html`<p class="error">Error: ${this._error}</p>` : ""}
<mwc-button
.disabled=${this._restoringSnapshot}
slot="secondaryAction"
@click=${this._restoreClicked}
>
Restore
</mwc-button>
<ha-button-menu
fixed
slot="primaryAction"
@action=${this._handleMenuAction}
@closed=${(ev: Event) => ev.stopPropagation()}
>
<mwc-icon-button slot="trigger" alt="menu">
<ha-svg-icon .path=${mdiDotsVertical}></ha-svg-icon>
</mwc-icon-button>
<mwc-list-item>Download Snapshot</mwc-list-item>
<mwc-list-item class="error">Delete Snapshot</mwc-list-item>
</ha-button-menu>
</ha-dialog>
`;
}
static get styles(): CSSResultGroup {
return [
haStyle,
haStyleDialog,
css`
ha-svg-icon {
color: var(--primary-text-color);
}
ha-circular-progress {
display: block;
text-align: center;
}
`,
];
}
private _handleMenuAction(ev: CustomEvent<ActionDetail>) {
switch (ev.detail.index) {
case 0:
this._downloadClicked();
break;
case 1:
this._deleteClicked();
break;
}
}
private async _restoreClicked() {
const snapshotDetails = this._snapshotContent.snapshotDetails();
this._restoringSnapshot = true;
if (this._snapshotContent.snapshotType === "full") {
await this._fullRestoreClicked(snapshotDetails);
} else {
await this._partialRestoreClicked(snapshotDetails);
}
this._restoringSnapshot = false;
}
private async _partialRestoreClicked(snapshotDetails) {
if (
this._dialogParams?.supervisor !== undefined &&
this._dialogParams?.supervisor.info.state !== "running"
) {
await showAlertDialog(this, {
title: "Could not restore snapshot",
text: `Restoring a snapshot is not possible right now because the system is in ${this._dialogParams?.supervisor.info.state} state.`,
});
return;
}
if (
!(await showConfirmationDialog(this, {
title: "Are you sure you want partially to restore this snapshot?",
confirmText: "restore",
dismissText: "cancel",
}))
) {
return;
}
if (!this._dialogParams?.onboarding) {
this.hass
.callApi(
"POST",
`hassio/snapshots/${this._snapshot!.slug}/restore/partial`,
snapshotDetails
)
.then(
() => {
this.closeDialog();
},
(error) => {
this._error = error.body.message;
}
);
} else {
fireEvent(this, "restoring");
fetch(`/api/hassio/snapshots/${this._snapshot!.slug}/restore/partial`, {
method: "POST",
body: JSON.stringify(snapshotDetails),
});
this.closeDialog();
}
}
private async _fullRestoreClicked(snapshotDetails) {
if (
this._dialogParams?.supervisor !== undefined &&
this._dialogParams?.supervisor.info.state !== "running"
) {
await showAlertDialog(this, {
title: "Could not restore snapshot",
text: `Restoring a snapshot is not possible right now because the system is in ${this._dialogParams?.supervisor.info.state} state.`,
});
return;
}
if (
!(await showConfirmationDialog(this, {
title:
"Are you sure you want to wipe your system and restore this snapshot?",
confirmText: "restore",
dismissText: "cancel",
}))
) {
return;
}
if (!this._dialogParams?.onboarding) {
this.hass
.callApi(
"POST",
`hassio/snapshots/${this._snapshot!.slug}/restore/full`,
snapshotDetails
)
.then(
() => {
this.closeDialog();
},
(error) => {
this._error = error.body.message;
}
);
} else {
fireEvent(this, "restoring");
fetch(`/api/hassio/snapshots/${this._snapshot!.slug}/restore/full`, {
method: "POST",
body: JSON.stringify(snapshotDetails),
});
this.closeDialog();
}
}
private async _deleteClicked() {
if (
!(await showConfirmationDialog(this, {
title: "Are you sure you want to delete this snapshot?",
confirmText: "delete",
dismissText: "cancel",
}))
) {
return;
}
this.hass
.callApi("POST", `hassio/snapshots/${this._snapshot!.slug}/remove`)
.then(
() => {
if (this._dialogParams!.onDelete) {
this._dialogParams!.onDelete();
}
this.closeDialog();
},
(error) => {
this._error = error.body.message;
}
);
}
private async _downloadClicked() {
let signedPath: { path: string };
try {
signedPath = await getSignedPath(
this.hass,
`/api/hassio/snapshots/${this._snapshot!.slug}/download`
);
} catch (err) {
await showAlertDialog(this, {
text: extractApiErrorMessage(err),
});
return;
}
if (window.location.href.includes("ui.nabu.casa")) {
const confirm = await showConfirmationDialog(this, {
title: "Potential slow download",
text:
"Downloading snapshots over the Nabu Casa URL will take some time, it is recomended to use your local URL instead, do you want to continue?",
confirmText: "continue",
dismissText: "cancel",
});
if (!confirm) {
return;
}
}
const a = document.createElement("a");
a.href = signedPath.path;
a.download = `home_assistant_snapshot_${slugify(this._computeName)}.tar`;
this.shadowRoot!.appendChild(a);
a.click();
this.shadowRoot!.removeChild(a);
}
private get _computeName() {
return this._snapshot
? this._snapshot.name || this._snapshot.slug
: "Unnamed snapshot";
}
}
// Register the custom element name in TypeScript's DOM typings so that
// document.createElement("dialog-hassio-snapshot") is correctly typed.
declare global {
  interface HTMLElementTagNameMap {
    "dialog-hassio-snapshot": HassioSnapshotDialog;
  }
}
|
-- Customers of the shop.
-- NOTE(review): no primary key -- consider adding an auto-increment id
-- column so orders.customer_id has something to reference.
CREATE TABLE customers (
    name varchar(255) NOT NULL,
    address varchar(255) NOT NULL,
    phone_number varchar(255) NOT NULL
);

-- Orders placed by customers.
-- NOTE(review): customer_id carries no FOREIGN KEY constraint (and no
-- customer key currently exists to reference) -- confirm intended schema.
CREATE TABLE orders (
    customer_id int NOT NULL,
    order_date datetime NOT NULL,
    total_amount decimal NOT NULL
);
<filename>pkg/catalog/types.go<gh_stars>0
// Copyright 2019 Hewlett Packard Enterprise Development LP
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package catalog

// configmeta is a representation of a virtual cluster config, based on both
// the app type definition and the deploy-time spec provided in the cluster
// CR. It is arranged in a format to be consumed by the app setup Python
// packages.
type configmeta struct {
	Version    string                  `json:"version"`
	Services   map[string]ngRefkeysMap `json:"services"`
	Nodegroups map[string]nodegroup    `json:"nodegroups"`
	Distros    map[string]refkeysMap   `json:"distros"`
	Cluster    cluster                 `json:"cluster"`
	Node       *node                   `json:"node"`
}

// ngRefkeysMap maps a nodegroup ID to its per-key refkeys map.
type ngRefkeysMap map[string]refkeysMap

// refkeysMap maps an identifier to its refkeys.
type refkeysMap map[string]refkeys

// refkeys wraps a bdvlib reference-key list.
type refkeys struct {
	BdvlibRefKey []string `json:"bdvlibrefkey"`
}

// nodegroup describes one nodegroup: its roles, distro and config metadata.
type nodegroup struct {
	Roles               map[string]role   `json:"roles"`
	DistroID            string            `json:"distro_id"`
	CatalogEntryVersion string            `json:"catalog_entry_version"`
	ConfigMeta          map[string]string `json:"config_metadata"`
}

// cluster carries cluster-level identity and config metadata.
type cluster struct {
	Name       string             `json:"name"`
	Isolated   bool               `json:"isolated"`
	ID         string             `json:"id"`
	ConfigMeta map[string]refkeys `json:"config_metadata"`
}

// node identifies a single member node and its place in the cluster.
type node struct {
	RoleID      string     `json:"role_id"`
	NodegroupID string     `json:"nodegroup_id"`
	ID          string     `json:"id"`
	Hostname    string     `json:"hostname"`
	FQDN        string     `json:"fqdn"`
	Domain      string     `json:"domain"`
	DistroID    string     `json:"distro_id"`
	DependsOn   refkeysMap `json:"depends_on"`
}

// role groups the services, nodes and resource flavor of one role.
type role struct {
	Services     map[string]service `json:"services"`
	NodeIDs      []string           `json:"node_ids"`
	Hostnames    []string           `json:"hostnames"`
	FQDNs        []string           `json:"fqdns"`
	FQDNMappings map[string]string  `json:"fqdn_mappings"`
	Flavor       flavor             `json:"flavor"`
}

// service describes one service exposed by a role.
type service struct {
	Qualifiers      []string `json:"qualifiers"`
	Name            string   `json:"name"`
	ID              string   `json:"id"`
	Hostnames       refkeys  `json:"hostnames"`
	GlobalID        string   `json:"global_id"`
	FQDNs           refkeys  `json:"fqdns"`
	ExportedService string   `json:"exported_service"`
	Endpoints       []string `json:"endpoints"`
}

// flavor is the resource sizing (storage/memory/cores) of a role.
type flavor struct {
	Storage     string `json:"storage"`
	Name        string `json:"name"`
	Memory      string `json:"memory"`
	Description string `json:"description"`
	Cores       string `json:"cores"`
}

// ServicePortInfo - A mapping between a Service Port ID and the port number
type ServicePortInfo struct {
	ID   string
	Port int32
}
|
#!/bin/bash
# Bitrise CI step: extract the build's warning count from the xcpretty log,
# gate it against the historical count stored in an sqlite database, then
# commit the updated database back to the repository.
set -ex
DBPath=".ci/ci.sqlite3"
ScriptPath=$( cd "$(dirname "$0")" ; pwd -P )
# NOTE(review): $logFile is never defined in this script -- presumably
# exported by an earlier Bitrise step; confirm before relying on it.
WarningNumber=$($ScriptPath/XCPrettyJSONExtractNumberOfWarning.swift $logFile)
envman add --key WARNING_NUMBER --value "$WarningNumber"
# Fail when the warning count regressed ("relapse") vs. the stored value.
$ScriptPath/relapse "warning_relapse_$BITRISE_SCHEME" "$WarningNumber" "<" $DBPath
# update saved database
git add --all
git commit -am "Update Warning Count Database"
git push origin HEAD:$BITRISE_GIT_BRANCH
|
str = "hello world"
frequency_count = Hash.new(0)
str.each_char do |char|
frequency_count[char] += 1
end
frequency_count.each do |key, value|
puts "#{key} = #{value}"
end
# Output:
# h = 1
# e = 1
# l = 3
# o = 2
# w = 1
# r = 1
# d = 1 |
def generate_ahok_table(ahok):
table_html = "<table>"
for index, message in enumerate(ahok):
table_html += "<tr>"
table_html += f"<td>{index + 1}</td>"
table_html += f"<td>{message['From_User']}</td>"
if not message['text']:
table_html += "<td>N/A</td>"
else:
table_html += f"<td>{message['text']}</td>"
table_html += "</tr>"
table_html += "</table>"
return table_html |
"""
The aiomysql integration instruments the aiomysql library to trace MySQL queries.
Enabling
~~~~~~~~
The integration is enabled automatically when using
:ref:`ddtrace-run<ddtracerun>` or :func:`patch_all()<ddtrace.patch_all>`.
Or use :func:`patch()<ddtrace.patch>` to manually enable the integration::
from ddtrace import patch
patch(aiomysql=True)
Instance Configuration
~~~~~~~~~~~~~~~~~~~~~~
To configure the integration on an per-connection basis use the
``Pin`` API::
from ddtrace import Pin
import asyncio
import aiomysql
# This will report a span with the default settings
conn = await aiomysql.connect(host="127.0.0.1", port=3306,
user="root", password="", db="mysql",
loop=loop)
# Use a pin to override the service name for this connection.
Pin.override(conn, service="mysql-users")
cur = await conn.cursor()
await cur.execute("SELECT 6*7 AS the_answer;")
"""
from ...internal.utils.importlib import require_modules

# Only expose patch/unpatch when aiomysql is importable; otherwise the
# integration stays silently unavailable instead of raising at import time.
required_modules = ["aiomysql"]

with require_modules(required_modules) as missing_modules:
    if not missing_modules:
        from .patch import patch
        from .patch import unpatch

        __all__ = ["patch", "unpatch"]
<reponame>prabhu-raja/gulp-app
// Build pipeline: compile/minify Sass into ./dist/css, transpile/concat/
// uglify JS into ./dist/js, plus a watch task. All gulp-* plugins are
// auto-loaded through gulp-load-plugins and accessed as `plugins.<name>`.
// (Dead commented-out pre-gulp-load-plugins code removed.)
var gulp = require('gulp'),
    plugins = require('gulp-load-plugins')();

// our CSS Task
gulp.task('css', function () {
    return gulp.src(['./src/sass/main.scss'])
        .pipe(plugins.sourcemaps.init())
        .pipe(plugins.sass().on('error', plugins.sass.logError))
        .pipe(plugins.cssmin())
        .pipe(plugins.autoprefixer())
        .pipe(plugins.sourcemaps.write())
        .pipe(gulp.dest('./dist/css'));
});

// our JavaScript Task
gulp.task('js', function () {
    return gulp.src([
        './node_modules/jquery/dist/jquery.min.js',
        './src/js/magic.js',
        './src/js/admin.js'
    ])
        .pipe(plugins.babel({
            presets: ['es2015']
        }))
        .pipe(plugins.concat('all.js'))
        .pipe(plugins.uglify())
        .pipe(gulp.dest('./dist/js'));
});

// Recompile on source changes (gulp 3.x task-name dependency syntax).
gulp.task('watch', function () {
    gulp.watch(['./src/sass/*.scss'], ['css']);
    gulp.watch(['./src/js/*.js'], ['js']); // missing semicolon added
});

gulp.task('default', function () {
    console.log(`Look at my first gulp task!`);
});
<filename>lib/js/src/rpc/structs/DisplayCapabilities.js
/*
* Copyright (c) 2019, Livio, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of the Livio Inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
import { RpcStruct } from '../RpcStruct.js';
import { TextField } from './TextField.js';
import { ImageField } from './ImageField.js';
import { ScreenParams } from './ScreenParams.js';
import { DisplayType } from '../enums/DisplayType.js';
import { MediaClockFormat } from '../enums/MediaClockFormat.js';
class DisplayCapabilities extends RpcStruct {
    /**
     * @param {Object} parameters - Raw parameter map forwarded to RpcStruct.
     */
    constructor (parameters) {
        super(parameters);
    }
    /**
     * @param {DisplayType} displayType - The type of display (validated).
     * @return {DisplayCapabilities} - this, for chaining.
     */
    setDisplayType (displayType) {
        this.validateType(DisplayType, displayType);
        this.setParameter(DisplayCapabilities.KEY_DISPLAY_TYPE, displayType);
        return this;
    }

    /**
     * @return {DisplayType} - The stored display type.
     */
    getDisplayType () {
        return this.getObject(DisplayType, DisplayCapabilities.KEY_DISPLAY_TYPE);
    }

    /**
     * @param {String} displayName - The display's name.
     * @return {DisplayCapabilities} - this, for chaining.
     */
    setDisplayName (displayName) {
        this.setParameter(DisplayCapabilities.KEY_DISPLAY_NAME, displayName);
        return this;
    }

    /**
     * @return {String} - The stored display name.
     */
    getDisplayName () {
        return this.getParameter(DisplayCapabilities.KEY_DISPLAY_NAME);
    }
    /**
     * @param {Array<TextField>} textFields - Text fields the display supports.
     * @return {DisplayCapabilities} - this, for chaining.
     */
    setTextFields (textFields) {
        // TODO make work with arrays
        // this.validateType(TextField, textFields);
        this.setParameter(DisplayCapabilities.KEY_TEXT_FIELDS, textFields);
        return this;
    }

    /**
     * @return {Array<TextField>} - The stored text fields.
     */
    getTextFields () {
        return this.getObject(TextField, DisplayCapabilities.KEY_TEXT_FIELDS);
    }

    /**
     * @param {Array<ImageField>} imageFields - Image fields the display supports.
     * @return {DisplayCapabilities} - this, for chaining.
     */
    setImageFields (imageFields) {
        // TODO make work with arrays
        // this.validateType(ImageField, imageFields);
        this.setParameter(DisplayCapabilities.KEY_IMAGE_FIELDS, imageFields);
        return this;
    }

    /**
     * @return {Array<ImageField>} - The stored image fields.
     */
    getImageFields () {
        return this.getObject(ImageField, DisplayCapabilities.KEY_IMAGE_FIELDS);
    }

    /**
     * @param {Array<MediaClockFormat>} mediaClockFormats - Supported clock formats.
     * @return {DisplayCapabilities} - this, for chaining.
     */
    setMediaClockFormats (mediaClockFormats) {
        // TODO make work with arrays
        // this.validateType(ImageField, mediaClockFormats);
        this.setParameter(DisplayCapabilities.KEY_MEDIA_CLOCK_FORMATS, mediaClockFormats);
        return this;
    }

    /**
     * @return {Array<MediaClockFormat>} - The stored media clock formats.
     */
    getMediaClockFormats () {
        return this.getObject(MediaClockFormat, DisplayCapabilities.KEY_MEDIA_CLOCK_FORMATS);
    }
/**
* @param {Boolean} graphicSupported
* @return {DisplayCapabilities}
*/
setGraphicsSupported (graphicSupported) {
this.setParameter(DisplayCapabilities.KEY_GRAPHICS_SUPPORTED, graphicSupported);
return this;
}
/**
* @return {Boolean}
*/
getGraphicsSupported () {
return this.getParameter(DisplayCapabilities.KEY_GRAPHICS_SUPPORTED);
}
/**
* @param {Array<String>} templatesAvailable
* @return {DisplayCapabilities}
*/
setTemplatesAvailable (templatesAvailable) {
// TODO make work with arrays
// this.validateType(String, templatesAvailable);
this.setParameter(DisplayCapabilities.KEY_TEMPLATES_AVAILABLE, templatesAvailable);
return this;
}
/**
* @return {Array<String>}
*/
getTemplatesAvailable () {
return this.getParameter(DisplayCapabilities.KEY_TEMPLATES_AVAILABLE);
}
/**
* @param {ScreenParams} screenParams
* @return {DisplayCapabilities}
*/
setScreenParams (screenParams) {
this.validateType(ScreenParams, screenParams);
this.setParameter(DisplayCapabilities.KEY_SCREEN_PARAMS, screenParams);
return this;
}
/**
* @return {ScreenParams}
*/
getScreenParams () {
return this.getObject(ScreenParams, DisplayCapabilities.KEY_SCREEN_PARAMS);
}
/**
* @param {Array<Number>} numCustomPresetsAvailable
* @return {DisplayCapabilities}
*/
setNumCustomPresetsAvailable (numCustomPresetsAvailable) {
// TODO make work with arrays
// this.validateType(Number, numCustomPresetsAvailable);
this.setParameter(DisplayCapabilities.KEY_NUM_CUSTOM_PRESETS_AVAILABLE, numCustomPresetsAvailable);
return this;
}
/**
* @return {Array<Number>}
*/
getNumCustomPresetsAvailable () {
return this.getParameter(DisplayCapabilities.KEY_NUM_CUSTOM_PRESETS_AVAILABLE);
}
}
// Wire-level RPC parameter names used by the accessors above.
DisplayCapabilities.KEY_DISPLAY_TYPE = 'displayType';
DisplayCapabilities.KEY_DISPLAY_NAME = 'displayName';
DisplayCapabilities.KEY_TEXT_FIELDS = 'textFields';
DisplayCapabilities.KEY_IMAGE_FIELDS = 'imageFields';
DisplayCapabilities.KEY_MEDIA_CLOCK_FORMATS = 'mediaClockFormats';
// Note: the wire key is singular ('graphicSupported') even though the
// accessor pair is named setGraphicsSupported/getGraphicsSupported.
DisplayCapabilities.KEY_GRAPHICS_SUPPORTED = 'graphicSupported';
DisplayCapabilities.KEY_TEMPLATES_AVAILABLE = 'templatesAvailable';
DisplayCapabilities.KEY_SCREEN_PARAMS = 'screenParams';
DisplayCapabilities.KEY_NUM_CUSTOM_PRESETS_AVAILABLE = 'numCustomPresetsAvailable';

export { DisplayCapabilities };
|
package com.jsgygujun.code.flink.chapter05
import org.apache.flink.streaming.api.scala._
/**
*
* @author <EMAIL>
* @since 2020/8/20 2:44 下午
*/
/**
 * Demonstrates a rolling (incremental) aggregation on a keyed stream:
 * for every incoming tuple the per-key running sum of field 1 is emitted.
 */
object RollingSum {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val inputStream: DataStream[(Int, Int, Int)] = env.fromElements(
      (1, 2, 2), (2, 3, 1), (2, 2, 4), (1, 5, 3))
    // Key by the first tuple field using a key selector function instead of
    // the deprecated positional keyBy(0); behavior is identical.
    val resultStream: DataStream[(Int, Int, Int)] = inputStream
      .keyBy(_._1)
      .sum(1)
    resultStream.print()
    /** Rolling Sum
      * 3> (1,2,2)
      * 4> (2,3,1)
      * 4> (2,5,1)
      * 3> (1,7,2)
      */
    env.execute("Rolling Sum Example")
  }
}
|
#!/bin/bash
# Install the runtime dependencies (JDK 11, Maven, ffmpeg) when missing,
# then launch the installer jar.
set -e

sudo apt-get update

# `command -v` is POSIX-specified and more reliable than `which` for
# detecting whether a binary is on PATH.
if ! command -v java > /dev/null 2>&1; then
    # -y skips the interactive confirmation so the script can run unattended.
    sudo apt-get install -y openjdk-11-jdk
fi

if ! command -v mvn > /dev/null 2>&1; then
    sudo apt-get install -y maven
fi

if ! command -v ffmpeg > /dev/null 2>&1; then
    sudo apt-get install -y ffmpeg
fi

java -jar install.jar
<reponame>DeIaube/YiXing
package arouter.dawn.zju.edu.module_forum.adapter;
import android.content.Context;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import androidx.appcompat.widget.SwitchCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CompoundButton;
import java.util.ArrayList;
import java.util.List;
import arouter.dawn.zju.edu.module_forum.R;
import arouter.dawn.zju.edu.module_forum.config.Constants;
import baselib.util.SPUtil;
/**
* @Auther: Dawn
* @Date: 2018/11/22 22:01
* @Description:
* 更改论坛首页Tab页面的适配器
*/
public class ForumAlterTabAdapter extends RecyclerView.Adapter<ForumAlterTabAdapter.ForumAlterTabHolder> {

    private Context mContext;
    // Tab display titles, parallel to mKeys.
    private List<String> mTitles;
    // SharedPreferences keys storing each tab's enabled state, parallel to mTitles.
    private List<String> mKeys;
    private TabStatusListener mTabStatusListener;

    public void setTabStatusListener(TabStatusListener mTabStatusListener) {
        this.mTabStatusListener = mTabStatusListener;
    }

    public ForumAlterTabAdapter(Context mContext) {
        this.mContext = mContext;
        initData();
    }

    /** Notified whenever the user toggles any tab switch. */
    public interface TabStatusListener {
        void statusChange();
    }

    // Builds the parallel title/key lists for the supported tab categories.
    private void initData() {
        mTitles = new ArrayList<>();
        mKeys = new ArrayList<>();
        mTitles.add(Constants.TYPE_HUMANITY);
        mKeys.add(Constants.TYPE_HUMANITY_KEY);
        mTitles.add(Constants.TYPE_INTELLECTUALITY);
        mKeys.add(Constants.TYPE_INTELLECTUALITY_KEY);
        mTitles.add(Constants.TYPE_KEISURE);
        mKeys.add(Constants.TYPE_KEISURE_KEY);
        mTitles.add(Constants.TYPE_SPORTS);
        mKeys.add(Constants.TYPE_SPORTS_KEY);
        mTitles.add(Constants.TYPE_FINANCE);
        mKeys.add(Constants.TYPE_FINANCE_KEY);
        mTitles.add(Constants.TYPE_FASHION);
        mKeys.add(Constants.TYPE_FASHION_KEY);
        mTitles.add(Constants.TYPE_EMOTION);
        mKeys.add(Constants.TYPE_EMOTION_KEY);
    }

    @NonNull
    @Override
    public ForumAlterTabHolder onCreateViewHolder(@NonNull ViewGroup viewGroup, int i) {
        View rootView = LayoutInflater.from(mContext).
                inflate(R.layout.item_forum_alter_tab, viewGroup, false);
        return new ForumAlterTabHolder(rootView);
    }

    @Override
    public void onBindViewHolder(@NonNull ForumAlterTabHolder forumAlterTabHolder, int i) {
        // Detach any previous listener BEFORE setChecked(): setChecked() fires
        // onCheckedChanged, and on a recycled holder the old listener used to
        // run with the previous row's index (position was only assigned after
        // setChecked), silently writing the wrong preference key and
        // spuriously notifying mTabStatusListener.
        forumAlterTabHolder.switchCompat.setOnCheckedChangeListener(null);
        forumAlterTabHolder.position = i;
        forumAlterTabHolder.switchCompat.setChecked(SPUtil.getBoolean(mKeys.get(i), true));
        forumAlterTabHolder.switchCompat.setText(mTitles.get(i));
        // Capture this row's key so the listener never depends on a mutable index.
        final String key = mKeys.get(i);
        forumAlterTabHolder.switchCompat.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                SPUtil.put(key, isChecked);
                if (mTabStatusListener != null) {
                    mTabStatusListener.statusChange();
                }
            }
        });
    }

    @Override
    public int getItemCount() {
        return mTitles.size();
    }

    class ForumAlterTabHolder extends RecyclerView.ViewHolder {
        SwitchCompat switchCompat;
        // Kept for compatibility; the checked-change listener now captures the
        // preference key directly instead of reading this field.
        int position;

        public ForumAlterTabHolder(@NonNull View itemView) {
            super(itemView);
            switchCompat = itemView.findViewById(R.id.switchWidget);
        }
    }
}
|
<reponame>avidas/Github-Watchdog
import os
from github import GitHubClient
from sendmail import send_email
def main():
    """Check the owner's most recent public push event on GitHub and send an
    email reminder when it is older than NUM_HOURS hours.

    Environment:
        OWNER: GitHub user whose public events are inspected.
        NUM_HOURS: staleness threshold in hours (required).
    """
    gh_client = GitHubClient.create()
    owner = os.environ.get('OWNER', None)
    events = gh_client.get_public_user_events(owner)
    latest_event = GitHubClient.get_latest_push_event(events)
    time_since_last = GitHubClient.get_time_since_last(latest_event)
    # Fail fast with a clear message instead of the opaque
    # "int() argument must be ..." TypeError when NUM_HOURS is unset.
    num_hours_raw = os.environ.get('NUM_HOURS')
    if num_hours_raw is None:
        raise RuntimeError('NUM_HOURS environment variable must be set')
    num_hours = int(num_hours_raw)
    if GitHubClient.is_older_than_limit(time_since_last, num_hours):
        send_email('You have not published to Github for ' + str(time_since_last))


if __name__ == '__main__':
    main()
|
#!/bin/sh
#|| goto :windows_detected
{ # put the whole thing in a block so as not to behave weirdly if interrupted
set -e
#TODO command line parameter handling for non-interactive mode.
# Do we already have a repo?
if [ \( -d .git \) -a \( -f easyinstall.sh \) -a \( -f src/BlockArea.cpp \) ]; then # A good enough indicator that we're in the Cuberite git repo.
cd ../
echo "Cuberite repository detected. This should make the process faster, especially if you compiled before."
fi
# Error functions.
# Print an error banner with the given reason, then abort the script.
error ()
{
	echo
	echo "-----------------"
	echo "Script aborted, reason:"
	# Quote the argument so multi-word reasons are printed verbatim;
	# an unquoted $1 would be word-split and glob-expanded.
	echo "$1"
	exit 1
}
# Print dependency-installation guidance (the distro-specific install command
# may be passed in $1) and exit with status 2.
missingDepsExit ()
{
	if [ "$1" != "" ]; then
		# Typo fix: "depndencies" -> "dependencies".
		echo "You can install the missing dependencies via:"
		echo "$1"
	fi
	echo
	echo "Please install the dependencies, then come back."
	echo
	exit 2
}
# Echo: Greetings.
echo "
Hello, this script will download and compile Cuberite.
On subsequent runs, it will update Cuberite.
The compilation and download will occur in the current directory.
If you're updating, you should run: <Path to Cuberite>/compile.sh
Compiling from source takes time, but it usually generates faster
executables. If you prefer ready-to-use binaries or if you want
more info, please visit: http://cuberite.org/"
### Dependency checks start. ###
MISSING_PACKAGES=""
# Most distros have the following default package and executable names.
GCC_EXE_NAME="g++"
CLANG_EXE_NAME="clang"
COMPILER_PACKAGE_NAME="gcc g++"
# Left side: Executable Name, Right side: Package Name. Note that this is TAB delimited. Spaces will not work.
PROGRAMS='git git
make make
cmake cmake'
# If any OS deviates from the defaults, detect the OS here, and change PROGRAMS, COMPILER_PACKAGE_NAME, etc. as needed.
# Fedora, CentOS, RHEL, Mageia, openSUSE, Mandriva
if (rpm --help > /dev/null 2> /dev/null); then
COMPILER_PACKAGE_NAME="gcc-c++"
fi
# Compiler check.
GCC_EXISTS=0
CLANG_EXISTS=0
$GCC_EXE_NAME --help > /dev/null 2> /dev/null && GCC_EXISTS=1
$CLANG_EXE_NAME --help > /dev/null 2> /dev/null && CLANG_EXISTS=1
if [ $GCC_EXISTS -eq 0 -a $CLANG_EXISTS -eq 0 ]; then
MISSING_PACKAGES=" $COMPILER_PACKAGE_NAME"
fi
# Depdendency check.
# Scan the TAB-delimited $PROGRAMS table and print (space-prefixed, without a
# trailing newline) the package name of every executable not found on $PATH.
# The while-loop runs in a pipeline subshell, so results are communicated via
# stdout only — the caller captures them with command substitution.
checkPackages ()
{
	echo "$PROGRAMS" | while read line; do
		# Fields are TAB separated: <executable name> <package name>.
		EXE_NAME=`echo "$line" | cut -f 1`
		PACKAGE_NAME=`echo "$line" | cut -f 2`
		command -v $EXE_NAME > /dev/null 2> /dev/null || echo -n " $PACKAGE_NAME"
	done
}
MISSING_PACKAGES="$MISSING_PACKAGES`checkPackages`"
if [ "$MISSING_PACKAGES" != "" ]; then
echo
echo "-----------------"
echo "You have missing compilation dependencies:"
echo $MISSING_PACKAGES
echo
# apt-get guide.
apt-get --help > /dev/null 2> /dev/null && \
missingDepsExit "sudo apt-get install$MISSING_PACKAGES"
# yum guide.
yum --help > /dev/null 2> /dev/null && \
missingDepsExit "sudo yum install$MISSING_PACKAGES"
# zypper guide.
zypper --help > /dev/null 2> /dev/null && \
missingDepsExit "sudo zypper install$MISSING_PACKAGES"
# pacman guide.
pacman --help > /dev/null 2> /dev/null && \
missingDepsExit "sudo pacman -S$MISSING_PACKAGES"
# urpmi guide.
urpmi --help > /dev/null 2> /dev/null && \
missingDepsExit "sudo urpmi$MISSING_PACKAGES"
missingDepsExit ""
fi
### Dependency checks end. ###
# Bypass Branch choice and choose master. Because it's the only branch right now.
BRANCH="master"
### Inactive code start. ###
inactiveCode ()
{
# Echo: Branch choice.
echo "
You can choose between 3 branches:
* (S)Stable: Choose the stable branch if you want the most
reliable server.
* (T)Testing: The testing branch is less stable,
but using it and reporting bugs helps us a lot!
* (D)Dev: The least stable of the three. (Master branch)
Choose the development branch if you want to try new,
bleeding-edge features.
"
# Input: Branch choice.
echo -n "Choose the branch (s/t/d): "
read BRANCH
if [ \( "$BRANCH" = "s" \) -o \( "$BRANCH" = "S" \) ]; then
#BRANCH="stable"
error "We don't have a stable branch yet, please use testing, sorry."
elif [ \( $BRANCH = "t" \) -o \( $BRANCH = "T" \) ]; then
BRANCH="testing"
elif [ \( $BRANCH = "d" \) -o \( $BRANCH = "D" \) ]; then
BRANCH="master"
else
error "Unrecognized user input."
fi
}
### Inactive code end. ###
# Echo: Compile mode choice.
echo "
Choose compile mode:
* (N)Normal: Compiles normally.
Generates the fastest build.
* (D)Debug: Compiles in debug mode.
Makes your console and crashes more verbose.
A bit slower than Normal mode. If you plan to help
development by reporting bugs, this is preferred.
Note that the script will connect to the internet in order to fetch
code after this step. It will then compile your program.
"
# Input: Compile mode choice.
echo -n "Choose compile mode: (n/d): "
read BUILDTYPE
if [ \( "$BUILDTYPE" = "d" \) -o \( "$BUILDTYPE" = "D" \) ]; then
BUILDTYPE="Debug"
elif [ \( "$BUILDTYPE" = "n" \) -o \( "$BUILDTYPE" = "N" \) ]; then
BUILDTYPE="Release"
else
error "Unrecognized user input."
fi
# Echo: Downloading began.
echo
echo " --- Downloading Cuberite's source code from the $BRANCH branch..."
if [ ! -d cuberite ]; then
# Git: Clone.
echo " --- Looks like your first run, cloning the whole code..."
git clone https://github.com/cuberite/cuberite.git
cd cuberite
else
# Git: Fetch.
cd cuberite
echo " --- Updating the $BRANCH branch..."
git fetch origin $BRANCH || error "git fetch failed"
git checkout $BRANCH || error "git checkout failed"
git merge origin/$BRANCH || error "git merge failed"
fi
# Git: Submodules.
echo " --- Updating submodules..."
git submodule update --init
# Cmake.
echo " --- Running cmake..."
if [ ! -d build-cuberite ]; then mkdir build-cuberite; fi
cd build-cuberite
cmake .. -DCMAKE_BUILD_TYPE=$BUILDTYPE || error "cmake failed"
# Make.
echo " --- Compiling..."
make -j 2 || error "Compiling failed"
echo
# Echo: Compilation complete.
cd ../Server
echo
echo "-----------------"
echo "Compilation done!"
echo
echo "Cuberite awaits you at:"
if [ "$BUILDTYPE" = "Debug" ]; then
echo "`pwd`/Cuberite_debug"
else
echo "`pwd`/Cuberite"
fi
cd ..
echo "
You can always update Cuberite by executing:
`pwd`/compile.sh
Enjoy :)"
exit 0
:windows_detected
echo "This script is not available for Windows yet, sorry."
echo "You can still download the Windows binaries from: http://cuberite.org"
echo "You can also manually compile for Windows. See: https://github.com/cuberite/cuberite"
}
|
package io.api.etherscan.core.impl;
import com.google.gson.Gson;
import io.api.etherscan.error.ApiException;
import io.api.etherscan.error.EtherScanException;
import io.api.etherscan.error.ParseException;
import io.api.etherscan.error.RateLimitException;
import io.api.etherscan.executor.IHttpExecutor;
import io.api.etherscan.manager.IQueueManager;
import io.api.etherscan.model.utility.StringResponseTO;
import io.api.etherscan.util.BasicUtils;
import java.util.Map;
/**
* Base provider for API Implementations
*
* @author GoodforGod
* @see EtherScanApi
* @since 28.10.2018
*/
abstract class BasicProvider {

    static final int MAX_END_BLOCK = Integer.MAX_VALUE;
    static final int MIN_START_BLOCK = 0;

    static final String ACT_PREFIX = "&action=";

    // Prefix EtherScan puts in "result" when the request quota is exhausted.
    private static final String RATE_LIMIT_PREFIX = "Max rate limit reached";

    private final String module;
    private final String baseUrl;
    private final IHttpExecutor executor;
    private final IQueueManager queue;
    private final Gson gson;

    BasicProvider(final IQueueManager queue,
                  final String module,
                  final String baseUrl,
                  final IHttpExecutor executor) {
        this.queue = queue;
        this.module = "&module=" + module;
        this.baseUrl = baseUrl;
        this.executor = executor;
        this.gson = new Gson();
    }

    /**
     * Deserializes an API JSON response into the given type, translating
     * rate-limit payloads into {@link RateLimitException}.
     *
     * @throws RateLimitException if the response signals the API rate limit.
     * @throws ParseException     if the JSON cannot be mapped to {@code tClass}.
     */
    <T> T convert(final String json, final Class<T> tClass) {
        try {
            final T t = gson.fromJson(json, tClass);
            if (t instanceof StringResponseTO) {
                // getResult() may legitimately be null; guard to avoid an NPE.
                final String result = ((StringResponseTO) t).getResult();
                if (result != null && result.startsWith(RATE_LIMIT_PREFIX))
                    throw new RateLimitException(result);
            }
            return t;
        } catch (ApiException e) {
            // Already a meaningful API error (e.g. RateLimitException):
            // propagate as-is instead of re-parsing and wrapping it.
            throw e;
        } catch (Exception e) {
            try {
                // Fallback: treat the payload as a generic map and inspect "result".
                final Map<String, Object> map = gson.fromJson(json, Map.class);
                final Object result = map.get("result");
                if (result instanceof String && ((String) result).startsWith(RATE_LIMIT_PREFIX))
                    throw new RateLimitException((String) result);

                throw new ParseException(e.getMessage() + ", for response: " + json, e.getCause(), json);
            } catch (ApiException ex) {
                throw ex;
            } catch (Exception ex) {
                throw new ParseException(e.getMessage() + ", for response: " + json, e.getCause(), json);
            }
        }
    }

    /**
     * Performs a rate-limited GET against the configured module endpoint.
     *
     * @throws EtherScanException if the server returns an empty body.
     */
    String getRequest(final String urlParameters) {
        queue.takeTurn();
        final String url = baseUrl + module + urlParameters;
        final String result = executor.get(url);
        if (BasicUtils.isEmpty(result))
            throw new EtherScanException("Server returned null value for GET request at URL - " + url);

        return result;
    }

    /** Performs a rate-limited POST against the configured module endpoint. */
    String postRequest(final String urlParameters, final String dataToPost) {
        queue.takeTurn();
        final String url = baseUrl + module + urlParameters;
        return executor.post(url, dataToPost);
    }

    /** GET plus JSON deserialization into {@code tClass}. */
    <T> T getRequest(final String urlParameters, final Class<T> tClass) {
        return convert(getRequest(urlParameters), tClass);
    }

    /** POST plus JSON deserialization into {@code tClass}. */
    <T> T postRequest(final String urlParameters, final String dataToPost, final Class<T> tClass) {
        return convert(postRequest(urlParameters, dataToPost), tClass);
    }
}
|
# Adds per-order currency tracking: the ISO code and the exchange rate in
# effect at purchase time (defaults: USD at 1.0 for existing rows).
class AddCurrencyAndCurrencyRateToOrders < ActiveRecord::Migration
  def change
    # NOTE(review): the :after column-position option is honored by the MySQL
    # adapter and silently ignored elsewhere — confirm the target database.
    add_column :orders, :currency, :string, default: 'USD', null: false, after: :id
    add_column :orders, :currency_rate, :decimal, precision: 8, scale: 4, null: false, default: 1, after: :currency
  end
end
|
<reponame>knkgun/curve25519-voi<filename>internal/scalar128/scalar128.go
// Copyright (c) 2021 Oasis Labs Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Package scalar128 implements the fast generation of random 128-bit
// scalars for the purpose of batch verification.
package scalar128
import (
"bytes"
"fmt"
"io"
"golang.org/x/crypto/chacha20"
"github.com/oasisprotocol/curve25519-voi/curve/scalar"
)
const randomizerSize = scalar.ScalarSize / 2
var zeroRandomizer [scalar.ScalarSize]byte
// The ideas behind this implementation are based on "Faster batch forgery
// identification" (https://eprint.iacr.org/2012/549.pdf), which is basically
// one of the few places where how to select an appropriate z_i is discussed.
//
// It turns out that even in the no-assembly (`purego`) case, calling chacha20
// is more performant than reading from the system entropy source, at least
// on my 64-bit Intel Linux systems.
// FixRawRangeVartime adjusts the raw scalar to be in the correct range,
// in variable-time.
//
// From the paper:
//
//  Of course, it is also safe to simply generate z_i as a uniform
//  random b-bit integer, disregarding the negligible chance that
//  z_i == 0; but this requires minor technical modifications to
//  the security guarantees stated below, so we prefer to require
//  z != 0.
//
func FixRawRangeVartime(rawScalar *[scalar.ScalarSize]byte) {
	// From the paper:
	//
	//  As precomputation we choose z_1, z_2, . . . , z_n independently
	//  and uniformly at random from the set {1, 2, 3, . . . , 2^b},
	//  where b is the security level. There are several reasonable ways
	//  to do this: for example, generate a uniform random b-bit integer
	//  and add 1, or generate a uniform random b-bit integer and
	//  replace 0 with 2^b.
	//
	// We go with the latter approach and replace 0 with 2^b, since
	// it is significantly faster for the common case.
	if bytes.Equal(rawScalar[:], zeroRandomizer[:]) {
		// Writing 1 at byte index randomizerSize sets the
		// 2^(8*randomizerSize) bit, i.e. the value becomes exactly 2^b.
		rawScalar[randomizerSize] = 1
	}
}
// Generator is a random 128-bit scalar generator.
type Generator struct {
cipher *chacha20.Cipher
// Go's escape analysis insists on sticking this on the heap, if
// it is declared in the function when building with `purego`,
// but not if assembly is being used for chacha20.
tmp [scalar.ScalarSize]byte
}
// SetScalarVartime sets the scalar to a random 128-bit scalar, in
// variable-time.
func (gen *Generator) SetScalarVartime(s *scalar.Scalar) error {
	// perf: This probably isn't needed, XOR-ing the previous output
	// with more keystream is still random.
	for i := range gen.tmp[:randomizerSize+1] {
		gen.tmp[i] = 0
	}
	// Only the first randomizerSize bytes receive keystream; the rest of
	// tmp stays zero, so (after the zero fix-up) the value is at most 2^b.
	gen.cipher.XORKeyStream(gen.tmp[:randomizerSize], gen.tmp[:randomizerSize])
	FixRawRangeVartime(&gen.tmp)

	// Since the random scalar is 128-bits, there is no need to reduce.
	if _, err := s.SetBits(gen.tmp[:]); err != nil {
		return fmt.Errorf("internal/scalar128: failed to deserialize random scalar: %w", err)
	}

	return nil
}
// NewGenerator constructs a new random scalar generator, using entropy
// from the provided source.
func NewGenerator(rand io.Reader) (*Generator, error) {
	var (
		key   [chacha20.KeySize]byte
		nonce [chacha20.NonceSize]byte
	)
	if _, err := io.ReadFull(rand, key[:]); err != nil {
		return nil, fmt.Errorf("internal/scalar128: failed to read random key: %w", err)
	}

	// The nonce is left all-zero: the key is freshly generated and used
	// only by this generator instance, so nonce reuse cannot occur.
	cipher, err := chacha20.NewUnauthenticatedCipher(key[:], nonce[:])
	if err != nil {
		// Error message fix: was "failed initialize stream cipher".
		return nil, fmt.Errorf("internal/scalar128: failed to initialize stream cipher: %w", err)
	}

	return &Generator{
		cipher: cipher,
	}, nil
}
|
import random

# Alphabet for generated identifiers: lowercase ASCII letters plus digits.
letters = 'abcdefghijklmnopqrstuvwxyz1234567890'


def randomStr(stringLength=4):
    """Return a random string of ``stringLength`` characters drawn from ``letters``."""
    chosen = []
    for _ in range(stringLength):
        chosen.append(random.choice(letters))
    return ''.join(chosen)


print(randomStr())
#!/bin/bash
# Launch the darkstar game server from its install directory.
# Abort if the directory is missing instead of running dsgame from $PWD.
cd /darkstar || exit 1
exec /darkstar/dsgame
|
<!DOCTYPE html>
<html lang="en">
    <head>
        <title>Dynamic Time</title>
        <script>
            // Render the current locale-formatted time into #time-display.
            function updateTime() {
                let timeString = new Date().toLocaleTimeString();
                // textContent avoids HTML-injection pitfalls of innerHTML
                // when writing plain text.
                document.querySelector("#time-display").textContent = timeString;
            }
            // Paint immediately once the DOM exists instead of leaving the
            // element blank for the first second, then refresh every second.
            window.addEventListener("DOMContentLoaded", updateTime);
            setInterval(updateTime, 1000);
        </script>
    </head>
    <body>
        <div id="time-display"></div>
    </body>
</html>
class User:
    """Lightweight value object pairing a user id with a username."""

    def __init__(self, user_id, username):
        # Stored under the shorter attribute name `id` on purpose.
        self.id = user_id
        self.username = username

    def __str__(self):
        return "<User {}: {}>".format(self.id, self.username)
def remove_data_from_db(users):
    """Simulate removing each user's data from a database.

    Args:
        users: iterable of User objects (anything with a meaningful ``str()``)
            whose data should be removed.
    """
    template = "Removing user data for {} from the database..."
    for current_user in users:
        # Simulated delete: just log which record would be purged.
        print(template.format(current_user))
    print("User data successfully removed from the database.")
const Boom = require('boom');
const bcrypt = require('bcrypt');
const mongoose = require('mongoose');
const Group = require('../models/group');
// Bug fix: a stray "-" before require() arithmetically negated the module
// object, leaving User as NaN instead of the model.
const User = require('../models/user');
const jwt = require('jsonwebtoken');
/**
 * Checks if a group share token is valid
 * If yes, return the group if not send error
 * @param {Request} req - expects `share_token` in the request body.
 * @param {Result} res
 * @param {Callback} next - invoked only when verification succeeds.
 */
async function verifyShareToken(req, res, next) {
    try {
        let verified = await Group.verifyShareToken(req.body.share_token, process.env.SECRET);
        if (!verified.group)
            return res.send(Boom.badRequest(verified.message));
        // NOTE(review): the verified *group* is stored under res.locals.user,
        // which downstream verifyGroupAccess reads as the requesting user —
        // confirm this aliasing is intentional.
        res.locals.user = verified.group;
    } catch (err) {
        return res.send(Boom.badRequest(err));
    }
    if (next)
        next();
}
/**
 * Checks if a user has access to a group
 * If yes, return the group else send error
 * @param {Request} req - `group_id` is read from the headers when present,
 *     otherwise from the body.
 * @param {Result} res - requires res.locals.user to be set by earlier middleware.
 * @param {Callback} next - invoked only when the access check passes.
 */
async function verifyGroupAccess(req, res, next) {
    try {
        let user = res.locals.user;
        let group = await Group.findOne({ _id: mongoose.Types.ObjectId((req.headers.group_id) ? req.headers.group_id : req.body.group_id) });
        if (!group)
            return res.send(Boom.badRequest("Invalid group_id"));
        if (!user)
            return res.send(Boom.badRequest("Invalid user!"));
        let verified = await group.verifyGroupAccess(user);
        if (!verified)
            return res.send(Boom.badRequest("User doesn't have access to this group"));
        // Expose the resolved group to downstream handlers.
        res.locals.group = group;
    } catch (err) {
        return res.send(Boom.badRequest(err));
    }
    if (next)
        next();
}
module.exports = {
verifyShareToken: verifyShareToken,
verifyGroupAccess: verifyGroupAccess
} |
#include <iostream>
#include <vector>

#include <boost/timer/timer.hpp>
//configuration
#define ST_ASIO_SERVER_PORT 9527
#define ST_ASIO_MAX_OBJECT_NUM 102400
#define ST_ASIO_REUSE_OBJECT //use objects pool
#define ST_ASIO_DELAY_CLOSE 5 //define this to avoid hooks for async call (and slightly improve efficiency)
#define ST_ASIO_MSG_BUFFER_SIZE 1024
#define ST_ASIO_INPUT_QUEUE non_lock_queue //we will never operate sending buffer concurrently, so need no locks
#define ST_ASIO_DECREASE_THREAD_AT_RUNTIME
//configuration
#include "../include/ext/tcp.h"
using namespace st_asio_wrapper;
using namespace st_asio_wrapper::tcp;
using namespace st_asio_wrapper::ext::tcp;
#define QUIT_COMMAND "quit"
#define STATUS "status"
#define STATISTIC "statistic"
#define LIST_ALL_CLIENT "list all client"
#define INCREASE_THREAD "increase thread"
#define DECREASE_THREAD "decrease thread"
// A client link that sends an initial payload on connect and thereafter
// echoes every received message straight back, time-stamping each send.
class echo_socket : public client_socket
{
public:
	echo_socket(i_matrix& matrix_) : client_socket(matrix_), msg_len(ST_ASIO_MSG_BUFFER_SIZE - ST_ASIO_HEAD_LEN) {unpacker()->stripped(false);}

	// Set the payload size used for the initial message sent on connect.
	void begin(size_t msg_len_) {msg_len = msg_len_;}
	// Force-close the link if the last send happened more than max_delay seconds ago.
	void check_delay(float max_delay) {if (is_connected() && (double) last_send_time.elapsed().wall / 1000000000 > max_delay) force_shutdown();}

protected:
	virtual void on_connect()
	{
		boost::asio::ip::tcp::no_delay option(true);
		lowest_layer().set_option(option);

		// Use std::vector instead of raw new[]/delete[]: the buffer is
		// released even if send_msg throws (RAII), and the fill is explicit.
		std::vector<char> buff(msg_len, 'Y'); //what should we send?
		last_send_time.stop();
		last_send_time.start();
		send_msg(buff.data(), msg_len, true);

		client_socket::on_connect();
	}

	//msg handling
	virtual bool on_msg_handle(out_msg_type& msg) {handle_msg(msg); return true;}

private:
	// Restart the send timestamp and bounce the message back unmodified.
	void handle_msg(out_msg_type& msg)
	{
		last_send_time.stop();
		last_send_time.start();
		direct_send_msg(msg, true);
	}

private:
	size_t msg_len;                         // payload size for the initial message
	boost::timer::cpu_timer last_send_time; // wall clock of the most recent send
};
// Container managing all echo_socket links plus a watchdog timer.
class echo_client : public multi_client_base<echo_socket>
{
public:
	echo_client(service_pump& service_pump_) : multi_client_base<echo_socket>(service_pump_) {}

	// Start every link with the given payload size and install a 5000 ms
	// periodic timer that drops links whose last send is older than max_delay.
	void begin(float max_delay, size_t msg_len)
	{
		do_something_to_all(boost::bind(&echo_socket::begin, _1, msg_len));
		// The boost::lambda comma expression builds a functor that invokes
		// check_delay and then yields true — NOTE(review): presumably `true`
		// keeps the timer recurring; confirm against set_timer's contract.
		set_timer(TIMER_END, 5000, (boost::lambda::bind(&echo_client::check_delay, this, max_delay), true));
	}
	void check_delay(float max_delay) {do_something_to_all(boost::bind(&echo_socket::check_delay, _1, max_delay));}
};
int main(int argc, const char* argv[])
{
printf("usage: %s [<message size=" ST_ASIO_SF "> [<max delay=%f (seconds)> [<service thread number=1> [<port=%d> [<ip=%s> [link num=16]]]]]]\n",
argv[0], ST_ASIO_MSG_BUFFER_SIZE - ST_ASIO_HEAD_LEN, 1.f, ST_ASIO_SERVER_PORT, ST_ASIO_SERVER_IP);
if (argc >= 2 && (0 == strcmp(argv[1], "--help") || 0 == strcmp(argv[1], "-h")))
return 0;
else
puts("type " QUIT_COMMAND " to end.");
///////////////////////////////////////////////////////////
size_t link_num = 16;
if (argc > 6)
link_num = std::min(ST_ASIO_MAX_OBJECT_NUM, std::max(atoi(argv[6]), 1));
printf("exec: concurrent_client with " ST_ASIO_SF " links\n", link_num);
///////////////////////////////////////////////////////////
service_pump sp;
echo_client client(sp);
// argv[5] = "::1" //ipv6
// argv[5] = "127.0.0.1" //ipv4
std::string ip = argc > 5 ? argv[5] : ST_ASIO_SERVER_IP;
unsigned short port = argc > 4 ? atoi(argv[4]) : ST_ASIO_SERVER_PORT;
int thread_num = 1;
if (argc > 3)
thread_num = std::min(16, std::max(thread_num, atoi(argv[3])));
//add one thread will seriously impact IO throughput when doing performance benchmark, this is because the business logic is very simple (send original messages back,
//or just add up total message size), under this scenario, just one service thread without receiving buffer will obtain the best IO throughput.
//the server has such behavior too.
for (size_t i = 0; i < link_num; ++i)
client.add_socket(port, ip);
float max_delay = 1.f;
if (argc > 2)
max_delay = std::max(.1f, (float) atof(argv[2]));
size_t msg_len = ST_ASIO_MSG_BUFFER_SIZE - ST_ASIO_HEAD_LEN;
if (argc > 1)
msg_len = std::max((size_t) 1, std::min(msg_len, (size_t) atoi(argv[1])));
client.begin(max_delay, msg_len);
sp.start_service(thread_num);
while(sp.is_running())
{
std::string str;
std::getline(std::cin, str);
if (str.empty())
;
else if (QUIT_COMMAND == str)
sp.stop_service();
else if (STATISTIC == str)
{
printf("link #: " ST_ASIO_SF ", valid links: " ST_ASIO_SF ", invalid links: " ST_ASIO_SF "\n\n", client.size(), client.valid_size(), client.invalid_object_size());
puts(client.get_statistic().to_string().data());
}
else if (STATUS == str)
client.list_all_status();
else if (LIST_ALL_CLIENT == str)
client.list_all_object();
else if (INCREASE_THREAD == str)
sp.add_service_thread(1);
else if (DECREASE_THREAD == str)
sp.del_service_thread(1);
}
return 0;
}
|
<gh_stars>0
import {ElementFinder, element, by} from 'protractor';
export class CheckoutPage{
proceedToCheckoutBtn: ElementFinder;
checkOutBtn: ElementFinder;
cartQuantityEle: ElementFinder;
deleteBtn: ElementFinder;
emptyNotification: ElementFinder;
constructor() {
this.proceedToCheckoutBtn = element(by.css("div.button-container a span"));
this.checkOutBtn = element(by.css("p.cart_navigation a:nth-child(1)"));
this.cartQuantityEle = element(by.css("span.ajax_cart_quantity:nth-child(2)"));
this.deleteBtn = element(by.css("i.icon-trash"));
this.emptyNotification = element(by.css("p.alert"));
}
} |
#!/bin/bash
# Compile and run every test_*.cpp found under the working tree.
# Usage: ./run_tests.sh [single_test.cpp]
#   When a file name is given, that test prints straight to the console;
#   every other test's output is appended to ./test_output.
GRAY='\033[1;30m'; YELLOW='\033[1;33m'
BLUE='\033[0;34m'; NC='\033[0m'
BOLD='\e[1m'

SCRIPTPATH="$( cd "$(dirname "$0")/" >/dev/null 2>&1 ; pwd -P )"

stucked=()
total=0
selected_file=$1
root_dir=$(pwd)
test_files=$(find "$root_dir" | grep "test.*.cpp" | grep -v ".w.$")

printf "" > test_output

for test in $test_files; do
    total=$((total+1))
    cd "$(dirname "${test}")"
    printf "> [ Run Test ${GRAY}${test}${NC} ]\n"
    bname=$(basename "${test}")
    # Compile and run in separate steps so return_code reflects the test
    # binary itself; the old `g++ ... && ./out && rm ./out` chain reported
    # the exit status of rm (or of whichever link in the chain failed).
    if g++ "$bname" -o "./$bname.out" -I"$SCRIPTPATH"; then
        if [ "$bname" = "$selected_file" ]; then
            "./$bname.out"
            return_code=$?
        else
            "./$bname.out" >> "$root_dir/test_output"
            return_code=$?
        fi
        rm "./$bname.out"
    else
        return_code=1
    fi
    cd "$root_dir"
    if [[ $return_code -eq 0 ]]; then
        printf "${BLUE}${BOLD}\n\t◁ PASS ✔ ▷\n\n${NC}"
    else
        printf "${YELLOW}${BOLD}\n\t◁ STUCK ✘ ▷\n\n${NC}"
        stucked=( "${stucked[@]}" "$bname" )
    fi
done

printf "\n${BLUE}${BOLD}⚠ Stucked: ${YELLOW}${#stucked[@]}${NC}/${BLUE}$total${NC}\n"
for file in ${stucked[@]}; do
    printf "${YELLOW}\t➤ $file${NC}\n"
done
echo

while true; do
    read -p "Do you wanna keep output of test processes? [Yy/Nn] " yn
    case $yn in
        [Yy]* ) exit;;
        [Nn]* ) rm test_output; break;;
        * ) echo "Please answer yes or no.";;
    esac
done
#!/usr/bin/env sh
# Display hot-plug hook: re-apply the saved monitor layout, then restore the
# wallpaper for the (possibly changed) screen geometry.

# Setup displays layout
autorandr --change
# Display wallpapers
nitrogen --restore

# Disabled per-layout tweaks kept for reference:
# cur_layout="$(autorandr --current)"
# # Properly set DPI & keyboard layout
# if [ "$cur_layout" = loftie ]; then
# 	xrandr --dpi 120
# 	setxkbmap -layout us -variant altgr-intl
# # Display sys tray on correct display
# elif [ "$cur_layout" = default ]; then
# 	xrandr --output eDP-1-1 --primary
# fi
#!/bin/bash
# Copyright 2018 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Test that pre/post integration tests don't run if unset.
source $(dirname $0)/presubmit-integration-tests-common.sh

# Verify that only the custom integration test ran: the pre/post hooks are
# unset below, so their flags must stay falsy while the custom flag is set.
function check_results() {
  (( ! PRE_INTEGRATION_TESTS )) || failed "Pre integration tests did run"
  (( CUSTOM_INTEGRATION_TESTS )) || failed "Custom integration tests did not run"
  (( ! POST_INTEGRATION_TESTS )) || failed "Post integration tests did run"
  echo ">> All tests passed"
}

echo ">> Testing custom test integration function"
# Remove the hook functions so main() exercises the "hooks unset" path.
unset -f pre_integration_tests
unset -f post_integration_tests
main $@
|
def fibo(n):
    """Return the n-th Fibonacci number (F(0) = 0, F(1) = 1).

    Iterative O(n) implementation; the original naive double recursion
    was O(2**n) and would hit the recursion limit for modest n.

    Args:
        n: non-negative index of the Fibonacci number to compute.

    Returns:
        The n-th Fibonacci number as an int.

    Raises:
        ValueError: if n is negative (the original recursed forever here).
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a


n = 7
print(fibo(n))
<reponame>yamanakahirofumi/RogueInJava<filename>src/main/java/org/hiro/things/sticktype/Drain.java
package org.hiro.things.sticktype;
import org.hiro.Chase;
import org.hiro.Const;
import org.hiro.Fight;
import org.hiro.Global;
import org.hiro.IOUtil;
import org.hiro.Room;
import org.hiro.Util;
import org.hiro.character.Player;
import org.hiro.map.RoomInfoEnum;
import org.hiro.things.ObjectType;
import org.hiro.things.OriginalMonster;
import org.hiro.things.Stick;
import java.util.ArrayList;
import java.util.List;
/**
 * Drain stick: when shaken it sacrifices half of the hero's hit points and
 * spreads that amount as damage evenly over the monsters in the hero's room
 * (or reachable through the adjoining passage when standing in a doorway).
 */
public class Drain extends Stick {

    public Drain() {
        super();
    }

    /**
     * Zap the stick. Refuses when the hero has fewer than 2 hit points,
     * since draining always costs half of them.
     *
     * @param player the hero wielding the stick
     */
    @Override
    public void shake(Player player) {
        /*
         * take away 1/2 of hero's hit points, then take it away
         * evenly from the monsters in the room (or next to hero
         * if he is in a passage)
         */
        if (player.getHp() < 2) {
            IOUtil.msg("you are too weak to use it");
        } else {
            drain(player);
        }
    }

    /*
     * drain:
     *  Do drain hit points from player shtick
     */
    private void drain(Player player) {
        /*
         * First cnt how many things we need to spread the hit points among
         */
        Room corp;
        // If the hero stands on a door tile, also consider the connected passage.
        if (Util.getPlace(player.getPosition()).p_ch == ObjectType.DOOR) {
            corp = Global.passages[Util.flat(player.getPosition()) & Const.F_PNUM];
        } else {
            corp = null;
        }
        boolean pass = player.getRoom().containInfo(RoomInfoEnum.ISGONE);
        List<OriginalMonster> drainList = new ArrayList<>();
        // Collect every monster sharing the hero's room, the adjoining passage,
        // or (when the room is flagged ISGONE) a doorway whose passage leads
        // back to the hero's room.
        for (OriginalMonster mp : Global.mlist) {
            if (mp.getRoom().equals(player.getRoom()) || mp.getRoom().equals(corp) ||
                    (pass && Util.getPlace(mp.getPosition()).p_ch == ObjectType.DOOR &&
                            Global.passages[Util.flat(mp.getPosition()) & Const.F_PNUM].equals(player.getRoom()))) {
                drainList.add(mp);
            }
        }
        if (drainList.size() == 0) {
            IOUtil.msg("you have a tingling feeling");
            return;
        }
        // The hero pays half of his hit points...
        Global.player.getStatus().s_hpt /= 2;
        // ...and each collected monster takes an equal share of what remains.
        // NOTE(review): the share is computed from the hit points left AFTER
        // halving — confirm this matches the intended rogue behavior.
        int cnt = player.getHp() / drainList.size();
        /*
         * Now zot all of the monsters
         */
        for (OriginalMonster dp : drainList) {
            if ((dp.getStatus().s_hpt -= cnt) <= 0) {
                Fight.killed(dp, Chase.see_monst(dp));
            } else {
                Chase.runto(dp.getPosition());
            }
        }
        // Consume one charge of the stick.
        this.use();
    }
}
|
#!/bin/bash
# Build the gjpush CLI. -s/-w omit the symbol table and DWARF debug info to
# shrink the release binary (see `go doc cmd/link`); build tags select the
# prod configuration and the POSIX flag parser.
go build -ldflags "-s -w" -tags "prod forceposix" ./cmd/gjpush
|
package render
import (
	"errors"
	"fmt"
	"sync"
	"time"

	"strange-secrets.com/mantra/algebra"
	"strange-secrets.com/mantra/render/sampling"
	"strange-secrets.com/mantra/scene"
	"strange-secrets.com/mantra/scene/shading"
)
// Default render resolution used for test images.
const (
	TestImageWidth  = 1024 * 2 // 640
	TestImageHeight = 1024     // 480
)

// Renderer owns a registry of named render targets and the scene time at
// which images are rendered.
type Renderer struct {
	Images map[string]Target // render targets keyed by image name
	Time   algebra.MnFloat   // current render time; not read in the visible code — TODO confirm usage
}
// NewRenderer constructs a Renderer with an empty render-target registry.
func NewRenderer() *Renderer {
	renderer := new(Renderer)
	renderer.Images = map[string]Target{}
	return renderer
}
// Creates a new image within the renderer which may be used as a target for a render operation.
func (r *Renderer) CreateRenderTarget(name string, width int, height int) (Target, error) {
if _, ok := r.Images[name]; ok {
return nil, fmt.Errorf("cannot create render target \"%s\", name already in use", name)
}
renderTarget, err := NewGammaTarget(name, width, height)
if err != nil {
return nil, err
}
r.Images[name] = renderTarget
return renderTarget, nil
}
// Render renders the named image from the point of view of the named camera
// node in world. Errors when either the camera node or the image target
// cannot be found.
func (r *Renderer) Render(imageName string, cameraName string, world *scene.Scene) error {
	cameraNode := world.GetNode(cameraName)
	if cameraNode == nil {
		return fmt.Errorf("cannot find camera \"%s\" for rendering", cameraName)
	}
	if target, ok := r.Images[imageName]; ok {
		return r.renderImage(cameraNode, target, world)
	}
	return fmt.Errorf("cannot render image \"%s\", image could not be found", imageName)
}
// renderImage renders the whole target synchronously as a single block and
// logs the elapsed wall-clock time.
func (r *Renderer) renderImage(cameraNode *scene.Node, target Target, world *scene.Scene) error {
	if cameraNode == nil {
		return errors.New("cannot render image without camera node")
	}
	if target == nil {
		return errors.New("cannot render nil image")
	}
	start := time.Now()
	// We intend to use multi-threading for these blocks of pixels
	block := Block{
		X:      0,
		Y:      0,
		Width:  TestImageWidth,
		Height: TestImageHeight,
	}
	r.renderBlock(block, cameraNode, target, world)
	// Idiom fix: fmt.Printf instead of fmt.Println(fmt.Sprintf(...)).
	fmt.Printf("render completed %dms\n", time.Since(start).Milliseconds())
	return nil
}
// renderImageAsync renders the target with several worker goroutines that
// pull 16x16 blocks from a shared block channel, and blocks until every
// worker has drained the channel.
func (r *Renderer) renderImageAsync(cameraNode *scene.Node, target Target, world *scene.Scene) error {
	if cameraNode == nil {
		return errors.New("cannot render image without camera node")
	}
	if target == nil {
		return errors.New("cannot render nil image")
	}
	lbc := NewLinearBlockChannel(target, 16, 16)
	start := time.Now()
	// Fix: the original fired the goroutines and returned immediately, so
	// "render completed" was printed before any rendering finished and the
	// caller could observe a partially rendered target. Wait for all workers.
	var wg sync.WaitGroup
	for loop := 0; loop < 4; loop++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			r.processBlocks(lbc, cameraNode, target, world)
		}()
	}
	wg.Wait()
	fmt.Printf("render completed %dms\n", time.Since(start).Milliseconds())
	return nil
}
// processBlocks is a worker loop: it keeps pulling blocks from the channel
// and rendering them until it receives a zero-width block, which the
// BlockChannel uses as its end-of-work sentinel.
func (r *Renderer) processBlocks(bc BlockChannel, cameraNode *scene.Node, target Target, world *scene.Scene) {
	for block := bc.NextBlock(); block.Width != 0; block = bc.NextBlock() {
		r.renderBlock(block, cameraNode, target, world)
	}
}
// renderBlock renders one rectangular block of pixels into target, using a
// random sampler with 8 samples per pixel.
func (r *Renderer) renderBlock(block Block, cameraNode *scene.Node, target Target, world *scene.Scene) {
	//sampler := sampling.NewPixelSampler()
	sampler := sampling.NewRandomSampler(8)
	for y := 0; y < block.Height; y++ {
		for x := 0; x < block.Width; x++ {
			// Fix: the original reset the sampler twice per pixel and called
			// renderPixel with only three arguments, while its signature takes
			// (pixel, sampler, cameraNode, world). Reset once and pass both the
			// pixel it yields and the sampler itself.
			pixel := sampler.Reset(x+block.X, y+block.Y)
			target.Set(x+block.X, y+block.Y, r.renderPixel(pixel, sampler, cameraNode, world))
		}
	}
}
func (r *Renderer) renderPixel(pixel algebra.Vector2, sampler sampling.Sampler2D, cameraNode *scene.Node, world *scene.Scene) algebra.Vector3 {
halfWidth := float64(TestImageWidth) / 2.0
halfHeight := float64(TestImageHeight) / 2.0
renderInfo := shading.RenderInfo{
Pixel: algebra.ZeroVector2,
Width: TestImageWidth,
Height: TestImageHeight,
Aspect: float64(TestImageWidth) / float64(TestImageHeight),
}
result := algebra.ZeroVector3
valid := true
// Here we should split the pixel up into multiple rays for anti-aliasing
for renderInfo.Pixel, valid = sampler.Next(pixel); valid {
renderInfo.Pixel.X = (renderInfo.Pixel.X - halfWidth) / halfWidth
renderInfo.Pixel.Y = -(renderInfo.Pixel.Y - halfHeight) / halfHeight
direction := cameraNode.CastRay(renderInfo.Pixel.X, renderInfo.Pixel.Y, renderInfo.Aspect)
traceInfo := scene.NewTraceInfo(renderInfo, cameraNode.Location, direction)
// TODO: Perhaps ShadeRay should return a color?
result = result.Add(world.ShadeRay(traceInfo))
}
return result.DivideScalar(algebra.MnFloat(sampler.Length()))
}
|
<filename>src/model/SizingPeriod.cpp
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#include "SizingPeriod.hpp"
#include "SizingPeriod_Impl.hpp"
#include "Site.hpp"
#include "Site_Impl.hpp"
#include "SkyTemperature.hpp"
#include "SkyTemperature_Impl.hpp"
#include "Model.hpp"
#include "Model_Impl.hpp"
#include "../utilities/core/Assert.hpp"
namespace openstudio {
namespace model {
namespace detail {
    // Construct from a raw IdfObject (deserialization path).
    SizingPeriod_Impl::SizingPeriod_Impl(const IdfObject& idfObject, Model_Impl* model, bool keepHandle)
      : ParentObject_Impl(idfObject, model, keepHandle) {}

    // Construct by adopting a workspace object into this model.
    SizingPeriod_Impl::SizingPeriod_Impl(const openstudio::detail::WorkspaceObject_Impl& other, Model_Impl* model, bool keepHandle)
      : ParentObject_Impl(other, model, keepHandle) {}

    // Clone-style constructor from another SizingPeriod implementation.
    SizingPeriod_Impl::SizingPeriod_Impl(const SizingPeriod_Impl& other, Model_Impl* model, bool keepHandle)
      : ParentObject_Impl(other, model, keepHandle) {}
// return the parent object in the hierarchy
boost::optional<ParentObject> SizingPeriod_Impl::parent() const {
boost::optional<ParentObject> result(model().getOptionalUniqueModelObject<Site>());
return result;
}
// set the parent, child may have to call methods on the parent
bool SizingPeriod_Impl::setParent(ParentObject& newParent) {
if (newParent.optionalCast<Site>() && (newParent.model() == model())) {
return true;
}
return false;
}
// return any children objects in the hierarchy
std::vector<ModelObject> SizingPeriod_Impl::children() const {
std::vector<ModelObject> result;
SkyTemperatureVector sts = getObject<ModelObject>().getModelObjectSources<SkyTemperature>();
result.insert(result.end(), sts.begin(), sts.end());
return result;
}
std::vector<IddObjectType> SizingPeriod_Impl::allowableChildTypes() const {
IddObjectTypeVector result;
result.push_back(SkyTemperature::iddObjectType());
return result;
}
} // namespace detail
  /// constructor: forwards to ParentObject and asserts that the created
  /// implementation object has the expected SizingPeriod_Impl type.
  SizingPeriod::SizingPeriod(IddObjectType type, const Model& model) : ParentObject(type, model) {
    OS_ASSERT(getImpl<detail::SizingPeriod_Impl>());
  }

  // constructor wrapping an existing implementation object
  SizingPeriod::SizingPeriod(std::shared_ptr<detail::SizingPeriod_Impl> impl) : ParentObject(std::move(impl)) {}

  // ensure that this object does not contain the date 2/29 (leap day);
  // delegates to the implementation object.
  void SizingPeriod::ensureNoLeapDays() {
    getImpl<detail::SizingPeriod_Impl>()->ensureNoLeapDays();
  }
} // namespace model
} // namespace openstudio
|
def dic_to_query(dictionary):
    """Split a dict into parallel lists of its keys and values.

    Args:
        dictionary: any mapping.

    Returns:
        tuple[list, list]: (keys, values), in the dict's iteration order,
        so keys[i] maps to values[i].
    """
    # list(d.keys()) / list(d.values()) iterate in the same order, replacing
    # the original manual items() loop.
    return list(dictionary.keys()), list(dictionary.values())
<reponame>julisevla3/Checkpoint2_To-Do<gh_stars>0
// Grab the login form elements (comments translated from Portuguese)
let campoEmailLogin = document.getElementById("inputEmail");
let campoSenhaLogin = document.getElementById("inputPassword");
let botaoAcessar = document.getElementById("botaoAcessar");
let campoEmailLoginNormalizado;
let campoSenhaLoginNormalizado;
// Validation control flag, set by the email field's blur handler
let emailEValido = false;
// Payload object posted to the login endpoint
const usuarioObjeto = {
    email: "",
    password: "",
};
// Submit handler: validates, normalizes the credentials, posts them to the
// login endpoint and, on success, stores the JWT and moves to the task page.
botaoAcessar.addEventListener("click", function (evento) {
    if (validacaoTelaDeLogin()) {
        evento.preventDefault();
        mostrarSpinner();
        // Normalize the credentials: strip spaces, lower-case the email
        campoEmailLoginNormalizado = retiraEspacosDeUmValor(campoEmailLogin.value);
        campoSenhaLoginNormalizado = retiraEspacosDeUmValor(campoSenhaLogin.value);
        campoEmailLoginNormalizado = converteValorRecebidoParaMinusculo(campoEmailLoginNormalizado);
        // Populate the payload with the normalized values.
        // Fix: the password was assigned from a broken "<PASSWORD>" placeholder
        // (a syntax error); assign the normalized password field instead.
        usuarioObjeto.email = campoEmailLoginNormalizado;
        usuarioObjeto.password = campoSenhaLoginNormalizado;
        let usuarioJson = JSON.stringify(usuarioObjeto);
        let endPoinLogin = "https://ctd-todo-api.herokuapp.com/v1/users/login";
        let configuração = {
            method: "POST",
            body: usuarioJson,
            headers: {
                "content-type": "application/json",
            },
        };
        fetch(endPoinLogin, configuração)
            .then((resultado) => {
                console.log(resultado.status);
                // NOTE(review): this API is assumed to answer 201 on a
                // successful login — confirm against the service docs.
                if (resultado.status == 201) {
                    // alert("Usuario logado com sucesso")
                    console.log(resultado);
                    // window.location.href = "tarefas.html";
                } else if (resultado.status == 400) {
                    alert("Existe alguma informação divergente")
                    window.location.href = "index.html";
                }
                return resultado.json();
            })
            .then((resultado) => {
                console.log(resultado.jwt);
                setTimeout(() => {
                    localStorage.setItem("jwt", resultado.jwt);
                    window.location.href = "tarefas.html";
                }, 4000);
                ocultarSpinner();
            })
            .catch((erros) => {
                console.log(erros);
            });
    } else {
        alert("Ambos os campos devem ser informados");
        evento.preventDefault(); // Keep the form from submitting when validation fails
        // ocultarSpinner();
    }
});
//Validando o campo de Email
campoEmailLogin.addEventListener("blur", function () {
//Captura o elemento "small"
let inputEmailValidacao = document.getElementById("inputEmailValidacao");
//Se o campo estiver com algum valor...
if (campoEmailLogin.value != "") {
inputEmailValidacao.innerText = "";
campoEmailLogin.style.border = ``;
emailEValido = true;
//Se o campo estiver sem nenhum valor...
} else {
inputEmailValidacao.innerText = "Campo obrigatório";
inputEmailValidacao.style.color = "#EE1729EC";
inputEmailValidacao.style.fontSize = "8pt";
inputEmailValidacao.style.fontWeight = "bold";
campoEmailLogin.style.border = `1px solid #EE1729EC`;
emailEValido = false;
};
//Chama a função de validar, para "atualizar" o status da validação principal da tela de login
validacaoTelaDeLogin();
});
// Enable or disable the submit button according to the email validation
// flag; returns whether the login form may be submitted.
function validacaoTelaDeLogin() {
    if (!emailEValido) {
        botaoAcessar.setAttribute("disabled", true);
        botaoAcessar.innerText = "Bloqueado";
        return false;
    }
    botaoAcessar.removeAttribute("disabled");
    botaoAcessar.innerText = "Acessar";
    return true;
}
|
#!/bin/bash
# Print today's date in day-month-year form.
printf 'The current date is %s\n' "$(date '+%d-%m-%Y')"
package kr.co.gardener.util;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
/**
 * Headless-Chrome crawler that scrapes Google Images result thumbnails
 * via Selenium WebDriver.
 */
public class SeleniumCrawler {

    private WebDriver driver;
    private List<WebElement> element;
    private String url;

    /**
     * Creates a headless Chrome driver, reading the chromedriver binary
     * location from the application properties.
     */
    public SeleniumCrawler() {
        Properties pro = FileUploader.makeProp();
        String driverPath = pro.getProperty("java.chromeDriver.path");
        System.setProperty("webdriver.chrome.driver", driverPath);
        ChromeOptions options = new ChromeOptions();
        options.addArguments("--start-maximized");
        options.addArguments("--disable-popup-blocking");
        options.addArguments("headless");
        driver = new ChromeDriver(options);
    }

    /**
     * Searches Google Images for the given term and returns the {@code src}
     * attributes of the result thumbnails, or {@code null} on failure.
     *
     * NOTE(review): the driver is shut down in the finally block, so this
     * instance is single-use — a second call will fail. Confirm callers
     * create a fresh SeleniumCrawler per search.
     *
     * @param search the query to type into the image search box
     * @return list of image src URLs, or null if the scrape failed
     */
    public List<String> imageSearch(String search) {
        url = "https://www.google.co.kr/imghp?hl=ko&tab=ri&authuser=0&ogbl";
        List<String> list = new ArrayList<>();
        try {
            driver.get(url);
            WebElement elem = driver.findElement(By.name("q"));
            elem.sendKeys(search);
            elem.submit();
            // Collect the img.rg_i result thumbnails (translated from Korean).
            element = driver.findElements(By.cssSelector("img.rg_i"));
            for (WebElement item : element) {
                list.add(item.getAttribute("src"));
            }
            return list;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Fix: driver.close() only closes the window and leaks the
            // chromedriver session/process; quit() ends the whole session.
            driver.quit();
        }
        return null;
    }
}
|
<filename>powershell-test-agent/src/test/java/jetbrains/buildServer/powershell/agent/CommandLineProviderTest.java
/*
* Copyright 2000-2022 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.buildServer.powershell.agent;
import com.intellij.openapi.util.SystemInfo;
import jetbrains.buildServer.powershell.agent.detect.PowerShellInfo;
import jetbrains.buildServer.powershell.common.PowerShellConstants;
import jetbrains.buildServer.powershell.common.PowerShellEdition;
import jetbrains.buildServer.powershell.common.PowerShellExecutionMode;
import jetbrains.buildServer.util.TestFor;
import org.jetbrains.annotations.NotNull;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.lib.legacy.ClassImposteriser;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.util.*;
/**
* Created with IntelliJ IDEA.
*
* @author <NAME> (<EMAIL>)
*/
/**
 * Unit tests for {@link PowerShellCommandLineProvider}: verifies the command
 * line built for various execution modes, versions, and script arguments.
 */
public class CommandLineProviderTest extends BasePowerShellUnitTest {

  private static final String SCRIPT_FILE_NAME = "script.ps1";

  private Mockery m;

  private PowerShellCommandLineProvider myProvider;

  private File myScriptFile;

  private File myScriptsRootDir;

  private PowerShellInfo myInfo;

  @SuppressWarnings("ResultOfMethodCallIgnored")
  @Override
  @BeforeMethod
  public void setUp() throws Exception {
    super.setUp();
    m = new Mockery() {{
      setImposteriser(ClassImposteriser.INSTANCE);
    }};
    // Edition depends on the OS the tests run on; the provider treats
    // desktop and core editions differently.
    myInfo = m.mock(PowerShellInfo.class);
    final PowerShellEdition edition = SystemInfo.isWindows ? PowerShellEdition.DESKTOP : PowerShellEdition.CORE;
    m.checking(new Expectations() {{
      allowing(myInfo).getEdition();
      will(returnValue(edition));
      allowing(myInfo).isVirtual();
      will(returnValue(false));
    }});
    myProvider = new PowerShellCommandLineProvider();
    myScriptsRootDir = createTempDir();
    myScriptFile = new File(myScriptsRootDir, SCRIPT_FILE_NAME);
    myScriptFile.createNewFile();
    super.registerAsTempFile(myScriptFile);
  }

  @Override
  @AfterMethod
  public void tearDown() throws Exception {
    super.tearDown();
    m.assertIsSatisfied();
  }

  /** -Version must be emitted with the exact requested version string. */
  @Test(dataProvider = "versionsProvider")
  @TestFor(issues = "TW-33472")
  public void testStringProvided(@NotNull final String version) throws Exception {
    if (PowerShellCommandLineProvider.isExplicitVersionSupported(myInfo)) {
      final String expectedVersionArg = "-Version";
      final Map<String, String> runnerParams = new HashMap<>();
      final Map<String, String> sharedConfigParams = new HashMap<>();
      runnerParams.put(PowerShellConstants.RUNNER_MIN_VERSION, version);
      addExecutionExpectations(myInfo, version);
      final List<String> result = myProvider.provideCommandLine(myInfo, runnerParams, myScriptFile, false, sharedConfigParams);
      // powershell.exe -Version $version
      assertTrue(result.size() >= 2);
      assertEquals(expectedVersionArg, result.get(0));
      assertEquals(version, result.get(1));
    }
  }

  /** Whitespace-separated script arguments are split and appended after -File. */
  @Test(dataProvider = "versionsProvider")
  @TestFor(issues = "TW-34557")
  public void testScriptArgumentsProvided(@NotNull final String version) throws Exception {
    final Map<String, String> runnerParams = new HashMap<>();
    runnerParams.put(PowerShellConstants.RUNNER_EXECUTION_MODE, PowerShellExecutionMode.PS1.getValue());
    final String args = "arg1 arg2 arg3";
    runnerParams.put(PowerShellConstants.RUNNER_SCRIPT_ARGUMENTS, args);
    runnerParams.put(PowerShellConstants.RUNNER_MIN_VERSION, version);
    final Map<String, String> sharedConfigParams = new HashMap<>();
    addExecutionExpectations(myInfo, version);
    final List<String> expected = new ArrayList<String>() {{
      if (PowerShellCommandLineProvider.isExplicitVersionSupported(myInfo)) {
        add("-Version");
        add(version);
      }
      add("-NonInteractive");
      add("-File");
      add(myScriptFile.getPath());
      addAll(Arrays.asList(args.split("\\s+")));
    }};
    final List<String> result = myProvider.provideCommandLine(myInfo, runnerParams, myScriptFile, false, sharedConfigParams);
    assertSameElements(result, expected);
  }

  /** No -Version flag may appear when no minimum version is configured. */
  @SuppressWarnings({"ResultOfMethodCallIgnored"})
  @Test
  @TestFor(issues = "TW-34557")
  public void testUseDefaultPowerShellIfVersionAny() throws Exception {
    final Map<String, String> runnerParams = new HashMap<>();
    final Map<String, String> sharedConfigParams = new HashMap<>();
    m.checking(new Expectations() {{
      allowing(myInfo).getExecutablePath();
      will(returnValue("executablePath"));
      never(myInfo).getVersion();
    }});
    final List<String> result = myProvider.provideCommandLine(myInfo, runnerParams, myScriptFile, false, sharedConfigParams);
    for (String str: result) {
      if ("-Version".equals(str)) {
        fail("PowerShell version should not be supplied if Any is selected in runner parameters");
      }
    }
  }

  /** PS1 (file) mode runs the script via -File. */
  @SuppressWarnings("Duplicates")
  @Test
  @TestFor(issues = "TW-34410")
  public void testFromFile() throws Exception {
    final Map<String, String> runnerParams = new HashMap<>();
    final Map<String, String> sharedConfigParams = new HashMap<>();
    runnerParams.put(PowerShellConstants.RUNNER_EXECUTION_MODE, PowerShellExecutionMode.PS1.getValue());
    runnerParams.put(PowerShellConstants.RUNNER_MIN_VERSION, "3.0");
    addExecutionExpectations(myInfo, "3.0");
    final List<String> expected = new ArrayList<String>() {{
      if (PowerShellCommandLineProvider.isExplicitVersionSupported(myInfo)) {
        add("-Version");
        add("3.0");
      }
      add("-NonInteractive");
      add("-File");
      add(myScriptFile.getPath());
    }};
    final List<String> result = myProvider.provideCommandLine(myInfo, runnerParams, myScriptFile, false, sharedConfigParams);
    assertSameElements(result, expected);
  }

  /** A script path containing spaces must be passed to -File unescaped. */
  @Test
  @SuppressWarnings({"ResultOfMethodCallIgnored", "Duplicates"})
  public void testNotEscapeSpacesForFile() throws Exception {
    final Map<String, String> runnerParams = new HashMap<>();
    final Map<String, String> sharedConfigParams = new HashMap<>();
    runnerParams.put(PowerShellConstants.RUNNER_EXECUTION_MODE, PowerShellExecutionMode.PS1.getValue());
    runnerParams.put(PowerShellConstants.RUNNER_MIN_VERSION, "3.0");
    final String subdirName = "sub dir";
    final File subDir = new File(myScriptsRootDir, subdirName);
    subDir.mkdir();
    final String fileName = "some script.ps1";
    final File scriptFile = new File(subDir, fileName);
    scriptFile.createNewFile();
    addExecutionExpectations(myInfo, "3.0");
    // Fix: the original built scriptFile (whose path contains spaces) but then
    // passed and expected myScriptFile, so the space-handling behavior under
    // test was never exercised. Use the space-containing script throughout.
    final List<String> expected = new ArrayList<String>() {{
      if (PowerShellCommandLineProvider.isExplicitVersionSupported(myInfo)) {
        add("-Version");
        add("3.0");
      }
      add("-NonInteractive");
      add("-File");
      add(scriptFile.getPath());
    }};
    final List<String> result = myProvider.provideCommandLine(myInfo, runnerParams, scriptFile, false, sharedConfigParams);
    assertSameElements(result, expected);
  }

  /** STDIN mode pipes the script via "-Command - < script". */
  @Test
  @SuppressWarnings({"ResultOfMethodCallIgnored"})
  public void testLeavePathAsIsForCommand() throws Exception {
    final Map<String, String> runnerParams = new HashMap<>();
    final Map<String, String> sharedConfigParams = new HashMap<>();
    runnerParams.put(PowerShellConstants.RUNNER_EXECUTION_MODE, PowerShellExecutionMode.STDIN.getValue());
    runnerParams.put(PowerShellConstants.RUNNER_MIN_VERSION, "3.0");
    final String subdirName = "sub dir";
    final File subDir = new File(myScriptsRootDir, subdirName);
    subDir.mkdir();
    addExecutionExpectations(myInfo, "3.0");
    final List<String> expected = new ArrayList<String>() {{
      if (PowerShellCommandLineProvider.isExplicitVersionSupported(myInfo)) {
        add("-Version");
        add("3.0");
      }
      add("-NonInteractive");
      add("-Command");
      add("-");
      add("<");
      add(myScriptFile.getPath());
    }};
    final List<String> result = myProvider.provideCommandLine(myInfo, runnerParams, myScriptFile, false, sharedConfigParams);
    assertSameElements(result, expected);
  }

  /** Newline-separated args are split; quoted multi-word args stay intact. */
  @Test
  @TestFor(issues = "TW-35063")
  public void testMultiWordArgs_File() throws Exception {
    final Map<String, String> runnerParams = new HashMap<>();
    final Map<String, String> sharedConfigParams = new HashMap<>();
    runnerParams.put(PowerShellConstants.RUNNER_EXECUTION_MODE, PowerShellExecutionMode.PS1.getValue());
    runnerParams.put(PowerShellConstants.RUNNER_MIN_VERSION, "3.0");
    runnerParams.put(PowerShellConstants.RUNNER_SCRIPT_ARGUMENTS, "arg1\r\n\"arg2.1 arg2.2\"\r\narg3\r\narg4 arg5");
    addExecutionExpectations(myInfo, "3.0");
    final List<String> expected = new ArrayList<String>() {{
      if (PowerShellCommandLineProvider.isExplicitVersionSupported(myInfo)) {
        add("-Version");
        add("3.0");
      }
      add("-NonInteractive");
      add("-File");
      add(myScriptFile.getPath());
      add("arg1");
      add("\"arg2.1 arg2.2\"");
      add("arg3");
      add("arg4");
      add("arg5");
    }};
    final List<String> result = myProvider.provideCommandLine(myInfo, runnerParams, myScriptFile, false, sharedConfigParams);
    assertSameElements(result, expected);
  }

  /** cmd metacharacters (^, $) in script arguments must not be escaped. */
  @Test
  @TestFor(issues = "TW-37730")
  public void testEscapeCmdChar_File() throws Exception {
    final Map<String, String> runnerParams = new HashMap<>();
    final Map<String, String> sharedConfigParams = new HashMap<>();
    runnerParams.put(PowerShellConstants.RUNNER_EXECUTION_MODE, PowerShellExecutionMode.PS1.getValue());
    runnerParams.put(PowerShellConstants.RUNNER_MIN_VERSION, "3.0");
    runnerParams.put(PowerShellConstants.RUNNER_SCRIPT_ARGUMENTS, "-PassToPowerShell\n^MatchTheWholeString$");
    addExecutionExpectations(myInfo, "3.0");
    final List<String> expected = new ArrayList<String>() {{
      if (PowerShellCommandLineProvider.isExplicitVersionSupported(myInfo)) {
        add("-Version");
        add("3.0");
      }
      add("-NonInteractive");
      add("-File");
      add(myScriptFile.getPath());
      add("-PassToPowerShell");
      add("^MatchTheWholeString$");
    }};
    final List<String> result = myProvider.provideCommandLine(myInfo, runnerParams, myScriptFile, false, sharedConfigParams);
    assertSameElements(result, expected);
  }

  /** Stubs the mocked PowerShellInfo's executable path and version. */
  @SuppressWarnings("ResultOfMethodCallIgnored")
  private void addExecutionExpectations(final PowerShellInfo myInfo, @NotNull final String version) {
    m.checking(new Expectations() {{
      allowing(myInfo).getExecutablePath();
      will(returnValue("executablePath"));
      allowing(myInfo).getVersion();
      will(returnValue(version));
    }});
  }

  /** Known PowerShell versions the version-dependent tests run against. */
  @DataProvider(name = "versionsProvider")
  public Object[][] getVersions() {
    String[] versions = {"1.0", "2.0", "3.0", "4.0", "5.0", "5.1", "6.0"};
    final Object[][] result = new Object[versions.length][];
    for (int i = 0; i < versions.length; i++) {
      result[i] = new Object[] {versions[i]};
    }
    return result;
  }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.metadata;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.slice.Slice;
import io.trino.Session;
import io.trino.connector.CatalogName;
import io.trino.metadata.ResolvedFunction.ResolvedFunctionDecoder;
import io.trino.operator.aggregation.AggregationMetadata;
import io.trino.operator.window.WindowFunctionSupplier;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.AggregateFunction;
import io.trino.spi.connector.AggregationApplicationResult;
import io.trino.spi.connector.BeginTableExecuteResult;
import io.trino.spi.connector.CatalogSchemaName;
import io.trino.spi.connector.CatalogSchemaTableName;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.ConnectorCapabilities;
import io.trino.spi.connector.ConnectorOutputMetadata;
import io.trino.spi.connector.ConnectorTableMetadata;
import io.trino.spi.connector.Constraint;
import io.trino.spi.connector.ConstraintApplicationResult;
import io.trino.spi.connector.JoinApplicationResult;
import io.trino.spi.connector.JoinCondition;
import io.trino.spi.connector.JoinStatistics;
import io.trino.spi.connector.JoinType;
import io.trino.spi.connector.LimitApplicationResult;
import io.trino.spi.connector.MaterializedViewFreshness;
import io.trino.spi.connector.ProjectionApplicationResult;
import io.trino.spi.connector.SampleApplicationResult;
import io.trino.spi.connector.SampleType;
import io.trino.spi.connector.SortItem;
import io.trino.spi.connector.SystemTable;
import io.trino.spi.connector.TableColumnsMetadata;
import io.trino.spi.connector.TableScanRedirectApplicationResult;
import io.trino.spi.connector.TopNApplicationResult;
import io.trino.spi.expression.ConnectorExpression;
import io.trino.spi.function.InvocationConvention;
import io.trino.spi.function.OperatorType;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.GrantInfo;
import io.trino.spi.security.Identity;
import io.trino.spi.security.Privilege;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.security.TrinoPrincipal;
import io.trino.spi.statistics.ComputedStatistics;
import io.trino.spi.statistics.TableStatistics;
import io.trino.spi.statistics.TableStatisticsMetadata;
import io.trino.spi.type.Type;
import io.trino.sql.analyzer.TypeSignatureProvider;
import io.trino.sql.planner.PartitioningHandle;
import io.trino.sql.tree.QualifiedName;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import static io.trino.metadata.FunctionId.toFunctionId;
import static io.trino.metadata.FunctionKind.SCALAR;
import static io.trino.metadata.RedirectionAwareTableHandle.noRedirection;
import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_FOUND;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER;
/**
 * Test stub for {@code Metadata}: every operation throws
 * {@link UnsupportedOperationException} unless a subclass overrides it.
 * The few members with real behavior are the {@code apply*} pushdown hooks
 * (which decline pushdown by returning {@code Optional.empty()}),
 * {@code decodeFunction}, {@code resolveFunction} (supports only the
 * zero-argument {@code rand()}), {@code getFunctionMetadata} and
 * {@code getRedirectionAwareTableHandle}. Tests override only what they need.
 */
public abstract class AbstractMockMetadata
        implements Metadata
{
    // Convenience factory for tests that need a Metadata instance but never
    // actually invoke any of its (throwing) methods.
    public static Metadata dummyMetadata()
    {
        return new AbstractMockMetadata() {};
    }
    // Decodes serialized function names back into ResolvedFunction instances,
    // resolving types through the testing type manager.
    private final ResolvedFunctionDecoder functionDecoder = new ResolvedFunctionDecoder(TESTING_TYPE_MANAGER::getType);
    @Override
    public Set<ConnectorCapabilities> getConnectorCapabilities(Session session, CatalogName catalogName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean catalogExists(Session session, String catalogName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean schemaExists(Session session, CatalogSchemaName schema)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public List<String> listSchemaNames(Session session, String catalogName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableHandle> getTableHandle(Session session, QualifiedObjectName tableName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableHandle> getTableHandleForStatisticsCollection(Session session, QualifiedObjectName tableName, Map<String, Object> analyzeProperties)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableExecuteHandle> getTableHandleForExecute(Session session, TableHandle tableHandle, String procedureName, Map<String, Object> executeProperties)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableLayout> getLayoutForTableExecute(Session session, TableExecuteHandle tableExecuteHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public BeginTableExecuteResult<TableExecuteHandle, TableHandle> beginTableExecute(Session session, TableExecuteHandle tableExecuteHandle, TableHandle updatedSourceTableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void finishTableExecute(Session session, TableExecuteHandle handle, Collection<Slice> fragments, List<Object> tableExecuteState)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<SystemTable> getSystemTable(Session session, QualifiedObjectName tableName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableProperties getTableProperties(Session session, TableHandle handle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableHandle makeCompatiblePartitioning(Session session, TableHandle table, PartitioningHandle partitioningHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<PartitioningHandle> getCommonPartitioning(Session session, PartitioningHandle left, PartitioningHandle right)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<Object> getInfo(Session session, TableHandle handle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableSchema getTableSchema(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableMetadata getTableMetadata(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableStatistics getTableStatistics(Session session, TableHandle tableHandle, Constraint constraint)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public List<QualifiedObjectName> listTables(Session session, QualifiedTablePrefix prefix)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Map<String, ColumnHandle> getColumnHandles(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public ColumnMetadata getColumnMetadata(Session session, TableHandle tableHandle, ColumnHandle columnHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public List<TableColumnsMetadata> listTableColumns(Session session, QualifiedTablePrefix prefix)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void createSchema(Session session, CatalogSchemaName schema, Map<String, Object> properties, TrinoPrincipal principal)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void dropSchema(Session session, CatalogSchemaName schema)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void renameSchema(Session session, CatalogSchemaName source, String target)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void setSchemaAuthorization(Session session, CatalogSchemaName source, TrinoPrincipal principal)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void createTable(Session session, String catalogName, ConnectorTableMetadata tableMetadata, boolean ignoreExisting)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void renameTable(Session session, TableHandle tableHandle, QualifiedObjectName newTableName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void setTableProperties(Session session, TableHandle tableHandle, Map<String, Optional<Object>> properties)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void setTableComment(Session session, TableHandle tableHandle, Optional<String> comment)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void setColumnComment(Session session, TableHandle tableHandle, ColumnHandle column, Optional<String> comment)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void renameColumn(Session session, TableHandle tableHandle, ColumnHandle source, String target)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void addColumn(Session session, TableHandle tableHandle, ColumnMetadata column)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void dropColumn(Session session, TableHandle tableHandle, ColumnHandle column)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void setTableAuthorization(Session session, CatalogSchemaTableName table, TrinoPrincipal principal)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void dropTable(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void truncateTable(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableLayout> getNewTableLayout(Session session, String catalogName, ConnectorTableMetadata tableMetadata)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public OutputTableHandle beginCreateTable(Session session, String catalogName, ConnectorTableMetadata tableMetadata, Optional<TableLayout> layout)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<ConnectorOutputMetadata> finishCreateTable(Session session, OutputTableHandle tableHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableLayout> getInsertLayout(Session session, TableHandle target)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableStatisticsMetadata getStatisticsCollectionMetadataForWrite(Session session, String catalogName, ConnectorTableMetadata tableMetadata)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableStatisticsMetadata getStatisticsCollectionMetadata(Session session, String catalogName, ConnectorTableMetadata tableMetadata)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public AnalyzeTableHandle beginStatisticsCollection(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void finishStatisticsCollection(Session session, AnalyzeTableHandle tableHandle, Collection<ComputedStatistics> computedStatistics)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void cleanupQuery(Session session)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public InsertTableHandle beginInsert(Session session, TableHandle tableHandle, List<ColumnHandle> columns)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean supportsMissingColumnsOnInsert(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<ConnectorOutputMetadata> finishInsert(Session session, InsertTableHandle tableHandle, Collection<Slice> fragments, Collection<ComputedStatistics> computedStatistics)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean delegateMaterializedViewRefreshToConnector(Session session, QualifiedObjectName viewName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public ListenableFuture<Void> refreshMaterializedView(Session session, QualifiedObjectName viewName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public InsertTableHandle beginRefreshMaterializedView(Session session, TableHandle tableHandle, List<TableHandle> sourceTableHandles)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<ConnectorOutputMetadata> finishRefreshMaterializedView(
            Session session,
            TableHandle tableHandle,
            InsertTableHandle insertHandle,
            Collection<Slice> fragments,
            Collection<ComputedStatistics> computedStatistics,
            List<TableHandle> sourceTableHandles)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public ColumnHandle getDeleteRowIdColumnHandle(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public ColumnHandle getUpdateRowIdColumnHandle(Session session, TableHandle tableHandle, List<ColumnHandle> updatedColumns)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableHandle> applyDelete(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public OptionalLong executeDelete(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableHandle beginDelete(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void finishDelete(Session session, TableHandle tableHandle, Collection<Slice> fragments)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public TableHandle beginUpdate(Session session, TableHandle tableHandle, List<ColumnHandle> updatedColumns)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void finishUpdate(Session session, TableHandle tableHandle, Collection<Slice> fragments)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<CatalogName> getCatalogHandle(Session session, String catalogName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Map<String, Catalog> getCatalogs(Session session)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public List<QualifiedObjectName> listViews(Session session, QualifiedTablePrefix prefix)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Map<QualifiedObjectName, ViewInfo> getViews(Session session, QualifiedTablePrefix prefix)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<ViewDefinition> getView(Session session, QualifiedObjectName viewName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Map<String, Object> getSchemaProperties(Session session, CatalogSchemaName schemaName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TrinoPrincipal> getSchemaOwner(Session session, CatalogSchemaName schemaName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void createView(Session session, QualifiedObjectName viewName, ViewDefinition definition, boolean replace)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void renameView(Session session, QualifiedObjectName source, QualifiedObjectName target)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void setViewAuthorization(Session session, CatalogSchemaTableName view, TrinoPrincipal principal)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void dropView(Session session, QualifiedObjectName viewName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<ResolvedIndex> resolveIndex(Session session, TableHandle tableHandle, Set<ColumnHandle> indexableColumns, Set<ColumnHandle> outputColumns, TupleDomain<ColumnHandle> tupleDomain)
    {
        throw new UnsupportedOperationException();
    }
    // Pushdown hooks below return Optional.empty(), i.e. "connector declines
    // the pushdown" — this keeps planner tests running without a connector.
    @Override
    public Optional<LimitApplicationResult<TableHandle>> applyLimit(Session session, TableHandle table, long limit)
    {
        return Optional.empty();
    }
    @Override
    public Optional<ConstraintApplicationResult<TableHandle>> applyFilter(Session session, TableHandle table, Constraint constraint)
    {
        return Optional.empty();
    }
    @Override
    public Optional<SampleApplicationResult<TableHandle>> applySample(Session session, TableHandle table, SampleType sampleType, double sampleRatio)
    {
        return Optional.empty();
    }
    @Override
    public Optional<AggregationApplicationResult<TableHandle>> applyAggregation(
            Session session,
            TableHandle table,
            List<AggregateFunction> aggregations,
            Map<String, ColumnHandle> assignments,
            List<List<ColumnHandle>> groupingSets)
    {
        return Optional.empty();
    }
    @Override
    public Optional<JoinApplicationResult<TableHandle>> applyJoin(
            Session session,
            JoinType joinType,
            TableHandle left,
            TableHandle right,
            List<JoinCondition> joinConditions,
            Map<String, ColumnHandle> leftAssignments,
            Map<String, ColumnHandle> rightAssignments,
            JoinStatistics statistics)
    {
        return Optional.empty();
    }
    //
    // Roles and Grants
    //
    @Override
    public boolean isCatalogManagedSecurity(Session session, String catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean roleExists(Session session, String role, Optional<String> catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void createRole(Session session, String role, Optional<TrinoPrincipal> grantor, Optional<String> catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void dropRole(Session session, String role, Optional<String> catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Set<String> listRoles(Session session, Optional<String> catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void grantRoles(Session session, Set<String> roles, Set<TrinoPrincipal> grantees, boolean adminOption, Optional<TrinoPrincipal> grantor, Optional<String> catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void revokeRoles(Session session, Set<String> roles, Set<TrinoPrincipal> grantees, boolean adminOption, Optional<TrinoPrincipal> grantor, Optional<String> catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Set<RoleGrant> listApplicableRoles(Session session, TrinoPrincipal principal, Optional<String> catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Set<String> listEnabledRoles(Identity identity)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Set<String> listEnabledRoles(Session session, String catalog)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Set<RoleGrant> listAllRoleGrants(Session session, Optional<String> catalog, Optional<Set<String>> roles, Optional<Set<String>> grantees, OptionalLong limit)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Set<RoleGrant> listRoleGrants(Session session, Optional<String> catalog, TrinoPrincipal principal)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void grantSchemaPrivileges(Session session, CatalogSchemaName schemaName, Set<Privilege> privileges, TrinoPrincipal grantee, boolean grantOption)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void denySchemaPrivileges(Session session, CatalogSchemaName schemaName, Set<Privilege> privileges, TrinoPrincipal grantee)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void revokeSchemaPrivileges(Session session, CatalogSchemaName schemaName, Set<Privilege> privileges, TrinoPrincipal grantee, boolean grantOption)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void grantTablePrivileges(Session session, QualifiedObjectName tableName, Set<Privilege> privileges, TrinoPrincipal grantee, boolean grantOption)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void denyTablePrivileges(Session session, QualifiedObjectName tableName, Set<Privilege> privileges, TrinoPrincipal grantee)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void revokeTablePrivileges(Session session, QualifiedObjectName tableName, Set<Privilege> privileges, TrinoPrincipal grantee, boolean grantOption)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public List<GrantInfo> listTablePrivileges(Session session, QualifiedTablePrefix prefix)
    {
        throw new UnsupportedOperationException();
    }
    //
    // Functions
    //
    @Override
    public void addFunctions(List<? extends SqlFunction> functions)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public List<FunctionMetadata> listFunctions()
    {
        throw new UnsupportedOperationException();
    }
    // Delegates to the testing function decoder; fails loudly when the name
    // was not produced by a ResolvedFunction serialization.
    @Override
    public ResolvedFunction decodeFunction(QualifiedName name)
    {
        return functionDecoder.fromQualifiedName(name)
                .orElseThrow(() -> new IllegalArgumentException("Function is not resolved: " + name));
    }
    // Only the zero-argument rand() function is resolvable; anything else
    // raises FUNCTION_NOT_FOUND. Enough for tests exercising non-deterministic
    // function handling (note the `true` deterministic=false-style flag slot).
    @Override
    public ResolvedFunction resolveFunction(Session session, QualifiedName name, List<TypeSignatureProvider> parameterTypes)
    {
        String nameSuffix = name.getSuffix();
        if (nameSuffix.equals("rand") && parameterTypes.isEmpty()) {
            BoundSignature boundSignature = new BoundSignature(nameSuffix, DOUBLE, ImmutableList.of());
            return new ResolvedFunction(
                    boundSignature,
                    toFunctionId(boundSignature.toSignature()),
                    SCALAR,
                    true,
                    new FunctionNullability(false, ImmutableList.of()),
                    ImmutableMap.of(),
                    ImmutableSet.of());
        }
        throw new TrinoException(FUNCTION_NOT_FOUND, name + "(" + Joiner.on(", ").join(parameterTypes) + ")");
    }
    @Override
    public ResolvedFunction resolveOperator(Session session, OperatorType operatorType, List<? extends Type> argumentTypes)
            throws OperatorNotFoundException
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public ResolvedFunction getCoercion(Session session, OperatorType operatorType, Type fromType, Type toType)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public ResolvedFunction getCoercion(Session session, QualifiedName name, Type fromType, Type toType)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean isAggregationFunction(QualifiedName name)
    {
        throw new UnsupportedOperationException();
    }
    // Mirrors resolveFunction: only the zero-argument rand() has metadata.
    @Override
    public FunctionMetadata getFunctionMetadata(ResolvedFunction resolvedFunction)
    {
        BoundSignature signature = resolvedFunction.getSignature();
        if (signature.getName().equals("rand") && signature.getArgumentTypes().isEmpty()) {
            return new FunctionMetadata(signature.toSignature(), new FunctionNullability(false, ImmutableList.of()), false, false, "", SCALAR);
        }
        throw new TrinoException(FUNCTION_NOT_FOUND, signature.toString());
    }
    @Override
    public AggregationFunctionMetadata getAggregationFunctionMetadata(ResolvedFunction resolvedFunction)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public WindowFunctionSupplier getWindowFunctionImplementation(ResolvedFunction resolvedFunction)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public AggregationMetadata getAggregateFunctionImplementation(ResolvedFunction resolvedFunction)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public FunctionInvoker getScalarFunctionInvoker(ResolvedFunction resolvedFunction, InvocationConvention invocationConvention)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<ProjectionApplicationResult<TableHandle>> applyProjection(Session session, TableHandle table, List<ConnectorExpression> projections, Map<String, ColumnHandle> assignments)
    {
        return Optional.empty();
    }
    @Override
    public Optional<TopNApplicationResult<TableHandle>> applyTopN(Session session, TableHandle handle, long topNFunctions, List<SortItem> sortItems, Map<String, ColumnHandle> assignments)
    {
        return Optional.empty();
    }
    @Override
    public void createMaterializedView(Session session, QualifiedObjectName viewName, MaterializedViewDefinition definition, boolean replace, boolean ignoreExisting)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void dropMaterializedView(Session session, QualifiedObjectName viewName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public List<QualifiedObjectName> listMaterializedViews(Session session, QualifiedTablePrefix prefix)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Map<QualifiedObjectName, ViewInfo> getMaterializedViews(Session session, QualifiedTablePrefix prefix)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<MaterializedViewDefinition> getMaterializedView(Session session, QualifiedObjectName viewName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public MaterializedViewFreshness getMaterializedViewFreshness(Session session, QualifiedObjectName name)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void renameMaterializedView(Session session, QualifiedObjectName existingViewName, QualifiedObjectName newViewName)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public void setMaterializedViewProperties(Session session, QualifiedObjectName viewName, Map<String, Optional<Object>> properties)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableScanRedirectApplicationResult> applyTableScanRedirect(Session session, TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }
    // Wraps the (abstract-by-default) getTableHandle result with "no
    // redirection" semantics; still throws unless getTableHandle is overridden.
    @Override
    public RedirectionAwareTableHandle getRedirectionAwareTableHandle(Session session, QualifiedObjectName tableName)
    {
        return noRedirection(getTableHandle(session, tableName));
    }
    @Override
    public RedirectionAwareTableHandle getRedirectionAwareTableHandle(Session session, QualifiedObjectName tableName, Optional<TableVersion> startVersion, Optional<TableVersion> endVersion)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean isValidTableVersion(Session session, QualifiedObjectName tableName, TableVersion version)
    {
        throw new UnsupportedOperationException();
    }
    @Override
    public Optional<TableHandle> getTableHandle(Session session, QualifiedObjectName table, Optional<TableVersion> startVersion, Optional<TableVersion> endVersion)
    {
        throw new UnsupportedOperationException();
    }
}
|
// https://open.kattis.com/problems/provincesandgold
#include<bits/stdc++.h>
using namespace std;
int main(){
    // Fast unsynchronized stdio for competitive programming input.
    ios::sync_with_stdio(0);
    cin.tie(0);
    // Counts of gold, silver and copper in hand.
    int gold, silver, copper;
    cin >> gold >> silver >> copper;
    // Total buying power: gold is worth 3, silver 2, copper 1.
    const int buy = 3 * gold + 2 * silver + copper;
    // Best affordable victory card, if any (Province 8, Duchy 5, Estate 2).
    if (buy >= 8) {
        cout << "Province or ";
    } else if (buy >= 5) {
        cout << "Duchy or ";
    } else if (buy >= 2) {
        cout << "Estate or ";
    }
    // Best affordable treasure (Gold 6, Silver 3, Copper always).
    if (buy >= 6) {
        cout << "Gold\n";
    } else if (buy >= 3) {
        cout << "Silver\n";
    } else {
        cout << "Copper\n";
    }
}
|
<filename>src/ts-sliding-window.ts
import * as moment from 'moment';
import BigNumber from 'bignumber.js';
// Configuration for a SlidingWindow instance.
export type TOptions = {
  tsProp?: string | 'timestamp', // name of the moment.Moment timestamp property on each item (default: 'timestamp')
  propsForId: string[], //Property Names used to create unique Id for sum
  targetProp: string, // name of the BigNumber-castable property that gets summed
  windowSize: number, // window length in milliseconds
  refleshRate: number, // slide interval in milliseconds; must not exceed windowSize
}
/**
 * Time-based sliding window over a sorted item queue, maintaining per-key
 * running sums of `targetProp`. Keys are built by joining the values of the
 * `propsForId` properties with '_'.
 */
class SlidingWindow {
  queue: any[]; // items sorted ascending by their timestamp property
  timer: NodeJS.Timeout; // interval handle driving periodic slides
  _sums: { [key: string]: BigNumber }; // running sum per key
  opts: TOptions;
  constructor(opts: TOptions) {
    this.opts = opts;
    this.opts.tsProp = opts.tsProp || 'timestamp';
    this.queue = [];
    this._sums = {};
    if (this.opts.refleshRate > this.opts.windowSize) throw new Error(`'windowSize' should be larger than 'refleshRate'`);
  }
  /** Start sliding the window every `refleshRate` ms. */
  public start() {
    this.timer = setInterval(() => this._slide(), this.opts.refleshRate);
  }
  /** Stop the periodic slide started by start(). */
  public stop() {
    clearInterval(this.timer);
  }
  /**
   * Insert an item in timestamp order (stable after equal timestamps) and
   * add its target value to the sum for its key.
   */
  public push(item) {
    let idx;
    for (idx = this.queue.length - 1; idx >= 0; idx--) {
      if (item[this.opts.tsProp].isSameOrAfter(this.queue[idx][this.opts.tsProp])) break;
    }
    this.queue.splice(idx + 1, 0, item);
    const sumId = this._getSumId(item);
    // Initialize-or-accumulate in one step (was a redundant two-branch init).
    const current = this._sums.hasOwnProperty(sumId) ? this._sums[sumId] : new BigNumber(0);
    this._sums[sumId] = current.plus(new BigNumber(item[this.opts.targetProp]));
  }
  /** Slide the window up to "now" and return the current per-key sums. */
  public sum(): { [key: string]: BigNumber } {
    this._slide();
    return this._sums;
  }
  /** Debug helper: expose an internal property by name. */
  public show(prop: string) {
    return this[prop];
  }
  /** Sanity-check that an item has the shape this window expects (logs results). */
  test(item) {
    const test1 = item.hasOwnProperty(this.opts.tsProp) && moment.isMoment(item[this.opts.tsProp]);
    const test2 = BigNumber.isBigNumber(new BigNumber(item[this.opts.targetProp]));
    const test3 = this.opts.propsForId.every((elem) => item.hasOwnProperty(elem));
    console.log(`Does item has the property specified in tsProp and Is it moment.Moment type?`, test1);
    console.log(`Does item has the target property which castable to BigNumber type?`, test2);
    console.log(`Does item has all properties specified in propsForId? `, test3);
  }
  /** Build the sum key for an item from its propsForId values. */
  private _getSumId(item): string {
    const propNames = this.opts.propsForId.map((elem) => item[elem]);
    return propNames.join('_');
  }
  /**
   * Drop items older than `windowSize` from the head of the queue and
   * subtract their contribution from the running sums.
   */
  private _slide() {
    // FIX: moment#subtract requires an explicit unit; the unit-less call is
    // deprecated and ambiguous. windowSize is interpreted as milliseconds.
    const threshold = moment().subtract(this.opts.windowSize, 'milliseconds');
    let lastIndex = this.queue.length - 1;
    let idx;
    for (idx = 0; idx <= lastIndex; idx++) {
      if (this.queue[idx][this.opts.tsProp].isBefore(threshold)) {
        const sumId = this._getSumId(this.queue[idx]);
        this._sums[sumId] = this._sums[sumId].minus(this.queue[idx][this.opts.targetProp]);
      } else if (this.queue[idx][this.opts.tsProp].isSameOrAfter(threshold)) {
        break;
      }
    }
    // Everything before idx has expired.
    this.queue.splice(0, idx);
  }
}
module.exports = SlidingWindow; |
#!/usr/bin/env bash
set -u

# Emit a TAP plan: count run_test invocations across every test file.
total_tests="$(cat test/test-* | grep -c "run_test")"
echo "1..$total_tests"

# Run each test script from inside the test directory, remembering failures
# but never stopping early.
pushd test > /dev/null || exit 1
test_error=""
for test_case in test-* ; do
    if ! bash "$(basename "$test_case")" ; then
        test_error=1
    fi
done
popd > /dev/null || true

# Non-zero exit when at least one test script failed.
if [ -n "$test_error" ] ; then
    echo "# there are failed tests!" >&2
    exit 1
fi
|
import os

# Maps a platform key (e.g. "linux", "osx") to its registered platform class.
registry = {}


class PlatformRegistry:
    """Class decorator that registers platform classes and resolves the one
    matching the host platform.

    Fixes over the original:
    - ``__call__`` registered classes under ``self.platform`` (a *bound
      method* object), while ``platform()`` looked keys up by *string* —
      lookups could never succeed. Classes now register under a string key
      derived from their name (``OSXPlatform`` -> ``"osx"``).
    - ``__call__`` returned ``None``, so the decorated class name was
      clobbered; it now returns the class, as decorators must.
    """

    def __call__(self, clazz):
        """Register ``clazz`` under a key derived from its class name and
        return it unchanged (decorator protocol)."""
        key = clazz.__name__.lower()
        # "OSXPlatform" -> "osx", "LinuxPlatform" -> "linux"
        if key.endswith("platform"):
            key = key[: -len("platform")]
        registry[key] = clazz
        return clazz

    def platform(self):
        """Instantiate and return the class registered for the host platform."""
        # Decide on which platform this runs
        platform = os.uname()[0].lower()
        if platform == "darwin":
            platform = "osx"
        # Check if any platforms are registered
        # (SyntaxError kept for backward compatibility with existing callers)
        if not registry:
            raise SyntaxError("No platforms are registered.")
        if platform not in registry:
            raise SyntaxError("No class registered for platform %r." % platform)
        # Retrieve the corresponding class for the detected platform
        return registry[platform]()


# Example usage:
# Create an instance of PlatformRegistry
platform_registry = PlatformRegistry()

# Register a platform and its corresponding class
@platform_registry
class OSXPlatform:
    pass

# Resolving only succeeds on a host whose platform has a registered class,
# so guard the demo lookup (the original crashed on every non-mac host).
if os.uname()[0].lower() == "darwin":
    current_platform = platform_registry.platform()
# Foreman shortcuts: start the default Procfile, or the dev-specific one.
alias fs='foreman start'
alias fdev='foreman start -f Procfile-dev'
|
#!/usr/bin/bash -l
#SBATCH -p intel -N 1 -n 4 --mem 32gb --out logs/make_gvcf_subsample.%a.log --time 72:00:00 -a 4

# Per-sample GVCF calling on subsampled alignments with GATK4 HaplotypeCaller
# (haploid). One SLURM array task processes one row of $SAMPFILE.
module load htslib
module load samtools
module load picard
module load gatk/4
module load bcftools

MEM=32g
SAMPFILE=samples.csv
# Optional project overrides: expected to define REFGENOME, GVCFFOLDER,
# ALNFOLDER, HTCEXT, ...
if [ -f config.txt ]; then
    source config.txt
fi
DICT=$(echo $REFGENOME | sed 's/fasta$/dict/')
if [ ! -f $DICT ]; then
    # FIX: build the dictionary from $REFGENOME — the file $DICT is named
    # after. The original used $GENOMEIDX, which is never defined in this
    # script (confirm config.txt does not rely on it).
    picard CreateSequenceDictionary R=$REFGENOME O=$DICT
fi
mkdir -p $GVCFFOLDER
CPU=1
if [ $SLURM_CPUS_ON_NODE ]; then
    CPU=$SLURM_CPUS_ON_NODE
fi
# Array index picks the sample row; fall back to a CLI argument for manual runs.
N=${SLURM_ARRAY_TASK_ID}
if [ ! $N ]; then
    N=$1
fi
if [ ! $N ]; then
    echo "need to provide a number by --array slurm or on the cmdline"
    exit
fi
hostname
date
IFS=,
tail -n +2 $SAMPFILE | sed -n ${N}p | while read STRAIN SAMPID
do
    # BEGIN THIS PART IS PROJECT SPECIFIC LIKELY
    # END THIS PART IS PROJECT SPECIFIC LIKELY
    echo "STRAIN is $STRAIN"
    GVCF=$GVCFFOLDER/$STRAIN.subsample.g.vcf
    ALNFILE=$ALNFOLDER/$STRAIN.subsample.$HTCEXT
    # Already compressed output means this sample is done (exit only leaves
    # the pipeline subshell, which is fine: each task handles one sample).
    if [ -s $GVCF.gz ]; then
        echo "Skipping $STRAIN - Already called $STRAIN.g.vcf.gz"
        exit
    fi
    # (Re)call when the GVCF is missing or older than its alignment.
    if [[ ! -f $GVCF || $ALNFILE -nt $GVCF ]]; then
        time gatk --java-options -Xmx${MEM} HaplotypeCaller \
            --emit-ref-confidence GVCF --sample-ploidy 1 \
            --input $ALNFILE --reference $REFGENOME \
            --output $GVCF --native-pair-hmm-threads $CPU \
            -G StandardAnnotation -G AS_StandardAnnotation -G StandardHCAnnotation
    fi
    # Compress and index for downstream joint genotyping.
    bgzip --threads $CPU -f $GVCF
    tabix $GVCF.gz
done
date
|
#!/bin/sh
# Workaround to make sure dependency service is up and running
sleep 60
# Nozzle branch under test (defaults to master)
branch=${NOZZLE_BRANCH:-master}
# Build nozzle
go get -d github.com/cloudfoundry-community/splunk-firehose-nozzle
cd /go/src/github.com/cloudfoundry-community/splunk-firehose-nozzle
git checkout ${branch}
make build
# Run proc_monitor
# FIX: the original ran "git checkout develop" before cd'ing into the clone,
# so the checkout happened in the nozzle repo instead of proc_monitor.
echo "Run proc monitor"
git clone https://github.com/chenziliang/proc_monitor
cd proc_monitor
git checkout develop
# Detached screen session so the monitor keeps sampling during the test.
screen -S proc_monitor -m -d python proc_monitor.py
cd ..
duration=${NOZZLE_DURATION:-1200}
# Run perf test
echo "Run nozzle perf tests"
python ci/perf.py --run nozzle --duration ${duration}
|
#!/usr/bin/env bash
# download and install the latest Node Version Manager
echo -e "\e[1;34mInstalling NVM...\e[0m"
git clone https://github.com/creationix/nvm /home/vagrant/.nvm
# append NVM script to /home/vagrant/.profile
SOURCE_STR="\n[[ -s /home/vagrant/.nvm/nvm.sh ]] && . /home/vagrant/.nvm/nvm.sh # Add NVM to PATH for scripting"
echo -e "$SOURCE_STR" >> /home/vagrant/.profile
source /home/vagrant/.profile
# change owner of .nvm directory (user and the user's login group)
chown -R vagrant: /home/vagrant/.nvm/
# Latest available version = first field of the last `nvm ls-remote` line.
# FIX: the original piped through `awk "END{print}" $1`, which kept the whole
# formatted line and appended the script's first argument as an awk input file.
NODEJS_VERSION=$(nvm ls-remote | tail -n 1 | awk '{print $1}')
# install Node
echo -e "\e[1;34mInstalling Node $NODEJS_VERSION...\e[0m"
nvm install "$NODEJS_VERSION"
# set a default Node version
nvm alias default "$NODEJS_VERSION"
nvm use default
# install global Node packages
echo -e "\e[1;34mInstalling Bower...\e[0m"
npm install -g bower
echo -e "\e[1;34mInstalling Grunt-CLI...\e[0m"
npm install -g grunt-cli
# reload user profile
source /home/vagrant/.profile
|
//
// YJNetworkManager.h
// WYNews
//
// Created by Annabelle on 16/6/15.
// Copyright © 2016年 annabelle. All rights reserved.
//
#import <AFNetworking.h>
/// Network manager — central place for all of the app's network requests.
@interface YJNetworkManager : AFHTTPSessionManager
// Global access point (shared singleton instance)
+ (instancetype)sharedManager;
// API method: fetch the news list for a category, paging from startIdx;
// completion receives the result array or an error.
- (void)newsListWithCategory:(NSString *)category startIndex:(NSInteger)startIdx completion:(void (^)(NSArray *, NSError *))completion;
@end
|
<reponame>IgnacioZentenoSmith/proyectoFacturacion
document.getElementById('inputPeriodo').addEventListener("change", getCurrentDate, false);

// Re-point the period form's action URL at the period selected in the input.
function getCurrentDate() {
    const form = document.getElementById('inputPeriodoForm');
    // Drop the trailing 7-character period segment (assumes a "YYYY-MM"
    // suffix — TODO confirm) and append the newly chosen value;
    // `this` is the changed input element.
    form.action = form.action.slice(0, -7) + this.value;
}
|
#!/usr/bin/env bash
set -e
# Install the Dockerfile language server via the sibling npm_install helper:
# first argument is the binary name, second the npm package providing it.
"$(dirname "$0")/npm_install.sh" docker-langserver dockerfile-language-server-nodejs
|
#!/bin/bash
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Binaries produced by this port.
EXECUTABLES="toybox"
# Toybox wants to build in its current directory.
BUILD_DIR=${SRC_DIR}
# NaCl toolchain lacks the usual endian macro; nacl-spawn provides process
# emulation headers, and the LDFLAGS pull in the PPAPI/nacl_io runtime.
NACLPORTS_CPPFLAGS+=" -DBYTE_ORDER=LITTLE_ENDIAN"
NACLPORTS_CPPFLAGS+=" -I${NACLPORTS_INCLUDE}/nacl-spawn"
NACLPORTS_LDFLAGS+=" -lnacl_spawn -lppapi_simple -lnacl_io -lppapi -lppapi_cpp"
# Toybox builds host tools during the build; use the host compiler for those.
export HOSTCC=cc
# newlib needs the glibc-compat shim for APIs toybox expects.
if [ "${NACL_LIBC}" = "newlib" ]; then
  NACLPORTS_CPPFLAGS+=" -I${NACLPORTS_INCLUDE}/glibc-compat"
  NACLPORTS_LDFLAGS+=" -lglibc-compat"
fi
# Seed toybox's kconfig with our pre-baked configuration.
ConfigureStep() {
  LogExecute cp ${START_DIR}/toybox.config ${SRC_DIR}/.config
}
BuildStep() {
  # We can't use NACL_CROSS_PREFIX without also redefining the CC and HOSTCC
  # variables.
  if [[ "${NACLCXX}" = *clang++ ]]; then
    CC=clang++
  else
    CC=gcc
    NACLPORTS_LDFLAGS+=" -l${NACL_CPP_LIB}"
  fi
  # Toybox's Makefile derives the real compiler from CROSS_COMPILE + CC.
  export CROSS_COMPILE="${NACL_CROSS_PREFIX}-"
  export LDFLAGS="${NACLPORTS_LDFLAGS}"
  export CFLAGS="${NACLPORTS_CPPFLAGS} ${NACLPORTS_CFLAGS}"
  export CC
  make clean
  DefaultBuildStep
}
InstallStep() {
  # Assemble a publishable Chrome-app bundle: per-arch nexe, nmf manifest,
  # terminal front-end, app manifest and icons — then zip it.
  MakeDir ${PUBLISH_DIR}
  local ASSEMBLY_DIR="${PUBLISH_DIR}/toybox"
  MakeDir ${ASSEMBLY_DIR}
  cp ${BUILD_DIR}/toybox ${ASSEMBLY_DIR}/toybox_${NACL_ARCH}${NACL_EXEEXT}
  ChangeDir ${ASSEMBLY_DIR}
  LogExecute python ${NACL_SDK_ROOT}/tools/create_nmf.py \
      ${ASSEMBLY_DIR}/toybox_*${NACL_EXEEXT} \
      -s . \
      -o toybox.nmf
  LogExecute python ${TOOLS_DIR}/create_term.py toybox.nmf
  InstallNaClTerm ${ASSEMBLY_DIR}
  LogExecute cp ${START_DIR}/manifest.json ${ASSEMBLY_DIR}
  LogExecute cp ${START_DIR}/icon_16.png ${ASSEMBLY_DIR}
  LogExecute cp ${START_DIR}/icon_48.png ${ASSEMBLY_DIR}
  LogExecute cp ${START_DIR}/icon_128.png ${ASSEMBLY_DIR}
  ChangeDir ${PUBLISH_DIR}
  LogExecute zip -r toybox-0.4.7.zip toybox
}
|
import traceback
import tornado.gen
# NOTE(review): this is a fragment — `response` and `zoom_user` are not
# defined anywhere in this file; they presumably come from the enclosing
# coroutine this code was extracted from. Confirm before reuse.
try:
    print(response.body) # Corrected variable name from 'he.response.body' to 'response.body'
except Exception as e:
    traceback.print_exc() # Moved the traceback print inside the except block to handle exceptions
    # Handle the exception or log the error as per the application's requirements
# Assuming the code is within a Tornado coroutine, use tornado.gen.Return to return the response and zoom_user
raise tornado.gen.Return((response, zoom_user)) |
#!/bin/bash

# Replicate every repository definition from a source Artifactory instance
# onto a target instance via the repositories REST API.
SOURCE_ARTI_CREDS="admin:password"
TARGET_ARTI_CREDS="admin:password"
SOURCE_ARTI_URL="http://<SOURCE>/artifactory"
TARGET_ARTI_URL="http://<TARGET>/artifactory"

# PUT will fail if the repository already exists. It will also fail for virtual creation if it references a virtual that has not been created yet. In that case, you need to run the script again.
declare -a ALL_REPOSITORIES
# Collect every repo key from the source; URL-encode spaces for the GETs below.
ALL_REPOSITORIES=($(curl -su $SOURCE_ARTI_CREDS "$SOURCE_ARTI_URL/api/repositories" | jq -r '.[]| .key' | sed 's/ /%20/g'))

for repo_key in "${ALL_REPOSITORIES[@]}"; do
    echo $repo_key
    # Read the full configuration from the source and replay it on the target.
    repo_config=$(curl -su $SOURCE_ARTI_CREDS "$SOURCE_ARTI_URL/api/repositories/$repo_key")
    curl -XPUT -su $TARGET_ARTI_CREDS "$TARGET_ARTI_URL/api/repositories/$repo_key" -d ''"$repo_config"'' -H "Content-Type: application/json"
done
|
### grep
# Enable dircolors-aware coloring when available (Debian/Ubuntu skeleton style).
if [ -x /usr/bin/dircolors ]; then
    test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
    alias fgrep='fgrep --color=auto'
    alias egrep='egrep --color=auto'
fi
# NOTE(review): --color=always embeds escape codes even when piping grep's
# output to another program; --color=auto is the usual choice — confirm the
# "always" here is intentional before changing it.
alias grep="grep --color=always"
# alias rgrep="grep --line-number --color=always --recursive --exclude-dir='.svn'"
# Recursive, line-numbered, colored search of the current tree, skipping
# Subversion metadata and tags files.
xgrep(){
    grep --line-number --color=always --recursive --exclude-dir='.svn' --exclude='tags' "$*" *;
}
|
<filename>kernel-d-socket/socket-business-websocket/src/main/java/cn/stylefeng/roses/kernel/socket/business/websocket/server/WebSocketServer.java
package cn.stylefeng.roses.kernel.socket.business.websocket.server;
import cn.hutool.core.util.ObjectUtil;
import cn.stylefeng.roses.kernel.jwt.api.context.JwtContext;
import cn.stylefeng.roses.kernel.jwt.api.pojo.payload.DefaultJwtPayload;
import cn.stylefeng.roses.kernel.socket.api.enums.ClientMessageTypeEnum;
import cn.stylefeng.roses.kernel.socket.api.enums.ServerMessageTypeEnum;
import cn.stylefeng.roses.kernel.socket.api.enums.SystemMessageTypeEnum;
import cn.stylefeng.roses.kernel.socket.api.message.SocketMsgCallbackInterface;
import cn.stylefeng.roses.kernel.socket.api.session.pojo.SocketSession;
import cn.stylefeng.roses.kernel.socket.business.websocket.message.SocketMessageCenter;
import cn.stylefeng.roses.kernel.socket.business.websocket.operator.channel.GunsSocketOperator;
import cn.stylefeng.roses.kernel.socket.business.websocket.pojo.WebSocketMessageDTO;
import cn.stylefeng.roses.kernel.socket.business.websocket.session.SessionCenter;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import javax.websocket.*;
import javax.websocket.server.PathParam;
import javax.websocket.server.ServerEndpoint;
import java.io.IOException;
/**
* 消息监听处理器
*
* @author majianguo
* @date 2021/6/1 下午2:35
*/
@Slf4j
@ServerEndpoint(value = "/webSocket/{token}")
@Component
public class WebSocketServer {
/**
* 连接建立调用的方法
* <p>
* 暂时无用,需要在建立连接的时候做一些事情的话可以修改这里
*
* @param session 会话信息
* @author majianguo
* @date 2021/6/21 下午5:14
**/
@OnOpen
public void onOpen(Session session, @PathParam("token") String token) {
String userId = null;
try {
// 解析用户信息
DefaultJwtPayload defaultPayload = JwtContext.me().getDefaultPayload(token);
userId = defaultPayload.getUserId().toString();
} catch (io.jsonwebtoken.JwtException e) {
try {
session.close();
} catch (IOException ioException) {
ioException.printStackTrace();
}
}
// 操作api包装
GunsSocketOperator gunsSocketOperator = new GunsSocketOperator(session);
// 回复消息
WebSocketMessageDTO replyMsg = new WebSocketMessageDTO();
replyMsg.setServerMsgType(ServerMessageTypeEnum.SYS_REPLY_MSG_TYPE.getCode());
replyMsg.setToUserId(userId);
// 创建会话对象
SocketSession<GunsSocketOperator> socketSession = new SocketSession<>();
try {
// 设置回复内容
replyMsg.setData(session.getId());
socketSession.setSessionId(session.getId());
socketSession.setUserId(userId);
socketSession.setSocketOperatorApi(gunsSocketOperator);
socketSession.setToken(token);
socketSession.setConnectionTime(System.currentTimeMillis());
// 维护会话
SessionCenter.addSocketSession(socketSession);
} finally {
// 回复消息
gunsSocketOperator.writeAndFlush(replyMsg);
// 触发首次连接回调
SocketMsgCallbackInterface socketMsgCallbackInterface = SocketMessageCenter.getSocketMsgCallbackInterface(SystemMessageTypeEnum.SYS_LISTENER_ONOPEN.getCode());
if (ObjectUtil.isNotEmpty(socketMsgCallbackInterface)) {
// 触发回调
socketMsgCallbackInterface.callback(SystemMessageTypeEnum.SYS_LISTENER_ONOPEN.getCode(), null, socketSession);
}
}
}
/**
* 连接关闭调用的方法
*
* @param session 会话信息
* @author majianguo
* @date 2021/6/21 下午5:14
**/
@OnClose
public void onClose(Session session) {
try {
SocketSession<GunsSocketOperator> socketSession = SessionCenter.getSessionBySessionId(session.getId());
// 触发首次连接回调
SocketMsgCallbackInterface socketMsgCallbackInterface = SocketMessageCenter.getSocketMsgCallbackInterface(SystemMessageTypeEnum.SYS_LISTENER_ONCLOSE.getCode());
if (ObjectUtil.isNotEmpty(socketSession) && ObjectUtil.isNotEmpty(socketMsgCallbackInterface)) {
// 触发回调
socketMsgCallbackInterface.callback(SystemMessageTypeEnum.SYS_LISTENER_ONCLOSE.getCode(), null, socketSession);
}
} finally {
SessionCenter.deleteById(session.getId());
}
}
/**
* 收到消息调用的方法
*
* @param message 接收到的消息
* @param socketChannel 会话信息
* @author majianguo
* @date 2021/6/21 下午5:14
**/
@OnMessage
public void onMessage(String message, Session socketChannel) {
// 转换为Java对象
WebSocketMessageDTO WebSocketMessageDTO = JSON.parseObject(message, WebSocketMessageDTO.class);
// 维护通道是否已初始化
SocketSession<GunsSocketOperator> socketSession = SessionCenter.getSessionBySessionId(socketChannel.getId());
// 心跳包
if (ObjectUtil.isNotEmpty(socketSession) && ClientMessageTypeEnum.USER_HEART.getCode().equals(WebSocketMessageDTO.getClientMsgType())) {
// 更新会话最后活跃时间
if (ObjectUtil.isNotEmpty(socketSession)) {
socketSession.setLastActiveTime(System.currentTimeMillis());
}
return;
}
// 用户ID为空不处理直接跳过
if (ObjectUtil.isEmpty(WebSocketMessageDTO.getFormUserId())) {
return;
}
// 会话建立成功执行业务逻辑
if (ObjectUtil.isNotEmpty(socketSession)) {
// 更新最后会话时间
socketSession.setLastActiveTime(System.currentTimeMillis());
// 找到该消息的处理器
SocketMsgCallbackInterface socketMsgCallbackInterface = SocketMessageCenter.getSocketMsgCallbackInterface(WebSocketMessageDTO.getClientMsgType());
if (ObjectUtil.isNotEmpty(socketMsgCallbackInterface)) {
// 触发回调
socketMsgCallbackInterface.callback(WebSocketMessageDTO.getClientMsgType(), WebSocketMessageDTO, socketSession);
} else {
socketChannel.getAsyncRemote().sendText("{\"serverMsgType\":\"404\"}");
}
}
}
/**
* 会话发送异常调用的方法
*
* @param session 会话信息
* @param error 错误信息
* @author majianguo
* @date 2021/6/21 下午5:14
**/
@OnError
public void onError(Session session, Throwable error) {
SocketSession<GunsSocketOperator> socketSession = SessionCenter.getSessionBySessionId(session.getId());
// 触发首次连接回调
SocketMsgCallbackInterface socketMsgCallbackInterface = SocketMessageCenter.getSocketMsgCallbackInterface(SystemMessageTypeEnum.SYS_LISTENER_ONERROR.getCode());
if (ObjectUtil.isNotEmpty(socketMsgCallbackInterface)) {
// 触发回调
socketMsgCallbackInterface.callback(SystemMessageTypeEnum.SYS_LISTENER_ONERROR.getCode(), error, socketSession);
}
}
}
|
#pragma once
// Accumulated error messages collected during compilation.
// NOTE(review): elements are heap-allocated string pointers -- ownership/cleanup
// policy is not visible here; confirm who frees them.
extern vector<string*> errors;
// Syntax-error callback (bison-style signature) -- TODO confirm it is the yacc hook.
void yyerror(const char*);
// Semantic-error report; the int is presumably a line number or error code -- verify at call sites.
void sem_error(int,const char*);
|
<gh_stars>10-100
//
// Created by ooooo on 2020/1/14.
//
#ifndef CPP_0748__SOLUTION1_H_
#define CPP_0748__SOLUTION1_H_

#include <iostream>
#include <cctype>   // tolower (was relied on transitively)
#include <climits>  // was needed for the removed INT_MAX sentinel; kept for compatibility
#include <string>
#include <unordered_map>
#include <vector>

using namespace std;

/**
 * LeetCode 748 -- Shortest Completing Word.
 * Find the shortest word containing every letter of the licence plate
 * (case-insensitive, multiplicity counted); earlier words win ties.
 */
class Solution {
public:
    // Returns true iff `word` contains at least the letter counts required in `m`.
    // (Now takes the word by const reference instead of by value.)
    bool check(const string &word, unordered_map<char, int> &m) {
        unordered_map<char, int> wordMap;
        for (const auto &c : word) wordMap[c]++;
        for (const auto &entry : m) {
            // operator[] default-inserts 0 for letters absent from the word
            if (entry.second > wordMap[entry.first]) return false;
        }
        return true;
    }

    string shortestCompletingWord(const string &licensePlate, vector<string> &words) {
        // Count required letters, lower-cased; digits/spaces are ignored.
        unordered_map<char, int> m;
        for (const auto &item : licensePlate) {
            char c = tolower(item);
            if (c >= 'a' && c <= 'z') m[c]++;
        }
        // Scan in order; keep the first strictly-shorter completing word.
        // (The old `index` tie-break was dead code: an equal-length later
        // word could never satisfy `i < index`.)
        string ans;
        for (const auto &word : words) {
            if (check(word, m) && (ans.empty() || word.size() < ans.size())) {
                ans = word;
            }
        }
        return ans;
    }
};

#endif //CPP_0748__SOLUTION1_H_
|
<filename>offer/src/main/java/com/java/study/algorithm/zuo/fsenior/class01/Code02_LeastOperatorsToExpressNumber.java
package com.java.study.algorithm.zuo.fsenior.class01;

/**
 * Empty placeholder for the "Least Operators to Express Number" exercise --
 * no implementation yet.
 */
public class Code02_LeastOperatorsToExpressNumber{
}
<reponame>coder-blog/satellite
package com.satellite.agent;
import com.alibaba.dubbo.container.Main;
import java.io.IOException;
/**
 * Standalone entry point that boots the Dubbo container for the agent module.
 * (Previous javadoc "Unit test for simple App" was a template leftover.)
 */
public class AgentDubboStartup {
    public static void main(String[] args) throws IOException {
        // Earlier Spring-context based startup, kept for reference:
        // ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("META-INF/spring/dubbo-provider.xml");
        // context.start();
        // System.in.read();
        // Delegate startup entirely to Dubbo's container bootstrap.
        Main.main(args);
    }
}
|
<filename>app/src/main/java/com/cjy/flb/bean/BindingID.java<gh_stars>0
package com.cjy.flb.bean;
/**
 * Simple value object associating a medic id with a display name and a url.
 * <p>
 * Created by Administrator on 2015/12/7 0007.
 */
public class BindingID {
    // Resource / avatar url for this binding
    private String url;
    // Display name
    private String name;
    // Medic identifier (field renamed from 'MedicId' to follow Java naming
    // conventions; the public getter/setter names are unchanged)
    private String medicId;

    /** No-arg constructor (all fields null). */
    public BindingID() {
    }

    /**
     * Full constructor.
     *
     * @param url     resource url
     * @param name    display name
     * @param medicId medic identifier
     */
    public BindingID(String url, String name, String medicId) {
        this.url = url;
        this.name = name;
        this.medicId = medicId;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getMedicId() {
        return medicId;
    }

    public void setMedicId(String medicId) {
        this.medicId = medicId;
    }
}
|
import sys  # NOTE(review): unused in this snippet -- remove or use (kept: file may continue elsewhere).

# Prompt the user for a line of text on stdin.
user_input = input("Please enter something: ")
# Echo the line back verbatim.
print(user_input)
/*
* C compiler file arm/coff.c
* Copyright (C) Codemist Ltd, 1988
* Copyright (C) Advanced Risc Machines Limited, 1993
* 'COFF' (system V unix) output routines
* SPDX-Licence-Identifier: Apache-2.0
*/
/*
* RCS $Revision$ Codemist 13j
* Checkin $Date$
* Revising $Author$
*/
/* AM: Feb 90: add AUX items for sections. Fix bug in common refs */
/* (find #if (R_DIR32 == 17) below for detailed explanation). */
/* Memo: stuff CC_BANNER in the object file in the .comment section. */
/* Put the COFF style debug info in the file too. */
/* This will cause review of the ordering of symbols */
/* target.h shall specify: TARGET_HAS_COFF, target_coff_magic = <number>, */
/* and (optionally) target_coff_prefix. */
/* maybe (one day) also the target_coff_<relocations> below too. */
#ifndef __STDC__
# include <strings.h>
# define SEEK_SET 0
#else
# include <string.h>
#endif
#include <stddef.h>
#include <time.h> /* see time() below */
#include "globals.h" /* loads host.h,options.h,target.h,defaults.h */
#include "mcdep.h"
#include "mcdpriv.h"
#include "aeops.h"
#include "store.h"
#include "codebuf.h"
#include "regalloc.h"
#include "util.h"
#include "bind.h" /* evaluate */
#include "sem.h" /* alignoftype, sizeoftype, structfield */
#include "builtin.h"
#include "xrefs.h"
#include "errors.h"
#include "simplify.h"
#include "coff.h" /* Codemist private version */
#include "fname.h"
/* The following values are suitable for the 88000 ocs, but act as */
/* suitable defaults. Values are best specified in 'target.h'. */
#ifndef R_DIR32
# define R_DIR32 133 /* absolute relocation. */
#endif
#ifndef R_PCRLONG
# define R_PCRLONG 129 /* pc relative relocation. */
#endif
#ifndef R_REFHI
# define R_REFHI 131 /* R_HVRT16 */
#endif
#ifndef R_REFLO
# define R_REFLO 132 /* R_LVRT16 */
#endif
#ifndef target_coff_prefix
# define target_coff_prefix "_"
#endif
/* We now follow 'as' and always generate a BSS section which is */
/* usually empty. There MAY be common ext. ref. problems otherwise. */
#ifdef TARGET_IS_STARDENT
#define NSCNS 2
#else
#define NSCNS 3
#endif
#define N_TEXT 1
#define N_DATA 2
#define N_BSS 3
#define HDRSIZE (sizeof(struct filehdr) + NSCNS*sizeof(struct scnhdr))
#define xr_section xr_objflg
#define xr_deleted xr_objflg1
/* The following #defines give the logical section origins. */
#define ORG_TEXT 0
#define ORG_DATA 0
#define ORG_BSS 0
#define SYM_FULLY_RELOCATED ((Symstr *)0) /* internal PCreloc ref */
static bool byte_reversing = 0;
/* byte_reversing == (host_lsbytefirst != target_lsbytefirst). */
/* (But faster to test one static per word than 2 externs per word). */
/* Private interfaces to debug table generator */
static int32 dbg_fixup(ExtRef *syms, int32 symcount);
static void dbg_outsymtab(void);
static int32 dbg_lineinfo(void);
static int32 obj_fwrite_cnt;
#define ROR(x, n) (((x) << (32-(n))) | ((x) >> (n)))
/* Convert the 32-bit word w from host to target byte order.          */
/* When byte_reversing (cross-sex compilation) the four bytes are     */
/* exchanged via the rotate trick: t marks the bytes that must cross  */
/* an extra step, ROR(w,8) moves every byte one place, and the final  */
/* xor corrects the two bytes the rotate left wrong.  Otherwise w is  */
/* returned unchanged.                                                */
static unsigned32 TargetWord(unsigned32 w) {
    if (byte_reversing) {
        unsigned32 t = w ^ ROR(w, 16);
        t &= ~0xff0000;
        w = ROR(w, 8);
        return w ^ (t >> 8);
    } else
        return w;
}
/* Return the low 16 bits of w in target byte order.  When cross-sex  */
/* compiling the two bytes are exchanged; otherwise w is returned     */
/* truncated to unsigned, exactly as before.                          */
static unsigned TargetHalf(unsigned32 w) {
    if (!byte_reversing) return (unsigned)w;
    {   unsigned lobyte = (unsigned)(w & 0xff);
        unsigned hibyte = (unsigned)((w >> 8) & 0xff);
        return (lobyte << 8) | hibyte;
    }
}
/* Write m items of n bytes each to f.  Under DEBUG_OBJ the bytes are */
/* printed as a hex dump (with a running offset) INSTEAD of being     */
/* written raw.  4-byte items are byte-reversed word by word when     */
/* cross-sex compiling (same rotate trick as TargetWord); any other   */
/* item size is written as-is.                                        */
static void obj_fwrite(void *buff, int32 n, int32 m, FILE *f) {
    if (debugging(DEBUG_OBJ)) {
        int32 i;
        fprintf(f, "%.6lx:", (long)obj_fwrite_cnt);
        obj_fwrite_cnt += n*m;
        for (i=0; i<n*m; i++)
            fprintf(f, " %.2x", (int)((unsigned8 *)buff)[i]);
        fprintf(f, "\n");
    } else if (n == 4 && byte_reversing) {
        /* word by word output */
        unsigned32 v, t, *p, w;
        for (p = (unsigned32 *)buff; m > 0; --m, ++p) {
            v = *p;
            t = v ^ ROR(v, 16);
            t &= ~0xff0000;
            v = ROR(v, 8);
            w = v ^ (t >> 8);
            fwrite(&w, 4, 1, f);
        }
    } else
        fwrite(buff, (size_t)n, (size_t)m, f);
}
FILE *objstream;
/* imports: codebase, dataloc */
static unsigned32 ncoderelocs, ndatarelocs, obj_symcount;
static unsigned32 constdatabase;
ExtRef *obj_symlist;
CodeXref *codexrefs;
static int32 obj_lnno;
/* In general, COFF requires references to symbols which are defined */
/* in codeseg, dataseg or bssseg to be replaced by a reference to */
/* segment+offset. Hence the code for a C routine with an 'extern' */
/* reference is not complete until we know whether it was truly extern */
/* or merely 'forward'. So buffer the code (in codevec) from each fn */
/* into 'xcodevec' and then call relocate_code_refs_to_locals() before */
/* writing xcodevec into the COFF .o file. */
#define MAXCODESEGS 256
static int32 (*(xcodevec[MAXCODESEGS]))[CODEVECSEGSIZE], codesize;
#ifdef TARGET_HAS_BYTE_INSTRUCTIONS
#define xcode_byte_(q) ((unsigned8 *)(*xcodevec[(q)>>(CODEVECSEGBITS+2)])) \
[(q)&(CODEVECSEGSIZE*4-1)]
#else
#define xcode_inst_(q) (*xcodevec[(q)>>(CODEVECSEGBITS+2)]) \
[((q)>>2)&(CODEVECSEGSIZE-1)]
#endif
#define get_code(q) xcode_inst_(q)
#define set_code(q,n) xcode_inst_(q)=n
/* The following code is in flux and allows for the generic compiler */
/* interfaces to use byte addresses which are corrected here. It now */
/* occurs to AM that we could have used word addresses more, but this */
/* could also be a can of worms. Let's try this way FIRST. */
#ifdef TARGET_IS_ADENART
# define machine_address_(v,f) ((v)/((f) & xr_code ? 3:8))
#else
# define machine_address_(v,f) (v)
#endif
/* Append nwords words from src to the buffered code image xcodevec,  */
/* allocating a fresh CODEVECSEGSIZE-word segment whenever the        */
/* current one fills.  codesize tracks the buffered size in bytes.    */
static void buffer_code(int32 *src, int32 nwords) {
    int32 *p;
    for (p = src; nwords > 0; --nwords) {
        int32 hi = codesize >> (CODEVECSEGBITS+2);
        int32 lo = (codesize >> 2) & (CODEVECSEGSIZE-1);
        if (lo == 0) { /* need another segment */
            if (hi >= MAXCODESEGS) cc_fatalerr(coff_fatalerr_toobig);
            xcodevec[hi] = (int32(*)[CODEVECSEGSIZE]) GlobAlloc(
                SU_Other, sizeof(*xcodevec[0]));
        }
        (*xcodevec[hi])[lo] = *p++; codesize += 4;
    }
}
static int32 obj_checksym(Symstr *s);
/* @@@ The code here is getting silly. X_JmpAddr is not really */
/* a PCreloc. */
/* Patch the buffered code image for references to symbols defined in */
/* this compilation unit: PC-relative branches to local code get      */
/* their final offsets (and are marked SYM_FULLY_RELOCATED so no      */
/* relocation record is emitted), address literals get segment-       */
/* relative values (completed by obj_checksym()).  References to      */
/* genuinely external symbols are left for the relocation records.    */
static void relocate_code_refs_to_locals(void) {
    /* This proc. should soon callback to a routine in gen.c: */
    CodeXref *cxr;
    for (cxr = codexrefs; cxr != NULL; cxr = cxr->codexrcdr)
    { Symstr *s = cxr->codexrsym;
      /* obj_checksym() is called for its side effect; symext_(s) is the ref */
      ExtRef *x = ((void)obj_checksym(s), symext_(s));
      int32 codeoff = cxr->codexroff & 0xffffff;
      int32 w = get_code(codeoff);
      switch (cxr->codexroff & 0xff000000) {
        case X_PCreloc: /* on the MIPS also means X_JmpAddr -- rationalise */
          /* pcrelative code ref (presumed to) to code. */
          /* @@@ cast of array to code pointer causes the following */
          /* syserr(). Needs fixing properly. */
          if (!(x->extflags & xr_code))
              syserr(syserr_coff_reloc);
          if (x->extflags & (xr_defloc | xr_defext)) {
              /* defined in this compilation unit so relocate... */
              cxr->codexrsym = SYM_FULLY_RELOCATED;
/* @@@ AM: before 'rationalising' this code, it is IMPORTANT to build */
/* in the 29000 or 88000 32 bits in 64 bits relocation modes. */
/* AM: note that in all the following cases any offset in the code is */
/* ignored and simply overwritten. Change this one day? */
#ifdef TARGET_IS_ARM
/* On the ARM relocate a B or BL; offset in WORDs; prefetch 8 bytes. */
#define obj_codeupdate(n) \
 set_code(codeoff, (w & 0xff000000) | (((n)-8) >> 2) & 0x00ffffff)
#endif
              obj_codeupdate(x->extoffset-codeoff);
          } else {
              /* Branch to external symbol. Most Unices expect to be */
              /* tight branch to self. (On the ARM, the Unix linker */
              /* expects unrelocated branch to be -2 words, */
              obj_codeupdate(0);
          }
          if (debugging(DEBUG_OBJ))
              cc_msg("Fixup %.8lx extoff=%.8lx, codeoff=%.8lx, make %.8lx\n",
                     (long)w, (long)x->extoffset, (long)codeoff,
                     (long)get_code(codeoff));
          break;
        case X_absreloc:
        case X_backaddrlit: /* abs ref to external */
/* Code here may need changing if you set ORG_TEXT &c to non-zero. */
          if (x->extflags & (xr_defloc | xr_defext)) {
            /* code ref to local code or data via address literal...
             * (or clipper/vax absolute 32 bit branch).
             * Unix linker cannot cope with filling in the address literal
             * with the value of a locally defined symbol: we have to convert
             * it into a reference relative to v$codeseg or v$dataseg.
             * This process is completed by obj_checksym().
             * AM: note that we do take notice of the old value here.
             */
            if (x->extflags & xr_constdata) w += constdatabase;
            set_code(codeoff, w + x->extoffset);
          }
          break;
      }
    }
}
/* Flush the buffered code image to the object stream: pad to a       */
/* double-word boundary if the target aligns doubles, resolve local   */
/* references, then write the segments.                               */
static void obj_writecode(void) {
    int i = 0;
#if (alignof_double > 4) /* TARGET_ALIGNS_DOUBLES */
    if (codesize & 7) {
        static int32 pad[] = {0,0,0};
        buffer_code(pad, 1); /* Double word aligned */
    }
#endif
    relocate_code_refs_to_locals();
    while ((codesize>>2) - CODEVECSEGSIZE*i > CODEVECSEGSIZE)
        obj_fwrite(xcodevec[i++], 4, CODEVECSEGSIZE, objstream);
    obj_fwrite(xcodevec[i], 4,(codesize>>2)-CODEVECSEGSIZE*i, objstream);
    if (ferror(objstream)) cc_fatalerr(driver_fatalerr_io_object);
}
/* Called after each routine is compiled -- code_instvec_ (doubly     */
/* indexed) has codep (multiple of 4) bytes of code.                  */
/* In BSD a.out, this has to be buffered to the end of compilation    */
/* so that the BSD linker can be cow-towed to.                        */
/* #define COMPILING_ON_SMALL_MEMORY can be used to buffer on disc.   */
void obj_codewrite(Symstr *name) {
    int32 i, nwords;
    IGNORE(name);
    if (byte_reversing) {
        /* cross-sex: convert each instruction word to target sex     */
        /* individually before buffering.                             */
        int32 n = codep, w;
        for (i = 0; i < n; i += 4) {
            w = totargetsex(code_inst_(i), code_flag_(i));
            buffer_code(&w, 1);
        }
    } else
        /* same sex: buffer whole code segments at a time */
        for (i = 0, nwords = codep>>2; nwords > 0; ++i) {
            int32 seg = nwords > CODEVECSEGSIZE ? CODEVECSEGSIZE : nwords;
            buffer_code(code_instvec_(i), seg); nwords -= seg;
        }
}
/* the remaining fns are intended to be internal only */
/* In COFF, the data segment label is required to be .data etc. */
#define coffname_(s) ((s) == bindsym_(codesegment) ? ".text" : \
(s) == bindsym_(datasegment) ? ".data" : \
(s) == bindsym_(bsssegment) ? ".bss" : \
symname_(s))
/* TEMPHACK: */
typedef union AuxEnt {
struct syment pad;
struct { long int x_scnlen;
unsigned short x_nreloc;
unsigned short x_nlinno;
} x_scn;
} AuxEnt;
typedef struct StringTabEntry StringTabEntry;
struct StringTabEntry {
StringTabEntry *cdr;
char name[1];
};
static StringTabEntry *stringtab;
static unsigned32 stringtabpos;
/* Emit one COFF symbol table entry (plus optional AUX entry auxp).   */
/* Externals (other than dot-names) get target_coff_prefix prepended. */
/* Names longer than SYMNMLEN go into the string table and n_offset   */
/* records their position; short names are stored inline in n_name.   */
static void obj_stab(char *name, int32 val, int sect, int type, int sclass, void *auxp) {
    size_t n = strlen(name),
           k = sclass != C_EXT || name[0]=='.' ? 0 : sizeof(target_coff_prefix)-1;
    struct syment v;
    memclr(&v, SYMESZ);
    if (debugging(DEBUG_OBJ)) cc_msg("sym %s ", name);
    if (n+k > SYMNMLEN) { /* NB: '>' (not '>=') is OK here */
        /* do the long form, name in the string table */
        StringTabEntry *p = (StringTabEntry *)SynAlloc((int32)sizeof(StringTabEntry)+n+k);
        if (k > 0) memcpy(p->name, target_coff_prefix, k);
        memcpy(&p->name[k], name, n+1);
        v.n_zeroes = 0;
        v.n_offset = TargetWord(stringtabpos);
        stringtabpos += (size_t)(n+k+1);
        cdr_(p) = stringtab;
        stringtab = p;
    } else {
        /* strncpy zero-pads the inline name field */
        if (k > 0) strncpy(v.n_name, target_coff_prefix, k);
        strncpy(v.n_name+k, name, SYMNMLEN-k);
    }
    v.n_value = TargetWord(val);
    v.n_scnum = TargetHalf(sect);
    v.n_type = TargetHalf(type); /* not set yet */
    v.n_sclass = sclass;
    v.n_numaux = auxp == NULL ? 0 : 1;
    obj_fwrite(&v, 1, SYMESZ, objstream);
    if (auxp != NULL) obj_fwrite(auxp, 1, SYMESZ, objstream);
}
/* TEMPHACK: */
/* Fill in a section AUX entry: section length plus its relocation    */
/* and line-number counts, all in target byte order.                  */
static void SetSegAux(AuxEnt *p, int32 seglen, int32 nrelocs, int32 nlno) {
    memclr(p, sizeof(*p));
    p->x_scn.x_scnlen = TargetWord(seglen);
    p->x_scn.x_nreloc = TargetHalf(nrelocs);
    p->x_scn.x_nlinno = TargetHalf(nlno);
}
/* Write every buffered symbol whose flags satisfy                    */
/* (flags & mask) == maskedval, in list order.  The three section-    */
/* defining symbols additionally get an AUX entry recording section   */
/* length and relocation/line-number counts.                          */
static void obj_outsymtab(int32 mask, int32 maskedval) {
    ExtRef *x;
    for (x = obj_symlist; x != 0; x = x->extcdr) {
        Symstr *s = x->extsym;
        int32 flags = x->extflags;
        if ((flags & mask) == maskedval) {
            char *name = coffname_(s);
/* The next line stores the value (= offset in segment) for code or */
/* data definitions, and stores zero for external refs, except for */
/* pcc-style common refs, in which it stores the length. */
            int32 val = x->extoffset;
/* note that C_EXTDEF is documented as "internal to C, use C_EXT". */
            int sclass = flags & xr_defloc ? C_STAT : C_EXT;
            AuxEnt *auxp = NULL;
            AuxEnt aux;
            int sect;
            if (!(flags & xr_defloc+xr_defext))
                sect = N_UNDEF;
            else if (flags & xr_code)
                /* Fix: text-section symbols are biased by ORG_TEXT   */
                /* (was ORG_DATA -- latent bug, harmless only while   */
                /* both origins are 0).                               */
                sect = N_TEXT, val += ORG_TEXT;
            else if (flags & xr_constdata)
                sect = N_TEXT, val += ORG_TEXT+constdatabase;
            else if (flags & xr_bss)
                sect = N_BSS, val += ORG_BSS;
            else
                sect = N_DATA, val += ORG_DATA;
/* TEMPHACK: */ if (s == bindsym_(codesegment)) SetSegAux(auxp = &aux, codesize, ncoderelocs, obj_lnno);
/* TEMPHACK: */ if (s == bindsym_(datasegment)) SetSegAux(auxp = &aux, data.size, ndatarelocs, 0);
/* TEMPHACK: */ if (s == bindsym_(bsssegment)) SetSegAux(auxp = &aux, bss_size, 0, 0);
            obj_stab(name, val, sect, T_NULL, sclass, auxp);
        }
    }
}
/* Write the string table: its total length word first, then the      */
/* accumulated long names in the order they were added (the list is   */
/* built newest-first, so it is reversed before writing).             */
static void obj_outstringtab(void) { /* write the string table, preceded by its length */
    obj_fwrite(&stringtabpos, sizeof(stringtabpos), 1, objstream);
    { StringTabEntry *p;
      stringtab = (StringTabEntry *)dreverse((List *)stringtab);
      for (p = stringtab; p != 0; p = cdr_(p))
          obj_fwrite(p->name, 1, (int32)strlen(p->name)+1, objstream);
    }
}
/* Return the symbol table index that relocations against s should    */
/* use: the symbol's own index for true externals and the section-    */
/* defining symbols, otherwise the index of the section that defines  */
/* it (the in-section offset having been folded into the code/data    */
/* by relocate_code_refs_to_locals() / obj_writedata()).              */
static int32 obj_checksym(Symstr *s) {
    ExtRef *x = symext_(s);
    if (x != 0) {
        if (!(x->extflags & xr_defloc+xr_defext) ||
            s==bindsym_(codesegment) || s==bindsym_(datasegment) || s==bindsym_(bsssegment))
            /* honest external or segment defining symbol */
            return x->extindex;
        else
            return obj_checksym(x->extflags & (xr_code+xr_constdata) ? bindsym_(codesegment) :
                                x->extflags & xr_bss ? bindsym_(bsssegment) :
                                bindsym_(datasegment));
    }
    syserr(syserr_coff_checksym, s);
    return 0;
}
/* Write one code relocation record of type r and bump ncoderelocs.   */
static void obj_wr_reloc(struct reloc *p, int r) {
    p->r_type = TargetHalf(r);
    /* NB note that sizeof(struct reloc) > RELSZ on many machines */
    obj_fwrite(p, 1, RELSZ, objstream);
    ncoderelocs++;
}
/* Emit relocation records for all code references that were not      */
/* fully resolved to local definitions (SYM_FULLY_RELOCATED entries   */
/* are skipped).                                                      */
static void obj_coderelocation() {
    CodeXref *x;
    struct reloc v;
    for (x = codexrefs; x!=NULL; x = x->codexrcdr) {
        Symstr *s = x->codexrsym;
        if (s == SYM_FULLY_RELOCATED) continue;
        v.r_vaddr = TargetWord((x->codexroff & 0xffffff) + ORG_TEXT);
        v.r_symndx = TargetWord(obj_checksym(s));
        switch (x->codexroff & 0xff000000) {
        case X_PCreloc: /* PC rel ref to external */
            if (debugging(DEBUG_OBJ)) cc_msg("pcreloc$r ", s);
            obj_wr_reloc(&v, R_PCRLONG); /* target_coff_pcrel? */
            break;
        case X_absreloc: /* abs ref to external */
        case X_backaddrlit: /* ditto, but literal */
            if (debugging(DEBUG_OBJ)) cc_msg("addreloc$r ", s);
            obj_wr_reloc(&v, R_DIR32); /* target_coff_abs? */
            break;
        default:
            syserr(syserr_coff_reloc1, (long)x->codexroff);
            break;
        }
    }
}
/* Emit absolute relocation records for the data xref list x, each    */
/* biased by 'offset' within the section; returns the number written. */
static int32 obj_datarelocation(DataXref *x, int32 offset) {
    struct reloc v;
    int32 n = 0;
    for (; x != NULL; x = x->dataxrcdr, n++) {
        Symstr *s = x->dataxrsym;
        /* all data relocs are implicitly X_absreloc so far */
        if (debugging(DEBUG_OBJ)) cc_msg("data reloc $r ", s);
        v.r_type = TargetHalf(R_DIR32); /* target_coff_abs? */
        v.r_vaddr = TargetWord(x->dataxroff+offset+ORG_DATA); /* & 0xffffff ? */
        v.r_symndx = TargetWord(obj_checksym(s));
        /* NB note that sizeof(struct reloc) > RELSZ on many machines */
        obj_fwrite(&v, 1, RELSZ, objstream);
    }
    return n;
}
/* Write the initialised data image, interpreting the DataInit list   */
/* in exactly the order gendc produced it.  ADCON entries referring   */
/* to locally defined symbols are pre-relocated here (matching the    */
/* relocation records emitted by obj_datarelocation()).               */
static void obj_writedata(DataInit *p) { /* follows gendc exactly! */
    for (; p != 0; p = p->datacdr)
    { int32 rpt = p->rpt, sort = p->sort, len = p->len;
      unsigned32 val = p->val;
      switch (sort) {
        case LIT_LABEL: /* name only present for c.xxxasm */
            break;
        default: syserr(syserr_coff_gendata, (long)sort);
        case LIT_BXXX: /* The following are the same as LIT_NUMBER */
        case LIT_BBX: /* except for cross-sex compilation */
        case LIT_BBBX:
        case LIT_BBBB:
        case LIT_HX:
        case LIT_HH:
        case LIT_BBH:
        case LIT_HBX:
        case LIT_HBB:
            if (byte_reversing) {
                /* byte-reverse the word (same trick as TargetWord) */
                unsigned32 t;
                val = totargetsex(val, (int)sort);
                t = val ^ ROR(val, 16);
                t &= ~0xff0000;
                val = ROR(val, 8);
                val = val ^ (t >> 8);
            }
            while (rpt-- != 0) obj_fwrite(&val, 1, len, objstream);
            break;
        case LIT_NUMBER:
            if (len != 4) syserr(syserr_coff_datalen, (long)len);
            /* beware: sex dependent... */
            while (rpt-- != 0) obj_fwrite(&val, 4, 1, objstream);
            break;
        case LIT_ADCON: /* (possibly external) name + offset */
            { Symstr *sv = (Symstr *)len; /* this reloc also in dataxrefs */
              ExtRef *xr= symext_(sv);
              (void)obj_checksym(sv);
              if (xr->extflags & (xr_defloc|xr_defext)) {
                  /* locally defined: fold the symbol's offset in now */
                  val += xr->extoffset;
                  if (xr->extflags & xr_constdata) val += constdatabase;
              }
              /* beware: sex dependent... */
              while (rpt-- != 0) obj_fwrite(&val, 4, 1, objstream);
              break;
            }
        case LIT_FPNUM:
            { FloatCon *fc = (FloatCon *)val;
              /* do we need 'len' when the length is in fc->floatlen?? */
              if (len != 4 && len != 8)
                  syserr(syserr_coff_data, (long)rpt, (long)len, fc->floatstr);
/* The following strange code ensures that doubles are correctly */
/* sex-reversed if required to be - obj_fwrite() only reverses */
/* items of length 4... This is a trap for the unwary. */
              while (rpt-- != 0) {
                  obj_fwrite(&(fc->floatbin.irep[0]), 4, 1, objstream);
                  if (len == 4) continue;
                  obj_fwrite(&(fc->floatbin.irep[1]), 4, 1, objstream);
              }
              break;
            }
      }
    }
}
/* exported functions... */
/* Record a reference to (or definition of) symbol s with the given   */
/* xr_ flags.  loc is the segment offset for definitions, or the      */
/* length for pcc-style common references.  Returns the code offset   */
/* of an already-defined function, else -1 (see the final comment).   */
int32 obj_symref(Symstr *s, int flags, int32 loc) {
    ExtRef *x;
    if ((x = symext_(s)) == 0) { /* saves a quadratic loop */
        if (obj_symcount > 0x7fffffff)
            cc_fatalerr(coff_fatalerr_toomany);
        x = (ExtRef *)GlobAlloc(SU_Xref, sizeof(ExtRef));
        x->extcdr = obj_symlist,
        x->extsym = s,
        x->extindex = obj_symcount++,
        x->extflags = 0,
        x->extoffset = 0;
        obj_symlist = symext_(s) = x;
/* TEMPHACK: */
        /* section symbols take an extra index slot for their AUX entry */
        if (s == bindsym_(codesegment) || s == bindsym_(datasegment) || s == bindsym_(bsssegment)) {
            x->extflags |= xr_section;
            obj_symcount++;
        }
    }
/* The next two lines cope with further ramifications of the abolition of */
/* xr_refcode/refdata in favour of xr_code/data without xr_defloc/defext */
/* qualification. This reduces the number of bits, but needs more */
/* checking in that a symbol defined as data, and then called via */
/* casting to a code pointer may acquire defloc+data and then get */
/* xr_code or'ed in. Suffice it to say this causes confusion. */
/* AM wonders if gen.c ought to be more careful instead. */
    if (flags & (xr_defloc+xr_defext)) x->extflags &= ~(xr_code+xr_data);
    if (x->extflags & (xr_defloc+xr_defext)) flags &= ~(xr_code+xr_data);
/* end of fix, but perhaps we should be more careful about mult. defs.? */
    x->extflags |= flags;
    if (flags & xr_defloc+xr_defext) {
        /* private or public data or code */
        x->extoffset = machine_address_(loc,flags);
    } else if ((loc > 0) && !(flags & xr_code) &&
               !(x->extflags & xr_defloc+xr_defext)) {
        /* common data, not already defined */
        /* -- put length in x->extoffset */
        if (loc > x->extoffset) x->extoffset = machine_address_(loc,flags);
    }
/* The next line returns the offset of a function in the codesegment */
/* if it has been previously defined -- this saves store on the arm */
/* and allows short branches on other machines. Otherwise it */
/* returns -1 for undefined objects or data objects. */
    return ((x->extflags & (xr_defloc+xr_defext)) && (x->extflags & xr_code) ?
            x->extoffset : -1);
}
/* For fortran... */
/* Begin a fortran BLOCK DATA common block: export 'name' at the      */
/* current .data offset (COFF has no real common-definition support,  */
/* so the block simply becomes an exported .data symbol).             */
void obj_common_start(Symstr *name) {
/* There is no real support in COFF for common definitions (BLOCK */
/* DATA). What needs to be done is to turn the block name into */
/* an exported symbol in the normal data area (.data). */
    labeldata(name);
    obj_symref(name, xr_data+xr_defext, data.size);
}
/* End of a common block: nothing to do in COFF. */
void obj_common_end(void) {}
/* Reset all per-compilation-unit object-writer state and pre-register */
/* the .bss section symbol.                                            */
void obj_init() {
    ncoderelocs = 0, ndatarelocs = 0, obj_symcount = 0;
    obj_symlist = 0;
    data.xrefs = 0;
    codexrefs = 0;
    codesize = 0; /* remove */
    /* string table starts just after its own length word */
    stringtabpos = sizeof(stringtabpos);
    stringtab = NULL;
    obj_lnno = 0;
    byte_reversing = (target_lsbytefirst != host_lsbytefirst);
    /* codesegment and datasegment are arranged elsewhere */
    obj_symref(bindsym_(bsssegment), xr_bss+xr_defloc, 0L);
}
/* Write the COFF file header followed by the .text, .data and .bss   */
/* section headers.  Called once at the start (with placeholder       */
/* counts) and again from obj_trailer() after a rewind, once the      */
/* sizes and relocation/line-number counts are known.                 */
/* NOTE(review): three section headers are written unconditionally    */
/* even though NSCNS is 2 on TARGET_IS_STARDENT -- verify.            */
void obj_header() {
    struct filehdr h;
    struct scnhdr s;
    if ((ncoderelocs | ndatarelocs) & ~(unsigned32)(unsigned short)-1)
        cc_fatalerr(coff_fatalerr_toomany);
    obj_fwrite_cnt = 0;
    h.f_magic = TargetHalf(target_coff_magic);
    h.f_nscns = TargetHalf(NSCNS);
    h.f_timdat = TargetWord(time(NULL)); /* hope unix format -- norcroft use too. */
    h.f_symptr = TargetWord(HDRSIZE + codesize + data.size +
                            /* @@@ round to multiple of 4? (RELSZ = 10) */
                            ncoderelocs*RELSZ + ndatarelocs*RELSZ + obj_lnno*6);
    h.f_nsyms = TargetWord(obj_symcount);
    h.f_opthdr = 0; /* no optional header */
    h.f_flags = 0; /* default F_xxxx flags */
    obj_fwrite(&h, 1, sizeof(h), objstream);
    /* code section header */
    strncpy(s.s_name, ".text", sizeof(s.s_name));
    s.s_paddr = s.s_vaddr = TargetWord(ORG_TEXT);
    s.s_size = TargetWord(codesize);
    s.s_scnptr = TargetWord(HDRSIZE);
    s.s_relptr = TargetWord(HDRSIZE + codesize + data.size);
    s.s_lnnoptr = TargetWord(obj_lnno == 0 ? 0 : HDRSIZE + codesize + data.size +
                             ncoderelocs*RELSZ + ndatarelocs*RELSZ);
    s.s_nreloc = TargetHalf(ncoderelocs);
    s.s_nlnno = TargetHalf(obj_lnno);
    s.s_flags = TargetWord(STYP_TEXT);
    obj_fwrite(&s, 1, sizeof(s), objstream);
    /* data section header */
    strncpy(s.s_name, ".data", sizeof(s.s_name));
    s.s_paddr = s.s_vaddr = TargetWord(ORG_DATA);
    s.s_size = TargetWord(data.size);
    s.s_scnptr = TargetWord(HDRSIZE + codesize);
    s.s_relptr = TargetWord(HDRSIZE + codesize + data.size + ncoderelocs*RELSZ);
    s.s_lnnoptr = 0; /* no line number info */
    s.s_nreloc = TargetHalf(ndatarelocs);
    s.s_nlnno = 0; /* no line number info */
    s.s_flags = TargetWord(STYP_DATA);
    obj_fwrite(&s, 1, sizeof(s), objstream);
    /* bss section header (no file contents, hence s_scnptr == 0) */
    strncpy(s.s_name, ".bss", sizeof(s.s_name));
    s.s_paddr = s.s_vaddr = TargetWord(ORG_BSS);
    s.s_size = TargetWord(bss_size);
    s.s_scnptr = 0;
    s.s_relptr = 0;
    s.s_lnnoptr = 0; /* no line number info */
    s.s_nreloc = 0; /* no relocations */
    s.s_nlnno = 0; /* no line number info */
    s.s_flags = TargetWord(STYP_BSS);
    obj_fwrite(&s, 1, sizeof(s), objstream);
}
/* Finish the object file: reverse the accumulated (newest-first)     */
/* lists, write code (with constdata appended to .text), data,        */
/* relocations, line info and the symbol table, then rewind and       */
/* rewrite the header now that all counts are known.                  */
void obj_trailer() {
    codexrefs = (CodeXref *)dreverse((List *)codexrefs);
    data.xrefs = (DataXref *)dreverse((List *)data.xrefs);
    constdata.xrefs = (DataXref *)dreverse((List *)constdata.xrefs);
    obj_symlist = (ExtRef *)dreverse((List *)obj_symlist);
    /* oldest = smallest numbered first */
    obj_symcount = dbg_fixup(obj_symlist, obj_symcount);
    if (debugging(DEBUG_OBJ)) cc_msg("writecode\n");
    /* constdata lives in .text immediately after the code */
    constdatabase = codesize;
    obj_writecode();
    obj_writedata(constdata.head);
    codesize += constdata.size;
    if (debugging(DEBUG_OBJ)) cc_msg("writedata\n");
    obj_writedata(data.head);
    if (debugging(DEBUG_OBJ)) cc_msg("coderelocation\n");
    obj_coderelocation();
    /* constdata relocations are counted with the code relocations */
    ncoderelocs += obj_datarelocation(constdata.xrefs, constdatabase);
    if (debugging(DEBUG_OBJ)) cc_msg("datarelocation\n");
    ndatarelocs = obj_datarelocation(data.xrefs, 0L);
    obj_lnno = dbg_lineinfo();
    if (debugging(DEBUG_OBJ)) cc_msg("symtab\n");
    dbg_outsymtab();
    if (debugging(DEBUG_OBJ)) cc_msg("rewind\n");
    rewind(objstream); /* works for hex format too */
    if (debugging(DEBUG_OBJ)) cc_msg("rewriting header\n");
    obj_header(); /* re-write header at top of file */
    /* file now opened and closed in main(). */
}
#ifdef TARGET_HAS_DEBUGGER
/* Storage classes */
#define C_NULL 0
#define C_AUTO 1
/*#define C_EXT 2*/ /* extern (ref) (in coff.h) */
/*#define C_STAT 3*/ /* static (in coff.h) */
#define C_REG 4
#define C_EXTDEF 5
#define C_LABEL 6
#define C_ULABEL 7 /* undefined label (?) */
#define C_MOS 8 /* member of structure */
#define C_ARG 9
#define C_STRTAG 10 /* structure tag */
#define C_MOU 11 /* member of union */
#define C_UNTAG 12 /* union tag */
#define C_TPDEF 13 /* typedef */
#define C_USTATIC 14 /* unitialised static - what is this? */
#define C_ENTAG 15 /* enumeration tag */
#define C_MOE 16 /* member of enumeration */
#define C_REGPARM 17 /* register param */
#define C_FIELD 18 /* bitfield (presumably also implicitly MOS) */
#define C_BLOCK 100
#define C_FCN 101
#define C_EOS 102 /* end of structure (enumeration, union?) */
#define C_FILE 103
#define C_HIDDEN 106 /* static with possibly clashing name */
/* Section numbers */
#define N_DEBUG (-2)
#define N_ABS (-1)
/*#define N_UNDEF 0*/ /* in coff.h */
/*#define N_TEXT 1*/ /* see above */
/*#define N_DATA 2*/ /* see above */
/*#define N_BSS 3*/ /* see above */
/* fundamental types */
/*#define T_NULL 0*/ /* in coff.h */
#define T_LDOUBLE 1
#define T_CHAR 2 /* (signed) */
#define T_SHORT 3
#define T_INT 4
#define T_LONG 5
#define T_FLOAT 6
#define T_DOUBLE 7
#define T_STRUCT 8
#define T_UNION 9
#define T_ENUM 10
#define T_MOE 11
#define T_UCHAR 12
#define T_USHORT 13
#define T_UINT 14
#define T_ULONG 15
/* derived types */
#define DT_NON 0
#define DT_PTR 1
#define DT_FCN 2
#define DT_ARRAY 3
int usrdbgmask;
static int32 anonindex;
#define DbgAlloc(n) GlobAlloc(SU_Dbg, n)
typedef struct DbgList DbgList;
typedef struct {
int w;
unsigned8 modct, dimct;
DbgList *tag;
int32 *dims;
} Dbg_Type;
typedef struct StructElt StructElt;
typedef struct EnumElt EnumElt;
struct StructElt {
StructElt *cdr; /* must be first for dreverse */
int32 val;
char *name;
Dbg_Type type;
};
struct EnumElt {
EnumElt *cdr; /* must be first for dreverse */
int32 val;
char *name;
};
typedef struct FileCoord FileCoord;
struct FileCoord {
FileCoord *cdr;
int line;
int32 codeaddr;
};
#define DbgListVarSize(variant) \
((size_t)(sizeof(p->car.variant)+offsetof(DbgList,car)))
/* The following is the *internal* data structure in which debug info. */
/* is buffered. */
#define S_Undef 16
typedef enum {
Ignore,
File,
Proc,
EndProc,
BlockStart,
BlockEnd,
Var,
Type,
Enum,
Union,
Struct,
UndefEnum = Enum+S_Undef,
UndefUnion = Union+S_Undef,
UndefStruct = Struct+S_Undef,
UndefVar = Var+S_Undef
} DbgListSort;
struct DbgList {
DbgList *cdr;
DbgListSort sort;
int32 index;
union {
struct {
char *name;
DbgList *next;
} File;
struct {
Dbg_Type type;
int stgclass;
Symstr *name;
unsigned8 oldstyle;
int sourcepos;
int32 filepos;
int32 entryaddr, bodyaddr; /* see dbg_bodyproc */
DbgList *end; /* corresponding EndProc item */
FileCoord *linelist;
} Proc;
struct {
int sourcepos;
int32 endaddr;
char *fileentry;
} EndProc;
struct {
Dbg_Type type;
int stgclass;
Symstr *name;
int32 location;
int section;
int sourcepos;
} Var;
struct {
int32 entries;
int32 size;
bool named;
union {
int32 anonindex;
char *name;
} tag;
union {
StructElt *s;
EnumElt *e;
} elts;
} Tag;
struct {
union {
DbgList *next;
int32 codeaddr;
} s;
DbgList *p; /* block starts only: previous block start if */
/* not yet closed, else block end */
} Block;
} car;
};
static DbgList *locdbglist, *globdbglist, *statdbglist, *dbglistproc;
static DbgList *dbglistblockstart;
static DbgList *dbglistfile;
/* Ensure the current debug item stream is attributed to source file 'f'.
 * A new File item is prepended to locdbglist only when the file changes
 * AND the previous file contributed something (otherwise it is dropped);
 * pending static items are folded into the local list at the same point.
 * NOTE(review): file names are compared by pointer, not strcmp - this
 * assumes the front end interns file-name strings; confirm.
 */
static void CheckFile(char *f) {
    if (dbglistfile == NULL || dbglistfile->car.File.name != f) {
        if (locdbglist == NULL || locdbglist != dbglistfile || statdbglist != NULL) {
            /* A file containing something of interest (otherwise lose it) */
            DbgList *p = (DbgList *)DbgAlloc(DbgListVarSize(File));
            /* Fold accumulated file-scope statics into the local list. */
            locdbglist = (DbgList *)nconc((List *)statdbglist, (List *)locdbglist);
            statdbglist = NULL;
            cdr_(p) = locdbglist;
            p->sort = File;
            p->index = 0;
            p->car.File.next = NULL;
            /* Chain .file entries together so WriteEntries can emit each
               one's "next file" symbol index. */
            if (dbglistfile != NULL) dbglistfile->car.File.next = p;
            locdbglist = p;
            dbglistfile = p;
        }
        dbglistfile->car.File.name = f;
    }
}

/* Record a source line 'fl' against the procedure currently being
 * compiled.  Returns an opaque handle (a FileCoord*) whose code address
 * is filled in later by dbg_addcodep; returns DUFF_ADDR when line debug
 * info is off or no procedure is open.
 */
VoidStar dbg_notefileline(FileLine fl) {
    if (usrdbg(DBG_LINE) && dbglistproc != NULL) {
        FileCoord *p, *l = dbglistproc->car.Proc.linelist;
        /* @@@ beware - odd use of #line *MAY* (check) set off the syserr() */
        if (l != NULL && l->line > fl.l) syserr(syserr_debugger_line);
        p = (FileCoord *)DbgAlloc(sizeof(FileCoord));
        /* codeaddr == -1 marks "address not yet known". */
        cdr_(p) = l; p->line = fl.l; p->codeaddr = -1;
        dbglistproc->car.Proc.linelist = p;
        return (VoidStar)p;
    }
    return DUFF_ADDR;
}

/* Block-start/end items awaiting their code address (linked via
   car.Block.s.next until the next J_INFOSCOPE arrives). */
static DbgList *dbglistscope;
/* The 'dbgaddr' arg has type 'void *' to keep the debugger types local to */
/* this file. This does not make it any less of a (ANSI approved) hack. */
/* Attach a code address either to the pending scope items (dbgaddr==NULL,
 * i.e. a J_INFOSCOPE marker) or to a FileCoord returned earlier by
 * dbg_notefileline.
 */
void dbg_addcodep(VoidStar dbgaddr, int32 codeaddr) {
    if (dbgaddr == NULL) { /* J_INFOSCOPE */
        /* c.flowgraf outputs a J_INFOSCOPE immediately after calling
         * dbg_scope, to mark the relevant code address.
         */
        if (debugging(DEBUG_Q)) cc_msg("-- scope at 0x%lx\n", codeaddr);
        { DbgList *next, *p = dbglistscope;
          for (; p != NULL; p = next) {
              /* car.Block.s is a union: 'next' must be read BEFORE
                 'codeaddr' is stored over it. */
              next = p->car.Block.s.next;
              p->car.Block.s.codeaddr = codeaddr;
          }
          dbglistscope = NULL;
        }
    } else if (usrdbg(DBG_LINE)) {
        FileCoord *p = (FileCoord *)dbgaddr;
        if (debugging(DEBUG_Q))
            cc_msg("%p (line %u) @ %.6lx\n", p, p->line, (long)codeaddr);
        /* The following test avoids setting nextincode/codeaddr twice */
        /* This is currently needed in case FileLine's are duplicated. */
        if (p->codeaddr == -1) p->codeaddr = codeaddr;
    }
}

/* Write the COFF line-number section for every Proc in locdbglist and
 * return the number of 6-byte records written.  Each proc contributes a
 * header record (symbol index, line 0) followed by one record per line
 * that advances both line number and code address.
 * NOTE(review): obj_fwrite(&lineno,1,6,...) assumes the struct is packed
 * as 4-byte addr immediately followed by 2-byte line - confirm for the
 * host compiler's layout rules.
 */
static int32 dbg_lineinfo(void) {
    DbgList *p = locdbglist;
    struct { int32 addr; int16 line; } lineno;
    int32 count = 0;
    if (usrdbg(DBG_LINE))
        for (; p != NULL; p = cdr_(p)) {
            if (p->sort == Proc) {
                int startline = p->car.Proc.sourcepos;
                int lastline = startline;
                int32 addr = 0;
                /* linelist was built newest-first; restore source order. */
                FileCoord *coord = (FileCoord *)dreverse((List *)p->car.Proc.linelist);
                lineno.addr = TargetWord(p->index);
                lineno.line = 0;
                obj_fwrite(&lineno, 1, 6, objstream);
                count++;
                for (; coord != NULL; coord = cdr_(coord))
                    /* Emit only monotonically increasing (line, addr)
                       pairs; line numbers are stored proc-relative. */
                    if (coord->line > lastline && coord->codeaddr > addr) {
                        addr = coord->codeaddr;
                        lastline = coord->line;
                        lineno.addr = TargetWord(addr);
                        lineno.line = TargetHalf((int32)lastline - startline);
                        obj_fwrite(&lineno, 1, 6, objstream);
                        count++;
                    }
            }
        }
    return count;
}
/* End of file/line co-ordinate code */
/* End of file/line co-ordinate code */
/* Number of COFF auxiliary symbol entries required by this type
 * representation: one when it carries array dimensions or a tag
 * (struct/union/enum) reference, otherwise none.
 */
static int32 AuxEntryCount(Dbg_Type *t) {
    if (t->dimct != 0 || t->tag != NULL)
        return 1;
    return 0;
}
static void TypeRep(TypeExpr *, Dbg_Type *, DbgList **);

/* Build the member list of a struct/union tag 'b' into debug item 'p'.
 * 'sort' carries the class bits used by structfield for layout; 'size'
 * is the total size in bytes (0 when the tag is only forward-declared).
 * 'list' receives debug items for any tags referenced by member types.
 * Also counts the symbol-table entries the members will occupy.
 */
static void StructEntry(DbgList *p, TagBinder *b, SET_BITMAP sort, int32 size, DbgList **list) {
    ClassMember *l = tagbindmems_(b);
    StructElt *elts = NULL;
    int32 count = 0;
    StructPos sp;
    structpos_init(&sp, b);
    for (; l != 0; l = memcdr_(l)) {
        structfield(l, sort, &sp);
        if (memsv_(l)) { /* memsv is 0 for padding bit fields */
            StructElt *el = (StructElt *)DbgAlloc(sizeof(StructElt));
            cdr_(el) = elts;
            el->val = sp.woffset;
            /* Bitfield members are described as plain int. */
            TypeRep(sp.bsize != 0 ? te_int : memtype_(l), &el->type, list);
            el->name = symname_(memsv_(l));
            elts = el;
            count += 1 + AuxEntryCount(&el->type);
        }
    }
    p->car.Tag.size = size;
    /* Members were consed newest-first; restore declaration order. */
    p->car.Tag.elts.s = (StructElt *)dreverse((List *)elts);
    p->car.Tag.entries = count;
}

/* Build the enumerator list of enum tag 'b' into debug item 'p'. */
static void EnumEntry(DbgList *p, TagBinder *b) {
    BindList *l = tagbindenums_(b);
    EnumElt *elts = NULL;
    int32 count = 0;
    for (; l != NULL; l = l->bindlistcdr, count++) {
        EnumElt *el = (EnumElt *)DbgAlloc(sizeof(EnumElt));
        Binder *elt = l->bindlistcar;
        cdr_(el) = elts;
        el->name = symname_(bindsym_(elt));
        el->val = bindenumval_(elt);
        elts = el;
    }
    p->car.Tag.entries = count;
    p->car.Tag.elts.e = (EnumElt *)dreverse((List *)elts);
}

/* Return (creating or completing as necessary) the debug item for tag
 * binder 'b'.  The item is cached on the binder via b->tagbinddbg, so a
 * tag is represented at most once.  A previously forward-referenced tag
 * is completed in place when the definition arrives; an already-complete
 * (or still-undefined) item is returned untouched.
 */
static DbgList *StructRep(TagBinder *b, int32 size, DbgList **list) {
    DbgList *p = (DbgList *)b->tagbinddbg;
    if (p == NULL) {
        DbgListSort sort = (attributes_(b) & bitoftype_(s_enum)) ? Enum :
                           (attributes_(b) & bitoftype_(s_union)) ? Union :
                           Struct;
        p = (DbgList*)DbgAlloc(DbgListVarSize(Tag));
        /* S_Undef marks a forward reference not yet defined. */
        p->sort = (attributes_(b) & TB_DEFD) ? sort : (DbgListSort)(sort+S_Undef);
        p->index = -1;
        p->car.Tag.elts.e = NULL;
        if (isgensym(tagbindsym_(b))) {
            /* Anonymous tag: give it a fabricated ".Nfake" identity. */
            p->car.Tag.named = NO;
            p->car.Tag.tag.anonindex = anonindex++;
        } else {
            p->car.Tag.named = YES;
            p->car.Tag.tag.name = symname_(tagbindsym_(b));
        }
        cdr_(p) = *list;
        *list = p;
        b->tagbinddbg = (int32)p;
    } else if ((attributes_(b) & TB_DEFD) && (p->sort & S_Undef))
        /* Previously forward referenced and now defined */
        p->sort = (DbgListSort)(p->sort & ~S_Undef);
    else
        return p;
    if (attributes_(b) & bitoftype_(s_enum))
        EnumEntry(p, b);
    else
        StructEntry(p, b, attributes_(b) & CLASSBITS, size, list);
    return p;
}
/* COFF packs derived-type modifiers (ptr/fcn/array) as 2-bit codes above
   the 4-bit base type; each SetDerivedType appends one modifier.
   NOTE(review): nothing guards modct against overflowing the bits
   available in 'w' for deeply derived types - confirm upstream limits. */
#define SetDerivedType(p, n) (((p)->w |= (n) << ((p)->modct*2+4)), (p)->modct++)
#define SetBaseType(p, n) ((p)->w |= (n))

/* Set typep's base type to 'tagtype' (T_STRUCT/T_UNION/T_ENUM) and attach
 * the (possibly newly created) debug item for the tag itself. */
static void TypeRep_Struct(TypeExpr *x, Dbg_Type *typep, int tagtype, DbgList **list) {
    TagBinder *b = typespectagbind_(x);
    /* Size is only known once the tag is defined. */
    int32 size = attributes_(b) & TB_DEFD ? sizeoftype(x) : 0;
    typep->tag = StructRep(b, size, list);
    SetBaseType(typep, tagtype);
}

/* Translate front-end type 'x' into the COFF Dbg_Type encoding *typep:
 * a packed base type + derived-modifier word, array dimensions, and a
 * tag reference for struct/union/enum.  Debug items for any tags
 * encountered are added to *list.
 */
static void TypeRep(TypeExpr *x, Dbg_Type *typep, DbgList **list) {
    /* NOTE(review): dim[4] caps the recorded array dimensions at 4;
       nothing here bounds dimct - confirm the front end cannot produce
       deeper array nests, else this overruns. */
    int32 dim[4], size = 0;
    int dimct = 0;
    typep->modct = 0;
    typep->w = 0;
    typep->tag = NULL;
    /* Walk down through derived types until a base type is reached. */
    for (;; x = typearg_(x)) {
        x = princtype(x); /* lose intermediate typedefs */
        switch (h0_(x)) {
        case t_content:
        case t_ref: /* @@@ OK? */
            SetDerivedType(typep, DT_PTR);
            break;
        case t_subscript:
            SetDerivedType(typep, DT_ARRAY);
            /* Remember total size of the outermost array only. */
            if (dimct == 0) size = sizeoftype(x);
            dim[dimct++] = typesubsize_(x) == NULL ? 1 : evaluate(typesubsize_(x));
            break;
        case t_fnap:
            SetDerivedType(typep, DT_FCN);
            break;
        case s_typespec:
            { SET_BITMAP m = typespecmap_(x);
              switch (m & -m) { /* LSB - unsigned32/long etc. are higher */
              case bitoftype_(s_char):
                  { int32 mcr = mcrepoftype(x);
                    SetBaseType(typep, (mcr & MCR_SORT_MASK) == MCR_SORT_SIGNED ?
                                        T_CHAR : T_UCHAR);
                    goto ExitLoop;
                  }
              case bitoftype_(s_int):
                  if (m & BITFIELD) syserr(syserr_dbg_bitfield);
                  { int32 mcr = mcrepoftype(x);
                    int32 n = mcr & MCR_SIZE_MASK;
                    SetBaseType(typep, (mcr & MCR_SORT_MASK) == MCR_SORT_SIGNED ?
                                        (n == 2 ? T_SHORT : T_INT) :
                                        (n == 2 ? T_USHORT : T_UINT));
                    goto ExitLoop;
                  }
              case bitoftype_(s_double):
                  /* 'short double' is this front end's float. */
                  SetBaseType(typep, (m & bitoftype_(s_short)) ? T_FLOAT : T_DOUBLE);
                  goto ExitLoop;
              case bitoftype_(s_enum):
                  TypeRep_Struct(x, typep, T_ENUM, list);
                  goto ExitLoop;
              case bitoftype_(s_struct):
              case bitoftype_(s_class):
                  TypeRep_Struct(x, typep, T_STRUCT, list);
                  goto ExitLoop;
              case bitoftype_(s_union):
                  TypeRep_Struct(x, typep, T_UNION, list);
                  goto ExitLoop;
              case bitoftype_(s_void):
                  SetBaseType(typep, T_NULL); /* ? no T_VOID */
                  goto ExitLoop;
              default:
                  break;
              }
            }
            /* drop through */
        default:
            syserr(syserr_dbg_typerep, (VoidStar)x, (long)typespecmap_(x));
            SetBaseType(typep, T_NULL);
            goto ExitLoop;
        }
    }
ExitLoop:
    typep->dimct = dimct;
    if (dimct == 0)
        typep->dims = NULL;
    else {
        /* dims[0] holds total byte size; dims[1..dimct] the extents. */
        typep->dims = (int32 *)DbgAlloc(((int32)dimct+1)*sizeof(int32));
        typep->dims[0] = size;
        memcpy(&typep->dims[1], &dim[0], dimct*sizeof(int32));
    }
}
/* Prepend a Var (or UndefVar, when 'undef' is S_Undef) item describing
 * variable 'name' to *list.  TypeRep is called after the item is linked,
 * so tag items it creates land ahead of it on the (reversed-later) list.
 * NOTE(review): dbg_locvar passes a Binder* cast to Symstr* as 'name';
 * the real name is substituted later by dbg_locvar1.
 */
static void AddVar(Symstr *name, int sourcepos, int stgclass,
                   int base, int32 addr, TypeExpr *type, int32 undef,
                   DbgList **list) {
    DbgList *p = (DbgList *)DbgAlloc(DbgListVarSize(Var));
    p->sort = (DbgListSort)(Var+undef);
    p->car.Var.stgclass = stgclass;
    p->car.Var.sourcepos = sourcepos;
    p->car.Var.location = addr;
    p->car.Var.name = name;
    p->car.Var.section = base;
    p->cdr = *list;
    *list = p;
    TypeRep(type, &p->car.Var.type, list);
}

/* Record a top-level variable definition or reference.  A previously
 * seen entry for the same (name, storage class) is updated in place when
 * the definition arrives; otherwise a new item is added to the global or
 * static list as appropriate.
 */
void dbg_topvar(Symstr *name, int32 addr, TypeExpr *t, int stgclass,
                FileLine fl) {
    if (usrdbg(DBG_PROC)) {
        int base = N_UNDEF;
        int stg = (stgclass & DS_REG) ? C_REG :
                  (stgclass & DS_EXT) ? C_EXT:
                                        C_STAT;
        /* Pick the COFF section for the variable's address. */
        if (stgclass & DS_BSS)
            base = N_BSS;
        else if (stgclass & DS_CODE)
            base = N_TEXT;
        else if (!(stgclass & DS_REG) && (stgclass & (DS_EXT+DS_UNDEF)) != DS_EXT+DS_UNDEF)
            base = N_DATA;
        if (debugging(DEBUG_Q)) cc_msg("top var $r %x @ %.6lx\n", name, stgclass, (long)addr);
        CheckFile(fl.f);
        { DbgList **list = stgclass & DS_EXT ? &globdbglist : &statdbglist;
          int32 undef = stgclass & DS_UNDEF ? S_Undef : 0;
          if (stgclass != 0 && stg != C_REG) {
              /* Look for an existing item to complete/overwrite. */
              DbgList *p;
              for (p = *list ; p != NULL ; p = cdr_(p))
                  if ( (p->sort == UndefVar || p->sort == Var) &&
                       p->car.Var.stgclass == stg &&
                       p->car.Var.name == name) {
                      if (!undef) {
                          p->sort = Var;
                          p->car.Var.location = addr;
                          p->car.Var.section = base;
                          p->car.Var.sourcepos = fl.l;
                      }
                      return;
                  }
          }
          AddVar(name, fl.l, stg, base, addr, t, undef, list);
        }
    }
}
/* Register a top-level typedef 'name' of type 't'.
 * TypeRep is always called - even for compiler-generated names - because
 * it has the side effect of registering any struct/union/enum tags the
 * type mentions on globdbglist.  The Type item itself is only created
 * for user-visible names; the allocation is deferred until after the
 * isgensym test (the original allocated unconditionally and leaked the
 * entry when the name was a gensym).
 */
void dbg_type(Symstr *name, TypeExpr *t, FileLine fl) {
    /* This only gets called on top-level types */
    Dbg_Type type;
    if (debugging(DEBUG_Q)) cc_msg("type $r\n", name);
    CheckFile(fl.f);
    TypeRep(t, &type, &globdbglist);
    if (!isgensym(name)) {
        DbgList *p = (DbgList*) DbgAlloc(DbgListVarSize(Var));
        p->sort = Type;
        p->car.Var.name = name;
        p->car.Var.type = type;
        p->car.Var.sourcepos = fl.l;
        p->cdr = globdbglist;
        globdbglist = p;
    }
}
/* Locals of the procedure being compiled, pending dbg_locvar1. */
static DbgList *locvars;

/* Open a new Proc debug item for function 'name'; entry/body/end
 * addresses are filled in later by dbg_enterproc / dbg_bodyproc /
 * dbg_xendproc. */
void dbg_proc(Symstr *name, TypeExpr *t, bool ext, FileLine fl) {
    if (usrdbg(DBG_ANY)) {
        DbgList *p = (DbgList*) DbgAlloc(DbgListVarSize(Proc));
        if (debugging(DEBUG_Q)) cc_msg("startproc $r\n", name);
        CheckFile(fl.f);
        t = princtype(t);
        p->sort = Proc;
        if (h0_(t) != t_fnap) syserr(syserr_dbg_proc);
        TypeRep(t, &p->car.Proc.type, &locdbglist);
        p->car.Proc.oldstyle = typefnaux_(t).oldstyle;
        p->car.Proc.sourcepos = fl.l;
        p->car.Proc.filepos = fl.filepos;
        p->car.Proc.entryaddr = 0; /* fill in at dbg_enterproc */
        p->car.Proc.bodyaddr = 0; /* fill in at dbg_bodyproc */
        p->car.Proc.end = 0; /* fill in at dbg_endproc */
        p->car.Proc.name = name;
        p->car.Proc.stgclass = ext ? C_EXT : C_STAT;
        p->car.Proc.linelist = NULL;
        p->cdr = locdbglist; /* do this last (typerep above) */
        dbglistproc = locdbglist = p; /* so can be filled in */
        locvars = NULL;
    }
}

/* Record the code address of the current procedure's entry point. */
void dbg_enterproc(void)
{ if (usrdbg(DBG_ANY))
  { DbgList *p = dbglistproc;
    if (p == 0 || p->sort != Proc || p->car.Proc.entryaddr != 0)
        syserr(syserr_dbg_proc1);
    if (debugging(DEBUG_Q))
        cc_msg("enter $r @ %.6lx\n",
               p->car.Proc.name, (long)codebase);
    p->car.Proc.entryaddr = codebase;
  }
}

/* The following routine records the post-entry codeaddr of a proc */
void dbg_bodyproc(void)
{ if (usrdbg(DBG_ANY))
  { DbgList *p = dbglistproc;
    if (p == 0 || p->sort != Proc || p->car.Proc.bodyaddr != 0)
        syserr(syserr_dbg_proc1);
    if (debugging(DEBUG_Q))
        cc_msg("body $r @ %.6lx\n",
               p->car.Proc.name, (long)(codebase+codep));
    p->car.Proc.bodyaddr = codebase+codep;
  }
}

/* No per-return debug info is emitted in this format. */
void dbg_return(int32 addr) {
    IGNORE(addr);
}

/* Close the current Proc item with an EndProc item recording the end
 * address; the two are cross-linked via car.Proc.end. */
void dbg_xendproc(FileLine fl) {
    if (usrdbg(DBG_ANY)) {
        DbgList *q = dbglistproc;
        DbgList *p = (DbgList*) DbgAlloc(DbgListVarSize(EndProc));
        if (q == 0 || q->sort != Proc || q->car.Proc.end != 0)
            syserr(syserr_dbg_proc1);
        if (debugging(DEBUG_Q))
            cc_msg("endproc $r @ %.6lx\n",
                   q->car.Proc.name, (long)(codebase+codep));
        q->car.Proc.end = p;
        p->sort = EndProc;
        p->car.EndProc.sourcepos = fl.l;
        p->car.EndProc.endaddr = codebase+codep;
        p->car.EndProc.fileentry = fl.f;
        p->cdr = locdbglist;
        locdbglist = p;
        dbglistproc = NULL;
    }
}
/* dbg_locvar() registers the name and line of a declaration, and internalises
* the type. Location info cannot be added until after register allocation.
* See also dbg_scope which completes.
* (Type internalisation cannot be done then, because by that time the tree
* has evaporated).
* Also remember that dead code elimination may remove some decls.
*/
/* Register the declaration of local variable 'b': the name, source line
 * and (internalised now, before the tree evaporates) type.  Location is
 * filled in later by dbg_locvar1 via dbg_scope.
 * The Binder pointer itself is stored (cast) in the name field so that
 * FindLocVar can match it up after register allocation.
 * Gensym'd typedefs still get their type internalised for the side
 * effect of registering referenced tags.
 * Fix: the 'base' argument of AddVar is an int; pass 0 (== N_UNDEF)
 * rather than the pointer constant NULL the original used.
 */
void dbg_locvar(Binder *b, FileLine fl) {
    Symstr *name = bindsym_(b);
    if (usrdbg(DBG_VAR)) {
        if (isgensym(name)) {
            if (bindstg_(b) & bitofstg_(s_typedef)) {
                Dbg_Type type;
                TypeRep(bindtype_(b), &type, &locdbglist);
            }
        } else if (!(bindstg_(b) & bitofstg_(s_extern))) {
            if (debugging(DEBUG_Q)) cc_msg("note loc var $b\n", b);
            AddVar((Symstr *)b, fl.l, 0, 0, 0, bindtype_(b), S_Undef, &locvars);
        }
    }
}
/* Unlink and return the pending locvars item registered for binder 'b'
 * by dbg_locvar (which stored the Binder* cast into the name field), or
 * NULL if none.  Classic pointer-to-pointer list removal.
 */
static DbgList *FindLocVar(Binder *b) {
    DbgList *p, **pp = &locvars;
    for (; (p = *pp) != NULL; pp = &cdr_(p))
        if (p->sort == UndefVar && (Binder *)p->car.Var.name == b) {
            *pp = cdr_(p);
            return p;
        }
    return NULL;
}
/* Complete the debug item for local 'b' now that register allocation has
 * fixed its location: replace the Binder* placeholder with the real name,
 * classify the storage (register/arg/auto/static) and record the address.
 * The item is moved from the pending 'locvars' list onto locdbglist.
 */
void dbg_locvar1(Binder *b) {
    Symstr *name = bindsym_(b);
    int base = N_UNDEF;
    DbgList *p = FindLocVar(b);
    int stgclass;
    int stgclassname;   /* single-char tag used only for -Q trace output */
    int32 addr = bindaddr_(b);
    if (p == NULL || p->car.Var.sourcepos == -1) {
        if (debugging(DEBUG_Q)) cc_msg(" omitted");
        return; /* invented variable name (e.g. s_let) */
    }
    cdr_(p) = locdbglist;
    locdbglist = p;
    p->sort = Var;
    p->car.Var.name = name;
    switch (bindstg_(b) & PRINCSTGBITS) {
    case bitofstg_(s_typedef):
        if (debugging(DEBUG_Q)) cc_msg(" <typedef>");
        p->sort = Type;
        return;
    case bitofstg_(s_static):
        /* C_HIDDEN: a static whose name may clash with others. */
        stgclass = C_HIDDEN, stgclassname = 'S';
        base = (bindstg_(b) & u_constdata) ? N_TEXT :
               (bindstg_(b) & u_bss) ? N_BSS :
                                       N_DATA;
        break;
    case bitofstg_(s_auto):
        if (bindxx_(b) != GAP) {
            /* Allocated to a register. */
            stgclass = (addr & BINDADDR_MASK) == BINDADDR_ARG ? C_REGPARM : C_REG;
            stgclassname = 'R', addr = register_number(bindxx_(b));
        } else switch (addr & BINDADDR_MASK) {
        case BINDADDR_ARG:
            stgclass = C_ARG, stgclassname = 'A', addr = local_fpaddress(addr);
            break;
        case BINDADDR_LOC:
            stgclass = C_AUTO, stgclassname = 'P', addr = local_fpaddress(addr);
            break;
        case 0:
            /* probably declared but not used case (where addr is still a bindlist) */
            p->sort = Ignore;
            if (bindstg_(b) & b_bindaddrlist) {
                if (debugging(DEBUG_Q)) cc_msg(" unused - omitted");
                return;
            }
            /* otherwise, fall into internal error case */
            /* (deliberate fall-through) */
        default:
            syserr(syserr_dbg_table, name, (long)bindstg_(b), (long)addr);
            return;
        }
        break;
    default:
        syserr(syserr_dbg_table, name, (long)bindstg_(b), (long)addr);
        return;
    }
    if (debugging(DEBUG_Q)) cc_msg(" %c %lx", stgclassname, (long)addr);
    p->car.Var.stgclass = stgclass;
    p->car.Var.location = addr;
    p->car.Var.section = base;
}
/* Called on entry to / exit from a binding scope.  'entering' > 0 means
 * blocks are being opened, < 0 closed (the lists are swapped so the loop
 * below always walks from the longer to the shorter list).  For each
 * non-empty scope a BlockStart/BlockEnd item is created; its code address
 * is filled in by the J_INFOSCOPE callback (dbg_addcodep) via the
 * dbglistscope chain.  Returns YES when an INFOSCOPE marker is wanted.
 */
bool dbg_scope(BindListList *newbll, BindListList *oldbll)
{ int32 entering = length((List *)newbll) - length((List *)oldbll);
  if (entering == 0) return NO;
  if (entering < 0)
  { BindListList *t = newbll;
    newbll = oldbll, oldbll = t;
  }
  /* Only emit block items for scopes nested inside the outermost one. */
  if (length((List *)oldbll) > 0) {
      BindListList *bll = newbll;
      DbgList *last = NULL;
      for (bll = newbll; bll != oldbll; bll = bll->bllcdr) {
          if (bll == 0) syserr(syserr_dbg_scope);
          if (bll->bllcar != 0) {
              DbgList *p = (DbgList *)DbgAlloc(DbgListVarSize(Block));
              dbglistscope = p;
              cdr_(p) = locdbglist;
              if (entering > 0) {
                  /* Push onto the stack of open blocks. */
                  p->sort = BlockStart;
                  p->car.Block.p = dbglistblockstart;
                  dbglistblockstart = p;
              } else {
                  /* Pop the matching BlockStart and link it to this end. */
                  DbgList *q = dbglistblockstart;
                  p->sort = BlockEnd;
                  p->car.Block.p = NULL;
                  dbglistblockstart = q->car.Block.p;
                  q->car.Block.p = p;
              }
              p->car.Block.s.next = last; /* filled in soon by INFOSCOPE */
              locdbglist = p;
              last = p;
          }
      }
  }
  if (debugging(DEBUG_Q)) cc_msg("scope %ld\n", entering);
  for (; newbll != oldbll; newbll = newbll->bllcdr)
  { SynBindList *bl;
    if (newbll == 0) syserr(syserr_dbg_scope);
    for (bl = newbll->bllcar; bl; bl = bl->bindlistcdr)
    { Binder *b = bl->bindlistcar;
      if (bindstg_(b) & b_dbgbit) continue; /* for this and next line */
      bindstg_(b) |= b_dbgbit; /* see end of routine cmt */
      if (debugging(DEBUG_Q))
          cc_msg(" %s $b",
                 entering>=0 ? "binding" : "unbinding",
                 b);
      if (entering >= 0)
          dbg_locvar1(b);
      if (debugging(DEBUG_Q))
          cc_msg("\n");
    }
  }
  return YES;
  /* Ask for INFOSCOPE item to get called back more or less immediately */
  /* from the local cg (INFOSCOPE item) to fill in the codeaddr */
}

/* Dummy procedure not yet properly implemented, included here to keep in */
/* step with dbx.c */
void dbg_commblock(Binder *b, SynBindList *members, FileLine fl) {
    IGNORE(b); IGNORE(members); IGNORE(fl);
}
/* Scratch buffer for building one 18-byte COFF auxiliary symbol entry,
   overlaid as bytes / halfwords / words for field access. */
static union {
    char c[18];
    unsigned8 b[18];
    unsigned16 h[9];
    unsigned32 w[5];
} aux;

/* Zero the auxiliary-entry buffer before filling in fields. */
static void ClearAux() {
    memset(&aux, 0, 18);
}

/* Word 3: symbol-table index of the "next" related entry (+n), or 0. */
static void SetNextIndex(DbgList *p, int n) {
    aux.w[3] = (p == NULL) ? 0 : TargetWord(p->index+n);
}

/* Halfword 2: source line number. */
static void SetSourcePos(int32 n) {
    aux.h[2] = TargetHalf(n);
}

/* Halfword 3: size of a struct/union/enum. */
static void SetStructSize(DbgList *p) {
    aux.h[3] = TargetHalf(p->car.Tag.size);
}

/* Word 0: symbol index of the tag entry, plus its size. */
static void SetStructRef(DbgList *p) {
    aux.w[0] = TargetWord(p->index);
    SetStructSize(p);
}

/* Build (in the shared 'aux' buffer) the auxiliary entry for type 't':
 * a tag reference and/or array dimensions.  Returns &aux, or NULL when
 * the type needs no auxiliary entry.
 * NOTE(review): dims[0..dimct] are written at h[3..]; aux.h has 9 slots,
 * so this assumes dimct <= 5 - confirm against TypeRep's dim[4] cap.
 */
static void *TypeAuxEntry(Dbg_Type *t, int pos) {
    if (AuxEntryCount(t) == 0) return NULL;
    ClearAux();
    if (t->tag != NULL) SetStructRef(t->tag);
    if (t->dimct != 0) {
        int i = 0;
        for (; i <= t->dimct; i++)
            aux.h[3+i] = TargetHalf(t->dims[i]);
        SetSourcePos(pos);
    }
    return &aux;
}
/* Symbol index of the first entry after the local debug list; used as
   the "next file" index of the last .file entry. */
static int32 globindex;

/* Selector for which entries of a debug list to process in one pass. */
typedef enum {
    ne_all,
    ne_def,
    ne_undef
} NEType;

/* Emit the COFF symbol-table entries for debug list 'p', restricted by
 * 'flag' (all items / defined only / undefined only).  Indices were
 * assigned earlier by NumberEntries, so forward references (e.g. the
 * .bf entry's pointer past the matching EndProc) can be emitted here.
 * The shared 'aux' buffer is rebuilt before each obj_stab call.
 */
static void WriteEntries(DbgList *p, NEType flag) {
    for (; p != NULL; p = cdr_(p))
        if (flag == ne_all ||
            (flag == ne_def ? p->sort != UndefVar : p->sort == UndefVar))
            switch (p->sort) {
            default:
                syserr(syserr_dbg_write, (long)p->sort);
                break;
            case File:
                /* .file entry; aux holds the basename (truncated to fit). */
                ClearAux();
                { UnparsedName un;
                  fname_parse(p->car.File.name, "c C h H", &un);
                  un.plen = un.vlen = 0;
                  un.vol = un.path = NULL;
                  fname_unparse(&un, FNAME_AS_NAME, aux.c, 15);
                }
                obj_stab(".file", p->car.File.next == NULL ? globindex : p->car.File.next->index,
                         N_DEBUG, T_NULL, C_FILE, &aux);
                break;
            case Proc:
                /* Function symbol followed by its .bf entry. */
                ClearAux();
                TypeAuxEntry(&p->car.Proc.type, 0);
                aux.w[1] = TargetWord(p->car.Proc.end->car.EndProc.endaddr - p->car.Proc.entryaddr);
                aux.w[2] = TargetWord(p->car.Proc.filepos);
                aux.h[8] = TargetHalf(p->car.Proc.oldstyle ? 14 : 15);
                /* Normal arithmetic promotions, 64 bit IEEE fp (what does this mean?) */
                SetNextIndex(p->car.Proc.end, 2);
                obj_stab(symname_(p->car.Proc.name), p->car.Proc.entryaddr, N_TEXT,
                         p->car.Proc.type.w, p->car.Proc.stgclass, &aux);
                ClearAux();
                SetSourcePos(p->car.Proc.sourcepos);
                SetNextIndex(p->car.Proc.end, 2);
                obj_stab(".bf", p->car.Proc.bodyaddr, N_TEXT, T_NULL, C_FCN, &aux);
                break;
            case EndProc:
                ClearAux();
                SetSourcePos(p->car.EndProc.sourcepos);
                obj_stab(".ef", p->car.EndProc.endaddr, N_TEXT, T_NULL, C_FCN, &aux);
                break;
            case UndefVar:
                /* Still-undefined external: no address or section. */
                obj_stab(symname_(p->car.Var.name), 0, N_UNDEF, p->car.Var.type.w, C_EXT,
                         TypeAuxEntry(&p->car.Var.type, p->car.Var.sourcepos));
                break;
            case Var:
                obj_stab(symname_(p->car.Var.name), p->car.Var.location, p->car.Var.section,
                         p->car.Var.type.w, p->car.Var.stgclass,
                         TypeAuxEntry(&p->car.Var.type, p->car.Var.sourcepos));
                break;
            case Type:
                obj_stab(symname_(p->car.Var.name), 0, N_DEBUG, p->car.Var.type.w,
                         C_TPDEF, TypeAuxEntry(&p->car.Var.type, p->car.Var.sourcepos));
                break;
            case Struct:
            case Union:
            case UndefStruct:
            case UndefUnion:
                /* Tag entry, one entry per member, then .eos terminator. */
                { char b[16];
                  char *name;
                  int t, cl;
                  if (p->sort == Struct || p->sort == UndefStruct)
                      t = T_STRUCT, cl = C_STRTAG;
                  else
                      t = T_UNION, cl = C_UNTAG;
                  if (p->car.Tag.named)
                      name = p->car.Tag.tag.name;
                  else {
                      /* NOTE(review): %ld with an int32 - assumes
                         int32 == long on the host; confirm. */
                      sprintf(b, ".%ldfake", p->car.Tag.tag.anonindex);
                      name = b;
                  }
                  ClearAux();
                  SetStructSize(p);
                  SetNextIndex(cdr_(p), 0);
                  obj_stab(name, 0, N_DEBUG, t, cl, &aux);
                  { StructElt *q = p->car.Tag.elts.s;
                    for (; q != NULL; q = cdr_(q))
                        obj_stab(q->name, q->val, N_ABS, q->type.w, C_MOS, TypeAuxEntry(&q->type, 0));
                  }
                  ClearAux();
                  SetStructRef(p);
                  obj_stab(".eos", p->car.Tag.size, N_ABS, T_NULL, C_EOS, &aux);
                  break;
                }
            case Enum:
                /* Tag entry, one entry per enumerator, then .eos. */
                { char b[16];
                  char *name;
                  if (p->car.Tag.named)
                      name = p->car.Tag.tag.name;
                  else {
                      sprintf(b, ".%ldfake", p->car.Tag.tag.anonindex);
                      name = b;
                  }
                  ClearAux();
                  SetStructSize(p);
                  SetNextIndex(cdr_(p), 0);
                  obj_stab(name, 0, N_DEBUG, T_ENUM, C_ENTAG, &aux);
                  { EnumElt *q = p->car.Tag.elts.e;
                    for (; q != NULL; q = cdr_(q))
                        obj_stab(q->name, q->val, N_ABS, T_MOE, C_MOE, NULL);
                  }
                  ClearAux();
                  SetStructRef(p);
                  obj_stab(".eos", p->car.Tag.size, N_ABS, T_NULL, C_EOS, &aux);
                  break;
                }
            case BlockStart:
                ClearAux();
                /* Points past the matching BlockEnd entry. */
                SetNextIndex(p->car.Block.p, 2);
                obj_stab(".bb",
                         p->car.Block.s.codeaddr, N_TEXT, T_NULL, C_BLOCK, &aux);
                break;
            case BlockEnd:
                ClearAux();
                obj_stab(".eb",
                         p->car.Block.s.codeaddr, N_TEXT, T_NULL, C_BLOCK, &aux);
                break;
            }
}
/* Reconcile debug items with the linker symbol list: an UndefVar whose
 * ExtRef turns out to be defined (bss/tentative definitions) is promoted
 * to Var with its final address and section.  Every Proc/Var symbol also
 * present in the ExtRef list is marked xr_deleted so it is not written
 * twice by obj_outsymtab.
 */
static void FixDbgList(DbgList *p) {
    for (; p != NULL; p = cdr_(p)) {
        if (p->sort == UndefVar) {
            ExtRef *x = symext_(p->car.Var.name);
            if (x != NULL && (x->extflags & (xr_defloc+xr_defext))) {
                p->sort = Var;
                p->car.Var.location = x->extoffset;
                p->car.Var.section = x->extflags & xr_bss ? N_BSS : N_DATA;
            }
        }
        if (p->sort == Proc || p->sort == Var || p->sort == UndefVar) {
            ExtRef *x = symext_(p->car.Var.name);
            if (x != NULL) x->extflags |= xr_deleted;
        }
    }
}

/* Point a symbol's ExtRef at the index of its debug-list entry, so
 * relocations reference the debug symbol instead of a duplicate. */
static void RenumberExtRef(Symstr *sym, int32 index) {
    ExtRef *x = symext_(sym);
    if (x != NULL) {
        if (!(x->extflags & xr_deleted)) syserr("Missed duplicate symbol");
        x->extindex = index;
    }
}

/* Assign consecutive symbol-table indices (starting at 'index') to the
 * entries of debug list 'p' selected by 'flag', accounting for the
 * number of symbol + auxiliary entries each item sort occupies.
 * Returns the next free index.
 */
static int32 NumberEntries(DbgList *p, int32 index, NEType flag) {
    for (; p != NULL; p = cdr_(p)) {
        if (flag == ne_all ||
            (flag == ne_def ? p->sort != UndefVar : p->sort == UndefVar)) {
            p->index = index;
            switch (p->sort) {
            case Ignore: break;
            case Proc: RenumberExtRef(p->car.Proc.name, index);
                       index += 4; break; /* .bf entry too */
            case BlockStart:
            case BlockEnd:
            case File:
            case EndProc: index += 2; break;
            case UndefVar:
            case Var: RenumberExtRef(p->car.Var.name, index);
                      /* and fall through */
            case Type: index += 1 + AuxEntryCount(&p->car.Var.type); break;
            case Enum: index += 2 + p->car.Tag.entries + 2; break;
            case UndefUnion:
            case UndefStruct:
            case Union:
            case Struct: { StructElt *q = p->car.Tag.elts.s;
                           index += 4; /* tag + .eos */
                           for (; q != NULL; q = cdr_(q))
                               index += 1+AuxEntryCount(&q->type);
                           break;
                         }
            default: syserr("%d in NumberEntries", p->sort);
            }
        }
    }
    return index;
}

/* Assign indices to the ExtRef symbols whose flags match maskedval
 * (under 'mask'); section symbols take an extra auxiliary slot. */
static int32 RenumberSyms(ExtRef *x, int32 index, int32 mask, int32 maskedval) {
    for (; x != NULL; x = x->extcdr)
        if ((x->extflags & mask) == maskedval) {
            x->extindex = index++;
            if (x->extflags & xr_section) index++;
        }
    return index;
}

/* Mark symbols with entries in symlist also present in a dbglist.
   If symbols in a dbglist are marked as undefined, but the symlist entry is
   marked as defined, fix up position and storage class in the dbglist entry
   (bss and tentatives: maybe someone should call dbg_topvar when the
   placement finally gets made).
   Number the symbols in dbglists, and the remaining unmarked symbols in
   symlist (locdbglist, then defloc things in symlist (should be just sections),
   then defined things in globdbglist, then defext things in symlist, then
   undefined things in globdbglist then symlist). Rewrite the index of marked
   symbols in symlist.
*/
static int32 dbg_fixup(ExtRef *symlist, int32 index) {
    if (usrdbg(DBG_ANY)) {
        /* Lists were built newest-first; put them in emission order and
           fold the remaining statics into the local list. */
        locdbglist = (DbgList *)dreverse(nconc((List *)statdbglist, (List *)locdbglist));
        globdbglist = (DbgList *)dreverse((List *)globdbglist);
        FixDbgList(locdbglist);
        FixDbgList(globdbglist);
        index = NumberEntries(locdbglist, 0, ne_all);
        globindex = index = RenumberSyms(symlist, index, xr_deleted+xr_defloc+xr_defext, xr_defloc);
        index = NumberEntries(globdbglist, index, ne_def);
        index = RenumberSyms(symlist, index, xr_deleted+xr_defloc+xr_defext, xr_defext);
        index = NumberEntries(globdbglist, index, ne_undef);
        return RenumberSyms(symlist, index, xr_deleted+xr_defloc+xr_defext, 0);
    } else
        return index;
}
/* Write the full symbol table in the exact order the indices were
 * assigned by dbg_fixup: local debug items, local defined symbols,
 * defined global debug items, defined external symbols, undefined
 * debug items, then the remaining (undefined) symbols; finally the
 * string table.  Without debug info, just dump the plain symtab.
 */
static void dbg_outsymtab() {
    if (usrdbg(DBG_ANY)) {
        WriteEntries(locdbglist, ne_all);
        obj_outsymtab(xr_deleted+xr_defloc+xr_defext, xr_defloc);
        WriteEntries(globdbglist, ne_def);
        obj_outsymtab(xr_deleted+xr_defloc+xr_defext, xr_defext);
        WriteEntries(globdbglist, ne_undef);
        obj_outsymtab(xr_deleted+xr_defloc+xr_defext, 0);
    } else
        obj_outsymtab(0, 0);
    obj_outstringtab();
}

/* Reset all per-compilation debug state. */
void dbg_init(void) {
    anonindex = 0;
    locdbglist = statdbglist = globdbglist = NULL;
    dbglistfile = dbglistproc = dbglistblockstart = NULL;
    dbglistscope = NULL;
}
#endif /* TARGET_HAS_DEBUGGER */
/* end of coffobj.c */
|
# Rebuild the quizzing database from scratch, then load the baseline schema.
# Fix: removed a stray trailing "|" after the psql command, which piped its
# output into nothing and made the line a shell syntax error.
dropdb quizzing
createdb quizzing -U postgres
psql -U postgres -d quizzing -f /home/wbond/Documents/Projects/quizzing/datasource/src/main/sql/baseline.sql
# GNU ld emulation parameters for TI TMS320C3x/C4x COFF output
# (consumed by the linker's emulparams machinery).
SCRIPT_NAME=tic4xcoff
OUTPUT_FORMAT="coff2-tic4x"
OUTPUT_ARCH="tic3x"
ARCH=tic3x
TEMPLATE_NAME=ticoff
OUTPUT_FORMAT_TEMPLATE=tic4x
ONCHIP=yes
|
def start_vowel(input_string):
    """Print whether ``input_string`` starts with a lower-case vowel.

    Fix: an empty string is now reported as not starting with a vowel
    instead of raising IndexError on ``input_string[0]``.
    Note: matching stays case-sensitive, as in the original.
    """
    vowel_list = ["a", "e", "i", "o", "u"]
    if input_string and input_string[0] in vowel_list:
        print("String starts with a vowel")
    else:
        print("String does not start with a vowel")


start_vowel("abc")
# Minimal link scraper: fetch a page and print the href of every <a> tag.
# NOTE(review): requires third-party packages `requests` and `beautifulsoup4`;
# no error handling - a network failure or non-200 status is not checked
# (resp.raise_for_status() is never called). Confirm that is acceptable.
import requests
from bs4 import BeautifulSoup

URL = "https://www.example.com"

# Getting the webpage, creating a Response object.
resp = requests.get(URL)

# Extracting the source code of the page.
data = resp.text

# Passing the source code to BeautifulSoup to create a BeautifulSoup object for it.
soup = BeautifulSoup(data, 'html.parser')

# Extracting all the <a> tags into a list.
tags = soup.find_all('a')

# Extracting URLs from the attribute href in the <a> tags.
# NOTE(review): tag.get('href') may be None for anchors without an href;
# those None values are printed as-is.
links = []
for tag in tags:
    links.append(tag.get('href'))

# Printing out the extracted links
print("The extracted links are:")
for link in links:
    print(link)
/*
Copyright (c) 2005-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include <cstdlib>
#include <iostream>
#include "Graph.hpp"
// Copy constructor.  ref_count is copied via an explicit load() - the
// member supports .load(), i.e. it is an atomic, which is not copyable
// with the implicitly-generated copy constructor.
Cell::Cell(const Cell& other) : op(other.op), value(other.value), successor(other.successor) {
    ref_count = other.ref_count.load();
    input[0] = other.input[0];
    input[1] = other.input[1];
}
// Build a random DAG of 'number_of_nodes' cells.  Acyclicity is
// guaranteed because cell k only ever takes inputs from cells with
// index < k (rand() % k below).
void Graph::create_random_dag(std::size_t number_of_nodes) {
    my_vertex_set.resize(number_of_nodes);
    for (std::size_t k = 0; k < number_of_nodes; ++k) {
        Cell& c = my_vertex_set[k];
        // Discard low-order rand() bits for better distribution.
        int op = int((rand() >> 8) % 5u);
        // Clamp so early cells (few predecessors) get low-arity ops;
        // for k == 0 this forces op 0 (OP_VALUE, presumably arity 0 -
        // otherwise rand() % k below would divide by zero; confirm
        // ArityOfOp[OP_VALUE] == 0).
        if (op > int(k))
            op = int(k);
        switch (op) {
            default:
                c.op = OP_VALUE;
                c.value = Cell::value_type((float)k);
                break;
            case 1: c.op = OP_NEGATE; break;
            case 2: c.op = OP_SUB; break;
            case 3: c.op = OP_ADD; break;
            case 4: c.op = OP_MUL; break;
        }
        // Wire each operand to a randomly chosen earlier cell.
        for (int j = 0; j < ArityOfOp[c.op]; ++j) {
            Cell& input = my_vertex_set[rand() % k];
            c.input[j] = &input;
        }
    }
}
void Graph::print() {
for (std::size_t k = 0; k < my_vertex_set.size(); ++k) {
std::cout << "Cell " << k << ":";
for (std::size_t j = 0; j < my_vertex_set[k].successor.size(); ++j)
std::cout << " " << int(my_vertex_set[k].successor[j] - &my_vertex_set[0]);
std::cout << "\n";
}
}
// Recompute successor lists and reference counts from the input edges,
// and collect the roots (cells with no inputs, i.e. arity-0 ops) into
// root_set.  Any previous successor lists are discarded first.
void Graph::get_root_set(std::vector<Cell*>& root_set) {
    for (std::size_t k = 0; k < my_vertex_set.size(); ++k) {
        my_vertex_set[k].successor.clear();
    }
    root_set.clear();
    for (std::size_t k = 0; k < my_vertex_set.size(); ++k) {
        Cell& c = my_vertex_set[k];
        // A cell becomes ready when all of its inputs have fired.
        c.ref_count = ArityOfOp[c.op];
        for (int j = 0; j < ArityOfOp[c.op]; ++j) {
            c.input[j]->successor.push_back(&c);
        }
        if (ArityOfOp[c.op] == 0)
            root_set.push_back(&my_vertex_set[k]);
    }
}
// Recompute this cell's value from its operator and input cells.
// OP_VALUE cells are constants and keep their stored value.
void Cell::update() {
    switch (op) {
        case OP_VALUE: break;
        case OP_NEGATE: value = -(input[0]->value); break;
        case OP_ADD: value = input[0]->value + input[1]->value; break;
        case OP_SUB: value = input[0]->value - input[1]->value; break;
        case OP_MUL: value = input[0]->value * input[1]->value; break;
    }
}
|
<reponame>songlijiang/songlijiang.github.io<gh_stars>1-10
'use strict';

// Mocha tests for Hexo's `hexo list post` console command: verifies the
// table header and row contents by spying on console.log.
var sinon = require('sinon');
var expect = require('chai').expect;

describe('Console list', function() {
  var Hexo = require('../../../lib/hexo');
  var hexo = new Hexo(__dirname);
  var Post = hexo.model('Post');
  var listPosts = require('../../../lib/plugins/console/list/post').bind(hexo);

  before(function() {
    // Stub console.log while still forwarding output to the real logger.
    // NOTE(review): the 3-argument sinon.stub(obj, 'method', fn) form is
    // a legacy API (removed in sinon 3.x); this suite relies on an old
    // sinon release - confirm the pinned version before upgrading.
    var log = console.log;
    sinon.stub(console, 'log', function() {
      return log.apply(log, arguments);
    });
  });

  after(function() {
    console.log.restore();
  });

  it('no post', function() {
    listPosts();
    expect(console.log.calledWith(sinon.match('Date'))).to.be.true;
    expect(console.log.calledWith(sinon.match('Title'))).to.be.true;
    expect(console.log.calledWith(sinon.match('Path'))).to.be.true;
    expect(console.log.calledWith(sinon.match('Category'))).to.be.true;
    expect(console.log.calledWith(sinon.match('Tags'))).to.be.true;
    expect(console.log.calledWith(sinon.match('No posts.'))).to.be.true;
  });

  it('post', function() {
    var posts = [
      {source: 'foo', slug: 'foo', title: 'Its', date: 1e8},
      {source: 'bar', slug: 'bar', title: 'Math', date: 1e8 + 1},
      {source: 'baz', slug: 'baz', title: 'Dude', date: 1e8 - 1}
    ];

    return hexo.init()
      .then(function() {
        return Post.insert(posts);
      }).then(function() {
        // Refresh cached locals so the new posts are visible to the command.
        hexo.locals.invalidate();
      })
      .then(function() {
        listPosts();
        expect(console.log.calledWith(sinon.match('Date'))).to.be.true;
        expect(console.log.calledWith(sinon.match('Title'))).to.be.true;
        expect(console.log.calledWith(sinon.match('Path'))).to.be.true;
        expect(console.log.calledWith(sinon.match('Category'))).to.be.true;
        expect(console.log.calledWith(sinon.match('Tags'))).to.be.true;
        for (var i = 0; i < posts.length; i++) {
          expect(console.log.calledWith(sinon.match(posts[i].source))).to.be.true;
          expect(console.log.calledWith(sinon.match(posts[i].slug))).to.be.true;
          expect(console.log.calledWith(sinon.match(posts[i].title))).to.be.true;
        }
      });
  });
});
|
import { Component } from '@angular/core';

// Simple book-search widget: a bound input, a search button and a result
// list that is shown only when there are results.
// NOTE(review): [(ngModel)] requires FormsModule to be imported by the
// declaring NgModule - confirm it is.
@Component({
  selector: 'books-search',
  template: `
    <h2>Books Search</h2>
    <input type="text" [(ngModel)]="searchTerm">
    <button (click)="search()">Search</button>
    <ul *ngIf="results.length > 0">
      <li *ngFor="let result of results">{{ result }}</li>
    </ul>
  `
})
export class BookSearchComponent {
  // Current query text, two-way bound to the input field.
  searchTerm = '';
  // Search results rendered in the list; empty hides the <ul>.
  results = [];

  search() {
    // logic to search the database here
  }
}
#!/bin/tcsh
# PBS batch job: run compute_batch.py over the PS variable's DSSIM batch
# script on one core.
# Fix: removed the duplicated "#!/bin/tcsh" shebang line (only the first
# line of a script is honoured as a shebang; the copy was noise).
#PBS -A NTDD0005
#PBS -N testb
#PBS -q regular
#PBS -l walltime=12:00:00
#PBS -j oe
#PBS -M apinard@ucar.edu
#PBS -l select=1:ncpus=1

# Activate the conda environment providing ldcpy and its dependencies.
module load conda
conda activate ldcpy_env

# Keep temporaries on scratch; create the directory if it does not exist.
setenv TMPDIR /glade/scratch/$USER/temp
mkdir -p $TMPDIR

python ./compute_batch.py -o '/glade/scratch/apinard/3D/PS_calcs.csv' -j './batch_scripts/3d_dssim_scripts/PS.json' -ts 210 -tt 225 -v -ld
#!/bin/bash
# Create the todoapp application from the todo-single context directory of
# the Git repository, building with the Node.js 6 S2I builder image and an
# internal Nexus npm registry.
# Fix: the original was missing the line-continuation backslash after
# "--context-dir todo-single", which silently split the command in two
# (the remaining options were executed as a separate, bogus command).
oc new-app --name todoapp -i nodejs:6 \
    --context-dir todo-single \
    --build-env npm_config_registry=\
http://nexus-common.apps.cluster.domain.example.com/repository/nodejs \
    -e DATABASE_NAME=tododb \
    -e DATABASE_USER=todoapp \
    -e DATABASE_PASSWORD=redhat \
    -e DATABASE_SVC=tododb \
    -e DATABASE_INIT=true \
    https://github.com/yourgituser/DO288-apps
#!/bin/bash
# Regenerate the expected FizzBuzz output fixture (1..100) consumed by the
# indylisp test suite, using the reference Perl implementation.
./fizzbuzz.pl 1 100 > ../../src/test/resources/jp/vmi/indylisp/fizzbuzz-result.txt
<filename>token_manager.py
import os
CLIENT_ID = os.getenv('POL_ID', '')
CLIENT_SECRET = os.getenv('POL_SECRET', '')
ACCESS_TOKEN = os.getenv('ACCESS_TOKEN', 'NOT SPECIFIED')
class FakeCursor:
    """Placeholder database cursor that fails loudly on any use.

    Real storage of (org, refresh_token, access_token) rows is not
    implemented yet; failing fast here prevents token data from being
    silently discarded in production.
    """

    def execute(self, cmd):
        """Always raise: persistence is not implemented.

        :param cmd: SQL command string (ignored).
        :raises Exception: unconditionally.
        """
        raise Exception('Need to implement DB storage and token refresh for prod')
class TokenManager:
    """Stores and retrieves OAuth access/refresh token pairs keyed by org id."""

    # Bug fix: the original referenced a module-level TABLE_NAME that was never
    # defined anywhere in this file (NameError at runtime). Define it here.
    TABLE_NAME = 'org_tokens'

    def __init__(self):
        # FakeCursor raises on every execute() until real DB storage exists.
        self.cur = FakeCursor()

    def save_org_token(self, org, access, refresh):
        """Replace any stored token pair for `org` with the given tokens.

        NOTE(review): queries are built via string interpolation, so `access`
        and `refresh` are injected unescaped (SQL injection risk). Switch to
        parameterized queries when a real cursor is wired in.
        """
        # Clear existing entry for org if present.
        cmd = "delete from %s where org = %d" % (self.TABLE_NAME, org)
        self.cur.execute(cmd)
        cmd = """insert into %s (org, accessToken, refreshToken) values (%s, "%s", "%s")""" % (self.TABLE_NAME, org, access, refresh)
        self.cur.execute(cmd)

    def get_testing_token(self):
        """Return the token taken from the ACCESS_TOKEN environment variable."""
        return ACCESS_TOKEN

    def get_token(self, org):
        """Fetch the stored token row for `org` as a dict.

        :returns: dict with keys "org", "accessToken", "refreshToken".
        """
        # Bug fix: the original built a "%s"/"%d" format string but never
        # interpolated TABLE_NAME/org into it before executing.
        cmd = "select * from %s where org = %d limit 1" % (self.TABLE_NAME, org)
        self.cur.execute(cmd)
        # NOTE(review): FakeCursor does not implement fetchone(); this path
        # only works once a real DB cursor replaces it.
        entry = self.cur.fetchone()
        return {
            "org": entry[0],
            "accessToken": entry[1],
            "refreshToken": entry[2]
        }

    def post_token_data(self, code):
        """Request body for the initial authorization-code token exchange."""
        return {
            "clientId": CLIENT_ID,
            "clientSecret": CLIENT_SECRET,
            "code": code,
            "grantType": "authorization_code"
        }

    def post_refresh_data(self, refresh):
        """Request body for refreshing an expired access token."""
        return {
            "clientId": CLIENT_ID,
            "clientSecret": CLIENT_SECRET,
            "refreshToken": refresh,
            "grantType": "refresh_token"
        }
|
<gh_stars>0
package weixin.liuliangbao.jsonbean.FlowCard;
/**
* Created by aa on 2015/12/21.
*/
/**
 * Serializable bean holding an extraction code for a flow card:
 * a record id and the code string itself.
 */
public class extractionCodeBean implements java.io.Serializable {

    // Explicit serialVersionUID so the serialized form stays stable across
    // future edits (the original relied on the compiler-generated value).
    private static final long serialVersionUID = 1L;

    /** Record identifier. */
    private String id;

    /** The extraction code value. */
    private String code;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }
}
|
#include"WeldedBeam.h"
#define _USE_MATH_DEFINES
#include<math.h>
// Evaluate one candidate for the welded-beam design problem.
// NOTE(review): this matches the classic bi-objective welded-beam benchmark
// (minimize fabrication cost and beam deflection subject to stress, deflection,
// geometry and buckling constraints) — confirm the constants against the
// reference formulation before relying on exact values.
individual WB::WeldedBeam(individual ind) {
	//h=0, b=1, l=2, t=3
	// Decision variables: weld thickness h, bar breadth b, weld length l, bar thickness t.
	double h = ind.var[0], b = ind.var[1], l = ind.var[2], t = ind.var[3];
	double tau_, delta_, pc_;
	// Primary and secondary shear-stress components in the weld.
	double tau1_ = 6000.0 / sqrt(2) / h / l;
	double tau2_ = (6000.0 * (14.0 + 0.5*l)*sqrt(0.25*(pow(l, 2) + pow(h + t, 2)))) / (2 * (0.707*h*l*(pow(l, 2) / 12.0 + 0.25*pow(h + t, 2))));
	// Combined shear stress in the weld.
	tau_ = sqrt(pow(tau1_, 2) + pow(tau2_, 2) + (l*tau1_*tau2_) / sqrt(0.25*(pow(l, 2) + pow(h + t, 2))));
	// End deflection of the beam.
	delta_ = 504000.0 / pow(t, 2) / b;
	// Buckling load of the bar.
	pc_ = 64746.022*(1.0 - 0.0282346*t)*t*pow(b, 3);
	//objectives
	ind.cost[0] = 1.10471*h * h * l + 0.04811*t * b * (14.0 + l);
	ind.cost[1] = 2.1952 / pow(t, 3) / b;
	//constraints (feasible when >= 0, presumably — verify against the solver's convention)
	ind.constraint[0] = 13600.0 - tau_;
	ind.constraint[1] = 30000.0 - delta_;
	ind.constraint[2] = b - h;
	ind.constraint[3] = pc_ - 60000.0;
	return ind;
}
// Smoke-check the WeldedBeam evaluation with a fixed candidate and print the
// resulting objective values.
void WB::check() {
	individual x;
	x.var.resize(12);//size of phenotype
	x.cost.resize(num_obj);//number of tasks
	for (int i = 0; i < 12; ++i) {
		x.var[i] = 0.5;
	}
	x.var[0] = 0;
	x.var[1] = 1;
	x = WeldedBeam(x);
	// Bug fix: WeldedBeam() only fills cost[0] and cost[1]; the original also
	// printed cost[2], which is past the end of the vector when num_obj == 2
	// (out-of-range / uninitialized read).
	cout << x.cost[0] << " " << x.cost[1] << endl;
}
// Dispatch a problem evaluation by id; id 1 selects the welded-beam problem.
// Any other id leaves the individual untouched (matching the original
// switch's default branch).
individual WB::problems(int swit, individual ind) {
	if (swit == 1) {
		ind = WeldedBeam(ind);
	}
	return ind;
}
# Case pattern containing a parameter expansion: matches when x == "abc" + $a.
# NOTE(review): this looks like a shell-parser test fixture — if a harness
# consumes it verbatim, drop this comment.
case x in
abc${a}) ;; esac
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.