text stringlengths 1 1.05M |
|---|
#!/bin/bash
#*******************************************************************************
# Copyright 2017 Talentica Software Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#*******************************************************************************
# Runs each Hadoop job described in hadoop_conf.json: records submit/start/
# finish times in the hungryhippos_tester MySQL database, uploads the input
# to HDFS, executes the job and collects/compares the output.
# Helper functions (get_master_ip, read_json, data_publishing, ...) come from:
source vagrant_init_functions.sh

# Number of jobs to run, taken from the JSON config.
# jq -r emits the raw value, so the old `tr -d '"'` quote-stripping pass
# is no longer needed.
no_of_jobs=$(jq -r '.number_of_jobs' hadoop_conf.json)

# Variables populated by read_json/get_master_ip and consumed by the sourced
# helper functions, hence the exports.
export MASTER_IP
export file_size_input
export file_size_output
export process_id
export process_instance_id
export process_instance_detail_id
export job_id
export jar_file_path
export input_file_path
export class_name
export output_file_name
export desired_input_file_location_master
export desired_output_file_location_master
export desired_job_file_location_master
export desired_input_file_location_hdfs
export desired_output_file_location_hdfs
export expected_result_file_path
export desired_expected_result_location_master
export jar_name
export file_name
export delete_hdfs_file_name
export mysql_server
export mysql_username
export mysql_password

get_master_ip

for (( i = 0; i < no_of_jobs; ++i ))
do
  # Read the i-th job's settings from the JSON file.
  read_json "$i"

  # Record the submission time and insert the job row.
  time_submit=$(date +'%Y:%m:%d %H:%M:%S')
  mysql -h "$mysql_server" -D hungryhippos_tester -u"$mysql_username" -p"$mysql_password" \
    -e "INSERT INTO job (status,date_time_submitted,user_id,file_system) VALUES ('submitted', '$time_submit','1','Hadoop');"

  # Fetch the id of the job just inserted.
  # NOTE(review): looking the row up by timestamp is racy when two jobs are
  # submitted within the same second -- LAST_INSERT_ID() would be safer.
  job_id=$(mysql -h "$mysql_server" -D hungryhippos_tester -u"$mysql_username" -p"$mysql_password" \
    -se "select job_id from job where date_time_submitted='$time_submit';")

  # Start the timer for the whole per-job run.
  start=$(date +%s.%N)

  # Pre-process the expected result only when the user supplied one.
  if [[ -n "$expected_result_file_path" ]]
  then
    expected_result_file_path_operations
  fi

  # Set up the master node for hadoop operations (creates the folders given
  # in the JSON file).
  master_setup_for_job_execution

  # Record the start time of the job proper.
  time_started=$(date +'%Y:%m:%d %H:%M:%S')
  mysql -h "$mysql_server" -D hungryhippos_tester -u"$mysql_username" -p"$mysql_password" \
    -e "update job set status='started', date_time_started='$time_started' where job_id='$job_id';"

  # Upload data from the master node to HDFS, timing the transfer.
  start_upload=$(date +%s.%N)
  data_publishing
  time_upload=$(echo "$(date +%s.%N) - $start_upload" | bc)
  printf '\nExecution time for uploading file to hdfs: %s seconds\n' "$time_upload"

  # Show uploaded data (only for verification).
  show_data
  # Execute the submitted job.
  job_execution
  # Copy the result file from HDFS.
  transfer_from_hdfs
  # Sort the copied result file (and compare against the expected result).
  sort_compare_output

  # Record input/output file sizes in the job_input/job_output tables.
  get_hdfs_file_size_input "$desired_input_file_location_hdfs/${file_name}"
  job_input_dbwrite "$job_id" "$desired_input_file_location_hdfs/${file_name}" "$file_size_input"
  get_hdfs_file_size_output "$desired_output_file_location_hdfs/${output_file_name}"
  job_output_dbwrite "$job_id" "$desired_output_file_location_hdfs/${output_file_name}" "$file_size_output"

  # Delete the HDFS file when a name to delete was provided.
  if [[ -n "$delete_hdfs_file_name" ]]
  then
    delete_hdfs_file
  fi

  # Stop the timer for the whole per-job run and record the finish time.
  time_script=$(echo "$(date +%s.%N) - $start" | bc)
  printf 'Execution time for running whole script for Job %d: %s seconds' "$((i + 1))" "$time_script"
  time_finished=$(date +'%Y:%m:%d %H:%M:%S')
  mysql -h "$mysql_server" -D hungryhippos_tester -u"$mysql_username" -p"$mysql_password" \
    -e "update job set status='finished', date_time_finished='$time_finished' where job_id='$job_id';"
  printf '\n-------------------Job %d completed-------------------------\n' "$((i + 1))"
done
|
<!--
Copyright 2020 Kansaneläkelaitos
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
-->
package fi.kela.kanta.to;
import java.io.Serializable;
/**
 * Transfer object describing how a (medicinal) preparation is used:
 * its pharmaceutical form and route of administration.
 */
public class ValmisteenKayttotapaTO implements Serializable {

    private static final long serialVersionUID = 135111113252356L;

    // Pharmaceutical form ("Lääkemuoto"); CV presumably means coded value -- confirm
    private String laakemuoto;

    // Abbreviation of the pharmaceutical form ("Lääkemuodon lyhenne")
    private String laakemuodonLyhenne;

    // Route of administration ("Lääkkeenantoreitti"); CV presumably coded value
    private String laakeenantoreitti;

    public String getLaakemuoto() {
        return laakemuoto;
    }

    public void setLaakemuoto(String laakemuoto) {
        this.laakemuoto = laakemuoto;
    }

    public String getLaakemuodonLyhenne() {
        return laakemuodonLyhenne;
    }

    public void setLaakemuodonLyhenne(String laakemuodonLyhenne) {
        this.laakemuodonLyhenne = laakemuodonLyhenne;
    }

    public String getLaakeenantoreitti() {
        return laakeenantoreitti;
    }

    public void setLaakeenantoreitti(String laakeenantoreitti) {
        this.laakeenantoreitti = laakeenantoreitti;
    }
}
|
<filename>services/issuer/test/backOffice.spec.ts
import { getRandomString, getTestAgent, getTestSdrRequestData } from './utils'
import request from 'supertest'
import express, { Express } from 'express'
import { Agent } from 'daf-core'
import { Connection } from 'typeorm'
import fs from 'fs'
import backOffice from '../src/services/backOffice'
import { keccak256 } from 'js-sha3'
import CredentialRequest from '../src/lib/CredentialRequest'
describe('backOfficeService tests', () => {
let
app: Express, agent: Agent, connection: Connection,
database: string, user: string, password: string, issuerDid: string
const generateAndSaveSdr = async () => {
const sdr = await getTestSdrRequestData()
const message = await agent.handleMessage({ raw: sdr.body, metaData: [] })
const hash = (keccak256(message.raw) as any).toString('hex')
const credRequest = { status: 'pending', message, hash }
const createdRequest = await connection.getRepository(CredentialRequest).save(credRequest)
return { sdr, createdRequest }
}
beforeEach(async () => {
({ agent, connection, database } = await getTestAgent())
issuerDid = (await agent.identityManager.getIdentities())[0].did
expect(issuerDid).toContain('rsk:testnet')
user = getRandomString()
password = <PASSWORD>()
app = express()
await backOffice(app, agent, user, password)
})
afterEach(async () => {
await connection.close()
if (process.env.CI === 'true' && process.env.CIRCLECI === 'true') fs.copyFileSync(database, './artifacts')
else fs.unlinkSync(database)
})
describe('POST /auth', () => {
it('should return 200 if valid user & pwd', async () => {
await request(app).post('/auth').auth(user, password).expect(200)
})
it('should return 401 if invalid pwd', async () => {
await request(app).post('/auth').auth(user, 'invalid').expect(401)
})
it('should return 401 if invalid user', async () => {
await request(app).post('/auth').auth('invalid', password).expect(401)
})
})
describe('GET /identity', () => {
it('should return 401 if no auth sent', async () => {
await request(app).get('/identity').expect(401)
})
it('should return current identity', async () => {
const { text } = await request(app).get('/identity').auth(user, password).expect(200)
expect(text).toEqual(issuerDid)
})
it('should throw 500 if no identity set', async () => {
// close the connection generated in the beforeEach statement
await connection.close();
({ agent, connection } = await getTestAgent(false, database))
await backOffice(app, agent, user, password)
await request(app).get('/identity').auth(user, password).expect(500)
})
})
describe('GET /__health', () => {
it('should return a 200 with no auth required', async () => {
await request(app).get('/__health').expect(200)
})
})
describe('GET /requests', () => {
it('should return 401 if no auth sent', async () => {
await request(app).get('/requests').expect(401)
})
it('should return empty list if no requests', async () => {
const { text } = await request(app).get('/requests').auth(user, password).expect(200)
const reqs = JSON.parse(text)
expect(reqs).toHaveLength(0)
})
it('should return created sdr', async () => {
const { sdr } = await generateAndSaveSdr()
const { from, fullName } = sdr
const { text } = await request(app).get('/requests').auth(user, password).expect(200)
const reqs = JSON.parse(text)
expect(reqs).toHaveLength(1)
expect(reqs[0].status).toEqual('pending')
expect(reqs[0].from).toEqual(from)
expect(reqs[0].fullName).toEqual(fullName)
})
})
describe('PUT /request/:id/status', () => {
let path, createdRequest
beforeEach(async () => {
({ createdRequest } = await generateAndSaveSdr())
path = `/request/${createdRequest.id}/status`
})
it('should return 401 if no auth sent', async () => {
await request(app).put(path).expect(401)
})
it('should return 400 if invalid status', async () => {
const status = getRandomString()
const { text } = await request(app)
.put(path).set('Content-Type', 'application/json')
.auth(user, password).send({ status }).expect(400)
expect(text).toEqual('Invalid action')
})
describe
.each([['granted'], ['denied']])
('should update the status', (status) => {
it('should update the credential request status', async () => {
const { text } = await request(app)
.put(path).set('Content-Type', 'application/json')
.auth(user, password).send({ status }).expect(200)
const updated = JSON.parse(text)
expect(updated.id).toEqual(createdRequest.id)
expect(updated.status).toEqual(status)
// verify that the db is updated as well
const retrieved = await connection.getRepository(CredentialRequest).findOneOrFail({ id: updated.id })
expect(retrieved.status).toEqual(status)
})
})
})
})
|
use std::cmp;
/// Per-region layout state; `row_count` grows as cells are created.
struct Grid {
    /// Highest row index seen so far, plus one (maintained by
    /// `update_and_create_cell` via `cmp::max`).
    row_count: usize,
    /// Index of the region this grid belongs to; copied into each `Cell`.
    region_index: usize,
}
/// A cell's position: the region it lives in plus its row/column offsets.
struct Cell {
    /// Region this cell belongs to (taken from the owning `Grid`).
    region_index: usize,
    /// Row offset within the region.
    row_offset: usize,
    /// Column within the row.
    column: usize,
}
impl Grid {
fn update_and_create_cell(&mut self, offset: usize, column: usize) -> Result<Cell, Error> {
self.row_count = cmp::max(self.row_count, offset + 1);
Ok(Cell {
region_index: self.region_index,
row_offset: offset,
column: column.into(),
})
}
} |
from ploceidae.utilities.dependency_visibility_enum import DependencyVisibilityEnum
class TestDependencyGrouping:
    """Tests for dependencies registered under a named group.

    Group members are injected into a dependent's star parameter whose name
    matches the group name (e.g. ``*deps`` receives group ``"deps"``).
    """

    def test_grouped_dependencies_are_resolved_to_dependent(self, basic_configurator):
        container = basic_configurator.get_container()
        dependency_decorator = basic_configurator.get_dependency_wrapper()

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def a():
            return "a"

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def b():
            return "b"

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def c(b):
            return b + "c"

        def x(*deps):
            return deps

        resolved_deps = container.wire_dependencies(x)
        # All three group members are injected; ordering is not asserted.
        assert all(dep in resolved_deps for dep in (a(), b(), c(b())))

    def test_grouped_dependencies_with_tranformations_are_resolved_to_dependent(self, basic_configurator):
        container = basic_configurator.get_container()
        dependency_decorator = basic_configurator.get_dependency_wrapper()

        class A(object):
            def __init__(self):
                self.x = 0

            def increment_and_return(self):
                self.x += 1
                return self

        # Each member's transformation bumps its counter once on resolution.
        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL, transformation=lambda x: x.increment_and_return())
        def a():
            return A()

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL, transformation=lambda x: x.increment_and_return())
        def b():
            x = A()
            x.x = 2
            return x

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL, transformation=lambda x: x.increment_and_return())
        def c(b):
            x = A()
            x.x = 2 + b.x
            return x

        def x(*deps):
            return deps

        resolved_deps = container.wire_dependencies(x)
        # a -> 0+1, b -> 2+1, c -> 2+(b after transform)+1 = 6.
        assert all(y in [x.x for x in resolved_deps] for y in [1, 3, 6])

    def test_grouped_dependencies_with_tranformations_are_resolved_to_dependent2(self, basic_configurator):
        container = basic_configurator.get_container()
        dependency_decorator = basic_configurator.get_dependency_wrapper()

        class A(object):
            def __init__(self):
                self.x = 0

            def increment_and_return(self):
                self.x += 1
                return self

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL, transformation=lambda x: x.increment_and_return())
        def a():
            return A()

        # b has no transformation here, unlike the previous test.
        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def b():
            x = A()
            x.x = 2
            return x

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL, transformation=lambda x: x.increment_and_return())
        def c(b):
            x = A()
            x.x = b.x + 1
            return x

        def x(*deps):
            return deps

        resolved_deps = container.wire_dependencies(x)
        # a -> 1, b -> 2 (untransformed), c -> 2+1+1 = 4.
        assert all(y in [x.x for x in resolved_deps] for y in [1, 2, 4])

    def test_dependencies_that_are_grouped_can_be_resolved_with_normal_dependencies(self, basic_configurator):
        container = basic_configurator.get_container()
        dependency_decorator = basic_configurator.get_dependency_wrapper()

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def a():
            return "a"

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def b():
            return "b"

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def c(b):
            return b + "c"

        # Members resolve both by name (a, b, c) and via the *deps group.
        def x(a, b, c, *deps):
            return (a, b, c), deps

        resolved_deps = container.wire_dependencies(x)
        assert resolved_deps[0] == (a(), b(), c(b()))
        assert all(dep in resolved_deps[1] for dep in (a(), b(), c(b())))

    def test_dependency_that_is_both_grouped_and_normal(self, basic_configurator):
        container = basic_configurator.get_container()
        dependency_decorator = basic_configurator.get_dependency_wrapper()

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def a():
            return "a"

        # `a` is injected twice: once positionally, once through the group.
        def b(a, *deps):
            return (a,) + deps

        assert container.wire_dependencies(b) == ("a", "a")

    def test_dependency_that_is_grouped_can_be_resolved_independently_of_group(self, basic_configurator):
        container = basic_configurator.get_container()
        dependency_decorator = basic_configurator.get_dependency_wrapper()

        @dependency_decorator(group="deps", visibility=DependencyVisibilityEnum.GLOBAL)
        def a():
            return "a"

        def b(a):
            return "b" + a

        def c(*deps):
            return deps

        assert container.wire_dependencies(b) == "ba"
        assert container.wire_dependencies(c) == ("a",)

    def test_dependency_that_has_same_name_as_group(self, basic_configurator):
        dep = 3
        dependency_decorator = basic_configurator.get_dependency_wrapper()
        container = basic_configurator.get_container()

        # The dependency's own name collides with its group name.
        @dependency_decorator(group="group", visibility=DependencyVisibilityEnum.GLOBAL)
        def group():
            return dep

        def a(group):
            return group

        def b(*group):
            return group

        assert container.wire_dependencies(a) == dep
        assert container.wire_dependencies(b) == (dep,)
<gh_stars>10-100
/**
* @file Regroup all the functions use for sanitize the informations coming and going in the api
* @author <NAME>
*/
'use strict';
const consts_global = require('./../constants/consts_global');
const consts_sanitizer = require('./../constants/consts_sanitizer');
const utils_global = require('./utils_global');
const Entities = require('html-entities').AllHtmlEntities;
const entities = new Entities();
module.exports = {
    /**
     * Sanitize a pornhub-formatted number into a JavaScript Number.
     * Most numbers on the site contain letters, commas or parentheses,
     * so those characters are stripped before conversion.
     *
     * @param {string} value The string that represents a number
     * @return {number} The string converted into a JavaScript number
     **/
    sanitizer_number: value => {
        value = value.replace(/[()&A-Za-z,%]/g, '');
        value = Number(value);
        return value;
    },
    /**
     * Sanitize a scraped string into a single line:
     * removes tabs and line breaks, trims, and decodes HTML entities.
     *
     * @param {string} value The string that we want to sanitize
     * @return {string} The sanitized string
     **/
    sanitizer_string: value => {
        value = value.replace(/[\t\n]/g, '');
        value = value.trim();
        value = entities.decode(value);
        return value;
    },
    /**
     * Convert a string or number into a boolean.
     *
     * @param {(string|number)} value The string or number representing a boolean
     * @return {boolean} True or false depending on the value passed
     **/
    sanitizer_boolean: value => {
        return Boolean(value);
    },
    /**
     * Sanitize a key passed to the API:
     * replaces spaces with underscores, removes colons, uppercases.
     *
     * @param {string} value The API key that we want to sanitize
     * @return {string} The sanitized key
     **/
    sanitizer_key: value => {
        value = module.exports.sanitizer_string(value);
        value = value.replace(/\s/g, '_');
        value = value.replace(/:/g, '');
        value = value.toUpperCase();
        return value;
    },
    /**
     * Convert a value with a K/M unit suffix to a JavaScript number.
     *
     * NOTE(review): when the value has no K/M suffix it is returned
     * unconverted (still a string) -- confirm callers expect that.
     *
     * @param {string} value The string with a unit representing a number
     * @return {number} The JavaScript number
     **/
    sanitizer_KM_to_unit: value => {
        if (value.includes('K')) {
            return Number(value.replace('K', '')) * 1000;
        }
        if (value.includes('M')) {
            return Number(value.replace('M', '')) * 1000000;
        }
        return value;
    },
    /**
     * Convert a string date to a JavaScript Date.
     *
     * @param {string} value A string representing a date
     * @return {Date} The JavaScript date representing the value passed in argument
     **/
    sanitizer_date: value => {
        return new Date(value);
    },
    /**
     * Create a complete pornhub URL from a route passed in argument.
     *
     * @param {string} value A pornhub route
     * @return {string} A complete pornhub URL
     **/
    sanitizer_url_pornhub: value => {
        return consts_global.links.BASE_URL + value;
    },
    /**
     * Simply return the variable passed in argument.
     * Used so every type in the sanitizer dispatch table has a function.
     *
     * @param {Object} value A value
     * @return {Object} The same value without any change
     **/
    sanitizer_normal: value => {
        return value;
    },
    /**
     * Sanitize every string element of an array of strings and remove
     * duplicate values; falls back to plain string sanitizing for non-arrays.
     *
     * @param {array} array An array containing strings
     * @return {array} The array with unique values and sanitized strings
     **/
    sanitizer_array: array => {
        if (Array.isArray(array)) {
            array = array.map(x => module.exports.sanitizer_string(x));
            return utils_global.remove_duplicate(array);
        }
        return module.exports.sanitizer_string(array);
    },
    /**
     * Sanitize all the data received from pornhub into a more flexible format.
     * Skips null/undefined fields; for the rest, dispatches to the sanitizer
     * registered for the field's type and lowercases the key.
     *
     * @param {Object} datas The datas that we want to sanitize
     * @return {Object} The sanitized datas
     **/
    sanitizer: datas => {
        const rsl = Object.keys(consts_global.type).map(x => {
            if (datas[x] === null || datas[x] === undefined) {
                return;
            }
            // Choose the sanitizer to apply to the current value depending on its type
            const sanitizer = consts_sanitizer.sanitizer_by_type[consts_global.type[x].toUpperCase()];
            const sanitize_data = module.exports[sanitizer](datas[x]);
            return [x.toLowerCase(), sanitize_data];
        }).filter(x => x);
        return Object.fromEntries(rsl);
    }
};
|
#!/bin/bash
# Demonstrates iterating over a fixed array and over a filename glob.

# A list of service commands.
COMMANDS=( reload restart )
for COMMAND in "${COMMANDS[@]}"
do
  printf '%s\n' "$COMMAND"
done

# List all of the configuration files in the /etc/apache2/sites-available/
# directory. VHOSTS_PATH is intentionally expanded unquoted so the shell
# globs it.
VHOSTS_PATH=/etc/apache2/sites-available/*.conf
for FILENAME in $VHOSTS_PATH
do
  # Skip the literal pattern when the glob matches nothing.
  [[ -e "$FILENAME" ]] || continue
  printf '%s\n' "$FILENAME"
done
|
<filename>test/bench/change_connection_scheduler.rb<gh_stars>0
require_relative './bench_init'
# Bench test: swapping the scheduler on a connected client must propagate to
# the underlying connection.
context "Changing the client connection scheduler" do
  host = Connection::Controls::Host::Localhost.example
  port = Connection::Controls::TestServer.port

  # Build and connect a client against the local test server.
  client_connection = Connection::Client.build host, port
  client_connection.connect

  # Keep the original scheduler for comparison, then install a substitute.
  original_scheduler = client_connection.scheduler
  scheduler = Connection::Scheduler::Substitute.build
  client_connection.change_connection_scheduler scheduler

  test "Scheduler is changed on the underlying connection" do
    assert client_connection.connection.scheduler == scheduler
    refute client_connection.connection.scheduler == original_scheduler
  end

  client_connection.close
end
|
/*
A step(x) operation works like this: it changes a number x into x - s(x), where s(x) is the sum of x's digits. You like applying functions to numbers, so given the number n, you decide to build a decreasing sequence of numbers: n, step(n), step(step(n)), etc., with 0 as the last element.
Building a single sequence isn't enough for you, so you replace all elements of the sequence with the sums of their digits (s(x)). Now you're curious as to which number appears in the new sequence most often. If there are several answers, return the maximal one.
Example
For n = 88, the output should be
mostFrequentDigitSum(n) = 9.
Here is the first sequence you built: 88, 72, 63, 54, 45, 36, 27, 18, 9, 0;
And here is s(x) for each of its elements: 16, 9, 9, 9, 9, 9, 9, 9, 9, 0.
As you can see, the most frequent number in the second sequence is 9.
For n = 8, the output should be
mostFrequentDigitSum(n) = 8.
At first you built the following sequence: 8, 0
s(x) for each of its elements is: 8, 0
As you can see, the answer is 8 (it appears as often as 0, but is greater than it).
Input/Output
[execution time limit] 4 seconds (js)
[input] integer n
Guaranteed constraints:
1 ≤ n ≤ 10^5.
[output] integer
The most frequent number in the sequence s(n), s(step(n)), s(step(step(n))), etc.
*/
/**
 * Returns the most frequent digit sum s(x) over the sequence
 * n, step(n), step(step(n)), ... where step(x) = x - s(x).
 * Ties are broken by the larger sum.
 *
 * Fix: object keys are strings, so the original tie-break compared sums
 * lexicographically ("9" > "16" is true); comparisons are now numeric.
 *
 * @param {number} n Starting value, n >= 1.
 * @return {number} The most frequent digit sum (largest on ties).
 */
function mostFrequentDigitSum(n) {
  // Frequency of each digit sum seen along the sequence (0 never counted:
  // s(0) = 0 can only tie-lose to any positive sum, so skipping it is safe).
  const counts = {};
  let bestCount = 0;
  let bestSum = 0;
  while (n > 0) {
    const sum = `${n}`.split("").reduce((total, num) => total + Number(num), 0);
    counts[sum] = (counts[sum] || 0) + 1;
    n = n - sum;
  }
  for (const key in counts) {
    const sum = Number(key); // keys are strings; compare numerically
    if (counts[key] > bestCount) {
      bestCount = counts[key];
      bestSum = sum;
    } else if (counts[key] === bestCount && sum > bestSum) {
      bestSum = sum;
    }
  }
  return bestSum;
}
// Sample inputs with their expected outputs: 9, 8, 1, 9, 9, 9, 18.
const samples = [88, 8, 1, 17, 239, 994, 99999];
for (const n of samples) {
  console.log(mostFrequentDigitSum(n));
}
|
#!/usr/bin/env bash
# CI entry point: builds native test dependencies, then runs `go test` for
# the main module and the lib / enterprise/lib submodules.
set -euo pipefail # fail fast on errors, unset variables and pipeline failures

# For symbol tests
echo "--- build libsqlite"
./dev/libsqlite3-pcre/build.sh

# For searcher
echo "--- comby install"
./dev/comby-install-or-upgrade.sh

# For code insights test: start the database in the background.
# NOTE(review): the background job is never waited on; a startup failure only
# surfaces indirectly via DB_STARTUP_TIMEOUT below -- confirm that is intended.
./dev/codeinsights-db.sh &
export CODEINSIGHTS_PGDATASOURCE=postgres://postgres:password@127.0.0.1:5435/postgres
export DB_STARTUP_TIMEOUT=120s # codeinsights-db needs more time to start in some instances.

# Separate out time for go mod from go test
echo "--- go mod download"
go mod download

echo "--- go test"
go test -timeout 4m -coverprofile=coverage.txt -covermode=atomic -race ./...

# Test lib submodule
pushd lib >/dev/null
echo "--- go mod download lib"
go mod download
echo "--- go test lib"
go test -timeout 4m -coverprofile=coverage.txt -covermode=atomic -race ./...
popd >/dev/null

# Test enterprise/lib submodule
pushd enterprise/lib >/dev/null
echo "--- go mod download enterprise/lib"
go mod download
echo "--- go test enterprise/lib"
go test -timeout 4m -coverprofile=coverage.txt -covermode=atomic -race ./...
popd >/dev/null
|
import os
import numpy as np
import raisimpy as raisim
import math
import time
# Activate the RaiSim license relative to this script's location.
raisim.World.setLicenseFile(os.path.dirname(os.path.abspath(__file__)) + "/../../rsc/activation.raisim")
world = raisim.World()
ground = world.addGround()

# launch raisim server
server = raisim.RaisimServer(world)
server.launchServer(8080)

# Visual-only shapes.
# NOTE(review): the numeric arguments presumably encode sizes followed by an
# RGBA color -- confirm against the raisimpy addVisual* signatures.
visSphere = server.addVisualSphere("v_sphere", 1, 1, 1, 1, 1)
visBox = server.addVisualBox("v_box", 1, 1, 1, 1, 1, 1, 1)
visCylinder = server.addVisualCylinder("v_cylinder", 1, 1, 0, 1, 0, 1)
visCapsule = server.addVisualCapsule("v_capsule", 1, 0.5, 0, 0, 1, 1)
visSphere.setPosition(np.array([2, 0, 0]))
visCylinder.setPosition(np.array([0, 2, 0]))
visCapsule.setPosition(np.array([2, 2, 0]))
# NOTE(review): visBox is never positioned, so it presumably stays at the origin.

# Build a 100-point spiral polyline.
lines = server.addVisualPolyLine("lines")
lines.setColor(0, 0, 1, 1)
for i in range(0, 100):
    lines.addPoint(np.array([math.sin(i * 0.1), math.cos(i * 0.1), i * 0.01]))

# Animate shape colors and the box size, stepping in (approximately) real time.
counter = 0
for i in range(500000):
    counter = counter + 1
    visBox.setColor(1, 1, (counter % 255 + 1) / 256., 1)
    visSphere.setColor(1, (counter % 255 + 1) / 256., 1, 1)
    lines.setColor(1 - (counter % 255 + 1) / 256., 1, (counter % 255 + 1) / 256., 1)
    visBox.setBoxSize((counter % 255 + 1) / 256. + 0.01, 1, 1)
    time.sleep(world.getTimeStep())

server.killServer()
|
import fetch from 'node-fetch';
/**
 * Snapshot of an HTTP response with the body already read as text, so it
 * can be cached and consumed multiple times (a fetch Response body may only
 * be read once).
 */
export interface CachedResponse {
  ok: boolean;
  status: number;
  statusText: string;
  url: string;
  text: string;
}
/**
 * Deduplicating fetch wrapper: each URL is fetched at most once per instance,
 * and concurrent callers for the same URL share the in-flight request.
 */
export class CachedFetch {
  // Keyed by URL; stores the promise (not the resolved value) so concurrent
  // callers awaiting the same URL share one request.
  private cache = new Map<string, Promise<CachedResponse>>();

  /** Fetches `url`, reusing the cached (possibly still pending) response. */
  async fetch(url: string): Promise<CachedResponse> {
    if (this.cache.has(url)) {
      return await this.cache.get(url);
    }
    // Cannot simply return the `fetch(url)` promise as `res.text()` may only be called once.
    const promise = (async (): Promise<CachedResponse> => {
      const res = await fetch(url);
      return {
        ok: res.ok,
        status: res.status,
        statusText: res.statusText,
        url: res.url,
        text: await res.text(),
      };
    })();
    // NOTE(review): a rejected promise stays cached, so a transient network
    // error is remembered for the lifetime of this instance -- confirm that
    // is intended.
    this.cache.set(url, promise);
    return promise;
  }

  /**
   * Fetches `urls` in order and returns the first response accepted by
   * `filter`, or null when none matches.
   */
  async fetchFirstFiltered(urls: string[], filter: (res: CachedResponse) => boolean): Promise<CachedResponse | null> {
    for (const url of urls) {
      const res = await this.fetch(url);
      if (filter(res)) {
        return res;
      }
    }
    return null;
  }
}
|
package com.github.holyloop.jencode.model;
import java.util.Date;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * Project entity. Lombok's {@code @Data} generates getters, setters,
 * {@code toString} and {@code equals}/{@code hashCode}; {@code callSuper =
 * true} includes the {@code BaseEntity} fields (not visible here) in
 * equality.
 */
@Data
@EqualsAndHashCode(callSuper = true)
public class Project extends BaseEntity {
    private Long id;
    private String title;
    private String author;
    private String description;
    // Filesystem path to the project's sources
    private String projectPath;
    // Soft-delete flag -- presumably 0/1; confirm against the schema
    private Byte deleted;
    private Date addTime;
    private Date modifyTime;
}
<filename>scrapy_frontera/converters.py<gh_stars>10-100
import uuid
import logging
from scrapy.http.request import Request as ScrapyRequest
from scrapy.http.response import Response as ScrapyResponse
from scrapy.utils.request import request_fingerprint
from w3lib.util import to_bytes, to_native_str
from frontera.core.models import Request as FrontierRequest
from frontera.core.models import Response as FrontierResponse
from frontera.utils.converters import BaseRequestConverter, BaseResponseConverter
from .utils import get_callback_name
_LOG = logging.getLogger(__name__)
class RequestConverter(BaseRequestConverter):
    """Converts between frontera and Scrapy request objects"""

    def __init__(self, spider):
        # Spider is needed to resolve callback/errback names and to carry
        # the configured per-request state attributes.
        self.spider = spider

    def to_frontier(self, scrapy_request):
        """request: Scrapy > Frontier"""
        # Normalize cookies to a flat dict (Scrapy also allows a list of dicts).
        if isinstance(scrapy_request.cookies, dict):
            cookies = scrapy_request.cookies
        else:
            cookies = dict(sum([list(d.items()) for d in scrapy_request.cookies], []))
        # Callbacks cannot travel through the frontier directly; store the
        # spider method name instead (restored in from_frontier).
        cb = scrapy_request.callback
        if callable(cb):
            cb = _find_method(self.spider, cb)
        eb = scrapy_request.errback
        if callable(eb):
            eb = _find_method(self.spider, eb)
        statevars = self.spider.crawler.settings.getlist("FRONTERA_SCHEDULER_STATE_ATTRIBUTES", [])
        meta = {
            b"scrapy_callback": cb,
            b"scrapy_cb_kwargs": scrapy_request.cb_kwargs,
            b"scrapy_errback": eb,
            b"scrapy_meta": scrapy_request.meta,
            b"scrapy_body": scrapy_request.body,
            b"spider_state": [(attr, getattr(self.spider, attr, None)) for attr in statevars],
            b"origin_is_frontier": True,
        }
        fingerprint_scrapy_request = scrapy_request
        if fingerprint_scrapy_request.dont_filter:
            # if dont_filter is True, we need to simulate
            # not filtering by generating a different fingerprint each time we see same request.
            # So let's alter the url randomly
            fake_url = fingerprint_scrapy_request.url + str(uuid.uuid4())
            fingerprint_scrapy_request = fingerprint_scrapy_request.replace(url=fake_url)
        meta[b"frontier_fingerprint"] = scrapy_request.meta.get(
            "frontier_fingerprint", request_fingerprint(fingerprint_scrapy_request)
        )
        # Optional per-callback slot routing; map values look like
        # "prefix" or "prefix/number_of_slots".
        callback_slot_prefix_map = self.spider.crawler.settings.getdict("FRONTERA_SCHEDULER_CALLBACK_SLOT_PREFIX_MAP")
        frontier_slot_prefix_num_slots = callback_slot_prefix_map.get(get_callback_name(scrapy_request))
        if frontier_slot_prefix_num_slots:
            frontier_slot_prefix, *rest = frontier_slot_prefix_num_slots.split("/", 1)
            meta[b"frontier_slot_prefix"] = frontier_slot_prefix
            if rest:
                meta[b"frontier_number_of_slots"] = int(rest[0])
        return FrontierRequest(
            url=scrapy_request.url,
            method=scrapy_request.method,
            headers=dict(scrapy_request.headers.items()),
            cookies=cookies,
            meta=meta,
        )

    def from_frontier(self, frontier_request):
        """request: Frontier > Scrapy"""
        # Restore callback/errback from the stored method names.
        cb = frontier_request.meta.get(b"scrapy_callback", None)
        if cb and self.spider:
            cb = _get_method(self.spider, cb)
        eb = frontier_request.meta.get(b"scrapy_errback", None)
        if eb and self.spider:
            eb = _get_method(self.spider, eb)
        body = frontier_request.meta.get(b"scrapy_body", None)
        cb_kwargs = frontier_request.meta[b"scrapy_cb_kwargs"]
        meta = frontier_request.meta[b"scrapy_meta"]
        meta.pop("cf_store", None)
        # Restore spider state carried with the request; warn (and refuse)
        # when a request would overwrite a different existing value.
        for attr, val in frontier_request.meta.get(b"spider_state", []):
            prev_value = getattr(self.spider, attr, None)
            if prev_value is not None and prev_value != val:
                _LOG.error(
                    "State for attribute '%s' change from '%s' to '%s' attempted by request <%s> so crawl may loose consistency. \
Per request state should be propagated via request attributes.",
                    attr,
                    prev_value,
                    val,
                    frontier_request.url,
                )
            elif prev_value != val:
                setattr(self.spider, attr, val)
                _LOG.info("State for attribute '%s' set to %s by request <%s>", attr, val, frontier_request.url)
        return ScrapyRequest(
            url=frontier_request.url,
            callback=cb,
            errback=eb,
            body=body,
            method=to_native_str(frontier_request.method),
            headers=frontier_request.headers,
            cookies=frontier_request.cookies,
            meta=meta,
            cb_kwargs=cb_kwargs,
            dont_filter=True,
        )
class ResponseConverter(BaseResponseConverter):
    """Converts between frontera and Scrapy response objects"""

    def __init__(self, spider, request_converter):
        self.spider = spider
        # Delegate request conversion so both directions stay consistent.
        self._request_converter = request_converter

    def to_frontier(self, scrapy_response):
        """response: Scrapy > Frontier"""
        # Reuse the frontier request attached to the response when present;
        # otherwise rebuild one from the originating Scrapy request.
        frontier_request = scrapy_response.meta.get(
            "frontier_request", self._request_converter.to_frontier(scrapy_response.request)
        )
        frontier_request.meta[b"scrapy_meta"] = scrapy_response.meta
        return FrontierResponse(
            url=scrapy_response.url,
            status_code=scrapy_response.status,
            headers=dict(scrapy_response.headers.items()),
            body=scrapy_response.body,
            request=frontier_request,
        )

    def from_frontier(self, response):
        """response: Frontier > Scrapy"""
        return ScrapyResponse(
            url=response.url,
            status=response.status_code,
            headers=response.headers,
            body=response.body,
            request=self._request_converter.from_frontier(response.request),
        )
def _find_method(obj, func):
    """Return the byte-encoded name of ``func`` when it is a method bound to ``obj``."""
    is_bound_to_obj = bool(obj) and hasattr(func, "__self__") and func.__self__ is obj
    if not is_bound_to_obj:
        raise ValueError("Function %s is not a method of: %s" % (func, obj))
    return to_bytes(func.__func__.__name__)
def _get_method(obj, name):
    """Resolve ``name`` (bytes or str) to the attribute of ``obj`` with that name."""
    name = to_native_str(name)
    _missing = object()
    method = getattr(obj, name, _missing)
    if method is _missing:
        raise ValueError("Method %r not found in: %s" % (name, obj))
    return method
|
/*
* Copyright 2006-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.brte.sample.common.domain.trade;
import java.io.Serializable;
import java.math.BigDecimal;
import org.apache.commons.lang3.builder.HashCodeBuilder;
/**
* Trade 엔티티
*
* @author 배치실행개발팀
* @since 2012. 07.25
* @version 1.0
* @see <pre>
* 개정이력(Modification Information)
*
* 수정일 수정자 수정내용
* ------- -------- ---------------------------
* 2012. 07.25 배치실행개발팀 최초 생성
* </pre>
*/
public class Trade implements Serializable {

	private static final long serialVersionUID = -5557049363045084134L;

	/** ISIN security identifier. */
	private String isin = "";
	/** Number of units traded. */
	private long quantity = 0;
	/** Unit price; note BigDecimal.equals is scale-sensitive (2.0 != 2.00). */
	private BigDecimal price = new BigDecimal(0);
	/** Customer name. */
	private String customer = "";
	/** Database identity; may be null until the trade is persisted. */
	private Long id;
	/** Optimistic-locking version counter. */
	private long version = 0;

	public Trade() {
	}

	public Trade(String isin, long quantity, BigDecimal price, String customer) {
		this.isin = isin;
		this.quantity = quantity;
		this.price = price;
		this.customer = customer;
	}

	public Trade(long id) {
		this.id = id;
	}

	// NOTE(review): unboxes a possibly-null Long; throws NullPointerException
	// if called before an id has been assigned.
	public long getId() {
		return id;
	}

	public void setId(long id) {
		this.id = id;
	}

	public long getVersion() {
		return version;
	}

	public void setVersion(long version) {
		this.version = version;
	}

	public void setCustomer(String customer) {
		this.customer = customer;
	}

	public void setIsin(String isin) {
		this.isin = isin;
	}

	public void setPrice(BigDecimal price) {
		this.price = price;
	}

	public void setQuantity(long quantity) {
		this.quantity = quantity;
	}

	public String getIsin() {
		return isin;
	}

	public BigDecimal getPrice() {
		return price;
	}

	public long getQuantity() {
		return quantity;
	}

	public String getCustomer() {
		return customer;
	}

	@Override
	public String toString() {
		return "Trade: [isin=" + this.isin + ",quantity=" + this.quantity + ",price=" + this.price + ",customer=" + this.customer + "]";
	}

	@Override
	public boolean equals(Object o) {
		if (!(o instanceof Trade)) {
			return false;
		}
		if (o == this) {
			return true;
		}
		Trade t = (Trade) o;
		return isin.equals(t.getIsin()) && quantity == t.getQuantity() && price.equals(t.getPrice()) && customer.equals(t.getCustomer());
	}

	/**
	 * Bug fix: reflectionHashCode mixed in {@code id} and {@code version},
	 * which equals() ignores, so two equal trades could have different hash
	 * codes (broken equals/hashCode contract). Hash exactly the fields that
	 * equals() compares.
	 */
	@Override
	public int hashCode() {
		return new HashCodeBuilder().append(isin).append(quantity).append(price).append(customer).toHashCode();
	}
}
|
#!/bin/sh
# author: Thus0
# last modified: 2022-02-05 08:39
#
# Copyright 2022 All rights reserved

# Keep-alive stub: block forever without consuming CPU, typically used as a
# container entrypoint so the container stays running.
# NOTE(review): consider 'exec tail -f /dev/null' so the shell is replaced
# and signals reach tail directly — confirm no trap handling is needed first.
# loop
tail -f /dev/null

# vim: set sw=4 ts=4 et:
|
import React from 'react';
import PropTypes from 'prop-types';
import classNames from 'classnames';
import ListSpacingContext from './contexts/listSpacing';
import icBEM from './utils/icBEM';
import prefixClass from './utils/prefixClass';
import anchored, {
anchoredPropTypes,
ANCHORED_PLACEMENT,
} from './mixins/anchored';
import closable from './mixins/closable';
import renderToLayer from './mixins/renderToLayer';
import './styles/Popover.scss';
// BEM root class name for the popover component.
export const COMPONENT_NAME = prefixClass('popover');
const ROOT_BEM = icBEM(COMPONENT_NAME);

// Pre-built BEM helpers for each element this component renders.
export const BEM = {
    root: ROOT_BEM,
    arrow: ROOT_BEM.element('arrow'),
    container: ROOT_BEM.element('container'),
};

// Root-element padding (px); subtracted from the anchored() mixin's
// remainingSpace when computing the container's maxHeight below.
const POPOVER_PADDING = 24;
function Popover({
onClick,
// from anchored()
placement,
arrowStyle,
nodeRef,
remainingSpace,
// from closable()
onInsideClick,
// React props
className,
children,
...otherProps
}) {
const bemClass = BEM.root.modifier(placement);
const rootClassName = classNames(bemClass.toString(), className);
/**
* The `remainingSpace` is the space for whole popover.
* What we want here is to always show keep `remainingSpace === popoverHeight`
* The `maxHeight` is for `BEM.container`, which doesn't include root class padding.
* So we need to minus POPOVER_PADDING here.
*/
const maxHeight = remainingSpace ? remainingSpace - POPOVER_PADDING : undefined;
const handleWrapperClick = (event) => {
onInsideClick(event);
onClick(event);
};
return (
<ListSpacingContext.Provider value={false}>
<div
role="presentation"
className={rootClassName}
ref={nodeRef}
onClick={handleWrapperClick}
{...otherProps}
>
<span className={BEM.arrow} style={arrowStyle} />
<div
className={BEM.container}
style={{ maxHeight }}
>
{children}
</div>
</div>
</ListSpacingContext.Provider>
);
}
Popover.propTypes = {
    onClick: PropTypes.func,
    // injected by the anchored() mixin
    placement: anchoredPropTypes.placement,
    arrowStyle: anchoredPropTypes.arrowStyle,
    nodeRef: anchoredPropTypes.nodeRef,
    remainingSpace: anchoredPropTypes.remainingSpace,
    // injected by the closable() mixin
    onInsideClick: PropTypes.func.isRequired,
};

Popover.defaultProps = {
    onClick: () => {},
    placement: ANCHORED_PLACEMENT.BOTTOM,
    arrowStyle: {},
    nodeRef: undefined,
    remainingSpace: undefined,
};
// Raw component without the renderToLayer/closable/anchored enhancers.
export { Popover as PurePopover };

// Public component: rendered into a separate layer, closed on Escape and on
// clicks inside or outside, and positioned by the anchored() mixin.
export default renderToLayer(
    closable({
        onEscape: true,
        onClickOutside: true,
        onClickInside: true,
    })(
        anchored()(Popover)
    )
);
|
#!/bin/bash

KERNEL_IMAGETYPE=zImage

# A target MACHINE is mandatory; it selects the device tree blobs below.
[ -n "${MACHINE}" ] || {
    echo "Environment variable MACHINE not set"
    echo "Example: export MACHINE=raspberrypi3 or export MACHINE=raspberrypi0-wifi"
    exit 1
}

# Map the Yocto MACHINE name to its set of device tree blobs.
case "${MACHINE}" in
    raspberrypi|raspberrypi0|raspberrypi0-wifi|raspberrypi-cm)
        DTBS="bcm2708-rpi-0-w.dtb \
              bcm2708-rpi-b.dtb \
              bcm2708-rpi-b-plus.dtb \
              bcm2708-rpi-cm.dtb"
        ;;
    raspberrypi2|raspberrypi3|raspberrypi-cm3)
        DTBS="bcm2709-rpi-2-b.dtb \
              bcm2710-rpi-3-b.dtb \
              bcm2710-rpi-cm3.dtb"
        ;;
    *)
        echo "Invalid MACHINE: ${MACHINE}"
        exit 1
        ;;
esac
# Firmware files the Raspberry Pi GPU bootloader expects on the FAT partition.
BOOTLDRFILES="bootcode.bin cmdline.txt config.txt \
fixup_cd.dat fixup.dat fixup_db.dat fixup_x.dat \
start_cd.elf start_db.elf start.elf start_x.elf"

# The target block device is the single required argument.
if [ -z "${1}" ]; then
    printf '\nUsage: %s <block device>\n\n' "${0}"
    exit 0
fi
# Files are staged through a fixed temporary mount point.
if [ ! -d /media/card ]; then
    echo "Temporary mount point [/media/card] not found"
    exit 1
fi

# Source directory: the current directory, or the Yocto deploy dir when
# OETMP is set.  Expansions are quoted so paths with spaces don't break.
if [ -z "$OETMP" ]; then
    printf '\nWorking from local directory\n'
    SRCDIR=.
else
    printf '\nOETMP: %s\n' "$OETMP"

    if [ ! -d "${OETMP}/deploy/images/${MACHINE}" ]; then
        echo "Directory not found: ${OETMP}/deploy/images/${MACHINE}"
        exit 1
    fi

    SRCDIR="${OETMP}/deploy/images/${MACHINE}"
fi
# Verify every required bootloader file was deployed before touching the card.
# ${BOOTLDRFILES} is intentionally unquoted: it is a whitespace-separated list.
for f in ${BOOTLDRFILES}; do
    if [ ! -f "${SRCDIR}/bcm2835-bootfiles/${f}" ]; then
        echo "Bootloader file not found: ${SRCDIR}/bcm2835-bootfiles/$f"
        exit 1
    fi
done

# At least one of the machine's device trees must exist.
have_one_dtb=0
for f in ${DTBS}; do
    if [ -f "${SRCDIR}/${KERNEL_IMAGETYPE}-${f}" ]; then
        have_one_dtb=1
    fi
done

if [ "$have_one_dtb" -eq 0 ]; then
    echo "No dtb found for this MACHINE $MACHINE and KERNEL_IMAGETYPE $KERNEL_IMAGETYPE"
    exit 1
fi

if [ ! -f "${SRCDIR}/${KERNEL_IMAGETYPE}" ]; then
    echo "Kernel file not found: ${SRCDIR}/${KERNEL_IMAGETYPE}"
    exit 1
fi
# Resolve the target partition: accept a full block-device path, otherwise
# try /dev/<name>1 (sdX style) then /dev/<name>p1 (mmcblk/nvme style).
if [ -b ${1} ]; then
    DEV=${1}
else
    DEV=/dev/${1}1
    if [ ! -b ${DEV} ]; then
        DEV=/dev/${1}p1
        if [ ! -b ${DEV} ]; then
            echo "Block device not found: /dev/${1}1 or /dev/${1}p1"
            exit 1
        fi
    fi
fi

# DESTRUCTIVE: reformats the partition as FAT32, labelled BOOT.
echo "Formatting FAT partition on ${DEV}"
sudo mkfs.vfat -F 32 ${DEV} -n BOOT

echo "Mounting ${DEV}"
sudo mount ${DEV} /media/card
# $? here is the mount status (comments/blank lines do not disturb it).
if [ "$?" -ne 0 ]; then
    echo "Error mounting ${DEV} at /media/card"
    exit 1
fi
# Install the GPU bootloader files onto the freshly formatted partition.
echo "Copying bootloader files"
sudo cp ${SRCDIR}/bcm2835-bootfiles/* /media/card
if [ $? -ne 0 ]; then
    echo "Error copying bootloader files"
    sudo umount ${DEV}
    exit 1
fi

# The firmware looks for device tree overlays in /overlays on the card.
echo "Creating overlay directory"
sudo mkdir /media/card/overlays
if [ $? -ne 0 ]; then
    echo "Error creating overlays directory"
    sudo umount ${DEV}
    exit 1
fi
# Copy the deployed overlay dtbos (only the symlinked, unversioned names).
# Bug fix: checking $? after 'done' only saw the status of the loop's last
# command, so cp failures on earlier iterations were silently ignored —
# track failure explicitly inside the loop instead.
echo "Copying overlay dtbos"
overlay_copy_failed=0
for f in ${SRCDIR}/${KERNEL_IMAGETYPE}-*.dtbo; do
    if [ -L "$f" ]; then
        sudo cp "$f" /media/card/overlays || overlay_copy_failed=1
    fi
done

if [ "$overlay_copy_failed" -ne 0 ]; then
    echo "Error copying overlays"
    sudo umount ${DEV}
    exit 1
fi
# Deployed overlays are named <imagetype>-<overlay>.dtbo; the firmware wants
# plain <overlay>.dtbo, so strip the image-type prefix.
# NOTE(review): this needs the perl-based 'rename' (regex form); the
# util-linux 'rename' takes different arguments — confirm which is installed.
echo "Stripping ${KERNEL_IMAGETYPE}- from overlay dtbos"
case "${KERNEL_IMAGETYPE}" in
    Image)
        sudo rename 's/Image-([\w\-]+).dtbo/$1.dtbo/' /media/card/overlays/*.dtbo
        ;;
    zImage)
        sudo rename 's/zImage-([\w\-]+).dtbo/$1.dtbo/' /media/card/overlays/*.dtbo
        ;;
    uImage)
        sudo rename 's/uImage-([\w\-]+).dtbo/$1.dtbo/' /media/card/overlays/*.dtbo
        ;;
esac
# $? is the status of whichever rename branch ran.
if [ $? -ne 0 ]; then
    echo "Error stripping overlays"
    sudo umount ${DEV}
    exit 1
fi
# Install each available device tree blob, dropping the image-type prefix;
# missing dtbs are skipped (at least one was verified to exist earlier).
echo "Copying dtbs"
for f in ${DTBS}; do
    if [ -f ${SRCDIR}/${KERNEL_IMAGETYPE}-${f} ]; then
        sudo cp ${SRCDIR}/${KERNEL_IMAGETYPE}-${f} /media/card/${f}
        if [ $? -ne 0 ]; then
            echo "Error copying dtb: $f"
            sudo umount ${DEV}
            exit 1
        fi
    fi
done

echo "Copying kernel"
sudo cp ${SRCDIR}/${KERNEL_IMAGETYPE} /media/card/${KERNEL_IMAGETYPE}
if [ $? -ne 0 ]; then
    echo "Error copying kernel"
    sudo umount ${DEV}
    exit 1
fi
# Optional u-boot support: if a u-boot binary was deployed, install it and
# its boot script (warn when the script is missing, since u-boot needs one).
if [ -f ${SRCDIR}/u-boot.bin ]; then
    echo "Copying u-boot.bin to card"
    sudo cp ${SRCDIR}/u-boot.bin /media/card
    if [ $? -ne 0 ]; then
        echo "Error copying u-boot"
        sudo umount ${DEV}
        exit 1
    fi

    if [ -f ${SRCDIR}/boot.scr ]; then
        echo "Copying boot.scr to card"
        sudo cp ${SRCDIR}/boot.scr /media/card
        if [ $? -ne 0 ]; then
            echo "Error copying boot.scr"
            sudo umount ${DEV}
            exit 1
        fi
    else
        echo "WARNING: No boot script found!"
    fi
fi

# Local config.txt / cmdline.txt in the working directory override the
# deployed copies (installed earlier with the bootloader files).
if [ -f ./config.txt ]; then
    echo "Copying local config.txt to card"
    sudo cp ./config.txt /media/card
    if [ $? -ne 0 ]; then
        echo "Error copying local config.txt to card"
        sudo umount ${DEV}
        exit 1
    fi
fi

if [ -f ./cmdline.txt ]; then
    echo "Copying local cmdline.txt to card"
    sudo cp ./cmdline.txt /media/card
    if [ $? -ne 0 ]; then
        echo "Error copying local cmdline.txt to card"
        sudo umount ${DEV}
        exit 1
    fi
fi

echo "Unmounting ${DEV}"
sudo umount ${DEV}

echo "Done"
|
import { Injectable, NotFoundException } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { AddTodoDto } from 'src/todo/Dto/AddTodotDo';
import { UpdateTodoDto } from 'src/todo/Dto/UpdateToDoDto';
import { TodoEntity } from 'src/todo/entities/todo.entity';
import { Brackets, Like, Repository } from 'typeorm';
import { StatusCritereDto } from './Dto/StatusCritereDto';
import { TodoStatusEnum } from './enums/todo-status';
import { Todo } from './Model/todo.model';
@Injectable()
export class ToDoService {
    constructor(
        @InjectRepository(TodoEntity)
        private todoRepository: Repository<TodoEntity>,
    ){}

    /**
     * Find a todo by id in an in-memory list.
     * @throws NotFoundException when no todo matches.
     */
    findTodo(id:number,toDo:Todo[]) :Todo {
        const todo = toDo.find(todo=>todo.id==id);
        if(!todo) throw new NotFoundException() ;
        else return todo ;
    }

    /** Fetch one todo from the database (bug fix: the result is now returned). */
    async getDbTodo(id:number) {
        return await this.todoRepository.findOne(id);
    }

    /** Fetch all todos from the database (bug fix: the result is now returned). */
    async getTodos() {
        return await this.todoRepository.find();
    }

    /** Page through todos, 5 per page (page is 1-based). */
    async getTodosWithPagination(page : number) {
        return await this.todoRepository.createQueryBuilder("todo")
            .take(5)
            .skip((page - 1) * 5 )
            .getMany()
            ;
    }

    /** Count todos grouped by status. */
    async getTodoStats() {
        const count = await this.todoRepository
            .createQueryBuilder("todo")
            .select( "status , count(todo.createdAt) as count" )
            .groupBy('status')
            .getRawMany() ;
        return count ;
    }

    /** Search todos whose name OR description contains the critere, AND match status. */
    async getTodoWithCritere(statusCritereDto :StatusCritereDto) {
        const queryBuilder = this.todoRepository.createQueryBuilder("todo");
        console.log(statusCritereDto) ;
        return await queryBuilder
            .where(new Brackets(qb =>{
                qb.where({ name: Like(`%${statusCritereDto.critere}%`) })
                qb.orWhere({ description: Like(`%${statusCritereDto.critere}%`) })
            }))
            .andWhere('todo.status = :status' , {status: statusCritereDto.status})
            .getMany();
    }

    /** Remove a todo from an in-memory list; throws NotFoundException if absent. */
    delete (toDos:Todo[],id:number){
        const todo = this.findTodo(id,toDos);
        const index =toDos.indexOf(todo);
        toDos.splice(index,1);
        return toDos;
    }

    /** Hard-delete a todo row from the database. */
    async deleteDbTodo (id:number){
        return await this.todoRepository.delete(id);
    }

    /** Soft-delete a todo row (TypeORM marks it deleted instead of removing it). */
    async softdelete(id: number) {
        return await this.todoRepository.softDelete(id);
    }

    /** Restore a previously soft-deleted todo. */
    async restoreTodo(id: number) {
        // NOTE(review): findOne may need { withDeleted: true } to load a
        // soft-deleted row — confirm with the TypeORM version in use.
        const todoToRecover = await this.todoRepository.findOne(id);
        // Bug fix: guard against calling recover(undefined) for unknown ids.
        if (!todoToRecover) throw new NotFoundException();
        return await this.todoRepository.recover(todoToRecover);
    }

    /** In-memory lookup; throws NotFoundException if absent. */
    getTodo(toDos:Todo[],id:number){
        return this.findTodo(id,toDos) ;
    }

    /** Update name/description of an in-memory todo; throws if absent. */
    updateTodo(toDos:Todo[],id:number,updatetodoDto:UpdateTodoDto){
        const todo = this.findTodo(id,toDos);
        const index =toDos.indexOf(todo);
        toDos[index].name=updatetodoDto.name ;
        toDos[index].description=updatetodoDto.description ;
        return toDos ;
    }

    /** Partial update of a database row. */
    async updateDbTodo(id:number,updatetodoDto:UpdateTodoDto){
        return await this.todoRepository.update(id, updatetodoDto);
    }

    /**
     * Append a new todo to an in-memory list with the next sequential id.
     * Bug fix: the previous code assigned through an uninitialized variable
     * (a guaranteed runtime TypeError) and crashed on an empty list.
     */
    addToDo(toDos:Todo[],addTodoDto:AddTodoDto){
        const nextId = toDos.length > 0 ? toDos[toDos.length-1].id + 1 : 1;
        const todo = {
            id: nextId,
            name: addTodoDto.name,
            description: addTodoDto.description,
        } as Todo;
        toDos.push(todo);
        return toDos;
    }

    /** Persist a new todo to the database. */
    addDbTodo(todo:AddTodoDto) : Promise<TodoEntity> {
        return this.todoRepository.save(todo);
    }
}
|
#!/bin/bash

# Enable nginx directory listings for the site, then apply the change.
SITE_CONF=/etc/nginx/sites-available/site

# Flip autoindex off -> on in place; abort if the edit fails (e.g. missing file).
sed -i 's/autoindex off/autoindex on/g' "$SITE_CONF" || exit 1

# Validate the configuration before reloading so a bad edit cannot take the
# server down.
nginx -t || exit 1
service nginx reload
<gh_stars>0
package main
import (
"bufio"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
)
// main imports stemcell release notes from stdin into the index directory
// given as the single command-line argument.
func main() {
	if len(os.Args) != 2 {
		// Fix: plain string instead of a no-verb fmt.Sprintf (go vet warning).
		panic("Wrong args: bosh-io-stemcells-index-dir(eg stemcells-index)")
	}

	err := StemcellNotes{StemcellsIndexDir: os.Args[1]}.Import(os.Stdin)
	if err != nil {
		panic(fmt.Sprintf("Failed: %s", err))
	}
}
// StemcellNotes writes stemcell release-note records into a
// stemcells-index directory tree (one notes.v1.yml per version).
type StemcellNotes struct {
	// StemcellsIndexDir is the root directory of the stemcells index.
	StemcellsIndexDir string
}

// key is the JSON shape of the left-hand side of an input line,
// eg {"VersionRaw":"3363.24"}.
type key struct {
	VersionRaw string
}

// noteVal is the JSON shape of the right-hand side of an input line.
type noteVal struct {
	Content string
}
// Import reads newline-delimited `{"VersionRaw":...} | {"Content":...}`
// pairs from data and writes each note to
// <StemcellsIndexDir>/<version>/notes.v1.yml. Blank lines are skipped.
// Bug fix: a final line without a trailing newline is no longer dropped
// (ReadString returns the partial line together with io.EOF).
func (r StemcellNotes) Import(data io.Reader) error {
	rd := bufio.NewReader(data)

	for {
		line, err := rd.ReadString('\n')
		if err != nil && err != io.EOF {
			return fmt.Errorf("Reading data: %s", err)
		}

		if len(strings.TrimSpace(line)) > 0 {
			if lineErr := r.importLine(line); lineErr != nil {
				return lineErr
			}
		}

		if err == io.EOF {
			break
		}
	}

	return nil
}

// importLine parses one "key | value" line and writes the note file.
func (r StemcellNotes) importLine(line string) error {
	pieces := strings.SplitN(line, "|", 2)
	if len(pieces) != 2 {
		return fmt.Errorf("Parsing line: '%s'", line)
	}

	pieces[0] = strings.TrimSpace(pieces[0])
	pieces[1] = strings.TrimSpace(pieces[1])

	var k key

	err := json.Unmarshal([]byte(pieces[0]), &k)
	if err != nil {
		return fmt.Errorf("Unmarshaling key: %s", pieces[0])
	}

	if len(k.VersionRaw) == 0 {
		return fmt.Errorf("Invalid key from '%s'", pieces[0])
	}

	fmt.Printf("[%#v] processing\n", k)

	var val noteVal

	err = json.Unmarshal([]byte(pieces[1]), &val)
	if err != nil {
		return fmt.Errorf("Unmarshaling val: %s: %s", pieces[1], err)
	}

	path := filepath.Join(r.StemcellsIndexDir, k.VersionRaw)

	err = os.MkdirAll(path, os.ModePerm)
	if err != nil {
		return fmt.Errorf("Creating dir: %s", err)
	}

	sourcePath := filepath.Join(path, "notes.v1.yml")

	content, err := json.MarshalIndent(val, "", " ")
	if err != nil {
		return fmt.Errorf("Marshaling val: %s", err)
	}

	// content is already []byte; no conversion needed.
	err = ioutil.WriteFile(sourcePath, content, 0644)
	if err != nil {
		return fmt.Errorf("Writing file: path=%s %s", sourcePath, err)
	}

	return nil
}
|
# Build the editor app image from its Dockerfile, using the repo root as the
# build context.
# NOTE(review): the original comment said "base image" but the tag is the app
# image — confirm which this is meant to be.
docker build -f dockerfiles/dockerfile-app -t sandbox-editor-app .
|
#!/bin/sh

# Extract selected MEDLINE fields (TI, MH, JT, DCOM, PMID) and their
# continuation lines from $1 into $1.melodi.
# Fixes: quote all $file expansions (paths with spaces), and keep awk
# comments free of single quotes — the original comments contained 'l',
# which silently split the single-quoted awk program.
file="$1"
touch "$file.melodi"
echo "Reading file - $file"

awk -v file="$file.melodi" -F " " 'BEGIN{l="f"}
    # Field of interest: set flag l and print the line.
    { if ($1 ~ /^TI/ || $1 ~ /^MH$/ || $1 ~ /^JT/ || $1 ~ /^DCOM/ || $1 ~ /^PMID/) {l="t"; print $0 > file; next};
    # Continuation lines (leading spaces) following a field of interest.
    if (l=="t" && $0 ~ /^ /) {print $0 > file; l="t";}
    else{l="f";}
    }' "$file"

echo "Creating output file - $file.melodi.gz"
# NOTE(review): gzip is disabled, so the .gz announced above is not actually
# produced — confirm whether compression should be re-enabled.
#gzip $file.melodi
#!/bin/sh
# (c) Copyright 2009 - 2010 Xilinx, Inc. All rights reserved.
#
# This file contains confidential and proprietary information
# of Xilinx, Inc. and is protected under U.S. and
# international copyright and other intellectual property
# laws.
#
# DISCLAIMER
# This disclaimer is not a license and does not grant any
# rights to the materials distributed herewith. Except as
# otherwise provided in a valid license issued to you by
# Xilinx, and to the maximum extent permitted by applicable
# law: (1) THESE MATERIALS ARE MADE AVAILABLE "AS IS" AND
# WITH ALL FAULTS, AND XILINX HEREBY DISCLAIMS ALL WARRANTIES
# AND CONDITIONS, EXPRESS, IMPLIED, OR STATUTORY, INCLUDING
# BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, NON-
# INFRINGEMENT, OR FITNESS FOR ANY PARTICULAR PURPOSE; and
# (2) Xilinx shall not be liable (whether in contract or tort,
# including negligence, or under any other theory of
# liability) for any loss or damage of any kind or nature
# related to, arising under or in connection with these
# materials, including for any direct, or any indirect,
# special, incidental, or consequential loss or damage
# (including loss of data, profits, goodwill, or any type of
# loss or damage suffered as a result of any action brought
# by a third party) even if such damage or loss was
# reasonably foreseeable or Xilinx had been advised of the
# possibility of the same.
#
# CRITICAL APPLICATIONS
# Xilinx products are not designed or intended to be fail-
# safe, or for use in any application requiring fail-safe
# performance, such as life-support or safety devices or
# systems, Class III medical devices, nuclear facilities,
# applications related to the deployment of airbags, or any
# other applications that could lead to death, personal
# injury, or severe property or environmental damage
# (individually and collectively, "Critical
# Applications"). Customer assumes the sole risk and
# liability of any use of Xilinx products in Critical
# Applications, subject only to applicable laws and
# regulations governing limitations on product liability.
#
# THIS COPYRIGHT NOTICE AND DISCLAIMER MUST BE RETAINED AS
# PART OF THIS FILE AT ALL TIMES.
# Xilinx ISE implementation flow for the dist_mem_gen example design:
# synthesize with Synplify, then ngdbuild -> map -> par -> trce -> bitgen,
# and finally netgen for a gate-level simulation model.

# Clean up the results directory
rm -rf results
mkdir results

#Synthesize the Wrapper Files
echo 'Synthesizing example design with Synplify'
synplify_pro -batch synplify.prj -licensetype synplifypro_xilinx

# Copy the netlist generated by Coregen
echo 'Copying files from the netlist directory to the results directory'
cp ../../dist_mem_gen_v7_2.ngc results/

# Copy the constraints files generated by Coregen
echo 'Copying files from constraints directory to results directory'
cp ../example_design/dist_mem_gen_v7_2_exdes.ucf results/

# All implementation outputs are produced inside results/.
cd results

echo 'Running ngdbuild'
ngdbuild -p xc3s50-pq208-5 -sd ../../../ dist_mem_gen_v7_2_exdes

echo 'Running map'
map dist_mem_gen_v7_2_exdes -o mapped.ncd

echo 'Running par'
par mapped.ncd routed.ncd

echo 'Running trce'
trce -e 10 routed.ncd mapped.pcf -o routed

echo 'Running design through bitgen'
bitgen -w routed

echo 'Running netgen to create gate level Verilog model'
netgen -ofmt verilog -sim -tm dist_mem_gen_v7_2_exdes -pcf mapped.pcf -w -sdf_anno false routed.ncd routed.v
|
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Metadata for a single slicing task; currently just its identifier.
 */
class SlicingTaskInfo {
    // Unique identifier of the task (a UUID string in the demo below).
    private String taskId;

    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    public String getTaskId() {
        return taskId;
    }
}
/**
 * Registry of slicing tasks keyed by task id. A task added with a duplicate
 * id replaces the previous entry; lookups for unknown ids return null.
 */
class SlicingTaskList {
    // Backing store, initialized at declaration instead of in the constructor.
    private final Map<String, SlicingTaskInfo> taskMap = new HashMap<>();

    public SlicingTaskList() {
    }

    public void addSlicingTask(SlicingTaskInfo slicingTaskInfo) {
        taskMap.put(slicingTaskInfo.getTaskId(), slicingTaskInfo);
    }

    public SlicingTaskInfo getSlicingTask(String taskId) {
        return taskMap.get(taskId);
    }

    public void removeSlicingTask(String taskId) {
        taskMap.remove(taskId);
    }
}
public class Main {
public static void main(String[] args) {
SlicingTaskList slicingTaskList = new SlicingTaskList();
List<SlicingTaskInfo> slicingTaskInfoList = new ArrayList<>();
SlicingTaskInfo slicingTaskInfo = new SlicingTaskInfo();
slicingTaskInfo.setTaskId("03af656e-7602-47dd-97ee-0c7ca5c39cab");
slicingTaskList.addSlicingTask(slicingTaskInfo);
SlicingTaskInfo retrievedTask = slicingTaskList.getSlicingTask("03af656e-7602-47dd-97ee-0c7ca5c39cab");
System.out.println("Retrieved Task ID: " + retrievedTask.getTaskId());
slicingTaskList.removeSlicingTask("03af656e-7602-47dd-97ee-0c7ca5c39cab");
}
} |
module WOZLLA.utils {
export interface Poolable {
isPoolable:boolean;
release();
}
export class ObjectPool<T extends Poolable> {
_minCount;
_factory;
_pool:Array<T>;
constructor(minCount:number, factory:()=>T) {
this._minCount = minCount;
this._factory = factory;
this._pool = [];
for(var i=0; i<this._minCount; i++) {
this._pool.push(this._factory());
}
}
retain():T {
var object:T = this._pool.shift();
if(object) {
return object;
}
return this._factory();
}
release(obj:T) {
if(this._pool.indexOf(obj) !== -1) {
return;
}
this._pool.push(obj);
}
}
} |
<gh_stars>0
/************************************************************
Copyright (c) 1993 by Silicon Graphics Computer Systems, Inc.
Permission to use, copy, modify, and distribute this
software and its documentation for any purpose and without
fee is hereby granted, provided that the above copyright
notice appear in all copies and that both that copyright
notice and this permission notice appear in supporting
documentation, and that the name of Silicon Graphics not be
used in advertising or publicity pertaining to distribution
of the software without specific prior written permission.
Silicon Graphics makes no representation about the suitability
of this software for any purpose. It is provided "as is"
without any express or implied warranty.
SILICON GRAPHICS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL SILICON
GRAPHICS BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL
DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH
THE USE OR PERFORMANCE OF THIS SOFTWARE.
********************************************************/
#define NEED_MAP_READERS
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "Xlibint.h"
#include <X11/extensions/XKBproto.h>
#include "XKBlibint.h"
/*
 * Parse the key-types section of an XkbGetMap reply from 'buf' into
 * xkb->map->types.  Grows the client map's type array when the server
 * reports more types than are allocated, and (re)sizes each type's
 * map/preserve arrays to match the wire data.
 * Returns Success, BadAlloc on allocation failure, or BadLength when the
 * read buffer runs out of data.
 */
static Status
_XkbReadKeyTypes(XkbReadBufferPtr buf,XkbDescPtr xkb,xkbGetMapReply *rep)
{
    int i,n,lastMapCount;
    XkbKeyTypePtr type;

    if ( rep->nTypes>0 ) {
        /* Make sure the client map can hold all reported types. */
        n = rep->firstType+rep->nTypes;
        if (xkb->map->num_types>=n)
            n= xkb->map->num_types;
        else if (XkbAllocClientMap(xkb,XkbKeyTypesMask,n)!=Success)
            return BadAlloc;
        type = &xkb->map->types[rep->firstType];
        for (i=0;i<(int)rep->nTypes;i++,type++) {
            xkbKeyTypeWireDesc *desc;
            register int ndx;

            ndx= i+rep->firstType;
            if (ndx>=xkb->map->num_types)
                xkb->map->num_types= ndx+1;
            desc= (xkbKeyTypeWireDesc *)_XkbGetReadBufferPtr(buf,
                                                 SIZEOF(xkbKeyTypeWireDesc));
            if (desc==NULL)
                return BadLength;
            lastMapCount= type->map_count;
            if ( desc->nMapEntries>0 ) {
                /* Grow (or create) the map-entry array; on realloc failure
                 * free the old block so it is not leaked. */
                if ((type->map==NULL)||(desc->nMapEntries>type->map_count)) {
                    XkbKTMapEntryRec *prev_map = type->map;
                    type->map= _XkbTypedRealloc(type->map,desc->nMapEntries,
                                                XkbKTMapEntryRec);
                    if (type->map==NULL) {
                        _XkbFree(prev_map);
                        return BadAlloc;
                    }
                }
            }
            else if (type->map!=NULL) {
                /* Server reports no map entries for this type any more. */
                Xfree(type->map);
                type->map_count= 0;
                type->map= NULL;
            }
            if ( desc->preserve && (desc->nMapEntries>0) ) {
                if ((!type->preserve)||
                    (desc->nMapEntries>lastMapCount)) {
                    XkbModsRec *prev_preserve = type->preserve;
                    type->preserve= _XkbTypedRealloc(type->preserve,
                                                     desc->nMapEntries,
                                                     XkbModsRec);
                    if (type->preserve==NULL) {
                        _XkbFree(prev_preserve);
                        return BadAlloc;
                    }
                }
            }
            else if (type->preserve!=NULL) {
                Xfree(type->preserve);
                type->preserve= NULL;
            }
            /* Fixed-size part of the wire description. */
            type->mods.mask = desc->mask;
            type->mods.real_mods = desc->realMods;
            type->mods.vmods = desc->virtualMods;
            type->num_levels = desc->numLevels;
            type->map_count = desc->nMapEntries;
            if (desc->nMapEntries>0) {
                register xkbKTMapEntryWireDesc *wire;
                register XkbKTMapEntryPtr entry;
                register int size;

                size= type->map_count*SIZEOF(xkbKTMapEntryWireDesc);
                wire= (xkbKTMapEntryWireDesc *)_XkbGetReadBufferPtr(buf,size);
                if (wire==NULL)
                    return BadLength;
                entry= type->map;
                for (n=0;n<type->map_count;n++,wire++,entry++) {
                    entry->active= wire->active;
                    entry->level= wire->level;
                    entry->mods.mask= wire->mask;
                    entry->mods.real_mods= wire->realMods;
                    entry->mods.vmods= wire->virtualMods;
                }
                if (desc->preserve) {
                    /* Preserve entries follow the map entries on the wire. */
                    register xkbModsWireDesc * pwire;
                    register XkbModsPtr preserve;
                    register int sz;

                    sz= desc->nMapEntries*SIZEOF(xkbModsWireDesc);
                    pwire=(xkbModsWireDesc *)_XkbGetReadBufferPtr(buf,sz);
                    if (pwire==NULL)
                        return BadLength;
                    preserve= type->preserve;
                    for (n=0;n<desc->nMapEntries;n++,pwire++,preserve++) {
                        preserve->mask= pwire->mask;
                        preserve->vmods= pwire->virtualMods;
                        preserve->real_mods= pwire->realMods;
                    }
                }
            }
        }
    }
    return Success;
}
/*
 * Parse the key-symbols section of an XkbGetMap reply into xkb->map.
 * Two paths: (1) first fill — allocate key_sym_map and the flat 'syms'
 * array, appending each key's syms at increasing offsets; (2) update —
 * resize per-key storage via XkbResizeKeySyms and overwrite in place.
 */
static Status
_XkbReadKeySyms(XkbReadBufferPtr buf,XkbDescPtr xkb,xkbGetMapReply *rep)
{
    register int i;
    XkbClientMapPtr map;

    map= xkb->map;
    if (map->key_sym_map==NULL) {
        register int offset;
        XkbSymMapPtr oldMap;
        xkbSymMapWireDesc *newMap;

        map->key_sym_map= _XkbTypedCalloc((xkb->max_key_code+1),XkbSymMapRec);
        if (map->key_sym_map==NULL)
            return BadAlloc;
        if (map->syms==NULL) {
            int sz;
            /* Over-allocate by ~20% and round up to a multiple of 128 to
             * limit reallocations while copying. */
            sz= (rep->totalSyms*12)/10;
            sz= ((sz+(unsigned)128)/128)*128;
            map->syms = _XkbTypedCalloc(sz,KeySym);
            if (map->syms==NULL)
                return BadAlloc;
            map->size_syms = sz;
        }
        /* Per-key syms start at index 1; index 0 is left unused. */
        offset = 1;
        oldMap = &map->key_sym_map[rep->firstKeySym];
        for (i=0;i<(int)rep->nKeySyms;i++,oldMap++) {
            newMap= (xkbSymMapWireDesc *)
                _XkbGetReadBufferPtr(buf,SIZEOF(xkbSymMapWireDesc));
            if (newMap==NULL)
                return BadLength;
            oldMap->kt_index[0]= newMap->ktIndex[0];
            oldMap->kt_index[1]= newMap->ktIndex[1];
            oldMap->kt_index[2]= newMap->ktIndex[2];
            oldMap->kt_index[3]= newMap->ktIndex[3];
            oldMap->group_info= newMap->groupInfo;
            oldMap->width= newMap->width;
            oldMap->offset= offset;
            if (offset+newMap->nSyms>=map->size_syms) {
                /* Grow the flat KeySym array in 128-entry steps; free the
                 * old block if realloc fails. */
                register int sz;
                KeySym *prev_syms = map->syms;
                sz= map->size_syms+128;
                map->syms= _XkbTypedRealloc(map->syms,sz,KeySym);
                if (map->syms==NULL) {
                    _XkbFree(prev_syms);
                    map->size_syms= 0;
                    return BadAlloc;
                }
                map->size_syms= sz;
            }
            if (newMap->nSyms>0) {
                _XkbReadBufferCopyKeySyms(buf,(KeySym *)&map->syms[offset],
                                          newMap->nSyms);
                offset+= newMap->nSyms;
            }
            else {
                map->syms[offset]= 0;
            }
        }
        map->num_syms= offset;
    }
    else {
        /* Update path: storage exists; resize per key and overwrite. */
        xkbSymMapWireDesc * newMap;
        XkbSymMapPtr oldMap;
        KeySym * newSyms;
        int tmp;

        oldMap = &map->key_sym_map[rep->firstKeySym];
        for (i=0;i<(int)rep->nKeySyms;i++,oldMap++) {
            newMap= (xkbSymMapWireDesc *)
                _XkbGetReadBufferPtr(buf,SIZEOF(xkbSymMapWireDesc));
            if (newMap==NULL)
                return BadLength;
            if (newMap->nSyms>0)
                tmp= newMap->nSyms;
            else tmp= 0;
            newSyms = XkbResizeKeySyms(xkb,i+rep->firstKeySym,tmp);
            if (newSyms==NULL)
                return BadAlloc;
            if (newMap->nSyms>0)
                _XkbReadBufferCopyKeySyms(buf,newSyms,newMap->nSyms);
            else newSyms[0]= NoSymbol;
            oldMap->kt_index[0] = newMap->ktIndex[0];
            oldMap->kt_index[1] = newMap->ktIndex[1];
            oldMap->kt_index[2] = newMap->ktIndex[2];
            oldMap->kt_index[3] = newMap->ktIndex[3];
            oldMap->group_info = newMap->groupInfo;
            oldMap->width = newMap->width;
        }
    }
    return Success;
}
/*
 * Parse the key-actions section: the wire carries one count byte per key
 * (padded to a 4-byte boundary) followed by the packed actions for keys
 * with non-zero counts.
 */
static Status
_XkbReadKeyActions(XkbReadBufferPtr buf,XkbDescPtr info,xkbGetMapReply *rep)
{
    int i;
    CARD8 numDescBuf[248];
    CARD8* numDesc = NULL;
    register int nKeyActs;
    Status ret = Success;

    if ( (nKeyActs=rep->nKeyActs)>0 ) {
        XkbSymMapPtr symMap;

        /* Small counts use the stack buffer; larger ones go to the heap. */
        if (nKeyActs < sizeof numDescBuf) numDesc = numDescBuf;
        else numDesc = Xmalloc (nKeyActs * sizeof(CARD8));
        /* Bug fix: Xmalloc can fail; previously a NULL numDesc was passed
         * straight into the copy below. */
        if (numDesc == NULL) {
            ret = BadAlloc;
            goto done;
        }
        if (!_XkbCopyFromReadBuffer(buf, (char *)numDesc, nKeyActs)) {
            ret = BadLength;
            goto done;
        }
        /* Skip the pad bytes that follow the per-key counts. */
        i= XkbPaddedSize(nKeyActs)-nKeyActs;
        if ((i>0)&&(!_XkbSkipReadBufferData(buf,i))) {
            ret = BadLength;
            goto done;
        }
        symMap = &info->map->key_sym_map[rep->firstKeyAct];
        for (i=0;i<(int)rep->nKeyActs;i++,symMap++) {
            if (numDesc[i]==0) {
                info->server->key_acts[i+rep->firstKeyAct]= 0;
            }
            else {
                XkbAction *newActs;
                /* 8/16/93 (ef) -- XXX! Verify size here (numdesc must be */
                /* either zero or XkbKeyNumSyms(info,key) */
                newActs=XkbResizeKeyActions(info,i+rep->firstKeyAct,
                                            numDesc[i]);
                if (newActs==NULL) {
                    ret = BadAlloc;
                    goto done;
                }
                if (!_XkbCopyFromReadBuffer(buf,(char *)newActs,
                                        (int)(numDesc[i]*sizeof(XkbAction)))) {
                    ret = BadLength;
                    goto done;
                }
            }
        }
    }
done:
    /* Free the count array only when it came from the heap. */
    if (numDesc != NULL && numDesc != numDescBuf) Xfree (numDesc);
    return ret;
}
/*
 * Parse per-key behaviors: lazily allocate the behaviors array (one slot
 * per possible keycode), otherwise clear just the replaced range, then
 * apply each (key, type, data) wire record.
 */
static Status
_XkbReadKeyBehaviors(XkbReadBufferPtr buf,XkbDescPtr xkb,xkbGetMapReply *rep)
{
    register int i;
    xkbBehaviorWireDesc *wire;

    if ( rep->totalKeyBehaviors>0 ) {
        if ( xkb->server->behaviors == NULL ) {
            int size = xkb->max_key_code+1;
            xkb->server->behaviors = _XkbTypedCalloc(size,XkbBehavior);
            if (xkb->server->behaviors==NULL)
                return BadAlloc;
        }
        else {
            /* Clear only the range this reply replaces. */
            bzero(&xkb->server->behaviors[rep->firstKeyBehavior],
                  (rep->nKeyBehaviors*sizeof(XkbBehavior)));
        }
        for (i=0;i<rep->totalKeyBehaviors;i++) {
            wire= (xkbBehaviorWireDesc *)_XkbGetReadBufferPtr(buf,
                                                SIZEOF(xkbBehaviorWireDesc));
            if (wire==NULL)
                return BadLength;
            xkb->server->behaviors[wire->key].type= wire->type;
            xkb->server->behaviors[wire->key].data= wire->data;
        }
    }
    return Success;
}
/*
 * Parse virtual modifier values: rep->virtualMods is a bitmask of which
 * vmods are present; the wire carries one byte per set bit, padded to a
 * 4-byte boundary.
 */
static Status
_XkbReadVirtualMods(XkbReadBufferPtr buf,XkbDescPtr xkb,xkbGetMapReply *rep)
{
    if ( rep->virtualMods ) {
        register int i,bit,nVMods;
        register char *data;

        /* Count how many vmod bytes follow. */
        for (i=nVMods=0,bit=1;i<XkbNumVirtualMods;i++,bit<<=1) {
            if (rep->virtualMods&bit)
                nVMods++;
        }
        data= _XkbGetReadBufferPtr(buf,XkbPaddedSize(nVMods));
        if (data==NULL)
            return BadLength;
        /* Assign each present byte to its vmod slot, in bit order. */
        for (i=0,bit=1;(i<XkbNumVirtualMods)&&(nVMods>0);i++,bit<<=1) {
            if (rep->virtualMods&bit) {
                xkb->server->vmods[i]= *data++;
                nVMods--;
            }
        }
    }
    return Success;
}
/*
 * Parse the explicit-components section: (keycode, explicit-mask) byte
 * pairs, padded to a 4-byte boundary.
 */
static Status
_XkbReadExplicitComponents( XkbReadBufferPtr buf,
                            XkbDescPtr xkb,
                            xkbGetMapReply * rep)
{
    register int i;
    unsigned char *wire;

    if ( rep->totalKeyExplicit>0 ) {
        if ( xkb->server->explicit == NULL ) {
            /* Lazily allocate one mask byte per possible keycode. */
            int size = xkb->max_key_code+1;
            xkb->server->explicit = _XkbTypedCalloc(size,unsigned char);
            if (xkb->server->explicit==NULL)
                return BadAlloc;
        }
        else {
            /* Clear only the range this reply replaces. */
            bzero(&xkb->server->explicit[rep->firstKeyExplicit],
                  rep->nKeyExplicit);
        }
        i= XkbPaddedSize(2*rep->totalKeyExplicit);
        wire=(unsigned char *)_XkbGetReadBufferPtr(buf,i);
        if (!wire)
            return BadLength;
        for (i=0;i<rep->totalKeyExplicit;i++,wire+=2) {
            /* wire[0] = keycode, wire[1] = explicit override mask. */
            xkb->server->explicit[wire[0]]= wire[1];
        }
    }
    return Success;
}
/*
 * Parse the modifier-map section: (keycode, modifier-mask) byte pairs,
 * padded to a 4-byte boundary.
 */
static Status
_XkbReadModifierMap(XkbReadBufferPtr buf,XkbDescPtr xkb,xkbGetMapReply *rep)
{
    register int i;
    unsigned char *wire;

    if ( rep->totalModMapKeys>0 ) {
        if ((xkb->map->modmap==NULL)&&
            (XkbAllocClientMap(xkb,XkbModifierMapMask,0)!=Success)) {
            return BadAlloc;
        }
        else {
            /* Clear only the range this reply replaces. */
            bzero(&xkb->map->modmap[rep->firstModMapKey],rep->nModMapKeys);
        }
        i= XkbPaddedSize(2*rep->totalModMapKeys);
        wire=(unsigned char *)_XkbGetReadBufferPtr(buf,i);
        if (!wire)
            return BadLength;
        for (i=0;i<rep->totalModMapKeys;i++,wire+=2) {
            /* wire[0] = keycode, wire[1] = modifier mask. */
            xkb->map->modmap[wire[0]]= wire[1];
        }
    }
    return Success;
}
/*
 * Parse the virtual-modifier-map section: per-key vmod masks delivered as
 * xkbVModMapWireDesc records.  Entries whose keycode falls outside
 * [min_key_code, max_key_code] are ignored.
 */
static Status
_XkbReadVirtualModMap(XkbReadBufferPtr buf,XkbDescPtr xkb,xkbGetMapReply *rep)
{
    register int i;
    xkbVModMapWireDesc * wire;
    XkbServerMapPtr srv;

    if ( rep->totalVModMapKeys>0 ) {
        if (((xkb->server==NULL)||(xkb->server->vmodmap==NULL))&&
            (XkbAllocServerMap(xkb,XkbVirtualModMapMask,0)!=Success)) {
            return BadAlloc;
        }
        else {
            srv= xkb->server;
            /* NOTE(review): this cleared length subtracts an index from a
             * count, unlike the other sections which clear nKeys entries
             * starting at firstKey; preserved as-is — confirm against the
             * upstream libX11 source. */
            if (rep->nVModMapKeys > rep->firstVModMapKey)
                bzero((char *)&srv->vmodmap[rep->firstVModMapKey],
                      (rep->nVModMapKeys - rep->firstVModMapKey) *
                      sizeof(unsigned short));
        }
        srv= xkb->server;
        i= rep->totalVModMapKeys*SIZEOF(xkbVModMapWireDesc);
        wire=(xkbVModMapWireDesc *)_XkbGetReadBufferPtr(buf,i);
        if (!wire)
            return BadLength;
        for (i=0;i<rep->totalVModMapKeys;i++,wire++) {
            if ((wire->key>=xkb->min_key_code)&&(wire->key<=xkb->max_key_code))
                srv->vmodmap[wire->key]= wire->vmods;
        }
    }
    return Success;
}
/*
 * Allocate and zero-initialize an XkbGetMap request for xkb's device.
 * Callers set only the first/count fields for the sections they want
 * before the reply is read.  Must be called with the display locked.
 */
static xkbGetMapReq *
_XkbGetGetMapReq(Display *dpy,XkbDescPtr xkb)
{
    xkbGetMapReq *req;

    GetReq(kbGetMap, req);
    req->reqType = dpy->xkb_info->codes->major_opcode;
    req->xkbReqType = X_kbGetMap;
    req->deviceSpec = xkb->device_spec;
    /* Request nothing by default; callers opt in per section. */
    req->full = req->partial = 0;
    req->firstType = req->nTypes = 0;
    req->firstKeySym = req->nKeySyms = 0;
    req->firstKeyAct = req->nKeyActs = 0;
    req->firstKeyBehavior = req->nKeyBehaviors = 0;
    req->virtualMods = 0;
    req->firstKeyExplicit = req->nKeyExplicit = 0;
    req->firstModMapKey = req->nModMapKeys = 0;
    req->firstVModMapKey = req->nVModMapKeys = 0;
    return req;
}
/*
 * Parse a complete XkbGetMap reply: allocate the client/server maps the
 * reply says are present, then read every section in wire order.
 * Returns Success, BadAlloc, or BadLength when the reply length does not
 * match the data actually parsed.  If nread_rtrn is non-NULL it receives
 * the number of extra bytes consumed from the stream.
 */
Status
_XkbReadGetMapReply( Display * dpy,
                     xkbGetMapReply *rep,
                     XkbDescPtr xkb,
                     int * nread_rtrn)
{
    int extraData;
    unsigned mask;

    /* Bind the desc to the actual device when created with UseCoreKbd. */
    if ( xkb->device_spec == XkbUseCoreKbd )
        xkb->device_spec= rep->deviceID;
    xkb->min_key_code = rep->minKeyCode;
    xkb->max_key_code = rep->maxKeyCode;
    if (!xkb->map) {
        mask= rep->present&XkbAllClientInfoMask;
        if (mask&&(XkbAllocClientMap(xkb,mask,rep->nTypes)!=Success))
            return BadAlloc;
    }
    if (!xkb->server) {
        mask= rep->present&XkbAllServerInfoMask;
        if (mask&&(XkbAllocServerMap(xkb,mask,rep->totalActs)!=Success))
            return BadAlloc;
    }
    /* Bytes remaining after the fixed-size reply header. */
    extraData= (int)(rep->length*4);
    extraData-= (SIZEOF(xkbGetMapReply)-SIZEOF(xGenericReply));
    if (rep->length) {
        XkbReadBufferRec buf;
        int left;
        if (_XkbInitReadBuffer(dpy,&buf,extraData)) {
            Status status= Success;
            if (nread_rtrn!=NULL)
                *nread_rtrn= extraData;
            /* Sections must be consumed in the server's wire order. */
            if (status==Success)
                status= _XkbReadKeyTypes(&buf,xkb,rep);
            if (status==Success)
                status= _XkbReadKeySyms(&buf,xkb,rep);
            if (status==Success)
                status= _XkbReadKeyActions(&buf,xkb,rep);
            if (status==Success)
                status= _XkbReadKeyBehaviors(&buf,xkb,rep);
            if (status==Success)
                status= _XkbReadVirtualMods(&buf,xkb,rep);
            if (status==Success)
                status= _XkbReadExplicitComponents(&buf,xkb,rep);
            if (status==Success)
                status= _XkbReadModifierMap(&buf,xkb,rep);
            if (status==Success)
                status= _XkbReadVirtualModMap(&buf,xkb,rep);
            /* Leftover bytes mean the reply and parsed data disagree. */
            left= _XkbFreeReadBuffer(&buf);
            if (status!=Success) return status;
            else if ( left || buf.error ) return BadLength;
        }
        else return BadAlloc;
    }
    return Success;
}
/* Read the fixed-size XkbGetMap reply and hand its variable payload to
 * _XkbReadGetMapReply. Returns BadImplementation when no reply arrives. */
static Status
_XkbHandleGetMapReply(Display *dpy,XkbDescPtr xkb)
{
    xkbGetMapReply reply;
    /* Size of the fixed part beyond the generic header, in 4-byte units. */
    int fixedWords = (SIZEOF(xkbGetMapReply) - SIZEOF(xGenericReply)) >> 2;

    if (_XReply(dpy, (xReply *) &reply, fixedWords, xFalse))
        return _XkbReadGetMapReply(dpy, &reply, xkb, NULL);
    return BadImplementation;
}
/* Fetch the map components named by `which` from the server and merge them
 * into `xkb`. A zero `which` is a no-op returning Success. */
Status
XkbGetUpdatedMap(Display *dpy,unsigned which,XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status rtrn;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;
    if (!which)
        return Success;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->full = which;
    rtrn = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return rtrn;
}
/* Allocate a keyboard description for `deviceSpec` and, when `which` is
 * non-zero, populate it from the server. Returns NULL on allocation or
 * request failure (all partial allocations are released). */
XkbDescPtr
XkbGetMap(Display *dpy,unsigned which,unsigned deviceSpec)
{
    XkbDescPtr xkb = _XkbTypedCalloc(1, XkbDescRec);

    if (xkb == NULL)
        return NULL;
    xkb->device_spec = deviceSpec;
    xkb->map = _XkbTypedCalloc(1, XkbClientMapRec);
    if ((xkb->map == NULL) ||
        ((which) && (XkbGetUpdatedMap(dpy, which, xkb) != Success))) {
        /* Roll back the partial allocation on any failure. */
        if (xkb->map) {
            Xfree(xkb->map);
            xkb->map = NULL;
        }
        Xfree(xkb);
        return NULL;
    }
    xkb->dpy = dpy;
    return xkb;
}
/* Fetch `num` key types starting at index `first` into `xkb`.
 * Returns BadAccess if XKB is unavailable, BadValue for an empty or
 * oversized range, otherwise the reply-handling status. */
Status
XkbGetKeyTypes(Display *dpy,unsigned first,unsigned num,XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status rtrn;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;
    /* Validate the range before touching the wire. */
    if ((num < 1) || (num > XkbMaxKeyTypes))
        return BadValue;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->firstType = first;
    req->nTypes = num;
    rtrn = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return rtrn;
}
/* Fetch the key actions for `num` keys starting at keycode `first`.
 * Returns BadAccess if XKB is unavailable, BadValue for an empty or
 * oversized range, otherwise the reply-handling status. */
Status
XkbGetKeyActions(Display *dpy,unsigned first,unsigned num,XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status rtrn;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;
    /* Validate the range before touching the wire. */
    if ((num < 1) || (num > XkbMaxKeyCount))
        return BadValue;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->firstKeyAct = first;
    req->nKeyActs = num;
    rtrn = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return rtrn;
}
/* Fetch the keysym bindings for `num` keys starting at keycode `first`.
 * Returns BadAccess if XKB is unavailable, BadValue for an empty or
 * oversized range, otherwise the reply-handling status. */
Status
XkbGetKeySyms(Display *dpy,unsigned first,unsigned num,XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status rtrn;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;
    /* Validate the range before touching the wire. */
    if ((num < 1) || (num > XkbMaxKeyCount))
        return BadValue;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->firstKeySym = first;
    req->nKeySyms = num;
    rtrn = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return rtrn;
}
/* Fetch the key behaviors for `num` keys starting at keycode `first`.
 * Returns BadAccess if XKB is unavailable, BadValue for an empty or
 * oversized range, otherwise the reply-handling status. */
Status
XkbGetKeyBehaviors(Display *dpy,unsigned first,unsigned num,XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status rtrn;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;
    /* Validate the range before touching the wire. */
    if ((num < 1) || (num > XkbMaxKeyCount))
        return BadValue;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->firstKeyBehavior = first;
    req->nKeyBehaviors = num;
    rtrn = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return rtrn;
}
/* Fetch the bindings for the virtual modifiers selected by `which`.
 * Returns BadAccess if XKB is unavailable, otherwise the reply status. */
Status
XkbGetVirtualMods(Display *dpy,unsigned which,XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status rtrn;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->virtualMods = which;
    rtrn = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return rtrn;
}
/* Fetch the explicit-component flags for `num` keys starting at keycode
 * `first`. Returns BadAccess if XKB is unavailable, BadValue for a bad
 * range, BadMatch for a NULL keyboard description, otherwise the reply
 * status. */
Status
XkbGetKeyExplicitComponents( Display * dpy,
                             unsigned first,
                             unsigned num,
                             XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status status;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;
    if ((num < 1) || (num > XkbMaxKeyCount))
        return BadValue;
    /* Bug fix: the NULL check used to run only after _XkbGetGetMapReq had
     * already dereferenced xkb (it reads xkb->device_spec), so a NULL xkb
     * crashed instead of returning BadMatch. Check before building the
     * request — this also avoids sending a request whose reply would never
     * be consumed. */
    if (xkb == NULL)
        return BadMatch;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->firstKeyExplicit = first;
    req->nKeyExplicit = num;
    /* Clear stale flags for the requested range so keys the server no
     * longer reports do not keep old values.
     * NOTE(review): `first+num <= max_key_code` looks one stricter than the
     * inclusive keycode range (first+num <= max_key_code+1) — confirm. */
    if ((xkb->server != NULL) && (xkb->server->explicit != NULL)) {
        if ((num > 0) && (first >= xkb->min_key_code) &&
            (first + num <= xkb->max_key_code))
            bzero(&xkb->server->explicit[first], num);
    }
    status = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return status;
}
/* Fetch the modifier map for `num` keys starting at keycode `first`.
 * Returns BadAccess if XKB is unavailable, BadValue for a bad range,
 * BadMatch for a NULL keyboard description, otherwise the reply status. */
Status
XkbGetKeyModifierMap(Display *dpy,unsigned first,unsigned num,XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status status;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;
    if ((num < 1) || (num > XkbMaxKeyCount))
        return BadValue;
    /* Bug fix: the NULL check used to run only after _XkbGetGetMapReq had
     * already dereferenced xkb, so a NULL xkb crashed instead of returning
     * BadMatch. Check before building the request. */
    if (xkb == NULL)
        return BadMatch;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->firstModMapKey = first;
    req->nModMapKeys = num;
    /* Clear stale modmap entries for the requested range. */
    if ((xkb->map != NULL) && (xkb->map->modmap != NULL)) {
        if ((num > 0) && (first >= xkb->min_key_code) &&
            (first + num <= xkb->max_key_code))
            bzero(&xkb->map->modmap[first], num);
    }
    status = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return status;
}
/* Fetch the virtual modifier map for `num` keys starting at keycode
 * `first`. Returns BadAccess if XKB is unavailable, BadValue for a bad
 * range, BadMatch for a NULL keyboard description, otherwise the reply
 * status. */
Status
XkbGetKeyVirtualModMap(Display *dpy,unsigned first,unsigned num,XkbDescPtr xkb)
{
    register xkbGetMapReq *req;
    Status status;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;
    if ((num < 1) || (num > XkbMaxKeyCount))
        return BadValue;
    /* Bug fix: NULL xkb used to be dereferenced by _XkbGetGetMapReq before
     * the check; fail early instead. */
    if (xkb == NULL)
        return BadMatch;

    LockDisplay(dpy);
    req = _XkbGetGetMapReq(dpy, xkb);
    req->firstVModMapKey = first;
    req->nVModMapKeys = num;
    /* Bug fix: the guard used to test xkb->map->modmap while the bzero
     * below writes xkb->server->vmodmap — guard the structure that is
     * actually written. */
    if ((xkb->server != NULL) && (xkb->server->vmodmap != NULL)) {
        if ((num > 0) && (first >= xkb->min_key_code) &&
            (first + num <= xkb->max_key_code))
            bzero(&xkb->server->vmodmap[first], num * sizeof(unsigned short));
    }
    status = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return status;
}
/* Refresh exactly the map pieces recorded in `changes` from the server.
 * A zero change mask is a no-op returning Success. */
Status
XkbGetMapChanges(Display *dpy,XkbDescPtr xkb,XkbMapChangesPtr changes)
{
    xkbGetMapReq *req;
    Status rtrn;

    if ((dpy->flags & XlibDisplayNoXkb) ||
        (!dpy->xkb_info && !XkbUseExtension(dpy, NULL, NULL)))
        return BadAccess;

    LockDisplay(dpy);
    if (!changes->changed) {
        /* Nothing to refresh. Note: no SyncHandle() on this path, matching
         * the original behavior. */
        UnlockDisplay(dpy);
        return Success;
    }
    /* Translate the change set into a partial-fetch request. */
    req = _XkbGetGetMapReq(dpy, xkb);
    req->full = 0;
    req->partial = changes->changed;
    req->firstType = changes->first_type;
    req->nTypes = changes->num_types;
    req->firstKeySym = changes->first_key_sym;
    req->nKeySyms = changes->num_key_syms;
    req->firstKeyAct = changes->first_key_act;
    req->nKeyActs = changes->num_key_acts;
    req->firstKeyBehavior = changes->first_key_behavior;
    req->nKeyBehaviors = changes->num_key_behaviors;
    req->virtualMods = changes->vmods;
    req->firstKeyExplicit = changes->first_key_explicit;
    req->nKeyExplicit = changes->num_key_explicit;
    req->firstModMapKey = changes->first_modmap_key;
    req->nModMapKeys = changes->num_modmap_keys;
    req->firstVModMapKey = changes->first_vmodmap_key;
    req->nVModMapKeys = changes->num_vmodmap_keys;
    rtrn = _XkbHandleGetMapReply(dpy, xkb);
    UnlockDisplay(dpy);
    SyncHandle();
    return rtrn;
}
|
<reponame>Ayvytr/MvpCommons<filename>lib/http/src/main/java/com/http/GsonResponseBodyConverter.java
package com.http;
import android.text.TextUtils;
import com.google.gson.Gson;
import java.io.IOException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import okhttp3.ResponseBody;
import retrofit2.Converter;
public class GsonResponseBodyConverter<T> implements Converter<ResponseBody, T> {
private final Gson gson;
private final Type type;
public GsonResponseBodyConverter(Gson gson, Type type) {
this.gson = gson;
this.type = type;
}
@Override
public T convert(ResponseBody value) throws IOException {
String response = value.string();
if (TextUtils.isEmpty(response)) {
if (type instanceof ParameterizedType) {
ParameterizedType paraType = (ParameterizedType) type;
String rawType = paraType.getRawType().toString();
if (rawType != null && rawType.contains("java.util.List")) {
response = "[]";
return gson.fromJson(response, type);
}else{
response = "{}";
return gson.fromJson(response, type);
}
}else{
return getEmptyResult();
}
}
return gson.fromJson(response, type);
}
private T getEmptyResult() {
T result = null;
try {
result = ((Class<T>) type).newInstance();
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
return result;
}
} |
#-----------------------------------------------------
# This is a shell script for
# 1. building a graph using GraphFrames package.
# 2. applying Breadth-first search (BFS) algorithm
#-----------------------------------------------------
# @author Mahmoud Parsian
#-----------------------------------------------------
set -euo pipefail

export SPARK_HOME="/home/book/spark-3.2.0"
export SPARK_PROG="/home/book/code/chap06/breadth_first_search_example.py"
export GRAPH_FRAMES="graphframes:graphframes:0.8.2-spark3.2-s_2.12"
#
# run the PySpark program (quote all expansions so paths with spaces work):
"$SPARK_HOME/bin/spark-submit" --packages "$GRAPH_FRAMES" "$SPARK_PROG"
|
package com.algorithm.sort;
public class BubbleSort implements BaseSort {

    /**
     * Sorts the array in place (ascending) using bubble sort, O(n^2), and
     * returns the same array for call chaining.
     */
    @Override
    public int[] sort(int[] input) {
        for (int end = input.length - 2; end >= 0; end--) {
            for (int j = 0; j <= end; j++) {
                if (input[j] > input[j + 1]) {
                    swap(input, j, j + 1);
                }
            }
        }
        return input;
    }

    /**
     * Swaps two elements. Uses a temporary variable instead of the old
     * add/subtract trick, which could overflow int (and would zero an
     * element if ever called with i == j).
     */
    private void swap(int[] nums, int i, int j) {
        int tmp = nums[i];
        nums[i] = nums[j];
        nums[j] = tmp;
    }
}
|
<reponame>rafsaf/fastapi-template
from asyncio import AbstractEventLoop as EventLoop
from typing import Dict
from fastapi.testclient import TestClient
from app.core.config import settings
from app.tests.conftest import default_superuser, default_user
def test_get_access_token(
    client: TestClient, event_loop: EventLoop, normal_user_token_headers: Dict[str, str]
) -> None:
    # Logging in with the default user's credentials must return HTTP 200 and
    # a non-empty bearer access token.
    # NOTE(review): "<PASSWORD>" is a redacted placeholder left by a scrubber
    # (not valid Python) — restore the real fixture password (likely
    # default_user.password or a settings value) before running.
    login_data = {"username": default_user.email, "password": <PASSWORD>}
    r = client.post(f"{settings.API_STR}/login/access-token", data=login_data)
    tokens = r.json()
    assert r.status_code == 200
    assert "access_token" in tokens
    assert tokens["access_token"]
def test_use_access_token(
    client: TestClient, superuser_token_headers: Dict[str, str]
) -> None:
    """A valid superuser token must authenticate against the test-token
    endpoint, which echoes back the user (including its email)."""
    response = client.post(
        f"{settings.API_STR}/login/test-token",
        headers=superuser_token_headers,
    )
    payload = response.json()
    assert response.status_code == 200
    assert "email" in payload
|
<filename>src/library/queryHelper.js<gh_stars>0
const async = require("async");
const sql = require('mssql')
// Select the config profile from NODE_ENV, defaulting to 'dev'.
// Bug fix: `mode` was assigned without a declaration, leaking an implicit
// global (and throwing a ReferenceError in strict mode / ES modules).
const mode = process.env.NODE_ENV || 'dev';
const config = require('config').get(mode);
//load sql
const { getPool, closePool, closeAll } = require('./sqlServerConnect')
// SQL fragment appended to INSERT statements to return the new identity id.
const selectLastId = ";select @@IDENTITY AS 'id';"
// Generates a short random identifier (up to 10 base-36 characters) used to
// name connection pools.
function randomPoolName() {
  // String.prototype.substr is deprecated; slice(2, 12) keeps the same
  // 10-character window after the "0." prefix of Math.random().toString(36).
  return Math.random().toString(36).slice(2, 12)
}
// Runs a single query against the database named by `db_config_name`,
// binding `sqlParams` as typed inputs. Resolves with the mssql result
// (use result.recordset for rows). The pool is cached by name and is NOT
// closed here.
async function runQuery(db_config_name,query,sqlParams) {
    // const pool_name = randomPoolName()
    // Use the db config name itself as the pool name (one cached pool per db).
    const pool_name = db_config_name
    const pool = await getPool(pool_name, config.database[db_config_name])
    const req = pool.request();
    // if(sqlParams) {
    //     sqlParams.forEach(function (param) {
    //         req.input(param.name, param.type, param.value);
    //     });
    // }
    // Bind query parameters to guard against SQL injection.
    if (sqlParams) {
        const promises = sqlParams.map(async (param) => {
            let data_value = null
            // If the value is produced by another exported function
            // ({ fnCall: { fnName, fnParams } }), call it to obtain the value.
            if (param.value?.hasOwnProperty('fnCall')) {
                data_value = await module.exports[param.value.fnCall.fnName](param.value.fnCall.fnParams)
            } else {
                data_value = param.value
            }
            req.input(param.name, param.type, data_value)
        }
        )
        await Promise.all(promises)
    }
    return await req.query(query)
    // const result = await req.query(query)
    // close connection
    // closeAll()
    // closePool(pool_name)
    // return result
}
// Runs several queries in order inside a single transaction (mostly used for
// inserts); if any query fails, the whole transaction is rolled back.
// Resolves true after commit, rejects with the underlying error otherwise.
// An entry may carry `chainValueArray` to expand one parameterized VALUES
// tuple per element of `mapping_value` (bulk insert).
// NOTE(review): the async.eachSeries iteratee is an async function that never
// invokes its `callback` argument — this relies on async's promise support;
// confirm the installed async version handles async iteratees.
function runTransactionQuery(db_config_name,queries) {
    const pool_name = db_config_name
    return new Promise(async (resolve, reject) => {
        const pool = await getPool(pool_name, config.database[db_config_name])
        // const pool = new sql.ConnectionPool(config.database[db_config_name])
        return pool.connect().then((p) => {
            const transaction = new sql.Transaction(p);
            return transaction.begin((err) => {
                const request = new sql.Request(transaction);
                if (err) {
                    reject(err);
                }
                return async.eachSeries(queries, async (query, callback) => {
                    if (query?.hasOwnProperty('chainValueArray')) {
                        let mapping_values = query.chainValueArray.mapping_value
                        let all_params = []
                        const mappingValue_promises = mapping_values.map(async (mapping_val,idx) => {
                            let each_params = []
                            const chainValue_promises = query.sqlParams.map(async (param,index) => {
                                let data_value = null
                                // The parameter matching mapping_input takes the current
                                // tuple's value; others resolve normally (fnCall or literal).
                                if(param.name==query.chainValueArray.mapping_input) {
                                    data_value = mapping_values[idx]
                                }else{
                                    if (param.value&&param.value?.hasOwnProperty('fnCall')) {
                                        data_value = await module.exports[param.value.fnCall.fnName](param.value.fnCall.fnParams)
                                    } else {
                                        data_value = param.value ? param.value:null
                                    }
                                }
                                // Suffix the input name with the tuple index so repeated
                                // parameters stay unique across the batch.
                                const dynamic_input_name = param.name+`_idx_${idx}`
                                each_params[index] = {}
                                each_params[index].name = dynamic_input_name
                                each_params[index].value = data_value
                                each_params[index].type = param.type
                            })
                            await Promise.all(chainValue_promises)
                            // Register the inputs and build "(@a_idx_0,@b_idx_0)" fragments.
                            const extract_params = each_params.map((ei,i) => {
                                request.input(ei.name, ei.type,ei.value);
                                return '@'+ei.name
                            }).join(',')
                            // console.log(extract_params)
                            all_params.push('('+extract_params+')')
                        })
                        await Promise.all(mappingValue_promises)
                        // NOTE(review): the result of this map/join is discarded —
                        // looks like dead code left from a refactor.
                        Object.keys(all_params).map((o) => {
                            return o
                        }).join(',')
                        const qq = query.query+' '+all_params;
                        // console.log(qq)
                        // console.log(all_params)
                        return request.query(qq)
                    }else {
                        // Bind parameters (SQL-injection guard).
                        if (query.sqlParams) {
                            const promises = query.sqlParams.map(async (param) => {
                                let data_value = null
                                // If the value comes from another exported function, call it.
                                if (param.value?.hasOwnProperty('fnCall')) {
                                    data_value = await module.exports[param.value.fnCall.fnName](param.value.fnCall.fnParams)
                                } else {
                                    data_value = param.value
                                }
                                request.input(param.name, param.type, data_value)
                            }
                            )
                            await Promise.all(promises)
                            return request.query(query.query)
                        } else {
                            return request.query(query.query)
                        }
                    }
                }, async (err2) => {
                    if ( err2 ) {
                        await transaction.rollback(() => {
                            pool.close();
                            reject(err2);
                        });
                    } else {
                        await transaction.commit(() => {
                            pool.close();
                            resolve(true);
                        });
                    }
                });
            });
        });
    });
}
// Executes a stored procedure and writes its first recordset straight to the
// Express response as JSON.
// Bug fix: getPool() was awaited (it resolves to the pool — see runQuery),
// yet the old code then called pool.then(...), which throws a TypeError on a
// non-thenable. Use the resolved pool directly.
async function runStoredProcedure(db_config_name, res, proc, sqlParams) {
  const pool_name = db_config_name
  const pool = await getPool(pool_name, config.database[db_config_name])
  const req = pool.request();
  sqlParams.forEach(function (param) {
    req.input(param.name, param.type, param.value);
  });
  // NOTE(review): errors from execute() are not handled — `err` is ignored
  // and recordset[0] would throw; consider an error response path.
  req.execute(proc, (err, recordset) => {
    res.json(recordset[0]);
  });
}
//gen runno
// Generates the next running number for `table`.`col` in the form
// YYYYMM + zero-padded sequence of `right_digit` digits (e.g. 20240700001),
// starting from ...0001 for a new month. Returns false when the lookup fails.
// NOTE(review): table/col/right_digit are concatenated directly into the SQL
// text — safe only for trusted server-side values, never user input.
async function genRunno(fnParams)
{
    const db_config_name = fnParams.db_name
    const table = fnParams.table
    const col = fnParams.col
    const right_digit = fnParams.right_digit
    const current_date = new Date()
    // Current year+month prefix, e.g. "202407".
    const ym = current_date.getFullYear()+('0' + (current_date.getMonth()+1)).slice(-2)
    const sql_query = "select max(right("+col+","+right_digit+")) as "+col+" from "+ table +" (nolock) where "+col+" like '"+ym+"%' ";
    // Default when no row exists yet for this month.
    let result_runno = ym+(("1").padStart(right_digit, "0"))
    try {
        const result = await runQuery(
            db_config_name,
            sql_query
        )
        if(result.recordset[0][col]!=null) {
            result_runno = ym+(parseInt(result.recordset[0][col])+1).toString().padStart(right_digit, "0")
        }
    } catch (error) {
        result_runno = false
    }
    return result_runno
}
//get last insert Record
// Returns the largest value of `col` in `table` (ORDER BY col DESC, TOP 1),
// or false when the query fails or the table is empty.
// NOTE(review): table/col are concatenated directly into the SQL text — safe
// only for trusted server-side values, never user input.
async function selectLastRunno(fnParams)
{
    const db_config_name = fnParams.db_name
    const table = fnParams.table
    const col = fnParams.col
    const sql_query = "SELECT TOP 1 "+col+" FROM "+table+" (nolock) ORDER BY "+col+" DESC";
    let result_runno = false
    try {
        const result = await runQuery(
            db_config_name,
            sql_query
        )
        if(result.recordset[0][col]!=null) {
            result_runno = result.recordset[0][col]
        }
    } catch (error) {
        result_runno = false
    }
    return result_runno
}
// Creates a transaction object bound to the (cached) connection pool for the
// given database config name; pair with runQueryWithTransaction().
async function poolTransaction(db_config_name) {
  const pool = await getPool(db_config_name, config.database[db_config_name]);
  return pool.transaction();
}
// Runs one query on an existing transaction (see poolTransaction), binding
// `sqlParams` as typed inputs. Resolves with the mssql result. The caller
// owns begin/commit/rollback of the transaction.
async function runQueryWithTransaction(transaction,query,sqlParams) {
    const req = transaction.request(); //transaction request
    // Bind query parameters to guard against SQL injection.
    if (sqlParams) {
        const promises = sqlParams.map(async (param) => {
            let data_value = null
            // If the value is produced by another exported function, call it.
            if (param.value?.hasOwnProperty('fnCall')) {
                data_value = await module.exports[param.value.fnCall.fnName](param.value.fnCall.fnParams)
            } else {
                data_value = param.value
            }
            req.input(param.name, param.type, data_value)
        }
        )
        await Promise.all(promises)
    }
    return await req.query(query)
    // const result = await req.query(query)
    // close connection
    // closeAll()
    // closePool(pool_name)
    // return result
}
//find
// Generic SELECT builder. fnParams:
//   db_name   - database config name
//   table     - table to query (trusted value; interpolated into SQL)
//   select    - optional array of column names (default '*')
//   order_by  - optional array of [column, direction] pairs
//   whereAnd  - optional filters joined with AND: { field, name, operator, value, type }
//   whereOr   - optional filters joined with OR (same shape)
//   rowResult - 'row' to fetch TOP 1 and return a single row object
// Operators '=', 'like' (contains), '%like' (ends with), 'like%' (starts
// with) are supported; values are bound as parameters via runQuery.
// Returns the recordset (or single row), or false on error / no result.
async function findBy(fnParams)
{
    const db_config_name = fnParams.db_name
    const table = fnParams.table
    let cols = ' * '
    let where = ''
    let whereAnd = ''
    let whereOr = ''
    let checkedValue = ''
    let checkedOperator = ''
    let finalParams = []
    let limitResult = ''
    let orders = ''
    if(fnParams.select && fnParams.select.length) {
        cols = fnParams.select.map((m) => {return m}).join(', ')
    }
    if(fnParams.order_by && fnParams.order_by.length) {
        orders += "\nORDER BY "
        orders += fnParams.order_by.map((m) => {
            return `${m[0]} ${m[1]}`
        }).join(', ')
    }
    if((fnParams.whereAnd && fnParams.whereAnd.length) || (fnParams.whereOr && fnParams.whereOr.length) ) {
        where += "\nWHERE "
        if(fnParams.whereAnd && fnParams.whereAnd.length) {
            whereAnd += "("
            whereAnd += fnParams.whereAnd.map((m) => {
                // Default: direct comparison against the bound parameter.
                checkedValue = `@${m.name}`
                checkedOperator = m.operator
                if(m.operator === '%like') {
                    checkedValue = `'%'+@${m.name}`
                    checkedOperator = `LIKE`
                } else if (m.operator === 'like%') {
                    checkedValue = `@${m.name}+'%'`
                    checkedOperator = `LIKE`
                } else if (m.operator === 'like') {
                    checkedValue = `'%'+@${m.name}+'%'`
                    checkedOperator = `LIKE`
                }
                return `${m.field} ${checkedOperator} ${checkedValue}`
            }).join(' AND ')
            whereAnd += ")"
            finalParams = [...finalParams.concat(fnParams.whereAnd)]
        }
        if(fnParams.whereOr && fnParams.whereOr.length) {
            whereOr += "("
            whereOr += fnParams.whereOr.map((m) => {
                checkedValue = `@${m.name}`
                checkedOperator = m.operator
                if(m.operator === '%like') {
                    checkedValue = `'%'+@${m.name}`
                    checkedOperator = `LIKE`
                } else if (m.operator === 'like%') {
                    checkedValue = `@${m.name}+'%'`
                    checkedOperator = `LIKE`
                } else if (m.operator === 'like') {
                    checkedValue = `'%'+@${m.name}+'%'`
                    checkedOperator = `LIKE`
                }
                return `${m.field} ${checkedOperator} ${checkedValue}`
            }).join(' OR ')
            whereOr += ")"
            finalParams = [...finalParams.concat(fnParams.whereOr)]
        }
        // Combine: (AND-group) AND (OR-group) when both are present.
        where += whereAnd + (whereAnd&&whereOr ? ` AND `:``) + whereOr
    }
    if((fnParams.rowResult && fnParams.rowResult.length) && fnParams.rowResult === 'row') {
        limitResult = `TOP 1 `
    }
    const sql_query = `SELECT ${limitResult} ${cols} \nFROM ${table} (nolock) ${where} ${orders}`
    let finalResult = false
    try {
        const result = await runQuery(
            db_config_name,
            sql_query,
            finalParams
        )
        if((fnParams.rowResult && fnParams.rowResult.length) && fnParams.rowResult === 'row') {
            if(result.recordset[0]!=null) {
                finalResult = result.recordset[0]
            }
        } else {
            if(result.recordset!=null) {
                finalResult = result.recordset
            }
        }
    } catch (error) {
        // console.log(error)
        finalResult = false
    }
    // console.log(sql_query)
    // console.log(finalResult)
    return finalResult
}
// Public API of the query helper. Any function referenced dynamically via
// module.exports[fnName] (the fnCall parameter mechanism) must be listed here.
module.exports = {
    runQuery,
    runTransactionQuery,
    runStoredProcedure,
    genRunno,
    selectLastRunno,
    selectLastId,
    poolTransaction,
    runQueryWithTransaction,
    findBy,
}
|
#!/bin/bash
# Publishes the consul-template generated services file to the Consul KV
# store and posts a Datadog event describing the diff from the previous run.
# Rendered by ERB: @token, @url and @api_key are filled in at deploy time.
export TOKEN="<%= @token %>"

SERVICES_FILE="/etc/consul-template/output/consul-services-generated.ini"
SERVICES_LAST="$SERVICES_FILE.last"

if [ -f "$SERVICES_FILE" ]; then
  # Compute the checksum only when the file exists (previously this ran
  # before the existence check and logged a sha256sum error otherwise).
  NEW_SHA=$(sha256sum "$SERVICES_FILE")
  logger -t consul "consuldnsbackup: Created new file: $NEW_SHA"
  if [ -f "$SERVICES_LAST" ]; then
    OLD_SHA=$(sha256sum "$SERVICES_LAST")
    SERVICES_DIFF=$(diff -u "$SERVICES_LAST" "$SERVICES_FILE")
    curl -s -X POST -H "Content-type: application/json" \
      -d "{
        \"title\": \"Consul Services Update\",
        \"text\": \"New File: $NEW_SHA\nOld File: $OLD_SHA\n\nDiff:$SERVICES_DIFF\",
        \"alert_type\": \"info\"
      }" \
      '<%= @url %>/api/v1/events?api_key=<%= @api_key %>' > /dev/null
  fi
  cp -f "$SERVICES_FILE" "$SERVICES_LAST"
  SERVICES_DATA=$(cat "$SERVICES_FILE")
  /usr/local/bin/consulkv set consuldnsbackup/data "$SERVICES_DATA"
  logger -t consul "consuldnsbackup: Updated KV data."
fi
|
<gh_stars>0
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { MovieModule } from './movie/movie.module';
import { APP_FILTER, APP_INTERCEPTOR } from '@nestjs/core';
import { HttpErrorFilter } from './shared/http-error.filter';
import { LogInterceptor } from './shared/log.interceptor';
import { GenreModule } from './genre/genre.module';
import { UserModule } from './user/user.module';
import { SharedModule } from './shared/shared.module';
import { AuthModule } from './auth/auth.module';
import { BookModule } from './book/book.module';
import { UserInfoModule } from './userinfo/userinfo.module';
import { CategoryModule } from './category/category.module';
import { GameModule } from './game/game.module';
import { YoutubevideoModule } from './youtubevideo/youtubevideo.module';
import { YoutubecategoryModule } from './youtubecategory/youtubecategory.module';
import { YoutubechannelModule } from './youtubechannel/youtubechannel.module';
// Root module: wires TypeORM (configuration resolved by forRoot from the
// ambient ormconfig) together with every feature module, plus two app-wide
// providers registered through Nest's DI tokens.
@Module({
  imports: [TypeOrmModule.forRoot(), MovieModule, GenreModule, UserModule, SharedModule, AuthModule, BookModule, UserInfoModule, CategoryModule, GameModule, YoutubevideoModule, YoutubecategoryModule, YoutubechannelModule],
  controllers: [AppController],
  providers: [
    AppService,
    {
      // Global exception filter (see shared/http-error.filter).
      provide: APP_FILTER,
      useClass: HttpErrorFilter
    },
    {
      // Global request interceptor (see shared/log.interceptor).
      provide: APP_INTERCEPTOR,
      useClass: LogInterceptor
    }
  ],
})
export class AppModule {}
|
def two_sum(arr, target):
    """Return indices (i, j) with i < j and arr[i] + arr[j] == target.

    Uses a single pass with a value->index map (O(n) time, O(n) space).
    Returns None when no such pair exists.
    """
    seen = {}
    for i, value in enumerate(arr):
        complement = target - value
        if complement in seen:
            return seen[complement], i
        # Store/refresh the latest index for this value (matches original
        # behavior of overwriting duplicates).
        seen[value] = i
    return None
if __name__ == '__main__':
    # Quick demo: the classic LeetCode example, expected output (0, 1).
    sample, goal = [2, 7, 11, 15], 9
    print(two_sum(sample, goal))
<filename>lib/fog/ecloud/models/compute/hardware_configuration.rb<gh_stars>1-10
module Fog
  module Compute
    class Ecloud
      # Model for an eCloud virtual-machine hardware configuration
      # (CPU count, memory, disks, NICs), identified by its API href.
      class HardwareConfiguration < Fog::Ecloud::Model
        identity :href

        attribute :processor_count, :aliases => :ProcessorCount, :type => :integer
        attribute :mem, :aliases => :Memory
        attribute :storage, :aliases => :Disks
        attribute :network_cards, :aliases => :Nics

        # Numeric id extracted from the href: its first run of digits.
        def id
          href.scan(/\d+/)[0]
        end
      end
    end
  end
end
|
// Generated by script, don't edit it please.
import createSvgIcon from '../createSvgIcon';
import PinedSvg from '@rsuite/icon-font/lib/status/Pined';
// Icon component wrapping the "Pined" status SVG from @rsuite/icon-font.
// (File is script-generated — regenerate rather than hand-edit.)
const Pined = createSvgIcon({
  as: PinedSvg,
  ariaLabel: 'pined',
  category: 'status',
  displayName: 'Pined'
});
export default Pined;
|
<reponame>BrianNewsom/echonest-react
$( "#hot-tracks" ).on( "click", function( event ) {
var trackurl = 'http://developer.echonest.com/api/v4/song/search?api_key=QQELH8UNTWLVBRQIB&sort=song_hotttnesss-desc&bucket=song_hotttnesss'
React.render(
<TrackList url={trackurl} />,
document.getElementById('content')
)
})
$( "#hot-artists" ).on( "click", function( event ) {
var queryString = 'http://developer.echonest.com/api/v4/artist/top_hottt?api_key=' + echonest.api_key + '&format=json&results=12&start=0&bucket=hotttnesss'
React.render(
<ArtistList url={queryString}/>,
document.getElementById('content')
)
})
|
<reponame>CN-3211/vt-cesium2.0<gh_stars>0
/*
* @Date: 2021-12-28 20:05:14
* @LastEditors: huangzh873
* @LastEditTime: 2022-03-28 22:05:01
* @FilePath: /cesium-web-vue/src/components/mapInfo/mapInfo.ts
*/
import { throttle } from '@/utils/index';
import { Viewer, ScreenSpaceEventHandler, Cartesian2, ScreenSpaceEventType, EllipsoidGeodesic, Math as CMath } from 'cesium';
interface infos { lng: string, lat: string, height: string, ViewpointsHeight: string, scale: string }
// Keeps infos.scale updated with the ground distance represented by 100
// screen pixels, recomputed on every scene postRender.
// NOTE(review): each call adds another postRender listener that is never
// removed — calling this more than once leaks listeners; confirm intent.
const computeMapScale = (viewer: Viewer, infos: infos) => {
  const canvas = viewer.canvas;
  const globe = viewer.scene.globe;
  // Two window coordinates exactly one pixel apart horizontally, sampled at
  // the canvas centre. (An earlier comment referenced the bottom edge
  // canvas.height - 1; the code actually samples canvas.height / 2.)
  const leftPixelPosition = new Cartesian2(canvas.width/2, canvas.height/2);
  const rightPixelPosition = new Cartesian2(canvas.width/2 + 1, canvas.height/2);
  viewer.scene.postRender.addEventListener(() => {
    const leftPixelRay = viewer.scene.camera.getPickRay(leftPixelPosition)
    const rightPixelRay = viewer.scene.camera.getPickRay(rightPixelPosition)
    if(!leftPixelRay || !rightPixelRay) {
      return;
    }
    // Cartesian3 positions where the two rays hit the globe.
    const leftPixelCar3 = globe.pick(leftPixelRay, viewer.scene);
    const rightPixelCar3 = globe.pick(rightPixelRay, viewer.scene);
    if(!leftPixelCar3 || !rightPixelCar3) {
      return
    }
    const leftCartographic = globe.ellipsoid.cartesianToCartographic(leftPixelCar3);
    const rightCartographic = globe.ellipsoid.cartesianToCartographic(rightPixelCar3);
    const geodesic = new EllipsoidGeodesic(); // ellipsoid geodesic
    // Set the geodesic's start and end points.
    geodesic.setEndPoints(leftCartographic, rightCartographic);
    // surfaceDistance is metres per pixel; scale to km per 100 px.
    const pixelDistance = geodesic.surfaceDistance * 100 / 1000;
    if(pixelDistance < 1) {
      infos.scale = (pixelDistance * 1000).toFixed(2) + 'm'
    } else {
      infos.scale = pixelDistance.toFixed(2) + 'km'
    }
  })
}
/**
 * Tracks the mouse over the globe (throttled to 500 ms) and updates `infos`
 * with the picked longitude/latitude (degrees), terrain height, and camera
 * (viewpoint) altitude. Fields are set to '' when a value is unavailable.
 * NOTE(review): the created ScreenSpaceEventHandler is never destroyed and
 * is not returned to the caller — confirm lifecycle handling.
 */
const pickMousePosition = (viewer: Viewer, infos: infos): any => {
  const handler3D = new ScreenSpaceEventHandler(viewer.scene.canvas);
  const handlerFunc = movement => {
    const pick = new Cartesian2(movement.endPosition.x, movement.endPosition.y);
    const pickRay = viewer.camera.getPickRay(pick);
    if (!pickRay) {
      return
    }
    const cartesian = viewer.scene.globe.pick(pickRay, viewer.scene);
    if (!cartesian) {
      return
    }
    const cartographic = viewer.scene.globe.ellipsoid.cartesianToCartographic(cartesian);
    // Longitude/latitude in degrees.
    const lat = CMath.toDegrees(cartographic.latitude);
    const lng = CMath.toDegrees(cartographic.longitude);
    // Terrain height at the picked point.
    const height = viewer.scene.globe.getHeight(cartographic);
    // Camera (viewpoint) altitude.
    const ViewpointsHeight = viewer.scene.camera.positionCartographic.height;
    infos.lng = lng ? lng.toFixed(2) + '' : '';
    infos.lat = lat ? lat.toFixed(2) + '' : '';
    infos.height = height ? height.toFixed(2) + '' : '';
    infos.ViewpointsHeight = ViewpointsHeight ? ViewpointsHeight.toFixed(2) + '' : '';
  }
  handler3D.setInputAction(throttle(handlerFunc, 500), ScreenSpaceEventType.MOUSE_MOVE)
}
export { pickMousePosition, computeMapScale }
#!/bin/sh
# Print the lines of the file named by $input that contain the character 9.
# Fail fast with a clear message when $input is unset or empty — the old
# unquoted, unguarded expansion made grep fall back to reading stdin and hang.
: "${input:?input must name the file to search}"
grep 9 "$input"
#! /bin/bash
# Interactive uninstaller: removes /usr/local/eosio, the installed binaries
# from /usr/local/bin and, with --full, per-user data directories.
binaries=(cleon
   eosio-abigen
   dimension-launcher
   eosio-s2wasm
   eosio-wast2wasm
   eosiocpp
   keond
   nodeon
   eosio-applesdemo)

if [ -d "/usr/local/eosio" ]; then
   printf "\tDo you wish to remove this install? (requires sudo)\n"
   select yn in "Yes" "No"; do
      case $yn in
         [Yy]* )
            if [ "$(id -u)" -ne 0 ]; then
               printf "\n\tThis requires sudo, please run ./eosio_uninstall.sh with sudo\n\n"
               echo "sudo"
               exit 1   # was 'exit -1': -1 is not a valid POSIX exit status
            fi
            pushd /usr/local &> /dev/null
            rm -rf eosio
            pushd bin &> /dev/null
            # Quote the array expansion so names survive word-splitting.
            for binary in "${binaries[@]}"; do
               rm -- "${binary}"
            done
            # Handle cleanup of directories created from installation.
            if [ "${1:-}" == "--full" ]; then
               if [ -d ~/Library/Application\ Support/eosio ]; then rm -rf ~/Library/Application\ Support/eosio; fi # Mac OS
               if [ -d ~/.local/share/dimension ]; then rm -rf ~/.local/share/dimension; fi # Linux
            fi
            popd &> /dev/null
            break;;
         [Nn]* )
            printf "\tAborting uninstall\n\n"
            exit 1;;
      esac
   done
fi
echo "done"
|
import pandas as pd
def combine_tag_encoding(movie: pd.DataFrame, tag_dummy: pd.DataFrame) -> pd.DataFrame:
    """Attach one-hot tag columns to ``movie`` and drop the raw 'tag' column.

    The appended columns are renamed tag0..tagN-1 (N = number of columns in
    ``tag_dummy``); the original movie columns keep their names.
    Note: pd.concat aligns on the index — callers should pass frames with
    matching indices.
    """
    n_tags = len(tag_dummy.columns)
    movie = pd.concat([movie, pd.DataFrame(tag_dummy)], axis=1)
    # Bug fix: the old code extended the *post-concat* column list with the
    # new names, producing a list longer than the frame and raising
    # ValueError on assignment. Rename only the appended block instead.
    if n_tags:
        movie.columns = list(movie.columns[:-n_tags]) + ['tag' + str(i) for i in range(n_tags)]
    # Drop the original 'tag' column from the combined dataset.
    movie = movie.drop('tag', axis=1)
    return movie
#!/bin/bash
# Post-run cleanup: drop the cookiecutter-scala-play helper variables from
# the environment in a single unset call.
unset COOKIECUTTER_SCALA_PLAY_RUN_PREREQUISITES COOKIECUTTER_SCALA_PLAY_RUN
<gh_stars>100-1000
/**
* Copyright (c) 2015, <NAME>, Quintelligence d.o.o. and contributors
* All rights reserved.
*
* This source code is licensed under the FreeBSD license found in the
* LICENSE file in the root directory of this source tree.
*/
namespace TSvm {
///////////////////////////////////////////////////////////////////////////////
// TLinParam
// Stream (deserializing) constructor.
TLinParam::TLinParam(TSIn& SIn) { Load(SIn); }

// Loads the SVM hyper-parameters from a stream. Field order must match
// Save() exactly; changing it breaks compatibility with stored models.
void TLinParam::Load(TSIn& SIn){
    Cost.Load(SIn);
    Unbalance.Load(SIn);
    Eps.Load(SIn);
    SampleSize.Load(SIn);
    MxIter.Load(SIn);
    MxTime.Load(SIn);
    MnDiff.Load(SIn);
    Verbose.Load(SIn);
}

// Saves the hyper-parameters; members are wrapped in TFlt/TInt/TBool so the
// on-disk format matches what Load() reads back, in the same order.
void TLinParam::Save(TSOut& SOut) const {
    TFlt(Cost).Save(SOut);
    TFlt(Unbalance).Save(SOut);
    TFlt(Eps).Save(SOut);
    TInt(SampleSize).Save(SOut);
    TInt(MxIter).Save(SOut);
    TInt(MxTime).Save(SOut);
    TFlt(MnDiff).Save(SOut);
    TBool(Verbose).Save(SOut);
}
///////////////////////////////////////////////////////////////////////////////
// TLinModel
// Stream (deserializing) constructor.
TLinModel::TLinModel(TSIn& SIn) { Load(SIn); }

// Loads weights, bias and hyper-parameters; order must mirror Save().
void TLinModel::Load(TSIn& SIn){
    WgtV.Load(SIn);
    Bias.Load(SIn);
    Param.Load(SIn);
}

// Saves weights, bias and hyper-parameters in the order Load() expects.
void TLinModel::Save(TSOut& SOut) const {
    WgtV.Save(SOut);
    Bias.Save(SOut);
    Param.Save(SOut);
}
// Overwrites only the hyper-parameters present as keys in the JSON object;
// absent keys keep their current values. "maxTime" is given in seconds and
// stored internally in milliseconds (see the matching division in GetParams).
void TLinModel::UpdateParams(const PJsonVal& ParamVal) {
    if (ParamVal->IsObjKey("c")) { Param.Cost = ParamVal->GetObjNum("c"); }
    if (ParamVal->IsObjKey("j")) { Param.Unbalance = ParamVal->GetObjNum("j"); }
    if (ParamVal->IsObjKey("eps")) { Param.Eps = ParamVal->GetObjNum("eps"); }
    if (ParamVal->IsObjKey("batchSize")) { Param.SampleSize = ParamVal->GetObjInt("batchSize"); }
    if (ParamVal->IsObjKey("maxIterations")) { Param.MxIter = ParamVal->GetObjInt("maxIterations"); }
    if (ParamVal->IsObjKey("maxTime")) { Param.MxTime = TFlt::Round(1000.0 * ParamVal->GetObjNum("maxTime")); }
    if (ParamVal->IsObjKey("minDiff")) { Param.MnDiff = ParamVal->GetObjNum("minDiff"); }
    if (ParamVal->IsObjKey("verbose")) { Param.Verbose = ParamVal->GetObjBool("verbose"); }
}
// Exports the current hyper-parameters as a JSON object using the same key
// names UpdateParams() accepts, so the result can round-trip.
PJsonVal TLinModel::GetParams() const {
    PJsonVal ParamVal = TJsonVal::NewObj();
    ParamVal->AddToObj("c", Param.Cost);
    ParamVal->AddToObj("j", Param.Unbalance);
    ParamVal->AddToObj("eps", Param.Eps);
    ParamVal->AddToObj("batchSize", Param.SampleSize);
    ParamVal->AddToObj("maxIterations", Param.MxIter);
    ParamVal->AddToObj("maxTime", Param.MxTime / 1000.0); // convert from miliseconds to seconds
    ParamVal->AddToObj("minDiff", Param.MnDiff);
    ParamVal->AddToObj("verbose", Param.Verbose);
    return ParamVal;
}
// Linear decision value <w, x> + b for a dense vector.
double TLinModel::Predict(const TFltV& Vec) const {
    return TLinAlg::DotProduct(WgtV, Vec) + Bias;
}

// Same, for a sparse (index, value) vector.
double TLinModel::Predict(const TIntFltKdV& SpVec) const {
    return TLinAlg::DotProduct(WgtV, SpVec) + Bias;
}

// Same, for column ColN of a dense matrix.
double TLinModel::Predict(const TFltVV& Mat, const int& ColN) const {
    return TLinAlg::DotProduct(Mat, ColN, WgtV) + Bias;
}
// Thin wrappers: forward training to the templated SGD solvers, for dense
// (TFltVV, column-per-example) and sparse (TVec<TIntFltKdV>) collections.
void TLinModel::FitClassification(const TFltVV& VecV, const int& Dims, const int& Vecs,
        const TFltV& TargetV, const PNotify& LogNotify, const PNotify& ErrorNotify) {
    SolveClassification(VecV, Dims, Vecs, TargetV, LogNotify, ErrorNotify);
}

void TLinModel::FitRegression(const TFltVV& VecV, const int& Dims, const int& Vecs,
        const TFltV& TargetV, const PNotify& LogNotify, const PNotify& ErrorNotify) {
    SolveRegression(VecV, Dims, Vecs, TargetV, LogNotify, ErrorNotify);
}

void TLinModel::FitClassification(const TVec<TIntFltKdV>& VecV, const int& Dims, const int& Vecs,
        const TFltV& TargetV, const PNotify& LogNotify, const PNotify& ErrorNotify) {
    SolveClassification(VecV, Dims, Vecs, TargetV, LogNotify, ErrorNotify);
}

void TLinModel::FitRegression(const TVec<TIntFltKdV>& VecV, const int& Dims, const int& Vecs,
        const TFltV& TargetV, const PNotify& LogNotify, const PNotify& ErrorNotify) {
    SolveRegression(VecV, Dims, Vecs, TargetV, LogNotify, ErrorNotify);
}
// Train a binary linear SVM with a Pegasos-style stochastic sub-gradient
// solver. VecV holds Vecs column vectors of dimension Dims; TargetV holds the
// labels (values > 0 are treated as positive). Sampling between positive and
// negative examples is biased by Param.Unbalance. The learned hyperplane is
// written to WgtV. Progress goes to _LogNotify only when Param.Verbose is set;
// stops on iteration limit, time limit, or small relative weight change.
template <class TVecV>
void TLinModel::SolveClassification(const TVecV& VecV, const int& Dims, const int& Vecs,
        const TFltV& TargetV, const PNotify& _LogNotify, const PNotify& ErrorNotify) {

    // asserts for input parameters
    EAssertR(Dims > 0, "Dimensionality must be positive!");
    EAssertR(Vecs > 0, "Number of vectors must be positive!");
    EAssertR(Vecs == TargetV.Len(), "Number of vectors must be equal to the number of targets!");
    EAssertR(Param.Cost > 0.0, "Cost parameter must be positive!");
    EAssertR(Param.SampleSize > 0, "Sampling size must be positive!");
    EAssertR(Param.MxIter > 1, "Number of iterations to small!");
    // hide output if not verbose
    PNotify LogNotify = Param.Verbose ? _LogNotify : TNotify::NullNotify;
    LogNotify->OnStatusFmt("SVM parameters: c=%.2f, j=%.2f", Param.Cost, Param.Unbalance);
    // initialization
    TRnd Rnd(1);
    // Pegasos regularization factor: lambda = 1 / (n * C)
    const double Lambda = 1.0 / (double(Vecs) * Param.Cost);
    // we start with random normal vector
    WgtV = TFltV(Dims); TLinAlgTransform::FillRnd(WgtV, Rnd); TLinAlg::Normalize(WgtV);
    // make it of appropriate length (inside the 1/sqrt(lambda) ball)
    TLinAlg::MultiplyScalar(1.0 / (2.0 * TMath::Sqrt(Lambda)), WgtV, WgtV);
    // allocate space for updates
    TFltV NewWgtV(Dims);
    // split vectors into positive and negative
    TIntV PosVecIdV, NegVecIdV;
    for (int VecN = 0; VecN < Vecs; VecN++) {
        if (TargetV[VecN] > 0.0) {
            PosVecIdV.Add(VecN);
        } else {
            NegVecIdV.Add(VecN);
        }
    }
    const int PosVecs = PosVecIdV.Len(), NegVecs = NegVecIdV.Len();
    // prepare sampling ratio between positive and negative
    // - the ratio is uniform over the records when Unbalance == 1.0
    // - if smaller than 1.0, then there is bias towards negatives
    // - if larger than 1.0, then there is bias towards positives
    double SamplingRatio = (double(PosVecs) * Param.Unbalance) /
        (double(PosVecs) * Param.Unbalance + double(NegVecs));
    LogNotify->OnStatusFmt("Sampling ration 1 positive vs %.2f negative [%.2f]",
        (1.0 / SamplingRatio - 1.0), SamplingRatio);
    TTmTimer Timer(Param.MxTime); int Iters = 0; double Diff = 1.0;
    LogNotify->OnStatusFmt("Limits: %d iterations, %.3f seconds, %.8f weight difference",
        Param.MxIter, (double)Param.MxTime /1000.0, Param.MnDiff);
    // initialize profiler
    TTmProfiler Profiler;
    const int ProfilerPre = Profiler.AddTimer("Pre");
    const int ProfilerBatch = Profiler.AddTimer("Batch");
    const int ProfilerPost = Profiler.AddTimer("Post");
    // function for writing progress reports
    int PosCount = 0, NegCount = 0;
    auto ProgressNotify = [&]() {
        LogNotify->OnStatusFmt("  %d iterations, %.3f seconds, last weight difference %g, ratio %.2f",
            Iters, Timer.GetStopWatch().GetMSec() / 1000.0, Diff,
            (double)PosCount / (double)(PosCount + NegCount));
        PosCount = 0; NegCount = 0;
    };
    for (int IterN = 0; IterN < Param.MxIter; IterN++) {
        if (IterN % 100 == 0) { ProgressNotify(); }
        Profiler.StartTimer(ProfilerPre);
        // tells how much we can move (learning rate decays with iteration)
        const double Nu = 1.0 / (Lambda * double(IterN + 2));
        const double VecUpdate = Nu / double(Param.SampleSize);
        // initialize updated normal vector
        TLinAlg::MultiplyScalar((1.0 - Nu * Lambda), WgtV, NewWgtV);
        Profiler.StopTimer(ProfilerPre);
        // classify examples from the sample
        Profiler.StartTimer(ProfilerBatch);
        int DiffCount = 0;
        for (int SampleN = 0; SampleN < Param.SampleSize; SampleN++) {
            int VecN = 0;
            if (Rnd.GetUniDev() > SamplingRatio) {
                // we select negative vector
                VecN = NegVecIdV[Rnd.GetUniDevInt(NegVecs)];
                NegCount++;
            } else {
                // we select positive vector
                VecN = PosVecIdV[Rnd.GetUniDevInt(PosVecs)];
                PosCount++;
            }
            // margin of the selected example: y * <w, x>
            const double VecCfyVal = TargetV[VecN];
            const double CfyVal = VecCfyVal * TLinAlg::DotProduct(VecV, VecN, WgtV);
            if (CfyVal < 1.0) {
                // inside the margin: apply the stochastic sub-gradient update
                TLinAlg::AddVec(VecUpdate * VecCfyVal, VecV, VecN, NewWgtV, NewWgtV);
                DiffCount++;
            }
        }
        Profiler.StopTimer(ProfilerBatch);
        Profiler.StartTimer(ProfilerPost);
        // project the current solution on to a ball of radius 1/sqrt(lambda)
        const double WgtNorm = 1.0 / (TLinAlg::Norm(NewWgtV) * TMath::Sqrt(Lambda));
        if (WgtNorm < 1.0) { TLinAlg::MultiplyScalar(WgtNorm, NewWgtV, NewWgtV); }
        // compute the relative difference with respect to the previous iteration
        Diff = 2.0 * TLinAlg::EuclDist(WgtV, NewWgtV) / (TLinAlg::Norm(WgtV) + TLinAlg::Norm(NewWgtV));
        // remember new solution
        WgtV = NewWgtV;
        Profiler.StopTimer(ProfilerPost);
        // count
        Iters++;
        // check stopping criteria with respect to time
        if (Timer.IsTimeUp()) {
            LogNotify->OnStatusFmt("Finishing due to reached time limit of %.3f seconds", (double)Param.MxTime / 1000.0);
            break;
        }
        // check stopping criteria with respect to result difference;
        // only when at least one update happened in this iteration
        if (DiffCount > 0 && Diff < Param.MnDiff) {
            LogNotify->OnStatusFmt("Finishing due to reached difference limit of %g", Param.MnDiff);
            break;
        }
    }
    if (Iters == Param.MxIter) {
        LogNotify->OnStatusFmt("Finished due to iteration limit of %d", Iters);
    }
    ProgressNotify();
    Profiler.PrintReport(LogNotify);
}
// Train a linear epsilon-insensitive regression model with a Pegasos-style
// stochastic sub-gradient solver. The weight vector is kept implicitly scaled
// by the factor Norm so that sparse-vector updates stay cheap; the true
// weights are Norm * WgtV and are materialized at the end. Stops on iteration
// limit, time limit, or a small bound on the relative weight change.
template <class TVecV>
void TLinModel::SolveRegression(const TVecV& VecV, const int& Dims, const int& Vecs,
        const TFltV& TargetV, const PNotify& _LogNotify, const PNotify& ErrorNotify) {

    // asserts for input parameters
    EAssertR(Dims > 0, "Dimensionality must be positive!");
    EAssertR(Vecs > 0, "Number of vectors must be positive!");
    EAssertR(Vecs == TargetV.Len(), "Number of vectors must be equal to the number of targets!");
    EAssertR(Param.Cost > 0.0, "Cost parameter must be positive!");
    EAssertR(Param.SampleSize > 0, "Sampling size must be positive!");
    EAssertR(Param.MxIter > 1, "Number of iterations to small!");
    EAssertR(Param.MnDiff >= 0, "Min difference must be nonnegative!");
    // hide output if not verbose
    PNotify LogNotify = Param.Verbose ? _LogNotify : TNotify::NullNotify;
    // initialization
    TRnd Rnd(1);
    // Pegasos regularization factor: lambda = 1 / (n * C)
    const double Lambda = 1.0 / (double(Vecs) * Param.Cost);
    // we start with random normal vector
    WgtV = TFltV(Dims); TLinAlgTransform::FillRnd(WgtV, Rnd); TLinAlg::Normalize(WgtV);
    // Scale it to appropriate norm
    TLinAlg::MultiplyScalar(1.0 / (2.0 * TMath::Sqrt(Lambda)), WgtV, WgtV);
    // True norm is a product of Norm and TLinAlg::Norm(WgtV)
    // The reason for this is that we can have very cheap updates for sparse
    // vectors - we do not need to touch all elements of WgtV in each subgradient
    // update.
    double Norm = 1.0;
    // Normw tracks the (approximate) norm of the true weight vector
    double Normw = 1.0 / (2.0 * TMath::Sqrt(Lambda));
    TTmTimer Timer(Param.MxTime); int Iters = 0; double Diff = 1.0;
    LogNotify->OnStatusFmt("Limits: %d iterations, %.3f seconds, %.8f weight difference",
        Param.MxIter, (double)Param.MxTime / 1000.0, Param.MnDiff);
    // initialize profiler
    TTmProfiler Profiler;
    const int ProfilerPre = Profiler.AddTimer("Pre");
    const int ProfilerBatch = Profiler.AddTimer("Batch");
    // function for writing progress reports
    auto ProgressNotify = [&]() {
        LogNotify->OnStatusFmt("  %d iterations, %.3f seconds, last weight difference %g",
            Iters, Timer.GetStopWatch().GetMSec() / 1000.0, Diff);
    };
    // Since we are using weight vector overrepresentation using Norm, we need to
    // compensate the scaling when adding examples using Coef
    double Coef = 1.0;
    for (int IterN = 0; IterN < Param.MxIter; IterN++) {
        if (IterN % 100 == 0) { ProgressNotify(); }
        Profiler.StartTimer(ProfilerPre);
        // tells how much we can move (learning rate decays with iteration)
        const double Nu = 1.0 / (Lambda * double(IterN + 2));
        // update Coef which counters Norm
        Coef /= (1 - Nu * Lambda);
        const double VecUpdate = Nu / (double(Param.SampleSize)) * Coef;
        Profiler.StopTimer(ProfilerPre);
        // Track the upper bound on the change of norm of WgtV
        Diff = 0.0;
        // process examples from the sample
        Profiler.StartTimer(ProfilerBatch);
        // store which examples will lead to gradient updates (and their factors)
        TVec<TPair<TFlt, TInt> > Updates(Param.SampleSize, 0);
        // in the first pass we find which samples will lead to updates
        for (int SampleN = 0; SampleN < Param.SampleSize; SampleN++) {
            const int VecN = Rnd.GetUniDevInt(Vecs);
            // target
            const double Target = TargetV[VecN];
            // prediction
            double Dot = TLinAlg::DotProduct(VecV, VecN, WgtV);
            // Used in bound computation
            double NorX = TLinAlg::Norm(VecV, VecN);
            // For predictions we need to use the Norm to scale correctly
            const double Pred = Norm * Dot;
            // difference
            const double Loss = Target - Pred;
            // do the update based on the difference
            if (Loss < -Param.Eps) { // y_i - z < -eps
                // update from the negative stochastic sub-gradient: -x
                Updates.Add(TPair<TFlt, TInt>(-VecUpdate, VecN));
                // update the norm of WgtV
                Normw = sqrt(Normw*Normw - 2 * VecUpdate * Dot + VecUpdate * VecUpdate * NorX * NorX);
                // update the bound on the change of norm of WgtV
                Diff += VecUpdate * NorX;
            } else if (Loss > Param.Eps) { // y_i - z > eps
                // update from the negative stochastic sub-gradient: x
                Updates.Add(TPair<TFlt, TInt>(VecUpdate, VecN));
                // update the norm of WgtV
                Normw = sqrt(Normw*Normw + 2 * VecUpdate * Dot + VecUpdate * VecUpdate * NorX * NorX);
                // update the bound on the change of norm of WgtV
                Diff += VecUpdate * NorX;
            } // else nothing to do, we are within the epsilon tube
        }
        // Diff now estimates the upper bound on |w - w_old|/|w|
        Diff /= Normw;
        // in the second pass we update
        for (int UpdateN = 0; UpdateN < Updates.Len(); UpdateN++) {
            TLinAlg::AddVec(Updates[UpdateN].Val1, VecV, Updates[UpdateN].Val2, WgtV, WgtV);
        }
        Norm *= (1 - Nu * Lambda);
        Profiler.StopTimer(ProfilerBatch);
        // renormalizing is not needed according to new results:
        // "Pegasos: Primal Estimated sub-GrAdient SOlver for SVM"
        // Mathematical Programming, Series B, 127(1):3-30, 2011.
        // count
        Iters++;
        // check stopping criteria with respect to time
        if (Timer.IsTimeUp()) {
            LogNotify->OnStatusFmt("Finishing due to reached time limit of %.3f seconds", (double)Param.MxTime / 1000.0);
            break;
        }
        // check stopping criteria with respect to result difference
        if (Diff < Param.MnDiff) {
            LogNotify->OnStatusFmt("Finishing due to reached difference limit of %g", Param.MnDiff);
            break;
        }
    }
    if (Iters == Param.MxIter) {
        LogNotify->OnStatusFmt("Finished due to iteration limit of %d", Iters);
    }
    // Finally we use the Norm factor to rescale the weight vector
    TLinAlg::MultiplyScalar(Norm, WgtV, WgtV);
    ProgressNotify();
    Profiler.PrintReport(LogNotify);
}
///////////////////////////////////////////////////////////////////////////////
// TSvmLibParam
// Deserializing constructor: reads all parameters from the binary stream.
TLibSvmParam::TLibSvmParam(TSIn& SIn) { Load(SIn); }
// Load parameters from a binary stream.
// NOTE: field order must match Save() exactly.
void TLibSvmParam::Load(TSIn& SIn){
    Type.Load(SIn);
    Kernel.Load(SIn);
    Cost.Load(SIn);
    Unbalance.Load(SIn);
    Eps.Load(SIn);
    Gamma.Load(SIn);
    P.Load(SIn);
    Degree.Load(SIn);
    Nu.Load(SIn);
    Coef0.Load(SIn);
    CacheSize.Load(SIn);
    Verbose.Load(SIn);
}
// Save parameters to a binary stream; temporaries (TInt/TFlt/TBool) fix the
// serialized types. Field order must match Load() exactly.
void TLibSvmParam::Save(TSOut& SOut) const {
    TInt(Type).Save(SOut);
    TInt(Kernel).Save(SOut);
    TFlt(Cost).Save(SOut);
    TFlt(Unbalance).Save(SOut);
    TFlt(Eps).Save(SOut);
    TFlt(Gamma).Save(SOut);
    TFlt(P).Save(SOut);
    TInt(Degree).Save(SOut);
    TFlt(Nu).Save(SOut);
    TFlt(Coef0).Save(SOut);
    TFlt(CacheSize).Save(SOut);
    TBool(Verbose).Save(SOut);
}
// Build a LIBSVM svm_parameter_t for training from the stored settings.
// The weight_label/weight arrays are malloc'ed here and are expected to be
// released by svm_destroy_param (see the original comments).
svm_parameter_t TLibSvmParam::GetParamStruct() const {
    svm_parameter_t Par;
    Par.svm_type = Type;//default
    Par.kernel_type = Kernel;//default
    Par.degree = Degree;
    Par.gamma = Gamma;
    Par.coef0 = Coef0;
    // training only
    Par.C = Cost;
    Par.nu = Nu;
    // two class weights: label -1 gets weight 1, label +1 gets Unbalance
    Par.nr_weight = 2;
    Par.weight_label = (int *)malloc(2 * sizeof(int)); // deleted in svm_destroy_param
    Par.weight_label[0] = -1;
    Par.weight_label[1] = 1;
    Par.weight = (double *)malloc(2 * sizeof(double)); // deleted in svm_destroy_param
    Par.weight[0] = 1;
    Par.weight[1] = Unbalance;
    Par.cache_size = CacheSize;
    Par.eps = Eps;
    Par.p = P; // not needed but it has to be positive as it is checked
    Par.shrinking = 0;
    Par.probability = 0;
    return Par;
}
///////////////////////////////////////////////////////////////////////////////
// TSvmLibModelPredictParam
// Deserializing constructor: reads all prediction parameters from the stream.
TLibSvmPredictParam::TLibSvmPredictParam(TSIn& SIn) { Load(SIn); }
// Load prediction parameters from a binary stream.
// NOTE: field order must match Save() exactly.
void TLibSvmPredictParam::Load(TSIn& SIn){
    Type.Load(SIn);
    Kernel.Load(SIn);
    Gamma.Load(SIn);
    Degree.Load(SIn);
    Coef0.Load(SIn);
}
// Save prediction parameters to a binary stream; order must match Load().
void TLibSvmPredictParam::Save(TSOut& SOut) const {
    TInt(Type).Save(SOut);
    TInt(Kernel).Save(SOut);
    TFlt(Gamma).Save(SOut);
    TInt(Degree).Save(SOut);
    TFlt(Coef0).Save(SOut);
}
// Build the minimal LIBSVM svm_parameter_t needed for prediction
// (kernel description only; training-only fields are left untouched).
svm_parameter_t TLibSvmPredictParam::GetPredictParamStruct() const {
    svm_parameter_t Par;
    Par.svm_type = Type;//default
    Par.kernel_type = Kernel;//default
    Par.degree = Degree;
    Par.gamma = Gamma;
    Par.coef0 = Coef0;
    return Par;
}
///////////////////////////////////////////////////////////////////////////////
// TLibSvmModel
// Deserializing constructor: reads the full model from the binary stream.
TLibSvmModel::TLibSvmModel(TSIn& SIn) { Load(SIn); }
// Load the model from a binary stream.
// NOTE: field order must match Save() exactly.
void TLibSvmModel::Load(TSIn& SIn){
    WgtV.Load(SIn);
    Bias.Load(SIn);
    Param.Load(SIn);
    PredictParam.Load(SIn);
    SupportVectors.Load(SIn);
    Coef.Load(SIn);
    Rho.Load(SIn);
    NSupportVectors.Load(SIn);
}
// Save the model to a binary stream; field order must match Load() exactly.
void TLibSvmModel::Save(TSOut& SOut) const {
    WgtV.Save(SOut);
    Bias.Save(SOut);
    Param.Save(SOut);
    PredictParam.Save(SOut);
    SupportVectors.Save(SOut);
    Coef.Save(SOut);
    Rho.Save(SOut);
    NSupportVectors.Save(SOut);
}
void TLibSvmModel::UpdateParams(const PJsonVal& ParamVal) {
if (ParamVal->IsObjKey("kernel")) {
TStr KernelStr = ParamVal->GetObjStr("kernel");
Param.Kernel = LIBSVM_LINEAR;
if (KernelStr == "LINEAR") { Param.Kernel = LIBSVM_LINEAR; }
else if (KernelStr == "POLY") { Param.Kernel = LIBSVM_POLY; }
else if (KernelStr == "RBF") { Param.Kernel = LIBSVM_RBF; }
else if (KernelStr == "SIGMOID") { Param.Kernel = LIBSVM_SIGMOID; }
else if (KernelStr == "PRECOMPUTED") { Param.Kernel = LIBSVM_PRECOMPUTED; }
}
if (ParamVal->IsObjKey("svmType")) {
TStr TypeStr = ParamVal->GetObjStr("svmType");
Param.Type = DEFAULT;
if (TypeStr == "C_SVC") { Param.Type = LIBSVM_CSVC; }
else if (TypeStr == "NU_SVC") { Param.Type = LIBSVM_NUSVC; }
else if (TypeStr == "ONE_CLASS") { Param.Type = LIBSVM_ONECLASS; }
else if (TypeStr == "EPSILON_SVR") { Param.Type = LIBSVM_EPSILONSVR; }
else if (TypeStr == "NU_SVR") { Param.Type = LIBSVM_NUSVC; }
}
if (ParamVal->IsObjKey("c")) { Param.Cost = ParamVal->GetObjNum("c"); }
if (ParamVal->IsObjKey("j")) { Param.Unbalance = ParamVal->GetObjNum("j"); }
if (ParamVal->IsObjKey("eps")) { Param.Eps = ParamVal->GetObjNum("eps"); }
if (ParamVal->IsObjKey("gamma")) { Param.Gamma = ParamVal->GetObjNum("gamma"); }
if (ParamVal->IsObjKey("p")) { Param.P = ParamVal->GetObjNum("p"); }
if (ParamVal->IsObjKey("degree")) { Param.Degree = ParamVal->GetObjInt("degree"); }
if (ParamVal->IsObjKey("nu")) { Param.Nu = ParamVal->GetObjNum("nu"); }
if (ParamVal->IsObjKey("coef0")) { Param.Coef0 = ParamVal->GetObjNum("coef0"); }
if (ParamVal->IsObjKey("cacheSize")) { Param.CacheSize = ParamVal->GetObjNum("cacheSize"); }
if (ParamVal->IsObjKey("verbose")) { Param.Verbose = ParamVal->GetObjBool("verbose"); }
}
// Serialize the current parameters to a JSON object. The string names must
// mirror the ones accepted by UpdateParams() so the output round-trips.
PJsonVal TLibSvmModel::GetParams() const {
    PJsonVal ParamVal = TJsonVal::NewObj();
    TStr KernelStr = "LINEAR";
    if (Param.Kernel == LIBSVM_LINEAR) { KernelStr = "LINEAR"; }
    else if (Param.Kernel == LIBSVM_POLY) { KernelStr = "POLY"; }
    else if (Param.Kernel == LIBSVM_RBF) { KernelStr = "RBF"; }
    else if (Param.Kernel == LIBSVM_SIGMOID) { KernelStr = "SIGMOID"; }
    else if (Param.Kernel == LIBSVM_PRECOMPUTED) { KernelStr = "PRECOMPUTED"; }
    ParamVal->AddToObj("kernel", KernelStr);
    TStr TypeStr = "default";
    if (Param.Type == LIBSVM_CSVC) { TypeStr = "C_SVC"; }
    else if (Param.Type == LIBSVM_NUSVC) { TypeStr = "NU_SVC"; }
    else if (Param.Type == LIBSVM_ONECLASS) { TypeStr = "ONE_CLASS"; }
    else if (Param.Type == LIBSVM_EPSILONSVR) { TypeStr = "EPSILON_SVR"; }
    else if (Param.Type == LIBSVM_NUSVR) { TypeStr = "NU_SVR"; }
    else if (Param.Type == DEFAULT) { TypeStr = "default"; }
    ParamVal->AddToObj("svmType", TypeStr);
    ParamVal->AddToObj("c", Param.Cost);
    ParamVal->AddToObj("j", Param.Unbalance);
    ParamVal->AddToObj("eps", Param.Eps);
    ParamVal->AddToObj("gamma", Param.Gamma);
    ParamVal->AddToObj("p", Param.P);
    ParamVal->AddToObj("degree", Param.Degree);
    ParamVal->AddToObj("nu", Param.Nu);
    ParamVal->AddToObj("coef0", Param.Coef0);
    ParamVal->AddToObj("cacheSize", Param.CacheSize);
    ParamVal->AddToObj("verbose", Param.Verbose);
    return ParamVal;
}
// Rebuild a LIBSVM svm_model_t from the stored matrices so that LIBSVM's
// prediction routines can be called. All arrays are malloc'ed here and must
// be released with DeleteModelStruct() (which frees based on the same
// SupportVectors/Coef dimensions used here).
svm_model_t* TLibSvmModel::GetModelStruct() const {
    svm_model_t* svm_model = new svm_model_t;
    svm_model->param = PredictParam.GetPredictParamStruct();
    int DimX = SupportVectors.GetXDim();
    int DimY = SupportVectors.GetYDim();
    // support vectors: one sparse row per SV, terminated by index == -1
    svm_model->l = DimX;
    svm_model->SV = (svm_node_t **)malloc(DimX * sizeof(svm_node_t *));
    for (int Idx = 0; Idx < DimX; Idx++){
        svm_model->SV[Idx] = (svm_node_t *)malloc((DimY+ 1) * sizeof(svm_node_t));
        for (int cIdx = 0; cIdx < DimY; cIdx ++){
            svm_model->SV[Idx][cIdx].index = cIdx;
            svm_model->SV[Idx][cIdx].value = SupportVectors.GetXY(Idx, cIdx);
        }
        svm_model->SV[Idx][DimY].index = -1;
    }
    // coefficients: Coef is (nr_class - 1) x l
    DimX = Coef.GetXDim();
    DimY = Coef.GetYDim();
    svm_model->nr_class = DimX + 1;
    svm_model->sv_coef = (double **)malloc(DimX * sizeof(double *));
    for (int Idx = 0; Idx < DimX; Idx++){
        svm_model->sv_coef[Idx] = (double *)malloc(DimY * sizeof(double));
        for (int cIdx = 0; cIdx < DimY; cIdx ++){
            svm_model->sv_coef[Idx][cIdx] = Coef.GetXY(Idx, cIdx);
        }
    }
    // rho: one value per class pair
    DimX = Rho.Len();
    svm_model->rho = (double *)malloc(DimX * sizeof(double));
    for (int Idx = 0; Idx < DimX; Idx++){
        svm_model->rho[Idx] = Rho[Idx];
    }
    // not needed (and therefore not saved)
    svm_model->free_sv = 0;
    svm_model->probA = NULL;
    svm_model->probB = NULL;
    svm_model->sv_indices = NULL;
    // classification specific: per-class SV counts and labels (labels are
    // assumed to be 0..nr_class-1 here)
    svm_model->nSV = NULL;
    svm_model->label = NULL;
    if (Param.Type == C_SVC || Param.Type == NU_SVC){
        DimX = NSupportVectors.Len();
        svm_model->nSV = (int *)malloc(DimX * sizeof(int));
        for (int Idx = 0; Idx < DimX; Idx++){
            svm_model->nSV[Idx] = NSupportVectors[Idx];
        }
        DimX = svm_model->nr_class;
        svm_model->label = (int *)malloc(DimX * sizeof(int));
        for (int Idx = 0; Idx < DimX; Idx++){
            svm_model->label[Idx] = Idx;
        }
    }
    return svm_model;
}
// Release a svm_model_t previously built by GetModelStruct(). The free
// pattern mirrors the allocation there and relies on SupportVectors/Coef
// still having the same dimensions as when the struct was built.
void TLibSvmModel::DeleteModelStruct(svm_model_t* svm_model) const {
    // free svm_model->SV
    int DimX = SupportVectors.GetXDim();
    for (int Idx = 0; Idx < DimX; Idx++){
        free(svm_model->SV[Idx]);
        svm_model->SV[Idx] = NULL;
    }
    free(svm_model->SV);
    svm_model->SV = NULL;
    // free svm_model->sv_coef
    DimX = Coef.GetXDim();
    for (int Idx = 0; Idx < DimX; Idx++){
        free(svm_model->sv_coef[Idx]);
        svm_model->sv_coef[Idx] = NULL;
    }
    free(svm_model->sv_coef);
    svm_model->sv_coef = NULL;
    // free svm_model->rho
    free(svm_model->rho);
    svm_model->rho = NULL;
    // free svm_model->nSV and svm_model->label if allocated
    if (Param.Type == C_SVC || Param.Type == NU_SVC){
        free(svm_model->nSV);
        svm_model->nSV = NULL;
        free(svm_model->label);
        svm_model->label = NULL;
    }
    delete svm_model;
    // note: this only nulls the local pointer copy, not the caller's pointer
    svm_model = NULL;
}
// Copy the results of a LIBSVM training run (svm_model_t) into this model's
// own storage (WgtV, Bias, SupportVectors, Coef, Rho, NSupportVectors) and
// destroy the LIBSVM struct. Dim is the feature-space dimension.
// NOTE(review): the weight vector is built from sv_coef[0] only, which looks
// valid for binary classification / regression — confirm for multi-class.
void TLibSvmModel::ConvertResults(svm_model_t* svm_model, int Dim){
    PredictParam = TLibSvmPredictParam(Param.Type, Param.Kernel, Param.Gamma, Param.Degree, Param.Coef0);
    WgtV = TFltV(Dim);
    Bias = -svm_model->rho[0]; // LIBSVM does w*x-b, while we do w*x+b; thus the sign flip
    SupportVectors = TFltVV(svm_model->l, Dim);
    Coef = TFltVV(svm_model->nr_class - 1, svm_model->l);
    Rho = TFltV(svm_model->nr_class * (svm_model->nr_class - 1)/2);
    // compute normal vector from support vectors
    EAssertR(TLinAlg::Norm(WgtV) == 0.0, "Expected a zero weight vector.");
    for (int Idx = 0; Idx < svm_model->l; ++Idx) {
        // walk the sparse SV row; LIBSVM terminates it with index == -1 and
        // uses 1-based feature indices
        svm_node_t* SVs = svm_model->SV[Idx];
        while (SVs->index != -1) {
            SupportVectors.PutXY(Idx, SVs->index - 1, SVs->value);
            WgtV[SVs->index - 1] += svm_model->sv_coef[0][Idx] * SVs->value;
            ++SVs;
        }
        for (int cIdx = 0; cIdx < svm_model->nr_class - 1; cIdx++){
            Coef.PutXY(cIdx, Idx, svm_model->sv_coef[cIdx][Idx]);
        }
    }
    for (int Idx = 0; Idx < svm_model->nr_class * (svm_model->nr_class - 1)/2; Idx++){
        Rho.SetVal(Idx, svm_model->rho[Idx]);
    }
    if (Param.Type == C_SVC || Param.Type == NU_SVC){
        NSupportVectors = TIntV(svm_model->nr_class);
        for (int Idx = 0; Idx < svm_model->nr_class; Idx++){
            NSupportVectors.SetVal(Idx, svm_model->nSV[Idx]);
        }
    }
    // clean up: svm_free_and_destroy_model frees the model AND sets the
    // pointer to NULL, so the previous extra free(svm_model) was a no-op on
    // a NULL pointer and has been removed.
    svm_free_and_destroy_model(&svm_model);
}
// Predict on a dense vector. Linear kernels use the precomputed weight vector
// directly; other kernels rebuild the LIBSVM model struct, call
// svm_predict_values, and return the first decision value.
double TLibSvmModel::Predict(const TFltV& Vec) const {
    if (Param.Kernel == LINEAR){
        return TLinAlg::DotProduct(WgtV, Vec) + Bias;
    }
    svm_model_t* model = GetModelStruct();
    // sparse LIBSVM input, terminated with index == -1
    svm_node_t *x = (svm_node_t *)malloc((Vec.Len() + 1) * sizeof(svm_node_t));
    // one decision value per class pair
    double* dec_val = (double *)malloc(model->nr_class*(model->nr_class-1)/2 * sizeof(double));
    for (int Idx = 0; Idx < Vec.Len(); Idx++){
        x[Idx].index = Idx;
        x[Idx].value = Vec[Idx];
    }
    x[Vec.Len()].index = -1;
    svm_predict_values(model, x, dec_val);
    double result = dec_val[0];
    free(x);
    free(dec_val);
    DeleteModelStruct(model);
    return result;
}
double TLibSvmModel::Predict(const TIntFltKdV& SpVec) const {
if (Param.Kernel == LINEAR){
return TLinAlg::DotProduct(WgtV, SpVec) + Bias;
}
int FullDim = WgtV.Len();
TFltV Vec = TFltV(FullDim);
for (int Idx = 0; Idx < FullDim; Idx++){
Vec.SetVal(Idx, 0);
}
int Dim = SpVec.Len();
for (int Idx = 0; Idx < Dim; Idx++){
EAssertR(SpVec.GetVal(Idx).Key < FullDim, "Dimension mismatch.");
Vec.SetVal(SpVec.GetVal(Idx).Key, SpVec.GetVal(Idx).Dat);
}
return Predict(Vec);
}
double TLibSvmModel::Predict(const TFltVV& Mat, const int& ColN) const {
if (Param.Kernel == LINEAR){
return TLinAlg::DotProduct(Mat, ColN, WgtV) + Bias;
}
int DimX = Mat.GetXDim();
TFltV Col(DimX);
Mat.GetCol(ColN, Col);
return Predict(Col);
}
// Train a LIBSVM classifier on sparse input vectors. Builds the LIBSVM
// problem (sparse rows terminated by index == -1, 1-based feature indices),
// trains, converts the result into this model's storage, and frees all
// temporary allocations. DimsA/VecsA are unused; dimensions are derived from
// VecV itself.
void TLibSvmModel::FitClassification(const TVec<TIntFltKdV>& VecV, const int& DimsA, const int& VecsA,
        const TFltV& TargetV, const PNotify& _LogNotify, const PNotify& ErrorNotify) {
    // FIX: removed stray debug printf("inside FitClassification\n") that
    // wrote straight to stdout, bypassing the notify/verbosity machinery.
    if (Param.Type == DEFAULT) { Param.Type = LIBSVM_CSVC; }
    // load training parameters
    svm_parameter_t svm_parameter = Param.GetParamStruct();
    // Asserts for input arguments
    EAssertR(Param.Cost > 0.0, "Cost parameter has to be positive.");
    // load train data
    svm_problem_t svm_problem;
    svm_problem.l = VecV.Len();
    // reserve space for target variable
    svm_problem.y = (double *)malloc(VecV.Len() * sizeof(double));
    // reserve space for training vectors
    svm_problem.x = (svm_node_t **)malloc(VecV.Len() * sizeof(svm_node_t *));
    // compute number of nonzero elements and get dimensionality
    int NonZero = 0, Dim = 0;
    for (int VecN = 0; VecN < VecV.Len(); ++VecN) {
        NonZero += (VecV[VecN].Len() + 1); // +1 for the terminator node
        if (!VecV[VecN].Empty()) {
            Dim = TInt::GetMx(Dim, VecV[VecN].Last().Key + 1);
        }
    }
    svm_node_t* x_space = (svm_node_t *)malloc(NonZero * sizeof(svm_node_t));
    // load training data and vectors
    int N = 0, prevN = 0;
    for (int VecN = 0; VecN < VecV.Len(); ++VecN) {
        prevN = N;
        svm_problem.y[VecN] = TargetV[VecN];
        for (int EltN = 0; EltN < VecV[VecN].Len(); ++EltN) {
            x_space[N].index = VecV[VecN][EltN].Key+1; // LIBSVM is 1-based
            x_space[N++].value = VecV[VecN][EltN].Dat;
        }
        x_space[N++].index = -1; // row terminator
        svm_problem.x[VecN] = &x_space[prevN];
    }
    const char* error_msg = svm_check_parameter(&svm_problem, &svm_parameter);
    EAssertR(error_msg == NULL, error_msg);
    // hide output if not verbose
    PNotify LogNotify = Param.Verbose ? _LogNotify : TNotify::NullNotify;
    // train the model
    svm_model_t* svm_model = svm_train(&svm_problem, &svm_parameter, LogNotify(), ErrorNotify());
    // save model (this also destroys svm_model)
    ConvertResults(svm_model, Dim);
    // clean up
    svm_destroy_param(&svm_parameter);
    free(svm_problem.y);
    free(svm_problem.x);
    free(x_space);
}
// Train a LIBSVM classifier on dense input (features in rows, examples in
// columns). Only nonzero entries are copied into the LIBSVM sparse format;
// x_space is sized for the worst case (all entries nonzero, plus one
// terminator per example). DimsA/VecsA are unused; dimensions come from VecV.
void TLibSvmModel::FitClassification(const TFltVV& VecV, const int& DimsA, const int& VecsA,
        const TFltV& TargetV, const PNotify& _LogNotify, const PNotify& ErrorNotify) {
    if (Param.Type == DEFAULT) { Param.Type = LIBSVM_CSVC; }
    // load training parameters
    svm_parameter_t svm_parameter = Param.GetParamStruct();
    // Asserts for input arguments
    EAssertR(Param.Cost > 0.0, "Cost parameter has to be positive.");
    const int DimN = VecV.GetXDim(); // Number of features
    const int AllN = VecV.GetYDim(); // Number of examples
    EAssertR(TargetV.Len() == AllN, "Dimension mismatch.");
    svm_problem_t svm_problem;
    svm_problem.l = AllN;
    svm_problem.y = (double *)malloc(AllN*sizeof(double));
    svm_problem.x = (svm_node_t **)malloc(AllN*sizeof(svm_node_t *));
    svm_node_t* x_space = (svm_node_t *)malloc((AllN*(DimN+1))*sizeof(svm_node_t));
    int N = 0, prevN = 0;
    for (int Idx = 0; Idx < AllN; ++Idx) { // # of examples
        prevN = N;
        svm_problem.y[Idx] = TargetV[Idx];
        for (int Jdx = 0; Jdx < DimN; ++Jdx) { // # of features
            if (VecV.At(Jdx, Idx) != 0.0) { // Store non-zero entries only
                x_space[N].index = Jdx+1; // LIBSVM is 1-based
                x_space[N].value = VecV.At(Jdx, Idx);
                ++N;
            }
        }
        x_space[N].index = -1; // row terminator
        ++N;
        svm_problem.x[Idx] = &x_space[prevN];
    }
    const char* error_msg = svm_check_parameter(&svm_problem, &svm_parameter);
    EAssertR(error_msg == NULL, error_msg);
    // hide output if not verbose
    PNotify LogNotify = Param.Verbose ? _LogNotify : TNotify::NullNotify;
    // train model
    svm_model_t* svm_model = svm_train(&svm_problem, &svm_parameter, LogNotify(), ErrorNotify());
    // save model (this also destroys svm_model)
    ConvertResults(svm_model, DimN);
    // clean up
    svm_destroy_param(&svm_parameter);
    free(svm_problem.y);
    free(svm_problem.x);
    free(x_space);
}
// Train a LIBSVM regression model on sparse data: defaults the type to
// epsilon-SVR, then reuses the classification fitting path (LIBSVM
// distinguishes the task via svm_type).
void TLibSvmModel::FitRegression(const TVec<TIntFltKdV>& VecV, const int& Dims, const int& Vecs,
        const TFltV& TargetV, const PNotify& LogNotify, const PNotify& ErrorNotify) {
    if (Param.Type == DEFAULT) { Param.Type = LIBSVM_EPSILONSVR; }
    FitClassification(VecV, Dims, Vecs, TargetV, LogNotify, ErrorNotify);
}
// Train a LIBSVM regression model on dense data: defaults the type to
// epsilon-SVR, then reuses the classification fitting path (LIBSVM
// distinguishes the task via svm_type).
void TLibSvmModel::FitRegression(const TFltVV& VecV, const int& Dims, const int& Vecs,
        const TFltV& TargetV, const PNotify& LogNotify, const PNotify& ErrorNotify) {
    if (Param.Type == DEFAULT) { Param.Type = LIBSVM_EPSILONSVR; }
    FitClassification(VecV, Dims, Vecs, TargetV, LogNotify, ErrorNotify);
}
}// end namespace
|
# SPDX-License-Identifier: MIT
# Copyright (c) 2021 scmanjarrez. All rights reserved.
# This work is licensed under the terms of the MIT license.
from datetime import datetime
import database as db
import threads as th
import util as ut
import paimon
# Per-user pending-command state: uid -> ut.CMD value. The free-text handler
# consults this to route a bare reply (e.g. a number) to the right command.
STATE = {}

# HTML-formatted help text sent on /start and /help.
HELP = (
    "I can help you manage your resin. "
    "Control me by sending these commands:"
    "\n\n"
    "❔ /menu - Interact with me using UI. <b>[beta]</b>"
    "\n\n"
    "<b>Manage Resin</b>"
    "\n"
    "❔ /resin <code>[#]</code> - Resin status. "
    "Use <code>number</code> to calculate hour before cap."
    "\n"
    "❔ /set <code>[#]</code> - Set resin value."
    "\n"
    "❔ /spend <code>[#]</code> - Spend resin."
    "\n"
    "❔ /refill <code>[#]</code> - Increase resin."
    "\n"
    "❔ /track <code>[mm:ss]</code> - Synchronize bot timer. "
    "Use <code>-1</code> to disable."
    "\n\n"
    "<b>Reminders</b>"
    "\n"
    "❔ /warnings <code>[#]</code> - Set resin warning threshold. "
    "Use <code>-1</code> to disable warnings."
    "\n"
    "❔ /timezone <code>[hh:mm]</code> - Set your time zone. "
    "Use <code>-1</code> to disable timezone."
    "\n\n"
    "<b>Bot Usage</b>\n"
    "❔ /help - List of commands."
    "\n"
    "❔ /cancel - Cancel current action."
    "\n"
    "❔ /stop - Remove your information from the bot."
    "\n\n"
    "<i><b>Note:</b> Arguments inside brackets are optional.</i>"
)
def _state(uid, state=ut.CMD.NOP):
    """Record the pending command for this user (NOP clears any action)."""
    STATE[uid] = state
def del_state(uid):
    """Drop any pending-command state stored for this user."""
    STATE.pop(uid, None)
def _synchronized(uid, msg):
    """Append a sync warning to msg when the bot timer is unsynced/untracked."""
    warning = None
    if th.is_unsync(uid):
        warning = ("⚠ Bot not synchronized, "
                   "send /track to synchronize bot timer.")
    elif not th.is_tracked(uid):
        warning = ("⚠ Bot not tracking your resin, "
                   "send /track to synchronize bot timer.")
    if warning is not None:
        msg = f"{msg}\n\n{warning}"
    return msg
def start(update, context):
    """Handle /start: greet returning users, register new ones."""
    uid = ut.uid(update)
    if db.banned(uid):
        return
    if db.cached(uid):
        msg = "I'm glad to see you again, Traveler!"
    else:
        # first contact: reset state, create the user, show full help
        _state(uid)
        db.add_user(uid)
        msg = f"Hi Traveler, I'm Paimon!\n\n{HELP}"
    ut.send(update, msg)
def bot_help(update, context):
    """Handle /help: send the command list to registered, non-banned users."""
    uid = ut.uid(update)
    if db.banned(uid):
        return
    if db.cached(uid):
        ut.send(update, HELP)
    else:
        ut.not_started(update)
def resin(update, context):
    """Handle /resin: report resin status, optionally projecting a target.

    With a numeric argument strictly between the current resin and
    ut.RESIN_MAX, reports the time until that value is reached; invalid
    arguments strike the user (anti-abuse counter).
    """
    uid = ut.uid(update)
    if not db.banned(uid):
        if not db.cached(uid):
            ut.not_started(update)
        else:
            cur_resin = db.get_resin(uid)
            # default reply doubles as the error text for bad arguments
            msg = (f"❗ Argument must be an integer greater than "
                   f"{cur_resin} and lower than {ut.RESIN_MAX}, "
                   f"e.g. /resin, /resin 135")
            if context.args:
                try:
                    value = int(context.args[0])
                except ValueError:
                    msg = ut.strike_user(uid, msg)
                else:
                    if cur_resin < value < ut.RESIN_MAX:
                        msg = ut.cap_format(uid, value)
                        msg = _synchronized(uid, msg)
                        db.dec_strikes(uid)
                    else:
                        msg = ut.strike_user(uid, msg)
            else:
                # no argument: plain status report
                msg = ut.cap_format(uid)
                msg = _synchronized(uid, msg)
                db.dec_strikes(uid)
                _state(uid)
            ut.send(update, msg)
def _set_resin(args, uid, msg):
    """Parse args[0] and store it as the user's resin; return the reply text.

    Invalid input strikes the user and returns the error message unchanged.
    """
    try:
        value = int(args[0])
    except ValueError:
        return ut.strike_user(uid, msg)
    if not 0 < value < ut.RESIN_MAX:
        return ut.strike_user(uid, msg)
    db.set_resin(uid, value)
    reply = ut.text_format("Current resin", value)
    reply = _synchronized(uid, reply)
    db.dec_strikes(uid)
    return reply
def set_resin(update, context):
    """Handle /set: store a resin value directly, or prompt for one."""
    uid = ut.uid(update)
    if db.banned(uid):
        return
    if not db.cached(uid):
        ut.not_started(update)
        return
    msg = (f"❗ Argument must be an integer lower than {ut.RESIN_MAX}, "
           f"e.g. /set, /set 12")
    if context.args:
        msg = _set_resin(context.args, uid, msg)
    else:
        # no argument: prompt and await the value via the text handler
        msg = (f"Tell me your current resin "
               f"(max: <code>{ut.RESIN_MAX}</code>)")
        _state(uid, ut.CMD.SET)
        db.dec_strikes(uid)
    ut.send(update, msg)
def _spend(args, uid, msg, current):
    """Parse args[0] and subtract it from the user's resin; return reply text.

    Invalid input strikes the user and returns the error message unchanged.
    """
    try:
        value = int(args[0])
    except ValueError:
        return ut.strike_user(uid, msg)
    if not 0 < value < current:
        return ut.strike_user(uid, msg)
    db.dec_resin(uid, value)
    reply = ut.text_format("Current resin", current - value)
    reply = _synchronized(uid, reply)
    db.dec_strikes(uid)
    return reply
def spend(update, context):
    """Handle /spend: deduct resin directly, or prompt for an amount."""
    uid = ut.uid(update)
    if db.banned(uid):
        return
    if not db.cached(uid):
        ut.not_started(update)
        return
    cur_resin = db.get_resin(uid)
    msg = (f"❗ Argument must be an integer greater than 0 "
           f"and lower than {cur_resin}, "
           f"e.g. /spend, /spend 20")
    if context.args:
        msg = _spend(context.args, uid, msg, cur_resin)
    else:
        # no argument: prompt and await the value via the text handler
        msg = (f"Tell me how much resin to spend "
               f"(max: <code>{cur_resin}</code>)")
        _state(uid, ut.CMD.SPEND)
        db.dec_strikes(uid)
    ut.send(update, msg)
def _refill(args, uid, msg, current, max_resin):
    """Parse args[0] and add it to the user's resin; return the reply text.

    max_resin is the remaining headroom (RESIN_MAX - current); values at or
    above it are rejected. Invalid input strikes the user.
    """
    try:
        value = int(args[0])
    except ValueError:
        return ut.strike_user(uid, msg)
    if not 0 < value < max_resin:
        return ut.strike_user(uid, msg)
    db.inc_resin(uid, value)
    reply = ut.text_format("Current resin", current + value)
    reply = _synchronized(uid, reply)
    db.dec_strikes(uid)
    return reply
def refill(update, context):
    """Handle /refill: add resin directly, or prompt for an amount."""
    uid = ut.uid(update)
    if db.banned(uid):
        return
    if not db.cached(uid):
        ut.not_started(update)
        return
    cur_resin = db.get_resin(uid)
    max_resin = ut.RESIN_MAX - cur_resin
    msg = (f"❗ Argument must be an integer greater than 0 "
           f"and lower than {max_resin}, "
           f"e.g. /refill, /refill 20")
    if context.args:
        msg = _refill(context.args, uid, msg, cur_resin, max_resin)
    else:
        # no argument: prompt and await the value via the text handler
        msg = (f"Tell me how much resin to refill "
               f"(max: <code>{max_resin}</code>)")
        _state(uid, ut.CMD.REFILL)
        db.dec_strikes(uid)
    ut.send(update, msg)
def _track(args, bot, uid, msg):
    """Parse a /track argument and (re)start the resin timer thread.

    args[0] is either an mm:ss timer value (synchronizes a new tracking
    thread) or -1 (disables tracking). Anything else strikes the user and
    returns the error message unchanged.
    """
    try:
        # first interpretation: a mm:ss timestamp
        datetime.strptime(args[0], "%M:%S")
    except ValueError:
        # second interpretation: the integer sentinel -1
        try:
            value = int(args[0])
        except ValueError:
            msg = ut.strike_user(uid, msg)
        else:
            if value == -1:
                th.del_thread(uid)
                msg = ut.text_format("Current tracking status", "disabled")
                db.dec_strikes(uid)
            else:
                msg = ut.strike_user(uid, msg)
    else:
        # valid mm:ss: convert to seconds and spawn/replace the timer thread
        minutes, seconds = map(int, args[0].split(':'))
        timer = minutes * 60 + seconds
        th.new_thread(bot, uid, timer)
        msg = "Bot timer synchronized."
        db.dec_strikes(uid)
    return msg
def track(update, context):
    """Handle /track: synchronize the resin timer, or disable it with -1."""
    uid = ut.uid(update)
    if not db.banned(uid):
        if not db.cached(uid):
            ut.not_started(update)
        else:
            # FIX: example text previously read "/track 5:35m /track -1" —
            # stray 'm' and missing comma between the examples.
            msg = ("❗ Argument must have format: mm:ss, "
                   "e.g. /track, /track 5:35, /track -1")
            if context.args:
                msg = _track(context.args, context.bot, uid, msg)
            else:
                # no argument: prompt and await the value via the text handler
                msg = ("Tell me your genshin timer "
                       "in format <code>mm:ss</code>, "
                       "or <code>-1</code> to disable")
                _state(uid, ut.CMD.TRACK)
                db.dec_strikes(uid)
            ut.send(update, msg)
def _warnings(args, uid, msg):
    """Parse args[0] and update the resin warning threshold; return reply.

    -1 disables warnings; values in (0, RESIN_MAX) set the threshold.
    Anything else strikes the user.
    """
    try:
        value = int(args[0])
    except ValueError:
        return ut.strike_user(uid, msg)
    if value != -1 and not 0 < value < ut.RESIN_MAX:
        return ut.strike_user(uid, msg)
    if value == -1:
        value = "disabled"
        db.unset_warn(uid)
    else:
        db.set_warn(uid, value)
    reply = ut.text_format("Current warning threshold", value)
    reply = _synchronized(uid, reply)
    db.dec_strikes(uid)
    return reply
def warnings(update, context):
    """Handle /warnings: set/disable the resin warning threshold or prompt."""
    uid = ut.uid(update)
    if not db.banned(uid):
        if not db.cached(uid):
            ut.not_started(update)
        else:
            # FIX: example previously said "/warning 140" — the command is
            # /warnings (plural).
            msg = (f"❗ Argument must be an integer greater than 0 "
                   f"and lower than {ut.RESIN_MAX}, or -1, "
                   f"e.g. /warnings, /warnings -1, /warnings 140")
            if context.args:
                msg = _warnings(context.args, uid, msg)
            else:
                # no argument: show the current threshold and prompt
                cur_warn = db.get_warn(uid)
                if cur_warn == -1:
                    cur_warn = "disabled"
                msg = (f"{ut.text_format('Warning threshold', cur_warn)}\n\n"
                       f"Tell me resin value to be warned at, "
                       f"or <code>-1</code> to disable")
                _state(uid, ut.CMD.WARN)
                db.dec_strikes(uid)
            ut.send(update, msg)
def _timezone(args, uid, msg):
    """Parse the /timezone argument: 'hh:mm' stores the offset from bot time, -1 disables."""
    token = args[0]
    try:
        datetime.strptime(token, "%H:%M")
        valid_clock = True
    except ValueError:
        valid_clock = False
    if valid_clock:
        # Store the user's offset relative to the bot's local clock.
        user_h, user_m = (int(part) for part in token.split(':'))
        bot_h, bot_m = (int(part) for part in datetime.now()
                        .strftime("%H:%M").split(':'))
        off_h = user_h - bot_h
        off_m = user_m - bot_m
        db.set_timezone(uid, off_h, off_m)
        tz = ut.normalize_timezone(off_h, off_m)
        msg = (f"{ut.text_format('Bot hour', f'{bot_h:02}:{bot_m:02}')}\n\n" # noqa
               f"{ut.text_format('Current timezone', f'{tz}')}")
        db.dec_strikes(uid)
    else:
        try:
            number = int(token)
        except ValueError:
            # Neither a clock value nor an integer: strike.
            msg = ut.strike_user(uid, msg)
        else:
            if number == -1:
                db.unset_timezone(uid)
                msg = ut.text_format("Current timezone", "disabled")
                db.dec_strikes(uid)
            else:
                msg = ut.strike_user(uid, msg)
    return msg
def timezone(update, context):
    """Handle /timezone: show or update the user's timezone offset."""
    uid = ut.uid(update)
    if db.banned(uid):
        return
    if not db.cached(uid):
        ut.not_started(update)
        return
    msg = ("❗ Argument must have format(24h): hh:mm or -1, "
           "e.g. /timezone, /timezone -1, /timezone 18:30")
    if context.args:
        msg = _timezone(context.args, uid, msg)
    else:
        # No argument: show the stored offset and prompt for a new one.
        # db.get_timezone returns 'null:...' when no offset is stored.
        off_h, off_m = db.get_timezone(uid).split(':')
        if off_h == 'null':
            tz = "disabled"
        else:
            tz = ut.normalize_timezone(off_h, off_m)
        msg = (f"{ut.text_format('Current timezone', tz)}\n\n"
               f"Tell me your current hour "
               f"in format(24h): <code>hh:mm</code>, "
               f"or <code>-1</code> to disable")
        _state(uid, ut.CMD.TZ)
        db.dec_strikes(uid)
    ut.send(update, msg)
def text(update, context):
    """Route a free-form text message to the handler of the user's pending
    interactive command (recorded in STATE by the corresponding slash command).

    Each branch first builds its own format-error message, then lets the
    command-specific parser replace it on success.
    """
    uid = ut.uid(update)
    if not db.banned(uid):
        if not db.cached(uid):
            ut.not_started(update)
        else:
            msg = "❗ Send only one argument, following the format."
            args = update.message.text.split()
            # Only single-token messages are valid command arguments.
            if len(args) == 1:
                if uid in STATE:
                    if STATE[uid] == ut.CMD.SET:
                        msg = (f"❗ Value must be an integer lower "
                               f"than {ut.RESIN_MAX}.")
                        msg = _set_resin(args, uid, msg)
                    elif STATE[uid] == ut.CMD.SPEND:
                        cur_resin = db.get_resin(uid)
                        msg = (f"❗ Value must be an integer greater than 0 "
                               f"and lower than {cur_resin}.")
                        msg = _spend(args, uid, msg, cur_resin)
                    elif STATE[uid] == ut.CMD.REFILL:
                        cur_resin = db.get_resin(uid)
                        max_resin = ut.RESIN_MAX - cur_resin
                        msg = (f"❗ Value must be an integer greater than 0 "
                               f"and lower than {max_resin}.")
                        msg = _refill(args, uid, msg, cur_resin, max_resin)
                    elif STATE[uid] == ut.CMD.TRACK:
                        msg = "❗ Timer must have format: <code>mm:ss</code>."
                        msg = _track(args, context.bot, uid, msg)
                    elif STATE[uid] == ut.CMD.WARN:
                        msg = (f"❗ Value must be an integer greater than 0 "
                               f"and lower than {ut.RESIN_MAX}, "
                               f"or <code>-1</code>.")
                        msg = _warnings(args, uid, msg)
                    elif STATE[uid] == ut.CMD.TZ:
                        msg = ("❗ Hour must have format(24h): "
                               "<code>hh:mm</code> "
                               "or <code>-1</code>.")
                        msg = _timezone(args, uid, msg)
                    else:
                        # No recognised pending command: reset the state.
                        # NOTE(review): original indentation lost in this copy —
                        # confirm this else pairs with the STATE[uid] chain.
                        _state(uid)
            ut.send(update, msg)
def cancel(update, context):
    """Handle /cancel: abort any pending interactive command for the user."""
    uid = ut.uid(update)
    if db.banned(uid):
        return
    if not db.cached(uid):
        ut.not_started(update)
        return
    if uid in STATE and STATE[uid] != ut.CMD.NOP:
        msg = (f"The command <code>{STATE[uid].value}</code> "
               f"has been cancelled. Anything else I can do for you?"
               f"\n\n"
               f"Send /help for a list of commands.")
    else:
        msg = ("No active command to cancel. "
               "I wasn't doing anything anyway.\nZzzzz...")
    ut.send(update, msg)
    # Always clear the pending state, even when there was nothing to cancel.
    _state(uid)
def stop(update, context):
    """Handle /stop: remove everything the bot stores about the user."""
    uid = ut.uid(update)
    if db.banned(uid):
        return
    msg = "Bot doesn't have information about you."
    if db.cached(uid):
        # ut.blocked purges the user's data.
        ut.blocked(uid)
        msg = "Your information has been removed from the bot."
    ut.send(update, msg)
def announce(update, context):
    """Admin-only: broadcast the command arguments as an announcement."""
    sender = ut.uid(update)
    admin_id = int(paimon.load_config()['admin'])
    if sender != admin_id:
        return
    announcement = f"❗ <b>Announcement:</b> {' '.join(context.args)}"
    ut.notify(context.job_queue, announcement)
def restart(update, context):
    """Admin-only: notify every user that the bot restarted."""
    sender = ut.uid(update)
    # NOTE(review): announce() reads the admin id from paimon.load_config()
    # while this reads '.adminid' — confirm which source is canonical.
    with open('.adminid', 'r') as handle:
        admin_id = int(handle.read().strip())
    if sender == admin_id:
        warning = "⚠ Bot restarted. Please, synchronize bot timer /track."
        ut.notify(context.job_queue, warning)
|
import aiofiles
import asyncstdlib
from pydantic.types import PositiveFloat
from conversion_parameters import ConversionParameters
from conversion_spec import ConversionSpec, Currency
from conversion_spec_provider import ConversionSpecProvider
class ConversionSpecFileReader(ConversionSpecProvider):
    """Reads a ConversionSpec from a text file: line 1 is the source currency,
    line 2 the target currency, and every remaining line one amount."""

    async def provide_conversion_spec(self, conversion_parameters: ConversionParameters) -> ConversionSpec:
        source_currency: Currency
        target_currency: Currency
        # Variable-length tuple of amounts; the previous tuple[PositiveFloat]
        # annotation would mean "exactly one element".
        source_amounts: tuple[PositiveFloat, ...]
        async with aiofiles.open(conversion_parameters.file_path) as conversion_spec_file:
            # NOTE(review): lines read this way keep their trailing newline —
            # presumably ConversionSpec's validators normalize them; confirm.
            source_currency, target_currency, *source_amounts = await asyncstdlib.tuple(conversion_spec_file)
        return ConversionSpec(
            source_currency=source_currency,
            target_currency=target_currency,
            source_amounts=source_amounts,
        )
|
<filename>src/actions/agreementActions.js<gh_stars>0
/**
* Created by skandara on 29/08/2016.
*/
import * as types from '../constants/actionTypes';
// Thunk action creator: dispatches SAVE_AGREEMENT with the agreement payload.
export function SaveAgreement(agreement) {
  return (dispatch) =>
    dispatch({
      type: types.SAVE_AGREEMENT,
      payload: agreement
    });
}
|
#!/bin/bash
# Follow the logs of the openmcp-job-controller pod forever, reattaching
# whenever `kubectl logs --follow` exits (e.g. on pod restart).
#
# Fix: the shebang was '#/bin/bash' (missing '!'), so the file was not
# actually interpreted by bash when executed directly.

NS=openmcp
controller_name="openmcp-job-controller"

# First pod whose name matches the controller.
NAME=$(kubectl get pod -n "$NS" | grep -E "$controller_name" | awk '{print $1}')
echo "Exec Into '$NAME'"
#kubectl exec -it "$NAME" -n "$NS" /bin/sh

while true; do
    kubectl logs --follow -n "$NS" "$NAME"
done
|
require 'fileutils'
module Linux
  module Lxc
    # A node in the lxc config tree: a directory that owns file entries
    # and resolves keys through the shared index.
    class Directory
      attr_reader :entries, :index, :file

      def initialize(fname, index)
        @file = fname
        @index = index
        @entries = {}
      end

      # Yield every line of every contained entry.
      def all_lines(&block)
        @entries.each_value { |entry| entry.all_lines(&block) }
      end

      def to_s
        @file
      end

      # Look a key up through the owning index.
      def get(key)
        @index.get_key(key)
      end

      # Register a file entry once; repeated adds return the cached entry.
      def add_file(fname)
        @entries[fname] ||= @index.add_file(fname, self)
      end

      # Create the directory on disk and flush every entry to it.
      def write
        FileUtils.mkdir_p(file)
        @entries.each_value(&:write)
      end
    end
  end
end
|
<filename>apractice/sort/MergeSort.java
import java.util.Arrays;
public class MergeSort {
public static void mergeSort(int[] a, int n) {
mergeSortInternally(a, 0, n-1);
}
private static void mergeSortInternally(int[] a, int p, int r) {
if (p >= r) return;
int q = p + (r - p) / 2;
mergeSortInternally(a, p, q);
mergeSortInternally(a, q+1, r);
// merge a[p...q] and a[q+1...r] as a[p...r]
merge(a, p, q, r);
}
private static void merge(int[] a, int p, int q, int r) {
int i = p;
int j = q + 1;
int k = 0;
// temp arr: a[p...r]
int[] tmp = new int[r-p+1];
while (i<=q && j<=r) {
if (a[i] <= a[j]) {
tmp[k++] = a[i++];
} else {
tmp[k++] = a[j++];
}
}
// 判断哪个子数组有剩余数据
int start = i;
int end = q;
if (j <= r) {
start = j;
end = r;
}
// 将剩余数据拷贝到 temp arr
while (start <= end) {
tmp[k++] = a[start++];
}
// 将 tmp 中的数组拷贝会 a[p...r]
for (i = 0; i <= r-p; ++i) {
a[p+i] = tmp[i];
}
}
public static void main(String[] args) {
int[] arr1 = new int[]{3, 2, 52, 43, 32, 56, 93, 29, 19};
System.out.println("Origin: " + Arrays.toString(arr1));
mergeSort(arr1, arr1.length);
System.out.println("Sorted: " + Arrays.toString(arr1));
}
}
|
<filename>node_modules/@buffetjs/icons/src/components/Remove/index.js<gh_stars>0
import React from 'react';
import PropTypes from 'prop-types';
// 8×8 "X" (remove/close) glyph rendered at 10×10 px; extra props spread
// onto the <svg> element so callers can override size, fill, handlers, etc.
const Icon = props => (
  <svg
    viewBox="0 0 8 8"
    width="10"
    height="10"
    xmlns="http://www.w3.org/2000/svg"
    {...props}
  >
    <g fill={props.fill} fillRule="nonzero">
      {/* two mirrored strokes forming the X */}
      <path d="M1.156 7.433c.06 0 .114-.023.16-.07l3.258-3.257a.221.221 0 000-.322L1.317.527a.221.221 0 00-.322 0l-.35.35a.221.221 0 000 .321l2.748 2.747L.646 6.692a.221.221 0 000 .322l.35.35c.046.046.1.07.16.07z" />
      <path d="M6.888 7.433a.221.221 0 01-.16-.07L3.47 4.106a.221.221 0 010-.322L6.728.527a.221.221 0 01.321 0l.35.35a.221.221 0 010 .321L4.65 3.945 7.4 6.692a.221.221 0 010 .322l-.35.35a.221.221 0 01-.16.07z" />
    </g>
  </svg>
);

// Default grey fill matching the buffetjs palette.
Icon.defaultProps = {
  fill: '#b3b5b9',
};

Icon.propTypes = {
  fill: PropTypes.string,
};

export default Icon;
|
from onnx_tf.common import exception
from onnx_tf.handlers.frontend_handler import FrontendHandler
from onnx_tf.handlers.handler import onnx_op
from onnx_tf.handlers.handler import tf_op
@onnx_op("Reshape")
@tf_op("Reshape")
class Reshape(FrontendHandler):
  """Maps the TensorFlow Reshape op onto the ONNX Reshape op."""

  @classmethod
  def args_check(cls, node, **kwargs):
    # Opset 1 carries the target shape as a node attribute, so the shape
    # input must be a known constant at conversion time.
    if cls.SINCE_VERSION == 1:
      shape_input = node.inputs[1]
      if shape_input not in kwargs["consts"]:
        exception.CONST_NOT_FOUND_EXCEPT(shape_input, node.op_type)

  @classmethod
  def version_1(cls, node, **kwargs):
    # Bake the constant shape into the node as the 'shape' attribute.
    target_shape = kwargs["consts"][node.inputs[1]]
    return cls.make_node_from_tf_node(node, [node.inputs[0]], shape=target_shape)

  @classmethod
  def version_5(cls, node, **kwargs):
    # From opset 5 the shape is a regular second tensor input.
    return cls.make_node_from_tf_node(node, [node.inputs[0], node.inputs[1]])
|
// Copyright 2007 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.integration.app1.pages;
import org.apache.tapestry5.annotations.Component;
import org.apache.tapestry5.corelib.components.Form;
import org.apache.tapestry5.integration.app1.data.ToDoItem;
import org.apache.tapestry5.integration.app1.services.ToDoDatabase;
import org.apache.tapestry5.ioc.annotations.Inject;
import java.util.List;
public class ToDoListVolatile
{
    // Data source for to-do items; injected by Tapestry IoC.
    @Inject
    private ToDoDatabase database;

    // Current row while the item list is rendered/submitted.
    private ToDoItem item;

    // Items loaded for this request; refreshed in onPrepare().
    private List<ToDoItem> items;

    @Component
    private Form form;

    public List<ToDoItem> getItems()
    {
        return items;
    }

    public ToDoItem getItem()
    {
        return item;
    }

    public void setItem(ToDoItem item)
    {
        this.item = item;
    }

    public ToDoDatabase getDatabase()
    {
        return database;
    }

    // Form PREPARE event: reload the list before render and before
    // submission processing (this page does not store the list in the form).
    void onPrepare()
    {
        items = database.findAll();
    }

    // Form SUCCESS event: renumber items in list order and persist them.
    void onSuccess()
    {
        int order = 0;
        for (ToDoItem item : items)
        {
            item.setOrder(order++);
            database.update(item);
        }
    }

    // "addNew" submit component: append a placeholder item when valid.
    void onSelectedFromAddNew()
    {
        if (form.isValid())
        {
            ToDoItem item = new ToDoItem();
            item.setTitle("<New To Do>");
            item.setOrder(items.size());
            database.add(item);
        }
    }

    // "reset" action link: restore the database to its initial contents.
    void onActionFromReset()
    {
        database.reset();
    }
}
|
package com.surfwatchlabs.authrocket.service;
import com.authrocket.model.core.Organization;
import com.authrocket.model.response.OrganizationResponse;
import com.google.gson.Gson;
import com.surfwatchlabs.authrocket.client.AuthRocketRESTClient;
import com.surfwatchlabs.authrocket.util.AuthRocketGsonBuilder;
import java.util.Collection;
import java.util.Collections;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <code>OrganizationClientImpl.java</code> is a simple client that can handle GET operations for
* AuthRocket orgs.
*
* TODO : handle more than GET
*
* @see <a href="https://authrocket.com/docs/api/orgs">AuthRocket API -- Orgs</a>
*/
public class OrganizationClientImpl {

    private AuthRocketRESTClient authRocketClient;
    private final Gson gson;

    // AuthRocket REST resource for organizations.
    private static final String RESOURCE_PATH = "orgs";

    private static final Logger LOG = LoggerFactory.getLogger(OrganizationClientImpl.class);

    public OrganizationClientImpl() {
        gson = AuthRocketGsonBuilder.getBuilder();
    }

    /**
     * Fetches all organizations.
     *
     * @return all organizations, or an empty list when the client reported an error
     */
    public Collection<Organization> getOrganizations() {
        LOG.debug( "Getting all organizations" );

        // TODO : we could set a whole bunch of query params here if we cared to
        String responseJson = authRocketClient.getResponseJson( RESOURCE_PATH, null );

        // null on error in client.
        // Fix: Collections.EMPTY_LIST is raw-typed; emptyList() is the
        // type-safe equivalent and avoids an unchecked conversion.
        if( responseJson == null )
            return Collections.emptyList();

        OrganizationResponse response = gson.fromJson( responseJson, OrganizationResponse.class );

        LOG.debug( "Returning all organizations - numOrganizations={}", response.getResponseCollection().size() );
        return response.getResponseCollection();
    }

    /**
     * Fetches a single organization by id.
     *
     * @param organizationId AuthRocket org id
     * @return the organization, or null when the client reported an error
     */
    public Organization getOrganizationById( String organizationId ) {
        LOG.debug( "Getting organization by id - organizationId={}", organizationId );

        // TODO : we could set a whole bunch of query params here if we cared to
        String responseJson = authRocketClient.getResponseJson( RESOURCE_PATH + "/" + organizationId, null );

        // null on error in client
        if( responseJson == null )
            return null;

        Organization organization = gson.fromJson( responseJson, Organization.class );

        LOG.debug( "Returning organization by id - organizationId={}", organizationId );
        return organization;
    }

    public AuthRocketRESTClient getAuthRocketClient() {
        return authRocketClient;
    }

    public void setAuthRocketClient(AuthRocketRESTClient arClient) {
        this.authRocketClient = arClient;
    }
}
|
package io.eventuate.tram.messaging.proxy.consumer;
import org.apache.commons.lang.builder.ReflectionToStringBuilder;
// Bean describing an HTTP command subscription: which channel to listen on
// and where (baseUrl + resource) to forward which commands.
public class CommandSubscriptionData {
  // Messaging channel the subscriber listens on.
  private String channel;
  // Resource path appended to baseUrl when dispatching.
  private String resource;
  // Base URL of the subscriber's HTTP endpoint.
  private String baseUrl;
  // Command names covered by this subscription.
  private String commands;

  public String getChannel() {
    return channel;
  }

  public void setChannel(String channel) {
    this.channel = channel;
  }

  public String getResource() {
    return resource;
  }

  public void setResource(String resource) {
    this.resource = resource;
  }

  public String getBaseUrl() {
    return baseUrl;
  }

  public void setBaseUrl(String baseUrl) {
    this.baseUrl = baseUrl;
  }

  public String getCommands() {
    return commands;
  }

  public void setCommands(String commands) {
    this.commands = commands;
  }

  @Override
  public String toString() {
    // Reflective dump of all fields; fine for logging/debugging.
    return ReflectionToStringBuilder.toString(this);
  }
}
|
use commitments::{integer::IntegerCommitment, Commitment};
use utils::ConvertibleUnknownOrderGroup;
// Define a struct MyVerifierChannel that will implement the MembershipVerifierChannel trait.
// NOTE(review): MembershipVerifierChannel is not imported in this file —
// presumably it is in scope via the enclosing module; confirm the `use` path.
struct MyVerifierChannel;

// Implement the MembershipVerifierChannel trait for the MyVerifierChannel type
impl<G: ConvertibleUnknownOrderGroup> MembershipVerifierChannel<G> for MyVerifierChannel {
    // Send the commitment C_e to the prover side of the protocol.
    fn send_c_e(_: G) {
        // Deliberate stub: panics until the channel transport is implemented.
        unimplemented!();
    }
}
package es.upm.etsisi.cf4j.qualityMeasure.prediction;
import es.upm.etsisi.cf4j.data.TestUser;
import es.upm.etsisi.cf4j.recommender.Recommender;
/**
 * This class calculates the Root Mean Squared Error (RMSE) between the predictions and the test
 * ratings.
 *
 * <p>RMSE = &radic;(&sum;(&lt;test item rating prediction&gt; - &lt;test item
 * rating&gt;)<sup>2</sup> / &lt;number of predictions&gt;)
 */
public class RMSE extends MSE { // inherits the squared-error accumulation from MSE

  /**
   * Constructor of the class which basically calls the father's one
   *
   * @param recommender Recommender instance for which the RMSE are going to be computed
   */
  public RMSE(Recommender recommender) {
    super(recommender);
  }

  /** Square root of the per-user MSE; NaN propagates when the user has no predictions. */
  @Override
  public double getScore(TestUser testUser, double[] predictions) {
    double mse = super.getScore(testUser, predictions);
    return (Double.isNaN(mse)) ? Double.NaN : Math.sqrt(mse);
  }
}
|
<filename>consensus/poa/roundmanager.go
// Copyright (c) 2018-2020. The asimov developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package poa
import (
	"errors"

	"github.com/AsimovNetwork/asimov/ainterface"
	"github.com/AsimovNetwork/asimov/chaincfg"
	"github.com/AsimovNetwork/asimov/common"
	"github.com/AsimovNetwork/asimov/database"
)
// RoundManager manages how validators create blocks within a round.
// Validators may change between different rounds.
type RoundManager struct {
}

// HasValidator reports whether validator may produce blocks; this PoA
// implementation accepts every address.
func (m *RoundManager) HasValidator(validator common.Address) bool {
	return true
}
// GetValidators builds the per-slot validator list for a round by cycling
// through the validators signed up via fn, each with weight 1.
// A nil callback yields (nil, nil, nil), matching the original contract.
func (m *RoundManager) GetValidators(blockHash common.Hash, round uint32, fn ainterface.GetValidatorsCallBack) (
	[]*common.Address, map[common.Address]uint16, error) {
	if fn == nil {
		return nil, nil, nil
	}
	signupValidators, _, err := fn(nil)
	if err != nil {
		return nil, nil, err
	}
	// Fix: the original indexed signupValidators[i%l] without checking the
	// length, so an empty signup list caused a divide-by-zero panic.
	l := len(signupValidators)
	if l == 0 {
		return nil, nil, errors.New("no signed-up validators available")
	}
	validators := make([]*common.Address, chaincfg.ActiveNetParams.RoundSize)
	weightmap := make(map[common.Address]uint16)
	for _, v := range signupValidators {
		weightmap[v] = 1
	}
	// Round-robin the signup list across all slots of the round.
	for i := 0; i < int(chaincfg.ActiveNetParams.RoundSize); i++ {
		validators[i] = &signupValidators[i%l]
	}
	return validators, weightmap, nil
}
// GetHsMappingByRound is unused in this PoA implementation.
func (m *RoundManager) GetHsMappingByRound(round uint32) (map[string]*ainterface.ValidatorInfo, error) {
	return nil, nil
}

// GetNextRound derives the following round: same size, starting when the
// current round's duration elapses.
func (m *RoundManager) GetNextRound(round *ainterface.Round) (*ainterface.Round, error) {
	newRound := &ainterface.Round{
		Round:          round.Round + 1,
		RoundStartUnix: round.RoundStartUnix + round.Duration,
		Duration:       common.DefaultBlockInterval * int64(chaincfg.ActiveNetParams.RoundSize),
	}
	return newRound, nil
}

// GetRoundInterval returns the fixed duration of any round.
func (m *RoundManager) GetRoundInterval(round int64) int64 {
	return common.DefaultBlockInterval * int64(chaincfg.ActiveNetParams.RoundSize)
}

// Init is a no-op for PoA.
func (m *RoundManager) Init(round uint32, db database.Transactor, c ainterface.IBtcClient) error {
	return nil
}

// Start is a no-op for PoA.
func (m *RoundManager) Start() {
}

// Halt is a no-op for PoA.
func (m *RoundManager) Halt() {
}

// GetContract returns the consensus contract address for PoA.
func (m *RoundManager) GetContract() common.Address {
	return common.ConsensusPOA
}

// NewRoundManager creates an empty PoA round manager.
func NewRoundManager() *RoundManager {
	return &RoundManager{}
}
|
#!/usr/bin/env bash
# Start one background `perf record` per distinct 'database' command line,
# attaching to all of that command's thread ids at once.
# Fixes: read without -r, unquoted [ -z ${arr[$y]} ] test, unquoted
# expansions, and legacy backticks.

prefix="./"

help()
{
cat << HELP
Usage: $0 [-p]
-----------------------------------
parameter statement:
-p: path prefix of perf data
HELP
}

while [ -n "${1:-}" ]; do
    case "$1" in
        -h) help; exit;;
        -p) prefix=$2; shift 2;;
        -*) echo "error: no such option $1. -h for help"; exit 1;;
        *) break;;
    esac
done
#echo "prefix:" $prefix

# One "tid command" pair per thread; strip /home... argument noise.
ps -eo tid,comm,args -L | grep database | grep -v grep | grep -v qlogd | grep -v vim | sed 's/\/home.*//g' > t.txt

declare -A arr
while read -r x y; do
    # Sanitize the command text into a filename-safe key.
    y=$(echo "$y" | sed 's/ //g; s/:/-/g; s/\./-/g')
    #echo "$x" -- "$y"
    if [ -z "${arr[$y]:-}" ]; then
        arr[$y]=$x
    else
        arr[$y]="${arr[$y]},$x"
    fi
done < t.txt

for t in "${!arr[@]}"; do
    #echo "$t" "${arr[$t]}"
    perf record -t "${arr[$t]}" -o "$prefix/${t}.data" &
done
|
#!/usr/bin/env bash
# See: https://symbiflow-examples.readthedocs.io/en/latest/getting-symbiflow.html
# Prerequisites: apt install -y git wget xz-utils
# Set ARCH_DEFS_WEB according to https://symbiflow-examples.readthedocs.io/en/latest/getting-symbiflow.html
#
# Fix: there was no shebang and no error handling, so a failed download
# silently continued into the extraction steps; quote expansions too.
set -euo pipefail

INSTALL_DIR=$HOME/opt/symbiflow
FPGA_FAM=xc7
ARCH_DEFS_WEB=https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/foss-fpga-tools/symbiflow-arch-defs/continuous/install/367/20210822-000315
SYMBIFLOW_EXAMPLES=https://raw.githubusercontent.com/SymbiFlow/symbiflow-examples/master/xc7

wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O conda_installer.sh
wget "$SYMBIFLOW_EXAMPLES/environment.yml" -O environment.yml
wget "$SYMBIFLOW_EXAMPLES/requirements.txt" -O requirements.txt

# Unattended miniconda install into the toolchain prefix.
bash conda_installer.sh -u -b -p "$INSTALL_DIR/$FPGA_FAM/conda"
. "$INSTALL_DIR/$FPGA_FAM/conda/etc/profile.d/conda.sh"
"$INSTALL_DIR/$FPGA_FAM/conda/bin/conda" env create -f environment.yml

mkdir -p "$INSTALL_DIR/xc7/install"

echo Getting arch defs
echo 1/5
wget -qO- "$ARCH_DEFS_WEB/symbiflow-arch-defs-install-709cac78.tar.xz" | tar -xJC "$INSTALL_DIR/xc7/install"
echo 2/5
wget -qO- "$ARCH_DEFS_WEB/symbiflow-arch-defs-xc7a50t_test-709cac78.tar.xz" | tar -xJC "$INSTALL_DIR/xc7/install"
echo 3/5
wget -qO- "$ARCH_DEFS_WEB/symbiflow-arch-defs-xc7a100t_test-709cac78.tar.xz" | tar -xJC "$INSTALL_DIR/xc7/install"
echo 4/5
wget -qO- "$ARCH_DEFS_WEB/symbiflow-arch-defs-xc7a200t_test-709cac78.tar.xz" | tar -xJC "$INSTALL_DIR/xc7/install"
echo 5/5
wget -qO- "$ARCH_DEFS_WEB/symbiflow-arch-defs-xc7z010_test-709cac78.tar.xz" | tar -xJC "$INSTALL_DIR/xc7/install"
|
/* eslint-env mocha */
var chai = require('chai')
var sinon = require('sinon')
var JsReporters = require('../../dist/js-reporters.js')
var data = require('./data.js')
var expect = chai.expect
chai.use(require('sinon-chai'))
// Unit tests for the public helper API of js-reporters: framework
// auto-registration and the event-object factory functions.
describe('Helpers', function () {
  var dummyFunc = function () {}

  describe('autoregister', function () {
    // Each test fakes exactly one global framework object; the other two
    // stay undefined so autoRegister() must pick the right adapter.
    beforeEach(function () {
      global.QUnit = undefined
      global.mocha = undefined
      global.jasmine = undefined
    })

    afterEach(function () {
      delete global.QUnit
      delete global.mocha
      delete global.jasmine
    })

    it('should register the QUnitAdapter', function () {
      // Only 'begin' is a spy — the adapter is detected by it being called.
      global.QUnit = {
        begin: sinon.stub(),
        testStart: dummyFunc,
        log: dummyFunc,
        testDone: dummyFunc,
        done: dummyFunc
      }

      JsReporters.autoRegister()

      expect(global.QUnit.begin).to.have.been.calledOnce
    })

    it('should register the MochaAdapter', function () {
      global.mocha = {
        reporter: sinon.stub()
      }

      JsReporters.autoRegister()

      expect(global.mocha.reporter).to.have.been.calledOnce
    })

    it('should register the JasmineAdapter', function () {
      var spy = sinon.stub()
      global.jasmine = {
        getEnv: function () {
          return {
            addReporter: spy
          }
        }
      }

      JsReporters.autoRegister()

      expect(spy).to.have.been.calledOnce
    })

    it('should throw an error if no testing framework was found', function () {
      expect(JsReporters.autoRegister).to.throw(Error)
    })
  })

  describe('create functions', function () {
    // The factories should echo back the fixture data unchanged.
    it('should return a suite start', function () {
      var startSuite = JsReporters.createSuiteStart(data.startSuite)

      expect(startSuite).to.be.deep.equal(data.startSuite)
    })

    it('should return a test start', function () {
      var startTest = JsReporters.createTestStart(data.startTest)

      expect(startTest).to.be.deep.equal(data.startTest)
    })

    it('should return a test end', function () {
      var endTest = JsReporters.createTestEnd(data.endTest)

      expect(endTest).to.be.deep.equal(data.endTest)
    })

    it('should return a suite end', function () {
      var endSuite = JsReporters.createSuiteEnd(data.endSuite)

      expect(endSuite).to.be.deep.equal(data.endSuite)
    })
  })
})
|
#!/bin/bash
# Install the php-codesniffer pre-commit hook into the local git repo.
# Fix: no error handling — chmod ran (and failed confusingly) even when
# cp had already failed; now we fail early with a clear message.
set -euo pipefail

hook_src="vendor/modionut/php-codesniffer-hooks/src/pre-commit"
hook_dst=".git/hooks/pre-commit"

[ -f "$hook_src" ] || { echo "error: $hook_src not found (run composer install?)" >&2; exit 1; }
[ -d ".git/hooks" ] || { echo "error: .git/hooks not found (run from the repo root)" >&2; exit 1; }

cp "$hook_src" "$hook_dst"
chmod +x "$hook_dst"
|
<!-- NOTE(review): a div with an inline onclick is not keyboard-accessible;
     consider <a href="http://example.com" target="_blank" rel="noopener"> -->
<div onclick="window.open('http://example.com', '_blank');">Click Here</div>
#!/bin/sh
# Render the weather SVG to an optimized PNG (e.g. for an e-ink display).
# Fix: without set -e a failed render still ran rsvg-convert/pngcrush and
# could publish a stale or broken image.
set -e

cd "$(dirname "$0")"
python2 weather-script.py
rsvg-convert --background-color=white -o weather-script-output.png weather-script-output.svg
pngcrush -c 0 -ow weather-script-output.png
#If you need to copy the file to a webserver location uncomment this line
#cp -f weather-script-output.png /home/pi/pyImages/weather-script-output0.png
|
#!/usr/bin/env bash
# Provision nginx, PHP 7.4, composer, docker, a MySQL container, and the app.
# Fix: 'sudo apt-update' is not a valid command — it must be 'sudo apt update'.

sudo apt update

# install nginx
sudo apt install nginx

# install php7.4
sudo apt -y install software-properties-common
sudo add-apt-repository ppa:ondrej/php
sudo apt-get update
sudo apt -y install php7.4
sudo apt-get install -y php7.4-cli php7.4-json php7.4-common php7.4-mysql php7.4-zip php7.4-gd php7.4-mbstring php7.4-curl php7.4-xml php7.4-bcmath php7.4-fpm

# install composer
curl -sS https://getcomposer.org/installer -o composer-setup.php
sudo php composer-setup.php --install-dir=/usr/local/bin --filename=composer

# install docker
sudo apt update
sudo apt-get remove docker docker-engine docker.io containerd runc
sudo apt-get install \
    apt-transport-https \
    ca-certificates \
    curl \
    gnupg \
    lsb-release
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
echo \
  "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
  $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
sudo usermod -aG docker "$USER"
# NOTE(review): newgrp spawns an interactive subshell, which pauses the rest
# of this script until it exits — confirm whether a re-login is intended.
newgrp docker

# run mysql
docker run -d -p 3306:3306 -e MYSQL_ROOT_PASSWORD=my-secret-pw --name=mysql -d mysql --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci

# setup project
sudo apt install unzip
unzip nova
php artisan project:setup
sudo chown -R "$USER":www-data storage bootstrap/cache/
# Dry-run query of NNS proposal 22174 via the locally built quill binary
# (CARGO_TARGET_DIR defaults to ../target when unset).
${CARGO_TARGET_DIR:-../target}/debug/quill get-proposal-info 22174 --dry-run
|
# Run the MMAction2 webcam demo for spatio-temporal action detection:
# SlowOnly (OmniSource-pretrained R101) classifies actions on person boxes
# produced by Faster R-CNN, visualizing predictions over test.mp4.
python webcam_demo_spatiotemporal_det.py \
    --config configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py \
    --checkpoint checkpoints/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth \
    --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \
    --det-checkpoint checkpoints/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \
    --det-score-thr 0.9 \
    --action-score-thr 0.5 \
    --label-map demo/label_map_ava.txt \
    --predict-stepsize 8 \
    --output-fps 20 \
    --clip-vis-length 8 \
    --input-video "test.mp4" \
    --show
|
package worker
import "github.com/youngzhu/golab/crawler/fetch"
// Concurrent crawler with channels
// ConcurrentChannel starts a channel-coordinated concurrent crawl from url.
func ConcurrentChannel(url string, fetcher fetch.Fetcher) {
	work := make(chan []string)
	// Seed the coordinator with the initial url batch.
	go func() { work <- []string{url} }()
	coordinator(work, fetcher)
}
// coordinator drains url batches from ch, dispatching one worker goroutine
// per not-yet-seen url. n counts outstanding batches still owed on the
// channel: it starts at 1 for the seed batch, is incremented per spawned
// worker and decremented per received batch; when it hits 0 every worker
// has reported back and the crawl is complete.
func coordinator(ch chan []string, fetcher fetch.Fetcher) {
	n := 1
	fetched := make(map[string]bool) // urls already dispatched (dedup set)
	for urls := range ch {
		for _, u := range urls {
			if !fetched[u] {
				fetched[u] = true
				n++
				go worker(u, ch, fetcher)
			}
		}
		n--
		if n == 0 {
			break
		}
	}
}
// worker fetches one url and reports the discovered urls back on ch;
// fetch failures report an empty batch so the coordinator still counts it.
func worker(url string, ch chan []string, fetcher fetch.Fetcher) {
	found, err := fetcher.Fetch(url)
	if err != nil {
		found = []string{}
	}
	ch <- found
}
|
#!/usr/bin/bash
# this file by itself doesn't tell your system HOW to run it.
# bash is not the only shell (also bash has compatiblity modes)
# in linux, a "shebang" at the top of the file
# tells the system what shell program to run your script with.
echo welcome to my script
# MOST complicated parts of bash are just text substitutions/expansions
# and exit codes
# exit codes:
# 0 means success, anything else means error
if echo text | grep -q tex ; then
echo "exit code must have been zero"
fi
# _ ; _ just runs both commands
# _ && _ returns 0 if both commands' exit codes were 0
# runs first command, if it was nonzero, then we stop there
# _ || _ returns 0 if either commands' exit code was 0
# runs first command, if it was zero, then we stop there
# this behavior is called "short-circuiting"
echo text | grep -q tex && echo "previous command successful"
echo text | grep -q tex || echo "previous command unsuccessful"
if [[ 4 == 3 || 5 -gt 4 ]] ; then
echo "exit code must have been zero"
fi
# NOTE(review): (( 0 )) is false (exit code 1), so this branch does NOT
# run — the echoed text below is misleading as a teaching example.
if (( 0 )) ; then
echo "exit code must have been zero"
fi
data=4
((data+=1))
echo $data
# double parentheses is for arithmetic
# (also: parameters don't need to be prefixed with $,
# and they are treated like numbers not text)
echo you entered: "$@"
echo all done
printarg () {
variable=1
echo $1
echo $2
}
# demo: $variable is empty here, set inside printarg, visible after —
# shell variables are global unless declared with 'local'.
echo $variable
printarg $@
echo $variable
# normally, variables are "global scope"
# brace expansion
# command substitution
echo $(echo 4) + $(echo 1) = 5
echo current dir is $(pwd)
echo current dir is `pwd`
# grep for filtering/searching text
# find for searching for files (can execute a command for each file)
# printf for formatting text in more complex ways than echo
# sed for replacing text based on patterns (regex)
# awk for parsing text and processing it
# xargs for dynamically building one command's arguments from another command's output
# curl/wget for downloading webpages (or sending any http requests)
|
import React from 'react';
import AccountCard from './AccountCard';
import BlogCard from './BlogCard';
import ClockCard from './ClockCard';
import DataCard from './DataCard';
import s from './index.scss';
import NoticeCard from './NoticeCard';
import SiteCard from './SiteCard';
import TagCard from './TagCard';
// Sidebar column: stacks the profile/info cards; the tag and site cards
// sit inside a sticky wrapper so they stay visible while scrolling.
const Aside: React.FC = () => {
  return (
    <aside className={s.aside}>
      <BlogCard />
      <AccountCard />
      <DataCard />
      <NoticeCard />
      <ClockCard />
      <div className={s.cardSticky}>
        <TagCard />
        <SiteCard />
      </div>
    </aside>
  );
};

export default Aside;
|
#!/bin/bash
#
# Copyright (c) 2017-2018 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Currently we will use this repository until this issue is solved
# See https://github.com/kata-containers/packaging/issues/1

# Fail fast: abort on errors, unset variables, and failed pipeline stages.
set -o errexit
set -o nounset
set -o pipefail

# DEBUG=<anything> enables shell tracing.
[ -z "${DEBUG:-}" ] || set -x

cidir=$(dirname "$0")
source "${cidir}/lib.sh"
# os-release provides $ID (distro name), used to pick the package manager.
source "/etc/os-release" || source "/usr/lib/os-release"

kernel_repo_name="packaging"
kernel_repo_owner="kata-containers"
kernel_repo="github.com/${kernel_repo_owner}/${kernel_repo_name}"
export GOPATH=${GOPATH:-${HOME}/go}
kernel_repo_dir="${GOPATH}/src/${kernel_repo}"
kernel_arch="$(arch)"
tmp_dir="$(mktemp -d -t install-kata-XXXXXXXXXXX)"
packaged_kernel="kata-linux-container"

# Remove the scratch directory on any exit path.
exit_handler () {
	rm -rf "$tmp_dir"
}
trap exit_handler EXIT
# Clone/update the kata packaging repo into $GOPATH (best-effort).
download_repo() {
	pushd ${tmp_dir}
	go get -d -u "${kernel_repo}" || true
	popd
}

# Kernel version pinned by the CI config, without the leading 'v'.
get_current_kernel_version() {
	kernel_version=$(get_version "assets.kernel.version")
	echo "${kernel_version/v/}"
}

# Kata-specific kernel config revision shipped with the packaging repo.
get_kata_config_version() {
	kata_config_version=$(cat "${kernel_repo_dir}/kernel/kata_config_version")
	echo "${kata_config_version}"
}

# Version of the distro-packaged kata kernel, queried per package manager.
get_packaged_kernel_version() {
	if [ "$ID" == "ubuntu" ] || [ "$ID" == "debian" ]; then
		kernel_version=$(sudo apt-cache madison $packaged_kernel | awk '{print $3}' | cut -d'-' -f1)
	elif [ "$ID" == "fedora" ]; then
		kernel_version=$(sudo dnf --showduplicate list ${packaged_kernel}.${kernel_arch} |
			awk '/'$packaged_kernel'/ {print $2}' |
			tail -1 |
			cut -d'-' -f1)
	elif [ "$ID" == "centos" ] || [ "$ID" == "rhel" ]; then
		kernel_version=$(sudo yum --showduplicate list $packaged_kernel | awk '/'$packaged_kernel'/ {print $2}' | cut -d'-' -f1)
	fi
	echo "${kernel_version}"
}
# Build the kata kernel from source and install it (setup/build/install).
build_and_install_kernel() {
	info "Install kernel from sources"
	pushd "${tmp_dir}" >> /dev/null
	"${kernel_repo_dir}/kernel/build-kernel.sh" "setup"
	"${kernel_repo_dir}/kernel/build-kernel.sh" "build"
	sudo -E PATH="$PATH" "${kernel_repo_dir}/kernel/build-kernel.sh" "install"
	popd >> /dev/null
}

# Install the distro-packaged kata kernel; returns non-zero on failure so
# the caller can fall back to building from source.
install_packaged_kernel(){
	info "Install packaged kernel version"
	rc=0
	if [ "$ID" == "ubuntu" ] || [ "$ID" == "debian" ]; then
		chronic sudo apt install -y "$packaged_kernel" || rc=1
	elif [ "$ID" == "fedora" ]; then
		chronic sudo dnf install -y "$packaged_kernel" || rc=1
	elif [ "$ID" == "centos" ] || [ "$ID" == "rhel" ]; then
		chronic sudo yum install -y "$packaged_kernel" || rc=1
	else
		die "Unrecognized distro"
	fi
	return "$rc"
}

# NOTE(review): duplicates exit_handler and appears to be unused — confirm
# whether it can be removed.
cleanup() {
	rm -rf "${tmp_dir}"
}
# Prefer the packaged kernel when its version matches the pinned one
# (x86_64 only); otherwise build and install from source.
main() {
	download_repo
	kernel_version="$(get_current_kernel_version)"
	kata_config_version="$(get_kata_config_version)"
	current_kernel_version="${kernel_version}.${kata_config_version}"
	info "Current Kernel version ${current_kernel_version}"
	info "Get packaged kernel version"
	packaged_kernel_version=$(get_packaged_kernel_version)
	info "Packaged Kernel version ${packaged_kernel_version}"
	if [ "$packaged_kernel_version" == "$current_kernel_version" ] && [ "$kernel_arch" == "x86_64" ]; then
		# If installing packaged kernel from OBS fails,
		# then build and install it from sources.
		if ! install_packaged_kernel;then
			info "failed to install packaged kernel, trying to build from source"
			build_and_install_kernel
		fi
	else
		build_and_install_kernel
	fi
}

main
|
// Interfaces can only describe object types
interface Dinosaur {
  name: string;
}

// A type alias can be a string, a number, or even an object
type Phoenix = {
  name: string;
};

// Interfaces merge automatically (declaration merging)
interface Unicorn {
  name: string;
}

interface Unicorn {
  // This interface merges with the one above because the names match
  speed: number;
}

// Type aliases do not auto-merge; redeclaring the same alias name errors
// (they can still be combined another way, e.g. with an intersection)
type GodzillaName = {
  name: string;
};

type GodzillaPower = {
  // NOTE(review): the original comment said this "will error", but a
  // distinctly named alias compiles fine; only redeclaring an existing
  // alias name (e.g. a second 'type GodzillaName') would error
  power: number;
};

// Intersection: the manual way to combine type aliases
type Godzilla = GodzillaName & GodzillaPower;
|
#!/bin/bash
# Run dieharder RNG test 15 against generator 13 with a fixed seed (-S)
# so the run is reproducible.
dieharder -d 15 -g 13 -S 3054394121
|
#ifndef _CELLSHAPE_H_
#define _CELLSHAPE_H_

#include <vector>
#include <boost/shared_ptr.hpp>

#include <Common.h>
#include <SphereShape.h>
#include <RandomGenerator.h>

// A cell in a cellular-growth simulation: a sphere carrying nutrition,
// links to neighbouring cells, and the target position / surface normal
// used by the relaxation step (spring/planar/bulge targets).
class CellShape : public SphereShape
{
public:
    // _identifier: stable id used to locate this cell inside link lists.
    CellShape(int _identifier)
        : SphereShape()
        , m_nutrition(0.f)
        , m_identifier(_identifier)
        , m_target_pos(Vector3f(0.f, 0.f, 0.f))
        , m_normal(Vector3f(0.f, 0.f, 0.f))
    {;}

    // --- read accessors ---
    inline float nutrition() const {return m_nutrition;}
    inline int identifier() const {return m_identifier;}
    inline Vector3f targetPosition() const {return m_target_pos;}
    inline Vector3f normal() const {return m_normal;}
    inline int linkListSize() const {return m_links.size();}
    inline boost::shared_ptr<CellShape> getLink(const int _index) const {return m_links[_index];}

    // --- write accessors (no bounds/validity checks) ---
    inline void targetPosition(const Vector3f &_target_pos){m_target_pos = _target_pos;}
    inline void normal(const Vector3f &_normal){m_normal = _normal;}
    inline void nutrition(const float &_nutrition){m_nutrition = _nutrition;}
    inline void identifier(const unsigned int &_identifier){m_identifier = _identifier;}
    inline void setLink(const int _index, boost::shared_ptr<CellShape> _cell){m_links[_index] = _cell;}

    // Link-list manipulation; findLink searches by identifier.
    void addLink(const boost::shared_ptr<CellShape> &_link);
    void insertLink(const int _index, const boost::shared_ptr<CellShape> &_link);
    void eraseLink(const int _index);
    int findLink(const int _identifier) const;

    // Simulation steps (defined in the .cpp).
    void addNutrition(RandomGenerator &_random_sampler);
    void calculateNormal();
    void findTarget(const float _spring_factor, const float _planar_factor, const float _bulge_factor, const float _link_rest_length);
    void update(const float _time_step);

private:
    std::vector< boost::shared_ptr<CellShape> > m_links;  // neighbouring cells
    float m_nutrition;
    int m_identifier;
    Vector3f m_target_pos;  // position the relaxation step moves toward
    Vector3f m_normal;      // surface normal estimated from links

    // Per-force target contributions combined by findTarget().
    Vector3f springTarget(const float _link_rest_length) const;
    Vector3f planarTarget() const;
    Vector3f bulgeTarget(const float _link_rest_length) const;
};

#endif
|
# Because we deploy from the server directory
# the shared directory needs to be inside the server directory.
# So we'll make a symlink inside the server/other directory
# that points to the shared directory, and install that.
mkdir -p "$PWD/server/other"
# -sfn: replace any stale link so the script is safe to re-run
ln -sfn "$PWD/shared" "$PWD/server/other/shared"
cd server || exit 1
npm install -S ./other/shared
npx --package now@9 --call "now -e NODE_ENV=production --token $NOW_TOKEN --npm deploy --public && now alias --token=$NOW_TOKEN"
# restore things (in case someone runs this locally by mistake)
# FIX: we are inside server/ at this point, so the link lives at ./other/shared
# (the original removed server/other/shared, which does not exist from this cwd)
rm -rf other/shared
npm install -S ../shared
|
<gh_stars>0
import { Type } from "../types";
// true while any item request is in flight; false once one settles.
const loadingReducer = (state = false, { type }) => {
  const startTypes = [
    Type.ITEM_DATA_FETCH_START,
    Type.ITEM_ADD_START,
    Type.ITEM_DELETE_START,
    Type.ITEM_UPDATE_START,
  ];
  const settleTypes = [
    Type.ITEM_DATA_FETCH_SUCCESS,
    Type.ITEM_DATA_FETCH_ERROR,
    Type.ITEM_ADD_SUCCESS,
    Type.ITEM_ADD_ERROR,
    Type.ITEM_DELETE_SUCCESS,
    Type.ITEM_DELETE_ERROR,
    Type.ITEM_UPDATE_SUCCESS,
    Type.ITEM_UPDATE_ERROR,
  ];
  if (startTypes.includes(type)) {
    return true;
  }
  if (settleTypes.includes(type)) {
    return false;
  }
  return state;
};
export default loadingReducer;
|
<reponame>scottburton11/moviebot
# Local file accessor: wraps Kernel#open on a filesystem path and delegates
# every method call (read, write, each_line, ...) to the underlying File.
class Local
  def initialize(path, mode)
    # FIX: the original wrote `mode = mode` / `path = path`, which are no-op
    # self-assignments; store them in instance variables instead.
    @mode = mode
    @path = path
    @file = open(path, mode)
  end

  # Delegate everything to the open File object.
  def method_missing(meth, *args, &block)
    @file.send(meth.to_sym, *args, &block)
  end
end
# HTTP accessor: fetches the body of a URL via Net::HTTP.
class HTTP
  def initialize(path, mode)
    # FIX: the original `mode = mode` / `path = path` were no-op
    # self-assignments; keep the values on the instance.
    @mode = mode
    @path = path
    @url = URI.parse(path)
  end

  # Perform a GET and return the response body.
  def read
    req = Net::HTTP::Get.new(@url.path)
    res = Net::HTTP.start(@url.host, @url.port) {|http|
      http.request(req)
    }
    res.body
  end

  # Delegate anything else (host, port, scheme, ...) to the parsed URI.
  # FIX: the original delegated to @file, which is never assigned in this
  # class, so every delegated call raised NoMethodError on nil. Also accept
  # args/block like the other accessors.
  def method_missing(meth, *args, &block)
    @url.send(meth.to_sym, *args, &block)
  end
end
# Placeholder for an S3-backed accessor — no behavior implemented yet.
# NOTE(review): FileAccessor calls S3.new with (path, mode); this arity
# mismatch must be resolved when S3 support is implemented.
class S3
def initialize
end
end
# Facade that picks a backend (Local, HTTP or S3) from the path's scheme
# and delegates all calls to it.
class FileAccessor
  require 'ostruct'
  require 'open-uri'

  attr_reader :path

  def initialize(path, mode="r")
    @path = path
    @mode = mode
    build_accessor
    # Delegator.new(@accessor)
  end

  # Choose the accessor implementation based on the path's URI scheme.
  def build_accessor
    f = OpenStruct.new
    f.local = Local
    f.http = HTTP
    f.s3 = S3
    uri_prefix = %r|[a-z]*:\/\/|
    s3_prefix = %r|s3:\/\/|
    # FIX: test the s3 scheme BEFORE the generic scheme pattern — the generic
    # pattern also matches "s3://", so in the original order the S3 branch
    # was unreachable.
    case @path
    when s3_prefix
      # S3#initialize currently takes no arguments in this file.
      @accessor = f.s3.new
    when uri_prefix
      @accessor = f.http.new(@path, @mode)
    else
      @accessor = f.local.new(@path, @mode)
    end
  end

  # Delegate everything to the chosen backend.
  def method_missing(meth, *args, &block)
    @accessor.send(meth.to_sym, *args, &block)
  end
end
|
module BrNfe
module Product
# Issuer (emitente) of an NF-e product invoice.
class Emitente < BrNfe::Person
validate :validar_endereco
# State registration (IE) of the ICMS tax-substitution taxpayer in the
# goods' destination state, used when ICMS ST is withheld for that state.
#
# <b>Type: </b> _Number_
#
attr_accessor :inscricao_estadual_st
# CNAE code (Brazilian national classification of economic activities).
#
attr_accessor :cnae_code
end
end
end
<reponame>ZacharyWeiner/nomcre-rails
# Adds a user_id reference column (with a foreign-key constraint) to proposals.
class AddUserToProposal < ActiveRecord::Migration[5.0]
def change
add_reference :proposals, :user, foreign_key: true
end
end
|
#!/bin/bash
# Write the cross product of two space-separated lists to
# ./single_combinations.txt (truncated first), one "elem1--elem2" per line.
#   $1 - first list (space-separated string)
#   $2 - second list (space-separated string)
fun_gen_in_two_arrs() {
  rm -rf ./single_combinations.txt
  # Plain word-splitting is enough; the original piped each argument through
  # `echo | cut -f 1-`, which was an expensive no-op for space-separated input.
  local _firstArr=($1)
  local _firstArrLen=${#_firstArr[@]}
  local _secondArr=($2)
  local _secondArrLen=${#_secondArr[@]}
  # (removed the unused global `index` counter from the original)
  local i j
  for ((i = 0; i < _firstArrLen; i++)); do
    for ((j = 0; j < _secondArrLen; j++)); do
      echo "${_firstArr[$i]}--${_secondArr[$j]}" >> ./single_combinations.txt
    done
  done
}
rm -rf ./final_combinations.txt
# Each non-comment input line ($1) holds "&"-separated groups of ";"-separated
# alternatives; expand every line into the cross product of its groups and
# append the results to ./final_combinations.txt.
while IFS= read -r line || [[ -n "$line" ]]; do
  # skip comment lines
  if [[ ${line} =~ ^#.* ]]; then
    continue
  fi
  original_strs=()
  original_index=0
  # split the line into its "&"-separated groups
  for i in $(echo "$line" | tr "&" "\n"); do
    original_strs[$original_index]=$i
    let original_index+=1
  done
  for i in "${!original_strs[@]}"; do
    sub_str=${original_strs[$i]}
    if [ "$i" -eq 0 ]; then
      # seed the running combinations with the first group's alternatives
      rm -rf ./single_combinations.txt
      for j in $(echo "$sub_str" | tr ";" "\n"); do
        echo "$j" >> ./single_combinations.txt
      done
    else
      sub_firstArr=()
      sub_firstIndex=0
      # FIX: use a distinct variable here — the original reused `line`,
      # clobbering the outer while-loop's variable.
      while IFS= read -r combo_line; do
        sub_firstArr[$sub_firstIndex]=$combo_line
        let sub_firstIndex+=1
      done < ./single_combinations.txt
      sub_secondArr=($(echo "$sub_str" | tr ";" "\n"))
      fun_gen_in_two_arrs "${sub_firstArr[*]}" "${sub_secondArr[*]}"
    fi
  done
  cat ./single_combinations.txt >> ./final_combinations.txt
done < "$1"
rm -rf ./single_combinations.txt
|
import { StyleSheet } from 'react-native';
import { DarkTheme } from 'react-native-paper';
// Shared styles for a picker screen (names suggest a measurement picker —
// confirm against the consuming component). Text colors follow the paper
// DarkTheme.
export default StyleSheet.create({
title: {
fontSize: 21,
fontWeight: 'bold',
color: DarkTheme.colors.text,
},
// Horizontal row spreading the picker controls apart, vertically centered.
pickerContainer: {
display: 'flex',
flexDirection: 'row',
paddingVertical: 25,
justifyContent: 'space-between',
alignItems: 'center',
},
measure: {
flexBasis: '40%',
fontSize: 22,
},
});
|
<gh_stars>0
/*
* Copyright © 2019 <NAME>, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
import React from 'react';
import T from 'i18n-react';
import { Consumer } from '../Context/FllContext';
/** Props for a field-level-lineage table-group header. */
interface IHeaderProps {
  type: string; // compared against 'target' | 'impact' | 'cause' below
  first: number;
  total: number;
}

/**
 * Renders the header and "first..last of total" subheader for a group of
 * lineage tables, reading numTables and the target name from context.
 */
function FllHeader({ type, first, total }: IHeaderProps) {
  return (
    <Consumer>
      {({ numTables, target }) => {
        let last;
        // FIX: the original compared `type === ('impact' || 'cause')`, which
        // evaluates to `type === 'impact'`, so 'cause' never matched.
        if (type === 'impact' || type === 'cause') {
          last = first + numTables - 1 <= total ? first + numTables - 1 : total;
        } else {
          last = total;
        }
        const header =
          type === 'target'
            ? T.translate('features.FieldLevelLineage.v2.FllHeader.TargetHeader')
            : T.translate('features.FieldLevelLineage.v2.FllHeader.RelatedHeader', {
                type,
                target,
              });
        const options = { first, last, total };
        const subHeader =
          type === 'target'
            ? T.translate('features.FieldLevelLineage.v2.FllHeader.TargetSubheader', options)
            : T.translate('features.FieldLevelLineage.v2.FllHeader.RelatedSubheader', options);
        return (
          <React.Fragment>
            <div>{header}</div>
            <div>{subHeader}</div>
          </React.Fragment>
        );
      }}
    </Consumer>
  );
}

export default FllHeader;
|
def find_factors(num):
    """Print and return the proper factors of ``num``.

    Args:
        num: Integer to factor.

    Returns:
        Sorted list of factors of ``num`` strictly between 1 and ``num``
        (empty for primes and for ``num`` <= 3). The original printed the
        list but returned None; returning it is backward-compatible and
        makes the function reusable.
    """
    factors = [i for i in range(2, num) if num % i == 0]
    print(f"Factors of {num}: {factors}")
    return factors


find_factors(60)
# Evaluate a language model (HuggingFace transformers example script) on the
# WikiText-103 raw validation split, with a custom augmentation function and
# a custom eval function, writing results to eval-outputs/.
# NOTE(review): model/tokenizer/data paths are machine-specific — confirm
# they exist before running.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-STG/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-STG/13-1024+0+512-N-VB-IP-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function replace_all_but_nouns_and_verbs_first_two_thirds_sixth --eval_function last_sixth_eval
<reponame>LwcReber/vue-admin<gh_stars>0
import { asyncRoutes, constantRoutes } from '@/router'
import { findPermissionBtns } from '@/utils/permission'
/**
* Use meta.role to determine if the current user has permission
* @param roles
* @param route
*/
/**
 * Whether any of the user's roles grants access to the route.
 * Routes without meta.roles are open to everyone.
 */
function hasPermission (roles, route) {
  const required = route.meta && route.meta.roles
  if (!required) {
    return true
  }
  return roles.some((role) => required.includes(role))
}
/**
 * Recursively keep only the routes the given roles may access.
 * @param routes asyncRoutes
 * @param roles user roles
 * @returns a new array of shallow copies; the input is not mutated
 */
export function filterAsyncRoutes (routes, roles) {
  return routes.reduce((kept, route) => {
    const candidate = { ...route }
    // partition routes by role-based permission
    if (hasPermission(roles, candidate)) {
      if (candidate.children) {
        candidate.children = filterAsyncRoutes(candidate.children, roles)
      }
      kept.push(candidate)
    }
    return kept
  }, [])
}
const state = {
routes: [], // constant (static) routes + user-permission routes
userRoutes: [], // user-permission routes only
btns: [] // permission buttons for the current page
}
const mutations = {
// Store the user's granted routes and rebuild the full route table.
SET_ROUTES: (state, routes) => {
state.userRoutes = routes
state.routes = constantRoutes.concat(routes)
},
// Store the permission buttons resolved for the current page.
SET_PERMISSIONBTNS: (state, btns) => {
state.btns = btns
}
}
// Intersect the full (source) route table with the routes granted to the
// user: for each granted route, take the full definition from sourceRoute
// and recurse into children and permission buttons.
// NOTE: mutates sourceRoute (splices out matched entries).
const handleRoutes = (sourceRoute, userRoutes) => {
const routes = []
for (let index = 0; index < userRoutes.length; index++) {
const route = userRoutes[index]
// find this route's counterpart in the source routes
const parentIndex = sourceRoute.findIndex((item) => item.path === route.path)
// counterpart found: start assembling the route entry
if (parentIndex > -1) {
const parent = sourceRoute[parentIndex]
let children = []
let btns = []
// child-route handling
if (parent.children && route.children) {
children = handleRoutes(parent.children, route.children)
}
// permission-button handling
// NOTE(review): buttons are matched by item.path too — confirm btn entries carry a path
if (parent.btns && route.btns) {
btns = handleRoutes(parent.btns, route.btns)
}
const routeData = { ...parent }
if (children.length) {
routeData.children = children
}
if (btns.length) {
routeData.btns = btns
}
routes.push(routeData)
// remove each match once found, to shorten subsequent lookups
sourceRoute.splice(parentIndex, 1)
}
}
return routes
}
const actions = {
  // Build the user's route table: filter asyncRoutes by role, then intersect
  // with the routes granted to this specific user.
  generateRoutes ({ commit }, { roles, routes }) {
    return new Promise(resolve => {
      const accessedRoutes = filterAsyncRoutes(asyncRoutes, roles)
      // (the original kept commented-out admin shortcuts here; the
      // intersection is applied for every role)
      const userRoutes = handleRoutes(accessedRoutes, routes)
      commit('SET_ROUTES', userRoutes)
      resolve(userRoutes)
    })
  },
  // Resolve and commit the permission buttons for the given path.
  findPermissionBtns ({ commit }, path) {
    return new Promise(resolve => {
      const btns = findPermissionBtns(state.routes, path)
      commit('SET_PERMISSIONBTNS', btns)
      // FIX: the original never called resolve, so awaiting callers hung forever.
      resolve(btns)
    })
  }
}

export default {
  namespaced: true,
  state,
  mutations,
  actions
}
|
import React, { useState, useReducer } from 'react';
// Action type constants for the counter reducer.
const ACTIONS = {
  INCREMENT: 'increment',
  DECREMENT: 'decrement',
};

// Pure counter reducer: produces a fresh state object for known actions
// and returns the existing state unchanged for anything else.
function reducer(state, action) {
  if (action.type === ACTIONS.INCREMENT) {
    return { count: state.count + 1 };
  }
  if (action.type === ACTIONS.DECREMENT) {
    return { count: state.count - 1 };
  }
  return state;
}
// Demo component for the useReducer hook: a simple +/- counter.
export default function UseReducer() {
const [state, dispatch] = useReducer(reducer, { count: 0 });
// state = {count: 0}; dispatch sends an action object to the reducer
function increment() {
dispatch({ type: ACTIONS.INCREMENT });
}
function decrement() {
dispatch({ type: ACTIONS.DECREMENT });
}
return (
<div>
<button onClick={decrement}>-</button>
<span>{state.count}</span>
<button onClick={increment}>+</button>
</div>
);
}
|
#!/bin/sh
########################################################################################################################
# Usage
#
# release.sh
# - Uses conventional commits to determine the next version of each package to release
#
# release.sh --custom
# - Allows you to specify the next version of each package to release
#
########################################################################################################################
# Print a blank line followed by a 60-char minor divider.
# FIX: the original used `echo "\n..."`, which expands \n only under shells
# whose echo interprets escapes (e.g. dash); under bash it printed a literal
# backslash-n. printf behaves the same everywhere.
print_header_line() {
  printf '\n------------------------------------------------------------\n'
}
# Print a blank line followed by a 60-char major divider.
# FIX: portable printf instead of escape-dependent `echo "\n..."` (see
# print_header_line).
print_major_header_line() {
  printf '\n============================================================\n'
}
# Print a short semantic-versioning reference blurb for the releaser.
print_major_semver() {
echo "Reference:"
echo " https://semver.org/"
echo " Given a version number MAJOR.MINOR.PATCH, increment the:"
echo ""
echo " MAJOR version when you make incompatible API changes,"
echo " MINOR version when you add functionality in a backwards compatible manner, and"
echo " PATCH version when you make backwards compatible bug fixes."
echo ""
}
# --- CLI flag parsing: only --custom or --explicit VERSION are recognized ---
# NOTE(review): the file's shebang is #!/bin/sh but [[ ]] is a bashism —
# confirm this script is always run under bash/zsh.
CUSTOM_RELEASE=false
if [[ "$1" == "--custom" ]]
then
CUSTOM_RELEASE=true
fi
EXPLICIT_RELEASE=false
EXPLICIT_RELEASE_VERSION=""
if [[ "$1" == "--explicit" ]]
then
if [[ "$2" == "" ]]
then
echo "You must specify a version number for an explicit release."
echo ""
print_major_semver
echo "Exiting ..."
exit 1
fi
EXPLICIT_RELEASE=true
EXPLICIT_RELEASE_VERSION=$2
fi
# Both project URLs must be configured; abort if either is missing.
if [[ "$CI_PROJECT_URL" == "" ]] || [[ "$GITHUB_PROJECT_URL" == "" ]]
then
echo "Environment variables for project URLs are not specified. Please specify the following variables:"
echo " CI_PROJECT_URL"
echo " GITHUB_PROJECT_URL"
echo "Exiting ..."
exit 1
fi
# --- Sync develop and cut a timestamped release branch ---
print_header_line
echo "Preparing for release ..."
echo "Running 'git checkout develop' ..."
git checkout develop
print_header_line
echo "Running 'git fetch --all && git pull --all' ..."
git fetch --tags --all && git pull --all
print_header_line
BRANCH_NAME=release/$(date +'%Y-%m-%d_%H-%M-%S')
echo "Checking out a $BRANCH_NAME branch ..."
git checkout -b $BRANCH_NAME
git checkout $BRANCH_NAME
print_header_line
echo "Installing dependencies for tooling ..."
yarn
print_header_line
echo "Building resouces for documentation ..."
yarn build
# Drive the version bump via lerna in one of three modes:
#   --custom          : maintainer edits package.json versions by hand first
#   --explicit VER    : force a specific version
#   (default)         : conventional commits decide
if [ "$CUSTOM_RELEASE" == true ]
then
  print_header_line
  echo "Compare develop...master branches to determine versions for each package..."
  echo ""
  echo "Please review all incoming changes and determine the release types of each package."
  echo "Update that package.json of each package you would like to release."
  echo "When you are ready type 'Y' and then press [ENTER]."
  echo ""
  print_major_semver
  read CONFIRM_MANUAL_REVIEW
  if [[ "$CONFIRM_MANUAL_REVIEW" != "y" ]] && [[ "$CONFIRM_MANUAL_REVIEW" != "Y" ]]
  then
    exit 1
  fi
  yarn lerna version --no-push --include-merged-tags
  LERNA_VERSION_EXIT_CODE=$?
  if [ $LERNA_VERSION_EXIT_CODE -ne 0 ]
  then
    print_header_line
    echo "Showing changes with 'git status' ..."
    git status
    exit $LERNA_VERSION_EXIT_CODE
  fi
  print_major_header_line
  echo " NOTICE"
  echo "Please update the respective CHANGELOG.MD files for each package before pushing."
else
  # FIX: the original tested `[ $EXPLICIT_RELEASE ]`, which is true for ANY
  # non-empty string — including the default value "false" — so the explicit
  # branch always ran with an empty version. Compare against "true" instead.
  if [ "$EXPLICIT_RELEASE" == true ]
  then
    print_header_line
    yarn lerna version $EXPLICIT_RELEASE_VERSION --no-push --include-merged-tags --yes
  else
    print_header_line
    yarn lerna version --conventional-commits --no-push --include-merged-tags --yes
  fi
  LERNA_VERSION_EXIT_CODE=$?
  if [ $LERNA_VERSION_EXIT_CODE -ne 0 ]
  then
    print_header_line
    echo "Showing changes with 'git status' ..."
    git status
    exit $LERNA_VERSION_EXIT_CODE
  fi
  print_header_line
  echo "Would you like to push the release branch to the repository? Type 'Y' and then press [ENTER]."
  read PUSH_TO_GIT
  if [[ "$PUSH_TO_GIT" == "y" ]] || [[ "$PUSH_TO_GIT" == "Y" ]]
  then
    print_header_line
    echo " WARNING"
    echo "Tags created locally ..."
    echo "Pushing branch $BRANCH_NAME ..."
    echo ""
    git push --set-upstream origin $BRANCH_NAME
  else
    print_major_header_line
    echo " WARNING"
    echo "No changes pushed, tags and branch $BRANCH_NAME only created locally ..."
    echo ""
  fi
fi
# --- Manual follow-up checklist printed for the releaser ---
print_major_header_line
echo " NEXT STEPS"
echo ""
echo "1. Merge the release branch to master"
echo ""
echo "2. Once the release branch has merged, ensure lerna-generated tags match package.json versions of each package and the merged commit hash"
echo " - Tag Name Format: @splunkdev/[PACKAGE_NAME]@[VERSION]"
echo " - Example: @splunkdev/cloud-auth-common@1.0.0"
echo ""
echo " View local tags:"
echo " 'git tag'"
echo ""
echo " Delete existing tag:"
echo " 'git tag -d [TAG_NAME]'"
echo ""
echo " Create new tag:"
echo " 'git tag [TAG_NAME]'"
echo ""
echo " Update existing tag:"
echo " 'git tag -f [TAG_NAME] [COMMIT_HASH]'"
echo ""
echo "3. Merge the tags"
echo ""
|
#!/bin/bash
# CI test selector: without libevent only the stream_select loop is testable;
# otherwise install the libevent extension and test that loop instead.
set -ev
case "$LIBEVENT_ENABLED" in
  false)
    phpunit tests/CreateStreamSelectLoopTest.php
    ;;
  *)
    pecl install -f libevent
    phpunit tests/CreateLibEventLoopTest.php
    ;;
esac
|
#!/bin/zsh
# ****************************************************************************
# Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo
# Copyright (C) 2001-2020 German Aerospace Center (DLR) and others.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License 2.0 which is available at
# https://www.eclipse.org/legal/epl-2.0/
# This Source Code may also be made available under the following Secondary
# Licenses when the conditions for such availability set forth in the Eclipse
# Public License 2.0 are satisfied: GNU General Public License, version 2
# or later which is available at
# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
# ****************************************************************************
# @file osm-web-wizard-launcher.zsh
# @author Robert Hilbrich
# @date Thu, 14 May 2020
#
# Launcher script for macOS application bundle for osm-web-wizard
# ****************************************************************************/
# Go through a list of possible config files, which may contain SUMO_HOME declaration
FILE_LIST=("$HOME/.zshrc" "$HOME/.bash_profile" "$HOME/.bashrc" "$HOME/.profile")
# Source each file until SUMO_HOME appears; the check happens BEFORE each
# source, so the loop stops on the iteration after the defining file.
for FILE in ${FILE_LIST[*]}; do
if [ -z "$SUMO_HOME" ]; then
# NOTE(review): the 2>/dev/null on echo looks misplaced — presumably meant
# to silence `source $FILE` on missing files; confirm.
echo "Loading $FILE" 2> /dev/null
source $FILE
else
echo "Found \$SUMO_HOME"
break
fi
done
# Without SUMO_HOME we cannot locate the wizard: show a macOS dialog and bail.
if [ -z "$SUMO_HOME" ]; then
echo "\$SUMO_HOME not found - aborting"
osascript -e 'tell application (path to frontmost application as text) to display dialog "Could not identify the location of SUMO (SUMO_HOME environment variable not found)\n\nPlease set this environment variable in .zshrc, .bash_profile or .profile" buttons {"OK"} with icon stop'
exit -1
else
echo "Starting: python $SUMO_HOME/tools/osmWebWizard.py"
python $SUMO_HOME/tools/osmWebWizard.py
fi
|
<filename>_site/src/basic-web-validations/src/main/java/com/example/basicwebvalidations/AppConfig.java
package com.example.basicwebvalidations;
import org.springframework.context.MessageSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.ReloadableResourceBundleMessageSource;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
/**
 * Spring configuration wiring JSR-303 bean validation to a reloadable
 * message source, so validation messages resolve from
 * classpath:messages*.properties.
 */
@Configuration
public class AppConfig {
    public AppConfig(){
        // FIX: removed a leftover debug System.out.println from the constructor.
    }
    // NOTE(review): not annotated with @Bean, so Spring does not register this
    // method's return value as a bean; kept only for interface compatibility.
    public String myBean(){
        return "fdsa";
    }
    /** Message source for i18n and validation messages (UTF-8, classpath:messages). */
    @Bean
    public MessageSource messageSource() {
        ReloadableResourceBundleMessageSource bean = new ReloadableResourceBundleMessageSource();
        bean.setBasename("classpath:messages");
        bean.setDefaultEncoding("UTF-8");
        return bean;
    }
    /** Validator that resolves its messages through {@link #messageSource()}. */
    @Bean
    public javax.validation.Validator validator() {
        final LocalValidatorFactoryBean factory = new LocalValidatorFactoryBean();
        factory.setValidationMessageSource(messageSource());
        return factory;
    }
}
|
-- Application accounts table.
CREATE TABLE users (
id INT AUTO_INCREMENT PRIMARY KEY,
username VARCHAR(20) NOT NULL,
email VARCHAR(255) NOT NULL,
-- NOTE(review): store a password hash, never plaintext; 255 chars suits
-- bcrypt/argon2 digests.
password VARCHAR(255) NOT NULL
);
#!/usr/bin/env bash
# Colors
NOCOLOR='\033[0m'
RED='\033[0;31m' # Error message
LIGHTRED='\033[1;31m'
GREEN='\033[0;32m' # Success message
LIGHTGREEN='\033[1;32m'
ORANGE='\033[0;33m'
YELLOW='\033[1;33m' # Warning message
BLUE='\033[0;34m' # Info message
LIGHTBLUE='\033[1;34m'
PURPLE='\033[0;35m'
FUCHSIA='\033[0;35m'
LIGHTPURPLE='\033[1;35m'
CYAN='\033[0;36m'
LIGHTCYAN='\033[1;36m'
DARKGRAY='\033[1;30m'
LIGHTGRAY='\033[0;37m'
WHITE='\033[1;37m'
# colorEcho [COLOR] TEXT...
# With two or more arguments the first is treated as an ANSI color prefix;
# with a single argument the text is printed without an explicit color.
# The NOCOLOR reset sequence is always appended.
function colorEcho() {
    local color=""
    local text=("$@")
    if [[ $# -gt 1 ]]; then
        color=$1
        text=("${@:2}")
    fi
    echo -e "${color}${text[*]}${NOCOLOR}"
}
# check_socks5_proxy_up [SOCKS_HOST:PORT] [TEST_URL]
# Returns 0 when TEST_URL is reachable through the given socks5 proxy,
# 1 otherwise. Defaults: proxy 127.0.0.1:1080, url www.google.com.
function check_socks5_proxy_up() {
    local proxy=${1:-"127.0.0.1:1080"}
    local probe_url=${2:-"www.google.com"}
    if curl -fsL -I --connect-timeout 3 --max-time 5 \
            --socks5-hostname "${proxy}" \
            "${probe_url}" >/dev/null 2>&1; then
        return 0
    fi
    return 1
}
# Create (if absent), enable and restart a systemd unit for an executable.
#   $1 - service name
#   $2 - ExecStart command line
#   $3 - service user (default: nobody)
#   $4 - working directory (default: directory of the executable in $2)
# Side effects: writes /etc/systemd/system/<name>.service via sudo and
# enables/restarts the unit.
function Install_systemd_Service() {
    # Usage:
    # Install_systemd_Service "subconverter" "/srv/subconverter/subconverter"
    local service_name=$1
    local service_exec=$2
    local service_user=${3:-"nobody"}
    local service_workdir=${4:-""}
    local filename
    local service_file
    [[ $# -lt 2 ]] && return 1
    [[ -z "$service_name" ]] && return 1
    [[ -z "$service_exec" ]] && return 1
    # Default workdir: resolve the executable (first word of ExecStart).
    if [[ -z "$service_workdir" ]]; then
        filename=$(echo "${service_exec}" | cut -d" " -f1)
        service_workdir=$(dirname "$(readlink -f "$filename")")
    fi
    service_file="/etc/systemd/system/${service_name}.service"
    # Only write the unit file if it doesn't already exist (non-empty).
    if [[ ! -s "$service_file" ]]; then
        sudo tee "$service_file" >/dev/null <<-EOF
[Unit]
Description=${service_name}
After=network.target network-online.target nss-lookup.target
[Service]
Type=simple
StandardError=journal
User=${service_user}
AmbientCapabilities=CAP_NET_BIND_SERVICE
ExecStart=${service_exec}
WorkingDirectory=${service_workdir}
ExecReload=/bin/kill -HUP \$MAINPID
Restart=on-failure
RestartSec=5s
[Install]
WantedBy=multi-user.target
EOF
    fi
    sudo systemctl enable "$service_name" && sudo systemctl restart "$service_name"
    if systemctl is-enabled "$service_name" >/dev/null 2>&1; then
        colorEcho "${GREEN} systemd service ${FUCHSIA}${service_name}${GREEN} installed!"
    else
        colorEcho "${RED} systemd service ${FUCHSIA}${service_name}${GREEN} install failed!"
    fi
}
# Clean up the scratch directory on any exit.
trap 'rm -rf "${WORKDIR}"' EXIT
# Only trust an inherited WORKDIR if it is an existing directory under /tmp.
[[ -z "${WORKDIR}" || "${WORKDIR}" != "/tmp/"* || ! -d "${WORKDIR}" ]] && WORKDIR="$(mktemp -d)"
DOWNLOAD_URL=${1:-""}
SUB_LIST_FILE=${2:-"/etc/clash/clash_client_subscription.list"}
# Install subconverter + clash if not present: either via the dotfiles
# installer scripts or from a prebuilt zip at $DOWNLOAD_URL.
if [[ ! -d "/srv/clash" ]]; then
if [[ -z "${DOWNLOAD_URL}" ]]; then
[[ -s "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/cross/subconverter_installer.sh" ]] && \
source "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/cross/subconverter_installer.sh"
[[ -s "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/cross/clash_installer.sh" ]] && \
source "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/cross/clash_installer.sh"
else
wget -c -O "/tmp/subconverter_clash.zip" "${DOWNLOAD_URL}" && \
unzip -qo "/tmp/subconverter_clash.zip" -d "/srv" && \
rm -f "/tmp/subconverter_clash.zip" && \
Install_systemd_Service "subconverter" "/srv/subconverter/subconverter" && \
Install_systemd_Service "clash" "/srv/clash/clash -d /srv/clash" "root" && \
colorEcho "${GREEN}Subconverter & Clash installed!"
fi
fi
# Make sure both services are running; bail if clash still isn't up.
if ! pgrep -f "clash" >/dev/null 2>&1; then
[[ -d "/srv/subconverter" ]] && Install_systemd_Service "subconverter" "/srv/subconverter/subconverter"
[[ -d "/srv/clash" ]] && Install_systemd_Service "clash" "/srv/clash/clash -d /srv/clash" "root"
systemctl is-enabled clash >/dev/null 2>&1 && sudo systemctl restart clash
fi
if ! pgrep -f "clash" >/dev/null 2>&1; then
colorEcho "${RED}Please install and run ${FUCHSIA}clash${RED} first!"
exit 1
fi
TARGET_CONFIG_FILE="/srv/clash/config.yaml"
# Fall back through alternative subscription-list locations.
[[ ! -s "${SUB_LIST_FILE}" ]] && SUB_LIST_FILE="/srv/clash/clash_client_subscription.list"
[[ ! -s "${SUB_LIST_FILE}" ]] && SUB_LIST_FILE="$HOME/clash_client_subscription.list"
# NOTE(review): "CONIFG" is a typo for "CONFIG", but it is used consistently
# within this script, so renaming is optional.
DNS_CONIFG_FILE="/etc/clash/clash_client_dns.yaml"
[[ ! -s "${DNS_CONIFG_FILE}" ]] && DNS_CONIFG_FILE="$HOME/clash_client_dns.yaml"
# Seed a default clash DNS section if no DNS config file exists yet.
if [[ ! -s "${DNS_CONIFG_FILE}" ]]; then
tee -a "${DNS_CONIFG_FILE}" >/dev/null <<-'EOF'
dns:
enable: true
listen: 127.0.0.1:8053
ipv6: true
enhanced-mode: redir-host
fake-ip-range: 198.18.0.1/16
nameserver:
- 223.5.5.5
- 114.114.114.114
- https://dns.alidns.com/dns-query
- "[2400:3200::1]:53"
fallback:
- tcp://1.1.1.1
- tcp://8.8.8.8
- tls://1.0.0.1:853
- tls://dns.google:853
- "[2606:4700:4700::1111]:53"
- "[2620:fe::9]:53"
EOF
fi
# Build and install a clash config from the subscription list: for every
# subconverter endpoint x subscription-url combination, download a config,
# normalize its ports, inject DNS settings if missing, install it, and stop
# at the first combination whose proxy actually works.
if [[ -s "${SUB_LIST_FILE}" ]]; then
# Subscribe urls
URL_EXCLUDE=$(grep -E '^# exclude=' "${SUB_LIST_FILE}" | cut -d" " -f2)
URL_CONFIG=$(grep -E '^# config=' "${SUB_LIST_FILE}" | cut -d" " -f2)
URL_LIST_CONTENT=$(grep -E '^# url=' "${SUB_LIST_FILE}" | cut -d" " -f2 | cut -d"=" -f2)
URL_UNION=""
URL_LIST=()
# Collect each url, plus a "union" entry of all urls joined by %7C ('|').
while read -r READLINE || [[ "${READLINE}" ]]; do
URL_LIST+=("${READLINE}")
[[ -n "${URL_UNION}" ]] && URL_UNION="${URL_UNION}%7C${READLINE}" || URL_UNION="${READLINE}"
done <<<"${URL_LIST_CONTENT}"
URL_LIST+=("${URL_UNION}")
# Subconverter web service urls
SUB_LIST=()
# || In case the file has an incomplete (missing newline) last line
while read -r READLINE || [[ "${READLINE}" ]]; do
[[ "${READLINE}" =~ ^#.* ]] && continue
SUB_LIST+=("${READLINE}")
done < "${SUB_LIST_FILE}"
# Download clash configuration file
SUB_DOWNLOAD_FILE="${WORKDIR}/clash_sub.yaml"
for TargetURL in "${SUB_LIST[@]}"; do
[[ -z "${TargetURL}" ]] && continue
for URL_URL in "${URL_LIST[@]}"; do
[[ -z "${URL_URL}" ]] && continue
# https://www.example.com/sub?target=clash&url=<url>&config=<config>&exclude=<exclude>
DownloadURL="${TargetURL}&url=${URL_URL}&${URL_CONFIG}"
[[ -n "${URL_EXCLUDE}" ]] && DownloadURL="${DownloadURL}&${URL_EXCLUDE}"
colorEcho "${BLUE}Downloading clash configuration from ${FUCHSIA}${DownloadURL}${BLUE}..."
curl -fSL --noproxy "*" --connect-timeout 10 --max-time 60 \
-o "${SUB_DOWNLOAD_FILE}" "${DownloadURL}"
curl_download_status=$?
if [[ ${curl_download_status} -eq 0 ]]; then
# Disable LAN access, comment out all port/controller lines, then pin
# our own mixed/redir ports at the top of the file.
sed -i -e "s/^allow-lan:.*/allow-lan: false/" \
-e "s/^external-controller:.*/# &/" \
-e "s/^port:.*/# &/" \
-e "s/^redir-port:.*/# &/" \
-e "s/^mixed-port:.*/# &/" \
-e "s/^socks-port:.*/# &/" "${SUB_DOWNLOAD_FILE}"
sed -i "1i\mixed-port: 7890\nredir-port: 7892" "${SUB_DOWNLOAD_FILE}"
# Inject the default DNS section only when the config has none.
[[ -x "$(command -v yq)" ]] && DNS_ENABLE=$(yq e ".dns.enable // \"\"" "${SUB_DOWNLOAD_FILE}")
[[ -z "${DNS_ENABLE}" && -s "${DNS_CONIFG_FILE}" ]] && sed -i "/^redir-port/r ${DNS_CONIFG_FILE}" "${SUB_DOWNLOAD_FILE}"
sudo cp -f "${SUB_DOWNLOAD_FILE}" "${TARGET_CONFIG_FILE}"
# if pgrep -f "clash" >/dev/null 2>&1; then
if systemctl is-enabled clash >/dev/null 2>&1; then
colorEcho "${BLUE}Checking clash connectivity..."
sudo systemctl restart clash && sleep 3
# First working combination wins; leave both loops.
if check_socks5_proxy_up "127.0.0.1:7890"; then
colorEcho "${GREEN}The configuration looks ok, done!"
break 2
else
colorEcho "${RED}Connection failed!"
fi
else
break 2
fi
else
break
fi
done
done
fi
# Determine the proxy host: under WSL2 the proxy runs on the Windows side,
# so use the Windows adapter IP; otherwise localhost.
if [[ "$(uname -r)" =~ "microsoft" ]]; then
# WSL2
PROXY_IP=$(ipconfig.exe | grep "IPv4" \
| grep -Eo '([0-9]{1,3}[\.]){3}[0-9]{1,3}' \
| grep -Ev "^0\.|^127\.|^172\." \
| head -n1)
# PROXY_IP=$(grep -m1 nameserver /etc/resolv.conf | awk '{print $2}')
# PROXY_ADDRESS="socks5://${PROXY_IP}:7890"
# export {http,https,ftp,all}_proxy=${PROXY_ADDRESS} && export {HTTP,HTTPS,FTP,ALL}_PROXY=${PROXY_ADDRESS}
# git config --global http.proxy \"${PROXY_ADDRESS}\" && git config --global https.proxy \"${PROXY_ADDRESS}\"
else
PROXY_IP="127.0.0.1"
fi
# Probe the two ports clash may listen on (mixed: 7890, alt: 7891).
PROXY_ADDRESS=""
if check_socks5_proxy_up "${PROXY_IP}:7890"; then
PROXY_ADDRESS="${PROXY_IP}:7890"
elif check_socks5_proxy_up "${PROXY_IP}:7891"; then
PROXY_ADDRESS="${PROXY_IP}:7891"
fi
if [[ -z "${PROXY_ADDRESS}" ]]; then
colorEcho "${RED}Error when setting socks5 proxy!"
exit 1
fi
PROXY_ADDRESS="socks5://${PROXY_ADDRESS}"
# Print copy-paste commands for the user; nothing is exported here.
colorEcho "${GREEN}Usage:"
colorEcho "${BLUE} export {http,https,ftp,all}_proxy=\"${PROXY_ADDRESS}\""
colorEcho "${BLUE} export {HTTP,HTTPS,FTP,ALL}_PROXY=\"${PROXY_ADDRESS}\""
colorEcho "${BLUE} git config --global http.proxy \"${PROXY_ADDRESS}\""
colorEcho "${BLUE} git config --global https.proxy \"${PROXY_ADDRESS}\""
unset PROXY_IP
unset PROXY_ADDRESS
colorEcho "${GREEN}Done!"
/*
* Copyright 2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.constretto.spring;
import org.constretto.exception.ConstrettoException;
import org.constretto.spring.annotation.Environment;
import org.constretto.spring.internal.ConstrettoAutowireCandidateResolver;
import org.constretto.spring.resolver.AssemblyContextResolver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactoryUtils;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static java.util.Arrays.asList;
/**
* A BeanFactoryBeanFactoryPostProcessor implementation that will if registered as a bean in a spring context, enable
* the constretto autowiring capabilities in the container.
* <br>
* <br>
* May be used on any existing configurations and in combination with all the standard context implementations from the
* Spring framework.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class EnvironmentAnnotationConfigurer implements BeanFactoryPostProcessor {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final AssemblyContextResolver assemblyContextResolver;
public static final String INCLUDE_IN_COLLECTIONS = "includeInCollections";
public EnvironmentAnnotationConfigurer(AssemblyContextResolver assemblyContextResolver) {
this.assemblyContextResolver = assemblyContextResolver;
}
@SuppressWarnings("unchecked")
// Installs the Constretto autowire-candidate resolver, then disables
// autowiring for @Environment-annotated beans that do not match the
// current assembly context.
public void postProcessBeanFactory(ConfigurableListableBeanFactory configurableListableBeanFactory)
throws BeansException {
// The custom resolver can only be installed on a DefaultListableBeanFactory.
if (!(configurableListableBeanFactory instanceof DefaultListableBeanFactory)) {
throw new IllegalStateException(
"EnvironmentAnnotationConfigurer needs to operate on a DefaultListableBeanFactory");
}
DefaultListableBeanFactory defaultListableBeanFactory = (DefaultListableBeanFactory) configurableListableBeanFactory;
defaultListableBeanFactory.setAutowireCandidateResolver(new ConstrettoAutowireCandidateResolver());
String[] beanNames = configurableListableBeanFactory.getBeanDefinitionNames();
// NOTE(review): never read after initialization within this method — confirm
// against the full file whether this local is dead.
int lowestDiscoveredPriority = Integer.MAX_VALUE;
for (String beanName : beanNames) {
BeanDefinition beanDefinition = configurableListableBeanFactory.getBeanDefinition(beanName);
if (beanDefinition.getBeanClassName() != null) {
try {
Class beanClass = Class.forName(beanDefinition.getBeanClassName());
Environment environmentAnnotation = findEnvironmentAnnotation(beanClass);
if (environmentAnnotation != null) {
if (!assemblyContextResolver.getAssemblyContext().isEmpty()) {
boolean autowireCandidate = decideIfAutowireCandiate(beanName, environmentAnnotation);
beanDefinition.setAutowireCandidate(autowireCandidate);
if (autowireCandidate) {
removeNonAnnotatedBeansFromAutowireForType(beanClass, configurableListableBeanFactory);
}
} else {
// No assembly context available: exclude environment-specific beans.
beanDefinition.setAutowireCandidate(false);
}
}
} catch (ClassNotFoundException e) {
// Class not loadable here — exclude from autowiring rather than fail.
beanDefinition.setAutowireCandidate(false);
}
}
}
}
@SuppressWarnings("unchecked")
// Returns the @Environment annotation on the class itself, or, failing that,
// one found transitively on its annotations (meta-annotation search).
public static Environment findEnvironmentAnnotation(Class beanClass) {
    Environment direct = (Environment) beanClass.getAnnotation(Environment.class);
    if (direct != null) {
        return direct;
    }
    return findEnvironmentMetaAnnotation(new HashSet<Annotation>(), beanClass.getAnnotations());
}
// Depth-first search for an @Environment meta-annotation; the visited set
// guards against annotation cycles (annotations annotated with each other).
public static Environment findEnvironmentMetaAnnotation(Set<Annotation> visited, Annotation[] annotations) {
for (Annotation annotation : annotations) {
if (annotation instanceof Environment) {
return (Environment) annotation;
} else {
if (!visited.contains(annotation)) {
visited.add(annotation);
Environment environment = findEnvironmentMetaAnnotation(visited, annotation.annotationType().getAnnotations());
if (environment != null) {
return environment;
}
}
}
}
return null;
}
@SuppressWarnings("unchecked")
// For every bean sharing an interface with lookupClass: exclude beans without
// an @Environment annotation from autowiring, then pick a single autowire
// candidate among the annotated ones (or fail if priorities tie).
private void removeNonAnnotatedBeansFromAutowireForType(Class lookupClass, ConfigurableListableBeanFactory configurableListableBeanFactory) throws ClassNotFoundException {
List<String> beanNames = new ArrayList<String>();
Class[] interfaces = lookupClass.getInterfaces();
for (Class anInterface : interfaces) {
beanNames.addAll(asList(BeanFactoryUtils.beanNamesForTypeIncludingAncestors(configurableListableBeanFactory, anInterface)));
}
List<BeanDefinition> potentialMatches = new ArrayList<BeanDefinition>();
for (String beanName : beanNames) {
BeanDefinition beanDefinition = configurableListableBeanFactory.getBeanDefinition(beanName);
Class beanClass = Class.forName(beanDefinition.getBeanClassName());
// Record the concrete class so collection injection can still include it.
beanDefinition.setAttribute(INCLUDE_IN_COLLECTIONS, new Class[]{beanClass});
Environment environmentAnnotation = findEnvironmentAnnotation(beanClass);
if (environmentAnnotation == null) {
beanDefinition.setAutowireCandidate(false);
} else {
potentialMatches.add(beanDefinition);
}
}
if (potentialMatches.size() == 1) {
potentialMatches.get(0).setAutowireCandidate(true);
} else {
// Several annotated candidates: keep only the highest-priority ones.
List<BeanDefinition> highestPriorityBeans = new ArrayList<BeanDefinition>();
for (BeanDefinition potentialMatch : potentialMatches) {
if (potentialMatch.isAutowireCandidate()) {
potentialMatch.setAutowireCandidate(false);
highestPriorityBeans = prioritizeBeans(potentialMatch, highestPriorityBeans);
}
}
if (highestPriorityBeans.size() == 1) {
highestPriorityBeans.get(0).setAutowireCandidate(true);
} else {
// A tie is unresolvable: report the conflicting bean classes.
List<String> equalPriorityBeans = new ArrayList<String>();
for (BeanDefinition highestPriorityBean : highestPriorityBeans) {
equalPriorityBeans.add(highestPriorityBean.getBeanClassName());
}
throw new ConstrettoException(
"More than one bean with the class or interface + [" + lookupClass.getSimpleName() +"] registered with same tag. Could not resolve priority. To fix this, remove one of the following beans "
+ equalPriorityBeans.toString());
}
}
}
/**
 * Folds a new candidate into the running set of highest-priority bean
 * definitions (lower value from {@link #getAutowirePriority} = better).
 *
 * FIX: the previous implementation compared the candidate against every
 * element of the set inside a loop and re-added the candidate on each equal
 * comparison, which duplicated it when the set held more than one bean, and
 * could return a mixed-priority list when comparisons disagreed across
 * iterations. All beans in the set share one priority by construction, so a
 * single comparison against the first element is both sufficient and correct.
 *
 * @param potentialMatch       candidate bean definition
 * @param highestPriorityBeans current best set (all with equal priority)
 * @return the new best set
 * @throws ClassNotFoundException if a bean class cannot be loaded
 */
private List<BeanDefinition> prioritizeBeans(BeanDefinition potentialMatch, List<BeanDefinition> highestPriorityBeans) throws ClassNotFoundException {
    List<BeanDefinition> result = new ArrayList<BeanDefinition>();
    int matchPriority = getAutowirePriority(Class.forName(potentialMatch.getBeanClassName()));
    if (highestPriorityBeans.isEmpty()) {
        result.add(potentialMatch);
        return result;
    }
    // Invariant: every bean already kept has the same priority.
    int currentPriority = getAutowirePriority(Class.forName(highestPriorityBeans.get(0).getBeanClassName()));
    if (matchPriority < currentPriority) {
        // Strictly better: the candidate replaces the entire previous set.
        result.add(potentialMatch);
    } else if (matchPriority == currentPriority) {
        // Tie: keep the existing set and add the candidate exactly once.
        result.addAll(highestPriorityBeans);
        result.add(potentialMatch);
    } else {
        // Worse: keep the existing set unchanged.
        result.addAll(highestPriorityBeans);
    }
    return result;
}
@SuppressWarnings("unchecked")
/**
 * Priority of a bean class for autowiring: the index of the first assembly
 * context tag that appears in the bean's {@code @Environment} value
 * (earlier tag = smaller number = higher priority), or
 * {@code Integer.MAX_VALUE} when unannotated or unmatched.
 */
private int getAutowirePriority(Class beanClass) {
    Environment environmentAnnotation = findEnvironmentAnnotation(beanClass);
    if (environmentAnnotation == null) {
        return Integer.MAX_VALUE;
    }
    List<String> environments = asList(environmentAnnotation.value());
    int index = 0;
    for (String contextTag : assemblyContextResolver.getAssemblyContext()) {
        if (environments.contains(contextTag)) {
            return index;
        }
        index++;
    }
    return Integer.MAX_VALUE;
}
/**
 * Decides whether a bean is an autowire candidate: true when at least one of
 * its {@code @Environment} tags intersects the current assembly context.
 * Logs the decision either way.
 *
 * FIX: replaced the double-brace {@code new ArrayList<String>() {{ ... }}}
 * initialization — an anonymous inner class that retains a reference to the
 * enclosing instance — with a plain copy via the collection constructor.
 *
 * @param beanName              name of the bean under consideration
 * @param environmentAnnotation its resolved @Environment annotation (non-null)
 * @return true if the bean should remain an autowire candidate
 */
private boolean decideIfAutowireCandiate(String beanName, final Environment environmentAnnotation) {
    List<String> targetEnvironments = new ArrayList<String>(asList(environmentAnnotation.value()));
    validateAnnotationValues(beanName, targetEnvironments);
    List<String> assemblyContext = assemblyContextResolver.getAssemblyContext();
    // Keep only the tags that are active in the current assembly context.
    targetEnvironments.retainAll(assemblyContext);
    boolean autowireCandidate = !targetEnvironments.isEmpty();
    if (autowireCandidate) {
        logger.info("{} is annotated with environment '{}', and is selected for autowiring in the current environment '{}'",
                beanName,
                environmentAnnotation.value(),
                assemblyContextResolver.getAssemblyContext());
    } else {
        logger.info("{} is annotated with environment '{}', and is discarded for autowiring in the current environment '{}'",
                beanName,
                environmentAnnotation.value(),
                assemblyContextResolver.getAssemblyContext());
    }
    return autowireCandidate;
}
/**
 * Rejects an empty {@code @Environment} tag list.
 *
 * @param beanName         bean being validated (used in the error message)
 * @param beanEnvironments tags declared on the bean's @Environment
 * @throws ConstrettoException when no tags were specified
 */
private void validateAnnotationValues(String beanName, List<String> beanEnvironments) {
    if (!beanEnvironments.isEmpty()) {
        return;
    }
    throw new ConstrettoException(
            "You must specify environment tags in @Environment. offending bean: "
            + beanName);
}
}
|
#include <iostream>
using namespace std;

// Reads a row count n from stdin and prints a triangular pattern:
// row i (1-based) contains the number i repeated i times,
// each followed by a space, one row per line.
int main() {
    int rows;
    cout << "Enter the number of rows: ";
    cin >> rows;
    for (int row = 1; row <= rows; ++row) {
        for (int repeat = 0; repeat < row; ++repeat) {
            cout << row << " ";
        }
        cout << endl;
    }
    return 0;
}
#!/usr/bin/env bash
# Setup file: setup.sh for configuring Ubuntu 14.04 LTS (HVM) EC2 instance
# Installs the base toolchain (curl, node, npm, python2.7/pip) and the node
# packages this project uses. Runs unattended, so every apt-get install is
# passed -y to avoid blocking on interactive confirmation prompts.
echo
echo "Setup file: setup.sh for configuring Ubuntu 14.04 LTS (HVM) EC2 instance"
echo
# create an alias in bash
# FIX: the original used typographic quotes (’…’), which the shell treats as
# literal characters, so the alias definition never worked.
# NOTE(review): an alias defined inside a script does not persist into the
# user's login shell; append it to ~/.bashrc if persistence is the intent.
echo "Creating an alias in bash for ll = ls -alrth"
alias ll='ls -alrth'
# Install CURL
echo
echo "Installing CURL"
echo
sudo apt-get install -y curl
# Install NODE.JS
echo
echo "Installing NODE.JS"
echo
sudo apt-get update
sudo apt-get install -y nodejs
# Ubuntu packages the binary as "nodejs"; expose the conventional "node" name.
sudo ln -s /usr/bin/nodejs /usr/bin/node
# Install NPM
echo
echo "Installing NPM"
echo
sudo apt-get install -y npm
# Install jshint
# http://jshint.com/
echo
echo "Installing JSHINT"
echo
sudo npm install -g jshint
# Install rlwrap
# http://nodejs.org/api/repl.html#repl_repl
echo
echo "Install RLWRAP"
echo
sudo apt-get install -y rlwrap
# Install express
echo
echo "Installing Express"
echo
sudo npm install express
# Install Async - Work with asynchronous Javascript
echo
echo "Install ASYNC"
echo
sudo npm install async
# Install Passport - Node Authentication
echo
echo "Install Passport"
echo
sudo npm install passport
# Install EJS - Embedded Javascript Templates
echo
echo "Install EJS"
echo
sudo npm install ejs
# Install EmailJS - Handle email
echo
echo "Install EMAILJS"
echo
sudo npm install emailjs
# Install Python 2.7 and up
echo
echo "Install Python 2.7"
echo
sudo apt-get install -y python2.7
# Download and install PIP
echo
echo "Download and Install PIP"
echo
curl -O https://bootstrap.pypa.io/get-pip.py
sudo python2.7 get-pip.py
# Install AWS EB Command Line Interface
echo
echo "Install AWS EB Command Line Interface"
echo
sudo pip install awsebcli
eb --version
# Install and Setup PostgreSQL
echo
echo "Install and Setup Postgresql"
echo
sudo npm install -g pg
sudo npm install pg-hstore
./pgsetup.sh
# Install NODEJS Middleware for serving favicon: serve-favicon
echo
echo "Installing serve-favicon middleware"
echo
sudo npm install serve-favicon
# Install NODEJS Middleware for cookie: cookie-parser
echo
echo "Installing cookie-parser middleware"
echo
sudo npm install cookie-parser
# Install NODEJS Middleware for body parser: body-parser
echo
echo "Installing body-parser middleware"
echo
sudo npm install body-parser
# Install NODEJS Middleware for session: express-session
echo
echo "Installing express-session middleware"
echo
sudo npm install express-session
# Install NODEJS Middleware for passport: passport and passport-local
echo
echo "Installing passport and passport-local middleware"
echo
sudo npm install passport
sudo npm install passport-local
# Install NODEJS Middleware for serve-static: serve-static
echo
echo "Installing serve-static middleware"
echo
sudo npm install serve-static
# Install NODEJS Middleware for stripe: stripe
echo
echo "Installing stripe middleware"
echo
sudo npm install stripe
# Install NODEJS Middleware for aws-sdk
echo
echo "Installing aws-sdk middleware"
echo
sudo npm install aws-sdk
# Install NODEJS Middleware for imagemagick
echo
echo "Installing imagemagick middleware"
echo
sudo npm install imagemagick
# Install NODEJS Middleware for sequelize
echo
echo "Installing sequelize middleware"
echo
sudo npm install sequelize
# Install NODEJS Middleware for underscore
echo
echo "Installing underscore middleware"
echo
sudo npm install underscore
# Install NODEJS oAuth
echo
echo "Installing oAuth"
echo
sudo npm install oauth
# Install NODEJS foreman
echo
echo "Installing foreman to start node using: nf start"
echo
sudo npm install -g foreman
|
#!/bin/bash
# This is the script buildbot.libsdl.org uses to cross-compile SDL2 from
# x86 Linux to Raspberry Pi.
# The final tarball can be unpacked in the root directory of a RPi,
# so the SDL2 install lands in /usr/local. Run ldconfig, and then
# you should be able to build and run SDL2-based software on your
# Pi. Standard configure scripts should be able to find SDL and
# build against it, and sdl2-config should work correctly on the
# actual device.

# Output tarball name ($1), with a default when no argument was given.
# FIX: quoted "$1"/"$TARBALL" — the original unquoted [ -z $1 ] broke on
# arguments containing whitespace.
TARBALL="$1"
if [ -z "$TARBALL" ]; then
    TARBALL=sdl-raspberrypi.tar.xz
fi

# The cross toolchain paths below only exist on Linux hosts.
OSTYPE=$(uname -s)
if [ "$OSTYPE" != "Linux" ]; then
    # !!! FIXME
    echo "This only works on x86 or x64-64 Linux at the moment." 1>&2
    exit 1
fi

# Default MAKE to a parallel build sized to the CPU count + 1.
if [ -z "$MAKE" ]; then
    NCPU=$(grep -c vendor_id /proc/cpuinfo)
    NCPU=$((NCPU + 1))
    MAKE="make -j$NCPU"
fi

BUILDBOTDIR="raspberrypi-buildbot"
PARENTDIR="$PWD"

set -e
set -x
rm -f "$TARBALL"
rm -rf "$BUILDBOTDIR"
mkdir -p "$BUILDBOTDIR"
pushd "$BUILDBOTDIR"

SYSROOT="/opt/rpi-sysroot"
export CC="ccache /opt/rpi-tools/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian/bin/arm-linux-gnueabihf-gcc --sysroot=$SYSROOT -I$SYSROOT/opt/vc/include -I$SYSROOT/usr/include -I$SYSROOT/opt/vc/include/interface/vcos/pthreads -I$SYSROOT/opt/vc/include/interface/vmcs_host/linux -L$SYSROOT/opt/vc/lib"
# -L$SYSROOT/usr/lib/arm-linux-gnueabihf"

# !!! FIXME: shouldn't have to --disable-* things here.
../configure --with-sysroot="$SYSROOT" --host=arm-raspberry-linux-gnueabihf --prefix="$PWD/rpi-sdl2-installed" --disable-pulseaudio --disable-esd --disable-video-mir --disable-video-wayland
$MAKE          # intentionally unquoted: MAKE holds the command plus its -j flag
$MAKE install

# Fix up a few things to a real install path on a real Raspberry Pi...
perl -w -pi -e "s#$PWD/rpi-sdl2-installed#/usr/local#g;" ./rpi-sdl2-installed/lib/libSDL2.la ./rpi-sdl2-installed/lib/pkgconfig/sdl2.pc ./rpi-sdl2-installed/bin/sdl2-config
mkdir -p ./usr
mv ./rpi-sdl2-installed ./usr/local
popd

tar -cJvvf "$TARBALL" -C "$BUILDBOTDIR" usr
rm -rf "$BUILDBOTDIR"

set +x
echo "All done. Final installable is in $TARBALL ...";
|
import cv2 as cv

# Build a 3D visualization window and populate it with a few basic widgets,
# then block in the render loop until the window is closed.
window = cv.viz_Viz3d("Widgets Example")

# 2D text overlay drawn at pixel coordinates.
window.showWidget("text2d", cv.viz_WText("Hello, World!", (50, 50), 30, cv.viz_Color().red()))

# 3D text placed at a point in the scene.
window.showWidget("text3d", cv.viz_WText3D("3D Widget", (0.2, 0.2, 0.2), 0.15, True, cv.viz_Color().blue()))

# Finite plane widget.
window.showWidget("plane", cv.viz_WPlane((0.1, -0.1), (0.3, 0.3), cv.viz_Color().green()))

# Grid widget (cell count, cell size).
window.showWidget("grid", cv.viz_WGrid((5, 5), (0.5, 0.5), cv.viz_Color().gray()))

# Start the event loop and display the scene.
window.spin()
#!/bin/bash
# Assemble requirements.txt from a base file plus a stage-specific overlay.
# On Heroku-style buildpacks, config vars are exposed as files under $ENV_DIR.
if [ -n "${ENV_DIR+x}" ]; then
  STAGE=$(cat "$ENV_DIR/STAGE")
fi
echo "BUILDING requirements.txt WITH STAGE=$STAGE"
# Pick the overlay for this stage; anything other than production gets
# the development requirements.
if [[ $STAGE == "production" ]]; then
  overlay=requirements-production.txt
else
  overlay=requirements-development.txt
fi
cat requirements-base.txt > requirements.txt
cat "$overlay" >> requirements.txt
# Show the final result in the build log.
cat requirements.txt
|
#!/bin/sh
# $XTermId: acolors.sh,v 1.7 2011/12/11 16:21:22 tom Exp $
# -----------------------------------------------------------------------------
# this file is part of xterm
#
# Copyright 2002-2003,2011 by Thomas E. Dickey
#
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE ABOVE LISTED COPYRIGHT HOLDER(S) BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Except as contained in this notice, the name(s) of the above copyright
# holders shall not be used in advertising or otherwise to promote the
# sale, use or other dealings in this Software without prior written
# authorization.
# -----------------------------------------------------------------------------
# Demonstrate the use of the control sequence for changing ANSI colors.
# Escape character used to build the OSC color-change control sequences.
# NOTE(review): ESC is assigned an empty string here; this script normally
# holds a literal escape (0x1b) character that may have been lost in
# transcription — verify against the xterm distribution.
ESC=""
# Default way to emit a string with no trailing newline: "/bin/echo -n".
CMD='/bin/echo'
OPT='-n'
SUF=''
TMP=/tmp/xterm$$
# Probe whether "echo -n" really suppresses output/newline; if the probe file
# is missing or non-empty, fall back to printf/print with a "\c" suffix.
eval '$CMD $OPT >$TMP || echo fail >$TMP' 2>/dev/null
( test ! -f $TMP || test -s $TMP ) &&
for verb in printf print ; do
rm -f $TMP
eval '$verb "\c" >$TMP || echo fail >$TMP' 2>/dev/null
if test -f $TMP ; then
if test ! -s $TMP ; then
# This verb produced empty output: use it with "\c" to suppress newlines.
CMD="$verb"
OPT=
SUF='\c'
break
fi
fi
done
rm -f $TMP
# Component values (hex) cycled through for the R/G/B channels below.
LIST="00 30 80 d0 ff"
# Talk to the controlling terminal directly; save and restore tty modes so
# the terminal's reply to the color query can be read without echo.
exec </dev/tty
old=`stty -g`
stty raw -echo min 0 time 5
# OSC 4 with "?" asks the terminal to report the current value of color 4;
# the reply is captured so the original color can be restored on exit.
$CMD $OPT "${ESC}]4;4;?${SUF}" > /dev/tty
read original
stty $old
original=${original}${SUF}
# Install an exit trap that restores the saved color. Use symbolic signal
# names when the shell supports an EXIT trap, else fall back to numbers.
if ( trap "echo exit" EXIT 2>/dev/null ) >/dev/null
then
trap '$CMD $OPT "$original" >/dev/tty; exit' EXIT HUP INT TRAP TERM
else
trap '$CMD $OPT "$original" >/dev/tty; exit' 0 1 2 5 15
fi
# Sample text in the ANSI colors that the demo below will re-program.
$CMD "${ESC}[0;1;34mThis message is BLUE"
$CMD "${ESC}[0;1;31mThis message is RED ${ESC}[0;31m(sometimes)"
$CMD "${ESC}[0;1;32mThis message is GREEN${ESC}[0m"
# Endlessly cycle color 9 (bold red) through an RGB lattice, one step per
# second; the RED line above changes color as the palette is rewritten.
while true
do
for R in $LIST
do
for G in $LIST
do
for B in $LIST
do
# color "9" is bold-red
# NOTE(review): the empty string emitted here likely contained a literal
# control character (e.g. BEL) lost in transcription — confirm upstream.
test $R != 00 && test $G = 00 && test $B = 00 && $CMD $OPT "" >/dev/tty
$CMD $OPT "${ESC}]4;9;rgb:$R/$G/$B${SUF}" >/dev/tty
sleep 1
done
done
done
done
|
<filename>src/main/java/tae/packetevent/ChannelHandlerInput.java
package tae.packetevent;
/*
* By TheAlphaEpsilon
* 2JAN2020
*
*/
import io.netty.channel.ChannelPipeline;
import net.minecraft.client.Minecraft;
import net.minecraftforge.fml.common.eventhandler.EventPriority;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.network.FMLNetworkEvent.ClientConnectedToServerEvent;
import net.minecraftforge.fml.common.network.FMLNetworkEvent.ClientDisconnectionFromServerEvent;
import tae.cosmetics.OnLogin;
import tae.cosmetics.exceptions.TAEModException;
/**
 * Forge event handler that installs a custom packet listener into the Netty
 * channel pipeline the first time the client connects to a server, and
 * re-arms itself when the client disconnects.
 */
public class ChannelHandlerInput {

    public static Minecraft mc = Minecraft.getMinecraft();
    // True while the listener still needs to be installed for this session.
    public static boolean firstConnection = true;

    @SubscribeEvent
    public void init(ClientConnectedToServerEvent event) {
        if (!firstConnection) {
            return; // listener already installed for this connection
        }
        firstConnection = false;
        ChannelPipeline pipeline = event.getManager().channel().pipeline();
        try {
            // Insert our listener ahead of vanilla packet handling.
            pipeline.addBefore("packet_handler", "listener", new PacketListener());
        } catch (Exception e) {
            new TAEModException(ChannelHandlerInput.class, e.getClass() + ": " + e.getMessage()).post();
        }
    }

    @SubscribeEvent(priority = EventPriority.HIGHEST)
    public void onDisconnect(ClientDisconnectionFromServerEvent event) {
        // Re-arm so the listener is installed again on the next connection.
        firstConnection = true;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.