text
stringlengths 1
1.05M
|
|---|
<filename>src/app/services/chrome-storage.service.ts
import { Injectable } from '@angular/core';
import { watch } from 'fs';
@Injectable({
  providedIn: 'root',
})
export class ChromeStorageService {
  constructor() {}

  /** Feature flag: when false, all chrome.storage access is bypassed. */
  chromeStorageSwitch = true;

  /**
   * Resolves with the subset of `petsFromSite` whose ids are NOT already
   * recorded in chrome.storage, i.e. pets the user has never watched or
   * dismissed. With storage disabled, every pet is treated as unwatched.
   *
   * @param petIdList storage keys to look up (one per pet id)
   * @param petsFromSite pet objects from the site; each must expose `id`
   */
  updateWatchHistory(petIdList, petsFromSite) {
    if (!this.chromeStorageSwitch) {
      return Promise.resolve(petsFromSite);
    }
    return new Promise((resolve) => {
      chrome.storage.local.get(petIdList, (result) => {
        const unwatchedPets = petsFromSite.filter((pet) => result[pet.id] === undefined);
        resolve(unwatchedPets);
      });
    });
  }

  /**
   * Loads the saved search criteria (zip code plus type/age/breed filters),
   * substituting a default for any key that was never stored.
   *
   * All four keys are fetched in one chrome.storage call (the API accepts an
   * array of keys) instead of the previous four nested callbacks — the
   * resolved object is identical.
   */
  getSearchCriteria() {
    const criteria = {
      zipCode: '',
      filterType: 'all',
      filterAges: [],
      filterBreeds: [],
    };
    if (!this.chromeStorageSwitch) {
      console.log('google storage is off');
      return Promise.resolve(criteria);
    }
    return new Promise((resolve) => {
      chrome.storage.local.get(['zip_code', 'filter_type', 'filter_ages', 'filter_breeds'], (result) => {
        // `== null` deliberately matches both null and undefined.
        criteria.zipCode = result.zip_code == null ? '' : result.zip_code;
        criteria.filterType = result.filter_type == null ? 'all' : result.filter_type;
        criteria.filterAges = result.filter_ages == null ? [] : result.filter_ages;
        criteria.filterBreeds = result.filter_breeds == null ? [] : result.filter_breeds;
        resolve(criteria);
      });
    });
  }

  /** Persists the selected pet type under the `filter_type` key. */
  setFilterType(type) {
    if (this.chromeStorageSwitch) {
      chrome.storage.local.set({ filter_type: type });
    }
  }

  /** Persists the selected age filters under the `filter_ages` key. */
  setFilterAges(ages: string) {
    if (this.chromeStorageSwitch) {
      chrome.storage.local.set({ filter_ages: ages });
    }
  }

  /** Persists the selected breed filters under the `filter_breeds` key. */
  setFilterBreeds(breeds: string) {
    if (this.chromeStorageSwitch) {
      chrome.storage.local.set({ filter_breeds: breeds });
    }
  }

  /** Persists the search zip code under the `zip_code` key. */
  setZip(zip: string) {
    if (this.chromeStorageSwitch) {
      chrome.storage.local.set({ zip_code: zip });
    }
  }

  /**
   * Marks a pet as dismissed (stores the sentinel 'D' under its id) and
   * removes it from the watched list for its type.
   */
  dismissPet(id, type) {
    if (this.chromeStorageSwitch) {
      this.removeFromWatchedList(id, type);
      chrome.storage.local.set({ [id]: 'D' });
    }
  }

  /** Stores the full pet record keyed by its id. */
  setPetInfo(petInfo) {
    if (this.chromeStorageSwitch) {
      chrome.storage.local.set({ [petInfo.id]: petInfo });
    }
  }

  /**
   * Removes `id` from the watched-id list stored under `type`. A no-op when
   * no list exists yet; logs (but still rewrites the list) when the id is
   * not present.
   */
  removeFromWatchedList(id, type) {
    if (!this.chromeStorageSwitch) {
      return;
    }
    chrome.storage.local.get([type], (result) => {
      const storedWatchedList = result[type];
      if (storedWatchedList === undefined) {
        return;
      }
      const index = storedWatchedList.indexOf(id);
      if (index > -1) {
        storedWatchedList.splice(index, 1);
      } else {
        console.log('trying to remove id from watch list, but does not exist: ' + id);
      }
      chrome.storage.local.set({ [type]: storedWatchedList });
    });
  }

  /** Appends `id` to the watched-id list stored under `type`, creating it if needed. */
  addToWatchedList(id, type) {
    if (!this.chromeStorageSwitch) {
      return;
    }
    chrome.storage.local.get([type], (result) => {
      const watched = result[type] === undefined ? [] : result[type];
      watched.push(id);
      chrome.storage.local.set({ [type]: watched });
    });
  }

  /**
   * Resolves with the stored pet records for every watched id of the given
   * type ('all' expands to dog + cat). Resolves with [] when nothing is
   * watched or storage is disabled.
   */
  getWatchedPets(type: string) {
    if (!this.chromeStorageSwitch) {
      return Promise.resolve([]);
    }
    return new Promise((resolve) => {
      const petTypes = type === 'all' ? ['dog', 'cat'] : [type];
      chrome.storage.local.get(petTypes, (results) => {
        let watchedList = [];
        petTypes.forEach((petType) => {
          if (results[petType] !== undefined) {
            watchedList = watchedList.concat(results[petType]);
          }
        });
        if (watchedList.length === 0) {
          resolve([]);
          return;
        }
        // Second lookup: resolve each watched id to its stored pet record.
        chrome.storage.local.get(watchedList, (result) => {
          resolve(watchedList.map((watchedPet) => result[watchedPet]));
        });
      });
    });
  }
}
|
/**
* Write a description of FourthRatings here.
*
* @author (your name)
* @version (a version number or a date)
*/
import edu.duke.*;
import java.util.*;
import org.apache.commons.csv.*;
public class FourthRatings {

    /**
     * Average rating for one movie over all raters in the database.
     *
     * @param id movie id
     * @param minimalRaters minimum number of raters required
     * @return the average, or 0.0 when fewer than minimalRaters rated it
     *         (0.0 is the "no rating" sentinel callers rely on)
     */
    private double getAverageByID(String id, int minimalRaters) {
        RaterDatabase database = new RaterDatabase();
        database.initialize("data/ratings_short.csv");
        double runningTotal = 0.0;
        int count = 0;
        for (Rater rater : database.getRaters()) {
            if (rater.getItemsRated().contains(id)) {
                runningTotal += rater.getRating(id);
                count += 1;
            }
        }
        // count > 0 also guards against 0/0 when minimalRaters <= 0.
        if (count > 0 && count >= minimalRaters) {
            return runningTotal / count;
        }
        return 0.0;
    }

    /** Average ratings for every movie; delegates to the filtered variant. */
    public ArrayList<Rating> getAverageRatings(int minimalRaters) {
        return getAverageRatingsByFilter(minimalRaters, new TrueFilter());
    }

    /** Average ratings for every movie matching {@code filterCriteria}. */
    public ArrayList<Rating> getAverageRatingsByFilter(int minimalRaters, Filter filterCriteria) {
        ArrayList<Rating> ratingsList = new ArrayList<Rating>();
        for (String movieId : MovieDatabase.filterBy(filterCriteria)) {
            ratingsList.add(new Rating(movieId, getAverageByID(movieId, minimalRaters)));
        }
        return ratingsList;
    }

    /**
     * Dot product of two raters' ratings, each centered around the scale
     * midpoint (5), taken over the movies both have rated. Assumes each
     * rater lists a movie at most once (getItemsRated has no duplicates).
     */
    private double dotProduct(Rater me, Rater r) {
        ArrayList<String> otherItems = r.getItemsRated();
        double dotProduct = 0.0;
        for (String item : me.getItemsRated()) {
            if (otherItems.contains(item)) {
                dotProduct += (me.getRating(item) - 5) * (r.getRating(item) - 5);
            }
        }
        return dotProduct;
    }

    /**
     * Similarity of every other rater to rater {@code id}, as (raterId,
     * dotProduct) ratings sorted most-similar first. Negatively correlated
     * raters are dropped.
     */
    private ArrayList<Rating> getSimilarities(String id) {
        RaterDatabase database = new RaterDatabase();
        database.initialize("data/ratings_short.csv");
        ArrayList<Rating> dotList = new ArrayList<Rating>();
        Rater me = database.getRater(id);
        for (Rater rater : database.getRaters()) {
            if (rater != me) {
                double dot = dotProduct(me, rater);
                if (dot >= 0.0) {
                    dotList.add(new Rating(rater.getID(), dot));
                }
            }
        }
        Collections.sort(dotList, Collections.reverseOrder());
        return dotList;
    }

    /** Weighted recommendations over all movies; delegates to the filtered variant. */
    public ArrayList<Rating> getSimilarRatings(String id, int numSimilarRaters, int minimalRaters) {
        return getSimilarRatingsByFilter(id, numSimilarRaters, minimalRaters, new TrueFilter());
    }

    /**
     * Similarity-weighted average rating per movie matching the filter,
     * using the top {@code numSimilarRaters} most similar raters, sorted
     * best-first.
     */
    public ArrayList<Rating> getSimilarRatingsByFilter(String id, int numSimilarRaters, int minimalRaters, Filter filterCriteria) {
        ArrayList<Rating> similarityList = getSimilarities(id);
        // BUG FIX: bound the loop by the list size — the original indexed
        // rating_list.get(i) for i < numSimilarRaters and threw
        // IndexOutOfBoundsException when fewer similar raters exist.
        int topRaters = Math.min(numSimilarRaters, similarityList.size());
        ArrayList<Rating> similarList = new ArrayList<Rating>();
        for (String movieId : MovieDatabase.filterBy(filterCriteria)) {
            int numRatings = 0;
            double weightTotal = 0.0;
            for (int i = 0; i < topRaters; i++) {
                Rating similarity = similarityList.get(i);
                Rater rater = RaterDatabase.getRater(similarity.getItem());
                if (rater.hasRating(movieId)) {
                    numRatings += 1;
                    weightTotal += rater.getRating(movieId) * similarity.getValue();
                }
            }
            // numRatings > 0 also guards against 0/0 when minimalRaters <= 0.
            if (numRatings > 0 && numRatings >= minimalRaters) {
                similarList.add(new Rating(movieId, weightTotal / numRatings));
            }
        }
        Collections.sort(similarList, Collections.reverseOrder());
        return similarList;
    }
}
|
#!/bin/bash
# Loads .env into the environment and (re)launches the docker-compose stack.

# Abort if the working directory is missing rather than running compose
# against whatever directory the script happened to start in.
cd /twio/scripts/ || exit 1

import_env(){
  # Import .env vars -> Carries over to docker-compose.yml
  FILE=./.env
  # Quote "$FILE" so the test is well-formed even if the path ever contains
  # spaces or is empty.
  if [ -f "$FILE" ]; then
    echo "Loading variables from $FILE"
    # allexport marks every variable assigned by `source` for export.
    set -o allexport
    source "$FILE"
    set +o allexport
  else
    echo "Please setup a .env file according to the README.md"
    exit 1
  fi
  echo "Variables loaded!"
}

launch(){
  # Tear down any running stack, then recreate it detached.
  docker-compose down && docker-compose up -d
}

import_env
launch
|
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.rest.model.device.command;
import java.util.UUID;
import com.sitewhere.spi.device.command.DeviceStreamStatus;
import com.sitewhere.spi.device.command.IDeviceStreamAckCommand;
import com.sitewhere.spi.device.command.SystemCommandType;
/**
* Command sent to a device to indicate status of creating a device stream.
*/
public class DeviceStreamAckCommand extends SystemCommand implements IDeviceStreamAckCommand {

    /** Serial version UID */
    private static final long serialVersionUID = -6363023316760034867L;

    /** Identifier of the device stream this acknowledgement refers to */
    private UUID streamId;

    /** Outcome of the stream-creation attempt */
    private DeviceStreamStatus status;

    /** Tags this system command as a device-stream acknowledgement. */
    public DeviceStreamAckCommand() {
        super(SystemCommandType.DeviceStreamAck);
    }

    /*
     * @see com.sitewhere.spi.device.command.IDeviceStreamAckCommand#getStreamId()
     */
    @Override
    public UUID getStreamId() {
        return this.streamId;
    }

    public void setStreamId(UUID streamId) {
        this.streamId = streamId;
    }

    /*
     * @see com.sitewhere.spi.device.command.IDeviceStreamAckCommand#getStatus()
     */
    @Override
    public DeviceStreamStatus getStatus() {
        return this.status;
    }

    public void setStatus(DeviceStreamStatus status) {
        this.status = status;
    }
}
|
#!/bin/bash
#
# Kubernetes Join Worker
#
# Joins this machine to an existing Kubernetes master: remounts the master's
# NFS data share, copies credentials from it, then waits for the master's
# generated join script and executes it.
# NOTE(review): SERVER_IP is parsed out of the cloud-init datasource string —
# confirm the datasource format on the target cloud before reuse.
SERVER_IP=$(sudo cat /var/lib/cloud/instance/datasource | cut -d: -f3 | cut -d/ -f3)
# Master name is derived from fields 3-4 of this host's hyphenated hostname.
MASTER=$(hostname | cut -d- -f 3,4)
# Is a master available?
if [ "${SERVER_IP}" != "" ] && [ "${MASTER}" != "" ]
then
  # Mount the master's data share instead of the worker node's own
  sudo umount /home/ubuntu/data
  sudo mount -t nfs ${SERVER_IP}:/data/storage/${MASTER} /home/ubuntu/data/
  sudo sed -i -e "s/$(hostname)/${MASTER}/g" /etc/fstab
  # Use the same password and ssh key as the master
  sudo chpasswd <<<ubuntu:$(cat /home/ubuntu/data/.ssh/passwd)
  cat /home/ubuntu/data/.ssh/id_rsa.pub >>/home/ubuntu/.ssh/authorized_keys
  # Loop until the master is ready; timeout of two minutes (60 x 2s)
  for i in {1..60}
  do
    if [ -f /home/ubuntu/data/join-${MASTER}.sh ]
    then
      sudo bash -x /home/ubuntu/data/join-${MASTER}.sh
      break
    fi
    sleep 2
  done
fi
## Hint on how to join manually, in case the automatic join did not work
# kubelet.conf only exists after a successful join.
if [ -f /etc/kubernetes/kubelet.conf ]
then
cat <<%EOF% | sudo tee README.md
### Kubernetes Worker Node
Worker Node von Kubernetes ${MASTER} Master
%EOF%
else
cat <<%EOF% | sudo tee README.md
### Kubernetes Worker Node
Um die Worker Node mit dem Master zu verbinden, ist auf dem Master folgender Befehl zu starten:
sudo kubeadm token create --print-join-command
Dieser gibt den Befehl aus, der auf jedem Worker Node zu starten ist.
%EOF%
fi
bash -x helper/intro
|
<reponame>Krzychu81/knex-expo<filename>lib/helpers.js
'use strict';
exports.__esModule = true;
var _isTypedArray2 = require('lodash/isTypedArray');
var _isTypedArray3 = _interopRequireDefault(_isTypedArray2);
var _isArray2 = require('lodash/isArray');
var _isArray3 = _interopRequireDefault(_isArray2);
var _isPlainObject2 = require('lodash/isPlainObject');
var _isPlainObject3 = _interopRequireDefault(_isPlainObject2);
var _isUndefined2 = require('lodash/isUndefined');
var _isUndefined3 = _interopRequireDefault(_isUndefined2);
var _isFunction2 = require('lodash/isFunction');
var _isFunction3 = _interopRequireDefault(_isFunction2);
var _keys2 = require('lodash/keys');
var _keys3 = _interopRequireDefault(_keys2);
var _pick2 = require('lodash/pick');
var _pick3 = _interopRequireDefault(_pick2);
var _map2 = require('lodash/map');
var _map3 = _interopRequireDefault(_map2);
exports.skim = skim;
exports.normalizeArr = normalizeArr;
exports.debugLog = debugLog;
exports.error = error;
exports.deprecate = deprecate;
exports.warn = warn;
exports.exit = exit;
exports.containsUndefined = containsUndefined;
exports.addQueryContext = addQueryContext;
// Babel-style ES-module interop shim: wraps CommonJS exports in { default }.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// import chalk from 'chalk';
// Pick off the attributes from only the current layer of the object.
/* eslint no-console:0 */
function skim(data) {
  return (0, _map3.default)(data, function (obj) {
    // pick(obj, keys(obj)) copies only obj's own enumerable properties,
    // dropping anything inherited from the prototype.
    return (0, _pick3.default)(obj, (0, _keys3.default)(obj));
  });
}
// Check if the first argument is an array, otherwise uses all arguments as an
// array.
function normalizeArr() {
  var collected = Array.prototype.slice.call(arguments);
  return Array.isArray(collected[0]) ? collected[0] : collected;
}
// Plain passthrough logger used when knex debugging is enabled.
function debugLog(msg) {
  console.log(msg);
}
// Logs an error message with a 'Knex:Error' prefix (does not throw).
function error(msg) {
  console.log('Knex:Error ' + msg);
}
// Used to signify deprecated functionality.
function deprecate(method, alternate) {
  warn(method + ' is deprecated, please use ' + alternate);
}
// Used to warn about incorrect use, without error'ing
function warn(msg) {
  console.log('Knex:warning - ' + msg);
}
// Logs a final message, then terminates the process with a failure code.
function exit(msg) {
  console.log(msg);
  process.exit(1);
}
/**
 * Recursively checks whether `mixed` contains an `undefined` value anywhere
 * inside an array or plain object. Typed arrays never contain undefined;
 * anything with a toSQL method (QueryBuilder/Raw) is validated at compile
 * time instead, so it is skipped here.
 */
function containsUndefined(mixed) {
  var argContainsUndefined = false;
  if ((0, _isTypedArray3.default)(mixed)) return false;
  if (mixed && (0, _isFunction3.default)(mixed.toSQL)) {
    //Any QueryBuilder or Raw will automatically be validated during compile.
    return argContainsUndefined;
  }
  if ((0, _isArray3.default)(mixed)) {
    for (var i = 0; i < mixed.length; i++) {
      if (argContainsUndefined) break;
      // BUG FIX: recurse via the function name rather than `this`. The file
      // is strict-mode, so a bare/destructured call has `this === undefined`
      // and `this.containsUndefined` threw a TypeError.
      argContainsUndefined = containsUndefined(mixed[i]);
    }
  } else if ((0, _isPlainObject3.default)(mixed)) {
    for (var key in mixed) {
      if (mixed.hasOwnProperty(key)) {
        if (argContainsUndefined) break;
        argContainsUndefined = containsUndefined(mixed[key]);
      }
    }
  } else {
    argContainsUndefined = (0, _isUndefined3.default)(mixed);
  }
  return argContainsUndefined;
}
function addQueryContext(Target) {
  // Combined getter/setter for the per-query context that is later handed to
  // the wrapIdentifier and postProcessResponse hooks: with no argument it
  // returns the stored context, otherwise it stores the argument and returns
  // `this` for chaining.
  Target.prototype.queryContext = function (context) {
    if (context === undefined) {
      return this._queryContext;
    }
    this._queryContext = context;
    return this;
  };
}
|
import tensorflow as tf
from tensorflow.keras.datasets import mnist

# Load the MNIST digit data: 28x28 grayscale images with integer labels 0-9.
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# Add the trailing channel axis that Conv2D expects. Using -1 instead of the
# hard-coded sample counts (60000/10000) keeps this correct for any split size.
X_train = X_train.reshape(-1, 28, 28, 1)
X_test = X_test.reshape(-1, 28, 28, 1)

# Scale pixel values from [0, 255] down to [0, 1].
X_train = X_train / 255.0
X_test = X_test / 255.0

# Small conv net: one conv/pool stage followed by a dense classifier head.
model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax'),
])

# sparse_categorical_crossentropy: labels are integer class ids, not one-hot.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

# Train the model
model.fit(X_train, y_train, epochs=5)

# Evaluate on the held-out test split.
model.evaluate(X_test, y_test)
|
import React from 'react';
import {View, Text, StyleSheet, Alert, Platform} from 'react-native';
import { Constants, Location, Permissions } from 'expo';
export default class App extends React.Component {
constructor(props) {
super(props);
this.state = {
location: null
};
}
componentDidMount() {
if (Platform.OS === 'android' && !Constants.isDevice) {
Alert.alert('Oops', 'This will not work on an Android simulator. Try it on your device!');
} else {
this._getLocationAsync();
}
}
_getLocationAsync = async () => {
let {status } = await Permissions.askAsync(Permissions.LOCATION);
if (status !== 'granted') {
Alert.alert('Permission to access location was denied');
}
let location = await Location.getCurrentPositionAsync({});
this.setState({ location });
};
render() {
let text = 'Waiting..';
if (this.state.errorMessage) {
text = this.state.errorMessage;
} else if (this.state.location) {
text = `Latitude: ${this.state.location.coords.latitude}\nLongitude: ${this.state.location.coords.longitude}`;
}
return (
<View style={styles.container}>
<Text style={styles.paragraph}>{text}</Text>
</View>
);
}
}
// Shared styles: a full-screen centered container offset below the status
// bar, and a bold centered paragraph for the status/coordinate text.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'center',
    paddingTop: Constants.statusBarHeight,
    backgroundColor: '#ecf0f1',
  },
  paragraph: {
    margin: 24,
    fontSize: 18,
    fontWeight: 'bold',
    textAlign: 'center',
    color: '#34495e',
  },
});
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_view_column_twotone = void 0;
var ic_view_column_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M6 7h3v9H6zm5 0h3v9h-3zm5 0h3v9h-3z",
"opacity": ".3"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M4 5v13h17V5H4zm5 11H6V7h3v9zm5 0h-3V7h3v9zm5 0h-3V7h3v9z"
},
"children": []
}]
};
exports.ic_view_column_twotone = ic_view_column_twotone;
|
<reponame>pk762/epizza<gh_stars>0
package epizza.order.checkout;
import com.google.common.base.MoreObjects;
import javax.money.MonetaryAmount;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.Basic;
import javax.persistence.Embeddable;
import javax.persistence.FetchType;
import javax.persistence.ManyToOne;
import epizza.order.catalog.Pizza;
@Embeddable
@Access(AccessType.FIELD)
// FIXME introduce lombok
/**
 * One line of an order: a pizza, how many of it, and the derived line price.
 * Instances are immutable after construction; use {@link #builder()}.
 */
public class OrderItem {

    /** The ordered pizza; eagerly fetched because items are always rendered with it. */
    @ManyToOne(fetch = FetchType.EAGER, optional = false)
    private Pizza pizza;

    @Basic(optional = false)
    private Integer quantity;

    /** Line price = pizza unit price * quantity (derived by the builder). */
    @Basic(optional = false)
    private MonetaryAmount price;

    private OrderItem() {
        // JPA needs this
    }

    private OrderItem(Pizza pizza, Integer quantity, MonetaryAmount price) {
        this.pizza = pizza;
        this.quantity = quantity;
        this.price = price;
    }

    public Pizza getPizza() {
        return pizza;
    }

    public Integer getQuantity() {
        return quantity;
    }

    public MonetaryAmount getPrice() {
        return price;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("pizza", pizza)
                .add("quantity", quantity)
                .toString();
    }

    public static OrderItemBuilder builder() {
        return new OrderItemBuilder();
    }

    public static class OrderItemBuilder {
        private Pizza pizza;
        private Integer quantity;
        private MonetaryAmount price;

        public OrderItemBuilder pizza(Pizza pizza) {
            this.pizza = pizza;
            return this;
        }

        public OrderItemBuilder price(MonetaryAmount price) {
            this.price = price;
            return this;
        }

        /**
         * Sets the quantity and derives the line price from the pizza's unit
         * price; {@link #pizza(Pizza)} must therefore be called first.
         *
         * @throws IllegalStateException if no pizza has been set yet (the
         *         original failed here with an opaque NullPointerException)
         */
        public OrderItemBuilder quantity(Integer quantity) {
            if (pizza == null) {
                throw new IllegalStateException("pizza must be set before quantity so the price can be derived");
            }
            this.quantity = quantity;
            return price(pizza.getPrice().multiply(quantity));
        }

        public OrderItem build() {
            return new OrderItem(pizza, quantity, price);
        }
    }
}
|
import React from 'react';
import { Menu, MenuContent, MenuList, MenuItem, Divider, DrilldownMenu } from '@patternfly/react-core';
import StorageDomainIcon from '@patternfly/react-icons/dist/esm/icons/storage-domain-icon';
import CodeBranchIcon from '@patternfly/react-icons/dist/esm/icons/code-branch-icon';
import LayerGroupIcon from '@patternfly/react-icons/dist/esm/icons/layer-group-icon';
import CubeIcon from '@patternfly/react-icons/dist/esm/icons/cube-icon';
export const MenuDrilldownSubmenuFunctions: React.FunctionComponent = () => {
  // Ids of menus the user has drilled into (ancestors of the active menu).
  const [menuDrilledIn, setMenuDrilledIn] = React.useState<string[]>([]);
  // Ids of the items that were clicked to drill in, in order.
  const [drilldownPath, setDrilldownPath] = React.useState<string[]>([]);
  // Measured pixel height per menu id, so the container can animate between levels.
  const [menuHeights, setMenuHeights] = React.useState<any>({});
  const [activeMenu, setActiveMenu] = React.useState<string>('rootMenu');

  // Push the source menu and clicked item onto the stacks, then activate the target.
  const drillIn = (fromMenuId: string, toMenuId: string, pathId: string) => {
    setMenuDrilledIn([...menuDrilledIn, fromMenuId]);
    setDrilldownPath([...drilldownPath, pathId]);
    setActiveMenu(toMenuId);
  };

  // Pop one level off both stacks and activate the parent menu.
  const drillOut = (toMenuId: string) => {
    const menuDrilledInSansLast = menuDrilledIn.slice(0, menuDrilledIn.length - 1);
    const pathSansLast = drilldownPath.slice(0, drilldownPath.length - 1);
    setMenuDrilledIn(menuDrilledInSansLast);
    setDrilldownPath(pathSansLast);
    setActiveMenu(toMenuId);
  };

  // Record a menu's height once; heights never change after first measurement.
  const setHeight = (menuId: string, height: number) => {
    if (menuHeights[menuId] === undefined) {
      setMenuHeights({ ...menuHeights, [menuId]: height });
    }
  };

  return (
    <Menu
      id="rootMenu"
      containsDrilldown
      drilldownItemPath={drilldownPath}
      drilledInMenus={menuDrilledIn}
      activeMenu={activeMenu}
      onDrillIn={drillIn}
      onDrillOut={drillOut}
      onGetMenuHeight={setHeight}
    >
      <MenuContent menuHeight={`${menuHeights[activeMenu]}px`}>
        <MenuList>
          <MenuItem
            itemId="group:start_rollout"
            direction="down"
            drilldownMenu={() => (
              <DrilldownMenu id="drilldownMenuStart">
                <MenuItem itemId="group:start_rollout_breadcrumb" direction="up">
                  Start rollout
                </MenuItem>
                <Divider component="li" />
                <MenuItem
                  itemId="group:app_grouping"
                  description="Groups A-C"
                  direction="down"
                  drilldownMenu={() => (
                    <DrilldownMenu id="drilldownMenuStartGrouping">
                      <MenuItem itemId="group:app_grouping_breadcrumb" direction="up">
                        Application grouping
                      </MenuItem>
                      <Divider component="li" />
                      <MenuItem itemId="group_a">Group A</MenuItem>
                      <MenuItem itemId="group_b">Group B</MenuItem>
                      <MenuItem itemId="group_c">Group C</MenuItem>
                    </DrilldownMenu>
                  )}
                >
                  Application grouping
                </MenuItem>
                <MenuItem itemId="count">Count</MenuItem>
                <MenuItem
                  itemId="group:labels"
                  direction="down"
                  drilldownMenu={() => (
                    <DrilldownMenu id="drilldownMenuStartLabels">
                      <MenuItem itemId="group:labels_breadcrumb" direction="up">
                        Labels
                      </MenuItem>
                      <Divider component="li" />
                      <MenuItem itemId="label_1">Label 1</MenuItem>
                      <MenuItem itemId="label_2">Label 2</MenuItem>
                      <MenuItem itemId="label_3">Label 3</MenuItem>
                    </DrilldownMenu>
                  )}
                >
                  Labels
                </MenuItem>
                <MenuItem itemId="annotations">Annotations</MenuItem>
              </DrilldownMenu>
            )}
          >
            Start rollout
          </MenuItem>
          <MenuItem
            itemId="group:pause_rollout"
            direction="down"
            drilldownMenu={() => (
              <DrilldownMenu id="drilldownMenuPause">
                <MenuItem itemId="group:pause_rollout_breadcrumb" direction="up">
                  Pause rollouts
                </MenuItem>
                <Divider component="li" />
                <MenuItem
                  itemId="group:app_grouping"
                  description="Groups A-C"
                  direction="down"
                  drilldownMenu={() => (
                    <DrilldownMenu id="drilldownMenuGrouping">
                      <MenuItem itemId="group:app_grouping_breadcrumb" direction="up">
                        Application grouping
                      </MenuItem>
                      <Divider component="li" />
                      <MenuItem itemId="group_a">Group A</MenuItem>
                      <MenuItem itemId="group_b">Group B</MenuItem>
                      <MenuItem itemId="group_c">Group C</MenuItem>
                    </DrilldownMenu>
                  )}
                >
                  Application grouping
                </MenuItem>
                <MenuItem itemId="count">Count</MenuItem>
                <MenuItem
                  itemId="group:labels"
                  direction="down"
                  drilldownMenu={() => (
                    <DrilldownMenu id="drilldownMenuLabels">
                      <MenuItem itemId="group:labels_breadcrumb" direction="up">
                        Labels
                      </MenuItem>
                      <Divider component="li" />
                      <MenuItem itemId="label_1">Label 1</MenuItem>
                      <MenuItem itemId="label_2">Label 2</MenuItem>
                      <MenuItem itemId="label_3">Label 3</MenuItem>
                    </DrilldownMenu>
                  )}
                >
                  Labels
                </MenuItem>
                <MenuItem itemId="annotations">Annotations</MenuItem>
              </DrilldownMenu>
            )}
          >
            Pause rollouts
          </MenuItem>
          <MenuItem
            itemId="group:storage"
            icon={<StorageDomainIcon aria-hidden />}
            direction="down"
            drilldownMenu={() => (
              <DrilldownMenu id="drilldownMenuStorage">
                <MenuItem itemId="group:storage_breadcrumb" icon={<StorageDomainIcon aria-hidden />} direction="up">
                  Add storage
                </MenuItem>
                <Divider component="li" />
                <MenuItem icon={<CodeBranchIcon aria-hidden />} itemId="git">
                  From git
                </MenuItem>
                <MenuItem icon={<LayerGroupIcon aria-hidden />} itemId="container">
                  Container image
                </MenuItem>
                <MenuItem icon={<CubeIcon aria-hidden />} itemId="docker">
                  Docker file
                </MenuItem>
              </DrilldownMenu>
            )}
          >
            Add storage
          </MenuItem>
          <MenuItem itemId="edit">Edit</MenuItem>
          <MenuItem itemId="delete_deployment">Delete deployment config</MenuItem>
        </MenuList>
      </MenuContent>
    </Menu>
  );
};
|
module Gitrob
  module Github
    # Wraps the GitHub API for a single repository: lazily fetches the file
    # tree and repo metadata, and converts them into Gitrob models.
    class Repository
      attr_reader :owner, :name, :http_client

      def initialize(owner, name, http_client)
        @owner, @name, @http_client = owner, name, http_client
      end

      # Blobs in the repository's tree, memoized in @contents. Returns []
      # when the repo is empty (409) or inaccessible (404); other HTTP
      # errors are re-raised.
      # NOTE(review): the branch is hard-coded to 'master' — repositories
      # with a different default branch will hit the 404 path here.
      def contents
        if !@contents
          @contents = []
          response = JSON.parse(http_client.do_get("/repos/#{owner}/#{name}/git/trees/master?recursive=1").body)
          response['tree'].each do |object|
            # Only files (blobs); directories/submodules are skipped.
            next unless object['type'] == 'blob'
            @contents << Blob.new(object['path'], object['size'], self)
          end
        end
        @contents
      rescue HttpClient::ClientError => ex
        if ex.status == 409 || ex.status == 404
          @contents = []
        else
          raise ex
        end
      end

      def full_name
        [owner, name].join('/')
      end

      def url
        info['html_url']
      end

      def description
        info['description']
      end

      def website
        info['homepage']
      end

      # Builds an unsaved Gitrob::Repo record from the fetched metadata.
      def to_model(organization, user = nil)
        Gitrob::Repo.new(
          :name => self.name,
          :owner_name => self.owner,
          :description => self.description,
          :website => self.website,
          :url => self.url,
          :organization => organization,
          :user => user
        )
      end

      # Persists the model; save failures are logged, not raised.
      def save_to_database!(organization, user = nil)
        self.to_model(organization, user).tap { |m| m.save }
      rescue DataMapper::SaveFailureError => e
        puts e.resource.errors.inspect
      end

      private

      # Repo metadata from the GitHub API, memoized in @info.
      def info
        if !@info
          @info = JSON.parse(http_client.do_get("/repos/#{owner}/#{name}").body)
        end
        @info
      end
    end
  end
end
|
#!/bin/bash
# Collects a diagnostic report for a Strimzi Kafka cluster.
# BUG FIX: shebang changed from /bin/sh to /bin/bash — the script relies on
# arrays, [[ ]] and &>, none of which POSIX sh guarantees (under dash,
# `oc &>/dev/null` even backgrounds the command).
oc_installed=false
kubectl_installed=false
platform="kubectl"
secrets="hidden"
# Probe for each CLI with `command -v` instead of executing it: running the
# tool with no arguments conflates "not installed" with "exited non-zero".
if command -v oc >/dev/null 2>&1; then
  oc_installed=true
  platform="oc"
fi
if command -v kubectl >/dev/null 2>&1; then # we will use kubectl with priority (?)
  kubectl_installed=true
  platform="kubectl"
fi
if [[ $oc_installed = false && $kubectl_installed = false ]]; then
  echo "There is no oc or kubectl installed"
  exit 1
fi

usage() {
  echo "Usage: $0 --namespace=<string> --cluster=<string> --secrets=(off|hidden|all)" 1>&2;
  echo "Default level of secret verbosity is 'hidden' (only secret key will be reported)."
  exit 1;
}

# Parse long options of the form --name=value via getopts' '-' pseudo-option.
optspec=":-:"
while getopts "$optspec" optchar; do
  case "${optchar}" in
    -)
      case "${OPTARG}" in
        cluster=*)
          cluster=${OPTARG#*=}
          ;;
        namespace=*)
          namespace=${OPTARG#*=}
          ;;
        secrets=*)
          secrets=${OPTARG#*=}
          ;;
        *)
          usage
          ;;
      esac;;
  esac
done
shift $((OPTIND-1))

# Quote expansions so empty values don't malform the [ ] tests.
if [ -z "$cluster" ] && [ -z "$namespace" ]; then
  echo "--cluster and --namespace are mandatory options."
  usage
fi
if [ -z "$secrets" ]; then
  secrets="hidden"
fi
if [ "$secrets" != "all" ] && [ "$secrets" != "off" ] && [ "$secrets" != "hidden" ]; then
  echo "Unknown secrets verbosity level. Use one of 'off', 'hidden' or 'all'."
  echo " 'all' - secret keys and data values are reported"
  echo " 'hidden' - secrets with only data keys are reported"
  echo " 'off' - secrets are not reported at all"
  echo "Default value is 'hidden'"
  usage
fi
if [ -z "$cluster" ]; then
  echo "Cluster was not specified. Use --cluster option to specify it."
  usage
fi
if [ -z "$namespace" ]; then
  echo "Namespace was not specified. Use --namespace option to specify it."
  usage
fi
# Scratch directory that the report tree is assembled in.
direct=`mktemp -d`

# Namespaced resource kinds dumped for the cluster.
resources_to_fetch=(
  "deployments"
  "statefulsets"
  "replicasets"
  "configmaps"
  "secrets"
  "services"
  "poddisruptionbudgets"
  "roles"
  "rolebindings"
  "networkpolicies"
  "routes"
  "pods"
  "persistentvolumeclaims"
)
if [ "$secrets" = "off" ]; then
  # Blanks out (does not reindex) the secrets entry; the consumer loop below
  # skips empty entries.
  resources_to_fetch=( "${resources_to_fetch[@]/secrets}" )
fi

# Dumps each secret of kind $1 with every data value replaced by '*****',
# leaving only the data keys visible.
get_masked_secrets() {
  mkdir -p $direct/reports/"$1"
  resources=$($platform get $1 -l strimzi.io/cluster=$cluster -o name -n $namespace)
  for line in $resources; do
    filename=`echo $line | cut -f 2 -d "/"`
    echo " "$line
    # BUG FIX: use $platform here — this previously hard-coded `oc`, which
    # fails on machines that only have kubectl installed.
    original_data=`$platform get $line -o=jsonpath='{.data}' | cut -c5-`
    # Split the data map into lines to extract the key names.
    SAVEIFS=$IFS
    IFS=$'\n'
    original_data=($original_data)
    IFS=$SAVEIFS
    data_entries=()
    for data in $original_data; do
      entry=`printf "${data}" | sed 's/\(\s*.*\s*\):\s*.*/\1/g'`
      data_entries+=( "$entry" )
    done;
    secret=`$platform get $line -o yaml -n $namespace`
    # Mask each data value, then any password / *.key looking fields.
    for data_key in "${data_entries[@]}"; do
      secret=`printf "$secret" | sed "s/\s*$data_key\s*:\s*.*/ $data_key: *****/"`
    done;
    printf "$secret" | sed 's/^\(\s*password\s*:\s*\).*\n/\1*****/' \
      | sed 's/^\(\s*.*\.key\s*:\s*\).*/\1*****/' > $direct/reports/$1/"$filename".yaml
  done;
}
# Cluster-scoped resource kinds, collected separately (no namespace).
nonnamespaced_resources_to_fetch=(
  "clusterroles"
  "clusterrolebindings"
)

# Dumps every namespaced resource of kind $1 labeled with this cluster to
# $direct/reports/<kind>/<name>.yaml, masking password / *.key values.
get_namespaced_yamls() {
  mkdir -p $direct/reports/"$1"
  resources=$($platform get $1 -l strimzi.io/cluster=$cluster -o name -n $namespace)
  echo "$1"
  for line in $resources; do
    filename=`echo $line | cut -f 2 -d "/"`
    echo " "$line
    $platform get $line -o yaml -n $namespace | sed 's/^\(\s*password\s*:\s*\).*/\1*****/' \
      | sed 's/^\(\s*.*\.key\s*:\s*\).*/\1*****/' > $direct/reports/"$1"/"$filename".yaml
  done;
}

# Fetch every selected kind; secrets get the masked treatment when 'hidden'.
# Empty entries (secrets blanked when --secrets=off) are skipped.
for res in "${resources_to_fetch[@]}"; do
  if [ "$res" = "secrets" ] && [ "$secrets" = "hidden" ]; then
    get_masked_secrets "secrets"
  else
    if [ ! -z "$res" ]; then
      get_namespaced_yamls "$res"
    fi
  fi
done;
# Dumps every cluster-scoped resource of kind $1 labeled app=strimzi to
# $direct/reports/<kind>/<name>.yaml, masking password / *.key values.
get_nonnamespaced_yamls() {
  mkdir -p $direct/reports/"$1"
  resources=$($platform get $1 -l app=strimzi -o name -n $namespace)
  echo "$1"
  for line in $resources; do
    filename=`echo $line | cut -f 2 -d "/"`
    echo " "$line
    $platform get $line -o yaml | sed 's/^\(\s*password\s*:\s*\).*/\1*****/' \
      | sed 's/^\(\s*.*\.key\s*:\s*\).*/\1*****/' > $direct/reports/"$1"/"$filename".yaml
  done;
}

for res in "${nonnamespaced_resources_to_fetch[@]}"; do
  get_nonnamespaced_yamls "$res"
done;
mkdir -p $direct/reports/podLogs
mkdir -p $direct/reports/configs

# Per-pod logs (current and previous container instance) plus broker/ZK
# configs, with containers chosen by the pod's naming convention.
echo "Pod logs:"
pods=$($platform get pods -l strimzi.io/cluster=$cluster -o name -n $namespace | cut -d "/" -f 2)
for pod in $pods; do
  echo " "$pod
  if [[ $pod == *"-entity-operator-"* ]]; then
    $platform logs $pod -c tls-sidecar -n $namespace > $direct/reports/podLogs/"$pod"-tls-sidecar.log
    $platform logs $pod -c topic-operator -n $namespace > $direct/reports/podLogs/"$pod"-topic-operator.log
    $platform logs $pod -c user-operator -n $namespace > $direct/reports/podLogs/"$pod"-user-operator.log
    $platform logs $pod -p -c tls-sidecar -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-"$pod"-tls-sidecar.log
    $platform logs $pod -p -c topic-operator -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-"$pod"-topic-operator.log
    $platform logs $pod -p -c user-operator -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-"$pod"-user-operator.log
  elif [[ $pod =~ .*-kafka-[0-9]+ ]]; then
    $platform logs $pod -c tls-sidecar -n $namespace > $direct/reports/podLogs/"$pod"-tls-sidecar.log
    $platform logs $pod -c kafka -n $namespace > $direct/reports/podLogs/"$pod"-kafka.log
    $platform logs $pod -p -c tls-sidecar -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-"$pod"-tls-sidecar.log
    $platform logs $pod -p -c kafka -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-"$pod"-kafka.log
    $platform exec -i $pod -n $namespace -c kafka -- cat /tmp/strimzi.properties > $direct/reports/configs/"$pod".cfg
  elif [[ $pod =~ .*-zookeeper-[0-9]+ ]]; then
    $platform logs $pod -c tls-sidecar -n $namespace > $direct/reports/podLogs/"$pod"-tls-sidecar.log
    $platform logs $pod -c zookeeper -n $namespace > $direct/reports/podLogs/"$pod"-zookeeper.log
    $platform logs $pod -p -c tls-sidecar -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-"$pod"-tls-sidecar.log
    $platform logs $pod -p -c zookeeper -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-"$pod"-zookeeper.log
    $platform exec -i $pod -n $namespace -c zookeeper -- cat /tmp/zookeeper.properties > $direct/reports/configs/"$pod".cfg
  elif [[ $pod == *"-kafka-exporter-"* || $pod == *"-connect-"* || $pod == *"-bridge-"* || $pod == *"-mirror-maker-"* ]]; then
    $platform logs $pod -n $namespace > $direct/reports/podLogs/"$pod".log
    $platform logs $pod -p -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-"$pod".log
  fi
done;

# getting CO deployment from the pod
$platform get deployment strimzi-cluster-operator -o yaml -n $namespace > $direct/reports/deployments/cluster-operator.yaml
$platform get pod -l strimzi.io/kind=cluster-operator -o yaml -n $namespace > $direct/reports/pods/cluster-operator.yaml
co_pod=$($platform get pod -l strimzi.io/kind=cluster-operator -o name -n $namespace)
$platform logs $co_pod -n $namespace > $direct/reports/podLogs/cluster-operator.log
$platform logs $co_pod -p -n $namespace 2>/dev/null > $direct/reports/podLogs/previous-cluster-operator.log

# getting CO replicaset
co_rs=$($platform get replicaset -l strimzi.io/kind=cluster-operator -o name -n $namespace)
$platform describe $co_rs -n $namespace > $direct/reports/replicasets/cluster-operator-replicaset.yaml

#Kafka, KafkaConnect, KafkaConnectS2i, KafkaTopic, KafkaUser, KafkaMirrorMaker
# Dump every instance of each Strimzi custom resource in the namespace.
echo "CRs:"
mkdir -p $direct/reports/crs
crs=$($platform get crd -o name)
for line in $crs; do
  cr=`echo $line | cut -d "/" -f 2`
  if [[ $cr == *"kafka.strimzi.io" ]]; then
    resources=$($platform get $cr -o name -n $namespace | cut -d "/" -f 2)
    if [[ -n "$resources" ]]; then
      echo $cr
      for line in $resources; do
        resource=`echo $line | cut -f 1 -d " "`
        $platform get $cr $resource -n $namespace -o yaml > $direct/reports/crs/"$cr"-"$resource".yaml
        echo " "$resource
      done;
    fi
  fi
done;

# Dump the Strimzi CRD definitions themselves.
echo "CRDs:"
mkdir -p $direct/reports/crds
crds=$($platform get crd -o name)
for line in $crds; do
  crd=`echo $line | cut -d "/" -f 2`
  if [[ $crd == *"kafka.strimzi.io" ]]; then
    echo " "$crd
    $platform get crd $crd -o yaml > $direct/reports/crds/"$crd".yaml
  fi
done;

# Namespace events, then zip the whole report tree into the current directory.
mkdir -p $direct/reports/events
$platform get event -n $namespace > /$direct/reports/events/events.yaml
filename=`date +"%d-%m-%Y_%H-%M-%S"`
filename=report-"$filename"
olddir=$(pwd)
cd /$direct
zip -qr $filename.zip ./reports/
cd $olddir
mv /$direct/$filename.zip ./
echo "Report file $filename.zip created"
|
<gh_stars>1-10
package gameClient.util;
import api.geo_location;
/**
 * An axis-aligned 2D range built from two 1D {@link Range} objects, one per
 * axis. Supports converting an absolute point into its relative position
 * inside the range ({@code getPortion}) and the inverse mapping
 * ({@code fromPortion}).
 */
public class Range2D {
    private Range xRange;
    private Range yRange;

    /** Builds a 2D range from defensive copies of the given axis ranges. */
    public Range2D(Range x, Range y) {
        this.xRange = new Range(x);
        this.yRange = new Range(y);
    }

    /** Deep-copy constructor. */
    public Range2D(Range2D other) {
        this.xRange = new Range(other.xRange);
        this.yRange = new Range(other.yRange);
    }

    /** Maps an absolute location to its relative (portion) coordinates; z is always 0. */
    public Point3D getPortion(geo_location p) {
        return new Point3D(xRange.getPortion(p.x()), yRange.getPortion(p.y()), 0);
    }

    /** Maps relative (portion) coordinates back to an absolute location; z is always 0. */
    public Point3D fromPortion(geo_location p) {
        return new Point3D(xRange.fromPortion(p.x()), yRange.fromPortion(p.y()), 0);
    }
}
|
-- Self-join of teams on school: lists distinct pairs of team names from the
-- same school whose `member` values differ, keeping only pairs backed by more
-- than 4 matching rows.
-- NOTE(review): each pair appears in both (A,B) and (B,A) order; add
-- t1.team_name < t2.team_name if only one ordering is wanted.
SELECT DISTINCT t1.team_name, t2.team_name
FROM teams t1
INNER JOIN teams t2 ON t1.school = t2.school
WHERE t1.member <> t2.member
GROUP BY t1.team_name, t2.team_name
HAVING COUNT(*) > 4;
|
import java.io.File;
import net.runelite.mapping.Export;
import net.runelite.mapping.Implements;
import net.runelite.mapping.ObfuscatedGetter;
import net.runelite.mapping.ObfuscatedName;
import net.runelite.mapping.ObfuscatedSignature;
// Deobfuscated RuneLite client class: definition of an inventory ("Inv")
// config entry loaded from the game cache. The "// L:" comments map back to
// the original obfuscated source lines — keep them intact.
@ObfuscatedName("js")
@Implements("InvDefinition")
public class InvDefinition extends DualNode {
@ObfuscatedName("f")
@ObfuscatedSignature(
descriptor = "Lir;"
)
@Export("InvDefinition_archive")
// Cache archive the inventory definitions are decoded from.
public static AbstractArchive InvDefinition_archive;
@ObfuscatedName("o")
@ObfuscatedSignature(
descriptor = "Lgf;"
)
@Export("InvDefinition_cached")
// LRU-style cache of already-decoded definitions (capacity 64, see static init).
static EvictingDualNodeHashTable InvDefinition_cached;
@ObfuscatedName("u")
@ObfuscatedGetter(
intValue = 651568811
)
@Export("size")
// Number of slots in this inventory (0 until opcode 2 is decoded).
public int size;
static {
InvDefinition_cached = new EvictingDualNodeHashTable(64); // L: 11
}
InvDefinition() {
this.size = 0; // L: 12
} // L: 14
@ObfuscatedName("o")
@ObfuscatedSignature(
descriptor = "(Lnu;B)V",
garbageValue = "86"
)
@Export("decode")
// Reads opcodes from the buffer until the 0 terminator, dispatching each to decodeNext().
void decode(Buffer var1) {
while (true) {
int var2 = var1.readUnsignedByte(); // L: 28
if (var2 == 0) { // L: 29
return; // L: 32
}
this.decodeNext(var1, var2); // L: 30
}
}
@ObfuscatedName("u")
@ObfuscatedSignature(
descriptor = "(Lnu;IB)V",
garbageValue = "46"
)
@Export("decodeNext")
// Handles a single opcode; only opcode 2 (inventory size) is defined, others are ignored.
void decodeNext(Buffer var1, int var2) {
if (var2 == 2) { // L: 35
this.size = var1.readUnsignedShort();
}
} // L: 37
@ObfuscatedName("f")
@ObfuscatedSignature(
descriptor = "(Ljava/io/File;I)V",
garbageValue = "1853362286"
)
@Export("assertCacheDirExists")
// Unrelated helper placed here by the obfuscator: records the cache dir and
// fails hard if it does not exist.
static void assertCacheDirExists(File var0) {
FileSystem.FileSystem_cacheDir = var0; // L: 16
if (!FileSystem.FileSystem_cacheDir.exists()) { // L: 17
throw new RuntimeException("");
} else {
FileSystem.FileSystem_hasPermissions = true; // L: 18
}
} // L: 19
}
|
// Copyright (c) 2021 rookie-ninja
//
// Use of this source code is governed by an Apache-style
// license that can be found in the LICENSE file.
// Package rkmidmeta is a middleware for metadata
package rkmidmeta
import (
"fmt"
"github.com/rookie-ninja/rk-common/common"
"github.com/rookie-ninja/rk-entry/entry"
"github.com/rookie-ninja/rk-entry/middleware"
"github.com/rookie-ninja/rk-query"
"github.com/rs/xid"
"strings"
"time"
)
// ***************** OptionSet Interface *****************

// OptionSetInterface mainly for testing purpose
type OptionSetInterface interface {
	// GetEntryName returns the entry name this middleware was registered under.
	GetEntryName() string

	// GetEntryType returns the entry type label.
	GetEntryType() string

	// Before runs the middleware logic against a previously built BeforeCtx.
	Before(*BeforeCtx)

	// BeforeCtx builds the context consumed by Before() from an event.
	BeforeCtx(rkquery.Event) *BeforeCtx
}

// ***************** OptionSet Implementation *****************

// optionSet which is used for middleware implementation
type optionSet struct {
	// Identity of this middleware entry.
	entryName string
	entryType string

	// Header-name prefix ("RK" by default); the *Key fields below are derived
	// from it once in NewOptionSet().
	prefix          string
	localeKey       string
	appNameKey      string
	appVersionKey   string
	appUnixTimeKey  string
	receivedTimeKey string

	// Optional test double returned instead of this set (see WithMockOptionSet).
	mock OptionSetInterface
}
// NewOptionSet Create new optionSet with options.
//
// Defaults: random entry name (xid), empty entry type, prefix "RK". Header
// keys are derived from the prefix only AFTER all options have been applied,
// so WithPrefix() always takes effect. If a mock was injected, it is
// returned instead of the real implementation (testing hook).
func NewOptionSet(opts ...Option) OptionSetInterface {
	set := &optionSet{
		entryName: xid.New().String(),
		entryType: "",
		prefix:    "RK",
	}
	for i := range opts {
		opts[i](set)
	}
	// Short-circuit for tests: the mock bypasses key computation entirely.
	if set.mock != nil {
		return set.mock
	}
	// Guard against WithPrefix("") wiping the default.
	if len(set.prefix) < 1 {
		set.prefix = "RK"
	}
	set.appNameKey = fmt.Sprintf("X-%s-App-Name", set.prefix)
	set.appVersionKey = fmt.Sprintf("X-%s-App-Version", set.prefix)
	set.appUnixTimeKey = fmt.Sprintf("X-%s-App-Unix-Time", set.prefix)
	set.receivedTimeKey = fmt.Sprintf("X-%s-Received-Time", set.prefix)
	set.localeKey = fmt.Sprintf("X-%s-Locale", set.prefix)
	return set
}
// GetEntryName returns the name this middleware entry was registered under.
func (s *optionSet) GetEntryName() string { return s.entryName }

// GetEntryType returns the type label of this middleware entry.
func (s *optionSet) GetEntryType() string { return s.entryType }
// BeforeCtx should be created before Before(); it seeds a fresh context with
// the given event and an empty map of headers to return.
func (set *optionSet) BeforeCtx(event rkquery.Event) *BeforeCtx {
	ctx := NewBeforeCtx()
	ctx.Input.Event = event
	ctx.Output.HeadersToReturn = map[string]string{}
	return ctx
}
// Before should run before user handler. It generates a request ID, stamps it
// on the event (if present) and collects the app-metadata response headers.
func (set *optionSet) Before(ctx *BeforeCtx) {
	if ctx == nil {
		return
	}

	reqId := rkcommon.GenerateRequestId()
	now := time.Now().Format(time.RFC3339Nano)

	// The same ID serves as both request ID and event ID.
	if ctx.Input.Event != nil {
		ctx.Input.Event.SetRequestId(reqId)
		ctx.Input.Event.SetEventId(reqId)
	}

	ctx.Output.RequestId = reqId
	ctx.Output.HeadersToReturn[rkmid.HeaderRequestId] = reqId
	// Use the header keys precomputed in NewOptionSet() instead of rebuilding
	// identical strings with fmt.Sprintf on every request.
	ctx.Output.HeadersToReturn[set.appNameKey] = rkentry.GlobalAppCtx.GetAppInfoEntry().AppName
	ctx.Output.HeadersToReturn[set.appVersionKey] = rkentry.GlobalAppCtx.GetAppInfoEntry().Version
	ctx.Output.HeadersToReturn[set.appUnixTimeKey] = now
	ctx.Output.HeadersToReturn[set.receivedTimeKey] = now
	// NOTE(review): this header is "X-<prefix>-App-Locale" while NewOptionSet()
	// stores localeKey as "X-<prefix>-Locale" — one of the two is likely a typo.
	// Kept as-is to preserve the wire format; confirm which name clients expect.
	ctx.Output.HeadersToReturn[fmt.Sprintf("X-%s-App-Locale", set.prefix)] = strings.Join([]string{
		rkmid.Realm.String, rkmid.Region.String, rkmid.AZ.String, rkmid.Domain.String,
	}, "::")
}
// ***************** OptionSet Mock *****************

// NewOptionSetMock for testing purpose
func NewOptionSetMock(before *BeforeCtx) OptionSetInterface {
	return &optionSetMock{
		before: before,
	}
}

// optionSetMock is a stub implementation that always hands back the canned
// BeforeCtx it was constructed with.
type optionSetMock struct {
	before *BeforeCtx
}

// GetEntryName returns entry name
func (mock *optionSetMock) GetEntryName() string {
	return "mock"
}

// GetEntryType returns entry type
func (mock *optionSetMock) GetEntryType() string {
	return "mock"
}

// BeforeCtx should be created before Before(); the event argument is ignored
// and the canned context is returned.
func (mock *optionSetMock) BeforeCtx(event rkquery.Event) *BeforeCtx {
	return mock.before
}

// Before should run before user handler; intentionally a no-op in the mock.
func (mock *optionSetMock) Before(ctx *BeforeCtx) {
	return
}
// ***************** Context *****************

// NewBeforeCtx create new BeforeCtx with fields initialized
func NewBeforeCtx() *BeforeCtx {
	ctx := &BeforeCtx{}
	// Initialize the map so Before() can write headers without a nil check.
	ctx.Output.HeadersToReturn = make(map[string]string)
	return ctx
}

// BeforeCtx context for Before() function
type BeforeCtx struct {
	// Input carries what the middleware consumes.
	Input struct {
		Event rkquery.Event
	}
	// Output carries what the middleware produces for the framework adapter.
	Output struct {
		RequestId       string
		HeadersToReturn map[string]string
	}
}
// ***************** BootConfig *****************

// BootConfig for YAML
type BootConfig struct {
	// Enabled toggles the middleware on/off from boot config.
	Enabled bool `yaml:"enabled" json:"enabled"`
	// Prefix overrides the default "RK" header prefix.
	Prefix string `yaml:"prefix" json:"prefix"`
}

// ToOptions convert BootConfig into Option list; returns an empty slice when
// the middleware is disabled.
func ToOptions(config *BootConfig, entryName, entryType string) []Option {
	opts := make([]Option, 0)
	if config.Enabled {
		opts = append(opts,
			WithEntryNameAndType(entryName, entryType),
			WithPrefix(config.Prefix))
	}
	return opts
}
// ***************** Option *****************

// Option if for middleware options while creating middleware
type Option func(*optionSet)

// WithEntryNameAndType provide entry name and entry type.
func WithEntryNameAndType(entryName, entryType string) Option {
	return func(opt *optionSet) {
		opt.entryName = entryName
		opt.entryType = entryType
	}
}

// WithPrefix provide prefix. An empty prefix is replaced with the default
// "RK" inside NewOptionSet().
func WithPrefix(prefix string) Option {
	return func(opt *optionSet) {
		opt.prefix = prefix
	}
}

// WithMockOptionSet provide mock OptionSetInterface; intended for tests —
// NewOptionSet() returns the mock instead of the real implementation.
func WithMockOptionSet(mock OptionSetInterface) Option {
	return func(set *optionSet) {
		set.mock = mock
	}
}
|
#!/bin/bash
# Force-remove the named containers if they exist; `|| true` keeps the script
# succeeding when a container is already gone.
docker rm -f bean || true
docker rm -f chat-app || true
|
import Vue from 'vue'
import options, { i18n } from '@/config'
import messages from '@/messages.json'

// Apply the app configuration to Vue; the return value is unused here but the
// call installs the i18n plugin as a side effect — TODO confirm.
const config = options(Vue)

/**
 * Tests internationalization
 */
describe('I18N', () => {
  it('is loaded', () => {
    // Sanity check: the i18n instance and its translate function exist.
    expect(i18n).to.be.a('object')
    expect(i18n.t).to.be.a('function')
  })
  it('translates', () => {
    // The translated title must match the raw message for the active locale.
    expect(i18n.t('title')).to.equal(messages[i18n.locale].title)
  })
})
|
def handle_agent_request(player, agent, agent_id):
    """Return public info about *agent* if *player* is allowed to see it.

    Only the player named 'Charlie' may query, only for agents owned by the
    player named 'Alpha', and never while the agent is in a safehouse.

    Returns a JSON response with the agent's id, position and package count,
    or raises Unauthenticated (401) otherwise.
    """
    if player.name == 'Charlie' and not agent.in_safehouse() and agent.player.name == 'Alpha':
        return json.jsonify(
            id=agent.id,
            x=agent.x,
            y=agent.y,
            packages=len(agent.packages),
        )
    # NOTE(review): the original ended with
    #   elif <exact negation of the condition above>: raise Unauthenticated(...)
    #   else: raise ResourceMissing(...)
    # making the ResourceMissing branch unreachable dead code; it was removed
    # here. A 404 for a missing agent presumably belongs where `agent` is
    # looked up, before this function is called — confirm with the caller.
    raise Unauthenticated('Access Denied', status_code=401)
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2453-2
#
# Security announcement date: 2012-04-19 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:23 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - gajim:0.13.4-3+squeeze3
#
# Last versions recommended by security team:
# - gajim:0.13.4-3+squeeze4
#
# CVE List:
# - CVE-2012-2093
# - CVE-2012-2086
# - CVE-2012-2085
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade gajim=0.13.4-3+squeeze4 -y
|
<gh_stars>10-100
import { BigNumber, providers } from 'ethers';
import BaseService from '../commons/BaseService';
import { ERC20Validator } from '../commons/validators/methodValidators';
import { isEthAddress } from '../commons/validators/paramValidators';
import { IERC202612 } from './typechain/IERC202612';
import { IERC202612__factory } from './typechain/IERC202612__factory';
/** Arguments for {@link ERC20_2612Service.getNonce}: token contract address and owner address. */
export type GetNonceType = { token: string; owner: string };

/** Contract for reading the ERC-2612 permit nonce of an owner on a token. */
export interface ERC20_2612Interface {
  getNonce: (args: GetNonceType) => Promise<number | null>;
}

/**
 * Service for querying the permit nonce of ERC-2612-capable tokens.
 * Falls back from the standard `nonces` accessor to the non-standard
 * `_nonces` used by some tokens.
 */
export class ERC20_2612Service
  extends BaseService<IERC202612>
  implements ERC20_2612Interface
{
  constructor(provider: providers.Provider) {
    super(provider, IERC202612__factory);

    // Bind so the method can be passed around detached without losing `this`.
    this.getNonce = this.getNonce.bind(this);
  }

  /**
   * Reads the current permit nonce for `owner` on `token`.
   *
   * @returns the nonce as a number, or `null` when the token implements
   *   neither `nonces` nor `_nonces` (i.e. does not support ERC-2612).
   */
  @ERC20Validator
  public async getNonce(
    @isEthAddress('token')
    @isEthAddress('owner')
    { token, owner }: GetNonceType,
  ): Promise<number | null> {
    const tokenContract = this.getContractInstance(token);
    let nonce: BigNumber;
    try {
      nonce = await tokenContract.nonces(owner);
      return nonce.toNumber();
    } catch (_: unknown) {
      // Skip console log here since other nonce method can also work
    }
    try {
      // Non-standard fallback used by some token implementations.
      nonce = await tokenContract._nonces(owner);
      return nonce.toNumber();
    } catch (_: unknown) {
      console.log(`Token ${token} does not implement nonces or _nonces method`);
    }
    return null;
  }
}
|
#!/bin/bash
# Rofi launcher for JetBrains projects, with a 24h on-disk cache of the
# project list produced by jb-project-opener.
#
# Fixes: the original declared #!/bin/sh but used [[ ]] bashisms (breaks under
# dash); paths and user selections were unquoted (breaks on spaces).
PATH=$PATH:/opt/bin

SCRIPT_PATH=$(realpath "$0")
SCRIPT_HOME=$(dirname "$SCRIPT_PATH")
CACHE_FILE=$SCRIPT_HOME/.project-cache

# Reuse the cache only if it exists and is younger than 24h (1440 minutes):
# `find -mmin +1440` prints the file only when it is OLDER than that.
if [[ -f "$CACHE_FILE" ]] && [[ "$(find "$CACHE_FILE" -mmin +1440 | wc -l)" == "0" ]]; then
	# use the cache
	CHOSEN=$(rofi -dmenu < "$CACHE_FILE")
else
	# rebuild the list and override the cache
	CHOSEN=$("$SCRIPT_HOME/jb-project-opener" | tee "$CACHE_FILE" | rofi -dmenu)
fi

# User dismissed the menu without choosing anything.
if [ -z "$CHOSEN" ]; then
	exit 0
fi

# Entries look like "<IDE>: <path>".
PROJECT_PATH=$(echo "$CHOSEN" | sed 's/^[^:]*: //')
IDE=$(echo "$CHOSEN" | cut -d: -f1)

# Launch the matching IDE with the selected project.
case "$IDE" in
GoLand)
	goland "$PROJECT_PATH"
	;;
WebStorm)
	webstorm "$PROJECT_PATH"
	;;
IntelliJIdea)
	idea "$PROJECT_PATH"
	;;
PyCharm)
	pycharm "$PROJECT_PATH"
	;;
DataGrip)
	datagrip "$PROJECT_PATH"
	;;
CLion)
	clion "$PROJECT_PATH"
	;;
esac
|
<gh_stars>10-100
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Modifications copyright (c) 2020. <NAME>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.kpax.winfoom.proxy.pac.datetime;
import org.kpax.winfoom.proxy.pac.PacHelperMethodsNetscape;
import java.util.*;
/**
* Methods and constants useful in PAC script evaluation, specifically
* date/time related.
*
* @author lbruun
*/
public class PacDateTimeUtils {

    /**
     * List of valid weekday names as used in the Netscape specification.
     * <p>
     * Content: {@code SUN MON TUE WED THU FRI SAT}
     */
    public final static List<String> WEEKDAY_NAMES = List.of("SUN", "MON", "TUE", "WED", "THU", "FRI", "SAT");

    /**
     * List of valid month names as used in the Netscape specification.
     * <p>
     * Content: {@code JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC}
     */
    public final static List<String> MONTH_NAMES = List.of("JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC");

    private static final TimeZone UTC_TIME = TimeZone.getTimeZone("UTC");

    /**
     * Evaluates if now is within a weekday range. Method arguments are as described
     * for {@link PacHelperMethodsNetscape#weekdayRange(Object...) }
     *
     * @param now
     * @param args
     * @return true if within range
     * @throws PacDateTimeInputException if arguments were invalid
     */
    public static boolean isInWeekdayRange(Date now, Object... args) throws PacDateTimeInputException {
        ParamsInfo paramsInfo = getParamsInfo(args);
        if (!(paramsInfo.count >= 1 && paramsInfo.count <= 2)) {
            throw new PacDateTimeInputException("invalid number of arguments");
        }
        final int wdNumMin;
        final int wdNumMax;
        if (paramsInfo.count == 1) {
            // Single argument: the range is exactly that one weekday.
            wdNumMin = getWeekday(args[0].toString());
            wdNumMax = wdNumMin;
        } else {
            wdNumMin = getWeekday(args[0].toString());
            wdNumMax = getWeekday(args[1].toString());
        }
        Calendar cal = getCalendar(now, paramsInfo.useGMT);
        int wdNum = cal.get(Calendar.DAY_OF_WEEK);
        if (wdNumMin <= wdNumMax) {
            return (wdNum >= wdNumMin && wdNum <= wdNumMax);
        } else {
            // Wrap-around range, e.g. FRI..MON.
            return (wdNum >= wdNumMin || wdNum <= wdNumMax);
        }
    }

    /**
     * Evaluates if now is within a time range. Method arguments are as described
     * for {@link PacHelperMethodsNetscape#timeRange(Object...) }
     *
     * @param now
     * @param args
     * @return true if within range
     * @throws PacDateTimeInputException if arguments were invalid
     */
    public static boolean isInTimeRange(Date now, Object... args) throws PacDateTimeInputException {
        ParamsInfo paramsInfo = getParamsInfo(args);
        // Valid argument counts: 1 (h), 2 (h,h), 4 (h,m,h,m), 6 (h,m,s,h,m,s).
        if (!(paramsInfo.count >= 1 && paramsInfo.count <= 6) || paramsInfo.count == 5 || paramsInfo.count == 3) {
            throw new PacDateTimeInputException("invalid number of arguments");
        }
        TimeRange.TimeRangeBuilder builder = TimeRange.getBuilder();
        if (paramsInfo.count == 1) {
            builder.withHourMinMax(getHour(args[0]), getHour(args[0]));
        }
        if (paramsInfo.count == 2) {
            builder.withHourMinMax(getHour(args[0]), getHour(args[1]));
            if (getHour(args[0]) != getHour(args[1])) {
                builder.withMinuteMinMax(0, 0);
            }
        }
        if (paramsInfo.count == 4) {
            builder.withHourMinMax(getHour(args[0]), getHour(args[2]))
                    .withMinuteMinMax(getMinute(args[1]), getMinute(args[3]))
                    .withSecondMinMax(0, 0);
        }
        if (paramsInfo.count == 6) {
            builder.withHourMinMax(getHour(args[0]), getHour(args[3]))
                    .withMinuteMinMax(getMinute(args[1]), getMinute(args[4]))
                    .withSecondMinMax(getSecond(args[2]), getSecond(args[5]));
        }
        TimeRange timeRange = builder.build();
        Calendar calendar = getCalendar(now, paramsInfo.useGMT);
        return timeRange.isInRange(calendar);
    }

    /**
     * Evaluates if now is within a date range. Method arguments are as described
     * for {@link PacHelperMethodsNetscape#dateRange(Object...) }
     *
     * @param now
     * @param args arguments
     * @return true if within range
     * @throws PacDateTimeInputException if arguments were invalid
     */
    public static boolean isInDateRange(Date now, Object... args) throws PacDateTimeInputException {
        ParamsInfo paramsInfo = getParamsInfo(args);
        if (!(paramsInfo.count >= 1 && paramsInfo.count <= 6) || paramsInfo.count == 5 || paramsInfo.count == 3) {
            throw new PacDateTimeInputException("invalid number of arguments");
        }
        DateRange.DateRangeBuilder builder = DateRange.builder();
        if (paramsInfo.count == 1) {
            // One argument: a year, a month name, or a day of month.
            if (isYear(args[0])) {
                int year = getYear(args[0]);
                builder.withYear(year, year);
            } else if (isMonth(args[0])) {
                int month = getMonth(args[0].toString());
                builder.withMonth(month, month);
            } else if (isDate(args[0])) {
                int date = getDate(args[0]);
                builder.withDate(date, date);
            } else {
                throw new PacDateTimeInputException("invalid argument : " + args[0].toString());
            }
        } else if (paramsInfo.count == 2) {
            // Two arguments: min/max of the same kind.
            if (isYear(args[0])) {
                builder.withYear(getYear(args[0]), getYear(args[1]));
            } else if (isMonth(args[0])) {
                builder.withMonth(getMonth(args[0].toString()), getMonth(args[1].toString()));
            } else if (isDate(args[0])) {
                builder.withDate(getDate(args[0]), getDate(args[1]));
            } else {
                throw new PacDateTimeInputException("invalid argument : " + args[0].toString());
            }
        } else if (paramsInfo.count == 4) {
            // Four arguments: (month, year, month, year) or (day, month, day, month).
            if (isMonth(args[0])) {
                builder.withYear(getYear(args[1]), getYear(args[3]))
                        .withMonth(getMonth(args[0].toString()), getMonth(args[2].toString()));
            } else if (isDate(args[0])) {
                builder.withMonth(getMonth(args[1].toString()), getMonth(args[3].toString()))
                        .withDate(getDate(args[0]), getDate(args[2]));
            } else {
                throw new PacDateTimeInputException("invalid argument : " + args[0].toString());
            }
        } else if (paramsInfo.count == 6) {
            // Six arguments: full (day, month, year, day, month, year) range.
            builder.withYear(getYear(args[2]), getYear(args[5]))
                    .withMonth(getMonth(args[1].toString()), getMonth(args[4].toString()))
                    .withDate(getDate(args[0]), getDate(args[3]));
        }
        Calendar cal = getCalendar(now, paramsInfo.useGMT);
        return builder.build().isInRange(cal);
    }

    /** A month argument is any string (validated against MONTH_NAMES later). */
    private static boolean isMonth(Object obj) {
        return (obj instanceof CharSequence);
    }

    /** A year argument is an integer >= 1000. */
    private static boolean isYear(Object obj) {
        try {
            int val = getInteger(obj);
            return (val >= 1000);
        } catch (PacDateTimeInputException ex) {
            return false;
        }
    }

    /** A day-of-month argument is an integer in [1, 31]. */
    private static boolean isDate(Object obj) {
        try {
            int val = getInteger(obj);
            return (val >= 1 && val <= 31);
        } catch (PacDateTimeInputException ex) {
            return false;
        }
    }

    private static int getDate(Object obj) throws PacDateTimeInputException {
        if (!isDate(obj)) {
            throw new PacDateTimeInputException("value " + obj.toString() + " is not a valid day of month");
        }
        return getInteger(obj);
    }

    private static int getYear(Object obj) throws PacDateTimeInputException {
        if (!isYear(obj)) {
            throw new PacDateTimeInputException("value " + obj.toString() + " is not a valid year");
        }
        return getInteger(obj);
    }

    private static int getWeekday(String wd) throws PacDateTimeInputException {
        int indexOf = WEEKDAY_NAMES.indexOf(wd);
        if (indexOf == -1) {
            throw new PacDateTimeInputException("Unknown weekday name : \"" + wd + "\"");
        }
        return indexOf + 1; // In Calendar, the first weekday (Sunday) is 1
    }

    private static int getMonth(String month) throws PacDateTimeInputException {
        int indexOf = MONTH_NAMES.indexOf(month);
        if (indexOf == -1) {
            throw new PacDateTimeInputException("Unknown month name : \"" + month + "\"");
        }
        return indexOf; // In Calendar, January is 0
    }

    /** Accepts Integer/Long or a decimal string; anything else is invalid input. */
    private static int getInteger(Object obj) throws PacDateTimeInputException {
        if (obj instanceof Integer || obj instanceof Long) {
            return ((Number) obj).intValue();
        }
        if (obj instanceof String) {
            try {
                return Integer.parseInt((String) obj);
            } catch (NumberFormatException ex) {
            }
        }
        // Fixed message: was "value is " + obj + " is not ..." (double "is").
        throw new PacDateTimeInputException("value " + obj + " is not an integer");
    }

    private static int getHour(Object obj) throws PacDateTimeInputException {
        int hour = getInteger(obj);
        if (!(hour >= 0 && hour <= 23)) {
            throw new PacDateTimeInputException("value " + hour + " is not a valid hour of day (0-23)");
        }
        return hour;
    }

    private static int getMinute(Object obj) throws PacDateTimeInputException {
        int min = getInteger(obj);
        if (!(min >= 0 && min <= 59)) {
            throw new PacDateTimeInputException("value " + min + " is not a valid minute (0-59)");
        }
        return min;
    }

    private static int getSecond(Object obj) throws PacDateTimeInputException {
        int sec = getInteger(obj);
        if (!(sec >= 0 && sec <= 59)) {
            throw new PacDateTimeInputException("value " + sec + " is not a valid second (0-59)");
        }
        return sec;
    }

    /** Wraps the instant in a Calendar, optionally pinned to UTC. */
    private static Calendar getCalendar(Date now, boolean useGMT) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(now);
        if (useGMT) {
            cal.setTimeZone(UTC_TIME);
        }
        return cal;
    }

    /**
     * Gets the number of actual arguments passed to a JavaScript
     * function. This is done by counting the number of arguments of type
     * {@code Number} or {@code CharSequence}.
     *
     * <p>
     * This is a convenience method useful when implementing
     * {@link PacHelperMethodsNetscape#dateRange(Object...) dateRange()}
     * ,
     * {@link PacHelperMethodsNetscape#timeRange(Object...) timeRange()}
     * or
     * {@link PacHelperMethodsNetscape#weekdayRange(Object...) weekdayRange()}
     *
     * <p>
     * Note: In some engines, JavaScript function arguments that are not used in the
     * call will have a type of {@code Undefined}.
     *
     * @param objs
     * @return
     */
    private static int getNoOfParams(Object... objs) {
        return (int) Arrays.stream(objs).filter((obj) -> obj instanceof Number || obj instanceof CharSequence).count();
    }

    /**
     * Counts the effective arguments and detects a trailing "GMT" marker,
     * which is excluded from the count.
     */
    private static ParamsInfo getParamsInfo(Object... args) {
        int noOfParams = getNoOfParams(args);
        // Robustness fix: with no usable arguments the original indexed
        // args[-1] and threw ArrayIndexOutOfBoundsException; report count 0
        // instead so callers raise PacDateTimeInputException.
        if (noOfParams == 0) {
            return new ParamsInfo(0, false);
        }
        boolean useGMT;
        if (args[noOfParams - 1] instanceof CharSequence) {
            String p = args[noOfParams - 1].toString();
            useGMT = p.equals("GMT");
        } else {
            useGMT = false;
        }
        if (useGMT) {
            noOfParams--;
        }
        return new ParamsInfo(noOfParams, useGMT);
    }

    /**
     * Validation errors on input to {@code weekdayRange()},
     * {@code timeRange()} and {@code dateRange()}.
     */
    public static class PacDateTimeInputException extends Exception {
        public PacDateTimeInputException(String msg) {
            super(msg);
        }
    }

    /** Immutable pair: effective argument count + whether "GMT" was requested. */
    private static class ParamsInfo {
        private final int count;
        private final boolean useGMT;

        ParamsInfo(int count, boolean useGMT) {
            this.count = count;
            this.useGMT = useGMT;
        }
    }
}
|
from pathlib import Path
from django.core.management.base import BaseCommand, CommandError
from hill.data.apt.apt_client import AptClient
from hill.data.common.http_client import HttpClient
from hill.data.common.local_cache import LocalCache, LocalCacheTime
from hill.data.common.logger import Logger
from hill.data.pgp.manage.messages import Messages
from hill.data.database.importer import Importer as DbImporter
class Command(BaseCommand):
    help = "Import an APT repository."

    def add_arguments(self, parser):
        """Register CLI arguments: a required repository URL and an optional
        public key given either as a URL or as a local file."""
        parser.add_argument("url")
        parser.add_argument("--public-key-url")
        parser.add_argument("--public-key-file")

    def handle(self, *args, **options):
        """Fetch the APT repository metadata and import it into the database.

        Raises CommandError when the URL is missing or when both key sources
        are supplied at once.
        """
        archive_url = options.get("url")
        pub_key_url = options.get("public_key_url")
        pub_key_file = options.get("public_key_file")
        if not archive_url:
            raise CommandError("Invalid apt repository url.")
        # The key may come from at most one source.
        if pub_key_url and pub_key_file:
            raise CommandError("Provide only one of public key url or file.")
        logger = Logger(cmd=self)
        cache = LocalCache()
        http_client = HttpClient(logger=logger, cache=cache)
        messages = Messages(logger)
        if pub_key_url:
            # Cache the downloaded key indefinitely (LocalCacheTime.KEEP).
            content = http_client.get(pub_key_url, LocalCacheTime.KEEP)
            message = messages.read_container_content(content)
        elif pub_key_file:
            message = messages.read_file(Path(pub_key_file))
        else:
            # Proceeding without a key means downloads are unverified.
            logger.warning(
                "CAUTION! No public key supplied. Downloaded files will not be verified."
            )
            message = None
        logger.info(f"Importing apt repository from url '{archive_url}'.")
        client = AptClient(logger, http_client, archive_url, message)
        releases, packages, contents, translations = client.get()
        importer = DbImporter()
        importer.run(releases, packages, contents, translations)
        logger.info("Finished importing apt repository.")
|
<reponame>ArchieMedes/sistop-2021-1<gh_stars>1-10
# -*- encoding: Latin-1
"""
Created on Thu Jan 8 15:37:48 2021
@author: Jonathan
"""
import os.path # Se utiliza para asignar rutas en el sistema
from math import ceil#Se utiliza ceil para asignar el valor del cluster
#Cuerpo del programa
# Create the FiUnamFS image on first run; reuse it on later runs.
if os.path.exists("fiunam.img"):  # check whether the filesystem image already exists
    pass
else:
    print("Creamos Sistema de Archivos")
    # Superblock: magic, version, label, cluster size, directory clusters,
    # total clusters — written at fixed offsets.
    file = open("fiunam.img", "w")
    file.write("FiUnamFS")
    file.seek(10)
    file.write("0.4")
    file.seek(20)
    file.write("Sistema Archivo")
    file.seek(40)
    file.write("01024")
    file.seek(47)
    file.write("04")
    file.seek(52)
    file.write("00001440")
    # Directory: 64 empty entries starting at cluster 1 (offset 1024).
    # Each entry: 15-char name sentinel, 8-char size, 5-char start cluster,
    # two 14-char timestamps, separated by 1-byte gaps (64 bytes per entry).
    file.seek(1024)
    for i in range(64):
        file.write("AQUI_NO_VA_NADA")
        file.seek(file.tell()+1)
        file.write("00000000")
        file.seek(file.tell()+1)
        file.write("00000")
        file.seek(file.tell()+1)
        file.write("00000000000000")
        file.seek(file.tell()+1)
        file.write("00000000000000")
        file.seek(file.tell()+4)
    file.close()
#Funcion para agregar archivo de la computadora al sistema de archivos
# Add a file from the host computer into the filesystem image.
def agregar(nombre):
    """Append *nombre* (a host file) to the FiUnamFS directory.

    Finds the first free directory entry (name sentinel 'AQUI_NO_VA_NADA'),
    derives the next free cluster from the previous entry, and writes the
    name, size and start cluster. Note: only directory metadata is written
    here; the file's data bytes are not copied into the image.
    """
    size = os.path.getsize(nombre)  # verify the path exists and get its size
    cluster_ant = 0
    tamanio = 0
    cluster = ceil(size/1024)  # size in 1024-byte clusters, rounded up
    file = open("fiunam.img", "r+")
    file.seek(0, os.SEEK_SET)
    file.seek(1024)
    aux = file.read(15)
    # Scan 64-byte directory entries until a free one is found.
    while aux != 'AQUI_NO_VA_NADA':
        file.seek(file.tell()+49)
        aux = file.read(15)
    # If this is the very first entry (offset 1024+15=1039), data starts at cluster 4.
    if file.tell() == 1039:
        cluster_act = 4
        ahora = file.tell()
    else:
        # Otherwise read the previous entry's size and start cluster to
        # compute where this file's clusters begin.
        ahora = file.tell()
        anterior = file.seek(file.tell()-63)
        tamanio = file.read(8)
        file.seek(file.tell()+1)
        cluster_ant = file.read(5)
        cluster_act = ceil(int(tamanio)/1024)+int(cluster_ant)-1
        file.seek(file.tell()+49)
    # Only proceed if the 1440-cluster image has room left.
    if (1440-(cluster_act+cluster)) > 0:
        # Right-pad the name with spaces to the fixed 15-char field width.
        temp = 15-len(nombre)
        if(temp > 0):
            file.seek(file.tell()-15)
            for i in range(temp):
                file.write(" ")
            file.write(nombre)
        else:
            return ("El archivo tiene un nombre demasiado grande")
        # Write the new file's size, zero-padded to 8 digits.
        file.seek(file.tell()+1)
        temp = 8-len(str(size))
        for i in range(temp):
            file.write("0")
        file.write(str(size))
        # Write the start cluster, zero-padded to 5 digits.
        file.seek(file.tell()+1)
        temp = 5-len(str(cluster_act+cluster))
        for i in range(temp):
            file.write("0")
        file.write(str(cluster_act+cluster))
#Funcion que copia archivo del sistema a la computadora
# Copy a file out of the filesystem image onto the host computer.
def copy(nombre):
    """Extract directory entry *nombre* into a new host file.

    Prompts interactively for the destination name (it must differ from the
    source name), locates the entry in the directory, then reads `size` bytes
    starting at the entry's cluster. NOTE(review): the directory scan has no
    bound — a missing name loops past the directory region.
    """
    size = 0
    cluster = 0
    name = nombre
    copia = ""
    # Ask until the user provides a name different from the source.
    while name == nombre:
        print("Introduce el nombre del nuevo archivo")
        name = input()
    file = open("fiunam.img", "r+", encoding="Latin-1")
    file.seek(1024)
    aux = file.read(15)
    # Directory names are space-padded to 15 chars; strip before comparing.
    while aux.replace(" ", "") != nombre:
        file.seek(file.tell()+49)
        aux = file.read(15)
    # Entry layout: name(15) gap(1) size(8) gap(1) start-cluster(5).
    file.seek(file.tell()+1)
    size = int(file.read(8))
    file.seek(file.tell()+1)
    cluster = int(file.read(5))
    file.seek(cluster*1024)
    copia = file.read(size)
    archivo = open(""+name, "w", encoding="Latin-1")
    archivo.write(copia)
    archivo.close()
    file.close()
    return("Archivo copiado con exito")
#Muestra el contenido del FS
# Show the contents of the filesystem image.
def listar():
    """Print the name of every occupied directory entry.

    Scans the 64 fixed 64-byte entries starting at offset 1024; entries whose
    name field is the sentinel 'AQUI_NO_VA_NADA' are free and skipped.

    Bug fix: the image is created as "fiunam.img" but this function opened
    "fiUnam.img", which fails on case-sensitive filesystems.
    """
    file = open("fiunam.img", "r")
    file.seek(1024)
    for i in range(64):
        archivoAux = file.read(15)
        if archivoAux != 'AQUI_NO_VA_NADA':
            # Names are space-padded to 15 chars; strip the padding for display.
            print(archivoAux.replace(' ', ''))
        file.seek(file.tell()+49)
    file.close()
#Esta funcion elimina del sistema de archivos
# Remove a file from the filesystem image.
def eliminar(nombre):  # receives the name of the file to delete
    """Delete directory entry *nombre* and zero out its data clusters.

    NOTE(review): opens "fiUnam.img" while the image is created as
    "fiunam.img" — fails on case-sensitive filesystems; confirm intent.
    NOTE(review): indentation of the trailing close/return was ambiguous in
    the original; reconstructed here as running after the scan loop.
    """
    file = open("fiUnam.img", "r+")
    file.seek(1024)
    for i in range(64):
        archivoAux = file.read(15)
        if archivoAux.replace(' ', '') == nombre:
            # Recover the entry's size and start cluster before wiping it.
            file.seek(file.tell()+1)
            tam = int(file.read(8))
            file.seek(file.tell()+1)
            ini = int(file.read(5))
            # Reset the directory entry back to the free-entry layout.
            file.seek(file.tell()-30)
            file.write("AQUI_NO_VA_NADA")
            file.seek(file.tell()+1)
            file.write("00000000")
            file.seek(file.tell()+1)
            file.write("00000")
            file.seek(file.tell()+1)
            file.write("00000000000000")
            file.seek(file.tell()+1)
            file.write("00000000000000")
            file.seek(file.tell()+4)
            file.close()
            # Zero the data region; the size is passed in whole clusters.
            limpiar(ini, ceil(tam/1024))
            return 'Se borro el archivo'
        else:
            # Skip the rest of this 64-byte entry.
            file.seek(file.tell()+49)
    file.close()
    return 'No se encontro el archivo'
#Esta funcion limpia la memoria con el inicio y la magnitud.(EStara en clusters)
# Zero a region of the image given a start cluster and a length in clusters.
def limpiar(ini, tam):
    """Overwrite *tam* clusters starting at cluster *ini* with NUL bytes.

    Bug fix: the image is created as "fiunam.img" but this function opened
    "fiUnam.img", which fails on case-sensitive filesystems.
    """
    file = open("fiunam.img", "r+")
    file.seek(ini*1024)
    vacio = '\x00'
    for i in range(0, tam):
        file.write(vacio*1024)
    file.close()
# Interactive menu loop: dispatches to the FS operations until the user quits.
a = True
while a == True:
    print("_____________________  ")
    print("|1. Listar contenido.|")
    print("|2. añadir archivo.|")
    print("|3. Copiar archivo a su computadora.  |")
    print("|4. Eliminar archivo.  |")#CHECK
    print("|5. Salir.  |")
    print("| Seleccione un opcion:  |")
    print("                      ")
    # NOTE(review): int(input(...)) raises ValueError on non-numeric input.
    op = int(input("Ingrese una opcion: \n"))
    if op == 1:
        listar()
    elif op == 2:
        print("añadir archivo")
        arg = input("Ingrese el nombre del archivo :")
        agregar(arg)
        print("Añadido")
    elif op == 3:
        print("Copiar del FiUnamFs a la computadora")
        arg = input("Ingrese el nombre del archivo :")
        copy(arg)
        print("copiado")
    elif op == 4:
        arg = input("Ingrese el nombre del archivo :")
        eliminar(arg)
        print("Eliminado")
    elif op == 5:
        a = False
|
import { AppRegistry } from 'react-native';
import App from './App';

// React Native entry point: registers the root component under the app key
// expected by the native hosts.
AppRegistry.registerComponent('BReactNative', () => App);
|
<filename>docs/search/all_e.js
// Doxygen-generated search index (symbol name -> [display, link, scope]).
// Do not edit by hand; regenerate the documentation instead.
var searchData=
[
  ['searchagent',['SearchAgent',['../classsearch_agents_1_1_search_agent.html',1,'searchAgents']]],
  ['searchproblem',['SearchProblem',['../classsearch_1_1_search_problem.html',1,'search']]],
  ['sortedkeys',['sortedKeys',['../classutil_1_1_counter.html#a8e32d106f34cb7cd8341e78b49f5e40a',1,'util::Counter']]],
  ['stack',['Stack',['../classutil_1_1_stack.html',1,'util']]],
  ['stayeastsearchagent',['StayEastSearchAgent',['../classsearch_agents_1_1_stay_east_search_agent.html',1,'searchAgents']]],
  ['staywestsearchagent',['StayWestSearchAgent',['../classsearch_agents_1_1_stay_west_search_agent.html',1,'searchAgents']]],
  ['swapimages',['swapImages',['../classgraphics_display_1_1_pacman_graphics.html#afa38cdde4141fb7f84b80983404f6ab0',1,'graphicsDisplay::PacmanGraphics']]]
];
|
#!/bin/bash
# SLURM batch job: run an OpenFOAM case on one 64-core node and post-process it.
#SBATCH -p amd_256
#SBATCH -N 1
#SBATCH -n 64
#SBATCH -o run.log
#SBATCH -e err.log
echo working directory: `pwd`
# Load the OpenFOAM 7 environment and shared post-processing helpers.
source /public1/soft/openfoam/OpenFOAM7-fgl/OpenFOAM-7/etc/rebashrc
. $WM_PROJECT_USER_DIR/utilities/scripts/postProcessFunctions
blockMesh
# renumberMesh -overwrite -noFields # not update the fields due to bug in the OpenFOAM on HPC
decomposePar
# Split the allocation: 4 cores are reserved for the post-processor workers.
cpu=64
workerNum=$(($cpu-4)) #postProcessor
caseDir=./
sampleRate=1 #postProcessing sampling rate
dataFolder=./postProcess
transverse_data_folder=./postProcess/transverseAveragedData/
imageFolder=./postProcess/images
animation_rate=10 #data interval 0.1 s
# runWorkflow is defined in postProcessFunctions sourced above.
runWorkflow $cpu $workerNum $caseDir $sampleRate $dataFolder $transverse_data_folder $imageFolder $animation_rate
|
#!/bin/bash
# The script uses CopasiSE to import an SBML file and re-export it.
# Usage: <script> COPASISE_BINARY INPUT_SBML OUTPUT_SBML LOGFILE
COPASISE=$1
FILENAME=$2
OUTFILE=$3
LOGFILE=$4

# Quote all expansions: the original `[ -e $COPASISE ]` collapses to the
# always-true one-argument form `[ -e ]` when the argument is empty, and
# unquoted paths break on whitespace.
if [ -e "$COPASISE" ];then
	if [ -e "$FILENAME" ];then
		# sed '1,4d' strips the CopasiSE startup banner from the captured output.
		"$COPASISE" --importSBML "${FILENAME}" --oldExportSBML "${OUTFILE}" 2>&1 | sed '1,4d' > "${LOGFILE}"
	fi
fi
|
import time
from django.conf import settings
from drawquest.tests.tests_helpers import (CanvasTestCase, create_content, create_user, create_group,
create_comment, create_staff, create_quest, create_quest_comment,
action_recipients)
from canvas.models import CommentSticker
from services import Services, override_service
class TestPushNotificationSubscriptions(CanvasTestCase):
    def test_unsubscribe(self):
        """Unsubscribing stops 'starred' push notifications; resubscribing restores them."""
        quest_author = create_user()

        def star_and_get_recipients():
            # Have a fresh user star a new comment by quest_author and return
            # the push-notification recipients plus the comment itself.
            starrer = create_user()
            comment = create_quest_comment(author=quest_author)
            sticker = CommentSticker.objects.create(
                comment=comment,
                timestamp=time.time(),
                type_id=settings.STAR_STICKER_TYPE_ID,
                user=starrer,
                ip='0.0.0.0',
            )
            recipients = action_recipients('starred', starrer, sticker,
                                           channel='PushNotificationChannel')
            return recipients, comment

        # By default the author receives the notification.
        recipients, _ = star_and_get_recipients()
        self.assertIn(quest_author.id, [user.id for user in recipients])

        # After unsubscribing, the author must no longer be a recipient.
        self.api_post('/api/push_notifications/unsubscribe',
                      {'notification_type': 'starred'}, user=quest_author)
        recipients, _ = star_and_get_recipients()
        self.assertNotIn(quest_author.id, [user.id for user in recipients])

        # Resubscribing restores delivery.
        self.api_post('/api/push_notifications/resubscribe',
                      {'notification_type': 'starred'}, user=quest_author)
        recipients, _ = star_and_get_recipients()
        self.assertIn(quest_author.id, [user.id for user in recipients])
|
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import StandardScaler

# Load the per-page statistics for the Wikipedia pages.
page_data = pd.read_csv('page_data.csv')

# Feature matrix and regression target.
X = page_data[['edits', 'views']]
y = page_data['popularity']

# Standardize the features BEFORE fitting so training and prediction see
# data on the same scale. (The original fit the model on raw X but then
# predicted on scaled X, which silently yields meaningless predictions.)
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)

model = LinearRegression()
model.fit(X_scaled, y)

# Predictions on the same (scaled) representation the model was trained on.
predictions = model.predict(X_scaled)
print(predictions)
|
// Laravel/Vue application bootstrap: registers plugins, global navigation
// guards, and mounts the root component.
require('./bootstrap');
window.Vue = require('vue');
import Vuex from 'vuex'
import VueRouter from 'vue-router'
import Vuetify from 'vuetify'
window.iziToast = require('iziToast');
Vue.use(Vuex)
Vue.use(Vuetify)
Vue.use(VueRouter);
Vue.component('example-component', require('./components/ExampleComponent.vue').default);
import router from '@/js/routes/router'
import App from '@/js/components/Baseapp'
import store from '@/js/store';
//USER GATES PERMISSION
// NOTE: Gates is imported further down; ES module imports are hoisted, so the
// binding is already in scope here despite the statement order.
Vue.prototype.$gates = new Gates(window.user)
// Global navigation guard: authentication first, then per-route permissions.
router.beforeEach((to, from, next) => {
    //CHECK IF USER IS ALREADY LOGIN
    if (to.matched.some(record => record.meta.requiresLogin) && Vue.prototype.$gates.user == undefined) {
        next("/login")
    }
    //CHECK IF USER HAS ANY PERMISSION ON SPECIFIC PAGE //CREATE UPDATE OR DELETE
    else if (to.matched.some(record => record.meta.permission) && Vue.prototype.$gates.hasAnyPermission(to.meta.permission) == false) {
        next("/page403")
    }
    else {
        next()
    }
})
//CSRF TOKEN
// Expose the CSRF token from the page's <meta> tag for use in requests.
Vue.prototype.$csrf = document.querySelector('meta[name=csrf-token]').content
import Gates from '@/js/class/gates'
// Root instance: mounts <App> with router, store and Vuetify.
const app = new Vue({
    components:{
        App,
    },
    el: '#app',
    router,
    store,
    vuetify: new Vuetify()
});
|
#!/usr/bin/env bash
set -eo pipefail

# Downloads bundle tarballs (one kernel version per line in $2) from the GCS
# bucket $1 into $bundles_dir. '|| true' lets the run continue past
# individual download failures; they are retried later.
downloadBundlesListedInFile() {
    bucket=$1
    downloads_file=$2
    awk -v bucket="$bucket" '{print bucket "/bundle-" $1 ".tgz"}' < "$downloads_file" \
        | gsutil -m cp -I "$bundles_dir" || true
}

# Counts leftover gsutil temp files (*.gstmp), which mark failed downloads.
getNumFailedDownloads() {
    find "$bundles_dir"/*.gstmp 2> /dev/null | wc -l || true
}

# If any downloads failed, move their temp files into failed-downloads/ and
# list them so the job log shows exactly what is missing.
reportIfFailed() {
    num_failed_downloads="$(getNumFailedDownloads)"
    if ((num_failed_downloads > 0)); then
        echo
        echo "There are $num_failed_downloads failed downloads"
        failed_downloads_dir="$bundles_dir/failed-downloads/"
        mkdir "$failed_downloads_dir"
        mv "$bundles_dir"/*.gstmp "$failed_downloads_dir"
        ls "$failed_downloads_dir"/
        echo
    fi
}

# Retries failed downloads, re-deriving the version list from the leftover
# *.gstmp file names (bundle-<version>.tgz_.gstmp).
retryFailedDownloads() {
    bucket=$1
    max_attempts=5 #If you can't download it after 5 attempts you probably can't download it
    for ((i = 0; i < max_attempts; i = i + 1)); do
        num_failed_downloads="$(getNumFailedDownloads)"
        if ((num_failed_downloads == 0)); then
            break
        fi
        sleep 30
        failed_downloads_file="$KOBUILD_DIR/failed_downloads.txt"
        ls "$bundles_dir"/*.gstmp > "$failed_downloads_file"
        # Strip the surrounding path/prefix/suffix to recover the bare version.
        sed -i 's|^.*bundle-||' "$failed_downloads_file"
        sed -i 's|\.tgz_\.gstmp$||' "$failed_downloads_file"
        downloadBundlesListedInFile "$bucket" "$failed_downloads_file"
    done
    reportIfFailed
}

# Downloads everything listed in $KERNELS_FILE from bucket $1, with retries.
downloadBundles() {
    bucket=$1
    downloadBundlesListedInFile "$bucket" "$KERNELS_FILE"
    retryFailedDownloads "$bucket"
}
# Positional arguments: release tag, branch, optional workdir and version list.
TAG=$1
BRANCH=$2
# NOTE: tilde does not expand inside double quotes, so the original default
# "~/kobuild-tmp" created a directory literally named '~'. Use $HOME instead.
KOBUILD_DIR="${3:-$HOME/kobuild-tmp}"
KERNELS_FILE="${4:-"$KOBUILD_DIR/all-kernel-versions"}"

bundles_dir="$KOBUILD_DIR/bundles"
mkdir -p "$bundles_dir"

downloadBundles "$KERNEL_BUNDLES_BUCKET"

# For untagged, non-master builds, also pull from the staging bucket (if set).
if [[ -z "$TAG" && "$BRANCH" != "master" && -n "$KERNEL_BUNDLES_STAGING_BUCKET" ]]; then
    downloadBundles "$KERNEL_BUNDLES_STAGING_BUCKET"
fi
|
#!/bin/tcsh
# CI build script for the FLCL (Fortran Language Compatibility Layer):
# derives install/build paths from machine/compiler/Kokkos variables,
# configures with CMake, builds, installs, and runs the test suite.
setenv CI_SEP -
setenv CI_BUILD_TYPE release
setenv CI_BUILD_SUFFIX build
setenv CI_INSTALL_SUFFIX install
setenv CI_MACHINE_ARCH x86
setenv CI_COMPILER_FAMILY intel
setenv CI_COMPILER_VER 19.0.5
setenv CI_COMPILER_NAME $CI_COMPILER_FAMILY$CI_SEP$CI_COMPILER_VER$CI_SEP
# CUDA components intentionally empty for this serial-backend configuration.
setenv CI_CUDA_PREFIX cuda
setenv CI_CUDA_VER
setenv CI_CUDA_NAME
setenv CI_KOKKOS_PREFIX kokkos
setenv CI_KOKKOS_VER 3.0.00
setenv CI_KOKKOS_BACKEND serial
setenv CI_KOKKOS_NAME $CI_KOKKOS_PREFIX$CI_SEP$CI_KOKKOS_VER$CI_SEP$CI_KOKKOS_BACKEND$CI_SEP$CI_BUILD_TYPE
setenv CI_KOKKOS_PATH_PREFIX /home/$USER/kt
setenv CI_KOKKOS_INSTALL_DIR $CI_KOKKOS_PATH_PREFIX/$CI_MACHINE_ARCH$CI_SEP$CI_COMPILER_NAME$CI_CUDA_NAME$CI_KOKKOS_NAME
setenv CI_KOKKOS_BUILD_DIR $CI_KOKKOS_INSTALL_DIR$CI_SEP$CI_BUILD_SUFFIX
setenv CI_FLCL_PREFIX flcl
setenv CI_FLCL_PATH_PREFIX /home/$USER/kokkos-fortran-interop
setenv CI_FLCL_CI_PATH_PREFIX $CI_FLCL_PATH_PREFIX/ci
setenv CI_FLCL_KOKKOS_PATH $CI_KOKKOS_INSTALL_DIR/lib64/cmake/Kokkos
setenv CI_FLCL_BUILD_DIR $CI_FLCL_CI_PATH_PREFIX/$CI_FLCL_PREFIX$CI_SEP$CI_MACHINE_ARCH$CI_SEP$CI_COMPILER_NAME$CI_CUDA_NAME$CI_KOKKOS_NAME$CI_SEP$CI_BUILD_SUFFIX
setenv CI_FLCL_INSTALL_DIR $CI_FLCL_CI_PATH_PREFIX/$CI_FLCL_PREFIX$CI_SEP$CI_MACHINE_ARCH$CI_SEP$CI_COMPILER_NAME$CI_CUDA_NAME$CI_KOKKOS_NAME$CI_SEP$CI_INSTALL_SUFFIX
# Start from a clean build and install tree every run.
rm -rf $CI_FLCL_INSTALL_DIR
rm -rf $CI_FLCL_BUILD_DIR
mkdir -p $CI_FLCL_INSTALL_DIR
mkdir -p $CI_FLCL_BUILD_DIR
module load cmake/3.19.2
module load intel/19.0.5
cd $CI_FLCL_BUILD_DIR
# Backslash continues the command; the source dir is the first cmake argument.
cmake $CI_FLCL_PATH_PREFIX\
 -DKokkos_DIR=$CI_FLCL_KOKKOS_PATH \
 -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=ON -DBUILD_EXAMPLES=ON \
 -DCMAKE_INSTALL_PREFIX=$CI_FLCL_INSTALL_DIR
cmake --build $CI_FLCL_BUILD_DIR --parallel
cmake --install $CI_FLCL_BUILD_DIR
ctest
module purge
#rm -rf $CI_FLCL_BUILD_DIR
|
const DrawCard = require('../../../drawcard.js');

// "A Dragon Is No Slave" (card code 06034).
class ADragonIsNoSlave extends DrawCard {
    setupCardAbilities(ability) {
        // Challenge-phase action: give -2 STR (until end of phase) to a
        // character in play that has no attachments.
        this.action({
            title: 'Give -2 STR',
            phase: 'challenge',
            target: {
                activePromptTitle: 'Select a character',
                // Only in-play characters without attachments are eligible.
                cardCondition: card => card.location === 'play area' && card.getType() === 'character' && card.attachments.size() === 0
            },
            handler: context => {
                this.untilEndOfPhase(ability => ({
                    match: context.target,
                    effect: ability.effects.modifyStrength(-2)
                }));
                this.game.addMessage('{0} uses {1} to give {2} -2 STR until the end of the phase',
                    this.controller, this, context.target);
            }
        });
        // Reaction usable from the discard pile: after winning a challenge
        // with a qualifying participant, pay 1 gold to return this to hand.
        this.reaction({
            location: 'discard pile',
            when: {
                afterChallenge: ({challenge}) => challenge.winner === this.controller && this.hasParticipatingDragonOrDany()
            },
            ignoreEventCosts: true,
            cost: ability.costs.payGold(1),
            handler: () => {
                this.game.addMessage('{0} pays 1 gold to move {1} back to their hand', this.controller, this);
                this.controller.moveCard(this, 'hand');
            }
        });
    }

    // True when the controller has a participating character that is a Dragon
    // or matches the name below.
    // NOTE(review): '<NAME>' looks like an anonymization placeholder for the
    // original character name — confirm against the real card text.
    hasParticipatingDragonOrDany() {
        let cards = this.controller.filterCardsInPlay(card => {
            return (this.game.currentChallenge.isParticipating(card) &&
                (card.hasTrait('Dragon') || card.name === '<NAME>') &&
                card.getType() === 'character');
        });
        return cards.length >= 1;
    }
}

ADragonIsNoSlave.code = '06034';
module.exports = ADragonIsNoSlave;
|
<reponame>lterrac/system-autoscaler
package main
import (
"flag"
"time"
"github.com/kubernetes-sigs/custom-metrics-apiserver/pkg/dynamicmapper"
informers2 "github.com/lterrac/system-autoscaler/pkg/informers"
sainformers "github.com/lterrac/system-autoscaler/pkg/generated/informers/externalversions"
cm "github.com/lterrac/system-autoscaler/pkg/pod-autoscaler/pkg/contention-manager"
metricsgetter "github.com/lterrac/system-autoscaler/pkg/pod-autoscaler/pkg/metrics"
resupd "github.com/lterrac/system-autoscaler/pkg/pod-autoscaler/pkg/pod-resource-updater"
"github.com/lterrac/system-autoscaler/pkg/podscale-controller/pkg/types"
metricsclient "k8s.io/metrics/pkg/client/custom_metrics"
coreinformers "k8s.io/client-go/informers"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/tools/clientcmd"
"k8s.io/klog/v2"
// Uncomment the following line to load the gcp plugin (only required to authenticate against GKE clusters).
// _ "k8s.io/client-go/plugin/pkg/client/auth/gcp"
clientset "github.com/lterrac/system-autoscaler/pkg/generated/clientset/versioned"
"github.com/lterrac/system-autoscaler/pkg/pod-autoscaler/pkg/recommender"
"github.com/lterrac/system-autoscaler/pkg/signals"
)
var (
masterURL string
kubeconfig string
)
// main wires together the pod-autoscaler control loops —
// recommender -> contention manager -> resource updater — connected by
// buffered NodeScales channels and sharing the same informer factories.
func main() {
	klog.InitFlags(nil)
	flag.Parse()
	// set up signals so we handle the first shutdown signal gracefully
	stopCh := signals.SetupSignalHandler()
	cfg, err := clientcmd.BuildConfigFromFlags(masterURL, kubeconfig)
	if err != nil {
		klog.Fatalf("Error building kubeconfig: %s", err.Error())
	}
	client, err := clientset.NewForConfig(cfg)
	if err != nil {
		klog.Fatalf("Error building system-autoscaler clientset: %s", err.Error())
	}
	kubernetesClient, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		klog.Fatalf("Error building kubernetes clientset: %s", err.Error())
	}
	//TODO: tune refresh interval
	mapper, err := dynamicmapper.NewRESTMapper(kubernetesClient, time.Second)
	if err != nil {
		klog.Fatalf("Error building REST Mapper: %s", err.Error())
	}
	metricsGetter := metricsgetter.NewDefaultGetter(cfg, mapper, metricsclient.NewAvailableAPIsGetter(kubernetesClient))
	// Shared informer factories for the custom resources and core resources.
	saInformerFactory := sainformers.NewSharedInformerFactory(client, time.Second*30)
	coreInformerFactory := coreinformers.NewSharedInformerFactory(kubernetesClient, time.Second*30)
	// TODO: Check name of this variable
	informers := informers2.Informers{
		Pod:                   coreInformerFactory.Core().V1().Pods(),
		Node:                  coreInformerFactory.Core().V1().Nodes(),
		Service:               coreInformerFactory.Core().V1().Services(),
		PodScale:              saInformerFactory.Systemautoscaler().V1beta1().PodScales(),
		ServiceLevelAgreement: saInformerFactory.Systemautoscaler().V1beta1().ServiceLevelAgreements(),
	}
	//TODO: should be renamed
	//TODO: we should try without buffer
	recommenderOut := make(chan types.NodeScales, 10000)
	contentionManagerOut := make(chan types.NodeScales, 10000)
	// TODO: adjust arguments to recommender
	recommenderController := recommender.NewController(
		kubernetesClient,
		client,
		metricsGetter,
		informers,
		recommenderOut,
	)
	contentionManagerController := cm.NewController(
		kubernetesClient,
		client,
		informers,
		recommenderOut,
		contentionManagerOut,
	)
	updaterController := resupd.NewController(
		kubernetesClient,
		client,
		informers,
		contentionManagerOut,
	)
	// notice that there is no need to run Start methods in a separate goroutine. (i.e. go kubeInformerFactory.Start(stopCh)
	// Start method is non-blocking and runs all registered safactory in a dedicated goroutine.
	saInformerFactory.Start(stopCh)
	coreInformerFactory.Start(stopCh)
	// NOTE(review): klog.Fatalf exits the process immediately, so the deferred
	// Shutdown calls below never run on these error paths — confirm intended.
	if err = recommenderController.Run(4, stopCh); err != nil {
		klog.Fatalf("Error running recommender: %s", err.Error())
	}
	defer recommenderController.Shutdown()
	if err = contentionManagerController.Run(4, stopCh); err != nil {
		klog.Fatalf("Error running update controller: %s", err.Error())
	}
	defer contentionManagerController.Shutdown()
	if err = updaterController.Run(4, stopCh); err != nil {
		klog.Fatalf("Error running update controller: %s", err.Error())
	}
	defer updaterController.Shutdown()
	// Block until a termination signal arrives, then let the defers clean up.
	<-stopCh
	klog.Info("Shutting down workers")
}
// init registers the standard out-of-cluster connection flags before
// flag.Parse runs in main.
func init() {
	flag.StringVar(&kubeconfig, "kubeconfig", "", "Path to a kubeconfig. Only required if out-of-cluster.")
	flag.StringVar(&masterURL, "master", "", "The address of the Kubernetes API server. Overrides any value in kubeconfig. Only required if out-of-cluster.")
}
|
<reponame>sturmundbraem/kursausschreibung<gh_stars>1-10
import Component from '@ember/component';
import { observer } from '@ember/object';
import { getString } from 'kursausschreibung/framework/translate';
// Maps each status key to its presentation: translated tooltip text,
// CSS class controlling the lamp color, and UIkit icon name.
const statuses = {
    green: { tooltip: getString('greenLamp'), className: 'lamp-green', icon: 'pencil' },
    chartreuse: { tooltip: getString('chartreuseLamp'), className: 'lamp-chartreuse', icon: 'check' },
    yellow: { tooltip: getString('yellowLamp'), className: 'lamp-yellow', icon: 'clock' },
    red: { tooltip: getString('redLamp'), className: 'lamp-red', icon: 'close' },
    orange: { tooltip: getString('orangeLamp'), className: 'lamp-orange', icon: 'ban' }
};

// Status-lamp component: renders a colored <span> whose tooltip, color class
// and icon track the 'status' attribute. Unknown statuses leave the previous
// presentation unchanged.
export default Component.extend({
    init() {
        this._super(...arguments);
        // trigger observer
        this.statusChanged();
    },
    // Re-derive tooltip/color/icon whenever 'status' changes.
    statusChanged: observer('status', function () {
        let status = statuses[this.get('status')];
        if (status !== undefined) {
            this.set('tooltip', status.tooltip);
            this.set('color', status.className);
            this.set('icon', status.icon);
        }
    }),
    tagName: 'span',
    attributeBindings: ['tooltip:data-uk-tooltip',"icon:uk-icon"],
    classNames: ['status-lamp','icon-lamp'],
    classNameBindings: ['color']
});
|
from typing import List, Dict
def count_license_terms(file_path: str, terms: List[str]) -> Dict[str, int]:
    """Count case-insensitive occurrences of each term in a text file.

    Args:
        file_path: Path to the text file to scan.
        terms: Terms to look for; matching is case-insensitive substring
            matching (so 'GPL' also matches inside 'LGPL').

    Returns:
        Mapping from each lower-cased term to its number of (non-overlapping)
        occurrences in the file; terms that never appear map to 0.

    Raises:
        OSError: If the file cannot be opened or read.
    """
    with open(file_path, 'r') as file:
        # Lower-case the content once so every lookup is case-insensitive.
        content = file.read().lower()
    # str.count already returns 0 for absent terms, so the original's
    # separate zero-initialization pass was redundant.
    return {term.lower(): content.count(term.lower()) for term in terms}
|
package com.pickth.comepennyrenewal.util;

/**
 * Created by Kim on 2016-11-11.
 *
 * Static endpoint constants: the API base URL and the S3 bucket URL that
 * serves uploaded files.
 */
public class StaticUrl {
    public static final String BASE_URL = "http://pickth.com";
    public static final String FILE_URL = "https://s3.ap-northeast-2.amazonaws.com/comepenny";
}
|
package com.qtimes.pavilion.base.layout;

/**
 * Created by liutao on 2017/2/25.
 *
 * Contract for custom views that hold releasable resources.
 */
public interface BaseView {
    /**
     * Releases the resources held by the implementing view.
     * Typically invoked from a BaseActivity or BaseFragment during teardown.
     */
    void release();

    /**
     * Tells whether the resources have already been released, guarding
     * against calling {@link #release()} repeatedly. Call this before
     * calling release().
     *
     * @return true if the resources have already been released
     */
    boolean isRelease();
}
|
<reponame>melkishengue/cpachecker
// NOTE(review): this appears to be a program-verification benchmark (the
// "_true_assert" suffix suggests every `assert false` is expected to be
// proven unreachable) — do not alter the control flow or assertions.
public class FunctionCall_true_assert {
    public static void main(String[] args) {
        int n1 = 1;
        int n2 = 1;
        int n3 = 2;
        if (n1 == n2) { // 1 == 1: taken
            if (n1 != n3) { // 1 != 2: taken, so n3 becomes 1
                n3 = 1;
                des();
            }
            if (n1 == n3) { // 1 == 1: taken
                des();
                n1 = n1 + n2 + n3; // n1 = 3
            } else {
                assert false; // not reached
            }
            if (n1 == n1 + n2) { // 3 == 4: false
                assert false; // not reached
            } else if (n1 == 2 * n2 + n3) { // 3 == 2*1+1: taken
                assert n3 == n2; // always true
            }
        }
        des();
    }

    // Helper with its own locals; its assert is unreachable since 1 != 2.
    public static void des() {
        int n1 = 1;
        int n2 = 2;
        if (n1 == n2) {
            assert (false); // not reached
        }
    }
}
|
# Package recipe for LMDB 0.9.28, built from the OpenLDAP source tree.
# NOTE(review): this looks like a ppkg/xcpkg-style recipe — confirm the exact
# package-manager dialect before editing the 'package set' keys.
package set summary "Lightning memory-mapped database: key-value data store"
package set webpage "https://symas.com/lmdb"
package set src.url "https://git.openldap.org/openldap/openldap/-/archive/LMDB_0.9.28/openldap-LMDB_0.9.28.tar.bz2"
package set src.sum "54f4a3a927793db950288e9254c0dfe35afc75af12cd92b8aaae0d1e990186c0"
package set license "OLDAP-2.8"
package set cdefine "MDB_USE_ROBUST=0"
# LMDB lives in the libraries/liblmdb subdirectory of the OpenLDAP tree.
package set sourced "libraries/liblmdb"
package set bsystem "make"
package set binsrcd 'YES'

# Build with the caller's toolchain variables and install into the package prefix.
build() {
    makew clean &&
    makew CC="$CC" XCFLAGS="'$CFLAGS $CPPFLAGS'" LDFLAGS="'$LDFLAGS'" AR="$AR" &&
    makew install prefix="$PACKAGE_INSTALL_DIR"
}
|
#!/usr/bin/env bash
# server_functions.bash
# Primarily intended to be sourced by other scripts
# Provides functions to use for communicating with the game server

# Include guard
[[ -n "${MINECRAFT_SERVER_COMMON_FUNCTIONS}" ]] && return
MINECRAFT_SERVER_COMMON_FUNCTIONS=true

# shellcheck source=./common_vars.bash
# common_vars.bash defines server_in_pipe, server_tmp_dir and server_dir.
. "$(dirname "$(realpath "${0}")")/common_vars.bash"

# Sends argument(s) to server to run as a command
server_cmd() { echo "${@}" >"${server_in_pipe}"; }

# Two args: 1st is command to run, second is string to check for
# Sends the command to the server and then waits for a match to the string to
# appear in the server's output
server_cmd_wait() {
    # Per-invocation FIFO ($$ = this shell's PID) so concurrent callers don't clash.
    local catch_pipe="${server_tmp_dir}/catch_pipe-$$"
    [[ -e ${catch_pipe} ]] && rm -f "${catch_pipe}"
    mkfifo "${catch_pipe}"
    #journalctl --unit=minecraft-server \
    #	--follow --lines=0 --output=cat --quiet >"${catch_pipe}" &
    # Stream new log lines into the FIFO; -F follows across log rotation.
    tail -F -n 0 "${server_dir}/logs/latest.log" >"${catch_pipe}" &
    logger_pid=$!
    # Have grep start checking for the string now, and close on first match
    grep -m 1 -E '^\[.*\]: '"${2}" <"${catch_pipe}" &
    grep_pid=$!
    server_cmd "${1}"
    wait "${grep_pid}"
    # If grep closed, match was found. Start cleaning up
    kill "${logger_pid}"
    rm "${catch_pipe}"
    return 0
}
|
#!/usr/bin/env bash
#
# Bump latest version to
# - _sass/jekyll-theme-chirpy.scss
# - assets/js/_commons/_copyright.js
# - assets/js/dist/*.js
# - jekyll-theme-chirpy.gemspec
#
# Required: gulp
set -eu

# Files containing a 'vMAJOR.MINOR.PATCH' token to rewrite.
ASSETS=(
  "_sass/jekyll-theme-chirpy.scss"
  "assets/js/.copyright"
)

GEM_SPEC="jekyll-theme-chirpy.gemspec"

NODE_META="package.json"

# Rewrite the version token in each asset, then rebuild the dist files.
# '[[:digit:]]\+' (not a single digit) so components >= 10 also match;
# the original pattern silently skipped versions like v5.2.10.
bump_assets() {
  _version="$1"
  for i in "${!ASSETS[@]}"; do
    sed -i "s/v[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+/v$_version/" "${ASSETS[$i]}"
  done
  gulp
}

# Rewrite the version number in the gemspec.
bump_gemspec() {
  sed -i "s/[[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+/$1/" "$GEM_SPEC"
}

# Rewrite the "version" field in package.json.
bump_node() {
  sed -i \
    "s,[\"]version[\"]: [\"][[:digit:]]\+\.[[:digit:]]\+\.[[:digit:]]\+[\"],\"version\": \"$1\"," \
    $NODE_META
}

# Apply the bump everywhere and commit the result (if anything changed).
bump() {
  bump_assets "$1"
  bump_gemspec "$1"
  bump_node "$1"
  if [[ -n $(git status . -s) ]]; then
    git add .
    git commit -m "Bump version to $1"
  fi
}

main() {
  if [[ -n $(git status . -s) ]]; then
    # Fixed message typo ("againt") and use a valid exit status (1, not -1).
    echo "Warning: commit unstaged files first, and then run this tool again."
    exit 1
  fi
  _latest_tag="$(git describe --tags --abbrev=0)"
  echo "Input a version number (hint: latest version is ${_latest_tag:1})"
  # -r prevents backslash mangling in the interactive input.
  read -r _version
  # Accept multi-digit components in every position (the original rejected
  # patch versions >= 10, e.g. 5.2.10).
  if [[ $_version =~ ^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$ ]]; then
    if git tag --list | grep -E -q "^v$_version$"; then
      echo "Error: version '$_version' already exists"
      exit 1
    fi
    echo "Bump version to $_version"
    bump "$_version"
    echo "Create tag v$_version"
    git tag "v$_version"
  else
    echo "Error: Illegal version number: '$_version'"
  fi
}

main
|
from typing import List
from html.parser import HTMLParser
class HTMLTextExtractor(HTMLParser):
    """HTMLParser subclass that collects the text content of a document."""

    def __init__(self):
        super().__init__()
        # One entry per non-empty text node, in document order.
        self.text_content = []

    def handle_data(self, data):
        # Skip whitespace-only text nodes (e.g. newlines between tags); the
        # original appended stripped-to-empty strings, polluting the result.
        stripped = data.strip()
        if stripped:
            self.text_content.append(stripped)


def extract_text_from_html(html: str) -> List[str]:
    """Return the non-empty, whitespace-trimmed text nodes of ``html`` in order.

    Args:
        html: HTML markup to parse (tolerant, via html.parser).

    Returns:
        List of text fragments with surrounding whitespace removed;
        whitespace-only fragments are omitted.
    """
    parser = HTMLTextExtractor()
    parser.feed(html)
    return parser.text_content
|
#!/bin/sh -e
# Helpers for mutating a kernel .config via scripts/config.
# Each helper reads the current state of $config and only writes when the
# desired value differs, keeping the output log to actual changes.
DIR=$PWD

# Set $config=y unless it already is.
config_enable () {
    ret=$(./scripts/config --state ${config})
    if [ ! "x${ret}" = "xy" ] ; then
        echo "Setting: ${config}=y"
        ./scripts/config --enable ${config}
    fi
}

# Set $config=n unless it already is.
config_disable () {
    ret=$(./scripts/config --state ${config})
    if [ ! "x${ret}" = "xn" ] ; then
        echo "Setting: ${config}=n"
        ./scripts/config --disable ${config}
    fi
}

# Set $config=m (build as module) unless it already is.
config_module () {
    ret=$(./scripts/config --state ${config})
    if [ ! "x${ret}" = "xm" ] ; then
        echo "Setting: ${config}=m"
        ./scripts/config --module ${config}
    fi
}

# Set the string option $config="$option" unless it already matches.
config_string () {
    ret=$(./scripts/config --state ${config})
    if [ ! "x${ret}" = "x${option}" ] ; then
        echo "Setting: ${config}=\"${option}\""
        ./scripts/config --set-str ${config} "${option}"
    fi
}

# Set the numeric option $config=$option unless it already matches.
config_value () {
    ret=$(./scripts/config --state ${config})
    if [ ! "x${ret}" = "x${option}" ] ; then
        echo "Setting: ${config}=${option}"
        ./scripts/config --set-val ${config} ${option}
    fi
}
cd ${DIR}/KERNEL/
#
# RCU Subsystem
#
config="CONFIG_IKCONFIG" ; config_enable
config="CONFIG_IKCONFIG_PROC" ; config_enable
config="CONFIG_LOG_BUF_SHIFT" ; option="18" ; config_value
config="CONFIG_MEMCG_SWAP_ENABLED" ; config_enable
config="CONFIG_RT_GROUP_SCHED" ; config_enable
config="CONFIG_SYSFS_SYSCALL" ; config_enable
config="CONFIG_SYSCTL_SYSCALL" ; config_enable
config="CONFIG_KALLSYMS_ALL" ; config_enable
config="CONFIG_EMBEDDED" ; config_enable
#
# Kernel Performance Events And Counters
#
config="CONFIG_SECCOMP_FILTER" ; config_enable
#
# CPU Core family selection
#
config="CONFIG_ARCH_VIRT" ; config_disable
config="CONFIG_ARCH_MVEBU" ; config_disable
config="CONFIG_ARCH_BCM" ; config_disable
config="CONFIG_ARCH_HIGHBANK" ; config_disable
#
# Device tree only
#
config="CONFIG_SOC_IMX50" ; config_enable
config="CONFIG_SOC_IMX6SL" ; config_enable
config="CONFIG_SOC_IMX6SX" ; config_enable
config="CONFIG_SOC_IMX6UL" ; config_enable
config="CONFIG_SOC_IMX7D" ; config_enable
config="CONFIG_SOC_LS1021A" ; config_enable
#
# Cortex-A/Cortex-M asymmetric multiprocessing platforms
#
config="CONFIG_SOC_VF610" ; config_enable
config="CONFIG_WAND_RFKILL" ; config_enable
#
# OMAP Feature Selections
#
config="CONFIG_POWER_AVS_OMAP" ; config_enable
config="CONFIG_POWER_AVS_OMAP_CLASS3" ; config_enable
config="CONFIG_OMAP_MUX_DEBUG" ; config_enable
#
# TI OMAP/AM/DM/DRA Family
#
config="CONFIG_SOC_AM43XX" ; config_enable
#
# OMAP Legacy Platform Data Board Type
#
config="CONFIG_MACH_OMAP_LDP" ; config_disable
config="CONFIG_MACH_OMAP3517EVM" ; config_disable
config="CONFIG_MACH_OMAP3_PANDORA" ; config_disable
config="CONFIG_MACH_NOKIA_RX51" ; config_disable
config="CONFIG_ARCH_EXYNOS" ; config_disable
config="CONFIG_ARCH_VEXPRESS" ; config_disable
config="CONFIG_ARCH_WM8850" ; config_disable
#
# Processor Features
#
config="CONFIG_PL310_ERRATA_753970" ; config_disable
config="CONFIG_ARM_ERRATA_754327" ; config_disable
config="CONFIG_ARM_ERRATA_773022" ; config_disable
#first check..
#exit
#
# Kernel Features
#
config="CONFIG_MEMORY_ISOLATION" ; config_enable
config="CONFIG_CMA" ; config_enable
config="CONFIG_CMA_DEBUG" ; config_disable
config="CONFIG_SECCOMP" ; config_enable
config="CONFIG_PARAVIRT" ; config_disable
config="CONFIG_XEN" ; config_disable
#
# CPU Frequency scaling
#
config="CONFIG_CPU_FREQ_STAT" ; config_enable
config="CONFIG_CPU_FREQ_STAT_DETAILS" ; config_enable
config="CONFIG_CPU_FREQ_GOV_POWERSAVE" ; config_enable
config="CONFIG_CPU_FREQ_GOV_USERSPACE" ; config_enable
config="CONFIG_CPU_FREQ_GOV_ONDEMAND" ; config_enable
config="CONFIG_CPU_FREQ_GOV_CONSERVATIVE" ; config_enable
#
# CPU frequency scaling drivers
#
config="CONFIG_CPUFREQ_DT" ; config_enable
config="CONFIG_ARM_IMX6Q_CPUFREQ" ; config_enable
config="CONFIG_ARM_OMAP2PLUS_CPUFREQ" ; config_disable
config="CONFIG_ARM_TEGRA124_CPUFREQ" ; config_enable
config="CONFIG_QORIQ_CPUFREQ" ; config_enable
#
# CPU Idle
#
config="CONFIG_CPU_IDLE" ; config_enable
#
# ARM CPU Idle Drivers
#
config="CONFIG_ARM_CPUIDLE" ; config_enable
#
# At least one emulation must be selected
#
config="CONFIG_KERNEL_MODE_NEON" ; config_enable
#
# Power management options
#
config="CONFIG_PM_AUTOSLEEP" ; config_enable
config="CONFIG_PM_WAKELOCKS" ; config_enable
config="CONFIG_PM_WAKELOCKS_GC" ; config_enable
#
# Networking options
#
config="CONFIG_IP_PNP" ; config_enable
config="CONFIG_IP_PNP_DHCP" ; config_enable
config="CONFIG_IP_PNP_BOOTP" ; config_enable
config="CONFIG_IP_PNP_RARP" ; config_enable
config="CONFIG_NETLABEL" ; config_enable
#
# DCCP Kernel Hacking
#
config="CONFIG_MAC802154" ; config_module
#
# CAN Device Drivers
#
config="CONFIG_CAN_C_CAN" ; config_module
config="CONFIG_CAN_C_CAN_PLATFORM" ; config_module
#
# CAN SPI interfaces
#
config="CONFIG_CAN_MCP251X" ; config_module
#
# Bluetooth device drivers
#
config="CONFIG_BT_HCIUART" ; config_module
config="CONFIG_BT_HCIUART_H4" ; config_enable
config="CONFIG_BT_HCIUART_BCSP" ; config_enable
config="CONFIG_BT_HCIUART_ATH3K" ; config_enable
config="CONFIG_BT_HCIUART_LL" ; config_enable
config="CONFIG_BT_HCIUART_3WIRE" ; config_enable
config="CONFIG_BT_HCIUART_BCM" ; config_enable
config="CONFIG_BT_HCIUART_QCA" ; config_enable
config="CONFIG_BT_HCIBCM203X" ; config_module
config="CONFIG_BT_HCIBPA10X" ; config_module
config="CONFIG_BT_HCIBFUSB" ; config_module
config="CONFIG_NFC_NCI" ; config_module
config="CONFIG_NFC_NCI_SPI" ; config_module
config="CONFIG_NFC_HCI" ; config_module
config="CONFIG_NFC_SHDLC" ; config_enable
#
# Near Field Communication (NFC) devices
#
config="CONFIG_NFC_WILINK" ; config_module
config="CONFIG_NFC_PN544" ; config_module
config="CONFIG_NFC_PN544_I2C" ; config_module
config="CONFIG_NFC_MICROREAD" ; config_module
config="CONFIG_NFC_MICROREAD_I2C" ; config_module
#
# Generic Driver Options
#
config="CONFIG_DEVTMPFS_MOUNT" ; config_enable
config="CONFIG_FIRMWARE_IN_KERNEL" ; config_enable
#config="CONFIG_EXTRA_FIRMWARE" ; option="am335x-pm-firmware.elf am335x-bone-scale-data.bin am335x-evm-scale-data.bin am43x-evm-scale-data.bin" ; config_string
config="CONFIG_EXTRA_FIRMWARE_DIR" ; option="firmware" ; config_string
config="CONFIG_DMA_CMA" ; config_enable
config="CONFIG_CMA_SIZE_MBYTES" ; option=24 ; config_value
#
# Bus devices
#
config="CONFIG_OMAP_OCP2SCP" ; config_enable
#
# Device Tree and Open Firmware support
#
config="CONFIG_OF_CONFIGFS" ; config_enable
config="CONFIG_PARPORT" ; config_disable
#
# Misc devices
#
config="CONFIG_BONE_CAPEMGR" ; config_enable
config="CONFIG_TIEQEP" ; config_module
#
# EEPROM support
#
config="CONFIG_EEPROM_AT24" ; config_enable
config="CONFIG_EEPROM_AT25" ; config_enable
config="CONFIG_EEPROM_93XX46" ; config_module
#
# Texas Instruments shared transport line discipline
#
config="CONFIG_SENSORS_LIS3_SPI" ; config_module
#
# Argus cape driver for beaglebone black
#
config="CONFIG_CAPE_BONE_ARGUS" ; config_enable
#
# SCSI device support
#
config="CONFIG_SCSI_MOD" ; config_enable
config="CONFIG_SCSI" ; config_enable
config="CONFIG_SCSI_PROC_FS" ; config_enable
#
# SCSI support type (disk, tape, CD-ROM)
#
config="CONFIG_BLK_DEV_SD" ; config_enable
#
# SCSI Transports
#
config="CONFIG_ATA" ; config_enable
#
# Controllers with non-SFF native interface
#
config="CONFIG_SATA_AHCI_PLATFORM" ; config_enable
config="CONFIG_AHCI_IMX" ; config_enable
config="CONFIG_AHCI_SUNXI" ; config_enable
config="CONFIG_AHCI_TEGRA" ; config_enable
config="CONFIG_AHCI_QORIQ" ; config_enable
#
# PATA SFF controllers with BMDMA
#
config="CONFIG_PATA_IMX" ; config_enable
#
# PIO-only SFF controllers
#
config="CONFIG_PATA_PLATFORM" ; config_enable
config="CONFIG_PATA_OF_PLATFORM" ; config_enable
#
# Generic fallback / legacy drivers
#
config="CONFIG_MII" ; config_enable
#
# Distributed Switch Architecture drivers
#
config="CONFIG_SUN4I_EMAC" ; config_enable
config="CONFIG_MVMDIO" ; config_disable
config="CONFIG_KS8851" ; config_module
config="CONFIG_ENCX24J600" ; config_module
config="CONFIG_NET_VENDOR_SEEQ" ; config_enable
config="CONFIG_STMMAC_ETH" ; config_enable
config="CONFIG_STMMAC_PLATFORM" ; config_enable
config="CONFIG_DWMAC_GENERIC" ; config_enable
config="CONFIG_DWMAC_ROCKCHIP" ; config_enable
config="CONFIG_DWMAC_SOCFPGA" ; config_enable
config="CONFIG_DWMAC_SUNXI" ; config_enable
config="CONFIG_TI_DAVINCI_EMAC" ; config_enable
config="CONFIG_TI_DAVINCI_MDIO" ; config_enable
config="CONFIG_TI_DAVINCI_CPDMA" ; config_enable
config="CONFIG_TI_CPSW_ALE" ; config_enable
config="CONFIG_TI_CPSW" ; config_enable
config="CONFIG_TI_CPTS" ; config_enable
#
# MII PHY device drivers
#
config="CONFIG_SMSC_PHY" ; config_enable
config="CONFIG_MICREL_PHY" ; config_enable
config="CONFIG_DP83848_PHY" ; config_enable
#
# USB Network Adapters
#
config="CONFIG_USB_LAN78XX" ; config_module
config="CONFIG_USB_NET_CH9200" ; config_module
config="CONFIG_LIBERTAS_SPI" ; config_module
config="CONFIG_MWIFIEX" ; config_module
config="CONFIG_MWIFIEX_SDIO" ; config_module
config="CONFIG_MWIFIEX_USB" ; config_module
config="CONFIG_RTL8XXXU" ; config_module
config="CONFIG_USB_ZD1201" ; config_module
#
# WiMAX Wireless Broadband devices
#
config="CONFIG_IEEE802154_FAKELB" ; config_module
config="CONFIG_IEEE802154_AT86RF230" ; config_module
config="CONFIG_IEEE802154_MRF24J40" ; config_module
config="CONFIG_IEEE802154_CC2520" ; config_module
config="CONFIG_IEEE802154_ATUSB" ; config_module
#
# Input Device Drivers
#
config="CONFIG_KEYBOARD_ADP5589" ; config_module
config="CONFIG_KEYBOARD_QT1070" ; config_module
config="CONFIG_KEYBOARD_LKKBD" ; config_module
config="CONFIG_KEYBOARD_GPIO_POLLED" ; config_module
config="CONFIG_KEYBOARD_TCA6416" ; config_module
config="CONFIG_KEYBOARD_TCA8418" ; config_module
config="CONFIG_KEYBOARD_MATRIX" ; config_module
config="CONFIG_KEYBOARD_LM8333" ; config_module
config="CONFIG_KEYBOARD_MCS" ; config_module
config="CONFIG_KEYBOARD_MPR121" ; config_module
config="CONFIG_KEYBOARD_SNVS_PWRKEY" ; config_module
config="CONFIG_KEYBOARD_NEWTON" ; config_module
config="CONFIG_KEYBOARD_SAMSUNG" ; config_module
config="CONFIG_KEYBOARD_SUNKBD" ; config_module
config="CONFIG_KEYBOARD_SUN4I_LRADC" ; config_module
config="CONFIG_KEYBOARD_XTKBD" ; config_module
config="CONFIG_KEYBOARD_CAP11XX" ; config_module
config="CONFIG_KEYBOARD_BCM" ; config_module
config="CONFIG_MOUSE_PS2_TOUCHKIT" ; config_enable
config="CONFIG_MOUSE_SERIAL" ; config_module
config="CONFIG_MOUSE_BCM5974" ; config_module
config="CONFIG_MOUSE_CYAPA" ; config_module
config="CONFIG_MOUSE_ELAN_I2C" ; config_module
config="CONFIG_MOUSE_VSXXXAA" ; config_module
config="CONFIG_MOUSE_GPIO" ; config_module
config="CONFIG_INPUT_JOYSTICK" ; config_enable
config="CONFIG_JOYSTICK_ANALOG" ; config_module
config="CONFIG_JOYSTICK_A3D" ; config_module
config="CONFIG_JOYSTICK_ADI" ; config_module
config="CONFIG_JOYSTICK_COBRA" ; config_module
config="CONFIG_JOYSTICK_GF2K" ; config_module
config="CONFIG_JOYSTICK_GRIP" ; config_module
config="CONFIG_JOYSTICK_GRIP_MP" ; config_module
config="CONFIG_JOYSTICK_GUILLEMOT" ; config_module
config="CONFIG_JOYSTICK_INTERACT" ; config_module
config="CONFIG_JOYSTICK_SIDEWINDER" ; config_module
config="CONFIG_JOYSTICK_TMDC" ; config_module
config="CONFIG_JOYSTICK_IFORCE" ; config_module
config="CONFIG_JOYSTICK_IFORCE_USB" ; config_enable
config="CONFIG_JOYSTICK_IFORCE_232" ; config_enable
config="CONFIG_JOYSTICK_WARRIOR" ; config_module
config="CONFIG_JOYSTICK_MAGELLAN" ; config_module
config="CONFIG_JOYSTICK_SPACEORB" ; config_module
config="CONFIG_JOYSTICK_SPACEBALL" ; config_module
config="CONFIG_JOYSTICK_STINGER" ; config_module
config="CONFIG_JOYSTICK_TWIDJOY" ; config_module
config="CONFIG_JOYSTICK_ZHENHUA" ; config_module
config="CONFIG_JOYSTICK_AS5011" ; config_module
config="CONFIG_JOYSTICK_JOYDUMP" ; config_module
config="CONFIG_JOYSTICK_XPAD" ; config_module
config="CONFIG_JOYSTICK_XPAD_FF" ; config_enable
config="CONFIG_JOYSTICK_XPAD_LEDS" ; config_enable
#exit
config="CONFIG_TOUCHSCREEN_AD7879_SPI" ; config_module
config="CONFIG_TOUCHSCREEN_AR1021_I2C" ; config_module
config="CONFIG_TOUCHSCREEN_AUO_PIXCIR" ; config_module
config="CONFIG_TOUCHSCREEN_BU21013" ; config_module
config="CONFIG_TOUCHSCREEN_CHIPONE_ICN8318" ; config_module
config="CONFIG_TOUCHSCREEN_CY8CTMG110" ; config_module
config="CONFIG_TOUCHSCREEN_CYTTSP_CORE" ; config_module
config="CONFIG_TOUCHSCREEN_CYTTSP_I2C" ; config_module
config="CONFIG_TOUCHSCREEN_CYTTSP_SPI" ; config_module
config="CONFIG_TOUCHSCREEN_CYTTSP4_CORE" ; config_module
config="CONFIG_TOUCHSCREEN_CYTTSP4_I2C" ; config_module
config="CONFIG_TOUCHSCREEN_CYTTSP4_SPI" ; config_module
config="CONFIG_TOUCHSCREEN_DA9052" ; config_module
config="CONFIG_TOUCHSCREEN_EETI" ; config_module
config="CONFIG_TOUCHSCREEN_EGALAX" ; config_module
config="CONFIG_TOUCHSCREEN_FT6236" ; config_module
config="CONFIG_TOUCHSCREEN_GOODIX" ; config_module
config="CONFIG_TOUCHSCREEN_ILI210X" ; config_module
config="CONFIG_TOUCHSCREEN_ELAN" ; config_module
config="CONFIG_TOUCHSCREEN_WACOM_I2C" ; config_module
config="CONFIG_TOUCHSCREEN_MAX11801" ; config_module
config="CONFIG_TOUCHSCREEN_MMS114" ; config_module
config="CONFIG_TOUCHSCREEN_IMX6UL_TSC" ; config_module
config="CONFIG_TOUCHSCREEN_EDT_FT5X06" ; config_module
config="CONFIG_TOUCHSCREEN_PIXCIR" ; config_module
config="CONFIG_TOUCHSCREEN_WDT87XX_I2C" ; config_module
config="CONFIG_TOUCHSCREEN_TSC_SERIO" ; config_module
config="CONFIG_TOUCHSCREEN_TSC2004" ; config_module
config="CONFIG_TOUCHSCREEN_ST1232" ; config_module
config="CONFIG_TOUCHSCREEN_SX8654" ; config_module
config="CONFIG_TOUCHSCREEN_ZFORCE" ; config_module
config="CONFIG_TOUCHSCREEN_ROHM_BU21023" ; config_module
config="CONFIG_INPUT_AD714X" ; config_module
config="CONFIG_INPUT_AD714X_I2C" ; config_module
config="CONFIG_INPUT_AD714X_SPI" ; config_module
config="CONFIG_INPUT_BMA150" ; config_module
config="CONFIG_INPUT_E3X0_BUTTON" ; config_module
config="CONFIG_INPUT_MC13783_PWRBUTTON" ; config_module
config="CONFIG_INPUT_MPU3050" ; config_module
config="CONFIG_INPUT_GP2A" ; config_module
config="CONFIG_INPUT_GPIO_TILT_POLLED" ; config_module
config="CONFIG_INPUT_KXTJ9" ; config_module
config="CONFIG_INPUT_KXTJ9_POLLED_MODE" ; config_enable
config="CONFIG_INPUT_REGULATOR_HAPTIC" ; config_module
config="CONFIG_INPUT_TPS65218_PWRBUTTON" ; config_enable
config="CONFIG_INPUT_AXP20X_PEK" ; config_enable
config="CONFIG_INPUT_TWL4030_PWRBUTTON" ; config_enable
config="CONFIG_INPUT_TWL4030_VIBRA" ; config_enable
config="CONFIG_INPUT_TWL6040_VIBRA" ; config_enable
config="CONFIG_INPUT_UINPUT" ; config_enable
config="CONFIG_INPUT_PALMAS_PWRBUTTON" ; config_enable
config="CONFIG_INPUT_PCF8574" ; config_module
config="CONFIG_INPUT_GPIO_ROTARY_ENCODER" ; config_module
config="CONFIG_INPUT_DA9052_ONKEY" ; config_module
config="CONFIG_INPUT_DA9055_ONKEY" ; config_module
config="CONFIG_INPUT_ADXL34X" ; config_module
config="CONFIG_INPUT_ADXL34X_I2C" ; config_module
config="CONFIG_INPUT_ADXL34X_SPI" ; config_module
config="CONFIG_INPUT_IMS_PCU" ; config_module
config="CONFIG_INPUT_CMA3000" ; config_module
config="CONFIG_INPUT_CMA3000_I2C" ; config_module
config="CONFIG_INPUT_DRV260X_HAPTICS" ; config_module
config="CONFIG_INPUT_DRV2667_HAPTICS" ; config_module
#exit
#
# Character devices
#
config="CONFIG_DEVKMEM" ; config_enable
#
# Serial drivers
#
config="CONFIG_SERIAL_8250_DMA" ; config_disable
config="CONFIG_SERIAL_8250_NR_UARTS" ; option="6" ; config_value
config="CONFIG_SERIAL_8250_RUNTIME_UARTS" ; option="6" ; config_value
config="CONFIG_SERIAL_8250_OMAP" ; config_disable
#config="CONFIG_SERIAL_8250_OMAP_TTYO_FIXUP" ; config_enable
#
# Non-8250 serial port support
#
#config="CONFIG_SERIAL_OMAP" ; config_disable
config="CONFIG_SERIAL_ARC" ; config_disable
config="CONFIG_SERIAL_FSL_LPUART" ; config_enable
config="CONFIG_SERIAL_FSL_LPUART_CONSOLE" ; config_enable
config="CONFIG_TCG_TPM" ; config_module
config="CONFIG_TCG_TIS_I2C_ATMEL" ; config_module
#
# I2C support
#
config="CONFIG_I2C_CHARDEV" ; config_enable
#
# Multiplexer I2C Chip support
#
config="CONFIG_I2C_MUX_PCA954x" ; config_enable
config="CONFIG_I2C_MUX_PINCTRL" ; config_enable
#
# I2C system bus drivers (mostly embedded / system-on-chip)
#
config="CONFIG_I2C_IMX" ; config_enable
config="CONFIG_I2C_MV64XXX" ; config_enable
config="CONFIG_I2C_RK3X" ; config_enable
config="CONFIG_I2C_SUN6I_P2WI" ; config_enable
#
# External I2C/SMBus adapter drivers
#
config="CONFIG_I2C_DLN2" ; config_module
#
# SPI Master Controller Drivers
#
config="CONFIG_SPI_BITBANG" ; config_module
config="CONFIG_SPI_DLN2" ; config_module
config="CONFIG_SPI_GPIO" ; config_module
config="CONFIG_SPI_TEGRA114" ; config_module
config="CONFIG_SPI_TEGRA20_SLINK" ; config_module
#
# SPI Protocol Masters
#
config="CONFIG_SPI_SPIDEV" ; config_module
#
# PPS clients support
#
config="CONFIG_PPS_CLIENT_GPIO" ; config_module
#
# Pin controllers
#
config="CONFIG_PINCTRL_TI_IODELAY" ; config_enable
config="CONFIG_GPIO_MAX730X" ; config_module
#
# Memory mapped GPIO drivers
#
config="CONFIG_GPIO_SYSCON" ; config_enable
#
# I2C GPIO expanders:
#
config="CONFIG_GPIO_ADP5588" ; config_module
config="CONFIG_GPIO_ADNP" ; config_module
config="CONFIG_GPIO_MAX7300" ; config_module
config="CONFIG_GPIO_MAX732X" ; config_module
config="CONFIG_GPIO_PCA953X" ; config_enable
config="CONFIG_GPIO_PCA953X_IRQ" ; config_enable
config="CONFIG_GPIO_PCF857X" ; config_module
config="CONFIG_GPIO_SX150X" ; config_enable
#
# MFD GPIO expanders
#
config="CONFIG_GPIO_DA9052" ; config_enable
config="CONFIG_GPIO_DA9055" ; config_enable
config="CONFIG_GPIO_DLN2" ; config_module
config="CONFIG_GPIO_TPS65910" ; config_enable
#
# SPI GPIO expanders:
#
config="CONFIG_GPIO_74X164" ; config_module
config="CONFIG_GPIO_MAX7301" ; config_module
config="CONFIG_GPIO_MCP23S08" ; config_module
config="CONFIG_GPIO_MC33880" ; config_module
#
# 1-wire Bus Masters
#
config="CONFIG_W1_MASTER_MXC" ; config_module
config="CONFIG_W1_MASTER_DS1WM" ; config_module
config="CONFIG_W1_MASTER_GPIO" ; config_module
config="CONFIG_HDQ_MASTER_OMAP" ; config_module
#
# 1-wire Slaves
#
config="CONFIG_W1_SLAVE_DS2408" ; config_module
config="CONFIG_W1_SLAVE_DS2408_READBACK" ; config_enable
config="CONFIG_W1_SLAVE_DS2413" ; config_module
config="CONFIG_W1_SLAVE_DS2406" ; config_module
config="CONFIG_W1_SLAVE_DS2423" ; config_module
config="CONFIG_W1_SLAVE_DS2433_CRC" ; config_enable
config="CONFIG_W1_SLAVE_DS2760" ; config_module
config="CONFIG_W1_SLAVE_DS2780" ; config_module
config="CONFIG_W1_SLAVE_DS2781" ; config_module
config="CONFIG_W1_SLAVE_DS28E04" ; config_module
config="CONFIG_GENERIC_ADC_BATTERY" ; config_module
config="CONFIG_BATTERY_DA9052" ; config_module
config="CONFIG_AXP288_FUEL_GAUGE" ; config_module
config="CONFIG_CHARGER_GPIO" ; config_module
config="CONFIG_CHARGER_TPS65217" ; config_module
config="CONFIG_AXP20X_POWER" ; config_enable
config="CONFIG_POWER_RESET_GPIO_RESTART" ; config_enable
config="CONFIG_POWER_RESET_IMX" ; config_enable
config="CONFIG_POWER_AVS" ; config_enable
config="CONFIG_ROCKCHIP_IODOMAIN" ; config_enable
#exit
#
# Native drivers
#
config="CONFIG_SENSORS_AD7314" ; config_module
config="CONFIG_SENSORS_ADM1021" ; config_module
config="CONFIG_SENSORS_ADM1025" ; config_module
config="CONFIG_SENSORS_ADM1026" ; config_module
config="CONFIG_SENSORS_ADM1031" ; config_module
config="CONFIG_SENSORS_ADT7X10" ; config_module
config="CONFIG_SENSORS_ADT7310" ; config_module
config="CONFIG_SENSORS_ADT7410" ; config_module
config="CONFIG_SENSORS_DS1621" ; config_module
config="CONFIG_SENSORS_DA9052_ADC" ; config_module
config="CONFIG_SENSORS_DA9055" ; config_module
config="CONFIG_SENSORS_F71805F" ; config_module
config="CONFIG_SENSORS_GL518SM" ; config_module
config="CONFIG_SENSORS_GL520SM" ; config_module
config="CONFIG_SENSORS_GPIO_FAN" ; config_enable
config="CONFIG_SENSORS_HIH6130" ; config_module
config="CONFIG_SENSORS_IIO_HWMON" ; config_module
config="CONFIG_SENSORS_IT87" ; config_module
config="CONFIG_SENSORS_POWR1220" ; config_module
config="CONFIG_SENSORS_LTC2945" ; config_module
config="CONFIG_SENSORS_LTC4222" ; config_module
config="CONFIG_SENSORS_LTC4260" ; config_module
config="CONFIG_SENSORS_MAX1619" ; config_module
config="CONFIG_SENSORS_MAX197" ; config_module
config="CONFIG_SENSORS_MAX6697" ; config_module
config="CONFIG_SENSORS_MAX31790" ; config_module
config="CONFIG_SENSORS_MCP3021" ; config_module
config="CONFIG_SENSORS_LM63" ; config_module
config="CONFIG_SENSORS_LM75" ; config_module
config="CONFIG_SENSORS_LM77" ; config_module
config="CONFIG_SENSORS_LM78" ; config_module
config="CONFIG_SENSORS_LM80" ; config_module
config="CONFIG_SENSORS_LM83" ; config_module
config="CONFIG_SENSORS_LM85" ; config_module
config="CONFIG_SENSORS_LM87" ; config_module
config="CONFIG_SENSORS_LM90" ; config_module
config="CONFIG_SENSORS_LM92" ; config_module
config="CONFIG_SENSORS_LM95234" ; config_module
config="CONFIG_SENSORS_PC87360" ; config_module
config="CONFIG_SENSORS_NCT6683" ; config_module
config="CONFIG_SENSORS_NCT7802" ; config_module
config="CONFIG_SENSORS_NCT7904" ; config_module
config="CONFIG_SENSORS_PCF8591" ; config_module
config="CONFIG_PMBUS" ; config_module
config="CONFIG_SENSORS_PMBUS" ; config_module
config="CONFIG_SENSORS_ADM1275" ; config_module
config="CONFIG_SENSORS_LM25066" ; config_module
config="CONFIG_SENSORS_LTC2978" ; config_module
config="CONFIG_SENSORS_LTC2978_REGULATOR" ; config_enable
config="CONFIG_SENSORS_MAX16064" ; config_module
config="CONFIG_SENSORS_MAX20751" ; config_module
config="CONFIG_SENSORS_MAX34440" ; config_module
config="CONFIG_SENSORS_MAX8688" ; config_module
config="CONFIG_SENSORS_TPS40422" ; config_module
config="CONFIG_SENSORS_UCD9000" ; config_module
config="CONFIG_SENSORS_UCD9200" ; config_module
config="CONFIG_SENSORS_ZL6100" ; config_module
config="CONFIG_SENSORS_PWM_FAN" ; config_module
config="CONFIG_SENSORS_SHT15" ; config_module
config="CONFIG_SENSORS_SHTC1" ; config_module
config="CONFIG_SENSORS_SMSC47M1" ; config_module
config="CONFIG_SENSORS_SMSC47B397" ; config_module
config="CONFIG_SENSORS_SCH5636" ; config_module
config="CONFIG_SENSORS_ADC128D818" ; config_module
config="CONFIG_SENSORS_INA209" ; config_module
config="CONFIG_SENSORS_INA2XX" ; config_module
config="CONFIG_SENSORS_TMP103" ; config_module
config="CONFIG_SENSORS_TWL4030_MADC" ; config_module
config="CONFIG_SENSORS_W83781D" ; config_module
config="CONFIG_SENSORS_W83L785TS" ; config_module
config="CONFIG_SENSORS_W83627HF" ; config_module
config="CONFIG_THERMAL_GOV_BANG_BANG" ; config_enable
config="CONFIG_CLOCK_THERMAL" ; config_enable
config="CONFIG_DEVFREQ_THERMAL" ; config_enable
config="CONFIG_IMX_THERMAL" ; config_enable
config="CONFIG_ROCKCHIP_THERMAL" ; config_enable
config="CONFIG_TEGRA_SOCTHERM" ; config_enable
#exit
#
# Texas Instruments thermal drivers
#
config="CONFIG_TI_SOC_THERMAL" ; config_enable
config="CONFIG_OMAP3_THERMAL" ; config_enable
config="CONFIG_WATCHDOG_NOWAYOUT" ; config_enable
#
# Watchdog Device Drivers
#
config="CONFIG_DA9052_WATCHDOG" ; config_enable
config="CONFIG_DW_WATCHDOG" ; config_enable
config="CONFIG_OMAP_WATCHDOG" ; config_enable
config="CONFIG_SUNXI_WATCHDOG" ; config_enable
config="CONFIG_TWL4030_WATCHDOG" ; config_enable
config="CONFIG_IMX2_WDT" ; config_enable
config="CONFIG_TEGRA_WATCHDOG" ; config_enable
#
# Multifunction device drivers
#
config="CONFIG_MFD_DA9055" ; config_enable
config="CONFIG_MFD_DA9063" ; config_enable
config="CONFIG_MFD_DLN2" ; config_enable
config="CONFIG_MFD_TPS65217" ; config_enable
config="CONFIG_MFD_TPS65218" ; config_enable
config="CONFIG_MFD_TPS65910" ; config_enable
config="CONFIG_MFD_WL1273_CORE" ; config_module
config="CONFIG_REGULATOR_USERSPACE_CONSUMER" ; config_enable
config="CONFIG_REGULATOR_ANATOP" ; config_enable
config="CONFIG_REGULATOR_AXP20X" ; config_enable
config="CONFIG_REGULATOR_DA9052" ; config_enable
config="CONFIG_REGULATOR_DA9055" ; config_disable
config="CONFIG_REGULATOR_DA9063" ; config_enable
config="CONFIG_REGULATOR_GPIO" ; config_enable
config="CONFIG_REGULATOR_MT6311" ; config_enable
config="CONFIG_REGULATOR_PBIAS" ; config_enable
config="CONFIG_REGULATOR_PFUZE100" ; config_enable
config="CONFIG_REGULATOR_PWM" ; config_enable
config="CONFIG_REGULATOR_TI_ABB" ; config_enable
config="CONFIG_REGULATOR_TPS65023" ; config_enable
config="CONFIG_REGULATOR_TPS6507X" ; config_enable
config="CONFIG_REGULATOR_TPS65217" ; config_enable
config="CONFIG_REGULATOR_TPS65218" ; config_enable
config="CONFIG_REGULATOR_TPS65910" ; config_enable
#exit
#
# Multimedia core support
#
config="CONFIG_VIDEO_V4L2_SUBDEV_API" ; config_enable
config="CONFIG_V4L2_MEM2MEM_DEV" ; config_enable
config="CONFIG_VIDEOBUF2_CORE" ; config_enable
config="CONFIG_VIDEOBUF2_MEMOPS" ; config_enable
config="CONFIG_VIDEOBUF2_DMA_CONTIG" ; config_enable
#
# Media platform / camera drivers (V4L2)
#
config="CONFIG_VIDEO_OMAP3" ; config_module
config="CONFIG_VIDEO_OMAP3_DEBUG" ; config_disable
config="CONFIG_SOC_CAMERA" ; config_module
config="CONFIG_SOC_CAMERA_PLATFORM" ; config_module
config="CONFIG_VIDEO_AM437X_VPFE" ; config_module
config="CONFIG_VIDEO_CODA" ; config_enable
config="CONFIG_VIDEO_MEM2MEM_DEINTERLACE" ; config_module
config="CONFIG_DVB_PLATFORM_DRIVERS" ; config_enable
config="CONFIG_DVB_C8SECTPFE" ; config_module
#
# soc_camera sensor drivers
#
config="CONFIG_SOC_CAMERA_IMX074" ; config_module
config="CONFIG_SOC_CAMERA_MT9M001" ; config_module
config="CONFIG_SOC_CAMERA_MT9M111" ; config_module
config="CONFIG_SOC_CAMERA_MT9T031" ; config_module
config="CONFIG_SOC_CAMERA_MT9T112" ; config_module
config="CONFIG_SOC_CAMERA_MT9V022" ; config_module
config="CONFIG_SOC_CAMERA_OV2640" ; config_module
config="CONFIG_SOC_CAMERA_OV5642" ; config_module
config="CONFIG_SOC_CAMERA_OV6650" ; config_module
config="CONFIG_SOC_CAMERA_OV772X" ; config_module
config="CONFIG_SOC_CAMERA_OV9640" ; config_module
config="CONFIG_SOC_CAMERA_OV9740" ; config_module
config="CONFIG_SOC_CAMERA_RJ54N1" ; config_module
config="CONFIG_SOC_CAMERA_TW9910" ; config_module
#exit
#
# Graphics support
#
config="CONFIG_IMX_IPUV3_CORE" ; config_enable
config="CONFIG_DRM" ; config_enable
config="CONFIG_DRM_KMS_HELPER" ; config_enable
#
# I2C encoder or helper chips
#
config="CONFIG_DRM_I2C_ADV7511" ; config_module
config="CONFIG_DRM_I2C_CH7006" ; config_module
config="CONFIG_DRM_I2C_SIL164" ; config_module
config="CONFIG_DRM_OMAP" ; config_enable
config="CONFIG_OMAP2_DSS" ; config_enable
#
# OMAPDRM External Display Device Drivers
#
config="CONFIG_DISPLAY_ENCODER_OPA362" ; config_enable
config="CONFIG_DISPLAY_ENCODER_TFP410" ; config_enable
config="CONFIG_DISPLAY_ENCODER_TPD12S015" ; config_enable
config="CONFIG_DISPLAY_CONNECTOR_DVI" ; config_enable
config="CONFIG_DISPLAY_CONNECTOR_HDMI" ; config_enable
config="CONFIG_DISPLAY_PANEL_DPI" ; config_enable
config="CONFIG_DRM_TILCDC" ; config_enable
#
# Display Interface Bridges
#
config="CONFIG_DRM_DW_HDMI" ; config_enable
config="CONFIG_DRM_DW_HDMI_AHB_AUDIO" ; config_module
config="CONFIG_DRM_IMX" ; config_enable
config="CONFIG_DRM_IMX_PARALLEL_DISPLAY" ; config_enable
config="CONFIG_DRM_IMX_TVE" ; config_enable
config="CONFIG_DRM_IMX_LDB" ; config_enable
config="CONFIG_DRM_IMX_IPUV3" ; config_enable
config="CONFIG_DRM_IMX_HDMI" ; config_enable
config="CONFIG_DRM_ETNAVIV" ; config_module
#exit
#
# Frame buffer hardware drivers
#
config="CONFIG_FB_MX3" ; config_disable
config="CONFIG_FB_SSD1307" ; config_enable
config="CONFIG_BACKLIGHT_PWM" ; config_enable
config="CONFIG_BACKLIGHT_GPIO" ; config_enable
#
# HD-Audio
#
config="CONFIG_SND_EDMA_SOC" ; config_module
config="CONFIG_SND_DAVINCI_SOC_GENERIC_EVM" ; config_module
config="CONFIG_SND_AM33XX_SOC_EVM" ; config_module
#
# SoC Audio support for Freescale i.MX boards:
#
config="CONFIG_SND_OMAP_SOC_HDMI_AUDIO" ; config_module
#
# CODEC drivers
#
config="CONFIG_SND_SOC_TLV320AIC31XX" ; config_module
#
# HID support
#
config="CONFIG_HID" ; config_enable
config="CONFIG_UHID" ; config_enable
config="CONFIG_HID_GENERIC" ; config_enable
#
# Special HID drivers
#
config="CONFIG_HID_APPLEIR" ; config_module
config="CONFIG_HID_GFRM" ; config_module
config="CONFIG_HID_GT683R" ; config_module
config="CONFIG_HID_LOGITECH" ; config_enable
config="CONFIG_HID_LOGITECH_DJ" ; config_enable
config="CONFIG_HID_LOGITECH_HIDPP" ; config_enable
config="CONFIG_HID_SENSOR_CUSTOM_SENSOR" ; config_module
#
# USB HID support
#
config="CONFIG_USB_HID" ; config_enable
#
# I2C HID support
#
config="CONFIG_USB_COMMON" ; config_enable
config="CONFIG_USB" ; config_enable
#
# Miscellaneous USB options
#
config="CONFIG_USB_OTG" ; config_enable
#
# USB Host Controller Drivers
#
config="CONFIG_USB_XHCI_HCD" ; config_enable
config="CONFIG_USB_XHCI_PLATFORM" ; config_enable
config="CONFIG_USB_EHCI_HCD" ; config_enable
config="CONFIG_USB_EHCI_HCD_OMAP" ; config_enable
config="CONFIG_USB_EHCI_TEGRA" ; config_enable
config="CONFIG_USB_EHCI_HCD_PLATFORM" ; config_enable
config="CONFIG_USB_OHCI_HCD" ; config_disable
#
# NOTE: USB_STORAGE depends on SCSI but BLK_DEV_SD may
# also be needed; see USB_STORAGE Help for more info
#
config="CONFIG_USB_STORAGE" ; config_enable
#
# USB Imaging devices
#
config="CONFIG_USBIP_CORE" ; config_module
config="CONFIG_USBIP_VHCI_HCD" ; config_module
config="CONFIG_USBIP_HOST" ; config_module
config="CONFIG_USBIP_DEBUG" ; config_disable
config="CONFIG_USB_MUSB_HDRC" ; config_enable
config="CONFIG_USB_MUSB_HOST" ; config_disable
config="CONFIG_USB_MUSB_GADGET" ; config_disable
config="CONFIG_USB_MUSB_DUAL_ROLE" ; config_enable
#
# Platform Glue Layer
#
config="CONFIG_USB_MUSB_SUNXI" ; config_enable
config="CONFIG_USB_MUSB_TUSB6010" ; config_enable
config="CONFIG_USB_MUSB_OMAP2PLUS" ; config_enable
config="CONFIG_USB_MUSB_AM35X" ; config_enable
config="CONFIG_USB_MUSB_DSPS" ; config_enable
config="CONFIG_USB_MUSB_AM335X_CHILD" ; config_enable
#
# MUSB DMA mode
#
config="CONFIG_MUSB_PIO_ONLY" ; config_enable
config="CONFIG_USB_DWC3" ; config_enable
config="CONFIG_USB_DWC3_HOST" ; config_disable
config="CONFIG_USB_DWC3_GADGET" ; config_disable
config="CONFIG_USB_DWC3_DUAL_ROLE" ; config_enable
#
# Platform Glue Driver Support
#
config="CONFIG_USB_DWC3_OF_SIMPLE" ; config_enable
#
# Debugging features
#
config="CONFIG_USB_CHIPIDEA" ; config_enable
#
# USB Miscellaneous drivers
#
config="CONFIG_USB_ONBOARD_DEVICE" ; config_enable
#
# USB Physical Layer drivers
#
config="CONFIG_AM335X_CONTROL_USB" ; config_enable
config="CONFIG_AM335X_PHY_USB" ; config_enable
config="CONFIG_TWL6030_USB" ; config_enable
config="CONFIG_USB_GPIO_VBUS" ; config_enable
config="CONFIG_USB_MXS_PHY" ; config_enable
config="CONFIG_USB_GADGET" ; config_enable
config="CONFIG_USB_GADGET_VBUS_DRAW" ; option="500" ; config_value
#
# USB Peripheral Controller
#
config="CONFIG_USB_LIBCOMPOSITE" ; config_enable
config="CONFIG_USB_U_ETHER" ; config_enable
config="CONFIG_USB_F_ECM" ; config_enable
config="CONFIG_USB_F_SUBSET" ; config_enable
config="CONFIG_USB_F_RNDIS" ; config_enable
config="CONFIG_USB_ETH" ; config_enable
config="CONFIG_USB_ETH_EEM" ; config_disable
config="CONFIG_USB_GADGETFS" ; config_disable
config="CONFIG_USB_G_NOKIA" ; config_disable
#
# MMC/SD/SDIO Card Drivers
#
config="CONFIG_MMC_BLOCK_MINORS" ; option="8" ; config_value
#
# MMC/SD/SDIO Host Controller Drivers
#
config="CONFIG_MMC_SDHCI" ; config_enable
config="CONFIG_MMC_SDHCI_PLTFM" ; config_enable
config="CONFIG_MMC_SDHCI_OF_ESDHC" ; config_enable
config="CONFIG_MMC_SDHCI_ESDHC_IMX" ; config_enable
config="CONFIG_MMC_SDHCI_TEGRA" ; config_enable
config="CONFIG_MMC_OMAP" ; config_enable
config="CONFIG_MMC_OMAP_HS" ; config_enable
config="CONFIG_MMC_DW" ; config_enable
config="CONFIG_MMC_DW_ROCKCHIP" ; config_enable
config="CONFIG_MMC_SUNXI" ; config_enable
#
# LED drivers
#
config="CONFIG_LEDS_LM3530" ; config_module
config="CONFIG_LEDS_LM3642" ; config_module
config="CONFIG_LEDS_PCA9532" ; config_module
config="CONFIG_LEDS_PCA9532_GPIO" ; config_enable
config="CONFIG_LEDS_GPIO" ; config_enable
config="CONFIG_LEDS_LP5521" ; config_module
config="CONFIG_LEDS_LP5562" ; config_module
config="CONFIG_LEDS_LP8501" ; config_module
config="CONFIG_LEDS_LP8860" ; config_module
config="CONFIG_LEDS_PCA963X" ; config_module
config="CONFIG_LEDS_TCA6507" ; config_module
config="CONFIG_LEDS_TLC591XX" ; config_module
config="CONFIG_LEDS_LM355x" ; config_module
#
# LED Triggers
#
config="CONFIG_LEDS_TRIGGER_TIMER" ; config_enable
config="CONFIG_LEDS_TRIGGER_ONESHOT" ; config_enable
config="CONFIG_LEDS_TRIGGER_HEARTBEAT" ; config_enable
config="CONFIG_LEDS_TRIGGER_BACKLIGHT" ; config_enable
config="CONFIG_LEDS_TRIGGER_GPIO" ; config_enable
config="CONFIG_LEDS_TRIGGER_DEFAULT_ON" ; config_enable
#
# I2C RTC drivers
#
config="CONFIG_RTC_DRV_DS1374" ; config_module
config="CONFIG_RTC_DRV_DS1374_WDT" ; config_enable
config="CONFIG_RTC_DRV_DS1672" ; config_module
config="CONFIG_RTC_DRV_DS3232" ; config_module
config="CONFIG_RTC_DRV_HYM8563" ; config_module
config="CONFIG_RTC_DRV_MAX6900" ; config_module
config="CONFIG_RTC_DRV_RS5C372" ; config_module
config="CONFIG_RTC_DRV_ISL1208" ; config_module
config="CONFIG_RTC_DRV_ISL12022" ; config_module
config="CONFIG_RTC_DRV_X1205" ; config_module
config="CONFIG_RTC_DRV_PCF2127" ; config_module
config="CONFIG_RTC_DRV_PCF85063" ; config_module
config="CONFIG_RTC_DRV_PCF8583" ; config_module
config="CONFIG_RTC_DRV_M41T80" ; config_module
config="CONFIG_RTC_DRV_M41T80_WDT" ; config_enable
config="CONFIG_RTC_DRV_BQ32K" ; config_module
config="CONFIG_RTC_DRV_TPS65910" ; config_module
config="CONFIG_RTC_DRV_S35390A" ; config_module
config="CONFIG_RTC_DRV_FM3130" ; config_module
config="CONFIG_RTC_DRV_RX8581" ; config_module
config="CONFIG_RTC_DRV_RX8025" ; config_module
config="CONFIG_RTC_DRV_EM3027" ; config_module
config="CONFIG_RTC_DRV_RV3029C2" ; config_module
config="CONFIG_RTC_DRV_RV8803" ; config_module
#
# SPI RTC drivers
#
config="CONFIG_RTC_DRV_M41T93" ; config_module
config="CONFIG_RTC_DRV_M41T94" ; config_module
config="CONFIG_RTC_DRV_DS1305" ; config_module
config="CONFIG_RTC_DRV_DS1343" ; config_module
config="CONFIG_RTC_DRV_DS1347" ; config_module
config="CONFIG_RTC_DRV_DS1390" ; config_module
config="CONFIG_RTC_DRV_MAX6902" ; config_module
config="CONFIG_RTC_DRV_R9701" ; config_module
config="CONFIG_RTC_DRV_RS5C348" ; config_module
config="CONFIG_RTC_DRV_DS3234" ; config_module
config="CONFIG_RTC_DRV_PCF2123" ; config_module
config="CONFIG_RTC_DRV_RX4581" ; config_module
config="CONFIG_RTC_DRV_MCP795" ; config_module
#
# Platform RTC drivers
#
config="CONFIG_RTC_DRV_DS1286" ; config_module
config="CONFIG_RTC_DRV_DS1511" ; config_module
config="CONFIG_RTC_DRV_DS1553" ; config_module
config="CONFIG_RTC_DRV_DS1685_FAMILY" ; config_module
config="CONFIG_RTC_DRV_DS1742" ; config_module
config="CONFIG_RTC_DRV_DA9055" ; config_module
config="CONFIG_RTC_DRV_DA9063" ; config_module
config="CONFIG_RTC_DRV_STK17TA8" ; config_module
config="CONFIG_RTC_DRV_M48T86" ; config_module
config="CONFIG_RTC_DRV_M48T35" ; config_module
config="CONFIG_RTC_DRV_M48T59" ; config_module
config="CONFIG_RTC_DRV_MSM6242" ; config_module
config="CONFIG_RTC_DRV_BQ4802" ; config_module
config="CONFIG_RTC_DRV_RP5C01" ; config_module
config="CONFIG_RTC_DRV_V3020" ; config_module
config="CONFIG_RTC_DRV_DS2404" ; config_module
#
# on-CPU RTC drivers
#
config="CONFIG_RTC_DRV_SUN6I" ; config_enable
#
# HID Sensor RTC drivers
#
config="CONFIG_RTC_DRV_HID_SENSOR_TIME" ; config_module
#
# DMA Devices
#
config="CONFIG_AXI_DMAC" ; config_enable
config="CONFIG_DMA_SUN6I" ; config_enable
config="CONFIG_FSL_EDMA" ; config_enable
config="CONFIG_TI_CPPI41" ; config_enable
config="CONFIG_DW_DMAC_CORE" ; config_enable
config="CONFIG_DW_DMAC" ; config_enable
#
# DMA Clients
#
config="CONFIG_UIO_PDRV_GENIRQ" ; config_module
config="CONFIG_UIO_DMEM_GENIRQ" ; config_module
config="CONFIG_UIO_PRUSS" ; config_module
#STAGING
#
# Microsoft Hyper-V guest support
#
config="CONFIG_RTLLIB" ; config_module
config="CONFIG_RTLLIB_CRYPTO_CCMP" ; config_module
config="CONFIG_RTLLIB_CRYPTO_TKIP" ; config_module
config="CONFIG_RTLLIB_CRYPTO_WEP" ; config_module
#
# Accelerometers
#
config="CONFIG_ADIS16201" ; config_module
config="CONFIG_ADIS16203" ; config_module
config="CONFIG_ADIS16204" ; config_module
config="CONFIG_ADIS16209" ; config_module
config="CONFIG_ADIS16220" ; config_module
config="CONFIG_ADIS16240" ; config_module
config="CONFIG_SCA3000" ; config_module
#
# Analog to digital converters
#
config="CONFIG_AD7606" ; config_module
config="CONFIG_AD7606_IFACE_SPI" ; config_module
config="CONFIG_AD7780" ; config_module
config="CONFIG_AD7816" ; config_module
config="CONFIG_AD7192" ; config_module
config="CONFIG_AD7280" ; config_module
#
# Analog digital bi-direction converters
#
config="CONFIG_ADT7316" ; config_module
config="CONFIG_ADT7316_SPI" ; config_module
config="CONFIG_ADT7316_I2C" ; config_module
#
# Capacitance to digital converters
#
config="CONFIG_AD7150" ; config_module
config="CONFIG_AD7152" ; config_module
config="CONFIG_AD7746" ; config_module
#
# Direct Digital Synthesis
#
config="CONFIG_AD9832" ; config_module
config="CONFIG_AD9834" ; config_module
#
# Digital gyroscope sensors
#
config="CONFIG_ADIS16060" ; config_module
#
# Network Analyzer, Impedance Converters
#
config="CONFIG_AD5933" ; config_module
#
# Light sensors
#
config="CONFIG_SENSORS_ISL29018" ; config_module
config="CONFIG_SENSORS_ISL29028" ; config_module
config="CONFIG_TSL2583" ; config_module
config="CONFIG_TSL2x7x" ; config_module
#
# Magnetometer sensors
#
config="CONFIG_SENSORS_HMC5843" ; config_module
config="CONFIG_SENSORS_HMC5843_I2C" ; config_module
config="CONFIG_SENSORS_HMC5843_SPI" ; config_module
#
# Active energy metering IC
#
config="CONFIG_ADE7753" ; config_module
config="CONFIG_ADE7754" ; config_module
config="CONFIG_ADE7758" ; config_module
config="CONFIG_ADE7759" ; config_module
config="CONFIG_ADE7854" ; config_module
config="CONFIG_ADE7854_I2C" ; config_module
config="CONFIG_ADE7854_SPI" ; config_module
#
# Resolver to digital converters
#
config="CONFIG_AD2S90" ; config_module
config="CONFIG_AD2S1200" ; config_module
config="CONFIG_AD2S1210" ; config_module
#
# Android
#
config="CONFIG_ASHMEM" ; config_enable
config="CONFIG_ANDROID_TIMED_GPIO" ; config_module
config="CONFIG_SYNC" ; config_enable
config="CONFIG_SW_SYNC" ; config_disable
config="CONFIG_ION" ; config_enable
config="CONFIG_FB_TFT" ; config_module
config="CONFIG_FB_TFT_AGM1264K_FL" ; config_module
config="CONFIG_FB_TFT_BD663474" ; config_module
config="CONFIG_FB_TFT_HX8340BN" ; config_module
config="CONFIG_FB_TFT_HX8347D" ; config_module
config="CONFIG_FB_TFT_HX8353D" ; config_module
config="CONFIG_FB_TFT_HX8357D" ; config_module
config="CONFIG_FB_TFT_ILI9163" ; config_module
config="CONFIG_FB_TFT_ILI9320" ; config_module
config="CONFIG_FB_TFT_ILI9325" ; config_module
config="CONFIG_FB_TFT_ILI9340" ; config_module
config="CONFIG_FB_TFT_ILI9341" ; config_module
config="CONFIG_FB_TFT_ILI9481" ; config_module
config="CONFIG_FB_TFT_ILI9486" ; config_module
config="CONFIG_FB_TFT_PCD8544" ; config_module
config="CONFIG_FB_TFT_RA8875" ; config_module
config="CONFIG_FB_TFT_S6D02A1" ; config_module
config="CONFIG_FB_TFT_S6D1121" ; config_module
config="CONFIG_FB_TFT_SSD1289" ; config_module
config="CONFIG_FB_TFT_SSD1306" ; config_module
config="CONFIG_FB_TFT_SSD1331" ; config_module
config="CONFIG_FB_TFT_SSD1351" ; config_module
config="CONFIG_FB_TFT_ST7735R" ; config_module
config="CONFIG_FB_TFT_ST7789V" ; config_module
config="CONFIG_FB_TFT_TINYLCD" ; config_module
config="CONFIG_FB_TFT_TLS8204" ; config_module
config="CONFIG_FB_TFT_UC1611" ; config_module
config="CONFIG_FB_TFT_UC1701" ; config_module
config="CONFIG_FB_TFT_UPD161704" ; config_module
config="CONFIG_FB_TFT_WATTEROTT" ; config_module
config="CONFIG_FB_FLEX" ; config_module
config="CONFIG_FB_TFT_FBTFT_DEVICE" ; config_module
#
# Common Clock Framework
#
config="CONFIG_CLK_TWL6040" ; config_enable
config="CONFIG_COMMON_CLK_PALMAS" ; config_enable
config="CONFIG_HWSPINLOCK" ; config_enable
#
# Hardware Spinlock drivers
#
config="CONFIG_HWSPINLOCK_OMAP" ; config_enable
#
# Clock Source drivers
#
config="CONFIG_TEGRA_IOMMU_SMMU" ; config_enable
config="CONFIG_ARM_SMMU" ; config_enable
#
# Remoteproc drivers
#
config="CONFIG_REMOTEPROC" ; config_enable
config="CONFIG_OMAP_REMOTEPROC" ; config_enable
config="CONFIG_WKUP_M3_RPROC" ; config_enable
#
# Rpmsg drivers
#
config="CONFIG_RPMSG" ; config_enable
#
# SOC (System On Chip) specific Drivers
#
config="CONFIG_ROCKCHIP_PM_DOMAINS" ; config_enable
config="CONFIG_SOC_TI" ; config_enable
config="CONFIG_WKUP_M3_IPC" ; config_enable
#
# DEVFREQ Governors
#
config="CONFIG_DEVFREQ_GOV_SIMPLE_ONDEMAND" ; config_enable
config="CONFIG_DEVFREQ_GOV_PERFORMANCE" ; config_enable
config="CONFIG_DEVFREQ_GOV_POWERSAVE" ; config_enable
config="CONFIG_DEVFREQ_GOV_USERSPACE" ; config_enable
#
# DEVFREQ Drivers
#
config="CONFIG_ARM_TEGRA_DEVFREQ" ; config_enable
#
# Extcon Device Drivers
#
config="CONFIG_EXTCON_GPIO" ; config_enable
config="CONFIG_EXTCON_PALMAS" ; config_enable
config="CONFIG_EXTCON_USB_GPIO" ; config_enable
config="CONFIG_TI_EMIF" ; config_enable
#exit
#
# Accelerometers
#
config="CONFIG_BMA180" ; config_module
config="CONFIG_BMC150_ACCEL" ; config_module
config="CONFIG_IIO_ST_ACCEL_3AXIS" ; config_module
config="CONFIG_IIO_ST_ACCEL_I2C_3AXIS" ; config_module
config="CONFIG_IIO_ST_ACCEL_SPI_3AXIS" ; config_module
config="CONFIG_KXSD9" ; config_module
config="CONFIG_MMA8452" ; config_module
config="CONFIG_KXCJK1013" ; config_module
config="CONFIG_MMA7455_I2C" ; config_module
config="CONFIG_MMA7455_SPI" ; config_module
config="CONFIG_MMA9551" ; config_module
config="CONFIG_MMA9553" ; config_module
config="CONFIG_MXC4005" ; config_module
config="CONFIG_MXC6255" ; config_module
config="CONFIG_STK8312" ; config_module
config="CONFIG_STK8BA50" ; config_module
#
# Analog to digital converters
#
config="CONFIG_AD7266" ; config_module
config="CONFIG_AD7291" ; config_module
config="CONFIG_AD7298" ; config_module
config="CONFIG_AD7476" ; config_module
config="CONFIG_AD7791" ; config_module
config="CONFIG_AD7793" ; config_module
config="CONFIG_AD7887" ; config_module
config="CONFIG_AD7923" ; config_module
config="CONFIG_AD799X" ; config_module
config="CONFIG_AXP288_ADC" ; config_module
config="CONFIG_CC10001_ADC" ; config_module
config="CONFIG_HI8435" ; config_module
config="CONFIG_INA2XX_ADC" ; config_module
config="CONFIG_IMX7D_ADC" ; config_module
config="CONFIG_MAX1027" ; config_module
config="CONFIG_MAX1363" ; config_module
config="CONFIG_MCP320X" ; config_module
config="CONFIG_MCP3422" ; config_module
config="CONFIG_NAU7802" ; config_module
config="CONFIG_PALMAS_GPADC" ; config_module
config="CONFIG_TI_ADC081C" ; config_module
config="CONFIG_TI_ADC128S052" ; config_module
config="CONFIG_TI_ADS8688" ; config_module
config="CONFIG_TWL6030_GPADC" ; config_module
config="CONFIG_VF610_ADC" ; config_module
#
# Amplifiers
#
config="CONFIG_AD8366" ; config_module
#
# Chemical Sensors
#
config="CONFIG_VZ89X" ; config_module
#
# SSP Sensor Common
#
config="CONFIG_IIO_SSP_SENSORHUB" ; config_module
#
# Digital to analog converters
#
config="CONFIG_AD5064" ; config_module
config="CONFIG_AD5360" ; config_module
config="CONFIG_AD5380" ; config_module
config="CONFIG_AD5421" ; config_module
config="CONFIG_AD5446" ; config_module
config="CONFIG_AD5449" ; config_module
config="CONFIG_AD5504" ; config_module
config="CONFIG_AD5624R_SPI" ; config_module
config="CONFIG_AD5686" ; config_module
config="CONFIG_AD5755" ; config_module
config="CONFIG_AD5764" ; config_module
config="CONFIG_AD5791" ; config_module
config="CONFIG_AD7303" ; config_module
config="CONFIG_M62332" ; config_module
config="CONFIG_MAX517" ; config_module
config="CONFIG_MAX5821" ; config_module
config="CONFIG_MCP4725" ; config_module
config="CONFIG_MCP4922" ; config_module
#
# Clock Generator/Distribution
#
config="CONFIG_AD9523" ; config_module
#
# Phase-Locked Loop (PLL) frequency synthesizers
#
config="CONFIG_ADF4350" ; config_module
#
# Digital gyroscope sensors
#
config="CONFIG_ADIS16080" ; config_module
config="CONFIG_ADIS16130" ; config_module
config="CONFIG_ADIS16136" ; config_module
config="CONFIG_ADIS16260" ; config_module
config="CONFIG_ADXRS450" ; config_module
config="CONFIG_BMG160" ; config_module
config="CONFIG_IIO_ST_GYRO_3AXIS" ; config_module
config="CONFIG_IIO_ST_GYRO_I2C_3AXIS" ; config_module
config="CONFIG_IIO_ST_GYRO_SPI_3AXIS" ; config_module
config="CONFIG_ITG3200" ; config_module
#
# Health sensors
#
config="CONFIG_MAX30100" ; config_module
#
# Humidity sensors
#
config="CONFIG_DHT11" ; config_module
config="CONFIG_HDC100X" ; config_module
config="CONFIG_HTU21" ; config_module
config="CONFIG_SI7005" ; config_module
config="CONFIG_SI7020" ; config_module
#
# Inertial measurement units
#
config="CONFIG_ADIS16400" ; config_module
config="CONFIG_ADIS16480" ; config_module
config="CONFIG_KMX61" ; config_module
config="CONFIG_INV_MPU6050_IIO" ; config_module
#
# Light sensors
#
config="CONFIG_ADJD_S311" ; config_module
config="CONFIG_AL3320A" ; config_module
config="CONFIG_APDS9300" ; config_module
config="CONFIG_APDS9960" ; config_module
config="CONFIG_BH1750" ; config_module
config="CONFIG_CM32181" ; config_module
config="CONFIG_CM3232" ; config_module
config="CONFIG_CM3323" ; config_module
config="CONFIG_CM36651" ; config_module
config="CONFIG_GP2AP020A00F" ; config_module
config="CONFIG_ISL29125" ; config_module
config="CONFIG_JSA1212" ; config_module
config="CONFIG_RPR0521" ; config_module
config="CONFIG_LTR501" ; config_module
config="CONFIG_OPT3001" ; config_module
config="CONFIG_PA12203001" ; config_module
config="CONFIG_STK3310" ; config_module
config="CONFIG_TCS3414" ; config_module
config="CONFIG_TCS3472" ; config_module
config="CONFIG_TSL4531" ; config_module
config="CONFIG_US5182D" ; config_module
config="CONFIG_VCNL4000" ; config_module
#
# Magnetometer sensors
#
config="CONFIG_AK8975" ; config_module
config="CONFIG_AK09911" ; config_module
config="CONFIG_MAG3110" ; config_module
config="CONFIG_MMC35240" ; config_module
config="CONFIG_IIO_ST_MAGN_3AXIS" ; config_module
config="CONFIG_IIO_ST_MAGN_I2C_3AXIS" ; config_module
config="CONFIG_IIO_ST_MAGN_SPI_3AXIS" ; config_module
config="CONFIG_BMC150_MAGN" ; config_module
#
# Triggers - standalone
#
config="CONFIG_IIO_INTERRUPT_TRIGGER" ; config_module
config="CONFIG_IIO_SYSFS_TRIGGER" ; config_module
#
# Digital potentiometers
#
config="CONFIG_MCP4531" ; config_module
#
# Pressure sensors
#
config="CONFIG_BMP280" ; config_module
config="CONFIG_MPL115" ; config_module
config="CONFIG_MPL3115" ; config_module
config="CONFIG_MS5611" ; config_module
config="CONFIG_MS5611_I2C" ; config_module
config="CONFIG_MS5611_SPI" ; config_module
config="CONFIG_MS5637" ; config_module
config="CONFIG_IIO_ST_PRESS" ; config_module
config="CONFIG_IIO_ST_PRESS_I2C" ; config_module
config="CONFIG_IIO_ST_PRESS_SPI" ; config_module
config="CONFIG_T5403" ; config_module
#
# Lightning sensors
#
config="CONFIG_AS3935" ; config_module
#
# Proximity sensors
#
config="CONFIG_LIDAR_LITE_V2" ; config_module
config="CONFIG_SX9500" ; config_module
#
# Temperature sensors
#
config="CONFIG_MLX90614" ; config_module
config="CONFIG_TMP006" ; config_module
config="CONFIG_TSYS01" ; config_module
config="CONFIG_TSYS02D" ; config_module
config="CONFIG_PWM_OMAP_DMTIMER" ; config_module
config="CONFIG_PWM_PCA9685" ; config_module
config="CONFIG_PWM_ROCKCHIP" ; config_module
#
# PHY Subsystem
#
config="CONFIG_OMAP_CONTROL_PHY" ; config_enable
config="CONFIG_OMAP_USB2" ; config_enable
config="CONFIG_TI_PIPE3" ; config_module
config="CONFIG_TWL4030_USB" ; config_enable
config="CONFIG_PHY_SUN4I_USB" ; config_enable
config="CONFIG_PHY_SUN9I_USB" ; config_enable
config="CONFIG_PHY_ROCKCHIP_USB" ; config_enable
#
# Android
#
config="CONFIG_ANDROID" ; config_enable
config="CONFIG_ANDROID_BINDER_IPC" ; config_enable
config="CONFIG_ANDROID_BINDER_IPC_32BIT" ; config_enable
config="CONFIG_NVMEM" ; config_enable
config="CONFIG_NVMEM_IMX_OCOTP" ; config_enable
config="CONFIG_ROCKCHIP_EFUSE" ; config_enable
config="CONFIG_NVMEM_SUNXI_SID" ; config_enable
config="CONFIG_NVMEM_VF610_OCOTP" ; config_enable
#exit
#
# FPGA Configuration Support
#
config="CONFIG_FPGA" ; config_module
config="CONFIG_FPGA_MGR_SOCFPGA" ; config_module
#
# File systems
#
config="CONFIG_EXT4_FS" ; config_enable
config="CONFIG_EXT4_ENCRYPTION" ; config_enable
config="CONFIG_JBD2" ; config_enable
config="CONFIG_FS_MBCACHE" ; config_enable
config="CONFIG_XFS_FS" ; config_enable
config="CONFIG_BTRFS_FS" ; config_enable
config="CONFIG_F2FS_FS" ; config_enable
config="CONFIG_FANOTIFY_ACCESS_PERMISSIONS" ; config_enable
config="CONFIG_AUTOFS4_FS" ; config_enable
config="CONFIG_FUSE_FS" ; config_enable
config="CONFIG_OVERLAY_FS" ; config_enable
#
# DOS/FAT/NT Filesystems
#
config="CONFIG_FAT_FS" ; config_enable
config="CONFIG_MSDOS_FS" ; config_enable
config="CONFIG_VFAT_FS" ; config_enable
config="CONFIG_FAT_DEFAULT_IOCHARSET" ; option="iso8859-1" ; config_string
#
# Pseudo filesystems
#
config="CONFIG_SQUASHFS_LZ4" ; config_enable
config="CONFIG_NFS_FS" ; config_enable
config="CONFIG_NFS_V2" ; config_enable
config="CONFIG_NFS_V3" ; config_enable
config="CONFIG_NFS_V4" ; config_enable
config="CONFIG_ROOT_NFS" ; config_enable
config="CONFIG_NLS_DEFAULT" ; option="iso8859-1" ; config_string
config="CONFIG_NLS_CODEPAGE_437" ; config_enable
config="CONFIG_NLS_ISO8859_1" ; config_enable
#
# Debug Lockups and Hangs
#
config="CONFIG_SCHEDSTATS" ; config_enable
config="CONFIG_SCHED_STACK_END_CHECK" ; config_enable
#
# Runtime Testing
#
config="CONFIG_KGDB" ; config_enable
config="CONFIG_KGDB_SERIAL_CONSOLE" ; config_enable
config="CONFIG_KGDB_TESTS" ; config_disable
config="CONFIG_KGDB_KDB" ; config_enable
config="CONFIG_KDB_KEYBOARD" ; config_enable
#
# Crypto core or helper
#
config="CONFIG_CRYPTO_MANAGER_DISABLE_TESTS" ; config_enable
#
# Random Number Generation
#
config="CONFIG_CRYPTO_DEV_FSL_CAAM" ; config_module
config="CONFIG_CRYPTO_DEV_OMAP_DES" ; config_module
config="CONFIG_CRYPTO_DEV_SAHARA" ; config_module
config="CONFIG_CRYPTO_DEV_SUN4I_SS" ; config_module
#
# Certificates for signature checking
#
config="CONFIG_ARM_CRYPTO" ; config_enable
config="CONFIG_CRYPTO_SHA1_ARM" ; config_module
config="CONFIG_CRYPTO_SHA1_ARM_NEON" ; config_module
config="CONFIG_CRYPTO_SHA256_ARM" ; config_module
config="CONFIG_CRYPTO_SHA512_ARM" ; config_module
config="CONFIG_CRYPTO_AES_ARM" ; config_module
config="CONFIG_CRYPTO_AES_ARM_BS" ; config_module
cd ${DIR}/
#
|
package kvraft
import (
"6.824/labgob"
"6.824/labrpc"
"6.824/raft"
"bytes"
"fmt"
"log"
"sync"
"sync/atomic"
"time"
)
// Operation kinds stored in Op.Opr, and how long an RPC handler waits for
// the applier to commit its command before replying ErrTimeout.
const (
	OpGet    = "Get"
	OpPut    = "Put"
	OpAppend = "Append"
	// REQUEST_TIMEOUT bounds the wait in processNotifyCh.
	REQUEST_TIMEOUT = time.Duration(time.Millisecond * 500)
)
// Op is the command replicated through the Raft log for every client request.
type Op struct {
	// Your definitions here.
	// Field names must start with capital letters,
	// otherwise RPC will break.
	Opr       string // operation kind: OpGet / OpPut / OpAppend
	Key       string
	Value     string // unused for Get
	ClientId  int64  // issuing client, used for duplicate detection
	CommandId int    // per-client request id, used for duplicate detection
}

// NotifyMsg is what the applier hands back to a waiting RPC handler.
type NotifyMsg struct {
	Error Err
	Value string // only meaningful for Get
}

// ApplyRecord remembers the newest applied write (and its result) per client.
type ApplyRecord struct {
	CommandId int
	Error     Err
}
// KVServer replicates a key/value store on top of a Raft instance.
type KVServer struct {
	mu      sync.Mutex
	me      int
	rf      *raft.Raft
	applyCh chan raft.ApplyMsg
	dead    int32 // set by Kill()

	maxraftstate int // snapshot if log grows this big

	// Your definitions here.
	lastApplied    int                    // index of the last log entry applied to the state machine
	maxSeenTerm    int                    // highest term observed via no-op entries / installed snapshots
	kvStateMachine *KVStateMachine        // key -> value
	notifyChs      map[int]chan NotifyMsg // commandIndex -> channel
	lastOpr        map[int64]ApplyRecord  // clientId -> [latest commandId + command info]
}
// applyEntryToStateMachine executes a single committed operation against the
// key/value state machine and returns its error plus, for Get, the value read.
func (kv *KVServer) applyEntryToStateMachine(op Op) (Err, string) {
	var (
		value string
		err   Err
	)
	switch op.Opr {
	case OpGet:
		value, err = kv.kvStateMachine.Get(op.Key)
	case OpPut:
		err = kv.kvStateMachine.Put(op.Key, op.Value)
	case OpAppend:
		err = kv.kvStateMachine.Append(op.Key, op.Value)
	default:
		log.Fatal("unknown op type when applying entry to state machine")
	}
	return err, value
}
// needSnapshot reports whether the persisted Raft state has grown to at
// least maxraftstate bytes, so the service should snapshot and let Raft
// truncate its log. maxraftstate == -1 disables snapshotting (lab 3A).
func (kv *KVServer) needSnapshot() bool {
	if kv.maxraftstate == -1 { // 3A
		return false
	}
	return kv.rf.GetPersister().RaftStateSize() >= kv.maxraftstate
}
// encodeSnapshot serializes the service state that must survive a restart:
// lastApplied, the key/value map, and the per-client dedup table (lastOpr).
// Should be called when the snapshot is updated, namely when calling
// Snapshot or when CondInstallSnapshot returns true.
func (kv *KVServer) encodeSnapshot() []byte {
	w := new(bytes.Buffer)
	e := labgob.NewEncoder(w)
	// An encode failure would produce a corrupt snapshot; fail loudly instead
	// of silently persisting garbage (the original ignored these errors).
	if e.Encode(kv.lastApplied) != nil ||
		e.Encode(kv.kvStateMachine.GetSM()) != nil ||
		e.Encode(kv.lastOpr) != nil {
		log.Fatal("encode snapshot failed.")
	}
	return w.Bytes()
}
// takeSnapshot persists the current service state through Raft.
// It calls Snapshot, which will call SaveStateAndSnapshot; the snapshot
// contains one var lastApplied and two maps: kvDB and lastOpr.
func (kv *KVServer) takeSnapshot() {
	// lock achieved in applier
	snapshot := kv.encodeSnapshot()
	kv.rf.Snapshot(kv.lastApplied, snapshot)
}
// readSnapshotPersist restores lastApplied, the key/value map and the
// per-client dedup table from an encoded snapshot. Nil/empty data is a
// fresh start and is ignored.
func (kv *KVServer) readSnapshotPersist(data []byte) {
	if len(data) == 0 {
		return
	}
	d := labgob.NewDecoder(bytes.NewBuffer(data))
	var (
		restoredApplied int
		stateMap        map[string]string
		lastOps         map[int64]ApplyRecord
	)
	if d.Decode(&restoredApplied) != nil || d.Decode(&stateMap) != nil || d.Decode(&lastOps) != nil {
		log.Fatal("read lastApplied, kvStateMachine and last operations failed.")
	}
	DPrintf("read lastApplied, kvStateMachine and last operation succeed.")
	kv.lastApplied = restoredApplied
	kv.kvStateMachine.SetSM(stateMap)
	kv.lastOpr = lastOps
}
// isDuplicated reports whether a write with this (clientId, commandId) has
// already been applied. Gets are idempotent and never treated as duplicates.
// Caller must hold kv.mu.
func (kv *KVServer) isDuplicated(op string, clientId int64, commandId int) bool {
	if op == OpGet {
		return false
	}
	rec, seen := kv.lastOpr[clientId]
	return seen && commandId <= rec.CommandId
}
// makeOp builds a Raft log entry from RPC arguments. Supports *GetArgs and
// *PutAppendArgs; any other type is a programming error.
func (kv *KVServer) makeOp(args interface{}) Op {
	op := Op{}
	// Idiomatic type switch binds the concrete value directly, avoiding the
	// redundant second assertion the original performed in each case.
	switch m := args.(type) {
	case *GetArgs:
		op.Opr = OpGet
		op.Key = m.Key
		op.Value = ""
		op.ClientId = m.ClientId
		op.CommandId = m.CommandId
	case *PutAppendArgs:
		op.Opr = m.Op
		op.Key = m.Key
		op.Value = m.Value
		op.ClientId = m.ClientId
		op.CommandId = m.CommandId
	default:
		log.Fatalf("unknown args type %T in makeOp.", args)
	}
	return op
}
// generateNotifyCh returns the notification channel for a log index,
// creating a buffered (capacity 1) channel on first use so a single applier
// send does not block even if the waiting handler has already timed out.
func (kv *KVServer) generateNotifyCh(index int) chan NotifyMsg {
	kv.mu.Lock()
	defer kv.mu.Unlock()
	if existing, ok := kv.notifyChs[index]; ok {
		return existing
	}
	created := make(chan NotifyMsg, 1)
	kv.notifyChs[index] = created
	return created
}
// getNotifyCh looks up the notification channel for a log index.
// Caller must hold kv.mu (the lock is achieved in the applier).
func (kv *KVServer) getNotifyCh(index int) (chan NotifyMsg, bool) {
	ch, found := kv.notifyChs[index]
	if !found {
		DPrintf("applier wants to get NotifyCh[%d] but it not exists \n", index)
	}
	return ch, found
}
// deleteOutdatedNotifyCh removes the notification channel for index plus any
// stale channels left behind by lower indices (handlers that timed out).
func (kv *KVServer) deleteOutdatedNotifyCh(index int) {
	kv.mu.Lock()
	defer kv.mu.Unlock()
	// `for k := range` (go vet flags the redundant `, _`); folding the
	// separate delete(index) into `k <= index` keeps behavior identical.
	for k := range kv.notifyChs {
		if k <= index {
			delete(kv.notifyChs, k)
		}
	}
}
// processNotifyCh submits op to Raft and waits for the applier to signal the
// result on the per-index channel, or gives up after REQUEST_TIMEOUT.
// The channel is cleaned up asynchronously afterwards.
func (kv *KVServer) processNotifyCh(op Op) NotifyMsg {
	index, _, _ := kv.rf.Start(op)
	ch := kv.generateNotifyCh(index)
	t := time.NewTimer(REQUEST_TIMEOUT)
	// Stop the timer on the notify path; the original leaked it until expiry.
	defer t.Stop()
	not := NotifyMsg{}
	select {
	case not = <-ch:
	case <-t.C:
		not.Error, not.Value = ErrTimeout, ""
	}
	go kv.deleteOutdatedNotifyCh(index)
	return not
}
// Get handles the Get RPC: non-leaders reject with a leader hint; on the
// leader the read is pushed through the Raft log for linearizability.
func (kv *KVServer) Get(args *GetArgs, reply *GetReply) {
	_, isLeader, hint := kv.rf.GetStateAndLeader()
	if !isLeader {
		reply.Err, reply.LeaderHint = ErrWrongLeader, hint
		return
	}
	result := kv.processNotifyCh(kv.makeOp(args))
	reply.Err, reply.Value = result.Error, result.Value
	reply.LeaderHint = -1
}
// PutAppend handles Put/Append RPCs. A request whose commandId is not newer
// than the client's last applied write is answered straight from the dedup
// table; otherwise it is replicated through Raft like Get.
func (kv *KVServer) PutAppend(args *PutAppendArgs, reply *PutAppendReply) {
	DPrintf("server [%d]: RPC PutAppend from client %v and commandId %d", kv.me, args.ClientId, args.CommandId)
	kv.mu.Lock()
	if kv.isDuplicated(args.Op, args.ClientId, args.CommandId) {
		reply.Err, reply.LeaderHint = kv.lastOpr[args.ClientId].Error, -1
		kv.mu.Unlock()
		return
	}
	kv.mu.Unlock()

	_, isLeader, hint := kv.rf.GetStateAndLeader()
	if !isLeader {
		reply.Err, reply.LeaderHint = ErrWrongLeader, hint
		return
	}
	result := kv.processNotifyCh(kv.makeOp(args))
	reply.Err, reply.LeaderHint = result.Error, -1
}
//
// the tester calls Kill() when a KVServer instance won't
// be needed again. for your convenience, we supply
// code to set rf.dead (without needing a lock),
// and a killed() method to test rf.dead in
// long-running loops. you can also add your own
// code to Kill(). you're not required to do anything
// about this, but it may be convenient (for example)
// to suppress debug output from a Kill()ed instance.
//
func (kv *KVServer) Kill() {
	// Mark dead atomically so applier()/killed() observe it without a lock.
	atomic.StoreInt32(&kv.dead, 1)
	kv.rf.Kill()
	// Your code here, if desired.
}
// killed reports whether Kill() has been invoked on this server.
func (kv *KVServer) killed() bool {
	return atomic.LoadInt32(&kv.dead) == 1
}
// applier is the long-running goroutine that consumes committed entries from
// Raft's applyCh, applies them to the state machine, notifies the waiting
// RPC handler (if any), and drives snapshotting and snapshot installation.
func (kv *KVServer) applier() {
	for !kv.killed() {
		select {
		case msg := <- kv.applyCh :
			if msg.CommandValid {
				kv.mu.Lock()
				// Stale delivery (e.g. already covered by a restored snapshot): skip.
				if msg.CommandIndex <= kv.lastApplied {
					fmt.Printf("msg.CommandIndex %d <= kv.lastApplied %d \n", msg.CommandIndex, kv.lastApplied)
					kv.mu.Unlock()
					continue
				}
				kv.lastApplied = msg.CommandIndex
				switch msg.Command.(type) {
				case int:
					// since we add a no-op command when a peer becomes leader,
					// we should not consider this command
					term := msg.CommandTerm // may not equals to term in kv.rf.GetState()
					if kv.maxSeenTerm >= term {
						fmt.Println("kv.maxSeenTerm >= msg.CommandTerm")
					}
					kv.maxSeenTerm = term
					kv.mu.Unlock()
					continue
				case Op:
					op := msg.Command.(Op)
					not := NotifyMsg{}
					if kv.isDuplicated(op.Opr, op.ClientId, op.CommandId) {
						// request out-of-date, just reply with the recorded result
						DPrintf("receive duplicated operation from applyCh with clientId %v and commandId %d, but last id is %v",
							op.ClientId, op.CommandId, kv.lastOpr[op.ClientId])
						not.Error = kv.lastOpr[op.ClientId].Error
					} else {
						// update state machine or get value from it, no matter when server's state
						// has updated since command applied means it will persist.
						err, val := kv.applyEntryToStateMachine(op)
						not.Error, not.Value = err, val
						// when op is Put or Append, we need to update lastOperation ever seen of this clientId.
						if op.Opr != OpGet {
							ar := ApplyRecord{
								CommandId: op.CommandId,
								Error: err,
							}
							kv.lastOpr[op.ClientId] = ar
						}
					}
					// in case that a peer has changed its state,
					// the request of CommandIndex in Start it refers may convert to another request.
					// Only notify when still leader in the same term the entry was appended.
					if currentTerm, isLeader := kv.rf.GetState(); isLeader && currentTerm == msg.CommandTerm {
						ch, ok := kv.getNotifyCh(msg.CommandIndex)
						if ok {
							// NOTE(review): this send happens while holding kv.mu; the channel
							// has capacity 1, so it could block if a prior notification was
							// never consumed — confirm handlers always drain or delete first.
							ch <- not
						}
					}
				default:
					log.Fatalf("unknown command type %T", msg.Command)
				}
				// check if service needs to take snapshot, then persist snapshot, lastApplied, lastOpr here.
				if kv.needSnapshot() {
					kv.takeSnapshot()
				}
				kv.mu.Unlock()
			} else if msg.SnapshotValid { // from leader's InstallSnapshot
				kv.mu.Lock()
				if kv.lastApplied > msg.SnapshotIndex || kv.maxSeenTerm > msg.SnapshotTerm {
					log.Fatalf("SnapshotValid, but kv.lastApplied %d > msg.SnapshotIndex %d \n", kv.lastApplied, msg.SnapshotIndex)
				}
				// check if raft accepts snapshot, then persist the restored state
				if kv.rf.CondInstallSnapshot(msg.SnapshotTerm, msg.SnapshotIndex, msg.Snapshot) {
					kv.lastApplied = msg.SnapshotIndex
					kv.maxSeenTerm = msg.SnapshotTerm
					kv.readSnapshotPersist(msg.Snapshot)
					if kv.lastApplied != msg.SnapshotIndex {
						fmt.Printf("SnapshotValid, but kv.lastApplied %d != msg.SnapshotIndex %d\n", kv.lastApplied, msg.SnapshotIndex)
					}
				}
				kv.mu.Unlock()
			} else {
				log.Fatal("unknown ApplyMsg type.")
			}
		}
		time.Sleep(5 * time.Millisecond)
	}
}
//
// servers[] contains the ports of the set of
// servers that will cooperate via Raft to
// form the fault-tolerant key/value service.
// me is the index of the current server in servers[].
// the k/v server should store snapshots through the underlying Raft
// implementation, which should call persister.SaveStateAndSnapshot() to
// atomically save the Raft state along with the snapshot.
// the k/v server should snapshot when Raft's saved state exceeds maxraftstate bytes,
// in order to allow Raft to garbage-collect its log. if maxraftstate is -1,
// you don't need to snapshot.
// StartKVServer() must return quickly, so it should start goroutines
// for any long-running work.
//
func StartKVServer(servers []*labrpc.ClientEnd, me int, persister *raft.Persister, maxraftstate int) *KVServer {
	// call labgob.Register on structures you want
	// Go's RPC library to marshall/unmarshall.
	labgob.Register(Op{})
	kv := new(KVServer)
	kv.me = me
	kv.maxraftstate = maxraftstate
	// You may need initialization code here.
	kv.applyCh = make(chan raft.ApplyMsg)
	kv.rf = raft.Make(servers, me, persister, kv.applyCh)
	kv.mu = sync.Mutex{}
	kv.dead = 0
	kv.kvStateMachine = MakeSM()
	// kv.lastApplied = -1
	// kv.maxSeenTerm = -1
	kv.notifyChs = make(map[int]chan NotifyMsg)
	kv.lastOpr = make(map[int64]ApplyRecord)
	// Resume from any persisted snapshot BEFORE the applier starts consuming,
	// so entries at or below the snapshot index are recognized as stale.
	kv.lastApplied = kv.rf.GetSnapshotIndex()
	kv.maxSeenTerm = kv.rf.GetSnapshotTerm()
	kv.readSnapshotPersist(kv.rf.GetPersister().ReadSnapshot())
	// You may need initialization code here.
	go kv.applier()
	return kv
}
|
import React from "react"
import { storiesOf } from "@storybook/react"
import Dialog, { DialogHeader, DialogBody, DialogActions } from "../src"
// Storybook entry: a basic Dialog composed of a header, body text and two
// action buttons, rendered in its visible state.
storiesOf("Dialog", module).add("Basic", () =>
  <Dialog visible={true}>
    <DialogHeader>Dialog Header</DialogHeader>
    <DialogBody>
      <p>lorem ipsum...</p>
    </DialogBody>
    <DialogActions>
      <button>Button 1</button>
      <button>Button 2</button>
    </DialogActions>
  </Dialog>
)
|
The network should be a Recurrent Neural Network (RNN), such as a Long Short-Term Memory (LSTM) network. This type of network is designed to process sequences of data points, which makes it well suited to predicting the next character in a sequence of characters.
The network should have an input layer, an output layer, and at least one hidden layer. The input layer should take in a single character from the sequence as input. The output layer should output a single character. The hidden layers will use the long short-term memory architecture to store information about the sequence and use this information to predict the next character in the sequence.
|
class EventSchedule24 {
  // id -> scheduled time window
  events: Map<string, { startTime: Date; endTime: Date }>;

  constructor() {
    this.events = new Map();
  }

  /**
   * Adds an event, rejecting it when its window overlaps any existing event.
   * (Bug fix: the original checked conflicts via `hasConflictingEvents(id)`
   * BEFORE inserting, so `events.get(id)` was undefined and destructuring threw.)
   * @throws Error when the candidate window conflicts with an existing event.
   */
  addEvent(id: string, startTime: Date, endTime: Date): void {
    if (this.overlapsExisting(startTime, endTime, id)) {
      throw new Error(`Event with ID ${id} conflicts with existing events`);
    }
    this.events.set(id, { startTime, endTime });
  }

  removeEvent(id: string): void {
    this.events.delete(id);
  }

  /**
   * Returns ids of events fully contained in [startTime, endTime].
   * (Bug fix: the original destructured each event's times into variables that
   * shadowed the parameters, making the range test compare values to themselves.)
   */
  getEventsInRange(startTime: Date, endTime: Date): string[] {
    const eventsInRange: string[] = [];
    for (const [id, event] of this.events) {
      if (event.startTime >= startTime && event.endTime <= endTime) {
        eventsInRange.push(id);
      }
    }
    return eventsInRange;
  }

  /**
   * Reports whether the stored event `id` overlaps any OTHER stored event.
   * (Bug fixes: the original compared the event against itself — always true —
   * and threw on unknown ids; unknown ids now simply report no conflict.)
   */
  hasConflictingEvents(id: string): boolean {
    const event = this.events.get(id);
    if (event === undefined) {
      return false;
    }
    return this.overlapsExisting(event.startTime, event.endTime, id);
  }

  /**
   * Standard interval-overlap test ([start, end) windows) against every stored
   * event except `excludeId`. Also covers the containment case the original
   * two-clause test missed.
   */
  private overlapsExisting(startTime: Date, endTime: Date, excludeId?: string): boolean {
    for (const [existingId, existing] of this.events) {
      if (existingId === excludeId) {
        continue;
      }
      if (startTime < existing.endTime && endTime > existing.startTime) {
        return true;
      }
    }
    return false;
  }
}
|
<reponame>l0stbitz/cliently-symfony
//User Data
var UserData = null; // /users/me payload; set by loadWorkingPlace
var accounts;
var current_account_index = null;
var current_account_id = null;
var workspaces;
var current_workspace_index = null;
var current_workspace_id = null;
var current_account_workspace_id = null;
var pipelines;
var current_pipeline_index = null;
var current_pipeline_id = null;
var workspace_members = null;
var workflows = null;
var current_owner_id = null;
var plan_class = 0; // set from the active account's plan_class; 0 until one is found
// Onboarding wizard stages.
var UserWizards = {
    NONE: -1,
    USER_INFO: 0,
    USER_CONNECTOR: 1,
    TOUR: 2
};
var user_wizard = -1;
// Product-tour step progression; COMPLETED means no tour is running.
var TourSteps = {
    COMPLETED: -1,
    START: 0,
    CLIENT_DETAILS: 2,
    CLIENT_DETAILS_TASKS: 4,
    LEFT_MENU: 5,
    LEAD_SOURCE: 6,
    LAST: 7
};
var tour_step = TourSteps.COMPLETED;
var MonthNames = ["January", "February", "March", "April", "May", "June",
    "July", "August", "September", "October", "November", "December"
];
// Parse the current URL into the initial page / settings-page / residual path
// segments, plus developer options passed as query parameters. Dev options
// only take effect when ?debug is set (see the DevOptions assignment below).
function parseInitPathAndOptions(context) {
    var emptyFilter = function (ele) {
        return ele !== '';
    };
    var initParams = {
        page: Pages.CLIENTS,
        settingsPage: SettingsPages.USER,
        paths: null,
        sidebar: 0
    };
    var defaultDevOptions = {
        debug: false,
        localdev: false,
        showWorkInProgress: false,
        showSampleData: false,
        wizard: UserWizards.NONE
    };
    // Maps lowercased URL parameter names to DevOptions keys.
    var devOptionNames = {
        debug: 'debug',
        localdev: 'localdev',
        // BUG FIX: was 'showWorkInProgres' (missing 's'), which wrote to a
        // nonexistent key so the option never took effect.
        showworkinprogress: 'showWorkInProgress',
        showsampledata: 'showSampleData',
        wizard: 'wizard'
    };
    var devOptions = $.extend({}, defaultDevOptions);
    if (window.location.pathname + window.location.search) {
        var pp = window.location.pathname + window.location.search; //pp: Paths and Parameters
        var paramsPos = pp.indexOf('?');
        if (paramsPos > -1) {
            var params = pp.substr(paramsPos + 1).split('&').filter(emptyFilter);
            pp = pp.substr(0, paramsPos);
            for (var i in params) {
                var param = params[i];
                var value = true;
                var pos = param.indexOf('=');
                if (pos > -1) {
                    value = param.substr(pos + 1);
                    param = param.substr(0, pos).toLowerCase();
                }
                if (typeof devOptionNames[param] != 'undefined') {
                    // 'true'/'false' strings become booleans; others kept verbatim.
                    devOptions[devOptionNames[param]] = value === 'true' ? true : (value === 'false' ? false : value);
                }
            }
        }
        var paths = pp.split('/').filter(emptyFilter);
        if (paths.length > 0) {
            initParams.page = paths[0];
            paths.shift();
            if (initParams.page == Pages.SETTINGS && paths.length) {
                initParams.settingsPage = paths[0];
                paths.shift();
            }
            initParams.paths = paths;
        }
    }
    context.DefaultDevOptions = defaultDevOptions;
    // Only honor dev options when debug mode is explicitly requested.
    context.DevOptions = devOptions.debug ? devOptions : defaultDevOptions;
    context.InitParams = initParams;
}
// Parse the initial URL once at load, and re-parse + re-activate the page on
// browser history navigation (back/forward).
(function (context) {
    parseInitPathAndOptions(context);
    context.onpopstate = function () {
        parseInitPathAndOptions(context);
        Layout.activatePage(context.InitParams.page, context.InitParams.settingsPage);
    };
})(window);
// Serialize a form into a plain object. A field name that appears more than
// once collects its values into an array; empty values become ''.
$.fn.serializeObject = function () {
    var result = {};
    $.each(this.serializeArray(), function () {
        var existing = result[this.name];
        if (existing === undefined) {
            result[this.name] = this.value || '';
        } else {
            if (!existing.push) {
                existing = result[this.name] = [existing];
            }
            existing.push(this.value || '');
        }
    });
    return result;
};
// Invoke the native reset() on each element in the set that supports it
// (i.e. <form> elements); returns the jQuery set for chaining.
$.fn.reset = function () {
    return this.each(function () {
        if (typeof this.reset != 'undefined') {
            this.reset();
        }
    });
};
// x-editable defaults: submit inline edits as PUT with a {fieldName: value}
// payload, and suppress its inline error message rendering.
$.fn.editable.defaults.ajaxOptions = {type: 'PUT'};
$.fn.editable.defaults.params = function (params) {
    var data = {};
    data[params.name] = params.value;
    return data;
};
$.fn.editable.defaults.error = function(response, newValue) {
    return '';
}
// qtip tooltip layering; 1019 sits just below Bootstrap's modal z-index (1050).
$.fn.qtip.zindex = 1019;
// Global image-upload configuration (ss upload library): single JPG/PNG/GIF
// file, max 1MB, JSON response, with cross-browser progress support.
ss.uploadSetup({
    url: '/api/v1/uploads',
    name: 'uploadfile', // upload parameter name
    progressUrl: '/uploads/x-upload-session-progress', // enables cross-browser progress support (more info below)
    sessionProgressUrl: '/uploads/x-upload-session-progress', // enables cross-browser progress support (more info below)
    multiple: false,
    multipart: true,
    queue: false,
    responseType: 'json',
    allowedExtensions: ['jpg', 'jpeg', 'png', 'gif'],
    accept: 'image/*',
    debug: DevOptions.debug,
    maxSize: 1024, // kilobytes
    //hoverClass: 'ui-state-hover',
    //focusClass: 'ui-state-focus',
    //disabledClass: 'ui-state-disabled',
    onSubmit: function (filename, extension) {
        //this.setFileSizeBox(sizeBox); // designate this element as file size container
        //this.setProgressBar(progress); // designate as progress bar
    },
    onSizeError: function () {
        showErrorMessage("Error!!!", "Files may not exceed 1MB.");
    },
    onExtError: function () {
        showErrorMessage("Error!!!", "Invalid file type. Please select a PNG, JPG, GIF image.");
    }
});
// Stack nested Bootstrap modals: bump each newly shown modal (and its
// backdrop) above the modals already visible, then focus the dialog.
$(document).on('show.bs.modal', '.modal', function () {
    var zIndex = 1050 + (10 * $('.modal:visible').length);
    $(this).css('z-index', zIndex);
    setTimeout(function () {
        $('.modal-backdrop:not(.modal-backdrop-stacked)').css('z-index', zIndex - 1).addClass('modal-backdrop-stacked');
    }, 0);
}).on('shown.bs.modal', '.modal', function () {
    focusOnPopup($(this).find('>.modal-dialog'));
}).on('click', '.modal>.modal-dialog>.popup-focus', function () {
    return false;
});
// Full-screen loading overlay helpers; msg is an optional status line.
function showGlobalLoader(msg) {
    $("#global-loader-back, #global-loader").stop().fadeIn();
    if (typeof msg != 'undefined' && msg) {
        $("#global-loader-text").html(msg).stop().fadeIn();
    }
}
function hideGlobalLoader() {
    $("#global-loader-back, #global-loader, #global-loader-text").stop().fadeOut('fast');
}
// Boot sequence: render the layout shell, then load app data.
$(document).ready(function () {
    Layout.load();
    loadApp();
});
// Run all top-level loaders in parallel; when every one has resolved,
// hide the global loader and start the app proper.
function loadApp() {
    var __loaders = [loadWorkingPlace, Dbperson.load];
    var __loadingCount = __loaders.length;
    var __loadedCount = 0;
    function done() {
        if (++__loadedCount >= __loadingCount) {
            hideGlobalLoader();
            startApp();
        }
    }
    $.each(__loaders, function (i, loader) {
        loader().done(done);
    });
}
// Decide whether to resume the onboarding wizard / product tour, then load search.
function startApp() {
    user_wizard = UserData.wizard;
    if (DevOptions.debug && DevOptions.wizard > UserWizards.NONE) {
        user_wizard = DevOptions.wizard;
    }
    if (user_wizard < UserWizards.TOUR) {
        // Layout.activatePage(InitParams.page);
    } else {
        // NOTE(review): wizard values >= TOUR appear to encode the tour step
        // offset by 2 — confirm against how UserData.wizard is written server-side.
        tour_step = user_wizard - 2;
        user_wizard = UserWizards.NONE;
        openTour();
    }
    Search.load();
}
//Loading profile including image
// Load the current user's profile (accounts, workspaces, memberships,
// integrations), pick the active workspace/account, render the avatar,
// then kick off dependent loads (members, pipelines, workflows).
// workspace_id: optional workspace to activate; falsy -> first enabled one.
// page/subpage/refresh are forwarded to loadPipelines for page activation.
function loadWorkingPlace(workspace_id, page, subpage, refresh) {
    return $.getJSON('/api/v1/users/me?include_accounts=1&include_accounts_membership=1&include_workspaces=1&include_workspaces_membership=1&include_integrations=1', function (data) {
        if (data) {
            UserData = data;
            accounts = data.accounts;
            workspaces = data.workspaces;
            if (typeof workspace_id === 'undefined' || workspace_id === null) {
                workspace_id = false;
            }
            // Pick the requested workspace, or the first one the user is enabled in.
            current_workspace_index = null;
            $.each(workspaces, function (i, workspace) {
                if ( ! workspace_id) {
                    if (workspace.membership.is_enabled) {
                        current_workspace_index = i;
                        return false;
                    }
                } else if (workspace.id === workspace_id) {
                    if (workspace.membership.is_enabled) {
                        current_workspace_index = i;
                    }
                    return false;
                }
            });
            if (current_workspace_index === null) {
                window.location = '/message/suspended-workspace-membership';
                // Bug fix: stop here — the code below would otherwise index
                // workspaces with null and throw while the redirect is pending.
                return;
            }
            var account_id = data.workspaces[current_workspace_index].account_id; // (stray ';;' removed)
            // Pick the account owning the active workspace, if the user is enabled in it.
            current_account_index = null;
            $.each(accounts, function (i, account) {
                if ( ! account_id) {
                    if (account.membership.is_enabled) {
                        current_account_index = i;
                        return false;
                    }
                } else if (account.id === account_id) {
                    if (account.membership.is_enabled) {
                        current_account_index = i;
                    }
                    return false;
                }
            });
            if (current_account_index === null) {
                // window.location = '/message/suspended-account-membership';
                plan_class = 0;
            } else {
                current_account_id = data.accounts[current_account_index].id;
                plan_class = data.accounts[current_account_index].plan_class;
            }
            current_workspace_id = data.workspaces[current_workspace_index].id;
            // Avatar priority: uploaded avatar, then integration avatar
            // (type 2 / type 3 use different sizing URLs), then placeholder.
            if (data.avatar) {
                $("#user-profile-image").attr('src', '/uploads/users/avatars/' + data.avatar + '_200x200.jpg');
            } else if (data.integration_avatar) {
                if (data.integration_type == 2) {
                    $("#user-profile-image").attr('src', '//' + data.integration_avatar.replace(/normal/, '200x200'));
                } else if (data.integration_type == 3) {
                    $("#user-profile-image").attr('src', '//' + data.integration_avatar + '?sz=200');
                }
            } else {
                $("#user-profile-image").attr('src', '/images/profile-blank.png');
            }
            $("#user-profile-image").fadeIn();
            // Only owners/admins see the lead-sources menu entry.
            if (data.workspaces[current_workspace_index].membership.role === 'owner' || data.workspaces[current_workspace_index].membership.role === 'admin') {
                $('.page-menu-item-sources').removeClass('hide');
            } else {
                $('.page-menu-item-sources').addClass('hide');
            }
            updateAvailableDealCount(data.workspaces[current_workspace_index].membership.credit_balance);
            showUpgradeLink();
            loadWorkspaceMembers(current_workspace_id)
                .done(function() {
                    loadPipelines(current_workspace_id, page, subpage, refresh);
                    load_workflows(current_workspace_id).done(function(data) {
                        workflows = data;
                    });
                    $('.page-sidebar .workspace-name').text(workspaces[current_workspace_index].name);
                });
        }
    });
}
// Fetch a workspace's pipelines (with stages), remember the first as the
// active pipeline, then activate the requested page (or the initial-URL one).
function loadPipelines(workspace_id, page, subpage, refresh) {
    return $.get('/api/v1/workspaces/' + workspace_id + '/pipelines?include_stages=1', function (data) {
        pipelines = data;
        if (data.length) {
            current_pipeline_index = 0;
            current_pipeline_id = data[0].id;
            checkNewLeads();
        }
        if (page) {
            Layout.activatePage(page, subpage, refresh);
        } else {
            Layout.activatePage(InitParams.page, InitParams.settingsPage, true);
        }
    });
}
// Fetch and cache the workspace's members (with user + integration details).
function loadWorkspaceMembers(workspace_id) {
    return $.get('/api/v1/workspaces/' + workspace_id + '/workspace_members?include_user=1&include_user_integrations=1', function (data) {
        workspace_members = data;
    });
}
// Fetch a workspace's workflows. Optional flags request embedded
// actions/sources and filter by enabled state.
function load_workflows(workspace_id, include_actions, include_sources, is_enabled) {
    // BUG FIX: params was declared without a value, so assigning properties
    // below threw a TypeError whenever any optional flag was passed.
    var params = {};
    if (include_sources) {
        params.include_sources = 1;
    }
    if (include_actions) {
        params.include_actions = 1;
    }
    if (is_enabled != null) {
        params.is_enabled = is_enabled ? 1 : 0;
    }
    return $.getJSON('/api/v1/workspaces/' + workspace_id + '/workflows', params);
}
// POST a new API token of the given type and hand the response to callback.
function createToken(type, type_id, callback) {
    return $.ajax({
        method: 'post',
        url: '/api/v1/tokens',
        data: {
            type: type,
            type_id: type_id,
        },
    }).done(function (data) {
        callback(data);
    });
}
// Prospect search = a 60s visual countdown plus a bounded polling loop.
function startProspectSearching() {
    startProspectSearchingCounter();
    searchProspects();
}
var prospectSearchingTimerHander = null;
var prospectSearchingTimer = 60; // seconds remaining on the visual countdown
// Show the countdown widget and start ticking once per second.
function startProspectSearchingCounter() {
    $('<div id="plsearching-counter">\
<div class="plsearching-counter-inner">\
<img src="/images/clock2x.png" />\
<div class="counter-wrapper">\
<h6>Searching for Prospects</h6>\
<span class="counter">60 seconds</span>\
</div>\
</div>\
</div>').appendTo('body');
    prospectSearchingTimerHander = setTimeout(countProspectSearching, 1000);
}
// One countdown tick: update the label and reschedule until it hits zero.
function countProspectSearching() {
    //console.log('prospectSearchingTimer: ' + prospectSearchingTimer);
    prospectSearchingTimer--;
    $("#plsearching-counter .counter").html(prospectSearchingTimer + ' second' + (prospectSearchingTimer > 1 ? 's' : ''));
    if (prospectSearchingTimer <= 0) {
        stopProspectSearchingCounter();
    } else {
        prospectSearchingTimerHander = setTimeout(countProspectSearching, 1000);
    }
}
// Cancel any pending countdown tick.
function stopProspectSearchingCounter() {
    if (prospectSearchingTimerHander) {
        clearTimeout(prospectSearchingTimerHander);
    }
    prospectSearchingTimerHander = null;
}
var prospectSearchingCounter = 2; // polling attempts remaining
// Poll for leads: first attempt after 10s (or `time` ms), one 5s retry,
// then give up and report zero results.
function searchProspects(time) {
    //console.log('prospectSearchingCounter: ' + prospectSearchingCounter);
    if (prospectSearchingCounter > 0) {
        setTimeout(function () {
            Leads.loadClients().done(function (leads) {
                if (leads.length <= 0) {
                    searchProspects(5000);
                    return false;
                } else {
                    stopProspectSearching(leads.length);
                }
                hideGlobalLoader();
            });
        }, typeof time == 'undefined' ? 10000 : time);
        prospectSearchingCounter--;
    } else {
        stopProspectSearching(0);
    }
}
// Stop the countdown and show how many prospects were found in how long.
function stopProspectSearching(count) {
    stopProspectSearchingCounter();
    showProspectsSearchResultPopup(count, 60 - prospectSearchingTimer);
}
//Available Deal Count functions
var available_deal_count = 0;
// Render the remaining-leads badge: whole credits plus an optional
// quarter-credit fraction, with a pulse animation and tooltip refresh.
function updateAvailableDealCount(count) {
    available_deal_count = count;
    // (Removed dead code that read and assigned an undeclared global `repeat`;
    // its value was never used anywhere in this function.)
    var html = parseInt(available_deal_count);
    var fraction = available_deal_count - html;
    var numerator = fraction / 0.25; // number of quarter-credits
    if (numerator > 0) {
        html += ' <span class="fraction"><sup>' + numerator + '</sup>/<sub>4</sub></span>';
    }
    $(".available-deal-counter a").html(html);
    if ($(".available-deal-counter").hasClass('hidden')) {
        $(".available-deal-counter").removeClass('hidden');
    }
    // pulsate('destroy') throws when no pulsation is active; ignore that case.
    try {
        $(".available-deal-counter a").pulsate('destroy');
    } catch (e) {
    }
    if (available_deal_count > 1) {
        $(".available-deal-counter a").removeClass('danger').pulsate({color: "#399bc3", repeat: 2});
    } else {
        $(".available-deal-counter a").addClass('danger').pulsate({color: "#bf1c56", pause: 5000});
    }
    $(".available-deal-counter a").attr('data-original-title', count + ' Lead' + (count > 1 ? 's' : '') + ' Remaining');
}
// Re-fetch the workspace's credit balance and refresh the badge.
function refreshAvailableDealCount() {
    $.getJSON('/api/v1/workspaces/' + current_workspace_id + '?include_membership=1', function (data) {
        if (data.membership !== null) {
            updateAvailableDealCount(data.membership.credit_balance);
        }
    });
}
// True when at least one credit remains to accept a lead.
function isLeadAcceptable() {
    return available_deal_count > 0;
}
// Like isLeadAcceptable, but when out of credits prompts the user: owners
// and admins are offered the billing page, others are told to ask an admin.
function checkIfLeadAcceptable() {
    if (isLeadAcceptable()) {
        return true;
    } else {
        // $(".available-deal-counter a").pulsate({color: "#bf1c56", repeat: 3});
        // showErrorMessage('Accept Lead', 'No more available deals.');
        if (workspaces[current_workspace_index].membership.role === 'owner' || workspaces[current_workspace_index].membership.role === 'admin') {
            bootbox.confirm('Please purchase additional credits to accept more leads.',
                function (result) {
                    if (result) {
                        Layout.activatePage(Pages.SETTINGS, 'billing');
                    }
                }
            );
        } else {
            bootbox.confirm('Please contact your account owner or admin to add more credits.',
                function (result) {
                }
            );
        }
        // if (confirm('Upgrade your account to accept more prospects.')) {
        //     activatePage(Pages.SETTINGS, 'billing');
        // }
        return false;
    }
}
// Toggle the upgrade banner for free-plan accounts.
// NOTE(review): plan_class is initialized to 0 and assigned from
// account.plan_class; confirm the API really returns the string 'free',
// otherwise this strict comparison never matches.
function showUpgradeLink() {
    if (plan_class === 'free') {
        $(".plan-upgrade-recommender").show();
    } else {
        $(".plan-upgrade-recommender").hide();
    }
}
//The function that checks new leads
// Polls every 15 minutes; announces new leads and bumps the sidebar badge.
function checkNewLeads() {
    $.getJSON('/api/v1/workspaces/' + current_workspace_id + '/leads/info/new', function (data) {
        if (data.count > 0 && data.since > 0) {
            var since = moment(data.since * 1000).fromNow();
            showSuccessMessage("New Leads", data.count + " new lead(s) have(has) been added to your account since " + since + ".");
            // Update sidebar leads tab badge value
            Sidebar.addNewLeadsCount(data.count)
        }
    });
    setTimeout(checkNewLeads, 15 * 60 * 1000);
}
// Origin of the current page with a trailing slash, e.g. "https://host/".
function getRootUrl() {
    return (location.origin ? location.origin : location.protocol + '//' + location.hostname) + '/';
}
// Current URL up to and including the last '/' (regex match result).
function getBaseUrl() {
    var re = new RegExp(/^.*\//);
    return re.exec(location.href);
}
// Build a query string of active dev options that differ from their
// defaults (boolean true options appear as bare names). Returns '' unless
// debug mode is on.
function getOptionParams() {
    var parts = [];
    if (DevOptions.debug) {
        for (var name in DevOptions) {
            var value = DevOptions[name];
            if (!value || DefaultDevOptions[name] === value) {
                continue;
            }
            parts.push(value === true ? name : name + '=' + value);
        }
    }
    return parts.join('&');
}
// Push a new history entry for url, preserving any active dev options.
function changeUrlWithOptionParams(url) {
    var params = getOptionParams();
    if (params) {
        url += '?' + params;
    }
    window.history.pushState(null, null, url);
}
// Highlight the search keywords inside an embedded-tweet iframe and, when
// status is falsy, strip Twitter branding/actions from it.
// keywords arrives as an array; only its first entry is used. A leading '#'
// matches hashtags; a "quoted phrase" matches whole; otherwise each
// space-separated word is highlighted in text nodes.
// NOTE(review): keyword text is interpolated into RegExp unescaped, so regex
// metacharacters in a keyword alter the match — confirm inputs are sanitized.
function decorateTweet(ttiframe, keywords, status) {
    keywords = keywords[0];
    var $tweet = $(ttiframe).contents();
    var head = $tweet.find('head');
    if (head.length) {
        head.append('<style>.keyword-highlight { color: #c63 !important; font-weight: bold; }</style>');
    }
    if (keywords && keywords.length > 0) {
        keywords = keywords.toLowerCase();
        //highlights by hashtag
        if (keywords[0] == '#') {
            $tweet.find('.Tweet-body').find(".hashtag").each(function () {
                var k = $(this).find(".PrettyLink-value").html();
                if (k && k.length > 0 && "#" + k.toLowerCase() == keywords) {
                    k = "#" + k;
                    if (keywords == k) {
                        $(this).addClass('keyword-highlight');
                    }
                }
            });
        } else {
            if (keywords[0] == '"' && keywords[keywords.length - 1] == '"') {
                keywords = [keywords.substr(1, keywords.length - 2)];
            } else {
                keywords = keywords.split(' ');
            }
            //highlights by keyword
            for (var j = 0; j < keywords.length; j++) {
                $tweet.find('.Tweet-body').find('*').contents().filter(function () {
                    return this.nodeType == 3;
                }).each(function () {
                    var n = this;
                    var keyword = keywords[j];
                    var regex = new RegExp('\\b' + keyword + '\\b');
                    // Split each matching text node around the hit and wrap the
                    // hit in a highlight span, continuing after the match.
                    for (var i; (i = n.nodeValue.toLowerCase().search(regex)) > -1; n = after) {
                        var after = n.splitText(i + keyword.length);
                        var highlighted = n.splitText(i);
                        var span = document.createElement('span');
                        span.className = 'keyword-highlight';
                        span.appendChild(highlighted);
                        after.parentNode.insertBefore(span, after);
                    }
                });
            }
        }
    }
    if (!status) {
        //$tweet.find('.Tweet-header').hide();
        //$tweet.find('.Tweet-header').css('padding-left', 0);
        $tweet.find('.Tweet-brand').hide();
        //$tweet.find('.TweetAuthor-avatar').hide();
        //$tweet.find('.TweetAuthor-screenName').removeClass('TweetAuthor-screenName');
        $tweet.find('.Tweet-actions').hide();
        //$tweet.find('.Tweet-header').show();
    }
}
// Strip leading whitespace from str, or leading runs of chr when given.
// Note: chr is interpolated into a RegExp, so regex metacharacters in it
// behave as regex syntax.
function ltrim(str, chr) {
    var pattern = chr ? '^' + chr + '+' : '^\\s+';
    return str.replace(new RegExp(pattern), '');
}
// Insert thousands separators into the digits of x, e.g. 1234567 -> "1,234,567".
function numberWithCommas(x) {
    var digits = x.toString();
    return digits.replace(/\B(?=(\d{3})+(?!\d))/g, ",");
}
// Wrap keyword matches in text with a highlight span, optionally truncating
// the result to maxlen characters (plus '...'). keywords arrives as an array;
// only its first entry is used — a "quoted phrase" matches whole, otherwise
// each space-separated word is highlighted.
// NOTE(review): keyword text goes into RegExp unescaped, so regex
// metacharacters in a keyword alter the match — confirm inputs are sanitized.
function decorateDealSourceText(text, keywords, maxlen) {
    if (typeof maxlen == 'undefined') {
        maxlen = 0;
    }
    keywords = keywords[0];
    if (keywords[0] == '"' && keywords[keywords.length - 1] == '"') {
        keywords = [keywords.substr(1, keywords.length - 2)];
    } else {
        keywords = keywords.split(' ');
    }
    //highlights by keyword
    for (var j = 0; j < keywords.length; j++) {
        var keyword = keywords[j];
        text = text.replace(new RegExp('\\b' + keyword + '\\b', 'gi'), '<span class="keyword-highlight">' + keyword + '</span>');
    }
    if (maxlen > 0) {
        text = (text.length > maxlen ? $('<div />').append(text.substr(0, maxlen) + '...').html() : text);
    }
    return text;
}
/**
 * Formats a unix timestamp (seconds) as e.g. "Jan 5, 2020".
 * The month is always abbreviated to three letters; the showFullMonthName
 * flag is currently ignored (the full-name branch was disabled upstream).
 */
function getDateString(tc, showFullMonthName) {
    var when = new Date(tc * 1000);
    var month = MonthNames[when.getMonth()].substr(0, 3);
    return month + ' ' + when.getDate() + ', ' + when.getFullYear();
}
/**
 * Formats a unix timestamp (seconds) as a 12-hour clock string, e.g.
 * "1:05 PM". Hour 0 is rendered as 12; minutes are zero-padded.
 */
function getTimeString(tc) {
    var when = new Date(tc * 1000);
    var suffix = when.getHours() >= 12 ? 'PM' : 'AM';
    var hour = when.getHours() % 12 || 12;
    var minute = String(when.getMinutes()).padStart(2, '0');
    return hour + ':' + minute + ' ' + suffix;
}
/**
 * Formats a unix timestamp (seconds) as "Mon D, YYYY h:mm AM/PM" by
 * delegating to getDateString and getTimeString.
 * (The hour/minute/ampm computation previously duplicated here was dead
 * code — both halves come from the existing helpers.)
 */
function getDateTimeString(tc, showFullMonthName) {
    return getDateString(tc, showFullMonthName) + " " + getTimeString(tc);
}
/**
 * Flashes the "danger-required" style on a form element for 2.5 seconds to
 * draw attention to a failed required-field check.
 */
function validateForm(el) {
    var $el = $(el);
    $el.addClass('danger-required');
    setTimeout(function () {
        $el.removeClass('danger-required');
    }, 2500);
}
// Slides the notification menu out of view (animation classes are swapped;
// the CSS drives the actual transition).
function closeNotificationMenu() {
    $('#notification-menu').removeClass('slideInRight').addClass('slideOutRight');
}
// Slides the notification menu into view (counterpart of closeNotificationMenu).
function showNotificationMenu() {
    $('#notification-menu').removeClass('slideOutRight').addClass('slideInRight');
}
// Pops a blue (success) toast for 3 seconds via SmartAdmin's $.bigBox.
function showSuccessMessage(title, msg) {
    var options = {
        title: title,
        content: msg,
        color: "#00aaf0",
        tabicon: false,
        sound: false,
        timeout: 3000
    };
    $.bigBox(options);
}
// Pops an amber (warning) toast for 3 seconds via SmartAdmin's $.bigBox.
function showWarningMessage(title, msg) {
    var options = {
        title: title,
        content: msg,
        color: "#cc9933",
        tabicon: false,
        sound: false,
        timeout: 3000
    };
    $.bigBox(options);
}
// Pops a red (error) toast for 3 seconds via SmartAdmin's $.bigBox.
function showErrorMessage(title, msg) {
    var options = {
        title: title,
        content: msg,
        color: "#993333",
        tabicon: false,
        sound: false,
        timeout: 3000
    };
    $.bigBox(options);
}
// Re-renders one subscription card for the chosen subscription/plan pair:
// price and period labels, the "save X%" badge for annual billing, and the
// subscribe button state. Reads the page-level $plans / $subscriptions
// lookup tables and the global current_account_id.
function update_subscription_card(card, subscription_id, plan_id, plan_class) {
// Remember the selection on the element for later handlers.
card.data('id', subscription_id);
card.data('plan-id', plan_id);
card.data('value', $plans[plan_id].price);
card.find('.team-size').text($subscriptions[subscription_id].users);
// card.find('.deal-count').text($subscriptions[subscription_id].deals * Math.abs($plans[plan_id].period));
card.find('h1.subscription-name').html($subscriptions[subscription_id].name);
// period_word === false marks plans without a billing period; the
// monthly/annually toggle is hidden for those.
if ($plans[plan_id].period_word !== false) {
card.find('.plan-period').removeClass('active');
card.find('.plan-period-' + $plans[plan_id].period_word).addClass('active');
card.find('.billing-variants').show();
card.find('.period-container').show();
} else {
card.find('.billing-variants').hide();
card.find('.period-container').hide();
}
card.find('.price').text($plans[plan_id].month_price_word);
card.find('.period').text($plans[plan_id].month_period_word);
// Show the full total only when it differs from the per-month figure and
// the plan spans more than one month.
if ($plans[plan_id].month_price_word !== $plans[plan_id].price_word && $plans[plan_id].period > 1) {
card.find('.total-price-container').css('visibility', 'visible');
card.find('.total-price').text($plans[plan_id].price_word);
card.find('.total-period').text($plans[plan_id].period_word);
card.find('.for-annual-only').css('visibility', 'visible');
} else {
card.find('.total-price-container').css('visibility', 'hidden');
card.find('.for-annual-only').css('visibility', 'hidden');
}
card.find('.btn-choose-plan').attr('onclick', 'openPaymentPage(' + current_account_id + ', ' + subscription_id + ', ' + plan_id + ', "' + plan_class + '");');
// NOTE(review): plan_annually/plan_monthly/save_percentage(_over) are
// assigned without var/let and leak into global scope — confirm nothing
// else depends on that before tightening.
plan_annually = $plans[$subscriptions[subscription_id].annually_id];
plan_monthly = $plans[$subscriptions[subscription_id].monthly_id];
// Percentage saved by paying annually instead of monthly; the catch guards
// against subscriptions missing one of the two plan variants.
try {
save_percentage = Math.floor((plan_monthly.period * plan_monthly.price - plan_annually.price / plan_annually.period) / (plan_monthly.period * plan_monthly.price) * 100);
save_percentage_over = Math.floor(save_percentage / 10) * 10;
} catch (e) {
save_percentage = false;
}
// The savings badge only makes sense on the non-monthly plan.
if (plan_id == plan_monthly.id) {
card.find('.save-money').hide();
} else {
if (save_percentage > 0) {
// "Odd" percentages are rounded down to "over N0%" for nicer copy.
if (save_percentage <= 10 || save_percentage % 10 === 0) {
card.find('.save-percentage').text(save_percentage + '%');
} else {
card.find('.save-percentage').text('over ' + save_percentage_over + '%!');
}
card.find('.save-money').show();
} else {
card.find('.save-money').hide();
}
}
// If this card shows the user's current plan, purchasing is disabled and the
// button shows either ACTIVE (period == -1) or the expiry date.
if (plan_id == $('body').data('plan-id')) {
$('#payment-form button').prop('disabled', true).hide();
card.find('.btn-choose-plan').prop('disabled', true);
if ($plans[plan_id].period == -1) {
card.find('.btn-choose-plan').text('ACTIVE');
} else {
// Expiry = paid-at plus the plan period, in months.
var dt = new Date($('body').data('paid-at') * 1000);
dt.setMonth(dt.getMonth() + parseInt($plans[plan_id].period));
var expire_at = dt.getTime() / 1000;
card.find('.btn-choose-plan').text('Ends on: ' + getDateString(expire_at));
}
} else {
$('#payment-form button').prop('disabled', false).show();
card.find('.btn-choose-plan').prop('disabled', false);
card.find('.btn-choose-plan').text('SUBSCRIBE');
}
}
// Intercepts the payment form submit: disables the button, stashes the
// chosen plan/price on the form, and hands the card fields to Stripe for
// tokenization (stripeResponseHandler continues the flow).
$(document).on('submit', '#payment-form', function (event) {
var $form = $(this);
$form.find('button').prop('disabled', true);
$form.data('plan-id', $('#payment-page .card').data('plan-id'));
$form.data('value', $('#payment-page .card').data('value'));
Stripe.card.createToken($form, stripeResponseHandler);
// Returning false blocks the native (non-AJAX) form submission.
return false;
});
// Stripe.card.createToken callback for the payment form: on error it shows
// the message and re-enables the submit button; on success it POSTs a
// purchase to the accounts API and reports the change to analytics.
function stripeResponseHandler(status, response) {
var $form = $('#payment-form');
if (response.error) {
showErrorMessage("Error!!!", response.error.message);
// Let the user correct the card details and resubmit.
$form.find('button').prop('disabled', false);
} else {
// NOTE(review): token is captured but unused below (response.id is sent
// directly); it only matters for the commented-out legacy submit path.
var token = response.id;
$.ajax({
url: '/api/v1/accounts/' + current_account_id + '/purchases',
method: 'POST',
data: {
token: response.id,
type: 1,
product_id: $form.data('plan-id'),
value: $form.data('value'),
quantity: 1,
coupon_code: $form.find('input[name=coupon_code]').val(),
},
success: function () {
showSuccessMessage("Payment Information", "Payment completed successfully.");
Settings.openBillingPage(false);
// Report the plan change to segment analytics when it is loaded.
if (typeof analytics !== 'undefined' && analytics !== null) {
window.segment_traits.plan = $plans[$form.data('plan-id')].class;
analytics.identify(window.segment_user, window.segment_traits);
analytics.track('Subscription Started', {
// username: values.keywords,
// billingEmail: values.location,
prev_plan_class: $user_usage.plan_class,
plan_class: $plans[$form.data('plan-id')].class
});
}
}
// NOTE(review): there is no error handler on this $.ajax call, so a
// failed purchase leaves the button disabled with no user feedback.
});
// Insert the token into the form so it gets submitted to the server
// $form.append($('<input type="hidden" name="stripeToken" />').val(token));
// // and submit
// $form.get(0).submit();
}
}
/**
 * Decorates raw tweet text: @mentions/#hashtags are wrapped in a styled
 * <span>, and http(s) URLs become styled links. Tokens are delimited by
 * spaces (or string start/end).
 */
function style_tweet(content) {
    // Highlight @mentions and #hashtags.
    content = content.replace(/( |^)([@#].+?)(?=( |$))/g, function (match, $1, $2, offset, original) {
        return $1 + '<span class="style-twitter-links">' + $2 + '</span>';
    });
    // Link URLs. Fix: the anchor was previously closed with </span>, which
    // produced malformed markup; it is now closed with </a>.
    content = content.replace(/( |^)(https?:\/\/.+?)(?=( |$))/g, function (match, $1, $2, offset, original) {
        return $1 + '<a href="' + $2 + '" class="style-twitter-links">' + $2 + '</a>';
    });
    return content;
}
// Returns the static toolbar markup for the wysihtml5 rich-text editor
// (bold/italic/lists/alignment plus undo/redo). The trailing backslashes
// continue a single string literal; the leading whitespace on each line is
// part of the emitted HTML.
function getEditableWYSIHTML5OptionsHtml() {
return '\
<div class="btn-toolbar" data-role="editor-toolbar">\
<div class="btn-group">\
<a class="btn" data-edit="bold" title="Bold (Ctrl/Cmd+B)"><i class="fa fa-bold"></i></a>\
<a class="btn" data-edit="italic" title="Italic (Ctrl/Cmd+I)"><i class="fa fa-italic"></i></a>\
<a class="btn" data-edit="strikethrough" title="Strikethrough"><i class="fa fa-strikethrough"></i></a>\
<a class="btn" data-edit="underline" title="Underline (Ctrl/Cmd+U)"><i class="fa fa-underline"></i></a>\
<a class="btn" data-edit="insertunorderedlist" title="Bullet list"><i class="fa fa-list-ul"></i></a>\
<a class="btn" data-edit="insertorderedlist" title="Number list"><i class="fa fa-list-ol"></i></a>\
<a class="btn" data-edit="justifyleft" title="Align Left (Ctrl/Cmd+L)"><i class="fa fa-align-left"></i></a>\
<a class="btn" data-edit="justifycenter" title="Center (Ctrl/Cmd+E)"><i class="fa fa-align-center"></i></a>\
<a class="btn" data-edit="justifyright" title="Align Right (Ctrl/Cmd+R)"><i class="fa fa-align-right"></i></a>\
<a class="btn" data-edit="justifyfull" title="Justify (Ctrl/Cmd+J)"><i class="fa fa-align-justify"></i></a>\
</div>\
<div class="btn-group">\
<a class="btn" data-edit="undo" title="Undo (Ctrl/Cmd+Z)"><i class="fa fa-undo"></i></a>\
<a class="btn" data-edit="redo" title="Redo (Ctrl/Cmd+Y)"><i class="fa fa-repeat"></i></a>\
</div>\
</div>\
';
}
/**
 * Moves keyboard focus into a popup dialog. If the dialog has no
 * ".popup-focus" child, a hidden anchor is prepended to receive focus.
 */
function focusOnPopup($dialog) {
    var $target = $dialog.find('>.popup-focus');
    if ($target.length === 0) {
        $target = $('<a href="#">').prependTo($dialog);
    }
    $target.focus();
}
/**
 * Auto-sizes a textarea to its content, capped at `limit` lines.
 * While under the cap, the current value and caret position are cached via
 * jQuery .data so that an edit which would exceed the cap can be rolled
 * back to the last value that fit.
 *
 * @param element    the raw textarea DOM element
 * @param limit      maximum number of visible lines
 * @param lineHeight pixel height of one line
 */
function textareaLineCountResize(element, limit, lineHeight) {
    var carretPos = element.selectionStart;
    // Collapse to one line so scrollHeight reports the content height.
    element.style.height = lineHeight + 'px';
    var lineCount = Math.round(element.scrollHeight / lineHeight);
    if (lineCount > limit) {
        var prevValue = $(element).data('prev-value');
        var prevCarretPos = $(element).data('prev-carret-pos');
        if (prevValue != null) {
            if (element.value === prevValue) {
                // NOTE(review): clearing the value when it already equals the
                // cached one looks suspicious but is preserved as-is.
                element.value = '';
            } else {
                // Roll back to the last value that fit. (The original
                // assigned prevValue twice in a row; the duplicate
                // assignment was redundant and has been removed.)
                element.value = prevValue;
            }
        }
        element.style.height = limit * lineHeight + 'px';
        // Restore the caret recorded before the rejected edit.
        element.selectionStart = prevCarretPos;
        element.selectionEnd = prevCarretPos;
    } else {
        element.style.height = lineCount * lineHeight + 'px';
        $(element).data('prev-value', element.value);
        $(element).data('prev-carret-pos', carretPos);
    }
}
|
<filename>spark/core/src/test/scala/org/elasticsearch/spark/cfg/SparkConfigTest.scala
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.spark.cfg
import org.apache.hadoop.security.UserGroupInformation
import org.elasticsearch.spark.serialization.ReflectionUtils._
import org.junit.Test
import org.junit.Assert._
import org.hamcrest.Matchers._
import org.apache.spark.SparkConf
import org.elasticsearch.hadoop.cfg.PropertiesSettings
import java.util.Locale
/**
 * Unit tests for SparkSettingsManager: properties loaded from a SparkConf
 * must survive a save()/load() round-trip through PropertiesSettings, and
 * the opaque id must embed the current user / app identity.
 */
class SparkConfigTest {
// A plain (un-prefixed) property round-trips unchanged.
@Test
def testProperties(): Unit = {
val cfg = new SparkConf().set("type", "onegative")
val settings = new SparkSettingsManager().load(cfg)
val props = new PropertiesSettings().load(settings.save())
assertEquals("onegative", props.getProperty("type"))
}
// A "spark."-prefixed property is exposed under its unprefixed name.
@Test
def testSparkProperties(): Unit = {
val cfg = new SparkConf().set("spark.type", "onegative")
val settings = new SparkSettingsManager().load(cfg)
val props = new PropertiesSettings().load(settings.save())
assertEquals("onegative", props.getProperty("type"))
}
// When both forms are present, the unprefixed property wins.
@Test
def testSparkPropertiesOverride(): Unit = {
val cfg = new SparkConf().set("spark.type", "fail").set("type", "win")
val settings = new SparkSettingsManager().load(cfg)
val props = new PropertiesSettings().load(settings.save())
assertEquals("win", props.getProperty("type"))
}
// The opaque id is "[spark] [user] [app name] [app id]"; the latter two are
// empty when the SparkConf does not define them.
@Test
def testOpaqueId(): Unit = {
var cfg = new SparkConf()
assertEquals(String.format(Locale.ROOT, "[spark] [%s] [] []", UserGroupInformation.getCurrentUser.getShortUserName),
new SparkSettingsManager().load(cfg).getOpaqueId)
val appName = "some app"
val appdId = "some app id"
cfg = new SparkConf().set("spark.app.name", appName).set("spark.app.id", appdId)
assertEquals(String.format(Locale.ROOT, "[spark] [%s] [%s] [%s]", UserGroupInformation.getCurrentUser.getShortUserName, appName,
appdId), new SparkSettingsManager().load(cfg).getOpaqueId)
}
}
|
<reponame>RotaNova/rotanova-UI<filename>src/api/noticeManage/systemMessages.js
import Axios from '../http'
// System-announcement endpoints (the previous comments were copy-pasted and
// all carried the same text; descriptions below follow the endpoint names).
export default {
  // Paged list of system announcements.
  getAnnouncementItemPage: params => Axios.post(`/v1/sysAnnouncementSys/getAnnouncementItemPage`, params),
  // Update an existing announcement.
  updateAnnouncement: params => Axios.post(`/v1/sysAnnouncementSys/updateAnnouncement`, params),
  // Detail of a single announcement.
  getAnnouncementInfo: params => Axios.get(`/v1/sysAnnouncementSys/getAnnouncementInfo`, { params }),
  // Publish an announcement.
  publishAnnouncement: params => Axios.post(`/v1/sysAnnouncementSys/publishAnnouncement`, params),
  // Revoke a published announcement.
  revokeAnnouncement: params => Axios.post(`/v1/sysAnnouncementSys/revokeAnnouncement`, params),
  // Delete an announcement.
  deleteAnnouncement: params => Axios.post(`/v1/sysAnnouncementSys/deleteAnnouncement`, params),
  // List system users (announcement recipients).
  getListSysUser: params => Axios.post(`/v1/sysAnnouncementSys/getListSysUser`, params),
  // Department list. Fix: axios GET requests must carry their query in the
  // config object ({ params }); passing `params` bare (as before) silently
  // dropped the query string.
  getDeptDepartmentList: params => Axios.get(`/v1/sysAnnouncementSys/getDeptDepartmentList`, { params }),
}
|
#!/usr/bin/env bash
set -e

# Bake the kube-controller pod IP into the deployment manifest in place.
TARGET_IP=$(kubectl get pod -n kube-system -o wide| grep kube-controller | head -n 1 | awk '{print $6}')
sed "s/TARGETIP/$TARGET_IP/g" deploy/deployment.yaml > deploy/deployment.yamlg
mv deploy/deployment.yamlg deploy/deployment.yaml

kubectl apply -f deploy/

# Block until the web-show pod reports Ready.
while [[ $(kubectl get pods -l app=web-show -o 'jsonpath={..status.conditions[?(@.type=="Ready")].status}') != "True" ]]; do echo "waiting for pod running" && sleep 1; done

# Free port 8081 if something is bound to it. Fix: the original ended the
# pipeline with "| True" — `True` is not a command, so under `set -e` the
# failing pipeline aborted the script; "|| true" is the intended
# ignore-failure idiom. The redirects are also ordered so both stdout and
# stderr are discarded.
kill $(lsof -t -i:8081) >/dev/null 2>&1 || true

nohup kubectl port-forward svc/web-show --address 0.0.0.0 8081:8081 2>&1 &
|
// Thin jQuery-ajax wrapper around the /logic/api/v1/stadion endpoints.
const api = {
// Fetches every stadion; calls back with the list on success.
// NOTE(review): on failure the callback is never invoked, so callers
// waiting on it stall silently.
getStadions(callback) {
$.get('/logic/api/v1/stadion', (stadions, result) => {
if (result === 'success') {
callback(stadions);
} else {
console.log('ERROR: fetch stadions:');
}
});
},
// Fetches the booked time slots of one stadion.
getStadionTimes(stadionId, callback) {
$.get(`/logic/api/v1/stadion/${stadionId}/times`, (data, result) => {
if (result === 'success') {
callback(data);
} else {
console.log('ERROR: fetch stadion times');
}
});
},
// Books a slot: "HH:MM" strings are converted to minutes-since-midnight
// before posting.
createStadionTime(stadionId, params, callback) {
let selectedTimeStart = params.selectedTimeStart.split(':');
let selectedTimeEnd = params.selectedTimeEnd.split(':');
selectedTimeStart = Number(selectedTimeStart[0]) * 60 + Number(selectedTimeStart[1]);
selectedTimeEnd = Number(selectedTimeEnd[0]) * 60 + Number(selectedTimeEnd[1]);
const data = {
time1: selectedTimeStart,
time2: selectedTimeEnd,
date: params.selectedDate,
};
// NOTE(review): the response (its `data`/`result` shadow the outer names)
// is ignored — callback fires whether or not the POST succeeded.
$.post(`/logic/api/v1/stadion/${stadionId}/times`, data, (data, result) => {
callback();
});
},
};
export default api;
|
#!/bin/bash
# Launches n parallel query-mix clients (one eval.sh process each) against a
# WatDiv SPARQL endpoint, then waits for all of them to finish.
folder=/home/roott/watDivQueries
# initial id
k=$1
# number of vms; step between local ids
x=$2
# technique
h=${3}
s=${h}-client-eval
# NOTE(review): a and the remaining single-letter vars below are only
# forwarded to eval.sh / used in output file names; their exact meanings
# should be confirmed against eval.sh.
a=$4
# number of clients per vm
n=$5
c=$6
t=$7
# SPARQL endpoint URL prefix the clients query against.
e="http://172.19.2.112:8890/sparql?default-graph-uri=http%3A%2F%2Fwatdiv10M&query="
m=$8
o=$9
f=${10}
cd /home/roott/Client.js-brTPF
#sleep 1s
# Collect the PIDs of the background clients so we can wait on them.
spids=""
for i in `seq 1 ${n}`; do
# Remove any stale result file from a previous run for this client id.
rm /home/roott/Client.js-brTPF/eval_${h}_${k}.csv
./eval.sh ${s} ${f} ${folder}/client_${k} ${o} 30 ${t} ${k} ${c} ${e} ${m} > outputEvalWatDiv_${s}_${a}_${k}_${c} &
pid=$!
spids="$spids $pid"
# Advance the client id by the per-VM stride.
k=$(($k+$x))
done
# NOTE(review): the loop variable shadows the endpoint variable e above;
# harmless here since e is no longer needed, but easy to trip over.
for e in $spids; do
wait $e
done
#date
#ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1'
|
def palindrome_detect(arr):
    '''Return the set of all slices of ``arr`` (length >= 2) that read the
    same forwards and backwards.

    For a string input this is the set of palindromic substrings of length
    two or more; single characters are never included.
    '''
    found = set()
    n = len(arr)
    for start in range(n):
        # End is exclusive, so candidates run from length 2 up to n - start.
        for end in range(start + 2, n + 1):
            candidate = arr[start:end]
            if candidate == candidate[::-1]:
                found.add(candidate)
    return found
|
-- User profile table (SQLite dialect: AUTOINCREMENT surrogate key).
CREATE TABLE users (
id INTEGER PRIMARY KEY AUTOINCREMENT,
username TEXT NOT NULL,
email TEXT NOT NULL, -- NOTE(review): no UNIQUE constraint; duplicates are allowed
age INTEGER NOT NULL,
gender TEXT NOT NULL,
location TEXT NOT NULL,
preferences TEXT -- optional free-form preferences; may be NULL
);
|
// Computes the arithmetic mean of the values in List.
float total = 0.0f;
float average;
// Sum every element.
foreach (float number in List)
{
total += number;
}
// NOTE(review): .Length implies List is an array; a List<float> would need
// .Count — confirm against the declaration. Also, an empty collection
// yields 0/0 = NaN here rather than raising an error.
average = total / List.Length;
return average;
|
/**
 * Returns the words of {@code sentence} (split on single spaces) in reverse
 * order, separated by single spaces.
 *
 * Fixes: the original appended a separator after every word, producing a
 * trailing space that contradicted the documented example output, and built
 * the result with O(n^2) string concatenation; a StringBuilder is used
 * instead and the separator is only placed between words.
 */
public static String reverseWords(String sentence) {
    String[] words = sentence.split(" ");
    StringBuilder reversed = new StringBuilder();
    for (int i = words.length - 1; i >= 0; i--) {
        reversed.append(words[i]);
        if (i > 0) {
            reversed.append(' ');
        }
    }
    return reversed.toString();
}
// Example usage: words are emitted last-to-first.
String originalString = "This is a sentence.";
String reversedString = reverseWords(originalString);
System.out.println(reversedString); // prints "sentence. a is This"
|
package com.nils.engine.main;
import java.awt.Font;
import java.awt.FontFormatException;
import java.awt.GraphicsEnvironment;
import java.awt.geom.AffineTransform;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Random;
import javax.imageio.ImageIO;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.UnsupportedAudioFileException;
import com.nils.engine.gfx.Display;
/**
 * Static engine helpers: font registration, image loading/flipping,
 * random integers, and music playback.
 */
public class Util {
/**
* Registers the game font with the local graphics environment.
* NOTE(review): new File("/upheavtt.ttf") points at the filesystem root —
* confirm whether this should load from the classpath like loadImage does.
*/
public static void init() {
try {
GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
ge.registerFont(Font.createFont(Font.TRUETYPE_FONT, new File("/upheavtt.ttf")));
} catch (IOException|FontFormatException e) {
e.printStackTrace();
}
}
/** Loads an image from the classpath; returns null (after logging) on failure. */
public static BufferedImage loadImage(String path) {
try {
return ImageIO.read(Util.class.getResourceAsStream(path));
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
/** Returns a horizontally mirrored copy of img (nearest-neighbor sampling). */
public static BufferedImage flip(BufferedImage img) {
BufferedImage flipped = img;
AffineTransform tx = AffineTransform.getScaleInstance(-1, 1);
// NOTE(review): this first op is overwritten two lines below and appears
// to be dead code — the translate must be applied before the op is built.
AffineTransformOp op = new AffineTransformOp(tx, AffineTransformOp.TYPE_NEAREST_NEIGHBOR);
tx.translate(-flipped.getWidth(null), 0);
op = new AffineTransformOp(tx, AffineTransformOp.TYPE_NEAREST_NEIGHBOR);
flipped = op.filter(flipped, null);
return flipped;
}
/** Returns a random int uniformly drawn from the inclusive range [min, max]. */
public static int randInt(int min, int max) {
Random rand = new Random();
int randomNum = rand.nextInt((max - min) + 1) + min;
return randomNum;
}
/** Cuts the (x, y) cell of size width x height out of a sprite sheet. */
public static BufferedImage getSubimage(BufferedImage sheet, int x, int y, int width, int height) {
BufferedImage sub = null;
sub = sheet.getSubimage(x*width, y*height, width, height);
return sub;
}
/**
* Opens the audio resource at path, converts it to 16-bit signed PCM and
* starts playback once.
*/
public static void playMusic(String path) {
Clip clip = null;
FloatControl gainControl;
try {
InputStream audioSrc = Util.class.getResourceAsStream(path);
InputStream bufferedIn = new BufferedInputStream(audioSrc);
AudioInputStream ais = AudioSystem.getAudioInputStream(bufferedIn);
AudioFormat baseFormat = ais.getFormat();
AudioFormat decodeFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
baseFormat.getSampleRate(), 16, baseFormat.getChannels(), baseFormat.getChannels() * 2,
baseFormat.getSampleRate(), false);
AudioInputStream dais = AudioSystem.getAudioInputStream(decodeFormat, ais);
clip = AudioSystem.getClip();
clip.open(dais);
// NOTE(review): gainControl is fetched but never adjusted, so playback
// volume stays at the line default.
gainControl = (FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN);
if (clip.isRunning()) {
clip.stop();
}
clip.setFramePosition(0);
// start() is asynchronous; loop until the clip reports running.
while (!clip.isRunning()) {
clip.start();
}
} catch (UnsupportedAudioFileException | IOException | LineUnavailableException e) {
e.printStackTrace();
}
}
}
|
#
# Color grep results
#
GREP_OPTIONS="--color=auto"
# avoid VCS folders (if the necessary grep flags are available)
# Probe: run grep with the candidate flag on empty input; the exit status
# tells us whether this grep build understands the flag.
grep-flag-available() {
echo | grep $1 "" >/dev/null 2>&1
}
if grep-flag-available --exclude-dir=.cvs; then
for PATTERN in .cvs .git .hg .svn; do
GREP_OPTIONS+=" --exclude-dir=$PATTERN"
done
elif grep-flag-available --exclude=.cvs; then
# Older greps only support file-level --exclude.
for PATTERN in .cvs .git .hg .svn; do
GREP_OPTIONS+=" --exclude=$PATTERN"
done
fi
# unfunction is zsh-specific: drop the probe helper once the options are built.
unfunction grep-flag-available
# Bake the options into an alias rather than exporting the deprecated
# GREP_OPTIONS environment variable.
alias grep="grep $GREP_OPTIONS"
export GREP_COLOR='1;32'
|
<reponame>p2b2/p2b2-webapp<filename>src/app/ranking/ranking.component.ts
import { Component, OnInit } from '@angular/core';
import {EthereumAnalysisService} from "../../services/ethereum-analysis.service";
/**
 * Shows the top-3 Ethereum account rankings — revenue sent, revenue
 * received, and gas revenue — fetched from EthereumAnalysisService.
 */
@Component({
selector: 'app-ranking',
templateUrl: './ranking.component.html',
styleUrls: ['./ranking.component.css']
})
export class RankingComponent implements OnInit {
// Template-bound result rows; each row gains rank/value in prepareResult.
private topRevenueSent = [];
private topRevenueReceived = [];
private topGasRevenue = [];
constructor(private eas: EthereumAnalysisService) { }
ngOnInit() {
this.getTopRevenue();
}
// Kicks off the three top-3 queries in parallel.
// NOTE(review): the subscriptions are never unsubscribed; confirm the
// service completes its observables (e.g. HTTP) so this cannot leak.
private getTopRevenue(){
this.eas.getTopRevenueSent(3).subscribe((result) => {
this.topRevenueSent = this.prepareResult(result);
})
this.eas.getTopRevenueReceived(3).subscribe((result) => {
this.topRevenueReceived = this.prepareResult(result);
})
this.eas.getTopGasRevenue(3).subscribe((result) => {
this.topGasRevenue = this.prepareResult(result);
})
}
// Parses the response body, assigns 1-based ranks, and formats each value
// to a 7-decimal string for display.
private prepareResult(result){
let res = result.json();
for (var i = 0; i < res.length; ++i) {
res[i].rank = i+1
res[i].value = parseFloat(res[i].value).toFixed(7)
}
return res;
}
}
|
"use strict";
// CommonJS icon module: exports the "merge" glyph as a viewBox plus SVG
// path-data description. This file appears to be build/transpiler output —
// prefer editing the source icon rather than this artifact.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.merge = void 0;
var merge = {
"viewBox": "0 0 20 20",
"children": [{
"name": "path",
"attribs": {
"d": "M17.8896484,17.7070312L16.8916016,20C13.7548828,18.6341553,11.3964844,16.8476562,10,14.7250977\r\n\tC8.6035156,16.8476562,6.2451172,18.6341553,3.1083984,20l-0.9980469-2.2929688\r\n\tC5.1396484,16.3886719,8.5498047,14.1015625,8.5498047,10V7H5.5L10,0l4.5,7h-3.0498047v3\r\n\tC11.4501953,14.1015625,14.8603516,16.3886719,17.8896484,17.7070312z"
}
}]
};
exports.merge = merge;
|
#!/bin/bash
#
# Copyright (c) 2020 The Orbit Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Fail on any error.
set -euo pipefail
readonly REPO_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../../../" >/dev/null 2>&1 && pwd )"
readonly SCRIPT="/mnt/kokoro/checks/clang_format/check.sh"
if [ "$0" == "$SCRIPT" ]; then
# We are inside the docker container
echo -e "\n\nThe following files don't match our code-formatting standard:"
echo -e "> This list includes all files in the repo that need formatting,"
echo -e "> but changes outside of the scope of your PR won't affect the outcome"
echo -e "> of this presubmit check.\n"
# Informational pass: list every source file clang-format would change.
while read line; do
if clang-format --output-replacements-xml $line | grep '<replacement ' > /dev/null; then
echo $line
fi
done <<< $(find /mnt -name '*.cpp' -o -name '*.h' \
| grep -v "third_party/" \
| grep -v "/build" )
echo -e "--\n"
cd /mnt
# Use origin/master as reference branch, if not specified by kokoro
REFERENCE="origin/${KOKORO_GITHUB_PULL_REQUEST_TARGET_BRANCH:-master}"
MERGE_BASE="$(git merge-base $REFERENCE HEAD)" # Merge base is the commit on master this PR was branched from.
# Gating pass: only formatting problems inside the PR's own diff fail the check.
FORMATTING_DIFF="$(git diff -U0 --no-color --relative --diff-filter=r $MERGE_BASE | clang-format-diff-9 -p1)"
if [ -n "$FORMATTING_DIFF" ]; then
echo "clang-format determined the following necessary changes to your PR:"
echo "$FORMATTING_DIFF"
echo -e "\n\nHere is the list of files that need formatting:"
echo "$(echo "$FORMATTING_DIFF" | egrep '^\+\+\+' | cut -d' ' -f2 | cut -f1 )"
echo -e "--\n\nNote: We recommend you to use git clang-format to format your changes!"
exit 1
else
echo "All your changes fulfill our code formatting requirements!"
exit 0
fi
else
# Not inside the container yet: pull the formatting image and re-run this
# same script inside it with the repo mounted at /mnt.
gcloud auth configure-docker --quiet
docker run --rm --network host -v ${REPO_ROOT}:/mnt -e KOKORO_GITHUB_PULL_REQUEST_TARGET_BRANCH \
gcr.io/orbitprofiler/clang_format:latest $SCRIPT
fi
|
import React from 'react';
import { NavLink } from "react-router-dom";
/** Top navigation bar: app title plus router links to Home and Help. */
export class NavBar extends React.Component {
render() {
// "exact" keeps the Home link from staying active on every route prefix.
return (
<nav>
<div className="title">Quizzer</div>
<NavLink exact to="/">
Home
</NavLink>
<NavLink to="/help">Help</NavLink>
</nav>
);
}
}
|
#!/usr/bin/env bash
# Renders the example blog with the renderer-gatsby docker image, working in
# a throwaway temp directory under ./tmp so repeated runs do not collide.
export RENDERER_HOME=$PWD
mkdir -p ${RENDERER_HOME}/tmp
pushd $(mktemp -p ${RENDERER_HOME}/tmp -d)
export TEMP_DIR=$PWD
# Input (sources + layout) and output (rendered HTML) mount points.
mkdir -p ./input
mkdir -p ./output/html
pushd ./input
git clone https://github.com/DSchau/blog.git ./main
touch layout.json
popd
# Run as the invoking user so output files are not owned by root.
docker run --rm \
-ti \
--user="$(id -u $USER):$(id -g $USER)" \
--mount source=${TEMP_DIR}/input,target=/input,type=bind \
--mount source=${TEMP_DIR}/output,target=/output,type=bind \
renderer-gatsby
echo "Point your browser to ${TEMP_DIR}/output/html/index.html"
popd
|
<gh_stars>0
/*
_____ __________.___ _____ ______________________ _____ _____
/ _ \\______ \ | / _ \ \__ ___/\_ _____/ / _ \ / \
/ /_\ \| _/ |/ /_\ \ | | | __)_ / /_\ \ / \ / \
/ | \ | \ / | \ | | | \/ | \/ Y \
\____|__ /____|_ /___\____|__ / |____| /_______ /\____|__ /\____|__ /
\/ \/ \/ \/ \/ \/
________ .___ __________________ ________ __________________ ________ _______________ ____
\______ \ | |/ _____/\_ ___ \ \_____ \\______ \______ \ \______ \ \_ _____/\ \ / /
| | \| |\_____ \ / \ \/ / | \| _/| | \ | | \ | __)_ \ Y /
| ` \ |/ \\ \____/ | \ | \| ` \ | ` \| \ \ /
/_______ /___/_______ / \______ /\_______ /____|_ /_______ / /_______ /_______ / \___/
\/ \/ \/ \/ \/ \/ \/ \/
-> Founder: JblusDev & SmoilDev
-> Discord: Soon
-> Website: Soon
-> Team: Aria
-> Goal: CyberSecurity and development of site, software, application and other
*/
const Discord = require('discord.js');
const client = new Discord.Client();
// NOTE(review): prefix is declared but never used in this file.
var prefix = ".";
// NOTE(review): 'token' is a placeholder — supply the real bot token
// (ideally via an environment variable, never committed source).
client.login('token');
client.on("ready", () => {
console.log("[AriaTeam] > Le bot est bien activer !")
client.user.setGame("Join & Quit > DiscordBot")
});
/* Join: announce new members with a member-count footer. */
client.on('guildMemberAdd', member =>{
let embed = new Discord.RichEmbed()
.setDescription(':tada: **' + member.user.username + '** a rejoint ' + member.guild.name)
.setFooter('Nous sommes désormais ' + member.guild.memberCount)
// NOTE(review): 'ChannelJoinId' is a placeholder; channels.get returns
// undefined for an unknown id and this line would then throw.
member.guild.channels.get('ChannelJoinId').send(embed)
});
/* Quit: announce departures with a member-count footer. */
client.on('guildMemberRemove', member =>{
let embed = new Discord.RichEmbed()
.setDescription(':cry: **' + member.user.username + '** a quitté ' + member.guild.name)
.setFooter('Nous sommes désormais ' + member.guild.memberCount)
// NOTE(review): 'ChannelQuitId' is a placeholder — same caveat as above.
member.guild.channels.get('ChannelQuitId').send(embed)
});
|
package subscriber
import (
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/sqs"
"github.com/aws/aws-sdk-go/aws/credentials"
"encoding/json"
"github.com/revel/revel"
"github.com/jrallison/go-workers"
)
// SqsSubscribe polls the configured SQS queue in an endless loop, enqueues
// each message's attributes as a "save_in_db" background job, and deletes
// messages that were handed off successfully.
//
// NOTE(review): session.New is the legacy constructor (it swallows config
// errors); consider session.NewSession when next touching this code.
func SqsSubscribe() {
	sess := session.New(&aws.Config{
		Region:      aws.String(revel.Config.StringDefault("aws_region", "aws_region")),
		Credentials: credentials.NewStaticCredentials(revel.Config.StringDefault("aws_access_key", "aws_access_key"), revel.Config.StringDefault("aws_secret_key", "aws_secret_key"), ""),
		MaxRetries:  aws.Int(5),
	})
	q := sqs.New(sess)
	receiveParams := &sqs.ReceiveMessageInput{
		QueueUrl:            aws.String(revel.Config.StringDefault("aws_queue_url", "aws_queue_url")),
		MaxNumberOfMessages: aws.Int64(10),
		VisibilityTimeout:   aws.Int64(30),
		// Long polling: block up to 20s per receive instead of busy-looping.
		WaitTimeSeconds: aws.Int64(20),
	}
	revel.INFO.Println("Listening For Messages")
	for {
		messages, err := q.ReceiveMessage(receiveParams)
		if err != nil {
			revel.ERROR.Println(err.Error())
			continue
		}
		// Ranging over an empty slice is a no-op, so no length guard is needed.
		for _, msg := range messages.Messages {
			ma := make(map[string]interface{})
			// Fix: the unmarshal error used to be silently discarded; a
			// malformed body now logs and skips the message (it stays on the
			// queue and will be redelivered, matching the previous
			// nil-attribute behaviour).
			if err := json.Unmarshal([]byte(*msg.Body), &ma); err != nil {
				revel.ERROR.Println(err.Error())
				continue
			}
			if ma["MessageAttributes"] == nil {
				continue
			}
			params := make(map[string]interface{})
			params["action"] = "save_in_db"
			params["data"] = ma["MessageAttributes"]
			workers.Enqueue("packet_subscriber_queue", "save_in_db", params)
			revel.INFO.Println("Job Enqueued")
			// Delete the message so it is not delivered (and saved) twice.
			deleteParams := &sqs.DeleteMessageInput{
				QueueUrl:      aws.String(revel.Config.StringDefault("aws_queue_url", "aws_queue_url")),
				ReceiptHandle: msg.ReceiptHandle,
			}
			if _, err := q.DeleteMessage(deleteParams); err != nil {
				revel.ERROR.Println(err.Error())
			}
			revel.INFO.Println("Message ID:", *msg.MessageId, "has been deleted")
		}
	}
}
|
<gh_stars>1-10
package fwcd.fructose.time;
import java.time.Duration;
import java.time.LocalTime;
import fwcd.fructose.util.CompareUtils;
/**
* A fixed, half-open interval between two {@link LocalTime}s.
*/
public class LocalTimeInterval {
private final LocalTime startInclusive;
private final LocalTime endExclusive;
// NOTE(review): the constructor does not validate ordering; an inverted
// interval (start after end) is representable and contains() is then empty.
public LocalTimeInterval(LocalTime startInclusive, LocalTime endExclusive) {
this.startInclusive = startInclusive;
this.endExclusive = endExclusive;
}
/** Returns the inclusive start */
public LocalTime getStart() { return startInclusive; }
/** Returns the exclusive end */
public LocalTime getEnd() { return endExclusive; }
/** Returns the length of this interval. */
public Duration getDuration() { return Duration.between(startInclusive, endExclusive); }
/** True when {@code date} falls inside [start, end) — half-open, so the end itself is excluded. */
public boolean contains(LocalTime date) {
return (date.compareTo(startInclusive) >= 0) && (date.compareTo(endExclusive) < 0);
}
/**
* Returns the smallest interval spanning this and {@code other}. Because it
* takes the min/max over all four endpoints, it also normalizes inverted
* intervals as a side effect.
*/
public LocalTimeInterval merge(LocalTimeInterval other) {
return new LocalTimeInterval(
CompareUtils.min(getStart(), getEnd(), other.getStart(), other.getEnd()),
CompareUtils.max(getStart(), getEnd(), other.getStart(), other.getEnd())
);
}
/** True when the two half-open intervals share at least one instant. */
public boolean overlaps(LocalTimeInterval other) {
return getStart().isBefore(other.getEnd()) && other.getStart().isBefore(getEnd());
}
@Override
public String toString() {
return "[" + startInclusive + ", " + endExclusive + ")";
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (!getClass().equals(obj.getClass())) return false;
LocalTimeInterval other = (LocalTimeInterval) obj;
return startInclusive.equals(other.startInclusive)
&& endExclusive.equals(other.endExclusive);
}
// Consistent with equals (same endpoints give the same hash).
@Override
public int hashCode() {
return 9 * startInclusive.hashCode() * endExclusive.hashCode();
}
}
|
/** RecyclerView adapter rendering a checkable list of Todo items. */
class TodoAdapter(private val list: ArrayList<Todo>) : RecyclerView.Adapter<TodoAdapter.TodoViewHolder>() {
// Inflate one row (todo_item layout) per view holder.
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): TodoViewHolder {
val itemView = LayoutInflater.from(parent.context).inflate(R.layout.todo_item, parent, false)
return TodoViewHolder(itemView)
}
override fun onBindViewHolder(holder: TodoViewHolder, position: Int) {
holder.bind(list[position])
}
override fun getItemCount() = list.size
/** Binds a single Todo row: name label plus completion checkbox. */
inner class TodoViewHolder(itemView: View): RecyclerView.ViewHolder(itemView) {
private val todoName = itemView.findViewById<TextView>(R.id.todoName)
private val todoCheck = itemView.findViewById<CheckBox>(R.id.todoCheck)
fun bind(todo: Todo) {
todoName.text = todo.name
todoCheck.isChecked = todo.isCompleted
// Keep the model in sync when the user toggles the checkbox.
// NOTE(review): the listener is not cleared before isChecked is set on
// recycled rows — confirm stale listeners cannot fire during rebinding.
todoCheck.setOnCheckedChangeListener { _, isChecked ->
todo.isCompleted = isChecked
}
}
}
}
|
<filename>opentaps/purchasing/src/org/opentaps/purchasing/mrp/OpentapsProposedOrder.java
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
/* This file is partially based on an OFBIZ file and has been modified by Open Source Strategies, Inc. */
package org.opentaps.purchasing.mrp;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Map;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.manufacturing.bom.BOMTree;
import org.ofbiz.manufacturing.mrp.MrpServices;
import org.ofbiz.manufacturing.mrp.ProposedOrder;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.GenericServiceException;
import org.ofbiz.service.LocalDispatcher;
/**
* Proposed Order Object generated by the MRP process or other re-Order process.
* Note: this class only extend <code>ProposedOrder</code> to add support for the PENDING_INTERNAL_REQ requirements.
*/
public class OpentapsProposedOrder extends ProposedOrder {

    private static final String MODULE = OpentapsProposedOrder.class.getName();

    // When true, manufacturing requirements are created as PENDING_INTERNAL_REQ
    // instead of INTERNAL_REQUIREMENT (see create() below).
    private boolean createPendingManufacturingRequirements;

    /**
     * Creates a new <code>OpentapsProposedOrder</code> instance.
     *
     * @param product a <code>GenericValue</code> value
     * @param facilityId a <code>String</code> value
     * @param manufacturingFacilityId a <code>String</code> value
     * @param isBuilt a <code>boolean</code> value
     * @param requiredByDate a <code>Timestamp</code> value
     * @param quantity a <code>BigDecimal</code> value
     * @param createPendingManufacturingRequirements a <code>boolean</code> value
     */
    public OpentapsProposedOrder(GenericValue product, String facilityId, String manufacturingFacilityId, boolean isBuilt, Timestamp requiredByDate, BigDecimal quantity, boolean createPendingManufacturingRequirements) {
        super(product, facilityId, manufacturingFacilityId, isBuilt, requiredByDate, quantity);
        this.createPendingManufacturingRequirements = createPendingManufacturingRequirements;
    }

    /**
     * Creates a ProposedOrder in the Requirement Entity calling the createRequirement service.
     * This method overrides the default ofbiz implementation to support creating Pending Manufacturing Requirements.
     *
     * @param ctx The DispatchContext used to call service to create the Requirement Entity record.
     * @param userLogin the user login <code>GenericValue</code> used to call the service
     * @return the requirementId, or <code>null</code> for WIP products or when the service call fails
     */
    @SuppressWarnings("unchecked")
    @Override
    public String create(DispatchContext ctx, GenericValue userLogin) {
        /// -- All of the code here is from org.ofbiz.manufacturing.mrp.ProposedOrder, except the indicated block -- ///
        if ("WIP".equals(product.getString("productTypeId"))) {
            // No requirements for Work In Process products
            return null;
        }
        LocalDispatcher dispatcher = ctx.getDispatcher();
        Delegator delegator = ctx.getDelegator();
        Map parameters = UtilMisc.toMap("userLogin", userLogin);
        // if product is built get the requirement start date from the BOM
        if (isBuilt) {
            try {
                BOMTree tree = new BOMTree(productId, "MANUF_COMPONENT", null, BOMTree.EXPLOSION_MANUFACTURING, delegator, dispatcher, userLogin);
                tree.setRootQuantity(quantity);
                tree.print(new ArrayList());
                requirementStartDate = tree.getRoot().getStartDate(manufacturingFacilityId, requiredByDate, true);
            } catch (Exception e) {
                // Non-fatal: the requirement is still created, just without a computed start date.
                Debug.logError(e, "Error : computing the requirement start date. " + e.getMessage(), MODULE);
            }
        }
        parameters.put("productId", productId);
        parameters.put("statusId", "REQ_PROPOSED");
        parameters.put("requiredByDate", requiredByDate);
        parameters.put("requirementStartDate", requirementStartDate);
        parameters.put("quantity", quantity);
        // if product is built, create a manufacturing requirement, else a product requirement (order)
        if (isBuilt) {
            parameters.put("facilityId", manufacturingFacilityId);
            /// -- Customization here: support for the flag createPendingManufacturingRequirements, if set we create PENDING_INTERNAL_REQ instead of INTERNAL_REQUIREMENT -- ///
            if (createPendingManufacturingRequirements) {
                parameters.put("requirementTypeId", "PENDING_INTERNAL_REQ");
            } else {
                parameters.put("requirementTypeId", "INTERNAL_REQUIREMENT");
            }
        } else {
            parameters.put("facilityId", facilityId);
            parameters.put("requirementTypeId", "PRODUCT_REQUIREMENT");
        }
        if (mrpName != null) {
            parameters.put("description", "MRP_" + mrpName);
        } else {
            parameters.put("description", "Automatically generated by MRP");
        }
        try {
            Map result = dispatcher.runSync("createRequirement", parameters);
            return (String) result.get("requirementId");
        } catch (GenericServiceException e) {
            Debug.logError(e, "Error : createRequirement with parameters = " + parameters + "--" + e.getMessage(), MODULE);
            MrpServices.logMrpError(/* mrpId */ null, productId, "Error creating requirement", delegator);
            return null;
        }
    }
}
|
<html>
<head>
<title>Calculator</title>
<style>
.calculator {
width: 250px;
margin: 0 auto;
}
.calculator input {
font-size: 1.3rem;
text-align: right;
}
.calculator button {
font-size: 1.3rem;
}
</style>
</head>
<body>
<div class="calculator">
<input type="text" name="n1" value="0" />
<input type="text" name="n2" value="0" />
<br />
<button onclick="add()">+</button>
<button onclick="subtract()">-</button>
<button onclick="multiply()">*</button>
<button onclick="divide()">/</button>
</div>
<script>
// Read both operand fields as numbers. parseFloat (rather than parseInt)
// preserves decimal input such as "2.5"; integer input behaves as before.
function getOperands() {
  const n1 = document.querySelector('input[name="n1"]').value;
  const n2 = document.querySelector('input[name="n2"]').value;
  return [parseFloat(n1), parseFloat(n2)];
}
// Single reporting path so every operation formats the result identically.
function showResult(result) {
  alert("Result is: "+result);
}
function add(){
  const [a, b] = getOperands();
  showResult(a + b);
}
function subtract(){
  const [a, b] = getOperands();
  showResult(a - b);
}
function multiply(){
  const [a, b] = getOperands();
  showResult(a * b);
}
function divide(){
  const [a, b] = getOperands();
  showResult(a / b);
}
</script>
</body>
</html>
|
#!/usr/bin/env bash
set -eux

# Purge any pre-installed MySQL packages and wipe their config/data
# directories, then install MySQL 5.5 client and server packages.
apt-get purge -qq '^mysql*' '^libmysql*'
rm -fr /etc/mysql /var/lib/mysql
apt-get update -qq
apt-get install -qq mysql-server-5.5 mysql-client-core-5.5 mysql-client-5.5 libmysqlclient-dev
|
from typing import Any, Union
import sys
if sys.version_info < (3, 8):
from typing_extensions import Protocol
else:
from typing import Protocol
class WithAsyncWrite(Protocol):
    """Structural type for objects exposing an ``async write(str)`` method."""
    async def write(self, __b: str) -> Any: ...
class WithAsyncRead(Protocol):
    """Structural type for objects exposing an ``async read(size)`` method.

    ``read`` may return either ``str`` or ``bytes`` depending on the stream.
    """
    async def read(self, __size: int) -> Union[str, bytes]: ...
|
<filename>common/test/tb_utils.cpp
// tb_utils.cpp
// Common utilities for layer testbenches
#include "global_defines.h"
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
// Initialize input data to random values between 0 and 1
// Fill ifmaps with len random samples drawn uniformly from [0, 1].
void gen_random_inputs(data_t *ifmaps, int len) {
    for (int idx = 0; idx < len; idx++) {
        double sample = (double)rand() / (double(RAND_MAX));
        ifmaps[idx] = (data_t)sample;
    }
}
// Filters: random values between -1 and 1
// Fill filters with len random samples drawn uniformly from [-1, 1].
void gen_random_filters(data_t *filters, int len) {
    for (int idx = 0; idx < len; idx++) {
        // rand()/(RAND_MAX/2) spans [0, 2]; shifting by -1 centers it on zero.
        double sample = (double)rand() / (double(RAND_MAX / 2)) - 1.0;
        filters[idx] = (data_t)sample;
    }
}
// Initializes random values for the "adjustments".
// The output is a 2D array whose first dimension is len and second dimension
// is always 4.
// Initializes random values for the "adjustments".
// The output is a 2D array whose first dimension is len and second dimension
// is always 4. Layout per row: [mean, 1/sqrt(variance), bias, unused].
void gen_random_adjustments(data_t adjustments[][4], int len) {
for (int i = 0; i < len; i++) {
// Element 0 is the mean. Pick a random value between -1 and 1
data_t mean = (data_t)((double)rand()/(double(RAND_MAX/2))) - 1.0;
// Element 1 is inverse square-root of variance. Pick a random value
// between 0.2 and 1 (since variance is always positive):
// rand()/(RAND_MAX/0.8) spans [0, 0.8], then the +0.2 offset shifts it.
data_t inv_sqrt_var = (data_t)((double)rand()/(double(RAND_MAX/0.8))) + 0.2;
// Element 2 is bias. Pick a random value between -2 and 2
data_t bias = (data_t)((double)rand()/(double(RAND_MAX/4))) - 2.0;
// Store to the output. Element 3 is unused (padding to a power-of-two row).
adjustments[i][0] = mean;
adjustments[i][1] = inv_sqrt_var;
adjustments[i][2] = bias;
adjustments[i][3] = 0;
}
}
// Compare expected data against actual data
// Due to floating point rounding error the values might not be exactly
// the same. Since the exponents of the numbers across accumulations can
// vary so drastically, the bounds for worst case error must be set somewhat high.
// However, as observed error increases, the number of points that have at least that
// amount of error decreases exponentially due to the random nature of the inputs.
//
// So I will do the data validation as follows:
// - Make sure zero points exceed a higher error margin
// - Make sure no more than a very small fraction of points exceed a smaller error margin.
//
// I will also compare both absolute error and percentage error to accommodate for small and large
// numbers. The difference must exceed both to be considered an error.
//
// The current bounds I am using can be seen in the constant definitions below.
// Compare expected data against actual data.
// Due to floating point rounding error the values might not be exactly
// the same. Since the exponents of the numbers across accumulations can
// vary so drastically, the bounds for worst case error must be set somewhat high.
// However, as observed error increases, the number of points that have at least that
// amount of error decreases exponentially due to the random nature of the inputs.
//
// Validation strategy:
// - Zero points may exceed the high error margin.
// - No more than a small fraction of points may exceed the low error margin.
// - A sanity check that not "too many" points are exactly equal (which would
//   suggest the comparison is trivially passing, e.g. comparing zeros).
//
// Both absolute and percentage error are compared to accommodate small and large
// numbers; a point is only flagged when the difference exceeds BOTH bounds.
//
// Returns 0 on success, -1 on any validation failure.
int compare_expected_vs_actual(data_t *expected_data, data_t *actual_data, int num_els, int ichans) {
    const float HIGH_ERROR_ABS = fmax(0.2, 0.04 * ichans / 16.0);
    const float HIGH_ERROR_PCT = fmax(0.2, 0.04 * ichans / 16.0);
    // The acceptable error is higher with more and more accumulations necessary to compute one output element.
    // The number of accumulations scales with the number of input channels.
    const float LOW_ERROR_ABS = fmax(0.01, 0.01 * ichans / 16.0);
    const float LOW_ERROR_PCT = fmax(0.01, 0.01 * ichans / 16.0);
    int MAX_LOW_ERROR_COUNT = (num_els / 100) + 1; // Only 1% of points can exceed the low error threshold.
    int low_error_count = 0;
    int exactly_equal_count = 0;
    for (int k = 0; k < num_els; k++) {
        // Use fabsf(), not abs(): abs() can bind to the integer overload from
        // <stdlib.h>, truncating every sub-1.0 error to zero and silently
        // passing the comparison.
        float diff = actual_data[k] - expected_data[k];
        float abs_error = fabsf(diff);
        float pct_error = abs_error / fabsf((float)(expected_data[k]));
        if (abs_error > HIGH_ERROR_ABS && pct_error > HIGH_ERROR_PCT) {
            printf("HIGH ERROR MISMATCH between expected and actual values.\n");
            printf("k=%d\n", k);
            printf("Expected = %f\n", (float)(expected_data[k]));
            printf("Actual   = %f\n", (float)(actual_data[k]));
            return -1;
        }
        if (abs_error > LOW_ERROR_ABS && pct_error > LOW_ERROR_PCT) {
            ++low_error_count;
        }
        if (actual_data[k] == expected_data[k]) ++exactly_equal_count;
    }
    if (low_error_count > MAX_LOW_ERROR_COUNT) {
        printf("Too many points (%d out of %d) exceeded low error threshold.\n", low_error_count, num_els);
        return -1;
    } else {
        printf("%d out of %d points exceeded low error threshold.\n", low_error_count, num_els);
    }
    int MAX_EXACT_EQUAL_COUNT = num_els * 5 / 6;
    if (exactly_equal_count > MAX_EXACT_EQUAL_COUNT) {
        printf("Too many points (%d out of %d) equaled their exact expected values.\n", exactly_equal_count, num_els);
        return -1;
    } else {
        printf("%d out of %d points equaled their exact expected values.\n", exactly_equal_count, num_els);
    }
    return 0;
}
|
<filename>learn/src/main/java/org/ruogu/learn/lang/exception/ExceptionExample.java
package org.ruogu.learn.lang.exception;
/**
 * ExceptionExample
 *
 * @author xueyintao 2016-02-05 19:53:43
 */
public class ExceptionExample {
    /**
     * Entry point; declares a {@link Throwable} reference initialized to null.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        Throwable pendingThrowable = null;
    }
}
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Adapted from https://github.com/apache/arrow-rs/tree/master/dev/release/update_change_log.sh

# invokes the changelog generator from
# https://github.com/github-changelog-generator/github-changelog-generator
#
# With the config located in
# arrow-datafusion/.github_changelog_generator
#
# Usage:
# CHANGELOG_GITHUB_TOKEN=<TOKEN> ./update_change_log.sh <PROJECT> <SINCE_TAG> <EXTRA_ARGS...>

set -e

SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SOURCE_TOP_DIR="$(cd "${SOURCE_DIR}/../../" && pwd)"

# Validate arguments before doing any work.
if [[ "$#" -lt 2 ]]; then
    echo "USAGE: $0 PROJECT SINCE_TAG EXTRA_ARGS..."
    exit 1
fi

PROJECT=$1
SINCE_TAG=$2
shift 2

OUTPUT_PATH="${PROJECT}/CHANGELOG.md"

pushd "${SOURCE_TOP_DIR}"

# reset content in changelog
git checkout "${SINCE_TAG}" "${OUTPUT_PATH}"
# remove license header so github-changelog-generator has a clean base to append
sed -i.bak '1,18d' "${OUTPUT_PATH}"

docker run -it --rm \
    --cpus "0.1" \
    -e CHANGELOG_GITHUB_TOKEN="${CHANGELOG_GITHUB_TOKEN}" \
    -v "$(pwd)":/usr/local/src/your-app \
    githubchangeloggenerator/github-changelog-generator \
    --user apache \
    --project arrow-datafusion \
    --since-tag "${SINCE_TAG}" \
    --base "${OUTPUT_PATH}" \
    --output "${OUTPUT_PATH}" \
    "$@"

# The generator emits literal "\n" sequences; expand them into blank lines.
sed -i.bak "s/\\\n/\n\n/" "${OUTPUT_PATH}"

# Put the license header back on top of the regenerated changelog.
echo '<!---
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->
' | cat - "${OUTPUT_PATH}" > "${OUTPUT_PATH}".tmp
mv "${OUTPUT_PATH}".tmp "${OUTPUT_PATH}"
|
// SPDX-License-Identifier: Apache-2.0
// YAPION
// Copyright (C) 2019,2020 yoyosource
package yapion.packet;
import yapion.annotations.deserialize.YAPIONLoadExclude;
import yapion.annotations.serialize.YAPIONSaveExclude;
import java.util.function.Consumer;
@YAPIONLoadExclude(context = "*")
@YAPIONSaveExclude(context = "*")
@FunctionalInterface
public interface YAPIONPacketHandler {

    /**
     * Handle a specific {@link YAPIONPacket}.
     *
     * @param yapionPacket the packet to handle.
     */
    void handlePacket(YAPIONPacket yapionPacket);

    /**
     * Specifies if this handler should be run in a Thread.
     *
     * @return {@code true} if it should run in a thread, {@code false} otherwise
     */
    default boolean runThread() {
        return false;
    }

    /**
     * Specifies if the Thread should be a Daemon or not.
     * This only takes effect when {@link #runThread()} returns {@code true}.
     * If not implemented this method returns {@code true} by default.
     *
     * @return {@code true} if it should run as a daemon, {@code false} otherwise.
     */
    default boolean daemonThread() {
        return true;
    }

    /**
     * Specifies if exceptions should be ignored and not processed by the
     * '@exception' handler.
     *
     * @return {@code true} if exceptions should be ignored, {@code false} otherwise
     */
    default boolean ignoreException() {
        return false;
    }

    /**
     * Create a {@link YAPIONPacketHandler} by the different parameters
     *
     * @param yapionPacketConsumer the {@link Consumer} to handle the {@link YAPIONPacket}
     * @param runThread {@code true} if it should run in a thread, {@code false} otherwise
     * @param daemonThread {@code true} if it should run as a daemon, {@code false} otherwise.
     * @param ignoreException {@code true} if exceptions should be ignored, {@code false} otherwise
     * @return {@link YAPIONPacketHandler} instance
     */
    static YAPIONPacketHandler createInstance(Consumer<YAPIONPacket> yapionPacketConsumer, boolean runThread, boolean daemonThread, boolean ignoreException) {
        return new YAPIONPacketHandler() {
            @Override
            public void handlePacket(YAPIONPacket yapionPacket) {
                yapionPacketConsumer.accept(yapionPacket);
            }

            @Override
            public boolean runThread() {
                return runThread;
            }

            @Override
            public boolean daemonThread() {
                return daemonThread;
            }

            @Override
            public boolean ignoreException() {
                return ignoreException;
            }
        };
    }

    /**
     * Create a {@link YAPIONPacketHandler} by the different parameters.
     * Delegates to the four-argument overload with {@code daemonThread = true},
     * matching the interface's documented default.
     *
     * @param yapionPacketConsumer the {@link Consumer} to handle the {@link YAPIONPacket}
     * @param runThread {@code true} if it should run in a thread, {@code false} otherwise
     * @param ignoreException {@code true} if exceptions should be ignored, {@code false} otherwise
     * @return {@link YAPIONPacketHandler} instance
     */
    static YAPIONPacketHandler createInstance(Consumer<YAPIONPacket> yapionPacketConsumer, boolean runThread, boolean ignoreException) {
        return createInstance(yapionPacketConsumer, runThread, true, ignoreException);
    }
}
|
# Reload function: undefine the named function and re-mark it for autoload so
# the next call picks up the edited definition from fpath.
function rfunc() {
    if [ $# -ne 1 ]; then
        echo "usage: $0 <function name>" 1>&2
        return 1
    fi
    # unfunction fails if the name is not currently defined as a function.
    if ! unfunction "$1" >/dev/null 2>&1; then
        echo "$1 is not defined" 1>&2
        return 1
    fi
    # +X: mark for autoload without executing the function body now.
    autoload +X "$1"
    return 0
}
|
<filename>devilry/devilry_student/tests/test_dashboard/test_allperiods.py
from datetime import timedelta
from django import test
from django.conf import settings
from cradmin_legacy import cradmin_testhelpers
from cradmin_legacy.crinstance import reverse_cradmin_url
from cradmin_legacy import crapp
from model_bakery import baker
from devilry.apps.core.baker_recipes import OLD_PERIOD_START, ACTIVE_PERIOD_START, ACTIVE_PERIOD_END
from devilry.devilry_qualifiesforexam.models import Status
from devilry.devilry_student.views.dashboard import allperiods
class TestAllPeriodsView(test.TestCase, cradmin_testhelpers.TestCaseMixin):
    """Tests for the student "all periods" dashboard listing.

    Covers page title/heading, which periods are listed (only those where the
    user is a related student, excluding future periods), ordering, list item
    rendering (title, URL, assignment count, qualifies-for-exam status),
    pagination, and query count.
    """
    viewclass = allperiods.AllPeriodsView

    def test_title(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.Candidate', relatedstudent__user=requestuser)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertIn(
            'Your courses',
            mockresponse.selector.one('title').alltext_normalized)

    def test_h1(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.Candidate', relatedstudent__user=requestuser)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            'Your courses',
            mockresponse.selector.one('h1').alltext_normalized)

    def __get_period_count(self, selector):
        # Helper: number of period list items rendered in the response.
        return selector.count('.cradmin-legacy-listbuilder-itemvalue')

    def test_not_periods_where_not_student(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make_recipe('devilry.apps.core.assignment_activeperiod_start')
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            0,
            self.__get_period_count(selector=mockresponse.selector))

    def test_not_future_periods(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.RelatedStudent',
                   user=requestuser,
                   period=baker.make_recipe('devilry.apps.core.period_future'))
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            0,
            self.__get_period_count(selector=mockresponse.selector))

    def test_include_old_periods(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.RelatedStudent',
                   user=requestuser,
                   period=baker.make_recipe('devilry.apps.core.period_old'))
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            1,
            self.__get_period_count(selector=mockresponse.selector))

    def test_include_active_periods(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.RelatedStudent', user=requestuser,
                   period=baker.make_recipe('devilry.apps.core.period_active'))
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            1,
            self.__get_period_count(selector=mockresponse.selector))

    def test_no_items_message(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            'You are not registered on any courses in Devilry.',
            mockresponse.selector.one('.cradmin-legacy-listing-no-items-message').alltext_normalized)

    def __get_titles(self, selector):
        # Helper: list-item titles in rendered order.
        return [element.alltext_normalized
                for element in selector.list('.cradmin-legacy-listbuilder-itemvalue-titledescription-title')]

    def test_orderby_sanity(self):
        # Periods are ordered by start_time descending (newest first).
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod1 = baker.make_recipe('devilry.apps.core.period_old',
                                        parentnode__long_name='Test Subject',
                                        long_name='Old Period 1')
        testperiod2 = baker.make_recipe('devilry.apps.core.period_old',
                                        parentnode__long_name='Test Subject',
                                        start_time=OLD_PERIOD_START + timedelta(days=2),
                                        long_name='Old Period 2')
        testperiod3 = baker.make_recipe('devilry.apps.core.period_active',
                                        parentnode__long_name='Test Subject',
                                        start_time=ACTIVE_PERIOD_START + timedelta(days=2),
                                        long_name='Active Period 2')
        testperiod4 = baker.make_recipe('devilry.apps.core.period_active',
                                        parentnode__long_name='Test Subject',
                                        long_name='Active Period 1')
        baker.make('core.RelatedStudent', period=testperiod1, user=requestuser)
        baker.make('core.RelatedStudent', period=testperiod2, user=requestuser)
        baker.make('core.RelatedStudent', period=testperiod3, user=requestuser)
        baker.make('core.RelatedStudent', period=testperiod4, user=requestuser)
        mockresponse = self.mock_http200_getrequest_htmls(
            requestuser=requestuser)
        self.assertEqual(
            [
                'Test Subject - Active Period 2',
                'Test Subject - Active Period 1',
                'Test Subject - Old Period 2',
                'Test Subject - Old Period 1',
            ],
            self.__get_titles(mockresponse.selector))

    def test_orderby_multiple_with_same_start_time(self):
        # Tie-break ordering when periods share the same start time.
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod1 = baker.make_recipe('devilry.apps.core.period_active',
                                        parentnode__long_name='Test Subject 1',
                                        long_name='Active Period')
        testperiod2 = baker.make_recipe('devilry.apps.core.period_active',
                                        parentnode__long_name='Test Subject 2',
                                        long_name='Active Period')
        testperiod3 = baker.make_recipe('devilry.apps.core.period_active',
                                        parentnode__long_name='Test Subject 3',
                                        long_name='Active Period')
        baker.make('core.RelatedStudent', period=testperiod1, user=requestuser)
        baker.make('core.RelatedStudent', period=testperiod2, user=requestuser)
        baker.make('core.RelatedStudent', period=testperiod3, user=requestuser)
        mockresponse = self.mock_http200_getrequest_htmls(
            requestuser=requestuser)
        self.assertEqual(
            [
                'Test Subject 1 - Active Period',
                'Test Subject 2 - Active Period',
                'Test Subject 3 - Active Period',
            ],
            self.__get_titles(mockresponse.selector))

    def test_listitem_title_sanity(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod = baker.make_recipe('devilry.apps.core.period_active',
                                       parentnode__long_name='Test Subject',
                                       long_name='Test Period')
        baker.make('core.RelatedStudent', period=testperiod, user=requestuser)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            'Test Subject - Test Period',
            mockresponse.selector.one(
                '.cradmin-legacy-listbuilder-itemvalue-titledescription-title').alltext_normalized
        )

    def test_listitem_url(self):
        # Each list item links to the period overview in the student cradmin instance.
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod = baker.make_recipe('devilry.apps.core.period_active')
        baker.make('core.RelatedStudent', period=testperiod, user=requestuser)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            reverse_cradmin_url(
                instanceid='devilry_student_period',
                appname='overview',
                roleid=testperiod.id,
                viewname=crapp.INDEXVIEW_NAME,
            ),
            mockresponse.selector.one('a.devilry-student-listbuilder-period-itemframe')['href'])

    def test_assignmentcount_sanity(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod = baker.make_recipe('devilry.apps.core.period_active')
        relatedstudent = baker.make('core.RelatedStudent', user=requestuser, period=testperiod)
        testassignment1 = baker.make('core.Assignment', parentnode=testperiod)
        baker.make('core.Candidate',
                   assignment_group__parentnode=testassignment1,
                   relatedstudent=relatedstudent)
        testassignment2 = baker.make('core.Assignment', parentnode=testperiod)
        baker.make('core.Candidate',
                   assignment_group__parentnode=testassignment2,
                   relatedstudent=relatedstudent)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertEqual(
            '2 assignments',
            mockresponse.selector.one(
                '.cradmin-legacy-listbuilder-itemvalue-titledescription-description').alltext_normalized)

    def test_assignmentcount_multiple_periods(self):
        # Assignment counts are computed per period, not across periods.
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod1 = baker.make_recipe('devilry.apps.core.period_active')
        relatedstudent1 = baker.make('core.RelatedStudent', user=requestuser, period=testperiod1)
        testassignment1 = baker.make('core.Assignment', parentnode=testperiod1)
        baker.make('core.Candidate',
                   assignment_group__parentnode=testassignment1,
                   relatedstudent=relatedstudent1)
        testperiod2 = baker.make_recipe('devilry.apps.core.period_active')
        baker.make('core.RelatedStudent', user=requestuser, period=testperiod2)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        assignmentcounts = {
            element.alltext_normalized
            for element in mockresponse.selector.list(
                '.cradmin-legacy-listbuilder-itemvalue-titledescription-description')}
        self.assertEqual(
            {'1 assignment', '0 assignments'},
            assignmentcounts)

    def test_qualified_for_final_exam_sanity_no_status(self):
        # No qualifies-for-exam marker when no Status exists for the period.
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod = baker.make_recipe('devilry.apps.core.period_active')
        baker.make('core.RelatedStudent', period=testperiod, user=requestuser)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertFalse(mockresponse.selector.exists('.devilry-cradmin-perioditemvalue-student-qualifedforexam'))

    def test_qualified_for_final_exam_sanity_qualified(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod = baker.make_recipe('devilry.apps.core.period_active')
        relatedstudent = baker.make('core.RelatedStudent', period=testperiod, user=requestuser)
        status = baker.make('devilry_qualifiesforexam.Status', period=testperiod,
                            status=Status.READY)
        baker.make('devilry_qualifiesforexam.QualifiesForFinalExam',
                   relatedstudent=relatedstudent,
                   status=status,
                   qualifies=True)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertTrue(mockresponse.selector.exists(
            '.devilry-cradmin-perioditemvalue-student-qualifedforexam'
            '.devilry-cradmin-perioditemvalue-student-qualifedforexam-yes'))

    def test_qualified_for_final_exam_sanity_not_qualified(self):
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        testperiod = baker.make_recipe('devilry.apps.core.period_active')
        relatedstudent = baker.make('core.RelatedStudent', period=testperiod, user=requestuser)
        status = baker.make('devilry_qualifiesforexam.Status', period=testperiod,
                            status=Status.READY)
        baker.make('devilry_qualifiesforexam.QualifiesForFinalExam',
                   relatedstudent=relatedstudent,
                   status=status,
                   qualifies=False)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertTrue(mockresponse.selector.exists(
            '.devilry-cradmin-perioditemvalue-student-qualifedforexam'
            '.devilry-cradmin-perioditemvalue-student-qualifedforexam-no'))

    def test_no_pagination(self):
        # Exactly paginate_by items: no "load more" pager should appear.
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.RelatedStudent',
                   period__start_time=ACTIVE_PERIOD_START,
                   period__end_time=ACTIVE_PERIOD_END,
                   user=requestuser,
                   _quantity=allperiods.AllPeriodsView.paginate_by)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertFalse(mockresponse.selector.exists('.cradmin-legacy-loadmorepager'))

    def test_pagination(self):
        # One more than paginate_by items: the "load more" pager should appear.
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.RelatedStudent',
                   period__start_time=ACTIVE_PERIOD_START,
                   period__end_time=ACTIVE_PERIOD_END,
                   user=requestuser,
                   _quantity=allperiods.AllPeriodsView.paginate_by + 1)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=requestuser)
        self.assertTrue(mockresponse.selector.exists('.cradmin-legacy-loadmorepager'))

    def test_querycount(self):
        # Guard against N+1 queries: the listing must render in 2 queries
        # regardless of how many periods the user is registered on.
        requestuser = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.RelatedStudent',
                   period__start_time=ACTIVE_PERIOD_START,
                   period__end_time=ACTIVE_PERIOD_END,
                   user=requestuser,
                   _quantity=10)
        with self.assertNumQueries(2):
            self.mock_http200_getrequest_htmls(requestuser=requestuser)
|
// Return 1 if any two equal elements of arr (length n) are at most k
// positions apart, otherwise 0. Uses a sliding-window hash set holding
// the values of the last k positions.
int containsDuplicates(int arr[], int n, int k)
{
    unordered_set<int> window;
    for (int pos = 0; pos < n; pos++) {
        // A hit means arr[pos] appeared within the last k positions.
        if (window.count(arr[pos]))
            return 1;
        window.insert(arr[pos]);
        // Evict the element that just fell out of range.
        if (pos >= k)
            window.erase(arr[pos - k]);
    }
    return 0;
}
|
#!/bin/bash
# Builds a crosstool-NG cross-compilation toolchain for a conda package.
# Pre-fetches source tarballs that crosstool-NG cannot reliably download
# itself, patches the generated .config, then runs `ct-ng build`.
# Relies on ctng_* / SYS_PREFIX / SRC_DIR / RECIPE_DIR variables provided
# by the build environment (presumably conda-build -- confirm in recipe).
mkdir -p .build/src
mkdir -p .build/tarballs
# Pick a downloader per platform: curl on macOS, wget elsewhere.
if [[ $(uname) == Darwin ]]; then
  DOWNLOADER="curl -SL"
  DOWNLOADER_INSECURE=${DOWNLOADER}" --insecure"
  DOWNLOADER_OUT="-C - -o"
else
  DOWNLOADER="wget -c"
  DOWNLOADER_INSECURE=${DOWNLOADER}" --no-check-certificate"
  DOWNLOADER_OUT="-O"
fi
mkdir -p ${SYS_PREFIX}/conda-bld/src_cache/
# Some kernels are not on kernel.org, such as the CentOS 5.11 one used (and heavily patched) by RedHat.
if [[ ! -e "${SYS_PREFIX}/conda-bld/src_cache/linux-${ctng_kernel}.tar.bz2" ]] && \
   [[ ! -e "${SYS_PREFIX}/conda-bld/src_cache/linux-${ctng_kernel}.tar.xz" ]]; then
  if [[ ${ctng_kernel} == 2.6.* ]]; then
    ${DOWNLOADER} ftp://ftp.be.debian.org/pub/linux/kernel/v2.6/linux-${ctng_kernel}.tar.bz2 ${DOWNLOADER_OUT} ${SYS_PREFIX}/conda-bld/src_cache/linux-${ctng_kernel}.tar.bz2
  elif [[ ${ctng_kernel} == 3.* ]]; then
    # Necessary because crosstool-ng looks in the wrong location for this one.
    ${DOWNLOADER} https://www.kernel.org/pub/linux/kernel/v3.x/linux-${ctng_kernel}.tar.bz2 ${DOWNLOADER_OUT} ${SYS_PREFIX}/conda-bld/src_cache/linux-${ctng_kernel}.tar.bz2
  elif [[ ${ctng_kernel} == 4.* ]]; then
    ${DOWNLOADER} https://www.kernel.org/pub/linux/kernel/v4.x/linux-${ctng_kernel}.tar.xz ${DOWNLOADER_OUT} ${SYS_PREFIX}/conda-bld/src_cache/linux-${ctng_kernel}.tar.xz
  fi
fi
# Necessary because uclibc let their certificate expire, this is a bit hacky.
if [[ ${ctng_libc} == uClibc ]]; then
  if [[ ! -e "${SYS_PREFIX}/conda-bld/src_cache/uClibc-${ctng_uClibc}.tar.xz" ]]; then
    ${DOWNLOADER_INSECURE} https://www.uclibc.org/downloads/uClibc-${ctng_uClibc}.tar.xz ${DOWNLOADER_OUT} ${SYS_PREFIX}/conda-bld/src_cache/uClibc-${ctng_uClibc}.tar.xz
  fi
else
  if [[ ! -e "${SYS_PREFIX}/conda-bld/src_cache/glibc-${gnu}.tar.bz2" ]]; then
    ${DOWNLOADER_INSECURE} https://ftp.gnu.org/gnu/libc/glibc-${gnu}.tar.bz2 ${DOWNLOADER_OUT} ${SYS_PREFIX}/conda-bld/src_cache/glibc-${gnu}.tar.bz2
  fi
fi
# Necessary because CentOS5.11 is having some certificate issues.
if [[ ! -e "${SYS_PREFIX}/conda-bld/src_cache/duma_${ctng_duma//./_}.tar.gz" ]]; then
  ${DOWNLOADER_INSECURE} http://mirror.opencompute.org/onie/crosstool-NG/duma_${ctng_duma//./_}.tar.gz ${DOWNLOADER_OUT} ${SYS_PREFIX}/conda-bld/src_cache/duma_${ctng_duma//./_}.tar.gz
fi
if [[ ! -e "${SYS_PREFIX}/conda-bld/src_cache/expat-2.2.0.tar.bz2" ]]; then
  ${DOWNLOADER_INSECURE} http://mirror.opencompute.org/onie/crosstool-NG/expat-2.2.0.tar.bz2 ${DOWNLOADER_OUT} ${SYS_PREFIX}/conda-bld/src_cache/expat-2.2.0.tar.bz2
fi
# Determine the parallel build width per platform; default to 4 if unknown.
BUILD_NCPUS=4
if [ "$(uname)" == "Linux" ]; then
  BUILD_NCPUS=$(grep -c ^processor /proc/cpuinfo)
elif [ "$(uname)" == "Darwin" ]; then
  BUILD_NCPUS=$(sysctl -n hw.ncpu)
elif [ "$OSTYPE" == "msys" ]; then
  BUILD_NCPUS=${NUMBER_OF_PROCESSORS}
fi
[[ -d ${SRC_DIR}/gcc_built ]] || mkdir -p ${SRC_DIR}/gcc_built
# If the gfortran binary doesn't exist yet, then run ct-ng
if [[ ! -n $(find ${SRC_DIR}/gcc_built -iname ${ctng_cpu_arch}-${ctng_vendor}-*-gfortran) ]]; then
  source ${RECIPE_DIR}/write_ctng_config
  # Generate a default .config from the chosen sample, accepting defaults.
  yes "" | ct-ng ${ctng_sample}
  write_ctng_config_before .config
  # Apply some adjustments for conda.
  sed -i.bak "s|# CT_DISABLE_MULTILIB_LIB_OSDIRNAMES is not set|CT_DISABLE_MULTILIB_LIB_OSDIRNAMES=y|g" .config
  sed -i.bak "s|CT_CC_GCC_USE_LTO=n|CT_CC_GCC_USE_LTO=y|g" .config
  # Fail fast if either adjustment did not take effect.
  cat .config | grep CT_DISABLE_MULTILIB_LIB_OSDIRNAMES=y || exit 1
  cat .config | grep CT_CC_GCC_USE_LTO=y || exit 1
  # Not sure why this is getting set to y since it depends on ! STATIC_TOOLCHAIN
  if [[ ${ctng_nature} == static ]]; then
    sed -i.bak "s|CT_CC_GCC_ENABLE_PLUGINS=y|CT_CC_GCC_ENABLE_PLUGINS=n|g" .config
  fi
  if [[ $(uname) == Darwin ]]; then
    # Static linking of the toolchain itself is not supported on macOS.
    sed -i.bak "s|CT_WANTS_STATIC_LINK=y|CT_WANTS_STATIC_LINK=n|g" .config
    sed -i.bak "s|CT_CC_GCC_STATIC_LIBSTDCXX=y|CT_CC_GCC_STATIC_LIBSTDCXX=n|g" .config
    sed -i.bak "s|CT_STATIC_TOOLCHAIN=y|CT_STATIC_TOOLCHAIN=n|g" .config
    sed -i.bak "s|CT_BUILD=\"x86_64-pc-linux-gnu\"|CT_BUILD=\"x86_64-apple-darwin11\"|g" .config
  fi
  # Now ensure any changes we made above pull in other requirements by running oldconfig.
  yes "" | ct-ng oldconfig
  # Now filter out 'things that cause problems'. For example, depending on the base sample, you can end up with
  # two different glibc versions in-play.
  sed -i.bak '/CT_LIBC/d' .config
  sed -i.bak '/CT_LIBC_GLIBC/d' .config
  # And undo any damage to version numbers => the seds above could be moved into this too probably.
  write_ctng_config_after .config
  # Sanity check: a native gdb on the target requires expat for the target.
  if cat .config | grep "CT_GDB_NATIVE=y"; then
    if ! cat .config | grep "CT_EXPAT_TARGET=y"; then
      echo "ERROR: CT_GDB_NATIVE=y but CT_EXPAT_TARGET!=y"
      cat .config
      echo "ERROR: CT_GDB_NATIVE=y but CT_EXPAT_TARGET!=y"
      exit 1
    fi
  fi
  # Host compiler flags from the conda environment would leak into the
  # toolchain build; clear them before invoking ct-ng.
  unset CFLAGS CXXFLAGS LDFLAGS
  ct-ng build
fi
# increase stack size to prevent test failures
# http://gcc.gnu.org/bugzilla/show_bug.cgi?id=31827
if [[ $(uname) == Linux ]]; then
  ulimit -s 32768
fi
# Ask the freshly built compiler for its target triplet.
CHOST=$(${SRC_DIR}/.build/*-*-*-*/build/build-cc-gcc-final/gcc/xgcc -dumpmachine)
# pushd .build/${CHOST}/build/build-cc-gcc-final
#   make -k check || true
# popd
# .build/src/gcc-${PKG_VERSION}/contrib/test_summary
chmod -R u+w ${SRC_DIR}/gcc_built
# Next problem: macOS targetting uClibc ends up with broken symlinks in sysroot/usr/lib:
if [[ $(uname) == Darwin ]]; then
  pushd ${SRC_DIR}/gcc_built/${CHOST}/sysroot/usr/lib
    # Rewrite each absolute symlink as a relative one so the sysroot relocates.
    links=$(find . -type l | cut -c 3-)
    for link in ${links}; do
      target=$(readlink ${link} | sed 's#^/##' | sed 's#//#/#')
      rm ${link}
      ln -s ${target} ${link}
    done
  popd
fi
exit 0
|
# Run Smatch evaluation of a hypothesis AMR file against a gold AMR file.
# Usage: <script> <gold_amr_path> <hyp_amr_path>
gold_amr=$1
hyp_amr=$2
# Quote the paths so filenames containing spaces are passed intact.
python eval_smatch.py "$gold_amr" "$hyp_amr"
|
from .sorting_algorithms import *
class Policy:
    """Selects a sorting algorithm for its context based on input size."""

    # Default until a context is attached in __init__.
    context = None

    def __init__(self, context):
        self.context = context

    def configure(self):
        """Install merge sort for large inputs, bubble sort for small ones."""
        numbers_count = len(self.context.numbers)
        if numbers_count > 10:
            print('More than 10 numbers, choosing merge sort!')
            chosen = MergeSort()
        else:
            print('Less or equal than 10 numbers, choosing bubble sort!')
            chosen = BubbleSort()
        self.context.sorting_algorithm = chosen
|
require 'spec_helper'
# Specs for Travis::Services::FindBranches, which returns the latest
# build per branch of a repository.
describe Travis::Services::FindBranches do
include Support::ActiveRecord
# One repository with a single finished build to look up.
let(:repo) { Factory(:repository, :owner_name => 'travis-ci', :name => 'travis-core') }
let!(:build) { Factory(:build, :repository => repo, :state => :finished) }
let(:service) { described_class.new(stub('user'), params) }
attr_reader :params
it 'finds the last builds of the given repository grouped per branch' do
@params = { :repository_id => repo.id }
service.run.should include(build)
end
it 'scopes to the given repository' do
@params = { :repository_id => repo.id }
# A finished build on a *different* repository must not appear in the result.
build = Factory(:build, :repository => Factory(:repository), :state => :finished)
service.run.should_not include(build)
end
it 'returns an empty build scope when the repository could not be found' do
# repo.id + 1 is assumed not to collide with any existing repository id.
@params = { :repository_id => repo.id + 1 }
service.run.should == Build.none
end
it 'finds branches by a given list of ids' do
@params = { :ids => [build.id] }
service.run.should == [build]
end
end
|
import React from 'react';
import axios from 'axios';
// Table of records fetched from an API, re-sortable by clicking column headers.
class App extends React.Component {
constructor(props) {
super(props);
// data: rows from the API; sortBy: column the table is currently sorted on;
// reverseOrder: flipped on every header click — NOTE(review): this flag is
// toggled but never read by sortData or render; confirm it is intentional.
this.state = {
data: [],
sortBy: 'name',
reverseOrder: false,
};
}
componentDidMount() {
// Load the table rows once after the initial render.
axios.get('https://api.example.com/data')
.then((response) => {
this.setState({
data: response.data,
});
})
.catch((error) => {
// handle error
});
}
// Sort handler: clicking the already-active column reverses the current row
// order; clicking a different column re-sorts ascending by that column.
// Copies via slice() so the state array is never mutated in place.
sortData = (key) => {
const { data, sortBy, reverseOrder } = this.state;
let newData;
if (key === sortBy) {
newData = data.slice().reverse();
} else {
newData = data.slice().sort((a, b) => (a[key] < b[key] ? -1 : a[key] > b[key] ? 1 : 0));
}
this.setState({
data: newData,
sortBy: key,
reverseOrder: !reverseOrder,
});
}
render() {
const { data, sortBy, reverseOrder } = this.state;
return (
<div>
<table>
<thead>
<tr>
<th onClick={() => this.sortData('name')}>Name</th>
<th onClick={() => this.sortData('age')}>Age</th>
<th onClick={() => this.sortData('address')}>Address</th>
</tr>
</thead>
<tbody>
{data.map(item => (
<tr key={item.id}>
<td>{item.name}</td>
<td>{item.age}</td>
<td>{item.address}</td>
</tr>
))}
</tbody>
</table>
</div>
)
}
}
export default App;
|
# Print the length of each word, one per line.
words = ["apple", "pie", "is", "delicious"]
for length in map(len, words):
    print(length)
|
<reponame>joergdev/MoSy-backend-standalone
package de.joergdev.mosy.backend.standalone.pool;
/**
 * Entry for a single object managed by the pool, tracking its check-out
 * state and the moment it was last handed back.
 *
 * @param <T> type of the pooled object
 */
class PoolObject<T>
{
  /** The wrapped object itself. */
  private T obj;

  /** Whether the object is currently checked out (not free). */
  private boolean locked;

  /** Timestamp since the object became inactive. */
  private long timeGaveBack;

  /**
   * Creates a pool entry wrapping the given object.
   *
   * @param obj object to manage
   * @param locked initial check-out state
   */
  public PoolObject(T obj, boolean locked)
  {
    setObj(obj);
    setLocked(locked);
  }

  public T getObj()
  {
    return obj;
  }

  public void setObj(T obj)
  {
    this.obj = obj;
  }

  public boolean isLocked()
  {
    return locked;
  }

  public void setLocked(boolean locked)
  {
    this.locked = locked;
  }

  public long getTimeGaveBack()
  {
    return timeGaveBack;
  }

  public void setTimeGaveBack(long timeGaveBack)
  {
    this.timeGaveBack = timeGaveBack;
  }
}
|
#!/usr/bin/env bash
# Stop the Django development server and free TCP port 8000.
pkill -f runserver
# -t prints bare PIDs for piping; -r keeps xargs from running `kill -9`
# with no arguments (an error) when lsof finds nothing on the port.
sudo lsof -t -i tcp:8000 | xargs -r kill -9
|
package com.gank.gankly.bean;
/**
* Create by LingYan on 2016-11-21
* Email:<EMAIL>
*/
/**
 * Immutable-at-construction value object describing one JianDan feed item.
 */
public class JianDanBean {

    /** Link to the item page. */
    private String url;
    /** Display title of the item. */
    private String title;
    /** Category/type label of the item. */
    private String type;
    /** Link to the item's image. */
    private String imgUrl;

    public JianDanBean(String url, String title, String type, String imgUrl) {
        this.url = url;
        this.title = title;
        this.type = type;
        this.imgUrl = imgUrl;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getImgUrl() {
        return imgUrl;
    }

    public void setImgUrl(String imgUrl) {
        this.imgUrl = imgUrl;
    }
}
|
#!/bin/bash
#=============================================================================
# Copyright 2014 Istituto Italiano di Tecnologia (IIT)
# Authors: Daniele E. Domenichelli <daniele.domenichelli@iit.it>
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of YCM, substitute the full
# License text for the above reference.)
# Regenerates the gh-pages documentation branch for every interesting
# branch and the newest tag of each release series.
if [ $# -gt 1 ]; then
echo "Usage: $(basename $0) [remote (default=origin)]"
fi
# Sanity check: must be run from the YCM source tree root.
if [ ! -f YCMConfig.cmake.in ]; then
echo "You must run this script in YCM main dir"
exit 1
fi
if [ $# -eq 1 ]; then
remote=$1
else
remote=origin
fi
# Recreate the gh-pages branch from current master and switch to it.
git checkout -q master || exit 1
git branch -q -f gh-pages master || exit 1
git checkout -q gh-pages || exit 1
#git rm -rf .
# Use a pristine clone as the per-ref documentation build area.
rm -Rf build-docs
git clone -q $(git config --get remote.${remote}.url) build-docs || exit 1
# Top-level redirect page sending visitors to the master branch docs.
# Branch docs are generated under gh-pages/git-<branch>/ (see the loop
# below), so the meta refresh must point at git-master; it previously
# targeted the non-existent gh-pages/master/ while the JS fallback and
# the anchor already used git-master.
cat > index.html << EOF
<!DOCTYPE HTML>
<html lang="en-US">
<head>
<meta charset="UTF-8">
<meta http-equiv="refresh" content="1;url=gh-pages/git-master/index.html">
<script type="text/javascript">
window.location.href = "gh-pages/git-master/index.html"
</script>
<title>Page Redirection</title>
</head>
<body>
If you are not redirected automatically, follow the <a href='gh-pages/git-master/index.html'>link to the documentation</a>
</body>
</html>
EOF
rm -Rf gh-pages
mkdir -p gh-pages
# Documentation targets: all remote branches except service/maintenance ones,
# plus all tags sorted by version.
branches=$(git for-each-ref --format="%(refname)" refs/remotes/${remote} | grep -v "HEAD\|gh-pages\|travis\|appveyor\|ycm-\|/pr/\|fix/" | sed "s#refs/remotes/${remote}/##")
all_tags=$(git for-each-ref --format="%(refname)" refs/tags/ | sed "s#refs/tags/##" | sort -V)
# Keep only the last tag for each series
for tag in ${all_tags}; do
vmaj=$(echo ${tag} | sed 's/v//' | cut -d'.' -f1)
vmin=$(echo ${tag} | sed 's/v//' | cut -d'.' -f2)
if [ "${cur_vmaj}.${cur_vmin}" != "${vmaj}.${vmin}" ]; then
if [ -n "${cur_tag}" ]; then
tags="${tags}${cur_tag}"$'\n'
fi
fi
cur_vmaj=${vmaj}
cur_vmin=${vmin}
cur_tag=${tag}
done
tags="${tags}${cur_tag}"
# Build documentation for every selected ref. Tags go into vX.Y dirs,
# branches into git-<branch> dirs.
for ref in ${branches} ${tags}; do
if [[ ${ref} =~ ^v[0-9]+\.[0-9]+ ]]; then
dir=$(echo ${ref} | sed 's/^\(v[0-9]\+\.[0-9]\+\).*$/\1/')
all_tags_versions="${all_tags_versions} '${dir}': '$(echo ${dir} | sed 's/v//')',"$'\n'
else
dir="git-${ref}"
all_versions="${all_versions} '${dir}': '${dir}',"$'\n'
fi
echo "Generating documentation for ref ${ref} in dir ${dir}"
(cd build-docs && git checkout -q ${ref})
mkdir build-docs/build
(cd build-docs/build && cmake .. -DSPHINX_HTML:BOOL=TRUE && make documentation) >/dev/null 2>&1
if [ -d build-docs/build/docs/html/ ]; then
cp -R build-docs/build/docs/html/ gh-pages/${dir}
# GitHub Pages (Jekyll) ignores _-prefixed dirs; rename them and patch refs.
mv gh-pages/${dir}/_sources/ gh-pages/${dir}/sources
mv gh-pages/${dir}/_static/ gh-pages/${dir}/static
mv gh-pages/${dir}/_images/ gh-pages/${dir}/images
(cd gh-pages/${dir}/ && grep -Rl _sources | xargs sed -i 's/_sources/sources/g')
(cd gh-pages/${dir}/ && grep -Rl _static | xargs sed -i 's/_static/static/g')
(cd gh-pages/${dir}/ && grep -Rl _images | xargs sed -i 's/_images/images/g')
# 'latest' always points at the most recently generated ref.
ln -sfn ${dir} gh-pages/latest
echo "  done"
else
echo "  WARNING: no documentation produced"
fi
rm -Rf build-docs/build
echo "-------------------------------"
done
all_tags_versions="$(echo "${all_tags_versions}" | sort -Vr)"
all_versions="${all_versions} 'latest': 'latest release',"$'\n'"${all_tags_versions}"
rm -Rf build-docs
# Add version_switch script
find gh-pages -mindepth 2 -maxdepth 2 -name "*.html" -print0 | \
xargs -0 -n1 sed -i 's|</head>|  <script type="text/javascript" src="../version_switch.js"></script>\n  </head>|g'
find gh-pages -mindepth 3 -maxdepth 3 -name "*.html" -print0 | \
xargs -0 -n1 sed -i 's|</head>|  <script type="text/javascript" src="../../version_switch.js"></script>\n  </head>|g'
find gh-pages -mindepth 2 -maxdepth 3 -name "*.html" -print0 | \
xargs -0 -n1 sed -i 's|<a href="\(.\+\)">\(.\+\) Documentation</a>|<span class="version_switch">\2</span>\n  <a href="\1">Documentation</a>|g'
# Dropdown switcher injected into every page. Heredoc is unquoted, so
# ${all_versions} expands while \$-escapes keep jQuery's $ literal.
cat > gh-pages/version_switch.js << EOF
(function() {
  'use strict';
  var url_re = /robotology\.github\.io\/ycm\/gh-pages\/(git-master|git-devel|latest|(v\d\.\d+))\//;
  var all_versions = {
${all_versions}
  };
  function build_select(current_version, current_release) {
    var buf = ['<select>'];
    \$.each(all_versions, function(version, title) {
      buf.push('<option value="' + version + '"');
      if (version == current_version) {
        buf.push(' selected="selected">');
        if (version[0] == 'v') {
          buf.push(current_release);
        } else {
          buf.push(title + ' (' + current_release + ')');
        }
      } else {
        buf.push('>' + title);
      }
      buf.push('</option>');
    });
    buf.push('</select>');
    return buf.join('');
  }
  function patch_url(url, new_version) {
    return url.replace(url_re, 'robotology.github.io/ycm/gh-pages/' + new_version + '/');
  }
  function on_switch() {
    var selected = \$(this).children('option:selected').attr('value');
    var url = window.location.href,
        new_url = patch_url(url, selected);
    if (new_url != url) {
      // check beforehand if url exists, else redirect to version's start page
      \$.ajax({
        url: new_url,
        success: function() {
          window.location.href = new_url;
        },
        error: function() {
          window.location.href = 'http://robotology.github.io/ycm/gh-pages/' + selected;
        }
      });
    }
  }
  \$(document).ready(function() {
    var match = url_re.exec(window.location.href);
    if (match) {
      var release = DOCUMENTATION_OPTIONS.VERSION;
      var version = match[1];
      var select = build_select(version, release);
      \$('.version_switch').html(select);
      \$('.version_switch select').bind('change', on_switch);
    }
  });
})();
EOF
# Commit the regenerated site and return to master; push is left to the user.
git add gh-pages/ index.html
git commit -q -m "Generate documentation"
git checkout -q master || exit 1
echo
echo "Finished. You can now push with"
echo
echo "  git push --force ${remote} gh-pages"
echo
|
// NOTE(review): appears to be Doxygen-generated navigation data
// (symbol name -> page anchor) — do not edit by hand; regenerate instead.
var _parser_flatbuffers_fixture_8hpp =
[
[ "ParserFlatbuffersFixture", "struct_parser_flatbuffers_fixture.xhtml", "struct_parser_flatbuffers_fixture" ],
[ "TensorRawPtr", "_parser_flatbuffers_fixture_8hpp.xhtml#ac3486e6c1a291aa67efd8b280ffb83cc", null ]
];
|
<reponame>ikim1991/my-golf-tracker
// Flag empty required inputs on the course form (plus the hole-count
// field) with a red border; clear the highlight from filled-in fields.
export const checkInputFields = () => {
    const markIfEmpty = (field) => {
        const action = field.value === "" ? "add" : "remove"
        field.classList[action]("border", "border-danger")
    }
    markIfEmpty(document.querySelector("#numOfHoles"))
    Array.from(document.querySelectorAll("#course-form input")).forEach(markIfEmpty)
}
// Clear the error highlight from every course-form input and the hole-count field.
export const resetInputFields = () => {
    const fields = Array.from(document.querySelectorAll("#course-form input"))
    fields.push(document.querySelector("#numOfHoles"))
    fields.forEach((field) => field.classList.remove("border", "border-danger"))
}
// Flag empty inputs on the new-round form with a red border;
// clear the highlight from fields that now have a value.
export const checkRoundInputFields = () => {
    for (const field of document.querySelectorAll("#new-round-form input")) {
        const action = field.value === "" ? "add" : "remove"
        field.classList[action]("border", "border-danger")
    }
}
// Clear the error highlight from every new-round form input.
export const resetRoundInputFields = () => {
    document.querySelectorAll("#new-round-form input").forEach(
        (field) => field.classList.remove("border", "border-danger")
    )
}
|
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.cpman.impl;
import org.onosproject.cpman.ControlLoad;
import org.onosproject.cpman.ControlMetricType;
import org.onosproject.cpman.MetricsDatabase;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;
/**
* An implementation of control plane load.
*/
/**
 * An implementation of control plane load backed by a metrics database.
 */
public class DefaultControlLoad implements ControlLoad {
    private final MetricsDatabase mdb;
    private final ControlMetricType type;

    /**
     * Constructs a control load using the given metrics database and
     * control metric type.
     *
     * @param mdb metrics database
     * @param type control metric type
     */
    public DefaultControlLoad(MetricsDatabase mdb, ControlMetricType type) {
        this.mdb = mdb;
        this.type = type;
    }

    @Override
    public long average(int duration, TimeUnit unit) {
        // orElse(0D) guards against an empty sample window; the previous
        // getAsDouble() threw NoSuchElementException when no metrics existed.
        return (long) Arrays.stream(recent(duration, unit)).average().orElse(0D);
    }

    @Override
    public long average() {
        // Same empty-stream guard as average(int, TimeUnit).
        return (long) Arrays.stream(all()).average().orElse(0D);
    }

    @Override
    public long rate() {
        // Rate is not tracked by this implementation.
        return 0;
    }

    @Override
    public long latest() {
        return (long) mdb.recentMetric(type.toString());
    }

    @Override
    public boolean isValid() {
        return true;
    }

    @Override
    public long time() {
        return mdb.lastUpdate(type.toString());
    }

    @Override
    public long[] recent(int duration, TimeUnit unit) {
        return doubleToLong(mdb.recentMetrics(type.toString(), duration, unit));
    }

    @Override
    public long[] all() {
        return doubleToLong(mdb.metrics(type.toString()));
    }

    // Maps NaN samples to zero so casting to long yields 0 instead of garbage.
    private double nanToZero(double d) {
        return Double.isNaN(d) ? 0D : d;
    }

    // Converts a double sample array to longs, zeroing NaN entries.
    private long[] doubleToLong(double[] array) {
        final long[] longArray = new long[array.length];
        IntStream.range(0, array.length).forEach(i ->
                longArray[i] = (long) nanToZero(array[i]));
        return longArray;
    }
}
|
#!/bin/bash
#
# IHMTerminal.sh
# Programa para ler informações de pontos analógicos e digitais e também efetuar comandos no SAGE
#
# Igor Siqueira Stevanato <igorstevanato@gmail.com>
# 03/08/2019
#
# Versão 0.1: lê informações de pontos digitais
#
#
#Mensagem de ajuda
#
# Parse command-line options:
#   -a        read analog points
#   -c VALUE  issue command VALUE
#   -f FLAGS  set flags
#   -h        show help
# Fix: 'h' added to the optstring — it was missing, so getopts treated -h
# as an invalid option and the 'h' case below was unreachable.
while getopts ac:f:h OPCAO; do
case "${OPCAO}" in
a) analogico=1 ;;
c) comando=1; valor_comando="${OPTARG}" ;;
f) flags="${OPTARG}" ;;
h) help=1 ;;
esac
done
|
#!/bin/sh
#
# Copyright (C) 2004, 2007, 2012 Internet Systems Consortium, Inc. ("ISC")
# Copyright (C) 2000, 2001 Internet Software Consortium.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# $Id: clean.sh,v 1.10 2007/09/26 03:22:44 marka Exp $
#
# Clean up after stub tests.
#
# Remove the dig query output and the saved stub child zone from ns3.
rm -f dig.out.ns3 ns3/child.example.st
# Remove memory-usage statistics written by each named instance.
rm -f */named.memstats
|
import type { ListrTask, ListrRendererFactory } from 'listr2';
import { LockData } from '@@install/utils/lock';
import type { VersionInfo } from '@request/request-npm';
/**
 * Shared task context: the fully resolved dependency set, the subset that
 * differs from what is on disk, and the lock-file data.
 */
interface Context {
resolvedDeps: VersionInfo[];
diff: VersionInfo[];
lockData: LockData;
}
/**
 * Factory for the Listr task that diffs resolved dependencies against
 * local files. NOTE(review): declaration file only — the implementation
 * lives in the emitted JavaScript.
 */
declare const diffLocalFiles: <T extends Context>() => ListrTask<T, typeof import("listr2").ListrRenderer>;
export default diffLocalFiles;
|
"use strict";
// NOTE(review): appears to be Babel-transpiled output of a CalendarView
// component (react-widgets style) — prefer editing the ES source, not this file.
exports.__esModule = true;
exports.default = void 0;
var _react = _interopRequireDefault(require("react"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _classnames = _interopRequireDefault(require("classnames"));
var _dates = _interopRequireDefault(require("./util/dates"));
// Babel interop/inheritance helpers (generated).
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
var VIEW_UNITS = ['month', 'year', 'decade', 'century'];
// Clamp a date into the inclusive [min, max] range.
function clamp(date, min, max) {
return _dates.default.max(_dates.default.min(date, max), min);
}
// Grid container: renders a <table role="grid"> carrying all passed props.
var CalendarView =
/*#__PURE__*/
function (_React$Component) {
_inheritsLoose(CalendarView, _React$Component);
function CalendarView() {
return _React$Component.apply(this, arguments) || this;
}
var _proto = CalendarView.prototype;
_proto.render = function render() {
var _this$props = this.props,
className = _this$props.className,
activeId = _this$props.activeId,
props = _objectWithoutProperties(_this$props, ["className", "activeId"]);
return _react.default.createElement("table", _extends({}, props, {
role: "grid",
tabIndex: "-1",
"aria-activedescendant": activeId || null,
className: (0, _classnames.default)(className, 'rw-nav-view', 'rw-calendar-grid')
}));
};
return CalendarView;
}(_react.default.Component);
CalendarView.propTypes = {
activeId: _propTypes.default.string
};
// Single cell of the grid: one day/month/year/decade depending on `unit`.
// Clicking a non-disabled, in-range cell reports the clamped date upward.
var CalendarViewCell =
/*#__PURE__*/
function (_React$Component2) {
_inheritsLoose(CalendarViewCell, _React$Component2);
function CalendarViewCell() {
var _this;
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
_this = _React$Component2.call.apply(_React$Component2, [this].concat(args)) || this;
// Click handler: emit the cell's date clamped into [min, max].
_this.handleChange = function () {
var _this$props2 = _this.props,
onChange = _this$props2.onChange,
min = _this$props2.min,
max = _this$props2.max,
date = _this$props2.date;
onChange(clamp(date, min, max));
};
return _this;
}
var _proto2 = CalendarViewCell.prototype;
// A cell is "empty" when its date falls outside the allowed range.
_proto2.isEmpty = function isEmpty() {
var _this$props3 = this.props,
unit = _this$props3.unit,
min = _this$props3.min,
max = _this$props3.max,
date = _this$props3.date;
return !_dates.default.inRange(date, min, max, unit);
};
_proto2.isEqual = function isEqual(date) {
return _dates.default.eq(this.props.date, date, this.props.unit);
};
_proto2.isFocused = function isFocused() {
return !this.props.disabled && !this.isEmpty() && this.isEqual(this.props.focused);
};
_proto2.isNow = function isNow() {
return this.props.now && this.isEqual(this.props.now);
};
// True when the cell's date belongs to a different viewUnit period than
// the focused date (e.g. previous/next month days shown in a month view).
_proto2.isOffView = function isOffView() {
var _this$props4 = this.props,
viewUnit = _this$props4.viewUnit,
focused = _this$props4.focused,
date = _this$props4.date;
return date && focused && viewUnit && _dates.default[viewUnit](date) !== _dates.default[viewUnit](focused);
};
_proto2.isSelected = function isSelected() {
return this.props.selected && this.isEqual(this.props.selected);
};
_proto2.render = function render() {
var _this$props5 = this.props,
children = _this$props5.children,
activeId = _this$props5.activeId,
label = _this$props5.label,
disabled = _this$props5.disabled;
var isDisabled = disabled || this.isEmpty();
return _react.default.createElement("td", {
role: "gridcell",
id: this.isFocused() ? activeId : null,
title: label,
"aria-label": label,
"aria-readonly": disabled,
"aria-selected": this.isSelected(),
onClick: !isDisabled ? this.handleChange : undefined,
className: (0, _classnames.default)('rw-cell', this.isNow() && 'rw-now', isDisabled && 'rw-state-disabled', this.isEmpty() && 'rw-cell-not-allowed', this.isOffView() && 'rw-cell-off-range', this.isFocused() && 'rw-state-focus', this.isSelected() && 'rw-state-selected')
}, children);
};
return CalendarViewCell;
}(_react.default.Component);
CalendarViewCell.propTypes = {
id: _propTypes.default.string,
activeId: _propTypes.default.string.isRequired,
label: _propTypes.default.string,
now: _propTypes.default.instanceOf(Date),
date: _propTypes.default.instanceOf(Date),
selected: _propTypes.default.instanceOf(Date),
focused: _propTypes.default.instanceOf(Date),
min: _propTypes.default.instanceOf(Date),
max: _propTypes.default.instanceOf(Date),
unit: _propTypes.default.oneOf(['day'].concat(VIEW_UNITS)),
viewUnit: _propTypes.default.oneOf(VIEW_UNITS),
onChange: _propTypes.default.func.isRequired,
disabled: _propTypes.default.bool
};
// Static sub-components attached to the container for JSX convenience.
CalendarView.Body = function (props) {
return _react.default.createElement("tbody", _extends({
className: "rw-calendar-body"
}, props));
};
CalendarView.Row = function (props) {
return _react.default.createElement("tr", _extends({
role: "row",
className: "rw-calendar-row"
}, props));
};
CalendarView.Cell = CalendarViewCell;
var _default = CalendarView;
exports.default = _default;
module.exports = exports["default"];
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.