text
stringlengths 1
1.05M
|
|---|
# Returns TRUE for each element of `string` that contains at least one
# ASCII letter (a-z or A-Z); vectorised over `string` like grepl().
check_string <- function(string) {
  grepl("[a-zA-Z]", string)
}
|
module Rspiderx
  # Sample implementation for feeding the initial data for the fetcher.
  class FeederSimple
    # Wraps +input+ in a 'feed' event hash understood by the fetcher.
    def feed(input)
      { event: 'feed', data: input }
    end
  end
end
|
<gh_stars>0
'use strict';
// Community feature module: a $resource factory plus a controller that
// loads one community record based on the current route parameter.
// NOTE(review): the API base URL is hard-coded to localhost:4000 in two
// places — consider extracting it into a module constant.
angular.module('myApp.community', ['ngRoute','ngResource'])
.factory('Community',function($resource) {
  // RESTful endpoint for community records; :id is filled in per request.
  return $resource('http://localhost:4000/community/:id')
})
.controller('communityCtrl', function($scope, Community,$http,$routeParams) {
  /*var test = [];
  test = Community.query();
  $scope.history = test;*/
  // Fetch the community identified by the :param route segment and expose
  // it on scope for the template.
  $http.get("http://localhost:4000/community/"+$routeParams.param)
  .then(function(response) {
    $scope.community = response.data;
    console.log($scope.community);
  });
  /* var entry = Community.get({ id:$routeParams.param }, function() {
  console.log(entry);
  }); // get() returns a single entry*/
});
|
#!/bin/bash
#
# Packages an OSX testnet distribution: collects Tari binaries, launch
# scripts and config into /tmp/tari_testnet and archives the folder as
# "<name>.tar.gz" (pass any second argument to skip archiving).
#
# todo: update arg handling
if [ $# -eq 0 ]; then
  echo
  echo Please provide archive file name, \'.tar.gz\' will be appended
  echo
  # BUG FIX: exit non-zero on a usage error so callers can detect it.
  exit 1
fi
rm -f "./$1.tar.gz" >/dev/null
tarball_parent=/tmp
tarball_source=tari_testnet
tarball_folder=${tarball_parent}/${tarball_source}
# Start from a clean staging folder.
if [ -d "${tarball_folder}" ]; then
  rm -f -r "${tarball_folder:?}"
fi
mkdir "${tarball_folder}"
mkdir "${tarball_folder}/config"
mkdir "${tarball_folder}/runtime"
mkdir "${tarball_folder}/scripts"
local_dir="$(
  cd "$(dirname "$0")" >/dev/null 2>&1 || exit 1
  pwd -P
)"
# greadlink = GNU readlink from coreutils — required on macOS.
project_dir="$(dirname "$(greadlink -e "$local_dir")")"
app_dir="$(dirname "$(greadlink -e "$project_dir/applications/tari_base_node")")"
# Sanity check that we are running from <repo>/buildtools.
if [ ! "${app_dir}" == "${project_dir}/applications" ]; then
  echo
  echo Please run this script from '/buildtools'
  echo
  exit 1
else
  echo
  echo Found project folders:
  echo " ${project_dir}"
  echo " ${local_dir}"
  echo " ${app_dir}"
  echo
fi
# One click miner
# cp -f -P "${app_dir}/tari_base_node/osx/start_all" "${tarball_folder}/start_all"
# cp -f "${app_dir}/tari_base_node/osx/runtime/start_all.sh" "${tarball_folder}/runtime/start_all.sh"
# Base Node
cp -f -P "${app_dir}/tari_base_node/osx/start_tari_base_node" "${tarball_folder}/start_tari_base_node"
cp -f -P "${app_dir}/tari_base_node/osx/start_tor" "${tarball_folder}/start_tor"
cp -f "${app_dir}/tari_base_node/osx/runtime/start_tari_base_node.sh" "${tarball_folder}/runtime/start_tari_base_node.sh"
cp -f "${app_dir}/tari_base_node/osx/runtime/start_tor.sh" "${tarball_folder}/runtime/start_tor.sh"
cp -f "${project_dir}/target/release/tari_base_node" "${tarball_folder}/runtime/tari_base_node"
# Console Wallet
cp -f -P "${app_dir}/tari_console_wallet/osx/start_tari_console_wallet" "${tarball_folder}/start_tari_console_wallet"
cp -f "${app_dir}/tari_console_wallet/osx/runtime/start_tari_console_wallet.sh" "${tarball_folder}/runtime/start_tari_console_wallet.sh"
cp -f "${project_dir}/target/release/tari_console_wallet" "${tarball_folder}/runtime/tari_console_wallet"
# Mining Node
cp -f -P "${app_dir}/tari_mining_node/osx/start_tari_mining_node" "${tarball_folder}/start_tari_mining_node"
cp -f "${app_dir}/tari_mining_node/osx/runtime/start_tari_mining_node.sh" "${tarball_folder}/runtime/start_tari_mining_node.sh"
cp -f "${project_dir}/target/release/tari_mining_node" "${tarball_folder}/runtime/tari_mining_node"
# Merge Mining Proxy
cp -f -P "${app_dir}/tari_merge_mining_proxy/osx/start_tari_merge_mining_proxy" "${tarball_folder}/start_tari_merge_mining_proxy"
cp -f -P "${app_dir}/tari_merge_mining_proxy/osx/start_xmrig" "${tarball_folder}/start_xmrig"
cp -f "${app_dir}/tari_merge_mining_proxy/osx/runtime/start_tari_merge_mining_proxy.sh" "${tarball_folder}/runtime/start_tari_merge_mining_proxy.sh"
cp -f "${app_dir}/tari_merge_mining_proxy/osx/runtime/start_xmrig.sh" "${tarball_folder}/runtime/start_xmrig.sh"
cp -f "${project_dir}/target/release/tari_merge_mining_proxy" "${tarball_folder}/runtime/tari_merge_mining_proxy"
# Collectibles
cp -f "${project_dir}/target/release/tari_collectibles" "${tarball_folder}/runtime/tari_collectibles"
# Validator node
cp -f "${project_dir}/target/release/tari_validator_node" "${tarball_folder}/runtime/tari_validator_node"
# todo: launchpad
# 3rd party install
cp -f "${local_dir}/install_xmrig.sh" "${tarball_folder}/runtime/install_xmrig.sh"
cp -f "${local_dir}/get_xmrig_osx.ps1" "${tarball_folder}/runtime/get_xmrig_osx.ps1"
# Config
cat "${project_dir}"/common/config/presets/*.toml >"${tarball_folder}/config/config.toml"
cp -f "${project_dir}/common/xmrig_config/config_example_stagenet.json" "${tarball_folder}/config/xmrig_config_example_stagenet.json"
# BUG FIX: destination was misspelled "xxmrig_config_example_mainnet.json";
# it now matches the naming of the other xmrig config files.
cp -f "${project_dir}/common/xmrig_config/config_example_mainnet.json" "${tarball_folder}/config/xmrig_config_example_mainnet.json"
cp -f "${project_dir}/common/xmrig_config/config_example_mainnet_self_select.json" "${tarball_folder}/config/xmrig_config_example_mainnet_self_select.json"
# Scripts
cp -f "${local_dir}/osx_postinstall.sh" "${tarball_folder}/scripts/postinstall"
echo Files copied to "${tarball_folder}"
echo Creating archive...
echo
cd "${tarball_parent}" || exit 1
if [ -z "$2" ]; then
  # BUG FIX: the archive is named ".tar.gz", so gzip it (-z); the original
  # "tar -cvf" produced an uncompressed tar with a misleading extension.
  tar -czvf "${local_dir}/$1.tar.gz" "${tarball_source}"
  cd "${local_dir}" || exit 1
  echo
  echo Created "./$1.tar.gz" in "${local_dir}".
  echo
else
  echo
  echo Files copied to "${tarball_folder}". No zip created.
  echo
fi
|
<gh_stars>0
"use strict";
// Sample input exercised by the driver call at the bottom of the file.
var str = "abcababcababcab";
//const str: string = "abab";
/**
*
* @param str String to find period in
* @returns
*/
/**
 * Finds the longest substring (length >= 2) of `str` that occurs again
 * later in the string, without overlapping its first occurrence.
 * @param str String to find period in
 * @returns The repeated substring, or "-1" if none exists.
 */
function StringPeriod(str) {
    // Try the largest candidate length first so the first hit is longest.
    // BUG FIX: the original started `size` at the possibly-fractional
    // str.length / 2 and compared `tmp` against a window one character
    // too short (k + size - 1), so equal-length windows never matched.
    for (var size = Math.floor(str.length / 2); size >= 2; size--) {
        for (var ind = 0; ind + size <= str.length; ind++) {
            var tmp = str.substring(ind, ind + size);
            // Look for a second, non-overlapping occurrence of `tmp`.
            for (var k = ind + size; k + size <= str.length; k++) {
                if (tmp === str.substring(k, k + size))
                    return tmp;
            }
        }
    }
    return "-1";
}
// Driver: print the detected period of the sample string.
console.log(StringPeriod(str));
|
import * as React from "react"
import PropTypes from "prop-types"
import { Container, Title, TitleSub, Section, UnOrderingList } from '../styles';
// Static card data rendered by ServicesSection below; `background` is the
// card colour and `id` doubles as the React list key.
const servicesList = [
  {
    id: 1,
    background: '#f9d423',
    title: 'Digital Marketing'
  },
  {
    id: 2,
    background: '#2575fc',
    title: 'Web/Mobile App'
  },
  {
    id: 3,
    background: '#ff9099',
    title: 'Web Design'
  },
  {
    id: 4,
    background: '#ccd031',
    title: 'E-Commerce'
  }
]
const ServicesSection = ({ title, email, address, siteTitle }) => (
<Section className="section-services">
<Container>
<TitleSub
color='white'
>{`What we do?`}</TitleSub>
<Title
className="underline"
as='h3'
option={{
size: '10rem',
color: 'white',
weight: '300'
}}
>
{`Let's Grow Business Together.`}
</Title>
<UnOrderingList>
{
servicesList && servicesList.map(({ id, background, title }) => {
return (
<li key={id} style={{background}}>
{title}
</li>
)
})
}
</UnOrderingList>
</Container>
</Section>
)
// No prop validation yet — the component does not render any of its props.
ServicesSection.propTypes = {
}
export default ServicesSection
|
<reponame>WGBH/django-pbsmmapi
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-31 15:37
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
class Migration(migrations.Migration):
    """Initial schema for the PBS Media Manager "show" app.

    Auto-generated by Django 1.11.13. Creates the PBSMMShow and
    PBSMMShowAsset tables; API-sourced payloads are stored either in the
    ``json`` JSONField or as JSON-serialized TextFields.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Show container record mirroring the PBSMM show API object.
        migrations.CreateModel(
            name='PBSMMShow',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_created', models.DateTimeField(auto_now_add=True, help_text='Not set by API', verbose_name='Created On')),
                ('date_last_api_update', models.DateTimeField(help_text='Not set by API', null=True, verbose_name='Last API Retrieval')),
                ('ingest_on_save', models.BooleanField(default=False, help_text='If true, then will update values from the PBSMM API on save()', verbose_name='Ingest on Save')),
                ('last_api_status', models.PositiveIntegerField(blank=True, null=True, verbose_name='Last API Status')),
                ('json', jsonfield.fields.JSONField(blank=True, help_text='This is the last JSON uploaded.', null=True, verbose_name='JSON')),
                ('publish_status', models.IntegerField(choices=[(-1, 'NEVER Available'), (0, 'USE "Live as of Date"'), (1, 'ALWAYS Available')], default=0, verbose_name='Publish Status')),
                ('live_as_of', models.DateTimeField(blank=True, help_text='You can Set this to a future date/time to schedule availability.', null=True, verbose_name='Live As Of')),
                ('object_id', models.UUIDField(blank=True, null=True, unique=True, verbose_name='Object ID')),
                ('api_endpoint', models.URLField(blank=True, help_text='Endpoint to original record from the API', null=True, verbose_name='Link to API Record')),
                ('title', models.CharField(blank=True, max_length=200, null=True, verbose_name='Title')),
                ('title_sortable', models.CharField(blank=True, max_length=200, null=True, verbose_name='Sortable Title')),
                ('slug', models.SlugField(max_length=200, unique=True, verbose_name='Slug')),
                ('description_long', models.TextField(verbose_name='Long Description')),
                ('description_short', models.TextField(verbose_name='Short Description')),
                ('updated_at', models.DateTimeField(blank=True, help_text='API record modified date', null=True, verbose_name='Updated At')),
                ('premiered_on', models.DateTimeField(blank=True, null=True, verbose_name='Premiered On')),
                ('nola', models.CharField(blank=True, max_length=8, null=True, verbose_name='NOLA Code')),
                ('images', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Images')),
                ('canonical_image_type_override', models.CharField(blank=True, help_text='Profile Image Type to use for Canonical Image', max_length=80, null=True, verbose_name='Canonical Image Type Override')),
                ('funder_message', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Funder Message')),
                ('is_excluded_from_dfp', models.BooleanField(default=False, verbose_name='Is excluded from DFP')),
                ('can_embed_player', models.BooleanField(default=False, verbose_name='Can Embed Player')),
                ('links', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Links')),
                ('platforms', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Platforms')),
                ('ga_page', models.CharField(blank=True, max_length=40, null=True, verbose_name='GA Page Tag')),
                ('ga_event', models.CharField(blank=True, max_length=40, null=True, verbose_name='GA Event Tag')),
                ('genre', models.TextField(blank=True, help_text='JSON Serialized Field', null=True, verbose_name='Genre')),
                ('episode_count', models.PositiveIntegerField(blank=True, null=True, verbose_name='Episode Count')),
                ('display_episode_number', models.BooleanField(default=False, verbose_name='Display Episode Number')),
                ('sort_episodes_descending', models.BooleanField(default=False, verbose_name='Sort Episodes Descending')),
                ('ordinal_season', models.BooleanField(default=True, verbose_name='Ordinal Season')),
                ('language', models.CharField(blank=True, max_length=10, null=True, verbose_name='Language')),
                ('audience', models.TextField(blank=True, help_text='JSON Serialized Field', null=True, verbose_name='Audience')),
                ('hashtag', models.CharField(blank=True, max_length=100, null=True, verbose_name='Hashtag')),
                ('ingest_seasons', models.BooleanField(default=False, help_text='Also ingest all Seasons', verbose_name='Ingest Seasons')),
                ('ingest_specials', models.BooleanField(default=False, help_text='Also ingest all Specials', verbose_name='Ingest Specials')),
                ('ingest_episodes', models.BooleanField(default=False, help_text='Also ingest all Episodes (for each Season)', verbose_name='Ingest Episodes')),
            ],
            options={
                'db_table': 'pbsmm_show',
                'verbose_name': 'PBS MM Show',
                'verbose_name_plural': 'PBS MM Shows',
            },
        ),
        # Asset (media) record attached to a show via the `show` FK.
        migrations.CreateModel(
            name='PBSMMShowAsset',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_created', models.DateTimeField(auto_now_add=True, help_text='Not set by API', verbose_name='Created On')),
                ('date_last_api_update', models.DateTimeField(help_text='Not set by API', null=True, verbose_name='Last API Retrieval')),
                ('ingest_on_save', models.BooleanField(default=False, help_text='If true, then will update values from the PBSMM API on save()', verbose_name='Ingest on Save')),
                ('last_api_status', models.PositiveIntegerField(blank=True, null=True, verbose_name='Last API Status')),
                ('json', jsonfield.fields.JSONField(blank=True, help_text='This is the last JSON uploaded.', null=True, verbose_name='JSON')),
                ('object_id', models.UUIDField(blank=True, null=True, unique=True, verbose_name='Object ID')),
                ('api_endpoint', models.URLField(blank=True, help_text='Endpoint to original record from the API', null=True, verbose_name='Link to API Record')),
                ('title', models.CharField(blank=True, max_length=200, null=True, verbose_name='Title')),
                ('title_sortable', models.CharField(blank=True, max_length=200, null=True, verbose_name='Sortable Title')),
                ('slug', models.SlugField(max_length=200, unique=True, verbose_name='Slug')),
                ('description_long', models.TextField(verbose_name='Long Description')),
                ('description_short', models.TextField(verbose_name='Short Description')),
                ('updated_at', models.DateTimeField(blank=True, help_text='API record modified date', null=True, verbose_name='Updated At')),
                ('images', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Images')),
                ('canonical_image_type_override', models.CharField(blank=True, help_text='Profile Image Type to use for Canonical Image', max_length=80, null=True, verbose_name='Canonical Image Type Override')),
                ('funder_message', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Funder Message')),
                ('is_excluded_from_dfp', models.BooleanField(default=False, verbose_name='Is excluded from DFP')),
                ('can_embed_player', models.BooleanField(default=False, verbose_name='Can Embed Player')),
                ('links', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Links')),
                ('platforms', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Platforms')),
                ('windows', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Windows')),
                ('geo_profile', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Geo Profile')),
                ('language', models.CharField(blank=True, max_length=10, null=True, verbose_name='Language')),
                ('legacy_tp_media_id', models.BigIntegerField(blank=True, help_text='(Legacy TP Media ID)', null=True, unique=True, verbose_name='COVE ID')),
                ('availability', models.TextField(blank=True, help_text='JSON serialized Field', null=True, verbose_name='Availability')),
                ('duration', models.IntegerField(blank=True, help_text='(in seconds)', null=True, verbose_name='Duration')),
                ('object_type', models.CharField(blank=True, max_length=40, null=True, verbose_name='Object Type')),
                ('has_captions', models.BooleanField(default=False, verbose_name='Has Captions')),
                ('tags', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Tags')),
                ('topics', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Topics')),
                ('player_code', models.TextField(blank=True, null=True, verbose_name='Player Code')),
                ('chapters', models.TextField(blank=True, help_text='JSON serialized field', null=True, verbose_name='Chapters')),
                ('content_rating', models.CharField(blank=True, max_length=100, null=True, verbose_name='Content Rating')),
                ('content_rating_description', models.TextField(blank=True, null=True, verbose_name='Content Rating Description')),
                ('override_default_asset', models.PositiveIntegerField(choices=[(1, 'Yes'), (0, 'No')], default=0, verbose_name='Override Default Asset')),
                ('show', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assets', to='show.PBSMMShow')),
            ],
            options={
                'db_table': 'pbsmm_show_asset',
                'verbose_name': 'PBS MM Show - Asset',
                'verbose_name_plural': 'PBS MM Shows - Assets',
            },
        ),
    ]
|
<gh_stars>1-10
import {
managedChild,
ManagedList,
ManagedRecord,
ManagedService,
service,
} from "typescene";
import { RemoteService } from "./Remote";
import { Profile } from "./User";
/** All fields for an article object */
export class Article extends ManagedRecord {
  constructor(
    public slug = "",
    public title = "",
    public description = "",
    public body = ""
  ) {
    super();
  }
  // Optional fields populated from the server response:
  tagList?: string[];
  createdAt?: string;
  updatedAt?: string;
  favorited?: boolean;
  favoritesCount?: number;
  author?: Profile;
}
/** All fields for a comment object */
export class Comment extends ManagedRecord {
  // All fields optional: instances are built from API data via Comment.create.
  id?: string;
  createdAt?: string;
  updatedAt?: string;
  body?: string;
  author?: Profile;
}
/** All fields to be specified to query articles from the server */
export interface ArticleQuery {
  // Pagination controls:
  limit?: number;
  offset?: number;
  // Optional server-side filters:
  tag?: string;
  author?: string;
  favorited?: string;
}
/** Managed list of comments */
export type CommentList = ManagedList<Comment>;
/** Represents an article feed that can be updated at another offset */
export class ArticleFeed extends ManagedRecord {
  constructor(
    public readonly query: ArticleQuery,
    public readonly isPersonalFeed?: boolean
  ) {
    super();
  }

  /** Remote API gateway, injected by the service framework */
  @service("App.Remote")
  remote!: RemoteService;

  /** Reload the feed from given offset */
  async updateAsync(offset = 0) {
    // The query object is shared with the caller; its offset is advanced
    // in place so later updates continue from the new position.
    this.query.offset = Math.max(0, offset);
    try {
      this.loading = true;
      // Serialize the query as &key=value pairs; the leading "&" is
      // stripped with slice(1) below.
      let params = "";
      let q = this.query;
      for (let key in q) {
        params += "&" + key + "=" + encodeURIComponent((q as any)[key]);
      }
      let path = this.isPersonalFeed ? "articles/feed" : "articles";
      let result = await this.remote!.getAsync(path + "?" + params.slice(1));
      let articles: Article[] = result.articles;
      if (!Array.isArray(articles)) {
        throw Error("Invalid article listing");
      }
      // check if not destroyed while loading
      // NOTE(review): assumes managedState is falsy once the record is
      // destroyed — confirm against the typescene ManagedRecord docs.
      if (this.managedState) {
        this.totalCount = result.articlesCount || 0;
        this.list.replace(articles.map(a => Article.create(a)));
        this.offset = offset || 0;
        this.error = false;
        this.emitChange();
      }
    } catch (err) {
      // Failures flip the error flag and notify observers; never rethrown.
      console.log(err);
      this.error = true;
      this.emitChange();
    }
    this.loading = false;
  }

  /** The actual list of articles */
  @managedChild
  list = new ManagedList<Article>();
  /** True if the list is currently loading */
  loading?: boolean;
  /** True if an error occurred while loading */
  error?: boolean;
  /** Total number of articles, could be more than the current list */
  totalCount?: number;
  /** Current article offset, starts at 0 */
  offset = 0;
}
/** Service that handles articles and comments */
export class ArticlesService extends ManagedService {
  name = "App.Articles";

  /** Remote API gateway, injected by the service framework */
  @service("App.Remote")
  remote?: RemoteService;

  /** Returns an article feed for given query */
  getArticleFeed(q: ArticleQuery = {}, isPersonalFeed?: boolean) {
    let result = new ArticleFeed(q, isPersonalFeed);
    // Kick off the first load without awaiting; the feed reports progress
    // through its loading/error flags.
    result.updateAsync();
    return result;
  }

  /** Returns the article for given slug */
  async getArticleAsync(slug: string) {
    let result = await this.remote!.getAsync("articles/" + slug);
    let article: Partial<Article> = result.article;
    if (!article || !article.slug) {
      throw Error("Invalid article");
    }
    return Article.create(article);
  }

  /** Saves (create/update) given article data, returns the resulting record */
  async saveArticleAsync(article: Article) {
    // An existing slug means the article exists on the server: PUT updates
    // it, otherwise POST creates a new one.
    let result = article.slug
      ? await this.remote!.putAsync("articles/" + article.slug, { article })
      : await this.remote!.postAsync("articles", { article });
    let created: Partial<Article> = result.article;
    if (!created || !created.slug) {
      throw Error("Invalid article");
    }
    return Article.create(created);
  }

  /** Deletes the article with given slug */
  async deleteArticleAsync(slug: string) {
    await this.remote!.deleteAsync("articles/" + slug);
  }

  /** Marks the article with given slug as a favorite for the current user */
  async favoriteArticleAsync(slug: string) {
    await this.remote!.postAsync("articles/" + slug + "/favorite", {});
  }

  /** Unmarks the article with given slug as a favorite for the current user */
  async unfavoriteArticleAsync(slug: string) {
    await this.remote!.deleteAsync("articles/" + slug + "/favorite");
  }

  /** Returns a managed list of Comment records */
  async getCommentsAsync(slug: string): Promise<CommentList> {
    let result = await this.remote!.getAsync("articles/" + slug + "/comments");
    let comments: Array<Partial<Comment>> = result.comments;
    if (!Array.isArray(comments)) {
      throw Error("Invalid comments listing");
    }
    return new ManagedList(...comments.map(c => Comment.create(c)));
  }

  /** Saves (creates) given comment for the article with given slug */
  // NOTE(review): method name contains a triple "m" (addCommmentAsync);
  // kept as-is because external callers depend on it.
  async addCommmentAsync(slug: string, comment: Partial<Comment>) {
    let result = await this.remote!.postAsync(
      "articles/" + slug + "/comments",
      { comment }
    );
    let created: Partial<Comment> = result.comment;
    if (!created || !created.id) {
      throw Error("Invalid comment");
    }
    return Comment.create(created);
  }

  /** Deletes given comment from the article with given slug */
  async deleteCommentAsync(slug: string, id: string) {
    await this.remote!.deleteAsync("articles/" + slug + "/comments/" + id);
  }
}
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
export class Lists extends Component {
render() {
let arrTasks = this.props.items;
let componentUI = {
title: 'Uncompleted tasks',
style: ''
}
this.props.titleStatus === 1 ?
componentUI = {
...componentUI,
title: 'Completed tasks',
style: 'deleted-tasks'
} :
null
if (arrTasks.length > 0)
return (
<div>
<p>{componentUI.title}</p>
<table className={componentUI.style}>
<tbody>
{arrTasks.map((item, key) => {
return (
<tr key={key}>
<td>
<table>
<tbody>
<tr>
<td>
{
item.isDone ?
<input type="checkbox" onClick={e => this.props.fnCheck(item.id, e)} defaultChecked /> :
<input type="checkbox" onClick={e => this.props.fnCheck(item.id, e)} />
}
</td>
<td>{item.description}</td>
<td className="icon">
<svg onClick={e => this.props.fnRemove(item.id, e)} aria-hidden="true" data-prefix="far" data-icon="trash-alt" className="svg-inline--fa fa-trash-alt fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M192 188v216c0 6.627-5.373 12-12 12h-24c-6.627 0-12-5.373-12-12V188c0-6.627 5.373-12 12-12h24c6.627 0 12 5.373 12 12zm100-12h-24c-6.627 0-12 5.373-12 12v216c0 6.627 5.373 12 12 12h24c6.627 0 12-5.373 12-12V188c0-6.627-5.373-12-12-12zm132-96c13.255 0 24 10.745 24 24v12c0 6.627-5.373 12-12 12h-20v336c0 26.51-21.49 48-48 48H80c-26.51 0-48-21.49-48-48V128H12c-6.627 0-12-5.373-12-12v-12c0-13.255 10.745-24 24-24h74.411l34.018-56.696A48 48 0 0 1 173.589 0h100.823a48 48 0 0 1 41.16 23.304L349.589 80H424zm-269.611 0h139.223L276.16 50.913A6 6 0 0 0 271.015 48h-94.028a6 6 0 0 0-5.145 2.913L154.389 80zM368 128H80v330a6 6 0 0 0 6 6h276a6 6 0 0 0 6-6V128z"></path></svg>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
)
})}
</tbody>
</table>
</div>
)
return null
}
};
// NOTE(review): render() reads items.length unconditionally, so `items`
// should arguably be PropTypes.array.isRequired.
Lists.propTypes = {
  items: PropTypes.array,
  fnCheck: PropTypes.func.isRequired,
  fnRemove: PropTypes.func.isRequired
}
|
#!/bin/bash
#
# Extracts wind-generation rows from an energy-mix CSV ($3 matches /Wind/)
# and aggregates all regions' generation ($4) by hour keyed on $1.
SAVEIFS=$IFS;
IFS=$(echo -en "\n\b");
# BUG FIX: use a proper empty-string test with $1 quoted (the unquoted
# `A$1 == "A"` idiom breaks on arguments containing whitespace) and exit
# non-zero so callers can detect the usage error.
if [ -z "$1" ]; then
echo "Need an input file"
exit 1;
fi
MIX_ENERGY_DATA_FILE=$1
#Filter Wind
awk -F, '{if(match($3,/Wind/)) print;}' "$MIX_ENERGY_DATA_FILE" > filter_wind.csv;
#then Aggregate all regions electricity generation by hour
# BUG FIX: this step read ./filter.csv, which is never created; it now
# reads the filter_wind.csv file produced by the step above.
awk -F, '{data[$1]+=$4;}END{for(i in data) { split(i, a, /|"|:| |\//); if(a[5] != "") {d=mktime(a[2]" "a[3]" "a[4]" "a[5]" "a[6]" "a[7]);} else {d=mktime(a[1]" "a[2]" "a[3]" "00" "00" "00);} dd=strftime("%Y-%m-%dT%H:00:00", d);print dd,",",data[i];}}' ./filter_wind.csv # > output_hourEnergyType.csv;
#$1:date, $2: generation
IFS=$SAVEIFS
|
<reponame>iamareebjamal/roboclub-amu
package amu.roboclub.ui.fragments;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.design.widget.Snackbar;
import android.support.v4.app.Fragment;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.firebase.ui.database.FirebaseRecyclerAdapter;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.Query;
import com.google.firebase.database.ValueEventListener;
import amu.roboclub.R;
import amu.roboclub.models.Profile;
import amu.roboclub.ui.viewholder.ProfileHolder;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * Displays the team member grid backed by the Firebase "team/16" node,
 * ordered by the "rank" child. Users listed under admins/&lt;uid&gt; get an
 * admin override on profile cards.
 */
public class TeamFragment extends Fragment {

    private Snackbar snackbar;
    @BindView(R.id.recycler_view) RecyclerView recyclerView;

    public static TeamFragment newInstance() {
        return new TeamFragment();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View root = inflater.inflate(R.layout.fragment_team, container, false);
        ButterKnife.bind(this, root);

        GridLayoutManager gridLayoutManager = new GridLayoutManager(getContext(), 1);
        recyclerView.setLayoutManager(gridLayoutManager);
        recyclerView.setItemAnimator(new DefaultItemAnimator());
        recyclerView.setNestedScrollingEnabled(false);

        // Shown until the first profile arrives from the database.
        snackbar = Snackbar.make(recyclerView, R.string.loading_members, Snackbar.LENGTH_INDEFINITE);
        snackbar.show();

        FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser();
        // CONSISTENCY FIX: reuse the user fetched above instead of calling
        // FirebaseAuth.getInstance().getCurrentUser() a second time.
        ProfileHolder.setUser(user);

        Query teamReference = FirebaseDatabase.getInstance().getReference("team/16").orderByChild("rank");

        // Parameterized adapter type (the original declared the raw type).
        FirebaseRecyclerAdapter<Profile, ProfileHolder> teamAdapter =
                new FirebaseRecyclerAdapter<Profile, ProfileHolder>
                        (Profile.class, R.layout.item_contact, ProfileHolder.class, teamReference) {
                    @Override
                    protected void populateViewHolder(final ProfileHolder holder, Profile profile, int position) {
                        // First item rendered: loading indicator no longer needed.
                        if (snackbar.isShown())
                            snackbar.dismiss();
                        holder.setProfile(getContext(), profile, getRef(position).toString());
                    }
                };
        recyclerView.setAdapter(teamAdapter);

        // Two columns in landscape, one in portrait.
        if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
            gridLayoutManager.setSpanCount(2);
        }

        if (user != null) {
            FirebaseDatabase.getInstance()
                    .getReference("admins/" + user.getUid())
                    .addListenerForSingleValueEvent(new ValueEventListener() {
                        @Override
                        public void onDataChange(DataSnapshot dataSnapshot) {
                            if (dataSnapshot.getValue() != null) {
                                // User is Admin. Set override to true
                                Log.d("TeamFragment", "Admin Override Enabled");
                                ProfileHolder.setAdminOverride(true);
                                teamAdapter.notifyDataSetChanged();
                            }
                        }

                        @Override
                        public void onCancelled(DatabaseError databaseError) {
                            // No action
                        }
                    });
        }
        return root;
    }
}
|
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import React from 'react';
import { storiesOf } from '@storybook/react';
import Filter16 from '@carbon/icons-react/lib/filter/16';
import { withKnobs, select, text } from '@storybook/addon-knobs';
import TooltipIcon from '../TooltipIcon';
import { componentsX } from '../../internal/FeatureFlags';
// Knob options for the tooltip direction select.
var directions = {
  'Bottom (bottom)': 'bottom',
  'Top (top)': 'top'
};
// Builds the TooltipIcon props from the current knob values.
var props = function props() {
  return {
    direction: select('Tooltip direction (direction)', directions, 'bottom'),
    tooltipText: text('Tooltip content (tooltipText)', 'Filter')
  };
};
// Default story: TooltipIcon wrapping the filter glyph — the icons-react
// component when the componentsX feature flag is on, otherwise an inline
// SVG copy of the same 16x12 icon.
storiesOf('TooltipIcon', module).addDecorator(withKnobs).add('default', function () {
  return React.createElement(TooltipIcon, props(), componentsX ? React.createElement(Filter16, null) : React.createElement("svg", {
    width: "16",
    height: "12",
    viewBox: "0 0 16 12"
  }, React.createElement("g", {
    fillRule: "nonzero"
  }, React.createElement("path", {
    d: "M8.05 2a2.5 2.5 0 0 1 4.9 0H16v1h-3.05a2.5 2.5 0 0 1-4.9 0H0V2h8.05zm2.45 2a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3zM3.05 9a2.5 2.5 0 0 1 4.9 0H16v1H7.95a2.5 2.5 0 0 1-4.9 0H0V9h3.05zm2.45 2a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3z"
  }))));
}, {
  info: {
    text: "\n Tooltip Icon\n "
  }
});
|
/**
 * Tracks the current user as contributor/creator on SKOS records by
 * POSTing concept URIs to the editor host whenever concepts, schemes or
 * graphs are created or updated.
 */
function UserService(editor, host) {
  // POSTs `concepts` to the currentUser endpoint for `property`.
  // `single` == true replaces any existing value (used for dcterms:creator).
  function update(ed, concepts, property, single, callback) {
    if (!ed.active) return;
    if (!ed.graph) return;
    var delExisting = (single == true);
    var urlParams = "?property=" + encodeURIComponent(property)
      + "&context=" + encodeURIComponent(ed.graph)
      + "&replace=" + delExisting;
    // BUG FIX: jqXHR.error()/.complete() were deprecated in jQuery 1.8 and
    // removed in jQuery 3; .fail()/.always() are the supported equivalents
    // and behave identically here.
    $.post(ed.host + "skos/currentUser" + urlParams, {uri: concepts})
      .fail(function() { ed.active = false; })
      .always(function() { if (callback) callback(); });
  }

  var us = {
    editor: editor,
    host: host,
    graph: null,
    active: true,
    init: function() {
      var self = this;
      // Updated records get the current user added as dcterms:contributor.
      function handleUpdate(event) {
        var uris = event.data.uris;
        var prop = "http://purl.org/dc/terms/contributor";
        update(self, uris, prop, false, function() {
          self.editor.event.fire(editor.EventCode.VIEW.RELOAD, {uris: [prop]});
        });
      }
      // Newly created records get the current user set as dcterms:creator,
      // replacing any existing value.
      function handleCreate(event) {
        var uris = event.data.uri;
        var prop = "http://purl.org/dc/terms/creator";
        update(self, uris, prop, true, function() {
          self.editor.event.fire(editor.EventCode.VIEW.RELOAD, {uris: [prop]});
        });
      }
      // Remember which graph is active so update() can send it as context.
      self.editor.event.bind(editor.EventCode.GRAPH.LOAD, function(event) {
        self.graph = event.data.uri;
      });
      self.editor.event.bind(editor.EventCode.CONCEPT.UPDATED, handleUpdate);
      self.editor.event.bind(editor.EventCode.SCHEME.UPDATED, handleUpdate);
      self.editor.event.bind(editor.EventCode.GRAPH.UPDATED, handleUpdate);
      self.editor.event.bind(editor.EventCode.CONCEPT.CREATED, handleCreate);
      self.editor.event.bind(editor.EventCode.SCHEME.CREATED, handleCreate);
      self.editor.event.bind(editor.EventCode.GRAPH.CREATED, function(event) {
        self.graph = event.data.uri;
        handleCreate(event);
      });
    }
  };
  us.init();
  return us;
}
|
<reponame>mikitamironenka/job4j<gh_stars>0
package ru.job4j.io;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
//The main method writes text to the file "unavailable.csv".
//Task.
//1. Implement the unavailable method.
//source - name of the log file
//target - name of the file produced by the analysis.
//2. The unavailable() method must find the time ranges when the server was down.
//The server was down if status = 400 or 500.
//A range is a consecutive run of 400 and 500 statuses.
//For example:
//200 10:56:01 200 10:59:01
//500 10:57:01 500 11:01:02
//400 10:58:01 200 11:02:02
//here there are two periods - 10:57:01 to 10:59:01 and 11:01:02 to 11:02:02
//The start time is when the status becomes 400 or 500; the end time is when the status changes from 400 or 500 back to 200 or 300.
//3. The analysis result must be written to the target file.
//File format:
//period start;period end;
//4. Write tests.
public class Analizy {
    // Platform line separator appended after each closing timestamp.
    private final String lineSeparator = System.lineSeparator();
    // Accumulated output entries: "start-" followed later by "end<newline>".
    private List<String> list;

    public Analizy() {
        this.list = new ArrayList<>();
    }

    public List<String> getList() {
        return this.list;
    }

    /**
     * Scans the log in {@code source} (lines of "status time") and writes
     * every downtime range — a consecutive run of 4xx/5xx statuses — to
     * {@code target}.
     * NOTE(review): if the log ends while the server is still down, the
     * final entry keeps a trailing "-" with no closing timestamp; confirm
     * whether an open-ended range is intended.
     */
    public void unavailable(String source, String target) throws IOException {
        try (BufferedReader reader = new BufferedReader(new FileReader(source))) {
            String st;
            boolean isAvailable = true;
            String code;
            String time;
            while ((st = reader.readLine()) != null) {
                code = st.substring(0, st.indexOf(" "));
                time = st.substring(st.indexOf(" ") + 1);
                // Up -> down transition: record the start of the outage.
                if ((code.startsWith("4") || code.startsWith("5")) && isAvailable) {
                    isAvailable = false;
                    this.list.add(time + "-");
                } else if (!isAvailable && !((code.startsWith("4") || code.startsWith("5")))) {
                    // Down -> up transition: close the current outage range.
                    isAvailable = true;
                    this.list.add(time + lineSeparator);
                }
            }
        }
        writeToFile(this.list, target);
    }

    // Writes each accumulated entry verbatim (entries already carry their
    // own "-" / line-separator delimiters).
    private void writeToFile(List<String> list, String fileName) {
        try (PrintWriter out = new PrintWriter(new FileOutputStream(fileName))) {
            for (String string : list) {
                out.print(string);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
package de.ids_mannheim.korap.config;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Registry of default instances for every class annotated with
 * {@code Configurable}; one instance per class is created eagerly at
 * construction time and looked up by annotation value or by type.
 *
 * @author hanl
 * @date 17/06/2015
 */
public class DefaultHandler {

    // Maps the @Configurable annotation value to its created instance.
    private Map<String, Object> defaults;

    public DefaultHandler () {
        this.defaults = new HashMap<>();
        loadClasses();
    }

    // Instantiates every @Configurable class found by the class loader and
    // registers it under the annotation's value().
    private void loadClasses () {
        Set<Class<?>> cls = KustvaktClassLoader
                .loadFromAnnotation(Configurable.class);
        for (Class<?> clazz : cls) {
            Configurable c = clazz.getAnnotation(Configurable.class);
            try {
                this.defaults.put(c.value(), clazz.newInstance());
            }
            catch (InstantiationException | IllegalAccessException e) {
                // BUG FIX: include the offending class name and chain the
                // original exception as the cause instead of discarding both.
                throw new RuntimeException(
                        "Could not instantiate class " + clazz.getName(), e);
            }
        }
    }

    /** Returns the default instance registered under {@code name}, or null. */
    public Object getDefault (String name) {
        return this.defaults.get(name);
    }

    /** Returns the default instance whose exact class is {@code tClass}, or null. */
    public <T> T getDefault (Class<T> tClass) {
        for (Object o : this.defaults.values()) {
            if (o.getClass().equals(tClass))
                return (T) o;
        }
        return null;
    }

    /** Removes the default registered under {@code name}, if any. */
    public void remove (String name) {
        this.defaults.remove(name);
    }

    @Override
    public String toString () {
        return defaults.toString();
    }
}
|
<filename>gatsby-config.js
const path = require('path')
module.exports = {
siteMetadata: {
title: `<NAME> | Fullstack Software Developer`,
description: `Fullstack web and software developer with experinces in NodeJS, React, GO and DevOps.`,
author: `@alexanderhorl`,
nav: [
{
name: 'Contact',
link: '/contact'
},
],
footer: [
{
name: 'Imprint',
link: '/imprint'
},
],
social: [
{
name: 'Twitter',
url: 'https://twitter.com/alexanderhorl',
},
{
name: 'GitHub',
url: 'https://github.com/AlexanderProd',
},
]
},
plugins: [
`gatsby-plugin-react-helmet`,
`gatsby-transformer-sharp`,
{
resolve: `gatsby-plugin-manifest`,
options: {
name: `gatsby-starter-default`,
short_name: `starter`,
start_url: `/`,
background_color: `#663399`,
theme_color: `#663399`,
display: `minimal-ui`,
//icon: `src/images/gatsby-icon.png`,
},
},
{
resolve: `gatsby-plugin-google-analytics`,
options: {
trackingId: "UA-134421805-3",
anonymize: true,
respectDNT: true,
},
},
{
resolve: 'gatsby-plugin-root-import',
options: {
'~': path.join(__dirname, 'src/'),
},
},
// this (optional) plugin enables Progressive Web App + Offline functionality
// To learn more, visit: https://gatsby.dev/offline
// `gatsby-plugin-offline`,
],
}
|
import 'babel-polyfill'
import Mappersmith from 'mappersmith'
import 'mappersmith/fixtures'
import configureMockStore from 'redux-mock-store'
import thunk from 'redux-thunk'
const middlewares = [ thunk ]
const mockStore = configureMockStore(middlewares)
import {
FAILURE_SHOW_RETRY,
FAILURE_HIDE_RETRY,
REQUEST_FAILURE_RETRY,
RECEIVE_FAILURE_RETRY,
REQUEST_FAILURE_RETRY_FAILED,
ADD_FLASH_MESSAGE,
FAILURE_HIDE_OVERVIEW,
DELETE_FAILURE
} from 'actions'
import {
showFailureRetry,
hideFailureRetry,
performFailureRetry
} from 'actions/failures/retry'
// Reset mappersmith's fixture store before every spec so stubbed endpoints
// from one test never leak into the next.
beforeEach(() => {
  Mappersmith.Env.Fixture.clear()
})

// Specs for the failure-retry action creators and the async retry thunk.
describe('actions/failures/retry', () => {
  describe('#showFailureRetry', () => {
    it('creates an action to show failure retry', () => {
      const failure = { id: 1 }
      const expectedAction = { type: FAILURE_SHOW_RETRY, failure }
      expect(showFailureRetry(failure)).toEqual(expectedAction)
    })
  })

  describe('#hideFailureRetry', () => {
    it('creates an action to hide failure retry', () => {
      const failure = { id: 1 }
      const expectedAction = { type: FAILURE_HIDE_RETRY, failure }
      expect(hideFailureRetry(failure)).toEqual(expectedAction)
    })
  })

  describe('#performFailureRetry', () => {
    describe('when it succeeds', () => {
      let failure, store
      beforeEach(() => {
        failure = { id: 1 }
        store = mockStore({})
        // Stub the retry endpoint to return a successful acknowledgement.
        Mappersmith.Env.Fixture
          .define('post')
          .matching({ url: `/api/v1/failures/${failure.id}/retry` })
          .response({ acknowledged: true })
      })

      // The thunk is expected to dispatch, in order:
      // REQUEST -> RECEIVE -> HIDE_RETRY -> flash -> HIDE_OVERVIEW -> DELETE.
      it('creates REQUEST and RECEIVE actions', (done) => {
        store.dispatch(performFailureRetry(failure)).then(() => {
          const actions = store.getActions()
          expect(actions[0]).toEqual({ type: REQUEST_FAILURE_RETRY, failure })
          expect(actions[1]).toEqual({ type: RECEIVE_FAILURE_RETRY, failure, acknowledged: true })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })

      it('creates an action to hide the failure retry', (done) => {
        store.dispatch(performFailureRetry(failure)).then(() => {
          const actions = store.getActions()
          expect(actions[2]).toEqual({ type: FAILURE_HIDE_RETRY, failure })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })

      it('create an action to add a success flash message', (done) => {
        store.dispatch(performFailureRetry(failure)).then(() => {
          const actions = store.getActions()
          expect(actions[3]).toEqual({ type: ADD_FLASH_MESSAGE, message: {
            id: jasmine.any(String),
            type: 'success',
            text: 'Failure retried with success. Acknowledged: true',
            autoClose: true
          }})
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })

      it('creates an action to hide the failure overview', (done) => {
        store.dispatch(performFailureRetry(failure)).then(() => {
          const actions = store.getActions()
          expect(actions[4]).toEqual({ type: FAILURE_HIDE_OVERVIEW, failure: failure })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })

      it('creates an action to delete the failure from state', (done) => {
        store.dispatch(performFailureRetry(failure)).then(() => {
          const actions = store.getActions()
          expect(actions[5]).toEqual({ type: DELETE_FAILURE, failure: failure })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })
    })

    describe('when it fails', () => {
      let store, failure
      beforeEach(() => {
        failure = { id: 1 }
        store = mockStore({})
        // Stub the retry endpoint to fail with a JSON error payload.
        Mappersmith.Env.Fixture
          .define('post')
          .matching({ url: `/api/v1/failures/${failure.id}/retry` })
          .failure()
          .response({
            responseText: JSON.stringify({
              error: true,
              message: 'some error'
            })
          })
      })

      // Failure path dispatch order:
      // REQUEST -> HIDE_RETRY -> error flash -> REQUEST_FAILED.
      it('creates REQUEST and REQUEST_FAILED actions', (done) => {
        store.dispatch(performFailureRetry(failure)).then(() => {
          const actions = store.getActions()
          expect(actions[0]).toEqual({ type: REQUEST_FAILURE_RETRY, failure })
          expect(actions[1]).toEqual({ type: FAILURE_HIDE_RETRY, failure })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })

      it('creates an action to add an error flash message', (done) => {
        store.dispatch(performFailureRetry(failure)).then(() => {
          const actions = store.getActions()
          expect(actions[2]).toEqual({ type: ADD_FLASH_MESSAGE, message: {
            id: jasmine.any(String),
            type: 'error',
            text: 'Failure retried with error: some error',
            autoClose: false
          }})
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })

      it('creates an action to clear the loading status', (done) => {
        store.dispatch(performFailureRetry(failure)).then(() => {
          const actions = store.getActions()
          expect(actions[3]).toEqual({ type: REQUEST_FAILURE_RETRY_FAILED, failure })
          done()
        })
        .catch((e) => done.fail(`test failed with promise error: ${e.message}`))
      })
    })
  })
})
|
<filename>opendnp3/APL/PhysicalLayerMonitor.h
//
// Licensed to Green Energy Corp (www.greenenergycorp.com) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Green Energy Corp licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
#ifndef __PHYSICAL_LAYER_MONITOR_H_
#define __PHYSICAL_LAYER_MONITOR_H_
#include <opendnp3/APL/IHandlerAsync.h>
#include <opendnp3/APL/IPhysicalLayerObserver.h>
#include <opendnp3/APL/ITimerSource.h>
#include <opendnp3/APL/Lock.h>
#include <set>
namespace apl
{
class IPhysicalLayerAsync;
class IMonitorState;
class IPhysicalLayerObserver;
/** Manages the lifecycle of a physical layer: an open/close/retry state
 *  machine driven by timer callbacks. Concrete subclasses receive the
 *  open-success / open-failure / close notifications.
 */
class PhysicalLayerMonitor : public IHandlerAsync
{
	// State classes mutate the monitor through this friendship.
	friend class MonitorStateActions;

public:
	PhysicalLayerMonitor(Logger*, IPhysicalLayerAsync*, ITimerSource*, millis_t aOpenRetry);
	~PhysicalLayerMonitor();

	/** Begin monitor execution, retry indefinitely on failure - Idempotent*/
	void Start();

	/** Begin monitor execution, don't reconnect automatically on failure - Idempotent*/
	void StartOne();

	/** Close the physical layer if it's open */
	void Close();

	/** Close the physical layer and don't try until someone calls Start() */
	void Suspend();

	/** Permanently shutdown the monitor, further calls to Start() will do nothing - Idempotent */
	void Shutdown();

	/** Current state of the underlying physical layer. */
	PhysicalLayerState GetState();

	/** Add an observer to the set of state callbacks */
	void AddObserver(IPhysicalLayerObserver* apObserver);

	/** Blocks until the monitor has permanently stopped or the timeout expires.
		@param aTimeoutMS Timeout in milliseconds, < 0 waits forever
		@return True if the shutdown condition was met, false otherwise
	*/
	bool WaitForShutdown(millis_t aTimeoutMs = -1);

	Logger* GetLogger() {
		return mpLogger;
	}

protected:

	// Notifications delivered to the concrete subclass.
	virtual void OnPhysicalLayerOpenSuccessCallback() = 0;
	virtual void OnPhysicalLayerOpenFailureCallback() = 0;
	virtual void OnPhysicalLayerCloseCallback() = 0;

	/// Begins the open timer
	void StartOpenTimer();

	// Physical layer being managed (not owned).
	IPhysicalLayerAsync* mpPhys;

private:

	ITimerSource* mpTimerSrc;
	ITimer* mpOpenTimer;
	IMonitorState* mpState;
	bool mFinalShutdown;

	/* --- Actions for the states to call --- */

	/// Internal function used to change the state
	void ChangeState(IMonitorState* apState);

	/// Internal callback when open timer expires
	void OnOpenTimerExpiration();

	/// Cancels the open timer
	void CancelOpenTimer();

	/* --- Internal helper functions --- */

	void DoFinalShutdown();

	// Guards state transitions and the shutdown wait.
	SigLock mLock;
	// Delay between reconnect attempts.
	const millis_t M_OPEN_RETRY;

	// Implement from IHandlerAsync - Try to reconnect using a timer
	void _OnOpenFailure();
	void _OnLowerLayerUp();
	void _OnLowerLayerDown();

	typedef std::set<IPhysicalLayerObserver*> ObserverSet;
	ObserverSet mObservers;
};
}
#endif
|
<reponame>nimbus-cloud/cli<filename>src/cf/net/gateway.go
package net
import (
"cf"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"runtime"
"strings"
"time"
)
const (
	// Error code the backend returns when the OAuth token has expired.
	INVALID_TOKEN_CODE = "GATEWAY INVALID TOKEN CODE"
	// Terminal job states reported by the async job endpoint.
	JOB_FINISHED = "finished"
	JOB_FAILED   = "failed"
	// Default delay between polls of an async job.
	DEFAULT_POLLING_THROTTLE = 5 * time.Second
	// Overall deadline for waiting on an async job to complete.
	ASYNC_REQUEST_TIMEOUT = 20 * time.Second
)

// JobEntity carries the status string of an async server-side job.
type JobEntity struct {
	Status string
}

// JobResponse is the JSON envelope returned by the job polling endpoint.
type JobResponse struct {
	Entity JobEntity
}

// AsyncMetadata holds the URL at which an async job can be polled.
type AsyncMetadata struct {
	Url string
}

// AsyncResponse is the JSON envelope returned by async create/delete calls.
type AsyncResponse struct {
	Metadata AsyncMetadata
}

// errorResponse is the parsed form of a non-2xx server reply.
type errorResponse struct {
	Code           string
	Description    string
	ResponseHeader string
	ResponseBody   string
}

// errorHandler turns a raw HTTP response into a structured errorResponse.
type errorHandler func(*http.Response) errorResponse

// tokenRefresher can obtain a fresh auth token when the current one expires.
type tokenRefresher interface {
	RefreshAuthToken() (string, ApiResponse)
}

// Request pairs an http.Request with a rewindable body so it can be retried.
type Request struct {
	HttpReq      *http.Request
	SeekableBody io.ReadSeeker
}

// Gateway performs authenticated JSON requests, transparently refreshing
// tokens and optionally polling async jobs to completion.
type Gateway struct {
	authenticator   tokenRefresher
	errHandler      errorHandler
	PollingEnabled  bool
	PollingThrottle time.Duration
}

// newGateway constructs a Gateway with the default polling throttle.
func newGateway(errHandler errorHandler) (gateway Gateway) {
	gateway.errHandler = errHandler
	gateway.PollingThrottle = DEFAULT_POLLING_THROTTLE
	return
}

// SetTokenRefresher installs the authenticator used on INVALID_TOKEN retries.
func (gateway *Gateway) SetTokenRefresher(auth tokenRefresher) {
	gateway.authenticator = auth
}

// GetResource GETs url and unmarshals the JSON body into resource.
func (gateway Gateway) GetResource(url, accessToken string, resource interface{}) (apiResponse ApiResponse) {
	request, apiResponse := gateway.NewRequest("GET", url, accessToken, nil)
	if apiResponse.IsNotSuccessful() {
		return
	}
	_, apiResponse = gateway.PerformRequestForJSONResponse(request, resource)
	return
}

// CreateResource POSTs body to url, discarding the response payload.
func (gateway Gateway) CreateResource(url, accessToken string, body io.ReadSeeker) (apiResponse ApiResponse) {
	return gateway.createUpdateOrDeleteResource("POST", url, accessToken, body, nil)
}

// CreateResourceForResponse POSTs body to url and unmarshals the reply.
func (gateway Gateway) CreateResourceForResponse(url, accessToken string, body io.ReadSeeker, resource interface{}) (apiResponse ApiResponse) {
	return gateway.createUpdateOrDeleteResource("POST", url, accessToken, body, resource)
}

// UpdateResource PUTs body to url, discarding the response payload.
func (gateway Gateway) UpdateResource(url, accessToken string, body io.ReadSeeker) (apiResponse ApiResponse) {
	return gateway.createUpdateOrDeleteResource("PUT", url, accessToken, body, nil)
}

// UpdateResourceForResponse PUTs body to url and unmarshals the reply.
func (gateway Gateway) UpdateResourceForResponse(url, accessToken string, body io.ReadSeeker, resource interface{}) (apiResponse ApiResponse) {
	return gateway.createUpdateOrDeleteResource("PUT", url, accessToken, body, resource)
}

// DeleteResource DELETEs url; an async job reply is awaited when polling is on.
func (gateway Gateway) DeleteResource(url, accessToken string) (apiResponse ApiResponse) {
	return gateway.createUpdateOrDeleteResource("DELETE", url, accessToken, nil, &AsyncResponse{})
}

// ListPaginatedResources walks every page starting at path, invoking cb for
// each decoded resource. Iteration stops early when cb returns false or any
// page request fails.
func (gateway Gateway) ListPaginatedResources(
	target string,
	accessToken string,
	path string,
	resource interface{},
	cb func(interface{}) bool) (apiResponse ApiResponse) {

	// An empty NextURL terminates the pagination loop.
	for path != "" {
		pagination := NewPaginatedResources(resource)

		apiResponse = gateway.GetResource(fmt.Sprintf("%s%s", target, path), accessToken, &pagination)
		if apiResponse.IsNotSuccessful() {
			return
		}

		resources, err := pagination.Resources()
		if err != nil {
			return NewApiResponseWithError("Error parsing JSON", err)
		}

		for _, resource := range resources {
			if !cb(resource) {
				return
			}
		}

		path = pagination.NextURL
	}

	return
}
// createUpdateOrDeleteResource issues a mutating request (POST/PUT/DELETE).
// When resource is nil the response payload is discarded; otherwise the JSON
// reply is decoded into it, via the async-polling path when PollingEnabled.
func (gateway Gateway) createUpdateOrDeleteResource(verb, url, accessToken string, body io.ReadSeeker, resource interface{}) (apiResponse ApiResponse) {
	request, apiResponse := gateway.NewRequest(verb, url, accessToken, body)
	if apiResponse.IsNotSuccessful() {
		return
	}

	if resource == nil {
		return gateway.PerformRequest(request)
	}

	if gateway.PollingEnabled {
		_, apiResponse = gateway.PerformPollingRequestForJSONResponse(request, resource, ASYNC_REQUEST_TIMEOUT)
	} else {
		_, apiResponse = gateway.PerformRequestForJSONResponse(request, resource)
	}
	return
}
// NewRequest builds an authenticated JSON request. Any body is rewound to the
// start so it can be re-sent on retry; for *os.File bodies the Content-Length
// header is derived from the file size.
func (gateway Gateway) NewRequest(method, path, accessToken string, body io.ReadSeeker) (req *Request, apiResponse ApiResponse) {
	if body != nil {
		body.Seek(0, 0)
	}

	request, err := http.NewRequest(method, path, body)
	if err != nil {
		apiResponse = NewApiResponseWithError("Error building request", err)
		return
	}

	// An empty token means an unauthenticated request.
	if accessToken != "" {
		request.Header.Set("Authorization", accessToken)
	}
	request.Header.Set("accept", "application/json")
	request.Header.Set("content-type", "application/json")
	request.Header.Set("User-Agent", "go-cli "+cf.Version+" / "+runtime.GOOS)

	if body != nil {
		switch v := body.(type) {
		case *os.File:
			fileStats, err := v.Stat()
			if err != nil {
				// Best effort: fall back to chunked transfer when Stat fails.
				break
			}
			request.ContentLength = fileStats.Size()
		}
	}

	req = &Request{HttpReq: request, SeekableBody: body}
	return
}

// PerformRequest executes the request, discarding the response payload.
// NOTE(review): the response body is not closed here — verify whether the
// underlying connection leaks on this path.
func (gateway Gateway) PerformRequest(request *Request) (apiResponse ApiResponse) {
	_, apiResponse = gateway.doRequestHandlingAuth(request)
	return
}

// PerformRequestForResponse executes the request and hands the raw
// *http.Response to the caller, who is responsible for closing its Body.
func (gateway Gateway) PerformRequestForResponse(request *Request) (rawResponse *http.Response, apiResponse ApiResponse) {
	return gateway.doRequestHandlingAuth(request)
}
// PerformRequestForResponseBytes executes the request and returns the raw
// response body bytes plus headers. The response body is always closed here,
// fixing a connection leak in the original (Go's net/http requires callers to
// close Response.Body, or the underlying connection cannot be reused/freed).
func (gateway Gateway) PerformRequestForResponseBytes(request *Request) (bytes []byte, headers http.Header, apiResponse ApiResponse) {
	rawResponse, apiResponse := gateway.doRequestHandlingAuth(request)
	if rawResponse != nil {
		// Body is non-nil on every non-error response per net/http docs.
		defer rawResponse.Body.Close()
	}
	if apiResponse.IsNotSuccessful() {
		return
	}

	bytes, err := ioutil.ReadAll(rawResponse.Body)
	if err != nil {
		apiResponse = NewApiResponseWithError("Error reading response", err)
	}

	headers = rawResponse.Header
	return
}
// PerformRequestForTextResponse executes the request and returns the body as
// a string along with the response headers.
func (gateway Gateway) PerformRequestForTextResponse(request *Request) (response string, headers http.Header, apiResponse ApiResponse) {
	bytes, headers, apiResponse := gateway.PerformRequestForResponseBytes(request)
	response = string(bytes)
	return
}

// PerformRequestForJSONResponse executes the request and unmarshals the JSON
// body into response. Bodies are ignored for statuses above 203 or when empty.
func (gateway Gateway) PerformRequestForJSONResponse(request *Request, response interface{}) (headers http.Header, apiResponse ApiResponse) {
	bytes, headers, apiResponse := gateway.PerformRequestForResponseBytes(request)
	if apiResponse.IsNotSuccessful() {
		return
	}

	if apiResponse.StatusCode > 203 || strings.TrimSpace(string(bytes)) == "" {
		return
	}

	err := json.Unmarshal(bytes, &response)
	if err != nil {
		apiResponse = NewApiResponseWithError("Invalid JSON response from server", err)
	}
	return
}

// PerformPollingRequestForJSONResponse issues the request with ?async=true,
// decodes the immediate JSON reply into response, and — if the reply contains
// a job URL — blocks until that job finishes, fails, or the timeout elapses.
func (gateway Gateway) PerformPollingRequestForJSONResponse(request *Request, response interface{}, timeout time.Duration) (headers http.Header, apiResponse ApiResponse) {
	// Ask the server to run the operation asynchronously.
	query := request.HttpReq.URL.Query()
	query.Add("async", "true")
	request.HttpReq.URL.RawQuery = query.Encode()

	bytes, headers, apiResponse := gateway.PerformRequestForResponseBytes(request)
	if apiResponse.IsNotSuccessful() {
		return
	}

	if apiResponse.StatusCode > 203 || strings.TrimSpace(string(bytes)) == "" {
		return
	}

	// The body is decoded twice: once into the caller's type, once into the
	// async envelope to discover the job polling URL.
	err := json.Unmarshal(bytes, &response)
	if err != nil {
		apiResponse = NewApiResponseWithError("Invalid JSON response from server", err)
		return
	}

	asyncResponse := &AsyncResponse{}

	err = json.Unmarshal(bytes, &asyncResponse)
	if err != nil {
		apiResponse = NewApiResponseWithError("Invalid async response from server", err)
		return
	}

	jobUrl := asyncResponse.Metadata.Url
	if jobUrl == "" {
		// Operation completed synchronously: nothing to poll.
		return
	}

	if !strings.Contains(jobUrl, "/jobs/") {
		// Metadata URL is not a job endpoint; treat as already done.
		return
	}

	// The job URL in the payload is path-only; rebuild an absolute URL.
	jobUrl = fmt.Sprintf("%s://%s%s", request.HttpReq.URL.Scheme, request.HttpReq.URL.Host, asyncResponse.Metadata.Url)
	apiResponse = gateway.waitForJob(jobUrl, request.HttpReq.Header.Get("Authorization"), timeout)

	return
}
// waitForJob polls jobUrl until the job reports "finished" or "failed", or
// the timeout elapses. Fixes in this revision: the ApiResponse from
// NewRequest was previously ignored, so a request-building failure caused a
// nil-pointer dereference on the next line; `for true` with an unreachable
// trailing return is replaced by the idiomatic bare `for`.
func (gateway Gateway) waitForJob(jobUrl, accessToken string, timeout time.Duration) (apiResponse ApiResponse) {
	startTime := time.Now()
	for {
		if time.Since(startTime) > timeout {
			apiResponse = NewApiResponseWithMessage("Error: timed out waiting for async job '%s' to finish", jobUrl)
			return
		}

		var request *Request
		request, apiResponse = gateway.NewRequest("GET", jobUrl, accessToken, nil)
		if apiResponse.IsNotSuccessful() {
			// Previously unchecked: a failed NewRequest left request nil.
			return
		}

		response := &JobResponse{}
		_, apiResponse = gateway.PerformRequestForJSONResponse(request, response)
		if apiResponse.IsNotSuccessful() {
			return
		}

		switch response.Entity.Status {
		case JOB_FINISHED:
			return
		case JOB_FAILED:
			apiResponse = NewApiResponse("Internal Server Error", "", 500)
			return
		}

		// Carry forward whatever token the (possibly refreshed) request used.
		accessToken = request.HttpReq.Header.Get("Authorization")

		time.Sleep(gateway.PollingThrottle)
	}
}
// doRequestHandlingAuth executes the request once; if the server rejects the
// token with INVALID_TOKEN_CODE and an authenticator is installed, it
// refreshes the token, rewinds the body, and retries exactly once.
func (gateway Gateway) doRequestHandlingAuth(request *Request) (rawResponse *http.Response, apiResponse ApiResponse) {
	httpReq := request.HttpReq

	if request.SeekableBody != nil {
		httpReq.Body = ioutil.NopCloser(request.SeekableBody)
	}

	// perform request
	rawResponse, apiResponse = gateway.doRequestAndHandlerError(request)
	if apiResponse.IsSuccessful() || gateway.authenticator == nil {
		return
	}

	// Only an expired/invalid token warrants a retry.
	if apiResponse.ErrorCode != INVALID_TOKEN_CODE {
		return
	}

	// refresh the auth token
	newToken, apiResponse := gateway.authenticator.RefreshAuthToken()
	if apiResponse.IsNotSuccessful() {
		return
	}

	// reset the auth token and request body
	httpReq.Header.Set("Authorization", newToken)
	if request.SeekableBody != nil {
		// Rewind so the retried request re-sends the body from the start.
		request.SeekableBody.Seek(0, 0)
		httpReq.Body = ioutil.NopCloser(request.SeekableBody)
	}

	// make the request again
	rawResponse, apiResponse = gateway.doRequestAndHandlerError(request)
	return
}

// doRequestAndHandlerError performs the HTTP round trip and maps any status
// above 299 through the gateway's errHandler into a failed ApiResponse.
func (gateway Gateway) doRequestAndHandlerError(request *Request) (rawResponse *http.Response, apiResponse ApiResponse) {
	rawResponse, err := doRequest(request.HttpReq)
	if err != nil {
		apiResponse = NewApiResponseWithError("Error performing request", err)
		return
	}

	if rawResponse.StatusCode > 299 {
		errorResponse := gateway.errHandler(rawResponse)
		message := fmt.Sprintf(
			"Server error, status code: %d, error code: %s, message: %s",
			rawResponse.StatusCode,
			errorResponse.Code,
			errorResponse.Description,
		)
		apiResponse = NewApiResponseWithHttpError(message, errorResponse.Code, rawResponse.StatusCode, errorResponse.ResponseHeader, errorResponse.ResponseBody)
	} else {
		apiResponse = NewApiResponseWithStatusCode(rawResponse.StatusCode)
	}

	return
}
|
<gh_stars>0
// Table-driven spec for the local formatNumber module: two-decimal
// formatting with thousands separators and M/B magnitude suffixes,
// executed with the `testpass` runner.
const formatNumber = require('.')
const tp = require('testpass')

// Each entry is [input, expected formatted string]. Note the expected values
// show truncation rather than rounding (e.g. 1.115 -> '1.11').
const tests = [
  [1, '1.00'],
  [1.1, '1.10'],
  [1.11, '1.11'],
  [1.111, '1.11'],
  [1.115, '1.11'],
  [9.99, '9.99'],
  [9.995, '9.99'],
  // Hundreds
  [100, '100.00'],
  [100.1, '100.10'],
  [100.11, '100.11'],
  [100.001, '100.00'],
  [100.005, '100.00'],
  // Thousands
  [1000, '1,000.00'],
  [1000.1, '1,000.10'],
  [1000.11, '1,000.11'],
  [1000.111, '1,000.11'],
  // Millions
  [1e6, '1.00 M'],
  [1e6 + 1e5, '1.10 M'],
  [1e6 + 11e4, '1.11 M'],
  [1e6 + 111e3, '1.11 M'],
  [999e6 + 999e3, '999.99 M'],
  // Billions
  [1e9, '1.00 B'],
  [1e9 + 1e8, '1.10 B'],
  [1e9 + 11e7, '1.11 B'],
  [1e9 + 111e6, '1.11 B'],
  [999e9 + 999e6, '999.99 B'],
  // Trillions
  [1e12, '1,000.00 B'],
  [1e12 + 999e6, '1,000.99 B'],
  // Fractions
  [0.1, '0.10'],
  [0.11, '0.11'],
  [0.111, '0.11'],
  [0.115, '0.11'],
  [0.011, '0.011'],
  [0.0111, '0.0111'],
  [0.01111, '0.0111'],
  [0.01115, '0.0111'],
  [0.001111, '0.00111'],
  [0.0001111, '0.000111'],
  [0.000000111, ''],
]

// Register one testpass case per table row.
tests.forEach(([input, output]) => {
  tp.test(input + ` => '${output}'`, (t) => {
    t.eq(formatNumber(input), output)
  })
})
|
package com.klk.mobilefingerprint.data;
import com.klk.mobilefingerprint.models.Staff;
import java.util.ArrayList;
public class GlobalData {

    // Application-wide staff cache. NOTE(review): public static mutable state
    // bypasses the singleton below — consider making it an instance field.
    public static ArrayList<Staff> StaffList = new ArrayList<>();

    // Lazily created singleton instance. NOTE(review): initialisation is not
    // thread-safe — confirm this is only touched from a single (UI) thread.
    private static GlobalData instance;

    /**
     * Returns the lazily initialised singleton instance.
     */
    public static GlobalData getInstance() {
        if(null == instance) {
            instance = new GlobalData();
        }
        return instance;
    }

    /**
     * Fills {@code StaffList} with five placeholder Staff records; every
     * record reuses the same id (presumably demo data — verify intended).
     */
    public void loadStaffData(){
        for (int i = 0; i < 5; i++) {
            Staff staff = new Staff();
            staff.set_id(1081012380);
            staff.setName("Seseorang dengan Nama " + i);
            StaffList.add(staff);
        }
    }
}
|
/**
 * Returns the maximum sum over all contiguous subarrays of length {@code k},
 * using an O(n) sliding window instead of the original O(n*k) brute force
 * (which recomputed each window from scratch).
 *
 * @param arr input array
 * @param k   window size
 * @return the largest window sum, never below 0 (matching the original's
 *         0-initialised maximum, e.g. for all-negative input or k > length)
 */
public static int maxSumOfSubArray(int[] arr, int k) {
    int n = arr.length;
    if (k <= 0 || k > n) {
        // Original brute force never entered its loops here and returned 0.
        return 0;
    }
    // Sum of the first window.
    int windowSum = 0;
    for (int i = 0; i < k; i++) {
        windowSum += arr[i];
    }
    // Keep the original 0 floor so all-negative input still yields 0.
    int maxSum = Math.max(windowSum, 0);
    // Slide: add the entering element, drop the leaving one.
    for (int i = k; i < n; i++) {
        windowSum += arr[i] - arr[i - k];
        maxSum = Math.max(windowSum, maxSum);
    }
    return maxSum;
}
// Example usage (statements are not valid at class top level; shown for illustration):
// System.out.println(maxSumOfSubArray(new int[]{1, 4, 2, 10, 23, 3, 1, 0, 20}, 4));
// Output = 39  (the maximum sum of a subarray of size 4 is 39)
|
#!/bin/sh
# Phoronix-style installer: requires a system redis-server, builds
# memtier_benchmark from source, and generates the "redis" runner script.

# Record the install status: 0 if redis-server is on PATH, 2 otherwise.
if which redis-server > /dev/null 2>&1 ;
then
echo 0 > ~/install-exit-status
else
echo "ERROR: Redis server is not found on the system! No redis-server found in PATH."
echo 2 > ~/install-exit-status
fi

# Build the memtier_benchmark client from the bundled source tarball.
tar -xzf memtier_benchmark-1.3.0.tar.gz
cd memtier_benchmark-1.3.0
autoreconf -ivf
./configure
make -j $NUM_CPU_CORES
cd $HOME

# Generate the "redis" wrapper: starts a local redis-server, runs either
# memtier_benchmark or redis-benchmark against it, then cleans up and
# post-processes the log (all "\$" / \" escapes are expanded at run time,
# not while generating the file).
echo "#!/bin/bash
redis-server &
REDIS_SERVER_PID=\$!
sleep 15
if [[ \"\$1\" == \"memtier\" ]]; then
cd memtier_benchmark-1.3.0
./memtier_benchmark --ratio=\$2 -d 1024 --pipeline=8 --test-time=90 --key-pattern=R:R --key-minimum=1 --key-maximum=10000000 --distinct-client-seed -c 10 -t 8 -s 127.0.0.1 -p 6379 --out-file=\$LOG_FILE
else
redis-benchmark -t \$2 -n 10000000 -P 16 --csv > \$LOG_FILE 2>&1
fi
kill \$REDIS_SERVER_PID
redis-server --version > ~/pts-footnote 2>/dev/null
sleep 30
rm -f dump.rdb
sed \"s/\\\"/ /g\" -i \$LOG_FILE
if [[ \"\$3\" == \"lpop\" ]]; then
sed -i '1d' \$LOG_FILE
else
sed -i '2d' \$LOG_FILE
fi" > redis
chmod +x redis
|
#!/bin/bash -l
# Preprocesses the COGS dataset with OpenNMT's preprocess.py, producing two
# binarized corpora (1-example and 100-example train splits) that share the
# same dev set and fixed source/target vocabularies.
export DATA_DIR=../data/cogs
source activate pytorch_p36
mkdir $DATA_DIR
python ../preprocess.py -train_src $DATA_DIR/train_source.txt -train_tgt $DATA_DIR/train_target.txt -valid_src $DATA_DIR/dev_source.txt -valid_tgt $DATA_DIR/dev_target.txt -save_data $DATA_DIR/1_example -src_seq_length 5000 -tgt_seq_length 5000 -src_vocab $DATA_DIR/source_vocab.txt -tgt_vocab $DATA_DIR/target_vocab.txt
python ../preprocess.py -train_src $DATA_DIR/train_100_source.txt -train_tgt $DATA_DIR/train_100_target.txt -valid_src $DATA_DIR/dev_source.txt -valid_tgt $DATA_DIR/dev_target.txt -save_data $DATA_DIR/100_example -src_seq_length 5000 -tgt_seq_length 5000 -src_vocab $DATA_DIR/source_vocab.txt -tgt_vocab $DATA_DIR/target_vocab.txt
|
<filename>src/academy/devonline/java/home_section001_classes/function_methods/Remove.java
/*
* Copyright 2022. http://devonline.academy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package academy.devonline.java.home_section001_classes.function_methods;
import java.util.Arrays;
/**
* Создать три метода removeInt, removeByIndex, indexOf
* Создать объект Remove удалить любое число из массива numbers
* asString() показывает элементы до count
* toString() тоже строковое представление вообще весь массив независимо от count
*
* @author devonline
* @link http://devonline.academy/java
*/
/**
 * Growable int container supporting append, value removal, and two string
 * views, as specified in the task header: {@link #asString()} shows only the
 * logical elements (up to {@code count}), while {@link #toString()} shows the
 * whole backing array regardless of {@code count}.
 *
 * Fixes in this revision: the previously missing {@code toString()} required
 * by the task description is added, and {@code removeByIndex} bounds its
 * shift by the logical size {@code count} instead of the physical array
 * length. Comments translated to English.
 */
public class Remove {
    // Backing storage; may be larger than the logical element count.
    private int[] numbers;
    // Number of elements actually stored.
    private int count;

    public static void main(String[] args) {
        // Simple manual tests.
        Remove rmv = new Remove();
        rmv.add(1);
        System.out.println(rmv.count);
        System.out.println(Arrays.toString(rmv.numbers));
        rmv.add(23);
        rmv.add(55);
        rmv.add(35);
        System.out.println(Arrays.toString(rmv.numbers));
        rmv.removeInt(1);
        System.out.println(Arrays.toString(rmv.numbers));
        rmv.removeInt(23);
        System.out.println(Arrays.toString(rmv.numbers));
        System.out.println(rmv.asString());
        rmv.removeInt(55);
        System.out.println(rmv.asString());
        System.out.println(Arrays.toString(rmv.numbers));
        rmv.removeInt(35);
        System.out.println(rmv.asString());
        System.out.println(rmv);
    }

    /**
     * Creates a container with the default capacity of 5.
     */
    public Remove() {
        this(5);
    }

    /**
     * Creates a container with the given initial capacity.
     *
     * @param size initial capacity of the backing array
     */
    public Remove(int size) {
        numbers = new int[size];
    }

    /**
     * Appends a value, doubling the backing array when it is full.
     *
     * @param value value to append
     */
    public void add(int value) {
        if (count == numbers.length) {
            grow(numbers.length == 0 ? 5 : numbers.length * 2);
        }
        numbers[count++] = value;
    }

    /**
     * Grows the backing array to the requested length, preserving content.
     *
     * @param length new physical length
     */
    private void grow(int length) {
        int[] newArray = new int[length];
        System.arraycopy(numbers, 0, newArray, 0, numbers.length);
        numbers = newArray;
    }

    /**
     * Removes the first occurrence of the given value.
     *
     * @param value value to remove
     * @return true if the value was present and removed, false otherwise
     */
    boolean removeInt(int value) {
        // indexOf yields the element's logical index, or -1 when absent.
        var index = indexOf(value);
        if (index != -1) {
            removeByIndex(index);
            return true;
        }
        return false;
    }

    /**
     * Removes the element at the given logical index. Removing the last
     * logical element needs no shifting; otherwise the tail is moved one
     * slot to the left.
     *
     * @param index logical index of the element to remove (0 <= index < count)
     */
    private void removeByIndex(int index) {
        // Bound by the logical size, not the physical array length.
        if (index < count - 1) {
            System.arraycopy(numbers, index + 1, numbers, index, count - 1 - index);
        }
        count--;
    }

    /**
     * Linear search over the logical elements only.
     *
     * @param value value to look for
     * @return logical index of the first match, or -1 if absent
     */
    private int indexOf(int value) {
        for (int i = 0; i < count; i++) {
            if (numbers[i] == value) {
                return i;
            }
        }
        return -1;
    }

    /**
     * @return string form of the logical elements only, e.g. "[1, 2]"
     */
    public String asString() {
        final StringBuilder stringBuilder = new StringBuilder().append('[');
        for (int i = 0; i < count; i++) {
            stringBuilder.append(numbers[i]);
            if (i < count - 1) {
                stringBuilder.append(',').append(' ');
            }
        }
        return stringBuilder.append(']').toString();
    }

    /**
     * @return string form of the entire backing array regardless of
     *         {@code count}, as required by the task description
     */
    @Override
    public String toString() {
        return Arrays.toString(numbers);
    }
}
|
package br.com.papyrus.controller;
import br.com.papyrus.model.ModelEditorasDAO;
import br.com.papyrus.model.ModelEditorasTableModel;
import br.com.papyrus.model.ModelEditorasVO;
import br.com.papyrus.view.ViewEditoras;
import static br.com.papyrus.view.ViewPrincipal.DesktopPrincipal;
import java.awt.Component;
import java.awt.event.ActionEvent;
import javax.swing.JDesktopPane;
import javax.swing.JInternalFrame;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
/**
 * Controller that creates and manages the AbstractTableModel for publishers
 * (Editoras).
 *
 * @author <NAME>.
 *
 * varAlterar: distinguishes insert from update in the AbstractTableModel —
 * false means a new record is being created, true means an existing publisher
 * record is being edited.
 *
 * telaEditoras: the ViewEditoras.java form holding the publisher's data.
 */
public final class ControllerEditoras {

    public ViewEditoras telaEditoras;
    private ModelEditorasTableModel tbModel;
    private boolean varAlterar = false;

    public ControllerEditoras() {
        telaEditoras = new ViewEditoras();
        DesktopPrincipal.add(telaEditoras);
        telaEditoras.show();
        carregarEditoras();
        // Wire the Save / Delete / Edit buttons to their handlers.
        telaEditoras.getBtnSalvar().addActionListener((ActionEvent ae) -> {
            salvarEditoras();
        });
        telaEditoras.getBtnExcluir().addActionListener((ActionEvent ae) -> {
            excluirEditoras();
        });
        telaEditoras.getBtnAlterar().addActionListener((ActionEvent ae) -> {
            alterarEditoras();
        });
    }

    public ControllerEditoras(JDesktopPane DesktopPrincipal) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    /**
     * Persists the form data through the AbstractTableModel. When varAlterar
     * is false a new publisher row is inserted; when true the selected row is
     * updated instead, after which varAlterar is reset.
     */
    public void salvarEditoras() {
        if (!varAlterar) {
            if (telaEditoras.getTxtNome().getText().equals("")) { // new record: name is mandatory
                JOptionPane.showMessageDialog(null, "Por favor preencha os dados !");
            } else {
                ModelEditorasVO editora = new ModelEditorasVO();
                editora.setNome(telaEditoras.getTxtNome().getText());
                editora.setLocalizacao(telaEditoras.getTxtLocalizacao().getText());
                ModelEditorasDAO editorasDAO = new ModelEditorasDAO();
                if (editorasDAO.inserirEditoras(editora)) {
                    JOptionPane.showMessageDialog(null, "Gravado com sucesso !");
                    carregarEditoras();
                } else {
                    JOptionPane.showMessageDialog(null, "Erro ao tentar gravar!\n" + "Algum campo tem valores inválidos.\n" + "Por favor corriga e tente novamente.");
                }
            }
        }
        if (varAlterar) {
            ModelEditorasVO editora = new ModelEditorasVO();
            editora.setNome(telaEditoras.getTxtNome().getText());
            editora.setLocalizacao(telaEditoras.getTxtLocalizacao().getText());
            ModelEditorasDAO editorasDAO = new ModelEditorasDAO();
            // The hidden id field identifies which row to update.
            editora.setId(Integer.valueOf(telaEditoras.getTxtId().getText()));
            if (editorasDAO.alterarEditoras(editora)) {
                JOptionPane.showMessageDialog(null, "Alterado com sucesso !");
                carregarEditoras();
            } else {
                JOptionPane.showMessageDialog(null, "Erro ao tentar gravar!\n" + "Algum campo tem valores inválidos.\n" + "Por favor corriga e tente novamente.");
            }
            varAlterar = false;
        }
        limparVariaveis(telaEditoras);
    }

    /**
     * Reloads all publishers from the DAO into the table model.
     */
    public void carregarEditoras() {
        ModelEditorasDAO editorasDAO = new ModelEditorasDAO();
        tbModel = new ModelEditorasTableModel(editorasDAO.listarEditoras());
        telaEditoras.setTbEditoras(tbModel);
    }

    /**
     * Deletes the publisher selected in the table, then reloads the model.
     */
    public void excluirEditoras() {
        if (telaEditoras.getTbEditoras().getSelectedRow() >= 0) { // only delete when a row is selected
            ModelEditorasDAO editorasDAO = new ModelEditorasDAO();
            ModelEditorasVO editora = tbModel.getEditoras(telaEditoras.getTbEditoras().getSelectedRow());
            editorasDAO.excluirEditoras(editora);
            carregarEditoras();
        }
    }

    /**
     * Loads the selected publisher into the form for editing and flips
     * varAlterar so the next save performs an update.
     */
    public void alterarEditoras() {
        if (telaEditoras.getTbEditoras().getSelectedRow() >= 0) { // only edit when a row is selected
            ModelEditorasDAO editorasDAO = new ModelEditorasDAO();
            ModelEditorasVO editora = tbModel.getEditoras(telaEditoras.getTbEditoras().getSelectedRow());
            telaEditoras.getTxtId().setText(String.valueOf(editora.getId()));
            telaEditoras.getTxtNome().setText(editora.getNome());
            telaEditoras.getTxtLocalizacao().setText(editora.getLocalizacao());
            //ViewEditoras.cmbCidades.setSelectedItem(editora.getLocalizacao());
            varAlterar = true;
        }
    }

    /**
     * Clears every JTextField on the given form (ViewEditoras.java).
     *
     * @param obj the JInternalFrame (form) whose text fields will be cleared
     */
    public void limparVariaveis(Object obj) {
        JInternalFrame tela = (JInternalFrame) (Object) obj;
        for (int i = 0; i < tela.getContentPane().getComponentCount(); i++) {
            // walk every component on the frame
            Component c = tela.getContentPane().getComponent(i);
            if (c instanceof JTextField) {
                // blank out the value
                JTextField field = (JTextField) c;
                field.setText("");
            }
        }
    }
}
|
<filename>Reference/qpc/html/search/variables_9.js
// Auto-generated Doxygen search index (variables) — do not edit by hand.
var searchData=
[
  ['l_5fidlethread_1235',['l_idleThread',['../qxk_8c.html#a0458880fea6279421c6acde673d48e3f',1,'qxk.c']]],
  ['l_5fmsm_5ftop_5fs_1236',['l_msm_top_s',['../qep__msm_8c.html#aae45de5c95eacc55233bf6773aab8049',1,'qep_msm.c']]],
  ['locfilter_1237',['locFilter',['../qs_8h.html#ac03d08cb94cbfb1adfcb711ef5bd9254',1,'QSPrivAttr']]],
  ['locfilter_5fap_1238',['locFilter_AP',['../qs_8h.html#a2acf9e9d049afcebbb4b4e4dea7ac067',1,'QSPrivAttr']]],
  ['lockholder_1239',['lockHolder',['../qk_8h.html#aff9b1151ed0af2d39ee445372f8208aa',1,'QK_PrivAttr::lockHolder()'],['../qxk_8h.html#aff9b1151ed0af2d39ee445372f8208aa',1,'QXK_PrivAttr::lockHolder()']]],
  ['locknest_1240',['lockNest',['../qxthread_8h.html#a68e931bdcab495826d4d0ec5f5f90941',1,'QXMutex']]],
  ['lockprio_1241',['lockPrio',['../qk_8h.html#a36bdd97177b7ac1ae7fa0d2a1ea433ab',1,'QK_PrivAttr::lockPrio()'],['../qxk_8h.html#a36bdd97177b7ac1ae7fa0d2a1ea433ab',1,'QXK_PrivAttr::lockPrio()']]]
];
|
// Loads a JSON file
// Fetch a JSON document via jQuery; returns the pending jqXHR promise.
function loadJSON(jsonUrl) {
    var request = {
        url: jsonUrl,
        dataType: "json"
    };
    return $.ajax(request);
}
//draw the map key with D3
// Draw the map key with D3.
// BUG FIX: the original drew all three color swatches at (0, 0) — so two of
// them were hidden under the first — and although label strings existed for
// the solo-male and solo-female categories, only the "married" label was
// ever appended to the SVG. Swatches are now stacked vertically and every
// category gets its label.
function drawKey() {
    // Key dimensions.
    var width = 250;
    var height = 500;
    var squareWidth = width / 10;
    var squareHeight = height / 20;
    var labelOffset = 5;
    // Get the key element from index and attach the SVG canvas.
    var key = d3.select("#key");
    var svg = key.append("svg")
        .attr("width", width)
        .attr("height", height);
    // One swatch + label per household category (colors match the map).
    var categories = [
        { label: "Married Family Homeowners", color: "rgb(24, 204, 37)" },
        { label: "Solo Male Homeowner", color: "rgb(48, 170, 178)" },
        { label: "Solo Female Homeowner", color: "rgb(255,20,147)" }
    ];
    categories.forEach(function(category, i) {
        var y = i * (squareHeight + labelOffset);
        svg.append("rect")
            .attr("x", 0)
            .attr("y", y)
            .attr("width", squareWidth)
            .attr("height", squareHeight)
            .style("fill", category.color);
        svg.append("text")
            .attr("x", squareWidth + labelOffset)
            .attr("y", y + 15)
            .text(category.label);
    });
}
//use the Json to make calculations on the data
// Use the census GeoJSON to compute, per census block, the share of total
// households headed by a married couple, a solo male, or a solo female.
// Returns [census, shares] so callers can index element 0 for the GeoJSON.
// BUG FIXES vs. the original:
//  - the per-feature loop sat OUTSIDE the function yet referenced its local
//    column-name variables (and an undefined `censusJson`), so it could
//    never run;
//  - `feature.propertiesp` was a typo for `feature.properties`;
//  - results were pushed to `averageHomes` but the array was declared as
//    `avgHomes`;
//  - the function returned nothing while its caller indexed the result;
//  - a dangling, incomplete `d3.scaleLinear().` statement was removed.
function parseCensusJson(census) {
    // ACS table B11001 column identifiers.
    var totalHomes = "ACS_13_5YR_B11001_with_ann_HD02_VD01";
    var marriedHome = "ACS_13_5YR_B11001_with_ann_HD01_VD03";
    var singleMale = "ACS_13_5YR_B11001_with_ann_HD01_VD05";
    var singleFemale = "ACS_13_5YR_B11001_with_ann_HD01_VD06";
    // Fraction of total households in the three categories, per feature
    // (features with zero such households are skipped).
    var avgHomes = [];
    census.features.forEach(function(feature) {
        var homes = Number(feature.properties[marriedHome]) +
                    Number(feature.properties[singleMale]) +
                    Number(feature.properties[singleFemale]);
        if (homes > 0) {
            avgHomes.push(homes / Number(feature.properties[totalHomes]));
        }
    });
    return [census, avgHomes];
}
$(document).ready(function(){
    // Makes the map (Mapbox/Leaflet), centred on Albuquerque, NM.
    L.mapbox.accessToken = "<KEY>";
    var map = L.mapbox.map('map', 'mapbox.streets');
    map.setView([35.08, -106.61], 11);
    // NOTE(review): `overlays` has no var/let declaration, so this creates an
    // implicit global — confirm whether other scripts rely on it before
    // scoping it locally.
    overlays = L.layerGroup().addTo(map);
    // Load the joined census-block GeoJSON, derive the plot data, and render.
    $.when(loadJSON("data/BernalilloCensusBlocks_Joined.json")).done(function(json) {
        var censusJson = parseCensusJson(json);
        //census blocks — element 0 is the GeoJSON itself
        var censusBlocks = L.mapbox.featureLayer().setGeoJSON(censusJson[0]).addTo(map);
        //draw the key
        drawKey();
    });
});
|
#!/usr/bin/env bash
# Smoke test for the vwifi virtual wireless driver:
# load dependencies, insert the module, bring the interface up, scan,
# connect, and verify the connected BSSID was among the scanned ones.
# Failure codes: 1 = cfg80211 probe failed, 2 = vwifi insert failed,
#                3 = connected BSSID not found in scan results.
export DEVENVROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
source $DEVENVROOT/scripts/cmn.sh
final_ret=0
probe_module cfg80211
if [ $? -ne 0 ]; then
    final_ret=1
fi
insert_module vwifi.ko
if [ $? -ne 0 ]; then
    final_ret=2
fi
if [ $final_ret -eq 0 ]; then
    sudo ip link set owl0 up
    # Collect scanned BSSIDs, upper-cased for comparison.
    # The [:lower:]/[:upper:] classes are now quoted so the shell cannot
    # glob-expand them against files in the working directory.
    sudo iw dev owl0 scan | grep -o -E '([[:xdigit:]]{1,2}:){5}[[:xdigit:]]{1,2}'| tr '[:lower:]' '[:upper:]' > scan_bssid.log
    sudo iw dev owl0 connect MyHomeWiFi
    iwconfig owl0 | grep -o -E '([[:xdigit:]]{1,2}:){5}[[:xdigit:]]{1,2}' > connected.log
    # NOTE(review): diff requires the two files to be identical, i.e. the scan
    # must contain exactly the connected BSSID — confirm this is intended
    # rather than a subset check (grep -f) when multiple APs are visible.
    DIFF=$(diff connected.log scan_bssid.log)
    if [ "$DIFF" != "" ]; then
        # BUG FIX: was `final_ret = 3` — with spaces, bash runs a command
        # named `final_ret` and the variable stays 0, so a BSSID mismatch
        # wrongly reported the test as PASSED.
        final_ret=3
    fi
fi
if [ $final_ret -eq 0 ]; then
    remove_module vwifi
    rm scan_bssid.log connected.log
    echo "==== Test PASSED ===="
    exit 0
fi
echo "FAILED Reason Code: $final_ret"
echo "==== Test FAILED ===="
exit $final_ret
|
#!/bin/bash
# Target environment from the first CLI argument:
# development | staging | production (anything else falls back to development).
env=$1
# Space-separated names of failed steps; non-empty means overall failure.
fails=""
# Record a failed step: if the exit code in $1 is non-zero, append the step
# name in $2 to the global $fails accumulator.
inspect() {
  if [ $1 -ne 0 ]; then
    fails="${fails} $2"
  fi
}
# Run client and server-side tests against the development compose stack.
# Each step's exit code is recorded via inspect(); the stack is torn down
# at the end regardless of results.
dev() {
  docker-compose up -d --build
  # Django unit tests
  docker-compose exec users python manage.py test
  inspect $? users
  # Formatting (black) and lint (flake8)
  docker-compose exec users black .
  inspect $? users-fix
  docker-compose exec users flake8 project
  inspect $? users-lint
  # Client test suite with coverage
  docker-compose exec client npm run coverage
  inspect $? client
  docker-compose down
}
# Run end-to-end tests with Cypress against the compose file for the given
# target ($1 = stage | prod), recreating the database first.
e2e() {
  docker-compose -f docker-compose-$1.yml up -d --build
  docker-compose -f docker-compose-$1.yml run users python manage.py recreate_db
  ./node_modules/.bin/cypress run --config baseUrl=http://localhost
  inspect $? e2e
  docker-compose -f docker-compose-$1.yml down
}
# Select the test suite based on the requested environment.
case "${env}" in
  development)
    echo "Running client and server-side tests!"
    dev
    ;;
  staging)
    echo "Running e2e tests!"
    e2e stage
    ;;
  production)
    echo "Running e2e tests!"
    e2e prod
    ;;
  *)
    echo "Running client and server-side tests!"
    dev
    ;;
esac
# Return proper code: non-empty $fails means at least one step failed.
if [ -n "${fails}" ]; then
  echo "Tests failed: ${fails}"
  exit 1
else
  echo "Tests passed!"
  exit 0
fi
|
import axios from 'axios'
// Action type for replacing the single-user slice of state.
const GET_SINGLE_USER = 'GET_SINGLE_USER'

// Action creator: wraps a fetched user object in a GET_SINGLE_USER action.
function getSingleUser(user) {
  return {
    type: GET_SINGLE_USER,
    user: user
  }
}
// Thunk: fetch one user by id from the API and dispatch it into the store.
// Errors are logged rather than re-thrown, so a failed fetch leaves the
// current state untouched.
export const getSingleUserThunk = id => async dispatch => {
  try {
    const response = await axios.get(`/api/users/${id}`)
    const user = response.data
    dispatch(getSingleUser(user))
  } catch (error) {
    console.error(error)
  }
}
// Reducer for the single-user slice. Initial state is an empty array;
// a GET_SINGLE_USER action replaces the slice with the fetched user.
export const singleUserReducer = (state = [], action) => {
  if (action.type === GET_SINGLE_USER) {
    return action.user
  }
  return state
}

export default singleUserReducer
|
<gh_stars>1-10
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.model.metrics;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import java.util.HashMap;
import java.util.Map;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE;
import static com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.PUBLIC_ONLY;
/**
 * Atlas metrics container: a two-level map of metric group -> (metric name
 * -> value), with convenience accessors for adding and reading entries.
 */
@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown=true)
public class AtlasMetrics {
    // group key -> (metric key -> metric value); null until first populated
    private Map<String, Map<String, Object>> data;

    public AtlasMetrics() {
        setData(null);
    }

    public AtlasMetrics(Map<String, Map<String, Object>> data) {
        setData(data);
    }

    /** Copy constructor; a null source leaves data unset. */
    public AtlasMetrics(AtlasMetrics other) {
        if (other != null) {
            setData(other.getData());
        }
    }

    public Map<String, Map<String, Object>> getData() {
        return data;
    }

    public void setData(Map<String, Map<String, Object>> data) {
        this.data = data;
    }

    /** Adds (or overwrites) one metric value, creating maps lazily. */
    @JsonIgnore
    public void addMetric(String groupKey, String key, Object value) {
        Map<String, Map<String, Object>> all = this.data;
        if (all == null) {
            all = new HashMap<>();
        }
        all.computeIfAbsent(groupKey, k -> new HashMap<>()).put(key, value);
        setData(all);
    }

    /** Returns the metric as a Number, or null if absent or non-numeric. */
    @JsonIgnore
    public Number getNumericMetric(String groupKey, String key) {
        Object metric = getMetric(groupKey, key);
        return (metric instanceof Number) ? (Number) metric : null;
    }

    /** Returns the raw metric value, or null when group/key is missing. */
    @JsonIgnore
    public Object getMetric(String groupKey, String key) {
        Map<String, Map<String, Object>> all = this.data;
        if (all == null) {
            return null;
        }
        Map<String, Object> group = all.get(groupKey);
        if (group == null || group.isEmpty()) {
            return null;
        }
        return group.get(key);
    }
}
|
package com.imooc.o2o.service;
import com.imooc.o2o.BaseTest;
import org.junit.Test;
import redis.clients.jedis.Jedis;
public class RedisTest extends BaseTest {

    /**
     * Round-trips a key through Redis: set "name", read it back, print it.
     * FIX: the original called jedis.close() as a plain statement, so the
     * connection leaked whenever set/get threw; Jedis implements Closeable,
     * so try-with-resources guarantees the close.
     */
    @Test
    public void methodOne(){
        try (Jedis jedis = new Jedis("172.16.31.10", 6379)) {
            jedis.set("name", "xixi");
            String value = jedis.get("name");
            System.out.println(value);
        }
    }
}
|
<reponame>3Xpl0it3r/loki-operator<gh_stars>1-10
package v1alpha1
import metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:defaulter-gen=true
// Promtail defines a Promtail deployment managed by this operator.
type Promtail struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// Spec is the desired state of the Promtail deployment.
	Spec PromtailSpec `json:"spec"`
	// Status is the most recently observed state.
	Status PromtailStatus `json:"status"`
}
// PromtailSpec describes the specification of Promtail applications using kubernetes as a cluster manager
type PromtailSpec struct {
	// Image is the Promtail container image to deploy.
	Image string `json:"image"`
	// ConfigMap names the ConfigMap carrying the Promtail configuration.
	ConfigMap string `json:"configMap"`
	// Config is the inline Promtail configuration.
	Config PromtailConfig `json:"config"`
}
// PromtailStatus describes the current status of Promtail applications.
// TODO: status fields are not implemented yet.
type PromtailStatus struct {
	// todo, write your code
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// PromtailList carries a list of Promtail objects
type PromtailList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items holds the Promtail resources in this list response.
	Items []Promtail `json:"items"`
}
|
// Auto-generated Doxygen search index — do not edit by hand.
var searchData=
[
  ['ratelimiter',['RateLimiter',['../classserver_1_1RateLimiter.html',1,'server']]]
];
|
<reponame>christopherwallis/Fixate
import sys
import time
import re
from pubsub import pub
from fixate.core.common import TestList, TestClass
from fixate.core.exceptions import SequenceAbort, CheckFail
from fixate.core.ui import user_retry_abort_fail
STATUS_STATES = ["Idle", "Running", "Paused", "Finished", "Restart", "Aborted"]
class ContextStackNode:
    """Pairs a test list with the index of the item currently being visited."""

    def __init__(self, seq):
        self.index = 0
        if isinstance(seq, TestList):
            self.testlist = seq
        elif isinstance(seq, list):
            self.testlist = TestList(seq)

    def current(self):
        """Return the item at the cursor, promoting plain lists to TestList."""
        item = self.testlist[self.index]
        if isinstance(item, list) and not isinstance(item, TestList):
            # Replace the plain list in place so the promotion sticks.
            self.testlist[self.index] = TestList(item)
            item = self.testlist[self.index]
        return item
class ContextStack(list):
    """A list used as a stack of ContextStackNode wrappers."""

    def push(self, test):
        """Wrap *test* in a ContextStackNode and push it."""
        self.append(ContextStackNode(test))

    def top(self):
        """Return the most recently pushed node without removing it."""
        return self[-1]
def test_list_repr(test_list):
    """Flatten a (possibly nested) test list into a list of dicts.

    Each dict describes one entry with its dotted ``level`` string (e.g.
    ``"1.2"``), display name, type (``"test"`` or ``"list"``), skip flag and
    parent level. Traversal uses an explicit ContextStack rather than
    recursion.
    """
    # NOTE: these helpers close over `context` and `top` from the loop below.
    def levels_repr():
        return ".".join(str(x.index + 1) for x in context[1:])
    def curr_test_name():
        return top.current().test_desc
    def curr_test_skip():
        # TestLists themselves are never skipped; only TestClass has .skip.
        if isinstance(top.current(), TestList):
            return False
        return top.current().skip
    context = ContextStack()
    context.push(test_list)
    ret_list = []
    while context:
        top = context.top()
        if top.index >= len(top.testlist):  # Finished tests in the test list
            context.pop()
            if context:
                context.top().index += 1
        elif isinstance(top.current(), TestClass):
            ret_list.append({"level": levels_repr(), "test_name": curr_test_name(), "test_type": "test",
                             "test_skip": curr_test_skip(),
                             "parent": get_parent_level(levels_repr())})
            top.index += 1
        elif isinstance(top.current(), TestList):
            # Record the list itself, then descend into it.
            ret_list.append(
                {"level": levels_repr(), "test_name": curr_test_name(), "test_type": 'list',
                 "test_skip": curr_test_skip(),
                 "parent": get_parent_level(levels_repr())})
            context.push(top.current())
    return ret_list
def get_parent_level(level):
    """Return the parent of a dotted level string, or 'Top' for a root level.

    '3' -> 'Top'; '1.2.3' -> '1.2'.
    """
    if re.fullmatch(r'\d+', level):
        return 'Top'
    # Strip the final '.<digits>' segment to get the parent level.
    return re.sub(r'\.\d+$', '', level)
class Sequencer:
    """Drives execution of a tree of TestLists/TestClasses.

    Maintains a ContextStack cursor into the test tree, pass/fail/error
    counters, and a status state machine (see STATUS_STATES). Progress and
    results are broadcast over pubsub topics such as 'Test_Start',
    'Test_Complete' and 'Sequence_Complete'.
    """

    def __init__(self):
        self.tests = TestList()
        self._status = "Idle"
        self.active_test = None
        self.ABORT = False
        self.test_attempts = 0
        # Per-test check counters, reset at the start of each attempt.
        self.chk_fail = 0
        self.chk_pass = 0
        # Whole-sequence tallies.
        self.tests_failed = 0
        self.tests_passed = 0
        self.tests_errored = 0
        self.tests_skipped = 0
        self._skip_tests = set([])
        self.context = ContextStack()
        self.context_data = {}
        self.end_status = "N/A"
        # Sequencer behaviour. Don't ask the user when things go wrong, just mark tests as failed.
        # This does not change the behaviour of tests that call out to the user. They will still block as required.
        self.non_interactive = False

    def levels(self):
        """
        Get the current test context from the stack as a dotted index string
        (e.g. "1.2.3").
        :return:
        """
        # Load now pushes whole test list as opposed to extending
        return ".".join(str(x.index + 1) for x in self.context[1:])

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, val):
        # Validate, broadcast transitions, and derive end_status on finish.
        if val not in STATUS_STATES:
            raise ValueError("Invalid Sequencer Status")
        # Only if a change in status
        if val != self._status:
            pub.sendMessage('Sequence_Update', status=val)
            if self._status not in ["Paused"] and val in ["Running"]:
                pub.sendMessage('Sequence_Start')
            if val == "Restart":
                # "Restart" is transient; it is stored as "Running".
                self._status = "Running"
                pub.sendMessage('Sequence_Update', status="Running")
            elif val in ["Aborted", "Finished"]:
                self._status = val
                # Any error (or an abort) outranks failures, which outrank a pass.
                if self.tests_errored or val == "Aborted":
                    self.end_status = "ERROR"
                elif self.tests_failed:
                    self.end_status = "FAILED"
                else:
                    self.end_status = "PASSED"
                # This notifies other sections on the final
                pub.sendMessage('Sequence_Complete', status=self.end_status, passed=self.tests_passed,
                                failed=self.tests_failed, error=self.tests_errored, skipped=self.tests_skipped,
                                sequence_status=self._status)
            else:
                self._status = val

    def load(self, val):
        """Append a test (or list) to the root list and reset the cursor."""
        self.tests.append(val)
        self.context.push(self.tests)
        self.end_status = "N/A"

    def count_tests(self):
        """Get the total number of tests"""
        return sum(1 for test in test_list_repr(self.context[0].testlist) if test.get('test_type') == 'test')

    def tests_completed(self):
        """Count the number of tests completed up to the current cursor."""
        current_index = "0"
        search_index = 0
        # Rebuild the dotted level string of the current cursor position.
        for index, test in enumerate(self.context):
            if index == 0:
                pass
            elif index == 1:
                current_index = str(test.index + 1)
            else:
                current_index += "." + str(test.index + 1)
        try:
            # Walk the flattened tree counting tests until the cursor's level.
            for index, test in enumerate(test_list_repr(self.context[0].testlist)):
                if test.get('test_type') == 'test':
                    search_index += 1
                    if test.get('level') == current_index:
                        return search_index
        except IndexError:
            return 0
        return 0

    def get_tree(self):
        """Get the test tree as a list of [level, name] pairs."""
        tree = []
        tests = test_list_repr(self.context[0].testlist)
        for test in tests:
            if len(test.get('level')) != 0:
                tree.append(["", ""])
                tree[-1][0] = test.get('level')
                tree[-1][1] = test.get('test_name')
        return tree

    def run_sequence(self):
        """
        Runs the sequence from the beginning to end once
        :return:
        """
        self.status = "Running"
        try:
            self.run_once()
        finally:
            # Unwind the stack, giving every still-open TestList its exit().
            while self.context:
                top = self.context.top()
                if isinstance(top.current(), TestList):
                    top.current().exit()
                self.context.pop()

    def run_once(self):
        """
        Runs through the tests once as are pushed onto the context stack.
        Ie. One run through of the tests
        Once finished sets the status to Finished
        """
        while self.context:
            if self.status == "Running":
                try:
                    top = self.context.top()
                    if top.index >= len(top.testlist):  # Finished tests in the test list
                        self.context.pop()
                        pub.sendMessage("TestList_Complete", data=top.testlist, test_index=self.levels())
                        top.testlist.exit()
                        if self.context:
                            self.context.top().index += 1
                    elif isinstance(top.current(), TestClass):
                        if self.run_test():
                            top.index += 1
                        else:
                            if not self.retry_prompt():
                                # mark the test as failed and continue. else will loop and try again
                                self.tests_failed += 1
                                top.index += 1
                    elif isinstance(top.current(), TestList):
                        # Descend into a nested list after announcing it.
                        pub.sendMessage("TestList_Start", data=top.current(), test_index=self.levels())
                        top.current().enter()
                        self.context.push(top.current())
                    else:
                        raise SequenceAbort("Unknown Test Item Type")
                except BaseException as e:
                    # Any unhandled exception aborts the whole sequence.
                    pub.sendMessage("Test_Exception", exception=sys.exc_info()[1], test_index=self.levels())
                    pub.sendMessage("Sequence_Abort", exception=e)
                    self._handle_sequence_abort()
                    return
            elif self.status != "Aborted":
                # Paused (or similar): idle-poll until resumed or aborted.
                time.sleep(0.1)
            else:
                return
        self.status = "Finished"

    def run_test(self):
        """
        Runs the active test in the stack.
        Should only be called if the top of the stack is a TestClass
        :return: True if test passed, False if test failed or had an exception
        """
        active_test = self.context.top().current()
        active_test_status = "PENDING"
        pub.sendMessage("Test_Start", data=active_test, test_index=self.levels())
        if active_test.skip:
            self.tests_skipped += 1
            active_test_status = "SKIP"
            pub.sendMessage("Test_Skip", data=active_test, test_index=self.levels())
            pub.sendMessage("Test_Complete", data=active_test, test_index=self.levels(), status=active_test_status)
            return True
        attempts = 0
        abort_exceptions = [SequenceAbort, KeyboardInterrupt]
        abort_exceptions.extend(active_test.abort_exceptions)
        while True:
            attempts += 1
            # Retry exceeded test only when user is not involved in retry process
            try:
                if attempts > active_test.attempts and attempts != -1:
                    break
                self.chk_fail, self.chk_pass = 0, 0
                # Run the test
                try:
                    # set_up() every level from the root down before the test.
                    # NOTE(review): if self.context were empty, index_context
                    # would be unbound in the finally clause — confirm callers
                    # guarantee a non-empty context here.
                    for index_context, current_level in enumerate(self.context):
                        current_level.current().set_up()
                    active_test.test()
                finally:
                    # tear_down() in reverse order of set_up().
                    for current_level in self.context[index_context::-1]:
                        current_level.current().tear_down()
                if not self.chk_fail:
                    active_test_status = "PASS"
                    self.tests_passed += 1
                else:
                    active_test_status = "FAIL"
                    self.tests_failed += 1
                break
            except CheckFail:
                if self.ABORT:  # Program force quit
                    active_test_status = "ERROR"
                    raise SequenceAbort("Sequence Aborted")
                # Retry Logic for failed checks
                active_test_status = "FAIL"
            except tuple(abort_exceptions):
                if self.ABORT:  # Program force quit
                    active_test_status = "ERROR"
                    raise SequenceAbort("Sequence Aborted")
                pub.sendMessage("Test_Exception", exception=sys.exc_info()[1], test_index=self.levels())
                # Reset the attempt counter so a user-driven retry starts over.
                attempts = 0
                active_test_status = "ERROR"
                if not self.retry_prompt():
                    self.tests_errored += 1
                    break
            # Retry logic for exceptions
            except BaseException as e:
                active_test_status = "ERROR"
                if self.ABORT:  # Program force quit
                    raise SequenceAbort("Sequence Aborted")
                pub.sendMessage("Test_Exception", exception=sys.exc_info()[1], test_index=self.levels())
            # Retry Logic
            pub.sendMessage("Test_Retry", data=active_test, test_index=self.levels())
        pub.sendMessage("Test_Complete", data=active_test, test_index=self.levels(), status=active_test_status)
        return active_test_status == "PASS"

    def retry_prompt(self):
        """Prompt the user when something goes wrong.
        For retry return True, to fail return False and to abort raise and abort exception. Respect the
        non_interactive flag, which can be set by the command line option --non-interactive"""
        if self.non_interactive:
            return False
        status, resp = user_retry_abort_fail(msg="")
        if resp == "ABORT":
            raise SequenceAbort("Sequence Aborted By User")
        else:
            return resp == "RETRY"

    def _handle_sequence_abort(self):
        # Flip all the flags that stop the run loop and any in-flight test.
        self.status = "Aborted"
        self.ABORT = True
        self.test_running = False

    def check(self, chk, result):
        """Record a check result; a failing check raises CheckFail to end the test."""
        if result:
            self.chk_pass += 1
        else:
            self.chk_fail += 1
        pub.sendMessage("Check", passes=result, chk=chk,
                        chk_cnt=self.chk_pass + self.chk_fail, context=self.levels())
        if not result:
            raise CheckFail("Check function returned failure, aborting test")
        return result
|
#
# Copyright (C) 2021 Vaticle
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import typedb_protocol.common.answer_pb2 as answer_proto
from typedb.api.answer.numeric import Numeric
from typedb.common.exception import TypeDBClientException, BAD_ANSWER_TYPE, ILLEGAL_CAST
class _Numeric(Numeric):
    """Numeric query answer: exactly one of int, float, or NaN (both None)."""

    def __init__(self, int_value, float_value):
        self._int_value = int_value
        self._float_value = float_value

    @staticmethod
    def of(numeric_proto: answer_proto.Numeric):
        """Build a _Numeric from the protobuf oneof 'value' field."""
        value_case = numeric_proto.WhichOneof("value")
        if value_case == "long_value":
            return _Numeric(numeric_proto.long_value, None)
        if value_case == "double_value":
            return _Numeric(None, numeric_proto.double_value)
        if value_case == "nan":
            return _Numeric(None, None)
        raise TypeDBClientException.of(BAD_ANSWER_TYPE, value_case)

    def is_int(self):
        return self._int_value is not None

    def is_float(self):
        return self._float_value is not None

    def is_nan(self):
        # NaN is represented by having neither an int nor a float value.
        return not (self.is_int() or self.is_float())

    def as_float(self):
        if not self.is_float():
            raise TypeDBClientException.of(ILLEGAL_CAST, "float")
        return self._float_value

    def as_int(self):
        if not self.is_int():
            raise TypeDBClientException.of(ILLEGAL_CAST, "int")
        return self._int_value
|
package com.google.teampot.servlet.task;
import java.io.IOException;
import java.util.logging.Logger;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.teampot.Config;
import com.google.teampot.model.User;
import com.google.teampot.service.UserService;
public class ProvisionGroupServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;
    private static final Logger logger = Logger.getLogger(ProvisionGroupServlet.class.getSimpleName());

    /**
     * Task endpoint: provisions the Google group named by the APPS_GROUP
     * config entry. Does nothing when the entry is unset or blank.
     */
    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        String appGroup = Config.get(Config.APPS_GROUP);
        // isEmpty() replaces equals(""); null is checked first to avoid an NPE.
        if (appGroup == null || appGroup.isEmpty()) return;
        UserService.getInstance().provisionGroup(appGroup);
    }
}
|
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/** BOJ 1008: read two numbers A and B from one line and print A / B. */
public class Boj1008 {
    private static final String SPACE = " ";

    public static void main(String[] args) throws Exception {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer tokens = new StringTokenizer(reader.readLine(), SPACE);
        double dividend = Double.parseDouble(tokens.nextToken());
        double divisor = Double.parseDouble(tokens.nextToken());
        System.out.println(dividend / divisor);
    }
}
|
#!/bin/bash
# Flow snapshot test: query type-at-pos at fixed file/line/column positions
# and print each result after a label. The printf text must stay exactly as
# is — the expected-output file matches it byte for byte.
# import.js
printf "import.js:5:1 = "
assert_ok "$FLOW" type-at-pos import.js 5 1 --strip-root --pretty
# exports.js
printf "exports.js:3:24 = "
assert_ok "$FLOW" type-at-pos exports.js 3 24 --strip-root --pretty
printf "exports.js:5:25 = "
assert_ok "$FLOW" type-at-pos exports.js 5 25 --strip-root --pretty
# import_lib.js
printf "import_lib.js:7:8 = "
assert_ok "$FLOW" type-at-pos import_lib.js 7 8 --strip-root --pretty
printf "import_lib.js:7:25 (--expand-json-output) = "
assert_ok "$FLOW" type-at-pos import_lib.js 7 25 --strip-root --pretty --expand-json-output
# import_lib_named.js
printf "import_lib_named.js:3:15 (--expand-json-output) = "
assert_ok "$FLOW" type-at-pos import_lib_named.js 3 15 --strip-root --pretty --expand-json-output
printf "import_lib_named.js:3:27 (--expand-json-output) = "
assert_ok "$FLOW" type-at-pos import_lib_named.js 3 27 --strip-root --pretty --expand-json-output
# import-class-as-type.js
printf "import-class-as-type.js:8:13 = "
assert_ok "$FLOW" type-at-pos import-class-as-type.js 8 13 --strip-root --pretty
printf "import-class-as-type.js:9:13 = "
assert_ok "$FLOW" type-at-pos import-class-as-type.js 9 13 --strip-root --pretty
printf "import-class-as-type.js:10:13 = "
assert_ok "$FLOW" type-at-pos import-class-as-type.js 10 13 --strip-root --pretty
printf "import-class-as-type.js:11:13 = "
assert_ok "$FLOW" type-at-pos import-class-as-type.js 11 13 --strip-root --pretty
# import-default.js
printf "import-default.js:15:13 = "
assert_ok "$FLOW" type-at-pos import-default.js 15 13 --strip-root --pretty
printf "import-default.js:16:13 = "
assert_ok "$FLOW" type-at-pos import-default.js 16 13 --strip-root --pretty
printf "import-default.js:17:13 = "
assert_ok "$FLOW" type-at-pos import-default.js 17 13 --strip-root --pretty
printf "import-default.js:18:13 = "
assert_ok "$FLOW" type-at-pos import-default.js 18 13 --strip-root --pretty
printf "import-default.js:19:13 = "
assert_ok "$FLOW" type-at-pos import-default.js 19 13 --strip-root --pretty
printf "import-default.js:20:13 = "
assert_ok "$FLOW" type-at-pos import-default.js 20 13 --strip-root --pretty
printf "import-default.js:21:13 = "
assert_ok "$FLOW" type-at-pos import-default.js 21 13 --strip-root --pretty
# import-typeof-class.js
printf "import-typeof-class.js:6:16 "
assert_ok "$FLOW" type-at-pos import-typeof-class.js 6 16 --strip-root --pretty --expand-json-output
printf "import-typeof-class.js:7:16 "
assert_ok "$FLOW" type-at-pos import-typeof-class.js 7 16 --strip-root --pretty --expand-json-output
# module-export-0.js
printf "module-export-0.js:7:13 = "
assert_ok "$FLOW" type-at-pos module-export-0.js 7 13 --strip-root --pretty
# module-export-1.js
printf "module-export-1.js:3:8 = "
assert_ok "$FLOW" type-at-pos module-export-1.js 3 8 --strip-root --pretty
printf "module-export-1.js:4:9 = "
assert_ok "$FLOW" type-at-pos module-export-1.js 4 9 --strip-root --pretty
printf "module-export-1.js:7:10 = "
assert_ok "$FLOW" type-at-pos module-export-1.js 7 10 --strip-root --pretty
printf "module-export-1.js:8:11 = "
assert_ok "$FLOW" type-at-pos module-export-1.js 8 11 --strip-root --pretty
printf "module-export-1.js:10:9 = "
assert_ok "$FLOW" type-at-pos module-export-1.js 10 9 --strip-root --pretty
# module-import.js
printf "module-import.js:3:7 = "
assert_ok "$FLOW" type-at-pos module-import.js 3 7 --strip-root --pretty
# require-class.js
printf "require-class.js:5:16 = "
assert_ok "$FLOW" type-at-pos require-class.js 5 16 --strip-root --expand-json-output --pretty
printf "require-class.js:6:16 = "
assert_ok "$FLOW" type-at-pos require-class.js 6 16 --strip-root --expand-json-output --pretty
# test.js
printf "test.js:5:1 = "
assert_ok "$FLOW" type-at-pos test.js 5 1 --strip-root --pretty
printf "test.js:8:7 = "
assert_ok "$FLOW" type-at-pos test.js 8 7 --strip-root --pretty
printf "test.js:10:7 = "
assert_ok "$FLOW" type-at-pos test.js 10 7 --strip-root --pretty
printf "test.js:12:7 = "
assert_ok "$FLOW" type-at-pos test.js 12 7 --strip-root --pretty
printf "test.js:14:7 = "
assert_ok "$FLOW" type-at-pos test.js 14 7 --strip-root --pretty
|
package com.leetcode;
import java.util.Arrays;
public class Solution_1099 {
    /**
     * LeetCode 1099 — Two Sum Less Than K.
     * Returns the maximum sum of two distinct elements that is strictly
     * less than k, or -1 if no such pair exists.
     *
     * Two-pointer over the sorted array: when the pair sum is below k it is
     * a candidate and the left pointer advances; otherwise the right pointer
     * retreats.
     *
     * FIX: the original tracked the candidate via Math.abs(ans - k) versus
     * Math.abs(sum - k). Among sums below k the closest to k is simply the
     * largest, so Math.max is equivalent for non-negative sums and also
     * behaves correctly when pair sums are below -1 (where the abs
     * comparison wrongly kept the -1 sentinel).
     *
     * @param nums input array (sorted in place as a side effect of Arrays.sort)
     * @param k    exclusive upper bound on the pair sum
     * @return the best pair sum < k, or -1 when no pair qualifies
     */
    public int twoSumLessThanK(int[] nums, int k) {
        Arrays.sort(nums);
        int ans = -1;
        int left = 0;
        int right = nums.length - 1;
        while (left < right) {
            int sum = nums[left] + nums[right];
            if (sum < k) {
                // Largest sum below k is the best candidate so far.
                ans = Math.max(ans, sum);
                left++;
            } else {
                right--;
            }
        }
        return ans;
    }
}
|
package logging;
import java.util.logging.*;
/**
 * Thin wrapper around java.util.logging's global logger that prefixes every
 * message with a TAG and can be switched on/off per instance.
 */
public class LogUtils {
    private Logger logger;
    private String TAG = "";
    private boolean isEnabled = false;

    private LogUtils(){
        // prevent default constructor
    }

    /**
     * @param TAG    prefix prepended to every message
     * @param enable initial on/off state for this wrapper
     */
    public LogUtils(String TAG, boolean enable){
        this.TAG = TAG;
        this.isEnabled = enable;
        // NOTE(review): this wraps the process-wide global logger, so two
        // LogUtils instances share the same underlying Logger.
        logger = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
        /* logger.setUseParentHandlers(false);
        ConsoleHandler handler = new ConsoleHandler();
        logger.addHandler(handler);
        Formatter formatter = new MyFormatter();
        handler.setFormatter(formatter);*/
    }

    public void setEnable(boolean enable){
        this.isEnabled = enable;
    }

    // NOTE(review): each log method below mutates the shared logger's level
    // before logging; the level therefore "sticks" to whatever was logged
    // last, which can suppress or admit messages from other callers of the
    // global logger — confirm this is intended.
    /** Logs a WARNING-level message when enabled. */
    public void logW(String msg){
        if(isEnabled){
            logger.setLevel(Level.WARNING);
            logger.warning(TAG+"---> "+ msg);
        }
    }

    /** Logs an INFO-level message when enabled. */
    public void logI(String msg){
        if(isEnabled){
            logger.setLevel(Level.INFO);
            logger.info(TAG+"---> "+msg);
        }
    }

    /** Logs a SEVERE-level message when enabled. */
    public void logS(String msg){
        if(isEnabled){
            logger.setLevel(Level.SEVERE);
            logger.severe(TAG+"---> "+msg);
        }
    }

    /** Opens the logger to ALL levels and logs at FINEST, when enabled. */
    public void logAll(String msg){
        if(isEnabled){
            logger.setLevel(Level.ALL);
            logger.finest(TAG+"---> "+msg);
        }
    }

    /** Formatter that emits only the raw message plus a line separator. */
    static class MyFormatter extends Formatter {
        @Override
        public String format(LogRecord record) {
            StringBuilder builder = new StringBuilder();
            //builder.append(record.getLevel() + ": ");
            //builder.append(formatMessage(record));
            builder.append(record.getMessage());
            builder.append(System.lineSeparator());
            // pre-Java7: builder.append(System.getProperty('line.separator'));
            return builder.toString();
        }
    }
}
|
#!/bin/bash -
#===============================================================================
#
# FILE: repo_prepare.sh
#
# USAGE: ./repo_prepare.sh
#
# DESCRIPTION:
#
# OPTIONS: ---
# REQUIREMENTS: ---
# BUGS: ---
# NOTES: ---
# AUTHOR: Kacper Kowalski (kk), kacper.kowalski@lerta.energy
# ORGANIZATION: Lerta
# CREATED: 27.02.2019 12:31
# REVISION: ---
#===============================================================================
# Exit on first error, treat unset variables as an error, exit pipe after the first failure.
set -euo pipefail
# Create the build output tree (one subdirectory per build target).
mkdir -p ../build/{drv_test,fw,ul_test,url_test}
|
import { of, race, timeout } from '@tanbo/stream';
describe('race', () => {
  // Emits the value from whichever source fires first (here the 10ms timer
  // beats the 100ms one). Test name: "emits the earliest value".
  test('发送时间最近的值', done => {
    const unsub = race(timeout(100, 1), timeout(10, 2)).subscribe(value => {
      unsub.unsubscribe();
      expect(value).toBe(2)
      done()
    })
  })
  // Only the first value from the winning source is delivered.
  // Test name: "emits only one value".
  test('只发送一个值', done => {
    const arr: any[] = []
    race(of(1, 2)).subscribe({
      next(v) {
        arr.push(v)
      },
      complete() {
        expect(arr).toEqual([1])
        done()
      }
    })
  })
  // After unsubscribing, no further values are delivered.
  // Test name: "does not emit after unsubscribe".
  test('取消订阅后不再发送', done => {
    const arr: any[] = []
    const sub = race(timeout(1)).subscribe(value => {
      arr.push(value)
    })
    sub.unsubscribe()
    setTimeout(() => {
      expect(arr.length).toBe(0)
      done()
    }, 10)
  })
})
|
def process_experiment_data(block, ads_press, ads_amount, des_press, des_amount):
    """Assemble plotting metadata for one adsorption/desorption isotherm.

    BUG FIX: the original built the plot tuples with keyword-argument syntax
    inside tuple literals (``(x, y, 'o-', color='C0')``), which is a Python
    SyntaxError. Style options now travel in a trailing dict instead.

    :param block: dict of experiment metadata (material id, units, adsorptive,
        temperature) keyed by the ``_...`` field names used below
    :param ads_press: adsorption-branch pressures (list)
    :param ads_amount: adsorption-branch loadings (list)
    :param des_press: desorption-branch pressures (list)
    :param des_amount: desorption-branch loadings (list)
    :return: dict with plot series, axis labels and title, or None when the
        series are not lists or required metadata is missing
    """
    if not all(isinstance(lst, list) for lst in [ads_press, ads_amount, des_press, des_amount]):
        return None  # Invalid input format

    material_id = block.get('_sample_material_id')
    units_loading = block.get('_units_loading')
    units_pressure = block.get('_units_pressure')
    exptl_adsorptive = block.get('_exptl_adsorptive')
    exptl_temperature = block.get('_exptl_temperature')
    if not all([material_id, units_loading, units_pressure, exptl_adsorptive, exptl_temperature]):
        return None  # Missing required data in block dictionary

    # (x, y, matplotlib format string, style kwargs) — desorption points are
    # drawn open (white face) to distinguish the branch.
    adsorption_plot = (ads_press, ads_amount, 'o-', {'color': 'C0'})
    desorption_plot = (des_press, des_amount, 'o-', {'color': 'C0', 'markerfacecolor': 'white'})
    y_label = f'quantity adsorbed / {units_loading}'
    x_label = f'pressure / {units_pressure}'
    plot_title = f'{exptl_adsorptive} on {material_id} at {exptl_temperature}K'
    return {
        'material_id': material_id,
        'adsorption_plot': adsorption_plot,
        'desorption_plot': desorption_plot,
        'y_label': y_label,
        'x_label': x_label,
        'plot_title': plot_title
    }
|
struct CustomArray<T> {
    private var elements: [T]

    init(_ elements: [T]) {
        self.elements = elements
    }

    /// Returns a multi-line listing of the elements, each prefixed by its index.
    func prettyDescription() -> String {
        let rows = elements.enumerated().map { "\t\($0.offset): \($0.element)" }
        return (["\n["] + rows + ["]\n"]).joined(separator: "\n")
    }
}
// Example usage
let customArray = CustomArray([3, 7, 11])
print(customArray.prettyDescription())
|
import * as path from 'path';
import * as vscode from 'vscode';
/**
 * Tree item representing a script that exists only in the local workspace.
 * Clicking it opens the file via the built-in `vscode.open` command.
 */
export class OnlyLocalScriptItem extends vscode.TreeItem {
    contextValue = "onlyLocalScriptItem";

    constructor(public fileUri: vscode.Uri) {
        super("", vscode.TreeItemCollapsibleState.None);
        const fileName = path.basename(fileUri.fsPath);
        this.description = `${fileName} (only local)`;
        this.command = {
            title: "Open script",
            command: "vscode.open",
            arguments: [fileUri]
        };
    }
}
|
# SQLiteファイル作成スクリプト
# $ start -i /tmp/DatabasesDefine -o /tmp/DatabaseOutput
# -i: DBディレクトリとsqlファイルが存在するルートディレクトリパス
# -o: .sqlite3ファイルを出力するディレクトリパス
# Default both the input (SQL definitions) and output (.sqlite3) directories
# to the current working directory; -i/-o below override them.
InputDir=`pwd`
OutputDir=`pwd`
# Parse command-line options: -i <input dir>, -o <output dir>.
while getopts "i:o:" OPT
do
case $OPT in
i) InputDir="$OPTARG"; echo "InputDir: ${InputDir}"; ;;
o) OutputDir="$OPTARG"; echo "OutputDir: ${OutputDir}"; ;;
# NOTE(review): 'h' is not in the getopts optstring "i:o:", so this branch is
# unreachable; unknown options set OPT to '?' instead.
h) echo "Help";;
esac
done
# List the names of the immediate sub-directories of a directory.
# $1: path of the directory to list
# Echoes the names space-separated, with every '/' stripped.
# (refs: http://8ttyan.hatenablog.com/entry/2016/01/25/162456
#        http://yuyunko.hatenablog.com/entry/20110304/1299218939 )
function DirNames() {
    # BUG FIX: the original restored the working directory with an unquoted
    # `cd $path`, which breaks when the saved path contains spaces; also guard
    # against a failed `cd` so we never list the wrong directory.
    local prev=`pwd`
    cd "${1}" || return 1
    local names=`echo $(ls -d */ 2>/dev/null) | sed 's/\///g'`
    cd "$prev"
    echo $names
}
# Check whether a sqlite3 database file exists under $OutputDir.
# $1: database file name
# return: 1 if the file exists, 0 otherwise
function IsExistDbFile() {
    if [ -f "${OutputDir%/}/${1}" ]; then
        return 1
    else
        return 0
    fi
}
# Check whether a table exists inside a sqlite3 database file.
# $1: path of the DB file expected to contain the table
# $2: name of the table to look for
# return: 0 = does not exist, 1 = exists
# NOTE(review): this also echoes the full table list as a side effect, which
# callers rely on for logging only.
function IsExistTable() {
echo "tables: "
for tblnm in $(sqlite3 "${1}" ".tables"); do
echo "  ${tblnm}"
[ "${2}" = "${tblnm}" ] && return 1
done
return 0;
}
# Create every table defined under $InputDir and load optional TSV seed data.
# Layout: $InputDir/<db name>/<table name>.sql (+ optional <table name>.tsv);
# each <db name> directory becomes $OutputDir/<db name>.sqlite3.
function CreateTable() {
    local currentpath=`pwd`
    echo "databases: "
    # One sub-directory of $InputDir per database.
    for dbnm in $(DirNames "${InputDir}"); do
        echo "  ${dbnm}"
        local dbfilename="${dbnm}.sqlite3"
        local dbpath="${OutputDir%/}/${dbfilename}"
        cd "${InputDir%/}/${dbnm}" || continue
        # BUG FIX: the original used `ls -F | grep -v / | grep *.sql`; the
        # unquoted glob *.sql was expanded by the shell BEFORE grep ran, so
        # the grep pattern was whatever file names happened to match in the
        # current directory. Iterate the glob directly instead.
        for flnm in *.sql; do
            [ -f "${flnm}" ] || continue
            local tablename=`basename "${flnm}" ".sql"`
            local sqlpath="${InputDir%/}/${dbnm}/${flnm}"
            local tsvpath="${InputDir%/}/${dbnm}/${tablename}.tsv"
            echo "  ${sqlpath}"
            echo "tablename: ${tablename}"
            echo "sqlpath: ${sqlpath}"
            echo "tsvpath: ${tsvpath}"
            # IsExistTable returns 1 when the table already exists.
            IsExistTable "${dbpath}" "${tablename}"
            local IsExist=$?
            echo "IsExistTable: ${IsExist}"
            [ 0 -lt $IsExist ] && { echo "テーブルが既存のため処理をスキップします。"; continue; }
            # Run the DDL, then import seed data when a TSV file is present,
            # and dump the table contents for logging.
            sqlite3 "${dbpath}" < "${sqlpath}"
            [ -f "${tsvpath}" ] && sqlite3 "${dbpath}" ".mode tabs" ".import \"${tsvpath}\" ${tablename}"
            sqlite3 "${dbpath}" "select * from ${tablename};"
        done
    done
    cd "${currentpath}"
}
# Entry point: build every database/table found under InputDir.
CreateTable
|
<reponame>UrbanRiskSlumRedevelopment/Maya<gh_stars>0
import { Component, OnInit, ViewEncapsulation } from '@angular/core';
@Component({
selector: 'bootstrap-grid',
template: '<ng-content></ng-content>',
styles: [`
@-ms-viewport {
width: device-width;
}
html {
-webkit-box-sizing: border-box;
box-sizing: border-box;
-ms-overflow-style: scrollbar;
}
*,
*::before,
*::after {
-webkit-box-sizing: inherit;
box-sizing: inherit;
}
.col-1, .col-2, .col-3, .col-4, .col-5, .col-6, .col-7, .col-8, .col-9, .col-10, .col-11, .col-12, .col, .col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12, .col-sm, .col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12, .col-md, .col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12, .col-lg, .col-xl-1, .col-xl-2, .col-xl-3, .col-xl-4, .col-xl-5, .col-xl-6, .col-xl-7, .col-xl-8, .col-xl-9, .col-xl-10, .col-xl-11, .col-xl-12, .col-xl {
position: relative;
width: 100%;
min-height: 1px;
padding-right: 15px;
padding-left: 15px;
}
@media (min-width: 576px) {
.col-1, .col-2, .col-3, .col-4, .col-5, .col-6, .col-7, .col-8, .col-9, .col-10, .col-11, .col-12, .col, .col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12, .col-sm, .col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12, .col-md, .col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12, .col-lg, .col-xl-1, .col-xl-2, .col-xl-3, .col-xl-4, .col-xl-5, .col-xl-6, .col-xl-7, .col-xl-8, .col-xl-9, .col-xl-10, .col-xl-11, .col-xl-12, .col-xl {
padding-right: 15px;
padding-left: 15px;
}
}
@media (min-width: 768px) {
.col-1, .col-2, .col-3, .col-4, .col-5, .col-6, .col-7, .col-8, .col-9, .col-10, .col-11, .col-12, .col, .col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12, .col-sm, .col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12, .col-md, .col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12, .col-lg, .col-xl-1, .col-xl-2, .col-xl-3, .col-xl-4, .col-xl-5, .col-xl-6, .col-xl-7, .col-xl-8, .col-xl-9, .col-xl-10, .col-xl-11, .col-xl-12, .col-xl {
padding-right: 15px;
padding-left: 15px;
}
}
@media (min-width: 992px) {
.col-1, .col-2, .col-3, .col-4, .col-5, .col-6, .col-7, .col-8, .col-9, .col-10, .col-11, .col-12, .col, .col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12, .col-sm, .col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12, .col-md, .col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12, .col-lg, .col-xl-1, .col-xl-2, .col-xl-3, .col-xl-4, .col-xl-5, .col-xl-6, .col-xl-7, .col-xl-8, .col-xl-9, .col-xl-10, .col-xl-11, .col-xl-12, .col-xl {
padding-right: 15px;
padding-left: 15px;
}
}
@media (min-width: 1200px) {
.col-1, .col-2, .col-3, .col-4, .col-5, .col-6, .col-7, .col-8, .col-9, .col-10, .col-11, .col-12, .col, .col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12, .col-sm, .col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12, .col-md, .col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12, .col-lg, .col-xl-1, .col-xl-2, .col-xl-3, .col-xl-4, .col-xl-5, .col-xl-6, .col-xl-7, .col-xl-8, .col-xl-9, .col-xl-10, .col-xl-11, .col-xl-12, .col-xl {
padding-right: 15px;
padding-left: 15px;
}
}
.col {
-webkit-flex-basis: 0;
-ms-flex-preferred-size: 0;
flex-basis: 0;
-webkit-box-flex: 1;
-webkit-flex-grow: 1;
-ms-flex-positive: 1;
flex-grow: 1;
max-width: 100%;
}
.col-auto {
-webkit-box-flex: 0;
-webkit-flex: 0 0 auto;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: auto;
}
.col-1 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 8.333333%;
-ms-flex: 0 0 8.333333%;
flex: 0 0 8.333333%;
max-width: 8.333333%;
}
.col-2 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 16.666667%;
-ms-flex: 0 0 16.666667%;
flex: 0 0 16.666667%;
max-width: 16.666667%;
}
.col-3 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 25%;
-ms-flex: 0 0 25%;
flex: 0 0 25%;
max-width: 25%;
}
.col-4 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 33.333333%;
-ms-flex: 0 0 33.333333%;
flex: 0 0 33.333333%;
max-width: 33.333333%;
}
.col-5 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 41.666667%;
-ms-flex: 0 0 41.666667%;
flex: 0 0 41.666667%;
max-width: 41.666667%;
}
.col-6 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 50%;
-ms-flex: 0 0 50%;
flex: 0 0 50%;
max-width: 50%;
}
.col-7 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 58.333333%;
-ms-flex: 0 0 58.333333%;
flex: 0 0 58.333333%;
max-width: 58.333333%;
}
.col-8 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 66.666667%;
-ms-flex: 0 0 66.666667%;
flex: 0 0 66.666667%;
max-width: 66.666667%;
}
.col-9 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 75%;
-ms-flex: 0 0 75%;
flex: 0 0 75%;
max-width: 75%;
}
.col-10 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 83.333333%;
-ms-flex: 0 0 83.333333%;
flex: 0 0 83.333333%;
max-width: 83.333333%;
}
.col-11 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 91.666667%;
-ms-flex: 0 0 91.666667%;
flex: 0 0 91.666667%;
max-width: 91.666667%;
}
.col-12 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 100%;
-ms-flex: 0 0 100%;
flex: 0 0 100%;
max-width: 100%;
}
.pull-0 {
right: auto;
}
.pull-1 {
right: 8.333333%;
}
.pull-2 {
right: 16.666667%;
}
.pull-3 {
right: 25%;
}
.pull-4 {
right: 33.333333%;
}
.pull-5 {
right: 41.666667%;
}
.pull-6 {
right: 50%;
}
.pull-7 {
right: 58.333333%;
}
.pull-8 {
right: 66.666667%;
}
.pull-9 {
right: 75%;
}
.pull-10 {
right: 83.333333%;
}
.pull-11 {
right: 91.666667%;
}
.pull-12 {
right: 100%;
}
.push-0 {
left: auto;
}
.push-1 {
left: 8.333333%;
}
.push-2 {
left: 16.666667%;
}
.push-3 {
left: 25%;
}
.push-4 {
left: 33.333333%;
}
.push-5 {
left: 41.666667%;
}
.push-6 {
left: 50%;
}
.push-7 {
left: 58.333333%;
}
.push-8 {
left: 66.666667%;
}
.push-9 {
left: 75%;
}
.push-10 {
left: 83.333333%;
}
.push-11 {
left: 91.666667%;
}
.push-12 {
left: 100%;
}
.offset-1 {
margin-left: 8.333333%;
}
.offset-2 {
margin-left: 16.666667%;
}
.offset-3 {
margin-left: 25%;
}
.offset-4 {
margin-left: 33.333333%;
}
.offset-5 {
margin-left: 41.666667%;
}
.offset-6 {
margin-left: 50%;
}
.offset-7 {
margin-left: 58.333333%;
}
.offset-8 {
margin-left: 66.666667%;
}
.offset-9 {
margin-left: 75%;
}
.offset-10 {
margin-left: 83.333333%;
}
.offset-11 {
margin-left: 91.666667%;
}
@media (min-width: 576px) {
.col-sm {
-webkit-flex-basis: 0;
-ms-flex-preferred-size: 0;
flex-basis: 0;
-webkit-box-flex: 1;
-webkit-flex-grow: 1;
-ms-flex-positive: 1;
flex-grow: 1;
max-width: 100%;
}
.col-sm-auto {
-webkit-box-flex: 0;
-webkit-flex: 0 0 auto;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: auto;
}
.col-sm-1 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 8.333333%;
-ms-flex: 0 0 8.333333%;
flex: 0 0 8.333333%;
max-width: 8.333333%;
}
.col-sm-2 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 16.666667%;
-ms-flex: 0 0 16.666667%;
flex: 0 0 16.666667%;
max-width: 16.666667%;
}
.col-sm-3 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 25%;
-ms-flex: 0 0 25%;
flex: 0 0 25%;
max-width: 25%;
}
.col-sm-4 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 33.333333%;
-ms-flex: 0 0 33.333333%;
flex: 0 0 33.333333%;
max-width: 33.333333%;
}
.col-sm-5 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 41.666667%;
-ms-flex: 0 0 41.666667%;
flex: 0 0 41.666667%;
max-width: 41.666667%;
}
.col-sm-6 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 50%;
-ms-flex: 0 0 50%;
flex: 0 0 50%;
max-width: 50%;
}
.col-sm-7 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 58.333333%;
-ms-flex: 0 0 58.333333%;
flex: 0 0 58.333333%;
max-width: 58.333333%;
}
.col-sm-8 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 66.666667%;
-ms-flex: 0 0 66.666667%;
flex: 0 0 66.666667%;
max-width: 66.666667%;
}
.col-sm-9 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 75%;
-ms-flex: 0 0 75%;
flex: 0 0 75%;
max-width: 75%;
}
.col-sm-10 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 83.333333%;
-ms-flex: 0 0 83.333333%;
flex: 0 0 83.333333%;
max-width: 83.333333%;
}
.col-sm-11 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 91.666667%;
-ms-flex: 0 0 91.666667%;
flex: 0 0 91.666667%;
max-width: 91.666667%;
}
.col-sm-12 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 100%;
-ms-flex: 0 0 100%;
flex: 0 0 100%;
max-width: 100%;
}
.pull-sm-0 {
right: auto;
}
.pull-sm-1 {
right: 8.333333%;
}
.pull-sm-2 {
right: 16.666667%;
}
.pull-sm-3 {
right: 25%;
}
.pull-sm-4 {
right: 33.333333%;
}
.pull-sm-5 {
right: 41.666667%;
}
.pull-sm-6 {
right: 50%;
}
.pull-sm-7 {
right: 58.333333%;
}
.pull-sm-8 {
right: 66.666667%;
}
.pull-sm-9 {
right: 75%;
}
.pull-sm-10 {
right: 83.333333%;
}
.pull-sm-11 {
right: 91.666667%;
}
.pull-sm-12 {
right: 100%;
}
.push-sm-0 {
left: auto;
}
.push-sm-1 {
left: 8.333333%;
}
.push-sm-2 {
left: 16.666667%;
}
.push-sm-3 {
left: 25%;
}
.push-sm-4 {
left: 33.333333%;
}
.push-sm-5 {
left: 41.666667%;
}
.push-sm-6 {
left: 50%;
}
.push-sm-7 {
left: 58.333333%;
}
.push-sm-8 {
left: 66.666667%;
}
.push-sm-9 {
left: 75%;
}
.push-sm-10 {
left: 83.333333%;
}
.push-sm-11 {
left: 91.666667%;
}
.push-sm-12 {
left: 100%;
}
.offset-sm-0 {
margin-left: 0%;
}
.offset-sm-1 {
margin-left: 8.333333%;
}
.offset-sm-2 {
margin-left: 16.666667%;
}
.offset-sm-3 {
margin-left: 25%;
}
.offset-sm-4 {
margin-left: 33.333333%;
}
.offset-sm-5 {
margin-left: 41.666667%;
}
.offset-sm-6 {
margin-left: 50%;
}
.offset-sm-7 {
margin-left: 58.333333%;
}
.offset-sm-8 {
margin-left: 66.666667%;
}
.offset-sm-9 {
margin-left: 75%;
}
.offset-sm-10 {
margin-left: 83.333333%;
}
.offset-sm-11 {
margin-left: 91.666667%;
}
}
@media (min-width: 768px) {
.col-md {
-webkit-flex-basis: 0;
-ms-flex-preferred-size: 0;
flex-basis: 0;
-webkit-box-flex: 1;
-webkit-flex-grow: 1;
-ms-flex-positive: 1;
flex-grow: 1;
max-width: 100%;
}
.col-md-auto {
-webkit-box-flex: 0;
-webkit-flex: 0 0 auto;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: auto;
}
.col-md-1 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 8.333333%;
-ms-flex: 0 0 8.333333%;
flex: 0 0 8.333333%;
max-width: 8.333333%;
}
.col-md-2 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 16.666667%;
-ms-flex: 0 0 16.666667%;
flex: 0 0 16.666667%;
max-width: 16.666667%;
}
.col-md-3 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 25%;
-ms-flex: 0 0 25%;
flex: 0 0 25%;
max-width: 25%;
}
.col-md-4 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 33.333333%;
-ms-flex: 0 0 33.333333%;
flex: 0 0 33.333333%;
max-width: 33.333333%;
}
.col-md-5 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 41.666667%;
-ms-flex: 0 0 41.666667%;
flex: 0 0 41.666667%;
max-width: 41.666667%;
}
.col-md-6 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 50%;
-ms-flex: 0 0 50%;
flex: 0 0 50%;
max-width: 50%;
}
.col-md-7 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 58.333333%;
-ms-flex: 0 0 58.333333%;
flex: 0 0 58.333333%;
max-width: 58.333333%;
}
.col-md-8 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 66.666667%;
-ms-flex: 0 0 66.666667%;
flex: 0 0 66.666667%;
max-width: 66.666667%;
}
.col-md-9 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 75%;
-ms-flex: 0 0 75%;
flex: 0 0 75%;
max-width: 75%;
}
.col-md-10 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 83.333333%;
-ms-flex: 0 0 83.333333%;
flex: 0 0 83.333333%;
max-width: 83.333333%;
}
.col-md-11 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 91.666667%;
-ms-flex: 0 0 91.666667%;
flex: 0 0 91.666667%;
max-width: 91.666667%;
}
.col-md-12 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 100%;
-ms-flex: 0 0 100%;
flex: 0 0 100%;
max-width: 100%;
}
.pull-md-0 {
right: auto;
}
.pull-md-1 {
right: 8.333333%;
}
.pull-md-2 {
right: 16.666667%;
}
.pull-md-3 {
right: 25%;
}
.pull-md-4 {
right: 33.333333%;
}
.pull-md-5 {
right: 41.666667%;
}
.pull-md-6 {
right: 50%;
}
.pull-md-7 {
right: 58.333333%;
}
.pull-md-8 {
right: 66.666667%;
}
.pull-md-9 {
right: 75%;
}
.pull-md-10 {
right: 83.333333%;
}
.pull-md-11 {
right: 91.666667%;
}
.pull-md-12 {
right: 100%;
}
.push-md-0 {
left: auto;
}
.push-md-1 {
left: 8.333333%;
}
.push-md-2 {
left: 16.666667%;
}
.push-md-3 {
left: 25%;
}
.push-md-4 {
left: 33.333333%;
}
.push-md-5 {
left: 41.666667%;
}
.push-md-6 {
left: 50%;
}
.push-md-7 {
left: 58.333333%;
}
.push-md-8 {
left: 66.666667%;
}
.push-md-9 {
left: 75%;
}
.push-md-10 {
left: 83.333333%;
}
.push-md-11 {
left: 91.666667%;
}
.push-md-12 {
left: 100%;
}
.offset-md-0 {
margin-left: 0%;
}
.offset-md-1 {
margin-left: 8.333333%;
}
.offset-md-2 {
margin-left: 16.666667%;
}
.offset-md-3 {
margin-left: 25%;
}
.offset-md-4 {
margin-left: 33.333333%;
}
.offset-md-5 {
margin-left: 41.666667%;
}
.offset-md-6 {
margin-left: 50%;
}
.offset-md-7 {
margin-left: 58.333333%;
}
.offset-md-8 {
margin-left: 66.666667%;
}
.offset-md-9 {
margin-left: 75%;
}
.offset-md-10 {
margin-left: 83.333333%;
}
.offset-md-11 {
margin-left: 91.666667%;
}
}
@media (min-width: 992px) {
.col-lg {
-webkit-flex-basis: 0;
-ms-flex-preferred-size: 0;
flex-basis: 0;
-webkit-box-flex: 1;
-webkit-flex-grow: 1;
-ms-flex-positive: 1;
flex-grow: 1;
max-width: 100%;
}
.col-lg-auto {
-webkit-box-flex: 0;
-webkit-flex: 0 0 auto;
-ms-flex: 0 0 auto;
flex: 0 0 auto;
width: auto;
}
.col-lg-1 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 8.333333%;
-ms-flex: 0 0 8.333333%;
flex: 0 0 8.333333%;
max-width: 8.333333%;
}
.col-lg-2 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 16.666667%;
-ms-flex: 0 0 16.666667%;
flex: 0 0 16.666667%;
max-width: 16.666667%;
}
.col-lg-3 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 25%;
-ms-flex: 0 0 25%;
flex: 0 0 25%;
max-width: 25%;
}
.col-lg-4 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 33.333333%;
-ms-flex: 0 0 33.333333%;
flex: 0 0 33.333333%;
max-width: 33.333333%;
}
.col-lg-5 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 41.666667%;
-ms-flex: 0 0 41.666667%;
flex: 0 0 41.666667%;
max-width: 41.666667%;
}
.col-lg-6 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 50%;
-ms-flex: 0 0 50%;
flex: 0 0 50%;
max-width: 50%;
}
.col-lg-7 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 58.333333%;
-ms-flex: 0 0 58.333333%;
flex: 0 0 58.333333%;
max-width: 58.333333%;
}
.col-lg-8 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 66.666667%;
-ms-flex: 0 0 66.666667%;
flex: 0 0 66.666667%;
max-width: 66.666667%;
}
.col-lg-9 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 75%;
-ms-flex: 0 0 75%;
flex: 0 0 75%;
max-width: 75%;
}
.col-lg-10 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 83.333333%;
-ms-flex: 0 0 83.333333%;
flex: 0 0 83.333333%;
max-width: 83.333333%;
}
.col-lg-11 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 91.666667%;
-ms-flex: 0 0 91.666667%;
flex: 0 0 91.666667%;
max-width: 91.666667%;
}
.col-lg-12 {
-webkit-box-flex: 0;
-webkit-flex: 0 0 100%;
-ms-flex: 0 0 100%;
flex: 0 0 100%;
max-width: 100%;
}}
`],
encapsulation: ViewEncapsulation.None
})
export class BootstrapGridComponent implements OnInit {
constructor() { }
ngOnInit() {
}
}
|
/*
*
*/
package net.community.chest.ui.components.panel;
import java.awt.Component;
import java.awt.FlowLayout;
import javax.swing.Icon;
import javax.swing.JLabel;
import net.community.chest.awt.TypedComponentAssignment;
import net.community.chest.awt.attributes.Iconable;
import net.community.chest.awt.attributes.Titled;
import net.community.chest.awt.layout.FlowLayoutAlignment;
import net.community.chest.dom.proxy.XmlProxyConvertible;
import net.community.chest.lang.TypedValuesContainer;
import net.community.chest.swing.HAlignmentValue;
import net.community.chest.swing.component.label.BaseLabel;
import net.community.chest.ui.helpers.panel.PresetFlowLayoutPanel;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
 * <P>Copyright GPLv2</P>
 *
 * <P>Provides a panel that allows adding a {@link JLabel} to describe
 * some other displayed {@link Component}. The panel allows arranging the
 * label/component either horizontally or vertically and also define whether
 * the label is displayed first (or the component).</P>
 *
 * @param <C> The {@link Component} type being labeled
 * @author <NAME>.
 * @since Mar 12, 2009 8:38:46 AM
 */
public class LRLabeledComponent<C extends Component> extends PresetFlowLayoutPanel
        implements TypedValuesContainer<C>, TypedComponentAssignment<C>,
            Iconable, Titled {
    // Serialization version ID - keep stable; the class is Serializable via Swing.
    private static final long serialVersionUID = 5457988173257644169L;
    // Runtime {@link Class} of the labeled component type (generics are erased).
    private final Class<C> _valsClass;
    /*
     * @see net.community.chest.lang.TypedValuesContainer#getValuesClass()
     */
    @Override
    public final Class<C> getValuesClass ()
    {
        return _valsClass;
    }
    // The component being described by the label; may be null until assigned.
    private C _value;
    /*
     * @see net.community.chest.ui.helpers.TypedComponentAssignment#getAssignedValue()
     */
    @Override
    public C getAssignedValue ()
    {
        return _value;
    }
    /*
     * @see net.community.chest.ui.helpers.TypedComponentAssignment#setAssignedValue(java.lang.Object)
     */
    @Override
    public void setAssignedValue (C v)
    {
        _value = v;
    }
    /*
     * Returns the XML proxy used to configure this panel from an element,
     * or null when no element is supplied.
     * @see net.community.chest.ui.helpers.panel.HelperPanel#getPanelConverter(org.w3c.dom.Element)
     */
    @Override
    protected XmlProxyConvertible<?> getPanelConverter (Element elem)
    {
        return (null == elem) ? null : LRLabeledComponentReflectiveProxy.LRLBLCOMP;
    }
    // Alias for getAssignedValue(): the component the label describes.
    public C getLabeledComponent ()
    {
        return getAssignedValue();
    }
    // NOTE might have no effect if called after {@link #layoutComponent}
    public void setLabeledComponent (C c)
    {
        setAssignedValue(c);
    }
    // The describing label; lazily created by getLabel(true).
    private JLabel _lbl;
    /**
     * @param createIfNoExist if true, a {@link BaseLabel} is created and
     * cached when no label has been set yet
     * @return the label (possibly null when createIfNoExist is false)
     */
    protected JLabel getLabel (boolean createIfNoExist)
    {
        if ((null == _lbl) && createIfNoExist)
            _lbl = new BaseLabel();
        return _lbl;
    }
    public JLabel getLabel ()
    {
        return getLabel(false);
    }
    public void setLabel (JLabel lbl)
    {
        _lbl = lbl;
    }
    /*
     * The panel "title" is delegated to the label text.
     * @see net.community.chest.awt.attributes.Titled#getTitle()
     */
    @Override
    public String getTitle ()
    {
        final JLabel l=getLabel();
        return (null == l) ? null : l.getText();
    }
    /*
     * @see net.community.chest.awt.attributes.Titled#setTitle(java.lang.String)
     */
    @Override
    public void setTitle (final String t)
    {
        // getLabel(true) creates the label on demand, so l is never null here.
        final JLabel l=getLabel(true);
        if (l != null)
            l.setText((null == t) ? "" : t);
    }
    /*
     * The panel "icon" is delegated to the label icon.
     * @see net.community.chest.awt.attributes.Iconable#getIcon()
     */
    @Override
    public Icon getIcon ()
    {
        final JLabel l=getLabel();
        return (null == l) ? null : l.getIcon();
    }
    /*
     * @see net.community.chest.awt.attributes.Iconable#setIcon(javax.swing.Icon)
     */
    @Override
    public void setIcon (Icon i)
    {
        final JLabel l=getLabel(true);
        if (l != null)
            l.setIcon(i);
    }
    // When true the label is added AFTER the component (component first).
    private boolean _lblLastPos /* =false */;
    public boolean isLabelLastPos ()
    {
        return _lblLastPos;
    }
    // NOTE(review): changing this after layoutComponent() does not re-layout
    // the already-added children.
    public void setLabelLastPos (final boolean lblLastPos)
    {
        if (lblLastPos != isLabelLastPos())
            _lblLastPos = lblLastPos;
    }
    /**
     * Adds the label and the component to the panel in the order dictated
     * by {@link #isLabelLastPos()}, silently skipping null entries.
     */
    protected void layoutComponent (JLabel lbl, C comp)
    {
        final boolean lblLastPos=isLabelLastPos();
        final Component[] ca={ lblLastPos ? comp : lbl, lblLastPos ? lbl : comp };
        for (final Component c : ca)
        {
            if (null == c)
                continue;
            add(c);
        }
    }
    /*
     * @see net.community.chest.ui.helpers.panel.HelperPanel#layoutComponent()
     */
    @Override
    public void layoutComponent () throws RuntimeException
    {
        super.layoutComponent();
        final C comp=getLabeledComponent();
        final JLabel lbl=getLabel(true);
        layoutComponent(lbl, comp);
    }
    /**
     * Primary constructor - every other constructor funnels into this one.
     *
     * @param vc runtime class of the labeled component type - may NOT be null
     * @param v the labeled component (may be null)
     * @param l the describing label (may be null - created lazily on layout)
     * @param lblLastPos if true, the component is added before the label
     * @param fla flow-layout alignment (null = {@code DEFAULT_ALIGNMENT})
     * @param gap horizontal gap between label and component
     * @param doc optional XML configuration {@link Document}
     * @param autoLayout if true, {@link #layoutComponent()} is invoked at the end
     * @throws IllegalArgumentException if no values class specified
     */
    public LRLabeledComponent (Class<C> vc, C v, JLabel l, boolean lblLastPos, FlowLayoutAlignment fla, int gap, Document doc, boolean autoLayout)
    {
        super(new FlowLayout(((null == fla) ? DEFAULT_ALIGNMENT : fla).getAlignment(), gap, 0), doc, false /* delay auto-layout till initialized the value and its class */);
        if (null == (_valsClass=vc))
            throw new IllegalArgumentException("No values class specified");
        _lblLastPos = lblLastPos;
        _value = v;
        _lbl = l;
        if (autoLayout)
            layoutComponent();
    }
    // Convenience: builds the label from text/icon/alignment.
    public LRLabeledComponent (Class<C> vc, C v, String text, Icon icon, HAlignmentValue align, boolean lblLastPos, FlowLayoutAlignment fla, int gap, boolean autoLayout)
    {
        this(vc, v, new BaseLabel(text, icon, (null == align) ? Integer.MIN_VALUE : align.getAlignmentValue()), lblLastPos, fla, gap, null, autoLayout);
    }
    public LRLabeledComponent (Class<C> vc, C v, String text, Icon icon, HAlignmentValue align, boolean lblLastPos, FlowLayoutAlignment fla, int gap)
    {
        this(vc, v, text, icon, align, lblLastPos, fla, gap, true);
    }
    // Default horizontal gap (pixels) between label and component.
    public static final int DEFAULT_GAP=5;
    public LRLabeledComponent (Class<C> vc, C v, JLabel lbl, boolean lblLastPos, boolean autoLayout)
    {
        this(vc, v, lbl, lblLastPos, DEFAULT_ALIGNMENT, DEFAULT_GAP, null, autoLayout);
    }
    public LRLabeledComponent (Class<C> vc, C v, JLabel lbl)
    {
        this(vc, v, lbl, false, true);
    }
    public LRLabeledComponent (Class<C> vc, C v)
    {
        this(vc, v, (JLabel) null);
    }
    public LRLabeledComponent (Class<C> vc)
    {
        this(vc, null);
    }
    // Convenience constructors deriving the values class from the instance;
    // NOTE(review): they pass a null class when v is null, which the primary
    // constructor rejects with IllegalArgumentException.
    @SuppressWarnings("unchecked")
    public LRLabeledComponent (C v, String text, Icon icon, HAlignmentValue align, boolean lblLastPos, FlowLayoutAlignment fla, int gap, boolean autoLayout)
    {
        this((null == v) ? null : (Class<C>) v.getClass(), v, text, icon, align, lblLastPos, fla, gap, autoLayout);
    }
    public LRLabeledComponent (C v, String text, Icon icon, HAlignmentValue align, boolean lblLastPos, boolean autoLayout)
    {
        this(v, text, icon, align, lblLastPos, DEFAULT_ALIGNMENT, DEFAULT_GAP, autoLayout);
    }
    public LRLabeledComponent (C v, String text, Icon icon, HAlignmentValue align)
    {
        this(v, text, icon, align, false, true);
    }
    // Default horizontal alignment of the label text.
    public static final HAlignmentValue DEFAULT_LABEL_ALIGNMENT=HAlignmentValue.LEFT;
    public LRLabeledComponent (Class<C> vc, C v, String text, Icon icon, HAlignmentValue align, boolean lblLastPos, boolean autoLayout)
    {
        this(vc, v, text, icon, align, lblLastPos, DEFAULT_ALIGNMENT, DEFAULT_GAP, autoLayout);
    }
    public LRLabeledComponent (Class<C> vc, C v, String text, Icon icon, boolean lblLastPos, boolean autoLayout)
    {
        this(vc, v, text, icon, DEFAULT_LABEL_ALIGNMENT, lblLastPos, autoLayout);
    }
    public LRLabeledComponent (Class<C> vc, C v, String text, Icon icon)
    {
        this(vc, v, text, icon, false, true);
    }
    public LRLabeledComponent (Class<C> vc, C v, String text)
    {
        this(vc, v, text, null);
    }
    public LRLabeledComponent (C v, String text, Icon icon)
    {
        this(v, text, icon, DEFAULT_LABEL_ALIGNMENT, false, true);
    }
    public LRLabeledComponent (C v, String text)
    {
        this(v, text, null);
    }
    public LRLabeledComponent (C v, Icon icon)
    {
        this(v, null, icon);
    }
    @SuppressWarnings("unchecked")
    public LRLabeledComponent (C v, JLabel lbl, boolean lblLastPos, boolean autoLayout)
    {
        this((null == v) ? null : (Class<C>) v.getClass(), v, lbl, lblLastPos, DEFAULT_ALIGNMENT, DEFAULT_GAP, null, autoLayout);
    }
    public LRLabeledComponent (C v, JLabel lbl)
    {
        this(v, lbl, false, true);
    }
    public LRLabeledComponent (C v)
    {
        this(v, (String) null);
    }
}
|
#!/bin/sh
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2015 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
#------------------------------------------------------------------
# Copyright (c) 2008 by Cisco Systems, Inc. All Rights Reserved.
#
# This work is subject to U.S. and international copyright laws and
# treaties. No part of this work may be used, practiced, performed,
# copied, distributed, revised, modified, translated, abridged, condensed,
# expanded, collected, compiled, linked, recast, transformed or adapted
# without the prior written consent of Cisco Systems, Inc. Any use or
# exploitation of this work without authorization could subject the
# perpetrator to criminal and civil liability.
#------------------------------------------------------------------
#------------------------------------------------------------------
# This file contains the code to initialize the board
#------------------------------------------------------------------
# Ensure a file has the given permission bits, logging when a change is made.
# $1: file path
# $2: desired octal mode (e.g. 600)
changeFilePermissions() {
   # BUG FIX: $1/$2 were unquoted in the original ([ -e $1 ], stat -c %a $1,
   # chmod $2 $1), which breaks on paths containing spaces or glob
   # characters; every expansion is now quoted.
   if [ -e "$1" ]; then
        filepermission=$(stat -c %a "$1")
        if [ "$filepermission" -ne "$2" ]
        then
             chmod "$2" "$1"
             echo "[utopia][init] Modified File Permission to $2 for file - $1"
        fi
   else
       echo "[utopia][init] changeFilePermissions: file $1 doesn't exist"
   fi
}
echo "*******************************************************************"
echo "* "
echo "* Copyright (c) 2010 by Cisco Systems, Inc. All Rights Reserved. "
echo "* "
echo "*******************************************************************"
source /etc/utopia/service.d/log_capture_path.sh
dmesg -n 5
TR69TLVFILE="/nvram/TLVData.bin"
REVERTFLAG="/nvram/reverted"
MAINT_START="/nvram/.FirmwareUpgradeStartTime"
MAINT_END="/nvram/.FirmwareUpgradeEndTime"
# determine the distro type (GAP or GNP)
# GAP = TPG-branded build (marker string in drg_version.txt), GNP otherwise.
if [ -n "$(grep TPG /etc/drg_version.txt)" ]; then
    distro=GAP
else
    distro=GNP
fi
# determine the build type (debug or production)
# A debug build is flagged by the presence of /etc/debug_build.
if [ -f /etc/debug_build ] ; then
    debug_build=1
else
    debug_build=0
fi
# Log the firmware image name (from /version.txt) with a UTC timestamp.
firmware_name=`cat /version.txt | grep ^imagename: | cut -d ":" -f 2`
utc_time=`date -u`
echo "[$utc_time] [utopia][init] DEVICE_INIT:$firmware_name"
echo "[utopia][init] Tweaking network parameters" > /dev/console
# Shorten conntrack timeouts so stale flow entries are reclaimed quickly.
echo "60" > /proc/sys/net/ipv4/netfilter/ip_conntrack_udp_timeout_stream
echo "60" > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_syn_sent
echo "60" > /proc/sys/net/ipv4/netfilter/ip_conntrack_generic_timeout
echo "10" > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_time_wait
echo "10" > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_close
echo "20" > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_close_wait
echo "1800" > /proc/sys/net/ipv4/netfilter/ip_conntrack_tcp_timeout_established
# Raise the connection-tracking table limits.
echo "8192" > /proc/sys/net/ipv4/netfilter/ip_conntrack_max
echo "400" > /proc/sys/net/netfilter/nf_conntrack_expect_max
# Enlarge the IPv6 neighbor cache garbage-collection thresholds.
echo 4096 > /proc/sys/net/ipv6/neigh/default/gc_thresh1
echo 8192 > /proc/sys/net/ipv6/neigh/default/gc_thresh2
echo 8192 > /proc/sys/net/ipv6/neigh/default/gc_thresh3
#echo "[utopia][init] Loading drivers"
#MODULE_PATH=/fss/gw/lib/modules/`uname -r`/
#insmod $MODULE_PATH/drivers/net/erouter_ni.ko netdevname=erouter0
#if [ "$distro" = "GAP" ]; then
# #
# # ---- GAP: boot sequence (TPG)
# #
#
# sh /etc/rcS.d/11platform-init.sh
#
# echo "*******************************************************************"
# echo "* "
# echo "* Booting Cisco DRG `getFlashValue model -d` "
# echo "* Hardware ID: `getFlashValue hwid -d` Hardware Version: `getFlashValue hwversion -d`"
# echo "* Unit Serial Number: `getFlashValue unitsn` "
# echo "* Board Serial Number: `getFlashValue boardsn` "
# echo "* Manufacture Date: `getFlashValue mfgdate -d` "
# echo "* Software Version: `cat /etc/drg_version.txt` "
# echo "* "
# echo "*******************************************************************"
#
#else
# #
# # ---- GNP: boot sequence (CNS)
# #
#
# echo "*******************************************************************"
# echo "* Software Version: `cat /etc/drg_version.txt` "
# echo "*******************************************************************"
#
# insmod /lib/modules/`uname -r`/kernel/drivers/wifi/wl.ko
# cp /etc/utopia/service.d/nvram.dat /tmp
#fi
echo "Starting log module.."
/fss/gw/usr/sbin/log_start.sh
echo "[utopia][init] Starting udev.."
# Spawn telnet daemon only for production images
#if [ $debug_build -ne 0 ]; then
#echo "[utopia][init] Starting telnetd"
#service telnet start
#utelnetd -d
#fi
#echo "[utopia][init] Starting syslogd"
#/sbin/syslogd && /sbin/klogd
# echo "[utopia][init] Provisioning loopback interface"
#ip addr add 127.0.0.1/255.0.0.0 dev lo
#ip link set lo up
#ip route add 127.0.0.0/8 dev lo
# create our passwd/shadow/group files
#mkdir -p /tmp/etc/.root
#chmod 711 /tmp/etc/.root
#chmod 644 /tmp/etc/.root/passwd
#chmod 600 /tmp/etc/.root/shadow
#chmod 600 /tmp/etc/.root/group
# create the default profile. This is linked to by /etc/profile
#echo "export setenv PATH=/bin:/sbin:/usr/sbin:/usr/bin:/opt/sbin:/opt/bin" > /tmp/profile
#echo "export setenv LD_LIBRARY_PATH=/lib:/usr/lib:/opt/lib" >> /tmp/profile
#echo "if [ \$(tty) != \"/dev/console\" -a \${USER} != \"root\" ]; then cd /usr/cosa; ./cli_start.sh; fi" >> /tmp/profile
# create other files that are linked to by etc
#echo -n > /tmp/hosts
#echo -n > /tmp/hostname
#echo -n > /tmp/resolv.conf
#echo -n > /tmp/igmpproxy.conf
#echo -n > /tmp/ez-ipupdate.conf
#echo -n > /tmp/ez-ipupdate.out
#echo -n > /tmp/TZ
#echo -n > /tmp/.htpasswd
#echo -n > /tmp/dnsmasq.conf
#echo -n > /tmp/dhcp_options
#echo -n > /tmp/dhcp_static_hosts
#echo -n > /tmp/dnsmasq.leases
#echo -n > /tmp/zebra.conf
#echo -n > /tmp/ripd.conf
#echo -n > /tmp/dhcp6c.conf
mkdir -p /tmp/cron
BUTTON_THRESHOLD=15
FACTORY_RESET_KEY=factory_reset
FACTORY_RESET_RGWIFI=y
FACTORY_RESET_WIFI=w
SYSCFG_MOUNT=/nvram
SYSCFG_FILE=$SYSCFG_MOUNT/syscfg.db
PSM_CUR_XML_CONFIG_FILE_NAME="$SYSCFG_MOUNT/bbhm_cur_cfg.xml"
PSM_BAK_XML_CONFIG_FILE_NAME="$SYSCFG_MOUNT/bbhm_bak_cfg.xml"
PSM_TMP_XML_CONFIG_FILE_NAME="$SYSCFG_MOUNT/bbhm_tmp_cfg.xml"
XDNS_DNSMASQ_SERVERS_CONFIG_FILE_NAME="$SYSCFG_MOUNT/dnsmasq_servers.conf"
FACTORY_RESET_REASON=false
#syscfg_check -d $MTD_DEVICE
#if [ $? = 0 ]; then
# echo "[utopia][init] Starting syscfg subsystem using flash partition $MTD_DEVICE"
# /sbin/syscfg_create -d $MTD_DEVICE
#else
# echo "[utopia][init] Formating flash partition $MTD_DEVICE for syscfg use"
# syscfg_format -d $MTD_DEVICE
# if [ $? = 0 ]; then
# echo "[utopia][init] Starting syscfg subsystem using flash partition $MTD_DEVICE with default settings"
# /sbin/syscfg_create -d $MTD_DEVICE
# else
# echo "[utopia][init] FAILURE: formatting flash partition $MTD_DEVICE for syscfg use"
# echo "[utopia][init] Starting syscfg with default settings using file store ($SYSCFG_FILE)"
# echo "" > $SYSCFG_FILE
# /sbin/syscfg_create -f $SYSCFG_FILE
# fi
#fi
# CheckAndReCreateDB: recover when the nvram partition is completely full.
# If `df` reports 100% usage, purge old rdkb log backups (when the log
# monitor script exists), retry syscfg_create, and if the partition is
# still full afterwards dump a usage listing for triage.
CheckAndReCreateDB()
{
NVRAMFullStatus=`df -h $SYSCFG_MOUNT | grep "100%"`
if [ "$NVRAMFullStatus" != "" ]; then
if [ -f "/rdklogger/rdkbLogMonitor.sh" ]
then
#Remove Old backup files if there
sh /rdklogger/rdkbLogMonitor.sh "remove_old_logbackup"
#Re-create syscfg create again
syscfg_create -f $SYSCFG_FILE
if [ $? != 0 ]; then
# Still failing: re-check fullness and emit a diagnostic dump.
NVRAMFullStatus=`df -h $SYSCFG_MOUNT | grep "100%"`
if [ "$NVRAMFullStatus" != "" ]; then
echo "[utopia][init] NVRAM Full(100%) and below is the dump"
du -h $SYSCFG_MOUNT
ls -al $SYSCFG_MOUNT
fi
fi
fi
fi
}
# --- syscfg bring-up -------------------------------------------------------
# Start syscfg from its file store; on a first boot (no db file yet) seed an
# empty store and schedule a wifi reset so defaults get applied.
echo "[utopia][init] Starting syscfg using file store ($SYSCFG_FILE)"
if [ -f $SYSCFG_FILE ]; then
syscfg_create -f $SYSCFG_FILE
if [ $? != 0 ]; then
# Creation failed -- likely a full nvram partition; try to recover.
CheckAndReCreateDB
fi
else
# First boot: create an empty db file, then start syscfg on it.
echo -n > $SYSCFG_FILE
syscfg_create -f $SYSCFG_FILE
if [ $? != 0 ]; then
CheckAndReCreateDB
fi
#>>zqiu
echo "[utopia][init] need to reset wifi when ($SYSCFG_FILE) is not avaliable (for 1st time boot up)"
syscfg set $FACTORY_RESET_KEY $FACTORY_RESET_WIFI
#<<zqiu
touch /nvram/.apply_partner_defaults
# Put value 204 into networkresponse.txt file so that
# all LAN services start with a configuration which will
# redirect everything to Gateway IP.
# This value again will be modified from network_response.sh
echo "[utopia][init] Echoing network response during Factory reset"
echo 204 > /var/tmp/networkresponse.txt
fi
# Legacy cleanup: drop the old default lan domain if it is still set.
SYSCFG_LAN_DOMAIN=`syscfg get lan_domain`
if [ "$SYSCFG_LAN_DOMAIN" == "utopia.net" ]; then
echo "[utopia][init] Setting lan domain to NULL"
syscfg set lan_domain ""
syscfg commit
fi
# Read reset duration to check if the unit was rebooted by pressing the HW reset button
if [ -s /sys/bus/acpi/devices/INT34DB:00/reset_btn_dur ]; then
#Note: /sys/bus/acpi/devices/INT34DB:00/reset_btn_dur is an Arris XB6 File created by Arris and Intel by reading ARM
PUNIT_RESET_DURATION=`cat /sys/bus/acpi/devices/INT34DB:00/reset_btn_dur`
else
# Missing or empty duration file: treat as "button not pressed".
echo "[utopia][init] /sys/bus/acpi/devices/INT34DB:00/reset_btn_dur is empty or missing"
PUNIT_RESET_DURATION=0
fi
# Set the factory reset key if it was pressed for longer than our threshold
# (BUTTON_THRESHOLD seconds); BUTTON_FR=1 records a button-initiated reset.
if test "$BUTTON_THRESHOLD" -le "$PUNIT_RESET_DURATION"; then
syscfg set $FACTORY_RESET_KEY $FACTORY_RESET_RGWIFI && BUTTON_FR="1"
fi
SYSCFG_FR_VAL="`syscfg get $FACTORY_RESET_KEY`"
if [ "x$FACTORY_RESET_RGWIFI" = "x$SYSCFG_FR_VAL" ]; then
echo "[utopia][init] Performing factory reset"
SYSCFG_PARTNER_FR="`syscfg get PartnerID_FR`"
if [ "1" = "$SYSCFG_PARTNER_FR" ]; then
echo_t "[utopia][init] Performing factory reset due to PartnerID change"
else
rm -f /nvram/.partner_ID
fi
# Remove log file first because it need get log file path from syscfg
/fss/gw/usr/sbin/log_handle.sh reset
echo -e "\n" | syscfg_destroy
# umount $SYSCFG_MOUNT
# SYSDATA_MTD=`grep SysData /proc/mtd | awk -F: '{print $1}'`
# if [ -n $SYSDATA_MTD ]; then
# echo "[utopia][init] wiping system data flash"
# flash_eraseall -j /dev/$SYSDATA_MTD
# echo "[utopia][init] remounting system data flash"
# mount -t jffs2 mtd:SysData $SYSCFG_MOUNT
# echo -n > $SYSCFG_FILE
# fi
# Remove syscfg and PSM storage files
#mark the factory reset flag 'on'
FACTORY_RESET_REASON=true
rm -f /nvram/.keys/*
touch /nvram/.apply_partner_defaults
rm -f $SYSCFG_FILE
rm -f $PSM_CUR_XML_CONFIG_FILE_NAME
rm -f $PSM_BAK_XML_CONFIG_FILE_NAME
rm -f $PSM_TMP_XML_CONFIG_FILE_NAME
rm -f $TR69TLVFILE
rm -f $REVERTFLAG
rm -f $XDNS_DNSMASQ_SERVERS_CONFIG_FILE_NAME
rm -f $MAINT_START
rm -f $MAINT_END
# Remove DHCP lease file
rm -f /nvram/dnsmasq.leases
rm -f /nvram/server-IfaceMgr.xml
rm -f /nvram/server-AddrMgr.xml
rm -f /nvram/server-CfgMgr.xml
rm -f /nvram/server-TransMgr.xml
rm -f /nvram/server-cache.xml
rm -f /nvram/server-duid
rm -f /nvram/partners_defaults.json
#>>zqiu
create_wifi_default
#<<zqiu
echo "[utopia][init] Retarting syscfg using file store ($SYSCFG_FILE)"
syscfg_create -f $SYSCFG_FILE
if [ $? != 0 ]; then
CheckAndReCreateDB
fi
#>>zqiu
# Put value 204 into networkresponse.txt file so that
# all LAN services start with a configuration which will
# redirect everything to Gateway IP.
# This value again will be modified from network_response.sh
echo "[utopia][init] Echoing network response during Factory reset"
echo 204 > /var/tmp/networkresponse.txt
elif [ "x$FACTORY_RESET_WIFI" = "x$SYSCFG_FR_VAL" ]; then
echo "[utopia][init] Performing wifi reset"
create_wifi_default
syscfg unset $FACTORY_RESET_KEY
#<<zqiu
fi
#echo "[utopia][init] Cleaning up vendor nvram"
# /etc/utopia/service.d/nvram_cleanup.sh
echo "*** HTTPS root certificate for TR69 ***"
if [ ! -f /etc/cacert.pem ]; then
echo "HTTPS root certificate for TR69 is missing..."
else
echo "Copying HTTPS root certificate for TR69"
if [ -f /nvram/cacert.pem ]; then
rm -f /nvram/cacert.pem
fi
cp -f /etc/cacert.pem /nvram/
fi
echo "[utopia][init] Starting system logging"
/etc/utopia/service.d/service_syslog.sh syslog-start
echo "[utopia][init] Starting sysevent subsystem"
#syseventd --threads 18
syseventd
# we want plugged in usb devices to propagate events to sysevent
#echo "[utopia][init] Late loading usb drivers"
#MODULE_PATH=/lib/modules/`uname -r`/
#insmod $MODULE_PATH/usbcore.ko
#insmod $MODULE_PATH/ehci-hcd.ko
#insmod $MODULE_PATH/scsi_mod.ko
#insmod $MODULE_PATH/sd_mod.ko
#insmod $MODULE_PATH/libusual.ko
#insmod $MODULE_PATH/usb-storage.ko
#insmod $MODULE_PATH/nls_cp437.ko
#insmod $MODULE_PATH/nls_iso8859-1.ko
#insmod $MODULE_PATH/fat.ko
#insmod $MODULE_PATH/vfat.ko
#ARRISXB6-1554: apply_system_defaults calls sysevent API. Logs showed binaries weren't fully started
# ARRISXB6-1554 follow-up: wait (3 attempts x 2s, ~6s max) for syseventd to
# create its connection socket before apply_system_defaults uses the
# sysevent API below.
attemptCounter=0
until [ -e "/tmp/syseventd_connection" ]; do
if [ $attemptCounter -lt 3 ]
then
sleep 2
# NOTE(review): `let` is a bash/ash extension -- confirm the init shell supports it.
let "attemptCounter++"
else
# Give up after the retries and proceed anyway.
break
fi
done
echo "[utopia][init] Setting any unset system values to default"
apply_system_defaults
#ARRISXB6-2998
changeFilePermissions $SYSCFG_FILE 644
# Get the syscfg value which indicates whether unit is activated or not.
# This value is set from network_response.sh based on the return code received.
activated=`syscfg get unit_activated`
echo "[utopia][init] Value of unit_activated got is : $activated"
if [ "$activated" = "1" ]
then
echo "[utopia][init] Echoing network response during Reboot"
echo 204 > /var/tmp/networkresponse.txt
fi
echo "[utopia][init] Applying iptables settings"
lan_ifname=`syscfg get lan_ifname`
cmdiag_ifname=`syscfg get cmdiag_ifname`
ecm_wan_ifname=`syscfg get ecm_wan_ifname`
wan_ifname=`sysevent get wan_ifname`
#disable telnet / ssh ports
iptables -A INPUT -i $lan_ifname -p tcp --dport 23 -j DROP
iptables -A INPUT -i $lan_ifname -p tcp --dport 22 -j DROP
iptables -A INPUT -i $cmdiag_ifname -p tcp --dport 23 -j DROP
iptables -A INPUT -i $cmdiag_ifname -p tcp --dport 22 -j DROP
ip6tables -A INPUT -i $lan_ifname -p tcp --dport 23 -j DROP
ip6tables -A INPUT -i $lan_ifname -p tcp --dport 22 -j DROP
ip6tables -A INPUT -i $cmdiag_ifname -p tcp --dport 23 -j DROP
ip6tables -A INPUT -i $cmdiag_ifname -p tcp --dport 22 -j DROP
#protect from IPv6 NS flooding
ip6tables -t mangle -A PREROUTING -i $ecm_wan_ifname -d ff00::/8 -p ipv6-icmp -m icmp6 --icmpv6-type 135 -j DROP
ip6tables -t mangle -A PREROUTING -i $wan_ifname -d ff00::/8 -p ipv6-icmp -m icmp6 --icmpv6-type 135 -j DROP
/fss/gw/sbin/ulogd -c /fss/gw/etc/ulogd.conf -d
#echo "[utopia][init] Starting telnetd"
#TELNET_ENABLE=`syscfg get mgmt_wan_telnetaccess`
#if [ "$TELNET_ENABLE" = "1" ]; then
# if [ -e /bin/login ]; then
# /usr/sbin/telnetd -l /bin/login
# else
# /usr/sbin/telnetd
# fi
#fi
echo "[utopia][init] Processing registration"
INIT_DIR=/etc/utopia/registration.d
# run all executables in the sysevent registration directory
# echo "[utopia][init] Running registration using $INIT_DIR"
execute_dir $INIT_DIR&
#init_inter_subsystem&
#--------Set up private IPC vlan----------------
#SWITCH_HANDLER=/etc/utopia/service.d/service_multinet/handle_sw.sh
#vconfig add l2sd0 500
#$SWITCH_HANDLER addVlan 0 500 sw_6
#ifconfig l2sd0.500 192.168.101.1
#--------Set up Radius vlan -------------------
#vconfig add l2sd0 4090
#$SWITCH_HANDLER addVlan 0 4090 sw_6
#ifconfig l2sd0.4090 192.168.251.1 netmask 255.255.255.0 up
#ip rule add from all iif l2sd0.4090 lookup erouter
#--------Marvell LAN-side egress flood mitigation----------------
#echo "88E6172: Do not egress flood unicast with unknown DA"
#swctl -c 11 -p 5 -r 4 -b 0x007b
# Creating IOT VLAN on ARM
#swctl -c 16 -p 0 -v 106 -m 2 -q 1
#swctl -c 16 -p 7 -v 106 -m 2 -q 1
#vconfig add l2sd0 106
#ifconfig l2sd0.106 192.168.106.1 netmask 255.255.255.0 up
#ip rule add from all iif l2sd0.106 lookup erouter
# Check and set factory-reset as reboot reason
if [ "$FACTORY_RESET_REASON" = "true" ]; then
echo "[utopia][init] Detected last reboot reason as factory-reset"
syscfg set X_RDKCENTRAL-COM_LastRebootReason "factory-reset"
syscfg set X_RDKCENTRAL-COM_LastRebootCounter "1"
else
rebootReason=`syscfg get X_RDKCENTRAL-COM_LastRebootReason`
echo "[utopia][init] X_RDKCENTRAL-COM_LastRebootReason ($rebootReason)"
if [ "$rebootReason" = "factory-reset" ]; then
echo "[utopia][init] Setting last reboot reason as unknown"
syscfg set X_RDKCENTRAL-COM_LastRebootReason "unknown"
else
if [ -f /nvram/restore_reboot ];then
syscfg set X_RDKCENTRAL-COM_LastRebootReason "restore-reboot"
syscfg set X_RDKCENTRAL-COM_LastRebootCounter "1"
rm -f /nvram/restore_reboot
fi
fi
fi
echo "[utopia][init] completed creating utopia_inited flag"
touch /tmp/utopia_inited
|
<gh_stars>0
import path from 'path'
import { GatsbyNode } from 'gatsby'
// @ts-ignore
import createPaginatedPages from 'gatsby-paginate'
import postSlug from './src/utils/post-slug'
import { CATEGORY_BASE, TAG_BASE } from './src/templates/blog-post/url-base'
/**
 * Gatsby `createPages` hook.
 *
 * Builds, from Contentful data and local markdown files:
 *  - one page per blog post (at its generated slug),
 *  - one page per markdown "resource page",
 *  - the paginated blog index (/blog, /blog/page/N),
 *  - paginated category and tag index pages.
 */
export const createPages: GatsbyNode['createPages'] = ({
  graphql,
  actions,
}) => {
  const { createPage } = actions
  return new Promise((resolve, reject) => {
    const blogPostTemplate = path.resolve('src/templates/blog-post/index.tsx')
    const resourcePageTemplate = path.resolve('src/templates/resource-page.tsx')
    resolve(
      graphql(`
        fragment BlogPost on ContentfulBlogPost {
          id
          title
          postDate(formatString: "MMMM D, YYYY")
          body {
            childMarkdownRemark {
              excerpt(pruneLength: 750)
            }
          }
          headlineImage {
            title
            file {
              url
            }
          }
          fields {
            slug
          }
        }
        {
          allContentfulBlogPost(
            limit: 500
            sort: { fields: [postDate], order: DESC }
          ) {
            nodes {
              ...BlogPost
            }
          }
          allContentfulCategories {
            nodes {
              category
              slug
              blog_post {
                ...BlogPost
              }
            }
          }
          allContentfulTags {
            nodes {
              tag
              slug
              blog_post {
                ...BlogPost
              }
            }
          }
          allFile(filter: { sourceInstanceName: { eq: "resource-pages" } }) {
            nodes {
              childMarkdownRemark {
                frontmatter {
                  path
                }
              }
            }
          }
        }
      `).then((result) => {
        if (result.errors) {
          reject(result.errors)
          // BUGFIX: bail out here -- result.data may be undefined when the
          // query failed, and the code below would crash dereferencing it.
          return
        }
        // Create page for each blog post
        // @ts-ignore
        result.data.allContentfulBlogPost.nodes.forEach(
          // @ts-ignore
          ({ id, fields: { slug } }) => {
            createPage({
              path: slug,
              component: blogPostTemplate,
              context: {
                id,
              },
            })
          },
        )
        // Create Resource pages
        // @ts-ignore
        result.data.allFile.nodes.forEach(
          ({
            childMarkdownRemark: {
              // @ts-ignore
              frontmatter: { path: sitePath },
            },
          }) => {
            createPage({
              path: sitePath,
              component: resourcePageTemplate,
              // BUGFIX: this key was previously `content`, which createPage
              // ignores, so the template never received its path context.
              context: {
                path: sitePath,
              },
            })
          },
        )
        // Create Blog Index (paginated)
        createPaginatedPages({
          // @ts-ignore
          edges: result.data.allContentfulBlogPost.nodes,
          createPage,
          pageTemplate: 'src/templates/blog-index/index.tsx',
          pageLength: 5,
          pathPrefix: 'blog',
          buildPath: (index: number, pathPrefix: string) =>
            index > 1 ? `${pathPrefix}/page/${index}` : `/${pathPrefix}`,
          context: {
            headline: 'Blog Index',
          },
        })
        // Create Category Pages (only for categories that have posts)
        // @ts-ignore
        result.data.allContentfulCategories.nodes.forEach(
          // @ts-ignore
          ({ category, slug, blog_post: posts }) => {
            if (Array.isArray(posts)) {
              createPaginatedPages({
                edges: posts,
                createPage,
                pageTemplate: 'src/templates/blog-index/index.tsx',
                pageLength: 5,
                pathPrefix: `${CATEGORY_BASE}/${slug}`,
                buildPath: (index: number, pathPrefix: string) =>
                  index > 1 ? `${pathPrefix}/page/${index}` : `/${pathPrefix}`,
                context: {
                  headline: `Category: ${category}`,
                },
              })
            }
          },
        )
        // Create Tag Pages (only for tags that have posts)
        // @ts-ignore
        result.data.allContentfulTags.nodes.forEach(
          // @ts-ignore
          ({ tag, slug, blog_post: posts }) => {
            if (Array.isArray(posts)) {
              createPaginatedPages({
                edges: posts,
                createPage,
                pageTemplate: 'src/templates/blog-index/index.tsx',
                pageLength: 5,
                pathPrefix: `${TAG_BASE}/${slug}`,
                buildPath: (index: number, pathPrefix: string) =>
                  index > 1 ? `${pathPrefix}/page/${index}` : `/${pathPrefix}`,
                context: {
                  headline: `Tag: ${tag}`,
                },
              })
            }
          },
        )
      }),
    )
  })
}
/**
 * Gatsby `onCreateNode` hook: attaches a computed `slug` field to every
 * ContentfulBlogPost node so page creation and templates can link by URL.
 */
export const onCreateNode: GatsbyNode['onCreateNode'] = ({ node, actions }) => {
  if (node.internal.type !== 'ContentfulBlogPost') {
    return
  }
  const { createNodeField } = actions
  const { postDate, title } = node
  createNodeField({
    node,
    name: 'slug',
    // @ts-ignore
    value: postSlug(postDate, title),
  })
}
|
<gh_stars>1-10
/**
 * UiState holds the display state for a clock face whose digits are shown
 * in binary: each of hours/minutes/seconds is stored as an object of
 * zero-padded bit strings { tens, ones } after its setter runs.
 */
class UiState {
  constructor() {
    // Empty until the first set*() call replaces them with { tens, ones }.
    this.hours = [];
    this.minutes = [];
    this.seconds = [];
  }

  // Store the hour value as binary digit strings.
  setHours(val) {
    this.hours = this.splitIntoParts(val);
  }

  // Store the minute value as binary digit strings.
  setMinutes(val) {
    this.minutes = this.splitIntoParts(val);
  }

  // Store the second value as binary digit strings.
  setSeconds(val) {
    this.seconds = this.splitIntoParts(val);
  }

  /**
   * Split a two-digit value into its tens and ones digits, each encoded as
   * a zero-padded binary string: 3 bits for tens (0-5 fits in 3 bits),
   * 4 bits for ones (0-9 fits in 4 bits).
   * Fixes: radix passed as a number (was the string "2") and removal of a
   * dangling unused `tensBit` declaration.
   */
  splitIntoParts(val) {
    const tens = Math.floor(val / 10);
    const ones = val % 10;
    return {
      "tens": (tens >>> 0).toString(2).padStart(3, "0"),
      "ones": (ones >>> 0).toString(2).padStart(4, "0")
    };
  }
}
|
#!/bin/bash
# Author: yeho <lj2007331 AT gmail.com>
# BLOG: https://linuxeye.com
#
# Notes: OneinStack for CentOS/RedHat 7+ Debian 8+ and Ubuntu 16+
#
# Project home page:
# https://oneinstack.com
# https://github.com/oneinstack/oneinstack
# Install_PHP72: build and install PHP 7.2 from source together with the
# libraries it links against (libiconv, curl, freetype, argon2, libsodium,
# mhash), then lay down php.ini and either an Apache mod_php or php-fpm
# runtime configuration.
# Relies on variables sourced by the oneinstack framework: version numbers
# (*_ver), install dirs (*_install_dir), ${run_user}/${run_group}, ${Mem},
# ${THREAD}, ${PM}, ${timezone}, ${Memory_limit} and the option flags.
Install_PHP72() {
pushd ${oneinstack_dir}/src > /dev/null
# Detect the installed Apache minor version (2.4 vs 2.2) to choose between
# the apxs (mod_php) and php-fpm builds further down.
if [ -e "${apache_install_dir}/bin/httpd" ];then
[ "$(${apache_install_dir}/bin/httpd -v | awk -F'.' /version/'{print $2}')" == '4' ] && Apache_main_ver=24
[ "$(${apache_install_dir}/bin/httpd -v | awk -F'.' /version/'{print $2}')" == '2' ] && Apache_main_ver=22
fi
# --- dependency: GNU libiconv (character-set conversion) ---
if [ ! -e "/usr/local/lib/libiconv.la" ]; then
tar xzf libiconv-${libiconv_ver}.tar.gz
pushd libiconv-${libiconv_ver} > /dev/null
./configure
make -j ${THREAD} && make install
popd > /dev/null
rm -rf libiconv-${libiconv_ver}
fi
# --- dependency: curl built against our OpenSSL ---
if [ ! -e "${curl_install_dir}/lib/libcurl.la" ]; then
tar xzf curl-${curl_ver}.tar.gz
pushd curl-${curl_ver} > /dev/null
# Debian 8's zlib1g-dev conflicts with this build; remove it for the build
# and reinstall it (plus the other -dev packages) afterwards.
[ "${Debian_ver}" == '8' ] && apt-get -y remove zlib1g-dev
./configure --prefix=${curl_install_dir} --with-ssl=${openssl_install_dir}
make -j ${THREAD} && make install
[ "${Debian_ver}" == '8' ] && apt-get -y install libc-client2007e-dev libglib2.0-dev libpng12-dev libssl-dev libzip-dev zlib1g-dev
popd > /dev/null
rm -rf curl-${curl_ver}
fi
# --- dependency: freetype (used by the GD extension) ---
if [ ! -e "${freetype_install_dir}/lib/libfreetype.la" ]; then
tar xzf freetype-${freetype_ver}.tar.gz
pushd freetype-${freetype_ver} > /dev/null
./configure --prefix=${freetype_install_dir} --enable-freetype-config
make -j ${THREAD} && make install
ln -sf ${freetype_install_dir}/include/freetype2/* /usr/include/
[ -d /usr/lib/pkgconfig ] && /bin/cp ${freetype_install_dir}/lib/pkgconfig/freetype2.pc /usr/lib/pkgconfig/
popd > /dev/null
rm -rf freetype-${freetype_ver}
fi
# --- dependency: argon2 (for --with-password-argon2) ---
if [ ! -e "/usr/local/lib/pkgconfig/libargon2.pc" ]; then
tar xzf argon2-${argon2_ver}.tar.gz
pushd argon2-${argon2_ver} > /dev/null
make -j ${THREAD} && make install
[ ! -d /usr/local/lib/pkgconfig ] && mkdir -p /usr/local/lib/pkgconfig
/bin/cp libargon2.pc /usr/local/lib/pkgconfig/
popd > /dev/null
rm -rf argon2-${argon2_ver}
fi
# --- dependency: libsodium (for --with-sodium) ---
if [ ! -e "/usr/local/lib/libsodium.la" ]; then
tar xzf libsodium-${libsodium_ver}.tar.gz
pushd libsodium-${libsodium_ver} > /dev/null
./configure --disable-dependency-tracking --enable-minimal
make -j ${THREAD} && make install
popd > /dev/null
rm -rf libsodium-${libsodium_ver}
fi
# --- dependency: mhash (for --with-mhash) ---
if [ ! -e "/usr/local/include/mhash.h" -a ! -e "/usr/include/mhash.h" ]; then
tar xzf mhash-${mhash_ver}.tar.gz
pushd mhash-${mhash_ver} > /dev/null
./configure
make -j ${THREAD} && make install
popd > /dev/null
rm -rf mhash-${mhash_ver}
fi
# Make sure the freshly installed /usr/local libraries are visible to ld.
[ -z "`grep /usr/local/lib /etc/ld.so.conf.d/*.conf`" ] && echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf
ldconfig
# RHEL-family quirk: provide the library names PHP's configure expects.
if [ "${PM}" == 'yum' ]; then
[ ! -e "/lib64/libpcre.so.1" ] && ln -s /lib64/libpcre.so.0.0.1 /lib64/libpcre.so.1
[ ! -e "/usr/lib/libc-client.so" ] && ln -s /usr/lib64/libc-client.so /usr/lib/libc-client.so
fi
# Create the php-fpm runtime group/user if they do not exist yet.
id -g ${run_group} >/dev/null 2>&1
[ $? -ne 0 ] && groupadd ${run_group}
id -u ${run_user} >/dev/null 2>&1
[ $? -ne 0 ] && useradd -g ${run_group} -M -s /sbin/nologin ${run_user}
# --- build PHP itself ---
tar xzf php-${php72_ver}.tar.gz
pushd php-${php72_ver} > /dev/null
make clean
[ ! -d "${php_install_dir}" ] && mkdir -p ${php_install_dir}
[ "${phpcache_option}" == '1' ] && phpcache_arg='--enable-opcache' || phpcache_arg='--disable-opcache'
# Apache 2.2 (or an explicit mod_php choice) -> build the apxs SAPI;
# otherwise build the php-fpm SAPI.
if [ "${Apache_main_ver}" == '22' ] || [ "${apache_mode_option}" == '2' ]; then
./configure --prefix=${php_install_dir} --with-config-file-path=${php_install_dir}/etc \
--with-config-file-scan-dir=${php_install_dir}/etc/php.d \
--with-apxs2=${apache_install_dir}/bin/apxs ${phpcache_arg} --disable-fileinfo \
--enable-mysqlnd --with-mysqli=mysqlnd --with-pdo-mysql=mysqlnd \
--with-iconv-dir=/usr/local --with-freetype-dir=${freetype_install_dir} --with-jpeg-dir --with-png-dir --with-zlib \
--with-libxml-dir=/usr --enable-xml --disable-rpath --enable-bcmath --enable-shmop --enable-exif \
--enable-sysvsem --enable-inline-optimization --with-curl=${curl_install_dir} --enable-mbregex \
--enable-mbstring --with-password-argon2 --with-sodium=/usr/local --with-gd --with-openssl=${openssl_install_dir} \
--with-mhash --enable-pcntl --enable-sockets --with-xmlrpc --enable-ftp --enable-intl --with-xsl \
--with-gettext --enable-zip --enable-soap --disable-debug ${php_modules_options}
else
./configure --prefix=${php_install_dir} --with-config-file-path=${php_install_dir}/etc \
--with-config-file-scan-dir=${php_install_dir}/etc/php.d \
--with-fpm-user=${run_user} --with-fpm-group=${run_group} --enable-fpm ${phpcache_arg} --disable-fileinfo \
--enable-mysqlnd --with-mysqli=mysqlnd --with-pdo-mysql=mysqlnd \
--with-iconv-dir=/usr/local --with-freetype-dir=${freetype_install_dir} --with-jpeg-dir --with-png-dir --with-zlib \
--with-libxml-dir=/usr --enable-xml --disable-rpath --enable-bcmath --enable-shmop --enable-exif \
--enable-sysvsem --enable-inline-optimization --with-curl=${curl_install_dir} --enable-mbregex \
--enable-mbstring --with-password-argon2 --with-sodium=/usr/local --with-gd --with-openssl=${openssl_install_dir} \
--with-mhash --enable-pcntl --enable-sockets --with-xmlrpc --enable-ftp --enable-intl --with-xsl \
--with-gettext --enable-zip --enable-soap --disable-debug ${php_modules_options}
fi
make ZEND_EXTRA_LIBS='-liconv' -j ${THREAD}
make install
# phpize present == install succeeded; otherwise clean up and abort.
if [ -e "${php_install_dir}/bin/phpize" ]; then
[ ! -e "${php_install_dir}/etc/php.d" ] && mkdir -p ${php_install_dir}/etc/php.d
echo "${CSUCCESS}PHP installed successfully! ${CEND}"
else
rm -rf ${php_install_dir}
echo "${CFAILURE}PHP install failed, Please Contact the author! ${CEND}"
kill -9 $$; exit 1;
fi
# Expose ${php_install_dir}/bin on PATH for future shells.
[ -z "`grep ^'export PATH=' /etc/profile`" ] && echo "export PATH=${php_install_dir}/bin:\$PATH" >> /etc/profile
[ -n "`grep ^'export PATH=' /etc/profile`" -a -z "`grep ${php_install_dir} /etc/profile`" ] && sed -i "s@^export PATH=\(.*\)@export PATH=${php_install_dir}/bin:\1@" /etc/profile
. /etc/profile
# wget -c http://pear.php.net/go-pear.phar
# ${php_install_dir}/bin/php go-pear.phar
# Start from the upstream production php.ini, then harden/tune it in place.
/bin/cp php.ini-production ${php_install_dir}/etc/php.ini
sed -i "s@^memory_limit.*@memory_limit = ${Memory_limit}M@" ${php_install_dir}/etc/php.ini
sed -i 's@^output_buffering =@output_buffering = On\noutput_buffering =@' ${php_install_dir}/etc/php.ini
#sed -i 's@^;cgi.fix_pathinfo.*@cgi.fix_pathinfo=0@' ${php_install_dir}/etc/php.ini
sed -i 's@^short_open_tag = Off@short_open_tag = On@' ${php_install_dir}/etc/php.ini
sed -i 's@^expose_php = On@expose_php = Off@' ${php_install_dir}/etc/php.ini
sed -i 's@^request_order.*@request_order = "CGP"@' ${php_install_dir}/etc/php.ini
sed -i "s@^;date.timezone.*@date.timezone = ${timezone}@" ${php_install_dir}/etc/php.ini
sed -i 's@^post_max_size.*@post_max_size = 100M@' ${php_install_dir}/etc/php.ini
sed -i 's@^upload_max_filesize.*@upload_max_filesize = 50M@' ${php_install_dir}/etc/php.ini
sed -i 's@^max_execution_time.*@max_execution_time = 600@' ${php_install_dir}/etc/php.ini
sed -i 's@^;realpath_cache_size.*@realpath_cache_size = 2M@' ${php_install_dir}/etc/php.ini
sed -i 's@^disable_functions.*@disable_functions = passthru,exec,system,chroot,chgrp,chown,shell_exec,proc_open,proc_get_status,ini_alter,ini_restore,dl,readlink,symlink,popepassthru,stream_socket_server,fsocket,popen@' ${php_install_dir}/etc/php.ini
[ -e /usr/sbin/sendmail ] && sed -i 's@^;sendmail_path.*@sendmail_path = /usr/sbin/sendmail -t -i@' ${php_install_dir}/etc/php.ini
sed -i "s@^;curl.cainfo.*@curl.cainfo = \"${openssl_install_dir}/cert.pem\"@" ${php_install_dir}/etc/php.ini
sed -i "s@^;openssl.cafile.*@openssl.cafile = \"${openssl_install_dir}/cert.pem\"@" ${php_install_dir}/etc/php.ini
sed -i "s@^;openssl.capath.*@openssl.capath = \"${openssl_install_dir}/cert.pem\"@" ${php_install_dir}/etc/php.ini
# Optional opcache extension configuration (only when opcache was enabled).
[ "${phpcache_option}" == '1' ] && cat > ${php_install_dir}/etc/php.d/02-opcache.ini << EOF
[opcache]
zend_extension=opcache.so
opcache.enable=1
opcache.enable_cli=1
opcache.memory_consumption=${Memory_limit}
opcache.interned_strings_buffer=8
opcache.max_accelerated_files=100000
opcache.max_wasted_percentage=5
opcache.use_cwd=1
opcache.validate_timestamps=1
opcache.revalidate_freq=60
;opcache.save_comments=0
opcache.consistency_checks=0
;opcache.optimization_level=0
EOF
# No apxs build (or Apache 2.4 with fpm mode): install and start php-fpm.
if [ ! -e "${apache_install_dir}/bin/apxs" -o "${Apache_main_ver}" == '24' ] && [ "${apache_mode_option}" != '2' ]; then
# php-fpm Init Script
if [ -e /bin/systemctl ]; then
/bin/cp ${oneinstack_dir}/init.d/php-fpm.service /lib/systemd/system/
sed -i "s@/usr/local/php@${php_install_dir}@g" /lib/systemd/system/php-fpm.service
systemctl enable php-fpm
else
/bin/cp sapi/fpm/init.d.php-fpm /etc/init.d/php-fpm
chmod +x /etc/init.d/php-fpm
[ "${PM}" == 'yum' ] && { chkconfig --add php-fpm; chkconfig php-fpm on; }
[ "${PM}" == 'apt-get' ] && update-rc.d php-fpm defaults
fi
cat > ${php_install_dir}/etc/php-fpm.conf <<EOF
;;;;;;;;;;;;;;;;;;;;;
; FPM Configuration ;
;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;
; Global Options ;
;;;;;;;;;;;;;;;;;;
[global]
pid = run/php-fpm.pid
error_log = log/php-fpm.log
log_level = warning
emergency_restart_threshold = 30
emergency_restart_interval = 60s
process_control_timeout = 5s
daemonize = yes
;;;;;;;;;;;;;;;;;;;;
; Pool Definitions ;
;;;;;;;;;;;;;;;;;;;;
[${run_user}]
listen = /dev/shm/php-cgi.sock
listen.backlog = -1
listen.allowed_clients = 127.0.0.1
listen.owner = ${run_user}
listen.group = ${run_group}
listen.mode = 0666
user = ${run_user}
group = ${run_group}
pm = dynamic
pm.max_children = 12
pm.start_servers = 8
pm.min_spare_servers = 6
pm.max_spare_servers = 12
pm.max_requests = 2048
pm.process_idle_timeout = 10s
request_terminate_timeout = 120
request_slowlog_timeout = 0
pm.status_path = /php-fpm_status
slowlog = var/log/slow.log
rlimit_files = 51200
rlimit_core = 0
catch_workers_output = yes
;env[HOSTNAME] = $HOSTNAME
env[PATH] = /usr/local/bin:/usr/bin:/bin
env[TMP] = /tmp
env[TMPDIR] = /tmp
env[TEMP] = /tmp
EOF
# Scale the fpm worker pool with available RAM (${Mem} is in MB).
if [ $Mem -le 3000 ]; then
sed -i "s@^pm.max_children.*@pm.max_children = $(($Mem/3/20))@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.start_servers.*@pm.start_servers = $(($Mem/3/30))@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = $(($Mem/3/40))@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = $(($Mem/3/20))@" ${php_install_dir}/etc/php-fpm.conf
elif [ $Mem -gt 3000 -a $Mem -le 4500 ]; then
sed -i "s@^pm.max_children.*@pm.max_children = 50@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.start_servers.*@pm.start_servers = 30@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = 20@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = 50@" ${php_install_dir}/etc/php-fpm.conf
elif [ $Mem -gt 4500 -a $Mem -le 6500 ]; then
sed -i "s@^pm.max_children.*@pm.max_children = 60@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.start_servers.*@pm.start_servers = 40@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = 30@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = 60@" ${php_install_dir}/etc/php-fpm.conf
elif [ $Mem -gt 6500 -a $Mem -le 8500 ]; then
sed -i "s@^pm.max_children.*@pm.max_children = 70@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.start_servers.*@pm.start_servers = 50@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = 40@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = 70@" ${php_install_dir}/etc/php-fpm.conf
elif [ $Mem -gt 8500 ]; then
sed -i "s@^pm.max_children.*@pm.max_children = 80@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.start_servers.*@pm.start_servers = 60@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.min_spare_servers.*@pm.min_spare_servers = 50@" ${php_install_dir}/etc/php-fpm.conf
sed -i "s@^pm.max_spare_servers.*@pm.max_spare_servers = 80@" ${php_install_dir}/etc/php-fpm.conf
fi
service php-fpm start
elif [ "${Apache_main_ver}" == '22' ] || [ "${apache_mode_option}" == '2' ]; then
# mod_php build: reload Apache to pick up the new module.
service httpd restart
fi
popd > /dev/null
[ -e "${php_install_dir}/bin/phpize" ] && rm -rf php-${php72_ver}
popd > /dev/null
}
|
#!/usr/bin/zsh
# ship script: rsync the locally built site ($BUILD_DIR) to the server
# directory ($SERVER_DIR), then record the ship timestamp in
# $SERVER_DIR/.since.
echo "== Starting ship script =="
# Run from the directory this script lives in so ./config.sh resolves.
cd `realpath "$0" | xargs dirname`
# config.sh is expected to define BUILD_DIR and SERVER_DIR.
. ./config.sh
[ -z "$BUILD_DIR" ] \
&& echo 'No build directory (BUILD_DIR) specified or config didnt load' \
&& exit 1
cd "$BUILD_DIR"
[ $? -eq 1 ] \
&& echo "Cant change into build directory (BUILD_DIR) at '$BUILD_DIR'" \
&& exit 1
[ -z "$SERVER_DIR" ] \
&& echo 'No target-directory (SERVER_DIR) specified or config didnt load' \
&& exit 1
# sshfs
# NOTE(review): presumably mounts the remote filesystem that SERVER_DIR
# lives on -- confirm what ~/.ssh/server-fs.sh does.
~/.ssh/server-fs.sh
mkdir -p "$SERVER_DIR"
[ ! -d "$SERVER_DIR" ] \
&& echo "Cant create target-directory (SERVER_DIR) at '$SERVER_DIR'" \
&& exit 1
echo "BUILD_DIR: '$BUILD_DIR'"
echo "SERVER_DIR: '$SERVER_DIR'"
# sync up
# --delete mirrors removals to the server; '.since' is excluded so the
# previous ship timestamp survives until it is rewritten below.
rsync -z --progress --archive --links --inplace --verbose --delete \
--exclude '.since' \
"$BUILD_DIR/" \
"$SERVER_DIR"
# Record the epoch time of this ship.
echo `date '+%s'` \
> "$SERVER_DIR/.since"
|
<reponame>libertyernie/3DSFE-Randomizer<gh_stars>1-10
package randomizer.common.enums;
/**
 * The categories a skill can belong to.
 * NOTE(review): semantics inferred from the constant names only (basic,
 * enemy-only, DLC-delivered, character-personal) -- confirm against usage
 * elsewhere in the randomizer before relying on them.
 */
public enum SkillType
{
Basic, Enemy, DLC, Personal
}
|
def compress_file(file_path: str) -> None:
    """Report the compression mode being applied to ``file_path``.

    NOTE(review): relies on module-level flags ``COMPRESSOR_DEBUG`` and
    ``COMPRESSOR_OFFLINE_COMPRESS`` defined elsewhere in this module --
    confirm they are always set before this function is called.  As visible
    here, the function only prints; any actual compression presumably
    happens elsewhere -- verify.

    :param file_path: path of the file being processed (used in the debug
        message only).
    """
    if COMPRESSOR_DEBUG:
        # Trace which file is being processed when debugging is enabled.
        print(f"Debugging information: Compressing file {file_path}")
    if COMPRESSOR_OFFLINE_COMPRESS:
        print("Performing offline compression")
    else:
        print("Using online compression services")
|
/*
* Core
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: 6.1-preview
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.epam.reportportal.extension.azure.rest.client.model;
import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.Objects;
import java.util.UUID;
/**
*
*/
/** Reference to an Azure DevOps team: its identity GUID, name and REST URL. */
@ApiModel(description = "")
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2021-07-22T22:33:25.592Z")
public class WebApiTeamRef {
  /** Team (Identity) Guid. A Team Foundation ID. */
  @SerializedName("id")
  private UUID id = null;

  /** Team name. */
  @SerializedName("name")
  private String name = null;

  /** Team REST API Url. */
  @SerializedName("url")
  private String url = null;

  /**
   * Fluent setter for the team id.
   *
   * @param id Team (Identity) Guid. A Team Foundation ID.
   * @return this instance, for chaining
   */
  public WebApiTeamRef id(UUID id) {
    setId(id);
    return this;
  }

  /**
   * Team (Identity) Guid. A Team Foundation ID.
   * @return id
   **/
  @ApiModelProperty(value = "Team (Identity) Guid. A Team Foundation ID.")
  public UUID getId() {
    return id;
  }

  public void setId(UUID id) {
    this.id = id;
  }

  /**
   * Fluent setter for the team name.
   *
   * @param name team name
   * @return this instance, for chaining
   */
  public WebApiTeamRef name(String name) {
    setName(name);
    return this;
  }

  /**
   * Team name
   * @return name
   **/
  @ApiModelProperty(value = "Team name")
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  /**
   * Fluent setter for the team REST URL.
   *
   * @param url team REST API url
   * @return this instance, for chaining
   */
  public WebApiTeamRef url(String url) {
    setUrl(url);
    return this;
  }

  /**
   * Team REST API Url
   * @return url
   **/
  @ApiModelProperty(value = "Team REST API Url")
  public String getUrl() {
    return url;
  }

  public void setUrl(String url) {
    this.url = url;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    WebApiTeamRef other = (WebApiTeamRef) o;
    return Objects.equals(this.id, other.id)
        && Objects.equals(this.name, other.name)
        && Objects.equals(this.url, other.url);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, name, url);
  }

  @Override
  public String toString() {
    return "class WebApiTeamRef {\n"
        + " id: " + toIndentedString(id) + "\n"
        + " name: " + toIndentedString(name) + "\n"
        + " url: " + toIndentedString(url) + "\n"
        + "}";
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n ");
  }
}
|
package cmu.xprize.asm_component;
/**
*
*/
/**
 * Mechanics contract for the dot-manipulation behaviour of the arithmetic
 * (asm) component. Per the original author's notes, implementations live in
 * the Base/Add/Subtract mechanics classes.
 */
public interface IDotMechanics {
/** Set-up performed before a click is handled. */
void preClickSetup();
/** Resets the mechanic to its default state. */
void next();
/** Called by C_Component.nextDigit() when advancing to the next digit. */
void nextDigit();
/**
 * Handles a click on the component; implemented by Base, Add and Subtract.
 * NOTE(review): original comment believed this is where the dot movement
 * happens -- confirm in the implementations.
 */
void handleClick();
/**
 * NOTE(review): original comment says only used by Subtract/Borrow and
 * might be deletable -- verify callers before removing.
 */
void correctOverheadText();
/** NOTE(review): original comment says only used by Subtract/Borrow. */
void highlightOverhead();
/** Highlights the result; only implemented in Base per the original notes. */
void highlightResult();
}
|
<filename>src/components/Buttons.js
import React, { Component } from 'react';
export default class Buttons extends Component {
render() {
return (
<div>
<div onClick={() => this.props.fetchData('starships')}>Starships</div>
<div onClick={() => this.props.fetchData('people')}>People</div>
<div onClick={() => this.props.fetchData('planets')}>Planets</div>
</div>
);
}
}
|
<gh_stars>10-100
package io.opensphere.mantle.data.cache;
import java.io.File;
/**
 * The CacheConfiguration for the DataElementCache.
 *
 * Immutable; instances are produced only through the static factory methods.
 */
public final class CacheConfiguration
{
    /** The disk cache location (null unless the store type is DISK). */
    private final File myDiskCacheLocation;

    /**
     * The max allowed elements in memory even accounting for those that are
     * scheduled for cache to permanent store.
     */
    private final int myMaxInMemory;

    /**
     * The number of elements allowed to reside in memory at all times.
     */
    private final int myPoolInMemory;

    /** Whether elements are also removed from the backing store on remove. */
    private final boolean myRemoveFromStoreOnRemove;

    /** The store type. */
    private final CacheStoreType myStoreType;

    /** Whether the on-disk store is encrypted. */
    private final boolean myUseDiskEncryption;

    /** The Use dynamic class storage for data elements. */
    private final boolean myUseDynamicClassStorageForDataElements;

    /**
     * Creates the disk cached configuration.
     *
     * @param maxInMemory the max allowed elements in memory
     * @param inMemoryPoolSize the preferred number of elements in memory
     * @param diskCacheLocation the disk cache location
     * @param removeFromStoreOnRemove the remove from store on remove
     * @param useEncryption the use encryption
     * @param useDynamicClasses the use dynamic classes
     * @return the cache configuration
     * @throws IllegalArgumentException if inMemoryPoolSize exceeds maxInMemory
     */
    public static CacheConfiguration createDiskCachedConfiguration(int maxInMemory, int inMemoryPoolSize, File diskCacheLocation,
            boolean removeFromStoreOnRemove, boolean useEncryption, boolean useDynamicClasses)
    {
        checkPoolSize(maxInMemory, inMemoryPoolSize);
        return new CacheConfiguration(CacheStoreType.DISK, maxInMemory, inMemoryPoolSize, diskCacheLocation,
                removeFromStoreOnRemove, useEncryption, useDynamicClasses);
    }

    /**
     * Creates the registry cached configuration.
     *
     * @param maxInMemory the max allowed elements in memory
     * @param inMemoryPoolSize the preferred number of elements in memory
     * @param removeFromStoreOnRemove the remove from store on remove
     * @param useDynamicClasses the use dynamic classes
     * @return the cache configuration
     * @throws IllegalArgumentException if inMemoryPoolSize exceeds maxInMemory
     */
    public static CacheConfiguration createRegistryCachedConfiguration(int maxInMemory, int inMemoryPoolSize,
            boolean removeFromStoreOnRemove, boolean useDynamicClasses)
    {
        checkPoolSize(maxInMemory, inMemoryPoolSize);
        return new CacheConfiguration(CacheStoreType.REGISTRY, maxInMemory, inMemoryPoolSize, null, removeFromStoreOnRemove,
                false, useDynamicClasses);
    }

    /**
     * Creates the unlimited in memory configuration with no persisted store.
     *
     * @param useDynamicClasses the use dynamic classes
     * @return the cache configuration
     */
    public static CacheConfiguration createUnlimitedInMemoryConfiguration(boolean useDynamicClasses)
    {
        // NOTE(review): 'true' below is the useDiskEncryption flag even though
        // this configuration has no disk store; preserved as-is — confirm.
        return new CacheConfiguration(CacheStoreType.NONE, Integer.MAX_VALUE, Integer.MAX_VALUE, null, false, true,
                useDynamicClasses);
    }

    /**
     * Validates that the preferred in-memory pool size does not exceed the
     * hard maximum. Extracted so both bounded factory methods share a single
     * check instead of duplicating it.
     *
     * @param maxInMemory the max allowed elements in memory
     * @param inMemoryPoolSize the preferred number of elements in memory
     * @throws IllegalArgumentException if inMemoryPoolSize exceeds maxInMemory
     */
    private static void checkPoolSize(int maxInMemory, int inMemoryPoolSize)
    {
        if (inMemoryPoolSize > maxInMemory)
        {
            throw new IllegalArgumentException("inMemoryPoolSize must be <= maxInMemory");
        }
    }

    /**
     * Instantiates a new cache configuration.
     *
     * @param type the type
     * @param maxInMemory the max allowed elements in memory
     * @param poolInMemory the preferred number of elements in memory
     * @param diskCacheLocation the disk cache location
     * @param removeFromStoreOnRemove the remove from store on remove
     * @param useDiskEncryption the use disk encryption
     * @param useDynamicClassStorageForDataElements the use dynamic class
     *            storage for data elements
     */
    private CacheConfiguration(CacheStoreType type, int maxInMemory, int poolInMemory, File diskCacheLocation,
            boolean removeFromStoreOnRemove, boolean useDiskEncryption, boolean useDynamicClassStorageForDataElements)
    {
        myMaxInMemory = maxInMemory;
        myPoolInMemory = poolInMemory;
        myStoreType = type;
        myRemoveFromStoreOnRemove = removeFromStoreOnRemove;
        myUseDiskEncryption = useDiskEncryption;
        myDiskCacheLocation = diskCacheLocation;
        myUseDynamicClassStorageForDataElements = useDynamicClassStorageForDataElements;
    }

    /**
     * Gets the cache store type.
     *
     * @return the cache store type
     */
    public CacheStoreType getCacheStoreType()
    {
        return myStoreType;
    }

    /**
     * Gets the disk cache location.
     *
     * @return the disk cache location (null for non-disk configurations)
     */
    public File getDiskCacheLocation()
    {
        return myDiskCacheLocation;
    }

    /**
     * Gets the in memory pool size.
     *
     * @return the in memory pool size
     */
    public int getInMemoryPoolSize()
    {
        return myPoolInMemory;
    }

    /**
     * Gets the max in memory.
     *
     * @return the max in memory
     */
    public int getMaxInMemory()
    {
        return myMaxInMemory;
    }

    /**
     * Removes the from store on remove.
     *
     * @return true, if successful
     */
    public boolean isRemoveFromStoreOnRemove()
    {
        return myRemoveFromStoreOnRemove;
    }

    /**
     * Checks if is unlimited.
     *
     * @return true, if is unlimited
     */
    public boolean isUnlimited()
    {
        return myPoolInMemory == Integer.MAX_VALUE;
    }

    /**
     * Checks if is use disk encryption.
     *
     * @return true, if is use disk encryption
     */
    public boolean isUseDiskEncryption()
    {
        return myUseDiskEncryption;
    }

    /**
     * Checks if is use dynamic class storage for data elements.
     *
     * @return true, if is use dynamic class storage for data elements
     */
    public boolean isUseDynamicClassStorageForDataElements()
    {
        return myUseDynamicClassStorageForDataElements;
    }

    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder(256);
        sb.append("Cache Configuration Summary: \n" + "  Store Type       : ").append(myStoreType)
                .append("\n" + "  Encryption       : ").append(myUseDiskEncryption).append("\n" + "  RemoveOnRemove   : ")
                .append(myRemoveFromStoreOnRemove).append('\n');
        if (myStoreType == CacheStoreType.DISK)
        {
            sb.append("  DiskCacheLocation: ")
                    .append(myDiskCacheLocation == null ? "NULL" : myDiskCacheLocation.getAbsolutePath()).append('\n');
        }
        sb.append("  Pool In Memory   : ").append(myPoolInMemory).append("\n" + "  Max In Memory    : ").append(myMaxInMemory)
                .append("\n" + "  Dynamic Classes  : ").append(myUseDynamicClassStorageForDataElements).append('\n');
        return sb.toString();
    }
}
|
<gh_stars>10-100
import numpy as np

# Print arrays in full (no "..." truncation) when they are dumped for inspection.
np.set_printoptions(threshold=np.inf)

model_name = 'level_1012'
ori_path = f'SSIM/results/{model_name}_outputs_ori.npy'
scal_path = f'SSIM/results/{model_name}_outputs_scal.npy'

# Load the saved model outputs (original and scaled variants).
ori_outputs = np.load(ori_path)
scal_outputs = np.load(scal_path)
print(ori_outputs.shape)

# Partition the original outputs along axis 0 into shape[0] // 3 chunks,
# i.e. groups of ~3 consecutive rows each.
ori_chunks = np.array_split(ori_outputs, ori_outputs.shape[0] // 3)
print(len(ori_chunks))
|
#!/bin/bash
# Copyright (c) 2021, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
#Function to output message to StdErr
function echo_stderr ()
{
    # Forward all arguments unchanged, but onto stderr instead of stdout.
    echo "$@" 1>&2
}
#Function to display usage message
function usage()
{
    # Single synopsis line; all setup arguments arrive on stdin as one line.
    local msg='./addnode.sh <<< "<addNodeSetupArgumentsFromStdIn>"'
    echo_stderr "$msg"
}
function installUtilities()
{
    # Install the OS packages the rest of this setup relies on:
    # zip/unzip/wget for WDT download, cifs-utils for the Azure file share
    # mount, rng-tools for entropy (WebLogic crypto can stall on low-entropy VMs).
    echo "Installing zip unzip wget vnc-server rng-tools cifs-utils"
    sudo yum install -y zip unzip wget vnc-server rng-tools cifs-utils

    #Setting up rngd utils
    # Retry the rngd service start up to 3 times, one minute apart.
    attempt=1
    while [[ $attempt -lt 4 ]]
    do
        echo "Starting rngd service attempt $attempt"
        sudo systemctl start rngd
        attempt=`expr $attempt + 1`
        # grep's exit status tells us whether the unit reports "running".
        sudo systemctl status rngd | grep running
        if [[ $? == 0 ]];
        then
            echo "rngd utility service started successfully"
            break
        fi
        sleep 1m
    done
}
function validateInput()
{
    # Every scalar argument read from stdin is mandatory. BUG FIX: the
    # original logged "<name> is required" for most of these but then kept
    # going; a missing value now aborts immediately with exit 1, matching the
    # handling the original already used for the credential pairs.
    local var
    for var in wlsDomainName managedServerPrefix serverIndex wlsAdminURL oracleHome \
               wlsDomainPath dynamicClusterSize vmNamePrefix storageAccountName \
               storageAccountKey mountpointPath enableELK elasticURI elasticUserName \
               elasticPassword logsToIntegrate logIndex maxDynamicClusterSize
    do
        if [ -z "${!var}" ];
        then
            echo_stderr "$var is required. "
            exit 1
        fi
    done

    if [[ -z "$wlsUserName" || -z "$wlsPassword" ]]
    then
        echo_stderr "wlsUserName or wlsPassword is required. "
        exit 1
    fi

    if [[ -z "$wlsADSSLCer" || -z "$wlsLDAPPublicIP" || -z "$adServerHost" ]]; then
        echo_stderr "wlsADSSLCer, wlsLDAPPublicIP and adServerHost are required. "
        exit 1
    fi

    # AAD integration is active only when all three values carry real data
    # (the deployment template passes the literal string "null" to disable it).
    if [[ "$wlsADSSLCer" != "null" && "$wlsLDAPPublicIP" != "null" && "$adServerHost" != "null" ]]; then
        enableAAD="true"
    fi

    # BUG FIX: the original used '[ ! -z "$isCustomSSLEnabled" == "true" ]',
    # which is not a valid string comparison; compare the value directly.
    if [ "$isCustomSSLEnabled" == "true" ];
    then
        if [[ -z "$customIdentityKeyStoreBase64String" || -z "$customIdentityKeyStorePassPhrase" || -z "$customIdentityKeyStoreType" ||
              -z "$customTrustKeyStoreBase64String" || -z "$customTrustKeyStorePassPhrase" || -z "$customTrustKeyStoreType" ||
              -z "$privateKeyAlias" || -z "$privateKeyPassPhrase" ]]
        then
            echo_stderr "customIdentityKeyStoreBase64String, customIdentityKeyStorePassPhrase, customIdentityKeyStoreType, customTrustKeyStoreBase64String, customTrustKeyStorePassPhrase, customTrustKeyStoreType, privateKeyAlias and privateKeyPassPhrase are required. "
            exit 1
        fi
    fi
}
#Function to cleanup all temporary files
function cleanup()
{
    echo "Cleaning up temporary files..."
    # Scratch artifacts written under the domain path during setup.
    local leftover
    for leftover in managed-domain.yaml weblogic-deploy.zip weblogic-deploy \
                    deploy-app.yaml shoppingcart.zip '*.py'
    do
        # Intentionally unquoted so the '*.py' entry globs under the domain path.
        rm -rf $wlsDomainPath/$leftover
    done
    echo "Cleanup completed."
}
#Creates weblogic deployment model for managed server domain
function create_managed_server_domain()
{
    # Writes the WebLogic Deploy Tooling (WDT) model describing this managed
    # server's domain: dynamic cluster, server template and node manager.
    echo "Creating managed server domain"
    cat <<EOF >$wlsDomainPath/managed-domain.yaml
domainInfo:
   AdminUserName: "$wlsUserName"
   AdminPassword: "$wlsPassword"
   ServerStartMode: prod
topology:
   Name: "$wlsDomainName"
   Machine:
     '$machineName':
         NodeManager:
             ListenAddress: "$nmHost"
             ListenPort: $nmPort
             NMType : ssl
   Cluster:
        '$wlsClusterName':
             MigrationBasis: 'consensus'
             DynamicServers:
                ServerTemplate: '${dynamicServerTemplate}'
                DynamicClusterSize: ${dynamicClusterSize}
                MaxDynamicClusterSize: ${maxDynamicClusterSize}
                CalculatedListenPorts: true
                CalculatedMachineNames: true
                ServerNamePrefix: "${managedServerPrefix}"
                MachineNameMatchExpression: "machine-${vmNamePrefix}*"
   ServerTemplate:
        '${dynamicServerTemplate}' :
            ListenPort: ${wlsManagedPort}
            Cluster: '${wlsClusterName}'
            SSL:
               HostnameVerificationIgnored: true
               HostnameVerifier: 'None'
EOF
    # BUG FIX: the SSL and security fragments below were appended to
    # "$DOMAIN_PATH/managed-domain.yaml", but DOMAIN_PATH is never defined in
    # this script, so they never reached the model created above. All appends
    # now target $wlsDomainPath. The two identical isCustomSSLEnabled checks
    # are also merged into one.
    if [ "${isCustomSSLEnabled}" == "true" ];
    then
        # NOTE(review): $serverPrivateKeyAlias / $serverPrivateKeyPassPhrase
        # and $customIdentityKeyStoreFileName / $customTrustKeyStoreFileName
        # are not assigned anywhere in this script (stdin supplies
        # privateKeyAlias/privateKeyPassPhrase and base64 blobs) — confirm
        # against the admin-server setup script that exports them.
        cat <<EOF>>$wlsDomainPath/managed-domain.yaml
            ServerPrivateKeyAlias: "$serverPrivateKeyAlias"
            ServerPrivateKeyPassPhraseEncrypted: "$serverPrivateKeyPassPhrase"
            KeyStores: 'CustomIdentityAndCustomTrust'
            CustomIdentityKeyStoreFileName: "$customIdentityKeyStoreFileName"
            CustomIdentityKeyStoreType: "$customIdentityKeyStoreType"
            CustomIdentityKeyStorePassPhraseEncrypted: "$customIdentityKeyStorePassPhrase"
            CustomTrustKeyStoreFileName: "$customTrustKeyStoreFileName"
            CustomTrustKeyStoreType: "$customTrustKeyStoreType"
            CustomTrustKeyStorePassPhraseEncrypted: "$customTrustKeyStorePassPhrase"
EOF
    fi
    cat <<EOF>>$wlsDomainPath/managed-domain.yaml
SecurityConfiguration:
    NodeManagerUsername: "$wlsUserName"
    NodeManagerPasswordEncrypted: "$wlsPassword"
EOF
}
#This function create py Script to create Machine on the Domain
function createMachinePyScript()
{
    # Generates a WLST script that registers this VM as a Machine on the
    # admin server and points its NodeManager entry at this host/port.
    # NOTE(review): the generated script shuts down the whole cluster before
    # adding the machine — confirm this is intended during scale-out.
    echo "Creating machine name model: $machineName"
    cat <<EOF >$wlsDomainPath/add-machine.py
connect('$wlsUserName','$wlsPassword','t3://$wlsAdminURL')
shutdown('$wlsClusterName', 'Cluster')
edit("$machineName")
startEdit()
cd('/')
cmo.createMachine('$machineName')
cd('/Machines/$machineName/NodeManager/$machineName')
cmo.setListenPort(int($nmPort))
cmo.setListenAddress('$nmHost')
cmo.setNMType('ssl')
save()
resolve()
activate()
destroyEditSession("$machineName")
disconnect()
EOF
}
#This function creates py Script to enroll Node Manager to the Domain
function createEnrollServerPyScript()
{
    # Generates a WLST script that enrolls this VM's NodeManager with the
    # domain (writes nm_password.properties etc. under the domain directory).
    echo "Creating managed server model"
    cat <<EOF >$wlsDomainPath/enroll-server.py
connect('$wlsUserName','$wlsPassword','t3://$wlsAdminURL')
nmEnroll('$wlsDomainPath/$wlsDomainName','$wlsDomainPath/$wlsDomainName/nodemanager')
disconnect()
EOF
}
#This function to wait for admin server
function wait_for_admin()
{
    # Poll the WebLogic readiness endpoint until it returns HTTP 200, giving
    # the admin server up to ~30 minutes (30 attempts, 1 minute apart).
    # FIX: the original duplicated the status check/break inside the loop and
    # only printed the success message on the slow path; the loop condition
    # now does all the checking and success is always reported.
    local count=1
    local CHECK_URL="http://$wlsAdminURL/weblogic/ready"
    local status
    # Extract the HTTP status code from the last response header block.
    status=$(curl --insecure -ILs $CHECK_URL | tac | grep -m1 HTTP/1.1 | awk {'print $2'})
    while [[ "$status" != "200" ]]
    do
        echo "Waiting for admin server to start"
        count=$((count+1))
        if [ $count -gt 30 ];
        then
            echo "Error : Maximum attempts exceeded while starting admin server"
            exit 1
        fi
        sleep 1m
        status=$(curl --insecure -ILs $CHECK_URL | tac | grep -m1 HTTP/1.1 | awk {'print $2'})
    done
    echo "Admin Server started successfully..."
}
#This function to start managed server
function start_cluster()
{
    # Generates and runs a WLST script that starts the whole dynamic cluster
    # via the admin server. Failures inside WLST dump a stack in the script;
    # a non-zero WLST exit aborts this setup.
    echo "Starting Cluster $wlsClusterName"
    cat <<EOF >$wlsDomainPath/start-server.py
connect('$wlsUserName','$wlsPassword','t3://$wlsAdminURL')
try:
   start('$wlsClusterName', 'Cluster')
except:
   print "Failed starting Cluster $wlsClusterName"
   dumpStack()
disconnect()
EOF
    sudo chown -R $username:$groupname $wlsDomainPath
    runuser -l oracle -c ". $oracleHome/oracle_common/common/bin/setWlstEnv.sh; java $WLST_ARGS weblogic.WLST $wlsDomainPath/start-server.py"
    if [[ $? != 0 ]]; then
        echo "Error : Failed in starting cluster"
        exit 1
    fi
}
#Function to start nodemanager
function start_nm()
{
    # Launch NodeManager in the background as the oracle user, then give it a
    # minute to come up before the enroll/start steps that depend on it.
    runuser -l oracle -c ". $oracleHome/oracle_common/common/bin/setWlstEnv.sh; \"$wlsDomainPath/$wlsDomainName/bin/startNodeManager.sh\" &"
    sleep 1m
}
function create_managedSetup(){
    # End-to-end managed-server setup:
    #   1. download & unpack WebLogic Deploy Tooling (WDT)
    #   2. generate the WDT model + WLST helper scripts
    #   3. create the domain, then register and enroll this machine
    echo "Creating Managed Server Setup"
    echo "Creating domain path /u01/domains"
    echo "Downloading weblogic-deploy-tool"
    cd $wlsDomainPath
    wget -q $WEBLOGIC_DEPLOY_TOOL
    if [[ $? != 0 ]]; then
        echo "Error : Downloading weblogic-deploy-tool failed"
        exit 1
    fi
    sudo unzip -o weblogic-deploy.zip -d $wlsDomainPath
    echo "Creating managed server model files"
    create_managed_server_domain
    createMachinePyScript
    createEnrollServerPyScript
    echo "Completed managed server model files"
    sudo chown -R $username:$groupname $wlsDomainPath
    # Build the local domain from the generated model, as the oracle user.
    runuser -l oracle -c ". $oracleHome/oracle_common/common/bin/setWlstEnv.sh; $wlsDomainPath/weblogic-deploy/bin/createDomain.sh -oracle_home ${oracleHome} -domain_parent $wlsDomainPath -domain_type WLS -model_file $wlsDomainPath/managed-domain.yaml"
    if [[ $? != 0 ]]; then
        echo "Error : Managed setup failed"
        exit 1
    fi
    wait_for_admin
    # For issue https://github.com/wls-eng/arm-oraclelinux-wls/issues/89
    # (all VMs must share the admin server's domain secret so encrypted
    # model attributes decrypt consistently)
    getSerializedSystemIniFileFromShare
    echo "Adding Machine $machineName"
    runuser -l oracle -c ". $oracleHome/oracle_common/common/bin/setWlstEnv.sh; java $WLST_ARGS weblogic.WLST $wlsDomainPath/add-machine.py"
    if [[ $? != 0 ]]; then
        echo "Error : Adding machine $machineName failed"
        exit 1
    fi
    echo "Enrolling Domain for Machine $machineName"
    runuser -l oracle -c ". $oracleHome/oracle_common/common/bin/setWlstEnv.sh; java $WLST_ARGS weblogic.WLST $wlsDomainPath/enroll-server.py"
    if [[ $? != 0 ]]; then
        echo "Error : Enrolling machine $machineName failed"
        exit 1
    fi
}
# Create systemctl service for nodemanager
function create_nodemanager_service()
{
    # Configures nodemanager.properties (crash recovery + optional custom SSL
    # keystores) and installs a systemd unit that runs NodeManager as oracle.
    echo "Creating services for Nodemanager"
    echo "Setting CrashRecoveryEnabled true at $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties"
    sed -i.bak -e 's/CrashRecoveryEnabled=false/CrashRecoveryEnabled=true/g' $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
    if [ $? != 0 ];
    then
        echo "Warning : Failed in setting option CrashRecoveryEnabled=true. Continuing without the option."
        # BUG FIX: the backup created by 'sed -i.bak' lives next to the edited
        # file under $wlsDomainName; the original restore path omitted the
        # domain directory and so could never find the .bak file.
        mv $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties.bak $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
    fi

    if [ "${isCustomSSLEnabled}" == "true" ];
    then
        echo "KeyStores=CustomIdentityAndCustomTrust" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
        echo "CustomIdentityKeystoreType=${customIdentityKeyStoreType}" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
        echo "CustomIdentityKeyStoreFileName=${customSSLIdentityKeyStoreFile}" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
        echo "CustomIdentityKeyStorePassPhrase=${customIdentityKeyStorePassPhrase}" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
        echo "CustomIdentityAlias=${privateKeyAlias}" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
        echo "CustomIdentityPrivateKeyPassPhrase=${privateKeyPassPhrase}" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
        echo "CustomTrustKeystoreType=${customTrustKeyStoreType}" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
        echo "CustomTrustKeyStoreFileName=${customSSLTrustKeyStoreFile}" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
        echo "CustomTrustKeyStorePassPhrase=${customTrustKeyStorePassPhrase}" >> $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties
    fi

    sudo chown -R $username:$groupname $wlsDomainPath/$wlsDomainName/nodemanager/nodemanager.properties*
    echo "Creating NodeManager service"
    cat <<EOF >/etc/systemd/system/wls_nodemanager.service
[Unit]
Description=WebLogic nodemanager service
[Service]
Type=simple
# Note that the following three parameters should be changed to the correct paths
# on your own system
WorkingDirectory="$wlsDomainPath/$wlsDomainName"
ExecStart="$wlsDomainPath/$wlsDomainName/bin/startNodeManager.sh"
ExecStop="$wlsDomainPath/$wlsDomainName/bin/stopNodeManager.sh"
User=oracle
Group=oracle
KillMode=process
LimitNOFILE=65535
[Install]
WantedBy=multi-user.target
EOF
    echo "Created service for Nodemanager"
}
function enabledAndStartNodeManagerService()
{
    # Register the unit for boot, reload systemd so it sees the new file,
    # then start NodeManager right away.
    local svc=wls_nodemanager
    sudo systemctl enable "$svc"
    sudo systemctl daemon-reload
    echo "Starting nodemanager service"
    sudo systemctl start "$svc"
}
function updateNetworkRules()
{
    # Open the WebLogic/NodeManager ports in firewalld.
    # for Oracle Linux 7.3, 7.4, iptable is not running — skip when
    # firewall-cmd is absent. FIX: the command substitution is now quoted so
    # the -z test is a real string test rather than relying on test's
    # one-argument fallback behavior.
    if [ -z "$(command -v firewall-cmd)" ]; then
        return 0
    fi
    # for Oracle Linux 7.6, open weblogic ports
    # FIX: ${tag} is quoted — the unquoted form raised "unary operator
    # expected" whenever the caller passed no argument.
    local tag=$1
    if [ "${tag}" == 'admin' ]; then
        echo "update network rules for admin server"
        sudo firewall-cmd --zone=public --add-port=$wlsAdminPort/tcp
        sudo firewall-cmd --zone=public --add-port=$wlsSSLAdminPort/tcp
        sudo firewall-cmd --zone=public --add-port=$wlsManagedPort/tcp
        sudo firewall-cmd --zone=public --add-port=$nmPort/tcp
    else
        echo "update network rules for managed server"
        sudo firewall-cmd --zone=public --add-port=$wlsManagedPort/tcp
        sudo firewall-cmd --zone=public --add-port=$nmPort/tcp
    fi
    # Persist the runtime rules, then restart so they apply cleanly.
    sudo firewall-cmd --runtime-to-permanent
    sudo systemctl restart firewalld
}
# Mount the Azure file share on all VMs created
function mountFileShare()
{
    echo "Creating mount point"
    echo "Mount point: $mountpointPath"
    sudo mkdir -p $mountpointPath
    if [ ! -d "/etc/smbcredentials" ]; then
        sudo mkdir /etc/smbcredentials
    fi
    if [ ! -f "/etc/smbcredentials/${storageAccountName}.cred" ]; then
        echo "Creating smbcredentials"
        # BUG FIX: the original echoed the storage account key into the VM
        # extension log and used unescaped nested double quotes when writing
        # the credential file; the inner quotes are now escaped and the
        # secret is no longer logged.
        sudo bash -c "echo \"username=$storageAccountName\" >> /etc/smbcredentials/${storageAccountName}.cred"
        sudo bash -c "echo \"password=$storageAccountKey\" >> /etc/smbcredentials/${storageAccountName}.cred"
    fi
    echo "chmod 600 /etc/smbcredentials/${storageAccountName}.cred"
    sudo chmod 600 /etc/smbcredentials/${storageAccountName}.cred
    # Persist the mount in fstab, then mount it now.
    echo "//${storageAccountName}.file.core.windows.net/wlsshare $mountpointPath cifs nofail,vers=2.1,credentials=/etc/smbcredentials/${storageAccountName}.cred ,dir_mode=0777,file_mode=0777,serverino"
    sudo bash -c "echo \"//${storageAccountName}.file.core.windows.net/wlsshare $mountpointPath cifs nofail,vers=2.1,credentials=/etc/smbcredentials/${storageAccountName}.cred ,dir_mode=0777,file_mode=0777,serverino\" >> /etc/fstab"
    echo "mount -t cifs //${storageAccountName}.file.core.windows.net/wlsshare $mountpointPath -o vers=2.1,credentials=/etc/smbcredentials/${storageAccountName}.cred,dir_mode=0777,file_mode=0777,serverino"
    sudo mount -t cifs //${storageAccountName}.file.core.windows.net/wlsshare $mountpointPath -o vers=2.1,credentials=/etc/smbcredentials/${storageAccountName}.cred,dir_mode=0777,file_mode=0777,serverino
    if [[ $? != 0 ]];
    then
        echo "Failed to mount //${storageAccountName}.file.core.windows.net/wlsshare $mountpointPath"
        exit 1
    fi
}
# Get SerializedSystemIni.dat file from share point to managed server vm
function getSerializedSystemIniFileFromShare()
{
    # Replace the locally generated domain secret with the admin server's
    # copy from the Azure file share so all VMs share one encryption key.
    runuser -l oracle -c "mv ${wlsDomainPath}/${wlsDomainName}/security/SerializedSystemIni.dat ${wlsDomainPath}/${wlsDomainName}/security/SerializedSystemIni.dat.backup"
    runuser -l oracle -c "cp ${mountpointPath}/SerializedSystemIni.dat ${wlsDomainPath}/${wlsDomainName}/security/."
    # BUG FIX: the original checked $? of the subsequent 'ls' rather than the
    # copy itself; test the copy's exit status directly.
    if [[ $? != 0 ]];
    then
        echo "Failed to get ${mountpointPath}/SerializedSystemIni.dat"
        exit 1
    fi
    # Listing kept purely for the log.
    ls -lt ${wlsDomainPath}/${wlsDomainName}/security/SerializedSystemIni.dat
    runuser -l oracle -c "chmod 640 ${wlsDomainPath}/${wlsDomainName}/security/SerializedSystemIni.dat"
}
function mapLDAPHostWithPublicIP()
{
    echo "map LDAP host with pubilc IP"
    # BUG FIX (two issues): the sed pattern was single-quoted so
    # ${adServerHost} never expanded and stale entries were never removed;
    # and 'sudo echo ... >> /etc/hosts' performed the redirection as the
    # unprivileged user. Double quotes and 'sudo tee -a' fix both.
    sudo sed -i "/${adServerHost}/d" /etc/hosts
    echo "${wlsLDAPPublicIP} ${adServerHost}" | sudo tee -a /etc/hosts > /dev/null
}
function parseLDAPCertificate()
{
    # Re-wrap the single-line base64 AD certificate (wlsADSSLCer) into
    # 64-character lines so 'openssl base64 -d' accepts it, then decode it to
    # the certificate file used by the import functions. Sets the global
    # 'addsCertificate' to the decoded certificate's path.
    echo "create key store"
    cer_begin=0
    cer_size=${#wlsADSSLCer}
    cer_line_len=64
    mkdir ${SCRIPT_PWD}/security
    touch ${SCRIPT_PWD}/security/AzureADLDAPCerBase64String.txt
    # Emit one 64-char slice of the base64 string per line.
    while [ ${cer_begin} -lt ${cer_size} ]
    do
        cer_sub=${wlsADSSLCer:$cer_begin:$cer_line_len}
        echo ${cer_sub} >> ${SCRIPT_PWD}/security/AzureADLDAPCerBase64String.txt
        cer_begin=$((cer_begin+$cer_line_len))
    done
    openssl base64 -d -in ${SCRIPT_PWD}/security/AzureADLDAPCerBase64String.txt -out ${SCRIPT_PWD}/security/AzureADTrust.cer
    addsCertificate=${SCRIPT_PWD}/security/AzureADTrust.cer
}
function importAADCertificate()
{
    # Import the decoded AD trust certificate ($addsCertificate) into the
    # JDK's default cacerts truststore under the alias 'aadtrust'.
    # import the key to java security
    . $oracleHome/oracle_common/common/bin/setWlstEnv.sh
    # For AAD failure: exception happens when importing certificate to JDK 11.0.7
    # ISSUE: https://github.com/wls-eng/arm-oraclelinux-wls/issues/109
    # JRE was removed since JDK 11, so cacerts moved from jre/lib to lib.
    java_version=$(java -version 2>&1 | sed -n ';s/.* version "\(.*\)\.\(.*\)\..*"/\1\2/p;')
    if [ ${java_version:0:3} -ge 110 ];
    then
        java_cacerts_path=${JAVA_HOME}/lib/security/cacerts
    else
        java_cacerts_path=${JAVA_HOME}/jre/lib/security/cacerts
    fi
    # Remove any stale 'aadtrust' alias so re-runs do not fail on a duplicate.
    queryAADTrust=$(${JAVA_HOME}/bin/keytool -list -keystore ${java_cacerts_path} -storepass changeit | grep "aadtrust")
    if [ -n "${queryAADTrust}" ];
    then
        sudo ${JAVA_HOME}/bin/keytool -delete -alias aadtrust -keystore ${java_cacerts_path} -storepass changeit
    fi
    sudo ${JAVA_HOME}/bin/keytool -noprompt -import -alias aadtrust -file ${addsCertificate} -keystore ${java_cacerts_path} -storepass changeit
}
function importAADCertificateIntoWLSCustomTrustKeyStore()
{
    # When custom SSL is enabled, the AAD certificate must also land in the
    # WebLogic custom trust keystore (not only the JDK cacerts).
    if [ "${isCustomSSLEnabled,,}" == "true" ];
    then
        # set java home
        . $oracleHome/oracle_common/common/bin/setWlstEnv.sh

        #validate Trust keystore
        runuser -l oracle -c ". $oracleHome/oracle_common/common/bin/setWlstEnv.sh; keytool -list -v -keystore $customSSLTrustKeyStoreFile -storepass $customTrustKeyStorePassPhrase -storetype $customTrustKeyStoreType | grep 'Entry type:' | grep 'trustedCertEntry'"
        if [[ $? != 0 ]]; then
            echo "Error : Trust Keystore Validation Failed !!"
            exit 1
        fi

        # For SSL enabled causes AAD failure #225
        # ISSUE: https://github.com/wls-eng/arm-oraclelinux-wls/issues/225
        echo "Importing AAD Certificate into WLS Custom Trust Key Store: "
        # BUG FIX: the keystore path was written as '{KEYSTORE_PATH}' (missing
        # '$'), so keytool targeted a literal relative directory; expand it.
        sudo ${JAVA_HOME}/bin/keytool -noprompt -import -trustcacerts -keystore ${KEYSTORE_PATH}/trust.keystore -storepass ${customTrustKeyStorePassPhrase} -alias aadtrust -file ${addsCertificate} -storetype ${customTrustKeyStoreType}
    else
        echo "customSSL not enabled. Not required to configure AAD for WebLogic Custom SSL"
    fi
}
function validateSSLKeyStores()
{
    # Sanity-check both decoded keystores with keytool before any component
    # is configured to use them: the identity store must hold a private key
    # entry and the trust store at least one trusted certificate entry.
    sudo chown -R $username:$groupname $KEYSTORE_PATH

    #validate identity keystore
    runuser -l oracle -c ". $oracleHome/oracle_common/common/bin/setWlstEnv.sh; keytool -list -v -keystore $customSSLIdentityKeyStoreFile -storepass $customIdentityKeyStorePassPhrase -storetype $customIdentityKeyStoreType | grep 'Entry type:' | grep 'PrivateKeyEntry'"
    if [[ $? != 0 ]]; then
        echo "Error : Identity Keystore Validation Failed !!"
        exit 1
    fi

    #validate Trust keystore
    runuser -l oracle -c ". $oracleHome/oracle_common/common/bin/setWlstEnv.sh; keytool -list -v -keystore $customSSLTrustKeyStoreFile -storepass $customTrustKeyStorePassPhrase -storetype $customTrustKeyStoreType | grep 'Entry type:' | grep 'trustedCertEntry'"
    if [[ $? != 0 ]]; then
        echo "Error : Trust Keystore Validation Failed !!"
        exit 1
    fi

    echo "ValidateSSLKeyStores Successfull !!"
}
function parseAndSaveCustomSSLKeyStoreData()
{
    echo "create key stores for custom ssl settings"
    # Decode the base64 keystore blobs (read from stdin) into real keystore
    # files under $KEYSTORE_PATH and record their paths in the globals used
    # by the rest of the script.
    # FIX: removed the duplicated 'mkdir -p' and the intermediate *.txt files
    # that briefly left keystore material on disk only to be deleted again —
    # the blobs are now piped straight through base64.
    mkdir -p ${KEYSTORE_PATH}

    echo "$customIdentityKeyStoreBase64String" | base64 -d > ${KEYSTORE_PATH}/identity.keystore
    customSSLIdentityKeyStoreFile=${KEYSTORE_PATH}/identity.keystore

    echo "$customTrustKeyStoreBase64String" | base64 -d > ${KEYSTORE_PATH}/trust.keystore
    customSSLTrustKeyStoreFile=${KEYSTORE_PATH}/trust.keystore
}
#main script starts here

SCRIPT_PWD=`pwd`

# store arguments in a special array
#args=("$@")
# get number of elements
#ELEMENTS=${#args[@]}

# All 32 configuration values arrive on a single whitespace-separated line
# of stdin, in exactly this order.
read wlsDomainName wlsUserName wlsPassword managedServerPrefix serverIndex wlsAdminURL oracleHome wlsDomainPath dynamicClusterSize vmNamePrefix storageAccountName storageAccountKey mountpointPath wlsADSSLCer wlsLDAPPublicIP adServerHost enableELK elasticURI elasticUserName elasticPassword logsToIntegrate logIndex maxDynamicClusterSize isCustomSSLEnabled customIdentityKeyStoreBase64String customIdentityKeyStorePassPhrase customIdentityKeyStoreType customTrustKeyStoreBase64String customTrustKeyStorePassPhrase customTrustKeyStoreType privateKeyAlias privateKeyPassPhrase

# Normalise the SSL flag to lower case; AAD is switched on later by
# validateInput when real certificate/host values were supplied.
isCustomSSLEnabled="${isCustomSSLEnabled,,}"
enableAAD="false"

validateInput

# Fixed topology settings (must match the admin-server setup script).
nmHost=`hostname`
nmPort=5556
wlsAdminPort=7001
wlsSSLAdminPort=7002
wlsManagedPort=8001
wlsClusterName="cluster1"
dynamicServerTemplate="myServerTemplate"
machineNamePrefix="machine"
machineName="$machineNamePrefix-$nmHost"
WEBLOGIC_DEPLOY_TOOL=https://github.com/oracle/weblogic-deploy-tooling/releases/download/weblogic-deploy-tooling-1.8.1/weblogic-deploy.zip
username="oracle"
groupname="oracle"
KEYSTORE_PATH="$wlsDomainPath/$wlsDomainName/keystores"

# Start from a clean slate, install prerequisites and mount the share.
cleanup

installUtilities
mountFileShare
updateNetworkRules "managed"

if [ "$isCustomSSLEnabled" == "true" ];then
    parseAndSaveCustomSSLKeyStoreData
fi

if [ "$enableAAD" == "true" ];then
    mapLDAPHostWithPublicIP
    parseLDAPCertificate
    importAADCertificate
    importAADCertificateIntoWLSCustomTrustKeyStore
fi

# Create the domain, register/enroll this machine, then bring everything up.
create_managedSetup
create_nodemanager_service
enabledAndStartNodeManagerService
start_cluster

echo "enable ELK? ${enableELK}"
if [[ "${enableELK,,}" == "true" ]];then
    echo "Set up ELK..."
    ${SCRIPT_PWD}/elkIntegrationForDynamicCluster.sh \
        ${oracleHome} \
        ${wlsAdminURL} \
        ${managedServerPrefix} \
        ${wlsUserName} \
        ${wlsPassword} \
        "admin" \
        ${elasticURI} \
        ${elasticUserName} \
        ${elasticPassword} \
        ${wlsDomainName} \
        ${wlsDomainPath}/${wlsDomainName} \
        ${logsToIntegrate} \
        ${serverIndex} \
        ${logIndex} \
        ${maxDynamicClusterSize}
fi

# Remove temporary model/scripts left behind by the setup.
cleanup
|
/**
 * Compute the arithmetic mean of an array of numbers.
 *
 * @param {number[]} nums - values to average
 * @returns {number} the mean; NaN for an empty array (0 / 0), matching the
 *   original loop-based implementation
 */
function averageOfNums(nums) {
  const total = nums.reduce((acc, value) => acc + value, 0);
  return total / nums.length;
}
|
"use babel"
import { CompositeDisposable } from "atom"
import IndentationLinesView from "./indentation-lines-view"
export default {
activate() {
this.subscriptions = new CompositeDisposable()
this.subscriptions.add(atom.workspace.observeTextEditors((editor) => {
new IndentationLinesView(editor)
}))
},
deactivate() {
this.subscriptions.dispose()
}
}
|
#!/usr/bin/env bash
# Convert a source image into static/favicon.ico using the dockerized
# ImageMagick "img-convert" service.
#
# Usage: $0 <image-path-relative-to-container-root>

# FIX: fail fast on a missing argument instead of attempting to convert "/".
if [ -z "$1" ]; then
    echo "usage: $0 <source-image>" >&2
    exit 1
fi

# The container mounts the project at /, so anchor the source path there.
SRC="/$1"

# FIX: "$SRC" is quoted so paths containing spaces survive word splitting.
docker-compose \
    run \
    --rm \
    img-convert \
    convert -density 256x256 -background none "$SRC" -colors 256 /src/static/favicon.ico

#convert -density 384 -background transparent $SRC -define icon:auto-resize -colors 256 /src/static/favicon.ico
|
<reponame>zonesgame/StendhalArcClient
package mindustry.type;

import arc.func.*;
import mindustry.ctype.*;
import mindustry.ctype.ContentType;
import mindustry.entities.traits.*;

/**
 * Content entry identifying a unit's TypeTrait: maps a content name to the
 * provider that constructs instances of that trait.
 * (Original comment, translated: "Trait type ID for a Unit.")
 */
public class TypeID extends MappableContent{
    // Factory invoked to create new instances of the associated trait.
    public final Prov<? extends TypeTrait> constructor;

    /**
     * @param name content name registered with the base MappableContent
     * @param constructor provider that builds the trait instances
     */
    public TypeID(String name, Prov<? extends TypeTrait> constructor){
        super(name);
        this.constructor = constructor;
    }

    /** @return the content category for this entry ({@link ContentType#typeid}) */
    @Override
    public ContentType getContentType(){
        return ContentType.typeid;
    }
}
|
package com.chequer.axboot.core.mybatis;

/**
 * Marker interface for MyBatis mapper interfaces; declares no methods and is
 * presumably used to identify/scan mappers — TODO confirm against the
 * mapper-scanning configuration.
 */
public interface MyBatisMapper {
}
|
#!/usr/bin/env bash
# Don't use the default trap in common.bash
NOTRAP=1
HEREDIR=$(readlink -f "$(dirname "$0")")
# shellcheck source=common.bash
source "$HEREDIR/common.bash"
function usage {
    # Print the CLI synopsis on stdout, then exit non-zero.
    cat <<USAGE
usage: $0 [FLAGS] [-r trials] [-f fuzzer-name] [-p snaps-placement] -d outdir -o output-csv -t target
  -h print this help
  -a append to existing output
  -e extract afl* trial data archives first; overwrites previous separate coverage.sh run
USAGE
    exit 1
}
# Defaults: nyx fuzzer, no output file, fresh (non-append) run, no archive
# extraction. 'runs', 'target', 'outdir' and 'snap_placement' come from the
# option handlers / helpers sourced from common.bash.
fuzzer_name=nyx
outcsv=
append=0
extract=0

while getopts ":haer:o:t:d:f:p:" opt; do
    case ${opt} in
        h)
            usage
            ;;
        r)
            # Number of trials; must be a positive integer.
            validate_posnum "$OPTARG" "$opt"
            runs=${OPTARG}
            ;;
        t)
            target=${OPTARG}
            ;;
        d)
            # Sets/validates the global 'outdir' (helper from common.bash).
            validate_outdir
            ;;
        o)
            outcsv=${OPTARG}
            ;;
        a)
            append=1
            ;;
        e)
            extract=1
            ;;
        f)
            fuzzer_name=${OPTARG}
            ;;
        p)
            # Sets/validates the global 'snap_placement' (common.bash helper).
            validate_snap_placement
            ;;
        :)
            no_arg_error
            ;;
        ?)
            invalid_arg_error
            ;;
    esac
done

# -t, -d and -o are all mandatory.
if [ -z "$target" ] || [ -z "$outdir" ] || [ -z "$outcsv" ]; then
    >&2 error "Required arguments missing."
    >&2 usage
fi

if [ -z "$fuzzer_name" ]; then
    >&2 error "Fuzzer parameter is empty"
    >&2 usage
fi

# Variants like "nyx-foo" all map to the base fuzzer name "nyx" for
# directory lookup, while fuzzer_name keeps the full tag for the CSV.
fuzzer="$fuzzer_name"
if [[ "$fuzzer_name" =~ ^nyx ]]; then
    fuzzer="nyx"
fi

# In append mode the CSV must already exist; otherwise truncate and write
# the long-format header.
if [ $append = 1 ]; then
    if [ ! -f "$outcsv" ] || [ ! -w "$outcsv" ]; then
        >&2 error "Output is not writable or existent (append mode)"
        exit 1
    fi
else
    if ! truncate -s 0 "$outcsv"; then
        >&2 error "Could not truncate '$outcsv'"
        exit 1
    fi
    # Init file with header
    echo "time,subject,fuzzer,run,cov_type,cov" >> "$outcsv"
fi
# Remove space(s); it requires that there is no space in the middle
# BUG FIX: the original used the extglob pattern '${var##*( )}' without
# enabling 'shopt -s extglob', so leading spaces were never stripped; strip
# them explicitly instead.
strim() {
    local s=$1
    # Drop leading space characters one at a time.
    while [[ $s == ' '* ]]; do
        s=${s# }
    done
    echo "$s"
}
#original format: time,l_per,l_abs,b_per,b_abs
#converted format: time,subject,fuzzer,run,cov_type,cov
convert() {
    # Explode each wide input row into four long-format rows (one per
    # coverage metric) appended to $outcsv.
    local run_index=$1 ifile=$2 line
    # Qualify the fuzzer tag with the snapshot placement when one is set.
    local fuzzer_tag="$fuzzer_name"
    if [ "$snap_placement" != "none" ]; then
        fuzzer_tag="$fuzzer_name-$snap_placement"
    fi
    # Skip the input header, then pull out the five comma-separated fields,
    # trimming stray spaces around each value.
    tail -n +2 "$ifile" | while read -r line; do
        time=$(strim "$(echo "$line" | cut -d',' -f1)")
        l_per=$(strim "$(echo "$line" | cut -d',' -f2)")
        l_abs=$(strim "$(echo "$line" | cut -d',' -f3)")
        b_per=$(strim "$(echo "$line" | cut -d',' -f4)")
        b_abs=$(strim "$(echo "$line" | cut -d',' -f5)")
        {
            echo "$time,$target,$fuzzer_tag,$run_index,l_per,$l_per"
            echo "$time,$target,$fuzzer_tag,$run_index,l_abs,$l_abs"
            echo "$time,$target,$fuzzer_tag,$run_index,b_per,$b_per"
            echo "$time,$target,$fuzzer_tag,$run_index,b_abs,$b_abs"
        } >> "$outcsv"
    done
}
main() {
    # For each trial: optionally unpack its archives, locate the per-trial
    # coverage CSV and convert it into the long-format output.
    local i trial_outdir trial_archive trial_cov_archive runcsv
    local is_afl=0
    # AFL-family fuzzers archive trials and name their CSVs differently.
    [[ "$fuzzer" =~ ^afl ]] && is_afl=1
    for i in $(seq 0 $((runs - 1))); do
        trial_outdir=$(get_outdir "$i" "$fuzzer")
        # With -e (AFL only), re-extract the whole trial directory from its
        # tarball, replacing any previous extraction.
        if [ $extract = 1 ] && [ $is_afl = 1 ]; then
            trial_archive="$trial_outdir.tar.gz"
            info "Extracting $trial_archive"
            [ -d "$trial_outdir" ] && rm -rf "$trial_outdir"
            if ! tmpdir=$(mktemp -d); then
                >&2 error "Failed to create temporary directory"
                exit 1
            fi
            if tar -xf "$trial_archive" -C "$tmpdir"; then
                set -e
                mv "$tmpdir/out-$target-$fuzzer" "$trial_outdir"
                rm -r "$tmpdir"
                set +e
            else
                >&2 error "Failed to extract trial archive"
                exit 1
            fi
        fi
        # Unpack a nested coverage archive when the trial ships one.
        trial_cov_archive="$trial_outdir/coverage.tar.gz"
        if [ -e "$trial_cov_archive" ]; then
            info "Extracting coverage archive $trial_cov_archive"
            if ! tar -xf "$trial_cov_archive" -C "$trial_outdir"; then
                >&2 error "Failed to extract coverage archive $trial_cov_archive"
                exit 1
            fi
        fi
        # AFL writes cov_over_time.csv; nyx writes coverage.csv.
        if [ "$is_afl" = 1 ]; then
            runcsv="$trial_outdir/cov_over_time.csv"
        else
            runcsv="$trial_outdir/coverage.csv"
        fi
        info "Converting from $runcsv to $outcsv"
        if [ ! -f "$runcsv" ] || [ ! -r "$runcsv" ]; then
            >&2 warn "CSV not a file or not readable"
            continue
        fi
        # Trial runs are 1-indexed in the output CSV.
        convert $((i + 1)) "$runcsv"
    done
}
# Entry point: convert every trial's CSV, then report completion.
main
info "All done!"
|
<reponame>Sopiro/Physics
import { Vector2 } from "./math.js";
import { Simplex } from "./simplex.js";
// Result of Polytope.getClosestEdge(): the index of the edge's first
// vertex, the edge's perpendicular distance from the origin, and the
// edge normal oriented to point away from the origin.
export interface ClosestEdgeInfo
{
    index: number;
    distance: number;
    normal: Vector2;
}
// Convex polytope seeded from a 3-vertex simplex (triangle); supports
// querying the edge nearest to the origin.
export class Polytope
{
    public readonly vertices: Vector2[];

    // The seed simplex must be a full triangle. Its vertices are copied
    // so the polytope can be mutated without touching the simplex.
    constructor(simplex: Simplex)
    {
        if (simplex.count != 3) throw "Input simplex isn't a triangle";

        this.vertices = simplex.vertices.slice(0, 3).map(v => v.copy());
    }

    public get count(): number
    {
        return this.vertices.length;
    }

    // Returns the edge closest to the origin
    getClosestEdge(): ClosestEdgeInfo
    {
        let best: ClosestEdgeInfo = { index: 0, distance: Infinity, normal: new Vector2() };

        for (let i = 0; i < this.count; i++)
        {
            const curr = this.vertices[i];
            const next = this.vertices[(i + 1) % this.count];
            const edge = next.sub(curr);

            // Perpendicular of the edge, normalized to unit length.
            const normal = new Vector2(-edge.y, edge.x).normalized();
            let distance = normal.dot(curr);

            // Flip so the normal always points away from the origin.
            if (distance < 0)
            {
                distance = -distance;
                normal.invert();
            }

            if (distance < best.distance)
                best = { index: i, distance: distance, normal: normal };
        }

        return best;
    }
}
|
var NAVTREEINDEX31 =
{
"armnn_tf_parser_2test_2_mean_8cpp.xhtml#ac13d193e18724ec1171e0e1a7909ac7f":[8,0,1,8,0,21,4],
"armnn_tf_parser_2test_2_mean_8cpp.xhtml#ac46e1d1e4c8f3de33bb8d22c5b57c7c1":[8,0,1,8,0,21,1],
"armnn_tf_parser_2test_2_mean_8cpp.xhtml#ae777849f6582f53b6b29eb3fd9c3bc22":[8,0,1,8,0,21,3],
"armnn_tf_parser_2test_2_mean_8cpp_source.xhtml":[8,0,1,8,0,21],
"armnn_tf_parser_2test_2_minimum_8cpp.xhtml":[8,0,1,8,0,22],
"armnn_tf_parser_2test_2_minimum_8cpp.xhtml#a22beb506dae13f30efcd16bf90e0df7b":[8,0,1,8,0,22,1],
"armnn_tf_parser_2test_2_minimum_8cpp.xhtml#a397f9751e9b2b537044ad50df7956199":[8,0,1,8,0,22,3],
"armnn_tf_parser_2test_2_minimum_8cpp.xhtml#a4330bf1d859904ff3010c26099cc2798":[8,0,1,8,0,22,0],
"armnn_tf_parser_2test_2_minimum_8cpp.xhtml#a987870bce143386b1a18e2fd3dd10bfe":[8,0,1,8,0,22,2],
"armnn_tf_parser_2test_2_minimum_8cpp.xhtml#ac8219e832c3c5988459a8f482c30e1ae":[8,0,1,8,0,22,4],
"armnn_tf_parser_2test_2_minimum_8cpp_source.xhtml":[8,0,1,8,0,22],
"armnn_tf_parser_2test_2_multiplication_8cpp.xhtml":[8,0,1,8,0,24],
"armnn_tf_parser_2test_2_multiplication_8cpp.xhtml#a2e1099101b63a7384154b6a5be67eb4c":[8,0,1,8,0,24,0],
"armnn_tf_parser_2test_2_multiplication_8cpp.xhtml#acc6e2d4f962a9eb26924682476aab381":[8,0,1,8,0,24,1],
"armnn_tf_parser_2test_2_multiplication_8cpp.xhtml#adcd7538f607097efe7782a4d697dea9e":[8,0,1,8,0,24,2],
"armnn_tf_parser_2test_2_multiplication_8cpp_source.xhtml":[8,0,1,8,0,24],
"armnn_tf_parser_2test_2_pad_8cpp.xhtml":[8,0,1,8,0,25],
"armnn_tf_parser_2test_2_pad_8cpp.xhtml#a589019c814fb13b23e6e7e8957f2358c":[8,0,1,8,0,25,0],
"armnn_tf_parser_2test_2_pad_8cpp_source.xhtml":[8,0,1,8,0,25],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml":[8,0,1,8,0,27],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml#a098d4ce5800b0bf43bb583f89792c8e0":[8,0,1,8,0,27,5],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml#a1910f21e9e525fea3c967b20345d924f":[8,0,1,8,0,27,4],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml#a27640c3143f9947edd06ef2a0f8da7c7":[8,0,1,8,0,27,0],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml#a32fbc45e5916ce27bcf25fe1b36c33be":[8,0,1,8,0,27,6],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml#a53a2105351a421b31d40ab13b538dbb7":[8,0,1,8,0,27,7],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml#a6194e83079398b6f87b6a4e3e6256aaf":[8,0,1,8,0,27,3],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml#a85a7bfc2360794af492a64e286e945b9":[8,0,1,8,0,27,2],
"armnn_tf_parser_2test_2_pooling_8cpp.xhtml#aab47e7238db979daf027c65718652f78":[8,0,1,8,0,27,1],
"armnn_tf_parser_2test_2_pooling_8cpp_source.xhtml":[8,0,1,8,0,27],
"armnn_tf_parser_2test_2_reshape_8cpp.xhtml":[8,0,1,8,0,29],
"armnn_tf_parser_2test_2_reshape_8cpp.xhtml#aa0de997acfe6346d5f26a717f1fc64b6":[8,0,1,8,0,29,0],
"armnn_tf_parser_2test_2_reshape_8cpp_source.xhtml":[8,0,1,8,0,29],
"armnn_tf_parser_2test_2_resize_bilinear_8cpp.xhtml":[8,0,1,8,0,30],
"armnn_tf_parser_2test_2_resize_bilinear_8cpp.xhtml#ade79b955a7a51f76cd7ad8af9ad284db":[8,0,1,8,0,30,0],
"armnn_tf_parser_2test_2_resize_bilinear_8cpp_source.xhtml":[8,0,1,8,0,30],
"armnn_tf_parser_2test_2_softmax_8cpp.xhtml":[8,0,1,8,0,33],
"armnn_tf_parser_2test_2_softmax_8cpp.xhtml#a26a5b37954a8c9b0aa9d094f25e1c3c7":[8,0,1,8,0,33,0],
"armnn_tf_parser_2test_2_softmax_8cpp_source.xhtml":[8,0,1,8,0,33],
"armnn_tf_parser_2test_2_split_8cpp.xhtml":[8,0,1,8,0,34],
"armnn_tf_parser_2test_2_split_8cpp.xhtml#a02c152e9b2637143e9d2be942bbc4e71":[8,0,1,8,0,34,0],
"armnn_tf_parser_2test_2_split_8cpp.xhtml#a91119a168a662a9ffad847c8c9a8eef2":[8,0,1,8,0,34,2],
"armnn_tf_parser_2test_2_split_8cpp.xhtml#ae2c06a87722bd75b1b94a1ecbb51ede9":[8,0,1,8,0,34,1],
"armnn_tf_parser_2test_2_split_8cpp_source.xhtml":[8,0,1,8,0,34],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml":[8,0,1,8,0,35],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml#a1ae04f5ea7b850214b40476cda25db6f":[8,0,1,8,0,35,4],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml#a2df541c81ed9390c2dabbcf465e2b0c0":[8,0,1,8,0,35,3],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml#a757db9720ed629c90ab29f0beef4a813":[8,0,1,8,0,35,6],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml#a858c1f60d7157b2d0568a8f945fd0f71":[8,0,1,8,0,35,1],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml#ad9a34ca7d13dbddd3345eeba6ddd6fbc":[8,0,1,8,0,35,2],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml#ae1a61773df07227def16ca732175320a":[8,0,1,8,0,35,0],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml#ae697a9e146d663b6a9dd7329d603b462":[8,0,1,8,0,35,7],
"armnn_tf_parser_2test_2_squeeze_8cpp.xhtml#af1b422a212c1b21066a185ebd9d63d80":[8,0,1,8,0,35,5],
"armnn_tf_parser_2test_2_squeeze_8cpp_source.xhtml":[8,0,1,8,0,35],
"armnn_tf_parser_2test_2_stack_8cpp.xhtml":[8,0,1,8,0,36],
"armnn_tf_parser_2test_2_stack_8cpp.xhtml#a16ab2d17eba7cd84730c20e0fffa5976":[8,0,1,8,0,36,0],
"armnn_tf_parser_2test_2_stack_8cpp.xhtml#a4d065b12e3423cb3d75496fb3468e24a":[8,0,1,8,0,36,1],
"armnn_tf_parser_2test_2_stack_8cpp_source.xhtml":[8,0,1,8,0,36],
"armnn_tf_parser_2test_2_strided_slice_8cpp.xhtml":[8,0,1,8,0,37],
"armnn_tf_parser_2test_2_strided_slice_8cpp.xhtml#a1cbe6b36e090dfafad054e5ad3c45873":[8,0,1,8,0,37,2],
"armnn_tf_parser_2test_2_strided_slice_8cpp.xhtml#a907d6605f117d6d9051c007776a43554":[8,0,1,8,0,37,3],
"armnn_tf_parser_2test_2_strided_slice_8cpp.xhtml#ac85bc241a2c079bf3c1aeb587bfdffe3":[8,0,1,8,0,37,0],
"armnn_tf_parser_2test_2_strided_slice_8cpp.xhtml#affb16df52a91d9741ff1b51080f5a85b":[8,0,1,8,0,37,1],
"armnn_tf_parser_2test_2_strided_slice_8cpp_source.xhtml":[8,0,1,8,0,37],
"armnn_tf_parser_2test_2_sub_8cpp.xhtml":[8,0,1,8,0,38],
"armnn_tf_parser_2test_2_sub_8cpp.xhtml#a44db723ca45599b52099e39ee997207e":[8,0,1,8,0,38,1],
"armnn_tf_parser_2test_2_sub_8cpp.xhtml#a5412a63bc943a5f5357948cd8a6cc8c3":[8,0,1,8,0,38,0],
"armnn_tf_parser_2test_2_sub_8cpp.xhtml#a78d70093fc6c5ce323a0fc1af3b7fef2":[8,0,1,8,0,38,2],
"armnn_tf_parser_2test_2_sub_8cpp_source.xhtml":[8,0,1,8,0,38],
"armnn_tf_parser_2test_2_test_multi_inputs_outputs_8cpp.xhtml":[8,0,1,8,0,40],
"armnn_tf_parser_2test_2_test_multi_inputs_outputs_8cpp.xhtml#ade2300c7c66d25f6b8626df6fb0d82b3":[8,0,1,8,0,40,0],
"armnn_tf_parser_2test_2_test_multi_inputs_outputs_8cpp_source.xhtml":[8,0,1,8,0,40],
"armnn_tf_parser_2test_2_transpose_8cpp.xhtml":[8,0,1,8,0,41],
"armnn_tf_parser_2test_2_transpose_8cpp.xhtml#a1fb934805311725c812df45f07d17e16":[8,0,1,8,0,41,1],
"armnn_tf_parser_2test_2_transpose_8cpp.xhtml#a6e29b7a58dbf4b8385046bdb074f3117":[8,0,1,8,0,41,0],
"armnn_tf_parser_2test_2_transpose_8cpp_source.xhtml":[8,0,1,8,0,41],
"armnn_utils_2_transpose_8cpp.xhtml":[8,0,1,9,31],
"armnn_utils_2_transpose_8cpp.xhtml#a405d5f966ec992d1717711e5a2d7909d":[8,0,1,9,31,0],
"armnn_utils_2_transpose_8cpp.xhtml#a428a9a6ffdf0e8d723b50c038c56c336":[8,0,1,9,31,1],
"armnn_utils_2_transpose_8cpp.xhtml#a49e3aa80e05abc7b4643aad600e8d827":[8,0,1,9,31,2],
"armnn_utils_2_transpose_8cpp_source.xhtml":[8,0,1,9,31],
"backends.xhtml":[4],
"backends.xhtml#S12_10_backend_developer_guide":[4,0,9],
"backends.xhtml#S12_11_backend_developer_guide":[4,0,10],
"backends.xhtml#S12_12_backend_developer_guide":[4,0,11],
"backends.xhtml#S12_13_backend_developer_guide":[4,0,12],
"backends.xhtml#S12_14_backend_developer_guide":[4,0,13],
"backends.xhtml#S12_15_backend_developer_guide":[4,0,14],
"backends.xhtml#S12_1_backend_developer_guide":[4,0,0],
"backends.xhtml#S12_2_backend_developer_guide":[4,0,1],
"backends.xhtml#S12_3_backend_developer_guide":[4,0,2],
"backends.xhtml#S12_4_backend_developer_guide":[4,0,3],
"backends.xhtml#S12_5_backend_developer_guide":[4,0,4],
"backends.xhtml#S12_6_backend_developer_guide":[4,0,5],
"backends.xhtml#S12_7_backend_developer_guide":[4,0,6],
"backends.xhtml#S12_8_backend_developer_guide":[4,0,7],
"backends.xhtml#S12_9_backend_developer_guide":[4,0,8],
"backends.xhtml#S12_backend_developer_guide":[4,0],
"backends.xhtml#S13_1_dynamic_backend_guide":[4,1,0],
"backends.xhtml#S13_2_dynamic_backend_guide":[4,1,1],
"backends.xhtml#S13_dynamic_backend_guide":[4,1],
"backends_2reference_2workloads_2_batch_to_space_n_d_8cpp.xhtml":[8,0,1,10,5,1,8],
"backends_2reference_2workloads_2_batch_to_space_n_d_8cpp.xhtml#a8746512fab5ec10c2c57800c66311ba7":[8,0,1,10,5,1,8,0],
"backends_2reference_2workloads_2_batch_to_space_n_d_8cpp.xhtml#ac70a495c61526a0500b33b23db86ca27":[8,0,1,10,5,1,8,1],
"backends_2reference_2workloads_2_batch_to_space_n_d_8cpp_source.xhtml":[8,0,1,10,5,1,8],
"backends_2reference_2workloads_2_dequantize_8cpp.xhtml":[8,0,1,10,5,1,21],
"backends_2reference_2workloads_2_dequantize_8cpp.xhtml#acae7e910f899ae67340c9ce29e406a86":[8,0,1,10,5,1,21,0],
"backends_2reference_2workloads_2_dequantize_8cpp_source.xhtml":[8,0,1,10,5,1,21],
"backends_2reference_2workloads_2_detection_post_process_8cpp.xhtml":[8,0,1,10,5,1,23],
"backends_2reference_2workloads_2_detection_post_process_8cpp.xhtml#a2748f45e58b1c612d473043f711d1434":[8,0,1,10,5,1,23,5],
"backends_2reference_2workloads_2_detection_post_process_8cpp.xhtml#abf6aad7bc221f8ad22b4d99cd020373b":[8,0,1,10,5,1,23,3],
"backends_2reference_2workloads_2_detection_post_process_8cpp.xhtml#ac8c641d4a69c9a85c487cfbc7ea4d73c":[8,0,1,10,5,1,23,4],
"backends_2reference_2workloads_2_detection_post_process_8cpp.xhtml#ae76ce23fa9fc18e56448d52b37dd3f32":[8,0,1,10,5,1,23,1],
"backends_2reference_2workloads_2_detection_post_process_8cpp.xhtml#ae8dcbb74cf0c855724f12833a55a5684":[8,0,1,10,5,1,23,0],
"backends_2reference_2workloads_2_detection_post_process_8cpp.xhtml#ae8ed5c640761fb6744aec0ee16388417":[8,0,1,10,5,1,23,2],
"backends_2reference_2workloads_2_detection_post_process_8cpp_source.xhtml":[8,0,1,10,5,1,23],
"backends_2reference_2workloads_2_fully_connected_8cpp.xhtml":[8,0,1,10,5,1,29],
"backends_2reference_2workloads_2_fully_connected_8cpp.xhtml#ad34d1d5b1ca8f52dc296ecf52ba20c8a":[8,0,1,10,5,1,29,0],
"backends_2reference_2workloads_2_fully_connected_8cpp_source.xhtml":[8,0,1,10,5,1,29],
"backends_2reference_2workloads_2_gather_8cpp.xhtml":[8,0,1,10,5,1,31],
"backends_2reference_2workloads_2_gather_8cpp.xhtml#a66004b2326f8ccb1faa71d5efa186633":[8,0,1,10,5,1,31,0],
"backends_2reference_2workloads_2_gather_8cpp_source.xhtml":[8,0,1,10,5,1,31],
"backends_2reference_2workloads_2_mean_8cpp.xhtml":[8,0,1,10,5,1,40],
"backends_2reference_2workloads_2_mean_8cpp.xhtml#a165ae372a7f67cad64ef3395d30122ce":[8,0,1,10,5,1,40,0],
"backends_2reference_2workloads_2_mean_8cpp.xhtml#a869f740e9c2fcb8642350c6e3d0b3742":[8,0,1,10,5,1,40,1],
"backends_2reference_2workloads_2_mean_8cpp.xhtml#ae86f1ca23eaa764da9e589cc8e39a969":[8,0,1,10,5,1,40,2],
"backends_2reference_2workloads_2_mean_8cpp_source.xhtml":[8,0,1,10,5,1,40],
"backends_2reference_2workloads_2_pad_8cpp.xhtml":[8,0,1,10,5,1,43],
"backends_2reference_2workloads_2_pad_8cpp.xhtml#a09fc687543b371ddab280203dc989bd9":[8,0,1,10,5,1,43,2],
"backends_2reference_2workloads_2_pad_8cpp.xhtml#a1b165f49b29968defb57e2d9b8628b9f":[8,0,1,10,5,1,43,3],
"backends_2reference_2workloads_2_pad_8cpp.xhtml#a28e115f5d28500324b53fae9e6c00b77":[8,0,1,10,5,1,43,0],
"backends_2reference_2workloads_2_pad_8cpp.xhtml#a37fe5e5b5f650430dc0e71d69977bebd":[8,0,1,10,5,1,43,1],
"backends_2reference_2workloads_2_pad_8cpp.xhtml#a68b05cecb5ebbbc3b8d1fd94a66df4af":[8,0,1,10,5,1,43,4],
"backends_2reference_2workloads_2_pad_8cpp.xhtml#a7e27cbebab8cde65c84d7a00efa025cd":[8,0,1,10,5,1,43,5],
"backends_2reference_2workloads_2_pad_8cpp_source.xhtml":[8,0,1,10,5,1,43],
"backends_2reference_2workloads_2_slice_8cpp.xhtml":[8,0,1,10,5,1,142],
"backends_2reference_2workloads_2_slice_8cpp.xhtml#a044ea0cc993d4d1fbe4ec877b17b8d39":[8,0,1,10,5,1,142,0],
"backends_2reference_2workloads_2_slice_8cpp_source.xhtml":[8,0,1,10,5,1,142],
"backends_2reference_2workloads_2_softmax_8cpp.xhtml":[8,0,1,10,5,1,144],
"backends_2reference_2workloads_2_softmax_8cpp.xhtml#aa999ff2585ad75b95954a9323f63c32b":[8,0,1,10,5,1,144,0],
"backends_2reference_2workloads_2_softmax_8cpp_source.xhtml":[8,0,1,10,5,1,144],
"backends_2reference_2workloads_2_space_to_batch_n_d_8cpp.xhtml":[8,0,1,10,5,1,146],
"backends_2reference_2workloads_2_space_to_batch_n_d_8cpp.xhtml#a4a180e425d4c19b2cdea4ce5760180e1":[8,0,1,10,5,1,146,1],
"backends_2reference_2workloads_2_space_to_batch_n_d_8cpp.xhtml#adafb0fd0a3f6435c2bdf41f971761ecf":[8,0,1,10,5,1,146,0],
"backends_2reference_2workloads_2_space_to_batch_n_d_8cpp_source.xhtml":[8,0,1,10,5,1,146],
"backends_2reference_2workloads_2_stack_8cpp.xhtml":[8,0,1,10,5,1,153],
"backends_2reference_2workloads_2_stack_8cpp.xhtml#a6ef2dcac2ec0683d52df1b051404e7d6":[8,0,1,10,5,1,153,0],
"backends_2reference_2workloads_2_stack_8cpp_source.xhtml":[8,0,1,10,5,1,153],
"backends_2reference_2workloads_2_strided_slice_8cpp.xhtml":[8,0,1,10,5,1,155],
"backends_2reference_2workloads_2_strided_slice_8cpp.xhtml#a86d7a7168ac00b75b4971f9aad623698":[8,0,1,10,5,1,155,0],
"backends_2reference_2workloads_2_strided_slice_8cpp_source.xhtml":[8,0,1,10,5,1,155],
"class_caffe_preprocessor.xhtml":[7,0,11],
"class_caffe_preprocessor.xhtml#a0b6d5426d97a60215cdcd609dee35cc9":[7,0,11,1],
"class_caffe_preprocessor.xhtml#a21e0733780e895c0baf0072c51cbfd23":[7,0,11,3],
"class_caffe_preprocessor.xhtml#a6ca5a337226f4d2d85ffc3d95f39998e":[7,0,11,2],
"class_caffe_preprocessor.xhtml#a9d5c241a4bb952f78d140229d17f2465":[7,0,11,0],
"class_cifar10_database.xhtml":[7,0,12],
"class_cifar10_database.xhtml#a0b6d5426d97a60215cdcd609dee35cc9":[7,0,12,1],
"class_cifar10_database.xhtml#a9d5c241a4bb952f78d140229d17f2465":[7,0,12,0],
"class_cifar10_database.xhtml#ac907ba7482a33b174b9d8477ca80872f":[7,0,12,2],
"class_cifar10_database.xhtml#ae1e69b726d674896305197f6a439201e":[7,0,12,3],
"class_classifier_test_case_data.xhtml":[7,0,13],
"class_classifier_test_case_data.xhtml#a830c7bcf33020bebca7cea1d52156569":[7,0,13,0],
"class_classifier_test_case_data.xhtml#a936527a4a6435e6b70d8ed32a2067b41":[7,0,13,2],
"class_classifier_test_case_data.xhtml#aee7736d793f03d4c9c7ed6fbe5a16cae":[7,0,13,1],
"class_i_timeline_decoder.xhtml":[7,0,25],
"class_i_timeline_decoder.xhtml#a39eef4619be7051ac1c62c71bdabd248":[7,0,25,8],
"class_i_timeline_decoder.xhtml#a678c5d8abc10bc4b1936523483e8da8d":[7,0,25,10],
"class_i_timeline_decoder.xhtml#a7561a8de32d8a9ff3bfa7de7a89a590a":[7,0,25,6],
"class_i_timeline_decoder.xhtml#a7561a8de32d8a9ff3bfa7de7a89a590aa01c22cb6628ace016f56d9f9a2926c22":[7,0,25,6,0],
"class_i_timeline_decoder.xhtml#a7561a8de32d8a9ff3bfa7de7a89a590aa369a8f8f3bdedb1ae5445e8f52d06fa7":[7,0,25,6,1],
"class_i_timeline_decoder.xhtml#a83984ef549a8492f6711fd48c936b472":[7,0,25,9],
"class_i_timeline_decoder.xhtml#ad6cb3f1b3c851eb673ba1b8ab6640e40":[7,0,25,7],
"class_i_timeline_decoder.xhtml#adcad6f6c71d39d9b1b7863eb50804323":[7,0,25,11],
"class_i_timeline_decoder.xhtml#ae3dc53e6af44cab02cc709e03fbf171a":[7,0,25,12],
"class_i_timeline_decoder.xhtml#afcec9ac4ebf3a53432bbd77a314b0667":[7,0,25,5],
"class_i_timeline_decoder.xhtml#afcec9ac4ebf3a53432bbd77a314b0667a47d6c060a0aa40c6fd018f12b7c96f1f":[7,0,25,5,3],
"class_i_timeline_decoder.xhtml#afcec9ac4ebf3a53432bbd77a314b0667a5265db1a79410fbe447a3faa98e7b914":[7,0,25,5,1],
"class_i_timeline_decoder.xhtml#afcec9ac4ebf3a53432bbd77a314b0667a77ac530ff577e3b84e0761fa02b2520b":[7,0,25,5,2],
"class_i_timeline_decoder.xhtml#afcec9ac4ebf3a53432bbd77a314b0667afc04d023850b425f3b9c62b3a55dc110":[7,0,25,5,0],
"class_image_preprocessor.xhtml":[7,0,17],
"class_image_preprocessor.xhtml#a0859c20682ecfa660d0da13de7998029":[7,0,17,6],
"class_image_preprocessor.xhtml#a0b6d5426d97a60215cdcd609dee35cc9":[7,0,17,1],
"class_image_preprocessor.xhtml#a3d7f9eb381f95c181466db2172dfd775":[7,0,17,3],
"class_image_preprocessor.xhtml#a8b182988d497b07d1e06a1707b67897f":[7,0,17,2],
"class_image_preprocessor.xhtml#a8b182988d497b07d1e06a1707b67897fa54d2c44e9fb4c89a3abd5088e04fdc48":[7,0,17,2,1],
"class_image_preprocessor.xhtml#a8b182988d497b07d1e06a1707b67897fa7442702e1d93df81504eb5e5d536c2fc":[7,0,17,2,0],
"class_image_preprocessor.xhtml#ac2bd5628ca0535f9e8af78e65685d62f":[7,0,17,4],
"class_image_preprocessor.xhtml#af6964b8f91085191c5228d1a67db4523":[7,0,17,0],
"class_image_preprocessor.xhtml#afc05b2b7e914f9737812e66f5583b04c":[7,0,17,5],
"class_inference_model.xhtml":[7,0,18],
"class_inference_model.xhtml#a066580d185559e2efdcb6cedd1709b9c":[7,0,18,11],
"class_inference_model.xhtml#a0e0f3330fc1e6c1fa1d99d731490c483":[7,0,18,10],
"class_inference_model.xhtml#a1a09727b3f781272e702516fffb7d97f":[7,0,18,16],
"class_inference_model.xhtml#a262185c43532d5728c4bd7890f3b2235":[7,0,18,6],
"class_inference_model.xhtml#a2e61c7404108440828ef7a45f310e229":[7,0,18,2],
"class_inference_model.xhtml#a325f1c17b5ff2153cae944e3c62820a2":[7,0,18,13],
"class_inference_model.xhtml#a5a211a676b879363f0c5001698a14c50":[7,0,18,5],
"class_inference_model.xhtml#a679e4b22a845c8d7f58f6ca6a5df625f":[7,0,18,12],
"class_inference_model.xhtml#a72a6db08ffca660ece7c2d86e2a14daa":[7,0,18,7],
"class_inference_model.xhtml#a7af4f6c4d5f8720a6ea093a825722227":[7,0,18,17],
"class_inference_model.xhtml#a8282dddf88e0deb3c414235e20a6cb2c":[7,0,18,15],
"class_inference_model.xhtml#a9a28e0c17604e5a945409f994c020212":[7,0,18,9],
"class_inference_model.xhtml#a9eb69ebdaf4ceb8014e7c8a540266100":[7,0,18,4],
"class_inference_model.xhtml#ac0b73049e00e7013f5cc6ae7fcaedcd4":[7,0,18,14],
"class_inference_model.xhtml#ade1a8545be49b1c10e724ea8536907c7":[7,0,18,8],
"class_inference_model.xhtml#ae3ca0528045df8958ce5dcdf9d1af041":[7,0,18,3],
"class_inference_model.xhtml#af6964b8f91085191c5228d1a67db4523":[7,0,18,1],
"class_inference_test_image.xhtml":[7,0,19],
"class_inference_test_image.xhtml#a0983225e32025d901ce6547170892f56":[7,0,19,5],
"class_inference_test_image.xhtml#a1221f9d864aed14208371c83b47f2686":[7,0,19,3],
"class_inference_test_image.xhtml#a2ec879fd8f3865e9937bb875eaf5c092":[7,0,19,11],
"class_inference_test_image.xhtml#a4a6637c2a2952a14cb3a426133b67a73":[7,0,19,13],
"class_inference_test_image.xhtml#a4ec823e49ce46950b3d4cee1efff050b":[7,0,19,10],
"class_inference_test_image.xhtml#a51b6530e9551346a609770853dedf7b0":[7,0,19,4],
"class_inference_test_image.xhtml#a5a1e22b2882917bcd82a14328ff2c5d6":[7,0,19,6],
"class_inference_test_image.xhtml#a746dbda8e529cdc8450f81fabb604250":[7,0,19,14],
"class_inference_test_image.xhtml#a85fe58abcde5d3abd1e3fb8b5282ea7b":[7,0,19,12],
"class_inference_test_image.xhtml#a8846406ac37fbd2204f0be16ee05d5b7":[7,0,19,7],
"class_inference_test_image.xhtml#a9583c8d35e13002b79d9e65434e0b685":[7,0,19,2],
"class_inference_test_image.xhtml#a97a4644e316893df9dd2ab73cdd08d33":[7,0,19,8],
"class_inference_test_image.xhtml#a98be3e32f21051eca5de5728c9cd43bc":[7,0,19,15],
"class_inference_test_image.xhtml#acd495024dcb50f4081d5c05a1e66d210":[7,0,19,9],
"class_inference_test_image.xhtml#ae7a403f69a7717c1eaae1d74b7bb7bce":[7,0,19,0],
"class_inference_test_image.xhtml#ae7a403f69a7717c1eaae1d74b7bb7bcea773b27aa8d21604182ba90d029ad2e13":[7,0,19,0,1],
"class_inference_test_image.xhtml#ae7a403f69a7717c1eaae1d74b7bb7bceaecd472e37d2e3d8542fd5e9ff63e3450":[7,0,19,0,0],
"class_inference_test_image.xhtml#afe2346f1f07296902bc8d84beb69b45d":[7,0,19,1],
"class_inference_test_image.xhtml#afe2346f1f07296902bc8d84beb69b45da021d8346462df53d4272607b0f41a8d8":[7,0,19,1,1],
"class_inference_test_image.xhtml#afe2346f1f07296902bc8d84beb69b45da38f4e5f66749f755f54ef67faa2058dc":[7,0,19,1,2],
"class_inference_test_image.xhtml#afe2346f1f07296902bc8d84beb69b45daf8fd4f1b5b05c6b1cc6a661141fd4f54":[7,0,19,1,0],
"class_inference_test_image_exception.xhtml":[7,0,20],
"class_inference_test_image_load_failed.xhtml":[7,0,21],
"class_inference_test_image_out_of_bounds_access.xhtml":[7,0,22],
"class_inference_test_image_resize_failed.xhtml":[7,0,23],
"class_inference_test_image_write_failed.xhtml":[7,0,24],
"class_mnist_database.xhtml":[7,0,27],
"class_mnist_database.xhtml#a0b6d5426d97a60215cdcd609dee35cc9":[7,0,27,1],
"class_mnist_database.xhtml#a4808453ec85548b752bcc6d4c1bc90b2":[7,0,27,3],
"class_mnist_database.xhtml#a66e5904e741dbce2d648bc9a02e7eb0e":[7,0,27,2],
"class_mnist_database.xhtml#a9d5c241a4bb952f78d140229d17f2465":[7,0,27,0],
"class_test_activation_layer_visitor.xhtml":[7,0,34],
"class_test_activation_layer_visitor.xhtml#a0f764e8e3b690cfdfb7da116af2db7fc":[7,0,34,0],
"class_test_activation_layer_visitor.xhtml#a70104e9bd1dc97e0ec593d53ca07ceed":[7,0,34,1],
"class_test_addition_layer_visitor.xhtml":[7,0,35],
"class_test_addition_layer_visitor.xhtml#a83b6ac42de6ff677bf13b8c58ebfd534":[7,0,35,0],
"class_test_addition_layer_visitor.xhtml#a8e8b2a50a021997816e159c5b4efbd2f":[7,0,35,1],
"class_test_arg_min_max_layer_visitor.xhtml":[7,0,36],
"class_test_arg_min_max_layer_visitor.xhtml#ab119305244858618d3b41bd56a4b0575":[7,0,36,0],
"class_test_arg_min_max_layer_visitor.xhtml#ad182e15a8f78b102792e81a1f00d42a5":[7,0,36,1],
"class_test_backend_registry.xhtml":[7,0,37],
"class_test_backend_registry.xhtml#ac93c278f2b24eed88af3d58eb6a08242":[7,0,37,1]
};
|
'use strict';

// createBase returns an adder: a closure that remembers `num` and adds
// it to whatever value it is later called with.
const createBase = (num) => (value) => num + value;

let addSix = createBase(6);

addSix(10); // returns 16
addSix(21); // returns 27

console.log(addSix(10));
console.log(addSix(21));
|
<reponame>felipebaloneker/Practice<filename>javascript/150 exercicios basicos/102_Find_Inversion.js
// Counts inversions in `array`: pairs (i, y) with i < y where
// array[i] > array[y].  Simple O(n^2) pairwise comparison.
// Fix: the loop counters were undeclared implicit globals (they leaked
// into the enclosing scope and would throw in strict mode).
function FindInversion(array) {
    let result = 0;
    for (let i = 0; i < array.length; i++) {
        // Compare element i against every later element.
        for (let y = i + 1; y < array.length; y++) {
            if (array[i] > array[y]) { result++; }
        }
    }
    return result;
}

console.log(FindInversion([1, 2, 3, 5, 4])); // 1
console.log(FindInversion([1, 5, 4, 3]));    // 3
|
def bubble_sort(arr):
    """Sort ``arr`` in place in ascending order using bubble sort.

    Repeatedly steps through the list swapping adjacent out-of-order
    elements.  Improvement over the plain version: stop as soon as a
    full pass makes no swaps, making the best case O(n) instead of
    always O(n^2).
    """
    n = len(arr)
    for i in range(n - 1):
        swapped = False
        # After i passes the last i elements are already in place.
        for j in range(n - i - 1):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        # No swaps means the array is already sorted.
        if not swapped:
            break


# Driver code
arr = [5, 7, 4, 2, 8]
bubble_sort(arr)

# Print the sorted array
print("Sorted array is:")
print(arr)
|
<reponame>MrBattary/ncstore-back
package com.netcracker.ncstore.security.filter;

/**
 * Runtime exception thrown when the JWT authentication filter fails to
 * process a request.
 */
public class JwtAuthFilterException extends RuntimeException {
    /**
     * Creates the exception with a human-readable reason.
     *
     * @param message description of the authentication failure
     */
    public JwtAuthFilterException(final String message) {
        super(message);
    }
}
|
import { StakePoolsActions } from './actions';
import { StakePoolsApi } from './api';
import { StakePoolsStore } from './store';

// Module-level singletons: one actions hub, one API client, and one
// store wired to both.  Importing this module anywhere shares them.
export const stakePoolsActions = new StakePoolsActions();
export const stakePoolsApi = new StakePoolsApi();
export const stakePoolsStore = new StakePoolsStore(
    stakePoolsActions,
    stakePoolsApi
);
// Side effect at import time: the store starts running immediately.
stakePoolsStore.start();

// Default bundle of the singletons — presumably the default value for a
// context provider; confirm at the consumer side.
export const stakePoolsContextDefault = {
    actions: stakePoolsActions,
    api: stakePoolsApi,
    store: stakePoolsStore,
};
|
// Copyright 2015 PLUMgrid
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// vim: set ts=8:sts=8:sw=8:noet
package canvas
import (
"fmt"
"strings"
"github.com/iovisor/iomodules/hover/api"
"github.com/iovisor/iomodules/hover/bpf"
"github.com/iovisor/iomodules/hover/util"
)
// Logging helpers aliased from the shared hover util package.
var (
	Debug = util.Debug
	Info = util.Info
	Warn = util.Warn
	Error = util.Error
)

// Adapter permission bits (PermW == 2, PermR == 4 from 1 << (1+iota)).
const (
	PermW = 1 << (1 + iota)
	PermR
)
// Adapter abstracts one module instance (e.g. a "bpf" or "bridge"
// module, see NewAdapter) that can be attached to the canvas graph.
type Adapter interface {
	UUID() string   // unique id of this adapter instance
	FD() int        // file descriptor backing the adapter
	Close()         // release the adapter's resources
	Type() string
	Name() string
	Tags() []string
	Perm() uint // permission bits, see PermR/PermW
	Config() map[string]interface{}
	SetConfig(req api.ModuleBase, g Graph, id int) error
	Tables() []map[string]interface{}
	Table(name string) AdapterTable
}
// AdapterTable is a string-keyed table exposed by an Adapter, supporting
// point lookups, mutation, and full iteration over its entries.
type AdapterTable interface {
	ID() string
	Name() string
	Config() map[string]interface{}
	Get(key string) (interface{}, bool) // second result reports presence
	Set(key, val string) error
	Delete(key string) error
	Iter() <-chan api.ModuleTableEntry // streams all entries
}
// Interface identifies a numbered, named attachment point on a module.
type Interface interface {
	ID() int
	Name() string
}
// AdapterNode wraps an Adapter as a node in the canvas graph.
type AdapterNode struct {
	NodeBase
	adapter Adapter // the wrapped module instance
}
// NewAdapter instantiates the adapter described by req.ModuleType.
// The type string has the form "<kind>" or "<kind>/<subtype>"; the
// supported kinds are "bpf" and "bridge".  The new adapter is
// configured from req before being returned; a configuration failure
// returns a nil adapter and the error.
func NewAdapter(req api.ModuleBase, g Graph, id int) (Adapter, error) {
	uuid := util.NewUUID4()
	parts := strings.SplitN(req.ModuleType, "/", 2)

	switch parts[0] {
	case "bpf":
		// Everything after the first '/' selects the bpf subtype.
		subtype := ""
		if len(parts) > 1 {
			subtype = parts[1]
		}
		a := &BpfAdapter{
			uuid:    uuid[:8],
			perm:    PermR | PermW,
			config:  make(map[string]interface{}),
			subtype: subtype,
		}
		if err := a.SetConfig(req, g, id); err != nil {
			return nil, err
		}
		return a, nil
	case "bridge":
		a := &BridgeAdapter{
			uuid:   uuid[:8],
			name:   req.DisplayName,
			tags:   req.Tags,
			perm:   PermR | PermW,
			config: make(map[string]interface{}),
		}
		if err := a.SetConfig(req, g, id); err != nil {
			return nil, err
		}
		return a, nil
	default:
		return nil, fmt.Errorf("unknown ModuleType %s", req.ModuleType)
	}
}
// NewAdapterNode wraps adapter in a graph node.  The node id is left as
// -1 (unassigned) until the node is inserted into a graph.
func NewAdapterNode(adapter Adapter) *AdapterNode {
	return &AdapterNode{
		NodeBase: NewNodeBase(-1, adapter.FD(), adapter.UUID(), "", bpf.MAX_INTERFACES),
		adapter: adapter,
	}
}
// Close releases the wrapped adapter's resources.
func (n *AdapterNode) Close() { n.adapter.Close() }

// Adapter returns the wrapped Adapter.
func (n *AdapterNode) Adapter() Adapter { return n.adapter }
|
<filename>tests/hooks/chroot_test.go
package cos_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/rancher-sandbox/cOS/tests/sut"
)
// End-to-end spec: user-provided chroot hooks (installed via a
// cloud-config file in /oem) must run during both cos-upgrade and
// cos-reset, each leaving a marker file in the root filesystem.
var _ = Describe("cOS Feature tests", func() {
	var s *sut.SUT
	BeforeEach(func() {
		// Fresh connection to the system under test; allow up to 360s.
		s = sut.NewSUT()
		s.EventuallyConnects(360)
	})
	Context("After install", func() {
		It("can run chroot hooks during upgrade and reset", func() {
			// Install the cloud-config that declares the chroot hooks.
			err := s.SendFile("../assets/chroot_hooks.yaml", "/oem/chroot_hooks.yaml", "0770")
			Expect(err).ToNot(HaveOccurred())
			out, err := s.Command("cos-upgrade")
			Expect(err).ToNot(HaveOccurred())
			Expect(out).Should(ContainSubstring("Upgrade done, now you might want to reboot"))
			Expect(out).Should(ContainSubstring("Upgrade target: active.img"))
			By("rebooting")
			s.Reboot()
			Expect(s.BootFrom()).To(Equal(sut.Active))
			// The after-upgrade marker must exist; the after-reset one
			// must not exist yet (cat fails on a missing file).
			_, err = s.Command("cat /after-upgrade-chroot")
			Expect(err).ToNot(HaveOccurred())
			_, err = s.Command("cat /after-reset-chroot")
			Expect(err).To(HaveOccurred())
			// After a reset, the after-reset marker must appear.
			s.Reset()
			_, err = s.Command("cat /after-reset-chroot")
			Expect(err).ToNot(HaveOccurred())
		})
	})
})
|
#!/bin/bash
# Build every sub-project and collect all bundles into ./dist.
#
# Fix: the original never checked whether `cd` succeeded, so a missing
# directory made every following yarn/cp run in the wrong place while the
# `cd ..` bookkeeping silently drifted.  Each project now builds inside a
# subshell, and `set -e` aborts on the first failure.
set -e

# utils is a shared dependency: install only, no build step.
(cd utils && yarn)

# Install dependencies and build each framework-specific project.
for project in aframe babylonjs r3f threejs web wonderland; do
    (cd "$project" && yarn && yarn run build)
done

# Merge every bundle plus the shared assets into a single dist folder.
cp -R aframe/dist dist
cp -R babylonjs/dist/* dist
cp -R r3f/dist/* dist
cp -R threejs/dist/* dist
cp -R web/dist/* dist
cp -R wonderland/dist/* dist
cp -R assets dist
|
#include <iostream>
#include <fstream>
#include <string>
#include <vector>
#include "pml.hpp"
using namespace pml;
void interactive_mode(Thms & theorems, ThmDict & thm_dict);
void filechecking(const std::string & filename, Thms & theorems, ThmDict & thm_dict);
// Entry point: with no arguments start the interactive REPL; with one
// argument check the proof contained in that file; otherwise print usage.
int main(int argc, char *argv[]) {
    Thms theorems;
    ThmDict thm_dict;
    if( argc == 1 ) {
        interactive_mode(theorems, thm_dict);
    } else if( argc == 2 ) {
        filechecking(argv[1], theorems, thm_dict);
    } else {
        std::cout << "Usage :" << std::endl;
        std::cout << "  pml            [interactive mode]" << std::endl;
        std::cout << "  pml <filename> [cheking proof of the <filename>]" << std::endl;
    }
}
// Read-eval-print loop: prints the axioms, then reads commands from
// stdin one line at a time, feeding each to the parser and echoing any
// newly derived theorem, until EOF.
void interactive_mode(Thms & theorems, ThmDict & thm_dict) {
    std::cout << "PML -- A Proof Assistant for Modal Logic" << std::endl;
    std::cout << "Axioms are :" << std::endl;
    for(const auto & axiom : AxiomK) {
        std::cout << axiom->to_string() << std::endl;
    }
    std::string command;
    std::cout << ">> " << std::flush;
    while(std::getline(std::cin, command)){
        std::size_t p = 0;
        // Theorem count before parsing; used to detect a new derivation.
        std::size_t j = theorems.size();
        if( command_parser(command, p, theorems, thm_dict) ){
            std::size_t i = theorems.size();
            if( j < i ) {
                // A theorem was added: echo it with its index.
                std::cout << "#" << i - 1 << " |- " << theorems[i - 1]->to_string() << std::endl;
            }
        } else {
            std::cout << "Failed." << std::endl;
        }
        std::cout << ">> " << std::flush;
    }
    std::cout << std::endl;
}
void filechecking(const std::string & filename, Thms & theorems, ThmDict & thm_dict) {
std::ifstream fin;
fin.open(filename, std::ios::in);
if( !fin ){
std::cout << "Error : cannot open the file " << filename << "." << std::endl;
exit(1);
}
bool failed = false;
std::string source;
int line = 1;
while(std::getline(fin, source)) {
std::size_t p = 0;
std::size_t j = theorems.size();
if( command_parser(source, p, theorems, thm_dict) ){
std::size_t i = theorems.size();
if( j < i ) {
std::cout << "#" << i - 1 << " |- " << theorems[i - 1]->to_string() << std::endl;
}
} else {
std::cout << "Failed at line " << line << std::endl;
failed = true;
break;
}
line++;
}
if( failed ) {
std::cout << "\x1b[31mERROR\x1b[39m" << std::endl;
} else {
std::cout << "\x1b[32mOK\x1b[39m" << std::endl;
}
}
|
/// Validates free-form text input from a form.
class FormValidator {
    /// Input is acceptable when it is non-empty and at least 8 characters long.
    func validateFormInput(_ input: String) -> Bool {
        guard !input.isEmpty else { return false }
        return input.count >= 8
    }
}
|
from flask_restplus import Resource, reqparse
from flask_jwt import jwt_required
from models.item import ItemModel
class Item(Resource):
    """REST resource for a single item, addressed by its name."""

    # Shared request parser: only 'price' may be set through the API.
    parser = reqparse.RequestParser()
    parser.add_argument('price',
                        type=float,
                        required=True,
                        help="Price is required!")

    @jwt_required()
    def get(self, name):
        """Return the item as JSON, or 404 if it does not exist."""
        item = ItemModel.find_by_name(name)
        if item is not None:
            return item.json()
        return {'message': 'Item not found.'}, 404

    def post(self, name):
        """Create a new item; 400 if the name is already taken."""
        if ItemModel.find_by_name(name) is not None:
            return {'message': 'Item already exists.'}, 400

        request_data = Item.parser.parse_args()
        item = ItemModel(name, request_data['price'])
        # Dealing with possible insertion error (bare `except` replaced so
        # SystemExit/KeyboardInterrupt are no longer swallowed).
        try:
            item.save_to_db()
        except Exception:
            # Returning 500 - Internal Server Error
            return {'message': 'An error occurred inserting the item.'}, 500
        return item.json(), 201

    def delete(self, name):
        """Delete the item if present.  Idempotent: always returns a
        response body (previously a missing item returned None)."""
        item = ItemModel.find_by_name(name)
        if item is not None:
            item.delete_from_db()
        return {'message': 'Item deleted.'}

    def put(self, name):
        """Create the item or update its price (upsert)."""
        request_data = Item.parser.parse_args()
        item = ItemModel.find_by_name(name)
        if item is None:
            item = ItemModel(name, request_data['price'])
        else:
            item.price = request_data['price']
        # BUG FIX: persist in both branches — previously a newly created
        # item was returned to the client without ever being saved.
        item.save_to_db()
        return item.json()
class ItemList(Resource):
    """REST resource exposing the full collection of items."""

    def get(self):
        """Return every stored item as JSON under the 'items' key."""
        return {'items' : [item.json() for item in ItemModel.query.all()]}
|
<gh_stars>0
"""Retrieve data from Meetup API
"""
# -*- coding: utf-8 -*-
import os
import json
import urllib2
from datetime import datetime
from datetime import date, timedelta

import requests
from bs4 import BeautifulSoup
import MySQLdb

import predictor
# Meetup API access configuration.
MEETUP_API_KEY = '<KEY>'
MEETUP_API_BASE = 'https://api.meetup.com'
# Regex patterns; presumably used to strip URLs/HTML tags from event text
# elsewhere — they are not referenced in this module. TODO confirm.
URL_RE = r'^https?:\/\/.*[\r\n]*'
TAG_RE = r'</?([a-z][a-z0-9]*)\b[^>]*>'
# Collection limits and thresholds.
CATEGORY_LIMIT = 20
MIN_CHARS_DESC = 200  # events with a shorter description are discarded
TAG_LENGTH_LIMIT = 100
# Default HTTP headers / query parameters for Meetup API calls.
HEADERS = {'Content-Type': 'application/json'}
PARAMS = {'key': MEETUP_API_KEY}
VERBOSE = True  # print progress/diagnostic messages
# Tags our own classifier can assign to events.
TAGS = [
    'desserts',
    'wine',
    'beer',
    'vegetarian',
    'vegan',
    'meats',
    'bbq',
    'tapas',
    'brunch',
    'romantic',
    'trendy',
    'diy',
    'soccer',
    'football',
    'basketball',
    'baseball',
    'tennis',
    'lacrosse',
    'hockey',
    'golf',
    'indoors',
    'outdoors',
    'water',
    'adrenaline',
    'dance',
    'pilates',
    'boxing',
    'yoga',
    'sculpting',
    'painting',
    'museum',
    'theater',
    'lecture',
    'learn',
    'clubbing',
    'pop',
    'karaoke',
]
# Grouping of tags into UI categories.
# NOTE(review): 'spin' appears in CATEGORIES['fitness'] but not in TAGS —
# confirm whether the two lists are meant to stay in sync.
CATEGORIES = {
    'food_drink': [
        'desserts',
        'wine',
        'beer',
        'vegetarian',
        'vegan',
        'meats',
        'bbq',
        'tapas',
        'brunch',
        'romantic',
        'trendy',
        'diy',
    ],
    'sports': [
        'soccer',
        'football',
        'basketball',
        'baseball',
        'tennis',
        'lacrosse',
        'hockey',
        'golf',
    ],
    'location': ['indoors', 'outdoors', 'water'],
    'fitness': [
        'dance',
        'pilates',
        'boxing',
        'yoga',
        'spin',
        'sculpting',
    ],
    'arts_culture': ['painting', 'museum', 'theater', 'lecture', 'learn'],
    'music': ['clubbing', 'pop', 'karaoke'],
}
# Meetup's own category slugs, used when scraping "find events" pages.
MEETUP_TAGS = [
    'arts-culture',
    'beliefs',
    'book-clubs',
    'career-business',
    'dancing',
    'parents-family',
    'film',
    'food',
    'hobbies-crafts',
    'education',
    'music',
    'outdoors-adventure',
    'language',
    'sports-fitness',
    'social',
    'tech',
]
# Target number of events to collect per category.
NUM_EVENTS = 200
def get_tagged_events():
    """Scrape Meetup's "find events" pages for every category in MEETUP_TAGS
    and hand each discovered event URL to retrieval.py, which appends the
    event's details to event_info.txt.
    """
    # Truncate the shared output file that retrieval.py appends to.
    with open('event_info.txt', 'w+') as f:
        f.write('')
    for category in MEETUP_TAGS:
        events_added = 0
        days = 5
        while events_added < NUM_EVENTS:
            urls = set()
            # BUG FIX: the module imports the `datetime` CLASS, so the old
            # `datetime.date.today()` / `datetime.timedelta(...)` raised
            # AttributeError; use date/timedelta imported at module level.
            tomorrow = date.today() + timedelta(days=days)
            # https://www.meetup.com/find/events/arts-culture/?allMeetups=false&radius=5&userFreeform=New+York%2C+NY&mcId=z10025&month=4&day=20&year=2018&eventFilter=all
            # BUG FIX: the original built the URL with a backslash-continued
            # string literal, which embedded the continuation lines' leading
            # whitespace into the URL.
            url = ('www.meetup.com/find/events/{}/?allMeetups=true&radius=20'
                   '&userFreeform=New+York%2C+NY&mcId=c10001&mcName=New+York%2C+NY'
                   '&month={}&day={}&year={}').format(category,
                                                      tomorrow.month,
                                                      tomorrow.day,
                                                      tomorrow.year)
            r = requests.get('https://' + url)
            print('https://' + url)
            data = r.text
            soup = BeautifulSoup(data)
            for link in soup.find_all('a'):
                href = link.get('href')
                if '/events/' in href and '/find/' not in href:
                    urls.add(href)
            if not urls:
                break
            for url in urls:
                os.system('python retrieval.py ' + url + ' ' + category)
                events_added += 1
                if events_added > NUM_EVENTS:
                    break
            print('Finished ' + str(days))
            days += 1
def req_test():
    """Fetch today's Meetup "find events" page and return the raw response body.

    Purely a connectivity smoke test; the result is not parsed.
    """
    # BUG FIX: use date.today() — the module imports the `datetime` CLASS,
    # so the original `datetime.date.today()` raised AttributeError.
    today = date.today()
    # BUG FIX: build the URL from adjacent literals instead of a
    # backslash-continued string that embedded indentation whitespace.
    url = ('http://www.meetup.com/find/events?allMeetups=true&radius=20'
           '&userFreeform=New+York%2C+NY&mcId=c10001&mcName=New+York'
           '%2C+NY&month={}&day={}&year={}').format(today.month,
                                                    today.day,
                                                    today.year)
    # BUG FIX: was `r = r = urllib2.urlopen(url)` (duplicated assignment).
    r = urllib2.urlopen(url)
    return r.read()
def add_events_limited(day):
    """Scrape Meetup events `day` days ahead (only 1 or 7 are accepted),
    classify each with the Bayes model and insert it into the database.

    Returns:
        A newline-joined summary of the added events ("name, tag" lines),
        or None when `day` is not 1 or 7.
    """
    if day != 1 and day != 7:
        return
    db = MySQLdb.connect(host='192.168.3.11', user='kayvon',
                         passwd='<PASSWORD>', db='Dev')
    m = predictor.Model()
    urls = set()
    err = 429
    # BUG FIX: use date/timedelta — the module imports the `datetime` CLASS,
    # so datetime.date.today()/datetime.timedelta raised AttributeError.
    tomorrow = date.today() + timedelta(days=day)
    # BUG FIX: backslash-continued string literal embedded indentation
    # whitespace into the URL; build it by adjacent-literal concatenation.
    url = ('http://www.meetup.com/find/events?allMeetups=true&radius=20'
           '&userFreeform=New+York%2C+NY&mcId=c10001&mcName=New+York'
           '%2C+NY&month={}&day={}&year={}').format(tomorrow.month,
                                                    tomorrow.day,
                                                    tomorrow.year)
    r = urllib2.urlopen(url)
    data = r.read()
    soup = BeautifulSoup(data)
    for link in soup.find_all('a'):
        href = link.get('href')
        if '/events/' in href and '/find/' not in href:
            urls.add(href)
    added = []
    for url in urls:
        # URL shape: https://www.meetup.com/<group>/events/<event-id>/
        tmp = url[23:-1].split('/')
        tmp.remove('events')
        (url_name, event_id) = tmp
        api_url = '{0}/{1}/events/{2}'.format(MEETUP_API_BASE,
                                              url_name,
                                              event_id)
        # BUG FIX: keyword was `HEADERS=HEADERS`, which raised TypeError —
        # urllib2.Request takes a `headers` keyword (case-sensitive).
        request = urllib2.Request(api_url + '?key=' + MEETUP_API_KEY,
                                  headers=HEADERS)
        try:
            result = urllib2.urlopen(request)
        except Exception as e:
            # NOTE(review): urlopen raises HTTPError for non-2xx responses,
            # so the status checks below are mostly unreachable — confirm.
            print("Exception", e)
            break
        status = result.getcode()
        if status == err:
            break
        if status != 200:
            continue
        content = result.read()
        event_info = json.loads(content)
        if 'name' in event_info.keys():
            ename = event_info['name']
        else:
            continue
        tag = m.predict_bayes(ename)
        success = add_event_from_info(db, event_info, event_id, tag)
        if success:
            added.append(ename + ', ' + tag)
    return '\n'.join(added)
def add_events(days):
    """Scrape Meetup events for the next `days` days (starting two days out),
    run retrieval.py on every event URL found, then classify and insert each
    collected event into the database.
    """
    m = predictor.Model()
    db = MySQLdb.connect(host='192.168.3.11', user='kayvon',
                         passwd='<PASSWORD>', db='Dev')
    urls = set()
    # BUG FIX: use date/timedelta — the module imports the `datetime` CLASS,
    # so datetime.date.today()/datetime.timedelta raised AttributeError.
    tomorrow = date.today() + timedelta(days=1)
    for i in range(days):
        tomorrow = tomorrow + timedelta(days=1)
        # BUG FIX: backslash-continued literal embedded indentation
        # whitespace into the URL.
        url = ('www.meetup.com/find/events/?allMeetups=true&radius=20'
               '&userFreeform=New+York%2C+NY&mcId=c10001&mcName=New+York%2C+NY'
               '&month={}&day={}&year={}').format(tomorrow.month,
                                                  tomorrow.day,
                                                  tomorrow.year)
        r = requests.get('https://' + url)
        data = r.text
        soup = BeautifulSoup(data)
        for link in soup.find_all('a'):
            href = link.get('href')
            if '/events/' in href and '/find/' not in href:
                urls.add(href)
    print(len(urls))
    # Truncate the shared file retrieval.py appends event JSON to.
    with open('event_info.txt', 'w+') as f:
        f.write('')
    for url in urls:
        os.system('python code/retrieval.py ' + url)
    print('Finished')
    # event_info.txt holds pairs of lines: a JSON blob, then the event id.
    f = open('event_info.txt', 'r')
    data = f.read()
    lines = data.split('\n')
    i = 0
    while i < len(lines) - 1:
        if lines[i] == '' or lines[i + 1] == '':
            i += 2
            continue
        event_info = json.loads(lines[i])
        if 'name' in event_info.keys():
            ename = event_info['name']
        else:
            i += 2
            continue
        tag = m.predict_bayes(ename)
        add_event_from_info(db, event_info, lines[i + 1], tag)
        i += 2
    f.close()
    db.close()
def add_event_from_info(db, event_info, event_id, tag):
    """Insert one Meetup event (and its venue) into the database.

    Args:
        db: open MySQLdb connection.
        event_info: decoded Meetup API event dict.
        event_id: Meetup event id, stored as `mid`.
        tag: category tag predicted for the event.

    Returns:
        True when the event was inserted, False otherwise.
    """
    def _field(d, key):
        # Meetup omits fields or sends falsy placeholders; treat both as missing.
        v = d.get(key)
        return v if v else None

    if 'description' not in event_info.keys():
        return False
    if len(event_info['description']) < MIN_CHARS_DESC:
        if VERBOSE:
            # BUG FIX: message previously embedded continuation-line
            # whitespace via a backslash-continued string literal.
            print('Failure: event description too short '
                  '(>={} chars needed)'.format(MIN_CHARS_DESC))
        return False
    ename = _field(event_info, 'name')
    if ename is None:
        # BUG FIX: a missing name previously fell through to
        # `None.encode(...)` and crashed at insert time.
        return False
    if 'venue' in event_info.keys():
        venue = event_info['venue']
        lname = _field(venue, 'name')
        lon = _field(venue, 'lon')
        lat = _field(venue, 'lat')
        address_1 = _field(venue, 'address_1')
        zipcode = _field(venue, 'zip')
        city = _field(venue, 'city')
        state = _field(venue, 'state')
    else:
        lname = lon = lat = address_1 = zipcode = city = state = None
    start_time = _field(event_info, 'time')
    duration = _field(event_info, 'duration')
    description = _field(event_info, 'description')
    cursor = db.cursor()
    # Skip events already stored: matched by Meetup id, then by name.
    cursor.execute("""SELECT eid
                      FROM Events
                      WHERE mid = %s
                   """,
                   (event_id, ))
    if cursor.fetchone():
        print('Event already in database.')
        return False
    cursor.execute("""SELECT eid
                      FROM Events
                      WHERE ename = %s
                   """,
                   (ename, ))
    # BUG FIX: the original never fetched this query's result and re-tested
    # the previous (always-None) value, making the name check a no-op.
    if cursor.fetchone():
        print('Event already in database.')
        return False
    loc_query = \
        """
        INSERT
        INTO Locations(lname, lat, lon, address_1, zip, city, state)
        VALUES (%s, %s, %s, %s, %s, %s, %s)
        """
    # BUG FIX: parameters were passed as (lname, lon, lat, ...) against
    # columns (lname, lat, lon, ...), swapping latitude and longitude.
    cursor.execute(loc_query, (
        lname,
        lat,
        lon,
        address_1,
        zipcode,
        city,
        state,
    ))
    db.commit()
    print('Inserted into Locations.')
    cursor.execute('SELECT LAST_INSERT_ID()')
    # BUG FIX: fetchone() returns a 1-tuple; unwrap before using the id.
    lid = cursor.fetchone()[0]
    # BUG FIX: guard against a missing start time — the original divided
    # unconditionally and raised TypeError when start_time was None.
    if start_time:
        start_date = str(datetime.fromtimestamp(start_time / 1000))
    else:
        start_date = None
    if start_date and duration:
        end_date = str(datetime.fromtimestamp((start_time + duration) / 1000))
    else:
        end_date = None
    ev_query = \
        """
        INSERT
        INTO Events(ename, start_date, end_date,
        num_attending, lid, description, mid)
        VALUES (%s, %s, %s, %s, %s, %s, %s);
        """
    cursor.execute(ev_query, (
        ename.encode('ascii', 'ignore'),
        start_date,
        end_date,
        0,
        lid,
        description.encode('ascii', 'ignore'),
        event_id,
    ))
    db.commit()
    print('Inserted into Events.')
    cursor.execute('SELECT LAST_INSERT_ID()')
    eid = cursor.fetchone()[0]
    et_query = \
        """
        INSERT
        INTO EventTags(eid, tag, category)
        VALUES (%s, %s, %s)
        """
    cursor.execute(et_query, (eid, tag, tag))
    db.commit()
    print('Inserted into EventTags.')
    if VERBOSE:
        print('Finished.')
    return True
|
#!/bin/sh
# Create the Docker secret `psql-pw` holding the Postgres password.
# NOTE(review): the password is hard-coded here in plain text, and `echo`
# appends a trailing newline to the secret value — confirm both are intended.
echo "myDBPassword" | docker secret create psql-pw -
|
import { RenderPassDescriptor } from "../webgpu";
import { Subpass } from "./Subpass";
import { Scene } from "../Scene";
import { Camera } from "../Camera";
export class RenderPass {
  renderPassDescriptor = new RenderPassDescriptor();

  private _subpasses: Subpass[] = [];
  private _activeSubpassIndex: number = 0;

  /** All subpasses registered on this render pass, in draw order. */
  get subpasses(): Subpass[] {
    return this._subpasses;
  }

  /**
   * @return Subpass currently being recorded, or the first one
   * if drawing has not started
   */
  get activeSubpass(): Subpass {
    return this._subpasses[this._activeSubpassIndex];
  }

  /**
   * @brief Appends a subpass to the pipeline
   * @param subpass Subpass to append
   */
  addSubpass(subpass: Subpass): void {
    subpass.setRenderPass(this);
    this._subpasses.push(subpass);
  }

  /** Record every subpass into a single render pass on the encoder. */
  draw(scene: Scene, camera: Camera, commandEncoder: GPUCommandEncoder) {
    const passEncoder = commandEncoder.beginRenderPass(this.renderPassDescriptor);
    this._subpasses.forEach((subpass, index) => {
      this._activeSubpassIndex = index;
      subpass.draw(scene, camera, passEncoder);
    });
    this._activeSubpassIndex = 0;
    passEncoder.end();
  }
}
|
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.map.impl.record;
import com.hazelcast.internal.cluster.Versions;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.map.impl.recordstore.expiry.ExpiryMetadata;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.version.Version;
import java.io.IOException;
import static com.hazelcast.internal.nio.IOUtil.readData;
import static com.hazelcast.internal.nio.IOUtil.writeData;
/**
 * Used when reading and writing records
 * for backup and replication operations.
 * <p>
 * Each enum constant couples a serialized type-id with the write/read
 * logic for one concrete {@code Record} implementation; the receiving
 * side resolves the matching constant via {@link #getById(int)}.
 * The write and read branches of each constant mirror each other and
 * must stay in exact field order — this is a wire format.
 */
public enum RecordReaderWriter {

    // Full record: value plus access/update statistics.
    DATA_RECORD_WITH_STATS_READER_WRITER(TypeId.DATA_RECORD_WITH_STATS_TYPE_ID) {
        @Override
        void writeRecord(ObjectDataOutput out, Record record, Data dataValue,
                         ExpiryMetadata expiryMetadata) throws IOException {
            writeData(out, dataValue);
            // RU_COMPAT_4_2
            Version version = out.getVersion();
            if (version.isGreaterOrEqual(Versions.V5_0)) {
                // 5.0+ format: six ints of record stats; expiry metadata
                // is not part of this payload.
                out.writeInt(record.getRawCreationTime());
                out.writeInt(record.getRawLastAccessTime());
                out.writeInt(record.getRawLastUpdateTime());
                out.writeInt(record.getHits());
                out.writeInt(record.getVersion());
                out.writeInt(record.getRawLastStoredTime());
            } else {
                // Pre-5.0 format: expiry fields inline, and the record
                // version written as a long.
                out.writeInt(expiryMetadata.getRawTtl());
                out.writeInt(expiryMetadata.getRawMaxIdle());
                out.writeInt(record.getRawCreationTime());
                out.writeInt(record.getRawLastAccessTime());
                out.writeInt(record.getRawLastUpdateTime());
                out.writeInt(record.getHits());
                out.writeLong(record.getVersion());
                out.writeInt(record.getRawLastStoredTime());
                out.writeInt(expiryMetadata.getRawExpirationTime());
            }
        }

        @Override
        public Record readRecord(ObjectDataInput in,
                                 ExpiryMetadata expiryMetadata) throws IOException {
            Record record = new DataRecordWithStats();
            record.setValue(readData(in));
            // RU_COMPAT_4_2
            Version version = in.getVersion();
            if (version.isGreaterOrEqual(Versions.V5_0)) {
                record.setRawCreationTime(in.readInt());
                record.setRawLastAccessTime(in.readInt());
                record.setRawLastUpdateTime(in.readInt());
                record.setHits(in.readInt());
                record.setVersion(in.readInt());
                record.setRawLastStoredTime(in.readInt());
            } else {
                expiryMetadata.setRawTtl(in.readInt());
                expiryMetadata.setRawMaxIdle(in.readInt());
                record.setRawCreationTime(in.readInt());
                record.setRawLastAccessTime(in.readInt());
                record.setRawLastUpdateTime(in.readInt());
                record.setHits(in.readInt());
                // Pre-5.0 wrote the version as a long; clamp into int range.
                record.setVersion(longToIntVersion(in.readLong()));
                record.setRawLastStoredTime(in.readInt());
                expiryMetadata.setRawExpirationTime(in.readInt());
            }
            return record;
        }
    },

    // Minimal record: value and version only.
    SIMPLE_DATA_RECORD_READER_WRITER(TypeId.SIMPLE_DATA_RECORD_TYPE_ID) {
        @Override
        void writeRecord(ObjectDataOutput out, Record record, Data dataValue,
                         ExpiryMetadata expiryMetadata) throws IOException {
            writeData(out, dataValue);
            out.writeInt(record.getVersion());
            // RU_COMPAT_4_2
            Version version = out.getVersion();
            if (!version.isGreaterOrEqual(Versions.V5_0)) {
                // Pre-5.0 streams carry expiry metadata inline.
                expiryMetadata.write(out);
            }
        }

        @Override
        public Record readRecord(ObjectDataInput in,
                                 ExpiryMetadata expiryMetadata) throws IOException {
            Record record = new SimpleRecord();
            record.setValue(readData(in));
            record.setVersion(in.readInt());
            // RU_COMPAT_4_2
            Version version = in.getVersion();
            if (!version.isGreaterOrEqual(Versions.V5_0)) {
                expiryMetadata.read(in);
            }
            return record;
        }
    },

    // Minimal record plus last-access time, for LRU eviction.
    SIMPLE_DATA_RECORD_WITH_LRU_EVICTION_READER_WRITER(TypeId.SIMPLE_DATA_RECORD_WITH_LRU_EVICTION_TYPE_ID) {
        @Override
        void writeRecord(ObjectDataOutput out, Record record, Data dataValue,
                         ExpiryMetadata expiryMetadata) throws IOException {
            writeData(out, dataValue);
            out.writeInt(record.getVersion());
            out.writeInt(record.getRawLastAccessTime());
            // RU_COMPAT_4_2
            Version version = out.getVersion();
            if (!version.isGreaterOrEqual(Versions.V5_0)) {
                expiryMetadata.write(out);
            }
        }

        @Override
        public Record readRecord(ObjectDataInput in,
                                 ExpiryMetadata expiryMetadata) throws IOException {
            Record record = new SimpleRecordWithLRUEviction();
            record.setValue(readData(in));
            record.setVersion(in.readInt());
            record.setRawLastAccessTime(in.readInt());
            // RU_COMPAT_4_2
            Version version = in.getVersion();
            if (!version.isGreaterOrEqual(Versions.V5_0)) {
                expiryMetadata.read(in);
            }
            return record;
        }
    },

    // Minimal record plus hit count, for LFU eviction.
    SIMPLE_DATA_RECORD_WITH_LFU_EVICTION_READER_WRITER(TypeId.SIMPLE_DATA_RECORD_WITH_LFU_EVICTION_TYPE_ID) {
        @Override
        void writeRecord(ObjectDataOutput out, Record record, Data dataValue,
                         ExpiryMetadata expiryMetadata) throws IOException {
            writeData(out, dataValue);
            out.writeInt(record.getVersion());
            out.writeInt(record.getHits());
            // RU_COMPAT_4_2
            Version version = out.getVersion();
            if (!version.isGreaterOrEqual(Versions.V5_0)) {
                expiryMetadata.write(out);
            }
        }

        @Override
        public Record readRecord(ObjectDataInput in,
                                 ExpiryMetadata expiryMetadata) throws IOException {
            Record record = new SimpleRecordWithLFUEviction();
            record.setValue(readData(in));
            record.setVersion(in.readInt());
            record.setHits(in.readInt());
            // RU_COMPAT_4_2
            Version version = in.getVersion();
            if (!version.isGreaterOrEqual(Versions.V5_0)) {
                expiryMetadata.read(in);
            }
            return record;
        }
    };

    // Wire-format type-id of this reader/writer.
    private byte id;

    RecordReaderWriter(byte id) {
        this.id = id;
    }

    /** @return the wire-format type-id of this reader/writer */
    public byte getId() {
        return id;
    }

    // Serialized type-ids; must never change once released.
    private static class TypeId {
        private static final byte DATA_RECORD_WITH_STATS_TYPE_ID = 2;
        private static final byte SIMPLE_DATA_RECORD_TYPE_ID = 3;
        private static final byte SIMPLE_DATA_RECORD_WITH_LRU_EVICTION_TYPE_ID = 4;
        private static final byte SIMPLE_DATA_RECORD_WITH_LFU_EVICTION_TYPE_ID = 5;
    }

    /**
     * Resolves a reader/writer from its serialized type-id.
     *
     * @throws IllegalArgumentException when the id is unknown
     */
    public static RecordReaderWriter getById(int id) {
        switch (id) {
            case TypeId.DATA_RECORD_WITH_STATS_TYPE_ID:
                return DATA_RECORD_WITH_STATS_READER_WRITER;
            case TypeId.SIMPLE_DATA_RECORD_TYPE_ID:
                return SIMPLE_DATA_RECORD_READER_WRITER;
            case TypeId.SIMPLE_DATA_RECORD_WITH_LRU_EVICTION_TYPE_ID:
                return SIMPLE_DATA_RECORD_WITH_LRU_EVICTION_READER_WRITER;
            case TypeId.SIMPLE_DATA_RECORD_WITH_LFU_EVICTION_TYPE_ID:
                return SIMPLE_DATA_RECORD_WITH_LFU_EVICTION_READER_WRITER;
            default:
                throw new IllegalArgumentException("Not known RecordReaderWriter type-id: " + id);
        }
    }

    // Pre-5.0 streams carry the version as a long; saturate at Integer.MAX_VALUE.
    private static int longToIntVersion(long version) {
        return version >= Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) version;
    }

    /** Writes {@code record} (with serialized value {@code dataValue}) to {@code out}. */
    abstract void writeRecord(ObjectDataOutput out,
                              Record record, Data dataValue, ExpiryMetadata expiryMetadata) throws IOException;

    /** Reads a record from {@code in}; pre-5.0 streams also populate {@code expiryMetadata}. */
    public abstract Record readRecord(ObjectDataInput in, ExpiryMetadata expiryMetadata) throws IOException;
}
|
#!/usr/bin/env bash
# Build the mockClient with GHCJS, stage the generated JS, and minify it.

# Compiling with ghcjs
stack build --stack-yaml=stack-ghcjs.yaml

# Moving the generated files to the js folder
# BUG FIX: use -f so a missing js/ directory does not print an error.
rm -rf js
cp -r .stack-work/dist/x86_64-linux/Cabal-1.24.0.0_ghcjs/build/mockClient/mockClient.jsexe/ js

# Swapping the default html with the one serving a minified version
cp assets/html/index.html js/index.html

# Minifying all.js file using the closure compiler, and removing unnecessary files
# BUG FIX: guard the cd so the rm below can never run in the wrong directory.
cd js || exit 1
# ccjs all.js --debug --compilation_level=ADVANCED_OPTIMIZATIONS > all.min.js
ccjs all.js > all.min.js
rm all.js out.stats runmain.js lib.js out.js rts.js
|
#include <GA/GA_Handle.h>
#include <GU/GU_Detail.h>
#include <OP/OP_AutoLockInputs.h>
#include <OP/OP_Operator.h>
#include <OP/OP_OperatorTable.h>
#include <PRM/PRM_Include.h>
#include <UT/UT_DSOVersion.h>
#include "Calculator.h"
#include "SOP_ComputeTangents.h"
// UI parameter name/label for the "Basic Mode" toggle.
static PRM_Name modeName("basic", "Basic Mode");

// Parameter template list: a single toggle (default on) plus the empty
// sentinel template that terminates the list.
PRM_Template SOP_ComputeTangents::myTemplateList[] = {
    PRM_Template(PRM_TOGGLE, 1, &modeName, PRMoneDefaults),
    PRM_Template()
};
// Factory used by Houdini to instantiate this SOP node.
OP_Node *SOP_ComputeTangents::myConstructor(OP_Network *net, const char *name, OP_Operator *op)
{
    return new SOP_ComputeTangents(net, name, op);
}
SOP_ComputeTangents::SOP_ComputeTangents(OP_Network *net, const char *name, OP_Operator *op) : SOP_Node(net, name, op)
{
    // This SOP bumps attribute data IDs itself (see cookMySop), so tell
    // Houdini it manages data IDs.
    mySopFlags.setManagesDataIDs(true);
}
// Nothing to release explicitly.
SOP_ComputeTangents::~SOP_ComputeTangents()
{
}
// Main cook: validates the input geometry, then computes per-vertex
// tangent attributes via Calculator::callMorty in one of two modes
// selected by the "basic" toggle.
OP_ERROR SOP_ComputeTangents::cookMySop(OP_Context &context)
{
    // Lock inputs for the duration of the cook (auto-unlocks on destruction).
    OP_AutoLockInputs inputs(this);
    if (inputs.lock(context) >= UT_ERROR_ABORT)
    {
        return error();
    }
    // Start from a copy of input 0's geometry.
    duplicateSource(0, context);
    // Perform basic checks to avoid segfaults. We won't fix anything here
    // because it is easier to do inside Houdini.
    GA_ROHandleV3 normalHandle(gdp, GA_ATTRIB_VERTEX, "N");
    if (normalHandle.isInvalid())
    {
        addError(SOP_ERR_INVALID_SRC, "(no vertex normals)");
        return error();
    }
    GA_ROHandleV3 uvHandle(gdp, GA_ATTRIB_VERTEX, "uv");
    if (uvHandle.isInvalid())
    {
        addError(SOP_ERR_INVALID_SRC, "(no vertex uvs)");
        return error();
    }
    // Only triangles and quads are supported by the tangent calculation.
    for (GA_Iterator i(gdp->getPrimitiveRange()); !i.atEnd(); i.advance())
    {
        GA_Size numvtx = gdp->getPrimitive(*i)->getVertexCount();
        if (numvtx != 3 && numvtx != 4)
        {
            addError(SOP_ERR_INVALID_SRC, "(only quads and triangles allowed)");
            return error();
        }
    }
    bool basic = evalInt("basic", 0, context.getTime());
    if (basic)
    {
        // Basic mode: tangentu/sign come from the calculator and tangentv
        // is derived below as sign * (N x tangentu).
        GA_RWHandleV3 tangentuHandle(gdp->addFloatTuple(GA_ATTRIB_VERTEX, "tangentu", 3));
        GA_RWHandleV3 tangentvHandle(gdp->addFloatTuple(GA_ATTRIB_VERTEX, "tangentv", 3));
        GA_RWHandleF signHandle(gdp->addFloatTuple(GA_ATTRIB_VERTEX, "sign", 1));
        // Change type to "normal" from "3 floats". It allows to retain proper
        // tangents directions after transforming geometry. PolyFrame SOP uses
        // vectors for tangents, which is inconsistent with normals.
        gdp->findAttribute(GA_ATTRIB_VERTEX, "tangentu")->setTypeInfo(GA_TYPE_NORMAL);
        gdp->findAttribute(GA_ATTRIB_VERTEX, "tangentv")->setTypeInfo(GA_TYPE_NORMAL);
        Calculator().callMorty(gdp, basic);
        // Calculate "basic" tangentv.
        for (GA_Iterator i(gdp->getVertexRange()); !i.atEnd(); i.advance())
        {
            UT_Vector3F normal, tangentu, tangentv;
            normal = normalHandle.get(*i);
            tangentu = tangentuHandle.get(*i);
            tangentv = signHandle.get(*i) * cross(normal, tangentu);
            tangentvHandle.set(*i, tangentv);
        }
        // Mark the written attributes as modified so downstream nodes re-cook.
        tangentuHandle.bumpDataId();
        tangentvHandle.bumpDataId();
        signHandle.bumpDataId();
    }
    else
    {
        // Looks like a gun.
        // Full mode: the calculator fills tangentu/tangentv plus the
        // magnitude and keep attributes directly.
        GA_RWHandleV3 tangentuHandle(gdp->addFloatTuple(GA_ATTRIB_VERTEX, "tangentu", 3));
        GA_RWHandleV3 tangentvHandle(gdp->addFloatTuple(GA_ATTRIB_VERTEX, "tangentv", 3));
        gdp->findAttribute(GA_ATTRIB_VERTEX, "tangentu")->setTypeInfo(GA_TYPE_NORMAL);
        gdp->findAttribute(GA_ATTRIB_VERTEX, "tangentv")->setTypeInfo(GA_TYPE_NORMAL);
        GA_RWHandleF maguHandle(gdp->addFloatTuple(GA_ATTRIB_VERTEX, "magu", 1));
        GA_RWHandleF magvHandle(gdp->addFloatTuple(GA_ATTRIB_VERTEX, "magv", 1));
        GA_RWHandleI keepHandle(gdp->addIntTuple(GA_ATTRIB_VERTEX, "keep", 1));
        Calculator().callMorty(gdp, basic);
        tangentuHandle.bumpDataId();
        tangentvHandle.bumpDataId();
        maguHandle.bumpDataId();
        magvHandle.bumpDataId();
        keepHandle.bumpDataId();
    }
    return error();
}
// Registers the "computetangents" SOP (exactly one input) with Houdini.
void newSopOperator(OP_OperatorTable *table)
{
    table->addOperator(new OP_Operator("computetangents", "Compute Tangents",
                                       SOP_ComputeTangents::myConstructor,
                                       SOP_ComputeTangents::myTemplateList,
                                       1, 1));
}
|
<gh_stars>1-10
import { Price } from "@interfaces/price.interface";
import { MainCurrency } from "./main-currency.interface";
/**
 * View-model for the exchange UI container: the currencies, prices and
 * application names a container component needs to render.
 */
export interface ExchangeUIContainer {
  /** Currencies selectable as the main/base currency. */
  mainCurrencies: MainCurrency[];
  /** Current prices to display. */
  prices: Price[];
  /** Names of the applications shown in the UI. */
  applicationNames: string[];
}
|
#! /bin/bash
# Manage the lifecycle of a local BOSH-lite VM (create/destroy/suspend/
# snapshot/routes). The actual work is done by helper functions sourced
# from common.sh and bosh-common.sh; this script only parses options.

export SCRIPT="$( basename "${BASH_SOURCE[0]}" )"
export SCRIPTPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Workspace for generated state; defaults to ../workspace next to this script.
export WORKSPACE=${WORKSPACE:-$SCRIPTPATH/../workspace}
source $SCRIPTPATH/common.sh
export BOSH_NON_INTERACTIVE=${BOSH_NON_INTERACTIVE:-true}
# Environment file produced by -b; sourced when present so subsequent
# commands can reach the VM.
export BOSH_ENV_FILE=${BOSH_ENV_FILE:-$WORKSPACE/bosh_env.sh}
if [ -f $BOSH_ENV_FILE ]; then
    source $BOSH_ENV_FILE
fi
source $SCRIPTPATH/bosh-common.sh

# Print command-line help.
function showUsage() {
    echo
    echo "Usage: $SCRIPT [OPTIONS]"
    echo
    echo "OPTIONS"
    echo "  -h         Show Command options "
    echo "  -c         Creates the BOSH-lite VM"
    echo "  -b         Produces an environment variables file to supporting using BOSH cli with the BOSH-lite VM"
    echo "  -d         Destroys the BOSH-lite VM"
    echo "  -s         Saves the state and suspends the BOSH-lite VM"
    echo "  -p         Poweron the BOSH-lite VM assuming a previously saved state."
    echo "  -n         Recreates routes to support communications between host and BOSH-lite VM"
    echo "  -t <NAME>  Takes a snapshot of the BOSH-lite VM with the given NAME"
    echo "  -g <NAME>  Restores a snapshot of the BOSH-lite VM with the given NAME, the VM should be already Saved (-s)"
    echo "  -x <NAME>  Deletes a snapshot of the BOSH-lite VM with the given NAME"
    echo "  -r         Rolls back to the current snapshot of the BOSH-lite VM, the VM should be already Saved (-s)"
    echo "  -l         Lists available snapshot names of the BOSH-lite VM"
}

# Each option maps to a helper function defined in bosh-common.sh.
while getopts "hbcdsrnt:g:x:pl" arg; do
    case "${arg}" in
        b)
            create_bosh_env_file
            ;;
        c)
            ## Create the VM and do additional tasks
            create_bosh_lite_vm
            bosh_lite_vm_additions
            echo
            echo "TIP: To access bosh you should \"source $BOSH_ENV_FILE\""
            echo
            echo "TIP: To deploy Cloud Foundry on bosh you should run \"$SCRIPTPATH/cf_deploy.sh\""
            echo
            ;;
        d)
            destroy_bosh_lite_vm
            ;;
        s)
            savestate_bosh_lite_vm
            ;;
        p)
            resume_bosh_lite_vm
            ;;
        n)
            setup_bosh_lite_routes
            ;;
        t)
            # Snapshot name defaults to INITIAL when the argument is empty.
            export SNAPSHOT_NAME=${OPTARG:-"INITIAL"}
            take_bosh_lite_vm_snapshot $SNAPSHOT_NAME
            ;;
        g)
            export SNAPSHOT_NAME=${OPTARG:-"INITIAL"}
            restore_bosh_lite_vm_snapshot $SNAPSHOT_NAME
            ;;
        x)
            export SNAPSHOT_NAME=${OPTARG:-"INITIAL"}
            delete_bosh_lite_vm_snapshot $SNAPSHOT_NAME
            ;;
        r)
            restore_current_bosh_lite_vm_snapshot
            ;;
        l)
            list_bosh_lite_vm_snapshot
            ;;
        h)
            showUsage
            exit 0
            ;;
        \?)
            >&2 echo
            >&2 echo "Invalid option: -$OPTARG" >&2
            >&2 echo
            showUsage
            exit 1
            ;;
    esac
done
#!/bin/sh
# Launch the continual-learning few-shot training run on a chosen GPU.
# Usage: ./<script> <gpu-id>

# BUG FIX: validate the argument — the original exported an empty GPU_ID
# when called without one.
if [ -z "$1" ]; then
    echo "Usage: $0 <gpu-id>" >&2
    exit 1
fi
export GPU_ID="$1"
echo "$GPU_ID"
cd ..
export DATASET_DIR="datasets/"
export CUDA_VISIBLE_DEVICES="$GPU_ID"
# Activate the relevant virtual environment:
python train_continual_learning_few_shot_system.py --name_of_args_json_file experiment_config/omniglot_variant_default_5_way_1_maml++_high-end_shot__True_10_10_LSLR_conditioned_0.json --gpu_to_use "$GPU_ID"
|
# !/usr/bin/env python3
#
# Copyright 2014 <NAME>
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import cairo
from PIL import Image
def createCover(title, name, width, height):
    """Render a simple grey book cover with `title` and `name` centred on it.

    Writes '<title>-cover.png' and a greyscale JPEG copy whose filename has
    spaces replaced by underscores.

    Args:
        title: book title, drawn at mid-height; also used in the filenames.
        name: author name, drawn at 3/4 height.
        width, height: cover size in pixels.
    """
    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
    ctx = cairo.Context(surface)
    # BUG FIX: the original had a bare `ctx.scale` (attribute access, never
    # called), which was a silent no-op; removed.
    # Light grey background.
    ctx.set_source_rgb(0.75, 0.75, 0.75)
    ctx.rectangle(0, 0, width, height)
    ctx.fill()
    ctx.select_font_face('Sans')
    ctx.set_source_rgb(0.00, 0.00, 0.00)  # black
    # Draw text; font sizes scale relative to a 800px design height.
    ctx.set_font_size(60*height/800)
    draw_text(ctx, 0.5*width, 0.5*height, title)
    ctx.set_font_size(30*height/800)
    draw_text(ctx, 0.5*width, 0.75*height, name)
    # finish
    ctx.stroke()
    surface.write_to_png('%s-cover.png' % title)
    # Convert to grayscale JPEG
    # XXX: Kindle does not show thumbnails for PNG covers
    # NOTE(review): the PNG keeps spaces in `title` but the JPEG replaces
    # them with underscores — confirm downstream expects both names.
    im = Image.open('%s-cover.png' % title)
    im = im.convert('L')
    im.save('%s-cover.jpg' % title.replace(' ', '_'))
# draw centered text
def draw_text(ctx, x, y, t):
    """Draw string `t` centred on (x, y) using the context's current font."""
    extents = ctx.text_extents(t)
    text_width = extents[2]
    text_height = extents[3]
    ctx.move_to(x - 0.5 * text_width, y - 0.5 * text_height)
    ctx.show_text(t)
|
"use strict";
const nums = [1, 2, 3];
function permute(nums) {
const permutations = [];
const foo = (permutation, unique) => {
if (nums.length === permutation.length) {
permutations.push(permutation.slice(0));
return;
}
for (let i = 0; i < nums.length; i++) {
if (!unique.has(nums[i])) {
unique.add(nums[i]);
permutation.push(nums[i]);
foo(permutation, unique);
permutation.pop();
unique.delete(nums[i]);
}
}
};
foo([], new Set());
if (true) {
for (let row of permutations)
console.log(row);
}
return permutations;
}
;
permute(nums);
|
def avg_arr(arr):
    """Return the arithmetic mean of the numbers in `arr`.

    Args:
        arr: a non-empty sequence of numbers.

    Raises:
        ValueError: if `arr` is empty (clearer than the original's
            ZeroDivisionError).
    """
    if not arr:
        raise ValueError("avg_arr() arg is an empty sequence")
    return sum(arr) / len(arr)


if __name__ == "__main__":
    # BUG FIX: the original executed print(avg_arr(arr)) at module level with
    # `arr` undefined, raising NameError on import; demo now uses a concrete
    # example and only runs when the file is executed directly.
    print(avg_arr([1, 2, 3, 4]))
|
<filename>dot-plot/dot-plot.js
// Draws a dot plot: one row per group, one circle per named value.
// NOTE(review): relies on d3 v3 globals (d3.scale, d3.svg.axis, d3.nest)
// and on #<media>Title/#<media>Subtitle/#<media>chart/#<media>plot already
// existing in the DOM — confirm against the calling page.
function makeChart(data, stylename, media, plotpadding, legAlign, yAlign, xMin, xMax, xAxisHighlight, numTicksx, size) {
    var titleYoffset = d3.select("#" + media + "Title").node().getBBox().height;
    var subtitleYoffset = d3.select("#" + media + "Subtitle").node().getBBox().height;
    // return the series names from the first row of the spreadsheet
    var seriesNames = Object.keys(data[0]);
    // Select the plot space in the frame from which to take measurements
    var frame = d3.select("#" + media + "chart");
    var plot = d3.select("#" + media + "plot");
    var yOffset = d3.select("#" + media + "Subtitle").style("font-size");
    yOffset = Number(yOffset.replace(/[^\d.-]/g, ''));
    // Get the width, height and the margins unique to this chart
    var w = plot.node().getBBox().width;
    var h = plot.node().getBBox().height;
    var margin = plotpadding.filter(function(d) {
        return (d.name === media);
    });
    margin = margin[0].margin[0];
    var colours = stylename.linecolours;
    var plotWidth = w - (margin.left + margin.right);
    var plotHeight = h - (margin.top + margin.bottom);
    // Group the rows by their `group` column: one dot-plot row per group.
    var plotData = d3.nest()
        .key(function(d) { return d.group; })
        .entries(data);
    // Extend the supplied x-domain so it always covers the data.
    xMin = Math.min(xMin, d3.min(plotData, function(d) { return d3.min(d.values, function(d) { return d.value; }); }));
    xMax = Math.max(xMax, d3.max(plotData, function(d) { return d3.max(d.values, function(d) { return d.value; }); }));
    var xScale = d3.scale.linear()
        .range([0, plotWidth])
        .domain([xMin, xMax]);
    var xAxis = d3.svg.axis()
        .scale(xScale)
        .ticks(numTicksx)
        .tickSize(plotHeight)
        .orient("bottom");
    var xLabels = plot.append("g")
        .attr("class", media + "xAxis")
        .attr("transform", "translate(" + (margin.left) + "," + (margin.top) + ")")
        .call(xAxis);
    // Highlight the origin tick and any requested x value.
    var originValue = 0;
    var origin = plot.selectAll(".tick").filter(function(d, i) {
        return d == originValue || d == xAxisHighlight;
    }).classed(media + "origin", true);
    var yScale = d3.scale.ordinal()
        .rangeBands([plotHeight + margin.top, margin.top])
        .domain(plotData.map(function(d) { return d.key; }));
    var category = plot.selectAll("." + media + "category")
        .data(plotData)
        .enter()
        .append("g")
        // BUG FIX: was .attr("d", media + function(d){returnd.key}), which
        // string-concatenated the function's source code into a meaningless
        // "d" attribute; give each group an id built from its key instead.
        .attr("id", function(d) { return media + d.key; })
        .attr("transform", function(d) { return "translate(0," + yScale(d.key) + ")"; })
        .attr("class", media + "category")
        .call(function(parent) {
            // Row label.
            parent.append('text')
                .attr("class", media + "Subtitle")
                .attr("x", margin.left)
                .attr("y", 0)
                .text(function(d) { return d.key; });
            // One circle per value in the row.
            parent.selectAll('circles')
                .data(function(d) {
                    return d.values;
                })
                .enter()
                .append('circle')
                .attr("class", function(d, i) {
                    if (d.highlight == "yes") {
                        return media + "highlight";
                    }
                    else { return media + "fill"; }
                })
                .attr("id", function(d) { return d.name + " " + d.value + " " + d.size; })
                .attr("cx", function(d) { return xScale(d.value); })
                .attr("cy", yScale.rangeBand() * .4)
                .attr("r", function(d) {
                    // Optionally size each dot by d.size (area-proportional).
                    if (size) { return Math.sqrt((d.size * yScale.rangeBand() * .1) / Math.PI); }
                    else { return yOffset / 2; }
                })
                .attr("transform", function(d) { return "translate(" + (margin.left) + "," + (0) + ")"; })
                .attr("fill", function(d) {
                    // BUG FIX: was `if (d.highlight)`, which treated any
                    // non-empty value (e.g. "no") as highlighted; match the
                    // class logic above.
                    if (d.highlight == "yes") {
                        return colours[4];
                    }
                    else { return colours[0]; }
                })
                .on("mouseover", pointer)
                .on("click", function(d) {
                    // Toggle highlight state and an accompanying label.
                    var elClass = d3.select(this);
                    if (elClass.attr("class") == media + "fill") {
                        elClass.moveToFront();
                        d3.select(this).attr("class", media + "highlight")
                            .attr("fill", colours[4]);
                        var group = d3.select(this.parentNode);
                        group.append("text")
                            .datum(d)
                            .attr('id', function(d) {
                                return (media + d.name).replace(/\s/g, '');
                            })
                            .attr("x", function(d) {
                                return xScale(d.value) + (margin.left);
                            })
                            .attr("y", function(d) {
                                return yScale.rangeBand() * .4;
                            })
                            .text(function(d) {
                                return d.name + ' ' + d.size;
                            })
                            .attr("class", media + "circLabel")
                            .on("mouseover", pointer);
                    }
                    else {
                        var el = d3.select(this);
                        el.attr("class", media + "fill")
                            .attr("fill", colours[0]);
                        var textEl = d3.select(("#" + media + d.name).replace(/\s/g, ''));
                        textEl.remove();
                    }
                });
            // Pre-render labels for rows marked highlight == "yes".
            parent.selectAll('.' + media + "circLabel")
                .data(function(d) {
                    let filtered = d.values.filter(function(d) {
                        return d.highlight == "yes";
                    });
                    return filtered;
                })
                .enter()
                .append('text')
                .attr("x", function(d) {
                    return xScale(d.value) + (margin.left);
                })
                .attr("y", function(d) {
                    return yScale.rangeBand() * .4;
                })
                .text(function(d) {
                    return d.name + ' ' + d.size;
                })
                .attr("class", media + "circLabel")
                .attr("id", function(d) { return (media + d.name).replace(/\s/g, ''); });
        });
    // Utility: raise an element above its siblings.
    d3.selection.prototype.moveToFront = function() {
        return this.each(function() {
            this.parentNode.appendChild(this);
        });
    };
    function pointer() {
        this.style.cursor = 'pointer';
    }
}
|
#include <iostream>
#include <sstream>
#include <string>
namespace mime {

// A plain text payload to be serialized.
struct text_t {
    std::string data;
};

// Serialize `text` into `s`.
// Empty data becomes the literal token EMPTY; data containing a space,
// tab or newline is wrapped in double quotes with embedded quotes
// backslash-escaped; anything else is emitted verbatim.
void serialize(std::stringstream &s, const mime::text_t &text) {
    const std::string &payload = text.data;
    if (payload.empty()) {
        s << "EMPTY"; // Placeholder for handling empty text data
        return;
    }
    if (payload.find_first_of(" \t\n") == std::string::npos) {
        // No whitespace: no quoting needed.
        s << payload;
        return;
    }
    s << '"';
    for (std::string::size_type i = 0; i < payload.size(); ++i) {
        if (payload[i] == '"') {
            s << "\\\"";
        } else {
            s << payload[i];
        }
    }
    s << '"';
}

} // namespace mime
int main() {
mime::text_t text1{"Hello, World!"};
mime::text_t text2{"This is a multiline\nstring"};
std::stringstream ss1, ss2;
mime::serialize(ss1, text1);
mime::serialize(ss2, text2);
std::cout << "Serialized text1: " << ss1.str() << std::endl;
std::cout << "Serialized text2: " << ss2.str() << std::endl;
return 0;
}
|
/*
* (C) Copyright 2016-2018, by <NAME> and Contributors.
*
* JGraphT : a free Java graph-theory library
*
* This program and the accompanying materials are dual-licensed under
* either
*
* (a) the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation, or (at your option) any
* later version.
*
* or (per the licensee's choosing)
*
* (b) the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation.
*/
package org.jgrapht.alg.cycle;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import org.jgrapht.Graph;
import org.jgrapht.Graphs;
import org.jgrapht.alg.interfaces.CycleBasisAlgorithm;
import org.jgrapht.alg.interfaces.CycleBasisAlgorithm.CycleBasis;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.Pseudograph;
import org.jgrapht.graph.SimpleGraph;
import org.junit.Test;
/**
* Unit tests for {@link QueueBFSFundamentalCycleBasis}.
*
* @author <NAME>
*/
public class QueueBFSFundamentalCycleBasisTest
{
    /**
     * Assert that a cycle consists of exactly the given edges: it must
     * contain every expected edge, and its size must equal the number of
     * expected edges.
     */
    private static void assertCycle(List<DefaultEdge> cycle, DefaultEdge... expectedEdges)
    {
        for (DefaultEdge e : expectedEdges) {
            assertTrue(cycle.contains(e));
        }
        assertEquals(expectedEdges.length, cycle.size());
    }

    /**
     * Grow a simple graph edge by edge and verify the fundamental cycle
     * basis (cycles, total length and weight) after each stage.
     */
    @Test
    public void testSimple()
    {
        Graph<Integer, DefaultEdge> graph = new SimpleGraph<>(DefaultEdge.class);

        // Stage 1: triangle 0-1-2 -> a single 3-cycle.
        Graphs.addEdgeWithVertices(graph, 0, 1);
        Graphs.addEdgeWithVertices(graph, 1, 2);
        Graphs.addEdgeWithVertices(graph, 2, 0);
        CycleBasisAlgorithm<Integer, DefaultEdge> fcb = new QueueBFSFundamentalCycleBasis<>(graph);
        CycleBasis<Integer, DefaultEdge> cb = fcb.getCycleBasis();
        List<List<DefaultEdge>> cycles = new ArrayList<>(cb.getCycles());
        assertEquals(1, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(0, 1), graph.getEdge(1, 2), graph.getEdge(2, 0));
        assertEquals(3, cb.getLength());
        assertEquals(3.0, cb.getWeight(), 0.0001);

        // Stage 2: add path 2-3-0 -> two triangles.
        Graphs.addEdgeWithVertices(graph, 2, 3);
        Graphs.addEdgeWithVertices(graph, 3, 0);
        cb = fcb.getCycleBasis();
        cycles = new ArrayList<>(cb.getCycles());
        assertEquals(2, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(1, 2), graph.getEdge(2, 0), graph.getEdge(1, 0));
        assertCycle(cycles.get(1), graph.getEdge(2, 3), graph.getEdge(0, 2), graph.getEdge(0, 3));
        assertEquals(6, cb.getLength());
        assertEquals(6.0, cb.getWeight(), 0.0001);

        // Stage 3: chord 3-1 -> a third triangle.
        Graphs.addEdgeWithVertices(graph, 3, 1);
        cb = fcb.getCycleBasis();
        cycles = new ArrayList<>(cb.getCycles());
        assertEquals(3, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(1, 2), graph.getEdge(2, 0), graph.getEdge(1, 0));
        assertCycle(cycles.get(1), graph.getEdge(2, 3), graph.getEdge(0, 2), graph.getEdge(0, 3));
        assertCycle(cycles.get(2), graph.getEdge(1, 3), graph.getEdge(0, 1), graph.getEdge(0, 3));
        assertEquals(9, cb.getLength());
        assertEquals(9.0, cb.getWeight(), 0.0001);

        // Stage 4: path 3-4-2 -> an additional 4-cycle.
        Graphs.addEdgeWithVertices(graph, 3, 4);
        Graphs.addEdgeWithVertices(graph, 4, 2);
        cb = fcb.getCycleBasis();
        cycles = new ArrayList<>(cb.getCycles());
        assertEquals(4, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(1, 2), graph.getEdge(2, 0), graph.getEdge(1, 0));
        assertCycle(cycles.get(1), graph.getEdge(2, 3), graph.getEdge(0, 2), graph.getEdge(0, 3));
        assertCycle(cycles.get(2), graph.getEdge(1, 3), graph.getEdge(0, 1), graph.getEdge(0, 3));
        assertCycle(
            cycles.get(3), graph.getEdge(3, 4), graph.getEdge(0, 3), graph.getEdge(0, 2),
            graph.getEdge(2, 4));
        assertEquals(13, cb.getLength());
        assertEquals(13.0, cb.getWeight(), 0.0001);

        // Stage 5: pendant vertex 5 (edge 4-5) -> basis unchanged.
        Graphs.addEdgeWithVertices(graph, 4, 5);
        cb = fcb.getCycleBasis();
        cycles = new ArrayList<>(cb.getCycles());
        assertEquals(4, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(1, 2), graph.getEdge(2, 0), graph.getEdge(1, 0));
        assertCycle(cycles.get(1), graph.getEdge(2, 3), graph.getEdge(0, 2), graph.getEdge(0, 3));
        assertCycle(cycles.get(2), graph.getEdge(1, 3), graph.getEdge(0, 1), graph.getEdge(0, 3));
        assertCycle(
            cycles.get(3), graph.getEdge(3, 4), graph.getEdge(0, 3), graph.getEdge(0, 2),
            graph.getEdge(2, 4));
        assertEquals(13, cb.getLength());
        assertEquals(13.0, cb.getWeight(), 0.0001);

        // Stage 6: chord 5-2 closes triangle 4-5-2.
        Graphs.addEdgeWithVertices(graph, 5, 2);
        cb = fcb.getCycleBasis();
        cycles = new ArrayList<>(cb.getCycles());
        assertEquals(5, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(1, 2), graph.getEdge(2, 0), graph.getEdge(1, 0));
        assertCycle(cycles.get(1), graph.getEdge(2, 3), graph.getEdge(0, 2), graph.getEdge(0, 3));
        assertCycle(cycles.get(2), graph.getEdge(1, 3), graph.getEdge(0, 1), graph.getEdge(0, 3));
        assertCycle(
            cycles.get(3), graph.getEdge(3, 4), graph.getEdge(0, 3), graph.getEdge(0, 2),
            graph.getEdge(2, 4));
        assertCycle(cycles.get(4), graph.getEdge(4, 5), graph.getEdge(2, 4), graph.getEdge(2, 5));
        assertEquals(16, cb.getLength());
        assertEquals(16.0, cb.getWeight(), 0.0001);

        // Stage 7: path 5-6-4 -> a sixth cycle of length 4.
        Graphs.addEdgeWithVertices(graph, 5, 6);
        Graphs.addEdgeWithVertices(graph, 6, 4);
        cb = fcb.getCycleBasis();
        cycles = new ArrayList<>(cb.getCycles());
        assertEquals(6, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(1, 2), graph.getEdge(2, 0), graph.getEdge(1, 0));
        assertCycle(cycles.get(1), graph.getEdge(2, 3), graph.getEdge(0, 2), graph.getEdge(0, 3));
        assertCycle(cycles.get(2), graph.getEdge(1, 3), graph.getEdge(0, 1), graph.getEdge(0, 3));
        assertCycle(
            cycles.get(3), graph.getEdge(3, 4), graph.getEdge(0, 3), graph.getEdge(0, 2),
            graph.getEdge(2, 4));
        assertCycle(cycles.get(4), graph.getEdge(4, 5), graph.getEdge(2, 4), graph.getEdge(2, 5));
        assertCycle(
            cycles.get(5), graph.getEdge(5, 6), graph.getEdge(5, 2), graph.getEdge(2, 4),
            graph.getEdge(4, 6));
        assertEquals(20, cb.getLength());
        assertEquals(20.0, cb.getWeight(), 0.0001);
    }

    /**
     * Pseudograph with parallel 8-9 edges and 7-7 self-loops: every parallel
     * edge and every loop must contribute its own basis cycle.
     */
    @Test
    public void testMultigraphsWithLoops()
    {
        Graph<Integer, DefaultEdge> graph = new Pseudograph<>(DefaultEdge.class);
        Graphs.addEdgeWithVertices(graph, 0, 1);
        Graphs.addEdgeWithVertices(graph, 0, 2);
        Graphs.addEdgeWithVertices(graph, 0, 3);
        Graphs.addEdgeWithVertices(graph, 1, 2);
        Graphs.addEdgeWithVertices(graph, 2, 3);
        Graphs.addEdgeWithVertices(graph, 1, 4);
        Graphs.addEdgeWithVertices(graph, 2, 5);
        Graphs.addEdgeWithVertices(graph, 3, 6);
        Graphs.addEdgeWithVertices(graph, 4, 5);
        Graphs.addEdgeWithVertices(graph, 5, 6);
        Graphs.addEdgeWithVertices(graph, 4, 7);
        Graphs.addEdgeWithVertices(graph, 5, 8);
        Graphs.addEdgeWithVertices(graph, 6, 9);
        Graphs.addEdgeWithVertices(graph, 7, 8);
        DefaultEdge e89_1 = graph.addEdge(8, 9);
        Graphs.addEdgeWithVertices(graph, 7, 9);
        DefaultEdge e89_2 = graph.addEdge(8, 9);
        DefaultEdge e89_3 = graph.addEdge(8, 9);
        DefaultEdge e89_4 = graph.addEdge(8, 9);
        DefaultEdge e77_1 = graph.addEdge(7, 7);
        DefaultEdge e77_2 = graph.addEdge(7, 7);
        DefaultEdge e77_3 = graph.addEdge(7, 7);

        CycleBasisAlgorithm<Integer, DefaultEdge> fcb = new QueueBFSFundamentalCycleBasis<>(graph);
        CycleBasis<Integer, DefaultEdge> cb = fcb.getCycleBasis();
        List<List<DefaultEdge>> cycles = new ArrayList<>(cb.getCycles());
        assertEquals(13, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(0, 1), graph.getEdge(1, 2), graph.getEdge(2, 0));
        assertCycle(cycles.get(1), graph.getEdge(0, 2), graph.getEdge(0, 3), graph.getEdge(2, 3));
        assertCycle(
            cycles.get(2), graph.getEdge(0, 1), graph.getEdge(1, 4), graph.getEdge(4, 5),
            graph.getEdge(5, 2), graph.getEdge(2, 0));
        assertCycle(
            cycles.get(3), graph.getEdge(0, 2), graph.getEdge(2, 5), graph.getEdge(5, 6),
            graph.getEdge(6, 3), graph.getEdge(3, 0));
        assertCycle(
            cycles.get(4), graph.getEdge(0, 1), graph.getEdge(1, 4), graph.getEdge(4, 7),
            graph.getEdge(7, 8), graph.getEdge(8, 5), graph.getEdge(5, 2), graph.getEdge(2, 0));
        assertCycle(
            cycles.get(5), graph.getEdge(0, 2), graph.getEdge(2, 5), graph.getEdge(5, 8), e89_1,
            graph.getEdge(9, 6), graph.getEdge(6, 3), graph.getEdge(3, 0));
        assertCycle(
            cycles.get(6), graph.getEdge(0, 1), graph.getEdge(1, 4), graph.getEdge(4, 7),
            graph.getEdge(7, 9), graph.getEdge(9, 6), graph.getEdge(6, 3), graph.getEdge(3, 0));
        assertCycle(
            cycles.get(7), graph.getEdge(0, 2), graph.getEdge(2, 5), graph.getEdge(5, 8), e89_2,
            graph.getEdge(9, 6), graph.getEdge(6, 3), graph.getEdge(3, 0));
        assertCycle(
            cycles.get(8), graph.getEdge(0, 2), graph.getEdge(2, 5), graph.getEdge(5, 8), e89_3,
            graph.getEdge(9, 6), graph.getEdge(6, 3), graph.getEdge(3, 0));
        assertCycle(
            cycles.get(9), graph.getEdge(0, 2), graph.getEdge(2, 5), graph.getEdge(5, 8), e89_4,
            graph.getEdge(9, 6), graph.getEdge(6, 3), graph.getEdge(3, 0));
        // Each self-loop is a cycle of length one.
        assertCycle(cycles.get(10), e77_1);
        assertCycle(cycles.get(11), e77_2);
        assertCycle(cycles.get(12), e77_3);
        assertEquals(61, cb.getLength());
        assertEquals(61.0, cb.getWeight(), 0.0001);
    }

    /**
     * Two disconnected isomorphic components, each with a doubled edge and
     * two self-loops: the basis is computed per component and concatenated.
     */
    @Test
    public void testMultiGraphWithMultipleComponentsWithLoops()
    {
        Graph<Integer, DefaultEdge> graph = new Pseudograph<>(DefaultEdge.class);
        // First component: triangle 0-1-2 with a doubled 1-2 edge and two 1-1 loops.
        graph.addVertex(0);
        graph.addVertex(1);
        graph.addVertex(2);
        graph.addEdge(0, 1);
        graph.addEdge(0, 2);
        DefaultEdge e12_1 = graph.addEdge(1, 2);
        DefaultEdge e12_2 = graph.addEdge(1, 2);
        DefaultEdge e11_1 = graph.addEdge(1, 1);
        DefaultEdge e11_2 = graph.addEdge(1, 1);
        // Second component: isomorphic copy on vertices 3-5.
        graph.addVertex(3);
        graph.addVertex(4);
        graph.addVertex(5);
        graph.addEdge(3, 4);
        graph.addEdge(3, 5);
        DefaultEdge e45_1 = graph.addEdge(4, 5);
        DefaultEdge e45_2 = graph.addEdge(4, 5);
        DefaultEdge e55_1 = graph.addEdge(5, 5);
        DefaultEdge e55_2 = graph.addEdge(5, 5);

        CycleBasisAlgorithm<Integer, DefaultEdge> fcb = new QueueBFSFundamentalCycleBasis<>(graph);
        CycleBasis<Integer, DefaultEdge> cb = fcb.getCycleBasis();
        List<List<DefaultEdge>> cycles = new ArrayList<>(cb.getCycles());
        assertEquals(8, cb.getCycles().size());
        assertCycle(cycles.get(0), graph.getEdge(0, 1), e12_1, graph.getEdge(2, 0));
        assertCycle(cycles.get(1), graph.getEdge(0, 1), e12_2, graph.getEdge(2, 0));
        assertCycle(cycles.get(2), e11_1);
        assertCycle(cycles.get(3), e11_2);
        assertCycle(cycles.get(4), graph.getEdge(3, 4), e45_1, graph.getEdge(5, 3));
        assertCycle(cycles.get(5), graph.getEdge(3, 4), e45_2, graph.getEdge(5, 3));
        assertCycle(cycles.get(6), e55_1);
        assertCycle(cycles.get(7), e55_2);
        assertEquals(16, cb.getLength());
        assertEquals(16.0, cb.getWeight(), 0.0001);
    }

    /**
     * Two parallel edges form a single cycle of length two.
     */
    @Test
    public void testTwoParallelEdges()
    {
        Graph<Integer, DefaultEdge> graph = new Pseudograph<>(DefaultEdge.class);
        graph.addVertex(0);
        graph.addVertex(1);
        DefaultEdge e1 = graph.addEdge(0, 1);
        DefaultEdge e2 = graph.addEdge(0, 1);
        CycleBasisAlgorithm<Integer, DefaultEdge> fcb = new QueueBFSFundamentalCycleBasis<>(graph);
        CycleBasis<Integer, DefaultEdge> cb = fcb.getCycleBasis();
        List<List<DefaultEdge>> cycles = new ArrayList<>(cb.getCycles());
        assertEquals(1, cb.getCycles().size());
        assertCycle(cycles.get(0), e1, e2);
        assertEquals(2, cb.getLength());
        assertEquals(2.0, cb.getWeight(), 0.0001);
    }

    /**
     * A 4-cycle with two doubled edges: one 2-cycle from the parallel 0-1
     * pair plus two 4-cycles sharing the tree edges.
     */
    @Test
    public void testMoreParallelEdges()
    {
        Graph<Integer, DefaultEdge> graph = new Pseudograph<>(DefaultEdge.class);
        graph.addVertex(0);
        graph.addVertex(1);
        graph.addVertex(2);
        graph.addVertex(3);
        DefaultEdge e01_1 = graph.addEdge(0, 1);
        DefaultEdge e01_2 = graph.addEdge(0, 1);
        DefaultEdge e12 = graph.addEdge(1, 2);
        DefaultEdge e23_1 = graph.addEdge(2, 3);
        DefaultEdge e23_2 = graph.addEdge(2, 3);
        DefaultEdge e30 = graph.addEdge(3, 0);
        CycleBasisAlgorithm<Integer, DefaultEdge> fcb = new QueueBFSFundamentalCycleBasis<>(graph);
        CycleBasis<Integer, DefaultEdge> cb = fcb.getCycleBasis();
        List<List<DefaultEdge>> cycles = new ArrayList<>(cb.getCycles());
        assertEquals(3, cb.getCycles().size());
        assertCycle(cycles.get(0), e01_2, e01_1);
        assertCycle(cycles.get(1), e23_1, e30, e01_1, e12);
        assertCycle(cycles.get(2), e23_2, e30, e01_1, e12);
        assertEquals(10, cb.getLength());
        assertEquals(10.0, cb.getWeight(), 0.0001);
    }

    /**
     * A forest (two disjoint edges) has an empty cycle basis.
     */
    @Test
    public void testZeroCycleSpaceDimension()
    {
        Graph<Integer, DefaultEdge> graph = new Pseudograph<>(DefaultEdge.class);
        graph.addVertex(0);
        graph.addVertex(1);
        graph.addVertex(2);
        graph.addVertex(3);
        graph.addEdge(0, 1);
        graph.addEdge(2, 3);
        CycleBasisAlgorithm<Integer, DefaultEdge> fcb = new QueueBFSFundamentalCycleBasis<>(graph);
        CycleBasis<Integer, DefaultEdge> cb = fcb.getCycleBasis();
        assertEquals(0, cb.getCycles().size());
        assertEquals(0, cb.getLength());
        assertEquals(0d, cb.getWeight(), 0.0001);
    }

    /**
     * The empty graph has an empty cycle basis.
     */
    @Test
    public void testEmptyGraph()
    {
        Graph<Integer, DefaultEdge> graph = new Pseudograph<>(DefaultEdge.class);
        CycleBasisAlgorithm<Integer, DefaultEdge> fcb = new QueueBFSFundamentalCycleBasis<>(graph);
        CycleBasis<Integer, DefaultEdge> cb = fcb.getCycleBasis();
        assertEquals(0, cb.getCycles().size());
        assertEquals(0, cb.getLength());
        assertEquals(0d, cb.getWeight(), 0.0001);
    }
}
|
<filename>targets/TARGET_Silicon_Labs/TARGET_EFM32/trng/sl_trng.c
/*
* True Random Number Generator (TRNG) driver for Silicon Labs devices
*
* Copyright (C) 2016, Silicon Labs, http://www.silabs.com
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "sl_trng.h"
#if defined(TRNG_PRESENT)
#include "em_cmu.h"
#include "em_common.h"
#include <string.h>
#define FIFO_LEVEL_RETRY (1000)
#define TEST_WORDS_MIN (257)
#define TEST_VECTOR_CONDITIONING_KEY_SIZE (4)
static const uint32_t
test_vector_conditioning_key[TEST_VECTOR_CONDITIONING_KEY_SIZE] =
{0x16157E2B, 0xA6D2AE28, 0x8815F7AB, 0x3C4FCF09};
#define TEST_VECTOR_CONDITIONING_INPUT_SIZE (16)
static const uint32_t
test_vector_conditioning_input[TEST_VECTOR_CONDITIONING_INPUT_SIZE] =
{0xE1BCC06B, 0x9199452A, 0x1A7434E1, 0x25199E7F,
0x578A2DAE, 0x9CAC031E, 0xAC6FB79E, 0x518EAF45,
0x461CC830, 0x11E45CA3, 0x19C1FBE5, 0xEF520A1A,
0x45249FF6, 0x179B4FDF, 0x7B412BAD, 0x10376CE6};
#define TEST_VECTOR_CONDITIONING_OUTPUT_SIZE (4)
static const uint32_t
test_vector_conditioning_output[TEST_VECTOR_CONDITIONING_OUTPUT_SIZE] =
{0xA1CAF13F, 0x09AC1F68, 0x30CA0E12, 0xA7E18675};
#define TRNG_STARTUP_TEST_WAIT_RETRY (10000)
/* Pairing of a TRNG register block with the CMU clock that feeds it. */
typedef struct {
TRNG_TypeDef *instance;
CMU_Clock_TypeDef clock;
} sl_trng_device_t;
/* Table of every TRNG instance present on this device family; entries are
 * compiled in only when the corresponding peripheral exists. */
static const sl_trng_device_t sl_trng_devices[TRNG_COUNT] =
{
#if defined(TRNG0)
{
TRNG0,
cmuClock_TRNG0
},
#endif
};
/* Look up the CMU clock that drives the given TRNG instance; falls back to
 * TRNG0's clock when the instance is not found in the device table. */
static CMU_Clock_TypeDef sl_trng_get_clock( TRNG_TypeDef *device )
{
    const sl_trng_device_t *entry = sl_trng_devices;
    const sl_trng_device_t *entry_end = sl_trng_devices + TRNG_COUNT;
    while ( entry != entry_end ) {
        if ( entry->instance == device ) {
            return entry->clock;
        }
        entry++;
    }
    /* Unknown instance: default to the first TRNG's clock. */
    return cmuClock_TRNG0;
}
/**
 * Initialize and start a TRNG peripheral.
 *
 * Enables the peripheral clock, turns the generator on with all of its
 * health-test interrupts enabled (repetition count, 64- and 4096-sample
 * adaptive proportion, preliminary-noise and noise alarms), applies a soft
 * reset, and then busy-waits until the startup tests complete and random
 * words start appearing in the FIFO.
 *
 * @param device  TRNG instance to initialize.
 */
void sl_trng_init( TRNG_TypeDef *device )
{
int i;
/* Enable the TRNG's clock. */
CMU_ClockEnable( sl_trng_get_clock(device), true );
device->CONTROL =
TRNG_CONTROL_ENABLE |
TRNG_CONTROL_REPCOUNTIEN |
TRNG_CONTROL_APT64IEN |
TRNG_CONTROL_APT4096IEN |
TRNG_CONTROL_PREIEN |
TRNG_CONTROL_ALMIEN;
/* Apply software reset */
sl_trng_soft_reset(device);
/* Wait for TRNG to complete startup tests and start filling the FIFO. */
for (i=0; (device->FIFOLEVEL == 0) && (i<TRNG_STARTUP_TEST_WAIT_RETRY); i++);
/* The FIFO must have started filling within the retry budget. */
EFM_ASSERT(i<TRNG_STARTUP_TEST_WAIT_RETRY);
}
/**
 * Stop a TRNG peripheral and release its clock.
 *
 * Counterpart of sl_trng_init(): clears CONTROL (disabling the generator
 * and all of its interrupts) and gates off the peripheral clock.
 *
 * @param device  TRNG instance to shut down.
 */
void sl_trng_free( TRNG_TypeDef *device )
{
/* Disable TRNG. */
device->CONTROL = 0;
/* Disable the TRNG clock. */
CMU_ClockEnable( sl_trng_get_clock(device), false );
}
/**
 * Pulse the TRNG soft-reset bit.
 *
 * Sets TRNG_CONTROL_SOFTRESET and then clears it again while preserving
 * every other CONTROL bit, restarting the generator.
 *
 * @param device  TRNG instance to reset.
 */
void sl_trng_soft_reset( TRNG_TypeDef *device )
{
uint32_t ctrl = device->CONTROL;
/* Assert the reset bit... */
ctrl |= TRNG_CONTROL_SOFTRESET;
device->CONTROL = ctrl;
/* ...then release it, leaving the rest of the configuration intact. */
ctrl &= ~TRNG_CONTROL_SOFTRESET;
device->CONTROL = ctrl;
}
/**
 * Drain and discard every word currently held in the TRNG FIFO.
 *
 * Used to throw away entropy that may have been produced while an alarm
 * condition was active.
 *
 * @param device  TRNG instance whose FIFO is emptied.
 */
static void sl_trng_clear_fifo( TRNG_TypeDef *device )
{
volatile uint32_t val32;
/* Empty FIFO */
while ( device->FIFOLEVEL )
{
/* Read and discard; the volatile local keeps the FIFO read from being
 * optimized away. */
val32 = device->FIFO;
(void)val32;
}
}
/**
 * Program a 128-bit conditioning key into the TRNG KEY registers.
 *
 * The FIFO is cleared first so that no data conditioned with the previous
 * key remains readable.
 *
 * @param device  TRNG instance to program.
 * @param key     Pointer to 16 bytes of key material; need not be 32-bit
 *                aligned.
 * @return 0 (always succeeds).
 */
int sl_trng_set_key( TRNG_TypeDef *device, const unsigned char *key )
{
uint32_t key_words[4];
/* Copy via memcpy instead of dereferencing the buffer through a
 * uint32_t*: the caller's buffer may not be 32-bit aligned, and an
 * unaligned aliased access is undefined behavior on alignment-sensitive
 * cores. Byte order written to the registers is unchanged. */
memcpy( key_words, key, sizeof(key_words) );
sl_trng_clear_fifo(device);
/* Program key in KEY registers of the TRNG. */
device->KEY0 = key_words[0];
device->KEY1 = key_words[1];
device->KEY2 = key_words[2];
device->KEY3 = key_words[3];
return 0;
}
/**
 * Inspect the TRNG STATUS register and react to health-test alarms.
 *
 * A preliminary noise alarm is treated as non-critical: its flag is
 * cleared and the FIFO is drained. Any other alarm triggers a soft reset
 * of the peripheral before the matching error code is returned.
 *
 * @param device  TRNG instance to check.
 * @return 0 when no alarm is pending, otherwise one of the SL_TRNG_ERR_*
 *         codes identifying the first detected alarm.
 */
static int sl_trng_check_status( TRNG_TypeDef *device )
{
uint32_t status = device->STATUS;
if ( (status & (TRNG_STATUS_PREIF
| TRNG_STATUS_REPCOUNTIF
| TRNG_STATUS_APT64IF
| TRNG_STATUS_APT4096IF
| TRNG_STATUS_ALMIF)) == 0 )
{
/* No errors */
return 0;
}
if ( status & TRNG_STATUS_PREIF )
{
/* On a preliminary noise alarm we clear the FIFO and clear
 * the alarm. The preliminary noise alarm is not critical. */
status &= ~TRNG_STATUS_PREIF;
device->STATUS = status;
sl_trng_clear_fifo(device);
return SL_TRNG_ERR_PRELIMINARY_NOISE_ALARM;
}
else
{
/* Clear alarm conditions by doing a TRNG soft reset. */
sl_trng_soft_reset( device );
/* Report the first failed health test, in fixed priority order. */
if ( status & TRNG_STATUS_REPCOUNTIF )
{
return SL_TRNG_ERR_REPETITION_COUNT_TEST_FAILED;
}
if ( status & TRNG_STATUS_APT64IF )
{
return SL_TRNG_ERR_ADAPTIVE_PROPORTION_TEST_64_FAILED;
}
if ( status & TRNG_STATUS_APT4096IF )
{
return SL_TRNG_ERR_ADAPTIVE_PROPORTION_TEST_4096_FAILED;
}
if ( status & TRNG_STATUS_ALMIF )
{
return SL_TRNG_ERR_NOISE_ALARM;
}
}
return 0;
}
/**
 * Copy @p len bytes of random data from the TRNG FIFO into @p output.
 *
 * The caller must have verified (via FIFOLEVEL) that the FIFO holds at
 * least @p len bytes of data.
 *
 * @param device  TRNG instance to read from.
 * @param output  Destination buffer (NOTE(review): assumed 32-bit aligned,
 *                as the original code word-copied through a uint32_t*).
 * @param len     Number of bytes to copy.
 */
static void sl_trng_read_chunk( TRNG_TypeDef *device,
unsigned char *output,
size_t len )
{
uint32_t * out32 = (uint32_t *) output;
uint32_t tmp;
/* Copy whole 32-bit words straight from the FIFO. */
while ( len >= 4)
{
*out32++ = device->FIFO;
len -= 4;
}
/* Copy the remaining 1-3 bytes, if any. Guarding on len > 0 (the
 * previous check of len < 4 was always true here) avoids popping --
 * and discarding -- an extra FIFO word when the requested length is
 * an exact multiple of 4. */
if ( len > 0 )
{
tmp = device->FIFO;
memcpy((uint8_t *)out32, (const uint8_t *) &tmp, len);
}
}
/**
 * Read up to @p len bytes of random data currently available in the FIFO.
 *
 * Does not block waiting for more entropy: collection stops as soon as the
 * FIFO runs empty. The health-test status is checked before each chunk;
 * depending on the SL_TRNG_IGNORE_* compile-time configuration, noise
 * alarms either keep, discard, or ignore the data gathered so far, while
 * any other alarm discards it and aborts with an error.
 *
 * @param device  TRNG instance to read from.
 * @param output  Destination buffer.
 * @param len     Maximum number of bytes to read.
 * @param olen    Receives the number of bytes actually written.
 * @return 0 on success (including the data-unavailable case), otherwise
 *         the SL_TRNG_ERR_* code of the alarm that aborted collection.
 */
int sl_trng_poll( TRNG_TypeDef *device,
unsigned char *output,
size_t len,
size_t *olen )
{
size_t output_len = 0;
size_t chunk_len = 0;
size_t available;
int ret = 0;
while (output_len < len)
{
/* FIFOLEVEL counts 32-bit words; convert to bytes. */
available = device->FIFOLEVEL * 4;
if (available == 0)
{
break;
}
#if !defined(SL_TRNG_IGNORE_ALL_ALARMS)
/* Check status for current data in FIFO
 * and handle any error conditions. */
ret = sl_trng_check_status( device );
#if defined(SL_TRNG_IGNORE_NOISE_ALARMS)
/* Ignore noise alarms by returning 0 (OK) if they occur and
 * keeping the already generated random data. */
if ( (ret == SL_TRNG_ERR_PRELIMINARY_NOISE_ALARM) ||
(ret == SL_TRNG_ERR_NOISE_ALARM) )
{
ret = 0;
continue;
}
#else
/* Noise alarms trigger a FIFO clearing, and we need to throw
 * away the collected entropy. */
if ( (ret == SL_TRNG_ERR_PRELIMINARY_NOISE_ALARM) ||
(ret == SL_TRNG_ERR_NOISE_ALARM) )
{
ret = 0;
output_len = 0;
continue;
}
#endif
/* Alarm has been signaled so we throw the generated data away. */
if (ret != 0)
{
output_len = 0;
break;
}
#endif
/* Read no more than what is both requested and available. */
chunk_len = SL_MIN(len - output_len, available);
sl_trng_read_chunk(device, output + output_len, chunk_len);
output_len += chunk_len;
}
*olen = output_len;
return ret;
}
#endif /* TRNG_PRESENT */
|
import re
def parse_url_info(url):
    """Extract a version number and a sha256 digest from a download URL.

    The version is taken from the final path component (the filename): the
    first hyphen followed by dot-separated digits, e.g. ``foo-1.16.tar.gz``
    -> ``'1.16'``. Versions with any number of components are supported
    (``-1.16.3`` -> ``'1.16.3'``); the previous pattern only captured two.
    The digest is taken from a ``sha256=<64 hex chars>`` substring anywhere
    in the URL.

    Args:
        url: The URL string to inspect.

    Returns:
        dict: ``{'version': str or None, 'sha256': str or None}``, with
        ``None`` for each piece of information the URL does not carry.
    """
    filename = url.split('/')[-1]
    # One or more dot-separated numeric components after a hyphen.
    version_match = re.search(r'-(\d+(?:\.\d+)+)', filename)
    version = version_match.group(1) if version_match else None
    sha256_match = re.search(r'sha256=([a-fA-F0-9]{64})', url)
    sha256 = sha256_match.group(1) if sha256_match else None
    return {'version': version, 'sha256': sha256}
# Demonstrate the function. This URL carries a version in its filename but
# no sha256= parameter, so the digest comes back as None.
url = "http://search.cpan.org/CPAN/authors/id/X/XA/XAOC/ExtUtils-PkgConfig-1.16.tar.gz"
print(parse_url_info(url)) # Output: {'version': '1.16', 'sha256': None}
|
#!/bin/bash
# Wait for the DietPi first-run setup to finish, then reboot the system.
#
# grep -q exits 0 once the marker line is present; stderr is silenced so a
# not-yet-created log file simply keeps us waiting instead of ending the
# loop (the original $(grep -c ...) substitution expanded to an empty
# string -- not 0 -- for a missing file, which ended the wait prematurely).
until grep -q "Installation completed" /var/tmp/dietpi/logs/dietpi-firstrun-setup.log 2>/dev/null
do
	sleep 5
done
# Grace period so post-setup activity can settle before the reboot.
sleep 30
reboot
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.