text
stringlengths 1
1.05M
|
|---|
// A binary search tree node: an integer key plus left/right child pointers.
struct Node {
    int key;                // value stored at this node; BST-ordered
    Node *left, *right;     // children; NULL when absent
};
// Search the BST rooted at `root` for `key`.
// Returns the node holding `key`, or NULL when the key is absent
// (or the tree is empty). Iterative walk; O(h) time, O(1) space.
Node* searchKey(Node* root, int key)
{
    while (root != NULL && root->key != key)
        root = (root->key < key) ? root->right : root->left;
    return root;
}
|
<filename>home/fields.py
from django.db import models
from django.utils.functional import cached_property
from common.utils import ForeignKeyField, get_selected_or_fallback
from wagtail.core.blocks import StructBlock, StreamBlock, CharBlock, RichTextBlock, URLBlock
from wagtail.core.fields import StreamField
from wagtail.images.blocks import ImageChooserBlock
class HomePageFeature(StructBlock):
    """Block class for flexible home page features."""

    # Short heading shown on the feature card.
    title = CharBlock(icon="title", classname="title", help_text="Feature title.")
    # Rich-text body copy for the feature.
    description = RichTextBlock()
    # Optional illustration for the feature.
    image = ImageChooserBlock(required=False)
    # Optional call-to-action; both the label and the URL may be left empty.
    button_text = CharBlock(required=False, help_text="Button text.")
    button_url = URLBlock(required=False, help_text="Button URL.")
class FlexibleFeatures(StreamBlock):
    """A block for holding flexible home page features."""

    # Repeatable stream of HomePageFeature entries.
    feature = HomePageFeature()
class HomeFieldsMixin(models.Model):
    """Abstract mixin class for the home page db fields."""

    class Meta:
        abstract = True

    header_video = models.URLField(
        max_length=255,
        blank=True,
        help_text='Optional: video embed URL for page header',
    )
    activities_description = models.CharField(
        max_length=255,
        help_text='Description for the activities statistics section',
    )
    organisations_description = models.CharField(
        max_length=255,
        help_text='Description for the organisations statistics section',
    )
    getting_started_title = models.CharField(
        max_length=255,
        help_text='Title for the getting started section',
    )
    flexible_features = StreamField(FlexibleFeatures(required=False), null=True, blank=True)
    about_iati_title = models.CharField(
        max_length=255,
        help_text='Title for the about IATI section',
    )
    about_iati_description = models.TextField(
        help_text='Description for the about IATI section',
    )
    about_iati_video = models.URLField(
        max_length=255,
        blank=True,
        help_text='Optional: video embed URL for the about IATI section',
    )
    about_iati_page = ForeignKeyField(
        model='wagtailcore.Page',
        required=True,
    )
    about_iati_link_label = models.CharField(
        max_length=255,
        help_text='Link label for the about IATI section',
    )
    iati_in_action_title = models.CharField(
        max_length=255,
        help_text='Title for the IATI in action section',
    )
    iati_in_action_description = models.TextField(
        blank=True,
        help_text='Optional: description for the IATI in action section',
        max_length=500
    )
    iati_tools_title = models.CharField(
        max_length=255,
        help_text='Title for the IATI tools section',
    )
    iati_tools_description = models.TextField(
        blank=True,
        help_text='Optional: description for the IATI tools section',
    )
    # NOTE(review): help_text says "latest new" (typo for "latest news");
    # fixing it generates a migration, so do it deliberately.
    latest_news_title = models.CharField(
        max_length=255,
        help_text='Title for the latest new section',
    )
    latest_news_link_label = models.CharField(
        max_length=255,
        help_text='Label for the view all news button',
    )
    latest_news_tweets_title = models.CharField(
        max_length=255,
        help_text='Title for the latest news Twitter section',
    )

    @cached_property
    def testimonial(self):
        """Return a random testimonial, or None if there are none.

        ``first()`` yields None on an empty queryset; the subsequent
        attribute access then raises AttributeError, which is treated
        here as "no testimonial".
        """
        try:
            return self.testimonial_items.all().order_by('?').first().testimonial
        except AttributeError:
            return None

    @cached_property
    def getting_started(self):
        """Return getting started items whose linked page is live."""
        return [x for x in self.getting_started_items.all() if x.page.live]

    @cached_property
    def iati_in_action_featured(self):
        """Return the first IATI in action featured item if live, else None.

        Fixed: the original dereferenced ``featured.page`` unconditionally
        and raised AttributeError when no featured item existed.
        """
        featured = self.iati_in_action_featured_item.all().first()
        if featured is None:
            return None
        return featured if featured.page.live else None

    @cached_property
    def iati_in_action(self):
        """Return IATI in action items whose linked page is live."""
        return [x for x in self.iati_in_action_items.all() if x.page.live]

    @cached_property
    def tools(self):
        """Return specific pages of IATI tools items whose page is live."""
        return [x.page.specific for x in self.iati_tools_items.all() if x.page.live]

    @cached_property
    def news_index(self):
        """Return the first live news index page."""
        # Imported locally to avoid a circular import at module load time.
        from news.models import NewsIndexPage
        return NewsIndexPage.objects.live().first()

    @cached_property
    def selected_news(self):
        """Return specific pages of selected latest news items that are live."""
        return [x.page.specific for x in self.latest_news_items.all() if x.page.live]

    @cached_property
    def news(self):
        """Return up to three news items, falling back to recent NewsPages."""
        # Imported locally to avoid a circular import at module load time.
        from news.models import NewsPage
        return get_selected_or_fallback(
            selected=self.selected_news,
            fallback=NewsPage.objects,
            max_length=3,
            order='-date',
        )
|
# frozen_string_literal: true

# VCR configuration: records/replays HTTP interactions for the spec suite.
require 'vcr'

VCR.configure do |c|
  c.cassette_library_dir = 'vcr_cassettes'
  c.hook_into :webmock
  # Scrub secrets from recorded cassettes: each env value is replaced by a
  # stable placeholder string so cassettes can be committed safely.
  c.filter_sensitive_data('TELEGRAM_BOT_API_KEY') { ENV['TELEGRAM_BOT_API_KEY'] }
  c.filter_sensitive_data('TELEGRAM_BOT_USERNAME') { ENV['TELEGRAM_BOT_USERNAME'] }
  # NOTE(review): placeholder '*100EYES' does not follow the env-var-name
  # pattern used by the other filters — presumably intentional; confirm.
  c.filter_sensitive_data('*100EYES') { ENV['THREEMARB_API_IDENTITY'] }
  c.filter_sensitive_data('THREEMARB_API_SECRET') { ENV['THREEMARB_API_SECRET'] }
  c.filter_sensitive_data('THREEMARB_PRIVATE') { ENV['THREEMARB_PRIVATE'] }
  c.filter_sensitive_data('SIGNAL_SERVER_PHONE_NUMBER') { ENV['SIGNAL_SERVER_PHONE_NUMBER'] }
  # Enables the :vcr RSpec metadata tag for automatic cassette management.
  c.configure_rspec_metadata!
end
|
/*
* Copyright 2020 Google LLC All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.imf.osc
import java.sql.Timestamp
import java.time.format.DateTimeFormatter
import java.time.{LocalDateTime, OffsetDateTime, ZoneId, ZonedDateTime}
import com.google.cloud.bigquery.{Field, FieldList, Schema, StandardSQLTypeName}
import com.google.cloud.imf.osc.Decoders.{DecimalDecoder, StringDecoder, TimestampDecoder, TimestampDecoder2}
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector
import org.scalatest.flatspec.AnyFlatSpec
// Unit tests for the OSC decoders: timestamp parsing and offset handling,
// zone conversion, decimal/long string parsing, and CLI schema parsing.
class DecoderSpec extends AnyFlatSpec {
  Util.configureLogging(true)
  // Pin the JVM default time zone so the hour assertions are deterministic.
  System.setProperty("user.timezone", "Etc/UTC");

  "decoder" should "parse timestamp" in {
    val pattern = "yyyy-MM-dd HH:mm:ssz"
    val pattern2 = Decoders.LocalFormat
    val example = "2006-01-02 03:04:05+00:00"
    val example2 = "2006-01-02 03:04:05"
    val fmt = DateTimeFormatter.ofPattern(pattern)
    val fmt2 = DateTimeFormatter.ofPattern(pattern2)
    val t = ZonedDateTime.from(fmt.parse(example))
    // The zone-less parse pinned to GMT must agree with the zoned parse above.
    val t2 = LocalDateTime.from(fmt2.parse(example2)).atZone(ZoneId.of("Etc/GMT"))
    assert(t.toEpochSecond == t2.toEpochSecond)
    assert(t.getNano == 0)
    assert(t2.getNano == 0)
  }

  "timestamp decoder" should "offset" in {
    // (input, hour in the input's own offset, hour after conversion to UTC)
    val example = Seq(
      ("2020-04-17 12:08:57+00:00",12,12),
      ("2020-04-17 12:08:57+01:00",12,11),
      ("2020-04-17 12:08:57-00:00",12,12),
      ("2020-04-17 12:08:57-01:00",12,13),
      ("2020-04-17 12:08:57-02:00",12,14),
      ("2020-04-17 12:08:57-03:00",12,15),
      ("2020-04-17 12:08:57-04:00",12,16),
      ("2020-04-17 12:08:57-05:00",12,17),
      ("2020-04-17 12:08:57-06:00",12,18),
      ("2020-04-17 12:08:57-07:00",12,19),
      ("2020-04-17 12:08:57-08:00",12,20)
    )
    val fmt = DateTimeFormatter.ofPattern(Decoders.OffsetFormat)
    for (e <- example){
      val timestamp = OffsetDateTime.from(fmt.parse(e._1))
      val utcTimeStamp = timestamp.atZoneSameInstant(Decoders.UTC)
      System.out.println(s"${e._1} ${Timestamp.valueOf(timestamp.toLocalDateTime)} ${timestamp.toInstant.getEpochSecond / 3600} ${timestamp.toEpochSecond / 3600} ${utcTimeStamp.getHour}")
      assert(timestamp.getHour == e._2)
      assert(utcTimeStamp.getHour == e._3)
    }
  }

  it should "zone2" in {
    // NOTE(review): zone id "Etc/Utc" differs in case from the usual
    // "Etc/UTC"; java.time ZoneId lookups are case-sensitive, so presumably
    // TableSchemaProvider normalizes the string — confirm.
    val sp = new TableSchemaProvider(Schema.of(FieldList.of(Seq[Field](
      Field.of("a", StandardSQLTypeName.STRING),
      Field.of("b", StandardSQLTypeName.TIMESTAMP),
      Field.of("c", StandardSQLTypeName.TIMESTAMP)
    ):_*)), "Etc/Utc")
    val example = TestUtil.resource("sample2.txt")
    val decoders = sp.decoders
    val cols = decoders.map(_.columnVector(12))
    val lines = example.linesIterator.toArray
    var i = 0
    var j = 0
    // Decode each thorn-delimited field of each sample line into its column
    // vector, then spot-check the second (TIMESTAMP) column.
    while (j < lines.length) {
      val fields = lines(j).split('þ')
      i = 0
      while (i < decoders.length) {
        val decoder = decoders(i)
        val col = cols(i)
        decoder.get(fields(i), col, j)
        (decoder,col) match {
          case (_: TimestampDecoder, x: TimestampColumnVector) if i == 1 =>
            val t = x.time(j)
            val epochHour = t/3600000
            val ts = new Timestamp(t)
            val msg = s"$i $j $epochHour ${ts.toString()}"
            System.out.println(msg)
            // Sample data advances one hour per row from epoch hour 440821.
            assert(epochHour == 440821 + j)
          case _ =>
        }
        i += 1
      }
      j += 1
    }
  }

  it should "zone" in {
    // (input with explicit zone, expected hour after conversion to UTC)
    val example = Seq(
      ("2020-04-17 12:08:57 UTC+00:00",12),
      ("2020-04-17 12:08:57 UTC+01:00",11),
      ("2020-04-17 12:08:57 UTC-07:00",19),
      ("2020-04-17 12:08:57 UTC-08:00",20)
    )
    val fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss z")
    for (e <- example){
      val timestamp = ZonedDateTime.from(fmt.parse(e._1))
      val utcTimestamp = timestamp.withZoneSameInstant(Decoders.UTC)
      assert(utcTimestamp.getHour == e._2)
    }
  }

  "StringToNum" should "decimal" in {
    // decimalValue(s, scale) scales the parsed value by 10^scale.
    // positive
    assert(StringToNum.decimalValue("1",5) == 100000L)
    assert(StringToNum.decimalValue("0.01",5) == 1000L)
    assert(StringToNum.decimalValue("0.00001",5) == 1L)
    // no leading zero
    assert(StringToNum.decimalValue(".01",5) == 1000L)
    assert(StringToNum.decimalValue(".00001",5) == 1L)
    assert(StringToNum.decimalValue(".",5) == 0L)
    // negative
    assert(StringToNum.decimalValue("-0.01",5) == -1000L)
    assert(StringToNum.decimalValue("-.01",5) == -1000L)
    assert(StringToNum.decimalValue("-1",5) == -100000L)
    assert(StringToNum.decimalValue("-.00001",5) == -1L)
    assert(StringToNum.decimalValue("-0.00001",5) == -1L)
    // excess scale — digits beyond the requested scale are truncated
    assert(StringToNum.decimalValue("0.010001",5) == 1000L)
    assert(StringToNum.decimalValue(".010001",5) == 1000L)
    assert(StringToNum.decimalValue("1.000001",5) == 100000L)
    assert(StringToNum.decimalValue(".000011",5) == 1L)
    assert(StringToNum.decimalValue("0.000011",5) == 1L)
    assert(StringToNum.decimalValue("-0.010001",5) == -1000L)
    assert(StringToNum.decimalValue("-.010001",5) == -1000L)
    assert(StringToNum.decimalValue("-1.000001",5) == -100000L)
    assert(StringToNum.decimalValue("-.000011",5) == -1L)
    assert(StringToNum.decimalValue("-0.000011",5) == -1L)
  }

  it should "long" in {
    assert(StringToNum.longValue("1") == 1L)
    assert(StringToNum.longValue("11") == 11L)
    assert(StringToNum.longValue("111") == 111)
    assert(StringToNum.longValue("12345") == 12345L)
    assert(StringToNum.longValue("123456789") == 123456789L)
    assert(StringToNum.longValue("-1") == -1L)
    assert(StringToNum.longValue("-11") == -11L)
    assert(StringToNum.longValue("-111") == -111L)
    assert(StringToNum.longValue("-12345") == -12345L)
    assert(StringToNum.longValue("-123456789") == -123456789L)
  }

  "cli" should "parse schema" in {
    // Comma-separated column specs: name:TYPE[:size | precision.scale | zone]
    val example = "key1:STRING:24,key2:STRING:24,key3:STRING:24,key4:STRING:24,STATUS:STRING:15,date1:TIMESTAMP,qty1:NUMERIC:14.4,key5:STRING:24,key6:STRING:24,qty2:NUMERIC:14.4,date2:TIMESTAMP,key7:STRING:24,key8:STRING:24,timestamp1:TIMESTAMP,timestamp2:TIMESTAMP,id1:STRING:40,id2:STRING:40,id3:STRING:40,id4:STRING:40,id5:NUMERIC:5.0,rank:TIMESTAMP:America/Chicago"
    val sp = CliSchemaProvider(example)
    val expected = Array[Decoder](
      StringDecoder(24),
      StringDecoder(24),
      StringDecoder(24),
      StringDecoder(24),
      StringDecoder(15),
      TimestampDecoder(),
      DecimalDecoder(14,4),
      StringDecoder(24),
      StringDecoder(24),
      DecimalDecoder(14,4),
      TimestampDecoder(),
      StringDecoder(24),
      StringDecoder(24),
      TimestampDecoder(),
      TimestampDecoder(),
      StringDecoder(40),
      StringDecoder(40),
      StringDecoder(40),
      StringDecoder(40),
      DecimalDecoder(5,0),
      TimestampDecoder2(zoneId = "America/Chicago")).toSeq
    assert(sp.decoders.toSeq == expected)
  }
}
|
<reponame>stlankes/hermit-playground
/*
* Copyright (c) 2010, <NAME>, RWTH Aachen University
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University nor the names of its contributors
* may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* author <NAME>
* @file arch/x86/include/asm/limits.h
* @brief Define constants related to numerical value-ranges of variable types
*
* This file contains define constants for the numerical
* ranges of the most typical variable types.
*/
#ifndef __ARCH_LIMITS_H__
#define __ARCH_LIMITS_H__

#ifdef __cplusplus
extern "C" {
#endif

/* NOTE(review): LONG_MAX/ULONG_MAX below are the 32-bit values; on an LP64
 * target (64-bit long) they would understate the real range. Presumably this
 * header targets an ILP32 ABI — confirm against the build configuration. */

/** Number of bits in a char */
#define CHAR_BIT 8
/** Maximum value for a signed char */
#define SCHAR_MAX 0x7f
/** Minimum value for a signed char */
#define SCHAR_MIN (-0x7f - 1)
/** Maximum value for an unsigned char */
#define UCHAR_MAX 0xff
/** Maximum value for an unsigned short */
#define USHRT_MAX 0xffff
/** Maximum value for a short */
#define SHRT_MAX 0x7fff
/** Minimum value for a short */
#define SHRT_MIN (-0x7fff - 1)
/** Maximum value for an unsigned int */
#define UINT_MAX 0xffffffffU
/** Maximum value for an int */
#define INT_MAX 0x7fffffff
/** Minimum value for an int */
#define INT_MIN (-0x7fffffff - 1)
/** Maximum value for an unsigned long */
#define ULONG_MAX 0xffffffffUL
/** Maximum value for a long */
#define LONG_MAX 0x7fffffffL
/** Minimum value for a long */
#define LONG_MIN (-0x7fffffffL - 1)
/** Maximum value for an unsigned long long */
#define ULLONG_MAX 0xffffffffffffffffULL
/** Maximum value for a long long */
#define LLONG_MAX 0x7fffffffffffffffLL
/** Minimum value for a long long */
#define LLONG_MIN (-0x7fffffffffffffffLL - 1)

#ifdef __cplusplus
}
#endif

#endif
|
/*
Copyright 2020-2021 University of Oxford
and Health and Social Care Information Centre, also known as NHS Digital
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
*/
import { Component, OnInit } from '@angular/core';
import { MessageService } from '@mdm/services/message.service';
import { ClipboardService } from 'ngx-clipboard';
import { YoutrackService } from '@mdm/services/youtrack.service';
import { SharedService } from '@mdm/services/shared.service';
import { ErrorComponent } from '../error.component';
@Component({
selector: 'mdm-not-authorized-error',
templateUrl: '../error.component.html',
styleUrls: []
})
/**
 * Error page shown when the server refuses access to the requested item.
 * All shared error-page plumbing lives in ErrorComponent; this subclass only
 * supplies the "Not Authorized" copy and the raw error detail rows.
 */
export class NotAuthorizedComponent extends ErrorComponent implements OnInit {
  constructor(protected messageService: MessageService,
              protected clipboardService: ClipboardService,
              protected sharedService: SharedService,
              protected youtrackService: YoutrackService) {
    super(messageService, clipboardService, sharedService, youtrackService);

    this.errorHeader = 'Not Authorized';
    this.errorMessage = 'We\'re sorry, but the server does not allow you to view this page.';
    this.errorResolution = 'You may need to check that the item you have requested actually exists, and that you have permission to view it';
    this.errorReportMessage = 'Alternatively, if you believe you really should have access to this item, please report the issue to us by using the link below:';

    // Surface the raw error details as table rows.
    const details = [
      { field: 'Message', value: this.lastError.message, code: false },
      { field: 'Status', value: this.lastError.status, code: false },
      { field: 'Path', value: this.lastError.url, code: false }
    ];
    this.dataSource.push(...details);
  }
}
|
<filename>lib/src/schemas/documentEntity.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Created by aman on 10/7/17.
*/
// Schema entity describing a single uploaded document awaiting verification.
var DocManagerMultipleUploadsEntity = {
    // Entity for Upload DocManagerMultipleUploads component
    name: "verify_document",
    fields: [
        {
            // Reference to the uploaded file (see the doc_ref entity below).
            name: "document",
            type: "entity",
            entityName: "doc_ref"
        },
        {
            name: "uid",
            type: "string"
        },
        {
            // Verification container: latest status plus full history.
            name: "verification",
            type: "entity",
            entityName: "verification_container"
        },
        {
            // NOTE(review): modelled as "string" rather than "bool" —
            // presumably an empty/non-empty flag; confirm at the form layer.
            name: "waive_off_requested",
            type: "string"
        },
        {
            name: "comment",
            type: "string",
            // Validation: a comment is mandatory whenever a waive-off is
            // requested; returns the error message, or "" when valid.
            error: function (m, r, g) {
                return m.waive_off_requested
                    ? !m.comment
                        ? "Please give a reason for waive off"
                        : ""
                    : "";
            }
        },
        {
            name: "is_group",
            type: "bool"
        },
        {
            name: "document_type",
            type: "string"
        },
        {
            name: "is_multi_file",
            type: "bool"
        }
    ]
};
exports.DocManagerMultipleUploadsEntity = DocManagerMultipleUploadsEntity;
// Container for a document's verification state: most recent result plus history.
var VerificationStatusEntity = {
    // Entity for Verification Status which contains history and latest verification object
    name: "verification_container",
    fields: [
        {
            // Most recent verification result.
            name: "latest",
            type: "entity",
            entityName: "verification_entity"
        },
        {
            // All past verification results.
            name: "history",
            type: "array",
            arrayType: "entity",
            // NOTE(review): uses `entityType` where every other entity
            // reference in this file uses `entityName` — presumably the key
            // the array machinery expects; confirm it is not a typo.
            entityType: "verification_entity"
        }
    ]
};
exports.VerificationStatusEntity = VerificationStatusEntity;

// A single verification result: when, what outcome, and who/how it was checked.
var latestVerificationStatusEntity = {
    // Entity for Latest Verification Status
    name: "verification_entity",
    fields: [
        {
            name: "timestamp",
            type: "string"
        },
        {
            name: "status",
            type: "string"
        },
        {
            name: "comments",
            type: "string"
        },
        {
            name: "verification_details",
            type: "entity",
            entityName: "verification_details_entity"
        }
    ]
};
exports.latestVerificationStatusEntity = latestVerificationStatusEntity;

// How a verification was performed, including admin attribution.
var verificationDetailsEntity = {
    // Entity for verification_details
    name: "verification_details_entity",
    fields: [
        {
            name: "method",
            type: "string"
        },
        {
            name: "admin_verification_details",
            type: "entity",
            entityName: "admin_verification_detail_entity"
        }
    ]
};
exports.verificationDetailsEntity = verificationDetailsEntity;

// Attribution record: which admin user performed the check.
var adminVerificationDetailEntity = {
    // Entity for admin_verification_detail_entity
    name: "admin_verification_detail_entity",
    fields: [
        {
            name: "checked_by_user_name",
            type: "string"
        }
    ]
};
exports.adminVerificationDetailEntity = adminVerificationDetailEntity;

// Reference to an uploaded file: its id and a temporary access URL.
var uploadDocumentEntity = {
    // Entity for Upload document component
    name: "doc_ref",
    fields: [
        {
            name: "doc_uid",
            type: "string"
        },
        {
            name: "temp_url",
            type: "string"
        }
    ]
};
exports.uploadDocumentEntity = uploadDocumentEntity;
//# sourceMappingURL=documentEntity.js.map
|
#!/bin/sh
# Synchronize the password store with its git remote, truncating history
# in between to keep the repository small.
# Fix: abort on the first failing command so we never truncate history or
# push after a failed pull (the original continued unconditionally).
set -e

echo "Synchronizing pass"
pass git pull
pass-truncate-history
pass git push
|
#!/usr/bin/bash4
source $(dirname ${BASH_SOURCE[0]})/../cmdarg.sh
# Verify that overriding cmdarg_helpers['usage'] replaces the default
# --help output with the custom helper's output.
function shunittest_test_usage_helper
{
    function usage_helper
    {
        echo "LOL I AM A HELPER"
        return 0
    }
    function parser {
        cmdarg_purge
        cmdarg_helpers['usage']=usage_helper
        cmdarg_parse --help
    }
    # --help must now print exactly the helper's message (stdout or stderr).
    [[ "$(parser 2>&1)" == "LOL I AM A HELPER" ]] || return 1
}
# Verify that a custom cmdarg_helpers['describe'] receives every field of an
# option declaration (short/long opt, type, default, description, flags,
# validator) for each supported option style.
function shunittest_test_describe_helper
{
    function always_succeed
    {
        return 0
    }
    # Custom describe helper: echoes all seven declaration fields as one
    # colon-delimited record so the assertions below can compare exactly.
    function describe
    {
        set -u
        local longopt opt argtype default description
        longopt=$1
        opt=$2
        argtype=$3
        default="$4"
        description="$5"
        flags="$6"
        validator="$7"
        set +u
        echo "${opt}:${longopt}:${argtype}:${description}:${default}:${flags}:${validator}"
    }
    function parser
    {
        declare -a array
        declare -A hash
        cmdarg_purge
        cmdarg_helpers['describe']=describe
        cmdarg 's:' 'string' 'some string' '12345' always_succeed
        cmdarg 'b' 'boolean' 'some boolean'
        cmdarg 'a?[]' 'array' 'some array'
        cmdarg 'H?{}' 'hash' 'some hash'
        # NOTE(review): set -x/+x around the assertions looks like leftover
        # debug tracing (it spams xtrace output into the run) — confirm
        # whether it can be removed.
        set -x
        [[ "$(cmdarg_describe s)" == "s:string:${CMDARG_TYPE_STRING}:some string:12345:${CMDARG_FLAG_REQARG}:always_succeed" ]] || return 1
        [[ "$(cmdarg_describe b)" == "b:boolean:${CMDARG_TYPE_BOOLEAN}:some boolean::${CMDARG_FLAG_NOARG}:" ]] || return 1
        [[ "$(cmdarg_describe a)" == "a:array:${CMDARG_TYPE_ARRAY}:some array::${CMDARG_FLAG_OPTARG}:" ]] || return 1
        [[ "$(cmdarg_describe H)" == "H:hash:${CMDARG_TYPE_HASH}:some hash::${CMDARG_FLAG_OPTARG}:" ]] || return 1
        set +x
    }
    # The test's status is parser's status (last command in the function).
    parser
}
# This test adds no value to the test suite, it simply serves as an example of how to override
# both the describe AND usage helpers
function shunittest_test_describe_and_usage_helper
{
    function always_succeed
    {
        return 0
    }
    # Custom describe helper: prints every field of an option declaration as
    # one colon-delimited record.
    function describe
    {
        set -u
        local longopt opt argtype default description
        longopt=$1
        opt=$2
        argtype=$3
        default="$4"
        description="$5"
        flags="$6"
        validator="$7"
        set +u
        echo "${opt}:${longopt}:${argtype}:${description}:${default}:${flags}:${validator}"
    }
    # Custom usage helper: replaces the default header/footer and prints
    # each declared option via the describe helper above.
    function usage
    {
        echo "I ignore the default header and footer, and substitute my own."
        echo "I do not indent my arguments or separate optional and required."
        # cmdarg helpfully separates options into OPTIONAL or REQUIRED arrays
        # so that you don't have to sort the keys for uniform --help message output
        # and so you can easily break arguments out into required/optional blocks
        # in the usage message ... our helper doesn't care, it just prints them all
        # together, but it still uses the sorted lists.
        for shortopt in ${CMDARG_OPTIONAL[@]} ${CMDARG_REQUIRED[@]}
        do
            cmdarg_describe $shortopt
        done
    }
    function parser
    {
        declare -a array
        declare -A hash
        cmdarg_purge
        cmdarg_helpers['describe']=describe
        cmdarg_helpers['usage']=usage
        cmdarg 's:' 'string' 'some string' '12345' always_succeed
        cmdarg 'b' 'boolean' 'some boolean'
        cmdarg 'a?[]' 'array' 'some array'
        cmdarg 'H?{}' 'hash' 'some hash'
        cmdarg_parse --help
    }
    # Expected --help output produced by the custom helpers above.
    output="I ignore the default header and footer, and substitute my own.
I do not indent my arguments or separate optional and required.
s:string:${CMDARG_TYPE_STRING}:some string:12345:${CMDARG_FLAG_REQARG}:always_succeed
b:boolean:${CMDARG_TYPE_BOOLEAN}:some boolean::${CMDARG_FLAG_NOARG}:
a:array:${CMDARG_TYPE_ARRAY}:some array::${CMDARG_FLAG_OPTARG}:
H:hash:${CMDARG_TYPE_HASH}:some hash::${CMDARG_FLAG_OPTARG}:"
    set +e
    capture="$(parser 2>&1)"
    # On mismatch, dump both sides to temp files and show a side-by-side diff
    # before failing.
    if [[ "${capture}" != "$output" ]]; then
        echo "${capture}" > /tmp/$$.parser 2>&1
        echo "${output}" > /tmp/$$.output
        diff -y /tmp/$$.output /tmp/$$.parser
        return 1
    fi
    set -e
}
|
<gh_stars>0
import React from 'react';
import AnchorLink from '../../components/atoms/AnchorLink';
// Storybook metadata: register AnchorLink under the Atoms section.
export default {
  title: 'Atoms/AnchorLink',
  component: AnchorLink,
};

// Render the component, forwarding all story args and using `children`
// as the link content.
const Template = (args) => <AnchorLink {...args}>{args.children}</AnchorLink>;

// Basic story: a plain link labelled 'Click Me'.
export const Basic = Template.bind({});
Basic.args = {
  children: 'Click Me',
};
|
set -e

# Directory the profile gets installed into.
# Fix: the original guarded this with `[ -n "$BASH" ]`, but $BASH is the
# shell's own builtin (the path to the bash binary) whenever the script runs
# under bash, so the guard always fired there — and under plain sh $BASH is
# typically unset, leaving the install dir empty and breaking every path
# below. Assign unconditionally, which matches the effective bash behavior.
BASH=~/.bash-profile

if [ -d "$BASH" ]; then
  echo "\033[0;33mYou already have Bash Profile installed.\033[0m You'll need to remove $BASH if you want to install"
  exit
fi

echo "\033[0;34mCloning Bash Profile...\033[0m"
#hash git >/dev/null 2>&1 && env git clone --depth=1 https://github.com/eldorplus/bash-profile.git $BASH || {
#  echo "git not installed"
#  exit
#}

echo "\033[0;34mLooking for an existing bash config...\033[0m"
if [ -f ~/.bash_profile ] || [ -h ~/.bash_profile ]; then
  echo "\033[0;33mFound ~/.bash_profile.\033[0m \033[0;32mBacking up to ~/.bash_profile.hold\033[0m";
  mv ~/.bash_profile ~/.bash_profile.hold;
fi

echo "\033[0;34mUsing the Bash Profile template file and adding it to ~/.bash_profile\033[0m"
cp $BASH/templates/bash_profile.template ~/.bash_profile
# Rewrite the BASH= line in the template to point at the install directory.
sed -i -e "/^BASH=/ c\\
BASH=$BASH
" ~/.bash_profile

echo "\033[0;34mCopying your current PATH and adding it to the end of ~/.bash_profile for you.\033[0m"
sed -i -e "/export PATH=/ c\\
export PATH=\"$PATH\"
" ~/.bash_profile

if [ "$SHELL" != "$(which bash)" ]; then
  echo "\033[0;34mTime to change your default shell to bash!\033[0m"
  chsh -s `which bash`
fi

# Start a fresh bash and load the new profile.
env bash
. ~/.bash_profile
|
import React, { FC, useState } from 'react';
import AppBar from '@mui/material/AppBar';
import Box from '@mui/material/Box';
import Toolbar from '@mui/material/Toolbar';
import Button from '@mui/material/Button';
import IconButton from '@mui/material/IconButton';
import MenuIcon from '@mui/icons-material/Menu';
import Logo from "../../assets/bg.png"
import Typography from '@mui/material/Typography';
import './Navbar.css';
import { Content } from '../WalletConnection/WalletConnection';
// Top navigation bar: a static MUI AppBar hosting the wallet-connection UI.
// NOTE(review): `props` is accepted but unused, and several imports above
// (Toolbar, Button, IconButton, MenuIcon, Logo, Typography, useState) are
// unreferenced — presumably leftovers from an earlier layout; confirm.
export const NavAppBar: FC = (props) => {
  return (
    <div>
      <Box sx={{ flexGrow: 1 }}>
        <AppBar className="Appbar" position="static">
          {/* Wallet connect / account controls live in Content. */}
          <Content />
        </AppBar>
      </Box>
    </div>
  );
};
|
<filename>frontend/test/unit/specs/components/App/ActionButtons.spec.js
import Vue from 'vue'
import * as sinon from 'sinon'
import ActionButtons from '@/components/App/ActionButtons'
// Unit tests for the ActionButtons component: default button rendering and
// the action events emitted on click.
describe('ActionButtons.vue', () => {
  let vm = null
  const sandbox = sinon.sandbox.create()

  beforeEach(() => {
    // Mount a fresh instance for every test.
    const Constructor = Vue.extend(ActionButtons)
    vm = new Constructor().$mount()
  })

  afterEach(() => {
    sandbox.restore()
    vm.$destroy()
    vm = null
  })

  // Fix: the description said "three" but the assertions below expect four
  // buttons (Print, Edit, Copy, Delete).
  it('should display four default buttons', () => {
    vm.$el.querySelectorAll('.el-button--default')
      .should.be.lengthOf(4)
    const buttonImages = [...vm.$el.querySelectorAll('[alt]')]
    buttonImages.map(el => el.getAttribute('alt'))
      .should.deep.equal(['Print', 'Edit', 'Copy', 'Delete'])
    buttonImages.map(el => el.getAttribute('src'))
      .should.be.lengthOf(4)
  })

  it('emits actionEdit on edit button click', () => {
    const spy = sandbox.spy(vm.$events, 'emit')
    vm.$el.querySelector('#action-buttons__edit').click()
    spy.should.have.been.calledOnce
    spy.should.have.been.calledWith('actionEdit')
  })

  it('emits actionCopy on copy button click', () => {
    const spy = sandbox.spy(vm.$events, 'emit')
    vm.$el.querySelector('#action-buttons__copy').click()
    spy.should.have.been.calledOnce
    spy.should.have.been.calledWith('actionCopy')
  })

  it('emits actionDelete on delete button click', () => {
    const spy = sandbox.spy(vm.$events, 'emit')
    // FIXME: Remove once the delete button gets enabled
    vm.$el.querySelector('#action-buttons__delete').disabled = false
    vm.$el.querySelector('#action-buttons__delete').click()
    spy.should.have.been.calledOnce
    spy.should.have.been.calledWith('actionDelete')
  })
})
|
<gh_stars>1-10
package com.ibm.socialcrm.notesintegration.servlet.servlets;
/****************************************************************
* IBM OpenSource
*
* (C) Copyright IBM Corp. 2012
*
* Licensed under the Apache License v2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
***************************************************************/
import java.io.IOException;
import java.io.OutputStream;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Placeholder servlet: logs the request URL and returns a stub message.
 * GET is delegated to POST so both verbs behave identically.
 */
public class SocialCRMServlet extends HttpServlet {
	/**
	 * Serial UID
	 */
	private static final long serialVersionUID = -2141375946112563556L;

	@Override
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		doPost(request, response);
	}

	@Override
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		System.out.println(request.getRequestURL());
		response.setStatus(200);
		// Fix: declare an explicit content type and charset instead of relying
		// on String.getBytes()'s platform-default encoding, and write through
		// the container-managed writer rather than hand-closing the raw
		// output stream.
		response.setContentType("text/plain;charset=UTF-8");
		String responseStr = "Replace me with some code that actually does something useful";
		response.getWriter().write(responseStr);
	}
}
|
#!/bin/bash
# Copyright 2019 Istio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################
# e2e-suite runs Istio E2E tests. #
# #
# Usage: ./e2e-kind-suite.sh --single_test mixer_e2e #
# #
# ${E2E_ARGS} can be used to provide additional test #
# arguments. #
#######################################################
# Resolve this script's directory and the repository root.
WD=$(dirname "$0")
WD=$(cd "$WD"; pwd)
ROOT=$(dirname "$WD")

# Exit immediately for non zero status
set -e
# Check unset variables
set -u
# Print commands
set -x

# shellcheck source=prow/lib.sh
source "${ROOT}/prow/lib.sh"
setup_and_export_git_sha

# getopts only handles single character flags
# Walk the argument list manually; flags recognized here are consumed,
# anything unrecognized is forwarded verbatim to the test runner via E2E_ARGS.
for ((i=1; i<=$#; i++)); do
  case ${!i} in
    # Node images can be found at https://github.com/kubernetes-sigs/kind/releases
    # For example, kindest/node:v1.14.0
    # NOTE(review): this branch has no `continue`, so after consuming the
    # flag's value that value is also appended to E2E_ARGS below —
    # presumably unintended; confirm.
    --node-image)
      ((i++))
      NODE_IMAGE=${!i}
      ;;
    --skip-setup)
      SKIP_SETUP=true
      continue
      ;;
    --skip-build)
      SKIP_BUILD=true
      continue
      ;;
    # NOTE(review): SKIP_CLEANUP is set but not referenced in this script —
    # presumably consumed by prow/lib.sh; confirm.
    --skip-cleanup)
      SKIP_CLEANUP=true
      continue
      ;;
    # -s/--single_test to specify test target to run.
    # e.g. "-s e2e_mixer" will trigger e2e mixer_test
    -s|--single_test) ((i++)); SINGLE_TEST=${!i}
      continue
      ;;
    --variant) ((i++)); VARIANT="${!i}"
      continue
      ;;
  esac
  E2E_ARGS+=( "${!i}" )
done

E2E_ARGS+=("--test_logs_path=${ARTIFACTS}")
# e2e tests with kind clusters on prow will get deleted when prow deletes the pod
E2E_ARGS+=("--skip_cleanup")
E2E_ARGS+=("--use_local_cluster")
# KinD will have the images loaded into it; it should not attempt to pull them
# See https://kind.sigs.k8s.io/docs/user/quick-start/#loading-an-image-into-your-cluster
E2E_ARGS+=("--image_pull_policy" "IfNotPresent")

# Default image hub/tag used by the build and test targets.
export HUB=${HUB:-"istio-testing"}
export TAG="${TAG:-"istio-testing"}"

make init

if [[ -z "${SKIP_SETUP:-}" ]]; then
  time setup_kind_cluster "${NODE_IMAGE:-}"
fi
if [[ -z "${SKIP_BUILD:-}" ]]; then
  time build_images
  time kind_load_images ""
fi
if [[ "${ENABLE_ISTIO_CNI:-false}" == true ]]; then
  cni_run_daemon_kind
fi
time make with_junit_report E2E_ARGS="${E2E_ARGS[*]}" TARGET="${SINGLE_TEST}" ${VARIANT:+ VARIANT="${VARIANT}"}
|
#!/usr/bin/env zsh
# phpenv shell integration: if phpenv is not installed yet, expose a helper
# alias to fetch it; otherwise put its bin/shims on PATH and load completions.
if [[ (! -d $HOME/.phpenv) ]]; then
    # FIX: GitHub disabled the unauthenticated git:// protocol in 2022;
    # clone over HTTPS instead.
    alias get-phpenv="git clone https://github.com/phpenv/phpenv.git ~/.phpenv; echo You will need to reload your shell now."
else
    prepend-path "$HOME/.phpenv/bin"
    prepend-path "$HOME/.phpenv/shims"
    source "$HOME/.phpenv/completions/phpenv.zsh"
fi
|
#! /usr/bin/env bash
# Test entrypoint: wait for the database, migrate, seed, then run the suite.
set -e
# Let the DB start
python /app/app/tests_pre_start.py
# Run migrations
alembic upgrade head
# Create initial data in DB
python /app/app/initial_data.py
# Run tests.
# FIX: "$@" (not $*) preserves the quoting of any extra pytest arguments
# passed to this script (e.g. -k "name with spaces").
pytest "$@" /app/app/tests/
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase: copy each built framework into
# the app bundle, strip unused architectures, and re-sign if required.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active platform
# (only needed for the Xcode < 7 branch below).
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework at $1 into the target's frameworks folder, resolving
# symlinks, stripping invalid architectures, re-signing, and (on Xcode < 7)
# embedding the Swift runtime dylibs the binary links against.
install_framework()
{
    # Locate the source: prefer the built products dir, fall back to the
    # basename inside it, then to the literal path.
    if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
        local source="${BUILT_PRODUCTS_DIR}/$1"
    elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
        local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
    elif [ -r "$1" ]; then
        local source="$1"
    fi
    local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
    if [ -L "${source}" ]; then
        echo "Symlinked..."
        source="$(readlink "${source}")"
    fi
    # use filter instead of exclude so missing patterns dont' throw errors
    echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
    rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
    local basename
    basename="$(basename -s .framework "$1")"
    binary="${destination}/${basename}.framework/${basename}"
    # Fall back to a bare binary (non-.framework layout) if needed.
    if ! [ -r "$binary" ]; then
        binary="${destination}/${basename}"
    fi
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
        strip_invalid_archs "$binary"
    fi
    # Resign the code if required by the build settings to avoid unstable apps
    code_sign_if_enabled "${destination}/$(basename "$1")"
    # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
    if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
        local swift_runtime_libs
        swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
        for lib in $swift_runtime_libs; do
            echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
            rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
            code_sign_if_enabled "${destination}/${lib}"
        done
    fi
}
# Signs a framework with the provided identity
# ($1 is the path to sign; no-op unless the build settings both require and
# allow code signing and an identity is configured).
code_sign_if_enabled() {
    if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
        # Use the current code_sign_identitiy
        echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
        local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
        # When parallel signing is enabled the command is backgrounded and
        # collected by the trailing `wait` at the end of the script.
        if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
            code_sign_cmd="$code_sign_cmd &"
        fi
        echo "$code_sign_cmd"
        eval "$code_sign_cmd"
    fi
}
# Strip invalid architectures
# Removes, in place, every architecture slice of binary $1 that is not listed
# in VALID_ARCHS (e.g. simulator slices when building for device).
strip_invalid_archs() {
    binary="$1"
    # Get architectures for current file
    archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
    stripped=""
    for arch in $archs; do
        if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
            # Strip non-valid architectures in-place
            lipo -remove "$arch" -output "$binary" "$binary" || exit 1
            stripped="$stripped $arch"
        fi
    done
    if [[ "$stripped" ]]; then
        echo "Stripped $binary of architectures:$stripped"
    fi
}
# Embed the pod frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
    install_framework "$BUILT_PRODUCTS_DIR/QrcodeBuilder/QrcodeBuilder.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
    install_framework "$BUILT_PRODUCTS_DIR/QrcodeBuilder/QrcodeBuilder.framework"
fi
# Collect any signing jobs backgrounded by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
    wait
fi
|
// Parse the base-10 string "12345" into a 32-bit integer and print it.
// ("int" is the C# alias for System.Int32, so int.Parse == Int32.Parse.)
int result = int.Parse("12345");
Console.WriteLine(result);
|
# Build a colon-separated classpath from every jar under ./target/lib and run
# the SPARQL-to-Gremlin test driver.
# FIX: use $(...) instead of deprecated backticks (nestable, easier to read).
TEMP=$(ls ./target/lib/*.jar)
# $TEMP is deliberately unquoted so newlines between jar names collapse to
# single spaces before sed joins them with ':'.
MAVEN_JARS=$(echo $TEMP | sed 's/ /:/g')
time java -server -cp .:target/SPARQL2Gremlin-1.0-SNAPSHOT.jar:$MAVEN_JARS com.liang.translator.SPARQL2Gremlin.SparqlToGremlinTest
|
<filename>akka-http/src/main/scala/com/lightbend/hedgehog/generators/akka/http/MediaTypeGenerators.scala
package com.lightbend.hedgehog.generators.akka.http
import akka.http.scaladsl.model
import akka.http.scaladsl.model.MediaType
import com.lightbend.hedgehog.generators.Fields
import hedgehog.Gen
/**
 * Hedgehog generators for akka-http [[MediaType]]s.
 *
 * The predefined media types are harvested reflectively from akka-http's
 * `model.MediaTypes` object via [[Fields.fieldsOf]], partitioned by their
 * static subtype, and each `genPredefined*` picks uniformly from one partition.
 */
object MediaTypeGenerators {
  private val MediaTypes: List[MediaType] =
    Fields.fieldsOf(model.MediaTypes, classOf[MediaType])
  private val BinaryMediaTypes: List[MediaType.Binary] =
    Fields.fieldsOf(model.MediaTypes, classOf[MediaType.Binary])
  private val MultipartMediaTypes: List[MediaType.Multipart] =
    Fields.fieldsOf(model.MediaTypes, classOf[MediaType.Multipart])
  private val NonBinaryMediaTypes: List[MediaType.NonBinary] =
    Fields.fieldsOf(model.MediaTypes, classOf[MediaType.NonBinary])
  private val WithFixedCharsetMediaTypes: List[MediaType.WithFixedCharset] =
    Fields.fieldsOf(model.MediaTypes, classOf[MediaType.WithFixedCharset])
  private val WithOpenCharsetMediaTypes: List[MediaType.WithOpenCharset] =
    Fields.fieldsOf(model.MediaTypes, classOf[MediaType.WithOpenCharset])
  private val NonMultipartWithOpenCharsetMediaTypes: List[MediaType.NonMultipartWithOpenCharset] =
    Fields.fieldsOf(model.MediaTypes, classOf[MediaType.NonMultipartWithOpenCharset])

  // Generators restricted to akka-http's predefined media types.
  // `elementUnsafe` throws on an empty list; the reflective harvest above is
  // presumably never empty -- confirm if akka-http is upgraded.
  def genPredefinedMediaType: Gen[MediaType] =
    Gen.elementUnsafe(MediaTypes)
  def genPredefinedBinaryMediaType: Gen[MediaType.Binary] =
    Gen.elementUnsafe(BinaryMediaTypes)
  def genPredefinedMultipartMediaType: Gen[MediaType.Multipart] =
    Gen.elementUnsafe(MultipartMediaTypes)
  def genPredefinedNonBinaryMediaType: Gen[MediaType.NonBinary] =
    Gen.elementUnsafe(NonBinaryMediaTypes)
  def genPredefinedWithFixedCharsetMediaType: Gen[MediaType.WithFixedCharset] =
    Gen.elementUnsafe(WithFixedCharsetMediaTypes)
  def genPredefinedWithOpenCharsetMediaType: Gen[MediaType.WithOpenCharset] =
    Gen.elementUnsafe(WithOpenCharsetMediaTypes)
  def genPredefinedNonMultipartWithOpenCharsetMediaType: Gen[MediaType.NonMultipartWithOpenCharset] =
    Gen.elementUnsafe(NonMultipartWithOpenCharsetMediaTypes)

  // TODO: Generate other media types.
  // For now each public generator only delegates to its predefined variant.
  def genMediaType: Gen[MediaType] =
    Gen.choice1(genPredefinedMediaType)
  def genBinaryMediaType: Gen[MediaType.Binary] =
    Gen.choice1(genPredefinedBinaryMediaType)
  def genMultipartMediaType: Gen[MediaType.Multipart] =
    Gen.choice1(genPredefinedMultipartMediaType)
  def genNonBinaryMediaType: Gen[MediaType.NonBinary] =
    Gen.choice1(genPredefinedNonBinaryMediaType)
  def genWithFixedCharsetMediaType: Gen[MediaType.WithFixedCharset] =
    Gen.choice1(genPredefinedWithFixedCharsetMediaType)
  def genWithOpenCharsetMediaType: Gen[MediaType.WithOpenCharset] =
    Gen.choice1(genPredefinedWithOpenCharsetMediaType)
  def genNonMultipartWithOpenCharsetMediaType: Gen[MediaType.NonMultipartWithOpenCharset] =
    Gen.choice1(genPredefinedNonMultipartWithOpenCharsetMediaType)
}
|
#!/bin/bash
# SPDX-License-Identifier: GPL-2.0
# Copyright 2020 NXP
# Selftest topology: eth0/eth1 are the DUT (switch) ports, bridged together;
# eth2/eth3 are traffic-generator ports cabled back to them.
WAIT_TIME=1
NUM_NETIFS=4
lib_dir=$(dirname $0)/../../../net/forwarding
source $lib_dir/tc_common.sh
source $lib_dir/lib.sh
require_command tcpdump
#
# +---------------------------------------------+
# | DUT ports                  Generator ports  |
# | +--------+ +--------+ +--------+ +--------+ |
# | |        | |        | |        | |        | |
# | |  eth0  | |  eth1  | |  eth2  | |  eth3  | |
# | |        | |        | |        | |        | |
# +-+--------+-+--------+-+--------+-+--------+-+
#        |         |          |          |
#        |         |          |          |
#        |         +-----------+         |
#        |                               |
#        +--------------------------------+
eth0=${NETIFS[p1]}
eth1=${NETIFS[p2]}
eth2=${NETIFS[p3]}
eth3=${NETIFS[p4]}
# Fixed MAC addresses used to match/verify frames in the tests below.
eth0_mac="de:ad:be:ef:00:00"
eth1_mac="de:ad:be:ef:00:01"
eth2_mac="de:ad:be:ef:00:02"
eth3_mac="de:ad:be:ef:00:03"
# Helpers to map a VCAP IS1 and VCAP IS2 lookup and policy to a chain number
# used by the kernel driver. The numbers are:
# VCAP IS1 lookup 0: 10000
# VCAP IS1 lookup 1: 11000
# VCAP IS1 lookup 2: 12000
# VCAP IS2 lookup 0 policy 0: 20000
# VCAP IS2 lookup 0 policy 1: 20001
# VCAP IS2 lookup 0 policy 255: 20255
# VCAP IS2 lookup 1 policy 0: 21000
# VCAP IS2 lookup 1 policy 1: 21001
# VCAP IS2 lookup 1 policy 255: 21255
# Chain number for VCAP IS1 lookup $1 (see the mapping table above).
IS1()
{
    local lookup=$1
    echo $((10000 + 1000 * lookup))
}
# Chain number for VCAP IS2 lookup $1, policy (PAG) $2.
IS2()
{
    local lookup=$1
    local pag=$2
    echo $((20000 + 1000 * lookup + pag))
}
# Chain number for the egress VCAP ES0 (the default chain, 0).
ES0()
{
    echo 0
}
# The Ocelot switches have a fixed ingress pipeline composed of:
#
# +----------------------------------------------+ +-----------------------------------------+
# | VCAP IS1 | | VCAP IS2 |
# | | | |
# | +----------+ +----------+ +----------+ | | +----------+ +----------+ |
# | | Lookup 0 | | Lookup 1 | | Lookup 2 | | --+------> PAG 0: | Lookup 0 | -> | Lookup 1 | |
# | +----------+ -> +----------+ -> +----------+ | | | +----------+ +----------+ |
# | |key&action| |key&action| |key&action| | | | |key&action| |key&action| |
# | |key&action| |key&action| |key&action| | | | | .. | | .. | |
# | | .. | | .. | | .. | | | | +----------+ +----------+ |
# | +----------+ +----------+ +----------+ | | | |
# | selects PAG | | | +----------+ +----------+ |
# +----------------------------------------------+ +------> PAG 1: | Lookup 0 | -> | Lookup 1 | |
# | | +----------+ +----------+ |
# | | |key&action| |key&action| |
# | | | .. | | .. | |
# | | +----------+ +----------+ |
# | | ... |
# | | |
# | | +----------+ +----------+ |
# +----> PAG 254: | Lookup 0 | -> | Lookup 1 | |
# | | +----------+ +----------+ |
# | | |key&action| |key&action| |
# | | | .. | | .. | |
# | | +----------+ +----------+ |
# | | |
# | | +----------+ +----------+ |
# +----> PAG 255: | Lookup 0 | -> | Lookup 1 | |
# | +----------+ +----------+ |
# | |key&action| |key&action| |
# | | .. | | .. | |
# | +----------+ +----------+ |
# +-----------------------------------------+
#
# Both the VCAP IS1 (Ingress Stage 1) and IS2 (Ingress Stage 2) are indexed
# (looked up) multiple times: IS1 3 times, and IS2 2 times. Each filter
# (key and action pair) can be configured to only match during the first, or
# second, etc, lookup.
#
# During one TCAM lookup, the filter processing stops at the first entry that
# matches, then the pipeline jumps to the next lookup.
# The driver maps each individual lookup of each individual ingress TCAM to a
# separate chain number. For correct rule offloading, it is mandatory that each
# filter installed in one TCAM is terminated by a non-optional GOTO action to
# the next lookup from the fixed pipeline.
#
# A chain can only be used if there is a GOTO action correctly set up from the
# prior lookup in the processing pipeline. Setting up all chains is not
# mandatory.
# NOTE: VCAP IS1 currently uses only S1_NORMAL half keys and VCAP IS2
# dynamically chooses between MAC_ETYPE, ARP, IP4_TCP_UDP, IP4_OTHER, which are
# all half keys as well.
# Installs the mandatory per-lookup GOTO filters on $1 so that every stage of
# the fixed ingress pipeline (IS1 lookups 0..2, then IS2 lookups 0..1) is
# chained together and usable by the tests.
create_tcam_skeleton()
{
    local eth=$1

    tc qdisc add dev $eth clsact

    # VCAP IS1 is the Ingress Classification TCAM and can offload the
    # following actions:
    # - skbedit priority
    # - vlan pop
    # - vlan modify
    # - goto (only in lookup 2, the last IS1 lookup)
    tc filter add dev $eth ingress chain 0 pref 49152 flower \
        skip_sw action goto chain $(IS1 0)
    tc filter add dev $eth ingress chain $(IS1 0) pref 49152 \
        flower skip_sw action goto chain $(IS1 1)
    tc filter add dev $eth ingress chain $(IS1 1) pref 49152 \
        flower skip_sw action goto chain $(IS1 2)
    tc filter add dev $eth ingress chain $(IS1 2) pref 49152 \
        flower skip_sw action goto chain $(IS2 0 0)

    # VCAP IS2 is the Security Enforcement ingress TCAM and can offload the
    # following actions:
    # - trap
    # - drop
    # - police
    # The two VCAP IS2 lookups can be segmented into up to 256 groups of
    # rules, called Policies. A Policy is selected through the Policy
    # Association Group (PAG) action of VCAP IS1 (which is the
    # GOTO offload).
    tc filter add dev $eth ingress chain $(IS2 0 0) pref 49152 \
        flower skip_sw action goto chain $(IS2 1 0)
}
# Builds the test fixture: TCAM chain skeleton on eth0, a bridge over the DUT
# ports, VLAN uppers on the generator port eth3, and the offloaded filters
# (VLAN pop, VLAN push, skbedit priority, policer) exercised by the tests.
setup_prepare()
{
    create_tcam_skeleton $eth0

    ip link add br0 type bridge
    ip link set $eth0 master br0
    ip link set $eth1 master br0
    ip link set br0 up

    ip link add link $eth3 name $eth3.100 type vlan id 100
    ip link set $eth3.100 up
    ip link add link $eth3 name $eth3.200 type vlan id 200
    ip link set $eth3.200 up

    # IS1 lookup 1: pop VLAN 100 on ingress, then continue the pipeline.
    tc filter add dev $eth0 ingress chain $(IS1 1) pref 1 \
        protocol 802.1Q flower skip_sw vlan_id 100 \
        action vlan pop \
        action goto chain $(IS1 2)
    # ES0: re-tag with VLAN 100 on egress for traffic bridged in from eth1.
    tc filter add dev $eth0 egress chain $(ES0) pref 1 \
        flower skip_sw indev $eth1 \
        action vlan push protocol 802.1Q id 100
    # IS1 lookup 0: classify frames from 10.1.1.2 into priority 7.
    tc filter add dev $eth0 ingress chain $(IS1 0) pref 2 \
        protocol ipv4 flower skip_sw src_ip 10.1.1.2 \
        action skbedit priority 7 \
        action goto chain $(IS1 1)
    # IS2 lookup 0: police UDP traffic to port 5201.
    tc filter add dev $eth0 ingress chain $(IS2 0 0) pref 1 \
        protocol ipv4 flower skip_sw ip_proto udp dst_port 5201 \
        action police rate 50mbit burst 64k \
        action goto chain $(IS2 1 0)
}
# Tears down everything created by setup_prepare (runs on EXIT via trap).
cleanup()
{
    ip link del $eth3.200
    ip link del $eth3.100
    tc qdisc del dev $eth0 clsact
    ip link del br0
}
# Sends a VLAN-100 tagged frame from eth3 and checks (via tcpdump on eth2)
# that it arrives untagged, proving the offloaded "vlan pop" action worked.
test_vlan_pop()
{
    printf "Testing VLAN pop.. "

    tcpdump_start $eth2

    # Work around Mausezahn VLAN builder bug
    # (https://github.com/netsniff-ng/netsniff-ng/issues/225) by using
    # an 8021q upper
    $MZ $eth3.100 -q -c 1 -p 64 -a $eth3_mac -b $eth2_mac -t ip

    sleep 1

    tcpdump_stop

    if tcpdump_show | grep -q "$eth3_mac > $eth2_mac, ethertype IPv4"; then
        echo "OK"
    else
        echo "FAIL"
    fi

    tcpdump_cleanup
}
# Sends an untagged frame from eth2 and checks it shows up on the VLAN-100
# upper of eth3, proving the offloaded egress "vlan push" action worked.
test_vlan_push()
{
    printf "Testing VLAN push.. "

    tcpdump_start $eth3.100

    $MZ $eth2 -q -c 1 -p 64 -a $eth2_mac -b $eth3_mac -t ip

    sleep 1

    tcpdump_stop

    if tcpdump_show | grep -q "$eth2_mac > $eth3_mac"; then
        echo "OK"
    else
        echo "FAIL"
    fi

    tcpdump_cleanup
}
# Temporarily enables VLAN filtering on the bridge, installs an IS1 lookup 2
# rule rewriting VID 200 -> 300, and verifies a frame sent on VLAN 200 is
# received on eth2 carrying VLAN 300. Restores the original state afterwards.
test_vlan_modify()
{
    printf "Testing VLAN modification.. "

    ip link set br0 type bridge vlan_filtering 1
    bridge vlan add dev $eth0 vid 200
    bridge vlan add dev $eth0 vid 300
    bridge vlan add dev $eth1 vid 300

    tc filter add dev $eth0 ingress chain $(IS1 2) pref 3 \
        protocol 802.1Q flower skip_sw vlan_id 200 \
        action vlan modify id 300 \
        action goto chain $(IS2 0 0)

    tcpdump_start $eth2

    $MZ $eth3.200 -q -c 1 -p 64 -a $eth3_mac -b $eth2_mac -t ip

    sleep 1

    tcpdump_stop

    if tcpdump_show | grep -q "$eth3_mac > $eth2_mac, .* vlan 300"; then
        echo "OK"
    else
        echo "FAIL"
    fi

    tcpdump_cleanup

    tc filter del dev $eth0 ingress chain $(IS1 2) pref 3
    bridge vlan del dev $eth0 vid 200
    bridge vlan del dev $eth0 vid 300
    bridge vlan del dev $eth1 vid 300
    ip link set br0 type bridge vlan_filtering 0
}
# Sends traffic matching the skbedit rule (src 10.1.1.2) and checks the
# priority-7 ingress counter advanced by exactly the number of packets sent.
test_skbedit_priority()
{
    local num_pkts=100

    printf "Testing frame prioritization.. "

    before=$(ethtool_stats_get $eth0 'rx_green_prio_7')

    $MZ $eth3 -q -c $num_pkts -p 64 -a $eth3_mac -b $eth2_mac -t ip -A 10.1.1.2

    after=$(ethtool_stats_get $eth0 'rx_green_prio_7')

    if [ $((after - before)) = $num_pkts ]; then
        echo "OK"
    else
        echo "FAIL"
    fi
}
# Ensure the bridge/VLAN uppers/qdisc are removed even if a test fails.
trap cleanup EXIT

ALL_TESTS="
	test_vlan_pop
	test_vlan_push
	test_vlan_modify
	test_skbedit_priority
"

setup_prepare
setup_wait

tests_run

exit $EXIT_STATUS
|
<gh_stars>1-10
package gov.usgs.traveltime;
import java.util.Arrays;
/**
 * A collection of spline interpolation routines needed for the computation of travel times.
 *
 * <p>Both methods are straight ports of FORTRAN routines (Tauspl and Fitspl), so variable
 * names and statement order deliberately mirror the originals.
 *
 * @author <NAME>
 */
public class Spline {
  /**
   * Construct custom spline interpolation basis functions. These basis functions depend only on the
   * ray parameter grid. This is a straight port of FORTRAN routine Tauspl. If p has dimension N,
   * basis must have dimension N X 5.
   *
   * @param p Normalized ray parameter grid
   * @param basis Array to receive basis function coefficients
   */
  public void basisSet(double[] p, double[][] basis) {
    int i = 0;
    // Sliding 5-point window of grid spacings and their powers:
    // dp = spacing, sqrtDp = dp^1/2, sqrt3Dp = dp^3/2, invDp = dp^-1/2,
    // and the d* arrays hold first differences of those powers.
    double[] dp, sqrtDp, sqrt3Dp, invDp, dSqrtDp, dSqrt3Dp, dInvDp, d;
    double pEnd, ali, alr, b1h, b3h, bih, th0p, th2p, th3p, th2m;

    // Trap a one point series.
    if (p.length == 1) return;

    // Initialize scratch arrays.
    dp = new double[5];
    sqrtDp = new double[5];
    sqrt3Dp = new double[5];
    invDp = new double[5];
    dSqrtDp = new double[4];
    dSqrt3Dp = new double[4];
    dInvDp = new double[4];
    d = new double[4];

    // Start the process.
    pEnd = p[p.length - 1];
    dp[1] = pEnd - p[0] + 3d * (p[1] - p[0]);
    sqrtDp[1] = Math.sqrt(Math.abs(dp[1]));
    sqrt3Dp[1] = dp[1] * sqrtDp[1];
    invDp[1] = 1d / sqrtDp[1];
    for (int k = 2; k < 5; k++) {
      dp[k] = pEnd - p[0] + (4 - k) * (p[1] - p[0]);
      sqrtDp[k] = Math.sqrt(Math.abs(dp[k]));
      sqrt3Dp[k] = dp[k] * sqrtDp[k];
      invDp[k] = 1d / sqrtDp[k];
      dSqrtDp[k - 1] = sqrtDp[k] - sqrtDp[k - 1];
      dSqrt3Dp[k - 1] = sqrt3Dp[k] - sqrt3Dp[k - 1];
      dInvDp[k - 1] = invDp[k] - invDp[k - 1];
    }

    // Main loop.
    if (p.length > 2) {
      // Loop over the ray parameter array.
      for (i = 0; i < p.length - 2; i++) {
        // Update the temporary variables (shift the window left by one).
        for (int k = 1; k < 5; k++) {
          dp[k - 1] = dp[k];
          sqrtDp[k - 1] = sqrtDp[k];
          sqrt3Dp[k - 1] = sqrt3Dp[k];
          invDp[k - 1] = invDp[k];
          if (k < 4) {
            dSqrtDp[k - 1] = dSqrtDp[k];
            dSqrt3Dp[k - 1] = dSqrt3Dp[k];
            dInvDp[k - 1] = dInvDp[k];
          }
        }
        dp[4] = pEnd - p[i + 1];
        sqrtDp[4] = Math.sqrt(Math.abs(dp[4]));
        sqrt3Dp[4] = dp[4] * sqrtDp[4];
        invDp[4] = 1d / sqrtDp[4];
        dSqrtDp[3] = sqrtDp[4] - sqrtDp[3];
        dSqrt3Dp[3] = sqrt3Dp[4] - sqrt3Dp[3];
        dInvDp[3] = invDp[4] - invDp[3];
        // Construct G;i-1.
        ali =
            1d / (0.125d * dSqrt3Dp[0] - (0.75d * dSqrtDp[0] + 0.375d * dInvDp[0] * dp[2]) * dp[2]);
        alr =
            ali
                * (0.125d * sqrt3Dp[1]
                    - (0.75d * sqrtDp[1] + 0.375d * dp[2] * invDp[1] - sqrtDp[2]) * dp[2]);
        b1h = dSqrtDp[1] + alr * dSqrtDp[0];
        b3h = dSqrt3Dp[1] + alr * dSqrt3Dp[0];
        bih = dInvDp[1] + alr * dInvDp[0];
        th0p = dSqrtDp[0] * b3h - dSqrt3Dp[0] * b1h;
        th2p = dSqrtDp[2] * b3h - dSqrt3Dp[2] * b1h;
        th3p = dSqrtDp[3] * b3h - dSqrt3Dp[3] * b1h;
        th2m = dInvDp[2] * b3h - dSqrt3Dp[2] * bih;
        // The d;i's completely define G;i-1.
        d[3] =
            ali
                * ((dInvDp[0] * b3h - dSqrt3Dp[0] * bih) * th2p - th2m * th0p)
                / ((dInvDp[3] * b3h - dSqrt3Dp[3] * bih) * th2p - th2m * th3p);
        d[2] = (th0p * ali - th3p * d[3]) / th2p;
        d[1] = (dSqrt3Dp[0] * ali - dSqrt3Dp[2] * d[2] - dSqrt3Dp[3] * d[3]) / b3h;
        d[0] = alr * d[1] - ali;
        // Construct the contributions G;i-1(p;i-2) and
        // G;i-1(p;i).  G;i-1(p;i-1) is normalized to unity.
        basis[0][i] =
            (0.125d * sqrt3Dp[4]
                    - (0.75d * sqrtDp[4] + 0.375d * dp[3] * invDp[4] - sqrtDp[3]) * dp[3])
                * d[3];
        if (i >= 2)
          basis[1][i - 2] =
              (0.125d * sqrt3Dp[0]
                      - (0.75d * sqrtDp[0] + 0.375d * dp[1] * invDp[0] - sqrtDp[1]) * dp[1])
                  * d[0];
        // Construct the contributions -dG;i-1(p)/dp for p;i-2,
        // p;i-1, and p;i.
        basis[2][i] = -0.75d * (sqrtDp[4] + dp[3] * invDp[4] - 2d * sqrtDp[3]) * d[3];
        if (i >= 1)
          basis[3][i - 1] =
              -0.75d
                  * ((sqrtDp[1] + dp[2] * invDp[1] - 2d * sqrtDp[2]) * d[1]
                      - (dSqrtDp[0] + dInvDp[0] * dp[2]) * d[0]);
        if (i >= 2)
          basis[4][i - 2] = -0.75d * (sqrtDp[0] + dp[1] * invDp[0] - 2d * sqrtDp[1]) * d[0];
      }
    }
    // End-cap passes: handle the last four basis functions, whose windows
    // extend past the end of the grid.
    for (int j = 0; j < 4; j++) {
      for (int k = 1; k < 5; k++) {
        dp[k - 1] = dp[k];
        sqrtDp[k - 1] = sqrtDp[k];
        sqrt3Dp[k - 1] = sqrt3Dp[k];
        invDp[k - 1] = invDp[k];
        if (k < 4) {
          dSqrtDp[k - 1] = dSqrtDp[k];
          dSqrt3Dp[k - 1] = dSqrt3Dp[k];
          dInvDp[k - 1] = dInvDp[k];
        }
      }
      // Pin the window values past the end of the grid to zero.
      // NOTE(review): sqrt3Dp[4] is NOT reset here while dp, sqrtDp and
      // invDp are; presumably its stale value is never read on these
      // passes -- confirm against the FORTRAN Tauspl original.
      dp[4] = 0d;
      sqrtDp[4] = 0d;
      invDp[4] = 0d;
      // Construction of the d;i's is different for each case.
      // In cases G;i, i=n-1,n,n+1, G;i is truncated at p;n to
      // avoid patching across the singularity in the second
      // derivative.
      if (j == 3) {
        // For G;n+1 constrain G;n+1(p;n) to be .25.
        d[0] = 2d / (dp[0] * sqrtDp[0]);
      } else {
        // For G;i, i=n-2,n-1,n, the condition dG;i(p)/dp|p;i = 0
        // has been substituted for the second derivative
        // continuity condition that can no longer be satisfied.
        alr = (sqrtDp[1] + dp[2] * invDp[1] - 2d * sqrtDp[2]) / (dSqrtDp[0] + dInvDp[0] * dp[2]);
        d[1] =
            1d
                / (0.125d * sqrt3Dp[1]
                    - (0.75d * sqrtDp[1] + 0.375d * dp[2] * invDp[1] - sqrtDp[2]) * dp[2]
                    - (0.125d * dSqrt3Dp[0]
                            - (0.75d * dSqrtDp[0] + 0.375d * dInvDp[0] * dp[2]) * dp[2])
                        * alr);
        d[0] = alr * d[1];
        if (j == 1) {
          // For G;n-1 constrain G;n-1(p;n) to be .25.
          d[2] = (2d + dSqrt3Dp[1] * d[1] + dSqrt3Dp[0] * d[0]) / (sqrt3Dp[2]);
        } else if (j == 0) {
          // No additional constraints are required for G;n-2.
          d[2] =
              -((dSqrt3Dp[1] - dSqrtDp[1] * dp[3]) * d[1]
                      + (dSqrt3Dp[0] - dSqrtDp[0] * dp[3]) * d[0])
                  / (dSqrt3Dp[2] - dSqrtDp[2] * dp[3]);
          d[3] = (dSqrt3Dp[2] * d[2] + dSqrt3Dp[1] * d[1] + dSqrt3Dp[0] * d[0]) / (sqrt3Dp[3]);
        }
      }
      // Construct the contributions G;i-1(p;i-2) and
      // G;i-1(p;i).
      if (j <= 1)
        basis[0][i] =
            (0.125d * sqrt3Dp[2]
                        - (0.75d * sqrtDp[2] + 0.375d * dp[3] * invDp[2] - sqrtDp[3]) * dp[3])
                    * d[2]
                - (0.125d * dSqrt3Dp[1] - (0.75d * dSqrtDp[1] + 0.375d * dInvDp[1] * dp[3]) * dp[3])
                    * d[1]
                - (0.125d * dSqrt3Dp[0] - (0.75d * dSqrtDp[0] + 0.375d * dInvDp[0] * dp[3]) * dp[3])
                    * d[0];
      if (i > 1)
        basis[1][i - 2] =
            (0.125d * sqrt3Dp[0]
                    - (0.75d * sqrtDp[0] + 0.375d * dp[1] * invDp[0] - sqrtDp[1]) * dp[1])
                * d[0];
      // Construct the contributions -dG;i-1(p)/dp | p;i-2,
      // p;i-1, and p;i.
      if (j <= 1)
        basis[2][i] =
            -0.75d
                * ((sqrtDp[2] + dp[3] * invDp[2] - 2d * sqrtDp[3]) * d[2]
                    - (dSqrtDp[1] + dInvDp[1] * dp[3]) * d[1]
                    - (dSqrtDp[0] + dInvDp[0] * dp[3]) * d[0]);
      if (j <= 2 && i > 0) basis[3][i - 1] = 0d;
      if (i > 1) basis[4][i - 2] = -0.75d * (sqrtDp[0] + dp[1] * invDp[0] - 2d * sqrtDp[1]) * d[0];
      i++;
    }
  }

  /**
   * Use the custom spline basis functions to build an interpolation for distance. Note that the
   * interpolation depends of tau at each ray parameter, but only on distance at the end points.
   * When finished the tau values will have been copied into the first row of poly and the
   * interpolated distance values will be in the second row. This is a straight port of FORTRAN
   * routine Fitspl.
   *
   * @param tau Normalized tau at each ray parameter grid point
   * @param xRange Normalized distance at each end of the ray parameter grid
   * @param basis Ray parameter grid basis functions computed in method basisSet
   * @param poly Scratch array dimensioned [3][p.length]
   * @param x Normalized, interpolated distance values returned
   */
  public void tauSpline(
      double[] tau, double[] xRange, double[][] basis, double[][] poly, double[] x) {
    int n;
    double alr, gn;
    double[] ap;

    n = tau.length;
    // Make sure we have a reasonable length branch.
    if (n == 1) {
      x[0] = xRange[0];
      return;
    }
    // Set up the working arrays. In the FORTRAN routine Fitspl,
    // two temporary arrays were used, a(n,2) and b(n). Since poly
    // happens to be available, I've just used poly[0][n] to store
    // b and poly[1][n] and poly[2][n] to store a.
    poly[0] = Arrays.copyOf(tau, n);
    poly[1] = Arrays.copyOf(basis[0], n);
    poly[2] = Arrays.copyOf(basis[1], n);
    ap = new double[3];
    for (int j = 0; j < 3; j++) {
      ap[j] = basis[j + 2][n - 1];
    }
    // Arrays ap(*,1), a, and ap(*,2) comprise n+2 x n+2 penta-
    // diagonal symmetric matrix A. Let x1, tau, and xn comprise
    // corresponding n+2 vector b. Then, A * g = b, may be solved
    // for n+2 vector g such that interpolation I is given by
    // I(p) = sum(i=0,n+1) g;i * G;i(p).
    // First, eliminate the lower triangular portion of A to form A'.
    alr = poly[1][0] / basis[2][0];
    poly[1][0] = 1d - basis[3][0] * alr;
    poly[2][0] -= basis[4][0] * alr;
    poly[0][0] -= xRange[0] * alr;
    for (int j = 1; j < n; j++) {
      alr = poly[1][j] / poly[1][j - 1];
      poly[1][j] = 1d - poly[2][j - 1] * alr;
      poly[0][j] -= poly[0][j - 1] * alr;
    }
    alr = ap[0] / poly[1][n - 2];
    ap[1] -= poly[2][n - 2] * alr;
    gn = xRange[1] - poly[0][n - 2] * alr;
    // Back solve the upper triangular portion of A' for
    // coefficients g;i.
    alr = ap[1] / poly[1][n - 1];
    gn = (gn - poly[0][n - 1] * alr) / (ap[2] - poly[2][n - 1] * alr);
    poly[0][n - 1] = (poly[0][n - 1] - gn * poly[2][n - 1]) / poly[1][n - 1];
    for (int j = n - 2; j >= 0; j--) {
      poly[0][j] = (poly[0][j] - poly[0][j + 1] * poly[2][j]) / poly[1][j];
    }
    // Fill in the interpolated distances.
    x[0] = xRange[0];
    for (int j = 1; j < n - 1; j++) {
      x[j] = basis[2][j] * poly[0][j - 1] + basis[3][j] * poly[0][j] + basis[4][j] * poly[0][j + 1];
    }
    x[n - 1] = xRange[1];

    /* System.out.println("\nFitspl: tau x b a");
    for(int j=0; j<n; j++) {
      System.out.format("%13.6e %13.6e %13.6e %13.6e %13.6e\n",
          tau[j], x[j], poly[0][j], poly[1][j], poly[2][j]);
    } */
  }
}
|
def find_longest_subarray_sum_equals_target(nums, target):
    """Return the length of the longest contiguous subarray of nums summing to target.

    Uses the classic prefix-sum + hash-map technique: a subarray (j, i] sums to
    ``target`` exactly when ``prefix[i] - prefix[j] == target``, so we record
    the earliest index at which each prefix sum occurs.  Runs in O(n) time and
    O(n) space; returns 0 when no such subarray exists (including empty input).
    """
    max_len = 0
    # Earliest index at which each running prefix sum was first seen;
    # 0 -> -1 lets a subarray that starts at index 0 be counted.
    first_seen = {0: -1}
    running = 0
    for i, value in enumerate(nums):
        running += value
        if running - target in first_seen:
            max_len = max(max_len, i - first_seen[running - target])
        # Keep only the earliest occurrence to maximize subarray length.
        first_seen.setdefault(running, i)
    return max_len


print(find_longest_subarray_sum_equals_target([5, 6, -5, 5, 3, 5, 3, -2, 0], 7))
|
from django import forms
class CustomForm:
    """Factory helpers producing demo Django form fields."""

    @staticmethod
    def generate_multiple_select_field():
        """Build a multiple-select field rendered as a list of checkboxes.

        Returns a ``forms.MultipleChoiceField`` with five demo choices.
        """
        choices = ((x, f'choice {x}') for x in range(5))
        # FIX: ChoiceField validates a single submitted value, which is
        # inconsistent with the CheckboxSelectMultiple widget (the browser
        # submits a list).  MultipleChoiceField is the matching field type.
        multiple_select_field = forms.MultipleChoiceField(
            widget=forms.CheckboxSelectMultiple,
            label='CheckboxSelectMultiple',
            choices=choices,
            help_text='Some field help text'
        )
        return multiple_select_field

    @staticmethod
    def generate_checkbox_field():
        """Build a single required boolean checkbox field."""
        checkbox_field = forms.BooleanField(label='BooleanField')
        return checkbox_field
|
<reponame>MccCareplan/patientsmartapp<filename>src/app/main/graphs/generic/generic.component.ts
import { Component, Input, OnInit, ViewChild } from '@angular/core';
import { MatPaginator } from '@angular/material/paginator';
import { MatSort } from '@angular/material/sort';
import { MatTableDataSource } from '@angular/material/table';
import { Store } from '@ngrx/store';
import { ChartDataSets, ChartOptions } from 'chart.js';
import moment from 'moment';
import { Color, Label } from 'ng2-charts';
import { formatMccDate, getDisplayValue, getInnerValue, getValueHighlighted } from 'src/app/common/chart-utility-functions';
import { Effective, MccObservation, SimpleQuestionnaireItem } from 'src/app/generated-data-api';
import { ObservationsService } from 'src/app/services/observations.service.new';
import * as fromRoot from '../../../ngrx/reducers';
import labMappingsJSON from "../../../../assets/json/data/lab-mappings.json";
import vitalMappingsJSON from "../../../../assets/json/data/vital-mappings.json";
@Component({
  selector: 'generic-graph',
  templateUrl: './generic.component.html',
  styleUrls: ['./generic.component.scss']
})
/**
 * Renders a line chart (and optionally a sortable/paginated table) for one
 * observation series, identified by `key`, looked up in the lab/vital
 * mapping JSON files and fetched through ObservationsService.
 */
export class GenericGraphComponent implements OnInit {
  // Presumably toggles the tabular view in the template -- confirm there.
  @Input()
  showTable: boolean;
  // Series name; must match a "name" entry in lab-mappings.json or
  // vital-mappings.json.
  @Input()
  key: string;
  lineChartColors: Color[] = [{
    borderColor: "#409FFF"
  }];
  chartDataSets: ChartDataSets[] = [{}];
  lineChartLabels: Label[] = [];
  lineChartOptions: ChartOptions = {
    responsive: true,
    maintainAspectRatio: true,
    elements: {
      line: {
        tension: 0  // straight segments, no bezier smoothing
      }
    }
  }
  displayedColumns: any[] = ["value", "date"];
  showPaginator: boolean = true;
  data;              // formatted rows, sorted newest-first
  tableData;         // MatTableDataSource wrapping `data`
  patientId;         // FHIR id from the patient profile store slice
  longTermCondition; // "ckd" when care plans exist, else "general"
  @ViewChild(MatSort) sort: MatSort;
  @ViewChild(MatPaginator) paginator: MatPaginator;
  constructor(
    private store: Store<fromRoot.State>,
    private obsService: ObservationsService
  ) {
  }
  // Subscribes to the care-plan and patient-profile slices; loads data once
  // both patientId and longTermCondition are known.
  // NOTE(review): loadData can fire once per subscription (up to twice when
  // both slices emit) -- confirm the duplicate fetch is acceptable.
  ngOnInit() {
    this.store.select(fromRoot.getCarePlansSummary).subscribe(c => {
      if (c && c.length > 0)
        this.longTermCondition = "ckd";
      else if (c && c.length === 0)
        this.longTermCondition = "general";
      if (this.patientId && this.longTermCondition)
        this.loadData();
    });
    this.store.select(fromRoot.getPatientProfile).subscribe(x => {
      if (x && x.fhirid) {
        this.patientId = x.fhirid;
        if (this.patientId && this.longTermCondition)
          this.loadData();
      }
    });
  }
  // Resolves `key` to a mapping entry and dispatches to the matching
  // observation-service call; silently does nothing when the key is unmapped.
  loadData = (): void => {
    let valueToCall = this.parseMaps();
    if (!valueToCall) return;
    switch (valueToCall.type) {
      case "valueset":
        this.obsService.getObservationsByValueSet(this.patientId, valueToCall.value, "descending", "50", this.key).then(this.processData);
        break;
      case "code":
        this.obsService.getObservations(this.patientId, valueToCall.value, this.key).then(this.processData);
        break;
      case "panel":
        this.obsService.getObservationsByPanel(this.patientId, valueToCall.value, "descending", "50", this.key).then(this.processData);
        break;
      case "question":
        this.obsService.getQuestionnaireItems(this.patientId, valueToCall.value).then(this.processQuestionnaire);
        break;
    }
  }
  // Searches the lab mappings first, then the vital mappings, for an entry
  // whose "name" equals `key`; returns undefined when absent.
  parseMaps = (): any => {
    let valueToCall: any;
    for (const property in labMappingsJSON) {
      if (labMappingsJSON[property] && labMappingsJSON[property].length > 0 && labMappingsJSON[property].find(x => x.name === this.key)) {
        valueToCall = labMappingsJSON[property].find(x => x.name === this.key);
      }
    }
    if (!valueToCall) {
      for (const property in vitalMappingsJSON) {
        if (vitalMappingsJSON[property] && vitalMappingsJSON[property].length > 0 && vitalMappingsJSON[property].find(x => x.name === this.key)) {
          valueToCall = vitalMappingsJSON[property].find(x => x.name === this.key);
        }
      }
    }
    return valueToCall;
  }
  // Formats observations into table rows (newest-first) and feeds the chart.
  // NOTE(review): the forEach parameter `res` shadows the outer `res` array.
  processData = (res: MccObservation[]): void => {
    let formattedData = [];
    res.forEach((res: MccObservation, index) => {
      let formattedObject: any = {};
      formattedObject.title = res.code.text;
      formattedObject.date = this.formatDate(res.effective);
      formattedObject.displayValue = getDisplayValue(res.value);
      formattedObject.value = getInnerValue(res.value);
      formattedObject.highlighted = getValueHighlighted(res.value);
      formattedData.push(formattedObject);
    })
    this.data = formattedData.sort((a, b) => { return a.date > b.date ? -1 : 1; });
    this.tableData = new MatTableDataSource(this.data);
    this.tableData.sort = this.sort;
    this.tableData.paginator = this.paginator;
    this.showPaginator = this.data.length > 5;
    this.processChartData(this.key);
  }
  // Same as processData but for questionnaire answers (first answer only).
  processQuestionnaire = (res: SimpleQuestionnaireItem[]): void => {
    let formattedData = [];
    res.forEach((res: SimpleQuestionnaireItem, index) => {
      let formattedObject: any = {};
      formattedObject.title = res.item.text;
      formattedObject.date = formatMccDate(res.authored);
      formattedObject.displayValue = getDisplayValue(res.item.answers[0].value);
      formattedObject.value = getInnerValue(res.item.answers[0].value);
      formattedObject.highlighted = getValueHighlighted(res.item.answers[0].value);
      formattedData.push(formattedObject);
    })
    this.data = formattedData.sort((a, b) => { return a.date > b.date ? -1 : 1; });
    this.tableData = new MatTableDataSource(this.data);
    this.tableData.sort = this.sort;
    this.tableData.paginator = this.paginator;
    this.showPaginator = this.data.length > 5;
    if (res && res.length > 0) {
      this.processChartData(this.key);
    }
  }
  // Re-sorts rows oldest-first for the chart and fills the chart.js dataset.
  processChartData = (key) => {
    let chartData = [...this.data].sort((a, b) => { return a.date > b.date ? 1 : -1; });
    this.chartDataSets[0].data = chartData.map(x => { return x.value });
    this.lineChartLabels = chartData.map(x => { return moment(x.date).format("MM/YYYY") })
    this.chartDataSets[0].label = key;
    this.chartDataSets[0].fill = false;
  }
  // Converts an MCC Effective to a JS Date.
  // NOTE(review): assumes ef.dateTime is always present -- confirm upstream.
  formatDate = (ef: Effective): Date => {
    return new Date(ef.dateTime.rawDate);
  }
}
|
import styled from "styled-components";
// Page wrapper: fixed 1200px column, centered horizontally.
// NOTE(review): "Wapper" looks like a typo for "Wrapper", but renaming would
// break existing imports -- left as-is.
export const Wapper = styled.div`
width:1200px;
margin:0 auto;
`
// Section heading.
export const Title = styled.h1`
font-size: 20px;
`
// Content container sitting below the title.
export const Container = styled.div`
width:1200px;
margin-top:20px;
`
|
<gh_stars>0
def processedby(processor):
    """Decorator factory that routes every call through *processor*.

    ``processor`` is invoked as ``processor(func, *args, **kwargs)`` -- i.e. it
    receives the wrapped function first, followed by the call's arguments --
    and its return value becomes the call's result.
    """
    import functools

    def processedfunc(func):
        # functools.wraps preserves the wrapped function's __name__,
        # __doc__ etc., so introspection and debugging keep working.
        @functools.wraps(func)
        def wrappedfunc(*args, **kwargs):
            return processor(func, *args, **kwargs)
        return wrappedfunc
    return processedfunc
|
import aiohttp
from datetime import datetime
async def get_api_data(base_url, date, days):
    """Fetch JSON from the service's ``/details`` endpoint.

    When both *date* (a date/datetime) and *days* are truthy, they are
    appended to the path as ``/<YYYY-MM-DD>/<days>``.  Returns the decoded
    JSON payload.
    """
    url = f"{base_url}/details"
    if date and days:
        url = f"{url}/{date.strftime('%Y-%m-%d')}/{days}"
    # A fresh session per call keeps the helper self-contained.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            payload = await response.json()
    return payload
|
<filename>feign-reactor-core/src/main/java/reactivefeign/ReactiveRetryPolicy.java
package reactivefeign;
import reactor.core.publisher.Flux;
import java.util.function.Function;
/**
 * Retry policy abstraction for reactive Feign clients.
 *
 * @author <NAME>
 */
public interface ReactiveRetryPolicy {
  /**
   * Builds the retry companion function: it receives the stream of errors
   * and re-emits those that should trigger another attempt (presumably fed
   * to {@code Flux.retryWhen} -- confirm at the call site).
   */
  Function<Flux<Throwable>, Flux<Throwable>> toRetryFunction();
}
|
# Aggregate per-day CSV totals into per-week totals and re-plot.
DIR="csvSumTotalDay"
oDIR="csvSumTotalWeek"
for i in $DIR/*.csv; do
# Output stem: strip the "d" prefix and the ".csv" suffix from the input name.
o=${i#"$DIR/d"}
o=${o%".csv"}
echo "$i > $o"
# Stage 1 (awk): past the two header lines, key each row by year + week-of-year
# (%W of the Y-M-D in column 1) and sum columns 2 and 3 per key; headers are
# passed through. Stage 2/3 (awk|awk): swap the two header lines back to the
# top, then sort the data rows.
awk -F, -v OFS=, '{ \
if (NR > 2) \
{ split($1, d,"-"); \
w = strftime("%W", mktime(d[1]" "d[2]" "d[3]" 00 00 00"));
date=d[1]""w
c[date] += $2; \
p[date] += $3; } \
else \
{ print $0 } \
}
END{ for (x in c) print x, c[x], p[x] }' $i | awk 'NR==1{a=$0;next}NR==2{b=$0; print $0"\n"a;next}{print $0;}' | awk 'NR<3{print $0;next}{print $0| "sort"}' > $oDIR/w$o.csv
done;
# Regenerate the weekly histogram and publish the images.
gnuplot histogramTotalWeek.gnu
cp $oDIR/*.png ./subsel
|
/**
* @fileoverview gRPC-Web generated client stub for taska.proto
* @enhanceable
* @public
*/
// GENERATED CODE -- DO NOT EDIT!
/* eslint-disable */
// @ts-nocheck
import * as grpcWeb from "grpc-web";
import * as board_pb from "./board_pb";
import * as card_pb from "./card_pb";
import * as list_pb from "./list_pb";
/**
 * gRPC-Web client stub for taska.proto's TaskaService.
 *
 * NOTE(review): this is generated code ("DO NOT EDIT") -- only comments are
 * added here. Every RPC follows one template: a MethodInfo field bundling the
 * request serializer and response deserializer, plus an overloaded method
 * that dispatches to rpcCall (callback style, returns a readable stream) or
 * unaryCall (promise style) depending on whether a callback was supplied.
 */
export class TaskaServiceClient {
  client_: grpcWeb.AbstractClientBase;
  hostname_: string;
  credentials_: null | { [index: string]: string };
  options_: null | { [index: string]: any };

  // Binary wire format is forced; credentials/options default to empty.
  constructor(
    hostname: string,
    credentials?: null | { [index: string]: string },
    options?: null | { [index: string]: any }
  ) {
    if (!options) options = {};
    if (!credentials) credentials = {};
    options["format"] = "binary";
    this.client_ = new grpcWeb.GrpcWebClientBase(options);
    this.hostname_ = hostname;
    this.credentials_ = credentials;
    this.options_ = options;
  }

  // ---- Board RPCs ----
  methodInfoCreateBoard = new grpcWeb.AbstractClientBase.MethodInfo(
    board_pb.CreateBoardRes,
    (request: board_pb.CreateBoardReq) => {
      return request.serializeBinary();
    },
    board_pb.CreateBoardRes.deserializeBinary
  );

  createBoard(
    request: board_pb.CreateBoardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<board_pb.CreateBoardRes>;
  createBoard(
    request: board_pb.CreateBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: board_pb.CreateBoardRes) => void
  ): grpcWeb.ClientReadableStream<board_pb.CreateBoardRes>;
  createBoard(
    request: board_pb.CreateBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: board_pb.CreateBoardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/CreateBoard",
        request,
        metadata || {},
        this.methodInfoCreateBoard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/CreateBoard",
      request,
      metadata || {},
      this.methodInfoCreateBoard
    );
  }

  methodInfoGetBoard = new grpcWeb.AbstractClientBase.MethodInfo(
    board_pb.GetBoardRes,
    (request: board_pb.GetBoardReq) => {
      return request.serializeBinary();
    },
    board_pb.GetBoardRes.deserializeBinary
  );

  getBoard(
    request: board_pb.GetBoardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<board_pb.GetBoardRes>;
  getBoard(
    request: board_pb.GetBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: board_pb.GetBoardRes) => void
  ): grpcWeb.ClientReadableStream<board_pb.GetBoardRes>;
  getBoard(
    request: board_pb.GetBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: board_pb.GetBoardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/GetBoard",
        request,
        metadata || {},
        this.methodInfoGetBoard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/GetBoard",
      request,
      metadata || {},
      this.methodInfoGetBoard
    );
  }

  methodInfoArchiveBoard = new grpcWeb.AbstractClientBase.MethodInfo(
    board_pb.ArchiveBoardRes,
    (request: board_pb.ArchiveBoardReq) => {
      return request.serializeBinary();
    },
    board_pb.ArchiveBoardRes.deserializeBinary
  );

  archiveBoard(
    request: board_pb.ArchiveBoardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<board_pb.ArchiveBoardRes>;
  archiveBoard(
    request: board_pb.ArchiveBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: board_pb.ArchiveBoardRes) => void
  ): grpcWeb.ClientReadableStream<board_pb.ArchiveBoardRes>;
  archiveBoard(
    request: board_pb.ArchiveBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: board_pb.ArchiveBoardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/ArchiveBoard",
        request,
        metadata || {},
        this.methodInfoArchiveBoard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/ArchiveBoard",
      request,
      metadata || {},
      this.methodInfoArchiveBoard
    );
  }

  methodInfoUnArchiveBoard = new grpcWeb.AbstractClientBase.MethodInfo(
    board_pb.UnArchiveBoardRes,
    (request: board_pb.UnArchiveBoardReq) => {
      return request.serializeBinary();
    },
    board_pb.UnArchiveBoardRes.deserializeBinary
  );

  unArchiveBoard(
    request: board_pb.UnArchiveBoardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<board_pb.UnArchiveBoardRes>;
  unArchiveBoard(
    request: board_pb.UnArchiveBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: board_pb.UnArchiveBoardRes) => void
  ): grpcWeb.ClientReadableStream<board_pb.UnArchiveBoardRes>;
  unArchiveBoard(
    request: board_pb.UnArchiveBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (
      err: grpcWeb.Error,
      response: board_pb.UnArchiveBoardRes
    ) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/UnArchiveBoard",
        request,
        metadata || {},
        this.methodInfoUnArchiveBoard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/UnArchiveBoard",
      request,
      metadata || {},
      this.methodInfoUnArchiveBoard
    );
  }

  methodInfoUpdateBoard = new grpcWeb.AbstractClientBase.MethodInfo(
    board_pb.UpdateBoardRes,
    (request: board_pb.UpdateBoardReq) => {
      return request.serializeBinary();
    },
    board_pb.UpdateBoardRes.deserializeBinary
  );

  updateBoard(
    request: board_pb.UpdateBoardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<board_pb.UpdateBoardRes>;
  updateBoard(
    request: board_pb.UpdateBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: board_pb.UpdateBoardRes) => void
  ): grpcWeb.ClientReadableStream<board_pb.UpdateBoardRes>;
  updateBoard(
    request: board_pb.UpdateBoardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: board_pb.UpdateBoardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/UpdateBoard",
        request,
        metadata || {},
        this.methodInfoUpdateBoard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/UpdateBoard",
      request,
      metadata || {},
      this.methodInfoUpdateBoard
    );
  }

  // ---- List RPCs ----
  methodInfoCreateList = new grpcWeb.AbstractClientBase.MethodInfo(
    list_pb.CreateListRes,
    (request: list_pb.CreateListReq) => {
      return request.serializeBinary();
    },
    list_pb.CreateListRes.deserializeBinary
  );

  createList(
    request: list_pb.CreateListReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<list_pb.CreateListRes>;
  createList(
    request: list_pb.CreateListReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: list_pb.CreateListRes) => void
  ): grpcWeb.ClientReadableStream<list_pb.CreateListRes>;
  createList(
    request: list_pb.CreateListReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: list_pb.CreateListRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/CreateList",
        request,
        metadata || {},
        this.methodInfoCreateList,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/CreateList",
      request,
      metadata || {},
      this.methodInfoCreateList
    );
  }

  methodInfoGetList = new grpcWeb.AbstractClientBase.MethodInfo(
    list_pb.GetListRes,
    (request: list_pb.GetListReq) => {
      return request.serializeBinary();
    },
    list_pb.GetListRes.deserializeBinary
  );

  getList(
    request: list_pb.GetListReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<list_pb.GetListRes>;
  getList(
    request: list_pb.GetListReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: list_pb.GetListRes) => void
  ): grpcWeb.ClientReadableStream<list_pb.GetListRes>;
  getList(
    request: list_pb.GetListReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: list_pb.GetListRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/GetList",
        request,
        metadata || {},
        this.methodInfoGetList,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/GetList",
      request,
      metadata || {},
      this.methodInfoGetList
    );
  }

  methodInfoArchiveList = new grpcWeb.AbstractClientBase.MethodInfo(
    list_pb.ArchiveListRes,
    (request: list_pb.ArchiveListReq) => {
      return request.serializeBinary();
    },
    list_pb.ArchiveListRes.deserializeBinary
  );

  archiveList(
    request: list_pb.ArchiveListReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<list_pb.ArchiveListRes>;
  archiveList(
    request: list_pb.ArchiveListReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: list_pb.ArchiveListRes) => void
  ): grpcWeb.ClientReadableStream<list_pb.ArchiveListRes>;
  archiveList(
    request: list_pb.ArchiveListReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: list_pb.ArchiveListRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/ArchiveList",
        request,
        metadata || {},
        this.methodInfoArchiveList,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/ArchiveList",
      request,
      metadata || {},
      this.methodInfoArchiveList
    );
  }

  methodInfoUnArchiveList = new grpcWeb.AbstractClientBase.MethodInfo(
    list_pb.UnArchiveListRes,
    (request: list_pb.UnArchiveListReq) => {
      return request.serializeBinary();
    },
    list_pb.UnArchiveListRes.deserializeBinary
  );

  unArchiveList(
    request: list_pb.UnArchiveListReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<list_pb.UnArchiveListRes>;
  unArchiveList(
    request: list_pb.UnArchiveListReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: list_pb.UnArchiveListRes) => void
  ): grpcWeb.ClientReadableStream<list_pb.UnArchiveListRes>;
  unArchiveList(
    request: list_pb.UnArchiveListReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: list_pb.UnArchiveListRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/UnArchiveList",
        request,
        metadata || {},
        this.methodInfoUnArchiveList,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/UnArchiveList",
      request,
      metadata || {},
      this.methodInfoUnArchiveList
    );
  }

  methodInfoUpdateList = new grpcWeb.AbstractClientBase.MethodInfo(
    list_pb.UpdateListRes,
    (request: list_pb.UpdateListReq) => {
      return request.serializeBinary();
    },
    list_pb.UpdateListRes.deserializeBinary
  );

  updateList(
    request: list_pb.UpdateListReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<list_pb.UpdateListRes>;
  updateList(
    request: list_pb.UpdateListReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: list_pb.UpdateListRes) => void
  ): grpcWeb.ClientReadableStream<list_pb.UpdateListRes>;
  updateList(
    request: list_pb.UpdateListReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: list_pb.UpdateListRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/UpdateList",
        request,
        metadata || {},
        this.methodInfoUpdateList,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/UpdateList",
      request,
      metadata || {},
      this.methodInfoUpdateList
    );
  }

  // ---- Card RPCs ----
  methodInfoCreateCard = new grpcWeb.AbstractClientBase.MethodInfo(
    card_pb.CreateCardRes,
    (request: card_pb.CreateCardReq) => {
      return request.serializeBinary();
    },
    card_pb.CreateCardRes.deserializeBinary
  );

  createCard(
    request: card_pb.CreateCardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<card_pb.CreateCardRes>;
  createCard(
    request: card_pb.CreateCardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: card_pb.CreateCardRes) => void
  ): grpcWeb.ClientReadableStream<card_pb.CreateCardRes>;
  createCard(
    request: card_pb.CreateCardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: card_pb.CreateCardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/CreateCard",
        request,
        metadata || {},
        this.methodInfoCreateCard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/CreateCard",
      request,
      metadata || {},
      this.methodInfoCreateCard
    );
  }

  methodInfoGetCard = new grpcWeb.AbstractClientBase.MethodInfo(
    card_pb.GetCardRes,
    (request: card_pb.GetCardReq) => {
      return request.serializeBinary();
    },
    card_pb.GetCardRes.deserializeBinary
  );

  getCard(
    request: card_pb.GetCardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<card_pb.GetCardRes>;
  getCard(
    request: card_pb.GetCardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: card_pb.GetCardRes) => void
  ): grpcWeb.ClientReadableStream<card_pb.GetCardRes>;
  getCard(
    request: card_pb.GetCardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: card_pb.GetCardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/GetCard",
        request,
        metadata || {},
        this.methodInfoGetCard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/GetCard",
      request,
      metadata || {},
      this.methodInfoGetCard
    );
  }

  methodInfoArchiveCard = new grpcWeb.AbstractClientBase.MethodInfo(
    card_pb.ArchiveCardRes,
    (request: card_pb.ArchiveCardReq) => {
      return request.serializeBinary();
    },
    card_pb.ArchiveCardRes.deserializeBinary
  );

  archiveCard(
    request: card_pb.ArchiveCardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<card_pb.ArchiveCardRes>;
  archiveCard(
    request: card_pb.ArchiveCardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: card_pb.ArchiveCardRes) => void
  ): grpcWeb.ClientReadableStream<card_pb.ArchiveCardRes>;
  archiveCard(
    request: card_pb.ArchiveCardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: card_pb.ArchiveCardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/ArchiveCard",
        request,
        metadata || {},
        this.methodInfoArchiveCard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/ArchiveCard",
      request,
      metadata || {},
      this.methodInfoArchiveCard
    );
  }

  methodInfoUnArchiveCard = new grpcWeb.AbstractClientBase.MethodInfo(
    card_pb.UnArchiveCardRes,
    (request: card_pb.UnArchiveCardReq) => {
      return request.serializeBinary();
    },
    card_pb.UnArchiveCardRes.deserializeBinary
  );

  unArchiveCard(
    request: card_pb.UnArchiveCardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<card_pb.UnArchiveCardRes>;
  unArchiveCard(
    request: card_pb.UnArchiveCardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: card_pb.UnArchiveCardRes) => void
  ): grpcWeb.ClientReadableStream<card_pb.UnArchiveCardRes>;
  unArchiveCard(
    request: card_pb.UnArchiveCardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: card_pb.UnArchiveCardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/UnArchiveCard",
        request,
        metadata || {},
        this.methodInfoUnArchiveCard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/UnArchiveCard",
      request,
      metadata || {},
      this.methodInfoUnArchiveCard
    );
  }

  methodInfoUpdateCard = new grpcWeb.AbstractClientBase.MethodInfo(
    card_pb.UpdateCardRes,
    (request: card_pb.UpdateCardReq) => {
      return request.serializeBinary();
    },
    card_pb.UpdateCardRes.deserializeBinary
  );

  updateCard(
    request: card_pb.UpdateCardReq,
    metadata: grpcWeb.Metadata | null
  ): Promise<card_pb.UpdateCardRes>;
  updateCard(
    request: card_pb.UpdateCardReq,
    metadata: grpcWeb.Metadata | null,
    callback: (err: grpcWeb.Error, response: card_pb.UpdateCardRes) => void
  ): grpcWeb.ClientReadableStream<card_pb.UpdateCardRes>;
  updateCard(
    request: card_pb.UpdateCardReq,
    metadata: grpcWeb.Metadata | null,
    callback?: (err: grpcWeb.Error, response: card_pb.UpdateCardRes) => void
  ) {
    if (callback !== undefined) {
      return this.client_.rpcCall(
        this.hostname_ + "/taska.proto.TaskaService/UpdateCard",
        request,
        metadata || {},
        this.methodInfoUpdateCard,
        callback
      );
    }
    return this.client_.unaryCall(
      this.hostname_ + "/taska.proto.TaskaService/UpdateCard",
      request,
      metadata || {},
      this.methodInfoUpdateCard
    );
  }
}
|
#!/bin/bash
# Setup simple Docker image with requirements pre-installed to speed up simple dev commands.
# Run this command before running any "non-full" Docker management commands in here and rerun it after changing the requirements.
LOCAL_DIR=".local/docker"
LOG_DIR="$LOCAL_DIR/log"
CONFIG_FILE="studlan/settings/local.docker.py"
CONFIG_TEMPLATE_FILE="setup/local.docker.dev.py"
DB_FILE="$LOCAL_DIR/db.sqlite3"
DOCKER_FILE="setup/Dockerfile.dev"
DOCKER_IMAGE="studlan-tiny"

# Fail fast on errors and on use of unset variables.
set -eu

mkdir -p "$LOCAL_DIR"
mkdir -p "$LOG_DIR"

# Add config file and exit if missing
if [[ ! -e $CONFIG_FILE ]]; then
    echo "Creating new config file ..."
    cp "$CONFIG_TEMPLATE_FILE" "$CONFIG_FILE"
fi

# Create DB file so Docker doesn't make it a directory
if [[ ! -e $DB_FILE ]]; then
    echo "Creating DB file ..."
    touch "$DB_FILE"
fi

# Add version file (placeholder version; overwritten on every run)
echo "0.0.0-SNAPSHOT" > VERSION

echo
echo "Creating tiny Docker image ..."
docker build -f "$DOCKER_FILE" -t "$DOCKER_IMAGE" .
# Fragment of a cf task step: helpers (get_option, logger::*, cf::*) and the
# variables $command/$org/$space are defined by the including script.
domain=$(get_option '.domain')
logger::info "Executing $(logger::highlight "$command"): $domain"
# Target the requested org/space, then delete the domain there.
cf::target "$org" "$space"
cf::delete_domain "$domain"
|
# Feed scripts/parse.py into a Django shell so it runs with the project's
# settings and models importable.
python manage.py shell < scripts/parse.py
|
<gh_stars>0
// Copyright (c) 2017, taher and contributors
// For license information, please see license.txt
// Client-side controller for the Membership doctype.
frappe.ui.form.on('Membership', {
    // Runs before save. Redraw all form fields.
    validate: function(frm) {
        // Fix: use the `frm` passed to the handler. The original called
        // `this.frm.refresh_fields()`, but `this` is not reliably bound to
        // an object carrying `frm` inside frappe event handlers.
        frm.refresh_fields();
        console.log("js function")
    }
    // refresh: function(frm) {
    // }
    // on_submit: function(doc, dt, dn){
    //     this.create_item();
    //     console.log("in js");
    // }
});
|
#! /bin/sh
# Build driver: platform detection and prerequisite tool checks.

# Abort with a diagnostic when running on an unsupported OS.
unknown_platform() {
    echo "Unknown platform: `uname`"
    exit 1
}

# Accumulates 1 if any required binary is missing; checked before building.
missing=0
# Report if the binary named $1 is not on PATH, flagging $missing.
check_for () {
    which $1 > /dev/null 2> /dev/null
    if [ $? -ne 0 ]; then
        echo "Error: can't find $1 binary"
        missing=1
    fi
}

check_for ant
check_for cc
check_for g++
check_for bunzip2
check_for git
check_for hg
check_for python
check_for svn
check_for unzip
check_for xml2-config
check_for bash
check_for xzdec
check_for wget
check_for virtualenv
check_for zip

# Per-platform extras: Linux patch(1) needs --backup; OpenBSD needs gm4.
case `uname` in
    Linux*)
        PATCH_ARGS=--backup;;
    OpenBSD*)
        check_for gm4;;
esac
# Print CLI usage and exit non-zero.
usage() {
    echo "Usage: build.sh [-f]" >&2
    exit 1
}

# How many cores does this machine have (so we can pass a sensible number to
# gmake -j)?
case `uname` in
    OpenBSD) max_jobs=`sysctl -n hw.ncpu`;;
    Linux) max_jobs=`nproc`;;
esac
# Keep max_jobs to safe limits for pretty much any machine out there.
if [ $max_jobs -le 0 ]; then
    max_jobs=1
elif [ $max_jobs -gt 8 ]; then
    max_jobs=8
fi

# Default is a serial build; -f ("fast") uses all (clamped) cores.
num_jobs=1
while getopts ":f" f
do
    case "$f" in
        f) num_jobs=$max_jobs;;
        h) usage;;
        [?]) usage;;
    esac
done

# Prefer pypy for running build-helper Python; fall back to CPython 2.7.
which pypy > /dev/null 2> /dev/null
if [ $? -eq 0 ]; then
    PYTHON=`which pypy`
else
    PYTHON=`which python2.7`
fi

# Let's use GNU make across the board
case `uname` in
    OpenBSD) GMAKE=gmake;;
    Linux) GMAKE=make;;
    *) unknown_platform;;
esac
check_for ${GMAKE}

# Bail out now if any earlier check_for failed.
if [ $missing -eq 1 ]; then
    exit 1
fi

HERE=`pwd`
wrkdir=${HERE}/work
PATCH_DIR=${HERE}/patches
ARCHIVE_DISTFILES=https://archive.org/download/softdev_warmup_experiment_artefacts/distfiles/
mkdir -p ${wrkdir}
echo "===> Working in $wrkdir"
# NOTE(review): PATCH_DIR is redefined here (with a trailing slash), shadowing
# the assignment a few lines up -- presumably a leftover; behaviour unchanged.
PATCH_DIR=`pwd`/patches/
# System (from OS packages) Java 7, for making a JDK8. We must not use a JDK8
# to build a JDK8. See README-builds.html in JDK8 src tarball.
case `uname` in
    Linux)
        SYS_JDK7_HOME=/usr/lib/jvm/java-7-openjdk-amd64
        SYS_JDK8_HOME=/usr/lib/jvm/java-8-openjdk-amd64
        ;;
    OpenBSD)
        SYS_JDK7_HOME=/usr/local/jdk-1.7.0
        SYS_JDK8_HOME=/not_used_on_openbsd
        ;;
    *) unknown_platform;;
esac

if [ ! -d ${SYS_JDK7_HOME} ]; then
    echo "Can't find system Java 7"
    exit 1
fi

# Pinned revision of softdevteam/warmup_stats (used by build_warmup_stats).
WARMUP_STATS_VERSION=726eaa39930c9dabc0df8fcef7a42b7f6465001d
# Fetch and build softdevteam/warmup_stats at the pinned revision.
# Skipped on OpenBSD < 6.2, whose libzip is too old.
build_warmup_stats() {
    echo "\n===> Download and build stats\n"
    # Older OpenBSDs don't have a new enough libzip
    if [ "`uname`" = "OpenBSD" ]; then
        ${PYTHON} -c "import sys; sys.exit(`uname -r` < 6.2)"
        if [ $? != 0 ]; then
            echo "skipping warmup_stats"
            return
        fi
    fi
    if ! [ -d "${HERE}/warmup_stats" ]; then
        cd ${HERE} && git clone https://github.com/softdevteam/warmup_stats || exit $?
    fi
    # Fix: checkout the pinned WARMUP_STATS_VERSION. The original referenced
    # the never-defined ${WARMUP_STATS_V}, so `git checkout` ran with no
    # argument and the pin was silently ignored.
    cd ${HERE}/warmup_stats && git checkout ${WARMUP_STATS_VERSION} || exit $?
    if ! [ -d "${HERE}/warmup_stats/work/R-inst" ]; then
        cd ${HERE}/warmup_stats && ./build.sh || exit $?
    fi
}
# Pinned revision of softdevteam/krun.
KRUN_VERSION=019513d7ebb0bf74a55fd31a34b92f85e80854bd
build_initial_krun() {
    echo "\n===> Download and build krun\n"
    if ! [ -d "${HERE}/krun" ]; then
        cd ${HERE} && git clone --recursive https://github.com/softdevteam/krun.git || exit $?
    fi
    # We do a quick build now so that VMs which link libkruntime can find it.
    # Note that we will build again later once we have the JVM built, so that
    # libkruntime can itself be built with Java support.
    #
    # Due to the above, We don't care what compiler we use at this stage.
    cd ${HERE}/krun && git checkout ${KRUN_VERSION} && ${GMAKE} || exit $?
}

clean_krun() {
    # See build_initial_krun() comment for why this exists
    cd ${HERE}/krun && ${GMAKE} clean || exit $?
}
# We build our own fixed version of GCC, thus ruling out differences in
# packaged compilers for the different platforms.
GCC_V=4.9.4
# The installed compilers are renamed zgcc/zg++ (--program-transform-name)
# so they cannot be confused with the system toolchain.
OUR_CC=${wrkdir}/gcc-inst/bin/zgcc
OUR_CXX=${wrkdir}/gcc-inst/bin/zg++
GCC_TARBALL_URL=ftp://ftp.mirrorservice.org/sites/ftp.gnu.org/gnu/gcc/gcc-${GCC_V}/gcc-${GCC_V}.tar.gz

# Download, patch (OpenBSD only) and build GCC into ${wrkdir}/gcc-inst.
# No-op if the compiler is already installed.
build_gcc() {
    echo "\n===> Download and build GCC\n"
    if [ -f ${OUR_CC} ]; then return; fi
    cd ${wrkdir}
    if ! [ -f ${wrkdir}/gcc-${GCC_V}.tar.gz ]; then
        wget ${GCC_TARBALL_URL} || exit $?
    fi
    if ! [ -d ${wrkdir}/gcc ]; then
        tar xfzp gcc-${GCC_V}.tar.gz || exit $?;
        mv gcc-${GCC_V} gcc || exit $?
    fi
    cd gcc || exit $?
    if [ `uname` = "OpenBSD" ]; then
        for p in `ls ${PATCH_DIR}/openbsd_gcc_patches`; do
            patch -Ep0 < ${PATCH_DIR}/openbsd_gcc_patches/$p || exit $?
        done
    fi
    # download script uses fixed versions, so OK.
    ./contrib/download_prerequisites || exit $?
    mkdir sd_build || exit $?
    cd sd_build || exit $?
    # Fix: --disable-tls was passed twice; the duplicate is removed
    # (configure semantics unchanged).
    ../configure \
        --prefix=${wrkdir}/gcc-inst \
        --disable-libcilkrts \
        --program-transform-name=s,^,z, \
        --verbose \
        --disable-libmudflap \
        --disable-libgomp \
        --disable-multilib \
        --disable-tls \
        --enable-languages=c,c++ \
        --with-system-zlib \
        --enable-threads=posix \
        --enable-wchar_t \
        --disable-libstdcxx-pch \
        --enable-cpp \
        --enable-shared \
        || exit $?
    ${GMAKE} -j $num_jobs || exit $?
    ${GMAKE} install || exit $?
}
apply_gcc_lib_path() {
    # Put GCC libs into linker path
    # Needed for (e.g.) V8 to find libstdc++
    case `uname` in
        Linux) export LD_LIBRARY_PATH=${wrkdir}/gcc-inst/lib64;;
        OpenBSD) export LD_LIBRARY_PATH=${wrkdir}/gcc-inst/lib;;
        *) unknown_platform;;
    esac
}
# CPython is used to build V8. Debian 8 package is too old.
CPYTHONV=2.7.12
CPYTHON=${wrkdir}/cpython-inst/bin/python
# Download and build CPython with our fixed GCC; no-op if already built.
build_cpython() {
    cd ${wrkdir} || exit $?
    echo "\n===> Download and build CPython\n"
    if [ -f ${wrkdir}/cpython/python ]; then return; fi
    cd $wrkdir
    if [ ! -f Python-${CPYTHONV}.tgz ]; then
        wget http://python.org/ftp/python/${CPYTHONV}/Python-${CPYTHONV}.tgz || exit $?
    fi
    tar xfz Python-${CPYTHONV}.tgz || exit $?
    mv Python-${CPYTHONV} cpython
    cd cpython
    # OpenBSD needs the wxneeded linker flag (W^X-exempt binary).
    case `uname` in
        OpenBSD)
            CC=${OUR_CC} LDFLAGS=-Wl,-z,wxneeded ./configure \
                --prefix=${wrkdir}/cpython-inst || exit $?;;
        *)
            CC=${OUR_CC} ./configure \
                --prefix=${wrkdir}/cpython-inst || exit $?;;
    esac
    ${GMAKE} -j $num_jobs || exit $?
    ${GMAKE} install || exit $?
}
LUAJITV=2.0.4
# Download and build LuaJIT (with Lua 5.2 compat); no-op if already built.
build_luajit() {
    cd ${wrkdir} || exit $?
    echo "\n===> Download and build LuaJIT\n"
    if [ -f ${wrkdir}/luajit/src/luajit ]; then return; fi
    wget http://luajit.org/download/LuaJIT-${LUAJITV}.tar.gz || exit $?
    tar xfz LuaJIT-${LUAJITV}.tar.gz
    mv LuaJIT-${LUAJITV} luajit
    cd luajit
    CFLAGS=-DLUAJIT_ENABLE_LUA52COMPAT ${GMAKE} CC=${OUR_CC} || exit $?
}
PYPYV=5.7.1
# Download sources, patch, translate and compile PyPy; no-op if already built.
build_pypy() {
    cd ${wrkdir} || exit $?
    echo "\n===> Download and build PyPy\n"
    if [ -f ${wrkdir}/pypy/pypy/goal/pypy-c ]; then return; fi
    if ! [ -f "${wrkdir}/pypy2-v${PYPYV}-src.tar.bz2" ]; then
        url="https://bitbucket.org/pypy/pypy/downloads/pypy2-v${PYPYV}-src.tar.bz2"
        case `uname` in
            OpenBSD) ftp $url || exit $?;;
            *) wget $url || exit $?;;
        esac
    fi
    if ! [ -d "${wrkdir}/pypy" ]; then
        bunzip2 -c - pypy2-v${PYPYV}-src.tar.bz2 | tar xf - || exit $?
        mv pypy2-v${PYPYV}-src pypy || exit $?
        cd pypy
        patch -p0 < ${PATCH_DIR}/pypy.diff || exit $?
    fi
    cd ${wrkdir}/pypy/pypy/goal/ || exit $?
    usession=`mktemp -d`
    # Separate translate/compile so we can tag on W^X flag.
    env CC=${OUR_CC} PYPY_USESSION_DIR=${usession} \
        ${PYTHON} ../../rpython/bin/rpython -Ojit --source --no-shared \
        || exit $?
    pypy_make_dir=${usession}/usession-release-pypy2.7-v${PYPYV}-0/testing_1
    cd ${pypy_make_dir} || exit $?
    case `uname` in
        OpenBSD)
            env CC=${OUR_CC} ${GMAKE} LDFLAGSEXTRA="-Wl,-z,wxneeded" || exit $?;;
        *)
            env CC=${OUR_CC} ${GMAKE} || exit $?;;
    esac
    cp ${pypy_make_dir}/pypy-c ${wrkdir}/pypy/pypy/goal/pypy-c || exit $?
    rm -rf ${usession}
}
# We build V8 using a hand rolled tarball. See bin/make_v8_source_tarball.
V8_V=5.8.283.32
V8_TARBALL=v8_fullsource_${V8_V}_2017-12-12.tar.gz
# Download the pre-rolled V8 source tarball, patch, and build d8.
build_v8() {
    cd ${wrkdir} || exit $?
    echo "\n===> Download and build V8\n"
    if [ -f ${wrkdir}/v8/out/native/d8 ]; then return; fi
    if ! [ -f ${wrkdir}/${V8_TARBALL} ]; then
        cd ${wrkdir}/ && wget ${ARCHIVE_DISTFILES}/${V8_TARBALL} || exit $?
    fi
    tar zxf ${V8_TARBALL} || exit $?
    cd ${wrkdir}/v8 || exit $?
    patch -Ep1 < ${PATCH_DIR}/v8.diff || exit $?
    cd ${wrkdir}/v8/tools/clang || exit $?
    patch -Ep1 < ${PATCH_DIR}/v8_clang.diff || exit $?
    # Test suite build doesn't listen to CC/CXX -- symlink/path hack ahoy
    ln -sf ${OUR_CC} `dirname ${OUR_CC}`/gcc
    ln -sf ${OUR_CXX} `dirname ${OUR_CXX}`/g++
    PATH=`dirname ${OUR_CC}`:${PATH}
    # V8 mistakes our compiler for clang for some reason, hence setting
    # GYP_DEFINES. It probably isn't expecting a gcc to be called zgcc.
    cd ${wrkdir}/v8 || exit $?
    env GYP_DEFINES="clang=0" CC=${OUR_CC} CXX=${OUR_CXX} \
        LIBKRUN_DIR=${HERE}/krun/libkrun ${GMAKE} -j${num_jobs} native V=1 || exit $?
    test -f out/native/d8 || exit $?
    # remove the gcc/g++ symlinks from earlier and restore path
    rm `dirname ${OUR_CC}`/gcc `dirname ${OUR_CC}`/g++ || exit $?
}
# There is a bug in the JDK8 build system which makes it incompatible with GNU make 4
# http://stackoverflow.com/questions/21246042/scrambled-arguments-when-building-openjdk
# Let's build 3.82 then.
GMAKE_V=3.82
build_gmake() {
    echo "\n===> Download and build gmake-${GMAKE_V}\n"
    if [ -f ${wrkdir}/make-${GMAKE_V}/make ]; then return; fi
    cd ${wrkdir} || exit $?
    wget http://ftp.gnu.org/gnu/make/make-${GMAKE_V}.tar.gz || exit $?
    tar zxvf make-${GMAKE_V}.tar.gz || exit $?
    cd make-${GMAKE_V} || exit $?
    CC=${OUR_CC} ./configure || exit $?
    ${GMAKE} JOBS=$num_jobs || exit $?
    # Provide a "gmake" alias alongside the built "make" binary.
    cp make gmake
}
# We use the JDK we just built for all consequent Java compilation (and as a
# basis for the Graal compiler).
case `uname` in
    Linux) OUR_JAVA_HOME=${wrkdir}/openjdk/build/linux-x86_64-normal-server-release/images/j2sdk-image/;;
    OpenBSD) OUR_JAVA_HOME=${wrkdir}/openjdk/build/bsd-x86_64-normal-server-release/images/j2sdk-image/;;
    *) unknown_platform;;
esac
# Base name of the bsd-port JDK8 source tarball fetched in build_jdk.
JDK_TARBALL_BASE=openjdk-8u121b13-bsd-port-20170201
# Download and build JDK8 (bsd-port tree) with our fixed GCC, bootstrapped
# from the system JDK7. No-op if javac already exists in OUR_JAVA_HOME.
build_jdk() {
    echo "\n===> Download and build JDK8\n"
    if [ -f ${OUR_JAVA_HOME}/bin/javac ]; then return; fi
    cd ${wrkdir} || exit $?
    if ! [ -f "${wrkdir}/${JDK_TARBALL_BASE}.tar.xz" ]; then
        wget http://www.intricatesoftware.com/distfiles/${JDK_TARBALL_BASE}.tar.xz || exit $?
    fi
    # Fix: the guard read ${wkrdir} (typo, always empty), so the tarball was
    # re-extracted on every run; it now correctly tests ${wrkdir}/openjdk.
    if ! [ -d ${wrkdir}/openjdk ]; then
        xzdec ${JDK_TARBALL_BASE}.tar.xz | tar xf - || exit $?
        mv ${JDK_TARBALL_BASE} openjdk
    fi
    cd openjdk || exit $?
    # Put our zgcc/zg++ first on PATH for the whole JDK build.
    JDK_BUILD_PATH=`dirname ${OUR_CC}`:${PATH}
    # Fix: --with-debug-level=release was passed twice in both branches; the
    # duplicates are removed (configure semantics unchanged).
    case `uname` in
        Linux)
            env CC=zgcc CXX=zg++ PATH=${JDK_BUILD_PATH} bash configure \
                --disable-option-checking \
                --with-cups-include=/usr/local/include \
                --with-debug-level=release \
                --disable-ccache \
                --disable-freetype-bundling \
                --disable-zip-debug-info \
                --disable-debug-symbols \
                --enable-static-libjli \
                --with-zlib=system \
                --with-milestone=fcs \
                --with-jobs=$num_jobs \
                --with-boot-jdk=${SYS_JDK7_HOME} \
                || exit $?
            PATH=${JDK_BUILD_PATH} ../make-${GMAKE_V}/make all || exit $?
            ;;
        OpenBSD)
            env CPPFLAGS=-I/usr/local/include \
                CC=zgcc CXX=zg++ PATH=${JDK_BUILD_PATH} ac_cv_path_NAWK=awk bash configure \
                --disable-option-checking \
                --with-cups-include=/usr/local/include \
                --with-debug-level=release \
                --disable-ccache \
                --disable-freetype-bundling \
                --disable-zip-debug-info \
                --disable-debug-symbols \
                --enable-static-libjli \
                --with-zlib=system \
                --with-giflib=system \
                --with-milestone=fcs \
                --with-jobs=$num_jobs \
                --with-extra-ldflags="-Wl,-z,wxneeded" \
                --with-boot-jdk=${SYS_JDK7_HOME} \
                || exit $?
            PATH=${JDK_BUILD_PATH} \
                COMPILER_WARNINGS_FATAL=false \
                DEFAULT_LIBPATH="/usr/lib:/usr/X11R6/lib:/usr/local/lib"\
                ../make-${GMAKE_V}/make all || exit $?
            ;;
        *)
            unknown_platform;;
    esac
    # JDK installs some jar files unreadable to "other" users meaning that the
    # benchmark user can't access them. This becomes a problem later for graal,
    # which takes a copy of this JDK's jar files.
    chmod -R 755 ${wrkdir}/openjdk/build/*-release/jdk/lib || exit $?
}
# This is a bootstrap JDK used only for Graal, which requiries a very specific
# version of the JDK.
BOOT_JDK_UPDATE_V=121
BOOT_JDK_BUILD_V=13
BOOT_JAVA_V=8u${BOOT_JDK_UPDATE_V}-b${BOOT_JDK_BUILD_V}
BOOT_JAVA_HOME=${wrkdir}/jdk${BOOT_JAVA_V}_fullsource/build/linux-x86_64-normal-server-release/images/j2sdk-image/
BOOT_JDK_BASE=jdk${BOOT_JAVA_V}
BOOT_JDK_TAR=${BOOT_JDK_BASE}_fullsource.tgz
# Build the exact JDK8 version Graal's JVMCI requires, from a hand-rolled
# source tarball. No-op if its javac already exists.
build_bootstrap_jdk() {
    echo "\n===> Download and build graal bootstrap JDK8\n"
    if [ -f ${BOOT_JAVA_HOME}/bin/javac ]; then return; fi
    cd ${wrkdir} || exit $?
    # We fetch a hand-rolled tarball, as the JDK repo build downloads things
    # and I am not sure that they are fixed versions. The tarball was rolled on
    # 2017-04-19 to match the current OTN build, which at the time was:
    # labsjdk-8u121-jvmci-0.25-darwin-amd64.tar.gz
    #
    # To build the JDK8 tarball:
    #   hg clone http://hg.openjdk.java.net/jdk8u/jdk8u openjdk8
    #   hg up <tag>  # plug in the right tag, e.g. `jdk8u121-b13'
    #   sh get_source.sh
    #   find . -name '.hg' -type 'd' | xargs rm -rf
    #   cd ..
    #   mv openjdk8 ${BOOT_JDK_BASE}_fullsource
    #   tar zcvf ${BOOT_JDK_BASE}_fullsource.tgz ${BOOT_JDK_BASE}_fullsource
    # Upload to archive.org once tested
    if [ ! -f ${wrkdir}/${BOOT_JDK_TAR} ]; then
        wget ${ARCHIVE_DISTFILES}/${BOOT_JDK_TAR}
    fi
    if [ ! -d ${BOOT_JDK_BASE}_fullsource ]; then
        tar zxf ${BOOT_JDK_TAR} || exit $?
    fi
    cd ${BOOT_JDK_BASE}_fullsource || exit $?
    JDK_BUILD_PATH=`dirname ${OUR_CC}`:${PATH}
    # Fix: --with-debug-level=release was passed twice; the duplicate is
    # removed (configure semantics unchanged).
    env CC=zgcc CXX=zg++ PATH=${JDK_BUILD_PATH} bash configure \
        --disable-option-checking \
        --with-cups-include=/usr/local/include \
        --with-debug-level=release \
        --disable-ccache \
        --disable-freetype-bundling \
        --disable-zip-debug-info \
        --disable-debug-symbols \
        --enable-static-libjli \
        --with-zlib=system \
        --with-milestone=fcs \
        --with-jobs=$num_jobs \
        --with-boot-jdk=${SYS_JDK7_HOME} \
        --with-update-version=${BOOT_JDK_UPDATE_V} \
        --with-build-number=b${BOOT_JDK_BUILD_V} \
        || exit $?
    PATH=${JDK_BUILD_PATH} ../make-${GMAKE_V}/make all || exit $?
}
# The latest Graal and MX at the time of writing. Note that Graal will be part
# of JDK9 soon, so the build steps you see here will be out of date soon. Also
# note that MX doesn't have releases.
JVMCI_VERSION=jvmci-0.25
MX_VERSION=720976e8c52527416f7aec95262c9a47d93602c4
GRAAL_VERSION=graal-vm-0.22
# Clone mx, build a JVMCI-enabled JDK8, then build Graal itself with it.
build_graal() {
    echo "\n===> Download and build graal\n"
    if [ -f ${wrkdir}/graal-jvmci-8/jdk1.8*/product/bin/javac ]; then return; fi
    if [ ! -d ${wrkdir}/mx ]; then
        cd ${wrkdir} && git clone https://github.com/graalvm/mx || exit $?
        cd mx && git checkout ${MX_VERSION} && cd .. || exit $?
    fi
    # mx won't listen to CC/CXX
    ln -sf ${OUR_CC} `dirname ${OUR_CC}`/gcc
    ln -sf ${OUR_CXX} `dirname ${OUR_CXX}`/g++
    GRAAL_PATH=`dirname ${OUR_CC}`:${PATH}
    MX="env PATH=${GRAAL_PATH} python2.7 ${wrkdir}/mx/mx.py --java-home ${BOOT_JAVA_HOME}"
    # Build a JVMCI-enabled JDK
    if [ ! -d ${wrkdir}/graal-jvmci-8 ];then
        hg clone http://hg.openjdk.java.net/graal/graal-jvmci-8
    fi
    cd graal-jvmci-8 || exit $?
    hg up ${JVMCI_VERSION} || exit $?
    if [ ! -d ${wrkdir}/graal-jvmci-8/jdk1.8.0 ]; then
        ${MX} sforceimports || exit $?
        ${MX} build || exit $?
    fi
    # Make mx use the jvmci-enabled jdk
    cd ${wrkdir}/graal-jvmci-8
    JVMCI_JAVA_HOME=`${MX} jdkhome`
    echo "jvmci JAVA_HOME is: ${JVMCI_JAVA_HOME}"
    MX="env PATH=${GRAAL_PATH} python2.7 ${wrkdir}/mx/mx.py --java-home ${JVMCI_JAVA_HOME}"
    # Build graal itself
    cd ${wrkdir}
    if ! [ -d ${wrkdir}/graal ]; then
        git clone https://github.com/graalvm/graal-core graal || exit $?
    fi
    cd ${wrkdir}/graal && git checkout ${GRAAL_VERSION} || exit $?
    ${MX} sforceimports || exit $?
    ${MX} || exit $? # fetches truffle
    cd ${wrkdir}/truffle && git checkout ${GRAAL_VERSION} || exit $?
    cd ${wrkdir}/graal && ${MX} build || exit $?
    # remove the symlinks
    rm `dirname ${OUR_CC}`/gcc `dirname ${OUR_CC}`/g++ || exit $?
}
# We had problems with offline mode in maven2, which at the time of writing is
# the version in Debian stable packages. We download a newer version from the
# 3.x branch.
MAVEN_V=3.5.0
MAVEN_TARBALL=apache-maven-${MAVEN_V}-bin.tar.gz
MAVEN_TARBALL_URL=https://archive.apache.org/dist/maven/maven-3/${MAVEN_V}/binaries/${MAVEN_TARBALL}
# Fetch a binary Maven 3, unpack it, and put it first on PATH (verified).
fetch_maven() {
    echo "\n===> Fetch Maven\n"
    cd ${wrkdir}
    if ! [ -f ${wrkdir}/${MAVEN_TARBALL} ]; then
        wget ${MAVEN_TARBALL_URL} || exit $?
    fi
    if ! [ -d ${wrkdir}/maven ]; then
        tar zxvf ${MAVEN_TARBALL} && mv apache-maven-${MAVEN_V} maven || exit $?
    fi
    # Put maven into the PATH
    export PATH=${wrkdir}/maven/bin:${PATH}
    if [ "`which mvn`" != "${wrkdir}/maven/bin/mvn" ]; then
        echo "The mvn we installed is not in the path correctly"
        exit 1
    fi
}
TRUFFLERUBY_V=graal-vm-0.22
TRUFFLERUBY_BUILDPACK_DIR=${wrkdir}/truffleruby-buildpack
TRUFFLERUBY_BUILDPACK_TARBALL=truffleruby-buildpack-${TRUFFLERUBY_V}-20170502.tgz

# Clone, patch and build TruffleRuby against the pinned "buildpack" of Maven
# dependencies. Idempotent: returns early when the target jar already exists.
build_truffleruby() {
    echo "\n===> Download and build TruffleRuby\n"
    # maven caches dependencies, we dont ever want to pick those up, only
    # what's in the jruby build pack.
    #
    # Fix: tilde does not expand inside double quotes, so the original
    # checks ([ -e "~/.m2" ]) could never fire; use ${HOME} explicitly.
    # Also exit with a real failure status: `exit $?` reported the
    # (successful) status of the preceding echo, i.e. exit 0.
    if [ -e "${HOME}/.m2" ] || [ -e "${HOME}/.maven-gems" ]; then
        echo "Please remove your maven configurations: ~/.m2 ~/.maven-gems";
        exit 1
    fi
    cd ${wrkdir}
    if [ -f ${wrkdir}/truffleruby/truffleruby/target/truffleruby-0-SNAPSHOT.jar ]; then return; fi
    if ! [ -d ${wrkdir}/truffleruby ]; then
        git clone https://github.com/graalvm/truffleruby.git || exit $?
        cd ${wrkdir}/truffleruby
        git checkout ${TRUFFLERUBY_V} || exit $?
        patch -Ep1 < ${PATCH_DIR}/truffleruby.diff || exit $?
    fi
    cd ${wrkdir}
    if [ ! -f ${TRUFFLERUBY_BUILDPACK_TARBALL} ]; then
        wget ${ARCHIVE_DISTFILES}/${TRUFFLERUBY_BUILDPACK_TARBALL} || exit $?
    fi
    if [ ! -d ${TRUFFLERUBY_BUILDPACK_DIR} ]; then
        cd ${wrkdir} && tar zxvf ${TRUFFLERUBY_BUILDPACK_TARBALL} || exit $?
    fi
    cd ${wrkdir}/truffleruby || exit $?
    # To make a buildpack, you would do:
    # env JAVA_HOME=${SYS_JDK8_HOME} mvn -X \
    #   -Dmaven.repo.local=${TRUFFLERUBY_BUILDPACK_DIR} || exit 1
    # Then tar up the resultant directory, test it, and host on archive.org.
    # We have to use the system JDK8 since the one we bootstrap doesn't have
    # SSL configured (jdk has its own CA cert format). See:
    # http://www.linuxfromscratch.org/blfs/view/svn/general/openjdk.html
    env MVN_EXTRA_ARGS="-Dmaven.repo.local=${TRUFFLERUBY_BUILDPACK_DIR} --offline" \
        V=1 JAVA_HOME=${SYS_JDK8_HOME} ruby tool/jt.rb build || exit $?
    # To invoke the VM:
    # PATH=${PATH}:/path/to/work/mx \
    # JAVA_HOME=/path/to/work/graal-jvmci-8/jdk1.8.0_121/product \
    # GRAAL_HOME=/path/to/work/graal \
    # ../truffleruby/tool/jt.rb run --graal
    #
    # Check it has the JIT by evaluating (should be true):
    # Truffle::Graal.graal?
}
HHVM_VERSION=HHVM-3.19.1

# Clone, patch and build HHVM (PHP part only). Idempotent via the php
# binary existence check. Temporarily symlinks gcc/g++ to our compilers
# for build stages that ignore CC/CXX.
build_hhvm() {
    echo "\n===> Download and build HHVM\n"
    if [ -f ${wrkdir}/hhvm/hphp/hhvm/php ]; then return; fi
    cd ${wrkdir} || exit $?
    if ! [ -d ${wrkdir}/hhvm ]; then
        git clone https://github.com/facebook/hhvm.git || exit $?
    fi
    cd hhvm || exit $?
    git checkout ${HHVM_VERSION} || exit $?
    git submodule update --init --recursive || exit $?
    patch -Ep1 < ${PATCH_DIR}/hhvm.diff || exit $?
    # Some parts of the build (e.g. OCaml) won't listen to CC/CXX
    ln -sf ${OUR_CC} `dirname ${OUR_CC}`/gcc || exit $?
    ln -sf ${OUR_CXX} `dirname ${OUR_CXX}`/g++ || exit $?
    HHVM_PATH=`dirname ${OUR_CC}`:${PATH}
    # -DBUILD_HACK=OFF since we only need the PHP part of HHVM (faster build)
    # -DENABLE_EXTENSION_LZ4=OFF: https://github.com/facebook/hhvm/issues/7804
    env LIBKRUN_DIR=${HERE}/krun/libkrun PATH=${HHVM_PATH} CC=${OUR_CC} \
        CXX=${OUR_CXX} sh -c "cmake -DCMAKE_CXX_FLAGS=-I${HERE}/krun/libkrun -DENABLE_EXTENSION_LZ4=OFF -DBUILD_HACK=OFF ." || exit $?
    ${GMAKE} -j $num_jobs VERBOSE=1 || exit $?
    # remove the symlinks
    rm `dirname ${OUR_CC}`/gcc `dirname ${OUR_CC}`/g++ || exit $?
}
# autoconf-2.13 is needed to build spidermonkey
build_autoconf() {
    echo "\n===> Download and build autoconf-2.13\n"
    # Idempotency check: skip if the source tree was already unpacked.
    if [ -d ${wrkdir}/autoconf-2.13 ]; then return; fi
    cd ${wrkdir} || exit $?
    wget http://ftp.gnu.org/gnu/autoconf/autoconf-2.13.tar.gz || exit $?
    tar xfz autoconf-2.13.tar.gz || exit $?
    cd autoconf-2.13
    # Install into a private prefix so the ancient autoconf cannot shadow
    # the system one.
    ./configure --prefix=${wrkdir}/autoconf-inst || exit $?
    make install || exit $?
    cd ${wrkdir}/autoconf-inst/bin
    # SpiderMonkey's build looks for a versioned executable name.
    ln -s autoconf autoconf-2.13 || exit $?
    cd ${HERE}
}
SPIDERMONKEY_VERSION=6583496f169c # FIREFOX_AURORA_54_BASE

# Download a pinned mozilla-central snapshot and build the SpiderMonkey
# JS engine (js/src) with our toolchain. Idempotent via directory check.
build_spidermonkey() {
    echo "\n===> Download and build SpiderMonkey\n"
    if [ -d ${wrkdir}/spidermonkey ]; then return; fi
    cd ${wrkdir} || exit $?
    wget -O spidermonkey.tar.bz2 http://hg.mozilla.org/mozilla-central/archive/${SPIDERMONKEY_VERSION}.tar.bz2 || exit $?
    bunzip2 -c - spidermonkey.tar.bz2 | tar xfp - || exit $?
    mv mozilla-central-${SPIDERMONKEY_VERSION} spidermonkey || exit $?
    cd spidermonkey
    cd js/src
    # Regenerate configure with the required ancient autoconf (see
    # build_autoconf).
    ${wrkdir}/autoconf-inst/bin/autoconf || exit $?
    mkdir build_OPT.OBJ
    cd build_OPT.OBJ
    AUTOCONF=${wrkdir}/autoconf-inst/bin/autoconf-2.13 MOZ_JEMALLOC4=1 CC=${OUR_CC} CXX=${OUR_CXX} ../configure --disable-tests || exit $?
    # Our custom GCC's runtime libs must be resolvable at link time.
    LD_LIBRARY_PATH=${wrkdir}/gcc-inst/lib/ ${GMAKE} -j $num_jobs || exit $?
}
# Build a patched DaCapo jar into ${HERE}/extbench. Idempotent.
build_dacapo() {
    echo "\n===> Build DaCapo\n"
    if [ -f "${HERE}/extbench/dacapo-9.12-bach.jar" ]; then return; fi
    # DaCapo uses a millisecond timer by default, which isn't good enough for
    # our purposes. To fix this, in as minimally intrusive a way as possible: we
    # download the DaCapo binary and source distributions; unpack both; patch
    # the source version and recompile only the benchmarking harness; copy the
    # relevant recompiled .class files back into the binary distribution; and
    # rezip it. Thus the DaCapo jar we end up running is only minimally changed.
    cd ${wrkdir}
    mkdir -p dacapo
    cd dacapo
    wget "https://sourceforge.net/projects/dacapobench/files/archive/9.12-bach/dacapo-9.12-bach-src.zip/download" -O dacapo-9.12-bach.src.zip || exit $?
    mkdir -p src
    cd src
    unzip ../dacapo-9.12-bach.src.zip || exit $?
    cd benchmarks/harness/src/org/dacapo/harness/
    patch -p0 < ${HERE}/patches/dacapo.diff || exit $?
    cd ${wrkdir}/dacapo/src/benchmarks
    # Recompile only the harness, not the whole benchmark suite.
    ant harness || exit $?
    cd ${wrkdir}/dacapo
    wget "http://downloads.sourceforge.net/project/dacapobench/archive/9.12-bach/dacapo-9.12-bach.jar?r=https%3A%2F%2Fsourceforge.net%2Fprojects%2Fdacapobench%2Ffiles%2F&ts=1474888492&use_mirror=freefr" -O dacapo-9.12-bach.jar || exit $?
    mkdir bin
    cd bin
    unzip ../dacapo-9.12-bach.jar || exit $?
    # Overwrite the stock harness callback classes with the recompiled ones.
    cp ../src/benchmarks/harness/dist/org/dacapo/harness/Callback*.class org/dacapo/harness/ || exit $?
    zip -r ${HERE}/extbench/dacapo-9.12-bach.jar ./*
}
OCTANE_V=4852334f

# Clone the Octane JS benchmark suite at a pinned revision into extbench
# and apply local patches. Idempotent via directory check.
fetch_octane() {
    echo "\n===> Download Octane\n"
    if [ -d "${HERE}/extbench/octane" ]; then return; fi
    cd ${HERE}/extbench
    git clone https://github.com/chromium/octane || exit $?
    cd octane
    git checkout ${OCTANE_V} || exit $?
    patch < ${PATCH_DIR}/octane.diff || exit $?
    # Install our custom benchmark runner alongside the suite.
    cp ${PATCH_DIR}/octane_run_we.js run_we.js || exit $?
}
# Fetch and patch the Java Richards benchmark. Interactive: the user must
# accept the benchmark's licensing terms first. Idempotent via file check.
build_external_benchmarks() {
    echo "\n===> Download and build misc benchmarks\n"
    if [ -f "${HERE}/benchmarks/richards/java/richards.java" ]; then return; fi
    cat << EOF
In order to build these benchmarks, you need to agree to the licensing terms
of the Java Richards benchmark at:
http://web.archive.org/web/20050825101121/http://www.sunlabs.com/people/mario/java_benchmarking/index.html
EOF
    echo -n "Have you read and agreed to these terms? [Ny] " || exit $?
    read answer || exit $?
    case "$answer" in
        y | Y) ;;
        *) exit 1;;
    esac
    # Work in a throwaway directory; only the .java sources are kept.
    t=`mktemp -d` || exit $?
    cd $t || exit $?
    wget https://archive.org/download/richards-benchmark/richdbsrc.zip || exit $?
    unzip richdbsrc.zip || exit $?
    mv Benchmark.java Program.java COM/sun/labs/kanban/richards_deutsch_acc_virtual/ || exit $?
    cd COM/sun/labs/kanban/richards_deutsch_acc_virtual || exit $?
    mv Richards.java richards.java || exit $?
    cp *.java ${HERE}/benchmarks/richards/java || exit $?
    cd ${HERE}/benchmarks/richards/java || exit $?
    patch ${PATCH_ARGS} < ${PATCH_DIR}/java_richards.diff || exit $?
    rm -fr $t
}
LIBKALIBERA_VERSION=95a9207515139a3f49114d965a163ddd5576c857

# Clone the libkalibera statistics library at a pinned revision. Idempotent.
fetch_libkalibera() {
    echo "\n===> Fetch libkalibera\n"
    cd ${wrkdir}
    if ! [ -d libkalibera ]; then \
        git clone https://github.com/softdevteam/libkalibera.git || exit $?
        cd ${wrkdir}/libkalibera || exit $?
        git checkout ${LIBKALIBERA_VERSION} || exit $?
    fi
}
# Top-level build driver: each step is idempotent, so the script can be
# re-run after a failure and will resume where it left off.
build_warmup_stats
build_external_benchmarks
build_initial_krun
build_dacapo
fetch_octane
build_gcc
apply_gcc_lib_path
fetch_libkalibera
build_cpython
build_luajit
build_pypy
build_v8
build_gmake
build_jdk
# The remaining VMs are only built on Linux.
case `uname` in
    Linux)
        build_bootstrap_jdk
        build_graal
        fetch_maven
        build_truffleruby
        build_hhvm
        build_autoconf
        build_spidermonkey
        ;;
esac
clean_krun
|
// Fill urls and titles for dialogs (`DIALOG_MESSAGE`)
//
// In:
//
// - infractions ([users.Infraction])
// - user_info (Object)
//
// Out:
//
// - info (Object) - key is `src`, value { url, title, text }
//
'use strict';
const _ = require('lodash');
/**
 * Collect url/title/text info for infractions whose source is a private
 * dialog message (`DIALOG_MESSAGE`), storing results in `info_env.info`
 * keyed by message id.
 */
module.exports = function (N, apiPath) {

  N.wire.on(apiPath, async function dialogs_fetch_infraction_info(info_env) {
    let message_ids = info_env.infractions.filter(i => i.src_type === N.shared.content_type.DIALOG_MESSAGE)
                              .map(x => x.src);

    if (!message_ids.length) return;

    // Fetch messages
    //
    let messages = await N.models.users.DlgMessage.find()
                             .where('_id').in(message_ids)
                             .lean(true);

    // Fetch dialogs
    //
    let dialogs = await N.models.users.Dialog.find()
                            .where('_id').in(messages.map(x => x.parent))
                            .lean(true);

    let params = {
      user_id: info_env.user_info.user_id,
      usergroup_ids: info_env.user_info.usergroups
    };

    // users who can give out infractions could also see all dialogs
    if (!await N.settings.get('users_mod_can_add_infractions_dialogs', params, {})) {
      // Not a moderator: restrict to dialogs owned by the viewing user.
      dialogs = dialogs.filter(dialog => String(dialog.user) === String(info_env.user_info.user_id));
    }

    // Fetch opponents
    //
    let opponents = await N.models.users.User.find()
                              .where('_id').in(dialogs.map(x => x.to))
                              .lean(true);

    let dialogs_by_id = _.keyBy(dialogs, '_id');
    let users_by_id = _.keyBy(opponents, '_id');

    messages.forEach(message => {
      let dialog = dialogs_by_id[message.parent];

      // Dialog may have been filtered out above (or deleted); skip then.
      if (!dialog) return;

      info_env.info[message._id] = {
        // NOTE(review): opponents were fetched via `dialog.to` but looked
        // up here via `dialog.with` — presumably both name the opposite
        // party; confirm against the Dialog model.
        title: users_by_id[dialog.with]?.name,
        url: N.router.linkTo('users.dialog', {
          dialog_id: dialog._id,
          message_id: message._id
        }),
        text: message.md
      };
    });
  });
};
|
<gh_stars>0
console.log("Im Linked MF!!!!!!!");
|
#include <iostream>
using namespace std;
#define LINHAS 2
#define COLUNAS 50
// Compute the area of each of the COLUNAS triangles and print it.
// Row 0 of `triangulos` holds the bases and row 1 the heights; the area
// of triangle i (base * height / 2) is stored into areas[i].
void calculeAreas(double triangulos[LINHAS][COLUNAS], double areas[]) {
    for (int i = 0; i < COLUNAS; i++) {
        areas[i] = (triangulos[0][i] * triangulos[1][i]) / 2;
        cout << "A área do " << i + 1 << "º triângulo é " << areas[i] << endl;
    }
}
// Print `matriz` in a bracketed, Python-like list-of-lists layout.
void imprimaMatriz(double matriz[LINHAS][COLUNAS]) {
    cout << "[" << endl;
    for (int i = 0; i < LINHAS; i++) {
        cout << " [";
        for (int j = 0; j < COLUNAS; j++) {
            cout << matriz[i][j];
            // Separator between elements, omitted after the last one.
            if (j < COLUNAS - 1) {
                cout << ", ";
            }
        }
        cout << "]" << endl;
    }
    cout << "]" << endl;
}
int main() {
    // Only the first 3 entries of each row are initialised; the remaining
    // COLUNAS-3 columns are zero-initialised, so their "areas" print as 0.
    double triangulos[LINHAS][COLUNAS] = {{3, 4, 5}, {7, 8, 9}};
    double areas[COLUNAS];
    cout << "Matriz de Triângulos:" << endl;
    imprimaMatriz(triangulos);
    calculeAreas(triangulos, areas);
    return 0;
}
|
<reponame>ndesmic/vertex-pad
// Perspective projection matrix (row-major, intended for row-vector
// multiplication: the w column receives the view-space depth).
export function getProjectionMatrix(screenHeight, screenWidth, fieldOfView, zNear, zFar){
    const aspect = screenHeight / screenWidth;
    const fovRadians = fieldOfView * (Math.PI / 180);
    const focal = 1 / Math.tan(fovRadians / 2);
    const depthScale = zFar / (zFar - zNear);
    const depthOffset = (-zFar * zNear) / (zFar - zNear);
    return [
        [aspect * focal, 0,     0,           0],
        [0,              focal, 0,           0],
        [0,              0,     depthScale,  1],
        [0,              0,     depthOffset, 0]
    ];
}
// Build a camera->world ("point-at") transform from a position, a target
// to look toward, and an approximate up vector. Row-major with the
// translation in the last row (row-vector convention).
export function getPointAtMatrix(position, target, up){
    const forward = normalizeVector(subtractVector(target, position));
    // Re-orthogonalise `up` against `forward` (one Gram-Schmidt step).
    const newUp = normalizeVector(subtractVector(up, multiplyVector(forward, dotVector(up, forward))));
    const right = crossVector(newUp, forward);
    return [
        [right[0]   , right[1]   , right[2]   , 0],
        [newUp[0]   , newUp[1]   , newUp[2]   , 0],
        [forward[0] , forward[1] , forward[2] , 0],
        [position[0], position[1], position[2], 1]
    ];
}
// Build a world->camera ("look-at") view matrix: the transpose of the
// point-at rotation plus a projected translation — i.e. the inverse of
// getPointAtMatrix for the same arguments.
export function getLookAtMatrix(position, target, up){
    const forward = normalizeVector(subtractVector(target, position));
    // Re-orthogonalise `up` against `forward` (one Gram-Schmidt step).
    const newUp = normalizeVector(subtractVector(up, multiplyVector(forward, dotVector(up, forward))));
    const right = crossVector(newUp, forward);
    return [
        [right[0]                  , newUp[0]                  , forward[0]                  , 0],
        [right[1]                  , newUp[1]                  , forward[1]                  , 0],
        [right[2]                  , newUp[2]                  , forward[2]                  , 0],
        [-dotVector(position, right), -dotVector(position, newUp), -dotVector(position, forward), 1]
    ];
}
// Rotation about the X axis by `theta` radians (row-major homogeneous).
export function getRotationXMatrix(theta){
    const c = Math.cos(theta);
    const s = Math.sin(theta);
    return [
        [1, 0,  0, 0],
        [0, c, -s, 0],
        [0, s,  c, 0],
        [0, 0,  0, 1]
    ];
}

// Rotation about the Y axis by `theta` radians.
export function getRotationYMatrix(theta) {
    const c = Math.cos(theta);
    const s = Math.sin(theta);
    return [
        [ c, 0, s, 0],
        [ 0, 1, 0, 0],
        [-s, 0, c, 0],
        [ 0, 0, 0, 1]
    ];
}

// Rotation about the Z axis by `theta` radians.
export function getRotationZMatrix(theta) {
    const c = Math.cos(theta);
    const s = Math.sin(theta);
    return [
        [c, -s, 0, 0],
        [s,  c, 0, 0],
        [0,  0, 1, 0],
        [0,  0, 0, 1]
    ];
}
// Translation by (x, y, z).
// NOTE(review): this matrix carries the offsets in the last *column*
// (column-vector convention), whereas getPointAtMatrix and
// getProjectionMatrix put translation terms in the last *row* and
// multiplyMatrixVector multiplies a row vector from the left. Combining
// them directly would be inconsistent — confirm intended usage.
export function getTranslationMatrix(x, y, z) {
    return [
        [1, 0, 0, x],
        [0, 1, 0, y],
        [0, 0, 1, z],
        [0, 0, 0, 1]
    ];
}
// Standard 4x4 matrix product: out[r][c] = sum over k of a[r][k] * b[k][c].
export function multiplyMatrix(a, b){
    const out = [];
    for (let row = 0; row < 4; row++){
        const resultRow = new Array(4);
        for (let col = 0; col < 4; col++){
            let acc = 0;
            for (let k = 0; k < 4; k++){
                acc += a[row][k] * b[k][col];
            }
            resultRow[col] = acc;
        }
        out.push(resultRow);
    }
    return out;
}
// 4x4 identity matrix: ones on the main diagonal, zeros elsewhere.
export function getIdentityMatrix(){
    return Array.from({ length: 4 }, (_, r) =>
        Array.from({ length: 4 }, (_, c) => (r === c ? 1 : 0))
    );
}
// Row-vector times matrix: result[c] = sum over k of v[k] * matrix[k][c].
// A 3-component vector is treated as homogeneous [x, y, z, 1].
// Fix: the previous version `push`ed the w component onto the caller's
// array, silently mutating the argument; we now copy instead.
export function multiplyMatrixVector(vector, matrix){
    const v = vector.length === 3
        ? [vector[0], vector[1], vector[2], 1]
        : vector;
    return [
        v[0] * matrix[0][0] + v[1] * matrix[1][0] + v[2] * matrix[2][0] + v[3] * matrix[3][0],
        v[0] * matrix[0][1] + v[1] * matrix[1][1] + v[2] * matrix[2][1] + v[3] * matrix[3][1],
        v[0] * matrix[0][2] + v[1] * matrix[1][2] + v[2] * matrix[2][2] + v[3] * matrix[3][2],
        v[0] * matrix[0][3] + v[1] * matrix[1][3] + v[2] * matrix[2][3] + v[3] * matrix[3][3]
    ];
}
// Euclidean length (L2 norm) of a 3-vector.
export function getVectorMagnitude(vec) {
    const [x, y, z] = vec;
    return Math.sqrt(x * x + y * y + z * z);
}

// Component-wise sum of two 3-vectors.
export function addVector(a, b) {
    return [0, 1, 2].map(i => a[i] + b[i]);
}

// Component-wise difference a - b.
export function subtractVector(a, b) {
    return [0, 1, 2].map(i => a[i] - b[i]);
}

// Scale a 3-vector by the scalar s.
export function multiplyVector(vec, s) {
    return [0, 1, 2].map(i => vec[i] * s);
}

// Divide a 3-vector by the scalar s.
export function divideVector(vec, s) {
    return [0, 1, 2].map(i => vec[i] / s);
}

// Unit vector pointing in the direction of vec.
export function normalizeVector(vec) {
    return divideVector(vec, getVectorMagnitude(vec));
}
// Cross product a x b (right-handed).
export function crossVector(a, b) {
    const [ax, ay, az] = a;
    const [bx, by, bz] = b;
    return [
        ay * bz - az * by,
        az * bx - ax * bz,
        ax * by - ay * bx
    ];
}

// Dot (scalar) product of two 3-vectors.
export function dotVector(a, b) {
    return [0, 1, 2].reduce((acc, i) => acc + a[i] * b[i], 0);
}
///
///
// Intersect the infinite line through lineStart/lineEnd with the plane
// defined by a point on it and its normal. Returns the intersection point,
// or null when the line is parallel to the plane.
export function getVectorIntersectPlane(planePoint, planeNormal, lineStart, lineEnd){
    planeNormal = normalizeVector(planeNormal);
    const planeDot = dotVector(planePoint, planeNormal);
    const startDot = dotVector(lineStart, planeNormal);
    const endDot = dotVector(lineEnd, planeNormal);
    const t = (planeDot - startDot) / (endDot - startDot);
    // Fix: a parallel segment yields +/-Infinity, but a segment lying *in*
    // the plane yields NaN (0/0), which the old +/-Infinity check missed
    // and would propagate into the result. Number.isFinite rejects both.
    if (!Number.isFinite(t)) {
        return null;
    }
    const line = subtractVector(lineEnd, lineStart);
    const deltaToIntersect = multiplyVector(line, t);
    return addVector(lineStart, deltaToIntersect);
}
// True when `point` lies strictly on the side of the plane that the
// normal points away from (the "inside" half-space).
export function isPointInInsideSpace(point, planeNormal, planePoint){
    const unitNormal = normalizeVector(planeNormal);
    const toPlane = subtractVector(planePoint, point);
    return dotVector(unitNormal, toPlane) > 0;
}

// Canonical world-space axis vectors.
export const UP = [0, 1, 0];
export const FORWARD = [0, 0, 1];
export const RIGHT = [1, 0, 0];
|
<filename>tests/tests.py
from .context import pymps as ppm
import numpy as np
import unittest
import copy
import os
import json
class BasicTestSuite(unittest.TestCase):
"""Basic test cases."""
    @classmethod
    def setUpClass(cls):
        '''Called only once'''
        # Paths to the MPS fixture files used across the tests.
        cls.mps = os.path.abspath('tests/data/example.mps')
        cls.mps2 = os.path.abspath('tests/data/example2.mps')
        cls.mps3 = os.path.abspath('tests/data/example3.mps')
        cls.mps4 = os.path.abspath('tests/data/example4.mps')
        cls.mps_errors1 = os.path.abspath('tests/data/bad_example1.mps')
        cls.mps_errors2 = os.path.abspath('tests/data/bad_example2.mps')
        cls.mps_errors3 = os.path.abspath('tests/data/bad_example3.mps')
        cls.mps_errors4 = os.path.abspath('tests/data/bad_example4.mps')
        cls.mps_errors5 = os.path.abspath('tests/data/bad_example5.mps')
        cls.mps_errors6 = os.path.abspath('tests/data/bad_example6.mps')
        cls.mps_errors7 = os.path.abspath('tests/data/bad_example7.mps')
        cls.mps_errors8 = os.path.abspath('tests/data/bad_example8.mps')
        cls.mps_errors9 = os.path.abspath('tests/data/bad_example9.mps')
        # Expected parse of example.mps with fill=False: sparse entries
        # simply stay absent from the nested dicts.
        expected_no_fill = {
            "NAME": "EXAMPLE",
            "ROWS": {
                "R01": "L",
                "R02": "E",
                "R03": "G",
                "R04": "E",
                "COST": "N"
            },
            "COLUMNS": {
                "R01": {
                    "C01": 30.0,
                    "C02": -10.0,
                    "C03": 50.0
                },
                "R02": {
                    "C01": 5000.0,
                    "C02": 0.0,
                    "C03": -3.0
                },
                "R03": {
                    "C01": 0.2,
                    "C02": 0.1,
                    "C03": 0.0
                },
                "COST": {
                    "C01": 10.0,
                    "C02": 5.0,
                    "C03": 5.5
                },
                "R04": {
                    "C02": 0.2,
                    "C03": 0.3
                }
            },
            "RHS": {
                "R01": 1500.0,
                "R02": 200.0,
                "R03": 12.0,
                "R04": 0.0,
            },
            "BOUNDS": {
                "C01": {
                    "upper": 0.0
                },
                "C02": {
                    "lower": 0.0,
                    "upper": 0.0,
                },
                "C03": {
                    "lower": 0.0,
                }
            },
            "RANGES": {
                "R01": {
                    "lower": 1486.0,
                    "upper": 1500.0
                },
                "R02": {
                    "lower": 200.0,
                    "upper": 214.0
                },
                "R03": {
                    "lower": 12.0,
                    "upper": 26.0
                },
                "R04": {
                    "lower": -14.0,
                    "upper": 0.0
                }
            },
            "ALL_COLUMNS": [
                "C01",
                "C02",
                "C03"
            ],
            "OBJ_ROW": "COST",
            "RHS_id": "B",
            "RANGES_id": 'rhs',
            "BOUNDS_id": "BOUND"
        }
        # fill=True variant: missing coefficients become 0 and open bounds
        # become +/- infinity.
        # NOTE(review): np.NINF / np.Inf were removed in NumPy 2.0; this
        # suite requires NumPy < 2 (or should migrate to -np.inf / np.inf).
        expected_fill = copy.deepcopy(expected_no_fill)
        expected_fill['COLUMNS']['R04']['C01'] = 0
        expected_fill['RHS']['R04'] = 0
        expected_fill['BOUNDS']['C01']['lower'] = np.NINF
        expected_fill['BOUNDS']['C03']['upper'] = np.Inf
        cls.parsed_mps_fill = expected_fill
        cls.parsed_mps_no_fill = expected_no_fill
        cls.dual = os.path.abspath('tests/data/dual.mps')
        cls.dual2 = os.path.abspath('tests/data/dual2.mps')
        cls.dual3 = os.path.abspath('tests/data/dual3.mps')
        # Expected dual of dual.mps (already in standard canonical form).
        expected_dual = {
            "NAME": "EXAMPLE_DUAL",
            "OBJSENSE": "MAX",
            "OBJNAME": "DL",
            "ROWS": {
                "C01": "L",
                "C02": "L",
                "C03": "E",
                "DL": "N"
            },
            "COLUMNS": {
                "C01": {
                    "R01": -1.0,
                    "R02": 4.0,
                    "R03": 7.0
                },
                "C02": {
                    "R01": 2.0,
                    "R02": -5.0,
                    "R03": -8.0
                },
                "C03": {
                    "R01": -3.0,
                    "R02": 6.0,
                    "R03": 9.0
                },
                "DL": {
                    "R01": -13.0,
                    "R02": 14.0,
                    "R03": 15.0
                }
            },
            "RHS": {
                "C01": 10.0,
                "C02": -11.0,
                "C03": 12.0,
            },
            "BOUNDS": {
                "R01": {
                    "lower": 0
                },
                "R02": {
                    "lower": 0
                },
                "R03": {
                    "lower": np.NINF,
                    "upper": np.Inf
                }
            },
        }
        # Variant with UP/LO/FX variables: adds the dual-bound column C02_db.
        expected_dual2 = copy.deepcopy(expected_dual)
        expected_dual2['COLUMNS']["DL"] = {
            "R01": 1.0,
            "R02": -27.0,
            "R03": -53.0
        }
        expected_dual2["RHS"]["DL"] = -95.0
        expected_dual2["BOUNDS"]["C02_db"] = {
            "lower": 0
        }
        expected_dual2["COLUMNS"]["C01"]["C02_db"] = 0.0
        expected_dual2["COLUMNS"]["C02"]["C02_db"] = -1.0
        expected_dual2["COLUMNS"]["DL"]["C02_db"] = -3.0
        # Snapshot for dual3 is taken *before* the C03 entries below are
        # deleted from expected_dual2 — the ordering here is deliberate.
        expected_dual3 = copy.deepcopy(expected_dual2)
        del expected_dual2["COLUMNS"]["C03"]
        del expected_dual2["RHS"]["C03"]
        del expected_dual2["ROWS"]["C03"]
        new_rl = "C01_db"
        expected_dual3['COLUMNS']["DL"] = {
            "R01": -8.0,
            "R02": -6.0,
            "R03": -20.0,
            new_rl: -4.0
        }
        expected_dual3["COLUMNS"]["C01"] = {
            "R01": 1.0,
            "R02": -4.0,
            "R03": -7.0,
            new_rl: -1.0
        }
        expected_dual3["COLUMNS"]["C02"] = {
            "R01": 2.0,
            "R02": -5.0,
            "R03": -8.0,
            new_rl: 0.0
        }
        expected_dual3["RHS"] = {
            "C01": -10.0,
            "C02": -11.0,
            "C03": 12.0,
            "DL": -50.0
        }
        expected_dual3["COLUMNS"]["C03"][new_rl] = 0.0
        expected_dual3["BOUNDS"][new_rl] = {
            "lower": 0
        }
        del expected_dual3["BOUNDS"]["C02_db"]
        cls.parsed_dual = expected_dual
        cls.parsed_dual2 = expected_dual2
        cls.parsed_dual3 = expected_dual3
def test_make_dual(self):
# test dual on problem already in standard canonical form
mps = ppm.parse_mps(self.dual, fill=True)
dual = ppm.make_dual(mps)
self.assertDictEqual(dual, self.parsed_dual)
def test_make_dual2(self):
# test dual on problem with UP, LO and FX variables
mps = ppm.parse_mps(self.dual2, fill=True)
dual = ppm.make_dual(mps)
self.assertDictEqual(dual, self.parsed_dual2)
def test_make_dual3(self):
# test dual on problem with a single LO <= x <= UP variable
self.maxDiff = None
mps = ppm.parse_mps(self.dual3, fill=True)
dual = ppm.make_dual(mps)
self.assertDictEqual(dual, self.parsed_dual3)
def test_parsed_as_mps(self):
self.maxDiff = None
mps = ppm.parse_mps(self.dual, fill=True)
dual = ppm.make_dual(mps)
dual_mps = ppm.parsed_as_mps(dual)
with open('tests/data/dual_dual.mps', 'r') as fin:
dat = fin.read()
self.assertEqual(dual_mps, dat)
def test_parse_mps_no_fill(self):
mps = ppm.parse_mps(self.mps, fill=False)
self.assertDictEqual(mps, self.parsed_mps_no_fill)
def test_parse_mps_fill(self):
mps = ppm.parse_mps(self.mps, fill=True)
self.assertDictEqual(mps['ROWS'], self.parsed_mps_fill['ROWS'])
    def test_parse_mps_bounds(self):
        """BOUNDS parsing: implicit lower bounds, MI/PL markers and
        unusual bound combinations."""
        # test bounds where LO is omitted and UP is either <0 or >0
        mps = ppm.parse_mps(self.mps2, fill=True)
        b_expected = {
            "C01": {
                "lower": 0,
                "upper": 2.0
            },
            "C02": {
                "lower": np.NINF,
                "upper": 0.0,
            },
            "C03": {
                "lower": np.NINF,
                "upper": np.Inf
            }
        }
        rhs_expected = {
            "R01": 1500.0,
            "R02": 200.0,
            "R03": 12.0,
            "R04": 0.0,
        }
        self.assertDictEqual(mps['BOUNDS'], b_expected)
        self.assertDictEqual(mps['RHS'], rhs_expected)
        # test bounds MI & PL
        mps = ppm.parse_mps(self.mps3, fill=True)
        # note: field 4 for MI should be ignored, and the expected value
        # for C01 is a free variable
        expected = {
            "C01": {
                "lower": np.NINF,
                "upper": np.Inf,
            },
            "C02": {
                "lower": 0,
                "upper": np.Inf,
            },
            "C03": {
                "lower": 0,
                "upper": np.Inf
            }
        }
        self.assertDictEqual(mps['BOUNDS'], expected)
        # test funky bounds
        mps = ppm.parse_mps(self.mps4, fill=True)
        expected = {
            "C01": {
                "lower": 0,
                "upper": 2,
            },
            "C02": {
                "lower": 0,
                "upper": np.Inf,
            },
            "C03": {
                "lower": 0,
                "upper": np.Inf
            }
        }
        self.assertDictEqual(mps['BOUNDS'], expected)
def test_prase_mps_example_with_errors(self):
# test float value parse fail in ROW
with self.assertRaises(ValueError) as context:
mps = ppm.parse_mps(self.mps_errors1)
self.assertEqual(
"ROW value must be a float, found: moo",
str(context.exception)
)
# test unknown indicator
with self.assertRaises(ValueError) as context:
mps = ppm.parse_mps(self.mps_errors2)
self.assertEqual(
"Unknown indicator CATS found.",
str(context.exception)
)
# test float value parse fail in RHS
with self.assertRaises(ValueError) as context:
mps = ppm.parse_mps(self.mps_errors3)
self.assertEqual(
"RHS value must be a float, found: moo",
str(context.exception)
)
# test bad bound (lower > upper)
with self.assertRaises(ValueError) as context:
mps = ppm.parse_mps(self.mps_errors4)
self.assertEqual(
"Lower bound is greater than upper bound: lower -> 10.0, upper -> 0.0",
str(context.exception)
)
# test duplicated BOUNDs
with self.assertRaises(ValueError) as context:
mps = ppm.parse_mps(self.mps_errors5)
self.assertEqual(
"BOUND on COLUMN C03 specified twice!",
str(context.exception)
)
# test missing idicator
with self.assertRaises(ValueError) as context:
mps = ppm.parse_mps(self.mps_errors6)
self.assertEqual(
"Indicator record 'COLUMNS' is missing!",
str(context.exception)
)
# reference non-existant row
with self.assertRaises(AssertionError) as context:
mps = ppm.parse_mps(self.mps_errors7)
self.assertEqual(
"COLUMNS makes reference to non-existant ROW(s) {'R05'}!",
str(context.exception)
)
# test missing RHS if RANGE set
with self.assertRaises(AssertionError) as context:
mps = ppm.parse_mps(self.mps_errors8)
self.assertEqual(
"You must specify a RHS for R04 if setting a RANGE on it.",
str(context.exception)
)
# ambiguous bound
with self.assertRaises(ValueError) as context:
mps = ppm.parse_mps(self.mps_errors9)
self.assertEqual(
"The BOUND ['UP', '01', '2'] is ambiguous.",
str(context.exception)
)
if __name__ == '__main__':
unittest.main()
|
# Import Dependencies
import requests
from bs4 import BeautifulSoup
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import RegexpTokenizer

# Set URL to be scraped
url = 'http://www.amazon.com/product-reviews/B003V0JPNC'

# Request webpage and parse.
response = requests.get(url)
# Fail fast on HTTP errors instead of silently tokenising an error page.
response.raise_for_status()
soup = BeautifulSoup(response.text, 'html.parser')

# Extract Product reviews
reviews = soup.find_all('span', {'data-hook': 'review-body'})

# Remove punctuation and stop words from reviews.
tokenizer = RegexpTokenizer(r'\w+')
# Hoist the stopword list into a set once: stopwords.words() re-reads the
# corpus file on every call and list membership is O(n) per token, which
# made the original loop quadratic in practice.
stop_words = set(stopwords.words('english'))

cleaned_reviews = [
    [word for word in tokenizer.tokenize(review.text) if word not in stop_words]
    for review in reviews
]

# Print list of lists of cleaned reviews
print(cleaned_reviews)
|
/**
* Copyright (C) 2013 Mot<EMAIL> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.motown.ocpp.websocketjson.servlet;
import io.motown.domain.api.chargingstation.ChargingStationId;
import io.motown.ocpp.websocketjson.OcppJsonService;
import org.atmosphere.config.service.WebSocketHandlerService;
import org.atmosphere.cpr.AtmosphereRequest;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.websocket.WebSocket;
import org.atmosphere.websocket.WebSocketEventListenerAdapter;
import org.atmosphere.websocket.WebSocketStreamingHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import java.io.IOException;
import java.io.Reader;
/**
 * Atmosphere WebSocket endpoint for OCPP-J charging stations. The charging
 * station identifier is taken from the tail of the request URI and incoming
 * messages are delegated to {@link OcppJsonService}.
 */
@WebSocketHandlerService
public class OcppWebSocketServlet extends WebSocketStreamingHandlerAdapter {

    private static final Logger LOG = LoggerFactory.getLogger(OcppWebSocketServlet.class);

    private OcppJsonService ocppJsonService;

    public OcppWebSocketServlet() {
        // TODO refactor this so application context is not needed - <NAME>, March 12th 2014
        ApplicationContext context = ApplicationContextProvider.getApplicationContext();
        this.ocppJsonService = context.getBean(OcppJsonService.class);
    }

    public OcppWebSocketServlet(OcppJsonService ocppJsonService) {
        this.ocppJsonService = ocppJsonService;
    }

    /**
     * Registers the new socket under its charging station identifier and
     * installs a listener that deregisters it again on disconnect.
     */
    @Override
    public void onOpen(WebSocket webSocket) throws IOException {
        final String chargingStationIdentifier = determineIdentifier(webSocket);
        ocppJsonService.addWebSocket(chargingStationIdentifier, webSocket);

        webSocket.resource().addEventListener(new WebSocketEventListenerAdapter() {
            @Override
            public void onDisconnect(AtmosphereResourceEvent event) {
                LOG.info("Client [{}] disconnected", chargingStationIdentifier);
                ocppJsonService.removeWebSocket(chargingStationIdentifier);
            }
        });
    }

    /** Streams an incoming text frame to the OCPP JSON message handler. */
    @Override
    public void onTextStream(WebSocket webSocket, Reader reader) throws IOException {
        String chargingStationId = determineIdentifier(webSocket);
        ocppJsonService.handleMessage(new ChargingStationId(chargingStationId), reader);
    }

    public void setOcppJsonService(OcppJsonService ocppJsonService) {
        this.ocppJsonService = ocppJsonService;
    }

    private String determineIdentifier(WebSocket webSocket) {
        return determineIdentifierFromRequest(webSocket.resource().getRequest());
    }

    /**
     * Extracts the charging station identifier: everything in the request
     * URI after the context path and servlet path.
     */
    private String determineIdentifierFromRequest(AtmosphereRequest request) {
        // request.getPathInfo() is said to be unreliable in several containers
        String contextPath = request.getContextPath();
        String servletPath = request.getServletPath() + "/";
        return request.getRequestURI().substring((contextPath + servletPath).length());
    }
}
|
# Settings for the Metabase backpack container.

# Image version; callers may override by exporting VERSION beforehand.
readonly DEFAULT_VERSION=0.40.1
export VERSION=${VERSION:-$DEFAULT_VERSION}

# Naming: image, container and virtual host all derive from the moniker.
readonly MONIKER=metabase
readonly BASE_NAME=backpack-$MONIKER
readonly IMAGE_NAME=alexanderfefelov/$BASE_NAME
readonly CONTAINER_NAME=$BASE_NAME
readonly HOST_NAME=$MONIKER.backpack.test

# Seconds to wait for the container to become healthy.
readonly WAIT_TIMEOUT=600

# Backing MySQL connection details.
readonly DB_HOST=mysql-main-master.backpack.test
readonly DB_PORT=3306
readonly DB_DATABASE=metabase
readonly DB_ROOT_USERNAME=root
readonly DB_ROOT_PASSWORD=camycorymicu
readonly DB_USERNAME=metabase_licideophaig
readonly DB_PASSWORD=msorphorylac

# Expose JMX metrics via the Jolokia JVM agent.
readonly JAVA_OPTS="-javaagent:/app/jolokia/jolokia-jvm-1.6.2-agent.jar=config=/app/jolokia/jolokia.properties"

. ../../lib/settings/health.sh
. ../../lib/settings/log.sh

# Containers that must be running before this one starts.
readonly REQUIRED_CONTAINERS='
backpack-mysql-main-master
'
|
/*
* Hex-Rays Decompiler project
* Copyright (c) 2007-2019 by Hex-Rays, <EMAIL>
* ALL RIGHTS RESERVED.
*
* Sample plugin for Hex-Rays Decompiler.
* It shows known value ranges of a register using get_valranges().
*
* Unfortunately this plugin is of limited use because:
* - simple cases where a single value is assigned to a register
* are automatically handled by the decompiler and the register
* is replaced by the value
* - too complex cases where the register gets its value from untrackable
* sources, it fails
* - only value ranges at the basic block start are shown
*/
#include <hexrays.hpp>
#include <frame.hpp>
// Hex-Rays API pointer
hexdsp_t *hexdsp = NULL;
//--------------------------------------------------------------------------
// Plugin load hook: refuse to load unless the Hex-Rays decompiler is
// present (init_hexrays_plugin also initialises the `hexdsp` dispatcher).
int idaapi init(void)
{
  if ( !init_hexrays_plugin() )
    return PLUGIN_SKIP; // no decompiler
  const char *hxver = get_hexrays_version();
  msg("Hex-rays version %s has been detected, %s ready to use\n", hxver, PLUGIN.wanted_name);
  return PLUGIN_KEEP;
}
//--------------------------------------------------------------------------
// Plugin unload hook: release the decompiler only if init() acquired it
// (hexdsp stays NULL when the decompiler was absent).
void idaapi term(void)
{
  if ( hexdsp != NULL )
    term_hexrays_plugin();
}
//--------------------------------------------------------------------------
// Interactive entry point: for the register/stack variable under the
// cursor, generate microcode for the enclosing function and display the
// value ranges known at the start of the containing basic block.
bool idaapi run(size_t)
{
  ea_t ea = get_screen_ea();
  func_t *pfn = get_func(ea);
  if ( pfn == NULL )
  {
    warning("Please position the cursor within a function");
    return true;
  }
  flags_t F = get_flags(ea);
  if ( !is_code(F) )
  {
    warning("Please position the cursor on an instruction\n");
    return true;
  }
  gco_info_t gco;
  if ( !get_current_operand(&gco) )
  {
    warning("Could not find a register or stkvar in the current operand");
    return true;
  }
  // generate microcode
  hexrays_failure_t hf;
  mba_ranges_t mbr(pfn);
  mbl_array_t *mba = gen_microcode(mbr, &hf, NULL, DECOMP_WARNINGS);
  if ( mba == NULL )
  {
    warning("%a: %s", hf.errea, hf.desc().c_str());
    return true;
  }
  // prepare mlist for the current operand
  mlist_t list;
  if ( !gco.append_to_list(&list, mba) )
  {
    // NOTE(review): the failure paths below return false while the early
    // guards above return true — confirm the asymmetry is intentional.
    warning("Failed to represent %s as microcode list", gco.name.c_str());
    delete mba;
    return false;
  }
  op_parent_info_t ctx;
  mop_t *mop = mba->find_mop(&ctx, ea, gco.is_def(), list);
  if ( mop == NULL )
  {
    warning("Could not find %s in the microcode, sorry\n"
            "Probably it has been optimized away\n", gco.name.c_str());
    delete mba;
    return false;
  }
  qstring opname;
  mop->print(&opname, SHINS_SHORT);
  tag_remove(&opname); // strip colour tags before displaying the name
  valrng_t vr;
  int vrflags = VR_AT_START | VR_EXACT;
  if ( ctx.blk->get_valranges(&vr, vivl_t(*mop), ctx.topins, vrflags) )
  {
    qstring vrstr;
    vr.print(&vrstr);
    warning("Value ranges of %s at %a: %s",
            opname.c_str(),
            ctx.blk->start,
            vrstr.c_str());
  }
  else
  {
    warning("Cannot find value ranges of %s", opname.c_str());
  }
  // We must explicitly delete the microcode array
  delete mba;
  return true;
}
//--------------------------------------------------------------------------
static const char comment[] = "Sample15 plugin for Hex-Rays decompiler";

//--------------------------------------------------------------------------
//
// PLUGIN DESCRIPTION BLOCK
//
//--------------------------------------------------------------------------
// Exported plugin descriptor: the structure the IDA loader looks up.
plugin_t PLUGIN =
{
  IDP_INTERFACE_VERSION,
  0, // plugin flags
  init, // initialize
  term, // terminate. this pointer may be NULL.
  run, // invoke plugin
  comment, // long comment about the plugin
           // it could appear in the status line
           // or as a hint
  "", // multiline help about the plugin
  "Find value ranges of the register", // the preferred short name of the plugin
  NULL // the preferred hotkey to run the plugin
};
|
<reponame>exKAZUu-Research/SmartMotivator
// @flow
import React from 'react';
import { Button, StyleSheet, Text, View } from 'react-native';
import { GS } from '../../style';
import { ButtonBox } from '../../design/ButtonBox';
import { D, i18n } from '../../../i18n/index';
type Props = {|
gotoTermsEmail: () => void,
gotoTermsPreparedAccount: () => void,
gotoTermsNoAccount: () => void,
|};
export function MenuComponent(props: Props) {
return (
<View style={GS.flex}>
<View style={GS.flex2} />
<Text style={S.logo}>スマートモチベーター</Text>
<View style={GS.flex2} />
<ButtonBox>
<Button title={i18n(D().startup.registration.menu.signupOrLoginWithEmail)} onPress={props.gotoTermsEmail} />
</ButtonBox>
<View style={GS.flex} />
<ButtonBox>
<Button
title={i18n(D().startup.registration.menu.loginWithIdAndPassword)}
onPress={props.gotoTermsPreparedAccount}
/>
</ButtonBox>
<View style={GS.flex} />
<ButtonBox>
<Button title={i18n(D().startup.registration.menu.signupWithoutEmail)} onPress={props.gotoTermsNoAccount} />
</ButtonBox>
<View style={GS.flex6} />
</View>
);
}
// Local styles: large, centered wordmark for the logo text.
const S = StyleSheet.create({
  logo: {
    fontSize: 30,
    textAlign: 'center',
  },
});
|
#!/bin/bash
# Arch Linux post-chroot configuration: timezone, locale, hostname,
# NetworkManager, encrypted swap, initramfs, GRUB (UEFI) and root password.
# Intended to be run inside arch-chroot on a freshly installed system.

# Set Timezone
# (fixed: the extra symlink to /etc/local was a typo and configured nothing;
# the timezone symlink target is /etc/localtime)
ln -sf /usr/share/zoneinfo/US/Central /etc/localtime
hwclock --systohc

# Localization
sed -i s/'#en_US.UTF-8 UTF-8'/'en_US.UTF-8 UTF-8'/ /etc/locale.gen
locale-gen
echo LANG=en_US.UTF-8 > /etc/locale.conf

# VConsole Config
echo '' > /etc/vconsole.conf

# Network Configuration
echo Enter hostname:
read HOSTNAME
echo "$HOSTNAME" > /etc/hostname
echo "
127.0.0.1 localhost
::1 localhost
127.0.0.1 $HOSTNAME.localdomain $HOSTNAME" >> /etc/hosts

# Network Manager
pacman -Sy --noconfirm networkmanager
systemctl enable NetworkManager

# Encrypted Swap: dm-crypt mapping goes in crypttab, the mount entry in fstab
echo 'swap LABEL=cryptswap /dev/urandom swap,offset=2048,cipher=aes-xts-plain64,size=512' >> /etc/crypttab
# fixed: the swap mount entry belongs in /etc/fstab (the old target
# /etc/fdisk is not a configuration file and was never read by anything)
echo '/dev/mapper/swap none swap defaults 0 0' >> /etc/fstab

# Install GRUB bootloader
#pacman -Sy --noconfirm grub grub-bios

# GRUB encrypted drive
sed -i 's#^\(GRUB_CMDLINE_LINUX="\)#GRUB_CMDLINE_LINUX="cryptdevice=/dev/sda2:cryptroot#' /etc/default/grub
sed -i 's/#GRUB_ENABLE_CRYPTODISK=y/GRUB_ENABLE_CRYPTODISK=y/' /etc/default/grub

# mkinitcpio config: insert the encrypt hook before filesystems
sed -i '/^HOOK/s/filesystems/encrypt filesystems/' /etc/mkinitcpio.conf

# Initramfs configuration
mkinitcpio -p linux

# Intel microcode
pacman -Sy --noconfirm intel-ucode

# Mount EFI Boot Partition
mkdir /boot/EFI
mount /dev/sda1 /boot/EFI

# Configure GRUB bootloader
grub-install --target=x86_64-efi --bootloader-id=grub_uefi --efi-directory=/boot/EFI --recheck
grub-mkconfig -o /boot/grub/grub.cfg

# Set root password
echo Enter new root password
read NEW_PASS
echo "root:$NEW_PASS" | chpasswd
clear
|
<filename>client/src/components/templates/Asset/ArtworkDetails.tsx
import React, { useState } from 'react'
import Moment from 'react-moment'
import { DDO, MetaData, File } from '@nevermined-io/nevermined-sdk-js'
import styles from './ArtworkDetails.module.scss'
import Web3 from 'web3'
import ArtworkImage from '../../atoms/ArtworkImage'
import Button from '../../atoms/Button'
import FullHeightView, {ContentRow} from '../../atoms/FullHeightView'
import ArtworkFile from './ArtworkFile'
import { CloseIcon, ShareIcon, FullscreenIcon } from '../../icons'
// Props for the artwork detail view: asset metadata, its resolved DDO, and
// NFT sale details (shape not declared here — used for royalties/nftSupply).
interface ArtworkDetailsProps {
  metadata: MetaData
  ddo: DDO
  nftDetails: any
}
/**
 * Full-page artwork detail view: image area with share/close/fullscreen
 * controls, a sidebar with creator/ownership metadata, and a sub-sidebar
 * with price and download actions.
 */
export default function ArtworkDetails({ metadata, ddo, nftDetails }: ArtworkDetailsProps) {
  const [fullscreen, setFullscreen] = useState(false)
  const { main, additionalInformation } = metadata
  // Convert the on-chain price from wei to a human-readable amount;
  // stays falsy when no price is set.
  const price = main.price && Web3.utils.fromWei(main.price.toString())
  // Guard: everything below dereferences files and categories.
  if (!main.files || !additionalInformation || !additionalInformation.categories) {
    return <h2>Missing files or additional information</h2>
  }
  // Only the first file/category is rendered.
  const file = main.files[0]
  const category = additionalInformation.categories[0]
  return (
    <FullHeightView
      fullscreen={fullscreen}
      main={(
        <>
          <ContentRow>
            <ShareIcon size={20} />
            <CloseIcon size={14} />
          </ContentRow>
          <div className={styles.imageContainer}>
            <ArtworkImage
              did={ddo.id}
              file={file}
            />
          </div>
          <ContentRow>
            <span />
            <span onClick={() => setFullscreen(!fullscreen)} className={styles.clickable}>
              <FullscreenIcon size={20} />
            </span>
          </ContentRow>
        </>
      )}
      sidebar={(
        <>
          <h1>{main.name}</h1>
          <div className={styles.cols}>
            <div>
              <h4 className={styles.upper}>Creator:</h4>
              <div>{additionalInformation.copyrightHolder}</div>
            </div>
            <div>
              <h4 className={styles.upper}>Owner:</h4>
              <div>@someartist</div>
            </div>
            <div>
              <h4 className={styles.upper}>Created:</h4>
              <div>
                <Moment
                  date={main.dateCreated}
                  format="LL"
                  interval={0}/>
              </div>
            </div>
          </div>
          <p>
            {additionalInformation.description}
          </p>
          <div className={styles.spacer} />
          <h2>About the creator</h2>
          <h3>@someartist</h3>
          <p>
            Deserunt esse laboris ut voluptate cupidatat cillum do laborum
            aliquip et dolore aute do minim sunt in eiusmod reprehenderit
            laborum ullamco ut consectetur enim do ut voluptate ullamco eiusmod occaecat.
          </p>
          <Button secondary fullWidth>go to profile</Button>
          <h2>Additional information</h2>
          <div className={styles.infoRow}>
            <strong>Category:</strong>
            <span>{category}</span>
          </div>
          <div className={styles.infoRow}>
            <strong>License:</strong>
            <span>{main.license}</span>
          </div>
          <div className={styles.infoRow}>
            <strong>Author:</strong>
            <span>{main.author}</span>
          </div>
          <div className={styles.infoRow}>
            <strong>Copyright holder:</strong>
            <span>{additionalInformation.copyrightHolder}</span>
          </div>
          <div className={styles.infoRow}>
            <strong>DID:</strong>
            <span>{ddo.id}</span>
          </div>
          <div className={styles.infoRow}>
            <strong>Royalties:</strong>
            <span>{nftDetails.royalties} %</span>
          </div>
        </>
      )}
      subsidebar={(
        <>
          <div className={styles.priceTitleWrapper}>
            <strong>Current price</strong>
            <span>Edition of {nftDetails.nftSupply}</span>
          </div>
          <div className={styles.priceWrapper}>
            <strong>{price} NVMD</strong>
          </div>
          <ArtworkFile
            ddo={ddo}
            file={file}
            price={Number(price)}
          />
          <div className={styles.buttonSpacer} />
          <Button fullWidth>Download High-Res File</Button>
        </>
      )}
    />
  )
}
|
# Compile each translation unit of the shapes demo in turn, then link all
# objects into the `app` executable. Same compiler invocations as listing
# every g++ -c call explicitly.
for unit in square rect triangle circle app; do
  g++ -c "${unit}.cpp"
done
g++ square.o rect.o triangle.o circle.o app.o -o app
|
package com.ceiba.combo.comando.fabrica;
import com.ceiba.combo.comando.ComandoCombo;
import com.ceiba.combo.modelo.entidad.Combo;
import org.springframework.stereotype.Component;
// Spring component: factory that maps the incoming ComandoCombo command DTO
// onto the Combo domain entity.
@Component
public class FabricaCombo {
    // Build a Combo from the command's id, nombre and precio fields.
    public Combo crear(ComandoCombo comandoCombo){
        return new Combo(comandoCombo.getId(),
                comandoCombo.getNombre(),
                comandoCombo.getPrecio()
        );
    }
}
|
#!/bin/bash -e
# Smoke test: feed the SVT-HEVC encoder 300 frames (10 s) of random
# 320x240 YUV data and write an IVF bitstream.
dd if=/dev/urandom bs=115200 count=300 of=test.yuv # 10 seconds video
# NOTE(review): the trailing " ." looks like a stray argument left on the
# command line — confirm SvtHevcEncApp actually tolerates it.
SvtHevcEncApp -i test.yuv -w 320 -h 240 -b out.ivf .
|
#include <vector>
#include <iostream>
// Print the elements of a vector separated by single spaces, followed by a
// newline. Fixed: take the vector by const reference (the previous
// pass-by-value copied the entire container on every call) and index with
// std::size_t to avoid the signed/unsigned comparison against v.size().
void print(const std::vector<int>& v)
{
    for (std::size_t i = 0; i < v.size(); i++)
        std::cout << v[i] << " ";
    std::cout << "\n";
}
// Demo driver: print a small fixed vector.
int main()
{
    std::vector<int> v = {1, 2, 3, 4, 5};
    print(v);
    return 0;
}
|
<filename>packages/amplication-server/src/models/Workspace.ts
import { Field, ObjectType } from '@nestjs/graphql';
import { User } from './User'; // eslint-disable-line import/no-cycle
import { App } from './App'; // eslint-disable-line import/no-cycle
import { GitOrganization } from './GitOrganization';
// GraphQL object type for a workspace: the top-level container owning apps,
// users and linked git organizations.
@ObjectType({
  isAbstract: true,
  description: undefined
})
export class Workspace {
  // Unique workspace identifier.
  @Field(() => String, {
    nullable: false,
    description: undefined
  })
  id!: string;
  @Field(() => Date, {
    nullable: false,
    description: undefined
  })
  createdAt!: Date;
  @Field(() => Date, {
    nullable: false,
    description: undefined
  })
  updatedAt!: Date;
  // Display name of the workspace.
  @Field(() => String, {
    nullable: false,
    description: undefined
  })
  name!: string;
  // Relations below are optional on the TS side: they are populated only
  // when the resolver loads them.
  @Field(() => [App])
  apps?: App[];
  @Field(() => [GitOrganization], { nullable: true })
  gitOrganizations?: GitOrganization[];
  @Field(() => [User])
  users?: User[];
}
|
def swap_two_numbers(a, b):
    """Exchange two values and return them as ``(b, a)``.

    Fixed: the previous add/subtract trick silently loses precision for
    large floats (e.g. ``1e16 + 1.0`` rounds) and raises ``TypeError`` for
    non-numeric operands. Tuple unpacking is exact, type-agnostic, and
    backward compatible for all numeric inputs.
    """
    a, b = b, a
    return a, b
# Demonstration: print the two values before and after swapping.
a = 6
b = 10
print("The value of a is %d and b is %d" %(a, b))
a,b = swap_two_numbers(a,b)
print("The value of a is %d and b is %d" %(a, b))
|
#!/bin/bash
set -e
# Generate API .rst stubs for the runlmc package; every test_*.py found
# below the current directory is passed as an exclude pattern.
sphinx-apidoc -H runlmc -A "Vladimir Feinberg" --separate --force --output-dir=doc/_generated runlmc/ $(echo $(find . -iname "test_*.py"))
# Reuse the hand-written index page, then build HTML with one job per core.
cp doc/index.rst doc/_generated/
cd doc
PYTHONPATH=.. sphinx-build -j $(nproc) -c . -b html _generated/ _generated/_build/
|
import {useState, useEffect, useContext} from 'react'
import { makeStyles } from '@material-ui/core/styles'
import DoneIcon from '@material-ui/icons/Done';
import { Tooltip } from '@material-ui/core'
import { IconButton } from '@material-ui/core'
import { AuthContext } from '@context/AuthContext'
import {
doFetch,
isServerDisconnected,
} from '@utils/network'
import { contentValidate } from '@utils/contentValidation'
import { RETRIEVING, VALIDATION_FINISHED } from '@common/constants';
// Styles keyed off the `active` prop: active buttons render with a white
// background (green on hover), inactive ones stay transparent.
const useStyles = makeStyles(theme => ({
  root: {
    color: theme.palette.primary.main,
    backgroundColor: props => (props.active ? '#ffffff' : 'transparent'),
    '&:hover': {
      color: props => (props.active ? '#ffffff' : theme.palette.primary.main),
      backgroundColor: props => (props.active ? '#07b811' : '#ffffff'),
    },
    border: '1px solid #0089C7',
  },
}))
/**
 * Icon button that, when clicked, fetches a file from the git server and
 * runs content validation on it. Progress is reported through the
 * onAction / onContentValidation callbacks; the fetch is triggered by
 * flipping the submitValidateContent state flag.
 */
function ValidateContent({ active, server, owner, repo, bookId, filename, onRefresh, onContentValidation, onAction }) {
  const {
    state: {
      authentication,
    },
  } = useContext(AuthContext)
  // Flag that kicks off the async fetch+validate effect below.
  const [submitValidateContent, setSubmitValidateContent] = useState(false)
  useEffect(() => {
    if ( !submitValidateContent ) return;
    async function doSubmitValidateContent() {
      let errorCode
      let _errorMessage = null
      let content = null
      let fetchError = true
      let url = `${server}/${owner}/${repo}/raw/branch/master/$(unknown)`
      try {
        onAction && onAction(RETRIEVING)
        content = await doFetch(url, authentication)
          .then(response => {
            if (response?.status !== 200) {
              errorCode = response?.status
              console.warn(`doFetch - error fetching file $(unknown),
                status code ${errorCode},
                URL=${url},
                response:`,response)
              fetchError = true
              return null
            }
            fetchError = false
            return response?.data
          })
        if (fetchError) {
          _errorMessage = `Error retrieving $(unknown)`
          content = null // just to be sure
        }
      } catch (e) {
        const message = e?.message
        const disconnected = isServerDisconnected(e)
        console.warn(`doFetch - error fetching file $(unknown),
          message '${message}',
          disconnected=${disconnected},
          URL=${url},
          error message:`,
        e)
        _errorMessage = `Network error: ${message}`
        content = null
      }
      if (content) {
        // do the validation
        onContentValidation && onContentValidation(null) // set to null first
        const data = await contentValidate(owner, repo, bookId.toUpperCase(), filename, content)
        onContentValidation && onContentValidation(data) // set to results
        onAction && onAction(VALIDATION_FINISHED)
      }
      // Re-arm the button for the next click.
      setSubmitValidateContent(false)
    }
    doSubmitValidateContent()
  }, [submitValidateContent, server, owner, repo, filename, bookId, onRefresh])
  // Tooltip fallback when no single filename applies (whole-repo validation).
  let articleList
  if ( repo.endsWith('ta') ) {
    articleList = "Translation Academy Articles"
  } else if ( repo.endsWith('tw') ) {
    articleList = "Translation Word Articles"
  }
  const classes = useStyles({ active })
  // NOTE(review): useStyles defines `root`, but `classes.iconButton` is
  // referenced here — the styles above likely never apply; confirm intent.
  return (
    <Tooltip title={ `Validate Content for ${filename || articleList}` }>
      <IconButton className={classes.iconButton}
        onClick={() => setSubmitValidateContent(true)}
        aria-label="Validate Content">
        <DoneIcon />
      </IconButton>
    </Tooltip>
  )
}
export default ValidateContent
|
/*
Copyright 2019-2020 Netfoundry, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* response.js
*
* Response class provides content decoding
*/
// import http from 'http';
const Headers = require('./headers.js');
const Body = require('./body');
const extractContentType = Body.extractContentType;
const INTERNALS = Symbol('Response internals');
/**
* Expose `HttpResponse`.
*/
module.exports = HttpResponse;
/**
* Initialize a new `HttpResponse`.
*
* @api public
*/
function HttpResponse(body = null, opts = {}) {
  // Initialize the shared Body state on this instance.
  Body.call(this, body, opts);
  const status = opts.status || 200;
  const headers = new Headers(opts.headers)
  // Derive a Content-Type from the body when the caller did not set one.
  if (body !== null && !headers.has('Content-Type')) {
    let contentType;
    try {
      contentType = extractContentType(body);
    } catch (err) {
      // Sometimes we see this on 401 responses, so just ignore exception
    }
    if (contentType) {
      headers.append('Content-Type', contentType);
    }
  }
  // Private per-instance state keyed by a Symbol so it cannot collide with
  // user-visible properties; exposed via the accessors installed by mixin().
  this[INTERNALS] = {
    url: opts.url,
    status,
    // statusText: opts.statusText || STATUS_CODES[status],
    headers,
    counter: opts.counter
  };
  // Copy prototype methods and define the read-only accessors directly on
  // the instance, then return it from the constructor.
  var ctx = mixin(this);
  return ctx;
}
/**
* Mixin the prototype properties.
*
* @param {Object} obj
* @return {Object}
* @api private
*/
function mixin(obj) {
  // Copy every own prototype member onto the instance itself.
  for (const key in HttpResponse.prototype) {
    if (Object.prototype.hasOwnProperty.call(HttpResponse.prototype, key))
      obj[key] = HttpResponse.prototype[key];
  }
  // Read-only accessors over the Symbol-keyed internal state.
  Object.defineProperty(obj, 'url', {
    get: function() {
      return this[INTERNALS].url || '';
    }
  });
  Object.defineProperty(obj, 'status', {
    get: function() {
      return this[INTERNALS].status;
    }
  });
  // `ok` is true exactly for 2xx statuses.
  Object.defineProperty(obj, 'ok', {
    get: function() {
      return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;
    }
  });
  Object.defineProperty(obj, 'headers', {
    get: function() {
      return this[INTERNALS].headers;
    }
  });
  return obj;
}
// Attach the shared Body behavior to the response prototype.
Body.mixIn(HttpResponse.prototype);
|
def combination_generator(lst, r):
    """Return every ``r``-length combination of the elements of ``lst``.

    Each combination is returned as a list, in the input order of ``lst``
    (the same ordering as :func:`itertools.combinations`).

    Fixed: the previous implementation iterated over ``combinations`` while
    appending to it, so the accumulator was never seeded with a first
    element and the function always returned an empty list.

    Args:
        lst: Sequence to draw elements from.
        r: Size of each combination (non-negative integer).

    Returns:
        A list of ``r``-element lists (``[[]]`` when ``r == 0``).
    """
    # Delegate to the C-implemented stdlib generator; the local import keeps
    # this fix self-contained within the function.
    from itertools import combinations
    return [list(combo) for combo in combinations(lst, r)]
|
<filename>glew-1.10.0/auto/src/glew_str_head.c<gh_stars>1000+
#ifdef GLEW_MX
GLboolean GLEWAPIENTRY glewContextIsSupported (const GLEWContext* ctx, const char* name)
#else
GLboolean GLEWAPIENTRY glewIsSupported (const char* name)
#endif
{
GLubyte* pos = (GLubyte*)name;
GLuint len = _glewStrLen(pos);
GLboolean ret = GL_TRUE;
while (ret && len > 0)
{
if (_glewStrSame1(&pos, &len, (const GLubyte*)"GL_", 3))
{
|
<reponame>hmu332233/LetMeKnow.jbnu--ChatBot--
# Mixin providing the chatbot's canned Korean-language messages about
# campus opening hours (cafeterias, dormitory, convenience stores).
# All returned strings are user-facing and must stay in Korean.
module M_Time
  # Hours message for the Husaeng-gwan (welfare hall) cafeteria.
  def makeMessage_time_hu
    message =
    "
    @ 후생관 이용시간입니다.
    상시판매
    오전 10:00 ~ 오후 7:00
    석식(백반)
    오후 5:30 ~ 오후 7:00
    중간 쉬는 시간 있음.
    "
    return message
  end
  # Hours message for the Jinsu-dang cafeteria.
  def makeMessage_time_jinsu
    message =
    "
    @ 진수당 이용시간입니다.
    중식
    오전 11:30 ~ 오후 2:00
    석식
    오후 5:30 ~ 오후 7:00
    "
    return message
  end
  # Hours message for the student hall cafeteria.
  def makeMessage_time_studentHall
    message =
    "
    @ 학생회관 이용시간입니다.
    조식
    오전 08:00 ~ 오전 09:00
    중식
    오전 11:30 ~ 오후 2:00
    석식
    준비중입니다.
    "
    return message
  end
  # Hours message for the medical school cafeteria.
  def makeMessage_time_medi
    message =
    "
    @ 의과대학식당 이용시간입니다.
    중식
    오전 11:30 ~ 오후 2:00
    석식
    오후 5:30 ~ 오후 7:00
    "
    return message
  end
  # Hours message for the Jungdam-won cafeteria.
  def makeMessage_time_jungdam
    message =
    "
    @ 정담원 이용시간입니다.
    중식
    오전 11:30 ~ 오후 2:00
    "
    return message
  end
  # -------- Dormitory (기숙사) --------
  # Hours message for the dormitory cafeterias (weekday/weekend schedules).
  def makeMessage_time_dormitory_food
    message =
    "
    @ 기숙사 식당 이용시간
    기존관 식당/참빛관 식당
    - 월~금
    조식: 07:30 ~ 09:00
    중식: 11:45 ~ 13:45
    석식: 17:30 ~ 19:00
    - 토,일(공휴일)
    조식: 08:00 ~ 09:00 / 07:30 ~ 09:00
    중식: 12:00 ~ 13:00 / 11:45 ~ 13:45
    석식: 17:30 ~ 19:00 / 17:30 ~ 18:30
    "
    return message
  end
  # Dormitory curfew hours.
  def makeMessage_time_dormitory_limite
    message =
    "
    @ 기숙사 통금 시간
    - 23:50 ~ 04:30
    "
    return message
  end
  # Dormitory hot-water schedule.
  # NOTE(review): unlike its siblings this method has no explicit
  # `return message`; behavior is unchanged because the assignment is the
  # last expression and Ruby returns its value implicitly.
  def makeMessage_time_dormitory_water
    message =
    "
    @ 기숙사 온수 시간
    기존관(대동, 평화)
    - 06:00 ~ 10:00
    - 18:00 ~ 24:00
    참빛, 혜민, 새빛, 한빛관
    - 06:30 ~ 09:30
    - 21:00 ~ 24:00
    ※ 온수사용량과 기온에 따라 온수 공급시간은 약간 차이가 있을 수 있음.
    "
  end
  # -------- Convenience stores (편의점) --------
  # Opening hours for the campus convenience stores.
  def makeMessage_time_convenience_store
    message =
    "◆ 편의점 시간입니다!
    - 기숙사
    항시: ~ 23:15
    - 제1학생회관
    학기: 08:00 ~ 22:00
    방학: 08:30 ~ 21:00
    주말: 10:00 ~ 19:00
    - 중도
    학기: 08:30 ~ 21:00
    방학: 09:00 ~ 20:00
    주말: 10:00 ~ 19:00
    - 학도
    학기: 08:30 ~ 21:00
    방학: 09:00 ~ 20:00
    주말: 10:00 ~ 19:00
    - 공대
    학기: 08:30 ~ 19:00
    방학: 09:00 ~ 18:00
    - 진수당매점
    학기: 08:30 ~ 20:00
    - 건지원매점
    학기: 08:30 ~ 19:00
    "
    return message
  end
end
|
package net.fabrictest.util;
import net.fabricmc.fabric.api.command.v1.CommandRegistrationCallback;
import net.fabrictest.command.ReturnHomeCommand;
import net.fabrictest.command.SetHomeCommand;
// Central registration point for this mod's commands; called once during
// mod initialization.
public class ModCommandRegister {
    // Hook each command's register method into Fabric's command
    // registration event.
    public static void registerCommands() {
        CommandRegistrationCallback.EVENT.register(SetHomeCommand::register);
        CommandRegistrationCallback.EVENT.register(ReturnHomeCommand::register);
    }
}
|
import React from 'react';
import { mount } from 'enzyme';
import renderer from 'react-test-renderer';
import Label from '../../components/Label';
import theme from '../../theme';
import 'jest-styled-components';
// Tests for the themed <Label /> component: snapshot plus the styling
// driven by the `small` prop.
describe('<Label />', () => {
  // Snapshot guard against unintended markup/style changes.
  it('should match snapshot', () => {
    const tree = renderer.create(<Label theme={theme}>Label 1</Label>).toJSON();
    expect(tree).toMatchSnapshot();
  });
  it('should have correct element, text, height & font-size based on small prop', () => {
    // Default (large) variant renders a single <label> with 1rem/2rem.
    let label = mount(<Label theme={theme}>Label 1</Label>);
    expect(label.find('label').length).toBe(1);
    expect(label.find('label').text()).toBe('Label 1');
    expect(label.find('label')).toHaveStyleRule('font-size', '1rem');
    expect(label.find('label')).toHaveStyleRule('line-height', '2rem');
    // `small` prop shrinks both font-size and line-height.
    label = mount(
      <Label small theme={theme}>
        Label 1
      </Label>
    );
    expect(label.find('label')).toHaveStyleRule('font-size', '.8rem');
    expect(label.find('label')).toHaveStyleRule('line-height', '1.5rem');
  });
});
|
<filename>Chapter 03/3.10.py
"""
Code illustration: 3.10.py
1. tkinter versus ttk Themed Widgets
2. new widgets introduced in ttk
Chapter 3 : Programmable Drum Machine
Tkinter GUI Application Development Blueprints
"""
from tkinter import Tk, Button, Label, Checkbutton, Entry, PanedWindow, \
Radiobutton, Scale, VERTICAL, HORIZONTAL, W
from tkinter import ttk
root = Tk()
style = ttk.Style()
# Show which ttk themes are available on this platform.
print(style.theme_names())
# style.theme_use('default')
root.title('Tkinter Versus ttk Themed Widgets')
# Column 0 holds classic tk widgets, column 1 the ttk equivalents,
# separated by a vertical rule spanning the comparison rows.
ttk.Separator(root, orient=VERTICAL).grid(
    row=0, rowspan=8, column=1, sticky="wns")
Label(root, text='Tkinter Versus').grid(row=0, columnspan=2, sticky='ew')
ttk.Label(root, text='ttk').grid(row=0, column=1)
# Rows 1-6: one tk/ttk widget pair per row.
Button(root, text='tk Button').grid(row=1, column=0)
ttk.Button(root, text='ttk Button').grid(row=1, column=1)
Checkbutton(root, text='tk CheckButton').grid(row=2, column=0)
ttk.Checkbutton(root, text='ttk CheckButton').grid(row=2, column=1)
Entry(root).grid(row=3, column=0)
ttk.Entry(root).grid(row=3, column=1)
PanedWindow(root).grid(row=4, column=0)
ttk.PanedWindow(root).grid(row=4, column=1)
Radiobutton(root, text='tk Radio').grid(row=5, column=0)
ttk.Radiobutton(root, text='ttk Radio').grid(row=5, column=1)
Scale(root, orient=HORIZONTAL).grid(row=6, column=0)
ttk.Scale(root).grid(row=6, column=1)
# Rows 8+: widgets that exist only in ttk.
ttk.Separator(root, orient=HORIZONTAL).grid(row=7, columnspan=2, sticky="ew")
ttk.Label(root, text='NEW WIDGETS INTRODUCED IN ttk').grid(row=8, columnspan=2)
ttk.Separator(root, orient=HORIZONTAL).grid(row=9, columnspan=2, sticky="ew")
ttk.Combobox(root).grid(row=11, column=0)
my_notebook = ttk.Notebook(root)
my_notebook.grid(row=12, column=1)
frame_1 = ttk.Frame(my_notebook)
frame_2 = ttk.Frame(my_notebook)
my_notebook.add(frame_1, text='Tab One')
my_notebook.add(frame_2, text='Tab Two')
ttk.Progressbar(root, length=140, value=65).grid(row=13, column=0)
# Treeview demo: implicit tree column (#0) plus one extra data column.
my_tree = ttk.Treeview(root, height=2, columns=2)
my_tree.grid(row=14, columnspan=2)
my_tree.heading('#0', text='Column A', anchor=W)
my_tree.heading(2, text='Column B', anchor=W)
my_tree.column(2, stretch=0, width=70)
root.mainloop()
|
<filename>test/integration/site_layout_test.rb
require 'test_helper'
# Integration test: pages rendered from the site layout expose the expected
# navigation links and page titles.
class SiteLayoutTest < ActionDispatch::IntegrationTest
  test "layout links" do
    get root_path
    assert_template 'static_pages/home'
    #Tests for the presence of a particular link-URL combination
    #i.e. <a href="/about"> ...</a>
    # Two links point at root (count: 2).
    assert_select "a[href=?]", root_path, count: 2
    assert_select "a[href=?]", help_path
    assert_select "a[href=?]", about_path
    assert_select "a[href=?]", contact_path
    # Titles come from the full_title helper.
    get contact_path
    assert_select "title", full_title("Contact")
    get signup_path
    assert_select "title", full_title("Sign up")
  end
end
|
# Create a MongoDB Database
import pymongo
# Connect to MongoDB on the default local port.
client = pymongo.MongoClient("mongodb://localhost:27017/")
# Create a database
db = client["blog_posts"]
# Create a collection
posts = db["posts"]
# Index this collection on the post title, ascending.
posts.create_index([('title', pymongo.ASCENDING)])
|
import java.util.ArrayList;
// Uses the Subject interface to update all Observers.
// Concrete subject of the Observer pattern: stores the current price and
// broadcasts every change to all registered observers.
public class PricingScheme implements iSubject {

    // Subscribers to price changes; the scheme is unaware of their concrete
    // kind (e.g. University/Corporate/Individual).
    private ArrayList<Observer> observers = new ArrayList<Observer>();
    private double price;

    public PricingScheme() {
        // Observer list is initialized at its field declaration above.
    }

    public void register(Observer newObserver) {
        observers.add(newObserver);
    }

    public void unregister(Observer deleteObserver) {
        int index = observers.indexOf(deleteObserver);
        // Report the 1-based position of the observer being removed.
        System.out.println("Observer " + (index + 1) + " deleted");
        observers.remove(index);
    }

    public void notifyObserver() {
        // Push the current price to every subscriber, in registration order.
        for (int i = 0; i < observers.size(); i++) {
            observers.get(i).update(price);
        }
    }

    // Change pricing scheme and immediately notify observers of the change.
    public void setPrice(double newPrice) {
        this.price = newPrice;
        notifyObserver();
    }
}
|
<gh_stars>0
package cmd
import (
"fmt"
"strings"
"github.com/aelindeman/goname"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
// listCmd represents the list command
var listCmd = &cobra.Command{
	Use:     "list [domain ...]",
	Short:   "List DNS records for a domain",
	Aliases: []string{"ls"},
	Run:     listDNSRecords,
}

// Output format chosen via --format/-f ("basic", "json", "yaml"; empty
// means basic).
var listOutputFormat string

func init() {
	rootCmd.AddCommand(listCmd)
	listCmd.PersistentFlags().StringVarP(&listOutputFormat, "format", "f", "", "output format")
}
// listDNSRecords fetches and prints DNS records for each requested domain.
// Unrecognized --format values log a warning and fall through to the basic
// listing; "json" and "yaml" are placeholders and print "not implemented".
func listDNSRecords(cmd *cobra.Command, args []string) {
	domains := parseRequestedDomains(args)
	log.WithField("domains", domains).Debug("fetching DNS records for domains")
	for _, domain := range domains {
		records, recordsErr := GetClient().ListDNSRecords(domain)
		// Skip domains whose lookup failed, but keep processing the rest.
		if recordsErr != nil || records.Result.Failed() {
			log.WithError(recordsErr).WithFields(log.Fields{
				"domain": domain,
			}).Error("error fetching DNS records")
			continue
		}
		switch listOutputFormat {
		case "json":
			// printJsonRecordsList(domain, records.Records)
			fmt.Println("not implemented")
		case "yaml":
			// printYamlRecordsList(domain, records.Records)
			fmt.Println("not implemented")
		default:
			log.WithFields(log.Fields{
				"format": listOutputFormat,
			}).Warning("unrecognized output format")
			fallthrough
		case "basic", "":
			printBasicRecordsList(domain, records.Records)
		}
	}
}
// parseRequestedDomains returns the domains passed on the command line, or —
// when none were given — every domain on the authenticated account.
func parseRequestedDomains(args []string) (domains []string) {
	if len(args) > 0 {
		log.WithField("args", args).Debug("using domain list from args")
		domains = args
	} else {
		log.Debug("querying account for domain list")
		domainList, domainListErr := GetClient().ListDomains()
		if domainListErr != nil {
			// Error is logged but not fatal; domains stays empty.
			log.WithError(domainListErr).Error("error fetching account's domains")
		}
		// Ranging a map yields keys — presumably Domains is keyed by domain
		// name; verify against the goname API.
		for d := range domainList.Domains {
			domains = append(domains, d)
		}
	}
	return domains
}
// printBasicRecordsList writes a "# domain" header followed by one
// space-separated line per record: id, name, type, [priority ]content, TTL.
func printBasicRecordsList(domain string, records []goname.DNSRecordResponse) {
	fmt.Println("#", domain)
	for _, record := range records {
		row := strings.Join([]string{
			record.RecordID,
			record.Name,
			record.Type,
			// TrimLeft drops the leading space when the record has no priority.
			strings.TrimLeft(strings.Join([]string{record.Priority, record.Content}, " "), " "),
			record.TTL,
		}, " ")
		fmt.Println(row)
	}
}
|
<gh_stars>1-10
package disgordbot
import (
"testing"
)
// TestBot_AddCommand covers AddCommand's validation: distinct names/short
// names succeed, missing or duplicate identifiers fail, and adding while
// the bot is running panics.
func TestBot_AddCommand(t *testing.T) {
	b := new(Bot)
	// Two well-formed commands should be accepted.
	if err := b.AddCommand(
		Command{
			Name:  "One",
			Short: "1",
		},
		Command{
			Name:  "Two",
			Short: "2",
		}); err != nil {
		t.Error(err)
	}
	// Missing short name must be rejected.
	if err := b.AddCommand(
		Command{
			Name: "One",
		}); err == nil {
		t.Error("Expected name error")
	}
	// Missing name must be rejected.
	if err := b.AddCommand(
		Command{
			Short: "1",
		}); err == nil {
		t.Error("Expected shortname error")
	}
	// Duplicate name/short pair in a single call must be rejected.
	if err := b.AddCommand(
		Command{
			Name:  "Three",
			Short: "3",
		},
		Command{
			Name:  "Three",
			Short: "3",
		}); err == nil {
		t.Error("Expected name error")
	}
	// Only the first two valid commands should have been retained.
	if len(b.commands) != 2 {
		t.Errorf("Expected 2 command in the bot, found %d", len(b.commands))
	}
	// Adding a command while running must panic.
	defer func() {
		if r := recover(); r == nil {
			t.Error("The bot did not panic while trying to add a command while the bot is running")
		}
	}()
	b.running = true
	b.AddCommand(Command{})
}
|
#!/bin/bash
##===----------------------------------------------------------------------===##
##
## This source file is part of the Swift Tracing open source project
##
## Copyright (c) 2020 Moritz Lang and the Swift Tracing project authors
## Licensed under Apache License v2.0
##
## See LICENSE.txt for license information
##
## SPDX-License-Identifier: Apache-2.0
##
##===----------------------------------------------------------------------===##
set -eu

here="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Regenerate the Linux test manifests and fail if doing so dirties the
# tree — that means a contributor forgot to run the generator.
printf "=> Checking linux tests... "
FIRST_OUT="$(git status --porcelain)"
ruby "$here/../scripts/generate_linux_tests.rb" > /dev/null
SECOND_OUT="$(git status --porcelain)"
if [[ "$FIRST_OUT" != "$SECOND_OUT" ]]; then
  printf "\033[0;31mmissing changes!\033[0m\n"
  git --no-pager diff
  exit 1
else
  printf "\033[0;32mokay.\033[0m\n"
fi

# Fixed: quote "$here" so the helper scripts are still found when the
# checkout path contains spaces (an unquoted expansion word-splits).
bash "$here/validate_license_headers.sh"
bash "$here/validate_format.sh"
bash "$here/validate_naming.sh"
package fr.slvn.appops;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.widget.Toast;
import java.util.Arrays;
// Launcher trampoline: immediately tries to open Android's hidden AppOps
// settings screen and then finishes itself. Falls back through several
// launch strategies and offers to uninstall itself when none work.
public class MainActivity extends Activity {
    // Android releases on which the fragment trick below is known not to
    // work (AppOpsSummary removed from the settings whitelist).
    private static final String[] INCOMPATIBLE_LIST =
    {
        "4.4.2" // AppOpsSummary removed from whitelist
    };
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        launchAppOps();
        // No UI of its own — close immediately after delegating.
        finish();
    }
    // Try each launch strategy in order of reliability.
    private void launchAppOps() {
        if (launchAppOpsViaAction()) {
            // Classic method for 4.3 worked !
        } else if (isAndroidVersionIncompatible()) {
            // Cannot launch AppOps on those version
            uninstallAppOps();
        } else if (launchAppOpsViaComponentExtra()) {
            // Tricky method from 4.4 worked !
        } else {
            // Cannot launch AppOps :(
            uninstallAppOps();
        }
    }
    // Direct intent action available on Android 4.3.
    private boolean launchAppOpsViaAction() {
        Intent intent = new Intent("android.settings.APP_OPS_SETTINGS");
        return launchAppOps(intent);
    }
    // NOTE(review): Arrays.binarySearch requires a sorted array — fine while
    // INCOMPATIBLE_LIST has a single entry, fragile if more are added.
    private boolean isAndroidVersionIncompatible() {
        return Arrays.binarySearch(
                INCOMPATIBLE_LIST,
                Build.VERSION.RELEASE
        ) > -1;
    }
    // 4.4 workaround: open the Settings activity and ask it to show the
    // AppOpsSummary fragment directly.
    private boolean launchAppOpsViaComponentExtra() {
        Intent intent = new Intent();
        intent.setClassName("com.android.settings", "com.android.settings.Settings");
        intent.setAction(Intent.ACTION_MAIN);
        intent.addCategory(Intent.CATEGORY_DEFAULT);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK |
                Intent.FLAG_ACTIVITY_CLEAR_TASK |
                Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
        intent.putExtra(":android:show_fragment", "com.android.settings.applications.AppOpsSummary");
        return launchAppOps(intent);
    }
    // Fire the intent; report success/failure instead of crashing.
    protected boolean launchAppOps(Intent intent) {
        try {
            startActivity(intent);
        } catch (Exception e) {
            // Cannot launch activity !
            return false;
        }
        return true;
    }
    // Tell the user the device is unsupported and start self-uninstall.
    private void uninstallAppOps() {
        Toast.makeText(this, R.string.error_msg, Toast.LENGTH_LONG).show();
        Uri uri = Uri.parse("package:" + getPackageName());
        Intent intent = new Intent(Intent.ACTION_UNINSTALL_PACKAGE, uri);
        startActivity(intent);
    }
}
|
def longest_common_substring(string1, string2):
    """Return ``(length, substring)`` of the longest common substring.

    Classic dynamic-programming approach: ``lengths[i][j]`` holds the
    length of the longest common suffix of ``string1[:i]`` and
    ``string2[:j]``. Returns ``(0, "")`` when the strings share nothing.
    """
    rows, cols = len(string1), len(string2)
    lengths = [[0] * (cols + 1) for _ in range(rows + 1)]
    best_length = 0
    best = ""
    for i, ch1 in enumerate(string1, start=1):
        for j, ch2 in enumerate(string2, start=1):
            if ch1 == ch2:
                current = lengths[i - 1][j - 1] + 1
                lengths[i][j] = current
                # Track a new maximum only on a strictly longer match,
                # keeping the earliest longest substring.
                if current > best_length:
                    best_length = current
                    best = string1[i - current:i]
    return best_length, best
|
#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Called with following variables set:
# - CORE_PATH is absolute path to @apache-mynewt-core
# - BSP_PATH is absolute path to hw/bsp/bsp_name
# - BIN_BASENAME is the path to prefix to target binary,
# .elf appended to name is the ELF file
# - FEATURES holds the target features string
# - EXTRA_JTAG_CMD holds extra parameters to pass to jtag software
# - RESET set if target should be reset when attaching
# - NO_GDB set if we should not start gdb to debug
#
# Default to OpenOCD with a CMSIS-DAP probe; the JLINK_DEBUG feature flag
# switches to Segger J-Link instead.
USE_OPENOCD=1
FILE_NAME=$BIN_BASENAME.elf
# Look for 'JLINK_DEBUG' in FEATURES
for feature in $FEATURES; do
    if [ $feature = "JLINK_DEBUG" ]; then
        USE_OPENOCD=0
    fi
done
if [ $USE_OPENOCD -eq 1 ]; then
    # openocd.sh supplies the openocd_debug helper used below.
    . $CORE_PATH/hw/scripts/openocd.sh
    CFG="-f interface/cmsis-dap.cfg -f target/nrf51.cfg"
    # Exit openocd when gdb detaches.
    EXTRA_JTAG_CMD="$EXTRA_JTAG_CMD; nrf51.cpu configure -event gdb-detach {if {[nrf51.cpu curstate] eq \"halted\"} resume;shutdown}"
    openocd_debug
else
    # jlink.sh supplies the jlink_debug helper used below.
    . $CORE_PATH/hw/scripts/jlink.sh
    JLINK_DEV="nRF51422_xxAC"
    jlink_debug
fi
|
from arbitrage.public_markets._cex import CEX
class CEXEUR(CEX):
    """EUR-quoted CEX market (presumably CEX.IO — see the CEX base class)."""

    def __init__(self):
        # Parameterize the parent market with EUR for both currency slots.
        super().__init__("EUR", "EUR")
|
#!/usr/bin/env bash
# Build the paxos server/client binaries and launch five server replicas,
# recording their PIDs so a companion script can stop them later. Refuses
# to start a second set while the PID file has entries.

PID_FILE=server.pid
# Fixed: tolerate a missing PID file on first run instead of printing a
# cat error to stderr.
PID=$(cat "${PID_FILE}" 2>/dev/null)

if [ -z "${PID}" ]; then
    # Fixed: ${PID_FILE} — the previous {$PID_FILE} printed a literal
    # "{server.pid}" instead of the expanded file name.
    echo "Process id for servers is written to location: ${PID_FILE}"
    go build ../main/server/
    go build ../main/client/
    #go build ../cmd/
    # Fixed: -f so a missing logs/ directory is not an error on first run.
    rm -rf logs
    mkdir logs/
    ./server -log_dir=logs -log_level=info -id 1.1 -algorithm=paxos >logs/out1.1.txt 2>&1 &
    echo $! >> ${PID_FILE}
    ./server -log_dir=logs -log_level=info -id 1.2 -algorithm=paxos >logs/out1.2.txt 2>&1 &
    echo $! >> ${PID_FILE}
    ./server -log_dir=logs -log_level=info -id 1.3 -algorithm=paxos >logs/out1.3.txt 2>&1 &
    echo $! >> ${PID_FILE}
    ./server -log_dir=logs -log_level=info -id 1.4 -algorithm=paxos >logs/out1.4.txt 2>&1 &
    echo $! >> ${PID_FILE}
    ./server -log_dir=logs -log_level=info -id 1.5 -algorithm=paxos >logs/out1.5.txt 2>&1 &
    echo $! >> ${PID_FILE}
else
    echo "Servers are already started in this folder."
    exit 0
fi
|
<reponame>Nazar910/assignment-system
// Allowed assignment priority levels; the values double as the wire strings.
export const PRIORITIES = {
    low: 'low',
    normal: 'normal',
    high: 'high',
    urgent: 'urgent'
};
// JSON-schema for an assignment document. `additionalProperties: false`
// rejects unknown keys; only title and author_id are mandatory.
export const Assignment = {
    type: 'object',
    required: ['title', 'author_id'],
    additionalProperties: false,
    properties: {
        title: {
            type: 'string'
        },
        description: {
            type: 'string'
        },
        author_id: {
            type: 'string'
        },
        assignee_id: {
            type: 'string'
        },
        // Workflow status; new assignments default to 'todo'.
        status: {
            type: 'string',
            default: 'todo'
        },
        // Must be one of the PRIORITIES values above.
        priority: {
            enum: Object.values(PRIORITIES)
        }
    }
}
|
<reponame>scurker/scurker.com
// Shared ParticleCanvas instance, created once the page has loaded.
var particles;
window.onload = function() {
  var canvas = document.getElementById('particle_canvas');
  // Emitter positioned at x=490 on the canvas.
  particles = new ParticleCanvas(canvas, {x: 490});
  particles.start();
};
// Preset emitter configurations selectable from the demo UI. Shared knobs:
// spawnSpeed (particles per tick), life/lifeVariance (ticks), direction
// (degrees) and onDraw (per-particle hook run each frame).
var effects = {
  // Slowly rising grey circles that fade out with age.
  smoke: {
    shape: 'circle',
    velocity: new Vector({y: -0.35}),
    xVariance: 10,
    yVariance: 15,
    spawnSpeed: 1,
    generations: 100000,
    maxParticles: 5000,
    size: 16,
    sizeVariance: 10,
    life: 350,
    lifeVariance: 50,
    direction: 0,
    directionVariance: 25,
    color: '#ccc',
    opacity: 1,
    onDraw: function(p) {
      // Linear fade from ~0.25 opacity down to 0 over the particle's life.
      p.opacity = 0.251 - (p.age / p.life) * 0.25;
    }
  },
  // Fast burst of tiny white squares scattered in all directions.
  explosions: {
    shape: 'square',
    velocity: new Vector({y: -1}),
    xVariance: 5,
    yVariance: 5,
    spawnSpeed: 20,
    generations: 100000,
    maxParticles: 200,
    size: 2,
    sizeVariance: 0.5,
    life: 250,
    lifeVariance: 0,
    direction: 0,
    directionVariance: 180,
    color: '#fff',
    opacity: 1,
    onDraw: function() {}
  },
  // Particles launched upward, pulled back down by per-frame gravity.
  fountain: {
    shape: 'circle',
    velocity: new Vector({x: 0, y: -5}),
    xVariance: 0,
    yVariance: 0,
    spawnSpeed: 5,
    generations: 100000,
    maxParticles: 500,
    size: 8,
    sizeVariance: 6,
    life: 100,
    lifeVariance: 0,
    direction: 0,
    directionVariance: 15,
    color: '#cef',
    opacity: 1,
    onDraw: function(p) {
      // Gravity: accelerate downward a little each frame, then fade.
      p.v.add(new Vector({y: 0.1}));
      p.opacity = 1 - (p.age / p.life * 0.9);
    }
  },
  // Short-lived shrinking particles whose green channel drops with age,
  // shifting the color from yellow-white toward red.
  fire: {
    shape: 'circle',
    velocity: new Vector({y: -3}),
    xVariance: 20,
    yVariance: 5,
    spawnSpeed: 25,
    generations: 100000,
    maxParticles: 500,
    size: 20,
    sizeVariance: 10,
    life: 30,
    lifeVariance: 10,
    direction: 0,
    directionVariance: 15,
    color: '#cef',
    opacity: 1,
    onDraw: function(p) {
      var y = -this.age * 3;
      p.size *= 0.98;
      p.color = 'rgb(255, ' + (y + 255) + ', 68)';
      p.opacity = 0.5 - (p.age / p.life * 0.4);
    }
  },
  // Stationary pale circles spawned over a wide area, shrinking and fading.
  raindrops: {
    shape: 'circle',
    velocity: new Vector({y: 0}),
    xVariance: 200,
    yVariance: 200,
    spawnSpeed: 1,
    generations: 100000,
    maxParticles: 100,
    size: 20,
    sizeVariance: 10,
    life: 50,
    lifeVariance: 10,
    direction: 0,
    directionVariance: 15,
    color: '#cef',
    opacity: 1,
    onDraw: function(p) {
      p.size *= 0.98;
      p.opacity = 0.5 - (p.age / p.life * 0.4);
    }
  }
};
// Serialize a form's inputs into a plain { name: value } object.
// jQuery's serializeArray() only emits successful controls, each with a
// string value, so the original `this.value !== null ? this.value : null`
// ternary was a no-op and has been dropped.
$.fn.toJson = function() {
  var json = {};
  $.each(this.serializeArray(), function() {
    json[this.name] = this.value;
  });
  return json;
};
// Attach a jQuery UI slider after each matched input, keeping the two in
// sync in both directions (typing updates the slider; sliding updates the
// input and re-fires its change handlers).
$.fn.inputSlider = function(options) {
  var inputs = $(this), slider = $('<div/>');
  // Update slider control on input change
  inputs.bind('keyup change', function() {
    $(this).closest('label').find('.ui-slider').slider('value', this.value);
  });
  // Merge the caller's options over the default slide handler.
  // (The original redeclared the `options` parameter with `var`, which is
  // a misleading no-op; plain assignment is intended.)
  options = $.extend({
    slide: function(e, ui) {
      $(ui.handle).closest('label').find('input').val(ui.value).change();
    }
  }, options);
  slider.insertAfter(inputs)
        .slider(options);
  return this;
};
// Apply a named preset from `effects`: push it into the particle system
// and mirror each setting into the matching form input.
function loadPreset(val) {
  var obj;
  if((obj = effects[val])) {
    particles.update(obj);
    for(var i in obj) {
      // Vector settings (velocity) are exposed in the UI as a positive
      // magnitude, so the upward (negative) y component is negated.
      var value = (obj[i] instanceof Vector) ? -obj[i].y : obj[i];
      $(':input[name=' + i + ']').val(value);
    }
    // re-fire change handlers so sliders and canvas pick up the new values
    $('input').change();
  }
}
$(document).ready(function() {
  // Push the full control-panel state into the particle system whenever
  // any input changes.
  $('#controls :input').change(function() {
    particles.update($('#controls :input').toJson());
  });
  // The preset <select> swaps in one of the canned effects above.
  $('select').change(function() { loadPreset(this.value); });
  // Slider configuration keyed by element id. Every slider uses
  // range: 'min'; per-slider options below extend that default.
  // (Replaces eleven copy-pasted inputSlider calls with one table.)
  var sliders = {
    'x-variance':         { min: 0,    max: 20,   value: 20 },
    'y-variance':         { min: 0,    max: 20,   value: 5 },
    'direction':          { min: -180, max: 180,  value: 0 },
    'direction-variance': { min: 0,    max: 180,  value: 15 },
    'velocity':           { step: 0.01, min: 0,   max: 10,   value: 3 },
    'size':               { min: 1,    max: 50,   value: 10 },
    'size-variance':      { min: 0,    max: 50,   value: 5 },
    'spawn-speed':        { min: 1,    max: 100,  value: 25 },
    'max-particles':      { min: 1,    max: 1000, value: 500 },
    'life':               { min: 1,    max: 200,  value: 30 },
    'life-variance':      { min: 0,    max: 200,  value: 10 }
  };
  $.each(sliders, function(id, opts) {
    $('#' + id).inputSlider($.extend({ range: 'min' }, opts));
  });
});
|
<gh_stars>0
import fs from 'fs'
import path from 'path'
import execa from 'execa'
import Listr from 'listr'
import VerboseRenderer from 'listr-verbose-renderer'
import terminalLink from 'terminal-link'
import { getPaths } from '../../lib'
import c from '../../lib/colors'
// yargs command signature and help text for `redwood deploy aws`
export const command = 'aws [provider]'
export const description = 'Deploy to AWS using the selected provider'
// Configure the yargs options for `redwood deploy aws`.
export const builder = (yargs) => {
  // Derive the provider choices from the .js modules in ./aws-providers.
  // Filtering to '.js' *before* stripping the extension excludes any
  // non-JS file; the original mapped first and only worked because
  // 'README.md' survives path.basename(file, '.js') unchanged.
  const SUPPORTED_PROVIDERS = fs
    .readdirSync(path.resolve(__dirname, 'aws-providers'))
    .filter((file) => file.endsWith('.js'))
    .map((file) => path.basename(file, '.js'))
  yargs
    .positional('provider', {
      choices: SUPPORTED_PROVIDERS,
      default: 'serverless',
      description: 'AWS Deploy provider to configure',
      type: 'string',
    })
    .option('side', {
      describe: 'which Side(s) to deploy',
      choices: ['api'],
      default: 'api',
      type: 'array',
    })
    .option('verbose', {
      describe: 'verbosity of logs',
      default: true,
      type: 'boolean',
    })
    .option('stage', {
      describe:
        'serverless stage pass through param: https://www.serverless.com/blog/stages-and-environments',
      default: 'dev',
      type: 'string',
    })
    .epilogue(
      `Also see the ${terminalLink(
        'Redwood CLI Reference',
        'https://redwoodjs.com/docs/cli-commands#deploy'
      )}\n`
    )
}
// Run the selected provider's pre-requisite, build, and deploy command
// lists as nested Listr task trees, executing each shell command from the
// project base directory.
export const handler = async (yargs) => {
  const { provider, verbose } = yargs
  const BASE_DIR = getPaths().base
  // dynamic import: ./aws-providers/<provider>.js supplies preRequisites,
  // buildCommands and deployCommands
  const providerData = await import(`./aws-providers/${provider}`)
  // Turn one provider command descriptor into a Listr task. A descriptor
  // either carries its own `task` function or an execa-style `command`
  // array; command output is streamed to stdout and `errorMessage` (an
  // array of words) is appended to any failure.
  const mapCommandsToListr = ({ title, command, task, errorMessage }) => {
    return {
      title: title,
      task: task
        ? task
        : async () => {
            try {
              const executingCommand = execa(...command, {
                cwd: BASE_DIR,
              })
              executingCommand.stdout.pipe(process.stdout)
              await executingCommand
            } catch (error) {
              if (errorMessage) {
                error.message = error.message + '\n' + errorMessage.join(' ')
              }
              throw error
            }
          },
    }
  }
  // Build the three top-level stages; the pre-requisites stage is only
  // included when the provider defines any (the falsy entry is dropped by
  // .filter(Boolean)).
  const tasks = new Listr(
    [
      providerData.preRequisites &&
        providerData.preRequisites.length > 0 && {
          title: 'Checking pre-requisites',
          task: () =>
            new Listr(
              providerData.preRequisites(yargs).map(mapCommandsToListr)
            ),
        },
      {
        title: 'Building and Packaging...',
        task: () =>
          new Listr(providerData.buildCommands(yargs).map(mapCommandsToListr), {
            collapse: false,
          }),
      },
      {
        title: 'Deploying to AWS',
        task: () =>
          new Listr(providerData.deployCommands(yargs).map(mapCommandsToListr)),
      },
    ].filter(Boolean),
    { collapse: false, renderer: verbose && VerboseRenderer }
  )
  // Any task failure aborts the whole deploy with a non-zero exit code.
  try {
    await tasks.run()
  } catch (e) {
    console.log(c.error(e.message))
    process.exit(1)
  }
}
|
<gh_stars>0
/**
 * Action is a container that stores a target object, a method name and
 * parameters so the method can be invoked later by a Checker or Countdown
 * object. Actions can be stacked on an ActionList object.
 *
 * @author <NAME>
 * @version 1.0
 * 2017
 */
package bontempos.Game.Act;
import java.lang.reflect.Method;
/**
 * Reflective deferred call: binds a target object, a method name and
 * arguments so the method can be invoked later via {@link #eval()}.
 *
 * Fixes over the original:
 * <ul>
 *   <li>the "no method" guard compared strings with {@code ==} (reference
 *       identity) and would NPE on a {@code null} method name;</li>
 *   <li>a zero-argument method was matched against a {@code null}
 *       parameter-type array, which {@link Class#getMethods()} never
 *       produces, and the loop broke without selecting the method;</li>
 *   <li>the echo trace printed the argument array's identity hash instead
 *       of its contents.</li>
 * </ul>
 */
public class Action {
    // lazily created by perform(); NOTE(review): this singleton is never
    // read afterwards — presumably kept for class initialization, confirm.
    private static Action instance;

    public boolean echo = false;         // print a trace line on invocation

    public Object target;                // object on which the method is invoked
    public String method;                // name of the method to invoke
    public Object[] params;              // arguments passed to the method
    public String actionName;            // optional symbolic name (AConstants)

    boolean active = true;               // inactive actions are skipped by eval()
    boolean performed = false;           // set once the method has been invoked
    boolean autoRemove = true;           // deactivate this action after it runs

    //--------------------------------------------------------------------------------------< INIT >

    /** Create an anonymous action invoking {@code method} on {@code target}. */
    public Action(Object target, String method, Object... params) {
        this.target = target;
        this.method = method;
        this.params = params;
    }

    /** Create a named action invoking {@code method} on {@code target}. */
    public Action(String actionName, Object target, String method, Object... params) {
        this.actionName = actionName;
        this.target = target;
        this.method = method;
        this.params = params;
    }

    /** Create a name-only action, resolved through AConstants on eval(). */
    public Action(String actionName) {
        this.actionName = actionName;
    }

    //--------------------------------------------------------------------------------------< SETTERS >

    public void setAutoRemove(boolean a) {
        this.autoRemove = a;
    }

    public void setName(String name) {
        this.actionName = name;
    }

    public void setActive(boolean b) {
        this.active = b;
    }

    public void setEcho(boolean b) {
        this.echo = b;
    }

    //--------------------------------------------------------------------------------------< GETTERS >

    /**
     * Report a named action to the AConstants registry.
     *
     * @return the result of {@code AConstants.get().check(actionName)}
     */
    public static boolean perform(String actionName) {
        if (instance == null) {
            instance = new Action(actionName);
        }
        return AConstants.get().check(actionName);
    }

    public boolean isActive() {
        return active;
    }

    public String getActionName() {
        return actionName;
    }

    //--------------------------------------------------------------------------------------< METHODS >

    /**
     * Evaluate this action. A name-only action (no target/method) is
     * reported to AConstants; otherwise the first public method of the
     * target whose name and parameter count match is invoked reflectively.
     *
     * @return the action name, suffixed with {@code :!active} when skipped
     */
    public String eval() {
        Action e = this;
        if (!e.active) {
            return actionName + ":!active"; // ignore if not active
        }
        if (e.target == null || e.method == null || e.method.isEmpty()) {
            // name-only action: defer to the AConstants registry
            AConstants.get().check(e.actionName);
            return actionName;
        }
        Class<?> cls = e.target.getClass();
        Method[] methods = cls.getMethods();
        Method toInvoke = null;
        for (Method method : methods) {
            if (!e.method.equals(method.getName())) {
                continue;
            }
            Class<?>[] paramTypes = method.getParameterTypes(); // never null
            if ((e.params == null || e.params.length == 0) && paramTypes.length == 0) {
                // zero-argument invocation: exact match, stop searching
                toInvoke = method;
                break;
            }
            if (e.params == null || paramTypes.length != e.params.length) {
                continue;
            }
            // NOTE(review): matching is by arity only; parameter types are
            // not checked, so overloads with the same arity are ambiguous.
            toInvoke = method;
        }
        if (toInvoke != null) {
            try {
                if (echo) {
                    System.out.println("executing action, method: " + e.method +
                        ", parameters: " + java.util.Arrays.toString(e.params) +
                        ", target: " + e.target.getClass().getName());
                }
                toInvoke.invoke(e.target, e.params); // the deferred call happens here
                performed = true;
                if (e.autoRemove) {
                    e.active = false; // consume the action
                }
            } catch (Exception t) {
                t.printStackTrace();
            }
        }
        return actionName;
    }
}
|
# Perturbation-analysis run for the IWSLT'14 De-En "comda" experiment.
corpus=./data-bin/iwslt14.tokenized.de-en.nobpe.vocabSwitchout.comda-xxx.analysis
arch=transformer_iwslt_de_en
save_dir=checkpoints/iwslt14.de-en/comda-v1.word.DEBUG
mkdir -p $save_dir
# Single-GPU analysis with a small (2-layer, 256-dim, 4-head) transformer;
# --restore-file reuses the best checkpoint, --max-sentences 1 processes
# one sentence at a time, and --max-epoch 1 makes a single pass.
CUDA_VISIBLE_DEVICES=0 python analyze.py \
    $corpus \
    --restore-file checkpoint_best.pt \
    --raw-text \
    --task perturb_analysis \
    --use-xxx \
    --da-strategy prototype \
    --coefficient 0.9975 \
    --criterion perturb_divergence \
    --lr 0.001 \
    --decay-until 8000 \
    --lr-scheduler switchout \
    --lr-shrink 0.1 \
    --optimizer adam \
    --min-lr 0.0 \
    --max-epoch 1 \
    --update-freq 1 \
    --clip-norm 25.0 \
    --dropout 0.25 \
    --max-sentences 1 \
    --arch $arch \
    --save-dir $save_dir \
    --no-progress-bar \
    --log-interval 1 \
    --encoder-embed-dim 256 \
    --encoder-ffn-embed-dim 512 \
    --encoder-attention-heads 4 \
    --encoder-layers 2 \
    --decoder-embed-dim 256 \
    --decoder-ffn-embed-dim 512 \
    --decoder-attention-heads 4 \
    --decoder-layers 2 \
|
#
# This file is part of the CernVM File System
# This script takes care of creating, removing, and maintaining repositories
# on a Stratum 0/1 server
#
# Implementation of the "cvmfs_server check" command
# This file depends on functions implemented in the following files:
# - cvmfs_server_util.sh
# - cvmfs_server_common.sh
# Run an integrity check on a single repository: optionally scrub the
# local backend storage (-i), then verify catalogs/objects with
# `cvmfs_swissknife check` against the repository's stratum 0/1 URL.
# Relies on option variables set by cvmfs_server_check():
#   $check_integrity, $check_chunks, $subtree_path, $tag, $repair_reflog
__do_check() {
  local name
  local upstream
  local storage_dir
  local url
  # get repository name
  check_parameter_count_with_guessing $#
  name=$(get_or_guess_repository_name $1)
  # sanity checks
  check_repository_existence $name || die "The repository $name does not exist"
  # get repository information
  load_repo_config $name
  # more sanity checks
  is_owner_or_root $name || die "Permission denied: Repository $name is owned by $CVMFS_USER"
  health_check -r $name
  # check if repository is compatible to the installed CernVM-FS version
  check_repository_compatibility $name
  upstream=$CVMFS_UPSTREAM_STORAGE
  if is_stratum1 $name; then
    url=$CVMFS_STRATUM1
  else
    url=$CVMFS_STRATUM0
  fi
  # do it!
  if [ $check_integrity -ne 0 ]; then
    if ! is_local_upstream $upstream; then
      echo "Storage integrity check only works locally. skipping."
    else
      echo
      echo "Checking storage integrity of $name ... (may take a while)"
      storage_dir=$(get_upstream_config $upstream)
      __swissknife scrub -r ${storage_dir}/data || die "FAIL!"
    fi
  fi
  # translate the option variables into swissknife parameters
  local log_level_param=""
  local check_chunks_param=""
  [ "x$CVMFS_LOG_LEVEL" != x ] && log_level_param="-l $CVMFS_LOG_LEVEL"
  [ $check_chunks -ne 0 ] && check_chunks_param="-c"
  local subtree_msg=""
  local subtree_param=""
  if [ "x$subtree_path" != "x" ]; then
    subtree_param="-s '$subtree_path'"
    subtree_msg=" (starting at nested catalog '$subtree_path')"
  fi
  echo "Verifying integrity of ${name}${subtree_msg}..."
  # optionally repair reflog/checksum mismatches before checking
  if [ $repair_reflog -eq 1 ]; then
    __check_repair_reflog $name
  fi
  local with_reflog=
  has_reflog_checksum $name && with_reflog="-R $(get_reflog_checksum $name)"
  if is_garbage_collectable $name; then
    if [ "x$tag" = "x" ]; then
      echo "Warning: if garbage collection runs in parallel, "
      echo " missing data chunks can be falsely reported"
    fi
  fi
  # run the actual check as the repository owner
  local user_shell="$(get_user_shell $name)"
  local check_cmd
  check_cmd="$(__swissknife_cmd dbg) check $tag \
    $check_chunks_param \
    $log_level_param \
    $subtree_param \
    -r $url \
    -t ${CVMFS_SPOOL_DIR}/tmp \
    -k ${CVMFS_PUBLIC_KEY} \
    -N ${CVMFS_REPOSITORY_NAME} \
    $(get_swissknife_proxy) \
    $(get_follow_http_redirects_flag) \
    $with_reflog \
    -z /etc/cvmfs/repositories.d/${name}/trusted_certs"
  $user_shell "$check_cmd"
}
# Checks for mismatches between the reflog and its stored checksum and
# tries to fix them: adjust or remove the local checksum file, and
# re-create the reflog when the manifest references one that is missing.
# Fixes over the original:
#  - removed a redundant `local repository_url` re-declaration that, in
#    shells where `local` resets the value, cleared the URL right before
#    the reconstruction command used it;
#  - corrected the "failed to reconstruct reference log" syslog message.
__check_repair_reflog() {
  local name="$1"
  load_repo_config $name
  local user_shell="$(get_user_shell $name)"
  local stored_checksum=
  has_reflog_checksum $name && stored_checksum="$(cat $(get_reflog_checksum $name))"
  local repository_url=
  if is_stratum0 $name; then
    repository_url="$CVMFS_STRATUM0"
  else
    repository_url="$CVMFS_STRATUM1"
  fi
  # does a remote .cvmfsreflog exist, and what is its content hash?
  local has_reflog=0
  local computed_checksum=
  if $user_shell "$(__swissknife_cmd) peek -d .cvmfsreflog -r $CVMFS_UPSTREAM_STORAGE" >/dev/null; then
    has_reflog=1
    local url="$repository_url/.cvmfsreflog"
    local rehash_cmd="curl -sS --fail --connect-timeout 10 --max-time 300 $(get_curl_proxy) $url \
      | cvmfs_publish hash -a ${CVMFS_HASH_ALGORITHM:-sha1}"
    computed_checksum="$($user_shell "$rehash_cmd")"
    echo "Info: found $url with content hash $computed_checksum"
  fi
  if has_reflog_checksum $name; then
    if [ $has_reflog -eq 0 ]; then
      # checksum without a reflog: drop the dangling checksum file
      $user_shell "rm -f $(get_reflog_checksum $name)"
      echo "Warning: removed dangling reflog checksum $(get_reflog_checksum $name)"
    else
      # both exist: resync the stored checksum if it does not match
      if [ "x$stored_checksum" != "x$computed_checksum" ]; then
        $user_shell "echo $computed_checksum > $(get_reflog_checksum $name)"
        echo "Warning: restored reflog checksum as $computed_checksum (was: $stored_checksum)"
      fi
    fi
  else
    # No checksum
    if [ $has_reflog -eq 1 ]; then
      $user_shell "echo $computed_checksum > $(get_reflog_checksum $name)"
      echo "Warning: re-created missing reflog checksum as $computed_checksum"
    fi
  fi
  # At this point we either have no .cvmfsreflog and no local reflog.chksum or
  # we have both files properly in sync.
  # Remaining case: a reflog is registered in the manifest but the
  # .cvmfsreflog file is missing. In this case, we recreate the reflog.
  if get_repo_info -R | grep -q ^Y; then
    echo "Warning: a reflog hash is registered in the manifest, re-creating missing reflog"
    to_syslog_for_repo $name "reference log reconstruction started"
    local reflog_reconstruct_command="$(__swissknife_cmd dbg) reconstruct_reflog \
      -r $repository_url \
      $(get_swissknife_proxy) \
      -u $CVMFS_UPSTREAM_STORAGE \
      -n $CVMFS_REPOSITORY_NAME \
      -t ${CVMFS_SPOOL_DIR}/tmp/ \
      -k $CVMFS_PUBLIC_KEY \
      -R $(get_reflog_checksum $name)"
    if ! $user_shell "$reflog_reconstruct_command"; then
      to_syslog_for_repo $name "failed to reconstruct reference log"
    else
      to_syslog_for_repo $name "successfully reconstructed reference log"
    fi
  fi
}
# Emit "<last_check_epoch>:<name>" for every local repository due for a
# check, skipping inactive replicas, gateway-backed repositories, and
# repositories checked less than CVMFS_CHECK_ALL_MIN_DAYS (default 30)
# days ago.
# This is a separate function because dash segfaults if it is inline :-(
__get_checks_repo_times() {
  set -- '*'
  check_parameter_count_for_multiple_repositories $#
  names=$(get_or_guess_multiple_repository_names "$@")
  check_multiple_repository_existence "$names"
  for name in $names; do
    # note that is_inactive_replica also does load_repo_config
    if is_inactive_replica $name; then
      continue
    fi
    local upstream=$CVMFS_UPSTREAM_STORAGE
    # Fix: the original queried get_upstream_type with the undefined
    # variable $upstream_storage, so gateway repositories were never
    # skipped here.
    if [ x$(get_upstream_type $upstream) = "xgw" ]; then
      continue
    fi
    local check_status="$(read_repo_item $name .cvmfs_status.json)"
    local last_check="$(get_json_field "$check_status" last_check)"
    local check_time=0
    if [ -n "$last_check" ]; then
      check_time="$(date --date "$last_check" +%s)"
      local min_secs num_secs
      min_secs="$((${CVMFS_CHECK_ALL_MIN_DAYS:-30}*60*60*24))"
      num_secs="$(($(date +%s)-$check_time))"
      if [ "$num_secs" -lt "$min_secs" ]; then
        # less than $CVMFS_CHECK_ALL_MIN_DAYS has elapsed since last check
        continue
      fi
    fi
    echo "${check_time}:${name}"
  done
}
# Check every due local repository in last-checked order, appending the
# per-repository output to /var/log/cvmfs/checks.log and recording each
# result in the repository's status file.
# Fix: corrected the "successfully completed check" syslog message
# (was "sucessfully").
__do_all_checks() {
  local log
  local repo
  local repos
  if [ ! -d /var/log/cvmfs ]; then
    if ! mkdir /var/log/cvmfs 2>/dev/null; then
      die "/var/log/cvmfs does not exist and could not create it"
    fi
  fi
  [ -w /var/log/cvmfs ] || die "cannot write to /var/log/cvmfs"
  # make sure only one "check all" runs at a time
  local check_lock=/var/spool/cvmfs/is_checking_all
  if ! acquire_lock $check_lock; then
    to_syslog "skipping start of cvmfs_server check because $check_lock held by active process"
    return 1
  fi
  log=/var/log/cvmfs/checks.log
  # Sort the active repositories on local storage by last check time
  repos="$(__get_checks_repo_times|sort -n|cut -d: -f2)"
  for repo in $repos; do
    (
    to_syslog_for_repo $repo "started check"
    echo
    echo "Starting $repo at `date`"
    # Work around the errexit (that is, set -e) misfeature of being
    # disabled whenever the exit code is to be checked.
    # See https://lists.gnu.org/archive/html/bug-bash/2012-12/msg00093.html
    set +e
    (set -e
    __do_check $repo
    )
    local ret=$?
    update_repo_status $repo last_check "`date --utc`"
    local check_status
    if [ $ret != 0 ]; then
      check_status=failed
      to_syslog_for_repo $repo "check failed"
      echo "ERROR from cvmfs_server check!" >&2
    else
      check_status=succeeded
      to_syslog_for_repo $repo "successfully completed check"
    fi
    update_repo_status $repo check_status $check_status
    echo "Finished $repo at `date`"
    ) >> $log 2>&1
  done
}
# Entry point for `cvmfs_server check`: verify one repository, or with -a
# every local repository that is due.
#   -a  check all due repositories (cron mode; always exits 0)
#   -c  disable checking of data chunk hashes
#   -i  additionally scrub the local backend storage
#   -t  check a specific named tag
#   -s  restrict the check to a nested-catalog subtree
#   -r  repair reflog/checksum mismatches first
cvmfs_server_check() {
  local retcode=0
  local do_all=0
  local check_chunks=1
  local check_integrity=0
  local subtree_path=""
  local tag=
  local repair_reflog=0
  # optional parameter handling
  OPTIND=1
  while getopts "acit:s:r" option
  do
    case $option in
      a)
        do_all=1
      ;;
      c)
        check_chunks=0
      ;;
      i)
        check_integrity=1
      ;;
      t)
        tag="-n $OPTARG"
      ;;
      s)
        subtree_path="$OPTARG"
      ;;
      r)
        repair_reflog=1
      ;;
      ?)
        shift $(($OPTIND-2))
        usage "Command check: Unrecognized option: $1"
      ;;
    esac
  done
  shift $(($OPTIND-1))
  if [ $do_all -eq 1 ]; then
    [ $# -eq 0 ] || die "no non-option parameters expected with -a"
    __do_all_checks
    # Always return success because this is used from cron and we
    # don't want cron sending an email every time something fails.
    # Errors will be in the log.
  else
    if [ x"$CVMFS_LOG_LEVEL" = x ]; then
      # increase log from default "Warning" to "Info" level
      CVMFS_LOG_LEVEL=2 __do_check "$@"
    else
      __do_check "$@"
    fi
    retcode=$?
  fi
  # NOTE(review): $check_lock is local to __do_all_checks, so it is empty
  # in this scope and release_lock runs with no argument — confirm
  # release_lock tolerates this, or move the lock handling into one scope.
  release_lock $check_lock
  return $retcode
}
|
# Print the 1-10 multiplication table, one row per line with the products
# tab-separated (each row keeps the trailing tab the original loop emitted
# before its newline).
for row in range(1, 11):
    print(*(row * col for col in range(1, 11)), sep='\t', end='\t\n')
|
package com.lmj.vueblog.service;
import com.lmj.vueblog.entity.User;
import com.baomidou.mybatisplus.extension.service.IService;
/**
 * User service-layer contract; extends MyBatis-Plus {@code IService}
 * for generic CRUD on {@code User}.
 */
public interface UserService extends IService<User> {
    /**
     * Register a new user account.
     * @param user the user to persist; NOTE(review): validation and
     *             uniqueness rules are presumably enforced by the
     *             implementation — confirm there.
     */
    void register(User user);
}
|
package rsocktapp.demo2;
import io.rsocket.ConnectionSetupPayload;
import io.rsocket.RSocket;
import io.rsocket.SocketAcceptor;
import io.rsocket.core.RSocketServer;
import io.rsocket.frame.decoder.PayloadDecoder;
import io.rsocket.transport.netty.server.TcpServerTransport;
import reactor.core.publisher.Mono;
/** Minimal RSocket TCP server bound on port 7878. */
public class ServerApp {
    public static void main(String[] args){
        // Start the server with zero-copy payload decoding and obtain the
        // close Mono of the bound channel.
        // NOTE(review): onClose() is not block()ed, so main returns
        // immediately after binding — presumably the Netty event-loop
        // threads keep the JVM alive; confirm intended.
        RSocketServer.create(new ServerHandler())
                .payloadDecoder(PayloadDecoder.ZERO_COPY)
                .bind(TcpServerTransport.create(7878))
                .block()
                .onClose();
    }
    public static class ServerHandler implements SocketAcceptor {
        // NOTE(review): returning null from accept() violates the
        // SocketAcceptor/Reactive Streams contract (a Mono is expected);
        // connections will fail — likely a stub, confirm and return
        // Mono.just(...) or Mono.error(...) instead.
        @Override
        public Mono<RSocket> accept(ConnectionSetupPayload connectionSetupPayload, RSocket rSocket) {
            return null;
        }
    }
}
|
#!/bin/tcsh
#PBS -A NTDD0005
#PBS -N testb
#PBS -q regular
#PBS -l walltime=12:00:00
#PBS -j oe
#PBS -M apinard@ucar.edu
#PBS -l select=1:ncpus=1
# Batch job: run compute_batch.py (ldcpy) over PRECL with the settings in
# the named JSON file, writing results to a CSV on scratch.
module load conda
conda activate ldcpy_env
# temporary files go to per-user scratch space
setenv TMPDIR /glade/scratch/$USER/temp
mkdir -p $TMPDIR
# -ts/-tt select the time-slice range 60..75; -v verbose; -ld presumably
# enables lazy/dask loading — confirm against compute_batch.py's argparser
python ./compute_batch.py -o '/glade/scratch/apinard/3D/PRECL_calcs.csv' -j './batch_scripts/3d_dssim_scripts/PRECL.json' -ts 60 -tt 75 -v -ld
|
<gh_stars>0
#include <stdlib.h>
#include <string.h>

#include "mbedtls/sha256.h"
#include "mbedtls/md_internal.h"

#include "aws_sigv4.h"
#define SHA256_DIGEST_LENGTH 32
#define AWS_SIGV4_AUTH_HEADER_NAME "Authorization"
#define AWS_SIGV4_SIGNING_ALGORITHM "AWS4-HMAC-SHA256"
#define AWS_SIGV4_AUTH_HEADER_MAX_LEN 1024
#define AWS_SIGV4_CANONICAL_REQUEST_BUF_LEN 1024
#define AWS_SIGV4_STRING_TO_SIGN_BUF_LEN 1024
#define AWS_SIGV4_KEY_BUF_LEN 44 //aws_secret_access_key is 40 + AWS3
#define AWS_SIGV4_MAX_NUM_QUERY_COMPONENTS 50
typedef int (*aws_sigv4_compare_func_t)(const void*, const void*);
/* Comparator for sorting query parameters into canonical (byte-wise
 * ascending) key order, as the SigV4 canonical request requires.
 * Fix: the original compared only the shared prefix with strncmp, so
 * keys like "a" and "ab" compared equal, producing an unstable,
 * spec-violating order; on a prefix tie the shorter key now sorts first.
 */
static int aws_sigv4_kv_cmp(aws_sigv4_kv_t* p1,
                            aws_sigv4_kv_t* p2)
{
  size_t len = p1->key.len <= p2->key.len ? p1->key.len : p2->key.len;
  int rc = strncmp((char*) p1->key.data, (char*) p2->key.data, len);
  if (rc != 0)
  {
    return rc;
  }
  /* equal prefix: order by key length (shorter first) */
  return (p1->key.len > p2->key.len) - (p1->key.len < p2->key.len);
}
/* Write "k1=v1&k2=v2..." for the given (already percent-encoded) query
 * parameters into dst_cstr and return the advanced write pointer. */
static unsigned char* construct_query_str(unsigned char* dst_cstr,
                                          aws_sigv4_kv_t* query_params,
                                          size_t query_num)
{
  for (size_t i = 0; i < query_num; i++)
  {
    if (i > 0)
    {
      /* a separator precedes every component after the first */
      *(dst_cstr++) = '&';
    }
    /* here we assume args are percent-encoded */
    dst_cstr = aws_sigv4_sprintf(dst_cstr, "%V=%V",
                                 &query_params[i].key, &query_params[i].value);
  }
  return dst_cstr;
}
/* Split a raw query string ("k1=v1&k2=v2...") into key/value views over
 * the original buffer (no copies); *arr_len receives the number of
 * components. The input is assumed well-formed (alternating k=v pairs
 * joined by '&', percent-encoded).
 * Fix: the empty-input path wrote `arr_len = 0;`, nulling the local
 * pointer instead of zeroing the caller's counter (*arr_len = 0). */
static void parse_query_params(aws_sigv4_str_t* query_str,
                               aws_sigv4_kv_t* query_params,
                               size_t* arr_len)
{
  if (arr_len == NULL)
  {
    return;
  }
  if (aws_sigv4_empty_str(query_str)
      || query_params == NULL)
  {
    *arr_len = 0;
    return;
  }
  size_t idx = 0;
  unsigned char* c_ptr = query_str->data;
  query_params[0].key.data = c_ptr;
  /* here we assume query string are well-formed */
  while (c_ptr != query_str->data + query_str->len)
  {
    if (*c_ptr == '=')
    {
      /* end of a key, start of its value */
      query_params[idx].key.len = c_ptr - query_params[idx].key.data;
      query_params[idx].value.data = ++c_ptr;
    }
    else if (*c_ptr == '&')
    {
      /* end of a value, start of the next key */
      query_params[idx].value.len = c_ptr - query_params[idx].value.data;
      query_params[++idx].key.data = ++c_ptr;
    }
    else
    {
      c_ptr++;
    }
  }
  query_params[idx].value.len = c_ptr - query_params[idx].value.data;
  *arr_len = idx + 1;
}
/* Hex-encode str_in into hex_out (lowercase); hex_out->len is set to
 * twice the input length. The caller provides a sufficiently large
 * output buffer. */
void get_hexdigest(aws_sigv4_str_t* str_in, aws_sigv4_str_t* hex_out)
{
  static const unsigned char digits[] = "0123456789abcdef";
  unsigned char* out = hex_out->data;
  for (size_t i = 0; i < str_in->len; i++)
  {
    unsigned char byte = str_in->data[i];
    out[2 * i]     = digits[byte >> 4];
    out[2 * i + 1] = digits[byte & 0x0f];
  }
  hex_out->len = str_in->len * 2;
}
/* Compute SHA-256 of str_in and write its lowercase hex encoding (64
 * chars) into hex_sha256_out; the caller provides the output buffer.
 * NOTE(review): uses the mbedtls_sha256_starts/update/finish variants
 * without checking return codes — confirm against the mbed TLS version
 * in use (newer versions deprecate the non-_ret forms). */
void get_hex_sha256(aws_sigv4_str_t* str_in, aws_sigv4_str_t* hex_sha256_out)
{
  unsigned char sha256_buf[SHA256_DIGEST_LENGTH];
  mbedtls_sha256_context ctx;
  mbedtls_sha256_init(&ctx);
  mbedtls_sha256_starts(&ctx, 0); /* 0 selects SHA-256 (not SHA-224) */
  mbedtls_sha256_update(&ctx, str_in->data, str_in->len);
  mbedtls_sha256_finish(&ctx, sha256_buf);
  aws_sigv4_str_t sha256_str = { .data = sha256_buf, .len = SHA256_DIGEST_LENGTH };
  get_hexdigest(&sha256_str, hex_sha256_out);
  mbedtls_sha256_free(&ctx);
}
/* Derive the SigV4 signing key:
 *   kSigning = HMAC(HMAC(HMAC(HMAC("AWS4"+kSecret, Date), Region),
 *                        Service), "aws4_request")
 * signing_key->data must hold at least SHA256_DIGEST_LENGTH bytes. */
void get_signing_key(aws_sigv4_params_t* sigv4_params, aws_sigv4_str_t* signing_key)
{
  unsigned char key_buf[AWS_SIGV4_KEY_BUF_LEN] = { 0 };
  unsigned char msg_buf[AWS_SIGV4_KEY_BUF_LEN] = { 0 };
  aws_sigv4_str_t key = { .data = key_buf };
  aws_sigv4_str_t msg = { .data = msg_buf };
  /* kDate = HMAC("AWS4" + kSecret, Date) */
  key.len = aws_sigv4_sprintf(key.data, "AWS4%V", &sigv4_params->secret_access_key) - key.data;
  /* data in YYYYMMDD format (first 8 chars of x_amz_date) */
  msg.len = aws_sigv4_snprintf(msg.data, 8, "%V", &sigv4_params->x_amz_date) - msg.data;
  /* get HMAC SHA256 */
  mbedtls_md_hmac(&mbedtls_sha256_info, key.data, key.len, msg.data, msg.len,
                  signing_key->data);
  /* kRegion = HMAC(kDate, Region) — previous digest becomes the key */
  memcpy(key.data,signing_key->data,mbedtls_sha256_info.size);
  key.len = mbedtls_sha256_info.size;
  msg.len = aws_sigv4_sprintf(msg.data, "%V", &sigv4_params->region) - msg.data;
  mbedtls_md_hmac(&mbedtls_sha256_info, key.data, key.len, msg.data, msg.len,
                  signing_key->data);
  /* kService = HMAC(kRegion, Service) */
  memcpy(key.data,signing_key->data,mbedtls_sha256_info.size);
  key.len = mbedtls_sha256_info.size;
  msg.len = aws_sigv4_sprintf(msg.data, "%V", &sigv4_params->service) - msg.data;
  mbedtls_md_hmac(&mbedtls_sha256_info, key.data, key.len, msg.data, msg.len,
                  signing_key->data);
  /* kSigning = HMAC(kService, "aws4_request") */
  memcpy(key.data,signing_key->data,mbedtls_sha256_info.size);
  key.len = mbedtls_sha256_info.size;
  msg.len = aws_sigv4_sprintf(msg.data, "aws4_request") - msg.data;
  mbedtls_md_hmac(&mbedtls_sha256_info, key.data, key.len, msg.data, msg.len,
                  signing_key->data);
}
/* Write the credential scope "<yyyymmdd>/<region>/<service>/aws4_request"
 * into credential_scope (buffer provided by the caller). */
void get_credential_scope(aws_sigv4_params_t* sigv4_params,
                          aws_sigv4_str_t* credential_scope)
{
  unsigned char* str = credential_scope->data;
  /* get date in yyyymmdd format (first 8 chars of x_amz_date) */
  str = aws_sigv4_snprintf(str, 8, "%V", &sigv4_params->x_amz_date);
  str = aws_sigv4_sprintf(str, "/%V/%V/aws4_request",
                          &sigv4_params->region, &sigv4_params->service);
  credential_scope->len = str - credential_scope->data;
}
/* Write the fixed SignedHeaders list (lowercase, ';'-separated, sorted)
 * into signed_headers; must match get_canonical_headers() below. */
void get_signed_headers(aws_sigv4_params_t* sigv4_params,
                        aws_sigv4_str_t* signed_headers)
{
  /* TODO: Need to support additional headers and header sorting */
  signed_headers->len = aws_sigv4_sprintf(signed_headers->data, "host;x-amz-content-sha256;x-amz-date;x-amz-decoded-content-length")
                        - signed_headers->data;
}
/* Write the canonical headers block ("name:value\n" per header, in the
 * same order as get_signed_headers) into canonical_headers. */
void get_canonical_headers(aws_sigv4_params_t* sigv4_params,
                           aws_sigv4_str_t* canonical_headers)
{
  /* TODO: Add logic to remove leading and trailing spaces for header values */
  canonical_headers->len = aws_sigv4_sprintf(canonical_headers->data,
                                             "host:%V\nx-amz-content-sha256:%V\nx-amz-date:%V\nx-amz-decoded-content-length:%V\n",
                                             &sigv4_params->host,
                                             &sigv4_params->x_amz_content_sha256,
                                             &sigv4_params->x_amz_date,
                                             &sigv4_params->x_amz_decoded_content_length)
                          - canonical_headers->data;
}
/* Assemble the SigV4 canonical request:
 *   method \n uri \n sorted-query \n canonical-headers \n
 *   signed-headers \n payload-hash
 * into canonical_request (buffer provided by the caller). */
void get_canonical_request(aws_sigv4_params_t* sigv4_params,
                           aws_sigv4_str_t* canonical_request)
{
  unsigned char* str = canonical_request->data;
  /* TODO: Here we assume the URI and query string have already been encoded.
   *       Add encoding logic in future.
   */
  str = aws_sigv4_sprintf(str, "%V\n%V\n",
                          &sigv4_params->method,
                          &sigv4_params->uri);
  /* query string can be empty */
  if (!aws_sigv4_empty_str(&sigv4_params->query_str))
  {
    /* parse, sort into canonical order, and re-serialize the query */
    aws_sigv4_kv_t query_params[AWS_SIGV4_MAX_NUM_QUERY_COMPONENTS];
    size_t query_num = 0;
    parse_query_params(&sigv4_params->query_str, query_params, &query_num);
    qsort(query_params, query_num, sizeof(aws_sigv4_kv_t),
          (aws_sigv4_compare_func_t) aws_sigv4_kv_cmp);
    str = construct_query_str(str, query_params, query_num);
  }
  *(str++) = '\n';
  aws_sigv4_str_t canonical_headers = { .data = str };
  get_canonical_headers(sigv4_params, &canonical_headers);
  str += canonical_headers.len;
  *(str++) = '\n';
  aws_sigv4_str_t signed_headers = { .data = str };
  get_signed_headers(sigv4_params, &signed_headers);
  str += signed_headers.len;
  *(str++) = '\n';
  /* hashed payload supplied by the caller (x-amz-content-sha256) */
  str = aws_sigv4_sprintf(str, "%V", &sigv4_params->x_amz_content_sha256);
  canonical_request->len = str - canonical_request->data;
}
/* Build the SigV4 "string to sign":
 *   AWS4-HMAC-SHA256 \n <request_date> \n <credential_scope> \n
 *   hex(SHA256(canonical_request))
 * into string_to_sign (buffer provided by the caller). */
void get_string_to_sign(aws_sigv4_str_t* request_date,
                        aws_sigv4_str_t* credential_scope,
                        aws_sigv4_str_t* canonical_request,
                        aws_sigv4_str_t* string_to_sign)
{
  unsigned char* str = string_to_sign->data;
  str = aws_sigv4_sprintf(str, "AWS4-HMAC-SHA256\n%V\n%V\n",
                          request_date, credential_scope);
  /* hex-encoded SHA-256 of the canonical request is written in place */
  aws_sigv4_str_t hex_sha256 = { .data = str };
  get_hex_sha256(canonical_request, &hex_sha256);
  str += hex_sha256.len;
  string_to_sign->len = str - string_to_sign->data;
}
/* Build the SigV4 Authorization header value for the given request
 * parameters. On success auth_header->value.data is heap-allocated and
 * owned by the caller; on failure it is NULL and an error code returns.
 * Fixes over the original:
 *  - the error path could free an *uninitialized* auth_header->value.data
 *    (callers pass an uninitialized struct); the field is now nulled
 *    before any early exit;
 *  - the SignedHeaders=/Signature= sprintf calls passed a stray
 *    &sigv4_params->access_key_id argument no %V consumed.
 * NOTE(review): output is written into a fixed 1024-byte buffer without
 * bounds checking — confirm inputs are length-limited upstream. */
int aws_sigv4_sign(aws_sigv4_params_t* sigv4_params, aws_sigv4_header_t* auth_header)
{
  int rc = AWS_SIGV4_OK;
  if (auth_header != NULL)
  {
    /* make the error path's cleanup safe even before allocation */
    auth_header->value.data = NULL;
    auth_header->value.len  = 0;
  }
  /* all of these fields are mandatory for signing */
  if (auth_header == NULL
      || sigv4_params == NULL
      || aws_sigv4_empty_str(&sigv4_params->secret_access_key)
      || aws_sigv4_empty_str(&sigv4_params->access_key_id)
      || aws_sigv4_empty_str(&sigv4_params->method)
      || aws_sigv4_empty_str(&sigv4_params->uri)
      || aws_sigv4_empty_str(&sigv4_params->host)
      || aws_sigv4_empty_str(&sigv4_params->x_amz_date)
      || aws_sigv4_empty_str(&sigv4_params->region)
      || aws_sigv4_empty_str(&sigv4_params->service))
  {
    rc = AWS_SIGV4_INVALID_INPUT_ERROR;
    goto err;
  }
  /* TODO: Support custom memory allocator */
  auth_header->value.data = calloc(AWS_SIGV4_AUTH_HEADER_MAX_LEN, sizeof(unsigned char));
  if (auth_header->value.data == NULL)
  {
    rc = AWS_SIGV4_MEMORY_ALLOCATION_ERROR;
    goto err;
  }
  auth_header->key.data = (unsigned char*) AWS_SIGV4_AUTH_HEADER_NAME;
  auth_header->key.len = strlen(AWS_SIGV4_AUTH_HEADER_NAME);
  /* AWS4-HMAC-SHA256 Credential=<access_key_id>/<credential_scope> */
  unsigned char* str = auth_header->value.data;
  str = aws_sigv4_sprintf(str, "AWS4-HMAC-SHA256 Credential=%V/",
                          &sigv4_params->access_key_id);
  aws_sigv4_str_t credential_scope = { .data = str };
  get_credential_scope(sigv4_params, &credential_scope);
  str += credential_scope.len;
  /* , SignedHeaders=<signed_headers> */
  str = aws_sigv4_sprintf(str, ", SignedHeaders=");
  aws_sigv4_str_t signed_headers = { .data = str };
  get_signed_headers(sigv4_params, &signed_headers);
  str += signed_headers.len;
  /* , Signature=<signature> */
  str = aws_sigv4_sprintf(str, ", Signature=");
  /* Task 1: Create a canonical request */
  unsigned char canonical_request_buf[AWS_SIGV4_CANONICAL_REQUEST_BUF_LEN] = { 0 };
  aws_sigv4_str_t canonical_request = { .data = canonical_request_buf };
  get_canonical_request(sigv4_params, &canonical_request);
  /* Task 2: Create a string to sign */
  unsigned char string_to_sign_buf[AWS_SIGV4_STRING_TO_SIGN_BUF_LEN] = { 0 };
  aws_sigv4_str_t string_to_sign = { .data = string_to_sign_buf };
  get_string_to_sign(&sigv4_params->x_amz_date, &credential_scope,
                     &canonical_request, &string_to_sign);
  /* Task 3: Calculate the signature */
  /* 3.1: Derive signing key */
  unsigned char signing_key_buf[SHA256_DIGEST_LENGTH] = { 0 };
  aws_sigv4_str_t signing_key = { .data = signing_key_buf, .len = SHA256_DIGEST_LENGTH };
  get_signing_key(sigv4_params, &signing_key);
  /* 3.2: HMAC-SHA256 the string to sign with the signing key */
  unsigned char signed_msg_buf[SHA256_DIGEST_LENGTH] = { 0 };
  aws_sigv4_str_t signed_msg = { .data = signed_msg_buf, .len = SHA256_DIGEST_LENGTH };
  mbedtls_md_hmac(&mbedtls_sha256_info,
                  signing_key.data, signing_key.len,
                  string_to_sign.data, string_to_sign.len,
                  signed_msg.data);
  /* hex-encode the signature in place at the end of the header value */
  aws_sigv4_str_t signature = { .data = str };
  get_hexdigest(&signed_msg, &signature);
  str += signature.len;
  auth_header->value.len = str - auth_header->value.data;
  return rc;
err:
  /* deallocate memory in case of failure (free(NULL) is a no-op) */
  if (auth_header && auth_header->value.data)
  {
    free(auth_header->value.data);
    auth_header->value.data = NULL;
  }
  return rc;
}
int aws_sigv4_sign_with_cstr(unsigned char* secret_access_key,
unsigned char* access_key_id,
unsigned char* method,
unsigned char* uri,
unsigned char* query_str,
unsigned char* host,
unsigned char* x_amz_content_sha256,
unsigned char* x_amz_date,
unsigned char* x_amz_decoded_content_length,
unsigned char* service,
unsigned char* region,
unsigned char** auth_header_value)
{
if (auth_header_value == NULL)
{
return AWS_SIGV4_INVALID_INPUT_ERROR;
}
aws_sigv4_params_t sigv4_params;
sigv4_params.secret_access_key = aws_sigv4_string(secret_access_key);
sigv4_params.access_key_id = aws_sigv4_string(access_key_id);
sigv4_params.method = aws_sigv4_string(method);
sigv4_params.uri = aws_sigv4_string(uri);
sigv4_params.query_str = aws_sigv4_string(query_str);
sigv4_params.host = aws_sigv4_string(host);
sigv4_params.x_amz_content_sha256 = aws_sigv4_string(x_amz_content_sha256);
sigv4_params.x_amz_decoded_content_length = aws_sigv4_string(x_amz_decoded_content_length);
sigv4_params.x_amz_date = aws_sigv4_string(x_amz_date);
sigv4_params.service = aws_sigv4_string(service);
sigv4_params.region = aws_sigv4_string(region);
aws_sigv4_header_t auth_header;
int rc = aws_sigv4_sign(&sigv4_params, &auth_header);
if (rc == AWS_SIGV4_OK)
{
*auth_header_value = auth_header.value.data;
}
return rc;
}
|
import { Settings as LayoutSettings } from '@ant-design/pro-layout'
// Default ProLayout runtime settings for the dashboard shell.
const Settings: LayoutSettings & {
  pwa?: boolean
  logo?: string
} = {
  navTheme: 'dark',
  // primary brand color — Ant Design "daybreak blue"; dark/light variants exist
  primaryColor: '#1890ff',
  layout: 'mix', // navigation layout: side, top, or mix
  contentWidth: 'Fluid',
  fixedHeader: false,
  fixSiderbar: true,
  colorWeak: false,
  title: 'antd-dashboard',
  pwa: false,
  iconfontUrl: '',
  splitMenus: true
}
export default Settings
|
def sum_list(nums):
    """Return the sum of the numbers in *nums*.

    Delegates to the built-in sum(), which handles the empty-iterable
    case (returns 0) and any iterable of numbers, replacing the original
    manual accumulation loop.
    """
    return sum(nums)
# Demo usage: sum the sample list and show the result.
sum_of_list = sum_list([1, 2, 3, 4])
print(sum_of_list)  # Output: 10
|
#!/bin/bash
# Download resources.tar.gz from Google Drive, handling the large-file
# confirmation flow: the first request only stores the session cookie,
# from which the confirm token is scraped for the real download.
curl -sc /tmp/cookie "https://drive.google.com/uc?export=download&id=1SakR8HL_e--lyN9lpdnvSKHZk-4H0cS_" > /dev/null
# NOTE(review): scrapes the token from the download-warning cookie line;
# this breaks if Google changes the cookie format — confirm it still works.
CODE="$(awk '/_warning_/ {print $NF}' /tmp/cookie)"
curl -Lb /tmp/cookie "https://drive.google.com/uc?export=download&confirm=${CODE}&id=1SakR8HL_e--lyN9lpdnvSKHZk-4H0cS_" -o resources.tar.gz
# unpack and clean up the archive
tar -zxvf resources.tar.gz
rm resources.tar.gz
echo Download finished.
|
<filename>src/components/neo4jDesktop/Splash.js
import React, { Component } from "react";
import { Image } from 'semantic-ui-react';
export default class Splash extends Component {
render() {
return (
<div className='Splash' style={{display: 'block', width: 320, marginLeft: 'auto', marginRight: 'auto'}}>
<Image
alt='Halin Monitoring'
className='SplashHalin'
src='img/halin-icon.png'
style={{display:'inline'}}
size='small'/>
<Image
alt='Neo4j Graph Database'
className='SplashNeo4j'
src='img/neo4j_logo_globe.png'
style={{display:'inline'}}
size='small'/>
</div>
)
}
}
|
/**
* this file will be loaded before server started
* you can define global functions used in controllers, models, templates
*/
/**
* use global.xxx to define global functions
*
* global.fn1 = function(){
*
* }
*/
'use strict';
/*****项目函数库*******/
// livi 日期格式化
// livi date formatter: turns "YYYY-MM-DD[ time]" into a "DD/YYYY/MM/"
// style string.
// NOTE(review): after split('/'), newdate[0] is the YEAR component, so
// the `newdate[0] < 10` padding branch can never fire for 4-digit years —
// the intent was probably to zero-pad the month (newdate[1]). Confirm
// against callers before changing.
global.liFormatDate = function (formatStr) {
    // Keep only the date part and normalise '-' separators to '/'.
    let newdate = formatStr.split(' ')[0].replace(/-/g, "/");
    newdate = newdate.split('/')
    // Result starts with the day component.
    let FormatDate = newdate[2] + '/';
    if (newdate[0] < 10) {
        FormatDate = FormatDate + 0 + newdate[0] + '/' + newdate[1] + '/';
    } else {
        FormatDate = FormatDate + newdate[0] + '/' + newdate[1] + '/';
    }
    return FormatDate;
};
// Return the current local date/time as "YYYY-MM-DD H:M:S" (month and day
// zero-padded; hours/minutes/seconds unpadded, as before).
// Fix: the original used the deprecated Date#getYear(), which returns the
// year minus 1900 (e.g. 124 for 2024); getFullYear() returns the real
// four-digit year.
global.getNowFormatDate = function () {
    var date = new Date();
    var seperator1 = "-";
    var seperator2 = ":";
    var month = date.getMonth() + 1; // getMonth() is zero-based
    var strDate = date.getDate();
    if (month >= 1 && month <= 9) {
        month = "0" + month;
    }
    if (strDate >= 0 && strDate <= 9) {
        strDate = "0" + strDate;
    }
    var currentdate = date.getFullYear() + seperator1 + month + seperator1 + strDate
        + " " + date.getHours() + seperator2 + date.getMinutes()
        + seperator2 + date.getSeconds();
    return currentdate;
}
// Strip leading and trailing whitespace from a string.
global.trimStr = function (str) {
    return str.replace(/^\s+/, "").replace(/\s+$/, "");
};
// Read a file's contents as UTF-8 text, returning a Promise<string>.
global.getContent = function (filePath) {
    var fs = require("fs")
    // Wrap fs.readFile into a Promise via thinkjs' promisify helper,
    // binding fs as the receiver.
    var readFile = think.promisify(fs.readFile, fs);
    return readFile(filePath, "utf8");
}
// Format a 10-digit (seconds) UNIX timestamp.
// `formatStr` uses single-letter placeholders y m d h i s (default
// "y-m-d h:i:s"); each is replaced at most once, in that order.
// When `fdate` is omitted/falsy the current time is used.
global.formatDate = function (formatStr, fdate) {
    var pattern = formatStr || "y-m-d h:i:s";
    var when = fdate ? new Date(parseInt(fdate) * 1000) : new Date();
    // Zero-pad a component to two digits.
    var pad = function (v) { return v > 9 ? "" + v : "0" + v; };
    var pieces = [
        "" + when.getFullYear(),
        pad(when.getMonth() + 1), // getMonth() is zero-based
        pad(when.getDate()),
        pad(when.getHours()),
        pad(when.getMinutes()),
        pad(when.getSeconds())
    ];
    var keys = "ymdhis";
    for (var i = 0; i < keys.length; i++) {
        pattern = pattern.replace(keys.charAt(i), pieces[i]);
    }
    return pattern;
}
// Convert a "YYYY-MM-DD hh:mm:ss" string (or, with no argument, the
// current moment) into a 10-digit UNIX timestamp in seconds.
global.time = function (str) {
    var ms;
    if (str) {
        // Normalise dashes to slashes so Date.parse accepts the string
        // consistently across engines.
        ms = Date.parse(str.replace(/-/g, "/"));
    } else {
        ms = new Date().getTime();
    }
    return parseInt(ms / 1000);
}
// Return the current time as a 13-digit millisecond UNIX timestamp.
global.timestamp = function () {
    "use strict";
    return Date.now();
}
// Format a 13-digit (milliseconds) UNIX timestamp using the same
// placeholder scheme as global.formatDate ("y-m-d h:i:s" by default).
// Returns '' for an empty/missing timestamp.
// Refactor: the original duplicated formatDate's padding/replacement
// logic line for line; it now truncates to whole seconds and delegates.
global.timeFormat = function (formatStr, fdate) {
    if (think.isEmpty(fdate)) {
        return ''
    }
    // Keep the first 10 digits: milliseconds -> whole seconds.
    var seconds = fdate.toString().substr(0, 10)
    return global.formatDate(formatStr, seconds);
}
// Human-readable countdown from now until `endDate` (a millisecond
// timestamp). Returns 0 when endDate is empty, the string '0天' when the
// deadline has passed, otherwise "D天 H小时 M 分钟S 秒".
// Fixes: seconds were computed with Math.round, which could display
// "60 秒"; stray debug console.log calls removed.
// NOTE(review): the mixed return types (number 0 vs strings) are kept
// for backward compatibility with existing callers.
global.timeLeft = function (endDate) {
    if (think.isEmpty(endDate)) {
        return 0
    }
    var remaining = endDate - new Date().getTime()
    if (remaining < 0) {
        return '0天'
    }
    var days = Math.floor(remaining / (24 * 3600 * 1000))
    // Milliseconds left over after removing whole days.
    var afterDays = remaining % (24 * 3600 * 1000)
    var hours = Math.floor(afterDays / (3600 * 1000))
    // Milliseconds left over after removing whole hours.
    var afterHours = remaining % (3600 * 1000)
    var minutes = Math.floor(afterHours / (60 * 1000))
    // Milliseconds left over after removing whole minutes.
    var afterMinutes = remaining % (60 * 1000)
    // floor (not round) so the seconds field never reads 60.
    var seconds = Math.floor(afterMinutes / 1000)
    let formatStr = days + "天 " + hours + "小时 " + minutes + " 分钟" + seconds + " 秒"
    return formatStr;
}
// Truncate `str` to at most `len` display columns, counting every
// non-ASCII (e.g. Chinese) character as two columns; append "..." when
// `hasDot` is set and the string exceeded `len`.
global.subStr = function (str, len, hasDot) {
    var newLength = 0;
    var newStr = "";
    // Anything outside \x00-\xff is treated as double-width.
    var chineseRegex = /[^\x00-\xff]/g;
    var singleChar = "";
    // Display width of the whole string (wide chars count twice).
    var strLength = str.replace(chineseRegex, "**").length;
    // NOTE(review): the loop bound is the display width, which can exceed
    // str.length; the surplus iterations read "" from charAt and are
    // harmless (the break fires first in practice), but `i < str.length`
    // would be clearer.
    for (var i = 0; i < strLength; i++) {
        singleChar = str.charAt(i).toString();
        if (singleChar.match(chineseRegex) != null) {
            newLength += 2;
        } else {
            newLength++;
        }
        if (newLength > len) {
            break;
        }
        newStr += singleChar;
    }
    if (hasDot && strLength > len) {
        newStr += "...";
    }
    return newStr;
}
// Strip HTML from a string: removes tags, trailing whitespace before
// newlines, and literal space characters.
// Fix: the original contained the identical `replace(/ /ig, '')`
// statement twice; the duplicate was a no-op and has been removed.
// NOTE(review): removing *every* space looks aggressive — the intent may
// have been `&nbsp;` entities that got decoded at some point; confirm
// against callers before tightening further.
global.removeTag = function (str) {
    str = str.replace(/<\/?[^>]*>/g, ''); // strip HTML tags
    str = str.replace(/[ | ]*\n/g, '\n'); // strip whitespace at line ends
    //str = str.replace(/\n[\s| | ]*\r/g,'\n'); // (disabled) collapse blank lines
    str = str.replace(/ /ig, ''); // remove remaining spaces
    return str;
}
// Synchronously read a file and return its raw contents (a Buffer).
// NOTE(review): think.require is thinkjs' module loader — confirm it
// resolves the core 'fs' module here; plain require('fs') may be meant.
global.readFile = function (file) {
    var fs = think.require('fs'); // load fs for file handling
    var data = fs.readFileSync(file);
    return data;
}
// Asynchronously write `data` to `file`; resolves true on success and
// false on error.
// Fix: the original passed a callback whose `return false/true` values
// were discarded, so the async function always resolved to undefined and
// callers could never observe failure. It now returns a
// Promise<boolean> that callers can await (backward compatible: the old
// resolved value was never meaningful).
// NOTE(review): think.require('fs') kept for consistency with readFile
// above — confirm it resolves the core module.
global.writeFile = function (file, data) {
    var fs = think.require('fs'); // load fs for file handling
    return new Promise(function (resolve) {
        fs.writeFile(file, data, function (err) {
            resolve(err ? false : true);
        });
    });
}
// Strip leading and trailing whitespace.
// NOTE(review): duplicate of the trimStr defined earlier in this file;
// this later assignment silently overwrites the identical earlier one —
// one of the two copies can be deleted.
global.trimStr = function (str) {
    return str.replace(/(^\s*)|(\s*$)/g, "");
}
// Return true when `s` consists entirely of decimal digits.
// Fixes: the original regex /\d*/ was unanchored and matched the empty
// string (so isNum('') was true), relied on a fragile array-to-string
// coercion (`r == s` where r is a match array), and threw for numeric
// arguments because numbers have no .match method.
global.isNum = function (s) {
    if (s === null || s === undefined) {
        return false;
    }
    return /^\d+$/.test(String(s));
}
// Return true when `str` is strictly equal (===) to some element of `arr`.
global.inArray = function (arr, str) {
    // Array#indexOf performs the same strict-equality comparison the
    // original reverse while-loop did.
    return arr.indexOf(str) !== -1;
}
/**
 * ltrim()
 * @param str [string to strip leading whitespace from]
 * @returns {string} str without leading whitespace
 */
/* global ltrim */
global.ltrim = function (str) {
    return str.replace(/^\s+/, "");
}
/**
 * rtrim()
 * @param str [string to strip trailing whitespace from]
 * @returns {string} str without trailing whitespace
 */
/* global rtrim */
global.rtrim = function (str) {
    return str.replace(/\s+$/, "");
}
// Strip leading/trailing whitespace using native String#trim.
// NOTE(review): this exact definition is repeated immediately below;
// one of the two copies can be deleted.
global.stringTrim = function (str) {
    return str.trim();
}
// NOTE(review): byte-for-byte duplicate of the stringTrim defined just
// above; it harmlessly overwrites it and can be deleted.
global.stringTrim = function (str) {
    return str.trim();
}
// Return true when `str` is a plausible mainland-China mobile number
// (11 digits starting 1[3-9]), ignoring surrounding whitespace.
// Fix: the original body was a copy-paste of stringTrim — it returned
// the trimmed string and performed no validation at all.
global.isPhone = function (str) {
    return /^1[3-9]\d{9}$/.test(str.trim());
}
// Build a random numeric code string of `_idx` digits (e.g. an SMS
// verification code).
global.createCode = function (_idx) {
    var digits = [];
    for (var n = 0; n < _idx; n += 1) {
        digits.push(Math.floor(Math.random() * 10));
    }
    return digits.join('');
}
|
package benchmarks.CLEVER.LoopMult15.Neq;
public class newV {
    // Accumulate -b a total of `a` times; returns 0 when a < 1.
    private int foo(int a, int b) {
        int acc = 0;
        int remaining = a;
        while (remaining > 0) {
            acc -= b;
            remaining--;
        }
        return acc;
    }

    // For x in [13, 16) return foo(x, 15); otherwise 0.
    public int main(int x) {
        if (x < 13 || x >= 16) {
            return 0;
        }
        return foo(x, 15);
    }
}
|
<reponame>harveyaa/nixtract
"""Integration tests (and some unit tests) for NIFTI extractions
Check to ensure that the data being extracted lines up with the labels defined
in the provided roi files. The approach is fairly straightforward:
1. Take an 3D NIFTI image and duplicate the data (10 times) so that it
creates a mock 4D functional image. This is performed once per user by
`setup_mock_data.py`, which is ran by the `mock_data` pytest fixture.
2. Run nixtract-nifti on the mock 4D image with the 3D image as the
roi_file
3. Validate extracted timeseries against the expected array, which has 10
rows (number of timepoints) and N columns, where N = number of labels
in dlabel (excluding 0). Each column should only have elements that
equal the label number (i.e. column 1 should be full of 1's).
A very similar approach is used to check the `--as_voxels` functionality,
where 1) all values in the timeseries should equal the label, and 2) the number
of columns should equal the number of voxels with that label in the 3D image.
Binary masks for these tests are created in `setup_mock_data.py`.
Coordinate extraction is tested by ensuring that each coordinate (radius=None)
equals the correct label. Note that different radii aren't tested directly,
as this is passed into nilearn and is already tested there.
The underlying nilearn masker object is also checked via
`test_set_volume_masker`. This will ensure that masker-specific arguments
(e.g., `mask_img`, `radius`) are passed to the correct masker.
The Schaefer atlas (100 region, 7 networks) is used.
Additional checks where scans are discarded and regressors are used are also
performed, which are some basic functionalities of `NiftiExtractor`.
"""
import os
import pytest
import json
import subprocess
import numpy as np
import nibabel as nib
import pandas as pd
from nixtract.extractors.nifti_extractor import _set_volume_masker
from nilearn.input_data import (NiftiLabelsMasker, NiftiMasker,
NiftiSpheresMasker,NiftiMapsMasker)
from nilearn import datasets
from scipy.stats import pearsonr
def test_set_volume_masker(data_dir, mock_data):
    """_set_volume_masker should select the nilearn masker class (and
    ROI count) appropriate to the kind of roi_file it receives."""
    mask = os.path.join(mock_data, 'schaefer_LH_Vis_4.nii.gz')
    atlas = os.path.join(
        data_dir,
        'Schaefer2018_100Parcels_7Networks_order_FSLMNI152_2mm.nii.gz'
    )
    atlas_prob = os.path.join(data_dir, 'difumo64.nii.gz')
    coordinates = os.path.join(
        data_dir,
        'Schaefer2018_100Parcels_7Networks_order_FSLMNI152_2mm.Centroid_XYZ.tsv'
    )

    # binary mask + voxel-level extraction -> NiftiMasker
    masker, n_rois = _set_volume_masker(mask, as_voxels=True)
    assert isinstance(masker, NiftiMasker)
    assert n_rois == 1

    # binary mask, region-level extraction -> NiftiLabelsMasker
    masker, n_rois = _set_volume_masker(mask)
    assert isinstance(masker, NiftiLabelsMasker)
    assert n_rois == 1

    # discrete-label atlas -> NiftiLabelsMasker, one ROI per label
    masker, n_rois = _set_volume_masker(atlas)
    assert isinstance(masker, NiftiLabelsMasker)
    assert n_rois == 100

    # coordinate table -> NiftiSpheresMasker
    masker, n_rois = _set_volume_masker(coordinates)
    assert isinstance(masker, NiftiSpheresMasker)
    assert n_rois == 100

    # probabilistic atlas -> NiftiMapsMasker
    masker, n_rois = _set_volume_masker(atlas_prob)
    assert isinstance(masker, NiftiMapsMasker)
    assert n_rois == 64
def test_mask(mock_data, tmpdir):
    """A single binary ROI (label 4) should yield one column of 4s."""
    roi_file = os.path.join(mock_data, 'schaefer_LH_Vis_4.nii.gz')
    func = os.path.join(mock_data, 'schaefer_func.nii.gz')
    cmd = (f"nixtract-nifti {tmpdir} --input_files {func} "
           f"--roi_file {roi_file}")
    subprocess.run(cmd.split())
    out_file = os.path.join(tmpdir, 'schaefer_func_timeseries.tsv')
    actual = pd.read_table(out_file)
    # 10 timepoints, one region whose voxels all equal the label value 4
    assert np.array_equal(actual.values, np.full((10, 1), 4))
def test_as_voxels(mock_data, tmpdir):
    """--as_voxels should emit one column per voxel in the ROI, all equal
    to the ROI's label value (4)."""
    roi_file = os.path.join(mock_data, 'schaefer_LH_Vis_4.nii.gz')
    func = os.path.join(mock_data, 'schaefer_func.nii.gz')
    cmd = (f"nixtract-nifti {tmpdir} --input_files {func} "
           f"--roi_file {roi_file} --as_voxels")
    subprocess.run(cmd.split())
    actual = pd.read_table(os.path.join(tmpdir, 'schaefer_func_timeseries.tsv'))
    # Column count must match the number of voxels carrying label 4.
    roi_array = nib.load(roi_file).get_fdata()
    n_voxels = int((roi_array == 4).sum())
    assert np.array_equal(actual.values, np.full((10, n_voxels), fill_value=4))
def test_label_atlas(data_dir, mock_data, tmpdir):
    """With the full 100-region atlas, column i should be constant at
    label i+1 across all 10 timepoints."""
    roi_file = os.path.join(
        data_dir,
        'Schaefer2018_100Parcels_7Networks_order_FSLMNI152_2mm.nii.gz'
    )
    func = os.path.join(mock_data, 'schaefer_func.nii.gz')
    cmd = (f"nixtract-nifti {tmpdir} --input_files {func} "
           f"--roi_file {roi_file}")
    subprocess.run(cmd.split())
    actual = pd.read_table(os.path.join(tmpdir, 'schaefer_func_timeseries.tsv'))
    expected = np.tile(np.arange(1, 101), (10, 1))
    assert np.array_equal(actual.values, expected)
def test_coord_atlas(data_dir, mock_data, tmpdir):
    """Coordinate extraction (radius=None) should read each centroid's
    own label: column i constant at i+1."""
    coord_table = 'Schaefer2018_100Parcels_7Networks_order_FSLMNI152_2mm.Centroid_XYZ.tsv'
    roi_file = os.path.join(data_dir, coord_table)
    func = os.path.join(mock_data, 'schaefer_func.nii.gz')
    cmd = (f"nixtract-nifti {tmpdir} --input_files {func} "
           f"--roi_file {roi_file}")
    subprocess.run(cmd.split())
    actual = pd.read_table(os.path.join(tmpdir, 'schaefer_func_timeseries.tsv'))
    expected = np.tile(np.arange(1, 101), (10, 1))
    assert np.array_equal(actual.values, expected)
def test_prob_atlas_shape(data_dir, mock_data, tmpdir):
    """A probabilistic atlas with 64 maps should produce 64 columns."""
    roi_file = os.path.join(data_dir, 'difumo64.nii.gz')
    func = os.path.join(mock_data, 'schaefer_func.nii.gz')
    cmd = (f"nixtract-nifti {tmpdir} --input_files {func} "
           f"--roi_file {roi_file}")
    subprocess.run(cmd.split())
    actual = pd.read_table(os.path.join(tmpdir, 'schaefer_func_timeseries.tsv'))
    assert actual.shape[1] == 64
def test_prob_vs_label_atlas(data_dir, mock_data, tmpdir):
    """Timeseries from the one-hot probabilistic Schaefer atlas should be
    near-identical to those from the discrete-label version.

    NOTE(review): `fetch_adhd` downloads a real subject over the network,
    so this test needs connectivity and is much slower than the others.
    """
    roi_file = os.path.join(data_dir, 'Schaefer2018_100Parcels_7Networks_order_FSLMNI152_2mm.nii.gz')
    roi_file_prob = os.path.join(mock_data, 'schaefer_prob.nii.gz')
    # Real functional data — the piecewise-constant mock image would make
    # the correlation check trivial.
    func = datasets.fetch_adhd(n_subjects=1, data_dir=str(tmpdir)).func[0]
    # Separate output dirs so the two runs don't clobber each other.
    os.mkdir(os.path.join(tmpdir, 'labels'))
    os.mkdir(os.path.join(tmpdir, 'prob'))
    cmd = (f"nixtract-nifti {tmpdir / 'labels'} --input_files {func} "
           f"--roi_file {roi_file}")
    subprocess.run(cmd.split())
    cmd_prob = (f"nixtract-nifti {tmpdir / 'prob'} --input_files {func} "
                f"--roi_file {roi_file_prob}")
    subprocess.run(cmd_prob.split())
    ts_name = '0010042_rest_tshift_RPI_voreg_mni_timeseries.tsv'
    ts = pd.read_table(os.path.join(tmpdir, 'labels/' + ts_name)).to_numpy()
    ts_prob = pd.read_table(os.path.join(tmpdir, 'prob/' + ts_name)).to_numpy()
    # Region-wise signals from the two extraction paths should correlate
    # almost perfectly.
    for i in range(ts.shape[1]):
        assert pearsonr(ts[:, i], ts_prob[:, i])[0] > 0.98
def test_labels(data_dir, mock_data, tmpdir, nifti_label_config):
    """Output column names should come from the `labels` entry of the
    JSON config, for both coordinate and atlas roi_files."""
    func = os.path.join(mock_data, 'schaefer_func.nii.gz')
    # Write the fixture-provided config to disk for the CLI to read.
    config_file = os.path.join(tmpdir, 'config.json')
    with open(config_file, 'w') as fp:
        json.dump(nifti_label_config, fp)
    expected = nifti_label_config['labels']

    # test with coordinates
    coords = 'Schaefer2018_100Parcels_7Networks_order_FSLMNI152_2mm.Centroid_XYZ.tsv'
    roi_file = os.path.join(data_dir, coords)
    cmd = (f"nixtract-nifti {tmpdir} --input_files {func} "
           f"--roi_file {roi_file} -c {config_file}")
    subprocess.run(cmd.split())
    actual = pd.read_table(os.path.join(tmpdir, 'schaefer_func_timeseries.tsv'))
    assert np.array_equal(actual.columns, expected)

    # test with atlas (second run overwrites the first output file)
    atlas = 'Schaefer2018_100Parcels_7Networks_order_FSLMNI152_2mm.nii.gz'
    roi_file = os.path.join(data_dir, atlas)
    cmd = (f"nixtract-nifti {tmpdir} --input_files {func} "
           f"--roi_file {roi_file} -c {config_file}")
    subprocess.run(cmd.split())
    actual = pd.read_table(os.path.join(tmpdir, 'schaefer_func_timeseries.tsv'))
    assert np.array_equal(actual.columns, expected)
def test_discard_scans(mock_data, tmpdir, data_dir):
    """--discard_scans 3 should drop the first 3 of 10 timepoints,
    leaving 7 rows of the expected label values."""
    roi_file = os.path.join(
        data_dir,
        'Schaefer2018_100Parcels_7Networks_order_FSLMNI152_2mm.nii.gz'
    )
    func = os.path.join(mock_data, 'schaefer_func.nii.gz')
    cmd = (f"nixtract-nifti {tmpdir} --input_files {func} "
           f"--roi_file {roi_file} --discard_scans 3")
    subprocess.run(cmd.split())
    actual = pd.read_table(os.path.join(tmpdir, 'schaefer_func_timeseries.tsv'))
    expected = np.tile(np.arange(1, 101), (7, 1))
    assert np.array_equal(actual.values, expected)
|
<filename>Tracking/Habduino/ax25modem.h
/* From Project Swift - High altitude balloon flight software */
/*=======================================================================*/
/* Copyright 2010-2012 <NAME> <<EMAIL>> */
/* <NAME> <<EMAIL>> */
/* */
/* This program is free software: you can redistribute it and/or modify */
/* it under the terms of the GNU General Public License as published by */
/* the Free Software Foundation, either version 3 of the License, or */
/* (at your option) any later version. */
/* */
/* This program is distributed in the hope that it will be useful, */
/* but WITHOUT ANY WARRANTY; without even the implied warranty of */
/* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
/* GNU General Public License for more details. */
/* */
/* You should have received a copy of the GNU General Public License */
/* along with this program. If not, see <http://www.gnu.org/licenses/>. */
#define APRS    // Uncomment to use APRS.
/* NOTE(review): this macro sits above the include guard, so repeated
 * inclusion re-defines it; identical object-like redefinitions are legal,
 * but moving it inside the guard would be tidier. */
#ifndef __AX25MODEM_H
#define __AX25MODEM_H
/* How often an APRS frame is transmitted, in minutes. */
#define APRS_TX_INTERVAL 1.00 // APRS TX Interval in minutes
/* Callsign and SSID used in transmitted APRS frames. */
#define APRS_CALLSIGN "WD8TA"
#define APRS_SSID (11)
/* Initialise the AX.25 modem hardware/state. */
extern void ax25_init(void);
/* Build and transmit an AX.25 frame: source/destination callsigns+SSIDs,
 * up to two digipeater path entries with TTLs, then printf-style data. */
extern void ax25_frame(char *scallsign, char sssid, char *dcallsign, char dssid,
char *path1, char ttl1, char *path2, char ttl2, char *data, ...);
/* Base-91 encode value v into n characters at s (APRS compressed format). */
extern char *ax25_base91enc(char *s, uint8_t n, uint32_t v);
#endif
|
def find_missing_number(arr):
    """Return the smallest positive integer missing from *arr*.

    Uses the classic in-place sign-marking technique: for each value v
    with 1 <= v <= n, the element at index v-1 is negated to record that
    v is present. Note that *arr* is mutated in place.

    Bug fix: the presence test previously read
    ``abs(arr[i] - n) - 1 < n``, which admits values up to 2n and indexes
    past the end of the list (IndexError for inputs such as [1, 2, 4]).

    Args:
        arr: List of integers (may contain negatives, zeros, duplicates).

    Returns:
        The smallest positive integer not present in *arr*.
    """
    n = len(arr)
    for i in range(n):
        val = abs(arr[i])
        # Only values in [1, n] are candidates; mark val as seen by
        # negating its slot (guarding against double-negation).
        if 1 <= val <= n and arr[val - 1] > 0:
            arr[val - 1] = -arr[val - 1]
    # The first slot left positive corresponds to the missing value.
    for i in range(n):
        if arr[i] > 0:
            return i + 1
    return n + 1
|
def smallest_unique_number(num_list):
    """Return the smallest value that occurs exactly once in *num_list*.

    Bug fix: the original recursed on ``list(set(...))`` after discarding
    the smallest element, which erased all duplicate counts — e.g. for
    [1, 1, 2, 2, 3] it returned 2 (a duplicate) instead of 3.

    Raises:
        ValueError: if no value occurs exactly once (the original
            likewise ended in a ValueError, via ``min`` of an empty set).
    """
    from collections import Counter
    uniques = [value for value, count in Counter(num_list).items()
               if count == 1]
    if not uniques:
        raise ValueError("no unique number in list")
    return min(uniques)
|
<gh_stars>10-100
package io.opensphere.kml.mantle.controller;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import de.micromata.opengis.kml.v_2_2_0.Geometry;
import de.micromata.opengis.kml.v_2_2_0.Placemark;
import de.micromata.opengis.kml.v_2_2_0.Style;
import de.micromata.opengis.kml.v_2_2_0.StyleState;
import io.opensphere.core.data.DataRegistry;
import io.opensphere.core.model.Altitude;
import io.opensphere.core.model.GeographicBoundingBox;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.server.ServerProviderRegistry;
import io.opensphere.kml.common.model.KMLDataSource;
import io.opensphere.kml.common.model.KMLFeature;
import io.opensphere.kml.common.util.KMLSpatialTemporalUtils;
import io.opensphere.kml.common.util.KMLStyleCache;
import io.opensphere.kml.common.util.KMLToolboxUtils;
import io.opensphere.kml.mantle.model.KMLMantleFeature;
import io.opensphere.mantle.MantleToolbox;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.element.DataElement;
import io.opensphere.mantle.data.element.MetaDataProvider;
import io.opensphere.mantle.data.element.impl.AbstractDataElementProvider;
import io.opensphere.mantle.data.element.impl.DefaultDataElement;
import io.opensphere.mantle.data.element.impl.DefaultMapDataElement;
import io.opensphere.mantle.data.geom.MapGeometrySupport;
import io.opensphere.mantle.data.geom.MapLocationGeometrySupport;
import io.opensphere.mantle.data.geom.MapPathGeometrySupport;
import io.opensphere.mantle.data.geom.util.MapGeometrySupportUtils;
/**
 * Data element provider for a JAK 2.2.0 object. Converts KML placemark
 * features into Mantle {@link DataElement}s, attaching time spans and
 * map geometry built from the placemark's KML geometry and styles.
 */
public class KMLDataElementProvider extends AbstractDataElementProvider<KMLFeature>
{
    /** The unique type counter (shared across providers so element ids stay distinct). */
    public static final AtomicInteger ourUniqueTypeCounter = new AtomicInteger(1000);

    /** The KML data source. */
    private final KMLDataSource myDataSource;

    /** The geometry builder. */
    private final KMLGeometryBuilder myGeometryBuilder;

    /**
     * Constructor.
     *
     * @param serverRegistry The server provider registry.
     * @param mantleToolbox The mantle toolbox
     * @param dataRegistry The data registry
     * @param dataSource The data source
     * @param dataType The DataTypeInfo
     * @param features The features
     */
    public KMLDataElementProvider(ServerProviderRegistry serverRegistry, MantleToolbox mantleToolbox, DataRegistry dataRegistry,
            KMLDataSource dataSource, DataTypeInfo dataType, Iterable<? extends KMLFeature> features)
    {
        super(dataType, features);
        myDataSource = dataSource;
        myGeometryBuilder = new KMLGeometryBuilder(dataRegistry, serverRegistry, mantleToolbox, dataSource);
    }

    @Override
    protected DataElement createDataElement(KMLFeature feature)
    {
        TimeSpan timeSpan = KMLSpatialTemporalUtils.timeSpanFromTimePrimitive(feature.getTimePrimitive());
        KMLMantleFeature mantleFeature = new KMLMantleFeature(feature, timeSpan);
        MetaDataProvider metaDataProvider = mantleFeature.newMetaDataProvider(getDataTypeInfo().getMetaDataInfo(), myDataSource);
        // NOTE(review): unchecked cast — presumably features reaching this
        // provider are always Placemarks; a non-Placemark would throw
        // ClassCastException here. Confirm upstream filtering.
        Geometry geometry = ((Placemark)feature.getFeature()).getGeometry();
        KMLStyleCache styleCache = KMLToolboxUtils.getKmlToolbox().getStyleCache();
        Style style = styleCache.getStyle(feature, StyleState.NORMAL);
        Style highlightStyle = styleCache.getStyle(feature, StyleState.HIGHLIGHT);
        MapGeometrySupport geomSupport = myGeometryBuilder.createMapGeometrySupport(geometry, style, highlightStyle, true);
        DataElement dataElement;
        if (geomSupport != null)
        {
            // Propagate the feature's time span to the geometry tree.
            setTimeSpan(geomSupport, timeSpan);
            // Nasty side effect
            feature.setGeoBoundingBox(getBoundingBox(geomSupport));
            feature.setGeometryColor(geomSupport.getColor());
            dataElement = new DefaultMapDataElement(ourUniqueTypeCounter.incrementAndGet(), timeSpan, getDataTypeInfo(),
                    metaDataProvider, geomSupport);
        }
        else
        {
            // No renderable geometry: fall back to a plain (non-map) element.
            dataElement = new DefaultDataElement(ourUniqueTypeCounter.incrementAndGet(), timeSpan, getDataTypeInfo(),
                    metaDataProvider);
        }
        return dataElement;
    }

    /**
     * Set the time span in a map geometry support and its children.
     *
     * @param geomSupport The map geometry support.
     * @param timeSpan The time span.
     */
    private static void setTimeSpan(MapGeometrySupport geomSupport, TimeSpan timeSpan)
    {
        geomSupport.setTimeSpan(timeSpan);
        List<MapGeometrySupport> children = geomSupport.getChildren();
        if (children != null)
        {
            // Recurse into the child geometries (depth-first).
            for (int index = 0; index < children.size(); ++index)
            {
                setTimeSpan(children.get(index), timeSpan);
            }
        }
    }

    /**
     * Custom version of getBoundingBox that can handle geometries with
     * different altitude references.
     *
     * @param geomSupport The geometry support
     * @return The GeographicBoundingBox (may be null if the geometry kind
     *         is neither a location nor a path)
     */
    private static GeographicBoundingBox getBoundingBox(MapGeometrySupport geomSupport)
    {
        GeographicBoundingBox bounds = null;
        if (geomSupport instanceof MapLocationGeometrySupport)
        {
            // Point geometry: degenerate box at the point's location.
            MapLocationGeometrySupport locationGeomSupport = (MapLocationGeometrySupport)geomSupport;
            bounds = new GeographicBoundingBox(locationGeomSupport.getLocation(), locationGeomSupport.getLocation());
        }
        else if (geomSupport instanceof MapPathGeometrySupport)
        {
            MapPathGeometrySupport pathGeomSupport = (MapPathGeometrySupport)geomSupport;
            bounds = MapGeometrySupportUtils.getBoundingBox(pathGeomSupport);
        }
        if (bounds != null && geomSupport.hasChildren())
        {
            // Merge in the children's bounds, normalised to terrain altitude.
            GeographicBoundingBox childBB = MapGeometrySupportUtils.getMergedChildBounds(geomSupport, null,
                    Altitude.ReferenceLevel.TERRAIN);
            if (childBB != null)
            {
                bounds = GeographicBoundingBox.merge(bounds, childBB, Altitude.ReferenceLevel.TERRAIN);
            }
        }
        return bounds;
    }
}
|
<reponame>tignear/bot
import * as moment from "moment-timezone";
// Discriminator for the two flavours of game-event scheduling.
export type GameEventKind = "periodical" | "fixed";

// English weekday names, Sunday-first (matches moment's day() indexing).
export type DayOfWeek =
  | "Sunday"
  | "Monday"
  | "Tuesday"
  | "Wednesday"
  | "Thursday"
  | "Friday"
  | "Saturday";

// Weekday names indexed 0..6, Sunday-first, for mapping DayOfWeek to
// moment day numbers.
// NOTE(review): typed as a 7-tuple of plain `string` rather than
// `DayOfWeek` — tightening the element type would let findIndex-based
// lookups stay type-safe.
export const dayOfWeekArray: readonly [
  string,
  string,
  string,
  string,
  string,
  string,
  string
] = [
  "Sunday",
  "Monday",
  "Tuesday",
  "Wednesday",
  "Thursday",
  "Friday",
  "Saturday",
];

// Higher-kinded map from event kind to that kind's collection-name type.
export type HKTCollectionName = {
  [P in GameEventKind]: CollectionNameBase<P>;
};

// Union of the two collection-name variants of an HKTCollectionName.
export type HKTCollectionNameU<T extends HKTCollectionName> =
  | T["fixed"]
  | T["periodical"];
/** Timing of an event that repeats at a fixed interval after its last firing. */
export class GameEventTimingPeriodical {
  constructor(
    public readonly intervalInMilliseconds: moment.Duration,
    public readonly lastFireTime: moment.Moment
  ) {}
}

/**
 * One weekly slot of a fixed-schedule event: a weekday plus a wall-clock
 * time interpreted in time zone `tz`.
 */
export class GameEventTimingFixedEntry {
  constructor(
    public readonly dayOfWeek: DayOfWeek,
    public readonly hours: number,
    public readonly minutes: number,
    public readonly tz: string
  ) {}
}

/** Timing of a fixed-schedule event: one or more weekly entries. */
export class GameEventTimingFixed {
  constructor(public readonly entrys: GameEventTimingFixedEntry[]) {}
}
/**
 * Base class for a named collection of game events of a single kind.
 * Holds the collection's name/id, its tabular column header, and the
 * events themselves.
 */
export abstract class GameEventCollectionBase<
  CollectionNameT extends CollectionNameBase<ValueT["kind"]>,
  ValueT extends GameEventPeriodical | GameEventFixed
> {
  readonly name: CollectionNameT;
  readonly header: readonly string[];
  readonly events: ValueT[];
  readonly kind: ValueT["kind"];
  constructor(
    name: CollectionNameT,
    kind: ValueT["kind"],
    header: readonly string[],
    events: ValueT[]
  ) {
    this.name = name;
    this.header = header;
    this.kind = kind;
    this.events = events;
  }
}

/** Collection whose events repeat on a fixed interval. */
export class GameEventCollectionPeriodical<
  CollectionNameT extends CollectionNameBase<"periodical">
> extends GameEventCollectionBase<CollectionNameT, GameEventPeriodical> {
  constructor(
    name: CollectionNameT,
    header: readonly string[],
    events: GameEventPeriodical[]
  ) {
    super(name, "periodical", header, events);
  }
}

/** Collection whose events occur at fixed weekly times. */
export class GameEventCollectionFixed<
  CollectionNameT extends CollectionNameBase<"fixed">
> extends GameEventCollectionBase<CollectionNameT, GameEventFixed> {
  constructor(
    name: CollectionNameT,
    header: readonly string[],
    events: GameEventFixed[]
  ) {
    super(name, "fixed", header, events);
  }
}

// Either flavour of collection for a given name family.
export type GameEventCollection<CollectionNameT extends HKTCollectionName> =
  | GameEventCollectionPeriodical<CollectionNameT["periodical"]>
  | GameEventCollectionFixed<CollectionNameT["fixed"]>;
// Free-form event description: `name` and `timing` are required, any
// additional string columns are allowed.
export type GameEventDesc = {
  readonly name: string;
  readonly timing: string;
  readonly [s: string]: string;
};
/**
 * Common base of all game events: the lead times at which notifications
 * fire, the column header describing `desc`, and when the event was last
 * notified. Subclasses supply the kind discriminator and the description.
 */
export abstract class GameEventBase<kindT extends GameEventKind> {
  constructor(
    public readonly timingToNotify: readonly moment.Duration[],
    public readonly header: readonly string[],
    public readonly lastNotice: moment.Moment
  ) {
    // Parameter properties are assigned automatically; the original
    // redundant `this.header = header;` re-assignment was removed.
  }
  /** Convenience accessor for the event's display name. */
  get name(): string {
    return this.desc.name;
  }
  abstract readonly kind: kindT;
  abstract readonly desc: GameEventDesc;
}
/** A game event that repeats on a fixed interval. */
export class GameEventPeriodical extends GameEventBase<"periodical"> {
  constructor(
    timingToNotify: readonly moment.Duration[],
    header: readonly string[],
    lastNotice: moment.Moment,
    timing: GameEventTimingPeriodical,
    public readonly desc: GameEventDesc
  ) {
    super(timingToNotify, header, lastNotice);
    this.timing = timing;
  }
  readonly timing: GameEventTimingPeriodical;
  // Literal discriminator used by nextTiming()'s switch.
  readonly kind: "periodical" = "periodical";
}

/** A game event that occurs at fixed weekly times. */
export class GameEventFixed extends GameEventBase<"fixed"> {
  constructor(
    timingToNotify: readonly moment.Duration[],
    header: readonly string[],
    lastNotice: moment.Moment,
    timing: GameEventTimingFixed,
    public readonly desc: GameEventDesc
  ) {
    super(timingToNotify, header, lastNotice);
    this.timing = timing;
  }
  readonly timing: GameEventTimingFixed;
  // Literal discriminator used by nextTiming()'s switch.
  readonly kind: "fixed" = "fixed";
}
// Discriminated union over the two event flavours (narrow on `kind`).
export type GameEvent = GameEventPeriodical | GameEventFixed;

// Kind-indexed record of the two collection types for a name family.
export type GameEventCollectionSwitch<
  CollectionNameT extends HKTCollectionName
> = {
  periodical: GameEventCollectionPeriodical<CollectionNameT["periodical"]>;
  fixed: GameEventCollectionFixed<CollectionNameT["fixed"]>;
};

/* export interface NotifyOnceEvent<kindT extends GameEventKind>{
  event:GameEvent<kindT>
}*/

// Identifies a collection: a display name plus its kind discriminator.
export interface CollectionNameBase<KindT extends GameEventKind> {
  readonly name: string;
  readonly kind: KindT;
}
/**
 * Storage abstraction for game-event collections, keyed by a collection
 * group id. The overloads of `collection` narrow the returned collection
 * type from the name's kind.
 * NOTE(review): `collcetionGroupId` is a typo for "collectionGroupId",
 * but renaming it would break every implementer/caller — keep as is.
 */
export interface GameEventRepository<
  CollectionGroupIdT,
  HKTCollectionNameT extends HKTCollectionName
> {
  // Resolve a raw id string into a typed collection-group id.
  collcetionGroupId(idString: string): Promise<CollectionGroupIdT>;
  // List every collection name in the group.
  listCollectionName(
    collectionGroupId: CollectionGroupIdT
  ): Promise<HKTCollectionNameU<HKTCollectionNameT>[]>;
  // Look up one collection name within the group by its display name.
  collectionName(
    collectionGroupId: CollectionGroupIdT,
    name: string
  ): Promise<HKTCollectionNameU<HKTCollectionNameT>>;
  collection(
    collectionGroupId: CollectionGroupIdT,
    collectionId: HKTCollectionNameT["fixed"]
  ): Promise<GameEventCollectionSwitch<HKTCollectionNameT>["fixed"]>;
  collection(
    collectionGroupId: CollectionGroupIdT,
    collectionId: HKTCollectionNameT["periodical"]
  ): Promise<GameEventCollectionSwitch<HKTCollectionNameT>["periodical"]>;
  collection(
    collectionGroupId: CollectionGroupIdT,
    collectionId: HKTCollectionNameU<HKTCollectionNameT>
  ): Promise<GameEventCollection<HKTCollectionName>>;
  // Append a new row (column values follow the collection's header).
  put(
    collectionGroupId: CollectionGroupIdT,
    collectionId: HKTCollectionNameU<HKTCollectionNameT>,
    value: (string | number | null)[]
  ): Promise<void>;
  // Update an existing row.
  update(
    collectionGroupId: CollectionGroupIdT,
    collectionId: HKTCollectionNameU<HKTCollectionNameT>,
    value: (string | number | null)[]
  ): Promise<void>;
}

/** Sink that registers events for notification in a guild. */
export interface GameEventNotificationRepository {
  register(guildId: string, event: GameEvent[]): Promise<void>;
}
/**
 * Choose the day-of-week number (possibly pushed one week ahead) for the
 * next occurrence of `day`, given the candidate time `t` and the current
 * time `now`. Values >= 7 mean "that weekday next week".
 */
function nextTimingDay(t: moment.Moment, now: moment.Moment, day: number) {
  const today = now.day();
  if (today !== day) {
    // Different weekday: later this week if still ahead, else next week.
    return today < day ? day : 7 + day;
  }
  // Same weekday: today only if the slot's time has not already passed.
  return now.isAfter(t) ? 7 + day : day;
}
// Compute the next occurrence of one weekly entry, evaluated in the
// entry's own time zone.
function nextTimingFixed(t: GameEventTimingFixedEntry, now: moment.Moment) {
  const tm = now.clone().tz(t.tz);
  // Pin the wall-clock time of the slot.
  tm.hour(t.hours);
  tm.minute(t.minutes);
  tm.second(0);
  tm.millisecond(0);
  // Then move to the next matching weekday (possibly next week).
  tm.day(
    nextTimingDay(
      tm,
      now,
      dayOfWeekArray.findIndex((e) => e === t.dayOfWeek)
    )
  );
  return tm;
}

/**
 * Next time `event` fires. For fixed events this is the earliest of its
 * weekly entries; for periodical events, lastFireTime + interval.
 */
export function nextTiming(
  event: GameEvent,
  givedNow?: moment.Moment | undefined
): moment.Moment | undefined {
  const now = givedNow || moment.utc();
  switch (event.kind) {
    case "fixed":
      // Earliest upcoming entry wins.
      return event.timing.entrys
        .map((e) => nextTimingFixed(e, now))
        .sort((a, b) => a.diff(b))[0];
    case "periodical":
      return event.timing.lastFireTime
        .clone()
        .add(event.timing.intervalInMilliseconds, "milliseconds");
  }
}

/**
 * Next moment a notification should be sent for `event`: the earliest
 * of (event time - lead time) that is still in the future, or undefined
 * when the event has no next occurrence or all leads have passed.
 */
export function nextNoticeTime(
  event: GameEvent,
  givedNow?: moment.Moment | undefined
): moment.Moment | undefined {
  const now = givedNow || moment.utc();
  const eventTime = nextTiming(event, now);
  if (eventTime === undefined) {
    return undefined;
  }
  const r = event.timingToNotify
    .map((e) => eventTime.clone().subtract(e))
    .filter((e) => {
      // Keep only notification moments that have not already passed.
      const diffTime = e.diff(now, "milliseconds");
      return diffTime >= 0;
    })
    .sort((a, b) => a.diff(b))[0];
  return r;
}
|
#!/usr/bin/env sh
# Thin wrapper: build (if needed) and run the project via stack,
# forwarding all command-line arguments.
# "$@" (quoted) preserves argument boundaries; the original unquoted $@
# word-split and glob-expanded any argument containing spaces or wildcards.
stack run -- "$@"
|
##-------------------------------------------------------------
## NB: This is unused code.
## Keep it for now in case we decide later that the enterprise
## contract should fetch data from rekor.
##-------------------------------------------------------------
# Use rekor-cli to fetch one log entry by its log index.
# $1: log index; $2: rekor host (default: rekor.sigstore.dev)
rekor-log-entry() {
    local log_index=$1
    local rekor_host=${2:-rekor.sigstore.dev}
    # Quote expansions so values survive word splitting and globbing (SC2086)
    rekor-cli get --log-index "$log_index" --rekor_server "https://$rekor_host" --format json
}
# Same thing but lookup by uuid instead of log index.
# $1: entry uuid; $2: rekor host (default: rekor.sigstore.dev)
rekor-uuid-entry() {
    local uuid=$1
    local rekor_host=${2:-rekor.sigstore.dev}
    # Quote expansions so values survive word splitting and globbing (SC2086)
    rekor-cli get --uuid "$uuid" --rekor_server "https://$rekor_host" --format json
}
# Extract the log index from a transparency url.
# $1: url expected to carry a logIndex query parameter
log-index-from-url() {
    local url=$1
    # Assume it has a url param called logIndex.
    # Use $ARGV[0] (scalar element); the original @ARGV[0] is a one-element
    # array slice, which warns under "use warnings".
    perl -MURI -e '%u = URI->new($ARGV[0])->query_form; print $u{logIndex}' "$url"
}
# Extract the rekor host from a transparency url.
# $1: url
rekor-host-from-url() {
    local url=$1
    # $ARGV[0] (scalar element) instead of the @ARGV[0] array slice
    perl -MURI -e 'print URI->new($ARGV[0])->host' "$url"
}
# Fetch the rekor log entry referenced by a transparency url.
# $1: transparency url containing a logIndex parameter
rekor-log-entry-from-url() {
    local url=$1
    # Quote the command substitutions so the extracted values are each
    # passed as a single argument (unquoted they would be word-split)
    rekor-log-entry "$(log-index-from-url "$url")" "$(rekor-host-from-url "$url")"
}
# If the rekor log entry has an attestation, extract it and
# save it separately so we can access it more conveniently.
# $1: rekor entry JSON; $2: path to write the decoded attestation to
rekor-save-attestation-maybe() {
    local entry_json="$1"
    local out_file="$2"
    local attestation
    attestation=$( echo "$entry_json" | jq -r '.Attestation' )
    # Nothing to do when the field is absent, the JSON literal null,
    # or an empty object
    if [[ -z $attestation || $attestation == 'null' || $attestation == '{}' ]]; then
        return 0
    fi
    echo "Saving attestation extracted from rekor data"
    echo "$attestation" | base64 -d | jq > "$out_file"
}
# Save a transparency log entry to a json data file.
# For convenience also save the attestation if there is one.
# $1: log index; $2: rekor host (default: rekor.sigstore.dev)
rekor-log-entry-save() {
    local log_index=$1
    local rekor_host=${2:-rekor.sigstore.dev}
    # Declare separately from assignment: "local x=$(cmd)" would mask the
    # command's exit status (SC2155)
    local entry_data entry_file att_file
    entry_data=$( rekor-log-entry "$log_index" "$rekor_host" )
    entry_file=$( json-data-file rekor "$rekor_host" index "$log_index" entry )
    att_file=$( json-data-file rekor "$rekor_host" index "$log_index" attestation )
    echo "Saving log index $log_index from $rekor_host"
    echo "$entry_data" | jq > "$entry_file"
    rekor-save-attestation-maybe "$entry_data" "$att_file"
}
# Save the rekor log entry referenced by a transparency url.
# $1: transparency url containing a logIndex parameter
rekor-log-entry-save-from-url() {
    local url=$1
    # Quote the command substitutions so each extracted value is passed
    # as a single argument (unquoted they would be word-split)
    rekor-log-entry-save "$(log-index-from-url "$url")" "$(rekor-host-from-url "$url")"
}
# Just to avoid very long paths: strip a leading algorithm prefix such
# as "sha256:" and keep only the first 11 characters of the digest.
shorten-sha() {
    local full_sha=$1
    echo "$full_sha" | sed 's|^sha[0-9/]\+:||' | head -c 11
}
# Save every rekor entry matching an image digest.
# $1: image digest (e.g. sha256:...); $2: transparency url (used for the host)
rekor-digest-save() {
    local digest=$1
    local transparency_url=$2
    # Declare separately from assignment so command exit statuses are not
    # masked by "local" (SC2155)
    local rekor_host short_digest uuids
    rekor_host=$( rekor-host-from-url "$transparency_url" )
    short_digest=$( shorten-sha "$digest" )
    uuids=$( rekor-cli search --sha "$digest" --rekor_server "https://$rekor_host" 2>/dev/null )
    # It's possible to have multiple entries for a particular digest so let's
    # save them all. $uuids is intentionally unquoted: word splitting turns
    # the whitespace-separated search output into loop items.
    for uuid in $uuids; do
        local short_uuid entry_file att_file entry_data
        short_uuid=$( shorten-sha "$uuid" )
        entry_file=$( json-data-file rekor "$rekor_host" digest "$short_digest" uuid "$short_uuid" entry )
        att_file=$( json-data-file rekor "$rekor_host" digest "$short_digest" uuid "$short_uuid" attestation )
        entry_data=$( rekor-uuid-entry "$uuid" "$rekor_host" )
        echo "Saving log uuid $short_uuid for image digest $short_digest from $rekor_host"
        # Quote the redirect target (was unquoted "> $entry_file"): a path
        # containing whitespace would otherwise break the redirection
        echo "$entry_data" | jq > "$entry_file"
        rekor-save-attestation-maybe "$entry_data" "$att_file"
    done
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.