text stringlengths 1 1.05M |
|---|
// Reads the value currently chosen in the #httpMethodSelect dropdown.
// Logs the selection when one exists, otherwise reports an error.
function handleRouteSelection() {
    const method = document.getElementById('httpMethodSelect').value;
    if (!method) {
        console.error('Error: No HTTP method selected');
        return;
    }
    console.log(`Selected HTTP method: ${method}`);
    // Perform corresponding action based on the selected method
}
#!/usr/bin/env zsh
# The purpose of this script is to use fswatch
# to look for changes to files in this project.
#
# E.g. if a slideshow is updated/created/deleted
# then the event is fired off to build.sh where
# the new RevealJS HTML file can be created
# using Pandoc.
#
# Load the user profile so terminal-notifier/fswatch are on PATH.
source ~/.profile
# Post a desktop notification so the user can see the watcher started.
terminal-notifier \
-group "com.finnlesueur.science" \
-title "Pūtaiao" \
-message "fswatch started!" \
-appIcon http://localhost:1313/favicon.png
# Watch ./content/ recursively. Exclude everything by default, then
# re-include the source formats we care about (.md/.html/.css).
# --print0 pairs with `xargs -0` so paths containing spaces survive;
# each changed path is handed to sparse-build.py one at a time (-n 1).
fswatch --print0 \
--event-flags \
--recursive \
--exclude=".*" \
--include="\\.md$" \
--include="\\.html$" \
--include="\\.css$" \
./content/ \
| xargs -0 -n 1 python3 sparse-build.py -d
|
#!/bin/sh
# Build wrapper: `./script builder [docker-args...]` builds the builder image
# from Dockerfile.build; any other invocation delegates to docker-compose.
#
# Fixes: `==` is a bashism — POSIX `test` uses `=` and a strict /bin/sh
# (e.g. dash) rejects `==`. Also quote ${USER} and "$@" so extra arguments
# and unusual usernames are passed through intact.
if [ "$1" = "builder" ]; then
    shift
    exec docker build -f Dockerfile.build -t "${USER}/burp-rest-api-builder:latest" . "$@"
else
    exec docker-compose build
fi
|
<reponame>NguyenHanh1998/fasty-frontend<gh_stars>0
import { Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';
import { APP_FILTER, APP_INTERCEPTOR } from '@nestjs/core';
import { TypeOrmModule } from '@nestjs/typeorm';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { databaseConfig } from './config/database.config';
import { ExceptionFilter } from './config/exception/exception.filter';
import { TransformInterceptor } from './config/rest/transform.interceptor';
import { AuthModule } from './modules/auth/auth.module';
import { CratesModule } from './modules/crates/crates.module';
import { SubscriptionsModule } from './modules/subscriptions/subscriptions.module';
import { ProductsModule } from './modules/products/products.module';
import { ServeStaticModule } from '@nestjs/serve-static';
import { join } from 'path';
// Root application module: wires global configuration, the TypeORM database
// connection, static file serving, and the feature modules, and registers an
// application-wide interceptor and exception filter.
@Module({
  imports: [
    // isGlobal makes ConfigService injectable everywhere without re-importing.
    ConfigModule.forRoot({ isGlobal: true }),
    TypeOrmModule.forRoot(databaseConfig),
    // Serves files from <project root>/public.
    ServeStaticModule.forRoot({
      rootPath: join(__dirname, '..', 'public'),
    }),
    AuthModule,
    CratesModule,
    SubscriptionsModule,
    ProductsModule,
  ],
  controllers: [AppController],
  providers: [
    AppService,
    // Applied to every route: TransformInterceptor shapes responses.
    {
      provide: APP_INTERCEPTOR,
      useClass: TransformInterceptor,
    },
    // Applied to every route: ExceptionFilter handles thrown exceptions.
    {
      provide: APP_FILTER,
      useClass: ExceptionFilter,
    },
  ],
})
export class AppModule {}
|
// Generated by Apple Swift version 2.0 (swiftlang-700.0.52.2 clang-700.0.65)
#pragma clang diagnostic push
#if defined(__has_include) && __has_include(<swift/objc-prologue.h>)
# include <swift/objc-prologue.h>
#endif
#pragma clang diagnostic ignored "-Wauto-import"
#include <objc/NSObject.h>
#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>
#if defined(__has_include) && __has_include(<uchar.h>)
# include <uchar.h>
#elif !defined(__cplusplus) || __cplusplus < 201103L
typedef uint_least16_t char16_t;
typedef uint_least32_t char32_t;
#endif
typedef struct _NSZone NSZone;
#if !defined(SWIFT_PASTE)
# define SWIFT_PASTE_HELPER(x, y) x##y
# define SWIFT_PASTE(x, y) SWIFT_PASTE_HELPER(x, y)
#endif
#if !defined(SWIFT_METATYPE)
# define SWIFT_METATYPE(X) Class
#endif
#if defined(__has_attribute) && __has_attribute(objc_runtime_name)
# define SWIFT_RUNTIME_NAME(X) __attribute__((objc_runtime_name(X)))
#else
# define SWIFT_RUNTIME_NAME(X)
#endif
#if defined(__has_attribute) && __has_attribute(swift_name)
# define SWIFT_COMPILE_NAME(X) __attribute__((swift_name(X)))
#else
# define SWIFT_COMPILE_NAME(X)
#endif
#if !defined(SWIFT_CLASS_EXTRA)
# define SWIFT_CLASS_EXTRA
#endif
#if !defined(SWIFT_PROTOCOL_EXTRA)
# define SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_ENUM_EXTRA)
# define SWIFT_ENUM_EXTRA
#endif
#if !defined(SWIFT_CLASS)
# if defined(__has_attribute) && __has_attribute(objc_subclassing_restricted)
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# else
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# endif
#endif
#if !defined(SWIFT_PROTOCOL)
# define SWIFT_PROTOCOL(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
# define SWIFT_PROTOCOL_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_EXTENSION)
# define SWIFT_EXTENSION(M) SWIFT_PASTE(M##_Swift_, __LINE__)
#endif
#if !defined(OBJC_DESIGNATED_INITIALIZER)
# if defined(__has_attribute) && __has_attribute(objc_designated_initializer)
# define OBJC_DESIGNATED_INITIALIZER __attribute__((objc_designated_initializer))
# else
# define OBJC_DESIGNATED_INITIALIZER
# endif
#endif
#if !defined(SWIFT_ENUM)
# define SWIFT_ENUM(_type, _name) enum _name : _type _name; enum SWIFT_ENUM_EXTRA _name : _type
#endif
typedef float swift_float2 __attribute__((__ext_vector_type__(2)));
typedef float swift_float3 __attribute__((__ext_vector_type__(3)));
typedef float swift_float4 __attribute__((__ext_vector_type__(4)));
typedef double swift_double2 __attribute__((__ext_vector_type__(2)));
typedef double swift_double3 __attribute__((__ext_vector_type__(3)));
typedef double swift_double4 __attribute__((__ext_vector_type__(4)));
typedef int swift_int2 __attribute__((__ext_vector_type__(2)));
typedef int swift_int3 __attribute__((__ext_vector_type__(3)));
typedef int swift_int4 __attribute__((__ext_vector_type__(4)));
#if defined(__has_feature) && __has_feature(modules)
@import ObjectiveC;
@import CoreGraphics;
@import UIKit;
@import Foundation;
#endif
#pragma clang diagnostic ignored "-Wproperty-attribute-mismatch"
#pragma clang diagnostic ignored "-Wduplicate-method-arg"
/// Screen edge a notification animates in from (or out to).
/// NOTE: generated header (Swift interop) — edits here are lost on regeneration.
typedef SWIFT_ENUM(NSInteger, CWNotificationAnimationStyle) {
  CWNotificationAnimationStyleTop = 0,
  CWNotificationAnimationStyleBottom = 1,
  CWNotificationAnimationStyleLeft = 2,
  CWNotificationAnimationStyleRight = 3,
};
/// Whether the notification replaces the content beneath it or is overlaid on top.
typedef SWIFT_ENUM(NSInteger, CWNotificationAnimationType) {
  CWNotificationAnimationTypeReplace = 0,
  CWNotificationAnimationTypeOverlay = 1,
};
/// Region of the screen the notification occupies (status bar vs. navigation bar).
typedef SWIFT_ENUM(NSInteger, CWNotificationStyle) {
  CWNotificationStyleStatusBarNotification = 0,
  CWNotificationStyleNavigationBarNotification = 1,
};
@class ScrollLabel;
@class UIView;
@class CWWindowContainer;
@class UIColor;
@class UIFont;
@class NSAttributedString;
/// Objective-C interface for the Swift CWStatusBarNotification class:
/// displays a temporary notification (text, attributed string, or custom view)
/// in the status/navigation bar area.
/// NOTE: generated header — do not edit by hand; regenerate from the Swift source.
SWIFT_CLASS("_TtC23CWStatusBarNotification23CWStatusBarNotification")
@interface CWStatusBarNotification : NSObject
/// Internal views; nil until a notification is displayed.
@property (nonatomic) ScrollLabel * __nullable notificationLabel;
@property (nonatomic) UIView * __nullable statusBarView;
/// Invoked when the user taps the visible notification.
@property (nonatomic, copy) void (^ __nonnull notificationTappedClosure)(void);
@property (nonatomic) BOOL notificationIsShowing;
@property (nonatomic) BOOL notificationIsDismissing;
@property (nonatomic) CWWindowContainer * __nullable notificationWindow;
/// Appearance configuration for the built-in label presentation.
@property (nonatomic) UIColor * __nonnull notificationLabelBackgroundColor;
@property (nonatomic) UIColor * __nonnull notificationLabelTextColor;
@property (nonatomic) UIFont * __nonnull notificationLabelFont;
@property (nonatomic) CGFloat notificationLabelHeight;
@property (nonatomic) UIView * __nullable customView;
@property (nonatomic) BOOL multiline;
@property (nonatomic) UIInterfaceOrientationMask supportedInterfaceOrientations;
/// Animation configuration (duration, bar style, in/out edges, replace/overlay).
@property (nonatomic) NSTimeInterval notificationAnimationDuration;
@property (nonatomic) enum CWNotificationStyle notificationStyle;
@property (nonatomic) enum CWNotificationAnimationStyle notificationAnimationInStyle;
@property (nonatomic) enum CWNotificationAnimationStyle notificationAnimationOutStyle;
@property (nonatomic) enum CWNotificationAnimationType notificationAnimationType;
@property (nonatomic) UIStatusBarStyle preferredStatusBarStyle;
- (nonnull instancetype)init OBJC_DESIGNATED_INITIALIZER;
/// Display variants: completion-based overloads stay visible until dismissed
/// manually; forDuration: overloads auto-dismiss after the given interval.
- (void)displayNotificationWithMessage:(NSString * __nonnull)message completion:(void (^ __nonnull)(void))completion;
- (void)displayNotificationWithMessage:(NSString * __nonnull)message forDuration:(NSTimeInterval)duration;
- (void)displayNotificationWithAttributedString:(NSAttributedString * __nonnull)attributedString completion:(void (^ __nonnull)(void))completion;
- (void)displayNotificationWithAttributedString:(NSAttributedString * __nonnull)attributedString forDuration:(NSTimeInterval)duration;
- (void)displayNotificationWithView:(UIView * __nonnull)view completion:(void (^ __nonnull)(void))completion;
- (void)displayNotificationWithView:(UIView * __nonnull)view forDuration:(NSTimeInterval)duration;
- (void)dismissNotificationWithCompletion:(void (^ __nullable)(void))completion;
- (void)dismissNotification;
@end
@class UIEvent;
@class NSCoder;
/// UIWindow subclass hosting the notification; overrides hit-testing so touches
/// reach the notification content. Generated header — do not edit by hand.
SWIFT_CLASS("_TtC23CWStatusBarNotification17CWWindowContainer")
@interface CWWindowContainer : UIWindow
- (UIView * __nullable)hitTest:(CGPoint)pt withEvent:(UIEvent * __nullable)event;
- (nonnull instancetype)initWithFrame:(CGRect)frame OBJC_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder * __nonnull)aDecoder OBJC_DESIGNATED_INITIALIZER;
@end
/// UILabel subclass used for the notification message; overrides drawTextInRect:
/// (presumably to support scrolling of overlong text — behavior lives in the
/// Swift source, not visible here). Generated header — do not edit by hand.
SWIFT_CLASS("_TtC23CWStatusBarNotification11ScrollLabel")
@interface ScrollLabel : UILabel
- (nullable instancetype)initWithCoder:(NSCoder * __nonnull)aDecoder OBJC_DESIGNATED_INITIALIZER;
- (void)drawTextInRect:(CGRect)rect;
@end
#pragma clang diagnostic pop
|
#!/bin/sh
# Run the test suite with coverage collection and render an HTML report.
set -e
rm -rf TestResults
dotnet test --collect:"XPlat Code Coverage"
# Fixes: use $(...) instead of backticks; match the file by exact name with
# find -name rather than grepping every path (substring matches and multiple
# results would corrupt the -reports: argument); take only the first match
# and quote the expansion so paths with spaces survive.
REPORTFILE=$(find . -name coverage.cobertura.xml | head -n 1)
reportgenerator \
    -reports:"$REPORTFILE" \
    -targetdir:TestResults/html
|
from sklearn.decomposition import PCA
import numpy as np
from scipy import stats as st
from .data_tools import *
from sklearn import svm
from .linear_algebra import *
from .hic_oe import oe
def cor(mat):
    """Correlation of rows with columns of mat.

    Returns a symmetric matrix of Pearson correlation coefficients between
    every pair of distinct rows. The diagonal is left at zero (it is never
    written), matching the historical behavior of this function.
    """
    size = len(mat)
    result = np.zeros_like(mat)
    for row in range(size):
        for col in range(row):
            coeff, _pvalue = st.pearsonr(mat[row], mat[col])
            result[row, col] = coeff
            result[col, row] = coeff
    return result
def get_compartments(mat, struct, enrichments=None, active=True):
    """A/B compartment scores from a contact matrix, following
    Lieberman-Aiden et al (2009): first principal component of the
    correlation matrix of the observed/expected matrix.

    mat -- raw contact matrix
    struct -- structure object passed through to oe()
    enrichments -- optional per-bin signal used to orient the sign of the
        scores (e.g. an active-chromatin mark); if given, scores are flipped
        so that positive correlates with `active` chromatin
    active -- whether `enrichments` marks active (True) or inactive chromatin

    Returns scores scaled so positives are divided by the maximum and
    negatives by the magnitude of the minimum.
    """
    oe_mat = oe(mat, struct)
    cor_mat = cor(oe_mat)
    pca = PCA(n_components=1)
    # fit_transform fits the model itself; the original code called fit()
    # first, redundantly fitting the PCA twice.
    scores = pca.fit_transform(cor_mat)[:, 0]
    # enforce positive score = active chromatin
    if enrichments is not None:
        r, p = st.pearsonr(scores, enrichments)
        if active and r < 0:
            scores = -scores
        elif not active and r > 0:
            scores = -scores
    # normalize: positives scaled by max, negatives by |min|
    max_val = max(scores)
    min_val = -min(scores)
    for i, score in enumerate(scores):
        if score > 0:
            scores[i] = score / max_val
        else:
            scores[i] = score / min_val
    return scores
def load_enrichments(path, structure, column):
    """Load per-bin enrichment values for the bins present in `structure`.

    path -- whitespace-delimited text file; `column` selects the value column
    structure -- object exposing nonzero_abs_indices() and chrom.minPos/res
    """
    enrichments = np.array(np.loadtxt(path, dtype=object)[:, column], dtype=float)
    # Use floor division: under Python 3, `/` yields a float, and float
    # offsets make bin_nums a float array that cannot index `enrichments`.
    # (minPos is presumably a multiple of res — confirm against the
    # structure implementation.)
    bin_nums = structure.nonzero_abs_indices() + structure.chrom.minPos // structure.chrom.res
    return enrichments[bin_nums]
def calculate_compartment_fraction(structure1, structure2, path1, path2, size1=None, size2=None):
    """Estimate the fraction of structural difference attributable to the
    compartment axis.

    Computes compartment scores for both structures, fits a linear SVR
    mapping 3D coordinates to compartment score, rotates both coordinate
    sets into that coordinate system, and returns the normalized mean
    displacement along the compartment (z) axis relative to all three axes.
    """
    #compartments
    contacts1 = matFromBed(path1, size1, structure1)
    contacts2 = matFromBed(path2, size2, structure2)
    compartments1 = np.array(get_compartments(contacts1, structure1))
    compartments2 = np.array(get_compartments(contacts2, structure2))
    # PCA sign is arbitrary; flip the second score vector so the two agree.
    r, p = st.pearsonr(compartments1, compartments2)
    if r < 0:
        compartments2 = -compartments2
    #SVR: learn the spatial direction that best predicts compartment score
    coords1 = structure1.getCoords()
    coords2 = structure2.getCoords()
    coords = np.concatenate((coords1, coords2))
    compartments = np.concatenate((compartments1, compartments2))
    clf = svm.LinearSVR()
    clf.fit(coords, compartments)
    coef = clf.coef_
    # Rotate coordinates so the SVR direction becomes an axis
    # (presumably the last/z axis — see change_coordinate_system).
    transformed_coords1 = np.array(change_coordinate_system(coef, coords1))
    transformed_coords2 = np.array(change_coordinate_system(coef, coords2))
    # Per-bin displacement between the two structures along each axis.
    # NOTE(review): assumes both structures have the same bins in the same
    # order — confirm upstream.
    x_diffs = transformed_coords1[:,0] - transformed_coords2[:,0]
    y_diffs = transformed_coords1[:,1] - transformed_coords2[:,1]
    z_diffs = transformed_coords1[:,2] - transformed_coords2[:,2]
    #axis lengths: mean absolute deviation from the centroid, averaged
    #over the two structures, used to normalize the displacements
    centroid1 = np.mean(transformed_coords1, axis=0)
    centroid2 = np.mean(transformed_coords2, axis=0)
    x_length1 = np.mean([np.abs(coord1[0] - centroid1[0]) for coord1 in transformed_coords1])
    y_length1 = np.mean([np.abs(coord1[1] - centroid1[1]) for coord1 in transformed_coords1])
    z_length1 = np.mean([np.abs(coord1[2] - centroid1[2]) for coord1 in transformed_coords1])
    x_length2 = np.mean([np.abs(coord2[0] - centroid2[0]) for coord2 in transformed_coords2])
    y_length2 = np.mean([np.abs(coord2[1] - centroid2[1]) for coord2 in transformed_coords2])
    z_length2 = np.mean([np.abs(coord2[2] - centroid2[2]) for coord2 in transformed_coords2])
    x_length = np.mean((x_length1, x_length2))
    y_length = np.mean((y_length1, y_length2))
    z_length = np.mean((z_length1, z_length2))
    x_mean = np.mean(np.abs(x_diffs))/x_length
    y_mean = np.mean(np.abs(y_diffs))/y_length
    z_mean = np.mean(np.abs(z_diffs))/z_length
    return z_mean/(x_mean + y_mean + z_mean)
|
<reponame>cartermp/opentelemetry-go<gh_stars>0
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package trace
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/otel/attribute"
ottest "go.opentelemetry.io/otel/internal/internaltest"
"go.opentelemetry.io/otel/sdk/internal/env"
"go.opentelemetry.io/otel/trace"
)
// TestSettingSpanLimits verifies how span limits are resolved from the three
// configuration sources: OTEL_* environment variables, WithSpanLimits, and
// WithRawSpanLimits. Options take precedence over the environment; the "raw"
// option passes values through without correcting invalid ones.
func TestSettingSpanLimits(t *testing.T) {
	// envLimits sets every span-limit environment key to the same value.
	envLimits := func(val string) map[string]string {
		return map[string]string{
			env.SpanAttributeValueLengthKey: val,
			env.SpanEventCountKey:           val,
			env.SpanAttributeCountKey:       val,
			env.SpanLinkCountKey:            val,
			env.SpanEventAttributeCountKey:  val,
			env.SpanLinkAttributeCountKey:   val,
		}
	}
	// limits builds a SpanLimits with every field set to n.
	limits := func(n int) *SpanLimits {
		lims := NewSpanLimits()
		lims.AttributeValueLengthLimit = n
		lims.AttributeCountLimit = n
		lims.EventCountLimit = n
		lims.LinkCountLimit = n
		lims.AttributePerEventCountLimit = n
		lims.AttributePerLinkCountLimit = n
		return &lims
	}
	tests := []struct {
		name   string
		env    map[string]string // environment variables set for the subtest
		opt    *SpanLimits       // applied via WithSpanLimits (corrects invalid values)
		rawOpt *SpanLimits       // applied via WithRawSpanLimits (no correction)
		want   SpanLimits
	}{
		{
			name: "defaults",
			want: NewSpanLimits(),
		},
		{
			name: "env",
			env:  envLimits("42"),
			want: *(limits(42)),
		},
		{
			name: "opt",
			opt:  limits(42),
			want: *(limits(42)),
		},
		{
			name:   "raw-opt",
			rawOpt: limits(42),
			want:   *(limits(42)),
		},
		{
			name: "opt-override",
			env:  envLimits("-2"),
			// Options take priority over the environment.
			opt:  limits(43),
			want: *(limits(43)),
		},
		{
			name: "raw-opt-override",
			env:  envLimits("-2"),
			// Options take priority over the environment.
			rawOpt: limits(43),
			want:   *(limits(43)),
		},
		{
			name:   "last-opt-wins",
			opt:    limits(-2),
			rawOpt: limits(-3),
			want:   *(limits(-3)),
		},
		{
			name: "env(unlimited)",
			// OTel spec says negative SpanLinkAttributeCountKey is invalid,
			// but since we will revert to the default (unlimited), which uses
			// negative values to signal this, the value is expected to
			// pass through.
			env:  envLimits("-1"),
			want: *(limits(-1)),
		},
		{
			name: "opt(unlimited)",
			// Corrects to defaults.
			opt:  limits(-1),
			want: NewSpanLimits(),
		},
		{
			name:   "raw-opt(unlimited)",
			rawOpt: limits(-1),
			want:   *(limits(-1)),
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			if test.env != nil {
				// Record and restore the environment so subtests stay isolated.
				es := ottest.NewEnvStore()
				t.Cleanup(func() { require.NoError(t, es.Restore()) })
				for k, v := range test.env {
					es.Record(k)
					require.NoError(t, os.Setenv(k, v))
				}
			}
			var opts []TracerProviderOption
			if test.opt != nil {
				opts = append(opts, WithSpanLimits(*test.opt))
			}
			if test.rawOpt != nil {
				opts = append(opts, WithRawSpanLimits(*test.rawOpt))
			}
			assert.Equal(t, test.want, NewTracerProvider(opts...).spanLimits)
		})
	}
}
// recorder is a minimal SpanProcessor that accumulates every ended span,
// letting tests inspect exported spans without a real exporter.
type recorder []ReadOnlySpan

// OnStart is a no-op; only ended spans are recorded.
func (r *recorder) OnStart(context.Context, ReadWriteSpan) {}

// OnEnd appends the finished span to the recorder.
func (r *recorder) OnEnd(s ReadOnlySpan) { *r = append(*r, s) }

// ForceFlush is a no-op; spans are already in memory.
func (r *recorder) ForceFlush(context.Context) error { return nil }

// Shutdown is a no-op.
func (r *recorder) Shutdown(context.Context) error { return nil }
// testSpanLimits starts and ends one span carrying two attributes, two
// events, and two links (each with two attributes) under the given raw
// limits, then returns the single exported span for inspection.
func testSpanLimits(t *testing.T, limits SpanLimits) ReadOnlySpan {
	rec := new(recorder)
	tp := NewTracerProvider(WithRawSpanLimits(limits), WithSpanProcessor(rec))
	tracer := tp.Tracer("testSpanLimits")
	ctx := context.Background()
	a := []attribute.KeyValue{attribute.Bool("one", true), attribute.Bool("two", true)}
	l := trace.Link{
		SpanContext: trace.NewSpanContext(trace.SpanContextConfig{
			TraceID: [16]byte{0x01},
			SpanID:  [8]byte{0x01},
		}),
		Attributes: a,
	}
	_, span := tracer.Start(ctx, "span-name", trace.WithLinks(l, l))
	span.SetAttributes(
		attribute.String("string", "abc"),
		attribute.StringSlice("stringSlice", []string{"abc", "def"}),
	)
	span.AddEvent("event 1", trace.WithAttributes(a...))
	span.AddEvent("event 2", trace.WithAttributes(a...))
	span.End()
	// Shutdown flushes the processor; exactly one span must have been exported.
	require.NoError(t, tp.Shutdown(ctx))
	require.Len(t, *rec, 1, "exported spans")
	return (*rec)[0]
}
// TestSpanLimits exercises each span limit individually against the fixture
// span from testSpanLimits, covering unlimited (-1), truncating/capping
// values, and fully disabled (0) settings.
func TestSpanLimits(t *testing.T) {
	t.Run("AttributeValueLengthLimit", func(t *testing.T) {
		limits := NewSpanLimits()
		// Unlimited.
		limits.AttributeValueLengthLimit = -1
		attrs := testSpanLimits(t, limits).Attributes()
		assert.Contains(t, attrs, attribute.String("string", "abc"))
		assert.Contains(t, attrs, attribute.StringSlice("stringSlice", []string{"abc", "def"}))
		limits.AttributeValueLengthLimit = 2
		attrs = testSpanLimits(t, limits).Attributes()
		// Ensure string and string slice attributes are truncated.
		assert.Contains(t, attrs, attribute.String("string", "ab"))
		assert.Contains(t, attrs, attribute.StringSlice("stringSlice", []string{"ab", "de"}))
		limits.AttributeValueLengthLimit = 0
		attrs = testSpanLimits(t, limits).Attributes()
		assert.Contains(t, attrs, attribute.String("string", ""))
		assert.Contains(t, attrs, attribute.StringSlice("stringSlice", []string{"", ""}))
	})
	t.Run("AttributeCountLimit", func(t *testing.T) {
		limits := NewSpanLimits()
		// Unlimited.
		limits.AttributeCountLimit = -1
		assert.Len(t, testSpanLimits(t, limits).Attributes(), 2)
		limits.AttributeCountLimit = 1
		assert.Len(t, testSpanLimits(t, limits).Attributes(), 1)
		// Ensure this can be disabled.
		limits.AttributeCountLimit = 0
		assert.Len(t, testSpanLimits(t, limits).Attributes(), 0)
	})
	t.Run("EventCountLimit", func(t *testing.T) {
		limits := NewSpanLimits()
		// Unlimited.
		limits.EventCountLimit = -1
		assert.Len(t, testSpanLimits(t, limits).Events(), 2)
		limits.EventCountLimit = 1
		assert.Len(t, testSpanLimits(t, limits).Events(), 1)
		// Ensure this can be disabled.
		limits.EventCountLimit = 0
		assert.Len(t, testSpanLimits(t, limits).Events(), 0)
	})
	t.Run("AttributePerEventCountLimit", func(t *testing.T) {
		limits := NewSpanLimits()
		// Unlimited.
		limits.AttributePerEventCountLimit = -1
		for _, e := range testSpanLimits(t, limits).Events() {
			assert.Len(t, e.Attributes, 2)
		}
		limits.AttributePerEventCountLimit = 1
		for _, e := range testSpanLimits(t, limits).Events() {
			assert.Len(t, e.Attributes, 1)
		}
		// Ensure this can be disabled.
		limits.AttributePerEventCountLimit = 0
		for _, e := range testSpanLimits(t, limits).Events() {
			assert.Len(t, e.Attributes, 0)
		}
	})
	t.Run("LinkCountLimit", func(t *testing.T) {
		limits := NewSpanLimits()
		// Unlimited.
		limits.LinkCountLimit = -1
		assert.Len(t, testSpanLimits(t, limits).Links(), 2)
		limits.LinkCountLimit = 1
		assert.Len(t, testSpanLimits(t, limits).Links(), 1)
		// Ensure this can be disabled.
		limits.LinkCountLimit = 0
		assert.Len(t, testSpanLimits(t, limits).Links(), 0)
	})
	t.Run("AttributePerLinkCountLimit", func(t *testing.T) {
		limits := NewSpanLimits()
		// Unlimited.
		limits.AttributePerLinkCountLimit = -1
		for _, l := range testSpanLimits(t, limits).Links() {
			assert.Len(t, l.Attributes, 2)
		}
		limits.AttributePerLinkCountLimit = 1
		for _, l := range testSpanLimits(t, limits).Links() {
			assert.Len(t, l.Attributes, 1)
		}
		// Ensure this can be disabled.
		limits.AttributePerLinkCountLimit = 0
		for _, l := range testSpanLimits(t, limits).Links() {
			assert.Len(t, l.Attributes, 0)
		}
	})
}
|
using System;
using System.IO;
using System.Threading.Tasks;
/// <summary>
/// Supplies a readable stream for a named content item.
/// </summary>
public interface IContentProvider
{
    /// <summary>Opens a readable stream for the item with the given name.</summary>
    Task<Stream> Open(string name);
}
/// <summary>
/// Reads the full text of a named content item from an
/// <see cref="IContentProvider"/>.
/// </summary>
public class ContentReader
{
    /// <summary>
    /// Opens the named item and returns its entire contents as a string.
    /// Any exception is caught and reported as an error string rather than
    /// being rethrown.
    /// </summary>
    public async Task<string> ReadContent(IContentProvider provider, string contentName)
    {
        try
        {
            // Nested usings dispose the reader first, then the stream.
            using (var source = await provider.Open(contentName))
            using (var textReader = new StreamReader(source))
            {
                return await textReader.ReadToEndAsync();
            }
        }
        catch (Exception ex)
        {
            return $"Error reading content: {ex.Message}";
        }
    }
}
<filename>lib/puppet/provider/local_security_policy/policy.rb
# frozen_string_literal: true
require 'fileutils'
require 'puppet/util'
begin
require 'puppet_x/twp/inifile'
require 'puppet_x/lsp/security_policy'
rescue LoadError => _detail
require 'pathname' # JJM WORK_AROUND #14073
module_base = Pathname.new(__FILE__).dirname
require module_base + '../../../' + 'puppet_x/twp/inifile.rb'
require module_base + '../../../' + 'puppet_x/lsp/security_policy.rb'
end
# Provider for the local_security_policy type: reads Windows local security
# policy via `secedit /export` (through SecurityPolicy) and writes individual
# settings back with `secedit /configure` or `reg`.
Puppet::Type.type(:local_security_policy).provide(:policy) do
  desc 'Puppet type that models the local security policy'
  # TODO: Finalize the registry key settings
  # TODO: Add in registry value translation (ex: 1=enable 0=disable)
  # TODO: Implement self.post_resource_eval (need to collect all resource updates the run secedit to make one call)
  # limit access to windows hosts only
  confine osfamily: :windows
  defaultfor osfamily: :windows
  # limit access to systems with these commands since this is the tools we need
  commands secedit: 'secedit', reg: 'reg'
  mk_resource_methods
  # exports the current list of policies into a file and then parses that file into
  # provider instances. If an item is found on the system but not in the lsp_mapping,
  # that policy is not supported only because we cannot match the description
  # furthermore, if a policy is in the mapping but not in the system we would consider
  # that resource absent
  def self.instances
    settings = []
    inf = SecurityPolicy.read_policy_settings
    # need to find the policy, section_header, policy_setting, policy_value and reg_type
    inf.each do |section, parameter_name, parameter_value|
      # Unicode/Version are INI bookkeeping sections, not policies.
      next if section == 'Unicode'
      next if section == 'Version'
      begin
        policy_desc, policy_values = SecurityPolicy.find_mapping_from_policy_name(parameter_name)
        unless policy_desc.nil?
          policy_hash = {
            name: policy_desc,
            policy_type: section,
            policy_setting: parameter_name,
            policy_default: policy_values[:policy_default],
            policy_value: SecurityPolicy.translate_value(parameter_value, policy_values),
            data_type: policy_values[:data_type],
            reg_type: policy_values[:reg_type],
          }
          # If a policy is in the mapping but not in the system we would consider that
          # resource absent. If a policy is set to the default then we would also consider that
          # resource to be absent. For all other values we would consider it to be present
          ensure_value = if parameter_value.nil?
                           :absent
                         elsif policy_hash[:policy_type] == 'Event Audit'
                           (policy_hash[:policy_value] == policy_hash[:policy_default]) ? :absent : :present
                         else
                           :present
                         end
          policy_hash[:ensure] = ensure_value
          inst = new(policy_hash)
          settings << inst
        end
      rescue KeyError => e
        # Unmapped policy name: skip it but leave a trace for debugging.
        Puppet.debug e.message
      end
    end
    settings
  end
  # the flush method will be the last method called after applying all the other
  # properties, by default nothing will be enabled or disabled unless the disable/enable are set to true
  # if we ever move to a point were we can write all the settings via one big config file we
  # would want to do that here.
  def flush
    begin
      if @property_hash[:ensure] == :absent && @property_hash[:policy_type] == 'Registry Values' && @property_hash[:policy_default] != 'enabled'
        # The registry key has been removed so no futher action is required
      else
        write_policy_to_system(resource.to_hash)
      end
    rescue KeyError => e
      Puppet.debug e.message
      # send helpful debug message to user here
    end
    @property_hash = resource.to_hash
  end
  def initialize(value = {})
    super(value)
    @property_flush = {}
  end
  # create the resource and convert any user supplied values to computer terms
  def create
    # do everything in flush method
  end
  # this is currently not implemented correctly on purpose until we can figure out how to safely remove
  def destroy
    case @property_hash[:policy_type]
    when 'Registry Values'
      @property_hash[:ensure] = :absent
      if @property_hash[:policy_default] != 'enabled' # sometimes absent can mean that the default value should be 'enabled'
        # deletes the registry key when the policy is absent and the default value is not 'enabled'
        # policy_setting starts with "MACHINE\..." so prefixing HKEY_LOCAL_ yields HKEY_LOCAL_MACHINE\...
        registry_key = 'HKEY_LOCAL_' + @property_hash[:policy_setting].split('\\')[0...-1].join('\\')
        registry_value = @property_hash[:policy_setting].split('\\').last
        reg(['delete', registry_key, '/v', registry_value, '/f'])
      end
      if @property_hash[:policy_default]
        resource[:policy_value] = @property_hash[:policy_default]
      end
    when 'Event Audit'
      @property_hash[:ensure] = :absent
      # reset the Event audit value back to the default when policy is absent
      resource[:policy_value] = @property_hash[:policy_default]
    end
    # other policy values can not be absent.
  end
  # Match discovered system policies to managed resources by name so Puppet
  # reuses the prefetched state instead of re-querying per resource.
  def self.prefetch(resources)
    policies = instances
    resources.keys.each do |name|
      if (found_pol = policies.find { |pol| pol.name == name })
        resources[name].provider = found_pol
      end
    end
  end
  def exists?
    @property_hash[:ensure] == :present
  end
  # gets the property hash from the provider
  def to_hash
    instance_variable_get('@property_hash')
  end
  # required for easier mocking, this could be a Tempfile too
  def self.temp_file
    'c:\windows\temp\secedit.inf'
  end
  def temp_file
    'c:\windows\temp\secedit.inf'
  end
  # converts any values that might be of a certain type specified in the mapping
  # converts everything to a string
  # returns the value
  def convert_value(policy_hash)
    case policy_hash[:data_type]
    when :boolean
      value = (policy_hash[:policy_value] == 'enabled') ? '1' : '0'
    when :multi_select
      # Reverse-lookup the option key from its (case-insensitive) display value.
      policy_options = SecurityPolicy.find_mapping_from_policy_desc(policy_hash[:name])[:policy_options]
      policy_options.each { |k, v| policy_options[k] = v.downcase }
      value = policy_options.key(policy_hash[:policy_value].downcase)
    when :string
      value = "\"#{policy_hash[:policy_value]}\""
    else
      value = policy_hash[:policy_value]
    end
    case policy_hash[:policy_type]
    when 'Registry Values'
      value = "#{policy_hash[:reg_type]},#{value}"
    when 'Event Audit'
      value = SecurityPolicy.event_to_audit_id(policy_hash[:policy_value])
    when 'Privilege Rights'
      # Translate account names to SID form (*S-1-...); names already in SID
      # form pass through unchanged. Sort for a stable, comparable value.
      sids = Array[]
      pv = policy_hash[:policy_value]
      pv.split(',').sort.each do |suser|
        sids << ((suser !~ %r{^(\*S-1-.+)$}) ? ('*' + Puppet::Util::Windows::SID.name_to_sid(suser).to_s) : suser.to_s)
      end
      value = sids.sort.join(',')
    end
    value
  end
  # writes out one policy at a time using the InfFile Class and secedit
  def write_policy_to_system(policy_hash)
    time = Time.now
    time = time.strftime('%Y%m%d%H%M%S')
    # Timestamped scratch files avoid collisions between closely spaced runs.
    infout = "c:\\windows\\temp\\infimport-#{time}.inf"
    sdbout = "c:\\windows\\temp\\sdbimport-#{time}.inf"
    logout = "c:\\windows\\temp\\logout-#{time}.inf"
    _status = nil
    begin
      # read the system state into the inifile object for easy variable setting
      inf = PuppetX::IniFile.new
      # these sections need to be here by default
      inf['Version'] = { 'signature' => '$CHICAGO$', 'Revision' => 1 }
      inf['Unicode'] = { 'Unicode' => 'yes' }
      section = policy_hash[:policy_type]
      section_value = { policy_hash[:policy_setting] => convert_value(policy_hash) }
      # we can utilize the IniFile class to write out the data in ini format
      inf[section] = section_value
      inf.write(filename: infout, encoding: 'utf-8')
      secedit('/configure', '/db', sdbout, '/cfg', infout)
    ensure
      # Always clean up scratch files, even when secedit fails.
      FileUtils.rm_f(temp_file)
      FileUtils.rm_f(infout)
      FileUtils.rm_f(sdbout)
      FileUtils.rm_f(logout)
    end
  end
end
|
package com.bustiblelemons.cthulhator.character.history.ui;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.view.View;
import com.bustiblelemons.cthulhator.R;
import com.bustiblelemons.cthulhator.character.creation.ui.AbsCharacterCreationActivity;
import com.bustiblelemons.cthulhator.character.history.logic.HistoryAdapter;
import com.bustiblelemons.cthulhator.character.history.logic.LoadHistoryEventsAsyn;
import com.bustiblelemons.cthulhator.character.history.logic.OnOpenHistoryEventDetails;
import com.bustiblelemons.cthulhator.character.history.logic.OnShowDatePicker;
import com.bustiblelemons.cthulhator.character.history.logic.ReportCharacterSettings;
import com.bustiblelemons.cthulhator.character.history.model.BirthData;
import com.bustiblelemons.cthulhator.character.history.model.HistoryEvent;
import com.bustiblelemons.cthulhator.character.history.model.TimeSpan;
import com.bustiblelemons.cthulhator.character.persistance.CharacterWrapper;
import com.bustiblelemons.cthulhator.settings.Settings;
import com.bustiblelemons.cthulhator.settings.character.CharacterSettings;
import com.bustiblelemons.cthulhator.system.brp.statistics.BRPStatistic;
import com.doomonafireball.betterpickers.calendardatepicker.CalendarDatePickerDialog;
import org.joda.time.DateTime;
import java.util.Locale;
import java.util.Random;
import java.util.Set;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
import se.emilsjolander.stickylistheaders.StickyListHeadersListView;
/**
* Created by bhm on 22.09.14.
*/
public class HistoryEditorActivity extends AbsCharacterCreationActivity
implements OnOpenHistoryEventDetails,
LoadHistoryEventsAsyn.OnHistoryEventsLoaded,
HistoryEventDialog.OnHistoryEventPassedBack,
OnShowDatePicker,
CalendarDatePickerDialog.OnDateSetListener,
ReportCharacterSettings, View.OnClickListener {
// Request code callers use when launching this activity for a result.
public static final int REQUEST_CODE = 8;
// Display format for the birth date shown in the toolbar subtitle.
private static final String sDateFormat = "MMM dd, yyyy";
private static final String sCalendarDialogTag = CalendarDatePickerDialog.class.getSimpleName();
@InjectView(R.id.list)
StickyListHeadersListView listView;
private TimeSpan span = TimeSpan.EMPTY;
// Character being edited; supplied as the instance argument and returned as the result.
private CharacterWrapper mSavedCharacter;
private HistoryAdapter mHistoryAdapter;
private DateTime mBirthDate;
// NOTE(review): mSuggestedDate is read in onCreate but no assignment is
// visible in this chunk — presumably set by the superclass; confirm.
private DateTime mSuggestedDate;
private TimeSpan mSpan;
private Toolbar mToolbar;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_history_editor);
    // Toolbar is optional in the layout; only wire it up when present.
    mToolbar = (Toolbar) findViewById(R.id.header);
    if (mToolbar != null) {
        mToolbar.setNavigationOnClickListener(this);
        setSupportActionBar(mToolbar);
    }
    ButterKnife.inject(this);
    mSavedCharacter = getInstanceArgument();
    if (listView != null) {
        mHistoryAdapter = new HistoryAdapter(this, this);
        listView.setAdapter(mHistoryAdapter);
        listView.setOnItemClickListener(mHistoryAdapter);
    }
    setupBirthDate();
    setBirthDayView();
    // Events span from birth to the suggested "current" date.
    // NOTE(review): mSuggestedDate has no visible assignment in this chunk;
    // if it is not set by the base class this will NPE — confirm.
    long begin = mBirthDate.getMillis();
    long end = mSuggestedDate.getMillis();
    mSpan = new TimeSpan(begin, end);
    loadHistoryAsyn();
}
@Override
public void onBackPressed() {
    // Persist the edited history back to the caller before leaving.
    // Bug fix: the original condition was inverted (== null), which both
    // guaranteed a NullPointerException when no character was loaded and
    // meant the result was never delivered when one was.
    if (mSavedCharacter != null && mHistoryAdapter != null) {
        mSavedCharacter.setFullHistory(mHistoryAdapter.getData());
        setResult(RESULT_OK, mSavedCharacter);
    }
    super.onBackPressed();
}
// Shows "Born <date>" as the toolbar subtitle; no-op when the layout has no toolbar.
private void setBirthDayView() {
    if (mToolbar == null) {
        return;
    }
    String prefix = getString(R.string.born);
    String dateText = mBirthDate.toString(sDateFormat);
    String subtitle = String.format(Locale.ENGLISH, "%s %s", prefix, dateText);
    mToolbar.setSubtitle(subtitle);
}
// Loads the character's history events asynchronously for the current time
// span; results arrive via onHistoryEventsLoaded. No-op without a character.
private void loadHistoryAsyn() {
    if (mSavedCharacter != null) {
        LoadHistoryEventsAsyn loadHistoryAsyn = new LoadHistoryEventsAsyn(this, mSavedCharacter);
        loadHistoryAsyn.setOnHistoryEventsLoaded(this);
        loadHistoryAsyn.executeCrossPlatform(mSpan);
    }
}
// Base-class callback delivering the character passed into this activity.
@Override
protected void onInstanceArgumentRead(CharacterWrapper arg) {
    mSavedCharacter = arg;
}
// Opens the detail dialog for a tapped history event; ignores null events.
@Override
public void onOpenHistoryEventDetails(HistoryEvent event) {
    if (event == null) {
        return;
    }
    HistoryEventDialog details = HistoryEventDialog.newInstance(event);
    details.show(getSupportFragmentManager(), HistoryEventDialog.TAG);
}
// Callback from LoadHistoryEventsAsyn: (re)populates the adapter with the loaded events.
@Override
public void onHistoryEventsLoaded(TimeSpan span, Set<HistoryEvent> events) {
    if (events != null) {
        // The adapter may not exist yet when the layout has no list view; create lazily.
        if (mHistoryAdapter == null) {
            mHistoryAdapter = new HistoryAdapter(this, this);
        }
        mHistoryAdapter.refreshData(events);
    }
}
// FAB click: opens the event dialog pre-filled with the suggested date.
@OnClick(R.id.fab)
public void onAddHistoryEvent(View view) {
    HistoryEvent suggestedEventWitDate = getSuggestedDate();
    HistoryEventDialog dialog = HistoryEventDialog.newInstance(suggestedEventWitDate);
    dialog.show(getSupportFragmentManager(), HistoryEventDialog.TAG);
}

// Builds a blank event dated at the suggested date.
private HistoryEvent getSuggestedDate() {
    HistoryEvent event = new HistoryEvent();
    event.setDate(mSuggestedDate.getMillis());
    return event;
}
/**
 * Replaces an edited history event on the character and reloads the list.
 * Fix: guards against a null {@code mSavedCharacter} for consistency with the
 * other callbacks in this activity (the original dereferenced it unconditionally).
 *
 * @param old      the event prior to editing (removed)
 * @param newEvent the edited event (added)
 */
@Override
public void onHistoryEventEdited(HistoryEvent old, HistoryEvent newEvent) {
    if (mSavedCharacter != null) {
        mSavedCharacter.removeHistoryEvent(old);
        mSavedCharacter.addHistoryEvent(newEvent);
        loadHistoryAsyn();
    }
}
// Shows a calendar picker initialised from forDateTime, allowing +/- 100 years.
@Override
public void onShowDatePickerCallback(DateTime forDateTime, CalendarDatePickerDialog.OnDateSetListener callback) {
    if (forDateTime != null) {
        CalendarDatePickerDialog d = CalendarDatePickerDialog.newInstance(callback,
                forDateTime.getYear(),
                forDateTime.getMonthOfYear(),
                forDateTime.getDayOfMonth());
        int startYear = forDateTime.getYear() - 100;
        int endYear = forDateTime.getYear() + 100;
        d.setYearRange(startYear, endYear);
        d.show(getSupportFragmentManager(), sCalendarDialogTag);
    }
}
/**
 * Initialises {@link #mBirthDate} and {@link #mSuggestedDate}, estimating a birth
 * year from the EDU statistic when the character has no recorded birth.
 *
 * Bug fixes:
 * <ul>
 *   <li>Joda-Time month/day fields are 1-based; the original passed
 *       {@code Random.nextInt(12)}/{@code nextInt(27)} (ranges 0-11 / 0-26), so
 *       month 0 or day 0 raised IllegalFieldValueException. Values are now 1-12 / 1-27.</li>
 *   <li>The else-branch dereferenced {@code mSavedCharacter} even though the branch
 *       is reachable when it is null; null-safe fallbacks are used instead.</li>
 *   <li>{@code nextInt(23)} skipped hour 23; hours are now 0-23.</li>
 * </ul>
 */
private void setupBirthDate() {
    if (mSavedCharacter != null && mSavedCharacter.getBirth() != null) {
        BirthData birthData = mSavedCharacter.getBirth();
        mBirthDate = new DateTime(birthData.getDate());
    } else {
        CharacterSettings s = Settings.getLastPortraitSettings(this);
        int defaultYear = s.getCthulhuPeriod().getDefaultYear();
        // Suggested age derives from the EDU statistic (EDU + 6); 0 when no character is loaded.
        int edu = mSavedCharacter != null
                ? mSavedCharacter.getStatisticValue(BRPStatistic.EDU.name())
                : 0;
        int suggestedAge = edu + 6;
        int estimateYear = defaultYear - suggestedAge;
        Random r = new Random();
        // 1-based month (1-12) and day (1-27: valid for every month), hour 0-23.
        int month = r.nextInt(12) + 1;
        int day = r.nextInt(27) + 1;
        int h = r.nextInt(24);
        mBirthDate = new DateTime(estimateYear, month, day, h, 0);
        BirthData birth = new BirthData();
        birth.setDate(mBirthDate.getMillis());
        if (mSavedCharacter != null) {
            mSavedCharacter.setBirth(birth);
        }
    }
    if (mSavedCharacter != null) {
        mSuggestedDate = new DateTime(mSavedCharacter.getSuggestedBirthDateEpoch());
    } else {
        // No character: fall back to the birth date so mSpan stays well-defined.
        mSuggestedDate = mBirthDate;
    }
}
@Override
public CharacterSettings onGetCharacterSettings() {
    return Settings.getLastPortraitSettings(this);
}

// Toolbar navigation click: treat like the hardware back button.
@Override
public void onClick(View v) {
    onBackPressed();
}

// Date picker result: keep the original time-of-day, replace the date.
// NOTE(review): the third int is named 'yearOfMonth' but is passed to DateTime in the
// day-of-month position — almost certainly a misnamed 'dayOfMonth'; confirm and rename.
@Override
public void onDateSet(CalendarDatePickerDialog calendarDatePickerDialog,
        int year, int monthOfYear, int yearOfMonth) {
    int hour = mBirthDate.getHourOfDay();
    int minute = mBirthDate.getMinuteOfHour();
    mBirthDate = new DateTime(year, monthOfYear, yearOfMonth, hour, minute);
    setBirthDayView();
}
}
|
#!/bin/bash
# Builds the UCSC Kent tree libraries and the bedGraphPack utility, then installs
# the binary into ${PREFIX}/bin (conda-build style; PREFIX is provided by the builder).
# Fix: fail fast on any build error (the original continued past failed makes and
# shipped a stale/missing binary) and quote all path variables.
set -euo pipefail

export MACHTYPE=x86_64
export BINDIR="$(pwd)/bin"
mkdir -p "${BINDIR}"
# Libraries must be built before the utility that links against them.
(cd kent/src/lib && make)
(cd kent/src/jkOwnLib && make)
(cd kent/src/hg/lib && make)
(cd kent/src/utils/bedGraphPack && make)
mkdir -p "${PREFIX}/bin"
cp bin/bedGraphPack "${PREFIX}/bin"
chmod +x "${PREFIX}/bin/bedGraphPack"
|
/// Thrown by `decode` when the input is not a well-formed string of 8-bit binary groups.
public struct StringEncodingError: Error {}
/// Encodes each character of `input` as its zero-padded 8-bit binary ASCII value.
/// FIXME(review): `char.asciiValue!` force-unwraps and will crash on any non-ASCII
/// character — consider throwing `StringEncodingError` instead (confirm callers first).
public func encode(_ input: String) -> String {
    var encodedString = ""
    for char in input {
        // asciiValue is nil for non-ASCII characters; see FIXME above.
        let binaryValue = String(char.asciiValue!, radix: 2)
        // Left-pad to a fixed 8-bit width so decode can split on byte boundaries.
        let paddedBinaryValue = String(repeating: "0", count: max(0, 8 - binaryValue.count)) + binaryValue
        encodedString += paddedBinaryValue
    }
    return encodedString
}
/// Decodes a string of concatenated 8-bit binary groups (as produced by `encode`)
/// back into text.
///
/// - Parameter input: binary digits; length must be a multiple of eight.
/// - Returns: the decoded string.
/// - Throws: `StringEncodingError` when the length is not a multiple of eight
///   or a group contains non-binary characters.
public func decode(_ input: String) throws -> String {
    guard input.count.isMultiple(of: 8) else {
        throw StringEncodingError()
    }
    var decoded = ""
    var cursor = input.startIndex
    while cursor != input.endIndex {
        // Consume the next 8-character group and parse it as a base-2 byte.
        let chunkEnd = input.index(cursor, offsetBy: 8)
        guard let byte = UInt8(input[cursor..<chunkEnd], radix: 2) else {
            throw StringEncodingError()
        }
        decoded.append(Character(UnicodeScalar(byte)))
        cursor = chunkEnd
    }
    return decoded
}
<gh_stars>1-10
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
import PurchaseRequestApprovalController from './PurchaseRequestApprovalController.js';
import Table from '../../elements/Table.js';
class PurchaseRequestApproval {
    /**
     * Renders the "Approve Purchase Request" card with an embedded data table.
     * @returns {string} HTML markup for the panel.
     */
    render() {
        const approvalTable = new Table("purchase-request-approval", [ "table", "table-striped", "table-bordered", "table-hover" ]).render();
        // Assemble the markup from ordered fragments; join('') produces the exact
        // same string the previous '+'-concatenation did.
        return [
            '<div class="row">',
            '<div class="col-12">',
            '<div class="card">',
            '<div class="card-body">',
            '<h5 class="card-title text-center"> Approve Purchase Request </h5>',
            approvalTable,
            '</div>',
            '</div>',
            '</div>',
            '</div>'
        ].join('');
    }
}
export default new PurchaseRequestApproval().render();
/**
 * Prints the frequency of each lowercase letter ('a'-'z') found in {@code s}.
 * Bug fix: the original only checked {@code ch >= 0}, so characters with code
 * points above 'z' (e.g. '{' or '~') produced indices >= 26 and threw
 * ArrayIndexOutOfBoundsException. Both bounds are now enforced.
 *
 * @param s input string; characters outside 'a'-'z' are ignored
 */
public static void frequencyCount(String s) {
    int[] frequency = new int[26];
    for (int i = 0; i < s.length(); i++) {
        int ch = s.charAt(i) - 'a';
        // Count only 'a'..'z'; digits, uppercase and punctuation are skipped.
        if (ch >= 0 && ch < 26) {
            frequency[ch]++;
        }
    }
    for (int i = 0; i < 26; i++) {
        if (frequency[i] != 0) {
            System.out.println((char) (i + 'a') + " : " + frequency[i]);
        }
    }
}
// Example usage (snippet context — these statements belong inside a method).
String s = "Hello World!";
frequencyCount(s);
-- Customers that have never placed an order.
-- Rewritten from NOT IN (subquery): if orders.customer_id ever contains a NULL,
-- NOT IN evaluates to UNKNOWN for every row and the query returns nothing.
-- NOT EXISTS is NULL-safe and typically optimises at least as well.
SELECT name
FROM customers c
WHERE NOT EXISTS (
    SELECT 1
    FROM orders o
    WHERE o.customer_id = c.customer_id
);
package br.com.matheuslino.pacman;
import java.util.List;
import br.com.matheuslino.pacman.game.LabyrinthObjectVisitor;
/**
 * Singleton "evasive" ghost, created eagerly at position (0, 0).
 * NOTE(review): the constructor is package-private rather than private, so other
 * classes in this package could still create extra instances — confirm intent
 * before tightening it.
 */
public class Evasive extends Ghost {
    private static final Evasive instance = new Evasive(0, 0);

    Evasive(int x, int y) {
        super(x, y);
    }

    /** Returns the shared instance. */
    public static Evasive getInstance() {
        return instance;
    }

    /** Visitor-pattern dispatch hook. */
    @Override
    public void accept(LabyrinthObjectVisitor visitor) {
        visitor.visit(this);
    }
}
|
require "spec_helper"
# Specs for FitbitAPI::Client time-series templating and the weight_logs wrapper.
describe FitbitAPI::Client do
  let(:client) do
    FitbitAPI::Client.new(
      client_id: "ABC123",
      client_secret: "xyz789",
    )
  end

  describe "#time_series_request" do
    it "makes a request for a period" do
      opts = {
        period: "1w",
      }
      # Template containing only the %{period} placeholder.
      test_templates = {
        period: "%{period}",
      }
      expect(client.time_series_request(test_templates, opts)).to eq(opts[:period])
    end

    it "makes a request for a range" do
      opts = {
        start_date: "2020-10-20",
        end_date: "2020-11-20",
      }
      test_templates = {
        range: "%{start_date}-%{end_date}",
      }
      expected = test_templates[:range] % opts
      expect(client.time_series_request(test_templates, opts)).to eq(expected)
    end
  end

  describe "#weight_logs" do
    it "makes a time_series request" do
      # The message expectation below also stubs #time_series, so no real request
      # is made. (Removed an unused `time_series_double` local left over from an
      # earlier stubbing approach.)
      expect(client).to receive(:time_series).with(anything, hash_including(resource: "weight"))
      client.weight_logs({})
    end
  end
end
|
package br.com.zup.transacoes.utils;
import br.com.zup.transacoes.consumer.entity.TransacaoRepository;
import br.com.zup.transacoes.exception.CardNotFoundException;
public class Util {

    /**
     * Validates that a card with the given id exists in the repository.
     *
     * @param id                  the card id to look up
     * @param transacaoRepository repository used for the existence check
     * @throws CardNotFoundException when no card matches {@code id}
     */
    public static void validadorCartao(String id, TransacaoRepository transacaoRepository) throws CardNotFoundException {
        boolean cartaoExiste = transacaoRepository.existsByCartao_Idcartao(id);
        if (cartaoExiste) {
            return;
        }
        throw new CardNotFoundException();
    }
}
|
<reponame>ErwinYou/react-blog
import { GithubOutlined, QqOutlined, WechatOutlined } from '@ant-design/icons';
import React from 'react';
import { csdnUrl, githubUrl, QQ_QRCode, weChatQRCode } from '@/utils/constant';
import Csdn from './Csdn';
export const useAccount = () => {
const imgStyle = { width: '120px', height: '120px' };
return [
{
isLink: true,
link: githubUrl,
ico: <GithubOutlined />,
content: null
},
{
isLink: true,
link: csdnUrl,
ico: <Csdn />,
content: null
},
{
isLink: false,
link: '',
ico: <WechatOutlined />,
content: <img src={weChatQRCode} style={imgStyle} />
},
{
isLink: false,
link: '',
ico: <QqOutlined />,
content: <img src={QQ_QRCode} style={imgStyle} />
}
];
};
|
<filename>Algorithms/Java/IntegerToEnglishWords.java
/**
* Created by huqiu on 17-9-19.
*/
import java.util.*;
import java.*;
/**
 * LeetCode 273 — converts a non-negative int to its English words representation.
 * The implementation builds 3-digit groups and repairs spacing afterwards; the
 * whitespace handling is deliberate and order-sensitive (see notes below).
 */
public class Solution {
    // Words for 1-19 (index = value - 1).
    String [] digit = {"One", "Two", "Three", "Four", "Five", "Six", "Seven",
            "Eight", "Nine", "Ten", "Eleven", "Twelve", "Thirteen", "Fourteen", "Fifteen", "Sixteen", "Seventeen", "Eighteen", "Nineteen"};
    // Words for the tens 10-90 (index = tens - 1).
    String [] tens = {"Ten", "Twenty", "Thirty", "Forty", "Fifty", "Sixty", "Seventy", "Eighty", "Ninety"};

    /**
     * Converts a 3-digit group (expected 1-999) to words.
     * NOTE: when there is no hundreds part the result carries a LEADING space
     * (e.g. " Twenty One"); numberToWords compensates by trimming edges and
     * collapsing double spaces afterwards.
     */
    public String digit3toWords(int num) {
        String res = "";
        if (num / 100 > 0) {
            res += digit[num / 100 - 1] + " Hundred";
        }
        int mod = num % 100;
        if (mod > 0 && mod < 20) {
            // 1-19 use the lookup table directly.
            res += " " + digit[mod - 1];
        }
        else if (mod >= 20) {
            res += " " + tens[mod / 10 - 1] + (mod % 10 == 0 ? "" : " " + digit[mod % 10 - 1]);
        }
        return res;
    }

    /** Converts num to English words, e.g. 1000001 -> "One Million One". */
    public String numberToWords(int num) {
        if (num == 0) return "Zero";
        String ans [] = new String[5];
        // Scale word for each 3-digit group; trailing spaces are intentional and
        // cleaned up below.
        String [] prefix = {"", " Thousand ", " Million ", " Billion "};
        int i = 0, c = 0;
        while (num > 0) {
            // All-zero groups are skipped but still advance the scale counter.
            if (num % 1000 > 0)
                ans[i++] = digit3toWords(num % 1000) + prefix[c];
            num /= 1000;
            c++;
        }
        // Groups were collected least-significant first; emit most-significant first.
        StringBuilder res = new StringBuilder();
        for (int j = i-1; j >= 0; j--) {
            res.append(ans[j]);
        }
        String result = res.toString();
        int len = result.length();
        // Strip at most one stray space from each end (see digit3toWords/prefix notes).
        if (result.charAt(0) == ' ' && result.charAt(len-1) == ' ') {
            result = result.substring(1, len-1);
        }
        else if (result.charAt(0) == ' ') {
            result = result.substring(1);
        }
        else if (result.charAt(len-1) == ' ') {
            result = result.substring(0, len-1);
        }
        // Collapse interior double spaces produced where a prefix's trailing space
        // meets a group's leading space.
        return result.replace("  ", " ");
    }

    public static void main(String[] args) {
        Solution sol = new Solution();
        String ca = sol.numberToWords(1000001);
        System.out.println(ca);
        String s = "A  B c".replace("  ", " ");
        System.out.println(s);
    }
}
<filename>app/controllers/users_controller.rb<gh_stars>0
class UsersController < ApplicationController
  get '/users/signup' do
    erb :'/users/signup'
  end

  post '/users/signup' do
    # NOTE(review): no redirect/session is set after signup — confirm intended flow.
    @user = User.create(username: params[:username], password: params[:password])
  end

  # Bug fix: Sinatra matches routes in declaration order, so the '/users/:id'
  # wildcard used to capture '/users/login' and '/users/logout' before they were
  # reached. Literal routes must be declared first; ':id' is now last.
  get '/users/login' do
    erb :'users/login'
  end

  post '/users/login' do
    #@user = User.find_by(:username => params[:username])
    #if @user && @user.authenticate(params[:password])
    #  session[:user_id] = @user.id
    #  redirect to '/tasks'
    #else
    #  redirect to '/login'
    #end
  end

  get '/users/logout' do
    #if session[:user_id] != nil
    #  session.clear
    #  redirect to '/login'
    #else
    #  redirect to '/'
    #end
  end

  get '/users/:id' do
    "show page"
  end
end
<filename>src/main/java/models/Spell.java
package models;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
public class Spell {
private int id;
private String name;
private String description;
private int damage;
private int MP;
private String effects;
public Spell(String name, String description, int damage, int MP) {
this.name = name;
this.description = description;
this.damage = damage;
this.MP = MP;
}
public Spell(String name, String description, int MP, String effects) {
this.name = name;
this.description = description;
this.MP = MP;
this.effects = effects;
}
public Spell(String name, String description, int damage, int MP, String effects) {
this.name = name;
this.description = description;
this.damage = damage;
this.MP = MP;
this.effects = effects;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public int getDamage() {
return damage;
}
public void setDamage(int damage) {
this.damage = damage;
}
public String getEffects() {
return effects;
}
public void setEffects(String effects) {
this.effects = effects;
}
public int getMP() {
return MP;
}
public void setMP(int MP) {
this.MP = MP;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Spell spell = (Spell) o;
if (id != spell.id) return false;
if (!name.equals(spell.name)) return false;
return description.equals(spell.description);
}
@Override
public int hashCode() {
int result = id;
result = 31 * result + name.hashCode();
result = 31 * result + description.hashCode();
return result;
}
}
|
#!/bin/bash
# Shared preamble for the Bamboo DIY AWS backup/restore scripts.
# The helpers error/info/success/bail/check_command and the CURL_OPTIONS /
# BACKUP_VARS_FILE variables are presumably provided by the sourcing script — confirm.
check_command "aws"
check_command "jq"
# Ensure the AWS region has been provided
if [ -z "${AWS_REGION}" ] || [ "${AWS_REGION}" == null ]; then
    error "The AWS region must be set as AWS_REGION in ${BACKUP_VARS_FILE}"
    bail "See bamboo.diy-aws-backup.vars.sh.example for the defaults."
fi
if [ -z "${AWS_ACCESS_KEY_ID}" ] || [ -z "${AWS_SECRET_ACCESS_KEY}" ]; then
    # 169.254.169.254 - dynamically configured link-local addresses
    # Here, get the IAM security-credentials
    AWS_INSTANCE_ROLE=$(curl ${CURL_OPTIONS} http://169.254.169.254/latest/meta-data/iam/security-credentials/)
    if [ -z "${AWS_INSTANCE_ROLE}" ]; then
        error "Could not find the necessary credentials to run backup"
        error "We recommend launching the instance with an appropiate IAM role"
        error "Alternatively AWS credentials can be set as AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY in ${BACKUP_VARS_FILE}"
        bail "See bamboo.diy-aws-backup.vars.sh.example for the defaults."
    else
        info "Using IAM instance role ${AWS_INSTANCE_ROLE}"
    fi
else
    info "Found AWS credentials"
    export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
    export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
fi
export AWS_DEFAULT_REGION=${AWS_REGION}
export AWS_DEFAULT_OUTPUT=json
# The instance name is used as a snapshot tag prefix, so it must be non-empty,
# space-free and short enough to stay under AWS tag-value limits.
if [ -z "${INSTANCE_NAME}" ]; then
    error "The ${PRODUCT} instance name must be set as INSTANCE_NAME in ${BACKUP_VARS_FILE}"
    bail "See bamboo.diy-aws-backup.vars.sh.example for the defaults."
elif [ ! "${INSTANCE_NAME}" == ${INSTANCE_NAME%[[:space:]]*} ]; then
    error "Instance name cannot contain spaces"
    bail "See bamboo.diy-aws-backup.vars.sh.example for the defaults."
elif [ ${#INSTANCE_NAME} -ge 100 ]; then
    error "Instance name must be under 100 characters in length"
    bail "See bamboo.diy-aws-backup.vars.sh.example for the defaults."
fi
# Snapshots are tagged Name=<instance>-<timestamp>; the prefix is also used to
# find/expire old snapshots.
SNAPSHOT_TAG_KEY="Name"
SNAPSHOT_TAG_PREFIX="${INSTANCE_NAME}-"
SNAPSHOT_TAG_VALUE="${SNAPSHOT_TAG_PREFIX}`date +"%Y%m%d-%H%M%S-%3N"`"
# Snapshots an EBS volume and tags the snapshot with the per-run Name tag
# (plus any AWS_ADDITIONAL_TAGS).
#   $1 volume id, $2 snapshot description.
function snapshot_ebs_volume {
    local VOLUME_ID="$1"
    local SNAPSHOT_ID=$(aws ec2 create-snapshot --volume-id "${VOLUME_ID}" --description "$2" | jq -r '.SnapshotId')
    success "Taken snapshot ${SNAPSHOT_ID} of volume ${VOLUME_ID}"
    aws ec2 create-tags --resources "${SNAPSHOT_ID}" --tags Key="${SNAPSHOT_TAG_KEY}",Value="${SNAPSHOT_TAG_VALUE}"
    info "Tagged ${SNAPSHOT_ID} with ${SNAPSHOT_TAG_KEY}=${SNAPSHOT_TAG_VALUE}"
    if [ ! -z "${AWS_ADDITIONAL_TAGS}" ]; then
        aws ec2 create-tags --resources "${SNAPSHOT_ID}" --tags "[ ${AWS_ADDITIONAL_TAGS} ]"
        info "Tagged ${SNAPSHOT_ID} with additional tags: ${AWS_ADDITIONAL_TAGS}"
    fi
}
# Creates an EBS volume from a snapshot and waits until it is available.
#   $1 snapshot id, $2 volume type (e.g. gp2/io1), $3 provisioned IOPS (io1 only),
#   $4 name of the variable that receives the new volume id.
# Bug fix: restore_from_snapshot passes a result-variable name as the 4th argument,
# but the original ignored it and always assigned VOLUME_ID via eval. The
# out-variable is now honoured, defaulting to VOLUME_ID for backward compatibility.
function create_volume {
    local SNAPSHOT_ID="$1"
    local VOLUME_TYPE="$2"
    local PROVISIONED_IOPS="$3"
    local __RETURN="${4:-VOLUME_ID}"
    local OPTIONAL_ARGS=
    if [ "io1" == "${VOLUME_TYPE}" ] && [ ! -z "${PROVISIONED_IOPS}" ]; then
        info "Restoring volume with ${PROVISIONED_IOPS} provisioned IOPS"
        OPTIONAL_ARGS="--iops ${PROVISIONED_IOPS}"
    fi
    local NEW_VOLUME_ID=$(aws ec2 create-volume --snapshot ${SNAPSHOT_ID} --availability-zone ${AWS_AVAILABILITY_ZONE} --volume-type ${VOLUME_TYPE} ${OPTIONAL_ARGS} | jq -r '.VolumeId')
    aws ec2 wait volume-available --volume-ids "${NEW_VOLUME_ID}"
    success "Restored snapshot ${SNAPSHOT_ID} into volume ${NEW_VOLUME_ID}"
    eval ${__RETURN}=\"${NEW_VOLUME_ID}\"
}
# Attaches a volume to an instance at the given device name and blocks until
# the attachment completes.
#   $1 volume id, $2 device name (e.g. /dev/xvdf), $3 instance id.
function attach_volume {
    local VOLUME_ID="$1"
    local DEVICE_NAME="$2"
    local INSTANCE_ID="$3"
    aws ec2 attach-volume --volume-id "${VOLUME_ID}" --instance "${INSTANCE_ID}" --device "${DEVICE_NAME}" > /dev/null
    wait_attached_volume "${VOLUME_ID}"
    success "Attached volume ${VOLUME_ID} to device ${DEVICE_NAME} at instance ${INSTANCE_ID}"
}

# Polls the attachment state of a volume every 10s until it reports "attached",
# bailing out after a 120-second timeout.
function wait_attached_volume {
    local VOLUME_ID="$1"
    info "Waiting for volume ${VOLUME_ID} to be attached. This could take some time"
    TIMEOUT=120
    END=$((SECONDS+${TIMEOUT}))
    local STATE='attaching'
    while [ $SECONDS -lt $END ]; do
        # aws ec2 wait volume-in-use ${VOLUME_ID} is not enough.
        # A volume state can be 'in-use' while its attachment state is still 'attaching'
        # If the volume is not fully attach we cannot issue a mount command for it
        STATE=$(aws ec2 describe-volumes --volume-ids ${VOLUME_ID} | jq -r '.Volumes[0].Attachments[0].State')
        info "Volume ${VOLUME_ID} state: ${STATE}"
        if [ "attached" == "${STATE}" ]; then
            break
        fi
        sleep 10
    done
    if [ "attached" != "${STATE}" ]; then
        bail "Unable to attach volume ${VOLUME_ID}. Attachment state is ${STATE} after ${TIMEOUT} seconds"
    fi
}
# End-to-end restore: create a volume from a snapshot, attach it to this
# instance (id read from instance metadata) and mount it.
#   $1 snapshot id, $2 volume type, $3 provisioned IOPS, $4 device name, $5 mount point.
function restore_from_snapshot {
    local SNAPSHOT_ID="$1"
    local VOLUME_TYPE="$2"
    local PROVISIONED_IOPS="$3"
    local DEVICE_NAME="$4"
    local MOUNT_POINT="$5"
    local VOLUME_ID=
    # create_volume assigns the new id into VOLUME_ID (out-variable name passed last).
    create_volume "${SNAPSHOT_ID}" "${VOLUME_TYPE}" "${PROVISIONED_IOPS}" VOLUME_ID
    local INSTANCE_ID=$(curl ${CURL_OPTIONS} http://169.254.169.254/latest/meta-data/instance-id)
    attach_volume "${VOLUME_ID}" "${DEVICE_NAME}" "${INSTANCE_ID}"
    mount_device "${DEVICE_NAME}" "${MOUNT_POINT}"
}
# Resolves the EBS snapshot id for a Name tag and returns it via the variable
# named in $2; bails (listing available tags) when no snapshot matches.
function validate_ebs_snapshot {
    local SNAPSHOT_TAG="$1"
    local __RETURN=$2
    local SNAPSHOT_ID="$(aws ec2 describe-snapshots --filters Name=tag-key,Values=\"Name\" Name=tag-value,Values=\"${SNAPSHOT_TAG}\" | jq -r '.Snapshots[0]?.SnapshotId')"
    if [ -z "${SNAPSHOT_ID}" ] || [ "${SNAPSHOT_ID}" == null ]; then
        error "Could not find EBS snapshot for tag ${SNAPSHOT_TAG}"
        list_available_ebs_snapshot_tags
        bail "Please select an available tag"
    else
        info "Found EBS snapshot ${SNAPSHOT_ID} for tag ${SNAPSHOT_TAG}"
        eval ${__RETURN}="${SNAPSHOT_ID}"
    fi
}

# Bails if any volume is already attached to this instance under the given
# device name — the restore needs the device name free.
function validate_device_name {
    local DEVICE_NAME="${1}"
    local INSTANCE_ID=$(curl ${CURL_OPTIONS} http://169.254.169.254/latest/meta-data/instance-id)
    # If there's a volume taking the provided DEVICE_NAME it must be unmounted and detached
    info "Checking for existing volumes using device name ${DEVICE_NAME}"
    local VOLUME_ID="$(aws ec2 describe-volumes --filter Name=attachment.instance-id,Values=${INSTANCE_ID} Name=attachment.device,Values=${DEVICE_NAME} | jq -r '.Volumes[0].VolumeId')"
    case "${VOLUME_ID}" in vol-*)
        error "Device name ${DEVICE_NAME} appears to be taken by volume ${VOLUME_ID}"
        bail "Please stop Bamboo. Stop PostgreSQL if it is running. Unmount the device and detach the volume"
        ;;
    esac
}

# Snapshots an RDS instance, tagging it with the per-run Name tag and waiting
# until the instance becomes available again.
#   $1 RDS instance identifier.
function snapshot_rds_instance {
    local INSTANCE_ID="$1"
    if [ ! -z "${AWS_ADDITIONAL_TAGS}" ]; then
        COMMA=', '
    fi
    AWS_TAGS="[ {\"Key\": \"${SNAPSHOT_TAG_KEY}\", \"Value\": \"${SNAPSHOT_TAG_VALUE}\"}${COMMA}${AWS_ADDITIONAL_TAGS} ]"
    # We use SNAPSHOT_TAG as the snapshot identifier because it's unique and because it will allow us to relate an EBS snapshot to an RDS snapshot by tag
    aws rds create-db-snapshot --db-instance-identifier "${INSTANCE_ID}" --db-snapshot-identifier "${SNAPSHOT_TAG_VALUE}" --tags "${AWS_TAGS}" > /dev/null
    # Wait until the database has completed the backup
    info "Waiting for instance ${INSTANCE_ID} to complete backup. This could take some time"
    aws rds wait db-instance-available --db-instance-identifier "${INSTANCE_ID}"
    success "Taken snapshot ${SNAPSHOT_TAG_VALUE} of RDS instance ${INSTANCE_ID}"
    info "Tagged ${SNAPSHOT_TAG_VALUE} with ${AWS_TAGS}"
}
# Restores an RDS snapshot into a new instance, optionally overriding instance
# class / subnet group, waits for availability, then optionally swaps in the
# configured VPC security group.
#   $1 new instance identifier, $2 snapshot identifier.
function restore_rds_instance {
    local INSTANCE_ID="$1"
    local SNAPSHOT_ID="$2"
    local OPTIONAL_ARGS=
    if [ ! -z "${RESTORE_RDS_INSTANCE_CLASS}" ]; then
        info "Restoring database to instance class ${RESTORE_RDS_INSTANCE_CLASS}"
        OPTIONAL_ARGS="--db-instance-class ${RESTORE_RDS_INSTANCE_CLASS}"
    fi
    if [ ! -z "${RESTORE_RDS_SUBNET_GROUP_NAME}" ]; then
        info "Restoring database to subnet group ${RESTORE_RDS_SUBNET_GROUP_NAME}"
        OPTIONAL_ARGS="${OPTIONAL_ARGS} --db-subnet-group-name ${RESTORE_RDS_SUBNET_GROUP_NAME}"
    fi
    aws rds restore-db-instance-from-db-snapshot --db-instance-identifier "${INSTANCE_ID}" --db-snapshot-identifier "${SNAPSHOT_ID}" ${OPTIONAL_ARGS} > /dev/null
    info "Waiting until the RDS instance is available. This could take some time"
    aws rds wait db-instance-available --db-instance-identifier "${INSTANCE_ID}"
    info "Restored snapshot ${SNAPSHOT_ID} to instance ${INSTANCE_ID}"
    if [ ! -z "${RESTORE_RDS_SECURITY_GROUP}" ]; then
        # When restoring a DB instance outside of a VPC this command will need to be modified to use --db-security-groups instead of --vpc-security-group-ids
        # For more information see http://docs.aws.amazon.com/cli/latest/reference/rds/modify-db-instance.html
        aws rds modify-db-instance --apply-immediately --db-instance-identifier "${INSTANCE_ID}" --vpc-security-group-ids "${RESTORE_RDS_SECURITY_GROUP}" > /dev/null
        info "Changed security groups of ${INSTANCE_ID} to ${RESTORE_RDS_SECURITY_GROUP}"
    fi
}

# Looks up the volume currently attached at a device name on this instance and
# returns the id via the variable named in $2 (may be null if none attached).
function validate_ebs_volume {
    local DEVICE_NAME="${1}"
    local __RETURN=$2
    local INSTANCE_ID=$(curl ${CURL_OPTIONS} http://169.254.169.254/latest/meta-data/instance-id)
    info "Looking up volume for device name ${DEVICE_NAME}"
    local VOLUME_ID="$(aws ec2 describe-volumes --filter Name=attachment.instance-id,Values=${INSTANCE_ID} Name=attachment.device,Values=${DEVICE_NAME} | jq -r '.Volumes[0].VolumeId')"
    eval ${__RETURN}="${VOLUME_ID}"
}

# Bails unless the named RDS instance exists and is in the "available" state.
function validate_rds_instance_id {
    local INSTANCE_ID="$1"
    STATE=$(aws rds describe-db-instances --db-instance-identifier ${INSTANCE_ID} | jq -r '.DBInstances[0].DBInstanceStatus')
    if [ -z "${STATE}" ] || [ "${STATE}" == null ]; then
        error "Could not retrieve instance status for db ${INSTANCE_ID}"
        bail "Please make sure you have selected an existing rds instance"
    elif [ "${STATE}" != "available" ]; then
        error "The instance ${INSTANCE_ID} status is ${STATE}"
        bail "The instance must be available before the backup can be started"
    fi
}

# Bails (listing available tags) unless an RDS snapshot exists whose identifier
# matches the given tag value.
function validate_rds_snapshot {
    local SNAPSHOT_TAG="$1"
    local SNAPSHOT_ID=$(aws rds describe-db-snapshots --db-snapshot-identifier \"${SNAPSHOT_TAG}\" | jq -r '.DBSnapshots[0]?.DBSnapshotIdentifier')
    if [ -z "${SNAPSHOT_ID}" ] || [ "${SNAPSHOT_ID}" == null ]; then
        error "Could not find RDS snapshot for tag ${SNAPSHOT_TAG}"
        list_available_ebs_snapshot_tags
        bail "Please select a tag with an associated RDS snapshot"
    else
        info "Found RDS snapshot ${SNAPSHOT_ID} for tag ${SNAPSHOT_TAG}"
    fi
}
# Prints all snapshot Name-tag values for this instance, newest first.
function list_available_ebs_snapshot_tags {
    # Print a list of all snapshots tag values that start with the tag prefix
    print "Available snapshot tags:"
    aws ec2 describe-snapshots --filters Name=tag-key,Values="Name" Name=tag-value,Values="${SNAPSHOT_TAG_PREFIX}*" | jq -r ".Snapshots[].Tags[] | select(.Key == \"Name\") | .Value" | sort -r
}

# List all RDS DB snapshots older than the most recent ${KEEP_BACKUPS}
function list_old_rds_snapshot_ids {
    if [ "${KEEP_BACKUPS}" -gt 0 ]; then
        aws rds describe-db-snapshots --snapshot-type manual | jq -r ".DBSnapshots | map(select(.DBSnapshotIdentifier | startswith(\"${SNAPSHOT_TAG_PREFIX}\"))) | sort_by(.SnapshotCreateTime) | reverse | .[${KEEP_BACKUPS}:] | map(.DBSnapshotIdentifier)[]"
    fi
}

# Deletes a single RDS snapshot by identifier.
function delete_rds_snapshot {
    local SNAPSHOT_ID="$1"
    aws rds delete-db-snapshot --db-snapshot-identifier "${SNAPSHOT_ID}" > /dev/null
}

# List all EBS snapshots older than the most recent ${KEEP_BACKUPS}
function list_old_ebs_snapshot_ids {
    if [ "${KEEP_BACKUPS}" -gt 0 ]; then
        aws ec2 describe-snapshots --filters "Name=tag:Name,Values=${SNAPSHOT_TAG_PREFIX}*" | jq -r ".Snapshots | sort_by(.StartTime) | reverse | .[${KEEP_BACKUPS}:] | map(.SnapshotId)[]"
    fi
}

# Deletes a single EBS snapshot by id.
function delete_ebs_snapshot {
    local SNAPSHOT_ID="$1"
    aws ec2 delete-snapshot --snapshot-id "${SNAPSHOT_ID}"
}
|
from truth.truth import AssertThat
from ..emulator.c_types import Byte
test_program = """
; TestPrg
* = $1000
lda #$FF
start
sta $90
sta $8000
eor #$CC
jmp start
"""
test_program = [Byte(x) for x in [0x00, 0x10, 0xA9, 0xFF, 0x85, 0x90, 0x8D, 0x00, 0x80, 0x49, 0xCC, 0x4C, 0x02, 0x10]]
def test_load_program_into_the_correct_area_of_memory(cpu):
    """The 2-byte load-address header must place the code at $1000, leaving the
    bytes just before and after the program untouched."""
    # when:
    cpu.load_program(test_program, len(test_program))
    # then:
    AssertThat(cpu.Memory[0x0FFF]).IsEqualTo(Byte(0x0))   # byte before the program
    AssertThat(cpu.Memory[0x1000]).IsEqualTo(Byte(0xA9))  # first opcode (LDA #)
    AssertThat(cpu.Memory[0x1001]).IsEqualTo(Byte(0xFF))
    AssertThat(cpu.Memory[0x1002]).IsEqualTo(Byte(0x85))
    # ...
    AssertThat(cpu.Memory[0x1009]).IsEqualTo(Byte(0x4C))  # JMP absolute
    AssertThat(cpu.Memory[0x100A]).IsEqualTo(Byte(0x02))
    AssertThat(cpu.Memory[0x100B]).IsEqualTo(Byte(0x10))
    AssertThat(cpu.Memory[0x100C]).IsEqualTo(Byte(0x0))   # byte after the program
def test_load_program_and_execute_it(cpu):
    """Smoke test: run the loaded program for ~1000 clock cycles without faulting."""
    # when:
    start_address = cpu.load_program(test_program, len(test_program))
    cpu.program_counter = start_address
    # then:
    clock = 1000
    while clock > 0:
        # NOTE(review): if cpu.execute(1) can ever return 0 this loop never
        # terminates — confirm the emulator always consumes at least one cycle.
        clock -= cpu.execute(1)
# TODO: LoadThe6502TestPrg - unsure on how to read in as binary
# def test_load_6502_program_and_execute_it(cpu):
# # when:
# program = []
# with open(Path(__file__).parent / "6502_functional_test.bin", "rb") as f:
# data = [Byte(x) for x in bytearray(f.read())]
# start_address = cpu.load_program(program, len(test_program))
# cpu.program_counter = 0x400
# while True:
# cpu.execute(1)
|
#!/bin/bash
# Computes pairwise and three-way agreement counts between "my" move ($3) and two
# predictors' moves ($4, $5); the output feeds a three-way Venn diagram.
datafile="./what_did_dfo_learn.uniq.concise.csv"
#only DFO tele points
#awk '$5 == 13' $datafile > /tmp/temp_three_way_venn_diagram.dat
# NOTE(review): this filter tests $5, which the header note below labels
# pred2_move, yet the comment above says it selects tele points — confirm the
# actual column order of the data file.
awk '$5 == 4' $datafile > /tmp/temp_three_way_venn_diagram.dat
datafile="/tmp/temp_three_way_venn_diagram.dat"
#tele	level	my_move	pred1_move	pred2_move
total_n_samples=$(grep -v tele $datafile | wc -l)
# Rows where all three movers chose the same move.
all_agree=$(grep -v tele $datafile | awk '$5 == $4 && $4 == $3' | wc -l)
echo "all_agree: $all_agree"
echo "total_n_samples: $total_n_samples"
echo "Title: Total_Agree Total_Disagree Exclusive_Agree"
# Pairwise: me vs pred1 (exclusive = agree with pred1 but not all three).
pred1_agrees_with_me=$(grep -v tele $datafile | awk '$3 == $4' | wc -l)
pred1_disagrees_with_me=$(echo $total_n_samples - $pred1_agrees_with_me | bc -l)
pred1_agrees_with_me_and_not_pred2=$(echo $pred1_agrees_with_me - $all_agree | bc -l)
echo "pred1&me: $pred1_agrees_with_me $pred1_disagrees_with_me $pred1_agrees_with_me_and_not_pred2"
# Pairwise: me vs pred2.
pred2_agrees_with_me=$(grep -v tele $datafile | awk '$3 == $5' | wc -l)
pred2_disagrees_with_me=$(echo $total_n_samples - $pred2_agrees_with_me | bc -l)
pred2_agrees_with_me_and_not_pred1=$(echo $pred2_agrees_with_me - $all_agree | bc -l)
echo "pred2&me: $pred2_agrees_with_me $pred2_disagrees_with_me $pred2_agrees_with_me_and_not_pred1"
# Pairwise: pred1 vs pred2.
pred1_agrees_with_pred2=$(grep -v tele $datafile | awk '$5 == $4' | wc -l)
pred1_disagrees_with_pred2=$(echo $total_n_samples - $pred1_agrees_with_pred2 | bc -l)
pred1_agrees_with_pred2_and_not_me=$(echo $pred1_agrees_with_pred2 - $all_agree | bc -l)
echo "pred1&pred2: $pred1_agrees_with_pred2 $pred1_disagrees_with_pred2 $pred1_agrees_with_pred2_and_not_me"
echo "Sole Decisions:"
# Rows where each party disagrees with both of the other two.
all_disagree_with_me=$(grep -v tele $datafile | awk '$3 != $4 && $3 != $5' | wc -l)
all_disagree_with_pred1=$(grep -v tele $datafile | awk '$3 != $4 && $4 != $5' | wc -l)
all_disagree_with_pred2=$(grep -v tele $datafile | awk '$5 != $4 && $3 != $5' | wc -l)
echo $all_disagree_with_me $all_disagree_with_pred1 $all_disagree_with_pred2
|
def addNDVI(image):
    """Append an NDVI band to an Earth Engine image.

    NDVI is computed with normalizedDifference over bands B8 and B4
    (NIR and Red on Sentinel-2 — confirm for other sensors).

    Args:
        image: an ee.Image exposing bands 'B8' and 'B4'.

    Returns:
        The input image with an additional band named 'NDVI'.
    """
    ndvi = image.normalizedDifference(['B8', 'B4']).rename('NDVI')
    return image.addBands(ndvi)
|
#include <algorithm>
#include <any>
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>
// Minimal string-keyed publish/subscribe event bus. Handlers are stored per
// event type and invoked in registration order.
//
// Bug fix: the original unregisterEventHandler compared std::function objects
// with std::remove / operator==, which does not compile — std::function is not
// equality-comparable. Handlers wrapping the same plain function pointer are now
// matched via std::function::target; handlers wrapping capturing lambdas or
// functors cannot be identified that way and are left in place.
class EventSystem {
public:
    // Subscribe handlerFunction to eventType.
    void registerEventHandler(const std::string& eventType, std::function<void(const std::any&)> handlerFunction) {
        eventHandlers[eventType].push_back(std::move(handlerFunction));
    }

    // Invoke every handler registered for eventType with eventData.
    void triggerEvent(const std::string& eventType, const std::any& eventData) {
        auto it = eventHandlers.find(eventType);
        if (it == eventHandlers.end()) {
            return;
        }
        for (const auto& handler : it->second) {
            handler(eventData);
        }
    }

    // Remove every handler that wraps the same plain function pointer as
    // handlerFunction. No-op for non-function-pointer handlers (see class note).
    void unregisterEventHandler(const std::string& eventType, std::function<void(const std::any&)> handlerFunction) {
        using FnPtr = void (*)(const std::any&);
        const FnPtr* wanted = handlerFunction.target<FnPtr>();
        if (wanted == nullptr) {
            return;  // not a plain function pointer; cannot be matched reliably
        }
        auto it = eventHandlers.find(eventType);
        if (it == eventHandlers.end()) {
            return;
        }
        auto& handlers = it->second;
        handlers.erase(std::remove_if(handlers.begin(), handlers.end(),
                                      [wanted](const std::function<void(const std::any&)>& h) {
                                          const FnPtr* candidate = h.target<FnPtr>();
                                          return candidate != nullptr && *candidate == *wanted;
                                      }),
                       handlers.end());
    }

private:
    std::unordered_map<std::string, std::vector<std::function<void(const std::any&)>>> eventHandlers;
};
// Example usage
// Sample handler: expects the event payload to hold an int
// (std::any_cast throws std::bad_any_cast otherwise).
void onEvent(const std::any& eventData) {
    std::cout << "Event triggered with data: " << std::any_cast<int>(eventData) << std::endl;
}
// Demonstrates register -> trigger -> unregister for a "click" event.
int main() {
    EventSystem eventSystem;
    eventSystem.registerEventHandler("click", onEvent);
    eventSystem.triggerEvent("click", 42);
    eventSystem.unregisterEventHandler("click", onEvent);
    return 0;
}
DRYRUN=""
# DRYRUN="--dryrun "
DATETAG="LP$( date +"%Y%m%d_%H%M" )"
SNAKEFILE=../../code/pipeline/SuRE-snakemake
CONFIG=config-Dm10_I33_LP20191029.yml
LOG="${CONFIG%.yml}_run-${DATETAG}.log"
NCORES=15
RAM=100
TARGET="bedpe_merged_smpls"
TARGET="merged_ipcr_cdna"
TARGET="reversed_liftover"
TARGET="trim_iPCR"
TARGET="split_bam"
TARGET="trim_iPCR"
TARGET="sorted_cnt_tbls"
#TARGET="bedpe_BC"
# TARGET="bed2coverage_done"
CMD="/usr/bin/time -v nice -19 snakemake ${DRYRUN}-prs ${SNAKEFILE} --use-conda --resources ram=${RAM} --configfile ${CONFIG} --cores $NCORES ${TARGET} &> ${LOG}"
echo "${CMD}"
eval ${CMD}
|
public static boolean containsDuplicate(int[] array) {
Set<Integer> set = new HashSet<>();
for (int i : array) {
if (!set.add(i)) {
return true;
}
}
return false;
} |
import { Component } from '@angular/core';
@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.css'],
})
export class AppComponent {
  /** Application title shown in the template. */
  title = 'Platzisquare';

  /** Demo list of places; `active` flags which ones the view treats as enabled. */
  places: any = [
    { name: 'Negocio 1', active: true },
    { name: 'Negocio 2', active: true },
    { name: 'Negocio 3', active: false },
    { name: 'Negocio 4', active: true },
  ];

  /** Initial map center coordinates. */
  lat: number = 19.47853;
  lng: number = -99.0529936;
}
|
package com.java.study.zuo.basic.chapter1;
import java.util.Arrays;
// Placeholder for the chapter-1 bubble-sort exercise — implementation not yet written.
// NOTE(review): java.util.Arrays is imported above but unused until the sort exists.
public class Code_00_BubbleSort {
}
|
import {Component, Input, OnDestroy} from '@angular/core';
@Component({
    selector: 'accordion',
    templateUrl: 'app/common/components/accordion.html',
    host: {
        'class': 'panel-group'
    }
})
export class Accordion {
    /** Groups currently registered with this accordion, in registration order. */
    groups: Array<AccordionGroup> = [];

    /** Registers a group so it participates in the accordion's behaviour. */
    addGroup(group: AccordionGroup): void {
        this.groups.push(group);
    }

    /** Closes every registered group except `openGroup`. */
    closeOthers(openGroup: AccordionGroup): void {
        for (const candidate of this.groups) {
            if (candidate !== openGroup) {
                candidate.isOpen = false;
            }
        }
    }

    /** Deregisters a group (invoked from the group's ngOnDestroy). */
    removeGroup(group: AccordionGroup): void {
        const position = this.groups.indexOf(group);
        if (position !== -1) {
            this.groups.splice(position, 1);
        }
    }
}
@Component({
    selector: 'accordion-group',
    templateUrl: 'app/common/components/accordion-group.html',
})
export class AccordionGroup implements OnDestroy {
    // Fix: typed as boolean (was `any`) — the field only ever holds true/false.
    private _isOpen: boolean = false;

    /** Heading text displayed in the group's toggle bar. */
    @Input() heading: string;

    @Input()
    set isOpen(value: boolean) {
        this._isOpen = value;
        if (value) {
            // Exclusive-open behaviour is currently disabled.
            //this.accordion.closeOthers(this);
        }
    }
    get isOpen() {
        return this._isOpen;
    }

    constructor(private accordion: Accordion) {
        // Register with the parent accordion so it can manage this group.
        this.accordion.addGroup(this);
    }

    ngOnDestroy() {
        this.accordion.removeGroup(this);
    }

    /** Toggles the group open/closed from a header click. */
    toggleOpen(event: MouseEvent): void {
        event.preventDefault();
        this.isOpen = !this.isOpen;
    }
}
|
<filename>bin/helpers/toCase.js
module.exports = (string, selectedCase) => {
string = string
.split('')
.map((char, idx) =>
idx === 0
? `${
selectedCase === 'camel' ? char.toLowerCase() : char.toUpperCase()
}`
: char
)
.join('');
return string;
};
|
# Generate JAXB classes for the BigFix BES REST API schemas (v9.2).
# -extension: allow vendor extensions; -no-header: omit generated-file headers;
# -p: target Java package; -b: external bindings customisation file.
xjc -extension -no-header -d src/main/java/ -p com.bigfix.schemas.besapi -b schema/9.2/bindings.xjb schema/9.2/BESAPI.xsd
xjc -extension -no-header -d src/main/java/ -p com.bigfix.schemas.bes -b schema/9.2/bindings.xjb schema/9.2/BES.xsd
|
#!/usr/bin/env bash
# Common JVM/logging environment setup for CellBase launcher scripts.
# Variables defined in main script:
#   BASEDIR, PRGDIR, PRG, JAVA_OPTS
MONITOR_AGENT=""
## TODO We must make sure we load any existing JAR file, only one can exist.
if [ -e "${BASEDIR}/monitor/dd-java-agent.jar" ]; then
    MONITOR_AGENT="-javaagent:${BASEDIR}/monitor/dd-java-agent.jar"
fi
JAVA_HEAP="2048m"
# Fall back to values scraped from configuration.yml when the env vars are unset.
# (The "OUPUT" spelling is kept: it is part of the external environment contract.)
CELLBASE_LOG_DIR=${CELLBASE_LOG_DIR:-$(grep "logDir" "${BASEDIR}/conf/configuration.yml" | cut -d ":" -f 2 | tr -d '" ')}
CELLBASE_LOG_LEVEL=${CELLBASE_LOG_LEVEL:-$(grep "logLevel" "${BASEDIR}/conf/configuration.yml" | cut -d ":" -f 2 | tr -d '" ')}
CELLBASE_LOG_OUPUT=${CELLBASE_LOG_OUPUT:-$(grep "logOuput" "${BASEDIR}/conf/configuration.yml" | cut -d ":" -f 2 | tr -d '" ')}
CELLBASE_LOG_CONFIG="log4j2.xml"
if [ "$(basename "$PRG")" = "cellbase-admin.sh" ]; then
    JAVA_HEAP="8192m"
    CELLBASE_LOG_CONFIG="log4j2.console.xml"
    # Bug fix: the original tested the literal string CELLBASE_LOG_OUPUT (missing
    # the $ expansion), which can never equal "file", so the file appender
    # configuration was never selected.
    if [ "${CELLBASE_LOG_OUPUT}" = "file" ]; then
        CELLBASE_LOG_CONFIG="log4j2.file.xml"
    fi
fi
#Set log4j properties file
export JAVA_OPTS="${JAVA_OPTS} -Dlog4j.configurationFile=file:${BASEDIR}/conf/${CELLBASE_LOG_CONFIG}"
export JAVA_OPTS="${JAVA_OPTS} -Dcellbase.log.level=${CELLBASE_LOG_LEVEL}"
export JAVA_OPTS="${JAVA_OPTS} -Dfile.encoding=UTF-8"
export JAVA_OPTS="${JAVA_OPTS} -Xms256m -Xmx${JAVA_HEAP}"
export JAVA_OPTS="${JAVA_OPTS} ${MONITOR_AGENT}"
if [ -n "$CELLBASE_LOG_DIR" ]; then
    export JAVA_OPTS="${JAVA_OPTS} -Dcellbase.log.dir=${CELLBASE_LOG_DIR}"
fi
|
#!/bin/bash
# Instruments the subject with AFLGo and generates the distance file used for
# directed fuzzing towards the patch location(s) given on the command line.
usage="Usage: afl-generateDistance.sh PATCH_LOCATION"
# Fresh scratch directory for AFLGo's intermediate outputs.
rm -rf temp
mkdir temp
export TMP_DIR=$PWD/temp
if [ -f $TMP_DIR/BBtargets.txt ]; then
    rm $TMP_DIR/BBtargets.txt
fi
# Bug fix: `[[ $# < 1 ]]` is a lexicographic string comparison inside [[ ]];
# the numeric -lt operator is the correct argument-count check.
if [[ $# -lt 1 ]]; then
    echo "$usage"
    exit 1
fi
# Record each target as FILE:LINE in BBtargets.txt (one argument per target).
length=$#
for (( c=1; c<=length; c++ ))
do
    target="$1"
    echo $BUGGY_FILE:$target >> $TMP_DIR/BBtargets.txt
    shift
done
# Save the current toolchain so it can be restored after instrumentation.
export INITIAL_CC=$CC
export INITIAL_CXX=$CXX
export INITIAL_CFLAGS=$CFLAGS
export INITIAL_CXXFLAGS=$CXXFLAGS
export INITIAL_PATH=$PATH
# Set aflgo-instrumenter
export CC=$AFLGO/afl-clang-fast
export CXX=$AFLGO/afl-clang-fast++
export ADDITIONAL="-targets=$TMP_DIR/BBtargets.txt -outdir=$TMP_DIR -flto -fuse-ld=gold -Wl,-plugin-opt=save-temps"
export CFLAGS="$CFLAGS $ADDITIONAL"
export CXXFLAGS="$CXXFLAGS $ADDITIONAL"
# Build the subject with the instrumenting toolchain.
pushd ../$SUBJECT > /dev/null
./project_config.sh
./project_build.sh
popd > /dev/null
# Clean up
cat $TMP_DIR/BBnames.txt | rev | cut -d: -f2- | rev | sort | uniq > $TMP_DIR/BBnames2.txt && mv $TMP_DIR/BBnames2.txt $TMP_DIR/BBnames.txt
cat $TMP_DIR/BBcalls.txt | sort | uniq > $TMP_DIR/BBcalls2.txt && mv $TMP_DIR/BBcalls2.txt $TMP_DIR/BBcalls.txt
# Restore the original toolchain.
export LDFLAGS=
export CC=$INITIAL_CC
export CXX=$INITIAL_CXX
export CFLAGS=$INITIAL_CFLAGS
export CXXFLAGS=$INITIAL_CXXFLAGS
export PATH=$INITIAL_PATH
# Generate distance
$AFLGO/scripts/genDistance.sh $OUT $TMP_DIR $BINARY
cp $TMP_DIR/distance.cfg.txt $OUT
#rm -rf $TMP_DIR
|
require 'spec_helper'

RSpec.describe Hitnmiss::Repository::Fetcher do
  # Anonymous class mixing in the module under test.
  let(:fetcher_class) do
    Class.new { include Hitnmiss::Repository::Fetcher }
  end

  describe '#fetch' do
    it 'raises error indicating not implemented' do
      expect {
        fetcher_class.new.send(:fetch)
      }.to raise_error(Hitnmiss::Errors::NotImplemented)
    end
  end

  describe '#fetch_all' do
    it 'raises error indicating not implemented' do
      keyspace = double('keyspace')
      expect {
        fetcher_class.new.send(:fetch_all, keyspace)
      }.to raise_error(Hitnmiss::Errors::NotImplemented)
    end
  end
end
|
#!/bin/sh
# CI build of GDAL (and its bundled PROJ) with Clang sanitizers
# (ASan + UBSan) enabled. Assumes the clang+llvm 6.0.1 tarball, proj/ and
# gdal/ checkouts, ccache and sudo are available in the working directory.
set -e
# Resolve the directory this script lives in, handling absolute paths,
# "." and relative invocations.
SCRIPT_DIR=$(dirname "$0")
case $SCRIPT_DIR in
"/"*)
;;
".")
SCRIPT_DIR=$(pwd)
;;
*)
SCRIPT_DIR=$(pwd)/$(dirname "$0")
;;
esac
$SCRIPT_DIR/../common_install.sh
# Allow huge allocations to return NULL instead of aborting under ASan.
export ASAN_OPTIONS=allocator_may_return_null=1
export CCACHE_CPP2=yes
export CC="ccache $PWD/clang+llvm-6.0.1-x86_64-linux-gnu-ubuntu-16.04/bin/clang"
export CXX="ccache $PWD/clang+llvm-6.0.1-x86_64-linux-gnu-ubuntu-16.04/bin/clang++"
ccache -M 1G
ccache -s
# Build proj
(cd proj; ./autogen.sh && CFLAGS='-DPROJ_RENAME_SYMBOLS' CXXFLAGS='-DPROJ_RENAME_SYMBOLS' ./configure --disable-static --prefix=/usr/local && make -j3)
# Install PROJ under an "internal" library name so it cannot clash with a
# system-installed libproj.
(cd proj; sudo make -j3 install && sudo mv /usr/local/lib/libproj.so.15.0.0 /usr/local/lib/libinternalproj.so.15.0.0 && sudo rm /usr/local/lib/libproj.so* && sudo rm /usr/local/lib/libproj.la && sudo ln -f -s libinternalproj.so.15.0.0 /usr/local/lib/libinternalproj.so.15 && sudo ln -f -s libinternalproj.so.15.0.0 /usr/local/lib/libinternalproj.so)
cd gdal
# Configure GDAL with sanitizers and a broad set of drivers.
SANITIZE_FLAGS="-DMAKE_SANITIZE_HAPPY -fsanitize=undefined -fsanitize=address -fsanitize=unsigned-integer-overflow"
CFLAGS=$SANITIZE_FLAGS CXXFLAGS=$SANITIZE_FLAGS LDFLAGS="-fsanitize=undefined -fsanitize=address -lstdc++" ./configure --prefix=/usr --without-libtool --enable-debug --with-jpeg12 --with-poppler --without-podofo --with-spatialite --with-mysql --with-liblzma --with-webp --with-epsilon --with-libtiff=internal --with-rename-internal-libtiff-symbols --with-hide-internal-symbols --with-gnm --with-proj=/usr/local --with-fgdb=$PWD/../FileGDB_API-64gcc51
# Post-edit the generated make options: use the shared ASan runtime and
# make unsigned-integer-overflow failures fatal.
sed -i "s/-fsanitize=address/-fsanitize=address -shared-libasan/g" GDALmake.opt
sed -i "s/-fsanitize=unsigned-integer-overflow/-fsanitize=unsigned-integer-overflow -fno-sanitize-recover=unsigned-integer-overflow/g" GDALmake.opt
make USER_DEFS="-Werror" -j3
cd apps
make USER_DEFS="-Werror" test_ogrsf
cd ..
cd swig/python
# Wrap $CC so the Python extension build links the sanitizer runtimes too.
echo "#!/bin/sh" > mycc.sh
echo "$CC -fsanitize=undefined -fsanitize=address -shared-libasan \$*" >> mycc.sh
cat mycc.sh
chmod +x mycc.sh
PATH=$PWD:$PATH CC=mycc.sh python setup.py build
cd ../..
#cd swig/java
#cat java.opt | sed "s/JAVA_HOME =.*/JAVA_HOME = \/usr\/lib\/jvm\/java-7-openjdk-amd64\//" > java.opt.tmp
#mv java.opt.tmp java.opt
#make
#cd ../..
#cd swig/perl
#make generate
#make
#cd ../..
# Install the freshly built GDAL (replacing any system copy).
sudo rm -f /usr/lib/libgdal.so*
sudo make install
cd swig/python
sudo python setup.py install
cd ../..
sudo ldconfig
#g++ -Wall -DDEBUG -fPIC -g ogr/ogrsf_frmts/null/ogrnulldriver.cpp -shared -o ogr_NULL.so -L. -lgdal -Iport -Igcore -Iogr -Iogr/ogrsf_frmts
#GDAL_DRIVER_PATH=$PWD ogr2ogr -f null null ../autotest/ogr/data/poly.shp
# Build the C++ autotest binaries.
cd ../autotest/cpp
make -j3
cd ../../gdal
#wget https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/mdb-sqlite/mdb-sqlite-1.0.2.tar.bz2
#tar xjvf mdb-sqlite-1.0.2.tar.bz2
#sudo cp mdb-sqlite-1.0.2/lib/*.jar /usr/lib/jvm/java-7-openjdk-amd64/jre/lib/ext
ccache -s
|
<gh_stars>1-10
from blitzcrank.pull_text import card_grab |
#!/bin/sh
# system utilities stubs
# Shell stubs replacing real system commands for the pot unit tests.
# Heredoc bodies reproduce the exact output format the code under test
# parses; do not reformat them.
. monitor.sh
# Stub `mount`: emits a fixed ZFS/devfs/nullfs mount table.
mount()
{
cat << EOF--
zroot on /zroot (zfs, local, noatime, nfsv4acls)
zroot/ROOT/default on / (zfs, local, noatime, nfsv4acls)
devfs on /dev (devfs, local, multilabel)
zroot/tmp on /tmp (zfs, local, noatime, nosuid, nfsv4acls)
zroot/usr/home on /usr/home (zfs, local, noatime, nfsv4acls)
zroot/usr/src on /usr/src (zfs, local, noatime, nfsv4acls)
zroot/var/audit on /var/audit (zfs, local, noatime, noexec, nosuid, nfsv4acls)
zroot/var/crash on /var/crash (zfs, local, noatime, noexec, nosuid, nfsv4acls)
zroot/var/log on /var/log (zfs, local, noatime, noexec, nosuid, nfsv4acls)
zroot/var/mail on /var/mail (zfs, local, nfsv4acls)
zroot/var/tmp on /var/tmp (zfs, local, noatime, nosuid, nfsv4acls)
/opt/pot/fscomp/distfiles on /opt/distfiles (nullfs, local)
EOF--
}
# Stub `umount`: records the call (and its arguments) via monitor.sh.
umount()
{
__monitor UMOUNT "$@"
}
# Stub `fetch`: records the call via monitor.sh.
fetch()
{
__monitor FETCH "$@"
}
# Stub `jls`: with -j <name> it reports whether the jail "exists";
# otherwise it prints a fixed jail listing.
jls()
{
if [ "$1" = "-j" ]; then
case "$2" in
"pot-test"|\
"pot-test-2")
return 0 ## return true
esac
return 1
fi
cat << EOF--
 JID IP Address Hostname Path
pot-test pot-test.pot-net /opt/pot/jails/pot-test/m
pot-test-2 pot-test-2.pot-net /opt/pot/jails/pot-test-2/m
EOF--
}
# Stub `sysctl`: output and return code are driven by the test via
# SYSCTL_OUTPUT / SYSCTL_RC (set in setUp).
sysctl()
{
if [ -n "$SYSCTL_OUTPUT" ]; then
echo $SYSCTL_OUTPUT
fi
return $SYSCTL_RC
}
# Stub `which`: only understands "potnet"; failure is simulated via
# WHICH_POTNET_FAIL=YES.
which()
{
if [ "$1" = potnet ]; then
if [ "$WHICH_POTNET_FAIL" = "YES" ]; then
return 1 # false
else
return 0 # true
fi
fi
}
# UUT
. ../share/pot/common.sh
# app specific stubs
# _is_verbose: true only when _POT_VERBOSITY is raised above the default.
test_is_verbose()
{
_is_verbose
assertNotEquals "0" "$?"
_POT_VERBOSITY=2
_is_verbose
assertEquals "0" "$?"
}
# _is_pot_running: resolved through the jls stub above; only pot-test and
# pot-test-2 are "running".
test_is_pot_running()
{
_is_pot_running
assertNotEquals "0" "$?"
_is_pot_running pot
assertNotEquals "0" "$?"
_is_pot_running pot-test
assertNotEquals "1" "$?"
_is_pot_running pot-test-2
assertNotEquals "1" "$?"
}
# _is_valid_potname: dots are not allowed in pot names.
test_is_valid_potname()
{
_is_valid_potname "test-pot"
assertEquals "Refusing valid name" "0" "$?"
_is_valid_potname "test.pot"
assertNotEquals "Invalid name not detected" "0" "$?"
}
# _is_in_list <element> <space-separated list>: membership test.
test_is_in_list()
{
_is_in_list
assertNotEquals "0" "$?"
_is_in_list "asdf"
assertNotEquals "0" "$?"
_is_in_list "asdf" ""
assertNotEquals "0" "$?"
_is_in_list "asdf" "asdf1 asdf2"
assertNotEquals "0" "$?"
_is_in_list "val" "val val1 val2"
assertEquals "0" "$?"
_is_in_list "val" "val1 val val2"
assertEquals "0" "$?"
_is_in_list "val" "val1 val2 val"
assertEquals "0" "$?"
_is_in_list "val" "val"
assertEquals "0" "$?"
_is_in_list "val" "val val"
assertEquals "0" "$?"
}
# _is_mounted: matches mount *points* (not datasets) against the mount stub.
test_is_mounted()
{
_is_mounted
assertNotEquals "0" "$?"
_is_mounted /path/to/the/error
assertNotEquals "0" "$?"
_is_mounted /path/to/the/error ignored
assertNotEquals "0" "$?"
_is_mounted zroot/var/log
assertNotEquals "0" "$?"
_is_mounted /opt/distfiles
assertEquals "0" "$?"
_is_mounted /opt/distfiles ignored
assertEquals "0" "$?"
}
# _umount: only unmounts paths that are actually mounted; verifies the
# recorded "-f <path>" arguments via the umount stub counters.
test_umount()
{
_umount
assertEquals "0" "$UMOUNT_CALLS"
_umount /path/to/the/error
assertEquals "0" "$UMOUNT_CALLS"
_umount /opt/distfiles
assertEquals "1" "$UMOUNT_CALLS"
assertEquals "-f" "$UMOUNT_CALL1_ARG1"
assertEquals "/opt/distfiles" "$UMOUNT_CALL1_ARG2"
}
# _is_cmd_flavorable: commands that must NOT accept flavors.
test_is_cmd_flavorable_01()
{
_is_cmd_flavorable
assertNotEquals "$?" "0"
_is_cmd_flavorable help
assertNotEquals "$?" "0"
_is_cmd_flavorable help create
assertNotEquals "$?" "0"
_is_cmd_flavorable create -p help
assertNotEquals "$?" "0"
_is_cmd_flavorable add-fscomp
assertNotEquals "$?" "0"
_is_cmd_flavorable add-file
assertNotEquals "$?" "0"
}
# _is_cmd_flavorable: commands that DO accept flavors.
test_is_cmd_flavorable_02()
{
_is_cmd_flavorable add-dep
assertEquals "$?" "0"
_is_cmd_flavorable add-dep -v -p me -P you
assertEquals "$?" "0"
_is_cmd_flavorable set-rss
assertEquals "$?" "0"
_is_cmd_flavorable copy-in
assertEquals "$?" "0"
_is_cmd_flavorable mount-in
assertEquals "$?" "0"
}
# _is_rctl_available: driven by the sysctl stub (output "1" => available).
test_is_rctl_available()
{
_is_rctl_available
assertEquals "$?" "0"
SYSCTL_OUTPUT="0"
_is_rctl_available
assertNotEquals "$?" "0"
SYSCTL_OUTPUT=""
SYSCTL_RC=1
_is_rctl_available
assertNotEquals "$?" "0"
}
# _is_potnet_available: driven by the which stub.
test_is_potnet_available()
{
_is_potnet_available
assertEquals "$?" "0"
WHICH_POTNET_FAIL="YES"
_is_potnet_available
assertNotEquals "$?" "0"
}
# _is_absolute_path: true only for paths starting with "/".
test_is_absolute_path()
{
_is_absolute_path
assertEquals "$?" "1"
_is_absolute_path "../blah"
assertEquals "$?" "1"
_is_absolute_path "/blah"
assertEquals "$?" "0"
}
# shunit2 fixture: runs before every test; resets stub control variables.
setUp()
{
_POT_VERBOSITY=1
UMOUNT_CALLS=0
SYSCTL_OUTPUT="1"
SYSCTL_RC=0
WHICH_POTNET_FAIL="NO"
FETCH_CALLS=0
}
. shunit/shunit2
|
<gh_stars>0
// Result accumulator for the benchmark driver (overwritten on every
// iteration by the loop below).
var r = 1;

// Builds a fixed array of pseudo-random integers whose first element
// varies with the task index, bubble-sorts it, and returns the index
// unchanged.
function executeTask(i) {
    var data = [16+i,93,-99,95,-96,-24,-53,-71,96,-66,-21,72,-12,-32,-96,62,-42,-50,49,53,-65,52,-25,-69,88,-43,60,66,-94,-69,53,-71,-17,-58,-30,32,-16,-94,-42,-86,59,-53,94,97,-12,15,65,-35,-12,-82,-82,48,-48,66,-42,-63,33,-49,41,-85,94,66,-60,60,-65,-73,-50,-9,-48,-3,15,-77,81,95,-93,83,-21,34,-78,-61,-22,-58,65,78,41,-7,-59,20,88,-38,-15,-69,42,97,-35,61,8,2,-22,-85];
    bubbleSort(data);
    return i;
}
// In-place bubble sort: repeatedly sweep the array, swapping adjacent
// out-of-order pairs, until a full sweep performs no swap.
function bubbleSort(a)
{
    var didSwap = true;
    while (didSwap) {
        didSwap = false;
        for (var j = 0; j < a.length - 1; j++) {
            if (a[j] > a[j + 1]) {
                var tmp = a[j];
                a[j] = a[j + 1];
                a[j + 1] = tmp;
                didSwap = true;
            }
        }
    }
}
// Benchmark driver: intentionally runs an extremely large number of
// iterations (1e9) to keep the CPU busy; `r` ends up holding the last
// task index.
for ( var i = 0; i < 1000000000; i++) {
r = executeTask(i);
}
|
# diagram.py
# Renders a minimal AWS tenancy diagram using the `diagrams` package
# (requires Graphviz). show=False writes the image file without opening
# a viewer; direction="RL" lays nodes out right-to-left.
from diagrams import Diagram, Cluster
from diagrams.aws.network import CloudMap, VPC

with Diagram("AWS Tenancy", show=False, direction="RL"):
    with Cluster("Tenancy"):
        vpc = VPC("VPC")
        cloudmap = CloudMap("CloudMap")
        # Undirected edge linking the two nodes.
        # NOTE(review): original indentation was lost; the edge is assumed
        # to live inside the "Tenancy" cluster — confirm against the
        # rendered output.
        vpc - cloudmap
class MiningSystem:
    """Runs shocked ("bumped") cost simulations over a mining pipeline.

    Wraps a data manager (``dm``) whose raw-material prices can be bumped
    and reset in place.
    """

    def __init__(self, dm):
        # Data manager exposing bump_raw_materials / reset_raw_materials.
        self.dm = dm

    def bump_and_simulate(self, unit, shocks, scenario):
        """For each item in ``shocks``: bump its raw-material price, rerun
        the simulation, record the result, then reset the bump.

        unit: a "X/Y" string; only the part before "/" is used, for the
            log message (presumably the currency — TODO confirm).
        shocks: mapping item -> shock size passed to bump/reset.
        scenario: list of lists of monikers; flattened via sum(scenario, [])
            to filter the Simulator.

        Returns a dict item -> simulation result of the bumped run.
        """
        result_with_bump = {}
        for item in shocks:
            currency = unit.split("/")[0]
            # Bump the raw materials (mutates self.dm in place).
            self.dm.bump_raw_materials({item: shocks[item]})
            # Create a bumped simulator
            bumped_simulator = Simulator(dm=self.dm, monikers_filter=sum(scenario, []))
            # Generate bumped scenarios: one node set per pipeline layer
            # actually present in this simulator.
            bumped_scenarios = [
                bumped_simulator.nodes[layer] for layer in [
                    PipelineLayer.PAP, PipelineLayer.SAP,
                    PipelineLayer.BENEFICIATION, PipelineLayer.MINE, PipelineLayer.MINE_BENEFICIATION
                ] if layer in bumped_simulator.nodes
            ]
            # Create scenario generator for bumped simulator
            scenario_generator = SGF.create_scenario_generator(ScenarioGeneratorType.SPECIFIC_SCENARIOS, bumped_simulator,
                                                               [bumped_scenarios])
            # Simulate bumped scenarios
            result_with_bump[item], _ = bumped_simulator.simulate(scenario_generator=scenario_generator)
            # Log the simulation results
            # NOTE(review): if simulate() raises, the bump is never reset —
            # consider try/finally (behavior left unchanged here).
            logger.info("Shock %s by %s%f: %f" % (item, currency, shocks[item], result_with_bump[item][1]["Cost PV"]))
            # Reset the raw materials to original state
            self.dm.reset_raw_materials({item: shocks[item]})
        return result_with_bump
/*
* Copyright 2002 Sun Microsystems, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any
* kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
* WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
* EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR
* DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN
* OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR
* FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR
* PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF
* LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that Software is not designed, licensed or intended
* for use in the design, construction, operation or maintenance of
* any nuclear facility.
*/
package com.sun.j2ee.blueprints.purchaseorder.ejb;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.ejb.EntityBean;
import javax.ejb.EntityContext;
import javax.ejb.CreateException;
import javax.ejb.RemoveException;
import java.util.Date;
import java.util.Iterator;
import java.util.ArrayList;
import java.util.Collection;
import com.sun.j2ee.blueprints.contactinfo.ejb.ContactInfoLocal;
import com.sun.j2ee.blueprints.contactinfo.ejb.ContactInfoLocalHome;
import com.sun.j2ee.blueprints.address.ejb.AddressLocal;
import com.sun.j2ee.blueprints.address.ejb.AddressLocalHome;
import com.sun.j2ee.blueprints.creditcard.ejb.CreditCardLocal;
import com.sun.j2ee.blueprints.creditcard.ejb.CreditCardLocalHome;
import com.sun.j2ee.blueprints.lineitem.ejb.LineItemLocal;
import com.sun.j2ee.blueprints.lineitem.ejb.LineItemLocalHome;
import com.sun.j2ee.blueprints.lineitem.ejb.LineItem;
import com.sun.j2ee.blueprints.purchaseorder.ejb.JNDINames;
import com.sun.j2ee.blueprints.servicelocator.ejb.ServiceLocator;
import com.sun.j2ee.blueprints.servicelocator.ServiceLocatorException;
/**
 * This is the main entity bean class for PurchaseOrderEJB.
 * It has a one-to-many relationship with LineItemEJB, and a one-to-one
 * relationship with both ContactInfoEJB and CreditCardEJB.
 */
public abstract class PurchaseOrderEJB implements EntityBean {

    private EntityContext context = null;

    /**
     * Accessor method for the purchase order ID.
     * @return the PO id
     */
    public abstract String getPoId();

    /**
     * Setter method for the purchase order ID.
     * @param id the PO id
     */
    public abstract void setPoId(String id);

    /**
     * Accessor method for the purchase order user ID.
     * @return the PO user id
     */
    public abstract String getPoUserId();

    /**
     * Setter method for the purchase order user ID.
     * @param id the PO user id
     */
    public abstract void setPoUserId(String id);

    /**
     * Accessor method for the purchase order email ID.
     * @return the PO email id
     */
    public abstract String getPoEmailId();

    /**
     * Setter method for the purchase order email ID.
     * @param id the PO email id
     */
    public abstract void setPoEmailId(String id);

    /**
     * Accessor method for the purchase order date.
     * @return the PO date as milliseconds since the epoch
     */
    public abstract long getPoDate();

    /**
     * Setter method for the purchase order date.
     * @param orderDate the PO date as milliseconds since the epoch
     */
    public abstract void setPoDate(long orderDate);

    /**
     * Accessor method for the purchase order locale.
     * @return the PO locale
     */
    public abstract String getPoLocale();

    /**
     * Setter method for the purchase order locale.
     * @param loc the PO locale
     */
    public abstract void setPoLocale(String loc);

    /**
     * Accessor method for the purchase order total value.
     * @return the PO total value
     */
    public abstract float getPoValue();

    /**
     * Setter method for the purchase order total value.
     * @param amount the PO total value
     */
    public abstract void setPoValue(float amount);

    /**
     * Accessor method for the purchase order contact address (CMR field).
     * @return the PO contact address
     */
    public abstract ContactInfoLocal getContactInfo();

    /**
     * Setter method for the purchase order contact address (CMR field).
     * @param addr the PO contact address
     */
    public abstract void setContactInfo(ContactInfoLocal addr);

    /**
     * Accessor method for the purchase order credit card info (CMR field).
     * @return the PO credit card info
     */
    public abstract CreditCardLocal getCreditCard();

    /**
     * Setter method for the purchase order credit card info (CMR field).
     * @param ccInfo the PO credit card info
     */
    public abstract void setCreditCard(CreditCardLocal ccInfo);

    /**
     * Accessor method for the purchase order line items (CMR field).
     * @return the PO line items
     */
    public abstract Collection getLineItems();

    /**
     * Setter method for the purchase order line items (CMR field).
     * @param litems the PO line items
     */
    public abstract void setLineItems(Collection litems);

    /**
     * Adds a single line item to the line-items CMR field.
     * @param lItem the local interface of the line item
     */
    public void addLineItem(LineItemLocal lItem){
        getLineItems().add(lItem);
    }

    /**
     * The ejbCreate method: copies the CMP fields from the value object.
     * @param purchaseOrder the purchase order details
     */
    public String ejbCreate(PurchaseOrder purchaseOrder) throws CreateException {
        setPoId(purchaseOrder.getOrderId());
        setPoUserId(purchaseOrder.getUserId());
        setPoEmailId(purchaseOrder.getEmailId());
        setPoDate(purchaseOrder.getOrderDate().getTime());
        setPoLocale(purchaseOrder.getLocale().toString());
        setPoValue(purchaseOrder.getTotalPrice());
        return null;
    }

    /**
     * The ejbPostCreate method: CMR fields can only be populated here,
     * after the bean identity exists. Creates the related ContactInfo,
     * CreditCard and LineItem beans via their local homes.
     * @param purchaseOrder the purchase order details
     */
    public void ejbPostCreate(PurchaseOrder purchaseOrder)
        throws CreateException {
        try {
            ServiceLocator serviceLocator = new ServiceLocator();
            ContactInfoLocalHome cinforef = (ContactInfoLocalHome)
                serviceLocator.getLocalHome(JNDINames.CINFO_EJB);
            ContactInfoLocal cinfoloc = (ContactInfoLocal)
                cinforef.create(purchaseOrder.getShippingInfo());
            setContactInfo(cinfoloc);
            CreditCardLocalHome cardref = (CreditCardLocalHome)
                serviceLocator.getLocalHome(JNDINames.CARD_EJB);
            CreditCardLocal cardloc = (CreditCardLocal)
                cardref.create(purchaseOrder.getCreditCard());
            setCreditCard(cardloc);
            LineItemLocalHome lineItemref = (LineItemLocalHome)
                serviceLocator.getLocalHome(JNDINames.LI_EJB);
            Collection litems = purchaseOrder.getLineItems();
            Iterator it = litems.iterator();
            while((it != null) && (it.hasNext())) {
                LineItem li = (LineItem) it.next();
                LineItemLocal lineItemloc = (LineItemLocal)
                    lineItemref.create(li, 0);
                addLineItem(lineItemloc);
            }
        } catch(ServiceLocatorException ne) {
            throw new CreateException("ServiceLocator Ex while persisting PO CMR :" + ne.getMessage());
        }
    }

    /**
     * Gets all line items for this PO and returns a collection of
     * value objects. This is required because managed objects cannot be
     * accessed outside transaction boundaries.
     * @return a <code>Collection</code> of <code>LineItem</code> value objects
     */
    public Collection getAllItems() {
        Collection liColl = getLineItems();
        if(liColl == null)
            return(null);
        ArrayList retVal = new ArrayList();
        Iterator it = liColl.iterator();
        while((it!=null) && (it.hasNext())) {
            LineItemLocal loc = (LineItemLocal) it.next();
            retVal.add(loc.getData());
        }
        return(retVal);
    }

    /**
     * Builds a detached <code>PurchaseOrder</code> value object from this
     * bean's CMP/CMR state. The shipping info is currently a copy of the
     * billing info (see XXX below).
     * @return the populated value object
     */
    public PurchaseOrder getData() {
        PurchaseOrder purchaseOrder = new PurchaseOrder();
        purchaseOrder.setOrderId(getPoId());
        purchaseOrder.setUserId(getPoUserId());
        purchaseOrder.setEmailId(getPoEmailId());
        purchaseOrder.setOrderDate(new Date(getPoDate()));
        purchaseOrder.setLocale(getPoLocale());
        purchaseOrder.setTotalPrice(getPoValue());
        purchaseOrder.setBillingInfo(getContactInfo().getData());
        purchaseOrder.setShippingInfo(purchaseOrder.getBillingInfo()); // XXX
        purchaseOrder.setCreditCard(getCreditCard().getData());
        Collection lineItems = getLineItems();
        for (Iterator iterator = lineItems.iterator(); iterator.hasNext();) {
            LineItemLocal lineItem = (LineItemLocal) iterator.next();
            purchaseOrder.addLineItem(lineItem.getData());
        }
        return purchaseOrder;
    }

    /**
     * Other EJB lifecycle methods (no-ops except storing the context).
     */
    public void setEntityContext(EntityContext c){ context = c; }
    public void unsetEntityContext(){}
    public void ejbRemove() throws RemoveException {}
    public void ejbActivate() {}
    public void ejbPassivate() {}
    public void ejbStore() {}
    public void ejbLoad() {}
}
|
<gh_stars>1-10
// Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
// Output:
// [
// ["ate","eat","tea"],
// ["nat","tan"],
// ["bat"]
// ]
// Store each word's sorted-character string in a hash map together with the indexes of its anagrams
// Group anagrams: the alphabetically-sorted characters of each word form
// the map key, and the map stores the *indexes* of the matching words.
// Each output group is sorted alphabetically; group order follows first
// appearance in the input.
function solution1(strs) {
    const groupsByKey = new Map();
    const result = [];

    strs.forEach((word, idx) => {
        const key = word.split('').sort().join();
        const bucket = groupsByKey.get(key);
        if (bucket === undefined) {
            groupsByKey.set(key, [idx]);
        } else {
            bucket.push(idx);
        }
    });

    groupsByKey.forEach((indexes) => {
        const group = indexes.map((idx) => strs[idx]);
        result.push(group.sort());
    });

    return result;
}
// Instead of indexes, store the (sorted) words themselves in the hash map, so the result is just the map's values collected into an array!
// Much shorter — and more memory-efficient!
// Group anagrams: same keying idea as solution1, but the map stores the
// words themselves, so the result is simply the collected map values.
// Each output group is sorted alphabetically; group order follows first
// appearance in the input.
function solution2(strs) {
    const strsMap = new Map();
    for (let i = 0; i < strs.length; i++) {
        const key = strs[i].split('').sort().join();
        if (!strsMap.has(key)) {
            strsMap.set(key, [strs[i]]);
        } else {
            strsMap.get(key).push(strs[i]);
        }
    }
    // PERF: sort each group once at the end instead of re-sorting the whole
    // bucket on every insertion (the original was O(m log m) per insert);
    // the returned groups are identical.
    return Array.from(strsMap.values(), (group) => group.sort());
}

console.log(solution2(["eat", "tea", "tan", "ate", "nat", "bat"]));
/**
* functions.js
*/
// Toggle the 'hover' class on an element. Accepts either a DOM element
// or an element id string.
function flipOver(item) {
    var target = typeof item === 'object' ? item : document.getElementById(item);
    target.classList.toggle('hover');
}
// Broken images
// Replaces a broken <img> with a randomly-coloured placeholder box on its
// parent, picking black or white text for contrast. Returns true so the
// error is treated as handled.
function imgBroken(image) {
    // image.onerror = "";
    var randNum = Math.random()*16777215;
    // BUGFIX: pad the hex string to 6 digits — small random values used to
    // produce invalid CSS colours such as "#abc1".
    var randHex = '#' + Math.floor(randNum).toString(16).padStart(6, '0');
    // Dark text on light backgrounds (threshold is roughly mid-luminance).
    var txtCol = (randNum > 11250603) ? '#000' : '#fff';
    $(image).parent()
        .removeClass('cover__container').attr('data-error', 'Image was broken. I deleted it...')
        .css({'backgroundColor': randHex, 'color': txtCol});
    image.remove();
    return true;
}
// Update kids
// (Re)builds the date-of-birth / gender input rows to match the requested
// number of kids, restoring previously-entered values from localStorage.
function updKids() {
    var $kNum = document.getElementById('kidsNumber');
    // BUGFIX: an <input>'s .value is a string, so the old strict comparison
    // `kids === 0` could never be true; parse to a number first (empty or
    // invalid input counts as 0).
    var kids = parseInt($kNum.value, 10) || 0;
    var nowKids = $('#kidsInputs .grid__container').length;
    var n = 0;
    var today = new Date();
    var dd = today.getDate();
    var m = today.getMonth()+1; //January is 0!
    var mm = (m <= 9) ? '0' + m : m;
    var yyyy = today.getFullYear();
    if (kids === 0) {
        $('#kidsInputs .grid__container').remove();
    } else if (kids > nowKids) {
        // Append the missing rows; the max birth date is today.
        // BUGFIX: `i` was an implicit global; declare it.
        for (var i = 0; i < (kids - nowKids); i++) {
            n = 1 + nowKids + i;
            $('#kidsInputs').append('<div class="grid__container grid--gutter" data-row="'+ n +'">' +
                '    <div class="grid__item grid__item--1of2">' +
                '        <div class="input input--date input--show-label">' +
                '            <input tabindex' +
                '                   name="user-baby' + n + '-bday" id="user-baby' + n + '-bday" type="date" max="' + yyyy + '-' + mm + '-' + dd + '"' +
                '                   autocomplete="baby' + n + '-bday"' +
                '                   required' +
                '                   spellcheck="true"' +
                '                   placeholder="Podaj datę">' +
                '            <label for="user-baby' + n + '-bday"' +
                '                   data-focused="' + n + '. Data urodzenia"' +
                '                   data-original="Podaj datę urodzenia"' +
                '                   data-invalid="To pole jest wymagane."></label>' +
                '        </div>' +
                '    </div>' +
                '    <div class="grid__item grid__item--1of2">' +
                '        <div class="input input--select input--show-label">' +
                '            <select name="user-baby' + n + '-gender" id="user-baby' + n + '-gender" required>' +
                '                <option value="" selected disabled>Podaj płeć</option>' +
                '                <option value="K">Dziewczynka</option>' +
                '                <option value="M">Chłopiec</option>' +
                '            </select>' +
                '            <label for="user-baby' + n + '-gender"' +
                '                   data-focused="Płeć dziecka"' +
                '                   data-original="Podaj płeć dziecka"' +
                '                   data-invalid="To pole jest obowiązkowe"></label>' +
                '        </div>' +
                '    </div>' +
                '</div>');
        }
        // Restore any previously-saved values for the (re)created inputs.
        $('#kidsInputs input, #kidsInputs select').each( function() {
            var inputName = this.name;
            $(this).val( localStorage.getItem(inputName) ).updLabels();
        });
    } else {
        // Too many rows: trim the surplus from the end.
        $('#kidsInputs .grid__container').slice(kids - nowKids).remove();
    }
}
// Update pick answers
// Syncs the visual state of "pick" answers with their underlying inputs:
// toggles .picked / .disabled on the wrapper and moves focus into the
// picked answer's nested input.
function updPicks() {
    $('.answer--pick > input').each(function() {
        var $input = $(this);
        var $answer = $input.parent();
        if ($input.is(':checked')) {
            $answer.addClass('picked');
            $answer.find('.input > input').focus();
        } else {
            $answer.removeClass('picked');
            $answer.find('.input > input').off('focus');
        }
        if ($input.prop('disabled')) {
            $answer.addClass('disabled');
        } else {
            $answer.removeClass('disabled');
        }
    });
}
// Extend jQuery
// Extend jQuery with form-label and image-cover helpers.
(function( $ ) {
    $.fn.extend({
        // Update input labels: swaps each input's <label> text between its
        // data-original / data-focused / data-invalid variants depending on
        // whether the input has a value and passes HTML5 validation, and
        // toggles the input--used / input--valid classes.
        updLabels: function () {
            $(this).each(function() {
                var label = $(this).next('label');
                var value = this.value;
                // check if the input has any value
                if ( value ) {
                    $(this).addClass('input--used');
                    if ( $(this).is(':invalid') ) {
                        label.html( label.data('invalid') );
                    } else {
                        label.html( label.data('focused') );
                    }
                } else {
                    $(this).removeClass('input--used');
                    label.html( label.data('original') );
                    if ( $(this).is(':valid') ) {
                        $(this).addClass('input--valid');
                    }
                }
            });
            return;
        },
        // Decide whether each image should fill its container by width or
        // by height, comparing the image aspect ratio with the container's.
        // The 0.164 ratio-difference band is an empirically tuned fudge
        // factor (NOTE(review): origin of the constant is not documented).
        updCovers: function () {
            return this.each(function() {
                var $el = $(this);
                var $parent = this.parentElement;
                var height = this.height;
                var width = this.width;
                var widthRatio = $parent.offsetWidth / width;
                var imgRatio = height / width;
                var parRatio = $parent.offsetHeight / $parent.offsetWidth;
                var compareRatios = imgRatio/parRatio;
                var ratioDifference = imgRatio-parRatio;
                if ( imgRatio >= parRatio ) {
                    if ( compareRatios < 1 && ratioDifference < 0.164 && ratioDifference > 0 ){
                        if ( !$el.hasClass('cover-height') ) $el.removeClass('cover--width').addClass('cover--height');
                    } else {
                        if ( !$el.hasClass('cover-width') ) $el.removeClass('cover--height').addClass('cover--width');
                    }
                } else {
                    if ( !$el.hasClass('cover-height') ) $el.removeClass('cover--width').addClass('cover--height');
                }
                // console.log($(this).parent().attr('data-index') + ': widthRatio: ' + widthRatio + '; ' + parRatio + ' / ' + imgRatio + '\n (diff:' + ratioDifference + ') (comp:' + compareRatios + ')');
            });
        }
    });
})( jQuery );
|
# Interactive cleanup script: lists and (on confirmation) deletes compiled
# Python files, .poc files, macOS .DS_Store files and SQLite databases
# under the current directory.
COLUMNS=$(tput cols)
title="THE CLEANSLATE PROGRAM"
line="-----------------------------------"
# Print a centered banner (pad each string to the middle of the terminal).
printf "%*s\n" $(((${#line}+$COLUMNS)/2)) "$line"
printf "%*s\n" $(((${#line}+$COLUMNS)/2)) "$line"
printf "%*s\n" $(((${#title}+$COLUMNS)/2)) "$title"
printf "%*s\n" $(((${#line}+$COLUMNS)/2)) "$line"
printf "%*s\n" $(((${#line}+$COLUMNS)/2)) "$line"
# Preview pass: list the files that WOULD be removed.
find . -name "*.pyc" -type f
find . -name "*.poc" -type f
find . -name ".DS_Store" -type f
find . -name "*.sqlite" -type f
echo "This operation will remove all compiled python files and erase the existing database"
read -r -p "Are you sure you want to delete all these files? [y/N] " response
case "$response" in
[yY][eE][sS]|[yY])
# Destructive pass: same patterns, now with -delete.
find . -name "*.pyc" -type f -delete
find . -name "*.poc" -type f -delete
find . -name ".DS_Store" -type f -delete
find . -name "*.sqlite" -type f -delete
echo "All files were successfully deleted."
;;
*)
echo "No files were deleted"
;;
esac
|
#!/bin/bash
# For each internal hostname, connect over TLS (SNI set via -servername)
# and print the issuer of the served certificate. Requires network access
# to port 443 on every host.
declare -a arr=("account.tools.mycompany.ru" "cert.tools.mycompany.ru" "chat.tools.mycompany.ru" "confluence-backup.tools.mycompany.ru" "confluence.tools.mycompany.ru" "grafana.tools.mycompany.ru" "grid.tools.mycompany.ru" "inventory.tools.mycompany.ru" "jitsi.tools.mycompany.ru" "kibana.tools.mycompany.ru" "racktables.tools.mycompany.ru" "rancher.tools.mycompany.ru" "selenoid.tools.mycompany.ru" "sonar.tools.mycompany.ru" "teamcity.tools.mycompany.ru" "testrail.tools.mycompany.ru" "upsource.tools.mycompany.ru" "youtrack.tools.mycompany.ru" "zabbix.tools.mycompany.ru")
for i in "${arr[@]}"
do
# `echo |` closes the TLS session immediately after the handshake.
echo "$i"
echo | openssl s_client -servername "$i" -connect "$i":443 2>/dev/null | openssl x509 -noout -issuer
done
|
<gh_stars>10-100
package de.erichseifert.gral.plots;
import de.erichseifert.gral.data.Row;
import de.erichseifert.gral.graphics.Drawable;
/**
 * A renderer for symbols that are used in legend items.
 * Implementations map a data {@code Row} to a {@code Drawable} symbol.
 */
public interface LegendSymbolRenderer {
    /**
     * Returns a symbol for rendering a legend item.
     * @param row Data row.
     * @return A drawable object that can be used to display the symbol.
     */
    Drawable getSymbol(Row row);
}
|
import argparse
import os
import pandas as pd
import numpy as np
from imctools.io import ometiffparser
def ometiff_2_analysis(filename, outfolder, basename, pannelcsv=None, metalcolumn=None, masscolumn=None, usedcolumn=None,
                       addsum=False, bigtiff=True, sort_channels=True, pixeltype=None):
    """Convert an OME-TIFF into an "analysis" TIFF stack plus a channel CSV.

    filename:   path of the input .ome.tiff
    outfolder:  output directory (must exist)
    basename:   output file basename (".tiff" / ".csv" are appended)
    pannelcsv:  optional CSV selecting/ordering channels; with >1 column,
                `usedcolumn` holds 0/1 flags and `metalcolumn`/`masscolumn`
                name the channel identifiers; with 1 column, every entry
                (including the header cell) is taken as a metal name
    addsum:     prepend the per-pixel sum of all channels as an extra layer
    bigtiff:    write a BigTIFF container
    sort_channels: sort selected channels by mass before writing
    pixeltype:  output dtype passed through to the writer (None = original)

    Side effects: writes `<basename>.tiff` and `<basename>.csv` into
    `outfolder`.
    """
    # read the pannelcsv to find out which channels should be loaded
    selmetals = None
    selmass = None
    outname = os.path.join(outfolder, basename)
    if pannelcsv is not None:
        pannel = pd.read_csv(pannelcsv)
        if pannel.shape[1] > 1:
            selected = pannel[usedcolumn]
            if masscolumn is None:
                # (removed a no-op `metalcolumn = metalcolumn` self-assignment)
                selmetals = [str(n) for s, n in zip(selected, pannel[metalcolumn]) if s]
            else:
                selmass = [str(n) for s, n in zip(selected, pannel[masscolumn]) if s]
        else:
            # Single-column CSV: header cell + every row are metal names.
            selmetals = [pannel.columns[0]] + pannel.iloc[:, 0].tolist()
    ome = ometiffparser.OmetiffParser(filename)
    imc_img = ome.get_imc_acquisition()
    if sort_channels:
        if selmetals is not None:
            # BUGFIX: sort by the *numeric* mass embedded in the metal name.
            # The previous key compared digit strings lexicographically,
            # so e.g. "102" sorted before "56".
            def mass_from_met(x):
                digits = ''.join([m for m in x if m.isdigit()])
                return (int(digits) if digits else 0, x)
            selmetals = sorted(selmetals, key=mass_from_met)
        if selmass is not None:
            # NOTE(review): this is still a lexicographic string sort; if
            # masses are plain numbers, a numeric key may be intended.
            selmass = sorted(selmass)
    writer = imc_img.get_image_writer(outname + '.tiff', metals=selmetals, mass=selmass)
    if addsum:
        # Prepend a per-pixel sum layer (channels are stacked on axis 2).
        img_sum = np.sum(writer.img_stack, axis=2)
        img_sum = np.reshape(img_sum, list(img_sum.shape) + [1])
        writer.img_stack = np.append(img_sum, writer.img_stack, axis=2)
    writer.save_image(mode='imagej', bigtiff=bigtiff, dtype=pixeltype)
    # The CSV lists channel names in the order they were written.
    if selmass is not None:
        savenames = selmass
    elif selmetals is not None:
        savenames = selmetals
    else:
        savenames = [s for s in imc_img.channel_metals]
    if addsum:
        savenames = ['sum'] + savenames
    with open(outname + '.csv', 'w') as f:
        for n in savenames:
            f.write(n + '\n')
if __name__ == "__main__":
    # Setup the command line arguments
    parser = argparse.ArgumentParser(description='Convert OME tiffs to analysis tiffs that are more compatible with tools.\n'+
                                     'A csv with a boolean column can be used to select subsets of channels or metals from the stack\n'+
                                     'The channels of the tiff will have the same order as in the csv.', prog='mcd2tiff',
                                     usage='%(prog)s ome_filename [options]')
    parser.add_argument('ome_filename', type=str,
                        help='The path to the ome.tiff file to be converted')
    parser.add_argument('--outpostfix', type=str, default=None,
                        help='the string to append to the name.')
    parser.add_argument('--outfolder', type=str, default=None,
                        help='the output folder, Default is a subfolder called analysis in the current folder.')
    parser.add_argument('--pannelcsv', type=str, default=None,
                        help='name of the csv that contains the channels to be written out.')
    parser.add_argument('--metalcolumn', type=str, default='metal',
                        help='Column name of the metal names.')
    parser.add_argument('--masscolumn', type=str, default=None,
                        help='Column name of the mass names. If provided the metal column will be ignored.')
    parser.add_argument('--usedcolumn', type=str, default='ilastik',
                        help='Column that should contain booleans (0, 1) if the channel should be used.')
    parser.add_argument('--outformat', type=str, default=None, choices=['float', 'uint16', 'uint8', 'uint32'],
                        help='''original or float, uint32, uint16, unit8\n
                        Per default the original pixeltype is taken''')
    # BUGFIX: the percentile choices were a single comma-joined string and
    # could never be selected individually.
    parser.add_argument('--scale', type=str, default='no',
                        choices=['no', 'max', 'percentile99', 'percentile99.9', 'percentile99.99'],
                        help='scale the data?')
    parser.add_argument('--addsum', type=str, default='no', choices=['no', 'yes'],
                        help='Add the sum of the data as the first layer.')
    # BUGFIX: help text was copy-pasted from --addsum.
    parser.add_argument('--bigtiff', type=str, default='yes', choices=['no', 'yes'],
                        help='Write the output as a BigTIFF.')
    parser.add_argument('--sort_channels', type=str, default='yes', choices=['no', 'yes'],
                        help='Should the channels be sorted by mass?')
    args = parser.parse_args()
    default_subfolder = 'analysis'
    fn = args.ome_filename
    # BUGFIX: the original `assert a or b is True` chained the comparison so
    # valid ".ome.tif" files were rejected; test the suffixes directly.
    assert fn.endswith('.ome.tiff') or fn.endswith('.ome.tif'), 'Must be an .ome.tiff or .ome.tif'
    # BUGFIX: str.strip('.ome.tiff') removes a *character set* from both
    # ends (mangling names like "image.ome.tiff"); remove the suffix instead.
    fn_base = os.path.basename(fn)
    if fn_base.endswith('.ome.tiff'):
        fn_out = fn_base[:-len('.ome.tiff')]
    else:
        fn_out = fn_base[:-len('.ome.tif')]
    outpostifx = args.outpostfix
    if outpostifx is not None:
        fn_out = '_'.join([fn_out, outpostifx])
    outfolder = args.outfolder
    if outfolder is None:
        outfolder = os.path.join(os.path.split(fn)[0], default_subfolder)
    # create path if it doesnt exist
    if not (os.path.exists(outfolder)):
        os.mkdir(outfolder)
    # finalize the outname
    outname = os.path.join(outfolder, fn_out)
    ometiff_2_analysis(args.ome_filename, outfolder, fn_out, args.pannelcsv, args.metalcolumn, args.masscolumn,
                       args.usedcolumn, args.addsum == 'yes',
                       bigtiff=args.bigtiff == 'yes',
                       sort_channels=args.sort_channels == 'yes',
                       pixeltype=args.outformat)
|
// Assuming jQuery is available for AJAX calls.
// Cascading location pickers: division -> district -> upazila.
$(document).ready(function () {
    var districtPlaceholder = '<option value="">Select District</option>';
    var upazilaPlaceholder = '<option value="">Select Upazila</option>';

    // Division changed: load its districts, reset the upazila list.
    $('#division').change(function () {
        var divisionId = $(this).val();
        if (divisionId === '') {
            // Nothing selected: restore both dependent dropdowns to placeholders.
            $('#district').html(districtPlaceholder);
            $('#upazila').html(upazilaPlaceholder);
            return;
        }
        $.ajax({
            url: 'fetch_districts.php', // Replace with actual server endpoint
            type: 'POST',
            data: { divisionId: divisionId },
            success: function (response) {
                $('#district').html(response);
                $('#upazila').html(upazilaPlaceholder);
            }
        });
    });

    // District changed: load its upazilas.
    $('#district').change(function () {
        var districtId = $(this).val();
        if (districtId === '') {
            $('#upazila').html(upazilaPlaceholder);
            return;
        }
        $.ajax({
            url: 'fetch_upazilas.php', // Replace with actual server endpoint
            type: 'POST',
            data: { districtId: districtId },
            success: function (response) {
                $('#upazila').html(response);
            }
        });
    });
});
#!/bin/bash
# Regenerate the "-doc" subpackage entries between the RGD markers in every
# comps/comps-*.xml group file, derived from which rubygem specs declare a
# "%package doc" subpackage.
begin='<!--RGD-START-->'
end='<!--RGD-END-->'
TEMP=$(mktemp)
trap "rm -f $TEMP" EXIT
for FILE in comps/comps-*.xml
do
    # Every rubygem directory whose spec contains a "%package doc" section.
    for G in $(egrep "%package\s+doc" rubygem-*/*spec | awk -F/ '{print $1}'); do
        # Take the gem's existing packagereq line from the comps file and
        # rewrite the closing tag side to the "-doc" package name.
        grep "[>-]$G<" $FILE | sed 's/<\//-doc<\//'
    done | sort -u > $TEMP
    # Within the marker range: print the begin marker, read in the generated
    # list after it, print the end marker, delete all old lines in between.
    # (The embedded newline is required: sed's `r` command consumes the rest
    # of its line as the filename.)
    sed -i -e "/$begin/,/$end/{ /$begin/{p; r $TEMP
}; /$end/p; d }" $FILE
done
|
#!/usr/bin/env bash
# Lint check: flag uses of locale-dependent functions in the C++ sources.
# Force the C locale so this script itself behaves deterministically.
export LC_ALL=C
# Grandfathered call sites (file:regex) that are excluded from the check.
KNOWN_VIOLATIONS=(
    "src/base58.cpp:.*isspace"
    "src/compchain-tx.cpp.*stoul"
    "src/compchain-tx.cpp.*trim_right"
    "src/compchain-tx.cpp:.*atoi"
    "src/core_read.cpp.*is_digit"
    "src/dbwrapper.cpp.*stoul"
    "src/dbwrapper.cpp:.*vsnprintf"
    "src/httprpc.cpp.*trim"
    "src/init.cpp:.*atoi"
    "src/netbase.cpp.*to_lower"
    "src/qt/rpcconsole.cpp:.*atoi"
    "src/qt/rpcconsole.cpp:.*isdigit"
    "src/rest.cpp:.*strtol"
    "src/rpc/server.cpp.*to_upper"
    "src/test/dbwrapper_tests.cpp:.*snprintf"
    "src/test/getarg_tests.cpp.*split"
    "src/torcontrol.cpp:.*atoi"
    "src/torcontrol.cpp:.*strtol"
    "src/uint256.cpp:.*isspace"
    "src/uint256.cpp:.*tolower"
    "src/util.cpp:.*atoi"
    "src/util.cpp:.*fprintf"
    "src/util.cpp:.*tolower"
    "src/utilmoneystr.cpp:.*isdigit"
    "src/utilmoneystr.cpp:.*isspace"
    "src/utilstrencodings.cpp:.*atoi"
    "src/utilstrencodings.cpp:.*isspace"
    "src/utilstrencodings.cpp:.*strtol"
    "src/utilstrencodings.cpp:.*strtoll"
    "src/utilstrencodings.cpp:.*strtoul"
    "src/utilstrencodings.cpp:.*strtoull"
    "src/utilstrencodings.h:.*atoi"
)
# Vendored third-party code is not ours to lint.
REGEXP_IGNORE_EXTERNAL_DEPENDENCIES="^src/(crypto/ctaes/|leveldb/|secp256k1/|tinyformat.h|univalue/)"
# Function names whose behavior depends on the process locale (the trailing
# comment names the locale category involved, where known).
LOCALE_DEPENDENT_FUNCTIONS=(
    alphasort # LC_COLLATE (via strcoll)
    asctime # LC_TIME (directly)
    asprintf # (via vasprintf)
    atof # LC_NUMERIC (via strtod)
    atoi # LC_NUMERIC (via strtol)
    atol # LC_NUMERIC (via strtol)
    atoll # (via strtoll)
    atoq
    btowc # LC_CTYPE (directly)
    ctime # (via asctime or localtime)
    dprintf # (via vdprintf)
    fgetwc
    fgetws
    fold_case # boost::locale::fold_case
    fprintf # (via vfprintf)
    fputwc
    fputws
    fscanf # (via __vfscanf)
    fwprintf # (via __vfwprintf)
    getdate # via __getdate_r => isspace // __localtime_r
    getwc
    getwchar
    is_digit # boost::algorithm::is_digit
    is_space # boost::algorithm::is_space
    isalnum # LC_CTYPE
    isalpha # LC_CTYPE
    isblank # LC_CTYPE
    iscntrl # LC_CTYPE
    isctype # LC_CTYPE
    isdigit # LC_CTYPE
    isgraph # LC_CTYPE
    islower # LC_CTYPE
    isprint # LC_CTYPE
    ispunct # LC_CTYPE
    isspace # LC_CTYPE
    isupper # LC_CTYPE
    iswalnum # LC_CTYPE
    iswalpha # LC_CTYPE
    iswblank # LC_CTYPE
    iswcntrl # LC_CTYPE
    iswctype # LC_CTYPE
    iswdigit # LC_CTYPE
    iswgraph # LC_CTYPE
    iswlower # LC_CTYPE
    iswprint # LC_CTYPE
    iswpunct # LC_CTYPE
    iswspace # LC_CTYPE
    iswupper # LC_CTYPE
    iswxdigit # LC_CTYPE
    isxdigit # LC_CTYPE
    localeconv # LC_NUMERIC + LC_MONETARY
    mblen # LC_CTYPE
    mbrlen
    mbrtowc
    mbsinit
    mbsnrtowcs
    mbsrtowcs
    mbstowcs # LC_CTYPE
    mbtowc # LC_CTYPE
    mktime
    normalize # boost::locale::normalize
    # printf # LC_NUMERIC
    putwc
    putwchar
    scanf # LC_NUMERIC
    setlocale
    snprintf
    sprintf
    sscanf
    stod
    stof
    stoi
    stol
    stold
    stoll
    stoul
    stoull
    strcasecmp
    strcasestr
    strcoll # LC_COLLATE
    # strerror
    strfmon
    strftime # LC_TIME
    strncasecmp
    strptime
    strtod # LC_NUMERIC
    strtof
    strtoimax
    strtol # LC_NUMERIC
    strtold
    strtoll
    strtoq
    strtoul # LC_NUMERIC
    strtoull
    strtoumax
    strtouq
    strxfrm # LC_COLLATE
    swprintf
    to_lower # boost::locale::to_lower
    to_title # boost::locale::to_title
    to_upper # boost::locale::to_upper
    tolower # LC_CTYPE
    toupper # LC_CTYPE
    towctrans
    towlower # LC_CTYPE
    towupper # LC_CTYPE
    trim # boost::algorithm::trim
    trim_left # boost::algorithm::trim_left
    trim_right # boost::algorithm::trim_right
    ungetwc
    vasprintf
    vdprintf
    versionsort
    vfprintf
    vfscanf
    vfwprintf
    vprintf
    vscanf
    vsnprintf
    vsprintf
    vsscanf
    vswprintf
    vwprintf
    wcrtomb
    wcscasecmp
    wcscoll # LC_COLLATE
    wcsftime # LC_TIME
    wcsncasecmp
    wcsnrtombs
    wcsrtombs
    wcstod # LC_NUMERIC
    wcstof
    wcstoimax
    wcstol # LC_NUMERIC
    wcstold
    wcstoll
    wcstombs # LC_CTYPE
    wcstoul # LC_NUMERIC
    wcstoull
    wcstoumax
    wcswidth
    wcsxfrm # LC_COLLATE
    wctob
    wctomb # LC_CTYPE
    wctrans
    wctype
    wcwidth
    wprintf
)
# Join all remaining arguments into one string, separated by the first argument.
function join_array {
    local separator="$1"
    shift
    local IFS="$separator"
    echo "$*"
}
# Fold each list into a single regex alternation so one grep -E / grep -vE
# can match or drop all entries at once.
REGEXP_IGNORE_KNOWN_VIOLATIONS=$(join_array "|" "${KNOWN_VIOLATIONS[@]}")
# Invoke "git grep" only once in order to minimize run-time
REGEXP_LOCALE_DEPENDENT_FUNCTIONS=$(join_array "|" "${LOCALE_DEPENDENT_FUNCTIONS[@]}")
# Match any listed function (optionally with _r/_s suffix) as a whole token:
# the bracket classes reject identifier characters and quoting on both sides.
GIT_GREP_OUTPUT=$(git grep -E "[^a-zA-Z0-9_\`'\"<>](${REGEXP_LOCALE_DEPENDENT_FUNCTIONS}(|_r|_s))[^a-zA-Z0-9_\`'\"<>]" -- "*.cpp" "*.h")
EXIT_CODE=0
for LOCALE_DEPENDENT_FUNCTION in "${LOCALE_DEPENDENT_FUNCTIONS[@]}"; do
    # Narrow the single git-grep result to this function, then drop matches
    # that sit in comments or string literals, and fprintf to stdout/stderr
    # (which is locale-safe enough for diagnostics).
    MATCHES=$(grep -E "[^a-zA-Z0-9_\`'\"<>]${LOCALE_DEPENDENT_FUNCTION}(|_r|_s)[^a-zA-Z0-9_\`'\"<>]" <<< "${GIT_GREP_OUTPUT}" | \
        grep -vE "\.(c|cpp|h):\s*(//|\*|/\*|\").*${LOCALE_DEPENDENT_FUNCTION}" | \
        grep -vE 'fprintf\(.*(stdout|stderr)')
    if [[ ${REGEXP_IGNORE_EXTERNAL_DEPENDENCIES} != "" ]]; then
        MATCHES=$(grep -vE "${REGEXP_IGNORE_EXTERNAL_DEPENDENCIES}" <<< "${MATCHES}")
    fi
    if [[ ${REGEXP_IGNORE_KNOWN_VIOLATIONS} != "" ]]; then
        MATCHES=$(grep -vE "${REGEXP_IGNORE_KNOWN_VIOLATIONS}" <<< "${MATCHES}")
    fi
    if [[ ${MATCHES} != "" ]]; then
        echo "The locale dependent function ${LOCALE_DEPENDENT_FUNCTION}(...) appears to be used:"
        echo "${MATCHES}"
        echo
        EXIT_CODE=1
    fi
done
if [[ ${EXIT_CODE} != 0 ]]; then
    echo "Unnecessary locale dependence can cause bugs that are very"
    echo "tricky to isolate and fix. Please avoid using locale dependent"
    echo "functions if possible."
    echo
    echo "Advice not applicable in this specific case? Add an exception"
    echo "by updating the ignore list in $0"
fi
exit ${EXIT_CODE}
|
POST _analyze
{
"analyzer": "whitespace",
"text": "The quick brown fox."
} |
# Register +name+ with the proxy, storing its pid in "<name>.pid4".
def proc4(proxy, name)
  proxy.process(name) do
    pid_file "#{name}.pid4"
  end
end
#!/bin/bash
# Provisioning script for the os-network node of an OpenStack + OpenDaylight
# + Kubernetes test bed. Run with one or more modes: download | configure.
set -e
# Management-network addresses, one per node role.
ENV_MGMT_NETWORK="10.0.0.0/24"
ENV_MGMT_OS_CONTROLLER_IP="10.0.0.11"
ENV_MGMT_OS_NETWORK_IP="10.0.0.21"
ENV_MGMT_OS_COMPUTE_IP="10.0.0.31"
ENV_MGMT_ODL_CONTROLLER_IP="10.0.0.41"
ENV_MGMT_K8S_MASTER_IP="10.0.0.51"
# Tunnel-network (VXLAN) addresses, mirroring the management layout.
ENV_TUNNEL_NETWORK="10.0.1.0/24"
ENV_TUNNEL_OS_CONTROLLER_IP="10.0.1.11"
ENV_TUNNEL_OS_NETWORK_IP="10.0.1.21"
ENV_TUNNEL_OS_COMPUTE_IP="10.0.1.31"
ENV_TUNNEL_ODL_CONTROLLER_IP="10.0.1.41"
ENV_TUNNEL_K8S_MASTER_IP="10.0.1.51"
# NIC that carries external (provider) traffic; handed to the br-ex bridge.
ENV_PUBLIC_INTERFACE="enp0s10"
LOG=/tmp/provision.log
date | tee $LOG # when: Thu Aug 10 07:48:13 UTC 2017
whoami | tee -a $LOG # who: root
pwd | tee -a $LOG # where: /home/vagrant
# Shared cache directory mounted from the host via /vagrant.
CACHE=/vagrant/cache
[ -d $CACHE ] || mkdir -p $CACHE
# Enable the public Ubuntu Cloud Archive (Ocata) repository and refresh apt.
function use_public_apt_server() {
    apt install -y software-properties-common
    add-apt-repository cloud-archive:ocata
    apt-get update && APT_UPDATED=true
    # Reference https://docs.openstack.org/newton/install-guide-ubuntu/environment-packages.html
}
# Point apt at a local mirror (192.168.240.3) instead of the public archives.
# The mirror is unauthenticated, hence the AllowUnauthenticated override.
function use_local_apt_server() {
    cat > /etc/apt/sources.list <<DATA
deb http://192.168.240.3/ubuntu xenial main restricted
deb http://192.168.240.3/ubuntu xenial universe
deb http://192.168.240.3/ubuntu xenial multiverse
deb http://192.168.240.3/ubuntu xenial-updates main restricted
deb http://192.168.240.3/ubuntu xenial-updates universe
deb http://192.168.240.3/ubuntu xenial-updates multiverse
deb http://192.168.240.3/ubuntu xenial-security main restricted
deb http://192.168.240.3/ubuntu xenial-security universe
deb http://192.168.240.3/ubuntu xenial-security multiverse
deb http://192.168.240.3/ubuntu-cloud-archive xenial-updates/ocata main
DATA
    # Drop cached package lists from the previous sources before updating.
    rm -rf /var/lib/apt/lists/*
    echo 'APT::Get::AllowUnauthenticated "true";' > /etc/apt/apt.conf.d/99-use-local-apt-server
    apt-get update && APT_UPDATED=true
}
# Append static /etc/hosts entries so every node resolves its peers by name
# (uses the management-network addresses).
function each_node_must_resolve_the_other_nodes_by_name_in_addition_to_IP_address() {
    cat >> /etc/hosts <<DATA
$ENV_MGMT_OS_CONTROLLER_IP os-controller
$ENV_MGMT_OS_NETWORK_IP os-network
$ENV_MGMT_OS_COMPUTE_IP os-compute
$ENV_MGMT_ODL_CONTROLLER_IP odl-controller
$ENV_MGMT_K8S_MASTER_IP k8s-master
DATA
    # Reference https://docs.openstack.org/newton/install-guide-ubuntu/environment-networking.html
}
# Install crudini (ini-file editor used by the configure_* steps),
# refreshing the apt index at most once per provisioning run.
function install_utilities() {
    # FIX: the previous `[ ... ] || apt-get update && APT_UPDATED=true` parsed
    # as `([ ... ] || apt-get update) && APT_UPDATED=true`; make the
    # run-once guard explicit.
    if [ "$APT_UPDATED" != "true" ]; then
        apt-get update && APT_UPDATED=true
    fi
    apt-get install -y crudini
}
# Install pinned python 2.7 and pip packages.
function install_python() {
    PYTHON_VERSION=2.7.11-1
    PYTHON_PIP_VERSION=8.1.1-2ubuntu0.4
    # FIX: explicit run-once guard instead of the misleading
    # `[ ... ] || apt-get update && APT_UPDATED=true` precedence.
    if [ "$APT_UPDATED" != "true" ]; then
        apt-get update && APT_UPDATED=true
    fi
    apt-get install -y python=$PYTHON_VERSION python-pip=$PYTHON_PIP_VERSION
    #apt-get install -y python python-pip
}
# Install chrony (pinned) and point it at the os-controller NTP source.
function install_ntp() {
    CHRONY_VERSION=2.1.1-1
    # FIX: explicit run-once guard instead of the misleading
    # `[ ... ] || apt-get update && APT_UPDATED=true` precedence.
    if [ "$APT_UPDATED" != "true" ]; then
        apt-get update && APT_UPDATED=true
    fi
    apt-get install -y chrony=$CHRONY_VERSION
    #apt-get install -y chrony
    # # # # # # # # # # # # # # # # ## # # # # # # # # # # # # # # # # # # # # # # # # ## # # # # # # # #
    # To connect to the os-controller node: comment out every default
    # pool/server line, then add the controller as the only source.
    sed -i "s/^pool /#pool /g" /etc/chrony/chrony.conf
    sed -i "s/^server /#server /g" /etc/chrony/chrony.conf
    echo "server os-controller iburst" >> /etc/chrony/chrony.conf
    # Restart the NTP service
    service chrony restart
    # Verify operation
    chronyc sources
    # Log files
    # /var/log/chrony/measurements.log
    # /var/log/chrony/statistics.log
    # /var/log/chrony/tracking.log
    # Reference https://docs.openstack.org/newton/install-guide-ubuntu/environment-ntp-other.html
}
# Install the pinned Neutron network-node packages (ML2, OVS, L3, DHCP,
# metadata agents) from the Ocata cloud archive.
function download_neutron() {
    NEUTRON_PLUGIN_ML2_VERSION=2:10.0.3-0ubuntu1~cloud0
    NEUTRON_OPENVSWITCH_AGENT_VERSION=2:10.0.3-0ubuntu1~cloud0
    NEUTRON_L3_AGENT_VERSION=2:10.0.3-0ubuntu1~cloud0
    NEUTRON_DHCP_AGENT_VERSION=2:10.0.3-0ubuntu1~cloud0
    NEUTRON_METADATA_AGENT_VERSION=2:10.0.3-0ubuntu1~cloud0
    # FIX: explicit run-once guard instead of the misleading
    # `[ ... ] || apt-get update && APT_UPDATED=true` precedence.
    if [ "$APT_UPDATED" != "true" ]; then
        apt-get update && APT_UPDATED=true
    fi
    apt install -y neutron-plugin-ml2=$NEUTRON_PLUGIN_ML2_VERSION \
        neutron-openvswitch-agent=$NEUTRON_OPENVSWITCH_AGENT_VERSION \
        neutron-l3-agent=$NEUTRON_L3_AGENT_VERSION \
        neutron-dhcp-agent=$NEUTRON_DHCP_AGENT_VERSION \
        neutron-metadata-agent=$NEUTRON_METADATA_AGENT_VERSION
    # apt install -y neutron-plugin-ml2 \
    # neutron-openvswitch-agent \
    # neutron-l3-agent \
    # neutron-dhcp-agent \
    # neutron-metadata-agent
}
# Configure the Neutron network node: kernel forwarding sysctls, neutron.conf,
# the ML2 plugin, OVS / L3 / DHCP / metadata agents, and the br-ex bridge,
# then restart all networking services.
function configure_neutron() {
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # Edit the /etc/sysctl.conf
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    # Enable routing and disable reverse-path filtering (required for DVR/VXLAN).
    sed -i "$ a net.ipv4.ip_forward = 1" /etc/sysctl.conf
    sed -i "$ a net.ipv4.conf.all.rp_filter = 0" /etc/sysctl.conf
    sed -i "$ a net.ipv4.conf.default.rp_filter = 0" /etc/sysctl.conf
    sysctl -p
    # Edit the /etc/neutron/neutron.conf file, [database] section
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    # The network node talks to the controller via RPC, not the DB directly.
    sed -i "s|^connection = |#connection = |" /etc/neutron/neutron.conf
    # Edit the /etc/neutron/neutron.conf file, [DEFAULT] section
    sed -i "/^\[DEFAULT\]$/ a service_plugins = router" /etc/neutron/neutron.conf
    sed -i "/^\[DEFAULT\]$/ a allow_overlapping_ips = True" /etc/neutron/neutron.conf
    sed -i "/^\[DEFAULT\]$/ a transport_url = rabbit://openstack:RABBIT_PASS@os-controller" /etc/neutron/neutron.conf
    sed -i "/^\[DEFAULT\]$/ a auth_strategy = keystone" /etc/neutron/neutron.conf
    # Edit the /etc/neutron/neutron.conf file, [keystone_authtoken] section
    echo -e "auth_uri = http://os-controller:5000\nauth_url = http://os-controller:35357\nmemcached_servers = os-controller:11211\nauth_type = password\nproject_domain_name = Default\nuser_domain_name = Default\nproject_name = service\nusername = neutron\npassword = NEUTRON_PASS\n" | sed -i "/^\[keystone_authtoken\]/ r /dev/stdin" /etc/neutron/neutron.conf
    # Edit the /etc/neutron/plugins/ml2/ml2_conf.ini file, [ml2] section
    sed -i "/^\[ml2\]$/ a type_drivers = flat,vlan,vxlan" /etc/neutron/plugins/ml2/ml2_conf.ini
    sed -i "/^\[ml2\]$/ a tenant_network_types = vxlan" /etc/neutron/plugins/ml2/ml2_conf.ini
    sed -i "/^\[ml2\]$/ a mechanism_drivers = openvswitch,l2population" /etc/neutron/plugins/ml2/ml2_conf.ini
    sed -i "/^\[ml2\]$/ a extension_drivers = port_security" /etc/neutron/plugins/ml2/ml2_conf.ini
    # Edit the /etc/neutron/plugins/ml2/ml2_conf.ini file, [ml2_type_flat] section
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    sed -i "/^\[ml2_type_flat\]$/ a flat_networks = external" /etc/neutron/plugins/ml2/ml2_conf.ini
    # Edit the /etc/neutron/plugins/ml2/ml2_conf.ini file, [ml2_type_vxlan] section
    sed -i "/^\[ml2_type_vxlan\]$/ a vni_ranges = 1:1000" /etc/neutron/plugins/ml2/ml2_conf.ini
    # Edit the /etc/neutron/plugins/ml2/ml2_conf.ini file, [securitygroup] section
    sed -i "/^\[securitygroup\]$/ a enable_ipset = True" /etc/neutron/plugins/ml2/ml2_conf.ini
    # Edit the /etc/neutron/plugins/ml2/openvswitch_agent.ini file, [ovs] section
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    # VXLAN endpoint lives on the tunnel network; br-ex carries external traffic.
    sed -i "/^\[ovs\]$/ a local_ip = $ENV_TUNNEL_OS_NETWORK_IP" /etc/neutron/plugins/ml2/openvswitch_agent.ini
    sed -i "/^\[ovs\]$/ a bridge_mappings = external:br-ex" /etc/neutron/plugins/ml2/openvswitch_agent.ini
    # Edit the /etc/neutron/plugins/ml2/openvswitch_agent.ini file, [agent] section
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    sed -i "/^\[agent\]$/ a tunnel_types = vxlan" /etc/neutron/plugins/ml2/openvswitch_agent.ini
    sed -i "/^\[agent\]$/ a l2_population = True" /etc/neutron/plugins/ml2/openvswitch_agent.ini
    sed -i "/^\[agent\]$/ a prevent_arp_spoofing = True" /etc/neutron/plugins/ml2/openvswitch_agent.ini
    # Edit the /etc/neutron/plugins/ml2/openvswitch_agent.ini file, [securitygroup] section
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    sed -i "/^\[securitygroup\]$/ a enable_security_group = True" /etc/neutron/plugins/ml2/openvswitch_agent.ini
    sed -i "/^\[securitygroup\]$/ a firewall_driver = neutron.agent.linux.iptables_firewall.OVSHybridIptablesFirewallDriver" /etc/neutron/plugins/ml2/openvswitch_agent.ini
    # Edit the /etc/neutron/l3_agent.ini file, [DEFAULT] section
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    sed -i "/^\[DEFAULT\]$/ a interface_driver = neutron.agent.linux.interface.OVSInterfaceDriver" /etc/neutron/l3_agent.ini
    # Edit the /etc/neutron/dhcp_agent.ini file, [DEFAULT] section
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    sed -i "/^\[DEFAULT\]$/ a interface_driver = neutron.agent.linux.interface.OVSInterfaceDriver" /etc/neutron/dhcp_agent.ini
    sed -i "/^\[DEFAULT\]$/ a dhcp_driver = neutron.agent.linux.dhcp.Dnsmasq" /etc/neutron/dhcp_agent.ini
    sed -i "/^\[DEFAULT\]$/ a enable_isolated_metadata = True" /etc/neutron/dhcp_agent.ini
    sed -i "/^\[DEFAULT\]$/ a dnsmasq_config_file = /etc/neutron/dnsmasq-neutron.conf" /etc/neutron/dhcp_agent.ini
    # Create the /etc/neutron/dnsmasq-neutron.conf file to adjust MTU
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    # Option 26 = interface MTU; 1450 leaves room for the VXLAN header.
    echo "dhcp-option-force=26,1450" > /etc/neutron/dnsmasq-neutron.conf
    chgrp neutron /etc/neutron/dnsmasq-neutron.conf
    # Edit the /etc/neutron/metadata_agent.ini file, [DEFAULT] section
    sed -i "/^\[DEFAULT\]$/ a nova_metadata_ip = os-controller" /etc/neutron/metadata_agent.ini
    sed -i "/^\[DEFAULT\]$/ a metadata_proxy_shared_secret = METADATA_SECRET" /etc/neutron/metadata_agent.ini
    # Configure OVS
    # See https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    # Strip the IP from the public NIC and attach it to the external bridge.
    ifconfig $ENV_PUBLIC_INTERFACE 0.0.0.0
    service openvswitch-switch restart
    ovs-vsctl add-br br-ex
    ovs-vsctl add-port br-ex $ENV_PUBLIC_INTERFACE
    # Restart the Networking services
    service openvswitch-switch restart
    service neutron-openvswitch-agent restart
    service neutron-dhcp-agent restart
    service neutron-metadata-agent restart
    service neutron-l3-agent restart
    # Log files
    # /var/log/neutron/neutron-dhcp-agent.log
    # /var/log/neutron/neutron-l3-agent.log
    # /var/log/neutron/neutron-metadata-agent.log
    # /var/log/neutron/neutron-openvswitch-agent.log
    # /var/log/neutron/neutron-ovs-cleanup.log
    # /var/log/openvswitch/ovsdb-server.log
    # /var/log/openvswitch/ovs-vswitchd.log
    # References
    # https://docs.openstack.org/newton/install-guide-ubuntu/neutron-controller-install.html
    # https://docs.openstack.org/newton/install-guide-ubuntu/neutron-controller-install-option2.html
    # https://kairen.gitbooks.io/openstack-ubuntu-newton/content/ubuntu-binary/neutron/#network-node
    # https://www.centos.bz/2012/04/linux-sysctl-conf/
}
# Install the pinned LBaaS v2 agent package.
function download_lbaas() {
    NEUTRON_LBAAS_AGENT_VERSION=2:10.0.1-0ubuntu1~cloud0
    # FIX: explicit run-once guard instead of the misleading
    # `[ ... ] || apt-get update && APT_UPDATED=true` precedence.
    if [ "$APT_UPDATED" != "true" ]; then
        apt-get update && APT_UPDATED=true
    fi
    apt install -y neutron-lbaasv2-agent=$NEUTRON_LBAAS_AGENT_VERSION
    #apt install -y python-neutron-lbaas=
    # Reference https://docs.openstack.org/ocata/networking-guide/config-lbaas.html
}
# Configure the LBaaS v2 agent to use the OVS interface driver with the
# HAProxy provider, then restart it.
function configure_lbaas() {
    # Edit the /etc/neutron/lbaas_agent.ini file, [DEFAULT] section
    sed -i "/^\[DEFAULT\]$/ a interface_driver = openvswitch" /etc/neutron/lbaas_agent.ini
    sed -i "/^\[DEFAULT\]$/ a ovs_use_veth = False" /etc/neutron/lbaas_agent.ini
    # Edit the /etc/neutron/lbaas_agent.ini file, [haproxy] section
    cat >> /etc/neutron/lbaas_agent.ini <<DATA
[haproxy]
user_group = haproxy
DATA
    # Edit the /etc/neutron/neutron_lbaas.conf file, [service_providers] section
    sed -i "/^\[service_providers\]$/ a service_provider = LOADBALANCERV2:Haproxy:neutron_lbaas.drivers.haproxy.plugin_driver.HaproxyOnHostPluginDriver:default" /etc/neutron/neutron_lbaas.conf
    # Restart the Networking services
    service neutron-lbaasv2-agent restart
    # Reference https://docs.openstack.org/ocata/networking-guide/config-lbaas.html
}
# Dispatch each CLI argument as a provisioning phase (download / configure).
function main() {
    while [ $# -gt 0 ];
    do
        case $1 in
        download)
            #use_local_apt_server
            use_public_apt_server
            each_node_must_resolve_the_other_nodes_by_name_in_addition_to_IP_address
            install_utilities
            install_python
            install_ntp
            download_neutron
            download_lbaas
            ;;
        configure)
            configure_neutron
            configure_lbaas
            ;;
        *)
            echo "unknown mode"
            ;;
        esac
        shift
    done
    echo done
}
# BUGFIX: quote "$@" so arguments containing whitespace are not re-split.
main "$@"
|
<filename>app/services/auto_match/authorizations/updating_service.rb
module AutoMatch
  module Authorizations
    module UpdatingService
      include BaseService

      # Applies +params+ to +match+ inside a DB transaction; triggers a
      # rollback (via BaseService#rollback!) when the update fails validation.
      # NOTE(review): +authorization+ is accepted but currently unused here —
      # confirm whether it is required by the BaseService call contract.
      def call(match, authorization, params)
        match.transaction do
          rollback! unless match.update(params)
        end
      end
    end
  end
end
|
#!/bin/bash
#
# Copyright (c) 2018 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# -*- mode: shell-script; indent-tabs-mode: nil; sh-basic-offset: 4; -*-
# ex: ts=8 sw=4 sts=4 et filetype=sh
# Automation script to create specs to build kata containers kernel
# NOTE(review): the line above looks copy-pasted — this script packages
# qemu-lite (see PKG_NAME below), not the kernel. Confirm and fix upstream.
[ -z "${DEBUG}" ] || set -o xtrace
set -o errexit
set -o nounset
set -o pipefail
# Shared version pins and OBS packaging helpers (cli, verify, build_pkg, ...).
source ../versions.txt
source ../scripts/pkglib.sh
SCRIPT_NAME=$0
SCRIPT_DIR=$(dirname $0)
PKG_NAME="qemu-lite"
VERSION=$qemu_lite_version
# Files produced from templates vs. files copied into the package as-is.
GENERATED_FILES=(qemu-lite.dsc qemu-lite.spec debian.rules _service debian.control)
STATIC_FILES=(debian.compat "${SCRIPT_DIR}/../../scripts/configure-hypervisor.sh" qemu-lite-rpmlintrc)
# Parse arguments
cli "$@"
[ "$VERBOSE" == "true" ] && set -x
PROJECT_REPO=${PROJECT_REPO:-home:${OBS_PROJECT}:${OBS_SUBPROJECT}/qemu-lite}
# Bump the OBS release number past the currently published one.
RELEASE=$(get_obs_pkg_release "${PROJECT_REPO}")
((RELEASE++))
set_versions "${qemu_lite_hash}"
# Template substitutions applied by generate_files.
replace_list=(
    "VERSION=$VERSION"
    "RELEASE=$RELEASE"
    "QEMU_LITE_HASH=${qemu_lite_hash:0:10}"
)
verify
echo "Verify succeed."
get_git_info
changelog_update $VERSION
generate_files "$SCRIPT_DIR" "${replace_list[@]}"
build_pkg "${PROJECT_REPO}"
|
#!/bin/bash
#Twisted cubic
# End-to-end demo: generate a parametric curve dataset, fit a network,
# predict on a coarser sampling, and plot prediction vs. ground truth.
# Parametric definition of the twisted cubic: (t, t^2, t^3).
FXT="t"
FYT="t ** 2"
FZT="t ** 3"
# Dense sampling for training (step 0.001 over t in [0, 2]).
python ../pmc3t_gen.py --dsout datasets/example2_train.csv --xt "$FXT" --yt "$FYT" --zt "$FZT" --rbegin 0 --rend 2.0 --rstep 0.001
# Fit a 3-hidden-layer model (sigmoid/tanh/sigmoid) for 250 epochs.
python ../pmc3t_fit.py --trainds datasets/example2_train.csv --modelout models/example2 \
    --hlayers 200 300 200 --hactivation sigmoid tanh sigmoid \
    --epochs 250
# Coarser sampling for testing, then predict and plot.
python ../pmc3t_gen.py --dsout datasets/example2_test.csv --xt "$FXT" --yt "$FYT" --zt "$FZT" --rbegin 0 --rend 2.0 --rstep 0.00475
python ../pmc3t_predict.py --model models/example2 --ds datasets/example2_test.csv --predictionout predictions/example2_pred.csv
python ../pmc3t_plot.py --ds datasets/example2_test.csv --prediction predictions/example2_pred.csv
#python ../pmc3t_plot.py --ds datasets/example2_train.csv --prediction predictions/example2_pred.csv --savefig predictions/example2.png
|
<reponame>freedesktop/pvr-driver
/*
* Copyright (c) 2011 Intel Corporation. All Rights Reserved.
* Copyright (c) Imagination Technologies Limited, UK
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Authors:
* <NAME> <<EMAIL>>
*/
#ifndef _PSB_TEXSTREAMING_H_
#define _PSB_TEXSTREAMING_H_
#include <va/va.h>
#include <va/va_backend.h>
/* Pack four characters into a little-endian FOURCC code. */
#define BC_FOURCC(a,b,c,d) \
    ((unsigned long) ((a) | (b)<<8 | (c)<<16 | (d)<<24))
#define BC_PIX_FMT_NV12 BC_FOURCC('N', 'V', '1', '2') /*YUV 4:2:0*/
#define BC_PIX_FMT_UYVY BC_FOURCC('U', 'Y', 'V', 'Y') /*YUV 4:2:2*/
#define BC_PIX_FMT_YUYV BC_FOURCC('Y', 'U', 'Y', 'V') /*YUV 4:2:2*/
#define BC_PIX_FMT_RGB565 BC_FOURCC('R', 'G', 'B', 'P') /*RGB 5:6:5*/
/* Argument bundle for the buffer-class (BC) video ioctls below. */
typedef struct BC_Video_ioctl_package_TAG {
    int ioctl_cmd;   /* one of the BC_Video_ioctl_* commands below */
    int device_id;
    int inputparam;
    int outputparam;
} BC_Video_ioctl_package;
/* Describes one buffer handed to/from the buffer-class device. */
typedef struct bc_buf_ptr {
    unsigned int index;
    int size;
    unsigned long pa;     /* physical address */
    unsigned long handle;
} bc_buf_ptr_t;
/* ioctl_cmd values for BC_Video_ioctl_package. */
#define BC_Video_ioctl_fill_buffer 0
#define BC_Video_ioctl_get_buffer_count 1
#define BC_Video_ioctl_get_buffer_phyaddr 2 /*get physical address by index*/
#define BC_Video_ioctl_get_buffer_index 3 /*get index by physical address*/
#define BC_Video_ioctl_request_buffers 4
#define BC_Video_ioctl_set_buffer_phyaddr 5
#define BC_Video_ioctl_release_buffer_device 6
/* How buffer memory is provided. */
enum BC_memory {
    BC_MEMORY_MMAP = 1,
    BC_MEMORY_USERPTR = 2,
};
/*
 * the following types are tested for fourcc in struct bc_buf_params_t
 * NV12
 * UYVY
 * RGB565 - not tested yet
 * YUYV
 */
typedef struct bc_buf_params {
    int count; /*number of buffers, [in/out]*/
    int width; /*buffer width in pixel, multiple of 8 or 32*/
    int height; /*buffer height in pixel*/
    int stride;
    unsigned int fourcc; /*buffer pixel format*/
    enum BC_memory type;
} bc_buf_params_t;
/* Register a set of VA surfaces with the buffer-class device. */
int psb_add_video_bcd(
    VADriverContextP ctx,
    int width,
    int height,
    int stride,
    int num_surfaces,
    VASurfaceID *surface_list
);
/* Look up the buffer-class descriptor for a single surface. */
int psb_get_video_bcd(
    VADriverContextP ctx,
    VASurfaceID surface
);
int psb_register_video_bcd(VADriverContextP ctx);
int psb_release_video_bcd(VADriverContextP ctx);
/*add for texture streaming end*/
#endif /*_PSB_TEXSTREAMING_H_*/
|
#!/bin/sh
# Container entrypoint for an Incognito node. Fills in defaults for missing
# environment variables, then launches ./incognito in one of three modes:
# fullnode, validator (PRIVATEKEY) or mining-key (MININGKEY).
mkdir -p /data
cron
# BUGFIX: '==' is a bashism; /bin/sh (dash) only supports '='. All tests
# below also quote their variables so empty/unset values expand safely.
if [ "$1" = "y" ]; then
    # Wipe previous chain data (subdirectories of /data only).
    find /data -maxdepth 1 -mindepth 1 -type d | xargs rm -rf
fi
if [ -z "$NAME" ]; then
    NAME="miner";
fi
# NOTE(review): TESTNET is defaulted but not referenced below — confirm the
# binary reads it from the environment.
if [ -z "$TESTNET" ]; then
    TESTNET=true;
fi
if [ -z "$BOOTNODE_IP" ]; then
    BOOTNODE_IP="testnet-bootnode.incognito.org:9330";
fi
if [ -z "$MONITOR" ]; then
    export MONITOR="http://51.91.72.45:33333";
fi
if [ -z "$NODE_PORT" ]; then
    NODE_PORT=9433;
fi
if [ -z "$LIMIT_FEE" ]; then
    LIMIT_FEE=1;
fi
if [ -z "$LOG_LEVEL" ]; then
    LOG_LEVEL="info";
fi
if [ -z "$PUBLIC_IP" ]; then
    # Resolve this host's public IPv4 via OpenDNS.
    PUBLIC_IP=`dig -4 @resolver1.opendns.com A myip.opendns.com. +short`;
fi
# Short tag (first and last octet) used in the rotated log file name.
CONTRACT_IP=`echo $PUBLIC_IP | cut -d '.' -f 1,4`
if [ -z "$RPC_PORT" ]; then RPC_PORT=9334; fi
if [ -z "$WS_PORT" ]; then WS_PORT=19334; fi
IS_FULL_VALIDATION=""
if [ "$FULL_VALIDATION" = "1" ]; then
    IS_FULL_VALIDATION="--is_full_validation";
fi
# Each branch echoes the exact command into cmd.sh for debugging, then runs
# it with stderr captured and stdout rotated daily via cronolog.
if [ -n "$FULLNODE" ] && [ "$FULLNODE" = "1" ]; then
    echo ./incognito --relayshards "all" --usecoindata --coindatapre="__coins__" --numindexerworkers=0 --indexeraccesstoken=$INDEXER_ACCESS_TOKEN $IS_FULL_VALIDATION --discoverpeers --discoverpeersaddress $BOOTNODE_IP --datadir "/data" --listen "0.0.0.0:$NODE_PORT" --externaladdress "$PUBLIC_IP:$NODE_PORT" --enablewallet --wallet "wallet" --walletpassphrase "12345678" --walletautoinit --limitfee $LIMIT_FEE --norpcauth --rpclisten "0.0.0.0:$RPC_PORT" --rpcwslisten "0.0.0.0:$WS_PORT" --loglevel "$LOG_LEVEL" > cmd.sh
    ./incognito --relayshards "all" --usecoindata --coindatapre="__coins__" --numindexerworkers=0 --indexeraccesstoken=$INDEXER_ACCESS_TOKEN $IS_FULL_VALIDATION --discoverpeers --discoverpeersaddress $BOOTNODE_IP --datadir "/data" --listen "0.0.0.0:$NODE_PORT" --externaladdress "$PUBLIC_IP:$NODE_PORT" --enablewallet --wallet "wallet" --walletpassphrase "12345678" --walletautoinit --limitfee $LIMIT_FEE --norpcauth --rpclisten "0.0.0.0:$RPC_PORT" --rpcwslisten "0.0.0.0:$WS_PORT" --loglevel "$LOG_LEVEL" --rpcmaxclients 1500 2>/data/error.log | cronolog /data/$CONTRACT_IP-%Y-%m-%d.log
elif [ -n "$PRIVATEKEY" ]; then
    echo ./incognito --relayshards "$RELAY_SHARD" --usecoindata --coindatapre="__coins__" --numindexerworkers=0 --indexeraccesstoken=$INDEXER_ACCESS_TOKEN $IS_FULL_VALIDATION --limitfee $LIMIT_FEE --discoverpeers --discoverpeersaddress $BOOTNODE_IP --privatekey $PRIVATEKEY --datadir "/data" --listen "0.0.0.0:$NODE_PORT" --externaladdress "$PUBLIC_IP:$NODE_PORT" --norpcauth --enablewallet --wallet "incognito" --walletpassphrase "12345678" --walletautoinit --rpclisten "0.0.0.0:$RPC_PORT" --rpcwslisten "0.0.0.0:$WS_PORT" --loglevel "$LOG_LEVEL" > cmd.sh
    ./incognito --relayshards "$RELAY_SHARD" --usecoindata --coindatapre="__coins__" --numindexerworkers=0 --indexeraccesstoken=$INDEXER_ACCESS_TOKEN $IS_FULL_VALIDATION --limitfee $LIMIT_FEE --discoverpeers --discoverpeersaddress $BOOTNODE_IP --privatekey $PRIVATEKEY --datadir "/data" --listen "0.0.0.0:$NODE_PORT" --externaladdress "$PUBLIC_IP:$NODE_PORT" --norpcauth --enablewallet --wallet "incognito" --walletpassphrase "12345678" --walletautoinit --rpclisten "0.0.0.0:$RPC_PORT" --rpcwslisten "0.0.0.0:$WS_PORT" --loglevel "$LOG_LEVEL" 2>/data/error.log | cronolog /data/$CONTRACT_IP-%Y-%m-%d.log
elif [ -n "$MININGKEY" ]; then
    echo ./incognito --relayshards "$RELAY_SHARD" --usecoindata --coindatapre="__coins__" --numindexerworkers=0 --indexeraccesstoken=$INDEXER_ACCESS_TOKEN $IS_FULL_VALIDATION --limitfee $LIMIT_FEE --discoverpeers --discoverpeersaddress $BOOTNODE_IP --miningkeys $MININGKEY --datadir "/data" --listen "0.0.0.0:$NODE_PORT" --externaladdress "$PUBLIC_IP:$NODE_PORT" --norpcauth --enablewallet --wallet "incognito" --walletpassphrase "12345678" --walletautoinit --rpclisten "0.0.0.0:$RPC_PORT" --rpcwslisten "0.0.0.0:$WS_PORT" --loglevel "$LOG_LEVEL" > cmd.sh
    ./incognito --relayshards "$RELAY_SHARD" --usecoindata --coindatapre="__coins__" --numindexerworkers=0 --indexeraccesstoken=$INDEXER_ACCESS_TOKEN $IS_FULL_VALIDATION --limitfee $LIMIT_FEE --discoverpeers --discoverpeersaddress $BOOTNODE_IP --miningkeys $MININGKEY --datadir "/data" --listen "0.0.0.0:$NODE_PORT" --externaladdress "$PUBLIC_IP:$NODE_PORT" --norpcauth --enablewallet --wallet "incognito" --walletpassphrase "12345678" --walletautoinit --rpclisten "0.0.0.0:$RPC_PORT" --rpcwslisten "0.0.0.0:$WS_PORT" --loglevel "$LOG_LEVEL" 2>/data/error.log | cronolog /data/$CONTRACT_IP-%Y-%m-%d.log
fi
|
<reponame>Open-Speech-EkStep/crowdsource-dataplatform
// Generates swagger-output.json by scanning the Express routes in src/app.js.
const swaggerAutogen = require('swagger-autogen')();

// Static metadata merged into the generated OpenAPI document.
const doc = {
    info: {
        title: 'Crowdsource API',
        description: 'Swagger API Documentation for Crowdsource',
    },
    host: 'localhost:8080',
    schemes: ['http'],
};

const outputFile = './swagger/swagger-output.json';
// Entry file(s) whose registered routes are crawled for endpoints.
const endpointsFiles = ['./src/app.js'];

swaggerAutogen(outputFile, endpointsFiles, doc);
|
# zsh-autoenv script to add binstubs to PATH
# Idempotently prepends this project's ./bin directory to PATH.
local BIN_PATH="${0:a:h}/bin"           # absolute dir of this script + /bin
local NEW_PATH=":${PATH}:"              # sentinel colons simplify matching below
NEW_PATH=${NEW_PATH//":$BIN_PATH:"/:}   # remove any existing BIN_PATH entry
NEW_PATH=${NEW_PATH/#:/$BIN_PATH:}      # replace leading ":" => prepend BIN_PATH
export PATH=${NEW_PATH/%:/}             # strip the trailing sentinel colon
# Silence RVM's warning about the modified PATH ordering.
export rvm_silence_path_mismatch_check_flag=1
|
<reponame>surfliner/surfliner-mirror
# frozen_string_literal: true
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
  # Persist pass/fail state between runs (enables --only-failures).
  config.example_status_persistence_file_path = "spec/examples.txt"
  # Forbid should-style monkey patching; expect-syntax only.
  config.disable_monkey_patching!
  # Randomize example order to surface order dependencies; seed Kernel#rand
  # with the same seed so random data is reproducible for a given run.
  config.order = :random
  Kernel.srand config.seed
end
|
#!/bin/bash
# githubuser--Given a GitHub username, pulls information about the user
if [ $# -ne 1 ]; then
    echo "Usage: $0 <username>"
    exit 1
fi
# The -s silences curl's normally verbose output.
# The awk program splits each JSON line on double quotes, so for a line like
#   "name": "Jane Doe",
# field $4 is the value; numeric fields (followers/following) are not quoted,
# so their value is pulled out of field $3 and the trailing comma stripped.
# NOTE(review): this scrape depends on the API returning one key per line —
# it breaks on compact JSON; confirm before reusing elsewhere.
curl -s "https://api.github.com/users/$1" | \
awk -F'"' '
/\"name\":/ {
print $4" is the name of the GitHub user."
}
/\"followers\":/{
split($3, a, " ")
sub(/,/, "", a[2])
print "They have "a[2]" followers."
}
/\"following\":/{
split($3, a, " ")
sub(/,/, "", a[2])
print "They are following "a[2]" other users."
}
/\"created_at\":/{
print "Their account was created on "$4"."
}
'
exit 0
|
#!/bin/sh
# ETL job: compute the per-major "first choice application rate" metric,
# stage it in Hive, and export it to the MySQL quality-data table.
cd `dirname $0`
# NOTE(review): `source` is a bashism under a /bin/sh shebang — confirm this
# is always executed with bash.
source ./../config.sh
exec_dir major_first_volunteer_application_rate
HIVE_DB=assurance
HIVE_TABLE=major_first_volunteer_application_rate
TARGET_TABLE=im_quality_major_data_info
# Metric display name and code (values are consumed downstream; keep as-is).
DATA_NAME=第一志愿报考率
DATA_NO=ZY_DYZYBKL
# Drop and recreate the external Hive staging table (and its HDFS directory).
function create_table() {
    # `|| :` ignores the failure when the directory does not exist yet.
    hadoop fs -rm -r ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} || :
    hive -e "DROP TABLE IF EXISTS ${HIVE_DB}.${HIVE_TABLE};"
    hive -e "CREATE EXTERNAL TABLE IF NOT EXISTS ${HIVE_DB}.${HIVE_TABLE}(
data_no String comment '数据项编号',
data_name String comment '数据项名称',
major_no String comment '专业编号',
major_name String comment '专业名称',
data_cycle String comment '数据统计周期 YEAR 年 MONTH 月 DAY 日 QUARTER 季度 OTHER 其他',
data_type String comment '数据类型 NUMBER 数值类型 ENUM 枚举类型',
data_time String comment '数据日期 年YYYY 月YYYYmm 日YYYYMMDD 季度YYYY-1,yyyy-2,yyyy-3,yyyy-4 学期 yyyy-yyyy 学期 yyyy-yyyy-1,yyyy-yyyy-2',
data_value String comment '数据项值(数字保存数字,如果是数据字典枚举保存key)',
is_new String comment '是否最新 是YES 否NO',
create_time String comment '创建时间'
) COMMENT '第一志愿报考率'
LOCATION '${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE}'"
    fn_log "创建表——第一志愿报考率:${HIVE_DB}.${HIVE_TABLE}"
}
function import_table() {
hive -e "
INSERT INTO TABLE ${HIVE_DB}.${HIVE_TABLE}
select
c.data_no as data_no,
c.data_name as data_name,
a.major_code as major_no,
a.major_name as major_name,
c.data_cycle as data_cycle,
c.data_type as data_type,
a.semester_year as data_time,
a.first_volunteer_rate as data_value,
'NO' as is_new,
FROM_UNIXTIME(UNIX_TIMESTAMP()) AS create_time
from model.major_enroll_student a,assurance.im_quality_data_base_info c
where c.data_name ='${DATA_NAME}'
"
fn_log "导入数据 —— 第一志愿报考率:${HIVE_DB}.${HIVE_TABLE}"
}
#插入新数据
function import_table_new() {
hive -e "
INSERT INTO TABLE ${HIVE_DB}.${HIVE_TABLE}
select
c.data_no as data_no,
c.data_name as data_name,
a.major_code as major_no,
a.major_name as major_name,
c.data_cycle as data_cycle,
c.data_type as data_type,
a.semester_year as data_time ,
a.first_volunteer_rate as data_value,
'NO' as is_new,
FROM_UNIXTIME(UNIX_TIMESTAMP()) AS create_time
from
(
select s.major_code,
s.major_name,
s.semester_year,
sum(first_volunteer_rate) as first_volunteer_rate
from model.major_enroll_student s
where s.semester_year in
(select max(semester_year) from model.major_enroll_student )
group by s.major_code,s.major_name,s.semester_year
) a,
assurance.im_quality_data_base_info c
where c.data_name = '${DATA_NAME}'
"
fn_log "导入数据 —— 第一志愿报考率:${HIVE_DB}.${HIVE_TABLE}"
}
function export_table() {
DATE_TIME=`hive -e "select max(data_time) from ${HIVE_DB}.${HIVE_TABLE} " `
clear_mysql_data "delete from im_quality_major_data_info where data_name = '${DATA_NAME}';"
sqoop export --connect ${MYSQL_URL} --username ${MYSQL_USERNAME} --password ${MYSQL_PASSWORD} \
--table ${TARGET_TABLE} --export-dir ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} \
--input-fields-terminated-by '\0001' --input-null-string '\\N' --input-null-non-string '\\N' \
--null-string '\\N' --null-non-string '\\N' \
--columns 'data_no,data_name,major_no,major_name,data_cycle,data_type,data_time,data_value,is_new,create_time'
clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'NO' where data_name = '${DATA_NAME}';"
clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'YES' where data_name = '${DATA_NAME}' and data_time='${DATE_TIME}'"
fn_log "导出数据--第一志愿报考率:${HIVE_DB}.${TARGET_TABLE}"
}
function export_table_new() {
DATE_TIME=`hive -e "select max(data_time) from ${HIVE_DB}.${HIVE_TABLE} " `
clear_mysql_data "delete from im_quality_major_data_info
where data_no = '${DATA_NO}' and data_time= '${DATE_TIME}';"
sqoop export --connect ${MYSQL_URL} --username ${MYSQL_USERNAME} --password ${MYSQL_PASSWORD} \
--table ${TARGET_TABLE} --export-dir ${BASE_HIVE_DIR}/${HIVE_DB}/${HIVE_TABLE} \
--input-fields-terminated-by '\0001' --input-null-string '\\N' --input-null-non-string '\\N' \
--null-string '\\N' --null-non-string '\\N' \
--columns 'data_no,data_name,major_no,major_name,data_cycle,data_type,data_time,data_value,is_new,create_time'
clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'NO' where data_no = '${DATA_NO}';"
clear_mysql_data "update assurance.im_quality_major_data_info set is_new = 'YES' where data_no = '${DATA_NO}' and data_time= '${DATE_TIME}' "
fn_log "导出数据--第一志愿报考率:${HIVE_DB}.${TARGET_TABLE}"
}
create_table
import_table
export_table |
#!/bin/bash
set -e
# Plugins
#
# Xcode Build Rule: *.lua
# Compiles each *.lua input into a C source file via lua2c, using the lua
# interpreter shipped in the project toolchain.
# -----------------------------------------------------------------------------
# Location of toolchain (overridable via the environment / build settings).
if [ -z "$TOOLCHAIN_DIR" ]
then
    TOOLCHAIN_DIR="$PROJECT_DIR/../../bin/mac"
fi
echo "Using lua from $TOOLCHAIN_DIR ..."
if [ ! -e "$TOOLCHAIN_DIR/lua" ]; then
    echo ERROR: "$TOOLCHAIN_DIR"/lua does not exist.
    # BUG FIX: `exit -1` is not portable (exit status must be 0-255); use 1.
    exit 1
fi
DST_DIR="${TARGET_TEMP_DIR}/${CURRENT_ARCH}"
# BUG FIX: use mkdir -p so missing parent directories do not abort the build;
# -p is also a no-op when the directory already exists, so no -d test needed.
mkdir -p "$DST_DIR"
# Historical invocations kept for reference:
# "$TOOLCHAIN_DIR"/lua $SRCROOT/../../bin/rcc.lua -c "$TOOLCHAIN_DIR" -O$CONFIGURATION -o $DST_DIR/$INPUT_FILE_BASE.lu $INPUT_FILE_PATH
# "$TOOLCHAIN_DIR"/lua -epackage.path=[[$SRCROOT/../../external/loop-2.3-beta/lua/?.lua]] $SRCROOT/../../external/loop-2.3-beta/lua/precompiler.constant.lua -d $DST_DIR -o $INPUT_FILE_BASE -l $DST_DIR/?.lu -n $INPUT_FILE_BASE
"$TOOLCHAIN_DIR"/lua "$TOOLCHAIN_DIR"/../lua2c.lua "$INPUT_FILE_PATH" "$INPUT_FILE_BASE" "$DST_DIR"/"$INPUT_FILE_BASE".c
|
# Decode the IWSLT14 De-En validation set with the barrier model at two
# candidate-set sizes (--topk 32 / 128), batch size 1, fixed seed; stdout and
# stderr of each run are captured in a per-setting log for speed measurement.
CUDA_VISIBLE_DEVICES=1 fairseq-generate ../fairseq_vanilla/data-bin/iwslt14.tokenized.de-en --path /n/rush_lab/users/y/checkpoints/barrier/iwslt/checkpoint_best.pt --batch-size 1 --topk 32 --rounds 3 --remove-bpe --D 3 --max-len-a 0.941281036889224 --max-len-b 0.8804326732522796 --gen-subset valid --max-size 3000 --seed 1234 > /n/rush_lab/users/y/val_ab_logs/iwslt-epoch-ab-fixD0/topk32_D3_rounds3_speed.txt 2>&1
CUDA_VISIBLE_DEVICES=1 fairseq-generate ../fairseq_vanilla/data-bin/iwslt14.tokenized.de-en --path /n/rush_lab/users/y/checkpoints/barrier/iwslt/checkpoint_best.pt --batch-size 1 --topk 128 --rounds 3 --remove-bpe --D 3 --max-len-a 0.941281036889224 --max-len-b 0.8804326732522796 --gen-subset valid --max-size 3000 --seed 1234 > /n/rush_lab/users/y/val_ab_logs/iwslt-epoch-ab-fixD0/topk128_D3_rounds3_speed.txt 2>&1
|
#!/usr/bin/env bash
# Evaluates (--mode test) a trained RNN-encoder policy on Pendulum-v1,
# loading the latest checkpoint and logging to a timestamped directory.
ENV="Pendulum-v1"
DATETIME="$(date +"%Y-%m-%d-%T")"
LOG_DIR="logs/$ENV/RNN/$DATETIME"
CHECKPOINT_DIR="savedcheckpoints/$ENV/RNN"
# Project root = grandparent of this script's location.
ROOT_DIR="$(
cd "$(dirname "$(dirname "$0")")"
pwd
)"
cd "$ROOT_DIR"
mkdir -p "$LOG_DIR"
# Copy this script into the log dir so the exact run configuration is archived.
cp "$0" "$LOG_DIR"
PYTHONWARNINGS=ignore python3 main.py \
--mode test --gpu 0 1 0 1 \
--env "$ENV" \
--hidden-dims 128 64 \
--activation LeakyReLU \
--encoder-arch RNN \
--state-dim 128 \
--encoder-hidden-dims-before-rnn 128 \
--encoder-hidden-dims-rnn 64 \
--encoder-hidden-dims-after-rnn 128 \
--encoder-activation LeakyReLU \
--skip-connection \
--n-episodes 100 \
--n-samplers 4 \
--random-seed 0 \
--log-dir "$LOG_DIR" \
--checkpoint-dir "$CHECKPOINT_DIR" \
--load-checkpoint \
"$@" # script arguments (can override args above)
|
class IllegalAction(Exception):
    """Raised when an agent action falls outside the valid decision range."""
    pass


class TransportSystem:
    """Maps agent-level actions onto direct core actions via transport optimizers.

    Agent actions in [offset, offset + len(optimizers)) select the optimizer at
    index (action - offset); anything outside that range is illegal.
    """

    def __init__(self, transport_optimizers, transport_decision_offset, core_state):
        self.__transport_optimizers = transport_optimizers
        self.__transport_decision_offset = transport_decision_offset
        self.__core_state = core_state

    def get_direct_core_action(self, agent_action):
        """Return the selected optimizer's core action, or None (after logging)
        when agent_action is outside the valid range."""
        try:
            if (agent_action < self.__transport_decision_offset
                    or agent_action >= len(self.__transport_optimizers) + self.__transport_decision_offset):
                raise IllegalAction()
            direct_core_action = self.__transport_optimizers[
                agent_action - self.__transport_decision_offset].get_action(self.__core_state)
            return direct_core_action
        except IllegalAction:
            print("Illegal action encountered")
            return None  # explicit: illegal actions yield no core action


# Example usage
class TransportOptimizer:
    def get_action(self, core_state):
        # Placeholder implementation of get_action.
        pass


# Create an instance of TransportSystem.
# BUG FIX: `core_state` was used below without ever being defined, which raised
# NameError as soon as this module was imported/run.
core_state = {}
transport_optimizers = [TransportOptimizer(), TransportOptimizer()]
transport_system = TransportSystem(transport_optimizers, 2, core_state)
transport_system.get_direct_core_action(3)  # Example call: selects optimizer index 1
<gh_stars>1-10
package com.flash3388.flashlib.frc.robot.io;
import com.flash3388.flashlib.io.Pwm;
import edu.wpi.first.hal.DIOJNI;
import edu.wpi.first.hal.PWMJNI;
import edu.wpi.first.wpilibj.SensorUtil;
/**
 * {@link Pwm} adapter over a WPILib {@code edu.wpi.first.wpilibj.PWM} channel,
 * mapping the interface's raw/duty-cycle accessors onto the roboRIO hardware.
 */
public class RoboRioPwm implements Pwm {

    /** Upper bound of the raw PWM value range (0..255). */
    public static final int MAX_RAW = 255;

    private edu.wpi.first.wpilibj.PWM mDevice;

    /** Wraps an already-constructed WPILib PWM handle. */
    public RoboRioPwm(edu.wpi.first.wpilibj.PWM pwm) {
        mDevice = pwm;
    }

    /** Opens the PWM channel on the given roboRIO port. */
    public RoboRioPwm(int port) {
        this(new edu.wpi.first.wpilibj.PWM(port));
    }

    @Override
    public void setDuty(double duty) {
        // Scale the 0..1 duty cycle onto the raw range, truncating toward zero.
        setValue((int) (duty * MAX_RAW));
    }

    @Override
    public double getDuty() {
        return getValue() / (double) MAX_RAW;
    }

    @Override
    public void setValue(int value) {
        mDevice.setRaw(value);
    }

    @Override
    public int getValue() {
        return mDevice.getRaw();
    }

    @Override
    public int getMaxValue() {
        return MAX_RAW;
    }

    @Override
    public void setFrequency(double frequency) {
        // The PWM loop timing is fixed by the FPGA; frequency cannot be changed.
        throw new UnsupportedOperationException("cannot set frequency");
    }

    @Override
    public double getFrequency() {
        return SensorUtil.kSystemClockTicksPerMicrosecond * 1e3 / DIOJNI.getLoopTiming();
    }

    @Override
    public void close() {
        // Idempotent: only the first call releases the underlying channel.
        if (mDevice != null) {
            mDevice.close();
            mDevice = null;
        }
    }
}
|
# Report whether `num` is even or odd (same output as before, inverted branch).
num = 23
if num % 2:
    print(str(num) + " is an odd number")
else:
    print(str(num) + " is an even number")
/* global artifacts:false, it:false, contract:false, assert:false */
const WyvernAtomicizer = artifacts.require('WyvernAtomicizer')
const WyvernStatic = artifacts.require('WyvernStatic')

contract('WyvernStatic', () => {
  // Deployment smoke test: rejects (failing the test) if never migrated.
  it('is deployed', async () => {
    return await WyvernStatic.deployed()
  })

  // The static contract must be wired to the deployed atomicizer instance.
  it('has the correct atomicizer address', async () => {
    const [atomicizerInstance, staticInstance] = await Promise.all([
      WyvernAtomicizer.deployed(),
      WyvernStatic.deployed()
    ])
    assert.equal(await staticInstance.atomicizer(), atomicizerInstance.address, 'incorrect atomicizer address')
  })
})
|
#!/usr/bin/env bash
# Tutorial script demonstrating Bash variables and command substitution.
#Variables
## $0 - The name of the Bash script.
## $1 - $9 - The first 9 arguments to the Bash script. (As mentioned above.)
## $# - How many arguments were passed to the Bash script.
## $@ - All the arguments supplied to the Bash script.
## $? - The exit status of the most recently run process.
## $$ - The process ID of the current script.
## $USER - The username of the user running the script.
## $HOSTNAME - The hostname of the machine the script is running on.
## $SECONDS - The number of seconds since the script was started.
## $RANDOM - Returns a different random number each time it is referenced.
# Take input from the command line, concatenate it into a string and print it.
echo "#1 Hello $1"
# Setting local variables and printing the combined string.
greeting="Hello"
name=$1
echo "#2 $greeting $name"
# Setting a directory path in a variable, then listing its contents.
directory=/etc
echo "LIST DIRECTORY STRUCTURE:"
ls ${directory}
#Command substitution
## Takes the output of a command or program
## and save it as the value of a variable.
## To do this we place it within brackets, preceded by a $ sign.
count=$( ls /etc | wc -l )
echo "There are ${count} entries in the directory /etc"
|
<gh_stars>0
import React from 'react';
import { render, screen } from '@testing-library/react';
import AddressBookIcon from '@patternfly/react-icons/dist/esm/icons/address-book-icon';
import { EmptyState, EmptyStateVariant } from '../EmptyState';
import { EmptyStateBody } from '../EmptyStateBody';
import { EmptyStateSecondaryActions } from '../EmptyStateSecondaryActions';
import { EmptyStateIcon } from '../EmptyStateIcon';
import { EmptyStatePrimary } from '../EmptyStatePrimary';
import { Button } from '../../Button';
import { Title, TitleSizes } from '../../Title';
// Snapshot and DOM-class coverage for the EmptyState component family.
describe('EmptyState', () => {
  // Full composition: title, body, primary action, and secondary actions.
  test('Main', () => {
    const { asFragment } = render(
      <EmptyState>
        <Title headingLevel="h5" size="lg">
          HTTP Proxies
        </Title>
        <EmptyStateBody>
          Defining HTTP Proxies that exist on your network allows you to perform various actions through those proxies.
        </EmptyStateBody>
        <Button variant="primary">New HTTP Proxy</Button>
        <EmptyStateSecondaryActions>
          <Button variant="link" aria-label="learn more action">
            Learn more about this in the documentation.
          </Button>
        </EmptyStateSecondaryActions>
      </EmptyState>
    );
    expect(asFragment()).toMatchSnapshot();
  });
  // Size variants: a bare title is enough to exercise the variant markup.
  test('Main variant large', () => {
    const { asFragment } = render(
      <EmptyState variant={EmptyStateVariant.large}>
        <Title headingLevel="h3" size={TitleSizes.md}>
          EmptyState large
        </Title>
      </EmptyState>
    );
    expect(asFragment()).toMatchSnapshot();
  });
  test('Main variant small', () => {
    const { asFragment } = render(
      <EmptyState variant={EmptyStateVariant.small}>
        <Title headingLevel="h3" size={TitleSizes.md}>
          EmptyState small
        </Title>
      </EmptyState>
    );
    expect(asFragment()).toMatchSnapshot();
  });
  test('Main variant xs', () => {
    const { asFragment } = render(
      <EmptyState variant={EmptyStateVariant.xs}>
        <Title headingLevel="h3" size={TitleSizes.md}>
          EmptyState small
        </Title>
      </EmptyState>
    );
    expect(asFragment()).toMatchSnapshot();
  });
  // Sub-components must merge custom class names with their base pf-c classes.
  test('Body', () => {
    render(<EmptyStateBody className="custom-empty-state-body" data-testid="body-test-id" />);
    expect(screen.getByTestId('body-test-id')).toHaveClass('custom-empty-state-body pf-c-empty-state__body');
  });
  test('Secondary Action', () => {
    render(<EmptyStateSecondaryActions className="custom-empty-state-secondary" data-testid="actions-test-id" />);
    expect(screen.getByTestId('actions-test-id')).toHaveClass(
      'custom-empty-state-secondary pf-c-empty-state__secondary'
    );
  });
  test('Icon', () => {
    render(<EmptyStateIcon icon={AddressBookIcon} data-testid="icon-test-id" />);
    expect(screen.getByTestId('icon-test-id')).toHaveClass('pf-c-empty-state__icon');
  });
  // variant="container" renders a wrapping div around the SVG icon.
  test('Wrap icon in a div', () => {
    const { container } = render(
      <EmptyStateIcon
        variant="container"
        component={AddressBookIcon}
        className="custom-empty-state-icon"
        id="empty-state-icon-id"
      />
    );
    expect(container.querySelector('div')).toHaveClass('pf-c-empty-state__icon custom-empty-state-icon');
    expect(container.querySelector('svg')).toBeInTheDocument();
  });
  test('Primary div', () => {
    render(
      <EmptyStatePrimary data-testid="primary-test-id">
        <Button variant="link">Link</Button>
      </EmptyStatePrimary>
    );
    expect(screen.getByTestId('primary-test-id')).toHaveClass('pf-c-empty-state__primary');
  });
  test('Full height', () => {
    const { asFragment } = render(
      <EmptyState isFullHeight variant={EmptyStateVariant.large}>
        <Title headingLevel="h3" size={TitleSizes.md}>
          EmptyState large
        </Title>
      </EmptyState>
    );
    expect(asFragment()).toMatchSnapshot();
  });
});
|
package dp.abstractFactory.banking;
import dp.abstractFactory.PM;
/**
 * Concrete {@link PM} (project manager) for the banking product family in the
 * abstract-factory example; created by the banking factory.
 */
public class BankingPM implements PM {
    @Override
    public void manageProject() {
        System.out.println("Banking PM manages banking project");
    }
}
|
/**
 * Minimal grade book that tracks the name of the course it represents and can
 * greet the user with that name.
 */
public class GradeBook {

    private String courseName; // the course this grade book represents

    /** Stores the course name. */
    public void setCourseName(String name) {
        courseName = name;
    }

    /** Returns the previously set course name (null if never set). */
    public String getCourseName() {
        return courseName;
    }

    /** Prints a welcome banner containing the course name. */
    public void displayMessage() {
        System.out.printf("Welcome to the grade book for\n%s!\n", getCourseName());
    }
}
|
<reponame>PloadyFree/bacs-learn-current
package istu.bacs.externalapi.fake;
import istu.bacs.db.problem.Problem;
import istu.bacs.db.problem.ResourceName;
import istu.bacs.db.submission.Submission;
import istu.bacs.externalapi.ExternalApi;
import java.util.List;
import java.util.Random;
import static istu.bacs.db.problem.ResourceName.FAKE;
import static istu.bacs.db.submission.Verdict.PENDING;
/**
 * In-memory {@link ExternalApi} stub for tests/local runs: submissions get a
 * random external id and PENDING verdict, and result checks are delegated to
 * {@link SubmissionResultUpdater}.
 */
public class FakeApi implements ExternalApi {

    private final Random rnd = new Random();
    private final ProblemService problemService = new ProblemService();
    private final SubmissionResultUpdater submissionResultUpdater = new SubmissionResultUpdater();

    @Override
    public List<Problem> getAllProblems() {
        return problemService.getProblems();
    }

    /** Assigns a random non-negative external id and PENDING verdict; always succeeds. */
    @Override
    public boolean submit(Submission submission) {
        submission.setExternalSubmissionId(rnd.nextInt(Integer.MAX_VALUE));
        submission.getResult().setVerdict(PENDING);
        return true;
    }

    /** Submits each element; always reports success (even for an empty list). */
    @Override
    public boolean submit(List<Submission> submissions) {
        submissions.forEach(this::submit);
        return true;
    }

    @Override
    public boolean checkSubmissionResult(Submission submission) {
        submissionResultUpdater.updateSubmissionResult(submission);
        return true;
    }

    /**
     * Updates each submission's result.
     * NOTE(review): unlike the batch submit() above, this returns false for an
     * empty list — confirm that asymmetry is intentional.
     */
    @Override
    public boolean checkSubmissionResult(List<Submission> submissions) {
        submissions.forEach(this::checkSubmissionResult);
        return !submissions.isEmpty();
    }

    @Override
    public ResourceName getResourceName() {
        return FAKE;
    }
}
# Generate a tiny executable launcher that forwards its arguments to `node main.js`.
# BUG FIX: the generated script used $*, which re-splits quoted multi-word
# arguments; "$@" preserves each argument exactly as passed.
printf '#!/bin/bash\nnode main.js "$@"' > program
chmod +x program
|
#!/bin/bash
# Step 4 of the local dev bootstrap: clones/builds the telos-decide contracts,
# deploys them to the local chain, creates the VOTE treasury and registers
# test voters with minted balances.
HOME=/var/www/telos-dex-contract
EOSIO_CDT_HOME=/usr/opt/eosio.cdt/1.6.3
# -----------------------------------
EOS_DIR=$HOME/libraries/eos
TELOSDECIDE_DIR=$HOME/libraries/telos-decide
DECIDE_CONTRACT_DIR=$TELOSDECIDE_DIR/contracts/decide
TELOS_CONTRACTS_DIR=$TELOSDECIDE_DIR/libraries/telos.contracts/contracts
EOS_CMAKEMODULES_DIR=$HOME/libraries/eos/CMakeModules
EOSIO_CDT_CMAKE_PATH=$EOSIO_CDT_HOME/lib/cmake/eosio.cdt
echo "-------- 4_load_telos_decide ---------"
# sudo apt-get install cmake -y
# Clone (with submodules) only if not already present.
if [ ! -d $TELOSDECIDE_DIR ]; then
    cd $HOME/libraries/
    echo "Cloning telos-decide recursively"
    git clone --recursive https://github.com/telosnetwork/telos-decide.git
else
    echo "$TELOSDECIDE_DIR OK!"
fi
cd $TELOSDECIDE_DIR
# Start from a clean cmake state on every run.
rm $TELOSDECIDE_DIR/build -fr &>>/dev/null
rm $TELOSDECIDE_DIR/CMakeFiles -fr &>>/dev/null
rm $TELOSDECIDE_DIR/CMakeCache.txt &>>/dev/null
# this anulates test building
echo "cmake_minimum_required( VERSION 3.5 )" > $TELOSDECIDE_DIR/tests/CMakeLists.txt
echo "-------- cmake ---------"
cmake -DCMAKE_MODULE_PATH=$EOSIO_CDT_CMAKE_PATH .
echo "-------- make ---------"
make
echo "-------- set contract telos.decide ---------"
cleos set contract telos.decide $HOME/libraries/telos-decide/contracts/decide/ -p telos.decide@active
# /var/www/telos-dex-contract/libraries/telos-decide/contracts/decide/decide.abi
# /var/www/telos-dex-contract/libraries/telos-decide/contracts/watcher/watcher.abi
# /var/www/telos-dex-contract/libraries/telos-decide/libraries/telos.contracts/tests/test_contracts/eosio.msig.old/eosio.msig.abi
# /var/www/telos-dex-contract/libraries/telos-decide/libraries/telos.contracts/tests/test_contracts/eosio.system.old/eosio.system.abi
# /var/www/telos-dex-contract/libraries/telos-decide/tests/contracts/eosio.system/eosio.system.abi
# /var/www/telos-dex-contract/libraries/telos-decide/tests/contracts/eosio.token/eosio.token.abi
echo "-------- set telos.decide config ---------"
# NOTE(review): init is pushed twice with different version strings; only one
# can succeed (the contract is initialized once) — confirm which is intended.
cleos push action telos.decide init '["v2.0.2"]' -p telos.decide
cleos push action telos.decide init '["v2.0.2+"]' -p telos.decide
cleos get table telos.decide telos.decide config
sleep 1
echo "-------- set telos.decide newtreasury VOTE ---------"
# Fund the contract, then create a public VOTE treasury managed by eosio.
cleos push action eosio.token transfer '["eosio", "telos.decide", "1000.0000 TLOS", "deposit"]' -p eosio@active
cleos push action telos.decide newtreasury '["eosio","10000000000.0000 VOTE","public"]' -p eosio
cleos get table telos.decide telos.decide treasuries
echo "pause..."
sleep 1
echo "-------- registering voters ---------"
cleos push action telos.decide regvoter '["telosmaindex","4,VOTE","telosmaindex"]' -p telosmaindex
cleos push action telos.decide regvoter '["alice", "4,VOTE", "alice"]' -p alice
cleos push action telos.decide regvoter '["bob", "4,VOTE", "bob"]' -p bob
cleos push action telos.decide regvoter '["kate", "4,VOTE", "kate"]' -p kate
cleos push action telos.decide regvoter '["tom", "4,VOTE", "tom"]' -p tom
cleos get table telos.decide telosmaindex voters
sleep 1
# Mint test VOTE balances for the registered accounts.
cleos push action telos.decide mint '["bob", "1000.0000 VOTE", ""]' -p eosio
cleos push action telos.decide mint '["alice", "1001.0000 VOTE", ""]' -p eosio
cleos push action telos.decide mint '["tom", "2000.0000 VOTE", ""]' -p eosio
cleos push action telos.decide mint '["kate", "1000.0000 VOTE", ""]' -p eosio
# cleos get currency balance eosio.token bob TLOS
# cleos get currency balance eosio.token alice TLOS
# cleos get currency balance eosio.token tom TLOS
# cleos get currency balance eosio.token kate TLOS
echo "pause..."
sleep 1
echo "-------- 4_load_telos_decide finished ---------"
|
// https://open.kattis.com/problems/favourable
#include <iostream>
#include <sstream>
using namespace std;
typedef long long ll;
// One branching point of the story: either a terminal page (end == true,
// ok == favourable ending?) or exactly three onward page choices.
struct section {
    bool end;
    bool ok;
    int choices[3];
};

// cache[k] = number of favourable endings reachable from page k; -1 = unknown.
ll cache[401];

// Counts the paths from page k that reach a favourable ending (memoized).
ll c(const section a[], int k) {
    if (a[k].end) return a[k].ok ? 1 : 0;
    if (cache[k] != -1) return cache[k];
    ll s = 0;
    for (int i = 0; i < 3; i++)
        s += c(a, a[k].choices[i]);
    cache[k] = s;
    return s;
}

int main() {
    int t;
    cin >> t;
    section a[401];
    while (t--) {
        // Reset the memo table for each independent test case.
        for (int i = 0; i < 401; i++) cache[i] = -1;
        int n;
        cin >> n;
        for (int i = 0; i < n; i++) {
            int k;
            string s;
            cin >> k >> s;
            // "f..." / "c..." mark terminal pages (favourable / catastrophic);
            // otherwise s is the first of three choice targets, the remaining
            // two are read directly from cin.
            if (s[0] == 'f' || s[0] == 'c') a[k] = {true, s[0] == 'f', {0,0,0}};
            else {
                stringstream in(s);
                int x, y, z;
                in >> x;
                cin >> y >> z;
                a[k] = {false, false, {x, y, z}};
            }
        }
        // The story always starts on page 1.
        cout << c(a, 1) << endl;
    }
}
|
import { Component } from '@angular/core';
@Component({
selector: 'app-item-list',
template: `
<h2>Item List</h2>
<ul>
<li *ngFor="let item of items">{{ item }}</li>
</ul>
<input type="text" [(ngModel)]="newItem">
<button (click)="addItem()">Add Item</button>
`
})
export class ItemListComponent {
items: string[] = [];
// Method to initialize the list of items
ngOnInit(): void {
this.items = ['Item 1', 'Item 2', 'Item 3'];
}
// Method to add new items to the list
addItem(): void {
if (this.newItem) {
this.items.push(this.newItem);
this.newItem = '';
}
}
} |
package nl.rutgerkok.bedsock.event;
/**
 * Controls when your event handler is called: priorities run in declaration
 * order, from {@link #EARLIEST} to {@link #MONITOR}.
 */
public enum EventPriority {
    /**
     * Should not be used under normal circumstances. Useful if you need to look at
     * the unmodified event.
     */
    EARLIEST,
    /**
     * Suitable for plugins that want to make their action known to other plugins. A
     * land protection plugin would register here, so that all other plugins running
     * at later priorities can observe its decision.
     */
    EARLY,
    /**
     * Suitable for most plugins.
     */
    NORMAL,
    /**
     * Suitable to plugins that override another plugin.
     */
    LATE,
    /**
     * Should not be used under normal circumstances. Useful if you need to override
     * a plugin that has the {@link #LATE} priority.
     */
    LATEST,
    /**
     * Suitable for plugins that want to know the outcome of the event. Do not
     * modify the cancelled state here.
     */
    MONITOR
}
|
/**
 * Sorts an array of comparable values in ascending order using insertion sort.
 * Operates in place and returns the same array.
 * @param {Array} arr - array to sort (mutated)
 * @returns {Array} the sorted input array
 */
function insertionSort(arr) {
  for (let sortedEnd = 1; sortedEnd < arr.length; sortedEnd++) {
    const value = arr[sortedEnd];
    let slot = sortedEnd;
    // Shift larger elements one step right until value's slot is found.
    while (slot > 0 && arr[slot - 1] > value) {
      arr[slot] = arr[slot - 1];
      slot--;
    }
    arr[slot] = value;
  }
  return arr;
}

console.log(insertionSort([6, 5, 4, 3, 2, 1]));
import os
from argparse import ArgumentParser
import sys
sys.path.append('../')
from tqdm import tqdm
import json
from transformers import BertTokenizer
import numpy as np
from random import random, shuffle, choice, sample
import collections
import traceback
from multiprocessing import Pool, Value, Lock
from tempfile import TemporaryDirectory
from pathlib import Path
import shelve
from collections import Counter
import math
import copy
import sys
# Deep recursion limit for get_text's recursive DOM traversal on nested pages.
sys.setrecursionlimit(10000)
# (index, label): position of a masked token and the original token there.
MaskedLmInstance = collections.namedtuple("MaskedLmInstance",
                                          ["index", "label"])
# Cross-process lock + counters: workers append to a shared output file.
lock = Lock()
num_instances = Value('i', 0)
num_docs = Value('i', 0)
num_words = Value('i', 0)
TEMP_DIR = '../'
from transformers import BertTokenizer, BertModel
# Local paths to the pretrained tokenizer/model and the HTML tag vocabulary.
BertPath = '../bert_base_uncased'
tag_vocab_path = '/home/yu_guo/DataPreProcess/data/tag.vocab'
bert_tokenizer = BertTokenizer.from_pretrained(BertPath)
def get_text(html, i):
    """Recursively linearizes DOM node i of `html` into BERT tokens.

    Returns (tokens, type_idx, depth): type_idx marks each token as
    0 = text token, 1 = opening-tag token, 2 = closing-tag token; depth is the
    height of the subtree rooted at i. Uses the module-level `bert_tokenizer`.
    """
    depth = 0
    word = []
    type_idx = []
    tag_name = html[i]["name"]
    tag_children = html[i]["children"]
    tag_text = html[i]["text"]
    tag_idx = html[i]["id"]
    if tag_name == "textnode":
        # Leaf node: tokenize the raw text; every token is plain text (type 0).
        res = bert_tokenizer.tokenize(tag_text)
        return res,[0]*len(res),depth
    else:
        # Element node: concatenate all children, tracking the deepest subtree.
        for child_idx in tag_children:
            inner_word,inner_type_idx,tag_depth = get_text(html,int(child_idx))
            word += inner_word
            type_idx += inner_type_idx
            depth = max(depth,tag_depth)
        depth += 1
        assert len(type_idx)==len(word)
        # NOTE(review): the closing marker is also "<name>" (not "</name>");
        # open vs close is distinguished only by type_idx 1 vs 2 — confirm intended.
        return ["<"+tag_name+">"]+word+["<"+tag_name+">"],[1]+type_idx+[2],depth
def get_position(x):
    """Returns the inverse permutation of x: output[i] is the index of value i in x.

    If a value occurs more than once its last index wins; a KeyError is raised
    when some i in range(len(x)) is missing from x.
    """
    value_to_index = {value: index for index, value in enumerate(x)}
    return [value_to_index[i] for i in range(len(x))]
def pad_positions(positions, max_num):
    """Right-pads each row of `positions` with zeros to length max_num (in place).

    Rows already at or beyond max_num are left unchanged; the (mutated) input
    list is returned for convenience.
    """
    for row_idx, row in enumerate(positions):
        positions[row_idx] = row + [0] * (max_num - len(row))
    return positions
def get_layer_info(nodes,max_layer_num,node_max_length,seq_max_length):
    """BFS-linearizes a DOM tree (`nodes`, root at index 0) into per-layer structures.

    Element nodes shallower than max_layer_num become "node" entries
    (['[CLS]', '<tag>'] plus a waiting mask over their kept children); text
    nodes — and any node at max_layer_num — are flattened via get_text into
    "text" entries truncated to seq_max_length-1. Returns a dict with the
    per-layer counts, layer indices, positions and masks, or None if some
    element node has no meaningful (non-blank) children.
    """
    tag = 1
    node_queue=[]
    nodes_info = []
    texts_info = []
    attention_mask = []  # NOTE(review): unused — confirm leftover
    nodes[0]["depth"] = 1
    node_queue.append(nodes[0])
    node_layer_index = []
    text_layer_index = []
    waiting_masks = []
    positions = []
    node_position = []
    text_position = []
    last_depth = 1
    text_num = []
    node_num = []
    current_idx = 0
    max_layer_length = 0
    while(len(node_queue)>0):
        # Layer boundary: flush the finished layer's node/text positions.
        if node_queue[0]['depth'] != last_depth and node_queue[0]['depth'] <= max_layer_num:
            node_num.append(len(node_position))
            text_num.append(len(text_position))
            node_position.extend(text_position)
            node_position = get_position(node_position)
            max_layer_length = max(max_layer_length,len(node_position))
            positions.append(node_position)
            node_position = []
            text_position = []
            current_idx = 0
            last_depth = node_queue[0]['depth']
        tag_idx = node_queue[0]['id']
        if node_queue[0]["name"] == "textnode" or node_queue[0]["depth"] == max_layer_num :
            # Text entry (or depth cutoff): flatten the whole subtree to tokens.
            text_info = {}
            text,type_idx,_ = get_text(nodes,tag_idx)
            text_layer_index.append(node_queue[0]["depth"])
            node_queue = node_queue[1:]
            text_info['text'] = text[:seq_max_length-1]
            text_info['type_idx'] = type_idx[:seq_max_length-1]
            text_position.append(current_idx)
            current_idx+=1
            texts_info.append(text_info)
        else:
            # Element entry: keep non-blank children (capped at node_max_length-2)
            # and enqueue them one layer deeper.
            node_info = {}
            tag_name = node_queue[0]["name"]
            children = node_queue[0]["children"]
            meaning_children = []
            for child in children:
                if nodes[child]["name"] == 'textnode' and nodes[child]['text'].strip()=="":
                    continue
                else:
                    meaning_children.append(child)
            children = meaning_children
            children = children[:node_max_length-2]
            children_num = len(children)
            if children_num == 0:
                # Degenerate tree: element with no meaningful children → give up.
                tag = 0
                break
            for child in children:
                nodes[child]["depth"]=node_queue[0]["depth"]+1
                node_queue.append(nodes[child])
            text=['[CLS]','<'+tag_name+'>']
            #type_idx=[0,1]
            node_layer_index.append(node_queue[0]["depth"])
            node_position.append(current_idx)
            current_idx+=1
            # Mask: 0 for the two prefix tokens, 1 per child slot, 0-padded.
            waiting_mask = [0]*2 + [1]*children_num + [0] * max(node_max_length-(children_num+2),0)
            waiting_mask = waiting_mask[:node_max_length]
            node_queue = node_queue[1:]
            node_info['text'] = text
            nodes_info.append(node_info)
            waiting_masks.append(waiting_mask)
    if tag==0:
        return None
    # Flush the final layer, then zero-pad the per-layer lists to max_layer_num.
    text_num.append(len(text_position))
    node_num.append(len(node_position))
    node_position.extend(text_position)
    node_position = get_position(node_position)
    positions.append(node_position)
    max_layer_length = max(max_layer_length,len(node_position))
    if len(text_num) < max_layer_num:
        for i in range(max_layer_num - len(node_num)):
            positions.append([])
        text_num += [0] * (max_layer_num-len(text_num))
        node_num += [0] * (max_layer_num - len(node_num))
    positions = pad_positions(positions,max_layer_length)
    assert len(positions) == max_layer_num
    assert len(text_num) == max_layer_num
    assert len(node_num) == max_layer_num
    # nodes_info = nodes_info[:node_max_length]
    # layer_index = layer_index[:node_max_length]
    # waiting_masks = waiting_masks[:node_max_length]
    res = {
        "nodes_info":nodes_info,
        "texts_info":texts_info,
        "node_layer_index":node_layer_index,
        "text_layer_index":text_layer_index,
        "waiting_mask":waiting_masks,
        "node_num":node_num,
        "text_num":text_num,
        "position":positions
    }
    return res
def load_vocab(path):
    """Reads a vocabulary file: one entry per line, surrounding whitespace stripped.

    Blank lines become empty strings, matching the original line-by-line reader.
    """
    with open(path, 'r') as f:
        return [line.strip() for line in f]
# model
class DocumentDatabase:
    """Append-only store of documents, optionally disk-backed to save RAM.

    With reduce_memory=True, documents live in a shelve DB inside a temp
    directory created under the module-level TEMP_DIR; otherwise they are kept
    in a plain in-memory list. Intended for use as a context manager so the
    shelf and temp directory are cleaned up on exit.
    """

    def __init__(self, reduce_memory=False):
        if reduce_memory:
            self.temp_dir = TemporaryDirectory(dir=TEMP_DIR)
            self.working_dir = Path(self.temp_dir.name)
            self.document_shelf_filepath = self.working_dir / 'shelf.db'
            # flag='n' always creates a fresh shelf; protocol=-1 = latest pickle.
            self.document_shelf = shelve.open(str(self.document_shelf_filepath),
                                              flag='n', protocol=-1)
            self.documents = None
        else:
            self.documents = []
            self.document_shelf = None
            self.document_shelf_filepath = None
            self.temp_dir = None
        self.doc_lengths = []
        self.doc_cumsum = None
        self.cumsum_max = None
        self.reduce_memory = reduce_memory

    def add_document(self, document):
        """Appends a document (sequence of tokens); falsy/empty documents are dropped."""
        if not document:
            return
        if self.reduce_memory:
            current_idx = len(self.doc_lengths)
            self.document_shelf[str(current_idx)] = document
        else:
            self.documents.append(document)
        self.doc_lengths.append(len(document))

    def __len__(self):
        return len(self.doc_lengths)

    def __getitem__(self, item):
        if self.reduce_memory:
            return self.document_shelf[str(item)]
        else:
            return self.documents[item]

    def __contains__(self, item):
        # BUG FIX: the original unconditionally probed self.document_shelf,
        # which is None when reduce_memory=False and raised TypeError. Fall
        # back to an index-range check against the in-memory store instead.
        if self.reduce_memory:
            return str(item) in self.document_shelf
        return isinstance(item, int) and 0 <= item < len(self.doc_lengths)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, traceback):
        # Release the shelf and its temp directory (no-ops in in-memory mode).
        if self.document_shelf is not None:
            self.document_shelf.close()
        if self.temp_dir is not None:
            self.temp_dir.cleanup()
def create_masked_lm_predictions(tokens, masked_lm_prob, max_predictions_per_seq, whole_word_mask, vocab_list):
    """Creates the predictions for the masked LM objective. This is mostly copied
    from the Google BERT repo, but with several refactors to clean it up and
    remove a lot of unnecessary variables.

    Mutates `tokens` in place with BERT-style 80/10/10 masking and returns
    (tokens, mask_indices, masked_token_labels), where mask_indices are the
    masked positions in ascending order and masked_token_labels are the
    original tokens at those positions.
    """
    cand_indices = []
    # Whole Word Masking means that if we mask all of the wordpieces
    # corresponding to an original word. When a word has been split into
    # WordPieces, the first token does not have any marker and any subsequence
    # tokens are prefixed with ##. So whenever we see the ## token, we
    # append it to the previous set of word indexes.
    #
    # Note that Whole Word Masking does *not* change the training code
    # at all -- we still predict each WordPiece independently, softmaxed
    # over the entire vocabulary.
    for (i, token) in enumerate(tokens):
        if (whole_word_mask and len(cand_indices) >= 1 and token.startswith("##")):
            cand_indices[-1].append(i)
        else:
            cand_indices.append([i])
    num_to_mask = min(max_predictions_per_seq, max(1, int(round(len(cand_indices) * masked_lm_prob))))
    # Visit candidates in random order; the first ones fitting the budget win.
    shuffle(cand_indices)
    # BUG FIX: removed the dead statement
    #   mask_indices = sorted(sample(cand_indices, num_to_mask))
    # — its value was unconditionally recomputed at the end of the function,
    # and sample() raised ValueError whenever len(cand_indices) < num_to_mask
    # (e.g. for very short or empty token lists).
    masked_lms = []
    covered_indexes = set()
    for index_set in cand_indices:
        if len(masked_lms) >= num_to_mask:
            break
        # If adding a whole-word mask would exceed the maximum number of
        # predictions, then just skip this candidate.
        if len(masked_lms) + len(index_set) > num_to_mask:
            continue
        is_any_index_covered = False
        for index in index_set:
            if index in covered_indexes:
                is_any_index_covered = True
                break
        if is_any_index_covered:
            continue
        for index in index_set:
            covered_indexes.add(index)
            masked_token = None
            # 80% of the time, replace with [MASK]
            if random() < 0.8:
                masked_token = "[MASK]"
            else:
                # 10% of the time, keep original
                if random() < 0.5:
                    masked_token = tokens[index]
                # 10% of the time, replace with random word
                else:
                    masked_token = choice(vocab_list)
            masked_lms.append(MaskedLmInstance(index=index, label=tokens[index]))
            tokens[index] = masked_token
    assert len(masked_lms) <= num_to_mask
    masked_lms = sorted(masked_lms, key=lambda x: x.index)
    mask_indices = [p.index for p in masked_lms]
    masked_token_labels = [p.label for p in masked_lms]
    return tokens, mask_indices, masked_token_labels
def construct_pointwise_examples(examples,chunk_indexs,max_seq_len,max_tag_len,max_layer_num,mlm,bert_tokenizer,masked_lm_prob,
                                 max_predictions_per_seq,bert_vocab_list,epoch_filename,word2df,mu,total_doc_cnt):
    """Worker body: converts each DOM example indexed by `chunk_indexs` into one
    JSON pre-training instance and appends it to `epoch_filename`.

    Relies on module globals: `lock`/`num_instances` (cross-process output
    coordination), `tqdm`, `np`, `json`, plus get_layer_info and
    create_masked_lm_predictions.
    NOTE(review): word2df, mu and total_doc_cnt are accepted but never used
    here; num_examples/num_instance/res/instances below are also dead —
    presumably leftovers of an earlier sampling scheme, confirm before removal.
    """
    num_examples = len(examples)
    num_instance = 0
    wrong = 0
    for doc_idx in tqdm(chunk_indexs):
        example = examples[doc_idx]
        # Flatten the DOM into per-layer node/text sequences; None means the
        # tree had an element node with no meaningful children.
        layer_info = get_layer_info(example,max_layer_num=max_layer_num,node_max_length=max_tag_len,seq_max_length = max_seq_len)
        if layer_info is None:
            wrong+=1
            continue
        nodes_info = layer_info["nodes_info"]
        texts_info = layer_info["texts_info"]
        node_layer_index = layer_info["node_layer_index"]
        text_layer_index = layer_info["text_layer_index"]
        waiting_mask = layer_info["waiting_mask"]
        node_num = layer_info["node_num"]
        text_num = layer_info["text_num"]
        position = layer_info["position"]
        tag_len = len(node_layer_index)
        res={}
        instances = []
        nodes_tokens = []
        nodes_tokens_idx = []
        texts_tokens = []
        texts_tokens_idx = []
        texts_type_idxs = []
        text_labels = []
        node_labels = []
        # Tag-node entries: optionally MLM-mask, then pad to max_tag_len.
        for node_info in nodes_info:
            tokens = node_info['text']
            if mlm:
                tokens,masked_lm_positions,masked_lm_labels = create_masked_lm_predictions(
                    tokens, masked_lm_prob, max_predictions_per_seq, True, bert_vocab_list)
            else:
                masked_lm_positions, masked_lm_labels = [], []
            tokens_idx = bert_tokenizer.convert_tokens_to_ids(tokens)
            tokens_idx_labels = bert_tokenizer.convert_tokens_to_ids(masked_lm_labels)
            nodes_tokens.append(tokens+(max_tag_len-len(tokens))*['PAD'])
            nodes_tokens_idx.append(tokens_idx+(max_tag_len-len(tokens))*[0])
            # -100 marks positions that do not contribute to the MLM loss.
            node_label = np.array([-100] * max_tag_len)
            node_label[masked_lm_positions] = tokens_idx_labels
            node_labels.append(node_label.tolist())
        # Text entries: prepend [CLS], truncate to max_seq_len, mask and pad.
        for node_info in texts_info:
            text = node_info['text']
            type_idxs = node_info['type_idx']
            tokens = ["[CLS]"] + text
            tokens = tokens[:max_seq_len]
            type_idxs = [0] + type_idxs
            type_idxs = type_idxs[:max_seq_len]
            if mlm:
                tokens, masked_lm_positions, masked_lm_labels = create_masked_lm_predictions(
                    tokens, masked_lm_prob, max_predictions_per_seq, True, bert_vocab_list)
            else:
                masked_lm_positions, masked_lm_labels = [], []
            # else:
            #     tokens = node_info['text']
            #     type_idxs = node_info['type_idx']
            #     masked_lm_positions, masked_lm_labels = [], []
            tokens_idx = bert_tokenizer.convert_tokens_to_ids(tokens)
            tokens_idx_labels = bert_tokenizer.convert_tokens_to_ids(masked_lm_labels)
            texts_tokens.append(tokens+(max_seq_len-len(tokens))*['PAD'])
            texts_tokens_idx.append(tokens_idx+(max_seq_len-len(tokens))*[0])
            texts_type_idxs.append(type_idxs+(max_seq_len-len(tokens))*[0])
            text_label = np.array([-100] * max_seq_len)
            text_label[masked_lm_positions] = tokens_idx_labels
            text_labels.append(text_label.tolist())
        # Flatten everything into one JSON record for this document.
        instance = {
            "node_tokens_idx":sum(nodes_tokens_idx,[]),
            "text_tokens_idx":sum(texts_tokens_idx,[]),
            "inputs_type_idx":sum(texts_type_idxs,[]),
            "node_labels":sum(node_labels,[]),
            "text_labels":sum(text_labels,[]),
            "node_layer_index":list(node_layer_index),
            "text_layer_index":list(text_layer_index),
            "waiting_mask":sum(waiting_mask,[]),
            "position":sum(position,[]),
            "node_num":list(node_num),
            "text_num":list(text_num),
        }
        doc_instances=json.dumps(instance,ensure_ascii=False)
        # Serialize file appends and the shared counter across worker processes.
        lock.acquire()
        with open(epoch_filename,'a+') as epoch_file:
            epoch_file.write(doc_instances + '\n')
            num_instances.value += 1
        lock.release()
    print("wrong:",wrong)
def error_callback(e):
    """Error callback for Pool.apply_async: report a worker exception with its traceback.

    multiprocessing only delivers the exception object here; without printing the
    traceback the worker failure would be effectively silent. Returns None.
    """
    # Identify the failure clearly instead of dumping dir(e) (debug leftover
    # in the original, which buried the actual error in attribute noise).
    print('error in worker: %r' % (e,))
    traceback.print_exception(type(e), e, e.__traceback__)
if __name__ == "__main__":
    # Command-line driver: pre-generates masked-LM training instances for a
    # markup/DOM-aware BERT model, writing one JSON-lines file per epoch via a
    # pool of worker processes (construct_pointwise_examples, defined above).
    parser = ArgumentParser()
    parser.add_argument('--train_corpus', type=str, required=True)
    parser.add_argument("--do_lower_case", action="store_true")
    parser.add_argument("--bert_model", type=str, default='bert-base-uncased')
    parser.add_argument("--reduce_memory", action="store_true",
                        help="Reduce memory usage for large datasets by keeping data on disc rather than in memory")
    parser.add_argument("--epochs_to_generate", type=int, default=1,
                        help="Number of epochs of data to pregenerate")
    # parser.add_argument("--output_dir", type=str, required=True)
    parser.add_argument("--max_seq_len", type=int, default=256)
    parser.add_argument("--max_tag_len", type=int, default=10)
    parser.add_argument("--max_layer_num",type=int, default=5)
    parser.add_argument("--mlm", action="store_true")
    parser.add_argument("--masked_lm_prob", type=float, default=0.15,
                        help="Probability of masking each token for the LM task")
    parser.add_argument("--max_predictions_per_seq", type=int, default=60,
                        help="Maximum number of tokens to mask in each sequence")
    parser.add_argument("--rop_num_per_doc", type=int, default=1,
                        help="How many samples for each document")
    parser.add_argument("--pairnum_per_doc", type=int, default=2,
                        help="How many samples for each document")
    parser.add_argument("--num_workers", type=int, default=16,
                        help="The number of workers to use to write the files")
    # NOTE(review): the help text below is copy-pasted from --num_workers;
    # --mu is passed through to construct_pointwise_examples — confirm its meaning.
    parser.add_argument("--mu", type=int, default=512,
                        help="The number of workers to use to write the files")
    parser.add_argument('--output_dir', type=str, required=True)
    args = parser.parse_args()

    # BertPath is a module-level constant defined earlier in this file
    # (note: args.bert_model is parsed but BertPath is what is actually used).
    bert_tokenizer = BertTokenizer.from_pretrained(BertPath)
    bert_model = BertModel.from_pretrained(BertPath)
    # ADDITIONAL_SPECIAL_TOKENS = load_vocab(tag_vocab_path)
    # bert_tokenizer.add_special_tokens({"additional_special_tokens": ADDITIONAL_SPECIAL_TOKENS})
    #bert_tokenizer.add_special_tokens({"additional_special_tokens": ADDITIONAL_SPECIAL_TOKENS})
    bert_model.resize_token_embeddings(len(bert_tokenizer))
    bert_param_ids = list(map(id, bert_model.parameters()))
    bert_vocab_list = list(bert_tokenizer.vocab.keys())
    # word2df = {}
    # with open(os.path.join(args.output_dir, f"word2df.json"), 'r') as wdf:
    #     word2df = json.loads(wdf.read().strip())
    word2df = None
    examples = []

    with DocumentDatabase(reduce_memory=args.reduce_memory) as docs:
        # Load the corpus: one JSON document per line.
        with open(args.train_corpus,encoding='UTF-8') as f:
            idx = 0
            for line in tqdm(f, desc="Loading Dataset", unit=" lines"):
                idx+=1
                example = json.loads(line.strip())
                #example = json.loads(line)
                # examples.append(example)
                docs.add_document(example)
        print('Reading file is done! Total doc num:{}'.format(len(docs)))

        for epoch in range(args.epochs_to_generate):
            epoch_filename = f"{args.output_dir}/epoch_{epoch}.json"
            # Truncate any leftover file from a previous run; workers append to it.
            # NOTE(review): the print only happens when the file pre-existed —
            # possibly unintended indentation; confirm intent.
            if os.path.exists(epoch_filename):
                with open(epoch_filename, "w") as ef:
                    print(f"start generating {epoch_filename}")
            num_processors = args.num_workers
            processors = Pool(num_processors)
            cand_idxs = list(range(0, len(docs)))
            # Split doc indices evenly across workers.
            # NOTE(review): integer division drops up to num_processors-1
            # trailing docs when len(docs) is not divisible — confirm acceptable.
            for i in range(num_processors):
                chunk_size = int(len(cand_idxs) / num_processors)
                chunk_indexs = cand_idxs[i*chunk_size:(i+1)*chunk_size]
                # print("?")
                r = processors.apply_async(construct_pointwise_examples, (docs, chunk_indexs, args.max_seq_len,args.max_tag_len,args.max_layer_num, args.mlm, bert_tokenizer, args.masked_lm_prob, \
                    args.max_predictions_per_seq, bert_vocab_list, epoch_filename, word2df, args.mu, len(docs)), error_callback=error_callback)
            processors.close()
            processors.join()

            # Write per-epoch metrics; num_instances is a shared multiprocessing
            # counter incremented by the workers.
            metrics_file = f"{args.output_dir}/epoch_{epoch}_metrics.json"
            # NOTE(review): the file handle shadows the path variable
            # `metrics_file` — harmless here but confusing; candidate rename.
            with open(metrics_file, 'w') as metrics_file:
                metrics = {
                    "num_training_examples": num_instances.value,
                    "max_seq_len": args.max_seq_len
                }
                metrics_file.write(json.dumps(metrics))
<reponame>jneurock/gulp-viking-posts
/**
* Module dependencies.
*/
var toFunction = require('to-function');
var type;
try {
type = require('type-component');
} catch (e) {
type = require('type');
}
/**
* HOP reference.
*/
var has = Object.prototype.hasOwnProperty;
/**
* Iterate the given `obj` and invoke `fn(val, i)`.
*
* @param {String|Array|Object} obj
* @param {Function} fn
* @api public
*/
module.exports = function(obj, fn){
fn = toFunction(fn);
switch (type(obj)) {
case 'array':
return array(obj, fn);
case 'object':
if ('number' == typeof obj.length) return array(obj, fn);
return object(obj, fn);
case 'string':
return string(obj, fn);
}
};
/**
 * Iterate the characters of a string.
 *
 * Invokes `fn(char, index)` for each character, in order.
 *
 * @param {String} obj
 * @param {Function} fn
 * @api private
 */

function string(obj, fn) {
  var len = obj.length;
  for (var idx = 0; idx < len; idx++) {
    fn(obj.charAt(idx), idx);
  }
}
/**
 * Iterate an object's own enumerable keys.
 *
 * Invokes `fn(key, value)` for each own key; inherited keys are
 * filtered out via the cached hasOwnProperty reference.
 *
 * @param {Object} obj
 * @param {Function} fn
 * @api private
 */

function object(obj, fn) {
  for (var key in obj) {
    if (!has.call(obj, key)) continue;
    fn(key, obj[key]);
  }
}
/**
 * Iterate an array or array-like.
 *
 * Invokes `fn(value, index)` for each element; `length` is re-read on
 * every step so growth during iteration is observed, as before.
 *
 * @param {Array|Object} obj
 * @param {Function} fn
 * @api private
 */

function array(obj, fn) {
  var idx = 0;
  while (idx < obj.length) {
    fn(obj[idx], idx);
    idx += 1;
  }
}
|
<filename>src/string_handle/Boj17214.java
package string_handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
 * Baekjoon Online Judge problem 17214: integral of a polynomial function.
 *
 * Reads one line containing a polynomial of degree at most 1 (e.g. "2x+3",
 * "-x+1", "5", "0") and prints its indefinite integral, using "W" as the
 * constant of integration. "xx" in the output denotes x squared.
 *
 * @author minchoba
 * @see https://www.acmicpc.net/problem/17214/
 */
public class Boj17214 {
    private static final String X = "x";
    private static final String PLUS = "+";
    private static final String MINUS = "-";
    private static final String INTEGRAL_CONSTANT = "W"; // constant of integration

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        String polynomial = br.readLine();

        System.out.println(getIntegral(polynomial));
    }

    /**
     * Builds the integral of {@code line} as a string.
     *
     * Handles three shapes: the zero polynomial, a pure constant, and a
     * linear term optionally followed by a constant. Coefficients of 1/-1
     * are collapsed to "x"/"-x" in the output.
     */
    private static StringBuilder getIntegral(String line) {
        StringBuilder sb = new StringBuilder();
        if(line.equals("0")) return sb.append(INTEGRAL_CONSTANT); // integrand 0: only the constant W remains

        if(!line.contains(X)) { // constant integrand c -> cx + W
            return sb.append(line.equals("1") || line.equals("-1") ? (line.equals("1") ? X: MINUS + X): line + X).append(PLUS).append(INTEGRAL_CONSTANT);
        }
        else {
            boolean prev = true, post = true; // sign of x-coefficient / constant term: true means positive
            if(line.charAt(0) == MINUS.charAt(0)) prev = false;

            String next = line.substring(1);
            if(next.contains(MINUS)) post = false;

            int idx = line.indexOf(X);
            // ax integrates to (a/2)x^2; integer division matches the judge's format.
            // NOTE(review): assumes the x term always carries an explicit integer
            // coefficient — a bare "x" would make substring(0, idx) empty and
            // parseInt throw; confirm against the problem's input guarantees.
            int head = Integer.parseInt(line.substring(0, idx)) / 2;

            if(line.length() - 1 == idx) { // linear term only (x is the last char)
                return sb.append(head == 1 || head == -1 ? (prev ? X + X: MINUS + X + X) : head + X + X).append(PLUS).append(INTEGRAL_CONSTANT);
            }
            else { // linear term plus a constant tail
                int tail = Integer.parseInt(line.substring(idx + 1));
                return sb.append(head == 1 || head == -1 ? (prev ? X + X: MINUS + X + X) : head + X + X).append(post ? PLUS: "")
                        .append(tail == 1 || tail == -1 ? (post ? X: MINUS + X) : tail + X).append(PLUS).append(INTEGRAL_CONSTANT);
            }
        }
    }
}
|
package com.tui.proof.ws.utils;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.CollectionType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Utility for deserializing a JSON array string into a typed {@link List}.
 */
public class JsonToCollectionUtil {

    private JsonToCollectionUtil(){}

    /**
     * Deserializes a JSON array into a {@code List<T>}.
     *
     * @param json         JSON text containing an array
     * @param elementClass runtime class of the list elements
     * @return a list of {@code elementClass} instances
     * @throws IOException if the JSON is malformed or cannot be bound
     */
    public static <T> List<T> jsonArrayToList(String json, Class<T> elementClass) throws IOException {
        ObjectMapper objectMapper = new ObjectMapper();
        CollectionType listType = objectMapper.getTypeFactory().constructCollectionType(ArrayList.class, elementClass);
        return objectMapper.readValue(json, listType);
    }

    /**
     * Same contract as {@link #jsonArrayToList(String, Class)}.
     *
     * <p>The previous implementation used {@code new TypeReference<List<T>>() {}}
     * inside a generic method; {@code T} is erased at runtime, so Jackson bound
     * the elements to {@code LinkedHashMap} instead of {@code T}. Delegating to
     * the {@link CollectionType}-based overload preserves the element type.
     */
    public static <T> List<T> jsonArrayToList2(String json, Class<T> elementClass) throws IOException {
        return jsonArrayToList(json, elementClass);
    }
}
|
#!/bin/bash
# Helper script to download and run the build-ffmpeg script.
# make_dir <dir> — create <dir> if it does not already exist.
# Exits the script with status 1 when creation fails.
make_dir () {
  # "$1" is quoted so directory names containing spaces work; the original
  # unquoted $1 word-split and broke both the test and the mkdir.
  if [ ! -d "$1" ]; then
    if ! mkdir "$1"; then
      printf "\n Failed to create dir %s" "$1";
      exit 1
    fi
  fi
}
# command_exists <name> — succeed (return 0) when <name> resolves to an
# executable on PATH, fail (return 1) otherwise.
command_exists() {
  [[ -x $(command -v "$1") ]] && return 0
  return 1
}
TARGET='ffmpeg-build'

# curl is required to fetch the remote build script; abort early without it.
if ! command_exists "curl"; then
  echo "curl not installed.";
  exit 1
fi

echo "ffmpeg-build-script-downloader v0.1"
echo "========================================="
echo ""

echo "First we create the ffmpeg build directory $TARGET"
make_dir "$TARGET"

# Guard the cd (shellcheck SC2164): if it fails, the downloaded build script
# would otherwise run in the wrong directory.
cd "$TARGET" || exit 1

echo "Now we download and execute the build script"
echo ""
bash <(curl -s https://raw.githubusercontent.com/puligclone/ffmpeg-build-script/master/build-ffmpeg) --build --enable-gpl-and-non-free
|
import _map from 'lodash.map'
/**
 * Generate fontWeight definitions based on the fontWeight config.
 *
 * Produces `font-<key>` entries. Uses Object.entries/forEach instead of
 * lodash `_map`, which the original used purely for side effects while
 * discarding the mapped array. A missing `fontWeight` section yields an
 * empty object, matching the old lodash behavior on undefined input.
 *
 * @param {object} config Configuration object
 * @return {object} fontWeight definition object
 *
 * @example
 *
 * FontWeight({fontWeight: {...}})
 */
function FontWeight( config ) {
  const defs = {}
  Object.entries( config.fontWeight || {} ).forEach( ([key, value]) => {
    defs[`font-${key}`] = { fontWeight : value }
  } )
  return defs;
}
/**
 * Generate fontFamily definitions based on the fontFamily config.
 * (The previous doc comment was a copy-paste of the FontWeight one.)
 *
 * `config.fontFamily` is a two-level map of type -> weight -> family value,
 * flattened to `font-<type>-<weight>` keys. Missing sections yield an empty
 * object, matching the old lodash `_map(undefined)` behavior.
 *
 * @param {object} config Configuration object
 * @return {object} fontFamily definition object
 *
 * @example
 *
 * FontFamily({fontFamily: {...}})
 */
function FontFamily( config ) {
  const defs = {}
  Object.entries( config.fontFamily || {} ).forEach( ([type, level]) => {
    Object.entries( level || {} ).forEach( ([weight, value]) => {
      defs[`font-${type}-${weight}`] = { fontFamily: value }
    } )
  } )
  return defs;
}
/**
 * Generate fontSize definitions based on the fontSize config.
 *
 * Each configured size is a multiplier of `config.baseFontSize`. Uses
 * Object.entries/forEach instead of lodash `_map`, which was used purely
 * for side effects with its result discarded. A missing `fontSize` section
 * yields an empty object, matching the old behavior.
 *
 * @param {object} config Configuration object
 * @return {object} fontSize definition object
 *
 * @example
 *
 * FontSize({fontSize: {...}})
 */
function FontSize( config ) {
  const defs = {}
  Object.entries( config.fontSize || {} ).forEach( ([key, value]) => {
    defs[`font-${key}`] = { fontSize : value * config.baseFontSize }
  } )
  return defs;
}
/**
 * Generate letterSpacing definitions based on the letterSpacing config.
 *
 * Each configured tracking value is a multiplier of `config.baseFontSize`,
 * emitted under `tracking-<key>`. Uses Object.entries/forEach instead of
 * lodash `_map` (which was used only for side effects). A missing
 * `letterSpacing` section yields an empty object, matching the old behavior.
 *
 * @param {object} config Configuration object
 * @return {object} letterSpacing definition object
 *
 * @example
 *
 * LetterSpacing({letterSpacing: {...}})
 */
function LetterSpacing( config ) {
  const defs = {}
  Object.entries( config.letterSpacing || {} ).forEach( ([key, value]) => {
    defs[`tracking-${key}`] = { letterSpacing : value * config.baseFontSize }
  } )
  return defs;
}
/**
 * Generate lineHeight definitions based on the lineHeight config.
 *
 * Each configured leading value is a multiplier of `config.baseFontSize`,
 * emitted under `leading-<key>`. Uses Object.entries/forEach instead of
 * lodash `_map` (which was used only for side effects). A missing
 * `lineHeight` section yields an empty object, matching the old behavior.
 *
 * @param {object} config Configuration object
 * @return {object} lineHeight definition object
 *
 * @example
 *
 * LineHeight({lineHeight: {...}})
 */
function LineHeight( config ) {
  const defs = {}
  Object.entries( config.lineHeight || {} ).forEach( ([key, value]) => {
    defs[`leading-${key}`] = { lineHeight : value * config.baseFontSize }
  } )
  return defs;
}
/**
 * Combine all typography definitions into a single defs object.
 *
 * Generated families/weights/sizes/tracking/leading come first, then the
 * static alignment and transform utilities, in the same precedence order
 * as before.
 *
 * @param {object} config Configuration object
 * @return {object} typography definition object
 */
export default function( config ) {
  const generated = [
    FontFamily( config ),
    FontWeight( config ),
    FontSize( config ),
    LetterSpacing( config ),
    LineHeight( config ),
  ]

  const staticDefs = {
    // Text Alignment
    'text-left'    : { textAlign: 'left' },
    'text-center'  : { textAlign: 'center' },
    'text-right'   : { textAlign: 'right' },
    'text-justify' : { textAlign: 'justify' },

    // Text Transform
    'uppercase'    : { textTransform: 'uppercase'},
    'lowercase'    : { textTransform: 'lowercase'},
    'capitalize'   : { textTransform: 'capitalize'},
    'normal-case'  : { textTransform: 'none'},
  }

  return Object.assign({}, ...generated, staticDefs)
}
package com.jdc.app.service;
import static com.jdc.app.util.SqlHelper.*;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import com.jdc.app.entity.Author;
import com.jdc.app.entity.Book;
import com.jdc.app.entity.Category;
import com.jdc.app.util.ConnectionManager;
/**
 * Singleton data-access service for {@link Book} records.
 *
 * SQL statements are looked up by key via {@code SqlHelper.getSql(...)}
 * (static import) and connections come from {@link ConnectionManager}.
 * All data-access errors are caught and logged to stderr; methods fall
 * through to a default return value instead of propagating exceptions.
 */
public class BookService {

    // Lazily created shared instance.
    private static BookService INSTANCE;

    private BookService() {}

    /**
     * Returns the shared instance, creating it on first use.
     * NOTE(review): lazy init is not synchronized — fine for single-threaded
     * callers; confirm before using from multiple threads.
     */
    public static BookService getInstance() {
        if(null == INSTANCE)
            INSTANCE = new BookService();
        return INSTANCE;
    }

    /** Inserts {@code book} using the "book.insert" SQL template. */
    public void add(Book book) {
        try(Connection conn = ConnectionManager.getConnection();
                PreparedStatement stmt = conn.prepareStatement(getSql("book.insert"))) {
            stmt.setString(1, book.getName());
            stmt.setInt(2, book.getPrice());
            stmt.setDate(3, Date.valueOf(book.getReleaseDate()));
            stmt.setString(4, book.getRemark());
            stmt.setInt(5, book.getCategory().getId());
            stmt.setInt(6, book.getAuthor().getId());

            stmt.executeUpdate();
        } catch (Exception e) {
            // Best-effort: log and continue (consistent with the rest of the class).
            e.printStackTrace();
        }
    }

    /** Updates the row matching {@code book.getId()} via "book.update". */
    public void update(Book book) {
        try(Connection conn = ConnectionManager.getConnection();
                PreparedStatement stmt = conn.prepareStatement(getSql("book.update"))) {
            stmt.setString(1, book.getName());
            stmt.setInt(2, book.getPrice());
            stmt.setDate(3, Date.valueOf(book.getReleaseDate()));
            stmt.setString(4, book.getRemark());
            stmt.setInt(5, book.getCategory().getId());
            stmt.setInt(6, book.getAuthor().getId());
            stmt.setInt(7, book.getId());

            stmt.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Deletes the row matching {@code book.getId()} via "book.delete". */
    public void delete(Book book) {
        try(Connection conn = ConnectionManager.getConnection();
                PreparedStatement stmt = conn.prepareStatement(getSql("book.delete"))) {
            stmt.setInt(1, book.getId());
            stmt.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Returns every book (no filters applied). */
    public List<Book> findAll() {
        return findByParams(null, null, null, null);
    }

    /**
     * Searches books with optional filters; each null (or empty) argument
     * simply skips its WHERE clause. The base query comes from "book.find"
     * and clauses are appended with positional parameters.
     *
     * @param category    filter by category name, or null
     * @param authorName  filter by author name, or null
     * @param bookName    substring match on book name, or null/empty
     * @param releaseDate lower bound on release date (only applied when in the past), or null
     * @return matching books; empty list on error
     */
    public List<Book> findByParams(Category category, Author authorName, String bookName, LocalDate releaseDate) {
        List<Book> list = new ArrayList<>();
        StringBuilder sb = new StringBuilder(getSql("book.find"));
        List<Object> params = new LinkedList<>();

        if(null != category) {
            // NOTE(review): LIKE with the exact name and no wildcards behaves
            // as equality unless the template adds them — confirm "book.find".
            sb.append(" and c.name like ?");
            params.add(category.getName());
        }

        if(null != authorName) {
            sb.append(" and a.name like ?");
            params.add(authorName.getName());
        }

        if(null != bookName && !bookName.isEmpty()) {
            // Substring match: wildcards added on both sides.
            sb.append(" and b.name like ?");
            params.add("%".concat(bookName).concat("%"));
        }

        if(null != releaseDate && releaseDate.isBefore(LocalDate.now())) {
            sb.append(" and release_date >= ?");
            params.add(Date.valueOf(releaseDate));
        }

        try(Connection conn = ConnectionManager.getConnection();
                PreparedStatement stmt = conn.prepareStatement(sb.toString())) {

            // Bind accumulated parameters in clause order (JDBC is 1-indexed).
            for (int i = 0; i < params.size(); i++) {
                stmt.setObject(i + 1, params.get(i));
            }

            ResultSet rs = stmt.executeQuery();

            while(rs.next())
                list.add(getObject(rs));

        } catch (Exception e) {
            e.printStackTrace();
        }
        return list;
    }

    /**
     * Maps the current {@link ResultSet} row to a {@link Book}, including its
     * nested {@link Category} and {@link Author} (names/age/country only —
     * their ids are not selected by the query).
     */
    public Book getObject(ResultSet rs) throws SQLException {
        Book book = new Book();
        book.setId(rs.getInt("id"));
        book.setName(rs.getString("book_name"));
        book.setPrice(rs.getInt("price"));
        book.setReleaseDate(rs.getDate("release_date").toLocalDate());
        book.setRemark(rs.getString("remark"));

        Category cat = new Category();
        cat.setName(rs.getString("category_name"));

        Author auth = new Author();
        auth.setName(rs.getString("author_name"));
        auth.setAge(rs.getInt("age"));
        auth.setCountry(rs.getString("country"));

        book.setCategory(cat);
        book.setAuthor(auth);
        return book;
    }

    /** Stores {@code book.getImage()} for the book's row via "book.img". */
    public void imageUpload(Book book) {
        try(Connection conn = ConnectionManager.getConnection();
                PreparedStatement stmt = conn.prepareStatement(getSql("book.img"))) {
            stmt.setString(1, book.getImage());
            stmt.setInt(2, book.getId());

            stmt.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Returns the stored image path for {@code book}, or null when absent or on error. */
    public String findImage(Book book) {
        try(Connection conn = ConnectionManager.getConnection();
                PreparedStatement stmt = conn.prepareStatement(getSql("book.img.find"))) {
            stmt.setInt(1, book.getId());

            ResultSet rs = stmt.executeQuery();
            while(rs.next())
                return rs.getString("book_image");
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Params } from '@angular/router';
import { GithubService } from '../github.service';
import { User } from '../user';
import 'rxjs/add/operator/switchMap';
@Component({
  selector: 'app-profile',
  templateUrl: './profile.component.html',
  styleUrls: ['./profile.component.css'],
  providers: [ GithubService ]
})
export class ProfileComponent implements OnInit {
  // GitHub user loaded for the route's `login` parameter; bound by the template.
  private user: User;

  constructor(
    private githubService: GithubService,
    private route: ActivatedRoute
  ) { }

  /**
   * Re-fetch the user whenever the route's `login` param changes.
   * switchMap cancels an in-flight lookup when the param changes again.
   *
   * NOTE(review): console.log is a debug leftover; also the subscription is
   * never unsubscribed — Angular completes route params on destroy, so this
   * is conventionally safe, but confirm project policy.
   */
  ngOnInit(): void {
    this.route.params
      .switchMap((params: Params) => this.githubService.getUser(params['login']))
      .subscribe(user => {console.log(user); this.user = user});
  }
}
|
#!/bin/sh
# Copyright 2020 The arhat.dev Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail on the first error and trace each command (useful in CI logs).
set -ex

# _download_go_pakage <package> — fetch the package's sources into GOPATH
# without building (GOPATH mode, -d = download only).
# (sic: "pakage" typo kept — renaming would touch the caller below.)
_download_go_pakage() {
  GO111MODULE=off go get -u -v -d "$1"
}

# _install_go_bin <module@version> <cmd-dir> <output-bin>
# Downloads the module into the module cache, then builds <cmd-dir> from the
# cache into <output-bin>.
_install_go_bin() {
  package="$1"
  cmd_dir="$2"
  bin="$3"

  # download — done from a throwaway dir so no local go.mod is touched
  temp_dir="$(mktemp -d)"
  cd "${temp_dir}"
  GO111MODULE=on go get -d -u "${package}"
  cd -
  rmdir "${temp_dir}"

  # build
  cd "${GOPATH}/pkg/mod/${package}"
  # TODO: currently the go.sum in github.com/deepmap/oapi-codegen is not synced
  # once fixed in that repo, remove -mod=mod
  GO111MODULE=on go build -mod=mod -o "${bin}" "${cmd_dir}"
  cd -
}

# Installs gogo/protobuf sources and the oapi-codegen binary for the host
# platform (GOOS/GOARCH pinned to the host so cross-compile env vars don't leak).
install_tools_go() {
  GOPATH=$(go env GOPATH)
  export GOPATH
  GOOS=$(go env GOHOSTOS)
  GOARCH=$(go env GOHOSTARCH)
  export GOOS
  export GOARCH

  cd "$(mktemp -d)"
  _download_go_pakage github.com/gogo/protobuf/proto
  _download_go_pakage github.com/gogo/protobuf/gogoproto
  _install_go_bin "github.com/deepmap/oapi-codegen@v1.6.1" "./cmd/oapi-codegen" "${GOPATH}/bin/oapi-codegen"
  cd -
}

install_tools_go
|
#!/bin/bash
# Build the esp-open-sdk toolchain at a pinned commit, driving make through
# the python2 wrapper shipped next to this script.

# Abort on the first failure: every later step depends on the previous one
# (previously a failed clone/cd would run the build in the wrong directory).
set -e

git clone --recursive https://github.com/pfalcon/esp-open-sdk.git
cd esp-open-sdk || exit 1
git checkout 03f5e898a059451ec5f3de30e7feff30455f7cec
cp ../python2_make.py .
python2 python2_make.py 'LD_LIBRARY_PATH="" make STANDALONE=y'
|
#!/usr/bin/env python
#
# Public Domain 2014-2017 MongoDB, Inc.
# Public Domain 2008-2014 WiredTiger, Inc.
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# test_timestamp02.py
# Timestamps: basic semantics
#
import random
from suite_subprocess import suite_subprocess
import wiredtiger, wttest
from wtscenario import make_scenarios
def timestamp_str(t):
    """Render timestamp ``t`` as lowercase hex with no leading zeros."""
    return format(t, 'x')
def timestamp_ret_str(t):
    """Hex-encode ``t`` like ``timestamp_str``, left-padded with '0' to an
    even number of digits (the form query_timestamp returns)."""
    s = timestamp_str(t)
    return s if len(s) % 2 == 0 else '0' + s
class test_timestamp02(wttest.WiredTigerTestCase, suite_subprocess):
    # Basic commit/read-timestamp semantics, exercised against column-store,
    # LSM and row-store table configurations.
    tablename = 'test_timestamp02'
    uri = 'table:' + tablename

    scenarios = make_scenarios([
        ('col', dict(extra_config=',key_format=r')),
        ('lsm', dict(extra_config=',type=lsm')),
        ('row', dict(extra_config='')),
    ])

    conn_config = 'log=(enabled)'

    # Check that a cursor (optionally started in a new transaction), sees the
    # expected values.
    def check(self, session, txn_config, expected):
        if txn_config:
            session.begin_transaction(txn_config)
        c = session.open_cursor(self.uri, None)
        # Zero values represent absent entries (column-store returns 0 for
        # deleted/unset records); ignore them when comparing.
        actual = dict((k, v) for k, v in c if v != 0)
        self.assertEqual(actual, expected)
        # Search for the expected items as well as iterating
        # (dict.iteritems: this suite is Python 2.)
        for k, v in expected.iteritems():
            self.assertEqual(c[k], v, "for key " + str(k))
        c.close()
        if txn_config:
            session.commit_transaction()

    def test_basic(self):
        if not wiredtiger.timestamp_build():
            self.skipTest('requires a timestamp build')

        self.session.create(self.uri,
            'key_format=i,value_format=i' + self.extra_config)
        c = self.session.open_cursor(self.uri)

        # Insert keys 1..100 each with timestamp=key, in some order
        orig_keys = range(1, 101)
        keys = orig_keys[:]
        random.shuffle(keys)

        for k in keys:
            self.session.begin_transaction()
            c[k] = 1
            self.session.commit_transaction('commit_timestamp=' + timestamp_str(k))

        # Now check that we see the expected state when reading at each
        # timestamp: reading at t shows exactly the keys committed at <= t.
        for i, t in enumerate(orig_keys):
            self.check(self.session, 'read_timestamp=' + timestamp_str(t),
                dict((k, 1) for k in orig_keys[:i+1]))

        # Bump the oldest timestamp, we're not going back...
        self.assertEqual(self.conn.query_timestamp(), timestamp_ret_str(100))
        self.conn.set_timestamp('oldest_timestamp=' + timestamp_str(100))

        # Update them and retry: timestamps 101..200 flip values from 1 to 2.
        random.shuffle(keys)
        for k in keys:
            self.session.begin_transaction()
            c[k] = 2
            self.session.commit_transaction('commit_timestamp=' + timestamp_str(k + 100))

        for i, t in enumerate(orig_keys):
            self.check(self.session, 'read_timestamp=' + timestamp_str(t + 100),
                dict((k, (2 if j <= i else 1)) for j, k in enumerate(orig_keys)))

        # Bump the oldest timestamp, we're not going back...
        self.assertEqual(self.conn.query_timestamp(), timestamp_ret_str(200))
        self.conn.set_timestamp('oldest_timestamp=' + timestamp_str(200))

        # Remove them and retry: timestamps 201..300 delete key by key.
        random.shuffle(keys)
        for k in keys:
            self.session.begin_transaction()
            del c[k]
            self.session.commit_transaction('commit_timestamp=' + timestamp_str(k + 200))

        for i, t in enumerate(orig_keys):
            self.check(self.session, 'read_timestamp=' + timestamp_str(t + 200),
                dict((k, 2) for k in orig_keys[i+1:]))
if __name__ == '__main__':
    # Standard wiredtiger test-harness entry point.
    wttest.run()
|
// Define a custom type that implements the BMByteSearchable trait.
// NOTE(review): BMByteSearchable is not defined or imported in this file —
// presumably it comes from an external Boyer-Moore search crate; confirm.
struct MyType<'a> {
    // Borrowed byte slice this wrapper exposes to the searcher.
    data: &'a [u8],
}

// Implement the BMByteSearchable trait for the custom type
impl<'a> BMByteSearchable for MyType<'a> {
    // Number of bytes available for searching.
    fn len(&self) -> usize {
        self.data.len()
    }

    // Byte at `index`; panics if out of bounds (plain slice indexing).
    fn byte_at(&self, index: usize) -> u8 {
        self.data[index]
    }
}
// Example usage: search for every occurrence of `pattern` inside `text`.
fn main() {
    let text = "abracadabra";
    let pattern = "abra";

    let my_type = MyType { data: text.as_bytes() };
    let pattern_type = MyType { data: pattern.as_bytes() };

    // NOTE(review): `bmb_search` is neither defined here nor imported —
    // presumably provided by the same external crate as BMByteSearchable;
    // confirm the actual API name before relying on this sample.
    let occurrences = bmb_search(my_type, pattern_type);
    println!("Pattern occurrences: {:?}", occurrences);
}
<gh_stars>0
// Placeholder entry point: logs a marker so the script/bundle can be smoke-tested.
console.log('js0');
import SwiftUI
/// Renders one to-do entry out of the shared `Main` store.
/// `todoIndex` is a binding into `main.todos`; the parent owns the selection.
struct TodoItem: View {
    @ObservedObject var main: Main
    @Binding var todoIndex: Int

    var body: some View {
        VStack {
            Text(main.todos[todoIndex].title) // Display the title of the to-do item
            Text(main.todos[todoIndex].description) // Display the description of the to-do item
            // Add any other relevant components to display additional details of the to-do item
            // NOTE(review): assumes todoIndex is always in range for
            // main.todos — an out-of-range binding would crash; confirm callers.
        }
    }
}
/// Xcode canvas preview: fresh `Main` store with the first item selected.
struct TodoItem_Previews: PreviewProvider {
    static var previews: some View {
        TodoItem(main: Main(), todoIndex: .constant(0))
    }
}
<gh_stars>0
#pragma once
#include <vector>
#include <typed-geometry/tg.hh>
#include <glow/fwd.hh>
#include "Settings.hh"
#include "fwd.hh"
namespace glow
{
namespace pipeline
{
// Controls when the pipeline re-renders the shadow cascade maps.
enum class ShadowMode
{
    UpdateAlways, // Default: Redraw shadows every frame
    UpdateOnce,   // Draw shadows once, then switch to DontUpdate
    DontUpdate    // Do nothing
};

GLOW_SHARED(class, RenderScene);
// Parameter bag describing everything the render pipeline needs to know
// about a scene: background, sun/atmosphere, AO, bloom, post-processing,
// color grading, shadows, and the edge-outline effect. Plain data; consumed
// by the pipeline each frame.
class RenderScene
{
public:
    RenderScene() = default;

    // == Background ==
    tg::color3 backgroundColor = tg::color3::black;

    // == Atmosphere and sun ==
    struct
    {
        tg::vec3 direction = normalize(tg::vec3(0.33f, 0.94f, -0.09f)); // towards the sun, normalized
        tg::color3 color = {1.f, 0.883f, 0.8235f};
        float intensity = 1;
    } sun;

    struct
    {
        float intensity = .25f;                ///< Multiplier for the final atmo scatter
        float density = .001f;                 ///< Fog density
        tg::color3 fogColor = {.5f, .6f, .7f}; ///< Fog color
        float heightFalloffStart = 0.f;        ///< Start of height falloff in world units (0 = World horizon)
        float heightFalloffEnd = 1500.f;       ///< End of the height falloff in world units
        float heightFalloffExponent = 2.73f;   ///< Height falloff function
    } atmoScatter;

    // == AO ==
    struct
    {
        float radius = 1.5f;
        float bias = 0.2f;
        float powerExponent = 2.f;
        float metersToViewSpaceUnits = 1.f;
        float sharpness = 4;
        float smallScaleIntensity = 1.f;
        float largeScaleIntensity = 1.f;
    } ao;

    // == Bloom ==
    float bloomThreshold = 1.5f; ///< The color luminance beyond which colors enter the bloom buffer
    float bloomIntensity = 2.5f; ///< Multiplier for the luminance-normalized color of bloom

    // == Postprocessing ==
    float exposure = 1.f;
    // NOTE(review): 2.2 is the conventional display gamma — confirm 2.24 is intentional.
    float gamma = 2.24f;
    float contrast = 1.2f;
    float brightness = 1.f;
    float sharpenStrength = .2f;
    bool tonemappingEnabled = true;

    // == Color grading ==
    // A 16x16x16 color lookup table
    // If nullptr, the identity color LUT is used
    SharedTexture3D colorLut = nullptr;

    // == Shadows ==
    struct
    {
        SharedTexture2DArray cascades = nullptr;  // cascade shadow maps, one layer per cascade
        ShadowMode mode = ShadowMode::UpdateAlways;
        float cascadeSplitLambda = 0.95f;         // blend between uniform and logarithmic cascade splits
    } shadow;

    // == Edge outline effect ==
    struct
    {
        bool enabled = false;
        float depthThreshold = 3.333f;
        float normalThreshold = 0.65f;
        tg::color3 color = {0, 0, 10 / 255.f};
    } edgeOutline;

#ifdef GLOW_EXTRAS_HAS_IMGUI
public:
    /// Draw an ImGui configuration window to tweak scene parameters
    ///
    /// Optionally leave the window open (No call to ImGui::End)
    /// to allow for custom extensions.
    void imguiConfigWindow(bool leaveWindowOpen = false);
#endif

    GLOW_SHARED_CREATOR(RenderScene);
};
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.