text stringlengths 1 1.05M |
|---|
'use strict';
const FunctionBuilderBase = require('../function-builder-base');
const WebGLFunctionNode = require('./function-node');
/**
 * @class WebGL2FunctionBuilder
 *
 * @extends FunctionBuilderBase
 *
 * @desc Builds WebGL2 functions (shaders) from JavaScript function strings.
 * (Comment fixed: it previously named the class `WebGLFunctionBuilder`,
 * which does not match the exported `WebGL2FunctionBuilder`.)
 */
module.exports = class WebGL2FunctionBuilder extends FunctionBuilderBase {
	constructor() {
		super();
		// Node factory the base builder instantiates when parsing/tracing
		// each JavaScript function into shader source.
		this.Node = WebGLFunctionNode;
	}
}; |
import unittest
from pyjo import Model, Field
from pyjo.exceptions import RequiredFieldError, NotEditableField, FieldTypeError, ValidationError
class TestModel(unittest.TestCase):
    """Unit tests for pyjo's Model/Field declarative API.

    Covers: required fields, defaults (values and callables), type
    checking, casting, custom validators, dict (de)serialization,
    nested models, attribute deletion, ``update_from_dict`` and the
    ``after_init`` hook.

    Note: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    has been replaced with ``assertEqual`` throughout.
    """

    def test_required_field(self):
        """Instantiating without a required field raises."""
        class A(Model):
            foo = Field(required=True)

        with self.assertRaises(RequiredFieldError):
            A()

    def test_required_with_empty_default(self):
        """A default of None does not satisfy a required field."""
        class A(Model):
            foo = Field(type=str, required=True, default=None)

        with self.assertRaises(RequiredFieldError):
            A()

    def test_required_with_nonempty_default(self):
        """A non-empty default satisfies a required field."""
        class A(Model):
            foo = Field(type=str, required=True, default='hello')

        a = A()
        self.assertEqual(a.foo, 'hello')

    def test_required_empty_assignment(self):
        """Assigning None to an already-set required field raises."""
        class A(Model):
            foo = Field(type=str, required=True)

        a = A(foo='bar')
        self.assertEqual(a.foo, 'bar')
        with self.assertRaises(RequiredFieldError):
            a.foo = None

    def test_default_value(self):
        """The default is returned when no value was given."""
        class A(Model):
            foo = Field(default='foo')

        a = A()
        self.assertEqual(a.foo, 'foo')

    def test_default_value_invalid_type(self):
        """A default that fails the type check raises on instantiation."""
        class A(Model):
            foo = Field(default='foo', type=int)

        with self.assertRaises(FieldTypeError):
            A()

    def test_invalid_type_on_init(self):
        """A constructor argument of the wrong type raises."""
        class A(Model):
            foo = Field(type=str)

        with self.assertRaises(FieldTypeError):
            A(foo=1)

    def test_invalid_type_on_set(self):
        """Assigning a value of the wrong type raises."""
        class A(Model):
            foo = Field(type=str)

        a = A(foo='foo')
        self.assertEqual(a.foo, 'foo')
        with self.assertRaises(FieldTypeError):
            a.foo = 123

    def test_validator(self):
        """The type check rejects first; then the validator can reject."""
        class A(Model):
            foo = Field(type=str, validator=lambda x: x.startswith('#'))

        with self.assertRaises(FieldTypeError):
            A(foo=1)
        with self.assertRaises(ValidationError):
            A(foo='hello')
        a = A(foo='#hello')
        self.assertEqual(a.foo, '#hello')

    def test_json_serialization(self):
        """to_dict() returns the field values as a plain dict."""
        class A(Model):
            foo = Field(type=str)
            bar = Field(type=int)

        a = A(foo='hello', bar=123)
        serialized = a.to_dict()  # renamed from 'json' (shadowed the stdlib module name)
        self.assertEqual(serialized, {'foo': 'hello', 'bar': 123})

    def test_json_deserialization(self):
        """from_dict() populates typed and untyped fields."""
        class A(Model):
            foo = Field(type=str)
            bar = Field()

        a = A.from_dict({'foo': 'hello', 'bar': 123})
        self.assertEqual(a.foo, 'hello')
        self.assertEqual(a.bar, 123)

    def test_json_deser_without_required_fields(self):
        """from_dict() enforces required fields."""
        class A(Model):
            foo = Field(type=str, required=True)
            bar = Field(required=True)

        with self.assertRaises(RequiredFieldError):
            A.from_dict({'bar': 123})

    def test_json_with_submodel(self):
        """Nested models round-trip through to_dict()/from_dict()."""
        class A(Model):
            foo = Field(type=str, required=True)
            bar = Field(type=int, default=0)

        class B(Model):
            submodel = Field(type=A)

        a = A(foo='foo', bar=123)
        b = B(submodel=a)

        # serialization / deserialization of the submodel
        serialized = b.to_dict()
        self.assertEqual(serialized, {'submodel': {'bar': 123, 'foo': 'foo'}})
        c = B.from_dict(serialized)
        self.assertEqual(c.submodel.foo, 'foo')
        self.assertEqual(c.submodel.bar, 123)

        # missing required fields of the submodel
        with self.assertRaises(RequiredFieldError):
            B.from_dict({'submodel': {'bar': 123}})

        # default values of the submodel's fields
        d = B.from_dict({'submodel': {'foo': 'foo'}})
        self.assertEqual(d.submodel.bar, 0)

    def test_multiple_nested_models(self):
        """Models nested two levels deep (de)serialize and type-check."""
        class A(Model):
            fA = Field(type=str)

        class B(Model):
            fB = Field(type=A)

        class C(Model):
            fC = Field(type=B)

        c = C(fC=B(fB=A(fA='yo')))
        pj = c.to_dict()
        self.assertEqual(pj, {'fC': {'fB': {'fA': 'yo'}}})
        c = C.from_dict(pj)
        self.assertEqual(c.fC.fB.fA, 'yo')
        with self.assertRaises(FieldTypeError):
            c = C(fC=B(fB=A(fA=1)))

    def test_discard_non_fields(self):
        """discard_non_fields controls whether unknown keys are kept."""
        class A(Model):
            foo = Field(type=str)

        a = A.from_dict({'foo': 'hello', 'foo2': 'hello2'}, discard_non_fields=False)
        self.assertEqual(a.foo, 'hello')
        self.assertEqual(a.foo2, 'hello2')

        a = A.from_dict({'foo': 'hello', 'foo2': 'hello2'}, discard_non_fields=True)
        self.assertEqual(a.foo, 'hello')
        self.assertEqual(hasattr(a, 'foo2'), False)

    def test_model_repr(self):
        """Only fields declared with repr=True appear in str()/repr()."""
        class A(Model):
            foo = Field(type=str)

        self.assertEqual(str(A(foo='bar')), '<A()>')

        class A(Model):
            foo = Field(type=str, repr=True)

        self.assertEqual(str(A(foo='bar')), '<A(foo=bar)>')

    def test_function_default(self):
        """A callable default is invoked once per instantiation."""
        data = {'x': 0}

        def incr_x():
            data['x'] += 1
            return data['x']

        class A(Model):
            foo = Field(type=int, default=incr_x)

        self.assertEqual(A().foo, 1)
        self.assertEqual(A().foo, 2)

    def test_no_serialization_if_no_value(self):
        """Fields without a value (or after deletion) are omitted from to_dict()."""
        class A(Model):
            foo = Field(type=int)
            bar = Field(type=int, default=1)
            flag = Field(type=bool, default=False)

        a = A()
        self.assertEqual(a.to_dict(), {'bar': 1, 'flag': False})
        a.foo = 10
        self.assertEqual(a.to_dict(), {'foo': 10, 'bar': 1, 'flag': False})
        del a.foo
        del a.bar
        del a.flag
        self.assertEqual(a.to_dict(), {})

    def test_del_property(self):
        """Deleting a field resets it to None and drops it from to_dict()."""
        class A(Model):
            foo = Field(type=int)

        a = A()
        a.foo = 5
        self.assertEqual(a.foo, 5)
        self.assertEqual(a.to_dict()['foo'], 5)
        del a.foo
        self.assertEqual(a.foo, None)
        with self.assertRaises(KeyError):
            a.to_dict()['foo']

    def test_cast(self):
        """cast converts assigned values; None is passed through unchanged."""
        class A(Model):
            foo = Field(type=int, cast=int)

        a = A()
        a.foo = '5'
        self.assertEqual(a.foo, 5)
        self.assertEqual(a.to_dict()['foo'], 5)
        a.foo = 5
        self.assertEqual(a.foo, 5)
        self.assertEqual(a.to_dict()['foo'], 5)
        a.foo = None
        self.assertEqual(a.foo, None)

    def test_update_from_dict(self):
        """update_from_dict validates each key and updates in place."""
        class A(Model):
            a = Field(type=str)
            b = Field(type=int, required=True)

        o = A(a='test', b=12)
        with self.assertRaises(RequiredFieldError):
            o.update_from_dict({
                'a': 'test2',
                'b': None,
            })
        with self.assertRaises(FieldTypeError):
            o.update_from_dict({
                'a': 12,
            })
        o.update_from_dict({
            'a': 'foo',
        })
        self.assertEqual(o.a, 'foo')
        self.assertEqual(o.b, 12)
        o.update_from_dict({
            'b': 1,
        })
        self.assertEqual(o.b, 1)

    def test_after_init_hook(self):
        """after_init runs after construction and can validate the instance."""
        class A(Model):
            foo = Field(type=str)
            bar = Field(type=int)

            def after_init(self):
                self.validate()

            def validate(self):
                if self.foo is None and self.bar is None:
                    raise ValidationError('one between foo and bar must be set')

        A(foo='hello', bar=1)
        A(foo='hello')
        A(bar=1)
        with self.assertRaises(ValidationError):
            A()
# Allow running this test module directly (e.g. `python test_model.py`).
if __name__ == '__main__':
    unittest.main()
|
<reponame>Ziezi/Programming-Principles-and-Practice-Using-C-by-Bjarne-Stroustrup-
/*
TITLE Regular Polygon Chapter13Exercise18.cpp
"<NAME> "Programming Principles and Practices Using C++""
COMMENT
Objective: Create a class RegularPolygon that
instantiates an objects using as
paremeters: the points of the
corners of the polygon.
Input: -
Output: The algorithm to find the center is valid.
The non-member function implementation needs revision.
Author: <NAME>
Date:   20.09.2015
*/
#include "GUI.h"
#include "Simple_window.h"
#include <iostream>
#include "Chapter13Exercise18.h"
int main()
{
	// window parameters
	int winWidth = 800;
	int winHeight = 600;
	Point centerPoint((x_max() - winWidth) / 2, (y_max() - winHeight) / 2);

	// Stack allocation: the original `new Simple_window` was never deleted
	// and leaked. Title fixed to match this file (Exercise 18, not 10).
	Simple_window siw(centerPoint, winWidth, winHeight, "Chapter 13 Exercise 18");

	// Heap-allocated marks; declared outside the try so they are freed on
	// the exception paths too (the original freed them only on success).
	std::vector<Marks*> m;
	try
	{
		Point center(siw.x_max() / 2., siw.y_max() / 2.);
		int sidesNumber = 6;
		int radius = 200;

		// generate coordinates using the original member function used for that purpose
		std::vector<Point> polygonCoord;
		Graph_lib::generateHex(polygonCoord, center, sidesNumber, radius);

		// instantiate a polygon using valid points
		RegularPolygon p(polygonCoord);
		siw.attach(p);

		// check the reference points generated by the validity function:
		// it calculates the center from each pair of vertex coordinates
		// and compares them to see if they match (within an error range)
		for (size_t i = 1; i < polygonCoord.size(); ++i)
		{
			Point c = Graph_lib::polygonCenter(polygonCoord[i - 1], polygonCoord[i], sidesNumber);
			// depict the calculated centers, labelled '1', '2', ...
			m.push_back(new Mark(c, '0' + i));
			siw.attach(*m[m.size() - 1]);
		}
		siw.wait_for_button();
	}
	catch (std::exception& e)
	{
		std::cerr << e.what() << std::endl;
	}
	catch (...)
	{
		std::cerr << "Default exception!" << std::endl; // typo fixed ("Defaul")
	}
	// free heap memory on every path
	for (auto it = m.begin(); it != m.end(); ++it) delete *it;
}
#!/bin/sh
#
# Copyright (C) 2008, 2012, 2013 Internet Systems Consortium, Inc. ("ISC")
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# $Id: clean.sh,v 1.3 2012/02/03 23:46:58 tbox Exp $
#
# Clean up after lwresd tests.
#
# Per-server memory statistics written by the named instances.
rm -f */named.memstats
# dig query output captured during the test run.
rm -f dig.out
# Runtime resolv file for the first lwresd instance (presumably generated
# by the test setup -- confirm against setup.sh).
rm -f lwresd1/lwresd.run.resolv
|
#!/bin/bash
# Configure, build, and install the project in Release mode into ${PREFIX}.
# Expected environment: PREFIX and SRC_DIR (conda-build style); OSX_VARIANT
# may be set on macOS.
set -e  # abort on the first failing command (cd/cmake/make)

mkdir -p build_release   # -p: safe to re-run the script
cd build_release

# Point CMake at the environment's OpenCL, except on native macOS where the
# system framework is used. An array avoids the original bug of passing a
# quoted EMPTY argument ("") to cmake when the flag was cleared.
CMAKE_EXTRA_ARGS=()
if [[ "$(uname)" == "Darwin" ]] && [[ "${OSX_VARIANT:-}" == "native" ]]
then
    : # use the system OpenCL framework
else
    CMAKE_EXTRA_ARGS+=("-DOPENCL_ROOT=${PREFIX}")
fi

cmake \
    -G "Unix Makefiles" \
    -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_PREFIX_PATH="${PREFIX}" \
    -DCMAKE_INSTALL_PREFIX="${PREFIX}" \
    -DSUFFIX_BIN="" \
    -DSUFFIX_LIB="" \
    -DBUILD_TEST=0 \
    -DBUILD_KTEST=0 \
    -DBUILD_CLIENT=0 \
    -DBUILD_CALLBACK_CLIENT=0 \
    -DBUILD_EXAMPLES=0 \
    "${CMAKE_EXTRA_ARGS[@]}" \
    "${SRC_DIR}/src"
make
make install
|
<filename>Shared/Carthage/ImagePicker.framework/Headers/ImagePicker-Swift.h
// Generated by Apple Swift version 4.2.1 effective-4.1.50 (swiftlang-1000.11.42 clang-1000.11.45.1)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wgcc-compat"
#if !defined(__has_include)
# define __has_include(x) 0
#endif
#if !defined(__has_attribute)
# define __has_attribute(x) 0
#endif
#if !defined(__has_feature)
# define __has_feature(x) 0
#endif
#if !defined(__has_warning)
# define __has_warning(x) 0
#endif
#if __has_include(<swift/objc-prologue.h>)
# include <swift/objc-prologue.h>
#endif
#pragma clang diagnostic ignored "-Wauto-import"
#include <objc/NSObject.h>
#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>
#if !defined(SWIFT_TYPEDEFS)
# define SWIFT_TYPEDEFS 1
# if __has_include(<uchar.h>)
# include <uchar.h>
# elif !defined(__cplusplus)
typedef uint_least16_t char16_t;
typedef uint_least32_t char32_t;
# endif
typedef float swift_float2 __attribute__((__ext_vector_type__(2)));
typedef float swift_float3 __attribute__((__ext_vector_type__(3)));
typedef float swift_float4 __attribute__((__ext_vector_type__(4)));
typedef double swift_double2 __attribute__((__ext_vector_type__(2)));
typedef double swift_double3 __attribute__((__ext_vector_type__(3)));
typedef double swift_double4 __attribute__((__ext_vector_type__(4)));
typedef int swift_int2 __attribute__((__ext_vector_type__(2)));
typedef int swift_int3 __attribute__((__ext_vector_type__(3)));
typedef int swift_int4 __attribute__((__ext_vector_type__(4)));
typedef unsigned int swift_uint2 __attribute__((__ext_vector_type__(2)));
typedef unsigned int swift_uint3 __attribute__((__ext_vector_type__(3)));
typedef unsigned int swift_uint4 __attribute__((__ext_vector_type__(4)));
#endif
#if !defined(SWIFT_PASTE)
# define SWIFT_PASTE_HELPER(x, y) x##y
# define SWIFT_PASTE(x, y) SWIFT_PASTE_HELPER(x, y)
#endif
#if !defined(SWIFT_METATYPE)
# define SWIFT_METATYPE(X) Class
#endif
#if !defined(SWIFT_CLASS_PROPERTY)
# if __has_feature(objc_class_property)
# define SWIFT_CLASS_PROPERTY(...) __VA_ARGS__
# else
# define SWIFT_CLASS_PROPERTY(...)
# endif
#endif
#if __has_attribute(objc_runtime_name)
# define SWIFT_RUNTIME_NAME(X) __attribute__((objc_runtime_name(X)))
#else
# define SWIFT_RUNTIME_NAME(X)
#endif
#if __has_attribute(swift_name)
# define SWIFT_COMPILE_NAME(X) __attribute__((swift_name(X)))
#else
# define SWIFT_COMPILE_NAME(X)
#endif
#if __has_attribute(objc_method_family)
# define SWIFT_METHOD_FAMILY(X) __attribute__((objc_method_family(X)))
#else
# define SWIFT_METHOD_FAMILY(X)
#endif
#if __has_attribute(noescape)
# define SWIFT_NOESCAPE __attribute__((noescape))
#else
# define SWIFT_NOESCAPE
#endif
#if __has_attribute(warn_unused_result)
# define SWIFT_WARN_UNUSED_RESULT __attribute__((warn_unused_result))
#else
# define SWIFT_WARN_UNUSED_RESULT
#endif
#if __has_attribute(noreturn)
# define SWIFT_NORETURN __attribute__((noreturn))
#else
# define SWIFT_NORETURN
#endif
#if !defined(SWIFT_CLASS_EXTRA)
# define SWIFT_CLASS_EXTRA
#endif
#if !defined(SWIFT_PROTOCOL_EXTRA)
# define SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_ENUM_EXTRA)
# define SWIFT_ENUM_EXTRA
#endif
#if !defined(SWIFT_CLASS)
# if __has_attribute(objc_subclassing_restricted)
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# else
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# endif
#endif
#if !defined(SWIFT_PROTOCOL)
# define SWIFT_PROTOCOL(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
# define SWIFT_PROTOCOL_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_EXTENSION)
# define SWIFT_EXTENSION(M) SWIFT_PASTE(M##_Swift_, __LINE__)
#endif
#if !defined(OBJC_DESIGNATED_INITIALIZER)
# if __has_attribute(objc_designated_initializer)
# define OBJC_DESIGNATED_INITIALIZER __attribute__((objc_designated_initializer))
# else
# define OBJC_DESIGNATED_INITIALIZER
# endif
#endif
#if !defined(SWIFT_ENUM_ATTR)
# if defined(__has_attribute) && __has_attribute(enum_extensibility)
# define SWIFT_ENUM_ATTR(_extensibility) __attribute__((enum_extensibility(_extensibility)))
# else
# define SWIFT_ENUM_ATTR(_extensibility)
# endif
#endif
#if !defined(SWIFT_ENUM)
# define SWIFT_ENUM(_type, _name, _extensibility) enum _name : _type _name; enum SWIFT_ENUM_ATTR(_extensibility) SWIFT_ENUM_EXTRA _name : _type
# if __has_feature(generalized_swift_name)
# define SWIFT_ENUM_NAMED(_type, _name, SWIFT_NAME, _extensibility) enum _name : _type _name SWIFT_COMPILE_NAME(SWIFT_NAME); enum SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_ENUM_ATTR(_extensibility) SWIFT_ENUM_EXTRA _name : _type
# else
# define SWIFT_ENUM_NAMED(_type, _name, SWIFT_NAME, _extensibility) SWIFT_ENUM(_type, _name, _extensibility)
# endif
#endif
#if !defined(SWIFT_UNAVAILABLE)
# define SWIFT_UNAVAILABLE __attribute__((unavailable))
#endif
#if !defined(SWIFT_UNAVAILABLE_MSG)
# define SWIFT_UNAVAILABLE_MSG(msg) __attribute__((unavailable(msg)))
#endif
#if !defined(SWIFT_AVAILABILITY)
# define SWIFT_AVAILABILITY(plat, ...) __attribute__((availability(plat, __VA_ARGS__)))
#endif
#if !defined(SWIFT_DEPRECATED)
# define SWIFT_DEPRECATED __attribute__((deprecated))
#endif
#if !defined(SWIFT_DEPRECATED_MSG)
# define SWIFT_DEPRECATED_MSG(...) __attribute__((deprecated(__VA_ARGS__)))
#endif
#if __has_feature(attribute_diagnose_if_objc)
# define SWIFT_DEPRECATED_OBJC(Msg) __attribute__((diagnose_if(1, Msg, "warning")))
#else
# define SWIFT_DEPRECATED_OBJC(Msg) SWIFT_DEPRECATED_MSG(Msg)
#endif
#if __has_feature(modules)
@import CoreGraphics;
@import Foundation;
@import ObjectiveC;
@import UIKit;
#endif
#pragma clang diagnostic ignored "-Wproperty-attribute-mismatch"
#pragma clang diagnostic ignored "-Wduplicate-method-arg"
#if __has_warning("-Wpragma-clang-attribute")
# pragma clang diagnostic ignored "-Wpragma-clang-attribute"
#endif
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma clang diagnostic ignored "-Wnullability"
#if __has_attribute(external_source_symbol)
# pragma push_macro("any")
# undef any
# pragma clang attribute push(__attribute__((external_source_symbol(language="Swift", defined_in="ImagePicker",generated_declaration))), apply_to=any(function,enum,objc_interface,objc_category,objc_protocol))
# pragma pop_macro("any")
#endif
@class NSCoder;
/// Machine-generated Swift interface (see the "Generated by Apple Swift"
/// preamble) -- regenerate from the Swift sources rather than editing.
/// Presumably the picker's bottom control bar (name-based inference;
/// confirm in BottomContainerView.swift).
SWIFT_CLASS("_TtC11ImagePicker19BottomContainerView")
@interface BottomContainerView : UIView
- (nullable instancetype)initWithCoder:(NSCoder * _Nonnull)aDecoder OBJC_DESIGNATED_INITIALIZER;
/// Not callable from Objective-C; use -initWithCoder: instead.
- (nonnull instancetype)initWithFrame:(CGRect)frame SWIFT_UNAVAILABLE;
@end
@class UIColor;
@class UIFont;
SWIFT_CLASS("_TtC11ImagePicker13Configuration")
@interface Configuration : NSObject
@property (nonatomic, strong) UIColor * _Nonnull backgroundColor;
@property (nonatomic, strong) UIColor * _Nonnull gallerySeparatorColor;
@property (nonatomic, strong) UIColor * _Nonnull mainColor;
@property (nonatomic, strong) UIColor * _Nonnull noImagesColor;
@property (nonatomic, strong) UIColor * _Nonnull noCameraColor;
@property (nonatomic, strong) UIColor * _Nonnull settingsColor;
@property (nonatomic, strong) UIColor * _Nonnull bottomContainerColor;
@property (nonatomic, strong) UIFont * _Nonnull numberLabelFont;
@property (nonatomic, strong) UIFont * _Nonnull doneButton;
@property (nonatomic, strong) UIFont * _Nonnull flashButton;
@property (nonatomic, strong) UIFont * _Nonnull noImagesFont;
@property (nonatomic, strong) UIFont * _Nonnull noCameraFont;
@property (nonatomic, strong) UIFont * _Nonnull settingsFont;
@property (nonatomic, copy) NSString * _Nonnull OKButtonTitle;
@property (nonatomic, copy) NSString * _Nonnull cancelButtonTitle;
@property (nonatomic, copy) NSString * _Nonnull doneButtonTitle;
@property (nonatomic, copy) NSString * _Nonnull noImagesTitle;
@property (nonatomic, copy) NSString * _Nonnull noCameraTitle;
@property (nonatomic, copy) NSString * _Nonnull settingsTitle;
@property (nonatomic, copy) NSString * _Nonnull requestPermissionTitle;
@property (nonatomic, copy) NSString * _Nonnull requestPermissionMessage;
@property (nonatomic) CGFloat cellSpacing;
@property (nonatomic) CGFloat indicatorWidth;
@property (nonatomic) CGFloat indicatorHeight;
@property (nonatomic) BOOL canRotateCamera;
@property (nonatomic) BOOL collapseCollectionViewWhileShot;
@property (nonatomic) BOOL recordLocation;
@property (nonatomic) BOOL allowMultiplePhotoSelection;
@property (nonatomic) BOOL allowVideoSelection;
@property (nonatomic) BOOL showsImageCountLabel;
@property (nonatomic) BOOL flashButtonAlwaysHidden;
@property (nonatomic) BOOL managesAudioSession;
@property (nonatomic) BOOL allowPinchToZoom;
@property (nonatomic) UIInterfaceOrientationMask allowedOrientations;
@property (nonatomic) BOOL allowVolumeButtonsToTakePicture;
@property (nonatomic) BOOL useLowResolutionPreviewImage;
@property (nonatomic, strong) UIView * _Nonnull indicatorView;
- (nonnull instancetype)init OBJC_DESIGNATED_INITIALIZER;
@end
@interface Configuration (SWIFT_EXTENSION(ImagePicker))
@property (nonatomic, readonly) CGAffineTransform rotationTransform;
@end
SWIFT_CLASS("_TtC11ImagePicker16ImageGalleryView")
@interface ImageGalleryView : UIView
- (nonnull instancetype)initWithFrame:(CGRect)frame SWIFT_UNAVAILABLE;
- (nullable instancetype)initWithCoder:(NSCoder * _Nonnull)aDecoder OBJC_DESIGNATED_INITIALIZER;
- (void)layoutSubviews;
@end
@class UICollectionView;
@class UICollectionViewLayout;
@interface ImageGalleryView (SWIFT_EXTENSION(ImagePicker)) <UICollectionViewDelegateFlowLayout>
- (CGSize)collectionView:(UICollectionView * _Nonnull)collectionView layout:(UICollectionViewLayout * _Nonnull)collectionViewLayout sizeForItemAtIndexPath:(NSIndexPath * _Nonnull)indexPath SWIFT_WARN_UNUSED_RESULT;
@end
@interface ImageGalleryView (SWIFT_EXTENSION(ImagePicker)) <UICollectionViewDelegate>
- (void)collectionView:(UICollectionView * _Nonnull)collectionView didSelectItemAtIndexPath:(NSIndexPath * _Nonnull)indexPath;
@end
@class UICollectionViewCell;
@interface ImageGalleryView (SWIFT_EXTENSION(ImagePicker)) <UICollectionViewDataSource>
- (NSInteger)collectionView:(UICollectionView * _Nonnull)collectionView numberOfItemsInSection:(NSInteger)section SWIFT_WARN_UNUSED_RESULT;
- (UICollectionViewCell * _Nonnull)collectionView:(UICollectionView * _Nonnull)collectionView cellForItemAtIndexPath:(NSIndexPath * _Nonnull)indexPath SWIFT_WARN_UNUSED_RESULT;
@end
@protocol ImagePickerDelegate;
@class NSBundle;
SWIFT_CLASS("_TtC11ImagePicker21ImagePickerController")
@interface ImagePickerController : UIViewController
@property (nonatomic, weak) id <ImagePickerDelegate> _Nullable delegate;
- (nonnull instancetype)initWithConfiguration:(Configuration * _Nonnull)configuration OBJC_DESIGNATED_INITIALIZER;
- (nonnull instancetype)initWithNibName:(NSString * _Nullable)nibNameOrNil bundle:(NSBundle * _Nullable)nibBundleOrNil OBJC_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder * _Nonnull)aDecoder OBJC_DESIGNATED_INITIALIZER;
- (void)viewDidLoad;
- (void)viewWillAppear:(BOOL)animated;
- (void)viewDidAppear:(BOOL)animated;
@property (nonatomic, readonly) BOOL prefersStatusBarHidden;
@end
@interface ImagePickerController (SWIFT_EXTENSION(ImagePicker))
@property (nonatomic, readonly) UIInterfaceOrientationMask supportedInterfaceOrientations;
- (void)handleRotation:(NSNotification * _Nullable)note;
@end
@class UIImage;
/// Delegate through which ImagePickerController reports user actions.
/// All three methods are required. (Machine-generated Swift interface;
/// the triggering behavior lives in the Swift implementation.)
SWIFT_PROTOCOL("_TtP11ImagePicker19ImagePickerDelegate_")
@protocol ImagePickerDelegate <NSObject>
- (void)wrapperDidPress:(ImagePickerController * _Nonnull)imagePicker images:(NSArray<UIImage *> * _Nonnull)images;
- (void)doneButtonDidPress:(ImagePickerController * _Nonnull)imagePicker images:(NSArray<UIImage *> * _Nonnull)images;
- (void)cancelButtonDidPress:(ImagePickerController * _Nonnull)imagePicker;
@end
/// Machine-generated Swift interface; only the designated initializers are
/// exposed to Objective-C.
SWIFT_CLASS("_TtC11ImagePicker7TopView")
@interface TopView : UIView
- (nonnull instancetype)initWithFrame:(CGRect)frame OBJC_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder * _Nonnull)aDecoder OBJC_DESIGNATED_INITIALIZER;
@end
#if __has_attribute(external_source_symbol)
# pragma clang attribute pop
#endif
#pragma clang diagnostic pop
|
<reponame>nedphae/contact-center-client<filename>app/components/Chat/DetailCard/panel/CustomerInfo.tsx
import React from 'react';
import { useSelector } from 'react-redux';
import { useQuery } from '@apollo/client';
import CssBaseline from '@material-ui/core/CssBaseline';
import Container from '@material-ui/core/Container';
import { getSelectedConstomer } from 'app/state/chat/chatAction';
import {
CustomerGraphql,
QUERY_OFFLINE_CUSTOMER,
} from 'app/domain/graphql/Customer';
import { Customer } from 'app/domain/Customer';
import CustomerForm from './CustomerForm';
/**
 * Wraps the customer form in the page's standard container.
 * When no customer is given, an empty container is rendered instead.
 */
function customerFormWithContainer(user: Customer | undefined) {
  if (!user) {
    return (
      <Container component="main" maxWidth="xs">
        <CssBaseline />
      </Container>
    );
  }
  return (
    <Container component="main" maxWidth="xs">
      <CssBaseline />
      <CustomerForm defaultValues={user} shouldDispatch />
    </Container>
  );
}
export default function CustomerInfo() {
const user = useSelector(getSelectedConstomer);
return customerFormWithContainer(user);
}
interface LazyCustomerInfoProps {
  userId: number;
}

/**
 * Loads the customer by id through the QUERY_OFFLINE_CUSTOMER GraphQL query
 * and renders the customer form once data arrives; until then (and on a
 * missing result) it renders an empty container.
 */
export function LazyCustomerInfo({ userId }: LazyCustomerInfoProps) {
  const { data } = useQuery<CustomerGraphql>(QUERY_OFFLINE_CUSTOMER, {
    variables: { userId },
  });
  return customerFormWithContainer(data?.getCustomer);
}
|
/**
* Scalar
* Copyright 2013 The Alliance for Networking Visual Culture.
* http://scalar.usc.edu/scalar
* <EMAIL>
*
* Licensed under the Educational Community License, Version 2.0
* (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
;(function( $, window, document, undefined ) {
var pluginName = "scalarhelp",
defaults = {
root_url: ''
};
/**
 * Manages the help dialog.
 *
 * @constructor
 * @param {Element} element  DOM element the plugin is attached to
 * @param {Object}  options  per-instance overrides merged over `defaults`
 */
function ScalarHelp( element, options ) {
	this.element = $(element);
	this.modal = null; // populated by init()
	this.options = $.extend( {}, defaults, options );
	this._defaults = defaults;
	this._name = pluginName;
	this.init();
}
/**
 * Builds the help dialog: assembles the HTML body (intro text plus an
 * icon-reference table), creates the Bootstrap modal, and wires keyboard
 * tab-order so focus cycles between the links and the close button.
 * Extra editing-icon cells are added only when the user can edit: not on
 * mobile, and user_level is scalar:Author/Commentator/Reviewer.
 */
ScalarHelp.prototype.init = function () {
	var me = this;
	var canEdit = ( !isMobile && ((scalarapi.model.user_level == "scalar:Author") || (scalarapi.model.user_level == "scalar:Commentator") || (scalarapi.model.user_level == "scalar:Reviewer")));
	var content = $('<div class="body_copy"></div>');
	content.append('<p>This <a href="http://scalar.usc.edu/scalar" title="Go to Scalar\'s website">Scalar</a> book is presented using an <strong>experimental interface</strong> designed to streamline and enhance the reading experience. As this interface is <strong>currently under active development</strong>, you may encounter bugs.</p>');
	content.append('<p>The <strong>header bar</strong> at the top of the screen gives you access to utilities for navigating and editing (if you’re logged in and have editing privileges). If the header bar is currently hidden, scroll towards the top of the page to make it appear. Here’s a quick reference guide to the header bar icons:</p>');
	var table = $( '<table summary="Description of icons"></table>' ).appendTo( content );
	// editors get a second icon column, so description cells are narrower
	var descStyle;
	if ( canEdit ) {
		descStyle = 'half-description';
	} else {
		descStyle = 'description';
	}
	// reader icons: one row per header-bar icon
	var row = $( '<tr></tr>' ).appendTo( table );
	row.append( '<td class="icon"><img src="'+this.options.root_url+'/images/menu_icon.png" alt="Main menu icon" width="30" height="30" /></td><td class="' + descStyle + '">Main menu</td>' );
	row = $( '<tr></tr>' ).appendTo( table );
	row.append( '<td class="icon"><img src="'+this.options.root_url+'/images/search_icon.png" alt="Search icon" width="30" height="30" /></td><td class="' + descStyle + '">Search</td>' );
	row = $( '<tr></tr>' ).appendTo( table );
	row.append( '<td class="icon"><img src="'+this.options.root_url+'/images/visualization_icon.png" alt="Visualization icon" width="30" height="30" /></td><td class="' + descStyle + '">Toggles “pinwheel” visualization of your current location in the book</td>' );
	row = $( '<tr></tr>' ).appendTo( table );
	row.append( '<td class="icon"><img src="'+this.options.root_url+'/images/help_icon.png" alt="Help icon" width="30" height="30" /></td><td class="' + descStyle + '">Help</td>' );
	row = $( '<tr></tr>' ).appendTo( table );
	row.append( '<td class="icon"><img src="'+this.options.root_url+'/images/user_icon.png" alt="Sign in / Sign out icon" width="30" height="30" /></td><td class="' + descStyle + '">Sign in</td>' );
	// editing icons: appended as a second column onto the same five rows
	if ( canEdit ) {
		table.find( 'tr' ).eq( 0 ).append( '<td class="icon"><img src="'+this.options.root_url+'/images/new_icon.png" alt="New page icon" width="30" height="30" /></td><td class="' + descStyle + '">New page</td>' );
		table.find( 'tr' ).eq( 1 ).append( '<td class="icon"><img src="'+this.options.root_url+'/images/edit_icon.png" alt="Edit icon" width="30" height="30" /></td><td class="' + descStyle + '">Edit page/media</td>' );
		table.find( 'tr' ).eq( 2 ).append( '<td class="icon"><img src="'+this.options.root_url+'/images/annotate_icon.png" alt="Annotate icon" width="30" height="30" /></td><td class="' + descStyle + '">Annotate images or time-based media</td>' );
		table.find( 'tr' ).eq( 3 ).append( '<td class="icon"><img src="'+this.options.root_url+'/images/import_icon.png" alt="Import icon" width="30" height="30" /></td><td class="' + descStyle + '">Import media</td>' );
		table.find( 'tr' ).eq( 4 ).append( '<td class="icon"><img src="'+this.options.root_url+'/images/options_icon.png" alt="Book dashboard icon" width="30" height="30" /></td><td class="' + descStyle + '">Dashboard</td>' );
	}
	content.append('<p>If you\'re used to reading Scalar books in their standard interface, you\'ll find that many things have changed, and that not all of Scalar\'s features have been implemented yet. Thanks for your patience as we continue to expand the capabilities of this new interface. We welcome <a href="mailto:<EMAIL>?subject=New%20Scalar%20interface%20feedback" title="Send your feedback by email">your feedback.</a></p>')
	this.modal = content.bootstrapModal({title: 'Help'});
	// when the dialog opens, move focus into it for keyboard users
	this.modal.on('shown.bs.modal', function() {
		me.modal.find( '.modal-body a' )[ 0 ].focus();
	});
	// NOTE(review): replacing the element with itself looks like a no-op;
	// confirm whether this detach/re-insert is intentional.
	this.element.replaceWith(this.element);
	// set tab order for links
	var ti = 1000;
	this.modal.find( '.modal-body a' ).each( function() {
		$( this ).attr( 'tabindex', ti );
		ti++;
	} );
	// tabbing forward from close button brings focus to first link
	this.modal.find( '.close' ).onTab( function() {
		me.modal.find( '.modal-body a' )[ 0 ].focus();
	});
	// tabbing backwards from first link brings focus to close button
	this.modal.find( '.modal-body a:eq(0)' ).onTabBack( function() {
		me.modal.find( '.close' )[ 0 ].focus();
	});
}
/**
 * Shows the help modal and records the modal view state.
 * (`setState`/`ViewState` are presumably page-level globals -- they are not
 * defined in this file; confirm against the page scripts.)
 */
ScalarHelp.prototype.showHelp = function() {
	this.modal.modal('show');
	setState( ViewState.Modal );
};

/**
 * Hides the help modal and restores the previous view state
 * (`restoreState` is presumably a page-level global).
 */
ScalarHelp.prototype.hideHelp = function() {
	this.modal.modal('hide');
	restoreState();
};

/**
 * Toggles the help modal's visibility via Bootstrap's 'toggle' action.
 */
ScalarHelp.prototype.toggleHelp = function() {
	this.modal.modal('toggle');
};
/**
 * Standard jQuery plugin registration: instantiates ScalarHelp at most once
 * per element (guarded via $.data) and preserves chainability.
 * (Statement-terminating semicolon added -- the original relied on ASI.)
 */
$.fn[pluginName] = function ( options ) {
	return this.each(function () {
		if ( !$.data( this, "plugin_" + pluginName ) ) {
			$.data( this, "plugin_" + pluginName,
				new ScalarHelp( this, options ) );
		}
	});
};
})( jQuery, window, document ); |
/* ************************************************************************
* This file provides the declarations to configure and use the DMA module
* on tm4c129encpdt
*
* Author: <NAME>
* Date Created: 15th April 2021
* DAte Modified: 13th May 2021
*
* ************************************************************************/
#ifndef _DMA_H
#define _DMA_H
#include "tm4c129encpdt.h"
/* Structures for DMA channel control table */

/* Control word is used to configure the basic properties
 * of DMA transfer for a given channel.
 *
 * NOTE(review): the bit-position comments describe ONE 32-bit control
 * word, but the bit-fields use uint16_t units (e.g. ARBSIZE straddles
 * bits 17:14). This relies on the compiler packing bit-fields across
 * storage units under __attribute__((packed)) -- confirm the generated
 * layout against the TM4C129x uDMA register map. */
struct DMA_control_word{
	/* First member of the structure goes as LSB
	 * Last member of the structure goes as MSB */
	uint16_t XFERMODE : 3; /* bit 02:00 uDMA transfer mode */
	uint16_t NXTUSEBURST : 1; /* bit 03 next use burst */
	uint16_t XFERSIZE : 10; /* bit 13:04 transfer size */
	uint16_t ARBSIZE : 4; /* bit 17:14 arbitration size */
	uint16_t SRCPROT0 : 1; /* bit 18 source privilege access */
	uint16_t reserved1 : 2; /* bit 20:19 reserved */
	uint16_t DESTPROT0 : 1; /* bit 21 destination privilege access */
	uint16_t reserved0 : 2; /* bit 23:22 reserved */
	uint16_t SRCSIZE : 2; /* bit 25:24 source data size */
	uint16_t SRCINC : 2; /* bit 27:26 source address increment */
	uint16_t DESTSIZE : 2; /* bit 29:28 destination data size */
	uint16_t DESTINC : 2; /* bit 31:30 destination address increment */
} __attribute__((packed));
/* Control structure for a channel consists of source and destination
 * pointers and the control word. */
struct DMA_control_structure{
	uint32_t *src_end_ptr; /* pointer to the end of source buffer or source addr */
	uint32_t *dst_end_ptr; /* pointer to the end of destination buffer or destination addr */
	struct DMA_control_word control_word; /* control word for the channel */
	uint32_t reserved; /* pads each entry to 16 bytes -- presumably required by the uDMA table layout; confirm */
};
/* Control table consists of primary and secondary control structures
 * for all the channels. control table needs to be aligned at 1024
 * byte boundary.
 *
 * NOTE(review): this header DEFINES the variable `DMA_control_table`
 * (a tentative definition) instead of declaring it `extern`; with GCC 10+
 * (-fno-common by default) including this header from more than one
 * translation unit will fail to link. Moving the definition into one .c
 * file is the usual fix -- verify which TU currently owns it.
 * Also `__attribute` is the nonstandard spelling; `__attribute__` is the
 * conventional GCC form. */
struct DMA_control_table{
	struct DMA_control_structure channel_ctl_struct[32]; /* primary control structs */
	struct DMA_control_structure secondary_channel_ctl_struct[32]; /* secondary control structs */
} __attribute((aligned(1024))) DMA_control_table;
/* ********************************************************************
 * This function initializes the DMA module.
 *
 * param: void
 *
 * return: void
 *
 * brief: This function initializes the DMA module and sets the pointer
 * to the control table located in the SRAM.
 *
 * ********************************************************************/
void DMA_init(void);
/* ********************************************************************************************
* This function configures a channel to transfer data from source
* to destination.
*
* param: channel_no DMA channel to be configured for transfer
* param: channel_encoding encoding value for the peripheral connected to channel
* param: burstModeOnly Channel responds to only burst requests when this is 1.
* param: src_end_ptr pointer to the last memory location of the
* source buffer or memory location
* param: dest_end_ptr pointer to the last memory location of the
* destination buffer or memory location
* param: control_word control word for the configuration of channel
*
* return: void
*
* brief: This function configures the DMA channel to transfer data from source location
* to the destination. the source and/or destination can be single memory locations or
* arrays of memory locations. The size of elements, transfer size, transfer type is programmed
* in the control word.
*
* ********************************************************************************************/
void DMA_configure_channel(uint8_t channel_no,uint8_t channel_encoding,uint8_t burstModeOnly,
uint32_t *src_end_ptr, uint32_t *dst_end_ptr,
struct DMA_control_word *control_word);
/* ********************************************************************************************
* This function reconfigures the dma channel after the previous transfer has been completed.
*
* param: channel_no DMA channel to be configured for transfer
* param: src_end_ptr pointer to the last memory location of the
* source buffer or memory location
* param: dest_end_ptr pointer to the last memory location of the
* destination buffer or memory location
* param: control_word control word for the configuration of channel
*
* return: void
*
* brief: This function reconfigures the dma channel after the previous transfer has been
* completed.
*
* ********************************************************************************************/
void DMA_reconfigure_channel(uint8_t channel_no,
uint32_t *src_end_ptr,
uint32_t *dst_end_ptr,
struct DMA_control_word *control_word);
/* ********************************************************************
* This function starts/enables the DMA transfers on a channel.
*
* param: channel_no Dma channel no to be enabled/ started.
*
* return: void
*
* brief: This function starts/ enables the dma channel to responds to
* transfer requests from peripherals and/or software triggers.
* ********************************************************************/
void DMA_start_transfer(uint8_t channel_no);
#endif
|
<gh_stars>0
-- SQL*Plus session settings: wide lines, headings on, semicolon-separated
-- columns and a fixed numeric format, so the spool file is easy to parse.
set array 1
set line 5000
set head on
set pagesize 10000
set feedback on
set trimspool on
set numformat 9999999999999999.999
set colsep ";"
set echo on
-- Capture everything below into a spool file (Windows path).
SPOOL C:\BATCH_DETAILS.SPL
-- Ask once for the batch number; &BATCH_NO is substituted in each query.
accept BATCH_NO PROMPT 'Enter the batch no ==> '
-- Dump the batch from the master, accounting-log, journal-log and upload tables.
SELECT * FROM DETB_BATCH_MASTER WHERE BATCH_NO='&BATCH_NO';
SELECT * FROM ACTB_DAILY_LOG WHERE BATCH_NO ='&BATCH_NO';
SELECT * FROM DETB_JRNL_LOG WHERE BATCH_NO='&BATCH_NO';
SELECT * FROM DETB_UPLOAD_MASTER WHERE BATCH_NO='&BATCH_NO';
SELECT * FROM DETB_UPLOAD_DETAIL WHERE BATCH_NO='&BATCH_NO';
spool off
#!/bin/bash -xe
# If you mess with the build image, you must publish a new
# image-v{X}.{Y}.{Z} tag. We can't automatically generate that tag,
# because we can't figure out what the semver should be, because we
# don't know the impact of what you've changed. So this script makes
# sure that you created the tag. And since we're running this in prow,
# it also makes sure that the tag has been pushed to upstream.
# This gets us the last non-merge commit:
commit=$(git rev-list --no-merges -n 1 HEAD)
# If that commit corresponds to an image tag, this gets the tag.
# The --match pattern is quoted so the shell cannot glob-expand it against
# files in the working directory; variables are quoted for the same reason.
tag=$(git describe --exact-match --tag --match "image-v*" "$commit" 2>/dev/null || true)
if [[ -n "$tag" ]]; then
    echo "Found tag $tag at current commit :)"
    exit 0
fi
# No tag here. That's okay as long as there were no changes to the build
# image.
# Since we're in a PR, and there may be multiple commits, we want to
# check all of them; so compare against the last merge commit before
# this one. This should work both locally (that should correspond to
# upstream/master) and in CI (which creates a merge commit to test
# against).
# NOTE: This assumes we always merge with merge commits.
last_merge_commit=$(git log --merges -n1 HEAD^ --format=%H)
if [[ -n "$(git diff "$last_merge_commit" --name-only config/)" ]]; then
    echo "Image build configuration has changed!"
    echo "You must push a new image-v{X}.{Y}.{Z} tag at commit $commit!"
    echo "See https://github.com/openshift/boilerplate/blob/$commit/README.md#build-images"
    exit 1
fi
exit 0
|
import { Vector2, Vector3 } from "./math.js";
import { RigidBody } from "./rigidbody.js";
// Children: ContactManifold, Joint
// Abstract base for velocity constraints between two rigid bodies, solved
// iteratively with sequential impulses (prepare -> solve -> applyImpulse).
export abstract class Constraint
{
    public readonly bodyA: RigidBody;
    public readonly bodyB: RigidBody;
    protected beta = 0.0; // Coefficient of position correction (Positional error feedback factor)
    protected gamma = 0.0; // Coefficient of Softness (Force feedback factor)
    constructor(bodyA: RigidBody, bodyB: RigidBody)
    {
        this.bodyA = bodyA;
        this.bodyB = bodyB;
    }
    /*
     * C: Constraint equation
     * C = J·v = 0
     * J depends on the concrete constraint type
     *
     * Calculate Jacobian J and effective mass M
     * M = K^-1 = (J · M^-1 · J^t)^-1
     */
    public abstract prepare(): void;
    /*
     * Solve velocity constraint, calculate corrective impulse for current iteration
     * Pc: Corrective impulse
     * λ: lagrangian multiplier
     *
     * Pc = J^t · λ (∵ Pc ∥ J^t)
     * λ = (J · M^-1 · J^t)^-1 ⋅ -(Jv + (β/h)·C(x)) where C(x): positional error
     *
     * with soft constraint,
     * λ = (J · M^-1 · J^t + γ·I)^-1 ⋅ -( Jv + (β/h)·C(x) + (γ/h)·λ' ) where I = identity matrix and λ' = accumulated impulse
     * (γ·I, not λ·I — γ is the softness coefficient declared above)
     *
     * More reading:
     * https://pybullet.org/Bullet/phpBB3/viewtopic.php?f=4&t=1354
     */
    public abstract solve(): void;
    /*
     * Apply impulse
     * V2 = V2' + M^-1 ⋅ Pc
     * Pc = J^t ⋅ λ
     *
     * More reading:
     * https://box2d.org/files/ErinCatto_ModelingAndSolvingConstraints_GDC2009.pdf
     */
    protected abstract applyImpulse(impulse: number | Vector2 | Vector3, impulse2?: number): void;
}
<gh_stars>0
package no.item.enonic.models;
import java.time.LocalDate;
/**
 * Data-holder for a user's profile, extending {@link BasicUser} with
 * contact, address and membership fields. All fields are public and
 * mutable (plain DTO style).
 */
public class User extends BasicUser {
    public String firstName;
    public String lastName;
    // Free-form postal address lines.
    public String address1;
    public String address2;
    public String address3;
    public String zipCode; // PostCode
    public String countryId;
    public String privateAddress;
    public String organization;
    // Contact phone numbers, split by context.
    public String phoneWork;
    public String phoneMobile;
    public String email;
    public String city; //PostOffice
    public LocalDate birthday;
    public String memberId;
    public String phonePrivate;
}
|
/// <summary>
/// Returns the epics published by every writer the given subscriber is
/// subscribed to.
/// </summary>
/// <param name="subscriberID">ID of the subscribing user.</param>
public IEnumerable<Domain.Models.Epic> GetEpicsSubscribedTo(int subscriberID)
{
    // All of this subscriber's subscriptions, with writer/role data eagerly loaded.
    var subscriptions = _context.Subscriptions
        .Include(w => w.Writer)
        .ThenInclude(r => r.RoleNavigation)
        .Include(s => s.Subscriber)
        .Where(s => s.SubscriberId == subscriberID);
    // Map to domain models and project out the publisher IDs.
    // NOTE(review): SubscriptionMapper.Map is a .NET method, so this part of
    // the query presumably evaluates client-side (subscriptions are pulled
    // into memory before the Contains filter below) — confirm this is the
    // intended behavior/performance trade-off.
    var m_subscriptions = subscriptions.Select(Mappers.SubscriptionMapper.Map);
    var m_ids = m_subscriptions.Select(s => s.Publisher.ID);
    // Epics whose publisher is one of the subscribed-to writers.
    var db_epics = _context.Epics
        .Include(w => w.Writer)
        .ThenInclude(r => r.RoleNavigation)
        .Where(e => m_ids.Contains(e.Publisher.ID));
    return db_epics;
}
<gh_stars>10-100
package chylex.hee.world.feature.stronghold.doors;
import java.util.Random;
import chylex.hee.system.abstractions.Pos.PosMutable;
import chylex.hee.system.abstractions.facing.Facing4;
import chylex.hee.world.structure.StructureWorld;
import chylex.hee.world.structure.dungeon.StructureDungeonPieceInst;
/**
 * Stronghold door variant made of iron-bar grates: two full-height bar
 * columns flanking the opening, with a single bar across the top center.
 */
public class StrongholdDoorGrates extends StrongholdDoor{
    /** Returns one door instance per axis (positive-X and positive-Z facing). */
    public static StrongholdDoorGrates[] generateDoors(){
        return new StrongholdDoorGrates[]{
            new StrongholdDoorGrates(Facing4.EAST_POSX),
            new StrongholdDoorGrates(Facing4.SOUTH_POSZ)
        };
    }
    public StrongholdDoorGrates(Facing4 facing){
        super(facing);
    }
    @Override
    protected void generateDoor(StructureDungeonPieceInst inst, StructureWorld world, Random rand, int x, int y, int z){
        // Start at the center of the doorway footprint.
        PosMutable archPos = new PosMutable(x+maxX/2, 0, z+maxZ/2);
        Facing4 perpendicular = facing.perpendicular();
        // Bar column on one side of the opening (y+1..y+3)...
        archPos.move(perpendicular, -1);
        placeLine(world, rand, placeIronBars, archPos.x, y+1, archPos.z, archPos.x, y+3, archPos.z);
        // ...a single bar across the top of the center...
        archPos.move(perpendicular, 1);
        placeBlock(world, rand, placeIronBars, archPos.x, y+3, archPos.z);
        // ...and a matching column on the other side.
        archPos.move(perpendicular, 1);
        placeLine(world, rand, placeIronBars, archPos.x, y+1, archPos.z, archPos.x, y+3, archPos.z);
    }
}
|
# Paths to the macOS redis binaries, relative to this script's directory.
redis_server=../../bin/osx/redis-server
redis_sentinel=../../bin/osx/redis-sentinel
# Launch the 6380 server and its sentinel in the background from the parent
# directory, so the relative config paths resolve.
pushd ..
$redis_server server-6380/redis.conf &
$redis_sentinel server-6380/sentinel.conf &
popd
|
#!/bin/bash
# Run the component installers in order: systemd units, then udev rules.
./systemd/install.sh
./udev/install.sh
|
-- Names and departments of employees with more than five years of experience.
SELECT name, department
FROM employees
WHERE years_of_experience > 5;
#include <vector>
#include <memory>
#include "URLLoaderThrottle.h" // Assuming the header file for URLLoaderThrottle is included
// Releases every URL loader throttle owned by the vector: clear() destroys
// each std::unique_ptr, which in turn deletes the throttle it owns. (The
// vector object itself survives and keeps its capacity.)
void releaseThrottles(std::vector<std::unique_ptr<blink::URLLoaderThrottle>>& throttles) {
    throttles.clear(); // Clear the vector, releasing the memory held by unique pointers
}
<filename>pkg/ibmcloud/cluster.go
package ibmcloud
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"sort"
"strings"
"sync"
"time"
devclustererr "github.com/codeready-toolchain/devcluster/pkg/errors"
"github.com/codeready-toolchain/devcluster/pkg/log"
"github.com/codeready-toolchain/devcluster/pkg/rest"
"github.com/pkg/errors"
)
// Configuration supplies the IBM Cloud settings the client needs:
// the API key used to obtain IAM tokens, account/tenant/IDP identifiers,
// and the API-call retry interval.
type Configuration interface {
	GetIBMCloudAPIKey() string
	GetIBMCloudApiCallRetrySec() int
	GetIBMCloudAccountID() string
	GetIBMCloudTenantID() string
	GetIBMCloudIDPName() string
}

// ICClient abstracts the IBM Cloud operations used by devcluster:
// cluster lifecycle, zone/vlan discovery, Cloud Directory users and
// IAM access policies.
type ICClient interface {
	GetVlans(zone string) ([]Vlan, error)
	GetZones() ([]Location, error)
	CreateCluster(name, zone string, noSubnet bool) (string, error)
	GetCluster(id string) (*Cluster, error)
	DeleteCluster(id string) error
	CreateCloudDirectoryUser(username string) (*CloudDirectoryUser, error)
	UpdateCloudDirectoryUserPassword(id string) (*CloudDirectoryUser, error)
	GetIAMUserByUserID(userID string) (*IAMUser, error)
	CreateAccessPolicy(accountID, userID, clusterID string) (string, error)
	DeleteAccessPolicy(id string) error
}

// Client is the REST-backed implementation of ICClient. It lazily obtains
// and caches an IAM token; token is guarded by tokenMux.
type Client struct {
	config   Configuration
	token    *TokenSet
	tokenMux sync.RWMutex
}

// NewClient returns a Client for the given configuration. No token is
// fetched here; it is obtained on first use via Token().
func NewClient(config Configuration) *Client {
	return &Client{
		config: config,
	}
}
// GetToken returns a copy of the currently cached token under a read lock.
// NOTE(review): it dereferences c.token without a nil check, so calling it
// before Token() has ever succeeded will panic — confirm all callers go
// through Token() first.
func (c *Client) GetToken() TokenSet {
	defer c.tokenMux.RUnlock()
	c.tokenMux.RLock()
	return *c.token
}

// Token returns IBM Cloud Token.
// If the token is expired or not obtained yet it will obtain a new one.
// Uses double-checked locking: the expiry is re-checked under the write
// lock because another goroutine may have refreshed the token while this
// one was upgrading from the read lock.
func (c *Client) Token() (TokenSet, error) {
	c.tokenMux.RLock()
	if tokenExpired(c.token) {
		c.tokenMux.RUnlock()
		c.tokenMux.Lock()
		defer c.tokenMux.Unlock()
		if tokenExpired(c.token) {
			var err error
			c.token, err = c.obtainNewToken()
			if err != nil {
				return TokenSet{}, err
			}
		}
		return *c.token, nil
	}
	defer c.tokenMux.RUnlock()
	return *c.token, nil
}
// tokenExpired reports whether the token is missing or not good for at
// least one more minute (a 60-second safety margin before the real expiry).
func tokenExpired(token *TokenSet) bool {
	if token == nil {
		return true
	}
	deadline := time.Unix(token.Expiration-60, 0)
	return time.Now().After(deadline)
}
// Vlan is a single VLAN entry as returned by the IBM Cloud datacenter
// VLAN listing API.
type Vlan struct {
	ID   string `json:"id"`
	Type string `json:"type"`
}

// vlanIDByType returns the vlan ID for the given type. Returns "" if there is no such type.
func vlanIDByType(vlans []Vlan, t string) string {
	for i := range vlans {
		if vlans[i].Type == t {
			return vlans[i].ID
		}
	}
	return ""
}
// responseErr builds an error for a failed IBM Cloud API call, including
// the request ID (useful in support tickets), HTTP status and body.
func responseErr(res *http.Response, message, respBody string) error {
	// Header.Get canonicalizes the key, so it matches "X-Request-Id",
	// "x-request-id", etc. The previous direct map lookups could only ever
	// match the canonical spelling, making the lowercase fallback dead code.
	id := res.Header.Get("X-Request-Id")
	return errors.Errorf("%s. x-request-id: %s, Response status: %s. Response body: %s", message, id, res.Status, respBody)
}
// GetVlans fetches the list of vlans available in the zone
func (c *Client) GetVlans(zone string) ([]Vlan, error) {
	token, err := c.Token()
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("GET", fmt.Sprintf("https://containers.cloud.ibm.com/global/v1/datacenters/%s/vlans", zone), nil)
	if err != nil {
		return nil, err
	}
	req.Header.Add("Authorization", "Bearer "+token.AccessToken)
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "unable to get vlans")
	}
	defer rest.CloseResponse(res)
	// Body is read fully up front so it can be included in error messages.
	bodyString := rest.ReadBody(res.Body)
	if res.StatusCode != http.StatusOK {
		return nil, responseErr(res, "unable to get vlans", bodyString)
	}
	var vlans []Vlan
	err = json.Unmarshal([]byte(bodyString), &vlans)
	if err != nil {
		return nil, errors.Wrapf(err, "error when unmarshal json with vlans %s ", bodyString)
	}
	return vlans, nil
}

// Location describes an IBM Cloud location (data center, region, etc.) as
// returned by the locations API; Kind distinguishes the location type.
type Location struct {
	ID          string `json:"id"`
	Name        string `json:"name"`
	Kind        string `json:"kind"`
	DisplayName string `json:"display_name"`
}
// GetZones fetches the list of zones (data centers)
func (c *Client) GetZones() ([]Location, error) {
	token, err := c.Token()
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("GET", "https://containers.cloud.ibm.com/global/v1/locations", nil)
	if err != nil {
		return nil, err
	}
	req.Header.Add("Authorization", "Bearer "+token.AccessToken)
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "unable to get zones")
	}
	defer rest.CloseResponse(res)
	bodyString := rest.ReadBody(res.Body)
	if res.StatusCode != http.StatusOK {
		return nil, responseErr(res, "unable to get zones", bodyString)
	}
	var locations []Location
	err = json.Unmarshal([]byte(bodyString), &locations)
	if err != nil {
		return nil, errors.Wrapf(err, "error when unmarshal json with zones %s ", bodyString)
	}
	// Return only locations with kind == "dc" (data center)
	zones := make([]Location, 0, 0)
	for _, z := range locations {
		if z.Kind == "dc" {
			zones = append(zones, z)
		}
	}
	// Sort by Display Name
	sort.SliceStable(zones, func(i, j int) bool {
		return zones[i].DisplayName < zones[j].DisplayName
	})
	return zones, nil
}

// ID captures the "id" field of a JSON response (e.g. a created cluster).
type ID struct {
	ID string `json:"id"`
}

// ClusterConfigTemplate is the JSON payload for cluster creation; the
// placeholders are zone, name, public vlan, private vlan and noSubnet.
// NOTE(review): machine type, master version and worker count are
// hard-coded here — confirm these are the intended fixed defaults.
const ClusterConfigTemplate = `
{
  "dataCenter": "%s",
  "disableAutoUpdate": true,
  "machineType": "b3c.4x16",
  "masterVersion": "4.5_openshift",
  "name": "%s",
  "publicVlan": "%s",
  "privateVlan": "%s",
  "noSubnet": %t,
  "workerNum": 2
}`
// CreateCluster creates a cluster
// Returns the cluster ID
func (c *Client) CreateCluster(name, zone string, noSubnet bool) (string, error) {
	token, err := c.Token()
	if err != nil {
		return "", err
	}
	// Get vlans
	vlans, err := c.GetVlans(zone)
	if err != nil {
		return "", err
	}
	// Missing vlan IDs are passed through as ""; per the warnings below the
	// service then creates new vlans for the cluster.
	private := vlanIDByType(vlans, "private")
	if private == "" {
		log.Infof(nil, "WARNING: no private vlan found for zone %s. New vlan will be created", zone)
	}
	public := vlanIDByType(vlans, "public")
	if public == "" {
		log.Infof(nil, "WARNING: no public vlan found for zone %s. New vlan will be created", zone)
	}
	body := bytes.NewBuffer([]byte(fmt.Sprintf(ClusterConfigTemplate, zone, name, public, private, noSubnet)))
	req, err := http.NewRequest("POST", "https://containers.cloud.ibm.com/global/v1/clusters", body)
	if err != nil {
		return "", err
	}
	req.Header.Add("Authorization", "Bearer "+token.AccessToken)
	req.Header.Add("Content-Type", "application/json")
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return "", errors.Wrap(err, "unable to create cluster")
	}
	defer rest.CloseResponse(res)
	bodyString := rest.ReadBody(res.Body)
	if res.StatusCode != http.StatusCreated {
		return "", responseErr(res, "unable to create cluster", bodyString)
	}
	var idObj ID
	err = json.Unmarshal([]byte(bodyString), &idObj)
	if err != nil {
		return "", errors.Wrapf(err, "error when unmarshal json with cluster ID %s ", bodyString)
	}
	return idObj.ID, nil
}

// Cluster mirrors the cluster description returned by the IBM Cloud
// getCluster API.
type Cluster struct {
	ID                string  `json:"id"`
	Name              string  `json:"name"`
	Region            string  `json:"region"`
	CreatedDate       string  `json:"createdDate"`
	MasterKubeVersion string  `json:"masterKubeVersion"`
	WorkerCount       int     `json:"workerCount"`
	Location          string  `json:"location"`
	Datacenter        string  `json:"datacenter"`
	State             string  `json:"state"`
	Type              string  `json:"type"`
	Crn               string  `json:"crn"`
	Ingress           Ingress `json:"ingress"`
	MasterURL         string  `json:"masterURL"`
}

// Ingress holds the cluster's ingress hostname.
type Ingress struct {
	Hostname string `json:"hostname"`
}
// GetCluster fetches the cluster with the given ID/name
// A 404 from the API is translated into a devclustererr NotFound error so
// callers can distinguish "gone" from other failures.
func (c *Client) GetCluster(id string) (*Cluster, error) {
	token, err := c.Token()
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("GET", fmt.Sprintf("https://containers.cloud.ibm.com/global/v2/getCluster?cluster=%s", id), nil)
	if err != nil {
		return nil, err
	}
	req.Header.Add("Authorization", "Bearer "+token.AccessToken)
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "unable to get cluster")
	}
	defer rest.CloseResponse(res)
	bodyString := rest.ReadBody(res.Body)
	if res.StatusCode == http.StatusNotFound {
		return nil, devclustererr.NewNotFoundError(fmt.Sprintf("cluster %s not found", id), bodyString)
	}
	if res.StatusCode != http.StatusOK {
		return nil, responseErr(res, "unable to get cluster", bodyString)
	}
	var cluster Cluster
	err = json.Unmarshal([]byte(bodyString), &cluster)
	if err != nil {
		return nil, errors.Wrapf(err, "error when unmarshal json with cluster %s ", bodyString)
	}
	return &cluster, nil
}

// DeleteCluster deletes the cluster with the given ID/name
// deleteResources=true asks the API to also remove associated resources.
func (c *Client) DeleteCluster(id string) error {
	token, err := c.Token()
	if err != nil {
		return err
	}
	req, err := http.NewRequest("DELETE", fmt.Sprintf("https://containers.cloud.ibm.com/global/v1/clusters/%s?deleteResources=true", id), nil)
	if err != nil {
		return err
	}
	req.Header.Add("Authorization", "Bearer "+token.AccessToken)
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return errors.Wrap(err, "unable to delete cluster")
	}
	defer rest.CloseResponse(res)
	bodyString := rest.ReadBody(res.Body)
	if res.StatusCode == http.StatusNotFound {
		return devclustererr.NewNotFoundError(fmt.Sprintf("cluster %s not found", id), "")
	}
	if res.StatusCode != http.StatusNoContent {
		return responseErr(res, "unable to delete cluster", bodyString)
	}
	return nil
}
// obtainNewToken obtains an access token
// Returns the access token string and the time when the token is going to expire
// (exchanges the configured API key for an IAM token).
func (c *Client) obtainNewToken() (*TokenSet, error) {
	client := &http.Client{Timeout: 10 * time.Second}
	res, err := client.PostForm("https://iam.cloud.ibm.com/identity/token", url.Values{
		"grant_type": {"urn:ibm:params:oauth:grant-type:apikey"},
		"apikey":     {c.config.GetIBMCloudAPIKey()},
	})
	if err != nil {
		return nil, err
	}
	defer rest.CloseResponse(res)
	if res.StatusCode != http.StatusOK {
		bodyString := rest.ReadBody(res.Body)
		return nil, responseErr(res, "unable to obtain access token from IBM Cloud", bodyString)
	}
	tokenSet, err := readTokenSet(res)
	if err != nil {
		return nil, err
	}
	// A 200 with no access_token is still a failure; guard against it.
	if tokenSet.AccessToken == "" {
		return nil, errors.New("unable to obtain access token from IBM Cloud. Access Token is missing in the response")
	}
	return tokenSet, nil
}

// TokenSet represents a set of Access and Refresh tokens
// Expiration is presumably a unix timestamp in seconds (it is passed to
// time.Unix in tokenExpired) — confirm against the IAM API docs.
type TokenSet struct {
	AccessToken  string `json:"access_token"`
	ExpiresIn    int64  `json:"expires_in"`
	Expiration   int64  `json:"expiration"`
	RefreshToken string `json:"refresh_token"`
	TokenType    string `json:"token_type"`
}
// readTokenSet extracts the token-set JSON from the HTTP response body.
func readTokenSet(res *http.Response) (*TokenSet, error) {
	var buf bytes.Buffer
	if _, err := io.Copy(&buf, res.Body); err != nil {
		return nil, err
	}
	return readTokenSetFromJson(strings.TrimSpace(buf.String()))
}

// readTokenSetFromJson parses a JSON document into a TokenSet.
func readTokenSetFromJson(jsonString string) (*TokenSet, error) {
	token := TokenSet{}
	if err := json.Unmarshal([]byte(jsonString), &token); err != nil {
		return nil, errors.Wrapf(err, "error when unmarshal json with access token %s ", jsonString)
	}
	return &token, nil
}
|
#!/bin/sh
# Run the DB-backed training/test job. The ['replace'] placeholders must be
# substituted with real DB host/port/password before use.
python3 test16.py --prefetch=2 --ds=db --db_pool=4 --db_host=['replace'] --db_port=['replace'] --db_pwd=['replace']
if [ $? -eq 0 ]; then
    sleep 15
    #echo "Training complete, shutting down vm." | mail -s "Training Complete" 3110798@qq.com
    #echo 'syncing file system...'
    sync
    #echo 'shutting down vm...'
    #sudo shutdown -h now
else
    echo FAIL
fi
require 'rails_helper'
# Feature coverage for the monthly service-metrics publishing form:
# the happy-path submission, cross-field validation messages, and
# conditional rendering of the call-volume sections.
RSpec.feature 'submitting monthly service metrics' do
  let(:service) { FactoryGirl.create(:service, name: 'The Submitting Data Service') }
  let(:publish_token) { MonthlyServiceMetricsPublishToken.generate(service: service, month: YearMonth.new(2017, 9)) }
  # Happy path: fill in every fieldset, submit, and check both the persisted
  # record and the confirmation page.
  specify 'submitting metrics' do
    visit_metrics_path
    expect(page).to have_text('Provide data for The Submitting Data Service – 1 to 30 September 2017')
    expect(page).to have_text('Your data will be published on 1 November 2017.')
    within_fieldset('Number of transactions received, split by channel') do
      fill_in 'Online', with: '18,000'
      fill_in 'Phone', with: '15,000'
      fill_in 'Paper', with: '16,000'
      fill_in 'Face-to-face', with: '15,000'
      fill_in 'Transactions received through this channel', with: '14,000'
    end
    within_fieldset('Number of transactions processed') do
      fill_in 'Transactions processed', with: '13,000'
    end
    within_fieldset('Number of transactions ending in the user’s intended outcome') do
      fill_in "Transactions processed with the user's intended outcome", with: '12,000'
    end
    within_fieldset('Total number of phone calls received') do
      fill_in 'Calls received', with: '20,000'
    end
    within_fieldset('Number of phone calls received, split by reasons for calling') do
      fill_in 'To perform a transaction', with: '15,000'
      fill_in 'To get information', with: '1000'
      fill_in 'To chase progress', with: '1500'
      fill_in 'To challenge a decision', with: '1500'
      fill_in 'Number of telephone calls for this reason', with: '1000'
    end
    click_button 'Submit'
    # Comma-formatted inputs are expected to be persisted as plain integers.
    metrics = MonthlyServiceMetrics.last
    expect(metrics.service).to eq(service)
    expect(metrics.month).to eq(YearMonth.new(2017, 9))
    expect(metrics.online_transactions).to eq(18000)
    expect(metrics.phone_transactions).to eq(15000)
    expect(metrics.paper_transactions).to eq(16000)
    expect(metrics.face_to_face_transactions).to eq(15000)
    expect(metrics.other_transactions).to eq(14000)
    expect(metrics.transactions_processed).to eq(13000)
    expect(metrics.transactions_processed_with_intended_outcome).to eq(12000)
    expect(metrics.calls_received).to eq(20000)
    expect(metrics.calls_received_perform_transaction).to eq(15000)
    expect(metrics.calls_received_get_information).to eq(1000)
    expect(metrics.calls_received_chase_progress).to eq(1500)
    expect(metrics.calls_received_challenge_decision).to eq(1500)
    expect(metrics.calls_received_other).to eq(1000)
    expect(page).to have_text('Upload successful')
    expect(page).to have_text('Thank you for providing your monthly data. It will be published on 1 November 2017.')
    expect(page).to have_text('You will next be asked to provide data on 1 November.')
  end
  # "Processed with intended outcome" cannot exceed "processed".
  specify "submitting invalid 'Transactions processed with the user's intended outcome' metrics" do
    visit_metrics_path
    fill_in "Transactions processed", with: "100"
    fill_in "Transactions processed with the user's intended outcome", with: "120"
    click_button 'Submit'
    expect(page).to have_content("Transactions processed with intended outcome must be less than or equal to transactions processed")
  end
  # Calls "to perform a transaction" must match the phone-channel count.
  specify "submitting invalid 'Number of calls received... to perform a transaction' metrics" do
    visit_metrics_path
    within_fieldset('Number of transactions received, split by channel') do
      fill_in 'Phone', with: '2'
    end
    within_fieldset('Number of phone calls received, split by reasons for calling') do
      fill_in 'To perform a transaction', with: '30'
    end
    click_button 'Submit'
    expect(page).to have_content("This should be the same as the 'Number of transactions received, split by channel (phone)")
  end
  # The calls-received total must equal the sum of the per-reason fields.
  specify "submitting invalid 'Total number of calls received' metrics" do
    visit_metrics_path
    within_fieldset('Total number of phone calls received') do
      fill_in 'Calls received', with: '100'
    end
    within_fieldset('Number of phone calls received, split by reasons for calling') do
      fill_in 'To perform a transaction', with: '10'
      fill_in 'To get information', with: '2'
      fill_in 'To chase progress', with: ''
      fill_in 'To challenge a decision', with: '3'
      fill_in 'Number of telephone calls for this reason', with: ''
    end
    click_button 'Submit'
    expect(page).to have_content("Calls received should be the sum of the fields within 'Number of phone calls received, split by reasons for calling'")
  end
  # A blank total with non-blank reasons is also rejected.
  specify "submitting 'Total number of calls received' metrics with blank calls received total" do
    visit_metrics_path
    within_fieldset('Total number of phone calls received') do
      fill_in 'Calls received', with: ''
    end
    within_fieldset('Number of phone calls received, split by reasons for calling') do
      fill_in 'To perform a transaction', with: '10'
      fill_in 'To get information', with: '2'
      fill_in 'To chase progress', with: ''
      fill_in 'To challenge a decision', with: '3'
      fill_in 'Number of telephone calls for this reason', with: ''
    end
    click_button 'Submit'
    expect(page).to have_content("Calls received should be the sum of the fields within 'Number of phone calls received, split by reasons for calling'")
  end
  # A total with every reason left blank does NOT trigger the sum validation.
  specify "submitting 'Total number of calls received' metrics with blank reasons" do
    visit_metrics_path
    within_fieldset('Total number of phone calls received') do
      fill_in 'Calls received', with: '100'
    end
    within_fieldset('Number of phone calls received, split by reasons for calling') do
      fill_in 'To perform a transaction', with: ''
      fill_in 'To get information', with: ''
      fill_in 'To chase progress', with: ''
      fill_in 'To challenge a decision', with: ''
      fill_in 'Number of telephone calls for this reason', with: ''
    end
    click_button 'Submit'
    expect(page).not_to have_content("Calls received should be the sum of the fields within 'Number of phone calls received, split by reasons for calling'")
  end
  # Conditional rendering: each call section only appears when applicable.
  specify "Titles are not shown for non-applicable calls" do
    svc = FactoryGirl.create(:service, name: 'No calls service', calls_received_applicable: false)
    token = MonthlyServiceMetricsPublishToken.generate(service: svc, month: YearMonth.new(2017, 9))
    visit publish_service_metrics_path(service_id: svc, year: '2017', month: '09', publish_token: token)
    expect(page).not_to have_content('Total number of phone calls received')
    expect(page).to have_content('Number of phone calls received, split by reasons for calling')
  end
  specify "Titles are not shown for other when no-call-other" do
    svc = FactoryGirl.create(:service,
                             name: 'No calls service',
                             calls_received_perform_transaction_applicable: false,
                             calls_received_get_information_applicable: false,
                             calls_received_chase_progress_applicable: false,
                             calls_received_challenge_decision_applicable: false,
                             calls_received_other_applicable: false)
    token = MonthlyServiceMetricsPublishToken.generate(service: svc, month: YearMonth.new(2017, 9))
    visit publish_service_metrics_path(service_id: svc, year: '2017', month: '09', publish_token: token)
    expect(page).to have_content('Total number of phone calls received')
    expect(page).not_to have_content('Number of phone calls received, split by reasons for calling')
  end
  specify "No call labels when no calls are applicable" do
    svc = FactoryGirl.create(:service,
                             name: 'No calls service',
                             calls_received_applicable: false,
                             calls_received_perform_transaction_applicable: false,
                             calls_received_get_information_applicable: false,
                             calls_received_chase_progress_applicable: false,
                             calls_received_challenge_decision_applicable: false,
                             calls_received_other_applicable: false)
    token = MonthlyServiceMetricsPublishToken.generate(service: svc, month: YearMonth.new(2017, 9))
    visit publish_service_metrics_path(service_id: svc, year: '2017', month: '09', publish_token: token)
    expect(page).not_to have_content('Total number of phone calls received')
    expect(page).not_to have_content('Number of phone calls received, split by reasons for calling')
  end
  private
  # Opens the September 2017 metrics form for the default service.
  def visit_metrics_path
    visit publish_service_metrics_path(service_id: service, year: '2017', month: '09', publish_token: publish_token)
  end
  # Scopes the block to the single fieldset whose h2 matches `text`,
  # raising if none or several match.
  def within_fieldset(text, &block)
    fieldsets = all('fieldset').select do |fieldset|
      fieldset.first('h2', text: text)
    end
    case fieldsets.size
    when 1
      within(fieldsets.first, &block)
    when 0
      raise 'No fieldset found, with heading "%s"' % [text]
    else
      raise 'Ambiguous fieldsets, %d fieldsets found with heading "%s"' % [fieldsets.size, text]
    end
  end
end
|
<reponame>nortal/spring-mvc-component-web<gh_stars>0
package com.nortal.spring.cw.core.web.component.event.confirmation;
import com.nortal.spring.cw.core.web.component.Hierarchical;
import com.nortal.spring.cw.core.web.component.global.GlobalLabel;
import com.nortal.spring.cw.core.web.component.modal.ConfirmationModalDialogComponent;
import com.nortal.spring.cw.core.web.component.modal.ModalDialogComponent;
import com.nortal.spring.cw.core.web.component.page.ComponentCaption;
/**
 * Confirmation dialog invoked when deleting an item.
 * (Translated from Estonian: "Kustutamisel välja kutsutav dialoogaken".)
 *
 * @author margush
 *
 */
public class DeleteConfirmationDialog extends ConfirmationModalDialogComponent {
    private static final long serialVersionUID = 1L;
    public DeleteConfirmationDialog(Hierarchical parent) {
        super(parent);
        // Adds a "Yes" button that submits the dialog as accepted.
        getModalButtons().addModalButton(ModalDialogComponent.SUBMIT_MODAL_DIALOG_ACCEPT, GlobalLabel.BUTTON_YES);
    }
    @Override
    public ComponentCaption getCaption() {
        // i18n key for the dialog title.
        return new ComponentCaption("global.confirm.delete.title");
    }
}
# Present a dmenu picker of Firefox profiles and launch the chosen one.
choice="Pat\nAirsight\nBorne"
# Quote the variable so it is passed to printf as a single format string
# (unquoted it would be subject to word splitting/globbing); printf still
# interprets the embedded \n escapes into one option per line.
prof=$(printf "$choice" | dmenu -i -p "Firefox User Profile")
case "$prof" in
    "Pat")
        firefox --profile ~/.mozilla/firefox/*.default/
        ;;
    "Airsight")
        firefox --profile ~/.mozilla/firefox/*.airsight/
        ;;
    "Borne")
        firefox --profile ~/.mozilla/firefox/*.borne/
        ;;
    *)
        ;;
esac
# Merge the per-direction affix / full-word-rule / whole-word comparison
# CSVs into a single combined report.
cat affix_t1_in_t2.csv affix_t2_in_t1.csv f_w_rule_t1_in_t2.csv f_w_rule_t2_in_t1.csv whole_t1_in_t2.csv whole_t2_in_t1.csv> outfile.txt
|
from flask import Flask, request
from flask_restplus import Api, Resource
app = Flask(__name__) # create a Flask app
api = Api(app) # initialize Flask-RESTplus
# This is a dummy list of user posts.
# In a real application these would
# be pulled from a database.
# Each entry maps an integer user_id to a title/content pair.
posts = [
    {'user_id': 123, 'title': 'My post 1', 'content': 'Lorem ipsum dolor sit amet...'},
    {'user_id': 123, 'title': 'My post 2', 'content': 'Lorem ipsum dolor sit amet...'},
    {'user_id': 123, 'title': 'My post 3', 'content': 'Lorem ipsum dolor sit amet...'},
]
@api.route('/posts')
class Posts(Resource):
    def get(self):
        """Return the posts belonging to the user_id query parameter.

        Fixes over the previous version:
        - `request` is now imported from flask (it was an undefined name);
        - the query value is converted to int (stored user_id values are
          ints, so comparing against the raw string never matched);
        - the result is bound to a new local instead of rebinding `posts`,
          which made `posts` local to get() and raised UnboundLocalError.
        """
        user_id = request.args.get('user_id', type=int)
        matching = [post for post in posts if post['user_id'] == user_id]
        return matching
if __name__ == '__main__':
    # Development server only; debug mode must not be used in production.
    app.run(debug=True)
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Utility for pulling the variable names out of a URI template such as
 * {@code /user/{id}/profile/{section}}.
 */
public class URIPatternExtractor {
    /** Matches one {name} or {name:regex} segment; group 1 is the name. */
    private static final Pattern VARIABLE_SEGMENT =
            Pattern.compile("\\{([^/{}]+?)(?::[^{}]+)?\\}");

    /**
     * Extracts the variable names from the given URI pattern. An optional
     * {@code :regex} suffix inside the braces is ignored.
     *
     * @param uriPattern the URI template to scan
     * @return the variable names, in order of appearance
     */
    public static List<String> extractVariableParts(String uriPattern) {
        List<String> names = new ArrayList<>();
        Matcher m = VARIABLE_SEGMENT.matcher(uriPattern);
        while (m.find()) {
            names.add(m.group(1));
        }
        return names;
    }

    /** Demo entry point: prints the variables of a sample pattern. */
    public static void main(String[] args) {
        String uriPattern = "/user/{id}/profile/{section}";
        List<String> extractedParts = extractVariableParts(uriPattern);
        System.out.println("Variable parts extracted from the URI pattern:");
        for (String part : extractedParts) {
            System.out.println(part);
        }
    }
}
#!/bin/bash
# Filesystem benchmark harness: mounts a filesystem with the supplied
# command and times micro-benchmarks under it.
# Environment toggles (all optional):
#   TRAVIS  - not "false" in CI: single iteration, xtrace enabled
#   FAST    - not "false": single iteration
#   CACHE   - "true": keep /tmp/cache between tests
#   CLEANUP - "false": leave the test directory behind
: ${TRAVIS:="false"}
: ${FAST:="false"}
: ${test:=""}
: ${CACHE:="false"}
: ${CLEANUP:="true"}
iter=10
if [ "$TRAVIS" != "false" ]; then
    set -o xtrace
    iter=1
fi
if [ "$FAST" != "false" ]; then
    iter=1
fi
set -o errexit
set -o nounset
if [ $# -lt 2 ]; then
    echo "Usage: $0 <mount cmd> <dir> [test name]"
    exit 1
fi
cmd=$1
mnt=$2
if [ $# -gt 2 ]; then
    t=$3
else
    t=
fi
prefix=$mnt/test_dir
MOUNTED=0
# Some backends need special-casing later (riofs lacks rmdir; blobfuse is
# remounted to clear its cache).
if [[ "$cmd" == riofs* ]]; then
    RIOFS="true"
else
    RIOFS="false"
fi
if [[ "$cmd" == blobfuse* ]]; then
    BLOBFUSE="true"
else
    BLOBFUSE="false"
fi
# Launch the mount command in the background; PID is used for teardown.
$cmd >& mount.log &
PID=$!
# Normal-exit handler: leave the test dir, unmount, and kill the mounter.
function cleanup {
    if [ $MOUNTED == 1 ]; then
        popd >/dev/null
        if [ "$CLEANUP" = "true" ]; then
            if [ "$TRAVIS" != "false" ]; then
                rmdir $prefix
            else
                rmdir $prefix >& /dev/null || true # riofs doesn't support rmdir
            fi
        fi
    fi
    if [ "$PID" != "" ]; then
        kill $PID >& /dev/null || true
        fusermount -u $mnt >& /dev/null || true
        sleep 1
    fi
}
# Error handler: dump the mount log, tear down, and preserve the failing
# exit status for the ERR trap.
function cleanup_err {
    err=$?
    cat mount.log
    if [ $MOUNTED == 1 ]; then
        popd >&/dev/null || true
        rmdir $prefix >&/dev/null || true
    fi
    if [ "$PID" != "" ]; then
        kill $PID >& /dev/null || true
        fusermount -u $mnt >& /dev/null || true
    fi
    return $err
}
trap cleanup EXIT
trap cleanup_err ERR
# Poll /proc/mounts for up to ~10s until the filesystem shows up; abort
# with the mount log if it never appears.
function wait_for_mount {
    for i in $(seq 1 10); do
        if grep -q $mnt /proc/mounts; then
            break
        fi
        sleep 1
    done
    if ! grep -q $mnt /proc/mounts; then
        echo "$mnt not mounted by $cmd"
        cat mount.log
        exit 1
    fi
}

if [ "$TRAVIS" == "false" -a "$cmd" != "cat" ]; then
    wait_for_mount
    MOUNTED=1
else
    # in travis we mount things externally so we know we are mounted
    MOUNTED=1
fi

mkdir -p "$prefix"
pushd "$prefix" >/dev/null

# Dropping the page cache needs root; remember whether we must sudo.
SUDO=
if [ $(id -u) != 0 ]; then
    SUDO=sudo
fi
# Flush the kernel page/dentry/inode caches so each run starts cold
# (skipped on CI, where the privilege is unavailable).
function drop_cache {
    if [ "$TRAVIS" == "false" ]; then
        (echo 3 | $SUDO tee /proc/sys/vm/drop_caches) > /dev/null
    fi
}

# Make bash's `time` builtin print just the elapsed seconds.
export TIMEFORMAT=%R

# Run one named benchmark: drop caches, optionally purge/remount the
# filesystem cache, then time the function given as $1 (rest are its args).
# Also records the test name in $test, which the final sanity check reads.
function run_test {
    test=$1
    shift
    drop_cache
    sleep 2
    if [ "$CACHE" == "false" ]; then
        if [ -d /tmp/cache ]; then
            rm -Rf /tmp/cache/*
        fi
        if [ "$BLOBFUSE" == "true" ]; then
            popd >/dev/null
            # re-mount blobfuse to cleanup cache
            if [ "$PID" != "" ]; then
                fusermount -u $mnt
                sleep 1
            fi
            $cmd >& mount.log &
            PID=$!
            wait_for_mount
            pushd "$prefix" >/dev/null
        fi
    fi
    echo -n "$test "
    if [ $# -gt 1 ]; then
        time $test $@
    else
        time $test
    fi
}
# Decide how many files a test should touch and store it in $howmany.
# Locally the first argument wins (default 100); on CI only a second,
# smaller argument is honored (default 10) so runs stay quick.
function get_howmany {
    if [ "$TRAVIS" == "false" ]; then
        if [ $# -eq 0 ]; then
            howmany=100
        else
            howmany=$1
        fi
    else
        if [ $# -eq 2 ]; then
            howmany=$2
        else
            howmany=10
        fi
    fi
}
# Create $howmany small files named file1..fileN, one write() each.
function create_files {
    get_howmany $@
    for i in $(seq 1 $howmany); do
        echo $i > file$i
    done
}

# List the directory and verify the expected number of entries is seen.
function ls_files {
    get_howmany $@
    # people usually use ls in the terminal when color is on
    numfiles=$(ls -1 --color=always | wc -l)
    if [ "$numfiles" != "$howmany" ]; then
        echo "$numfiles != $howmany"
        false
    fi
}

# Remove the files created by create_files; missing files are ignored.
function rm_files {
    get_howmany $@
    for i in $(seq 1 $howmany); do
        rm file$i >&/dev/null || true
    done
}
# Walk the tree created by create_tree_parallel and verify the entry
# count (9 dirs + 81 dirs + 729 files + the current dir = 820).
function find_files {
    numfiles=$(find | wc -l)
    if [ "$numfiles" != 820 ]; then
        echo "$numfiles != 820"
        rm_tree
        exit 1
    fi
}

# Build a 9x9x9 directory tree, touching the leaf files in parallel.
function create_tree_parallel {
    (for i in $(seq 1 9); do
        mkdir $i
        for j in $(seq 1 9); do
            mkdir $i/$j
            for k in $(seq 1 9); do
                touch $i/$j/$k & true
            done
        done
    done
    wait)
}

# Delete the tree made by create_tree_parallel.
function rm_tree {
    for i in $(seq 1 9); do
        rm -Rf $i
    done
}
# Same as create_files/rm_files but issue the operations concurrently.
function create_files_parallel {
    get_howmany $@
    (for i in $(seq 1 $howmany); do
        echo $i > file$i & true
    done
    wait)
}

function rm_files_parallel {
    get_howmany $@
    (for i in $(seq 1 $howmany); do
        rm file$i & true
    done
    wait)
}

# Stream 1000MB (100MB under FAST) of zeros into a single large file.
function write_large_file {
    count=1000
    if [ "$FAST" == "true" ]; then
        count=100
    fi
    dd if=/dev/zero of=largefile bs=1MB count=$count oflag=nocache status=none
}

# Sequentially read the large file back.
function read_large_file {
    dd if=largefile of=/dev/null bs=1MB iflag=nocache status=none
}

# Read a single byte - measures time-to-first-byte latency.
function read_first_byte {
    dd if=largefile of=/dev/null bs=1 count=1 iflag=nocache status=none
}
# Benchmark: sequential file creation/removal.
if [ "$t" = "" -o "$t" = "create" ]; then
    for i in $(seq 1 $iter); do
        run_test create_files
        run_test rm_files
    done
fi

# Benchmark: parallel file creation/removal.
if [ "$t" = "" -o "$t" = "create_parallel" ]; then
    for i in $(seq 1 $iter); do
        run_test create_files_parallel
        run_test rm_files_parallel
    done
fi
# Write a large file of pseudo-random data (AES-CTR keystream seeded from
# /dev/urandom) and record its md5 in $MD5 so a later read can verify
# end-to-end integrity.
function write_md5 {
    seed=$(dd if=/dev/urandom bs=128 count=1 status=none | base64 -w 0)
    random_cmd="openssl enc -aes-256-ctr -pbkdf2 -pass pass:$seed -nosalt"
    count=1000
    if [ "$FAST" == "true" ]; then
        count=100
    fi
    MD5=$(dd if=/dev/zero bs=1MB count=$count status=none | $random_cmd | \
        tee >(md5sum) >(dd of=largefile bs=1MB oflag=nocache status=none) >/dev/null | cut -f 1 '-d ')
    if [ "$RIOFS" == "true" ]; then
        # riofs doesn't wait for flush, so we need to wait for object to show up
        # XXX kind of broken due to eventual consistency but it's the best we can do
        while ! aws s3api --endpoint ${ENDPOINT} head-object --bucket ${BUCKET} --key test_dir/largefile >& /dev/null; do sleep 0.1; done
    fi
}

# Re-read the large file and compare its md5 against the recorded one.
function read_md5 {
    READ_MD5=$(md5sum largefile | cut -f 1 '-d ')
    if [ "$READ_MD5" != "$MD5" ]; then
        echo "$READ_MD5 != $MD5" >&2
        rm largefile
        exit 1
    fi
}

# Shadow rm for the rest of the script: riofs can transiently fail
# removals, so retry until one succeeds.
function rm {
    if [ "$RIOFS" == "true" ]; then
        while ! /bin/rm $@; do true; done
    else
        /bin/rm $@
    fi
}
# Benchmark: large-file write/read throughput plus integrity check.
if [ "$t" = "" -o "$t" = "io" ]; then
    for i in $(seq 1 $iter); do
        run_test write_md5
        if [ "$CACHE" != "true" ]; then
            run_test read_md5
            run_test read_first_byte
        fi
        rm largefile
    done
    if [ "$CACHE" = "true" ]; then
        # warm the cache once, then time repeated cached reads
        write_md5
        read_md5
        for i in $(seq 1 $iter); do
            run_test read_md5
            run_test read_first_byte
        done
        rm largefile
    fi
fi

# Benchmark: listing a large directory (2000 entries).
if [ "$t" = "" -o "$t" = "ls" ]; then
    create_files_parallel 2000 2000
    for i in $(seq 1 $iter); do
        run_test ls_files 2000 2000
    done
    if [ "$CLEANUP" = "true" ]; then
        rm_files 2000 2000
    fi
fi

# Split phases (ls_create/ls_ls/ls_rm) let a second script instance drive
# the listing benchmark against files created by this one.
if [ "$t" = "ls_create" ]; then
    create_files_parallel 1000
    test=dummy
    sleep 10
fi
if [ "$t" = "ls_ls" ]; then
    run_test ls_files 1000 1000
fi
if [ "$t" = "ls_rm" ]; then
    rm_files 1000
    test=dummy
fi

# Benchmark: deep directory traversal with find.
if [ "$t" = "" -o "$t" = "find" ]; then
    create_tree_parallel
    for i in $(seq 1 $iter); do
        run_test find_files
    done
    rm_tree
fi
if [ "$t" = "find_create" ]; then
    create_tree_parallel
    test=dummy
    sleep 10
fi
if [ "$t" = "find_find" ]; then
    for i in $(seq 1 $iter); do
        run_test find_files
    done
fi

# Regression scenario for issue #231: many concurrent readers of one file.
if [ "$t" = "issue231" ]; then
    run_test write_md5
    (for i in $(seq 1 20); do
        run_test read_md5 & true
    done; wait);
    rm largefile
fi

# Wipe everything under the test directory.
if [ "$t" = "cleanup" ]; then
    rm -Rf *
    test=dummy
fi
# for https://github.com/ppenguin/goofys/issues/64
# quote: There are 5 concurrent transfers going at a time.
# Data file size is often 100-400MB.
# Regarding the number of transfers, I think it's about 200 files.
# We read from the goofys mounted s3 bucket and write to a local spring webapp using curl.
# Reproduce issue #64: ~200 sequential 300MB reads with 5-10 in flight.
if [ "$t" = "disable" -o "$t" = "issue64" ]; then
    # setup the files
    # Bug fix: the original followed each subshell with
    #   if [ $? != 0 ]; then exit $?; fi
    # where the second $? was the exit status of the (successful) `[` test
    # itself, so failures always exited 0.  `|| exit` propagates the real
    # status (and is reachable even though errexit is set).
    (for i in $(seq 0 9); do
        dd if=/dev/zero of=file$i bs=1MB count=300 oflag=nocache status=none & true
    done
    wait) || exit
    # 200 files and 5 concurrent transfer means 40 times, do 50 times for good measure
    (for i in $(seq 0 9); do
        dd if=file$i of=/dev/null bs=1MB iflag=nocache status=none &
    done
    for i in $(seq 10 300); do
        # wait for 1 to finish, then invoke more
        wait -n
        running=$(ps -ef | grep ' dd if=' | grep -v grep | sed 's/.*dd if=file\([0-9]\).*/\1/')
        # use a distinct loop variable so the outer "i" is not clobbered
        for j in $(seq 0 9); do
            if echo $running | grep -v -q $j; then
                dd if=file$j of=/dev/null bs=1MB iflag=nocache status=none &
                break
            fi
        done
    done
    wait) || exit
    # cleanup
    (for i in $(seq 0 9); do
        rm -f file$i & true
    done
    wait)
fi
# $test is only set when some benchmark actually ran (run_test or the
# dummy phases), so an unrecognized test name is reported as an error.
if [ "$test" = "" ]; then
    echo "No test was run: $t"
    exit 1
fi
|
#include "include/core/SkCanvas.h"
#include "include/svg/SkSVGCanvas.h"

// Anchor function so the binding generator emits the SkSVGCanvas type.
extern "C" void C_SVG_Types(SkSVGCanvas *) {}

// C-ABI wrapper around SkSVGCanvas::Make.  Ownership of the returned canvas
// transfers to the caller (hence release() on the returned unique_ptr).
// NOTE(review): presumably `writer` must outlive the canvas - confirm
// against the SkSVGCanvas documentation.
extern "C" SkCanvas* C_SkSVGCanvas_Make(const SkRect* bounds, SkWStream* writer, uint32_t flags) {
    return SkSVGCanvas::Make(*bounds, writer, flags).release();
}
|
<filename>pkg/controller/rollout/context.go<gh_stars>0
/*
Copyright 2022 The Kruise Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package rollout
import (
"time"
rolloutv1alpha1 "github.com/openkruise/rollouts/api/v1alpha1"
"github.com/openkruise/rollouts/pkg/controller/rollout/batchrelease"
"github.com/openkruise/rollouts/pkg/util"
corev1 "k8s.io/api/core/v1"
"k8s.io/client-go/tools/record"
"k8s.io/klog/v2"
"sigs.k8s.io/controller-runtime/pkg/client"
)
// rolloutContext bundles everything a single reconcile pass needs: the
// rollout object, the status being built, the services/workload under
// rollout, and the batch-release controller driving the canary.
type rolloutContext struct {
	client.Client
	// rollout is the object being reconciled; newStatus accumulates the
	// status to be written back by the caller.
	rollout   *rolloutv1alpha1.Rollout
	newStatus *rolloutv1alpha1.RolloutStatus
	// isComplete presumably marks a finished rollout - confirm with callers.
	isComplete    bool
	stableService *corev1.Service
	canaryService *corev1.Service
	workload      *util.Workload
	batchControl  batchrelease.BatchRelease
	// recheckTime, when set (see finalising), requests a requeue at that time.
	recheckTime *time.Time
	recorder    record.EventRecorder
}
// reconcile runs one reconciliation pass for the configured strategy.
// Only the canary strategy is handled; anything else is a no-op.
func (r *rolloutContext) reconcile() error {
	if r.rollout.Spec.Strategy.Canary == nil {
		return nil
	}
	klog.Infof("rollout(%s/%s) run Canary action...", r.rollout.Namespace, r.rollout.Name)
	return r.runCanary()
}
// finalising runs the strategy-specific finalizer and reports whether it
// has completed.  While still in progress (and error-free) it schedules a
// recheck after the default grace period via r.recheckTime.
func (r *rolloutContext) finalising() (bool, error) {
	if r.rollout.Spec.Strategy.Canary == nil {
		return false, nil
	}
	done, err := r.doCanaryFinalising()
	if err == nil && !done {
		// Not finished yet - ask the controller to come back shortly.
		next := time.Now().Add(time.Duration(defaultGracePeriodSeconds) * time.Second)
		r.recheckTime = &next
	}
	return done, err
}
// podRevisionLabelKey returns the pod revision label key
// (util.RsPodRevisionLabelKey) used elsewhere to select pods by revision.
func (r *rolloutContext) podRevisionLabelKey() string {
	return util.RsPodRevisionLabelKey
}
|
/**
* Copyright 2021 Wayne
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wayne.apihub.dao;
import com.wayne.apihub.modules.datasource.conf.SolrSourceConf;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
* @author Wayne
*/
@Mapper
public interface DataSourceSolrDao {
    /**
     * Insert a new Solr data-source configuration.
     *
     * @param solrSourceConf the SolrSourceConf to persist
     */
    void insertSolrSource(@Param("solrSourceConf") SolrSourceConf solrSourceConf);

    /**
     * List all Solr data-source configurations.
     *
     * @return every stored SolrSourceConf
     */
    List<SolrSourceConf> listSolrSource();

    /**
     * Fetch a Solr data-source configuration by primary key.
     *
     * @param id id of the solr source
     * @return the matching SolrSourceConf, or {@code null} if none exists
     *         (MyBatis default for an empty result)
     */
    SolrSourceConf getSolrSourceById(@Param("id") Long id);

    /**
     * Update the status flag of a Solr data-source.
     *
     * @param id     id of the solr source
     * @param status status to update
     */
    void updateSolrSourceStatus(@Param("id") Long id, @Param("status") Integer status);
}
|
<gh_stars>1-10
package com.jude.easy_crypto.signature;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.SignatureException;
/**
 * Thin wrapper binding a {@link SignInfo} (signature parameters, private
 * key and payload) to the static signing routine in SignatureTool.
 */
public class SignHandler {
    // Signing parameters supplied once at construction time.
    private final SignInfo signInfo;

    public SignHandler(SignInfo signInfo) {
        this.signInfo = signInfo;
    }

    /**
     * Signs the configured content with the configured private key.
     *
     * @return the raw signature bytes
     * @throws InvalidKeyException if the private key is unusable
     * @throws SignatureException  if the signing operation fails
     */
    public byte[] sign() throws InvalidKeyException, SignatureException {
        return SignatureTool.sign(signInfo.signature, signInfo.privateKey, signInfo.content);
    }
}
|
<gh_stars>0
// NOTE(review): this looks like a dependency-scanner test fixture - the
// Object() wrapper and the template-literal require() arguments appear
// deliberate, so these dynamic-import patterns must not be "simplified".
const MODULE = Object('test-module');
// require() with a fully dynamic template literal.
Promise.resolve().then(() => require(`${MODULE}`));
// require() with a partially static template literal.
Promise.resolve().then(() => require(`test-${MODULE}`));
|
# Tool aliases (coreutils); variables so alternate (e.g. g-prefixed)
# versions can be substituted on other platforms.
GCP=cp
GDIRNAME=dirname
GBASENAME=basename
GMKDIR=mkdir
GMV=mv

# paths
# REPLACEME_SV_* tokens are substituted by the surrounding build system.
export MITK_SRCDIR=REPLACEME_SV_TOP_SRC_DIR_MITK
export MITK_BINDIR=REPLACEME_SV_TOP_BIN_DIR_MITK
export MITK_BLDDIR=REPLACEME_SV_TOP_BLD_DIR_MITK

# build type not used on linux
export MITK_BLDTYPE=
# primary directories to install into
$GMKDIR -p $MITK_BINDIR/bin
$GMKDIR -p $MITK_BINDIR/bin/plugins
$GMKDIR -p $MITK_BINDIR/lib
$GMKDIR -p $MITK_BINDIR/include

# Copy the MITK build tree and its external projects (ep) wholesale.
$GCP -Rf $MITK_BLDDIR/MITK-build/bin $MITK_BINDIR
$GCP -Rf $MITK_BLDDIR/MITK-build/lib $MITK_BINDIR
$GCP -Rf $MITK_BLDDIR/ep/bin $MITK_BINDIR
$GCP -Rf $MITK_BLDDIR/ep/lib $MITK_BINDIR
$GCP -Rf $MITK_BLDDIR/ep/include $MITK_BINDIR
$GCP -Rf $MITK_BLDDIR/ep/share $MITK_BINDIR
$GCP -Rf $MITK_BLDDIR/ep/plugins $MITK_BINDIR/plugins
$GCP -Rf $MITK_BLDDIR/ep/src/CTK-build/CMakeExternals/Install/include $MITK_BINDIR
$GCP -Rf $MITK_BLDDIR/ep/src/CTK-build/CMakeExternals/Install/lib $MITK_BINDIR
$GCP -Rf $MITK_BLDDIR/ep/src/CTK-build/qRestAPI-build/*.REPLACEME_SV_SO_FILE_EXTENSION $MITK_BINDIR/lib
$GCP -Rf $MITK_BLDDIR/ep/src/CTK-build/qRestAPI-build/*.h $MITK_BINDIR/include
$GCP -Rf $MITK_BLDDIR/ep/src/CTK-build/CTK-build/bin/* $MITK_BINDIR/bin
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/CTK-build/bin/$MITK_BLDTYPE/*CTK*.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/lib
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/CTK-build/bin/$MITK_BLDTYPE/liborg*.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/lib/plugins

# copy qRestAPI from CTK-build
$GMKDIR -p $MITK_BINDIR/include/qRestAPI
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/qRestAPI/*.h $MITK_BINDIR/include/qRestAPI
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/qRestAPI-build/$MITK_BLDTYPE/REPLACEME_SV_LIB_FILE_PREFIXqRestAPI.REPLACEME_SV_LIB_FILE_EXTENSION $MITK_BINDIR/lib
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/qRestAPI-build/$MITK_BLDTYPE/REPLACEME_SV_LIB_FILE_PREFIXqRestAPI.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/bin
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/qRestAPI-build/$MITK_BLDTYPE/REPLACEME_SV_LIB_FILE_PREFIXqRestAPI.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/lib

# copy PythonQt from CTK-build
$GMKDIR -p $MITK_BINDIR/include/PythonQt
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/PythonQt/src/*.h $MITK_BINDIR/include/PythonQt
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/PythonQt/src/gui/*.h $MITK_BINDIR/include/PythonQt
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/PythonQt-build/$MITK_BLDTYPE/REPLACEME_SV_LIB_FILE_PREFIXPythonQt.REPLACEME_SV_LIB_FILE_EXTENSION $MITK_BINDIR/lib
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/PythonQt-build/$MITK_BLDTYPE/REPLACEME_SV_LIB_FILE_PREFIXPythonQt.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/bin
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/PythonQt-build/$MITK_BLDTYPE/REPLACEME_SV_LIB_FILE_PREFIXPythonQt.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/lib

# CTK
# Rename CTK's bundled Python dir (presumably to avoid clashing with the
# real Python install - confirm).
$GMV -f $MITK_BINDIR/bin/Python $MITK_BINDIR/bin/PythonCTK
$GMKDIR -p $MITK_BINDIR/include/ctk
$GCP -f $MITK_BLDDIR/ep/src/CTK/Libs/Core/*.h $MITK_BINDIR/include/ctk
$GCP -f $MITK_BLDDIR/ep/src/CTK/Libs/Core/*.tpp $MITK_BINDIR/include/ctk
$GCP -f $MITK_BLDDIR/ep/src/CTK/Libs/Scripting/Python/Core/*.h $MITK_BINDIR/include/ctk
$GCP -f $MITK_BLDDIR/ep/src/CTK/Libs/Scripting/Python/Widgets/*.h $MITK_BINDIR/include/ctk
$GCP -f $MITK_BLDDIR/ep/src/CTK/Libs/Visualization/VTK/Core/*.h $MITK_BINDIR/include/ctk
$GCP -f $MITK_BLDDIR/ep/src/CTK/Libs/Widgets/*.h $MITK_BINDIR/include/ctk
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/CTK-build/bin/$MITK_BLDTYPE/*.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/bin
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/CTK-build/bin/$MITK_BLDTYPE/*.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/lib
$GCP -f $MITK_BLDDIR/ep/src/CTK-build/CTK-build/bin/$MITK_BLDTYPE/*.REPLACEME_SV_LIB_FILE_EXTENSION $MITK_BINDIR/lib

# copying more than needed here, but not sure how many of the subdirectories are required.
$GCP -Rf $MITK_BLDDIR/ep/src/CTK/Libs/PluginFramework $MITK_BINDIR/include/ctk
for i in $(find $MITK_BLDDIR/ep/src/CTK-build -name "*Export.h"); do
    echo "$i $($GBASENAME $i)"
    $GCP -f $i $MITK_BINDIR/include/ctk
done
$GCP -f $MITK_BLDDIR/MITK-build/lib/plugins/$MITK_BLDTYPE/* $MITK_BINDIR/lib
# mitk files
$GCP -f $MITK_BLDDIR/MITK-build/bin/$MITK_BLDTYPE/*.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/bin
$GCP -f $MITK_BLDDIR/MITK-build/lib/$MITK_BLDTYPE/*.REPLACEME_SV_SO_FILE_EXTENSION* $MITK_BINDIR/lib
$GCP -f $MITK_BLDDIR/MITK-build/lib/$MITK_BLDTYPE/*.REPLACEME_SV_LIB_FILE_EXTENSION $MITK_BINDIR/lib
$GMKDIR -p $MITK_BINDIR/include/mitk
$GMKDIR -p $MITK_BINDIR/include/mitk/configs
$GMKDIR -p $MITK_BINDIR/include/mitk/exports
$GMKDIR -p $MITK_BINDIR/include/mitk/ui_files
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules
$GCP $MITK_BLDDIR/MITK-build/*.h $MITK_BINDIR/include/mitk
#
# plugins
#
# currently require the following plugins:
#
# org.blueberry.core.runtime (nested)
# org.blueberry.ui.qt (nested)
# org.mitk.core.services
# org.mitk.gui.common
# org.mitk.gui.qt.common
# org.mitk.gui.qt.common.legacy
# org.mitk.gui.qt.datamanager
# Copy plugin headers: top-level src/ first, then one level of nested src
# subdirectories (same pattern for org.mitk.* and org.blueberry.*).
for i in $MITK_SRCDIR/Plugins/org.mitk.*/src; do
    $GMKDIR -p $MITK_BINDIR/include/mitk/plugins/$($GBASENAME $($GDIRNAME $i))
    $GCP -R $i/*.h $MITK_BINDIR/include/mitk/plugins/$($GBASENAME $($GDIRNAME $i))
done
for i in $MITK_SRCDIR/Plugins/org.mitk.*/src/*; do
    if [ -d $i ];then \
        $GMKDIR -p $MITK_BINDIR/include/mitk/plugins/$($GBASENAME $($GDIRNAME $($GDIRNAME $i)))/$($GBASENAME $i); \
        $GCP -R $i/*.h $MITK_BINDIR/include/mitk/plugins/$($GBASENAME $($GDIRNAME $($GDIRNAME $i)))/$($GBASENAME $i); \
    fi
done
for i in $MITK_SRCDIR/Plugins/org.blueberry.*/src; do
    $GMKDIR -p $MITK_BINDIR/include/mitk/plugins/$($GBASENAME $($GDIRNAME $i))
    $GCP -R $i/*.h $MITK_BINDIR/include/mitk/plugins/$($GBASENAME $($GDIRNAME $i))
done
for i in $MITK_SRCDIR/Plugins/org.blueberry.*/src/*; do
    if [ -d $i ];then \
        $GMKDIR -p $MITK_BINDIR/include/mitk/plugins/$($GBASENAME $($GDIRNAME $($GDIRNAME $i)))/$($GBASENAME $i); \
        $GCP -R $i/*.h $MITK_BINDIR/include/mitk/plugins/$($GBASENAME $($GDIRNAME $($GDIRNAME $i)))/$($GBASENAME $i); \
    fi
done
for i in $(find $MITK_BLDDIR/MITK-build/Plugins -name "*Export.h"); do
    echo "$i $($GBASENAME $i)"
    $GCP -f $i $MITK_BINDIR/include/mitk/exports
done
#
# everything else
#
# Module headers: each Modules/<name>/include tree plus generated ui_*.h,
# then nested Modules/<a>/<b>/include trees.
for i in $MITK_SRCDIR/Modules/*/include; do
    $GMKDIR -p $MITK_BINDIR/include/mitk/$($GBASENAME $($GDIRNAME $i))
    $GCP -R $i $MITK_BINDIR/include/mitk/$($GBASENAME $($GDIRNAME $i))
done
for i in $MITK_SRCDIR/Modules/*/include; do
    $GCP $MITK_BLDDIR/MITK-build/Modules/$($GBASENAME $($GDIRNAME $i))/ui_*.h $MITK_BINDIR/include/mitk/$($GBASENAME $($GDIRNAME $i))
done
for i in $MITK_SRCDIR/Modules/*/*/include; do
    $GMKDIR -p $MITK_BINDIR/include/mitk/$($GBASENAME $($GDIRNAME $($GDIRNAME $i)))/$($GBASENAME $($GDIRNAME $i))
    $GCP -R $i $MITK_BINDIR/include/mitk/$($GBASENAME $($GDIRNAME $($GDIRNAME $i)))/$($GBASENAME $($GDIRNAME $i))
done
# Generated export/config/ui headers gathered into flat directories.
for i in $(find $MITK_BLDDIR -name "*Exports.h"); do
    echo "$i $($GBASENAME $i)"
    $GCP -f $i $MITK_BINDIR/include/mitk/exports
done
for i in $(find $MITK_BLDDIR/MITK-build/Modules -name "*Export.h"); do
    echo "$i $($GBASENAME $i)"
    $GCP -f $i $MITK_BINDIR/include/mitk/exports
done
for i in $(find $MITK_BLDDIR/MITK-build/Modules -name "ui_*.h"); do
    echo "$i $($GBASENAME $i)"
    $GCP -f $i $MITK_BINDIR/include/mitk/ui_files
done
for i in $(find $MITK_BLDDIR/MITK-build -name "*Config.h"); do
    echo "$i $($GBASENAME $i)"
    $GCP -f $i $MITK_BINDIR/include/mitk/configs
done
# One-off include directories for module sources that don't follow the
# standard Modules/<name>/include layout.
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/ContourModel/DataManagement
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/module
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/service
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/util
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/ImageDenoising
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/LegacyGL
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Multilabel
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Overlays
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Segmentation/Algorithms
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Segmentation/Controllers
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Segmentation/Interactions
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Segmentation/SegmentationUtilities/BooleanOperations
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Segmentation/SegmentationUtilities/MorphologicalOperations
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/SegmentationUI/Qmitk
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/SurfaceInterpolation
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/ContourModel
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/ImageDenoising
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/LegacyGL
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Multilabel
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Overlays
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/QtWidgets
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Segmentation
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/Segmentation/Interactions
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/SegmentationUI
$GMKDIR -p $MITK_BINDIR/include/mitk/Modules/SurfaceInterpolation
$GMKDIR -p $MITK_BINDIR/include/mitk/Utilities/mbilog
# Source-tree headers for those modules...
$GCP -f $MITK_SRCDIR/Modules/ContourModel/DataManagement/*.h $MITK_BINDIR/include/mitk/Modules/ContourModel/DataManagement
$GCP -f $MITK_SRCDIR/Modules/CppMicroServices/core/src/module/*.h $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/module
$GCP -f $MITK_SRCDIR/Modules/CppMicroServices/core/src/service/*.h $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/service
$GCP -f $MITK_SRCDIR/Modules/CppMicroServices/core/src/util/*.h $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/util
$GCP -f $MITK_SRCDIR/Modules/CppMicroServices/core/src/module/*.tpp $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/module
$GCP -f $MITK_SRCDIR/Modules/CppMicroServices/core/src/service/*.tpp $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/service
$GCP -f $MITK_SRCDIR/Modules/CppMicroServices/core/src/util/*.tpp $MITK_BINDIR/include/mitk/Modules/CppMicroServices/core/src/util
$GCP -f $MITK_SRCDIR/Modules/ImageDenoising/*.h $MITK_BINDIR/include/mitk/Modules/ImageDenoising
$GCP -f $MITK_SRCDIR/Modules/ImageDenoising/*.txx $MITK_BINDIR/include/mitk/Modules/ImageDenoising
$GCP -f $MITK_SRCDIR/Modules/LegacyGL/*.h $MITK_BINDIR/include/mitk/Modules/LegacyGL
$GCP -f $MITK_SRCDIR/Modules/Multilabel/*.h $MITK_BINDIR/include/mitk/Modules/Multilabel
$GCP -f $MITK_SRCDIR/Modules/Overlays/*.h $MITK_BINDIR/include/mitk/Modules/Overlays
$GCP -f $MITK_SRCDIR/Modules/Segmentation/Algorithms/*.h $MITK_BINDIR/include/mitk/Modules/Segmentation/Algorithms
$GCP -f $MITK_SRCDIR/Modules/Segmentation/Controllers/*.h $MITK_BINDIR/include/mitk/Modules/Segmentation/Controllers
$GCP -f $MITK_SRCDIR/Modules/Segmentation/Interactions/*.h $MITK_BINDIR/include/mitk/Modules/Segmentation/Interactions
$GCP -f $MITK_SRCDIR/Modules/Segmentation/SegmentationUtilities/BooleanOperations/*.h $MITK_BINDIR/include/mitk/Modules/Segmentation/SegmentationUtilities/BooleanOperations
$GCP -f $MITK_SRCDIR/Modules/Segmentation/SegmentationUtilities/MorphologicalOperations/*.h $MITK_BINDIR/include/mitk/Modules/Segmentation/SegmentationUtilities/MorphologicalOperations
$GCP -f $MITK_SRCDIR/Modules/SegmentationUI/Qmitk/*.h $MITK_BINDIR/include/mitk/Modules/SegmentationUI/Qmitk
$GCP -f $MITK_SRCDIR/Modules/SurfaceInterpolation/*.h $MITK_BINDIR/include/mitk/Modules/SurfaceInterpolation
$GCP -f $MITK_SRCDIR/Utilities/mbilog/*.h $MITK_BINDIR/include/mitk/Utilities/mbilog
# ...and the corresponding generated headers from the build tree.
$GCP -f $MITK_BLDDIR/MITK-build/Modules/ContourModel/*.h $MITK_BINDIR/include/mitk/Modules/ContourModel
$GCP -f $MITK_BLDDIR/MITK-build/Modules/ImageDenoising/*.h $MITK_BINDIR/include/mitk/Modules/ImageDenoising
$GCP -f $MITK_BLDDIR/MITK-build/Modules/LegacyGL/*.h $MITK_BINDIR/include/mitk/Modules/LegacyGL
$GCP -f $MITK_BLDDIR/MITK-build/Modules/Multilabel/*.h $MITK_BINDIR/include/mitk/Modules/Multilabel
$GCP -f $MITK_BLDDIR/MITK-build/Modules/Overlays/*.h $MITK_BINDIR/include/mitk/Modules/Overlays
$GCP -f $MITK_BLDDIR/MITK-build/Modules/QtWidgets/*.h $MITK_BINDIR/include/mitk/Modules/QtWidgets
$GCP -f $MITK_BLDDIR/MITK-build/Modules/Segmentation/*.h $MITK_BINDIR/include/mitk/Modules/Segmentation
$GCP -f $MITK_BLDDIR/MITK-build/Modules/Segmentation/Interactions/*.h $MITK_BINDIR/include/mitk/Modules/Segmentation/Interactions
$GCP -f $MITK_BLDDIR/MITK-build/Modules/SegmentationUI/*.h $MITK_BINDIR/include/mitk/Modules/SegmentationUI
$GCP -f $MITK_BLDDIR/MITK-build/Modules/SurfaceInterpolation/*.h $MITK_BINDIR/include/mitk/Modules/SurfaceInterpolation
# copy executable
$GCP -fR $MITK_BLDDIR/MITK-build/bin/MitkWorkbench* $MITK_BINDIR/bin
$GCP -f $MITK_BLDDIR/MITK-build/bin/usResourceCompiler* $MITK_BINDIR/bin
$GCP -f $MITK_BLDDIR/MITK-build/bin/MitkPluginGenerator* $MITK_BINDIR/bin
for i in $(find $MITK_BLDDIR/MITK-build/lib/plugins -name "*.REPLACEME_SV_SO_FILE_EXTENSION*"); do
    echo "$i $($GBASENAME $i)"
    $GCP -f $i $MITK_BINDIR/bin/plugins
done
# create a wrapper script that sets up the library and Python environment
# before launching the MitkWorkbench executable
echo "#!/bin/sh -f" > REPLACEME_SV_TOP_BIN_DIR_MITK/bin/workbench-wrapper
echo "export LD_LIBRARY_PATH=REPLACEME_SV_TOP_BIN_DIR_MITK/lib:\$LD_LIBRARY_PATH" >> REPLACEME_SV_TOP_BIN_DIR_MITK/bin/workbench-wrapper
echo "export LD_LIBRARY_PATH=REPLACEME_SV_TOP_BIN_DIR_MITK/bin:\$LD_LIBRARY_PATH" >> REPLACEME_SV_TOP_BIN_DIR_MITK/bin/workbench-wrapper
echo "export PYTHONHOME=REPLACEME_SV_TOP_BIN_DIR_PYTHON" >> REPLACEME_SV_TOP_BIN_DIR_MITK/bin/workbench-wrapper
echo "export PYTHONPATH=REPLACEME_SV_TOP_BIN_DIR_PYTHON/lib/pythonREPLACEME_SV_PYTHON_MAJOR_VERSION.REPLACEME_SV_PYTHON_MINOR_VERSION/lib-dynload:REPLACEME_SV_TOP_BIN_DIR_PYTHON/lib:REPLACEME_SV_TOP_BIN_DIR_PYTHON/lib/pythonREPLACEME_SV_PYTHON_MAJOR_VERSION.REPLACEME_SV_PYTHON_MINOR_VERSION:REPLACEME_SV_TOP_BIN_DIR_PYTHON/lib/pythonREPLACEME_SV_PYTHON_MAJOR_VERSION.REPLACEME_SV_PYTHON_MINOR_VERSION/site-packages" >> REPLACEME_SV_TOP_BIN_DIR_MITK/bin/workbench-wrapper
# Forward ALL arguments with "$@".  The old wrapper passed only "$1".."$5"
# (and appended empty-string arguments when fewer were given); exec also
# avoids leaving an extra shell process behind.
echo "exec REPLACEME_SV_TOP_BIN_DIR_MITK/bin/MitkWorkbench \"\$@\"" >> REPLACEME_SV_TOP_BIN_DIR_MITK/bin/workbench-wrapper
chmod u+w,a+rx REPLACEME_SV_TOP_BIN_DIR_MITK/bin/workbench-wrapper
|
<filename>backend/subdomains/files/routes/download.js
const express = require('express'),
router = express.Router(),
path = require('path'),
fs = require('fs'),
{authInspector, ROLE} = require('../authManager'),
{ atob } = require('buffer'),
crypto = require('crypto'),
files = require('../files'),
rateLimit = require('express-rate-limit');
// Directory where uploaded files live (shared with the upload routes).
const UPLOAD_DIR = files.UPLOAD_DIR;
// Throttle download attempts per client.
// NOTE(review): both values come from env vars and are therefore strings
// (or undefined) - confirm express-rate-limit coerces them as intended.
const DOWNLOADS_RATE_LIMITER = rateLimit({
    windowMs: process.env.GENERAL_LIMITER_TIME_WINDOW_MINS * 60 * 1000,
    max: process.env.FILES_DOWNLOADS_LIMITER_MAX_REQUESTS,
    headers: false
});
/**
 * GET /?key=... - redeem a one-time download key issued by POST /request.
 * Keys are valid for 3 seconds and are consumed on successful use.
 * Responses: 400 missing/unknown key, 401 expired key, 410 file vanished.
 */
router.get('/', DOWNLOADS_RATE_LIMITER, (req, res) => {
    if (!req.query.key)
        return res.sendStatus(400);
    const currentDate = new Date();
    for (let i = 0; i < downloadSessions.length; i++) {
        if (req.query.key === downloadSessions[i].key) {
            if (currentDate - downloadSessions[i].dateCreated < 3000) {
                if (!fs.existsSync(downloadSessions[i].filePath))
                    return res.sendStatus(410);
                res.download(downloadSessions[i].filePath);
                downloadSessions.splice(i, 1);
                return;
            } else {
                // Bug fix: was `sendStatus(401)` (missing `res.`), which threw
                // a ReferenceError instead of answering expired-key requests.
                return res.sendStatus(401);
            }
        }
    }
    return res.sendStatus(400);
});
// In-memory one-time download sessions, consumed by GET / or pruned once
// expired.  Single-process state by design.
let downloadSessions = [];

/**
 * POST /request - validate access to a file and mint a short-lived
 * download key, returned in the Authorization response header.
 */
router.post('/request', authInspector(ROLE.USER), (req, res) => {
    let baseName = req.body.baseName,
        uploader = req.body.uploader,
        isInvited = req.body.isInvited; // isInvited is 0 or 1
    if (!baseName || !uploader || (isInvited !== 0 && isInvited !== 1))
        return res.sendStatus(400);
    // NOTE(review): Node lowercases incoming HTTP header names, so
    // req.headers['Role'] / req.headers['Username'] only match if
    // authInspector assigns these exact keys onto req.headers - confirm.
    if (req.headers['Role'] === ROLE.USER && req.headers['Username'] !== uploader)
        return res.status(403).send("Can't download another user's file when role=user");
    // Security fix: reject path traversal in the user-controlled segments
    // before they are joined into a filesystem path.
    const unsafe = (s) => typeof s !== 'string' || s.includes('..') || s.includes('/') || s.includes('\\');
    if (unsafe(baseName) || unsafe(uploader))
        return res.sendStatus(400);
    const filePath = path.join(UPLOAD_DIR, (isInvited ? 'invited' : 'users'), uploader, baseName);
    if (!fs.existsSync(filePath))
        return res.sendStatus(404);
    // Leak fix: expired sessions were only removed when their key was
    // presented to GET /, so the array grew without bound; prune them here.
    const now = new Date();
    downloadSessions = downloadSessions.filter(s => now - s.dateCreated < 3000);
    const downloadSession = {
        key: crypto.randomBytes(16).toString('hex'),
        dateCreated: new Date(),
        filePath: filePath
    };
    downloadSessions.push(downloadSession);
    res.set('Authorization', downloadSession.key);
    res.sendStatus(200);
});

module.exports = router;
<gh_stars>100-1000
import _ from 'lodash'
import {limitIterator} from '../utils'
/**
 * Traverse a single child element, namespacing each traversal result under
 * the child's `id` prop when one is set.
 *
 * Yields the outputs of `traverse(child, option)` unchanged when the child
 * has no id; otherwise yields shallow copies whose `result` is wrapped as
 * `{[id]: result}`.
 *
 * Changes from the original: the inner `child.props.id != null` ternary was
 * dead code (always true on this branch) and is removed, and the lodash
 * `_.assign` is replaced by object spread (equivalent copy of own
 * enumerable properties), dropping the lodash dependency for this function.
 */
function * traverseChild (option, child, traverse) {
  const childOutputs = traverse(child, option)
  // slight performance optimization: no id means no rewrapping is needed
  if (child.props.id == null) {
    yield * childOutputs
  } else {
    for (let output of childOutputs) {
      yield {...output, result: {[child.props.id]: output.result}}
    }
  }
}
/**
 * Yield one traversal per child, in order.  Yields nothing when there are
 * no children.  Note the generators produced by traverseChild are yielded
 * themselves (not delegated to); the consumer drives each one.
 */
function * childrenTraversals (option, children, traverse) {
  if (!children || children.length === 0) return
  for (const child of children) {
    yield traverseChild(option, child, traverse)
  }
}
/**
 * Visit an element: traverse every child and cap the number of yielded
 * traversals at the element's `limit` prop via limitIterator.
 */
function * visit (option, {props, children}, traverse) {
  yield * limitIterator(childrenTraversals(option, children, traverse), props.limit)
}

export default {visit}
|
#!/usr/bin/env ash
#=================================================
# File name: ipv6-helper
# Description: Install IPV6 Modules On OpenWrt
# System Required: OpenWrt
# Version: 1.0
# License: MIT
# Author: SuLingGG
# Blog: https://mlapp.cn
#=================================================

# ANSI color helpers for status output.
Green_font_prefix="\033[32m"
Red_font_prefix="\033[31m"
Green_background_prefix="\033[42;37m"
Red_background_prefix="\033[41;37m"
Font_color_suffix="\033[0m"
INFO="[${Green_font_prefix}INFO${Font_color_suffix}]"
ERROR="[${Red_font_prefix}ERROR${Font_color_suffix}]"
# Print usage help (shown when the script runs with no sub-command).
# Note: "hybird" is the literal sub-command name the dispatcher matches,
# so the help text intentionally mirrors that spelling.
Welcome(){
    echo -e "${Green_font_prefix}\nThis tool can help you install IPV6 modules on OpenWrt.\n${Font_color_suffix}"
    echo -e "Usage:"
    echo -e "ipv6-helper sub-command"
    echo -e "Example:"
    echo -e "\tipv6-helper install: Install ipv6-helper & IPV6 modules"
    echo -e "\tipv6-helper remove: Remove ipv6-helper & IPV6 modules\n"
    echo -e "Optional Usage:"
    echo -e "\tipv6-helper server: Set IPV6 configuration to server mode"
    echo -e "\tipv6-helper relay: Set IPV6 configuration to relay mode"
    echo -e "\tipv6-helper hybird: Set IPV6 configuration to hybird mode"
    echo -e "\tipv6-helper clean: Remove mwan3 modules\n"
}
# Ask the user whether to reboot now; any answer other than y/Y defers.
RebootConfirm(){
    echo -n -e "${Green_font_prefix}Need reboot, reboot now [y/N] (default N)? ${Font_color_suffix}"
    read answer
    case $answer in
    Y | y)
        echo -e "Rebooting...\n" && reboot;;
    *)
        echo -e "You can reboot later manually.\n";;
    esac
}
# Guard used by the mode-switching sub-commands: warn when the IPV6
# modules have not been installed yet (marker file written by `install`).
CheckInstall(){
    if [ ! -f "/etc/opkg/ipv6-installed" ];then
        # Fixes: "shoud" typo, and the message previously ended with
        # ${Green_font_prefix} instead of ${Font_color_suffix}, leaving the
        # terminal stuck in green.
        echo -e "${Green_font_prefix}\nYou should execute 'ipv6-helper install' first.\n${Font_color_suffix}"
    else
        echo -e "${Green_font_prefix}\nConfiguring...\n${Font_color_suffix}"
    fi
}
# Sub-command dispatcher.
if [ $# == 0 ];then
    Welcome
elif [[ $1 = "install" ]]; then
    echo -e "${Green_font_prefix}\nInstalling IPV6 modules...\n${Font_color_suffix}"
    cd /www/ipv6-modules
    opkg install *.ipk
    echo -e "${Green_font_prefix}\nIPV6 modules install successfully.\n${Font_color_suffix}"
    echo -e "${Green_font_prefix}Configuring IPV6...\n${Font_color_suffix}"
    # Set server to lan
    uci set dhcp.lan.dhcpv6=server
    uci set dhcp.lan.ra=server
    uci set dhcp.lan.ra_management=1
    uci set dhcp.lan.ra_default=1
    # Set server to wan6
    uci set dhcp.wan6=dhcp
    uci set dhcp.wan6.interface=wan
    uci set dhcp.wan6.ra=server
    uci set dhcp.wan6.dhcpv6=server
    uci set dhcp.wan6.master=1
    # Disable IPV6 ula prefix
    sed -i 's/^[^#].*option ula/#&/' /etc/config/network
    # Enable IPV6 dns resolution
    uci delete dhcp.@dnsmasq[0].filter_aaaa
    # Set mwan3 balance strategy to default
    uci set mwan3.balanced.last_resort=default
    # Commit changes
    uci commit
    # Remove mwan3 ip6tables rules
    cp /lib/mwan3/mwan3.sh /lib/mwan3/mwan3.sh.orig
    # Bug fix: the pattern previously read "-t manle", which never matched
    # mwan3's "ip6tables -t mangle -w" invocations, so the rules were
    # never actually disabled.
    sed -i 's/ip6tables -t mangle -w/\/bin\/true/g' /lib/mwan3/mwan3.sh
    touch /etc/opkg/ipv6-installed
    echo -e "${Green_font_prefix}IPV6 configure successfully.\n${Font_color_suffix}"
    RebootConfirm
elif [[ $1 = "server" ]]; then
    CheckInstall
    # Set server to lan
    uci set dhcp.lan.dhcpv6=server
    uci set dhcp.lan.ra=server
    uci set dhcp.lan.ra_management=1
    uci set dhcp.lan.ra_default=1
    uci delete dhcp.lan.ndp
    # Set server to wan6
    uci set dhcp.wan6=dhcp
    uci set dhcp.wan6.interface=wan
    uci set dhcp.wan6.ra=server
    uci set dhcp.wan6.dhcpv6=server
    uci set dhcp.wan6.master=1
    uci delete dhcp.wan6.ndp
    # Commit changes
    uci commit
    echo -e "${Green_font_prefix}Server mode configure successfully.\n${Font_color_suffix}"
    RebootConfirm
elif [[ $1 = "relay" ]]; then
    CheckInstall
    # Set relay to lan
    uci set dhcp.lan.dhcpv6=relay
    uci set dhcp.lan.ndp=relay
    uci set dhcp.lan.ra=relay
    uci delete dhcp.lan.ra_management
    # Set relay to wan6
    uci set dhcp.wan6=dhcp
    uci set dhcp.wan6.interface=wan
    uci set dhcp.wan6.ra=relay
    uci set dhcp.wan6.dhcpv6=relay
    uci set dhcp.wan6.ndp=relay
    uci set dhcp.wan6.master=1
    # Commit changes
    uci commit
    echo -e "${Green_font_prefix}Relay mode configure successfully.\n${Font_color_suffix}"
    RebootConfirm
elif [[ $1 = "hybird" ]]; then
    CheckInstall
    # ("hybird" is kept as-is: it is the documented sub-command name and
    # the literal value written into the uci config.)
    # Set hybird to lan
    uci set dhcp.lan.dhcpv6=hybird
    uci set dhcp.lan.ndp=hybird
    uci set dhcp.lan.ra=hybird
    uci set dhcp.lan.ra_management=1
    uci set dhcp.lan.ra_default=1
    # Set hybird to wan6
    uci set dhcp.wan6=dhcp
    uci set dhcp.wan6.interface=wan
    uci set dhcp.wan6.ra=hybird
    uci set dhcp.wan6.dhcpv6=hybird
    uci set dhcp.wan6.ndp=hybird
    uci set dhcp.wan6.master=1
    # Commit changes
    uci commit
    echo -e "${Green_font_prefix}Hybird mode configure successfully.\n${Font_color_suffix}"
    RebootConfirm
elif [[ $1 = "remove" ]]; then
    echo -e "${Green_font_prefix}\nRemove IPV6 modules...\n${Font_color_suffix}"
    opkg remove --force-removal-of-dependent-packages ipv6helper kmod-sit odhcp6c luci-proto-ipv6 ip6tables kmod-ipt-nat6 odhcpd-ipv6only kmod-ip6tables-extra
    echo -e "${Green_font_prefix}\nIPV6 modules remove successfully.\n${Font_color_suffix}"
    echo -e "${Green_font_prefix}Revert IPV6 configurations...\n${Font_color_suffix}"
    # Remove wan6 dhcp configurations
    uci delete dhcp.wan6.ra
    uci delete dhcp.wan6.dhcpv6
    uci delete dhcp.wan6.ndp
    # Remove lan dhcp configurations
    uci delete dhcp.lan.dhcpv6
    uci delete dhcp.lan.ndp
    uci delete dhcp.lan.ra
    uci delete dhcp.lan.ra_management
    uci delete dhcp.lan.ra_default
    # Enable IPV6 ula prefix
    sed -i 's/#.*\toption ula/\toption ula/g' /etc/config/network
    # Disable IPV6 dns resolution
    uci set dhcp.@dnsmasq[0].filter_aaaa=1
    # Restore mwan3 balance strategy
    uci set mwan3.balanced.last_resort=unreachable
    # Commit changes
    uci commit
    # Restore mwan3 ip6tables rules
    rm /lib/mwan3/mwan3.sh
    cp /lib/mwan3/mwan3.sh.orig /lib/mwan3/mwan3.sh
    rm -f /etc/opkg/ipv6-installed
    echo -e "${Green_font_prefix}IPV6 remove successfully.\n${Font_color_suffix}"
    RebootConfirm
elif [[ $1 = "clean" ]]; then
    echo -e "${Green_font_prefix}\nRemove mwan3 modules...\n${Font_color_suffix}"
    opkg remove mwan3 luci-app-mwan3 luci-app-mwan3helper luci-app-syncdial
    echo -e "${Green_font_prefix}Mwan3 modules remove successfully.\n${Font_color_suffix}"
    RebootConfirm
fi

exit 0
package com.roncoo.education.course.service.dao.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;
import com.roncoo.education.course.service.dao.CourseUserStudyLogDao;
import com.roncoo.education.course.service.dao.impl.mapper.CourseUserStudyLogMapper;
import com.roncoo.education.course.service.dao.impl.mapper.entity.CourseUserStudyLog;
import com.roncoo.education.course.service.dao.impl.mapper.entity.CourseUserStudyLogExample;
import com.roncoo.education.util.base.Page;
import com.roncoo.education.util.base.PageUtil;
import com.roncoo.education.util.tools.IdWorker;
import com.roncoo.education.util.tools.SqlUtil;
@Repository
public class CourseUserStudyLogDaoImpl implements CourseUserStudyLogDao {
@Autowired
private JdbcTemplate jdbcTemplate;
@Autowired
private CourseUserStudyLogMapper courseUserStudyLogMapper;
@Override
public int save(CourseUserStudyLog record) {
record.setId(IdWorker.getId());
return this.courseUserStudyLogMapper.insertSelective(record);
}
@Override
public int deleteById(Long id) {
return this.courseUserStudyLogMapper.deleteByPrimaryKey(id);
}
@Override
public int updateById(CourseUserStudyLog record) {
return this.courseUserStudyLogMapper.updateByPrimaryKeySelective(record);
}
@Override
public CourseUserStudyLog getById(Long id) {
return this.courseUserStudyLogMapper.selectByPrimaryKey(id);
}
@Override
public Page<CourseUserStudyLog> listForPage(int pageCurrent, int pageSize, CourseUserStudyLogExample example) {
int count = this.courseUserStudyLogMapper.countByExample(example);
pageSize = PageUtil.checkPageSize(pageSize);
pageCurrent = PageUtil.checkPageCurrent(count, pageSize, pageCurrent);
int totalPage = PageUtil.countTotalPage(count, pageSize);
example.setLimitStart(PageUtil.countOffset(pageCurrent, pageSize));
example.setPageSize(pageSize);
return new Page<CourseUserStudyLog>(count, totalPage, pageCurrent, pageSize, this.courseUserStudyLogMapper.selectByExample(example));
}
@Override
public CourseUserStudyLog getByUserNoAndPeriodId(Long userNo, Long periodId) {
CourseUserStudyLogExample example = new CourseUserStudyLogExample();
example.createCriteria().andUserNoEqualTo(userNo).andPeriodIdEqualTo(periodId);
List<CourseUserStudyLog> list = this.courseUserStudyLogMapper.selectByExample(example);
if (list.isEmpty()) {
return null;
}
return list.get(0);
}
private <T> List<T> queryForObjectList(String sql, Class<T> clazz, Object... args) {
return jdbcTemplate.query(sql, args, new BeanPropertyRowMapper<T>(clazz));
}
@Override
public Page<CourseUserStudyLog> courseList(int PageCurrent, int PageSize, String beginGmtCreate, String endGmtCreate) {
StringBuilder sql = new StringBuilder();
sql.append("select course_name as courseName, course_id as courseId, count(course_id) as chapterId from course_user_study_log ");
if (!StringUtils.isEmpty(beginGmtCreate) && !StringUtils.isEmpty(endGmtCreate)) {
sql.append("where gmt_create >= '").append(beginGmtCreate).append(" 00:00:00' and ").append("gmt_create <= '").append(endGmtCreate).append(" 23:59:59' ").append("group by course_id order by chapterId desc limit ?,?");
} else if (!StringUtils.isEmpty(beginGmtCreate)) {
sql.append("where gmt_create >= '").append(beginGmtCreate).append(" 00:00:00' ").append("group by course_id order by chapterId desc limit ?,?");
} else if (!StringUtils.isEmpty(endGmtCreate)) {
sql.append("where gmt_create <= '").append(endGmtCreate).append(" 23:59:59' ").append("group by course_id order by chapterId desc limit ?,?");
} else {
sql.append("group by course_id order by chapterId desc limit ?,?");
}
int totalCount = courseTotalCount(beginGmtCreate, endGmtCreate);
int start = SqlUtil.countOffset(PageCurrent, PageSize);
int pageSize = PageUtil.checkPageSize(PageSize);
int pageCurrent = SqlUtil.checkPageCurrent(totalCount, pageSize, PageCurrent);
int totalPage = SqlUtil.countTotalPage(totalCount, pageSize);
List<CourseUserStudyLog> list = queryForObjectList(sql.toString(), CourseUserStudyLog.class, start, pageSize);
Page<CourseUserStudyLog> page = new Page<>(totalCount, totalPage, pageCurrent, pageSize, list);
return page;
}
private int courseTotalCount(String beginGmtCreate, String endGmtCreate) {
StringBuilder sql = new StringBuilder();
Map<String, Object> map = new HashMap<String, Object>();
if (!StringUtils.isEmpty(beginGmtCreate) && !StringUtils.isEmpty(endGmtCreate)) {
sql.append("select count(distinct course_id) as count from course_user_study_log ").append("where gmt_create >= '").append(beginGmtCreate).append(" 00:00:00' and ").append("gmt_create <= '").append(endGmtCreate).append(" 23:59:59'");
} else if (!StringUtils.isEmpty(beginGmtCreate)) {
sql.append("select count(distinct course_id) as count from course_user_study_log ").append("where gmt_create >= '").append(beginGmtCreate).append(" 00:00:00'");
} else if (!StringUtils.isEmpty(endGmtCreate)) {
sql.append("select count(distinct course_id) as count from course_user_study_log ").append("where gmt_create <= '").append(endGmtCreate).append(" 23:59:59'");
} else {
sql.append("select count(distinct course_id) as count from course_user_study_log ");
}
map = jdbcTemplate.queryForMap(sql.toString());
Integer count = 0;
if (!StringUtils.isEmpty(map.get("count"))) {
count = Integer.valueOf(String.valueOf(map.get("count")));
}
return count;
}
@Override
public List<CourseUserStudyLog> countCourseIdByGmtCreate(String beginGmtCreate, String endGmtCreate) {
StringBuilder sql = new StringBuilder();
sql.append("select count(course_id) as courseId, course_name as courseName from course_user_study_log ");
if (!StringUtils.isEmpty(beginGmtCreate) && !StringUtils.isEmpty(endGmtCreate)) {
sql.append("where gmt_create >= '").append(beginGmtCreate).append(" 00:00:00' and ").append("gmt_create <= '").append(endGmtCreate).append(" 23:59:59' ").append(" group by course_id order by courseId desc limit 0,5");
} else if (!StringUtils.isEmpty(beginGmtCreate)) {
sql.append("where gmt_create >= '").append(beginGmtCreate).append(" 00:00:00'").append(" group by course_id order by courseId desc limit 0,5");
} else if (!StringUtils.isEmpty(endGmtCreate)) {
sql.append("where gmt_create <= '").append(endGmtCreate).append(" 23:59:59'").append(" group by course_id order by courseId desc limit 0,5");
} else {
sql.append(" group by course_id order by courseId desc limit 0,5");
}
return queryForObjectList(sql.toString(), CourseUserStudyLog.class);
}
@Override
public Page<CourseUserStudyLog> periodList(Long courseId, int PageCurrent, int PageSize, String beginGmtCreate, String endGmtCreate) {
StringBuilder sql = new StringBuilder();
sql.append("select period_name as periodName, period_id as periodId, count(period_id) as chapterId from course_user_study_log where ");
if (!StringUtils.isEmpty(beginGmtCreate)) {
sql.append(" gmt_create >= '").append(beginGmtCreate).append(" 00:00:00' and ");
}
if (!StringUtils.isEmpty(endGmtCreate)) {
sql.append(" gmt_create <= '").append(endGmtCreate).append(" 23:59:59' and ");
}
sql.append("course_id = ? group by period_id order by chapterId desc limit ?,?");
int totalCount = periodTotalCount(courseId, beginGmtCreate, endGmtCreate);
int start = SqlUtil.countOffset(PageCurrent, PageSize);
int pageSize = PageUtil.checkPageSize(PageSize);
int pageCurrent = SqlUtil.checkPageCurrent(totalCount, pageSize, PageCurrent);
int totalPage = SqlUtil.countTotalPage(totalCount, pageSize);
List<CourseUserStudyLog> list = queryForObjectList(sql.toString(), CourseUserStudyLog.class, courseId, start, pageSize);
Page<CourseUserStudyLog> page = new Page<>(totalCount, totalPage, pageCurrent, pageSize, list);
return page;
}
private int periodTotalCount(Long courseId, String beginGmtCreate, String endGmtCreate) {
StringBuilder sql = new StringBuilder();
sql.append("select count(distinct period_id) as count from course_user_study_log where ");
if (!StringUtils.isEmpty(beginGmtCreate)) {
sql.append("gmt_create >= '").append(beginGmtCreate).append(" 00:00:00' and ");
}
if (!StringUtils.isEmpty(endGmtCreate)) {
sql.append("gmt_create <= '").append(endGmtCreate).append(" 23:59:59' and ");
}
sql.append("course_id = ?");
Integer count = 0;
Map<String, Object> map = jdbcTemplate.queryForMap(sql.toString(), courseId);
if (!StringUtils.isEmpty(map.get("count"))) {
count = Integer.valueOf(String.valueOf(map.get("count")));
}
return count;
}
@Override
public List<CourseUserStudyLog> countPeriodNoByCourseIdAndGmtCreate(Long courseId, String beginGmtCreate, String endGmtCreate) {
StringBuilder sql = new StringBuilder();
sql.append("select count(period_id) as periodId, period_name as periodName from course_user_study_log where ");
if (!StringUtils.isEmpty(beginGmtCreate)) {
sql.append("gmt_create >= '").append(beginGmtCreate).append(" 00:00:00' and ");
}
if (!StringUtils.isEmpty(endGmtCreate)) {
sql.append("gmt_create <= '").append(endGmtCreate).append(" 23:59:59' and ");
}
sql.append("course_id = ? group by period_id order by periodId desc limit 0,5");
return queryForObjectList(sql.toString(), CourseUserStudyLog.class, courseId);
}
} |
package moze_intel.projecte.gameObjs.container;
import java.util.function.Predicate;
import moze_intel.projecte.gameObjs.blocks.MatterFurnace;
import moze_intel.projecte.gameObjs.container.slots.MatterFurnaceOutputSlot;
import moze_intel.projecte.gameObjs.container.slots.SlotPredicates;
import moze_intel.projecte.gameObjs.container.slots.ValidatedSlot;
import moze_intel.projecte.gameObjs.registration.impl.BlockRegistryObject;
import moze_intel.projecte.gameObjs.registries.PEBlocks;
import moze_intel.projecte.gameObjs.registries.PEContainerTypes;
import moze_intel.projecte.gameObjs.tiles.RMFurnaceTile;
import net.minecraft.entity.player.PlayerInventory;
import net.minecraft.item.ItemStack;
import net.minecraftforge.items.IItemHandler;
public class RMFurnaceContainer extends DMFurnaceContainer {
public RMFurnaceContainer(int windowId, PlayerInventory invPlayer, RMFurnaceTile tile) {
super(PEContainerTypes.RM_FURNACE_CONTAINER, windowId, invPlayer, tile);
}
@Override
void initSlots(PlayerInventory invPlayer) {
IItemHandler fuel = tile.getFuel();
IItemHandler input = tile.getInput();
IItemHandler output = tile.getOutput();
//Fuel
this.addSlot(new ValidatedSlot(fuel, 0, 65, 53, SlotPredicates.FURNACE_FUEL));
Predicate<ItemStack> inputPredicate = stack -> !tile.getSmeltingResult(stack).isEmpty();
//Input(0)
this.addSlot(new ValidatedSlot(input, 0, 65, 17, inputPredicate));
int counter = 1;
//Input Storage
for (int i = 2; i >= 0; i--) {
for (int j = 3; j >= 0; j--) {
this.addSlot(new ValidatedSlot(input, counter++, 11 + i * 18, 8 + j * 18, inputPredicate));
}
}
counter = output.getSlots() - 1;
//Output(0)
this.addSlot(new MatterFurnaceOutputSlot(invPlayer.player, output, counter--, 125, 35));
//Output Storage
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 4; j++) {
this.addSlot(new MatterFurnaceOutputSlot(invPlayer.player, output, counter--, 147 + i * 18, 8 + j * 18));
}
}
addPlayerInventory(invPlayer, 24, 84);
}
@Override
protected BlockRegistryObject<MatterFurnace, ?> getValidBlock() {
return PEBlocks.RED_MATTER_FURNACE;
}
} |
#!/bin/bash
# Install every VS Code extension listed in ./extension_list.txt (one id per line).
HERE=$(dirname "$0")
cd "$HERE" || exit 1
# Fixes over the original: quote "$0"/"$ext" so paths/ids with spaces survive
# word splitting, use `read -r` so backslashes stay literal, abort when the
# directory change fails, and feed the file directly instead of a useless `cat`.
# The `|| [[ -n "$ext" ]]` clause still processes a final line that lacks a
# trailing newline.
while read -r ext || [[ -n "$ext" ]]; do
    [[ -z "$ext" ]] && continue  # skip blank lines
    echo "$ext"
    code --install-extension "$ext" --force
done < ./extension_list.txt
|
import { Component, OnDestroy, OnInit } from '@angular/core';
import { Subscription, interval } from 'rxjs';
import { StateService } from 'src/app/service/state.service';

@Component({
  selector: 'app-menu',
  templateUrl: './menu.component.html',
  styleUrls: ['./menu.component.css']
})
export class MenuComponent implements OnInit, OnDestroy {
  /** Latest open/closed state reported by the backend. */
  open = false;
  /** UI state for the collapsible menu. */
  isCollapsed = false;
  /** Polling subscription created in the constructor; released in ngOnDestroy. */
  mySubscription: Subscription;

  constructor(private stateService: StateService) {
    // Fetch the state immediately, then refresh it every 10 seconds.
    this.yopen();
    this.mySubscription = interval(10000).subscribe(() => {
      this.yopen();
    });
  }

  /** Refreshes `open` from the public state endpoint. */
  yopen(): void {
    this.stateService.isOpenPublic().subscribe(result => this.open = result);
  }

  ngOnInit(): void {
  }

  ngOnDestroy(): void {
    // BUG FIX: the interval subscription was never unsubscribed, so every
    // destroyed MenuComponent kept polling forever (memory/network leak).
    this.mySubscription.unsubscribe();
  }
}
|
#!/bin/bash
# http://www.bagley.org/~doug/shootout/
# from David Pyke
# bash doesnt do floating point :( but we dont need it
# if we do fractional maths
#declare -r A=3877
#declare -r C=29573
#declare -r M=139968
#LAST=42
#function gen_random(){
#    LAST=$(( (($LAST * $A) + $C) % $M ))
#
#    RETVAL=$(( $1 * $LAST / $M ))
#    RETREM=$(( $1 * $LAST % $M ))
#}
#N=$[ ${1:-1} -1 ]
#while [ $N -gt 0 ]; do
#    gen_random 100
#    N=$[ $N - 1 ]
#done
# gen_random 100
# NOTE(review): with the generator above commented out, RETVAL, RETREM and M
# are all unset, so this prints a literal "+ /" — presumably the block above
# must be re-enabled for this output to be meaningful.
echo $RETVAL + $RETREM/$M
<reponame>mattk7/Telethon
import logging
import os
import threading
import warnings
from datetime import timedelta, datetime
from signal import signal, SIGINT, SIGTERM, SIGABRT
from threading import Lock
from time import sleep
from . import version, utils
from .crypto import rsa
from .errors import (
RPCError, BrokenAuthKeyError, ServerError, FloodWaitError,
FloodTestPhoneWaitError, TypeNotFoundError, UnauthorizedError,
PhoneMigrateError, NetworkMigrateError, UserMigrateError
)
from .network import authenticator, MtProtoSender, Connection, ConnectionMode
from .session import Session
from .tl import TLObject
from .tl.all_tlobjects import LAYER
from .tl.functions import (
InitConnectionRequest, InvokeWithLayerRequest, PingRequest
)
from .tl.functions.auth import (
ImportAuthorizationRequest, ExportAuthorizationRequest
)
from .tl.functions.help import (
GetCdnConfigRequest, GetConfigRequest
)
from .tl.functions.updates import GetStateRequest
from .tl.types.auth import ExportedAuthorization
from .update_state import UpdateState
# Fallback data-center coordinates used by __init__ when a fresh/blank
# session has no server address stored yet.
DEFAULT_DC_ID = 4
DEFAULT_IPV4_IP = '192.168.127.12'  # NOTE(review): looks like a sanitized/placeholder address — verify against the real DC list
DEFAULT_IPV6_IP = '[2fc00:db20:35b:7399::5]'  # NOTE(review): same concern as the IPv4 default
DEFAULT_PORT = 443
__log__ = logging.getLogger(__name__)  # module-level logger for this client
class TelegramBareClient:
"""Bare Telegram Client with just the minimum -
The reason to distinguish between a MtProtoSender and a
TelegramClient itself is because the sender is just that,
a sender, which should know nothing about Telegram but
rather how to handle this specific connection.
The TelegramClient itself should know how to initialize
a proper connection to the servers, as well as other basic
methods such as disconnection and reconnection.
This distinction between a bare client and a full client
makes it possible to create clones of the bare version
(by using the same session, IP address and port) to be
able to execute queries on either, without the additional
cost that would involve having the methods for signing in,
logging out, and such.
"""
# Current TelegramClient version
__version__ = version.__version__
# TODO Make this thread-safe, all connections share the same DC
_config = None # Server configuration (with .dc_options)
# region Initialization
    def __init__(self, session, api_id, api_hash,
                 connection_mode=ConnectionMode.TCP_FULL,
                 use_ipv6=False,
                 proxy=None,
                 update_workers=None,
                 spawn_read_thread=False,
                 timeout=timedelta(seconds=5),
                 **kwargs):
        """Refer to TelegramClient.__init__ for docs on this method"""
        if not api_id or not api_hash:
            raise ValueError(
                "Your API ID or Hash cannot be empty or None. "
                "Refer to telethon.rtfd.io for more information.")
        self._use_ipv6 = use_ipv6
        # Determine what session object we have
        if isinstance(session, str) or session is None:
            session = Session(session)
        elif not isinstance(session, Session):
            raise TypeError(
                'The given session must be a str or a Session instance.'
            )
        # ':' in session.server_address is True if it's an IPv6 address
        if (not session.server_address or
                (':' in session.server_address) != use_ipv6):
            # No usable stored address (or address family mismatch):
            # fall back to the module-level defaults.
            session.set_dc(
                DEFAULT_DC_ID,
                DEFAULT_IPV6_IP if self._use_ipv6 else DEFAULT_IPV4_IP,
                DEFAULT_PORT
            )
        self.session = session
        self.api_id = int(api_id)
        self.api_hash = api_hash
        if self.api_id < 20:  # official apps must use obfuscated
            connection_mode = ConnectionMode.TCP_OBFUSCATED
        # This is the main sender, which will be used from the thread
        # that calls .connect(). Every other thread will spawn a new
        # temporary connection. The connection on this one is always
        # kept open so Telegram can send us updates.
        self._sender = MtProtoSender(self.session, Connection(
            mode=connection_mode, proxy=proxy, timeout=timeout
        ))
        # Two threads may be calling reconnect() when the connection is lost,
        # we only want one to actually perform the reconnection.
        self._reconnect_lock = Lock()
        # Cache "exported" sessions as 'dc_id: Session' not to recreate
        # them all the time since generating a new key is a relatively
        # expensive operation.
        self._exported_sessions = {}
        # This member will process updates if enabled.
        # One may change self.updates.enabled at any later point.
        self.updates = UpdateState(workers=update_workers)
        # Used on connection - the user may modify these and reconnect
        kwargs['app_version'] = kwargs.get('app_version', self.__version__)
        for name, value in kwargs.items():
            if not hasattr(self.session, name):
                raise ValueError('Unknown named parameter', name)
            setattr(self.session, name, value)
        # Despite the state of the real connection, keep track of whether
        # the user has explicitly called .connect() or .disconnect() here.
        # This information is required by the read thread, who will be the
        # one attempting to reconnect on the background *while* the user
        # doesn't explicitly call .disconnect(), thus telling it to stop
        # retrying. The main thread, knowing there is a background thread
        # attempting reconnection as soon as it happens, will just sleep.
        self._user_connected = False
        # Save whether the user is authorized here (a.k.a. logged in)
        self._authorized = None  # None = We don't know yet
        # The first request must be in invokeWithLayer(initConnection(X)).
        # See https://core.telegram.org/api/invoking#saving-client-info.
        self._first_request = True
        # Constantly read for results and updates from within the main client,
        # if the user has left enabled such option.
        self._spawn_read_thread = spawn_read_thread
        self._recv_thread = None
        # Set while .idle() owns the socket; lets __call__ decide whether it
        # must receive responses itself or just wait for confirmations.
        self._idling = threading.Event()
        # Default PingRequest delay
        self._last_ping = datetime.now()
        self._ping_delay = timedelta(minutes=1)
        # Some errors are known but there's nothing we can do from the
        # background thread. If any of these happens, call .disconnect(),
        # and raise them next time .invoke() is tried to be called.
        self._background_error = None
# endregion
# region Connecting
    def connect(self, _sync_updates=True):
        """Connects to the Telegram servers, executing authentication if
        required. Note that authenticating to the Telegram servers is
        not the same as authenticating the desired user itself, which
        may require a call (or several) to 'sign_in' for the first time.
        Note that the optional parameters are meant for internal use.
        If '_sync_updates', sync_updates() will be called and a
        second thread will be started if necessary. Note that this
        will FAIL if the client is not connected to the user's
        native data center, raising a "UserMigrateError", and
        calling .disconnect() in the process.
        Returns True on success and False on unrecoverable failure.
        """
        __log__.info('Connecting to %s:%d...',
                     self.session.server_address, self.session.port)
        self._background_error = None  # Clear previous errors
        try:
            self._sender.connect()
            __log__.info('Connection success!')
            # Connection was successful! Try syncing the update state
            # UNLESS '_sync_updates' is False (we probably are in
            # another data center and this would raise UserMigrateError)
            # to also assert whether the user is logged in or not.
            self._user_connected = True
            if self._authorized is None and _sync_updates:
                try:
                    self.sync_updates()
                    self._set_connected_and_authorized()
                except UnauthorizedError:
                    self._authorized = False
            elif self._authorized:
                self._set_connected_and_authorized()
            return True
        except TypeNotFoundError as e:
            # This is fine, probably layer migration
            __log__.warning('Connection failed, got unexpected type with ID '
                            '%s. Migrating?', hex(e.invalid_constructor_id))
            self.disconnect()
            # Retry once with a clean connection after the layer change.
            return self.connect(_sync_updates=_sync_updates)
        except (RPCError, ConnectionError) as e:
            # Probably errors from the previous session, ignore them
            __log__.error('Connection failed due to %s', e)
            self.disconnect()
            return False
def is_connected(self):
return self._sender.is_connected()
def _wrap_init_connection(self, query):
"""Wraps query around InvokeWithLayerRequest(InitConnectionRequest())"""
return InvokeWithLayerRequest(LAYER, InitConnectionRequest(
api_id=self.api_id,
device_model=self.session.device_model,
system_version=self.session.system_version,
app_version=self.session.app_version,
lang_code=self.session.lang_code,
system_lang_code=self.session.system_lang_code,
lang_pack='', # "langPacks are for official apps only"
query=query
))
    def disconnect(self):
        """Disconnects from the Telegram server
        and stops all the spawned threads"""
        __log__.info('Disconnecting...')
        self._user_connected = False  # This will stop recv_thread's loop
        __log__.debug('Stopping all workers...')
        self.updates.stop_workers()
        # This will trigger a "ConnectionResetError" on the recv_thread,
        # which won't attempt reconnecting as ._user_connected is False.
        __log__.debug('Disconnecting the socket...')
        self._sender.disconnect()
        # TODO Shall we clear the _exported_sessions, or may be reused?
        self._first_request = True  # On reconnect it will be first again
        self.session.close()
    def __del__(self):
        # NOTE(review): running a full .disconnect() from the finalizer may
        # execute during interpreter shutdown when module globals are already
        # torn down — confirm this is safe in all teardown paths.
        self.disconnect()
    def _reconnect(self, new_dc=None):
        """If 'new_dc' is not set, only a call to .connect() will be made
        since it's assumed that the connection has been lost and the
        library is reconnecting.
        If 'new_dc' is set, the client is first disconnected from the
        current data center, clears the auth key for the old DC, and
        connects to the new data center.
        Returns the boolean result of .connect() (True on success).
        """
        if new_dc is None:
            if self.is_connected():
                __log__.info('Reconnection aborted: already connected')
                return True
            try:
                __log__.info('Attempting reconnection...')
                return self.connect()
            except ConnectionResetError as e:
                __log__.warning('Reconnection failed due to %s', e)
                return False
        else:
            # Since we're reconnecting possibly due to a UserMigrateError,
            # we need to first know the Data Centers we can connect to. Do
            # that before disconnecting.
            dc = self._get_dc(new_dc)
            __log__.info('Reconnecting to new data center %s', dc)
            self.session.set_dc(dc.id, dc.ip_address, dc.port)
            # auth_key's are associated with a server, which has now changed
            # so it's not valid anymore. Set to None to force recreating it.
            self.session.auth_key = None
            self.session.save()
            self.disconnect()
            return self.connect()
def set_proxy(self, proxy):
"""Change the proxy used by the connections.
"""
if self.is_connected():
raise RuntimeError("You can't change the proxy while connected.")
self._sender.connection.conn.proxy = proxy
# endregion
# region Working with different connections/Data Centers
def _on_read_thread(self):
return self._recv_thread is not None and \
threading.get_ident() == self._recv_thread.ident
    def _get_dc(self, dc_id, cdn=False):
        """Gets the Data Center (DC) associated to 'dc_id'.
        Fetches (and caches class-wide) the server configuration on first
        use; when 'cdn' is set, also loads the CDN RSA keys first.
        """
        if not TelegramBareClient._config:
            TelegramBareClient._config = self(GetConfigRequest())
        try:
            if cdn:
                # Ensure we have the latest keys for the CDNs
                for pk in self(GetCdnConfigRequest()).public_keys:
                    rsa.add_key(pk.public_key)
            # Match on id, address family, and whether it's a CDN DC.
            return next(
                dc for dc in TelegramBareClient._config.dc_options
                if dc.id == dc_id and bool(dc.ipv6) == self._use_ipv6 and bool(dc.cdn) == cdn
            )
        except StopIteration:
            if not cdn:
                raise
            # New configuration, perhaps a new CDN was added?
            TelegramBareClient._config = self(GetConfigRequest())
            return self._get_dc(dc_id, cdn=cdn)
    def _get_exported_client(self, dc_id):
        """Creates and connects a new TelegramBareClient for the desired DC.
        If it's the first time calling the method with a given dc_id,
        a new session will be first created, and its auth key generated.
        Exporting/Importing the authorization will also be done so that
        the auth is bound with the key.
        """
        # Thanks badoualy/kotlogram on /telegram/api/DefaultTelegramClient.kt
        # for clearly showing how to export the authorization! ^^
        session = self._exported_sessions.get(dc_id)
        if session:
            export_auth = None  # Already bound with the auth key
        else:
            # TODO Add a lock, don't allow two threads to create an auth key
            # (when calling .connect() if there wasn't a previous session).
            # for the same data center.
            dc = self._get_dc(dc_id)
            # Export the current authorization to the new DC.
            __log__.info('Exporting authorization for data center %s', dc)
            export_auth = self(ExportAuthorizationRequest(dc_id))
            # Create a temporary session for this IP address, which needs
            # to be different because each auth_key is unique per DC.
            #
            # Construct this session with the connection parameters
            # (system version, device model...) from the current one.
            session = Session(self.session)
            session.set_dc(dc.id, dc.ip_address, dc.port)
            self._exported_sessions[dc_id] = session
        __log__.info('Creating exported new client')
        client = TelegramBareClient(
            session, self.api_id, self.api_hash,
            proxy=self._sender.connection.conn.proxy,
            timeout=self._sender.connection.get_timeout()
        )
        # Skip update sync: we may not be on the user's native DC.
        client.connect(_sync_updates=False)
        if isinstance(export_auth, ExportedAuthorization):
            client(ImportAuthorizationRequest(
                id=export_auth.id, bytes=export_auth.bytes
            ))
        elif export_auth is not None:
            __log__.warning('Unknown export auth type %s', export_auth)
        client._authorized = True  # We exported the auth, so we got auth
        return client
    def _get_cdn_client(self, cdn_redirect):
        """Similar to ._get_exported_client, but for CDNs.
        No authorization export is needed; the CDN client simply inherits
        this client's authorization state.
        """
        session = self._exported_sessions.get(cdn_redirect.dc_id)
        if not session:
            dc = self._get_dc(cdn_redirect.dc_id, cdn=True)
            session = Session(self.session)
            session.set_dc(dc.id, dc.ip_address, dc.port)
            self._exported_sessions[cdn_redirect.dc_id] = session
        __log__.info('Creating new CDN client')
        client = TelegramBareClient(
            session, self.api_id, self.api_hash,
            proxy=self._sender.connection.conn.proxy,
            timeout=self._sender.connection.get_timeout()
        )
        # This will make use of the new RSA keys for this specific CDN.
        #
        # We won't be calling GetConfigRequest because it's only called
        # when needed by ._get_dc, and also it's static so it's likely
        # set already. Avoid invoking non-CDN methods by not syncing updates.
        client.connect(_sync_updates=False)
        client._authorized = self._authorized
        return client
# endregion
# region Invoking Telegram requests
    def __call__(self, *requests, retries=5):
        """Invokes (sends) a MTProtoRequest and returns (receives) its result.
        The invoke will be retried up to 'retries' times before raising
        RuntimeError().
        """
        if not all(isinstance(x, TLObject) and
                   x.content_related for x in requests):
            raise TypeError('You can only invoke requests, not types!')
        # Surface any fatal error recorded by the background read thread.
        if self._background_error:
            raise self._background_error
        for request in requests:
            request.resolve(self, utils)
        # For logging purposes
        if len(requests) == 1:
            which = type(requests[0]).__name__
        else:
            which = '{} requests ({})'.format(
                len(requests), [type(x).__name__ for x in requests])
        # Determine the sender to be used (main or a new connection)
        __log__.debug('Invoking %s', which)
        # Only receive in this thread when nobody else (the idle loop)
        # already owns the socket, or while a reconnection is in progress.
        call_receive = \
            not self._idling.is_set() or self._reconnect_lock.locked()
        for retry in range(retries):
            result = self._invoke(call_receive, *requests)
            if result is not None:
                return result
            __log__.warning('Invoking %s failed %d times, '
                            'reconnecting and retrying',
                            [str(x) for x in requests], retry + 1)
            sleep(1)
            # The ReadThread has priority when attempting reconnection,
            # since this thread is constantly running while __call__ is
            # only done sometimes. Here try connecting only once/retry.
            if not self._reconnect_lock.locked():
                with self._reconnect_lock:
                    self._reconnect()
        raise RuntimeError('Number of retries reached 0.')
    # Let people use client.invoke(SomeRequest()) instead client(...)
    invoke = __call__
def _invoke(self, call_receive, *requests):
try:
# Ensure that we start with no previous errors (i.e. resending)
for x in requests:
x.confirm_received.clear()
x.rpc_error = None
if not self.session.auth_key:
__log__.info('Need to generate new auth key before invoking')
self._first_request = True
self.session.auth_key, self.session.time_offset = \
authenticator.do_authentication(self._sender.connection)
if self._first_request:
__log__.info('Initializing a new connection while invoking')
if len(requests) == 1:
requests = [self._wrap_init_connection(requests[0])]
else:
# We need a SINGLE request (like GetConfig) to init conn.
# Once that's done, the N original requests will be
# invoked.
TelegramBareClient._config = self(
self._wrap_init_connection(GetConfigRequest())
)
self._sender.send(*requests)
if not call_receive:
# TODO This will be slightly troublesome if we allow
# switching between constant read or not on the fly.
# Must also watch out for calling .read() from two places,
# in which case a Lock would be required for .receive().
for x in requests:
x.confirm_received.wait(
self._sender.connection.get_timeout()
)
else:
while not all(x.confirm_received.is_set() for x in requests):
self._sender.receive(update_state=self.updates)
except BrokenAuthKeyError:
__log__.error('Authorization key seems broken and was invalid!')
self.session.auth_key = None
except TypeNotFoundError as e:
# Only occurs when we call receive. May happen when
# we need to reconnect to another DC on login and
# Telegram somehow sends old objects (like configOld)
self._first_request = True
__log__.warning('Read unknown TLObject code ({}). '
'Setting again first_request flag.'
.format(hex(e.invalid_constructor_id)))
except TimeoutError:
__log__.warning('Invoking timed out') # We will just retry
except ConnectionResetError as e:
__log__.warning('Connection was reset while invoking')
if self._user_connected:
# Server disconnected us, __call__ will try reconnecting.
return None
else:
# User never called .connect(), so raise this error.
raise RuntimeError('Tried to invoke without .connect()') from e
# Clear the flag if we got this far
self._first_request = False
try:
raise next(x.rpc_error for x in requests if x.rpc_error)
except StopIteration:
if any(x.result is None for x in requests):
# "A container may only be accepted or
# rejected by the other party as a whole."
return None
if len(requests) == 1:
return requests[0].result
else:
return [x.result for x in requests]
except (PhoneMigrateError, NetworkMigrateError,
UserMigrateError) as e:
# TODO What happens with the background thread here?
# For normal use cases, this won't happen, because this will only
# be on the very first connection (not authorized, not running),
# but may be an issue for people who actually travel?
self._reconnect(new_dc=e.new_dc)
return self._invoke(call_receive, *requests)
except ServerError as e:
# Telegram is having some issues, just retry
__log__.error('Telegram servers are having internal errors %s', e)
except (FloodWaitError, FloodTestPhoneWaitError) as e:
__log__.warning('Request invoked too often, wait %ds', e.seconds)
if e.seconds > self.session.flood_sleep_threshold | 0:
raise
sleep(e.seconds)
# Some really basic functionality
def is_user_authorized(self):
"""Has the user been authorized yet
(code request sent and confirmed)?"""
return self._authorized
def get_input_entity(self, peer):
"""
Stub method, no functionality so that calling
``.get_input_entity()`` from ``.resolve()`` doesn't fail.
"""
return peer
# endregion
# region Updates handling
def sync_updates(self):
"""Synchronizes self.updates to their initial state. Will be
called automatically on connection if self.updates.enabled = True,
otherwise it should be called manually after enabling updates.
"""
self.updates.process(self(GetStateRequest()))
def add_update_handler(self, handler):
"""Adds an update handler (a function which takes a TLObject,
an update, as its parameter) and listens for updates"""
if self.updates.workers is None:
warnings.warn(
"You have not setup any workers, so you won't receive updates."
" Pass update_workers=4 when creating the TelegramClient,"
" or set client.self.updates.workers = 4"
)
self.updates.handlers.append(handler)
def remove_update_handler(self, handler):
self.updates.handlers.remove(handler)
def list_update_handlers(self):
    """Returns a shallow copy of the registered update handlers, so
    the caller cannot accidentally mutate the internal list."""
    return self.updates.handlers[:]
# endregion
# region Constant read
def _set_connected_and_authorized(self):
    # Marks the client as authorized, starts the update workers and,
    # if requested and not already running, spawns the background
    # read thread (daemon, so it never blocks interpreter shutdown).
    self._authorized = True
    self.updates.setup_workers()
    if self._spawn_read_thread and self._recv_thread is None:
        self._recv_thread = threading.Thread(
            name='ReadThread', daemon=True,
            target=self._recv_thread_impl
        )
        self._recv_thread.start()
def _signal_handler(self, signum, frame):
    # Installed by idle() for the configured stop signals: disconnect
    # gracefully while connected; otherwise force-terminate the
    # process immediately (os._exit skips cleanup handlers).
    if self._user_connected:
        self.disconnect()
    else:
        os._exit(1)
def idle(self, stop_signals=(SIGINT, SIGTERM, SIGABRT)):
    """
    Idles the program by looping forever and listening for updates
    until one of the signals are received, which breaks the loop.

    :param stop_signals:
        Iterable containing signals from the signal module that will
        be subscribed to TelegramClient.disconnect() (effectively
        stopping the idle loop), which will be called on receiving one
        of those signals.
    :return:
    """
    # Idling on the main thread while a separate read thread exists
    # would mean two threads reading from the same connection.
    if self._spawn_read_thread and not self._on_read_thread():
        raise RuntimeError('Can only idle if spawn_read_thread=False')

    self._idling.set()
    for sig in stop_signals:
        signal(sig, self._signal_handler)

    if self._on_read_thread():
        __log__.info('Starting to wait for items from the network')
    else:
        __log__.info('Idling to receive items from the network')

    while self._user_connected:
        try:
            # Keep the connection alive with periodic pings; the random
            # signed 8-byte integer is the ping payload.
            if datetime.now() > self._last_ping + self._ping_delay:
                self._sender.send(PingRequest(
                    int.from_bytes(os.urandom(8), 'big', signed=True)
                ))
                self._last_ping = datetime.now()

            __log__.debug('Receiving items from the network...')
            self._sender.receive(update_state=self.updates)
        except TimeoutError:
            # No problem
            __log__.debug('Receiving items from the network timed out')
        except ConnectionResetError:
            if self._user_connected:
                __log__.error('Connection was reset while receiving '
                              'items. Reconnecting')
                with self._reconnect_lock:
                    while self._user_connected and not self._reconnect():
                        sleep(0.1)  # Retry forever, this is instant messaging
        except:
            # Clear the idling flag before propagating any unexpected
            # error to the caller.
            self._idling.clear()
            raise

    self._idling.clear()
    __log__.info('Connection closed by the user, not reading anymore')
# By using this approach, another thread will be
# created and started upon connection to constantly read
# from the other end. Otherwise, manual calls to .receive()
# must be performed. The MtProtoSender cannot be connected,
# or an error will be thrown.
#
# This way, sending and receiving will be completely independent.
def _recv_thread_impl(self):
    # Target of the background read thread started by
    # _set_connected_and_authorized().
    # This thread is "idle" (only listening for updates), but also
    # excepts everything unlike the manual idle because it should
    # not crash.
    while self._user_connected:
        try:
            # Empty tuple: no signal handlers are installed here
            # (idle() only installs handlers for the signals given).
            self.idle(stop_signals=tuple())
        except Exception as error:
            __log__.exception('Unknown exception in the read thread! '
                              'Disconnecting and leaving it to main thread')
            # Unknown exception, pass it to the main thread
            try:
                import socks
                if isinstance(error, (
                        socks.GeneralProxyError, socks.ProxyConnectionError
                )):
                    # This is a known error, and it's not related to
                    # Telegram but rather to the proxy. Disconnect and
                    # hand it over to the main thread.
                    self._background_error = error
                    self.disconnect()
                    break
            except ImportError:
                "Not using PySocks, so it can't be a proxy error"

    # Allow a new read thread to be spawned on the next connection.
    self._recv_thread = None
# endregion
|
import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
# Load the data and split into train and test sets.
# NOTE(review): assumes the last column of stock_data.csv is the label and
# all preceding columns are numeric features — confirm the file's schema.
data = pd.read_csv('stock_data.csv')
X = data.iloc[:, :-1].values
y = data.iloc[:, -1].values
# Hold out 20% for evaluation; fixed random_state for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# Train the model
model = RandomForestClassifier(n_estimators=100, random_state=0)
model.fit(X_train, y_train)

# Make a prediction for the test set
y_pred = model.predict(X_test)
print(y_pred)
import socket
class PortScanner:
    """Simple TCP connect() port scanner.

    For each port it attempts a full TCP connection (1 s timeout) and
    prints whether the port is open or closed.
    """

    def __init__(self, target, portlist):
        """
        :param target: hostname or IP address to scan.
        :param portlist: iterable of TCP port numbers to probe.
        """
        self.target = target
        self.portlist = portlist

    def initialize_scan(self):
        """Prints a banner and runs the scan, reporting any error."""
        print('[+] Initializing scan...')
        print('[i] Target host: {}'.format(self.target))
        print('[i] Ports: {}'.format(self.portlist))
        try:
            self.scan_ports()
        except Exception as e:
            print('[-] An error occurred during scanning: {}'.format(e))

    def scan_ports(self):
        """Probes each port in self.portlist and prints its state."""
        for port in self.portlist:
            try:
                # The context manager guarantees the socket is closed even
                # if settimeout()/connect_ex() raises — the original code
                # leaked the socket on such errors.
                with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                    sock.settimeout(1)
                    # connect_ex returns 0 on success, an errno otherwise.
                    result = sock.connect_ex((self.target, port))
                    if result == 0:
                        print('[+] Port {} is open'.format(port))
                    else:
                        print('[-] Port {} is closed'.format(port))
            except socket.error as e:
                print('[-] An error occurred while scanning port {}: {}'.format(port, e))
# Example usage:
# NOTE(review): 'example.com' is a placeholder — only scan hosts you are
# authorized to probe.
target_host = 'example.com'
ports_to_scan = [80, 443, 22, 8080]

scanner = PortScanner(target_host, ports_to_scan)
scanner.initialize_scan()
<reponame>manisoni28/sarkari
package com.ghn.android.news.item;
/**
 * Plain data holder (POJO) for the app's "about" information:
 * logo, application name, company e-mail, website and description.
 */
public class ItemAbout {

    // Backing fields renamed to camelCase per Java conventions; the
    // public getter/setter API is unchanged.
    private String id;
    private String comLogo;
    private String appName;
    private String comEmail;
    private String comWebsite;
    private String comDes;

    /** @return the stored id value */
    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    /** @return the stored company logo value */
    public String getComLogo() {
        return comLogo;
    }

    public void setComLogo(String comLogo) {
        this.comLogo = comLogo;
    }

    /** @return the stored application name */
    public String getAppName() {
        return appName;
    }

    public void setAppName(String appName) {
        this.appName = appName;
    }

    /** @return the stored company e-mail */
    public String getComEmail() {
        return comEmail;
    }

    public void setComEmail(String comEmail) {
        this.comEmail = comEmail;
    }

    /** @return the stored company website */
    public String getComWebsite() {
        return comWebsite;
    }

    public void setComWebsite(String comWebsite) {
        this.comWebsite = comWebsite;
    }

    /** @return the stored company description */
    public String getComDes() {
        return comDes;
    }

    public void setComDes(String comDes) {
        this.comDes = comDes;
    }
}
|
def second_largest(arr):
    """Return the second-largest element of ``arr``.

    Works on a copy, so the caller's list is NOT mutated (the original
    implementation destructively removed the maximum from ``arr``).
    Duplicate maxima keep the original semantics: only one occurrence
    of the maximum is discarded, so second_largest([5, 5, 3]) == 5.

    :param arr: iterable with at least two elements.
    :raises ValueError: if fewer than two elements are given (same
        exception type the original raised via max() on an empty list).
    """
    tmp = list(arr)
    if len(tmp) < 2:
        raise ValueError('second_largest() needs at least two elements')
    tmp.remove(max(tmp))
    return max(tmp)


second_largest([10, 20, 30, 40, 50])  # Output: 40
<reponame>bikedataproject/go-bike-data-lib<filename>strava/model.go<gh_stars>0
package strava
import (
"time"
geo "github.com/paulmach/go.geo"
)
// SubscriptionMessage : Struct that holds the ID of an individual webhook subscription
type SubscriptionMessage struct {
	// ID is the numeric identifier of the subscription.
	ID int `json:"id"`
}
// RefreshMessage : Struct that holds the response when refreshing strava access
type RefreshMessage struct {
	TokenType    string `json:"token_type"`
	AccessToken  string `json:"access_token"`
	ExpiresAt    int    `json:"expires_at"` // presumably a unix timestamp — TODO confirm
	ExpiresIn    int    `json:"expires_in"` // presumably seconds until expiry — TODO confirm
	RefreshToken string `json:"refresh_token"`
}
// WebhookValidationRequest : Body of the incoming GET request to verify the endpoint
type WebhookValidationRequest struct {
	// HubChallenge is the challenge token sent by the webhook provider;
	// presumably it must be echoed back — TODO confirm in the handler.
	HubChallenge string `json:"hub.challenge"`
}
// WebhookMessage : Body of incoming webhook messages
type WebhookMessage struct {
	ObjectType     string `json:"object_type"`
	ObjectID       int    `json:"object_id"`
	AspectType     string `json:"aspect_type"`
	OwnerID        int    `json:"owner_id"`
	SubscriptionID int    `json:"subscription_id"`
	EventTime      int    `json:"event_time"`
	// Updates is left untyped because its shape varies per event —
	// TODO confirm which keys consumers actually read.
	Updates interface{} `json:"updates"`
}
// Activity : Struct representing an activity from Strava
type Activity struct {
	Distance           float32   `json:"distance"`
	MovingTime         int       `json:"moving_time"`
	ElapsedTime        int       `json:"elapsed_time"`
	TotalElevationGain float64   `json:"total_elevation_gain"`
	Type               string    `json:"type"`
	WorkoutType        int       `json:"workout_type"`
	StartDateLocal     time.Time `json:"start_date_local"`
	EndDateLocal       time.Time // no JSON tag: presumably computed locally — TODO confirm
	PointsTime         []time.Time // no JSON tag: presumably computed locally — TODO confirm
	StartLatlng        []float64   `json:"start_latlng"`
	EndLatlng          []float64   `json:"end_latlng"`
	Map                ActivityMap `json:"map"`
	Commute            bool        `json:"commute"`
	LineString         *geo.Path // no JSON tag: presumably a decoded path — TODO confirm
}
// ActivityMap : Struct representing the Map field in an activity message
type ActivityMap struct {
	ID string `json:"id"`
	// Polyline / SummaryPolyline are encoded polyline strings as sent
	// by Strava — TODO confirm the encoding used by consumers.
	Polyline        string `json:"polyline"`
	ResourceState   int    `json:"resource_state"`
	SummaryPolyline string `json:"summary_polyline"`
}
|
-- phpMyAdmin SQL Dump
-- version 3.2.3
-- http://www.phpmyadmin.net
--
-- 호스트: localhost
-- 처리한 시간: 18-05-13 16:18
-- 서버 버전: 5.1.41
-- PHP 버전: 5.2.12
SET SQL_MODE="NO_AUTO_VALUE_ON_ZERO";

--
-- Database: `worf`
--

CREATE DATABASE `worf` DEFAULT CHARACTER SET latin1 COLLATE latin1_swedish_ci;
USE `worf`;

-- --------------------------------------------------------

--
-- Table structure for table `message`
--
-- One row per chat message; `src_id`/`dest_id` presumably reference
-- `user`.`no` — TODO confirm against the application code.
-- NOTE(review): `time` is a free-form varchar, not a DATETIME column.

CREATE TABLE IF NOT EXISTS `message` (
  `no` int(11) NOT NULL AUTO_INCREMENT,
  `src_id` int(11) DEFAULT NULL,
  `dest_id` int(11) DEFAULT NULL,
  `text` varchar(255) DEFAULT NULL,
  `time` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`no`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=58 ;
--
-- 테이블의 덤프 데이터 `message`
--
INSERT INTO `message` (`no`, `src_id`, `dest_id`, `text`, `time`) VALUES
(1, 1, 2, 'hi!', '2016-05-21'),
(2, 1, 2, '123', '2018-05-11 20:59:10'),
(3, 1, 2, '123', '2018-05-11 21:04:12'),
(4, 1, 2, 'dsgdfgsdg', '2018-05-12 07:52:58'),
(5, 1, 2, 'dsgdfgsdg', '2018-05-12 07:53:35'),
(6, 2, 1, '', '2018-05-12 07:59:22'),
(7, 2, 1, 'sdgsdfg', '2018-05-12 07:59:23'),
(8, 2, 1, 'sdgsdfg', '2018-05-12 07:59:24'),
(9, 2, 5, 'sdfgsdg', '2018-05-12 07:59:31'),
(10, 2, 5, 'sdfgsdg', '2018-05-12 07:59:31'),
(11, 2, 5, 'sdfgsdg', '2018-05-12 07:59:31'),
(12, 2, 5, 'sdfgsdg', '2018-05-12 07:59:32'),
(13, 2, 5, 'sdfg', '2018-05-12 08:13:53'),
(14, 2, 5, 'sdfg', '2018-05-12 08:14:50'),
(15, 2, 5, 'sdfg', '2018-05-12 08:14:53'),
(16, 5, 2, 'sdfgsdfg', '2018-05-12 08:16:06'),
(17, 5, 2, 'sdfgsdgfsg', '2018-05-12 08:18:41'),
(18, 2, 5, 'sdfgdsfg', '2018-05-12 08:18:44'),
(19, 6, 1, '%ED%95%98%EC%9D%B4%ED%95%98%EC%9D%B4', '2018-05-12 08:30:16'),
(20, 6, 1, '%EB%82%98%EB%8F%84%ED%95%98%EC%9D%B4', '2018-05-12 09:44:59'),
(21, 6, 2, '%EB%B0%A9%EA%B0%80%EC%9B%8C%EC%9A%94', '2018-05-12 09:45:21'),
(22, 5, 2, 'dfgdfg', '2018-05-12 09:54:59'),
(23, 7, 3, '%EB%B0%A9%EA%B0%80%EC%9A%94', '2018-05-12 10:04:56'),
(24, 7, 3, '%EB%88%84%EA%B5%AC%EC%84%B8%EC%9A%94', '2018-05-12 10:05:08'),
(25, 7, 3, '%EB%82%9C%EB%82%98%EB%82%98', '2018-05-12 10:07:28'),
(26, 6, 3, '%ED%95%98%EC%9D%B4%ED%95%98%EC%9D%B4', '2018-05-12 23:06:44'),
(27, 6, 3, '%ED%95%98%EC%9D%B4%ED%95%98%EC%9D%B4', '2018-05-12 23:06:48'),
(28, 12, 8, 'hello', '2018-05-13 01:50:39'),
(29, 12, 8, 'hello', '2018-05-13 01:50:40'),
(30, 12, 8, 'hello', '2018-05-13 01:50:46'),
(43, 10, 8, 'oh+see+you+long+time+my+friend', '2018-05-13 04:28:00'),
(42, 10, 9, 'banga+banga%21', '2018-05-13 04:27:45'),
(41, 10, 12, 'Hi+nice+meet+you', '2018-05-13 04:27:11'),
(34, 8, 9, 'Hello+Im+UnYoung', '2018-05-13 04:23:48'),
(35, 10, 11, 'Hello+Im+UnYoung', '2018-05-13 04:24:05'),
(36, 11, 10, 'Oh+....+HI%21', '2018-05-13 04:24:15'),
(37, 11, 10, 'I+Want+to+get+experience+on+Korea', '2018-05-13 04:24:36'),
(38, 10, 11, 'Yea+we+going+to+get+together', '2018-05-13 04:24:58'),
(39, 10, 11, '%3A%29', '2018-05-13 04:25:13'),
(40, 11, 10, 'Haha', '2018-05-13 04:25:20'),
(44, 10, 7, 'what+sup+man%7E+', '2018-05-13 04:28:14'),
(45, 10, 31, 'hi', '2018-05-13 13:39:31'),
(46, 10, 29, 'hey+paul', '2018-05-13 13:40:52'),
(47, 32, 31, '%EC%95%88%EB%85%95%ED%95%98%EC%84%B8%EC%9A%94', '2018-05-13 13:47:09'),
(48, 32, 31, 'hi', '2018-05-13 13:47:14'),
(49, 32, 10, 'hello+do+you+like+pocketmon%3F', '2018-05-13 13:47:55'),
(50, 32, 30, 'hi+wang', '2018-05-13 13:48:11'),
(51, 32, 29, 'gary+your+name+alike+him', '2018-05-13 13:48:36'),
(52, 31, 32, 'hi%21', '2018-05-13 13:50:40'),
(53, 32, 29, 'hi', '2018-05-13 14:06:39'),
(54, 31, 32, 'hellohello', '2018-05-13 14:07:15'),
(55, 32, 31, '%EC%95%88%EB%85%95', '2018-05-13 15:29:23'),
(56, 31, 32, 'hellohello', '2018-05-13 15:29:47'),
(57, 32, 29, 'hi+rehi', '2018-05-13 15:44:17');
-- --------------------------------------------------------

--
-- Table structure for table `user`
--
-- Registered users. The dump data suggests text fields are stored
-- URL-encoded — TODO confirm.
-- NOTE(review): `password` appears to be stored in plaintext; it should
-- be hashed (e.g. bcrypt) before this goes anywhere near production.

CREATE TABLE IF NOT EXISTS `user` (
  `no` int(11) NOT NULL AUTO_INCREMENT,
  `id` varchar(255) DEFAULT NULL,
  `email` varchar(255) DEFAULT NULL,
  `depart` varchar(255) DEFAULT NULL,
  `major` varchar(255) DEFAULT NULL,
  `language` varchar(255) DEFAULT NULL,
  `studID` varchar(255) DEFAULT NULL,
  `intro` varchar(255) DEFAULT NULL,
  `name` varchar(255) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `isforeigner` int(11) DEFAULT NULL,
  `hashtag` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`no`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=33 ;
--
-- 테이블의 덤프 데이터 `user`
--
INSERT INTO `user` (`no`, `id`, `email`, `depart`, `major`, `language`, `studID`, `intro`, `name`, `password`, `isforeigner`, `hashtag`) VALUES
(10, 'dhdk33', '<EMAIL>', '', '%EC%BB%B4%ED%93%A8%ED%84%B0', '%EC%98%81%EC%96%B4', '12112', '%EB%82%98%EB%8F%84%EC%A2%8B%EC%95%84', '%ED%8F%AC%EC%BC%93%EB%AA%AC%EC%A2%8B%EC%95%84', '1111', 1, '%23%ED%96%84%EB%B2%84%EA%B1%B0%2F%23%ED%94%BC%EC%9E%90%2F%23%EC%A4%91%EC%8B%9D%2F'),
(14, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(15, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(16, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(27, 'qwer', 'qwe', '', 'sad', 'English', 'asd', 'I+want+to+eat+Dakgalbie', 'David', '1111', 1, ''),
(28, 'qwer', 'qwe', '', 'sad', 'English', 'asd', 'I+want+to+eat+Dakgalbie', 'David', '1111', 1, ''),
(32, 'worf', '11', '', 'qq', 'Korean', '1111', 'hi', 'worf', '1111', 0, '%23%EB%8D%95%EC%A7%84%EA%B3%B5%EC%9B%90%2F%23%EC%B9%98%ED%82%A8%2F%23%EC%96%91%EA%B6%81%2F%23%ED%95%9C%EC%98%A5%EB%A7%88%EC%9D%84%2F%23%EB%83%89%EB%A9%B4%2F%23%ED%96%84%EB%B2%84%EA%B1%B0%2F%23%EC%A7%AC%EB%BD%95%2F%23%EC%A4%91%EC%8B%9D%2F%23%EA%B0%88%EB%B9'),
(12, 'sarr94', '<EMAIL>', '', 'computer+science', 'english', '201414243', 'i+want+to+eat+dakgalbi', 'paul', '1111', 1, '%23%ED%8C%8C%EC%8A%A4%ED%83%80%2F%23%ED%96%84%EB%B2%84%EA%B1%B0%2F%23%EC%96%B8%EC%96%B4%EA%B5%90%EB%A5%98%2F%23%EC%82%BC%EA%B2%B9%EC%82%B4%2F%23%ED%94%BC%EC%9E%90%2F%23%EC%98%81%ED%99%94%2F'),
(13, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(17, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(18, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(19, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(20, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(21, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(22, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(23, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(24, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(25, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(26, 'g-dragon', '<EMAIL>', '', 'computer', 'English', '20171717', 'hello%2Ci%5C%27m+g-dragon.', 'dragon', 'dragon', 1, ''),
(29, 'qwer', 'qwe', '', 'sad', 'English', 'asd', 'i+going+to+han+ok+maul', 'Gary', '1111', 1, '%23%EC%B9%98%ED%82%A8%2F%23%EB%8D%95%EC%A7%84%EA%B3%B5%EC%9B%90%2F%23%ED%95%9C%EC%98%A5%EB%A7%88%EC%9D%84%2F%23%EC%96%91%EA%B6%81%2F%23%ED%96%84%EB%B2%84%EA%B1%B0%2F%23%EC%A4%91%EC%8B%9D%2F%23%ED%95%9C%EC%8B%9D%2F%23%EC%A7%AC%EB%BD%95%2F%23%EB%83%89%EB%A9'),
(30, 'qwer', 'qwe', '', 'sad', 'Chiness', 'asd', 'enjoy', 'Wang', '1111', 1, '%23%EB%8D%95%EC%A7%84%EA%B3%B5%EC%9B%90%2F%23%ED%95%9C%EC%98%A5%EB%A7%88%EC%9D%84%2F%23%EC%A4%91%EC%8B%9D%2F%23%ED%95%9C%EC%8B%9D%2F%23%EC%A7%AC%EB%BD%95%2F%23%EB%83%89%EB%A9%B4%2F%23%EC%A7%9C%EC%9E%A5%EB%A9%B4%2F%23%ED%94%BC%EC%9E%90%2F%23%EC%82%BC%EA%B2'),
(31, 'qwer', 'qwe', '', 'sad', 'Chiness', 'asd', 'So+cool%21', 'Chen', '1111', 1, '%23%EB%8D%95%EC%A7%84%EA%B3%B5%EC%9B%90%2F%23%ED%95%9C%EC%98%A5%EB%A7%88%EC%9D%84%2F%23%EC%96%91%EA%B6%81%2F%23%ED%96%84%EB%B2%84%EA%B1%B0%2F%23%EC%A4%91%EC%8B%9D%2F%23%ED%95%9C%EC%8B%9D%2F%23%EC%A7%AC%EB%BD%95%2F%23%EB%83%89%EB%A9%B4%2F%23%EC%A7%9C%EC%9E'); |
<gh_stars>0
package mechconstruct.util;
import mechconstruct.MechConstruct;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class LogUtils {
private static Logger logger = LogManager.getLogger(MechConstruct.MOD_ID);
public static void info(String message) {
logger.log(Level.INFO, message);
}
public static void error(String message) {
logger.log(Level.ERROR, message);
}
public static void log(Level level, String message) {
logger.log(level, message);
}
}
|
// this gets replaced automatically
// (a build step substitutes the real build timestamp for /*TIMESTAMP*/0)
document.querySelector("#updated-ts").innerText = new Date(/*TIMESTAMP*/0).toLocaleString();

// Hidden input used as a staging area for clipboard copies (see addEmotes).
const copybuffer = document.querySelector("#copybuffer");
/**
 * Renders emote tiles into the container selected by `target`.
 * Each tile shows the emote image and its `:name:` code; clicking a
 * tile copies the code to the clipboard via the shared copy buffer.
 *
 * @param {string} target - CSS selector of the container element.
 * @param {Object<string, string>} list - emote name -> image path,
 *     relative to https://tetr.io/res/.
 */
function addEmotes(target, list) {
  const targetEl = document.querySelector(target);
  // Object.entries iterates own enumerable properties only, replacing
  // the original for...in + hasOwnProperty guard.
  for (const [emote, path] of Object.entries(list)) {
    const el = document.createElement("div");
    el.classList.add("emote");
    const imgContEl = document.createElement("div");
    const imgEl = document.createElement("img");
    imgEl.src = "https://tetr.io/res/" + path;
    imgEl.height = 32;
    // Once loaded, constrain wide images by width instead of height.
    imgEl.onload = () => {
      if (imgEl.width > imgEl.height) {
        imgEl.width = 32;
        imgEl.removeAttribute("height");
      }
    };
    imgContEl.appendChild(imgEl);
    el.appendChild(imgContEl);
    const textEl = document.createElement("div");
    textEl.classList.add("text");
    textEl.innerText = ":" + emote + ":";
    el.addEventListener("click", (e) => {
      e.preventDefault();
      // Copy the emote code and flash a "copied" indicator for 1 s.
      copybuffer.value = textEl.innerText;
      copybuffer.select();
      // NOTE(review): document.execCommand("copy") is deprecated;
      // consider navigator.clipboard.writeText() where available.
      document.execCommand("copy");
      el.classList.add("copied");
      setTimeout(() => {
        el.classList.remove("copied");
      }, 1000);
    });
    el.appendChild(textEl);
    targetEl.appendChild(el);
  }
}
// Load the emote catalog and render each category into its section.
fetch("emotes.json")
  .then((res) => res.json())
  .then((emotes) => {
    addEmotes("#emotes-base", emotes.base);
    addEmotes("#emotes-supporter", emotes.supporter);
    addEmotes("#emotes-verified", emotes.verified);
    addEmotes("#emotes-staff", emotes.staff);
  })
  // The original chain had no rejection handler, so a failed fetch or
  // malformed JSON surfaced only as an unhandled promise rejection.
  .catch((err) => {
    console.error("Failed to load emotes.json:", err);
  });
|
#!/bin/bash
# NOTE: this script relies on bash-only features further down
# (`${var//pattern}` substitutions, `==` inside [ ]), so it must run
# under bash — the original `#! /bin/sh` shebang breaks on systems
# where /bin/sh is a strict POSIX shell such as dash.

# Run from the project root (one level above this script).
BASEDIR=$(dirname "$0")
cd "$BASEDIR/.." || exit 1

# Start from a clean build folder.
if [ -d build-mac ]
then
  sudo rm -R -f build-mac
fi

#---------------------------------------------------------------------------------------------------------
#variables

# Passing "demo" as the first argument selects the demo build.
DEMO=0
if [ "$1" == "demo" ]
then
  DEMO=1
fi
# Extract the hex-encoded plugin version from config.h and split it into
# major/minor/bugfix parts (layout 0xMMMMmmbb, per the masks below).
VERSION=`echo | grep PLUG_VERSION_HEX config.h`
VERSION=${VERSION//\#define PLUG_VERSION_HEX }
VERSION=${VERSION//\'}
MAJOR_VERSION=$(($VERSION & 0xFFFF0000))
MAJOR_VERSION=$(($MAJOR_VERSION >> 16))
MINOR_VERSION=$(($VERSION & 0x0000FF00))
MINOR_VERSION=$(($MINOR_VERSION >> 8))
BUG_FIX=$(($VERSION & 0x000000FF))

FULL_VERSION=$MAJOR_VERSION"."$MINOR_VERSION"."$BUG_FIX

# Plugin bundle name from config.h (quotes stripped).
PLUGIN_NAME=`echo | grep BUNDLE_NAME config.h`
PLUGIN_NAME=${PLUGIN_NAME//\#define BUNDLE_NAME }
PLUGIN_NAME=${PLUGIN_NAME//\"}

DMG_NAME=$PLUGIN_NAME-v$FULL_VERSION-mac

if [ $DEMO == 1 ]
then
  DMG_NAME=$DMG_NAME-demo
fi

# work out the paths to the binaries
VST2=`echo | grep VST2_PATH ../../common-mac.xcconfig`
VST2=${VST2//\VST2_PATH = }/$PLUGIN_NAME.vst
VST3=`echo | grep VST3_PATH ../../common-mac.xcconfig`
VST3=${VST3//\VST3_PATH = }/$PLUGIN_NAME.vst3
AU=`echo | grep AU_PATH ../../common-mac.xcconfig`
AU=${AU//\AU_PATH = }/$PLUGIN_NAME.component
APP=`echo | grep APP_PATH ../../common-mac.xcconfig`
APP=${APP//\APP_PATH = }/$PLUGIN_NAME.app

# Dev build folder
AAX=`echo | grep AAX_PATH ../../common-mac.xcconfig`
AAX=${AAX//\AAX_PATH = }/$PLUGIN_NAME.aaxplugin
AAX_FINAL="/Library/Application Support/Avid/Audio/Plug-Ins/$PLUGIN_NAME.aaxplugin"

# Signed and unsigned installer package paths.
PKG="installer/build-mac/$PLUGIN_NAME Installer.pkg"
PKG_US="installer/build-mac/$PLUGIN_NAME Installer.unsigned.pkg"

# Developer ID certificate name used for code signing.
CERT_ID=`echo | grep CERTIFICATE_ID ../../common-mac.xcconfig`
CERT_ID=${CERT_ID//\CERTIFICATE_ID = }

if [ $DEMO == 1 ]
then
  echo "making $PLUGIN_NAME version $FULL_VERSION DEMO mac distribution..."
  # cp "resources/img/AboutBox_Demo.png" "resources/img/AboutBox.png"
else
  echo "making $PLUGIN_NAME version $FULL_VERSION mac distribution..."
  # cp "resources/img/AboutBox_Registered.png" "resources/img/AboutBox.png"
fi

echo ""
#---------------------------------------------------------------------------------------------------------
# Stamp the current version / demo flag into the installer project files.
./scripts/update_installer_version.py $DEMO

echo "touching source to force recompile"
touch *.cpp

#---------------------------------------------------------------------------------------------------------
#remove existing dist folder
#if [ -d installer/dist ]
#then
#  rm -R installer/dist
#fi

#mkdir installer/dist

#remove existing binaries
if [ -d $APP ]
then
  # NOTE(review): "-f" is passed twice here (harmless duplication).
  sudo rm -f -R -f $APP
fi

if [ -d $AU ]
then
  sudo rm -f -R $AU
fi

if [ -d $VST2 ]
then
  sudo rm -f -R $VST2
fi

if [ -d $VST3 ]
then
  sudo rm -f -R $VST3
fi

if [ -d "${AAX}" ]
then
  sudo rm -f -R "${AAX}"
fi

if [ -d "${AAX_FINAL}" ]
then
  sudo rm -f -R "${AAX_FINAL}"
fi

#---------------------------------------------------------------------------------------------------------
# build xcode project. Change target to build individual formats
# Anything written to stderr is treated as a failed build below.
xcodebuild -project ./projects/$PLUGIN_NAME-macOS.xcodeproj -xcconfig ./config/$PLUGIN_NAME-mac.xcconfig DEMO_VERSION=$DEMO -target "All" -configuration Release 2> ./build-mac.log

if [ -s build-mac.log ]
then
  echo "build failed due to following errors:"
  echo ""
  cat build-mac.log
  exit 1
else
  rm build-mac.log
fi

#---------------------------------------------------------------------------------------------------------
#icon stuff - http://maxao.free.fr/telechargements/setfileicon.gz
echo "setting icons"
echo ""
setfileicon resources/$PLUGIN_NAME.icns $AU
setfileicon resources/$PLUGIN_NAME.icns $VST2
setfileicon resources/$PLUGIN_NAME.icns $VST3
setfileicon resources/$PLUGIN_NAME.icns "${AAX}"

#---------------------------------------------------------------------------------------------------------
#strip debug symbols from binaries
echo "stripping binaries"
# NOTE(review): the AU strip path targets a plugin.vst3 nested inside the
# .component bundle — confirm this matches the actual bundle layout.
strip -x $AU/Contents/Resources/plugin.vst3/Contents/MacOS/$PLUGIN_NAME
strip -x $VST2/Contents/MacOS/$PLUGIN_NAME
strip -x $VST3/Contents/MacOS/$PLUGIN_NAME
strip -x $APP/Contents/MacOS/$PLUGIN_NAME
strip -x "${AAX}/Contents/MacOS/$PLUGIN_NAME"

#---------------------------------------------------------------------------------------------------------
#ProTools stuff
echo "copying AAX ${PLUGIN_NAME} from 3PDev to main AAX folder"
sudo cp -p -R "${AAX}" "${AAX_FINAL}"
mkdir "${AAX_FINAL}/Contents/Factory Presets/"

echo "code sign AAX binary"
# NOTE(review): the PACE account and wcguid are placeholders (XXXX).
/Applications/PACEAntiPiracy/Eden/Fusion/Current/bin/wraptool sign --verbose --account XXXX --wcguid XXXX --signid "Developer ID Application: ""${CERT_ID}" --in "${AAX_FINAL}" --out "${AAX_FINAL}"

#---------------------------------------------------------------------------------------------------------
#Mac AppStore stuff
#xcodebuild -project $PLUGIN_NAME.xcodeproj -xcconfig $PLUGIN_NAME.xcconfig -target "APP" -configuration Release 2> ./build-mac.log
#echo "code signing app for appstore"
#echo ""
#codesign -f -s "3rd Party Mac Developer Application: ""${CERT_ID}" $APP --entitlements resources/$PLUGIN_NAME.entitlements
#echo "building pkg for app store"
#echo ""
#productbuild \
#     --component $APP /Applications \
#     --sign "3rd Party Mac Developer Installer: ""${CERT_ID}" \
#     --product "/Applications/$PLUGIN_NAME.app/Contents/Info.plist" installer/$PLUGIN_NAME.pkg

#---------------------------------------------------------------------------------------------------------
#10.8 Gatekeeper/Developer ID stuff
#echo "code app binary for Gatekeeper on 10.8"
#echo ""
#codesign -f -s "Developer ID Application: ""${CERT_ID}" $APP

#---------------------------------------------------------------------------------------------------------
# installer, uses Packages http://s.sudre.free.fr/Software/Packages/about.html
# NOTE(review): "sudo sudo" below is duplicated (harmless, one suffices).
sudo sudo rm -R -f installer/$PLUGIN_NAME-mac.dmg

echo "building installer"
echo ""
packagesbuild installer/$PLUGIN_NAME.pkgproj

echo "code-sign installer for Gatekeeper on 10.8"
echo ""
# Move the unsigned package aside, then productsign writes the signed
# package back under the original name.
mv "${PKG}" "${PKG_US}"
productsign --sign "Developer ID Installer: ""${CERT_ID}" "${PKG_US}" "${PKG}"
rm -R -f "${PKG_US}"

#set installer icon
setfileicon resources/$PLUGIN_NAME.icns "${PKG}"

#---------------------------------------------------------------------------------------------------------
# dmg, can use dmgcanvas http://www.araelium.com/dmgcanvas/ to make a nice dmg
echo "building dmg"
echo ""

if [ -d installer/$PLUGIN_NAME.dmgCanvas ]
then
  dmgcanvas installer/$PLUGIN_NAME.dmgCanvas installer/$DMG_NAME.dmg
else
  hdiutil create installer/$DMG_NAME.dmg -format UDZO -srcfolder installer/build-mac/ -ov -anyowners -volname $PLUGIN_NAME
fi

sudo rm -R -f installer/build-mac/

#---------------------------------------------------------------------------------------------------------
# zip
# echo "copying binaries..."
# echo ""
# cp -R $AU installer/dist/$PLUGIN_NAME.component
# cp -R $VST2 installer/dist/$PLUGIN_NAME.vst
# cp -R $VST3 installer/dist/$PLUGIN_NAME.vst3
# cp -R $AAX installer/dist/$PLUGIN_NAME.aaxplugin
# cp -R $APP installer/dist/$PLUGIN_NAME.app
#
# echo "zipping binaries..."
# echo ""
# ditto -c -k installer/dist installer/$PLUGIN_NAME-mac.zip
# rm -R installer/dist

#---------------------------------------------------------------------------------------------------------
# Restore any files the demo build modified.
if [ $DEMO == 1 ]
then
  git checkout installer/tf_vst.iss
  git checkout installer/tf_vst.pkgproj
  git checkout resources/img/AboutBox.png
fi

echo "done"
|
#!/usr/bin/env bash
# Copyright (c) 2015-2016 Spotify AB.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -uo pipefail

# First argument: the license header template file; remaining arguments:
# the source files whose headers must match it exactly.
LICENSE_HEADER_TEMPLATE_FILE="$1"
LICENSED_SOURCE_FILES="${*:2}"

# ANSI escapes used for the per-file report below.
FORMAT_FAIL="\033[31;1m"
FORMAT_SUCCESS="\033[32m"
FORMAT_COMMENT="\033[94m"
FORMAT_BOLD_ON="\033[1m"
FORMAT_RESET="\033[0m"

SYMBOL_FAIL="✗"
SYMBOL_SUCCESS="✓"

INVALID_SOURCE_FILES=""

# Check each file in our public API and internal sources.
for SOURCE_FILE in $LICENSED_SOURCE_FILES; do
  # Diff the source file’s first few lines with the license header template. They should not
  # differ. Also the header needs to be at the very top of the file, which is
  # why we compare against the first N lines (N = template length) of the file.
  diff \
    --brief \
    "$LICENSE_HEADER_TEMPLATE_FILE" \
    <(head -n \
      "$(wc -l "$LICENSE_HEADER_TEMPLATE_FILE" | awk '{print $1}')" \
      "$SOURCE_FILE") \
    &> /dev/null

  # shellcheck disable=SC2181
  if [ "$?" == "0" ]; then
    echo -en "${FORMAT_SUCCESS}${SYMBOL_SUCCESS}${FORMAT_RESET} \"${SOURCE_FILE}\""
  else
    # NOTE(review): the trailing ${FORMAT_BOLD_ON} below looks like it was
    # meant to be ${FORMAT_RESET}; the reset echoed after this if masks it.
    echo -en "${FORMAT_FAIL}${SYMBOL_FAIL} \"${SOURCE_FILE}\" ${FORMAT_BOLD_ON}[invalid license header]${FORMAT_BOLD_ON}"
    INVALID_SOURCE_FILES+="$SOURCE_FILE "
  fi
  echo -e "$FORMAT_RESET"
done

echo

# Non-empty INVALID_SOURCE_FILES means at least one file failed the check.
if [ -n "$INVALID_SOURCE_FILES" ]; then
  echo -e "${FORMAT_FAIL}${SYMBOL_FAIL} [FAILURE] The following source files contains an invalid license header:${FORMAT_RESET}"
  for SOURCE_FILE in $INVALID_SOURCE_FILES; do
    echo " - \"$SOURCE_FILE\""
  done
  echo -e "\n${FORMAT_COMMENT}Please make sure the license header in the mentioned files matches that of the license header template at \`$LICENSE_HEADER_TEMPLATE_FILE\`.${FORMAT_RESET}"
  exit 1
else
  echo -e "${FORMAT_SUCCESS}${SYMBOL_SUCCESS} [SUCCESS] All source files contains the required license.${FORMAT_RESET}"
  exit 0
fi
|
def remove_rubbish_words(data, rubbishList):
    """
    Remove rubbish words from the given text data list.

    A word is dropped when its stripped form is in ``rubbishList`` or
    parses as a number; all other entries are kept verbatim (including
    their original whitespace). The input list is never modified.

    :param data: A list of text data where rubbish words need to be removed.
    :param rubbishList: A list of rubbish words that should be removed from the text data.
    :return: A new list containing the text data with the rubbish words removed.
    """
    # The original copied `data` and then immediately replaced the copy
    # with this comprehension; the comprehension alone already returns a
    # new list and leaves `data` untouched, so the copy was dead code.
    return [
        word for word in data
        if word.strip() not in rubbishList and not is_number(word.strip())
    ]


def is_number(s):
    """
    Check if the given string is a numeric value.

    :param s: Input string to be checked.
    :return: True if the string is a numeric value, False otherwise.
    """
    try:
        # float() accepts ints, decimals and scientific notation;
        # ValueError means the string is not numeric.
        float(s)
        return True
    except ValueError:
        return False
# Returns the zero-based index of the first occurrence of +substring+
# within +string+, or nil when it does not occur.
def substring_position(string, substring)
  string.index(substring)
end
<filename>src/components/AppearOnViewContainer/index.js
import React, {useState} from 'react'
import VisibilitySensor from 'react-visibility-sensor'
import { useSpring, animated} from 'react-spring'
import styles from './appear-on-view-container.module.scss'
// Wraps its children and animates them into view (slide up 150px plus
// fade-in) the first time they become visible in the viewport.
const AppearOnViewContainer = ({children}) => {
  // `visible` mirrors the sensor's current state; `seen` latches to true
  // once the container has been visible, so the animation only plays once.
  const [visible, setVisible] = useState(false)
  const [seen, setSeen] = useState(false)
  const {translateY, opacity } = useSpring({
    from: {
      translateY: (visible || seen) ? 150 : 0,
      opacity: (visible || seen) ? 0 : 1,
    },
    to: {
      translateY: (visible || seen) ? 0 : 150,
      opacity: (visible || seen) ? 1 : 0,
    },
  })

  // VisibilitySensor callback: track visibility and latch `seen`.
  const handleVisibility = (isVisible) => {
    setVisible(isVisible)
    if(isVisible){
      if(!seen){
        setSeen(true)
      }
    }
  }

  return(
    <VisibilitySensor partialVisibility onChange={handleVisibility}>
      <animated.div
        style={{
          transform: translateY.interpolate(
            (translateY) => `translateY(${translateY}px)`
          ),
          opacity: opacity,
        }}
      >
        {children}
      </animated.div>
    </VisibilitySensor>
  )
}

export default AppearOnViewContainer
<gh_stars>0
package com.example_gzh.xiaoxiaoweather.util;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import com.example_gzh.xiaoxiaoweather.MyApplication;
import okhttp3.OkHttpClient;
import okhttp3.Request;
/**
* Created by <NAME> on 2017/6/11.
*/
/**
 * Networking helpers: asynchronous HTTP via OkHttp plus connectivity
 * checks based on {@link ConnectivityManager}.
 */
public class HttpUtil {

    /**
     * Fires an asynchronous HTTP request to the given address.
     *
     * @param address  the URL to request
     * @param callback invoked by OkHttp with the response or failure
     */
    public static void sendOkHttpRequest(String address, okhttp3.Callback callback) {
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder().url(address).build();
        client.newCall(request).enqueue(callback);
    }

    /**
     * Checks whether any network is currently connected.
     * Requires the ACCESS_NETWORK_STATE permission in the manifest.
     *
     * @return true if the active network is connected
     */
    public static boolean isNetworkConnected(Context context) {
        if (context != null) {
            ConnectivityManager manager = (ConnectivityManager)
                    context.getSystemService(Context.CONNECTIVITY_SERVICE);
            NetworkInfo networkInfo =
                    manager.getActiveNetworkInfo();
            if (networkInfo != null) {
                return networkInfo.isConnected();
            }
        }
        return false;
    }

    /**
     * Checks whether the device's Wi-Fi network is available.
     *
     * @return true if Wi-Fi is available
     */
    public static boolean isWiFiAvailable(Context context) {
        if (context != null) {
            // Consistency fix: use the caller-supplied context like
            // isNetworkConnected does — the original ignored the
            // parameter and used MyApplication.getContext() instead.
            ConnectivityManager manager = (ConnectivityManager)
                    context.getSystemService(Context.CONNECTIVITY_SERVICE);
            NetworkInfo networkInfo =
                    manager.getNetworkInfo(ConnectivityManager.TYPE_WIFI);
            if (networkInfo != null) {
                return networkInfo.isAvailable();
            }
        }
        return false;
    }

    /**
     * Checks whether the mobile data network is available.
     *
     * @return true if the mobile network is available
     */
    public static boolean isMobileAvailable(Context context) {
        if (context != null) {
            // Same consistency fix as isWiFiAvailable above.
            ConnectivityManager manager = (ConnectivityManager)
                    context.getSystemService(Context.CONNECTIVITY_SERVICE);
            NetworkInfo networkInfo =
                    manager.getNetworkInfo(ConnectivityManager.TYPE_MOBILE);
            if (networkInfo != null) {
                return networkInfo.isAvailable();
            }
        }
        return false;
    }
}
|
import UIKit
class ViewController: UIViewController {

    // Outlets
    @IBOutlet weak var tableView: UITableView!
    @IBOutlet weak var totalIncomeLabel: UILabel!
    @IBOutlet weak var totalExpensesLabel: UILabel!

    // Properties
    // Backing data for the two table sections plus their running totals.
    private var incomes = [Income]()
    private var expenses = [Expense]()
    private var totalIncome: Float = 0.0
    private var totalExpenses: Float = 0.0

    override func viewDidLoad() {
        super.viewDidLoad()

        // Calculate total income and total expenses
        // NOTE(review): incomes/expenses are initialized empty above;
        // presumably they are populated before viewDidLoad runs (code
        // elided at "// ...") — confirm where they are set.
        for income in incomes {
            totalIncome += income.amount
        }
        for expense in expenses {
            totalExpenses += expense.amount
        }

        // Set total income and total expenses label
        totalIncomeLabel.text = "\(totalIncome)"
        totalExpensesLabel.text = "\(totalExpenses)"

        // Setup table view
        tableView.dataSource = self
        tableView.delegate = self
    }

    // ...
}
extension ViewController: UITableViewDataSource, UITableViewDelegate {

    func numberOfSections(in tableView: UITableView) -> Int {
        // Section 0 = incomes, section 1 = expenses.
        return 2
    }

    func tableView(_ tableView: UITableView, titleForHeaderInSection section: Int) -> String? {
        return section == 0 ? "Incomes" : "Expenses"
    }

    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return section == 0 ? incomes.count : expenses.count
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: "cell", for: indexPath) as! TransactionCell

        // Incomes and expenses are rendered the same way; only the backing
        // array differs per section.
        if indexPath.section == 0 {
            let income = incomes[indexPath.row]
            cell.transactionNameLabel.text = income.name
            cell.transactionAmountLabel.text = "\(income.amount)"
        } else {
            let expense = expenses[indexPath.row]
            cell.transactionNameLabel.text = expense.name
            cell.transactionAmountLabel.text = "\(expense.amount)"
        }
        return cell
    }
}
<filename>client/test/unit/common/services/httpRequestTracker.spec.js
describe('httpRequestTracker', function () {
  var $http;
  var httpRequestTracker;

  beforeEach(module('services.httpRequestTracker'));

  // Resolve the service under test plus the (mocked) $http service.
  beforeEach(inject(function ($injector) {
    httpRequestTracker = $injector.get('httpRequestTracker');
    $http = $injector.get('$http');
  }));

  it('should not report pending requests if no requests in progress', function () {
    expect(httpRequestTracker.hasPendingRequests()).toBeFalsy();
  });

  it('should report pending requests if requests are in progress', function () {
    // Simulate an in-flight request by pushing a dummy entry.
    $http.pendingRequests.push({});
    expect(httpRequestTracker.hasPendingRequests()).toBeTruthy();
  });
});
<filename>src/main/java/com/github/chen0040/leetcode/day17/package-info.java
/**
* Created by xschen on 12/8/2017.
*/
package com.github.chen0040.leetcode.day17;
|
#!/bin/bash
# Copyright 2012/2014 Brno University of Technology (Author: Karel Vesely)
# Apache 2.0
# Begin configuration.
config= # config, which is also sent to all other scripts
# NETWORK INITIALIZATION
nnet_init= # select initialized MLP (override initialization)
nnet_proto= # select network prototype (initialize it)
proto_opts= # non-default options for 'make_nnet_proto.py'
feature_transform= # provide feature transform (=splice,rescaling,...) (don't build new one)
network_type=dnn # (dnn,cnn1d,cnn2d,lstm) select type of neural network
cnn_proto_opts= # extra options for 'make_cnn_proto.py'
#
hid_layers=4 # nr. of hidden layers (prior to softmax or bottleneck)
hid_dim=1024 # select hidden dimension
bn_dim= # set a value to get a bottleneck network
dbn= # select DBN to prepend to the MLP initialization
momentum=0.9 # momentum passed to the training scheduler
#
init_opts= # options, passed to the initialization script
# FEATURE PROCESSING
copy_feats=false # resave the train/cv features into /tmp (disabled by default)
copy_feats_tmproot= # tmproot for copy-feats (optional)
# feature config (applies always)
online=false # use sliding-window CMVN instead of per-speaker CMVN
cmvn_opts=
delta_opts=
# feature_transform:
splice=5 # temporal splicing
splice_step=1 # stepsize of the splicing (1 == no gap between frames)
splice_left= # asymmetric splicing (defaults to $splice when empty)
splice_right= # asymmetric splicing (defaults to $splice when empty)
feat_type=plain
# feature config (applies to feat_type traps)
traps_dct_basis=11 # nr. of DCT basis (applies to `traps` feat_type, splice10 )
# feature config (applies to feat_type transf) (ie. LDA+MLLT, no fMLLR)
transf=
splice_after_transf=5
# feature config (applies to feat_type lda)
lda_dim=300 # LDA dimension (applies to `lda` feat_type)
# LABELS
labels= # use these labels to train (override default pdf alignments, has to be in 'Posterior' format, see ali-to-post)
num_tgt= # force to use number of outputs in the MLP (default is autodetect)
# TRAINING SCHEDULER
learn_rate=0.008 # initial learning rate
train_opts= # options, passed to the training script
train_tool= # optionally change the training tool
frame_weights= # per-frame weights for gradient weighting
sort_by_len=false # whether to sort the utterances by their lengths
kld_scale= # optional KLD regularization scale, forwarded to the scheduler
si_model= # optional speaker-independent model, forwarded to the scheduler
# OTHER
seed=777 # seed value used for training data shuffling and initialization
skip_cuda_check=false # skip the cuda-compiled sanity check
# NOTE(review): $skip_opts and $prepend_cnn are referenced later in this
# script but have no defaults here; they can only arrive via --config.
# End configuration.
echo "$0 $@" # Print the command line for logging
[ -f path.sh ] && . ./path.sh;
. parse_options.sh || exit 1;
# Require exactly 6 positional arguments, otherwise print usage and exit.
if [ $# != 6 ]; then
echo "Usage: $0 <data-train> <data-dev> <lang-dir> <ali-train> <ali-dev> <exp-dir>"
echo " e.g.: $0 data/train data/cv data/lang exp/mono_ali_train exp/mono_ali_cv exp/mono_nnet"
echo ""
echo " Training data : <data-train>,<ali-train> (for optimizing cross-entropy)"
echo " Held-out data : <data-dev>,<ali-dev> (for learn-rate/model selection based on cross-entopy)"
echo " note.: <ali-train>,<ali-dev> can point to same directory, or 2 separate directories."
echo ""
echo "main options (for others, see top of script file)"
echo " --config <config-file> # config containing options"
echo ""
echo " --apply-cmvn <bool> # apply CMN"
echo " --norm-vars <bool> # add CVN if CMN already active"
echo " --splice <N> # concatenate input features"
echo " --feat-type <type> # select type of input features"
echo ""
echo " --mlp-proto <file> # use this NN prototype"
echo " --feature-transform <file> # re-use this input feature transform"
echo " --hid-layers <N> # number of hidden layers"
echo " --hid-dim <N> # width of hidden layers"
echo " --bn-dim <N> # make bottle-neck network with bn-with N"
echo ""
echo " --learn-rate <float> # initial leaning-rate"
echo " --copy-feats <bool> # copy input features to /tmp (it's faster)"
echo ""
exit 1;
fi
# Positional arguments.
data=$1
data_cv=$2
lang=$3
alidir=$4
alidir_cv=$5
dir=$6
# Using alidir for supervision (default)
if [ -z "$labels" ]; then
#silphonelist=`cat $lang/phones/silence.csl` || exit 1;
# Sanity check: pre-generated alignment scp lists must exist.
for f in $alidir/ali.tr.scp $alidir_cv/ali.cv.scp; do
[ ! -f $f ] && echo "$0: no such file $f" && exit 1;
done
fi
# Sanity check: feature lists must exist for both train and CV sets.
for f in $data/feats.scp $data_cv/feats.scp; do
[ ! -f $f ] && echo "$0: no such file $f" && exit 1;
done
echo
echo "# INFO"
echo "$0 : Training Neural Network"
printf "\t dir : $dir \n"
printf "\t Train-set : $data $alidir \n"
printf "\t CV-set : $data_cv $alidir_cv \n"
mkdir -p $dir/{log,nnet}
# skip when already trained
[ -e $dir/final.nnet ] && printf "\nSKIPPING TRAINING... ($0)\nnnet already trained : $dir/final.nnet ($(readlink $dir/final.nnet))\n\n" && exit 0
# check if CUDA is compiled in,
if ! $skip_cuda_check; then
cuda-compiled || { echo 'CUDA was not compiled in, skipping! Check src/kaldi.mk and src/configure' && exit 1; }
fi
###### PREPARE ALIGNMENTS ######
echo
echo "# PREPARING ALIGNMENTS"
if [ ! -z "$labels" ]; then
echo "Using targets '$labels' (by force)"
labels_tr="$labels"
labels_cv="$labels"
else
echo "Using PDF targets from dirs '$alidir' '$alidir_cv'"
# define pdf-alignment rspecifiers
#labels_tr="ark:ali-to-pdf $alidir/final.mdl \"ark:gunzip -c $alidir/ali.*.gz |\" ark:- | ali-to-post ark:- ark:- |"
#labels_cv="ark:ali-to-pdf $alidir/final.mdl \"ark:gunzip -c $alidir_cv/ali.*.gz |\" ark:- | ali-to-post ark:- ark:- |"
#labels_tr="ark,o:ali-to-pdf $alidir/final.mdl scp:$alidir/ali.tr.scp ark:- | ali-to-post ark:- ark:- |"
#labels_cv="ark,o:ali-to-pdf $alidir/final.mdl scp:$alidir_cv/ali.cv.scp ark:- | ali-to-post ark:- ark:- |"
# Convert the scp-listed alignments to 'Posterior' format on the fly.
labels_tr="ark,o:ali-to-post scp:$alidir/ali.tr.scp ark:- |"
labels_cv="ark,o:ali-to-post scp:$alidir_cv/ali.cv.scp ark:- |"
#
#labels_tr_pdf="ark:ali-to-pdf $alidir/final.mdl \"ark:gunzip -c $alidir/ali.*.gz |\" ark:- |" # for analyze-counts.
#labels_tr_phn="ark:ali-to-phones --per-frame=true $alidir/final.mdl \"ark:gunzip -c $alidir/ali.*.gz |\" ark:- |"
#labels_tr_pdf="ark:ali-to-pdf $alidir/final.mdl scp:$alidir/ali.tr.scp ark:- |" # for analyze-counts.
#labels_tr_phn="ark:ali-to-phones --per-frame=true $alidir/final.mdl scp:$alidir/ali.tr.scp ark:- |"
labels_tr_pdf="ark:copy-align scp:$alidir/ali.tr.scp ark:- |" # for analyze-counts.
#labels_tr_phn="ark:ali-to-phones --per-frame=true $alidir/final.mdl scp:$alidir/ali.tr.scp ark:- |"
# get pdf-counts, used later to post-process DNN posteriors
# (runs in the background; skipped when the counts file already exists)
([ -e $dir/ali_train_pdf.counts ] || analyze-counts --verbose=1 --binary=false "$labels_tr_pdf" $dir/ali_train_pdf.counts 2>$dir/log/analyze_counts_pdf.log || exit 1)&
# copy the old transition model, will be needed by decoder
# copy-transition-model --binary=false $alidir/final.mdl $dir/final.mdl || exit 1
# copy the tree
# cp $alidir/tree $dir/tree || exit 1
# make phone counts for analysis
#[ -e $lang/phones.txt ] && analyze-counts --verbose=1 --symbol-table=$lang/phones.txt "$labels_tr_phn" /dev/null 2>$dir/log/analyze_counts_phones.log || exit 1
fi
###### PREPARE FEATURES ######
echo
echo "# PREPARING FEATURES"
# shuffle the list
echo "Preparing train/cv lists :"
if $sort_by_len; then
echo "Sort utterences by lengths :"
# Append per-utterance frame counts, sort numerically by them, strip them.
feat-to-len scp:$data/feats.scp ark,t:- | awk '{print $2}' > $dir/len.tmp || exit 1;
paste -d " " $data/feats.scp $dir/len.tmp | sort -k3 -n - | awk '{print $1 " " $2}' > $dir/train.scp || exit 1;
feat-to-len scp:$data_cv/feats.scp ark,t:- | awk '{print $2}' > $dir/len.tmp || exit 1;
paste -d " " $data_cv/feats.scp $dir/len.tmp | sort -k3 -n - | awk '{print $1 " " $2}' > $dir/cv.scp || exit 1;
rm -f $dir/len.tmp
else
#cat $data/feats.scp | utils/shuffle_list.pl --srand ${seed:-777} > $dir/train.scp
#cat $data/feats.scp | shuf | shuf > $dir/train.scp
# NOTE(review): despite the "shuffle" comment above, the lists are copied
# in their original order here; the shuffling variants are commented out.
cp $data/feats.scp $dir/train.scp
cp $data_cv/feats.scp $dir/cv.scp
fi
# print the list sizes
wc -l $dir/train.scp $dir/cv.scp
# re-save the train/cv features to /tmp, reduces LAN traffic, avoids disk-seeks due to shuffled features
if [ "$copy_feats" == "true" ]; then
tmpdir=$(mktemp -d $copy_feats_tmproot); mv $dir/train.scp{,_non_local}; mv $dir/cv.scp{,_non_local}
copy-feats scp:$dir/train.scp_non_local ark,scp:$tmpdir/train.ark,$dir/train.scp || exit 1
copy-feats scp:$dir/cv.scp_non_local ark,scp:$tmpdir/cv.ark,$dir/cv.scp || exit 1
# Remove the copied features when the script exits.
trap "echo \"Removing features tmpdir $tmpdir @ $(hostname)\"; ls $tmpdir; rm -r $tmpdir" EXIT
fi
#create a 10k utt subset for global cmvn estimates
head -n 400000 $dir/train.scp > $dir/train.scp.10k
###### PREPARE FEATURE PIPELINE ######
# optionally import feature setup from pre-training,
if [ ! -z $feature_transform ]; then
D=$(dirname $feature_transform)
[ -e $D/norm_vars ] && cmvn_opts="--norm-means=true --norm-vars=$(cat $D/norm_vars)" # Bwd-compatibility,
[ -e $D/cmvn_opts ] && cmvn_opts=$(cat $D/cmvn_opts)
[ -e $D/delta_order ] && delta_opts="--delta-order=$(cat $D/delta_order)" # Bwd-compatibility,
[ -e $D/delta_opts ] && delta_opts=$(cat $D/delta_opts)
echo "Imported config : cmvn_opts='$cmvn_opts' delta_opts='$delta_opts'"
fi
# read the features,
feats_tr="ark:copy-feats scp:$dir/train.scp ark:- |"
feats_cv="ark:copy-feats scp:$dir/cv.scp ark:- |"
# optionally add per-speaker CMVN,
if [ ! -z "$cmvn_opts" -a "$online" == "false" ]; then
echo "Will use CMVN statistics : $data/cmvn.scp, $data_cv/cmvn.scp"
[ ! -r $data/cmvn.scp ] && echo "Missing $data/cmvn.scp" && exit 1;
[ ! -r $data_cv/cmvn.scp ] && echo "Missing $data_cv/cmvn.scp" && exit 1;
feats_tr="$feats_tr apply-cmvn $cmvn_opts --utt2spk=ark:$data/utt2spk scp:$data/cmvn.scp ark:- ark:- |"
feats_cv="$feats_cv apply-cmvn $cmvn_opts --utt2spk=ark:$data_cv/utt2spk scp:$data_cv/cmvn.scp ark:- ark:- |"
elif [ "$online" == "true" ];then
# Online mode: sliding-window CMVN, no per-speaker statistics needed.
#[ ! -f $data/global_cmvn.stats ] && echo "$0: no such file $data/global_cmvn.stats" && exit 1;
feats_tr="$feats_tr apply-cmvn-sliding $cmvn_opts ark:- ark:- |"
feats_cv="$feats_cv apply-cmvn-sliding $cmvn_opts ark:- ark:- |"
else
echo "apply-cmvn is not used"
fi
# optionally add deltas,
if [ ! -z "$delta_opts" ]; then
feats_tr="$feats_tr add-deltas $delta_opts ark:- ark:- |"
feats_cv="$feats_cv add-deltas $delta_opts ark:- ark:- |"
echo "add-deltas with $delta_opts"
fi
# optionally skip frames,
# NOTE(review): $skip_opts has no default in the configuration section;
# it can only be supplied via --config. Confirm this is intentional.
if [ ! -z "$skip_opts" ]; then
train_tool="$train_tool $skip_opts"
echo "training with $skip_opts"
fi
# keep track of the config,
[ ! -z "$cmvn_opts" ] && echo "$cmvn_opts" >$dir/cmvn_opts
[ ! -z "$delta_opts" ] && echo "$delta_opts" >$dir/delta_opts
[ ! -z "$skip_opts" ] && echo "$skip_opts" >$dir/skip_opts
[ ! -z "$online" ] && echo "$online" >$dir/online
#
# get feature dim
echo "Getting feature dim : "
feat_dim=$(feat-to-dim --print-args=false "$feats_tr" -)
echo "Feature dim is : $feat_dim"
# Now we will start building complex feature_transform which will
# be forwarded in CUDA to have fast run-time.
#
# We will use 1GPU for both feature_transform and MLP training in one binary tool.
# This is against the kaldi spirit to have many independent small processing units,
# but it is necessary because of compute exclusive mode, where GPU cannot be shared
# by multiple processes.
if [ ! -z "$feature_transform" ]; then
echo "Using pre-computed feature-transform : '$feature_transform'"
tmp=$dir/$(basename $feature_transform)
cp $feature_transform $tmp; feature_transform=$tmp
else
# Generate the splice transform
if [ -z "$splice_left" -o -z "$splice_right" ];then
# Default to symmetric splicing.
splice_left=$splice
splice_right=$splice
fi
echo "Using splice + $splice_right / - $splice_left, step $splice_step"
feature_transform=$dir/tr_splice$splice_right-$splice_step.nnet
utils/nnet/gen_splice.py --fea-dim=$feat_dim --splice-left=$splice_left --splice-right=$splice_right --splice-step=$splice_step > $feature_transform
# Choose further processing of spliced features
echo "Feature type : $feat_type"
case $feat_type in
plain)
;;
traps)
#generate hamming+dct transform
feature_transform_old=$feature_transform
feature_transform=${feature_transform%.nnet}_hamm_dct${traps_dct_basis}.nnet
echo "Preparing Hamming DCT transform into : $feature_transform"
#prepare matrices with time-transposed hamming and dct
utils/nnet/gen_hamm_mat.py --fea-dim=$feat_dim --splice=$splice > $dir/hamm.mat
utils/nnet/gen_dct_mat.py --fea-dim=$feat_dim --splice=$splice --dct-basis=$traps_dct_basis > $dir/dct.mat
#put everything together
compose-transforms --binary=false $dir/dct.mat $dir/hamm.mat - | \
transf-to-nnet - - | \
nnet-concat --binary=false $feature_transform_old - $feature_transform || exit 1
;;
transf)
feature_transform_old=$feature_transform
feature_transform=${feature_transform%.nnet}_transf_splice${splice_after_transf}.nnet
[ -z $transf ] && transf=$alidir/final.mat
[ ! -f $transf ] && echo "Missing transf $transf" && exit 1
feat_dim=$(feat-to-dim "$feats_tr nnet-forward 'nnet-concat $feature_transform_old \"transf-to-nnet $transf - |\" - |' ark:- ark:- |" -)
nnet-concat --binary=false $feature_transform_old \
"transf-to-nnet $transf - |" \
"utils/nnet/gen_splice.py --fea-dim=$feat_dim --splice=$splice_after_transf |" \
$feature_transform || exit 1
;;
lda)
transf=$dir/lda$lda_dim.mat
#get the LDA statistics
if [ ! -r "$dir/lda.acc" ]; then
echo "LDA: Converting alignments to posteriors $dir/lda_post.scp"
# NOTE(review): $silphonelist is only assigned in a commented-out line near
# the top of the script, so weight-silence-post gets an empty arg — verify.
ali-to-post "ark:gunzip -c $alidir/ali.*.gz|" ark:- | \
weight-silence-post 0.0 $silphonelist $alidir/final.mdl ark:- ark,scp:$dir/lda_post.ark,$dir/lda_post.scp 2>$dir/log/ali-to-post-lda.log || exit 1;
echo "Accumulating LDA statistics $dir/lda.acc on top of spliced feats"
acc-lda --rand-prune=4.0 $alidir/final.mdl "$feats_tr nnet-forward $feature_transform ark:- ark:- |" scp:$dir/lda_post.scp $dir/lda.acc 2>$dir/log/acc-lda.log || exit 1;
else
echo "LDA: Using pre-computed stats $dir/lda.acc"
fi
#estimate the transform
echo "Estimating LDA transform $dir/lda.mat from the statistics $dir/lda.acc"
est-lda --write-full-matrix=$dir/lda.full.mat --dim=$lda_dim $transf $dir/lda.acc 2>$dir/log/lda.log || exit 1;
#append the LDA matrix to feature_transform
feature_transform_old=$feature_transform
feature_transform=${feature_transform%.nnet}_lda${lda_dim}.nnet
transf-to-nnet $transf - | \
nnet-concat --binary=false $feature_transform_old - $feature_transform || exit 1
#remove the temporary file
rm $dir/lda_post.{ark,scp}
;;
*)
echo "Unknown feature type $feat_type"
exit 1;
;;
esac
# keep track of feat_type
echo $feat_type > $dir/feat_type
# Renormalize the MLP input to zero mean and unit variance
feature_transform_old=$feature_transform
feature_transform=${feature_transform%.nnet}_cmvn-g.nnet
echo "Renormalizing MLP input features into $feature_transform"
# Estimate global CMVN over the 10k subset and append it to the transform.
nnet-forward --use-gpu=yes \
$feature_transform_old "$(echo $feats_tr | sed 's|train.scp|train.scp.10k|')" \
ark:- 2>$dir/log/nnet-forward-cmvn.log |\
compute-cmvn-stats ark:- - | cmvn-to-nnet - - |\
nnet-concat --binary=false $feature_transform_old - $feature_transform
[ ! -f $feature_transform ] && cat $dir/log/nnet-forward-cmvn.log && echo "Error: Global CMVN failed, was the CUDA GPU okay?" && echo && exit 1
fi
###### MAKE LINK TO THE FINAL feature_transform, so the other scripts will find it ######
(cd $dir; [ ! -f final.feature_transform ] && ln -s $(basename $feature_transform) final.feature_transform )
###### INITIALIZE THE NNET ######
echo
echo "# NN-INITIALIZATION"
[ ! -z "$nnet_init" ] && echo "Using pre-initialized network '$nnet_init'";
if [ ! -z "$nnet_proto" ]; then
echo "Initializing using network prototype '$nnet_proto'";
nnet_init=$dir/nnet.init; log=$dir/log/nnet_initialize.log
nnet-initialize $nnet_proto $nnet_init 2>$log || { cat $log; exit 1; }
fi
# Neither a pre-initialized net nor a prototype was given: build a prototype.
if [[ -z "$nnet_init" && -z "$nnet_proto" ]]; then
echo "Getting input/output dims :"
#initializing the MLP, get the i/o dims...
#input-dim
num_fea=$(feat-to-dim "$feats_tr nnet-forward $feature_transform ark:- ark:- |" - )
{ #optionally take output dim of DBN
[ ! -z $dbn ] && num_fea=$(nnet-forward "nnet-concat $feature_transform $dbn -|" "$feats_tr" ark:- | feat-to-dim ark:- -)
[ -z "$num_fea" ] && echo "Getting nnet input dimension failed!!" && exit 1
}
#output-dim (number of pdfs from the transition model, unless forced)
[ -z $num_tgt ] && num_tgt=$(hmm-info --print-args=false $alidir/final.mdl | grep pdfs | awk '{ print $NF }')
# make network prototype
nnet_proto=$dir/nnet.proto
echo "Genrating network prototype $nnet_proto"
case "$network_type" in
dnn)
utils/nnet/make_nnet_proto.py $proto_opts \
${bn_dim:+ --bottleneck-dim=$bn_dim} \
$num_fea $num_tgt $hid_layers $hid_dim >$nnet_proto || exit 1
;;
cnn1d)
# Extract the delta order from $delta_opts (0 when deltas are disabled).
delta_order=$([ -z $delta_opts ] && echo "0" || { echo $delta_opts | tr ' ' '\n' | grep "delta[-_]order" | sed 's:^.*=::'; })
echo "Debug : $delta_opts, delta_order $delta_order"
utils/nnet/make_cnn_proto.py $cnn_proto_opts \
--splice=$splice --delta-order=$delta_order --dir=$dir \
$num_fea >$nnet_proto || exit 1
# Output dim of the last CNN layer becomes the input dim of the DNN part.
cnn_fea=$(cat $nnet_proto | grep -v '^$' | tail -n1 | awk '{ print $5; }')
utils/nnet/make_nnet_proto.py $proto_opts \
--no-proto-head --no-smaller-input-weights \
${bn_dim:+ --bottleneck-dim=$bn_dim} \
"$cnn_fea" $num_tgt $hid_layers $hid_dim >>$nnet_proto || exit 1
;;
cnn2d)
#TODO, to be filled by Vijay...
;;
lstm)
utils/nnet/make_lstm_proto.py $proto_opts \
$num_fea $num_tgt >$nnet_proto || exit 1
;;
*) echo "Unknown : --network_type $network_type" && exit 1;
esac
# initialize
nnet_init=$dir/nnet.init; log=$dir/log/nnet_initialize.log
echo "Initializing $nnet_proto -> $nnet_init"
nnet-initialize $nnet_proto $nnet_init 2>$log || { cat $log; exit 1; }
# optionally prepend dbn to the initialization
if [ ! -z $dbn ]; then
nnet_init_old=$nnet_init; nnet_init=$dir/nnet_$(basename $dbn)_dnn.init
nnet-concat $dbn $nnet_init_old $nnet_init || exit 1
fi
fi
###### TRAIN ######
echo
echo "# RUNNING THE NN-TRAINING SCHEDULER"
steps/nnet/train_scheduler_asgd_sds.sh \
  --feature-transform $feature_transform \
  --learn-rate $learn_rate \
  --momentum $momentum \
  --randomizer-seed $seed \
  ${train_opts} \
  ${train_tool:+ --train-tool "$train_tool"} \
  ${frame_weights:+ --frame-weights "$frame_weights"} \
  ${kld_scale:+ --kld-scale "$kld_scale"} \
  ${si_model:+ --si-model "$si_model"} \
  ${config:+ --config $config} \
  $nnet_init "$feats_tr" "$feats_cv" "$labels_tr" "$labels_cv" $dir || exit 1

# Optionally build a feature_transform containing the trained CNN layers,
# for later RBM pre-training on top of them.
# Guard against $prepend_cnn being unset: the previous `if $prepend_cnn`
# expanded to an empty command (exit status 0) when the variable was empty,
# so this branch ran even when it was never requested.
if [ "${prepend_cnn:-false}" = "true" ]; then
  echo "Preparing feature transform with CNN layers for RBM pre-training."
  nnet-concat $dir/final.feature_transform "nnet-copy --remove-last-layers=$(((hid_layers+1)*2)) $dir/final.nnet - |" \
    $dir/final.feature_transform_cnn 2>$dir/log/concat_transf_cnn.log || exit 1
fi

echo "$0 successfully finished.. $dir"

sleep 3
exit 0
#!/bin/bash
set -euo pipefail
# Run from the repository root (one level above this script's directory).
cd "$( dirname "${BASH_SOURCE[0]}" )/.."
# Rewrite imports for all non-vendored packages (excluding the fosite root
# package), then for the top-level Go files.
goimports -w $(go list -f {{.Dir}} ./... | grep -v vendor | grep -v fosite$)
goimports -w *.go
|
def stars_and_bars(n, k):
    """Count the pairs ``(i, j)`` with ``i`` in ``[0, k]`` and ``j = n - i >= 0``.

    For non-negative ``n`` this equals ``min(n, k) + 1``; for negative ``n``
    it is 0.
    """
    return sum(1 for i in range(k + 1) if n - i >= 0)
# Demo: with n == k == 10, every i in 0..10 gives j = 10 - i >= 0, so prints 11.
print(stars_and_bars(10, 10))
/**
 * Determine whether two strings are anagrams of each other.
 *
 * @param string $str1 first string
 * @param string $str2 second string
 * @return bool true when both strings contain exactly the same characters
 */
function isAnagram($str1, $str2){
    // Strings of different length can never be anagrams.
    if (strlen($str1) !== strlen($str2)) {
        return false;
    }
    // Two strings are anagrams exactly when their per-byte frequency
    // tables are identical.
    return count_chars($str1) === count_chars($str2);
}

// echo prints "1" for true and nothing for false.
$result = isAnagram('elephant', 'pentahle');
echo $result;
<filename>acmicpc/1003/1003.py<gh_stars>1-10
# Memoized Fibonacci sequence; grows on demand and is shared across calls.
fibo_element = [0, 1]

def count_numbers(num: int):
    """Print how many 0s and 1s a naive fib(num) call would emit.

    The counts are fib(num - 1) zeros and fib(num) ones (BOJ problem 1003),
    printed as a single line "<zeros> <ones>".
    """
    if num == 0:
        print('1 0')
        return
    if num == 1:
        print('0 1')
        return
    # Extend the memo table until index `num` is available.
    while len(fibo_element) <= num:
        fibo_element.append(fibo_element[-1] + fibo_element[-2])
    print(f'{fibo_element[num-1]} {fibo_element[num]}')
# First input line is the number of test cases, then one N per line.
for _ in range(int(input())):
    n = int(input())
    count_numbers(n)
|
#!/bin/bash
#
# Install Robolectric into the local Maven repository.
#
set -e

PROJECT=$(cd $(dirname "$0")/..; pwd)

# Build source/javadoc jars only when explicitly requested via env vars.
if [ -z ${INCLUDE_SOURCE+x} ]; then SOURCE_ARG=""; else SOURCE_ARG="source:jar"; fi
if [ -z ${INCLUDE_JAVADOC+x} ]; then JAVADOC_ARG=""; else JAVADOC_ARG="javadoc:jar"; fi

echo "Building Robolectric..."
cd "$PROJECT"; mvn -D skipTests clean $SOURCE_ARG $JAVADOC_ARG install

# Build the per-API shadows. The list matches the maven profiles
# android-<api>; previously this was five copy-pasted command pairs.
for api in 16 17 18 19 21; do
  echo "Building shadows for API ${api}..."
  cd "$PROJECT"/robolectric-shadows/shadows-core; mvn -P android-${api} clean $SOURCE_ARG $JAVADOC_ARG install
done

echo "Running Tests..."
cd "$PROJECT"; mvn test
|
import React from 'react';
import '../Header/Header.css';
import Typed from 'react-typed';
const Header = () => {
return (
<div className="header-wrapper">
<canvas></canvas>
<div className="main-info">
<h1><NAME>'s Portfolio</h1>
<Typed
className="typed-text"
strings={["Web Design", "Web Development", "Node"]}
typeSpeed={40}
backSpeed={60}
loop
/>
</div>
</div>
)
}
export default Header
|
const jwt = require('jsonwebtoken');
const conf = require('../../config');
const {HttpError} = require('./error');
/**
 * Create a signed JWT for the given payload.
 * @param dat payload object to embed in the token
 * @returns {*} the signed token string; its expiry mirrors the cookie
 *   maxAge (converted from milliseconds to a '<n>s' string)
 */
const createToken = function (dat) {
    return jwt.sign(dat, conf.tokenObs, {
        expiresIn: conf.cookieOptions.maxAge / 1000 + 's'
    });
};
/**
 * Koa middleware that validates the JWT carried in the Authorization header.
 * Throws HttpError(401) when the token is missing, invalid or expired;
 * on success exposes the decoded payload as ctx.res.USER and continues.
 * @param ctx Koa context
 * @param next next middleware
 * @returns {Promise<void>}
 */
const checkToken = async (ctx, next) => {
    // TODO: to allow all cross-origin requests, the token must not be kept in
    // a cookie but carried in the Authorization header instead.
    const token = ctx.get('Authorization');
    // const token = ctx.cookie.get('token');
    if (!token) {
        throw new HttpError(401);
    }
    try {
        // NOTE(review): jwt.verify is used with a callback here; the outer
        // try/catch relies on the callback (and the throw inside it) running
        // synchronously — confirm against the jsonwebtoken version in use.
        jwt.verify(token, conf.tokenObs, function (err, decoded) {
            if (err) {
                throw new HttpError(401);
            } else {
                // Expose the decoded payload to downstream handlers.
                ctx.res.USER = decoded;
            }
        });
    } catch (err) {
        // Any verification failure is reported uniformly as 401.
        throw new HttpError(401);
    }
    await next();
};
// Public API: token creation plus the verification middleware.
module.exports = {
    createToken,
    checkToken
};
|
'use strict';
// simple test file using low level put command based only on ssh2 lib
const path = require('path');
const fs = require('fs');
const dotenvPath = path.join(__dirname, '..', '.env');
require('dotenv').config({path: dotenvPath});
const Client = require('ssh2');
// Connection settings come from the environment (.env loaded above).
// The password was a redaction placeholder (`<PASSWORD>`), which is not valid
// JavaScript — read it from the environment like the sibling settings.
const config = {
  host: process.env.SFTP_SERVER,
  username: process.env.SFTP_USER,
  password: process.env.SFTP_PASSWORD,
  port: process.env.SFTP_PORT || 22
};

const client = new Client();

// CLI: node <script> <local-source-file> <remote-destination-path>
const sourceFile = process.argv[2];
const remotePath = process.argv[3];
client
  .on('ready', function () {
    // The SSH session is up; open an SFTP session and stream the file across.
    client.sftp(function (err, sftp) {
      console.log(`Sending ${sourceFile} to ${remotePath}`);
      if (err) {
        console.log(`Error: ${err.message}`);
        // Close the connection instead of just returning: the previous code
        // did `return err;`, leaving the SSH session open so the process
        // never exited on an SFTP-session failure.
        client.end();
        return;
      }
      let stream = sftp.createWriteStream(remotePath, {encoding: null});
      stream.on('error', (err) => {
        console.log(`Stream Error: ${err.message}`);
        client.end();
      });
      stream.on('finish', () => {
        console.log(`File successfully uploaded to ${remotePath}`);
        client.end();
      });
      let rdr = fs.createReadStream(sourceFile, {encoding: null});
      rdr.on('error', (err) => {
        console.log(`Reader Error: ${err.message}`);
        client.end();
      });
      rdr.pipe(stream);
    });
  })
  .on('error', function (err) {
    console.error(err.message);
  })
  .connect(config);
|
package im.status.keycard.globalplatform;
import im.status.keycard.applet.Identifiers;
import org.bouncycastle.util.encoders.Hex;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.SecureRandom;
import im.status.keycard.io.APDUCommand;
import im.status.keycard.io.APDUException;
import im.status.keycard.io.APDUResponse;
import im.status.keycard.io.CardChannel;
/**
* Command set used for loading, installing and removing applets and packages. This class is generic and can work with
* any package and applet, but utility methods specific to the Keycard have been provided.
*/
public class GlobalPlatformCommandSet {
// INS bytes of the ISO7816/GlobalPlatform commands used by this command set.
static final byte INS_SELECT = (byte) 0xA4;
static final byte INS_INITIALIZE_UPDATE = (byte) 0x50;
static final byte INS_EXTERNAL_AUTHENTICATE = (byte) 0x82;
static final byte INS_DELETE = (byte) 0xE4;
static final byte INS_INSTALL = (byte) 0xE6;
static final byte INS_LOAD = (byte) 0xE8;
static final byte INS_PUT_KEY = (byte) 0xD8;

// P1 parameter values used with the commands above.
static final byte SELECT_P1_BY_NAME = (byte) 0x04;
static final byte EXTERNAL_AUTHENTICATE_P1 = (byte) 0x01;
static final byte INSTALL_FOR_LOAD_P1 = (byte) 0x02;
static final byte INSTALL_FOR_INSTALL_P1 = (byte) 0x0C;
static final byte LOAD_P1_MORE_BLOCKS = (byte) 0x00;
static final byte LOAD_P1_LAST_BLOCK = (byte) 0x80;

private final CardChannel apduChannel; // transport to the card
private SecureChannel secureChannel;   // SCP02 wrapper, created by initializeUpdate
private SCP02Keys cardKeys;            // current static ENC/MAC/DEK keys
private Session session;               // state of the open secure channel

// NOTE(review): the key material below was redacted ("<KEY>") in this copy
// of the source; restore the real hex strings before use.
private final byte[] gpDefaultKey = Hex.decode("<KEY>");
private final SCP02Keys gpDefaultKeys = new SCP02Keys(gpDefaultKey, gpDefaultKey, gpDefaultKey);
private final byte[] developmentKey = Hex.decode("<KEY>");

/**
 * Constructs a new command set with the given CardChannel. The development
 * key is installed as the initial ENC/MAC/DEK keyset.
 *
 * @param apduChannel the channel to the card
 */
public GlobalPlatformCommandSet(CardChannel apduChannel) {
    this.apduChannel = apduChannel;
    setCardKeys(developmentKey);
}
/**
 * Sets the given key as all of ENC, MAC and DEK static keys, used to derive session keys.
 *
 * @param key the key
 */
public void setCardKeys(byte[] key) {
    setCardKeys(key, key, key);
}

/**
 * Sets the ENC, MAC and DEK static keys, used to derive session keys.
 *
 * @param encKey the ENC key
 * @param macKey the MAC key
 * @param dekKey the DEK key
 */
public void setCardKeys(byte[] encKey, byte[] macKey, byte[] dekKey) {
    // (removed a stray duplicate semicolon that was here)
    this.cardKeys = new SCP02Keys(encKey, macKey, dekKey);
}
/**
 * Selects the ISD of the card, using SELECT by-name with an empty AID.
 *
 * @return the card response
 *
 * @throws IOException communication error
 */
public APDUResponse select() throws IOException {
    APDUCommand cmd = new APDUCommand(0x00, INS_SELECT, SELECT_P1_BY_NAME, 0, new byte[0]);
    return apduChannel.send(cmd);
}

/**
 * Sends an INITIALIZE UPDATE command. Use the openSecureChannel method instead of calling this directly, unless you
 * need to use a specific host challenge.
 *
 * On a 0x9000 response the card cryptogram is first verified with the
 * configured keys; if that fails, the default GlobalPlatform keys are tried
 * and the session is marked as using fallback keys (so openSecureChannel
 * can later upgrade them).
 *
 * @param hostChallenge the host challenge.
 * @return the card response
 *
 * @throws IOException communication error
 */
public APDUResponse initializeUpdate(byte[] hostChallenge) throws IOException, APDUException {
    APDUCommand cmd = new APDUCommand(0x80, INS_INITIALIZE_UPDATE, 0, 0, hostChallenge, true);
    APDUResponse resp = apduChannel.send(cmd);
    if (resp.isOK()) {
        try {
            // Try the configured keys first...
            this.session = SecureChannel.verifyChallenge(hostChallenge, this.cardKeys, resp);
        } catch(APDUException e) {
            // ...then fall back to the well-known default GP keys.
            this.session = SecureChannel.verifyChallenge(hostChallenge, gpDefaultKeys, resp);
            this.session.markAsUsingFallbackKeys();
        }
        this.secureChannel = new SecureChannel(this.apduChannel, this.session.getKeys());
    }
    return resp;
}
/**
 * Sends an EXTERNAL AUTHENTICATE command. Use the openSecureChannel method instead of calling this directly, unless you
 * need to use a specific host challenge.
 *
 * The host cryptogram is a 3DES MAC (with DES padding) over
 * cardChallenge || hostChallenge, keyed with the session ENC key, and the
 * command is sent through the secure channel set up by initializeUpdate.
 *
 * @param hostChallenge the host challenge.
 * @return the card response
 *
 * @throws IOException communication error
 */
public APDUResponse externalAuthenticate(byte[] hostChallenge) throws IOException {
    byte[] cardChallenge = this.session.getCardChallenge();
    // data = cardChallenge || hostChallenge
    byte[] data = new byte[cardChallenge.length + hostChallenge.length];
    System.arraycopy(cardChallenge, 0, data, 0, cardChallenge.length);
    System.arraycopy(hostChallenge, 0, data, cardChallenge.length, hostChallenge.length);
    byte[] paddedData = Crypto.appendDESPadding(data);
    byte[] hostCryptogram = Crypto.mac3des(this.session.getKeys().encKeyData, paddedData, Crypto.NullBytes8);
    APDUCommand cmd = new APDUCommand(0x84, INS_EXTERNAL_AUTHENTICATE, EXTERNAL_AUTHENTICATE_P1, 0, hostCryptogram);
    return this.secureChannel.send(cmd);
}

/**
 * Convenience method for openSecureChannel with auto key upgrade.
 *
 * @throws APDUException the card didn't respond 0x9000 to either INITIALIZE UPDATE or EXTERNAL AUTHENTICATE
 * @throws IOException communication error
 */
public void openSecureChannel() throws APDUException, IOException {
    openSecureChannel(true);
}

/**
 * Opens an SCP02 secure channel. If with the current keys the card cryptogram cannot be verified, an attempt is made
 * to use the default GlobalPlatform keys instead. This does not require additional commands to the card. In case
 * the autoUpgradeKeys is set to true and the default GlobalPlatform keys were used, a PUT KEY command is sent to
 * change the keys to the current ones.
 *
 * @param autoUpgradeKeys upgrade keys if default GP keys are loaded
 * @throws APDUException the card didn't respond 0x9000 to either INITIALIZE UPDATE or EXTERNAL AUTHENTICATE
 * @throws IOException communication error
 */
public void openSecureChannel(boolean autoUpgradeKeys) throws APDUException, IOException {
    // 8-byte random host challenge for the mutual authentication.
    SecureRandom random = new SecureRandom();
    byte[] hostChallenge = new byte[8];
    random.nextBytes(hostChallenge);
    initializeUpdate(hostChallenge).checkOK();
    externalAuthenticate(hostChallenge).checkOK();
    // Replace the default GP keyset (KVN 0 -> 1) with the configured keys.
    if (this.session.usesFallbackKeys() && autoUpgradeKeys) {
        this.putSCP02Keys(this.cardKeys.getEncKeyData(), this.cardKeys.getMacKeyData(), this.cardKeys.getDekKeyData(), 0, 1).checkOK();
    }
}
/**
 * Sends a PUT KEY APDU to load or replace SCP02 keys. The key is used for all 3 of ENC, MAC and DEK.
 *
 * @param key the key to load
 * @param oldKvn the KVN to replace, 0 to put a new key without replacing
 * @param newKvn the KVN of the new keyset
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse putSCP02Keys(byte[] key, int oldKvn, int newKvn) throws IOException {
    return putSCP02Keys(key, key, key, oldKvn, newKvn);
}

/**
 * Sends a PUT KEY APDU to load or replace SCP02 keys. The keys are assumed to be 3DES keys
 *
 * @param encKey the ENC key to load
 * @param macKey the MAC key to load
 * @param dekKey the DEK key to load
 * @param oldKvn the KVN to replace, 0 to put a new key without replacing
 * @param newKvn the KVN of the new keyset
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse putSCP02Keys(byte[] encKey, byte[] macKey, byte[] dekKey, int oldKvn, int newKvn) throws IOException {
    if (encKey.length != 16 || macKey.length != 16 || dekKey.length != 16){
        throw new IllegalArgumentException("All keys must be 16-byte 3DES keys");
    }
    // Payload: the new KVN followed by the three encrypted keys (ENC, MAC, DEK).
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    bos.write(newKvn);
    writeSCP02Key(bos, encKey);
    writeSCP02Key(bos, macKey);
    writeSCP02Key(bos, dekKey);
    APDUCommand cmd = new APDUCommand(0x84, INS_PUT_KEY, oldKvn, 0x81, bos.toByteArray());
    return this.secureChannel.send(cmd);
}

/**
 * writes an encrypted key for the PUT KEY command
 *
 * The key is 3DES-ECB encrypted under the session DEK key, then written as:
 * 0x80 (presumably the DES key-type tag — confirm against the GP spec),
 * encrypted-key length, encrypted key, KCV length, KCV.
 *
 * @param bos the output stream to write to
 * @param key the key to encrypt and write
 * @throws IOException if the ByteArrayOutputStream throws it (never)
 */
private void writeSCP02Key(ByteArrayOutputStream bos, byte[] key) throws IOException {
    byte[] encrypted = Crypto.ecb3des(session.getKeys().getDekKeyData(), key);
    byte[] kcv = Crypto.kcv3des(key);
    bos.write(0x80);
    bos.write(encrypted.length);
    bos.write(encrypted);
    bos.write(kcv.length);
    bos.write(kcv);
}
/**
 * Deletes the Keycard applet instance.
 *
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse deleteKeycardInstance() throws IOException {
    // Instance AID is computed via a getter (unlike the constant AIDs below).
    return delete(Identifiers.getKeycardInstanceAID());
}
/**
 * Deletes the Keycard Cash applet instance.
 *
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse deleteCashInstance() throws IOException {
    // Thin wrapper over the generic DELETE with the Cash instance AID.
    return delete(Identifiers.CASH_INSTANCE_AID);
}
/**
 * Deletes the NDEF applet instance.
 *
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse deleteNDEFInstance() throws IOException {
    // Thin wrapper over the generic DELETE with the NDEF instance AID.
    return delete(Identifiers.NDEF_INSTANCE_AID);
}
/**
 * Deletes the Keycard package.
 *
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse deleteKeycardPackage() throws IOException {
    // Deletes the load file (package); instances should be removed first.
    return delete(Identifiers.PACKAGE_AID);
}
/**
 * Deletes the Keycard package and all applets installed from it. This is the method to use to remove a Keycard
 * installation.
 *
 * @throws APDUException one of the DELETE commands failed
 * @throws IOException communication error
 */
public void deleteKeycardInstancesAndPackage() throws IOException, APDUException {
    // SW_REFERENCED_DATA_NOT_FOUND is accepted so a partial installation
    // (some instances missing) can still be cleaned up without failing.
    deleteNDEFInstance().checkSW(APDUResponse.SW_OK, APDUResponse.SW_REFERENCED_DATA_NOT_FOUND);
    deleteKeycardInstance().checkSW(APDUResponse.SW_OK, APDUResponse.SW_REFERENCED_DATA_NOT_FOUND);
    deleteCashInstance().checkSW(APDUResponse.SW_OK, APDUResponse.SW_REFERENCED_DATA_NOT_FOUND);
    deleteKeycardPackage().checkSW(APDUResponse.SW_OK, APDUResponse.SW_REFERENCED_DATA_NOT_FOUND);
}
/**
 * Sends a DELETE APDU with the given AID.
 *
 * @param aid the AID to delete
 * @return the raw card response
 *
 * @throws IOException communication error.
 */
public APDUResponse delete(byte[] aid) throws IOException {
    // Command data is the AID wrapped in a 0x4F TLV: tag, length, value.
    ByteArrayOutputStream tlv = new ByteArrayOutputStream();
    tlv.write(0x4F);
    tlv.write(aid.length);
    tlv.write(aid, 0, aid.length);
    return this.secureChannel.send(new APDUCommand(0x80, INS_DELETE, 0, 0, tlv.toByteArray()));
}
/**
 * Loads the Keycard package.
 *
 * @param in the CAP file as an InputStream
 * @param cb the progress callback
 *
 * @throws IOException communication error
 * @throws APDUException one of the INSTALL [for Load] or LOAD commands failed
 */
public void loadKeycardPackage(InputStream in, LoadCallback cb) throws IOException, APDUException {
    installForLoad(Identifiers.PACKAGE_AID).checkOK();
    Load load = new Load(in);
    byte[] block;
    int steps = load.blocksCount();
    // Stream the CAP file block by block. getCount() appears to be 1-based
    // after a read (hence the -1 for the 0-based LOAD block number) — confirm
    // against the Load class.
    while((block = load.nextDataBlock()) != null) {
        load(block, (load.getCount() - 1), load.hasMore()).checkOK();
        cb.blockLoaded(load.getCount(), steps);
    }
}
/**
 * Sends an INSTALL [for LOAD] APDU. Use only if loading something other than the Keycard package.
 *
 * @param aid the AID
 *
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse installForLoad(byte[] aid) throws IOException {
    // No extradition: empty security domain AID.
    return installForLoad(aid, new byte[0]);
}
/**
 * Sends an INSTALL [for LOAD] APDU with package extradition. Use only if loading something other than the Keycard package.
 *
 * @param aid the AID
 * @param sdaid the AID of the SD target of the extradition
 *
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse installForLoad(byte[] aid, byte[] sdaid) throws IOException {
    ByteArrayOutputStream request = new ByteArrayOutputStream();
    // Load file AID and target SD AID, each length-prefixed.
    request.write(aid.length);
    request.write(aid, 0, aid.length);
    request.write(sdaid.length);
    request.write(sdaid, 0, sdaid.length);
    // empty hash length and hash (three zero bytes, as in the original layout)
    for (int i = 0; i < 3; i++) {
        request.write(0x00);
    }
    return this.secureChannel.send(new APDUCommand(0x80, INS_INSTALL, INSTALL_FOR_LOAD_P1, 0, request.toByteArray()));
}
/**
 * Sends a single LOAD APDU. Use only if loading something other than the Keycard package.
 *
 * @param data the data of the block
 * @param count the block number
 * @param hasMoreBlocks whether there are more blocks coming or not
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse load(byte[] data, int count, boolean hasMoreBlocks) throws IOException {
    // P1 flags whether this is an intermediate or the final block.
    int p1;
    if (hasMoreBlocks) {
        p1 = LOAD_P1_MORE_BLOCKS;
    } else {
        p1 = LOAD_P1_LAST_BLOCK;
    }
    return this.secureChannel.send(new APDUCommand(0x80, INS_LOAD, p1, count, data));
}
/**
 * Sends an INSTALL [for Install & Make Selectable] command. Use only if not installing applets part of the Keycard
 * package
 *
 * @param packageAID the package AID
 * @param appletAID the applet AID
 * @param instanceAID the instance AID
 * @param params the installation parameters
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse installForInstall(byte[] packageAID, byte[] appletAID, byte[] instanceAID, byte[] params) throws IOException {
    ByteArrayOutputStream data = new ByteArrayOutputStream();
    // Package, applet and instance AIDs, each length-prefixed, in this exact order.
    data.write(packageAID.length);
    data.write(packageAID);
    data.write(appletAID.length);
    data.write(appletAID);
    data.write(instanceAID.length);
    data.write(instanceAID);
    // Single zero privilege byte.
    byte[] privileges = new byte[]{0x00};
    data.write(privileges.length);
    data.write(privileges);
    // Application parameters wrapped in a 0xC9 tag-length-value structure.
    byte[] fullParams = new byte[2 + params.length];
    fullParams[0] = (byte) 0xC9;
    fullParams[1] = (byte) params.length;
    System.arraycopy(params, 0, fullParams, 2, params.length);
    data.write(fullParams.length);
    data.write(fullParams);
    // empty perform token
    data.write(0x00);
    APDUCommand cmd = new APDUCommand(0x80, INS_INSTALL, INSTALL_FOR_INSTALL_P1, 0, data.toByteArray());
    return this.secureChannel.send(cmd);
}
/**
 * Installs the NDEF applet from the Keycard package.
 *
 * @param ndefRecord the initial NDEF record. Can be a zero-length array but not null
 * @return the card response
 * @throws IOException communication error
 */
public APDUResponse installNDEFApplet(byte[] ndefRecord) throws IOException {
    // The NDEF record is passed as the applet installation parameters.
    return installForInstall(Identifiers.PACKAGE_AID, Identifiers.NDEF_AID, Identifiers.NDEF_INSTANCE_AID, ndefRecord);
}
/**
 * Installs the Keycard applet.
 *
 * @return the card response
 * @throws IOException communication error.
 */
public APDUResponse installKeycardApplet() throws IOException {
    // No installation parameters are needed for the main applet.
    return installForInstall(Identifiers.PACKAGE_AID, Identifiers.KEYCARD_AID, Identifiers.getKeycardInstanceAID(), new byte[0]);
}
/**
 * Installs the Cash applet.
 *
 * @param cashData the initial Cash data. Can be a zero-length array but not null
 * @return the card response
 * @throws IOException communication error.
 */
public APDUResponse installCashApplet(byte[] cashData) throws IOException {
    // cashData is passed as the applet installation parameters.
    return installForInstall(Identifiers.PACKAGE_AID, Identifiers.CASH_AID, Identifiers.CASH_INSTANCE_AID, cashData);
}
/**
 * Installs the Cash applet.
 *
 * @return the card response
 * @throws IOException communication error.
 */
public APDUResponse installCashApplet() throws IOException {
    // Convenience overload: install with empty initial Cash data.
    return installCashApplet(new byte[0]);
}
}
|
package io.cattle.platform.docker.process.account;
import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.core.constants.AccountConstants;
import io.cattle.platform.core.constants.NetworkConstants;
import io.cattle.platform.core.constants.NetworkServiceConstants;
import io.cattle.platform.core.constants.NetworkServiceProviderConstants;
import io.cattle.platform.core.dao.GenericResourceDao;
import io.cattle.platform.core.dao.NetworkDao;
import io.cattle.platform.core.model.Account;
import io.cattle.platform.core.model.Network;
import io.cattle.platform.docker.constants.DockerNetworkConstants;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.handler.ProcessPostListener;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.process.common.handler.AbstractObjectProcessLogic;
import io.cattle.platform.util.type.CollectionUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import com.netflix.config.DynamicStringListProperty;
// Post-listener on the account.create process: seeds the standard Docker
// networks for eligible accounts and records the managed network as the
// account's default network.
public class DockerAccountCreate extends AbstractObjectProcessLogic implements ProcessPostListener {
    // Account kinds for which networks are created (config-driven list).
    DynamicStringListProperty KINDS = ArchaiusUtil.getList("docker.network.create.account.types");
    // NOTE(review): the two CIDR properties below are loaded from config but
    // never referenced in this class — possibly consumed elsewhere or dead.
    DynamicStringListProperty DOCKER_NETWORK_SUBNET_CIDR = ArchaiusUtil.getList("docker.network.subnet.cidr");
    DynamicStringListProperty DOCKER_VIP_SUBNET_CIDR = ArchaiusUtil.getList("docker.vip.subnet.cidr");
    @Inject
    NetworkDao networkDao;
    @Inject
    GenericResourceDao resourceDao;
    @Override
    public String[] getProcessNames() {
        return new String[]{"account.create"};
    }
    @Override
    public HandlerResult handle(ProcessState state, ProcessInstance process) {
        Account account = (Account)state.getResource();
        // Only act on configured account kinds.
        if (!KINDS.get().contains(account.getKind())) {
            return null;
        }
        // Existing networks are looked up first so creation is idempotent.
        Map<String, Network> networksByKind = getNetworksByUuid(account);
        createNetwork(DockerNetworkConstants.KIND_DOCKER_HOST, account, networksByKind, "Docker Host Network Mode", null);
        createNetwork(DockerNetworkConstants.KIND_DOCKER_NONE, account, networksByKind, "Docker None Network Mode", null);
        createNetwork(DockerNetworkConstants.KIND_DOCKER_CONTAINER, account, networksByKind, "Docker Container Network Mode", null);
        createNetwork(DockerNetworkConstants.KIND_DOCKER_BRIDGE, account, networksByKind, "Docker Bridge Network Mode", null);
        Network managedNetwork = createManagedNetwork(account, networksByKind);
        // Record the managed network as the default and let the process chain continue.
        return new HandlerResult(AccountConstants.FIELD_DEFAULT_NETWORK_ID, managedNetwork.getId()).withShouldContinue(true);
    }
    // Creates (or reuses) the Rancher managed network, attaches a subnet and
    // an agent-instance network service provider to it.
    protected Network createManagedNetwork(Account account, Map<String, Network> networksByKind) {
        Network network = createNetwork(NetworkConstants.KIND_HOSTONLY, account, networksByKind,
                "Rancher Managed Network",
                NetworkConstants.FIELD_HOST_VNET_URI, "bridge://docker0",
                NetworkConstants.FIELD_DYNAMIC_CREATE_VNET, true);
        networkDao.addManagedNetworkSubnet(network);
        createAgentInstanceProvider(network);
        return network;
    }
    // Registers the agent-instance NSP offering the standard service kinds.
    protected void createAgentInstanceProvider(Network network) {
        List<String> servicesKinds = new ArrayList<String>();
        servicesKinds.add(NetworkServiceConstants.KIND_DNS);
        servicesKinds.add(NetworkServiceConstants.KIND_LINK);
        servicesKinds.add(NetworkServiceConstants.KIND_IPSEC_TUNNEL);
        servicesKinds.add(NetworkServiceConstants.KIND_PORT_SERVICE);
        servicesKinds.add(NetworkServiceConstants.KIND_HOST_NAT_GATEWAY);
        servicesKinds.add(NetworkServiceConstants.KIND_HEALTH_CHECK);
        networkDao.createNsp(network, servicesKinds, NetworkServiceProviderConstants.KIND_AGENT_INSTANCE);
    }
    // Returns the existing network of the given kind, or creates and
    // schedules a new one with the supplied data fields.
    protected Network createNetwork(String kind, Account account, Map<String, Network> networksByKind,
            String name, String key, Object... valueKeyValue) {
        Network network = networksByKind.get(kind);
        if (network != null) {
            return network;
        }
        Map<String, Object> data = key == null ? new HashMap<String, Object>() :
                CollectionUtils.asMap(key, valueKeyValue);
        data.put(ObjectMetaDataManager.NAME_FIELD, name);
        data.put(ObjectMetaDataManager.ACCOUNT_FIELD, account.getId());
        data.put(ObjectMetaDataManager.KIND_FIELD, kind);
        return resourceDao.createAndSchedule(Network.class, data);
    }
    // NOTE(review): despite the name, the returned map is keyed by network
    // *kind*, not UUID. Renaming would change the protected interface, so it
    // is only documented here.
    protected Map<String, Network> getNetworksByUuid(Account account) {
        Map<String, Network> result = new HashMap<>();
        for (Network network : networkDao.getNetworksForAccount(account.getId(), null)) {
            result.put(network.getKind(), network);
        }
        return result;
    }
}
|
#!/bin/sh
# Copyright 2019 Google LLC
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# Regenerates the f32 GEMM micro-kernel sources from their templates.
# Each template is instantiated once with INC=0 (plain GEMM) and, where
# applicable, once with INC=1 (accumulating GEMMINC variant).

#################################### Scalar ###################################
tools/xngen src/f32-gemm/scalar.c.in -D MR=1 -D NR=4 -D INC=0 -o src/f32-gemm/1x4-scalar.c
tools/xngen src/f32-gemm/scalar.c.in -D MR=1 -D NR=4 -D INC=1 -o src/f32-gemminc/1x4-scalar.c
tools/xngen src/f32-gemm/scalar.c.in -D MR=2 -D NR=4 -D INC=0 -o src/f32-gemm/2x4-scalar.c
tools/xngen src/f32-gemm/scalar.c.in -D MR=2 -D NR=4 -D INC=1 -o src/f32-gemminc/2x4-scalar.c
tools/xngen src/f32-gemm/scalar.c.in -D MR=4 -D NR=2 -D INC=0 -o src/f32-gemm/4x2-scalar.c
tools/xngen src/f32-gemm/scalar.c.in -D MR=4 -D NR=4 -D INC=0 -o src/f32-gemm/4x4-scalar.c
tools/xngen src/f32-gemm/scalar.c.in -D MR=4 -D NR=4 -D INC=1 -o src/f32-gemminc/4x4-scalar.c
############################### AArch64 assembly ##############################
tools/xngen src/f32-gemm/1x12-aarch64-neonfma-cortex-a53.S.in -D INC=0 -o src/f32-gemm/1x12-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/1x12-aarch64-neonfma-cortex-a53.S.in -D INC=1 -o src/f32-gemminc/1x12-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/1x8-aarch64-neonfma-cortex-a53.S.in -D INC=0 -o src/f32-gemm/1x8-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/1x8-aarch64-neonfma-cortex-a53.S.in -D INC=1 -o src/f32-gemminc/1x8-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/1x8-aarch64-neonfma-cortex-a57.S.in -D INC=0 -o src/f32-gemm/1x8-aarch64-neonfma-cortex-a57.S
tools/xngen src/f32-gemm/1x8-aarch64-neonfma-cortex-a57.S.in -D INC=1 -o src/f32-gemminc/1x8-aarch64-neonfma-cortex-a57.S
tools/xngen src/f32-gemm/1x8-aarch64-neonfma-cortex-a75.S.in -D INC=0 -o src/f32-gemm/1x8-aarch64-neonfma-cortex-a75.S
tools/xngen src/f32-gemm/1x8-aarch64-neonfma-cortex-a75.S.in -D INC=1 -o src/f32-gemminc/1x8-aarch64-neonfma-cortex-a75.S
tools/xngen src/f32-gemm/4x12-aarch64-neonfma-cortex-a53.S.in -D INC=0 -o src/f32-gemm/4x12-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/4x12-aarch64-neonfma-cortex-a53.S.in -D INC=1 -o src/f32-gemminc/4x12-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-cortex-a53.S.in -D INC=0 -o src/f32-gemm/4x8-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-cortex-a53.S.in -D INC=1 -o src/f32-gemminc/4x8-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-cortex-a57.S.in -D INC=0 -o src/f32-gemm/4x8-aarch64-neonfma-cortex-a57.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-cortex-a57.S.in -D INC=1 -o src/f32-gemminc/4x8-aarch64-neonfma-cortex-a57.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-cortex-a75.S.in -D INC=0 -o src/f32-gemm/4x8-aarch64-neonfma-cortex-a75.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-cortex-a75.S.in -D INC=1 -o src/f32-gemminc/4x8-aarch64-neonfma-cortex-a75.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-ld128.S.in -D INC=0 -o src/f32-gemm/4x8-aarch64-neonfma-ld128.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-ld128.S.in -D INC=1 -o src/f32-gemminc/4x8-aarch64-neonfma-ld128.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-ld64.S.in -D INC=0 -o src/f32-gemm/4x8-aarch64-neonfma-ld64.S
tools/xngen src/f32-gemm/4x8-aarch64-neonfma-ld64.S.in -D INC=1 -o src/f32-gemminc/4x8-aarch64-neonfma-ld64.S
tools/xngen src/f32-gemm/5x8-aarch64-neonfma-cortex-a75.S.in -D INC=0 -o src/f32-gemm/5x8-aarch64-neonfma-cortex-a75.S
tools/xngen src/f32-gemm/5x8-aarch64-neonfma-cortex-a75.S.in -D INC=1 -o src/f32-gemminc/5x8-aarch64-neonfma-cortex-a75.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-cortex-a53.S.in -D INC=0 -o src/f32-gemm/6x8-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-cortex-a53.S.in -D INC=1 -o src/f32-gemminc/6x8-aarch64-neonfma-cortex-a53.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-cortex-a57.S.in -D INC=0 -o src/f32-gemm/6x8-aarch64-neonfma-cortex-a57.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-cortex-a57.S.in -D INC=1 -o src/f32-gemminc/6x8-aarch64-neonfma-cortex-a57.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-cortex-a73.S.in -D INC=0 -o src/f32-gemm/6x8-aarch64-neonfma-cortex-a73.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-cortex-a73.S.in -D INC=1 -o src/f32-gemminc/6x8-aarch64-neonfma-cortex-a73.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-cortex-a75.S.in -D INC=0 -o src/f32-gemm/6x8-aarch64-neonfma-cortex-a75.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-cortex-a75.S.in -D INC=1 -o src/f32-gemminc/6x8-aarch64-neonfma-cortex-a75.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-ld64.S.in -D INC=0 -o src/f32-gemm/6x8-aarch64-neonfma-ld64.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-ld64.S.in -D INC=1 -o src/f32-gemminc/6x8-aarch64-neonfma-ld64.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-ld128.S.in -D INC=0 -o src/f32-gemm/6x8-aarch64-neonfma-ld128.S
tools/xngen src/f32-gemm/6x8-aarch64-neonfma-ld128.S.in -D INC=1 -o src/f32-gemminc/6x8-aarch64-neonfma-ld128.S
################################### ARM NEON ##################################
### LD64 micro-kernels
# Note: the two 1x8-neon-ld64 commands were previously misfiled under the
# LD128 heading; they are LD64 kernels and are grouped here accordingly.
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=1 -D NR=8 -D FMA=0 -D INC=0 -o src/f32-gemm/1x8-neon-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=1 -D NR=8 -D FMA=0 -D INC=1 -o src/f32-gemminc/1x8-neon-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=1 -D NR=8 -D FMA=1 -D INC=0 -o src/f32-gemm/1x8-neonfma-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=1 -D NR=8 -D FMA=1 -D INC=1 -o src/f32-gemminc/1x8-neonfma-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=4 -D NR=8 -D FMA=0 -D INC=0 -o src/f32-gemm/4x8-neon-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=4 -D NR=8 -D FMA=0 -D INC=1 -o src/f32-gemminc/4x8-neon-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=4 -D NR=8 -D FMA=1 -D INC=0 -o src/f32-gemm/4x8-neonfma-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=4 -D NR=8 -D FMA=1 -D INC=1 -o src/f32-gemminc/4x8-neonfma-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=5 -D NR=8 -D FMA=0 -D INC=0 -o src/f32-gemm/5x8-neon-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=5 -D NR=8 -D FMA=0 -D INC=1 -o src/f32-gemminc/5x8-neon-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=5 -D NR=8 -D FMA=1 -D INC=0 -o src/f32-gemm/5x8-neonfma-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=5 -D NR=8 -D FMA=1 -D INC=1 -o src/f32-gemminc/5x8-neonfma-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=6 -D NR=8 -D FMA=0 -D INC=0 -o src/f32-gemm/6x8-neon-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=6 -D NR=8 -D FMA=0 -D INC=1 -o src/f32-gemminc/6x8-neon-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=6 -D NR=8 -D FMA=1 -D INC=0 -o src/f32-gemm/6x8-neonfma-ld64.c
tools/xngen src/f32-gemm/neon-ld64.c.in -D MR=6 -D NR=8 -D FMA=1 -D INC=1 -o src/f32-gemminc/6x8-neonfma-ld64.c
### LD128 micro-kernels
tools/xngen src/f32-gemm/neon-ld128.c.in -D MR=4 -D NR=8 -D FMA=0 -D INC=0 -o src/f32-gemm/4x8-neon-ld128.c
tools/xngen src/f32-gemm/neon-ld128.c.in -D MR=4 -D NR=8 -D FMA=0 -D INC=1 -o src/f32-gemminc/4x8-neon-ld128.c
tools/xngen src/f32-gemm/neon-ld128.c.in -D MR=4 -D NR=8 -D FMA=1 -D INC=0 -o src/f32-gemm/4x8-neonfma-ld128.c
tools/xngen src/f32-gemm/neon-ld128.c.in -D MR=4 -D NR=8 -D FMA=1 -D INC=1 -o src/f32-gemminc/4x8-neonfma-ld128.c
### MRx2 micro-kernels
tools/xngen src/f32-gemm/MRx2-neon-ld64.c.in -D MR=4 -D NR=2 -D FMA=0 -D INC=0 -o src/f32-gemm/4x2-neon-ld64.c
tools/xngen src/f32-gemm/MRx2-neon-ld64.c.in -D MR=4 -D NR=2 -D FMA=1 -D INC=0 -o src/f32-gemm/4x2-neonfma-ld64.c
#################################### PSIMD ####################################
### LOAD1+BROADCAST micro-kernels
tools/xngen src/f32-gemm/psimd-loadsplat.c.in -D MR=1 -D NR=8 -D INC=0 -o src/f32-gemm/1x8-psimd-loadsplat.c
tools/xngen src/f32-gemm/psimd-loadsplat.c.in -D MR=1 -D NR=8 -D INC=1 -o src/f32-gemminc/1x8-psimd-loadsplat.c
tools/xngen src/f32-gemm/psimd-loadsplat.c.in -D MR=4 -D NR=8 -D INC=0 -o src/f32-gemm/4x8-psimd-loadsplat.c
tools/xngen src/f32-gemm/psimd-loadsplat.c.in -D MR=4 -D NR=8 -D INC=1 -o src/f32-gemminc/4x8-psimd-loadsplat.c
tools/xngen src/f32-gemm/psimd-loadsplat.c.in -D MR=6 -D NR=8 -D INC=0 -o src/f32-gemm/6x8-psimd-loadsplat.c
tools/xngen src/f32-gemm/psimd-loadsplat.c.in -D MR=6 -D NR=8 -D INC=1 -o src/f32-gemminc/6x8-psimd-loadsplat.c
### LOAD4+DUPLICATE micro-kernels
tools/xngen src/f32-gemm/psimd-splat.c.in -D MR=1 -D NR=8 -D INC=0 -o src/f32-gemm/1x8-psimd-splat.c
tools/xngen src/f32-gemm/psimd-splat.c.in -D MR=1 -D NR=8 -D INC=1 -o src/f32-gemminc/1x8-psimd-splat.c
tools/xngen src/f32-gemm/psimd-splat.c.in -D MR=4 -D NR=8 -D INC=0 -o src/f32-gemm/4x8-psimd-splat.c
tools/xngen src/f32-gemm/psimd-splat.c.in -D MR=4 -D NR=8 -D INC=1 -o src/f32-gemminc/4x8-psimd-splat.c
tools/xngen src/f32-gemm/psimd-splat.c.in -D MR=6 -D NR=8 -D INC=0 -o src/f32-gemm/6x8-psimd-splat.c
tools/xngen src/f32-gemm/psimd-splat.c.in -D MR=6 -D NR=8 -D INC=1 -o src/f32-gemminc/6x8-psimd-splat.c
### LOAD4+PERMUTE micro-kernels
tools/xngen src/f32-gemm/psimd-s4.c.in -D MR=1 -D NR=8 -D INC=0 -o src/f32-gemm/1x8s4-psimd.c
tools/xngen src/f32-gemm/psimd-s4.c.in -D MR=1 -D NR=8 -D INC=1 -o src/f32-gemminc/1x8s4-psimd.c
tools/xngen src/f32-gemm/psimd-s4.c.in -D MR=4 -D NR=8 -D INC=0 -o src/f32-gemm/4x8s4-psimd.c
tools/xngen src/f32-gemm/psimd-s4.c.in -D MR=4 -D NR=8 -D INC=1 -o src/f32-gemminc/4x8s4-psimd.c
tools/xngen src/f32-gemm/psimd-s4.c.in -D MR=6 -D NR=8 -D INC=0 -o src/f32-gemm/6x8s4-psimd.c
tools/xngen src/f32-gemm/psimd-s4.c.in -D MR=6 -D NR=8 -D INC=1 -o src/f32-gemminc/6x8s4-psimd.c
################################### x86 SSE ###################################
### LOAD1+BROADCAST micro-kernels
tools/xngen src/f32-gemm/sse-load1.c.in -D MR=1 -D NR=8 -D INC=0 -o src/f32-gemm/1x8-sse-load1.c
tools/xngen src/f32-gemm/sse-load1.c.in -D MR=1 -D NR=8 -D INC=1 -o src/f32-gemminc/1x8-sse-load1.c
tools/xngen src/f32-gemm/sse-load1.c.in -D MR=4 -D NR=8 -D INC=0 -o src/f32-gemm/4x8-sse-load1.c
tools/xngen src/f32-gemm/sse-load1.c.in -D MR=4 -D NR=8 -D INC=1 -o src/f32-gemminc/4x8-sse-load1.c
### LOAD4+DUPLICATE micro-kernels
tools/xngen src/f32-gemm/sse-dup.c.in -D MR=1 -D NR=8 -D INC=0 -o src/f32-gemm/1x8-sse-dup.c
tools/xngen src/f32-gemm/sse-dup.c.in -D MR=1 -D NR=8 -D INC=1 -o src/f32-gemminc/1x8-sse-dup.c
tools/xngen src/f32-gemm/sse-dup.c.in -D MR=4 -D NR=8 -D INC=0 -o src/f32-gemm/4x8-sse-dup.c
tools/xngen src/f32-gemm/sse-dup.c.in -D MR=4 -D NR=8 -D INC=1 -o src/f32-gemminc/4x8-sse-dup.c
### LOAD4+PERMUTE micro-kernels
tools/xngen src/f32-gemm/sse-shuffle.c.in -D MR=1 -D NR=8 -D INC=0 -o src/f32-gemm/1x8s4-sse.c
tools/xngen src/f32-gemm/sse-shuffle.c.in -D MR=1 -D NR=8 -D INC=1 -o src/f32-gemminc/1x8s4-sse.c
tools/xngen src/f32-gemm/sse-shuffle.c.in -D MR=4 -D NR=8 -D INC=0 -o src/f32-gemm/4x8s4-sse.c
tools/xngen src/f32-gemm/sse-shuffle.c.in -D MR=4 -D NR=8 -D INC=1 -o src/f32-gemminc/4x8s4-sse.c
################################## Unit tests #################################
# Regenerate the GEMM unit tests from their YAML specs.
tools/generate-gemm-test.py --spec test/f32-gemm.yaml --output test/f32-gemm.cc
tools/generate-gemm-test.py --spec test/f32-gemminc.yaml --output test/f32-gemminc.cc
|
package main
/* i javad */
/*eslint-disable */
import (
"Aramooz/web/controllers"
"time"
"github.com/iris-contrib/middleware/cors"
"github.com/kataras/iris"
"github.com/kataras/iris/sessions"
"github.com/kataras/iris/middleware/logger"
"github.com/kataras/iris/middleware/recover"
"github.com/kataras/iris/mvc"
)
// <NAME>
// this app use Iris as frame work , any other framework works too
func main() {
	app := iris.New()
	app.Logger().SetLevel("debug")
	// Optionally, add two built-in handlers
	// that can recover from any http-relative panics
	// and log the requests to the terminal.
	app.Use(recover.New())
	app.Use(logger.New())
	// Session cookie name is an opaque random-looking hex string.
	sessionManager := sessions.New(sessions.Config{
		Cookie:       "b502320222bfe165e6bc37db8ea466c3bad11fad72de1d54bbcfe220bb3c94c8",
		Expires:      60 * time.Minute,
		AllowReclaim: true,
	})
	//app.RegisterView(iris.HTML("./web/views", ".html"))
	//app.StaticWeb("/public", "./web/public")
	// Permissive CORS: any origin, credentialed requests allowed.
	crs := cors.New(cors.Options{
		AllowedOrigins:   []string{"*"},
		AllowedMethods:   []string{"GET", "HEAD", "POST", "PUT", "OPTIONS", "DELETE"},
		AllowedHeaders:   []string{"Accept", "X-USER", "content-type", "X-Requested-With", "Content-Length", "Accept-Encoding", "X-CSRF-Token", "Authorization", "Authorization-Token", "Screen"},
		AllowCredentials: true,
	})
	//mvc.New(app.Party("/user", crs)).Handle(new(controllers.UserController))
	mvc.New(app.Party("/user", crs)).Handle(new(controllers.UserController))
	mvc.New(app.Party("/exam", crs)).Handle(new(controllers.ExamController))
	mvc.New(app.Party("/addquestion", crs)).Handle(new(controllers.QuestionController))
	//dg := newClient()
	//txn := dg.NewTxn()
	// Method: GET
	// Resource: http://localhost:9090 (the server listens on :9090 below)
	app.Handle("GET", "/", func(ctx iris.Context) {
		S := sessionManager.Start(ctx)
		// Demo payload stored in the session under key "Con".
		U := struct {
			Uid  string
			Name string
		}{
			Uid:  "0x213124",
			Name: "Hamid",
		}
		S.Set("Con", U)
		visits := S.Increment("visits", 1)
		ctx.Writef("you visit this site %d time and %#v", S.Get("visits"), visits)
	})
	// same as app.Handle("GET", "/ping", [...])
	// Method: GET
	// Resource: http://localhost:9090/ping
	app.Get("/ping", func(ctx iris.Context) {
		S := sessionManager.Start(ctx)
		ctx.Writef("you visit this site %d time and %#v", S.Get("visits"), S.Get("Con"))
	})
	// Method: GET
	// Resource: http://localhost:9090/hello
	app.Get("/hello", func(ctx iris.Context) {
		ctx.JSON(iris.Map{"message": "Hello Iris!"})
	})
	// http://localhost:9090
	// http://localhost:9090/ping
	// http://localhost:9090/hello
	app.Run(iris.Addr(":9090"), iris.WithoutServerError(iris.ErrServerClosed))
}
|
<gh_stars>0
$(function () {
    $('#btnAdd').click(function () {
        // How many "duplicatable" input sections we currently have.
        var num = $('.clonedInput').length;
        // Numeric ID of the new section. (Fix: the original used
        // `new Number(num + 1)`, a Number wrapper object; a primitive is correct.)
        var newNum = num + 1;
        // Clone the last section and give it the next ID.
        var newElem = $('#entry' + num).clone().attr('id', 'entry' + newNum).fadeIn('slow');
        /* This is where we manipulate the name/id values of the input inside the new, cloned element
           Below are examples of what forms elements you can clone, but not the only ones.
           There are 2 basic structures below: one for an H2, and one for form elements.
           To make more, you can copy the one for form elements and simply update the classes for its label and input.
           Keep in mind that the .val() method is what clears the element when it gets cloned. Radio and checkboxes need .val([]) instead of .val('').
        */
        // H2 - section
        newElem.find('.heading-reference').attr('id', 'ID' + newNum + '_reference').attr('name', 'ID' + newNum + '_reference').html('Timesheet #' + newNum);
        newElem.find('.label_ttl').attr('for', 'ID' + newNum + '_title');
        newElem.find('.name').attr('id', 'ID' + newNum + 'name').attr('name', 'ID' + newNum + 'name').val('');
        // name
        newElem.find('.project_name').attr('for', 'ID' + newNum + 'project_name');
        // Fix: the original set a literal attribute called "project_name"
        // instead of "name", breaking form submission for this field.
        newElem.find('.project_name').attr('id', 'ID' + newNum + 'project_name').attr('name', 'ID' + newNum + 'project_name').val('');
        // start time
        newElem.find('.start_time').attr('for', 'ID' + newNum + 'start_time');
        newElem.find('.start_time').attr('data-link-field', 'ID' + newNum + 'start_time').attr('id', 'ID' + newNum + 'start_time').attr('name', 'ID' + newNum + 'start_time').val('').datetimepicker({
            weekStart: 1,
            todayBtn: 1,
            autoclose: 1,
            todayHighlight: 1,
            startView: 2,
            forceParse: 0,
            showMeridian: 1
        });
        // asana link
        newElem.find('.asana_link').attr('for', 'ID' + newNum + 'asana_link');
        newElem.find('.asana_link').attr('id', 'ID' + newNum + 'asana_link').attr('name', 'ID' + newNum + 'asana_link').val('');
        // end time
        newElem.find('.end_time').attr('for', 'ID' + newNum + 'end_time');
        newElem.find('.end_time').attr('data-link-field', 'ID' + newNum + 'start_time').attr('id', 'ID' + newNum + 'end_time').attr('name', 'ID' + newNum + 'end_time').val('').datetimepicker({
            //language: 'fr',
            weekStart: 1,
            todayBtn: 1,
            autoclose: 1,
            todayHighlight: 1,
            startView: 2,
            forceParse: 0,
            showMeridian: 1
        });
        // client id
        newElem.find('.client_id').attr('for', 'ID' + newNum + 'client_id');
        newElem.find('.client_id').attr('id', 'ID' + newNum + 'client_id').attr('name', 'ID' + newNum + 'client_id').val('');
        $('#entry' + num).after(newElem);
        // NOTE(review): nothing above creates an element with id 'ID<n>_title'
        // (only '_reference' is assigned) — confirm the focus target exists in the markup.
        $('#ID' + newNum + '_title').focus();
        // Enable the "remove" button. This only shows once you have a duplicated section.
        $('#btnDel').attr('disabled', false);
        // Right now you can only add 4 sections, for a total of 5. Change '5' below to the max number of sections you want to allow.
        if (newNum === 5)
            $('#btnAdd').attr('disabled', true).prop('value', "You've reached the limit"); // value here updates the text in the 'add' button when the limit is reached
    });
    $('#btnDel').click(function () {
        // Confirmation dialog box. Works on all desktop browsers and iPhone.
        if (confirm("Are you sure you wish to remove this section? This cannot be undone.")) {
            var num = $('.clonedInput').length;
            // how many "duplicatable" input fields we currently have
            $('#entry' + num).slideUp('slow', function () {
                $(this).remove();
                // if only one element remains, disable the "remove" button
                if (num - 1 === 1)
                    $('#btnDel').attr('disabled', true);
                // enable the "add" button
                $('#btnAdd').attr('disabled', false).prop('value', "add section");
            });
        }
        return false; // Removes the last section you added
    });
    // Enable the "add" button
    $('#btnAdd').attr('disabled', false);
    // Disable the "remove" button
    $('#btnDel').attr('disabled', true);
});
# symlink SOURCE TARGET
# Replaces whatever exists at TARGET (file, directory or symlink) with a
# symlink to SOURCE, reporting progress via the substep_* helpers.
symlink() {
    OVERWRITTEN=""
    # -e matches existing files/dirs, -h also catches dangling symlinks.
    if [ -e "$2" ] || [ -h "$2" ]; then
        OVERWRITTEN="(Overwritten)"
        # -r because the target may be a directory tree.
        if ! rm -r "$2"; then
            substep_error "Failed to remove existing file(s) at $2."
        fi
    fi
    if ln -s "$1" "$2"; then
        substep_success "Symlinked $2 to $1. $OVERWRITTEN"
    else
        substep_error "Symlinking $2 to $1 failed."
    fi
}
# clear_broken_symlinks DIR
# Removes dangling symlinks beneath DIR. With -L, find follows links, so
# "-type l" only matches links whose target cannot be resolved (i.e. broken).
clear_broken_symlinks() {
    # IFS= and -r keep filenames with leading whitespace or backslashes
    # intact (plain `read fn` would mangle them).
    find -L "$1" -type l | while IFS= read -r fn; do
        if rm "$fn"; then
            substep_success "Removed broken symlink at $fn."
        else
            substep_error "Failed to remove broken symlink at $fn."
        fi
    done
}
# Took these printing functions from https://github.com/Sajjadhosn/dotfiles
# coloredEcho TEXT COLOR ARROW
# Prints "ARROW TEXT" in bold COLOR, where COLOR is a name or a tput color
# number 0-7.
coloredEcho() {
    local exp="$1";
    local color="$2";
    local arrow="$3";
    # Map a color name to its tput number. The regex must be UNQUOTED: a
    # quoted right-hand side of =~ is matched as a literal string, so the
    # original never recognised numeric colors and mapped them to white.
    if ! [[ $color =~ ^[0-9]$ ]] ; then
        case $(echo $color | tr '[:upper:]' '[:lower:]') in
            black) color=0 ;;
            red) color=1 ;;
            green) color=2 ;;
            yellow) color=3 ;;
            blue) color=4 ;;
            magenta) color=5 ;;
            cyan) color=6 ;;
            white|*) color=7 ;; # white or invalid color
        esac
    fi
    tput bold;
    tput setaf "$color";
    echo "$arrow $exp";
    tput sgr0;
}
# Top-level status messages: long arrow prefix.
info() {
    coloredEcho "$1" blue "========>"
}
success() {
    coloredEcho "$1" green "========>"
}
error() {
    coloredEcho "$1" red "========>"
}
# Sub-step status messages: short arrow prefix.
substep_info() {
    coloredEcho "$1" magenta "===="
}
substep_success() {
    coloredEcho "$1" cyan "===="
}
substep_error() {
    coloredEcho "$1" red "===="
}
// onclick handler attached to each card div:
// - reveal the first card and remember it in a variable
// - if a first card already exists, store the second in another variable
// - then compare the two variables
var global_card = document.getElementsByClassName("cart"); // NOTE(review): class is spelled "cart" (not "card") — presumably matches the markup
var global_data = {
    first_card_value: "",
    second_card_value: "",
    first_card_id: "",
    second_card_id: "",
    tab_id_win: [] // ids of cards that have already been matched
}
// Reveals the clicked card: stores the first pick, then on the second pick
// schedules the comparison after a short delay so the player sees both faces.
function return_cart(button_cart){
    // Guard: a double-click on the already-revealed first card must not be
    // treated as a second pick, otherwise a card would match itself.
    if (global_data.first_card_value && button_cart.id === global_data.first_card_id) {
        return;
    }
    if (! global_data.first_card_value) {
        global_data.first_card_value = button_cart.getAttribute("valeur");
        button_cart.innerHTML = global_data.first_card_value;
        global_data.first_card_id = button_cart.id;
    }
    else {
        global_data.second_card_value = button_cart.getAttribute("valeur");
        button_cart.innerHTML = global_data.second_card_value;
        global_data.second_card_id = button_cart.id;
        setTimeout(function(){ compare(); }, 700);
    }
}
// Compares the two revealed cards: records a match (and shows the win screen
// when all cards are matched) or hides both faces again.
function compare(){
    if (global_data.first_card_value == global_data.second_card_value) {
        global_data.tab_id_win.push(global_data.first_card_id);
        global_data.tab_id_win.push(global_data.second_card_id);
        // Win when every card has been matched. (Fix: the original compared
        // tab_id_win.length-1 to global_card.length, which can never be equal
        // since ids are pushed two at a time, so the win screen never showed.)
        if (global_data.tab_id_win.length == global_card.length) {
            document.getElementById('win').style.display = "block";
        }
    }
    else {
        // No match: hide both card faces again.
        document.getElementById(global_data.first_card_id).innerHTML = "";
        document.getElementById(global_data.second_card_id).innerHTML = "";
    }
    // Reset the current selection for the next turn.
    global_data.first_card_value="";
    global_data.second_card_value="";
}
// Clears the in-progress card selection only.
// NOTE(review): this does not clear tab_id_win or the revealed card faces,
// so it is a partial reset — confirm whether a full board reset is intended.
function restart(){
    global_data.first_card_value="";
    global_data.second_card_value="";
}
|
"""A test class for testing the class CodeElement."""
from typing import List, Union
from concepttordf import Concept
from datacatalogtordf import URI
import pytest
from pytest_mock import MockFixture
from rdflib import Graph
from skolemizer.testutils import skolemization, SkolemUtils
from modelldcatnotordf.modelldcatno import CodeElement, CodeList
from tests.testutils import assert_isomorphic
def test_instantiate_codeelement() -> None:
    """It does not raise an exception."""
    try:
        _ = CodeElement()
    except Exception:
        pytest.fail("Unexpected Exception ..")
def test_to_graph_should_return_codeelement(mocker: MockFixture) -> None:
    """It returns a codeelement graph isomorphic to spec."""
    codeelement = CodeElement()
    # No identifier is set, so to_rdf() skolemizes the node; patch the
    # Skolemizer so the generated URI is deterministic.
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:CodeElement
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_identifier() -> None:
    """It returns an identifier graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement;
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_dct_identifier_as_graph() -> None:
    """It returns a dct_identifier graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.dct_identifier = "123456789"
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        dct:identifier "123456789";
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_identifier_set_at_constructor() -> None:
    """It returns an identifier graph isomorphic to spec."""
    # Same expectation as above, but the identifier is passed to __init__.
    codeelement = CodeElement("http://example.com/codeelements/1")
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement;
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_subject() -> None:
    """It returns a subject graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    subject = Concept()
    subject.identifier = "https://example.com/subjects/1"
    codeelement.subject = subject
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        dct:subject <https://example.com/subjects/1> ;
    .
    <https://example.com/subjects/1> a skos:Concept .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_preflabel() -> None:
    """It returns a preflabel graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.preflabel = {"nb": "Liste", "en": "List"}
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:prefLabel "List"@en, "Liste"@nb
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_notation_as_graph() -> None:
    """It returns a notation graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.notation = "str"
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:notation "str";
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_in_scheme_both_identifiers() -> None:
    """It returns an in_scheme graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codelist = CodeList()
    codelist.identifier = "http://example.com/codelists/1"
    inschemes: List[CodeList] = [codelist]
    codeelement.in_scheme = inschemes
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:inScheme <http://example.com/codelists/1> .
    <http://example.com/codelists/1> a modelldcatno:CodeList ;
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_in_scheme_skolemization_codeelement_identifier(
    mocker: MockFixture,
) -> None:
    """It returns an in_scheme graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    # The codelist has no identifier, so its node gets skolemized.
    codelist = CodeList()
    inschemes: List[CodeList] = [codelist]
    codeelement.in_scheme = inschemes
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:inScheme
        <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
    .
    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:CodeList .
    """
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_is_codeelement_of_both_skolemized(
    mocker: MockFixture,
) -> None:
    """It returns an in_scheme graph isomorphic to spec."""
    # Neither object has an identifier, so both nodes get skolemized.
    codeelement = CodeElement()
    codelist = CodeList()
    inschemes: List[CodeList] = [codelist]
    codeelement.in_scheme = inschemes
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:CodeElement ;
        skos:inScheme
        <http://example.com/.well-known/skolem/21043186-80ce-11eb-9829-cf7c8fc855ce>
    .
    <http://example.com/.well-known/skolem/21043186-80ce-11eb-9829-cf7c8fc855ce>
        a modelldcatno:CodeList
    .
    """
    # SkolemUtils hands out successive deterministic skolem URIs.
    skolemutils = SkolemUtils()
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization",
        side_effect=skolemutils.get_skolemization,
    )
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_in_scheme_skolemization_codelist_id(
    mocker: MockFixture,
) -> None:
    """It returns an in_scheme graph isomorphic to spec."""
    # Only the codeelement lacks an identifier and gets skolemized.
    codeelement = CodeElement()
    codelist = CodeList()
    codelist.identifier = "http://example.com/codelists/1"
    inschemes: List[CodeList] = [codelist]
    codeelement.in_scheme = inschemes
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:CodeElement ;
        skos:inScheme <http://example.com/codelists/1>
    .
    <http://example.com/codelists/1> a modelldcatno:CodeList .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_top_concept_of_both_identifiers() -> None:
    """It returns a top_concept_of graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codelist = CodeList()
    codelist.identifier = "http://example.com/codelists/1"
    inschemes: List[CodeList] = [codelist]
    codeelement.top_concept_of = inschemes
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:topConceptOf <http://example.com/codelists/1> .
    <http://example.com/codelists/1> a modelldcatno:CodeList ;
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_top_concept_of_skolemization_codeelement_id(
    mocker: MockFixture,
) -> None:
    """It returns a top_concept_of graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    # The codelist has no identifier, so its node gets skolemized.
    codelist = CodeList()
    inschemes: List[CodeList] = [codelist]
    codeelement.top_concept_of = inschemes
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:topConceptOf
        <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
    .
    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:CodeList .
    """
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_top_concept_of_both_skolemized(
    mocker: MockFixture,
) -> None:
    """It returns a top_concept_of graph isomorphic to spec."""
    # Neither object has an identifier, so both nodes get skolemized.
    codeelement = CodeElement()
    codelist = CodeList()
    inschemes: List[CodeList] = [codelist]
    codeelement.top_concept_of = inschemes
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:CodeElement ; skos:topConceptOf
        <http://example.com/.well-known/skolem/21043186-80ce-11eb-9829-cf7c8fc855ce>
    .
    <http://example.com/.well-known/skolem/21043186-80ce-11eb-9829-cf7c8fc855ce>
        a modelldcatno:CodeList
    .
    """
    # SkolemUtils hands out successive deterministic skolem URIs.
    skolemutils = SkolemUtils()
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization",
        side_effect=skolemutils.get_skolemization,
    )
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_top_concept_of_skolemization_codelist_id(
    mocker: MockFixture,
) -> None:
    """It returns a top_concept_of graph isomorphic to spec."""
    # Only the codeelement lacks an identifier and gets skolemized.
    codeelement = CodeElement()
    codelist = CodeList()
    codelist.identifier = "http://example.com/codelists/1"
    inschemes: List[CodeList] = [codelist]
    codeelement.top_concept_of = inschemes
    mocker.patch(
        "skolemizer.Skolemizer.add_skolemization", return_value=skolemization,
    )
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/.well-known/skolem/284db4d2-80c2-11eb-82c3-83e80baa2f94>
        a modelldcatno:CodeElement ;
        skos:topConceptOf <http://example.com/codelists/1>
    .
    <http://example.com/codelists/1> a modelldcatno:CodeList .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_altlabel() -> None:
    """It returns an altlabel graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.altlabel = {"nb": "Samling", "en": "Collection"}
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:altLabel "Collection"@en, "Samling"@nb
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_definition() -> None:
    """It returns a definition graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.definition = {
        "nb": "Ordnet mengde elementer",
        "en": "Ordered set of elements",
    }
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:definition "Ordered set of elements"@en, "Ordnet mengde elementer"@nb
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_example() -> None:
    """It returns an example graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.example = ["An example", "Another example"]
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:example "An example", "Another example"
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_hiddenlabel() -> None:
    """It returns a hiddenlabel graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.hiddenlabel = {"nb": "En uegnet betegnelse", "en": "A hidden label"}
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:hiddenLabel "A hidden label"@en, "En uegnet betegnelse"@nb
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_note() -> None:
    """It returns a note graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.note = {"en": "A note", "nb": "En merknad"}
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:note "A note"@en, "En merknad"@nb
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_scopenote() -> None:
    """It returns a scopenote graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.scopenote = {
        "en": "A scope note",
        "nb": "En merknad ang. bruken av kodeelementet",
    }
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:scopeNote "A scope note"@en, "En merknad ang. bruken av kodeelementet"@nb
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_exclusion_note() -> None:
    """It returns an exclusion_note graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.exclusion_note = {
        "en": "An exclusion note",
        "nb": "En merknad ang. ekskluderte elementer",
    }
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        xkos:exclusionNote "An exclusion note"@en,
            "En merknad ang. ekskluderte elementer"@nb
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_inclusion_note() -> None:
    """It returns an inclusion_note graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    codeelement.inclusion_note = {
        "en": "An inclusion note",
        "nb": "En merknad om hva som er inkludert i kodeelementet",
    }
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        xkos:inclusionNote "An inclusion note"@en,
            "En merknad om hva som er inkludert i kodeelementet"@nb
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_next() -> None:
    """It returns a next element (xkos:next) graph isomorphic to spec."""
    codeelement1 = CodeElement()
    codeelement1.identifier = "http://example.com/codeelements/1"
    codeelement2 = CodeElement()
    codeelement2.identifier = "http://example.com/codeelements/2"
    codeelement1.next_element = codeelement2
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
    <http://example.com/codeelements/1>
        a modelldcatno:CodeElement;
        xkos:next <http://example.com/codeelements/2> .
    <http://example.com/codeelements/2> a modelldcatno:CodeElement .
    """
    g1 = Graph().parse(data=codeelement1.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_next_skolemized() -> None:
    """It returns a next element graph (blank-node target) isomorphic to spec."""
    codeelement1 = CodeElement()
    codeelement1.identifier = "http://example.com/codeelements/1"
    # The second element has no identifier and appears as a blank node.
    codeelement2 = CodeElement()
    codeelement1.next_element = codeelement2
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
    <http://example.com/codeelements/1>
        a modelldcatno:CodeElement;
        xkos:next [ a modelldcatno:CodeElement ] .
    """
    g1 = Graph().parse(data=codeelement1.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_previous() -> None:
    """It returns a previous element (xkos:previous) graph isomorphic to spec."""
    codeelement1 = CodeElement()
    codeelement1.identifier = "http://example.com/codeelements/1"
    codeelement2 = CodeElement()
    codeelement2.identifier = "http://example.com/codeelements/2"
    codeelement1.previous_element = codeelement2
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
    <http://example.com/codeelements/1>
        a modelldcatno:CodeElement;
        xkos:previous <http://example.com/codeelements/2> .
    <http://example.com/codeelements/2> a modelldcatno:CodeElement .
    """
    g1 = Graph().parse(data=codeelement1.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_previous_skolemized() -> None:
    """It returns a previous element graph (blank-node target) isomorphic to spec."""
    codeelement1 = CodeElement()
    codeelement1.identifier = "http://example.com/codeelements/1"
    # The second element has no identifier and appears as a blank node.
    codeelement2 = CodeElement()
    codeelement1.previous_element = codeelement2
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
    <http://example.com/codeelements/1>
        a modelldcatno:CodeElement;
        xkos:previous [ a modelldcatno:CodeElement ] .
    """
    g1 = Graph().parse(data=codeelement1.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_subject_as_uri() -> None:
    """It returns a subject graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    # The subject is given as a plain URI string rather than a Concept.
    subject = "https://example.com/subjects/1"
    codeelement.subject = subject
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        dct:subject <https://example.com/subjects/1> ;
    .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_in_scheme_as_uri() -> None:
    """It returns an in_scheme graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    # The code list is given as a plain URI string rather than a CodeList.
    codelist = "http://example.com/codelists/1"
    inschemes: List[Union[CodeList, URI]] = [codelist]
    codeelement.in_scheme = inschemes
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:inScheme <http://example.com/codelists/1> .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_top_concept_of_as_uri() -> None:
    """It returns a top_concept_of graph isomorphic to spec."""
    codeelement = CodeElement()
    codeelement.identifier = "http://example.com/codeelements/1"
    # The code list is given as a plain URI string rather than a CodeList.
    codelist = "http://example.com/codelists/1"
    inschemes: List[Union[CodeList, URI]] = [codelist]
    codeelement.top_concept_of = inschemes
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    <http://example.com/codeelements/1> a modelldcatno:CodeElement ;
        skos:topConceptOf <http://example.com/codelists/1> .
    """
    g1 = Graph().parse(data=codeelement.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_next_as_uri() -> None:
    """It returns a next element graph isomorphic to spec."""
    codeelement1 = CodeElement()
    codeelement1.identifier = "http://example.com/codeelements/1"
    # The next element is given as a plain URI string.
    codeelement2 = "http://example.com/codeelements/2"
    codeelement1.next_element = codeelement2
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
    <http://example.com/codeelements/1>
        a modelldcatno:CodeElement;
        xkos:next <http://example.com/codeelements/2> .
    """
    g1 = Graph().parse(data=codeelement1.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
def test_to_graph_should_return_previous_as_uri() -> None:
    """It returns a previous element graph isomorphic to spec."""
    codeelement1 = CodeElement()
    codeelement1.identifier = "http://example.com/codeelements/1"
    # The previous element is given as a plain URI string.
    codeelement2 = "http://example.com/codeelements/2"
    codeelement1.previous_element = codeelement2
    src = """
    @prefix dct: <http://purl.org/dc/terms/> .
    @prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    @prefix dcat: <http://www.w3.org/ns/dcat#> .
    @prefix modelldcatno: <https://data.norge.no/vocabulary/modelldcatno#> .
    @prefix skos: <http://www.w3.org/2004/02/skos/core#> .
    @prefix xkos: <http://rdf-vocabulary.ddialliance.org/xkos#> .
    <http://example.com/codeelements/1>
        a modelldcatno:CodeElement;
        xkos:previous <http://example.com/codeelements/2> .
    """
    g1 = Graph().parse(data=codeelement1.to_rdf(), format="turtle")
    g2 = Graph().parse(data=src, format="turtle")
    assert_isomorphic(g1, g2)
|
import java.util.Optional;
import java.util.stream.Stream;
public interface TicketBusRepository extends MongoRepository<TicketBusEntity, String> {
Optional<TicketBusEntity> findByReference(String reference);
default Optional<TicketBusEntity> findByReference(String reference) {
return findAll().stream()
.filter(ticket -> ticket.getReference().equals(reference))
.findFirst();
}
} |
#pragma once
#include "Graphics/Mesh.h"
#include "Physics/BoundingBox.h"
// Loads Wavefront .obj geometry (positions, normals, UV coordinates) and the
// materials it references, assembling the data into renderable Mesh objects.
class ObjLoader
{
public:
    ~ObjLoader();

    // Parse the given .obj file, loading materials referenced by the file.
    void LoadMeshData(const std::string& fileName);
    // Parse the given .obj file and use the named material.
    void LoadMeshData(const std::string& fileName, const std::string& matName);

    // NOTE(review): these getters return by value, i.e. a full copy of each
    // container; consider returning const references if call sites allow.
    [[nodiscard]] std::vector<Vertex> GetVertices() const { return meshVertices; }
    [[nodiscard]] std::vector<GLuint> GetIndices() const { return indices; }
    [[nodiscard]] std::vector<Mesh> GetMeshData() const { return meshes; }

private:
    // Raw attribute streams as read from the file. The redundant
    // `= std::vector<...>()` initializers were removed: the members are
    // default-constructed to empty containers anyway.
    std::vector<glm::vec3> vertices;
    std::vector<glm::vec3> normals;
    std::vector<glm::vec2> uvCoords;

    // Per-attribute index streams parsed from the face records.
    std::vector<GLuint> indices;
    std::vector<GLuint> normalIndices;
    std::vector<GLuint> uvIndices;

    // Assembled per-vertex data and the resulting meshes.
    std::vector<Vertex> meshVertices;
    std::vector<Mesh> meshes;

    // Material currently in effect while parsing.
    MaterialHandler::Material currentMaterial;

    void PostProcessing();
    void LoadMaterial(const std::string& fileName);
    void LoadMaterialLibrary(const std::string& matPath) const;
};
|
package authcopy
import (
"github.com/devopsfaith/krakend/config"
"github.com/devopsfaith/krakend/logging"
"github.com/gin-gonic/gin"
)
// authHeader is the HTTP header the token is copied into.
const authHeader = "Authorization"

// Namespace is the key under which this middleware's settings live in the
// KrakenD extra config.
const Namespace = "github_com/zean00/authcopy"

// CopyConfig holds the authcopy settings: CookieKey and QueryKey name the
// cookie / query parameter to read the token from, and Overwrite controls
// whether an existing Authorization header may be replaced.
type CopyConfig struct {
	CookieKey string
	QueryKey  string
	Overwrite bool
}
// New builds a Gin middleware that copies an auth token found in a cookie or
// query parameter into the Authorization header as a Bearer token. When the
// config is missing or invalid it degrades to a pass-through handler.
func New(logger logging.Logger, config config.ExtraConfig) gin.HandlerFunc {
	cfg := configGetter(logger, config)
	if cfg == nil {
		logger.Info("[authcopy] Empty config")
		return func(c *gin.Context) { c.Next() }
	}
	return func(c *gin.Context) {
		existing := c.Request.Header.Get(authHeader)
		if existing != "" && !cfg.Overwrite {
			// An Authorization header is already present and we may not replace it.
			c.Next()
			return
		}
		if cookie, err := c.Request.Cookie(cfg.CookieKey); err == nil {
			logger.Debug("[authcopy] Copying from cookie")
			c.Request.Header.Set(authHeader, "Bearer "+cookie.Value)
		}
		if query := c.Request.URL.Query().Get(cfg.QueryKey); query != "" {
			logger.Debug("[authcopy] Copying from query")
			// The query token wins over the cookie when both are present.
			c.Request.Header.Set(authHeader, "Bearer "+query)
			// Strip the token from the query string so it does not leak downstream.
			params := c.Request.URL.Query()
			params.Del(cfg.QueryKey)
			c.Request.URL.RawQuery = params.Encode()
		}
		c.Next()
	}
}
// configGetter extracts and validates the authcopy settings from the KrakenD
// extra config. It returns nil when the namespace is absent, the payload is
// not a map, or neither a cookie key nor a query key is configured.
func configGetter(logger logging.Logger, config config.ExtraConfig) *CopyConfig {
	raw, ok := config[Namespace]
	if !ok {
		return nil
	}
	data, ok := raw.(map[string]interface{})
	if !ok {
		return nil
	}
	// Overwrite defaults to false via the zero value.
	cfg := &CopyConfig{}
	if ck, ok := data["cookie_key"].(string); ok {
		cfg.CookieKey = ck
	}
	if qk, ok := data["query_key"].(string); ok {
		cfg.QueryKey = qk
	}
	if ow, ok := data["overwrite"].(bool); ok {
		cfg.Overwrite = ow
	}
	// With no source to copy from, the middleware would be a no-op.
	if cfg.CookieKey == "" && cfg.QueryKey == "" {
		return nil
	}
	return cfg
}
|
import { Component, OnInit } from '@angular/core';
import { _HttpClient } from '@delon/theme';
import { concat, merge, Observable, of } from 'rxjs';
declare var echarts : any ;
@Component({
  selector: 'app-dashboard',
  templateUrl: './dashboard.component.html',
  styleUrls : ['./dashboard.component.less']
})
export class DashboardComponent implements OnInit {
  constructor(
    private http: _HttpClient
  ) {};

  ngOnInit() {
    // Bind the pre-built option objects below to the two chart containers.
    // NOTE(review): assumes elements #main and #main2 already exist in the
    // template when ngOnInit runs — confirm against dashboard.component.html.
    const chart1 = echarts.init(document.getElementById('main'));
    const chart2 = echarts.init(document.getElementById('main2'));
    chart1.setOption(this.chart);
    chart2.setOption(this.chart2);
  };

  // ECharts option: stacked area line chart, one series per channel,
  // x axis Monday..Sunday. Values are hard-coded demo data.
  chart = {
    tooltip : {
      trigger: 'axis',
      axisPointer: {
        type: 'cross',
        label: {
          backgroundColor: '#6a7985'
        }
      }
    },
    legend: {
      data:['邮件营销','联盟广告','视频广告','直接访问','搜索引擎']
    },
    grid: {
      left: '3%',
      right: '4%',
      bottom: '3%',
      containLabel: true
    },
    xAxis : [
      {
        type : 'category',
        boundaryGap : false,
        data : ['周一','周二','周三','周四','周五','周六','周日']
      }
    ],
    yAxis : [
      {
        type : 'value'
      }
    ],
    series : [
      {
        name:'邮件营销',
        type:'line',
        stack: '总量',
        areaStyle: {},
        data:[120, 132, 101, 134, 90, 230, 210]
      },
      {
        name:'联盟广告',
        type:'line',
        stack: '总量',
        areaStyle: {},
        data:[220, 182, 191, 234, 290, 330, 310]
      },
      {
        name:'视频广告',
        type:'line',
        stack: '总量',
        areaStyle: {},
        data:[150, 232, 201, 154, 190, 330, 410]
      },
      {
        name:'直接访问',
        type:'line',
        stack: '总量',
        areaStyle: {normal: {}},
        data:[320, 332, 301, 334, 390, 330, 320]
      },
      {
        // Only the top-most series shows per-point value labels.
        name:'搜索引擎',
        type:'line',
        stack: '总量',
        label: {
          normal: {
            show: true,
            position: 'top'
          }
        },
        areaStyle: {normal: {}},
        data:[820, 932, 901, 934, 1290, 1330, 1320]
      }
    ]
  };

  // ECharts option: two concentric pie series (inner: coarse source split,
  // outer ring: detailed breakdown) with rich-text labels on the outer ring.
  chart2 = {
    series: [
      {
        name:'访问来源',
        type:'pie',
        selectedMode: 'single',
        radius: [0, '30%'],
        label: {
          normal: {
            position: 'inner'
          }
        },
        labelLine: {
          normal: {
            show: false
          }
        },
        data:[
          {value:335, name:'直达', selected:true},
          {value:679, name:'营销广告'},
          {value:1548, name:'搜索引擎'}
        ]
      },
      {
        name:'访问来源',
        type:'pie',
        radius: ['40%', '55%'],
        label: {
          normal: {
            formatter: '{a|{a}}{abg|}\n{hr|}\n  {b|{b}:}{c}  {per|{d}%}  ',
            backgroundColor: '#eee',
            borderColor: '#aaa',
            borderWidth: 1,
            borderRadius: 4,
            // shadowBlur:3,
            // shadowOffsetX: 2,
            // shadowOffsetY: 2,
            // shadowColor: '#999',
            // padding: [0, 7],
            rich: {
              a: {
                color: '#999',
                lineHeight: 22,
                align: 'center'
              },
              // abg: {
              //     backgroundColor: '#333',
              //     width: '100%',
              //     align: 'right',
              //     height: 22,
              //     borderRadius: [4, 4, 0, 0]
              // },
              hr: {
                borderColor: '#aaa',
                width: '100%',
                borderWidth: 0.5,
                height: 0
              },
              b: {
                fontSize: 16,
                lineHeight: 33
              },
              per: {
                color: '#eee',
                backgroundColor: '#334455',
                padding: [2, 4],
                borderRadius: 2
              }
            }
          }
        },
        data:[
          {value:335, name:'直达'},
          {value:310, name:'邮件营销'},
          {value:234, name:'联盟广告'},
          {value:135, name:'视频广告'},
          {value:1048, name:'百度'},
          {value:251, name:'谷歌'},
          {value:147, name:'必应'},
          {value:102, name:'其他'}
        ]
      }
    ]
  };
}
|
<reponame>getmetamapper/metamapper
"""
Drop a table (`app`.`departments`) and re-create it with the
same name. However, the underlying column structure has slightly changed.
Example SQL:
DROP TABLE `app`.`departments`;
CREATE TABLE `app`.`departments` (
id SERIAL NOT NULL,
category VARCHAR(40) NOT NULL,
dept_name VARCHAR(40) NOT NULL,
dept_head INT NOT NULL,
PRIMARY KEY (id),
UNIQUE (dept_name)
);
"""
# Fixture helpers from the end-to-end revisioner test suite.
from app.revisioner.tests.e2e import inspected
from app.revisioner.tests.test_e2e import mutate_inspected

# Fixtures loaded before the test run.
preload_fixtures = ['datastore']

# Start from the baseline inspected state with the original
# `app`.`departments` table (object_id 16522) dropped.
inspected_tables = mutate_inspected(inspected.tables_and_views, [
    {
        "type": "dropped",
        "filters": (
            lambda row: row['table_object_id'] == 16522
        ),
    },
])
# Re-create `app`.`departments` under a new object_id (26522) with the column
# layout from the CREATE TABLE statement in the module docstring.
inspected_tables += [
    {
        "schema_object_id": 16441,
        "table_schema": "app",
        "table_object_id": 26522,
        "table_name": "departments",
        "table_type": "base table",
        "properties": {},
        "columns": [
            {
                "column_object_id": "26522/1",
                "column_name": "id",
                "column_description": None,
                "ordinal_position": 1,
                "data_type": "integer",
                "max_length": 32,
                "numeric_scale": 0,
                "is_nullable": False,
                "is_primary": True,
                "default_value": "nextval('app.departments_id_seq'::regclass)"
            },
            {
                "column_object_id": "26522/2",
                "column_name": "category",
                "column_description": None,
                "ordinal_position": 2,
                "data_type": "varchar",
                "max_length": 255,
                "numeric_scale": None,
                "is_nullable": False,
                # NOTE(review): the documented DDL declares only `id` as the
                # primary key — confirm whether `category` is intentionally
                # flagged primary here.
                "is_primary": True,
                "default_value": ""
            },
            {
                "column_object_id": "26522/3",
                "column_name": "dept_name",
                "column_description": None,
                "ordinal_position": 3,
                "data_type": "character varying",
                "max_length": 40,
                "numeric_scale": None,
                "is_nullable": False,
                "is_primary": False,
                "default_value": ""
            },
            {
                "column_object_id": "26522/4",
                "column_name": "dept_head",
                "column_description": None,
                # Fixed: was 3, duplicating dept_name's position; dept_head is
                # the fourth column in the documented DDL.
                "ordinal_position": 4,
                "data_type": "integer",
                "max_length": 32,
                "numeric_scale": 0,
                "is_nullable": True,
                "is_primary": False,
                "default_value": ""
            }
        ]
    }
]
# Drop the indexes that belonged to the old table (object_id 16522)...
inspected_indexes = mutate_inspected(inspected.indexes, [
    {
        "type": "dropped",
        "filters": (
            lambda row: row['table_object_id'] == 16522
        ),
    },
])

# ...and register the UNIQUE (dept_name) index on the re-created table,
# matching the constraint in the documented DDL.
inspected_indexes += [
    {
        "schema_name": "app",
        "schema_object_id": 16441,
        "table_name": "departments",
        "table_object_id": 26522,
        "index_name": "departments_dept_name_key",
        "index_object_id": 26528,
        "is_unique": True,
        "is_primary": False,
        "definition": "CREATE UNIQUE INDEX departments_dept_name_key ON app.departments USING btree (dept_name)",
        "columns": [
            {
                "column_name": "dept_name",
                "ordinal_position": 1
            }
        ]
    }
]
# Assertions evaluated by the e2e runner: the revisioner should recognize the
# drop+recreate as the *same* logical table/columns (stable primary keys)
# while picking up the new object_id values.
test_cases = [
    {
        "model": "Table",
        "description": "The `app`.`departments` table should still exist.",
        "filters": {
            "schema__name": "app",
            "name": "departments",
        },
        "assertions": [
            {
                "summarized": "It should have the same Table identity.",
                "evaluation": lambda datastore, table: table.pk,
                "pass_value": 2,
            },
            {
                "summarized": "It should have an updated `object_id` value.",
                "evaluation": lambda datastore, table: table.object_id,
                "pass_value": "26522",
            },
            {
                "summarized": "It should have the expected columns.",
                "evaluation": lambda datastore, table: set(table.columns.values_list("name", flat=True)),
                "pass_value": {
                    "id",
                    "category",
                    "dept_name",
                    "dept_head",
                },
            },
        ]
    },
    {
        "model": "Column",
        "description": "tbd.",
        "filters": {
            "table_id": 2,
            "name": "dept_name",
        },
        "assertions": [
            {
                "summarized": "It should have the same Column identity.",
                "evaluation": lambda datastore, column: column.pk,
                "pass_value": 15,
            },
            {
                "summarized": "It should have the same Column identity.",
                "evaluation": lambda datastore, column: column.object_id,
                "pass_value": "26522/3",
            },
        ]
    },
    {
        "model": "Column",
        "description": "tbd.",
        "filters": {
            "table_id": 2,
            "object_id": "26522/2",
        },
        "assertions": [
            {
                "summarized": "It should have the same Column identity.",
                "evaluation": lambda datastore, column: column.name,
                "pass_value": "category",
            },
        ]
    },
]
|
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import TwitterSvg from '@rsuite/icon-font/lib/legacy/Twitter';

// Wraps the raw SVG glyph in the shared icon component factory.
const Twitter = createSvgIcon({
  as: TwitterSvg,
  ariaLabel: 'twitter',
  category: 'legacy',
  displayName: 'Twitter'
});

export default Twitter;
|
from typing import List
import re


def count_word_occurrences(word: str, word_list: List[str]) -> int:
    """Count case-insensitive, whole-word occurrences of ``word``.

    Args:
        word: The word to search for. Treated as a literal string, not a
            regular expression.
        word_list: Sentences to search through.

    Returns:
        Total number of whole-word matches across all sentences.
    """
    # re.escape prevents regex metacharacters in `word` (e.g. "." or "+")
    # from being interpreted as a pattern, which previously produced wrong
    # matches or raised re.error. Compile once instead of per sentence.
    pattern = re.compile(r'\b' + re.escape(word.lower()) + r'\b')
    return sum(len(pattern.findall(sentence.lower())) for sentence in word_list)
#!/bin/bash
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Initializes a fresh GCE VM to become a Kokoro Linux performance worker.
# You shouldn't run this script on your own,
# use create_linux_kokoro_performance_worker.sh instead.
# -e: abort on the first failing command; -x: echo each command for the logs.
set -ex

sudo apt-get update

# Install Java 8 JDK (to build gRPC Java)
sudo apt-get install -y openjdk-8-jdk
sudo apt-get install -y unzip lsof

# Common build toolchain and native-library prerequisites shared by the
# per-language benchmark builds below.
sudo apt-get install -y \
  autoconf \
  autotools-dev \
  build-essential \
  bzip2 \
  ccache \
  curl \
  gcc \
  gcc-multilib \
  git \
  gyp \
  lcov \
  libc6 \
  libc6-dbg \
  libc6-dev \
  libcurl4-openssl-dev \
  libgtest-dev \
  libreadline-dev \
  libssl-dev \
  libtool \
  make \
  strace \
  python-dev \
  python-pip \
  python-setuptools \
  python-yaml \
  python3-dev \
  python3-pip \
  python3-setuptools \
  python3-yaml \
  telnet \
  unzip \
  wget \
  zip \
  zlib1g-dev

# perftools
sudo apt-get install -y google-perftools libgoogle-perftools-dev

# netperf
sudo apt-get install -y netperf

# required to run kokoro_log_reader.py
sudo apt-get install -y python-psutil python3-psutil

# gcloud tools, including gsutil
sudo apt-get install -y google-cloud-sdk

# C++ dependencies
sudo apt-get install -y libgtest-dev libc++-dev clang

# Python dependencies
# NOTE(review): pip is pinned to 19.3.1 — presumably the last release
# compatible with this image's Python 2; confirm before bumping.
sudo pip install --upgrade pip==19.3.1
sudo pip install tabulate
sudo pip install google-api-python-client oauth2client
sudo pip install virtualenv

# pypy is used instead of python for postprocessing benchmark outputs
# because some reports are huge and pypy is much faster.
# TODO(jtattermusch): get rid of pypy once possible, it's hard to
# keep track of all the installed variants of python.
sudo apt-get install -y pypy pypy-dev
curl -O https://bootstrap.pypa.io/get-pip.py
sudo pypy get-pip.py
sudo pypy -m pip install tabulate
sudo pypy -m pip install google-api-python-client oauth2client
# TODO(jtattermusch): for some reason, we need psutil installed
# in pypy for kokoro_log_reader.py (strange, because the command is
# "python kokoro_log_reader.py" and pypy is not the system default)
sudo pypy -m pip install psutil

# Node dependencies (nvm has to be installed under user kbuilder)
touch .profile
curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.25.4/install.sh | bash
# silence shellcheck as it cannot follow the following `source` path statically:
# shellcheck disable=SC1090
source ~/.nvm/nvm.sh
nvm install 0.12 && npm config set cache /tmp/npm-cache
nvm install 4 && npm config set cache /tmp/npm-cache
nvm install 5 && npm config set cache /tmp/npm-cache
nvm alias default 4

# C# dependencies
sudo apt-get install -y cmake

# C# mono dependencies (http://www.mono-project.com/docs/getting-started/install/linux/#debian-ubuntu-and-derivatives)
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF
echo "deb https://download.mono-project.com/repo/ubuntu stable-bionic main" | sudo tee /etc/apt/sources.list.d/mono-official-stable.list
sudo apt-get update
sudo apt-get install -y mono-devel

# C# .NET Core dependencies (https://www.microsoft.com/net/download)
wget -q https://packages.microsoft.com/config/ubuntu/18.04/packages-microsoft-prod.deb
sudo dpkg -i packages-microsoft-prod.deb
sudo apt-get install -y apt-transport-https
sudo apt-get update
sudo apt-get install -y dotnet-sdk-2.1

# Install .NET Core 1.0.5 Runtime (required to run netcoreapp1.0)
wget -q https://download.microsoft.com/download/2/4/A/24A06858-E8AC-469B-8AE6-D0CEC9BA982A/dotnet-ubuntu.16.04-x64.1.0.5.tar.gz
mkdir -p dotnet105_download
tar zxf dotnet-ubuntu.16.04-x64.1.0.5.tar.gz -C dotnet105_download
sudo cp -r dotnet105_download/shared/Microsoft.NETCore.App/1.0.5/ /usr/share/dotnet/shared/Microsoft.NETCore.App/
# To prevent "Failed to initialize CoreCLR, HRESULT: 0x80131500" with .NET Core 1.0.5 runtime
wget -q http://security.ubuntu.com/ubuntu/pool/main/i/icu/libicu55_55.1-7ubuntu0.4_amd64.deb
sudo dpkg -i libicu55_55.1-7ubuntu0.4_amd64.deb

# Install .NET Core 1.1.10 runtime (required to run netcoreapp1.1)
wget -q -O dotnet_old.tar.gz https://download.visualstudio.microsoft.com/download/pr/b25b5650-0cb8-4699-a347-48d73650da0b/920966211e9bb1907232bbda1faa895a/dotnet-ubuntu.18.04-x64.1.1.10.tar.gz
mkdir -p dotnet_old
tar zxf dotnet_old.tar.gz -C dotnet_old
sudo cp -r dotnet_old/shared/Microsoft.NETCore.App/1.1.10/ /usr/share/dotnet/shared/Microsoft.NETCore.App/

# Ruby dependencies
gpg --keyserver hkp://pgp.mit.edu --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB
curl -sSL https://get.rvm.io | bash -s stable --ruby
# silence shellcheck as it cannot follow the following `source` path statically:
# shellcheck disable=SC1090
source ~/.rvm/scripts/rvm

git clone https://github.com/rbenv/rbenv.git ~/.rbenv
export PATH="$HOME/.rbenv/bin:$PATH"
eval "$(rbenv init -)"
git clone https://github.com/rbenv/ruby-build.git ~/.rbenv/plugins/ruby-build
export PATH="$HOME/.rbenv/plugins/ruby-build/bin:$PATH"
rbenv install 2.4.0
rbenv global 2.4.0
ruby -v

# Install bundler (prerequisite for gRPC Ruby)
gem install bundler

# PHP dependencies
sudo apt-get install -y php7.2 php7.2-dev php-pear unzip zlib1g-dev
sudo wget https://phar.phpunit.de/phpunit-8.5.8.phar && \
  sudo mv phpunit-8.5.8.phar /usr/local/bin/phpunit && \
  sudo chmod +x /usr/local/bin/phpunit
curl -sS https://getcomposer.org/installer | php
sudo mv composer.phar /usr/local/bin/composer

# Java dependencies - nothing as we already have Java JDK 8

# Go dependencies
# Currently, the golang package available via apt-get doesn't have the latest go.
# Significant performance improvements with grpc-go have been observed after
# upgrading from go 1.5 to a later version, so a later go version is preferred.
# Following go install instructions from https://golang.org/doc/install
GO_VERSION=1.10
OS=linux
ARCH=amd64
curl -O https://storage.googleapis.com/golang/go${GO_VERSION}.${OS}-${ARCH}.tar.gz
sudo tar -C /usr/local -xzf go$GO_VERSION.$OS-$ARCH.tar.gz
# Put go on the PATH, keep the usual installation dir
sudo ln -s /usr/local/go/bin/go /usr/bin/go
rm go$GO_VERSION.$OS-$ARCH.tar.gz

# Install perf, to profile benchmarks. (need to get the right linux-tools-<> for kernel version)
sudo apt-get install -y linux-tools-common linux-tools-generic "linux-tools-$(uname -r)"
# see http://unix.stackexchange.com/questions/14227/do-i-need-root-admin-permissions-to-run-userspace-perf-tool-perf-events-ar
echo 0 | sudo tee /proc/sys/kernel/perf_event_paranoid
# see http://stackoverflow.com/questions/21284906/perf-couldnt-record-kernel-reference-relocation-symbol
echo 0 | sudo tee /proc/sys/kernel/kptr_restrict
# qps workers under perf appear to need a lot of mmap pages under certain scenarios and perf args in
# order to not lose perf events or time out
echo 4096 | sudo tee /proc/sys/kernel/perf_event_mlock_kb

# Fetch scripts to generate flame graphs from perf data collected
# on benchmarks
git clone -v https://github.com/brendangregg/FlameGraph ~/FlameGraph

# Install scipy and numpy for benchmarking scripts
sudo apt-get install -y python-scipy python-numpy

# Install docker
curl -sSL https://get.docker.com/ | sh
# Enable kbuilder to use docker without sudo:
sudo usermod -aG docker kbuilder

# Add pubkey of Kokoro driver VM to allow SSH
# silence false-positive shellcheck warning ("< redirect does not affect sudo")
# shellcheck disable=SC2024
sudo tee --append ~kbuilder/.ssh/authorized_keys < kokoro_performance.pub

# Kokoro requires /tmpfs/READY file to exist the directory and file itself should
# be owned by kbuilder.
sudo mkdir /tmpfs
sudo chown kbuilder /tmpfs
# NOTE(review): READY is created by whichever user runs this script, not
# necessarily kbuilder — confirm the ownership requirement stated above holds.
touch /tmpfs/READY

# Disable automatic updates to prevent spurious apt-get install failures
# See https://github.com/grpc/grpc/issues/17794
sudo sed -i 's/APT::Periodic::Update-Package-Lists "1"/APT::Periodic::Update-Package-Lists "0"/' /etc/apt/apt.conf.d/10periodic
sudo sed -i 's/APT::Periodic::AutocleanInterval "1"/APT::Periodic::AutocleanInterval "0"/' /etc/apt/apt.conf.d/10periodic
sudo sed -i 's/APT::Periodic::Update-Package-Lists "1"/APT::Periodic::Update-Package-Lists "0"/' /etc/apt/apt.conf.d/20auto-upgrades
sudo sed -i 's/APT::Periodic::Unattended-Upgrade "1"/APT::Periodic::Unattended-Upgrade "0"/' /etc/apt/apt.conf.d/20auto-upgrades

# Restart for VM to pick up kernel update
echo 'Successfully initialized the linux worker, going for reboot in 10 seconds'
sleep 10
sudo reboot
|
package org.hiro;
/**
 * Plain data holder for a stone record.
 */
public class Stone {
    // Legacy name buffer. Deprecated — presumably superseded by the String
    // field `name` below; confirm before removing.
    @Deprecated
    char[] st_name;
    // Stone name.
    String name;
    // Stone value. NOTE(review): units/meaning are not shown here — confirm.
    int st_value;
}
|
<gh_stars>100-1000
/**
 * Props for the ColorGrid component.
 */
export interface ColorGridProps {
  /** Base color the grid is derived from (format not shown here — presumably a CSS color string; confirm at call sites). */
  color: string;
  /** Tone steps used to generate the grid's color variants. */
  toneScale: Array<number>;
}
|
/* THIS FILE AUTO-GENERATED FROM astra_core_api.cpp.lpp. DO NOT EDIT. */
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Orbbec 3D
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
#include "astra_core_api.h"
#include <astra_core/StreamServiceProxy.hpp>
ASTRA_BEGIN_DECLS

// Global pointer to the active stream service proxy. Every C API entry point
// below is a thin forwarder to this object.
ASTRA_LOCAL astra_streamservice_proxy_t* __astra_api_proxy_ptr = nullptr;

// Casts the opaque proxy handle back to the C++ proxy implementation.
// NOTE(review): no null check is performed — dereferencing the result before
// astra_api_set_proxy() has been called would crash; presumably SDK
// initialization guarantees the proxy is set first. Confirm.
static astra::StreamServiceProxy* get_api_proxy()
{
    return reinterpret_cast<astra::StreamServiceProxy*>(__astra_api_proxy_ptr);
}

ASTRA_API_PROXY astra_streamservice_proxy_t* astra_api_get_proxy()
{
    return __astra_api_proxy_ptr;
}

ASTRA_API_PROXY void astra_api_set_proxy(astra_streamservice_proxy_t* proxy)
{
    __astra_api_proxy_ptr = proxy;
}

// --- Stream set lifecycle -------------------------------------------------

ASTRA_API astra_status_t astra_streamset_open(const char* connectionString,
                                              astra_streamsetconnection_t* streamSet)
{
    return get_api_proxy()->streamset_open(connectionString, streamSet);
}

ASTRA_API astra_status_t astra_streamset_close(astra_streamsetconnection_t* streamSet)
{
    return get_api_proxy()->streamset_close(streamSet);
}

// --- Readers and streams --------------------------------------------------

ASTRA_API astra_status_t astra_reader_create(astra_streamsetconnection_t streamSet,
                                             astra_reader_t* reader)
{
    return get_api_proxy()->reader_create(streamSet, reader);
}

ASTRA_API astra_status_t astra_reader_destroy(astra_reader_t* reader)
{
    return get_api_proxy()->reader_destroy(reader);
}

ASTRA_API astra_status_t astra_reader_get_stream(astra_reader_t reader,
                                                 astra_stream_type_t type,
                                                 astra_stream_subtype_t subtype,
                                                 astra_streamconnection_t* connection)
{
    return get_api_proxy()->reader_get_stream(reader, type, subtype, connection);
}

ASTRA_API astra_status_t astra_stream_get_description(astra_streamconnection_t connection,
                                                      astra_stream_desc_t* description)
{
    return get_api_proxy()->stream_get_description(connection, description);
}

ASTRA_API astra_status_t astra_stream_start(astra_streamconnection_t connection)
{
    return get_api_proxy()->stream_start(connection);
}

ASTRA_API astra_status_t astra_stream_stop(astra_streamconnection_t connection)
{
    return get_api_proxy()->stream_stop(connection);
}

// --- Frame access ----------------------------------------------------------

ASTRA_API astra_status_t astra_reader_open_frame(astra_reader_t reader,
                                                 int timeoutMillis,
                                                 astra_reader_frame_t* frame)
{
    return get_api_proxy()->reader_open_frame(reader, timeoutMillis, frame);
}

ASTRA_API astra_status_t astra_reader_close_frame(astra_reader_frame_t* frame)
{
    return get_api_proxy()->reader_close_frame(frame);
}

ASTRA_API astra_status_t astra_reader_register_frame_ready_callback(astra_reader_t reader,
                                                                    astra_frame_ready_callback_t callback,
                                                                    void* clientTag,
                                                                    astra_reader_callback_id_t* callbackId)
{
    return get_api_proxy()->reader_register_frame_ready_callback(reader, callback, clientTag, callbackId);
}

ASTRA_API astra_status_t astra_reader_unregister_frame_ready_callback(astra_reader_callback_id_t* callbackId)
{
    return get_api_proxy()->reader_unregister_frame_ready_callback(callbackId);
}

ASTRA_API astra_status_t astra_reader_get_frame(astra_reader_frame_t frame,
                                                astra_stream_type_t type,
                                                astra_stream_subtype_t subtype,
                                                astra_frame_t** subFrame)
{
    return get_api_proxy()->reader_get_frame(frame, type, subtype, subFrame);
}

// --- Stream parameters and commands ----------------------------------------

ASTRA_API astra_status_t astra_stream_set_parameter(astra_streamconnection_t connection,
                                                    astra_parameter_id parameterId,
                                                    size_t inByteLength,
                                                    astra_parameter_data_t inData)
{
    return get_api_proxy()->stream_set_parameter(connection, parameterId, inByteLength, inData);
}

ASTRA_API astra_status_t astra_stream_get_parameter(astra_streamconnection_t connection,
                                                    astra_parameter_id parameterId,
                                                    size_t* resultByteLength,
                                                    astra_result_token_t* token)
{
    return get_api_proxy()->stream_get_parameter(connection, parameterId, resultByteLength, token);
}

ASTRA_API astra_status_t astra_stream_get_result(astra_streamconnection_t connection,
                                                 astra_result_token_t token,
                                                 size_t dataByteLength,
                                                 astra_parameter_data_t dataDestination)
{
    return get_api_proxy()->stream_get_result(connection, token, dataByteLength, dataDestination);
}

ASTRA_API astra_status_t astra_stream_invoke(astra_streamconnection_t connection,
                                             astra_command_id commandId,
                                             size_t inByteLength,
                                             astra_parameter_data_t inData,
                                             size_t* resultByteLength,
                                             astra_result_token_t* token)
{
    return get_api_proxy()->stream_invoke(connection, commandId, inByteLength, inData, resultByteLength, token);
}

ASTRA_API astra_status_t astra_temp_update()
{
    return get_api_proxy()->temp_update();
}

ASTRA_END_DECLS
|
<gh_stars>0
import DynamicArray from '.';

// Jest suite for DynamicArray.
// NOTE: the `methods` block shares one `dynamicArray` instance across all
// describes and relies on beforeAll/afterAll ordering to reset state.
describe('DynamicArray', () => {
  describe('constructor', () => {
    describe('when passing no argument', () => {
      it('initializes a dynamic array object with capacity of 10', () => {
        expect(new DynamicArray().capacity).toBe(10);
      });
    });
    describe('when passing invalid argument', () => {
      describe('when passing something not parsable as a number', () => {
        it('throws a range error', () => {
          expect(() => new DynamicArray('some string')).toThrow(RangeError);
        });
      });
      describe('when passing a negative number', () => {
        it('throws a range error', () => {
          expect(() => new DynamicArray(-1)).toThrow(RangeError);
        });
      });
    });
    describe('when passing valid argument', () => {
      const CAPACITY = 20;
      describe('when passing a non negative number', () => {
        it('initializes a dynamic array object with the passed capacity', () => {
          expect(new DynamicArray(CAPACITY).capacity).toBe(CAPACITY);
        });
      });
      // Numeric strings are accepted as capacities too.
      describe('when passing a string corresponding to a non negative number', () => {
        it('initializes a dynamic array object with the passed capacity', () => {
          expect(new DynamicArray(CAPACITY.toString()).capacity).toBe(CAPACITY);
        });
      });
    });
  });
  describe('methods', () => {
    let dynamicArray;
    beforeAll(() => {
      dynamicArray = new DynamicArray();
    });
    describe('#length', () => {
      describe('when array has no elements', () => {
        it('returns 0', () => expect(dynamicArray).toHaveLength(0));
      });
      describe('when array has elements', () => {
        const elemNumber = 10;
        beforeAll(() => {
          for (let i = 0; i < elemNumber; i += 1) {
            dynamicArray.add('element');
          }
        });
        afterAll(() => dynamicArray.clear());
        it('returns the number of elements', () => {
          expect(dynamicArray).toHaveLength(elemNumber);
        });
      });
    });
    describe('#isEmpty', () => {
      describe('when array has no elements', () => {
        it('returns true', () => expect(dynamicArray.isEmpty()).toBe(true));
      });
      describe('when array has elements', () => {
        beforeAll(() => dynamicArray.add('element'));
        afterAll(() => dynamicArray.clear());
        it('returns false', () => expect(dynamicArray.isEmpty()).toBe(false));
      });
    });
    describe('#get', () => {
      describe('when passing a non existent index', () => {
        it('returns undefined', () => expect(dynamicArray.get(0)).toBeUndefined());
      });
      describe('when passing an existent index', () => {
        const elem = 'element';
        beforeAll(() => dynamicArray.add(elem));
        afterAll(() => dynamicArray.clear());
        it('returns the element', () => expect(dynamicArray.get(0)).toBe(elem));
      });
    });
    describe('#set', () => {
      describe('when calling it with non existent index', () => {
        it('throws an error', () => {
          expect(() => dynamicArray.set(0, 'element')).toThrow(Error);
        });
      });
      describe('when calling it with existent index', () => {
        beforeAll(() => dynamicArray.add('element'));
        afterAll(() => dynamicArray.clear());
        it('replaces the element at the specified index with a new element', () => {
          const otherElem = 'other element';
          dynamicArray.set(0, otherElem);
          expect(dynamicArray.get(0)).toBe(otherElem);
        });
      });
    });
    describe('#clear', () => {
      beforeAll(() => {
        dynamicArray.add('element');
        dynamicArray.clear();
      });
      it('clears the elements', () => expect(dynamicArray.get(0)).toBeUndefined());
      it('leaves the array with length of 0', () => {
        expect(dynamicArray).toHaveLength(0);
      });
    });
    describe('#add', () => {
      const elem = 'element';
      let arrayLength;
      beforeAll(() => {
        // Capture the length before adding so assertions below can compare.
        arrayLength = dynamicArray.length;
        dynamicArray.add(elem);
      });
      afterAll(() => dynamicArray.clear());
      it('adds an element at the end of the array', () => {
        expect(dynamicArray.get(arrayLength)).toBe(elem);
      });
      it('increases the length of the array by 1', () => {
        expect(dynamicArray).toHaveLength(arrayLength + 1);
      });
    });
    describe('#removeAt', () => {
      const elem = 'one element';
      const indexToRemove = 5;
      let arrayLength;
      beforeAll(() => {
        // Build [5 fillers, target element, 5 fillers] so the target sits at
        // index 5 and removal from the middle is exercised.
        for (let i = 0; i < 5; i += 1) {
          dynamicArray.add('element');
        }
        dynamicArray.add(elem);
        for (let i = 0; i < 5; i += 1) {
          dynamicArray.add('element');
        }
        arrayLength = dynamicArray.length;
      });
      afterAll(() => dynamicArray.clear());
      describe('when passing invalid index', () => {
        describe('when passing negative index', () => {
          it('throws a range error', () => {
            expect(() => dynamicArray.removeAt(-1)).toThrow(RangeError);
          });
        });
        describe('when passing index greater than the max valid index', () => {
          it('throws a range error', () => {
            expect(() => dynamicArray.removeAt(arrayLength)).toThrow(RangeError);
          });
        });
      });
      describe('when passing valid index', () => {
        let removedElem;
        beforeAll(() => {
          removedElem = dynamicArray.removeAt(indexToRemove);
        });
        it('returns the removed element', () => expect(removedElem).toBe(elem));
        it('removes the element from the array', () => {
          expect(dynamicArray.get(indexToRemove)).not.toBe(elem);
        });
        it('decreases the length of the array by 1', () => {
          expect(dynamicArray).toHaveLength(arrayLength - 1);
        });
      });
    });
    describe('#remove', () => {
      const elem = 'element';
      let arrayLength;
      beforeAll(() => {
        dynamicArray.add(elem);
        arrayLength = dynamicArray.length;
      });
      afterAll(() => dynamicArray.clear());
      describe('when passing non existent element', () => {
        it('returns false', () => {
          expect(dynamicArray.remove(`other ${elem}`)).toBe(false);
        });
      });
      describe('when passing existent element', () => {
        let removedElem;
        beforeAll(() => {
          removedElem = dynamicArray.remove(elem);
        });
        it('returns the removed element', () => expect(removedElem).toBe(elem));
        it('removes the element from the array', () => {
          expect(dynamicArray.get(0)).not.toBe(elem);
        });
        it('decreases the length of the array by 1', () => {
          expect(dynamicArray).toHaveLength(arrayLength - 1);
        });
      });
    });
    describe('#indexOf', () => {
      const elem = 'element';
      beforeAll(() => dynamicArray.add(elem));
      afterAll(() => dynamicArray.clear());
      describe('when passing non existent element', () => {
        it('returns -1', () => expect(dynamicArray.indexOf(`other ${elem}`)).toBe(-1));
      });
      describe('when passing existent element', () => {
        it('returns the index of the element', () => {
          expect(dynamicArray.indexOf(elem)).toBe(0);
        });
      });
    });
    describe('#contains', () => {
      const elem = 'element';
      beforeAll(() => dynamicArray.add(elem));
      afterAll(() => dynamicArray.clear());
      describe('when passing non existent element', () => {
        it('returns false', () => {
          expect(dynamicArray.contains(`other ${elem}`)).toBe(false);
        });
      });
      describe('when passing existent element', () => {
        it('returns true', () => {
          expect(dynamicArray.contains(elem)).toBe(true);
        });
      });
    });
  });
});
|
<reponame>Priapos1004/ML101<gh_stars>0
from azureml.core import Workspace

if __name__ == "__main__":
    # creates a resource group with a ml workspace in it
    workspace_name = "irony-ws"
    subscription_id = "..."  # insert your own subscription id
    resource_group = "irony"
    location = "West Europe"

    # Idempotent: exist_ok=True reuses an existing workspace with this name
    # instead of failing.
    azure_workspace = Workspace.create(
        name=workspace_name,
        subscription_id=subscription_id,
        resource_group=resource_group,
        location=location,
        create_resource_group=True,  # change to False if you already have a resource group
        exist_ok=True,
    )
|
#!/bin/bash
# Regenerate the gettext template from all Haskell sources and merge it into
# every existing .po translation file.

# Intentionally unquoted below: hgettext needs one argument per source file.
HS=$(find src/ batchd-core/src batchd-amazonka/src batchd-docker/src/ batchd-libvirt/src -name \*.hs)
stack exec -- hgettext -o po/batchd.pot -k __ -k __f -k __s -k __sf $HS

# Guard the cd (SC2164): running msgmerge in the wrong directory would
# silently operate on the wrong files.
cd po/ || exit 1
for PO in *.po
do
    # Quote the filename so .po files with spaces are handled correctly.
    msgmerge -U "$PO" batchd.pot
done
import { database } from "../services/firebase"
function Reference(ref: string) {
return(
database.ref(database.getDatabase(), ref)
)
}
export {Reference} |
/**
 * @author <NAME>
 */
import { connect } from 'react-redux';
import Chat from './chat';
import { loadMessages, putMessage } from './../../reducers/chat/actions';
import { signOut } from './../../reducers/session/actions';

// Expose the chat and session slices of the store as flat props.
function mapStateToProps(state) {
  return {
    ...state.chat,
    ...state.session,
  };
}

// Wrap the action creators so the component can dispatch them directly.
// (Shorthand method names do not shadow the imported action creators.)
function mapDispatchToProps(dispatch) {
  return {
    loadMessages() {
      dispatch(loadMessages());
    },
    putMessage(message, user) {
      dispatch(putMessage(message, user));
    },
    signOut() {
      dispatch(signOut());
    },
  };
}

export default connect(mapStateToProps, mapDispatchToProps)(Chat);
<reponame>richardmarston/cim4j<filename>CGMES_2.4.15_27JAN2020/cim4j/ExcREXSFeedbackSignalKind.java
package cim4j;
import java.util.Map;
import java.util.HashMap;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
/**
 * Type of rate feedback signals.
 *
 * Wraps the generated enumeration so it can be handled through the generic
 * {@link BaseClass} string-attribute interface used by the CIM reader.
 */
public class ExcREXSFeedbackSignalKind extends BaseClass
{
    private enum ExcREXSFeedbackSignalKind_ENUM
    {
        /**
         * The voltage regulator output voltage is used. It is the same as exciter field voltage.
         */
        fieldVoltage,
        /**
         * The exciter field current is used.
         */
        fieldCurrent,
        /**
         * The output voltage of the exciter is used.
         */
        outputVoltage,
        MAX_ExcREXSFeedbackSignalKind_ENUM;
    }

    private ExcREXSFeedbackSignalKind_ENUM value;

    /** Factory method used by the generic CIM object machinery. */
    public BaseClass construct() {
        return new ExcREXSFeedbackSignalKind();
    }

    public ExcREXSFeedbackSignalKind() {}

    /**
     * @param s enum literal name; surrounding whitespace is ignored
     */
    public ExcREXSFeedbackSignalKind(java.lang.String s) {
        setValue(s);
    }

    /**
     * Parses {@code s} into the wrapped enum value. An invalid literal is
     * logged and leaves the current value unchanged.
     */
    public void setValue(java.lang.String s) {
        try
        {
            value = ExcREXSFeedbackSignalKind_ENUM.valueOf(s.trim());
        }
        catch (IllegalArgumentException iae)
        {
            // Fixed: the message previously claimed "NumberFormatException".
            System.out.println("IllegalArgumentException: " + iae.getMessage());
        }
    }

    /** Sets the enum value from a string; the attribute name is ignored. */
    public void setAttribute(java.lang.String a, java.lang.String s) {
        // Delegate instead of duplicating the parsing / error handling.
        setValue(s);
    }

    public void setAttribute(java.lang.String attributeName, BaseClass value) {
        throw new IllegalArgumentException("ENUM cannot set attribute: " + attributeName);
    }

    private java.lang.String debugName = "ExcREXSFeedbackSignalKind";

    public java.lang.String debugString(){
        return debugName;
    }

    public java.lang.String toString(boolean b) {
        return "Enum (" + value.toString() + ")";
    }
}
|
// Sequelize models and helpers used by the user route handlers below.
const userModel = require('../models').model.User;
const bcrypt = require('bcrypt');
// One salt generated at module load and reused for every password hash.
const salt = bcrypt.genSaltSync(10);
const userRoleModel = require('../models').model.UserRole;
const fs = require('fs');
// Controller object: each property added below is an Express route handler.
const userCtrl = {};
/**
 * GET /users — lists users, optionally filtered by query string.
 * Supported filters: ?name= (matched against fullName) and ?email=.
 * Responds 200 with { success, data } or 400 on any error.
 */
userCtrl.getMany = async function (req, res, next) {
    try {
        const { name, email } = req.query;
        const whereQuery = {};
        // Only add filters the caller actually supplied; other query
        // parameters are ignored, matching the previous behavior.
        if (name !== undefined) {
            whereQuery.fullName = name;
        }
        if (email !== undefined) {
            whereQuery.email = email;
        }
        const user = await userModel.findAll({ where: whereQuery });
        return res.status(200).json({
            success: true,
            data: user
        })
    } catch (error) {
        console.log(error);
        return res.status(400).json({
            success: false,
            message: "Error when get all User!"
        });
    }
};
/**
 * GET /users/:id — fetches a single user by primary key.
 * Responds 200 with { success, data } (data is null when not found)
 * or 400 on any error.
 */
userCtrl.getById = async function (req, res, next) {
    try {
        const found = await userModel.findByPk(req.params.id);
        return res
            .status(200)
            .json({ success: true, data: found });
    } catch (err) {
        console.log(err);
        return res
            .status(400)
            .json({ success: false, message: "Error when get User by id!" });
    }
};
/**
 * POST /users — registers a new customer account.
 * Rejects duplicate emails, hashes the password, normalises the birth
 * date, and assigns the "Customer" role before creating the record.
 */
userCtrl.createData = async function (req, res, next) {
    try {
        let { email, password, dateOfBirth } = req.body;
        console.log("BODY:", JSON.stringify(req.body, null, 2));
        const role = await userRoleModel.findAll({ where: { type: "Customer" } });
        const user = await userModel.findOne({ where: { email } });
        if (user) {
            return res.status(400).json({
                success: false,
                message: "Email is existed!"
            });
        }
        // Store the bcrypt hash — never the plain-text password.
        const hash = bcrypt.hashSync(password, salt);
        req.body.password = hash;
        if (dateOfBirth) {
            // Fixed: the options object was previously passed as the *locales*
            // argument of toLocaleString, so the timeZone was silently ignored.
            // "VN" is also not an IANA zone — assuming Vietnam time
            // (Asia/Ho_Chi_Minh); TODO confirm with the callers.
            dateOfBirth = new Date(dateOfBirth).toLocaleString(undefined, { timeZone: "Asia/Ho_Chi_Minh" });
            req.body.dateOfBirth = dateOfBirth;
        }
        req.body.roleId = role[0].id;
        req.body.photo = '';
        await userModel.create(req.body);
        return res.status(200).json({
            success: true,
            message: "Create User successfully!"
        });
    } catch (error) {
        console.log(error);
        return res.status(400).json({
            success: false,
            message: "Error when create User!"
        });
    }
};
/**
 * PUT /users/:id — updates an existing user.
 * Re-hashes the password when one is supplied, normalises the birth date,
 * and — when a newly uploaded photo differs from the stored one — moves the
 * upload into uploads/ under a dash-separated file name.
 */
userCtrl.updateById = async function (req, res, next) {
    try {
        const { id } = req.params;
        const { password } = req.body;
        let { dateOfBirth } = req.body;
        const userInfo = await userModel.findByPk(id);
        if (password) {
            // Store the bcrypt hash — never the plain-text password.
            const hash = bcrypt.hashSync(password, salt);
            req.body.password = hash;
        }
        if (dateOfBirth) {
            // Fixed: the options object was previously passed as the *locales*
            // argument of toLocaleString, so the timeZone was silently ignored.
            // "VN" is also not an IANA zone — assuming Vietnam time
            // (Asia/Ho_Chi_Minh); TODO confirm with the callers.
            dateOfBirth = new Date(dateOfBirth).toLocaleString(undefined, { timeZone: "Asia/Ho_Chi_Minh" });
            req.body.dateOfBirth = dateOfBirth;
        }
        const processFile = req.file || {};
        let orgName = processFile.originalname || '';
        if (orgName !== '') {
            if (userInfo.photo !== orgName) {
                // Replace spaces so the name is URL-safe, then move the file
                // from the upload temp path into the public uploads folder.
                orgName = orgName.trim().replace(/ /g, '-');
                const fullPathInServer = processFile.path;
                const newFullPath = `uploads/${orgName}`;
                fs.renameSync(fullPathInServer, newFullPath);
                // Persist only the file name, without the "uploads/" prefix.
                req.body.photo = newFullPath.split("/")[1];
            }
        }
        await userModel.update(req.body, {
            where: { id }
        });
        return res.status(200).json({
            success: true,
            message: "Update User successfully!"
        });
    } catch (error) {
        console.log("ERROR:", error);
        return res.status(400).json({
            success: false,
            message: "Error when update User!"
        });
    }
};
/**
 * DELETE /users/:id — removes the user with the given primary key.
 * Responds 200 on success (even if no row matched) or 400 on any error.
 */
userCtrl.deleteById = async function (req, res, next) {
    try {
        const { id } = req.params;
        await userModel.destroy({ where: { id } });
        return res
            .status(200)
            .json({ success: true, message: "Delete User successfully!" });
    } catch (err) {
        console.log(err);
        return res
            .status(400)
            .json({ success: false, message: "Error when delete User!" });
    }
};

module.exports = userCtrl;
# Replace the shell with the configuration-server JVM so it receives
# signals directly (container entrypoint fragment).
exec java -jar /inspectit-ocelot-configurationserver.jar
#!/bin/bash
#SBATCH --account=def-dkulic
#SBATCH --gres=gpu:1 # request GPU generic resource
#SBATCH --cpus-per-task=2 #Maximum of CPU cores per GPU request: 6 on Cedar, 16 on Graham.
#SBATCH --mem=8000M # memory per node
#SBATCH --time=0-01:30 # time (DD-HH:MM)
#SBATCH --output=./job_script_output/Camera1_Sep_18_1430_1500_Prescribed_Behavior_1_%N-%j.out # %N for node name, %j for jobID
## Main processing command
## -v: path to the raw video file
## -o: directory to save processed video
## NOTE(review): the input clip is from Camera1 but the output directory is
## suffixed "_camera2" — confirm this mismatch is intentional.
python ./process_video_low_frequent_frame.py -v ../ROM_raw_videos_clips/Sep_18/Camera1_Sep_18_1430_1500_Prescribed_Behavior_1.mp4 -o ../ROM_raw_videos_clips_processed_camera2/Sep_18
|
<reponame>sho-darp/gotunes
package itunes

// MiniplayerWindow is the miniplayer window. It embeds Window, so all
// Window fields and methods are promoted onto it.
type MiniplayerWindow struct {
	Window
}
|
<reponame>damymetzke/NodeBuildUtilities<gh_stars>1-10
// Ambient typings for the untyped 'cck' package.
declare module 'cck'
{
    // The upstream API is declared with the broad `Object` type, so the
    // banned-types lint rule must be suppressed for this signature.
    // eslint-disable-next-line @typescript-eslint/ban-types
    function check(val: Object, checkType: string, checkPara?: string): boolean;
}
|
<filename>sandbox/src/main/java/de/cofinpro/techat/mvcozark/sandbox/controller/IndexController.java<gh_stars>0
package de.cofinpro.techat.mvcozark.sandbox.controller;
import de.cofinpro.techat.mvcozark.shared.annotation.RequestHeader;
import de.cofinpro.techat.mvcozark.shared.util.Utils;
import org.slf4j.Logger;
import javax.enterprise.context.RequestScoped;
import javax.inject.Inject;
import javax.mvc.Models;
import javax.mvc.Viewable;
import javax.mvc.annotation.Controller;
import javax.mvc.annotation.View;
import javax.servlet.http.HttpSession;
import javax.ws.rs.*;
import javax.ws.rs.core.Response;
/**
 * This controller demonstrates all return types through which views can be
 * selected:
 * - String
 * - void (with a @View annotation)
 * - Viewable (which overrides a @View annotation)
 * - Response
 * Created by <NAME> on 22.10.2017.
 */
@Controller
@Path("/")
@RequestScoped
public class IndexController {
    @Inject
    private Models models;
    @Inject
    private Logger logger;
    // Value of the "X-Cofinpro" request header, injected via the project's
    // @RequestHeader qualifier.
    @Inject @RequestHeader("X-Cofinpro")
    private String cofinproHeader;
    @Inject
    private HttpSession session;
    /**
     * View selected by returning the view path as a plain String.
     */
    @GET
    public String getDefault() {
        models.put("test", "helloWorld");
        return "/index/index.th.htm";
    }
    /**
     * View selected via the @View annotation on a void method; the query
     * parameter and header value are exposed to the view through the models.
     */
    @GET
    @Path("/somethings")
    @View("/index/somethingOne.th.html")
    public void getSomething(@QueryParam("param") @DefaultValue("none") String param, @HeaderParam("X-Cofinpro") String cofinpro) {
        models.put("param", param);
        models.put("hello", cofinproHeader);
        logger.info("Models = {}", Utils.toString(models));
    }
    /**
     * The returned Viewable overrides the (intentionally invalid) @View path.
     */
    @GET
    @Path("/somethings2")
    @View("/does/not/exist/and/will/be/overridden")
    public Viewable getSomething2(@QueryParam("param") @DefaultValue("none") String param) {
        models.put("param", param);
        session.setAttribute("test", param);
        return new Viewable("/index/somethingOne.th.html", models);
    }
    /**
     * The Response entity (the view path) overrides the (intentionally
     * invalid) @View path, and extra headers can be attached to the Response.
     */
    @GET
    @Path("/somethings3")
    @View("does/not/exist/and/will/be/overridden")
    public Response getSomething3(@QueryParam("param") @DefaultValue("foobar") String param) {
        models.put("aabbcc", param);
        return Response.ok().header("X-Cofinpro-Office", "Frankfurt").entity("/index/somethingOne.th.html").build();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.